LEDU'S POPULAR FUNCTIONS
Overall imports
import os
import requests
import json
import pandas as pd
Run to view results
Snowflake query executor
'''
snowflake database connection
'''
import snowflake.connector
def executeQuery(role, database, schema, sql):
    """Run *sql* on Snowflake and return the result set as a DataFrame.

    Args:
        role: Snowflake role to assume for the session.
        database: target database name.
        schema: target schema name.
        sql: SQL statement to execute.

    Returns:
        pandas.DataFrame with the query results, columns taken from the
        cursor description (so the frame mirrors the SELECT list even
        when the result set is empty).
    """
    # SECURITY NOTE(review): credentials were hard-coded here and are now
    # leaked in source history — rotate them. Environment variables take
    # precedence; the old literals remain only as a backward-compatible
    # fallback and should be removed once the env vars are deployed.
    conn = snowflake.connector.connect(
        account=os.environ.get('SNOWFLAKE_ACCOUNT', 'gfa04036.us-east-1'),
        user=os.environ.get('SNOWFLAKE_USER', 'LEDUAR.FILHO@SOUTUL.COM.BR'),
        password=os.environ.get('SNOWFLAKE_PASSWORD', 'Z3aV&%n7z5C&2APd#IoeC@M2'),
        database=database,
        schema=schema,
        role=role
    )
    try:
        cursor = conn.cursor()
        try:
            cursor.execute(sql)
            results = cursor.fetchall()
            df = pd.DataFrame(results, columns=[desc[0] for desc in cursor.description])
        finally:
            cursor.close()
    finally:
        # Always release the connection, even when the query raises —
        # the original leaked the connection on any error.
        conn.close()
    return df
Run to view results
MessageBird WhatsApp Messages
'''
MessageBird connection to send a WhatsApp message from an approved template
(the endpoint is conversations.messagebird.com)
'''
def send_message(phone_number, template_name):
    """Send a WhatsApp HSM template message through the MessageBird API.

    Args:
        phone_number: destination number (used as the "to" field).
        template_name: name of the approved WhatsApp template to send.

    Side effects:
        Appends a row (timestamp, phone, template, HTTP status) to
        'logFile.csv' and prints the API status code and JSON response.
    """
    # Local imports fix a latent NameError: `csv` is never imported at file
    # level, and the file's top-level `import datetime` would not provide a
    # bare `datetime.now()`.
    import csv
    from datetime import datetime

    # MessageBird conversations endpoint.
    url = "https://conversations.messagebird.com/v1/send"
    # SECURITY NOTE(review): access key hard-coded in source — rotate it and
    # load it from the environment or a secret store.
    headers = {
        "Authorization": "AccessKey TOn8N7CChzfvcOJWl6gGGuusJ",
        "Content-Type": "application/json"
    }
    body = {
        "content": {
            "hsm": {
                "namespace": "9a40e598_c4b1_4ffa_9e58_f7f7df6ab8c2",
                "language": {
                    "code": "es"
                },
                "templateName": template_name
            }
        },
        "to": phone_number,
        "type": "hsm",
        "from": "d0b4b702-e615-4226-8d8e-32267b68a693"
    }
    response = requests.post(url, headers=headers, json=body)
    # Append an audit line so sent messages can be traced later.
    with open('logFile.csv', 'a', newline='') as log_file:
        writer = csv.writer(log_file)
        writer.writerow([datetime.now(), phone_number, template_name, response.status_code])
    # Print the response for interactive (notebook) feedback.
    print(response.status_code)
    print(response.json())
    return
#just to test
def send_messageTST(phone_number, template_name):
    """Dry-run stand-in for send_message: echoes the arguments, sends nothing."""
    for label, value in (('phone_number', phone_number), ('template_name', template_name)):
        print(label, value)
    return
Run to view results
Bucket files on the google cloud
'''
Work with bucket on the google cloud
Account: leduatan@gmail.com
'''
from google.cloud import storage
#google cloud info
# Point the Google client libraries at the service-account key file, then
# create a module-level Storage client shared by the helpers below.
os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = "/work/googleCloudStorageKey.json"
clientStorage = storage.Client()
def getFileInBucket(bucket_name, folder, fileName = None):
    """Download and parse a JSON file from a Google Cloud Storage bucket.

    Args:
        bucket_name: name of the GCS bucket.
        folder: prefix (folder) inside the bucket.
        fileName: optional file name under *folder*; when omitted, the
            most recently created blob under the prefix is used.

    Returns:
        The parsed JSON content of the selected blob.

    Raises:
        FileNotFoundError: when no fileName is given and the prefix holds
            no blobs (previously this crashed with a bare IndexError).
    """
    bucket = clientStorage.get_bucket(bucket_name)
    if fileName is None:
        # No name given: pick the newest blob under the prefix.
        blobs = sorted(
            bucket.list_blobs(prefix=folder),
            key=lambda blob: blob.time_created,
            reverse=True,
        )
        if not blobs:
            raise FileNotFoundError(
                f"No files found under '{folder}' in bucket '{bucket_name}'"
            )
        blob = blobs[0]
    else:
        # A file name was specified: fetch exactly that object.
        blob = bucket.blob(f"{folder}/{fileName}")
    # Download and parse the JSON content.
    content = blob.download_as_string()
    data = json.loads(content)
    return data
Run to view results
Work with google sheets
'''
Work with google sheets
Account: leduar.filho@soutul.com.br
IMPORTANT: grant this email full access in the sheet's sharing permissions:
creditrequestbr@tulbr-377822.iam.gserviceaccount.com
'''
import gspread
from oauth2client.service_account import ServiceAccountCredentials
# OAuth scopes: spreadsheet feed plus Drive access (both needed by gspread).
scope = ['https://spreadsheets.google.com/feeds', 'https://www.googleapis.com/auth/drive']
# Module-level gspread client built from the service-account key file;
# shared by the sheet helper functions below.
creds = ServiceAccountCredentials.from_json_keyfile_name('/work/googleSheetsKey.json', scope)
clientSheets = gspread.authorize(creds)
# read google sheets document
def readSheets(sheet_id, tab_name):
    """Read a Google Sheets tab into a DataFrame.

    Args:
        sheet_id: spreadsheet key (the long id in the sheet URL).
        tab_name: worksheet (tab) name.

    Returns:
        pandas.DataFrame of the tab's records, or None when the
        spreadsheet or tab cannot be accessed.
    """
    try:
        # Access the worksheet, then the tab by name.
        spreadsheet = clientSheets.open_by_key(sheet_id)
        sheet = spreadsheet.worksheet(tab_name)
        # Read the data.
        data = sheet.get_all_records()
        df = pd.DataFrame(data)
        return df
    except gspread.exceptions.WorksheetNotFound:
        # worksheet() raises on a missing tab instead of returning a falsy
        # value, so the original `if not sheet` check could never fire —
        # the miss must be handled here.
        print(f"No worksheet found with name: {tab_name}")
        return None
    except gspread.exceptions.APIError:
        print("Sheet not found!")
        return None
#write google sheets document
def writeSheets(df, sheet_id, tab_name):
    """Replace the contents of a Google Sheets tab with *df*.

    Opens the spreadsheet by key, clears the named worksheet, then writes
    the DataFrame's header row followed by all of its data rows.
    """
    workbook = clientSheets.open_by_key(sheet_id)
    worksheet = workbook.worksheet(tab_name)
    # Wipe whatever is currently on the tab before rewriting it.
    worksheet.clear()
    header = [df.columns.values.tolist()]
    rows = df.values.tolist()
    worksheet.update(header + rows)
    return
#examples
#df = readSheets('1SuDuALDnURyWBcsyRf5ivyl65hkA0Z1qhf88OabQlBc', 'Construja_API')
#writeSheets(df, '1SuDuALDnURyWBcsyRf5ivyl65hkA0Z1qhf88OabQlBc', 'CasaLojista_API')
Run to view results
Send a message by slack
#pending
Run to view results
Notebook code
#General import
import pandas as pd
import json
from google.cloud import storage
import os
import datetime
import pytz
import requests
# API Call: fetch the Fera product feed and archive the raw JSON in GCS.
url = "https://api-fera.azurewebsites.net/api/WebProdutos"
# SECURITY NOTE(review): API key committed to source — rotate it and load it
# from the environment or a secret store.
headers = {
    'x-api-key': '01A114026716FDD9CCE841D52BC4AA9AF4D7BB56AC9A15C3DFC4B47BD20EED3E'
}
response = requests.get(url, headers=headers)
if response.status_code == 200:
    data = response.json()
    # Upload the response string to Google Cloud Storage
    client = storage.Client()
    bucket_name = "tul-providers"
    bucket = client.get_bucket(bucket_name)
    folder_name = "fera"
    now = datetime.datetime.now()
    # Timestamped object name so each run archives a distinct snapshot.
    filename = "fera_" + str(now) + ".json"
    # BUG FIX: the blob name previously interpolated a literal placeholder
    # instead of the computed file name, so `filename` was never used and
    # every upload targeted the same malformed object path.
    blob_name = f"{folder_name}/{filename}"
    blob = bucket.blob(blob_name)
    blob.upload_from_string(json.dumps(data))
else:
    print(f"Request failed with status code {response.status_code}")
Run to view results