I managed to get it done with the following Python code:
import datetime
import logging
import os
import sqlalchemy
from google.cloud import storage
import pandas as pd
# Remember - storing secrets in plaintext is potentially unsafe. Consider using
# something like https://cloud.google.com/kms/ to help keep secrets secret.
# Read connection settings from the environment, falling back to the
# placeholder values so the sample still "runs" when the variables are unset.
# Replace the placeholders (or set the env vars) before deploying.
db_user = os.environ.get("DB_USER", "<DB_USER>")
db_pass = os.environ.get("DB_PASS", "<DB_PASS>")
db_name = os.environ.get("DB_NAME", "<DB_NAME>")
cloud_sql_connection_name = os.environ.get(
    "CLOUD_SQL_CONNECTION_NAME", "<Cloud SQL Instance Connection Name>")

logger = logging.getLogger()

# [START cloud_sql_postgres_sqlalchemy_create]
# Engine creation is lazy: no connection is made until .connect() is called.
db = sqlalchemy.create_engine(
    # Equivalent URL:
    # postgresql+pg8000://<db_user>:<db_pass>@/<db_name>?unix_sock=/cloudsql/<cloud_sql_instance_name>/.s.PGSQL.5432
    sqlalchemy.engine.url.URL(
        # 'postgresql' is the canonical dialect name; the bare 'postgres'
        # alias was removed in SQLAlchemy 1.4, while 'postgresql' works on
        # every version.
        drivername='postgresql+pg8000',
        username=db_user,
        password=db_pass,
        database=db_name,
        query={
            # Cloud SQL exposes a Unix socket at this well-known path
            # inside Cloud Functions / App Engine.
            'unix_sock': '/cloudsql/{}/.s.PGSQL.5432'.format(
                cloud_sql_connection_name)
        }
    ),
    # Connection-pool tuning.
    pool_size=5,
    max_overflow=2,
    pool_timeout=30,    # seconds to wait for a pooled connection
    pool_recycle=1800,  # recycle connections after 30 minutes
)
def read_source_data(request):
    """HTTP Cloud Function: export the ``sales`` table to a GCS text file.

    Args:
        request: The incoming request object. Unused, but required by the
            Cloud Functions HTTP signature.

    Returns:
        str: ``"Done!"`` when rows were exported, ``"Nothing!"`` when the
        table is empty.
    """
    # NOTE(review): fill in your bucket name. The original line was a bare
    # <YOUR_BUCKET_NAME> placeholder, which is a SyntaxError — it must be
    # a quoted string.
    bucket_name = "<YOUR_BUCKET_NAME>"
    folder_name = "sample_files"
    file_name = "test.txt"

    # Fetch everything first so the pooled connection is released before
    # the (potentially slow) upload to Cloud Storage.
    with db.connect() as conn:
        sales_records = conn.execute(
            "SELECT * FROM sales;"
        ).fetchall()

    if not sales_records:
        print("Nothing!")
        return "Nothing!"

    df = pd.DataFrame(sales_records)
    # Row proxies carry the column names; apply them to the frame.
    df.columns = sales_records[0].keys()
    # Build the object path from the variables above instead of
    # re-hardcoding "sample_files/test.txt".
    create_file(bucket_name, "{}/{}".format(folder_name, file_name), df)
    return "Done!"
def create_file(bucketname, path, records_read):
    """Upload a DataFrame as CSV text to a Cloud Storage object.

    Args:
        bucketname (str): Name of an existing GCS bucket.
        path (str): Object name to write, e.g. ``"sample_files/test.txt"``
            (slashes act as folder separators in the bucket).
        records_read (pandas.DataFrame): Rows to serialize and upload.
    """
    storage_client = storage.Client()
    bucket = storage_client.get_bucket(bucketname)
    # bucket.blob() is the idiomatic way to get a Blob handle bound to
    # this bucket (equivalent to constructing storage.Blob directly).
    blob = bucket.blob(path)
    # Serialize without the DataFrame's integer index column.
    content = records_read.to_csv(index=False)
    blob.upload_from_string(
        data=content,
        content_type='text/plain',
        client=storage_client,
    )
I stitched this together from multiple code snippets, and as someone who is not a Python developer, I'm fairly sure there are better ways of getting this done.
I then deployed my function using
gcloud deployment-manager deployments create