I am trying to return some tables from my database as GeoJSON to Leaflet. Each table has about 4,000 rows, but rendering the GeoJSON on my Leaflet map is very slow. Am I running the query the wrong way? Is there a way to improve performance?
import collections
import json

import psycopg2
from flask import render_template

# hostname, username, password, database and the Flask `app` object are defined elsewhere in the script

def layers():
    conn = psycopg2.connect(host=hostname, user=username, password=password, dbname=database)
    c = conn.cursor()

    # Flood zones: build one GeoJSON Feature dict per row
    c.execute("SELECT layer, type, ST_AsGeoJSON(geom4326) FROM eng_floodzone2")
    rows = c.fetchall()
    objects_list = []
    for row in rows:
        varcons = collections.OrderedDict()
        varcons["type"] = "Feature"
        varcons["name"] = row[0]
        varcons["properties"] = {"name": row[0]}
        varcons["geometry"] = json.loads(row[2])
        objects_list.append(varcons)
    getfloodzone2 = json.dumps(objects_list)

    # SACs: same pattern for the second table
    c.execute("SELECT sac_name, ST_AsGeoJSON(geom4326) FROM eng_sac_pg")
    rows = c.fetchall()
    objects_list = []
    for row in rows:
        varcons = collections.OrderedDict()
        varcons["type"] = "Feature"
        varcons["name"] = row[0]
        varcons["properties"] = {"SAC_NAME": row[0]}
        varcons["geometry"] = json.loads(row[1])
        objects_list.append(varcons)
    getsac = json.dumps(objects_list)

    # Close the cursor and connection before returning
    c.close()
    conn.close()

    with app.app_context():
        return render_template('index2.html',
                               floodzone2=getfloodzone2,
                               sac=getsac)

if __name__ == '__main__':
    app.run(debug=True, use_reloader=False, threaded=True)
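For reference, each entry in the two JSON strings passed to the template ends up looking roughly like this (the name, geometry type, and coordinates below are placeholder values, not data from my tables):

# Shape of one element of objects_list after the loops above
# (name and coordinates are placeholders; real values come from row[0] and ST_AsGeoJSON(geom4326))
example_feature = {
    "type": "Feature",
    "name": "example zone",
    "properties": {"name": "example zone"},
    "geometry": {
        "type": "MultiPolygon",  # assuming polygon geometries; the actual type depends on geom4326
        "coordinates": [[[[-1.50, 53.80], [-1.49, 53.80], [-1.49, 53.81], [-1.50, 53.80]]]],
    },
}

Each of the two strings holds around 4,000 of these feature objects, and that is what index2.html hands to Leaflet.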