I have a SQLite database that I've built, and it gets both added to and updated on a weekly basis. The issue is that the update seems to take a very long time (roughly 2 hours, even without the transactions table), and I'm hoping there is a faster way to do this. The script reads from a spreadsheet (Count.xlsx) and updates the database line by line in a loop.
An example data entry would be:
JohnDoe123 018238e1f5092c66d896906bfbcf9abf5abe978975a8852eb3a78871e16b4268
The code that I use is:
# Updates the sha for one (ID, operator) pair in all three tables.
def update_sha(conn, sha, ID, op):
    """Set the sha column for the row matching ID and operator in the
    reported, blocked and transactions tables.

    Does NOT commit: the caller owns the transaction (e.g. wraps calls
    in ``with conn:``), so many row updates can share one commit.
    Committing once per row was the main cause of the multi-hour
    weekly update.
    """
    sql_update_reported = 'UPDATE reported SET sha = ? WHERE ID = ? AND operator = ?'
    sql_update_blocked = 'UPDATE blocked SET sha = ? WHERE ID = ? AND operator = ?'
    sql_update_trans = 'UPDATE transactions SET sha = ? WHERE ID = ? AND operator = ?'
    data = (sha, ID, op)
    cur = conn.cursor()
    # Parameterized statements: safe against injection and cached by sqlite3.
    cur.execute(sql_update_reported, data)
    cur.execute(sql_update_blocked, data)
    cur.execute(sql_update_trans, data)
def Count(conn):
    """Read Count.xlsx and apply every (Sha, ID, op) row to the database.

    Fixes two defects in the original:
    * the loop called ``update_dupi``, which is not defined anywhere in
      this file (NameError) -- it now calls ``update_sha``;
    * each row was wrapped in its own ``with conn:`` block, i.e. one
      commit per row.  All rows now run inside a single transaction,
      which is the standard way to make bulk sqlite updates fast.
    """
    # Load the sheet, forcing IDs to str so they compare correctly
    # against the TEXT columns in the database.
    df = pd.read_excel("Count.xlsx", engine='openpyxl', converters={'ID': str})
    # One transaction for the whole batch: commit happens once on exit.
    with conn:
        for i in df.index:
            print(i)  # progress indicator
            update_sha(conn, df['Sha'][i], df['ID'][i], df['op'][i])
if __name__ == '__main__':
    # Open the database, run the weekly update, and always release the
    # connection, even if the update raises.
    db_conn = create_connection(database)
    try:
        print("Updating Now..")
        Count(db_conn)
    finally:
        db_conn.close()