I'm trying to parse a huge 12 GB JSON file (almost 5 million lines, each one a JSON object) in Python and store the data in a database. I'm using ijson and multiprocessing to make it run faster. Here is the code:
import json
import ijson
import numpy
import pandas as pd
from multiprocessing import Pool

# Venues, Papers and Ratings are my Django models; mydata is a global pandas
# DataFrame (defined elsewhere in the project) that parse() appends rows to.

def parse(paper):
    global mydata
    # Save the venue, with or without its type depending on what the record contains.
    if 'type' not in paper["venue"]:
        venue = Venues(venue_raw=paper["venue"]["raw"])
        venue.save()
    else:
        venue = Venues(venue_raw=paper["venue"]["raw"], venue_type=paper["venue"]["type"])
        venue.save()
    # Save the paper itself, linked to its venue.
    paper1 = Papers(paper_id=paper["id"], paper_title=paper["title"], venue=venue)
    paper1.save()
    # Re-serialize the authors list and stream it with ijson, adding one row per author.
    paper_authors = paper["authors"]
    paper_authors_json = json.dumps(paper_authors)
    obj = ijson.items(paper_authors_json, 'item')
    for author in obj:
        mydata = mydata.append({'author_id': author["id"], 'venue_raw': venue.venue_raw,
                                'year': paper["year"], 'number_of_times': 1}, ignore_index=True)

if __name__ == '__main__':
    p = Pool(4)
    filename = 'C:/Users/dintz/Documents/finaldata/dblp.v12.json'
    with open(filename, encoding='UTF-8') as infile:
        # Stream the top-level JSON array one paper object at a time and hand each one to the pool.
        papers = ijson.items(infile, 'item')
        for paper in papers:
            p.apply_async(parse, (paper,))
    p.close()
    p.join()

    # Aggregate the rows collected in mydata: counts per (author, venue, year),
    # then one weighted rating per (author, venue).
    mydata = mydata.groupby(by=['author_id', 'venue_raw', 'year'], axis=0, as_index=False).sum()
    mydata = mydata.groupby(by=['author_id', 'venue_raw'], axis=0, as_index=False, group_keys=False).apply(
        lambda x: sum((1 + x.year - x.year.min()) * numpy.log10(x.number_of_times + 1)))
    df = mydata.index.to_frame(index=False)
    df = pd.DataFrame({'author_id': df["author_id"], 'venue_raw': df["venue_raw"], 'rating': mydata.values[:, 2]})

    # Store one rating per (author, venue) in the database.
    for index, row in df.iterrows():
        author_id = row['author_id']
        venue = Venues.objects.get(venue_raw=row['venue_raw'])
        rating = Ratings(author_id=author_id, venue=venue, rating=row['rating'])
        rating.save()
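To make clearer what the two groupby calls at the end are supposed to compute, here is a small self-contained example with made-up data (no database or multiprocessing involved, just the rating formula):

import numpy
import pandas as pd

# Made-up rows of the kind parse() appends to mydata: one row per author appearance.
mydata = pd.DataFrame({
    'author_id':       [1, 1, 1, 2],
    'venue_raw':       ['ICML', 'ICML', 'ICML', 'ICML'],
    'year':            [2018, 2018, 2019, 2019],
    'number_of_times': [1, 1, 1, 1],
})

# Step 1: total appearances per (author, venue, year).
counts = mydata.groupby(by=['author_id', 'venue_raw', 'year'], as_index=False).sum()

# Step 2: per (author, venue), later years get a bigger weight and the counts are damped with log10.
rating = counts.groupby(by=['author_id', 'venue_raw']).apply(
    lambda x: sum((1 + x.year - x.year.min()) * numpy.log10(x.number_of_times + 1)))

print(rating)
# author 1: 1*log10(2+1) + 2*log10(1+1)  ->  about 1.08
# author 2: 1*log10(1+1)                 ->  about 0.30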
However, when I run the full script I get an error and I can't figure out what's causing it.
Can somebody help me?
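For completeness, the Django models involved look roughly like this (a simplified reconstruction from the code above; the real field definitions may differ):

from django.db import models

class Venues(models.Model):
    venue_raw = models.CharField(max_length=255)
    venue_type = models.CharField(max_length=255, null=True)

class Papers(models.Model):
    paper_id = models.IntegerField()
    paper_title = models.CharField(max_length=255)
    venue = models.ForeignKey(Venues, on_delete=models.CASCADE)

class Ratings(models.Model):
    author_id = models.IntegerField()
    venue = models.ForeignKey(Venues, on_delete=models.CASCADE)
    rating = models.FloatField()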