I have some stored results in a .json file in this format:
(one item per line)
{"category": ["ctg1"], "pages": 3, "websites": ["x1.com","x2.com","x5.com"]}
{"category": ["ctg2"], "pages": 2, "websites": ["x1.com", "d4.com"]}
.
.
I have tried to remove the duplicate value without deleting the whole item but without success.
the code :
import scrapy
import json
import codecs
from scrapy.exceptions import DropItem
class ResultPipeline(object):
def __init__(self):
self.ids_seen = set()
self.file = codecs.open('results.json', 'w', encoding='utf-8')
def process_item(self, item, spider):
for sites in item['websites']:
if sites in self.ids_seen:
raise DropItem("Duplicate item found: %s" % sites)
else:
self.ids_seen.add(sites)
line = json.dumps(dict(item), ensure_ascii=False) + "\n"
self.file.write(line)
return item
def spider_closed(self, spider):
self.file.close()