I am trying to wrap a working SPARQL query in Python code to avoid a time-out issue, but I get the error `JSONDecodeError: Expecting value: line 1 column 1 (char 0)`. Could someone check what's wrong here? I feel like I need to catch the exception somewhere, but I'm not sure how:
#! /usr/bin/env python3
from qwikidata.sparql import return_sparql_query_results
import json
# Run a SPARQL query against the Wikidata endpoint and save the results
# as pretty-printed JSON.
#
# NOTE(review): the reported "JSONDecodeError: Expecting value: line 1
# column 1 (char 0)" occurs when the endpoint answers with a non-JSON
# body (typically an HTML time-out or rate-limit page) and qwikidata
# tries to parse it as JSON. The try/except below turns that into a
# clear, actionable error message instead of a raw traceback.
query = """
SELECT DISTINCT ?poi ?itemLabel_es ?itemLabel_en ?itemLabel_ceb ?itemLabel_ca ?itemLabel_gl ?itemLabel_eu ?itemLabel_ru ?itemLabel_uk ?typeLabel ?localizationLabel WHERE {
#here change: wd:Q for country
?poi (p:P17/ps:P17) wd:Q29.
#here change: adapt all UNION statements, remove which not needed, change wd:Q for feature types for UNION statements
{ ?poi p:P31/ps:P31/wdt:P279* wd:Q10354598. }
UNION { ?poi p:P31/ps:P31/wdt:P279* wd:Q532. }
UNION { ?poi p:P31/ps:P31/wdt:P279* wd:Q4632675. }
UNION { ?poi p:P31/ps:P31/wdt:P279* wd:Q699405. }
UNION { ?poi p:P31/ps:P31/wdt:P279* wd:Q5084. }
#here change: adapt all language lables, remove which not needed, language code can be taken from wikidata
OPTIONAL {
?poi rdfs:label ?itemLabel_es.
FILTER((LANG(?itemLabel_es)) = "es")
}
OPTIONAL {
?poi rdfs:label ?itemLabel_en.
FILTER((LANG(?itemLabel_en)) = "en")
}
OPTIONAL {
?poi rdfs:label ?itemLabel_ceb.
FILTER((LANG(?itemLabel_ceb)) = "ceb")
}
OPTIONAL {
?poi rdfs:label ?itemLabel_ca.
FILTER((LANG(?itemLabel_ca)) = "ca")
}
OPTIONAL {
?poi rdfs:label ?itemLabel_gl.
FILTER((LANG(?itemLabel_gl)) = "gl")
}
OPTIONAL {
?poi rdfs:label ?itemLabel_eu.
FILTER((LANG(?itemLabel_eu)) = "eu")
}
OPTIONAL {
?poi rdfs:label ?itemLabel_ru.
FILTER((LANG(?itemLabel_ru)) = "ru")
}
OPTIONAL {
?poi rdfs:label ?itemLabel_uk.
FILTER((LANG(?itemLabel_uk)) = "uk")
}
OPTIONAL{
?poi p:P31/ps:P31 ?type
SERVICE wikibase:label { ?type rdfs:label ?typeLabel . bd:serviceParam wikibase:language "[AUTO_LANGUAGE],en". } }
OPTIONAL{
?poi p:P131/ps:P131 ?localization
SERVICE wikibase:label { ?localization rdfs:label ?localizationLabel . bd:serviceParam wikibase:language "[AUTO_LANGUAGE],en". } }
}
Limit 50000
"""

try:
    data = return_sparql_query_results(query)
except json.JSONDecodeError as err:
    # The endpoint did not return JSON — the query timed out or the
    # request was throttled. Exit with a clear message rather than
    # crashing with an unexplained traceback.
    raise SystemExit(
        f"Wikidata did not return JSON (query likely timed out "
        f"or was rate-limited); try narrowing the query: {err}"
    )

# `data` is already a parsed Python dict — no dumps/loads round-trip is
# needed. Write it once, keeping non-ASCII labels readable in the file.
with open('data_smple.txt', 'w', encoding='utf-8') as target_file:
    json.dump(data, target_file, ensure_ascii=False, indent=4)

# Removed: `data[24257164-20:24257164+20]` — `data` is a dict, so slicing
# it raises TypeError; that debug line could never work.