I am working on a Scrapy crawler, and this issue has been bothering me for days.
The placeholder construction works fine with "?" against the SQLite db. But after switching the database to MySQL, keeping "?" shows:
"TypeError: not all arguments converted during string formatting"
Even after putting a lot of effort into modifying the code and changing the placeholder to "%s" (which PyMySQL supposedly expects), it still shows:
"query = query % self._escape_args(args, conn) ValueError: unsupported format character ','"
More specifically:
Traceback (most recent call last):
File "/usr/lib64/python3.4/dist-packages/twisted/internet/defer.py", line 653, in _runCallbacks
current.result = callback(current.result, *args, **kw)
File "/home/ec2-user/lulu_testing/get_download_file/hello_scrapy/hello/hello/pipelines.py", line 42, in process_item
self.cur.execute(insert_query, insert_values)
File "/usr/lib/python3.4/dist-packages/pymysql/cursors.py", line 163, in execute
query = self.mogrify(query, args)
File "/usr/lib/python3.4/dist-packages/pymysql/cursors.py", line 142, in mogrify
query = query % self._escape_args(args, conn)
ValueError: unsupported format character ',' (0x2c) at index 94
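Both errors can be reproduced with plain %-formatting, since (as the traceback shows) PyMySQL builds the final query with query % args inside mogrify. A minimal sketch, no database needed:

# With "?" placeholders the query contains no %-conversions, so the
# supplied args are never consumed; hence the TypeError above.
try:
    "INSERT INTO t(c) VALUES(?)" % ("x",)
except TypeError as e:
    print(e)  # not all arguments converted during string formatting

# str.join() iterates over the CHARACTERS of its argument, so
# multiplying the two-character string "%s" splits every placeholder.
placeholders = ",".join(10 * "%s")
print(placeholders)  # %,s,%,s,%,s,%,s,%,s,%,s,%,s,%,s,%,s,%,s
# In the final query the first "%" is followed by "," at index 94,
# which matches the ValueError above.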
The pipeline, MySQL version:
import pymysql
import scrapy
from hello.items import HelloItem

class HelloPipeline(object):
    def __init__(self):
        self.conn = pymysql.connect(host="localhost", port=3306, user="root", passwd="lulu", db="test", charset="utf8", use_unicode=True)
        self.cur = self.conn.cursor()
        # Recreate the target table on every run.
        self.cur.execute("drop table if exists test_table_4;")
        self.conn.commit()
        self.cur.execute("create table if not exists test_table_4(test0 text, test1 text, test2 text, test3 text, test4 text, test5 text, test6 text, test7 text, test8 text, test9 text);")
        self.conn.commit()

    def process_item(self, item, spider):
        col = ",".join(item.keys())
        placeholders = ",".join(len(item) * "%s")
        insert_query = "INSERT INTO test_table_4({0}) VALUES({1});".format(col, placeholders)
        insert_values = tuple(item.values())
        self.cur.execute(insert_query, insert_values)  # line 42 in the traceback
        return item

    def close_spider(self, spider):
        self.conn.commit()  # commit pending inserts before closing
        self.cur.close()
        self.conn.close()
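If the intent was one "%s" per field, joining a list instead of the characters of a repeated string would keep each placeholder intact. A minimal sketch of what I assume the line was meant to do (num_fields is a stand-in for len(item)):

num_fields = 10  # stand-in for len(item)
placeholders = ",".join(["%s"] * num_fields)
print(placeholders)  # %s,%s,%s,%s,%s,%s,%s,%s,%s,%s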
The SQLite version (what I was using before):
import sqlite3
import scrapy
from hello.items import HelloItem

class HelloPipeline(object):
    def open_spider(self, spider):
        self.conn = sqlite3.connect("test_database_ver_2018_03_31.sqlite")
        self.cur = self.conn.cursor()
        self.cur.execute("create table if not exists test_table(test0 text, test1 text, test2 text, test3 text, test4 text, test5 text, test6 text, test7 text, test8 text, test9 text);")

    def close_spider(self, spider):
        self.conn.commit()
        self.conn.close()

    def process_item(self, item, spider):
        col = ",".join(item.keys())
        placeholders = ",".join(len(item) * "?")
        sql = "insert into test_table({}) values({})"
        self.cur.execute(sql.format(col, placeholders), tuple(item.values()))
        return item
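The same join only happens to work here because "?" is a single character: joining the characters of the repeated string still yields valid qmark placeholders.

placeholders = ",".join(10 * "?")  # 10 * "?" == "??????????"
print(placeholders)  # ?,?,?,?,?,?,?,?,?,?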
The item population in the main spider:
testitem = HelloItem()
testitem["test0"] = house_detail.select(".houseInfoTitle")[0].text
testitem["test1"] = house_detail.select(".pageView")[0].text
testitem["test2"] = house_detail.select(".detailInfo")[0].text
testitem["test3"] = house_detail.select(".houseIntro")[0].text
testitem["test4"] = house_detail.select(".lifeBox")[0].text
testitem["test5"] = house_detail.select(".labelList")[0].text
testitem["test6"] = house_detail.select(".facility")[0].text
testitem["test7"] = str(house_detail.select(".userInfo"))
testitem["test8"] = str(house_detail.select(".banner"))
testitem["test9"] = str(house_detail.select("#show"))
return testitem
The item definition:
import scrapy

class HelloItem(scrapy.Item):
    test0 = scrapy.Field()
    test1 = scrapy.Field()
    test2 = scrapy.Field()
    test3 = scrapy.Field()
    test4 = scrapy.Field()
    test5 = scrapy.Field()
    test6 = scrapy.Field()
    test7 = scrapy.Field()
    test8 = scrapy.Field()
    test9 = scrapy.Field()