I am using the bulk insertion code from the SQLAlchemy performance FAQ (http://docs.sqlalchemy.org/en/latest/faq/performance.html). With an SQLite connection string it works fine and the timings match those in the document, but running the same code against a PostgreSQL connection string makes the total time many times longer.
Is there any way to make it faster in PostgreSQL? What am I doing wrong here?
I am especially interested in bulk_insert_mappings and bulk_save_objects, which are my only options for inserting 370,000 rows.
PostgreSQL connection string:
connection_string = 'postgresql://' + conf.DB_USER + ':' + conf.DB_PASSWORD + '@' + \
                    conf.DB_HOST + ':' + conf.DB_PORT + '/' + conf.DB_NAME
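(As an aside, the same URL can be built with SQLAlchemy's URL helper instead of string concatenation; a minimal sketch, assuming the same conf settings module as above and the SQLAlchemy 1.x URL constructor:)

from sqlalchemy import create_engine
from sqlalchemy.engine.url import URL

# Equivalent to the concatenated string above; conf is my project's
# settings module, so these attribute names are assumptions.
connection_string = URL(
    drivername='postgresql',
    username=conf.DB_USER,
    password=conf.DB_PASSWORD,
    host=conf.DB_HOST,
    port=conf.DB_PORT,
    database=conf.DB_NAME,
)
engine = create_engine(connection_string, echo=False)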
Code used for checking performance:
import time
import sqlite3
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
Base = declarative_base()
DBSession = scoped_session(sessionmaker())
engine = None
class Customer(Base):
    __tablename__ = "customer"
    id = Column(Integer, primary_key=True)
    name = Column(String(255))
def init_sqlalchemy(dbname='sqlite:///sqlalchemy.db'):
    global engine
    # Note: the dbname argument is ignored here; the PostgreSQL
    # connection string is hard-coded for this test run.
    connection_string = 'postgresql://' + 'scott' + ':' + 'tiger' + '@' + \
                        'localhost' + ':' + '5432' + '/' + 'test_db'
    engine = create_engine(connection_string, echo=False)
    DBSession.remove()
    DBSession.configure(bind=engine, autoflush=False, expire_on_commit=False)
    Base.metadata.drop_all(engine)
    Base.metadata.create_all(engine)
def test_sqlalchemy_orm(n=100000):
    init_sqlalchemy()
    t0 = time.time()
    for i in xrange(n):
        customer = Customer()
        customer.name = 'NAME ' + str(i)
        DBSession.add(customer)
        if i % 1000 == 0:
            DBSession.flush()
    DBSession.commit()
    print(
        "SQLAlchemy ORM: Total time for " + str(n) +
        " records " + str(time.time() - t0) + " secs")
def test_sqlalchemy_orm_pk_given(n=100000):
    init_sqlalchemy()
    t0 = time.time()
    for i in xrange(n):
        customer = Customer(id=i+1, name="NAME " + str(i))
        DBSession.add(customer)
        if i % 1000 == 0:
            DBSession.flush()
    DBSession.commit()
    print(
        "SQLAlchemy ORM pk given: Total time for " + str(n) +
        " records " + str(time.time() - t0) + " secs")
def test_sqlalchemy_orm_bulk_save_objects(n=100000):
    init_sqlalchemy()
    t0 = time.time()
    n1 = n
    while n1 > 0:
        # Insert in chunks of at most 10,000 rows.
        chunk = min(10000, n1)
        n1 = n1 - chunk
        DBSession.bulk_save_objects(
            [
                Customer(name="NAME " + str(i))
                for i in xrange(chunk)
            ]
        )
    DBSession.commit()
    print(
        "SQLAlchemy ORM bulk_save_objects(): Total time for " + str(n) +
        " records " + str(time.time() - t0) + " secs")
def test_sqlalchemy_orm_bulk_insert(n=100000):
    init_sqlalchemy()
    t0 = time.time()
    n1 = n
    while n1 > 0:
        # Insert in chunks of at most 10,000 rows.
        chunk = min(10000, n1)
        n1 = n1 - chunk
        DBSession.bulk_insert_mappings(
            Customer,
            [
                dict(name="NAME " + str(i))
                for i in xrange(chunk)
            ]
        )
    DBSession.commit()
    print(
        "SQLAlchemy ORM bulk_insert_mappings(): Total time for " + str(n) +
        " records " + str(time.time() - t0) + " secs")
def test_sqlalchemy_core(n=100000):
    init_sqlalchemy()
    t0 = time.time()
    engine.execute(
        Customer.__table__.insert(),
        [{"name": 'NAME ' + str(i)} for i in xrange(n)]
    )
    print(
        "SQLAlchemy Core: Total time for " + str(n) +
        " records " + str(time.time() - t0) + " secs")
def init_sqlite3(dbname):
    conn = sqlite3.connect(dbname)
    c = conn.cursor()
    c.execute("DROP TABLE IF EXISTS customer")
    c.execute(
        "CREATE TABLE customer (id INTEGER NOT NULL, "
        "name VARCHAR(255), PRIMARY KEY(id))")
    conn.commit()
    return conn
def test_sqlite3(n=100000, dbname='sqlite3.db'):
    conn = init_sqlite3(dbname)
    c = conn.cursor()
    t0 = time.time()
    for i in xrange(n):
        row = ('NAME ' + str(i),)
        c.execute("INSERT INTO customer (name) VALUES (?)", row)
    conn.commit()
    print(
        "sqlite3: Total time for " + str(n) +
        " records " + str(time.time() - t0) + " sec")
if __name__ == '__main__':
    test_sqlalchemy_orm(100000)
    test_sqlalchemy_orm_pk_given(100000)
    test_sqlalchemy_orm_bulk_save_objects(100000)
    test_sqlalchemy_orm_bulk_insert(100000)
    test_sqlalchemy_core(100000)
    test_sqlite3(100000)
Output with the PostgreSQL connection string:
SQLAlchemy ORM: Total time for 100000 records 40.6781959534 secs
SQLAlchemy ORM pk given: Total time for 100000 records 21.0855250359 secs
SQLAlchemy ORM bulk_save_objects(): Total time for 100000 records 14.068707943 secs
SQLAlchemy ORM bulk_insert_mappings(): Total time for 100000 records 11.6551070213 secs
SQLAlchemy Core: Total time for 100000 records 12.5298728943 secs
sqlite3: Total time for 100000 records 0.477468013763 sec
Using the original connection string (i.e. SQLite), with the engine created from the dbname argument:
engine = create_engine(dbname, echo=False)
Output with the SQLite connection string:
SQLAlchemy ORM: Total time for 100000 records 16.9145789146 secs
SQLAlchemy ORM pk given: Total time for 100000 records 10.2713520527 secs
SQLAlchemy ORM bulk_save_objects(): Total time for 100000 records 3.69206118584 secs
SQLAlchemy ORM bulk_insert_mappings(): Total time for 100000 records 1.00701212883 secs
SQLAlchemy Core: Total time for 100000 records 0.467703104019 secs
sqlite3: Total time for 100000 records 0.566409826279 sec
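For reference, one lead I have found but not yet benchmarked is psycopg2's batched executemany support, which SQLAlchemy 1.2+ can enable when the engine is created; a minimal sketch (the flag is specific to the psycopg2 dialect, and all timings above were collected without it):

from sqlalchemy import create_engine

# Assumption: SQLAlchemy 1.2+ with the psycopg2 driver. With
# use_batch_mode=True, executemany() calls such as the ones issued by
# bulk_insert_mappings() and bulk_save_objects() go through
# psycopg2.extras.execute_batch(), which groups the INSERT statements
# into far fewer network round trips to the server.
engine = create_engine(
    'postgresql+psycopg2://scott:tiger@localhost:5432/test_db',
    use_batch_mode=True,
    echo=False,
)

Would something along these lines (or COPY) be the expected way to close the gap with the SQLite numbers?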