I am looping over a queue of 1 million XMLs. I parse every XML and insert its values into 5 ArrayLists, and each ArrayList loads data into a table in Oracle. After parsing 1,000 XMLs I load the data into the DB; at that point the data volume is around 500k-1M records for each table. The problem: after every load the insertion time grows, and after 3-4 loads the program barely moves anymore.

My questions:

1. Performance-wise, what is better: opening many DB connections and inserting small amounts of data, or opening few connections and inserting large amounts of data?
2. How can I reduce the insertion time?
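For context, the overall structure looks roughly like this (a simplified sketch; the method and list names are placeholders, not my real code):

// Simplified sketch of the outer loop (placeholder names).
int parsed = 0;
while (!xmlQueue.isEmpty()) {
    parseIntoLists(xmlQueue.poll()); // fills al1..al5, one ArrayList per target table
    if (++parsed % 1000 == 0) {      // every 1,000 XMLs, flush all five lists to Oracle
        loadParty(al4);              // the PARTY_TMP load shown below
        // ... same pattern for the other four tables ...
        clearLists();                // empty the ArrayLists before the next chunk
    }
}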
Here is an example of the load code for one of the tables:
//Party
sql = "INSERT INTO CRMAS_ODS_RAW_DATA.PARTY_TMP(BATCH_ID, SOURCE_SYSTEM, UPDATE_DATE, PARTY_KEY, TCCID, LEI_ID, NATIONAL_ID, OFF_SET) VALUES(?,?,?,?,?,?,?,?)";
ps = con.prepareStatement(sql);
timestamp = new Timestamp(System.currentTimeMillis());
count = 0;
con.setAutoCommit(false);
for (Party par : al4)
{
    ps.setInt(1, par.getA1());            // BATCH_ID
    ps.setString(2, par.getA2());         // SOURCE_SYSTEM
    ps.setTimestamp(3, timestamp);        // UPDATE_DATE
    ps.setString(4, par.getParty_key());  // PARTY_KEY
    ps.setInt(5, par.getA4());            // TCCID
    ps.setString(6, par.getLEI());        // LEI_ID
    ps.setString(7, par.getNationalId()); // NATIONAL_ID
    ps.setLong(8, par.getOffset());       // OFF_SET
    ps.addBatch();
    if (++count % Batch_party == 0) {     // Batch_party = rows per executeBatch()
        ps.executeBatch();
        timestamp = new Timestamp(System.currentTimeMillis());
    }
}
ps.executeBatch(); // insert remaining records
con.commit();      // single commit at the very end of the load
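One thing I am unsure about: with autoCommit off, nothing is committed until the whole load finishes, so the open transaction ends up spanning 500k-1M rows per table. Would committing after every executeBatch(), roughly as in this sketch, keep the insertion time stable? (The eight parameter-binding lines are the same as above.)

for (Party par : al4)
{
    // ... same eight ps.setXxx(...) calls as above ...
    ps.addBatch();
    if (++count % Batch_party == 0) {
        ps.executeBatch();
        con.commit(); // commit each batch so the open transaction never holds more than one batch
        timestamp = new Timestamp(System.currentTimeMillis());
    }
}
ps.executeBatch(); // insert remaining records
con.commit();
ps.close();        // release the statement once this table's load is done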