
I am about to finish building an app that consumes data from a REST API. The application works with CLIENTS, ORDERS and PRODUCTS: it fetches them from the REST API and inserts them into the local database (SQLite).

I need to know an efficient way to store the data that arrives as an array. The first time, the API returns 48,000 clients, inside an array of arrays. The problem is that inserting them ITEM by ITEM makes the application freeze, or at least take a very long time.

What is the most efficient way to do this?

I say "only the first time" because, starting from the highest values already stored, the following synchronizations pick up from there (and only bring back the updated or new records).
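Simplified, that incremental part is something like this (a sketch only; I use ORA_ROWSCN as the tracking column and omit how I actually call the API):

// Simplified sketch of the incremental sync: take the highest ORA_ROWSCN
// already stored locally and ask the REST API only for newer rows.
this.SQLite_query('SELECT MAX(ORA_ROWSCN) AS last_scn FROM clients', []).then((data) => {
    let lastScn = data['rows'].item(0)['last_scn'] || 0;
    // Call the REST API passing lastScn so it only returns new or updated
    // clients, and insert just those rows.
});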

The query is:

INSERT OR REPLACE INTO orders (id, C_IDPEDIDO, N_NROPEDIDO, C_CLIENTE, N_NROAUTORIZACION, FH_AUTORIZACION, FH_BAJA, C_IDFACTNOTA, N_ESTADO, X_OBSERVACION, FH_INCORPORACION, ORA_ROWSCN, state, error) VALUES ((SELECT id FROM orders WHERE C_IDPEDIDO=?), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?);

But, as I mentioned, inserting one row at a time either freezes the app or takes far too long.

Since those queries return promises, I wait for all of them with Observable.forkJoin, and once they have all finished I mark the synchronization as complete.

Here I loop over the clients that came from the web service in "clients: []".

The code is:

let promises = [];
for(let key = 0; key < clients.length; key++) {
    // Insert the client.
    promises.push(this.SQLite_query('INSERT OR REPLACE INTO clients (id, C_CLIENTE, X_APELLIDOCLI, X_NOMBRES, X_DOMICCLI, X_LOCALIDAD, X_PROVINCIA, X_NROCUIT, X_CONDICIONIMP, ORA_ROWSCN) VALUES ((SELECT id FROM clients WHERE C_CLIENTE=?), ?, ?, ?, ?, ?, ?, ?, ?, ?)', [clients[key]['C_CLIENTE'], clients[key]['C_CLIENTE'], clients[key]['X_APELLIDOCLI'], clients[key]['X_NOMBRES'], clients[key]['X_DOMICCLI'], clients[key]['X_LOCALIDAD'], clients[key]['X_PROVINCIA'], clients[key]['X_NROCUIT'], clients[key]['X_CONDICIONIMP'], clients[key]['ORA_ROWSCN']]).then((data) => {
        // "{"rows":{"length":0},"rowsAffected":1,"insertId":1}
        if(data['rowsAffected']) {
            // The row was inserted.
            ret['inserted'].push(clients[key]['C_CLIENTE']);
        } else {
            // Not inserted, so record the client as already existing.
            ret['existents'].push(clients[key]['C_CLIENTE']);   
        }
    }, (error) => {
        // On error, record the client in the error list.
        ret['error'].push(clients[key]['C_CLIENTE']);
    }));
}
Observable.forkJoin(promises).subscribe((data) => {
    // When all the promises are over, we return the data.
    resolve(ret)
});

I really need to solve this, since it is eating up my time.

I am stuck on this part; it is the only thing left before I can complete the app.

Thanks in advance to everyone!

1 Answer


Take a look at cordova-sqlite-porter, specifically the section on JSON import optimisations.

I created it to make importing/exporting data from SQLite DBs easy, but also to optimise the import of large numbers of records from a JSON structure.

By using the UNION SELECT method described here, I was able to make the import 100 times faster.
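To give an idea of the statement shape that produces, here is a rough illustration with made-up values (only three columns shown for brevity; the real statement lists every column of the row):

// Rough illustration: one batched statement per group of rows instead of one
// INSERT per row. Values and the three-column shape are made up for brevity.
var exampleBatch =
    "INSERT OR REPLACE INTO clients" +
    " SELECT '00001' AS 'C_CLIENTE', 'PEREZ' AS 'X_APELLIDOCLI', 'JUAN' AS 'X_NOMBRES'" +
    " UNION SELECT '00002', 'GOMEZ', 'MARIA'" +
    " UNION SELECT '00003', 'LOPEZ', 'ANA';";

SQLite executes each such batch as a single statement, which is what makes the import so much faster than thousands of individual INSERTs.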

You could use the plugin directly via importJsonToDb().
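For example, something along these lines (a sketch; check the plugin's README for the exact option and callback names):

// Sketch: open the database and hand the rows from the webservice to the
// plugin, which batches the INSERTs for you.
var db = window.sqlitePlugin.openDatabase({name: "my.db", location: "default"});

// The plugin expects one array of row objects per table under data.inserts.
var json = {
    data: {
        inserts: {
            clients: clients
        }
    }
};

cordova.plugins.sqlitePorter.importJsonToDb(db, json, {
    batchInsertSize: 250,
    successFn: function() {
        console.log("Import finished");
    },
    errorFn: function(error) {
        console.error("Import failed", error);
    }
});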

Or use the relevant bit of code that batches the INSERTs to create your own implementation:

// Default maximum number of statements to use for batch inserts for bulk importing data via JSON.
var DEFAULT_BATCH_INSERT_SIZE = 250;

// Statement separator
var separator = ";\n";

var mainSql = "";
var batchInsertSize = opts.batchInsertSize ? opts.batchInsertSize : DEFAULT_BATCH_INSERT_SIZE;

if(json.data.inserts){
    for(var tableName in json.data.inserts){
        var _count = 0;
        for(var i=0; i<json.data.inserts[tableName].length; i++){
            if(_count === batchInsertSize){
                mainSql += separator;
                _count = 0;
            }

            var _row = json.data.inserts[tableName][i];
            var _fields = [];
            var _values = [];
            for(var col in _row){
                _fields.push(col);
                _values.push(sanitiseForSql(_row[col]));
            }

            if(_count === 0){
                mainSql += "INSERT OR REPLACE INTO " + sqlEscape(tableName) + " SELECT";
                for(var j = 0; j < _fields.length; j++){
                    if(typeof _values[j] === "undefined" || _values[j] === null || _values[j].toLowerCase() == 'null'){
                        mainSql += " NULL AS '" + _fields[j] + "'";
                    }else{
                        mainSql += " '" + _values[j] + "' AS '" + _fields[j] + "'";
                    }
                    if(j < _fields.length-1){
                        mainSql += ",";
                    }
                }
            }else{
                mainSql += " UNION SELECT ";
                for(var j = 0; j < _values.length; j++){
                    if(typeof _values[j] === "undefined" || _values[j] === null || _values[j].toLowerCase() == 'null'){
                        mainSql += " NULL";
                    }else{
                        mainSql += " '" + _values[j] + "'";
                    }
                    if(j < _values.length-1){
                        mainSql += ",";
                    }
                }
            }
            _count++;
        }
        mainSql += separator;
    }
}

/**
 * Sanitises a value for insertion into a SQL statement.
 * Replace occurrences of 1 single quote with 2 single quotes to SQL-escape them.
 * @param {string} value - unsanitised value
 * @returns {string} sanitised value
 */
function sanitiseForSql(value){
    if (value === null || value === undefined) { return null; }
    return (value+"").replace(/'/g,"''");
}

/**
 * Escapes the given value if it contains special characters by wrapping it with back-ticks: value => `value`.
 * @param {string} value - unescaped value
 * @return {string} escaped value
 */
function sqlEscape(value){
    if(value.match(/[_-]+/)){
        value = '`' + value + '`';
    }
    return value;
}
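If you build your own implementation from the snippet above, the resulting mainSql still has to be executed; a minimal sketch (assuming db is a database handle opened with the Cordova SQLite plugin) is to split it on the separator and run every batched statement inside a single transaction:

// Sketch: execute each batched statement (up to 250 rows each) inside one
// transaction, so the whole import is a single commit.
var statements = mainSql.split(separator).filter(function(sql) {
    return sql.trim().length > 0;
});

db.transaction(function(tx) {
    statements.forEach(function(sql) {
        tx.executeSql(sql);
    });
}, function(error) {
    console.error("Bulk import failed", error);
}, function() {
    console.log("Bulk import finished (" + statements.length + " batched statements)");
});

Keeping everything in one transaction matters almost as much as the batching itself, since SQLite otherwise commits (and hits the disk) once per statement.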
DaveAlden