I'm experiencing very bad performance and crashes with Linq2indexedDB when bulk inserting this way:
// NOTE(review): each insert() likely opens its own transaction in
// Linq2indexedDB — presumably the source of the slowdown; verify against the
// library internals.
// Also note the deferred is wired to EVERY insert, so it settles on the
// first insert that completes (subsequent resolve/reject calls on an
// already-settled deferred are no-ops) — it does not wait for all inserts.
for(var i=0;i<clients.length;i++) {
db.from(config.objectStoreName).insert(clients[i]).then(function(args){
deferred.resolve(clients.length);
}, function(args){
deferred.reject("Client item couldn't be added!");
});
}
When doing something very similar with the native indexedDB, it's working fine:
// All puts are queued on a single "readwrite" transaction, which is why the
// native version stays fast even for large arrays.
var store = db.transaction(["client"], "readwrite").objectStore("client");
for(var i=0;i<clients.length;i++) {
var request = store.put(clients[i]);
}
// `var request` is function-scoped (hoisted), so after the loop it refers to
// the LAST put; the handlers below fire only for that final request.
// NOTE(review): this relies on IndexedDB completing requests of one
// transaction in order — success of the last put implies the earlier ones
// succeeded, and an unhandled request error would abort the transaction.
request.onsuccess = function() {
deferred.resolve(clients.length);
}
request.onerror = function(e) {
deferred.reject("Client item couldn't be added!");
}
When the "clients" array stays below a few thousand items it's OK, but at around 50,000 the tab hangs and then crashes. The native implementation upserts the same 50,000 records in just a few seconds (on Chrome).
Am I missing anything — i.e. is there another method to batch-insert records with Linq2indexedDB, or does Linq2indexedDB simply not support batch inserts/updates?