I am looking for best practices for adding many items to IndexedDB. The upper bound on the number of items is usually around 100k, and each item is a JS object of roughly 350 bytes.
I maintain a list of pending items (actions) to be taken on the database (each action is either "add", "put", or "delete"), and each item could belong to a different object store.
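For reference, a pending action looks roughly like this; the store, action, and key fields are the ones used in the snippet below, while the contacts store and its record fields are made-up examples:

    // Illustrative only – the real record fields vary per object store.
    var pendingDbActions = [
        { store: 'contacts', action: 'put', key: 42, name: 'Ada', email: 'ada@example.com' },
        { store: 'contacts', action: 'delete', key: 17 }
    ];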
I simply take the first item, apply it to the appropriate object store, and once that request finishes (successfully or not), I move on to the next item.
Is there a better (more efficient) way to do this? And are there any scoping issues w.r.t. transaction lifetime I need to be concerned with in the snippet below?
function flushPendingDbActions() {
    var transaction = db.transaction(storeNames, "readwrite");

    var addNext = function() {
        var nextItem = pendingDbActions[0];
        if (nextItem) {
            pendingDbActions.shift();
            var objectStore = transaction.objectStore(nextItem.store),
                params,
                request;

            switch (nextItem.action) {
                case 'add':
                case 'put':
                    params = nextItem; // the action object itself is the record to store
                    break;
                case 'delete':
                    params = nextItem.key;
                    break;
            }

            request = objectStore[nextItem.action](params);
            // Only issue the next request once the current one settles,
            // whether it succeeded or failed.
            request.onsuccess = request.onerror = addNext;
        }
    };

    addNext();
}
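For comparison, one alternative I have been considering is to queue every request against the transaction up front instead of chaining them off onsuccess, and to watch the transaction's oncomplete/onerror rather than each individual request. A rough, untested sketch (flushPendingDbActionsBatch is just a made-up name for this variant):

    function flushPendingDbActionsBatch() {
        var transaction = db.transaction(storeNames, "readwrite");

        transaction.oncomplete = function() {
            // All queued requests finished and the transaction committed.
            console.log("flush complete");
        };
        transaction.onerror = function(event) {
            // A failed request aborts the whole transaction unless prevented.
            console.error("flush failed", event.target.error);
        };

        var item;
        while ((item = pendingDbActions.shift())) {
            var objectStore = transaction.objectStore(item.store);
            if (item.action === 'delete') {
                objectStore.delete(item.key);
            } else {
                objectStore[item.action](item); // 'add' or 'put'
            }
        }
    }

I am not sure whether firing ~100k requests up front like this is better or worse than chaining them one at a time, which is part of what I am asking.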