0

I am porting an old Ruby script over to JavaScript, setting the function up as a cron job so it will run on a schedule. The function queries our MySQL database, retrieves inventory information for our products, and then sends requests to a trading partner's API to update our inventory on their site.

Due to Node's asynchronicity I am running into issues. We need to chunk requests into 1000 items per request, and we are sending 10k products. The issue is that each request just sends the last 1000 items every time: the for loop inside the while loop moves forward before it finishes crafting the JSON request body. I tried creating anonymous setTimeout functions in the while loop to handle it, as well as creating an object with the request function and the variables to be passed and stuffing it into an array to iterate over once the while loop completes, but I am getting the same result. I'm not sure what the best way is to handle this so that each request gets the correct batch of items. I also need to wait 3 minutes between each request of 1000 items so as not to hit the request cap.

  query.on('end', () => {
      connection.release();

      // Archive snapshot of the full result set, taken BEFORE the while loop
      // below drains itemArray with splice().
      writeArray = itemArray.slice(0),
      alteredArray = [];
      var csv = json2csv({data: writeArray, fields: fields}),
      timestamp = new Date(Date.now());
      timestamp = timestamp.getFullYear() + '-' + (timestamp.getMonth() + 1) + '-' + timestamp.getDate() + ' ' + timestamp.getHours() + ':' + timestamp.getMinutes() + ':' + timestamp.getSeconds();
      let fpath = './public/assets/archives/opalEdiInventory-' + timestamp + '.csv';

      while (itemArray.length > 0) {
          // splice(0, 1000): the original used 999, which produced
          // 999-item batches (off by one against the 1000-item cap).
          alteredArray = itemArray.splice(0, 1000);
          for (let i = 0; i < alteredArray.length; i++) {
              jsonObjectArray.push({
                  sku: alteredArray[i]['sku'],
                  quantity: alteredArray[i]["quantity"],
                  overstockquantity: alteredArray[i]["osInv"],
                  warehouse: warehouse,
                  isdiscontinued: alteredArray[i]["disc"],
                  backorderdate: alteredArray[i]["etd"],
                  backorderavailability: alteredArray[i]["boq"]
              });
          }

          var jsonObject = {
              login: user,
              password: password,
              items: jsonObjectArray
          };

          // BUG FIX: the original mutated ONE shared `postOptions` object and
          // pushed the same reference on every iteration, so every scheduled
          // request saw only the LAST chunk's body. Take a shallow copy per
          // chunk (preserving any fields already set on postOptions, e.g.
          // headers — TODO confirm what postOptions is initialized with) so
          // each funcArray entry owns its own options object.
          var batchOptions = Object.assign({}, postOptions, {
              url: endpoint,
              body: JSON.stringify(jsonObject)
          });
          funcArray.push({
              func: function (opts) {
                  request(opts, (err, res, body) => {
                      if (err) {
                          console.error(err);
                          throw err;
                      }
                      console.log(body);
                  });
              },
              vars: batchOptions
          });
          // Reset the accumulator for the next chunk; safe because the chunk
          // body above was already serialized by JSON.stringify.
          jsonObjectArray.length = 0;
      }

      // Space the requests 3 minutes apart to stay under the partner's
      // request cap; i is block-scoped (let), so each timeout sees its own i.
      var mili = 180000;
      for (let i = 0; i < funcArray.length; i++) {
          setTimeout(() => {
              console.log('request ' + i);
              funcArray[i]['func'](funcArray[i]['vars']);
          }, mili * i);
      }
  });
            });
Jay Lane
  • 1,379
  • 15
  • 28

2 Answers

0

You would need async/await or a Promise to handle async actions in Node.js. I am not sure if you have a Node version which supports async/await, so I have tried a Promise-based solution.

query.on('end', () => {
    connection.release();
    writeArray = itemArray.slice(0),
        alteredArray = [];
    var csv = json2csv({ data: writeArray, fields: fields }),
        timestamp = new Date(Date.now());
    timestamp = timestamp.getFullYear() + '-' + (timestamp.getMonth() + 1) + '-' + timestamp.getDate() + ' ' + timestamp.getHours() + ':' + timestamp.getMinutes() + ':' + timestamp.getSeconds();
    let fpath = './public/assets/archives/opalEdiInventory-' + timestamp + '.csv';

    // Build one self-contained options object per 1000-item batch. Each map
    // iteration creates a FRESH postOptions, so no batch shares state with
    // another (the root cause of the asker's "last chunk only" bug).
    var calls = chunk(itemArray, 1000)
        .map(function (batch) {
            var renamedItems = batch.map((item) => new renamedItem(item, warehouse));
            var postOptions = {};
            postOptions.url = endpoint;
            postOptions.body = JSON.stringify({
                login: user,
                password: password,
                items: renamedItems
            });
            return postOptions;
        });

    sequenceBatch(calls, makeRequest)
        .then(function () {
            console.log('done');
        })
        .catch(function (err) {
            console.log('failed', err);
        });

    // Resolves (with no value) after `ms` milliseconds.
    function delay(ms) {
        return new Promise(function (resolve) {
            setTimeout(resolve, ms);
        });
    }

    // Runs cb over `calls` strictly in order, pausing 3 minutes BETWEEN
    // requests. Fixes three defects in the earlier draft: it also slept
    // 3 minutes before the FIRST request; it resolved before the HTTP
    // round-trip finished (cb's errors were asynchronous, so the try/catch
    // never saw them); and its counter was pre-incremented, labeling the
    // first call "2".
    function sequenceBatch(calls, cb) {
        var RATE_LIMIT_MS = 180000;
        return calls.reduce(function (sequence, callOptions, index) {
            return sequence
                .then(function () {
                    // No delay before the first request.
                    return index === 0 ? undefined : delay(RATE_LIMIT_MS);
                })
                .then(function () {
                    return cb(callOptions);
                })
                .then(function () {
                    return 'callsequence' + (index + 1) + ' done';
                });
        }, Promise.resolve());
    }

    // Promisified request(): resolves with the response body, rejects on
    // error, so failures propagate into the sequenceBatch chain instead of
    // being thrown inside a callback where nothing can catch them.
    function makeRequest(postOptions) {
        return new Promise(function (resolve, reject) {
            request(postOptions, (err, res, body) => {
                if (err) {
                    console.error(err);
                    return reject(err);
                }
                console.log(body);
                resolve(body);
            });
        });
    }

    // Splits arr into consecutive slices of at most `len` items.
    function chunk(arr, len) {
        var chunks = [],
            i = 0,
            n = arr.length;
        while (i < n) {
            chunks.push(arr.slice(i, i += len));
        }
        return chunks;
    }

    // Maps a DB row onto the field names the trading partner's API expects.
    function renamedItem(item, warehouse) {
        this.sku = item['sku'];
        this.quantity = item["quantity"];
        this.overstockquantity = item["osInv"];
        this.warehouse = warehouse;
        this.isdiscontinued = item["disc"];
        this.backorderdate = item["etd"];
        this.backorderavailability = item["boq"];
    }
});

Could you please try this snippet and let me know if it works? I couldn't test it since I made it up on the fly. The core logic is in the sequenceBatch function. The answer is based on another question which explains how timeouts and Promises work together.

Shyam Babu
  • 1,069
  • 7
  • 14
0

Turns out this wasn't a closure or async issue at all: the request object I was building was using references to objects instead of shallow copies, resulting in all the data being linked to the same object reference in the final array.

Jay Lane
  • 1,379
  • 15
  • 28