
In my application, I have an array of promises that access the network in parallel. Sometimes, when the app is running at full speed, the network slows down because too many promises are hitting it at once. I would like to know how I can control how many of them access the network in parallel. This is a sample of the code:

var ids = {1: 1, 2: 2, 3: 3, 4: 4, 5: 5, 6: 6, 6: 56, 7: 7, 8: 8, 5:6 }; // this is random 
Promise.all(Object.keys(ids).map(function(dp){
  return new Promise(function(resolve, reject){
    http.post({url: addr, form: { data: dp }}, function(err, res, body){
      if (err){
        reject(err);
      }
      resolve(body.xx);
    });
  });
})).then(function(data){
  http.post({url: hostAddress, form: { data: data.x }}, function(err, res, body){
    ......
    resolve(body.xx);
  });
});

There is a lot of networking going on. It would be great if I could allow only 2 or 3 requests at the same time. Thanks for your help.

dmx
  • https://github.com/ForbesLindesay/throat lets you limit how many concurrent promises can run – Andy Ray Dec 07 '16 at 22:59
  • Have a look at http://stackoverflow.com/a/38778887/1048572 or http://stackoverflow.com/a/39197252/1048572. – Bergi Dec 09 '16 at 00:25

2 Answers


You can use Bluebird's .map() which has a concurrency option to control how many requests are in-flight at the same time:

const Promise = require('bluebird');
// promisifyAll() adds promise-returning *Async versions of callback-style
// methods (the OP's http.post-style API is assumed here)
const http = Promise.promisifyAll(require('http'));

var ids = {1: 1, 2: 2, 3: 3, 4: 4, 5: 5, 6: 6, 6: 56, 7: 7, 8: 8, 5: 6 }; // this is random

// pass the mapper function to Promise.map() so the concurrency option
// actually limits how many requests are started at once
Promise.map(Object.keys(ids), function(dp) {
    return http.postAsync({url: addr, form: { data: dp }}).then(function(body) {
        return body.xx;
    });
}, {concurrency: 2}).then(function(results) {
    // process results here
});

FYI, I didn't understand what you were trying to do with your second http.post() because you were referencing data.x when data is an array. The code is a bit too much pseudo-code to tell what you were really trying to do with that second http.post().
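That said, if the goal is to make a follow-up request for each result (as described in the comments below), you can chain the second request inside the mapper so both calls count against the same concurrency limit. A rough sketch, assuming the same promisified http.postAsync call as above and the OP's addr, hostAddress and body.xx names:

Promise.map(Object.keys(ids), function(dp) {
    // first request: look up the data for this id
    return http.postAsync({url: addr, form: { data: dp }}).then(function(body) {
        // second request: use the result of the first one
        return http.postAsync({url: hostAddress, form: { data: body.xx }});
    }).then(function(body2) {
        return body2.xx;
    });
}, {concurrency: 2}).then(function(results) {
    // results[i] corresponds to Object.keys(ids)[i]
});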


Otherwise, you can code your own concurrency control where you fire up N requests initially and then each time one finishes, you fire up another until you have no more to do. Here's an example of coding the concurrency control manually:

Fire off 1,000,000 requests 100 at a time

Or, you could write it yourself like this:

const http = require('http');

function httpPost(options) {
    return new Promise(function(resolve, reject) {
        http.post(options, function(err, res, body) {
            if (err) {
                reject(err);
            } else {
                resolve(body);
            }
        });
    });
}

// takes an array of items and a function that returns a promise
function mapConcurrent(items, maxConcurrent, fn) {
    let index = 0;
    let inFlightCntr = 0;
    let doneCntr = 0;
    let results = new Array(items.length);
    let stop = false;

    return new Promise(function(resolve, reject) {

        function runNext() {
            let i = index;
            ++inFlightCntr;
            fn(items[index], index++).then(function(val) {
                ++doneCntr;
                --inFlightCntr;
                results[i] = val;
                run();
            }, function(err) {
                // set flag so we don't launch any more requests
                stop = true;
                reject(err);
            });
        }

        function run() {
            // launch as many as we're allowed to
            while (!stop && inFlightCntr < maxConcurrent && index < items.length) {
                runNext();
            }
            // if all are done, then resolve parent promise with results
            if (doneCntr === items.length) {
                resolve(results);
            }
        }

        run();
    });
}

var ids = {1: 1, 2: 2, 3: 3, 4: 4, 5: 5, 6: 6, 6: 56, 7: 7, 8: 8, 5:6 }; // this is random 


mapConcurrent(Object.keys(ids), 2, function(item, index) {
    return httpPost({url: addr, form: {data: item}}).then(function(body) {
        return body.xx;
    });
}).then(function(results) {
    // array of results here
}, function(err) {
    // error here    
});
jfriend00
  • In the second post I am making a new request – dmx Dec 07 '16 at 23:01
  • @dmx - But, I can't tell what you're trying to feed into it because the pseudo-code isn't accurate. You can just make a new request using the result in the code I've provided where it says "process results here". – jfriend00 Dec 07 '16 at 23:02
  • @jfriend00 Basically I have a list of ids, I make a request to get each user's info, and then a last request to get the user's friends – dmx Dec 07 '16 at 23:09
  • @dmx - Your code is simply too abstract for me to make a more concrete recommendation. Is there something about my answer you don't understand? I've shown you how to limit the number of concurrent requests to whatever value you want. – jfriend00 Dec 07 '16 at 23:11
  • Thanks for your answer, I was wondering if I could still use Bluebird with other packages like https://www.npmjs.com/package/npmview, https://www.npmjs.com/package/npm-remote-ls, ... – dmx Dec 07 '16 at 23:22
  • @dmx - Sure, Bluebird should be compatible with pretty much any other package that isn't trying to also replace the `Promise` object. – jfriend00 Dec 07 '16 at 23:26
  • Thanks, I'm going to try it – dmx Dec 07 '16 at 23:28
  • @dmx - FYI, I added a manually coded version that does not rely on the Bluebird promise library. If you like Bluebird, I'd recommend using it, but I thought it would be instructional to see how one could code this yourself. – jfriend00 Dec 09 '16 at 00:34
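To illustrate the compatibility point from the comments above: any node-style callback API can be wrapped with Bluebird's Promise.promisify() and run through Promise.map() with a concurrency limit. This is only a sketch; getUserInfo(id, callback) is a hypothetical stand-in for whatever package is actually being called:

const Promise = require('bluebird');

// hypothetical node-style callback API standing in for a third-party package
function getUserInfo(id, callback) {
    setTimeout(function() {
        callback(null, { id: id });
    }, 100);
}

// promisify() converts a node-style callback function into one that returns a promise
const getUserInfoAsync = Promise.promisify(getUserInfo);

Promise.map(Object.keys(ids), function(id) {
    return getUserInfoAsync(id);
}, {concurrency: 3}).then(function(infos) {
    // at most 3 lookups are in flight at any one time
});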

This is one way of achieving your aim without using a library. Within the promise returned from makeMaxConcurrencyRequests(), the startNew() function is recursively called, sending new requests until we have been through every id, and without exceeding a current request count of maxConcurrency.

When each request completes, its return data is pushed into the returnedData array. When all requests are completed, the promise is resolved with returnedData.

I haven't tested this, but looking at it my only concern is that startNew() is going to be called multiple times in quick succession while requests are pending. If this causes issues then rather than immediately calling startNew(), we could use setTimeout to delay the next invocation - this is commented out in my code.

function makeMaxConcurrencyRequests(ids, maxConcurrency) {
    return new Promise(function(resolve, reject) {
        let i = 0, currentlyRunning = 0, returnedData = [];
        function startNew() {
            // launch requests until we hit the concurrency limit or run out of ids
            while (i < ids.length && currentlyRunning < maxConcurrency) {
                currentlyRunning++;
                makeRequest(ids[i++]).then(function(data) {
                    returnedData.push(data);
                    currentlyRunning--;
                    startNew();
                    // setTimeout(startNew, 200);
                }).catch(function(err) {
                    reject(err);
                });
            }
            // all requests have been issued and have completed
            if (i >= ids.length && currentlyRunning === 0) {
                resolve(returnedData);
            }
        }
        startNew();
    });
}

function makeRequest(id) {
    return new Promise(function(resolve, reject) {
        // first request: look up the data for this id
        http.post({url: addr, form: { data: id }}, function(err, res, body) {
            if (err) {
                reject(err);
                return;
            }
            // second request: use the result of the first one
            http.post({url: hostAddress, form: { data: body.xx }}, function(err2, res2, body2) {
                if (err2) {
                    reject(err2);
                    return;
                }
                resolve(body2.xx);
            });
        });
    });
}

Usage:

var ids = {1: 1, 2: 2, 3: 3, 4: 4, 5: 5, 6: 6, 6: 56, 7: 7, 8: 8, 5:6 };
var maxConcurrency = 3;
makeMaxConcurrencyRequests(Object.keys(ids), maxConcurrency)
.then(function(data) {
    // do something with data
}).catch(function(error) {
    // do something with error
});
Alex Young