I have this code where I try to cache the necessary objects in memory before using them to make HTTP requests with a library. I cache them up front so the code is faster when it actually does the requests, which is the part that needs to run as fast as possible.
The code works, but right now it does the requests one after another, in a "synchronous" manner.
I believe the problem is the line below:
for (i = 0; i < exchanges.length; i++)
I am not sure what the best/fastest approach is to run the above for loop asynchronously so the requests happen in parallel. (I have put a rough sketch of what I was considering after the full code below.)
'use strict';
const ccxt = require('ccxt');
const fs = require('fs');
const path = require('path');
// Cache the exchange objects in memory first
const exchangesArray = [];
const exchangesArray2 = [];
(async () => {
    const allexchanges = ccxt.exchanges.filter((id) => !['coinmarketcap', 'theocean'].includes(id))
        .map(async (id) => {
            const Exchange = ccxt[id];
            const exchange = new Exchange({ enableRateLimit: true });
            if (exchange.has['fetchTickers']) {
                exchangesArray.push(exchange);
                exchangesArray2.push(id);
            }
        });
    await Promise.all(allexchanges);
})();
// The cached objects
const exchanges = exchangesArray; // holds the exchange objects
const exchangesnames = exchangesArray2; // holds the exchange ids
var i;
// Use the cached objects to run fetchTickers() as fast as possible now
(async () => {
    console.log(`start`);
    const start = Date.now();
    // What is the fastest way to make this for loop async/await so it runs in parallel?
    for (i = 0; i < exchanges.length; i++) {
        // take the next exchange from the array
        const exchange = exchanges[i];
        const exchangename = exchangesnames[i];
        try {
            const tickers = await exchange.fetchTickers();
            const dumpFile = path.join(__dirname, 'tickers', `${exchangename}Tickers.json`);
            await fs.promises.writeFile(dumpFile, JSON.stringify(tickers));
        } catch (e) {
            console.error(e);
        }
    }
    // this was meant to wait for all of them to execute or fail,
    // but `exchanges` holds exchange objects, not promises, so it does nothing here
    await Promise.all(exchanges);
    const end = Date.now();
    console.log(`Done in ${(end - start) / 1000} seconds`);
})();
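For reference, here is a rough, untested sketch of the direction I was considering: start all the fetchTickers() calls at once with .map() and wait for them together with Promise.all(). It reuses the same exchanges, exchangesnames, fs, and path from the code above. I am not sure whether this is correct or actually the fastest approach, which is why I am asking:

(async () => {
    const start = Date.now();
    // start every fetch/write immediately instead of awaiting them one by one
    const tasks = exchanges.map(async (exchange, i) => {
        const exchangename = exchangesnames[i];
        try {
            const tickers = await exchange.fetchTickers();
            const dumpFile = path.join(__dirname, 'tickers', `${exchangename}Tickers.json`);
            await fs.promises.writeFile(dumpFile, JSON.stringify(tickers));
        } catch (e) {
            // errors are caught per exchange so one failure does not stop the rest
            console.error(e);
        }
    });
    // wait until every task has finished (or failed and been logged)
    await Promise.all(tasks);
    console.log(`Done in ${(Date.now() - start) / 1000} seconds`);
})();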