I have a JSON file with about 20,000 to 100,000 links in it. It looks like this:
[{
  "file_name": "Blessed_Titles.jpg",
  "url": "https://i.imgur.com/FRDibHa.jpg",
  "downloadId": "6r44r4k340rvvr"
}]
Is there a way to download them in parallel, about 100 at a time? Will I run into any warnings or errors while downloading thousands of links? Right now I'm using sequential download, but I'm not sure it's suitable for such a large number of links.
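Something like this batch approach is what I had in mind; it's an untested sketch (a batchSize of 100 is just my guess, and downloadFile is the function from my current code below):

async function downloadInBatches(ultUrls, batchSize = 100) {
  const len = ultUrls.length;
  for (let start = 0; start < len; start += batchSize) {
    const batch = ultUrls.slice(start, start + batchSize);
    // allSettled so one failed link doesn't abort the whole batch
    const results = await Promise.allSettled(
      batch.map((ult, j) => downloadFile(ult, start + j + 1, len))
    );
    results
      .filter(r => r.status === 'rejected')
      .forEach(r => console.log(r.reason));
  }
}

Is a fixed-batch approach like this reasonable, or would a proper concurrency pool (keeping exactly 100 downloads in flight at all times) be better?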
Here's how I'm downloading currently:
const http = require('http');
const https = require('https');
// mainWindow, utility, and dir come from elsewhere in my Electron app.

async function downloadALL(ultUrls) {
  let i = 1;
  const len = ultUrls.length;
  for (const ult of ultUrls) { // bare 'ult' was leaking a global before
    try {
      await downloadFile(ult, i, len);
      i++;
    } catch (err) {
      console.log(err);
    }
  }
}
function downloadFile(ult, i, len) {
  return new Promise((resolve, reject) => { // executor only takes resolve and reject
    console.log('Downloading File:', ult.file_name);
    const download = {
      file: {},
    };
    let percentage = 0;
    const percentage2 = ((i / len) * 100).toFixed(0); // overall progress across all files
    download.file.name = ult.file_name;
    download.file.percentage = percentage;
    download.file.downloadId = ult.downloadId;
    download.percentage = percentage2;
    const request = (ult.url.startsWith('https') ? https : http)
      .get(ult.url, function(response) {
        const lent = parseInt(response.headers['content-length'], 10);
        let cur = 0;
        const total = lent / 1048576; // 1048576 bytes in 1 megabyte
        response.on('data', function(chunk) {
          // Only track progress here; the body is written via pipe() below,
          // so it is no longer buffered in memory as a string.
          cur += chunk.length;
          percentage = ((100.0 * cur) / lent).toFixed(0);
          download.file.percentage = percentage;
          mainWindow.webContents.send('download-info', download);
        });
        const file = utility.writeFile(ult.file_name, dir);
        response.pipe(file);
        file.on('error', function(err) {
          console.log(`ERROR: ${err}`);
          return reject(err);
        });
        file.on('finish', function() {
          console.log('File downloaded');
          file.close(() => resolve()); // close() is async; resolve after it completes
        });
      })
      .on('error', function(err) {
        return reject(err);
      });
  });
}
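For context, this is roughly how I load the JSON and kick everything off ('links.json' is just a placeholder for my actual file path):

const fs = require('fs');

const ultUrls = JSON.parse(fs.readFileSync('links.json', 'utf8'));
downloadALL(ultUrls).then(() => console.log('All downloads finished'));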