I want to iterate over 4435 products in an collection. For every product I want to download the image and upload it to an Amazon S3 bucket. For the last part I have written an function what is working fine.
But after a 750 products node.js crashes without giving me an error. With the > output.log &
function I get an
'RangeError: Maximum call stack size exceeded'
I using async.eachSeries
to iterate over the products and the product images. This is the complete function which will loop over all the products. As you can see I comment out the 'counter', but that was not enough. Can anybody tell me what i'm doing wrong?
module.exports.putImagesToAmazon = function(req, res){
console.log("putImagesToAmazon");
var resultArray = [];
//var counter =0;
Product.find({}).sort({_id: -1}).exec(function(err, products){
console.log("aantal producten", products.length);
async.eachSeries(products, function(product, callback){
if(product.images.length > 0){
async.eachSeries(product.images, function(image, cb){
if(image.src){
if(image.src.indexOf("http://") !== -1 || image.src.indexOf("https://") !== -1){
upload.uploadStream(image.src, 'product-image', function(err, data){
if(err) {
console.log("ERROR1", err);
resultArray.push(err);
cb();
}else{
image.src = data;
product.save(function(err, result){
if (err) {
console.log("saving product error", err);
}else{
console.log("product updated", product.name);
}
})
cb();
}
})
}else{
cb();
}
}
}, function(){
//counter++;
console.log("next");
callback();
})
}
}, function(){
console.log("callback", counter);
var fields = ['statusCode', 'statusMessage', 'error', "url"];
var csv = json2csv({data: resultArray, fields: fields});
fs.writeFile('errors.csv', csv, function(err){
if (err) throw err;
console.log("saved");
})
})
})
res.send("Ok");
}