I am very new to Express and Node. I was trying to upload an image using multiparty and the code given here. I have put a check on the file size. When I upload a file larger than the limit, execution lands in the "problem section". The problem is that the server hangs and only responds after the request times out. I have tried many solutions from Stack Overflow but nothing seems to work. Everything works fine if the file size is below the limit. I am quite sure the code reaches the problem section and that there is nothing wrong with the upload logic itself, but it seems I have to do something more in the "problem section". Please tell me what I am missing.
I have tried replacing the code in the problem section with next(), res.send(), res.end(), next(err), and a bare return, but nothing works; the request hangs no matter what. One of the variants I tried is shown right after the route below.
Following is the code:
router.post("/image", function(req, res, next) {
if(req.user) {
upload.uploadToS3(req, S3_BUCKET, S3_PROFILE_IMAGE_FOLDER, function(result) {
if(result.error != null && result.error === false) {
models.Customer.update({
orignalImage : result.fileLocation
},{
where : { mobileNumber : req.user.mobileNumber}
}).then(function(customer) {
if(customer) {
res.send({
url: result.fileLocation,
error : false
});
} else {
res.status(400);
res.send({error : true,
error_message : 'Image upload failed'});
}
});
} else {
//PROBLEM SECTION
res.status(404);
res.json({error : true, error_message : result.error_message});
}
});
} else {
res.status(403);
res.send({message: "access denied"});
}
});
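For example, here is what the problem section looked like during one of those attempts (a simplified illustration of the else branch above, not my exact code); every alternative listed earlier behaves the same way:

            } else {
                //PROBLEM SECTION
                res.status(404).json({ error: true, error_message: result.error_message });
                return next();    // also tried next(err), res.send(), res.end(), and a bare return here
            }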
Please tell me if you need more details and I will add them. Here is the uploadToS3 function:

var uploadToS3 = function(req, S3_BUCKET, folder, callback) {
    // knox, multiparty, Batch, ByteCounter and awsConfig are required/defined elsewhere in this module
    var s3Client = knox.createClient({
        secure: false,
        key: awsConfig.accessKeyId,
        secret: awsConfig.secretAccessKey,
        bucket: S3_BUCKET,
    });
    var headers = {
        'x-amz-acl': 'public-read',
    };
    var form = new multiparty.Form();
    var batch = new Batch();
    batch.push(function(cb) {
        form.on('part', function(part) {
            var validity = validateFile({ type: part.headers['content-type'], name: part.filename, length: part.byteCount });
            console.log(validity);
            if (validity.isValid) {
                cb(null, { filename: folder + "/" + generateFileName({ name: part.filename }), part: part });
            } else {
                cb({ error: true, error_message: validity.reason, part: part }, "advra kedavra");
            }
        });
    });
    batch.end(function(err, results) {
        if (err) {
            console.log(err);
            err.statusCode = 200;
            callback(err);
        } else {
            form.removeListener('close', onEnd);
            var result = results[0];
            var part = result.part;
            var destPath = result.filename;
            var counter = new ByteCounter();
            part.pipe(counter); // need this until knox upgrades to streams2
            headers['Content-Length'] = part.byteCount;
            s3Client.putStream(part, destPath, headers, function(err, s3Response) {
                result = {};
                if (err) {
                    console.log(err);
                    result.error = true;
                    result.error_message = "Problem in uploading!";
                } else {
                    console.log(s3Response.req.url);
                    result = { error: false, statusCode: s3Response.statusCode, message: "file upload successful.", fileLocation: s3Response.req.url };
                }
                callback(result);
            });
            part.on('end', function() {
                console.log("File upload complete", counter.bytes);
            });
        }
    });
    function onEnd() {
        console.log("no uploaded file");
        callback({ error: false, error_message: "no uploaded file." });
    }
    form.on('error', function(err) {
        console.log('Error parsing form: ' + err.stack);
    });
    form.on('close', onEnd);
    form.parse(req);
};
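In case it helps, the two helpers used above roughly look like this (a simplified sketch; the exact size limit, MIME check and naming scheme in my real code differ, and MAX_FILE_SIZE is just an illustrative constant):

var MAX_FILE_SIZE = 5 * 1024 * 1024; // illustrative 5 MB limit

// Returns { isValid: true } or { isValid: false, reason: '...' } based on the
// part's content-type, filename and byte length.
function validateFile(file) {
    if (!file.type || file.type.indexOf('image/') !== 0) {
        return { isValid: false, reason: 'Only image files are allowed.' };
    }
    if (file.length > MAX_FILE_SIZE) {
        return { isValid: false, reason: 'File exceeds the size limit.' };
    }
    return { isValid: true };
}

// Builds a unique object key from the original file name.
function generateFileName(file) {
    var extension = file.name.substring(file.name.lastIndexOf('.'));
    return Date.now() + '-' + Math.round(Math.random() * 1e9) + extension;
}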