The attempted solution was to use a library (formidable in this case) and let it handle the stream. However, it does NOT work, so don't take this approach if you're considering it.
GCS is considerably slower than a local HDD, so when uploading large files the buffered data consumes all available RAM or disk/buffer space. Linux crashes, or nginx reports that no space is available.
- The most elegant solution is a signed URL: clients upload directly to GCS. However, I ran into another problem with that approach, which I will post as a separate question here on Stack Overflow (see the sketch after this list).
- The compromise solution currently in use is to upload the file to the server, upload it from the server to GCS, and then delete it from the server. It takes about twice as long and is not very elegant, but it works (also sketched below).
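For reference, here is a rough sketch of the signed-URL approach, assuming a `bucket` object from `@google-cloud/storage`; the handler name, route and query parameters are made up for illustration, not the exact production code:

const getUploadUrl = async (req, res) => {
    // the client asks for an upload URL for a given object name, e.g. ?name=photos/cat.png (assumption)
    const file = bucket.file(req.query.name);
    const [url] = await file.getSignedUrl({
        version    : 'v4',
        action     : 'write',
        expires    : Date.now() + 15 * 60 * 1000, // URL valid for 15 minutes
        contentType: req.query.contentType
    });
    // the client then PUTs the file body directly to this URL, bypassing the server entirely
    res.status(200).send({ success: true, url });
};

And a minimal sketch of the compromise approach (upload to the server first, copy to GCS, then delete the temp file). It assumes formidable v2+, the same `bucket` object and a multipart field named "file":

const fs = require('fs/promises');
const formidable = require('formidable');

const uploadViaTempFile = (req, res) => {
    const form = new formidable.IncomingForm({ keepExtensions: true });
    form.parse(req, async (err, fields, files) => {
        if (err) return res.status(500).send({ success: false, message: err });
        // formidable v3 returns arrays of files, v2 returns single objects
        const file = Array.isArray(files.file) ? files.file[0] : files.file;
        try {
            // 1. formidable has already written the upload to a temp path on the server
            // 2. copy it from the server to GCS
            await bucket.upload(file.filepath, { destination: file.originalFilename });
            // 3. delete the temp file from the server
            await fs.unlink(file.filepath);
            res.status(200).send({ success: true });
        } catch (er) {
            res.status(500).send({ success: false, message: er });
        }
    });
};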
And here is the full code for the stream pairing that did not work:
const uploadFile2 = async (req, res, next) => {
    // pass-through stream that sits between formidable and the GCS write stream
    const passthroughStream = new stream.PassThrough();
    // will be set in the 'fileBegin' hook; used to log the newly uploaded file
    let fileName;
    let error = null;
    // override onPart to prevent writing to disk and only stream the upload
    formidable.IncomingForm.prototype.onPart = (part) => {
        // in formidable v2+ the property is `originalFilename` (it was `filename` in v1)
        if (!part.originalFilename) {
            // let formidable handle all non-file parts
            form._handlePart(part);
            return;
        }
        part.on('data', (data) => {
            passthroughStream.write(data, er => {
                if (er) {
                    console.error('Error while trying to save the file', fileName, er);
                    error = er;
                }
            });
        });
        part.on('end', () => {
            // weird error: "write after end". This delay works around it, but it's certainly not elegant.
            setTimeout(() => {
                passthroughStream.end();
                // note: the response is sent here, before the GCS write stream has actually finished
                console.log('Ended stream for:', fileName);
                res.status(200).send({
                    success: true
                });
            }, 500);
        });
        part.on('error', (err) => {
            console.error('Something went wrong in uploading file:', err);
            res.status(500).send({
                success: false,
                message: err
            });
        });
    };
    const form = new formidable.IncomingForm({
        multiples     : true,
        keepExtensions: true,
        maxFileSize   : 1024 * 1024 * 1024 // 1 GB
    });
    form.parse(req, async (err, fields, files) => {
        if (err) {
            console.log('Error parsing the files', err);
            return res.status(500).json({
                error       : true,
                success     : false,
                message     : 'There was an error parsing the files',
                errorMessage: err
            });
        }
    });
    form.on('fileBegin', (_, file) => {
        // build the object name, dropping the leading '/upload/' part of the URL (8 chars)
        fileName = req.originalUrl.substring(8) + file.originalFilename;
        console.debug('beginning upload for:', req.originalUrl);
        const bucketFile = bucket.file(fileName);
        passthroughStream
            .pipe(bucketFile.createWriteStream())
            .on('finish', () => {
                // the file upload to GCS is complete ('finish' receives no error argument)
                console.log('New file was uploaded:', fileName);
            })
            .on('error', er => {
                console.error('Error while trying to write to GCS', fileName, er);
                error = er;
                res.status(500).send({
                    error  : true,
                    success: false,
                    message: er
                });
            });
    });
    form.on('error', (err) => {
        console.error('Something went wrong in uploading file:', err);
        res.status(500).send({
            error  : true,
            success: false,
            message: err
        });
    });
};
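For context, the handler above assumes roughly the following surrounding setup (the module requires, bucket name and route prefix are assumptions inferred from the code, not spelled out anywhere):

const stream = require('stream');
const express = require('express');
const formidable = require('formidable');
const { Storage } = require('@google-cloud/storage');

const storage = new Storage();
const bucket = storage.bucket('my-bucket'); // placeholder bucket name

const app = express();
// the handler strips the first 8 characters of req.originalUrl, i.e. the '/upload/' prefix
app.post('/upload/*', uploadFile2);
app.listen(3000);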