So I've got my defaults of 1 MB chunks to upload, and 5 simultaneous chunks at once, but after those chunks have completed, my client side doesn't send the remaining chunks — hence the cap at 5 MB. Does anyone know why? I've not changed any other default settings, and testChunks is enabled.
My Node-side POST is modelled on cleversprocket's answer here:
reassemble binary after flow.js upload on node/express server
My get side is based upon:
//'found', filename, original_filename, identifier
//'not_found', null, null, null
$.get = function(req, callback) {
var chunkNumber = req.param('flowChunkNumber', 0);
var chunkSize = req.param('flowChunkSize', 0);
var totalSize = req.param('flowTotalSize', 0);
var identifier = req.param('flowIdentifier', "");
var filename = req.param('flowFilename', "");
if (validateRequest(chunkNumber, chunkSize, totalSize, identifier, filename) == 'valid') {
var chunkFilename = getChunkFilename(chunkNumber, identifier);
fs.exists(chunkFilename, function(exists) {
if (exists) {
callback('found', chunkFilename, filename, identifier);
} else {
callback('not_found', null, null, null);
}
});
} else {
callback('not_found', null, null, null);
}
};
I don't understand why it's not firing again. The process goes: try to GET the file chunk, no chunk, reply 404, POST CHUNK, POST PARTLY_DONE (×5, one per simultaneous upload). Then it doesn't upload any more chunks — it just stops at some point (numerous console.logs haven't cleared up the issue).
I've removed 404 from permanent errors so that the application shouldn't stop, yet it still does.
Any help would be great as files are likely to be larger than 5Mb at times.
Thanks, Joe
EDIT (FULL POST CODE):
Client side settings (only change default settings from mg-flow):
chunkSize: 1024 * 1024,
forceChunkSize: false,
simultaneousUploads: 5,
singleFile: false,
fileParameterName: 'file',
progressCallbacksInterval: 0, //instant feedback
speedSmoothingFactor: 1,
query: {},
headers: {},
withCredentials: false,
preprocess: null,
method: 'multipart',
prioritizeFirstAndLastChunk: false,
target: '/',
testChunks: true,
generateUniqueIdentifier: null,
maxChunkRetries: undefined,
chunkRetryInterval: undefined,
permanentErrors: [415, 500, 501],
onDropStopPropagation: false
The target is set dynamically by binding the target option to a URL variable created in scope. Files upload to a tmp directory and, once all chunks are received, are moved to a lecture directory — this makes organisation and administration (e.g. deletion) easier.
i.e. uploadFile/{{module.id}}/{{lecture.id}}
Then the server side,
var multipart = require('connect-multiparty');
var multipartMiddleware = multipart();
GET
//Handle status checks on chunks through Flow.js
app.get('/uploadFile/:mId/:id', lectureRoutes.fileGet);
exports.fileGet = function(req, res) {
flow.get(req, function(status, filename, original_filename, identifier) {
console.log('GET', status);
res.send(200, (status == 'found' ? 200 : 404));
});
}
//'found', filename, original_filename, identifier
//'not_found', null, null, null
$.get = function(req, callback) {
var chunkNumber = req.param('flowChunkNumber', 0);
var chunkSize = req.param('flowChunkSize', 0);
var totalSize = req.param('flowTotalSize', 0);
var identifier = req.param('flowIdentifier', "");
var filename = req.param('flowFilename', "");
if (validateRequest(chunkNumber, chunkSize, totalSize, identifier, filename) == 'valid') {
var chunkFilename = getChunkFilename(chunkNumber, identifier);
fs.exists(chunkFilename, function(exists) {
if (exists) {
callback('found', chunkFilename, filename, identifier);
} else {
callback('not_found', null, null, null);
}
});
} else {
callback('not_found', null, null, null);
}
};
POST
//To Save File
app.post('/uploadFile/:mId/:id', multipartMiddleware, lectureRoutes.fileAddPost);
//FIX THIS
exports.fileAddPost = function(req,res){
flow.post(req, function(status, filename, original_filename, identifier, currentTestChunk, numberOfChunks) {
console.log('POST', status, original_filename, identifier);
if (status === 'done' && currentTestChunk > numberOfChunks) {
var stream = fs.createWriteStream('./tmp/' + filename);
//EDIT: I removed options {end: true} because it isn't needed
//and added {onDone: flow.clean} to remove the chunks after writing
//the file.
flow.write(identifier, stream, {onDone: flow.clean});
//Once written move the file.
mv('./tmp/'+filename,'./public/files/'+req.params.mId+'/'+req.params.id+'/'+filename, {mkdirp: true,clobber: false}, function(err){
if(err == 'EEXIST') {
//FILE ALREADY EXIST STOP IT FROM BEING RE-ADDED TO DB, although allow file to be moved incase its newer
}
else if(!err){
var set = {};
Modules.findById(req.params.mId, function(err,module){
module.Lectures.id(req.params.id).Files.push({"fileName" : filename});
module.save(function(err){
if(err) console.log(err)
//return id of file back
else{
var link = module.Lectures.id(req.params.id).Files;
res.send(200, link[link.length-1]._id)
}
})
})
}
});
}
});
}
//'partly_done', filename, original_filename, identifier
//'done', filename, original_filename, identifier
//'invalid_flow_request', null, null, null
//'non_flow_request', null, null, null
$.post = function(req, callback){
var fields = req.body;
var files = req.files;
var chunkNumber = fields['flowChunkNumber'];
var chunkSize = fields['flowChunkSize'];
var totalSize = fields['flowTotalSize'];
var identifier = cleanIdentifier(fields['flowIdentifier']);
var filename = fields['flowFilename'];
var original_filename = fields['flowIdentifier'];
if (!files[$.fileParameterName] || !files[$.fileParameterName].size) {
callback('invalid_flow_request', null, null, null);
return;
}
var validation = validateRequest(chunkNumber, chunkSize, totalSize, identifier, filename, files[$.fileParameterName].size);
if (validation == 'valid') {
var chunkFilename = getChunkFilename(chunkNumber, identifier);
fs.rename(files[$.fileParameterName].path, chunkFilename, function(){
// Do we have all the chunks?
var currentTestChunk = 1;
var numberOfChunks = Math.max(Math.floor(totalSize/(chunkSize*1.0)), 1);
var testChunkExists = function(){
fs.exists(getChunkFilename(currentTestChunk, identifier), function(exists){
currentTestChunk++;
if(exists){
if(currentTestChunk>numberOfChunks) {
callback('done', filename, original_filename, identifier, currentTestChunk, numberOfChunks);
} else {
// Recursion
testChunkExists();
}
} else {
//Add currentTestChunk and numberOfChunks to the callback
callback('partly_done', filename, original_filename, identifier, currentTestChunk, numberOfChunks);
}
});
}
testChunkExists();
});
} else {
callback(validation, filename, original_filename, identifier);
}
}