I have a 2 GB CSV file to parse, and I think Node only allows memory usage below 1 GB; I'm getting this error:
Attempt to allocate Buffer larger than maximum size: 0x3fffffff bytes.
I would like to stream the file and write it to a data.json file.
Then I'll use mongoimport to write it to Mongo. I'm open to any ideas if there's a more efficient way. I have been using csvtojson, but I would prefer to use Papa Parse, as it's the best parser I've seen and I really enjoy using it.
Using csvtojson
// Streams a CSV file through csvtojson into outputData.json without ever
// buffering the whole file in memory (constructResult: false), then invokes
// `callback` when the output file is fully written.
//
// callback(err) — called exactly once: with no argument on success, or with
// the error if any of the three streams fails. (Passing the error is
// backward-compatible: the original only ever called callback().)
//
// NOTE(review): "fileName" is a string literal here — presumably it was meant
// to be a variable holding the real input path; confirm against the caller.
// `Converter` and `delimiter` are expected to be in scope from elsewhere in
// the file (csvtojson's Converter and the CSV delimiter).
function csvToJson(callback) {
    console.log("in csvToJson");
    var fs = require("fs");

    // constructResult: false keeps the converter from accumulating the parsed
    // result in memory — essential for multi-GB inputs.
    var csvConverter = new Converter({ constructResult: false, delimiter: delimiter, noheader: false });
    var readStream = fs.createReadStream("fileName");
    var writeStream = fs.createWriteStream("outputData.json");

    // Guard so callback fires exactly once even if multiple events arrive.
    var done = false;
    function finish(err) {
        if (done) return;
        done = true;
        callback(err);
    }

    // Without 'error' handlers, any stream failure throws an uncaught
    // exception and the callback is never invoked. Destroy the opposite end
    // on failure so no file descriptor leaks.
    readStream.on("error", function (err) {
        writeStream.destroy();
        finish(err);
    });
    csvConverter.on("error", function (err) {
        readStream.destroy();
        writeStream.destroy();
        finish(err);
    });
    writeStream.on("error", function (err) {
        readStream.destroy();
        finish(err);
    });

    // 'close' fires after the output file descriptor is released; by then the
    // read stream is fully consumed and both streams are already closed, so
    // the original readStream.close() / writeStream.close() calls here were
    // redundant no-ops and have been removed.
    writeStream.on("close", function () {
        finish();
    });

    readStream.pipe(csvConverter).pipe(writeStream);
}