I'm trying to read several files synchronously with fast-csv, it should looks like:
read file 1
execute something while reading
read file 2
execute something while reading (it must be execute after first execution's file that's why I need to do this synchronously)
...
Here is my code simplified:
const csv = require('fast-csv');
const PROCEDURES = [
{ "name": "p1", "file": "p1.csv" },
{ "name": "p2", "file": "p2.csv" },
];
const launchProcedure = (name, file) => {
try {
const fs = require("fs");
const stream = fs.createReadStream(file, {
encoding: 'utf8'
});
console.log('launching parsing...');
stream.once('readable', () => {
// ignore first line
let chunk;
while (null !== (chunk = stream.read(1))) {
if (chunk == '\n') {
break;
}
}
// CSV parsing
const csvStream = csv.fromStream(stream, {
renameHeaders: false,
headers: true,
delimiter: ',',
rowDelimiter: '\n',
quoteHeaders: false,
quoteColumns: false
}).on("data", data => {
console.log('procedure execution...');
// I execute a procedure...
}).on("error", error => {
logger.error(error);
}).on("end", data => {
logger.info(data);
});
});
}
catch (e) {
logger.info(e);
}
}
PROCEDURES.forEach(procedure => {
launchProcedure(procedure.name, procedure.file);
});
Output will be:
launching parsing...
launching parsing...
procedure execution...
procedure execution...
Problem is on stream.once but I used this to ignore first line. I tried to promisify my function and use async await... ( I had a similar problem when executing my procedure and I solved it by using csvStream.pause() and csvStream.resume() ).
Any idea ?