I have a directory of files (the files dir below), each containing data that is processed (and mixed with data from the initial file data/phrase.js) before the transformed data is written (actually appended) to new files in the output dir. My problem is that all of the data is written to disk at the very end, after all of the processing is complete. I would rather have the first file processed and written to disk, then the second file processed and written to disk, and so on, so that less data is held in memory. (Only a few files are involved in this example, but my actual application handles many more.)
Question: Why is the data written to file only at the very end, once all of the files have been processed? Is there a way to write each file's output as soon as it is ready, rather than holding everything in memory until all of the processing is done?
var fs = require('fs');

// file with some data
fs.readFile('./data/phrase.js', function (err, data) {
    if (err) throw err;
    var somephrase = data.toString();

    // directory of many files
    fs.readdir('./files/', (err, files) => {
        if (err) throw err;
        files.forEach(file => {
            let f = './files/' + file;
            fs.readFile(f, (err, data2) => {
                if (err) throw err;
                let somenumber = data2.toString();

                // intermingle the data from the initial file (phrase.js)
                // with each of the files in the files dir
                let output = somenumber + somephrase;

                // write the output to a new file, e.g. output/1.js
                let output_file = './output/' + somenumber + '.js';
                fs.appendFile(output_file, output, function (err) {
                    if (err) {
                        console.log(err);
                    }
                });
            });
        });
    });
});
data/phrase.js:

    cow jumped over the moon

files/one.js:

    1

files/two.js:

    2

Expected output:

    output/1.js  (1 cow jumped over the moon)
    output/2.js  (2 cow jumped over the moon)
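For illustration, here is a minimal sketch of the ordering I am after, assuming fs.promises is available (Node 10+) and that awaiting each step serializes the work; the processFiles name is just illustrative, and I have not verified this is the right approach:

const fs = require('fs').promises;

async function processFiles() {
    // read the shared phrase once
    const somephrase = (await fs.readFile('./data/phrase.js')).toString();
    const files = await fs.readdir('./files/');

    // handle one file at a time: read, transform, append, then move on,
    // so only one file's worth of data is held in memory at a time
    for (const file of files) {
        const somenumber = (await fs.readFile('./files/' + file)).toString();
        const output = somenumber + somephrase;
        await fs.appendFile('./output/' + somenumber + '.js', output);
    }
}

processFiles().catch(err => console.log(err));

Is something like this the right way to get write-as-you-go behaviour, or is there a more idiomatic approach (streams, perhaps)?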