I have a NodeJS backend that listens for a POST request carrying a JSON payload. Using the information in that payload it builds a CSV file with around 15,000 lines and then uses res.download to send the CSV file back to the client.
Reading from a cloud database into the CSV file is no problem. I have checked the file on the server and the lines are all there and accurate. However, the file downloaded on the client side has maybe a few hundred lines cut off. It seems like either res.download() is running too soon — even though I've explicitly ended the stream once the for loop finishes — or it is running at the right time but the CSV file is still being buffered to disk.
Here is my code:
Server side:
// Handles a CSV export request: reads the dataset from Firebase, writes it
// to a CSV file on disk, then sends the finished file to the client.
app.post('/dashboard/download_data', function (req, res) {
let payload = req.body;
ref.orderByKey().once("value", function (snapshot) {
let data = snapshot.val();
// Keep a handle on the *file* stream: 'finish' on `writer` only means the
// CSV formatter is done emitting, not that the bytes have reached disk.
const fileStream = fs.createWriteStream('C:\\user\\EVCS_portal\\out.csv');
writer.pipe(fileStream);
for (let key in data) {
if (data.hasOwnProperty(key)) {
test_time = data[key]['time'];
writer.write({
time: data[key]['time'],
ac2p: data[key]['ac2p'],
dcp: data[key]['dctp'] // NOTE(review): 'dcp' is sourced from 'dctp' — confirm the key is intentional
})
}
}
writer.end('This is the end of writing\n');
// BUG FIX: wait for the underlying file stream to flush before sending.
// Listening for 'finish' on `writer` let res.download() run while the tail
// of the CSV was still buffered, truncating the client's copy.
fileStream.on('finish', () => {
console.log(test_time);
res.download('C:\\user\\EVCS_portal\\out.csv');
console.log('file sent out!')
});
});
});
Client side js:
// Requests the CSV export with the caller's Firebase ID token, then triggers
// a browser download of the returned blob.
firebase.auth().currentUser.getIdToken(true).then(function (idToken) {
let xhr = new XMLHttpRequest();
xhr.onreadystatechange = function () {
if (xhr.readyState === 4 && xhr.status === 200) {
let a = document.createElement('a');
// Keep the object URL so we can release it after the click; the original
// code leaked one blob URL (and one hidden <a>) per download.
const blobUrl = window.URL.createObjectURL(xhr.response);
a.href = blobUrl;
a.download = download_date + '.csv';
a.style.display = 'none';
document.body.appendChild(a);
a.click();
// Clean up: detach the anchor and revoke the URL so the blob's memory
// can be reclaimed by the browser.
document.body.removeChild(a);
window.URL.revokeObjectURL(blobUrl);
}
};
// Surface network-level failures instead of failing silently.
xhr.onerror = function () {
console.error('CSV download request failed');
};
let url = "/dashboard/download_data";
xhr.open("POST", url, true);
xhr.setRequestHeader("Content-Type", "application/json");
xhr.responseType = 'blob';
// Package our payload including the idToken and the date
let data = JSON.stringify({"idToken": idToken, 'date': download_date});
xhr.send(data);
});