1

I need to upload multiple JSON files and store their contents in an array. My goal is to merge them together. I tried doing this:

var results = new Array();    //global variable

document.getElementById('uploadId').onclick = function () {
   var files = document.getElementById('selectFiles').files;

   for (var i = 0; i < files.length; i++) {
       var fr = new FileReader();

       fr.onload = function (e) {
           console.log(e);
           var result = JSON.parse(e.target.result);
           results.push(result);
       }

       fr.readAsText(files.item(i));
   }
   process();
}; 

function process(){
   console.log(results); // displays everything as expected
   console.log(results.length); // returns 0 ?!
   console.log(results[0]); // returns undefined ?!
}

When I log the results array to the console, everything displays as expected. But when I try to iterate through the array, the individual objects are all undefined, and results.length returns 0 as well.

I suspect the problem is related to the asynchronicity of the reads. Any idea how to solve this?

pelikann
  • Does there happen to be a little blue '`i`' symbol next to `results` when it gets logged? – matthew-e-brown Dec 03 '19 at 19:06
  • @matthew-e-brown yeah, it does – pelikann Dec 03 '19 at 19:08
  • That little '`i`' is the browser's console telling you that that piece of data was evaluated when you went to look at it in the console, and **not** when its line was reached. This means that (probably) it was empty while `process()` was running, and it wasn't populated until afterwards, when you inspected it (see the sketch after these comments). – matthew-e-brown Dec 03 '19 at 19:12
  • @matthew-e-brown so I need to wait a bit before calling process() until the adding to the array is done? – pelikann Dec 03 '19 at 19:25
  • You need to execute `process()` once all `fr.onload()` are finished. – Lain Dec 03 '19 at 19:29
  • Perhaps you should also be using `onchange` instead of `onclick`. – matthew-e-brown Dec 03 '19 at 19:29
  • @Lain any tips on how to do that? onload is asynchronous, right? – pelikann Dec 03 '19 at 19:31
  • You can bind a predefined object to the load function (or define a scope and/or global variable) which stores two values: filesToProcess and filesProcessed. On each onload you increment filesProcessed until it reaches filesToProcess. – Lain Dec 03 '19 at 19:33
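To make the "live evaluation" comment above concrete, here is a hypothetical sketch (not from the original thread). Logging the array object shows a live reference that is only expanded when you click it, while logging a snapshot (`JSON.stringify` or `length`) shows what was actually there when the line ran:

// Hypothetical demo of the console's lazy evaluation (the little blue 'i').
var demoResults = [];

setTimeout(function () {
  // Simulates fr.onload firing later
  demoResults.push({ loaded: true });
}, 1000);

console.log(demoResults);                  // expand it later: shows [{ loaded: true }]
console.log(JSON.stringify(demoResults));  // "[]" – the state when this line actually ran
console.log(demoResults.length);           // 0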

2 Answers

1

You can bind a predefined object to the load function (or define a scope and/or global variable) which stores two values: filesToProcess and filesProcessed. On each onload you increment filesProcessed until it reaches filesToProcess. That is, if you want to keep old browsers like IE or older Safari versions supported.

Sadly I can only show it in theory, without any actual testing.

document.getElementById('uploadId').onclick = function () {
   var files = document.getElementById('selectFiles').files,
       param = {filesProcessed: 0, filesToProcess: files.length};

   for (var i = 0; i < files.length; i++) {
       var fr = new FileReader();

       fr.onload = function(e){
           //REM: results is the global array from the question
           results.push(JSON.parse(e.target.result));

           //REM: this being param
           this.filesProcessed++;

           if(this.filesProcessed === this.filesToProcess){
               process();
           }
       }.bind(param);

       fr.readAsText(files.item(i));
   }
};
Lain
0

Usually you would avoid this problem by making the code that is waiting for the files (in this case, process) part of the FileReader's listener, so that it runs once the reader has finished. I can't think of a scenario where you would need the user to upload multiple files to the same input and not be able to process them separately (i.e., if you needed two files to go hand-in-hand, take them in separate inputs and use a submit button).
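As a rough sketch of that pattern (assuming the question's selectFiles input; processFile is a hypothetical stand-in for whatever per-file work you need):

document.getElementById('selectFiles').addEventListener('change', function () {
  for (const file of this.files) {
    const reader = new FileReader();
    reader.onload = function (e) {
      // Each file is handled as soon as its own read finishes
      processFile(JSON.parse(e.target.result)); // processFile is hypothetical
    };
    reader.readAsText(file);
  }
});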

However, if you ever do, my advice would be to use async/await or ES6 Promises. Here is my approach to read all files into an array as JSON, and then do something with them.

const results = [];
const input = document.querySelector('input[type="file"]');

input.addEventListener('change', async () => {
  // Wrap each FileReader in a Promise and wait for all of them to settle
  await Promise.all([...input.files].map(file => new Promise((resolve, reject) => {
    const reader = new FileReader();
    reader.onloadend = () => {
      try {
        resolve(results.push(JSON.parse(reader.result)));
      } catch (err) {
        // Resolve with a blank value; ignore non-JSON (or do whatever else)
        console.log('Please use .json!');
        resolve();
      }
    };
    reader.readAsText(file);
  })));

  // Do Stuff
  console.log(results);
  console.log(results.length);
  console.log(results[0]);
});
<form action="#">
  <input type="file" multiple />
</form>

This is very similar to my answer here.

Some JSON files to test:

[
  {
    "name": "bobby",
    "age": 12
  },
  {
    "name": "don",
    "age": 64
  },
  {
    "name": "dale",
    "age": 16
  },
  {
    "name": "toby",
    "age": 52
  },
  {
    "name": "alfred",
    "age": 234
  },
  {
    "name": "steve",
    "age": 12
  }
]
[
  {
    "name": "denmark"
  },
  {
    "name": "sweden"
  },
  {
    "name": "norway"
  },
  {
    "name": "finland"
  }
]
matthew-e-brown
  • @pelikann Just keep in mind that when using Promises / async & await you lose IE support. Don’t forget to pick an answer as accepted if it solved your problem! :) – matthew-e-brown Dec 03 '19 at 20:28
  • One lame solution also came to mind: I could just wrap process() in a setTimeout of around 5 s, and I guess it would work in most cases. – pelikann Dec 03 '19 at 20:40
  • @pelikann you could most certainly, but I think that saying most people would frown upon such a solution is an understatement ;) – matthew-e-brown Dec 03 '19 at 20:57