
I have an array of File objects and I need to store their content in objects of the form {name: ..., content: ...}, where content is a Blob or an ArrayBuffer.

Filling the content field requires reading the File, but doing so for files of 3 MB or more is so expensive in terms of performance that the entire application crashes.

At the moment I'm using this snippet:

var results = []

// files is the array of File objects
files.forEach(function (file) {
    var reader = new FileReader()

    // Push the result once the file has been read;
    // the callbacks fire asynchronously, in no fixed order
    reader.onload = function () {
        results.push({name: file.name, content: reader.result})
    }

    reader.readAsArrayBuffer(file)
})
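As an aside, this snippet gives no signal when all the files are done and fills results in an unpredictable order. A minimal sketch of how the reads could be awaited instead, assuming the same files array and wrapping FileReader in a Promise:

// Wrap a single read in a Promise so it can be awaited
function readFile(file) {
    return new Promise(function (resolve, reject) {
        var reader = new FileReader()

        reader.onload = function () {
            resolve({name: file.name, content: reader.result})
        }

        reader.onerror = function () {
            reject(reader.error)
        }

        reader.readAsArrayBuffer(file)
    })
}

// results arrives in the same order as files
Promise.all(files.map(readFile)).then(function (results) {
    console.log(results.length + ' files read')
})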

Hoping to reduce the memory pressure, I thought of switching to a chunked solution:

var results = []

// Chunk size is 1 MB
var chunkSize = 1024 * 1024

// file is a single File from the array
var fileSize = file.size

// Starting with an empty Blob
var blob = new Blob()

for (var i = 0; i < fileSize; i += chunkSize) {
    (function (f, start) {
        var reader = new FileReader()

        // Slicing the file in chunks
        var tmpBlob = f.slice(start, start + chunkSize)

        reader.onload = function () {
            // Merging the old chunks with the one just read
            blob = new Blob([blob, reader.result])

            // If dealing with the last chunk, push
            // the result into the results array
            if (start + chunkSize >= fileSize) {
                results.push({
                    name: f.name,
                    content: blob
                })
            }
        }

        reader.readAsArrayBuffer(tmpBlob)
    })(file, i)
}
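Note that nothing guarantees the onload handlers above fire in slice order, so the chunks may be merged out of sequence, and the "last chunk" check only detects the chunk with the highest start offset, not the last one to finish. A sketch of a sequential variant (with a hypothetical readChunked helper, same 1 MB chunk size) that only reads the next slice after the previous one has loaded:

// Hypothetical helper: read file in 1 MB slices, strictly in order,
// and hand the assembled Blob to callback when done
function readChunked(file, callback) {
    var chunkSize = 1024 * 1024
    var blob = new Blob()
    var offset = 0

    function readNext() {
        var reader = new FileReader()

        reader.onload = function () {
            // Append the chunk that was just read; order is
            // guaranteed because reads happen one at a time
            blob = new Blob([blob, reader.result])
            offset += chunkSize

            if (offset < file.size) {
                readNext()
            } else {
                callback({name: file.name, content: blob})
            }
        }

        reader.readAsArrayBuffer(file.slice(offset, offset + chunkSize))
    }

    readNext()
}

// Usage:
// readChunked(file, function (result) { results.push(result) })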
