I am trying to reconstruct the file/folder structure of a decompressed zip file in the browser with JavaScript. Ideally, I'd like to have all files in a FileList (as if they just got uploaded through a web page) or other iterable object. For instance, a compressed folder containing
folder/file1
folder/file2
someotherfile
should be reconstructed to a FileList/iterable object in which each item corresponds to one of the files in the package (to my knowledge, there is no way to retain the folder structure in JS).
I've been quite successful in reading a tar.gz file and decompressing it using pako with the code at the bottom of this question. However, pako's result is one large ArrayBuffer (the inflator.result
in the code below), and I can't make heads or tails of it when trying to reconstruct the original files and folders. I am running into the following issues:
- How do I know where one file ends and another one begins in the ArrayBuffer?
- How do I determine the original file type of the current file?
Once I know this, I should be able to convert each ArrayBuffer segment to a File with
new File([segment], filename, {type: filetype})
Searching the web also hasn't delivered any useful info. Does anyone have any clues on how to approach this problem?
Here is the code that I use to decompress the zipfile.
import pako from 'pako';
import isFunction from 'lodash/isFunction'
/**
 * Streams a File/Blob sequentially in fixed-size chunks using FileReader,
 * tracking the current byte offset and reporting overall read progress.
 */
class FileStreamer {
  /**
   * @param {Blob|File} file - Source to stream from.
   * @param {number} [chunkSize] - Chunk size in bytes (default 64 KiB).
   */
  constructor(file, chunkSize = 64 * 1024) {
    this.file = file;
    this.chunkSize = chunkSize; // bytes
    this.rewind(); // single point of offset initialization (was set twice before)
  }

  /** Resets the read position back to the start of the file. */
  rewind() {
    this.offset = 0;
  }

  /** @returns {boolean} true once every byte has been consumed. */
  isEndOfFile() {
    return this.offset >= this.getFileSize();
  }

  /**
   * Asynchronously reads the next chunk and advances the offset.
   * @returns {Promise<{data: ArrayBuffer, progress: number}>} the chunk's
   *   bytes plus overall progress in [0, 1]; rejects with the FileReader
   *   error if the read fails.
   */
  readBlock() {
    const fileReader = new FileReader();
    const blob = this.file.slice(this.offset, this.offset + this.chunkSize);
    return new Promise((resolve, reject) => {
      fileReader.onloadend = (event) => {
        const target = event.target;
        if (target.error) {
          return reject(target.error);
        }
        this.offset += target.result.byteLength;
        resolve({
          data: target.result,
          // Guard against division by zero: an empty file is 100% read.
          progress:
            this.file.size === 0
              ? 1
              : Math.min(this.offset / this.file.size, 1),
        });
      };
      fileReader.readAsArrayBuffer(blob);
    });
  }

  /** @returns {number} total size of the underlying file in bytes. */
  getFileSize() {
    return this.file.size;
  }
}
/**
 * Streams a gzip/zlib-compressed file through pako's Inflate and returns
 * the fully decompressed result.
 *
 * @param {Blob|File} zipfile - The compressed input file.
 * @param {(progress: number) => void} [onProgress] - Optional callback
 *   invoked after each chunk with overall progress in [0, 1].
 * @returns {Promise<Uint8Array|string>} the inflated data (pako's result).
 * @throws {Error} if pako reports a decompression error.
 */
export async function decompress(zipfile, onProgress) {
  const fs = new FileStreamer(zipfile);
  const inflator = new pako.Inflate();
  let block;
  while (!fs.isEndOfFile()) {
    block = await fs.readBlock();
    // The second argument flags the final chunk so pako can finalize the stream.
    inflator.push(block.data, fs.isEndOfFile());
    if (inflator.err) {
      // inflator.err is a numeric zlib status code, not an Error; wrap it so
      // callers get a stack trace and pako's human-readable message (msg).
      throw new Error(`inflate failed: ${inflator.msg || inflator.err}`);
    }
    if (isFunction(onProgress)) onProgress(block.progress);
  }
  return inflator.result;
}