I am currently trying to write a tool to browse and search for certain nodes in a local 900+ MB XML file using just Google Chrome and JavaScript. As there is no way to directly access local files, I set up a file input element, which I read with the FileReader API and then parse into XML afterwards.
When reading the file, Chrome crashes because a RAM limit has been reached. This seems to be a limitation imposed by the browser, because my computer's RAM is nowhere near exhausted. This is my code:
// Read a user-selected XML file and parse it into an XML DOM document.
// NOTE(review): readAsText loads the entire file into memory at once;
// for files in the hundreds of MB this can exceed the browser's per-tab
// memory limit — confirm whether a streaming approach (file.stream())
// is needed for the target file sizes.
let xmlFile = "";
const fileInput = document.getElementById('fileInput');
const fileReader = new FileReader();
const readProgress = document.getElementById('readProgress');

fileInput.addEventListener('change', readFile);

// Start reading the first selected file, if the user actually picked one.
function readFile() {
  const f = fileInput.files[0];
  if (!f) return; // 'change' also fires when the user cancels the file dialog
  fileReader.readAsText(f);
}

// Once the full text is loaded, parse it as XML and log the result.
fileReader.onload = function (event) {
  xmlFile = $.parseXML(event.target.result);
  console.log(xmlFile);
  console.log('done');
};

// Display read progress as a whole-number percentage.
fileReader.onprogress = function (event) {
  if (!event.lengthComputable) return; // total size unknown — nothing to show
  const percent = Math.round((event.loaded / event.total) * 100);
  readProgress.textContent = percent;
};
Is there any way to increase Chrome's RAM limit, or is there a better way to handle an XML file this big?
Thanks in advance
Barneo