I am trying to get the SHA512 of a large file. 2.5 G and maybe more larger file. I so the approach it's to create an arraybuffer to be digest by the crypto.subtle.digest API.
The problem is i always have a
Array buffer allocation failed
Is it my chunk size, it's there a limit on the array buffer. I got no more idea ? Or maybe there is a better way to get the hash digest instead use a full arraybuffer ?
// received a file object
function CalculateHash(file)
{
var obj = { File : file};
var chunkSize = 10485760;
const chunksQuantity = Math.ceil(obj.File.size / chunkSize);
const chunksQueue = new Array(chunksQuantity).fill().map((_, index) => index).reverse();
var buffer = null;
reader.onload = async function (evt) {
if (buffer == null) {
buffer = evt.currentTarget.result;
} else {
var tmp = new Uint8Array(buffer.byteLength + evt.currentTarget.result.byteLength);
tmp.set(new Uint8Array(buffer), 0);
tmp.set(new Uint8Array(evt.currentTarget.result), buffer.byteLength);
buffer = tmp;
}
readNext();
}
var readNext = async function () {
if (chunksQueue.length > 0) {
const chunkId = chunksQueue.pop();
const sentSize = chunkId * chunkSize;
const chunk = obj.File.slice(sentSize, sentSize + chunkSize);
reader.readAsArrayBuffer(chunk);
} else {
var x = await digestMessage(buffer);
hash.SHA512 = x.toUpperCase();
buffer = null;
}
}
readNext();
}
async function digestMessage(file) {
const hashBuffer = await crypto.subtle.digest('SHA-512', file); // hash the message
const hashArray = Array.from(new Uint8Array(hashBuffer)); // convert buffer to byte array
const hashHex = hashArray.map(b => b.toString(16).padStart(2, '0')).join(''); // convert bytes to hex string
return hashHex;
}