I just started using GPU.js. I'm doing some basic math on an array of numbers. Smaller (~3,000,000-entry) and medium (~65,000,000-entry) arrays work fine, but with larger arrays (~200,000,000 entries) I get a runtime exception (in Chrome 116.0.5845.97 (Official Build) (64-bit)):
Uncaught RangeError: Array buffer allocation failed
at new ArrayBuffer (<anonymous>)
at new Float32Array (<anonymous>)
at WebGL2Kernel.readFloatPixelsToFloat32Array (gpu-browser.min.js:28:191697)
at WebGL2Kernel.renderValues (gpu-browser.min.js:28:193814)
at WebGL2Kernel.t (gpu-browser.min.js:28:379382)
at WebGL2Kernel.t (gpu-browser.min.js:28:379402)
at n (gpu-browser.min.js:28:379450)
at <myCallerFunction> (foo.js:420:69)
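For scale, if I'm doing the math right, just the Float32Array that fails to allocate in that trace would need about 200,000,000 × 4 bytes ≈ 800 MB of contiguous memory (assuming the readback only allocates one float per output value, which I'm not sure is the case):

const entries = 200_000_000;
// 4 bytes per float => ~800 MB for the result array alone
console.log((entries * Float32Array.BYTES_PER_ELEMENT) / 1e6 + ' MB'); // "800 MB"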
I'm getting GPU.js from the CDN:
<script src="https://cdn.jsdelivr.net/npm/gpu.js@latest/dist/gpu-browser.min.js"></script>
My myCallerFunction is this:
function myCallerFunction(buffer, kernel, width, kernelDetectionThreshold) {
  const convolutionFunction = gpu.createKernel(
    function (image, kernel, imageWidth, kernelWidth, kernelHalfWidth, kernelDetectionThreshold) {
      // only use square kernels
      const row = Math.floor(this.thread.x / imageWidth);
      const col = Math.round(this.thread.x % imageWidth);
      let sum = 0;
      for (let kernelRowOffset = -kernelHalfWidth; kernelRowOffset <= kernelHalfWidth; ++kernelRowOffset) {
        const targetRow = row + kernelRowOffset;
        for (let kernelColOffset = -kernelHalfWidth; kernelColOffset <= kernelHalfWidth; ++kernelColOffset) {
          sum +=
            image[imageWidth * targetRow + col + kernelColOffset] *
            kernel[kernelHalfWidth + kernelRowOffset][kernelHalfWidth + kernelColOffset];
        }
      }
      return sum >= kernelDetectionThreshold ? sum : 0;
    },
    { constants: {}, output: [buffer.length] }
  );
  return convolutionFunction(buffer, kernel, width, kernel.length, Math.floor(kernel.length / 2), kernelDetectionThreshold);
}
Since this is pretty much just straight math, I don't expect the content of the function to be the issue; I have a feeling it's the handling of the large array being passed in as buffer.
I know I can chop my buffer into smaller chunks and pass them through sequentially (rough sketch below), but I'd like some insight into where this issue is coming from and what the limit actually is. I didn't find anything in the GPU.js docs about input size limits.
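For context, the chunked fallback I have in mind looks roughly like the following. runInChunks and chunkRows are just placeholder names, and I know the real version would need chunks to overlap by kernelHalfWidth rows (and the per-chunk results to be stitched back together) for the convolution to stay correct at the seams.

// Rough sketch of the chunked fallback I'd rather avoid.
// chunkRows is a placeholder I'd have to tune experimentally.
function runInChunks(buffer, kernel, width, kernelDetectionThreshold, chunkRows) {
  const totalRows = buffer.length / width;
  const results = [];
  for (let startRow = 0; startRow < totalRows; startRow += chunkRows) {
    const endRow = Math.min(startRow + chunkRows, totalRows);
    // slice() works for both plain arrays and typed arrays
    const chunk = buffer.slice(startRow * width, endRow * width);
    results.push(myCallerFunction(chunk, kernel, width, kernelDetectionThreshold));
  }
  return results;
}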
I have a feeling it's related to a memory limitation of JavaScript in Chrome, but more insight would be appreciated.