See Version 2 below for a faster version.
Version 1
FYI, in addition to all my comments above, here's the fastest asynchronous version I could come up with:
async function asyncTestStreamParallel(files) {
    const startTime = performance.now();
    let results = [];
    for (let filename of files) {
        results.push(new Promise((resolve, reject) => {
            const stream = fs.createReadStream(filename, {highWaterMark: 64 * 1024 * 10});
            const data = [];
            stream.on('data', chunk => {
                data.push(chunk);
            }).on('end', () => {
                resolve(Buffer.concat(data));
            }).on('error', reject);
        }));
    }
    await Promise.all(results);
    console.log(`Async stream parallel version took ${performance.now() - startTime}ms`);
}
And, here are my results on Windows 10, node v12.13.1:
node --expose_gc temp
Sync version took 1175.2680000066757ms
Async version took 2315.0439999699593ms
Async stream version took 1600.0085990428925ms
Async stream parallel version took 1111.310200035572ms
Async serial version took 4387.053400993347ms
Note: I modified the scheme a bit to pass an array of filenames into each test, rather than creating the filenames each time, so I could centralize the creation of the files.
The things that helped me speed it up were:
- Using a larger highWaterMark, which is presumably the stream buffer size.
- Collecting the data in an array of Buffer chunks and concatenating them once at the end; this drastically reduces peak memory consumption and GC work (see the sketch after this list).
- Allowing the different files in the loop to be read in parallel with each other.
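To illustrate the second point, here's a minimal sketch of the difference (my illustration, not part of the benchmark; readNaive and readChunked are made-up names):

const fs = require('fs');

// Naive approach: append each chunk to a growing string. Every chunk has to
// be decoded to a string and merged into the accumulated result, which
// creates a lot of intermediate garbage for the GC to clean up.
function readNaive(filename) {
    return new Promise((resolve, reject) => {
        let text = '';
        fs.createReadStream(filename)
            .on('data', chunk => { text += chunk; })
            .on('end', () => resolve(text))
            .on('error', reject);
    });
}

// Cheaper approach (what the benchmark does): keep the raw Buffer chunks in
// an array and do a single Buffer.concat() at the end.
function readChunked(filename) {
    return new Promise((resolve, reject) => {
        const chunks = [];
        fs.createReadStream(filename)
            .on('data', chunk => chunks.push(chunk))
            .on('end', () => resolve(Buffer.concat(chunks)))
            .on('error', reject);
    });
}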
With these changes, the asynchronous version is about the same speed as the synchronous one: sometimes a bit slower, sometimes essentially the same. I also put a 2 second delay between the running of each test and forced a run of the garbage collector before each one to make sure GC activity wasn't messing with my results.
Here's my whole script, which can run on any platform. Note that you must use the --expose_gc command line parameter, as in node --expose_gc temp.js:
// Run this with the --expose_gc command line option
const {performance} = require('perf_hooks');
const fs = require('fs');
const path = require('path');

const sizes = [512, 1024, 256, 512];   // file sizes in MB
const data = "0123456789\n";
const testDir = path.join(__dirname, "bigfile");

function makeFiles() {
    // make a bigger string to make fewer disk writes
    const bData = [];
    for (let i = 0; i < 1000; i++) {
        bData.push(data);
    }
    const biggerData = bData.join("");
    try {
        fs.mkdirSync(testDir); // ignore errors if it already exists
    } catch(e) {
        // do nothing if it already exists
    }
    const files = [];
    for (let i = 0; i < sizes.length; i++) {
        let targetLen = sizes[i] * 1024 * 1024;
        let f;
        try {
            let fname = `${path.join(testDir, "test")}-${i}.txt`;
            f = fs.openSync(fname, 'w');
            files.push(fname);
            let len = 0;
            while (len < targetLen) {
                fs.writeSync(f, biggerData);
                len += biggerData.length;
            }
        } catch(e) {
            console.log(e);
            process.exit(1);
        } finally {
            if (f) fs.closeSync(f);
        }
    }
    return files;
}

function clearFiles(files) {
    for (let filename of files) {
        fs.unlinkSync(filename);
    }
    fs.rmdirSync(testDir);
}

// read each file synchronously, one after another
function syncTest(files) {
    const startTime = performance.now();
    const results = [];
    for (let filename of files) {
        results.push(fs.readFileSync(filename));
    }
    console.log(`Sync version took ${performance.now() - startTime}ms`);
}

// start all the fs.promises.readFile() operations at once, then wait for them all
async function asyncTest(files) {
    const startTime = performance.now();
    const results = [];
    for (let filename of files) {
        results.push(fs.promises.readFile(filename));
    }
    await Promise.all(results);
    console.log(`Async version took ${performance.now() - startTime}ms`);
}

// stream each file, one after another
async function asyncTestStream(files) {
    const startTime = performance.now();
    for (let filename of files) {
        await new Promise((resolve, reject) => {
            let stream = fs.createReadStream(filename, {highWaterMark: 64 * 1024 * 10});
            let data = [];
            stream.on('data', chunk => {
                data.push(chunk);
            }).on('close', () => {
                resolve(Buffer.concat(data));
            }).on('error', reject);
        });
    }
    console.log(`Async stream version took ${performance.now() - startTime}ms`);
}

// stream all the files in parallel
async function asyncTestStreamParallel(files) {
    const startTime = performance.now();
    let results = [];
    for (let filename of files) {
        results.push(new Promise((resolve, reject) => {
            const stream = fs.createReadStream(filename, {highWaterMark: 64 * 1024 * 100});
            const data = [];
            stream.on('data', chunk => {
                data.push(chunk);
            }).on('end', () => {
                resolve(Buffer.concat(data));
            }).on('error', reject);
        }));
    }
    await Promise.all(results);
    console.log(`Async stream parallel version took ${performance.now() - startTime}ms`);
}

// await each fs.promises.readFile() before starting the next one
async function asyncTestSerial(files) {
    const startTime = performance.now();
    const results = [];
    for (let filename of files) {
        results.push(await fs.promises.readFile(filename));
    }
    console.log(`Async serial version took ${performance.now() - startTime}ms`);
}

// force a garbage collection run, then wait t ms
function delay(t) {
    return new Promise(resolve => {
        global.gc();
        setTimeout(resolve, t);
    });
}

// delay between each test to let any system stuff calm down
async function run() {
    const files = makeFiles();
    try {
        await delay(2000);
        syncTest(files);
        await delay(2000);
        await asyncTest(files);
        await delay(2000);
        await asyncTestStream(files);
        await delay(2000);
        await asyncTestStreamParallel(files);
        await delay(2000);
        await asyncTestSerial(files);
    } catch(e) {
        console.log(e);
    } finally {
        clearFiles(files);
    }
}

run();
Version 2
Then, I figured out that for files under 2GB, we can pre-allocate a buffer for the whole file and read it in a single read, which can be even faster. This version adds several new tests: syncTestSingleRead(), asyncTestSingleReadSerial() and asyncTestSingleReadParallel().
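As far as I know, the 2GB ceiling comes from the maximum size of a single Buffer on 64-bit node v12 (buffer.constants.MAX_LENGTH is 2**31 - 1 bytes), so a guard along these lines (a hypothetical sketch, not part of the benchmark) could decide when the single-read approach is usable:

const { constants } = require('buffer');
const fs = require('fs');

// Hypothetical guard: the single-read approach only works when the whole
// file fits in one Buffer; on 64-bit node v12, constants.MAX_LENGTH is
// 2**31 - 1 bytes (just under 2GB). Larger files would need streaming.
function canReadInOneShot(filename) {
    const { size } = fs.statSync(filename);
    return size <= constants.MAX_LENGTH;
}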
These new versions are all faster and, for once, the asynchronous versions are consistently faster than the synchronous ones:
node --expose_gc temp
Sync version took 1602.546700000763ms
Sync single read version took 680.5937000513077ms
Async version took 2337.3639990091324ms
Async serial version took 4320.517499983311ms
Async stream version took 1625.9839000105858ms
Async stream parallel version took 1119.7469999790192ms
Async single read serial version took 580.7244000434875ms
Async single read parallel version took 360.47460001707077ms
And here's the code that matches these results:
// Run this with the --expose_gc command line option
const {performance} = require('perf_hooks');
const fs = require('fs');
const fsp = fs.promises;
const path = require('path');

const sizes = [512, 1024, 256, 512];   // file sizes in MB
const data = "0123456789\n";
const testDir = path.join(__dirname, "bigfile");

function makeFiles() {
    // make a bigger string to make fewer disk writes
    const bData = [];
    for (let i = 0; i < 1000; i++) {
        bData.push(data);
    }
    const biggerData = bData.join("");
    try {
        fs.mkdirSync(testDir); // ignore errors if it already exists
    } catch(e) {
        // do nothing if it already exists
    }
    const files = [];
    for (let i = 0; i < sizes.length; i++) {
        let targetLen = sizes[i] * 1024 * 1024;
        let f;
        try {
            let fname = `${path.join(testDir, "test")}-${i}.txt`;
            f = fs.openSync(fname, 'w');
            files.push(fname);
            let len = 0;
            while (len < targetLen) {
                fs.writeSync(f, biggerData);
                len += biggerData.length;
            }
        } catch(e) {
            console.log(e);
            process.exit(1);
        } finally {
            if (f) fs.closeSync(f);
        }
    }
    return files;
}

function clearFiles(files) {
    for (let filename of files) {
        fs.unlinkSync(filename);
    }
    fs.rmdirSync(testDir);
}

// read a file in one single read into a pre-allocated buffer, synchronously
function readFileSync(filename) {
    let handle = fs.openSync(filename, "r");
    try {
        let stats = fs.fstatSync(handle);
        let buffer = Buffer.allocUnsafe(stats.size);
        let bytesRead = fs.readSync(handle, buffer, 0, stats.size, 0);
        if (bytesRead !== stats.size) {
            throw new Error("bytesRead not full file size");
        }
        return buffer;
    } finally {
        fs.closeSync(handle);
    }
}

// read a file in one single read into a pre-allocated buffer
async function readFile(filename) {
    let handle = await fsp.open(filename, "r");
    try {
        let stats = await handle.stat();
        let buffer = Buffer.allocUnsafe(stats.size);
        let {bytesRead} = await handle.read(buffer, 0, stats.size, 0);
        if (bytesRead !== stats.size) {
            throw new Error("bytesRead not full file size");
        }
        return buffer;
    } finally {
        await handle.close();
    }
}

// read each file synchronously, one after another
function syncTest(files) {
    const startTime = performance.now();
    const results = [];
    for (let filename of files) {
        results.push(fs.readFileSync(filename));
    }
    console.log(`Sync version took ${performance.now() - startTime}ms`);
}

// single reads, synchronously, one after another
function syncTestSingleRead(files) {
    const startTime = performance.now();
    const results = [];
    for (let filename of files) {
        results.push(readFileSync(filename));
    }
    console.log(`Sync single read version took ${performance.now() - startTime}ms`);
}

// start all the fs.promises.readFile() operations at once, then wait for them all
async function asyncTest(files) {
    const startTime = performance.now();
    const results = [];
    for (let filename of files) {
        results.push(fs.promises.readFile(filename));
    }
    await Promise.all(results);
    console.log(`Async version took ${performance.now() - startTime}ms`);
}

// stream each file, one after another
async function asyncTestStream(files) {
    const startTime = performance.now();
    for (let filename of files) {
        await new Promise((resolve, reject) => {
            let stream = fs.createReadStream(filename, {highWaterMark: 64 * 1024 * 10});
            let data = [];
            stream.on('data', chunk => {
                data.push(chunk);
            }).on('close', () => {
                resolve(Buffer.concat(data));
            }).on('error', reject);
        });
    }
    console.log(`Async stream version took ${performance.now() - startTime}ms`);
}

// stream all the files in parallel
async function asyncTestStreamParallel(files) {
    const startTime = performance.now();
    let results = [];
    for (let filename of files) {
        results.push(new Promise((resolve, reject) => {
            const stream = fs.createReadStream(filename, {highWaterMark: 64 * 1024 * 100});
            const data = [];
            stream.on('data', chunk => {
                data.push(chunk);
            }).on('end', () => {
                resolve(Buffer.concat(data));
            }).on('error', reject);
        }));
    }
    await Promise.all(results);
    console.log(`Async stream parallel version took ${performance.now() - startTime}ms`);
}

// single reads, one file at a time, reusing the buffer when it's big enough
async function asyncTestSingleReadSerial(files) {
    const startTime = performance.now();
    let buffer;
    for (let filename of files) {
        let handle = await fsp.open(filename, "r");
        try {
            let stats = await handle.stat();
            if (!buffer || buffer.length < stats.size) {
                buffer = Buffer.allocUnsafe(stats.size);
            }
            let {bytesRead} = await handle.read(buffer, 0, stats.size, 0);
            if (bytesRead !== stats.size) {
                throw new Error("bytesRead not full file size");
            }
        } finally {
            await handle.close();
        }
    }
    console.log(`Async single read serial version took ${performance.now() - startTime}ms`);
}

// single reads of all the files in parallel
async function asyncTestSingleReadParallel(files) {
    const startTime = performance.now();
    await Promise.all(files.map(readFile));
    console.log(`Async single read parallel version took ${performance.now() - startTime}ms`);
}

// await each fs.promises.readFile() before starting the next one
async function asyncTestSerial(files) {
    const startTime = performance.now();
    const results = [];
    for (let filename of files) {
        results.push(await fs.promises.readFile(filename));
    }
    console.log(`Async serial version took ${performance.now() - startTime}ms`);
}

// force a garbage collection run, then wait t ms
function delay(t) {
    return new Promise(resolve => {
        global.gc();
        setTimeout(resolve, t);
    });
}

// delay between each test to let any system stuff calm down
async function run() {
    const files = makeFiles();
    try {
        await delay(2000);
        syncTest(files);
        await delay(2000);
        syncTestSingleRead(files);
        await delay(2000);
        await asyncTest(files);
        await delay(2000);
        await asyncTestSerial(files);
        await delay(2000);
        await asyncTestStream(files);
        await delay(2000);
        await asyncTestStreamParallel(files);
        await delay(2000);
        await asyncTestSingleReadSerial(files);
        await delay(2000);
        await asyncTestSingleReadParallel(files);
    } catch(e) {
        console.log(e);
    } finally {
        clearFiles(files);
    }
}

run();