benchmark of my function:
mark@ichikawa:~/inbox/D3/read_logs$ time python countbytes.py
bytes: 277464
real 0m0.037s
user 0m0.036s
sys 0m0.000s
mark@ichikawa:~/inbox/D3/read_logs$ time node countbytes.js
bytes: 277464
real 0m0.144s
user 0m0.120s
sys 0m0.032s
The measurements were taken on a 64-bit Ubuntu 13.04 (x86_64) machine.
This is the simple version of my benchmark (I ran 1000 iterations as well). It shows that the function I wrote to read tgz files takes more than 3x as long as the equivalent function I wrote in Python.
For 1000 iterations on a 277 kB file (measured with process.hrtime and timeit):
Node: 30.608409032000015
Python: 6.84210395813
For 1000 iterations size 9.7MB:
Node: 590.491709309999
Python: 200.796745062
Please let me know if you have any idea on how to speed up reading the tgz files.
here is the code:
var fs = require('fs');
var tar = require('tar');
var zlib = require('zlib');
var Stream = require('stream');
// Minimal classic-streams (streams1) writable sink that counts the total
// number of bytes piped into it and logs the total when the stream ends.
var countBytes = new Stream;
countBytes.writable = true;
countBytes.count = 0;
countBytes.bytes = 0;

// Consume one chunk. Returns true to tell the upstream pipe that more data
// can be written immediately. BUG FIX: the original returned undefined
// (falsy), which under the classic pipe protocol signals backpressure and
// pauses the source until a 'drain' event — an event this stream never
// emits, so the pipeline could stall.
countBytes.write = function (buf) {
  countBytes.bytes += buf.length;
  return true;
};

// Finish the stream; an optional final chunk is counted before closing.
countBytes.end = function (buf) {
  if (arguments.length) countBytes.write(buf);
  countBytes.writable = false;
  console.log('bytes: ' + countBytes.bytes);
};

// Mark the stream unusable without logging (abnormal-termination path).
countBytes.destroy = function () {
  countBytes.writable = false;
};
// Read the .tgz, gunzip it, extract the tarball's entries, and count the
// bytes that come out the end of the chain.
// NOTE(review): tar.Extract writes the extracted files to disk under `path`
// in addition to passing data along — if the Python version only counts
// bytes in memory, that extra disk I/O likely accounts for much of the
// speed difference; consider tar.Parse if extraction to disk is not needed
// — TODO confirm against the Python implementation.
fs.createReadStream('supercars-logs-13060317.tgz')
.pipe(zlib.createUnzip())
.pipe(tar.Extract({path: "responsetimes.log.13060317"}))
.pipe(countBytes);
Any idea how to speed things up?