I have a json file and the structure of that file is as follows:
{
"orders":[
{
"id": 876876876,
"app_id":580714,
"client_details": {},
"discount_codes": [{}],
"line_items": [
{
"id": 466157049,
...
}
],
......
},
{
"id": 47844583,
"app_id":580714,
"client_details": {},
"discount_codes": [{}],
"line_items": [
{
"id": 466157049,
...
}],
....
},
{...},
{...},
{...}
]
}
This array can contain more than 10 lakh (1 million) objects. Currently I need to:
- find the object with order id
- Total number of orders
- get orders with order id and with the number of limit
I am using the following code:
return new Promise((resolve, reject) => {
var orders = []
var getStream = function () {
var stream = fs.createReadStream(file_path, { encoding: 'utf8' }),
parser = JSONStream.parse('*');
return stream.pipe(parser);
};
getStream()
.pipe(es.mapSync(function (data) {
orders = data
})) .on('end', function() {
resolve(orders)
})
})
But it makes the system hang. Also, I have used the following command as well:
node --max-old-space-size=8192 index.js
But that also did not work. Can anyone please help me with processing such a big JSON file?
Edited: Now filesize is 850MB and I am using the following code:
return new Promise((resolve, reject) => {
var data = ''
var reader_stream = fs.createReadStream(file_path)
reader_stream.setEncoding('UTF8')
reader_stream.on('data', function(chunk) {
data += chunk
})
reader_stream.on('end',function() {
try{
const orders_result = JSON.parse(data)
var order_count = (orders_result.orders)
resolve({
"count": order_count.length
})
} catch(err) {
console.log(err)
}
})
reader_stream.on('error', function(err) {
console.log(err.stack)
reject(err.stack)
})
})
and getting the following error
Uncaught Exception: RangeError: Invalid string length