I'm trying to transfer data from SQL Server to DynamoDB using Node.js:
sql.connect(config, function (err) {
    if (err) { console.log(err); return; }

    // create Request object
    var request = new sql.Request();

    // query the database and get the records
    request.query('SELECT Client, Brand FROM Client WHERE Brand = 6',
        function (err, recordset) {
            if (err) { console.log(err); return; }
            console.log('request executed');

            var itemProcessed = 0;

            // write each record to DynamoDB
            recordset.forEach(function (record) {
                var params = {
                    TableName: 'ClientBrandLookUp',
                    Item: {
                        'ClientID': record.Client,
                        'Brand': record.Brand
                    }
                };
                docClient.put(params, function (err, data) {
                    if (err) {
                        console.log(err);
                    } else {
                        console.log('Added');
                        itemProcessed++;
                        if (itemProcessed === recordset.length) { alldone(); }
                    }
                });
            });
        });
});
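From what I've read, I probably need to stream the rows instead of buffering the whole recordset, and batch the writes instead of firing one put per row. Here is a sketch of what I mean, assuming mssql's streaming mode supports pause()/resume() on the request and that docClient.batchWrite accepts up to 25 items per call; config, docClient and alldone are the same as above, and I haven't added retry handling for UnprocessedItems:

var sql = require('mssql');
var AWS = require('aws-sdk');

var docClient = new AWS.DynamoDB.DocumentClient();
var BATCH_SIZE = 25; // batchWrite accepts at most 25 put requests per call

sql.connect(config, function (err) {
    if (err) { console.log(err); return; }

    var request = new sql.Request();
    request.stream = true; // emit rows one at a time instead of buffering the whole recordset

    var batch = [];

    request.on('row', function (record) {
        batch.push({
            PutRequest: {
                Item: {
                    'ClientID': record.Client,
                    'Brand': record.Brand
                }
            }
        });

        if (batch.length === BATCH_SIZE) {
            request.pause(); // stop reading rows until this batch is written
            flushBatch(batch.splice(0), function () { request.resume(); });
        }
    });

    request.on('error', function (err) { console.log(err); });

    request.on('done', function () {
        // write whatever is left over, then finish
        flushBatch(batch.splice(0), alldone);
    });

    function flushBatch(items, callback) {
        if (items.length === 0) { callback(); return; }
        var params = { RequestItems: { 'ClientBrandLookUp': items } };
        docClient.batchWrite(params, function (err, data) {
            if (err) { console.log(err); return; }
            // a real version would retry data.UnprocessedItems here
            callback();
        });
    }

    request.query('SELECT Client, Brand FROM Client WHERE Brand = 6');
});

Is this the right direction, or is there a more idiomatic way to do this in Node.js?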
I am new to Node.js and I think I am doing this wrong. I have 150,000 records in my SQL table.
The process uses a lot of memory and eventually crashes with a "JavaScript heap out of memory" error. I checked the provisioned throughput of my DynamoDB table and raised it to 4,000 writes per second. If I reduce the number of rows coming from SQL, this code works fine.
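If my math is right, 150,000 items at 4,000 writes per second should take only about 40 seconds (150000 / 4000 = 37.5), so I don't think throughput is the bottleneck; it looks like the real problem is holding all the rows plus 150,000 in-flight put calls in memory at once.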
Any help or advice will be greatly appreciated.