i have a redis with more than 10M rows, i want to copy all data to sql server db, currently i wrote a nodejs script to transfer data as below
redis.scan(cursor, "MATCH", "*_*", "COUNT", "100", function (err, res) {
var keys = res[1];
cursor = res[0];
keys.forEach(function (id) {
redis.hgetall(id, function (err, items) {
let splited = Object.keys(items)[0].split("#");
let data = {
customerId: splited[0],
orderId: splited[1],
};
sql.connect(config).then((pool) => {
pool.request().query(
`INSERT INTO [dbo].[Bi]
([CustomerId]
,[OrderId]
,[AddressId])
SELECT
${data.customerId}
,${data.orderId}
,[dbo].[Address].Id
FROM [dbo].[Address] where CustomerId = ${data.customerId}`
)
})
})
})
})
but it takes 1 day to complete, so how should i optimize the script or is there any alternative way to transfer data?