I've been creating a small Node.js app that iterates through an array of names and queries an API for each one. The problem is that the array is very large (400,000+ words) and my application runs out of memory before the forEach completes.
I've been able to diagnose the issue by researching how JS handles the call stack, Web APIs, and the callback queue. I believe the forEach loop is blocking the call stack, so the HTTP requests keep piling up in the callback queue without ever being resolved.
If anyone can provide a solution for unblocking the forEach loop, or an alternative way of structuring this app, I would be very grateful. (I've sketched one idea I'm considering after the code below.)
Node.js app
const mongoose = require("mongoose");
const fs = require("fs");
const ajax = require("./modules/ajax.js");

// Bring in Models
let Dictionary = require("./models/dictionary.js");

//=============================
// MongoDB connection
//=============================

// Opens a connection to the "bookCompanion" database
mongoose.connect("mongodb://localhost/bookCompanion");
let db = mongoose.connection;

// If the database connection encounters an error, output it to the console.
db.on("error", (err) => {
    console.error("Database connection failed.");
});

// Once the connection is open, read the word list and query the API.
db.once("open", () => {
    console.info("Connected to MongoDB database...");
    fs.readFile("./words-2.json", "utf8", (err, data) => {
        if (err) {
            console.log(err);
        } else {
            const words = JSON.parse(data);
            // Fires off one request per word immediately; with
            // 400,000+ words they all start at once.
            words.forEach((word) => {
                ajax.get(`API url Here?=${word}`)
                    .then((response) => {
                        // Return the save() promise so "word saved" only
                        // logs after the document is actually persisted.
                        return new Dictionary({
                            word: response.word,
                            phonetic: response.phonetic,
                            meaning: response.meaning
                        }).save();
                    })
                    .then(() => {
                        console.log("word saved");
                    })
                    .catch((err) => {
                        console.log("Word not found");
                    });
            });
        }
    });
});
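One idea I've been considering, though I'm not sure it's the right approach: process the array in fixed-size batches with async/await, so that only a limited number of requests is in flight at any one time. A rough sketch of what I mean (the batch size of 100 is an arbitrary guess, and this assumes ajax.get returns a promise, as my module above does):

async function processWords(words) {
    const batchSize = 100; // arbitrary; would need tuning
    for (let i = 0; i < words.length; i += batchSize) {
        const batch = words.slice(i, i + batchSize);
        // Wait for the whole batch to settle before starting the next one,
        // so at most batchSize requests are in flight at a time.
        await Promise.all(batch.map((word) =>
            ajax.get(`API url Here?=${word}`)
                .then((response) => new Dictionary({
                    word: response.word,
                    phonetic: response.phonetic,
                    meaning: response.meaning
                }).save())
                .then(() => console.log("word saved"))
                .catch(() => console.log("Word not found"))
        ));
    }
}

I would then call processWords(JSON.parse(data)) inside the readFile callback instead of the forEach. Is something like this reasonable, or is there a more idiomatic way to throttle the requests?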