I have a MongoDB database with three collections: Collection1 holds book records, Collection2 holds author records, and Collection3 holds Owner records.
Collection3 is approximately 500 MB.
I need to loop over the Owner collection and, for each of its records, look up the details of the corresponding book and author.
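For context, the Mongoose models look roughly like this (simplified sketch; the real schemas have more fields, but these are the ones my lookups use):

var mongoose = require('mongoose');

// Simplified schema sketches; only the fields used in the lookups are shown.
var bookSchema = new mongoose.Schema({ bookName: String });
var authorSchema = new mongoose.Schema({ author: String });
var ownerSchema = new mongoose.Schema({ bookName: String, bookAuthor: String });

var Book = mongoose.model('Book', bookSchema);
var Author = mongoose.model('Author', authorSchema);
var Owner = mongoose.model('Owner', ownerSchema);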
I have the following API call:
app.get('/api/linkCollections/', function(req, res){
    functions.linkCollections(function(finished){
        if (finished)
            res.json(finished);
    });
});
In a file named functions I have:
linkCollections: function(callback){
    var owners = Owner.find().cursor();
    owners.on('data', function(owner){
        Book.findOne({ 'bookName': owner.bookName }, function(err, book){
            if (err) {
                console.log(err);
            } else if (book !== null) {
                // do stuff to book
            }
        });
        Author.findOne({ 'author': owner.bookAuthor }, function(err, author){
            if (err) {
                console.log(err);
            } else if (author !== null) {
                // do stuff to author
            }
        });
    }).on('error', function(err){
        console.log('error retrieving records');
    }).on('close', function(){
        callback(true);
    });
}
I then use Fiddler to run this GET request, and it fails with the following error message:
[Fiddler] ReadResponse() failed: The server did not return a complete response for this request. Server returned 0 bytes.
How can I process each record of a very large collection without blocking the Node.js event loop?
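My current guess is that the 'data' handler keeps firing while the findOne lookups are still pending, so the pending callbacks pile up in memory faster than they can complete. Something like the sketch below, where I pause the cursor until both lookups for an owner have finished and then resume it, is what I'm considering, but I'm not sure it's the right approach:

linkCollections: function(callback){
    var cursor = Owner.find().cursor();
    cursor.on('data', function(owner){
        // stop further 'data' events until both lookups for this owner finish
        cursor.pause();
        Book.findOne({ 'bookName': owner.bookName }, function(err, book){
            if (!err && book !== null) {
                // do stuff to book
            }
            Author.findOne({ 'author': owner.bookAuthor }, function(err, author){
                if (!err && author !== null) {
                    // do stuff to author
                }
                cursor.resume(); // ask for the next owner
            });
        });
    }).on('error', function(err){
        console.log('error retrieving records');
    }).on('end', function(){
        // fires once the cursor has been fully read
        callback(true);
    });
}

Is pausing and resuming the cursor like this a reasonable way to apply backpressure, or is there a better pattern for iterating over a collection this large?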