I am working on a project related to my master's degree. The project uses MongoDB and Node.js. The previous programmer used the insertMany() function to insert multiple records into the database, but now that the data has grown larger, this method gives me the error below.

I replaced it with the initializeUnorderedBulkOp() function as shown below. That gives me an Unhandled rejection MongoError: E11000 duplicate key error index error. Despite this error, the function inserts all the records into the database. But because the error is unhandled, the backend stops moving forward and my program exits. How can I fix this?

var poolSchema = mongoose.Schema({
    "topic_id": {
        type: Number,
        default: null,
        required: true
    },
    "document_id": {
        type: String,
        default: null,
        required: true
    },
    "index": {
        type: Number,
        default: null
    },
    "score": {
        type: mongoose.Schema.Types.Decimal128,
        default: null
    },
    "search_engine_id": {
        type: String,
        default: null,
        required: false
    },
    "is_assigned": {
        type: Boolean,
        default: false,
        required: false
    },
    "project":{
        type : String,
        trim : true,
        default : null,
        required :true
    },
    "unique_id":{
        type: String,
        default: null,
        unique : true
    },
    "docno_projectid":{
        type: String
    },
    "createddate":{
        type:Date,
        default : Date.now
    }
}, { collection: "pools" });

module.exports.createPoolItems = function(poolItems, callback) {
    populateUniqueId(poolItems);
    Pools.collection.insertMany(poolItems, {ordered: false}, callback);
}

function populateUniqueId(poolItems) {
    if (poolItems.length > 0) {
        poolItems.forEach(element => {
            element.unique_id = element.project + '_' + element.topic_id + '_' + element.document_id;
        });
    }
}

Error:

--- Last few GCs --->

[15968:000002C335C32F20]    86903 ms: Mark-sweep 1398.1 (1427.9) -> 1396.8 (1424.9) MB, 689.3 / 0.0 ms  (+ 0.0 ms in 25 steps since start of marking, biggest step 0.0 ms, walltime since start of marking 694 ms) (average mu = 0.075, current mu = 0.007) all[15968:000002C335C32F20] 
   87628 ms: Mark-sweep 1397.4 (1424.9) -> 1396.9 (1425.4) MB, 722.7 / 0.0 ms  (average mu = 0.039, current mu = 0.003) allocation failure scavenge might not succeed


<--- JS stacktrace --->

==== JS stack trace =========================================

    0: ExitFrame [pc: 000002132645C5C1]
Security context: 0x01f1eef9e6e9 <JSObject>
    1: /* anonymous */ [0000014965ACD3F9] [C:\TopicBinder2-master\node_modules\mongoose\lib\document.js:~878] [pc=00000213267DC691](this=0x03783882f629 <model map = 0000005679E8B731>,pathToMark=0x02ac0065e369 <String[8]: topic_id>,path=0x02ac0065e369 <String[8]: topic_id>,constructing=0x030d1ba828c9 <true>,parts=0x0378388302a1 <JSArray[1]>,schema=0x01b059d...

FATAL ERROR: Ineffective mark-compacts near heap limit Allocation failed - JavaScript heap out of memory
 1: 00007FF7C206832A v8::internal::GCIdleTimeHandler::GCIdleTimeHandler+4506
 2: 00007FF7C2042DB6 node::MakeCallback+4534
 3: 00007FF7C2043730 node_module_register+2032
 4: 00007FF7C235C14E v8::internal::FatalProcessOutOfMemory+846
 5: 00007FF7C235C07F v8::internal::FatalProcessOutOfMemory+639
 6: 00007FF7C2542874 v8::internal::Heap::MaxHeapGrowingFactor+9620
 7: 00007FF7C2539856 v8::internal::ScavengeJob::operator=+24550
 8: 00007FF7C2537EAC v8::internal::ScavengeJob::operator=+17980
 9: 00007FF7C2540BF7 v8::internal::Heap::MaxHeapGrowingFactor+2327
10: 00007FF7C2540C76 v8::internal::Heap::MaxHeapGrowingFactor+2454
11: 00007FF7C266AF17 v8::internal::Factory::NewFillerObject+55
12: 00007FF7C26E8106 v8::internal::operator<<+73494
13: 000002132645C5C1

When I use initializeUnorderedBulkOp():

module.exports.createPoolItems = function(poolItems, callback) {
    let bulk = Pools.collection.initializeUnorderedBulkOp();
    populateUniqueId(poolItems);

    for (var i = 0; i < poolItems.length; i++) {
        bulk.insert(poolItems[i]);
    }

    bulk.execute();

    //bulk.execute({w:0, j:false});
}

Error:

Unhandled rejection MongoError: E11000 duplicate key error index: heroku_bwcmzm1p.pools.$unique_id dup key: { : "tralkc", : 501, : "96284" }    at Function.MongoError.create
  • [Did you read this question](https://stackoverflow.com/questions/24430220/e11000-duplicate-key-error-index-in-mongodb-mongoose)? Could you clarify your question? Would you like those duplicates filtered out, or to implement validation? What's `improvement` for you? – Józef Podlecki May 18 '20 at 21:30

1 Answer

I see no error handling in your code. Try running the function asynchronously and catching the exception. You say the data still makes it to your database, so handling the exception in a try block, for example with async/await, should stop your server from crashing.
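
A minimal sketch of what that could look like, reusing the `Pools` model and `populateUniqueId()` from your question, and assuming duplicate-key errors (code 11000) are the only failures you want to swallow:

module.exports.createPoolItems = async function (poolItems) {
    populateUniqueId(poolItems);

    const bulk = Pools.collection.initializeUnorderedBulkOp();
    poolItems.forEach(item => bulk.insert(item));

    try {
        // execute() returns a promise when called without a callback,
        // so awaiting it turns the unhandled rejection into a catchable error
        await bulk.execute();
    } catch (err) {
        // E11000 duplicate key errors carry code 11000; because the bulk op
        // is unordered, the non-duplicate documents were still inserted
        if (err.code !== 11000) {
            throw err; // rethrow anything other than a duplicate-key error
        }
    }
};

The caller would then use `await createPoolItems(items)` (or `.then()/.catch()`) instead of passing a callback, so a duplicate `unique_id` no longer brings the whole process down.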