
The full error: "Knex: Timeout acquiring a connection. The pool is probably full. Are you missing a .transacting(trx) call?"

I have a CSV/JSON file with over 70k rows. After 15k–17k insertions it stops and throws the above error. Below is the code:

    csvtojson({
        colParser: {
            name: "string",
            lastname: "string"
        },
        checkType: true
    }).fromFile(csvfile)
        .then(jsonArrary => {
            res.json(jsonArrary[0]);
            console.log(jsonArrary.length);
            jsonArrary.forEach(function(array) {
                knex_insert(array);
                // pg_insert(array);
            });
        });

    function knex_insert(x) {
        db("tablename")
            .insert({
                name: null_if_empty(x["name"]),
                lastname: null_if_empty(x["lastname"])
            })
            .then(data => {
                console.log("success!");
            })
            .catch(err => {
                console.log(err);
            });
    }

    function null_if_empty(value) {
        if (value == "") {
            return null;
        } else {
            return value;
        }
    }

Any idea what's happening?

Thank you

Blinhawk

1 Answer


You are creating thousands and thousands of asynchronous queries against the database when you use forEach: each insert tries to grab its own connection from the pool (Knex defaults to a maximum of 10), so the pool is exhausted and later acquisitions time out. Map the data and put all of it in one insert:

csvtojson({
  colParser: {
    name: "string",
    lastname: "string"
  },
  checkType: true
})
  .fromFile(csvfile)
  .then((jsonArrary) => {
    const insertingData = jsonArrary.map((info) => {
      return {
        name: null_if_empty(info["name"]),
        lastname: null_if_empty(info["lastname"])
      };
    });
    knex_insert(insertingData);
  });
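
One detail worth noting: the original handler called res.json before any row had been written. Since knex_insert below returns its query promise, you can chain on the call and respond only once the insert attempt has settled — a minimal sketch of the call site inside the .then above, assuming the same Express-style res from the question (the response shape is hypothetical):

knex_insert(insertingData)
  .then(() => res.json({ inserted: insertingData.length })); // respond after the insert settles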

function knex_insert(mappedData) {
  // Return the promise so callers can chain on completion.
  return db("tablename")
    .insert(mappedData)
    .then(data => {
      console.log("success!");
    })
    .catch(err => {
      console.log(err);
    });
}

function null_if_empty(value) {
  if (value == "") {
    return null;
  } else {
    return value;
  }
}
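
One caveat for files this size: a single multi-row INSERT binds every value as a parameter, and if the target is PostgreSQL (the commented-out pg_insert suggests it is), the wire protocol allows at most 65,535 bind parameters per statement — 70k rows × 2 columns would exceed that. Knex ships a batchInsert helper that splits the rows into separate statements for you; a minimal sketch of the same helper, assuming the db instance and table name from above:

function knex_insert(mappedData) {
  // batchInsert chunks mappedData (1000 rows per INSERT here) so no
  // single statement exceeds the driver's bind-parameter limit.
  return db.batchInsert("tablename", mappedData, 1000)
    .then(() => console.log("success!"))
    .catch(err => console.log(err));
}

The chunks are inserted one after another rather than all at once, so this approach also stays friendly to the connection pool.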