I am trying to Post a large JSON into my Database, the JSON has at least 400.000 Objects.
If I cut the file, and try to Post 20.000 Objects everything works just fine, so the problem should be JSON's size.
I've split the JSON into 20 chunks, and my idea is to upload one at a time but i'm struggling to make it work.
This is what I'm using:
var rows = {};
Papa.parse(content, {
header: false,
delimiter: '|',
worker: true,
encoding: "utf16le",
dynamicTyping: true,
skipEmptyLines: true,
complete: function(results) {
rows = results.data;
let obj = []
for(var i=0; i < rows.length; i++){
obj.push(rows[i])
}
let result = []
for(let i in obj) {
let temp = {}
if(i > 0) {
temp["id"] = obj[i][0]
temp["name"] = obj[i][1]
temp["tel"] = obj[i][2]
temp["email"] = obj[i][3]
temp["status"] = obj[i][5]
result.push(temp)
}
}
var array1 = result.map((e) => {
return {
id: e.id,
name: e.name,
email: e.email
}
})
let chunked = []
let size = 20000;
Array.from({length: Math.ceil(array1.length / size)}, (val, i) => {
chunked.push(array1.slice(i * size, i * size + size))
})
console.log(chunked); // at this point I have my array divided into chunks of 20000
axios({
url: 'url',
method: 'post',
data: chunked
})
.then(function (response) {
// your action after success
console.log(response);
})
.catch(function (error) {
// your action on error successif (error.response) {
console.log(error);
});