I have a request that returns a large JSON:
function fetchDataFromApi(request, params) {
var url = 'https://someurl/get-records?' +
'school=someSchool&' +
'year=';
var yearToday = parseInt(new Date().getFullYear());
var requests = [];
for (var i = 2011; i < yearToday; i++)
{
requests.push(url + i);
}
return JSON.parse(UrlFetchApp.fetchAll(requests).map(function(e) { return e.getContentText()}));
}
I have sliced it by year so that UrlFetchApp won't have to deal with the 50MB blob limit. However it throws an error:
Exception details: InternalError: Array length 53673928 exceeds supported capacity limit.
But when I try to access each element of the response, for example
JSON.parse(UrlFetchApp.fetchAll(requests)[0].getContentText()
...it works fine.
Any idea on what causes the issue?
I have also tried dividing all responses into separate requests.
var responses = []
for (var i = 2011; i < yearToday; i++)
{
var request = url + i;
responses.push(UrlFetchApp.fetch(request).getContentText());
}
var json = JSON.parse([].concat.apply([], responses));
return json;
but this error is being thrown in Google Data Studio:
Array length 68522537 exceeds supported capacity limit.