I have the following code:
let idx = 0;
for (const e of parsedData) {
  try {
    const datastore = new Datastore({
      namespace: 'bdlight',
      projectId: e.credential.project_id,
      credentials: {
        type: e.credential.type,
        private_key: e.credential.private_key,
        token_url: e.credential.token_uri,
        client_email: e.credential.client_email,
      },
    });
    this.logger.log(
      `Getting Registration - CNS: ${
        e.cns
      } - CNPJCPF: ${documentNumber} - ${idx + 1}/${parsedData.length}`,
    );
    const query = datastore
      .createQuery('geral')
      .filter('CNPJCPF', '=', documentNumber);
    const [result] = await datastore.runQuery(query);
    registrations.push(...(result ? result : []));
  } catch {
    this.logger.log('Error CNS: ' + e.cns);
    errors.push('Erro no CNS: ' + e.cns);
  } finally {
    idx++;
  }
}
parsedData contains more than 300 credentials. When I run this on a Kubernetes pod with 4096 MiB of RAM, I get a memory leak error. Can I run the garbage collector manually after each iteration?
I have already tried setting datastore to null after each iteration.
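For example, would something along these lines be a reasonable approach? This is only a sketch of what I have in mind, not code I am running yet, and it assumes the Node.js process in the pod is started with the --expose-gc flag so that global.gc is defined:

let idx = 0;
for (const e of parsedData) {
  try {
    // ... same Datastore creation and query as in the code above ...
  } catch {
    this.logger.log('Error CNS: ' + e.cns);
    errors.push('Erro no CNS: ' + e.cns);
  } finally {
    idx++;
    // global.gc only exists when Node is launched with --expose-gc,
    // e.g. node --expose-gc dist/main.js; otherwise this branch is skipped.
    if (typeof global.gc === 'function') {
      global.gc();
    }
  }
}

Would forcing a GC pass like this actually release the memory held by each Datastore instance?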