I'm working on a node script to download all the images that are shared with my account on Google Drive.
I'm hitting the same wall others have posted about on Stack Overflow: the Google Drive API's rate limits.
Exponential backoff makes a lot of sense to me; I just don't know how to go about implementing it.
Any insight I could get into this would be incredibly helpful. Even just a "get started by..." would be great!
I've added the method I'm working on below. Auth and everything else is working just fine; the problem is handling the userRateLimitExceeded
error I keep getting back.
Any and all help would be fantastic and greatly appreciated.
Thank you!
/**
* Download all of the shared images.
*
* @param {google.auth.OAuth2} auth An authorized OAuth2 client.
*/
function downloadImages(auth) {
  const gDrive = google.drive({
    version: 'v3',
    auth: auth
  });

  gDrive.files.list({
    q: 'sharedWithMe = true and mimeType = "image/jpeg"'
  }, (err, resp) => {
    if (err) {
      console.log('The API returned an error: ' + err);
      return;
    }
    if (!resp.files.length) {
      console.error('No files found.');
    } else {
      // Remove existing images.
      // removeImages();
      _.each(resp.files, (file) => {
        // Skip files that have already been downloaded.
        if (fs.existsSync(IMAGE_DIR + file.name)) {
          return;
        }
        gDrive.files.get({
          fileId: file.id
        })
          .on('end', () => {
            console.log(chalk.green(file.name + ' successfully downloaded.'));
          })
          .on('error', (err) => {
            console.log(err);
          })
          .pipe(fs.createWriteStream(IMAGE_DIR + file.name));
      });
    }
  });
}
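
For reference, here's the rough shape of the retry helper I've been imagining. It's completely untested; the function name retryWithBackoff, the 403 check, and the delay values are all just guesses on my part based on the backoff description in the docs.

/**
 * Sketch of a generic retry wrapper with exponential backoff.
 * `operation` is any function that takes a Node-style (err, result) callback.
 */
function retryWithBackoff(operation, maxRetries, callback) {
  let attempt = 0;

  function run() {
    operation((err, result) => {
      // Retry only when it looks like a rate-limit error and retries remain.
      // (Not sure about the error shape; the docs describe a 403 with
      // reason "userRateLimitExceeded".)
      if (err && err.code === 403 && attempt < maxRetries) {
        attempt++;
        // Wait 2^attempt seconds plus up to 1s of random jitter, then try again.
        const delay = Math.pow(2, attempt) * 1000 + Math.floor(Math.random() * 1000);
        console.log('Rate limited; retrying in ' + delay + 'ms (attempt ' + attempt + ').');
        setTimeout(run, delay);
        return;
      }
      callback(err, result);
    });
  }

  run();
}

I'm assuming I'd wrap the files.list call (and maybe each files.get) in something like that, but the streaming .pipe() part is where I get lost.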
EDIT: I looked into batching, but it seems google-api-nodejs-client doesn't support batch requests. I tried a third-party library called Batchelor, but I still can't get it to work for the life of me. :(
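
EDIT 2: One more idea, in case it clarifies what I'm after: instead of batching, I wondered about dropping the _.each and downloading the files one at a time with a pause between requests. Something like the sketch below (again untested; downloadNext and the 500ms delay are just placeholders I made up, and it reuses the same fs/chalk/IMAGE_DIR as above):

// Sequential version of the download loop, with a fixed pause between files.
function downloadNext(gDrive, files, index) {
  if (index >= files.length) {
    return;
  }
  const file = files[index];
  // Move on to the next file after a short pause.
  const next = () => setTimeout(() => downloadNext(gDrive, files, index + 1), 500);

  if (fs.existsSync(IMAGE_DIR + file.name)) {
    next();
    return;
  }
  gDrive.files.get({
    fileId: file.id
  })
    .on('end', () => {
      console.log(chalk.green(file.name + ' successfully downloaded.'));
      next();
    })
    .on('error', (err) => {
      console.log(err);
      next(); // Keep going even if one file fails.
    })
    .pipe(fs.createWriteStream(IMAGE_DIR + file.name));
}

I'd call it as downloadNext(gDrive, resp.files, 0) from inside the files.list callback. No idea whether that's enough to stay under the limit, though.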