
With a Node.js script using googleapis I dump all of the databases created on my Google Cloud SQL instance. The dump generates a single file for all databases, which I store in a bucket. My goal is to have one file per database instead of one file for everything. The main problem is that if I run an export request for database A, I can't run another for database B until the first one is done.

MarBer

2 Answers


You can use async with callbacks to run the exports sequentially. The operations list method will give you the status of each export, so you can tell when an export has finished and when it is safe to move on to the next one. For more information check this other post.
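As a rough sketch of that pattern (written with async/await instead of explicit callbacks; the helper names exportSequentially and waitUntilDone are made up, and the client setup mirrors the answer below):

const { google } = require("googleapis");
const sqladmin = google.sql("v1beta4");

// Hypothetical helper: poll operations.get every 10 seconds until the
// export operation reports status DONE.
async function waitUntilDone(project, operationName, authClient) {
    let status = "";
    while (status !== "DONE") {
        const op = await sqladmin.operations.get({
            project: project,
            operation: operationName,
            auth: authClient
        });
        status = op.data.status;
        if (status !== "DONE") {
            await new Promise((resolve) => setTimeout(resolve, 10000));
        }
    }
}

// Start one export per database, waiting for each to finish before
// starting the next one.
async function exportSequentially(project, instance, databases, authClient) {
    for (const db of databases) {
        const r = await sqladmin.instances.export({
            project: project,
            instance: instance,
            resource: {
                exportContext: {
                    kind: "sql#exportContext",
                    fileType: "SQL",
                    uri: `gs://my-gsc-bucket/${db}.gz`, // bucket name as in the question's setup
                    databases: [db]
                }
            },
            auth: authClient
        });
        await waitUntilDone(project, r.data.name, authClient);
    }
}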

Andres S
  • My first idea is similar: with a list of all the database names I call an export for the first db, and from the returned data I know the operation name, so I can call operations.get with the project and operation name as params. But if the operation status returned is not DONE, I need to re-call the same operations.get to get an update on the operation. How can async help me? – MarBer Mar 13 '20 at 08:57
  • Maybe using a while loop until you confirm that the export is complete, and using [sleep](https://stackoverflow.com/questions/14249506/how-can-i-wait-in-node-js-javascript-l-need-to-pause-for-a-period-of-time/41957152) to wait a bit between iterations, could be an option – Andres S Mar 13 '20 at 15:43
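The sleep from that linked question boils down to a promise-wrapped setTimeout; a minimal sketch of the suggested polling loop (getExportStatus is a hypothetical callback):

// promise-based sleep, so an async function can pause between checks
const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

async function pollUntilDone(getExportStatus) {
    // re-check the export status every 10 seconds until it reports DONE
    while ((await getExportStatus()) !== "DONE") {
        await sleep(10000);
    }
}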

My solution is to use a recursive function like this:

"use strict"
const { google } = require("googleapis");
const { auth } = require("google-auth-library");
const dateFormat = require('date-format');
var sqladmin = google.sql("v1beta4");
const project = "my-project-name";
const instanceName = "my-sql-instance-name";
const dbToDump = [];

exports.dumpDatabase = (_req, res) => {

async function dump() {
    let count = 0;
    let currentRequestName = '';
    const authRes = await auth.getApplicationDefault();
    var authClient = authRes.credential;

    let databases = await sqladmin.databases.list({
        project: project,
        instance: instanceName,
        auth: authClient
    });
    for (let i = 0; i < databases.data.items.length; i++) {
        const element = databases.data.items[i];
        // the system databases will be omitted
        if (
            element.name != "information_schema" &&
            element.name != "sys" &&
            element.name != "mysql" &&
            element.name != "performance_schema"
        ) {
            dbToDump.push(element.name);
        }
    }

    async function recursiveCall() {

        //exit condition  
        if (count >= dbToDump.length) {
            res.status(200).send("Command complete");
            return true;
        }


        // no request running
        if (currentRequestName == '') {
            // set data for next export call  
            var request = {
                project: project,
                instance: instanceName,
                resource: {
                    exportContext: {
                        kind: "sql#exportContext",
                        fileType: "SQL",
                        uri: 'gs://my-gsc-bucket/${dbToDump[count]}-${dateFormat.asString('yyyyMMddhhmm', new Date())}.gz',
                        databases: [dbToDump[count]]
                    }
                },
                auth: authClient
            };
            let r = await sqladmin.instances.export(request); //dump start
            currentRequestName = r.data.name;
        }

        // call to monitor request status
        let requestStatus = await sqladmin.operations.get({ project: project, operation: currentRequestName, auth: authClient });

        if (requestStatus.data.status == 'DONE') {
            // the current request is completed, prepare for next call
            count++;
            currentRequestName = '';
            recursiveCall();
        } else {
            // wait 10 seconds before check status
            setTimeout(recursiveCall, 10000)
        }
    }

    recoursiveCall();

}

dump();
};

This works for me; the only extra setting is to increase the function timeout beyond the default 60s. Thanks to Andres S for the support.
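For reference: if this runs as an HTTP-triggered Cloud Function, the timeout is a deploy-time setting; something like the following (the 540s value and runtime are examples, not from the answer):

gcloud functions deploy dumpDatabase --trigger-http --runtime nodejs10 --timeout 540s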

MarBer