I need to stream data into BigQuery from my Google Apps Script addon.
But I need to use my service account only (I need to insert data into my BigQuery table, not user's BigQuery table)
I followed this example: https://developers.google.com/apps-script/advanced/bigquery#load_csv_data
Because Apps Script Advanced Service doesn't support service account natively, so I need to change this example a bit:
Instead of using Advanced Service BigQuery, I need to get the OAuth token from my service account, then using BigQuery Rest API to handle the same job:
This is what I did:
function getBigQueryService() {
return (
OAuth2.createService('BigQuery')
// Set the endpoint URL.
.setTokenUrl('https://accounts.google.com/o/oauth2/token')
// Set the private key and issuer.
.setPrivateKey(PRIVATE_KEY)
.setIssuer(CLIENT_EMAIL)
// Set the property store where authorized tokens should be persisted.
.setPropertyStore(PropertiesService.getScriptProperties())
// Caching
.setCache(CacheService.getUserCache())
// Locking
.setLock(LockService.getUserLock())
// Set the scopes.
.setScope('https://www.googleapis.com/auth/bigquery')
)
}
export const insertLog = (userId, type) => {
const bigQueryService = getBigQueryService()
if (!bigQueryService.hasAccess()) {
console.error(bigQueryService.getLastError())
return
}
const projectId = bigqueryCredentials.project_id
const datasetId = 'usage'
const tableId = 'logs'
const row = {
timestamp: new Date().toISOString(),
userId,
type,
}
const data = Utilities.newBlob(convertToNDJson(row), 'application/octet-stream')
// Create the data upload job.
const job = {
configuration: {
load: {
destinationTable: {
projectId,
datasetId,
tableId,
},
sourceFormat: 'NEWLINE_DELIMITED_JSON',
},
},
}
const url = `https://bigquery.googleapis.com/upload/bigquery/v2/projects/${projectId}/jobs`
const headers = {
Authorization: `Bearer ${bigQueryService.getAccessToken()}`,
'Content-Type': 'application/json',
}
const options = {
method: 'post',
headers,
payload: JSON.stringify(job),
}
try {
const response = UrlFetchApp.fetch(url, options)
const result = JSON.parse(response.getContentText())
console.log(JSON.stringify(result, null, 2))
} catch (err) {
console.error(err)
}
}
As you can see in my code, I get the Blob data (which is the actual json data that I need to put in BigQuery table) using this line:
const data = Utilities.newBlob(convertToNDJson(row), 'application/octet-stream')
But I don't know where to use this data
with the BigQuery Rest API
The documentation doesn't mention it: https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/insert
How this can be done? Thank you.