Using the Python API, I have created a Vault export. How do I download the .zip file in that export using the same authorized service? When creating the export, I can see the bucketName and objectName values of the cloudStorageSink, but I cannot find any documentation on how to download those objects to my host using the existing service that created the export.
#!/usr/bin/env python
from __future__ import print_function
import datetime
import json
import time

from googleapiclient.discovery import build
from httplib2 import Http
from oauth2client import file, client, tools

# If modifying these scopes, delete the file token.json.
SCOPES = 'https://www.googleapis.com/auth/ediscovery'


def list_exports(service, matter_id):
    return service.matters().exports().list(matterId=matter_id).execute()


def get_export_by_id(service, matter_id, export_id):
    return service.matters().exports().get(matterId=matter_id, exportId=export_id).execute()


def get_service():
    '''
    Look for an active credential token; if one does not exist, use credentials.json
    and ask the user for permission to access. Store the new token, return the service object.
    '''
    store = file.Storage('token.json')
    creds = store.get()
    if not creds or creds.invalid:
        flow = client.flow_from_clientsecrets('credentials.json', SCOPES)
        creds = tools.run_flow(flow, store)
    service = build('vault', 'v1', http=creds.authorize(Http()))
    return service


def create_drive_export(service, matter_id, export_name, num_days):
    """
    Once we have a matter_id, we can create an export under it with the relevant files we are looking for.
    """
    # set times for the beginning and end of the query:
    today = datetime.datetime.now()
    print("creating a drive export at {}".format(today))
    start_time = today - datetime.timedelta(days=num_days)

    drive_query_options = {'includeTeamDrives': True}
    user_list = ['me@gmail.com']
    drive_query = {
        'corpus': 'DRIVE',
        'dataScope': 'ALL_DATA',
        'searchMethod': 'ACCOUNT',
        'accountInfo': {
            'emails': user_list
        },
        'driveOptions': drive_query_options,
        # end time is the more recent date, start time is the older date
        'endTime': '{}-{}-{}T00:00:00Z'.format(today.year, today.month, today.day),
        'startTime': '{}-{}-{}T00:00:00Z'.format(start_time.year, start_time.month, start_time.day),
        'timeZone': 'Etc/GMT'
    }

    wanted_export = {
        'name': export_name,
        'query': drive_query,
        'exportOptions': {
            'driveOptions': {}
        }
    }

    return service.matters().exports().create(matterId=matter_id, body=wanted_export).execute()


def get_export(service, matter_id, export_id):
    return service.matters().exports().get(matterId=matter_id, exportId=export_id).execute()


def main():
    service = get_service()
    matter_id = '<known_matter_id>'
    timestamp = datetime.datetime.now().strftime("%Y%m%d.%H%M%s")
    export = create_drive_export(service, matter_id, "code_gen_export.{}".format(timestamp), 1)

    # check every 5 seconds until the export is done being created:
    while export['status'] == 'IN_PROGRESS':
        export = get_export(service, matter_id, export['id'])
        print('...')
        time.sleep(5)

    # print(json.dumps(export, indent=2))
    print(json.dumps(export['cloudStorageSink']['files'], indent=2))


if __name__ == '__main__':
    main()
Running the above code produces:
creating a drive export at 2018-09-20 17:12:38.026402
...
...
...
...
...
...
[
{
"md5Hash": "hash_value",
"bucketName": "bucket_string",
"objectName": "object1_string/code_gen_export.20180920.17121537481558-custodian-docid.csv",
"size": "1684"
},
{
"md5Hash": "hash_value",
"bucketName": "bucket_string",
"objectName": "object2_string/code_gen_export.20180920.17121537481558-metadata.xml",
"size": "10600"
},
{
"md5Hash": "hash_value",
"bucketName": "bucket_string",
"objectName": "object3_string/code_gen_export.20180920.17121537481558_0.zip",
"size": "21599222"
}
]
Can I download the .zip file using the service object I created in get_service()?
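For reference, this is the direction I have been experimenting in, though I have not found documentation confirming it: building a second discovery client against the Cloud Storage JSON API from the same stored token, which I assume would also require re-authorizing that token with the https://www.googleapis.com/auth/devstorage.read_only scope added alongside the ediscovery scope. The bucket/object names and destination path below are placeholders taken from the output above.

#!/usr/bin/env python
from __future__ import print_function

from googleapiclient.discovery import build
from googleapiclient.http import MediaIoBaseDownload
from httplib2 import Http
from oauth2client import file


def download_export_file(bucket_name, object_name, destination):
    """Download one object listed in the export's cloudStorageSink to a local file."""
    # Reuse the stored token; this assumes it was authorized with the
    # devstorage.read_only scope in addition to the ediscovery scope.
    store = file.Storage('token.json')
    creds = store.get()
    storage_service = build('storage', 'v1', http=creds.authorize(Http()))

    # objects().get_media streams the raw object bytes (alt=media).
    request = storage_service.objects().get_media(bucket=bucket_name, object=object_name)
    with open(destination, 'wb') as fh:
        downloader = MediaIoBaseDownload(fh, request)
        done = False
        while not done:
            status, done = downloader.next_chunk()
            print('download {}%'.format(int(status.progress() * 100)))


# e.g. using the bucketName/objectName values printed by main():
# download_export_file('bucket_string',
#                      'object3_string/code_gen_export.20180920.17121537481558_0.zip',
#                      'export.zip')

Is something along these lines the intended way to fetch the export files, or can the Vault service object from get_service() do it directly?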