
I manually uploaded a JSON file to Google Cloud Storage after creating a new project. I am able to read a file's metadata, but I don't know how to read the JSON content itself.

The code I used to read the metadata is:

// Pre-2.x style: the module itself is callable
var Storage = require('@google-cloud/storage');
const storage = Storage({
    keyFilename: 'service-account-file-path',
    projectId: 'project-id'
});
storage
    .bucket('project-name')
    .file('file-name')
    .getMetadata()
    .then(results => {
        console.log('results is', results[0]);
    })
    .catch(err => {
        console.error('ERROR:', err);
    });

Can someone show me how to read the JSON file's content?

Zeeshan Hassan Memon
Aravindh

4 Answers


I've used the following code to read a JSON file from Cloud Storage:

'use strict';
// Pre-2.x style: the module itself is callable
const Storage = require('@google-cloud/storage');
const storage = Storage();

exports.readFile = (req, res) => {
    console.log('Reading File');
    var archivo = storage.bucket('your-bucket').file('your-JSON-file').createReadStream();
    console.log('Concat Data');
    var buf = '';
    archivo.on('data', function(d) {
        buf += d;
    }).on('end', function() {
        console.log(buf);
        console.log('End');
        res.send(buf);
    }).on('error', function(err) {
        console.error('ERROR:', err);
        res.status(500).send(err.message);
    });
};

I'm reading from a stream and concatenating all of the file's data into the buf variable.

Hope it helps.

UPDATE

To read multiple files:

'use strict';
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();
listFiles();

async function listFiles() {
    const bucketName = 'your-bucket';
    console.log('Listing objects in a Bucket');
    // getFiles resolves with an array whose first element is the list of files
    const [files] = await storage.bucket(bucketName).getFiles();
    files.forEach(file => {
        console.log('Reading: ' + file.name);
        var archivo = file.createReadStream();
        console.log('Concat Data');
        var buf = '';
        archivo.on('data', function(d) {
            buf += d;
        }).on('end', function() {
            console.log(buf);
            console.log('End');
        }).on('error', function(err) {
            console.error('ERROR:', err);
        });
    });
}
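
Note that the streams above are fire-and-forget: listFiles returns before the file contents have been read, which is the async issue raised in the comments below. A minimal sketch (assuming the same hypothetical bucket) that wraps each stream in a Promise and awaits them all:

'use strict';
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();

// Read one file's contents into a string, resolving when the stream ends
function readFile(file) {
    return new Promise((resolve, reject) => {
        let buf = '';
        file.createReadStream()
            .on('data', d => (buf += d))
            .on('end', () => resolve(buf))
            .on('error', reject);
    });
}

async function readAllFiles() {
    const [files] = await storage.bucket('your-bucket').getFiles();
    // Wait for every file to finish before using the results
    const contents = await Promise.all(files.map(readFile));
    contents.forEach(c => console.log(c));
}

readAllFiles().catch(console.error);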
F10
  • This works if you are reading ONE file. But what if you want to read multiple files? – Joakim M Mar 26 '19 at 12:37
  • Thanks. Looks like what I had at first. But have you tested this? createReadStream seems to go on an async adventure and the result is ballistic :D I had to trick it with await... – Joakim M Mar 27 '19 at 09:21
  • Yes, that will depend on how big are your files too – F10 Mar 27 '19 at 11:46
  • Surprising how long I had to look before someone finally had a working example. Thank you! – user1819575 Jul 27 '19 at 02:59

I was using the createReadStream method like the other answers, but the output randomly contained invalid characters (�) in some strings. This is an encoding problem: concatenating raw chunks as strings converts each Buffer to a string separately, so a multi-byte UTF-8 character split across chunk boundaries becomes a replacement character.

My workaround uses the download method instead. download resolves to a DownloadResponse, an array whose first element is a Buffer holding the entire file. We call Buffer.toString() with the utf8 encoding and parse the result with JSON.parse().

const { Storage } = require('@google-cloud/storage');

// Downloads the whole object into memory and parses it as JSON
const downloadAsJson = async (bucket, path) => {
  const file = await new Storage()
    .bucket(bucket)
    .file(path)
    .download();
  return JSON.parse(file[0].toString('utf8'));
};
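
Usage, inside an async function (the bucket and object names here are placeholders):

const settings = await downloadAsJson('my-bucket', 'config/settings.json');
console.log(settings);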
Nathan
  • I'm not clear how this answer differs from [the answer added a year before this one?](https://stackoverflow.com/a/56374129/542251) – Liam Oct 05 '22 at 11:12

There is a convenient download method that downloads a file into memory or to a local destination. You can use it as follows:

const { Storage } = require('@google-cloud/storage');

const bucketName = 'bucket name here';
const fileName = 'file name here';
const storage = new Storage();
const file = storage.bucket(bucketName).file(fileName);

// With a callback, contents is a Buffer holding the whole file
file.download(function(err, contents) {
    console.log('file err: ' + err);
    console.log('file data: ' + contents);
});
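
download can also write the object straight to disk instead of returning it in memory; pass a destination option (a sketch, with a placeholder local path):

// Saves the object to ./local-copy.json rather than buffering it in memory
file.download({ destination: './local-copy.json' }, function(err) {
    if (err) {
        console.error('download error:', err);
        return;
    }
    console.log('file saved to ./local-copy.json');
});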
Pratap Singh

A modern version of this:

const { Storage } = require('@google-cloud/storage')
const storage = new Storage()
const bucket = storage.bucket('my-bucket')

// The function that returns the file's contents as a JSON string
const readJsonFromFile = async remoteFilePath => new Promise((resolve, reject) => {
  let buf = ''
  bucket.file(remoteFilePath)
    .createReadStream()
    .on('data', d => (buf += d))
    .on('end', () => resolve(buf))
    .on('error', e => reject(e))
})

// Example usage (the leading semicolon keeps the IIFE from being
// parsed as a call on the expression above)
;(async () => {
  try {
    const json = await readJsonFromFile('path/to/json-file.json')
    console.log(json)
  } catch (e) {
    console.error(e)
  }
})()
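
Note that readJsonFromFile resolves with the file's raw contents as a string; call JSON.parse on the result if you need the parsed object.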
Jamie Curnow