
I read Pipe a stream to s3.upload(), but I'm having difficulty with it. I'm not sure it actually solves my problem, and I have tried it.

What I am doing is a GET call to www.example.com. This returns a stream, and I want to upload that stream to S3.

Here's my attempt:

fetch('https://www.example.com', {
    method: 'GET',
    headers: {
        'Authorization': "Bearer " + myAccessToken,
    },
})
.then(function(response) {
    return response.text();
})
.then(function(data) {
    uploadToS3(data);
});


const uploadToS3 = (data) => {
    // Setting up S3 upload parameters
    const params = {
        Bucket: myBucket,
        Key: "fileName",
        Body: data
    };

    // Uploading files to the bucket
    s3.upload(params, function(err, data) {
        if (err) {
            throw err;
        }
        console.log(`File uploaded successfully. ${data.Location}`);
    });
};

Output: File uploaded successfully. https://exampleBucket.s3.amazonaws.com/fileName.pdf

However, the uploaded file is blank.

aaron
  • If you're fetching a stream, you could read it [as a stream](https://developer.mozilla.org/en-US/docs/Web/API/Streams_API/Using_readable_streams) and then use streams in the AWS client [as per the last answer here](https://stackoverflow.com/a/73332454/5774952). If you're stuck on an older version of the SDK, the other answers in that thread providing a passthrough writable stream should work, as long as you're reading from a stream in the initial request. – Zac Anger Jan 21 '23 at 23:32
  • sorry which answer? function upload(s3, inputStream) { const pass = new PassThrough(); inputStream.pipe(pass); return s3.upload( { Bucket: 'bucket name', Key: 'unique file name', Body: pass, }, { queueSize: 4, // default concurrency }, ).promise() .then((data) => console.log(data)) .catch((error) => console.error(error)); } – aaron Jan 21 '23 at 23:35
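
For readability, here is the PassThrough snippet quoted in the comment above, laid out as a runnable sketch. The bucket name, key, and the aws-sdk v2 S3 client setup are placeholders, not part of the original question.

const { PassThrough } = require('stream');
const AWS = require('aws-sdk');

// Pipe an existing readable stream into s3.upload() via a PassThrough,
// as described in the comment above. Bucket and Key are placeholders.
function upload(s3, inputStream) {
  const pass = new PassThrough();
  inputStream.pipe(pass);
  return s3
    .upload(
      { Bucket: 'bucket name', Key: 'unique file name', Body: pass },
      { queueSize: 4 } // default concurrency
    )
    .promise()
    .then((data) => console.log(data))
    .catch((error) => console.error(error));
}

// e.g. upload(new AWS.S3(), someReadableStream);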

1 Answer


I figured it out, but I did not keep using fetch. Instead, I download the file with axios, then upload it to S3, and then delete the local file.

const axios = require('axios');
const fs = require('fs');
const AWS = require('aws-sdk');

const s3 = new AWS.S3();

function getNewFilesFromExampleDotCom(myAccessToken, fileName, fileKey) {
  let url2 = 'https://example.com' + fileKey;
  axios
      .get(url2, {
          headers: { 'Authorization': "Bearer " + myAccessToken },
          responseType: 'stream',
      })
      .then(response => {
          let file = fileName;
          // Stream the response body to a local file
          response.data.pipe(fs.createWriteStream(file));

          let myFileInfo = [];
          if (myFileInfo.length > 0) {
              myFileInfo.splice(0, myFileInfo.length);
          }
          myFileInfo.push(file);
          processArray(myFileInfo);

          console.log(file + " saved");
      })
      .catch(error => console.log(error));
}

async function processArray(array) {
  for (const item of array) {
    await delayedLog(item);
  }
  console.log('Downloaded!');
  console.log('Uploading to s3!');
}


function delay() {
  return new Promise(resolve => setTimeout(resolve, 300));
}


async function delayedLog(item) {
  await delay();
  uploadFiles(item)  
}

async function uploadFiles(file){
    uploadToS3List(file)
   
    await new Promise((resolve, reject) => setTimeout(resolve, 1000));

    deleteMyFiles(file)

}


const uploadToS3List = (fileName) => {
  // Read content from the file
  const fileContent = fs.readFileSync(fileName);

  // Setting up S3 upload parameters
  const params = {
      Bucket:"myBucketName",
      Key: fileName,
      Body: fileContent
  };

  // Uploading files to the bucket
  s3.upload(params, function(err, data) {
      if (err) {
          throw err;
      }
      console.log(`File uploaded successfully. ${data.Location}`);
  });
};


function deleteMyFiles(path) {
  fs.unlink(path, (err) => {
      if (err) {
          console.error(err);
          return;
      }

      console.log(path + " has been deleted");
  });
}
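
Putting it together, a hypothetical call would look like this (the token, file name, and file key are placeholders):

// Hypothetical usage: token, file name and file key are placeholders.
getNewFilesFromExampleDotCom("<access token>", "fileName.pdf", "/files/fileName.pdf");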
aaron