56

I am at a loss as to what I am doing wrong; here is what I have:

HTML

<html>
<body>
    <!-- enctype="multipart/form-data" is required for file uploads;
         the server must ALSO run a multipart parser (e.g. multer)
         for req.files to be populated on the Node side. -->
    <form method="POST" action="/upload" enctype="multipart/form-data">
        <div class="field">
            <label for="image">Image Upload</label>
            <!-- name="image" is the field name the server reads (req.files.image) -->
            <input type="file" name="image" id="image">
        </div>
        <input type="submit" class="btn" value="Save">
    </form>
</body>
</html>

Port 5000 is my Node.js server's port.

In this example I am using POST to /upload, and it works fine.

module.exports = function(app, models) {

    var fs = require('fs');
    var AWS = require('aws-sdk');
    var accessKeyId =  process.env.AWS_ACCESS_KEY || "xxxxxx";
    var secretAccessKey = process.env.AWS_SECRET_KEY || "+xxxxxx+B+xxxxxxx";

    AWS.config.update({
        accessKeyId: accessKeyId,
        secretAccessKey: secretAccessKey
    });

    var s3 = new AWS.S3();

    app.post('/upload', function(req, res){

        var params = {
            Bucket: 'makersquest',
            Key: 'myKey1234.png',
            Body: "Hello"
        };

        s3.putObject(params, function (perr, pres) {
            if (perr) {
                console.log("Error uploading data: ", perr);
            } else {
                console.log("Successfully uploaded data to myBucket/myKey");
            }
        });
    });

}

Now I want to upload the actual file that I am POSTing, which is where the problem arises.

module.exports = function(app, models) {

    var fs = require('fs');
    var AWS = require('aws-sdk');
    var accessKeyId =  process.env.AWS_ACCESS_KEY || "xxxxxx";
    var secretAccessKey = process.env.AWS_SECRET_KEY || "+xxxxxx+B+xxxxxxx";

    AWS.config.update({
        accessKeyId: accessKeyId,
        secretAccessKey: secretAccessKey
    });

    var s3 = new AWS.S3();

    app.post('/upload', function(req, res){
        var path = req.files.image.path;
        fs.readFile(path, function(err, file_buffer){
            var params = {
                Bucket: 'makersquest',
                Key: 'myKey1234.png',
                Body: file_buffer
            };

            s3.putObject(params, function (perr, pres) {
                if (perr) {
                    console.log("Error uploading data: ", perr);
                } else {
                    console.log("Successfully uploaded data to myBucket/myKey");
                }
            });
        });
    });
}

The error I get is:

TypeError: Cannot read property 'path' of undefined

As a matter of fact, `req.files` is completely undefined.

I am assuming I am missing something pretty obvious but I can't seem to find it.

Zeeshan Hassan Memon
  • 8,105
  • 4
  • 43
  • 57
abritez
  • 2,616
  • 3
  • 29
  • 36

6 Answers6

42

Simple S3 File Upload Without Multer

var express = require('express')
const fileUpload = require('express-fileupload');
const app = express();

// express-fileupload parses multipart/form-data bodies and exposes each
// uploaded file on req.files.<fieldName>.
app.use(fileUpload());


var AWS = require('aws-sdk');

// POST /imageUpload — expects a multipart field named "uploadedFileName".
app.post('/imageUpload', async (req, res) => {
    AWS.config.update({
        accessKeyId: "ACCESS-KEY", // Access key ID
        // FIX: option was misspelled "secretAccesskey" (lowercase k), so the
        // SDK silently ignored the secret and credentials never applied.
        secretAccessKey: "SECRET-ACCESS-KEY", // Secret access key
        region: "us-east-1" //Region
    })


    const s3 = new AWS.S3();

    // Guard: nothing uploaded, or the middleware is missing.
    if (!req.files || !req.files.uploadedFileName) {
        return res.status(400).send({
            "response_code": 400,
            "response_message": "No file uploaded"
        });
    }

    // Binary data base64
    const fileContent  = Buffer.from(req.files.uploadedFileName.data, 'binary');

    // Setting up S3 upload parameters
    const params = {
        Bucket: 'BUKET-NAME',
        Key: "test.jpg", // File name you want to save as in S3
        Body: fileContent 
    };

    // Uploading files to the bucket
    s3.upload(params, function(err, data) {
        if (err) {
            // FIX: throwing from this callback crashed the process;
            // report the failure to the client instead.
            console.error(err);
            return res.status(500).send({
                "response_code": 500,
                "response_message": "Upload failed"
            });
        }
        res.send({
            "response_code": 200,
            "response_message": "Success",
            "response_data": data
        });
    });

})

app.listen(3000, function () {
    console.log('Example app listening on port 3000!');
});
Surojit Paul
  • 1,232
  • 9
  • 11
  • 9
    Best answer because you use express-fileupload and native Buffer function – kuzey beytar Oct 12 '20 at 07:43
  • 1
    Agree, this is the best answer – Joulss Jan 27 '21 at 12:33
  • 1
    Agreed this is my preferred answer. – James Parker Apr 10 '21 at 11:56
  • how does one get the image back via a get request using this way, can you tell me in short – Yash Jun 22 '21 at 11:58
  • 1
    Agreed this is the best answer – burnedfaceless Jul 07 '21 at 15:30
  • 1
    I had problems with this because aws-sdk did not seem to be applying the credentials. I also notice the options you are using like `accessKeyId` are tagged as deprecated. However from the docs, I simply set the environment variables, and they are automatically applied: https://docs.aws.amazon.com/sdk-for-javascript/v2/developer-guide/loading-node-credentials-environment.html which then worked. – defraggled Jul 31 '21 at 13:06
41

You will need something like multer to handle multipart uploading. Here is an example streaming your file upload to s3 using aws-sdk.

var multer = require('multer');
var AWS = require('aws-sdk');

// Credentials come from the environment, with redacted placeholder fallbacks.
var accessKeyId =  process.env.AWS_ACCESS_KEY || "xxxxxx";
var secretAccessKey = process.env.AWS_SECRET_KEY || "+xxxxxx+B+xxxxxxx";

AWS.config.update({
    accessKeyId: accessKeyId,
    secretAccessKey: secretAccessKey
});

var s3 = new AWS.S3();

// NOTE(review): `rename` and `onFileUploadData` belong to the legacy
// multer 0.x options API — later answers on this page use multer 1.x,
// which has a different API. Confirm the installed multer version.
app.use(multer({ // https://github.com/expressjs/multer
  dest: './public/uploads/', 
  limits : { fileSize:100000 },
  rename: function (fieldname, filename) {
    return filename.replace(/\W+/g, '-').toLowerCase();
  },
  // Presumably invoked with chunks of file data as they arrive —
  // verify against the multer 0.x docs before relying on this.
  onFileUploadData: function (file, data, req, res) {
    // file : { fieldname, originalname, name, encoding, mimetype, path, extension, size, truncated, buffer }
    var params = {
      Bucket: 'makersquest',
      Key: file.name,
      Body: data
    };

    s3.putObject(params, function (perr, pres) {
      if (perr) {
        console.log("Error uploading data: ", perr);
      } else {
        console.log("Successfully uploaded data to myBucket/myKey");
      }
    });
  }
}));

// By the time this handler runs, the multer middleware above has already
// parsed the multipart body and populated req.files.
app.post('/upload', function(req, res){
    if(req.files.image !== undefined){ // `image` is the field name from your form
        res.redirect("/uploads"); // success
    }else{
        res.send("error, no file chosen");
    }
});
theRemix
  • 2,154
  • 16
  • 16
  • 4
    In this example, Should I remove local file? such as `, onFileUploadComplete: function (file, req, res) { fs.unlink(file.path) }` – Eun Bit Hwang Jun 29 '15 at 14:24
35

[Update Mar 2022] Supports multiple file uploads at a time, and returns the uploaded file(s)' public URL(s) too.

Latest Answer @ Dec-2016 [New]

Use multer-s3 for multipart uploading to s3 without saving on local disk as:

var express = require('express'),
    aws = require('aws-sdk'),
    bodyParser = require('body-parser'),
    multer = require('multer'),
    multerS3 = require('multer-s3');

// NOTE(review): prefer loading these from environment variables;
// the hard-coded values here are placeholders only.
aws.config.update({
    secretAccessKey: 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX',
    accessKeyId: 'XXXXXXXXXXXXXXX',
    region: 'us-east-1'
});

var app = express(),
    s3 = new aws.S3();

app.use(bodyParser.json());

// multer-s3 streams each part of the multipart body straight to S3,
// so nothing is written to the local disk.
var upload = multer({
    storage: multerS3({
        s3: s3,
        acl: 'public-read',
        bucket: 'bucket-name',
        key: function (req, file, cb) {
            console.log(file);
            cb(null, file.originalname); //use Date.now() for unique file keys
        }
    })
});

//open in browser to see upload form
app.get('/', function (req, res) {
    res.sendFile(__dirname + '/index.html');
});

//use by upload form — accepts up to 25 files from the "upl" field
app.post('/upload', upload.array('upl', 25), function (req, res, next) {
    res.send({
        message: "Uploaded!",
        urls: req.files.map(function(file) {
            return {url: file.location, name: file.key, type: file.mimetype, size: file.size};
        })
    });
});

app.listen(3000, function () {
    console.log('Example app listening on port 3000!');
});

Latest Answer @ Mar-2016 [Old-One]

Edit 1: use multer@1.1.0 and multer-s3@1.4.1 for the following snippet:

var express = require('express'),
    bodyParser = require('body-parser'),
    multer = require('multer'),
    s3 = require('multer-s3');

var app = express();

app.use(bodyParser.json());

// NOTE(review): this passes credentials directly to the multer-s3 storage
// engine, which matches multer-s3@1.4.1 only — newer multer-s3 versions
// expect an AWS.S3 client object instead ("Expected opts.s3 to be object"
// errors reported in the comments). Pin multer@1.1.0 / multer-s3@1.4.1.
var upload = multer({
    storage: s3({
        dirname: '/',
        bucket: 'bucket-name',
        secretAccessKey: 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX',
        accessKeyId: 'XXXXXXXXXXXXXXX',
        region: 'us-east-1',
        filename: function (req, file, cb) {
            cb(null, file.originalname); //use Date.now() for unique file keys
        }
    })
});

//open in browser to see upload form
app.get('/', function (req, res) {
    res.sendFile(__dirname + '/index.html');
});

//use by upload form
app.post('/upload', upload.array('upl'), function (req, res, next) {
    res.send("Uploaded!");
});

app.listen(3000, function () {
    console.log('Example app listening on port 3000!');
});

For complete running example clone express_multer_s3 repo and run node app.

Zeeshan Hassan Memon
  • 8,105
  • 4
  • 43
  • 57
  • 6
    getting errors with this code `default: throw new TypeError('Expected opts.s3 to be object') ^ TypeError: Expected opts.s3 to be object at new S3Storage (M:\workspace\Angular2StartKit\node_modules\multer-s3\index.js:66:20) at module.exports (M:\workspace\Angular2StartKit\node_modules\multer-s3\index.js:150:10)` – user2180794 Jun 19 '16 at 20:33
  • How do I know if the upload was success or not and then redirect the screen based on that. Can you please show me how this is done. I am using `Multer-S3` – Illep Aug 13 '16 at 03:00
  • **Answer has been updated** @user2180794 hope both snippets work for you now – Zeeshan Hassan Memon Nov 30 '16 at 21:47
  • old but doesn't work for me (either of the 2). I get "Error: unexpected field". – Manuel Maestrini Aug 10 '17 at 09:44
  • @ManuelMaestrini did you follow the instructions properly or did you try it by cloning node-cheat? – Zeeshan Hassan Memon Aug 10 '17 at 10:49
  • I just followed the instructions and did .single as opposed to .array – Manuel Maestrini Aug 10 '17 at 11:12
  • i am using your code but this error is come "Expected opts.s3 to be object " – Nallasamy Nss May 10 '18 at 12:41
  • 1
    @NallasamyNss you must be using inappropriate versions of npm modules, see this node-cheat link, see and install mentioned modules with correct version to make it work: https://github.com/zishon89us/node-cheat/blob/master/aws/express_multer_s3/app_es8.js – Zeeshan Hassan Memon May 10 '18 at 15:42
  • how does one get the image via get request using this method? – Yash Jun 22 '21 at 11:56
  • Currently multer-s3 3.0.1 has a severe security issue. https://github.com/anacronw/multer-s3/issues/188 – wasserholz Oct 10 '22 at 08:58
1

You need something like multer in your set of middleware to handle multipart/form-data for you and populate req.files. From the documentation:

var express = require('express')
var multer  = require('multer')

var app = express()
// Parses multipart/form-data and populates req.files; uploaded files
// land in ./uploads/.
// NOTE(review): this global-middleware usage is the multer 0.x style —
// confirm your installed multer version.
app.use(multer({ dest: './uploads/'}))

Now req.files.image.path should be populated in your app.post function.

morloch
  • 1,781
  • 1
  • 16
  • 23
1

One of the easy ways to upload your image is to use an NPM package Multer You can upload an image to S3 and then store its name in your database so every time you want to fetch it you can generate a signed URL for that image. This is one of the ways to secure access to your S3 bucket.

For uploading an image you can do something like this

const AWS = require("aws-sdk");
const express = require("express");
const multer = require("multer");
const crypto = require("crypto");
const cors = require("cors");
const {
  S3Client,
  PutObjectCommand
} = require("@aws-sdk/client-s3");

const app = express();

app.use(cors());
app.use(express.json());

const port = process.env.PORT || 3000 

// memoryStorage keeps uploads in RAM as Buffers (available later on
// req.file.buffer) instead of writing them to disk.
const storage = multer.memoryStorage();
const upload = multer({ storage: storage });

// Read the values from .env file

const bucketName = process.env.BUCKET_NAME;
const bucketRegion = process.env.BUCKET_REGION;
const accessId = process.env.ACCESS_ID;
const secretAccessKey = process.env.SECRET_ACCESS_KEY;

// Create a client (AWS SDK for JavaScript v3 style)

const s3 = new S3Client({
  credentials: {
    accessKeyId: accessId,
    secretAccessKey: secretAccessKey,
  },
  region: bucketRegion,
});

// Generates a random hex name for uploaded objects: `bytes` random bytes
// rendered as 2*bytes hex characters (64 characters by default).
const generateFileName = function (bytes = 32) {
  const randomBuffer = crypto.randomBytes(bytes);
  return randomBuffer.toString("hex");
};
  
// Notice the upload middleware.
// "image" is the same name that you will pass from your UI request.
// FIX: the handler must be `async` because it awaits s3.send() — the
// original non-async arrow made `await` a syntax error. The original
// also used Python-style `#` comments, which do not parse in JavaScript.
app.post('/', upload.single("image"), async (req, res) => {

  // When you use multer the image can be accessed from req.file
  // (multer.memoryStorage() keeps the raw bytes on req.file.buffer).
  if (!req.file) {
    return res.status(400).send('no image uploaded');
  }

  const fileName = generateFileName();
  const params = {
    Bucket: bucketName,
    Key: fileName,
    Body: req.file.buffer,
    ContentType: req.file.mimetype,
    ContentEncoding: 'base64',
  };

  try {
    await s3.send(new PutObjectCommand(params));
  } catch (err) {
    // Surface S3 failures instead of leaving the request hanging.
    console.error(err);
    return res.status(500).send('upload failed');
  }

  // before sending response you can save the 'fileName' in the DB of your choice
  res.send('image uploaded');
});


app.listen(port, () => {
  console.log(`app listening on port ${port}`)
})

Next, to get the signed URL for the image you can do as follows

// assuming other things are set as above snippet 
const { GetObjectCommand } = require("@aws-sdk/client-s3");
const { getSignedUrl } = require("@aws-sdk/s3-request-presigner");


// GET / — returns a time-limited presigned URL for a stored image.
// FIX: the handler must be `async` because it awaits getSignedUrl() —
// the original non-async arrow made `await` a syntax error.
app.get('/', async (req, res) => {
  // First you will get the image name that was saved in DB —
  // `user_image` below is a placeholder for that DB lookup result.

  const obj_params = {
    Bucket: bucketName,
    Key: user_image,
  };

  const command = new GetObjectCommand(obj_params);
  // FIX: declare image_url with const — it was assigned without any
  // declaration, creating an implicit global.
  const image_url = await getSignedUrl(
    s3,
    command,
    { expiresIn: 86400 } // seconds in a day
  );

  const response = {
    success: true,
    data: {
      image_url,
    },
  };
  res.status(200).send(response);
});

Note:

  • Note that you might need to install some packages to make it work.
  • Make sure in your API requests you are setting 'content-type': 'multipart/form-data' in request headers
  • In your API gateway in S3, you might also need to set the Binary Media Type as multipart/form-data. More info on that in this link
Hadi Mir
  • 4,497
  • 2
  • 29
  • 31
0

This Stack Overflow answer was the best one I found, explaining exactly how to get Node working with S3.

AWS Missing credentials when i try send something to my S3 Bucket (Node.js)

This in addition to some more stuff I had to hack on to get it all working. In my situation I was using a MEAN stack application so my Node file I was working with was a route file.

my aconfig.json file with the amazon credentials looks like this:

{ "accessKeyId": "*****YourAccessKey****", "secretAccessKey": "***YourSecretKey****" }

The final contents of the route file look like the file pasted below.

// POST /sendToS3 — uploads a file from the local filesystem (described by
// the hard-coded sampleFile record below) to S3, then responds with an
// HTML link and inline preview of the uploaded object.
router.post('/sendToS3', function(req, res) {

// FIX: removed the unused `multer` require, the unused `var s3 = new
// AWS.S3()`, and the no-op `multer({limits: ...})` call whose return
// value was discarded — none of them affected behavior.
var fs = require('fs');
var AWS = require('aws-sdk');
var path = require('path');

// Credentials are loaded from aconfig.json next to this route file:
// { "accessKeyId": "...", "secretAccessKey": "..." }
var awsCredFile = path.join(__dirname, '.', 'aconfig.json');

console.log('awsCredFile is');
console.log(awsCredFile);

AWS.config.loadFromPath(awsCredFile);

// S3 client pre-bound to the target bucket.
var photoBucket = new AWS.S3({params: {Bucket: 'myGreatBucketName'}});

// Hard-coded sample of a multer file record; point `path` at a real file
// under ./public to exercise this route.
var sampleFile = {
    "_id" : 345345,
    "fieldname" : "uploads[]",
    "originalname" : "IMG_1030.JPG",
    "encoding" : "7bit",
    "mimetype" : "image/jpeg",
    "destination" : "./public/images/uploads",
    "filename" : "31a66c51883595e74ab7ae5e66fb2ab8",
    "path" : "/images/uploads/31a66c51883595e74ab7ae5e66fb2ab8",
    "size" : 251556,
    "user" : "579fbe61adac4a8a73b6f508"
};

var filePathToSend = path.join(__dirname, '../public', sampleFile.path);


// Streams the file at `filepath` to S3 as `destFileName`, invoking
// `callback(err, data)` when the managed upload settles.
function uploadToS3(filepath, destFileName, callback) {
    photoBucket
        .upload({
            ACL: 'public-read',
            Body: fs.createReadStream(filepath),
            Key: destFileName.toString(),
            ContentType: 'application/octet-stream' // force download if it's accessed as a top location
        })
        // http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3/ManagedUpload.html#httpUploadProgress-event
        .on('httpUploadProgress', function(evt) { console.log(evt); })
        // http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3/ManagedUpload.html#send-property
        .send(callback);
}

console.log('filePathToSend is ');
console.log(filePathToSend);

uploadToS3(filePathToSend, sampleFile.filename, function (err, data) {
    if (err) {
        console.error(err);
        return res.status(500).send('failed to upload to s3').end();
    }
    res.status(200)
        .send('File uploaded to S3: '
            + data.Location.replace(/</g, '&lt;')
            + '<br/><img src="' + data.Location.replace(/"/g, '&quot;') + '"/>')
        .end();
});

console.log('uploading now...');

});

This took me a while to finally get working, but if you set up the route below, update the sampleFile JSON to point to a real file on your system, and hit it with Postman, it will publish a file to your S3 account.

Hope this helps

Community
  • 1
  • 1
JasonPerr
  • 339
  • 3
  • 16
  • Please do not hesitate to describe the answer even though the link leads to the detailed view of it. – Gar Aug 12 '16 at 17:04