Simple file upload to S3 using aws-sdk and Node/Express

Tags: node.js, File Upload, Amazon S3, AWS SDK, Multer S3

node.js Problem Overview


I am at a loss of what I am doing wrong, here is what I have:

HTML

<!-- Minimal upload form: POSTs the chosen file to the Express /upload route.
     enctype="multipart/form-data" is required for file uploads to work. -->
<html>
<body>
	<form method="POST" action="/upload" enctype="multipart/form-data">
        <div class="field">
            <label for="image">Image Upload</label>
            <!-- Field name "image" is what the server-side code reads (req.files.image). -->
            <input type="file" name="image" id="image">
        </div>
        <input type="submit" class="btn" value="Save">
    </form>
</body>
</html>

Port 5000 is my Node.js server's port.

In this example I am using POST to /upload, and it works fine.

module.exports = function(app, models) {

    var fs = require('fs');
    var AWS = require('aws-sdk');
    var accessKeyId =  process.env.AWS_ACCESS_KEY || "xxxxxx";
    var secretAccessKey = process.env.AWS_SECRET_KEY || "+xxxxxx+B+xxxxxxx";

    AWS.config.update({
        accessKeyId: accessKeyId,
        secretAccessKey: secretAccessKey
    });

    var s3 = new AWS.S3();

    app.post('/upload', function(req, res){
       
        var params = {
            Bucket: 'makersquest',
            Key: 'myKey1234.png',
            Body: "Hello"
        };

        s3.putObject(params, function (perr, pres) {
            if (perr) {
                console.log("Error uploading data: ", perr);
            } else {
                console.log("Successfully uploaded data to myBucket/myKey");
            }
        });
    });

}

Now I want to post the file that I am POSTing, which is where the problem arises.

// The FAILING version from the question: same S3 setup as above, but it tries
// to read the uploaded file from req.files. This throws
// "TypeError: Cannot read property 'path' of undefined" (see below) because
// Express does not parse multipart/form-data bodies on its own — req.files is
// only populated when multipart middleware such as multer is installed
// (see the solutions later in this document).
module.exports = function(app, models) {

    var fs = require('fs');
    var AWS = require('aws-sdk');
    // Credentials: environment variables with placeholder fallbacks.
    var accessKeyId =  process.env.AWS_ACCESS_KEY || "xxxxxx";
    var secretAccessKey = process.env.AWS_SECRET_KEY || "+xxxxxx+B+xxxxxxx";

    AWS.config.update({
        accessKeyId: accessKeyId,
        secretAccessKey: secretAccessKey
    });

    var s3 = new AWS.S3();

    app.post('/upload', function(req, res){
        // BUG (the subject of this question): req.files is undefined here
        // because no multipart middleware is registered.
        var path = req.files.image.path;
        // Reads the temp file from disk and ships its bytes to S3.
        fs.readFile(path, function(err, file_buffer){
            var params = {
                Bucket: 'makersquest',
                Key: 'myKey1234.png',
                Body: file_buffer
            };

            s3.putObject(params, function (perr, pres) {
                if (perr) {
                    console.log("Error uploading data: ", perr);
                } else {
                    console.log("Successfully uploaded data to myBucket/myKey");
                }
            });
        });
    });
}

The error I get is:

>TypeError: Cannot read property 'path' of undefined

As a matter of fact, `req.files` is completely empty.

I am assuming I am missing something pretty obvious but I can't seem to find it.

node.js Solutions


Solution 1 - node.js

You will need something like multer to handle multipart uploading. Here is an example streaming your file upload to s3 using aws-sdk.

// Solution 1: let multer handle the multipart parsing, then push the file's
// bytes to S3 from multer's onFileUploadData hook.
// NOTE(review): dest/rename/onFileUploadData belong to multer's pre-1.0
// options API and were removed in multer 1.x — confirm the installed multer
// version before copying this snippet.
var multer = require('multer');
var AWS = require('aws-sdk');

// Credentials: environment variables with placeholder fallbacks.
var accessKeyId =  process.env.AWS_ACCESS_KEY || "xxxxxx";
var secretAccessKey = process.env.AWS_SECRET_KEY || "+xxxxxx+B+xxxxxxx";

AWS.config.update({
    accessKeyId: accessKeyId,
    secretAccessKey: secretAccessKey
});

var s3 = new AWS.S3();

app.use(multer({ // https://github.com/expressjs/multer
  dest: './public/uploads/', 
  limits : { fileSize:100000 },
  // Sanitize the stored filename: non-word characters -> '-', lowercased.
  rename: function (fieldname, filename) {
    return filename.replace(/\W+/g, '-').toLowerCase();
  },
  // Called with the file's buffered data as the upload is parsed; forwards
  // the bytes straight to S3.
  onFileUploadData: function (file, data, req, res) {
    // file : { fieldname, originalname, name, encoding, mimetype, path, extension, size, truncated, buffer }
    var params = {
      Bucket: 'makersquest',
      Key: file.name,
      Body: data
    };

    s3.putObject(params, function (perr, pres) {
      if (perr) {
        console.log("Error uploading data: ", perr);
      } else {
        console.log("Successfully uploaded data to myBucket/myKey");
      }
    });
  }
}));

// By the time this handler runs, the multer middleware above has already
// parsed the form and (via the hook) sent the file to S3.
app.post('/upload', function(req, res){
    if(req.files.image !== undefined){ // `image` is the field name from your form
        res.redirect("/uploads"); // success
    }else{
        res.send("error, no file chosen");
    }
});

Solution 2 - node.js

[Update Mar 2022] Supports multiple file uploads at a time, and returns the uploaded file(s)' public URL(s) too.

Latest Answer @ Dec-2016 [New]

Use multer-s3 for multipart uploading to s3 without saving on local disk as:

// Streams multipart uploads straight to S3 via multer-s3 (nothing is written
// to the local disk), then reports each stored file's public URL, key, MIME
// type and size back to the client.
var express = require('express');
var aws = require('aws-sdk');
var bodyParser = require('body-parser');
var multer = require('multer');
var multerS3 = require('multer-s3');

aws.config.update({
    secretAccessKey: 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX',
    accessKeyId: 'XXXXXXXXXXXXXXX',
    region: 'us-east-1'
});

var app = express();
var s3 = new aws.S3();

app.use(bodyParser.json());

// multer-s3 storage engine: each incoming file is stored publicly in the
// bucket, keyed by its original filename.
var storage = multerS3({
    s3: s3,
    acl: 'public-read',
    bucket: 'bucket-name',
    key: function (req, file, cb) {
        console.log(file);
        cb(null, file.originalname); //use Date.now() for unique file keys
    }
});

var upload = multer({ storage: storage });

//open in browser to see upload form
app.get('/', function (req, res) {
    res.sendFile(__dirname + '/index.html');
});

//use by upload form
app.post('/upload', upload.array('upl', 25), function (req, res, next) {
    // Collect the public location of every file multer-s3 just stored.
    var uploaded = [];
    for (var i = 0; i < req.files.length; i++) {
        var file = req.files[i];
        uploaded.push({url: file.location, name: file.key, type: file.mimetype, size: file.size});
    }
    res.send({
		message: "Uploaded!",
		urls: uploaded
	});
});

app.listen(3000, function () {
    console.log('Example app listening on port 3000!');
});

Latest Answer @ Mar-2016 [Old-One]

Edit 1: use the multer v1.x and multer-s3 v1.x packages for the following snippet (the exact pinned versions were garbled in this copy of the answer):

// Older variant of the multer-s3 approach: credentials and bucket are passed
// directly to the storage engine instead of a shared aws-sdk config.
// NOTE(review): calling multer-s3 with dirname/bucket/credential options like
// this is the old (pre-2.x) API — confirm against the pinned versions named
// just above before copying this snippet.
var express = require('express'),
    bodyParser = require('body-parser'),
    multer = require('multer'),
    s3 = require('multer-s3');

var app = express();

app.use(bodyParser.json());

// Uploads land in the bucket root ('/'), keyed by original filename.
var upload = multer({
    storage: s3({
        dirname: '/',
        bucket: 'bucket-name',
        secretAccessKey: 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX',
        accessKeyId: 'XXXXXXXXXXXXXXX',
        region: 'us-east-1',
        filename: function (req, file, cb) {
            cb(null, file.originalname); //use Date.now() for unique file keys
        }
    })
});

//open in browser to see upload form
app.get('/', function (req, res) {
    res.sendFile(__dirname + '/index.html');
});

//use by upload form
app.post('/upload', upload.array('upl'), function (req, res, next) {
    res.send("Uploaded!");
});

app.listen(3000, function () {
    console.log('Example app listening on port 3000!');
});

For complete running example clone express_multer_s3 repo and run node app.

Solution 3 - node.js

Simple S3 File Upload Without Multer

// Simple S3 upload using express-fileupload instead of multer: the middleware
// buffers the posted file (form field name "uploadedFileName") into
// req.files, and the route streams those bytes to S3 under a fixed key.
var express = require('express')
const fileUpload = require('express-fileupload');
const app = express();

// Parses multipart/form-data and populates req.files.
app.use(fileUpload());


var AWS = require('aws-sdk');

app.post('/imageUpload', async (req, res) => {
    AWS.config.update({
        accessKeyId: "ACCESS-KEY", // Access key ID
        // Fix: the original wrote "secretAccesskey" (lowercase k), a property
        // name the AWS SDK does not read — the secret was silently ignored.
        secretAccessKey: "SECRET-ACCESS-KEY", // Secret access key
        region: "us-east-1" //Region
    })

    const s3 = new AWS.S3();

    // Guard: without this, a request with no attached file would crash on
    // `req.files.uploadedFileName.data`.
    if (!req.files || !req.files.uploadedFileName) {
        return res.status(400).send({
            "response_code": 400,
            "response_message": "No file uploaded"
        });
    }

    // Binary data base64
    const fileContent  = Buffer.from(req.files.uploadedFileName.data, 'binary');

    // Setting up S3 upload parameters
    const params = {
        Bucket: 'BUKET-NAME', // placeholder (looks like a typo for "BUCKET-NAME")
        Key: "test.jpg", // File name you want to save as in S3
        Body: fileContent 
    };

    // Uploading files to the bucket
    s3.upload(params, function(err, data) {
        if (err) {
            // Fix: `throw err` inside an async callback would crash the whole
            // process; report the failure to the client instead.
            console.error(err);
            return res.status(500).send({
                "response_code": 500,
                "response_message": "Upload failed"
            });
        }
        res.send({
            "response_code": 200,
            "response_message": "Success",
            "response_data": data
        });
    });

})

app.listen(3000, function () {
    console.log('Example app listening on port 3000!');
});

Solution 4 - node.js

You need something like multer in your set of middleware to handle multipart/form-data for you and populate req.files. From the doco:

// Registers multer globally so multipart/form-data bodies are parsed and
// req.files is populated before any route handler runs — this is the missing
// piece that caused the question's "Cannot read property 'path' of undefined".
// NOTE(review): `app.use(multer({...}))` is the pre-1.0 multer API; in
// multer 1.x the call returns an object, not middleware — confirm version.
var express = require('express')
var multer  = require('multer')

var app = express()
app.use(multer({ dest: './uploads/'}))

Solution 5 - node.js

This stack overflow was the best answer I found explaining exactly how to get Node to S3 working.

https://stackoverflow.com/questions/26284181/aws-missing-credentials-when-i-try-send-something-to-my-s3-bucket-node-js

This in addition to some more stuff I had to hack on to get it all working. In my situation I was using a MEAN stack application so my Node file I was working with was a route file.

my aconfig.json file with the amazon credentials looks like this:

{ "accessKeyId": "*****YourAccessKey****", "secretAccessKey": "***YourSecretKey****" }

The final contents of the route file look like the file pasted below.

// Demo route: streams one hard-coded file from local disk to S3 and replies
// with its public URL. The request body is ignored; the file to send is
// described entirely by the hard-wired `sampleFile` object below — point it
// at a real file on your system before hitting this route (e.g. via Postman).
router.post('/sendToS3', function(req, res) {

var fs = require('fs');
var multer = require('multer');
var AWS = require('aws-sdk');
var path = require('path');

// AWS credentials are loaded from ./aconfig.json (format shown above).
var awsCredFile = path.join(__dirname, '.', 'aconfig.json');

console.log('awsCredFile is');
console.log(awsCredFile);

AWS.config.loadFromPath(awsCredFile);

// NOTE(review): `s3` is created but never used below; `photoBucket` is the
// client that actually performs the upload.
var s3 = new AWS.S3();

// S3 client pre-bound to the target bucket.
var photoBucket = new AWS.S3({params: {Bucket: 'myGreatBucketName'}});

// Hard-coded stand-in for the file record multer would normally produce.
var sampleFile = {
    "_id" : 345345,
    "fieldname" : "uploads[]",
    "originalname" : "IMG_1030.JPG",
    "encoding" : "7bit",
    "mimetype" : "image/jpeg",
    "destination" : "./public/images/uploads",
    "filename" : "31a66c51883595e74ab7ae5e66fb2ab8",
    "path" : "/images/uploads/31a66c51883595e74ab7ae5e66fb2ab8",
    "size" : 251556,
    "user" : "579fbe61adac4a8a73b6f508"
};

// Absolute path of the file that will be streamed to S3.
var filePathToSend = path.join(__dirname, '../public', sampleFile.path);


// Streams `filepath` to S3 under key `destFileName`; `callback(err, data)`
// fires when the managed upload completes.
function uploadToS3(filepath, destFileName, callback) {
    photoBucket
        .upload({
            ACL: 'public-read',
            Body: fs.createReadStream(filepath),
            Key: destFileName.toString(),
            ContentType: 'application/octet-stream' // force download if it's accessed as a top location
        })
        // http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3/ManagedUpload.html#httpUploadProgress-event
        .on('httpUploadProgress', function(evt) { console.log(evt); })
        // http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3/ManagedUpload.html#send-property
        .send(callback);
}

// NOTE(review): this multer() call's return value is discarded — it has no
// effect here and could likely be removed.
multer({limits: {fileSize:10*1024*1024}});

console.log('filePathToSend is ');
console.log(filePathToSend);

// Kick off the upload; the response (success or 500) is sent from this
// callback once S3 answers.
uploadToS3(filePathToSend, sampleFile.filename, function (err, data) {
    if (err) {
        console.error(err);
        return res.status(500).send('failed to upload to s3').end();
    }
    res.status(200)
        .send('File uploaded to S3: '
            + data.Location.replace(/</g, '&lt;')
            + '<br/><img src="' + data.Location.replace(/"/g, '&quot;') + '"/>')
        .end();
});

console.log('uploading now...');

});

This took me a while to finally get working, but if you setup the route below, update the sampleFile JSON to point to a real file on your system and hit it with Postman it will publish a file to your S3 account.

Hope this helps

Attributions

All content for this solution is sourced from the original question on Stackoverflow.

The content on this page is licensed under the Attribution-ShareAlike 4.0 International (CC BY-SA 4.0) license.

Content Type | Original Author | Original Content on Stackoverflow
Question | abritez | View Question on Stackoverflow
Solution 1 - node.js | theRemix | View Answer on Stackoverflow
Solution 2 - node.js | Zeeshan Hassan Memon | View Answer on Stackoverflow
Solution 3 - node.js | Surojit Paul | View Answer on Stackoverflow
Solution 4 - node.js | morloch | View Answer on Stackoverflow
Solution 5 - node.js | JasonPerr | View Answer on Stackoverflow