aws lambda function getting access denied when getObject from s3

Tags: amazon-web-services, amazon-s3, aws-lambda, amazon-iam

Amazon Web-Services Problem Overview


I am getting an access denied error from the AWS S3 service in my Lambda function.

This is the code:

// dependencies
var async = require('async');
var AWS = require('aws-sdk');
var gm = require('gm').subClass({ imageMagick: true }); // Enable ImageMagick integration.

exports.handler = function(event, context) {
    var srcBucket = event.Records[0].s3.bucket.name;
    // Object key may have spaces or unicode non-ASCII characters.
    var key = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, " "));
/*
{
    originalFilename: <string>,
    versions: [
        {
            size: <number>,
            crop: [x,y],
            max: [x, y],
            rotate: <number>
        }
    ]
}*/
    var fileInfo;
    var dstBucket = "xmovo.transformedimages.develop";
    try {
        //TODO: Decompress and decode the returned value
        fileInfo = JSON.parse(key);
        //download s3File

        // get reference to S3 client
        var s3 = new AWS.S3();

        // Download the image from S3 into a buffer.
        s3.getObject({
                Bucket: srcBucket,
                Key: key
            },
            function (err, response) {
                if (err) {
                    console.log("Error getting from s3: >>> " + err + "::: Bucket-Key >>>" + srcBucket + "-" + key + ":::Principal>>>" + event.Records[0].userIdentity.principalId, err.stack);
                    return;
                }

                // Infer the image type.
                var img = gm(response.Body);
                var imageType = null;
                img.identify(function (err, data) {
                    if (err) {
                        console.log("Error image type: >>> " + err);
                        deleteFromS3(srcBucket, key);
                        return;
                    }
                    imageType = data.format;

                    //foreach of the versions requested
                    async.each(fileInfo.versions, function (currentVersion, callback) {
                        //apply transform, then upload; pass callback as the
                        //final waterfall callback so async.each is notified when done
                        async.waterfall([async.apply(transform, response, currentVersion), uploadToS3], callback);

                    }, function (err) {
                        if (err) console.log("Error on excecution of watefall: >>> " + err);
                        else {
                            //when all done then delete the original image from srcBucket
                            deleteFromS3(srcBucket, key);
                        }
                    });
                });
            });
    }
    catch (ex){
        context.fail("exception through: " + ex);
        deleteFromS3(srcBucket, key);
        return;
    }
        function transform(response, version, callback){
            var imageProcess = gm(response.Body);
            if (version.rotate!=0) imageProcess = imageProcess.rotate("black",version.rotate);
            if(version.size!=null) {
                if (version.crop != null) {
                    //crop the image from the coordinates
                    imageProcess=imageProcess.crop(version.size[0], version.size[1], version.crop[0], version.crop[1]);
                }
                else {
                    //find the bigger and resize proportioned the other dimension
                    var widthIsMax = version.size[0]>version.size[1];
                    var maxValue = Math.max(version.size[0],version.size[1]);
                    imageProcess=(widthIsMax)?imageProcess.resize(maxValue):imageProcess.resize(null, maxValue);
                }
            }


            //finally convert the image to jpg 90%
            imageProcess.toBuffer("jpg", {quality: 90}, function(err, buffer){
                if (err) return callback(err); // return so callback is not invoked twice
                callback(null, version, "image/jpeg", buffer);
            });

        }

        function deleteFromS3(bucket, filename){
            // aws-sdk v2 only sends the request when a callback is supplied
            // (or .send()/.promise() is called on the returned Request)
            s3.deleteObject({
                Bucket: bucket,
                Key: filename
            }, function (err) {
                if (err) console.log("Error deleting from s3: >>> " + err);
            });
        }

        function uploadToS3(version, contentType, data, callback) {
            // Stream the transformed image to a different S3 bucket.
            var dstKey = fileInfo.originalFilename + "_" + version.size + ".jpg";
            s3.putObject({
                Bucket: dstBucket,
                Key: dstKey,
                Body: data,
                ContentType: contentType
            }, callback);
        }
};

This is the error on Cloudwatch:

AccessDenied: Access Denied

This is the stack error:

at Request.extractError (/var/runtime/node_modules/aws-sdk/lib/services/s3.js:329:35)
at Request.callListeners (/var/runtime/node_modules/aws-sdk/lib/sequential_executor.js:105:20)
at Request.emit (/var/runtime/node_modules/aws-sdk/lib/sequential_executor.js:77:10)
at Request.emit (/var/runtime/node_modules/aws-sdk/lib/request.js:596:14)
at Request.transition (/var/runtime/node_modules/aws-sdk/lib/request.js:21:10)
at AcceptorStateMachine.runTo (/var/runtime/node_modules/aws-sdk/lib/state_machine.js:14:12)
at /var/runtime/node_modules/aws-sdk/lib/state_machine.js:26:10
at Request.<anonymous> (/var/runtime/node_modules/aws-sdk/lib/request.js:37:9)
at Request.<anonymous> (/var/runtime/node_modules/aws-sdk/lib/request.js:598:12)
at Request.callListeners (/var/runtime/node_modules/aws-sdk/lib/sequential_executor.js:115:18)

There is no other description or info. The S3 bucket permissions allow everyone to put, list, and delete.

What can I do to access the S3 bucket?

PS: In the Lambda event properties, the principal is correct and has administrative privileges.

Amazon Web-Services Solutions


Solution 1 - Amazon Web-Services

Interestingly enough, AWS returns 403 (access denied) when the file does not exist. Be sure the target file is in the S3 bucket.
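A quick way to rule this out is to call headObject on the exact bucket/key pair taken from the event and inspect the status code. This is a minimal sketch; the bucket and key names are placeholders:

// Sanity check: does the object actually exist under this exact key?
// With s3:ListBucket a missing key returns 404; without it you get 403,
// which is indistinguishable from a real permissions problem.
var AWS = require('aws-sdk');
var s3 = new AWS.S3();

s3.headObject({ Bucket: 'my-bucket', Key: 'my-key' }, function (err, data) {
    if (err) {
        console.log('Object check failed:', err.statusCode, err.code);
    } else {
        console.log('Object exists, size:', data.ContentLength);
    }
});

Remember that keys in S3 event notifications are URL-encoded, so decode them (as the question's code does) before reusing them in API calls.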

Solution 2 - Amazon Web-Services

Your Lambda execution role does not have the required privilege (s3:GetObject).

Go to the IAM dashboard and check the role associated with your Lambda execution. If you used the AWS wizard, it automatically creates a role called oneClick_lambda_s3_exec_role. Click on Show Policy and make sure s3:GetObject is listed.

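For reference, a minimal sketch of an execution-role policy covering what the question's code calls (getObject, putObject, deleteObject); BUCKET-NAME is a placeholder, and you would list both the source and destination buckets:

{
    "Version": "2012-10-17",
    "Statement": [
        {
            "Effect": "Allow",
            "Action": [
                "s3:GetObject",
                "s3:PutObject",
                "s3:DeleteObject"
            ],
            "Resource": "arn:aws:s3:::BUCKET-NAME/*"
        }
    ]
}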

Solution 3 - Amazon Web-Services

If you are specifying the Resource, don't forget to add the object-level specification as well: bucket-level actions such as s3:ListBucket match the bucket ARN, while object-level actions such as s3:GetObject match the /* ARN. Like this:

"Resource": [
  "arn:aws:s3:::BUCKET-NAME",
  "arn:aws:s3:::BUCKET-NAME/*"
]

Solution 4 - Amazon Web-Services

I ran into this issue, and after hours of IAM policy madness the solution was to:

  1. Go to S3 console
  2. Click bucket you are interested in.
  3. Click 'Properties'
  4. Unfold 'Permissions'
  5. Click 'Add more permissions'
  6. Choose 'Any Authenticated AWS User' from the dropdown. Select 'Upload/Delete' and 'List' (or whatever your Lambda needs).
  7. Click 'Save'

Done. Your carefully written IAM role policies don't matter, and neither do specific bucket policies (I've written those too to make it work). Or they just don't work on my account, who knows.

[EDIT]

After a lot of tinkering, I found the approach above is not the best. Try this instead:

  1. Keep your role policy as in the helloV post.
  2. Go to S3. Select your bucket. Click Permissions. Click Bucket Policy.
  3. Try something like this:

> { > "Version": "2012-10-17", > "Id": "Lambda access bucket policy", > "Statement": [ > { > "Sid": "All on objects in bucket lambda", > "Effect": "Allow", > "Principal": { > "AWS": "arn:aws:iam::AWSACCOUNTID:root" > }, > "Action": "s3:", > "Resource": "arn:aws:s3:::BUCKET-NAME/" > }, > { > "Sid": "All on bucket by lambda", > "Effect": "Allow", > "Principal": { > "AWS": "arn:aws:iam::AWSACCOUNTID:root" > }, > "Action": "s3:*", > "Resource": "arn:aws:s3:::BUCKET-NAME" > } > ] > }

This worked for me and does not require you to share the bucket with all authenticated AWS users (which most of the time is not ideal).

Solution 5 - Amazon Web-Services

If you have encryption set on your S3 bucket (such as AWS KMS), you may need to make sure the IAM role applied to your Lambda function is added to the list of IAM > Encryption keys > region > key > Key Users for the corresponding key that you used to encrypt your S3 bucket at rest.

For example, I added the CyclopsApplicationLambdaRole role that I had applied to my Lambda function as a Key User in IAM for the same AWS KMS key that I used to encrypt my S3 bucket. Don't forget to select the correct region for your key when you open up the Encryption keys UI.

  1. Find the execution role you've applied to your Lambda function.
  2. Find the key you used to add encryption to your S3 bucket.
  3. In IAM > Encryption keys, choose your region and click on the key name.
  4. Add the role as a Key User in IAM Encryption keys for the key specified in S3.
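Alternatively, a sketch of an extra statement you could add to the execution role's own policy (the key ARN here is a placeholder): granting kms:Decrypt on the key also lets the function read SSE-KMS-encrypted objects.

{
    "Effect": "Allow",
    "Action": "kms:Decrypt",
    "Resource": "arn:aws:kms:REGION:ACCOUNT-ID:key/KEY-ID"
}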

Solution 6 - Amazon Web-Services

If all the other policy ducks are in a row, S3 will still return an Access Denied message if the object doesn't exist AND the requester doesn't have ListBucket permission on the bucket.

From https://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectGET.html:

> ...If the object you request does not exist, the error Amazon S3 returns depends on whether you also have the s3:ListBucket permission.
>
> If you have the s3:ListBucket permission on the bucket, Amazon S3 will return an HTTP status code 404 ("no such key") error. If you don't have the s3:ListBucket permission, Amazon S3 will return an HTTP status code 403 ("access denied") error.
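So, to turn the misleading 403 into an honest 404, add a statement like this to the role's policy (a sketch; BUCKET-NAME is a placeholder). Note that s3:ListBucket applies to the bucket ARN itself, not to the /* object ARN:

{
    "Effect": "Allow",
    "Action": "s3:ListBucket",
    "Resource": "arn:aws:s3:::BUCKET-NAME"
}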

Solution 7 - Amazon Web-Services

I too ran into this issue. I fixed it by granting s3:GetObject* in the policy, as the request was attempting to obtain a version of that object.

Solution 8 - Amazon Web-Services

I solved my problem by following all the instructions from the AWS article How do I allow my Lambda execution role to access my Amazon S3 bucket?:

  1. Create an AWS Identity and Access Management (IAM) role for the Lambda function that grants access to the S3 bucket.

  2. Modify the IAM role's trust policy (see the sketch after this list).

  3. Set the IAM role as the Lambda function's execution role.

  4. Verify that the bucket policy grants access to the Lambda function's execution role.
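For step 2, the standard trust policy that lets the Lambda service assume the role looks like this:

{
    "Version": "2012-10-17",
    "Statement": [
        {
            "Effect": "Allow",
            "Principal": {
                "Service": "lambda.amazonaws.com"
            },
            "Action": "sts:AssumeRole"
        }
    ]
}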

Solution 9 - Amazon Web-Services

I tried to execute a basic blueprint Python Lambda function [example code] and I had the same issue. My execution role was lambda_basic_execution.

I went to S3 > (my bucket name here) > Permissions > Bucket Policy.


Because I'm a beginner, I used the Policy Generator provided by Amazon (http://awspolicygen.s3.amazonaws.com/policygen.html) rather than writing JSON myself. My JSON looks like this:

{
    "Id": "Policy153536723xxxx",
    "Version": "2012-10-17",
    "Statement": [
        {
            "Sid": "Stmt153536722xxxx",
            "Action": [
                "s3:GetObject"
            ],
            "Effect": "Allow",
            "Resource": "arn:aws:s3:::tokabucket/*",
            "Principal": {
                "AWS": [
                    "arn:aws:iam::82557712xxxx:role/lambda_basic_execution"
                ]
            }
        }
    ]
}

And then the code executed nicely.


Solution 10 - Amazon Web-Services

I was trying to read a file from S3 and create a new file by changing the content of the file I read (Lambda + Node). Reading the file from S3 had no problems. As soon as I tried writing to the S3 bucket, though, I got an 'Access Denied' error.

I tried everything listed above but couldn't get rid of 'Access Denied'. Finally I was able to get it working by giving 'List objects' permission to everyone in my bucket's Access Control List.

Obviously this is not the best approach, but nothing else worked.

Solution 11 - Amazon Web-Services

I was struggling with this issue for hours. I was using AmazonS3EncryptionClient and nothing I did helped. Then I noticed that the client is actually deprecated, so I thought I'd try switching to the builder model they have:

var builder = AmazonS3EncryptionClientBuilder.standard()
  .withEncryptionMaterials(new StaticEncryptionMaterialsProvider(encryptionMaterials))
if (accessKey.nonEmpty && secretKey.nonEmpty)
  builder = builder.withCredentials(new AWSStaticCredentialsProvider(new BasicAWSCredentials(accessKey.get, secretKey.get)))
builder.build()

And... that solved it. Looks like Lambda has trouble injecting the credentials in the old model, but works well in the new one.

Solution 12 - Amazon Web-Services

I was getting the same "AccessDenied: Access Denied" error while cropping S3 images with a Lambda function. I updated the S3 bucket policy and the IAM role's inline policy as per the document linked below.

But still I was getting the same error. Then I realised I was trying to grant "public-read" access on a private bucket. After removing ACL: 'public-read' from S3.putObject, the problem was resolved.

https://aws.amazon.com/premiumsupport/knowledge-center/access-denied-lambda-s3-bucket/
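In terms of the question's code, the change amounts to this (a sketch using aws-sdk v2; the parameters mirror the uploadToS3 helper above):

// Fails on a private bucket that disallows public ACLs:
s3.putObject({ Bucket: dstBucket, Key: dstKey, Body: data, ContentType: contentType, ACL: 'public-read' }, callback);

// Works: omit ACL so the object inherits the bucket's (private) default
s3.putObject({ Bucket: dstBucket, Key: dstKey, Body: data, ContentType: contentType }, callback);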

Solution 13 - Amazon Web-Services

I had this error message in an AWS Lambda environment when using boto3 with Python:

botocore.exceptions.ClientError: An error occurred (AccessDenied) when calling the GetObject operation: Access Denied

It turns out I needed an extra permission because I was using object tags. If your objects have tags, you will need both s3:GetObject and s3:GetObjectTagging to get the object.
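A sketch of the corresponding policy statement (BUCKET-NAME is a placeholder):

{
    "Effect": "Allow",
    "Action": [
        "s3:GetObject",
        "s3:GetObjectTagging"
    ],
    "Resource": "arn:aws:s3:::BUCKET-NAME/*"
}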

Attributions

All content for this solution is sourced from the original question on Stackoverflow.

The content on this page is licensed under the Attribution-ShareAlike 4.0 International (CC BY-SA 4.0) license.

| Content Type | Original Author | Original Content on Stackoverflow |
| --- | --- | --- |
| Question | cyberdantes | View Question on Stackoverflow |
| Solution 1 - Amazon Web-Services | vedat | View Answer on Stackoverflow |
| Solution 2 - Amazon Web-Services | helloV | View Answer on Stackoverflow |
| Solution 3 - Amazon Web-Services | TheVTM | View Answer on Stackoverflow |
| Solution 4 - Amazon Web-Services | Adam Owczarczyk | View Answer on Stackoverflow |
| Solution 5 - Amazon Web-Services | Danny Bullis | View Answer on Stackoverflow |
| Solution 6 - Amazon Web-Services | Jeremiah | View Answer on Stackoverflow |
| Solution 7 - Amazon Web-Services | Steven Lu | View Answer on Stackoverflow |
| Solution 8 - Amazon Web-Services | dasilvadaniel | View Answer on Stackoverflow |
| Solution 9 - Amazon Web-Services | O-9 | View Answer on Stackoverflow |
| Solution 10 - Amazon Web-Services | imTheManager | View Answer on Stackoverflow |
| Solution 11 - Amazon Web-Services | yi1 | View Answer on Stackoverflow |
| Solution 12 - Amazon Web-Services | Sanjeev Chauhan | View Answer on Stackoverflow |
| Solution 13 - Amazon Web-Services | a_giant_squid | View Answer on Stackoverflow |