I'm working on a use case where I need to copy data from an S3 bucket in account1 (region1) to an S3 bucket in account2 (region2). To do this, I created a Lambda function in account1 that writes to the S3 bucket in account2.
The destination bucket in account2 has the following bucket policy defined:
{
    "Version": "2012-10-17",
    "Statement": [
        {
            "Effect": "Deny",
            "Principal": {
                "AWS": "*"
            },
            "Action": "s3:*",
            "Resource": "arn:aws:s3:::destination-bucket/*",
            "Condition": {
                "Bool": {
                    "aws:SecureTransport": "false"
                },
                "ForAllValues:StringNotEquals": {
                    "s3:TlsVersion": [
                        "1.2",
                        "1.3"
                    ]
                }
            }
        },
        {
            "Effect": "Allow",
            "Principal": {
                "AWS": [
                    "arn:aws:iam::<account1-id>:role/cross-account-file-share-role"
                ]
            },
            "Action": [
                "s3:Get*",
                "s3:Put*",
                "s3:List*",
                "s3:AbortMultipartUpload",
                "s3:Delete*"
            ],
            "Resource": [
                "arn:aws:s3:::destination-bucket",
                "arn:aws:s3:::destination-bucket/*"
            ]
        }
    ]
}
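Since the Allow statement above is keyed to one specific role ARN, one thing I can verify is which principal my requests actually arrive as. This is a throwaway debugging sketch (logged from inside the handler), not part of the function itself:

import boto3

# Debugging sketch: print the identity the function's calls are signed with,
# to compare against the Principal in the Allow statement above.
sts = boto3.client('sts')
identity = sts.get_caller_identity()
print(identity['Arn'])
# For an assumed role this prints something like:
# arn:aws:sts::<account1-id>:assumed-role/cross-account-file-share-role/<session>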
In account1, the cross-account-file-share-role trust relationship looks like this:
{
    "Version": "2012-10-17",
    "Statement": [
        {
            "Effect": "Allow",
            "Principal": {
                "Service": [
                    "lambda.amazonaws.com",
                    "ec2.amazonaws.com",
                    "storagegateway.amazonaws.com",
                    "s3.amazonaws.com"
                ],
                "AWS": "arn:aws:iam::<account1-id>:role/FullResourceAccessforEC2"
            },
            "Action": "sts:AssumeRole"
        }
    ]
}
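As I understand it, this trust policy only comes into play if something actually assumes the role. If the Lambda's execution role were a different role, the copy would have to run on explicitly assumed credentials, roughly like this (a sketch only; the session name is arbitrary):

import boto3

# Sketch: obtain temporary credentials for cross-account-file-share-role
# and build the S3 resource from them. RoleSessionName is arbitrary.
sts = boto3.client('sts')
creds = sts.assume_role(
    RoleArn='arn:aws:iam::<account1-id>:role/cross-account-file-share-role',
    RoleSessionName='cross-account-copy'
)['Credentials']

s3_resource = boto3.resource(
    's3',
    aws_access_key_id=creds['AccessKeyId'],
    aws_secret_access_key=creds['SecretAccessKey'],
    aws_session_token=creds['SessionToken']
)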
I also tried creating an inline policy for my Lambda, in addition to AWSLambdaBasicExecutionRole, as suggested here:
{
    "Version": "2012-10-17",
    "Statement": [
        {
            "Effect": "Allow",
            "Action": "s3:GetObject",
            "Resource": "arn:aws:s3:::account1-source-bucket/*"
        },
        {
            "Effect": "Allow",
            "Action": [
                "s3:PutObject",
                "s3:PutObjectAcl"
            ],
            "Resource": "arn:aws:s3:::account2-destination-bucket/*"
        }
    ]
}
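To check whether plain writes to the destination bucket work at all, independent of the copy logic, I can run a tiny direct PutObject under the same role (a throwaway sketch; the test key name is made up):

import boto3

# Sketch: a minimal single-request write to the destination bucket.
# If this also fails with AccessDenied, the problem is permissions,
# not anything specific to the copy/multipart path.
s3 = boto3.client('s3')
s3.put_object(
    Bucket='account2-destination-bucket',
    Key='permission-test.txt',  # hypothetical test key
    Body=b'test',
    ACL='bucket-owner-full-control'
)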
Below is the Lambda I wrote in account1:
import json
import urllib.parse

import boto3

TARGET_BUCKET = 'account2-destination-bucket'

def lambda_handler(event, context):
    # The S3 notification arrives wrapped in an SNS message,
    # so the bucket and key are parsed out of the message body
    message = json.loads(event['Records'][0]['Sns']['Message'])
    source_bucket = message['Records'][0]['s3']['bucket']['name']
    source_key = urllib.parse.unquote_plus(message['Records'][0]['s3']['object']['key'])
    print("source_bucket:", source_bucket)
    print("source_key:", source_key)

    # Copy the object to the destination bucket
    s3_resource = boto3.resource('s3')
    copy_source = {
        'Bucket': source_bucket,
        'Key': source_key
    }
    target_key = source_key
    s3_resource.Bucket(TARGET_BUCKET).Object(target_key).copy(
        copy_source, ExtraArgs={'ACL': 'bucket-owner-full-control'}
    )
The Lambda has an SNS trigger that fires whenever a new file is uploaded to account1-source-bucket.
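For context, the handler receives the S3 notification wrapped in an SNS envelope, which is why it parses the JSON out of Records[0].Sns.Message. Trimmed to just the fields the handler reads, the event looks roughly like this (values are illustrative):

# Illustrative event shape; the SNS Message field is itself a JSON string
# containing the S3 event.
event = {
    "Records": [
        {
            "Sns": {
                "Message": '{"Records": [{"s3": {'
                           '"bucket": {"name": "account1-source-bucket"}, '
                           '"object": {"key": "path/to/file.txt"}}}]}'
            }
        }
    ]
}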
However, I'm getting the error below:
{
    "errorMessage": "An error occurred (AccessDenied) when calling the CreateMultipartUpload operation: Access Denied",
    "errorType": "ClientError",
    "requestId": "d37ad461-8c9a-409b-bd53-0bc11a5c2263",
    "stackTrace": [
        "  File \"/var/task/lambda_function.py\", line 25, in lambda_handler\n    s3_resource.Bucket(TARGET_BUCKET).Object(target_key).copy(copy_source, ExtraArgs={'ACL': 'bucket-owner-full-control'})\n",
        "  File \"/var/runtime/boto3/s3/inject.py\", line 565, in object_copy\n    return self.meta.client.copy(\n",
        "  File \"/var/runtime/boto3/s3/inject.py\", line 444, in copy\n    return future.result()\n",
        "  File \"/var/runtime/s3transfer/futures.py\", line 103, in result\n    return self._coordinator.result()\n",
        "  File \"/var/runtime/s3transfer/futures.py\", line 266, in result\n    raise self._exception\n",
        "  File \"/var/runtime/s3transfer/tasks.py\", line 139, in __call__\n    return self._execute_main(kwargs)\n",
        "  File \"/var/runtime/s3transfer/tasks.py\", line 162, in _execute_main\n    return_value = self._main(**kwargs)\n",
        "  File \"/var/runtime/s3transfer/tasks.py\", line 348, in _main\n    response = client.create_multipart_upload(\n",
        "  File \"/var/runtime/botocore/client.py\", line 530, in _api_call\n    return self._make_api_call(operation_name, kwargs)\n",
        "  File \"/var/runtime/botocore/client.py\", line 960, in _make_api_call\n    raise error_class(parsed_response, operation_name)\n"
    ]
}
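From the stack trace, the failure happens at CreateMultipartUpload: as far as I know, boto3's managed .copy() switches to a multipart upload once the object crosses the multipart threshold (8 MB by default). One way to tell a multipart-specific denial apart from a general write denial would be forcing a single-request CopyObject (valid for objects up to 5 GB); this is a sketch reusing the handler's variables:

import boto3

# Sketch: bypass the managed transfer and issue one CopyObject call.
# If this succeeds while .copy() fails, the denial is specific to the
# multipart path (CreateMultipartUpload / UploadPartCopy).
s3 = boto3.client('s3')
s3.copy_object(
    Bucket=TARGET_BUCKET,
    Key=target_key,
    CopySource={'Bucket': source_bucket, 'Key': source_key},
    ACL='bucket-owner-full-control'
)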
Please help me understand how to overcome this.