Commit
Merge pull request #861 from jkiang13/SYNPY-1138-boto-upload-acl
SYNPY-1138 apply bucket-owner-full-control ACL on Synapse boto uploads
jkiang13 authored May 19, 2021
2 parents bebf271 + b10f3a8 commit 1967179
Showing 4 changed files with 48 additions and 5 deletions.
11 changes: 8 additions & 3 deletions docs/S3Storage.rst
@@ -102,7 +102,9 @@ Using credentials with the awscli
---------------------------------
This example illustrates obtaining STS credentials and using them with the awscli command line tool.
The first command outputs the credentials as shell commands to execute which will then be picked up
by subsequent aws cli commands.
by subsequent aws cli commands. Note that the bucket-owner-full-control ACL is required when putting
an object via STS credentials; this ensures that ownership of the object is transferred to the
owner of the AWS bucket.

.. code-block::
@@ -116,12 +118,14 @@ by subsequent aws cli commands.
# if the above are executed in the shell, the awscli will automatically apply them
# e.g. copy a file directly to the bucket using the exported credentials
$ aws s3 cp /path/to/local/file $SYNAPSE_STS_S3_LOCATION
$ aws s3 cp /path/to/local/file $SYNAPSE_STS_S3_LOCATION --acl bucket-owner-full-control
Using credentials with boto3 in python
--------------------------------------
This example illustrates retrieving STS credentials and using them with boto3 within python code,
in this case to upload a file.
in this case to upload a file. Note that the bucket-owner-full-control ACL is required when putting
an object via STS credentials; this ensures that ownership of the object is transferred to the
owner of the AWS bucket.

.. code-block::
@@ -133,6 +137,7 @@ in this case to upload a file.
Filename='/path/to/local/file',
Bucket='my-external-synapse-bucket',
Key='path/within/bucket/file',
ExtraArgs={'ACL': 'bucket-owner-full-control'},
)
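
The collapsed lines above are where the STS credentials are obtained. As a rough end-to-end sketch of the flow this example documents (assuming the get_sts_storage_token output_format='boto' usage described elsewhere in these docs, and a hypothetical folder ID), the complete example might look like:

import boto3
import synapseclient

syn = synapseclient.login()

# 'syn12345678' is a hypothetical folder in the STS-enabled storage location
sts_credentials = syn.get_sts_storage_token(
    'syn12345678', 'read_write', output_format='boto'
)

s3_client = boto3.client('s3', **sts_credentials)
s3_client.upload_file(
    Filename='/path/to/local/file',
    Bucket='my-external-synapse-bucket',
    Key='path/within/bucket/file',
    # required with STS credentials so that the bucket owner takes ownership of the object
    ExtraArgs={'ACL': 'bucket-owner-full-control'},
)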
Automatic transfers to/from STS storage locations using boto3 with synapseclient
3 changes: 2 additions & 1 deletion synapseclient/core/remote_file_storage_wrappers.py
@@ -132,7 +132,8 @@ def upload_file(bucket, endpoint_url, remote_file_key, upload_file_path,
upload_file_path,
remote_file_key,
Callback=progress_callback,
Config=transfer_config
Config=transfer_config,
ExtraArgs={'ACL': 'bucket-owner-full-control'},
)
return upload_file_path

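For context on this hunk, the following is a minimal standalone sketch of the boto3 call being modified, not the wrapper's actual implementation; the bucket, key, and transfer settings are hypothetical placeholders. It shows where ExtraArgs fits alongside the existing Callback and Config arguments:

import boto3
from boto3.s3.transfer import TransferConfig

# hypothetical values; the real wrapper receives these as function arguments
bucket = 'my-external-synapse-bucket'
remote_file_key = 'path/within/bucket/file'
upload_file_path = '/path/to/local/file'

def progress_callback(bytes_transferred):
    # boto3 invokes the callback with the number of bytes transferred in each chunk
    print('transferred {} bytes'.format(bytes_transferred))

s3_client = boto3.client('s3')  # the real wrapper also supports a custom endpoint_url and STS credentials
transfer_config = TransferConfig(max_concurrency=10)

s3_client.upload_file(
    upload_file_path,
    bucket,
    remote_file_key,
    Callback=progress_callback,
    Config=transfer_config,
    # the added argument: grant the bucket owner full control of the uploaded object
    ExtraArgs={'ACL': 'bucket-owner-full-control'},
)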
36 changes: 36 additions & 0 deletions tests/integration/synapseclient/core/test_external_storage.py
@@ -8,6 +8,7 @@

import pytest
import unittest
from unittest import mock

try:
boto3 = importlib.import_module('boto3')
@@ -154,6 +155,7 @@ def test_sts_external_storage_location(self):
Filename=temp_file.name,
Bucket=bucket_name,
Key=remote_key,
ExtraArgs={'ACL': 'bucket-owner-full-control'},
)

# now read the file using our read credentials
@@ -181,3 +183,37 @@
retrieved_file_entity = self.syn.get(file_entity['id'])
with open(retrieved_file_entity.path, 'r') as f:
assert file_contents == f.read()

def test_boto_upload__acl(self):
"""Verify when we store a Synapse object using boto we apply a bucket-owner-full-control ACL to the object"""
bucket_name, _ = get_aws_env()
_, folder, storage_location_id = self._configure_storage_location(sts_enabled=True)

file_contents = str(uuid.uuid4())
upload_file = self._make_temp_file(contents=file_contents)

# mock the sts setting so that we upload this file using boto regardless of test configuration
with mock.patch.object(self.syn, 'use_boto_sts_transfers', new_callable=mock.PropertyMock(return_value=True)):
file = self.syn.store(File(path=upload_file.name, parent=folder))

s3_read_client = boto3.client('s3', **get_aws_env()[1])
bucket_acl = s3_read_client.get_bucket_acl(Bucket=bucket_name)
bucket_grantee = bucket_acl['Grants'][0]['Grantee']
assert bucket_grantee['Type'] == 'CanonicalUser'
bucket_owner_id = bucket_grantee['ID']

# with_retry to avoid eventual-consistency issues with the S3 put
object_acl = with_retry(
lambda: s3_read_client.get_object_acl(
Bucket=bucket_name,
Key=file['_file_handle']['key']
),
retry_exceptions=[s3_read_client.exceptions.NoSuchKey]
)
grants = object_acl['Grants']
assert len(grants) == 1
grant = grants[0]
grantee = grant['Grantee']
assert grantee['Type'] == 'CanonicalUser'
assert grantee['ID'] == bucket_owner_id
assert grant['Permission'] == 'FULL_CONTROL'
3 changes: 2 additions & 1 deletion
@@ -197,7 +197,8 @@ def _upload_test(**kwargs):
upload_file_path,
remote_file_key,
Callback=progress_callback,
Config=transfer_config
Config=transfer_config,
ExtraArgs={'ACL': 'bucket-owner-full-control'},
)

# why do we return something we passed...?
