author     arcadia-devtools <[email protected]>   2022-06-01 16:22:57 +0300
committer  arcadia-devtools <[email protected]>   2022-06-01 16:22:57 +0300
commit     63a84accbe6624a8f7258f16c3568b903845ce9a (patch)
tree       660e04171958103edb06b36ab2c1f0a1bc5436e3 /contrib/python/s3transfer/py3
parent     3078efc0435f57bcd781b58118a6b82578d11037 (diff)
intermediate changes
ref:33582b2639153838c696b8bf2b6926f34f6d34e9
Diffstat (limited to 'contrib/python/s3transfer/py3')
7 files changed, 16 insertions, 14 deletions
diff --git a/contrib/python/s3transfer/py3/.dist-info/METADATA b/contrib/python/s3transfer/py3/.dist-info/METADATA
index dbafb16cb7d..2323710a8e5 100644
--- a/contrib/python/s3transfer/py3/.dist-info/METADATA
+++ b/contrib/python/s3transfer/py3/.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: s3transfer
-Version: 0.5.2
+Version: 0.6.0
 Summary: An Amazon S3 Transfer Manager
 Home-page: https://github.com/boto/s3transfer
 Author: Amazon Web Services
@@ -13,12 +13,11 @@ Classifier: Natural Language :: English
 Classifier: License :: OSI Approved :: Apache Software License
 Classifier: Programming Language :: Python
 Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.6
 Classifier: Programming Language :: Python :: 3.7
 Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
-Requires-Python: >= 3.6
+Requires-Python: >= 3.7
 License-File: LICENSE.txt
 License-File: NOTICE.txt
 Requires-Dist: botocore (<2.0a.0,>=1.12.36)
diff --git a/contrib/python/s3transfer/py3/s3transfer/__init__.py b/contrib/python/s3transfer/py3/s3transfer/__init__.py
index c6760f87c2f..e8ff66f00ca 100644
--- a/contrib/python/s3transfer/py3/s3transfer/__init__.py
+++ b/contrib/python/s3transfer/py3/s3transfer/__init__.py
@@ -144,7 +144,7 @@ import s3transfer.compat
 from s3transfer.exceptions import RetriesExceededError, S3UploadFailedError
 
 __author__ = 'Amazon Web Services'
-__version__ = '0.5.2'
+__version__ = '0.6.0'
 
 
 class NullHandler(logging.Handler):
diff --git a/contrib/python/s3transfer/py3/s3transfer/manager.py b/contrib/python/s3transfer/py3/s3transfer/manager.py
index 31aa488bdde..b11daeba958 100644
--- a/contrib/python/s3transfer/py3/s3transfer/manager.py
+++ b/contrib/python/s3transfer/py3/s3transfer/manager.py
@@ -171,6 +171,9 @@ class TransferManager:
         'GrantReadACP',
         'GrantWriteACP',
         'Metadata',
+        'ObjectLockLegalHoldStatus',
+        'ObjectLockMode',
+        'ObjectLockRetainUntilDate',
         'RequestPayer',
         'ServerSideEncryption',
         'StorageClass',
diff --git a/contrib/python/s3transfer/py3/s3transfer/upload.py b/contrib/python/s3transfer/py3/s3transfer/upload.py
index 18368e0e363..0c99bd7b296 100644
--- a/contrib/python/s3transfer/py3/s3transfer/upload.py
+++ b/contrib/python/s3transfer/py3/s3transfer/upload.py
@@ -790,7 +790,7 @@ class UploadPartTask(Task):
                 UploadId=upload_id,
                 PartNumber=part_number,
                 Body=body,
-                **extra_args
+                **extra_args,
             )
         etag = response['ETag']
         part_metadata = {'ETag': etag, 'PartNumber': part_number}
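The manager.py hunk above whitelists the three S3 Object Lock parameters in TransferManager.ALLOWED_UPLOAD_ARGS; before this change, passing any of them in extra_args would have failed the manager's extra-args validation. A minimal sketch of how an upload could use them now (the bucket, key, and file names are hypothetical, and the bucket is assumed to have Object Lock enabled):

# Sketch: passing the newly allowed Object Lock arguments through
# TransferManager.upload(). Bucket, key, and file paths are hypothetical.
from datetime import datetime, timezone

import boto3
from s3transfer.manager import TransferManager

client = boto3.client('s3')
with TransferManager(client) as manager:
    future = manager.upload(
        fileobj='/tmp/report.bin',           # local file to upload (hypothetical)
        bucket='example-locked-bucket',      # assumed to have Object Lock enabled
        key='reports/report.bin',
        extra_args={
            'ObjectLockMode': 'GOVERNANCE',
            'ObjectLockRetainUntilDate': datetime(2030, 1, 1, tzinfo=timezone.utc),
            'ObjectLockLegalHoldStatus': 'OFF',
        },
    )
    future.result()  # block until the transfer completes

Because multipart uploads forward extra_args to the underlying API calls (note the **extra_args in the UploadPartTask hunk), whitelisting is what makes these parameters flow through for both single-part and multipart transfers.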
diff --git a/contrib/python/s3transfer/py3/s3transfer/utils.py b/contrib/python/s3transfer/py3/s3transfer/utils.py
index ba881c67dd8..61407eba5c5 100644
--- a/contrib/python/s3transfer/py3/s3transfer/utils.py
+++ b/contrib/python/s3transfer/py3/s3transfer/utils.py
@@ -29,8 +29,8 @@
 MAX_PARTS = 10000
 # The maximum file size you can upload via S3 per request.
 # See: http://docs.aws.amazon.com/AmazonS3/latest/dev/UploadingObjects.html
 # and: http://docs.aws.amazon.com/AmazonS3/latest/dev/qfacts.html
-MAX_SINGLE_UPLOAD_SIZE = 5 * (1024 ** 3)
-MIN_UPLOAD_CHUNKSIZE = 5 * (1024 ** 2)
+MAX_SINGLE_UPLOAD_SIZE = 5 * (1024**3)
+MIN_UPLOAD_CHUNKSIZE = 5 * (1024**2)
 
 logger = logging.getLogger(__name__)
diff --git a/contrib/python/s3transfer/py3/tests/unit/test_download.py b/contrib/python/s3transfer/py3/tests/unit/test_download.py
index 2bd095f8675..e8b5fe1f86e 100644
--- a/contrib/python/s3transfer/py3/tests/unit/test_download.py
+++ b/contrib/python/s3transfer/py3/tests/unit/test_download.py
@@ -576,7 +576,7 @@ class TestGetObjectTask(BaseTaskTest):
         self.stream = BytesIO(self.content)
         self.fileobj = WriteCollector()
         self.osutil = OSUtils()
-        self.io_chunksize = 64 * (1024 ** 2)
+        self.io_chunksize = 64 * (1024**2)
         self.task_cls = GetObjectTask
         self.download_output_manager = DownloadSeekableOutputManager(
             self.osutil, self.transfer_coordinator, self.io_executor
diff --git a/contrib/python/s3transfer/py3/tests/unit/test_utils.py b/contrib/python/s3transfer/py3/tests/unit/test_utils.py
index a1ff904e7a1..217779943b0 100644
--- a/contrib/python/s3transfer/py3/tests/unit/test_utils.py
+++ b/contrib/python/s3transfer/py3/tests/unit/test_utils.py
@@ -1146,8 +1146,8 @@ class TestAdjustChunksize(unittest.TestCase):
         self.adjuster = ChunksizeAdjuster()
 
     def test_valid_chunksize(self):
-        chunksize = 7 * (1024 ** 2)
-        file_size = 8 * (1024 ** 2)
+        chunksize = 7 * (1024**2)
+        file_size = 8 * (1024**2)
         new_size = self.adjuster.adjust_chunksize(chunksize, file_size)
         self.assertEqual(new_size, chunksize)
@@ -1164,17 +1164,17 @@ class TestAdjustChunksize(unittest.TestCase):
         self.assertEqual(new_size, MAX_SINGLE_UPLOAD_SIZE)
 
     def test_chunksize_too_small(self):
-        chunksize = 7 * (1024 ** 2)
-        file_size = 5 * (1024 ** 4)
+        chunksize = 7 * (1024**2)
+        file_size = 5 * (1024**4)
         # If we try to upload a 5TB file, we'll need to use 896MB part
         # sizes.
         new_size = self.adjuster.adjust_chunksize(chunksize, file_size)
-        self.assertEqual(new_size, 896 * (1024 ** 2))
+        self.assertEqual(new_size, 896 * (1024**2))
         num_parts = file_size / new_size
         self.assertLessEqual(num_parts, MAX_PARTS)
 
     def test_unknown_file_size_with_valid_chunksize(self):
-        chunksize = 7 * (1024 ** 2)
+        chunksize = 7 * (1024**2)
         new_size = self.adjuster.adjust_chunksize(chunksize)
         self.assertEqual(new_size, chunksize)
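The 896MB expectation in test_chunksize_too_small follows from ChunksizeAdjuster repeatedly doubling the part size until the file fits within MAX_PARTS (10000) parts: starting from 7 MiB, it doubles through 14, 28, 56, 112, 224 and 448 to 896 MiB, the first size that brings a 5 TiB file under 10000 parts (5 TiB / 896 MiB ≈ 5852 parts, versus ≈ 11704 at 448 MiB). A standalone sketch of that doubling step; the real adjuster in s3transfer.utils additionally clamps the result to the MIN_UPLOAD_CHUNKSIZE/MAX_SINGLE_UPLOAD_SIZE bounds:

# Sketch: the part-count-driven doubling behind test_chunksize_too_small.
import math

MAX_PARTS = 10000

def adjust_for_max_parts(chunksize, file_size):
    # Keep doubling the chunk size until the file fits in MAX_PARTS parts.
    num_parts = math.ceil(file_size / chunksize)
    while num_parts > MAX_PARTS:
        chunksize *= 2
        num_parts = math.ceil(file_size / chunksize)
    return chunksize

chunksize = 7 * (1024**2)   # 7 MiB starting part size
file_size = 5 * (1024**4)   # 5 TiB object
print(adjust_for_max_parts(chunksize, file_size) // (1024**2))  # -> 896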