How to use the s3transfer.TransferConfig class in s3transfer

To help you get started, we’ve selected a few s3transfer examples based on popular ways the library is used in public projects.
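
TransferConfig carries the tuning parameters for a transfer (multipart threshold, part size, concurrency). Below is a minimal sketch of typical usage with the legacy S3Transfer manager; it assumes boto3 credentials are configured, and the bucket and file names are placeholders.

import boto3
import s3transfer

# Files at or above multipart_threshold are sent in
# multipart_chunksize-sized parts; smaller files go up in one request.
config = s3transfer.TransferConfig(
    multipart_threshold=8 * 1024 * 1024,
    multipart_chunksize=8 * 1024 * 1024,
    max_concurrency=10,
)

client = boto3.client('s3')
transfer = s3transfer.S3Transfer(client, config)
transfer.upload_file('local.bin', 'my-bucket', 'remote.bin')  # placeholder names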

github boto / s3transfer / tests / unit / test_s3transfer.py
def test_download_futures_fail_triggers_shutdown(self):
        class FailedDownloadParts(SequentialExecutor):
            def __init__(self, max_workers):
                super(FailedDownloadParts, self).__init__(max_workers)
                self.is_first = True

            def submit(self, function):
                future = super(FailedDownloadParts, self).submit(function)
                if self.is_first:
                    # This is the download_parts_thread.
                    future.set_exception(
                        Exception("fake download parts error"))
                    self.is_first = False
                return future

        client = mock.Mock()
        response_body = b'foobarbaz'
        client.get_object.return_value = {'Body': six.BytesIO(response_body)}

        downloader = MultipartDownloader(client, TransferConfig(),
                                         InMemoryOSLayer({}),
                                         FailedDownloadParts)
        with self.assertRaisesRegexp(Exception, "fake download parts error"):
            downloader.download_file('bucket', 'key', 'filename',
                                     len(response_body), {})
github scality / Zenko / tests / sfko / sfko / util / etag.py
import hashlib
import math
from binascii import unhexlify
from s3transfer.utils import ChunksizeAdjuster
from s3transfer import TransferConfig

DEFAULT_CHUNKSIZE = TransferConfig().multipart_chunksize
MAX_FILESIZE = TransferConfig().multipart_threshold


def get_chunksize(filesize):
    return ChunksizeAdjuster().adjust_chunksize(DEFAULT_CHUNKSIZE, filesize)


# Returns the size of the last chunk of an MPU (it can be smaller than the others).
def _get_last_chunksize(filesize, chunksize, parts):
    return chunksize - ((chunksize * parts) - filesize)

def _compute_mpu_etag(filesize):
    chunksize = get_chunksize(filesize)
    num_parts = math.ceil(filesize/float(chunksize))
    last_chunk = _get_last_chunksize(filesize, chunksize, num_parts)
    part_etag = hashlib.md5(b'0' * chunksize).hexdigest()
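    # NOTE: the snippet is truncated upstream; the lines below are a
    # plausible completion following the standard S3 multipart ETag
    # scheme (md5 over the concatenated binary part digests, suffixed
    # with "-<part count>"), not necessarily the project's exact code.
    last_etag = hashlib.md5(b'0' * last_chunk).hexdigest()
    digests = unhexlify(part_etag * (int(num_parts) - 1) + last_etag)
    return '%s-%d' % (hashlib.md5(digests).hexdigest(), num_parts)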
github boto / s3transfer / tests / unit / test_s3transfer.py
def test_io_thread_failure_triggers_shutdown(self):
        client = mock.Mock()
        response_body = b'foobarbaz'
        client.get_object.return_value = {'Body': six.BytesIO(response_body)}
        os_layer = mock.Mock()
        mock_fileobj = mock.MagicMock()
        mock_fileobj.__enter__.return_value = mock_fileobj
        mock_fileobj.write.side_effect = Exception("fake IO error")
        os_layer.open.return_value = mock_fileobj

        downloader = MultipartDownloader(client, TransferConfig(),
                                         os_layer, SequentialExecutor)
        # We're verifying that the exception raised from the IO future
        # propagates back up via download_file().
        with self.assertRaisesRegexp(Exception, "fake IO error"):
            downloader.download_file('bucket', 'key', 'filename',
                                     len(response_body), {})
github boto / s3transfer / tests / unit / test_s3transfer.py
def test_multipart_download_uses_correct_client_calls(self):
        client = mock.Mock()
        response_body = b'foobarbaz'
        client.get_object.return_value = {'Body': six.BytesIO(response_body)}

        downloader = MultipartDownloader(client, TransferConfig(),
                                         InMemoryOSLayer({}),
                                         SequentialExecutor)
        downloader.download_file('bucket', 'key', 'filename',
                                 len(response_body), {})

        client.get_object.assert_called_with(
            Range='bytes=0-',
            Bucket='bucket',
            Key='key'
        )
github boto / s3transfer / tests / unit / test_s3transfer.py
def test_exception_raised_on_exceeded_retries(self):
        client = mock.Mock()
        response_body = b'foobarbaz'
        stream_with_errors = mock.Mock()
        stream_with_errors.read.side_effect = socket.error("fake error")
        client.get_object.return_value = {'Body': stream_with_errors}
        config = TransferConfig(multipart_threshold=4,
                                multipart_chunksize=4)

        downloader = MultipartDownloader(client, config,
                                         InMemoryOSLayer({}),
                                         SequentialExecutor)
        with self.assertRaises(RetriesExceededError):
            downloader.download_file('bucket', 'key', 'filename',
                                     len(response_body), {})
github boto / s3transfer / tests / unit / test_s3transfer.py
def test_multipart_upload_is_aborted_on_error(self):
        # If the create_multipart_upload succeeds and any upload_part
        # fails, then abort_multipart_upload will be called.
        client = mock.Mock()
        uploader = MultipartUploader(
            client, TransferConfig(),
            InMemoryOSLayer({'filename': b'foobar'}), SequentialExecutor)
        client.create_multipart_upload.return_value = {'UploadId': 'upload_id'}
        client.upload_part.side_effect = Exception(
            "Some kind of error occurred.")

        with self.assertRaises(S3UploadFailedError):
            uploader.upload_file('filename', 'bucket', 'key', None, {})

        client.abort_multipart_upload.assert_called_with(
            Bucket='bucket', Key='key', UploadId='upload_id')
github boto / s3transfer / tests / integration / test_s3transfer.py
def test_upload_below_threshold(self):
        config = s3transfer.TransferConfig(
            multipart_threshold=2 * 1024 * 1024)
        transfer = self.create_s3_transfer(config)
        filename = self.files.create_file_with_size(
            'foo.txt', filesize=1024 * 1024)
        transfer.upload_file(filename, self.bucket_name,
                             'foo.txt')
        self.addCleanup(self.delete_object, 'foo.txt')

        self.assertTrue(self.object_exists('foo.txt'))
github boto / s3transfer / tests / unit / test_s3transfer.py
def test_multipart_upload_injects_proper_kwargs(self):
        client = mock.Mock()
        uploader = MultipartUploader(
            client, TransferConfig(),
            InMemoryOSLayer({'filename': b'foobar'}), SequentialExecutor)
        client.create_multipart_upload.return_value = {'UploadId': 'upload_id'}
        client.upload_part.return_value = {'ETag': 'first'}

        extra_args = {
            'SSECustomerKey': 'fakekey',
            'SSECustomerAlgorithm': 'AES256',
            'StorageClass': 'REDUCED_REDUNDANCY'
        }
        uploader.upload_file('filename', 'bucket', 'key', None, extra_args)

        client.create_multipart_upload.assert_called_with(
            Bucket='bucket', Key='key',
            # The initial call should inject all the storage class params.
            SSECustomerKey='fakekey',
            SSECustomerAlgorithm='AES256',
            StorageClass='REDUCED_REDUNDANCY')
github boto / s3transfer / tests / integration / test_s3transfer.py
def test_can_configure_threshold(self):
        config = s3transfer.TransferConfig(
            multipart_threshold=6 * 1024 * 1024
        )
        transfer = self.create_s3_transfer(config)
        filename = self.files.create_file_with_size(
            'foo.txt', filesize=8 * 1024 * 1024)
        transfer.upload_file(filename, self.bucket_name,
                             'foo.txt')
        self.addCleanup(self.delete_object, 'foo.txt')

        self.assertTrue(self.object_exists('foo.txt'))
github boto / s3transfer / tests / unit / test_s3transfer.py
def test_retry_on_failures_from_stream_reads(self):
        # If we get an exception during a call to the response body's .read()
        # method, we should retry the request.
        client = mock.Mock()
        response_body = b'foobarbaz'
        stream_with_errors = mock.Mock()
        stream_with_errors.read.side_effect = [
            socket.error("fake error"),
            response_body
        ]
        client.get_object.return_value = {'Body': stream_with_errors}
        config = TransferConfig(multipart_threshold=4,
                                multipart_chunksize=4)

        downloader = MultipartDownloader(client, config,
                                         InMemoryOSLayer({}),
                                         SequentialExecutor)
        downloader.download_file('bucket', 'key', 'filename',
                                 len(response_body), {})

        # We're storing these in **extra because the assertEqual
        # below is really about verifying we have the correct value
        # for the Range param.
        extra = {'Bucket': 'bucket', 'Key': 'key'}
        self.assertEqual(client.get_object.call_args_list,
                         # The first call to range=0-3 fails because of the
                         # side_effect above where we make the .read() raise a
                         # socket.error.
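                         # The second call to bytes=0-3 then succeeds and the
                         # remaining ranges are fetched.  (The snippet is
                         # truncated upstream; the expected calls below are
                         # inferred from the 9-byte body and the 4-byte
                         # chunksize configured above.)
                         [mock.call(Range='bytes=0-3', **extra),
                          mock.call(Range='bytes=0-3', **extra),
                          mock.call(Range='bytes=4-7', **extra),
                          mock.call(Range='bytes=8-', **extra)])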