How to use the s3transfer.manager.TransferManager class in s3transfer

To help you get started, we’ve selected a few s3transfer examples, based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

github boto / s3transfer / tests / functional / test_upload.py View on Github external
def test_uses_provided_osutil(self):
        """Ensure an explicitly supplied OSUtils instance performs the file I/O."""
        recording_osutil = RecordingOSUtils()
        # Hand the recording os utility to the transfer manager so every
        # filesystem open it performs gets captured.
        self._manager = TransferManager(self.client, self.config, recording_osutil)

        self.add_put_object_response_with_default_expected_params()

        upload_future = self.manager.upload(self.filename, self.bucket, self.key)
        upload_future.result()

        # If the manager routed its opens through our osutil, the record
        # holds exactly one read-binary open of the source file.
        self.assertEqual(recording_osutil.open_records, [(self.filename, 'rb')])
github boto / s3transfer / tests / functional / test_manager.py View on Github external
def test_error_in_context_manager_cancels_incomplete_transfers(self):
        # Verify that an exception raised inside the manager's context
        # manager body cancels any still-pending transfers, surfacing the
        # original exception's message through a FatalError.

        # NOTE: delete() is used purely because it is the easiest operation
        # to stub; the specific operation is irrelevant to this test.
        transfer_count = 100
        submitted_futures = []
        ref_exception_msg = 'arbitrary exception'

        for _ in range(transfer_count):
            self.stubber.add_response('delete_object', {})

        # Serialize both submission and execution so transfers are still
        # outstanding when the exception fires.
        serial_config = TransferConfig(
            max_request_concurrency=1, max_submission_concurrency=1)
        manager = TransferManager(self.client, serial_config)
        try:
            with manager:
                for _ in range(transfer_count):
                    submitted_futures.append(
                        manager.delete('mybucket', 'mykey'))
                raise ArbitraryException(ref_exception_msg)
        except ArbitraryException:
            # At least one of the submitted futures should have been
            # cancelled and re-raise the wrapped exception message.
            with self.assertRaisesRegexp(FatalError, ref_exception_msg):
                for future in submitted_futures:
                    future.result()
github boto / s3transfer / tests / functional / test_manager.py View on Github external
def test_config_property(self):
        """The config object passed at construction is exposed unchanged via .config."""
        transfer_config = TransferConfig()
        transfer_manager = TransferManager(self.client, transfer_config)
        # Identity, not equality: it must be the very same object.
        self.assertIs(transfer_manager.config, transfer_config)
github boto / s3transfer / tests / functional / test_delete.py View on Github external
def setUp(self):
        """Prepare a default bucket/key pair and a transfer manager for each test."""
        super(TestDeleteObject, self).setUp()
        self.key = 'mykey'
        self.bucket = 'mybucket'
        self.manager = TransferManager(self.client)
github boto / s3transfer / tests / functional / test_manager.py View on Github external
def test_use_custom_executor_implementation(self):
        """A user-supplied executor class must be the one used for submissions."""
        executor_cls = mock.Mock(BaseExecutor)
        manager = TransferManager(self.client, executor_cls=executor_cls)
        manager.delete('bucket', 'key')
        # The delete submission should have flowed through the mocked
        # executor instance's submit().
        self.assertTrue(executor_cls.return_value.submit.called)
github boto / s3transfer / tests / functional / test_upload.py View on Github external
def setUp(self):
        """Configure a manager that always takes the multipart path with tiny chunks."""
        super(TestMultipartUpload, self).setUp()
        self.chunksize = 4
        # A threshold of 1 byte forces every upload to be multipart.
        self.config = TransferConfig(
            multipart_threshold=1,
            multipart_chunksize=self.chunksize,
            max_request_concurrency=1)
        self._manager = TransferManager(self.client, self.config)
        self.multipart_id = 'my-upload-id'
github boto / s3transfer / tests / functional / test_manager.py View on Github external
def test_cntrl_c_in_context_manager_cancels_incomplete_transfers(self):
        # Verify that a KeyboardInterrupt raised inside the manager's
        # context manager body cancels any still-pending transfers, with
        # the interrupt surfaced through a CancelledError on the futures.

        # NOTE: delete() is used purely because it is the easiest operation
        # to stub; the specific operation is irrelevant to this test.
        transfer_count = 100
        submitted_futures = []

        for _ in range(transfer_count):
            self.stubber.add_response('delete_object', {})

        # Serialize both submission and execution so transfers are still
        # outstanding when the interrupt fires.
        serial_config = TransferConfig(
            max_request_concurrency=1, max_submission_concurrency=1)
        manager = TransferManager(self.client, serial_config)
        try:
            with manager:
                for _ in range(transfer_count):
                    submitted_futures.append(
                        manager.delete('mybucket', 'mykey'))
                raise KeyboardInterrupt()
        except KeyboardInterrupt:
            # At least one of the submitted futures should have been
            # cancelled; its result() re-raises a CancelledError naming
            # the interrupt.
            with self.assertRaisesRegexp(
                    CancelledError, 'KeyboardInterrupt()'):
                for future in submitted_futures:
                    future.result()
github boto / boto3 / boto3 / s3 / transfer.py View on Github external
:type client: boto3.client
    :param client: The S3 client to use

    :type config: boto3.s3.transfer.TransferConfig
    :param config: The transfer config to use

    :type osutil: s3transfer.utils.OSUtils
    :param osutil: The os utility to use

    :rtype: s3transfer.manager.TransferManager
    :returns: A transfer manager based on parameters provided
    """
    executor_cls = None
    if not config.use_threads:
        executor_cls = NonThreadedExecutor
    return TransferManager(client, config, osutil, executor_cls)
github epam / cloud-pipeline / pipe-cli / src / utilities / storage / s3.py View on Github external
else:
            source_key = source_wrapper.path
        destination_key = S3BucketOperations.normalize_s3_path(destination_wrapper, relative_path)
        if skip_existing:
            local_size = self.get_local_file_size(source_key)
            remote_size = self.get_s3_file_size(destination_wrapper.bucket.path, destination_key)
            if remote_size is not None and local_size == remote_size:
                if not quiet:
                    click.echo('Skipping file %s since it exists in the destination %s' % (source_key, destination_key))
                return
        tags += ("CP_SOURCE={}".format(source_key),)
        tags += ("CP_OWNER={}".format(self._get_user()),)
        extra_args = {
            'Tagging': self._convert_tags_to_url_string(tags)
        }
        TransferManager.ALLOWED_UPLOAD_ARGS.append('Tagging')
        if StorageItemManager.show_progress(quiet, size):
            self.bucket.upload_file(source_key, destination_key, Callback=ProgressPercentage(relative_path, size),
                                    ExtraArgs=extra_args)
        else:
            self.bucket.upload_file(source_key, destination_key, ExtraArgs=extra_args)
        if clean:
            source_wrapper.delete_item(source_key)
github boto / boto3 / boto3 / s3 / transfer.py View on Github external
# old version of the names.
        for alias in self.ALIAS:
            setattr(self, alias, getattr(self, self.ALIAS[alias]))
        self.use_threads = use_threads

    def __setattr__(self, name, value):
        # A write through an alias must also update the canonical
        # attribute, since the canonical name is what actually governs
        # the TransferManager's behavior.
        canonical = self.ALIAS.get(name)
        if canonical is not None:
            super(TransferConfig, self).__setattr__(canonical, value)
        # The attribute actually named is always set, alias or not.
        super(TransferConfig, self).__setattr__(name, value)


class S3Transfer(object):
    ALLOWED_DOWNLOAD_ARGS = TransferManager.ALLOWED_DOWNLOAD_ARGS
    ALLOWED_UPLOAD_ARGS = TransferManager.ALLOWED_UPLOAD_ARGS

    def __init__(self, client=None, config=None, osutil=None, manager=None):
        if not client and not manager:
            raise ValueError(
                'Either a boto3.Client or s3transfer.manager.TransferManager '
                'must be provided'
            )
        if manager and any([client, config, osutil]):
            raise ValueError(
                'Manager cannot be provided with client, config, '
                'nor osutil. These parameters are mutually exclusive.'
            )
        if config is None:
            config = TransferConfig()
        if osutil is None: