How to use the s3transfer.S3Transfer class in s3transfer

To help you get started, we've selected a few S3Transfer examples drawn from popular public projects that use it.

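Before diving into the project examples, here is a minimal usage sketch, assuming a boto3/botocore S3 client and placeholder bucket, key, and file names: S3Transfer wraps the low-level client and exposes upload_file and download_file.

import boto3
from s3transfer import S3Transfer

# Wrap a low-level S3 client in a transfer object.
client = boto3.client('s3')
transfer = S3Transfer(client)

# Upload a local file to s3://mybucket/mykey.
transfer.upload_file('/tmp/myfile', 'mybucket', 'mykey')

# Download the object back to disk.
transfer.download_file('mybucket', 'mykey', '/tmp/myfile-copy')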

From boto/s3transfer, tests/unit/test_s3transfer.py:
def test_can_create_with_just_client(self):
        transfer = S3Transfer(client=mock.Mock())
        self.assertIsInstance(transfer, S3Transfer)

From boto/s3transfer, tests/unit/test_s3transfer.py:
def test_download_file_fowards_extra_args(self):
        extra_args = {
            'SSECustomerKey': 'foo',
            'SSECustomerAlgorithm': 'AES256',
        }
        below_threshold = 20
        osutil = InMemoryOSLayer({'smallfile': b'hello world'})
        transfer = S3Transfer(self.client, osutil=osutil)
        self.client.head_object.return_value = {
            'ContentLength': below_threshold}
        self.client.get_object.return_value = {
            'Body': six.BytesIO(b'foobar')
        }
        transfer.download_file('bucket', 'key', '/tmp/smallfile',
                               extra_args=extra_args)

        # Note that we need to invoke both the HeadObject call
        # and the GetObject call with the extra_args.
        # This is necessary: trying to HeadObject an SSE object
        # will return a 400 if you don't provide the required
        # params.
        self.client.get_object.assert_called_with(
            Bucket='bucket', Key='key', SSECustomerAlgorithm='AES256',
            SSECustomerKey='foo')
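
The same extra_args pattern applies in application code; a sketch, assuming an SSE-C encrypted object and a placeholder customer key (SSE-C with AES256 expects a 32-byte key):

import boto3
from s3transfer import S3Transfer

transfer = S3Transfer(boto3.client('s3'))

# The same customer key must accompany every HeadObject/GetObject
# on an SSE-C object; 'x' * 32 is a stand-in, not a real key.
transfer.download_file(
    'mybucket', 'mykey', '/tmp/plaintext',
    extra_args={
        'SSECustomerAlgorithm': 'AES256',
        'SSECustomerKey': 'x' * 32,
    },
)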

From boto/s3transfer, tests/integration/test_s3transfer.py:
def test_callback_called_once_with_sigv4(self):
        # Verify #98, where the callback was being invoked
        # twice when using signature version 4.
        self.amount_seen = 0
        lock = threading.Lock()

        def progress_callback(amount):
            with lock:
                self.amount_seen += amount

        client = self.session.create_client(
            's3', self.region,
            config=Config(signature_version='s3v4'))
        transfer = s3transfer.S3Transfer(client)
        filename = self.files.create_file_with_size(
            '10mb.txt', filesize=10 * 1024 * 1024)
        transfer.upload_file(filename, self.bucket_name,
                             '10mb.txt', callback=progress_callback)
        self.addCleanup(self.delete_object, '10mb.txt')

        self.assertEqual(self.amount_seen, 10 * 1024 * 1024)
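
Outside a test, the same lock-protected callback pattern might look like this sketch (names and paths are illustrative):

import threading

import boto3
from s3transfer import S3Transfer

class ProgressTracker:
    """Thread-safe cumulative byte counter for transfer callbacks."""

    def __init__(self):
        self._seen = 0
        self._lock = threading.Lock()

    def __call__(self, bytes_amount):
        # Callbacks may fire from multiple threads, so guard the sum.
        with self._lock:
            self._seen += bytes_amount
            print('{} bytes transferred'.format(self._seen))

transfer = S3Transfer(boto3.client('s3'))
transfer.upload_file('/tmp/10mb.txt', 'mybucket', '10mb.txt',
                     callback=ProgressTracker())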

From aws/aws-cli, tests/unit/customizations/test_s3uploader.py:
def setUp(self):
        self.s3client = botocore.session.get_session().create_client(
                's3', region_name="us-east-1")
        self.s3client_stub = Stubber(self.s3client)
        self.transfer_manager_mock = Mock(spec=S3Transfer)
        self.transfer_manager_mock.upload = Mock()
        self.bucket_name = "bucketname"
        self.prefix = None
        self.region = "us-east-1"

        self.s3uploader = S3Uploader(
            self.s3client, self.bucket_name, self.region, self.prefix, None, False,
            self.transfer_manager_mock)

From boto/s3transfer, tests/integration/test_s3transfer.py:
def create_s3_transfer(self, config=None):
        return s3transfer.S3Transfer(
            self.client, config=config
        )

From boto/s3transfer, tests/unit/test_s3transfer.py:
def test_get_object_stream_uses_all_retries_and_errors_out(self):
        below_threshold = 20
        osutil = InMemoryOSLayer({})
        transfer = S3Transfer(self.client, osutil=osutil)
        self.client.head_object.return_value = {
            'ContentLength': below_threshold}
        # Here we're raising an exception every single time, which
        # will exhaust our retry count and propagate a
        # RetriesExceededError.
        self.client.get_object.side_effect = socket.error("fake error")
        with self.assertRaises(RetriesExceededError):
            transfer.download_file('bucket', 'key', 'smallfile')

        self.assertEqual(self.client.get_object.call_count, 5)
        # We should have also cleaned up the in progress file
        # we were downloading to.
        self.assertEqual(osutil.filemap, {})

From boto/s3transfer, tests/unit/test_s3transfer.py:
def test_download_file_with_invalid_extra_args(self):
        below_threshold = 20
        osutil = InMemoryOSLayer({})
        transfer = S3Transfer(self.client, osutil=osutil)
        self.client.head_object.return_value = {
            'ContentLength': below_threshold}
        with self.assertRaises(ValueError):
            transfer.download_file('bucket', 'key', '/tmp/smallfile',
                                   extra_args={'BadValue': 'foo'})
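
The ValueError comes from extra_args validation; the accepted keys can be inspected on the class, a sketch relying on the ALLOWED_DOWNLOAD_ARGS and ALLOWED_UPLOAD_ARGS attributes:

from s3transfer import S3Transfer

# 'BadValue' appears in neither list, hence the ValueError above.
print(S3Transfer.ALLOWED_DOWNLOAD_ARGS)
print(S3Transfer.ALLOWED_UPLOAD_ARGS)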

From dimagi/commcare-hq, corehq/apps/dump_reload/management/commands/dump_domain_data_raw.py:
def _upload(self, type_, path):
        filename = _filename(self.domain, type_, self.timestamp)
        print("Uploading {} to s3://{}/{}".format(type_, self.bucket, filename))
        S3Transfer(self.client).upload_file(
            path, self.bucket, filename,
            extra_args={'ServerSideEncryption': 'AES256'}
        )
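
Here extra_args={'ServerSideEncryption': 'AES256'} requests SSE-S3 encryption at rest; unlike the SSE-C example earlier, no customer key needs to be supplied on later downloads.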