How to use the awscli.compat.six.BytesIO class in awscli

To help you get started, we’ve selected a few examples based on popular ways awscli.compat.six.BytesIO is used in public projects.

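awscli.compat.six.BytesIO is the six compatibility library's alias for an in-memory binary stream (io.BytesIO). The snippets below lean on two of its properties: getvalue() returns every byte written to the buffer, and the buffer itself reads like an open binary file, which makes it a handy stand-in for S3 response bodies in tests. A minimal sketch of both uses:

from awscli.compat import six

# Writing: collect bytes in memory, then pull them back out.
out = six.BytesIO()
out.write(b'hello ')
out.write(b'world')
assert out.getvalue() == b'hello world'

# Reading: wrap existing bytes so they behave like a binary file,
# e.g. to fake an S3 response 'Body' in a test.
body = six.BytesIO(b'foo')
assert body.read() == b'foo'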

github aws / aws-cli / tests / unit / customizations / cloudtrail / test_validation.py
def test_ensures_digests_can_be_json_parsed(self):
    json_str = '{{{'
    out = six.BytesIO()
    f = gzip.GzipFile(fileobj=out, mode="wb")
    f.write(json_str.encode())
    f.close()
    gzipped_data = out.getvalue()
    s3_client = Mock()
    s3_client.get_object.return_value = {
        'Body': six.BytesIO(gzipped_data),
        'Metadata': {'signature': 'abc', 'signature-algorithm': 'SHA256'}}
    provider = self._get_mock_provider(s3_client)
    with self.assertRaises(InvalidDigestFormat):
        provider.fetch_digest('bucket', 'key')
github gkrizek / bash-lambda-layer / bin / awscli / testutils.py
def assert_key_contents_equal(self, bucket, key, expected_contents):
    self.wait_until_key_exists(bucket, key)
    if isinstance(expected_contents, six.BytesIO):
        expected_contents = expected_contents.getvalue().decode('utf-8')
    actual_contents = self.get_key_contents(bucket, key)
    # The contents can be huge so we try to give helpful error messages
    # without necessarily printing the actual contents.
    self.assertEqual(len(actual_contents), len(expected_contents))
    if actual_contents != expected_contents:
        self.fail("Contents for %s/%s do not match (but they "
                  "have the same length)" % (bucket, key))
github aws / aws-cli / tests / functional / s3 / __init__.py
def get_object_response(self):
    return {
        'ETag': '"foo-1"',
        'Body': six.BytesIO(b'foo')
    }
github aws / aws-cli / tests / functional / cloudtrail / test_validation.py
def _gz_compress(data):
    out = six.BytesIO()
    f = gzip.GzipFile(fileobj=out, mode="wb")
    f.write(data.encode())
    f.close()
    return out.getvalue()
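_gz_compress only goes one direction; the digest tests read such data back by handing the gzipped bytes to GzipFile through another BytesIO. A minimal inverse sketch (our helper, not part of the test suite, assuming the _gz_compress above):

import gzip

from awscli.compat import six

def _gz_decompress(gzipped_data):
    # BytesIO lets GzipFile read the in-memory bytes as if they
    # were a .gz file on disk.
    f = gzip.GzipFile(fileobj=six.BytesIO(gzipped_data), mode="rb")
    try:
        return f.read().decode()
    finally:
        f.close()

assert _gz_decompress(_gz_compress('{"foo":"bar"}')) == '{"foo":"bar"}'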
github aws / aws-cli / tests / unit / customizations / cloudtrail / test_validation.py
def test_ensures_digest_has_proper_metadata(self):
    out = six.BytesIO()
    f = gzip.GzipFile(fileobj=out, mode="wb")
    f.write('{"foo":"bar"}'.encode())
    f.close()
    gzipped_data = out.getvalue()
    s3_client = Mock()
    s3_client.get_object.return_value = {
        'Body': six.BytesIO(gzipped_data),
        'Metadata': {}}
    provider = self._get_mock_provider(s3_client)
    with self.assertRaises(DigestSignatureError):
        provider.fetch_digest('bucket', 'key')
github aws / aws-cli / tests / integration / customizations / s3 / test_plugin.py
def test_mv_with_large_file(self):
    bucket_name = _SHARED_BUCKET
    # 40MB will force a multipart upload.
    file_contents = six.BytesIO(b'abcd' * (1024 * 1024 * 10))
    foo_txt = self.files.create_file(
        'foo.txt', file_contents.getvalue().decode('utf-8'))
    p = aws('s3 mv %s s3://%s/foo.txt' % (foo_txt, bucket_name))
    self.assert_no_errors(p)
    # When we move an object, the local file is gone:
    self.assertTrue(not os.path.exists(foo_txt))
    # And now resides in s3.
    self.assert_key_contents_equal(bucket_name, 'foo.txt', file_contents)

    # Now verify we can download this file.
    p = aws('s3 mv s3://%s/foo.txt %s' % (bucket_name, foo_txt))
    self.assert_no_errors(p)
    self.assertTrue(os.path.exists(foo_txt))
    self.assertEqual(os.path.getsize(foo_txt),
                     len(file_contents.getvalue()))
github aws / aws-cli / tests / functional / s3 / test_sync_command.py
def test_glacier_sync_with_force_glacier(self):
    self.parsed_responses = [
        {
            'Contents': [
                {'Key': 'foo/bar.txt', 'ContentLength': '100',
                 'LastModified': '00:00:00Z',
                 'StorageClass': 'GLACIER',
                 'Size': 100},
            ],
            'CommonPrefixes': []
        },
        {'ETag': '"foo-1"', 'Body': six.BytesIO(b'foo')},
    ]
    cmdline = '%s s3://bucket/foo %s --force-glacier-transfer' % (
        self.prefix, self.files.rootdir)
    self.run_cmd(cmdline, expected_rc=0)
    self.assertEqual(len(self.operations_called), 2, self.operations_called)
    self.assertEqual(self.operations_called[0][0].name, 'ListObjectsV2')
    self.assertEqual(self.operations_called[1][0].name, 'GetObject')
github aws / aws-cli / tests / functional / gamelift / test_get_game_session_log.py
def setUp(self):
    super(TestGetGameSessionLog, self).setUp()
    self.files = FileCreator()
    self.filename = os.path.join(self.files.rootdir, 'myfile')
    self.urlopen_patch = mock.patch(
        'awscli.customizations.gamelift.getlog.urlopen')
    self.contents = b'My Contents'
    self.urlopen_mock = self.urlopen_patch.start()
    self.urlopen_mock.return_value = six.BytesIO(self.contents)
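The patch works because the gamelift download code only needs a file-like object back from urlopen; a six.BytesIO wrapping the expected contents satisfies that read() interface without any network access.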
github aws / aws-cli / awscli / customizations / codedeploy / push.py
create_response = self.s3.create_multipart_upload(
    Bucket=params.bucket,
    Key=params.key
)
upload_id = create_response['UploadId']
try:
    part_num = 1
    multipart_list = []
    bundle.seek(0)
    while size_remaining > 0:
        data = bundle.read(MULTIPART_LIMIT)
        # Wrap each chunk in a BytesIO so the S3 client receives a
        # file-like part body rather than raw bytes.
        upload_response = self.s3.upload_part(
            Bucket=params.bucket,
            Key=params.key,
            UploadId=upload_id,
            PartNumber=part_num,
            Body=six.BytesIO(data)
        )
        multipart_list.append({
            'PartNumber': part_num,
            'ETag': upload_response['ETag']
        })
        part_num += 1
        size_remaining -= len(data)
    return self.s3.complete_multipart_upload(
        Bucket=params.bucket,
        Key=params.key,
        UploadId=upload_id,
        MultipartUpload={'Parts': multipart_list}
    )
except ClientError as e:
    self.s3.abort_multipart_upload(
        Bucket=params.bucket,
        Key=params.key,
        UploadId=upload_id
    )
    raise e
github aws / aws-cli / awscli / customizations / codedeploy.py
create_response = self.s3.CreateMultipartUpload(
    bucket=parsed_args.bucket,
    key=parsed_args.key
)
upload_id = create_response['UploadId']
try:
    part_num = 1
    multipart_list = []
    bundle.seek(0)
    while size_remaining > 0:
        data = bundle.read(MULTIPART_LIMIT)
        upload_response = self.s3.UploadPart(
            bucket=parsed_args.bucket,
            key=parsed_args.key,
            upload_id=upload_id,
            part_number=part_num,
            body=six.BytesIO(data)
        )
        multipart_list.append({
            'PartNumber': part_num,
            'ETag': upload_response['ETag']
        })
        part_num += 1
        size_remaining -= len(data)
    return self.s3.CompleteMultipartUpload(
        bucket=parsed_args.bucket,
        key=parsed_args.key,
        upload_id=upload_id,
        multipart_upload={'Parts': multipart_list}
    )
except Exception as e:
    self.s3.AbortMultipartUpload(
        bucket=parsed_args.bucket,
        key=parsed_args.key,
        upload_id=upload_id
    )
    raise e
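In both versions of the CodeDeploy customization the role of BytesIO is the same: the upload-part operation wants a file-like body, and six.BytesIO(data) turns the chunk just read from the bundle into one without staging it on disk. A fresh, seekable buffer per part also means the client can rewind and resend an individual part if a request needs to be retried.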