def test_ensures_digests_can_be_json_parsed(self):
    # '{{{' is not valid JSON, so parsing the gzipped digest should fail.
    json_str = '{{{'
    out = six.BytesIO()
    f = gzip.GzipFile(fileobj=out, mode="wb")
    f.write(json_str.encode())
    f.close()
    gzipped_data = out.getvalue()
    s3_client = Mock()
    s3_client.get_object.return_value = {
        'Body': six.BytesIO(gzipped_data),
        'Metadata': {'signature': 'abc', 'signature-algorithm': 'SHA256'}}
    provider = self._get_mock_provider(s3_client)
    with self.assertRaises(InvalidDigestFormat):
        provider.fetch_digest('bucket', 'key')

def assert_key_contents_equal(self, bucket, key, expected_contents):
    self.wait_until_key_exists(bucket, key)
    if isinstance(expected_contents, six.BytesIO):
        expected_contents = expected_contents.getvalue().decode('utf-8')
    actual_contents = self.get_key_contents(bucket, key)
    # The contents can be huge so we try to give helpful error messages
    # without necessarily printing the actual contents.
    self.assertEqual(len(actual_contents), len(expected_contents))
    if actual_contents != expected_contents:
        self.fail("Contents for %s/%s do not match (but they "
                  "have the same length)" % (bucket, key))

def get_object_response(self):
    return {
        'ETag': '"foo-1"',
        'Body': six.BytesIO(b'foo')
    }

def _gz_compress(data):
    # Gzip-compress a text string in memory and return the compressed bytes.
    out = six.BytesIO()
    f = gzip.GzipFile(fileobj=out, mode="wb")
    f.write(data.encode())
    f.close()
    return out.getvalue()

def test_ensures_digest_has_proper_metadata(self):
    out = six.BytesIO()
    f = gzip.GzipFile(fileobj=out, mode="wb")
    f.write('{"foo":"bar"}'.encode())
    f.close()
    gzipped_data = out.getvalue()
    s3_client = Mock()
    # The digest object carries no signature metadata, so it should be rejected.
    s3_client.get_object.return_value = {
        'Body': six.BytesIO(gzipped_data),
        'Metadata': {}}
    provider = self._get_mock_provider(s3_client)
    with self.assertRaises(DigestSignatureError):
        provider.fetch_digest('bucket', 'key')

def test_mv_with_large_file(self):
    bucket_name = _SHARED_BUCKET
    # 40MB will force a multipart upload.
    file_contents = six.BytesIO(b'abcd' * (1024 * 1024 * 10))
    foo_txt = self.files.create_file(
        'foo.txt', file_contents.getvalue().decode('utf-8'))
    p = aws('s3 mv %s s3://%s/foo.txt' % (foo_txt, bucket_name))
    self.assert_no_errors(p)
    # When we move an object, the local file is gone:
    self.assertTrue(not os.path.exists(foo_txt))
    # And now resides in s3.
    self.assert_key_contents_equal(bucket_name, 'foo.txt', file_contents)
    # Now verify we can download this file.
    p = aws('s3 mv s3://%s/foo.txt %s' % (bucket_name, foo_txt))
    self.assert_no_errors(p)
    self.assertTrue(os.path.exists(foo_txt))
    self.assertEqual(os.path.getsize(foo_txt),
                     len(file_contents.getvalue()))

def test_glacier_sync_with_force_glacier(self):
    self.parsed_responses = [
        {
            'Contents': [
                {'Key': 'foo/bar.txt', 'ContentLength': '100',
                 'LastModified': '00:00:00Z',
                 'StorageClass': 'GLACIER',
                 'Size': 100},
            ],
            'CommonPrefixes': []
        },
        {'ETag': '"foo-1"', 'Body': six.BytesIO(b'foo')},
    ]
    cmdline = '%s s3://bucket/foo %s --force-glacier-transfer' % (
        self.prefix, self.files.rootdir)
    self.run_cmd(cmdline, expected_rc=0)
    self.assertEqual(len(self.operations_called), 2, self.operations_called)
    self.assertEqual(self.operations_called[0][0].name, 'ListObjectsV2')
    self.assertEqual(self.operations_called[1][0].name, 'GetObject')

def setUp(self):
    super(TestGetGameSessionLog, self).setUp()
    self.files = FileCreator()
    self.filename = os.path.join(self.files.rootdir, 'myfile')
    self.urlopen_patch = mock.patch(
        'awscli.customizations.gamelift.getlog.urlopen')
    self.contents = b'My Contents'
    self.urlopen_mock = self.urlopen_patch.start()
    self.urlopen_mock.return_value = six.BytesIO(self.contents)

# Multipart upload of the bundle to S3 using client-style calls.
create_response = self.s3.create_multipart_upload(
    Bucket=params.bucket,
    Key=params.key
)
upload_id = create_response['UploadId']
try:
    part_num = 1
    multipart_list = []
    bundle.seek(0)
    # size_remaining starts at the total bundle size (set before this point).
    while size_remaining > 0:
        data = bundle.read(MULTIPART_LIMIT)
        upload_response = self.s3.upload_part(
            Bucket=params.bucket,
            Key=params.key,
            UploadId=upload_id,
            PartNumber=part_num,
            Body=six.BytesIO(data)
        )
        multipart_list.append({
            'PartNumber': part_num,
            'ETag': upload_response['ETag']
        })
        part_num += 1
        size_remaining -= len(data)
    return self.s3.complete_multipart_upload(
        Bucket=params.bucket,
        Key=params.key,
        UploadId=upload_id,
        MultipartUpload={'Parts': multipart_list}
    )
except ClientError as e:
    # Abandon the incomplete upload before propagating the error.
    self.s3.abort_multipart_upload(
        Bucket=params.bucket,
        Key=params.key,
        UploadId=upload_id
    )
    raise

# Multipart upload variant using CamelCase operation calls and parsed_args.
create_response = self.s3.CreateMultipartUpload(
    bucket=parsed_args.bucket,
    key=parsed_args.key
)
upload_id = create_response['UploadId']
try:
    part_num = 1
    multipart_list = []
    bundle.seek(0)
    while size_remaining > 0:
        data = bundle.read(MULTIPART_LIMIT)
        upload_response = self.s3.UploadPart(
            bucket=parsed_args.bucket,
            key=parsed_args.key,
            upload_id=upload_id,
            part_number=part_num,
            body=six.BytesIO(data)
        )
        multipart_list.append({
            'PartNumber': part_num,
            'ETag': upload_response['ETag']
        })
        part_num += 1
        size_remaining -= len(data)
    return self.s3.CompleteMultipartUpload(
        bucket=parsed_args.bucket,
        key=parsed_args.key,
        upload_id=upload_id,
        multipart_upload={'Parts': multipart_list}
    )
except Exception as e:
    # Abort the incomplete upload before letting the error surface.
    self.s3.AbortMultipartUpload(
        bucket=parsed_args.bucket,
        key=parsed_args.key,
        upload_id=upload_id
    )
    raise