def test_callback_called_once_with_sigv4(self):
    # Verify #98, where the callback was being invoked
    # twice when using signature version 4.
    self.amount_seen = 0
    lock = threading.Lock()

    def progress_callback(amount):
        with lock:
            self.amount_seen += amount

    client = self.session.client(
        's3', self.region,
        config=Config(signature_version='s3v4'))
    transfer = boto3.s3.transfer.S3Transfer(client)
    filename = self.files.create_file_with_size(
        '10mb.txt', filesize=10 * 1024 * 1024)
    transfer.upload_file(filename, self.bucket_name,
                         '10mb.txt', callback=progress_callback)
    self.addCleanup(self.delete_object, '10mb.txt')
    self.assertEqual(self.amount_seen, 10 * 1024 * 1024)
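The test above relies on botocore's client Config to request signature version 4 and on a callback that receives the number of bytes transferred since the last invocation. A minimal standalone sketch of the same upload-with-progress pattern (region, bucket, and file names are hypothetical):

import boto3
import boto3.s3.transfer
from botocore.client import Config

client = boto3.client('s3', 'us-west-2',
                      config=Config(signature_version='s3v4'))
transfer = boto3.s3.transfer.S3Transfer(client)

def progress_callback(bytes_transferred):
    # Invoked repeatedly with the byte delta for each chunk uploaded.
    print('transferred {} bytes'.format(bytes_transferred))

transfer.upload_file('10mb.txt', 'my-example-bucket', '10mb.txt',
                     callback=progress_callback)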
def upload_files(s3_client, filenames, bucket_name):
    print('Uploading files to the bucket {}...'.format(bucket_name))
    for f in filenames:
        try:
            # Build the object key from the last two path components,
            # e.g. 'reports/2024.csv' for '/tmp/reports/2024.csv'.
            key = f.split('/')[-2:]
            key = key[0] + '/' + key[1]
            tc = boto3.s3.transfer.TransferConfig()
            t = boto3.s3.transfer.S3Transfer(client=s3_client, config=tc)
            t.upload_file(f, bucket_name, key, extra_args={'ACL': 'public-read'})
            file_url = 'https://{}.s3.amazonaws.com/{}'.format(bucket_name, key)
            print('The uploaded file is public and accessible with the following url: \n {}'.format(file_url))
        except S3UploadFailedError:
            print('File upload was not successful: PutObject permission missing.')
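A minimal sketch of how upload_files might be invoked (bucket name and file paths are hypothetical); S3UploadFailedError is importable from boto3.exceptions:

import boto3
from boto3.exceptions import S3UploadFailedError

s3_client = boto3.client('s3')
upload_files(s3_client,
             ['data/reports/2024.csv', 'data/reports/2025.csv'],
             'my-example-bucket')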
def _s3_initialize_client(s3_bucket):
    client = boto3.client('s3', region_name=_s3_get_bucket_location(s3_bucket))
    config = boto3.s3.transfer.TransferConfig(
        multipart_chunksize=256 * 1024 * 1024,
        max_concurrency=10,
        max_io_queue=1000,
        io_chunksize=2 * 1024 * 1024)
    transfer_client = boto3.s3.transfer.S3Transfer(client, config, boto3.s3.transfer.OSUtils())
    return client, transfer_client
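A minimal usage sketch, assuming the _s3_get_bucket_location helper referenced above resolves the bucket's region (bucket and object names are hypothetical):

client, transfer_client = _s3_initialize_client('my-example-bucket')
transfer_client.upload_file('/tmp/dump.tar.gz', 'my-example-bucket',
                            'backups/dump.tar.gz')
transfer_client.download_file('my-example-bucket', 'backups/dump.tar.gz',
                              '/tmp/dump-copy.tar.gz')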
def store_new_state(source_file_name, s3_dest_file_name, bucket):
    """Store the new state file to S3.

    Args:
        source_file_name: The name of the local source file.
        s3_dest_file_name: The name of the destination file on S3.
        bucket: The S3 bucket to upload the state file to.
    """
    client = boto3.client("s3", "us-west-2")
    transfer = boto3.s3.transfer.S3Transfer(client)
    # Update the state in the analysis bucket.
    key_path = S3_DATA_PATH + s3_dest_file_name
    transfer.upload_file(source_file_name, bucket, key_path)
def upload_file(s3_client, file_name, bucket_name, args):
    if args['key']:
        key = args['key']
    else:
        key = file_name
    print('Uploading file...')
    try:
        tc = boto3.s3.transfer.TransferConfig()
        t = boto3.s3.transfer.S3Transfer(client=s3_client, config=tc)
        t.upload_file(file_name, bucket_name, key, extra_args={'ACL': 'public-read'})
        file_url = 'https://{}.s3.amazonaws.com/{}'.format(bucket_name, key)
        print('The uploaded file is public and accessible with the following url: {}'.format(file_url))
    except S3UploadFailedError:
        print('File upload was not successful: PutObject permission missing.')
def create_transfer_manager(*arg, **kwargs):
    return transfer.TransferManager(
        *arg, **kwargs, executor_cls=transfer.NonThreadedExecutor
    )
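A minimal sketch of using the helper above, assuming `transfer` is boto3.s3.transfer (bucket and file names are hypothetical). Passing executor_cls=NonThreadedExecutor makes the TransferManager run transfers in the calling thread instead of a thread pool:

import boto3
from boto3.s3 import transfer

s3 = boto3.client('s3')
manager = create_transfer_manager(s3)
future = manager.upload('10mb.txt', 'my-example-bucket', '10mb.txt')
future.result()  # block until the single-threaded upload completes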
def __init__(self, key, metadata, meta_only=False,
             **kwargs):
    super().__init__(metadata, meta_only)
    # Remaining keyword arguments are forwarded to boto3.client() and are
    # expected to include the service name ('s3') and any credentials.
    self.s3 = boto3.client(**kwargs)
    # Unique key specifying processing of a run
    self.strax_unique_key = key
    self.config = boto3.s3.transfer.TransferConfig(max_concurrency=40,
                                                   num_download_attempts=30)
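A minimal sketch of how a TransferConfig like the one above is typically used: boto3's managed transfer methods on the client accept it through the Config parameter (bucket, key, and local path are hypothetical):

import boto3
from boto3.s3.transfer import TransferConfig

config = TransferConfig(max_concurrency=40, num_download_attempts=30)
s3 = boto3.client('s3')
s3.download_file('my-example-bucket', 'runs/000001/raw_records',
                 '/tmp/raw_records', Config=config)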