def test_download_requires_string_filename(self):
    transfer = S3Transfer(client=mock.Mock())
    with self.assertRaises(ValueError):
        transfer.download_file(bucket='foo', key='bar', filename=object())
def setUp(self):
    self.client = mock.Mock()
    # Mock specced against a real TransferManager so attribute access is validated.
    self.manager = mock.Mock(TransferManager(self.client))
    self.transfer = S3Transfer(manager=self.manager)
    self.callback = mock.Mock()
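A hedged companion test built on this fixture (not part of the original suite): it checks that upload_file delegates to the mocked TransferManager; the argument values are placeholders.

def test_upload_file_delegates_to_manager(self):
    # Sketch only: 'myfile', 'mybucket', and 'mykey' are placeholder arguments.
    self.transfer.upload_file('myfile', 'mybucket', 'mykey')
    self.manager.upload.assert_called_once()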
import boto3
import boto3.s3.transfer
from boto3.exceptions import S3UploadFailedError

def upload_files(s3_client, filenames, bucket_name):
    print('Uploading files to the bucket {}...'.format(bucket_name))
    for f in filenames:
        try:
            # Use the last two path components (parent dir + basename) as the object key.
            parts = f.split('/')[-2:]
            key = parts[0] + '/' + parts[1]
            tc = boto3.s3.transfer.TransferConfig()
            t = boto3.s3.transfer.S3Transfer(client=s3_client, config=tc)
            t.upload_file(f, bucket_name, key, extra_args={'ACL': 'public-read'})
            file_url = 'https://{}.s3.amazonaws.com/{}'.format(bucket_name, key)
            print('The uploaded file is public and accessible at the following URL:\n {}'.format(file_url))
        except S3UploadFailedError:
            print('File upload failed: PutObject permission missing.')
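For context, a minimal usage sketch of the helper above; the client, file paths, and bucket name are illustrative placeholders, not from the original source:

s3 = boto3.client('s3')
upload_files(s3, ['images/cats/1.png', 'images/dogs/2.png'], 'my-example-bucket')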
# Save the full-size ('raw') center crop as a PNG.
with Image(filename=file) as img:
    crop_center(img)
    img.format = 'png'
    img.save(filename=os.path.join(UPLOAD_FOLDER, content_type, filename_template % (image_id, 'raw')))

# Generate each configured thumbnail size.
for (name, size) in sizes:
    with Image(filename=file) as img:
        crop_center(img)
        img.sample(size, size)
        img.format = 'png'
        img.save(filename=os.path.join(UPLOAD_FOLDER, content_type, filename_template % (image_id, name)))

# Upload the rendered images to S3 and delete the local copies.
if AWS_BUCKET:
    s3 = boto3.client('s3')
    transfer = S3Transfer(s3)
    transfer.upload_file(
        os.path.join(UPLOAD_FOLDER, content_type, filename_template % (image_id, 'raw')),
        AWS_BUCKET,
        os.path.join(content_type, filename_template % (image_id, 'raw')),
        extra_args={'ACL': 'public-read', 'ContentType': 'image/png'}
    )
    os.remove(os.path.join(UPLOAD_FOLDER, content_type, filename_template % (image_id, 'raw')))
    for (name, size) in sizes:
        transfer.upload_file(
            os.path.join(UPLOAD_FOLDER, content_type, filename_template % (image_id, name)),
            AWS_BUCKET,
            os.path.join(content_type, filename_template % (image_id, name)),
            extra_args={'ACL': 'public-read', 'ContentType': 'image/png'}
        )
        os.remove(os.path.join(UPLOAD_FOLDER, content_type, filename_template % (image_id, name)))
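The snippet assumes a crop_center helper that is not shown in the excerpt. A plausible sketch using Wand's crop API (an assumption, not the original implementation):

def crop_center(img):
    # Assumed helper: crop the largest centered square from a Wand image in place.
    side = min(img.width, img.height)
    left = (img.width - side) // 2
    top = (img.height - side) // 2
    img.crop(left, top, width=side, height=side)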
def download(bucket, key, file_path, session=None):
    """Download a file from S3 to the given path."""
    s3_client = _get_client(session)
    transfer = S3Transfer(s3_client)
    transfer.download_file(bucket, key, file_path)
    return file_path
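_get_client is not shown in the excerpt; a hedged guess at its shape, assuming it builds an S3 client from an optional boto3 session:

def _get_client(session=None):
    # Assumed helper: fall back to the default session when none is supplied.
    if session is None:
        return boto3.client('s3')
    return session.client('s3')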
# Create the intermediate and intermediate_tmp directories.
os.makedirs(intermediate_path)
os.makedirs(tmp_dir_path)

# Configure a unique S3 output location similar to how the SageMaker platform
# does it, or link it to the local output directory.
url = urlparse(s3_output_location)
if url.scheme == "file":
    logger.debug("Local directory is used for output. No need to sync any intermediate output.")
    return
elif url.scheme != "s3":
    raise ValueError("Expecting 's3' scheme, got: %s in %s" % (url.scheme, url))

# Create the S3 transfer client.
client = boto3.client("s3", region)
s3_transfer = s3transfer.S3Transfer(client)
s3_uploader = {
    "transfer": s3_transfer,
    "bucket": url.netloc,
    "key_prefix": os.path.join(
        url.path.lstrip("/"), os.environ.get("TRAINING_JOB_NAME", ""), "output", "intermediate"
    ),
}

# Add the intermediate folder to the watch list.
inotify = inotify_simple.INotify()
watch_flags = inotify_simple.flags.CLOSE_WRITE | inotify_simple.flags.CREATE
watchers = {}
wd = inotify.add_watch(intermediate_path, watch_flags)
watchers[wd] = ""

# Start a subprocess to sync any files from the intermediate folder to S3.
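The excerpt ends where the sync worker would begin. A hedged sketch of what that loop might look like, reading inotify events and uploading closed files through the uploader configured above; the function name and control flow here are assumptions, not the original code:

def _sync_intermediate(inotify, watchers, s3_uploader, intermediate_path):
    # Assumed worker loop: upload each file once it is closed after writing.
    while True:
        for event in inotify.read(timeout=1000):
            if event.wd not in watchers:
                continue
            relative_dir = watchers[event.wd]
            local_path = os.path.join(intermediate_path, relative_dir, event.name)
            if os.path.isfile(local_path):
                key = os.path.join(s3_uploader["key_prefix"], relative_dir, event.name)
                s3_uploader["transfer"].upload_file(local_path, s3_uploader["bucket"], key)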