def download(directory):
    mark_uploaded(cache_name)  # reset
    try:
        print("Downloading {} tarball from S3...".format(cache_name))
        with timer():
            key.get_contents_to_filename(_tarball_filename_for(directory))
    except S3ResponseError as err:
        mark_needs_uploading(cache_name)
        raise SystemExit("Cached {} download failed!".format(cache_name))
    print("Downloaded {}.".format(_tarball_size(directory)))
    _extract_tarball(directory)
    print("{} successfully installed from cache.".format(cache_name))
def get_specified_bucket(sitename):
    """
    Returns the bucket for the specified sitename.
    """
    from boto.s3.connection import S3Connection
    from boto.exception import S3ResponseError

    conn = S3Connection()
    try:
        bucket = conn.get_bucket(sitename)
        bucket.get_website_configuration()
    except S3ResponseError:
        raise NoSuchBucket
    return bucket
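A hedged usage sketch: `NoSuchBucket` is an exception class assumed to be defined in the surrounding project, and the site name is a placeholder.

# Hypothetical caller; NoSuchBucket comes from the surrounding project.
try:
    bucket = get_specified_bucket("www.example.com")
    print("Found website bucket: {}".format(bucket.name))
except NoSuchBucket:
    print("No S3 website bucket configured for that site name.")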
    :type headers: dict
    :param headers: Headers to pass in the web request

    :type query_args: string
    :param query_args: Arguments to pass in the query string (ie, 'torrent')
    """
    if self.resp is None:
        self.mode = 'r'
        self.resp = self.bucket.connection.make_request(
            'GET', self.bucket.name, self.name, headers,
            query_args=query_args)
        if self.resp.status < 199 or self.resp.status > 299:
            body = self.resp.read()
            raise S3ResponseError(self.resp.status, self.resp.reason, body)
        response_headers = self.resp.msg
        provider = self.bucket.connection.provider
        self.metadata = boto.utils.get_aws_metadata(response_headers,
                                                    provider)
        for name, value in response_headers.items():
            if name.lower() == 'content-length':
                self.size = int(value)
            elif name.lower() == 'etag':
                self.etag = value
            elif name.lower() == 'content-type':
                self.content_type = value
            elif name.lower() == 'content-encoding':
                self.content_encoding = value
            elif name.lower() == 'last-modified':
                self.last_modified = value
            elif name.lower() == 'cache-control':
                self.cache_control = value
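The fragment above is boto's low-level read path inside `Key`; application code normally goes through the higher-level helpers. A minimal sketch, with placeholder bucket and key names, showing that a successful GET leaves those header-derived attributes populated on the `Key`:

import boto
from boto.exception import S3ResponseError

conn = boto.connect_s3()
try:
    key = conn.get_bucket('example-bucket').get_key('example.txt')
    data = key.get_contents_as_string()
    # After the read, boto has filled in the header-derived attributes.
    print("{} bytes, etag={}, type={}".format(key.size, key.etag, key.content_type))
except S3ResponseError as err:
    print("GET failed: {} {} ({})".format(err.status, err.reason, err.error_code))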
def _get_or_create_bucket(self, name):
    """Retrieves a bucket if it exists, otherwise creates it."""
    try:
        return self.connection.get_bucket(name)
    except S3ResponseError as e:
        if AUTO_CREATE_BUCKET:
            return self.connection.create_bucket(name)
        raise ImproperlyConfigured("Bucket specified by "
            "AWS_STORAGE_BUCKET_NAME does not exist. Buckets can be "
            "automatically created by setting AWS_AUTO_CREATE_BUCKET to True.")
def get_xml_acl(self, key_name='', headers=None):
    response = self.connection.make_request('GET', self.name, key_name,
                                            query_args='acl', headers=headers)
    body = response.read()
    if response.status != 200:
        raise S3ResponseError(response.status, response.reason, body)
    return body
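A hedged usage sketch: bucket and key names below are placeholders, and the ACL comes back as raw XML text rather than a parsed object.

import boto
from boto.exception import S3ResponseError

bucket = boto.connect_s3().get_bucket('example-bucket')
try:
    print(bucket.get_xml_acl())                      # ACL of the bucket itself
    print(bucket.get_xml_acl('reports/2020.csv'))    # ACL of a specific key (placeholder)
except S3ResponseError as err:
    print("ACL lookup failed: {} {}".format(err.status, err.reason))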
def _s3_set(args, keyname, filename):
    try:
        bucket = _s3_bucket(args)
    except S3ResponseError:
        bucket = boto.connect_s3(args.accesskey, args.secretkey).create_bucket(
            args.bucket,
            location=Location.DEFAULT
        )
    k = Key(bucket)
    k.key = keyname
    k.set_contents_from_filename(filename)
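A hedged calling sketch: the snippet assumes module-level imports plus an `_s3_bucket(args)` helper from the original tool, so both the helper and the argparse-style namespace below are stand-ins, and the credentials and bucket name are placeholders.

import argparse
import boto
from boto.s3.key import Key
from boto.s3.connection import Location
from boto.exception import S3ResponseError

# Placeholder credentials/bucket; the real tool parses these from the CLI.
args = argparse.Namespace(accesskey='AKIA...', secretkey='...',
                          bucket='example-upload-bucket')

def _s3_bucket(args):
    # Hypothetical helper matching the call above: look the bucket up and let
    # S3ResponseError propagate so _s3_set can create the bucket on demand.
    return boto.connect_s3(args.accesskey, args.secretkey).get_bucket(args.bucket)

_s3_set(args, 'backups/site.tar.gz', '/tmp/site.tar.gz')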
def smart_download_s3_keys(s3conn, bucket_name, prefix, target, creds):
    start = time.time()
    try:
        bucket = s3conn.get_bucket(bucket_name, validate=False)
        key = bucket.get_key(prefix)
        if key.size > 10*1024*1024:
            print "File > 10Mb: downloading with s3 cli"
            duration = fast_download_s3_keys(creds, bucket_name, prefix, target)
        else:
            print "File < 10Mb: using get_contents_to_file"
            key.get_contents_to_filename(target)
            duration = time.time() - start
    except boto.exception.S3ResponseError as e:
        print "ERROR: Caught S3ResponseError: ", e
        return -1
    except Exception as e:
        print "ERROR: Could not access the bucket"
        raise
    return duration
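A hedged calling sketch: `fast_download_s3_keys` and the shape of `creds` come from the original project and are only stubbed here, and the bucket and key names are placeholders.

import time
import boto
import boto.exception

def fast_download_s3_keys(creds, bucket_name, prefix, target):
    # Hypothetical stand-in for the original helper, which hands large objects
    # to the AWS CLI; here it simply reuses boto for illustration.
    start = time.time()
    boto.connect_s3(*creds).get_bucket(bucket_name, validate=False) \
        .get_key(prefix).get_contents_to_filename(target)
    return time.time() - start

creds = ('AKIA...', '...')    # placeholder access/secret key pair
s3conn = boto.connect_s3(*creds)
elapsed = smart_download_s3_keys(s3conn, 'example-data-bucket',
                                 'exports/latest.csv.gz', '/tmp/latest.csv.gz', creds)
print "Download took %.1fs" % elapsed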
def get_acl(self, key_name='', headers=None, version_id=None):
    query_args = 'acl'
    if version_id:
        query_args += '&versionId=%s' % version_id
    response = self.connection.make_request('GET', self.name, key_name,
                                            query_args=query_args,
                                            headers=headers)
    body = response.read()
    if response.status == 200:
        policy = Policy(self)
        h = handler.XmlHandler(policy, self)
        xml.sax.parseString(body, h)
        return policy
    else:
        raise S3ResponseError(response.status, response.reason, body)
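Unlike `get_xml_acl`, this method parses the response into a `boto.s3.acl.Policy`. A hedged sketch of walking the grants on a bucket; the bucket name is a placeholder.

import boto
from boto.exception import S3ResponseError

bucket = boto.connect_s3().get_bucket('example-bucket')
try:
    policy = bucket.get_acl()
    for grant in policy.acl.grants:
        # The grantee may be a canonical user, a group URI, or an e-mail address.
        print("{} -> {}".format(grant.permission,
                                grant.display_name or grant.uri or grant.email_address))
except S3ResponseError as err:
    print("Could not read ACL: {} {}".format(err.status, err.reason))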
def _get_bucket(self, bucket_name):
    """ Sometimes a handle to a bucket is not established right away so try
    it a few times. Raise an error if the connection is not established. """
    for i in range(5):
        try:
            bucket = self.conn.get_bucket(bucket_name)
            log.debug("Using cloud object store with bucket '%s'", bucket.name)
            return bucket
        except S3ResponseError:
            try:
                log.debug("Bucket not found, creating s3 bucket with handle '%s'", bucket_name)
                self.conn.create_bucket(bucket_name)
            except S3ResponseError:
                log.exception("Could not get bucket '%s', attempt %s/5", bucket_name, i + 1)
                time.sleep(2)
    # All the attempts have been exhausted and connection was not established,
    # raise error
    raise S3ResponseError
    try:
        i = 0
        for k in bucket.list():
            i = i + 1
            keys.append(k.key)
            if i == 10:
                break
        issues.append('READ<ul><li>%s</li></ul>' % '<li>'.join(keys))
    except S3ResponseError as error:
        print('Error Code (list): ' + str(error.error_code))
        if self.wordlist_path:
            enumerate_keys(bucket, bucket_name, 'GS')
    try:
        key = bucket.new_key('test.txt')
        key.set_contents_from_string('')
        issues.append('WRITE<ul><li>test.txt</li></ul>')
    except S3ResponseError as error:
        print('Error Code (set_contents_from_string): ' + str(error.error_code))
    try:
        bucket.add_email_grant('FULL_CONTROL', 0)
        issues.append('FULL_CONTROL')
    except S3ResponseError as error:
        if error.error_code == 'UnresolvableGrantByEmailAddress':
            issues.append('FULL_CONTROL')
        else:
            print('Error Code (add_email_grant): ' + str(error.error_code))
    except AttributeError as error:
        if error.message.startswith("'Policy'"):
            issues.append('FULL_CONTROL')
        else:
            raise
elif bucket_type == 'Azure':