# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def diagnose(self, key):
    """Fetch a gzipped test log from S3, extract its failure summary, and render both as HTML."""
    s3_key = boto.s3.key.Key(self.s3_bucket)
    s3_key.key = key
    compressed = s3_key.get_contents_as_string()
    # Logs are stored gzip-compressed; decompress in memory and decode to text.
    log_text = gzip.GzipFile(fileobj=StringIO(compressed)).read().decode('utf-8')
    summary = parse_test_failure.extract_failure_summary(log_text) or "Unable to diagnose"
    template = Template("""
<h1>Diagnosed failure</h1>
<code><pre>{{ summary|e }}</pre></code>
<h1>Full log</h1>
<code><pre>{{ log_text|e }}</pre></code>
""")
    return self.render_container(template.render(summary=summary, log_text=log_text))
# Fragment of a backup routine (the enclosing function's header is outside this
# view): adds `filename` to an already-open zipfile `z`, then uploads the
# resulting zip to the configured S3 backup bucket and removes both local files.
z.write(filename)
try:
# Pull AWS credentials from Django-style settings; missing attributes abort below.
aws_id = settings.AWS_ID
aws_key = settings.AWS_KEY
aws_bucket_name = settings.AWS_BACKUP_BUCKET_NAME
except:
# NOTE(review): bare except swallows everything (KeyboardInterrupt included);
# should catch AttributeError specifically so real errors are not hidden.
print 'Please configure AWS credentials in settings/local.py!'
return
conn = boto.connect_s3(aws_id, aws_key)
buckets = conn.get_all_buckets()
# Linear scan for the target bucket; upload the zip under its own filename.
for bucket in buckets:
if bucket.name == aws_bucket_name:
k = Key(bucket)
k.key = filename_zip
k.set_contents_from_filename(filename_zip)
break
# Clean up local artifacts after the upload attempt.
# NOTE(review): os.remove would be cheaper/portable vs shelling out to rm.
call(['rm', filename])
call(['rm', filename_zip])
def put_obj_acl(conn, opts):
    """Upload an XML ACL document to an existing S3 object.

    Args:
        conn: boto S3 connection.
        opts: options object with bucket_name, obj_name and putacl_file attributes.

    Returns:
        0 on success.
    """
    print("uploading object ACL to bucket: '%s', object name: '%s'"
          % (opts.bucket_name, opts.obj_name))
    bucket = conn.get_bucket(opts.bucket_name)
    k = Key(bucket)
    k.key = opts.obj_name
    # 'with' guarantees the ACL file is closed even if read() raises,
    # replacing the manual try/finally pair.
    with open(opts.putacl_file, "r") as f:
        xml = f.read()
    k.set_xml_acl(xml)
    return 0
def open_s3(self):
    """Connect to S3 and return a (bucket, key) pair for the storage bucket.

    The bucket is created on the fly if it does not already exist.
    """
    connection = boto.connect_s3(self.AWS_ACCESS_KEY_ID, self.AWS_SECRET_ACCESS_KEY)
    name = self.AWS_STORAGE_BUCKET_NAME
    try:
        bucket = connection.get_bucket(name)
    except boto.exception.S3ResponseError:
        bucket = connection.create_bucket(name)
    return bucket, boto.s3.key.Key(bucket)
def upload_to_S3(bucket, key, file_object):
    """Store file_object in an S3 bucket, make it public, and return its URL.

    Args:
        bucket: S3 bucket to upload into.
        key: key under which the file is stored in the bucket.
        file_object: file-like object holding the data to upload.

    Returns:
        A permanent (non-expiring), unauthenticated HTTP URL for the object.
    """
    s3_key = Key(bucket)
    s3_key.key = key
    s3_key.set_contents_from_file(file_object)
    s3_key.set_acl('public-read')
    # expires_in=0 + query_auth=False yields a plain permanent URL.
    return s3_key.generate_url(expires_in=0, query_auth=False, force_http=True)
def s3_url(user_id, filename, kq_id):
    """Return a non-expiring S3 URL for the object at <user_id>/<kq_id>/<filename>."""
    connection = boto.connect_s3(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
    bucket = connection.get_bucket(settings.AWS_STORAGE_BUCKET_NAME)
    obj = boto.s3.key.Key(bucket)
    obj.key = "%d/%s/%s" % (user_id, kq_id, filename)
    return obj.generate_url(expires_in=0, query_auth=False)
def deploy_json_data(s3_buckets):
    """Gzip the Tumblr JSON output and publish it to each given S3 bucket.

    Args:
        s3_buckets: iterable of S3 bucket names to upload the data file to.

    Side effects:
        Creates and then removes a local <TUMBLR_FILENAME>.gz file.
    """
    tumblr_filename = app_config.TUMBLR_FILENAME
    gz_filename = tumblr_filename + '.gz'
    # Read as bytes: the content is fed straight into a binary gzip stream.
    with open(tumblr_filename, 'rb') as json_output:
        with gzip.open(gz_filename, 'wb') as f:
            f.write(json_output.read())
    # One connection serves every upload; no need to reconnect per bucket.
    conn = boto.connect_s3()
    for bucket_name in s3_buckets:
        bucket = conn.get_bucket(bucket_name)
        key = boto.s3.key.Key(bucket)
        key.key = '%s/live-data/%s-data.json' % (app_config.PROJECT_SLUG, app_config.PROJECT_SLUG)
        key.set_contents_from_filename(
            gz_filename,
            policy='public-read',
            headers={
                # Cache-Control directives are comma-separated (RFC 7234);
                # the original space-separated value was one invalid directive
                # that clients would ignore.
                'Cache-Control': 'max-age=5, no-cache, no-store, must-revalidate',
                'Content-Encoding': 'gzip'
            }
        )
    os.remove(gz_filename)
def download(self, keyname):
    """Fetch the named S3 object into a rewound temporary file and return it."""
    key = Key(self.bucket)
    key.key = keyname
    out = tempfile.TemporaryFile()
    key.get_contents_to_file(out)
    # Rewind so callers can read the payload from the start.
    out.seek(0)
    return out
def open_s3(self):
    """Return a (bucket, key) pair for this instance's AWS bucket.

    Creates the bucket when it does not exist yet.
    """
    conn = boto.connect_s3(self.AWS_ACCESS_KEY_ID, self.AWS_SECRET_ACCESS_KEY)
    bucket_name = self.AWS_BUCKET_NAME
    try:
        bucket = conn.get_bucket(bucket_name)
    except boto.exception.S3ResponseError:
        # get_bucket raises when the bucket is missing; create it instead.
        bucket = conn.create_bucket(bucket_name)
    return bucket, boto.s3.key.Key(bucket)