Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def retrieve_from_amazon(self, bucket, object):
    """Fetch an object from Amazon S3 and return its raw bytes.

    A module-level client is created lazily on first call and reused by
    every subsequent call, so the connection/retry settings are applied
    exactly once.

    Args:
        bucket: Bucket to retrieve file from.
        object: File object (key) to retrieve.

    Returns:
        A byte string containing the file content.
    """
    global s3_client
    if s3_client is None:
        client_config = botocore.client.Config(
            connect_timeout=conf.remote_cfg["remote_timeout"],
            read_timeout=conf.remote_cfg["remote_timeout"],
            region_name=conf.remote_cfg["aws_default_region"],
            retries={"max_attempts": conf.remote_cfg["remote_retries"]},
        )
        s3_client = boto3.client(
            "s3",
            aws_access_key_id=conf.remote_cfg["aws_access_key_id"],
            aws_secret_access_key=conf.remote_cfg["aws_secret_access_key"],
            config=client_config,
        )
    response = s3_client.get_object(Bucket=bucket, Key=object)
    return response['Body'].read()
def DeleteImageHandlerUI(deploy_config):
    """Delete the Serverless Image Handler UI assets from S3.

    Expected dict entries:
        deploy_config['UIBucket']: bucket holding the UI assets.
        deploy_config['UIPrefix']: key prefix under which the UI lives.

    Raises:
        Re-raises any listing/deletion error after logging it.
    """
    log.info("Deleting Serverless Image Handler UI from %s/%s", deploy_config['UIBucket'], deploy_config['UIPrefix'])
    try:
        s3 = boto3.client("s3", config=Config(signature_version='s3v4'))
        log.info("Listing UI objects in %s/%s", deploy_config['UIBucket'], deploy_config['UIPrefix'])
        # S3 omits the 'Contents' key entirely when nothing matches the
        # prefix; default to an empty list so an already-empty UI prefix
        # does not blow up with a KeyError.
        listing = s3.list_objects(Bucket=deploy_config['UIBucket'], Prefix=deploy_config['UIPrefix'])
        for s3object in listing.get('Contents', []):
            log.info("Deleting %s/%s", deploy_config['UIBucket'], s3object['Key'])
            s3.delete_object(Bucket=deploy_config['UIBucket'], Key=s3object['Key'])
        # Finally remove the prefix placeholder object itself.
        log.info("Deleting %s/%s", deploy_config['UIBucket'], deploy_config['UIPrefix'])
        s3.delete_object(Bucket=deploy_config['UIBucket'], Key=deploy_config['UIPrefix'])
    except Exception as e:
        log.error("Error deleting UI. Error: %s", e)
        raise
def process_multiple_documents(bucket, documents):
    """Run Amazon Textract text detection on each named document in *bucket*."""
    # Textract throttles aggressively; allow up to 5 retry attempts.
    textract_config = Config(retries = dict(max_attempts = 5))
    # Amazon Textract client
    textract = boto3.client('textract', config=textract_config)
    for document_name in documents:
        print("\nProcessing: {}\n==========================================".format(document_name))
        # Call Amazon Textract against the S3-resident document.
        response = textract.detect_document_text(
            Document={'S3Object': {'Bucket': bucket, 'Name': document_name}},
        )
def __upload_attachment(attachment, jira_issue):
    # Copies a previously sanitized attachment from S3 onto a Jira issue.
    # NOTE(review): this is Python 2 code (print statement, StringIO module);
    # it will not run under Python 3 without changes.
    s3_client = boto3.client('s3', config=Config(signature_version='s3v4'))
    # The attachment's 'id' doubles as the S3 object key.
    key = attachment.get('id', '')
    try:
        response = s3_client.get_object(Bucket = custom_resource_utils.get_embedded_physical_id(CloudCanvas.get_setting(constants.SANITIZED_BUCKET)), Key = key)
    except Exception as e:
        # Best-effort: if the sanitized copy cannot be fetched, log and skip
        # this attachment rather than failing the whole upload.
        print "Unable to GET the sanitized attachment. Key==>", key
        return
    # Buffer the S3 body in memory so the Jira client can read it as a file.
    new_attachment = StringIO.StringIO()
    new_attachment.write(response['Body'].read())
    # Filename is reconstructed as "<name>.<extension>" from the metadata.
    attachment_object = get_jira_client().add_attachment(
        issue = jira_issue,
        attachment = new_attachment,
        filename = ('{}.{}').format(attachment.get('name', ''), attachment.get('extension', '')))
log.error("Couldn't connect to Amazon EC2 because: %s" %
e.error_message)
elif self.cloud_type == "Eucalyptus":
try:
log.verbose("Created a connection to Eucalyptus (%s)" % self.name)
except boto.exception.EC2ResponseError, e:
log.error("Couldn't connect to Eucalyptus EC2 because: %s" %
e.error_message)
elif self.cloud_type.lower() == "opennebula":
try:
connection = boto3.client('ec2', region_name=self.regions, endpoint_url=self.host,
aws_access_key_id=self.access_key_id, aws_secret_access_key=self.secret_access_key,
config=botocore.client.Config(signature_version='v2'))
log.verbose("Created a connection to OpenNebula.")
except Exception as e:
log.error("Couldn't connect to OpenNebula: %s" % e.error_message)
elif self.cloud_type == "OpenStack":
try:
log.debug("Use the boto2 interface for OpenStack ec2 accounts.")
#log.verbose("Created a connection to OpenStack (%s)" % self.name)
return None
except boto.exception.EC2ResponseError, e:
log.error("Couldn't connect to OpenStack because: %s" %
e.error_message)
else:
log.error("BotoCluster don't know how to handle a %s cluster." %
self.cloud_type)
def _create_xray_client(self, ip='127.0.0.1', port='2000'):
    """Build an unsigned botocore X-Ray client pointed at a local daemon.

    The endpoint defaults to the standard X-Ray daemon address; requests
    are unsigned, so the empty credentials are deliberate.
    """
    endpoint = 'http://%s:%s' % (ip, port)
    session = botocore.session.get_session()
    return session.create_client(
        'xray',
        endpoint_url=endpoint,
        region_name='us-west-2',
        config=Config(signature_version=UNSIGNED),
        aws_access_key_id='',
        aws_secret_access_key='',
    )
"s3",
aws_access_key_id=aws_access_key_id,
aws_secret_access_key=aws_secret_access_key,
region_name=region,
)
if self.region and self.region != "us-east-1":
self.location = self.region
else:
scheme = "https" if is_secure else "http"
custom_url = "{scheme}://{host}:{port}".format(scheme=scheme, host=host, port=port)
if self.region:
signature_version = "s3v4"
self.location = self.region
else:
signature_version = "s3"
custom_config = botocore.client.Config(
s3={'addressing_style': 'path'},
signature_version=signature_version,
)
self.s3_client = botocore_session.create_client(
"s3",
aws_access_key_id=aws_access_key_id,
aws_secret_access_key=aws_secret_access_key,
config=custom_config,
endpoint_url=custom_url,
region_name=region,
verify=is_verify_tls,
)
self.check_or_create_bucket()
self.multipart_chunk_size = segment_size
kwarg. See :py:meth:`boto3.session.Session.client`.
:type client: botocore.client.BaseClient
:param client: a low-level service client for swf.
See :py:meth:`boto3.session.Session.client`
:type config: botocore.client.Config
:param config: custom config to instantiate the 'swf' client; by default
it sets connection and read timeouts to 70 sec
:type kwargs: dict
:param kwargs: kwargs for passing to client initialisation. The config
param can be overwritten here
"""
kwargs = kwargs if isinstance(kwargs, dict) else {}
config = config or Config(connect_timeout=70, read_timeout=70)
kwargs.setdefault('config', config)
self.client = client or boto3.client('swf', **kwargs)
session = mk_boto_session(profile=profile,
creds=creds,
region_name=region_name)
extras = {} # type: Dict[str, Any]
if creds is not None:
extras.update(aws_access_key_id=creds.access_key,
aws_secret_access_key=creds.secret_key,
aws_session_token=creds.token)
if region_name is not None:
extras['region_name'] = region_name
return session.create_client('s3',
use_ssl=use_ssl,
**extras,
config=botocore.client.Config(**cfg))
)
logger.error(message)
except Exception as e:
logger.error('There was a problem checking status of Athena query..')
logger.error('Glue job Run Id "{}"'.format(athena_query_execution_id))
logger.error('Reason: {}'.format(e.message))
logger.info('Checking next Athena query.')
# Task failed, next item
# Module-level AWS clients, created once per Lambda container and reused
# across invocations of handler().
athena = boto3.client('athena')
# Because Step Functions client uses long polling, read timeout has to be > 60 seconds
sfn_client_config = Config(connect_timeout=50, read_timeout=70)
sfn = boto3.client('stepfunctions', config=sfn_client_config)
dynamodb = boto3.resource('dynamodb')
# Load logging config and create logger
logger = load_log_config()
def handler(event, context):
logger.debug('*** Athena Runner lambda function starting ***')
try:
# Get config (including a single activity ARN) from local file
config = load_config()
# One round of starting Athena queries