def test_user_agent_with_existing_user_agent_extra(self):
    config = Config(user_agent_extra='foo/1.0')
    client = ClientFactory({'config': config}).create_client()
    self.assertIn(PROCESS_USER_AGENT, client.meta.config.user_agent)
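# A minimal, hedged sketch (separate from the test harness above) of how
# user_agent_extra is usually attached to a client; 's3', the region, and the
# 'foo/1.0' token are illustrative only.
import boto3
from botocore.config import Config

config = Config(user_agent_extra='foo/1.0')
client = boto3.client('s3', region_name='us-east-1', config=config)
# botocore appends the extra token to the User-Agent header it sends, which is
# what the test above asserts for PROCESS_USER_AGENT.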
def setUp(self):
    super(TestS3SigV4, self).setUp()
    self.client = self.session.create_client(
        's3', self.region, config=Config(signature_version='s3v4'))
    self.http_stubber = ClientHTTPStubber(self.client)
    self.http_stubber.add_response()
def _setup_max_retry_attempts(session, case_configuration):
    config = botocore.config.Config(
        retries={'max_attempts': case_configuration['maxRetries']})
    session.set_default_client_config(config)
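# Hedged usage sketch: how a default client config set on a session flows into
# clients created from it. The service name, region, and retry count below are
# illustrative, not taken from the original test suite.
import botocore.session

session = botocore.session.get_session()
_setup_max_retry_attempts(session, {'maxRetries': 5})
client = session.create_client('dynamodb', region_name='us-east-1')
# Clients created after set_default_client_config() inherit the retry setting.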
try:
    sqs_client.send_message(QueueUrl=queue_url, MessageBody=message)
except Exception as e:
    LOGGER.warning('Unable to send notification for S3 bucket "%s" to SQS queue "%s": %s' %
                   (bucket_name, notif['Queue'], e))
if notif.get('Topic'):
    sns_client = aws_stack.connect_to_service('sns')
    try:
        sns_client.publish(TopicArn=notif['Topic'], Message=message, Subject='Amazon S3 Notification')
    except Exception:
        LOGGER.warning('Unable to send notification for S3 bucket "%s" to SNS topic "%s".' %
                       (bucket_name, notif['Topic']))
# CloudFunction and LambdaFunction are semantically identical
lambda_function_config = notif.get('CloudFunction') or notif.get('LambdaFunction')
if lambda_function_config:
    # make sure we don't run into a socket timeout
    connection_config = botocore.config.Config(read_timeout=300)
    lambda_client = aws_stack.connect_to_service('lambda', config=connection_config)
    try:
        lambda_client.invoke(FunctionName=lambda_function_config,
                             InvocationType='Event', Payload=message)
    except Exception:
        LOGGER.warning('Unable to send notification for S3 bucket "%s" to Lambda function "%s".' %
                       (bucket_name, lambda_function_config))
# any() correctly detects when no destination is configured (a lazy filter
# object would always be truthy)
if not any(notif.get(dest) for dest in NOTIFICATION_DESTINATION_TYPES):
    LOGGER.warning('None of %s defined for S3 notification.' %
                   '/'.join(NOTIFICATION_DESTINATION_TYPES))
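# Hedged illustration of the notification configuration shape the dispatch
# code above consults. Only the keys it actually reads (Queue, Topic,
# CloudFunction/LambdaFunction) are shown; the URL and ARNs are placeholders.
notif = {
    'Queue': 'http://localhost:4566/000000000000/my-queue',
    'Topic': 'arn:aws:sns:us-east-1:000000000000:my-topic',
    'LambdaFunction': 'arn:aws:lambda:us-east-1:000000000000:function:my-fn',
}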
def s3_get(url, temp_file, proxies=None):
    """Pull a file directly from S3."""
    s3_resource = boto3.resource("s3", config=Config(proxies=proxies))
    bucket_name, s3_path = split_s3_path(url)
    s3_resource.Bucket(bucket_name).download_fileobj(s3_path, temp_file)
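# Hedged usage sketch for s3_get: it assumes split_s3_path() turns an
# "s3://bucket/key" URL into (bucket, key). The URL below is a placeholder.
import tempfile

with tempfile.NamedTemporaryFile() as tmp:
    s3_get("s3://my-bucket/models/weights.bin", tmp)
    tmp.seek(0)  # rewind before reading the downloaded bytes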
# Here we a) disable the user's default config so that ec2api works
#    independently of the user's local settings;
# b) specify the region to be used by botocore;
# c) keep the standard botocore environment keys unchanged so these settings
#    can still be picked up from the environment
connection_data = {
    'config_file': (None, 'AWS_CONFIG_FILE', None, None),
    'region': ('region', 'AWS_DEFAULT_REGION', region, None),
}
session = botocore.session.get_session(connection_data)
return session.create_client(
    's3', region_name=region, endpoint_url=CONF.s3_url,
    aws_access_key_id=ec2_creds[0].access,
    aws_secret_access_key=ec2_creds[0].secret,
    config=botocore.config.Config(signature_version='s3v4'))
def _get_botocore_config():
    return botocore.config.Config(
        read_timeout=360,
        retries={
            'max_attempts': 10,
        },
    )
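# Hedged usage sketch: passing the helper's Config to a boto3 client. The
# service name ('s3') is illustrative; any client accepts the same config.
import boto3

s3 = boto3.client('s3', config=_get_botocore_config())
# Calls made through this client wait up to 360s for a response and retries
# are capped by the max_attempts setting above.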
def _resource(name, config):
    boto_config = Config(retries={"max_attempts": BOTO_MAX_RETRIES})
    return boto3.resource(
        name, config["provider"]["region"], config=boto_config)
def invoke_lambda(self, lambda_function_arn, invocation_type, payload,
                  config=Config(retries={'max_attempts': 1})):
    """Invoke a Lambda function.

    `config` is a botocore Config, e.g.
    Config(connect_timeout=1, read_timeout=0.1, retries={'max_attempts': 1}).
    """
    lambda_client = boto3.client('lambda', region_name=self.region_name, config=config)
    lambda_response = lambda_client.invoke(FunctionName=lambda_function_arn,
                                           InvocationType=invocation_type,
                                           Payload=payload)
    if invocation_type == "Event":
        # Async invocations return immediately; there is no payload to decode.
        return lambda_response
    return json.loads(lambda_response.get('Payload').read())
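# Hedged usage sketch: `helper` stands in for an instance of the class that
# defines invoke_lambda above; the ARN and payload are placeholders.
import json

result = helper.invoke_lambda(
    'arn:aws:lambda:us-east-1:123456789012:function:my-function',
    'RequestResponse',
    json.dumps({'key': 'value'}),
)
# With invocation_type='Event' the raw boto3 response is returned instead of
# the decoded payload.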
def _client(name, config):
    boto_config = Config(retries={"max_attempts": BOTO_MAX_RETRIES})
    return boto3.client(name, config["provider"]["region"], config=boto_config)
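# Hedged usage sketch for _client/_resource: both index
# config["provider"]["region"], so the settings dict needs at least that key;
# the region value and service names are placeholders.
settings = {"provider": {"region": "us-east-1"}}
ec2_client = _client("ec2", settings)
ec2_resource = _resource("ec2", settings)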