Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def find_nonvpc_domains():
    """Scan enabled regions for Elasticsearch domains with a public endpoint.

    Regions come from boto3's region list for 'es', optionally intersected
    with the comma/space separated REGIONS environment variable.  For each
    domain that exposes a public (non-VPC) endpoint, tags whose value looks
    like a retention period (e.g. '30d', '1y') are gathered.

    NOTE(review): as visible in this excerpt, ``domains``/``endpoint`` and
    the gathered tags are built but never returned -- presumably the rest of
    the function lives outside this view; confirm against the full file.
    """
    domains = []
    enabled_regions = set(boto3.session.Session().get_available_regions('es'))
    requested = os.environ.get('REGIONS')
    if requested:
        # Restrict to the regions the operator asked for, ignoring empty tokens.
        enabled_regions &= {piece for piece in re.split(',| ', requested) if piece != ''}
    for region in enabled_regions:
        client = boto3.client('es', region)
        for name_entry in client.list_domain_names()['DomainNames']:
            retention_tags = []
            info = client.describe_elasticsearch_domain(DomainName=name_entry['DomainName'])
            status = info['DomainStatus']
            if 'Endpoint' not in status:
                # VPC-only domains expose no public endpoint; skip them.
                continue
            endpoint = status['Endpoint']
            tag_response = client.list_tags(ARN=status['ARN'])
            retention_tags.extend(
                tag for tag in tag_response['TagList']
                if re.match(r'\d+[y|m|w|d|h]', tag['Value'])
            )
if __name__ == '__main__':
    # Entry point: optionally zip the source folder, then push the archive to S3.
    args = setup_parser().parse_args()
    # Build an absolute path to the zip file if one not provided.
    archive_path = os.path.join(os.getcwd(), args.zip_file)
    # NOTE(review): `zip` here is a project helper (returns truthiness), not the
    # builtin -- confirm against the full file.
    if not args.upload_only and not zip(args.source_folder, archive_path):
        sys.exit(1)
    if args.aws_credentials is None:
        # No explicit credentials: the default chain (env vars / IAM role) applies.
        session = boto3.session.Session()
    else:
        # Imported lazily so the script stays independent when creds are omitted.
        from lambdautils import create_session
        session = create_session(args.aws_credentials)
    # NOTE(review): upload uses args.zip_file as given, not archive_path -- confirm intended.
    upload_to_s3(session, args.zip_file, args.bucket)
    print('Done.\n')
def setup_session(self):
    """Create the boto3 session, honouring a local ``test_context`` if present.

    When ``self.context`` is a ``test_context`` the profile and region it
    carries are used (local testing) and ``self.test`` is flagged; otherwise
    a default session suitable for the Lambda environment is created.  Any
    failure records ``self.reason``, logs it, and reports FAILED status.
    """
    try:
        running_locally = isinstance(self.context, test_context)
        if running_locally:
            # For testing use profile and region from test_context.
            logger.debug('Using test_context')
            logger.debug("Profile: {}".format(self.context.profile))
            logger.debug("Region: {}".format(self.context.region))
            self.test = True
            self.session = boto3.session.Session(
                profile_name=self.context.profile,
                region_name=self.context.region,
            )
        else:
            # Sets up the session in the Lambda context (default credential chain).
            self.session = boto3.session.Session()
    except Exception as err:
        # Client failed -- record the reason and signal the failure upstream.
        self.reason = "Setup Session Failed: {}".format(err)
        logger.error(self.reason)
        self._send_status(FAILED)
        return
# --- Stack-driven configuration ------------------------------------------
# Apply the log level chosen via the CloudFormation stack outputs.
log.setLevel(stackOutputs['LogLevel'])
log.debug(stackOutputs)
# The opt-in anonymous-metrics flag is delivered as the string 'true'.
if stackOutputs['SendAnonymousData'] == 'true':
    sendAnonymousData = True
# NOTE(review): assumes sendAnonymousData was initialised (presumably False)
# before this excerpt -- confirm against the full file.
log.debug('Setting sendAnonymousData to %s due to CloudFormation stack parameters', sendAnonymousData)
childFunctionArn = stackOutputs['ChildFunctionArn']
laClient = boto3.client('lambda')
# Get all WorkSpaces regions, retrying up to maxRetries times on API errors.
for i in range(0, maxRetries):
    log.debug('Try #%s to get_regions for WorkSpaces', i)
    try:
        wsRegions = boto3.session.Session().get_available_regions('workspaces')
        break
    except botocore.exceptions.ClientError as e:
        log.error(e)
        # Last attempt: give up loudly; otherwise back off briefly before retrying.
        if i >= maxRetries - 1: log.error('Error processing get_regions for WorkSpaces: ExceededMaxRetries')
        else: time.sleep(i/10)
# For each region
for wsRegion in wsRegions:
    regionCount += 1
    # Create a WS Client
    wsClient = boto3.client('workspaces', region_name=wsRegion)
    log.debug('Describing WorkSpace Directories')
    # NOTE(review): the body of this retry loop is outside this excerpt.
    for i in range(0, maxRetries):
def __init__(self, session=None):
    """Store a boto3 session plus ECS and EC2 clients built from it.

    Args:
        session: optional pre-built boto3 session; when omitted (or falsy)
            a default ``boto3.session.Session()`` is created instead.
    """
    session = session or boto3.session.Session()
    self.session = session
    self.ecs_client = session.client('ecs')
    self.ec2_client = session.client('ec2')
def _upload_to_s3(mbtiles_path, session_token, access_id, secret_key, bucket, s3_key):
    ''' Upload MBTiles file to S3 using Mapbox credentials.
    https://www.mapbox.com/developers/api/uploads/#Stage.a.file.on.Amazon.S3
    '''
    # Session built from the Mapbox-staged temporary credentials.
    aws_session = boto3.session.Session(access_id, secret_key, session_token)
    target_bucket = aws_session.resource('s3').Bucket(bucket)
    _L.debug('{} --> {}'.format(mbtiles_path, s3_key))
    target_bucket.upload_file(mbtiles_path, s3_key)
def _get_session(boto3_session, boto3_profile_name):
    """Return an existing session, build one from a profile, or fall back.

    Preference order: an explicit (truthy) session wins, then a named
    profile, and finally the bare ``boto3`` module, whose top-level helpers
    mirror a default session.
    """
    if boto3_session:
        return boto3_session
    return boto3.session.Session(profile_name=boto3_profile_name) if boto3_profile_name else boto3
def apply_s3_inventory():
    """Attach the prepared inventory configuration to the current bucket.

    Relies on module-level state set up by the surrounding driver loop
    (``s3`` client, ``target_policy``, ``account``, ``bucket``).  Returns
    True on success; prints the error and returns False on failure.
    """
    try:
        response = s3.put_bucket_inventory_configuration(**target_policy)
    except Exception as err:
        # Best-effort: report and keep processing the remaining buckets.
        print(account + ' ' + bucket['Name'] + ' Error! S3 Inventory Policy not added:')
        print(err)
        return False
    print(account + ' ' + bucket['Name'] + ' S3 Inventory Policy added, delivering inventory reports to ' + target_policy['InventoryConfiguration']['Destination']['S3BucketDestination']['Bucket'])
    return True
# Per-account map of inventory results; ABORT acts as a global kill switch.
inventories = {}
if ABORT is False:
    for account in accounts:
        # One session per account, using the account name as the AWS profile.
        session = boto3.session.Session(profile_name=account)
        s3 = session.client('s3')
        inventories[account] = []
        # Destination account: explicit CLI override, else the caller's own account.
        if args['--inventory_account'] is not None:
            target_policy['InventoryConfiguration']['Destination']['S3BucketDestination']['AccountId'] = args['--inventory_account']
        else:
            target_policy['InventoryConfiguration']['Destination']['S3BucketDestination']['AccountId'] = session.client('sts').get_caller_identity()['Account']
        # Reports for this account land under a prefix named after the profile.
        target_policy['InventoryConfiguration']['Destination']['S3BucketDestination']['Prefix'] = account
        try:
            bucketlist = s3.list_buckets()
        except Exception as e:
            # Best-effort: report the listing failure and move on.
            print(e)
        else:
            # NOTE(review): the loop body continues beyond this excerpt.
            for bucket in bucketlist["Buckets"]:
Raises
------
UploadException
In the case of a fatal exception during an upload. Note we rely on boto3 to handle its own
retry logic.
Returns
-------
`dict` : JSON results from internal confirm import callback URL
"""
# Lazy imports: boto3 is only needed when an upload actually happens.
import boto3
from boto3.s3.transfer import TransferConfig
from boto3.exceptions import S3UploadFailedError
boto3_session = boto3.session.Session()
# actually do the upload
# NOTE(review): credentials come from the server-supplied `fields` mapping,
# not the local AWS configuration.
client = boto3_session.client(
"s3",
aws_access_key_id=fields["upload_aws_access_key_id"],
aws_secret_access_key=fields["upload_aws_secret_access_key"],
)
# Chunk size chosen per file; see _choose_boto3_chunksize.
multipart_chunksize = _choose_boto3_chunksize(file_obj)
# if boto uses threads, ctrl+c won't work
config = TransferConfig(use_threads=False, multipart_chunksize=multipart_chunksize)
# let boto3 update our progressbar rather than our FASTX wrappers, if applicable
boto_kwargs = {}
# NOTE(review): the branch body is outside this excerpt.
if hasattr(file_obj, "progressbar"):
def _boto3_conn(region, credentials):
    """Build a Secrets Manager client, falling back to profile-only auth.

    Tries the supplied ``credentials`` first; if they are partial or invalid
    but an ``aws_profile`` key was provided, retries with the profile alone.

    Args:
        region: AWS region name for the client.
        credentials: keyword arguments for the client; an ``aws_profile``
            entry, if present, is popped out (the caller's dict is mutated,
            matching the original behaviour) and used as the session profile.

    Returns:
        A boto3 'secretsmanager' client.

    Raises:
        AnsibleError: when no usable credentials remain.  The original
            botocore exception is chained via ``from`` so the root cause is
            preserved in tracebacks (fix: previously it was discarded).
    """
    boto_profile = credentials.pop('aws_profile', None)
    try:
        return boto3.session.Session(profile_name=boto_profile).client('secretsmanager', region, **credentials)
    except (botocore.exceptions.ProfileNotFound, botocore.exceptions.PartialCredentialsError) as cred_err:
        if not boto_profile:
            # Nothing to fall back on: surface the failure, keeping the cause.
            raise AnsibleError("Insufficient credentials found.") from cred_err
    # Explicit credentials were unusable; retry with the profile alone.
    try:
        return boto3.session.Session(profile_name=boto_profile).client('secretsmanager', region)
    except (botocore.exceptions.ProfileNotFound, botocore.exceptions.PartialCredentialsError) as profile_err:
        raise AnsibleError("Insufficient credentials found.") from profile_err