def setup(self):
    self.helper_configuration = helpers.Configuration()
    cis_environment = os.getenv("CIS_ENVIRONMENT", "development")
    os.environ["CIS_ENVIRONMENT"] = cis_environment
    os.environ["CIS_ASSUME_ROLE_ARN"] = "None"
    self.connection_object = connect.AWS()
    self.connection_object._boto_session = boto3.session.Session(region_name="us-west-2")
    self.idv = self.connection_object.identity_vault_client()
    # u = fake_profile.FakeUser()
    # u = helpers.ensure_appropriate_publishers_and_sign(fake_profile=u, condition="create")
    # u.verify_all_publishers(profile.User(user_structure_json=None))

    fh = open("fixtures/durable.json")
    self.durable_profile = fh.read()
    fh.close()

    self.durable_profiles = []

    logger.info("Loading 10 fake users.")
    for x in range(0, 10):
        fh = open("fixtures/{}.json".format(x))
        self.durable_profiles.append(fh.read())
        fh.close()
def test_storage_provider_aws(release_config_aws, tmpdir):
    s3 = boto3.session.Session().resource('s3')
    bucket = release_config_aws['bucket']
    s3_bucket = s3.Bucket(bucket)
    assert s3_bucket in s3.buckets.all(), (
        "Bucket '{}' must exist with full write access to AWS testing account and created objects must be globally "
        "downloadable from: {}").format(bucket, release_config_aws['download_url'])

    exercise_storage_provider(tmpdir, 'aws_s3', release_config_aws)
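# For reference, the test above only reads two keys from the release_config_aws
# fixture. A minimal stand-in fixture might look like this; the names and values
# here are assumptions for illustration, not the project's real configuration.
import pytest

@pytest.fixture
def release_config_aws():
    # Hypothetical values; only these two keys are used by the test above.
    return {
        'bucket': 'example-release-bucket',
        'download_url': 'https://example-release-bucket.s3.amazonaws.com',
    }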
def syncIdx(bucket,
            dir,
            prefix,
            force,
            s3_uri=None):
    session = boto3.session.Session()
    s3 = session.client(service_name='s3', endpoint_url=s3_uri)

    # Accept either a bare bucket name or an http(s) URL to the bucket.
    if bucket.startswith('http'):
        bucket = bucket.split('/')[2].split('.')[0]
    if prefix.endswith('/'):
        prefix = prefix[:-1]

    # keyMapper and getAllKeys are helpers defined elsewhere in this module;
    # reduce comes from functools and Path from pathlib.
    objects = reduce(keyMapper, getAllKeys(s3, bucket, prefix), {})

    for data, config in objects.items():
        print(f'Loading configs from: {prefix}')
        # Find existing files on file system
        if force:  # we don't care about existing files when forcing overwrite
            fs_files = []
        else:
            fs_list = list(Path(dir).rglob("*.[yY][aA][mM][lL]"))
def __init_connection__(bucket_name):
    try:
        session = boto3.session.Session()
    except botocore.exceptions.BotoCoreError as e:
        print('Cannot establish connection to bucket "%s". Check your credentials.' % bucket_name)
        print(e)
        sys.exit(1)

    s3client = session.client('s3', config=boto3.session.Config(signature_version='s3v4'))
    s3 = session.resource('s3', config=boto3.session.Config(signature_version='s3v4'))
    bucket = s3.Bucket(bucket_name)
    return (s3, s3client, bucket)
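# Quick usage sketch for the helper above; the bucket name is a placeholder.
# The helper returns the S3 resource, the low-level client and the Bucket object.
s3, s3client, bucket = __init_connection__('example-bucket')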
def __init__(self, session=None, connection=None, resource_class=None):
    super(ResourceCollection, self).__init__()

    self._session = session
    self._connection = connection

    if self._session is None:
        import boto3
        self._session = boto3.session

    if self._connection is None:
        self._connection = self._session.connect_to(self.service_name)

    if resource_class is not None:
        self._resource_class = resource_class
    else:
        self._resource_class = self.__class__.resource_class

    self._update_docstrings()
    self._check_api_version()
def s3_client(self):
    # aws_sign_in is a helper defined elsewhere in this project; it returns the profile name to use.
    return boto3.session.Session(profile_name=aws_sign_in(self.aws_profile)).client('s3')
def upsert_secret(event, secret_pwd):
    region_name = event['ResourceProperties']['Region']
    secret_username = event['ResourceProperties']['SecretUserName']
    secret_desc = event['ResourceProperties']['SecretDescription']
    secret_name = event['ResourceProperties']['SecretName']
    secret_string = json.dumps({'username': secret_username, 'password': secret_pwd})

    session = boto3.session.Session()
    client = session.client(
        service_name='secretsmanager',
        region_name=region_name,
    )

    try:
        upsert_secret_response = client.create_secret(
            Name=secret_name,
            Description=secret_desc,
            SecretString=secret_string
        )
        logger.info('***** The requested secret %s has been successfully created *****' % secret_name)
    except ClientError as e:
        if e.response['Error']['Code'] == 'ResourceExistsException':
            try:
                put_secret_value_response = client.put_secret_value(
def get_secret(secret_name, region_name):
    # Create a Secrets Manager client
    session = boto3.session.Session()
    client = session.client(
        service_name='secretsmanager',
        region_name=region_name
    )

    # In this sample we only handle the specific exceptions for the 'GetSecretValue' API.
    # See https://docs.aws.amazon.com/secretsmanager/latest/apireference/API_GetSecretValue.html
    # We rethrow the exception by default.
    try:
        get_secret_value_response = client.get_secret_value(
            SecretId=secret_name
        )
    except ClientError as e:
        if e.response['Error']['Code'] == 'DecryptionFailureException':
            # Secrets Manager can't decrypt the protected secret text using the provided KMS key.
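# The sample above is truncated. In the standard Secrets Manager pattern the
# secret payload is then pulled out of the response roughly like this; this is
# a sketch, not the original file's continuation.
import base64
import json

def parse_secret(get_secret_value_response):
    # String secrets carry the payload in 'SecretString'; binary secrets come
    # back base64-encoded in 'SecretBinary'.
    if 'SecretString' in get_secret_value_response:
        return json.loads(get_secret_value_response['SecretString'])
    return base64.b64decode(get_secret_value_response['SecretBinary'])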
def get_account_id(credentials: dict):
    try:
        sts_client = boto3.session.Session(
            aws_access_key_id=credentials.get('AccessKeyId'),
            aws_secret_access_key=credentials.get('SecretAccessKey'),
            aws_session_token=credentials.get('SessionToken'),
            region_name=credentials.get('Region', 'us-east-1'),
        ).client('sts')  # type: botostubs.STS
        response = sts_client.get_caller_identity()
        return response.get('Account', 'Unavailable')
    except:
        return 'Unavailable'
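# The helper above only reads the keys shown below from its credentials argument,
# so a call might look like this (placeholder values, e.g. from sts.assume_role).
credentials = {
    'AccessKeyId': 'ASIAEXAMPLE',
    'SecretAccessKey': 'example-secret-key',
    'SessionToken': 'example-session-token',
    'Region': 'us-east-1',
}
print(get_account_id(credentials))  # prints the account ID, or 'Unavailable' on failure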
def _get_s3_client(self):
    if self._s3 is None:
        if environ.get('AWS_ACCESS_KEY_ID') and \
                environ.get('AWS_SECRET_ACCESS_KEY'):
            logger.info('Got credentials in environment for client')
            self._s3 = boto3.session.Session(
                aws_access_key_id=environ.get('AWS_ACCESS_KEY_ID'),
                aws_secret_access_key=environ.get('AWS_SECRET_ACCESS_KEY')
            ).client('s3')
        else:
            logger.info('Using stored AWS profile for client')
            self._s3 = boto3.session.Session(
                profile_name=self.aws_name).client('s3')
    return self._s3