How to use the stacker.session_cache.get_session function in stacker

To help you get started, we’ve selected a few stacker examples based on popular ways it is used in public projects.

Secure your code as it’s written. Use Snyk Code to scan source code in minutes — no build needed — and fix issues immediately.

github cloudtools / stacker / stacker / lookups / handlers / kms.py View on Github external
and reference it within stacker (NOTE: the path should be relative
            to the stacker config file):

            conf_key: ${kms file://kms_value.txt}

            # Both of the above would resolve to
            conf_key: PASSWORD

        """
        value = read_value_from_path(value)

        region = None
        if "@" in value:
            region, value = value.split("@", 1)

        kms = get_session(region).client('kms')

        # encode str value as an utf-8 bytestring for use with codecs.decode.
        value = value.encode('utf-8')

        # get raw but still encrypted value from base64 version.
        decoded = codecs.decode(value, 'base64')

        # decrypt and return the plain text raw value.
        return kms.decrypt(CiphertextBlob=decoded)["Plaintext"]
github cloudtools / stacker / stacker / lookups / handlers / ssmstore.py View on Github external
and reference it within stacker (NOTE: the path should be relative to
        the stacker config file):

        conf_key: ${ssmstore file://ssmstore_value.txt}

        # Both of the above would resolve to
        conf_key: PASSWORD

    """
    value = read_value_from_path(value)

    region = "us-east-1"
    if "@" in value:
        region, value = value.split("@", 1)

    client = get_session(region).client("ssm")
    response = client.get_parameters(
        Names=[
            value,
        ],
        WithDecryption=True
    )
    if 'Parameters' in response:
        return response['Parameters'][0]['Value']

    raise ValueError('SSMKey "{}" does not exist in region {}'.format(value,
                                                                      region))
github cloudtools / stacker / stacker / lookups / handlers / ami.py View on Github external
e.g. my-ubuntu-server-[0-9]+
    
            executable_users (comma delimited) OPTIONAL ONCE:
                aws_account_id | amazon | self
    
            Any other arguments specified are sent as filters to the aws api
            For example, "architecture:x86_64" will add a filter
        """  # noqa
        value = read_value_from_path(value)

        if "@" in value:
            region, value = value.split("@", 1)
        else:
            region = provider.region

        ec2 = get_session(region).client('ec2')

        values = {}
        describe_args = {}

        # now find any other arguments that can be filters
        matches = re.findall('([0-9a-zA-z_-]+:[^\s$]+)', value)
        for match in matches:
            k, v = match.split(':', 1)
            values[k] = v

        if not values.get('owners'):
            raise Exception("'owners' value required when using ami")
        owners = values.pop('owners').split(',')
        describe_args["Owners"] = owners

        if not values.get('name_regex'):
github cloudtools / stacker / stacker / lookups / handlers / dynamodb.py View on Github external
if not table_name:
        raise ValueError('Please make sure to include a dynamodb table name')

    table_lookup, table_keys = table_keys.split(':', 1)

    table_keys = table_keys.split('.')

    key_dict = _lookup_key_parse(table_keys)
    new_keys = key_dict['new_keys']
    clean_table_keys = key_dict['clean_table_keys']

    projection_expression = _build_projection_expression(clean_table_keys)

    # lookup the data from dynamodb
    dynamodb = get_session(region).client('dynamodb')
    try:
        response = dynamodb.get_item(
            TableName=table_name,
            Key={
                table_lookup: new_keys[0]
            },
            ProjectionExpression=projection_expression
        )
    except ClientError as e:
        if e.response['Error']['Code'] == 'ResourceNotFoundException':
            raise ValueError(
                'Cannot find the dynamodb table: {}'.format(table_name))
        elif e.response['Error']['Code'] == 'ValidationException':
            raise ValueError(
                'No dynamodb record matched the partition key: '
                '{}'.format(table_lookup))
github cloudtools / stacker / stacker / hooks / aws_lambda.py View on Github external
custom_bucket_region,
        context.config.stacker_bucket_region,
        provider.region
    )

    # Check if we should walk / follow symlinks
    follow_symlinks = kwargs.get('follow_symlinks', False)
    if not isinstance(follow_symlinks, bool):
        raise ValueError('follow_symlinks option must be a boolean')

    # Check for S3 object acl. Valid values from:
    # https://docs.aws.amazon.com/AmazonS3/latest/dev/acl-overview.html#canned-acl
    payload_acl = kwargs.get('payload_acl', 'private')

    # Always use the global client for s3
    session = get_session(bucket_region)
    s3_client = session.client('s3')

    ensure_s3_bucket(s3_client, bucket_name, bucket_region)

    prefix = kwargs.get('prefix', '')

    results = {}
    for name, options in kwargs['functions'].items():
        results[name] = _upload_function(s3_client, bucket_name, prefix, name,
                                         options, follow_symlinks, payload_acl)

    return results
github cloudtools / stacker / stacker / providers / aws / default.py View on Github external
"""Get or create the provider for the given region and profile."""

        with self.lock:
            # memoization lookup key derived from region + profile.
            key = "{}-{}".format(profile, region)
            try:
                # assume provider is in provider dictionary.
                provider = self.providers[key]
            except KeyError:
                msg = "Missed memoized lookup ({}), creating new AWS Provider."
                logger.debug(msg.format(key))
                if not region:
                    region = self.region
                # memoize the result for later.
                self.providers[key] = Provider(
                    get_session(region=region, profile=profile),
                    region=region,
                    **self.kwargs
                )
                provider = self.providers[key]

        return provider
github cloudtools / stacker / stacker / util.py View on Github external
klass.__name__,
                             config['key'],
                             config['bucket'])
                dir_name = self.sanitize_uri_path(
                    "s3-%s-%s" % (config['bucket'],
                                  config['key'][:-len(suffix)])
                )
                break

        if extractor is None:
            raise ValueError(
                "Archive type could not be determined for S3 object \"%s\" "
                "in bucket %s." % (config['key'], config['bucket'])
            )

        session = get_session(region=None)
        extra_s3_args = {}
        if config.get('requester_pays', False):
            extra_s3_args['RequestPayer'] = 'requester'

        # We can skip downloading the archive if it's already been cached
        if config.get('use_latest', True):
            try:
                # LastModified should always be returned in UTC, but it doesn't
                # hurt to explicitly convert it to UTC again just in case
                modified_date = session.client('s3').head_object(
                    Bucket=config['bucket'],
                    Key=config['key'],
                    **extra_s3_args
                )['LastModified'].astimezone(dateutil.tz.tzutc())
            except botocore.exceptions.ClientError as client_error:
                logger.error("Error checking modified date of "
github cloudtools / stacker / stacker / hooks / iam.py View on Github external
def create_ecs_service_role(provider, context, **kwargs):
    """Used to create the ecsServieRole, which has to be named exactly that
    currently, so cannot be created via CloudFormation. See:

    http://docs.aws.amazon.com/AmazonECS/latest/developerguide/IAM_policies.html#service_IAM_role

    Args:
        provider (:class:`stacker.providers.base.BaseProvider`): provider
            instance
        context (:class:`stacker.context.Context`): context instance

    Returns: boolean for whether or not the hook succeeded.

    """
    role_name = kwargs.get("role_name", "ecsServiceRole")
    client = get_session(provider.region).client('iam')

    try:
        client.create_role(
            RoleName=role_name,
            AssumeRolePolicyDocument=get_ecs_assumerole_policy().to_json()
        )
    except ClientError as e:
        if "already exists" in str(e):
            pass
        else:
            raise

    policy = Policy(
        Version='2012-10-17',
        Statement=[
            Statement(
github cloudtools / stacker / stacker / hooks / route53.py View on Github external
def create_domain(provider, context, **kwargs):
    """Create a domain (hosted zone) within route53.

    Args:
        provider (:class:`stacker.providers.base.BaseProvider`): provider
            instance
        context (:class:`stacker.context.Context`): context instance

    Keyword Args:
        domain (str): the domain name to create a route53 zone for.

    Returns:
        dict: ``{"domain": <domain>, "zone_id": <zone_id>}`` on success,
        or ``False`` when no domain was provided.

    """
    domain = kwargs.get("domain")
    if not domain:
        # Validate input before creating any AWS clients; there is
        # nothing to do without a domain.
        logger.error("domain argument or BaseDomain variable not provided.")
        return False

    session = get_session(provider.region)
    client = session.client("route53")
    # NOTE(review): create_route53_zone appears to return the zone id for
    # the domain (creating the zone if needed) — confirm against its
    # definition in stacker.
    zone_id = create_route53_zone(client, domain)
    return {"domain": domain, "zone_id": zone_id}