Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
Arg:
stream_arn (str): A kinesis or dynamodb stream arn.
Returns:
list: A list of statements.
"""
action_type = get_stream_action_type(stream_arn)
arn_parts = stream_arn.split("/")
# Cut off the last bit and replace it with a wildcard
wildcard_arn_parts = arn_parts[:-1]
wildcard_arn_parts.append("*")
wildcard_arn = "/".join(wildcard_arn_parts)
return [
Statement(
Effect=Allow,
Resource=[stream_arn],
Action=[
action_type("DescribeStream"),
action_type("GetRecords"),
action_type("GetShardIterator"),
]
),
Statement(
Effect=Allow,
Resource=[wildcard_arn],
Action=[action_type("ListStreams")]
)
Statement(
Effect=Allow,
Action=[
Action('acm', 'AddTagsToCertificate'),
Action('acm', 'DeleteCertificate'),
Action('acm', 'DescribeCertificate'),
Action('acm', 'RemoveTagsFromCertificate'),
],
Resource=[Sub('arn:aws:acm:*:${AWS::AccountId}:certificate/*')],
),
Statement(
Effect=Allow,
Action=[
Action('acm', 'RequestCertificate'),
Action('acm', 'ListTagsForCertificate'),
Action('acm', 'ListCertificates')
],
Resource=['*']
),
],
),
)
],
)
)
if CERTIFICATE_LAMBDA not in template.resources:
with open(pkgutil.get_loader('troposphere_dns_certificate.certificate').get_filename()) as f:
code = python_minifier.awslambda(f.read(), entrypoint='handler')
template.add_resource(
Action=[ AssumeRole ],
Principal=Principal("Service", ['codepipeline.amazonaws.com']),
)
]
)
)
pipeline_policy_statement_list = [
Statement(
Sid='CodePipelineAccess',
Effect=Allow,
Action=[
Action('codepipeline', '*'),
Action('sns', 'Publish'),
Action('s3', 'ListAllMyBuckets'),
Action('s3', 'GetBucketLocation'),
Action('iam', 'ListRoles'),
Action('iam', 'PassRole'),
],
Resource=[ '*' ]
),
Statement(
Sid='KMSCMK',
Effect=Allow,
Action=[
Action('kms', 'Decrypt'),
],
Resource=[ troposphere.Ref(self.cmk_arn_param) ]
),
]
# S3.Source Action requires more generous permissions on the Artifacts S3 Bucket
if self.s3_source_enabled:
pipeline_policy_statement_list.append(
awacs.kms.ListGrants,
awacs.kms.RevokeGrant,
],
Resource=["*"],
Condition=Condition(Bool("kms:GrantIsForAWSResource", True))
)
)
if key_admin_arns:
statements.append(
Statement(
Sid="Allow access for Key Administrators",
Effect=Allow,
Principal=AWSPrincipal(key_admin_arns),
Action=[
Action("kms", "Create*"),
Action("kms", "Describe*"),
Action("kms", "Enable*"),
Action("kms", "List*"),
Action("kms", "Put*"),
Action("kms", "Update*"),
Action("kms", "Revoke*"),
Action("kms", "Disable*"),
Action("kms", "Get*"),
Action("kms", "Delete*"),
Action("kms", "ScheduleKeyDeletion"),
Action("kms", "CancelKeyDeletion"),
],
Resource=["*"],
)
)
],
Resource=[
troposphere.Sub('arn:aws:s3:::${ArtifactsBucketName}/*'),
troposphere.Sub('arn:aws:s3:::${ArtifactsBucketName}')
]
),
)
else:
pipeline_policy_statement_list.append(
Statement(
Sid='S3Access',
Effect=Allow,
Action=[
Action('s3', 'PutObject'),
Action('s3', 'GetBucketPolicy'),
Action('s3', 'GetObject'),
Action('s3', 'ListBucket'),
],
Resource=[
troposphere.Sub('arn:aws:s3:::${ArtifactsBucketName}/*'),
troposphere.Sub('arn:aws:s3:::${ArtifactsBucketName}')
]
),
)
if self.lambda_invoke_enabled:
pipeline_policy_statement_list.append(
Statement(
Sid='LambdaInvoke',
Effect=Allow,
Action=[
Action('lambda', 'InvokeFunction'),
],
Action=[
Action('codebuild', 'BatchGetBuilds'),
Action('codebuild', 'StartBuild')
],
Resource=[ troposphere.Ref(self.codebuild_project_arn_param) ]
)
)
if self.ecr_source_enabled:
# Add Statement to allow ECR
pipeline_policy_statement_list.append(
Statement(
Sid='ECRPullAccess',
Effect=Allow,
Action=[
Action('ecr', 'Describe*'),
Action('ecr', 'List*'),
Action('ecr', 'Get*'),
],
Resource=['*']
)
)
if self.codecommit_source_enabled:
# Add Statements to allow CodeCommit if a CodeCommit.Source is enabled
pipeline_policy_statement_list.append(
Statement(
Sid='CodeCommitAssumeRole',
Effect=Allow,
Action=[
Action('sts', 'AssumeRole'),
],
Resource=[ troposphere.Ref(self.codecommit_role_arn_param) ]
)
def logstream_policy():
    """Build the IAM policy needed for logspout -> kinesis log streaming.

    Returns:
        Policy: a policy whose single statement allows creating/describing
        Kinesis streams, tagging them, and putting records, on any resource.
    """
    # Actions required by logspout to create and write to a Kinesis stream.
    stream_actions = [
        kinesis.CreateStream,
        kinesis.DescribeStream,
        Action(kinesis.prefix, "AddTagsToStream"),
        Action(kinesis.prefix, "PutRecords"),
    ]
    statement = Statement(
        Effect=Allow,
        Resource=["*"],
        Action=stream_actions,
    )
    return Policy(Statement=[statement])
SubnetId=Ref(subnetid_param)
)
template.add_resource(efs_mount_target)
# Create the policy that allows the instance to describe file systems and tags,
# so it can lookup the file system using AWS tags. An alternative would be to
# pass in the FileSystem name as UserData.
efs_host_role = Role(
"EFSHostRole",
AssumeRolePolicyDocument=PolicyDocument(
Statement=[
Statement(
Effect=Allow,
Action=[
Action('elasticfilesystem', 'DescribeFileSystems'),
Action('elasticfilesystem', 'DescribeTags')
],
Resource=["*"]
)
]
)
)
template.add_resource(efs_host_role)
efs_host_instance_profile = InstanceProfile(
"EFSInstanceProfile",
Roles=[Ref(efs_host_role)]
)
template.add_resource(efs_host_instance_profile)
# And finally the EC2 instance.
ec2_instance = Instance(
Action('s3', 'List*'),
],
Resource=[
troposphere.Sub('arn:aws:s3:::${ArtifactsBucketName}/*'),
troposphere.Sub('arn:aws:s3:::${ArtifactsBucketName}')
]
),
)
else:
pipeline_policy_statement_list.append(
Statement(
Sid='S3Access',
Effect=Allow,
Action=[
Action('s3', 'PutObject'),
Action('s3', 'GetBucketPolicy'),
Action('s3', 'GetObject'),
Action('s3', 'ListBucket'),
],
Resource=[
troposphere.Sub('arn:aws:s3:::${ArtifactsBucketName}/*'),
troposphere.Sub('arn:aws:s3:::${ArtifactsBucketName}')
]
),
)
if self.lambda_invoke_enabled:
pipeline_policy_statement_list.append(
Statement(
Sid='LambdaInvoke',
Effect=Allow,
Action=[
Action('lambda', 'InvokeFunction'),