Example #1
        'Bucket',
        BucketName=If('HasBucketName', Ref(param_bucket_name),
                      Ref(AWS_NO_VALUE)),
        LifecycleConfiguration=s3.LifecycleConfiguration(Rules=[
            # Add a lifecycle rule to the bucket
            s3.LifecycleRule(
                # Rule attributes
                Id='S3BucketRule1',
                Prefix='',
                Status='Enabled',
                # Applies to current objects
                ExpirationInDays=Ref(param_retire_days),
                Transitions=[
                    s3.LifecycleRuleTransition(
                        StorageClass='STANDARD_IA',
                        TransitionInDays=Ref(param_ia_days),
                    ),
                ],
                # Applies to noncurrent object versions
                # NoncurrentVersionExpirationInDays=90,
                # NoncurrentVersionTransitions=[
                #     s3.NoncurrentVersionTransition(
                #         StorageClass='STANDARD_IA',
                #         TransitionInDays=30,
                #     ),
                # ],
            ),
        ]),
    ))

bucket_policy = t.add_resource(
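
Example #1 is truncated on both ends: it assumes an existing troposphere Template named t, the parameters param_bucket_name, param_ia_days and param_retire_days, and a HasBucketName condition, none of which are shown. A minimal sketch of that assumed setup (parameter names come from the snippet; types, defaults and descriptions are illustrative):

from troposphere import Template, Parameter, Ref, Equals, Not

# Assumed setup for the truncated snippet above; defaults are illustrative.
t = Template()

param_bucket_name = t.add_parameter(Parameter(
    'BucketName', Type='String', Default='',
    Description='Explicit bucket name (blank lets CloudFormation generate one)'))
param_ia_days = t.add_parameter(Parameter(
    'IADays', Type='Number', Default=30,
    Description='Days before current objects transition to STANDARD_IA'))
param_retire_days = t.add_parameter(Parameter(
    'RetireDays', Type='Number', Default=365,
    Description='Days before current objects expire'))

# True when an explicit bucket name was supplied.
t.add_condition('HasBucketName', Not(Equals(Ref(param_bucket_name), '')))
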
Example #2
File: bucket.py  Project: pierretr/e3-aws
    def resources(self, stack: Stack) -> list[AWSObject]:
        """Construct and return a s3.Bucket and its associated s3.BucketPolicy."""
        # Handle versioning configuration
        optional_resources = []
        versioning_status = "Suspended"
        if self.enable_versioning:
            versioning_status = "Enabled"

        # Block all public access
        public_access_block_config = s3.PublicAccessBlockConfiguration(
            BlockPublicAcls=True,
            BlockPublicPolicy=True,
            IgnorePublicAcls=True,
            RestrictPublicBuckets=True,
        )

        # Set default bucket encryption to AES256
        bucket_encryption = None
        if self.default_bucket_encryption:
            bucket_encryption = s3.BucketEncryption(
                ServerSideEncryptionConfiguration=[
                    s3.ServerSideEncryptionRule(
                        ServerSideEncryptionByDefault=s3.ServerSideEncryptionByDefault(
                            SSEAlgorithm=self.default_bucket_encryption.value
                        )
                    )
                ]
            )

        lifecycle_config = None
        if self.lifecycle_rules:
            lifecycle_config = s3.LifecycleConfiguration(
                name_to_id(self.name) + "LifeCycleConfig", Rules=self.lifecycle_rules
            )

        notification_config, notification_resources = self.notification_setup
        optional_resources.extend(notification_resources)

        attr = {}
        for key, val in {
            "BucketName": self.name,
            "BucketEncryption": bucket_encryption,
            "PublicAccessBlockConfiguration": public_access_block_config,
            "VersioningConfiguration": s3.VersioningConfiguration(
                Status=versioning_status
            ),
            "LifecycleConfiguration": lifecycle_config,
            "NotificationConfiguration": notification_config,
            "DependsOn": self.depends_on,
        }.items():
            if val:
                attr[key] = val

        return [
            s3.Bucket(name_to_id(self.name), **attr),
            s3.BucketPolicy(
                name_to_id(self.name) + "Policy",
                Bucket=self.ref,
                PolicyDocument=self.policy_document.as_dict,
            ),
            *optional_resources,
        ]
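
The objects returned by resources() are ordinary troposphere AWSObjects, so outside of an e3-aws Stack they can be rendered with a plain troposphere Template. A sketch, assuming my_bucket is an instance of this construct and stack is the e3-aws Stack it belongs to (neither is shown above):

from troposphere import Template

# Sketch only: my_bucket and stack are assumed to exist; their construction
# is not part of the example above.
template = Template()
for resource in my_bucket.resources(stack):
    template.add_resource(resource)
print(template.to_json())
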
Example #3
    def create_template(self):
        """Create template (main function called by Stacker)."""
        template = self.template
        variables = self.get_variables()
        self.template.add_version('2010-09-09')
        self.template.add_description('Terraform State Resources')

        # Conditions
        for i in ['BucketName', 'TableName']:
            template.add_condition(
                "%sOmitted" % i,
                Or(Equals(variables[i].ref, ''),
                   Equals(variables[i].ref, 'undefined')))

        # Resources
        terraformlocktable = template.add_resource(
            dynamodb.Table(
                'TerraformStateTable',
                AttributeDefinitions=[
                    dynamodb.AttributeDefinition(AttributeName='LockID',
                                                 AttributeType='S')
                ],
                KeySchema=[
                    dynamodb.KeySchema(AttributeName='LockID', KeyType='HASH')
                ],
                ProvisionedThroughput=dynamodb.ProvisionedThroughput(
                    ReadCapacityUnits=2, WriteCapacityUnits=2),
                TableName=If('TableNameOmitted', NoValue,
                             variables['TableName'].ref)))
        template.add_output(
            Output('%sName' % terraformlocktable.title,
                   Description='Name of DynamoDB table for Terraform state',
                   Value=terraformlocktable.ref()))

        terraformstatebucket = template.add_resource(
            s3.Bucket(
                'TerraformStateBucket',
                AccessControl=s3.Private,
                BucketName=If('BucketNameOmitted', NoValue,
                              variables['BucketName'].ref),
                LifecycleConfiguration=s3.LifecycleConfiguration(Rules=[
                    s3.LifecycleRule(NoncurrentVersionExpirationInDays=90,
                                     Status='Enabled')
                ]),
                VersioningConfiguration=s3.VersioningConfiguration(
                    Status='Enabled')))
        template.add_output(
            Output('%sName' % terraformstatebucket.title,
                   Description='Name of bucket storing Terraform state',
                   Value=terraformstatebucket.ref()))
        template.add_output(
            Output('%sArn' % terraformstatebucket.title,
                   Description='Arn of bucket storing Terraform state',
                   Value=terraformstatebucket.get_att('Arn')))

        managementpolicy = template.add_resource(
            iam.ManagedPolicy(
                'ManagementPolicy',
                Description='Managed policy for Terraform state management.',
                Path='/',
                PolicyDocument=PolicyDocument(
                    Version='2012-10-17',
                    Statement=[
                        # https://www.terraform.io/docs/backends/types/s3.html#s3-bucket-permissions
                        Statement(
                            Action=[awacs.s3.ListBucket],
                            Effect=Allow,
                            Resource=[terraformstatebucket.get_att('Arn')]),
                        Statement(
                            Action=[awacs.s3.GetObject, awacs.s3.PutObject],
                            Effect=Allow,
                            Resource=[
                                Join('', [
                                    terraformstatebucket.get_att('Arn'), '/*'
                                ])
                            ]),
                        Statement(Action=[
                            awacs.dynamodb.GetItem, awacs.dynamodb.PutItem,
                            awacs.dynamodb.DeleteItem
                        ],
                                  Effect=Allow,
                                  Resource=[terraformlocktable.get_att('Arn')])
                    ])))
        template.add_output(
            Output('PolicyArn',
                   Description='Managed policy Arn',
                   Value=managementpolicy.ref()))
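
The variables[...].ref lookups in this blueprint imply CloudFormation-typed variable declarations elsewhere in the class. A sketch of the VARIABLES mapping it appears to assume (Stacker CFNString conventions; descriptions and defaults are illustrative):

from stacker.blueprints.variables.types import CFNString

# Assumed variable declarations; 'undefined' matches the sentinel value
# tested by the *Omitted conditions above.
VARIABLES = {
    'BucketName': {'type': CFNString,
                   'default': '',
                   'description': 'S3 bucket for Terraform state'},
    'TableName': {'type': CFNString,
                  'default': '',
                  'description': 'DynamoDB table for Terraform locks'},
}
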
Example #4
    def create_template(self):
        """Create template (main function called by Stacker)."""
        template = self.template
        variables = self.get_variables()
        template.add_version('2010-09-09')
        template.add_description('Static Website - Bucket and Distribution')

        # Conditions
        template.add_condition(
            'AcmCertSpecified',
            And(Not(Equals(variables['AcmCertificateArn'].ref, '')),
                Not(Equals(variables['AcmCertificateArn'].ref, 'undefined'))))
        template.add_condition(
            'AliasesSpecified',
            And(Not(Equals(Select(0, variables['Aliases'].ref), '')),
                Not(Equals(Select(0, variables['Aliases'].ref), 'undefined'))))
        template.add_condition(
            'CFLoggingEnabled',
            And(Not(Equals(variables['LogBucketName'].ref, '')),
                Not(Equals(variables['LogBucketName'].ref, 'undefined'))))
        template.add_condition(
            'DirectoryIndexSpecified',
            And(Not(Equals(variables['RewriteDirectoryIndex'].ref, '')),
                Not(Equals(variables['RewriteDirectoryIndex'].ref,
                           'undefined')))  # noqa
        )
        template.add_condition(
            'WAFNameSpecified',
            And(Not(Equals(variables['WAFWebACL'].ref, '')),
                Not(Equals(variables['WAFWebACL'].ref, 'undefined'))))

        # Resources
        oai = template.add_resource(
            cloudfront.CloudFrontOriginAccessIdentity(
                'OAI',
                CloudFrontOriginAccessIdentityConfig=cloudfront.
                CloudFrontOriginAccessIdentityConfig(  # noqa pylint: disable=line-too-long
                    Comment='CF access to website')))

        bucket = template.add_resource(
            s3.Bucket(
                'Bucket',
                AccessControl=s3.Private,
                LifecycleConfiguration=s3.LifecycleConfiguration(Rules=[
                    s3.LifecycleRule(NoncurrentVersionExpirationInDays=90,
                                     Status='Enabled')
                ]),
                VersioningConfiguration=s3.VersioningConfiguration(
                    Status='Enabled'),
                WebsiteConfiguration=s3.WebsiteConfiguration(
                    IndexDocument='index.html', ErrorDocument='error.html')))
        template.add_output(
            Output('BucketName',
                   Description='Name of website bucket',
                   Value=bucket.ref()))

        allowcfaccess = template.add_resource(
            s3.BucketPolicy(
                'AllowCFAccess',
                Bucket=bucket.ref(),
                PolicyDocument=PolicyDocument(
                    Version='2012-10-17',
                    Statement=[
                        Statement(
                            Action=[awacs.s3.GetObject],
                            Effect=Allow,
                            Principal=Principal(
                                'CanonicalUser',
                                oai.get_att('S3CanonicalUserId')),
                            Resource=[Join('', [bucket.get_att('Arn'), '/*'])])
                    ])))

        cfdirectoryindexrewriterole = template.add_resource(
            iam.Role('CFDirectoryIndexRewriteRole',
                     Condition='DirectoryIndexSpecified',
                     AssumeRolePolicyDocument=PolicyDocument(
                         Version='2012-10-17',
                         Statement=[
                             Statement(Effect=Allow,
                                       Action=[awacs.sts.AssumeRole],
                                       Principal=Principal(
                                           'Service', [
                                               'lambda.amazonaws.com',
                                               'edgelambda.amazonaws.com'
                                           ]))
                         ]),
                     ManagedPolicyArns=[
                         IAM_ARN_PREFIX + 'AWSLambdaBasicExecutionRole'
                     ]))

        cfdirectoryindexrewrite = template.add_resource(
            awslambda.Function(
                'CFDirectoryIndexRewrite',
                Condition='DirectoryIndexSpecified',
                Code=awslambda.Code(ZipFile=Join(
                    '',
                    [
                        "'use strict';\n",
                        "exports.handler = (event, context, callback) => {\n",
                        "\n",
                        "    // Extract the request from the CloudFront event that is sent to Lambda@Edge\n",  # noqa pylint: disable=line-too-long
                        "    var request = event.Records[0].cf.request;\n",
                        "    // Extract the URI from the request\n",
                        "    var olduri = request.uri;\n",
                        "    // Match any '/' that occurs at the end of a URI. Replace it with a default index\n",  # noqa pylint: disable=line-too-long
                        "    var newuri = olduri.replace(/\\/$/, '\\/",
                        variables['RewriteDirectoryIndex'].ref,
                        "');\n",  # noqa
                        "    // Log the URI as received by CloudFront and the new URI to be used to fetch from origin\n",  # noqa pylint: disable=line-too-long
                        "    console.log(\"Old URI: \" + olduri);\n",
                        "    console.log(\"New URI: \" + newuri);\n",
                        "    // Replace the received URI with the URI that includes the index page\n",  # noqa pylint: disable=line-too-long
                        "    request.uri = newuri;\n",
                        "    // Return to CloudFront\n",
                        "    return callback(null, request);\n",
                        "\n",
                        "};\n"
                    ])),
                Description=
                'Rewrites CF directory HTTP requests to default page',  # noqa
                Handler='index.handler',
                Role=cfdirectoryindexrewriterole.get_att('Arn'),
                Runtime='nodejs8.10'))

        # Generating a unique resource name here for the Lambda version, so it
        # updates automatically if the lambda code changes
        code_hash = hashlib.md5(
            str(cfdirectoryindexrewrite.properties['Code'].
                properties['ZipFile'].to_dict()).encode()  # noqa pylint: disable=line-too-long
        ).hexdigest()

        cfdirectoryindexrewritever = template.add_resource(
            awslambda.Version('CFDirectoryIndexRewriteVer' + code_hash,
                              Condition='DirectoryIndexSpecified',
                              FunctionName=cfdirectoryindexrewrite.ref()))

        cfdistribution = template.add_resource(
            cloudfront.Distribution(
                'CFDistribution',
                DependsOn=allowcfaccess.title,
                DistributionConfig=cloudfront.DistributionConfig(
                    Aliases=If('AliasesSpecified', variables['Aliases'].ref,
                               NoValue),
                    Origins=[
                        cloudfront.Origin(
                            DomainName=Join(
                                '.', [bucket.ref(), 's3.amazonaws.com']),
                            S3OriginConfig=cloudfront.S3Origin(
                                OriginAccessIdentity=Join(
                                    '', [
                                        'origin-access-identity/cloudfront/',
                                        oai.ref()
                                    ])),
                            Id='S3Origin')
                    ],
                    DefaultCacheBehavior=cloudfront.DefaultCacheBehavior(
                        AllowedMethods=['GET', 'HEAD'],
                        Compress=False,
                        DefaultTTL='86400',
                        ForwardedValues=cloudfront.ForwardedValues(
                            Cookies=cloudfront.Cookies(Forward='none'),
                            QueryString=False,
                        ),
                        LambdaFunctionAssociations=If(
                            'DirectoryIndexSpecified',
                            [
                                cloudfront.LambdaFunctionAssociation(
                                    EventType='origin-request',
                                    LambdaFunctionARN=cfdirectoryindexrewritever
                                    .ref()  # noqa
                                )
                            ],
                            NoValue),
                        TargetOriginId='S3Origin',
                        ViewerProtocolPolicy='redirect-to-https'),
                    DefaultRootObject='index.html',
                    Logging=If(
                        'CFLoggingEnabled',
                        cloudfront.Logging(Bucket=Join('.', [
                            variables['LogBucketName'].ref, 's3.amazonaws.com'
                        ])), NoValue),
                    PriceClass=variables['PriceClass'].ref,
                    Enabled=True,
                    WebACLId=If('WAFNameSpecified', variables['WAFWebACL'].ref,
                                NoValue),
                    ViewerCertificate=If(
                        'AcmCertSpecified',
                        cloudfront.ViewerCertificate(
                            AcmCertificateArn=variables['AcmCertificateArn'].
                            ref,  # noqa
                            SslSupportMethod='sni-only'),
                        NoValue))))
        template.add_output(
            Output('CFDistributionId',
                   Description='CloudFront distribution ID',
                   Value=cfdistribution.ref()))
        template.add_output(
            Output('CFDistributionDomainName',
                   Description='CloudFront distribution domain name',
                   Value=cfdistribution.get_att('DomainName')))
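
The code_hash logic above exists so that the awslambda.Version logical ID changes whenever the inline Lambda@Edge source changes, forcing CloudFormation to publish a new version. In isolation the pattern looks roughly like this (a standalone sketch with placeholder names, not the blueprint's actual code):

import hashlib

from troposphere import Template, awslambda

# Sketch: embed a hash of the function body in the Version's logical ID so a
# new version is published whenever the inline code changes.
template = Template()
source = "exports.handler = (event, context, callback) => callback(null);"
function = template.add_resource(awslambda.Function(
    'ExampleFunction',
    Code=awslambda.Code(ZipFile=source),
    Handler='index.handler',
    Role='arn:aws:iam::123456789012:role/placeholder-role',  # placeholder
    Runtime='nodejs18.x'))
code_hash = hashlib.md5(source.encode()).hexdigest()
template.add_resource(awslambda.Version(
    'ExampleFunctionVer' + code_hash,
    FunctionName=function.ref()))
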
Example #5

bucket = t.add_resource(
    s3.Bucket('Bucket',
              BucketName=If('HasBucketName', Ref(param_bucket_name),
                            Ref(AWS_NO_VALUE)),
              VersioningConfiguration=s3.VersioningConfiguration(
                  Status=Ref(param_versioning)),
              LifecycleConfiguration=s3.LifecycleConfiguration(Rules=[
                  s3.LifecycleRule(
                      Id='S3BucketRule1',
                      Prefix='',
                      Status='Enabled',
                      Transitions=[
                          s3.LifecycleRuleTransition(
                              StorageClass='STANDARD_IA',
                              TransitionInDays=365,
                          ),
                      ],
                      NoncurrentVersionExpirationInDays=90,
                      NoncurrentVersionTransitions=[
                          s3.NoncurrentVersionTransition(
                              StorageClass='STANDARD_IA',
                              TransitionInDays=30,
                          ),
                      ],
                  ),
              ])))

user = t.add_resource(
    iam.User(
        'BackupUser',
        Policies=[
            iam.Policy(
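
Example #5 breaks off inside iam.Policy. A hypothetical continuation granting the backup user access to the bucket might look like the following; the policy name, actions and use of the bucket resource are assumptions, not part of the original:

from troposphere import GetAtt, Join, iam
from awacs.aws import Allow, PolicyDocument, Statement
import awacs.s3

# Hypothetical inline policy (not from the original source); 'bucket' refers
# to the resource created earlier in this example.
backup_policy = iam.Policy(
    PolicyName='BackupUserS3Access',  # assumed name
    PolicyDocument=PolicyDocument(
        Version='2012-10-17',
        Statement=[
            Statement(
                Effect=Allow,
                Action=[awacs.s3.ListBucket],
                Resource=[GetAtt(bucket, 'Arn')]),
            Statement(
                Effect=Allow,
                Action=[awacs.s3.GetObject, awacs.s3.PutObject],
                Resource=[Join('', [GetAtt(bucket, 'Arn'), '/*'])]),
        ]))
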
Example #6
    def create_template(self):
        """Create template (main function called by Stacker)."""
        template = self.template
        # variables = self.get_variables()
        template.set_version('2010-09-09')
        template.set_description('Static Website - Dependencies')

        # Resources
        awslogbucket = template.add_resource(
            s3.Bucket(
                'AWSLogBucket',
                AccessControl=s3.Private,
                VersioningConfiguration=s3.VersioningConfiguration(
                    Status='Enabled'
                )
            )
        )
        template.add_output(Output(
            'AWSLogBucketName',
            Description='Name of bucket storing AWS logs',
            Value=awslogbucket.ref()
        ))

        template.add_resource(
            s3.BucketPolicy(
                'AllowAWSLogWriting',
                Bucket=awslogbucket.ref(),
                PolicyDocument=Policy(
                    Version='2012-10-17',
                    Statement=[
                        Statement(
                            Action=[awacs.s3.PutObject],
                            Effect=Allow,
                            Principal=AWSPrincipal(Join(':',
                                                        ['arn:aws:iam:',
                                                         AccountId,
                                                         'root'])),
                            Resource=[
                                Join('', ['arn:aws:s3:::',
                                          awslogbucket.ref(),
                                          '/*'])
                            ]
                        )
                    ]
                )
            )
        )
        artifacts = template.add_resource(
            s3.Bucket(
                'Artifacts',
                AccessControl=s3.Private,
                LifecycleConfiguration=s3.LifecycleConfiguration(
                    Rules=[
                        s3.LifecycleRule(
                            NoncurrentVersionExpirationInDays=90,
                            Status='Enabled'
                        )
                    ]
                ),
                VersioningConfiguration=s3.VersioningConfiguration(
                    Status='Enabled'
                )
            )
        )
        template.add_output(Output(
            'ArtifactsBucketName',
            Description='Name of bucket storing artifacts',
            Value=artifacts.ref()
        ))
Example #7
    def _build_template(self, template):

        t = template
        s3b = t.add_resource(s3.Bucket(self.name))

        if self.public_read:
            s3b.AccessControl = s3.PublicRead
            t.add_resource(
                s3.BucketPolicy('{}BucketPolicy'.format(self.name),
                                Bucket=Ref(s3b),
                                PolicyDocument={
                                    "Statement": [{
                                        "Action": ["s3:GetObject"],
                                        "Effect":
                                        "Allow",
                                        "Resource":
                                        Join('',
                                             ["arn:aws:s3:::",
                                              Ref(s3b), "/*"]),
                                        "Principal":
                                        "*"
                                    }]
                                }))

        versioning = "Suspended"

        if self.versioning:
            versioning = "Enabled"

        s3b.VersioningConfiguration = s3.VersioningConfiguration(
            Status=versioning)

        if self.website_mode:
            s3b.WebsiteConfiguration = s3.WebsiteConfiguration(
                **self.website_config)

        if self.cors_enabled is True \
                and len(self.cors_rules) <= 0:
            self.add_cors_rule("CorsAll", ['*'], ['GET', 'POST', 'PUT'], ['*'],
                               3000)

        if len(self.cors_rules) > 0:
            cors = s3.CorsConfiguration(CorsRules=self.cors_rules)
            s3b.CorsConfiguration = cors

        if len(self.lifecycle_rules) > 0:
            s3b.LifecycleConfiguration = s3.LifecycleConfiguration(Rules=[])
            for lcr in self.lifecycle_rules:
                s3b.LifecycleConfiguration.Rules.append(lcr)

        t.add_output([
            Output("{}BucketName".format(self.name),
                   Value=Ref(s3b),
                   Description="{} Bucket Name".format(self.name)),
            Output("{}BucketUrl".format(self.name),
                   Value=GetAtt(s3b, "DomainName"),
                   Description="{} Bucket Name".format(self.name)),
            Output('{}WebsiteUrl'.format(self.name),
                   Value=GetAtt(s3b, 'WebsiteURL'))
        ])

        return s3b
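
add_cors_rule is called but not defined in the excerpt above. A hypothetical implementation consistent with how self.cors_rules is consumed by s3.CorsConfiguration could be (the argument order is inferred from the call site and is an assumption):

# Hypothetical helper (not part of the original class): queue one CORS rule
# for the CorsConfiguration built in _build_template.
def add_cors_rule(self, rule_id, allowed_origins, allowed_methods,
                  allowed_headers, max_age):
    self.cors_rules.append(
        s3.CorsRules(
            Id=rule_id,
            AllowedOrigins=allowed_origins,
            AllowedMethods=allowed_methods,
            AllowedHeaders=allowed_headers,
            MaxAge=max_age,
        ))
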
Example #8
    return (myRule)


lifeCycleRules.append(
    CreateLifeCycleRules("Christina", "/Christina/Pics", "3650", "Enabled"))
lifeCycleRules.append(
    CreateLifeCycleRules("Jorge", "/Jorge/Pics", "3650", "Enabled"))
lifeCycleRules.append(
    CreateLifeCycleRules("Hazel", "/Hazel/Pics", "3650", "Enabled"))

# miscTransitions = []
# lifeCycleRules.append(CreateLifeCycleRules("Misc", "/Misc/Pics", "3650", "Enabled", miscTransitions))
# miscTransitions.append(CreateLifecycleRuleTransition("STANDARD_IA", "90"))
# miscTransitions.append(CreateLifecycleRuleTransition("GLACIER", "365"))

lifeCycleConfiguration = s3.LifecycleConfiguration(Rules=lifeCycleRules)

template.add_resource(
    Bucket("JHSBucket",
           BucketName=Ref(bucketName),
           LifecycleConfiguration=lifeCycleConfiguration))

# Adding an output by passing an Output object
myOutput = Output("JHSBucketName")
myOutput.Description = "This is the name of personal bucket"
myOutput.Value = Ref(bucketName)
template.add_output(myOutput)

# Adding an output directly
template.add_output(
    Output("DirectOutput",
Example #9
    def create_template(self):
        """Create template (main function called by Stacker)."""
        template = self.template
        variables = self.get_variables()
        template.set_version('2010-09-09')
        template.set_description('App - Build Pipeline')

        # Resources
        boundary_arn = Join('', [
            'arn:', Partition, ':iam::', AccountId, ':policy/',
            variables['RolePermissionsBoundaryName'].ref
        ])

        # Repo image limit is 1000 by default; this lambda function will prune
        # old images
        image_param_path = Join(
            '', ['/', variables['AppPrefix'].ref, '/current-hash'])
        image_param_arn = Join('', [
            'arn:', Partition, ':ssm:', Region, ':', AccountId, ':parameter',
            image_param_path
        ])
        ecr_repo_arn = Join('', [
            'arn:', Partition, ':ecr:', Region, ':', AccountId, ':repository/',
            variables['EcrRepoName'].ref
        ])
        cleanuplambdarole = template.add_resource(
            iam.Role('CleanupLambdaRole',
                     AssumeRolePolicyDocument=make_simple_assume_policy(
                         'lambda.amazonaws.com'),
                     ManagedPolicyArns=[
                         IAM_ARN_PREFIX + 'AWSLambdaBasicExecutionRole'
                     ],
                     PermissionsBoundary=boundary_arn,
                     Policies=[
                         iam.Policy(
                             PolicyName=Join(
                                 '',
                                 [variables['AppPrefix'].ref, '-ecrcleanup']),
                             PolicyDocument=PolicyDocument(
                                 Version='2012-10-17',
                                 Statement=[
                                     Statement(Action=[awacs.ssm.GetParameter],
                                               Effect=Allow,
                                               Resource=[image_param_arn]),
                                     Statement(Action=[
                                         awacs.ecr.DescribeImages,
                                         awacs.ecr.BatchDeleteImage
                                     ],
                                               Effect=Allow,
                                               Resource=[ecr_repo_arn])
                                 ]))
                     ]))
        cleanupfunction = template.add_resource(
            awslambda.Function(
                'CleanupFunction',
                Description='Cleanup stale ECR images',
                Code=awslambda.Code(
                    ZipFile=variables['ECRCleanupLambdaFunction']),
                Environment=awslambda.Environment(
                    Variables={
                        'ECR_REPO_NAME': variables['EcrRepoName'].ref,
                        'SSM_PARAM': image_param_path
                    }),
                Handler='index.handler',
                Role=cleanuplambdarole.get_att('Arn'),
                Runtime='python3.6',
                Timeout=120))
        cleanuprule = template.add_resource(
            events.Rule('CleanupRule',
                        Description='Regularly invoke CleanupFunction',
                        ScheduleExpression='rate(7 days)',
                        State='ENABLED',
                        Targets=[
                            events.Target(Arn=cleanupfunction.get_att('Arn'),
                                          Id='CleanupFunction')
                        ]))
        template.add_resource(
            awslambda.Permission(
                'AllowCWLambdaInvocation',
                FunctionName=cleanupfunction.ref(),
                Action=awacs.awslambda.InvokeFunction.JSONrepr(),
                Principal='events.amazonaws.com',
                SourceArn=cleanuprule.get_att('Arn')))

        appsource = template.add_resource(
            codecommit.Repository(
                'AppSource',
                RepositoryName=Join('-',
                                    [variables['AppPrefix'].ref, 'source'])))
        for i in ['Name', 'Arn']:
            template.add_output(
                Output("AppRepo%s" % i,
                       Description="%s of app source repo" % i,
                       Value=appsource.get_att(i)))

        bucket = template.add_resource(
            s3.Bucket(
                'Bucket',
                AccessControl=s3.Private,
                LifecycleConfiguration=s3.LifecycleConfiguration(Rules=[
                    s3.LifecycleRule(NoncurrentVersionExpirationInDays=90,
                                     Status='Enabled')
                ]),
                VersioningConfiguration=s3.VersioningConfiguration(
                    Status='Enabled')))
        template.add_output(
            Output('PipelineBucketName',
                   Description='Name of pipeline bucket',
                   Value=bucket.ref()))

        # This list must be kept in sync between the CodeBuild project and its
        # role
        build_name = Join('', [variables['AppPrefix'].ref, '-build'])

        build_role = template.add_resource(
            iam.Role(
                'BuildRole',
                AssumeRolePolicyDocument=make_simple_assume_policy(
                    'codebuild.amazonaws.com'),
                PermissionsBoundary=boundary_arn,
                Policies=[
                    iam.Policy(
                        PolicyName=Join('', [build_name, '-policy']),
                        PolicyDocument=PolicyDocument(
                            Version='2012-10-17',
                            Statement=[
                                Statement(
                                    Action=[awacs.s3.GetObject],
                                    Effect=Allow,
                                    Resource=[
                                        Join('', [bucket.get_att('Arn'), '/*'])
                                    ]),
                                Statement(
                                    Action=[awacs.ecr.GetAuthorizationToken],
                                    Effect=Allow,
                                    Resource=['*']),
                                Statement(Action=[
                                    awacs.ecr.BatchCheckLayerAvailability,
                                    awacs.ecr.BatchGetImage,
                                    awacs.ecr.CompleteLayerUpload,
                                    awacs.ecr.DescribeImages,
                                    awacs.ecr.GetDownloadUrlForLayer,
                                    awacs.ecr.InitiateLayerUpload,
                                    awacs.ecr.PutImage,
                                    awacs.ecr.UploadLayerPart
                                ],
                                          Effect=Allow,
                                          Resource=[ecr_repo_arn]),
                                Statement(Action=[
                                    awacs.ssm.GetParameter,
                                    awacs.ssm.PutParameter
                                ],
                                          Effect=Allow,
                                          Resource=[image_param_arn]),
                                Statement(Action=[
                                    awacs.logs.CreateLogGroup,
                                    awacs.logs.CreateLogStream,
                                    awacs.logs.PutLogEvents
                                ],
                                          Effect=Allow,
                                          Resource=[
                                              Join('', [
                                                  'arn:', Partition, ':logs:',
                                                  Region, ':', AccountId,
                                                  ':log-group:/aws/codebuild/',
                                                  build_name
                                              ] + x)
                                              for x in [[':*'], [':*/*']]
                                          ])
                            ]))
                ]))

        buildproject = template.add_resource(
            codebuild.Project(
                'BuildProject',
                Artifacts=codebuild.Artifacts(Type='CODEPIPELINE'),
                Environment=codebuild.Environment(
                    ComputeType='BUILD_GENERAL1_SMALL',
                    EnvironmentVariables=[
                        codebuild.EnvironmentVariable(
                            Name='AWS_DEFAULT_REGION',
                            Type='PLAINTEXT',
                            Value=Region),
                        codebuild.EnvironmentVariable(Name='AWS_ACCOUNT_ID',
                                                      Type='PLAINTEXT',
                                                      Value=AccountId),
                        codebuild.EnvironmentVariable(
                            Name='IMAGE_REPO_NAME',
                            Type='PLAINTEXT',
                            Value=variables['EcrRepoName'].ref),
                    ],
                    Image='aws/codebuild/docker:18.09.0',
                    Type='LINUX_CONTAINER'),
                Name=build_name,
                ServiceRole=build_role.get_att('Arn'),
                Source=codebuild.Source(
                    Type='CODEPIPELINE',
                    BuildSpec=variables['BuildProjectBuildSpec'])))

        pipelinerole = template.add_resource(
            iam.Role(
                'PipelineRole',
                AssumeRolePolicyDocument=make_simple_assume_policy(
                    'codepipeline.amazonaws.com'),
                PermissionsBoundary=boundary_arn,
                Policies=[
                    iam.Policy(
                        PolicyName=Join('', [build_name, '-pipeline-policy']),
                        PolicyDocument=PolicyDocument(
                            Version='2012-10-17',
                            Statement=[
                                Statement(
                                    Action=[
                                        awacs.codecommit.GetBranch,
                                        awacs.codecommit.GetCommit,
                                        awacs.codecommit.UploadArchive,
                                        awacs.codecommit.
                                        GetUploadArchiveStatus,  # noqa
                                        awacs.codecommit.CancelUploadArchive
                                    ],  # noqa
                                    Effect=Allow,
                                    Resource=[appsource.get_att('Arn')]),
                                Statement(
                                    Action=[awacs.s3.GetBucketVersioning],
                                    Effect=Allow,
                                    Resource=[bucket.get_att('Arn')]),
                                Statement(
                                    Action=[
                                        awacs.s3.GetObject, awacs.s3.PutObject
                                    ],
                                    Effect=Allow,
                                    Resource=[
                                        Join('', [bucket.get_att('Arn'), '/*'])
                                    ]),
                                Statement(
                                    Action=[
                                        awacs.codebuild.BatchGetBuilds,
                                        awacs.codebuild.StartBuild
                                    ],
                                    Effect=Allow,
                                    Resource=[buildproject.get_att('Arn')])
                            ]))
                ]))

        template.add_resource(
            codepipeline.Pipeline(
                'Pipeline',
                ArtifactStore=codepipeline.ArtifactStore(Location=bucket.ref(),
                                                         Type='S3'),
                Name=build_name,
                RoleArn=pipelinerole.get_att('Arn'),
                Stages=[
                    codepipeline.Stages(
                        Name='Source',
                        Actions=[
                            codepipeline.Actions(
                                Name='CodeCommit',
                                ActionTypeId=codepipeline.ActionTypeId(
                                    Category='Source',
                                    Owner='AWS',
                                    Provider='CodeCommit',
                                    Version='1'),
                                Configuration={
                                    'RepositoryName':
                                    appsource.get_att('Name'),  # noqa
                                    'BranchName': 'master'
                                },
                                OutputArtifacts=[
                                    codepipeline.OutputArtifacts(
                                        Name='CodeCommitRepo')
                                ]),
                        ]),
                    codepipeline.Stages(
                        Name='Build',
                        Actions=[
                            codepipeline.Actions(
                                Name='Build',
                                ActionTypeId=codepipeline.ActionTypeId(
                                    Category='Build',
                                    Owner='AWS',
                                    Provider='CodeBuild',
                                    Version='1'),
                                Configuration={
                                    'ProjectName': buildproject.ref()
                                },
                                InputArtifacts=[
                                    codepipeline.InputArtifacts(
                                        Name='CodeCommitRepo')
                                ])
                        ])
                ]))
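
The list comprehension inside the CloudWatch Logs statement above produces two Join expressions, one for the CodeBuild log group and one for its log streams. Written out with the same Partition, Region, AccountId and build_name names, the Resource list is equivalent to:

# Equivalent expansion of the `for x in [[':*'], [':*/*']]` comprehension.
log_resources = [
    Join('', ['arn:', Partition, ':logs:', Region, ':', AccountId,
              ':log-group:/aws/codebuild/', build_name, ':*']),
    Join('', ['arn:', Partition, ':logs:', Region, ':', AccountId,
              ':log-group:/aws/codebuild/', build_name, ':*/*']),
]
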
Example #10
    def create_template(self):
        """Create template (main function called by Stacker)."""
        template = self.template
        variables = self.get_variables()
        template.add_version('2010-09-09')
        template.add_description('Static Website - Bucket and Distribution')

        # Conditions
        template.add_condition(
            'AcmCertSpecified',
            And(Not(Equals(variables['AcmCertificateArn'].ref, '')),
                Not(Equals(variables['AcmCertificateArn'].ref, 'undefined'))))
        template.add_condition(
            'AliasesSpecified',
            And(Not(Equals(Select(0, variables['Aliases'].ref), '')),
                Not(Equals(Select(0, variables['Aliases'].ref), 'undefined'))))
        template.add_condition(
            'CFLoggingEnabled',
            And(Not(Equals(variables['LogBucketName'].ref, '')),
                Not(Equals(variables['LogBucketName'].ref, 'undefined'))))
        template.add_condition(
            'WAFNameSpecified',
            And(Not(Equals(variables['WAFWebACL'].ref, '')),
                Not(Equals(variables['WAFWebACL'].ref, 'undefined'))))

        # Resources
        oai = template.add_resource(
            cloudfront.CloudFrontOriginAccessIdentity(
                'OAI',
                CloudFrontOriginAccessIdentityConfig=cloudfront.
                CloudFrontOriginAccessIdentityConfig(  # noqa pylint: disable=line-too-long
                    Comment='CF access to website')))

        bucket = template.add_resource(
            s3.Bucket(
                'Bucket',
                AccessControl=s3.Private,
                LifecycleConfiguration=s3.LifecycleConfiguration(Rules=[
                    s3.LifecycleRule(NoncurrentVersionExpirationInDays=90,
                                     Status='Enabled')
                ]),
                VersioningConfiguration=s3.VersioningConfiguration(
                    Status='Enabled'),
                WebsiteConfiguration=s3.WebsiteConfiguration(
                    IndexDocument='index.html', ErrorDocument='error.html')))
        template.add_output(
            Output('BucketName',
                   Description='Name of website bucket',
                   Value=bucket.ref()))

        allowcfaccess = template.add_resource(
            s3.BucketPolicy(
                'AllowCFAccess',
                Bucket=bucket.ref(),
                PolicyDocument=Policy(
                    Version='2012-10-17',
                    Statement=[
                        Statement(
                            Action=[awacs.s3.GetObject],
                            Effect=Allow,
                            Principal=Principal(
                                'CanonicalUser',
                                oai.get_att('S3CanonicalUserId')),
                            Resource=[Join('', [bucket.get_att('Arn'), '/*'])])
                    ])))

        cfdistribution = template.add_resource(
            cloudfront.Distribution(
                'CFDistribution',
                DependsOn=allowcfaccess.title,
                DistributionConfig=cloudfront.DistributionConfig(
                    Aliases=If('AliasesSpecified', variables['Aliases'].ref,
                               NoValue),
                    Origins=[
                        cloudfront.Origin(
                            DomainName=Join(
                                '.', [bucket.ref(), 's3.amazonaws.com']),
                            S3OriginConfig=cloudfront.S3Origin(
                                OriginAccessIdentity=Join(
                                    '', [
                                        'origin-access-identity/cloudfront/',
                                        oai.ref()
                                    ])),
                            Id='S3Origin')
                    ],
                    DefaultCacheBehavior=cloudfront.DefaultCacheBehavior(
                        AllowedMethods=['GET', 'HEAD'],
                        Compress=False,
                        DefaultTTL='86400',
                        ForwardedValues=cloudfront.ForwardedValues(
                            Cookies=cloudfront.Cookies(Forward='none'),
                            QueryString=False,
                        ),
                        TargetOriginId='S3Origin',
                        ViewerProtocolPolicy='redirect-to-https'),
                    DefaultRootObject='index.html',
                    Logging=If(
                        'CFLoggingEnabled',
                        cloudfront.Logging(Bucket=Join('.', [
                            variables['LogBucketName'].ref, 's3.amazonaws.com'
                        ])), NoValue),
                    PriceClass=variables['PriceClass'].ref,
                    Enabled=True,
                    WebACLId=If('WAFNameSpecified', variables['WAFWebACL'].ref,
                                NoValue),
                    ViewerCertificate=If(
                        'AcmCertSpecified',
                        cloudfront.ViewerCertificate(
                            AcmCertificateArn=variables['AcmCertificateArn'].
                            ref,  # noqa
                            SslSupportMethod='sni-only'),
                        NoValue))))
        template.add_output(
            Output('CFDistributionId',
                   Description='CloudFront distribution ID',
                   Value=cfdistribution.ref()))
        template.add_output(
            Output('CFDistributionDomainName',
                   Description='CloudFront distribution domain name',
                   Value=cfdistribution.get_att('DomainName')))
Example #11
    def create_template(self):
        """Create template (main function called by Stacker)."""
        template = self.template
        variables = self.get_variables()
        template.add_version('2010-09-09')
        template.add_description('Sample app')

        # Conditions
        template.add_condition(
            'BucketNameOmitted',
            Or(Equals(variables['BucketName'].ref, ''),
               Equals(variables['BucketName'].ref, 'undefined')))

        # Resources
        bucket = template.add_resource(
            s3.Bucket(
                'Bucket',
                AccessControl=s3.Private,
                BucketName=If('BucketNameOmitted', Ref('AWS::NoValue'),
                              variables['BucketName'].ref),
                LifecycleConfiguration=s3.LifecycleConfiguration(Rules=[
                    s3.LifecycleRule(NoncurrentVersionExpirationInDays=90,
                                     Status='Enabled')
                ]),
                Tags=Tags(application=variables['ApplicationName'].ref,
                          customer=variables['CustomerName'].ref,
                          environment=variables['EnvironmentName'].ref),
                VersioningConfiguration=s3.VersioningConfiguration(
                    Status='Enabled')))
        template.add_output(
            Output('BucketName',
                   Description='Name of bucket',
                   Value=Ref(bucket)))
        template.add_output(
            Output('BucketArn',
                   Description='Arn of bucket',
                   Value=GetAtt(bucket, 'Arn')))

        # https://docs.aws.amazon.com/AmazonS3/latest/dev/UsingServerSideEncryption.html
        template.add_resource(
            s3.BucketPolicy(
                'RequireBucketEncryption',
                Bucket=Ref(bucket),
                PolicyDocument=Policy(
                    Version='2012-10-17',
                    Statement=[
                        Statement(
                            Sid='DenyIncorrectEncryptionHeader',
                            Action=[awacs.s3.PutObject],
                            Condition=Condition(
                                StringNotEquals(
                                    's3:x-amz-server-side-encryption',
                                    'AES256')),
                            Effect=Deny,
                            Principal=Principal('*'),
                            Resource=[Join('',
                                           [GetAtt(bucket, 'Arn'), '/*'])]),
                        Statement(
                            Sid='DenyUnEncryptedObjectUploads',
                            Action=[awacs.s3.PutObject],
                            Condition=Condition(
                                Null('s3:x-amz-server-side-encryption',
                                     'true')),
                            Effect=Deny,
                            Principal=Principal('*'),
                            Resource=[Join('', [GetAtt(bucket, 'Arn'), '/*'])])
                    ])))
Example #12
    def add_resources(self):
        """Add resources to template."""
        template = self.template
        variables = self.get_variables()

        chefbucket = template.add_resource(
            s3.Bucket(
                'ChefBucket',
                AccessControl=s3.Private,
                BucketName=If('ChefBucketNameOmitted', Ref('AWS::NoValue'),
                              variables['ChefBucketName'].ref),
                LifecycleConfiguration=s3.LifecycleConfiguration(Rules=[
                    s3.LifecycleRule(NoncurrentVersionExpirationInDays=90,
                                     Status='Enabled')
                ]),
                VersioningConfiguration=s3.VersioningConfiguration(
                    Status='Enabled')))
        template.add_output(
            Output(
                '%sName' % chefbucket.title,
                Description='Name of bucket storing core Chef configuration',
                Export=Export(
                    Sub('${AWS::StackName}-%sName' % chefbucket.title)),
                Value=Ref(chefbucket)))
        template.add_output(
            Output('%sArn' % chefbucket.title,
                   Description='Arn of bucket storing core Chef configuration',
                   Export=Export(
                       Sub('${AWS::StackName}-%sArn' % chefbucket.title)),
                   Value=GetAtt(chefbucket, 'Arn')))

        chefdatabucket = template.add_resource(
            s3.Bucket('ChefDataBucket',
                      AccessControl=s3.Private,
                      BucketName=If('ChefDataBucketNameOmitted',
                                    Ref('AWS::NoValue'),
                                    variables['ChefDataBucketName'].ref),
                      VersioningConfiguration=s3.VersioningConfiguration(
                          Status='Enabled')))
        template.add_output(
            Output(
                '%sName' % chefdatabucket.title,
                Description='Name of bucket storing extra/restricted Chef data',
                Export=Export(
                    Sub('${AWS::StackName}-'
                        '%sName' % chefdatabucket.title)),
                Value=Ref(chefdatabucket)))
        template.add_output(
            Output(
                '%sArn' % chefdatabucket.title,
                Description='Arn of bucket storing extra/restricted Chef data',
                Export=Export(
                    Sub('${AWS::StackName}-'
                        '%sArn' % chefdatabucket.title)),
                Value=GetAtt(chefdatabucket, 'Arn')))

        # https://docs.aws.amazon.com/AmazonS3/latest/dev/
        #         UsingServerSideEncryption.html
        template.add_resource(
            s3.BucketPolicy(
                'RequireChefDataBucketEncryption',
                Bucket=Ref(chefdatabucket),
                PolicyDocument=Policy(
                    Version='2012-10-17',
                    Statement=[
                        Statement(
                            Sid='DenyIncorrectEncryptionHeader',
                            Action=[awacs.s3.PutObject],
                            Condition=Condition(
                                StringNotEquals(
                                    's3:x-amz-server-side-encryption',
                                    'AES256')),
                            Effect=Deny,
                            Principal=Principal('*'),
                            Resource=[
                                Join('', [GetAtt(chefdatabucket, 'Arn'), '/*'])
                            ]),
                        Statement(
                            Sid='DenyUnEncryptedObjectUploads',
                            Action=[awacs.s3.PutObject],
                            Condition=Condition(
                                Null('s3:x-amz-server-side-encryption',
                                     'true')),
                            Effect=Deny,
                            Principal=Principal('*'),
                            Resource=[
                                Join('', [GetAtt(chefdatabucket, 'Arn'), '/*'])
                            ])
                    ])))
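
The ChefBucketNameOmitted and ChefDataBucketNameOmitted conditions tested above are defined outside this excerpt. A sketch of the companion condition setup, following the same pattern as Examples #3 and #11 (Or and Equals from troposphere), might be:

# Assumed companion method (not shown in the original excerpt): declare the
# *NameOmitted conditions consumed by add_resources.
def add_conditions(self):
    """Add conditions to template."""
    template = self.template
    variables = self.get_variables()

    for name in ['ChefBucketName', 'ChefDataBucketName']:
        template.add_condition(
            '%sOmitted' % name,
            Or(Equals(variables[name].ref, ''),
               Equals(variables[name].ref, 'undefined')))
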