Example #1
def convert_yaml_node_to_troposphere(node):
    if node.tag == '!Sub':
        if type(node.value) == type(str()):
            # ScalarNode - single argument only
            return troposphere.Sub(node.value)
        else:
            values = {}
            for map_node in node.value[1:]:
                if map_node.tag != yaml_map_tag:
                    raise TroposphereConversionError(
                        "Substitue variables for !Sub must be mappings.")
                values[map_node.value[0][0].value] = map_node.value[0][1].value
            return troposphere.Sub(node.value[0].value, **values)

    elif node.tag == '!Ref':
        return troposphere.Ref(node.value)
    elif node.tag == '!Join':
        delimiter = node.value[0].value
        values = []
        for item_node in node.value[1].value:
            values.append(convert_yaml_node_to_troposphere(item_node))
        return troposphere.Join(delimiter, values)

    elif node.tag == yaml_str_tag:
        return node.value
    else:
        raise TroposphereConversionError(
            "Unknown YAML to convert to Troposphere")
Example #2
    def run(self):
        puppet_version = constants.VERSION
        description = f"""Bootstrap template used to configure spoke account for stack use
        {{"version": "{puppet_version}", "framework": "servicecatalog-puppet", "role": "bootstrap-spoke-stack"}}"""

        template = t.Template(Description=description)

        template.add_resource(
            iam.Role(
                "PuppetStackRole",
                RoleName="PuppetStackRole",
                ManagedPolicyArns=[
                    t.Sub(
                        "arn:${AWS::Partition}:iam::aws:policy/AdministratorAccess"
                    )
                ],
                Path=config.get_puppet_role_path(),
                AssumeRolePolicyDocument={
                    "Version":
                    "2012-10-17",
                    "Statement": [{
                        "Action": ["sts:AssumeRole"],
                        "Effect": "Allow",
                        "Principal": {
                            "Service": ["cloudformation.amazonaws.com"]
                        },
                    }],
                },
            ))

        self.write_output(template.to_yaml(), skip_json_dump=True)
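As a quick reference for the pattern used throughout these snippets, a minimal sketch (illustrative, assuming troposphere is imported as t as above) of what t.Sub renders to:

import troposphere as t

policy_arn = t.Sub("arn:${AWS::Partition}:iam::aws:policy/AdministratorAccess")
print(policy_arn.to_dict())
# -> {'Fn::Sub': 'arn:${AWS::Partition}:iam::aws:policy/AdministratorAccess'}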
Example #3
    def user_delegate_role_and_policies(self, user, permissions_list):
        "Create and add an account delegate Role to the template"
        user_arn = 'arn:aws:iam::{}:user/{}'.format(self.master_account_id,
                                                    user.username)
        assume_role_res = troposphere.iam.Role(
            "UserAccountDelegateRole",
            RoleName="IAM-User-Account-Delegate-Role-{}".format(
                self.create_resource_name(user.name,
                                          filter_id='IAM.Role.RoleName')),
            AssumeRolePolicyDocument=PolicyDocument(
                Version="2012-10-17",
                Statement=[
                    Statement(Effect=Allow,
                              Action=[AssumeRole],
                              Principal=Principal("AWS", [user_arn]),
                              Condition=Condition(
                                  [AWACSBool({MultiFactorAuthPresent: True})]))
                ]))
        # Iterate over permissions and create the delegate role policies
        for permission_config in permissions_list:
            init_method = getattr(
                self,
                "init_{}_permission".format(permission_config.type.lower()))
            init_method(permission_config, assume_role_res)

        self.template.add_resource(assume_role_res)
        self.template.add_output(
            troposphere.Output(
                title='SigninUrl',
                Value=troposphere.Sub(
                    'https://signin.aws.amazon.com/switchrole?account=${AWS::AccountId}&roleName=${UserAccountDelegateRole}'
                )))
Example #4
    def get_alarm_description(self, notification_cfn_refs):
        """Create an Alarm Description in JSON format with Paco Alarm information"""
        project = get_parent_by_interface(self, schemas.IProject)
        netenv = get_parent_by_interface(self, schemas.INetworkEnvironment)
        env = get_parent_by_interface(self, schemas.IEnvironment)
        envreg = get_parent_by_interface(self, schemas.IEnvironmentRegion)
        app = get_parent_by_interface(self, schemas.IApplication)
        group = get_parent_by_interface(self, schemas.IResourceGroup)
        resource = get_parent_by_interface(self, schemas.IResource)

        # SNS Topic ARNs are supplied as Parameter Refs
        topic_arn_subs = []
        sub_dict = {}
        for action_ref in notification_cfn_refs:
            ref_id = action_ref.data['Ref']
            topic_arn_subs.append('${%s}' % ref_id)
            sub_dict[ref_id] = action_ref

        # Base alarm info - used for standalone alarms not part of an application
        description = {
            "project_name": project.name,
            "project_title": project.title,
            "account_name": self.account_name,
            "alarm_name": self.name,
            "classification": self.classification,
            "severity": self.severity,
            "topic_arns": topic_arn_subs
        }

        # conditional fields:
        if self.description:
            description['description'] = self.description
        if self.runbook_url:
            description['runbook_url'] = self.runbook_url

        if app != None:
            # Service applications and apps not part of a NetEnv
            description["app_name"] = app.name
            description["app_title"] = app.title
        if group != None:
            # Application level Alarms do not have resource group and resource
            description["resource_group_name"] = group.name
            description["resource_group_title"] = group.title
            description["resource_name"] = resource.name
            description["resource_title"] = resource.title

        if netenv != None:
            # NetEnv information
            description["netenv_name"] = netenv.name
            description["netenv_title"] = netenv.title
            description["env_name"] = env.name
            description["env_title"] = env.title
            description["envreg_name"] = envreg.name
            description["envreg_title"] = envreg.title

        description_json = json.dumps(description)

        return troposphere.Sub(description_json, sub_dict)
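A standalone sketch of the substitution pattern above (the parameter name and alarm fields are illustrative assumptions): troposphere.Sub takes the template string plus a mapping of substitution variables and renders to the two-element Fn::Sub form.

import json
import troposphere

sub_dict = {'AlarmTopic': troposphere.Ref('AlarmTopicArnParam')}
description_json = json.dumps({
    'alarm_name': 'HighCPU',
    'topic_arns': ['${AlarmTopic}'],
})
body = troposphere.Sub(description_json, sub_dict)
# body.to_dict() is roughly
# {'Fn::Sub': [description_json, {'AlarmTopic': {'Ref': 'AlarmTopicArnParam'}}]}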
Example #5
    def __init__(self, stack, paco_ctx):
        dashboard = stack.resource
        super().__init__(stack, paco_ctx)
        self.set_aws_name('Dashboard', self.resource_group_name, self.resource.name)
        self.init_template('CloudWatch Dashboard')

        if not dashboard.is_enabled(): return

        # Parameters for variables
        if dashboard.variables:
            for key, value in dashboard.variables.items():
                if type(value) == type(str()):
                    param_type = 'String'
                elif type(value) == type(int()) or type(value) == type(float()):
                    param_type = 'Number'
                else:
                    raise UnsupportedCloudFormationParameterType(
                        "Can not cast {} of type {} to a CloudFormation Parameter type.".format(
                            value, type(value)
                        )
                    )
                variable_param = self.create_cfn_parameter(
                    param_type=param_type,
                    name=key,
                    description='Dashboard {} Variable'.format(key),
                    value=value
                )

        # Region Parameter
        region_param = self.create_cfn_parameter(
            param_type='String',
            name='AwsRegion',
            description='Dashboard Region Variable',
            value=self.aws_region
        )

        # Dashboard resource
        dashboard_logical_id = 'Dashboard'
        body = troposphere.Sub(dashboard.dashboard_file)
        cfn_export_dict = {
            'DashboardBody': body,
            'DashboardName': dashboard.title_or_name
        }
        dashboard_resource = troposphere.cloudwatch.Dashboard.from_dict(
            dashboard_logical_id,
            cfn_export_dict
        )
        self.template.add_resource(dashboard_resource)
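A brief sketch of why no explicit mapping is needed here (the dashboard JSON and names are illustrative): when Fn::Sub is given only a string, placeholders such as ${AwsRegion} resolve against parameters, resources and pseudo parameters with that logical name in the same template.

import troposphere

dashboard_json = '{"widgets": [], "region": "${AwsRegion}"}'
body = troposphere.Sub(dashboard_json)
# At deploy time ${AwsRegion} is replaced with the value of the AwsRegion parameter.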
Example #6
    def init_manual_approval_action(self, template, action_config):
        self.manual_approval_is_enabled = action_config.is_enabled()
        # Manual Approval Deploy Action
        subscription_list = []
        for approval_email in action_config.manual_approval_notification_email:
            email_hash = utils.md5sum(str_data=approval_email)
            manual_approval_notification_email_param = self.create_cfn_parameter(
                param_type='String',
                name='ManualApprovalNotificationEmail'+email_hash,
                description='Email to send notifications to when a deployment requires approval.',
                value=approval_email,
            )
            subscription_list.append(
                troposphere.sns.Subscription(
                    Endpoint=troposphere.Ref(manual_approval_notification_email_param),
                    Protocol = 'email'
                )
            )

        manual_approval_sns_res = troposphere.sns.Topic(
            title = 'ManualApprovalSNSTopic',
            template=template,
            Condition = 'ManualApprovalIsEnabled',
            TopicName = troposphere.Sub('${ResourceNamePrefix}-Approval'),
            Subscription = subscription_list
        )
        manual_deploy_action = troposphere.codepipeline.Actions(
            Name='Approval',
            ActionTypeId = troposphere.codepipeline.ActionTypeId(
                Category = 'Approval',
                Owner = 'AWS',
                Version = '1',
                Provider = 'Manual'
            ),
            Configuration = {
                'NotificationArn': troposphere.Ref(manual_approval_sns_res),
            },
            RunOrder = action_config.run_order
        )
        manual_deploy_action = troposphere.If(
            'ManualApprovalIsEnabled',
            manual_deploy_action,
            troposphere.Ref('AWS::NoValue')
        )

        return manual_deploy_action
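A minimal sketch of the conditional-inclusion idiom above (the condition name mirrors the snippet; the action dict is a stand-in): Fn::If paired with AWS::NoValue drops the entry entirely when the condition evaluates to false.

import troposphere

maybe_action = troposphere.If(
    'ManualApprovalIsEnabled',
    {'Name': 'Approval'},               # used when the condition evaluates to true
    troposphere.Ref('AWS::NoValue'),    # pseudo parameter that removes the entry
)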
Example #7
def generate_migrate_role_template(
    role_name: str,
    path: str,
    assuming_account_id: str,
    assuming_resource: str,
    ssm_parameter_prefix: str,
) -> troposphere.Template:
    return generate_role_template(
        "migrate",
        [
            awacs_organizations.CreateOrganizationalUnit,
            awacs_organizations.UpdateOrganizationalUnit,
            awacs_organizations.MoveAccount,
            awacs_organizations.CreatePolicy,
            awacs_organizations.UpdatePolicy,
            awacs_organizations.AttachPolicy,
            awacs_organizations.ListPolicies,
            awacs_organizations.RegisterDelegatedAdministrator,
            awacs_organizations.DeregisterDelegatedAdministrator,
        ],
        role_name,
        path,
        assuming_account_id,
        assuming_resource,
        [
            aws.Statement(
                Sid="2",
                Effect=aws.Allow,
                Action=[
                    awscs_ssm.GetParameter,
                    awscs_ssm.PutParameter,
                    awscs_ssm.AddTagsToResource,
                ],
                Resource=[
                    troposphere.Sub(
                        awscs_ssm.ARN(
                            resource=
                            f"parameter{ssm_parameter_prefix}/migrations/*",
                            account="${AWS::AccountId}",
                            region="${AWS::Region}",
                        ))
                ],
            )
        ],
    )
Example #8
    def add_pipeline_service_role(self):
        "Create a CodePipeline Service Role resource and add it to the template"
        self.pipeline_service_role_name = self.create_iam_resource_name(
            name_list=[self.res_name_prefix, 'CodePipeline-Service'],
            filter_id='IAM.Role.RoleName')
        pipeline_service_role_res = troposphere.iam.Role(
            title='CodePipelineServiceRole',
            template=self.template,
            RoleName=self.pipeline_service_role_name,
            AssumeRolePolicyDocument=PolicyDocument(
                Version="2012-10-17",
                Statement=[
                    Statement(
                        Effect=Allow,
                        Action=[AssumeRole],
                        Principal=Principal("Service",
                                            ['codepipeline.amazonaws.com']),
                    )
                ]))
        pipeline_policy_statement_list = [
            Statement(Sid='CodePipelineAccess',
                      Effect=Allow,
                      Action=[
                          Action('codepipeline', '*'),
                          Action('sns', 'Publish'),
                          Action('s3', 'ListAllMyBuckets'),
                          Action('s3', 'GetBucketLocation'),
                          Action('iam', 'ListRoles'),
                          Action('iam', 'PassRole'),
                      ],
                      Resource=['*']),
            Statement(
                Sid='S3Access',
                Effect=Allow,
                Action=[
                    Action('s3', 'PutObject'),
                    Action('s3', 'GetBucketPolicy'),
                    Action('s3', 'GetObject'),
                    Action('s3', 'ListBucket'),
                ],
                Resource=[
                    troposphere.Sub('arn:aws:s3:::${ArtifactsBucketName}/*'),
                    troposphere.Sub('arn:aws:s3:::${ArtifactsBucketName}')
                ]),
            Statement(Sid='KMSCMK',
                      Effect=Allow,
                      Action=[
                          Action('kms', 'Decrypt'),
                      ],
                      Resource=[troposphere.Ref(self.cmk_arn_param)]),
        ]
        if self.lambda_invoke_enabled:
            pipeline_policy_statement_list.append(
                Statement(
                    Sid='LambdaInvoke',
                    Effect=Allow,
                    Action=[
                        Action('lambda', 'InvokeFunction'),
                    ],
                    Resource=['*'],
                ))
        if self.codebuild_access_enabled:
            pipeline_policy_statement_list.append(
                Statement(Sid='CodeBuildAccess',
                          Effect=Allow,
                          Action=[
                              Action('codebuild', 'BatchGetBuilds'),
                              Action('codebuild', 'StartBuild')
                          ],
                          Resource=[
                              troposphere.Ref(self.codebuild_project_arn_param)
                          ]))
        if self.codecommit_source_enabled:
            # Add Statements to allow CodeCommit if a CodeCommit.Source is enabled
            pipeline_policy_statement_list.append(
                Statement(Sid='CodeCommitAssumeRole',
                          Effect=Allow,
                          Action=[
                              Action('sts', 'AssumeRole'),
                          ],
                          Resource=[
                              troposphere.Ref(self.codecommit_role_arn_param)
                          ]))
            pipeline_policy_statement_list.append(
                Statement(Sid='CodeCommitAccess',
                          Effect=Allow,
                          Action=[
                              Action('codecommit', 'List*'),
                              Action('codecommit', 'Get*'),
                              Action('codecommit', 'GitPull'),
                              Action('codecommit', 'UploadArchive'),
                              Action('codecommit', 'CancelUploadArchive'),
                          ],
                          Resource=[
                              troposphere.Ref(self.codecommit_repo_arn_param),
                          ]))
        if self.github_source_enabled:
            # Add Statement to allow GitHub if a GitHub.Source is enabled
            cmk_arn_param = self.create_cfn_parameter(
                param_type='String',
                name='SourceCMKArn',
                description='The CMK Arn',
                value=self.pipeline.paco_ref + '.kms.arn',
            )
            pipeline_policy_statement_list.append(
                Statement(Sid='CMK',
                          Effect=Allow,
                          Action=[
                              Action('kms', '*'),
                          ],
                          Resource=[troposphere.Ref(cmk_arn_param)]))

        if self.codedeploy_deploy_assume_role_statement != None:
            pipeline_policy_statement_list.append(
                self.codedeploy_deploy_assume_role_statement)
        if self.s3_deploy_assume_role_statement != None:
            pipeline_policy_statement_list.append(
                self.s3_deploy_assume_role_statement)
        for statement in self.s3_deploy_statements:
            pipeline_policy_statement_list.append(statement)
        troposphere.iam.PolicyType(
            title='CodePipelinePolicy',
            template=self.template,
            DependsOn='CodePipelineServiceRole',
            PolicyName=troposphere.Sub(
                '${ResourceNamePrefix}-CodePipeline-Policy'),
            PolicyDocument=PolicyDocument(
                Statement=pipeline_policy_statement_list, ),
            Roles=[troposphere.Ref(pipeline_service_role_res)])
        return pipeline_service_role_res
Example #9
    def create_codepipeline_cfn(
        self,
        template,
        res_config,
    ):
        # CodePipeline
        # Source Actions
        source_stage_actions = []
        # Source Actions
        for action_name in res_config.source.keys():
            action_config = res_config.source[action_name]
            # Manual Approval Action
            if action_config.type == 'ManualApproval':
                manual_approval_action = self.init_manual_approval_action(template, action_config)
                source_stage_actions.append(manual_approval_action)
            # CodeCommit Action
            if action_config.type == 'CodeCommit.Source':
                codecommit_repo_arn_param = self.create_cfn_parameter(
                    param_type='String',
                    name='CodeCommitRepositoryArn',
                    description='The Arn of the CodeCommit repository',
                    value='{}.codecommit.arn'.format(action_config.paco_ref),
                )
                codecommit_role_arn_param = self.create_cfn_parameter(
                    param_type='String',
                    name='CodeCommitRoleArn',
                    description='The Arn of the CodeCommit Role',
                    value='{}.codecommit_role.arn'.format(action_config.paco_ref),
                )
                codecommit_repo_name_param = self.create_cfn_parameter(
                    param_type='String',
                    name='CodeCommitRepositoryName',
                    description='The name of the CodeCommit repository',
                    value=action_config.codecommit_repository+'.name',
                )
                deploy_branch_name_param = self.create_cfn_parameter(
                    param_type='String',
                    name='CodeCommitDeploymentBranchName',
                    description='The name of the branch where commits will trigger a build.',
                    value=action_config.deployment_branch_name,
                )

                codecommit_source_action = troposphere.codepipeline.Actions(
                    Name='CodeCommit',
                    ActionTypeId = troposphere.codepipeline.ActionTypeId(
                        Category = 'Source',
                        Owner = 'AWS',
                        Version = '1',
                        Provider = 'CodeCommit'
                    ),
                    Configuration = {
                        'RepositoryName': troposphere.Ref(codecommit_repo_name_param),
                        'BranchName': troposphere.Ref(deploy_branch_name_param)
                    },
                    OutputArtifacts = [
                        troposphere.codepipeline.OutputArtifacts(
                            Name = 'CodeCommitArtifact'
                        )
                    ],
                    RunOrder = action_config.run_order,
                    RoleArn = troposphere.Ref(codecommit_role_arn_param)
                )
                source_stage_actions.append(codecommit_source_action)

        source_stage = troposphere.codepipeline.Stages(
            Name="Source",
            Actions = source_stage_actions
        )
        # Build Actions
        build_stage_actions = []
        for action_name in res_config.build.keys():
            action_config = res_config.build[action_name]
            # Manual Approval Action
            if action_config.type == 'ManualApproval':
                manual_approval_action = self.init_manual_approval_action(template, action_config)
                build_stage_actions.append(manual_approval_action)
            # CodeBuild Build Action
            elif action_config.type == 'CodeBuild.Build':
                codebuild_project_arn_param = self.create_cfn_parameter(
                    param_type='String',
                    name='CodeBuildProjectArn',
                    description='The arn of the CodeBuild project',
                    value='{}.project.arn'.format(action_config.paco_ref),
                )
                codebuild_build_action = troposphere.codepipeline.Actions(
                    Name='CodeBuild',
                    ActionTypeId = troposphere.codepipeline.ActionTypeId(
                        Category = 'Build',
                        Owner = 'AWS',
                        Version = '1',
                        Provider = 'CodeBuild'
                    ),
                    Configuration = {
                        'ProjectName': troposphere.Ref(self.resource_name_prefix_param),
                    },
                    InputArtifacts = [
                        troposphere.codepipeline.InputArtifacts(
                            Name = 'CodeCommitArtifact'
                        )
                    ],
                    OutputArtifacts = [
                        troposphere.codepipeline.OutputArtifacts(
                            Name = 'CodeBuildArtifact'
                        )
                    ],
                    RunOrder = action_config.run_order
                )
                build_stage_actions.append(codebuild_build_action)
        build_stage = troposphere.codepipeline.Stages(
            Name="Build",
            Actions = build_stage_actions
        )
        # Deploy Action
        [ deploy_stage,
          s3_deploy_assume_role_statement,
          codedeploy_deploy_assume_role_statement ] = self.init_deploy_stage(res_config, template)

        # Manual Deploy Enabled/Disable
        manual_approval_enabled_param = self.create_cfn_parameter(
            param_type='String',
            name='ManualApprovalEnabled',
            description='Boolean indicating whether a manual approval is enabled or not.',
            value=self.manual_approval_is_enabled,
        )
        template.add_condition(
            'ManualApprovalIsEnabled',
            troposphere.Equals(troposphere.Ref(manual_approval_enabled_param), 'true')
        )

        # CodePipeline Role and Policy
        self.pipeline_service_role_name = self.create_iam_resource_name(
            name_list=[self.res_name_prefix, 'CodePipeline-Service'],
            filter_id='IAM.Role.RoleName'
        )
        pipeline_service_role_res = troposphere.iam.Role(
            title='CodePipelineServiceRole',
            template = template,
            RoleName=self.pipeline_service_role_name,
            AssumeRolePolicyDocument=PolicyDocument(
                Version="2012-10-17",
                Statement=[
                    Statement(
                        Effect=Allow,
                        Action=[ AssumeRole ],
                        Principal=Principal("Service", ['codepipeline.amazonaws.com']),
                    )
                ]
            )
        )
        pipeline_policy_statement_list = [
            Statement(
                Sid='CodeCommitAccess',
                Effect=Allow,
                Action=[
                    Action('codecommit', 'List*'),
                    Action('codecommit', 'Get*'),
                    Action('codecommit', 'GitPull'),
                    Action('codecommit', 'UploadArchive'),
                    Action('codecommit', 'CancelUploadArchive'),
                ],
                Resource=[
                    troposphere.Ref(codecommit_repo_arn_param),
                ]
            ),
            Statement(
                Sid='CodePipelineAccess',
                Effect=Allow,
                Action=[
                    Action('codepipeline', '*'),
                    Action('sns', 'Publish'),
                    Action('s3', 'ListAllMyBuckets'),
                    Action('s3', 'GetBucketLocation'),
                    Action('iam', 'ListRoles'),
                    Action('iam', 'PassRole'),
                ],
                Resource=[ '*' ]
            ),
            Statement(
                Sid='CodeBuildAccess',
                Effect=Allow,
                Action=[
                    Action('codebuild', 'BatchGetBuilds'),
                    Action('codebuild', 'StartBuild')
                ],
                Resource=[ troposphere.Ref(codebuild_project_arn_param) ]
            ),
            Statement(
                Sid='S3Access',
                Effect=Allow,
                Action=[
                    Action('s3', 'PutObject'),
                    Action('s3', 'GetBucketPolicy'),
                    Action('s3', 'GetObject'),
                    Action('s3', 'ListBucket'),
                ],
                Resource=[
                    troposphere.Sub('arn:aws:s3:::${ArtifactsBucketName}/*'),
                    troposphere.Sub('arn:aws:s3:::${ArtifactsBucketName}')
                ]
            ),
            Statement(
                Sid='KMSCMK',
                Effect=Allow,
                Action=[
                    Action('kms', 'Decrypt'),
                ],
                Resource=[ troposphere.Ref(self.cmk_arn_param) ]
            ),
            Statement(
                Sid='CodeCommitAssumeRole',
                Effect=Allow,
                Action=[
                    Action('sts', 'AssumeRole'),
                ],
                Resource=[ troposphere.Ref(codecommit_role_arn_param) ]
            ),
        ]

        if codedeploy_deploy_assume_role_statement != None:
            pipeline_policy_statement_list.append(codedeploy_deploy_assume_role_statement)
        if s3_deploy_assume_role_statement != None:
            pipeline_policy_statement_list.append(s3_deploy_assume_role_statement)
        troposphere.iam.PolicyType(
            title='CodePipelinePolicy',
            template = template,
            DependsOn = 'CodePipelineServiceRole',
            PolicyName=troposphere.Sub('${ResourceNamePrefix}-CodePipeline-Policy'),
            PolicyDocument=PolicyDocument(
                Statement=pipeline_policy_statement_list,
            ),
            Roles=[troposphere.Ref(pipeline_service_role_res)]
        )

        pipeline_stages = []
        if source_stage != None: pipeline_stages.append(source_stage)
        if build_stage != None: pipeline_stages.append(build_stage)
        if deploy_stage != None: pipeline_stages.append(deploy_stage)

        pipeline_res = troposphere.codepipeline.Pipeline(
            title = 'BuildCodePipeline',
            template = template,
            DependsOn='CodePipelinePolicy',
            RoleArn = troposphere.GetAtt(pipeline_service_role_res, 'Arn'),
            Name = troposphere.Ref(self.resource_name_prefix_param),
            Stages = pipeline_stages,
            ArtifactStore = troposphere.codepipeline.ArtifactStore(
                Type = 'S3',
                Location = troposphere.Ref(self.artifacts_bucket_name_param),
                EncryptionKey = troposphere.codepipeline.EncryptionKey(
                    Type = 'KMS',
                    Id = troposphere.Ref(self.cmk_arn_param),
                )
            )
        )

        return pipeline_res
Example #10
def get_template(
    puppet_version,
    all_regions,
    source,
    is_caching_enabled,
    is_manual_approvals: bool,
    scm_skip_creation_of_repo: bool,
    should_validate: bool,
) -> t.Template:
    is_codecommit = source.get("Provider", "").lower() == "codecommit"
    is_github = source.get("Provider", "").lower() == "github"
    is_codestarsourceconnection = (source.get(
        "Provider", "").lower() == "codestarsourceconnection")
    is_custom = (source.get("Provider", "").lower() == "custom")
    is_s3 = source.get("Provider", "").lower() == "s3"
    description = f"""Bootstrap template used to bring up the main ServiceCatalog-Puppet AWS CodePipeline with dependencies
{{"version": "{puppet_version}", "framework": "servicecatalog-puppet", "role": "bootstrap-master"}}"""

    template = t.Template(Description=description)

    version_parameter = template.add_parameter(
        t.Parameter("Version", Default=puppet_version, Type="String"))
    org_iam_role_arn_parameter = template.add_parameter(
        t.Parameter("OrgIamRoleArn", Default="None", Type="String"))
    with_manual_approvals_parameter = template.add_parameter(
        t.Parameter(
            "WithManualApprovals",
            Type="String",
            AllowedValues=["Yes", "No"],
            Default="No",
        ))
    puppet_code_pipeline_role_permission_boundary_parameter = template.add_parameter(
        t.Parameter(
            "PuppetCodePipelineRolePermissionBoundary",
            Type="String",
            Description=
            "IAM Permission Boundary to apply to the PuppetCodePipelineRole",
            Default=awscs_iam.ARN(resource="policy/AdministratorAccess").data,
        ))
    source_role_permissions_boundary_parameter = template.add_parameter(
        t.Parameter(
            "SourceRolePermissionsBoundary",
            Type="String",
            Description="IAM Permission Boundary to apply to the SourceRole",
            Default=awscs_iam.ARN(resource="policy/AdministratorAccess").data,
        ))
    puppet_generate_role_permission_boundary_parameter = template.add_parameter(
        t.Parameter(
            "PuppetGenerateRolePermissionBoundary",
            Type="String",
            Description=
            "IAM Permission Boundary to apply to the PuppetGenerateRole",
            Default=awscs_iam.ARN(resource="policy/AdministratorAccess").data,
        ))
    puppet_deploy_role_permission_boundary_parameter = template.add_parameter(
        t.Parameter(
            "PuppetDeployRolePermissionBoundary",
            Type="String",
            Description=
            "IAM Permission Boundary to apply to the PuppetDeployRole",
            Default=awscs_iam.ARN(resource="policy/AdministratorAccess").data,
        ))
    puppet_provisioning_role_permissions_boundary_parameter = template.add_parameter(
        t.Parameter(
            "PuppetProvisioningRolePermissionsBoundary",
            Type="String",
            Description=
            "IAM Permission Boundary to apply to the PuppetProvisioningRole",
            Default=awscs_iam.ARN(resource="policy/AdministratorAccess").data,
        ))
    cloud_formation_deploy_role_permissions_boundary_parameter = template.add_parameter(
        t.Parameter(
            "CloudFormationDeployRolePermissionsBoundary",
            Type="String",
            Description=
            "IAM Permission Boundary to apply to the CloudFormationDeployRole",
            Default=awscs_iam.ARN(resource="policy/AdministratorAccess").data,
        ))
    deploy_environment_compute_type_parameter = template.add_parameter(
        t.Parameter(
            "DeployEnvironmentComputeType",
            Type="String",
            Description="The AWS CodeBuild Environment Compute Type",
            Default="BUILD_GENERAL1_SMALL",
        ))
    spoke_deploy_environment_compute_type_parameter = template.add_parameter(
        t.Parameter(
            "SpokeDeployEnvironmentComputeType",
            Type="String",
            Description=
            "The AWS CodeBuild Environment Compute Type for spoke execution mode",
            Default="BUILD_GENERAL1_SMALL",
        ))
    deploy_num_workers_parameter = template.add_parameter(
        t.Parameter(
            "DeployNumWorkers",
            Type="Number",
            Description=
            "Number of workers that should be used when running a deploy",
            Default=10,
        ))
    puppet_role_name_parameter = template.add_parameter(
        t.Parameter("PuppetRoleName", Type="String", Default="PuppetRole"))
    puppet_role_path_template_parameter = template.add_parameter(
        t.Parameter("PuppetRolePath",
                    Type="String",
                    Default="/servicecatalog-puppet/"))

    template.add_condition(
        "ShouldUseOrgs",
        t.Not(t.Equals(t.Ref(org_iam_role_arn_parameter), "None")))
    template.add_condition(
        "HasManualApprovals",
        t.Equals(t.Ref(with_manual_approvals_parameter), "Yes"))

    template.add_resource(
        s3.Bucket(
            "StacksRepository",
            BucketName=t.Sub("sc-puppet-stacks-repository-${AWS::AccountId}"),
            VersioningConfiguration=s3.VersioningConfiguration(
                Status="Enabled"),
            BucketEncryption=s3.BucketEncryption(
                ServerSideEncryptionConfiguration=[
                    s3.ServerSideEncryptionRule(
                        ServerSideEncryptionByDefault=s3.
                        ServerSideEncryptionByDefault(SSEAlgorithm="AES256"))
                ]),
            PublicAccessBlockConfiguration=s3.PublicAccessBlockConfiguration(
                BlockPublicAcls=True,
                BlockPublicPolicy=True,
                IgnorePublicAcls=True,
                RestrictPublicBuckets=True,
            ),
            Tags=t.Tags({"ServiceCatalogPuppet:Actor": "Framework"}),
        ))

    manual_approvals_param = template.add_resource(
        ssm.Parameter(
            "ManualApprovalsParam",
            Type="String",
            Name="/servicecatalog-puppet/manual-approvals",
            Value=t.Ref(with_manual_approvals_parameter),
        ))
    template.add_resource(
        ssm.Parameter(
            "SpokeDeployEnvParameter",
            Type="String",
            Name=constants.SPOKE_EXECUTION_MODE_DEPLOY_ENV_PARAMETER_NAME,
            Value=t.Ref(spoke_deploy_environment_compute_type_parameter),
        ))
    param = template.add_resource(
        ssm.Parameter(
            "Param",
            Type="String",
            Name="service-catalog-puppet-version",
            Value=t.Ref(version_parameter),
        ))
    partition_parameter = template.add_resource(
        ssm.Parameter(
            "PartitionParameter",
            Type="String",
            Name="/servicecatalog-puppet/partition",
            Value=t.Ref("AWS::Partition"),
        ))
    puppet_role_name_parameter = template.add_resource(
        ssm.Parameter(
            "PuppetRoleNameParameter",
            Type="String",
            Name="/servicecatalog-puppet/puppet-role/name",
            Value=t.Ref(puppet_role_name_parameter),
        ))
    puppet_role_path_parameter = template.add_resource(
        ssm.Parameter(
            "PuppetRolePathParameter",
            Type="String",
            Name="/servicecatalog-puppet/puppet-role/path",
            Value=t.Ref(puppet_role_path_template_parameter),
        ))
    share_accept_function_role = template.add_resource(
        iam.Role(
            "ShareAcceptFunctionRole",
            RoleName="ShareAcceptFunctionRole",
            ManagedPolicyArns=[
                t.Sub(
                    "arn:${AWS::Partition}:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole"
                )
            ],
            Path=t.Ref(puppet_role_path_template_parameter),
            Policies=[
                iam.Policy(
                    PolicyName="ServiceCatalogActions",
                    PolicyDocument={
                        "Version":
                        "2012-10-17",
                        "Statement": [{
                            "Action": ["sts:AssumeRole"],
                            "Resource": {
                                "Fn::Sub":
                                "arn:${AWS::Partition}:iam::*:role${PuppetRolePath}${PuppetRoleName}"
                            },
                            "Effect": "Allow",
                        }],
                    },
                )
            ],
            AssumeRolePolicyDocument={
                "Version":
                "2012-10-17",
                "Statement": [{
                    "Action": ["sts:AssumeRole"],
                    "Effect": "Allow",
                    "Principal": {
                        "Service": ["lambda.amazonaws.com"]
                    },
                }],
            },
        ))

    provisioning_role = template.add_resource(
        iam.Role(
            "ProvisioningRole",
            RoleName="PuppetProvisioningRole",
            AssumeRolePolicyDocument={
                "Version":
                "2012-10-17",
                "Statement": [
                    {
                        "Action": ["sts:AssumeRole"],
                        "Effect": "Allow",
                        "Principal": {
                            "Service": ["codebuild.amazonaws.com"]
                        },
                    },
                    {
                        "Action": ["sts:AssumeRole"],
                        "Effect": "Allow",
                        "Principal": {
                            "AWS": {
                                "Fn::Sub": "${AWS::AccountId}"
                            }
                        },
                    },
                ],
            },
            ManagedPolicyArns=[
                t.Sub(
                    "arn:${AWS::Partition}:iam::aws:policy/AdministratorAccess"
                )
            ],
            PermissionsBoundary=t.Ref(
                puppet_provisioning_role_permissions_boundary_parameter),
            Path=t.Ref(puppet_role_path_template_parameter),
        ))

    cloud_formation_deploy_role = template.add_resource(
        iam.Role(
            "CloudFormationDeployRole",
            RoleName="CloudFormationDeployRole",
            AssumeRolePolicyDocument={
                "Version":
                "2012-10-17",
                "Statement": [
                    {
                        "Action": ["sts:AssumeRole"],
                        "Effect": "Allow",
                        "Principal": {
                            "Service": ["cloudformation.amazonaws.com"]
                        },
                    },
                    {
                        "Action": ["sts:AssumeRole"],
                        "Effect": "Allow",
                        "Principal": {
                            "AWS": {
                                "Fn::Sub": "${AWS::AccountId}"
                            }
                        },
                    },
                ],
            },
            ManagedPolicyArns=[
                t.Sub(
                    "arn:${AWS::Partition}:iam::aws:policy/AdministratorAccess"
                )
            ],
            PermissionsBoundary=t.Ref(
                cloud_formation_deploy_role_permissions_boundary_parameter),
            Path=t.Ref(puppet_role_path_template_parameter),
        ))

    pipeline_role = template.add_resource(
        iam.Role(
            "PipelineRole",
            RoleName="PuppetCodePipelineRole",
            AssumeRolePolicyDocument={
                "Version":
                "2012-10-17",
                "Statement": [{
                    "Action": ["sts:AssumeRole"],
                    "Effect": "Allow",
                    "Principal": {
                        "Service": ["codepipeline.amazonaws.com"]
                    },
                }],
            },
            ManagedPolicyArns=[
                t.Sub(
                    "arn:${AWS::Partition}:iam::aws:policy/AdministratorAccess"
                )
            ],
            PermissionsBoundary=t.Ref(
                puppet_code_pipeline_role_permission_boundary_parameter),
            Path=t.Ref(puppet_role_path_template_parameter),
        ))

    source_role = template.add_resource(
        iam.Role(
            "SourceRole",
            RoleName="PuppetSourceRole",
            AssumeRolePolicyDocument={
                "Version":
                "2012-10-17",
                "Statement": [
                    {
                        "Action": ["sts:AssumeRole"],
                        "Effect": "Allow",
                        "Principal": {
                            "Service": ["codepipeline.amazonaws.com"]
                        },
                    },
                    {
                        "Action": ["sts:AssumeRole"],
                        "Effect": "Allow",
                        "Principal": {
                            "AWS": {
                                "Fn::Sub":
                                "arn:${AWS::Partition}:iam::${AWS::AccountId}:root"
                            }
                        },
                    },
                ],
            },
            ManagedPolicyArns=[
                t.Sub(
                    "arn:${AWS::Partition}:iam::aws:policy/AdministratorAccess"
                )
            ],
            PermissionsBoundary=t.Ref(
                source_role_permissions_boundary_parameter),
            Path=t.Ref(puppet_role_path_template_parameter),
        ))

    dry_run_notification_topic = template.add_resource(
        sns.Topic(
            "DryRunNotificationTopic",
            DisplayName="service-catalog-puppet-dry-run-approvals",
            TopicName="service-catalog-puppet-dry-run-approvals",
            Condition="HasManualApprovals",
        ))

    deploy_role = template.add_resource(
        iam.Role(
            "DeployRole",
            RoleName="PuppetDeployRole",
            AssumeRolePolicyDocument={
                "Version":
                "2012-10-17",
                "Statement": [{
                    "Action": ["sts:AssumeRole"],
                    "Effect": "Allow",
                    "Principal": {
                        "Service": ["codebuild.amazonaws.com"]
                    },
                }],
            },
            ManagedPolicyArns=[
                t.Sub(
                    "arn:${AWS::Partition}:iam::aws:policy/AdministratorAccess"
                )
            ],
            PermissionsBoundary=t.Ref(
                puppet_deploy_role_permission_boundary_parameter),
            Path=t.Ref(puppet_role_path_template_parameter),
        ))

    num_workers_ssm_parameter = template.add_resource(
        ssm.Parameter(
            "NumWorkersSSMParameter",
            Type="String",
            Name="/servicecatalog-puppet/deploy/num-workers",
            Value=t.Sub("${DeployNumWorkers}"),
        ))

    parameterised_source_bucket = template.add_resource(
        s3.Bucket(
            "ParameterisedSourceBucket",
            PublicAccessBlockConfiguration=s3.PublicAccessBlockConfiguration(
                IgnorePublicAcls=True,
                BlockPublicPolicy=True,
                BlockPublicAcls=True,
                RestrictPublicBuckets=True,
            ),
            BucketEncryption=s3.BucketEncryption(
                ServerSideEncryptionConfiguration=[
                    s3.ServerSideEncryptionRule(
                        ServerSideEncryptionByDefault=s3.
                        ServerSideEncryptionByDefault(SSEAlgorithm="AES256"))
                ]),
            Tags=t.Tags.from_dict(
                **{"ServiceCatalogPuppet:Actor": "Framework"}),
            BucketName=t.Sub("sc-puppet-parameterised-runs-${AWS::AccountId}"),
            VersioningConfiguration=s3.VersioningConfiguration(
                Status="Enabled"),
        ))

    source_stage = codepipeline.Stages(
        Name="Source",
        Actions=[
            codepipeline.Actions(
                RunOrder=1,
                RoleArn=t.GetAtt("SourceRole", "Arn"),
                ActionTypeId=codepipeline.ActionTypeId(
                    Category="Source",
                    Owner="AWS",
                    Version="1",
                    Provider="S3",
                ),
                OutputArtifacts=[
                    codepipeline.OutputArtifacts(Name="ParameterisedSource")
                ],
                Configuration={
                    "S3Bucket": t.Ref(parameterised_source_bucket),
                    "S3ObjectKey": "parameters.zip",
                    "PollForSourceChanges": True,
                },
                Name="ParameterisedSource",
            )
        ],
    )

    install_spec = {
        "runtime-versions":
        dict(python="3.7"),
        "commands": [
            f"pip install {puppet_version}" if "http" in puppet_version else
            f"pip install aws-service-catalog-puppet=={puppet_version}",
        ],
    }

    deploy_env_vars = [
        {
            "Type": "PLAINTEXT",
            "Name": "PUPPET_ACCOUNT_ID",
            "Value": t.Ref("AWS::AccountId"),
        },
        {
            "Type": "PLAINTEXT",
            "Name": "PUPPET_REGION",
            "Value": t.Ref("AWS::Region"),
        },
        {
            "Type": "PARAMETER_STORE",
            "Name": "PARTITION",
            "Value": t.Ref(partition_parameter),
        },
        {
            "Type": "PARAMETER_STORE",
            "Name": "PUPPET_ROLE_NAME",
            "Value": t.Ref(puppet_role_name_parameter),
        },
        {
            "Type": "PARAMETER_STORE",
            "Name": "PUPPET_ROLE_PATH",
            "Value": t.Ref(puppet_role_path_parameter),
        },
    ]

    if is_codecommit:
        template.add_resource(
            codecommit.Repository(
                "CodeRepo",
                RepositoryName=source.get("Configuration").get(
                    "RepositoryName"),
                RepositoryDescription=
                "Repo to store the servicecatalog puppet solution",
                DeletionPolicy="Retain",
            ))

        source_stage.Actions.append(
            codepipeline.Actions(
                RunOrder=1,
                RoleArn=t.GetAtt("SourceRole", "Arn"),
                ActionTypeId=codepipeline.ActionTypeId(
                    Category="Source",
                    Owner="AWS",
                    Version="1",
                    Provider="CodeCommit",
                ),
                OutputArtifacts=[codepipeline.OutputArtifacts(Name="Source")],
                Configuration={
                    "RepositoryName":
                    source.get("Configuration").get("RepositoryName"),
                    "BranchName":
                    source.get("Configuration").get("BranchName"),
                    "PollForSourceChanges":
                    source.get("Configuration").get("PollForSourceChanges",
                                                    True),
                },
                Name="Source",
            ))

    if is_github:
        source_stage.Actions.append(
            codepipeline.Actions(
                RunOrder=1,
                ActionTypeId=codepipeline.ActionTypeId(
                    Category="Source",
                    Owner="ThirdParty",
                    Version="1",
                    Provider="GitHub",
                ),
                OutputArtifacts=[codepipeline.OutputArtifacts(Name="Source")],
                Configuration={
                    "Owner":
                    source.get("Configuration").get("Owner"),
                    "Repo":
                    source.get("Configuration").get("Repo"),
                    "Branch":
                    source.get("Configuration").get("Branch"),
                    "OAuthToken":
                    t.Join(
                        "",
                        [
                            "{{resolve:secretsmanager:",
                            source.get("Configuration").get(
                                "SecretsManagerSecret"),
                            ":SecretString:OAuthToken}}",
                        ],
                    ),
                    "PollForSourceChanges":
                    source.get("Configuration").get("PollForSourceChanges"),
                },
                Name="Source",
            ))

    if is_custom:
        source_stage.Actions.append(
            codepipeline.Actions(
                RunOrder=1,
                ActionTypeId=codepipeline.ActionTypeId(
                    Category="Source",
                    Owner="Custom",
                    Version=source.get("Configuration").get(
                        "CustomActionTypeVersion"),
                    Provider=source.get("Configuration").get(
                        "CustomActionTypeProvider"),
                ),
                OutputArtifacts=[codepipeline.OutputArtifacts(Name="Source")],
                Configuration={
                    "GitUrl": source.get("Configuration").get("GitUrl"),
                    "Branch": source.get("Configuration").get("Branch"),
                    "PipelineName": t.Sub("${AWS::StackName}-pipeline"),
                },
                Name="Source",
            ))
        webhook = codepipeline.Webhook(
            "Webhook",
            Authentication="IP",
            TargetAction="Source",
            AuthenticationConfiguration=codepipeline.WebhookAuthConfiguration(
                AllowedIPRange=source.get("Configuration").get(
                    "GitWebHookIpAddress")),
            Filters=[
                codepipeline.WebhookFilterRule(
                    JsonPath="$.changes[0].ref.id",
                    MatchEquals="refs/heads/{Branch}")
            ],
            TargetPipelineVersion=1,
            TargetPipeline=t.Sub("${AWS::StackName}-pipeline"),
        )
        template.add_resource(webhook)
        values_for_sub = {
            "GitUrl": source.get("Configuration").get("GitUrl"),
            "WebhookUrl": t.GetAtt(webhook, "Url"),
        }
        output_to_add = t.Output("WebhookUrl")
        output_to_add.Value = t.Sub("${GitUrl}||${WebhookUrl}",
                                    **values_for_sub)
        output_to_add.Export = t.Export(t.Sub("${AWS::StackName}-pipeline"))
        template.add_output(output_to_add)

    if is_codestarsourceconnection:
        source_stage.Actions.append(
            codepipeline.Actions(
                RunOrder=1,
                RoleArn=t.GetAtt("SourceRole", "Arn"),
                ActionTypeId=codepipeline.ActionTypeId(
                    Category="Source",
                    Owner="AWS",
                    Version="1",
                    Provider="CodeStarSourceConnection",
                ),
                OutputArtifacts=[codepipeline.OutputArtifacts(Name="Source")],
                Configuration={
                    "ConnectionArn":
                    source.get("Configuration").get("ConnectionArn"),
                    "FullRepositoryId":
                    source.get("Configuration").get("FullRepositoryId"),
                    "BranchName":
                    source.get("Configuration").get("BranchName"),
                    "OutputArtifactFormat":
                    source.get("Configuration").get("OutputArtifactFormat"),
                },
                Name="Source",
            ))

    if is_s3:
        bucket_name = source.get("Configuration").get("S3Bucket")
        if not scm_skip_creation_of_repo:
            template.add_resource(
                s3.Bucket(
                    bucket_name,
                    PublicAccessBlockConfiguration=s3.
                    PublicAccessBlockConfiguration(
                        IgnorePublicAcls=True,
                        BlockPublicPolicy=True,
                        BlockPublicAcls=True,
                        RestrictPublicBuckets=True,
                    ),
                    BucketEncryption=s3.BucketEncryption(
                        ServerSideEncryptionConfiguration=[
                            s3.ServerSideEncryptionRule(
                                ServerSideEncryptionByDefault=s3.
                                ServerSideEncryptionByDefault(
                                    SSEAlgorithm="AES256"))
                        ]),
                    Tags=t.Tags.from_dict(
                        **{"ServiceCatalogPuppet:Actor": "Framework"}),
                    BucketName=bucket_name,
                    VersioningConfiguration=s3.VersioningConfiguration(
                        Status="Enabled"),
                ))

        source_stage.Actions.append(
            codepipeline.Actions(
                RunOrder=1,
                ActionTypeId=codepipeline.ActionTypeId(
                    Category="Source",
                    Owner="AWS",
                    Version="1",
                    Provider="S3",
                ),
                OutputArtifacts=[codepipeline.OutputArtifacts(Name="Source")],
                Configuration={
                    "S3Bucket":
                    bucket_name,
                    "S3ObjectKey":
                    source.get("Configuration").get("S3ObjectKey"),
                    "PollForSourceChanges":
                    source.get("Configuration").get("PollForSourceChanges"),
                },
                Name="Source",
            ))

    single_account_run_project_build_spec = dict(
        version=0.2,
        phases=dict(
            install=install_spec,
            build={
                "commands": [
                    'echo "single_account: \\"${SINGLE_ACCOUNT_ID}\\"" > parameters.yaml',
                    "cat parameters.yaml",
                    "zip parameters.zip parameters.yaml",
                    "aws s3 cp parameters.zip s3://sc-puppet-parameterised-runs-${PUPPET_ACCOUNT_ID}/parameters.zip",
                ]
            },
            post_build={
                "commands": [
                    "servicecatalog-puppet wait-for-parameterised-run-to-complete",
                ]
            },
        ),
        artifacts=dict(
            name="DeployProject",
            files=[
                "ServiceCatalogPuppet/manifest.yaml",
                "ServiceCatalogPuppet/manifest-expanded.yaml",
                "results/*/*",
                "output/*/*",
                "exploded_results/*/*",
                "tasks.log",
            ],
        ),
    )

    single_account_run_project_args = dict(
        Name="servicecatalog-puppet-single-account-run",
        Description="Runs puppet for a single account - SINGLE_ACCOUNT_ID",
        ServiceRole=t.GetAtt(deploy_role, "Arn"),
        Tags=t.Tags.from_dict(**{"ServiceCatalogPuppet:Actor": "Framework"}),
        Artifacts=codebuild.Artifacts(Type="NO_ARTIFACTS", ),
        TimeoutInMinutes=480,
        Environment=codebuild.Environment(
            ComputeType=t.Ref(deploy_environment_compute_type_parameter),
            Image="aws/codebuild/standard:4.0",
            Type="LINUX_CONTAINER",
            EnvironmentVariables=[
                {
                    "Type": "PLAINTEXT",
                    "Name": "SINGLE_ACCOUNT_ID",
                    "Value": "CHANGE_ME",
                },
            ] + deploy_env_vars,
        ),
        Source=codebuild.Source(
            Type="NO_SOURCE",
            BuildSpec=yaml.safe_dump(single_account_run_project_build_spec),
        ),
    )

    single_account_run_project = template.add_resource(
        codebuild.Project("SingleAccountRunProject",
                          **single_account_run_project_args))

    single_account_run_project_build_spec["phases"]["post_build"]["commands"] = [
        "servicecatalog-puppet wait-for-parameterised-run-to-complete --on-complete-url $CALLBACK_URL"
    ]
    single_account_run_project_args[
        "Name"] = "servicecatalog-puppet-single-account-run-with-callback"
    single_account_run_project_args[
        "Description"] = "Runs puppet for a single account - SINGLE_ACCOUNT_ID and then does a http put"
    single_account_run_project_args.get(
        "Environment").EnvironmentVariables.append({
            "Type": "PLAINTEXT",
            "Name": "CALLBACK_URL",
            "Value": "CHANGE_ME",
        })
    single_account_run_project_args["Source"] = codebuild.Source(
        Type="NO_SOURCE",
        BuildSpec=yaml.safe_dump(single_account_run_project_build_spec),
    )
    single_account_run_project_with_callback = template.add_resource(
        codebuild.Project("SingleAccountRunWithCallbackProject",
                          **single_account_run_project_args))

    stages = [source_stage]

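    # Optionally add a Validate stage that runs `servicecatalog-puppet validate`
    # against manifest.yaml before anything is deployed.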
    if should_validate:
        template.add_resource(
            codebuild.Project(
                "ValidateProject",
                Name="servicecatalog-puppet-validate",
                ServiceRole=t.GetAtt("DeployRole", "Arn"),
                Tags=t.Tags.from_dict(
                    **{"ServiceCatalogPuppet:Actor": "Framework"}),
                Artifacts=codebuild.Artifacts(Type="CODEPIPELINE"),
                TimeoutInMinutes=60,
                Environment=codebuild.Environment(
                    ComputeType="BUILD_GENERAL1_SMALL",
                    Image="aws/codebuild/standard:4.0",
                    Type="LINUX_CONTAINER",
                ),
                Source=codebuild.Source(
                    BuildSpec=yaml.safe_dump(
                        dict(
                            version="0.2",
                            phases={
                                "install": {
                                    "runtime-versions": {
                                        "python": "3.7",
                                    },
                                    "commands": [
                                        f"pip install {puppet_version}"
                                        if "http" in puppet_version else
                                        f"pip install aws-service-catalog-puppet=={puppet_version}",
                                    ],
                                },
                                "build": {
                                    "commands": [
                                        "servicecatalog-puppet validate manifest.yaml"
                                    ]
                                },
                            },
                        )),
                    Type="CODEPIPELINE",
                ),
                Description="Validate the manifest.yaml file",
            ))
        stages.append(
            codepipeline.Stages(
                Name="Validate",
                Actions=[
                    codepipeline.Actions(
                        InputArtifacts=[
                            codepipeline.InputArtifacts(Name="Source"),
                        ],
                        Name="Validate",
                        ActionTypeId=codepipeline.ActionTypeId(
                            Category="Build",
                            Owner="AWS",
                            Version="1",
                            Provider="CodeBuild",
                        ),
                        OutputArtifacts=[
                            codepipeline.OutputArtifacts(
                                Name="ValidateProject")
                        ],
                        Configuration={
                            "ProjectName": t.Ref("ValidateProject"),
                            "PrimarySource": "Source",
                        },
                        RunOrder=1,
                    ),
                ],
            ))

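    # The Deploy stage either runs a dry run followed by a manual approval and
    # the real deploy, or deploys directly in a single action.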
    if is_manual_approvals:
        deploy_stage = codepipeline.Stages(
            Name="Deploy",
            Actions=[
                codepipeline.Actions(
                    InputArtifacts=[
                        codepipeline.InputArtifacts(Name="Source"),
                        codepipeline.InputArtifacts(
                            Name="ParameterisedSource"),
                    ],
                    Name="DryRun",
                    ActionTypeId=codepipeline.ActionTypeId(
                        Category="Build",
                        Owner="AWS",
                        Version="1",
                        Provider="CodeBuild",
                    ),
                    OutputArtifacts=[
                        codepipeline.OutputArtifacts(Name="DryRunProject")
                    ],
                    Configuration={
                        "ProjectName": t.Ref("DryRunProject"),
                        "PrimarySource": "Source",
                    },
                    RunOrder=1,
                ),
                codepipeline.Actions(
                    ActionTypeId=codepipeline.ActionTypeId(
                        Category="Approval",
                        Owner="AWS",
                        Version="1",
                        Provider="Manual",
                    ),
                    Configuration={
                        "NotificationArn":
                        t.Ref("DryRunNotificationTopic"),
                        "CustomData":
                        "Approve when you are happy with the dry run.",
                    },
                    Name="DryRunApproval",
                    RunOrder=2,
                ),
                codepipeline.Actions(
                    InputArtifacts=[
                        codepipeline.InputArtifacts(Name="Source"),
                        codepipeline.InputArtifacts(
                            Name="ParameterisedSource"),
                    ],
                    Name="Deploy",
                    ActionTypeId=codepipeline.ActionTypeId(
                        Category="Build",
                        Owner="AWS",
                        Version="1",
                        Provider="CodeBuild",
                    ),
                    OutputArtifacts=[
                        codepipeline.OutputArtifacts(Name="DeployProject")
                    ],
                    Configuration={
                        "ProjectName": t.Ref("DeployProject"),
                        "PrimarySource": "Source",
                    },
                    RunOrder=3,
                ),
            ],
        )
    else:
        deploy_stage = codepipeline.Stages(
            Name="Deploy",
            Actions=[
                codepipeline.Actions(
                    InputArtifacts=[
                        codepipeline.InputArtifacts(Name="Source"),
                        codepipeline.InputArtifacts(
                            Name="ParameterisedSource"),
                    ],
                    Name="Deploy",
                    ActionTypeId=codepipeline.ActionTypeId(
                        Category="Build",
                        Owner="AWS",
                        Version="1",
                        Provider="CodeBuild",
                    ),
                    OutputArtifacts=[
                        codepipeline.OutputArtifacts(Name="DeployProject")
                    ],
                    Configuration={
                        "ProjectName":
                        t.Ref("DeployProject"),
                        "PrimarySource":
                        "Source",
                        "EnvironmentVariables":
                        '[{"name":"EXECUTION_ID","value":"#{codepipeline.PipelineExecutionId}","type":"PLAINTEXT"}]',
                    },
                    RunOrder=1,
                ),
            ],
        )

    stages.append(deploy_stage)

    pipeline = template.add_resource(
        codepipeline.Pipeline(
            "Pipeline",
            RoleArn=t.GetAtt("PipelineRole", "Arn"),
            Stages=stages,
            Name=t.Sub("${AWS::StackName}-pipeline"),
            ArtifactStore=codepipeline.ArtifactStore(
                Type="S3",
                Location=t.Sub(
                    "sc-puppet-pipeline-artifacts-${AWS::AccountId}-${AWS::Region}"
                ),
            ),
            RestartExecutionOnUpdate=True,
        ))

    if is_github:
        template.add_resource(
            codepipeline.Webhook(
                "Webhook",
                AuthenticationConfiguration=codepipeline.
                WebhookAuthConfiguration(SecretToken=t.Join(
                    "",
                    [
                        "{{resolve:secretsmanager:",
                        source.get("Configuration").get(
                            "SecretsManagerSecret"),
                        ":SecretString:SecretToken}}",
                    ],
                )),
                Filters=[
                    codepipeline.WebhookFilterRule(
                        JsonPath="$.ref",
                        MatchEquals="refs/heads/" +
                        source.get("Configuration").get("Branch"),
                    )
                ],
                Authentication="GITHUB_HMAC",
                TargetPipeline=t.Ref(pipeline),
                TargetAction="Source",
                Name=t.Sub("${AWS::StackName}-webhook"),
                TargetPipelineVersion=t.GetAtt(pipeline, "Version"),
                RegisterWithThirdParty="true",
            ))

    deploy_project_build_spec = dict(
        version=0.2,
        phases=dict(
            install={
                "runtime-versions":
                dict(python="3.7"),
                "commands": [
                    f"pip install {puppet_version}"
                    if "http" in puppet_version else
                    f"pip install aws-service-catalog-puppet=={puppet_version}",
                ],
            },
            pre_build={
                "commands": [
                    "servicecatalog-puppet --info expand --parameter-override-file $CODEBUILD_SRC_DIR_ParameterisedSource/parameters.yaml manifest.yaml",
                ]
            },
            build={
                "commands": [
                    "servicecatalog-puppet --info deploy --num-workers ${NUM_WORKERS} manifest-expanded.yaml",
                ]
            },
        ),
        artifacts=dict(
            name="DeployProject",
            files=[
                "manifest-expanded.yaml",
                "results/*/*",
                "output/*/*",
                "exploded_results/*/*",
                "tasks.log",
            ],
        ),
    )

    deploy_project_args = dict(
        Name="servicecatalog-puppet-deploy",
        ServiceRole=t.GetAtt(deploy_role, "Arn"),
        Tags=t.Tags.from_dict(**{"ServiceCatalogPuppet:Actor": "Framework"}),
        Artifacts=codebuild.Artifacts(Type="CODEPIPELINE", ),
        TimeoutInMinutes=480,
        Environment=codebuild.Environment(
            ComputeType=t.Ref(deploy_environment_compute_type_parameter),
            Image="aws/codebuild/standard:4.0",
            Type="LINUX_CONTAINER",
            EnvironmentVariables=[
                {
                    "Type": "PARAMETER_STORE",
                    "Name": "NUM_WORKERS",
                    "Value": t.Ref(num_workers_ssm_parameter),
                },
                {
                    "Type":
                    "PARAMETER_STORE",
                    "Name":
                    "SPOKE_EXECUTION_MODE_DEPLOY_ENV",
                    "Value":
                    constants.SPOKE_EXECUTION_MODE_DEPLOY_ENV_PARAMETER_NAME,
                },
            ] + deploy_env_vars,
        ),
        Source=codebuild.Source(
            Type="CODEPIPELINE",
            BuildSpec=yaml.safe_dump(deploy_project_build_spec),
        ),
        Description="deploys out the products to be deployed",
    )

    deploy_project = template.add_resource(
        codebuild.Project("DeployProject", **deploy_project_args))

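    # Reuse the deploy project definition for the dry-run project, swapping the
    # build command for `servicecatalog-puppet --info dry-run`.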
    deploy_project_build_spec["phases"]["build"]["commands"] = [
        "servicecatalog-puppet --info dry-run manifest-expanded.yaml"
    ]
    deploy_project_build_spec["artifacts"]["name"] = "DryRunProject"
    deploy_project_args["Name"] = "servicecatalog-puppet-dryrun"
    deploy_project_args[
        "Description"] = "dry run of servicecatalog-puppet-dryrun"
    deploy_project_args["Source"] = codebuild.Source(
        Type="CODEPIPELINE",
        BuildSpec=yaml.safe_dump(deploy_project_build_spec),
    )

    dry_run_project = template.add_resource(
        codebuild.Project("DryRunProject", **deploy_project_args))

    bootstrap_project = template.add_resource(
        codebuild.Project(
            "BootstrapProject",
            Name="servicecatalog-puppet-bootstrap-spokes-in-ou",
            ServiceRole=t.GetAtt("DeployRole", "Arn"),
            Tags=t.Tags.from_dict(
                **{"ServiceCatalogPuppet:Actor": "Framework"}),
            Artifacts=codebuild.Artifacts(Type="NO_ARTIFACTS"),
            TimeoutInMinutes=60,
            Environment=codebuild.Environment(
                ComputeType="BUILD_GENERAL1_SMALL",
                Image="aws/codebuild/standard:4.0",
                Type="LINUX_CONTAINER",
                EnvironmentVariables=[
                    {
                        "Type": "PLAINTEXT",
                        "Name": "OU_OR_PATH",
                        "Value": "CHANGE_ME"
                    },
                    {
                        "Type": "PLAINTEXT",
                        "Name": "IAM_ROLE_NAME",
                        "Value": "OrganizationAccountAccessRole",
                    },
                    {
                        "Type": "PLAINTEXT",
                        "Name": "IAM_ROLE_ARNS",
                        "Value": ""
                    },
                ],
            ),
            Source=codebuild.Source(
                BuildSpec=
                "version: 0.2\nphases:\n  install:\n    runtime-versions:\n      python: 3.7\n    commands:\n      - pip install aws-service-catalog-puppet\n  build:\n    commands:\n      - servicecatalog-puppet bootstrap-spokes-in-ou $OU_OR_PATH $IAM_ROLE_NAME $IAM_ROLE_ARNS\nartifacts:\n  files:\n    - results/*/*\n    - output/*/*\n  name: BootstrapProject\n",
                Type="NO_SOURCE",
            ),
            Description="Bootstrap all the accounts in an OU",
        ))

    template.add_resource(
        codebuild.Project(
            "BootstrapASpokeProject",
            Name="servicecatalog-puppet-bootstrap-spoke",
            ServiceRole=t.GetAtt("DeployRole", "Arn"),
            Tags=t.Tags.from_dict(
                **{"ServiceCatalogPuppet:Actor": "Framework"}),
            Artifacts=codebuild.Artifacts(Type="NO_ARTIFACTS"),
            TimeoutInMinutes=60,
            Environment=codebuild.Environment(
                ComputeType="BUILD_GENERAL1_SMALL",
                Image="aws/codebuild/standard:4.0",
                Type="LINUX_CONTAINER",
                EnvironmentVariables=[
                    {
                        "Type": "PLAINTEXT",
                        "Name": "PUPPET_ACCOUNT_ID",
                        "Value": t.Sub("${AWS::AccountId}"),
                    },
                    {
                        "Type": "PLAINTEXT",
                        "Name": "ORGANIZATION_ACCOUNT_ACCESS_ROLE_ARN",
                        "Value": "CHANGE_ME",
                    },
                    {
                        "Type": "PLAINTEXT",
                        "Name": "ASSUMABLE_ROLE_IN_ROOT_ACCOUNT",
                        "Value": "CHANGE_ME",
                    },
                ],
            ),
            Source=codebuild.Source(
                BuildSpec=yaml.safe_dump(
                    dict(
                        version=0.2,
                        phases=dict(
                            install=install_spec,
                            build={
                                "commands": [
                                    "servicecatalog-puppet bootstrap-spoke-as ${PUPPET_ACCOUNT_ID} ${ASSUMABLE_ROLE_IN_ROOT_ACCOUNT} ${ORGANIZATION_ACCOUNT_ACCESS_ROLE_ARN}"
                                ]
                            },
                        ),
                    )),
                Type="NO_SOURCE",
            ),
            Description="Bootstrap given account as a spoke",
        ))

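    # Queue that collects CloudFormation stack events forwarded by the regional
    # SNS topics; the policy below restricts sqs:SendMessage to those topics.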
    cloud_formation_events_queue = template.add_resource(
        sqs.Queue(
            "CloudFormationEventsQueue",
            QueueName="servicecatalog-puppet-cloudformation-events",
            Tags=t.Tags.from_dict(
                **{"ServiceCatalogPuppet:Actor": "Framework"}),
        ))

    cloud_formation_events_queue_policy = template.add_resource(
        sqs.QueuePolicy(
            "CloudFormationEventsQueuePolicy",
            Queues=[t.Ref(cloud_formation_events_queue)],
            PolicyDocument={
                "Id":
                "AllowSNS",
                "Version":
                "2012-10-17",
                "Statement": [{
                    "Sid": "allow-send-message",
                    "Effect": "Allow",
                    "Principal": {
                        "AWS": "*"
                    },
                    "Action": ["sqs:SendMessage"],
                    "Resource": "*",
                    "Condition": {
                        "ArnEquals": {
                            "aws:SourceArn":
                            t.Sub(
                                "arn:${AWS::Partition}:sns:*:${AWS::AccountId}:servicecatalog-puppet-cloudformation-regional-events"
                            )
                        }
                    },
                }],
            },
        ))

    spoke_deploy_bucket = template.add_resource(
        s3.Bucket(
            "SpokeDeployBucket",
            PublicAccessBlockConfiguration=s3.PublicAccessBlockConfiguration(
                IgnorePublicAcls=True,
                BlockPublicPolicy=True,
                BlockPublicAcls=True,
                RestrictPublicBuckets=True,
            ),
            BucketEncryption=s3.BucketEncryption(
                ServerSideEncryptionConfiguration=[
                    s3.ServerSideEncryptionRule(
                        ServerSideEncryptionByDefault=s3.
                        ServerSideEncryptionByDefault(SSEAlgorithm="AES256"))
                ]),
            Tags=t.Tags.from_dict(
                **{"ServiceCatalogPuppet:Actor": "Framework"}),
            BucketName=t.Sub("sc-puppet-spoke-deploy-${AWS::AccountId}"),
            VersioningConfiguration=s3.VersioningConfiguration(
                Status="Enabled"),
        ))

    caching_bucket = template.add_resource(
        s3.Bucket(
            "CachingBucket",
            PublicAccessBlockConfiguration=s3.PublicAccessBlockConfiguration(
                BlockPublicAcls=True,
                BlockPublicPolicy=True,
                IgnorePublicAcls=True,
                RestrictPublicBuckets=True,
            ),
            BucketEncryption=s3.BucketEncryption(
                ServerSideEncryptionConfiguration=[
                    s3.ServerSideEncryptionRule(
                        ServerSideEncryptionByDefault=s3.
                        ServerSideEncryptionByDefault(SSEAlgorithm="AES256"))
                ]),
            Tags=t.Tags.from_dict(
                **{"ServiceCatalogPuppet:Actor": "Framework"}),
            BucketName=t.Sub(
                "sc-puppet-caching-bucket-${AWS::AccountId}-${AWS::Region}"),
            VersioningConfiguration=s3.VersioningConfiguration(
                Status="Enabled"),
        ))

    template.add_output(
        t.Output(
            "CloudFormationEventsQueueArn",
            Value=t.GetAtt(cloud_formation_events_queue, "Arn"),
        ))
    template.add_output(t.Output("Version", Value=t.GetAtt(param, "Value")))
    template.add_output(
        t.Output("ManualApprovalsParam",
                 Value=t.GetAtt(manual_approvals_param, "Value")))

    template.add_resource(
        ssm.Parameter(
            "DefaultTerraformVersion",
            Type="String",
            Name=constants.DEFAULT_TERRAFORM_VERSION_PARAMETER_NAME,
            Value=constants.DEFAULT_TERRAFORM_VERSION_VALUE,
        ))

    return template
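
A minimal usage sketch (not part of the original example): assuming `template` holds the troposphere Template returned by the generator above, it can be rendered to CloudFormation YAML and provisioned with boto3; the stack name below is a hypothetical choice.

import boto3

rendered = template.to_yaml()  # render the troposphere Template to YAML
boto3.client("cloudformation").create_stack(
    StackName="servicecatalog-puppet",      # hypothetical stack name
    TemplateBody=rendered,
    Capabilities=["CAPABILITY_NAMED_IAM"],  # needed if the template defines named IAM roles
)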
Example #11
    def __init__(self, stack, paco_ctx,):
        rds_config = stack.resource
        config_ref = rds_config.paco_ref_parts
        super().__init__(stack, paco_ctx, iam_capabilities=["CAPABILITY_IAM"])
        self.set_aws_name('RDS', self.resource_group_name, self.resource.name)
        self.init_template('RDS')
        template = self.template
        if not rds_config.is_enabled(): return

        rds_logical_id = 'PrimaryDBInstance'

        # DB Subnet Group
        db_subnet_id_list_param = self.create_cfn_parameter(
            param_type='List<AWS::EC2::Subnet::Id>',
            name='DBSubnetIdList',
            description='The list of subnet IDs where this database will be provisioned.',
            value=rds_config.segment+'.subnet_id_list',
        )
        db_subnet_group_res = troposphere.rds.DBSubnetGroup(
            title='DBSubnetGroup',
            template =template,
            DBSubnetGroupDescription=troposphere.Ref('AWS::StackName'),
            SubnetIds=troposphere.Ref(db_subnet_id_list_param),
        )

        # DB Parameter Group
        engine_major_version = None
        if rds_config.parameter_group == None:
            # No Parameter Group supplied, create one
            engine_major_version = '.'.join(rds_config.engine_version.split('.')[0:2])
            param_group_family = gen_vocabulary.rds_engine_versions[rds_config.engine][rds_config.engine_version]['param_group_family']
            dbparametergroup_ref = troposphere.rds.DBParameterGroup(
                "DBParameterGroup",
                template = template,
                Family=param_group_family,
                Description=troposphere.Ref('AWS::StackName')
            )
        else:
            # Use an existing Parameter Group
            dbparametergroup_ref = self.create_cfn_parameter(
                name='DBParameterGroupName',
                param_type='String',
                description='DB Parameter Group Name',
                value=rds_config.parameter_group + '.name',
            )

        # Option Group
        option_group_res = None
        if len(rds_config.option_configurations) > 0 or (hasattr(rds_config, 'backup_restore_bucket') and rds_config.backup_restore_bucket != None):
            option_group_dict = {
                'EngineName': rds_config.engine,
                'MajorEngineVersion': engine_major_version,
                'OptionGroupDescription': troposphere.Ref('AWS::StackName')
            }
            option_config_list = []
            if len(rds_config.option_configurations) > 0:
                for option_config in rds_config.option_configurations:
                    option_config_dict = {
                        'OptionName': option_config.option_name,
                    }
                    if len(option_config.option_settings) > 0:
                        option_config_dict['OptionSettings'] = []
                        idx = 0
                        for option_setting in option_config.option_settings:
                            option_value = option_setting.value
                            if references.is_ref(option_setting.value):
                                # Use an existing Parameter Group
                                option_setting_value_param = self.create_cfn_parameter(
                                    name=f'OptionsGroupValue{idx}',
                                    param_type='String',
                                    description=f'DB Option Settings Value {idx}',
                                    value=option_setting.value
                                )
                                option_value = troposphere.Ref(option_setting_value_param)

                            option_setting_dict = {
                                'Name': option_setting.name,
                                'Value': option_value
                            }
                            option_config_dict['OptionSettings'].append(option_setting_dict)
                    option_config_list.append(option_config_dict)
            if hasattr(rds_config, 'backup_restore_bucket') and rds_config.backup_restore_bucket != None:
                option_config_dict = {
                    'OptionName': 'SQLSERVER_BACKUP_RESTORE',
                    'OptionSettings': []
                }
                # S3 Bucket Arn Param
                backup_restore_bucket_arn_param = self.create_cfn_parameter(
                    name='SQLServerBackupRestoreBucketArn',
                    param_type='String',
                    description=f'DB Option Setting SQLServer Backup Restore Bucket ARN',
                    value=f'{rds_config.backup_restore_bucket}.arn'
                )
                # Create Role for SQLServer Bucket
                sqlserver_backup_restore_role = troposphere.iam.Role(
                    title='SQLServerBackupRestoreRole',
                    template=self.template,
                    AssumeRolePolicyDocument=PolicyDocument(
                        Statement=[
                            Statement(
                                Effect=Allow,
                                Action=[Action("sts", "AssumeRole")],
                                Principal=Principal("Service", "rds.amazonaws.com")
                            )
                        ]
                    ),
                    Policies=[
                        troposphere.iam.Policy(
                            PolicyName="S3BucketAccess",
                            PolicyDocument=Policy(
                                Version='2012-10-17',
                                Statement=[
                                    Statement(
                                        Effect=Allow,
                                        Action=[
                                            awacs.s3.ListBucket,
                                            awacs.s3.GetBucketLocation
                                        ],
                                        Resource=[troposphere.Ref(backup_restore_bucket_arn_param)],
                                    ),
                                    Statement(
                                        Effect=Allow,
                                        Action=[
                                            Action('s3', 'GetObjectMetaData'),
                                            awacs.s3.GetObject,
                                            awacs.s3.PutObject,
                                            awacs.s3.ListMultipartUploadParts,
                                            awacs.s3.AbortMultipartUpload
                                        ],
                                        Resource=[troposphere.Sub("${SQLServerBackupRestoreBucketArn}/*")]
                                    )
                                ]
                            )
                        )
                    ],
                    Path="/",
                )
                option_value = troposphere.GetAtt(sqlserver_backup_restore_role, 'Arn')

                option_setting_dict = {
                    'Name': 'IAM_ROLE_ARN',
                    'Value': option_value
                }
                option_config_dict['OptionSettings'].append(option_setting_dict)
                option_config_list.append(option_config_dict)
            option_group_dict['OptionConfigurations'] = option_config_list

            option_group_res = troposphere.rds.OptionGroup.from_dict(
                'OptionGroup',
                option_group_dict )
            template.add_resource(option_group_res)

        # RDS MultiAZ (Mysql, Postgresql)
        sg_param_ref_list = []
        for sg_ref in rds_config.security_groups:
            sg_hash = utils.md5sum(str_data=sg_ref)
            sg_param = self.create_cfn_parameter(
                param_type='AWS::EC2::SecurityGroup::Id',
                name=self.create_cfn_logical_id('SecurityGroup'+sg_hash),
                description='VPC Security Group to attach to the RDS.',
                value=sg_ref+'.id',
            )
            sg_param_ref_list.append(troposphere.Ref(sg_param))

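        # Core DBInstance properties; optional settings (MultiAZ, license model,
        # deletion protection, log exports, option group, snapshot/credentials)
        # are appended conditionally below.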
        db_instance_dict = {
            'Engine': rds_config.engine,
            'EngineVersion': rds_config.engine_version,
            'DBInstanceIdentifier': troposphere.Ref('AWS::StackName'),
            'DBInstanceClass': rds_config.db_instance_type,
            'DBSubnetGroupName': troposphere.Ref(db_subnet_group_res),
            'DBParameterGroupName': troposphere.Ref(dbparametergroup_ref),
            'CopyTagsToSnapshot': True,
            'AllowMajorVersionUpgrade': rds_config.allow_major_version_upgrade,
            'AutoMinorVersionUpgrade': rds_config.auto_minor_version_upgrade,
            'AllocatedStorage': rds_config.storage_size_gb,
            'StorageType': rds_config.storage_type,
            'BackupRetentionPeriod': rds_config.backup_retention_period,
            'Port': rds_config.port,
            'PreferredBackupWindow': rds_config.backup_preferred_window,
            'PreferredMaintenanceWindow': rds_config.maintenance_preferred_window,
            'VPCSecurityGroups': sg_param_ref_list
        }

        if schemas.IRDSMultiAZ.providedBy(rds_config):
            db_instance_dict['MultiAZ'] = rds_config.multi_az


        # License Model
        if rds_config.license_model:
            db_instance_dict['LicenseModel'] = rds_config.license_model

        # Deletion Protection
        if rds_config.deletion_protection:
            db_instance_dict['DeletionProtection'] = rds_config.deletion_protection

        # CloudWatch Logs Exports
        if len(rds_config.cloudwatch_logs_exports) > 0:
            db_instance_dict['EnableCloudwatchLogsExports'] = rds_config.cloudwatch_logs_exports

        # Option Group
        if option_group_res != None:
            db_instance_dict['OptionGroupName'] = troposphere.Ref(option_group_res)

        # DB Snapshot Identifier
        if rds_config.db_snapshot_identifier == '' or rds_config.db_snapshot_identifier == None:
            db_snapshot_id_enabled = False
        else:
            db_snapshot_id_enabled = True
        if db_snapshot_id_enabled == True:
            db_instance_dict['DBSnapshotIdentifier'] = rds_config.db_snapshot_identifier
            # To restore an existing DB from a Snapshot, RDS will need to replace the RDS
            # resource, in which case the DBInstanceIdentifier name CAN NOT be set
            # del db_instance_dict['DBInstanceIdentifier']

        # Username and Password
        if db_snapshot_id_enabled == False:
            # Encryption
            if rds_config.storage_encrypted == True:
                db_instance_dict['StorageEncrypted'] = True
            if rds_config.kms_key_id and rds_config.kms_key_id != '':
                db_instance_dict['KmsKeyId'] = rds_config.kms_key_id

            # Username & Password
            db_instance_dict['MasterUsername'] = rds_config.master_username
            if rds_config.secrets_password:
                # Password from Secrets Manager
                sta_logical_id = 'SecretTargetAttachmentRDS'
                secret_arn_param = self.create_cfn_parameter(
                    param_type='String',
                    name='RDSSecretARN',
                    description='The ARN for the secret for the RDS master password.',
                    value=rds_config.secrets_password + '.arn',
                )
                secret_target_attachment_resource = troposphere.secretsmanager.SecretTargetAttachment(
                    title=sta_logical_id,
                    SecretId=troposphere.Ref(secret_arn_param),
                    TargetId=troposphere.Ref(rds_logical_id),
                    TargetType='AWS::RDS::DBInstance'
                )
                template.add_resource(secret_target_attachment_resource)

                db_instance_dict['MasterUserPassword'] = troposphere.Join(
                    '',
                    ['{{resolve:secretsmanager:', troposphere.Ref(secret_arn_param), ':SecretString:password}}' ]
                )
            else:
                master_password_param = self.create_cfn_parameter(
                    param_type='String',
                    name='MasterUserPassword',
                    description='The master user password.',
                    value=rds_config.master_user_password,
                    noecho=True,
                )
                db_instance_dict['MasterUserPassword'] = troposphere.Ref(master_password_param)

        db_instance_res = troposphere.rds.DBInstance.from_dict(
            rds_logical_id,
            db_instance_dict
        )
        template.add_resource(db_instance_res)

        # Outputs
        self.create_output(
            title='DBInstanceName',
            description='DB Instance Name',
            value=troposphere.Ref(db_instance_res),
            ref=config_ref + ".name",
        )
        self.create_output(
            title='RDSEndpointAddress',
            description='RDS Endpoint URL',
            value=troposphere.GetAtt(db_instance_res, 'Endpoint.Address'),
            ref=config_ref + ".endpoint.address",
        )

        # Legacy Route53 Record Set
        if self.paco_ctx.legacy_flag('route53_record_set_2019_10_16') == True:
            if rds_config.is_dns_enabled() == True:
                for dns_config in rds_config.dns_config:
                    dns_hash = utils.md5sum(str_data=(rds_config.hosted_zone+rds_config.domain_name))
                    primary_hosted_zone_id_param = self.create_cfn_parameter(
                        param_type='String',
                        name='DNSHostedZoneId'+dns_hash,
                        description='The hosted zone id to create the Route53 record set.',
                        value=rds_config.primary_hosted_zone+'.id',
                    )
                    record_set_res = troposphere.route53.RecordSetType(
                        title = 'RecordSet'+dns_hash,
                        template = template,
                        Comment = 'RDS Primary DNS',
                        HostedZoneId = troposphere.Ref(primary_hosted_zone_id_param),
                        Name = rds_config.primary_domain_name,
                        Type = 'CNAME',
                        TTL = dns_config.ttl,
                        ResourceRecords = [ troposphere.GetAtt(db_instance_res, 'Endpoint.Address')]
                    )
                    record_set_res.DependsOn = db_instance_res

        # DNS - Route53 Record Set
        if self.paco_ctx.legacy_flag('route53_record_set_2019_10_16') == False:
            if rds_config.is_dns_enabled() == True:
                route53_ctl = self.paco_ctx.get_controller('route53')
                for dns_config in rds_config.dns:
                    route53_ctl.add_record_set(
                        self.account_ctx,
                        self.aws_region,
                        rds_config,
                        enabled=rds_config.is_enabled(),
                        dns=dns_config,
                        record_set_type='CNAME',
                        resource_records=['paco.ref ' + config_ref + '.endpoint.address'],
                        stack_group=self.stack.stack_group,
                        async_stack_provision=True,
                        config_ref=rds_config.paco_ref_parts + '.dns'
                    )
def cfn_json_dumps(structure):
    """
    Similar to json.dumps(o), but with support for troposphere objects such as:
     - Ref()
     - GetAtt()
     - ImportValue()
     - Sub(), Join()

    Returns a Sub()-wrapped string, with all troposphere-objects pulled to the
    outer layer.

    E.g.:

    cfn_json_dumps({'foo': Sub('${bar}')})
    -> Sub('{"foo", "${bar}"}')

    cfn_json_dumps({'foo': {'bar': Sub('${baz}')}})
    -> Sub('{"foo": {"bar": "${baz}"}})
    """
    def replace_objects(thing, params=None):
        """
        Recursive function to split `thing` into a JSONable object and a dict of
        substitutions.
        :param thing: Thing to split. Probably a dict or list.
        :param params: Params from earlier calls.
        :return: (thing_without_functions, substitutions)
        """
        if params is None:
            params = {}

        if isinstance(thing, string_types):
            # Escape things that look like substitutions
            thing = re.sub(r'\$\{([^}]+)\}', '${!\\1}', thing)
            return thing, params

        elif isinstance(thing, bool) or isinstance(thing, int):
            # Pass through unmodified
            return thing, params

        elif thing is None:
            # Pass through unmodified
            return None, params

        elif isinstance(thing, dict):
            # Recurse down for keys & values
            _ = {}
            for k, v in thing.items():
                k, params = replace_objects(k, params)
                v, params = replace_objects(v, params)
                _[k] = v
            return _, params

        elif isinstance(thing, list) or isinstance(thing, tuple):
            # Recurse down for every element.
            # We don't need to maintain the list vs tuple, since JSON doesn't
            # differentiate either.
            _ = []
            for e in thing:
                e, params = replace_objects(e, params)
                _.append(e)
            return _, params

        elif isinstance(thing, troposphere.AWSHelperFn):
            # Extract this function by replacing it with a `${}`, and moving it
            # to the outermost Sub()
            aws_function_name = thing.__class__.__name__

            # Find a free name for this kind of function
            sub_name = None
            i = 0
            while sub_name is None or sub_name in params:
                sub_name = "{}_{}".format(aws_function_name, i)
                i = i + 1

            params[sub_name] = thing
            return "${{{}}}".format(sub_name), params

        else:
            raise TypeError("Don't know how to convert {}".format(type(thing)))

    structure, params = replace_objects(structure)
    if params == {}:
        return json.dumps(structure)
    else:
        return troposphere.Sub(json.dumps(structure), **params)
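
A small usage sketch (not from the original source): a policy document that mixes plain JSON with a troposphere intrinsic is serialised into a single Sub()-wrapped string. The "EventsQueue" logical ID is hypothetical.

policy = {
    "Version": "2012-10-17",
    "Statement": [{
        "Effect": "Allow",
        "Action": "sqs:SendMessage",
        # GetAtt is pulled out into the outer Sub()'s substitution map
        "Resource": troposphere.GetAtt("EventsQueue", "Arn"),
    }],
}
policy_body = cfn_json_dumps(policy)  # troposphere.Sub wrapping the JSON string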
def create_cdk_pipeline(name, version, product_name, product_version,
                        template_config, p) -> t.Template:
    description = f"""Builds a cdk pipeline
{{"version": "{constants.VERSION}", "framework": "servicecatalog-factory", "role": "product-pipeline", "type": "{name}", "version": "{version}"}}"""
    configuration = template_config.get("Configuration")
    template = t.Template(Description=description)

    template.add_parameter(t.Parameter("PuppetAccountId", Type="String"))
    template.add_parameter(
        t.Parameter("CDKSupportCDKDeployRequireApproval",
                    Type="String",
                    Default="never"))
    template.add_parameter(
        t.Parameter("CDKSupportCDKComputeType",
                    Type="String",
                    Default="BUILD_GENERAL1_SMALL"))
    template.add_parameter(
        t.Parameter("CDKSupportCDKDeployImage",
                    Type="String",
                    Default="aws/codebuild/standard:4.0"))
    template.add_parameter(
        t.Parameter("CDKSupportCDKToolkitStackName",
                    Type="String",
                    Default="CDKToolKit"))
    template.add_parameter(
        t.Parameter(
            "CDKSupportCDKDeployExtraArgs",
            Type="String",
            Default="",
            Description="Extra args to pass to CDK deploy",
        ))
    template.add_parameter(
        t.Parameter(
            "CDKSupportStartCDKDeployFunctionArn",
            Type="String",
        ))
    template.add_parameter(
        t.Parameter(
            "CDKSupportGetOutputsForGivenCodebuildIdFunctionArn",
            Type="String",
        ))
    template.add_parameter(
        t.Parameter("CDKSupportIAMRolePaths",
                    Type="String",
                    Default="/servicecatalog-factory-cdk-support/"))
    template.add_parameter(
        t.Parameter("CDKSupportCDKDeployRoleName",
                    Type="String",
                    Default="CDKDeployRoleName"))

    manifest = json.loads(open(f"{p}/{PREFIX}/manifest.json", "r").read())

    cdk_deploy_parameter_args = list()

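    # Walk the CDK manifest: for every CloudFormation stack artifact, surface its
    # template parameters as parameters of this template and map its outputs onto
    # outputs backed by the GetOutputsCode custom resource.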
    for artifact_name, artifact in manifest.get("artifacts", {}).items():
        if artifact.get("type") == "aws:cloudformation:stack":
            artifact_template_file_path = artifact.get("properties",
                                                       {}).get("templateFile")
            assert (
                artifact_template_file_path
            ), f"Could not find template file in manifest.json for {artifact_name}"
            artifact_template = json.loads(
                open(f"{p}/{PREFIX}/{artifact_template_file_path}",
                     "r").read())
            for parameter_name, parameter_details in artifact_template.get(
                    "Parameters", {}).items():
                if template.parameters.get(parameter_name) is None:
                    template.add_parameter(
                        t.Parameter(parameter_name, **parameter_details))
                cdk_deploy_parameter_args.append(
                    f"--parameters {artifact_name}:{parameter_name}=${{{parameter_name}}}"
                )

            for output_name, output_details in artifact_template.get(
                    "Outputs", {}).items():
                if template.outputs.get(output_name) is None:
                    new_output = dict(**output_details)
                    new_output["Value"] = t.GetAtt("GetOutputsCode",
                                                   output_name)
                    template.add_output(t.Output(output_name, **new_output))
    cdk_deploy_parameter_args = " ".join(cdk_deploy_parameter_args)

    class DeployDetailsCustomResource(cloudformation.AWSCustomObject):
        resource_type = "Custom::DeployDetails"
        props = dict()

    runtime_versions = dict(
        nodejs=constants.BUILDSPEC_RUNTIME_VERSIONS_NODEJS_DEFAULT, )
    if configuration.get("runtime-versions"):
        runtime_versions.update(configuration.get("runtime-versions"))

    extra_commands = list(configuration.get("install", {}).get("commands", []))

    template.add_resource(
        codebuild.Project(
            "CDKDeploy",
            Name=t.Sub("${AWS::StackName}-deploy"),
            Description='Run CDK deploy for given source code',
            ServiceRole=t.Sub(
                "arn:aws:iam::${AWS::AccountId}:role${CDKSupportIAMRolePaths}${CDKSupportCDKDeployRoleName}"
            ),
            Artifacts=codebuild.Artifacts(Type="NO_ARTIFACTS", ),
            Environment=codebuild.Environment(
                ComputeType=t.Ref('CDKSupportCDKComputeType'),
                EnvironmentVariables=[
                    codebuild.EnvironmentVariable(
                        Name="CDK_DEPLOY_REQUIRE_APPROVAL",
                        Type="PLAINTEXT",
                        Value="CHANGE_ME"),
                    codebuild.EnvironmentVariable(Name="CDK_DEPLOY_EXTRA_ARGS",
                                                  Type="PLAINTEXT",
                                                  Value="CHANGE_ME"),
                    codebuild.EnvironmentVariable(
                        Name="CDK_TOOLKIT_STACK_NAME",
                        Type="PLAINTEXT",
                        Value="CHANGE_ME"),
                    codebuild.EnvironmentVariable(Name="UId",
                                                  Type="PLAINTEXT",
                                                  Value="CHANGE_ME"),
                    codebuild.EnvironmentVariable(Name="PUPPET_ACCOUNT_ID",
                                                  Type="PLAINTEXT",
                                                  Value="CHANGE_ME"),
                    codebuild.EnvironmentVariable(Name="REGION",
                                                  Type="PLAINTEXT",
                                                  Value=t.Ref("AWS::Region")),
                    codebuild.EnvironmentVariable(
                        Name="CDK_DEPLOY_PARAMETER_ARGS",
                        Type="PLAINTEXT",
                        Value="CHANGE_ME"),
                    codebuild.EnvironmentVariable(Name="ON_COMPLETE_URL",
                                                  Type="PLAINTEXT",
                                                  Value="CHANGE_ME"),
                    codebuild.EnvironmentVariable(Name="NAME",
                                                  Type="PLAINTEXT",
                                                  Value="CHANGE_ME"),
                    codebuild.EnvironmentVariable(Name="VERSION",
                                                  Type="PLAINTEXT",
                                                  Value="CHANGE_ME"),
                ],
                Image=t.Ref('CDKSupportCDKDeployImage'),
                Type="LINUX_CONTAINER",
            ),
            Source=codebuild.Source(
                Type="NO_SOURCE",
                BuildSpec=t.Sub(
                    yaml.safe_dump(
                        dict(
                            version=0.2,
                            phases=dict(
                                install={
                                    "runtime-versions":
                                    runtime_versions,
                                    "commands": [
                                        "aws s3 cp s3://sc-factory-artifacts-$PUPPET_ACCOUNT_ID-$REGION/CDK/1.0.0/$NAME/$VERSION/$NAME-$VERSION.zip $NAME-$VERSION.zip",
                                        "unzip $NAME-$VERSION.zip",
                                        "npm install",
                                    ] + extra_commands
                                },
                                build={
                                    "commands": [
                                        "npm run cdk deploy -- --toolkit-stack-name $CDK_TOOLKIT_STACK_NAME --require-approval $CDK_DEPLOY_REQUIRE_APPROVAL --outputs-file scf_outputs.json $CDK_DEPLOY_EXTRA_ARGS $CDK_DEPLOY_PARAMETER_ARGS '*'",
                                        "aws s3 cp scf_outputs.json s3://sc-cdk-artifacts-${AWS::AccountId}/CDK/1.0.0/$NAME/$VERSION/scf_outputs-$CODEBUILD_BUILD_ID.json",
                                    ]
                                },
                            ),
                            artifacts={
                                "name": "CDKDeploy",
                                "files": ["*", "**/*"],
                            },
                        ))),
            ),
            TimeoutInMinutes=480,
        ))

    template.add_resource(
        codebuild.Project(
            "CDKDestroy",
            Name=t.Sub("${AWS::StackName}-destroy"),
            Description='Run CDK destroy for given source code',
            ServiceRole=t.Sub(
                "arn:aws:iam::${AWS::AccountId}:role${CDKSupportIAMRolePaths}${CDKSupportCDKDeployRoleName}"
            ),
            Artifacts=codebuild.Artifacts(Type="NO_ARTIFACTS", ),
            Environment=codebuild.Environment(
                ComputeType=t.Ref('CDKSupportCDKComputeType'),
                EnvironmentVariables=[
                    codebuild.EnvironmentVariable(
                        Name="CDK_DEPLOY_REQUIRE_APPROVAL",
                        Type="PLAINTEXT",
                        Value="CHANGE_ME"),
                    codebuild.EnvironmentVariable(Name="CDK_DEPLOY_EXTRA_ARGS",
                                                  Type="PLAINTEXT",
                                                  Value="CHANGE_ME"),
                    codebuild.EnvironmentVariable(
                        Name="CDK_TOOLKIT_STACK_NAME",
                        Type="PLAINTEXT",
                        Value="CHANGE_ME"),
                    codebuild.EnvironmentVariable(Name="UId",
                                                  Type="PLAINTEXT",
                                                  Value="CHANGE_ME"),
                    codebuild.EnvironmentVariable(Name="PUPPET_ACCOUNT_ID",
                                                  Type="PLAINTEXT",
                                                  Value="CHANGE_ME"),
                    codebuild.EnvironmentVariable(Name="REGION",
                                                  Type="PLAINTEXT",
                                                  Value=t.Ref("AWS::Region")),
                    codebuild.EnvironmentVariable(
                        Name="CDK_DEPLOY_PARAMETER_ARGS",
                        Type="PLAINTEXT",
                        Value="CHANGE_ME"),
                    codebuild.EnvironmentVariable(Name="ON_COMPLETE_URL",
                                                  Type="PLAINTEXT",
                                                  Value="CHANGE_ME"),
                    codebuild.EnvironmentVariable(Name="NAME",
                                                  Type="PLAINTEXT",
                                                  Value="CHANGE_ME"),
                    codebuild.EnvironmentVariable(Name="VERSION",
                                                  Type="PLAINTEXT",
                                                  Value="CHANGE_ME"),
                ],
                Image=t.Ref('CDKSupportCDKDeployImage'),
                Type="LINUX_CONTAINER",
            ),
            Source=codebuild.Source(
                Type="NO_SOURCE",
                BuildSpec=t.Sub(
                    yaml.safe_dump(
                        dict(
                            version=0.2,
                            phases=dict(
                                install={
                                    "runtime-versions":
                                    runtime_versions,
                                    "commands": [
                                        "aws s3 cp s3://sc-factory-artifacts-$PUPPET_ACCOUNT_ID-$REGION/CDK/1.0.0/$NAME/$VERSION/$NAME-$VERSION.zip $NAME-$VERSION.zip",
                                        "unzip $NAME-$VERSION.zip",
                                        "npm install",
                                    ] + extra_commands
                                },
                                build={
                                    "commands": [
                                        "npm run cdk destroy -- --toolkit-stack-name $CDK_TOOLKIT_STACK_NAME --force --ignore-errors '*'"
                                    ]
                                },
                            ),
                            artifacts={
                                "name": "CDKDeploy",
                                "files": ["*", "**/*"],
                            },
                        ))),
            ),
            TimeoutInMinutes=480,
        ))

    template.add_resource(
        DeployDetailsCustomResource(
            "StartCDKDeploy",
            DependsOn=["CDKDeploy", "CDKDestroy"],
            ServiceToken=t.Ref("CDKSupportStartCDKDeployFunctionArn"),
            CreateUpdateProject=t.Ref("CDKDeploy"),
            DeleteProject=t.Ref("CDKDestroy"),
            CDK_DEPLOY_EXTRA_ARGS=t.Ref("CDKSupportCDKDeployExtraArgs"),
            CDK_TOOLKIT_STACK_NAME=t.Ref("CDKSupportCDKToolkitStackName"),
            PUPPET_ACCOUNT_ID=t.Ref("PuppetAccountId"),
            CDK_DEPLOY_PARAMETER_ARGS=t.Sub(cdk_deploy_parameter_args),
            CDK_DEPLOY_REQUIRE_APPROVAL=t.Ref(
                "CDKSupportCDKDeployRequireApproval"),
            NAME=product_name,
            VERSION=product_version,
        ))

    template.add_resource(
        DeployDetailsCustomResource(
            "GetOutputsCode",
            DependsOn=[
                "StartCDKDeploy",
            ],
            ServiceToken=t.Ref(
                "CDKSupportGetOutputsForGivenCodebuildIdFunctionArn"),
            CodeBuildBuildId=t.GetAtt("StartCDKDeploy", "BuildId"),
            BucketName=t.Sub("sc-cdk-artifacts-${AWS::AccountId}"),
            ObjectKeyPrefix=t.Sub(
                f"CDK/1.0.0/{product_name}/{product_version}"),
        ))

    return template
Example #14
def generate_codepipeline_template(
    codepipeline_role_name: str,
    codepipeline_role_path: str,
    codebuild_role_name: str,
    codebuild_role_path: str,
    ssm_parameter_prefix: str,
    scm_provider: str,
    scm_connection_arn: str,
    scm_full_repository_id: str,
    scm_branch_name: str,
    scm_bucket_name: str,
    scm_object_key: str,
    scm_skip_creation_of_repo: str,
    migrate_role_arn: str,
) -> troposphere.Template:
    version = pkg_resources.get_distribution("aws-organized").version
    t = troposphere.Template()
    t.set_description(
        "CICD template that runs aws organized migrate for the given branch of the given repo"
    )
    project_name = "AWSOrganized-Migrate"
    bucket_name = scm_bucket_name
    if scm_provider.lower(
    ) == "codecommit" and scm_skip_creation_of_repo is False:
        t.add_resource(
            codecommit.Repository("Repository",
                                  RepositoryName=scm_full_repository_id))
    if scm_provider.lower() == "s3" and scm_skip_creation_of_repo is False:
        bucket_name = (
            scm_bucket_name if scm_bucket_name else
            troposphere.Sub("aws-organized-pipeline-source-${AWS::AccountId}"))
        t.add_resource(
            s3.Bucket(
                "Source",
                BucketName=bucket_name,
                VersioningConfiguration=s3.VersioningConfiguration(
                    Status="Enabled"),
                BucketEncryption=s3.BucketEncryption(
                    ServerSideEncryptionConfiguration=[
                        s3.ServerSideEncryptionRule(
                            ServerSideEncryptionByDefault=s3.
                            ServerSideEncryptionByDefault(
                                SSEAlgorithm="AES256"))
                    ]),
            ))
    artifact_store = t.add_resource(
        s3.Bucket(
            "ArtifactStore",
            VersioningConfiguration=s3.VersioningConfiguration(
                Status="Enabled"),
            BucketEncryption=s3.BucketEncryption(
                ServerSideEncryptionConfiguration=[
                    s3.ServerSideEncryptionRule(
                        ServerSideEncryptionByDefault=s3.
                        ServerSideEncryptionByDefault(SSEAlgorithm="AES256"))
                ]),
        ))
    codepipeline_role = t.add_resource(
        iam.Role(
            "CodePipelineRole",
            RoleName=codepipeline_role_name,
            Path=codepipeline_role_path,
            ManagedPolicyArns=["arn:aws:iam::aws:policy/AdministratorAccess"],
            AssumeRolePolicyDocument=aws.PolicyDocument(
                Version="2012-10-17",
                Statement=[
                    aws.Statement(
                        Effect=aws.Allow,
                        Action=[awacs_sts.AssumeRole],
                        Principal=aws.Principal(
                            "Service", ["codepipeline.amazonaws.com"]),
                    )
                ],
            ),
        ))
    codebuild_role = t.add_resource(
        iam.Role(
            "CodeBuildRole",
            RoleName=codebuild_role_name,
            Path=codebuild_role_path,
            ManagedPolicyArns=["arn:aws:iam::aws:policy/AdministratorAccess"],
            AssumeRolePolicyDocument=aws.PolicyDocument(
                Version="2012-10-17",
                Statement=[
                    aws.Statement(
                        Effect=aws.Allow,
                        Action=[awacs_sts.AssumeRole],
                        Principal=aws.Principal("Service",
                                                ["codebuild.amazonaws.com"]),
                    )
                ],
            ),
        ))
    version_parameter = ssm.Parameter(
        "versionparameter",
        Name=f"{ssm_parameter_prefix}/version",
        Type="String",
        Value=version,
    )
    t.add_resource(version_parameter)
    project = t.add_resource(
        codebuild.Project(
            "AWSOrganizedMigrate",
            Artifacts=codebuild.Artifacts(Type="CODEPIPELINE"),
            Environment=codebuild.Environment(
                ComputeType="BUILD_GENERAL1_SMALL",
                Image="aws/codebuild/standard:4.0",
                Type="LINUX_CONTAINER",
                EnvironmentVariables=[
                    {
                        "Name": "MIGRATE_ROLE_ARN",
                        "Type": "PLAINTEXT",
                        "Value": migrate_role_arn,
                    },
                    {
                        "Name": "Version",
                        "Type": "PARAMETER_STORE",
                        "Value": troposphere.Ref(version_parameter),
                    },
                    {
                        "Name": "SSM_PARAMETER_PREFIX",
                        "Type": "PLAINTEXT",
                        "Value": ssm_parameter_prefix,
                    },
                ],
            ),
            Name=project_name,
            ServiceRole=troposphere.GetAtt(codebuild_role, "Arn"),
            Source=codebuild.Source(
                Type="CODEPIPELINE",
                BuildSpec=yaml.safe_dump(
                    dict(
                        version="0.2",
                        phases=dict(
                            install={
                                "runtime-versions":
                                dict(python="3.8"),
                                "commands":
                                ["pip install aws-organized==${Version}"],
                            },
                            build={
                                "commands": [
                                    "aws-organized migrate --ssm-parameter-prefix $SSM_PARAMETER_PREFIX $MIGRATE_ROLE_ARN"
                                ]
                            },
                        ),
                        artifacts=dict(files=["environment"]),
                    )),
            ),
        ))
    source_actions = dict(
        codecommit=codepipeline.Actions(
            Name="SourceAction",
            ActionTypeId=codepipeline.ActionTypeId(Category="Source",
                                                   Owner="AWS",
                                                   Version="1",
                                                   Provider="CodeCommit"),
            OutputArtifacts=[
                codepipeline.OutputArtifacts(Name="SourceOutput")
            ],
            Configuration={
                "RepositoryName": scm_full_repository_id,
                "BranchName": scm_branch_name,
                "PollForSourceChanges": "true",
            },
            RunOrder="1",
        ),
        codestarsourceconnection=codepipeline.Actions(
            Name="SourceAction",
            ActionTypeId=codepipeline.ActionTypeId(
                Category="Source",
                Owner="AWS",
                Version="1",
                Provider="CodeStarSourceConnection",
            ),
            OutputArtifacts=[
                codepipeline.OutputArtifacts(Name="SourceOutput")
            ],
            Configuration={
                "ConnectionArn": scm_connection_arn,
                "FullRepositoryId": scm_full_repository_id,
                "BranchName": scm_branch_name,
                "OutputArtifactFormat": "CODE_ZIP",
            },
            RunOrder="1",
        ),
        s3=codepipeline.Actions(
            Name="SourceAction",
            ActionTypeId=codepipeline.ActionTypeId(Category="Source",
                                                   Owner="AWS",
                                                   Version="1",
                                                   Provider="S3"),
            OutputArtifacts=[
                codepipeline.OutputArtifacts(Name="SourceOutput")
            ],
            Configuration={
                "S3Bucket": bucket_name,
                "S3ObjectKey": scm_object_key,
                "PollForSourceChanges": True,
            },
            RunOrder="1",
        ),
    ).get(scm_provider.lower())
    t.add_resource(
        codepipeline.Pipeline(
            "Pipeline",
            RoleArn=troposphere.GetAtt(codepipeline_role, "Arn"),
            Stages=[
                codepipeline.Stages(Name="Source", Actions=[source_actions]),
                codepipeline.Stages(
                    Name="Migrate",
                    Actions=[
                        codepipeline.Actions(
                            Name="Migrate",
                            InputArtifacts=[
                                codepipeline.InputArtifacts(
                                    Name="SourceOutput")
                            ],
                            ActionTypeId=codepipeline.ActionTypeId(
                                Category="Build",
                                Owner="AWS",
                                Version="1",
                                Provider="CodeBuild",
                            ),
                            Configuration={
                                "ProjectName": troposphere.Ref(project),
                                "PrimarySource": "SourceAction",
                            },
                            RunOrder="1",
                        )
                    ],
                ),
            ],
            ArtifactStore=codepipeline.ArtifactStore(
                Type="S3", Location=troposphere.Ref(artifact_store)),
        ))
    return t
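
A minimal usage sketch for the builder above. Its real name and full signature are not shown in this excerpt, so build_pipeline_template and the arguments below are assumptions; Template.to_yaml() is the actual troposphere call used to render the result.

# Hedged usage sketch: build_pipeline_template and its arguments are assumed
# names; only Template.to_yaml() is a real troposphere API.
template = build_pipeline_template(
    scm_provider="codecommit",
    scm_full_repository_id="aws-organized",
    scm_branch_name="main",
)
with open("migrate-pipeline.template.yaml", "w") as handle:
    handle.write(template.to_yaml())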
Example #15
    def create_codebuild_cfn(self, template, pipeline_config, action_config,
                             config_ref):
        # CodeBuild
        compute_type_param = self.create_cfn_parameter(
            param_type='String',
            name='CodeBuildComputeType',
            description=
            'The type of compute environment. This determines the number of CPU cores and memory the build environment uses.',
            value=action_config.codebuild_compute_type,
        )
        image_param = self.create_cfn_parameter(
            param_type='String',
            name='CodeBuildImage',
            description=
            'The image tag or image digest that identifies the Docker image to use for this build project.',
            value=action_config.codebuild_image,
        )
        deploy_env_name_param = self.create_cfn_parameter(
            param_type='String',
            name='DeploymentEnvironmentName',
            description=
            'The name of the environment codebuild will be deploying into.',
            value=action_config.deployment_environment,
        )
        # If ECS Release Phase, then create the needed parameters
        release_phase = action_config.release_phase
        ecs_release_phase_cluster_arn_param = []
        ecs_release_phase_cluster_name_param = []
        ecs_release_phase_service_arn_param = []
        if release_phase != None and release_phase.ecs != None:
            idx = 0
            for command in release_phase.ecs:
                service_obj = get_model_obj_from_ref(command.service,
                                                     self.paco_ctx.project)
                service_obj = get_parent_by_interface(service_obj,
                                                      schemas.IECSServices)
                cluster_arn_param = self.create_cfn_parameter(
                    param_type='String',
                    name=f'ReleasePhaseECSClusterArn{idx}',
                    description='ECS Cluster Arn',
                    value=service_obj.cluster + '.arn',
                )
                ecs_release_phase_cluster_arn_param.append(cluster_arn_param)
                cluster_name_param = self.create_cfn_parameter(
                    param_type='String',
                    name=f'ReleasePhaseECSClusterName{idx}',
                    description='ECS Cluster Name',
                    value=service_obj.cluster + '.name',
                )
                ecs_release_phase_cluster_name_param.append(cluster_name_param)
                service_arn_param = self.create_cfn_parameter(
                    param_type='String',
                    name=f'ReleasePhaseECSServiceArn{idx}',
                    description='ECS Service Arn',
                    value=command.service + '.arn',
                )
                ecs_release_phase_service_arn_param.append(service_arn_param)
                idx += 1
        self.project_role_name = self.create_iam_resource_name(
            name_list=[self.res_name_prefix, 'CodeBuild-Project'],
            filter_id='IAM.Role.RoleName')

        # codecommit_repo_users ManagedPolicies
        managed_policy_arns = []
        for user_ref in action_config.codecommit_repo_users:
            user = get_model_obj_from_ref(user_ref, self.paco_ctx.project)
            # codecommit_stack = user.__parent__.__parent__.__parent__.stack
            user_logical_id = self.gen_cf_logical_name(user.username)
            codecommit_user_policy_param = self.create_cfn_parameter(
                param_type='String',
                name='CodeCommitUserPolicy' + user_logical_id,
                description='The CodeCommit User Policy for ' + user.username,
                value=user_ref + '.policy.arn',
            )
            managed_policy_arns.append(
                troposphere.Ref(codecommit_user_policy_param))

        project_role_res = troposphere.iam.Role(
            title='CodeBuildProjectRole',
            template=template,
            RoleName=self.project_role_name,
            ManagedPolicyArns=managed_policy_arns,
            AssumeRolePolicyDocument=PolicyDocument(
                Version="2012-10-17",
                Statement=[
                    Statement(
                        Effect=Allow,
                        Action=[AssumeRole],
                        Principal=Principal("Service",
                                            ['codebuild.amazonaws.com']),
                    )
                ]))

        project_policy_name = self.create_iam_resource_name(
            name_list=[self.res_name_prefix, 'CodeBuild-Project'],
            filter_id='IAM.Policy.PolicyName')

        # Project Policy
        policy_statements = []
        if pipeline_config.configuration.disable_codepipeline == False:
            policy_statements.extend([
                Statement(
                    Sid='S3Access',
                    Effect=Allow,
                    Action=[
                        Action('s3', 'PutObject'),
                        Action('s3', 'PutObjectAcl'),
                        Action('s3', 'GetObject'),
                        Action('s3', 'GetObjectAcl'),
                        Action('s3', 'ListBucket'),
                        Action('s3', 'DeleteObject'),
                        Action('s3', 'GetBucketPolicy'),
                        Action('s3', 'HeadObject'),
                    ],
                    Resource=[
                        troposphere.Sub('arn:aws:s3:::${ArtifactsBucketName}'),
                        troposphere.Sub(
                            'arn:aws:s3:::${ArtifactsBucketName}/*'),
                    ]),
                Statement(Sid='KMSCMK',
                          Effect=Allow,
                          Action=[Action('kms', '*')],
                          Resource=[troposphere.Ref(self.cmk_arn_param)])
            ])
        policy_statements.append(
            Statement(Sid='CloudWatchLogsAccess',
                      Effect=Allow,
                      Action=[
                          Action('logs', 'CreateLogGroup'),
                          Action('logs', 'CreateLogStream'),
                          Action('logs', 'PutLogEvents'),
                      ],
                      Resource=['arn:aws:logs:*:*:*']))

        release_phase = action_config.release_phase
        if release_phase != None and release_phase.ecs != None:
            ssm_doc = self.paco_ctx.project['resource']['ssm'].ssm_documents[
                'paco_ecs_docker_exec']
            # SSM Exec Document
            policy_statements.append(
                Statement(Sid='ECSReleasePhaseSSMCore',
                          Effect=Allow,
                          Action=[
                              Action('ssm', 'ListDocuments'),
                              Action('ssm', 'ListDocumentVersions'),
                              Action('ssm', 'DescribeDocument'),
                              Action('ssm', 'GetDocument'),
                              Action('ssm', 'DescribeInstanceInformation'),
                              Action('ssm', 'DescribeDocumentParameters'),
                              Action('ssm', 'CancelCommand'),
                              Action('ssm', 'ListCommands'),
                              Action('ssm', 'ListCommandInvocations'),
                              Action('ssm', 'DescribeAutomationExecutions'),
                              Action('ssm', 'DescribeInstanceProperties'),
                              Action('ssm', 'GetCommandInvocation'),
                              Action('ec2', 'DescribeInstanceStatus'),
                          ],
                          Resource=['*']))
            policy_statements.append(
                Statement(
                    Sid=f'ECSReleasePhaseSSMSendCommandDocument',
                    Effect=Allow,
                    Action=[
                        Action('ssm', 'SendCommand'),
                    ],
                    Resource=[
                        f'arn:aws:ssm:{self.aws_region}:{self.account_ctx.get_id()}:document/paco_ecs_docker_exec'
                    ]))
            idx = 0
            for command in release_phase.ecs:
                policy_statements.append(
                    Statement(
                        Sid=f'ECSReleasePhaseSSMSendCommand{idx}',
                        Effect=Allow,
                        Action=[
                            Action('ssm', 'SendCommand'),
                        ],
                        Resource=[f'arn:aws:ec2:*:*:instance/*'],
                        Condition=Condition(
                            StringLike({
                                'ssm:resourceTag/Paco-ECSCluster-Name':
                                troposphere.Ref(
                                    ecs_release_phase_cluster_name_param[idx])
                            }))))

                policy_statements.append(
                    Statement(
                        Sid=f'ECSReleasePhaseClusterAccess{idx}',
                        Effect=Allow,
                        Action=[
                            Action('ecs', 'DescribeServices'),
                            Action('ecs', 'RunTask'),
                            Action('ecs', 'StopTask'),
                            Action('ecs', 'DescribeContainerInstances'),
                            Action('ecs', 'ListTasks'),
                            Action('ecs', 'DescribeTasks'),
                        ],
                        Resource=['*'],
                        Condition=Condition(
                            StringEquals({
                                'ecs:cluster':
                                troposphere.Ref(
                                    ecs_release_phase_cluster_arn_param[idx])
                            }))))
                idx += 1

            policy_statements.append(
                Statement(Sid='ECSReleasePhaseSSMAutomationExecution',
                          Effect=Allow,
                          Action=[
                              Action('ssm', 'StartAutomationExecution'),
                              Action('ssm', 'StopAutomationExecution'),
                              Action('ssm', 'GetAutomationExecution'),
                          ],
                          Resource=['arn:aws:ssm:::automation-definition/']))
            # ECS Policies
            policy_statements.append(
                Statement(Sid='ECSReleasePhaseECS',
                          Effect=Allow,
                          Action=[
                              Action('ecs', 'DescribeTaskDefinition'),
                              Action('ecs', 'DeregisterTaskDefinition'),
                              Action('ecs', 'RegisterTaskDefinition'),
                              Action('ecs', 'ListTagsForResource'),
                              Action('ecr', 'DescribeImages')
                          ],
                          Resource=['*']))

            # IAM Pass Role
            policy_statements.append(
                Statement(Sid='IAMPassRole',
                          Effect=Allow,
                          Action=[Action('iam', 'passrole')],
                          Resource=['*']))

        if len(action_config.secrets) > 0:
            secrets_arn_list = []
            for secret_ref in action_config.secrets:
                name_hash = md5sum(str_data=secret_ref)
                secret_arn_param = self.create_cfn_parameter(
                    param_type='String',
                    name='SecretsArn' + name_hash,
                    description=
                    'Secrets Manager Secret Arn to expose access to',
                    value=secret_ref + '.arn')
                secrets_arn_list.append(troposphere.Ref(secret_arn_param))
            policy_statements.append(
                Statement(Sid='SecretsManager',
                          Effect=Allow,
                          Action=[
                              Action('secretsmanager', 'GetSecretValue'),
                          ],
                          Resource=secrets_arn_list))

        project_policy_res = troposphere.iam.PolicyType(
            title='CodeBuildProjectPolicy',
            PolicyName=project_policy_name,
            PolicyDocument=PolicyDocument(Statement=policy_statements),
            Roles=[troposphere.Ref(project_role_res)])
        template.add_resource(project_policy_res)

        # User defined policies
        for policy in action_config.role_policies:
            policy_name = self.create_resource_name_join(
                name_list=[
                    self.res_name_prefix, 'CodeBuild-Project', policy.name
                ],
                separator='-',
                filter_id='IAM.Policy.PolicyName',
                hash_long_names=True,
                camel_case=True)
            statement_list = []

            for statement in policy.statement:
                action_list = []
                for action in statement.action:
                    action_parts = action.split(':')
                    action_list.append(Action(action_parts[0],
                                              action_parts[1]))
                statement_list.append(
                    Statement(Effect=statement.effect,
                              Action=action_list,
                              Resource=statement.resource))
            troposphere.iam.PolicyType(
                title=self.create_cfn_logical_id('CodeBuildProjectPolicy' +
                                                 policy.name,
                                                 camel_case=True),
                template=template,
                PolicyName=policy_name,
                PolicyDocument=PolicyDocument(Statement=statement_list, ),
                Roles=[troposphere.Ref(project_role_res)])

        # ECR Permission Policies
        self.set_ecr_repositories_statements(
            action_config.ecr_repositories, template,
            f'{self.res_name_prefix}-CodeBuild-Project',
            [troposphere.Ref(project_role_res)])

        # CodeBuild Project Resource
        timeout_mins_param = self.create_cfn_parameter(
            param_type='String',
            name='TimeoutInMinutes',
            description=
            'How long, in minutes, from 5 to 480 (8 hours), for AWS CodeBuild to wait before timing out any related build that did not get marked as completed.',
            value=action_config.timeout_mins,
        )

        # Environment Variables
        codebuild_env_vars = [{
            'Name': 'DeploymentEnvironmentName',
            'Value': troposphere.Ref(deploy_env_name_param)
        }]
        if pipeline_config.configuration.disable_codepipeline == False:
            codebuild_env_vars.extend([{
                'Name':
                'ArtifactsBucket',
                'Value':
                troposphere.Ref(self.artifacts_bucket_name_param),
            }, {
                'Name':
                'KMSKey',
                'Value':
                troposphere.Ref(self.cmk_arn_param)
            }])
        # If ECS Release Phase, then add the config to the environment
        release_phase = action_config.release_phase
        if release_phase != None and release_phase.ecs != None:
            idx = 0
            for command in release_phase.ecs:
                codebuild_env_vars.append({
                    'Name':
                    f'PACO_CB_RP_ECS_CLUSTER_ID_{idx}',
                    'Value':
                    troposphere.Ref(ecs_release_phase_cluster_arn_param[idx])
                })
                codebuild_env_vars.append({
                    'Name':
                    f'PACO_CB_RP_ECS_SERVICE_ID_{idx}',
                    'Value':
                    troposphere.Ref(ecs_release_phase_service_arn_param[idx])
                })
                idx += 1

        # CodeBuild: Source
        source = troposphere.codebuild.Source(Type='CODEPIPELINE', )
        if action_config.buildspec != None and action_config.buildspec != '':
            source = troposphere.codebuild.Source(
                Type='CODEPIPELINE',
                BuildSpec=action_config.buildspec,
            )

        project_dict = {
            'Name':
            troposphere.Ref(self.resource_name_prefix_param),
            'Artifacts': {
                'Type': 'NO_ARTIFACTS'
            },
            'Description':
            troposphere.Ref('AWS::StackName'),
            'ServiceRole':
            troposphere.GetAtt('CodeBuildProjectRole', 'Arn'),
            'Environment': {
                'Type': 'LINUX_CONTAINER',
                'ComputeType': troposphere.Ref(compute_type_param),
                'Image': troposphere.Ref(image_param),
                'EnvironmentVariables': codebuild_env_vars,
                'PrivilegedMode': action_config.privileged_mode
            },
            'Source': {
                'Type': 'NO_SOURCE'
            },
            'TimeoutInMinutes':
            troposphere.Ref(timeout_mins_param),
            'Tags':
            troposphere.codebuild.Tags(
                Name=troposphere.Ref(self.resource_name_prefix_param))
        }

        if action_config.buildspec:
            project_dict['Source']['BuildSpec'] = action_config.buildspec

        if pipeline_config.configuration.disable_codepipeline == False:
            project_dict['EncryptionKey'] = troposphere.Ref(self.cmk_arn_param)
            project_dict['Artifacts'] = {'Type': 'CODEPIPELINE'}
            project_dict['Source']['Type'] = 'CODEPIPELINE'
        elif action_config.source.github != None:
            project_dict['Source']['Type'] = 'GITHUB'
            project_dict['Source'][
                'Location'] = action_config.source.github.location
            project_dict['Source'][
                'ReportBuildStatus'] = action_config.source.github.report_build_status
        else:
            raise PacoException(
                "CodeBuild source must be configured when Codepipeline is disabled."
            )

        if action_config.concurrent_build_limit > 0:
            project_dict[
                'ConcurrentBuildLimit'] = action_config.concurrent_build_limit

        project_res = troposphere.codebuild.Project.from_dict(
            'CodeBuildProject', project_dict)
        self.template.add_resource(project_res)

        self.create_output(title='ProjectArn',
                           value=troposphere.GetAtt(project_res, 'Arn'),
                           description='CodeBuild Project Arn',
                           ref=config_ref + '.project.arn')

        return project_res
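
The from_dict call that closes this example is a general troposphere idiom: assemble the resource properties as a plain dict (so conditional keys are easy to add or drop), then convert the dict into a typed resource. A self-contained sketch of that idiom with illustrative values only; the GetAtt assumes a role resource titled CodeBuildProjectRole exists in the same template.

import troposphere
import troposphere.codebuild

# Illustrative sketch of the from_dict idiom; names and values are placeholders.
project_dict = {
    "Name": "example-project",
    "ServiceRole": troposphere.GetAtt("CodeBuildProjectRole", "Arn"),
    "Artifacts": {"Type": "NO_ARTIFACTS"},
    "Source": {"Type": "NO_SOURCE"},
    "Environment": {
        "Type": "LINUX_CONTAINER",
        "ComputeType": "BUILD_GENERAL1_SMALL",
        "Image": "aws/codebuild/standard:4.0",
    },
}
project_res = troposphere.codebuild.Project.from_dict("CodeBuildProject", project_dict)

template = troposphere.Template()
template.add_resource(project_res)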
Example #16
File: rds.py  Project: dwtcourses/paco
    def __init__(self, stack, paco_ctx,):
        rds_aurora = stack.resource
        super().__init__(stack, paco_ctx, iam_capabilities=["CAPABILITY_IAM"])
        self.set_aws_name('RDSAurora', self.resource_group_name, self.resource.name)
        self.init_template('RDSAurora')
        if not rds_aurora.is_enabled(): return

        rds_cluster_logical_id = 'DBCluster'
        db_cluster_dict = rds_aurora.cfn_export_dict
        self.notification_groups = {}

        # DB Subnet Group
        db_subnet_id_list_param = self.create_cfn_parameter(
            param_type='List<AWS::EC2::Subnet::Id>',
            name='DBSubnetIdList',
            description='The list of subnet IDs where this database will be provisioned.',
            value=rds_aurora.segment + '.subnet_id_list',
        )
        db_subnet_group_resource = troposphere.rds.DBSubnetGroup(
            title='DBSubnetGroup',
            template=self.template,
            DBSubnetGroupDescription=troposphere.Ref('AWS::StackName'),
            SubnetIds=troposphere.Ref(db_subnet_id_list_param),
        )
        db_cluster_dict['DBSubnetGroupName'] = troposphere.Ref(db_subnet_group_resource)

        # DB Cluster Parameter Group
        if rds_aurora.cluster_parameter_group == None:
            # If no Cluster Parameter Group supplied then create one
            param_group_family = gen_vocabulary.rds_engine_versions[rds_aurora.engine][rds_aurora.engine_version]['param_group_family']
            cluster_parameter_group_ref = troposphere.rds.DBClusterParameterGroup(
                "DBClusterParameterGroup",
                template=self.template,
                Family=param_group_family,
                Description=troposphere.Ref('AWS::StackName')
            )
        else:
            # Use existing Parameter Group
            cluster_parameter_group_ref = self.create_cfn_parameter(
                name='DBClusterParameterGroupName',
                param_type='String',
                description='DB Cluster Parameter Group Name',
                value=rds_aurora.cluster_parameter_group + '.name',
            )
        db_cluster_dict['DBClusterParameterGroupName'] = troposphere.Ref(cluster_parameter_group_ref)

        # Default DB Parameter Group
        need_db_pg = False
        default_instance = rds_aurora.default_instance
        for db_instance in rds_aurora.db_instances.values():
            if default_instance.parameter_group == None and db_instance.parameter_group == None:
                need_db_pg = True
        if need_db_pg:
            # create default DB Parameter Group
            param_group_family = gen_vocabulary.rds_engine_versions[rds_aurora.engine][rds_aurora.engine_version]['param_group_family']
            default_dbparametergroup_resource = troposphere.rds.DBParameterGroup(
                "DBParameterGroup",
                template=self.template,
                Family=param_group_family,
                Description=troposphere.Ref('AWS::StackName')
            )

        # Enhanced Monitoring Role
        need_monitoring_role = False
        for db_instance in rds_aurora.db_instances.values():
            enhanced_monitoring_interval = db_instance.get_value_or_default('enhanced_monitoring_interval_in_seconds')
            if enhanced_monitoring_interval != 0:
                need_monitoring_role = True
        if need_monitoring_role:
            enhanced_monitoring_role_resource = troposphere.iam.Role(
                title='MonitoringIAMRole',
                template=self.template,
                AssumeRolePolicyDocument=PolicyDocument(
                    Statement=[
                        Statement(
                            Effect=Allow,
                            Action=[Action("sts", "AssumeRole")],
                            Principal=Principal("Service", "monitoring.rds.amazonaws.com")
                        )
                    ]
                ),
                ManagedPolicyArns=["arn:aws:iam::aws:policy/service-role/AmazonRDSEnhancedMonitoringRole"],
                Path="/",
            )

        # DB Snapshot Identifier
        if rds_aurora.db_snapshot_identifier == '' or rds_aurora.db_snapshot_identifier == None:
            db_snapshot_id_enabled = False
        else:
            db_snapshot_id_enabled = True
        if db_snapshot_id_enabled == True:
            db_cluster_dict['SnapshotIdentifier'] = rds_aurora.db_snapshot_identifier

        # KMS-CMK key encryption
        if rds_aurora.enable_kms_encryption == True and db_snapshot_id_enabled == False:
            key_policy = Policy(
                Version='2012-10-17',
                Statement=[
                    Statement(
                        Effect=Allow,
                        Action=[Action('kms', '*'),],
                        Principal=Principal("AWS", [f'arn:aws:iam::{self.stack.account_ctx.id}:root']),
                        Resource=['*'],
                    ),
                    Statement(
                        Effect=Allow,
                        Action=[
                            awacs.kms.Encrypt,
                            awacs.kms.Decrypt,
                            Action('kms', 'ReEncrypt*'),
                            Action('kms', 'GenerateDataKey*'),
                            awacs.kms.CreateGrant,
                            awacs.kms.ListGrants,
                            awacs.kms.DescribeKey,
                        ],
                        Principal=Principal('AWS',['*']),
                        Resource=['*'],
                        Condition=Condition([
                            StringEquals({
                                'kms:CallerAccount': f'{self.stack.account_ctx.id}',
                                'kms:ViaService': f'rds.{self.stack.aws_region}.amazonaws.com'
                            })
                        ]),
                    ),
                ],
            )
            kms_key_resource = troposphere.kms.Key(
                title='AuroraKMSCMK',
                template=self.template,
                KeyPolicy=key_policy,
            )
            db_cluster_dict['StorageEncrypted'] = True
            db_cluster_dict['KmsKeyId'] = troposphere.Ref(kms_key_resource)

            kms_key_alias_resource = troposphere.kms.Alias(
                title="AuroraKMSCMKAlias",
                template=self.template,
                AliasName=troposphere.Sub('alias/${' + rds_cluster_logical_id + '}'),
                TargetKeyId=troposphere.Ref(kms_key_resource),
            )
            kms_key_alias_resource.DependsOn = rds_cluster_logical_id

        # Username and Password - only if there is no DB Snapshot Identifier
        if db_snapshot_id_enabled == False:
            db_cluster_dict['MasterUsername'] = rds_aurora.master_username
            if rds_aurora.secrets_password:
                # Password from Secrets Manager
                sta_logical_id = 'SecretTargetAttachmentRDS'
                secret_arn_param = self.create_cfn_parameter(
                    param_type='String',
                    name='RDSSecretARN',
                    description='The ARN for the secret for the RDS master password.',
                    value=rds_aurora.secrets_password + '.arn',
                )
                secret_target_attachment_resource = troposphere.secretsmanager.SecretTargetAttachment(
                    title=sta_logical_id,
                    template=self.template,
                    SecretId=troposphere.Ref(secret_arn_param),
                    TargetId=troposphere.Ref(rds_cluster_logical_id),
                    TargetType='AWS::RDS::DBCluster'
                )
                secret_target_attachment_resource.DependsOn = rds_cluster_logical_id
                db_cluster_dict['MasterUserPassword'] = troposphere.Join(
                    '',
                    ['{{resolve:secretsmanager:', troposphere.Ref(secret_arn_param), ':SecretString:password}}' ]
                )
            else:
                master_password_param = self.create_cfn_parameter(
                    param_type='String',
                    name='MasterUserPassword',
                    description='The master user password.',
                    value=rds_aurora.master_user_password,
                    noecho=True,
                )
                db_cluster_dict['MasterUserPassword'] = troposphere.Ref(master_password_param)

        db_cluster_res = troposphere.rds.DBCluster.from_dict(
            rds_cluster_logical_id,
            db_cluster_dict
        )
        self.template.add_resource(db_cluster_res)

        # Cluster Event Notifications
        if hasattr(rds_aurora, 'cluster_event_notifications'):
            for group in rds_aurora.cluster_event_notifications.groups:
                notif_param = self.create_notification_param(group)
                event_subscription_resource = troposphere.rds.EventSubscription(
                    title=self.create_cfn_logical_id(f"ClusterEventSubscription{group}"),
                    template=self.template,
                    EventCategories=rds_aurora.cluster_event_notifications.event_categories,
                    SourceIds=[troposphere.Ref(db_cluster_res)],
                    SnsTopicArn=troposphere.Ref(notif_param),
                    SourceType='db-cluster',
                )

        # DB Instance(s)
        for db_instance in rds_aurora.db_instances.values():
            logical_name = self.create_cfn_logical_id(db_instance.name)
            db_instance_dict = {
                'DBClusterIdentifier': troposphere.Ref(db_cluster_res),
                'DBInstanceClass': db_instance.get_value_or_default('db_instance_type'),
                'DBSubnetGroupName': troposphere.Ref(db_subnet_group_resource),
                'EnablePerformanceInsights': db_instance.get_value_or_default('enable_performance_insights'),
                'Engine': rds_aurora.engine,
                'PubliclyAccessible': db_instance.get_value_or_default('publicly_accessible'),
                'AllowMajorVersionUpgrade': db_instance.get_value_or_default('allow_major_version_upgrade'),
                'AutoMinorVersionUpgrade': db_instance.get_value_or_default('auto_minor_version_upgrade'),
            }
            enhanced_monitoring_interval = db_instance.get_value_or_default('enhanced_monitoring_interval_in_seconds')
            if enhanced_monitoring_interval != 0:
                db_instance_dict['MonitoringInterval'] = enhanced_monitoring_interval
                db_instance_dict['MonitoringRoleArn'] = troposphere.GetAtt(enhanced_monitoring_role_resource, "Arn")
            if db_instance.availability_zone != None:
                subnet_id_ref = f'{rds_aurora.segment}.az{db_instance.availability_zone}.availability_zone'
                db_instance_subnet_param = self.create_cfn_parameter(
                    param_type='String',
                    name=f'DBInstanceAZ{logical_name}',
                    description=f'Subnet where DB Instance {logical_name} is provisioned',
                    value=subnet_id_ref,
                )
                db_instance_dict['AvailabilityZone'] = troposphere.Ref(db_instance_subnet_param)

            # DB Parameter Group
            if default_instance.parameter_group == None and db_instance.parameter_group == None:
                dbparametergroup_resource = default_dbparametergroup_resource
            elif db_instance.parameter_group != None:
                # Use instance-specific DB Parameter Group
                dbparametergroup_resource = self.create_cfn_parameter(
                    name=f'DBParameterGroupName{logical_name}',
                    param_type='String',
                    description='DB Parameter Group Name',
                    value=db_instance.parameter_group + '.name',
                )
            else:
                # Use default DB Parameter Group
                dbparametergroup_resource = self.create_cfn_parameter(
                    name=f'DBParameterGroupName{logical_name}',
                    param_type='String',
                    description='DB Parameter Group Name',
                    value=default_instance.parameter_group + '.name',
                )
            db_instance_dict['DBParameterGroupName'] = troposphere.Ref(dbparametergroup_resource)

            db_instance_resource = troposphere.rds.DBInstance.from_dict(
                f'DBInstance{logical_name}',
                db_instance_dict
            )
            self.template.add_resource(db_instance_resource)

            # DB Event Notifications
            event_notifications = db_instance.get_value_or_default('event_notifications')
            if event_notifications != None:
                for group in event_notifications.groups:
                    notif_param = self.create_notification_param(group)
                    event_subscription_resource = troposphere.rds.EventSubscription(
                        title=self.create_cfn_logical_id(f"DBEventSubscription{logical_name}{group}"),
                        template=self.template,
                        EventCategories=event_notifications.event_categories,
                        SourceIds=[troposphere.Ref(db_instance_resource)],
                        SnsTopicArn=troposphere.Ref(notif_param),
                        SourceType='db-instance',
                    )

            # DB Instance Outputs
            self.create_output(
                title=f'DBInstanceName{logical_name}',
                description=f'DB Instance Name for {logical_name}',
                value=troposphere.Ref(db_instance_resource),
                ref=db_instance.paco_ref_parts + ".name",
            )

        # DB Cluster Outputs
        self.create_output(
            title='DBClusterName',
            description='DB Cluster Name',
            value=troposphere.Ref(db_cluster_res),
            ref=self.resource.paco_ref_parts + ".name",
        )
        self.create_output(
            title='ClusterEndpointAddress',
            description='Cluster Endpoint Address',
            value=troposphere.GetAtt(db_cluster_res, 'Endpoint.Address'),
            ref=self.resource.paco_ref_parts + ".endpoint.address",
        )
        self.create_output(
            title='ClusterEndpointPort',
            description='Cluster Endpoint Port',
            value=troposphere.GetAtt(db_cluster_res, 'Endpoint.Port'),
            ref=self.resource.paco_ref_parts + ".endpoint.port",
        )
        self.create_output(
            title='ClusterReadEndpointAddress',
            description='Cluster ReadEndpoint Address',
            value=troposphere.GetAtt(db_cluster_res, 'ReadEndpoint.Address'),
            ref=self.resource.paco_ref_parts + ".readendpoint.address",
        )

        # DNS - Route53 Record Set
        if rds_aurora.is_dns_enabled() == True:
            route53_ctl = self.paco_ctx.get_controller('route53')
            for dns in rds_aurora.dns:
                route53_ctl.add_record_set(
                    self.account_ctx,
                    self.aws_region,
                    rds_aurora,
                    enabled=rds_aurora.is_enabled(),
                    dns=dns,
                    record_set_type='CNAME',
                    resource_records=[rds_aurora.paco_ref + '.endpoint.address'],
                    stack_group=self.stack.stack_group,
                )
            for read_dns in rds_aurora.read_dns:
                route53_ctl.add_record_set(
                    self.account_ctx,
                    self.aws_region,
                    rds_aurora,
                    enabled=rds_aurora.is_enabled(),
                    dns=read_dns,
                    record_set_type='CNAME',
                    resource_records=[rds_aurora.paco_ref + '.readendpoint.address'],
                    stack_group=self.stack.stack_group,
                )
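
One detail worth isolating from the example above is how MasterUserPassword is wired when a Secrets Manager secret is used: the template never contains the password, only a CloudFormation dynamic reference assembled with Join. A standalone sketch of that string construction; the parameter here is illustrative and stands in for the create_cfn_parameter call used above.

import troposphere

# Illustrative: a {{resolve:secretsmanager:...}} dynamic reference built from a
# secret ARN parameter, matching the MasterUserPassword handling above.
secret_arn_param = troposphere.Parameter("RDSSecretARN", Type="String")
master_user_password = troposphere.Join(
    "",
    ["{{resolve:secretsmanager:", troposphere.Ref(secret_arn_param), ":SecretString:password}}"],
)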
Example #17
    def __init__(self, paco_ctx, account_ctx, aws_region, stack_group,
                 stack_tags, app_id, grp_id, res_id, factory_name,
                 cloudfront_config, config_ref):
        super().__init__(paco_ctx,
                         account_ctx,
                         aws_region,
                         enabled=cloudfront_config.is_enabled(),
                         config_ref=config_ref,
                         stack_group=stack_group,
                         stack_tags=stack_tags,
                         change_protected=cloudfront_config.change_protected)
        self.set_aws_name('CloudFront', grp_id, res_id, factory_name)
        origin_access_id_enabled = False

        self.init_template('CloudFront Distribution')
        template = self.template

        target_origin_param = self.create_cfn_parameter(
            param_type='String',
            name='TargetOrigin',
            description='Target Origin',
            value=cloudfront_config.default_cache_behavior.target_origin,
        )

        distribution_config_dict = {
            'Enabled': cloudfront_config.is_enabled(),
            'DefaultRootObject': cloudfront_config.default_root_object,
            'HttpVersion': 'http1.1',
            'DefaultCacheBehavior': {
                'AllowedMethods':
                cloudfront_config.default_cache_behavior.allowed_methods,
                'DefaultTTL':
                cloudfront_config.default_cache_behavior.default_ttl,
                'TargetOriginId':
                troposphere.Ref(target_origin_param),
                'ViewerProtocolPolicy':
                cloudfront_config.default_cache_behavior.viewer_protocol_policy
            },
            'PriceClass': 'PriceClass_' + cloudfront_config.price_class,
            'ViewerCertificate': {
                'AcmCertificateArn':
                self.paco_ctx.get_ref('paco.ref ' + self.config_ref +
                                      '.viewer_certificate.arn'),
                'SslSupportMethod':
                cloudfront_config.viewer_certificate.ssl_supported_method,
                'MinimumProtocolVersion':
                cloudfront_config.viewer_certificate.minimum_protocol_version
            }
        }
        if cloudfront_config.default_cache_behavior.min_ttl != -1:
            distribution_config_dict['DefaultCacheBehavior'][
                'MinTTL'] = cloudfront_config.default_cache_behavior.min_ttl
        if cloudfront_config.default_cache_behavior.max_ttl != -1:
            distribution_config_dict['DefaultCacheBehavior'][
                'MaxTTL'] = cloudfront_config.default_cache_behavior.max_ttl

        # Domain Aliases and Record Sets
        aliases_list = []
        aliases_param_map = {}
        for alias in cloudfront_config.domain_aliases:
            alias_hash = utils.md5sum(str_data=alias.domain_name)
            domain_name_param = 'DomainAlias' + alias_hash
            alias_param = self.create_cfn_parameter(
                param_type='String',
                name=domain_name_param,
                description='Domain Alias CNAME',
                value=alias.domain_name)
            aliases_list.append(troposphere.Ref(alias_param))
            aliases_param_map[alias.domain_name] = alias_param

        distribution_config_dict['Aliases'] = aliases_list

        # DefaultCacheBehavior
        # Forward Values
        forwarded_values_config = cloudfront_config.default_cache_behavior.forwarded_values
        forwarded_values_dict = {
            'Cookies': {
                'Forward': 'none',
            },
            'QueryString': str(forwarded_values_config.query_string)
        }
        # Cookies
        if cloudfront_config.s3_origin_exists() == False:
            forwarded_values_dict['Cookies'][
                'Forward'] = forwarded_values_config.cookies.forward
        if len(forwarded_values_config.cookies.whitelisted_names) > 0:
            forwarded_values_dict['Cookies'][
                'WhitelistedNames'] = forwarded_values_config.cookies.whitelisted_names
        # Headers
        if cloudfront_config.s3_origin_exists() == False:
            forwarded_values_dict[
                'Headers'] = cloudfront_config.default_cache_behavior.forwarded_values.headers
        distribution_config_dict['DefaultCacheBehavior'][
            'ForwardedValues'] = forwarded_values_dict

        # Cache Behaviors
        if len(cloudfront_config.cache_behaviors) > 0:
            cache_behaviors_list = []
            target_origin_param_map = {}
            for cache_behavior in cloudfront_config.cache_behaviors:
                target_origin_hash = utils.md5sum(
                    str_data=cache_behavior.target_origin)
                if target_origin_hash not in target_origin_param_map.keys():
                    cb_target_origin_param = self.create_cfn_parameter(
                        param_type='String',
                        name=self.create_cfn_logical_id(
                            'TargetOriginCacheBehavior' + target_origin_hash),
                        description='Target Origin',
                        value=cache_behavior.target_origin,
                    )
                    target_origin_param_map[
                        target_origin_hash] = cb_target_origin_param
                else:
                    cb_target_origin_param = target_origin_param_map[
                        target_origin_hash]

                cache_behavior_dict = {
                    'PathPattern': cache_behavior.path_pattern,
                    'AllowedMethods': cache_behavior.allowed_methods,
                    'DefaultTTL': cache_behavior.default_ttl,
                    'TargetOriginId': troposphere.Ref(cb_target_origin_param),
                    'ViewerProtocolPolicy':
                    cache_behavior.viewer_protocol_policy
                }
                cb_forwarded_values_config = cache_behavior.forwarded_values
                cb_forwarded_values_dict = {
                    'Cookies': {
                        'Forward': 'none',
                    },
                    'QueryString': str(cb_forwarded_values_config.query_string)
                }
                # Cookies
                cb_forwarded_values_dict['Cookies'][
                    'Forward'] = cb_forwarded_values_config.cookies.forward
                if len(cb_forwarded_values_config.cookies.whitelisted_names
                       ) > 0:
                    cb_forwarded_values_dict['Cookies'][
                        'WhitelistedNames'] = cb_forwarded_values_config.cookies.whitelisted_names
                # Headers
                if cloudfront_config.s3_origin_exists() == False:
                    cb_forwarded_values_dict[
                        'Headers'] = cache_behavior.forwarded_values.headers
                cache_behavior_dict[
                    'ForwardedValues'] = cb_forwarded_values_dict
                cache_behaviors_list.append(cache_behavior_dict)

            distribution_config_dict['CacheBehaviors'] = cache_behaviors_list

        # Origin Access Identity
        if cloudfront_config.s3_origin_exists() == True:
            origin_id_res = troposphere.cloudfront.CloudFrontOriginAccessIdentity(
                title='CloudFrontOriginAccessIdentity',
                template=template,
                CloudFrontOriginAccessIdentityConfig=troposphere.cloudfront.
                CloudFrontOriginAccessIdentityConfig(
                    Comment=troposphere.Ref('AWS::StackName')))
            troposphere.Output(title='CloudFrontOriginAccessIdentity',
                               template=template,
                               Value=troposphere.Ref(origin_id_res))

        # Origins
        origins_list = []
        for origin_name, origin in cloudfront_config.origins.items():
            if origin.s3_bucket != None:
                domain_hash = utils.md5sum(str_data=origin.s3_bucket)
                origin_domain_name = self.paco_ctx.get_ref(origin.s3_bucket +
                                                           '.url')
            else:
                domain_hash = utils.md5sum(str_data=origin.domain_name)
                origin_domain_name = origin.domain_name
            origin_dict = {'Id': origin_name, 'DomainName': origin_domain_name}
            if origin.s3_bucket == None:
                origin_dict['CustomOriginConfig'] = {
                    'HTTPSPort': origin.custom_origin_config.https_port,
                    'OriginKeepaliveTimeout':
                    origin.custom_origin_config.keepalive_timeout,
                    'OriginProtocolPolicy':
                    origin.custom_origin_config.protocol_policy,
                    'OriginReadTimeout':
                    origin.custom_origin_config.read_timeout,
                    'OriginSSLProtocols':
                    origin.custom_origin_config.ssl_protocols
                }
                if origin.custom_origin_config.http_port:
                    origin_dict['CustomOriginConfig']['HTTPPort'] = str(
                        origin.custom_origin_config.http_port)
            else:
                s3_config = self.paco_ctx.get_ref(origin.s3_bucket)
                origin_dict['S3OriginConfig'] = {}
                if s3_config.cloudfront_origin == False:
                    origin_dict['S3OriginConfig']['OriginAccessIdentity'] = ''
                else:
                    origin_access_id_enabled = True
                    param_name = "OriginAccessIdentiy" + domain_hash
                    access_id_ref = origin.s3_bucket + '.origin_id'
                    s3_cf_origin_id_param = self.create_cfn_parameter(
                        param_type='String',
                        name=param_name,
                        description='Origin Access Identity',
                        value=access_id_ref,
                    )
                    origin_dict['S3OriginConfig'][
                        'OriginAccessIdentity'] = troposphere.Sub(
                            'origin-access-identity/cloudfront/${OriginAccessId}',
                            {
                                'OriginAccessId':
                                troposphere.Ref(s3_cf_origin_id_param)
                            })
            origins_list.append(origin_dict)
        distribution_config_dict['Origins'] = origins_list

        # Custom Error
        error_resp_list = []
        for error_resp in cloudfront_config.custom_error_responses:
            error_resp_dict = {
                'ErrorCachingMinTTL': error_resp.error_caching_min_ttl,
                'ErrorCode': error_resp.error_code,
                'ResponseCode': error_resp.response_code,
                'ResponsePagePath': error_resp.response_page_path
            }
            error_resp_list.append(error_resp_dict)
        if len(error_resp_list) > 0:
            distribution_config_dict['CustomErrorResponses'] = error_resp_list

        # Web ACL
        if cloudfront_config.webacl_id != None:
            webacl_id_param = self.create_cfn_parameter(
                param_type='String',
                name='WebAclId',
                description='WAF Web Acl ID',
                value=cloudfront_config.webacl_id)
            distribution_config_dict['WebACLId'] = troposphere.Ref(
                webacl_id_param)

        distribution_dict = {'DistributionConfig': distribution_config_dict}
        distribution_res = troposphere.cloudfront.Distribution.from_dict(
            'Distribution', distribution_dict)

        if self.paco_ctx.legacy_flag('route53_record_set_2019_10_16') == True:
            if cloudfront_config.is_dns_enabled() == True:
                for alias in cloudfront_config.domain_aliases:
                    alias_hash = utils.md5sum(str_data=alias.domain_name)
                    zone_param_name = 'AliasHostedZoneId' + alias_hash
                    alias_zone_id_param = self.create_cfn_parameter(
                        param_type='String',
                        name=zone_param_name,
                        description='Domain Alias Hosted Zone Id',
                        value=alias.hosted_zone + '.id',
                    )
                    record_set_res = troposphere.route53.RecordSetType(
                        title=self.create_cfn_logical_id_join(
                            ['RecordSet', alias_hash]),
                        template=template,
                        HostedZoneId=troposphere.Ref(alias_zone_id_param),
                        Name=troposphere.Ref(
                            aliases_param_map[alias.domain_name]),
                        Type='A',
                        AliasTarget=troposphere.route53.AliasTarget(
                            DNSName=troposphere.GetAtt(distribution_res,
                                                       'DomainName'),
                            HostedZoneId='Z2FDTNDATAQYW2'))
                    record_set_res.DependsOn = distribution_res

        self.create_output(title='CloudFrontURL',
                           value=troposphere.GetAtt('Distribution',
                                                    'DomainName'),
                           ref=self.config_ref + '.domain_name')
        self.create_output(title='CloudFrontId',
                           value=troposphere.Ref(distribution_res),
                           ref=self.config_ref + '.id')

        template.add_resource(distribution_res)

        self.set_template()
        if origin_access_id_enabled:
            self.stack.wait_for_delete = True

        if self.paco_ctx.legacy_flag('route53_record_set_2019_10_16') == False:
            route53_ctl = self.paco_ctx.get_controller('route53')
            if cloudfront_config.is_dns_enabled() == True:
                for alias in cloudfront_config.domain_aliases:
                    route53_ctl.add_record_set(
                        self.account_ctx,
                        self.aws_region,
                        enabled=cloudfront_config.is_enabled(),
                        dns=alias,
                        record_set_type='Alias',
                        alias_dns_name='paco.ref ' + self.config_ref +
                        '.domain_name',
                        alias_hosted_zone_id='Z2FDTNDATAQYW2',
                        stack_group=self.stack_group,
                        config_ref=self.config_ref + '.record_set')
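
The S3 origin handling above uses Sub with an explicit substitution map to build the OriginAccessIdentity string. A self-contained sketch of that call shape; the parameter name is illustrative.

import troposphere

# Illustrative: Sub with a substitution map, as used for S3OriginConfig above.
origin_access_id_param = troposphere.Parameter("OriginAccessId", Type="String")
origin_access_identity = troposphere.Sub(
    "origin-access-identity/cloudfront/${OriginAccessId}",
    {"OriginAccessId": troposphere.Ref(origin_access_id_param)},
)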
Example #18
def get_resources() -> list:
    all_regions = config.get_regions()
    return [
        codebuild.Project(
            "CDKPackage100",
            Name=CDK_PACKAGE_PROJECT_NAME,
            ServiceRole=t.Sub(
                "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/servicecatalog-product-factory/DeliveryCodeRole"
            ),
            Tags=t.Tags.from_dict(
                **{"ServiceCatalogPuppet:Actor": "Framework"}),
            Artifacts=codebuild.Artifacts(Type="CODEPIPELINE"),
            TimeoutInMinutes=60,
            Environment=codebuild.Environment(
                ComputeType=constants.ENVIRONMENT_COMPUTE_TYPE_DEFAULT,
                Image=constants.ENVIRONMENT_IMAGE_DEFAULT,
                Type=constants.ENVIRONMENT_TYPE_DEFAULT,
                EnvironmentVariables=[
                    {
                        "Type": "PLAINTEXT",
                        "Name": "ACCOUNT_ID",
                        "Value": t.Sub("${AWS::AccountId}"),
                    },
                    {
                        "Type": "PLAINTEXT",
                        "Name": "NAME",
                        "Value": "CHANGE_ME"
                    },
                    {
                        "Type": "PLAINTEXT",
                        "Name": "VERSION",
                        "Value": "CHANGE_ME"
                    },
                    {
                        "Type": "PLAINTEXT",
                        "Name": "CODEPIPELINE_ID",
                        "Value": "CHANGE_ME",
                    },
                    {
                        "Type": "PLAINTEXT",
                        "Name": "PIPELINE_NAME",
                        "Value": "CHANGE_ME",
                    },
                    {
                        "Type": "PLAINTEXT",
                        "Name": "TEMPLATE_FORMAT",
                        "Value": "CHANGE_ME",
                    },
                ],
            ),
            Source=codebuild.Source(
                BuildSpec=t.Sub(
                    yaml.safe_dump(
                        dict(
                            version=0.2,
                            phases=dict(build={
                                "commands": [
                                    'zip -r $NAME-$VERSION.zip . -x "node_modules/*"'
                                ] + [
                                    f"aws cloudformation package --region {region} --template $(pwd)/product.template.yaml --s3-bucket sc-factory-artifacts-$ACCOUNT_ID-{region} --s3-prefix /CDK/1.0.0/$NAME/$VERSION --output-template-file product.template-{region}.yaml"
                                    for region in all_regions
                                ] + [
                                    f"aws s3 cp --quiet $NAME-$VERSION.zip s3://sc-factory-artifacts-$ACCOUNT_ID-{region}/CDK/1.0.0/$NAME/$VERSION/$NAME-$VERSION.zip"
                                    for region in all_regions
                                ]
                            }, ),
                            artifacts={
                                "name": PACKAGE_OUTPUT_ARTIFACT,
                                "files": ["product.template-*.yaml"],
                            },
                        ))),
                Type="CODEPIPELINE",
            ),
            Description=t.Sub("Create a build stage for template CDK 1.0.0"),
        ),
        codebuild.Project(
            "CDKDeploy100",
            Name=CDK_DEPLOY_PROJECT_NAME,
            ServiceRole=t.Sub(
                "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/servicecatalog-product-factory/DeliveryCodeRole"
            ),
            Tags=t.Tags.from_dict(
                **{"ServiceCatalogPuppet:Actor": "Framework"}),
            Artifacts=codebuild.Artifacts(Type="CODEPIPELINE"),
            TimeoutInMinutes=60,
            Environment=codebuild.Environment(
                ComputeType=constants.ENVIRONMENT_COMPUTE_TYPE_DEFAULT,
                Image=constants.ENVIRONMENT_IMAGE_DEFAULT,
                Type=constants.ENVIRONMENT_TYPE_DEFAULT,
                EnvironmentVariables=[
                    codebuild.EnvironmentVariable(
                        Type="PLAINTEXT",
                        Name="ACCOUNT_ID",
                        Value=t.Sub("${AWS::AccountId}"),
                    ),
                    codebuild.EnvironmentVariable(Name="PIPELINE_NAME",
                                                  Type="PLAINTEXT",
                                                  Value="CHANGE_ME"),
                    codebuild.EnvironmentVariable(Name="CODEPIPELINE_ID",
                                                  Type="PLAINTEXT",
                                                  Value="CHANGE_ME"),
                ],
            ),
            Source=codebuild.Source(
                BuildSpec=t.Sub(
                    yaml.safe_dump(
                        dict(
                            version=0.2,
                            phases=dict(
                                install={
                                    "runtime-versions":
                                    dict(python="3.7", ),
                                    "commands": [
                                        f"pip install {constants.VERSION}"
                                        if "http" in constants.VERSION else
                                        f"pip install aws-service-catalog-factory=={constants.VERSION}",
                                    ],
                                },
                                build={"commands": get_commands_for_deploy()},
                            ),
                            artifacts={
                                "name": DEPLOY_OUTPUT_ARTIFACT,
                                "files": ["*", "**/*"],
                            },
                        ))),
                Type="CODEPIPELINE",
            ),
            Description=t.Sub("Create a deploy stage for template CDK 1.0.0"),
        ),
    ]
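The two CodeBuild projects returned above are plain troposphere resources. A minimal usage sketch, assuming only that get_resources is importable and that t is the troposphere module (as it is in the example), is to add them to a fresh Template and render it:

import troposphere as t


def render_cdk_projects() -> str:
    # Hypothetical helper, not part of the original module: collect the two
    # CodeBuild projects into a standalone template and emit YAML.
    template = t.Template(Description="CDK 1.0.0 package and deploy projects")
    for resource in get_resources():
        template.add_resource(resource)
    return template.to_yaml()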
Example #19
    def run(self):
        puppet_version = constants.VERSION
        description = f"""Bootstrap template used to configure spoke account for terraform use
        {{"version": "{puppet_version}", "framework": "servicecatalog-puppet", "role": "bootstrap-spoke-terraform"}}"""

        service_role = t.Sub(
            "arn:aws:iam::${AWS::AccountId}:role/servicecatalog-puppet/PuppetDeployInSpokeRole"
        )
        template = t.Template(Description=description)
        state = template.add_resource(
            s3.Bucket(
                "state",
                BucketName=t.Sub("sc-puppet-state-${AWS::AccountId}"),
                VersioningConfiguration=s3.VersioningConfiguration(
                    Status="Enabled"),
                BucketEncryption=s3.BucketEncryption(
                    ServerSideEncryptionConfiguration=[
                        s3.ServerSideEncryptionRule(
                            ServerSideEncryptionByDefault=s3.ServerSideEncryptionByDefault(
                                SSEAlgorithm="AES256"))
                    ]),
                PublicAccessBlockConfiguration=s3.PublicAccessBlockConfiguration(
                    BlockPublicAcls=True,
                    BlockPublicPolicy=True,
                    IgnorePublicAcls=True,
                    RestrictPublicBuckets=True,
                ),
                Tags=t.Tags({"ServiceCatalogPuppet:Actor": "Framework"}),
            ))
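        # The bucket policy below grants the hub (puppet) account permission to
        # read and write the terraform state objects stored in this bucket.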
        template.add_resource(
            s3.BucketPolicy(
                "statePolicy",
                Bucket=t.Ref(state),
                PolicyDocument={
                    "Version":
                    "2012-10-17",
                    "Statement": [
                        {
                            "Action": [
                                "s3:GetObject*",
                                "s3:PutObject*",
                            ],
                            "Principal": {
                                "AWS": self.puppet_account_id
                            },
                            "Resource": t.Join("/",
                                               [t.GetAtt(state, "Arn"), "*"]),
                            "Effect": "Allow",
                            "Sid": "AllowPuppet",
                        },
                    ],
                },
            ))
        execute_build_spec = dict(
            version="0.2",
            phases=dict(
                install=dict(commands=[
                    "mkdir -p /root/downloads",
                    "curl -s -qL -o /root/downloads/terraform_${TERRAFORM_VERSION}_linux_amd64.zip https://releases.hashicorp.com/terraform/${TERRAFORM_VERSION}/terraform_${TERRAFORM_VERSION}_linux_amd64.zip",
                    "unzip /root/downloads/terraform_${TERRAFORM_VERSION}_linux_amd64.zip -d /usr/bin/",
                    "chmod +x /usr/bin/terraform",
                    "terraform --version",
                    "aws s3 cp $ZIP source.zip",
                    "unzip source.zip",
                ], ),
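                # pre_build assumes the PuppetRole in the target account and
                # exports its temporary credentials so terraform runs against
                # that account.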
                pre_build=dict(commands=[
                    "aws s3 cp $STATE_FILE terraform.tfstate || echo 'no statefile copied'",
                    'ASSUME_ROLE_ARN="arn:aws:iam::${TARGET_ACCOUNT}:role/servicecatalog-puppet/PuppetRole"',
                    "TEMP_ROLE=$(aws sts assume-role --role-arn $ASSUME_ROLE_ARN --role-session-name terraform)",
                    "export TEMP_ROLE",
                    'export AWS_ACCESS_KEY_ID=$(echo "${TEMP_ROLE}" | jq -r ".Credentials.AccessKeyId")',
                    'export AWS_SECRET_ACCESS_KEY=$(echo "${TEMP_ROLE}" | jq -r ".Credentials.SecretAccessKey")',
                    'export AWS_SESSION_TOKEN=$(echo "${TEMP_ROLE}" | jq -r ".Credentials.SessionToken")',
                    "aws sts get-caller-identity",
                    "terraform init",
                ], ),
                build=dict(commands=[
                    "terraform apply -auto-approve",
                ]),
                post_build=dict(commands=[
                    "terraform output -json > outputs.json",
                    "unset AWS_ACCESS_KEY_ID",
                    "unset AWS_SECRET_ACCESS_KEY",
                    "unset AWS_SESSION_TOKEN",
                    "aws sts get-caller-identity",
                    "aws s3 cp terraform.tfstate $STATE_FILE",
                ]),
            ),
            artifacts=dict(files=[
                "outputs.json",
            ], ),
        )
        execute_terraform = dict(
            Name=constants.EXECUTE_TERRAFORM_PROJECT_NAME,
            ServiceRole=service_role,
            Tags=t.Tags.from_dict(
                **{"ServiceCatalogPuppet:Actor": "Framework"}),
            Artifacts=codebuild.Artifacts(
                Type="S3",
                Location=t.Ref("state"),
                Path="terraform-executions",
                Name="artifacts-execute",
                NamespaceType="BUILD_ID",
            ),
            TimeoutInMinutes=480,
            Environment=codebuild.Environment(
                ComputeType="BUILD_GENERAL1_SMALL",
                Image=constants.CODEBUILD_DEFAULT_IMAGE,
                Type="LINUX_CONTAINER",
                EnvironmentVariables=[
                    codebuild.EnvironmentVariable(
                        Name="TERRAFORM_VERSION",
                        Type="PARAMETER_STORE",
                        Value=constants.DEFAULT_TERRAFORM_VERSION_PARAMETER_NAME,
                    ),
                ] + [
                    codebuild.EnvironmentVariable(
                        Name=name,
                        Type="PLAINTEXT",
                        Value="CHANGE_ME",
                    ) for name in ["TARGET_ACCOUNT", "ZIP", "STATE_FILE"]
                ],
            ),
            Source=codebuild.Source(
                BuildSpec=yaml.safe_dump(execute_build_spec),
                Type="NO_SOURCE",
            ),
            Description=
            "Execute the given terraform in the given account using the given state file",
        )

        # execute
        template.add_resource(
            codebuild.Project("ExecuteTerraformProject", **execute_terraform))

        # execute dry run
        execute_dry_run_terraform = copy.deepcopy(execute_terraform)
        execute_dry_run_terraform[
            "Name"] = constants.EXECUTE_DRY_RUN_TERRAFORM_PROJECT_NAME
        execute_dry_run_terraform["Description"] = execute_dry_run_terraform[
            "Description"].replace("Execute", "DRY RUN of Execute")
        execute_dry_run_build_spec = copy.deepcopy(execute_build_spec)
        execute_dry_run_build_spec["phases"]["build"]["commands"] = [
            "terraform plan -out=plan.bin",
            "terraform show -json plan.bin > plan.json",
        ]
        del execute_dry_run_build_spec["phases"]["post_build"]
        execute_dry_run_build_spec["artifacts"] = dict(files=[
            "plan.bin",
            "plan.json",
        ], )
        execute_dry_run_terraform["Source"] = codebuild.Source(
            BuildSpec=yaml.safe_dump(execute_dry_run_build_spec),
            Type="NO_SOURCE",
        )
        execute_dry_run_terraform["Artifacts"] = codebuild.Artifacts(
            Type="S3",
            Location=t.Ref("state"),
            Path="terraform-executions",
            Name="artifacts-execute-dry-run",
            NamespaceType="BUILD_ID",
        )
        template.add_resource(
            codebuild.Project("ExecuteDryRunTerraformProject",
                              **execute_dry_run_terraform))

        # terminate
        terminate_terraform = copy.deepcopy(execute_terraform)
        terminate_terraform[
            "Name"] = constants.TERMINATE_TERRAFORM_PROJECT_NAME
        terminate_terraform["Description"] = terminate_terraform[
            "Description"].replace("Execute", "Terminate")
        terminate_build_spec = copy.deepcopy(execute_build_spec)
        terminate_build_spec["phases"]["build"]["commands"] = [
            "terraform destroy -auto-approve"
        ]
        terminate_build_spec["phases"]["post_build"]["commands"] = [
            "unset AWS_ACCESS_KEY_ID",
            "unset AWS_SECRET_ACCESS_KEY",
            "unset AWS_SESSION_TOKEN",
            "aws sts get-caller-identity",
            "aws s3 cp terraform.tfstate $STATE_FILE",
        ]
        del terminate_build_spec["artifacts"]
        terminate_terraform["Source"] = codebuild.Source(
            BuildSpec=yaml.safe_dump(terminate_build_spec),
            Type="NO_SOURCE",
        )
        terminate_terraform["Artifacts"] = codebuild.Artifacts(
            Type="S3",
            Location=t.Ref("state"),
            Path="terraform-executions",
            Name="artifacts-terminate",
            NamespaceType="BUILD_ID",
        )
        template.add_resource(
            codebuild.Project("TerminateTerraformProject",
                              **terminate_terraform))

        # terminate dry run
        terminate_dry_run_terraform = copy.deepcopy(execute_terraform)
        terminate_dry_run_terraform[
            "Name"] = constants.TERMINATE_DRY_RUN_TERRAFORM_PROJECT_NAME
        new_description = terminate_dry_run_terraform["Description"].replace(
            "Execute", "DRY RUN of Terminate")
        terminate_dry_run_terraform["Description"] = new_description
        terminate_dry_run_build_spec = copy.deepcopy(execute_build_spec)
        terminate_dry_run_build_spec["phases"]["build"]["commands"] = [
            "terraform plan -destroy -out=plan.bin",
            "terraform show -json plan.bin > plan.json",
        ]
        del terminate_dry_run_build_spec["phases"]["post_build"]
        terminate_dry_run_build_spec["artifacts"] = dict(files=[
            "plan.bin",
            "plan.json",
        ], )
        terminate_dry_run_terraform["Source"] = codebuild.Source(
            BuildSpec=yaml.safe_dump(terminate_dry_run_build_spec),
            Type="NO_SOURCE",
        )
        terminate_dry_run_terraform["Artifacts"] = codebuild.Artifacts(
            Type="S3",
            Location=t.Ref("state"),
            Path="terraform-executions",
            Name="artifacts-terminate-dry-run",
            NamespaceType="BUILD_ID",
        )
        template.add_resource(
            codebuild.Project("TerminateDryRunTerraformProject",
                              **terminate_dry_run_terraform))

        self.write_output(template.to_yaml(), skip_json_dump=True)
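Each of the dry-run and terminate projects above is derived by deep-copying the execute project and overriding only the fields that differ. A minimal sketch of that pattern as a standalone helper, assuming troposphere is installed; the helper name and signature are illustrative, not part of the original class:

import copy

import troposphere as t
from troposphere import codebuild


def derive_project(base, name, description, build_spec, artifact_name):
    # Copy the base CodeBuild project definition and replace only what differs.
    variant = copy.deepcopy(base)
    variant["Name"] = name
    variant["Description"] = description
    variant["Source"] = codebuild.Source(BuildSpec=build_spec, Type="NO_SOURCE")
    variant["Artifacts"] = codebuild.Artifacts(
        Type="S3",
        Location=t.Ref("state"),
        Path="terraform-executions",
        Name=artifact_name,
        NamespaceType="BUILD_ID",
    )
    return variant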
Example #20
    def __init__(self, stack, paco_ctx):
        super().__init__(stack, paco_ctx, iam_capabilities=["CAPABILITY_IAM"])
        self.apigatewayrestapi = apigatewayrestapi = stack.resource
        self.set_aws_name('ApiGatewayRestApi', self.resource_group_name, self.resource.name)

        self.init_template('ApiGateway: {}'.format(apigatewayrestapi.title))
        template = self.template
        if not self.apigatewayrestapi.is_enabled():
            return

        # Parameters
        lambda_params = {}
        for method in self.apigatewayrestapi.methods.values():
            if method.integration.integration_lambda != None:
                param_name = 'MethodArn' + self.create_cfn_logical_id(method.name)
                if method.integration.integration_lambda not in lambda_params:
                    lambda_params[method.integration.integration_lambda] = self.create_cfn_parameter(
                        name=param_name,
                        param_type='String',
                        description='Lambda ARN parameter.',
                        value=method.integration.integration_lambda + '.arn',
                    )
                method.parameter_arn_ref = troposphere.Ref(lambda_params[method.integration.integration_lambda])

        # Resources
        restapi_logical_id = 'ApiGatewayRestApi'
        cfn_export_dict = self.apigatewayrestapi.cfn_export_dict
        if self.paco_ctx.legacy_flag('aim_name_2019_11_28') == True:
            cfn_export_dict['Name'] = self.apigatewayrestapi.name

        self.restapi_resource = troposphere.apigateway.RestApi.from_dict(
            restapi_logical_id,
            cfn_export_dict
        )
        template.add_resource(self.restapi_resource)
        self.create_output(
            title='ApiGatewayRestApiId',
            value=troposphere.Ref(self.restapi_resource),
            ref=self.apigatewayrestapi.paco_ref_parts + '.id',
        )
        self.create_output(
            title='ApiGatewayRestApiAddress',
            value=troposphere.Join('.', [
                troposphere.Ref(self.restapi_resource),
                'execute-api',
                self.stack.aws_region,
                'amazonaws.com',
            ]),
            ref=self.apigatewayrestapi.paco_ref_parts + '.address',
        )
        self.create_output(
            title='ApiGatewayRestApiRootResourceId',
            value=troposphere.GetAtt(self.restapi_resource, "RootResourceId"),
            ref=self.apigatewayrestapi.paco_ref_parts + '.root_resource_id',
        )

        # Authorizers
        if self.apigatewayrestapi.cognito_authorizers != None:
            # monkey patch for Troposphere ... ToDo: file a PR
            troposphere.apigateway.Authorizer.props['AuthorizerUri'] = (str, False)
            self.user_pool_params = {}
            for cog_auth in self.apigatewayrestapi.cognito_authorizers.values():
                provider_arns = []
                for user_pool_ref in cog_auth.user_pools:
                    if user_pool_ref not in self.user_pool_params:
                        self.user_pool_params[user_pool_ref] = self.create_cfn_parameter(
                            name='CognitoUserPool' + md5sum(str_data=user_pool_ref),
                            param_type='String',
                            description='Cognito User Pool ARN',
                            value=user_pool_ref + '.arn',
                        )
                    provider_arns.append(troposphere.Ref(self.user_pool_params[user_pool_ref]))
                cog_auth_resource = troposphere.apigateway.Authorizer(
                    title=self.create_cfn_logical_id(f'CognitoAuthorizer{cog_auth.name}'),
                    Name=cog_auth.name,
                    RestApiId=troposphere.Ref(self.restapi_resource),
                    IdentitySource='method.request.header.' + cog_auth.identity_source,
                    Type='COGNITO_USER_POOLS',
                    ProviderARNs=provider_arns,
                )
                self.template.add_resource(cog_auth_resource)
                cog_auth.resource = cog_auth_resource

        # Model
        for model in self.apigatewayrestapi.models.values():
            model.logical_id = self.create_cfn_logical_id('ApiGatewayModel' + model.name)
            cfn_export_dict = model.cfn_export_dict
            cfn_export_dict['RestApiId'] = troposphere.Ref(self.restapi_resource)
            if 'Schema' not in cfn_export_dict:
                cfn_export_dict['Schema'] = {}
            model_resource = troposphere.apigateway.Model.from_dict(model.logical_id, cfn_export_dict)
            model.resource = model_resource
            template.add_resource(model_resource)

        # Resource
        self.recursively_add_resources(self.apigatewayrestapi.resources)

        # Method
        api_account_name = self.apigatewayrestapi.get_account().name
        for method in self.apigatewayrestapi.methods.values():
            method_depends_on = [ restapi_logical_id ]
            method_id = 'ApiGatewayMethod' + self.create_cfn_logical_id(method.name)
            method.logical_id = method_id
            cfn_export_dict = method.cfn_export_dict
            if method.authorizer != None:
                # ToDo: only Cognito Authorizers
                auth_type, auth_name = method.authorizer.split('.')
                auth_cont = getattr(self.apigatewayrestapi, auth_type)
                auth_obj = auth_cont[auth_name]
                cfn_export_dict["AuthorizerId"] = troposphere.Ref(auth_obj.resource)
                if auth_type == 'cognito_authorizers':
                    cfn_export_dict["AuthorizationType"] = 'COGNITO_USER_POOLS'
            if method.resource_name:
                cfn_export_dict["ResourceId"] = troposphere.Ref(method.get_resource().resource)
                method_depends_on.append(method.get_resource().resource)
            else:
                cfn_export_dict["ResourceId"] = troposphere.GetAtt(self.restapi_resource, 'RootResourceId')
            cfn_export_dict["RestApiId"] = troposphere.Ref(self.restapi_resource)

            # Lambda Integration
            if method.integration.integration_lambda != None:
                awslambda = get_model_obj_from_ref(method.integration.integration_lambda, self.project)
                uri = troposphere.Join('', [
                    "arn:aws:apigateway:",
                    awslambda.region_name,
                    ":lambda:path/2015-03-31/functions/",
                    method.parameter_arn_ref,
                    "/invocations"]
                )
                cfn_export_dict["Integration"]["Uri"] = uri

                if method.integration.integration_type == 'AWS_PROXY':
                    # Cross-account Lambda can not have a Role or gets a permission error
                    if api_account_name == awslambda.get_account().name:
                        # IAM Role - allows API Gateway to invoke Lambda
                        # ToDo: enable Api Gateway to invoke things other than Lambda ...
                        # ToDo: share Roles between methods!
                        iam_role_resource = troposphere.iam.Role(
                            self.create_cfn_logical_id('ApiGatewayIamRole' + self.apigatewayrestapi.name + method.name),
                            Path='/',
                            AssumeRolePolicyDocument=Policy(
                                Version='2012-10-17',
                                Statement=[
                                    Statement(
                                        Effect=Allow,
                                        Action=[awacs.sts.AssumeRole],
                                        Principal=Principal('Service',['apigateway.amazonaws.com'])
                                    )
                                ],
                            ),
                            Policies=[
                                troposphere.iam.Policy(
                                    PolicyName=self.create_cfn_logical_id('LambdaAccessApiGateway' + self.apigatewayrestapi.name + method.name),
                                    PolicyDocument=Policy(
                                        Version='2012-10-17',
                                        Statement=[
                                            Statement(
                                                Effect=Allow,
                                                Action=[awacs.awslambda.InvokeFunction],
                                                Resource=[method.parameter_arn_ref],
                                            )
                                        ]
                                    )
                                )
                            ]
                        )
                        template.add_resource(iam_role_resource)
                        cfn_export_dict["Integration"]["Credentials"] = troposphere.GetAtt(iam_role_resource, "Arn")

            elif method.integration.integration_type == 'AWS':
                # Enable Lambda (custom) integration
                # When send to a Lambda (Custom) the HTTP Method must always be POST regardless of
                # the HttpMethod
                cfn_export_dict["Integration"]["IntegrationHttpMethod"] = "POST"
                lambda_permission_resource = troposphere.awslambda.Permission(
                    self.create_cfn_logical_id('LambdaPermissionApiGateway' + method.name),
                    Action = 'lambda:InvokeFunction',
                    FunctionName = method.parameter_arn_ref,
                    Principal = 'apigateway.amazonaws.com',
                    SourceArn = troposphere.Sub(
                        "arn:aws:execute-api:${AWS::Region}:${AWS::AccountId}:${%s}/*/%s/" % (
                            restapi_logical_id, method.http_method
                        )
                    )
                )
                template.add_resource(lambda_permission_resource)

            # look-up the method_names and assign a Ref to the model resource
            # ToDo: validate model_names in the model
            responses = []
            for method_response in method.method_responses:
                response_dict = {"StatusCode": method_response.status_code}
                if method_response.response_models:
                    response_dict["ResponseModels"] = {}
                    for response_model in method_response.response_models:
                        for model in self.apigatewayrestapi.models.values():
                            if model.name == response_model.model_name:
                                response_dict["ResponseModels"][response_model.content_type] = troposphere.Ref(model.resource)
                if method_response.response_parameters:
                    response_dict["ResponseParameters"] = method_response.response_parameters
                responses.append(response_dict)
            cfn_export_dict["MethodResponses"] = responses

            method_resource = troposphere.apigateway.Method.from_dict(method_id, cfn_export_dict)
            method_resource.DependsOn = method_depends_on
            template.add_resource(method_resource)
            self.create_output(
                title=self.create_cfn_logical_id(f'ApiGatewayRestApiMethod{method.name}'),
                value=troposphere.Ref(method_resource),
                ref=method.paco_ref_parts + '.id',
            )

        # Deployment
        deployment_resource = troposphere.apigateway.Deployment.from_dict(
            'ApiGatewayDeployment',
            {'Description': 'Deployment',
             'RestApiId': troposphere.Ref(self.restapi_resource) }
        )
        # this is needed otherwise you can get 'No integration defined for method'
        # as the Deployment can be created before the Methods
        deployment_resource.DependsOn = [
            method.logical_id for method in self.apigatewayrestapi.methods.values()
        ]

        template.add_resource(deployment_resource)
        self.create_output(
            title=self.create_cfn_logical_id('ApiGatewayRestApiDeployment'),
            value=troposphere.Ref(deployment_resource),
            ref=self.apigatewayrestapi.paco_ref_parts + '.deployment_id',
        )

        # Stage
        self.stage_resources = []
        for stage in self.apigatewayrestapi.stages.values():
            stage_id = self.create_cfn_logical_id('ApiGatewayStage' + stage.name)
            cfn_export_dict = stage.cfn_export_dict
            cfn_export_dict["RestApiId"] = troposphere.Ref(self.restapi_resource)
            cfn_export_dict["DeploymentId"] = troposphere.Ref(deployment_resource)
            stage_resource = troposphere.apigateway.Stage.from_dict(stage_id, cfn_export_dict)
            self.stage_resources.append(stage_resource)
            template.add_resource(stage_resource)
            self.create_output(
                title=self.create_cfn_logical_id(f'ApiGatewayRestApiStage{stage.name}'),
                value=troposphere.Ref(stage_resource),
                ref=stage.paco_ref_parts + '.id',
            )

        # DNS
        # Caution: experimental code: REGIONAL endpoints only and
        # the dns.ssl_certificate field expects an Arn instead of a paco.ref to an ACM resource ...
        if self.apigatewayrestapi.is_dns_enabled() == True:
            route53_ctl = self.paco_ctx.get_controller('route53')
            for dns in self.apigatewayrestapi.dns:
                # ApiGateway DomainName resource
                domain_name_logical_id = self.create_cfn_logical_id('DomainName' + dns.domain_name)
                # ToDo: currently SSL Certificate must be an Arn
                # A paco.ref to an SSL Cert is typically in a netenv, which isn't initialized in a Service
                # either init the netenv or have some way of managing ACM certs globally?
                cfn_export_dict = {
                    'DomainName': dns.domain_name,
                    'RegionalCertificateArn': dns.ssl_certificate,
                    'EndpointConfiguration': {"Types": ['REGIONAL']},
                }
                domain_name_resource = troposphere.apigateway.DomainName.from_dict(
                    domain_name_logical_id,
                    cfn_export_dict
                )
                template.add_resource(domain_name_resource)
                domain_name_name = dns.domain_name.replace('.', '')
                self.create_output(
                    title=domain_name_logical_id,
                    value=troposphere.GetAtt(domain_name_resource, 'RegionalDomainName'),
                    ref=f'{dns.paco_ref_parts}.{domain_name_name}.regional_domain_name',
                )

                # ApiGateway BasePathMapping
                for base_path_mapping in dns.base_path_mappings:
                    cfn_export_dict = {
                        'DomainName': dns.domain_name,
                        'RestApiId': troposphere.Ref(self.restapi_resource),
                        'Stage': base_path_mapping.stage,
                    }
                    if base_path_mapping.base_path != '':
                        cfn_export_dict['BasePath'] = base_path_mapping.base_path
                    base_path_mapping_logical_id = self.create_cfn_logical_id('BasePathMapping' + dns.domain_name)
                    base_path_mapping_resource = troposphere.apigateway.BasePathMapping.from_dict(
                        base_path_mapping_logical_id,
                        cfn_export_dict,
                    )
                    base_path_mapping_resource.DependsOn = [domain_name_logical_id]
                    for stage in self.stage_resources:
                        base_path_mapping_resource.DependsOn.append(stage.title)
                    template.add_resource(base_path_mapping_resource)
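The Lambda integration URI assembled above always follows the fixed apigateway service path. A small standalone sketch of that construction; the helper name is an assumption, not part of the original class:

import troposphere


def lambda_integration_uri(region_name, lambda_arn_ref):
    # Resolves to arn:aws:apigateway:<region>:lambda:path/2015-03-31/functions/<lambda arn>/invocations
    return troposphere.Join('', [
        "arn:aws:apigateway:",
        region_name,
        ":lambda:path/2015-03-31/functions/",
        lambda_arn_ref,
        "/invocations",
    ])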
Example #21
    def __init__(
        self,
        paco_ctx,
        account_ctx,
        aws_region,
        stack_group,
        stack_tags,
        app_id,
        grp_id,
        res_id,
        dashboard,
    ):
        super().__init__(paco_ctx,
                         account_ctx,
                         aws_region,
                         enabled=dashboard.is_enabled(),
                         config_ref=dashboard.paco_ref_parts,
                         stack_group=stack_group,
                         stack_tags=stack_tags)
        self.set_aws_name('Dashboard', grp_id, res_id)
        self.init_template('CloudWatch Dashboard')

        if not dashboard.is_enabled():
            return self.set_template()

        # Parameters for variables
        if dashboard.variables and dashboard.is_enabled():
            for key, value in dashboard.variables.items():
                if type(value) == type(str()):
                    param_type = 'String'
                elif type(value) == type(int()) or type(value) == type(
                        float()):
                    param_type = 'Number'
                else:
                    raise UnsupportedCloudFormationParameterType(
                        "Can not cast {} of type {} to a CloudFormation Parameter type."
                        .format(value, type(value)))
                variable_param = self.create_cfn_parameter(
                    param_type=param_type,
                    name=key,
                    description='Dashboard {} Variable'.format(key),
                    value=value)

        # Region Parameter
        region_param = self.create_cfn_parameter(
            param_type='String',
            name='AwsRegion',
            description='Dashboard Region Variable',
            value=aws_region)

        # Dashboard resource
        dashboard_logical_id = 'Dashboard'
        body = troposphere.Sub(dashboard.dashboard_file)
        cfn_export_dict = {
            'DashboardBody': body,
            'DashboardName': dashboard.title_or_name
        }
        dashboard_resource = troposphere.cloudwatch.Dashboard.from_dict(
            dashboard_logical_id, cfn_export_dict)
        self.template.add_resource(dashboard_resource)

        # Generate the Template
        self.set_template()
def get_template(version: str, default_region_value) -> t.Template:
    description = f"""Bootstrap template used to bootstrap a region of ServiceCatalog-Puppet master
{{"version": "{version}", "framework": "servicecatalog-puppet", "role": "bootstrap-master-region"}}"""

    template = t.Template(Description=description)

    version_parameter = template.add_parameter(
        t.Parameter("Version", Default=version, Type="String")
    )
    default_region_value_parameter = template.add_parameter(
        t.Parameter("DefaultRegionValue", Default=default_region_value, Type="String")
    )

    template.add_resource(
        ssm.Parameter(
            "DefaultRegionParam",
            Name="/servicecatalog-puppet/home-region",
            Type="String",
            Value=t.Ref(default_region_value_parameter),
            Tags={"ServiceCatalogPuppet:Actor": "Framework"},
        )
    )
    version_ssm_parameter = template.add_resource(
        ssm.Parameter(
            "Param",
            Name="service-catalog-puppet-regional-version",
            Type="String",
            Value=t.Ref(version_parameter),
            Tags={"ServiceCatalogPuppet:Actor": "Framework"},
        )
    )

    template.add_resource(
        s3.Bucket(
            "PipelineArtifactBucket",
            BucketName=t.Sub(
                "sc-puppet-pipeline-artifacts-${AWS::AccountId}-${AWS::Region}"
            ),
            VersioningConfiguration=s3.VersioningConfiguration(Status="Enabled"),
            BucketEncryption=s3.BucketEncryption(
                ServerSideEncryptionConfiguration=[
                    s3.ServerSideEncryptionRule(
                        ServerSideEncryptionByDefault=s3.ServerSideEncryptionByDefault(
                            SSEAlgorithm="AES256"
                        )
                    )
                ]
            ),
            PublicAccessBlockConfiguration=s3.PublicAccessBlockConfiguration(
                BlockPublicAcls=True,
                BlockPublicPolicy=True,
                IgnorePublicAcls=True,
                RestrictPublicBuckets=True,
            ),
            Tags=t.Tags({"ServiceCatalogPuppet:Actor": "Framework"}),
        )
    )

    regional_product_topic = template.add_resource(
        sns.Topic(
            "RegionalProductTopic",
            DisplayName="servicecatalog-puppet-cloudformation-regional-events",
            TopicName="servicecatalog-puppet-cloudformation-regional-events",
            Subscription=[
                sns.Subscription(
                    Endpoint=t.Sub(
                        "arn:${AWS::Partition}:sqs:${DefaultRegionValue}:${AWS::AccountId}:servicecatalog-puppet-cloudformation-events"
                    ),
                    Protocol="sqs",
                )
            ],
        ),
    )

    template.add_output(
        t.Output("Version", Value=t.GetAtt(version_ssm_parameter, "Value"))
    )
    template.add_output(
        t.Output("RegionalProductTopic", Value=t.Ref(regional_product_topic))
    )

    return template
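A usage sketch for this function; the version string and region below are placeholders:

if __name__ == "__main__":
    # Render the regional bootstrap template to YAML for inspection.
    print(get_template("0.1.0", "eu-west-1").to_yaml())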
Example #23
    def create_codebuild_cfn(self, template, pipeline_config, action_config,
                             config_ref):
        # CodeBuild
        compute_type_param = self.create_cfn_parameter(
            param_type='String',
            name='CodeBuildComputeType',
            description=
            'The type of compute environment. This determines the number of CPU cores and memory the build environment uses.',
            value=action_config.codebuild_compute_type,
        )
        image_param = self.create_cfn_parameter(
            param_type='String',
            name='CodeBuildImage',
            description=
            'The image tag or image digest that identifies the Docker image to use for this build project.',
            value=action_config.codebuild_image,
        )
        deploy_env_name_param = self.create_cfn_parameter(
            param_type='String',
            name='DeploymentEnvironmentName',
            description=
            'The name of the environment codebuild will be deploying into.',
            value=action_config.deployment_environment,
        )
        self.project_role_name = self.create_iam_resource_name(
            name_list=[self.res_name_prefix, 'CodeBuild-Project'],
            filter_id='IAM.Role.RoleName')
        project_role_res = troposphere.iam.Role(
            title='CodeBuildProjectRole',
            template=template,
            RoleName=self.project_role_name,
            AssumeRolePolicyDocument=PolicyDocument(
                Version="2012-10-17",
                Statement=[
                    Statement(
                        Effect=Allow,
                        Action=[AssumeRole],
                        Principal=Principal("Service",
                                            ['codebuild.amazonaws.com']),
                    )
                ]))

        project_policy_name = self.create_iam_resource_name(
            name_list=[self.res_name_prefix, 'CodeBuild-Project'],
            filter_id='IAM.Policy.PolicyName')
        project_policy_res = troposphere.iam.PolicyType(
            title='CodeBuildProjectPolicy',
            PolicyName=project_policy_name,
            PolicyDocument=PolicyDocument(Statement=[
                Statement(
                    Sid='S3Access',
                    Effect=Allow,
                    Action=[
                        Action('s3', 'PutObject'),
                        Action('s3', 'PutObjectAcl'),
                        Action('s3', 'GetObject'),
                        Action('s3', 'GetObjectAcl'),
                        Action('s3', 'ListBucket'),
                        Action('s3', 'DeleteObject'),
                        Action('s3', 'GetBucketPolicy'),
                    ],
                    Resource=[
                        troposphere.Sub('arn:aws:s3:::${ArtifactsBucketName}'),
                        troposphere.Sub(
                            'arn:aws:s3:::${ArtifactsBucketName}/*'),
                    ]),
                Statement(Sid='CloudWatchLogsAccess',
                          Effect=Allow,
                          Action=[
                              Action('logs', 'CreateLogGroup'),
                              Action('logs', 'CreateLogStream'),
                              Action('logs', 'PutLogEvents'),
                          ],
                          Resource=['arn:aws:logs:*:*:*']),
                Statement(Sid='KMSCMK',
                          Effect=Allow,
                          Action=[Action('kms', '*')],
                          Resource=[troposphere.Ref(self.cmk_arn_param)]),
            ], ),
            Roles=[troposphere.Ref(project_role_res)])
        template.add_resource(project_policy_res)

        # User defined policies
        for policy in action_config.role_policies:
            policy_name = self.create_resource_name_join(
                name_list=[
                    self.res_name_prefix, 'CodeBuild-Project', policy.name
                ],
                separator='-',
                filter_id='IAM.Policy.PolicyName',
                hash_long_names=True,
                camel_case=True)
            statement_list = []

            for statement in policy.statement:
                action_list = []
                for action in statement.action:
                    action_parts = action.split(':')
                    action_list.append(Action(action_parts[0],
                                              action_parts[1]))
                statement_list.append(
                    Statement(Effect=statement.effect,
                              Action=action_list,
                              Resource=statement.resource))
            troposphere.iam.PolicyType(
                title=self.create_cfn_logical_id('CodeBuildProjectPolicy' +
                                                 policy.name,
                                                 camel_case=True),
                template=template,
                PolicyName=policy_name,
                PolicyDocument=PolicyDocument(Statement=statement_list, ),
                Roles=[troposphere.Ref(project_role_res)])

        # CodeBuild Project Resource
        timeout_mins_param = self.create_cfn_parameter(
            param_type='String',
            name='TimeoutInMinutes',
            description=
            'How long, in minutes, from 5 to 480 (8 hours), for AWS CodeBuild to wait before timing out any related build that did not get marked as completed.',
            value=action_config.timeout_mins,
        )

        # CodeBuild: Environment
        environment = troposphere.codebuild.Environment(
            Type='LINUX_CONTAINER',
            ComputeType=troposphere.Ref(compute_type_param),
            Image=troposphere.Ref(image_param),
            EnvironmentVariables=[{
                'Name': 'ArtifactsBucket',
                'Value': troposphere.Ref(self.artifacts_bucket_name_param),
            }, {
                'Name': 'DeploymentEnvironmentName',
                'Value': troposphere.Ref(deploy_env_name_param),
            }, {
                'Name': 'KMSKey',
                'Value': troposphere.Ref(self.cmk_arn_param),
            }])
        project_res = troposphere.codebuild.Project(
            title='CodeBuildProject',
            template=template,
            Name=troposphere.Ref(self.resource_name_prefix_param),
            Description=troposphere.Ref('AWS::StackName'),
            ServiceRole=troposphere.GetAtt('CodeBuildProjectRole', 'Arn'),
            EncryptionKey=troposphere.Ref(self.cmk_arn_param),
            Artifacts=troposphere.codebuild.Artifacts(Type='CODEPIPELINE'),
            Environment=environment,
            Source=troposphere.codebuild.Source(Type='CODEPIPELINE'),
            TimeoutInMinutes=troposphere.Ref(timeout_mins_param),
            Tags=troposphere.codebuild.Tags(
                Name=troposphere.Ref(self.resource_name_prefix_param)))

        self.create_output(title='ProjectArn',
                           value=troposphere.GetAtt(project_res, 'Arn'),
                           description='CodeBuild Project Arn',
                           ref=config_ref + '.project.arn')

        return project_res
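The user-defined policy loop above converts 'service:ActionName' strings into awacs Action objects before building each Statement. A minimal standalone sketch of that conversion, assuming awacs is installed; the helper name is illustrative:

from awacs.aws import Action, Statement


def to_statement(effect, actions, resources):
    # Split each 'service:ActionName' string into an awacs Action(prefix, action).
    return Statement(
        Effect=effect,
        Action=[Action(*action.split(':', 1)) for action in actions],
        Resource=resources,
    )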
Example #24
def create_cloudformation(
    key_name,
    ami_id,
    instance_type,
    security_group=None,
    subnet_id=None,
    price=None,
    size=100,
    user_script=USER_SCRIPT_DEFAULT,
    extra_user_data="",
):
    # Use the caller-supplied script (which defaults to USER_SCRIPT_DEFAULT)
    # and splice in any extra user data.
    user_script = user_script.replace("<<extra_user_data>>", extra_user_data)
    # XXX set this to get real bool values
    os.environ["TROPO_REAL_BOOL"] = "true"

    t = troposphere.Template(Description="TchoTcho EC2 train")

    instance_security_group = t.add_resource(
        troposphere.ec2.SecurityGroup(
            "InstanceSecurityGroup",
            VpcId=get_default_vpc_id(),
            GroupDescription="Allow only SSH inbound on port 22 and all outbound traffic",
            SecurityGroupIngress=[
                troposphere.ec2.SecurityGroupRule(IpProtocol="tcp",
                                                  FromPort=22,
                                                  ToPort=22,
                                                  CidrIp="0.0.0.0/0"),
                troposphere.ec2.SecurityGroupRule(IpProtocol="tcp",
                                                  FromPort=22,
                                                  ToPort=22,
                                                  CidrIpv6="::/0"),
            ],
            SecurityGroupEgress=[
                troposphere.ec2.SecurityGroupRule(IpProtocol="-1",
                                                  CidrIp="0.0.0.0/0"),
                troposphere.ec2.SecurityGroupRule(IpProtocol="-1",
                                                  CidrIpv6="::/0"),
            ],
        ))

    instance_role = t.add_resource(
        troposphere.iam.Role(
            "InstanceRole",
            AssumeRolePolicyDocument=awacs.aws.Policy(
                Version="2012-10-17",
                Statement=[
                    awacs.aws.Statement(
                        Effect=awacs.aws.Allow,
                        Principal=awacs.aws.Principal("Service",
                                                      "ec2.amazonaws.com"),
                        Action=[awacs.sts.AssumeRole],
                    ),
                ],
            ),
            ManagedPolicyArns=[
                "arn:aws:iam::aws:policy/service-role/AmazonEC2ContainerServiceforEC2Role",
            ],
            Policies=[
                troposphere.iam.Policy(
                    PolicyName="S3FullAccess",
                    PolicyDocument={
                        "Statement": [{
                            "Effect": "Allow",
                            "Action": "s3:*",
                            "Resource": "*"
                        }],
                    },
                )
            ],
        ))

    instance_profile = t.add_resource(
        troposphere.iam.InstanceProfile(
            "InstanceProfile",
            Roles=[troposphere.Ref(instance_role)],
        ))

    launch_template = t.add_resource(
        troposphere.ec2.LaunchTemplate(
            "InstanceLaunchTemplate",
            # https://github.com/cloudtools/troposphere/blob/07dde6b66fca28dd401903027d8ac13bc107e0b6/examples/CloudFormation_Init_ConfigSet.py#L45
            # https://stackoverflow.com/questions/35095950/what-are-the-benefits-of-cfn-init-over-userdata
            # XXX for now we are not using this we always just delete the stack
            # Metadata=troposphere.cloudformation.Metadata(
            #     troposphere.cloudformation.Init(
            #         troposphere.cloudformation.InitConfigSets(default=["bootstrap"]),
            #         awspackages=troposphere.cloudformation.InitConfig(
            #             commands={
            #                 "001-bootstrap": {"command": "touch ~/bootstra.txt"},
            #             },
            #         ),
            #     ),
            # ),
            LaunchTemplateData=troposphere.ec2.LaunchTemplateData(
                KeyName=key_name,
                ImageId=ami_id,
                InstanceType=instance_type,
                UserData=troposphere.Base64(
                    # Sub is needed if we have variables
                    troposphere.Sub(textwrap.dedent(user_script.strip()), ), ),
                IamInstanceProfile=troposphere.ec2.IamInstanceProfile(
                    Arn=troposphere.GetAtt(instance_profile, "Arn"), ),
                BlockDeviceMappings=[
                    troposphere.ec2.LaunchTemplateBlockDeviceMapping(
                        DeviceName="/dev/sda1",
                        Ebs=troposphere.ec2.EBSBlockDevice(
                            DeleteOnTermination=True,
                            VolumeSize=size,
                            Encrypted=True),
                    )
                ],
            ),
        ))

    if price:
        instance_market_options = troposphere.ec2.InstanceMarketOptions(
            MarketType="spot",
            SpotOptions=troposphere.ec2.SpotOptions(
                SpotInstanceType="one-time",
                MaxPrice=str(price),
                InstanceInterruptionBehavior="terminate",
            ),
        )

        launch_template.properties["LaunchTemplateData"].properties[
            "InstanceMarketOptions"] = instance_market_options

    if not security_group:
        security_group = troposphere.Ref(instance_security_group)

    if subnet_id:
        network_interfaces = [
            troposphere.ec2.NetworkInterfaces(
                SubnetId=subnet_id,
                DeviceIndex=0,
                Groups=[security_group],
            )
        ]

        launch_template.properties["LaunchTemplateData"].properties[
            "NetworkInterfaces"] = network_interfaces
    else:
        launch_template.properties["LaunchTemplateData"].properties[
            "SecurityGroupIds"] = [security_group]

    ec2_instance = t.add_resource(
        troposphere.ec2.Instance(
            "TchoTchoInstance",
            LaunchTemplate=troposphere.ec2.LaunchTemplateSpecification(
                LaunchTemplateId=troposphere.Ref(launch_template),
                Version=troposphere.GetAtt(launch_template,
                                           "LatestVersionNumber"),
            ),
            CreationPolicy=troposphere.policies.CreationPolicy(
                ResourceSignal=troposphere.policies.ResourceSignal(
                    Timeout='PT15M')),
        ))

    t.add_output([
        troposphere.Output(
            "InstanceId",
            Description="InstanceId of the EC2 instance",
            Value=troposphere.Ref(ec2_instance),
        ),
        troposphere.Output(
            "AZ",
            Description="Availability Zone of the EC2 instance",
            Value=troposphere.GetAtt(ec2_instance, "AvailabilityZone"),
        ),
        troposphere.Output(
            "PublicIP",
            Description="Public IP address of the EC2 instance",
            Value=troposphere.GetAtt(ec2_instance, "PublicIp"),
        ),
        troposphere.Output(
            "PrivateIP",
            Description="Private IP address of the EC2 instance",
            Value=troposphere.GetAtt(ec2_instance, "PrivateIp"),
        ),
        troposphere.Output(
            "PublicDNS",
            Description="Public DNSName of the EC2 instance",
            Value=troposphere.GetAtt(ec2_instance, "PublicDnsName"),
        ),
        troposphere.Output(
            "PrivateDNS",
            Description="Private DNSName of the EC2 instance",
            Value=troposphere.GetAtt(ec2_instance, "PrivateDnsName"),
        ),
    ])
    # XXX moto has some problems with yaml; validate, LaunchTemplateData is
    # not parsed so the ec2instance ImageId other keys are not found
    # return t.to_yaml()
    return t.to_json()
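A usage sketch; the key pair name, AMI id, instance type and spot price below are placeholders, not values from the original project:

if __name__ == "__main__":
    template_json = create_cloudformation(
        key_name="my-keypair",
        ami_id="ami-0123456789abcdef0",
        instance_type="t3.medium",
        price="0.20",  # optional spot max price; omit to run on-demand
        size=200,      # root volume size in GB
    )
    print(template_json)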
Example #25
    def __init__(self, stack, paco_ctx):
        super().__init__(stack, paco_ctx, iam_capabilities=["CAPABILITY_IAM"])
        self.apigatewayrestapi = apigatewayrestapi = stack.resource
        self.set_aws_name('ApiGatewayRestApi', self.resource_group_name, self.resource.name)

        self.init_template('ApiGateway: {}'.format(apigatewayrestapi.title))
        template = self.template
        if not self.apigatewayrestapi.is_enabled():
            return

        # Parameters
        method_params = []
        for method in self.apigatewayrestapi.methods.values():
            param_name = 'MethodArn' + self.create_cfn_logical_id(method.name)
            lambda_arn_param = self.create_cfn_parameter(
                name=param_name,
                param_type='String',
                description='Lambda ARN parameter.',
                value=method.integration.integration_lambda + '.arn',
            )
            method.parameter_arn_ref = troposphere.Ref(param_name)

        # Resources
        restapi_logical_id = 'ApiGatewayRestApi'
        restapi_resource = troposphere.apigateway.RestApi.from_dict(
            restapi_logical_id,
            self.apigatewayrestapi.cfn_export_dict
        )
        template.add_resource(restapi_resource)

        # Model
        for model in self.apigatewayrestapi.models.values():
            model.logical_id = self.create_cfn_logical_id('ApiGatewayModel' + model.name)
            cfn_export_dict = model.cfn_export_dict
            cfn_export_dict['RestApiId'] = troposphere.Ref(restapi_resource)
            if 'Schema' not in cfn_export_dict:
                cfn_export_dict['Schema'] = {}
            model_resource = troposphere.apigateway.Model.from_dict(model.logical_id, cfn_export_dict)
            model.resource = model_resource
            template.add_resource(model_resource)

        # Resource
        for resource in self.apigatewayrestapi.resources.values():
            resource_id = 'ApiGatewayResource' + self.create_cfn_logical_id(resource.name)
            cfn_export_dict = resource.cfn_export_dict
            if resource.parent_id == "RootResourceId":
                cfn_export_dict["ParentId"] = troposphere.GetAtt(restapi_resource, "RootResourceId")
                cfn_export_dict["RestApiId"] = troposphere.Ref(restapi_resource)
            else:
                raise NotImplementedError("ToDo: handle nested resources")
            resource_resource = troposphere.apigateway.Resource.from_dict(resource_id, cfn_export_dict)
            resource.resource = resource_resource
            resource_resource.DependsOn = restapi_logical_id
            template.add_resource(resource_resource)

        # Method
        for method in self.apigatewayrestapi.methods.values():
            method_id = 'ApiGatewayMethod' + self.create_cfn_logical_id(method.name)
            method.logical_id = method_id
            cfn_export_dict = method.cfn_export_dict
            for resource in self.apigatewayrestapi.resources.values():
                if resource.name == method.resource_id:
                    cfn_export_dict["ResourceId"] = troposphere.Ref(resource.resource)
            if 'ResourceId' not in cfn_export_dict:
                cfn_export_dict["ResourceId"] = troposphere.GetAtt(restapi_resource, 'RootResourceId')
            cfn_export_dict["RestApiId"] = troposphere.Ref(restapi_resource)
            uri = troposphere.Join('', ["arn:aws:apigateway:", method.region_name, ":lambda:path/2015-03-31/functions/", method.parameter_arn_ref, "/invocations"])
            cfn_export_dict["Integration"]["Uri"] = uri

            if method.integration.integration_type == 'AWS_PROXY':
                # IAM Role - allows API Gateway to invoke Lambda
                # ToDo: enable Api Gateway to invoke things other than Lambda ...
                iam_role_resource = troposphere.iam.Role(
                    self.create_cfn_logical_id('ApiGatewayIamRole' + self.apigatewayrestapi.name + method.name),
                    Path='/',
                    AssumeRolePolicyDocument=Policy(
                        Version='2012-10-17',
                        Statement=[
                            Statement(
                                Effect=Allow,
                                Action=[awacs.sts.AssumeRole],
                                Principal=Principal('Service',['apigateway.amazonaws.com'])
                            )
                        ],
                    ),
                    Policies=[
                        troposphere.iam.Policy(
                            PolicyName=self.create_cfn_logical_id('LambdaAccessApiGateway' + self.apigatewayrestapi.name + method.name),
                            PolicyDocument=Policy(
                                Version='2012-10-17',
                                Statement=[
                                    Statement(
                                        Effect=Allow,
                                        Action=[awacs.awslambda.InvokeFunction],
                                        Resource=[method.parameter_arn_ref],
                                    )
                                ]
                            )
                        )
                    ]
                )
                template.add_resource(iam_role_resource)
                cfn_export_dict["Integration"]["Credentials"] = troposphere.GetAtt(iam_role_resource, "Arn")

            elif method.integration.integration_type == 'AWS':
                # Enable Lambda (custom) integration
                # When send to a Lambda (Custom) the HTTP Method must always be POST regardless of
                # the HttpMethod
                cfn_export_dict["Integration"]["IntegrationHttpMethod"] = "POST"
                lambda_permission_resource = troposphere.awslambda.Permission(
                    self.create_cfn_logical_id('LambdaPermissionApiGateway' + method.name),
                    Action = 'lambda:InvokeFunction',
                    FunctionName = method.parameter_arn_ref,
                    Principal = 'apigateway.amazonaws.com',
                    SourceArn = troposphere.Sub(
                        "arn:aws:execute-api:${AWS::Region}:${AWS::AccountId}:${%s}/*/%s/" % (
                            restapi_logical_id, method.http_method
                        )
                    )
                )
                template.add_resource(lambda_permission_resource)

            # look-up the method_names and assign a Ref to the model resource
            # ToDo: validate model_names in the model
            responses = []
            for method_response in method.method_responses:
                response_dict = {"StatusCode": method_response.status_code}
                if method_response.response_models:
                    response_dict["ResponseModels"] = {}
                    for response_model in method_response.response_models:
                        for model in self.apigatewayrestapi.models.values():
                            if model.name == response_model.model_name:
                                response_dict["ResponseModels"][response_model.content_type] = troposphere.Ref(model.resource)
                responses.append(response_dict)
            cfn_export_dict["MethodResponses"] = responses

            method_resource = troposphere.apigateway.Method.from_dict(method_id, cfn_export_dict)
            method_resource.DependsOn = restapi_logical_id
            template.add_resource(method_resource)

        # Deployment
        deployment_resource = troposphere.apigateway.Deployment.from_dict(
            'ApiGatewayDeployment',
            {'Description': 'Deployment',
             'RestApiId': troposphere.Ref(restapi_resource) }
        )
        # The Deployment depends upon all Methods: collect every method's logical id
        # instead of overwriting DependsOn on each loop iteration.
        deployment_resource.DependsOn = [
            method.logical_id for method in self.apigatewayrestapi.methods.values()
        ]
        template.add_resource(deployment_resource)

        # Stage
        for stage in self.apigatewayrestapi.stages.values():
            stage_id = self.create_cfn_logical_id('ApiGatewayStage' + stage.name)
            cfn_export_dict = stage.cfn_export_dict
            cfn_export_dict["RestApiId"] = troposphere.Ref(restapi_resource)
            cfn_export_dict["DeploymentId"] = troposphere.Ref(deployment_resource)
            stage_resource = troposphere.apigateway.Stage.from_dict(stage_id, cfn_export_dict)
            template.add_resource(stage_resource)
Example #26
    def render(
        self,
        template,
        name,
        version,
        description,
        source,
        product_ids_by_region,
        tags,
        friendly_uid,
    ) -> str:
        template_description = f"{friendly_uid}-{version}"
        tpl = t.Template(Description=template_description)

        all_regions = product_ids_by_region.keys()

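        # The Source stage below defines one candidate Action per supported provider
        # (codecommit, github, codestarsourceconnection, s3) and picks the matching
        # one with a plain dict lookup on source["Provider"].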
        source_stage = codepipeline.Stages(
            Name="Source",
            Actions=[
                dict(
                    codecommit=codepipeline.Actions(
                        RunOrder=1,
                        RoleArn=t.Sub(
                            "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/servicecatalog-product-factory/SourceRole"
                        ),
                        ActionTypeId=codepipeline.ActionTypeId(
                            Category="Source",
                            Owner="AWS",
                            Version="1",
                            Provider="CodeCommit",
                        ),
                        OutputArtifacts=[
                            codepipeline.OutputArtifacts(Name=SOURCE_OUTPUT_ARTIFACT)
                        ],
                        Configuration={
                            "RepositoryName": source.get("Configuration").get(
                                "RepositoryName"
                            ),
                            "BranchName": source.get("Configuration").get("BranchName"),
                            "PollForSourceChanges": source.get("Configuration").get(
                                "PollForSourceChanges", True
                            ),
                        },
                        Name="Source",
                    ),
                    github=codepipeline.Actions(
                        RunOrder=1,
                        ActionTypeId=codepipeline.ActionTypeId(
                            Category="Source",
                            Owner="ThirdParty",
                            Version="1",
                            Provider="GitHub",
                        ),
                        OutputArtifacts=[
                            codepipeline.OutputArtifacts(Name=SOURCE_OUTPUT_ARTIFACT)
                        ],
                        Configuration={
                            "Owner": source.get("Configuration").get("Owner"),
                            "Repo": source.get("Configuration").get("Repo"),
                            "Branch": source.get("Configuration").get("Branch"),
                            "OAuthToken": t.Join(
                                "",
                                [
                                    "{{resolve:secretsmanager:",
                                    source.get("Configuration").get(
                                        "SecretsManagerSecret"
                                    ),
                                    ":SecretString:OAuthToken}}",
                                ],
                            ),
                            "PollForSourceChanges": source.get("Configuration").get(
                                "PollForSourceChanges"
                            ),
                        },
                        Name="Source",
                    ),
                    codestarsourceconnection=codepipeline.Actions(
                        RunOrder=1,
                        RoleArn=t.Sub(
                            "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/servicecatalog-product-factory/SourceRole"
                        ),
                        ActionTypeId=codepipeline.ActionTypeId(
                            Category="Source",
                            Owner="AWS",
                            Version="1",
                            Provider="CodeStarSourceConnection",
                        ),
                        OutputArtifacts=[
                            codepipeline.OutputArtifacts(Name=SOURCE_OUTPUT_ARTIFACT)
                        ],
                        Configuration={
                            "ConnectionArn": source.get("Configuration").get(
                                "ConnectionArn"
                            ),
                            "FullRepositoryId": source.get("Configuration").get(
                                "FullRepositoryId"
                            ),
                            "BranchName": source.get("Configuration").get("BranchName"),
                            "OutputArtifactFormat": source.get("Configuration").get(
                                "OutputArtifactFormat"
                            ),
                        },
                        Name="Source",
                    ),
                    s3=codepipeline.Actions(
                        RunOrder=1,
                        ActionTypeId=codepipeline.ActionTypeId(
                            Category="Source", Owner="AWS", Version="1", Provider="S3",
                        ),
                        OutputArtifacts=[
                            codepipeline.OutputArtifacts(Name=SOURCE_OUTPUT_ARTIFACT)
                        ],
                        Configuration={
                            "S3Bucket": source.get("Configuration").get("S3Bucket"),
                            "S3ObjectKey": source.get("Configuration").get(
                                "S3ObjectKey"
                            ),
                            "PollForSourceChanges": source.get("Configuration").get(
                                "PollForSourceChanges"
                            ),
                        },
                        Name="Source",
                    ),
                ).get(source.get("Provider", "").lower())
            ],
        )

        build_project_name = t.Sub("${AWS::StackName}-build")
        configuration = template.get("Configuration", {})
        runtime_versions = dict(
            nodejs=constants.BUILDSPEC_RUNTIME_VERSIONS_NODEJS_DEFAULT,
        )
        if configuration.get("runtime-versions"):
            runtime_versions.update(configuration.get("runtime-versions"))

        extra_commands = list(configuration.get("install", {}).get("commands", []))

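        # The build project's BuildSpec is generated inline: a Python dict is serialized
        # with yaml.safe_dump and wrapped in Fn::Sub; the CHANGE_ME environment variables
        # are placeholders that the pipeline overrides per execution.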
        tpl.add_resource(
            codebuild.Project(
                "BuildProject",
                Name=build_project_name,
                ServiceRole=t.Sub(
                    "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/servicecatalog-product-factory/DeliveryCodeRole"
                ),
                Tags=t.Tags.from_dict(**{"ServiceCatalogPuppet:Actor": "Framework"}),
                Artifacts=codebuild.Artifacts(Type="CODEPIPELINE"),
                TimeoutInMinutes=60,
                Environment=codebuild.Environment(
                    ComputeType=constants.ENVIRONMENT_COMPUTE_TYPE_DEFAULT,
                    Image=constants.ENVIRONMENT_IMAGE_DEFAULT,
                    Type=constants.ENVIRONMENT_TYPE_DEFAULT,
                    EnvironmentVariables=[
                        {"Type": "PLAINTEXT", "Name": "ACCOUNT_ID", "Value": "CHANGE_ME",},
                        {"Type": "PLAINTEXT", "Name": "REGION", "Value": "CHANGE_ME",},
                        {"Type": "PLAINTEXT", "Name": "NAME", "Value": "CHANGE_ME",},
                        {"Type": "PLAINTEXT", "Name": "VERSION", "Value": "CHANGE_ME",},
                        {"Type": "PLAINTEXT", "Name": "PROVISIONER_NAME", "Value": "CHANGE_ME",},
                        {"Type": "PLAINTEXT", "Name": "PROVISIONER_VERSION", "Value": "CHANGE_ME",},
                    ],
                ),
                Source=codebuild.Source(
                    BuildSpec=t.Sub(
                        yaml.safe_dump(
                            dict(
                                version=0.2,
                                phases=dict(
                                    install={
                                        "runtime-versions": runtime_versions,
                                        "commands": [
                                            f"pip install {constants.VERSION}"
                                            if "http" in constants.VERSION
                                            else f"pip install aws-service-catalog-factory=={constants.VERSION}",
                                        ] + extra_commands
                                    },
                                    pre_build={
                                        "commands": [
                                            "npm install",
                                            "npm run cdk synth -- --output sct-synth-output",
                                        ],
                                    },
                                    build={
                                        "commands": [
                                            f"servicecatalog-factory generate-template $PROVISIONER_NAME $PROVISIONER_VERSION $NAME $VERSION . > product.template.yaml",
                                        ]
                                    },
                                ),
                                artifacts={
                                    "name": BUILD_OUTPUT_ARTIFACT,
                                    "files": ["*", "**/*"],
                                    "exclude-paths": ["sct-synth-output/*"],
                                },
                            )
                        )
                    ),
                    Type="CODEPIPELINE",
                ),
                Description=t.Sub("Create a build stage for template CDK 1.0.0"),
            )
        )

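        # The Build action overrides the project's placeholder environment variables by
        # passing a JSON-encoded EnvironmentVariables configuration rendered through Fn::Sub.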
        build_stage = codepipeline.Stages(
            Name="Build",
            Actions=[
                codepipeline.Actions(
                    InputArtifacts=[
                        codepipeline.InputArtifacts(Name=SOURCE_OUTPUT_ARTIFACT),
                    ],
                    Name='Build',
                    ActionTypeId=codepipeline.ActionTypeId(
                        Category="Build",
                        Owner="AWS",
                        Version="1",
                        Provider="CodeBuild",
                    ),
                    OutputArtifacts=[
                        codepipeline.OutputArtifacts(Name=BUILD_OUTPUT_ARTIFACT)
                    ],
                    Configuration={
                        "ProjectName": build_project_name,
                        "PrimarySource": SOURCE_OUTPUT_ARTIFACT,
                        "EnvironmentVariables": t.Sub(
                            json.dumps(
                                [
                                    dict(name="ACCOUNT_ID", value="${AWS::AccountId}", type="PLAINTEXT"),
                                    dict(name="REGION", value="${AWS::Region}", type="PLAINTEXT"),
                                    dict(name="PROVISIONER_NAME", value='CDK', type="PLAINTEXT"),
                                    dict(name="PROVISIONER_VERSION", value='1.0.0', type="PLAINTEXT"),
                                    dict(name="NAME", value=name, type="PLAINTEXT"),
                                    dict(
                                        name="VERSION", value=version, type="PLAINTEXT"
                                    ),
                                ]
                            )
                        ),
                    },
                    RunOrder=1,
                )
            ],
        )

        validate_stage = codepipeline.Stages(
            Name="Validate",
            Actions=[
                codepipeline.Actions(
                    InputArtifacts=[
                        codepipeline.InputArtifacts(Name=BUILD_OUTPUT_ARTIFACT),
                    ],
                    Name="Validate",
                    ActionTypeId=codepipeline.ActionTypeId(
                        Category="Test", Owner="AWS", Version="1", Provider="CodeBuild",
                    ),
                    OutputArtifacts=[
                        codepipeline.OutputArtifacts(Name=VALIDATE_OUTPUT_ARTIFACT)
                    ],
                    Configuration={
                        "ProjectName": shared_resources.VALIDATE_PROJECT_NAME,
                        "PrimarySource": BUILD_OUTPUT_ARTIFACT,
                    },
                    RunOrder=1,
                )
            ],
        )
        package_stage = codepipeline.Stages(
            Name="Package",
            Actions=[
                codepipeline.Actions(
                    InputArtifacts=[
                        codepipeline.InputArtifacts(Name=BUILD_OUTPUT_ARTIFACT),
                    ],
                    Name="Package",
                    ActionTypeId=codepipeline.ActionTypeId(
                        Category="Build",
                        Owner="AWS",
                        Version="1",
                        Provider="CodeBuild",
                    ),
                    OutputArtifacts=[
                        codepipeline.OutputArtifacts(Name=PACKAGE_OUTPUT_ARTIFACT)
                    ],
                    Configuration={
                        "ProjectName": cdk_shared_resources.CDK_PACKAGE_PROJECT_NAME,
                        "PrimarySource": BUILD_OUTPUT_ARTIFACT,
                        "EnvironmentVariables": t.Sub(
                            json.dumps(
                                [
                                    dict(
                                        name="PIPELINE_NAME",
                                        value="${AWS::StackName}-pipeline",
                                        type="PLAINTEXT",
                                    ),
                                    dict(
                                        name="CODEPIPELINE_ID",
                                        value="#{codepipeline.PipelineExecutionId}",
                                        type="PLAINTEXT",
                                    ),
                                    dict(name="NAME", value=name, type="PLAINTEXT"),
                                    dict(
                                        name="VERSION", value=version, type="PLAINTEXT"
                                    ),
                                    dict(
                                        name="DESCRIPTION",
                                        value=description,
                                        type="PLAINTEXT",
                                    ),
                                    dict(
                                        name="TEMPLATE_FORMAT",
                                        value="yaml",
                                        type="PLAINTEXT",
                                    ),
                                    dict(
                                        name="PROVISIONER",
                                        value="CDK/1.0.0",
                                        type="PLAINTEXT",
                                    ),
                                ]
                            )
                        ),
                    },
                    RunOrder=1,
                )
            ],
        )

        deploy_stage = codepipeline.Stages(
            Name="Deploy",
            Actions=[
                codepipeline.Actions(
                    InputArtifacts=[
                        codepipeline.InputArtifacts(Name=PACKAGE_OUTPUT_ARTIFACT),
                    ],
                    Name="Deploy",
                    ActionTypeId=codepipeline.ActionTypeId(
                        Category="Build",
                        Owner="AWS",
                        Version="1",
                        Provider="CodeBuild",
                    ),
                    OutputArtifacts=[
                        codepipeline.OutputArtifacts(Name=DEPLOY_OUTPUT_ARTIFACT)
                    ],
                    Configuration={
                        "ProjectName": cdk_shared_resources.CDK_DEPLOY_PROJECT_NAME,
                        "PrimarySource": PACKAGE_OUTPUT_ARTIFACT,
                        "EnvironmentVariables": t.Sub(
                            json.dumps(
                                [
                                    dict(
                                        name="ACCOUNT_ID",
                                        value="${AWS::AccountId}",
                                        type="PLAINTEXT",
                                    ),
                                    dict(
                                        name="REGION",
                                        value="${AWS::Region}",
                                        type="PLAINTEXT",
                                    ),
                                    dict(
                                        name="PIPELINE_NAME",
                                        value="${AWS::StackName}-pipeline",
                                        type="PLAINTEXT",
                                    ),
                                    dict(
                                        name="CODEPIPELINE_ID",
                                        value="#{codepipeline.PipelineExecutionId}",
                                        type="PLAINTEXT",
                                    ),
                                    dict(
                                        name="PROVISIONER",
                                        value="CDK/1.0.0",
                                        type="PLAINTEXT",
                                    ),
                                ]
                            )
                        ),
                    },
                    RunOrder=1,
                )
            ],
        )

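        # Cross-region pipelines need one artifact store per region, hence an
        # ArtifactStoreMap entry for every region in product_ids_by_region.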
        tpl.add_resource(
            codepipeline.Pipeline(
                "Pipeline",
                RoleArn=t.Sub(
                    "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/servicecatalog-product-factory/CodePipelineRole"
                ),
                Stages=[
                    source_stage,
                    build_stage,
                    validate_stage,
                    package_stage,
                    deploy_stage,
                ],
                Name=t.Sub("${AWS::StackName}-pipeline"),
                ArtifactStores=[
                    codepipeline.ArtifactStoreMap(
                        Region=region,
                        ArtifactStore=codepipeline.ArtifactStore(
                            Type="S3",
                            Location=t.Sub(
                                "sc-factory-artifacts-${AWS::AccountId}-" + region
                            ),
                        ),
                    )
                    for region in all_regions
                ],
                RestartExecutionOnUpdate=False,
            )
        )

        return tpl.to_yaml(clean_up=True)
Example #27
    def create_codebuild_cfn(self, template, pipeline_config, action_config,
                             config_ref):
        # CodeBuild
        compute_type_param = self.create_cfn_parameter(
            param_type='String',
            name='CodeBuildComputeType',
            description=
            'The type of compute environment. This determines the number of CPU cores and memory the build environment uses.',
            value=action_config.codebuild_compute_type,
        )
        image_param = self.create_cfn_parameter(
            param_type='String',
            name='CodeBuildImage',
            description=
            'The image tag or image digest that identifies the Docker image to use for this build project.',
            value=action_config.codebuild_image,
        )
        deploy_env_name_param = self.create_cfn_parameter(
            param_type='String',
            name='DeploymentEnvironmentName',
            description=
            'The name of the environment codebuild will be deploying into.',
            value=action_config.deployment_environment,
        )
        # If ECS Release Phase, then create the needed parameters
        release_phase = action_config.release_phase
        ecs_release_phase_cluster_arn_param = []
        ecs_release_phase_cluster_name_param = []
        ecs_release_phase_service_arn_param = []
        if release_phase != None and release_phase.ecs != None:
            idx = 0
            for command in release_phase.ecs:
                service_obj = get_model_obj_from_ref(command.service,
                                                     self.paco_ctx.project)
                service_obj = get_parent_by_interface(service_obj,
                                                      schemas.IECSServices)
                cluster_arn_param = self.create_cfn_parameter(
                    param_type='String',
                    name=f'ReleasePhaseECSClusterArn{idx}',
                    description='ECS Cluster Arn',
                    value=service_obj.cluster + '.arn',
                )
                ecs_release_phase_cluster_arn_param.append(cluster_arn_param)
                cluster_name_param = self.create_cfn_parameter(
                    param_type='String',
                    name=f'ReleasePhaseECSClusterName{idx}',
                    description='ECS Cluster Name',
                    value=service_obj.cluster + '.name',
                )
                ecs_release_phase_cluster_name_param.append(cluster_name_param)
                service_arn_param = self.create_cfn_parameter(
                    param_type='String',
                    name=f'ReleasePhaseECSServiceArn{idx}',
                    description='ECS Service Arn',
                    value=command.service + '.arn',
                )
                ecs_release_phase_service_arn_param.append(service_arn_param)
                idx += 1
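        # Name for the IAM Role that the CodeBuild project assumes (the Role itself is created below)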
        self.project_role_name = self.create_iam_resource_name(
            name_list=[self.res_name_prefix, 'CodeBuild-Project'],
            filter_id='IAM.Role.RoleName')

        # codecommit_repo_users ManagedPolicies
        managed_policy_arns = []
        for user_ref in action_config.codecommit_repo_users:
            user = get_model_obj_from_ref(user_ref, self.paco_ctx.project)
            # codecommit_stack = user.__parent__.__parent__.__parent__.stack
            user_logical_id = self.gen_cf_logical_name(user.username)
            codecommit_user_policy_param = self.create_cfn_parameter(
                param_type='String',
                name='CodeCommitUserPolicy' + user_logical_id,
                description='The CodeCommit User Policy for ' + user.username,
                value=user_ref + '.policy.arn',
            )
            managed_policy_arns.append(
                troposphere.Ref(codecommit_user_policy_param))

        project_role_res = troposphere.iam.Role(
            title='CodeBuildProjectRole',
            template=template,
            RoleName=self.project_role_name,
            ManagedPolicyArns=managed_policy_arns,
            AssumeRolePolicyDocument=PolicyDocument(
                Version="2012-10-17",
                Statement=[
                    Statement(
                        Effect=Allow,
                        Action=[AssumeRole],
                        Principal=Principal("Service",
                                            ['codebuild.amazonaws.com']),
                    )
                ]))

        project_policy_name = self.create_iam_resource_name(
            name_list=[self.res_name_prefix, 'CodeBuild-Project'],
            filter_id='IAM.Policy.PolicyName')

        # Project Policy
        policy_statements = []
        if self.enable_artifacts_bucket:
            policy_statements.append(
                Statement(
                    Sid='S3Access',
                    Effect=Allow,
                    Action=[
                        Action('s3', 'PutObject'),
                        Action('s3', 'PutObjectAcl'),
                        Action('s3', 'GetObject'),
                        Action('s3', 'GetObjectAcl'),
                        Action('s3', 'ListBucket'),
                        Action('s3', 'DeleteObject'),
                        Action('s3', 'GetBucketPolicy'),
                        Action('s3', 'HeadObject'),
                    ],
                    Resource=[
                        troposphere.Sub('arn:aws:s3:::${ArtifactsBucketName}'),
                        troposphere.Sub(
                            'arn:aws:s3:::${ArtifactsBucketName}/*'),
                    ]))
        if pipeline_config.configuration.disable_codepipeline == False:
            policy_statements.append(
                Statement(Sid='KMSCMK',
                          Effect=Allow,
                          Action=[Action('kms', '*')],
                          Resource=[troposphere.Ref(self.cmk_arn_param)]))
        policy_statements.append(
            Statement(Sid='CloudWatchLogsAccess',
                      Effect=Allow,
                      Action=[
                          Action('logs', 'CreateLogGroup'),
                          Action('logs', 'CreateLogStream'),
                          Action('logs', 'PutLogEvents'),
                      ],
                      Resource=['arn:aws:logs:*:*:*']))

        release_phase = action_config.release_phase
        if release_phase != None and release_phase.ecs != None:
            ssm_doc = self.paco_ctx.project['resource']['ssm'].ssm_documents[
                'paco_ecs_docker_exec']
            # SSM Exec Document
            policy_statements.append(
                Statement(Sid='ECSReleasePhaseSSMCore',
                          Effect=Allow,
                          Action=[
                              Action('ssm', 'ListDocuments'),
                              Action('ssm', 'ListDocumentVersions'),
                              Action('ssm', 'DescribeDocument'),
                              Action('ssm', 'GetDocument'),
                              Action('ssm', 'DescribeInstanceInformation'),
                              Action('ssm', 'DescribeDocumentParameters'),
                              Action('ssm', 'CancelCommand'),
                              Action('ssm', 'ListCommands'),
                              Action('ssm', 'ListCommandInvocations'),
                              Action('ssm', 'DescribeAutomationExecutions'),
                              Action('ssm', 'DescribeInstanceProperties'),
                              Action('ssm', 'GetCommandInvocation'),
                              Action('ec2', 'DescribeInstanceStatus'),
                          ],
                          Resource=['*']))
            policy_statements.append(
                Statement(
                    Sid=f'ECSReleasePhaseSSMSendCommandDocument',
                    Effect=Allow,
                    Action=[
                        Action('ssm', 'SendCommand'),
                    ],
                    Resource=[
                        f'arn:aws:ssm:{self.aws_region}:{self.account_ctx.get_id()}:document/paco_ecs_docker_exec'
                    ]))
            idx = 0
            for command in release_phase.ecs:
                policy_statements.append(
                    Statement(
                        Sid=f'ECSReleasePhaseSSMSendCommand{idx}',
                        Effect=Allow,
                        Action=[
                            Action('ssm', 'SendCommand'),
                        ],
                        Resource=[f'arn:aws:ec2:*:*:instance/*'],
                        Condition=Condition(
                            StringLike({
                                'ssm:resourceTag/Paco-ECSCluster-Name':
                                troposphere.Ref(
                                    ecs_release_phase_cluster_name_param[idx])
                            }))))

                policy_statements.append(
                    Statement(
                        Sid=f'ECSReleasePhaseClusterAccess{idx}',
                        Effect=Allow,
                        Action=[
                            Action('ecs', 'DescribeServices'),
                            Action('ecs', 'RunTask'),
                            Action('ecs', 'StopTask'),
                            Action('ecs', 'DescribeContainerInstances'),
                            Action('ecs', 'ListTasks'),
                            Action('ecs', 'DescribeTasks'),
                        ],
                        Resource=['*'],
                        Condition=Condition(
                            StringEquals({
                                'ecs:cluster':
                                troposphere.Ref(
                                    ecs_release_phase_cluster_arn_param[idx])
                            }))))
                idx += 1

            policy_statements.append(
                Statement(Sid='ECSReleasePhaseSSMAutomationExecution',
                          Effect=Allow,
                          Action=[
                              Action('ssm', 'StartAutomationExecution'),
                              Action('ssm', 'StopAutomationExecution'),
                              Action('ssm', 'GetAutomationExecution'),
                          ],
                          Resource=['arn:aws:ssm:::automation-definition/']))
            # ECS Policies
            policy_statements.append(
                Statement(Sid='ECSReleasePhaseECS',
                          Effect=Allow,
                          Action=[
                              Action('ecs', 'DescribeTaskDefinition'),
                              Action('ecs', 'DeregisterTaskDefinition'),
                              Action('ecs', 'RegisterTaskDefinition'),
                              Action('ecs', 'ListTagsForResource'),
                              Action('ecr', 'DescribeImages')
                          ],
                          Resource=['*']))

            # IAM Pass Role
            policy_statements.append(
                Statement(Sid='IAMPassRole',
                          Effect=Allow,
                          Action=[Action('iam', 'passrole')],
                          Resource=['*']))

        if len(action_config.secrets) > 0:
            secrets_arn_list = []
            for secret_ref in action_config.secrets:
                name_hash = md5sum(str_data=secret_ref)
                secret_arn_param = self.create_cfn_parameter(
                    param_type='String',
                    name='SecretsArn' + name_hash,
                    description=
                    'Secrets Manager Secret Arn to expose access to',
                    value=secret_ref + '.arn')
                secrets_arn_list.append(troposphere.Ref(secret_arn_param))
            policy_statements.append(
                Statement(Sid='SecretsManager',
                          Effect=Allow,
                          Action=[
                              Action('secretsmanager', 'GetSecretValue'),
                          ],
                          Resource=secrets_arn_list))

        project_policy_res = troposphere.iam.PolicyType(
            title='CodeBuildProjectPolicy',
            PolicyName=project_policy_name,
            PolicyDocument=PolicyDocument(Statement=policy_statements),
            Roles=[troposphere.Ref(project_role_res)])
        project_policy_res.DependsOn = project_role_res
        template.add_resource(project_policy_res)

        # User defined policies
        for policy in action_config.role_policies:
            policy_name = self.create_resource_name_join(
                name_list=[
                    self.res_name_prefix, 'CodeBuild-Project', policy.name
                ],
                separator='-',
                filter_id='IAM.Policy.PolicyName',
                hash_long_names=True,
                camel_case=True)
            statement_list = []

            for statement in policy.statement:
                action_list = []
                for action in statement.action:
                    action_parts = action.split(':')
                    action_list.append(Action(action_parts[0],
                                              action_parts[1]))
                statement_list.append(
                    Statement(Effect=statement.effect,
                              Action=action_list,
                              Resource=statement.resource))
            troposphere.iam.PolicyType(
                title=self.create_cfn_logical_id('CodeBuildProjectPolicy' +
                                                 policy.name,
                                                 camel_case=True),
                template=template,
                PolicyName=policy_name,
                PolicyDocument=PolicyDocument(Statement=statement_list, ),
                Roles=[troposphere.Ref(project_role_res)])

        # ECR Permission Policies
        self.set_ecr_repositories_statements(
            action_config.ecr_repositories, template,
            f'{self.res_name_prefix}-CodeBuild-Project',
            [troposphere.Ref(project_role_res)])

        # CodeBuild Project Resource
        timeout_mins_param = self.create_cfn_parameter(
            param_type='String',
            name='TimeoutInMinutes',
            description=
            'How long, in minutes, from 5 to 480 (8 hours), for AWS CodeBuild to wait before timing out any related build that did not get marked as completed.',
            value=action_config.timeout_mins,
        )

        # Environment Variables
        codebuild_env_vars = [{
            'Name': 'DeploymentEnvironmentName',
            'Value': troposphere.Ref(deploy_env_name_param)
        }]
        if pipeline_config.configuration.disable_codepipeline == False:
            codebuild_env_vars.append({
                'Name':
                'KMSKey',
                'Value':
                troposphere.Ref(self.cmk_arn_param)
            })
        if self.enable_artifacts_bucket:
            codebuild_env_vars.append({
                'Name':
                'ArtifactsBucket',
                'Value':
                troposphere.Ref(self.artifacts_bucket_name_param),
            })
        # If ECS Release Phase, then add the config to the environment
        release_phase = action_config.release_phase
        if release_phase != None and release_phase.ecs != None:
            idx = 0
            for command in release_phase.ecs:
                codebuild_env_vars.append({
                    'Name':
                    f'PACO_CB_RP_ECS_CLUSTER_ID_{idx}',
                    'Value':
                    troposphere.Ref(ecs_release_phase_cluster_arn_param[idx])
                })
                codebuild_env_vars.append({
                    'Name':
                    f'PACO_CB_RP_ECS_SERVICE_ID_{idx}',
                    'Value':
                    troposphere.Ref(ecs_release_phase_service_arn_param[idx])
                })
                idx += 1

        # CodeBuild Project properties (converted with Project.from_dict below)
        project_dict = {
            'Name':
            troposphere.Ref(self.resource_name_prefix_param),
            'Artifacts': {
                'Type': 'NO_ARTIFACTS'
            },
            'Description':
            troposphere.Ref('AWS::StackName'),
            'ServiceRole':
            troposphere.GetAtt('CodeBuildProjectRole', 'Arn'),
            'Environment': {
                'Type': 'LINUX_CONTAINER',
                'ComputeType': troposphere.Ref(compute_type_param),
                'Image': troposphere.Ref(image_param),
                'EnvironmentVariables': codebuild_env_vars,
                'PrivilegedMode': action_config.privileged_mode
            },
            'Source': {
                'Type': 'NO_SOURCE'
            },
            'TimeoutInMinutes':
            troposphere.Ref(timeout_mins_param),
            'Tags':
            troposphere.codebuild.Tags(
                Name=troposphere.Ref(self.resource_name_prefix_param))
        }

        if action_config.buildspec:
            project_dict['Source']['BuildSpec'] = action_config.buildspec

        if pipeline_config.configuration.disable_codepipeline == False:
            project_dict['EncryptionKey'] = troposphere.Ref(self.cmk_arn_param)
            project_dict['Artifacts'] = {'Type': 'CODEPIPELINE'}
            project_dict['Source']['Type'] = 'CODEPIPELINE'
        else:
            if action_config.artifacts == None or action_config.artifacts.type == 'NO_ARTIFACTS':
                project_dict['Artifacts'] = {
                    'Type': 'NO_ARTIFACTS',
                }
            else:
                project_dict['Artifacts'] = {
                    'Type': action_config.artifacts.type,
                    'Location':
                    troposphere.Ref(self.artifacts_bucket_name_param),
                    'NamespaceType': action_config.artifacts.namespace_type,
                    'Packaging': action_config.artifacts.packaging,
                    'Name': action_config.artifacts.name
                }
                if action_config.artifacts.path != None:
                    project_dict['Artifacts'][
                        'Path'] = action_config.artifacts.path
            if action_config.source.github != None:
                github_config = action_config.source.github
                project_dict['Source']['Type'] = 'GITHUB'
                location = f'https://github.com/{github_config.github_owner}/{github_config.github_repository}.git'
                project_dict['Source']['Location'] = location
                project_dict['Source'][
                    'ReportBuildStatus'] = github_config.report_build_status
                if github_config.deployment_branch_name != None:
                    project_dict[
                        'SourceVersion'] = github_config.deployment_branch_name
            else:
                raise PacoException(
                    "CodeBuild source must be configured when Codepipeline is disabled."
                )

        if action_config.concurrent_build_limit > 0:
            project_dict[
                'ConcurrentBuildLimit'] = action_config.concurrent_build_limit

        if action_config.vpc_config != None:
            vpc_config = action_config.vpc_config
            vpc_id_param = self.create_cfn_parameter(
                name='VPC',
                param_type='AWS::EC2::VPC::Id',
                description='The VPC Id',
                value='paco.ref netenv.{}.<environment>.<region>.network.vpc.id'
                .format(self.env_ctx.netenv.name),
            )

            security_group_list = []
            for sg_ref in vpc_config.security_groups:
                ref = Reference(sg_ref)
                sg_param_name = self.gen_cf_logical_name('SecurityGroupId' +
                                                         ref.parts[-2] +
                                                         ref.parts[-1])
                sg_param = self.create_cfn_parameter(
                    name=sg_param_name,
                    param_type='String',
                    description='Security Group Id',
                    value=sg_ref + '.id',
                )
                security_group_list.append(troposphere.Ref(sg_param))

            # security_group_list_param = self.create_cfn_ref_list_param(
            #     param_type='List<AWS::EC2::SecurityGroup::Id>',
            #     name='SecurityGroupList',
            #     description='List of security group ids to attach to CodeBuild.',
            #     value=vpc_config.security_groups,
            #     ref_attribute='id',
            # )
            subnet_id_list = []
            subnet_arn_list = []
            az_size = self.env_ctx.netenv[self.account_ctx.name][
                self.aws_region].network.availability_zones
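            # For every segment and availability zone, create a Subnet Id parameter
            # (used by VpcConfig) and a Subnet ARN parameter (used to scope the
            # CreateNetworkInterfacePermission condition below).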
            for segment_ref in vpc_config.segments:
                for az_idx in range(1, az_size + 1):
                    # Subnet Ids
                    segment_name = self.create_cfn_logical_id(
                        f"Segment{segment_ref.split('.')[-1]}AZ{az_idx}")
                    subnet_id_param = self.create_cfn_parameter(
                        name=segment_name,
                        param_type='AWS::EC2::Subnet::Id',
                        description=
                        f'VPC Subnet Id in AZ{az_idx} for CodeBuild VPC Config',
                        value=segment_ref + f'.az{az_idx}.subnet_id')
                    subnet_id_list.append(troposphere.Ref(subnet_id_param))
                    # Subnet Arns
                    subnet_arn_param = self.create_cfn_parameter(
                        name=segment_name + 'Arn',
                        param_type='String',
                        description=
                        f'VPC Subnet Id ARN in AZ{az_idx} for CodeBuild VPC Config',
                        value=segment_ref + f'.az{az_idx}.subnet_id.arn')
                    subnet_arn_list.append(troposphere.Ref(subnet_arn_param))

            if len(subnet_id_list) == 0:
                raise PacoException(
                    "CodeBuild VPC Config must have at least one segment defined."
                )

            # VPC Config Permissions
            policy_statements.append(
                Statement(Sid='VpcConfigPermissions',
                          Effect=Allow,
                          Action=[
                              Action('ec2', 'CreateNetworkInterface'),
                              Action('ec2', 'DescribeDhcpOptions'),
                              Action('ec2', 'DescribeNetworkInterfaces'),
                              Action('ec2', 'DeleteNetworkInterface'),
                              Action('ec2', 'DescribeSubnets'),
                              Action('ec2', 'DescribeSecurityGroups'),
                              Action('ec2', 'DescribeVpcs'),
                          ],
                          Resource=['*']))
            policy_statements.append(
                Statement(
                    Sid='VpcConfigNetworkInterface',
                    Effect=Allow,
                    Action=[
                        Action('ec2', 'CreateNetworkInterfacePermission'),
                    ],
                    Resource=[
                        f'arn:aws:ec2:{self.aws_region}:{self.account_ctx.id}:network-interface/*'
                    ],
                    Condition=Condition([
                        StringEquals({
                            "ec2:AuthorizedService":
                            "codebuild.amazonaws.com"
                        }),
                        ArnEquals({"ec2:Subnet": subnet_arn_list})
                    ])))

            project_dict['VpcConfig'] = {
                'VpcId': troposphere.Ref(vpc_id_param),
                'SecurityGroupIds': security_group_list,
                'Subnets': subnet_id_list
            }

        # Batch Build Config
        batch_service_role_res = None
        if action_config.build_batch_config != None and action_config.build_batch_config.is_enabled():
            batch_config = action_config.build_batch_config

            batch_service_role_name = self.create_iam_resource_name(
                name_list=[
                    self.res_name_prefix, 'CodeBuild-BuildBatch-ServiceRole'
                ],
                filter_id='IAM.Role.RoleName')
            batch_service_role_res = troposphere.iam.Role(
                title='CodeBuildBuildBatchConfigServiceRole',
                template=template,
                RoleName=batch_service_role_name,
                AssumeRolePolicyDocument=PolicyDocument(
                    Version="2012-10-17",
                    Statement=[
                        Statement(
                            Effect=Allow,
                            Action=[AssumeRole],
                            Principal=Principal("Service",
                                                ['codebuild.amazonaws.com']),
                        )
                    ]))

            project_dict['BuildBatchConfig'] = {
                'BatchReportMode': batch_config.batch_report_mode,
                'CombineArtifacts': batch_config.combine_artifacts,
                'TimeoutInMins': batch_config.timeout_in_mins,
                'ServiceRole': troposphere.GetAtt(batch_service_role_res,
                                                  'Arn'),
                'Restrictions': {
                    'ComputeTypesAllowed':
                    batch_config.restrictions.compute_types_allowed,
                    'MaximumBuildsAllowed':
                    batch_config.restrictions.maximum_builds_allowed
                }
            }

        project_res = troposphere.codebuild.Project.from_dict(
            'CodeBuildProject', project_dict)
        project_res.DependsOn = project_policy_res
        if action_config.build_batch_config != None and action_config.build_batch_config.is_enabled():
            # Keep the dependency on the project policy and add the batch service role
            project_res.DependsOn = [project_policy_res, batch_service_role_res]

        self.template.add_resource(project_res)

        if batch_service_role_res != None:
            build_batch_policy_statements = []
            build_batch_policy_statements.append(
                Statement(Sid='BatchServiceRole',
                          Effect=Allow,
                          Action=[
                              Action('codebuild', 'StartBuild'),
                              Action('codebuild', 'StopBuild'),
                              Action('codebuild', 'RetryBuild')
                          ],
                          Resource=[troposphere.GetAtt(project_res, 'Arn')]))

            batch_policy_name = self.create_iam_resource_name(
                name_list=[self.res_name_prefix, 'CodeBuild-BatchPolicy'],
                filter_id='IAM.Policy.PolicyName')
            batch_policy_res = troposphere.iam.PolicyType(
                title='CodeBuildBuildBatchPolicy',
                template=template,
                PolicyName=batch_policy_name,
                PolicyDocument=PolicyDocument(
                    Statement=build_batch_policy_statements),
                Roles=[troposphere.Ref(batch_service_role_res)])

            batch_policy_res.DependsOn = project_res

        self.create_output(title='ProjectArn',
                           value=troposphere.GetAtt(project_res, 'Arn'),
                           description='CodeBuild Project Arn',
                           ref=config_ref + '.project.arn')

        return project_res
def get_resources() -> list:
    all_regions = config.get_regions()

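    # Shared CodeBuild projects: "Validate" copies the generated product template to an
    # S3 bucket and runs cloudformation validate-template against it; "Deploy"
    # (DEPLOY_IN_GOVCLOUD_PROJECT_NAME) installs the factory tooling and runs the
    # commands returned by get_commands_for_deploy().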
    return [
        codebuild.Project(
            "Validate",
            Name=VALIDATE_PROJECT_NAME,
            ServiceRole=t.Sub(
                "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/servicecatalog-product-factory/DeliveryCodeRole"
            ),
            Tags=t.Tags.from_dict(
                **{"ServiceCatalogPuppet:Actor": "Framework"}),
            Artifacts=codebuild.Artifacts(Type="CODEPIPELINE"),
            TimeoutInMinutes=60,
            Environment=codebuild.Environment(
                ComputeType=constants.ENVIRONMENT_COMPUTE_TYPE_DEFAULT,
                Image=constants.ENVIRONMENT_IMAGE_DEFAULT,
                Type=constants.ENVIRONMENT_TYPE_DEFAULT,
                EnvironmentVariables=[
                    codebuild.EnvironmentVariable(
                        Name="TEMPLATE_FORMAT",
                        Type="PLAINTEXT",
                        Value="yaml",
                    )
                ],
            ),
            Source=codebuild.Source(
                BuildSpec=t.Sub(
                    yaml.safe_dump(
                        dict(
                            version=0.2,
                            phases=dict(build={
                                "commands": [
                                    "export FactoryTemplateValidateBucket=$(aws cloudformation list-stack-resources --stack-name servicecatalog-factory --query 'StackResourceSummaries[?LogicalResourceId==`FactoryTemplateValidateBucket`].PhysicalResourceId' --output text)",
                                    "aws s3 cp product.template.$TEMPLATE_FORMAT s3://$FactoryTemplateValidateBucket/$CODEBUILD_BUILD_ID.$TEMPLATE_FORMAT",
                                    "aws cloudformation validate-template --template-url https://$FactoryTemplateValidateBucket.s3.$AWS_REGION.amazonaws.com/$CODEBUILD_BUILD_ID.$TEMPLATE_FORMAT",
                                ]
                            }, ),
                            artifacts=dict(
                                name=VALIDATE_OUTPUT_ARTIFACT,
                                files=["*", "**/*"],
                            ),
                        ))),
                Type="CODEPIPELINE",
            ),
            Description=t.Sub("Run validate"),
        ),
        codebuild.Project(
            "Deploy",
            Name=DEPLOY_IN_GOVCLOUD_PROJECT_NAME,
            ServiceRole=t.Sub(
                "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/servicecatalog-product-factory/DeliveryCodeRole"
            ),
            Tags=t.Tags.from_dict(
                **{"ServiceCatalogPuppet:Actor": "Framework"}),
            Artifacts=codebuild.Artifacts(Type="CODEPIPELINE"),
            TimeoutInMinutes=60,
            Environment=codebuild.Environment(
                ComputeType=constants.ENVIRONMENT_COMPUTE_TYPE_DEFAULT,
                Image=constants.ENVIRONMENT_IMAGE_DEFAULT,
                Type=constants.ENVIRONMENT_TYPE_DEFAULT,
                EnvironmentVariables=[
                    codebuild.EnvironmentVariable(
                        Type="PLAINTEXT",
                        Name="ACCOUNT_ID",
                        Value=t.Sub("${AWS::AccountId}"),
                    ),
                    codebuild.EnvironmentVariable(Name="PIPELINE_NAME",
                                                  Type="PLAINTEXT",
                                                  Value="CHANGE_ME"),
                    codebuild.EnvironmentVariable(Name="CODEPIPELINE_ID",
                                                  Type="PLAINTEXT",
                                                  Value="CHANGE_ME"),
                ],
            ),
            Source=codebuild.Source(
                BuildSpec=t.Sub(
                    yaml.safe_dump(
                        dict(
                            version=0.2,
                            phases=dict(
                                install={
                                    "runtime-versions":
                                    dict(
                                        python="3.7",
                                        nodejs=constants.
                                        BUILDSPEC_RUNTIME_VERSIONS_NODEJS_DEFAULT,
                                    ),
                                    "commands": [
                                        f"pip install {constants.VERSION}"
                                        if "http" in constants.VERSION else
                                        f"pip install aws-service-catalog-factory=={constants.VERSION}",
                                    ],
                                },
                                build={"commands": get_commands_for_deploy()},
                            ),
                            artifacts={
                                "name": DEPLOY_OUTPUT_ARTIFACT,
                                "files": ["*", "**/*"],
                            },
                        ))),
                Type="CODEPIPELINE",
            ),
            Description=t.Sub(
                "Create a deploy stage for template cloudformation"),
        ),
    ]
Example #29
    def __init__(
        self,
        stack,
        paco_ctx,
    ):
        super().__init__(
            stack,
            paco_ctx,
            iam_capabilities=["CAPABILITY_NAMED_IAM"],
        )
        account_ctx = stack.account_ctx
        aws_region = stack.aws_region
        self.set_aws_name('Lambda', self.resource_group_name, self.resource_name)
        awslambda = self.awslambda = self.stack.resource
        self.init_template('Lambda Function')

        # if not enabled finish with only empty placeholder
        if not awslambda.is_enabled(): return

        # Parameters
        sdb_cache_param = self.create_cfn_parameter(
            name='EnableSDBCache',
            param_type='String',
            description='Boolean indicating whether an SDB Domain will be created to be used as a cache.',
            value=awslambda.sdb_cache
        )
        function_description_param = self.create_cfn_parameter(
            name='FunctionDescription',
            param_type='String',
            description='A description of the Lamdba Function.',
            value=awslambda.description
        )
        handler_param = self.create_cfn_parameter(
            name='Handler',
            param_type='String',
            description='The name of the function to call upon execution.',
            value=awslambda.handler
        )
        runtime_param = self.create_cfn_parameter(
            name='Runtime',
            param_type='String',
            description='The name of the runtime language.',
            value=awslambda.runtime
        )
        role_arn_param = self.create_cfn_parameter(
            name='RoleArn',
            param_type='String',
            description='The execution role for the Lambda Function.',
            value=awslambda.iam_role.get_arn()
        )
        role_name_param = self.create_cfn_parameter(
            name='RoleName',
            param_type='String',
            description='The execution role name for the Lambda Function.',
            value=awslambda.iam_role.resolve_ref_obj.role_name
        )
        memory_size_param = self.create_cfn_parameter(
            name='MemorySize',
            param_type='Number',
            description="The amount of memory that your function has access to. Increasing the function's" + \
            " memory also increases its CPU allocation. The default value is 128 MB. The value must be a multiple of 64 MB.",
            value=awslambda.memory_size
        )
        reserved_conc_exec_param = self.create_cfn_parameter(
            name='ReservedConcurrentExecutions',
            param_type='Number',
            description='The number of simultaneous executions to reserve for the function.',
            value=awslambda.reserved_concurrent_executions
        )
        timeout_param = self.create_cfn_parameter(
            name='Timeout',
            param_type='Number',
            description='The amount of time that Lambda allows a function to run before stopping it. ',
            value=awslambda.timeout
        )
        layers_param = self.create_cfn_parameter(
            name='Layers',
            param_type='CommaDelimitedList',
            description='List of up to 5 Lambda Layer ARNs.',
            value=','.join(awslambda.layers)
        )

        # create the Lambda resource
        cfn_export_dict = {
            'Description': troposphere.Ref(function_description_param),
            'Handler': troposphere.Ref(handler_param),
            'MemorySize': troposphere.Ref(memory_size_param),
            'Runtime': troposphere.Ref(runtime_param),
            'Role': troposphere.Ref(role_arn_param),
            'Timeout': troposphere.Ref(timeout_param),
        }
        if awslambda.reserved_concurrent_executions:
            cfn_export_dict['ReservedConcurrentExecutions'] = troposphere.Ref(reserved_conc_exec_param)

        if len(awslambda.layers) > 0:
            cfn_export_dict['Layers'] = troposphere.Ref(layers_param)

        # Lambda VPC
        if awslambda.vpc_config != None:
            vpc_security_group = self.create_cfn_ref_list_param(
                name='VpcSecurityGroupIdList',
                param_type='List<AWS::EC2::SecurityGroup::Id>',
                description='VPC Security Group Id List',
                value=awslambda.vpc_config.security_groups,
                ref_attribute='id',
            )
            # Segment SubnetList is a Segment stack Output based on availability zones
            segment_ref = awslambda.vpc_config.segments[0] + '.subnet_id_list'
            subnet_list_param = self.create_cfn_parameter(
                name='VpcSubnetIdList',
                param_type='List<AWS::EC2::Subnet::Id>',
                description='VPC Subnet Id List',
                value=segment_ref
            )
            cfn_export_dict['VpcConfig'] = {
                'SecurityGroupIds': troposphere.Ref(vpc_security_group),
                'SubnetIds': troposphere.Ref(subnet_list_param),
            }

        # Code object: S3 Bucket, inline ZipFile or deploy artifact?
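        # Precedence: an explicit code.s3_bucket/s3_key is used as-is; otherwise code.zipfile
        # is inlined as ZipFile when it points at a file, or packaged and uploaded via
        # init_lambda_code() when it points at a directory.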
        if awslambda.code.s3_bucket:
            if awslambda.code.s3_bucket.startswith('paco.ref '):
                value = awslambda.code.s3_bucket + ".name"
            else:
                value = awslambda.code.s3_bucket
            s3bucket_param = self.create_cfn_parameter(
                name='CodeS3Bucket',
                description="An Amazon S3 bucket in the same AWS Region as your function. The bucket can be in a different AWS account.",
                param_type='String',
                value=value
            )
            s3key_param = self.create_cfn_parameter(
                name='CodeS3Key',
                description="The Amazon S3 key of the deployment package.",
                param_type='String',
                value=awslambda.code.s3_key
            )
            cfn_export_dict['Code'] = {
                'S3Bucket': troposphere.Ref(s3bucket_param),
                'S3Key': troposphere.Ref(s3key_param),
            }
        else:
            zip_path = Path(awslambda.code.zipfile)
            if zip_path.is_file():
                cfn_export_dict['Code'] = {
                    'ZipFile': zip_path.read_text()
                }
            elif zip_path.is_dir():
                # get S3Bucket/S3Key or if it does not exist, it will create the bucket and artifact
                # and then upload the artifact
                bucket_name, artifact_name = init_lambda_code(
                    self.paco_ctx.paco_buckets,
                    self.stack.resource,
                    awslambda.code.zipfile,
                    self.stack.account_ctx,
                    self.stack.aws_region,
                )
                s3bucket_param = self.create_cfn_parameter(
                    name='CodeS3Bucket',
                    description="The Paco S3 Bucket for configuration",
                    param_type='String',
                    value=bucket_name
                )
                s3key_param = self.create_cfn_parameter(
                    name='CodeS3Key',
                    description="The Lambda code artifact S3 Key.",
                    param_type='String',
                    value=artifact_name
                )
                cfn_export_dict['Code'] = {
                    'S3Bucket': troposphere.Ref(s3bucket_param),
                    'S3Key': troposphere.Ref(s3key_param),
                }

        # Environment variables
        var_export = {}
        if awslambda.environment != None and awslambda.environment.variables != None:
            for var in awslambda.environment.variables:
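                # Parameter logical names must be alphanumeric, so underscores are stripped
                # from the key for the Parameter name; the original key is kept for the
                # Environment variable mapping.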
                name = var.key.replace('_','')
                env_param = self.create_cfn_parameter(
                    name='EnvVar{}'.format(name),
                    param_type='String',
                    description='Env var for {}'.format(name),
                    value=var.value,
                )
                var_export[var.key] = troposphere.Ref(env_param)
            if awslambda.sdb_cache == True:
                var_export['SDB_CACHE_DOMAIN'] = troposphere.Ref('LambdaSDBCacheDomain')
            if len(awslambda.log_group_names) > 0:
                # Add PACO_LOG_GROUPS Environment Variable
                paco_log_groups = [
                    prefixed_name(awslambda, loggroup_name, self.paco_ctx.legacy_flag)
                    for loggroup_name in awslambda.log_group_names
                ]
                paco_log_groups_param = self.create_cfn_parameter(
                    name='EnvVariablePacoLogGroups',
                    param_type='String',
                    description='Env var for Paco Log Groups',
                    value=','.join(paco_log_groups),
                )
                var_export['PACO_LOG_GROUPS'] = troposphere.Ref(paco_log_groups_param)
        cfn_export_dict['Environment'] = { 'Variables': var_export }

        # Lambda resource
        self.awslambda_resource = troposphere.awslambda.Function.from_dict(
            'Function',
            cfn_export_dict
        )
        self.template.add_resource(self.awslambda_resource)

        # SDB Cache with SDB Domain and SDB Domain Policy resources
        if awslambda.sdb_cache == True:
            sdb_domain_resource = troposphere.sdb.Domain(
                title='LambdaSDBCacheDomain',
                template=self.template,
                Description="Lambda Function Domain"
            )
            sdb_policy = troposphere.iam.PolicyType(
                title='LambdaSDBCacheDomainPolicy',
                template=self.template,
                PolicyName='SDBDomain',
                PolicyDocument=Policy(
                    Version='2012-10-17',
                    Statement=[
                        Statement(
                            Effect=Allow,
                            Action=[Action("sdb","*")],
                            Resource=[
                                troposphere.Sub(
                                    'arn:aws:sdb:${AWS::Region}:${AWS::AccountId}:domain/${DomainName}',
                                    DomainName=troposphere.Ref('LambdaSDBCacheDomain')
                                )
                            ],
                        )
                    ],
                ),
                # AWS::IAM::Policy Roles expects a list of role names, not ARNs
                Roles=[troposphere.Ref(role_name_param)],
            )
            sdb_policy.DependsOn = sdb_domain_resource
            self.awslambda_resource.DependsOn = sdb_domain_resource

        # Permissions
        # SNS Topic Lambda permissions and subscription
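        # For each topic ref: a Parameter holding the Topic ARN, a lambda:InvokeFunction
        # Permission for sns.amazonaws.com, and a Subscription of this function to the topic.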
        idx = 1
        for sns_topic_ref in awslambda.sns_topics:
            # SNS Topic Arn parameters
            param_name = 'SNSTopicArn%d' % idx
            self.create_cfn_parameter(
                name=param_name,
                param_type='String',
                description='An SNS Topic ARN to grant permission to.',
                value=sns_topic_ref + '.arn'
            )

            # Lambda permission
            troposphere.awslambda.Permission(
                title=param_name + 'Permission',
                template=self.template,
                Action="lambda:InvokeFunction",
                FunctionName=troposphere.GetAtt(self.awslambda_resource, 'Arn'),
                Principal='sns.amazonaws.com',
                SourceArn=troposphere.Ref(param_name),
            )

            # SNS Topic subscription
            sns_topic = get_model_obj_from_ref(sns_topic_ref, self.paco_ctx.project)
            troposphere.sns.SubscriptionResource(
                title=param_name + 'Subscription',
                template=self.template,
                Endpoint=troposphere.GetAtt(self.awslambda_resource, 'Arn'),
                Protocol='lambda',
                TopicArn=troposphere.Ref(param_name),
                Region=sns_topic.region_name
            )
            idx += 1


        # Lambda permissions for connected Paco resources
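        # Walk the parent Application model and add a lambda:InvokeFunction Permission for
        # any S3 Bucket notification, Events Rule target or IoT Analytics activity that
        # references this function.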

        app = get_parent_by_interface(awslambda, schemas.IApplication)
        for obj in get_all_nodes(app):
            # S3 Bucket notification permission(s)
            if schemas.IS3Bucket.providedBy(obj):
                seen = {}
                if hasattr(obj, 'notifications'):
                    if hasattr(obj.notifications, 'lambdas'):
                        for lambda_notif in obj.notifications.lambdas:
                            if lambda_notif.function == awslambda.paco_ref:
                                # yes, this Lambda gets notification from this S3Bucket
                                group = get_parent_by_interface(obj, schemas.IResourceGroup)
                                s3_logical_name = self.gen_cf_logical_name(group.name + obj.name, '_')
                                if s3_logical_name not in seen:
                                    troposphere.awslambda.Permission(
                                        title='S3Bucket' + s3_logical_name,
                                        template=self.template,
                                        Action="lambda:InvokeFunction",
                                        FunctionName=troposphere.GetAtt(self.awslambda_resource, 'Arn'),
                                        Principal='s3.amazonaws.com',
                                        SourceArn='arn:aws:s3:::' + obj.get_bucket_name(),
                                    )
                                    seen[s3_logical_name] = True

            # Events Rule permission(s)
            if schemas.IEventsRule.providedBy(obj):
                seen = {}
                for target in obj.targets:
                    target_ref = Reference(target.target)
                    target_ref.set_account_name(account_ctx.get_name())
                    target_ref.set_region(aws_region)
                    lambda_ref = Reference(awslambda.paco_ref)

                    if target_ref.raw == lambda_ref.raw:
                        # yes, the Events Rule has a Target that is this Lambda
                        group = get_parent_by_interface(obj, schemas.IResourceGroup)
                        eventsrule_logical_name = self.gen_cf_logical_name(group.name + obj.name, '_')
                        if eventsrule_logical_name not in seen:
                            rule_name = create_event_rule_name(obj)
                            # rule_name = self.create_cfn_logical_id("EventsRule" + obj.paco_ref)
                            # rule_name = hash_smaller(rule_name, 64)
                            source_arn = 'arn:aws:events:{}:{}:rule/{}'.format(
                                aws_region,
                                account_ctx.id,
                                rule_name
                            )
                            troposphere.awslambda.Permission(
                                title='EventsRule' + eventsrule_logical_name,
                                template=self.template,
                                Action="lambda:InvokeFunction",
                                FunctionName=troposphere.GetAtt(self.awslambda_resource, 'Arn'),
                                Principal='events.amazonaws.com',
                                SourceArn=source_arn,
                            )
                            seen[eventsrule_logical_name] = True

            # IoT Analytics permission(s)
            if schemas.IIoTAnalyticsPipeline.providedBy(obj):
                seen = {}
                for activity in obj.pipeline_activities.values():
                    if activity.activity_type == 'lambda':
                        target_ref = Reference(activity.function)
                        target_ref.set_account_name(account_ctx.get_name())
                        target_ref.set_region(aws_region)
                        lambda_ref = Reference(awslambda.paco_ref)
                        if target_ref.raw == lambda_ref.raw:
                            # yes, the IoT Analytics Lambda Activity has a ref to this Lambda
                            group = get_parent_by_interface(obj, schemas.IResourceGroup)
                            iotap_logical_name = self.gen_cf_logical_name(group.name + obj.name, '_')
                            if iotap_logical_name not in seen:
                                rule_name = create_event_rule_name(obj)
                                troposphere.awslambda.Permission(
                                    title='IoTAnalyticsPipeline' + iotap_logical_name,
                                    template=self.template,
                                    Action="lambda:InvokeFunction",
                                    FunctionName=troposphere.GetAtt(self.awslambda_resource, 'Arn'),
                                    Principal='iotanalytics.amazonaws.com',
                                )
                                seen[iotap_logical_name] = True

        # Log group(s)
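        # A Lambda ARN has the form arn:aws:lambda:<region>:<account>:function:<name>,
        # so element 6 of the ':'-split is the function name used to build the
        # '/aws/lambda/<name>' LogGroup name.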
        loggroup_function_name = troposphere.Join(
            '', [
                '/aws/lambda/',
                troposphere.Select(
                    6, troposphere.Split(':', troposphere.GetAtt(self.awslambda_resource, 'Arn'))
                )
            ]
        )
        loggroup_resources = []
        loggroup_resources.append(
            self.add_log_group(loggroup_function_name, 'lambda')
        )
        if len(awslambda.log_group_names) > 0:
            # Additional App-specific LogGroups
            for loggroup_name in awslambda.log_group_names:
                # Add LogGroup to the template
                prefixed_loggroup_name = prefixed_name(awslambda, loggroup_name, self.paco_ctx.legacy_flag)
                loggroup_resources.append(
                    self.add_log_group(prefixed_loggroup_name)
                )

        # LogGroup permissions
        log_group_arns = [
            troposphere.Join(':', [
                f'arn:aws:logs:{self.aws_region}:{account_ctx.id}:log-group',
                loggroup_function_name,
                '*'
            ])
        ]
        log_stream_arns = [
            troposphere.Join(':', [
                f'arn:aws:logs:{self.aws_region}:{account_ctx.id}:log-group',
                loggroup_function_name,
                'log-stream',
                '*'
            ])
        ]
        for loggroup_name in awslambda.log_group_names:
            prefixed_loggroup_name = prefixed_name(awslambda, loggroup_name, self.paco_ctx.legacy_flag)
            log_group_arns.append(
                f'arn:aws:logs:{self.aws_region}:{account_ctx.id}:log-group:{prefixed_loggroup_name}:*'
            )
            log_stream_arns.append(
                f'arn:aws:logs:{self.aws_region}:{account_ctx.id}:log-group:{prefixed_loggroup_name}:log-stream:*'
            )

        loggroup_policy_resource = troposphere.iam.ManagedPolicy(
            title='LogGroupManagedPolicy',
            PolicyDocument=Policy(
                Version='2012-10-17',
                Statement=[
                    Statement(
                        Sid='AllowLambdaModifyLogStreams',
                        Effect=Allow,
                        Action=[
                            Action("logs","CreateLogStream"),
                            Action("logs","DescribeLogStreams"),
                        ],
                        Resource=log_group_arns,
                    ),
                    Statement(
                        Sid='AllowLambdaPutLogEvents',
                        Effect=Allow,
                        Action=[
                            Action("logs","PutLogEvents"),
                        ],
                        Resource=log_stream_arns,
                    ),
                ],
            ),
            Roles=[troposphere.Ref(role_name_param)],
        )
        loggroup_policy_resource.DependsOn = loggroup_resources
        self.template.add_resource(loggroup_policy_resource)

        # Outputs
        self.create_output(
            title='FunctionName',
            value=troposphere.Ref(self.awslambda_resource),
            ref=awslambda.paco_ref_parts + '.name',
        )
        self.create_output(
            title='FunctionArn',
            value=troposphere.GetAtt(self.awslambda_resource, 'Arn'),
            ref=awslambda.paco_ref_parts + '.arn',
        )
Example #30
    def __init__(self, stack, paco_ctx, factory_name):
        cloudfront_config = stack.resource
        config_ref = stack.stack_ref
        super().__init__(stack, paco_ctx)
        self.set_aws_name('CloudFront', self.resource_group_name,
                          self.resource_name, factory_name)
        origin_access_id_enabled = False

        self.init_template('CloudFront Distribution')
        template = self.template

        target_origin_param = self.create_cfn_parameter(
            param_type='String',
            name='TargetOrigin',
            description='Target Origin',
            value=cloudfront_config.default_cache_behavior.target_origin,
        )

        distribution_config_dict = {
            'Enabled': cloudfront_config.is_enabled(),
            'DefaultRootObject': cloudfront_config.default_root_object,
            'HttpVersion': 'http2',
            'DefaultCacheBehavior': {
                'AllowedMethods':
                cloudfront_config.default_cache_behavior.allowed_methods,
                'DefaultTTL':
                cloudfront_config.default_cache_behavior.default_ttl,
                'TargetOriginId':
                troposphere.Ref(target_origin_param),
                'ViewerProtocolPolicy':
                cloudfront_config.default_cache_behavior.viewer_protocol_policy
            },
            'PriceClass': 'PriceClass_' + cloudfront_config.price_class
        }
        if cloudfront_config.is_enabled() == True:
            # force the certificate to be in us-east-1, as that's the only CloudFront region
            if cloudfront_config.viewer_certificate.certificate != None:
                certificate = get_model_obj_from_ref(
                    cloudfront_config.viewer_certificate.certificate,
                    self.paco_ctx.project)
                if certificate.region != 'us-east-1':
                    raise InvalidCloudFrontCertificateRegion(
                        f'Certificate region is: {certificate.region}: {certificate.paco_ref}'
                    )
                viewer_certificate_param = self.create_cfn_parameter(
                    name='ViewerCertificateArn',
                    description="ACM Viewer Certificate ARN",
                    param_type='String',
                    value=cloudfront_config.viewer_certificate.certificate +
                    '.arn',
                )
                distribution_config_dict['ViewerCertificate'] = {
                    'AcmCertificateArn':
                    troposphere.Ref(viewer_certificate_param),
                    'SslSupportMethod':
                    cloudfront_config.viewer_certificate.ssl_supported_method,
                    'MinimumProtocolVersion':
                    cloudfront_config.viewer_certificate.
                    minimum_protocol_version
                }
        if cloudfront_config.default_cache_behavior.min_ttl != -1:
            distribution_config_dict['DefaultCacheBehavior'][
                'MinTTL'] = cloudfront_config.default_cache_behavior.min_ttl
        if cloudfront_config.default_cache_behavior.max_ttl != -1:
            distribution_config_dict['DefaultCacheBehavior'][
                'MaxTTL'] = cloudfront_config.default_cache_behavior.max_ttl

        # Lambda Function Association Parameters - for both DefaultCacheBehavior and CacheBehaviors
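        # Only function refs ending in '.autoversion.arn' get a Parameter; the same Parameter
        # is reused by the default and per-cache-behavior associations below.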
        lambda_associations = []
        lambda_params = {}
        associations = cloudfront_config.default_cache_behavior.lambda_function_associations[:]
        for cache_behaviour in cloudfront_config.cache_behaviors:
            for lambda_association in cache_behaviour.lambda_function_associations:
                associations.append(lambda_association)
        for lambda_association in associations:
            lambda_ref = lambda_association.lambda_function
            if lambda_ref not in lambda_params:
                if lambda_ref.endswith('.autoversion.arn'):
                    lambda_name = self.create_cfn_logical_id(
                        'Lambda' + utils.md5sum(str_data=lambda_ref))
                    lambda_params[lambda_ref] = self.create_cfn_parameter(
                        param_type='String',
                        name=lambda_name,
                        description=
                        f'Lambda Function Associated for {lambda_ref}',
                        value=lambda_ref,
                    )
        # Lambda Function Association for DefaultCacheBehavior
        for lambda_association in cloudfront_config.default_cache_behavior.lambda_function_associations:
            lambda_associations.append({
                'EventType':
                lambda_association.event_type,
                'IncludeBody':
                lambda_association.include_body,
                'LambdaFunctionARN':
                troposphere.Ref(
                    lambda_params[lambda_association.lambda_function]),
            })
        if len(lambda_associations) > 0:
            # ToDo: PR this monkey-patch into Troposphere
            from troposphere.validators import boolean
            troposphere.cloudfront.LambdaFunctionAssociation.props[
                'IncludeBody'] = (boolean, False)
            distribution_config_dict['DefaultCacheBehavior'][
                'LambdaFunctionAssociations'] = lambda_associations

        # Domain Aliases and Record Sets
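        # Each alias becomes a CNAME Parameter; aliases_param_map is reused further down
        # when Route 53 alias record sets are created.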
        aliases_list = []
        aliases_param_map = {}
        for alias in cloudfront_config.domain_aliases:
            alias_hash = utils.md5sum(str_data=alias.domain_name)
            domain_name_param = 'DomainAlias' + alias_hash
            alias_param = self.create_cfn_parameter(
                param_type='String',
                name=domain_name_param,
                description='Domain Alias CNAME',
                value=alias.domain_name)
            aliases_list.append(troposphere.Ref(alias_param))
            aliases_param_map[alias.domain_name] = alias_param

        distribution_config_dict['Aliases'] = aliases_list

        # DefaultCacheBehavior
        # Forward Values
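        # A cache policy and ForwardedValues are mutually exclusive: ForwardedValues is only
        # emitted when no CachePolicyId is configured.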
        if cloudfront_config.default_cache_behavior.origin_request_policy_id != None:
            distribution_config_dict['DefaultCacheBehavior'][
                'OriginRequestPolicyId'] = cloudfront_config.default_cache_behavior.origin_request_policy_id
        if cloudfront_config.default_cache_behavior.cache_policy_id != None:
            distribution_config_dict['DefaultCacheBehavior'][
                'CachePolicyId'] = cloudfront_config.default_cache_behavior.cache_policy_id
        else:
            forwarded_values_config = cloudfront_config.default_cache_behavior.forwarded_values
            forwarded_values_dict = {
                'Cookies': {
                    'Forward': 'none',
                },
                'QueryString': str(forwarded_values_config.query_string)
            }
            # Cookies
            if cloudfront_config.s3_origin_exists() == False:
                forwarded_values_dict['Cookies'][
                    'Forward'] = forwarded_values_config.cookies.forward
            if len(forwarded_values_config.cookies.whitelisted_names) > 0:
                forwarded_values_dict['Cookies'][
                    'WhitelistedNames'] = forwarded_values_config.cookies.whitelisted_names
            # Headers
            if cloudfront_config.s3_origin_exists() == False:
                forwarded_values_dict[
                    'Headers'] = cloudfront_config.default_cache_behavior.forwarded_values.headers
            distribution_config_dict['DefaultCacheBehavior'][
                'ForwardedValues'] = forwarded_values_dict

        # Cache Behaviors
        if len(cloudfront_config.cache_behaviors) > 0:
            cache_behaviors_list = []
            target_origin_param_map = {}
            for cache_behavior in cloudfront_config.cache_behaviors:
                target_origin_hash = utils.md5sum(
                    str_data=cache_behavior.target_origin)
                if target_origin_hash not in target_origin_param_map.keys():
                    cb_target_origin_param = self.create_cfn_parameter(
                        param_type='String',
                        name=self.create_cfn_logical_id(
                            'TargetOriginCacheBehavior' + target_origin_hash),
                        description='Target Origin',
                        value=cache_behavior.target_origin,
                    )
                    target_origin_param_map[
                        target_origin_hash] = cb_target_origin_param
                else:
                    cb_target_origin_param = target_origin_param_map[
                        target_origin_hash]

                cache_behavior_dict = {
                    'PathPattern': cache_behavior.path_pattern,
                    'AllowedMethods': cache_behavior.allowed_methods,
                    'DefaultTTL': cache_behavior.default_ttl,
                    'TargetOriginId': troposphere.Ref(cb_target_origin_param),
                    'ViewerProtocolPolicy':
                    cache_behavior.viewer_protocol_policy
                }
                # CacheBehavior Lambda Function Associations
                if len(cache_behavior.lambda_function_associations) > 0:
                    lambda_associations = []
                    for lambda_association in cache_behavior.lambda_function_associations:
                        lambda_associations.append({
                            'EventType':
                            lambda_association.event_type,
                            'IncludeBody':
                            lambda_association.include_body,
                            'LambdaFunctionARN':
                            troposphere.Ref(lambda_params[
                                lambda_association.lambda_function]),
                        })
                    cache_behavior_dict[
                        'LambdaFunctionAssociations'] = lambda_associations

                # CachePolicyId or ForwardedValues, not both
                if cache_behavior.origin_request_policy_id != None:
                    cache_behavior_dict[
                        'OriginRequestPolicyId'] = cache_behavior.origin_request_policy_id
                if cache_behavior.cache_policy_id != None:
                    cache_behavior_dict[
                        'CachePolicyId'] = cache_behavior.cache_policy_id
                else:
                    cb_forwarded_values_config = cache_behavior.forwarded_values
                    cb_forwarded_values_dict = {
                        'QueryString':
                        str(cb_forwarded_values_config.query_string)
                    }

                    # Cookies
                    if cb_forwarded_values_config.cookies != None:
                        cb_forwarded_values_dict['Cookies'] = {
                            'Forward': 'none'
                        }
                        cb_forwarded_values_dict['Cookies'][
                            'Forward'] = cb_forwarded_values_config.cookies.forward
                        if len(cb_forwarded_values_config.cookies.
                               whitelisted_names) > 0:
                            cb_forwarded_values_dict['Cookies'][
                                'WhitelistedNames'] = cb_forwarded_values_config.cookies.whitelisted_names

                    # Headers
                    if cloudfront_config.s3_origin_exists() == False:
                        cb_forwarded_values_dict[
                            'Headers'] = cache_behavior.forwarded_values.headers
                    cache_behavior_dict[
                        'ForwardedValues'] = cb_forwarded_values_dict
                cache_behaviors_list.append(cache_behavior_dict)

            distribution_config_dict['CacheBehaviors'] = cache_behaviors_list

        # Origin Access Identity
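        # An Origin Access Identity resource is only created when at least one origin
        # is an S3 Bucket.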
        if cloudfront_config.s3_origin_exists() == True:
            origin_id_res = troposphere.cloudfront.CloudFrontOriginAccessIdentity(
                title='CloudFrontOriginAccessIdentity',
                template=template,
                CloudFrontOriginAccessIdentityConfig=troposphere.cloudfront.
                CloudFrontOriginAccessIdentityConfig(
                    Comment=troposphere.Ref('AWS::StackName')))
            troposphere.Output(title='CloudFrontOriginAccessIdentity',
                               template=template,
                               Value=troposphere.Ref(origin_id_res))

        # Origins
        origins_list = []
        for origin_name, origin in cloudfront_config.origins.items():
            if origin.s3_bucket != None:
                domain_hash = utils.md5sum(str_data=origin.s3_bucket)
                origin_domain_name = self.paco_ctx.get_ref(origin.s3_bucket +
                                                           '.url')
            else:
                domain_hash = utils.md5sum(str_data=origin.domain_name)
                origin_domain_name = origin.domain_name
            origin_dict = {'Id': origin_name, 'DomainName': origin_domain_name}
            if origin.s3_bucket == None:
                origin_dict['CustomOriginConfig'] = {
                    'OriginKeepaliveTimeout':
                    origin.custom_origin_config.keepalive_timeout,
                    'OriginProtocolPolicy':
                    origin.custom_origin_config.protocol_policy,
                    'OriginReadTimeout':
                    origin.custom_origin_config.read_timeout,
                }
                if len(origin.custom_origin_config.ssl_protocols) > 0:
                    origin_dict['CustomOriginConfig'][
                        'OriginSSLProtocols'] = origin.custom_origin_config.ssl_protocols
                if origin.custom_origin_config.https_port != None:
                    origin_dict['CustomOriginConfig'][
                        'HTTPSPort'] = origin.custom_origin_config.https_port
                if origin.custom_origin_config.http_port:
                    origin_dict['CustomOriginConfig']['HTTPPort'] = str(
                        origin.custom_origin_config.http_port)
            else:
                s3_config = self.paco_ctx.get_ref(origin.s3_bucket)
                origin_dict['S3OriginConfig'] = {}
                if s3_config.cloudfront_origin == False:
                    origin_dict['S3OriginConfig']['OriginAccessIdentity'] = ''
                else:
                    origin_access_id_enabled = True
                    param_name = "OriginAccessIdentiy" + domain_hash
                    access_id_ref = origin.s3_bucket + '.origin_id'
                    s3_cf_origin_id_param = self.create_cfn_parameter(
                        param_type='String',
                        name=param_name,
                        description='Origin Access Identity',
                        value=access_id_ref,
                    )
                    origin_dict['S3OriginConfig'][
                        'OriginAccessIdentity'] = troposphere.Sub(
                            'origin-access-identity/cloudfront/${OriginAccessId}',
                            {
                                'OriginAccessId':
                                troposphere.Ref(s3_cf_origin_id_param)
                            })
            origins_list.append(origin_dict)
        distribution_config_dict['Origins'] = origins_list

        # Custom Error
        error_resp_list = []
        for error_resp in cloudfront_config.custom_error_responses:
            error_resp_dict = {
                'ErrorCachingMinTTL': error_resp.error_caching_min_ttl,
                'ErrorCode': error_resp.error_code,
                'ResponseCode': error_resp.response_code,
                'ResponsePagePath': error_resp.response_page_path
            }
            error_resp_list.append(error_resp_dict)
        if len(error_resp_list) > 0:
            distribution_config_dict['CustomErrorResponses'] = error_resp_list

        # Web ACL
        if cloudfront_config.webacl_id != None:
            webacl_id_value = cloudfront_config.webacl_id
            if is_ref(webacl_id_value):
                webacl_id_value = cloudfront_config.webacl_id + '.arn'
            webacl_id_param = self.create_cfn_parameter(
                param_type='String',
                name='WebAclId',
                description='WAF Web Acl Arn',
                value=webacl_id_value)
            distribution_config_dict['WebACLId'] = troposphere.Ref(
                webacl_id_param)

        distribution_dict = {'DistributionConfig': distribution_config_dict}
        distribution_res = troposphere.cloudfront.Distribution.from_dict(
            'Distribution', distribution_dict)
        template.add_resource(distribution_res)

        self.create_output(title='CloudFrontURL',
                           value=troposphere.GetAtt('Distribution',
                                                    'DomainName'),
                           ref=self.config_ref + '.domain_name')
        self.create_output(title='CloudFrontId',
                           value=troposphere.Ref(distribution_res),
                           ref=self.config_ref + '.id')

        if self.paco_ctx.legacy_flag('route53_record_set_2019_10_16') == True:
            if cloudfront_config.is_dns_enabled() == True:
                for alias in cloudfront_config.domain_aliases:
                    alias_hash = utils.md5sum(str_data=alias.domain_name)
                    zone_param_name = 'AliasHostedZoneId' + alias_hash
                    alias_zone_id_param = self.create_cfn_parameter(
                        param_type='String',
                        name=zone_param_name,
                        description='Domain Alias Hosted Zone Id',
                        value=alias.hosted_zone + '.id',
                    )
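                    # Z2FDTNDATAQYW2 is the fixed hosted zone ID for CloudFront alias targets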
                    record_set_res = troposphere.route53.RecordSetType(
                        title=self.create_cfn_logical_id_join(
                            ['RecordSet', alias_hash]),
                        template=template,
                        HostedZoneId=troposphere.Ref(alias_zone_id_param),
                        Name=troposphere.Ref(
                            aliases_param_map[alias.domain_name]),
                        Type='A',
                        AliasTarget=troposphere.route53.AliasTarget(
                            DNSName=troposphere.GetAtt(distribution_res,
                                                       'DomainName'),
                            HostedZoneId='Z2FDTNDATAQYW2'))
                    record_set_res.DependsOn = distribution_res

        if origin_access_id_enabled:
            self.stack.wait_for_delete = True

        if self.paco_ctx.legacy_flag('route53_record_set_2019_10_16') == False:
            route53_ctl = self.paco_ctx.get_controller('route53')
            if cloudfront_config.is_dns_enabled() == True:
                for alias in cloudfront_config.domain_aliases:
                    account_ctx = self.account_ctx
                    if alias.hosted_zone:
                        if is_ref(alias.hosted_zone):
                            hosted_zone = get_model_obj_from_ref(
                                alias.hosted_zone, self.paco_ctx.project)
                            account_ctx = self.paco_ctx.get_account_context(
                                account_ref=hosted_zone.account)
                        route53_ctl.add_record_set(
                            account_ctx,
                            self.aws_region,
                            cloudfront_config,
                            enabled=cloudfront_config.is_enabled(),
                            dns=alias,
                            record_set_type='Alias',
                            alias_dns_name='paco.ref ' + self.stack.stack_ref +
                            '.domain_name',
                            alias_hosted_zone_id=
                            'Z2FDTNDATAQYW2',  # This is always the hosted zone ID when you create an alias record that routes traffic to a CloudFront distribution
                            stack_group=self.stack.stack_group,
                            async_stack_provision=True,
                            config_ref=config_ref + '.record_set')