def __init__(self, app: core.App, id: str, props, **kwargs) -> None:
        """Build-pipeline stack: an S3-polled CodePipeline with one CodeBuild stage.

        :param app: CDK app (construct scope).
        :param id: construct id.
        :param props: dict with 'namespace' (str, used as the pipeline name),
            'bucket' (s3.IBucket serving as both artifact store and source
            bucket) and 'cb_docker_build' (codebuild.IProject for the Build
            stage).
        """
        super().__init__(app, id, **kwargs)

        # Artifact handed from the Source stage to the Build stage.
        source_output = aws_codepipeline.Artifact(artifact_name='source')

        # Two-stage pipeline: poll source.zip in the bucket, then run the
        # Docker build project on it.
        pipeline = aws_codepipeline.Pipeline(
            self,
            "Pipeline",
            pipeline_name=f"{props['namespace']}",
            artifact_bucket=props['bucket'],
            stages=[
                aws_codepipeline.StageProps(
                    stage_name='Source',
                    actions=[
                        aws_codepipeline_actions.S3SourceAction(
                            bucket=props['bucket'],
                            bucket_key='source.zip',
                            action_name='S3Source',
                            run_order=1,
                            output=source_output,
                            trigger=aws_codepipeline_actions.S3Trigger.POLL),
                    ]),
                aws_codepipeline.StageProps(
                    stage_name='Build',
                    actions=[
                        aws_codepipeline_actions.CodeBuildAction(
                            action_name='DockerBuildImages',
                            input=source_output,
                            project=props['cb_docker_build'],
                            run_order=1,
                        )
                    ])
            ])
        # The pipeline role needs read/write on the shared artifact/source bucket.
        props['bucket'].grant_read_write(pipeline.role)

        # Publish the pipeline name to SSM so other stacks/tools can look it
        # up. (Previously bound to an unused local; the construct registers
        # itself on `self`, so no binding is needed.)
        aws_ssm.StringParameter(
            self,
            "PPipeline",
            parameter_name=f"{props['namespace']}-pipeline",
            string_value=pipeline.pipeline_name,
            description='IoT playground pipeline bucket')
        # Surface the pipeline name as a CloudFormation output as well.
        core.CfnOutput(self,
                       "PipelineOut",
                       description="Pipeline",
                       value=pipeline.pipeline_name)
# Example #2
def source_action_template(template_location, assets_bucket):
    """Create an S3 source action for the CloudFormation-template pipeline.

    :template_location: location (read via ``.value_as_string``) of the zip
        file in ``assets_bucket`` containing the CF template and stages
        configuration
    :assets_bucket: the bucket cdk object where pipeline assets are stored
    :return: tuple of (output artifact, S3SourceAction) ready to attach to a
        codepipeline stage
    """
    artifact = codepipeline.Artifact()
    action = codepipeline_actions.S3SourceAction(
        action_name="S3Source",
        output=artifact,
        bucket=assets_bucket,
        bucket_key=template_location.value_as_string,
    )
    return artifact, action
# Example #3
def source_action_model_monitor(template_zip_file, assets_bucket):
    """Create the S3 source action for the model-monitor pipeline.

    :template_zip_file: location (read via ``.value_as_string``) of the zip
        file in ``assets_bucket`` containing the model monitor template and
        parameters
    :assets_bucket: the bucket cdk object where pipeline assets are stored
    :return: tuple of (output artifact, S3SourceAction) ready to attach to a
        codepipeline stage
    """
    monitor_artifact = codepipeline.Artifact()
    monitor_source = codepipeline_actions.S3SourceAction(
        bucket=assets_bucket,
        bucket_key=template_zip_file.value_as_string,
        action_name="S3Source",
        output=monitor_artifact,
    )
    return monitor_artifact, monitor_source
# Example #4
def source_action(artifact_location, assets_bucket):
    """Create a generic S3 source action for a codepipeline.

    :artifact_location: location (read via ``.value_as_string``) of the
        artifact (model/inference data) in ``assets_bucket``
    :assets_bucket: the bucket cdk object where pipeline assets are stored
    :return: tuple of (output artifact, S3SourceAction) ready to attach to a
        codepipeline stage
    """
    out = codepipeline.Artifact()
    return (
        out,
        codepipeline_actions.S3SourceAction(
            output=out,
            action_name="S3Source",
            bucket_key=artifact_location.value_as_string,
            bucket=assets_bucket,
        ),
    )
# Example #5
def source_action_custom(assets_bucket, custom_container):
    """Create the S3 source action for a custom-container build pipeline.

    :assets_bucket: the bucket cdk object where pipeline assets are stored
    :custom_container: location (read via ``.value_as_string``) of a zip file
        in ``assets_bucket`` containing the dockerfile and assets for building
        a custom model image
    :return: tuple of (output artifact, S3SourceAction) ready to attach to a
        codepipeline stage
    """
    container_artifact = codepipeline.Artifact()
    container_source = codepipeline_actions.S3SourceAction(
        action_name="S3Source",
        bucket=assets_bucket,
        output=container_artifact,
        bucket_key=custom_container.value_as_string,
    )
    return container_artifact, container_source
    def __init__(self, scope: Construct, id: str, **kwargs) -> None:
        """CDK-pipelines stack that sources faropt-master.zip from a new S3
        bucket and synthesizes with a standard npm synth action.

        Fix: ``S3SourceAction``'s ``bucket`` parameter takes the bucket
        construct (``IBucket``), not its name string — the original passed
        ``bucket.bucket_name``.
        """
        super().__init__(scope, id, **kwargs)
        source_artifact = codepipeline.Artifact()
        cloud_assembly_artifact = codepipeline.Artifact()
        bucket = _s3.Bucket(self, 'bucket')

        # The pipeline registers itself on `self`; no local binding needed.
        CdkPipeline(
            self, "Pipeline",
            pipeline_name="MyAppPipeline",
            cloud_assembly_artifact=cloud_assembly_artifact,
            source_action=codepipeline_actions.S3SourceAction(
                bucket=bucket,  # was bucket.bucket_name: action needs IBucket
                bucket_key="faropt-master.zip",
                action_name="S3",
                output=source_artifact),
            synth_action=SimpleSynthAction.standard_npm_synth(
                source_artifact=source_artifact,
                cloud_assembly_artifact=cloud_assembly_artifact,
                build_command="cdk synth")
        )
# Example #7
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        """IoT Greengrass workshop CI/CD: a canary pipeline sourced from
        CodeCommit plus a prod pipeline sourced from S3.

        Creates a CodeCommit repo, two versioned S3 buckets (prod deploy
        params / prod source), SSM parameters exposing those bucket names,
        three CodeBuild projects (build, canary deploy, prod deploy) and the
        two pipelines wiring them together.

        NOTE(review): ``kwargs['env'].region`` below requires callers to
        pass ``env``; it raises KeyError otherwise — confirm at call sites.
        """
        super().__init__(scope, id, **kwargs)

        # Source repository for the canary pipeline.
        code = codecommit.Repository(
            self, "CodeRepo", repository_name="iot-gg-cicd-workshop-repo")

        prod_deploy_param_bucket = s3.Bucket(
            self,
            "ProdDeployBucket",
            versioned=True,
        )

        prod_source_bucket = s3.Bucket(
            self,
            "ProdSourceBucket",
            versioned=True,
        )

        # Publish bucket names in SSM so build/deploy specs can resolve them
        # at run time (see the `aws ssm get-parameter` call in the inline
        # prod buildspec below).
        ssm.StringParameter(
            self,
            "ProdSourceBucketParameter",
            parameter_name="/iot-gg-cicd-workshop/s3/prod_source_bucket",
            string_value=prod_source_bucket.bucket_name,
        )
        ssm.StringParameter(
            self,
            "ProdDeployBucketParameter",
            parameter_name="/iot-gg-cicd-workshop/s3/prod_deploy_param_bucket",
            string_value=prod_deploy_param_bucket.bucket_name,
        )

        # Build project for the canary pipeline, driven by buildspec.yml
        # from the source artifact.
        cdk_build = codebuild.PipelineProject(
            self,
            "Build",
            project_name="iot-gg-cicd-workshop-build",
            build_spec=codebuild.BuildSpec.from_source_filename(
                "buildspec.yml"),
            environment_variables={
                "AWS_DEFAULT_REGION":
                codebuild.BuildEnvironmentVariable(value=kwargs['env'].region)
            })

        # NOTE(review): add_policies is defined elsewhere in this file;
        # presumably it attaches the named managed policies to the project
        # role — confirm there.
        add_policies(cdk_build, [
            "AWSCloudFormationFullAccess",
            "AmazonSSMFullAccess",
            "AmazonS3FullAccess",
            "AWSLambdaFullAccess",
            "IAMFullAccess",
        ])

        # Canary deploy project, driven by deployspec.yml.
        cdk_deploy_canary = codebuild.PipelineProject(
            self,
            "Deploy",
            project_name="iot-gg-cicd-workshop-deploy-canary",
            build_spec=codebuild.BuildSpec.from_source_filename(
                "deployspec.yml"),
            environment_variables={
                "AWS_DEFAULT_REGION":
                codebuild.BuildEnvironmentVariable(value=kwargs['env'].region)
            })

        add_policies(cdk_deploy_canary, [
            "AWSCloudFormationFullAccess", "AWSGreengrassFullAccess",
            "AmazonSSMFullAccess", "ResourceGroupsandTagEditorReadOnlyAccess",
            "AWSLambdaFullAccess", "AWSIoTFullAccess"
        ])

        source_output = codepipeline.Artifact()
        cdk_build_output = codepipeline.Artifact("CdkBuildOutput")

        # Canary pipeline: CodeCommit -> build/package -> Greengrass canary.
        codepipeline.Pipeline(
            self,
            "Pipeline",
            pipeline_name="iot-gg-cicd-workshop-pipeline-canary",
            stages=[
                codepipeline.StageProps(
                    stage_name="Source",
                    actions=[
                        codepipeline_actions.CodeCommitSourceAction(
                            action_name="CodeCommit_Source",
                            repository=code,
                            output=source_output)
                    ]),
                codepipeline.StageProps(
                    stage_name="Build_Package_Deploy_Lambda",
                    actions=[
                        codepipeline_actions.CodeBuildAction(
                            action_name="Build_Package_Deploy",
                            project=cdk_build,
                            input=source_output,
                            outputs=[cdk_build_output])
                    ]),
                codepipeline.StageProps(
                    stage_name="Deploy_GreenGrass_Canary",
                    actions=[
                        codepipeline_actions.CodeBuildAction(
                            action_name="Deploy_Canary",
                            project=cdk_deploy_canary,
                            input=cdk_build_output)
                    ]),
            ])

        # Prod deploy project with an inline buildspec: resolves the prod
        # source bucket via SSM, pulls prod_deploy.zip from it, and runs the
        # Greengrass prod deployment via make.
        cdk_deploy_prod = codebuild.PipelineProject(
            self,
            "DeployProd",
            project_name="iot-gg-cicd-workshop-deploy-main",
            build_spec=codebuild.BuildSpec.from_object(
                dict(
                    version="0.2",
                    phases=dict(install=dict(commands=[
                        "apt-get install zip",
                        "PROD_SOURCE_BUCKET=$(aws ssm get-parameter --name '/iot-gg-cicd-workshop/s3/prod_source_bucket' --with-decryption --query 'Parameter.Value' --output text)",
                        "aws s3 cp s3://$PROD_SOURCE_BUCKET/prod_deploy.zip prod_deploy.zip",
                        "unzip -o prod_deploy.zip", "ls -la", "make clean init"
                    ]),
                                build=dict(commands=[
                                    "ls -la",
                                    "make deploy-greengrass-prod",
                                ])),
                    artifacts={
                        "base-directory": ".",
                        "files": ["**/*"]
                    },
                    environment=dict(
                        buildImage=codebuild.LinuxBuildImage.STANDARD_2_0))))

        add_policies(cdk_deploy_prod, [
            "AWSCloudFormationFullAccess", "AWSGreengrassFullAccess",
            "AmazonSSMFullAccess", "ResourceGroupsandTagEditorReadOnlyAccess",
            "AWSLambdaFullAccess"
        ])

        prod_source_output = codepipeline.Artifact()
        # Prod pipeline: deploy_params.zip landing in the param bucket feeds
        # the prod deploy project.
        codepipeline.Pipeline(
            self,
            "PipelineProd",
            pipeline_name="iot-gg-cicd-workshop-pipeline-main",
            stages=[
                codepipeline.StageProps(
                    stage_name="Source",
                    actions=[
                        codepipeline_actions.S3SourceAction(
                            action_name="S3_Source",
                            bucket=prod_deploy_param_bucket,
                            bucket_key="deploy_params.zip",
                            output=prod_source_output)
                    ]),
                codepipeline.StageProps(
                    stage_name="Deploy_GreenGrass_Prod",
                    actions=[
                        codepipeline_actions.CodeBuildAction(
                            action_name="Deploy_Prod",
                            project=cdk_deploy_prod,
                            input=prod_source_output)
                    ]),
            ])
        # The canary deploy writes the artifacts the prod pipeline later
        # consumes; prod deploy only needs read access.
        prod_source_bucket.grant_read_write(cdk_deploy_canary.role)
        prod_source_bucket.grant_read(cdk_deploy_prod.role)
        prod_deploy_param_bucket.grant_read_write(cdk_deploy_canary.role)
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        """Workshop bootstrap stack: a VPC tagged for Kubernetes, a
        CodePipeline that runs a CodeBuild project from a workshop asset
        bucket, and a Cloud9 IDE created via a nested CloudFormation stack.

        NOTE(review): ``node.apply_aspect`` / ``core.Tag`` is the CDK v1
        tagging API — confirm the project pins aws-cdk v1.
        """
        super().__init__(scope, id, **kwargs)

        eks_vpc = ec2.Vpc(self, "VPC", cidr="10.0.0.0/16")

        # Tag the whole stack as shared with the cluster, and tag subnets so
        # Kubernetes can place internal (private) and internet-facing
        # (public) load balancers.
        self.node.apply_aspect(
            core.Tag("kubernetes.io/cluster/cluster", "shared"))

        eks_vpc.private_subnets[0].node.apply_aspect(
            core.Tag("kubernetes.io/role/internal-elb", "1"))
        eks_vpc.private_subnets[1].node.apply_aspect(
            core.Tag("kubernetes.io/role/internal-elb", "1"))
        eks_vpc.public_subnets[0].node.apply_aspect(
            core.Tag("kubernetes.io/role/elb", "1"))
        eks_vpc.public_subnets[1].node.apply_aspect(
            core.Tag("kubernetes.io/role/elb", "1"))

        # Create IAM Role For CodeBuild and Cloud9 (assumable by both
        # services; admin because the build provisions arbitrary resources).
        codebuild_role = iam.Role(
            self,
            "BuildRole",
            assumed_by=iam.CompositePrincipal(
                iam.ServicePrincipal("codebuild.amazonaws.com"),
                iam.ServicePrincipal("ec2.amazonaws.com")),
            managed_policies=[
                iam.ManagedPolicy.from_aws_managed_policy_name(
                    "AdministratorAccess")
            ])

        # Instance profile so an EC2 instance (the Cloud9 IDE) can assume
        # the same role.
        instance_profile = iam.CfnInstanceProfile(
            self, "InstanceProfile", roles=[codebuild_role.role_name])

        # Create CodeBuild PipelineProject driven by buildspec.yml from the
        # source artifact.
        build_project = codebuild.PipelineProject(
            self,
            "BuildProject",
            role=codebuild_role,
            build_spec=codebuild.BuildSpec.from_source_filename(
                "buildspec.yml"))

        # Create CodePipeline
        pipeline = codepipeline.Pipeline(
            self,
            "Pipeline",
        )

        # Create Artifact
        artifact = codepipeline.Artifact()

        # S3 Source Bucket: pre-existing workshop asset bucket whose name is
        # region-suffixed (ee-assets-prod-<region>).
        source_bucket = s3.Bucket.from_bucket_attributes(
            self,
            "SourceBucket",
            bucket_arn=core.Fn.join(
                "",
                ["arn:aws:s3:::ee-assets-prod-",
                 core.Fn.ref("AWS::Region")]))

        # Add Source Stage. S3Trigger.NONE: no poll/event trigger is
        # configured for the source action.
        pipeline.add_stage(
            stage_name="Source",
            actions=[
                codepipeline_actions.S3SourceAction(
                    action_name="S3SourceRepo",
                    bucket=source_bucket,
                    bucket_key=
                    "modules/2cae1f20008d4fc5aaef294602649b98/v9/source.zip",
                    output=artifact,
                    trigger=codepipeline_actions.S3Trigger.NONE)
            ])

        # Add CodeBuild Stage. Subnet/profile/account identifiers are handed
        # to the buildspec via environment variables.
        pipeline.add_stage(
            stage_name="Deploy",
            actions=[
                codepipeline_actions.CodeBuildAction(
                    action_name="CodeBuildProject",
                    project=build_project,
                    type=codepipeline_actions.CodeBuildActionType.BUILD,
                    input=artifact,
                    environment_variables={
                        'PublicSubnet1ID':
                        codebuild.BuildEnvironmentVariable(
                            value=eks_vpc.public_subnets[0].subnet_id),
                        'PublicSubnet2ID':
                        codebuild.BuildEnvironmentVariable(
                            value=eks_vpc.public_subnets[1].subnet_id),
                        'PrivateSubnet1ID':
                        codebuild.BuildEnvironmentVariable(
                            value=eks_vpc.private_subnets[0].subnet_id),
                        'PrivateSubnet2ID':
                        codebuild.BuildEnvironmentVariable(
                            value=eks_vpc.private_subnets[1].subnet_id),
                        'AWS_DEFAULT_REGION':
                        codebuild.BuildEnvironmentVariable(value=self.region),
                        'INSTANCEPROFILEID':
                        codebuild.BuildEnvironmentVariable(
                            value=instance_profile.ref),
                        'AWS_ACCOUNT_ID':
                        codebuild.BuildEnvironmentVariable(value=self.account)
                    })
            ])

        # Cloud9 IDE from a hosted CloudFormation template, placed in the
        # first public subnet.
        cloud9_stack = cloudformation.CfnStack(
            self,
            "Cloud9Stack",
            #            template_url="https://aws-quickstart.s3.amazonaws.com/quickstart-cloud9-ide/templates/cloud9-ide-instance.yaml",
            template_url=
            "https://ee-assets-prod-us-east-1.s3.amazonaws.com/modules/2cae1f20008d4fc5aaef294602649b98/v9/cloud9-ide-instance.yaml",
            parameters={
                "C9InstanceType": "m5.large",
                "C9Subnet": eks_vpc.public_subnets[0].subnet_id
            })

        # Deployment ordering: the pipeline must come after the VPC and the
        # Cloud9 stack.
        pipeline.node.add_dependency(eks_vpc)
        pipeline.node.add_dependency(cloud9_stack)
# Example #9
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        """EKS-install pipeline stack.

        Steps (context values come from cdk.json, written by the flask API):
        #1 Initiate the CDK class and read the context items
        #2 Create a CodeBuild project (with the supplied IAM role, if any)
        #3 Reference the source S3 bucket holding the templated zip
        #4 Build a codepipeline: S3 source stage -> EKS-install build stage

        Fixes vs. the original:
        - ``super().__init__(scope, id, *kwargs)`` splatted the kwargs dict
          positionally; it must be ``**kwargs``.
        - ``if iam is not None`` tested the imported *module* (always
          truthy), so the no-role branch was dead; the intent is to branch
          on whether a role ARN was supplied via context.
        - ``project_name='stack_name'`` used the literal string instead of
          the ``stack_name`` variable.
        """
        super().__init__(scope, id, **kwargs)
        logger = Logger(loglevel='info')
        # Context items pushed into cdk.json by the flask caller.
        stack_name = self.node.try_get_context('name')
        s3_bucket = self.node.try_get_context('s3_bucket')
        zipfile = self.node.try_get_context('zipfile')
        logger.info(s3_bucket)
        logger.info(
            f"Going to use file in {s3_bucket} to create cluster {stack_name}")
        role = self.node.try_get_context('iamrole')
        logger.info(f"Iam role {role}")
        if role is not None:
            # A role ARN was supplied: import it and let the build project
            # assume it (with S3 access for the pipeline artifacts).
            iam_role = iam.Role.from_role_arn(self, 'kube_role', role_arn=role)
            iam_role.add_to_policy(statement=iam.PolicyStatement(
                resources=["arn:aws:s3:::*/*"], actions=["s3:*"]))
            installEKS = codebuild.PipelineProject(
                self,
                f"{stack_name}-cluster",
                role=iam_role,
                project_name=f"eks-pipeline-{stack_name}",
                environment_variables={
                    'bucket_name':
                    codebuild.BuildEnvironmentVariable(value=s3_bucket),
                    'name':
                    codebuild.BuildEnvironmentVariable(value=stack_name)
                })
        else:
            # No role supplied: use the project's auto-created role and
            # grant it S3 access.
            installEKS = codebuild.PipelineProject(
                self,
                f"{stack_name}-cluster",
                project_name=stack_name,
                environment_variables={
                    'bucket_name':
                    codebuild.BuildEnvironmentVariable(value=s3_bucket)
                })
            installEKS.add_to_role_policy(
                iam.PolicyStatement(resources=["arn:aws:s3:::*/*"],
                                    actions=["s3:*"]))
        # Pre-existing bucket holding the uploaded source zip.
        s3_source = s3.Bucket.from_bucket_attributes(
            self,
            's3_source',
            bucket_name=s3_bucket,
            bucket_arn='arn:aws:s3:::{}'.format(s3_bucket))
        s3_artifact = codepipeline.Artifact(artifact_name='art')
        codebuild_artifact = codepipeline.Artifact(artifact_name='codebuild')

        self.pipeline = codepipeline.Pipeline(self,
                                              f"{stack_name}",
                                              pipeline_name=stack_name,
                                              artifact_bucket=s3_source)
        self.pipeline.add_stage(stage_name='Source',
                                actions=[
                                    codepipeline_actions.S3SourceAction(
                                        action_name='s3Source',
                                        bucket=s3_source,
                                        bucket_key=zipfile,
                                        output=s3_artifact)
                                ])
        cb = codepipeline_actions.CodeBuildAction(project=installEKS,
                                                  input=s3_artifact,
                                                  outputs=[codebuild_artifact],
                                                  action_name='installEKS')
        self.pipeline.add_stage(stage_name=f"{stack_name}-EksInstall",
                                actions=[cb])
    def __init__(
        self,
        scope: core.Construct,
        id: str,
        codebucket: s3.IBucket,
        **kwargs,
    ) -> None:
        """Docker-image build stack: an ECR repo plus an S3-triggered
        CodePipeline that runs a CodeBuild docker build pushing to ECR.

        :param codebucket: existing bucket holding both the pipeline
            artifacts and the build source zip (app_code/ecr_build_src.zip).
        """
        super().__init__(scope, id, **kwargs)

        # 1. Create ECR repositories
        self.ecr_repo = ecr.Repository(
            self,
            'ECRRepo',
            image_scan_on_push=True,
            removal_policy=core.RemovalPolicy.DESTROY)
        # 2. Setup deployment CI/CD to deploy docker image to ECR
        pipeline = codepipeline.Pipeline(self,
                                         "Pipeline",
                                         pipeline_name='BuildArcDockerImage',
                                         artifact_bucket=codebucket)
        # Build project driven by buildspec.yaml; privileged mode so the
        # build can run the Docker daemon. The target ECR URI is passed in
        # via the REPO_ECR environment variable.
        image_builder = codebuild.PipelineProject(
            self,
            'DockerBuild',
            project_name='BuildArcDockerImage',
            build_spec=codebuild.BuildSpec.from_source_filename(
                'buildspec.yaml'),
            environment=dict(
                build_image=codebuild.LinuxBuildImage.AMAZON_LINUX_2_3,
                privileged=True),
            environment_variables={
                'REPO_ECR':
                codebuild.BuildEnvironmentVariable(
                    value=self.ecr_repo.repository_uri),
            },
            description='Pipeline for docker build',
            timeout=core.Duration.minutes(60))
        image_builder.apply_removal_policy(core.RemovalPolicy.DESTROY)

        # 3. grant permissions for the CI/CD
        codebucket.grant_read_write(pipeline.role)
        codebucket.grant_read_write(image_builder)
        self.ecr_repo.grant_pull_push(image_builder)

        # Source stage: poll the source zip in the code bucket.
        source_output = codepipeline.Artifact('src')
        pipeline.add_stage(
            stage_name='Source',
            actions=[
                codepipeline_actions.S3SourceAction(
                    action_name='S3Trigger',
                    bucket=codebucket,
                    bucket_key='app_code/ecr_build_src.zip',
                    output=source_output,
                    trigger=codepipeline_actions.S3Trigger.POLL),
            ])
        pipeline.add_stage(stage_name='Build',
                           actions=[
                               codepipeline_actions.CodeBuildAction(
                                   action_name='DockerImageBuild',
                                   input=source_output,
                                   project=image_builder)
                           ])

        # Override Cfn Nag warning W12: IAM policy should not allow * resource
        # NOTE(review): `scan` is defined elsewhere in this file; the manual
        # add_metadata below appears to repeat the W12 suppression and adds
        # W76 — confirm both paths are actually needed.
        scan.suppress_cfnnag_rule(
            'W12',
            'the role for action of ecr:GetAuthorizationToken requires * resource',
            image_builder.role.node.find_child(
                'DefaultPolicy').node.default_child)

        image_builder.role.node.find_child(
            'DefaultPolicy'
        ).node.default_child.add_metadata(
            'cfn_nag', {
                "rules_to_suppress": [{
                    "id":
                    "W12",
                    "reason":
                    "the role for action of ecr:GetAuthorizationToken requires * resource"
                }, {
                    "id":
                    "W76",
                    "reason":
                    "the IAM policy is complex, need to be higher than 25"
                }]
            })
# Example #11
def create_action(
    scope: core.Construct,
    id: str,
    action_def: Union[CodeCommitAction, CodeBuildAction,
                      CloudFormationCreateUpdateStackAction, ApprovalAction,
                      LambdaInvokeAction, S3SourceAction, ],
):
    """Build a CodePipeline action from a declarative action definition.

    ``action_def["type"]`` selects the action class (CODECOMMIT, S3_SOURCE,
    CODEBUILD, CLOUDFORMATION, APPROVAL, LAMBDA); the remaining keys are
    type-specific. NOTE: ``action_def`` is mutated — ``name`` is popped and
    nested ``environment`` keys may be consumed.

    :param scope: construct scope for any resources created for the action
    :param id: id prefix for those resources
    :param action_def: dict-style definition describing one pipeline action
    :return: the constructed aws_codepipeline_actions action
    :raises KeyError: if a required key for the selected type is missing
    """

    def _capability(cap_name: str):
        # Fix: the original used cap_name.lstrip("CAPABILITY_"), but lstrip
        # strips a *character set*, mangling e.g. "CAPABILITY_IAM" -> "M"
        # and "CAPABILITY_AUTO_EXPAND" -> "UTO_EXPAND". Strip the literal
        # prefix instead; CAPABILITY_IAM maps to the ANONYMOUS_IAM enum key.
        key = (cap_name[len("CAPABILITY_"):]
               if cap_name.startswith("CAPABILITY_") else cap_name)
        if key == "IAM":
            key = "ANONYMOUS_IAM"
        return aws_cloudformation.CloudFormationCapabilities[key]

    action_name = action_def.pop("name")
    run_order = action_def.get("run_order", 1)
    variables_namespace = action_def.get("variables_namespace")
    # Optional pre-existing role for the action.
    role = (aws_iam.Role.from_role_arn(scope, f"{id}RoleRef",
                                       action_def["role_arn"])
            if "role_arn" in action_def else None)

    if action_def["type"] == "CODECOMMIT":
        action_def = cast(CodeCommitAction, action_def)
        repository = aws_codecommit.Repository.from_repository_name(
            scope, f"{id}Repo", action_def["repository"])
        output = aws_codepipeline.Artifact(action_def["output"])
        return aws_codepipeline_actions.CodeCommitSourceAction(
            action_name=action_name,
            output=output,
            repository=repository,
            branch=action_def.get("branch", "master"),
            run_order=run_order,
            role=role,
            variables_namespace=variables_namespace,
        )
    elif action_def["type"] == "S3_SOURCE":
        action_def = cast(S3SourceAction, action_def)
        output = aws_codepipeline.Artifact(action_def["output"])
        if "kms_key_arn" in action_def:
            # A KMS-encrypted source needs a dedicated role that can decrypt
            # with the given key (overrides any role_arn role above).
            role = aws_iam.Role(
                scope,
                f"{id}Role",
                assumed_by=aws_iam.AccountRootPrincipal(),
            )
            aws_kms.Key.from_key_arn(
                scope, f"{id}KeyRef",
                key_arn=action_def["kms_key_arn"]).grant_decrypt(role)
        if "bucket" in action_def:
            # Use the named, pre-existing bucket.
            bucket = aws_s3.Bucket.from_bucket_name(scope,
                                                    f"{id}SourceBucketRef",
                                                    action_def["bucket"])
        else:
            # No bucket named: create a private one and surface its name.
            bucket = aws_s3.Bucket(
                scope,
                f"{id}SourceBucket",
                block_public_access=aws_s3.BlockPublicAccess.BLOCK_ALL,
                removal_policy=core.RemovalPolicy.DESTROY,
            )
            core.CfnOutput(scope,
                           f"{id}SourceBucketName",
                           value=bucket.bucket_name)
        return aws_codepipeline_actions.S3SourceAction(
            action_name=action_name,
            output=output,
            run_order=run_order,
            role=role,
            bucket=bucket,
            bucket_key=action_def["key"],
        )
    elif action_def["type"] == "CODEBUILD":
        action_def = cast(CodeBuildAction, action_def)
        # Set up CodeBuild project
        project_params = {
            "build_spec":
            aws_codebuild.BuildSpec.from_source_filename(
                action_def.get("build_spec", "buildspec.yaml")),
            "timeout":
            core.Duration.minutes(int(action_def.get("timeout_minutes", 60))),
        }
        project_params["environment"] = {
            "build_image": aws_codebuild.LinuxBuildImage.AMAZON_LINUX_2_3
        }
        if "environment" in action_def:
            # build_image / compute_type come in as enum member names.
            if "build_image" in action_def["environment"]:
                project_params["environment"]["build_image"] = getattr(
                    aws_codebuild.LinuxBuildImage,
                    action_def["environment"].pop("build_image"),
                )
            if "compute_type" in action_def["environment"]:
                project_params["environment"]["compute_type"] = getattr(
                    aws_codebuild.ComputeType,
                    action_def["environment"].pop("compute_type"),
                )
            project_params["environment"].update(**action_def["environment"])
        project_role = aws_iam.Role(
            scope,
            f"{id}CodeBuildRole",
            path="/codebuild/",
            assumed_by=aws_iam.ServicePrincipal(
                service="codebuild.amazonaws.com"),
        )
        project_role.add_to_policy(
            aws_iam.PolicyStatement(actions=["*"],
                                    resources=["*"],
                                    effect=aws_iam.Effect.ALLOW))
        # Variables containing '#' are CodePipeline-resolved placeholders:
        # they go on the pipeline action; plain values go on the project.
        project_environment_variables = ({
            var_key: aws_codebuild.BuildEnvironmentVariable(
                value=str(var_value),
                type=aws_codebuild.BuildEnvironmentVariableType.PLAINTEXT,
            )
            for var_key, var_value in
            action_def["environment_variables"].items()
            if "#" not in str(var_value)
        } if "environment_variables" in action_def else None)
        project = aws_codebuild.PipelineProject(
            scope,
            f"{id}Project",
            project_name=id,
            role=project_role,
            environment_variables=project_environment_variables,
            **project_params,
        )
        pipeline_environment_variables = ({
            var_key: aws_codebuild.BuildEnvironmentVariable(
                value=str(var_value),
                type=aws_codebuild.BuildEnvironmentVariableType.PLAINTEXT,
            )
            for var_key, var_value in
            action_def["environment_variables"].items()
            if "#" in str(var_value)
        } if "environment_variables" in action_def else None)
        extra_inputs = ([
            aws_codepipeline.Artifact(input_)
            for input_ in action_def["extra_inputs"]
        ] if "extra_inputs" in action_def else None)
        outputs = ([
            aws_codepipeline.Artifact(output)
            for output in action_def["outputs"]
        ] if "outputs" in action_def else None)
        return aws_codepipeline_actions.CodeBuildAction(
            action_name=action_name,
            input=aws_codepipeline.Artifact(action_def["input"]),
            project=project,
            run_order=run_order,
            role=role,
            variables_namespace=variables_namespace,
            environment_variables=pipeline_environment_variables,
            extra_inputs=extra_inputs,
            outputs=outputs,
        )
    elif action_def["type"] == "CLOUDFORMATION":
        action_def = cast(CloudFormationCreateUpdateStackAction, action_def)
        return aws_codepipeline_actions.CloudFormationCreateUpdateStackAction(
            action_name=action_name,
            admin_permissions=False,
            stack_name=action_def["stack_name"],
            template_path=aws_codepipeline.ArtifactPath(
                aws_codepipeline.Artifact(action_def["input"]),
                action_def.get("template_path", "template.yaml"),
            ),
            capabilities=[
                _capability(capability)
                for capability in action_def["capabilities"]
            ] if "capabilities" in action_def else None,
            deployment_role=role,
            role=role,
            parameter_overrides=action_def.get("parameter_overrides"),
            run_order=run_order,
            variables_namespace=variables_namespace,
        )
    elif action_def["type"] == "APPROVAL":
        action_def = cast(ApprovalAction, action_def)
        return aws_codepipeline_actions.ManualApprovalAction(
            action_name=action_name,
            run_order=run_order,
            role=role,
            additional_information=action_def.get("additional_information"),
            external_entity_link=action_def.get("external_entity_link"),
            notification_topic=action_def.get("notification_topic"),
            variables_namespace=variables_namespace,
        )
    elif action_def["type"] == "LAMBDA":
        action_def = cast(LambdaInvokeAction, action_def)
        user_parameters = action_def.get("user_parameters")
        return aws_codepipeline_actions.LambdaInvokeAction(
            action_name=action_name,
            run_order=run_order,
            lambda_=aws_lambda.Function.from_function_arn(
                scope, f"{id}Lambda", action_def["function_arn"]),
            user_parameters=user_parameters,
            role=role,
            variables_namespace=variables_namespace,
        )
Пример #12
0
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        """Provision a CodePipeline that pulls a workshop source bundle from S3
        and runs it through a CodeBuild project.

        Resources created:
        - an IAM role for CodeBuild (AdministratorAccess — NOTE(review):
          very broad; presumably intentional for a workshop/lab, confirm
          before any production use)
        - an instance profile wrapping that role
        - a CodeBuild ``PipelineProject`` driven by ``buildspec.yml``
        - a two-stage pipeline: S3 source (no trigger) -> CodeBuild

        Args:
            scope: parent CDK construct.
            id: logical id of this construct.
            **kwargs: forwarded to the base construct.
        """
        super().__init__(scope, id, **kwargs)

        # Role assumed by the CodeBuild service.
        codebuild_role = iam.Role(
            self,
            "BuildRole",
            assumed_by=iam.CompositePrincipal(
                iam.ServicePrincipal("codebuild.amazonaws.com")),
            managed_policies=[
                iam.ManagedPolicy.from_aws_managed_policy_name(
                    "AdministratorAccess")
            ])

        # The instance profile is created for its side effect only; nothing in
        # this block references it, so the binding is intentionally dropped.
        iam.CfnInstanceProfile(
            self, "InstanceProfile", roles=[codebuild_role.role_name])

        # CodeBuild project; build steps come from buildspec.yml in the source.
        build_project = codebuild.PipelineProject(
            self,
            "BuildProject",
            role=codebuild_role,
            build_spec=codebuild.BuildSpec.from_source_filename(
                "buildspec.yml"),
            environment=codebuild.BuildEnvironment(
                build_image=codebuild.LinuxBuildImage.STANDARD_5_0,
                compute_type=codebuild.ComputeType.LARGE))

        # Empty pipeline; stages are attached below.
        pipeline = codepipeline.Pipeline(
            self,
            "Pipeline",
        )

        # Artifact passed from the Source stage to the Deploy stage.
        artifact = codepipeline.Artifact()

        # Region-specific Event Engine assets bucket (imported, not created).
        source_bucket = s3.Bucket.from_bucket_attributes(
            self,
            "SourceBucket",
            bucket_arn=core.Fn.join(
                "",
                ["arn:aws:s3:::ee-assets-prod-",
                 core.Fn.ref("AWS::Region")]))

        # Source stage: fetch the module zip; trigger NONE means the pipeline
        # only runs when started manually (or on pipeline creation).
        pipeline.add_stage(
            stage_name="Source",
            actions=[
                codepipeline_actions.S3SourceAction(
                    action_name="S3SourceRepo",
                    bucket=source_bucket,
                    bucket_key=
                    "modules/c52c7d8ba87d4217a2bf045037b58b5d/v1/source.zip",
                    output=artifact,
                    trigger=codepipeline_actions.S3Trigger.NONE)
            ])

        # "Deploy" stage: runs the CodeBuild project against the source zip.
        pipeline.add_stage(
            stage_name="Deploy",
            actions=[
                codepipeline_actions.CodeBuildAction(
                    action_name="CodeBuildProject",
                    project=build_project,
                    type=codepipeline_actions.CodeBuildActionType.BUILD,
                    input=artifact)
            ])
Пример #13
0
    def __init__(self, scope: core.Construct, id: str,
                 artifact_bucket: s3.Bucket, static_website_bucket: s3.Bucket,
                 backend_fn: _lambda.Function, api: apigateway.LambdaRestApi,
                 **kwargs) -> None:
        """Deploy pipeline for a Lambda backend and a static React client.

        Builds a three-stage CodePipeline:
        Source (two S3 zips) -> Build (client only) -> Deploy
        (Lambda-invoke to update the backend, S3 deploy for the client).

        Args:
            scope: parent CDK construct.
            id: logical id of this construct.
            artifact_bucket: bucket holding the uploaded source zips
                (``Server/main.zip`` and ``Client/src.zip``).
            static_website_bucket: destination bucket for the built client.
            backend_fn: Lambda function whose code the pipeline updates.
            api: REST API whose URL is baked into the client build.
            **kwargs: forwarded to the base construct.
        """
        super().__init__(scope, id, **kwargs)

        # Helper Lambda invoked by the Deploy stage. Presumably its handler
        # calls UpdateFunctionCode using the user_parameters passed below —
        # the handler source (lambdas/updateSource) is not visible here.
        fn = _lambda.Function(
            scope=self,
            id="source-update-function",
            runtime=_lambda.Runtime.PYTHON_3_8,
            handler="index.handler",
            # memory_size=500,
            timeout=core.Duration.seconds(10),
            code=_lambda.Code.from_asset(
                os.path.join("lambdas", "updateSource")))
        # Allow the helper to update the backend function's code ...
        fn.add_to_role_policy(
            statement=iam.PolicyStatement(actions=[
                "lambda:UpdateFunctionCode",
            ],
                                          resources=[
                                              backend_fn.function_arn,
                                          ]))
        # ... and to read the server bundle it deploys from.
        fn.add_to_role_policy(
            statement=iam.PolicyStatement(actions=[
                "s3:GetObject",
            ],
                                          resources=[
                                              artifact_bucket.bucket_arn +
                                              "/Server/main.zip",
                                          ]))

        # Codepipeline
        # restart_execution_on_update: re-run the pipeline whenever the
        # pipeline definition itself changes.
        deploy_pipeline = codepipeline.Pipeline(
            scope=self,
            id="deploy-pipeline",
            restart_execution_on_update=True,
        )

        # Artifacts carrying the two source zips through the pipeline.
        lambda_source_output = codepipeline.Artifact()
        client_source_output = codepipeline.Artifact()
        # Source stage: both actions pull from the same artifact bucket
        # (default S3 trigger applies since none is specified).
        deploy_pipeline.add_stage(stage_name="Source",
                                  actions=[
                                      codepipeline_actions.S3SourceAction(
                                          action_name="LambdaSource",
                                          bucket=artifact_bucket,
                                          bucket_key="Server/main.zip",
                                          output=lambda_source_output,
                                      ),
                                      codepipeline_actions.S3SourceAction(
                                          action_name="ClientSource",
                                          bucket=artifact_bucket,
                                          bucket_key="Client/src.zip",
                                          output=client_source_output,
                                      )
                                  ])

        # Inline buildspec for the client: injects the API URL as a React
        # env var, installs deps, tests, builds, and exports build/ as the
        # output artifact.
        build_specs = {
            "version": "0.2",
            "env": {
                "variables": {
                    "REACT_APP_AUTH_URL": api.url,
                }
            },
            "phases": {
                "install": {
                    "commands": [
                        "npm install -g yarn",
                    ]
                },
                "build": {
                    "commands": [
                        "npm install",
                        "yarn test",
                        "yarn build",
                    ]
                }
            },
            "artifacts": {
                "base-directory": "build",
                "files": [
                    "**/*",
                ],
            }
        }
        # Build stage: only the client needs a build; the server zip is
        # deployed as-is by the Lambda action below.
        client_build_output = codepipeline.Artifact()
        deploy_pipeline.add_stage(
            stage_name="Build",
            actions=[
                codepipeline_actions.CodeBuildAction(
                    action_name="ClientBuild",
                    project=codebuild.Project(
                        scope=self,
                        id="codebuild-client",
                        build_spec=codebuild.BuildSpec.from_object(
                            build_specs),
                    ),
                    input=client_source_output,
                    outputs=[client_build_output])
            ])

        # Deploy stage: invoke the helper Lambda to swap in the new backend
        # code, and unzip the built client into the website bucket.
        deploy_pipeline.add_stage(stage_name="Deploy",
                                  actions=[
                                      codepipeline_actions.LambdaInvokeAction(
                                          lambda_=fn,
                                          inputs=[lambda_source_output],
                                          action_name="UpdateSource",
                                          user_parameters={
                                              "functionName":
                                              backend_fn.function_name,
                                              "sourceBucket":
                                              artifact_bucket.bucket_name,
                                              "sourceKey": "Server/main.zip",
                                          }),
                                      codepipeline_actions.S3DeployAction(
                                          bucket=static_website_bucket,
                                          input=client_build_output,
                                          action_name="DeployClient",
                                          extract=True,
                                      ),
                                  ])