Example #1
    def __init__(self, scope: core.Construct, id: str, props,
                 **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        refdata = s3.Bucket.from_bucket_attributes(
            self, 'reference_data', bucket_name='umccr-refdata-dev')

        build_env = cb.BuildEnvironment(
            build_image=cb.LinuxBuildImage.from_docker_registry("docker:dind"),
            privileged=True,
            compute_type=cb.ComputeType.LARGE)

        cb_project = cb.Project(
            self,
            id="UmccriseCodeBuildProject",
            project_name=props['codebuild_project_name'],
            environment=build_env,
            timeout=core.Duration.hours(3),
            source=cb.Source.git_hub(
                identifier="umccrise",
                owner="umccr",
                repo="umccrise",
                clone_depth=1,
                webhook=True,
                webhook_filters=[
                    cb.FilterGroup.in_event_of(
                        cb.EventAction.PUSH).and_tag_is(semver_tag_regex)
                ]))

        # Tackle IAM permissions
        # https://stackoverflow.com/questions/38587325/aws-ecr-getauthorizationtoken/54806087
        cb_project.role.add_managed_policy(
            iam.ManagedPolicy.from_aws_managed_policy_name(
                'AmazonEC2ContainerRegistryPowerUser'))
        refdata.grant_read(cb_project)
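The AmazonEC2ContainerRegistryPowerUser policy attached above is broader than a single tag-triggered image build needs. A minimal sketch of a scoped inline alternative, assuming the project only pushes and pulls one repository (the repository ARN below is illustrative, not from the original):

        # GetAuthorizationToken cannot be scoped to a single repository
        cb_project.add_to_role_policy(iam.PolicyStatement(
            actions=["ecr:GetAuthorizationToken"],
            resources=["*"]))
        cb_project.add_to_role_policy(iam.PolicyStatement(
            actions=[
                "ecr:BatchCheckLayerAvailability", "ecr:GetDownloadUrlForLayer",
                "ecr:BatchGetImage", "ecr:InitiateLayerUpload", "ecr:UploadLayerPart",
                "ecr:CompleteLayerUpload", "ecr:PutImage"
            ],
            # illustrative ARN -- substitute the real account, region and repository
            resources=["arn:aws:ecr:ap-southeast-2:111111111111:repository/umccrise"]))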
Example #2
    def __init__(self, scope: core.Construct, id: str, project_git_url,
                 lambda_code_bucket, policy_resources):
        """
        CodeBuild project
        """
        super().__init__(scope, id)

        stack_name = core.Stack.of(self).stack_name

        self.project = codebuild.Project(
            self,
            "project",
            project_name=f"{stack_name}-{names.CODEBUILD_PROJECT}",
            source=codebuild.Source.git_hub_enterprise(
                https_clone_url=project_git_url, clone_depth=1),
            environment=codebuild.BuildEnvironment(
                build_image=codebuild.LinuxBuildImage.AMAZON_LINUX_2_2,
                compute_type=codebuild.ComputeType.MEDIUM),
            artifacts=codebuild.Artifacts.s3(name=stack_name,
                                             bucket=lambda_code_bucket),
            badge=True,
            timeout=core.Duration.minutes(5))

        self.project.add_to_role_policy(
            iam.PolicyStatement(resources=policy_resources,
                                actions=[
                                    "lambda:UpdateFunctionCode",
                                    "lambda:PublishVersion",
                                    "lambda:UpdateAlias"
                                ]))
Example #3
  def __init__(self, scope: core.Construct, id: str, 
    build_image:assets.DockerImageAsset,
    build_role:iam.Role, **kwargs) -> None:
    super().__init__(scope, id, **kwargs)
    
    self.github_master_source = b.Source.git_hub(
      clone_depth=1,
      owner='dr-natetorious',
      repo='app-FinSurf',
      webhook=False
    )

    self.build_project = b.Project(self, 'DeployInfra',
      project_name='Deploy-FinSurf-Infra',
      source= self.github_master_source,
      environment= b.BuildEnvironment(
        build_image= b.LinuxBuildImage.from_ecr_repository(
          repository=build_image.repository,
          tag=build_image.image_uri.split(':')[-1]),
        environment_variables={
        },
        compute_type=b.ComputeType.SMALL
      ),
      role=build_role,
      build_spec= b.BuildSpec.from_source_filename(filename='cicd/configs/buildspec-cdk-infra.yml'),
    )
Example #4
def create_build_project(self, role, source_bucket):
    build_project = _cb.Project(
        self, 'CodeBuildProject',
        project_name='DEMO-BUILD',
        source=_cb.Source.s3(
            bucket=source_bucket,
            path='archive.zip'
        ),
        environment=_cb.BuildEnvironment(
            build_image=_cb.LinuxBuildImage.STANDARD_3_0,
            privileged=True
        ),
        environment_variables={
            'IMAGE_REPO_NAME': _cb.BuildEnvironmentVariable(value='demo-repository'),
            'AWS_DEFAULT_REGION': _cb.BuildEnvironmentVariable(value=os.environ.get('REGION')),
            'AWS_ACCOUNT_ID': _cb.BuildEnvironmentVariable(value=os.environ.get('ACCOUNT_ID')),
            'CONTAINER_NAME': _cb.BuildEnvironmentVariable(value='DEMO-CONTAINER'),
        },
        build_spec=_cb.BuildSpec.from_source_filename(filename='etc/cicd/buildspec.yml'),
        artifacts=_cb.Artifacts.s3(
            bucket=source_bucket,
            name='artifact-codebuild.zip',
            package_zip=True,
            include_build_id=False
        ),
        role=role
    )
    return build_project
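create_build_project above expects the caller to supply an existing role and source bucket. A hypothetical call site, assuming _iam and _s3 aliases for aws_iam and aws_s3 (the role and bucket names are made up for illustration):

    codebuild_role = _iam.Role(
        self, 'DemoBuildRole',
        assumed_by=_iam.ServicePrincipal('codebuild.amazonaws.com'))
    source_bucket = _s3.Bucket.from_bucket_name(
        self, 'DemoSourceBucket', 'demo-build-source-bucket')
    build_project = self.create_build_project(role=codebuild_role,
                                              source_bucket=source_bucket)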
Example #5
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        # Create IAM Role For CodeBuild
        # TODO Make this role's policy least privilege
        aws_app_resources_build_role = iam.Role(
            self, "AWSAppResourcesBuildRole",
            assumed_by=iam.ServicePrincipal("codebuild.amazonaws.com"),
            managed_policies=[
                iam.ManagedPolicy.from_aws_managed_policy_name("AdministratorAccess")
            ]
        )

        # We only want to fire on the master branch and if there is a change in the aws-app-resources folder
        git_hub_source = codebuild.Source.git_hub(
            owner="jasonumiker",
            repo="k8s-plus-aws-gitops",
            webhook=True,
            webhook_filters=[
                codebuild.FilterGroup.in_event_of(codebuild.EventAction.PUSH).and_branch_is("master").and_file_path_is("aws-app-resources/*")
            ]
        )

        # Create CodeBuild
        build_project = codebuild.Project(
            self, "AWSAppResourcesBuildProject",
            source=git_hub_source,
            role=aws_app_resources_build_role,
            build_spec=codebuild.BuildSpec.from_source_filename("aws-app-resources/buildspec.yml")
        )
Example #6
    def __init__(
        self,
        scope: core.Construct,
        id: str,
        repo: str,
        artifacts_bucket: str,
        owner: str = 'SeedCompany',
        create_bucket: bool = False,  # if True, a bucket named artifacts_bucket is created; otherwise an existing bucket with that name is imported
        **kwargs
    ) -> None:
        super().__init__(scope, id, **kwargs)

        if create_bucket:
            artifactStore = s3.Bucket(self,
                                      artifacts_bucket,
                                      bucket_name=artifacts_bucket)
        else:
            artifactStore = s3.Bucket.from_bucket_name(self, artifacts_bucket,
                                                       artifacts_bucket)

        artifacts = codebuild.Artifacts.s3(
            bucket=artifactStore,
            name=repo,
            include_build_id=True,
            package_zip=False,
        )

        #GitHub credentials are entered into CodeBuild manually
        # $ aws codebuild import-source-credentials --server-type GITHUB --auth-type PERSONAL_ACCESS_TOKEN --token <token_value>
        gitRepo = codebuild.Source.git_hub(owner=owner,
                                           repo=repo,
                                           webhook=True)

        retetoRepo = ecr.Repository.from_repository_name(
            self, 'RetetoRepo', 'reteto')

        buildEnv = codebuild.BuildEnvironment(
            build_image=codebuild.LinuxBuildImage.from_ecr_repository(
                retetoRepo),
            compute_type=codebuild.ComputeType.SMALL,
            privileged=True)

        project = codebuild.Project(
            self,
            '%sBuild' % repo.capitalize(),
            project_name='%sBuild' % repo.capitalize(),
            environment=buildEnv,
            environment_variables={
                "AWS_ACCOUNT_ID":
                codebuild.BuildEnvironmentVariable(value=self.account),
                "REPO":
                codebuild.BuildEnvironmentVariable(value=repo)
            },
            source=gitRepo,
            artifacts=artifacts,
            badge=True,
            # see reference.buildspec.yml for a standard buildspec
            build_spec=codebuild.BuildSpec.from_object({}))
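The empty BuildSpec.from_object({}) above defers to the reference.buildspec.yml mentioned in the comment. Purely to illustrate the shape such an object takes (the commands and artifact paths here are invented), an inline buildspec could look like:

        build_spec = codebuild.BuildSpec.from_object({
            'version': '0.2',
            'phases': {
                'build': {
                    # placeholder commands -- the real ones live in reference.buildspec.yml
                    'commands': ['echo building %s' % repo, 'make package']
                }
            },
            'artifacts': {'files': ['dist/**/*']}
        })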
Example #7
    def __init__(self, scope: core.Construct, construct_id: str, **kwargs) -> None:
        super().__init__(scope, construct_id, **kwargs)

        prj_name = self.node.try_get_context("project_name")
        env_name = self.node.try_get_context("env")
        account_id = core.Aws.ACCOUNT_ID
        PROJECT_NUMBER = 2

        # To Store Frontend App
        frontend_bucket = s3.Bucket(self, "frontend",
                                    access_control=s3.BucketAccessControl.BUCKET_OWNER_FULL_CONTROL,
                                    bucket_name=account_id + '-' + env_name + '-frontend',
                                    public_read_access=True,
                                    removal_policy=core.RemovalPolicy.DESTROY,
                                    website_index_document='index.html'
                                    )

        bucket_name = frontend_bucket.bucket_name

        github_token = core.SecretValue.secrets_manager("dev/github-token", json_field='github-from-marsApp')

        cb.GitHubSourceCredentials(self, "CodeBuildGitHubCreds",
                                          access_token=github_token
                                          )

        git_hub_source = cb.Source.git_hub(
            owner="manrodri",
            repo="30miniProjects",
            webhook=True,
            webhook_filters=[
                cb.FilterGroup.in_event_of(cb.EventAction.PUSH).and_branch_is(
                    "master").and_file_path_is('js30Projects/')
            ]
        )

        codebuild_project = cb.Project(
            self,
            "cb-frontend",
            source=git_hub_source,
            environment=cb.BuildEnvironment(
                build_image=cb.LinuxBuildImage.STANDARD_3_0,
                environment_variables={
                    'WEB_BUCKET_NAME': cb.BuildEnvironmentVariable(value=bucket_name),
                    'PROJECT_NUMBER': cb.BuildEnvironmentVariable(value=str(PROJECT_NUMBER))
                }
            ),
        )

        allow_object_actions = iam.PolicyStatement(resources=[f"arn:aws:s3:::{bucket_name}/*"],
                                               actions=["s3:*"])
        allow_bucket_actions = iam.PolicyStatement(
            resources=[f"arn:aws:s3:::{bucket_name}"],
            actions=['s3:*'],
        )
        codebuild_project.add_to_role_policy(allow_object_actions)
        codebuild_project.add_to_role_policy(allow_bucket_actions)
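Rather than the two broad s3:* statements above, the bucket's own grant helpers can attach an equivalent but tighter policy to the project role; a sketch using the same bucket and project:

        # covers object read/write and bucket listing; grant_delete adds s3:DeleteObject*
        frontend_bucket.grant_read_write(codebuild_project)
        frontend_bucket.grant_delete(codebuild_project)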
Example #8
    def __init__(self, scope: core.Construct, id: str, ecr_repo_name: str,
                 spec_file_path: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        # Define CodeBuild resource.
        git_hub_source = codebuild.Source.git_hub(
            owner=GITHUB_REPO_OWNER,
            repo=GITHUB_REPO_NAME,
            webhook=True,
            webhook_filters=[
                codebuild.FilterGroup.in_event_of(
                    codebuild.EventAction.PULL_REQUEST_CREATED,
                    codebuild.EventAction.PULL_REQUEST_UPDATED,
                    codebuild.EventAction.PULL_REQUEST_REOPENED)
            ],
            clone_depth=1)

        # Define an IAM role for this stack.
        code_build_batch_policy = iam.PolicyDocument.from_json(
            code_build_batch_policy_in_json([id]))
        inline_policies = {"code_build_batch_policy": code_build_batch_policy}
        role = iam.Role(
            scope=self,
            id="{}-role".format(id),
            assumed_by=iam.ServicePrincipal("codebuild.amazonaws.com"),
            inline_policies=inline_policies)

        # Create build spec.
        placeholder_map = {"ECR_REPO_PLACEHOLDER": ecr_arn(ecr_repo_name)}
        build_spec_content = YmlLoader.load(spec_file_path, placeholder_map)

        # Define CodeBuild.
        project = codebuild.Project(
            scope=self,
            id=id,
            project_name=id,
            source=git_hub_source,
            role=role,
            timeout=core.Duration.minutes(180),
            environment=codebuild.BuildEnvironment(
                compute_type=codebuild.ComputeType.SMALL,
                privileged=False,
                build_image=codebuild.LinuxBuildImage.STANDARD_4_0),
            build_spec=codebuild.BuildSpec.from_object(build_spec_content))

        # TODO: add build type BUILD_BATCH when CFN finishes the feature release. See CryptoAlg-575.

        # Add 'BuildBatchConfig' property, which is not supported in CDK.
        # CDK raw overrides: https://docs.aws.amazon.com/cdk/latest/guide/cfn_layer.html#cfn_layer_raw
        # https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-codebuild-project.html#aws-resource-codebuild-project-properties
        cfn_build = project.node.default_child
        cfn_build.add_override("Properties.BuildBatchConfig", {
            "ServiceRole": role.role_arn,
            "TimeoutInMins": 180
        })
Example #9
    def __init__(self, scope: core.Construct, id: str, x86_ecr_repo_name: str,
                 arm_ecr_repo_name: str, spec_file_path: str,
                 **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        # Define CodeBuild resource.
        git_hub_source = codebuild.Source.git_hub(
            owner=GITHUB_REPO_OWNER,
            repo=GITHUB_REPO_NAME,
            webhook=True,
            webhook_filters=[
                codebuild.FilterGroup.in_event_of(codebuild.EventAction.PUSH)
                # The current FIPS branch does not have the configuration needed to run the analytics, once we update
                # the branch or create a new FIPS branch it should be updated to '(main)|(fips.*)'
                .and_branch_is("main")
            ],
            webhook_triggers_batch_build=True)

        # Define an IAM role for this stack.
        metrics_policy = iam.PolicyDocument.from_json(
            code_build_publish_metrics_in_json())
        inline_policies = {"metric_policy": metrics_policy}
        role = iam.Role(
            scope=self,
            id="{}-role".format(id),
            assumed_by=iam.ServicePrincipal("codebuild.amazonaws.com"),
            inline_policies=inline_policies)

        # Create build spec.
        placeholder_map = {
            "X86_ECR_REPO_PLACEHOLDER": ecr_arn(x86_ecr_repo_name),
            "ARM_ECR_REPO_PLACEHOLDER": ecr_arn(arm_ecr_repo_name)
        }
        build_spec_content = YmlLoader.load(spec_file_path, placeholder_map)

        # Define CodeBuild.
        analytics = codebuild.Project(
            scope=self,
            id="AnalyticsCodeBuild",
            project_name=id,
            source=git_hub_source,
            role=role,
            timeout=core.Duration.minutes(120),
            environment=codebuild.BuildEnvironment(
                compute_type=codebuild.ComputeType.LARGE,
                privileged=True,
                build_image=codebuild.LinuxBuildImage.STANDARD_4_0),
            build_spec=codebuild.BuildSpec.from_object(build_spec_content))
        analytics.enable_batch_builds()
Example #10
 def new_build_project(self, repo: codecommit.Repository,
                       buildspec_path: str,
                       proj_name: str) -> _codebuild.Project:
     return _codebuild.Project(
         self,
         proj_name,
         badge=True,
         source=_codebuild.Source.code_commit(repository=repo),
         description=f"Build project for {proj_name}",
         environment=_codebuild.BuildEnvironment(
             build_image=_codebuild.LinuxBuildImage.STANDARD_5_0,
             compute_type=_codebuild.ComputeType.LARGE,
             privileged=True),
         project_name=proj_name,
         build_spec=_codebuild.BuildSpec.from_source_filename(
             filename=buildspec_path),
         timeout=Duration.minutes(10))
Example #11
  def __init__(self, scope: core.Construct, id: str, 
    project_name:str,
    build_image:assets.DockerImageAsset, 
    context:BuildContext, 
    build_role:iam.Role,
    app_dir:str, **kwargs) -> None:

    super().__init__(scope, id, **kwargs)

    self.github_master_source = b.Source.git_hub(
      clone_depth=1,
      owner='dr-natetorious',
      repo='app-FinSurf',
      webhook=False
    )

    param_name = '/app-finsurf/artifacts/bin/{}'.format(project_name)
    output_path = 's3://{}/cicd/{}'.format(
      context.buckets.artifacts_bucket.bucket_name,
      project_name)

    self.build_project = b.Project(self,'PythonProject',
      project_name=project_name,
      source= self.github_master_source,
      environment= b.BuildEnvironment(
        build_image= b.LinuxBuildImage.from_ecr_repository(
          repository=build_image.repository,
          tag=build_image.image_uri.split(':')[-1]),
        environment_variables={
          'APP_DIR':b.BuildEnvironmentVariable(value=app_dir),
          'PARAM_NAME': b.BuildEnvironmentVariable(value=param_name),
          'OUTPUT_PATH': b.BuildEnvironmentVariable(value=output_path),
        },
        compute_type=b.ComputeType.SMALL
      ),
      role=build_role,
      encryption_key= context.buckets.artifacts_key,
      build_spec= b.BuildSpec.from_source_filename(filename='cicd/configs/buildspec-python-zip.yml'),
      artifacts= b.Artifacts.s3(
        name=project_name,
        path="/artifacts",
        bucket=context.buckets.artifacts_bucket,
        encryption=True,
        include_build_id=False,
        package_zip=False)
      )
Example #12
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)
        
        # Create ECR Repository
        ghost_repo = ecr.Repository(
            self, "GhostRepo",
            repository_name="ghost"
        )

        # Create IAM Role For CodeBuild
        ghost_build_role = iam.Role(
            self, "GhostBuildRole",
            assumed_by=iam.ServicePrincipal("codebuild.amazonaws.com"),
            managed_policies=[
                iam.ManagedPolicy.from_aws_managed_policy_name("EC2InstanceProfileForImageBuilderECRContainerBuilds")
            ]
        )

        # We only want to fire on the master branch and if there is a change in the dockerbuild folder
        git_hub_source = codebuild.Source.git_hub(
            owner="jasonumiker",
            repo="k8s-plus-aws-gitops",
            webhook=True,
            webhook_filters=[
                codebuild.FilterGroup.in_event_of(codebuild.EventAction.PUSH).and_branch_is("master").and_file_path_is("dockerbuild/*")
            ]
        )

        # Create CodeBuild
        build_project = codebuild.Project(
            self, "GhostBuildProject",
            source=git_hub_source,
            role=ghost_build_role,
            build_spec=codebuild.BuildSpec.from_source_filename("dockerbuild/buildspec.yml"),
            environment={
                'privileged': True,
            },
            environment_variables={
                'AWS_ACCOUNT_ID': codebuild.BuildEnvironmentVariable(value=self.account),
                'IMAGE_REPO_NAME': codebuild.BuildEnvironmentVariable(value=ghost_repo.repository_name)
            }
        )
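The managed policy attached to ghost_build_role above is aimed at Image Builder; the ECR repository's grant helper (also used in Example #17) is a more direct way to give this project push/pull access. A one-line sketch:

        # grants pull and push on the ghost repository to the project role
        ghost_repo.grant_pull_push(build_project)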
Example #13
    def __init__(self, scope: Construct, construct_id: str, **kwargs) -> None:
        super().__init__(scope, construct_id, **kwargs)

        repo = codecommit.Repository(
            self,
            "repo",
            repository_name="demorepo",
            description="Repo to test PR with stepfunctions")

        proj1 = self.new_build_project(repo, "pr_specs/buildspec.yaml",
                                       "proj1")

        proj2 = _codebuild.Project(
            self,
            "proj_name",
            badge=True,
            description="Build project for ",
            environment=_codebuild.BuildEnvironment(
                build_image=_codebuild.LinuxBuildImage.STANDARD_5_0,
                compute_type=_codebuild.ComputeType.LARGE,
                privileged=True),
            project_name="proj_name",
            build_spec=_codebuild.BuildSpec.from_source_filename(
                filename="pr_specs/buildspec2.yaml"),
            timeout=Duration.minutes(10),
        )

        input_task = _step_fn.Pass(self, "passstate")

        proj1_tasks = self.new_codebuild_task(proj1)
        proj2_tasks = self.new_codebuild_task(proj2)

        definition = input_task.next(proj1_tasks).next(proj2_tasks)

        _fn = _step_fn.StateMachine(
            self,
            "statemachine",
            definition=definition,
            state_machine_name="statemachine",
        )
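new_codebuild_task is referenced above but not shown in this excerpt. A minimal sketch of what it could look like, assuming aws_stepfunctions_tasks is imported as _sfn_tasks, is a thin wrapper around CodeBuildStartBuild:

    def new_codebuild_task(self, project: _codebuild.IProject) -> _sfn_tasks.CodeBuildStartBuild:
        # wraps a CodeBuild project in a Step Functions task that waits for the build to finish
        return _sfn_tasks.CodeBuildStartBuild(
            self,
            f"{project.node.id}-task",
            project=project,
            integration_pattern=_step_fn.IntegrationPattern.RUN_JOB)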
Example #14
    def __init__(self, scope: Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        # Create IAM Role For CodeBuild
        # TODO Make this role's policy least privilege
        aws_app_resources_build_role = iam.Role(
            self,
            "EKSCodeBuildRole",
            assumed_by=iam.ServicePrincipal("codebuild.amazonaws.com"),
            managed_policies=[
                iam.ManagedPolicy.from_aws_managed_policy_name(
                    "AdministratorAccess")
            ])

        # We only want to fire on the configured branch and if there is a change in the cluster-bootstrap folder
        git_hub_source = codebuild.Source.git_hub(
            owner=self.node.try_get_context("github_owner"),
            repo=self.node.try_get_context("github_repo"),
            branch_or_ref=self.node.try_get_context("github_branch"),
            webhook=True,
            webhook_filters=[
                codebuild.FilterGroup.in_event_of(
                    codebuild.EventAction.PUSH).and_branch_is(
                        self.node.try_get_context("github_branch")).
                and_file_path_is("cluster-bootstrap/*")
            ])

        # Create CodeBuild
        build_project = codebuild.Project(
            self,
            "EKSCodeBuild",
            source=git_hub_source,
            role=aws_app_resources_build_role,
            environment=codebuild.BuildEnvironment(
                build_image=codebuild.LinuxBuildImage.STANDARD_5_0,
                compute_type=codebuild.ComputeType.LARGE),
            build_spec=codebuild.BuildSpec.from_source_filename(
                "cluster-bootstrap/buildspec.yml"))
Example #15
    def make_codebuild_project(
            self, project_suffix : str, 
            description : str,
            buildspec_path : str,
            env_variables : dict):
        '''
            Creates a codebuild project

            Parameters
            ----------
            project_suffix : str
                The suffix of the project. The full project name is
                APPLICATION_PREFIX-project_suffix
            description : str
                Description used by tags
            buildspec_path : str
                The path to the buildspec used to build this project
            env_variables : dict
                The environment variables supplied to the project, e.g. the ECR repo URI
        '''

        project_name = "%s-%s" % (self.APPLICATION_PREFIX, project_suffix)
        build_project = codebuild.Project(
            self, project_name, 
            source=codebuild.Source.git_hub(owner=self.GITHUB_REPO_OWNER, repo=self.GITHUB_REPO_NAME),
            build_spec=codebuild.BuildSpec.from_source_filename(buildspec_path),
            description=description,
            environment_variables=env_variables,
            environment=codebuild.BuildEnvironment(
                privileged=True,
            ),
            project_name=project_name, role=self.codebuild_role_name,
            timeout=core.Duration.hours(1))

        util.tag_resource(build_project, project_name, description)
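A hypothetical invocation of the helper above (the suffix, buildspec path and variable values are illustrative; ecr_repo is assumed to be an ECR repository defined elsewhere in the stack):

        self.make_codebuild_project(
            project_suffix='api',
            description='Builds and pushes the API container image',
            buildspec_path='cicd/buildspec-api.yml',
            env_variables={
                'ECR_REPO_URI':
                codebuild.BuildEnvironmentVariable(value=ecr_repo.repository_uri)
            })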

        
Example #16
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        # As semver dictates: https://regex101.com/r/Ly7O1x/3/
        semver_tag_regex = r'(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$'

        refdata = s3.Bucket.from_bucket_attributes(
            self, 'reference_data', bucket_name='umccr-refdata-dev')

        build_env = cb.BuildEnvironment(
            build_image=cb.LinuxBuildImage.from_docker_registry("docker:dind"),
            privileged=True,
            compute_type=cb.ComputeType.SMALL)

        cb_project = cb.Project(
            self,
            id="rnasum",
            environment=build_env,
            timeout=core.Duration.hours(1),
            source=cb.Source.git_hub(
                identifier="rnasum",
                owner="umccr",
                repo="rnasum",
                clone_depth=1,
                webhook=True,
                webhook_filters=[
                    cb.FilterGroup.in_event_of(
                        cb.EventAction.PUSH).and_tag_is(semver_tag_regex)
                ]))

        # Tackle IAM permissions
        # https://stackoverflow.com/questions/38587325/aws-ecr-getauthorizationtoken/54806087
        cb_project.role.add_managed_policy(
            iam.ManagedPolicy.from_aws_managed_policy_name(
                'AmazonEC2ContainerRegistryPowerUser'))
        refdata.grant_read(cb_project)
Example #17
    def __init__(self, scope: core.Construct, construct_id: str,
                 **kwargs) -> None:
        super().__init__(scope, construct_id, **kwargs)

        # ECR Repo
        ecrRepo = _ecr.Repository(self, 'EcrRepo')

        gitHubSource = _codebuild.Source.git_hub(
            owner='samuelhailemariam',
            repo='aws-cdk-cicd-docker-ecr',
            webhook=True,
            webhook_filters=[
                _codebuild.FilterGroup.in_event_of(
                    _codebuild.EventAction.PUSH).and_branch_is('main'),
            ],
        )

        # CODEBUILD - project

        project = _codebuild.Project(
            self,
            'MyProject',
            project_name=self.stack_name,
            source=gitHubSource,
            environment=_codebuild.BuildEnvironment(
                build_image=_codebuild.LinuxBuildImage.AMAZON_LINUX_2_2,
                privileged=True),
            environment_variables={
                'ECR_REPO_URI': {
                    'value': ecrRepo.repository_uri
                }
            },
            build_spec=_codebuild.BuildSpec.from_object({
                'version': "0.2",
                'phases': {
                    'pre_build': {
                        'commands': [
                            'env',
                            'export TAG=$CODEBUILD_RESOLVED_SOURCE_VERSION'
                        ]
                    },
                    'build': {
                        'commands': [
                            'cd docker-app',
                            'docker build -t $ECR_REPO_URI:$TAG .',
                            '$(aws ecr get-login --no-include-email)',
                            'docker push $ECR_REPO_URI:$TAG'
                        ]
                    },
                    'post_build': {
                        'commands': [
                            'echo "In Post-Build Stage"', 'cd ..',
                            "printf '[{\"name\":\"flask-app\",\"imageUri\":\"%s\"}]' $ECR_REPO_URI:$TAG > imagedefinitions.json",
                            'pwd', 'ls -al', 'cat imagedefinitions.json'
                        ]
                    }
                },
                'artifacts': {
                    'files': ['imagedefinitions.json']
                }
            }))

        ecrRepo.grant_pull_push(project.role)

        sourceOutput = _codepipeline.Artifact()
        buildOutput = _codepipeline.Artifact()

        sourceAction = _pipelineactions.GitHubSourceAction(
            action_name='GitHub_Source',
            owner='samuelhailemariam',
            repo='aws-cdk-cicd-docker-ecr',
            branch='master',
            oauth_token=core.SecretValue.secrets_manager("/my/github/token"),
            output=sourceOutput)

        buildAction = _pipelineactions.CodeBuildAction(action_name='CodeBuild',
                                                       project=project,
                                                       input=sourceOutput,
                                                       outputs=[buildOutput])

        pipeline = _codepipeline.Pipeline(self, "MyPipeline")

        source_stage = pipeline.add_stage(stage_name="Source",
                                          actions=[sourceAction])

        build_stage = pipeline.add_stage(stage_name="Build",
                                         actions=[buildAction])
Example #18
    def __init__(self,
                 scope: core.Construct,
                 id: str,
                 vpc: aws_ec2.Vpc,
                 ecs_cluster=aws_ecs.Cluster,
                 alb=elbv2.ApplicationLoadBalancer,
                 albTestListener=elbv2.ApplicationListener,
                 albProdListener=elbv2.ApplicationListener,
                 blueGroup=elbv2.ApplicationTargetGroup,
                 greenGroup=elbv2.ApplicationTargetGroup,
                 **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        ECS_APP_NAME = "Nginx-app",
        ECS_DEPLOYMENT_GROUP_NAME = "NginxAppECSBlueGreen"
        ECS_DEPLOYMENT_CONFIG_NAME = "CodeDeployDefault.ECSLinear10PercentEvery1Minutes"
        ECS_TASKSET_TERMINATION_WAIT_TIME = 10
        ECS_TASK_FAMILY_NAME = "Nginx-microservice"
        ECS_APP_NAME = "Nginx-microservice"
        ECS_APP_LOG_GROUP_NAME = "/ecs/Nginx-microservice"
        DUMMY_TASK_FAMILY_NAME = "sample-Nginx-microservice"
        DUMMY_APP_NAME = "sample-Nginx-microservice"
        DUMMY_APP_LOG_GROUP_NAME = "/ecs/sample-Nginx-microservice"
        DUMMY_CONTAINER_IMAGE = "smuralee/nginx"

        # =============================================================================
        # ECR and CodeCommit repositories for the Blue/ Green deployment
        # =============================================================================

        # ECR repository for the docker images
        NginxecrRepo = aws_ecr.Repository(self,
                                          "NginxRepo",
                                          image_scan_on_push=True)

        NginxCodeCommitrepo = aws_codecommit.Repository(
            self,
            "NginxRepository",
            repository_name=ECS_APP_NAME,
            description="Oussama application hosted on NGINX")

        # =============================================================================
        #   CODE BUILD and ECS TASK ROLES for the Blue/ Green deployment
        # =============================================================================

        # IAM role for the Code Build project
        codeBuildServiceRole = aws_iam.Role(
            self,
            "codeBuildServiceRole",
            assumed_by=aws_iam.ServicePrincipal('codebuild.amazonaws.com'))

        inlinePolicyForCodeBuild = aws_iam.PolicyStatement(
            effect=aws_iam.Effect.ALLOW,
            actions=[
                "ecr:GetAuthorizationToken", "ecr:BatchCheckLayerAvailability",
                "ecr:InitiateLayerUpload", "ecr:UploadLayerPart",
                "ecr:CompleteLayerUpload", "ecr:PutImage"
            ],
            resources=["*"])

        codeBuildServiceRole.add_to_policy(inlinePolicyForCodeBuild)

        # ECS task role
        ecsTaskRole = aws_iam.Role(
            self,
            "ecsTaskRoleForWorkshop",
            assumed_by=aws_iam.ServicePrincipal('ecs-tasks.amazonaws.com'))

        ecsTaskRole.add_managed_policy(
            aws_iam.ManagedPolicy.from_aws_managed_policy_name(
                "service-role/AmazonECSTaskExecutionRolePolicy"))

        # =============================================================================
        # CODE DEPLOY APPLICATION for the Blue/ Green deployment
        # =============================================================================

        # Creating the code deploy application
        codeDeployApplication = codedeploy.EcsApplication(
            self, "NginxAppCodeDeploy")

        # Creating the code deploy service role
        codeDeployServiceRole = aws_iam.Role(
            self,
            "codeDeployServiceRole",
            assumed_by=aws_iam.ServicePrincipal('codedeploy.amazonaws.com'))
        codeDeployServiceRole.add_managed_policy(
            aws_iam.ManagedPolicy.from_aws_managed_policy_name(
                "AWSCodeDeployRoleForECS"))

        # IAM role for custom lambda function
        customLambdaServiceRole = aws_iam.Role(
            self,
            "codeDeployCustomLambda",
            assumed_by=aws_iam.ServicePrincipal('lambda.amazonaws.com'))

        inlinePolicyForLambda = aws_iam.PolicyStatement(
            effect=aws_iam.Effect.ALLOW,
            actions=[
                "iam:PassRole", "sts:AssumeRole", "codedeploy:List*",
                "codedeploy:Get*", "codedeploy:UpdateDeploymentGroup",
                "codedeploy:CreateDeploymentGroup",
                "codedeploy:DeleteDeploymentGroup"
            ],
            resources=["*"])

        customLambdaServiceRole.add_managed_policy(
            aws_iam.ManagedPolicy.from_aws_managed_policy_name(
                'service-role/AWSLambdaBasicExecutionRole'))
        customLambdaServiceRole.add_to_policy(inlinePolicyForLambda)

        # Custom resource to create the deployment group
        createDeploymentGroupLambda = aws_lambda.Function(
            self,
            'createDeploymentGroupLambda',
            code=aws_lambda.Code.from_asset("custom_resources"),
            runtime=aws_lambda.Runtime.PYTHON_3_8,
            handler='create_deployment_group.handler',
            role=customLambdaServiceRole,
            description="Custom resource to create deployment group",
            memory_size=128,
            timeout=core.Duration.seconds(60))

        # ================================================================================================
        # CloudWatch Alarms for 4XX errors
        blue4xxMetric = aws_cloudwatch.Metric(
            namespace='AWS/ApplicationELB',
            metric_name='HTTPCode_Target_4XX_Count',
            dimensions={
                "TargetGroup": blueGroup.target_group_full_name,
                "LoadBalancer": alb.load_balancer_full_name
            },
            statistic="sum",
            period=core.Duration.minutes(1))

        blueGroupAlarm = aws_cloudwatch.Alarm(
            self,
            "blue4xxErrors",
            alarm_name="Blue_4xx_Alarm",
            alarm_description=
            "CloudWatch Alarm for the 4xx errors of Blue target group",
            metric=blue4xxMetric,
            threshold=1,
            evaluation_periods=1)

        green4xxMetric = aws_cloudwatch.Metric(
            namespace='AWS/ApplicationELB',
            metric_name='HTTPCode_Target_4XX_Count',
            dimensions={
                "TargetGroup": greenGroup.target_group_full_name,
                "LoadBalancer": alb.load_balancer_full_name
            },
            statistic="sum",
            period=core.Duration.minutes(1))
        greenGroupAlarm = aws_cloudwatch.Alarm(
            self,
            "green4xxErrors",
            alarm_name="Green_4xx_Alarm",
            alarm_description=
            "CloudWatch Alarm for the 4xx errors of Green target group",
            metric=green4xxMetric,
            threshold=1,
            evaluation_periods=1)

        # ================================================================================================
        # DUMMY TASK DEFINITION for the initial service creation
        # This is required for the service being made available to create the CodeDeploy Deployment Group
        # ================================================================================================
        sampleTaskDefinition = aws_ecs.FargateTaskDefinition(
            self,
            "sampleTaskDefn",
            family=DUMMY_TASK_FAMILY_NAME,
            cpu=256,
            memory_limit_mib=1024,
            task_role=ecsTaskRole,
            execution_role=ecsTaskRole)

        sampleContainerDefn = sampleTaskDefinition.add_container(
            "sampleAppContainer",
            image=aws_ecs.ContainerImage.from_registry(DUMMY_CONTAINER_IMAGE),
            logging=aws_ecs.AwsLogDriver(log_group=aws_logs.LogGroup(
                self,
                "sampleAppLogGroup",
                log_group_name=DUMMY_APP_LOG_GROUP_NAME,
                removal_policy=core.RemovalPolicy.DESTROY),
                                         stream_prefix=DUMMY_APP_NAME),
            docker_labels={"name": DUMMY_APP_NAME})

        port_mapping = aws_ecs.PortMapping(container_port=80,
                                           protocol=aws_ecs.Protocol.TCP)

        sampleContainerDefn.add_port_mappings(port_mapping)

        # ================================================================================================
        # ECS task definition using ECR image
        # Will be used by the CODE DEPLOY for Blue/Green deployment
        # ================================================================================================
        NginxTaskDefinition = aws_ecs.FargateTaskDefinition(
            self,
            "appTaskDefn",
            family=ECS_TASK_FAMILY_NAME,
            cpu=256,
            memory_limit_mib=1024,
            task_role=ecsTaskRole,
            execution_role=ecsTaskRole)

        NginxcontainerDefinition = NginxTaskDefinition.add_container(
            "NginxAppContainer",
            image=aws_ecs.ContainerImage.from_ecr_repository(
                NginxecrRepo, "latest"),
            logging=aws_ecs.AwsLogDriver(log_group=aws_logs.LogGroup(
                self,
                "NginxAppLogGroup",
                log_group_name=ECS_APP_LOG_GROUP_NAME,
                removal_policy=core.RemovalPolicy.DESTROY),
                                         stream_prefix=ECS_APP_NAME),
            docker_labels={"name": ECS_APP_NAME})
        NginxcontainerDefinition.add_port_mappings(port_mapping)

        # =============================================================================
        # ECS SERVICE for the Blue/ Green deployment
        # =============================================================================
        NginxAppService = aws_ecs.FargateService(
            self,
            "NginxAppService",
            cluster=ecs_cluster,
            task_definition=NginxTaskDefinition,
            health_check_grace_period=core.Duration.seconds(10),
            desired_count=3,
            deployment_controller={
                "type": aws_ecs.DeploymentControllerType.CODE_DEPLOY
            },
            service_name=ECS_APP_NAME)

        NginxAppService.connections.allow_from(alb, aws_ec2.Port.tcp(80))
        NginxAppService.connections.allow_from(alb, aws_ec2.Port.tcp(8080))
        NginxAppService.attach_to_application_target_group(blueGroup)

        # =============================================================================
        # CODE DEPLOY - Deployment Group CUSTOM RESOURCE for the Blue/ Green deployment
        # =============================================================================

        core.CustomResource(
            self,
            'customEcsDeploymentGroup',
            service_token=createDeploymentGroupLambda.function_arn,
            properties={
                "ApplicationName": codeDeployApplication.application_name,
                "DeploymentGroupName": ECS_DEPLOYMENT_GROUP_NAME,
                "DeploymentConfigName": ECS_DEPLOYMENT_CONFIG_NAME,
                "ServiceRoleArn": codeDeployServiceRole.role_arn,
                "BlueTargetGroup": blueGroup.target_group_name,
                "GreenTargetGroup": greenGroup.target_group_name,
                "ProdListenerArn": albProdListener.listener_arn,
                "TestListenerArn": albTestListener.listener_arn,
                "EcsClusterName": ecs_cluster.cluster_name,
                "EcsServiceName": NginxAppService.service_name,
                "TerminationWaitTime": ECS_TASKSET_TERMINATION_WAIT_TIME,
                "BlueGroupAlarm": blueGroupAlarm.alarm_name,
                "GreenGroupAlarm": greenGroupAlarm.alarm_name,
            })

        ecsDeploymentGroup = codedeploy.EcsDeploymentGroup.from_ecs_deployment_group_attributes(
            self,
            "ecsDeploymentGroup",
            application=codeDeployApplication,
            deployment_group_name=ECS_DEPLOYMENT_GROUP_NAME,
            deployment_config=codedeploy.EcsDeploymentConfig.
            from_ecs_deployment_config_name(self, "ecsDeploymentConfig",
                                            ECS_DEPLOYMENT_CONFIG_NAME))

        # =============================================================================
        # CODE BUILD PROJECT for the Blue/ Green deployment
        # =============================================================================

        # Creating the code build project
        NginxAppcodebuild = aws_codebuild.Project(
            self,
            "NginxAppCodeBuild",
            role=codeBuildServiceRole,
            environment=aws_codebuild.BuildEnvironment(
                build_image=aws_codebuild.LinuxBuildImage.STANDARD_4_0,
                compute_type=aws_codebuild.ComputeType.SMALL,
                privileged=True,
                environment_variables={
                    'REPOSITORY_URI': {
                        'value':
                        NginxecrRepo.repository_uri,
                        'type':
                        aws_codebuild.BuildEnvironmentVariableType.PLAINTEXT
                    },
                    'TASK_EXECUTION_ARN': {
                        'value':
                        ecsTaskRole.role_arn,
                        'type':
                        aws_codebuild.BuildEnvironmentVariableType.PLAINTEXT
                    },
                    'TASK_FAMILY': {
                        'value':
                        ECS_TASK_FAMILY_NAME,
                        'type':
                        aws_codebuild.BuildEnvironmentVariableType.PLAINTEXT
                    }
                }),
            source=aws_codebuild.Source.code_commit(
                repository=NginxCodeCommitrepo))

        # =============================================================================
        # CODE PIPELINE for Blue/Green ECS deployment
        # =============================================================================

        codePipelineServiceRole = aws_iam.Role(
            self,
            "codePipelineServiceRole",
            assumed_by=aws_iam.ServicePrincipal('codepipeline.amazonaws.com'))

        inlinePolicyForCodePipeline = aws_iam.PolicyStatement(
            effect=aws_iam.Effect.ALLOW,
            actions=[
                "iam:PassRole", "sts:AssumeRole", "codecommit:Get*",
                "codecommit:List*", "codecommit:GitPull",
                "codecommit:UploadArchive", "codecommit:CancelUploadArchive",
                "codebuild:BatchGetBuilds", "codebuild:StartBuild",
                "codedeploy:CreateDeployment", "codedeploy:Get*",
                "codedeploy:RegisterApplicationRevision", "s3:Get*",
                "s3:List*", "s3:PutObject"
            ],
            resources=["*"])

        codePipelineServiceRole.add_to_policy(inlinePolicyForCodePipeline)

        sourceArtifact = codepipeline.Artifact('sourceArtifact')
        buildArtifact = codepipeline.Artifact('buildArtifact')

        # S3 bucket for storing the code pipeline artifacts
        NginxAppArtifactsBucket = s3.Bucket(
            self,
            "NginxAppArtifactsBucket",
            encryption=s3.BucketEncryption.S3_MANAGED,
            block_public_access=s3.BlockPublicAccess.BLOCK_ALL)

        # S3 bucket policy for the code pipeline artifacts
        denyUnEncryptedObjectUploads = aws_iam.PolicyStatement(
            effect=aws_iam.Effect.DENY,
            actions=["s3:PutObject"],
            principals=[aws_iam.AnyPrincipal()],
            resources=[NginxAppArtifactsBucket.bucket_arn + "/*"],
            conditions={
                "StringNotEquals": {
                    "s3:x-amz-server-side-encryption": "aws:kms"
                }
            })

        denyInsecureConnections = aws_iam.PolicyStatement(
            effect=aws_iam.Effect.DENY,
            actions=["s3:*"],
            principals=[aws_iam.AnyPrincipal()],
            resources=[NginxAppArtifactsBucket.bucket_arn + "/*"],
            conditions={"Bool": {
                "aws:SecureTransport": "false"
            }})

        NginxAppArtifactsBucket.add_to_resource_policy(
            denyUnEncryptedObjectUploads)
        NginxAppArtifactsBucket.add_to_resource_policy(denyInsecureConnections)

        # Code Pipeline - CloudWatch trigger event is created by CDK
        codepipeline.Pipeline(
            self,
            "ecsBlueGreen",
            role=codePipelineServiceRole,
            artifact_bucket=NginxAppArtifactsBucket,
            stages=[
                codepipeline.StageProps(
                    stage_name='Source',
                    actions=[
                        aws_codepipeline_actions.CodeCommitSourceAction(
                            action_name='Source',
                            repository=NginxCodeCommitrepo,
                            output=sourceArtifact,
                        )
                    ]),
                codepipeline.StageProps(
                    stage_name='Build',
                    actions=[
                        aws_codepipeline_actions.CodeBuildAction(
                            action_name='Build',
                            project=NginxAppcodebuild,
                            input=sourceArtifact,
                            outputs=[buildArtifact])
                    ]),
                codepipeline.StageProps(
                    stage_name='Deploy',
                    actions=[
                        aws_codepipeline_actions.CodeDeployEcsDeployAction(
                            action_name='Deploy',
                            deployment_group=ecsDeploymentGroup,
                            app_spec_template_input=buildArtifact,
                            task_definition_template_input=buildArtifact,
                        )
                    ])
            ])

        # =============================================================================
        # Export the outputs
        # =============================================================================
        core.CfnOutput(self,
                       "ecsBlueGreenCodeRepo",
                       description="Demo app code commit repository",
                       export_name="ecsBlueGreenDemoAppRepo",
                       value=NginxCodeCommitrepo.repository_clone_url_http)

        core.CfnOutput(self,
                       "ecsBlueGreenLBDns",
                       description="Load balancer DNS",
                       export_name="ecsBlueGreenLBDns",
                       value=alb.load_balancer_dns_name)
Example #19
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        uri = self.account + '.dkr.ecr.' + self.region + '.amazonaws.com'
        appl = 'colorteller'
        buildspec = {
            'version': '0.2',
            'phases': {
                'install': {
                    'commands': ['echo install step']
                },
                'pre_build': {
                    'commands': [
                        'echo logging in to AWS ECR...',
                        '$(aws ecr get-login --no-include-email --region %s)' %
                        self.region
                    ]
                },
                'build': {
                    'commands': [
                        'echo building Docker image...',
                        'cd appmeshdemo/colorapp/%s' % appl,
                        'docker build -t %s:latest .' % appl,
                        'docker tag %s:latest %s/%s:latest' % (appl, uri, appl)
                    ]
                },
                'post_build': {
                    'commands': [
                        'echo Docker image build complete!',
                        'echo push latest Docker images to ECR...',
                        'docker push %s/%s:latest' % (uri, appl)
                    ]
                }
            }
        }

        buildenviron = codebuild.BuildEnvironment(
            privileged=True,
            build_image=codebuild.LinuxBuildImage.UBUNTU_14_04_DOCKER_18_09_0,
            environment_variables={
                'AWS_DEFAULT_REGION':
                codebuild.BuildEnvironmentVariable(value=self.region),
                'AWS_ACCOUNT_ID':
                codebuild.BuildEnvironmentVariable(value=self.account),
                'IMAGE_REPO_NAME':
                codebuild.BuildEnvironmentVariable(value=appl),
                'IMAGE_TAG':
                codebuild.BuildEnvironmentVariable(value='latest')
            })

        proj = codebuild.Project(
            self,
            appl,
            build_spec=codebuild.BuildSpec.from_object(buildspec),
            environment=buildenviron)
        call = custom.AwsSdkCall(service='CodeBuild',
                                 action='startBuild',
                                 parameters={'projectName': proj.project_name},
                                 physical_resource_id='Custom%s' %
                                 proj.project_name)

        custom.AwsCustomResource(self,
                                 'CustomCodeBuild',
                                 on_create=call,
                                 on_update=call)
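AwsCustomResource also accepts an explicit policy controlling which SDK calls the backing Lambda may make; a sketch of the same call with that argument added (from_sdk_calls with ANY_RESOURCE is the broadest option; scoping it down is preferable in practice):

        custom.AwsCustomResource(
            self,
            'CustomCodeBuild',
            on_create=call,
            on_update=call,
            policy=custom.AwsCustomResourcePolicy.from_sdk_calls(
                resources=custom.AwsCustomResourcePolicy.ANY_RESOURCE))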
Example #20
    def __init__(self, scope: core.Construct, id: str, ecr_repo_name: str,
                 spec_file_path: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        # Define some variables that will be commonly used
        S3_PROD_BUCKET = "{}-{}-prod-bucket".format(AWS_ACCOUNT, id)
        S3_PR_BUCKET = "{}-{}-pr-bucket".format(AWS_ACCOUNT, id)
        CLOUDWATCH_LOGS = "{}-{}-cw-logs".format(AWS_ACCOUNT, id)

        # Define CodeBuild resource.
        git_hub_source = codebuild.Source.git_hub(
            owner=GITHUB_REPO_OWNER,
            repo=GITHUB_REPO_NAME,
            webhook=True,
            webhook_filters=[
                codebuild.FilterGroup.in_event_of(
                    codebuild.EventAction.PULL_REQUEST_CREATED,
                    codebuild.EventAction.PULL_REQUEST_UPDATED,
                    codebuild.EventAction.PULL_REQUEST_REOPENED)
            ],
            clone_depth=1)

        # Define an IAM role for this stack.
        code_build_batch_policy = iam.PolicyDocument.from_json(
            code_build_batch_policy_in_json([id]))
        ec2_bm_framework_policy = iam.PolicyDocument.from_json(
            ec2_bm_framework_policies_in_json())
        ssm_bm_framework_policy = iam.PolicyDocument.from_json(
            ssm_bm_framework_policies_in_json())
        s3_read_write_policy_prod_bucket = iam.PolicyDocument.from_json(
            s3_read_write_policy_in_json(S3_PROD_BUCKET))
        s3_read_write_policy_pr_bucket = iam.PolicyDocument.from_json(
            s3_read_write_policy_in_json(S3_PR_BUCKET))
        s3_bm_framework_policy_prod_bucket = iam.PolicyDocument.from_json(
            s3_bm_framework_policies_in_json(S3_PROD_BUCKET))
        s3_bm_framework_policy_pr_bucket = iam.PolicyDocument.from_json(
            s3_bm_framework_policies_in_json(S3_PR_BUCKET))
        codebuild_inline_policies = {
            "code_build_batch_policy": code_build_batch_policy,
            "ec2_bm_framework_policy": ec2_bm_framework_policy,
            "ssm_bm_framework_policy": ssm_bm_framework_policy,
            "s3_read_write_policy_prod_bucket":
            s3_read_write_policy_prod_bucket,
            "s3_read_write_policy_pr_bucket": s3_read_write_policy_pr_bucket,
            "s3_bm_framework_policy_prod_bucket":
            s3_bm_framework_policy_prod_bucket,
            "s3_bm_framework_policy_pr_bucket":
            s3_bm_framework_policy_pr_bucket
        }
        codebuild_role = iam.Role(
            scope=self,
            id="{}-codebuild-role".format(id),
            assumed_by=iam.ServicePrincipal("codebuild.amazonaws.com"),
            inline_policies=codebuild_inline_policies,
            managed_policies=[
                iam.ManagedPolicy.from_aws_managed_policy_name(
                    "CloudWatchAgentServerPolicy")
            ])

        # Create build spec.
        placeholder_map = {"ECR_REPO_PLACEHOLDER": ecr_arn(ecr_repo_name)}
        build_spec_content = YmlLoader.load(spec_file_path, placeholder_map)

        # Define CodeBuild.
        project = codebuild.Project(
            scope=self,
            id=id,
            project_name=id,
            source=git_hub_source,
            role=codebuild_role,
            timeout=core.Duration.minutes(180),
            environment=codebuild.BuildEnvironment(
                compute_type=codebuild.ComputeType.SMALL,
                privileged=False,
                build_image=codebuild.LinuxBuildImage.STANDARD_4_0),
            build_spec=codebuild.BuildSpec.from_object(build_spec_content))

        # Add 'BuildBatchConfig' property, which is not supported in CDK.
        # CDK raw overrides: https://docs.aws.amazon.com/cdk/latest/guide/cfn_layer.html#cfn_layer_raw
        # https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-codebuild-project.html#aws-resource-codebuild-project-properties
        cfn_build = project.node.default_child
        cfn_build.add_override("Properties.BuildBatchConfig", {
            "ServiceRole": codebuild_role.role_arn,
            "TimeoutInMins": 180
        })

        # use boto3 to determine if a bucket with the name that we want exists, and if it doesn't, create it
        s3_res = boto3.resource('s3')
        prod_bucket = s3_res.Bucket(S3_PROD_BUCKET)
        pr_bucket = s3_res.Bucket(S3_PR_BUCKET)
        try:
            s3_res.meta.client.head_bucket(Bucket=prod_bucket.name)
        except ClientError:
            production_results_s3 = s3.Bucket(self,
                                              "{}-prod-bucket".format(id),
                                              bucket_name=S3_PROD_BUCKET,
                                              enforce_ssl=True)

            production_results_s3.grant_put(codebuild_role)

        try:
            s3_res.meta.client.head_bucket(Bucket=pr_bucket.name)
        except ClientError:
            pr_results_s3 = s3.Bucket(self,
                                      "{}-pr-bucket".format(id),
                                      bucket_name=S3_PR_BUCKET,
                                      enforce_ssl=True)

            pr_results_s3.grant_put(codebuild_role)

        # use boto3 to determine if a cloudwatch logs group with the name we want exists, and if it doesn't, create it
        logs_client = boto3.client('logs')
        existing_groups = logs_client.describe_log_groups(
            logGroupNamePrefix=CLOUDWATCH_LOGS).get('logGroups', [])
        # describe_log_groups returns an empty list (rather than raising) when no group matches
        if not existing_groups:
            # define CloudWatch Logs groups
            logs.LogGroup(self,
                          "{}-cw-logs".format(id),
                          log_group_name=CLOUDWATCH_LOGS)
Example #21
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        # Define CodeBuild resource.
        git_hub_source = codebuild.Source.git_hub(
            owner=GITHUB_REPO_OWNER,
            repo=GITHUB_REPO_NAME,
            webhook=False,
            branch_or_ref=GITHUB_SOURCE_VERSION,
            clone_depth=1)

        # Define a role.
        code_build_batch_policy = iam.PolicyDocument.from_json(
            code_build_batch_policy_in_json([id]))
        ecr_repo_names = [LINUX_AARCH_ECR_REPO, LINUX_X86_ECR_REPO]
        ecr_power_user_policy = iam.PolicyDocument.from_json(
            ecr_power_user_policy_in_json(ecr_repo_names))
        inline_policies = {
            "code_build_batch_policy": code_build_batch_policy,
            "ecr_power_user_policy": ecr_power_user_policy
        }
        role = iam.Role(
            scope=self,
            id="{}-role".format(id),
            assumed_by=iam.ServicePrincipal("codebuild.amazonaws.com"),
            inline_policies=inline_policies)

        # Create build spec.
        build_spec_content = YmlLoader.load(
            "./cdk/codebuild/linux_img_build_omnibus.yaml")

        # Define environment variables.
        environment_variables = {
            "AWS_ACCOUNT_ID":
            codebuild.BuildEnvironmentVariable(value=AWS_ACCOUNT),
            "AWS_ECR_REPO_X86":
            codebuild.BuildEnvironmentVariable(value=LINUX_X86_ECR_REPO),
            "AWS_ECR_REPO_AARCH":
            codebuild.BuildEnvironmentVariable(value=LINUX_AARCH_ECR_REPO),
            "GITHUB_REPO_OWNER":
            codebuild.BuildEnvironmentVariable(value=GITHUB_REPO_OWNER),
        }

        # Define VPC
        vpc = ec2.Vpc(self, id="{}-ec2-vpc".format(id))

        # Define CodeBuild project.
        project = codebuild.Project(
            scope=self,
            id=id,
            vpc=vpc,
            project_name=id,
            source=git_hub_source,
            environment=codebuild.BuildEnvironment(
                compute_type=codebuild.ComputeType.SMALL,
                privileged=False,
                build_image=codebuild.LinuxBuildImage.STANDARD_4_0),
            environment_variables=environment_variables,
            role=role,
            timeout=core.Duration.minutes(120),
            build_spec=codebuild.BuildSpec.from_object(build_spec_content))

        # Add 'BuildBatchConfig' property, which is not supported in CDK.
        # CDK raw overrides: https://docs.aws.amazon.com/cdk/latest/guide/cfn_layer.html#cfn_layer_raw
        # https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-codebuild-project.html#aws-resource-codebuild-project-properties
        cfn_build = project.node.default_child
        cfn_build.add_override("Properties.BuildBatchConfig", {
            "ServiceRole": role.role_arn,
            "TimeoutInMins": 120
        })
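        # The raw override above can also be written with add_property_override,
        # which is equivalent to add_override with a "Properties." prefix
        # (shown commented out as an illustrative alternative, not a second call):
        # cfn_build.add_property_override("BuildBatchConfig", {
        #     "ServiceRole": role.role_arn,
        #     "TimeoutInMins": 120
        # })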
Exemple #22
    def __init__(self, app: cdk.App, id: str, apps: list, **kwargs) -> None:
        super().__init__(app, id)

        env = pu.PolicyUtils.current_env(self)
        uri = env['account'] + '.dkr.ecr.' + env['region'] + '.amazonaws.com'

        pd = pu.PolicyUtils.createpolicyfromfile(
            './appmeshdemo/policydocs/codedeployecr.json')
        cbrole = iam.Role(self,
                          'CodeBuildECRRole',
                          assumed_by=iam.ServicePrincipal('codebuild.amazonaws.com'),
                          inline_policies={'codedeployecr': pd})

        # Create the repositories (keep a handle to each one on the stack).
        self._repos = {}
        cnt = 1
        for appl in apps:
            repo = ecr.Repository(scope=self,
                                  id=id + appl,
                                  repository_name=appl)
            self._repos[appl] = repo

            be = codebuild.BuildEnvironment(
                privileged=True,
                build_image=codebuild.LinuxBuildImage.UBUNTU_14_04_DOCKER_18_09_0,
                environment_variables={
                    'AWS_DEFAULT_REGION':
                    codebuild.BuildEnvironmentVariable(value=env['region']),
                    'AWS_ACCOUNT_ID':
                    codebuild.BuildEnvironmentVariable(value=env['account']),
                    'IMAGE_REPO_NAME':
                    codebuild.BuildEnvironmentVariable(value=appl),
                    'IMAGE_TAG':
                    codebuild.BuildEnvironmentVariable(value='latest')
                })
            buildspec = {
                'version': '0.2',
                'phases': {
                    'install': {
                        'commands': ['echo install step']
                    },
                    'pre_build': {
                        'commands': [
                            'echo logging in to AWS ECR...',
                            '$(aws ecr get-login --no-include-email --region %s)'
                            % env['region']
                        ]
                    },
                    'build': {
                        'commands': [
                            'echo building Docker image...',
                            'cd appmeshdemo/colorapp/%s' % appl,
                            'docker build -t %s:latest .' % appl,
                            'docker tag %s:latest %s/%s:latest' %
                            (appl, uri, appl)
                        ]
                    },
                    'post_build': {
                        'commands': [
                            'echo Docker image build complete!',
                            'echo push latest Docker images to ECR...',
                            'docker push %s/%s:latest' % (uri, appl)
                        ]
                    }
                }
            }

            # Create the build project in CodeBuild
            proj = codebuild.Project(
                self,
                appl,
                environment=be,
                role=cbrole,
                build_spec=codebuild.BuildSpec.from_object(buildspec),
                source=codebuild.Source.git_hub(repo='appmeshdemo',
                                                owner='fitzee'))

            # Create a custom CloudFormation resource to start the build
            call = cfn.AwsSdkCall(
                service='CodeBuild',
                action='startBuild',
                parameters={'projectName': proj.project_name},
                physical_resource_id='Custom%s' % proj.project_name)
            cfn.AwsCustomResource(self,
                                  'CustomCodebuild%s' % cnt,
                                  on_create=call,
                                  on_update=call)
            cnt = cnt + 1
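# The AwsCustomResource above triggers the first build during deployment. The
# same CodeBuild API call can be made directly with boto3 outside of
# CloudFormation (a sketch; the project name argument is illustrative):
import boto3


def start_initial_build(project_name: str) -> str:
    """Kick off a CodeBuild run for the given project and return the build id."""
    response = boto3.client("codebuild").start_build(projectName=project_name)
    return response["build"]["id"]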
    def __init__(self, scope: core.Construct, id: str, x86_ecr_repo_name: str,
                 arm_ecr_repo_name: str, spec_file_path: str,
                 **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        # Define CodeBuild resource.
        git_hub_source = codebuild.Source.git_hub(
            owner=GITHUB_REPO_OWNER,
            repo=GITHUB_REPO_NAME,
            webhook=True,
            webhook_filters=[
                codebuild.FilterGroup.in_event_of(
                    codebuild.EventAction.PULL_REQUEST_CREATED,
                    codebuild.EventAction.PULL_REQUEST_UPDATED,
                    codebuild.EventAction.PULL_REQUEST_REOPENED)
            ],
            clone_depth=1)

        # Define an IAM role for this stack.
        code_build_batch_policy = iam.PolicyDocument.from_json(
            code_build_batch_policy_in_json([id]))
        fuzz_policy = iam.PolicyDocument.from_json(
            code_build_publish_metrics_in_json())
        inline_policies = {
            "code_build_batch_policy": code_build_batch_policy,
            "fuzz_policy": fuzz_policy
        }
        role = iam.Role(
            scope=self,
            id="{}-role".format(id),
            assumed_by=iam.ServicePrincipal("codebuild.amazonaws.com"),
            inline_policies=inline_policies)

        # Create the VPC for EFS and CodeBuild
        public_subnet = ec2.SubnetConfiguration(
            name="PublicFuzzingSubnet", subnet_type=ec2.SubnetType.PUBLIC)
        private_subnet = ec2.SubnetConfiguration(
            name="PrivateFuzzingSubnet", subnet_type=ec2.SubnetType.PRIVATE)

        # Create a VPC with a single public and a single private subnet in one AZ, so that idle NAT gateways do not
        # use up the account's Elastic IP limit.
        fuzz_vpc = ec2.Vpc(
            scope=self,
            id="{}-FuzzingVPC".format(id),
            subnet_configuration=[public_subnet, private_subnet],
            max_azs=1)
        build_security_group = ec2.SecurityGroup(
            scope=self, id="{}-FuzzingSecurityGroup".format(id), vpc=fuzz_vpc)

        build_security_group.add_ingress_rule(
            peer=build_security_group,
            connection=ec2.Port.all_traffic(),
            description="Allow all traffic inside security group")

        efs_subnet_selection = ec2.SubnetSelection(
            subnet_type=ec2.SubnetType.PRIVATE)

        # Create the EFS filesystem that stores the corpus and logs. New EFS filesystems can burst to 100 MB/s for
        # the first 2 TB of data read/written; after that the rate is limited by the size of the filesystem. As of
        # late 2021 our corpus is less than one GB, which would cap all reads and writes at the minimum 1 MB/s, so
        # use the Provisioned throughput option to let fuzzing finish in a reasonable amount of time. For now this
        # provisions 100 MB/s, matching what was used for 2021: EFS metrics during late-2021 fuzz runs showed
        # 4-22 MB/s of transfers, so 100 MB/s leaves plenty of headroom and lets ~4-5 fuzz runs start at the same
        # time without issue.
        # https://docs.aws.amazon.com/efs/latest/ug/performance.html
        fuzz_filesystem = efs.FileSystem(
            scope=self,
            id="{}-FuzzingEFS".format(id),
            file_system_name="AWS-LC-Fuzz-Corpus",
            enable_automatic_backups=True,
            encrypted=True,
            security_group=build_security_group,
            vpc=fuzz_vpc,
            vpc_subnets=efs_subnet_selection,
            performance_mode=efs.PerformanceMode.GENERAL_PURPOSE,
            throughput_mode=efs.ThroughputMode.PROVISIONED,
            provisioned_throughput_per_second=core.Size.mebibytes(100),
        )

        # Create build spec.
        placeholder_map = {
            "X86_ECR_REPO_PLACEHOLDER": ecr_arn(x86_ecr_repo_name),
            "ARM_ECR_REPO_PLACEHOLDER": ecr_arn(arm_ecr_repo_name)
        }
        build_spec_content = YmlLoader.load(spec_file_path, placeholder_map)

        # Define CodeBuild.
        fuzz_codebuild = codebuild.Project(
            scope=self,
            id="FuzzingCodeBuild",
            project_name=id,
            source=git_hub_source,
            role=role,
            timeout=core.Duration.minutes(120),
            environment=codebuild.BuildEnvironment(
                compute_type=codebuild.ComputeType.LARGE,
                privileged=True,
                build_image=codebuild.LinuxBuildImage.STANDARD_4_0),
            build_spec=codebuild.BuildSpec.from_object(build_spec_content),
            vpc=fuzz_vpc,
            security_groups=[build_security_group])

        # TODO: add build type BUILD_BATCH when CFN finishes the feature release. See CryptoAlg-575.

        # Add 'BuildBatchConfig' property, which is not supported in CDK.
        # CDK raw overrides: https://docs.aws.amazon.com/cdk/latest/guide/cfn_layer.html#cfn_layer_raw
        # https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-codebuild-project.html#aws-resource-codebuild-project-properties
        cfn_codebuild = fuzz_codebuild.node.default_child
        cfn_codebuild.add_override("Properties.BuildBatchConfig", {
            "ServiceRole": role.role_arn,
            "TimeoutInMins": 120
        })

        # The EFS identifier needs to match tests/ci/common_fuzz.sh; CodeBuild defines an environment variable named
        # codebuild_$identifier.
        # https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codebuild-project-projectfilesystemlocation.html
        #
        # TODO: add this to the CDK project above when it supports EfsFileSystemLocation
        cfn_codebuild.add_override("Properties.FileSystemLocations", [{
            "Identifier":
            "fuzzing_root",
            "Location":
            "%s.efs.%s.amazonaws.com:/" %
            (fuzz_filesystem.file_system_id, AWS_REGION),
            "MountPoint":
            "/efs_fuzzing_root",
            "Type":
            "EFS"
        }])
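# YmlLoader is a project helper that is not shown in these examples. A minimal
# sketch of a placeholder-substituting loader with the same call signature (an
# assumption about its behaviour, not the real implementation; requires PyYAML):
import yaml


class YmlLoaderSketch:
    @staticmethod
    def load(file_path: str, placeholder_map: dict = None) -> dict:
        """Read a buildspec YAML file, substitute placeholder tokens, and return it as a dict."""
        with open(file_path) as yml_file:
            content = yml_file.read()
        for placeholder, value in (placeholder_map or {}).items():
            content = content.replace(placeholder, value)
        return yaml.safe_load(content)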
    def __init__(self, scope: core.Construct, id: str, artifactbucket,
                 **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        prj_name = self.node.try_get_context("project_name")
        env_name = self.node.try_get_context("env")

        artifact_bucket = s3.Bucket.from_bucket_name(self, 'artifactbucket',
                                                     artifactbucket)
        github_token = core.SecretValue.secrets_manager(
            secret_id=f'{env_name}/github-token', json_field='github-token')

        pipeline = cp.Pipeline(
            self,
            'backend-pipeline',
            pipeline_name=f'{env_name}-{prj_name}-backend-pipeline',
            artifact_bucket=artifact_bucket,
            restart_execution_on_update=False,
        )

        source_output = cp.Artifact(artifact_name='source')
        build_output = cp.Artifact(artifact_name='build')

        build_project = cb.Project(
            self,
            'buildproject',
            project_name=f'{env_name}-{prj_name}-build-project',
            description='package lambda functions',
            environment=cb.BuildEnvironment(
                build_image=cb.LinuxBuildImage.STANDARD_3_0,
                environment_variables={
                    'ENV': cb.BuildEnvironmentVariable(value='dev'),
                    'PRJ': cb.BuildEnvironmentVariable(value=prj_name),
                    'STAGE': cb.BuildEnvironmentVariable(value='dev')
                }),
            build_spec=cb.BuildSpec.from_object({
                'version': '0.2',
                'phases': {
                    'install': {
                        'commands': [
                            'echo --INSTALL PHASE--',
                            'npm install --silent --no-progress serverless -g'
                        ]
                    },
                    'pre_build': {
                        'commands': [
                            'echo --PRE BUILD PHASE--',
                            'npm install --silent --no-progress'
                        ]
                    },
                    'build': {
                        'commands': [
                            'echo --BUILD PHASE--',
                            'serverless deploy -s $STAGE'
                        ]
                    }
                },
                'artifacts': {
                    'files': ["**/*"],
                    'base-directory': '.serverless'
                }
            }))

        pipeline.add_stage(stage_name='Source',
                           actions=[
                               cp_actions.GitHubSourceAction(
                                   oauth_token=github_token,
                                   output=source_output,
                                   repo='serverless_hello_world',
                                   branch='master',
                                   owner='manrodri',
                                   action_name='GitHubSource')
                           ])

        pipeline.add_stage(stage_name='Deploy',
                           actions=[
                               cp_actions.CodeBuildAction(
                                   action_name='DeployToDev',
                                   input=source_output,
                                   project=build_project,
                                   outputs=[build_output])
                           ])

        # build_project.role.add_to_policy(iam.PolicyStatement(
        #     actions=['cloudformation:*', 's3:*', 'iam:*', 'lambda:*', 'apigateway:*'],
        #     resources=["*"]
        # ))

        build_project.role.add_managed_policy(
            iam.ManagedPolicy.from_aws_managed_policy_name(
                'AdministratorAccess'))

        account_id = core.Aws.ACCOUNT_ID
        region = core.Aws.REGION

        ## ssm params
        ssm.StringParameter(self,
                            'account-id',
                            parameter_name=f'/{env_name}/account-id',
                            string_value=account_id)
        ssm.StringParameter(self,
                            'region',
                            parameter_name=f'/{env_name}/region',
                            string_value=region)
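# The two SSM parameters above can be resolved from another stack at synth
# time (a sketch of a hypothetical consuming stack; the class and output names
# are assumptions):
from aws_cdk import core
from aws_cdk import aws_ssm as ssm


class ConsumerStack(core.Stack):
    def __init__(self, scope: core.Construct, id: str, env_name: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)
        # Resolves to the value the pipeline stack stored under /{env_name}/account-id.
        account_id = ssm.StringParameter.value_for_string_parameter(
            self, parameter_name=f'/{env_name}/account-id')
        core.CfnOutput(self, 'shared-account-id', value=account_id)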
    def __init__(self, scope: cdk.Construct, construct_id: str,
                 **kwargs) -> None:
        super().__init__(scope, construct_id, **kwargs)

        # The code that defines your stack goes here

        vpc = _ec2.Vpc(self,
                       "ecs-vpc",
                       cidr="10.0.0.0/16",
                       nat_gateways=1,
                       max_azs=3)

        clusterAdmin = _iam.Role(self,
                                 "AdminRole",
                                 assumed_by=_iam.AccountRootPrincipal())

        cluster = _ecs.Cluster(self, "ecs-cluster", vpc=vpc)

        logging = _ecs.AwsLogDriver(stream_prefix="ecs-logs")

        taskRole = _iam.Role(
            self,
            f"ecs-taskRole-{cdk.Stack.stack_name}",
            role_name=f"ecs-taskRole-{cdk.Stack.stack_name}",
            assumed_by=_iam.ServicePrincipal("ecs-tasks.amazonaws.com"))

        # ECS Contructs

        executionRolePolicy = _iam.PolicyStatement(
            effect=_iam.Effect.ALLOW,
            resources=['*'],
            actions=[
                "ecr:GetAuthorizationToken", "ecr:BatchCheckLayerAvailability",
                "ecr:GetDownloadUrlForLayer", "ecr:BatchGetImage",
                "logs:CreateLogStream", "logs:PutLogEvents"
            ])

        taskDef = _ecs.FargateTaskDefinition(self,
                                             "ecs-taskdef",
                                             task_role=taskRole)

        taskDef.add_to_execution_role_policy(executionRolePolicy)

        container = taskDef.add_container(
            'flask-app',
            image=_ecs.ContainerImage.from_registry(
                "nikunjv/flask-image:blue"),
            memory_limit_mib=256,
            cpu=256,
            logging=logging)

        container.add_port_mappings(
            _ecs.PortMapping(container_port=5000, protocol=_ecs.Protocol.TCP))

        fargateService = ecs_patterns.ApplicationLoadBalancedFargateService(
            self,
            "ecs-service",
            cluster=cluster,
            task_definition=taskDef,
            public_load_balancer=True,
            desired_count=3,
            listener_port=80)

        scaling = fargateService.service.auto_scale_task_count(max_capacity=6)

        scaling.scale_on_cpu_utilization(
            "CpuScaling",
            target_utilization_percent=10,
            scale_in_cooldown=cdk.Duration.seconds(300),
            scale_out_cooldown=cdk.Duration.seconds(300))

        # PIPELINE CONSTRUCTS

        # ECR Repo

        ecrRepo = ecr.Repository(self, "EcrRepo")

        gitHubSource = codebuild.Source.git_hub(
            owner='samuelhailemariam',
            repo='aws-ecs-fargate-cicd-cdk',
            webhook=True,
            webhook_filters=[
                codebuild.FilterGroup.in_event_of(
                    codebuild.EventAction.PUSH).and_branch_is('main'),
            ])

        # CODEBUILD - project

        project = codebuild.Project(
            self,
            "ECSProject",
            project_name=cdk.Aws.STACK_NAME,
            source=gitHubSource,
            environment=codebuild.BuildEnvironment(
                build_image=codebuild.LinuxBuildImage.AMAZON_LINUX_2_2,
                privileged=True),
            environment_variables={
                "CLUSTER_NAME": {
                    'value': cluster.cluster_name
                },
                "ECR_REPO_URI": {
                    'value': ecrRepo.repository_uri
                }
            },
            build_spec=codebuild.BuildSpec.from_object({
                'version': "0.2",
                'phases': {
                    'pre_build': {
                        'commands': [
                            'env',
                            'export TAG=${CODEBUILD_RESOLVED_SOURCE_VERSION}'
                        ]
                    },
                    'build': {
                        'commands': [
                            'cd docker-app',
                            'docker build -t $ECR_REPO_URI:$TAG .',
                            '$(aws ecr get-login --no-include-email)',
                            'docker push $ECR_REPO_URI:$TAG'
                        ]
                    },
                    'post_build': {
                        'commands': [
                            'echo "In Post-Build Stage"', 'cd ..',
                            "printf '[{\"name\":\"flask-app\",\"imageUri\":\"%s\"}]' $ECR_REPO_URI:$TAG > imagedefinitions.json",
                            "pwd; ls -al; cat imagedefinitions.json"
                        ]
                    }
                },
                'artifacts': {
                    'files': ['imagedefinitions.json']
                }
            }))

        # PIPELINE ACTIONS

        sourceOutput = codepipeline.Artifact()
        buildOutput = codepipeline.Artifact()

        sourceAction = codepipeline_actions.GitHubSourceAction(
            action_name='GitHub_Source',
            owner='samuelhailemariam',
            repo='aws-ecs-fargate-cicd-cdk',
            branch='master',
            oauth_token=cdk.SecretValue.secrets_manager("/my/github/token"),
            output=sourceOutput)

        buildAction = codepipeline_actions.CodeBuildAction(
            action_name='codeBuild',
            project=project,
            input=sourceOutput,
            outputs=[buildOutput])

        manualApprovalAction = codepipeline_actions.ManualApprovalAction(
            action_name='Approve')

        deployAction = codepipeline_actions.EcsDeployAction(
            action_name='DeployAction',
            service=fargateService.service,
            image_file=codepipeline.ArtifactPath(buildOutput,
                                                 'imagedefinitions.json'))

        pipeline = codepipeline.Pipeline(self, "ECSPipeline")

        source_stage = pipeline.add_stage(stage_name="Source",
                                          actions=[sourceAction])

        build_stage = pipeline.add_stage(stage_name="Build",
                                         actions=[buildAction])

        approve_stage = pipeline.add_stage(stage_name="Approve",
                                           actions=[manualApprovalAction])

        deploy_stage = pipeline.add_stage(stage_name="Deploy-to-ECS",
                                          actions=[deployAction])

        ecrRepo.grant_pull_push(project.role)

        project.add_to_role_policy(
            _iam.PolicyStatement(resources=[cluster.cluster_arn],
                                 actions=[
                                     "ecs:DescribeCluster",
                                     "ecr:GetAuthorizationToken",
                                     "ecr:BatchCheckLayerAvailability",
                                     "ecr:BatchGetImage",
                                     "ecr:GetDownloadUrlForLayer"
                                 ]))

        # OUTPUT

        cdk.CfnOutput(
            self,
            "LoadBlancer-DNS",
            value=fargateService.load_balancer.load_balancer_dns_name)
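# The post_build printf above writes imagedefinitions.json, the file the
# EcsDeployAction reads to map container names to image URIs. The same file
# written in Python, as a sketch of the expected format (the URI and tag
# arguments are illustrative):
import json


def write_image_definitions(ecr_repo_uri: str, tag: str) -> None:
    """Write an imagedefinitions.json entry for the 'flask-app' container."""
    image_definitions = [{"name": "flask-app", "imageUri": f"{ecr_repo_uri}:{tag}"}]
    with open("imagedefinitions.json", "w") as output_file:
        json.dump(image_definitions, output_file)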
    def __init__(self, scope: core.Construct, id: str, *, prefix: str, environment: str, configuration, **kwargs):
        """
        :param scope: Stack class, used by CDK.
        :param id: ID of the construct, used by CDK.
        :param prefix: Prefix of the construct, used for naming purposes.
        :param environment: Environment of the construct, used for naming purposes.
        :param configuration: Configuration of the construct. In this case APIGATEWAY_FAN_OUT_SCHEMA.
        :param kwargs: Other parameters that could be used by the construct.
        """
        super().__init__(scope, id, **kwargs)
        self.prefix = prefix
        self.environment_ = environment
        self._configuration = configuration

        # Validating that the payload passed is correct
        validate_configuration(
            configuration_schema=S3_SPA_SIMPLE_PIPELINE_HOSTING_SCHEMA, configuration_received=self._configuration
        )

        self._deployment_bucket = base_bucket(self, **self._configuration["hosting"]["bucket"])

        artifact_bucket_name = (
            f"{self.prefix}-{self._configuration['hosting']['bucket']['bucket_name']}-artifacts-{self.environment_}"
        )
        artifact_bucket_config = {"bucket_name": artifact_bucket_name, "versioned": True, "public_read_access": False}
        self._deployment_artifact_bucket = base_bucket(self, **artifact_bucket_config)

        behaviour = cf.Behavior(
            is_default_behavior=self._configuration["hosting"]["cloudfront_distribution"]["origin_config"]["behaviours"][
                "is_default_behavior"
            ]
        )
        cloudfront_origins = cf.SourceConfiguration(
            behaviors=[behaviour], s3_origin_source=cf.S3OriginConfig(s3_bucket_source=self._deployment_bucket)
        )
        self._cloudfront_distribution = cf.CloudFrontWebDistribution(
            self,
            id=self._configuration["hosting"]["cloudfront_distribution"]["name"],
            origin_configs=[cloudfront_origins],
        )

        code_build_project_name = (
            f"{self.prefix}-{self._configuration['pipeline']['stages']['build']['name']}-cbproject-{self.environment_}"
        )
        self._codebuild_project = cb.Project(
            self,
            id=code_build_project_name,
            project_name=code_build_project_name,
            build_spec=cb.BuildSpec.from_object(
                {
                    "version": self._configuration["pipeline"]["stages"]["build"].get("version", "0.2"),
                    "phases": {"build": {"commands": self._configuration["pipeline"]["stages"]["build"]["commands"]}},
                    "artifacts": {
                        "base-directory": self._configuration["pipeline"]["stages"]["build"]["build_directory"],
                        "files": self._configuration["pipeline"]["stages"]["build"].get("files", "**/*"),
                    },
                }
            ),
        )

        source_artifact = cp.Artifact(artifact_name="source_artifact")
        single_page_app_artifact = cp.Artifact(artifact_name="single_page_app_artifact")

        pipeline_name = f"{self.prefix}-{self._configuration['pipeline']['name']}-pipeline-{self.environment_}"
        self._s3_single_page_app_pipeline = cp.Pipeline(
            self,
            id=pipeline_name,
            pipeline_name=pipeline_name,
            artifact_bucket=self._deployment_artifact_bucket,
        )

        self._s3_single_page_app_pipeline.add_stage(
            stage_name=self._configuration["pipeline"]["stages"]["github_source"]["name"],
            actions=[
                cp_actions.GitHubSourceAction(
                    action_name=self._configuration["pipeline"]["stages"]["github_source"]["name"],
                    repo=self._configuration["pipeline"]["stages"]["github_source"]["repo"],
                    owner=self._configuration["pipeline"]["stages"]["github_source"]["owner"],
                    branch=self._configuration["pipeline"]["stages"]["github_source"]["branch"],
                    oauth_token=core.SecretValue.secrets_manager(
                        secret_id=self._configuration["pipeline"]["stages"]["github_source"]["oauth_token_secret_arn"],
                    ),
                    output=source_artifact,
                )
            ],
        )

        self._s3_single_page_app_pipeline.add_stage(
            stage_name=self._configuration["pipeline"]["stages"]["build"]["name"],
            actions=[
                cp_actions.CodeBuildAction(
                    action_name=self._configuration["pipeline"]["stages"]["build"]["name"],
                    input=source_artifact,
                    project=self._codebuild_project,
                    outputs=[single_page_app_artifact],
                )
            ],
        )

        self._s3_single_page_app_pipeline.add_stage(
            stage_name=self._configuration["pipeline"]["stages"]["deploy"]["name"],
            actions=[
                cp_actions.S3DeployAction(
                    action_name=self._configuration["pipeline"]["stages"]["deploy"]["name"],
                    bucket=self._deployment_bucket,
                    input=single_page_app_artifact,
                )
            ],
        )
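# Every key below is read by the construct above; the values are placeholders
# and the authoritative schema is S3_SPA_SIMPLE_PIPELINE_HOSTING_SCHEMA, which
# is not shown in this listing (a sketch for illustration only):
EXAMPLE_SPA_PIPELINE_CONFIGURATION = {
    "hosting": {
        "bucket": {"bucket_name": "my-spa-site"},
        "cloudfront_distribution": {
            "name": "my-spa-distribution",
            "origin_config": {"behaviours": {"is_default_behavior": True}},
        },
    },
    "pipeline": {
        "name": "my-spa",
        "stages": {
            "github_source": {
                "name": "Source",
                "repo": "my-spa-repo",
                "owner": "my-github-org",
                "branch": "main",
                "oauth_token_secret_arn": "github-token",
            },
            "build": {
                "name": "Build",
                "version": "0.2",
                "commands": ["npm ci", "npm run build"],
                "build_directory": "dist",
                "files": "**/*",
            },
            "deploy": {"name": "Deploy"},
        },
    },
}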
Exemple #27
    def __init__(self,
                 scope: core.Construct,
                 id: str,
                 ecr_repo_name: str,
                 docker_img_tag: str,
                 build_spec_file: str,
                 env_type: typing.Optional[str] = 'Linux',
                 privileged: typing.Optional[bool] = False,
                 **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        # Fetch environment variables.
        github_repo_owner = EnvUtil.get("GITHUB_REPO_OWNER", "awslabs")
        github_repo = EnvUtil.get("GITHUB_REPO", "aws-lc")

        # Define CodeBuild resource.
        git_hub_source = codebuild.Source.git_hub(
            owner=github_repo_owner,
            repo=github_repo,
            webhook=True,
            webhook_filters=[
                codebuild.FilterGroup.in_event_of(
                    codebuild.EventAction.PULL_REQUEST_CREATED,
                    codebuild.EventAction.PULL_REQUEST_UPDATED,
                    codebuild.EventAction.PULL_REQUEST_REOPENED)
            ],
            clone_depth=1)

        # Define CodeBuild environment.
        ecr_repo = ecr.Repository.from_repository_name(
            scope=self, id=ecr_repo_name, repository_name=ecr_repo_name)
        build_image = codebuild.LinuxBuildImage.from_ecr_repository(
            repository=ecr_repo, tag=docker_img_tag)
        if env_type == 'Windows':
            build_image = codebuild.WindowsBuildImage.from_ecr_repository(
                repository=ecr_repo, tag=docker_img_tag)

        # Define a role.
        role = iam.Role(
            scope=self,
            id="{}-role".format(id),
            assumed_by=iam.ServicePrincipal("codebuild.amazonaws.com"),
            managed_policies=[
                iam.ManagedPolicy.from_aws_managed_policy_name(
                    "AmazonEC2ContainerRegistryReadOnly")
            ])

        # Define CodeBuild.
        build = codebuild.Project(
            scope=self,
            id=id,
            project_name=id,
            source=git_hub_source,
            role=role,
            environment=codebuild.BuildEnvironment(
                compute_type=codebuild.ComputeType.LARGE,
                privileged=privileged,
                build_image=build_image),
            build_spec=codebuild.BuildSpec.from_source_filename(
                build_spec_file))

        if env_type == 'ARM':
            # Workaround to change environment type.
            # see: https://github.com/aws/aws-cdk/issues/5517
            cfn_build = build.node.default_child
            cfn_build.add_override("Properties.Environment.Type",
                                   "ARM_CONTAINER")
    def __init__(self, scope: core.Construct, id: str, repository_name: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        codebuild_start_fn = _create_fn_from_folder(
            scope=self,
            folder_name="codebuild_start_fn",
        )
        codebuild_result_fn = _create_fn_from_folder(
            scope=self,
            folder_name="codebuild_result_fn",
        )

        repo = codecommit.Repository(
            scope=self,
            id="Repository",
            repository_name=repository_name,
        )

        project = codebuild.Project(
            scope=self,
            id="PullRequestCodeCommitProject",
            source=codebuild.Source.code_commit(repository=repo),
            badge=True,
        )

        project.on_build_started(
            id="on-build-started",
            target=targets.LambdaFunction(handler=codebuild_start_fn),
        )
        project.on_build_succeeded(
            id="on-build-succeeded",
            target=targets.LambdaFunction(handler=codebuild_result_fn),
        )
        project.on_build_failed(
            id="on-build-failed",
            target=targets.LambdaFunction(handler=codebuild_result_fn),
        )

        on_pull_request_state_change_rule = repo.on_pull_request_state_change(
            id="on-pull-request-change",
            event_pattern=events.EventPattern(
                detail={"event": [
                    "pullRequestSourceBranchUpdated",
                    "pullRequestCreated",
                ]}),
            # target=targets.LambdaFunction(
            #     handler=pull_request_fn,
            # )
        )
        on_pull_request_state_change_rule.add_target(
            target=targets.CodeBuildProject(
                project=project,
                event=events.RuleTargetInput.from_object(
                    {
                        "sourceVersion": events.EventField.from_path("$.detail.sourceCommit"),
                        "artifactsOverride": {"type": "NO_ARTIFACTS"},
                        "environmentVariablesOverride": [
                            {
                                "name": "pullRequestId",
                                "value": events.EventField.from_path("$.detail.pullRequestId"),
                                "type": "PLAINTEXT"
                            },
                            {
                                "name": "repositoryName",
                                "value": events.EventField.from_path("$.detail.repositoryNames[0]"),
                                "type": "PLAINTEXT"
                            },
                            {
                                "name": "sourceCommit",
                                "value": events.EventField.from_path("$.detail.sourceCommit"),
                                "type": "PLAINTEXT"
                            },
                            {
                                "name": "destinationCommit",
                                "value": events.EventField.from_path("$.detail.destinationCommit"),
                                "type": "PLAINTEXT"
                            }
                        ]
                    }
                ),
            )
        )
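# _create_fn_from_folder is a module-level helper that is not shown in these
# examples. A sketch of what such a helper plausibly does (an assumption; the
# runtime, handler name, and asset path are illustrative):
from aws_cdk import core
from aws_cdk import aws_lambda as _lambda


def _create_fn_from_folder_sketch(scope: core.Construct, folder_name: str) -> _lambda.Function:
    """Package the given folder as a Lambda function asset."""
    return _lambda.Function(
        scope,
        folder_name,
        runtime=_lambda.Runtime.PYTHON_3_8,
        handler="index.handler",
        code=_lambda.Code.from_asset(f"./lambda/{folder_name}"),
    )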
Exemple #29
    def __init__(
        self,
        scope: core.Construct,
        id: str,
        repo: str,
        owner: str = 'SeedCompany',
        bucket_name: str = None,  # if specified, then artifacts from the build will be stored here
        create_bucket: bool = False,  # if true and bucket_name exists, then the artifact bucket will be created
        **kwargs
    ) -> None:
        super().__init__(scope, id, **kwargs)

        artifacts = None
        if bucket_name:
            if create_bucket:
                artifactStore = s3.Bucket(self,
                                          bucket_name,
                                          bucket_name=bucket_name)
            else:
                artifactStore = s3.Bucket.from_bucket_name(
                    self, bucket_name, bucket_name)

            artifacts = codebuild.Artifacts.s3(
                bucket=artifactStore,
                name=repo,
                include_build_id=True,
                package_zip=False,
            )

        # GitHub credentials are entered into CodeBuild manually:
        # $ aws codebuild import-source-credentials --server-type GITHUB --auth-type PERSONAL_ACCESS_TOKEN --token <token_value>
        gitRepo = codebuild.Source.git_hub(owner=owner,
                                           repo=repo,
                                           webhook=True)

        buildEnv = codebuild.BuildEnvironment(
            build_image=codebuild.LinuxBuildImage.UBUNTU_14_04_DOCKER_18_09_0,
            compute_type=codebuild.ComputeType.SMALL,
            privileged=True)

        dockerRepo = ecr.Repository(self,
                                    '%sRepo' % repo.capitalize(),
                                    repository_name=repo)

        project = codebuild.Project(
            self,
            '%sBuild' % repo.capitalize(),
            project_name='%sBuild' % repo.capitalize(),
            environment=buildEnv,
            environment_variables={
                "AWS_ACCOUNT_ID":
                codebuild.BuildEnvironmentVariable(value=self.account),
                "REPO":
                codebuild.BuildEnvironmentVariable(value=repo)
            },
            source=gitRepo,
            artifacts=artifacts,
            badge=True,
            # see reference.buildspec.yml for a standard buildspec
            build_spec=codebuild.BuildSpec.from_source_filename(
                "buildspec.yml"))

        project.role.add_to_policy(
            iam.PolicyStatement(resources=['*'],
                                actions=['ecr:GetAuthorizationToken']))

        project.role.add_to_policy(
            iam.PolicyStatement(resources=[dockerRepo.repository_arn],
                                actions=[
                                    'ecr:InitiateLayerUpload',
                                    'ecr:UploadLayerPart',
                                    'ecr:CompleteLayerUpload',
                                    'ecr:BatchCheckLayerAvailability',
                                    'ecr:PutImage'
                                ]))
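# reference.buildspec.yml is not included in this listing. A sketch of a
# buildspec that matches the environment variables and ECR permissions
# configured above (an assumption, not the real file; the region is illustrative):
REFERENCE_BUILDSPEC_SKETCH = {
    "version": "0.2",
    "phases": {
        "pre_build": {
            "commands": [
                "$(aws ecr get-login --no-include-email --region us-east-1)"
            ]
        },
        "build": {
            "commands": [
                "docker build -t $REPO:latest .",
                "docker tag $REPO:latest $AWS_ACCOUNT_ID.dkr.ecr.us-east-1.amazonaws.com/$REPO:latest"
            ]
        },
        "post_build": {
            "commands": [
                "docker push $AWS_ACCOUNT_ID.dkr.ecr.us-east-1.amazonaws.com/$REPO:latest"
            ]
        }
    }
}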
Exemple #30
    def create_docker_image_buildproject(self) -> Resource:
        """Greengrassのコンポーネント用に推論アプリのdockerイメージをビルドするcodebuild

        Returns:
            Resource: codebuild
        """

        codebuild_name = f"{self.stack_name}_{self.component_id}_build_component"
        role_name = self.get_role_name("codebuild")
        codebuild_role = aws_iam.Role(
            self,
            id=role_name,
            assumed_by=aws_iam.ServicePrincipal("codebuild.amazonaws.com"),
            role_name=role_name,
            path="/service-role/")
        codebuild_role.attach_inline_policy(
            aws_iam.Policy(
                self,
                "DefaultCodeBuildPermissions",
                document=aws_iam.PolicyDocument(statements=[
                    aws_iam.PolicyStatement(
                        actions=[
                            "logs:CreateLogGroup", "logs:CreateLogStream",
                            "logs:PutLogEvents"
                        ],
                        resources=[
                            f"arn:aws:logs:{Aws.REGION}:{Aws.ACCOUNT_ID}:log-group:/aws/codebuild/{codebuild_name}",
                            f"arn:aws:logs:{Aws.REGION}:{Aws.ACCOUNT_ID}:log-group:/aws/codebuild/{codebuild_name}:*"
                        ]),
                    aws_iam.PolicyStatement(
                        actions=[
                            "codebuild:CreateReportGroup",
                            "codebuild:CreateReport", "codebuild:UpdateReport",
                            "codebuild:BatchPutTestCases",
                            "codebuild:BatchPutCodeCoverages"
                        ],
                        resources=[
                            f"arn:aws:codebuild:{Aws.REGION}:{Aws.ACCOUNT_ID}:report-group/{codebuild_name}-*"
                        ]),
                    aws_iam.PolicyStatement(
                        actions=[
                            "s3:PutObject", "s3:GetObject",
                            "s3:GetObjectVersion", "s3:GetBucketAcl",
                            "s3:GetBucketLocation"
                        ],
                        resources=[
                            "arn:aws:s3:::{}/*".format(
                                "ml-model-build-input-us-east-1")
                        ]),
                    aws_iam.PolicyStatement(
                        actions=["ecr:GetAuthorizationToken"],
                        resources=["*"]),
                    aws_iam.PolicyStatement(
                        actions=[
                            "ecr:BatchCheckLayerAvailability",
                            "ecr:GetDownloadUrlForLayer",
                            "ecr:GetRepositoryPolicy",
                            "ecr:DescribeRepositories", "ecr:ListImages",
                            "ecr:DescribeImages", "ecr:BatchGetImage",
                            "ecr:InitiateLayerUpload", "ecr:UploadLayerPart",
                            "ecr:CompleteLayerUpload", "ecr:PutImage"
                        ],
                        resources=[
                            self._component_ecr.repository_arn,
                            self._component_base_ecr.repository_arn
                        ]),
                    aws_iam.PolicyStatement(
                        actions=["codecommit:GitPull"],
                        resources=[
                            self._component_source_repository.repository_arn
                        ])
                ])))

        buildspecfile = os.path.dirname(
            __file__) + "/buildspec/componentimage.yaml"
        with open(buildspecfile, "r") as yml:
            buildspec = yaml.safe_load(yml)

        code_build = aws_codebuild.Project(
            self,
            id=codebuild_name,
            project_name=codebuild_name,
            build_spec=aws_codebuild.BuildSpec.from_object(buildspec),
            environment=aws_codebuild.BuildEnvironment(
                privileged=True,
                build_image=aws_codebuild.LinuxBuildImage.STANDARD_4_0),
            description='Build the inference app component image for Greengrass',
            timeout=Duration.minutes(60),
            role=codebuild_role)

        return code_build