Example #1
def create_build_project(self, role, source_bucket):
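    # CodeBuild project that pulls archive.zip from the source bucket, runs a
    # privileged Docker build driven by etc/cicd/buildspec.yml, and writes a
    # zipped artifact back to the same bucket.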
    build_project = _cb.Project(
        self, 'CodeBuildProject',
        project_name='DEMO-BUILD',
        source=_cb.Source.s3(
            bucket=source_bucket,
            path='archive.zip'
        ),
        environment=_cb.BuildEnvironment(
            build_image=_cb.LinuxBuildImage.STANDARD_3_0,
            privileged=True
        ),
        environment_variables={
            'IMAGE_REPO_NAME': _cb.BuildEnvironmentVariable(value='demo-repository'),
            'AWS_DEFAULT_REGION': _cb.BuildEnvironmentVariable(value=os.environ.get('REGION')),
            'AWS_ACCOUNT_ID': _cb.BuildEnvironmentVariable(value=os.environ.get('ACCOUNT_ID')),
            'CONTAINER_NAME': _cb.BuildEnvironmentVariable(value='DEMO-CONTAINER'),
        },
        build_spec=_cb.BuildSpec.from_source_filename(filename='etc/cicd/buildspec.yml'),
        artifacts=_cb.Artifacts.s3(
            bucket=source_bucket,
            name='artifact-codebuild.zip',
            package_zip=True,
            include_build_id=False
        ),
        role=role
    )
    return build_project
Example #2
 def get_build_env_vars(self, ecr_repo):
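     # Environment variables for the build: the target ECR repository, Docker Hub
     # credentials resolved from SSM Parameter Store, and connection details for
     # Redis, RDS and EKS.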
     return {
         "REPOSITORY_URI":
         codebuild.BuildEnvironmentVariable(value=ecr_repo.repository_uri),
         "DOCKERHUB_USERNAME":
         codebuild.BuildEnvironmentVariable(
             value="/springboot-multiarch/dockerhub/username",
             type=codebuild.BuildEnvironmentVariableType.PARAMETER_STORE),
         "DOCKERHUB_PASSWORD":
         codebuild.BuildEnvironmentVariable(
             value="/springboot-multiarch/dockerhub/password ",
             type=codebuild.BuildEnvironmentVariableType.PARAMETER_STORE),
         "REDIS_HOST":
         codebuild.BuildEnvironmentVariable(
             value=self.redis.attr_redis_endpoint_address),
         "REDIS_PORT":
         codebuild.BuildEnvironmentVariable(
             value=self.redis.attr_redis_endpoint_port),
         "RDS_SECRET":
         codebuild.BuildEnvironmentVariable(
             value=self.rds_cluster.secret.secret_name),
         "RDS_HOST":
         codebuild.BuildEnvironmentVariable(
             value=self.rds_cluster.cluster_endpoint.hostname),
         "RDS_PORT":
         codebuild.BuildEnvironmentVariable(
             value=self.rds_cluster.cluster_endpoint.port),
         "EKS_NAME":
         codebuild.BuildEnvironmentVariable(value=self.eks.cluster_name),
         "EKS_ROLE":
         codebuild.BuildEnvironmentVariable(
             value=self.eks.kubectl_role.role_arn),
     }
Example #3
    def __init__(
        self,
        scope: core.Construct,
        id: str,
        repo: str,
        artifacts_bucket: str,
        owner: str = 'SeedCompany',
        create_bucket: bool = False,  # if True, create the artifact bucket named artifacts_bucket; otherwise import it by name
        **kwargs
    ) -> None:
        super().__init__(scope, id, **kwargs)

        if create_bucket:
            artifactStore = s3.Bucket(self,
                                      artifacts_bucket,
                                      bucket_name=artifacts_bucket)
        else:
            artifactStore = s3.Bucket.from_bucket_name(self, artifacts_bucket,
                                                       artifacts_bucket)

        artifacts = codebuild.Artifacts.s3(
            bucket=artifactStore,
            name=repo,
            include_build_id=True,
            package_zip=False,
        )

        #GitHub credentials are entered into CodeBuild manually
        # $ aws codebuild import-source-credentials --server-type GITHUB --auth-type PERSONAL_ACCESS_TOKEN --token <token_value>
        gitRepo = codebuild.Source.git_hub(owner=owner,
                                           repo=repo,
                                           webhook=True)

        retetoRepo = ecr.Repository.from_repository_name(
            self, 'RetetoRepo', 'reteto')

        buildEnv = codebuild.BuildEnvironment(
            build_image=codebuild.LinuxBuildImage.from_ecr_repository(
                retetoRepo),
            compute_type=codebuild.ComputeType.SMALL,
            privileged=True)

        project = codebuild.Project(
            self,
            '%sBuild' % repo.capitalize(),
            project_name='%sBuild' % repo.capitalize(),
            environment=buildEnv,
            environment_variables={
                "AWS_ACCOUNT_ID":
                codebuild.BuildEnvironmentVariable(value=self.account),
                "REPO":
                codebuild.BuildEnvironmentVariable(value=repo)
            },
            source=gitRepo,
            artifacts=artifacts,
            badge=True,
            # see reference.buildspec.yml for a standard buildspec
            build_spec=codebuild.BuildSpec.from_object({}))
Example #4
    def __init__(self, scope: core.App, id: str, props, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)
        required_props = [
            'github_source', 'config_source_bucket', 'device_farm_project_name'
        ]
        for prop in required_props:
            if prop not in props:
                raise RuntimeError(f"Parameter {prop} is required.")

        config_source_bucket = props['config_source_bucket']
        device_farm_project_name = props['device_farm_project_name']
        codebuild_project_name_prefix = props['codebuild_project_name_prefix']

        github_source = props['github_source']
        owner = github_source['owner']
        repo = github_source['repo']
        base_branch = github_source['base_branch']

        df_project = DeviceFarmProject(self,
                                       id,
                                       project_name=device_farm_project_name)
        df_pool = DeviceFarmDevicePool(
            self,
            f"{id}DevicePool",
            project_arn=core.Token.as_string(df_project.project_arn),
            device_pool_name="SingleDeviceIntegTestDevicePool")

        PullRequestBuilder(
            self,
            "UnitTestRunner",
            project_name=f"{codebuild_project_name_prefix}-UnitTest",
            github_owner=owner,
            github_repo=repo,
            base_branch=base_branch,
            buildspec_path="scripts/pr-builder-buildspec.yml")

        integtest_project = PullRequestBuilder(
            self,
            "IntegrationTestrunner",
            project_name=f"{codebuild_project_name_prefix}-IntegrationTest",
            github_owner=owner,
            github_repo=repo,
            base_branch=base_branch,
            buildspec_path="scripts/devicefarm-test-runner-buildspec.yml",
            environment_variables={
                'DEVICEFARM_PROJECT_ARN':
                aws_codebuild.BuildEnvironmentVariable(
                    value=df_project.get_arn()),
                'DEVICEFARM_POOL_ARN':
                aws_codebuild.BuildEnvironmentVariable(
                    value=df_pool.device_pool_arn),
                'CONFIG_SOURCE_BUCKET':
                aws_codebuild.BuildEnvironmentVariable(
                    value=config_source_bucket)
            })
        self._add_codebuild_project_runner_permissions(integtest_project.role)
        self._add_devicefarm_test_runner_permissions_to_role(
            integtest_project.role)
Example #5
    def __init__(self, scope: core.Construct, construct_id: str, **kwargs) -> None:
        super().__init__(scope, construct_id, **kwargs)

        prj_name = self.node.try_get_context("project_name")
        env_name = self.node.try_get_context("env")
        account_id = core.Aws.ACCOUNT_ID
        PROJECT_NUMBER = 2

        # To Store Frontend App
        frontend_bucket = s3.Bucket(self, "frontend",
                                    access_control=s3.BucketAccessControl.BUCKET_OWNER_FULL_CONTROL,
                                    bucket_name=account_id + '-' + env_name + '-frontend',
                                    public_read_access=True,
                                    removal_policy=core.RemovalPolicy.DESTROY,
                                    website_index_document='index.html'
                                    )

        bucket_name = frontend_bucket.bucket_name

        github_token = core.SecretValue.secrets_manager("dev/github-token", json_field='github-from-marsApp')

        cb.GitHubSourceCredentials(self, "CodeBuildGitHubCreds",
                                          access_token=github_token
                                          )

        git_hub_source = cb.Source.git_hub(
            owner="manrodri",
            repo="30miniProjects",
            webhook=True,
            webhook_filters=[
                cb.FilterGroup.in_event_of(cb.EventAction.PUSH).and_branch_is(
                    "master").and_file_path_is('js30Projects/')
            ]
        )

        codebuild_project = cb.Project(
            self,
            "cb-frontend",
            source=git_hub_source,
            environment=cb.BuildEnvironment(
                build_image=cb.LinuxBuildImage.STANDARD_3_0,
                environment_variables={
                    'WEB_BUCKET_NAME': cb.BuildEnvironmentVariable(value=bucket_name),
                    'PROJECT_NUMBER': cb.BuildEnvironmentVariable(value=str(PROJECT_NUMBER))
                }
            ),
        )

        allow_object_actions = iam.PolicyStatement(resources=[f"arn:aws:s3:::{bucket_name}/*"],
                                               actions=["s3:*"])
        allow_bucket_actions = iam.PolicyStatement(
            resources=[f"arn:aws:s3:::{bucket_name}"],
            actions=['s3:*'],
        )
        codebuild_project.add_to_role_policy(allow_object_actions)
        codebuild_project.add_to_role_policy(allow_bucket_actions)
Example #6
    def _create_train_step(self):
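        # Training step: a VPC-attached, privileged CodeBuild project added to the
        # pipeline; its buildspec is expected to drive the Step Functions workflow
        # named in SFN_WORKFLOW_NAME.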
        stage = self.pipeline.add_stage(stage_name=f"{self.name_prefix}-stage")

        role = iam.Role(
            self,
            "Role",
            assumed_by=iam.ServicePrincipal("codebuild.amazonaws.com"),
            description="Role for CodeBuild",
            role_name=f"{self.name_prefix}-codebuild-role",
            managed_policies=[
                iam.ManagedPolicy.from_aws_managed_policy_name(
                    "AmazonEC2ContainerRegistryFullAccess"),
                iam.ManagedPolicy.from_aws_managed_policy_name(
                    "AWSStepFunctionsFullAccess"),
                iam.ManagedPolicy.from_aws_managed_policy_name(
                    "service-role/AWSLambdaVPCAccessExecutionRole"),
                iam.ManagedPolicy.from_aws_managed_policy_name(
                    "SecretsManagerReadWrite"),
            ],
        )

        policy = iam.Policy(self, "PassRolePolicy")
        policy.document.add_statements(
            iam.PolicyStatement(
                actions=["iam:PassRole"],
                resources=[f"arn:aws:iam::{Stack.of(self).account}:role/*"]))
        role.attach_inline_policy(policy)

        build_spec = codebuild.BuildSpec.from_source_filename('buildspec.yml')
        project = codebuild.PipelineProject(
            self,
            "TrainingStepProject",
            build_spec=build_spec,
            environment=codebuild.BuildEnvironment(
                build_image=codebuild.LinuxBuildImage.STANDARD_5_0,
                privileged=True),
            role=role,
            security_groups=[self.security_group],
            subnet_selection=self.subnet_selection,
            vpc=self.vpc)

        action = codepipeline_actions.CodeBuildAction(
            action_name=f"{self.name_prefix}-training-action",
            project=project,
            input=self.source_output,
            environment_variables={
                "EXEC_ID":
                codebuild.BuildEnvironmentVariable(
                    value='#{codepipeline.PipelineExecutionId}'),
                "SFN_WORKFLOW_NAME":
                codebuild.BuildEnvironmentVariable(value=self.sfn_name)
            },
            variables_namespace="trainStep",
        )
        stage.add_action(action)
Example #7
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        sourceArtifact = codepipeline.Artifact()
        cloudAssemblyArtifact = codepipeline.Artifact()

        pipeline = pipelines.CdkPipeline(self, 'Pipeline',
                                         pipeline_name=self.node.try_get_context(
                                             'repository_name') + "-{}-pipeline".format(STAGE),
                                         cloud_assembly_artifact=cloudAssemblyArtifact,
                                         source_action=actions.GitHubSourceAction(
                                            action_name='GitHub',
                                            output=sourceArtifact,
                                            oauth_token=core.SecretValue.secrets_manager('github-token'),
                                            owner=self.node.try_get_context(
                                             'owner'),
                                            repo=self.node.try_get_context(
                                             'repository_name'),
                                            branch=STAGE
                                        ),
                                         synth_action=pipelines.SimpleSynthAction(
                                             synth_command="cdk synth",
                                             install_commands=[
                                                 "pip install --upgrade pip",
                                                 "npm i -g aws-cdk",
                                                 "pip install -r requirements.txt"
                                             ],
                                             source_artifact=sourceArtifact,
                                             cloud_assembly_artifact=cloudAssemblyArtifact,
                                             environment={
                                                 'privileged': True
                                             },
                                             environment_variables={
                                                 'DEV_ACCOUNT_ID': codebuild.BuildEnvironmentVariable(value=os.environ['DEV_ACCOUNT_ID']),
                                                 'STG_ACCOUNT_ID': codebuild.BuildEnvironmentVariable(value=os.environ['STG_ACCOUNT_ID']),
                                                 'PROD_ACCOUNT_ID': codebuild.BuildEnvironmentVariable(value=os.environ['PROD_ACCOUNT_ID']),
                                                 'MANAGE_ACCOUNT_ID': codebuild.BuildEnvironmentVariable(value=os.environ['MANAGE_ACCOUNT_ID'])
                                             }
                                         )
                                         )

        dev = PipelineStage(
            self,
            self.node.try_get_context('service_name') + "-{}".format(STAGE),
            env={
                'region': "ap-northeast-1",
                'account': os.environ['DEV_ACCOUNT_ID']
            }
        )

        dev_stage = pipeline.add_application_stage(dev)
Example #8
    def __init__(self, scope: core.Construct, id: str,
                 source: codepipeline.Artifact,
                 pipeline: codepipeline.Pipeline, bucket: s3.Bucket,
                 role: iam.Role, frontend: str, **kwargs) -> None:

        super().__init__(scope, id, **kwargs)

        branch = id.split('-')[-1]

        # Code build for flask frontend
        env = codebuild.BuildEnvironment(
            build_image=codebuild.LinuxBuildImage.UBUNTU_14_04_DOCKER_18_09_0,
            compute_type=codebuild.ComputeType.SMALL,
            environment_variables={
                'PROJECTNAME':
                codebuild.BuildEnvironmentVariable(
                    value=os.environ['GITHUB_REPO']),
                'GITHUBUSER':
                codebuild.BuildEnvironmentVariable(
                    value=os.environ['GITHUB_OWNER']),
                'SOURCEBRANCH':
                codebuild.BuildEnvironmentVariable(value=branch),
                'ARTIFACT_BUCKET':
                codebuild.BuildEnvironmentVariable(value=bucket.bucket_arn),
                'REPO_URI':
                codebuild.BuildEnvironmentVariable(value=frontend),
            },
            privileged=True,
        )

        project = codebuild.PipelineProject(
            self,
            'Build_Frontend-' + branch,
            description='Submit build jobs for {} as part of CI/CD pipeline'.
            format(os.environ['GITHUB_REPO']),
            environment=env,
            build_spec=codebuild.BuildSpec.from_source_filename(
                "buildspec.yml"),
            role=role)

        cb_actions = codepipeline_actions.CodeBuildAction(
            action_name='CodeBuild-' + branch,
            input=source,
            project=project,
            run_order=3)

        pipeline.add_stage(stage_name='CodeBuild-' + branch,
                           actions=[cb_actions])
Example #9
    def __init__(self, scope: core.Construct, id: str, buildspec, **kwargs):
        super().__init__(scope, id, **kwargs)
        self.buildspec = buildspec
        self.build_image = codebuild.LinuxBuildImage.STANDARD_2_0
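        # STANDARD_2_0 image in privileged mode (typically required when the
        # buildspec runs Docker commands).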

        self.project = codebuild.PipelineProject(
            self,
            "Project",
            environment=codebuild.BuildEnvironment(
                build_image=self.build_image, privileged=True),
            build_spec=codebuild.BuildSpec.from_source_filename(
                self.buildspec),
            environment_variables={
                'REPO_NAME':
                codebuild.BuildEnvironmentVariable(
                    value=config['CODEPIPELINE']['GITHUB_REPO'])
            },
        )

        # TODO: Don't need admin, let's make this least privilege
        self.admin_policy = iam.Policy(
            self,
            "AdminPolicy",
            roles=[self.project.role],
            statements=[iam.PolicyStatement(
                actions=['*'],
                resources=['*'],
            )])
Example #10
  def __init__(self, scope: core.Construct, id: str, 
    project_name:str,
    build_image:assets.DockerImageAsset, 
    context:BuildContext, 
    build_role:iam.Role,
    app_dir:str, **kwargs) -> None:

    super().__init__(scope, id, **kwargs)

    self.github_master_source = b.Source.git_hub(
      clone_depth=1,
      owner='dr-natetorious',
      repo='app-FinSurf',
      webhook=False
    )

    param_name = '/app-finsurf/artifacts/bin/{}'.format(project_name)
    output_path = 's3://{}/cicd/{}'.format(
      context.buckets.artifacts_bucket.bucket_name,
      project_name)

    self.build_project = b.Project(self,'PythonProject',
      project_name=project_name,
      source= self.github_master_source,
      environment= b.BuildEnvironment(
        build_image= b.LinuxBuildImage.from_ecr_repository(
          repository=build_image.repository,
          tag=build_image.image_uri.split(':')[-1]),
        environment_variables={
          'APP_DIR':b.BuildEnvironmentVariable(value=app_dir),
          'PARAM_NAME': b.BuildEnvironmentVariable(value=param_name),
          'OUTPUT_PATH': b.BuildEnvironmentVariable(value=output_path),
        },
        compute_type=b.ComputeType.SMALL
      ),
      role=build_role,
      encryption_key= context.buckets.artifacts_key,
      build_spec= b.BuildSpec.from_source_filename(filename='cicd/configs/buildspec-python-zip.yml'),
      artifacts= b.Artifacts.s3(
        name=project_name,
        path="/artifacts",
        bucket=context.buckets.artifacts_bucket,
        encryption=True,
        include_build_id=False,
        package_zip=False)
      )
Example #11
    def __init__(self, pipeline_data: PipelineData):
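        # CDK Pipelines (v1): a CodeStar Connections source action feeds a
        # SimpleSynthAction; the synthesized CodeBuild project is patched further
        # down with CloudFormation property overrides.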
        super().__init__(pipeline_data.scope,
                         pipeline_data.name,
                         env=pipeline_data.env)
        self.source_artifact = cp.Artifact('Source')
        self.cloud_assembly_artifact = cp.Artifact('CloudAs')
        self.pipeline = CdkPipeline(
            self,
            "Pipeline",
            self_mutating=True,
            cross_account_keys=False,
            cloud_assembly_artifact=self.cloud_assembly_artifact,
            source_action=cpa.BitBucketSourceAction(
                role=iam.LazyRole(
                    self,
                    'SourceRole',
                    assumed_by=iam.AccountPrincipal(self.account),
                    managed_policies=[
                        iam.ManagedPolicy.from_aws_managed_policy_name(
                            'AmazonS3FullAccess')
                    ]),
                action_name="Ship",
                connection_arn=pipeline_data.github_connection_arn,
                owner=pipeline_data.github_owner,
                repo=pipeline_data.repo_name,
                branch=pipeline_data.repo_branch,
                output=self.source_artifact),
            synth_action=SimpleSynthAction(
                install_commands=pipeline_data.synth_install_commands,
                environment=cb.BuildEnvironment(
                    environment_variables={
                        env_key: cb.BuildEnvironmentVariable(
                            value=pipeline_data.build_env[env_key])
                        for env_key in pipeline_data.build_env
                    },
                    build_image=cb.LinuxBuildImage.STANDARD_5_0,
                    compute_type=cb.ComputeType.SMALL,
                    privileged=True),
                synth_command='cdk synth',
                action_name='Synthesize',
                cloud_assembly_artifact=self.cloud_assembly_artifact,
                source_artifact=self.source_artifact))
        pipeline = self.pipeline.node.try_find_child('Pipeline')
        build_stage = pipeline.node.try_find_child('Build')
        synth_action = build_stage.node.try_find_child('Synthesize')
        build_proj = synth_action.node.try_find_child('CdkBuildProject')
        cfn_build_project = build_proj.node.default_child

        # Need Privileged mode for starting docker
        cfn_build_project.add_property_override("Environment.PrivilegedMode",
                                                "true")
        # Updating from v4 by default in aws-cdk to v5
        cfn_build_project.add_property_override("Environment.Image",
                                                "aws/codebuild/standard:5.0")
        # Only clone the last commit. Don't clone the history
        cfn_build_project.add_property_override("Source.GitCloneDepth", 1)

        self.pipeline.add_application_stage(pipeline_data.app_stage)
Example #12
def create_build_stage(self, source_output, build_project):
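    # Build stage with a single CodeBuild action; ENV and FAMILY_NAME are
    # overridden at the action level rather than on the project.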
    build_stage=_cp.StageProps(
        stage_name='Build',
        actions=[
            _cpa.CodeBuildAction(
                action_name='Build',
                input=source_output,
                project=build_project,
                run_order=1,
                environment_variables={
                  'ENV': _cb.BuildEnvironmentVariable(value='develop'),
                  'FAMILY_NAME': _cb.BuildEnvironmentVariable(value='DEMO-TASK'),
                },
                outputs=[_cp.Artifact(artifact_name='BuildArtifact')],
            )
        ]
    )
    return build_stage
Example #13
 def get_build_env_vars(self, ecr_repo):
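     # Same pattern as Example #2: the target ECR repository plus Docker Hub
     # credentials from Parameter Store and the EKS cluster details.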
     return {
         "REPOSITORY_URI":
         codebuild.BuildEnvironmentVariable(value=ecr_repo.repository_uri),
         "DOCKERHUB_USERNAME":
         codebuild.BuildEnvironmentVariable(
             value="/hello-bottlerocket/dockerhub/username",
             type=codebuild.BuildEnvironmentVariableType.PARAMETER_STORE),
         "DOCKERHUB_PASSWORD":
         codebuild.BuildEnvironmentVariable(
             value="/hello-bottlerocket/dockerhub/password",
             type=codebuild.BuildEnvironmentVariableType.PARAMETER_STORE),
         "EKS_NAME":
         codebuild.BuildEnvironmentVariable(value=self.eks.cluster_name),
         "EKS_ROLE":
         codebuild.BuildEnvironmentVariable(
             value=self.eks.kubectl_role.role_arn),
     }
Example #14
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)
        
        # Create ECR Repository
        ghost_repo = ecr.Repository(
            self, "GhostRepo",
            repository_name="ghost"
        )

        # Create IAM Role For CodeBuild
        ghost_build_role = iam.Role(
            self, "GhostBuildRole",
            assumed_by=iam.ServicePrincipal("codebuild.amazonaws.com"),
            managed_policies=[
                iam.ManagedPolicy.from_aws_managed_policy_name("EC2InstanceProfileForImageBuilderECRContainerBuilds")
            ]
        )

        # We only want to fire on the master branch and if there is a change in the dockerbuild folder
        git_hub_source = codebuild.Source.git_hub(
            owner="jasonumiker",
            repo="k8s-plus-aws-gitops",
            webhook=True,
            webhook_filters=[
                codebuild.FilterGroup.in_event_of(codebuild.EventAction.PUSH).and_branch_is("master").and_file_path_is("dockerbuild/*")
            ]
        )

        # Create CodeBuild
        build_project = codebuild.Project(
            self, "GhostBuildProject",
            source=git_hub_source,
            role=ghost_build_role,
            build_spec=codebuild.BuildSpec.from_source_filename("dockerbuild/buildspec.yml"),
            environment={
                'privileged': True,
            },
            environment_variables={
                'AWS_ACCOUNT_ID': codebuild.BuildEnvironmentVariable(value=self.account),
                'IMAGE_REPO_NAME': codebuild.BuildEnvironmentVariable(value=ghost_repo.repository_name)
            }
        )
Example #15
    def __init__(self, app: core.App, id: str, props, **kwargs) -> None:
        super().__init__(app, id, **kwargs)

        bucket = aws_s3.Bucket(
            self,
            "SourceBucket",
            bucket_name=f"flask-bucket-{core.Aws.ACCOUNT_ID}",
            versioned=True,
            removal_policy=core.RemovalPolicy.DESTROY)

        # ECR repository for Docker images
        ecr = aws_ecr.Repository(self,
                                 "ECR",
                                 repository_name="flask-repo",
                                 removal_policy=core.RemovalPolicy.DESTROY)

        ecr_build = aws_codebuild.PipelineProject(
            self,
            "ECRBuild",
            project_name="ecr-image-build",
            build_spec=aws_codebuild.BuildSpec.from_source_filename(
                filename='codebuild/ecr/buildspec.yml'),
            environment=aws_codebuild.BuildEnvironment(privileged=True, ),
            # pass the ecr repo uri into the codebuild project so codebuild knows where to push
            environment_variables={
                'ecr':
                aws_codebuild.BuildEnvironmentVariable(
                    value=ecr.repository_uri),
                'tag':
                aws_codebuild.BuildEnvironmentVariable(value='flask')
            },
            description='Pipeline for CodeBuild',
            timeout=core.Duration.minutes(30),
        )

        ecr.grant_pull_push(ecr_build)

        self.output_params = props.copy()
        self.output_params['ecr'] = ecr.repository_uri
        self.output_params['ecr_build'] = ecr_build
        self.output_params['bucket'] = bucket
Example #16
 def create_project(self, target_function, stage):
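     # CodeBuild project whose role is allowed to update the target Lambda;
     # FUNCTION_NAME and STAGE are exposed as plaintext environment variables.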
     project = codebuild.PipelineProject(
         self,
         self.create_id("Project", stage),
         project_name=self.create_name(stage),
         environment_variables={
             "FUNCTION_NAME": codebuild.BuildEnvironmentVariable(
                 value=target_function.function_name,
                 type=codebuild.BuildEnvironmentVariableType.PLAINTEXT),
             "STAGE": codebuild.BuildEnvironmentVariable(
                 value=stage,
                 type=codebuild.BuildEnvironmentVariableType.PLAINTEXT)
         }
     )
     project.add_to_role_policy(
         iam.PolicyStatement(
             resources=[target_function.function_arn],
             actions=['lambda:UpdateFunctionCode',
                      'lambda:UpdateFunctionConfiguration']
             )
         )
     return project
Example #17
    def build_environment_variables(self):
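        # Merge caller-supplied variables into the base set; the base keys always
        # win, and plain values are wrapped in BuildEnvironmentVariable.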
        base_environment = {
            'REPOSITORY_URI':
            aws_codebuild.BuildEnvironmentVariable(
                value=self.ecr_repository.repository_uri),
            'PIPELINE_NAME':
            aws_codebuild.BuildEnvironmentVariable(
                value=self.next_pipeline.pipeline_name),
            'REGION':
            aws_codebuild.BuildEnvironmentVariable(value=self.region)
        }

        build_environment = copy.deepcopy(self.build_environment)
        build_environment.pop('REPOSITORY_URI', None)
        build_environment.pop('PIPELINE_NAME', None)
        build_environment.pop('REGION', None)

        for key, value in build_environment.items():
            if not isinstance(value, aws_codebuild.BuildEnvironmentVariable):
                build_environment[
                    key] = aws_codebuild.BuildEnvironmentVariable(value=value)

        return {**base_environment, **build_environment}
Example #18
def buildenv(
    environment_variables: dict
) -> typing.Dict[str, cb.BuildEnvironmentVariable]:
    """Facilitate Codebuild environmemt creation
    Simply pass your dict to be turned a env variabltes
    For values that starts with 'aws:sm:', the prefix will be striped off
     and the Codebuild env variable type wil be set to SECRETS_MANAGER.

    Args:
        environment_variables (dict): key/value store to turn into Codebuild Env

    Returns:
        typing.Dict[cb.BuildEnvironmentVariable]: [description]
    """
    envs = dict()
    for env, value in environment_variables.items():
        if isinstance(value, str) and value.startswith('aws:sm:'):
            envs[env] = cb.BuildEnvironmentVariable(
                value=value.replace('aws:sm:', ''),
                type=cb.BuildEnvironmentVariableType.SECRETS_MANAGER)
        else:
            envs[env] = cb.BuildEnvironmentVariable(value=value)
    return envs
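# A minimal usage sketch (names here are illustrative, not from the original
# project): plain values become PLAINTEXT variables, while values prefixed
# with 'aws:sm:' are resolved from Secrets Manager at build time.
#
#   env_vars = buildenv({
#       'STAGE': 'dev',
#       'DOCKERHUB_PASSWORD': 'aws:sm:dockerhub/password',
#   })
#   project = cb.PipelineProject(scope, 'Project', environment_variables=env_vars)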
Example #19
    def __init__(self, app: core.App, id: str, props, **kwargs) -> None:
        super().__init__(app, id, **kwargs)

        ecs_build = aws_codebuild.PipelineProject(self, "ECSBuild",
            project_name="ecs-cluster-build",
            build_spec=aws_codebuild.BuildSpec.from_source_filename(
                filename='codebuild/ecs/buildspec.yml'),
            environment=aws_codebuild.BuildEnvironment(
                privileged=True,),
            environment_variables={
                'ecr': aws_codebuild.BuildEnvironmentVariable(
                    value=props['ecr'])
            },
            description='Pipeline for CodeBuild',
            timeout=core.Duration.minutes(30),
        )

        self.output_params = props.copy()
        self.output_params['ecs_build'] = ecs_build
Example #20
 def _create_build_and_assemble_action(
         self,
         input_artifact: aws_codepipeline.Artifact,
         output_artifact: aws_codepipeline.Artifact,
         pipeline_project: aws_codebuild.PipelineProject,
         config_source_bucket: str = None):
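     # Only wire CONFIG_SOURCE_BUCKET into the action when a config bucket is
     # actually supplied.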
     if config_source_bucket is None:
         return aws_codepipeline_actions.CodeBuildAction(
             action_name='BuildAndAssemble',
             input=input_artifact,
             project=pipeline_project,
             outputs=[output_artifact])
     else:
         return aws_codepipeline_actions.CodeBuildAction(
             action_name='BuildAndAssemble',
             input=input_artifact,
             project=pipeline_project,
             environment_variables={
                 'CONFIG_SOURCE_BUCKET':
                 aws_codebuild.BuildEnvironmentVariable(
                     value=config_source_bucket)
             },
             outputs=[output_artifact])
Example #21
    def _get_build_project(self):
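        # ECR repository plus a privileged PipelineProject that builds and pushes
        # the Docker image; REPO_URI tells the buildspec where to push.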
        ecr = aws_ecr.Repository(self,
                                 "ECR",
                                 repository_name="arronmoore-dev",
                                 removal_policy=core.RemovalPolicy.DESTROY)

        cb_docker_build = aws_codebuild.PipelineProject(
            self,
            "DockerBuild",
            project_name=f"arronmoore-dev-docker-build",
            build_spec=aws_codebuild.BuildSpec.from_source_filename(
                filename='buildspec.yml'),
            environment=aws_codebuild.BuildEnvironment(privileged=True, ),
            # pass the ecr repo uri into the codebuild project so codebuild knows where to push
            environment_variables={
                'REPO_URI':
                aws_codebuild.BuildEnvironmentVariable(
                    value=ecr.repository_uri)
            },
            description='Pipeline for CodeBuild',
            timeout=core.Duration.minutes(60),
        )
        ecr.grant_pull_push(cb_docker_build)
        return ecr, cb_docker_build
Example #22
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        code = codecommit.Repository(
            self, "CodeRepo", repository_name="iot-gg-cicd-workshop-repo")

        prod_deploy_param_bucket = s3.Bucket(
            self,
            "ProdDeployBucket",
            versioned=True,
        )

        prod_source_bucket = s3.Bucket(
            self,
            "ProdSourceBucket",
            versioned=True,
        )

        ssm.StringParameter(
            self,
            "ProdSourceBucketParameter",
            parameter_name="/iot-gg-cicd-workshop/s3/prod_source_bucket",
            string_value=prod_source_bucket.bucket_name,
        )
        ssm.StringParameter(
            self,
            "ProdDeployBucketParameter",
            parameter_name="/iot-gg-cicd-workshop/s3/prod_deploy_param_bucket",
            string_value=prod_deploy_param_bucket.bucket_name,
        )

        cdk_build = codebuild.PipelineProject(
            self,
            "Build",
            project_name="iot-gg-cicd-workshop-build",
            build_spec=codebuild.BuildSpec.from_source_filename(
                "buildspec.yml"),
            environment_variables={
                "AWS_DEFAULT_REGION":
                codebuild.BuildEnvironmentVariable(value=kwargs['env'].region)
            })

        add_policies(cdk_build, [
            "AWSCloudFormationFullAccess",
            "AmazonSSMFullAccess",
            "AmazonS3FullAccess",
            "AWSLambdaFullAccess",
            "IAMFullAccess",
        ])

        cdk_deploy_canary = codebuild.PipelineProject(
            self,
            "Deploy",
            project_name="iot-gg-cicd-workshop-deploy-canary",
            build_spec=codebuild.BuildSpec.from_source_filename(
                "deployspec.yml"),
            environment_variables={
                "AWS_DEFAULT_REGION":
                codebuild.BuildEnvironmentVariable(value=kwargs['env'].region)
            })

        add_policies(cdk_deploy_canary, [
            "AWSCloudFormationFullAccess", "AWSGreengrassFullAccess",
            "AmazonSSMFullAccess", "ResourceGroupsandTagEditorReadOnlyAccess",
            "AWSLambdaFullAccess", "AWSIoTFullAccess"
        ])

        source_output = codepipeline.Artifact()
        cdk_build_output = codepipeline.Artifact("CdkBuildOutput")

        codepipeline.Pipeline(
            self,
            "Pipeline",
            pipeline_name="iot-gg-cicd-workshop-pipeline-canary",
            stages=[
                codepipeline.StageProps(
                    stage_name="Source",
                    actions=[
                        codepipeline_actions.CodeCommitSourceAction(
                            action_name="CodeCommit_Source",
                            repository=code,
                            output=source_output)
                    ]),
                codepipeline.StageProps(
                    stage_name="Build_Package_Deploy_Lambda",
                    actions=[
                        codepipeline_actions.CodeBuildAction(
                            action_name="Build_Package_Deploy",
                            project=cdk_build,
                            input=source_output,
                            outputs=[cdk_build_output])
                    ]),
                codepipeline.StageProps(
                    stage_name="Deploy_GreenGrass_Canary",
                    actions=[
                        codepipeline_actions.CodeBuildAction(
                            action_name="Deploy_Canary",
                            project=cdk_deploy_canary,
                            input=cdk_build_output)
                    ]),
            ])

        cdk_deploy_prod = codebuild.PipelineProject(
            self,
            "DeployProd",
            project_name="iot-gg-cicd-workshop-deploy-main",
            build_spec=codebuild.BuildSpec.from_object(
                dict(
                    version="0.2",
                    phases=dict(install=dict(commands=[
                        "apt-get install zip",
                        "PROD_SOURCE_BUCKET=$(aws ssm get-parameter --name '/iot-gg-cicd-workshop/s3/prod_source_bucket' --with-decryption --query 'Parameter.Value' --output text)",
                        "aws s3 cp s3://$PROD_SOURCE_BUCKET/prod_deploy.zip prod_deploy.zip",
                        "unzip -o prod_deploy.zip", "ls -la", "make clean init"
                    ]),
                                build=dict(commands=[
                                    "ls -la",
                                    "make deploy-greengrass-prod",
                                ])),
                    artifacts={
                        "base-directory": ".",
                        "files": ["**/*"]
                    })),
            # The build image is a project setting, not part of the buildspec document.
            environment=codebuild.BuildEnvironment(
                build_image=codebuild.LinuxBuildImage.STANDARD_2_0))

        add_policies(cdk_deploy_prod, [
            "AWSCloudFormationFullAccess", "AWSGreengrassFullAccess",
            "AmazonSSMFullAccess", "ResourceGroupsandTagEditorReadOnlyAccess",
            "AWSLambdaFullAccess"
        ])

        prod_source_output = codepipeline.Artifact()
        codepipeline.Pipeline(
            self,
            "PipelineProd",
            pipeline_name="iot-gg-cicd-workshop-pipeline-main",
            stages=[
                codepipeline.StageProps(
                    stage_name="Source",
                    actions=[
                        codepipeline_actions.S3SourceAction(
                            action_name="S3_Source",
                            bucket=prod_deploy_param_bucket,
                            bucket_key="deploy_params.zip",
                            output=prod_source_output)
                    ]),
                codepipeline.StageProps(
                    stage_name="Deploy_GreenGrass_Prod",
                    actions=[
                        codepipeline_actions.CodeBuildAction(
                            action_name="Deploy_Prod",
                            project=cdk_deploy_prod,
                            input=prod_source_output)
                    ]),
            ])
        prod_source_bucket.grant_read_write(cdk_deploy_canary.role)
        prod_source_bucket.grant_read(cdk_deploy_prod.role)
        prod_deploy_param_bucket.grant_read_write(cdk_deploy_canary.role)
Example #23
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        notification_email = ssm.StringParameter.value_from_lookup(
            self,
            parameter_name='/serverless-pipeline/sns/notifications/primary-email'
        )

        github_user = ssm.StringParameter.value_from_lookup(
            self,
            parameter_name='/serverless-pipeline/codepipeline/github/user'
        )

        github_repo = ssm.StringParameter.value_from_lookup(
            self,
            parameter_name='/serverless-pipeline/codepipeline/github/repo'
        )

        github_token = core.SecretValue.secrets_manager(
            '/serverless-pipeline/secrets/github/token',
            json_field='github-token',
        )

        artifact_bucket = s3.Bucket(
            self, 'BuildArtifactsBucket',
            removal_policy=core.RemovalPolicy.RETAIN,
            encryption=s3.BucketEncryption.KMS_MANAGED,
            versioned=True,
        )

        build_project = build.PipelineProject(
            self, 'BuildProject',
            project_name='serverless-pipeline',
            description='Build project for the serverless-pipeline',
            environment=build.BuildEnvironment(
                build_image=build.LinuxBuildImage.STANDARD_2_0,
            ),
            environment_variables={
                'BUILD_ARTIFACT_BUCKET': build.BuildEnvironmentVariable(value=artifact_bucket.bucket_name),
            },
            cache=build.Cache.bucket(artifact_bucket, prefix='codebuild-cache'),
            build_spec=build.BuildSpec.from_object({
                'version': '0.2',
                'phases': {
                    'install': {
                        'runtime-versions': {
                            'nodejs': 10,
                        },
                        'commands': [
                            'echo "--------INSTALL PHASE--------"',
                            'pip3 install aws-sam-cli',
                        ]
                    },
                    'pre_build': {
                        'commands': [
                            'echo "--------PREBUILD PHASE--------"',
                            '# Example shows installation of NPM dependencies for shared deps (layers) in a SAM App',
                            '# cd functions/dependencies/shared_deps_one/nodejs',
                            '# npm install && cd',
                            '# cd functions/dependencies/shared_deps_two/nodejs',
                            '# npm install && cd',
                        ]
                    },
                    'build': {
                        'commands': [
                            'echo "--------BUILD PHASE--------"',
                            'echo "Starting SAM packaging `date` in `pwd`"',
                            'sam package --template-file template.yaml --s3-bucket $BUILD_ARTIFACT_BUCKET --output-template-file packaged.yaml',
                        ]
                    },
                    'post_build': {
                        'commands': [
                            'echo "--------POST-BUILD PHASE--------"',
                            'echo "SAM packaging completed on `date`"',
                        ]
                    }
                },
                'artifacts': {
                    'files': ['packaged.yaml'],
                    'discard-paths': 'yes',
                },
                'cache': {
                    'paths': ['/root/.cache/pip'],
                }
            })
        )

        serverless_pipeline = pipeline.Pipeline(
            self, 'ServerlessPipeline',
            artifact_bucket=artifact_bucket,
            pipeline_name='serverless-pipeline',
            restart_execution_on_update=True,
        )

        source_output = pipeline.Artifact()
        build_output = pipeline.Artifact()
        cfn_output = pipeline.Artifact()

        # NOTE: This Stage/Action requires a manual OAuth handshake in the browser be complete before automated deployment can occur
        # Create a new Pipeline in the console, manually authorize GitHub as a source, and then cancel the pipeline wizard.
        serverless_pipeline.add_stage(stage_name='Source', actions=[
            actions.GitHubSourceAction(
                action_name='SourceCodeRepo',
                owner=github_user,
                oauth_token=github_token,
                repo=github_repo,
                branch='master',
                output=source_output,
            )
        ])
        serverless_pipeline.add_stage(stage_name='Build', actions=[
            actions.CodeBuildAction(
                action_name='CodeBuildProject',
                input=source_output,
                outputs=[build_output],
                project=build_project,
                type=actions.CodeBuildActionType.BUILD,
            )
        ])
        serverless_pipeline.add_stage(stage_name='Staging', actions=[
            actions.CloudFormationCreateReplaceChangeSetAction(
                action_name='CreateChangeSet',
                admin_permissions=True,
                change_set_name='serverless-pipeline-changeset-Staging',
                stack_name='ServerlessPipelineStaging',
                template_path=pipeline.ArtifactPath(
                    build_output,
                    file_name='packaged.yaml'
                ),
                capabilities=[cfn.CloudFormationCapabilities.ANONYMOUS_IAM],
                run_order=1,
            ),
            actions.CloudFormationExecuteChangeSetAction(
                action_name='ExecuteChangeSet',
                change_set_name='serverless-pipeline-changeset-Staging',
                stack_name='ServerlessPipelineStaging',
                output=cfn_output,
                run_order=2,
            ),
        ])

        serverless_pipeline.add_stage(stage_name='Production', actions=[
            actions.CloudFormationCreateReplaceChangeSetAction(
                action_name='CreateChangeSet',
                admin_permissions=True,
                change_set_name='serverless-pipeline-changeset-Production',
                stack_name='ServerlessPipelineProduction',
                template_path=pipeline.ArtifactPath(
                    build_output,
                    file_name='packaged.yaml'
                ),
                capabilities=[cfn.CloudFormationCapabilities.ANONYMOUS_IAM],
                run_order=1,
            ),
            actions.ManualApprovalAction(
                action_name='DeploymentApproval',
                notify_emails=[notification_email],
                run_order=2,
            ),
            actions.CloudFormationExecuteChangeSetAction(
                action_name='ExecuteChangeSet',
                change_set_name='serverless-pipeline-changeset-Production',
                stack_name='ServerlessPipelineProduction',
                output=cfn_output,
                run_order=3,
            ),
        ])

        core.CfnOutput(
            self, 'BuildArtifactsBucketOutput',
            value=artifact_bucket.bucket_name,
            description='Amazon S3 Bucket for Pipeline and Build artifacts',
        )
        core.CfnOutput(
            self, 'CodeBuildProjectOutput',
            value=build_project.project_arn,
            description='CodeBuild Project name',
        )
        core.CfnOutput(
            self, 'CodePipelineOutput',
            value=serverless_pipeline.pipeline_arn,
            description='AWS CodePipeline pipeline name',
        )
Example #24
    def __init__(self, scope: core.Construct, construct_id: str, **kwargs) -> None:
        super().__init__(scope, construct_id, **kwargs)

        # The code that defines your stack goes here

        pipeline = codepipeline.Pipeline(
            self, "Pipeline", artifact_bucket=s3.Bucket(self, "ArtifactBucket")
        )

        # Define the 'source' stage to be triggered by a webhook on the GitHub
        # repo for the code. Don't be fooled by the name, it's just a codestar
        # connection in the background. Bitbucket isn't involved.
        source_output = codepipeline.Artifact("SourceOutput")
        github_source = pipeline_actions.BitBucketSourceAction(
            action_name="Github_Source",
            connection_arn=core.SecretValue.secrets_manager(
                secret_id="folksgl_github_connection_arn", json_field="arn"
            ).to_string(),
            repo="sam-cicd-python-template",
            owner="folksgl",
            branch="main",
            output=source_output,
        )
        pipeline.add_stage(stage_name="Source", actions=[github_source])

        # Define the 'build' stage
        build_project = codebuild.PipelineProject(
            scope=self,
            id="Build",
            # Declare the pipeline artifact bucket name as an environment variable
            # so the build can send the deployment package to it.
            environment_variables={
                "PACKAGE_BUCKET": codebuild.BuildEnvironmentVariable(
                    value=pipeline.artifact_bucket.bucket_name,
                    type=codebuild.BuildEnvironmentVariableType.PLAINTEXT,
                )
            },
            environment=codebuild.BuildEnvironment(
                build_image=codebuild.LinuxBuildImage.STANDARD_3_0
            ),
        )
        build_stage_output = codepipeline.Artifact("BuildStageOutput")
        build_action = pipeline_actions.CodeBuildAction(
            action_name="Build",
            project=build_project,
            input=source_output,
            outputs=[build_stage_output],
        )
        pipeline.add_stage(stage_name="Build", actions=[build_action])

        # Define the 'deploy' stage
        stack_name = "gateway-service-python"
        change_set_name = f"{stack_name}-changeset"

        create_change_set = pipeline_actions.CloudFormationCreateReplaceChangeSetAction(
            action_name="CreateChangeSet",
            stack_name=stack_name,
            change_set_name=change_set_name,
            template_path=build_stage_output.at_path("packaged.yaml"),
            admin_permissions=True,
            run_order=1,
        )
        execute_change_set = pipeline_actions.CloudFormationExecuteChangeSetAction(
            action_name="Deploy",
            stack_name=stack_name,
            change_set_name=change_set_name,
            run_order=2,
        )
        pipeline.add_stage(
            stage_name="DevDeployment", actions=[create_change_set, execute_change_set]
        )
Example #25
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        # ====================================
        # ECR
        # ====================================
        ecr_repository = aws_ecr.Repository(
            self,
            id='ecr_repository',
            repository_name='sample_repository',
            removal_policy=core.RemovalPolicy.DESTROY)

        # ====================================
        # Build Docker Image
        # ====================================
        # codebuild project meant to run in pipeline
        cb_docker_build = aws_codebuild.PipelineProject(
            self,
            "DockerBuild",
            project_name='continuous-delivery',
            # f"{props['namespace']}-Docker-Build",
            build_spec=aws_codebuild.BuildSpec.from_source_filename(
                filename='batch/docker_build_buildspec.yml'),
            environment=aws_codebuild.BuildEnvironment(privileged=True, ),
            # pass the ecr repo uri into the codebuild project so codebuild knows where to push
            environment_variables={
                'ecr':
                aws_codebuild.BuildEnvironmentVariable(
                    value=ecr_repository.repository_uri),
                'tag':
                aws_codebuild.BuildEnvironmentVariable(value='sample-batch')
            },
            description='Pipeline for CodeBuild',
            timeout=core.Duration.minutes(60),
        )
        # ====================================
        # VPC
        # ====================================
        vpc = aws_ec2.Vpc(self,
                          id='vpc',
                          cidr='10.0.0.0/16',
                          max_azs=2,
                          nat_gateways=1,
                          vpn_gateway=False)

        # ====================================
        # ECS
        # ====================================
        # Create ecs cluester.
        ecs_cluster = aws_ecs.Cluster(
            self,
            id='ecs_cluster',
            cluster_name='sample_fargate_batch_cluster',
            vpc=vpc)

        # Create fargate task definition.
        fargate_task_definition = aws_ecs.FargateTaskDefinition(
            self,
            id='fargate-task-definition',
            cpu=256,
            memory_limit_mib=512,
            family='fargate-task-definition')

        # Add container to task definition.
        fargate_task_definition.add_container(
            id='container',
            image=aws_ecs.ContainerImage.from_ecr_repository(ecr_repository),
            logging=aws_ecs.LogDriver.aws_logs(
                stream_prefix='ecs',
                log_group=aws_logs.LogGroup(
                    self,
                    id='log-group',
                    log_group_name='/ecs/fargate/fargate-batch')))

        # Create cloud watch event rule.
        rule = aws_events.Rule(
            self,
            id='rule',
            rule_name='execute-task-rule',
            description='Event rule to execute ecs task.',
            schedule=aws_events.Schedule.cron(
                day=None,
                hour=None,
                minute='*/5',  # execute by every 5 minutes.
                month=None,
                week_day=None,
                year=None))

        rule.add_target(target=aws_events_targets.EcsTask(
            cluster=ecs_cluster,
            task_definition=fargate_task_definition,
            task_count=1))
Example #26
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        # The code that defines your stack goes here

        base_api = _apigw.RestApi(self, 'PetclinicApiGatewayWithCors')

        api_resource = base_api.root.add_resource('api')

        self.add_cors_options(api_resource)

        website_bucket = _s3.Bucket(self,
                                    'PetclinicWebsite',
                                    website_index_document='index.html',
                                    public_read_access=True,
                                    removal_policy=core.RemovalPolicy.DESTROY)

        # Warm Lambda function Event rule
        event_rule = _events.Rule(self,
                                  'PetclinicLambdaWarmRule',
                                  schedule=_events.Schedule.rate(
                                      core.Duration.minutes(3)))

        code = _commit.Repository(
            self,
            'ServerlessCode',
            repository_name='spring-petclinic-static-resource')

        build_project = _build.PipelineProject(
            self,
            'StaticWebBuild',
            build_spec=_build.BuildSpec.from_object({
                'version': 0.2,
                'phases': {
                    'install': {
                        'runtime-versions': {
                            'java': 'openjdk8'
                        },
                        'commands': []
                    },
                    'build': {
                        'commands': [
                            'mv scripts/config.js scripts/config.js.origin',
                            'sed -e "s,http://localhost:8081/,$API_ENDPOINT,g" scripts/config.js.origin > scripts/config.js'
                        ]
                    },
                },
                'artifacts': {
                    'files': '**/*'
                },
            }),
            environment_variables={
                'API_ENDPOINT':
                _build.BuildEnvironmentVariable(value=base_api.url)
            },
            environment=_build.BuildEnvironment(
                build_image=_build.LinuxBuildImage.STANDARD_2_0))

        source_output = _pipeline.Artifact('SourceOutput')
        build_output = _pipeline.Artifact('BuildOutput')

        pipeline = _pipeline.Pipeline(
            self,
            'ServerlessPipeline',
            stages=[{
                'stageName':
                'Source',
                'actions': [
                    _action.CodeCommitSourceAction(
                        action_name='CodeCommit_Source',
                        repository=code,
                        output=source_output)
                ]
            }, {
                'stageName':
                'Build',
                'actions': [
                    _action.CodeBuildAction(action_name='CodeBuild_Static',
                                            project=build_project,
                                            input=source_output,
                                            outputs=[build_output])
                ]
            }, {
                'stageName':
                'Deploy',
                'actions': [
                    _action.S3DeployAction(action_name='Web_Static_Deploy',
                                           input=build_output,
                                           bucket=website_bucket)
                ]
            }])
        core.CfnOutput(self,
                       'RuleArn',
                       export_name='RuleArn',
                       value=event_rule.rule_arn)
        core.CfnOutput(self,
                       'PetclinicApiGatewayWithCorsId',
                       export_name='PetclinicApiGatewayWithCorsId',
                       value=base_api.rest_api_id)
        core.CfnOutput(self,
                       "PetclinicWebsiteUrl",
                       export_name="PetclinicWebsiteUrl",
                       value=website_bucket.bucket_website_url)

    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        eks_vpc = ec2.Vpc(self, "VPC", cidr="10.0.0.0/16")

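        # Tag the stack and its subnets so Kubernetes can place internal and internet-facing load balancers.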
        self.node.apply_aspect(
            core.Tag("kubernetes.io/cluster/cluster", "shared"))

        eks_vpc.private_subnets[0].node.apply_aspect(
            core.Tag("kubernetes.io/role/internal-elb", "1"))
        eks_vpc.private_subnets[1].node.apply_aspect(
            core.Tag("kubernetes.io/role/internal-elb", "1"))
        eks_vpc.public_subnets[0].node.apply_aspect(
            core.Tag("kubernetes.io/role/elb", "1"))
        eks_vpc.public_subnets[1].node.apply_aspect(
            core.Tag("kubernetes.io/role/elb", "1"))

        # Create IAM Role For CodeBuild and Cloud9
        codebuild_role = iam.Role(
            self,
            "BuildRole",
            assumed_by=iam.CompositePrincipal(
                iam.ServicePrincipal("codebuild.amazonaws.com"),
                iam.ServicePrincipal("ec2.amazonaws.com")),
            managed_policies=[
                iam.ManagedPolicy.from_aws_managed_policy_name(
                    "AdministratorAccess")
            ])

        instance_profile = iam.CfnInstanceProfile(
            self, "InstanceProfile", roles=[codebuild_role.role_name])

        # Create CodeBuild PipelineProject
        build_project = codebuild.PipelineProject(
            self,
            "BuildProject",
            role=codebuild_role,
            build_spec=codebuild.BuildSpec.from_source_filename(
                "buildspec.yml"))

        # Create CodePipeline
        pipeline = codepipeline.Pipeline(
            self,
            "Pipeline",
        )

        # Create Artifact
        artifact = codepipeline.Artifact()

        # S3 Source Bucket
        source_bucket = s3.Bucket.from_bucket_attributes(
            self,
            "SourceBucket",
            bucket_arn=core.Fn.join(
                "",
                ["arn:aws:s3:::ee-assets-prod-",
                 core.Fn.ref("AWS::Region")]))

        # Add Source Stage
        pipeline.add_stage(
            stage_name="Source",
            actions=[
                codepipeline_actions.S3SourceAction(
                    action_name="S3SourceRepo",
                    bucket=source_bucket,
                    bucket_key=
                    "modules/2cae1f20008d4fc5aaef294602649b98/v9/source.zip",
                    output=artifact,
                    trigger=codepipeline_actions.S3Trigger.NONE)
            ])

        # Add CodeBuild stage, passing the subnet IDs, instance profile, region and account to the buildspec
        pipeline.add_stage(
            stage_name="Deploy",
            actions=[
                codepipeline_actions.CodeBuildAction(
                    action_name="CodeBuildProject",
                    project=build_project,
                    type=codepipeline_actions.CodeBuildActionType.BUILD,
                    input=artifact,
                    environment_variables={
                        'PublicSubnet1ID':
                        codebuild.BuildEnvironmentVariable(
                            value=eks_vpc.public_subnets[0].subnet_id),
                        'PublicSubnet2ID':
                        codebuild.BuildEnvironmentVariable(
                            value=eks_vpc.public_subnets[1].subnet_id),
                        'PrivateSubnet1ID':
                        codebuild.BuildEnvironmentVariable(
                            value=eks_vpc.private_subnets[0].subnet_id),
                        'PrivateSubnet2ID':
                        codebuild.BuildEnvironmentVariable(
                            value=eks_vpc.private_subnets[1].subnet_id),
                        'AWS_DEFAULT_REGION':
                        codebuild.BuildEnvironmentVariable(value=self.region),
                        'INSTANCEPROFILEID':
                        codebuild.BuildEnvironmentVariable(
                            value=instance_profile.ref),
                        'AWS_ACCOUNT_ID':
                        codebuild.BuildEnvironmentVariable(value=self.account)
                    })
            ])

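        # Cloud9 IDE provisioned as a nested CloudFormation stack from a hosted template,
        # placed in the VPC's first public subnet.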
        cloud9_stack = cloudformation.CfnStack(
            self,
            "Cloud9Stack",
            #            template_url="https://aws-quickstart.s3.amazonaws.com/quickstart-cloud9-ide/templates/cloud9-ide-instance.yaml",
            template_url=
            "https://ee-assets-prod-us-east-1.s3.amazonaws.com/modules/2cae1f20008d4fc5aaef294602649b98/v9/cloud9-ide-instance.yaml",
            parameters={
                "C9InstanceType": "m5.large",
                "C9Subnet": eks_vpc.public_subnets[0].subnet_id
            })

        pipeline.node.add_dependency(eks_vpc)
        pipeline.node.add_dependency(cloud9_stack)
Exemple #28
0
             json_field=deployment_secret['json-fields']['github-oauth-token']),
         trigger=codepipeline_actions.GitHubTrigger.WEBHOOK,
         output=source_output,
     )]),
 codepipeline.StageProps(stage_name='Self-Update', actions=[
     codepipeline_actions.CodeBuildAction(
         action_name='Self_Deploy',
         project=codebuild.PipelineProject(
             stack, 'CodePipelineBuild',
             build_spec=codebuild.BuildSpec.from_source_filename(
                 'codepipeline/pipelines-buildspec.yaml'),
             role=code_build_role,
             environment=codebuild.BuildEnvironment(
                 build_image=codebuild.LinuxBuildImage.STANDARD_4_0,
                 environment_variables={
                     'PROJECT_DIR': codebuild.BuildEnvironmentVariable(value='codepipeline'),
                     'STACK_FILE': codebuild.BuildEnvironmentVariable(value='release_pipeline.py')
                 }
             )
         ),
         input=source_output
     )
 ]),
 codepipeline.StageProps(stage_name='PyPi-Release', actions=[
     codepipeline_actions.CodeBuildAction(
         action_name='PyPi_Release',
         project=codebuild.PipelineProject(
             stack, 'PyPiReleaseBuild',
             build_spec=codebuild.BuildSpec.from_source_filename(
                 'codepipeline/release-buildspec.yaml'),
             role=code_build_role,
Exemple #29
0
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

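        # ECR registry URI and application name used to parameterize the inline buildspec below.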
        uri = self.account + '.dkr.ecr.' + self.region + '.amazonaws.com'
        appl = 'colorteller'
        buildspec = {
            'version': '0.2',
            'phases': {
                'install': {
                    'commands': ['echo install step']
                },
                'pre_build': {
                    'commands': [
                        'echo logging in to AWS ECR...',
                        '$(aws ecr get-login --no-include-email --region %s)' %
                        self.region
                    ]
                },
                'build': {
                    'commands': [
                        'echo building Docker image...',
                        'cd appmeshdemo/colorapp/%s' % appl,
                        'docker build -t %s:latest .' % appl,
                        'docker tag %s:latest %s/%s:latest' % (appl, uri, appl)
                    ]
                },
                'post_build': {
                    'commands': [
                        'echo Docker image build complete!',
                        'echo push latest Docker images to ECR...',
                        'docker push %s/%s:latest' % (uri, appl)
                    ]
                }
            }
        }

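        # Privileged (Docker-enabled) build environment with region, account and image name variables.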
        buildenviron = codebuild.BuildEnvironment(
            privileged=True,
            build_image=codebuild.LinuxBuildImage.UBUNTU_14_04_DOCKER_18_09_0,
            environment_variables={
                'AWS_DEFAULT_REGION':
                codebuild.BuildEnvironmentVariable(value=self.region),
                'AWS_ACCOUNT_ID':
                codebuild.BuildEnvironmentVariable(value=self.account),
                'IMAGE_REPO_NAME':
                codebuild.BuildEnvironmentVariable(value=appl),
                'IMAGE_TAG':
                codebuild.BuildEnvironmentVariable(value='latest')
            })

        proj = codebuild.Project(
            self,
            appl,
            build_spec=codebuild.BuildSpec.from_object(buildspec),
            environment=buildenviron)
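
        # Custom resource that calls the CodeBuild startBuild API on every stack create and update,
        # so the image is built automatically when the stack is deployed.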
        call = custom.AwsSdkCall(service='CodeBuild',
                                 action='startBuild',
                                 parameters={'projectName': proj.project_name},
                                 physical_resource_id='Custom%s' %
                                 proj.project_name)

        custom.AwsCustomResource(self,
                                 'CustomCodeBuild',
                                 on_create=call,
                                 on_update=call)

    def __init__(self, scope: core.Construct, id: str, vpc, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        name = "graviton2-aspnet-lab"

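        # ECR repository for the built image and CodeCommit repository for the application source.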
        container_repository = ecr.Repository(scope=self,
                                              id=f"{name}-container",
                                              repository_name=f"{name}")

        codecommit_repo = codecommit.Repository(
            scope=self,
            id=f"{name}-container-git",
            repository_name=f"{name}",
            description=f"Application code")

        pipeline = codepipeline.Pipeline(scope=self,
                                         id=f"{name}-container--pipeline",
                                         pipeline_name=f"{name}")

        source_output = codepipeline.Artifact()
        docker_output_arm64 = codepipeline.Artifact("ARM64_BuildOutput")

        buildspec_arm64 = codebuild.BuildSpec.from_source_filename(
            "arm64-dotnet-buildspec.yml")

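        # CodeBuild project running on an ARM64 (Graviton2) build image; privileged mode enables Docker builds.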
        docker_build_arm64 = codebuild.PipelineProject(
            scope=self,
            id=f"DockerBuild_ARM64",
            environment=dict(
                build_image=codebuild.LinuxBuildImage.AMAZON_LINUX_2_ARM,
                privileged=True),
            environment_variables={
                'REPO_ECR':
                codebuild.BuildEnvironmentVariable(
                    value=container_repository.repository_uri),
            },
            build_spec=buildspec_arm64)

        container_repository.grant_pull_push(docker_build_arm64)

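        # Allow the build role to pull image layers from any ECR repository in this account and region.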
        docker_build_arm64.add_to_role_policy(
            iam.PolicyStatement(
                effect=iam.Effect.ALLOW,
                actions=[
                    "ecr:BatchCheckLayerAvailability",
                    "ecr:GetDownloadUrlForLayer", "ecr:BatchGetImage"
                ],
                resources=[
                    f"arn:{core.Stack.of(self).partition}:ecr:{core.Stack.of(self).region}:{core.Stack.of(self).account}:repository/*"
                ],
            ))

        source_action = codepipeline_actions.CodeCommitSourceAction(
            action_name="CodeCommit_Source",
            repository=codecommit_repo,
            output=source_output,
            branch="master")

        pipeline.add_stage(stage_name="Source", actions=[source_action])

        pipeline.add_stage(stage_name="DockerBuild",
                           actions=[
                               codepipeline_actions.CodeBuildAction(
                                   action_name=f"DockerBuild_ARM64",
                                   project=docker_build_arm64,
                                   input=source_output,
                                   outputs=[docker_output_arm64])
                           ])

        # Outputs
        core.CfnOutput(scope=self,
                       id="application_repository",
                       value=codecommit_repo.repository_clone_url_http)