def __init__(self, scope: core.Construct, id: str, build_image: assets.DockerImageAsset, build_role: iam.Role, **kwargs) -> None:
    """CodeBuild project that deploys the FinSurf infrastructure from GitHub.

    :param build_image: Docker image asset used as the build environment.
    :param build_role: IAM role assumed by the CodeBuild project.
    """
    super().__init__(scope, id, **kwargs)
    # Shallow clone of the public repo; no webhook (triggered elsewhere).
    self.github_master_source = b.Source.git_hub(
        clone_depth=1,
        owner='dr-natetorious',
        repo='app-FinSurf',
        webhook=False)
    self.build_project = b.Project(
        self, 'DeployInfra',
        project_name='Deploy-FinSurf-Infra',
        source=self.github_master_source,
        environment=b.BuildEnvironment(
            # NOTE(review): image_uri is a CDK token at synth time; splitting on
            # ':' presumably yields the image tag — confirm this resolves correctly.
            build_image=b.LinuxBuildImage.from_ecr_repository(
                repository=build_image.repository,
                tag=build_image.image_uri.split(':')[-1]),
            environment_variables={
            },
            compute_type=b.ComputeType.SMALL),
        role=build_role,
        # Buildspec is read from the source repo, not inlined here.
        build_spec=b.BuildSpec.from_source_filename(filename='cicd/configs/buildspec-cdk-infra.yml'),
    )
def __init__(self, scope: core.Construct, id: str, props, **kwargs) -> None:
    """CodeBuild project for umccrise, triggered by semver-tagged GitHub pushes.

    :param props: dict of settings; must contain 'codebuild_project_name'.
    """
    super().__init__(scope, id, **kwargs)
    # Reference-data bucket is imported by name, not created by this stack.
    refdata = s3.Bucket.from_bucket_attributes(
        self, 'reference_data', bucket_name='umccr-refdata-dev')
    # Docker-in-Docker image with privileged mode so the build can build/push containers.
    build_env = cb.BuildEnvironment(
        build_image=cb.LinuxBuildImage.from_docker_registry("docker:dind"),
        privileged=True,
        compute_type=cb.ComputeType.LARGE)
    cb_project = cb.Project(
        self,
        id="UmccriseCodeBuildProject",
        project_name=props['codebuild_project_name'],
        environment=build_env,
        timeout=core.Duration.hours(3),
        source=cb.Source.git_hub(
            identifier="umccrise",
            owner="umccr",
            repo="umccrise",
            clone_depth=1,
            webhook=True,
            webhook_filters=[
                # NOTE(review): semver_tag_regex is a free variable — it must be
                # defined at an enclosing scope; confirm.
                cb.FilterGroup.in_event_of(
                    cb.EventAction.PUSH).and_tag_is(semver_tag_regex)
            ]))
    # Tackle IAM permissions
    # https://stackoverflow.com/questions/38587325/aws-ecr-getauthorizationtoken/54806087
    cb_project.role.add_managed_policy(
        iam.ManagedPolicy.from_aws_managed_policy_name(
            'AmazonEC2ContainerRegistryPowerUser'))
    refdata.grant_read(cb_project)
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    """CDK pipeline for cloudsatlhr: GitHub source, pip install + cdk synth."""
    super().__init__(scope, id, **kwargs)
    src_artifact = codepipeline.Artifact()
    assembly_artifact = codepipeline.Artifact()
    # Privileged Amazon Linux 2 image for the synth step.
    synth_env = codebuild.BuildEnvironment(
        build_image=codebuild.LinuxBuildImage.AMAZON_LINUX_2_3,
        privileged=True)
    source = actions.GitHubSourceAction(
        action_name="GitHub",
        output=src_artifact,
        oauth_token=core.SecretValue.secrets_manager("github-token"),
        owner="JakeHendy",
        repo="cloudsatlhr")
    synth = pipelines.SimpleSynthAction(
        source_artifact=src_artifact,
        subdirectory="source",
        synth_command="npx cdk synth",
        install_command="pip install -r requirements.txt",
        environment=synth_env,
        cloud_assembly_artifact=assembly_artifact)
    the_pipeline = pipelines.CdkPipeline(
        self, "Pipeline",
        pipeline_name="DefaultPipeline",
        cloud_assembly_artifact=assembly_artifact,
        source_action=source,
        synth_action=synth)
    the_pipeline.add_application_stage(AcquisitionStack(self, "AcqusitionStackDev"))
def create_build_project(self, role, source_bucket):
    """Create the demo CodeBuild project sourced from an S3 archive.

    :param role: IAM role the project runs under.
    :param source_bucket: bucket holding both the source zip and the output artifact.
    :returns: the created CodeBuild Project.
    """
    build_project = _cb.Project(
        self, 'CodeBuildProject',
        project_name='DEMO-BUILD',
        source=_cb.Source.s3(
            bucket=source_bucket,
            path='archive.zip'
        ),
        # Privileged mode so docker builds can run inside the build container.
        environment=_cb.BuildEnvironment(
            build_image=_cb.LinuxBuildImage.STANDARD_3_0,
            privileged=True
        ),
        environment_variables={
            'IMAGE_REPO_NAME': _cb.BuildEnvironmentVariable(value='demo-repository'),
            # NOTE(review): os.environ.get returns None when unset, which would
            # fail at synth — confirm REGION/ACCOUNT_ID are always exported.
            'AWS_DEFAULT_REGION': _cb.BuildEnvironmentVariable(value=os.environ.get('REGION')),
            'AWS_ACCOUNT_ID': _cb.BuildEnvironmentVariable(value=os.environ.get('ACCOUNT_ID')),
            'CONTAINER_NAME': _cb.BuildEnvironmentVariable(value='DEMO-CONTAINER'),
        },
        build_spec=_cb.BuildSpec.from_source_filename(filename='etc/cicd/buildspec.yml'),
        # Output goes back to the same bucket as a fixed-name zip (no build id suffix).
        artifacts=_cb.Artifacts.s3(
            bucket=source_bucket,
            name='artifact-codebuild.zip',
            package_zip=True,
            include_build_id=False
        ),
        role=role
    )
    return build_project
def __init__(self, scope: core.Construct, id: str, project_git_url, lambda_code_bucket, policy_resources):
    """ CodeBuild project """
    super().__init__(scope, id)
    stack_name = core.Stack.of(self).stack_name
    # Shallow clone from GitHub Enterprise.
    ghe_source = codebuild.Source.git_hub_enterprise(
        https_clone_url=project_git_url, clone_depth=1)
    environment = codebuild.BuildEnvironment(
        build_image=codebuild.LinuxBuildImage.AMAZON_LINUX_2_2,
        compute_type=codebuild.ComputeType.MEDIUM)
    artifacts = codebuild.Artifacts.s3(name=stack_name, bucket=lambda_code_bucket)
    self.project = codebuild.Project(
        self,
        "project",
        project_name=f"{stack_name}-{names.CODEBUILD_PROJECT}",
        source=ghe_source,
        environment=environment,
        artifacts=artifacts,
        badge=True,
        timeout=core.Duration.minutes(5))
    # Let the build publish new Lambda code for the given resources.
    lambda_actions = [
        "lambda:UpdateFunctionCode",
        "lambda:PublishVersion",
        "lambda:UpdateAlias"
    ]
    self.project.add_to_role_policy(
        iam.PolicyStatement(resources=policy_resources, actions=lambda_actions))
def __init__(self, scope: core.Construct, id: str, buildspec, **kwargs):
    """PipelineProject with docker (privileged) support and an admin policy.

    :param buildspec: path to the buildspec file within the source.
    """
    super().__init__(scope, id, **kwargs)
    self.buildspec = buildspec
    self.build_image = codebuild.LinuxBuildImage.STANDARD_2_0
    self.project = codebuild.PipelineProject(
        self, "Project",
        environment=codebuild.BuildEnvironment(
            build_image=self.build_image,
            privileged=True),
        build_spec=codebuild.BuildSpec.from_source_filename(
            self.buildspec),
        environment_variables={
            # NOTE(review): 'config' is a free variable — presumably a module-level
            # config mapping; confirm.
            'REPO_NAME': codebuild.BuildEnvironmentVariable(
                value=config['CODEPIPELINE']['GITHUB_REPO'])
        },
    )
    # TODO: Don't need admin, let's make this least privilege
    self.admin_policy = iam.Policy(
        self, "AdminPolicy",
        roles=[self.project.role],
        statements=[iam.PolicyStatement(
            actions=['*'],
            resources=['*'],
        )])
def __init__(
        self,
        scope: core.Construct,
        id: str,
        repo: str,
        artifacts_bucket: str,
        owner: str = 'SeedCompany',
        create_bucket: bool = False,  # if true and bucket_name exists, then the artifact bucket will be created
        **kwargs
) -> None:
    """GitHub-sourced build project for *repo*, publishing artifacts to S3."""
    super().__init__(scope, id, **kwargs)
    # Either create the artifact bucket or import an existing one by name.
    if create_bucket:
        artifact_store = s3.Bucket(self, artifacts_bucket, bucket_name=artifacts_bucket)
    else:
        artifact_store = s3.Bucket.from_bucket_name(self, artifacts_bucket, artifacts_bucket)
    build_artifacts = codebuild.Artifacts.s3(
        bucket=artifact_store,
        name=repo,
        include_build_id=True,
        package_zip=False,
    )
    # GitHub credentials are entered into CodeBuild manually
    # $ aws codebuild import-source-credentials --server-type GITHUB --auth-type PERSONAL_ACCESS_TOKEN --token <token_value>
    github_source = codebuild.Source.git_hub(owner=owner, repo=repo, webhook=True)
    reteto_repo = ecr.Repository.from_repository_name(
        self, 'RetetoRepo', 'reteto')
    build_env = codebuild.BuildEnvironment(
        build_image=codebuild.LinuxBuildImage.from_ecr_repository(reteto_repo),
        compute_type=codebuild.ComputeType.SMALL,
        privileged=True)
    build_project = codebuild.Project(
        self,
        '%sBuild' % repo.capitalize(),
        project_name='%sBuild' % repo.capitalize(),
        environment=build_env,
        environment_variables={
            "AWS_ACCOUNT_ID": codebuild.BuildEnvironmentVariable(value=self.account),
            "REPO": codebuild.BuildEnvironmentVariable(value=repo)
        },
        source=github_source,
        artifacts=build_artifacts,
        badge=True,
        # see reference.buildspec.yml for a standard buildspec
        build_spec=codebuild.BuildSpec.from_object({}))
def __init__(self, pipeline_data: PipelineData):
    """Self-mutating CDK pipeline fed by a CodeStar (BitBucket-action) connection.

    :param pipeline_data: bundle of scope/env/repo/build settings for the pipeline.
    """
    super().__init__(pipeline_data.scope, pipeline_data.name, env=pipeline_data.env)
    self.source_artifact = cp.Artifact('Source')
    self.cloud_assembly_artifact = cp.Artifact('CloudAs')
    self.pipeline = CdkPipeline(
        self, "Pipeline",
        self_mutating=True,
        cross_account_keys=False,
        cloud_assembly_artifact=self.cloud_assembly_artifact,
        source_action=cpa.BitBucketSourceAction(
            role=iam.LazyRole(
                self, 'SourceRole',
                assumed_by=iam.AccountPrincipal(self.account),
                managed_policies=[
                    iam.ManagedPolicy.from_aws_managed_policy_name(
                        'AmazonS3FullAccess')
                ]),
            action_name="Ship",
            connection_arn=pipeline_data.github_connection_arn,
            owner=pipeline_data.github_owner,
            repo=pipeline_data.repo_name,
            branch=pipeline_data.repo_branch,
            output=self.source_artifact),
        synth_action=SimpleSynthAction(
            install_commands=pipeline_data.synth_install_commands,
            environment=cb.BuildEnvironment(
                environment_variables={
                    env_key: cb.BuildEnvironmentVariable(
                        value=pipeline_data.build_env[env_key])
                    for env_key in pipeline_data.build_env
                },
                build_image=cb.LinuxBuildImage.STANDARD_5_0,
                compute_type=cb.ComputeType.SMALL,
                privileged=True),
            synth_command='cdk synth',
            action_name='Synthesize',
            cloud_assembly_artifact=self.cloud_assembly_artifact,
            source_artifact=self.source_artifact))
    # Drill into the generated CodeBuild project to override low-level CFN properties.
    pipeline = self.pipeline.node.try_find_child('Pipeline')
    build_stage = pipeline.node.try_find_child('Build')
    synth_action = build_stage.node.try_find_child('Synthesize')
    build_proj = synth_action.node.try_find_child('CdkBuildProject')
    cfn_build_project = build_proj.node.default_child
    # Need Privileged mode for starting docker
    cfn_build_project.add_property_override("Environment.PrivilegedMode", "true")
    # Updating from v4 by default in aws-cdk to v5
    cfn_build_project.add_property_override("Environment.Image", "aws/codebuild/standard:5.0")
    # Only clone the last commit; don't clone the history.
    # (Fixed: this comment was previously split so its second half fell outside
    # the comment, leaving the source malformed.)
    cfn_build_project.add_property_override("Source.GitCloneDepth", 1)
    self.pipeline.add_application_stage(pipeline_data.app_stage)
def __init__(self, scope: core.Construct, id: str, **kwargs):
    """Minimal PipelineProject: STANDARD_2_0 image on MEDIUM compute."""
    super().__init__(scope, id, **kwargs)
    environment = aws_codebuild.BuildEnvironment(
        build_image=aws_codebuild.LinuxBuildImage.STANDARD_2_0,
        compute_type=aws_codebuild.ComputeType.MEDIUM)
    self.Project = aws_codebuild.PipelineProject(
        self,
        id=id,
        project_name=id,
        environment=environment)
def __init__(self, scope: core.Construct, construct_id: str, **kwargs) -> None:
    """Public frontend hosting bucket plus a GitHub-triggered CodeBuild deploy project."""
    super().__init__(scope, construct_id, **kwargs)
    # NOTE(review): prj_name is read from context but never used below — confirm.
    prj_name = self.node.try_get_context("project_name")
    env_name = self.node.try_get_context("env")
    account_id = core.Aws.ACCOUNT_ID
    PROJECT_NUMBER = 2
    # To Store Frontend App
    frontend_bucket = s3.Bucket(self, "frontend",
                                access_control=s3.BucketAccessControl.BUCKET_OWNER_FULL_CONTROL,
                                bucket_name=account_id + '-' + env_name + '-frontend',
                                public_read_access=True,
                                removal_policy=core.RemovalPolicy.DESTROY,
                                website_index_document='index.html'
                                )
    bucket_name = frontend_bucket.bucket_name
    # GitHub PAT from Secrets Manager, registered as CodeBuild source credentials.
    github_token = core.SecretValue.secrets_manager("dev/github-token", json_field='github-from-marsApp')
    cb.GitHubSourceCredentials(self, "CodeBuildGitHubCreds",
                               access_token=github_token
                               )
    # Webhook fires only for pushes to master touching js30Projects/.
    git_hub_source = cb.Source.git_hub(
        owner="manrodri",
        repo="30miniProjects",
        webhook=True,
        webhook_filters=[
            cb.FilterGroup.in_event_of(cb.EventAction.PUSH).and_branch_is(
                "master").and_file_path_is('js30Projects/')
        ]
    )
    codebuild_project = cb.Project(
        self,
        "cb-frontend",
        source=git_hub_source,
        environment=cb.BuildEnvironment(
            build_image=cb.LinuxBuildImage.STANDARD_3_0,
            environment_variables={
                'WEB_BUCKET_NAME': cb.BuildEnvironmentVariable(value=bucket_name),
                'PROJECT_NUMBER': cb.BuildEnvironmentVariable(value=str(PROJECT_NUMBER))
            }
        ),
    )
    # Build role may operate on the bucket's objects and on the bucket itself.
    allow_object_actions = iam.PolicyStatement(resources=[f"arn:aws:s3:::{bucket_name}/*"],
                                               actions=["s3:*"])
    allow_bucket_actions = iam.PolicyStatement(
        resources=[f"arn:aws:s3:::{bucket_name}"],
        actions=['s3:*'],
    )
    codebuild_project.add_to_role_policy(allow_object_actions)
    codebuild_project.add_to_role_policy(allow_bucket_actions)
def _create_train_step(self):
    """Add the training stage and its CodeBuild action (run in the project VPC)."""
    stage = self.pipeline.add_stage(stage_name=f"{self.name_prefix}-stage")
    # Role the training build runs under: broad managed policies plus iam:PassRole.
    role = iam.Role(
        self,
        "Role",
        assumed_by=iam.ServicePrincipal("codebuild.amazonaws.com"),
        description="Role for CodeBuild",
        role_name=f"{self.name_prefix}-codebuild-role",
        managed_policies=[
            iam.ManagedPolicy.from_aws_managed_policy_name(
                "AmazonEC2ContainerRegistryFullAccess"),
            iam.ManagedPolicy.from_aws_managed_policy_name(
                "AWSStepFunctionsFullAccess"),
            iam.ManagedPolicy.from_aws_managed_policy_name(
                "service-role/AWSLambdaVPCAccessExecutionRole"),
            iam.ManagedPolicy.from_aws_managed_policy_name(
                "SecretsManagerReadWrite"),
        ],
    )
    # Allow passing any role in this account to downstream services.
    policy = iam.Policy(self, "PassRolePolicy")
    policy.document.add_statements(
        iam.PolicyStatement(
            actions=["iam:PassRole"],
            resources=[f"arn:aws:iam::{Stack.of(self).account}:role/*"]))
    role.attach_inline_policy(policy)
    build_spec = codebuild.BuildSpec.from_source_filename('buildspec.yml')
    # Privileged so docker can run; placed in the project's VPC/subnets/SG.
    project = codebuild.PipelineProject(
        self,
        "TrainingStepProject",
        build_spec=build_spec,
        environment=codebuild.BuildEnvironment(
            build_image=codebuild.LinuxBuildImage.STANDARD_5_0,
            privileged=True),
        role=role,
        security_groups=[self.security_group],
        subnet_selection=self.subnet_selection,
        vpc=self.vpc)
    # EXEC_ID resolves to the pipeline execution id at run time.
    action = codepipeline_actions.CodeBuildAction(
        action_name=f"{self.name_prefix}-training-action",
        project=project,
        input=self.source_output,
        environment_variables={
            "EXEC_ID": codebuild.BuildEnvironmentVariable(
                value='#{codepipeline.PipelineExecutionId}'),
            "SFN_WORKFLOW_NAME":
                codebuild.BuildEnvironmentVariable(value=self.sfn_name)
        },
        variables_namespace="trainStep",
    )
    stage.add_action(action)
def __init__(self, scope: core.Construct, id: str, ecr_repo_name: str, spec_file_path: str, **kwargs) -> None: super().__init__(scope, id, **kwargs) # Define CodeBuild resource. git_hub_source = codebuild.Source.git_hub( owner=GITHUB_REPO_OWNER, repo=GITHUB_REPO_NAME, webhook=True, webhook_filters=[ codebuild.FilterGroup.in_event_of( codebuild.EventAction.PULL_REQUEST_CREATED, codebuild.EventAction.PULL_REQUEST_UPDATED, codebuild.EventAction.PULL_REQUEST_REOPENED) ], clone_depth=1) # Define a IAM role for this stack. code_build_batch_policy = iam.PolicyDocument.from_json( code_build_batch_policy_in_json([id])) inline_policies = {"code_build_batch_policy": code_build_batch_policy} role = iam.Role( scope=self, id="{}-role".format(id), assumed_by=iam.ServicePrincipal("codebuild.amazonaws.com"), inline_policies=inline_policies) # Create build spec. placeholder_map = {"ECR_REPO_PLACEHOLDER": ecr_arn(ecr_repo_name)} build_spec_content = YmlLoader.load(spec_file_path, placeholder_map) # Define CodeBuild. project = codebuild.Project( scope=self, id=id, project_name=id, source=git_hub_source, role=role, timeout=core.Duration.minutes(180), environment=codebuild.BuildEnvironment( compute_type=codebuild.ComputeType.SMALL, privileged=False, build_image=codebuild.LinuxBuildImage.STANDARD_4_0), build_spec=codebuild.BuildSpec.from_object(build_spec_content)) # TODO: add build type BUILD_BATCH when CFN finishes the feature release. See CryptoAlg-575. # Add 'BuildBatchConfig' property, which is not supported in CDK. # CDK raw overrides: https://docs.aws.amazon.com/cdk/latest/guide/cfn_layer.html#cfn_layer_raw # https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-codebuild-project.html#aws-resource-codebuild-project-properties cfn_build = project.node.default_child cfn_build.add_override("Properties.BuildBatchConfig", { "ServiceRole": role.role_arn, "TimeoutInMins": 180 })
def __init__(self, scope: core.Construct, id: str, buildspec, codepipelinerole, **kwargs):
    """PipelineProject running *buildspec* under a caller-supplied role."""
    super().__init__(scope, id, **kwargs)
    self.buildspec = buildspec
    self.build_image = codebuild.LinuxBuildImage.STANDARD_2_0
    self.role = codepipelinerole
    environment = codebuild.BuildEnvironment(build_image=self.build_image)
    spec = codebuild.BuildSpec.from_source_filename(self.buildspec)
    self.project = codebuild.PipelineProject(
        self,
        "Project",
        environment=environment,
        build_spec=spec,
        role=self.role)
def __init__(self, scope: core.Construct, id: str, x86_ecr_repo_name: str, arm_ecr_repo_name: str, spec_file_path: str, **kwargs) -> None: super().__init__(scope, id, **kwargs) # Define CodeBuild resource. git_hub_source = codebuild.Source.git_hub( owner=GITHUB_REPO_OWNER, repo=GITHUB_REPO_NAME, webhook=True, webhook_filters=[ codebuild.FilterGroup.in_event_of(codebuild.EventAction.PUSH) # The current FIPS branch does not have the configuration needed to run the analytics, once we update # the branch or create a new FIPS branch it should be updated to '(main)|(fips.*)' .and_branch_is("main") ], webhook_triggers_batch_build=True) # Define a IAM role for this stack. metrics_policy = iam.PolicyDocument.from_json( code_build_publish_metrics_in_json()) inline_policies = {"metric_policy": metrics_policy} role = iam.Role( scope=self, id="{}-role".format(id), assumed_by=iam.ServicePrincipal("codebuild.amazonaws.com"), inline_policies=inline_policies) # Create build spec. placeholder_map = { "X86_ECR_REPO_PLACEHOLDER": ecr_arn(x86_ecr_repo_name), "ARM_ECR_REPO_PLACEHOLDER": ecr_arn(arm_ecr_repo_name) } build_spec_content = YmlLoader.load(spec_file_path, placeholder_map) # Define CodeBuild. analytics = codebuild.Project( scope=self, id="AnalyticsCodeBuild", project_name=id, source=git_hub_source, role=role, timeout=core.Duration.minutes(120), environment=codebuild.BuildEnvironment( compute_type=codebuild.ComputeType.LARGE, privileged=True, build_image=codebuild.LinuxBuildImage.STANDARD_4_0), build_spec=codebuild.BuildSpec.from_object(build_spec_content)) analytics.enable_batch_builds()
def __init__(self, scope: core.Construct, id: str,
             source: codepipeline.Artifact, pipeline: codepipeline.Pipeline,
             bucket: s3.Bucket, role: iam.Role, frontend: str, **kwargs) -> None:
    """Per-branch CodeBuild stage for the flask frontend.

    The branch name is derived from the trailing '-<branch>' of the construct id.
    """
    super().__init__(scope, id, **kwargs)
    branch = id.split('-')[-1]
    # Code build for flask frontend
    # NOTE(review): UBUNTU_14_04_DOCKER_18_09_0 is a legacy image — confirm it is
    # still available in the target region/CDK version.
    env = codebuild.BuildEnvironment(
        build_image=codebuild.LinuxBuildImage.UBUNTU_14_04_DOCKER_18_09_0,
        compute_type=codebuild.ComputeType.SMALL,
        environment_variables={
            'PROJECTNAME': codebuild.BuildEnvironmentVariable(
                value=os.environ['GITHUB_REPO']),
            'GITHUBUSER': codebuild.BuildEnvironmentVariable(
                value=os.environ['GITHUB_OWNER']),
            'SOURCEBRANCH': codebuild.BuildEnvironmentVariable(value=branch),
            # NOTE(review): this passes the bucket ARN, not the bucket name —
            # confirm the buildspec expects an ARN.
            'ARTIFACT_BUCKET': codebuild.BuildEnvironmentVariable(value=bucket.bucket_arn),
            'REPO_URI': codebuild.BuildEnvironmentVariable(value=frontend),
        },
        privileged=True,
    )
    project = codebuild.PipelineProject(
        self, 'Build_Frontend-' + branch,
        description='Submit build jobs for {} as part of CI/CD pipeline'.
        format(os.environ['GITHUB_REPO']),
        environment=env,
        build_spec=codebuild.BuildSpec.from_source_filename(
            "buildspec.yml"),
        role=role)
    # run_order=3: build runs after the earlier stage actions.
    cb_actions = codepipeline_actions.CodeBuildAction(
        action_name='CodeBuild-' + branch,
        input=source,
        project=project,
        run_order=3)
    pipeline.add_stage(stage_name='CodeBuild-' + branch, actions=[cb_actions])
def new_build_project(self, repo: codecommit.Repository, buildspec_path: str, proj_name: str) -> _codebuild.Project:
    """Create a CodeCommit-sourced, privileged build project named *proj_name*."""
    environment = _codebuild.BuildEnvironment(
        build_image=_codebuild.LinuxBuildImage.STANDARD_5_0,
        compute_type=_codebuild.ComputeType.LARGE,
        privileged=True)
    spec = _codebuild.BuildSpec.from_source_filename(filename=buildspec_path)
    return _codebuild.Project(
        self,
        proj_name,
        badge=True,
        source=_codebuild.Source.code_commit(repository=repo),
        description=f"Build project for {proj_name}",
        environment=environment,
        project_name=proj_name,
        build_spec=spec,
        timeout=Duration.minutes(10))
def __init__(self, scope: core.Construct, id: str, project_name: str, build_image: assets.DockerImageAsset, context: BuildContext, build_role: iam.Role, app_dir: str, **kwargs) -> None:
    """Python zip-artifact build project for one FinSurf component.

    :param project_name: CodeBuild project name; also names the SSM param and S3 prefix.
    :param build_image: Docker image asset used as the build environment.
    :param context: shared build context (artifact bucket + KMS key).
    :param build_role: IAM role assumed by the build.
    :param app_dir: component directory handed to the buildspec via APP_DIR.
    """
    super().__init__(scope, id, **kwargs)
    # Shallow clone; no webhook (triggered elsewhere).
    self.github_master_source = b.Source.git_hub(
        clone_depth=1,
        owner='dr-natetorious',
        repo='app-FinSurf',
        webhook=False)
    # SSM parameter and S3 output location derived from the project name.
    param_name = '/app-finsurf/artifacts/bin/{}'.format(project_name)
    output_path = 's3://{}/cicd/{}'.format(
        context.buckets.artifacts_bucket.bucket_name,
        project_name)
    self.build_project = b.Project(
        self, 'PythonProject',
        project_name=project_name,
        source=self.github_master_source,
        environment=b.BuildEnvironment(
            # NOTE(review): image_uri is a CDK token; splitting on ':' presumably
            # yields the tag — confirm it resolves correctly at synth.
            build_image=b.LinuxBuildImage.from_ecr_repository(
                repository=build_image.repository,
                tag=build_image.image_uri.split(':')[-1]),
            environment_variables={
                'APP_DIR': b.BuildEnvironmentVariable(value=app_dir),
                'PARAM_NAME': b.BuildEnvironmentVariable(value=param_name),
                'OUTPUT_PATH': b.BuildEnvironmentVariable(value=output_path),
            },
            compute_type=b.ComputeType.SMALL),
        role=build_role,
        encryption_key=context.buckets.artifacts_key,
        build_spec=b.BuildSpec.from_source_filename(filename='cicd/configs/buildspec-python-zip.yml'),
        # Unzipped artifact under /artifacts/<project_name>, no build-id suffix.
        artifacts=b.Artifacts.s3(
            name=project_name,
            path="/artifacts",
            bucket=context.buckets.artifacts_bucket,
            encryption=True,
            include_build_id=False,
            package_zip=False)
    )
def __init__(self, app: core.App, id: str, props, **kwargs) -> None:
    """Stack hosting the ECS-cluster build project; forwards props downstream."""
    super().__init__(app, id, **kwargs)
    spec = aws_codebuild.BuildSpec.from_source_filename(
        filename='codebuild/ecs/buildspec.yml')
    env_vars = {
        'ecr': aws_codebuild.BuildEnvironmentVariable(value=props['ecr'])
    }
    ecs_build = aws_codebuild.PipelineProject(
        self,
        "ECSBuild",
        project_name="ecs-cluster-build",
        build_spec=spec,
        environment=aws_codebuild.BuildEnvironment(privileged=True,),
        environment_variables=env_vars,
        description='Pipeline for CodeBuild',
        timeout=core.Duration.minutes(30),
    )
    # Forward incoming props plus the new build project.
    self.output_params = props.copy()
    self.output_params['ecs_build'] = ecs_build
def __init__(self, app: core.App, id: str, props, **kwargs) -> None:
    """Source bucket, flask ECR repo and the docker-image build project."""
    super().__init__(app, id, **kwargs)
    # Versioned source bucket; destroyed with the stack.
    bucket = aws_s3.Bucket(
        self,
        "SourceBucket",
        bucket_name=f"flask-bucket-{core.Aws.ACCOUNT_ID}",
        versioned=True,
        removal_policy=core.RemovalPolicy.DESTROY)
    # ECR repository for Docker images
    ecr = aws_ecr.Repository(self, "ECR",
                             repository_name="flask-repo",
                             removal_policy=core.RemovalPolicy.DESTROY)
    ecr_build = aws_codebuild.PipelineProject(
        self,
        "ECRBuild",
        project_name="ecr-image-build",
        build_spec=aws_codebuild.BuildSpec.from_source_filename(
            filename='codebuild/ecr/buildspec.yml'),
        environment=aws_codebuild.BuildEnvironment(privileged=True, ),
        # pass the ecr repo uri into the codebuild project so codebuild knows where to push
        environment_variables={
            'ecr': aws_codebuild.BuildEnvironmentVariable(
                value=ecr.repository_uri),
            'tag': aws_codebuild.BuildEnvironmentVariable(value='flask')
        },
        description='Pipeline for CodeBuild',
        timeout=core.Duration.minutes(30),
    )
    ecr.grant_pull_push(ecr_build)
    # Expose created resources to downstream stacks.
    self.output_params = props.copy()
    self.output_params['ecr'] = ecr.repository_uri
    self.output_params['ecr_build'] = ecr_build
    self.output_params['bucket'] = bucket
def __init__(self, scope: Construct, construct_id: str, **kwargs) -> None:
    """CodeCommit repo plus two build projects chained by a Step Functions state machine."""
    super().__init__(scope, construct_id, **kwargs)
    repo = codecommit.Repository(
        self,
        "repo",
        repository_name="demorepo",
        description="Repo to test PR with stepfunctions")
    # First project comes from the helper; second is defined inline.
    proj1 = self.new_build_project(repo, "pr_specs/buildspec.yaml", "proj1")
    # NOTE(review): description "Build project for " looks truncated — confirm.
    proj2 = _codebuild.Project(
        self,
        "proj_name",
        badge=True,
        description="Build project for ",
        environment=_codebuild.BuildEnvironment(
            build_image=_codebuild.LinuxBuildImage.STANDARD_5_0,
            compute_type=_codebuild.ComputeType.LARGE,
            privileged=True),
        project_name="proj_name",
        build_spec=_codebuild.BuildSpec.from_source_filename(
            filename="pr_specs/buildspec2.yaml"),
        timeout=Duration.minutes(10),
    )
    # Pass state feeds the two build tasks in sequence.
    input_task = _step_fn.Pass(self, "passstate")
    proj1_tasks = self.new_codebuild_task(proj1)
    proj2_tasks = self.new_codebuild_task(proj2)
    definition = input_task.next(proj1_tasks).next(proj2_tasks)
    _fn = _step_fn.StateMachine(
        self,
        "statemachine",
        definition=definition,
        state_machine_name="statemachine",
    )
def __init__(self, scope: Construct, id: str, **kwargs) -> None: super().__init__(scope, id, **kwargs) # Create IAM Role For CodeBuild # TODO Make this role's policy least privilege aws_app_resources_build_role = iam.Role( self, "EKSCodeBuildRole", assumed_by=iam.ServicePrincipal("codebuild.amazonaws.com"), managed_policies=[ iam.ManagedPolicy.from_aws_managed_policy_name( "AdministratorAccess") ]) # We only want to fire on the master branch and if there is a change in the dockerbuild folder git_hub_source = codebuild.Source.git_hub( owner=self.node.try_get_context("github_owner"), repo=self.node.try_get_context("github_repo"), branch_or_ref=self.node.try_get_context("github_branch"), webhook=True, webhook_filters=[ codebuild.FilterGroup.in_event_of( codebuild.EventAction.PUSH).and_branch_is( self.node.try_get_context("github_branch")). and_file_path_is("cluster-bootstrap/*") ]) # Create CodeBuild build_project = codebuild.Project( self, "EKSCodeBuild", source=git_hub_source, role=aws_app_resources_build_role, environment=codebuild.BuildEnvironment( build_image=codebuild.LinuxBuildImage.STANDARD_5_0, compute_type=codebuild.ComputeType.LARGE), build_spec=codebuild.BuildSpec.from_source_filename( "cluster-bootstrap/buildspec.yml"))
def add_react_build(stack: core.Stack, code_pipeline, source_output, bucket_arn: str):
    """Append ReactBuild and ReactDeploy stages to *code_pipeline*.

    :param stack: stack the CodeBuild project and artifacts are created in.
    :param code_pipeline: pipeline receiving the two new stages.
    :param source_output: artifact from the source stage, input to the build.
    :param bucket_arn: ARN of the bucket hosting the static site.
    """
    # Could refactor the bucket to be part of the stage
    # https://github.com/aws-samples/aws-cdk-examples/blob/master/typescript/static-site/static-site.ts
    # Need to move to a stack / into startuptoolbag
    # The codebuild project can be moved back out into the pipeline (bit awkward?)
    react_site_bucket = Bucket.from_bucket_arn(stack, id='SiteBucket', bucket_arn=bucket_arn)
    # NOTE(review): stack.build_output_artifact is assigned but the local
    # build_output_artifact below is what is actually wired up — confirm intent.
    stack.build_output_artifact = codepipeline.Artifact()
    build_output_artifact = codepipeline.Artifact()
    codebuild_project = codebuild.PipelineProject(
        stack,
        "t-u-b-CDKCodebuild",
        project_name="t-u-b-CodebuildProject",
        build_spec=codebuild.BuildSpec.from_source_filename(
            filename='buildspec.yml'),
        environment=codebuild.BuildEnvironment(privileged=True),
        description='Pipeline for the-ultimate-boilerplate',
        timeout=core.Duration.minutes(60),
    )
    build_action = codepipeline_actions.CodeBuildAction(
        action_name="ReactBuild",
        project=codebuild_project,
        input=source_output,
        outputs=[build_output_artifact])
    s3_deploy = codepipeline_actions.S3DeployAction(
        action_name="ReactS3Push",
        input=build_output_artifact,
        bucket=react_site_bucket)
    # Would be more elegant to be one stage but the input to deploy must be created in a prior stage
    code_pipeline.add_stage(stage_name="ReactBuild", actions=[build_action])
    code_pipeline.add_stage(stage_name="ReactDeploy", actions=[s3_deploy])
def make_codebuild_project(
        self,
        project_suffix: str,
        description: str,
        buildspec_path: str,
        env_variables: dict):
    '''
    Creates a codebuild project

    Parameters
    ----------
    project_suffix : str
        The suffix of the project. The full project name is APPLICATION_PREFIX _ project_suffix
    description : str
        Description used by tags
    buildspec_path : str
        the path the buildspec used to build this project
    env_variables : dict
        The environment variables supplied to the project, e.g. the ECR repo URI
    '''
    project_name = "%s-%s" % (self.APPLICATION_PREFIX, project_suffix)
    build_project = codebuild.Project(
        self,
        project_name,
        source=codebuild.Source.git_hub(owner=self.GITHUB_REPO_OWNER,
                                        repo=self.GITHUB_REPO_NAME),
        build_spec=codebuild.BuildSpec.from_source_filename(buildspec_path),
        description=description,
        environment_variables=env_variables,
        # Privileged so docker builds can run inside the container.
        environment=codebuild.BuildEnvironment(
            privileged=True,
        ),
        project_name=project_name,
        # NOTE(review): 'codebuild_role_name' is passed as role — confirm it holds
        # an iam.Role object rather than a role-name string.
        role=self.codebuild_role_name,
        timeout=core.Duration.hours(1))
    util.tag_resource(build_project, project_name, description)
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    """CodeBuild project for rnasum, triggered by semver-tagged GitHub pushes."""
    super().__init__(scope, id, **kwargs)
    # As semver dictates: https://regex101.com/r/Ly7O1x/3/
    # Raw string (fixed): \d and \. in a plain string are invalid escape
    # sequences and raise SyntaxWarning on modern CPython; value is unchanged.
    semver_tag_regex = r'(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$'
    # Reference-data bucket is imported by name, not created here.
    refdata = s3.Bucket.from_bucket_attributes(
        self, 'reference_data', bucket_name='umccr-refdata-dev')
    # Docker-in-Docker image so container builds can run.
    build_env = cb.BuildEnvironment(
        build_image=cb.LinuxBuildImage.from_docker_registry("docker:dind"),
        privileged=True,
        compute_type=cb.ComputeType.SMALL)
    cb_project = cb.Project(
        self,
        id="rnasum",
        environment=build_env,
        timeout=core.Duration.hours(1),
        source=cb.Source.git_hub(
            identifier="rnasum",
            owner="umccr",
            repo="rnasum",
            clone_depth=1,
            webhook=True,
            webhook_filters=[
                cb.FilterGroup.in_event_of(
                    cb.EventAction.PUSH).and_tag_is(semver_tag_regex)
            ]))
    # Tackle IAM permissions
    # https://stackoverflow.com/questions/38587325/aws-ecr-getauthorizationtoken/54806087
    cb_project.role.add_managed_policy(
        iam.ManagedPolicy.from_aws_managed_policy_name(
            'AmazonEC2ContainerRegistryPowerUser'))
    refdata.grant_read(cb_project)
def _get_build_project(self):
    """Create the ECR repo and the docker-build PipelineProject.

    :returns: (ecr_repository, pipeline_project) tuple.
    """
    ecr = aws_ecr.Repository(self, "ECR",
                             repository_name="arronmoore-dev",
                             removal_policy=core.RemovalPolicy.DESTROY)
    cb_docker_build = aws_codebuild.PipelineProject(
        self,
        "DockerBuild",
        # Fixed: was an f-string with no placeholders (ruff F541); value unchanged.
        project_name="arronmoore-dev-docker-build",
        build_spec=aws_codebuild.BuildSpec.from_source_filename(
            filename='buildspec.yml'),
        environment=aws_codebuild.BuildEnvironment(privileged=True, ),
        # pass the ecr repo uri into the codebuild project so codebuild knows where to push
        environment_variables={
            'REPO_URI': aws_codebuild.BuildEnvironmentVariable(
                value=ecr.repository_uri)
        },
        description='Pipeline for CodeBuild',
        timeout=core.Duration.minutes(60),
    )
    ecr.grant_pull_push(cb_docker_build)
    return ecr, cb_docker_build
def _create_codebuild_project(self, id: str):
    """Create the APK-builder PipelineProject and attach its execution policy.

    :param id: construct id for the PipelineProject.
    :returns: the configured PipelineProject.
    """
    pipeline_project = aws_codebuild.PipelineProject(
        self,
        id,
        environment=aws_codebuild.BuildEnvironment(
            build_image=aws_codebuild.LinuxBuildImage.AMAZON_LINUX_2_3,
            privileged=True,
            compute_type=aws_codebuild.ComputeType.LARGE),
        build_spec=aws_codebuild.BuildSpec.from_source_filename(
            filename='scripts/apk-builder-buildspec.yml'))
    build_exec_policy = aws_iam.ManagedPolicy(
        self,
        "AmplifyAndroidBuildExecutorPolicy",
        # Fixed: was an f-string with no placeholders (ruff F541); value unchanged.
        managed_policy_name="AmplifyAndroidBuildExecutorPolicy",
        description="Policy used by the CodeBuild role that executes builds.",
        statements=[
            aws_iam.PolicyStatement(
                actions=self.CODE_BUILD_AMPLIFY_ACTIONS,
                effect=aws_iam.Effect.ALLOW,
                resources=["*"]),
        ])
    build_exec_policy.attach_to_role(pipeline_project.role)
    return pipeline_project
def __init__(self, scope: core.Construct, construct_id: str, **kwargs) -> None:
    """CI/CD pipeline: GitHub (CodeStar) source -> build -> CFN change-set deploy."""
    super().__init__(scope, construct_id, **kwargs)
    # The code that defines your stack goes here
    pipeline = codepipeline.Pipeline(
        self,
        "Pipeline",
        artifact_bucket=s3.Bucket(self, "ArtifactBucket")
    )
    # Define the 'source' stage to be triggered by a webhook on the GitHub
    # repo for the code. Don't be fooled by the name, it's just a codestar
    # connection in the background. Bitbucket isn't involved.
    source_output = codepipeline.Artifact("SourceOutput")
    github_source = pipeline_actions.BitBucketSourceAction(
        action_name="Github_Source",
        connection_arn=core.SecretValue.secrets_manager(
            secret_id="folksgl_github_connection_arn",
            json_field="arn"
        ).to_string(),
        repo="sam-cicd-python-template",
        owner="folksgl",
        branch="main",
        output=source_output,
    )
    pipeline.add_stage(stage_name="Source", actions=[github_source])
    # Define the 'build' stage
    build_project = codebuild.PipelineProject(
        scope=self,
        id="Build",
        # Declare the pipeline artifact bucket name as an environment variable
        # so the build can send the deployment package to it.
        environment_variables={
            "PACKAGE_BUCKET": codebuild.BuildEnvironmentVariable(
                value=pipeline.artifact_bucket.bucket_name,
                type=codebuild.BuildEnvironmentVariableType.PLAINTEXT,
            )
        },
        environment=codebuild.BuildEnvironment(
            build_image=codebuild.LinuxBuildImage.STANDARD_3_0
        ),
    )
    build_stage_output = codepipeline.Artifact("BuildStageOutput")
    build_action = pipeline_actions.CodeBuildAction(
        action_name="Build",
        project=build_project,
        input=source_output,
        outputs=[build_stage_output],
    )
    pipeline.add_stage(stage_name="Build", actions=[build_action])
    # Define the 'deploy' stage
    stack_name = "gateway-service-python"
    change_set_name = f"{stack_name}-changeset"
    # Create the change set first (run_order=1), then execute it (run_order=2).
    create_change_set = pipeline_actions.CloudFormationCreateReplaceChangeSetAction(
        action_name="CreateChangeSet",
        stack_name=stack_name,
        change_set_name=change_set_name,
        template_path=build_stage_output.at_path("packaged.yaml"),
        admin_permissions=True,
        run_order=1,
    )
    execute_change_set = pipeline_actions.CloudFormationExecuteChangeSetAction(
        action_name="Deploy",
        stack_name=stack_name,
        change_set_name=change_set_name,
        run_order=2,
    )
    pipeline.add_stage(
        stage_name="DevDeployment",
        actions=[create_change_set, execute_change_set]
    )
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    """Provision an ECR repo, a Docker-building CodeBuild project, and a
    Fargate task that a CloudWatch Events rule runs every 5 minutes."""
    super().__init__(scope, id, **kwargs)

    # --- ECR ----------------------------------------------------------
    # Repository holding the batch container image; destroyed with the
    # stack (RemovalPolicy.DESTROY).
    repo = aws_ecr.Repository(
        self,
        id='ecr_repository',
        repository_name='sample_repository',
        removal_policy=core.RemovalPolicy.DESTROY,
    )

    # --- CodeBuild (Docker image build) -------------------------------
    # Pipeline project that builds and pushes the image. privileged=True
    # is required for Docker-in-Docker builds; the repo URI and tag are
    # passed in via environment variables so the buildspec knows where to
    # push.
    docker_build = aws_codebuild.PipelineProject(
        self,
        "DockerBuild",
        project_name='continuous-delivery',  # f"{props['namespace']}-Docker-Build",
        build_spec=aws_codebuild.BuildSpec.from_source_filename(
            filename='batch/docker_build_buildspec.yml'
        ),
        environment=aws_codebuild.BuildEnvironment(privileged=True),
        environment_variables={
            'ecr': aws_codebuild.BuildEnvironmentVariable(
                value=repo.repository_uri
            ),
            'tag': aws_codebuild.BuildEnvironmentVariable(value='sample-batch'),
        },
        description='Pipeline for CodeBuild',
        timeout=core.Duration.minutes(60),
    )

    # --- VPC ----------------------------------------------------------
    network = aws_ec2.Vpc(
        self,
        id='vpc',
        cidr='10.0.0.0/16',
        max_azs=2,
        nat_gateways=1,
        vpn_gateway=False,
    )

    # --- ECS ----------------------------------------------------------
    cluster = aws_ecs.Cluster(
        self,
        id='ecs_cluster',
        cluster_name='sample_fargate_batch_cluster',
        vpc=network,
    )

    # Minimal Fargate task definition (0.25 vCPU / 512 MiB).
    task_definition = aws_ecs.FargateTaskDefinition(
        self,
        id='fargate-task-definition',
        cpu=256,
        memory_limit_mib=512,
        family='fargate-task-definition',
    )

    # Container pulled from the ECR repo above, logging to a dedicated
    # CloudWatch log group.
    task_definition.add_container(
        id='container',
        image=aws_ecs.ContainerImage.from_ecr_repository(repo),
        logging=aws_ecs.LogDriver.aws_logs(
            stream_prefix='ecs',
            log_group=aws_logs.LogGroup(
                self,
                id='log-group',
                log_group_name='/ecs/fargate/fargate-batch',
            ),
        ),
    )

    # --- Scheduled execution ------------------------------------------
    # Cron rule that launches one task every 5 minutes.
    schedule_rule = aws_events.Rule(
        self,
        id='rule',
        rule_name='execute-task-rule',
        description='Event rule to execute ecs task.',
        schedule=aws_events.Schedule.cron(
            day=None,
            hour=None,
            minute='*/5',  # execute by every 5 minutes.
            month=None,
            week_day=None,
            year=None,
        ),
    )
    schedule_rule.add_target(
        target=aws_events_targets.EcsTask(
            cluster=cluster,
            task_definition=task_definition,
            task_count=1,
        )
    )
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    """Wire up the Petclinic static site: API Gateway with CORS, a public
    S3 website bucket, a warm-up event rule, and a CodeCommit-sourced
    pipeline that rewrites the API endpoint into the site config and
    deploys it to the bucket."""
    super().__init__(scope, id, **kwargs)

    # REST API whose root gets an /api resource with CORS enabled via the
    # class helper.
    base_api = _apigw.RestApi(self, 'PetclinicApiGatewayWithCors')
    api_resource = base_api.root.add_resource('api')
    self.add_cors_options(api_resource)

    # Public static-website bucket; deleted with the stack.
    website_bucket = _s3.Bucket(
        self,
        'PetclinicWebsite',
        website_index_document='index.html',
        public_read_access=True,
        removal_policy=core.RemovalPolicy.DESTROY,
    )

    # Warm Lambda function Event rule (fires every 3 minutes).
    event_rule = _events.Rule(
        self,
        'PetclinicLambdaWarmRule',
        schedule=_events.Schedule.rate(core.Duration.minutes(3)),
    )

    # CodeCommit repo holding the static resources.
    code = _commit.Repository(
        self,
        'ServerlessCode',
        repository_name='spring-petclinic-static-resource',
    )

    # Build step: substitute the deployed API endpoint into the site's
    # config.js (API_ENDPOINT is injected as an environment variable).
    build_project = _build.PipelineProject(
        self,
        'StaticWebBuild',
        build_spec=_build.BuildSpec.from_object({
            'version': 0.2,
            'phases': {
                'install': {
                    'runtime-versions': {'java': 'openjdk8'},
                    'commands': [],
                },
                'build': {
                    'commands': [
                        'mv scripts/config.js scripts/config.js.origin',
                        'sed -e "s,http://localhost:8081/,$API_ENDPOINT,g" scripts/config.js.origin > scripts/config.js',
                    ]
                },
            },
            'artifacts': {'files': '**/*'},
        }),
        environment_variables={
            'API_ENDPOINT': _build.BuildEnvironmentVariable(value=base_api.url)
        },
        environment=_build.BuildEnvironment(
            build_image=_build.LinuxBuildImage.STANDARD_2_0
        ),
    )

    # Three-stage pipeline: CodeCommit source -> CodeBuild -> S3 deploy.
    source_output = _pipeline.Artifact('SourceOutput')
    build_output = _pipeline.Artifact('BuildOutput')
    pipeline = _pipeline.Pipeline(
        self,
        'ServerlessPipeline',
        stages=[
            {
                'stageName': 'Source',
                'actions': [
                    _action.CodeCommitSourceAction(
                        action_name='CodeCommit_Source',
                        repository=code,
                        output=source_output,
                    )
                ],
            },
            {
                'stageName': 'Build',
                'actions': [
                    _action.CodeBuildAction(
                        action_name='CodeBuild_Static',
                        project=build_project,
                        input=source_output,
                        outputs=[build_output],
                    )
                ],
            },
            {
                'stageName': 'Deploy',
                'actions': [
                    _action.S3DeployAction(
                        action_name='Web_Static_Deploy',
                        input=build_output,
                        bucket=website_bucket,
                    )
                ],
            },
        ],
    )

    # Exported outputs consumed by other stacks / tooling.
    core.CfnOutput(self, 'RuleArn', export_name='RuleArn',
                   value=event_rule.rule_arn)
    core.CfnOutput(self, 'PetclinicApiGatewayWithCorsId',
                   export_name='PetclinicApiGatewayWithCorsId',
                   value=base_api.rest_api_id)
    core.CfnOutput(self, "PetclinicWebsiteUrl",
                   export_name="PetclinicWebsiteUrl",
                   value=website_bucket.bucket_website_url)
def __init__(self, scope: core.Construct, id: str, *, git_token_key="",
             github_owner="", github_repo="", github_branch="",
             **kwargs) -> None:
    """Create a GitHub-sourced pipeline that runs the same CodeBuild
    project twice -- once against staging, then (after a manual approval)
    against production -- selecting the target via the ENV variable."""
    super().__init__(scope, id, **kwargs)

    # Role assumed by CodeBuild.
    # NOTE(review): AdministratorAccess is very broad for a build role --
    # consider scoping this down to the permissions the buildspec needs.
    build_role = iam.Role(
        self,
        "Role",
        assumed_by=iam.ServicePrincipal("codebuild.amazonaws.com"),
    )
    build_role.add_managed_policy(
        iam.ManagedPolicy.from_aws_managed_policy_name("AdministratorAccess")
    )

    # Single CodeBuild project reused by both deploy stages; privileged
    # mode enables Docker builds, and an S3 bucket backs the build cache.
    cdk_project = codebuild.PipelineProject(
        self,
        "Codebuild",
        build_spec=codebuild.BuildSpec.from_source_filename(
            "codebuild/buildspec.yaml"
        ),
        cache=codebuild.Cache.bucket(s3.Bucket(self, "Bucket")),
        environment=codebuild.BuildEnvironment(
            build_image=codebuild.LinuxBuildImage.STANDARD_2_0,
            privileged=True,
        ),
        role=build_role,
    )

    # Pipeline artifacts.
    source_output = codepipeline.Artifact()
    staging_output = codepipeline.Artifact()
    production_output = codepipeline.Artifact()

    # Source: GitHub via an OAuth token stored in Secrets Manager.
    source_action = codepipeline_actions.GitHubSourceAction(
        action_name="GitHub_Source",
        owner=github_owner,
        repo=github_repo,
        branch=github_branch,
        oauth_token=core.SecretValue.secrets_manager(git_token_key),
        output=source_output,
    )

    # Staging and production runs differ only in the ENV variable handed
    # to the buildspec ("stg" vs "prd").
    staging_action = codepipeline_actions.CodeBuildAction(
        action_name="Deliver",
        project=cdk_project,
        input=source_output,
        outputs=[staging_output],
        environment_variables={"ENV": {"value": "stg"}},
    )
    production_action = codepipeline_actions.CodeBuildAction(
        action_name="Deliver",
        project=cdk_project,
        input=source_output,
        outputs=[production_output],
        environment_variables={"ENV": {"value": "prd"}},
    )

    # Gate production behind a manual approval.
    manual_approval_action = codepipeline_actions.ManualApprovalAction(
        action_name="Approve"
    )

    # KMS-encrypted artifact bucket for the pipeline itself.
    artifact_key = kms.Key(self, "key")
    artifact_bucket = s3.Bucket(
        self, "bucket_artifacts", encryption_key=artifact_key
    )

    pipeline = codepipeline.Pipeline(
        self, "Pipeline", artifact_bucket=artifact_bucket
    )
    pipeline.add_stage(stage_name="Source", actions=[source_action])
    pipeline.add_stage(stage_name="Staging", actions=[staging_action])
    pipeline.add_stage(stage_name="Approval", actions=[manual_approval_action])
    pipeline.add_stage(stage_name="Production", actions=[production_action])