def _create_image_build_stage(self, stage_name: str, input: codepipeline.Artifact, output: codepipeline.Artifact):
    """Build the pipeline stage that produces the application's container image.

    Creates a privileged CodeBuild project, grants it pull/push access to the
    stack's ECR repo, wraps it in a CodeBuild action, and returns a dict in
    the shape CodePipeline expects for a stage definition.
    """
    # NOTE: `input` shadows the builtin, but renaming it would break keyword callers.
    build_project = codebuild.PipelineProject(
        self,
        'Project',
        build_spec=self._create_build_spec(),
        # privileged mode is required for builds that drive Docker
        environment={
            'build_image': codebuild.LinuxBuildImage.STANDARD_2_0,
            'privileged': True,
        },
        environment_variables={
            'REPOSITORY_URI': {'value': self.ecr_repo.repository_uri},
            'CONTAINER_NAME': {'value': self.container_name},
        })
    # The build pushes the image it produces, so it needs pull/push on the repo.
    self.ecr_repo.grant_pull_push(build_project.grant_principal)
    build_action = codepipeline_actions.CodeBuildAction(
        action_name='CodeBuild_Action',
        input=input,
        outputs=[output],
        project=build_project)
    return {'stageName': stage_name, 'actions': [build_action]}
def generate_codebuild_project(scope, role, db_secret):
    """Create the Tier-1 CodeBuild pipeline project.

    Database credentials are injected at build time from Secrets Manager
    (``<secret-name>:<json-key>`` syntax), so they never appear in the
    synthesized template.

    Parameters
    ----------
    scope : construct scope the project is created in
    role : IAM role the build runs under
    db_secret : Secrets Manager secret holding host/username/password/dbname

    Returns
    -------
    codebuild.PipelineProject
    """
    codebuild_project = codebuild.PipelineProject(
        scope=scope,
        id="JVSANTOSTier1CodebuildProject",
        build_spec=generate_buildspec(),
        project_name="JVSANTOSTier1",
        role=role,
        # BUG FIX: `environment` takes a BuildEnvironment struct, not a build
        # image; the image must be passed as its `build_image` field.
        environment=codebuild.BuildEnvironment(
            build_image=codebuild.LinuxBuildImage.AMAZON_LINUX_2_3),
        environment_variables={
            "HOST": BuildEnvironmentVariable(
                value=f"{db_secret.secret_name}:host",
                type=BuildEnvironmentVariableType.SECRETS_MANAGER),
            "USERNAME": BuildEnvironmentVariable(
                value=f"{db_secret.secret_name}:username",
                type=BuildEnvironmentVariableType.SECRETS_MANAGER),
            "PASSWORD": BuildEnvironmentVariable(
                value=f"{db_secret.secret_name}:password",
                type=BuildEnvironmentVariableType.SECRETS_MANAGER),
            "DATABASE": BuildEnvironmentVariable(
                value=f"{db_secret.secret_name}:dbname",
                type=BuildEnvironmentVariableType.SECRETS_MANAGER),
            "PORT": BuildEnvironmentVariable(
                value=1337,
                type=BuildEnvironmentVariableType.PLAINTEXT),
        })
    return codebuild_project
def __init__(self, scope: core.Construct, id: str, buildspec, **kwargs):
    """CodeBuild project construct driven by a buildspec file in the source repo."""
    super().__init__(scope, id, **kwargs)
    self.buildspec = buildspec
    self.build_image = codebuild.LinuxBuildImage.STANDARD_2_0
    # Privileged environment so the build can run Docker commands.
    build_env = codebuild.BuildEnvironment(
        build_image=self.build_image, privileged=True)
    repo_var = codebuild.BuildEnvironmentVariable(
        value=config['CODEPIPELINE']['GITHUB_REPO'])
    self.project = codebuild.PipelineProject(
        self,
        "Project",
        environment=build_env,
        build_spec=codebuild.BuildSpec.from_source_filename(self.buildspec),
        environment_variables={'REPO_NAME': repo_var},
    )
    # TODO: Don't need admin, let's make this least privilege
    self.admin_policy = iam.Policy(
        self,
        "AdminPolicy",
        roles=[self.project.role],
        statements=[iam.PolicyStatement(actions=['*'], resources=['*'])])
def create_project(self, build_id, artifacts):
    """Return a bare CodeBuild pipeline project identified by *build_id*.

    NOTE(review): *artifacts* is accepted but currently unused; kept so the
    call signature stays compatible with existing callers.
    """
    return codebuild.PipelineProject(self, id=build_id)
def __init__(self, scope: core.Construct, id: str, **kwargs):
    """Construct exposing a medium-compute CodeBuild project named after the id."""
    super().__init__(scope, id, **kwargs)
    build_env = aws_codebuild.BuildEnvironment(
        build_image=aws_codebuild.LinuxBuildImage.STANDARD_2_0,
        compute_type=aws_codebuild.ComputeType.MEDIUM)
    self.Project = aws_codebuild.PipelineProject(
        self,
        id=id,
        project_name=id,
        environment=build_env)
def _create_train_step(self):
    """Add the training stage to the pipeline.

    Creates a CodeBuild project (privileged, STANDARD_5_0, running inside the
    stack's VPC) driven by ``buildspec.yml``, attaches broad managed policies
    plus an inline ``iam:PassRole`` policy to its role, and wires it into a
    new pipeline stage as a CodeBuild action exporting variables under the
    ``trainStep`` namespace.
    """
    stage = self.pipeline.add_stage(stage_name=f"{self.name_prefix}-stage")
    # Role the build runs under.  NOTE(review): the attached managed policies
    # are all *FullAccess — consider scoping down to least privilege.
    role = iam.Role(
        self,
        "Role",
        assumed_by=iam.ServicePrincipal("codebuild.amazonaws.com"),
        description="Role for CodeBuild",
        role_name=f"{self.name_prefix}-codebuild-role",
        managed_policies=[
            iam.ManagedPolicy.from_aws_managed_policy_name(
                "AmazonEC2ContainerRegistryFullAccess"),
            iam.ManagedPolicy.from_aws_managed_policy_name(
                "AWSStepFunctionsFullAccess"),
            iam.ManagedPolicy.from_aws_managed_policy_name(
                "service-role/AWSLambdaVPCAccessExecutionRole"),
            iam.ManagedPolicy.from_aws_managed_policy_name(
                "SecretsManagerReadWrite"),
        ],
    )
    # Inline policy so the build can pass roles in this account to other
    # services (presumably Step Functions / training jobs — confirm).
    policy = iam.Policy(self, "PassRolePolicy")
    policy.document.add_statements(
        iam.PolicyStatement(
            actions=["iam:PassRole"],
            resources=[f"arn:aws:iam::{Stack.of(self).account}:role/*"]))
    role.attach_inline_policy(policy)
    build_spec = codebuild.BuildSpec.from_source_filename('buildspec.yml')
    # privileged=True typically indicates the buildspec drives Docker.
    project = codebuild.PipelineProject(
        self,
        "TrainingStepProject",
        build_spec=build_spec,
        environment=codebuild.BuildEnvironment(
            build_image=codebuild.LinuxBuildImage.STANDARD_5_0,
            privileged=True),
        role=role,
        security_groups=[self.security_group],
        subnet_selection=self.subnet_selection,
        vpc=self.vpc)
    # EXEC_ID is a CodePipeline variable reference resolved at run time.
    action = codepipeline_actions.CodeBuildAction(
        action_name=f"{self.name_prefix}-training-action",
        project=project,
        input=self.source_output,
        environment_variables={
            "EXEC_ID": codebuild.BuildEnvironmentVariable(
                value='#{codepipeline.PipelineExecutionId}'),
            "SFN_WORKFLOW_NAME":
                codebuild.BuildEnvironmentVariable(value=self.sfn_name)
        },
        variables_namespace="trainStep",
    )
    stage.add_action(action)
def _project(self, **project_config):
    """Create the CodeBuild project for this pipeline.

    Defaults the buildspec to ``buildspec.yml`` when the caller supplies
    none, then resolves it through ``Pipelines.load_buildspec``.  Build
    artifacts are cached in the stack's S3 bucket.
    """
    if 'build_spec' not in project_config:
        project_config['build_spec'] = 'buildspec.yml'
    project_config['build_spec'] = Pipelines.load_buildspec(
        project_config['build_spec'])
    self.project = cb.PipelineProject(
        self,
        "project",
        project_name="{}".format(self.node.id),
        # BUG FIX: `environment` expects a BuildEnvironment struct; the build
        # image goes in its `build_image` field, not directly.
        environment=cb.BuildEnvironment(
            build_image=cb.LinuxBuildImage.STANDARD_4_0),
        cache=cb.Cache.bucket(bucket=self.bucket, prefix='codebuild-cache'),
        **project_config)
def __init__(self, scope: core.Construct, id: str, **kwargs):
    """CDK pipeline stack: GitHub source -> synth -> Java build -> pre-prod (gated) -> prod."""
    super().__init__(scope, id, **kwargs)
    this_dir = path.dirname(__file__)  # NOTE(review): computed but never used below
    # Build project for the Java demo service, driven by its own buildspec.
    code_build_project = codebuild.PipelineProject(
        self,
        "demoServiceProject",
        build_spec=codebuild.BuildSpec.from_source_filename(
            './pipeline/java_services/DemoService/buildspec.yml'))
    source_artifact = codepipeline.Artifact()
    cloud_assembly_artifact = codepipeline.Artifact()
    java_build_artifact = codepipeline.Artifact()
    # Legacy CdkPipeline API: GitHub source polled with an OAuth token from
    # Secrets Manager, synth via `cdk synth`.
    pipeline = pipelines.CdkPipeline(
        self,
        'Pipeline',
        cloud_assembly_artifact=cloud_assembly_artifact,
        pipeline_name='WebinarPipeline',
        source_action=cpactions.GitHubSourceAction(
            action_name='Github',
            output=source_artifact,
            oauth_token=core.SecretValue.secrets_manager('github-token'),
            owner='JuanGQCadavid',
            repo='cd_last_project_pipeline',
            trigger=cpactions.GitHubTrigger.POLL),
        synth_action=pipelines.SimpleSynthAction(
            source_artifact=source_artifact,
            cloud_assembly_artifact=cloud_assembly_artifact,
            install_command=
            'npm install -g aws-cdk && pip install -r requirements.txt',
            synth_command='cdk synth'))
    build_action = cpactions.CodeBuildAction(
        input=source_artifact,
        outputs=[java_build_artifact],
        project=code_build_project,
        action_name="demoServicesBuildAction",
    )
    buildStage = pipeline.add_stage(stage_name="JavaBuild")
    buildStage.add_actions(build_action)
    # Pre-prod deploys first; promotion to prod is gated on manual approval.
    pre_prod_stage = pipeline.add_application_stage(
        WebServiceStage(self, 'Pre-prod', env={'region': 'us-east-1'}))
    pre_prod_stage.add_manual_approval_action(action_name='PromoteToProd')
    pipeline.add_application_stage(
        WebServiceStage(self, 'Prod', env={'region': 'us-east-1'}))
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None: super().__init__(scope, id, **kwargs) # Create IAM Role For CodeBuild codebuild_role = iam.Role( self, "BuildRole", assumed_by=iam.ServicePrincipal("codebuild.amazonaws.com"), managed_policies=[ iam.ManagedPolicy.from_aws_managed_policy_name( "AdministratorAccess") ]) # Create CodeBuild PipelineProject build_project = codebuild.PipelineProject( self, "BuildProject", role=codebuild_role, build_spec=codebuild.BuildSpec.from_source_filename( "aws-app-resources/buildspec.yml")) # Create CodePipeline pipeline = codepipeline.Pipeline(self, "Pipeline") # Create Artifact artifact = codepipeline.Artifact() # Add Source Stage pipeline.add_stage( stage_name="Source", actions=[ codepipeline_actions.GitHubSourceAction( action_name="SourceCodeRepo", owner="jasonumiker", repo="k8s-plus-aws-gitops", output=artifact, oauth_token=core.SecretValue.secrets_manager( 'github-token')) ]) # Add CodeBuild Stage pipeline.add_stage( stage_name="Deploy", actions=[ codepipeline_actions.CodeBuildAction( action_name="CodeBuildProject", project=build_project, type=codepipeline_actions.CodeBuildActionType.BUILD, input=artifact) ])
def __init__(self, scope: core.Construct, id: str, code_commit_repo: str, default_branch: str = 'mainline', **kwargs) -> None:
    """Deployment pipeline: existing CodeCommit repo -> CodeBuild deploy.

    Parameters
    ----------
    code_commit_repo : name of an existing CodeCommit repository to source from
    default_branch : branch that triggers the pipeline (default ``mainline``)
    """
    super().__init__(scope, id, **kwargs)
    code = codecommit.Repository.from_repository_name(
        self, "codecommitrepo", code_commit_repo)
    # Cloudformation permission for project builds
    # right now setting admin permission on policy
    # modify this to load custom policy per pipeline from policy statement document
    # iam_cfn_admin_json = Policies.get_iam_cfn_admin_access_policy()
    policy_statement = iam.PolicyStatement()
    policy_statement.add_actions("*")
    policy_statement.add_resources("*")
    policy_statement.effect = iam.Effect.ALLOW
    serverless_build = codebuild.PipelineProject(self, "buildpipeline")
    # add cfn iam statements to build project
    serverless_build.add_to_role_policy(policy_statement)
    # NOTE(review): despite its name this artifact carries the *source*
    # output into the build action.
    build_output = codepipeline.Artifact("BuildOutput")
    codepipeline.Pipeline(
        self,
        "imageBuilderDeploymentPipeline",
        pipeline_name="ImageBuilderDeploymentPipeline",
        stages=[
            codepipeline.StageProps(
                stage_name="Source",
                actions=[
                    codepipeline_actions.CodeCommitSourceAction(
                        action_name="SourceCode",
                        branch=default_branch,
                        repository=code,
                        output=build_output)
                ]),
            codepipeline.StageProps(
                stage_name="Deploy",
                actions=[
                    codepipeline_actions.CodeBuildAction(
                        action_name="CodeDeploy",
                        project=serverless_build,
                        input=build_output)
                ])
        ])
def __init__(self, scope: core.Construct, id: str, buildspec, codepipelinerole, **kwargs):
    """CodeBuild project construct that runs *buildspec* under the supplied role."""
    super().__init__(scope, id, **kwargs)
    self.buildspec = buildspec
    self.build_image = codebuild.LinuxBuildImage.STANDARD_2_0
    self.role = codepipelinerole
    spec = codebuild.BuildSpec.from_source_filename(self.buildspec)
    self.project = codebuild.PipelineProject(
        self,
        "Project",
        environment=codebuild.BuildEnvironment(build_image=self.build_image),
        build_spec=spec,
        role=self.role)
def CdkDeployProject(self, name: str, stage: str):
    """Return a CodeBuild project that deploys the CDK app for *stage*.

    BUG FIX: the original embedded ``environment`` and ``environment_variable``
    keys inside the buildspec object, where CodeBuild ignores them (they are
    not valid buildspec keys).  The build image and the ``environment``
    variable are now passed as proper PipelineProject arguments.
    """
    return codebuild.PipelineProject(
        self,
        name,
        build_spec=codebuild.BuildSpec.from_object(
            dict(
                version="0.2",
                phases=dict(
                    install=dict(commands=install_commands),
                    build=dict(commands=deploy_commands(stage)),
                ),
            )),
        environment=codebuild.BuildEnvironment(
            build_image=codebuild.LinuxBuildImage.STANDARD_2_0),
        environment_variables={
            "environment": codebuild.BuildEnvironmentVariable(value=stage),
        })
def __init__(self, scope: core.Construct, id: str, source: codepipeline.Artifact, pipeline: codepipeline.Pipeline, bucket: s3.Bucket, role: iam.Role, frontend: str, **kwargs) -> None:
    """Add a per-branch frontend build stage to *pipeline*.

    The branch name is taken from the trailing segment of the construct id
    (``<something>-<branch>``).  GitHub repo/owner come from the process
    environment.
    """
    super().__init__(scope, id, **kwargs)
    branch = id.split('-')[-1]
    # Code build for flask frontend.
    # NOTE(review): ARTIFACT_BUCKET is set to the bucket *ARN*, not its name —
    # confirm the buildspec expects an ARN.
    env = codebuild.BuildEnvironment(
        build_image=codebuild.LinuxBuildImage.UBUNTU_14_04_DOCKER_18_09_0,
        compute_type=codebuild.ComputeType.SMALL,
        environment_variables={
            'PROJECTNAME': codebuild.BuildEnvironmentVariable(
                value=os.environ['GITHUB_REPO']),
            'GITHUBUSER': codebuild.BuildEnvironmentVariable(
                value=os.environ['GITHUB_OWNER']),
            'SOURCEBRANCH': codebuild.BuildEnvironmentVariable(value=branch),
            'ARTIFACT_BUCKET':
                codebuild.BuildEnvironmentVariable(value=bucket.bucket_arn),
            'REPO_URI': codebuild.BuildEnvironmentVariable(value=frontend),
        },
        privileged=True,
    )
    project = codebuild.PipelineProject(
        self,
        'Build_Frontend-' + branch,
        description='Submit build jobs for {} as part of CI/CD pipeline'.
        format(os.environ['GITHUB_REPO']),
        environment=env,
        build_spec=codebuild.BuildSpec.from_source_filename(
            "buildspec.yml"),
        role=role)
    # run_order=3 — presumably ordered after actions added elsewhere in this
    # stage; verify against the pipeline's other actions.
    cb_actions = codepipeline_actions.CodeBuildAction(
        action_name='CodeBuild-' + branch,
        input=source,
        project=project,
        run_order=3)
    pipeline.add_stage(stage_name='CodeBuild-' + branch, actions=[cb_actions])
def __init__(self, app: core.App, id: str, props, **kwargs) -> None:
    """Stack holding the ECS-cluster CodeBuild project; exposed via output_params."""
    super().__init__(app, id, **kwargs)
    # Privileged mode so the buildspec can drive Docker.
    ecs_build = aws_codebuild.PipelineProject(
        self,
        "ECSBuild",
        project_name="ecs-cluster-build",
        build_spec=aws_codebuild.BuildSpec.from_source_filename(
            filename='codebuild/ecs/buildspec.yml'),
        environment=aws_codebuild.BuildEnvironment(privileged=True),
        environment_variables={
            'ecr': aws_codebuild.BuildEnvironmentVariable(value=props['ecr']),
        },
        description='Pipeline for CodeBuild',
        timeout=core.Duration.minutes(30),
    )
    # Forward inherited props plus the new build project to downstream stacks.
    self.output_params = dict(props)
    self.output_params['ecs_build'] = ecs_build
def __init__(self, app: core.App, id: str, props, **kwargs) -> None:
    """Source bucket + ECR repo + Docker-image build project.

    The created resources are forwarded to downstream stacks through
    ``output_params``.
    """
    super().__init__(app, id, **kwargs)
    # Versioned S3 bucket used as the pipeline source.
    bucket = aws_s3.Bucket(
        self,
        "SourceBucket",
        bucket_name=f"flask-bucket-{core.Aws.ACCOUNT_ID}",
        versioned=True,
        removal_policy=core.RemovalPolicy.DESTROY)
    # ECR repository for Docker images
    ecr = aws_ecr.Repository(
        self,
        "ECR",
        repository_name="flask-repo",
        removal_policy=core.RemovalPolicy.DESTROY)
    # pass the ecr repo uri into the codebuild project so codebuild knows where to push
    image_env_vars = {
        'ecr': aws_codebuild.BuildEnvironmentVariable(
            value=ecr.repository_uri),
        'tag': aws_codebuild.BuildEnvironmentVariable(value='flask'),
    }
    ecr_build = aws_codebuild.PipelineProject(
        self,
        "ECRBuild",
        project_name="ecr-image-build",
        build_spec=aws_codebuild.BuildSpec.from_source_filename(
            filename='codebuild/ecr/buildspec.yml'),
        environment=aws_codebuild.BuildEnvironment(privileged=True),
        environment_variables=image_env_vars,
        description='Pipeline for CodeBuild',
        timeout=core.Duration.minutes(30),
    )
    # The build pushes images, so grant it pull/push on the repo.
    ecr.grant_pull_push(ecr_build)
    self.output_params = dict(props)
    self.output_params['ecr'] = ecr.repository_uri
    self.output_params['ecr_build'] = ecr_build
    self.output_params['bucket'] = bucket
def setup_web_build_project(self):
    """Setup build project for Web frontend.

    Using codebuild to create a PipelineProject with 3 phases:

    * install: npm install
    * pre_build: run unit tests
    * build: npm run build, record the short commit hash, and package
      ``dist/`` as the artifact

    BUG FIX: the build phase had a missing comma that fused two commands into
    one shell line, and the hash variable was set as ``COMMIT_HAHS`` but read
    as ``COMMIT_HASH``.

    Returns
    -------
    aws_codepipeline.PipelineProject used for web front end deploy
    """
    return cb.PipelineProject(
        self,
        'WebBuild',
        build_spec=cb.BuildSpec.from_object(
            dict(
                version=0.2,
                phases={
                    'install': {
                        'commands': ['npm install']
                    },
                    'pre_build': {
                        'commands': ['npm run test:unit']
                    },
                    'build': {
                        'commands': [
                            'npm run build',
                            # Short (7-char) commit hash for versioning.
                            'COMMIT_HASH=$(echo $CODEBUILD_RESOLVED_SOURCE_VERSION |cut -c 1-7)',
                            'echo ${COMMIT_HASH} > dist/version.txt',
                        ]
                    }
                },
                artifacts={
                    'files': ['**/*'],
                    'base-directory': 'dist',
                    'name': "dist-${COMMIT_HASH}"
                })))
def create_project(self, target_function, stage):
    """CodeBuild project that updates *target_function* for *stage*.

    The project's role is granted only what it needs: updating the target
    Lambda's code and configuration.
    """
    env_vars = {
        "FUNCTION_NAME": codebuild.BuildEnvironmentVariable(
            value=target_function.function_name,
            type=codebuild.BuildEnvironmentVariableType.PLAINTEXT),
        "STAGE": codebuild.BuildEnvironmentVariable(
            value=stage,
            type=codebuild.BuildEnvironmentVariableType.PLAINTEXT),
    }
    project = codebuild.PipelineProject(
        self,
        self.create_id("Project", stage),
        project_name=self.create_name(stage),
        environment_variables=env_vars,
    )
    # Least-privilege: only the target function, only update actions.
    project.add_to_role_policy(
        iam.PolicyStatement(
            resources=[target_function.function_arn],
            actions=[
                'lambda:UpdateFunctionCode',
                'lambda:UpdateFunctionConfiguration',
            ]))
    return project
def add_react_build(stack: core.Stack, code_pipeline, source_output, bucket_arn: str):
    """Append ReactBuild and ReactDeploy stages to *code_pipeline*.

    Builds the React site with CodeBuild (repo's ``buildspec.yml``) and
    pushes the build output to the S3 site bucket identified by *bucket_arn*.
    """
    # Could refactor the bucket to be part of the stage
    # https://github.com/aws-samples/aws-cdk-examples/blob/master/typescript/static-site/static-site.ts
    # Need to move to a stack / into startuptoolbag
    # The codebuild project can be moved back out into the pipeline (bit awkward?)
    react_site_bucket = Bucket.from_bucket_arn(stack, id='SiteBucket', bucket_arn=bucket_arn)
    # NOTE(review): this attribute is assigned but never used below — the
    # local `build_output_artifact` is used instead; confirm nothing external
    # reads it before removing.
    stack.build_output_artifact = codepipeline.Artifact()
    build_output_artifact = codepipeline.Artifact()
    codebuild_project = codebuild.PipelineProject(
        stack,
        "t-u-b-CDKCodebuild",
        project_name="t-u-b-CodebuildProject",
        build_spec=codebuild.BuildSpec.from_source_filename(
            filename='buildspec.yml'),
        environment=codebuild.BuildEnvironment(privileged=True),
        description='Pipeline for the-ultimate-boilerplate',
        timeout=core.Duration.minutes(60),
    )
    build_action = codepipeline_actions.CodeBuildAction(
        action_name="ReactBuild",
        project=codebuild_project,
        input=source_output,
        outputs=[build_output_artifact])
    s3_deploy = codepipeline_actions.S3DeployAction(
        action_name="ReactS3Push",
        input=build_output_artifact,
        bucket=react_site_bucket)
    # Would be more elegant to be one stage but the input to deploy must be created in a prior stage
    code_pipeline.add_stage(stage_name="ReactBuild", actions=[build_action])
    code_pipeline.add_stage(stage_name="ReactDeploy", actions=[s3_deploy])
def _create_codebuild_project(self, id: str):
    """Privileged CodeBuild project that runs the APK builder buildspec.

    Attaches a managed policy granting the Amplify-related actions the build
    executor needs, then returns the project.
    """
    pipeline_project = aws_codebuild.PipelineProject(
        self,
        id,
        environment=aws_codebuild.BuildEnvironment(
            build_image=aws_codebuild.LinuxBuildImage.AMAZON_LINUX_2_3,
            privileged=True,
            compute_type=aws_codebuild.ComputeType.LARGE),
        build_spec=aws_codebuild.BuildSpec.from_source_filename(
            filename='scripts/apk-builder-buildspec.yml'))
    amplify_statement = aws_iam.PolicyStatement(
        actions=self.CODE_BUILD_AMPLIFY_ACTIONS,
        effect=aws_iam.Effect.ALLOW,
        resources=["*"])
    build_exec_policy = aws_iam.ManagedPolicy(
        self,
        "AmplifyAndroidBuildExecutorPolicy",
        managed_policy_name="AmplifyAndroidBuildExecutorPolicy",
        description="Policy used by the CodeBuild role that executes builds.",
        statements=[amplify_statement])
    build_exec_policy.attach_to_role(pipeline_project.role)
    return pipeline_project
def getProjectDefination(self, stack_name, environemt, project_name):
    """Return a CodeBuild project whose inline buildspec deploys *stack_name* via CDK.

    NOTE(review): the method name ("Defination") and the ``environemt``
    parameter are misspelled, and ``environemt`` is never used — both left
    as-is to avoid breaking existing callers.
    """
    project = codebuild.PipelineProject(
        self,
        project_name,
        project_name=project_name,
        build_spec=codebuild.BuildSpec.from_object({
            "version": "0.2",
            "phases": {
                "install": {
                    "commands": [
                        "echo 'starting installation'",
                        "npm install aws-cdk",
                        "npm update",
                        "pip install -r requirements.txt"
                    ]
                },
                "build": {
                    "commands": [
                        "echo 'starting build stage'",
                        # Deploys only the named stack.
                        f"npx cdk deploy {stack_name}"
                    ]
                }
            }
        }))
    return project
def _get_build_project(self):
    """Create the ECR repo and its Docker build project.

    Returns
    -------
    tuple of (aws_ecr.Repository, aws_codebuild.PipelineProject)
    """
    ecr = aws_ecr.Repository(
        self,
        "ECR",
        repository_name="arronmoore-dev",
        removal_policy=core.RemovalPolicy.DESTROY)
    # pass the ecr repo uri into the codebuild project so codebuild knows where to push
    cb_docker_build = aws_codebuild.PipelineProject(
        self,
        "DockerBuild",
        project_name="arronmoore-dev-docker-build",
        build_spec=aws_codebuild.BuildSpec.from_source_filename(
            filename='buildspec.yml'),
        environment=aws_codebuild.BuildEnvironment(privileged=True),
        environment_variables={
            'REPO_URI': aws_codebuild.BuildEnvironmentVariable(
                value=ecr.repository_uri),
        },
        description='Pipeline for CodeBuild',
        timeout=core.Duration.minutes(60),
    )
    # The build pushes images, so it needs pull/push on the repo.
    ecr.grant_pull_push(cb_docker_build)
    return ecr, cb_docker_build
def setup_api_build_project(self):
    """Set up the API build project.

    Uses CodeBuild to create a PipelineProject with four phases:

    * install: install test requirements and the AWS CLI
    * pre_build: run unit tests and show coverage
    * build: build and tag the docker image
    * post_build: log in to ECR, push the image, and emit
      ``imagedefinitions.json`` for the ECS deploy action

    Returns
    -------
    aws_codebuild.PipelineProject object to be used in Pipeline
    """
    # privileged=True because the build runs docker commands.
    project = cb.PipelineProject(
        self,
        'ApiBuild',
        environment={
            'build_image': cb.LinuxBuildImage.STANDARD_4_0,
            'privileged': True,
        },
        build_spec=cb.BuildSpec.from_object(
            dict(
                version=0.2,
                phases={
                    'install': {
                        'commands': [
                            'pip install -r requirements_test.txt',
                            'pip install -U awscli',
                        ]
                    },
                    'pre_build': {
                        'commands': [
                            'coverage run --source=. -m unittest',
                            'coverage report -m',
                        ]
                    },
                    'build': {
                        'commands': [
                            f'docker build . -t {self.ecr_repo.repository_uri}:latest',
                        ]
                    },
                    'post_build': {
                        'commands': [
                            # aws-cli v1 style ECR login.
                            '$(aws ecr get-login --no-include-email)',
                            f'docker push {self.ecr_repo.repository_uri}:latest',
                            # Emit the image definition file consumed by ECS deploys.
                            ''.join([
                                'printf \'[{',
                                f'"name": "{self.config.api.ecr_repo}",',
                                f'"imageUri": "{self.ecr_repo.repository_uri}:latest"',
                                '}]\' > imagedefinitions.json',
                            ]),
                            'cat imagedefinitions.json',
                        ]
                    },
                },
                artifacts={'files': 'imagedefinitions.json'})))
    # Allow the build role to authenticate to ECR and push layers/images.
    project.role.add_to_policy(
        iam.PolicyStatement(
            resources=['*'],
            actions=[
                'ecr:GetAuthorizationToken',
                "ecr:InitiateLayerUpload",
                "ecr:UploadLayerPart",
                "ecr:CompleteLayerUpload",
                "ecr:BatchCheckLayerAvailability",
                'ecr:PutImage',
            ],
            sid='AllowECRLoginAndPush'))
    return project
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    """Canary and production CI/CD pipelines for the IoT Greengrass workshop.

    Canary pipeline: CodeCommit source -> build/package/deploy Lambda ->
    deploy to the Greengrass canary group.  Production pipeline: triggered by
    a params zip landing in S3, then deploys to production from the prod
    source bucket.  Bucket names are published to SSM so the build/deploy
    specs can look them up at run time.
    """
    super().__init__(scope, id, **kwargs)
    code = codecommit.Repository(
        self, "CodeRepo", repository_name="iot-gg-cicd-workshop-repo")
    prod_deploy_param_bucket = s3.Bucket(
        self, "ProdDeployBucket", versioned=True,
    )
    prod_source_bucket = s3.Bucket(
        self, "ProdSourceBucket", versioned=True,
    )
    # Publish bucket names to SSM for the build/deploy specs.
    ssm.StringParameter(
        self, "ProdSourceBucketParameter",
        parameter_name="/iot-gg-cicd-workshop/s3/prod_source_bucket",
        string_value=prod_source_bucket.bucket_name,
    )
    ssm.StringParameter(
        self, "ProdDeployBucketParameter",
        parameter_name="/iot-gg-cicd-workshop/s3/prod_deploy_param_bucket",
        string_value=prod_deploy_param_bucket.bucket_name,
    )
    # Canary build project, driven by the repo's buildspec.yml.
    cdk_build = codebuild.PipelineProject(
        self, "Build",
        project_name="iot-gg-cicd-workshop-build",
        build_spec=codebuild.BuildSpec.from_source_filename(
            "buildspec.yml"),
        environment_variables={
            "AWS_DEFAULT_REGION": codebuild.BuildEnvironmentVariable(
                value=kwargs['env'].region)
        })
    add_policies(cdk_build, [
        "AWSCloudFormationFullAccess",
        "AmazonSSMFullAccess",
        "AmazonS3FullAccess",
        "AWSLambdaFullAccess",
        "IAMFullAccess",
    ])
    # Canary deploy project, driven by the repo's deployspec.yml.
    cdk_deploy_canary = codebuild.PipelineProject(
        self, "Deploy",
        project_name="iot-gg-cicd-workshop-deploy-canary",
        build_spec=codebuild.BuildSpec.from_source_filename(
            "deployspec.yml"),
        environment_variables={
            "AWS_DEFAULT_REGION": codebuild.BuildEnvironmentVariable(
                value=kwargs['env'].region)
        })
    add_policies(cdk_deploy_canary, [
        "AWSCloudFormationFullAccess",
        "AWSGreengrassFullAccess",
        "AmazonSSMFullAccess",
        "ResourceGroupsandTagEditorReadOnlyAccess",
        "AWSLambdaFullAccess",
        "AWSIoTFullAccess"
    ])
    source_output = codepipeline.Artifact()
    cdk_build_output = codepipeline.Artifact("CdkBuildOutput")
    codepipeline.Pipeline(
        self, "Pipeline",
        pipeline_name="iot-gg-cicd-workshop-pipeline-canary",
        stages=[
            codepipeline.StageProps(
                stage_name="Source",
                actions=[
                    codepipeline_actions.CodeCommitSourceAction(
                        action_name="CodeCommit_Source",
                        repository=code,
                        output=source_output)
                ]),
            codepipeline.StageProps(
                stage_name="Build_Package_Deploy_Lambda",
                actions=[
                    codepipeline_actions.CodeBuildAction(
                        action_name="Build_Package_Deploy",
                        project=cdk_build,
                        input=source_output,
                        outputs=[cdk_build_output])
                ]),
            codepipeline.StageProps(
                stage_name="Deploy_GreenGrass_Canary",
                actions=[
                    codepipeline_actions.CodeBuildAction(
                        action_name="Deploy_Canary",
                        project=cdk_deploy_canary,
                        input=cdk_build_output)
                ]),
        ])
    # Production deploy project: inline buildspec pulls the prod bundle from
    # S3 (bucket name resolved via SSM) and runs the prod deployment.
    cdk_deploy_prod = codebuild.PipelineProject(
        self, "DeployProd",
        project_name="iot-gg-cicd-workshop-deploy-main",
        build_spec=codebuild.BuildSpec.from_object(
            dict(
                version="0.2",
                phases=dict(
                    install=dict(commands=[
                        "apt-get install zip",
                        "PROD_SOURCE_BUCKET=$(aws ssm get-parameter --name '/iot-gg-cicd-workshop/s3/prod_source_bucket' --with-decryption --query 'Parameter.Value' --output text)",
                        "aws s3 cp s3://$PROD_SOURCE_BUCKET/prod_deploy.zip prod_deploy.zip",
                        "unzip -o prod_deploy.zip",
                        "ls -la",
                        "make clean init"
                    ]),
                    build=dict(commands=[
                        "ls -la",
                        "make deploy-greengrass-prod",
                    ])),
                artifacts={
                    "base-directory": ".",
                    "files": ["**/*"]
                })),
        # BUG FIX: the build image used to be embedded inside the buildspec
        # object under an "environment" key, which is not a valid buildspec
        # key and was silently ignored.  It belongs on the project itself.
        environment=codebuild.BuildEnvironment(
            build_image=codebuild.LinuxBuildImage.STANDARD_2_0))
    add_policies(cdk_deploy_prod, [
        "AWSCloudFormationFullAccess",
        "AWSGreengrassFullAccess",
        "AmazonSSMFullAccess",
        "ResourceGroupsandTagEditorReadOnlyAccess",
        "AWSLambdaFullAccess"
    ])
    prod_source_output = codepipeline.Artifact()
    codepipeline.Pipeline(
        self, "PipelineProd",
        pipeline_name="iot-gg-cicd-workshop-pipeline-main",
        stages=[
            codepipeline.StageProps(
                stage_name="Source",
                actions=[
                    codepipeline_actions.S3SourceAction(
                        action_name="S3_Source",
                        bucket=prod_deploy_param_bucket,
                        bucket_key="deploy_params.zip",
                        output=prod_source_output)
                ]),
            codepipeline.StageProps(
                stage_name="Deploy_GreenGrass_Prod",
                actions=[
                    codepipeline_actions.CodeBuildAction(
                        action_name="Deploy_Prod",
                        project=cdk_deploy_prod,
                        input=prod_source_output)
                ]),
        ])
    # Bucket access for the deploy projects: canary reads/writes both
    # buckets; prod only needs to read the source bundle.
    prod_source_bucket.grant_read_write(cdk_deploy_canary.role)
    prod_source_bucket.grant_read(cdk_deploy_prod.role)
    prod_deploy_param_bucket.grant_read_write(cdk_deploy_canary.role)
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    """Serverless SAM pipeline: GitHub source -> SAM package -> staged CFN deploys.

    Configuration (notification email, GitHub user/repo) comes from SSM
    parameters; the GitHub OAuth token from Secrets Manager.  Production
    deployment is gated behind a manual approval action.
    """
    super().__init__(scope, id, **kwargs)
    notification_email = ssm.StringParameter.value_from_lookup(
        self,
        parameter_name='/serverless-pipeline/sns/notifications/primary-email'
    )
    github_user = ssm.StringParameter.value_from_lookup(
        self,
        parameter_name='/serverless-pipeline/codepipeline/github/user'
    )
    github_repo = ssm.StringParameter.value_from_lookup(
        self,
        parameter_name='/serverless-pipeline/codepipeline/github/repo'
    )
    github_token = core.SecretValue.secrets_manager(
        '/serverless-pipeline/secrets/github/token',
        json_field='github-token',
    )
    # Shared bucket for pipeline artifacts and the CodeBuild cache.
    artifact_bucket = s3.Bucket(
        self,
        'BuildArtifactsBucket',
        removal_policy=core.RemovalPolicy.RETAIN,
        encryption=s3.BucketEncryption.KMS_MANAGED,
        versioned=True,
    )
    build_project = build.PipelineProject(
        self,
        'BuildProject',
        # NOTE(review): "serveless" misspelling kept — renaming would replace
        # the deployed CodeBuild project.
        project_name='serveless-pipeline',
        description='Build project for the serverless-pipeline',
        # BUG FIX: `environment` takes a BuildEnvironment struct, not a build
        # image; the image goes in its `build_image` field.
        environment=build.BuildEnvironment(
            build_image=build.LinuxBuildImage.STANDARD_2_0),
        environment_variables={
            'BUILD_ARTIFACT_BUCKET': build.BuildEnvironmentVariable(
                value=artifact_bucket.bucket_name),
        },
        cache=build.Cache.bucket(artifact_bucket, prefix='codebuild-cache'),
        build_spec=build.BuildSpec.from_object({
            'version': '0.2',
            'phases': {
                'install': {
                    'runtime-versions': {
                        'nodejs': 10,
                    },
                    'commands': [
                        'echo "--------INSTALL PHASE--------"',
                        'pip3 install aws-sam-cli',
                    ]
                },
                'pre_build': {
                    'commands': [
                        'echo "--------PREBUILD PHASE--------"',
                        '# Example shows installation of NPM dependencies for shared deps (layers) in a SAM App',
                        '# cd functions/dependencies/shared_deps_one/nodejs',
                        '# npm install && cd',
                        '# cd functions/dependencies/shared_deps_two/nodejs',
                        '# npm install && cd',
                    ]
                },
                'build': {
                    'commands': [
                        'echo "--------BUILD PHASE--------"',
                        'echo "Starting SAM packaging `date` in `pwd`"',
                        'sam package --template-file template.yaml --s3-bucket $BUILD_ARTIFACT_BUCKET --output-template-file packaged.yaml',
                    ]
                },
                'post_build': {
                    'commands': [
                        'echo "--------POST-BUILD PHASE--------"',
                        'echo "SAM packaging completed on `date`"',
                    ]
                }
            },
            'artifacts': {
                'files': ['packaged.yaml'],
                'discard-paths': 'yes',
            },
            'cache': {
                'paths': ['/root/.cache/pip'],
            }
        })
    )
    serverless_pipeline = pipeline.Pipeline(
        self,
        'ServerlessPipeline',
        artifact_bucket=artifact_bucket,
        pipeline_name='serverless-pipeline',
        restart_execution_on_update=True,
    )
    source_output = pipeline.Artifact()
    build_output = pipeline.Artifact()
    cfn_output = pipeline.Artifact()
    # NOTE: This Stage/Action requires a manual OAuth handshake in the browser be complete before automated deployment can occur
    # Create a new Pipeline in the console, manually authorize GitHub as a source, and then cancel the pipeline wizard.
    serverless_pipeline.add_stage(stage_name='Source', actions=[
        actions.GitHubSourceAction(
            action_name='SourceCodeRepo',
            owner=github_user,
            oauth_token=github_token,
            repo=github_repo,
            branch='master',
            output=source_output,
        )
    ])
    serverless_pipeline.add_stage(stage_name='Build', actions=[
        actions.CodeBuildAction(
            action_name='CodeBuildProject',
            input=source_output,
            outputs=[build_output],
            project=build_project,
            type=actions.CodeBuildActionType.BUILD,
        )
    ])
    # Staging: create the change set, then execute it immediately.
    serverless_pipeline.add_stage(stage_name='Staging', actions=[
        actions.CloudFormationCreateReplaceChangeSetAction(
            action_name='CreateChangeSet',
            admin_permissions=True,
            change_set_name='serverless-pipeline-changeset-Staging',
            stack_name='ServerlessPipelineStaging',
            template_path=pipeline.ArtifactPath(
                build_output, file_name='packaged.yaml'
            ),
            capabilities=[cfn.CloudFormationCapabilities.ANONYMOUS_IAM],
            run_order=1,
        ),
        actions.CloudFormationExecuteChangeSetAction(
            action_name='ExecuteChangeSet',
            change_set_name='serverless-pipeline-changeset-Staging',
            stack_name='ServerlessPipelineStaging',
            output=cfn_output,
            run_order=2,
        ),
    ])
    # Production: change set -> manual approval (email) -> execute.
    serverless_pipeline.add_stage(stage_name='Production', actions=[
        actions.CloudFormationCreateReplaceChangeSetAction(
            action_name='CreateChangeSet',
            admin_permissions=True,
            change_set_name='serverless-pipeline-changeset-Production',
            stack_name='ServerlessPipelineProduction',
            template_path=pipeline.ArtifactPath(
                build_output, file_name='packaged.yaml'
            ),
            capabilities=[cfn.CloudFormationCapabilities.ANONYMOUS_IAM],
            run_order=1,
        ),
        actions.ManualApprovalAction(
            action_name='DeploymentApproval',
            notify_emails=[notification_email],
            run_order=2,
        ),
        actions.CloudFormationExecuteChangeSetAction(
            action_name='ExecuteChangeSet',
            change_set_name='serverless-pipeline-changeset-Production',
            stack_name='ServerlessPipelineProduction',
            output=cfn_output,
            run_order=3,
        ),
    ])
    core.CfnOutput(
        self,
        'BuildArtifactsBucketOutput',
        value=artifact_bucket.bucket_name,
        description='Amazon S3 Bucket for Pipeline and Build artifacts',
    )
    core.CfnOutput(
        self,
        'CodeBuildProjectOutput',
        value=build_project.project_arn,
        description='CodeBuild Project name',
    )
    core.CfnOutput(
        self,
        'CodePipelineOutput',
        value=serverless_pipeline.pipeline_arn,
        description='AWS CodePipeline pipeline name',
    )
def __init__(self, scope: core.Construct, construct_id: str, **kwargs) -> None:
    """SAM CI/CD pipeline: GitHub (codestar connection) -> build -> CFN change-set deploy."""
    super().__init__(scope, construct_id, **kwargs)
    # The code that defines your stack goes here
    pipeline = codepipeline.Pipeline(
        self, "Pipeline",
        artifact_bucket=s3.Bucket(self, "ArtifactBucket")
    )
    # Define the 'source' stage to be triggered by a webhook on the GitHub
    # repo for the code. Don't be fooled by the name, it's just a codestar
    # connection in the background. Bitbucket isn't involved.
    source_output = codepipeline.Artifact("SourceOutput")
    github_source = pipeline_actions.BitBucketSourceAction(
        action_name="Github_Source",
        # NOTE(review): the connection ARN is pulled from Secrets Manager and
        # resolved to a string token — confirm the secret's "arn" field holds
        # a valid CodeStar connection ARN.
        connection_arn=core.SecretValue.secrets_manager(
            secret_id="folksgl_github_connection_arn",
            json_field="arn"
        ).to_string(),
        repo="sam-cicd-python-template",
        owner="folksgl",
        branch="main",
        output=source_output,
    )
    pipeline.add_stage(stage_name="Source", actions=[github_source])
    # Define the 'build' stage
    build_project = codebuild.PipelineProject(
        scope=self,
        id="Build",
        # Declare the pipeline artifact bucket name as an environment variable
        # so the build can send the deployment package to it.
        environment_variables={
            "PACKAGE_BUCKET": codebuild.BuildEnvironmentVariable(
                value=pipeline.artifact_bucket.bucket_name,
                type=codebuild.BuildEnvironmentVariableType.PLAINTEXT,
            )
        },
        environment=codebuild.BuildEnvironment(
            build_image=codebuild.LinuxBuildImage.STANDARD_3_0
        ),
    )
    build_stage_output = codepipeline.Artifact("BuildStageOutput")
    build_action = pipeline_actions.CodeBuildAction(
        action_name="Build",
        project=build_project,
        input=source_output,
        outputs=[build_stage_output],
    )
    pipeline.add_stage(stage_name="Build", actions=[build_action])
    # Define the 'deploy' stage: two-step CloudFormation deploy — create the
    # change set first, then execute it.
    stack_name = "gateway-service-python"
    change_set_name = f"{stack_name}-changeset"
    create_change_set = pipeline_actions.CloudFormationCreateReplaceChangeSetAction(
        action_name="CreateChangeSet",
        stack_name=stack_name,
        change_set_name=change_set_name,
        template_path=build_stage_output.at_path("packaged.yaml"),
        admin_permissions=True,
        run_order=1,
    )
    execute_change_set = pipeline_actions.CloudFormationExecuteChangeSetAction(
        action_name="Deploy",
        stack_name=stack_name,
        change_set_name=change_set_name,
        run_order=2,
    )
    pipeline.add_stage(
        stage_name="DevDeployment",
        actions=[create_change_set, execute_change_set]
    )
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    """Provision an ECR repository, a Docker-image CodeBuild project, and
    a Fargate task launched on a fixed schedule by an EventBridge rule.
    """
    super().__init__(scope, id, **kwargs)

    # ==== ECR =========================================================
    batch_repo = aws_ecr.Repository(
        self,
        id='ecr_repository',
        repository_name='sample_repository',
        removal_policy=core.RemovalPolicy.DESTROY)

    # ==== Docker image build ==========================================
    # CodeBuild project intended to run inside a pipeline.
    # privileged=True is required for Docker-in-Docker image builds.
    docker_build_project = aws_codebuild.PipelineProject(
        self,
        "DockerBuild",
        project_name='continuous-delivery',
        build_spec=aws_codebuild.BuildSpec.from_source_filename(
            filename='batch/docker_build_buildspec.yml'),
        environment=aws_codebuild.BuildEnvironment(privileged=True),
        # Hand the repo URI to the buildspec so it knows where to push.
        environment_variables={
            'ecr': aws_codebuild.BuildEnvironmentVariable(
                value=batch_repo.repository_uri),
            'tag': aws_codebuild.BuildEnvironmentVariable(
                value='sample-batch')
        },
        description='Pipeline for CodeBuild',
        timeout=core.Duration.minutes(60),
    )

    # ==== VPC =========================================================
    network = aws_ec2.Vpc(
        self,
        id='vpc',
        cidr='10.0.0.0/16',
        max_azs=2,
        nat_gateways=1,
        vpn_gateway=False)

    # ==== ECS =========================================================
    cluster = aws_ecs.Cluster(
        self,
        id='ecs_cluster',
        cluster_name='sample_fargate_batch_cluster',
        vpc=network)

    task_def = aws_ecs.FargateTaskDefinition(
        self,
        id='fargate-task-definition',
        cpu=256,
        memory_limit_mib=512,
        family='fargate-task-definition')

    task_def.add_container(
        id='container',
        image=aws_ecs.ContainerImage.from_ecr_repository(batch_repo),
        logging=aws_ecs.LogDriver.aws_logs(
            stream_prefix='ecs',
            log_group=aws_logs.LogGroup(
                self,
                id='log-group',
                log_group_name='/ecs/fargate/fargate-batch')))

    # EventBridge rule: launch the Fargate task every 5 minutes.
    schedule_rule = aws_events.Rule(
        self,
        id='rule',
        rule_name='execute-task-rule',
        description='Event rule to execute ecs task.',
        schedule=aws_events.Schedule.cron(
            day=None,
            hour=None,
            minute='*/5',
            month=None,
            week_day=None,
            year=None))
    schedule_rule.add_target(target=aws_events_targets.EcsTask(
        cluster=cluster,
        task_definition=task_def,
        task_count=1))
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    """Static-website delivery stack for the Petclinic front end.

    Creates the CORS-enabled API Gateway, a public S3 website bucket, a
    Lambda-warming schedule rule, and a CodeCommit-sourced pipeline that
    rewrites the site's API endpoint at build time and deploys the result
    to the bucket.
    """
    super().__init__(scope, id, **kwargs)

    base_api = _apigw.RestApi(self, 'PetclinicApiGatewayWithCors')
    api_resource = base_api.root.add_resource('api')
    self.add_cors_options(api_resource)

    # Publicly readable bucket that serves the static site.
    website_bucket = _s3.Bucket(
        self,
        'PetclinicWebsite',
        website_index_document='index.html',
        public_read_access=True,
        removal_policy=core.RemovalPolicy.DESTROY)

    # Fires every 3 minutes to keep the backing Lambda warm.
    event_rule = _events.Rule(
        self,
        'PetclinicLambdaWarmRule',
        schedule=_events.Schedule.rate(core.Duration.minutes(3)))

    source_repo = _commit.Repository(
        self,
        'ServerlessCode',
        repository_name='spring-petclinic-static-resource')

    # The build swaps the hard-coded localhost endpoint in config.js for
    # the real API Gateway URL (exposed to the build as $API_ENDPOINT).
    web_buildspec = {
        'version': 0.2,
        'phases': {
            'install': {
                'runtime-versions': {
                    'java': 'openjdk8'
                },
                'commands': []
            },
            'build': {
                'commands': [
                    'mv scripts/config.js scripts/config.js.origin',
                    'sed -e "s,http://localhost:8081/,$API_ENDPOINT,g" scripts/config.js.origin > scripts/config.js'
                ]
            },
        },
        'artifacts': {
            'files': '**/*'
        },
    }
    build_project = _build.PipelineProject(
        self,
        'StaticWebBuild',
        build_spec=_build.BuildSpec.from_object(web_buildspec),
        environment_variables={
            'API_ENDPOINT':
            _build.BuildEnvironmentVariable(value=base_api.url)
        },
        environment=_build.BuildEnvironment(
            build_image=_build.LinuxBuildImage.STANDARD_2_0))

    source_output = _pipeline.Artifact('SourceOutput')
    build_output = _pipeline.Artifact('BuildOutput')

    source_stage = {
        'stageName': 'Source',
        'actions': [
            _action.CodeCommitSourceAction(
                action_name='CodeCommit_Source',
                repository=source_repo,
                output=source_output)
        ]
    }
    build_stage = {
        'stageName': 'Build',
        'actions': [
            _action.CodeBuildAction(
                action_name='CodeBuild_Static',
                project=build_project,
                input=source_output,
                outputs=[build_output])
        ]
    }
    deploy_stage = {
        'stageName': 'Deploy',
        'actions': [
            _action.S3DeployAction(
                action_name='Web_Static_Deploy',
                input=build_output,
                bucket=website_bucket)
        ]
    }
    web_pipeline = _pipeline.Pipeline(
        self,
        'ServerlessPipeline',
        stages=[source_stage, build_stage, deploy_stage])

    # Exported values consumed by sibling stacks / external tooling.
    core.CfnOutput(self, 'RuleArn',
                   export_name='RuleArn',
                   value=event_rule.rule_arn)
    core.CfnOutput(self, 'PetclinicApiGatewayWithCorsId',
                   export_name='PetclinicApiGatewayWithCorsId',
                   value=base_api.rest_api_id)
    core.CfnOutput(self, "PetclinicWebsiteUrl",
                   export_name="PetclinicWebsiteUrl",
                   value=website_bucket.bucket_website_url)
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    """Workshop bootstrap stack: a Kubernetes-tagged VPC, an admin
    CodeBuild project fed from an S3-hosted source bundle via
    CodePipeline, and a Cloud9 IDE nested-template stack.
    """
    super().__init__(scope, id, **kwargs)

    eks_vpc = ec2.Vpc(self, "VPC", cidr="10.0.0.0/16")

    # Kubernetes discovers subnets for load-balancer placement through
    # these well-known tags; only the first two subnets of each kind are
    # tagged, mirroring the two-AZ layout the buildspec consumes below.
    self.node.apply_aspect(
        core.Tag("kubernetes.io/cluster/cluster", "shared"))
    for subnet in eks_vpc.private_subnets[:2]:
        subnet.node.apply_aspect(
            core.Tag("kubernetes.io/role/internal-elb", "1"))
    for subnet in eks_vpc.public_subnets[:2]:
        subnet.node.apply_aspect(
            core.Tag("kubernetes.io/role/elb", "1"))

    # Shared admin role assumed by both CodeBuild and the Cloud9 EC2
    # instance (workshop convenience; deliberately broad).
    build_role = iam.Role(
        self,
        "BuildRole",
        assumed_by=iam.CompositePrincipal(
            iam.ServicePrincipal("codebuild.amazonaws.com"),
            iam.ServicePrincipal("ec2.amazonaws.com")),
        managed_policies=[
            iam.ManagedPolicy.from_aws_managed_policy_name(
                "AdministratorAccess")
        ])
    instance_profile = iam.CfnInstanceProfile(
        self,
        "InstanceProfile",
        roles=[build_role.role_name])

    # CodeBuild project driven by the buildspec shipped in the source zip.
    deploy_project = codebuild.PipelineProject(
        self,
        "BuildProject",
        role=build_role,
        build_spec=codebuild.BuildSpec.from_source_filename(
            "buildspec.yml"))

    pipeline = codepipeline.Pipeline(
        self,
        "Pipeline",
    )
    source_artifact = codepipeline.Artifact()

    # Region-local event-engine assets bucket that hosts the source zip.
    assets_bucket = s3.Bucket.from_bucket_attributes(
        self,
        "SourceBucket",
        bucket_arn=core.Fn.join(
            "",
            ["arn:aws:s3:::ee-assets-prod-",
             core.Fn.ref("AWS::Region")]))

    # Source stage: pull the zip directly from S3; no change trigger.
    pipeline.add_stage(
        stage_name="Source",
        actions=[
            codepipeline_actions.S3SourceAction(
                action_name="S3SourceRepo",
                bucket=assets_bucket,
                bucket_key=
                "modules/2cae1f20008d4fc5aaef294602649b98/v9/source.zip",
                output=source_artifact,
                trigger=codepipeline_actions.S3Trigger.NONE)
        ])

    # Deploy stage: CodeBuild run with the network/account context the
    # buildspec needs injected as environment variables.
    pipeline.add_stage(
        stage_name="Deploy",
        actions=[
            codepipeline_actions.CodeBuildAction(
                action_name="CodeBuildProject",
                project=deploy_project,
                type=codepipeline_actions.CodeBuildActionType.BUILD,
                input=source_artifact,
                environment_variables={
                    'PublicSubnet1ID': codebuild.BuildEnvironmentVariable(
                        value=eks_vpc.public_subnets[0].subnet_id),
                    'PublicSubnet2ID': codebuild.BuildEnvironmentVariable(
                        value=eks_vpc.public_subnets[1].subnet_id),
                    'PrivateSubnet1ID': codebuild.BuildEnvironmentVariable(
                        value=eks_vpc.private_subnets[0].subnet_id),
                    'PrivateSubnet2ID': codebuild.BuildEnvironmentVariable(
                        value=eks_vpc.private_subnets[1].subnet_id),
                    'AWS_DEFAULT_REGION': codebuild.BuildEnvironmentVariable(
                        value=self.region),
                    'INSTANCEPROFILEID': codebuild.BuildEnvironmentVariable(
                        value=instance_profile.ref),
                    'AWS_ACCOUNT_ID': codebuild.BuildEnvironmentVariable(
                        value=self.account)
                })
        ])

    # Cloud9 IDE is provisioned from a hosted CloudFormation template.
    cloud9_stack = cloudformation.CfnStack(
        self,
        "Cloud9Stack",
        template_url=(
            "https://ee-assets-prod-us-east-1.s3.amazonaws.com/modules/"
            "2cae1f20008d4fc5aaef294602649b98/v9/cloud9-ide-instance.yaml"),
        parameters={
            "C9InstanceType": "m5.large",
            "C9Subnet": eks_vpc.public_subnets[0].subnet_id
        })

    # Make sure VPC and Cloud9 exist before the pipeline starts running.
    pipeline.node.add_dependency(eks_vpc)
    pipeline.node.add_dependency(cloud9_stack)
def __init__(self, scope: core.Construct, id: str, *, git_token_key="", github_owner="", github_repo="", github_branch="", **kwargs) -> None:
    """Four-stage CDK delivery pipeline: GitHub source, staging deploy,
    manual approval, then production deploy.

    The same CodeBuild project is reused for both deploy actions; the ENV
    environment variable selects the target ("stg" vs "prd").
    """
    super().__init__(scope, id, **kwargs)

    # CodeBuild assumes this role to run `cdk deploy` from the buildspec.
    build_role = iam.Role(
        self,
        "Role",
        assumed_by=iam.ServicePrincipal("codebuild.amazonaws.com"))
    build_role.add_managed_policy(
        iam.ManagedPolicy.from_aws_managed_policy_name(
            "AdministratorAccess"))

    # privileged=True allows Docker use inside the build; an S3 bucket
    # backs the build cache.
    deliver_project = codebuild.PipelineProject(
        self,
        "Codebuild",
        build_spec=codebuild.BuildSpec.from_source_filename(
            "codebuild/buildspec.yaml"),
        cache=codebuild.Cache.bucket(s3.Bucket(self, "Bucket")),
        environment=codebuild.BuildEnvironment(
            build_image=codebuild.LinuxBuildImage.STANDARD_2_0,
            privileged=True),
        role=build_role)

    source_output = codepipeline.Artifact()
    staging_output = codepipeline.Artifact()
    production_output = codepipeline.Artifact()

    # GitHub OAuth token is read out of Secrets Manager at deploy time.
    source_action = codepipeline_actions.GitHubSourceAction(
        action_name="GitHub_Source",
        owner=github_owner,
        repo=github_repo,
        branch=github_branch,
        oauth_token=core.SecretValue.secrets_manager(git_token_key),
        output=source_output)

    staging_action = codepipeline_actions.CodeBuildAction(
        action_name="Deliver",
        project=deliver_project,
        input=source_output,
        outputs=[staging_output],
        environment_variables={"ENV": {
            "value": "stg"
        }})

    manual_approval_action = codepipeline_actions.ManualApprovalAction(
        action_name="Approve")

    production_action = codepipeline_actions.CodeBuildAction(
        action_name="Deliver",
        project=deliver_project,
        input=source_output,
        outputs=[production_output],
        environment_variables={"ENV": {
            "value": "prd"
        }})

    # Pipeline artifacts are encrypted at rest with a dedicated KMS key.
    artifact_key = kms.Key(self, "key")
    artifact_store = s3.Bucket(self, "bucket_artifacts",
                               encryption_key=artifact_key)

    pipeline = codepipeline.Pipeline(self,
                                     "Pipeline",
                                     artifact_bucket=artifact_store)
    pipeline.add_stage(stage_name="Source", actions=[source_action])
    pipeline.add_stage(stage_name="Staging", actions=[staging_action])
    pipeline.add_stage(stage_name="Approval",
                       actions=[manual_approval_action])
    pipeline.add_stage(stage_name="Production",
                       actions=[production_action])
oauth_token=core.SecretValue.secrets_manager( secret_id=deployment_secret['secret-id'], json_field=deployment_secret['json-fields']['github-oauth-token']), trigger=codepipeline_actions.GitHubTrigger.WEBHOOK, output=source_output, )]), codepipeline.StageProps(stage_name='Self-Update', actions=[ codepipeline_actions.CodeBuildAction( action_name='Self_Deploy', project=codebuild.PipelineProject( stack, 'CodePipelineBuild', build_spec=codebuild.BuildSpec.from_source_filename( 'codepipeline/pipelines-buildspec.yaml'), role=code_build_role, environment=codebuild.BuildEnvironment( build_image=codebuild.LinuxBuildImage.STANDARD_4_0, environment_variables={ 'PROJECT_DIR': codebuild.BuildEnvironmentVariable(value='codepipeline'), 'STACK_FILE': codebuild.BuildEnvironmentVariable(value='release_pipeline.py') } ) ), input=source_output ) ]), codepipeline.StageProps(stage_name='PyPi-Release', actions=[ codepipeline_actions.CodeBuildAction( action_name='PyPi_Release', project=codebuild.PipelineProject( stack, 'PyPiReleaseBuild', build_spec=codebuild.BuildSpec.from_source_filename(