def __init__(self, scope: core.Stack, id: str, **kwargs): super().__init__(scope, id, **kwargs) # create a pipeline self.pipeline = codepipeline.Pipeline(self, "Pipeline", pipeline_name='API_Gateway') # add a source stage self.source_stage = self.pipeline.add_stage(stage_name="Source") self.source_artifact = codepipeline.Artifact() # codebuild projects self.codebuild_validate = CodeBuildProjects( self, "CodebuildValidate", buildspec='buildspec-diff.yml') self.codebuild_deploy = CodeBuildProjects(self, "CodebuildDeploy", buildspec='buildspec.yml') # add source action self.source_stage.add_action( codepipeline_actions.GitHubSourceAction( oauth_token=core.SecretValue.secrets_manager( secret_id='prod/github_oauth_token', json_field='github_oauth_token'), output=self.source_artifact, owner=config['CODEPIPELINE']['GITHUB_OWNER'], repo=config['CODEPIPELINE']['GITHUB_REPO'], branch=config['CODEPIPELINE']['GITHUB_BRANCH'], action_name='Pull_Source', run_order=1, )) # add validate stage self.validate_stage = self.pipeline.add_stage(stage_name='Validate') # add validate codebuild action self.validate_stage.add_action( codepipeline_actions.CodeBuildAction( input=self.source_artifact, project=self.codebuild_validate.project, action_name='Validate_Changes')) # add approval stage self.approval_stage = self.pipeline.add_stage(stage_name='Approval') # simple approval stage to continue build after manual validation complete self.approval_stage.add_action( codepipeline_actions.ManualApprovalAction(action_name='Approval')) # add deploy stage self.deploy_stage = self.pipeline.add_stage(stage_name='Deploy') # add deploy codebuild action self.deploy_stage.add_action( codepipeline_actions.CodeBuildAction( input=self.source_artifact, project=self.codebuild_deploy.project, action_name='Deploy_Changes'))
def __init__(self, scope: core.Stack, id: str, **kwargs):
    """Build the 'Service_API' CodePipeline.

    Stage order: Source (GitHub) -> Test_and_Build (docker CodeBuild) ->
    API_Deployment (swagger CodeBuild).

    :param scope: parent stack this construct is created in.
    :param id: construct id.
    """
    super().__init__(scope, id, **kwargs)
    # create a pipeline
    self.pipeline = codepipeline.Pipeline(self, "Pipeline", pipeline_name='Service_API')
    # add a source stage
    self.source_stage = self.pipeline.add_stage(stage_name="Source")
    self.source_artifact = codepipeline.Artifact()
    # codebuild projects: swagger/API deploy and docker build/test
    self.codebuild_deploy_swagger = CodeBuildProjects(
        self, "CodebuildSwagger", buildspec='buildspec-swagger.yml')
    self.codebuild_deploy_ecr = CodeBuildProjects(
        self, "CodebuildDocker", buildspec='buildspec-docker.yml')
    # add source action — GitHub OAuth token is read from Secrets Manager
    self.source_stage.add_action(
        codepipeline_actions.GitHubSourceAction(
            oauth_token=core.SecretValue.secrets_manager(
                secret_id='prod/github_oauth_token',
                json_field='github_oauth_token'),
            output=self.source_artifact,
            owner=config['CODEPIPELINE']['GITHUB_OWNER'],
            repo=config['CODEPIPELINE']['GITHUB_REPO'],
            branch=config['CODEPIPELINE']['GITHUB_BRANCH'],
            action_name='Pull_Source',
            run_order=1,
        ))
    # add build/test stage
    # BUGFIX: this stage was previously also assigned to self.deploy_stage,
    # so the reference was silently clobbered when the real deploy stage was
    # created below; keep it on its own attribute instead.
    self.build_stage = self.pipeline.add_stage(stage_name='Test_and_Build')
    # add build/test codebuild action
    self.build_stage.add_action(
        codepipeline_actions.CodeBuildAction(
            input=self.source_artifact,
            project=self.codebuild_deploy_ecr.project,
            action_name='Test_and_Build'))
    # add deploy stage (self.deploy_stage keeps its original final value)
    self.deploy_stage = self.pipeline.add_stage(stage_name='API_Deployment')
    # add deploy codebuild action
    self.deploy_stage.add_action(
        codepipeline_actions.CodeBuildAction(
            input=self.source_artifact,
            project=self.codebuild_deploy_swagger.project,
            action_name='API_Deployment'))
def _create_image_build_stage(self, stage_name: str,
                              input: codepipeline.Artifact,
                              output: codepipeline.Artifact):
    """Assemble the pipeline stage that builds the application image.

    Creates a privileged CodeBuild project (needed for docker builds),
    grants it push/pull on the ECR repo, and wraps it in a CodeBuild
    action. Returns a StageProps-shaped dict for the pipeline definition.
    """
    build_project = codebuild.PipelineProject(
        self, 'Project',
        build_spec=self._create_build_spec(),
        environment={
            'build_image': codebuild.LinuxBuildImage.STANDARD_2_0,
            'privileged': True,
        },
        environment_variables={
            'REPOSITORY_URI': {'value': self.ecr_repo.repository_uri},
            'CONTAINER_NAME': {'value': self.container_name},
        })
    # the build pushes the resulting image, so it needs pull+push on ECR
    self.ecr_repo.grant_pull_push(build_project.grant_principal)
    build_action = codepipeline_actions.CodeBuildAction(
        action_name='CodeBuild_Action',
        input=input,
        outputs=[output],
        project=build_project)
    return {'stageName': stage_name, 'actions': [build_action]}
def CdkDeploySimplePipeline(self, name: str, repo, branch: str, src: str, output):
    """Create a two-stage pipeline: CodeCommit source -> CodeBuild cdk deploy.

    The deploy project is granted broad CloudFormation/EC2/S3 permissions so
    `cdk deploy` can manage the target stacks.
    """
    cdk_deploy = self.CdkDeployProject(f"{name}-CDKDeploy", stage=branch)
    cdk_deploy.role.add_to_policy(
        iam.PolicyStatement(
            effect=iam.Effect.ALLOW,
            resources=["*"],
            actions=["CloudFormation:*", "ec2:*", "s3:*"]))
    # stage 1: pull the requested branch from CodeCommit
    source_stage = codepipeline.StageProps(
        stage_name="Source",
        actions=[
            codepipeline_actions.CodeCommitSourceAction(
                action_name="CodeCommit_Source",
                repository=repo,
                branch=branch,
                output=src)
        ])
    # stage 2: run the CDK deploy project against the checkout
    deploy_stage = codepipeline.StageProps(
        stage_name="Deploy",
        actions=[
            codepipeline_actions.CodeBuildAction(
                action_name="CdkDeploy",
                project=cdk_deploy,
                input=src,
                outputs=[output])
        ])
    return codepipeline.Pipeline(self, name, stages=[source_stage, deploy_stage])
def create_build_action(self, project, source_output):
    """Return a CodeBuild action consuming *source_output* and emitting a
    single artifact named "Test"."""
    test_artifact = codepipeline.Artifact("Test")
    return codepipeline_actions.CodeBuildAction(
        project=project,
        input=source_output,
        outputs=[test_artifact],
        action_name="CodeBuild",
    )
def __init__(self, app: core.App, id: str, props, repo_name: str=None, **kwargs) -> None:
    """Create the 'flask-pipeline' CodePipeline.

    Stages: Source (imported CodeCommit repo) -> Build (docker image
    CodeBuild) -> Build2 (ECS CodeBuild). Projects and the artifact bucket
    are supplied through *props*.
    """
    super().__init__(app, id, **kwargs)
    source_output = aws_codepipeline.Artifact(artifact_name='source')
    code = aws_codecommit.Repository.from_repository_name(
        self, "ImportedRepo", repo_name)
    # build each stage up front, then hand them to the pipeline together
    source_stage = aws_codepipeline.StageProps(
        stage_name='Source',
        actions=[
            aws_codepipeline_actions.CodeCommitSourceAction(
                action_name="CodeCommit",
                repository=code,
                output=source_output,
                run_order=1,
            ),
        ])
    docker_stage = aws_codepipeline.StageProps(
        stage_name='Build',
        actions=[
            aws_codepipeline_actions.CodeBuildAction(
                action_name='DockerBuildImages',
                input=source_output,
                project=props['ecr_build'],
                run_order=1,
            )
        ])
    ecs_stage = aws_codepipeline.StageProps(
        stage_name='Build2',
        actions=[
            aws_codepipeline_actions.CodeBuildAction(
                action_name='ECSBuild',
                input=source_output,
                project=props['ecs_build'],
                run_order=1,
            )
        ])
    # local renamed from `codepipeline` to avoid shadowing the module alias
    flask_pipeline = aws_codepipeline.Pipeline(
        self, "CodePipeline",
        pipeline_name="flask-pipeline",
        artifact_bucket=props['bucket'],
        stages=[source_stage, docker_stage, ecs_stage])
def __init__(self, scope: core.Stack, id: str, **kwargs): super().__init__(scope, id, **kwargs) # create an iam role to be assumed later by codebuild self.role = iam.Role( self, "CodeBuildRole", assumed_by=iam.CompositePrincipal( iam.ServicePrincipal('codebuild.amazonaws.com'), iam.ServicePrincipal('ec2.amazonaws.com'))) # TODO: Don't need admin, let's make this least privilege self.role.add_to_policy( iam.PolicyStatement( actions=['*'], resources=['*'], )) # create a pipeline self.pipeline = codepipeline.Pipeline(self, "Pipeline", pipeline_name='EKS') # add a source stage self.source_stage = self.pipeline.add_stage(stage_name="Source") self.source_artifact = codepipeline.Artifact() # codebuild projects self.codebuild_deploy = CodeBuildProjects(self, "CodebuildDeploy", buildspec='buildspec.yml', codepipelinerole=self.role) # add source action self.source_stage.add_action( codepipeline_actions.GitHubSourceAction( oauth_token=core.SecretValue.secrets_manager( secret_id='prod/github_oauth_token', json_field='github_oauth_token'), output=self.source_artifact, owner=config['CODEPIPELINE']['GITHUB_OWNER'], repo=config['CODEPIPELINE']['GITHUB_REPO'], action_name='Pull_Source', run_order=1, )) # add deploy stage self.deploy_stage = self.pipeline.add_stage(stage_name='Deploy') # add deploy codebuild action self.deploy_stage.add_action( codepipeline_actions.CodeBuildAction( input=self.source_artifact, project=self.codebuild_deploy.project, action_name='Deploy_EKS_Cluster'))
def _create_train_step(self):
    """Add the training stage to self.pipeline.

    Creates a CodeBuild role (ECR/StepFunctions/Lambda-VPC/SecretsManager
    managed policies plus an inline iam:PassRole grant), a privileged
    VPC-attached CodeBuild project running buildspec.yml, and wires it in
    as a single action exposing its variables under the 'trainStep'
    namespace.
    """
    train_stage = self.pipeline.add_stage(stage_name=f"{self.name_prefix}-stage")
    codebuild_role = iam.Role(
        self, "Role",
        assumed_by=iam.ServicePrincipal("codebuild.amazonaws.com"),
        description="Role for CodeBuild",
        role_name=f"{self.name_prefix}-codebuild-role",
        managed_policies=[
            iam.ManagedPolicy.from_aws_managed_policy_name(
                "AmazonEC2ContainerRegistryFullAccess"),
            iam.ManagedPolicy.from_aws_managed_policy_name(
                "AWSStepFunctionsFullAccess"),
            iam.ManagedPolicy.from_aws_managed_policy_name(
                "service-role/AWSLambdaVPCAccessExecutionRole"),
            iam.ManagedPolicy.from_aws_managed_policy_name(
                "SecretsManagerReadWrite"),
        ],
    )
    # the build hands roles to other services, so it needs iam:PassRole
    pass_role_policy = iam.Policy(self, "PassRolePolicy")
    pass_role_policy.document.add_statements(
        iam.PolicyStatement(
            actions=["iam:PassRole"],
            resources=[f"arn:aws:iam::{Stack.of(self).account}:role/*"]))
    codebuild_role.attach_inline_policy(pass_role_policy)
    # privileged: the training build runs docker
    train_project = codebuild.PipelineProject(
        self, "TrainingStepProject",
        build_spec=codebuild.BuildSpec.from_source_filename('buildspec.yml'),
        environment=codebuild.BuildEnvironment(
            build_image=codebuild.LinuxBuildImage.STANDARD_5_0,
            privileged=True),
        role=codebuild_role,
        security_groups=[self.security_group],
        subnet_selection=self.subnet_selection,
        vpc=self.vpc)
    train_action = codepipeline_actions.CodeBuildAction(
        action_name=f"{self.name_prefix}-training-action",
        project=train_project,
        input=self.source_output,
        environment_variables={
            # '#{codepipeline.PipelineExecutionId}' is resolved by
            # CodePipeline at execution time
            "EXEC_ID": codebuild.BuildEnvironmentVariable(
                value='#{codepipeline.PipelineExecutionId}'),
            "SFN_WORKFLOW_NAME":
                codebuild.BuildEnvironmentVariable(value=self.sfn_name),
        },
        variables_namespace="trainStep",
    )
    train_stage.add_action(train_action)
def create_action_stage(self, artifact, project, action_name):
    """Build a CodeBuild action for *project* consuming *artifact*.

    Returns a (action, output_artifact) pair so the caller can chain the
    output into the next stage.
    """
    output_artifact = pipeline.Artifact()
    build_action = pipelineActions.CodeBuildAction(
        action_name=action_name,
        input=artifact,
        project=project,
        outputs=[output_artifact],
    )
    return build_action, output_artifact
def _build(self, input, extra_inputs=None):
    """Append a Build action for *input* and register its output artifact,
    then (re)add the "Build" stage containing all accumulated build actions.

    :param input: primary source artifact for the CodeBuild action.
    :param extra_inputs: optional additional input artifacts.
    """
    # BUGFIX: the default used to be a mutable `[]`, which is shared across
    # all calls of the method; use the None-sentinel idiom instead.
    if extra_inputs is None:
        extra_inputs = []
    artifact = cp.Artifact()
    build = cpa.CodeBuildAction(outputs=[artifact],
                                type=cpa.CodeBuildActionType.BUILD,
                                action_name="Build",
                                input=input,
                                extra_inputs=extra_inputs,
                                project=self.project)
    self.artifacts['builds'].append(artifact)
    self.actions['builds'].append(build)
    self.pipe.add_stage(stage_name="Build", actions=self.actions['builds'])
def __init__(self, scope: core.Construct, id: str, **kwargs):
    """Define the 'WebinarPipeline' CDK pipeline.

    GitHub source (polling trigger) -> SimpleSynthAction (`cdk synth`) ->
    JavaBuild stage (DemoService CodeBuild) -> Pre-prod application stage
    with a manual 'PromoteToProd' approval -> Prod application stage.
    Both application stages deploy to us-east-1.
    """
    super().__init__(scope, id, **kwargs)
    # (removed unused local `this_dir = path.dirname(__file__)`)
    # CodeBuild project that builds the Java DemoService
    code_build_project = codebuild.PipelineProject(
        self, "demoServiceProject",
        build_spec=codebuild.BuildSpec.from_source_filename(
            './pipeline/java_services/DemoService/buildspec.yml'))
    source_artifact = codepipeline.Artifact()
    cloud_assembly_artifact = codepipeline.Artifact()
    java_build_artifact = codepipeline.Artifact()
    pipeline = pipelines.CdkPipeline(
        self, 'Pipeline',
        cloud_assembly_artifact=cloud_assembly_artifact,
        pipeline_name='WebinarPipeline',
        source_action=cpactions.GitHubSourceAction(
            action_name='Github',
            output=source_artifact,
            oauth_token=core.SecretValue.secrets_manager('github-token'),
            owner='JuanGQCadavid',
            repo='cd_last_project_pipeline',
            trigger=cpactions.GitHubTrigger.POLL),
        synth_action=pipelines.SimpleSynthAction(
            source_artifact=source_artifact,
            cloud_assembly_artifact=cloud_assembly_artifact,
            install_command=
            'npm install -g aws-cdk && pip install -r requirements.txt',
            synth_command='cdk synth'))
    build_action = cpactions.CodeBuildAction(
        input=source_artifact,
        outputs=[java_build_artifact],
        project=code_build_project,
        action_name="demoServicesBuildAction",
    )
    buildStage = pipeline.add_stage(stage_name="JavaBuild")
    buildStage.add_actions(build_action)
    # Pre-prod must be manually approved before Prod deploys
    pre_prod_stage = pipeline.add_application_stage(
        WebServiceStage(self, 'Pre-prod', env={'region': 'us-east-1'}))
    pre_prod_stage.add_manual_approval_action(action_name='PromoteToProd')
    pipeline.add_application_stage(
        WebServiceStage(self, 'Prod', env={'region': 'us-east-1'}))
def __init__(self, app: core.App, id: str, props, **kwargs) -> None: super().__init__(app, id, **kwargs) # define the s3 artifact source_output = aws_codepipeline.Artifact(artifact_name='source') # define the pipeline pipeline = aws_codepipeline.Pipeline( self, "Pipeline", pipeline_name=f"{props['namespace']}", artifact_bucket=props['bucket'], stages=[ aws_codepipeline.StageProps( stage_name='Source', actions=[ aws_codepipeline_actions.S3SourceAction( bucket=props['bucket'], bucket_key='source.zip', action_name='S3Source', run_order=1, output=source_output, trigger=aws_codepipeline_actions.S3Trigger.POLL), ]), aws_codepipeline.StageProps( stage_name='Build', actions=[ aws_codepipeline_actions.CodeBuildAction( action_name='DockerBuildImages', input=source_output, project=props['cb_docker_build'], run_order=1, ) ]) ]) # give pipelinerole read write to the bucket props['bucket'].grant_read_write(pipeline.role) #pipeline param to get the pipeline_param = aws_ssm.StringParameter( self, "PPipeline", parameter_name=f"{props['namespace']}-pipeline", string_value=pipeline.pipeline_name, description='IoT playground pipeline bucket') # cfn output core.CfnOutput(self, "PipelineOut", description="Pipeline", value=pipeline.pipeline_name)
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    """Two-stage pipeline for k8s-plus-aws-gitops: GitHub source, then a
    CodeBuild deploy running aws-app-resources/buildspec.yml."""
    super().__init__(scope, id, **kwargs)
    # IAM role assumed by CodeBuild (AdministratorAccess — very broad)
    build_role = iam.Role(
        self, "BuildRole",
        assumed_by=iam.ServicePrincipal("codebuild.amazonaws.com"),
        managed_policies=[
            iam.ManagedPolicy.from_aws_managed_policy_name(
                "AdministratorAccess")
        ])
    # CodeBuild project driven by the repo's buildspec
    deploy_project = codebuild.PipelineProject(
        self, "BuildProject",
        role=build_role,
        build_spec=codebuild.BuildSpec.from_source_filename(
            "aws-app-resources/buildspec.yml"))
    delivery_pipeline = codepipeline.Pipeline(self, "Pipeline")
    source_artifact = codepipeline.Artifact()
    # stage 1: pull from GitHub using the token stored in Secrets Manager
    delivery_pipeline.add_stage(
        stage_name="Source",
        actions=[
            codepipeline_actions.GitHubSourceAction(
                action_name="SourceCodeRepo",
                owner="jasonumiker",
                repo="k8s-plus-aws-gitops",
                output=source_artifact,
                oauth_token=core.SecretValue.secrets_manager('github-token'))
        ])
    # stage 2: run the deploy project against the checkout
    delivery_pipeline.add_stage(
        stage_name="Deploy",
        actions=[
            codepipeline_actions.CodeBuildAction(
                action_name="CodeBuildProject",
                project=deploy_project,
                type=codepipeline_actions.CodeBuildActionType.BUILD,
                input=source_artifact)
        ])
def __init__(self, scope: core.Construct, id: str, code_commit_repo: str, default_branch: str = 'mainline', **kwargs) -> None:
    """Create 'ImageBuilderDeploymentPipeline': CodeCommit source -> one
    CodeBuild deploy stage.

    :param code_commit_repo: name of an existing CodeCommit repository.
    :param default_branch: branch the source action tracks.
    """
    super().__init__(scope, id, **kwargs)
    code = codecommit.Repository.from_repository_name(
        self, "codecommitrepo", code_commit_repo)
    # Cloudformation permission for project builds
    # right now setting admin permission on policy
    # modify this to load custom policy per pipeline from policy statement document
    # iam_cfn_admin_json = Policies.get_iam_cfn_admin_access_policy()
    policy_statement = iam.PolicyStatement()
    policy_statement.add_actions("*")
    policy_statement.add_resources("*")
    policy_statement.effect = iam.Effect.ALLOW
    serverless_build = codebuild.PipelineProject(self, "buildpipeline")
    # add cfn iam statements to build project
    serverless_build.add_to_role_policy(policy_statement)
    # NOTE(review): despite the name, this artifact carries the *source*
    # checkout (it is the CodeCommit action's output and the build's input);
    # renaming would change the deployed artifact name, so it is left as-is.
    build_output = codepipeline.Artifact("BuildOutput")
    codepipeline.Pipeline(
        self, "imageBuilderDeploymentPipeline",
        pipeline_name="ImageBuilderDeploymentPipeline",
        stages=[
            codepipeline.StageProps(
                stage_name="Source",
                actions=[
                    codepipeline_actions.CodeCommitSourceAction(
                        action_name="SourceCode",
                        branch=default_branch,
                        repository=code,
                        output=build_output)
                ]),
            codepipeline.StageProps(
                stage_name="Deploy",
                actions=[
                    codepipeline_actions.CodeBuildAction(
                        action_name="CodeDeploy",
                        project=serverless_build,
                        input=build_output)
                ])
        ])
def _create_build_and_assemble_action(
        self, input_artifact: aws_codepipeline.Artifact,
        output_artifact: aws_codepipeline.Artifact,
        pipeline_project: aws_codebuild.PipelineProject,
        config_source_bucket: str = None):
    """Return the 'BuildAndAssemble' CodeBuild action.

    When *config_source_bucket* is given, it is exposed to the build as the
    CONFIG_SOURCE_BUCKET environment variable; otherwise no extra
    environment is attached.
    """
    # build the shared kwargs once instead of duplicating the whole call
    action_kwargs = dict(
        action_name='BuildAndAssemble',
        input=input_artifact,
        project=pipeline_project,
        outputs=[output_artifact])
    if config_source_bucket is not None:
        action_kwargs['environment_variables'] = {
            'CONFIG_SOURCE_BUCKET':
                aws_codebuild.BuildEnvironmentVariable(
                    value=config_source_bucket)
        }
    return aws_codepipeline_actions.CodeBuildAction(**action_kwargs)
def setup_api_pipeline(self):
    """Create the API delivery pipeline.

    Three stages, in order:
    * Source: CodeCommitSourceAction on the master branch of the API repo
    * Build : CodeBuildAction running the API build project
    * Deploy: EcsDeployAction pushing the build output to the ECS service

    Returns
    -------
    aws_codepipeline.Pipeline
    """
    src = cp.Artifact()
    built = cp.Artifact(self.config.build_output)
    source_stage = cp.StageProps(
        stage_name='Source',
        actions=[
            cp_actions.CodeCommitSourceAction(
                action_name='Source',
                repository=self.api_source,
                branch='master',
                output=src,
            )
        ])
    build_stage = cp.StageProps(
        stage_name='Build',
        actions=[
            cp_actions.CodeBuildAction(
                action_name='Build',
                project=self.api_build_project,
                input=src,
                outputs=[built])
        ])
    deploy_stage = cp.StageProps(
        stage_name='Deploy',
        actions=[
            cp_actions.EcsDeployAction(
                action_name='Deploy',
                service=self.service.service,
                input=built,
                # image_file=built.at_path('imagedefinitions.json')
            )
        ])
    return cp.Pipeline(
        self, 'ApiPipeline',
        pipeline_name=self.config.api.pipeline,
        stages=[source_stage, build_stage, deploy_stage])
def setup_web_pipeline(self):
    """Create the web delivery pipeline.

    Three stages, in order:
    * Source: CodeCommitSourceAction on the master branch of the web repo
    * Build : CodeBuildAction running the web build project
    * Deploy: S3DeployAction publishing the build output publicly

    Returns
    -------
    aws_codepipeline.Pipeline
    """
    src = cp.Artifact()
    built = cp.Artifact(self.config.web.build_output)
    source_stage = cp.StageProps(
        stage_name='Source',
        actions=[
            cp_actions.CodeCommitSourceAction(
                action_name='Source',
                repository=self.web_source,
                branch='master',
                output=src,
            )
        ])
    build_stage = cp.StageProps(
        stage_name='Build',
        actions=[
            cp_actions.CodeBuildAction(
                action_name='Build',
                project=self.web_build_project,
                input=src,
                outputs=[built])
        ])
    # the site bucket serves the files, hence PUBLIC_READ on the upload
    deploy_stage = cp.StageProps(
        stage_name='Deploy',
        actions=[
            cp_actions.S3DeployAction(
                action_name='Deploy',
                bucket=self.web_bucket,
                input=built,
                access_control=s3.BucketAccessControl.PUBLIC_READ)
        ])
    return cp.Pipeline(
        self, 'WebPipeline',
        pipeline_name=self.config.web.pipeline,
        stages=[source_stage, build_stage, deploy_stage])
def __init__(self, scope: core.Construct, id: str, source: codepipeline.Artifact, pipeline: codepipeline.Pipeline, bucket: s3.Bucket, role: iam.Role, frontend: str, **kwargs) -> None:
    """Add a per-branch frontend CodeBuild stage to an existing pipeline.

    The branch is derived from the construct id (text after the last '-');
    repo/owner come from the GITHUB_REPO / GITHUB_OWNER environment
    variables of the synthesizing process.
    """
    super().__init__(scope, id, **kwargs)
    # branch name is encoded as the last '-'-separated token of the id
    branch = id.split('-')[-1]
    # privileged docker-capable build environment for the flask frontend
    build_env = codebuild.BuildEnvironment(
        build_image=codebuild.LinuxBuildImage.UBUNTU_14_04_DOCKER_18_09_0,
        compute_type=codebuild.ComputeType.SMALL,
        privileged=True,
        environment_variables={
            'PROJECTNAME': codebuild.BuildEnvironmentVariable(
                value=os.environ['GITHUB_REPO']),
            'GITHUBUSER': codebuild.BuildEnvironmentVariable(
                value=os.environ['GITHUB_OWNER']),
            'SOURCEBRANCH': codebuild.BuildEnvironmentVariable(value=branch),
            'ARTIFACT_BUCKET': codebuild.BuildEnvironmentVariable(
                value=bucket.bucket_arn),
            'REPO_URI': codebuild.BuildEnvironmentVariable(value=frontend),
        },
    )
    frontend_project = codebuild.PipelineProject(
        self, 'Build_Frontend-' + branch,
        description='Submit build jobs for {} as part of CI/CD pipeline'.
        format(os.environ['GITHUB_REPO']),
        environment=build_env,
        build_spec=codebuild.BuildSpec.from_source_filename(
            "buildspec.yml"),
        role=role)
    build_action = codepipeline_actions.CodeBuildAction(
        action_name='CodeBuild-' + branch,
        input=source,
        project=frontend_project,
        run_order=3)
    pipeline.add_stage(stage_name='CodeBuild-' + branch,
                       actions=[build_action])
def create_build_stage(self, source_output, build_project):
    """Return the 'Build' StageProps: a single CodeBuild action producing
    the 'BuildArtifact' output with ENV/FAMILY_NAME exported to the build."""
    build_artifact = _cp.Artifact(artifact_name='BuildArtifact')
    build_action = _cpa.CodeBuildAction(
        action_name='Build',
        input=source_output,
        project=build_project,
        run_order=1,
        environment_variables={
            'ENV': _cb.BuildEnvironmentVariable(value='develop'),
            'FAMILY_NAME': _cb.BuildEnvironmentVariable(value='DEMO-TASK'),
        },
        outputs=[build_artifact],
    )
    return _cp.StageProps(stage_name='Build', actions=[build_action])
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    """Create 'cdk-pipeline': GitHub source (token read from a local
    secrets file) followed by a docker-image CodeBuild stage."""
    super().__init__(scope, id, **kwargs)
    try:
        with open('../.secrets/github_token.txt') as f:
            # strip the trailing newline most editors append — a token
            # with whitespace would fail GitHub authentication
            github_token = f.read().strip()
    except FileNotFoundError:
        print(
            "Create ../.secrets/github_token.txt and put the token which you create in the github interface into it."
        )
        # BUGFIX: previously execution continued and crashed later with an
        # unrelated NameError because github_token was never bound;
        # re-raise after printing the guidance instead.
        raise
    source_output = aws_codepipeline.Artifact(artifact_name='source')
    ecr, cb_docker_build = self._get_build_project()
    pipeline = aws_codepipeline.Pipeline(
        self, "Pipeline",
        pipeline_name="cdk-pipeline",
        stages=[
            aws_codepipeline.StageProps(
                stage_name='Source',
                actions=[
                    aws_codepipeline_actions.GitHubSourceAction(
                        output=source_output,
                        action_name="Source",
                        oauth_token=core.SecretValue(github_token),
                        owner='arron1993',
                        repo="arronmoore.com",
                        branch="develop")
                ]),
            aws_codepipeline.StageProps(
                stage_name='Build',
                actions=[
                    aws_codepipeline_actions.CodeBuildAction(
                        action_name='DockerBuildImages',
                        input=source_output,
                        project=cb_docker_build,
                        run_order=1,
                    )
                ])
        ])
def generate_pipeline_stages(codebuild_project, role, beanstalk_application, beanstalk_environment, codestar_connection):
    """Return the [Source, Build, Deploy] StageProps for the pipeline.

    Source pulls the GitHub repo through a CodeStar connection (the action
    class is historically named BitBucketSourceAction), Build runs
    *codebuild_project*, Deploy ships the build output to Elastic Beanstalk.
    """
    source_artifact = codepipeline.Artifact("SourceOutput")
    build_artifact = codepipeline.Artifact("CodebuildOutput")
    # stage 1: GitHub via the CodeStar connection
    source_action = codepipeline_actions.BitBucketSourceAction(
        action_name="Github",
        connection_arn=codestar_connection.attr_connection_arn,
        owner="Jayvee1413",
        repo="santos-devops-challenge-tier1",
        output=source_artifact,
        code_build_clone_output=True,
        run_order=1)
    source_stage = codepipeline.StageProps(
        stage_name="Source",
        actions=[source_action],
    )
    # stage 2: CodeBuild
    build_action = codepipeline_actions.CodeBuildAction(
        action_name="codebuild",
        project=codebuild_project,
        input=source_artifact,
        outputs=[build_artifact],
        run_order=2,
    )
    codebuild_stage = codepipeline.StageProps(
        stage_name="Build",
        actions=[build_action])
    # stage 3: Elastic Beanstalk deployment of the built artifact
    deploy_action = ElasticBeanStalkDeployAction(
        action_name='Deploy',
        role=role,
        application_name=beanstalk_application.application_name,
        environment_name=beanstalk_environment.environment_name,
        input=build_artifact,
        run_order=3)
    deploy_stage = codepipeline.StageProps(
        stage_name="Deploy",
        actions=[deploy_action])
    return [source_stage, codebuild_stage, deploy_stage]
def add_react_build(stack: core.Stack, code_pipeline, source_output, bucket_arn: str):
    """Append ReactBuild and ReactDeploy stages to *code_pipeline*.

    Builds the React site with a privileged CodeBuild project and pushes
    the build output to the site bucket identified by *bucket_arn*.
    """
    # Could refactor the bucket to be part of the stage
    # https://github.com/aws-samples/aws-cdk-examples/blob/master/typescript/static-site/static-site.ts
    # Need to move to a stack / into startuptoolbag
    # The codebuild project can be moved back out into the pipeline (bit awkward?)
    react_site_bucket = Bucket.from_bucket_arn(stack, id='SiteBucket',
                                               bucket_arn=bucket_arn)
    # kept for compatibility — callers may read this attribute off the stack
    stack.build_output_artifact = codepipeline.Artifact()
    react_build_artifact = codepipeline.Artifact()
    react_project = codebuild.PipelineProject(
        stack, "t-u-b-CDKCodebuild",
        project_name="t-u-b-CodebuildProject",
        build_spec=codebuild.BuildSpec.from_source_filename(
            filename='buildspec.yml'),
        environment=codebuild.BuildEnvironment(privileged=True),
        description='Pipeline for the-ultimate-boilerplate',
        timeout=core.Duration.minutes(60),
    )
    react_build = codepipeline_actions.CodeBuildAction(
        action_name="ReactBuild",
        project=react_project,
        input=source_output,
        outputs=[react_build_artifact])
    push_to_s3 = codepipeline_actions.S3DeployAction(
        action_name="ReactS3Push",
        input=react_build_artifact,
        bucket=react_site_bucket)
    # Would be more elegant to be one stage but the input to deploy must be
    # created in a prior stage
    code_pipeline.add_stage(stage_name="ReactBuild", actions=[react_build])
    code_pipeline.add_stage(stage_name="ReactDeploy", actions=[push_to_s3])
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    """Provision the canary and prod Greengrass CI/CD pipelines for the
    iot-gg-cicd-workshop.

    Canary pipeline: CodeCommit source -> build/package/deploy Lambda ->
    canary Greengrass deploy. Prod pipeline: S3 source (deploy_params.zip)
    -> prod Greengrass deploy. Bucket names are published to SSM so the
    deploy buildspecs can find them.

    NOTE(review): this reads kwargs['env'].region, so callers must pass an
    `env` keyword — confirm against call sites.
    """
    super().__init__(scope, id, **kwargs)
    # CodeCommit repository holding the workshop sources
    code = codecommit.Repository(
        self, "CodeRepo",
        repository_name="iot-gg-cicd-workshop-repo")
    # versioned buckets: one for prod deploy parameters, one for prod sources
    prod_deploy_param_bucket = s3.Bucket(
        self, "ProdDeployBucket",
        versioned=True,
    )
    prod_source_bucket = s3.Bucket(
        self, "ProdSourceBucket",
        versioned=True,
    )
    # publish the bucket names to SSM so buildspecs can resolve them at run time
    ssm.StringParameter(
        self, "ProdSourceBucketParameter",
        parameter_name="/iot-gg-cicd-workshop/s3/prod_source_bucket",
        string_value=prod_source_bucket.bucket_name,
    )
    ssm.StringParameter(
        self, "ProdDeployBucketParameter",
        parameter_name="/iot-gg-cicd-workshop/s3/prod_deploy_param_bucket",
        string_value=prod_deploy_param_bucket.bucket_name,
    )
    # build project driven by the repo's buildspec.yml
    cdk_build = codebuild.PipelineProject(
        self, "Build",
        project_name="iot-gg-cicd-workshop-build",
        build_spec=codebuild.BuildSpec.from_source_filename(
            "buildspec.yml"),
        environment_variables={
            "AWS_DEFAULT_REGION":
            codebuild.BuildEnvironmentVariable(value=kwargs['env'].region)
        })
    add_policies(cdk_build, [
        "AWSCloudFormationFullAccess",
        "AmazonSSMFullAccess",
        "AmazonS3FullAccess",
        "AWSLambdaFullAccess",
        "IAMFullAccess",
    ])
    # canary deploy project driven by deployspec.yml
    cdk_deploy_canary = codebuild.PipelineProject(
        self, "Deploy",
        project_name="iot-gg-cicd-workshop-deploy-canary",
        build_spec=codebuild.BuildSpec.from_source_filename(
            "deployspec.yml"),
        environment_variables={
            "AWS_DEFAULT_REGION":
            codebuild.BuildEnvironmentVariable(value=kwargs['env'].region)
        })
    add_policies(cdk_deploy_canary, [
        "AWSCloudFormationFullAccess", "AWSGreengrassFullAccess",
        "AmazonSSMFullAccess", "ResourceGroupsandTagEditorReadOnlyAccess",
        "AWSLambdaFullAccess", "AWSIoTFullAccess"
    ])
    source_output = codepipeline.Artifact()
    cdk_build_output = codepipeline.Artifact("CdkBuildOutput")
    # canary pipeline: source -> build/package/deploy -> canary GG deploy
    codepipeline.Pipeline(
        self, "Pipeline",
        pipeline_name="iot-gg-cicd-workshop-pipeline-canary",
        stages=[
            codepipeline.StageProps(
                stage_name="Source",
                actions=[
                    codepipeline_actions.CodeCommitSourceAction(
                        action_name="CodeCommit_Source",
                        repository=code,
                        output=source_output)
                ]),
            codepipeline.StageProps(
                stage_name="Build_Package_Deploy_Lambda",
                actions=[
                    codepipeline_actions.CodeBuildAction(
                        action_name="Build_Package_Deploy",
                        project=cdk_build,
                        input=source_output,
                        outputs=[cdk_build_output])
                ]),
            codepipeline.StageProps(
                stage_name="Deploy_GreenGrass_Canary",
                actions=[
                    codepipeline_actions.CodeBuildAction(
                        action_name="Deploy_Canary",
                        project=cdk_deploy_canary,
                        input=cdk_build_output)
                ]),
        ])
    # prod deploy project: inline buildspec that downloads the prod bundle
    # from the source bucket (resolved via SSM) and deploys it
    cdk_deploy_prod = codebuild.PipelineProject(
        self, "DeployProd",
        project_name="iot-gg-cicd-workshop-deploy-main",
        build_spec=codebuild.BuildSpec.from_object(
            dict(
                version="0.2",
                phases=dict(install=dict(commands=[
                    "apt-get install zip",
                    "PROD_SOURCE_BUCKET=$(aws ssm get-parameter --name '/iot-gg-cicd-workshop/s3/prod_source_bucket' --with-decryption --query 'Parameter.Value' --output text)",
                    "aws s3 cp s3://$PROD_SOURCE_BUCKET/prod_deploy.zip prod_deploy.zip",
                    "unzip -o prod_deploy.zip", "ls -la", "make clean init"
                ]),
                           build=dict(commands=[
                               "ls -la",
                               "make deploy-greengrass-prod",
                           ])),
                artifacts={
                    "base-directory": ".",
                    "files": ["**/*"]
                },
                # NOTE(review): 'environment' is not a documented buildspec
                # key and LinuxBuildImage is not a plain JSON value; the
                # build image is normally set via the PipelineProject
                # `environment=` kwarg — confirm this entry has any effect.
                environment=dict(
                    buildImage=codebuild.LinuxBuildImage.STANDARD_2_0))))
    add_policies(cdk_deploy_prod, [
        "AWSCloudFormationFullAccess", "AWSGreengrassFullAccess",
        "AmazonSSMFullAccess", "ResourceGroupsandTagEditorReadOnlyAccess",
        "AWSLambdaFullAccess"
    ])
    prod_source_output = codepipeline.Artifact()
    # prod pipeline: S3 params bundle -> prod GG deploy
    codepipeline.Pipeline(
        self, "PipelineProd",
        pipeline_name="iot-gg-cicd-workshop-pipeline-main",
        stages=[
            codepipeline.StageProps(
                stage_name="Source",
                actions=[
                    codepipeline_actions.S3SourceAction(
                        action_name="S3_Source",
                        bucket=prod_deploy_param_bucket,
                        bucket_key="deploy_params.zip",
                        output=prod_source_output)
                ]),
            codepipeline.StageProps(
                stage_name="Deploy_GreenGrass_Prod",
                actions=[
                    codepipeline_actions.CodeBuildAction(
                        action_name="Deploy_Prod",
                        project=cdk_deploy_prod,
                        input=prod_source_output)
                ]),
        ])
    # the canary deploy stages the prod bundle; prod only needs to read it
    prod_source_bucket.grant_read_write(cdk_deploy_canary.role)
    prod_source_bucket.grant_read(cdk_deploy_prod.role)
    prod_deploy_param_bucket.grant_read_write(cdk_deploy_canary.role)
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    """Provision the 'serverless-pipeline' CodePipeline.

    GitHub source -> SAM package (CodeBuild) -> Staging change-set
    create/execute -> Production change-set create, manual approval,
    execute. Configuration comes from SSM parameters and a Secrets Manager
    GitHub token.
    """
    super().__init__(scope, id, **kwargs)
    notification_email = ssm.StringParameter.value_from_lookup(
        self,
        parameter_name='/serverless-pipeline/sns/notifications/primary-email'
    )
    github_user = ssm.StringParameter.value_from_lookup(
        self,
        parameter_name='/serverless-pipeline/codepipeline/github/user'
    )
    github_repo = ssm.StringParameter.value_from_lookup(
        self,
        parameter_name='/serverless-pipeline/codepipeline/github/repo'
    )
    github_token = core.SecretValue.secrets_manager(
        '/serverless-pipeline/secrets/github/token',
        json_field='github-token',
    )
    # retained on stack deletion — it holds build artifacts and the cache
    artifact_bucket = s3.Bucket(
        self, 'BuildArtifactsBucket',
        removal_policy=core.RemovalPolicy.RETAIN,
        encryption=s3.BucketEncryption.KMS_MANAGED,
        versioned=True,
    )
    build_project = build.PipelineProject(
        self, 'BuildProject',
        project_name='serveless-pipeline',  # (typo kept: renaming would replace the project)
        description='Build project for the serverless-pipeline',
        # BUGFIX: `environment` takes a BuildEnvironment; previously the
        # LinuxBuildImage was passed directly, which fails jsii type checks.
        environment=build.BuildEnvironment(
            build_image=build.LinuxBuildImage.STANDARD_2_0),
        environment_variables={
            'BUILD_ARTIFACT_BUCKET':
                build.BuildEnvironmentVariable(value=artifact_bucket.bucket_name),
        },
        cache=build.Cache.bucket(artifact_bucket, prefix='codebuild-cache'),
        build_spec=build.BuildSpec.from_object({
            'version': '0.2',
            'phases': {
                'install': {
                    'runtime-versions': {
                        'nodejs': 10,
                    },
                    'commands': [
                        'echo "--------INSTALL PHASE--------"',
                        'pip3 install aws-sam-cli',
                    ]
                },
                'pre_build': {
                    'commands': [
                        'echo "--------PREBUILD PHASE--------"',
                        '# Example shows installation of NPM dependencies for shared deps (layers) in a SAM App',
                        '# cd functions/dependencies/shared_deps_one/nodejs',
                        '# npm install && cd',
                        '# cd functions/dependencies/shared_deps_two/nodejs',
                        '# npm install && cd',
                    ]
                },
                'build': {
                    'commands': [
                        'echo "--------BUILD PHASE--------"',
                        'echo "Starting SAM packaging `date` in `pwd`"',
                        'sam package --template-file template.yaml --s3-bucket $BUILD_ARTIFACT_BUCKET --output-template-file packaged.yaml',
                    ]
                },
                'post_build': {
                    'commands': [
                        'echo "--------POST-BUILD PHASE--------"',
                        'echo "SAM packaging completed on `date`"',
                    ]
                }
            },
            'artifacts': {
                'files': ['packaged.yaml'],
                'discard-paths': 'yes',
            },
            'cache': {
                'paths': ['/root/.cache/pip'],
            }
        })
    )
    serverless_pipeline = pipeline.Pipeline(
        self, 'ServerlessPipeline',
        artifact_bucket=artifact_bucket,
        pipeline_name='serverless-pipeline',
        restart_execution_on_update=True,
    )
    source_output = pipeline.Artifact()
    build_output = pipeline.Artifact()
    # BUGFIX: each action output must be a distinct artifact — output
    # artifact names must be unique within a pipeline and a CDK Artifact
    # can only be produced by one action. Previously one `cfn_output` was
    # bound to both ExecuteChangeSet actions.
    staging_cfn_output = pipeline.Artifact()
    production_cfn_output = pipeline.Artifact()
    # NOTE: This Stage/Action requires a manual OAuth handshake in the browser be complete before automated deployment can occur
    # Create a new Pipeline in the console, manually authorize GitHub as a source, and then cancel the pipeline wizard.
    serverless_pipeline.add_stage(stage_name='Source', actions=[
        actions.GitHubSourceAction(
            action_name='SourceCodeRepo',
            owner=github_user,
            oauth_token=github_token,
            repo=github_repo,
            branch='master',
            output=source_output,
        )
    ])
    serverless_pipeline.add_stage(stage_name='Build', actions=[
        actions.CodeBuildAction(
            action_name='CodeBuildProject',
            input=source_output,
            outputs=[build_output],
            project=build_project,
            type=actions.CodeBuildActionType.BUILD,
        )
    ])
    # Staging: create the change set, then execute it immediately
    serverless_pipeline.add_stage(stage_name='Staging', actions=[
        actions.CloudFormationCreateReplaceChangeSetAction(
            action_name='CreateChangeSet',
            admin_permissions=True,
            change_set_name='serverless-pipeline-changeset-Staging',
            stack_name='ServerlessPipelineStaging',
            template_path=pipeline.ArtifactPath(
                build_output,
                file_name='packaged.yaml'
            ),
            capabilities=[cfn.CloudFormationCapabilities.ANONYMOUS_IAM],
            run_order=1,
        ),
        actions.CloudFormationExecuteChangeSetAction(
            action_name='ExecuteChangeSet',
            change_set_name='serverless-pipeline-changeset-Staging',
            stack_name='ServerlessPipelineStaging',
            output=staging_cfn_output,
            run_order=2,
        ),
    ])
    # Production: change set, manual email approval, then execute
    serverless_pipeline.add_stage(stage_name='Production', actions=[
        actions.CloudFormationCreateReplaceChangeSetAction(
            action_name='CreateChangeSet',
            admin_permissions=True,
            change_set_name='serverless-pipeline-changeset-Production',
            stack_name='ServerlessPipelineProduction',
            template_path=pipeline.ArtifactPath(
                build_output,
                file_name='packaged.yaml'
            ),
            capabilities=[cfn.CloudFormationCapabilities.ANONYMOUS_IAM],
            run_order=1,
        ),
        actions.ManualApprovalAction(
            action_name='DeploymentApproval',
            notify_emails=[notification_email],
            run_order=2,
        ),
        actions.CloudFormationExecuteChangeSetAction(
            action_name='ExecuteChangeSet',
            change_set_name='serverless-pipeline-changeset-Production',
            stack_name='ServerlessPipelineProduction',
            output=production_cfn_output,
            run_order=3,
        ),
    ])
    core.CfnOutput(
        self, 'BuildArtifactsBucketOutput',
        value=artifact_bucket.bucket_name,
        description='Amazon S3 Bucket for Pipeline and Build artifacts',
    )
    core.CfnOutput(
        self, 'CodeBuildProjectOutput',
        value=build_project.project_arn,
        # FIX: the value is the ARN, the description used to say "name"
        description='CodeBuild Project ARN',
    )
    core.CfnOutput(
        self, 'CodePipelineOutput',
        value=serverless_pipeline.pipeline_arn,
        description='AWS CodePipeline pipeline ARN',
    )
def __init__(self, scope: core.Construct, construct_id: str, **kwargs) -> None:
    """Provision a three-stage CI/CD pipeline (Source -> Build ->
    DevDeployment) for the gateway-service-python SAM application.
    """
    super().__init__(scope, construct_id, **kwargs)

    # The pipeline owns a dedicated S3 bucket for inter-stage artifacts.
    cicd = codepipeline.Pipeline(
        self,
        "Pipeline",
        artifact_bucket=s3.Bucket(self, "ArtifactBucket"),
    )

    # --- Source stage -----------------------------------------------------
    # Triggered by a webhook on the GitHub repo. Don't be fooled by the
    # action's name: it is just a CodeStar connection in the background,
    # Bitbucket isn't actually involved.
    src_artifact = codepipeline.Artifact("SourceOutput")
    connection_arn = core.SecretValue.secrets_manager(
        secret_id="folksgl_github_connection_arn", json_field="arn"
    ).to_string()
    cicd.add_stage(
        stage_name="Source",
        actions=[
            pipeline_actions.BitBucketSourceAction(
                action_name="Github_Source",
                connection_arn=connection_arn,
                repo="sam-cicd-python-template",
                owner="folksgl",
                branch="main",
                output=src_artifact,
            )
        ],
    )

    # --- Build stage ------------------------------------------------------
    # Expose the artifact bucket name to the build environment so the
    # buildspec can send the deployment package to it.
    package_bucket_var = codebuild.BuildEnvironmentVariable(
        value=cicd.artifact_bucket.bucket_name,
        type=codebuild.BuildEnvironmentVariableType.PLAINTEXT,
    )
    builder = codebuild.PipelineProject(
        scope=self,
        id="Build",
        environment_variables={"PACKAGE_BUCKET": package_bucket_var},
        environment=codebuild.BuildEnvironment(
            build_image=codebuild.LinuxBuildImage.STANDARD_3_0
        ),
    )
    built_artifact = codepipeline.Artifact("BuildStageOutput")
    cicd.add_stage(
        stage_name="Build",
        actions=[
            pipeline_actions.CodeBuildAction(
                action_name="Build",
                project=builder,
                input=src_artifact,
                outputs=[built_artifact],
            )
        ],
    )

    # --- Deploy stage -----------------------------------------------------
    # Two-step CloudFormation deployment: create a change set from the
    # packaged template, then execute it.
    stack_name = "gateway-service-python"
    change_set_name = f"{stack_name}-changeset"
    cicd.add_stage(
        stage_name="DevDeployment",
        actions=[
            pipeline_actions.CloudFormationCreateReplaceChangeSetAction(
                action_name="CreateChangeSet",
                stack_name=stack_name,
                change_set_name=change_set_name,
                template_path=built_artifact.at_path("packaged.yaml"),
                admin_permissions=True,
                run_order=1,
            ),
            pipeline_actions.CloudFormationExecuteChangeSetAction(
                action_name="Deploy",
                stack_name=stack_name,
                change_set_name=change_set_name,
                run_order=2,
            ),
        ],
    )
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    """Static-website stack for the Petclinic front end.

    Creates a CORS-enabled REST API, a public S3 website bucket, a
    warm-up schedule rule, and a CodeCommit -> CodeBuild -> S3 pipeline
    that rewrites ``scripts/config.js`` to point at the deployed API
    endpoint before publishing the static assets to the bucket.
    """
    super().__init__(scope, id, **kwargs)

    # REST API whose URL is injected into the website config at build time.
    base_api = _apigw.RestApi(self, 'PetclinicApiGatewayWithCors')
    api_resource = base_api.root.add_resource('api')
    self.add_cors_options(api_resource)

    # Public website bucket; safe to destroy with the stack (static assets only).
    website_bucket = _s3.Bucket(self, 'PetclinicWebsite',
                                website_index_document='index.html',
                                public_read_access=True,
                                removal_policy=core.RemovalPolicy.DESTROY)

    # Warm Lambda function Event rule (targets are attached elsewhere).
    event_rule = _events.Rule(self, 'PetclinicLambdaWarmRule',
                              schedule=_events.Schedule.rate(
                                  core.Duration.minutes(3)))

    # CodeCommit repository holding the static web resources.
    code = _commit.Repository(
        self, 'ServerlessCode',
        repository_name='spring-petclinic-static-resource')

    # Build step: replace the localhost API endpoint with the real one,
    # keeping the original file as *.origin.
    build_project = _build.PipelineProject(
        self, 'StaticWebBuild',
        build_spec=_build.BuildSpec.from_object({
            'version': 0.2,
            'phases': {
                'install': {
                    'runtime-versions': {
                        'java': 'openjdk8'
                    },
                    'commands': []
                },
                'build': {
                    'commands': [
                        'mv scripts/config.js scripts/config.js.origin',
                        'sed -e "s,http://localhost:8081/,$API_ENDPOINT,g" scripts/config.js.origin > scripts/config.js'
                    ]
                },
            },
            'artifacts': {
                'files': '**/*'
            },
        }),
        environment_variables={
            'API_ENDPOINT': _build.BuildEnvironmentVariable(value=base_api.url)
        },
        environment=_build.BuildEnvironment(
            build_image=_build.LinuxBuildImage.STANDARD_2_0))

    source_output = _pipeline.Artifact('SourceOutput')
    build_output = _pipeline.Artifact('BuildOutput')

    # Three-stage pipeline: pull from CodeCommit, rewrite config, deploy
    # to the website bucket.
    # FIX: local variable was previously misspelled as `pipline`.
    pipeline = _pipeline.Pipeline(
        self, 'ServerlessPipeline',
        stages=[{
            'stageName': 'Source',
            'actions': [
                _action.CodeCommitSourceAction(
                    action_name='CodeCommit_Source',
                    repository=code,
                    output=source_output)
            ]
        }, {
            'stageName': 'Build',
            'actions': [
                _action.CodeBuildAction(action_name='CodeBuild_Static',
                                        project=build_project,
                                        input=source_output,
                                        outputs=[build_output])
            ]
        }, {
            'stageName': 'Deploy',
            'actions': [
                _action.S3DeployAction(action_name='Web_Static_Deploy',
                                       input=build_output,
                                       bucket=website_bucket)
            ]
        }])

    # Cross-stack exports consumed by sibling stacks.
    core.CfnOutput(self, 'RuleArn', export_name='RuleArn',
                   value=event_rule.rule_arn)
    core.CfnOutput(self, 'PetclinicApiGatewayWithCorsId',
                   export_name='PetclinicApiGatewayWithCorsId',
                   value=base_api.rest_api_id)
    core.CfnOutput(self, "PetclinicWebsiteUrl",
                   export_name="PetclinicWebsiteUrl",
                   value=website_bucket.bucket_website_url)
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    """Workshop stack: a VPC tagged for EKS, an admin CodeBuild project
    driven by an S3-sourced CodePipeline, and a Cloud9 IDE instance
    provisioned via a nested CloudFormation stack.
    """
    super().__init__(scope, id, **kwargs)

    # VPC carrying the Kubernetes discovery tags EKS expects.
    eks_vpc = ec2.Vpc(self, "VPC", cidr="10.0.0.0/16")
    self.node.apply_aspect(
        core.Tag("kubernetes.io/cluster/cluster", "shared"))
    # Tag the first two subnets of each tier for load-balancer placement.
    for subnet in eks_vpc.private_subnets[:2]:
        subnet.node.apply_aspect(
            core.Tag("kubernetes.io/role/internal-elb", "1"))
    for subnet in eks_vpc.public_subnets[:2]:
        subnet.node.apply_aspect(core.Tag("kubernetes.io/role/elb", "1"))

    # Create IAM Role For CodeBuild and Cloud9 (admin: workshop-only role).
    admin_role = iam.Role(
        self,
        "BuildRole",
        assumed_by=iam.CompositePrincipal(
            iam.ServicePrincipal("codebuild.amazonaws.com"),
            iam.ServicePrincipal("ec2.amazonaws.com")),
        managed_policies=[
            iam.ManagedPolicy.from_aws_managed_policy_name(
                "AdministratorAccess")
        ])
    instance_profile = iam.CfnInstanceProfile(
        self, "InstanceProfile", roles=[admin_role.role_name])

    # CodeBuild project whose buildspec lives in the source bundle.
    deploy_project = codebuild.PipelineProject(
        self,
        "BuildProject",
        role=admin_role,
        build_spec=codebuild.BuildSpec.from_source_filename(
            "buildspec.yml"))

    pipeline = codepipeline.Pipeline(
        self,
        "Pipeline",
    )
    source_artifact = codepipeline.Artifact()

    # Event-engine assets bucket, resolved per-region at deploy time.
    assets_bucket = s3.Bucket.from_bucket_attributes(
        self,
        "SourceBucket",
        bucket_arn=core.Fn.join(
            "",
            ["arn:aws:s3:::ee-assets-prod-", core.Fn.ref("AWS::Region")]))

    # Source stage: pull the workshop bundle from S3 (no change trigger).
    pipeline.add_stage(
        stage_name="Source",
        actions=[
            codepipeline_actions.S3SourceAction(
                action_name="S3SourceRepo",
                bucket=assets_bucket,
                bucket_key=
                "modules/2cae1f20008d4fc5aaef294602649b98/v9/source.zip",
                output=source_artifact,
                trigger=codepipeline_actions.S3Trigger.NONE)
        ])

    # Deploy stage: hand the network/account context to the buildspec.
    build_env = {
        'PublicSubnet1ID':
        codebuild.BuildEnvironmentVariable(
            value=eks_vpc.public_subnets[0].subnet_id),
        'PublicSubnet2ID':
        codebuild.BuildEnvironmentVariable(
            value=eks_vpc.public_subnets[1].subnet_id),
        'PrivateSubnet1ID':
        codebuild.BuildEnvironmentVariable(
            value=eks_vpc.private_subnets[0].subnet_id),
        'PrivateSubnet2ID':
        codebuild.BuildEnvironmentVariable(
            value=eks_vpc.private_subnets[1].subnet_id),
        'AWS_DEFAULT_REGION':
        codebuild.BuildEnvironmentVariable(value=self.region),
        'INSTANCEPROFILEID':
        codebuild.BuildEnvironmentVariable(value=instance_profile.ref),
        'AWS_ACCOUNT_ID':
        codebuild.BuildEnvironmentVariable(value=self.account)
    }
    pipeline.add_stage(
        stage_name="Deploy",
        actions=[
            codepipeline_actions.CodeBuildAction(
                action_name="CodeBuildProject",
                project=deploy_project,
                type=codepipeline_actions.CodeBuildActionType.BUILD,
                input=source_artifact,
                environment_variables=build_env)
        ])

    # Cloud9 IDE provisioned from a hosted template, placed in the VPC.
    cloud9_stack = cloudformation.CfnStack(
        self,
        "Cloud9Stack",
        # template_url="https://aws-quickstart.s3.amazonaws.com/quickstart-cloud9-ide/templates/cloud9-ide-instance.yaml",
        template_url=
        "https://ee-assets-prod-us-east-1.s3.amazonaws.com/modules/2cae1f20008d4fc5aaef294602649b98/v9/cloud9-ide-instance.yaml",
        parameters={
            "C9InstanceType": "m5.large",
            "C9Subnet": eks_vpc.public_subnets[0].subnet_id
        })

    # Ensure the pipeline deploys only after the VPC and IDE exist.
    pipeline.node.add_dependency(eks_vpc)
    pipeline.node.add_dependency(cloud9_stack)
def __init__(self, scope: core.Construct, id: str, *, git_token_key="", github_owner="", github_repo="", github_branch="", **kwargs) -> None: super().__init__(scope, id, **kwargs) role = iam.Role( self, "Role", assumed_by=iam.ServicePrincipal("codebuild.amazonaws.com")) role.add_managed_policy( iam.ManagedPolicy.from_aws_managed_policy_name( "AdministratorAccess")) cdk_project = codebuild.PipelineProject( self, "Codebuild", build_spec=codebuild.BuildSpec.from_source_filename( "codebuild/buildspec.yaml"), cache=codebuild.Cache.bucket(s3.Bucket(self, "Bucket")), environment=codebuild.BuildEnvironment( build_image=codebuild.LinuxBuildImage.STANDARD_2_0, privileged=True), role=role) source_output = codepipeline.Artifact() staging_output = codepipeline.Artifact() production_output = codepipeline.Artifact() source_action = codepipeline_actions.GitHubSourceAction( action_name="GitHub_Source", owner=github_owner, repo=github_repo, branch=github_branch, oauth_token=core.SecretValue.secrets_manager(git_token_key), output=source_output) staging_action = codepipeline_actions.CodeBuildAction( action_name="Deliver", project=cdk_project, input=source_output, outputs=[staging_output], environment_variables={"ENV": { "value": "stg" }}) manual_approval_action = codepipeline_actions.ManualApprovalAction( action_name="Approve") production_action = codepipeline_actions.CodeBuildAction( action_name="Deliver", project=cdk_project, input=source_output, outputs=[production_output], environment_variables={"ENV": { "value": "prd" }}) key = kms.Key(self, "key") bucket = s3.Bucket(self, "bucket_artifacts", encryption_key=key) pipeline = codepipeline.Pipeline(self, "Pipeline", artifact_bucket=bucket) pipeline.add_stage(stage_name="Source", actions=[source_action]) pipeline.add_stage(stage_name="Staging", actions=[staging_action]) pipeline.add_stage(stage_name="Approval", actions=[manual_approval_action]) pipeline.add_stage(stage_name="Production", actions=[production_action])
oauth_token=core.SecretValue.secrets_manager( secret_id=deployment_secret['secret-id'], json_field=deployment_secret['json-fields']['github-oauth-token']), trigger=codepipeline_actions.GitHubTrigger.WEBHOOK, output=source_output, )]), codepipeline.StageProps(stage_name='Self-Update', actions=[ codepipeline_actions.CodeBuildAction( action_name='Self_Deploy', project=codebuild.PipelineProject( stack, 'CodePipelineBuild', build_spec=codebuild.BuildSpec.from_source_filename( 'codepipeline/pipelines-buildspec.yaml'), role=code_build_role, environment=codebuild.BuildEnvironment( build_image=codebuild.LinuxBuildImage.STANDARD_4_0, environment_variables={ 'PROJECT_DIR': codebuild.BuildEnvironmentVariable(value='codepipeline'), 'STACK_FILE': codebuild.BuildEnvironmentVariable(value='release_pipeline.py') } ) ), input=source_output ) ]), codepipeline.StageProps(stage_name='PyPi-Release', actions=[ codepipeline_actions.CodeBuildAction( action_name='PyPi_Release', project=codebuild.PipelineProject( stack, 'PyPiReleaseBuild', build_spec=codebuild.BuildSpec.from_source_filename(
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    """MLOps framework root stack.

    Wires together:
      * two CloudFormation parameters (notification email; optional
        CodeCommit clone URL),
      * an S3 "blueprint" repository seeded from the solution bucket by
        a custom resource,
      * an API Gateway + Lambda orchestrator that provisions ML
        pipelines as CloudFormation stacks,
      * an optional CodeCommit-triggered pipeline that feeds a repo-held
        mlops-config.json to the orchestrator, and
      * opt-in anonymous usage metrics.

    NOTE(review): ``%%BUCKET_NAME%%``, ``%%SOLUTION_NAME%%`` and
    ``%%VERSION%%`` are placeholders substituted by the packaging
    pipeline before deployment — presumably never deployed verbatim.
    """
    super().__init__(scope, id, **kwargs)

    # Get stack parameters: email and repo address
    notification_email = core.CfnParameter(
        self,
        "Email Address",
        type="String",
        description=
        "Specify an email to receive notifications about pipeline outcomes.",
        allowed_pattern='^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$',
        min_length=5,
        max_length=320,
        constraint_description=
        "Please enter an email address with correct format ([email protected])"
    )
    # Optional: an empty value disables the Git-sourced pipeline below
    # (see the GitAddressProvided condition).
    git_address = core.CfnParameter(
        self,
        "CodeCommit Repo Address",
        type="String",
        description=
        "AWS CodeCommit repository clone URL to connect to the framework.",
        allowed_pattern=
        '^(((https:\/\/|ssh:\/\/)(git\-codecommit)\.[a-zA-Z0-9_.+-]+(amazonaws\.com\/)[a-zA-Z0-9-.]+(\/)[a-zA-Z0-9-.]+(\/)[a-zA-Z0-9-.]+$)|)',
        min_length=0,
        max_length=320,
        constraint_description=
        "CodeCommit address must follow the pattern: ssh or https://git-codecommit.REGION.amazonaws.com/version/repos/REPONAME"
    )

    # Conditions: true when a CodeCommit URL was supplied.
    git_address_provided = core.CfnCondition(
        self,
        "GitAddressProvided",
        expression=core.Fn.condition_not(
            core.Fn.condition_equals(git_address, "")),
    )

    # Constants: name prefix shared by all provisioned pipeline stacks;
    # most IAM resource scoping below keys off this prefix.
    pipeline_stack_name = "MLOps-pipeline"

    # CDK Resources setup: server-access-log target for the other buckets.
    access_logs_bucket = s3.Bucket(
        self,
        "accessLogs",
        encryption=s3.BucketEncryption.S3_MANAGED,
        block_public_access=s3.BlockPublicAccess.BLOCK_ALL)
    # cfn_nag suppressions: the log bucket cannot log to itself.
    access_logs_bucket.node.default_child.cfn_options.metadata = {
        "cfn_nag": {
            "rules_to_suppress": [
                {
                    "id": "W35",
                    "reason": "This is the access bucket."
                },
                {
                    "id": "W51",
                    "reason":
                    "This S3 bucket does not need a bucket policy.",
                },
            ]
        }
    }

    # Existing solution-assets bucket (name substituted at packaging time).
    source_bucket = s3.Bucket.from_bucket_name(self, "BucketByName",
                                               "%%BUCKET_NAME%%")

    # Random suffix keeps the blueprint bucket name globally unique.
    blueprints_bucket_name = "blueprint-repository-" + str(uuid.uuid4())
    blueprint_repository_bucket = s3.Bucket(
        self,
        blueprints_bucket_name,
        encryption=s3.BucketEncryption.S3_MANAGED,
        server_access_logs_bucket=access_logs_bucket,
        server_access_logs_prefix=blueprints_bucket_name,
        block_public_access=s3.BlockPublicAccess.BLOCK_ALL)
    blueprint_repository_bucket.node.default_child.cfn_options.metadata = {
        "cfn_nag": {
            "rules_to_suppress": [{
                "id": "W51",
                "reason":
                "This S3 bucket does not need a bucket policy. All access to this bucket is restricted by IAM (CDK grant_read method)",
            }]
        }
    }

    # Custom resource to copy source bucket content to blueprints bucket
    custom_resource_lambda_fn = lambda_.Function(
        self,
        "CustomResourceLambda",
        code=lambda_.Code.from_asset("lambdas/custom_resource"),
        handler="index.on_event",
        runtime=lambda_.Runtime.PYTHON_3_8,
        environment={
            "source_bucket": "https://%%BUCKET_NAME%%-" + core.Aws.REGION +
            ".s3.amazonaws.com/%%SOLUTION_NAME%%/%%VERSION%%",
            "destination_bucket": blueprint_repository_bucket.bucket_name,
            "LOG_LEVEL": "INFO",
        },
        timeout=core.Duration.seconds(60),
    )
    custom_resource_lambda_fn.node.default_child.cfn_options.metadata = {
        "cfn_nag": {
            "rules_to_suppress": [{
                "id": "W58",
                "reason":
                "The lambda functions role already has permissions to write cloudwatch logs",
            }]
        }
    }
    blueprint_repository_bucket.grant_write(custom_resource_lambda_fn)
    custom_resource = core.CustomResource(
        self,
        "CustomResourceCopyAssets",
        service_token=custom_resource_lambda_fn.function_arn,
    )
    # The copy must not run before the destination bucket exists.
    custom_resource.node.add_dependency(blueprint_repository_bucket)

    ### IAM policies setup ###
    # Role CloudFormation assumes when deploying provisioned pipelines.
    cloudformation_role = iam.Role(
        self,
        "mlopscloudformationrole",
        assumed_by=iam.ServicePrincipal("cloudformation.amazonaws.com"),
    )
    # Cloudformation policy setup: everything a provisioned pipeline
    # stack may create, scoped (where possible) to the stack-name prefix.
    orchestrator_policy = iam.Policy(
        self,
        "lambdaOrchestratorPolicy",
        statements=[
            # Manage the provisioned pipeline stacks themselves.
            iam.PolicyStatement(
                actions=[
                    "cloudformation:CreateStack",
                    "cloudformation:DeleteStack",
                    "cloudformation:UpdateStack",
                    "cloudformation:ListStackResources",
                ],
                resources=[
                    f"arn:{core.Aws.PARTITION}:cloudformation:{core.Aws.REGION}:{core.Aws.ACCOUNT_ID}:stack/{pipeline_stack_name}*/*",
                ],
            ),
            # Roles created for/by those stacks (prefix-scoped).
            iam.PolicyStatement(
                actions=[
                    "iam:CreateRole",
                    "iam:DeleteRole",
                    "iam:DeleteRolePolicy",
                    "iam:GetRole",
                    "iam:GetRolePolicy",
                    "iam:PassRole",
                    "iam:PutRolePolicy",
                    "iam:AttachRolePolicy",
                    "iam:DetachRolePolicy",
                ],
                resources=[
                    f"arn:{core.Aws.PARTITION}:iam::{core.Aws.ACCOUNT_ID}:role/{pipeline_stack_name}*"
                ],
            ),
            # Model-image repositories.
            iam.PolicyStatement(
                actions=[
                    "ecr:CreateRepository",
                    "ecr:DeleteRepository",
                    "ecr:DescribeRepositories",
                ],
                resources=[
                    f"arn:{core.Aws.PARTITION}:ecr:{core.Aws.REGION}:{core.Aws.ACCOUNT_ID}:repository/awsmlopsmodels*"
                ],
            ),
            # CodeBuild projects used by the blueprints.
            iam.PolicyStatement(
                actions=[
                    "codebuild:CreateProject",
                    "codebuild:DeleteProject",
                    "codebuild:BatchGetProjects",
                ],
                resources=[
                    f"arn:{core.Aws.PARTITION}:codebuild:{core.Aws.REGION}:{core.Aws.ACCOUNT_ID}:project/ContainerFactory*",
                    f"arn:{core.Aws.PARTITION}:codebuild:{core.Aws.REGION}:{core.Aws.ACCOUNT_ID}:project/VerifySagemaker*",
                    f"arn:{core.Aws.PARTITION}:codebuild:{core.Aws.REGION}:{core.Aws.ACCOUNT_ID}:report-group/*",
                ],
            ),
            # Lambda functions and layers inside provisioned stacks.
            iam.PolicyStatement(
                actions=[
                    "lambda:CreateFunction",
                    "lambda:DeleteFunction",
                    "lambda:InvokeFunction",
                    "lambda:PublishLayerVersion",
                    "lambda:DeleteLayerVersion",
                    "lambda:GetLayerVersion",
                    "lambda:GetFunctionConfiguration",
                    "lambda:GetFunction",
                    "lambda:AddPermission",
                    "lambda:RemovePermission",
                    "lambda:UpdateFunctionConfiguration",
                ],
                resources=[
                    f"arn:{core.Aws.PARTITION}:lambda:{core.Aws.REGION}:{core.Aws.ACCOUNT_ID}:layer:*",
                    f"arn:{core.Aws.PARTITION}:lambda:{core.Aws.REGION}:{core.Aws.ACCOUNT_ID}:function:*",
                ],
            ),
            # Read blueprints and pipeline assets.
            iam.PolicyStatement(
                actions=["s3:GetObject"],
                resources=[
                    blueprint_repository_bucket.bucket_arn,
                    blueprint_repository_bucket.arn_for_objects("*"),
                    f"arn:{core.Aws.PARTITION}:s3:::pipeline-assets-*",
                ],
            ),
            # Manage the provisioned CodePipelines (prefix-scoped).
            iam.PolicyStatement(
                actions=[
                    "codepipeline:CreatePipeline",
                    "codepipeline:DeletePipeline",
                    "codepipeline:GetPipeline",
                    "codepipeline:GetPipelineState",
                ],
                resources=[
                    f"arn:{core.Aws.PARTITION}:codepipeline:{core.Aws.REGION}:{core.Aws.ACCOUNT_ID}:{pipeline_stack_name}*"
                ],
            ),
            # API Gateway resources created by inference blueprints.
            iam.PolicyStatement(
                actions=[
                    "apigateway:POST",
                    "apigateway:PATCH",
                    "apigateway:DELETE",
                    "apigateway:GET",
                    "apigateway:PUT",
                ],
                resources=[
                    f"arn:{core.Aws.PARTITION}:apigateway:{core.Aws.REGION}::/restapis/*",
                    f"arn:{core.Aws.PARTITION}:apigateway:{core.Aws.REGION}::/restapis",
                    f"arn:{core.Aws.PARTITION}:apigateway:{core.Aws.REGION}::/account",
                    f"arn:{core.Aws.PARTITION}:apigateway:{core.Aws.REGION}::/usageplans",
                    f"arn:{core.Aws.PARTITION}:apigateway:{core.Aws.REGION}::/usageplans/*",
                ],
            ),
            iam.PolicyStatement(
                actions=[
                    "logs:CreateLogGroup",
                    "logs:DescribeLogGroups",
                ],
                resources=[
                    f"arn:{core.Aws.PARTITION}:logs:{core.Aws.REGION}:{core.Aws.ACCOUNT_ID}:log-group:*",
                ],
            ),
            # Buckets created by provisioned stacks (cannot be
            # prefix-scoped: bucket names are not known in advance).
            iam.PolicyStatement(
                actions=[
                    "s3:CreateBucket",
                    "s3:PutEncryptionConfiguration",
                    "s3:PutBucketVersioning",
                    "s3:PutBucketPublicAccessBlock",
                    "s3:PutBucketLogging",
                ],
                resources=["arn:" + core.Aws.PARTITION + ":s3:::*"],
            ),
            # Pipeline notification topics.
            iam.PolicyStatement(
                actions=[
                    "sns:CreateTopic",
                    "sns:DeleteTopic",
                    "sns:Subscribe",
                    "sns:Unsubscribe",
                    "sns:GetTopicAttributes",
                    "sns:SetTopicAttributes",
                ],
                resources=[
                    f"arn:{core.Aws.PARTITION}:sns:{core.Aws.REGION}:{core.Aws.ACCOUNT_ID}:{pipeline_stack_name}*-PipelineNotification*",
                ],
            ),
            # EventBridge rules wired by provisioned pipelines.
            iam.PolicyStatement(
                actions=[
                    "events:PutRule",
                    "events:DescribeRule",
                    "events:PutTargets",
                    "events:RemoveTargets",
                    "events:DeleteRule",
                    "events:PutEvents",
                ],
                resources=[
                    f"arn:{core.Aws.PARTITION}:events:{core.Aws.REGION}:{core.Aws.ACCOUNT_ID}:rule/*",
                    f"arn:{core.Aws.PARTITION}:events:{core.Aws.REGION}:{core.Aws.ACCOUNT_ID}:event-bus/*",
                ],
            ),
        ],
    )
    orchestrator_policy.attach_to_role(cloudformation_role)

    # Lambda function IAM setup: allow handing the CFN role to stacks.
    lambda_passrole_policy = iam.PolicyStatement(
        actions=["iam:passrole"],
        resources=[cloudformation_role.role_arn])

    # API Gateway and lambda setup to enable provisioning pipelines
    # through API calls (aws-solutions-constructs pattern).
    provisioner_apigw_lambda = aws_apigateway_lambda.ApiGatewayToLambda(
        self,
        "PipelineOrchestration",
        lambda_function_props={
            "runtime": lambda_.Runtime.PYTHON_3_8,
            "handler": "index.handler",
            "code":
            lambda_.Code.from_asset("lambdas/pipeline_orchestration"),
        },
        api_gateway_props={
            "defaultMethodOptions": {
                "authorizationType": apigw.AuthorizationType.IAM,
            },
            "restApiName": f"{core.Aws.STACK_NAME}-orchestrator",
            "proxy": False
        },
    )
    # Two POST endpoints: provision a pipeline / query its status.
    provision_resource = provisioner_apigw_lambda.api_gateway.root.add_resource(
        'provisionpipeline')
    provision_resource.add_method('POST')
    status_resource = provisioner_apigw_lambda.api_gateway.root.add_resource(
        'pipelinestatus')
    status_resource.add_method('POST')

    # Orchestrator permissions: read blueprints, pass the CFN role, and
    # everything the provisioned stacks need (shared policy above).
    blueprint_repository_bucket.grant_read(
        provisioner_apigw_lambda.lambda_function)
    provisioner_apigw_lambda.lambda_function.add_to_role_policy(
        lambda_passrole_policy)
    orchestrator_policy.attach_to_role(
        provisioner_apigw_lambda.lambda_function.role)
    provisioner_apigw_lambda.lambda_function.add_to_role_policy(
        iam.PolicyStatement(actions=["xray:PutTraceSegments"],
                            resources=["*"]))
    lambda_node = provisioner_apigw_lambda.lambda_function.node.default_child
    lambda_node.cfn_options.metadata = {
        "cfn_nag": {
            "rules_to_suppress": [{
                "id": "W12",
                "reason":
                "The xray permissions PutTraceSegments is not able to be bound to resources.",
            }]
        }
    }

    # Environment variables setup
    provisioner_apigw_lambda.lambda_function.add_environment(
        key="BLUEPRINT_BUCKET_URL",
        value=str(blueprint_repository_bucket.bucket_regional_domain_name),
    )
    provisioner_apigw_lambda.lambda_function.add_environment(
        key="BLUEPRINT_BUCKET",
        value=str(blueprint_repository_bucket.bucket_name))
    provisioner_apigw_lambda.lambda_function.add_environment(
        key="ACCESS_BUCKET", value=str(access_logs_bucket.bucket_name))
    provisioner_apigw_lambda.lambda_function.add_environment(
        key="CFN_ROLE_ARN", value=str(cloudformation_role.role_arn))
    provisioner_apigw_lambda.lambda_function.add_environment(
        key="PIPELINE_STACK_NAME", value=pipeline_stack_name)
    provisioner_apigw_lambda.lambda_function.add_environment(
        key="NOTIFICATION_EMAIL", value=notification_email.value_as_string)
    provisioner_apigw_lambda.lambda_function.add_environment(
        key="LOG_LEVEL", value="DEBUG")

    cfn_policy_for_lambda = orchestrator_policy.node.default_child
    cfn_policy_for_lambda.cfn_options.metadata = {
        "cfn_nag": {
            "rules_to_suppress": [{
                "id": "W76",
                "reason":
                "A complex IAM policy is required for this resource.",
            }]
        }
    }

    ### Codepipeline with Git source definitions ###
    source_output = codepipeline.Artifact()
    # processing git_address to retrieve repo name
    # NOTE(review): select(5, ...) assumes the URL shape
    # https://git-codecommit.REGION.amazonaws.com/v1/repos/REPONAME,
    # where the repo name is the sixth "/"-separated token — matches the
    # allowed_pattern of the git_address parameter above.
    repo_name_split = core.Fn.split("/", git_address.value_as_string)
    repo_name = core.Fn.select(5, repo_name_split)
    # getting codecommit repo cdk object using 'from_repository_name'
    repo = codecommit.Repository.from_repository_name(
        self, "AWSMLOpsFrameworkRepository", repo_name)

    # Build step that calls the orchestrator with the repo's
    # mlops-config.json (synchronous invocation).
    codebuild_project = codebuild.PipelineProject(
        self,
        "Take config file",
        build_spec=codebuild.BuildSpec.from_object({
            "version": "0.2",
            "phases": {
                "build": {
                    "commands": [
                        "ls -a",
                        "aws lambda invoke --function-name " +
                        provisioner_apigw_lambda.lambda_function.
                        function_name +
                        " --payload fileb://mlops-config.json response.json"
                        + " --invocation-type RequestResponse",
                    ]
                }
            },
        }),
    )

    # Defining a Codepipeline project with CodeCommit as source
    codecommit_pipeline = codepipeline.Pipeline(
        self,
        "MLOpsCodeCommitPipeline",
        stages=[
            codepipeline.StageProps(
                stage_name="Source",
                actions=[
                    codepipeline_actions.CodeCommitSourceAction(
                        action_name="CodeCommit",
                        repository=repo,
                        output=source_output,
                    )
                ],
            ),
            codepipeline.StageProps(
                stage_name="TakeConfig",
                actions=[
                    codepipeline_actions.CodeBuildAction(
                        action_name="provision_pipeline",
                        input=source_output,
                        outputs=[],
                        project=codebuild_project,
                    )
                ],
            ),
        ],
        cross_account_keys=False,
    )
    # Both the pipeline role and the build role may invoke the orchestrator.
    codecommit_pipeline.add_to_role_policy(
        iam.PolicyStatement(
            actions=["lambda:InvokeFunction"],
            resources=[
                provisioner_apigw_lambda.lambda_function.function_arn
            ],
        ))
    codebuild_project.add_to_role_policy(
        iam.PolicyStatement(
            actions=["lambda:InvokeFunction"],
            resources=[
                provisioner_apigw_lambda.lambda_function.function_arn
            ],
        ))
    # NOTE(review): positional lookup — child [1] is presumed to be the
    # CDK-generated artifact bucket; fragile if the construct tree changes.
    pipeline_child_nodes = codecommit_pipeline.node.find_all()
    pipeline_child_nodes[1].node.default_child.cfn_options.metadata = {
        "cfn_nag": {
            "rules_to_suppress": [
                {
                    "id": "W35",
                    "reason":
                    "This is a managed bucket generated by CDK for codepipeline.",
                },
                {
                    "id": "W51",
                    "reason":
                    "This is a managed bucket generated by CDK for codepipeline.",
                },
            ]
        }
    }

    ###custom resource for operational metrics###
    # Mapping acts as the customer-visible opt-out switch for metrics.
    metricsMapping = core.CfnMapping(
        self,
        'AnonymousData',
        mapping={'SendAnonymousData': {
            'Data': 'Yes'
        }})
    metrics_condition = core.CfnCondition(
        self,
        'AnonymousDatatoAWS',
        expression=core.Fn.condition_equals(
            metricsMapping.find_in_map('SendAnonymousData', 'Data'),
            'Yes'))
    # One helper Lambda backs both metrics custom resources below.
    helper_function = lambda_.Function(
        self,
        "SolutionHelper",
        code=lambda_.Code.from_asset("lambdas/solution_helper"),
        handler="lambda_function.handler",
        runtime=lambda_.Runtime.PYTHON_3_8,
        timeout=core.Duration.seconds(60),
    )
    createIdFunction = core.CustomResource(
        self,
        'CreateUniqueID',
        service_token=helper_function.function_arn,
        properties={'Resource': 'UUID'},
        resource_type='Custom::CreateUUID')
    sendDataFunction = core.CustomResource(
        self,
        'SendAnonymousData',
        service_token=helper_function.function_arn,
        properties={
            'Resource': 'AnonymousMetric',
            'UUID': createIdFunction.get_att_string('UUID'),
            'gitSelected': git_address.value_as_string,
            'Region': core.Aws.REGION,
            'SolutionId': 'SO0136',
            'Version': '%%VERSION%%',
        },
        resource_type='Custom::AnonymousData')
    # Metrics resources only exist when the mapping says 'Yes'.
    core.Aspects.of(helper_function).add(
        ConditionalResources(metrics_condition))
    core.Aspects.of(createIdFunction).add(
        ConditionalResources(metrics_condition))
    core.Aspects.of(sendDataFunction).add(
        ConditionalResources(metrics_condition))
    helper_function.node.default_child.cfn_options.metadata = {
        "cfn_nag": {
            "rules_to_suppress": [{
                "id": "W58",
                "reason":
                "The lambda functions role already has permissions to write cloudwatch logs",
            }]
        }
    }

    # If user chooses Git as pipeline provision type, create codepipeline
    # with Git repo as source (otherwise these resources are conditioned out).
    core.Aspects.of(repo).add(ConditionalResources(git_address_provided))
    core.Aspects.of(codecommit_pipeline).add(
        ConditionalResources(git_address_provided))
    core.Aspects.of(codebuild_project).add(
        ConditionalResources(git_address_provided))