def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    """Stack wiring a CDK pipeline for the ResumeStack CodeCommit repo.

    Flow: CodeCommit source (polled) -> synth (npm/pip install + cdk synth)
    -> ResumeBuildStage -> ResumeDeployStage.
    """
    super().__init__(scope, id, **kwargs)

    source_artifact = codepipeline.Artifact('SourceArtifact')
    cloud_assembly_artifact = codepipeline.Artifact('CloudAssembly')

    # The repository already exists; reference it by name instead of creating it.
    resume_stack_repository = codecommit.Repository.from_repository_name(
        self, "ResumeStack", repository_name="ResumeStack")

    pipeline = pipelines.CdkPipeline(
        self, 'Pipeline',
        cloud_assembly_artifact=cloud_assembly_artifact,
        pipeline_name="ResumeStack",
        source_action=cpactions.CodeCommitSourceAction(
            action_name='GetCodeCommit',
            output=source_artifact,
            repository=resume_stack_repository,
            # POLL: the pipeline polls the repo instead of relying on a
            # CloudWatch event rule.
            trigger=cpactions.CodeCommitTrigger.POLL),
        synth_action=pipelines.SimpleSynthAction(
            source_artifact=source_artifact,
            action_name='SimpleSynthAction',
            cloud_assembly_artifact=cloud_assembly_artifact,
            install_command='npm install -g aws-cdk && pip install -r requirements.txt',
            synth_command='cdk synth'))

    # Application stages. Fix: the returned stage objects were bound to
    # unused locals (build_stage / deploy_stage); the bindings are dropped.
    pipeline.add_application_stage(ResumeBuildStage(self, 'BuildWebsite'))
    pipeline.add_application_stage(ResumeDeployStage(self, 'Website'))
def __init__(self, scope: core.Construct, id: str, repo: codecommit.Repository, **kwargs) -> None:
    """Pipeline stack: CodeCommit source -> CodeDeploy server deployment.

    Parameters
    ----------
    repo : codecommit.Repository
        Source repository feeding the pipeline.
    """
    super().__init__(scope, id, **kwargs)

    #---------- CodePipeline ----------#
    # Bug fix: the original created 'PipelineBucket' and then *imported a
    # second, unmanaged bucket construct* with the same name via
    # s3.Bucket.from_bucket_attributes for the pipeline's artifact store.
    # Use the bucket created in this stack directly so CloudFormation owns
    # the dependency and no duplicate construct is needed.
    artifact_bucket = s3.Bucket(self, 'PipelineBucket', bucket_name="bucket-name")
    pipeline = codepipeline.Pipeline(
        self, "CodePipeline", artifact_bucket=artifact_bucket)

    source_output = codepipeline.Artifact()
    source_action = codepipeline_actions.CodeCommitSourceAction(
        action_name="Source", repository=repo, output=source_output)
    pipeline.add_stage(stage_name="Source", actions=[source_action])

    #---------- Deploy ----------#
    deploy_application = codedeploy.ServerApplication(
        self, "CodeDeployApplication", application_name="application-name")
    # No target instances are configured here; CodeDeploy targets are
    # expected to be attached to the group elsewhere (tags/ASGs).
    deployment_group = codedeploy.ServerDeploymentGroup(
        self, "DeploymentGroup",
        application=deploy_application,
    )
    deploy_action = codepipeline_actions.CodeDeployServerDeployAction(
        action_name="deploy",
        input=source_output,
        deployment_group=deployment_group)
    pipeline.add_stage(stage_name="Deploy", actions=[deploy_action])
def CdkDeploySimplePipeline(self, name: str, repo, branch: str, src: str, output):
    """Build a two-stage pipeline: CodeCommit checkout -> CodeBuild cdk deploy.

    The CodeBuild project is granted broad CloudFormation/EC2/S3 permissions
    so it can drive `cdk deploy` for the given stage/branch.
    """
    deploy_project = self.CdkDeployProject(f"{name}-CDKDeploy", stage=branch)
    deploy_project.role.add_to_policy(
        iam.PolicyStatement(
            effect=iam.Effect.ALLOW,
            resources=["*"],
            actions=["CloudFormation:*", "ec2:*", "s3:*"]))

    source_stage = codepipeline.StageProps(
        stage_name="Source",
        actions=[
            codepipeline_actions.CodeCommitSourceAction(
                action_name="CodeCommit_Source",
                repository=repo,
                branch=branch,
                output=src)
        ])
    deploy_stage = codepipeline.StageProps(
        stage_name="Deploy",
        actions=[
            codepipeline_actions.CodeBuildAction(
                action_name="CdkDeploy",
                project=deploy_project,
                input=src,
                outputs=[output])
        ])

    return codepipeline.Pipeline(self, name, stages=[source_stage, deploy_stage])
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    """Workshop pipeline stack: repo + CDK pipeline + deploy stage with smoke tests."""
    super().__init__(scope, id, **kwargs)

    # CodeCommit repository that holds the workshop sources.
    repo = codecommit.Repository(self, 'WorkshopRepo', repository_name="WorkshopRepo")

    # Two artifacts: the raw source checkout, and the synthesized cloud
    # assembly (CloudFormation templates plus all other assets).
    source_artifact = codepipeline.Artifact()
    cloud_assembly_artifact = codepipeline.Artifact()

    source_action = codepipeline_actions.CodeCommitSourceAction(
        action_name='CodeCommit',
        output=source_artifact,
        repository=repo)

    # Builds the source into a cloud assembly artifact.
    synth_action = pipelines.SimpleSynthAction(
        install_commands=[
            'npm install -g aws-cdk',            # CDK CLI on CodeBuild
            'pip install -r requirements.txt',   # Python dependencies
        ],
        synth_command='npx cdk synth',
        source_artifact=source_artifact,
        cloud_assembly_artifact=cloud_assembly_artifact)

    pipeline = pipelines.CdkPipeline(
        self, 'Pipeline',
        cloud_assembly_artifact=cloud_assembly_artifact,
        source_action=source_action,
        synth_action=synth_action)

    deploy = WorkshopPipelineStage(self, 'Deploy')
    deploy_stage = pipeline.add_application_stage(deploy)

    # Post-deploy smoke tests hitting the deployed stack's output URLs.
    deploy_stage.add_actions(
        pipelines.ShellScriptAction(
            action_name='TestViewerEndpoint',
            use_outputs={
                'ENDPOINT_URL': pipeline.stack_output(deploy.hc_viewer_url)
            },
            commands=['curl -Ssf $ENDPOINT_URL']))
    deploy_stage.add_actions(
        pipelines.ShellScriptAction(
            action_name='TestAPIGatewayEndpoint',
            use_outputs={
                'ENDPOINT_URL': pipeline.stack_output(deploy.hc_endpoint)
            },
            commands=[
                'curl -Ssf $ENDPOINT_URL',
                'curl -Ssf $ENDPOINT_URL/hello',
                'curl -Ssf $ENDPOINT_URL/test'
            ]))
def create_source_action(_repo, _source_artifact):
    """Return a CodeCommit source action checking out *master* of *_repo*.

    code_build_clone_output=True makes the full git clone (with history)
    available to downstream CodeBuild actions instead of a plain zip.
    """
    return cpactions.CodeCommitSourceAction(
        action_name="Checkout",
        repository=_repo,
        branch='master',
        output=_source_artifact,
        code_build_clone_output=True,
    )
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    """Blue/green deployment demo stack.

    Creates a working bucket, a Lambda that swaps blue/green environments,
    a CodeCommit repo, and a two-stage pipeline (Source -> Lambda invoke).
    """
    super().__init__(scope, id, **kwargs)

    # Target environment names and application name come from CDK context.
    blue_env = self.node.try_get_context("blue_env")
    green_env = self.node.try_get_context("green_env")
    app_name = self.node.try_get_context("app_name")

    bucket = s3.Bucket(
        self,
        'BlueGreenBucket',
        # The default removal policy is RETAIN, which means that cdk
        # destroy will not attempt to delete the new bucket, and it will
        # remain in your account until manually deleted. By setting the
        # policy to DESTROY, cdk destroy will attempt to delete the bucket,
        # but will error if the bucket is not empty.
        removal_policy=core.RemovalPolicy.DESTROY  # NOT recommended for production code
    )

    handler = lmbda.Function(
        self, 'BlueGreenLambda',
        runtime=lmbda.Runtime.PYTHON_3_6,
        # Fix: Code.asset() is deprecated in aws-cdk; Code.from_asset() is
        # the supported equivalent (identical behavior, same asset path).
        code=lmbda.Code.from_asset('resources'),
        handler='blue_green.lambda_handler',
        environment={'BUCKET': bucket.bucket_name})
    bucket.grant_read_write(handler)

    repo = cc.Repository(
        self, 'Repository',
        repository_name='MyRepositoryName',
    )

    pipeline = cp.Pipeline(self, 'MyFirstPipeline')

    source_stage = pipeline.add_stage(stage_name='Source')
    source_artifact = cp.Artifact('Source')
    source_action = cpactions.CodeCommitSourceAction(
        action_name='CodeCommit', repository=repo, output=source_artifact)
    source_stage.add_action(source_action)

    # Deploy stage: invoke the blue/green Lambda with the env parameters.
    deploy_stage = pipeline.add_stage(stage_name='Deploy')
    lambda_action = cpactions.LambdaInvokeAction(
        action_name='InvokeAction',
        lambda_=handler,
        user_parameters={
            'blueEnvironment': blue_env,
            'greenEnvironment': green_env,
            'application': app_name
        },
        inputs=[source_artifact])
    deploy_stage.add_action(lambda_action)
def __init__(self, scope: core.Construct, id: str, code_commit_repo: str, default_branch: str = 'mainline', **kwargs) -> None:
    """Two-stage deployment pipeline: CodeCommit source -> CodeBuild deploy."""
    super().__init__(scope, id, **kwargs)

    # Import the existing CodeCommit repository by name.
    code = codecommit.Repository.from_repository_name(
        self, "codecommitrepo", code_commit_repo)

    # Cloudformation permission for project builds
    # right now setting admin permission on policy
    # modify this to load custom policy per pipeline from policy statement document
    # iam_cfn_admin_json = Policies.get_iam_cfn_admin_access_policy()
    admin_statement = iam.PolicyStatement()
    admin_statement.add_actions("*")
    admin_statement.add_resources("*")
    admin_statement.effect = iam.Effect.ALLOW

    serverless_build = codebuild.PipelineProject(self, "buildpipeline")
    # add cfn iam statements to build project
    serverless_build.add_to_role_policy(admin_statement)

    # One artifact carries the checkout from Source into Deploy.
    build_output = codepipeline.Artifact("BuildOutput")

    source_stage = codepipeline.StageProps(
        stage_name="Source",
        actions=[
            codepipeline_actions.CodeCommitSourceAction(
                action_name="SourceCode",
                branch=default_branch,
                repository=code,
                output=build_output)
        ])
    deploy_stage = codepipeline.StageProps(
        stage_name="Deploy",
        actions=[
            codepipeline_actions.CodeBuildAction(
                action_name="CodeDeploy",
                project=serverless_build,
                input=build_output)
        ])

    codepipeline.Pipeline(
        self,
        "imageBuilderDeploymentPipeline",
        pipeline_name="ImageBuilderDeploymentPipeline",
        stages=[source_stage, deploy_stage])
def setup_api_pipeline(self):
    """Setup the build pipeline for API.

    Uses codepipeline to create a Pipeline with 3 stages:
    * Source: CodeCommitSourceAction
    * Build:  CodeBuildAction
    * Deploy: EcsDeployAction (deploy to ECS service)

    Returns
    -------
    aws_codepipeline.Pipeline
    """
    checkout = cp.Artifact()
    built = cp.Artifact(self.config.build_output)

    source_stage = cp.StageProps(
        stage_name='Source',
        actions=[
            cp_actions.CodeCommitSourceAction(
                action_name='Source',
                repository=self.api_source,
                branch='master',
                output=checkout,
            )
        ])
    build_stage = cp.StageProps(
        stage_name='Build',
        actions=[
            cp_actions.CodeBuildAction(
                action_name='Build',
                project=self.api_build_project,
                input=checkout,
                outputs=[built])
        ])
    deploy_stage = cp.StageProps(
        stage_name='Deploy',
        actions=[
            cp_actions.EcsDeployAction(
                action_name='Deploy',
                service=self.service.service,
                input=built,
                # image_file=built.at_path('imagedefinitions.json')
            )
        ])

    return cp.Pipeline(
        self,
        'ApiPipeline',
        pipeline_name=self.config.api.pipeline,
        stages=[source_stage, build_stage, deploy_stage])
def setup_web_pipeline(self):
    """Setup the build pipeline for the web frontend.

    Uses codepipeline to create a Web Pipeline with 3 stages:
    * Source: CodeCommitSourceAction
    * Build:  CodeBuildAction
    * Deploy: S3DeployAction (publicly readable static hosting bucket)

    Returns
    -------
    aws_codepipeline.Pipeline
    """
    checkout = cp.Artifact()
    built = cp.Artifact(self.config.web.build_output)

    source_stage = cp.StageProps(
        stage_name='Source',
        actions=[
            cp_actions.CodeCommitSourceAction(
                action_name='Source',
                repository=self.web_source,
                branch='master',
                output=checkout,
            )
        ])
    build_stage = cp.StageProps(
        stage_name='Build',
        actions=[
            cp_actions.CodeBuildAction(
                action_name='Build',
                project=self.web_build_project,
                input=checkout,
                outputs=[built])
        ])
    deploy_stage = cp.StageProps(
        stage_name='Deploy',
        actions=[
            cp_actions.S3DeployAction(
                action_name='Deploy',
                bucket=self.web_bucket,
                input=built,
                access_control=s3.BucketAccessControl.PUBLIC_READ)
        ])

    return cp.Pipeline(
        self,
        'WebPipeline',
        pipeline_name=self.config.web.pipeline,
        stages=[source_stage, build_stage, deploy_stage])
def __init__(self, app: core.App, id: str, props, repo_name: str=None, **kwargs) -> None:
    """Flask pipeline stack: CodeCommit source feeding two CodeBuild stages
    (ECR docker image build, then ECS build)."""
    super().__init__(app, id, **kwargs)

    checkout = aws_codepipeline.Artifact(artifact_name='source')
    # Repository already exists; import it by name.
    repo = aws_codecommit.Repository.from_repository_name(
        self, "ImportedRepo", repo_name)

    source_stage = aws_codepipeline.StageProps(
        stage_name='Source',
        actions=[
            aws_codepipeline_actions.CodeCommitSourceAction(
                action_name="CodeCommit",
                repository=repo,
                output=checkout,
                run_order=1,
            ),
        ])
    docker_stage = aws_codepipeline.StageProps(
        stage_name='Build',
        actions=[
            aws_codepipeline_actions.CodeBuildAction(
                action_name='DockerBuildImages',
                input=checkout,
                project=props['ecr_build'],
                run_order=1,
            )
        ])
    ecs_stage = aws_codepipeline.StageProps(
        stage_name='Build2',
        actions=[
            aws_codepipeline_actions.CodeBuildAction(
                action_name='ECSBuild',
                input=checkout,
                project=props['ecs_build'],
                run_order=1,
            )
        ])

    aws_codepipeline.Pipeline(
        self, "CodePipeline",
        pipeline_name="flask-pipeline",
        artifact_bucket=props['bucket'],
        stages=[source_stage, docker_stage, ecs_stage])
def __init__(self, scope: core.Construct, id: str, **kwargs):
    """CDK pipeline stack delivering the EC2 generic role-policy application."""
    super().__init__(scope, id, **kwargs)

    # Repository holding the policy sources.
    source_repo = codecommit.Repository(
        self,
        "sourcerepo",
        repository_name='ec2_generic_policy',
        description='Generic EC2 policy for using on EC2 Instance Roles')

    source_artifact = codepipeline.Artifact()
    cloud_assembly_artifact = codepipeline.Artifact()

    # EVENTS: trigger on repository events rather than polling.
    source_action = cpactions.CodeCommitSourceAction(
        output=source_artifact,
        repository=source_repo,
        branch='master',
        trigger=cpactions.CodeCommitTrigger.EVENTS,
        action_name='OnRepoevent',
        run_order=1)
    synth_action = pipelines.SimpleSynthAction(
        source_artifact=source_artifact,
        cloud_assembly_artifact=cloud_assembly_artifact,
        install_command=
        'npm install -g aws-cdk && pip install -r requirements.txt',
        #build_command='pytest unittests',
        synth_command='cdk synth')

    pipeline = pipelines.CdkPipeline(
        self, 'Pipeline',
        cloud_assembly_artifact=cloud_assembly_artifact,
        pipeline_name='EC2RolePolicy',
        source_action=source_action,
        synth_action=synth_action)

    # Add stages as required.
    app_env = core.Environment(account="194433038617",
                               region="ap-southeast-2")
    prod_app = Ec2PolicyStage(self, 'Prod', env=app_env)
    prod_stage = pipeline.add_application_stage(prod_app)
def __init__(self, scope: core.Construct, id: str, frontendBucket, **kwargs):
    """Frontend delivery pipeline stack (only the Source stage is wired here)."""
    super().__init__(scope, id, **kwargs)

    prj_name = self.node.try_get_context('project_name')
    env_name = self.node.try_get_context('env')

    # Existing hosting bucket and the CDN distribution id stored in SSM.
    webhosting_bucket = s3.Bucket.from_bucket_name(
        self, 'webhostingbucket-id', bucket_name=frontendBucket)
    cdn_id = ssm.StringParameter.from_string_parameter_name(
        self, 'cdn_id', string_parameter_name=f'/{env_name}/cdn-id')

    source_repo = ccm.Repository.from_repository_name(
        self, 'repository-id', repository_name='cdk_app_frontend')

    # Dedicated artifact bucket for the pipeline.
    artifact_bucket = s3.Bucket(
        self, 'artifactbucket',
        encryption=s3.BucketEncryption.S3_MANAGED,
        access_control=s3.BucketAccessControl.BUCKET_OWNER_FULL_CONTROL)

    pipeline = cp.Pipeline(
        self, 'frontend-pipeline',
        pipeline_name=f'{prj_name}-{env_name}-frontend-pipeline',
        artifact_bucket=artifact_bucket,
        restart_execution_on_update=False)

    source_output = cp.Artifact(artifact_name='source')
    build_output = cp.Artifact(artifact_name='build')

    pipeline.add_stage(
        stage_name='Source',
        actions=[
            cp_actions.CodeCommitSourceAction(
                action_name='CodeCommitSource',
                repository=source_repo,
                branch='master',
                output=source_output)
        ])
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    """Static petclinic website stack.

    Creates the CORS-enabled REST API, a public S3 website bucket, a
    warm-up event rule, and a CodeCommit -> CodeBuild -> S3 pipeline that
    rewrites the frontend config to point at the deployed API.
    """
    super().__init__(scope, id, **kwargs)

    # REST API with a CORS-enabled /api resource.
    base_api = _apigw.RestApi(self, 'PetclinicApiGatewayWithCors')
    api_resource = base_api.root.add_resource('api')
    self.add_cors_options(api_resource)

    website_bucket = _s3.Bucket(
        self, 'PetclinicWebsite',
        website_index_document='index.html',
        public_read_access=True,
        removal_policy=core.RemovalPolicy.DESTROY)

    # Warm Lambda function Event rule
    event_rule = _events.Rule(
        self, 'PetclinicLambdaWarmRule',
        schedule=_events.Schedule.rate(core.Duration.minutes(3)))

    code = _commit.Repository(
        self, 'ServerlessCode',
        repository_name='spring-petclinic-static-resource')

    # Build step rewrites the hard-coded localhost endpoint in config.js
    # to the deployed API URL before publishing the static assets.
    build_project = _build.PipelineProject(
        self, 'StaticWebBuild',
        build_spec=_build.BuildSpec.from_object({
            'version': 0.2,
            'phases': {
                'install': {
                    'runtime-versions': {
                        'java': 'openjdk8'
                    },
                    'commands': []
                },
                'build': {
                    'commands': [
                        'mv scripts/config.js scripts/config.js.origin',
                        'sed -e "s,http://localhost:8081/,$API_ENDPOINT,g" scripts/config.js.origin > scripts/config.js'
                    ]
                },
            },
            'artifacts': {
                'files': '**/*'
            },
        }),
        environment_variables={
            'API_ENDPOINT':
            _build.BuildEnvironmentVariable(value=base_api.url)
        },
        environment=_build.BuildEnvironment(
            build_image=_build.LinuxBuildImage.STANDARD_2_0))

    source_output = _pipeline.Artifact('SourceOutput')
    build_output = _pipeline.Artifact('BuildOutput')

    static_web_pipeline = _pipeline.Pipeline(
        self, 'ServerlessPipeline',
        stages=[{
            'stageName': 'Source',
            'actions': [
                _action.CodeCommitSourceAction(
                    action_name='CodeCommit_Source',
                    repository=code,
                    output=source_output)
            ]
        }, {
            'stageName': 'Build',
            'actions': [
                _action.CodeBuildAction(
                    action_name='CodeBuild_Static',
                    project=build_project,
                    input=source_output,
                    outputs=[build_output])
            ]
        }, {
            'stageName': 'Deploy',
            'actions': [
                _action.S3DeployAction(
                    action_name='Web_Static_Deploy',
                    input=build_output,
                    bucket=website_bucket)
            ]
        }])

    # Export identifiers consumed by other stacks / tooling.
    core.CfnOutput(self, 'RuleArn', export_name='RuleArn',
                   value=event_rule.rule_arn)
    core.CfnOutput(self, 'PetclinicApiGatewayWithCorsId',
                   export_name='PetclinicApiGatewayWithCorsId',
                   value=base_api.rest_api_id)
    core.CfnOutput(self, "PetclinicWebsiteUrl",
                   export_name="PetclinicWebsiteUrl",
                   value=website_bucket.bucket_website_url)
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    """Root stack of the MLOps framework.

    Provisions, in order:
      * CloudFormation parameters: a notification email and an optional
        CodeCommit clone URL (a blank URL disables the Git-driven pipeline).
      * An access-log bucket and a blueprint-repository bucket, seeded from
        the solution distribution bucket by a custom-resource Lambda.
      * A CloudFormation service role plus the orchestrator IAM policy that
        scopes what provisioned pipelines may create.
      * An API Gateway + Lambda pair exposing /provisionpipeline and
        /pipelinestatus POST endpoints.
      * A CodeCommit-sourced pipeline that feeds mlops-config.json to the
        orchestrator Lambda (created only when a Git URL was provided).
      * Anonymous-metrics custom resources, gated by a CFN mapping/condition.
    """
    super().__init__(scope, id, **kwargs)
    # Get stack parameters: email and repo address
    notification_email = core.CfnParameter(
        self,
        "Email Address",
        type="String",
        description=
        "Specify an email to receive notifications about pipeline outcomes.",
        allowed_pattern='^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$',
        min_length=5,
        max_length=320,
        constraint_description=
        "Please enter an email address with correct format ([email protected])"
    )
    git_address = core.CfnParameter(
        self,
        "CodeCommit Repo Address",
        type="String",
        description=
        "AWS CodeCommit repository clone URL to connect to the framework.",
        allowed_pattern=
        '^(((https:\/\/|ssh:\/\/)(git\-codecommit)\.[a-zA-Z0-9_.+-]+(amazonaws\.com\/)[a-zA-Z0-9-.]+(\/)[a-zA-Z0-9-.]+(\/)[a-zA-Z0-9-.]+$)|)',
        min_length=0,
        max_length=320,
        constraint_description=
        "CodeCommit address must follow the pattern: ssh or https://git-codecommit.REGION.amazonaws.com/version/repos/REPONAME"
    )
    # Conditions
    # True only when a non-empty CodeCommit URL was supplied; gates the
    # Git-sourced pipeline resources created near the end of this method.
    git_address_provided = core.CfnCondition(
        self,
        "GitAddressProvided",
        expression=core.Fn.condition_not(
            core.Fn.condition_equals(git_address, "")),
    )
    # Constants
    pipeline_stack_name = "MLOps-pipeline"
    # CDK Resources setup
    # Receives server-access logs for the blueprint bucket below.
    access_logs_bucket = s3.Bucket(
        self,
        "accessLogs",
        encryption=s3.BucketEncryption.S3_MANAGED,
        block_public_access=s3.BlockPublicAccess.BLOCK_ALL)
    # cfn_nag suppressions: the access-log bucket itself needs neither
    # logging (W35) nor a bucket policy (W51).
    access_logs_bucket.node.default_child.cfn_options.metadata = {
        "cfn_nag": {
            "rules_to_suppress": [
                {
                    "id": "W35",
                    "reason": "This is the access bucket."
                },
                {
                    "id": "W51",
                    "reason":
                    "This S3 bucket does not need a bucket policy.",
                },
            ]
        }
    }
    # %%BUCKET_NAME%% is substituted at packaging time.
    # NOTE(review): source_bucket is imported but never referenced below —
    # confirm whether this construct is still needed.
    source_bucket = s3.Bucket.from_bucket_name(self, "BucketByName",
                                               "%%BUCKET_NAME%%")
    # Random suffix keeps the blueprint bucket name globally unique.
    blueprints_bucket_name = "blueprint-repository-" + str(uuid.uuid4())
    blueprint_repository_bucket = s3.Bucket(
        self,
        blueprints_bucket_name,
        encryption=s3.BucketEncryption.S3_MANAGED,
        server_access_logs_bucket=access_logs_bucket,
        server_access_logs_prefix=blueprints_bucket_name,
        block_public_access=s3.BlockPublicAccess.BLOCK_ALL)
    blueprint_repository_bucket.node.default_child.cfn_options.metadata = {
        "cfn_nag": {
            "rules_to_suppress": [{
                "id": "W51",
                "reason":
                "This S3 bucket does not need a bucket policy. All access to this bucket is restricted by IAM (CDK grant_read method)",
            }]
        }
    }
    # Custom resource to copy source bucket content to blueprints bucket
    custom_resource_lambda_fn = lambda_.Function(
        self,
        "CustomResourceLambda",
        code=lambda_.Code.from_asset("lambdas/custom_resource"),
        handler="index.on_event",
        runtime=lambda_.Runtime.PYTHON_3_8,
        environment={
            "source_bucket":
            "https://%%BUCKET_NAME%%-" + core.Aws.REGION +
            ".s3.amazonaws.com/%%SOLUTION_NAME%%/%%VERSION%%",
            "destination_bucket": blueprint_repository_bucket.bucket_name,
            "LOG_LEVEL": "INFO",
        },
        timeout=core.Duration.seconds(60),
    )
    custom_resource_lambda_fn.node.default_child.cfn_options.metadata = {
        "cfn_nag": {
            "rules_to_suppress": [{
                "id": "W58",
                "reason":
                "The lambda functions role already has permissions to write cloudwatch logs",
            }]
        }
    }
    blueprint_repository_bucket.grant_write(custom_resource_lambda_fn)
    custom_resource = core.CustomResource(
        self,
        "CustomResourceCopyAssets",
        service_token=custom_resource_lambda_fn.function_arn,
    )
    # Ensure the destination bucket exists before the copy runs.
    custom_resource.node.add_dependency(blueprint_repository_bucket)
    ### IAM policies setup ###
    # Role CloudFormation assumes when deploying provisioned pipelines.
    cloudformation_role = iam.Role(
        self,
        "mlopscloudformationrole",
        assumed_by=iam.ServicePrincipal("cloudformation.amazonaws.com"),
    )
    # Cloudformation policy setup
    # One statement per service; resources are scoped to the pipeline stack
    # name prefix where the service ARN format allows it.
    orchestrator_policy = iam.Policy(
        self,
        "lambdaOrchestratorPolicy",
        statements=[
            iam.PolicyStatement(
                actions=[
                    "cloudformation:CreateStack",
                    "cloudformation:DeleteStack",
                    "cloudformation:UpdateStack",
                    "cloudformation:ListStackResources",
                ],
                resources=[
                    f"arn:{core.Aws.PARTITION}:cloudformation:{core.Aws.REGION}:{core.Aws.ACCOUNT_ID}:stack/{pipeline_stack_name}*/*",
                ],
            ),
            iam.PolicyStatement(
                actions=[
                    "iam:CreateRole",
                    "iam:DeleteRole",
                    "iam:DeleteRolePolicy",
                    "iam:GetRole",
                    "iam:GetRolePolicy",
                    "iam:PassRole",
                    "iam:PutRolePolicy",
                    "iam:AttachRolePolicy",
                    "iam:DetachRolePolicy",
                ],
                resources=[
                    f"arn:{core.Aws.PARTITION}:iam::{core.Aws.ACCOUNT_ID}:role/{pipeline_stack_name}*"
                ],
            ),
            iam.PolicyStatement(
                actions=[
                    "ecr:CreateRepository",
                    "ecr:DeleteRepository",
                    "ecr:DescribeRepositories",
                ],
                resources=[
                    f"arn:{core.Aws.PARTITION}:ecr:{core.Aws.REGION}:{core.Aws.ACCOUNT_ID}:repository/awsmlopsmodels*"
                ],
            ),
            iam.PolicyStatement(
                actions=[
                    "codebuild:CreateProject",
                    "codebuild:DeleteProject",
                    "codebuild:BatchGetProjects",
                ],
                resources=[
                    f"arn:{core.Aws.PARTITION}:codebuild:{core.Aws.REGION}:{core.Aws.ACCOUNT_ID}:project/ContainerFactory*",
                    f"arn:{core.Aws.PARTITION}:codebuild:{core.Aws.REGION}:{core.Aws.ACCOUNT_ID}:project/VerifySagemaker*",
                    f"arn:{core.Aws.PARTITION}:codebuild:{core.Aws.REGION}:{core.Aws.ACCOUNT_ID}:report-group/*",
                ],
            ),
            iam.PolicyStatement(
                actions=[
                    "lambda:CreateFunction",
                    "lambda:DeleteFunction",
                    "lambda:InvokeFunction",
                    "lambda:PublishLayerVersion",
                    "lambda:DeleteLayerVersion",
                    "lambda:GetLayerVersion",
                    "lambda:GetFunctionConfiguration",
                    "lambda:GetFunction",
                    "lambda:AddPermission",
                    "lambda:RemovePermission",
                    "lambda:UpdateFunctionConfiguration",
                ],
                resources=[
                    f"arn:{core.Aws.PARTITION}:lambda:{core.Aws.REGION}:{core.Aws.ACCOUNT_ID}:layer:*",
                    f"arn:{core.Aws.PARTITION}:lambda:{core.Aws.REGION}:{core.Aws.ACCOUNT_ID}:function:*",
                ],
            ),
            iam.PolicyStatement(
                actions=["s3:GetObject"],
                resources=[
                    blueprint_repository_bucket.bucket_arn,
                    blueprint_repository_bucket.arn_for_objects("*"),
                    f"arn:{core.Aws.PARTITION}:s3:::pipeline-assets-*",
                ],
            ),
            iam.PolicyStatement(
                actions=[
                    "codepipeline:CreatePipeline",
                    "codepipeline:DeletePipeline",
                    "codepipeline:GetPipeline",
                    "codepipeline:GetPipelineState",
                ],
                resources=[
                    f"arn:{core.Aws.PARTITION}:codepipeline:{core.Aws.REGION}:{core.Aws.ACCOUNT_ID}:{pipeline_stack_name}*"
                ],
            ),
            iam.PolicyStatement(
                actions=[
                    "apigateway:POST",
                    "apigateway:PATCH",
                    "apigateway:DELETE",
                    "apigateway:GET",
                    "apigateway:PUT",
                ],
                resources=[
                    f"arn:{core.Aws.PARTITION}:apigateway:{core.Aws.REGION}::/restapis/*",
                    f"arn:{core.Aws.PARTITION}:apigateway:{core.Aws.REGION}::/restapis",
                    f"arn:{core.Aws.PARTITION}:apigateway:{core.Aws.REGION}::/account",
                    f"arn:{core.Aws.PARTITION}:apigateway:{core.Aws.REGION}::/usageplans",
                    f"arn:{core.Aws.PARTITION}:apigateway:{core.Aws.REGION}::/usageplans/*",
                ],
            ),
            iam.PolicyStatement(
                actions=[
                    "logs:CreateLogGroup",
                    "logs:DescribeLogGroups",
                ],
                resources=[
                    f"arn:{core.Aws.PARTITION}:logs:{core.Aws.REGION}:{core.Aws.ACCOUNT_ID}:log-group:*",
                ],
            ),
            iam.PolicyStatement(
                actions=[
                    "s3:CreateBucket",
                    "s3:PutEncryptionConfiguration",
                    "s3:PutBucketVersioning",
                    "s3:PutBucketPublicAccessBlock",
                    "s3:PutBucketLogging",
                ],
                resources=["arn:" + core.Aws.PARTITION + ":s3:::*"],
            ),
            iam.PolicyStatement(
                actions=[
                    "sns:CreateTopic",
                    "sns:DeleteTopic",
                    "sns:Subscribe",
                    "sns:Unsubscribe",
                    "sns:GetTopicAttributes",
                    "sns:SetTopicAttributes",
                ],
                resources=[
                    f"arn:{core.Aws.PARTITION}:sns:{core.Aws.REGION}:{core.Aws.ACCOUNT_ID}:{pipeline_stack_name}*-PipelineNotification*",
                ],
            ),
            iam.PolicyStatement(
                actions=[
                    "events:PutRule",
                    "events:DescribeRule",
                    "events:PutTargets",
                    "events:RemoveTargets",
                    "events:DeleteRule",
                    "events:PutEvents",
                ],
                resources=[
                    f"arn:{core.Aws.PARTITION}:events:{core.Aws.REGION}:{core.Aws.ACCOUNT_ID}:rule/*",
                    f"arn:{core.Aws.PARTITION}:events:{core.Aws.REGION}:{core.Aws.ACCOUNT_ID}:event-bus/*",
                ],
            ),
        ],
    )
    orchestrator_policy.attach_to_role(cloudformation_role)
    # Lambda function IAM setup
    # Lets the orchestrator Lambda hand the CloudFormation role above to
    # the stacks it provisions.
    lambda_passrole_policy = iam.PolicyStatement(
        actions=["iam:passrole"],
        resources=[cloudformation_role.role_arn])
    # API Gateway and lambda setup to enable provisioning pipelines through API calls
    provisioner_apigw_lambda = aws_apigateway_lambda.ApiGatewayToLambda(
        self,
        "PipelineOrchestration",
        lambda_function_props={
            "runtime": lambda_.Runtime.PYTHON_3_8,
            "handler": "index.handler",
            "code":
            lambda_.Code.from_asset("lambdas/pipeline_orchestration"),
        },
        api_gateway_props={
            "defaultMethodOptions": {
                "authorizationType": apigw.AuthorizationType.IAM,
            },
            "restApiName": f"{core.Aws.STACK_NAME}-orchestrator",
            "proxy": False
        },
    )
    # POST /provisionpipeline and POST /pipelinestatus routes.
    provision_resource = provisioner_apigw_lambda.api_gateway.root.add_resource(
        'provisionpipeline')
    provision_resource.add_method('POST')
    status_resource = provisioner_apigw_lambda.api_gateway.root.add_resource(
        'pipelinestatus')
    status_resource.add_method('POST')
    blueprint_repository_bucket.grant_read(
        provisioner_apigw_lambda.lambda_function)
    provisioner_apigw_lambda.lambda_function.add_to_role_policy(
        lambda_passrole_policy)
    orchestrator_policy.attach_to_role(
        provisioner_apigw_lambda.lambda_function.role)
    # xray:PutTraceSegments cannot be resource-scoped (see W12 suppression).
    provisioner_apigw_lambda.lambda_function.add_to_role_policy(
        iam.PolicyStatement(actions=["xray:PutTraceSegments"],
                            resources=["*"]))
    lambda_node = provisioner_apigw_lambda.lambda_function.node.default_child
    lambda_node.cfn_options.metadata = {
        "cfn_nag": {
            "rules_to_suppress": [{
                "id": "W12",
                "reason":
                "The xray permissions PutTraceSegments is not able to be bound to resources.",
            }]
        }
    }
    # Environment variables setup
    provisioner_apigw_lambda.lambda_function.add_environment(
        key="BLUEPRINT_BUCKET_URL",
        value=str(blueprint_repository_bucket.bucket_regional_domain_name),
    )
    provisioner_apigw_lambda.lambda_function.add_environment(
        key="BLUEPRINT_BUCKET",
        value=str(blueprint_repository_bucket.bucket_name))
    provisioner_apigw_lambda.lambda_function.add_environment(
        key="ACCESS_BUCKET", value=str(access_logs_bucket.bucket_name))
    provisioner_apigw_lambda.lambda_function.add_environment(
        key="CFN_ROLE_ARN", value=str(cloudformation_role.role_arn))
    provisioner_apigw_lambda.lambda_function.add_environment(
        key="PIPELINE_STACK_NAME", value=pipeline_stack_name)
    provisioner_apigw_lambda.lambda_function.add_environment(
        key="NOTIFICATION_EMAIL", value=notification_email.value_as_string)
    provisioner_apigw_lambda.lambda_function.add_environment(
        key="LOG_LEVEL", value="DEBUG")
    cfn_policy_for_lambda = orchestrator_policy.node.default_child
    cfn_policy_for_lambda.cfn_options.metadata = {
        "cfn_nag": {
            "rules_to_suppress": [{
                "id": "W76",
                "reason":
                "A complex IAM policy is required for this resource.",
            }]
        }
    }
    ### Codepipeline with Git source definitions ###
    source_output = codepipeline.Artifact()
    # processing git_address to retrieve repo name
    # NOTE(review): index 5 assumes the URL shape
    # https://git-codecommit.REGION.amazonaws.com/v1/repos/REPONAME — the
    # allowed_pattern above enforces this.
    repo_name_split = core.Fn.split("/", git_address.value_as_string)
    repo_name = core.Fn.select(5, repo_name_split)
    # getting codecommit repo cdk object using 'from_repository_name'
    repo = codecommit.Repository.from_repository_name(
        self, "AWSMLOpsFrameworkRepository", repo_name)
    # Build step only invokes the orchestrator Lambda with the repo's
    # mlops-config.json; it produces no artifacts.
    codebuild_project = codebuild.PipelineProject(
        self,
        "Take config file",
        build_spec=codebuild.BuildSpec.from_object({
            "version": "0.2",
            "phases": {
                "build": {
                    "commands": [
                        "ls -a",
                        "aws lambda invoke --function-name " +
                        provisioner_apigw_lambda.lambda_function.
                        function_name +
                        " --payload fileb://mlops-config.json response.json" +
                        " --invocation-type RequestResponse",
                    ]
                }
            },
        }),
    )
    # Defining a Codepipeline project with CodeCommit as source
    codecommit_pipeline = codepipeline.Pipeline(
        self,
        "MLOpsCodeCommitPipeline",
        stages=[
            codepipeline.StageProps(
                stage_name="Source",
                actions=[
                    codepipeline_actions.CodeCommitSourceAction(
                        action_name="CodeCommit",
                        repository=repo,
                        output=source_output,
                    )
                ],
            ),
            codepipeline.StageProps(
                stage_name="TakeConfig",
                actions=[
                    codepipeline_actions.CodeBuildAction(
                        action_name="provision_pipeline",
                        input=source_output,
                        outputs=[],
                        project=codebuild_project,
                    )
                ],
            ),
        ],
        cross_account_keys=False,
    )
    codecommit_pipeline.add_to_role_policy(
        iam.PolicyStatement(
            actions=["lambda:InvokeFunction"],
            resources=[
                provisioner_apigw_lambda.lambda_function.function_arn
            ],
        ))
    codebuild_project.add_to_role_policy(
        iam.PolicyStatement(
            actions=["lambda:InvokeFunction"],
            resources=[
                provisioner_apigw_lambda.lambda_function.function_arn
            ],
        ))
    # NOTE(review): index [1] assumes the pipeline's implicit artifact
    # bucket is the second node found — fragile against CDK upgrades.
    pipeline_child_nodes = codecommit_pipeline.node.find_all()
    pipeline_child_nodes[1].node.default_child.cfn_options.metadata = {
        "cfn_nag": {
            "rules_to_suppress": [
                {
                    "id": "W35",
                    "reason":
                    "This is a managed bucket generated by CDK for codepipeline.",
                },
                {
                    "id": "W51",
                    "reason":
                    "This is a managed bucket generated by CDK for codepipeline.",
                },
            ]
        }
    }
    ###custom resource for operational metrics###
    metricsMapping = core.CfnMapping(
        self,
        'AnonymousData',
        mapping={'SendAnonymousData': {
            'Data': 'Yes'
        }})
    metrics_condition = core.CfnCondition(
        self,
        'AnonymousDatatoAWS',
        expression=core.Fn.condition_equals(
            metricsMapping.find_in_map('SendAnonymousData', 'Data'),
            'Yes'))
    helper_function = lambda_.Function(
        self,
        "SolutionHelper",
        code=lambda_.Code.from_asset("lambdas/solution_helper"),
        handler="lambda_function.handler",
        runtime=lambda_.Runtime.PYTHON_3_8,
        timeout=core.Duration.seconds(60),
    )
    # Generates a stable anonymous UUID for metrics reporting.
    createIdFunction = core.CustomResource(
        self,
        'CreateUniqueID',
        service_token=helper_function.function_arn,
        properties={'Resource': 'UUID'},
        resource_type='Custom::CreateUUID')
    sendDataFunction = core.CustomResource(
        self,
        'SendAnonymousData',
        service_token=helper_function.function_arn,
        properties={
            'Resource': 'AnonymousMetric',
            'UUID': createIdFunction.get_att_string('UUID'),
            'gitSelected': git_address.value_as_string,
            'Region': core.Aws.REGION,
            'SolutionId': 'SO0136',
            'Version': '%%VERSION%%',
        },
        resource_type='Custom::AnonymousData')
    # Metrics resources exist only when the mapping opts in.
    core.Aspects.of(helper_function).add(
        ConditionalResources(metrics_condition))
    core.Aspects.of(createIdFunction).add(
        ConditionalResources(metrics_condition))
    core.Aspects.of(sendDataFunction).add(
        ConditionalResources(metrics_condition))
    helper_function.node.default_child.cfn_options.metadata = {
        "cfn_nag": {
            "rules_to_suppress": [{
                "id": "W58",
                "reason":
                "The lambda functions role already has permissions to write cloudwatch logs",
            }]
        }
    }
    # If user chooses Git as pipeline provision type, create codepipeline with Git repo as source
    core.Aspects.of(repo).add(ConditionalResources(git_address_provided))
    core.Aspects.of(codecommit_pipeline).add(
        ConditionalResources(git_address_provided))
    core.Aspects.of(codebuild_project).add(
        ConditionalResources(git_address_provided))
def __init__(self, scope: core.Construct, id: str, config, *, lambda_code: lambda_.CfnParametersCode = None, **kwargs) -> None:
    """Create one CDK + Lambda build/deploy pipeline per imported repository.

    The base repo name and repo count come from CDK context, falling back
    to config["Default"]; for each index ``i`` (1..count) a CodeCommit repo
    named ``<name>-<i>`` is imported and wired to its own three-stage
    pipeline (Source -> Build [lambda + cdk] -> CFN deploy).

    NOTE(review): lambda_code defaults to None but is dereferenced
    unconditionally in the Deploy stage (lambda_code.assign(...)) —
    callers must always supply it; confirm the default is intentional.
    """
    super().__init__(scope, id, **kwargs)
    repo_name = self.node.try_get_context(
        "name") or config["Default"]["name"]
    repo_count = self.node.try_get_context(
        "count") or config["Default"]["count"]
    for i in range(1, int(repo_count) + 1):
        # Repositories are pre-existing; import each by its indexed name.
        code = codecommit.Repository.from_repository_name(
            self, "ImportedRepo" + str(i), repo_name + '-' + str(i))
        # CodeBuild project that synthesizes the CDK app into a
        # CloudFormation template (LambdaStack.template.json).
        cdk_build = codebuild.PipelineProject(
            self,
            "CdkBuild" + str(i),
            project_name="cdk-codebuild-proj-for-" + repo_name + "-" +
            str(i),
            build_spec=codebuild.BuildSpec.from_object(
                dict(
                    version="0.2",
                    phases=dict(install=dict(commands=[
                        "npm install -g aws-cdk",
                        "python -m ensurepip --upgrade",
                        "python -m pip install --upgrade pip",
                        "python -m pip install --upgrade virtualenv",
                        "virtualenv .env",
                        ". .env/bin/activate",
                        "pip install -r requirements.txt",
                        "pip install aws_cdk.aws_codedeploy aws_cdk.aws_lambda aws_cdk.aws_codebuild aws_cdk.aws_codepipeline",
                        "pip install aws_cdk.aws_apigateway aws_cdk.aws_codecommit aws_cdk.aws_codepipeline_actions aws_cdk.aws_s3"
                    ]),
                                build=dict(commands=[
                                    "cdk synth CdkServerlessStack"
                                ])),
                    artifacts={
                        "base-directory": "cdk.out",
                        "files": ["LambdaStack.template.json"]
                    },
                    environment=dict(buildImage=codebuild.LinuxBuildImage.
                                     STANDARD_2_0))))
        # CodeBuild project that compiles the Lambda handler itself.
        lambda_build = codebuild.PipelineProject(
            self,
            'LambdaBuild' + str(i),
            project_name="lambda-codebuild-proj-for-" + repo_name + "-" +
            str(i),
            build_spec=codebuild.BuildSpec.from_object(
                dict(version="0.2",
                     phases=dict(install=dict(
                         commands=["cd lambda", "npm install"]),
                                 build=dict(commands="npm run build")),
                     artifacts={
                         "base-directory": "lambda",
                         "files": ["index.js", "node_modules/**/*"]
                     },
                     environment=dict(buildImage=codebuild.LinuxBuildImage.
                                      STANDARD_2_0))))
        source_output = codepipeline.Artifact()
        cdk_build_output = codepipeline.Artifact("CdkBuildOutput")
        lambda_build_output = codepipeline.Artifact("LambdaBuildOutput")
        # Lazy S3 coordinates of the built Lambda bundle, resolved at
        # pipeline execution time and fed into the CFN parameters below.
        lambda_location = lambda_build_output.s3_location
        codepipeline.Pipeline(
            self,
            "Pipeline" + str(i),
            pipeline_name="pipeline-for-" + repo_name + "-" + str(i),
            stages=[
                codepipeline.StageProps(
                    stage_name="Source",
                    actions=[
                        codepipeline_actions.CodeCommitSourceAction(
                            action_name="CodeCommit_Source",
                            repository=code,
                            output=source_output)
                    ]),
                codepipeline.StageProps(
                    stage_name="Build",
                    actions=[
                        codepipeline_actions.CodeBuildAction(
                            action_name="Lambda_Build",
                            project=lambda_build,
                            input=source_output,
                            outputs=[lambda_build_output]),
                        codepipeline_actions.CodeBuildAction(
                            action_name="CDK_Build",
                            project=cdk_build,
                            input=source_output,
                            outputs=[cdk_build_output])
                    ]),
                codepipeline.StageProps(
                    stage_name="Deploy",
                    actions=[
                        codepipeline_actions.
                        CloudFormationCreateUpdateStackAction(
                            action_name="Lambda_CFN_Deploy",
                            template_path=cdk_build_output.at_path(
                                "LambdaStack.template.json"),
                            stack_name="lambda-deployment-stack-" +
                            repo_name + "-" + str(i),
                            admin_permissions=True,
                            # Injects the built bundle's S3 location into
                            # the template's code parameters.
                            parameter_overrides=dict(
                                lambda_code.assign(
                                    bucket_name=lambda_location.
                                    bucket_name,
                                    object_key=lambda_location.object_key,
                                    object_version=lambda_location.
                                    object_version)),
                            extra_inputs=[lambda_build_output])
                    ])
            ])
def __init__(self, scope: core.Construct, id: str, vpc, **kwargs) -> None:
    """Provision a CodeCommit -> CodeBuild(ARM64) -> ECR delivery pipeline
    for the Graviton2 ASP.NET lab application.

    :param vpc: accepted for signature compatibility; not used here.
    """
    super().__init__(scope, id, **kwargs)
    app_name = "graviton2-aspnet-lab"
    stack = core.Stack.of(self)

    # Container image registry and application source repository.
    ecr_repo = ecr.Repository(scope=self,
                              id=f"{app_name}-container",
                              repository_name=app_name)
    source_repo = codecommit.Repository(scope=self,
                                        id=f"{app_name}-container-git",
                                        repository_name=app_name,
                                        description="Application code")

    pipeline = codepipeline.Pipeline(scope=self,
                                     id=f"{app_name}-container--pipeline",
                                     pipeline_name=app_name)

    source_artifact = codepipeline.Artifact()
    arm64_build_artifact = codepipeline.Artifact("ARM64_BuildOutput")

    # Docker builds need a privileged ARM build container; the buildspec
    # lives in the source repository.
    arm64_buildspec = codebuild.BuildSpec.from_source_filename(
        "arm64-dotnet-buildspec.yml")
    arm64_build_project = codebuild.PipelineProject(
        scope=self,
        id="DockerBuild_ARM64",
        environment=dict(
            build_image=codebuild.LinuxBuildImage.AMAZON_LINUX_2_ARM,
            privileged=True),
        environment_variables={
            # Tell the buildspec which ECR repository to push to.
            'REPO_ECR':
            codebuild.BuildEnvironmentVariable(value=ecr_repo.repository_uri),
        },
        build_spec=arm64_buildspec)

    ecr_repo.grant_pull_push(arm64_build_project)
    # Allow pulling base layers from any repository in this account/region.
    arm64_build_project.add_to_role_policy(
        iam.PolicyStatement(
            effect=iam.Effect.ALLOW,
            actions=[
                "ecr:BatchCheckLayerAvailability",
                "ecr:GetDownloadUrlForLayer",
                "ecr:BatchGetImage"
            ],
            resources=[
                f"arn:{stack.partition}:ecr:{stack.region}:{stack.account}:repository/*"
            ],
        ))

    checkout_action = codepipeline_actions.CodeCommitSourceAction(
        action_name="CodeCommit_Source",
        repository=source_repo,
        output=source_artifact,
        branch="master")
    pipeline.add_stage(stage_name="Source", actions=[checkout_action])
    pipeline.add_stage(
        stage_name="DockerBuild",
        actions=[
            codepipeline_actions.CodeBuildAction(
                action_name="DockerBuild_ARM64",
                project=arm64_build_project,
                input=source_artifact,
                outputs=[arm64_build_artifact])
        ])

    # Outputs
    core.CfnOutput(scope=self,
                   id="application_repository",
                   value=source_repo.repository_clone_url_http)
def __init__(self, scope: core.Construct, id: str, website: WebsiteConstruct, **kwargs) -> None:
    """CI/CD for a static website: CodeCommit source, yarn build on CodeBuild,
    deployment of the built `dist/` directory to the website's S3 bucket.

    :param website: construct exposing the destination `bucket` attribute.
    """
    super().__init__(scope, id, **kwargs)
    stack = core.Stack.of(self)
    # Repository named after the (lower-cased) stack name.
    repo = codecommit.Repository(self, 'Repository',
                                 repository_name=stack.stack_name.lower())
    project = codebuild.PipelineProject(
        self,
        'Builder',
        environment=codebuild.BuildEnvironment(
            build_image=codebuild.LinuxBuildImage.AMAZON_LINUX_2_3,
            compute_type=codebuild.ComputeType.LARGE),
        # Local custom cache keyed on the buildspec's 'cache.paths' entry.
        cache=codebuild.Cache.local(codebuild.LocalCacheMode.CUSTOM, ),
        build_spec=codebuild.BuildSpec.from_object({
            'version': 0.2,
            'cache': {
                # BUGFIX: was 'nodemodules/**/*', which matches nothing --
                # yarn installs into 'node_modules', so the cache was useless.
                'paths': ['node_modules/**/*'],
            },
            'phases': {
                'install': {
                    'runtime-versions': {
                        'nodejs': 12
                    }
                },
                'pre_build': {
                    'commands': ['echo Pre-build started on `date`', 'yarn install']
                },
                'build': {
                    'commands': ['echo Build started on `date`', 'yarn build']
                }
            },
            'artifacts': {
                'files': ['**/*'],
                'base-directory': 'dist'
            }
        }),
    )
    source_artifact = codepipeline.Artifact('SourceArtifact')
    build_artifact = codepipeline.Artifact('BuildArtifact')
    pipeline = codepipeline.Pipeline(
        self,
        'Pipeline',
        cross_account_keys=False,
        # Re-run the pipeline whenever the pipeline definition itself changes.
        restart_execution_on_update=True,
        stages=[
            codepipeline.StageProps(
                stage_name='Source',
                actions=[
                    codepipeline_actions.CodeCommitSourceAction(
                        action_name='Source',
                        repository=repo,
                        output=source_artifact,
                    )
                ]),
            codepipeline.StageProps(
                stage_name='Build',
                actions=[
                    codepipeline_actions.CodeBuildAction(
                        action_name='Build',
                        project=project,
                        input=source_artifact,
                        outputs=[build_artifact],
                    )
                ]),
            codepipeline.StageProps(
                stage_name='Deploy',
                actions=[
                    codepipeline_actions.S3DeployAction(
                        action_name='Deploy',
                        input=build_artifact,
                        bucket=website.bucket,
                        extract=True,
                    )
                ])
        ])
def __init__(self, scope: Construct, id: str, **kwargs) -> None:
    """Pipeline stack for the serverless REST API sample.

    Wires a CdkPipeline with a CodeCommit source (polled 'main' branch), an
    npm-based synth step, an integration-testing application stage, and a
    production application stage gated by manual approval.
    """
    super().__init__(scope, id, **kwargs)
    # Repository name is overridable at deploy time via a CFN parameter.
    CODECOMMIT_REPO_NAME = cdk.CfnParameter(
        self,
        "CodeCommitRepoName",
        type="String",
        default="serverless-api-pipeline-cdk",
        description="CodeCommit repository with the project code").value_as_string
    PIPELINE_NAME = "serverless-api-pipeline-cdk"
    source_artifact = codepipeline.Artifact()
    cloud_assembly_artifact = codepipeline.Artifact()
    pipeline = CdkPipeline(
        self,
        "Pipeline",
        pipeline_name=PIPELINE_NAME,
        cloud_assembly_artifact=cloud_assembly_artifact,
        source_action=codepipeline_actions.CodeCommitSourceAction(
            action_name="CodeCommit",
            output=source_artifact,
            branch='main',
            # Polling (not events) triggers the pipeline on new commits.
            trigger=codepipeline_actions.CodeCommitTrigger.POLL,
            repository=codecommit.Repository(
                self,
                'ServerlessApiRepository',
                repository_name=CODECOMMIT_REPO_NAME)),
        synth_action=SimpleSynthAction.standard_npm_synth(
            source_artifact=source_artifact,
            cloud_assembly_artifact=cloud_assembly_artifact,
            # Privileged mode; presumably needed for asset bundling -- TODO confirm.
            environment={'privileged': True},
            install_command=
            'cd ./serverless-rest-api/python-http-cdk; npm install -g aws-cdk; pip install -r requirements.txt; pip install -r ./src/api/requirements.txt ',
            synth_command='cdk synth --output $CODEBUILD_SRC_DIR/cdk.out'))
    # Add testing stage to the pipeline and testing activity with permissions necessary to run integration tests
    testing_stage = AppStage(self,
                             'serverless-api-pipeline-cdk-Testing',
                             cognito_stack_name='Cognito')
    pipeline_testing_stage = pipeline.add_application_stage(testing_stage)
    testing_action = ShellScriptAction(
        action_name='IntegrationTest',
        additional_artifacts=[source_artifact],
        commands=[
            'cd ./serverless-rest-api/python-http-cdk',
            'pip install -r ./tests/requirements.txt',
            'pip install -r ./src/api/requirements.txt',
            'python -m pytest tests/integration -v'
        ],
        # Expose the deployed test stack's name to pytest via an env var.
        use_outputs={
            'TEST_APPLICATION_STACK_NAME':
            pipeline.stack_output(testing_stage.api_stack_name)
        },
    )
    pipeline_testing_stage.add_actions(testing_action)
    # Cognito admin operations used by the integration tests to create and
    # clean up test users.
    testing_action.project.add_to_role_policy(
        iam.PolicyStatement(
            effect=iam.Effect.ALLOW,
            actions=[
                'cognito-idp:AdminDeleteUser',
                'cognito-idp:AdminConfirmSignUp',
                'cognito-idp:AdminAddUserToGroup'
            ],
            resources=[
                f'arn:aws:cognito-idp:{cdk.Aws.REGION}:{cdk.Aws.ACCOUNT_ID}:userpool/*'
            ],
        ))
    # GetRandomPassword has no resource-level scoping, hence '*'.
    testing_action.project.add_to_role_policy(
        iam.PolicyStatement(
            effect=iam.Effect.ALLOW,
            actions=['secretsmanager:GetRandomPassword'],
            resources=['*'],
        ))
    # Full DynamoDB access, limited to tables prefixed with the stage name.
    testing_action.project.add_to_role_policy(
        iam.PolicyStatement(
            effect=iam.Effect.ALLOW,
            actions=['dynamodb:*'],
            resources=[
                f'arn:aws:dynamodb:{cdk.Aws.REGION}:{cdk.Aws.ACCOUNT_ID}:table/{testing_stage.stage_name}*'
            ],
        ))
    # Tests read stack outputs from the test stacks and the Cognito stack.
    testing_action.project.add_to_role_policy(
        iam.PolicyStatement(
            effect=iam.Effect.ALLOW,
            actions=['cloudformation:DescribeStacks'],
            resources=[
                f'arn:aws:cloudformation:{cdk.Aws.REGION}:{cdk.Aws.ACCOUNT_ID}:stack/{testing_stage.stage_name}*/*',
                f'arn:aws:cloudformation:{cdk.Aws.REGION}:{cdk.Aws.ACCOUNT_ID}:stack/{testing_stage.cognito_stack_name}/*'
            ],
        ))
    # Create production deployment stage to the pipeline with manual approval action
    deployment_stage = AppStage(self,
                                'serverless-api-pipeline-cdk-Deployment',
                                cognito_stack_name='Cognito')
    pipeline_deployment_stage = pipeline.add_application_stage(
        deployment_stage)
    # run_order=1 places the approval before the stage's deployment actions.
    pipeline_deployment_stage.add_actions(
        codepipeline_actions.ManualApprovalAction(
            action_name='ApproveProductionDeployment', run_order=1))
def __init__(self, scope: core.Construct, id: str, lambda_code: _lambda.CfnParametersCode, custom_resource: _lambda.CfnParametersCode, **kwargs) -> None:
    """Pipeline stack for the spring-petclinic customers serverless service.

    Builds the Java Lambda bundle and the CDK/CFN template in parallel from
    one CodeCommit source, then deploys the Lambda stack via CloudFormation,
    filling in both the Lambda code and custom-resource code parameters.

    :param lambda_code: CfnParametersCode placeholder for the Lambda bundle.
    :param custom_resource: CfnParametersCode placeholder for the custom
        resource code bundle.
    """
    super().__init__(scope, id, **kwargs)
    # The code that defines your stack goes here
    self.lambda_code = lambda_code
    self.custom_resource = custom_resource
    code = _commit.Repository(
        self,
        'CustomerServerlessCode',
        repository_name='spring-petclinic-customers-serverless')
    # Maven build that packages the service jar and explodes it into deploy/
    # so Lambda receives an unpacked classpath layout.
    lambda_project = _build.PipelineProject(
        self,
        'CustomerLambdaBuild',
        build_spec=_build.BuildSpec.from_object({
            'version': 0.2,
            'phases': {
                'install': {
                    'runtime-versions': {
                        'java': 'openjdk8'
                    },
                    'commands': []
                },
                'build': {
                    'commands': 'mvn package',
                },
                'post_build': {
                    'commands': [
                        'mkdir deploy',
                        'cp target/spring-petclinic-customers-serverless-2.0.7.RELEASE.jar deploy/',
                        'cd deploy && jar xvf spring-petclinic-customers-serverless-2.0.7.RELEASE.jar',
                        'rm spring-petclinic-customers-serverless-2.0.7.RELEASE.jar',
                    ]
                }
            },
            'artifacts': {
                'base-directory': 'deploy',
                'files': ['**/*']
            },
        }),
        environment=_build.BuildEnvironment(
            build_image=_build.LinuxBuildImage.STANDARD_2_0))
    # CDK synth build producing two secondary artifacts: the CFN template and
    # the custom-resource sources. Secondary-artifact identifiers must match
    # the Artifact names declared below.
    cdk_project = _build.PipelineProject(
        self,
        'CustomerCdkBuild',
        build_spec=_build.BuildSpec.from_object({
            'version': 0.2,
            'phases': {
                'install': {
                    'runtime-versions': {
                        'python': '3.7',
                        'nodejs': '10'
                    },
                    'commands': [
                        'npm install -g [email protected]',
                        'pip install aws-cdk.core==1.10.0',
                        'pip install -r requirements.txt'
                    ]
                },
                'build': {
                    'commands': [
                        'cdk synth -o dist',
                    ]
                }
            },
            'artifacts': {
                'secondary-artifacts': {
                    'CdkBuildOutput': {
                        'base-directory': 'dist',
                        'files': ['customer-lambda-stack.template.json']
                    },
                    # NOTE(review): 'CustomRecoureOutput' looks like a typo of
                    # "CustomResourceOutput" but is used consistently with the
                    # Artifact name below, so it must stay as-is.
                    'CustomRecoureOutput': {
                        'base-directory': 'custom-resource-code',
                        'discard-paths': 'yes',
                        'files': ['index.py', 'owner.json', 'cfnresponse.py']
                    }
                }
            }
        }),
        environment=_build.BuildEnvironment(
            build_image=_build.LinuxBuildImage.STANDARD_2_0))
    source_output = _pipeline.Artifact('SourceOutput')
    cdk_build_output = _pipeline.Artifact('CdkBuildOutput')
    lambda_build_output = _pipeline.Artifact('LambdaBuildOutput')
    custom_resource_output = _pipeline.Artifact('CustomRecoureOutput')
    # NOTE(review): variable name 'pipline' is a typo kept for fidelity.
    pipline = _pipeline.Pipeline(
        self,
        'ServerlessPipeline',
        stages=[{
            'stageName': 'Source',
            'actions': [
                _action.CodeCommitSourceAction(
                    action_name='CodeCommit_Source',
                    repository=code,
                    output=source_output)
            ]
        }, {
            'stageName': 'Build',
            'actions': [
                _action.CodeBuildAction(
                    action_name='CodeBuild_CDK',
                    project=cdk_project,
                    input=source_output,
                    outputs=[cdk_build_output, custom_resource_output]),
                _action.CodeBuildAction(action_name='CodeBuild_Lambda',
                                        project=lambda_project,
                                        input=source_output,
                                        outputs=[lambda_build_output])
            ]
        }, {
            'stageName': 'Deploy',
            'actions': [
                _action.CloudFormationCreateUpdateStackAction(
                    action_name='Lambda_CFN_Deploy',
                    template_path=cdk_build_output.at_path(
                        'customer-lambda-stack.template.json'),
                    stack_name='customer-lambda-stack',
                    admin_permissions=True,
                    # Resolve both code placeholders to the S3 locations of
                    # their respective build artifacts at deploy time.
                    parameter_overrides={
                        **self.lambda_code.assign(bucket_name=lambda_build_output.bucket_name, object_key=lambda_build_output.object_key),
                        **self.custom_resource.assign(bucket_name=custom_resource_output.bucket_name, object_key=custom_resource_output.object_key)
                    },
                    extra_inputs=[
                        lambda_build_output, custom_resource_output
                    ])
            ]
        }])
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    """MLOps framework orchestration stack.

    Creates the assets/blueprints S3 buckets (optionally reusing a
    user-supplied bucket), an API-Gateway-fronted orchestration Lambda that
    provisions child pipelines via CloudFormation, and -- when a CodeCommit
    URL is supplied -- a pipeline that triggers provisioning from a config
    file committed to that repository. Conditional resources are gated with
    CfnConditions applied through Aspects.
    """
    super().__init__(scope, id, **kwargs)
    # Get stack parameters: email and repo address
    notification_email = core.CfnParameter(
        self,
        "Email Address",
        type="String",
        description="Specify an email to receive notifications about pipeline outcomes.",
        allowed_pattern="^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$",
        min_length=5,
        max_length=320,
        constraint_description="Please enter an email address with correct format ([email protected])",
    )
    # Optional (may be empty): CodeCommit clone URL used for Git-driven provisioning.
    git_address = core.CfnParameter(
        self,
        "CodeCommit Repo Address",
        type="String",
        description="AWS CodeCommit repository clone URL to connect to the framework.",
        allowed_pattern=(
            "^(((https:\/\/|ssh:\/\/)(git\-codecommit)\.[a-zA-Z0-9_.+-]+(amazonaws\.com\/)[a-zA-Z0-9-.]"
            "+(\/)[a-zA-Z0-9-.]+(\/)[a-zA-Z0-9-.]+$)|^$)"
        ),
        min_length=0,
        max_length=320,
        constraint_description=(
            "CodeCommit address must follow the pattern: ssh or "
            "https://git-codecommit.REGION.amazonaws.com/version/repos/REPONAME"
        ),
    )
    # Get the optional S3 assets bucket to use
    existing_bucket = core.CfnParameter(
        self,
        "ExistingS3Bucket",
        type="String",
        description="Name of existing S3 bucket to be used for ML assests. S3 Bucket must be in the same region as the deployed stack, and has versioning enabled. If not provided, a new S3 bucket will be created.",
        allowed_pattern="((?=^.{3,63}$)(?!^(\d+\.)+\d+$)(^(([a-z0-9]|[a-z0-9][a-z0-9\-]*[a-z0-9])\.)*([a-z0-9]|[a-z0-9][a-z0-9\-]*[a-z0-9])$)|^$)",
        min_length=0,
        max_length=63,
    )
    # Conditions
    git_address_provided = core.CfnCondition(
        self,
        "GitAddressProvided",
        expression=core.Fn.condition_not(core.Fn.condition_equals(git_address, "")),
    )
    # client provided an existing S3 bucket name, to be used for assets
    existing_bucket_provided = core.CfnCondition(
        self,
        "S3BucketProvided",
        expression=core.Fn.condition_not(core.Fn.condition_equals(existing_bucket.value_as_string.strip(), "")),
    )
    # S3 bucket needs to be created for assets
    create_new_bucket = core.CfnCondition(
        self,
        "CreateS3Bucket",
        expression=core.Fn.condition_equals(existing_bucket.value_as_string.strip(), ""),
    )
    # Constants
    pipeline_stack_name = "MLOps-pipeline"

    # CDK Resources setup
    access_logs_bucket = s3.Bucket(
        self,
        "accessLogs",
        encryption=s3.BucketEncryption.S3_MANAGED,
        block_public_access=s3.BlockPublicAccess.BLOCK_ALL,
    )

    # Apply secure transfer bucket policy
    apply_secure_bucket_policy(access_logs_bucket)

    # This is a logging bucket.
    access_logs_bucket.node.default_child.cfn_options.metadata = suppress_s3_access_policy()

    # Import user provide S3 bucket, if any. s3.Bucket.from_bucket_arn is used instead of s3.Bucket.from_bucket_name to allow cross account bucket.
    client_existing_bucket = s3.Bucket.from_bucket_arn(
        self,
        "ClientExistingBucket",
        f"arn:aws:s3:::{existing_bucket.value_as_string.strip()}",
    )

    # Create the resource if existing_bucket_provided condition is True
    core.Aspects.of(client_existing_bucket).add(ConditionalResources(existing_bucket_provided))

    # Creating assets bucket so that users can upload ML Models to it.
    assets_bucket = s3.Bucket(
        self,
        "pipeline-assets-" + str(uuid.uuid4()),
        versioned=True,
        encryption=s3.BucketEncryption.S3_MANAGED,
        server_access_logs_bucket=access_logs_bucket,
        server_access_logs_prefix="assets_bucket_access_logs",
        block_public_access=s3.BlockPublicAccess.BLOCK_ALL,
    )

    # Apply secure transport bucket policy
    apply_secure_bucket_policy(assets_bucket)

    # Create the resource if create_new_bucket condition is True
    core.Aspects.of(assets_bucket).add(ConditionalResources(create_new_bucket))

    # Get assets S3 bucket's name/arn, based on the condition
    assets_s3_bucket_name = core.Fn.condition_if(
        existing_bucket_provided.logical_id,
        client_existing_bucket.bucket_name,
        assets_bucket.bucket_name,
    ).to_string()

    # Bucket holding the framework's pipeline blueprints, copied in by a
    # custom resource below.
    blueprints_bucket_name = "blueprint-repository-" + str(uuid.uuid4())
    blueprint_repository_bucket = s3.Bucket(
        self,
        blueprints_bucket_name,
        encryption=s3.BucketEncryption.S3_MANAGED,
        server_access_logs_bucket=access_logs_bucket,
        server_access_logs_prefix=blueprints_bucket_name,
        block_public_access=s3.BlockPublicAccess.BLOCK_ALL,
    )
    # Apply secure transport bucket policy
    apply_secure_bucket_policy(blueprint_repository_bucket)

    # Custom resource to copy source bucket content to blueprints bucket
    custom_resource_lambda_fn = lambda_.Function(
        self,
        "CustomResourceLambda",
        code=lambda_.Code.from_asset("lambdas/custom_resource"),
        handler="index.on_event",
        runtime=lambda_.Runtime.PYTHON_3_8,
        environment={
            # %%TOKENS%% are replaced by the solution build pipeline.
            "source_bucket": "https://%%BUCKET_NAME%%-" + core.Aws.REGION + ".s3.amazonaws.com/%%SOLUTION_NAME%%/%%VERSION%%",
            "destination_bucket": blueprint_repository_bucket.bucket_name,
            "LOG_LEVEL": "INFO",
        },
        timeout=core.Duration.seconds(60),
    )
    custom_resource_lambda_fn.node.default_child.cfn_options.metadata = {
        "cfn_nag": {
            "rules_to_suppress": [
                {
                    "id": "W58",
                    "reason": "The lambda functions role already has permissions to write cloudwatch logs",
                }
            ]
        }
    }
    blueprint_repository_bucket.grant_write(custom_resource_lambda_fn)
    custom_resource = core.CustomResource(
        self,
        "CustomResourceCopyAssets",
        service_token=custom_resource_lambda_fn.function_arn,
    )
    # The copy must not start before the destination bucket exists.
    custom_resource.node.add_dependency(blueprint_repository_bucket)

    # IAM policies setup ###
    cloudformation_role = iam.Role(
        self,
        "mlopscloudformationrole",
        assumed_by=iam.ServicePrincipal("cloudformation.amazonaws.com"),
    )
    # Cloudformation policy setup
    # Broad-but-scoped permissions for everything a provisioned child
    # pipeline stack may need to create, keyed to pipeline_stack_name
    # prefixes where the service supports resource-level scoping.
    orchestrator_policy = iam.Policy(
        self,
        "lambdaOrchestratorPolicy",
        statements=[
            iam.PolicyStatement(
                actions=[
                    "cloudformation:CreateStack",
                    "cloudformation:DeleteStack",
                    "cloudformation:UpdateStack",
                    "cloudformation:ListStackResources",
                ],
                resources=[
                    (
                        f"arn:{core.Aws.PARTITION}:cloudformation:{core.Aws.REGION}:"
                        f"{core.Aws.ACCOUNT_ID}:stack/{pipeline_stack_name}*/*"
                    ),
                ],
            ),
            iam.PolicyStatement(
                actions=[
                    "iam:CreateRole",
                    "iam:DeleteRole",
                    "iam:DeleteRolePolicy",
                    "iam:GetRole",
                    "iam:GetRolePolicy",
                    "iam:PassRole",
                    "iam:PutRolePolicy",
                    "iam:AttachRolePolicy",
                    "iam:DetachRolePolicy",
                ],
                resources=[f"arn:{core.Aws.PARTITION}:iam::{core.Aws.ACCOUNT_ID}:role/{pipeline_stack_name}*"],
            ),
            iam.PolicyStatement(
                actions=[
                    "ecr:CreateRepository",
                    "ecr:DeleteRepository",
                    "ecr:DescribeRepositories",
                ],
                resources=[
                    (
                        f"arn:{core.Aws.PARTITION}:ecr:{core.Aws.REGION}:"
                        f"{core.Aws.ACCOUNT_ID}:repository/awsmlopsmodels*"
                    )
                ],
            ),
            iam.PolicyStatement(
                actions=[
                    "codebuild:CreateProject",
                    "codebuild:DeleteProject",
                    "codebuild:BatchGetProjects",
                ],
                resources=[
                    (
                        f"arn:{core.Aws.PARTITION}:codebuild:{core.Aws.REGION}:"
                        f"{core.Aws.ACCOUNT_ID}:project/ContainerFactory*"
                    ),
                    (
                        f"arn:{core.Aws.PARTITION}:codebuild:{core.Aws.REGION}:"
                        f"{core.Aws.ACCOUNT_ID}:project/VerifySagemaker*"
                    ),
                    (
                        f"arn:{core.Aws.PARTITION}:codebuild:{core.Aws.REGION}:"
                        f"{core.Aws.ACCOUNT_ID}:report-group/*"
                    ),
                ],
            ),
            iam.PolicyStatement(
                actions=[
                    "lambda:CreateFunction",
                    "lambda:DeleteFunction",
                    "lambda:InvokeFunction",
                    "lambda:PublishLayerVersion",
                    "lambda:DeleteLayerVersion",
                    "lambda:GetLayerVersion",
                    "lambda:GetFunctionConfiguration",
                    "lambda:GetFunction",
                    "lambda:AddPermission",
                    "lambda:RemovePermission",
                    "lambda:UpdateFunctionConfiguration",
                ],
                resources=[
                    f"arn:{core.Aws.PARTITION}:lambda:{core.Aws.REGION}:{core.Aws.ACCOUNT_ID}:layer:*",
                    f"arn:{core.Aws.PARTITION}:lambda:{core.Aws.REGION}:{core.Aws.ACCOUNT_ID}:function:*",
                ],
            ),
            iam.PolicyStatement(
                actions=["s3:GetObject"],
                resources=[
                    blueprint_repository_bucket.bucket_arn,
                    blueprint_repository_bucket.arn_for_objects("*"),
                    f"arn:{core.Aws.PARTITION}:s3:::{assets_s3_bucket_name}/*",
                ],
            ),
            iam.PolicyStatement(
                actions=[
                    "codepipeline:CreatePipeline",
                    "codepipeline:DeletePipeline",
                    "codepipeline:GetPipeline",
                    "codepipeline:GetPipelineState",
                ],
                resources=[
                    (
                        f"arn:{core.Aws.PARTITION}:codepipeline:{core.Aws.REGION}:"
                        f"{core.Aws.ACCOUNT_ID}:{pipeline_stack_name}*"
                    )
                ],
            ),
            iam.PolicyStatement(
                actions=[
                    "apigateway:POST",
                    "apigateway:PATCH",
                    "apigateway:DELETE",
                    "apigateway:GET",
                    "apigateway:PUT",
                ],
                resources=[
                    f"arn:{core.Aws.PARTITION}:apigateway:{core.Aws.REGION}::/restapis/*",
                    f"arn:{core.Aws.PARTITION}:apigateway:{core.Aws.REGION}::/restapis",
                    f"arn:{core.Aws.PARTITION}:apigateway:{core.Aws.REGION}::/account",
                    f"arn:{core.Aws.PARTITION}:apigateway:{core.Aws.REGION}::/usageplans",
                    f"arn:{core.Aws.PARTITION}:apigateway:{core.Aws.REGION}::/usageplans/*",
                ],
            ),
            iam.PolicyStatement(
                actions=[
                    "logs:CreateLogGroup",
                    "logs:DescribeLogGroups",
                ],
                resources=[
                    f"arn:{core.Aws.PARTITION}:logs:{core.Aws.REGION}:{core.Aws.ACCOUNT_ID}:log-group:*",
                ],
            ),
            iam.PolicyStatement(
                actions=[
                    "s3:CreateBucket",
                    "s3:PutEncryptionConfiguration",
                    "s3:PutBucketVersioning",
                    "s3:PutBucketPublicAccessBlock",
                    "s3:PutBucketLogging",
                ],
                resources=["arn:" + core.Aws.PARTITION + ":s3:::*"],
            ),
            iam.PolicyStatement(
                actions=[
                    "sns:CreateTopic",
                    "sns:DeleteTopic",
                    "sns:Subscribe",
                    "sns:Unsubscribe",
                    "sns:GetTopicAttributes",
                    "sns:SetTopicAttributes",
                ],
                resources=[
                    (
                        f"arn:{core.Aws.PARTITION}:sns:{core.Aws.REGION}:{core.Aws.ACCOUNT_ID}:"
                        f"{pipeline_stack_name}*-PipelineNotification*"
                    ),
                    (
                        f"arn:{core.Aws.PARTITION}:sns:{core.Aws.REGION}:{core.Aws.ACCOUNT_ID}:"
                        f"{pipeline_stack_name}*-ModelMonitorPipelineNotification*"
                    ),
                ],
            ),
            iam.PolicyStatement(
                actions=[
                    "events:PutRule",
                    "events:DescribeRule",
                    "events:PutTargets",
                    "events:RemoveTargets",
                    "events:DeleteRule",
                    "events:PutEvents",
                ],
                resources=[
                    f"arn:{core.Aws.PARTITION}:events:{core.Aws.REGION}:{core.Aws.ACCOUNT_ID}:rule/*",
                    f"arn:{core.Aws.PARTITION}:events:{core.Aws.REGION}:{core.Aws.ACCOUNT_ID}:event-bus/*",
                ],
            ),
        ],
    )
    orchestrator_policy.attach_to_role(cloudformation_role)

    # Lambda function IAM setup
    lambda_passrole_policy = iam.PolicyStatement(actions=["iam:passrole"], resources=[cloudformation_role.role_arn])
    # API Gateway and lambda setup to enable provisioning pipelines through API calls
    provisioner_apigw_lambda = aws_apigateway_lambda.ApiGatewayToLambda(
        self,
        "PipelineOrchestration",
        lambda_function_props={
            "runtime": lambda_.Runtime.PYTHON_3_8,
            "handler": "index.handler",
            "code": lambda_.Code.from_asset("lambdas/pipeline_orchestration"),
        },
        api_gateway_props={
            "defaultMethodOptions": {
                "authorizationType": apigw.AuthorizationType.IAM,
            },
            "restApiName": f"{core.Aws.STACK_NAME}-orchestrator",
            "proxy": False,
            "dataTraceEnabled": True,
        },
    )
    # POST /provisionpipeline and POST /pipelinestatus endpoints.
    provision_resource = provisioner_apigw_lambda.api_gateway.root.add_resource("provisionpipeline")
    provision_resource.add_method("POST")
    status_resource = provisioner_apigw_lambda.api_gateway.root.add_resource("pipelinestatus")
    status_resource.add_method("POST")
    blueprint_repository_bucket.grant_read(provisioner_apigw_lambda.lambda_function)
    provisioner_apigw_lambda.lambda_function.add_to_role_policy(lambda_passrole_policy)
    orchestrator_policy.attach_to_role(provisioner_apigw_lambda.lambda_function.role)
    provisioner_apigw_lambda.lambda_function.add_to_role_policy(
        iam.PolicyStatement(actions=["xray:PutTraceSegments"], resources=["*"])
    )
    lambda_node = provisioner_apigw_lambda.lambda_function.node.default_child
    lambda_node.cfn_options.metadata = {
        "cfn_nag": {
            "rules_to_suppress": [
                {
                    "id": "W12",
                    "reason": "The xray permissions PutTraceSegments is not able to be bound to resources.",
                }
            ]
        }
    }
    # Environment variables setup
    provisioner_apigw_lambda.lambda_function.add_environment(
        key="BLUEPRINT_BUCKET_URL",
        value=str(blueprint_repository_bucket.bucket_regional_domain_name),
    )
    provisioner_apigw_lambda.lambda_function.add_environment(
        key="BLUEPRINT_BUCKET", value=str(blueprint_repository_bucket.bucket_name)
    )
    provisioner_apigw_lambda.lambda_function.add_environment(
        key="ACCESS_BUCKET", value=str(access_logs_bucket.bucket_name)
    )
    provisioner_apigw_lambda.lambda_function.add_environment(key="ASSETS_BUCKET", value=str(assets_s3_bucket_name))
    provisioner_apigw_lambda.lambda_function.add_environment(
        key="CFN_ROLE_ARN", value=str(cloudformation_role.role_arn)
    )
    provisioner_apigw_lambda.lambda_function.add_environment(key="PIPELINE_STACK_NAME", value=pipeline_stack_name)
    provisioner_apigw_lambda.lambda_function.add_environment(
        key="NOTIFICATION_EMAIL", value=notification_email.value_as_string
    )
    provisioner_apigw_lambda.lambda_function.add_environment(key="LOG_LEVEL", value="DEBUG")
    cfn_policy_for_lambda = orchestrator_policy.node.default_child
    cfn_policy_for_lambda.cfn_options.metadata = {
        "cfn_nag": {
            "rules_to_suppress": [
                {
                    "id": "W76",
                    "reason": "A complex IAM policy is required for this resource.",
                }
            ]
        }
    }

    # Codepipeline with Git source definitions ###
    source_output = codepipeline.Artifact()
    # processing git_address to retrieve repo name
    repo_name_split = core.Fn.split("/", git_address.value_as_string)
    repo_name = core.Fn.select(5, repo_name_split)
    # getting codecommit repo cdk object using 'from_repository_name'
    repo = codecommit.Repository.from_repository_name(self, "AWSMLOpsFrameworkRepository", repo_name)
    # Build project that invokes the orchestration Lambda with the repo's
    # mlops-config.json as the payload.
    codebuild_project = codebuild.PipelineProject(
        self,
        "Take config file",
        build_spec=codebuild.BuildSpec.from_object(
            {
                "version": "0.2",
                "phases": {
                    "build": {
                        "commands": [
                            "ls -a",
                            "aws lambda invoke --function-name " + provisioner_apigw_lambda.lambda_function.function_name + " --payload fileb://mlops-config.json response.json" + " --invocation-type RequestResponse",
                        ]
                    }
                },
            }
        ),
    )
    # Defining a Codepipeline project with CodeCommit as source
    codecommit_pipeline = codepipeline.Pipeline(
        self,
        "MLOpsCodeCommitPipeline",
        stages=[
            codepipeline.StageProps(
                stage_name="Source",
                actions=[
                    codepipeline_actions.CodeCommitSourceAction(
                        action_name="CodeCommit",
                        repository=repo,
                        branch="main",
                        output=source_output,
                    )
                ],
            ),
            codepipeline.StageProps(
                stage_name="TakeConfig",
                actions=[
                    codepipeline_actions.CodeBuildAction(
                        action_name="provision_pipeline",
                        input=source_output,
                        outputs=[],
                        project=codebuild_project,
                    )
                ],
            ),
        ],
        cross_account_keys=False,
    )
    codecommit_pipeline.add_to_role_policy(
        iam.PolicyStatement(
            actions=["lambda:InvokeFunction"],
            resources=[provisioner_apigw_lambda.lambda_function.function_arn],
        )
    )
    codebuild_project.add_to_role_policy(
        iam.PolicyStatement(
            actions=["lambda:InvokeFunction"],
            resources=[provisioner_apigw_lambda.lambda_function.function_arn],
        )
    )
    # NOTE(review): index [1] is presumed to be the pipeline's generated
    # artifact bucket -- fragile if the construct tree changes; verify on
    # CDK upgrades.
    pipeline_child_nodes = codecommit_pipeline.node.find_all()
    pipeline_child_nodes[1].node.default_child.cfn_options.metadata = {
        "cfn_nag": {
            "rules_to_suppress": [
                {
                    "id": "W35",
                    "reason": "This is a managed bucket generated by CDK for codepipeline.",
                },
                {
                    "id": "W51",
                    "reason": "This is a managed bucket generated by CDK for codepipeline.",
                },
            ]
        }
    }

    # custom resource for operational metrics###
    metricsMapping = core.CfnMapping(self, "AnonymousData", mapping={"SendAnonymousData": {"Data": "Yes"}})
    metrics_condition = core.CfnCondition(
        self,
        "AnonymousDatatoAWS",
        expression=core.Fn.condition_equals(metricsMapping.find_in_map("SendAnonymousData", "Data"), "Yes"),
    )
    helper_function = lambda_.Function(
        self,
        "SolutionHelper",
        code=lambda_.Code.from_asset("lambdas/solution_helper"),
        handler="lambda_function.handler",
        runtime=lambda_.Runtime.PYTHON_3_8,
        timeout=core.Duration.seconds(60),
    )
    # Generates a per-deployment UUID used to anonymize the metrics below.
    createIdFunction = core.CustomResource(
        self,
        "CreateUniqueID",
        service_token=helper_function.function_arn,
        properties={"Resource": "UUID"},
        resource_type="Custom::CreateUUID",
    )
    sendDataFunction = core.CustomResource(
        self,
        "SendAnonymousData",
        service_token=helper_function.function_arn,
        properties={
            "Resource": "AnonymousMetric",
            "UUID": createIdFunction.get_att_string("UUID"),
            "gitSelected": git_address.value_as_string,
            "Region": core.Aws.REGION,
            "SolutionId": "SO0136",
            "Version": "%%VERSION%%",
        },
        resource_type="Custom::AnonymousData",
    )
    # Metrics resources only exist when anonymous data collection is enabled.
    core.Aspects.of(helper_function).add(ConditionalResources(metrics_condition))
    core.Aspects.of(createIdFunction).add(ConditionalResources(metrics_condition))
    core.Aspects.of(sendDataFunction).add(ConditionalResources(metrics_condition))
    helper_function.node.default_child.cfn_options.metadata = {
        "cfn_nag": {
            "rules_to_suppress": [
                {
                    "id": "W58",
                    "reason": "The lambda functions role already has permissions to write cloudwatch logs",
                }
            ]
        }
    }

    # If user chooses Git as pipeline provision type, create codepipeline with Git repo as source
    core.Aspects.of(repo).add(ConditionalResources(git_address_provided))
    core.Aspects.of(codecommit_pipeline).add(ConditionalResources(git_address_provided))
    core.Aspects.of(codebuild_project).add(ConditionalResources(git_address_provided))

    # Create Template Interface
    self.template_options.metadata = {
        "AWS::CloudFormation::Interface": {
            "ParameterGroups": [
                {
                    "Label": {"default": "MLOps Framework Settings"},
                    "Parameters": [
                        notification_email.logical_id,
                        git_address.logical_id,
                        existing_bucket.logical_id,
                    ],
                }
            ],
            "ParameterLabels": {
                f"{notification_email.logical_id}": {"default": "Notification Email (Required)"},
                f"{git_address.logical_id}": {"default": "CodeCommit Repo URL Address (Optional)"},
                f"{existing_bucket.logical_id}": {"default": "Name of an Existing S3 Bucket (Optional)"},
            },
        }
    }
    # Outputs #
    core.CfnOutput(
        self,
        id="BlueprintsBucket",
        value=f"https://s3.console.aws.amazon.com/s3/buckets/{blueprint_repository_bucket.bucket_name}",
        description="S3 Bucket to upload MLOps Framework Blueprints",
    )
    core.CfnOutput(
        self,
        id="AssetsBucket",
        value=f"https://s3.console.aws.amazon.com/s3/buckets/{assets_s3_bucket_name}",
        description="S3 Bucket to upload model artifact",
    )
def __init__(self, scope: core.Construct, id: str, config: ContainerPipelineConfiguration, **kwargs) -> None:
    """Commit-to-ECR container pipeline: CodeCommit source, Dockerfile lint,
    source secret scan, then a combined Spring build/deploy stage that pushes
    the image to ECR. A plain docker build/push variant is kept commented out
    as the previously-used alternative.

    :param config: project configuration; only ProjectName is read here.
    """
    super().__init__(scope, id, **kwargs)
    # sourceOutput = codepipeline.Artifact(
    #     artifact_name=config.ProjectName + "-SourceOutput"
    # )
    # Code Repo
    commit = aws_codecommit.Repository(self,
                                       config.ProjectName + "-apprepo",
                                       repository_name=config.ProjectName + "-app-repo")
    # Container Repo
    self.docker_repo = ecr.Repository(
        scope=self,
        id=config.ProjectName,
        removal_policy=core.RemovalPolicy.DESTROY,
        repository_name=config.ProjectName)
    pipeline = codepipeline.Pipeline(self,
                                     "MyPipeline",
                                     pipeline_name=config.ProjectName + "-commit-to-ecr")
    source_output = codepipeline.Artifact()
    source_action = codepipeline_actions.CodeCommitSourceAction(
        action_name="CodeCommit",
        repository=commit,
        output=source_output)
    # docker file linting
    # NOTE(review): UBUNTU_14_04_NODEJS_10_1_0 is a long-deprecated CodeBuild
    # image -- consider migrating to a current standard image.
    cb_docker_build_lint = aws_codebuild.PipelineProject(
        self,
        "DockerLint",
        project_name=config.ProjectName + "-docker-lint",
        build_spec=aws_codebuild.BuildSpec.from_source_filename(
            filename='configs/buildspec_lint.yml'),
        environment=aws_codebuild.BuildEnvironment(
            build_image=aws_codebuild.LinuxBuildImage.UBUNTU_14_04_NODEJS_10_1_0,
            privileged=True,
        ),
        # pass the ecr repo uri into the codebuild project so codebuild knows where to push
        environment_variables={
            'ecr':
            aws_codebuild.BuildEnvironmentVariable(
                value=self.docker_repo.repository_uri),
            'project_name':
            aws_codebuild.BuildEnvironmentVariable(
                value=config.ProjectName)
        },
        description='linting the container dockerfile for best practices',
        timeout=core.Duration.minutes(60),
    )
    # code repo secret scan
    cb_source_secretscan = aws_codebuild.PipelineProject(
        self,
        "SourceSecretScan",
        project_name=config.ProjectName + "-source-secretscan",
        build_spec=aws_codebuild.BuildSpec.from_source_filename(
            filename='configs/buildspec_secrets.yml'),
        environment=aws_codebuild.BuildEnvironment(
            privileged=True,
            build_image=aws_codebuild.LinuxBuildImage.AMAZON_LINUX_2_3,
        ),
        # pass the ecr repo uri into the codebuild project so codebuild knows where to push
        environment_variables={
            'commituri':
            aws_codebuild.BuildEnvironmentVariable(
                value=commit.repository_clone_url_http),
            'ecr':
            aws_codebuild.BuildEnvironmentVariable(
                value=self.docker_repo.repository_uri),
            'project_name':
            aws_codebuild.BuildEnvironmentVariable(
                value=config.ProjectName)
        },
        description='Scanning source for secrets',
        timeout=core.Duration.minutes(60),
    )
    # NOTE(review): wildcard codecommit:* on all resources is broad;
    # presumably required by the secret-scan tooling -- confirm and narrow.
    cb_source_secretscan.add_to_role_policy(statement=iam.PolicyStatement(
        resources=['*'],
        actions=['codecommit:*']))
    # push to ecr repo
    # cb_docker_build_push = aws_codebuild.PipelineProject(
    #     self, "DockerBuild",
    #     project_name= config.ProjectName + "-docker-build",
    #     build_spec=aws_codebuild.BuildSpec.from_source_filename(
    #         filename='configs/docker_build_base.yml'),
    #     environment=aws_codebuild.BuildEnvironment(
    #         privileged=True,
    #         compute_type=aws_codebuild.ComputeType.MEDIUM
    #     ),
    #     # pass the ecr repo uri into the codebuild project so codebuild knows where to push
    #     environment_variables={
    #         'ecr': aws_codebuild.BuildEnvironmentVariable(
    #             value=self.docker_repo.repository_uri),
    #         'tag': aws_codebuild.BuildEnvironmentVariable(
    #             value="release"),
    #         'project_name': aws_codebuild.BuildEnvironmentVariable(
    #             value=config.ProjectName)
    #     },
    #     description='Deploy to ECR',
    #     timeout=core.Duration.minutes(60),
    # )
    # push Spring app to ecr repo and deploy
    cb_spring_build_deploy = aws_codebuild.PipelineProject(
        self,
        "SpringBuildDeploy",
        project_name=config.ProjectName + "-spring-build-deploy",
        build_spec=aws_codebuild.BuildSpec.from_source_filename(
            filename='configs/spring_build_deploy.yml'),
        environment=aws_codebuild.BuildEnvironment(
            privileged=True,
            build_image=aws_codebuild.LinuxBuildImage.AMAZON_LINUX_2_3,
            compute_type=aws_codebuild.ComputeType.MEDIUM),
        # pass the ecr repo uri into the codebuild project so codebuild knows where to push
        environment_variables={
            'ecr':
            aws_codebuild.BuildEnvironmentVariable(
                value=self.docker_repo.repository_uri),
            'tag':
            aws_codebuild.BuildEnvironmentVariable(value="release"),
            'project_name':
            aws_codebuild.BuildEnvironmentVariable(
                value=config.ProjectName)
        },
        description='Deploy to ECR and Push to Fargate',
        timeout=core.Duration.minutes(60),
    )
    # grant access to all CodeBuild projects to pull images from ECR
    statement = iam.PolicyStatement(actions=[
        "ecr:GetAuthorizationToken",
        "ecr:BatchCheckLayerAvailability",
        "ecr:GetDownloadUrlForLayer",
        "ecr:BatchGetImage",
        "ecr:DescribeRepositories",
        "ecr:DescribeImages",
        "ecr:ListImages",
    ],
                                    resources=['*'])
    # cb_docker_build_push.add_to_role_policy(statement)
    cb_spring_build_deploy.add_to_role_policy(statement)
    cb_docker_build_lint.add_to_role_policy(statement)
    cb_source_secretscan.add_to_role_policy(statement)
    pipeline.add_stage(stage_name="Source", actions=[source_action])
    pipeline.add_stage(stage_name='Lint',
                       actions=[
                           codepipeline_actions.CodeBuildAction(
                               action_name='DockerLintImages',
                               input=source_output,
                               project=cb_docker_build_lint,
                               run_order=1,
                           )
                       ])
    pipeline.add_stage(stage_name='SecretScan',
                       actions=[
                           codepipeline_actions.CodeBuildAction(
                               action_name='SourceSecretScanImages',
                               input=source_output,
                               project=cb_source_secretscan,
                               run_order=1,
                           )
                       ])
    pipeline.add_stage(
        # stage_name='Build',
        stage_name='BuildAndDeploy',
        actions=[
            codepipeline_actions.CodeBuildAction(
                # action_name='DockerBuildImages',
                action_name='SpringBuildAndDeploy',
                input=source_output,
                # project= cb_docker_build_push,
                project=cb_spring_build_deploy,
                run_order=1,
            )
        ])
    # self.docker_repo.grant_pull_push(cb_docker_build_push)
    self.docker_repo.grant_pull_push(cb_spring_build_deploy)
def __init__(self, scope: Stack, prefix: str, artifacts_bucket: IBucket,
             source_repository: aws_codecommit.Repository,
             ecr_repository: aws_ecr.Repository, task_def: str, app_spec: str,
             main_listener: aws_elasticloadbalancingv2.CfnListener,
             deployments_listener: aws_elasticloadbalancingv2.CfnListener,
             ecs_cluster: aws_ecs.Cluster, ecs_service: CfnService,
             production_target_group, deployment_target_group):
    """Build a CodePipeline that performs ECS blue/green deployments.

    Creates a CodeDeploy ECS application, a deployment group (via a
    project-local custom-resource helper), a CodeCommit repository holding
    the appspec/taskdef deployment config, and a two-stage pipeline:
    a Source stage pulling both the ECR image and the config repo, and a
    Deploy stage running a CodeDeploy ECS blue/green deployment.

    :param scope: Stack into which every construct is created.
    :param prefix: Prefix applied to all construct ids and resource names.
    :param artifacts_bucket: Existing S3 bucket for pipeline artifacts.
    :param source_repository: Application source repository (handed to the
        deployment-group helper).
    :param ecr_repository: ECR repository whose new images trigger deploys.
    :param task_def: Task definition (taskdef) file content/identifier.
    :param app_spec: CodeDeploy appspec file content/identifier.
    :param main_listener: Production ALB listener for blue/green traffic.
    :param deployments_listener: Test ALB listener for the replacement set.
    :param ecs_cluster: Cluster hosting the deployed service.
    :param ecs_service: Low-level CfnService the deployment group targets.
    :param production_target_group: Target group serving live traffic.
    :param deployment_target_group: Target group for the replacement tasks.
    """
    # CodeDeploy application that owns the blue/green deployment group.
    self.application = aws_codedeploy.EcsApplication(
        scope,
        prefix + 'FargateCodeDeployApplication',
        application_name=prefix + 'FargateCodeDeployApplication',
    )
    # NOTE(review): DeploymentGroup appears to be a project-local
    # custom-resource wrapper returning a CFN resource via get_resource() —
    # confirm against its definition elsewhere in the project.
    self.deployment_group_custom = DeploymentGroup(
        stack=scope,
        prefix=prefix,
        code_repository=source_repository,
        task_definition=task_def,
        app_spec=app_spec,
        ecs_application=self.application,
        main_listener=main_listener,
        deployments_listener=deployments_listener,
        ecs_cluster=ecs_cluster,
        production_target_group=production_target_group,
        deployment_target_group=deployment_target_group).get_resource()
    # The deployment group can only be created once the ECS service and
    # cluster exist — enforce ordering explicitly.
    self.deployment_group_custom.node.add_dependency(ecs_service)
    self.deployment_group_custom.node.add_dependency(ecs_cluster)
    # Import the (custom-resource-created) group by name so that the
    # pipeline's deploy action below can reference a CDK object.
    self.deployment_group = aws_codedeploy.EcsDeploymentGroup.from_ecs_deployment_group_attributes(
        scope,
        prefix + 'FargateDeploymentGroup',
        application=self.application,
        deployment_group_name=prefix + 'FargateDeploymentGroup',
    )
    self.deployment_group.node.add_dependency(self.deployment_group_custom)
    # Dedicated repository for the appspec/taskdef files consumed by the
    # deploy action's templates.
    self.deployment_config_repository = aws_codecommit.Repository(
        scope,
        prefix + 'FargateDeploymentConfigRepository',
        description=
        'Repository containing appspec and taskdef files for ecs code-deploy blue/green deployments.',
        repository_name=prefix.lower() + '-deployment-config')
    # NOTE(review): DeploymentConfig presumably commits the generated
    # appspec/taskdef into the config repository — confirm.
    self.commit_custom = DeploymentConfig(
        stack=scope,
        prefix=prefix,
        code_repository=self.deployment_config_repository,
        task_definition=task_def,
        app_spec=app_spec).get_resource()
    # Pipeline artifacts: the ECR image detail file and the config files.
    self.ecr_repository_output_artifact = aws_codepipeline.Artifact(
        'EcsImage')
    self.config_output_artifact = aws_codepipeline.Artifact('EcsConfig')
    # Two-stage pipeline: pull both sources, then blue/green deploy.
    self.ecr_to_ecs_pipeline = aws_codepipeline.Pipeline(
        scope,
        prefix + 'FargateEcrToEcsPipeline',
        artifact_bucket=artifacts_bucket,
        pipeline_name=prefix + 'FargateEcrToEcsPipeline',
        stages=[
            aws_codepipeline.StageProps(
                stage_name='SourceStage',
                actions=[
                    # New image pushed to ECR triggers the pipeline.
                    aws_codepipeline_actions.EcrSourceAction(
                        action_name='SourceEcrAction',
                        output=self.ecr_repository_output_artifact,
                        repository=ecr_repository,
                        run_order=1,
                    ),
                    # Appspec/taskdef templates come from the config repo.
                    aws_codepipeline_actions.CodeCommitSourceAction(
                        action_name='SourceCodeCommitAction',
                        output=self.config_output_artifact,
                        repository=self.deployment_config_repository,
                        branch='master',
                        run_order=1,
                    )
                ]),
            aws_codepipeline.StageProps(
                stage_name='DeployStage',
                actions=[
                    aws_codepipeline_actions.CodeDeployEcsDeployAction(
                        action_name='DeployAction',
                        deployment_group=self.deployment_group,
                        app_spec_template_input=self.config_output_artifact,
                        task_definition_template_input=self.config_output_artifact,
                        # The IMAGE1_NAME placeholder inside the taskdef is
                        # substituted with the image URI from the ECR source.
                        container_image_inputs=[
                            aws_codepipeline_actions.CodeDeployEcsContainerImageInput(
                                input=self.ecr_repository_output_artifact,
                                task_definition_placeholder='IMAGE1_NAME')
                        ],
                        run_order=1)
                ])
        ])
    # The deploy stage reads the config repository; make sure the initial
    # commit (custom resource) exists before the pipeline first runs.
    self.ecr_to_ecs_pipeline.node.add_dependency(self.commit_custom)
def __init__(self, scope: core.Construct, id: str, vpc: aws_ec2.Vpc,
             ecs_cluster: aws_ecs.Cluster,
             alb: elbv2.ApplicationLoadBalancer,
             albTestListener: elbv2.ApplicationListener,
             albProdListener: elbv2.ApplicationListener,
             blueGroup: elbv2.ApplicationTargetGroup,
             greenGroup: elbv2.ApplicationTargetGroup, **kwargs) -> None:
    """Provision an NGINX ECS blue/green deployment pipeline.

    Creates the ECR and CodeCommit repositories, IAM roles, a CodeDeploy
    application with a custom-resource-created deployment group, CloudWatch
    4xx alarms for both target groups, dummy and real Fargate task
    definitions, the ECS service (CODE_DEPLOY deployment controller), a
    CodeBuild project, and a three-stage CodePipeline (Source / Build /
    Deploy) with an encrypted artifacts bucket.

    BUGFIX: the original signature used ``=`` instead of ``:`` for
    ``ecs_cluster``/``alb``/listeners/target groups, which made the CDK
    *classes themselves* the default argument values — a caller omitting any
    of them would receive a class object and fail at synth time. They are
    now proper (required) type annotations.

    :param scope: Parent construct.
    :param id: Construct id.
    :param vpc: VPC (currently unused by this block; kept for interface
        compatibility).
    :param ecs_cluster: Cluster to run the NGINX service on.
    :param alb: Application load balancer fronting the service.
    :param albTestListener: Test-traffic listener for blue/green.
    :param albProdListener: Production-traffic listener.
    :param blueGroup: Blue (initial live) target group.
    :param greenGroup: Green (replacement) target group.
    """
    super().__init__(scope, id, **kwargs)

    # BUGFIX: removed the dead `ECS_APP_NAME = "Nginx-app",` line — the
    # trailing comma made it a one-element tuple, and it was immediately
    # overwritten by the "Nginx-microservice" assignment below.
    ECS_DEPLOYMENT_GROUP_NAME = "NginxAppECSBlueGreen"
    ECS_DEPLOYMENT_CONFIG_NAME = "CodeDeployDefault.ECSLinear10PercentEvery1Minutes"
    ECS_TASKSET_TERMINATION_WAIT_TIME = 10  # minutes before old task set is reaped
    ECS_TASK_FAMILY_NAME = "Nginx-microservice"
    ECS_APP_NAME = "Nginx-microservice"
    ECS_APP_LOG_GROUP_NAME = "/ecs/Nginx-microservice"
    DUMMY_TASK_FAMILY_NAME = "sample-Nginx-microservice"
    DUMMY_APP_NAME = "sample-Nginx-microservice"
    DUMMY_APP_LOG_GROUP_NAME = "/ecs/sample-Nginx-microservice"
    DUMMY_CONTAINER_IMAGE = "smuralee/nginx"

    # =========================================================================
    # ECR and CodeCommit repositories for the Blue/Green deployment
    # =========================================================================
    # ECR repository for the docker images
    NginxecrRepo = aws_ecr.Repository(self,
                                      "NginxRepo",
                                      image_scan_on_push=True)

    NginxCodeCommitrepo = aws_codecommit.Repository(
        self,
        "NginxRepository",
        repository_name=ECS_APP_NAME,
        description="Oussama application hosted on NGINX")

    # =========================================================================
    # CODE BUILD and ECS TASK ROLES for the Blue/Green deployment
    # =========================================================================
    # IAM role for the CodeBuild project: needs to push images to ECR.
    codeBuildServiceRole = aws_iam.Role(
        self,
        "codeBuildServiceRole",
        assumed_by=aws_iam.ServicePrincipal('codebuild.amazonaws.com'))

    inlinePolicyForCodeBuild = aws_iam.PolicyStatement(
        effect=aws_iam.Effect.ALLOW,
        actions=[
            "ecr:GetAuthorizationToken", "ecr:BatchCheckLayerAvailability",
            "ecr:InitiateLayerUpload", "ecr:UploadLayerPart",
            "ecr:CompleteLayerUpload", "ecr:PutImage"
        ],
        resources=["*"])

    codeBuildServiceRole.add_to_policy(inlinePolicyForCodeBuild)

    # ECS task role (also used as the execution role below).
    ecsTaskRole = aws_iam.Role(
        self,
        "ecsTaskRoleForWorkshop",
        assumed_by=aws_iam.ServicePrincipal('ecs-tasks.amazonaws.com'))

    ecsTaskRole.add_managed_policy(
        aws_iam.ManagedPolicy.from_aws_managed_policy_name(
            "service-role/AmazonECSTaskExecutionRolePolicy"))

    # =========================================================================
    # CODE DEPLOY APPLICATION for the Blue/Green deployment
    # =========================================================================
    codeDeployApplication = codedeploy.EcsApplication(
        self, "NginxAppCodeDeploy")

    # Service role CodeDeploy assumes to orchestrate ECS blue/green.
    codeDeployServiceRole = aws_iam.Role(
        self,
        "codeDeployServiceRole",
        assumed_by=aws_iam.ServicePrincipal('codedeploy.amazonaws.com'))
    codeDeployServiceRole.add_managed_policy(
        aws_iam.ManagedPolicy.from_aws_managed_policy_name(
            "AWSCodeDeployRoleForECS"))

    # IAM role for the custom Lambda that creates the deployment group
    # (ECS blue/green deployment groups are created via a custom resource).
    customLambdaServiceRole = aws_iam.Role(
        self,
        "codeDeployCustomLambda",
        assumed_by=aws_iam.ServicePrincipal('lambda.amazonaws.com'))

    inlinePolicyForLambda = aws_iam.PolicyStatement(
        effect=aws_iam.Effect.ALLOW,
        actions=[
            "iam:PassRole", "sts:AssumeRole", "codedeploy:List*",
            "codedeploy:Get*", "codedeploy:UpdateDeploymentGroup",
            "codedeploy:CreateDeploymentGroup",
            "codedeploy:DeleteDeploymentGroup"
        ],
        resources=["*"])

    customLambdaServiceRole.add_managed_policy(
        aws_iam.ManagedPolicy.from_aws_managed_policy_name(
            'service-role/AWSLambdaBasicExecutionRole'))
    customLambdaServiceRole.add_to_policy(inlinePolicyForLambda)

    # Custom resource provider Lambda that creates the deployment group.
    createDeploymentGroupLambda = aws_lambda.Function(
        self,
        'createDeploymentGroupLambda',
        code=aws_lambda.Code.from_asset("custom_resources"),
        runtime=aws_lambda.Runtime.PYTHON_3_8,
        handler='create_deployment_group.handler',
        role=customLambdaServiceRole,
        description="Custom resource to create deployment group",
        memory_size=128,
        timeout=core.Duration.seconds(60))

    # =========================================================================
    # CloudWatch alarms on HTTP 4xx for both target groups; used by the
    # deployment group to roll back a bad deployment.
    # =========================================================================
    blue4xxMetric = aws_cloudwatch.Metric(
        namespace='AWS/ApplicationELB',
        metric_name='HTTPCode_Target_4XX_Count',
        dimensions={
            "TargetGroup": blueGroup.target_group_full_name,
            "LoadBalancer": alb.load_balancer_full_name
        },
        statistic="sum",
        period=core.Duration.minutes(1))

    blueGroupAlarm = aws_cloudwatch.Alarm(
        self,
        "blue4xxErrors",
        alarm_name="Blue_4xx_Alarm",
        alarm_description=
        "CloudWatch Alarm for the 4xx errors of Blue target group",
        metric=blue4xxMetric,
        threshold=1,
        evaluation_periods=1)

    green4xxMetric = aws_cloudwatch.Metric(
        namespace='AWS/ApplicationELB',
        metric_name='HTTPCode_Target_4XX_Count',
        dimensions={
            "TargetGroup": greenGroup.target_group_full_name,
            "LoadBalancer": alb.load_balancer_full_name
        },
        statistic="sum",
        period=core.Duration.minutes(1))

    greenGroupAlarm = aws_cloudwatch.Alarm(
        self,
        "green4xxErrors",
        alarm_name="Green_4xx_Alarm",
        alarm_description=
        "CloudWatch Alarm for the 4xx errors of Green target group",
        metric=green4xxMetric,
        threshold=1,
        evaluation_periods=1)

    # ================================================================================================
    # DUMMY TASK DEFINITION for the initial service creation
    # This is required for the service being made available to create the CodeDeploy Deployment Group
    # ================================================================================================
    sampleTaskDefinition = aws_ecs.FargateTaskDefinition(
        self,
        "sampleTaskDefn",
        family=DUMMY_TASK_FAMILY_NAME,
        cpu=256,
        memory_limit_mib=1024,
        task_role=ecsTaskRole,
        execution_role=ecsTaskRole)

    sampleContainerDefn = sampleTaskDefinition.add_container(
        "sampleAppContainer",
        image=aws_ecs.ContainerImage.from_registry(DUMMY_CONTAINER_IMAGE),
        logging=aws_ecs.AwsLogDriver(
            log_group=aws_logs.LogGroup(
                self,
                "sampleAppLogGroup",
                log_group_name=DUMMY_APP_LOG_GROUP_NAME,
                removal_policy=core.RemovalPolicy.DESTROY),
            stream_prefix=DUMMY_APP_NAME),
        docker_labels={"name": DUMMY_APP_NAME})

    port_mapping = aws_ecs.PortMapping(container_port=80,
                                       protocol=aws_ecs.Protocol.TCP)
    sampleContainerDefn.add_port_mappings(port_mapping)

    # ================================================================================================
    # ECS task definition using the ECR image; used by CodeDeploy for
    # subsequent blue/green deployments.
    # ================================================================================================
    NginxTaskDefinition = aws_ecs.FargateTaskDefinition(
        self,
        "appTaskDefn",
        family=ECS_TASK_FAMILY_NAME,
        cpu=256,
        memory_limit_mib=1024,
        task_role=ecsTaskRole,
        execution_role=ecsTaskRole)

    NginxcontainerDefinition = NginxTaskDefinition.add_container(
        "NginxAppContainer",
        image=aws_ecs.ContainerImage.from_ecr_repository(
            NginxecrRepo, "latest"),
        logging=aws_ecs.AwsLogDriver(
            log_group=aws_logs.LogGroup(
                self,
                "NginxAppLogGroup",
                log_group_name=ECS_APP_LOG_GROUP_NAME,
                removal_policy=core.RemovalPolicy.DESTROY),
            stream_prefix=ECS_APP_NAME),
        docker_labels={"name": ECS_APP_NAME})
    NginxcontainerDefinition.add_port_mappings(port_mapping)

    # =========================================================================
    # ECS SERVICE for the Blue/Green deployment — CODE_DEPLOY controller
    # hands deployment orchestration over to CodeDeploy.
    # =========================================================================
    NginxAppService = aws_ecs.FargateService(
        self,
        "NginxAppService",
        cluster=ecs_cluster,
        task_definition=NginxTaskDefinition,
        health_check_grace_period=core.Duration.seconds(10),
        desired_count=3,
        deployment_controller={
            "type": aws_ecs.DeploymentControllerType.CODE_DEPLOY
        },
        service_name=ECS_APP_NAME)

    # 80 = production traffic, 8080 = test listener traffic.
    NginxAppService.connections.allow_from(alb, aws_ec2.Port.tcp(80))
    NginxAppService.connections.allow_from(alb, aws_ec2.Port.tcp(8080))
    NginxAppService.attach_to_application_target_group(blueGroup)

    # =========================================================================
    # CODE DEPLOY - Deployment Group CUSTOM RESOURCE for the Blue/Green
    # deployment (created by the Lambda above).
    # =========================================================================
    core.CustomResource(
        self,
        'customEcsDeploymentGroup',
        service_token=createDeploymentGroupLambda.function_arn,
        properties={
            "ApplicationName": codeDeployApplication.application_name,
            "DeploymentGroupName": ECS_DEPLOYMENT_GROUP_NAME,
            "DeploymentConfigName": ECS_DEPLOYMENT_CONFIG_NAME,
            "ServiceRoleArn": codeDeployServiceRole.role_arn,
            "BlueTargetGroup": blueGroup.target_group_name,
            "GreenTargetGroup": greenGroup.target_group_name,
            "ProdListenerArn": albProdListener.listener_arn,
            "TestListenerArn": albTestListener.listener_arn,
            "EcsClusterName": ecs_cluster.cluster_name,
            "EcsServiceName": NginxAppService.service_name,
            "TerminationWaitTime": ECS_TASKSET_TERMINATION_WAIT_TIME,
            "BlueGroupAlarm": blueGroupAlarm.alarm_name,
            "GreenGroupAlarm": greenGroupAlarm.alarm_name,
        })

    # Import the custom-resource-created deployment group for the pipeline.
    ecsDeploymentGroup = codedeploy.EcsDeploymentGroup.from_ecs_deployment_group_attributes(
        self,
        "ecsDeploymentGroup",
        application=codeDeployApplication,
        deployment_group_name=ECS_DEPLOYMENT_GROUP_NAME,
        deployment_config=codedeploy.EcsDeploymentConfig.
        from_ecs_deployment_config_name(self, "ecsDeploymentConfig",
                                        ECS_DEPLOYMENT_CONFIG_NAME))

    # =========================================================================
    # CODE BUILD PROJECT for the Blue/Green deployment
    # =========================================================================
    NginxAppcodebuild = aws_codebuild.Project(
        self,
        "NginxAppCodeBuild",
        role=codeBuildServiceRole,
        environment=aws_codebuild.BuildEnvironment(
            build_image=aws_codebuild.LinuxBuildImage.STANDARD_4_0,
            compute_type=aws_codebuild.ComputeType.SMALL,
            privileged=True,  # required for docker-in-docker image builds
            environment_variables={
                'REPOSITORY_URI': {
                    'value': NginxecrRepo.repository_uri,
                    'type':
                    aws_codebuild.BuildEnvironmentVariableType.PLAINTEXT
                },
                'TASK_EXECUTION_ARN': {
                    'value': ecsTaskRole.role_arn,
                    'type':
                    aws_codebuild.BuildEnvironmentVariableType.PLAINTEXT
                },
                'TASK_FAMILY': {
                    'value': ECS_TASK_FAMILY_NAME,
                    'type':
                    aws_codebuild.BuildEnvironmentVariableType.PLAINTEXT
                }
            }),
        source=aws_codebuild.Source.code_commit(
            repository=NginxCodeCommitrepo))

    # =========================================================================
    # CODE PIPELINE for Blue/Green ECS deployment
    # =========================================================================
    codePipelineServiceRole = aws_iam.Role(
        self,
        "codePipelineServiceRole",
        assumed_by=aws_iam.ServicePrincipal('codepipeline.amazonaws.com'))

    inlinePolicyForCodePipeline = aws_iam.PolicyStatement(
        effect=aws_iam.Effect.ALLOW,
        actions=[
            "iam:PassRole", "sts:AssumeRole", "codecommit:Get*",
            "codecommit:List*", "codecommit:GitPull",
            "codecommit:UploadArchive", "codecommit:CancelUploadArchive",
            "codebuild:BatchGetBuilds", "codebuild:StartBuild",
            "codedeploy:CreateDeployment", "codedeploy:Get*",
            "codedeploy:RegisterApplicationRevision", "s3:Get*",
            "s3:List*", "s3:PutObject"
        ],
        resources=["*"])

    codePipelineServiceRole.add_to_policy(inlinePolicyForCodePipeline)

    sourceArtifact = codepipeline.Artifact('sourceArtifact')
    buildArtifact = codepipeline.Artifact('buildArtifact')

    # S3 bucket for storing the pipeline artifacts (encrypted, no public access).
    NginxAppArtifactsBucket = s3.Bucket(
        self,
        "NginxAppArtifactsBucket",
        encryption=s3.BucketEncryption.S3_MANAGED,
        block_public_access=s3.BlockPublicAccess.BLOCK_ALL)

    # Bucket policy: reject uploads not encrypted with aws:kms.
    denyUnEncryptedObjectUploads = aws_iam.PolicyStatement(
        effect=aws_iam.Effect.DENY,
        actions=["s3:PutObject"],
        principals=[aws_iam.AnyPrincipal()],
        resources=[NginxAppArtifactsBucket.bucket_arn + "/*"],
        conditions={
            "StringNotEquals": {
                "s3:x-amz-server-side-encryption": "aws:kms"
            }
        })

    # Bucket policy: reject any non-TLS access.
    denyInsecureConnections = aws_iam.PolicyStatement(
        effect=aws_iam.Effect.DENY,
        actions=["s3:*"],
        principals=[aws_iam.AnyPrincipal()],
        resources=[NginxAppArtifactsBucket.bucket_arn + "/*"],
        conditions={"Bool": {
            "aws:SecureTransport": "false"
        }})

    NginxAppArtifactsBucket.add_to_resource_policy(
        denyUnEncryptedObjectUploads)
    NginxAppArtifactsBucket.add_to_resource_policy(denyInsecureConnections)

    # Code Pipeline - CloudWatch trigger event is created by CDK
    codepipeline.Pipeline(
        self,
        "ecsBlueGreen",
        role=codePipelineServiceRole,
        artifact_bucket=NginxAppArtifactsBucket,
        stages=[
            codepipeline.StageProps(
                stage_name='Source',
                actions=[
                    aws_codepipeline_actions.CodeCommitSourceAction(
                        action_name='Source',
                        repository=NginxCodeCommitrepo,
                        output=sourceArtifact,
                    )
                ]),
            codepipeline.StageProps(
                stage_name='Build',
                actions=[
                    aws_codepipeline_actions.CodeBuildAction(
                        action_name='Build',
                        project=NginxAppcodebuild,
                        input=sourceArtifact,
                        outputs=[buildArtifact])
                ]),
            codepipeline.StageProps(
                stage_name='Deploy',
                actions=[
                    aws_codepipeline_actions.CodeDeployEcsDeployAction(
                        action_name='Deploy',
                        deployment_group=ecsDeploymentGroup,
                        app_spec_template_input=buildArtifact,
                        task_definition_template_input=buildArtifact,
                    )
                ])
        ])

    # =========================================================================
    # Export the outputs
    # =========================================================================
    core.CfnOutput(self,
                   "ecsBlueGreenCodeRepo",
                   description="Demo app code commit repository",
                   export_name="ecsBlueGreenDemoAppRepo",
                   value=NginxCodeCommitrepo.repository_clone_url_http)

    core.CfnOutput(self,
                   "ecsBlueGreenLBDns",
                   description="Load balancer DNS",
                   export_name="ecsBlueGreenLBDns",
                   value=alb.load_balancer_dns_name)
def __init__(self, scope: core.Construct, id: str, *, multi_account=False, **kwargs) -> None: super().__init__(scope, id, **kwargs) # Get stack parameters: notification_email = create_notification_email_parameter(self) git_address = create_git_address_parameter(self) # Get the optional S3 assets bucket to use existing_bucket = create_existing_bucket_parameter(self) # Get the optional S3 assets bucket to use existing_ecr_repo = create_existing_ecr_repo_parameter(self) # create only if multi_account template if multi_account: # create development parameters account_type = "development" dev_account_id = create_account_id_parameter( self, "DEV_ACCOUNT_ID", account_type) dev_org_id = create_org_id_parameter(self, "DEV_ORG_ID", account_type) # create staging parameters account_type = "staging" staging_account_id = create_account_id_parameter( self, "STAGING_ACCOUNT_ID", account_type) staging_org_id = create_org_id_parameter(self, "STAGING_ORG_ID", account_type) # create production parameters account_type = "production" prod_account_id = create_account_id_parameter( self, "PROD_ACCOUNT_ID", account_type) prod_org_id = create_org_id_parameter(self, "PROD_ORG_ID", account_type) # Conditions git_address_provided = create_git_address_provided_condition( self, git_address) # client provided an existing S3 bucket name, to be used for assets existing_bucket_provided = create_existing_bucket_provided_condition( self, existing_bucket) # client provided an existing Amazon ECR name existing_ecr_provided = create_existing_ecr_provided_condition( self, existing_ecr_repo) # S3 bucket needs to be created for assets create_new_bucket = create_new_bucket_condition(self, existing_bucket) # Amazon ECR repo needs too be created for custom Algorithms create_new_ecr_repo = create_new_ecr_repo_condition( self, existing_ecr_repo) # Constants pipeline_stack_name = "mlops-pipeline" # CDK Resources setup access_logs_bucket = s3.Bucket( self, "accessLogs", encryption=s3.BucketEncryption.S3_MANAGED, 
block_public_access=s3.BlockPublicAccess.BLOCK_ALL, ) # Apply secure transfer bucket policy apply_secure_bucket_policy(access_logs_bucket) # This is a logging bucket. access_logs_bucket.node.default_child.cfn_options.metadata = suppress_s3_access_policy( ) # Import user provide S3 bucket, if any. s3.Bucket.from_bucket_arn is used instead of # s3.Bucket.from_bucket_name to allow cross account bucket. client_existing_bucket = s3.Bucket.from_bucket_arn( self, "ClientExistingBucket", f"arn:aws:s3:::{existing_bucket.value_as_string.strip()}", ) # Create the resource if existing_bucket_provided condition is True core.Aspects.of(client_existing_bucket).add( ConditionalResources(existing_bucket_provided)) # Import user provided Amazon ECR repository client_erc_repo = ecr.Repository.from_repository_name( self, "ClientExistingECRReo", existing_ecr_repo.value_as_string) # Create the resource if existing_ecr_provided condition is True core.Aspects.of(client_erc_repo).add( ConditionalResources(existing_ecr_provided)) # Creating assets bucket so that users can upload ML Models to it. 
assets_bucket = s3.Bucket( self, "pipeline-assets-" + str(uuid.uuid4()), versioned=True, encryption=s3.BucketEncryption.S3_MANAGED, server_access_logs_bucket=access_logs_bucket, server_access_logs_prefix="assets_bucket_access_logs", block_public_access=s3.BlockPublicAccess.BLOCK_ALL, ) # Apply secure transport bucket policy apply_secure_bucket_policy(assets_bucket) s3_actions = ["s3:GetObject", "s3:ListBucket"] # if multi account if multi_account: # add permissions for other accounts to access the assets bucket assets_bucket.add_to_resource_policy( iam.PolicyStatement( effect=iam.Effect.ALLOW, actions=s3_actions, principals=[ iam.AccountPrincipal(dev_account_id.value_as_string), iam.AccountPrincipal( staging_account_id.value_as_string), iam.AccountPrincipal(prod_account_id.value_as_string), ], resources=[ assets_bucket.bucket_arn, f"{assets_bucket.bucket_arn}/*" ], )) # Create the resource if create_new_bucket condition is True core.Aspects.of(assets_bucket).add( ConditionalResources(create_new_bucket)) # Get assets S3 bucket's name/arn, based on the condition assets_s3_bucket_name = core.Fn.condition_if( existing_bucket_provided.logical_id, client_existing_bucket.bucket_name, assets_bucket.bucket_name, ).to_string() # Creating Amazon ECR repository ecr_repo = ecr.Repository(self, "ECRRepo", image_scan_on_push=True) # if multi account if multi_account: # add permissios to other account to pull images ecr_repo.add_to_resource_policy( iam.PolicyStatement( effect=iam.Effect.ALLOW, actions=[ "ecr:DescribeImages", "ecr:DescribeRepositories", "ecr:GetDownloadUrlForLayer", "ecr:BatchGetImage", "ecr:BatchCheckLayerAvailability", ], principals=[ iam.AccountPrincipal(dev_account_id.value_as_string), iam.AccountPrincipal( staging_account_id.value_as_string), iam.AccountPrincipal(prod_account_id.value_as_string), ], )) # Create the resource if create_new_ecr condition is True core.Aspects.of(ecr_repo).add( ConditionalResources(create_new_ecr_repo)) # Get ECR repo's name based 
on the condition ecr_repo_name = core.Fn.condition_if( existing_ecr_provided.logical_id, client_erc_repo.repository_name, ecr_repo.repository_name, ).to_string() # Get ECR repo's arn based on the condition ecr_repo_arn = core.Fn.condition_if( existing_ecr_provided.logical_id, client_erc_repo.repository_arn, ecr_repo.repository_arn, ).to_string() blueprints_bucket_name = "blueprint-repository-" + str(uuid.uuid4()) blueprint_repository_bucket = s3.Bucket( self, blueprints_bucket_name, encryption=s3.BucketEncryption.S3_MANAGED, server_access_logs_bucket=access_logs_bucket, server_access_logs_prefix=blueprints_bucket_name, block_public_access=s3.BlockPublicAccess.BLOCK_ALL, ) # Apply secure transport bucket policy apply_secure_bucket_policy(blueprint_repository_bucket) # if multi account if multi_account: # add permissions for other accounts to access the blueprint bucket blueprint_repository_bucket.add_to_resource_policy( iam.PolicyStatement( effect=iam.Effect.ALLOW, actions=s3_actions, principals=[ iam.AccountPrincipal(dev_account_id.value_as_string), iam.AccountPrincipal( staging_account_id.value_as_string), iam.AccountPrincipal(prod_account_id.value_as_string), ], resources=[ blueprint_repository_bucket.bucket_arn, f"{blueprint_repository_bucket.bucket_arn}/*" ], )) # Custom resource to copy source bucket content to blueprints bucket custom_resource_lambda_fn = lambda_.Function( self, "CustomResourceLambda", code=lambda_.Code.from_asset("lambdas/custom_resource"), handler="index.on_event", runtime=lambda_.Runtime.PYTHON_3_8, environment={ "source_bucket": "https://%%BUCKET_NAME%%-" + core.Aws.REGION + ".s3.amazonaws.com/%%SOLUTION_NAME%%/%%VERSION%%", "destination_bucket": blueprint_repository_bucket.bucket_name, "LOG_LEVEL": "INFO", }, timeout=core.Duration.seconds(60), ) custom_resource_lambda_fn.node.default_child.cfn_options.metadata = suppress_lambda_policies( ) blueprint_repository_bucket.grant_write(custom_resource_lambda_fn) custom_resource = 
core.CustomResource( self, "CustomResourceCopyAssets", service_token=custom_resource_lambda_fn.function_arn, ) custom_resource.node.add_dependency(blueprint_repository_bucket) # IAM policies setup ### cloudformation_role = iam.Role( self, "mlopscloudformationrole", assumed_by=iam.ServicePrincipal("cloudformation.amazonaws.com"), ) lambda_invoke_action = "lambda:InvokeFunction" # Cloudformation policy setup orchestrator_policy = iam.Policy( self, "lambdaOrchestratorPolicy", statements=[ iam.PolicyStatement( actions=[ "cloudformation:CreateStack", "cloudformation:DeleteStack", "cloudformation:UpdateStack", "cloudformation:ListStackResources", ], resources=[ (f"arn:{core.Aws.PARTITION}:cloudformation:{core.Aws.REGION}:" f"{core.Aws.ACCOUNT_ID}:stack/{pipeline_stack_name}*/*" ), ], ), iam.PolicyStatement( actions=[ "iam:CreateRole", "iam:DeleteRole", "iam:DeleteRolePolicy", "iam:GetRole", "iam:GetRolePolicy", "iam:PassRole", "iam:PutRolePolicy", "iam:AttachRolePolicy", "iam:DetachRolePolicy", ], resources=[ f"arn:{core.Aws.PARTITION}:iam::{core.Aws.ACCOUNT_ID}:role/{pipeline_stack_name}*" ], ), iam.PolicyStatement( actions=[ "ecr:CreateRepository", "ecr:DescribeRepositories", ], resources=[ (f"arn:{core.Aws.PARTITION}:ecr:{core.Aws.REGION}:" f"{core.Aws.ACCOUNT_ID}:repository/{ecr_repo_name}") ], ), iam.PolicyStatement( actions=[ "codebuild:CreateProject", "codebuild:DeleteProject", "codebuild:BatchGetProjects", ], resources=[ (f"arn:{core.Aws.PARTITION}:codebuild:{core.Aws.REGION}:" f"{core.Aws.ACCOUNT_ID}:project/ContainerFactory*"), (f"arn:{core.Aws.PARTITION}:codebuild:{core.Aws.REGION}:" f"{core.Aws.ACCOUNT_ID}:project/VerifySagemaker*"), (f"arn:{core.Aws.PARTITION}:codebuild:{core.Aws.REGION}:" f"{core.Aws.ACCOUNT_ID}:report-group/*"), ], ), iam.PolicyStatement( actions=[ "lambda:CreateFunction", "lambda:DeleteFunction", lambda_invoke_action, "lambda:PublishLayerVersion", "lambda:DeleteLayerVersion", "lambda:GetLayerVersion", "lambda:GetFunctionConfiguration", 
"lambda:GetFunction", "lambda:AddPermission", "lambda:RemovePermission", "lambda:UpdateFunctionConfiguration", ], resources=[ f"arn:{core.Aws.PARTITION}:lambda:{core.Aws.REGION}:{core.Aws.ACCOUNT_ID}:layer:*", f"arn:{core.Aws.PARTITION}:lambda:{core.Aws.REGION}:{core.Aws.ACCOUNT_ID}:function:*", ], ), iam.PolicyStatement( actions=s3_actions, resources=[ blueprint_repository_bucket.bucket_arn, blueprint_repository_bucket.arn_for_objects("*"), f"arn:{core.Aws.PARTITION}:s3:::{assets_s3_bucket_name}/*", ], ), iam.PolicyStatement( actions=[ "codepipeline:CreatePipeline", "codepipeline:UpdatePipeline", "codepipeline:DeletePipeline", "codepipeline:GetPipeline", "codepipeline:GetPipelineState", ], resources= [(f"arn:{core.Aws.PARTITION}:codepipeline:{core.Aws.REGION}:" f"{core.Aws.ACCOUNT_ID}:{pipeline_stack_name}*")], ), iam.PolicyStatement( actions=[ "apigateway:POST", "apigateway:PATCH", "apigateway:DELETE", "apigateway:GET", "apigateway:PUT", ], resources=[ f"arn:{core.Aws.PARTITION}:apigateway:{core.Aws.REGION}::/restapis/*", f"arn:{core.Aws.PARTITION}:apigateway:{core.Aws.REGION}::/restapis", f"arn:{core.Aws.PARTITION}:apigateway:{core.Aws.REGION}::/account", f"arn:{core.Aws.PARTITION}:apigateway:{core.Aws.REGION}::/usageplans", f"arn:{core.Aws.PARTITION}:apigateway:{core.Aws.REGION}::/usageplans/*", ], ), iam.PolicyStatement( actions=[ "logs:CreateLogGroup", "logs:DescribeLogGroups", ], resources=[ f"arn:{core.Aws.PARTITION}:logs:{core.Aws.REGION}:{core.Aws.ACCOUNT_ID}:log-group:*", ], ), iam.PolicyStatement( actions=[ "s3:CreateBucket", "s3:PutEncryptionConfiguration", "s3:PutBucketVersioning", "s3:PutBucketPublicAccessBlock", "s3:PutBucketLogging", ], resources=[f"arn:{core.Aws.PARTITION}:s3:::*"], ), iam.PolicyStatement( actions=[ "s3:PutObject", ], resources=[ f"arn:{core.Aws.PARTITION}:s3:::{assets_s3_bucket_name}/*" ], ), iam.PolicyStatement( actions=[ "sns:CreateTopic", "sns:DeleteTopic", "sns:Subscribe", "sns:Unsubscribe", "sns:GetTopicAttributes", 
"sns:SetTopicAttributes", ], resources= [(f"arn:{core.Aws.PARTITION}:sns:{core.Aws.REGION}:{core.Aws.ACCOUNT_ID}:" f"{pipeline_stack_name}*-*PipelineNotification*")], ), iam.PolicyStatement( actions=[ "events:PutRule", "events:DescribeRule", "events:PutTargets", "events:RemoveTargets", "events:DeleteRule", "events:PutEvents", ], resources=[ f"arn:{core.Aws.PARTITION}:events:{core.Aws.REGION}:{core.Aws.ACCOUNT_ID}:rule/*", f"arn:{core.Aws.PARTITION}:events:{core.Aws.REGION}:{core.Aws.ACCOUNT_ID}:event-bus/*", ], ), ], ) orchestrator_policy.attach_to_role(cloudformation_role) # Lambda function IAM setup lambda_passrole_policy = iam.PolicyStatement( actions=["iam:passrole"], resources=[cloudformation_role.role_arn]) # create sagemaker layer sm_layer = sagemaker_layer(self, blueprint_repository_bucket) # make sure the sagemaker code is uploaded first to the blueprints bucket sm_layer.node.add_dependency(custom_resource) # API Gateway and lambda setup to enable provisioning pipelines through API calls provisioner_apigw_lambda = aws_apigateway_lambda.ApiGatewayToLambda( self, "PipelineOrchestration", lambda_function_props={ "runtime": lambda_.Runtime.PYTHON_3_8, "handler": "index.handler", "code": lambda_.Code.from_asset("lambdas/pipeline_orchestration"), "layers": [sm_layer], "timeout": core.Duration.minutes(10), }, api_gateway_props={ "defaultMethodOptions": { "authorizationType": apigw.AuthorizationType.IAM, }, "restApiName": f"{core.Aws.STACK_NAME}-orchestrator", "proxy": False, "dataTraceEnabled": True, }, ) # add lambda supressions provisioner_apigw_lambda.lambda_function.node.default_child.cfn_options.metadata = suppress_lambda_policies( ) provision_resource = provisioner_apigw_lambda.api_gateway.root.add_resource( "provisionpipeline") provision_resource.add_method("POST") status_resource = provisioner_apigw_lambda.api_gateway.root.add_resource( "pipelinestatus") status_resource.add_method("POST") blueprint_repository_bucket.grant_read( 
provisioner_apigw_lambda.lambda_function) provisioner_apigw_lambda.lambda_function.add_to_role_policy( lambda_passrole_policy) orchestrator_policy.attach_to_role( provisioner_apigw_lambda.lambda_function.role) # Environment variables setup provisioner_apigw_lambda.lambda_function.add_environment( key="BLUEPRINT_BUCKET_URL", value=str(blueprint_repository_bucket.bucket_regional_domain_name), ) provisioner_apigw_lambda.lambda_function.add_environment( key="BLUEPRINT_BUCKET", value=str(blueprint_repository_bucket.bucket_name)) provisioner_apigw_lambda.lambda_function.add_environment( key="ACCESS_BUCKET", value=str(access_logs_bucket.bucket_name)) provisioner_apigw_lambda.lambda_function.add_environment( key="ASSETS_BUCKET", value=str(assets_s3_bucket_name)) provisioner_apigw_lambda.lambda_function.add_environment( key="CFN_ROLE_ARN", value=str(cloudformation_role.role_arn)) provisioner_apigw_lambda.lambda_function.add_environment( key="PIPELINE_STACK_NAME", value=pipeline_stack_name) provisioner_apigw_lambda.lambda_function.add_environment( key="NOTIFICATION_EMAIL", value=notification_email.value_as_string) provisioner_apigw_lambda.lambda_function.add_environment( key="REGION", value=core.Aws.REGION) provisioner_apigw_lambda.lambda_function.add_environment( key="IS_MULTI_ACCOUNT", value=str(multi_account)) # if multi account if multi_account: provisioner_apigw_lambda.lambda_function.add_environment( key="DEV_ACCOUNT_ID", value=dev_account_id.value_as_string) provisioner_apigw_lambda.lambda_function.add_environment( key="DEV_ORG_ID", value=dev_org_id.value_as_string) provisioner_apigw_lambda.lambda_function.add_environment( key="STAGING_ACCOUNT_ID", value=staging_account_id.value_as_string) provisioner_apigw_lambda.lambda_function.add_environment( key="STAGING_ORG_ID", value=staging_org_id.value_as_string) provisioner_apigw_lambda.lambda_function.add_environment( key="PROD_ACCOUNT_ID", value=prod_account_id.value_as_string) 
provisioner_apigw_lambda.lambda_function.add_environment( key="PROD_ORG_ID", value=prod_org_id.value_as_string) provisioner_apigw_lambda.lambda_function.add_environment( key="ECR_REPO_NAME", value=ecr_repo_name) provisioner_apigw_lambda.lambda_function.add_environment( key="ECR_REPO_ARN", value=ecr_repo_arn) provisioner_apigw_lambda.lambda_function.add_environment( key="LOG_LEVEL", value="DEBUG") cfn_policy_for_lambda = orchestrator_policy.node.default_child cfn_policy_for_lambda.cfn_options.metadata = { "cfn_nag": { "rules_to_suppress": [{ "id": "W76", "reason": "A complex IAM policy is required for this resource.", }] } } # Codepipeline with Git source definitions ### source_output = codepipeline.Artifact() # processing git_address to retrieve repo name repo_name_split = core.Fn.split("/", git_address.value_as_string) repo_name = core.Fn.select(5, repo_name_split) # getting codecommit repo cdk object using 'from_repository_name' repo = codecommit.Repository.from_repository_name( self, "AWSMLOpsFrameworkRepository", repo_name) codebuild_project = codebuild.PipelineProject( self, "Take config file", build_spec=codebuild.BuildSpec.from_object({ "version": "0.2", "phases": { "build": { "commands": [ "ls -a", "aws lambda invoke --function-name " + provisioner_apigw_lambda.lambda_function. 
function_name + " --payload fileb://mlops-config.json response.json" + " --invocation-type RequestResponse", ] } }, }), ) # Defining a Codepipeline project with CodeCommit as source codecommit_pipeline = codepipeline.Pipeline( self, "MLOpsCodeCommitPipeline", stages=[ codepipeline.StageProps( stage_name="Source", actions=[ codepipeline_actions.CodeCommitSourceAction( action_name="CodeCommit", repository=repo, branch="main", output=source_output, ) ], ), codepipeline.StageProps( stage_name="TakeConfig", actions=[ codepipeline_actions.CodeBuildAction( action_name="provision_pipeline", input=source_output, outputs=[], project=codebuild_project, ) ], ), ], cross_account_keys=False, ) codecommit_pipeline.add_to_role_policy( iam.PolicyStatement( actions=[lambda_invoke_action], resources=[ provisioner_apigw_lambda.lambda_function.function_arn ], )) codebuild_project.add_to_role_policy( iam.PolicyStatement( actions=[lambda_invoke_action], resources=[ provisioner_apigw_lambda.lambda_function.function_arn ], )) pipeline_child_nodes = codecommit_pipeline.node.find_all() pipeline_child_nodes[1].node.default_child.cfn_options.metadata = { "cfn_nag": { "rules_to_suppress": [ { "id": "W35", "reason": "This is a managed bucket generated by CDK for codepipeline.", }, { "id": "W51", "reason": "This is a managed bucket generated by CDK for codepipeline.", }, ] } } # custom resource for operational metrics### metrics_mapping = core.CfnMapping( self, "AnonymousData", mapping={"SendAnonymousData": { "Data": "Yes" }}) metrics_condition = core.CfnCondition( self, "AnonymousDatatoAWS", expression=core.Fn.condition_equals( metrics_mapping.find_in_map("SendAnonymousData", "Data"), "Yes"), ) helper_function = lambda_.Function( self, "SolutionHelper", code=lambda_.Code.from_asset("lambdas/solution_helper"), handler="lambda_function.handler", runtime=lambda_.Runtime.PYTHON_3_8, timeout=core.Duration.seconds(60), ) helper_function.node.default_child.cfn_options.metadata = 
suppress_lambda_policies( ) create_id_function = core.CustomResource( self, "CreateUniqueID", service_token=helper_function.function_arn, properties={"Resource": "UUID"}, resource_type="Custom::CreateUUID", ) send_data_function = core.CustomResource( self, "SendAnonymousData", service_token=helper_function.function_arn, properties={ "Resource": "AnonymousMetric", "UUID": create_id_function.get_att_string("UUID"), "gitSelected": git_address.value_as_string, "Region": core.Aws.REGION, "SolutionId": "SO0136", "Version": "%%VERSION%%", }, resource_type="Custom::AnonymousData", ) core.Aspects.of(helper_function).add( ConditionalResources(metrics_condition)) core.Aspects.of(create_id_function).add( ConditionalResources(metrics_condition)) core.Aspects.of(send_data_function).add( ConditionalResources(metrics_condition)) # If user chooses Git as pipeline provision type, create codepipeline with Git repo as source core.Aspects.of(repo).add(ConditionalResources(git_address_provided)) core.Aspects.of(codecommit_pipeline).add( ConditionalResources(git_address_provided)) core.Aspects.of(codebuild_project).add( ConditionalResources(git_address_provided)) # Create Template Interface paramaters_list = [ notification_email.logical_id, git_address.logical_id, existing_bucket.logical_id, existing_ecr_repo.logical_id, ] # if multi account if multi_account: paramaters_list.extend([ dev_account_id.logical_id, dev_org_id.logical_id, staging_account_id.logical_id, staging_org_id.logical_id, prod_account_id.logical_id, prod_org_id.logical_id, ]) paramaters_labels = { f"{notification_email.logical_id}": { "default": "Notification Email (Required)" }, f"{git_address.logical_id}": { "default": "CodeCommit Repo URL Address (Optional)" }, f"{existing_bucket.logical_id}": { "default": "Name of an Existing S3 Bucket (Optional)" }, f"{existing_ecr_repo.logical_id}": { "default": "Name of an Existing Amazon ECR repository (Optional)" }, } if multi_account: paramaters_labels.update({ 
f"{dev_account_id.logical_id}": { "default": "Development Account ID (Required)" }, f"{dev_org_id.logical_id}": { "default": "Development Account Organizational Unit ID (Required)" }, f"{staging_account_id.logical_id}": { "default": "Staging Account ID (Required)" }, f"{staging_org_id.logical_id}": { "default": "Staging Account Organizational Unit ID (Required)" }, f"{prod_account_id.logical_id}": { "default": "Production Account ID (Required)" }, f"{prod_org_id.logical_id}": { "default": "Production Account Organizational Unit ID (Required)" }, }) self.template_options.metadata = { "AWS::CloudFormation::Interface": { "ParameterGroups": [{ "Label": { "default": "MLOps Framework Settings" }, "Parameters": paramaters_list, }], "ParameterLabels": paramaters_labels, } } # Outputs # core.CfnOutput( self, id="BlueprintsBucket", value= f"https://s3.console.aws.amazon.com/s3/buckets/{blueprint_repository_bucket.bucket_name}", description="S3 Bucket to upload MLOps Framework Blueprints", ) core.CfnOutput( self, id="AssetsBucket", value= f"https://s3.console.aws.amazon.com/s3/buckets/{assets_s3_bucket_name}", description="S3 Bucket to upload model artifact", ) core.CfnOutput( self, id="ECRRepoName", value=ecr_repo_name, description="Amazon ECR repository's name", ) core.CfnOutput( self, id="ECRRepoArn", value=ecr_repo_arn, description="Amazon ECR repository's arn", )
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    """Create the IoT Greengrass CI/CD workshop stacks.

    Provisions two CodePipelines:

    * a *canary* pipeline: CodeCommit source -> CodeBuild build/package ->
      CodeBuild canary deployment to Greengrass, and
    * a *prod* pipeline: S3 source (deploy_params.zip) -> CodeBuild
      production deployment.

    Also creates the CodeCommit repo, two versioned S3 buckets (prod source
    and prod deploy parameters) and SSM parameters exposing their names so
    the build jobs can locate them at run time.

    :param scope: Parent CDK construct.
    :param id: Logical id of this stack.
    :param kwargs: Must include ``env`` (used for ``kwargs['env'].region``);
        remaining kwargs are forwarded to the base class.
    """
    super().__init__(scope, id, **kwargs)
    # Source repository for the canary pipeline.
    code = codecommit.Repository(
        self, "CodeRepo",
        repository_name="iot-gg-cicd-workshop-repo")
    # Bucket holding deploy_params.zip; also the S3 source of the prod pipeline.
    prod_deploy_param_bucket = s3.Bucket(
        self, "ProdDeployBucket",
        versioned=True,
    )
    # Bucket from which the prod deploy job pulls prod_deploy.zip.
    prod_source_bucket = s3.Bucket(
        self, "ProdSourceBucket",
        versioned=True,
    )
    # Publish both bucket names in SSM so buildspecs can resolve them by path.
    ssm.StringParameter(
        self, "ProdSourceBucketParameter",
        parameter_name="/iot-gg-cicd-workshop/s3/prod_source_bucket",
        string_value=prod_source_bucket.bucket_name,
    )
    ssm.StringParameter(
        self, "ProdDeployBucketParameter",
        parameter_name="/iot-gg-cicd-workshop/s3/prod_deploy_param_bucket",
        string_value=prod_deploy_param_bucket.bucket_name,
    )
    # Build/package project; buildspec.yml lives in the source repo.
    # NOTE(review): kwargs['env'] will raise KeyError if the stack is created
    # without an env — presumably always supplied by the app; confirm.
    cdk_build = codebuild.PipelineProject(
        self, "Build",
        project_name="iot-gg-cicd-workshop-build",
        build_spec=codebuild.BuildSpec.from_source_filename(
            "buildspec.yml"),
        environment_variables={
            "AWS_DEFAULT_REGION":
            codebuild.BuildEnvironmentVariable(value=kwargs['env'].region)
        })
    # add_policies attaches the named AWS managed policies to the project role
    # (helper defined elsewhere in this file).
    add_policies(cdk_build, [
        "AWSCloudFormationFullAccess",
        "AmazonSSMFullAccess",
        "AmazonS3FullAccess",
        "AWSLambdaFullAccess",
        "IAMFullAccess",
    ])
    # Canary deployment project; deployspec.yml lives in the build output.
    cdk_deploy_canary = codebuild.PipelineProject(
        self, "Deploy",
        project_name="iot-gg-cicd-workshop-deploy-canary",
        build_spec=codebuild.BuildSpec.from_source_filename(
            "deployspec.yml"),
        environment_variables={
            "AWS_DEFAULT_REGION":
            codebuild.BuildEnvironmentVariable(value=kwargs['env'].region)
        })
    add_policies(cdk_deploy_canary, [
        "AWSCloudFormationFullAccess", "AWSGreengrassFullAccess",
        "AmazonSSMFullAccess", "ResourceGroupsandTagEditorReadOnlyAccess",
        "AWSLambdaFullAccess", "AWSIoTFullAccess"
    ])
    source_output = codepipeline.Artifact()
    cdk_build_output = codepipeline.Artifact("CdkBuildOutput")
    # Canary pipeline: Source -> Build/Package/Deploy Lambda -> Greengrass canary.
    codepipeline.Pipeline(
        self, "Pipeline",
        pipeline_name="iot-gg-cicd-workshop-pipeline-canary",
        stages=[
            codepipeline.StageProps(
                stage_name="Source",
                actions=[
                    codepipeline_actions.CodeCommitSourceAction(
                        action_name="CodeCommit_Source",
                        repository=code,
                        output=source_output)
                ]),
            codepipeline.StageProps(
                stage_name="Build_Package_Deploy_Lambda",
                actions=[
                    codepipeline_actions.CodeBuildAction(
                        action_name="Build_Package_Deploy",
                        project=cdk_build,
                        input=source_output,
                        outputs=[cdk_build_output])
                ]),
            codepipeline.StageProps(
                stage_name="Deploy_GreenGrass_Canary",
                actions=[
                    codepipeline_actions.CodeBuildAction(
                        action_name="Deploy_Canary",
                        project=cdk_deploy_canary,
                        input=cdk_build_output)
                ]),
        ])
    # Prod deployment project. The install phase fetches prod_deploy.zip from
    # the bucket advertised in SSM, unpacks it and runs `make clean init`;
    # the build phase runs the Greengrass prod deployment.
    # NOTE(review): the "environment" key here sits INSIDE the buildspec
    # object passed to BuildSpec.from_object; CodeBuild buildspecs have no
    # "environment" key, so buildImage=STANDARD_2_0 likely has no effect —
    # it normally belongs in the PipelineProject's `environment` parameter.
    # TODO confirm intent before changing behavior.
    cdk_deploy_prod = codebuild.PipelineProject(
        self, "DeployProd",
        project_name="iot-gg-cicd-workshop-deploy-main",
        build_spec=codebuild.BuildSpec.from_object(
            dict(
                version="0.2",
                phases=dict(
                    install=dict(commands=[
                        "apt-get install zip",
                        "PROD_SOURCE_BUCKET=$(aws ssm get-parameter --name '/iot-gg-cicd-workshop/s3/prod_source_bucket' --with-decryption --query 'Parameter.Value' --output text)",
                        "aws s3 cp s3://$PROD_SOURCE_BUCKET/prod_deploy.zip prod_deploy.zip",
                        "unzip -o prod_deploy.zip",
                        "ls -la",
                        "make clean init"
                    ]),
                    build=dict(commands=[
                        "ls -la",
                        "make deploy-greengrass-prod",
                    ])),
                artifacts={
                    "base-directory": ".",
                    "files": ["**/*"]
                },
                environment=dict(
                    buildImage=codebuild.LinuxBuildImage.STANDARD_2_0))))
    add_policies(cdk_deploy_prod, [
        "AWSCloudFormationFullAccess", "AWSGreengrassFullAccess",
        "AmazonSSMFullAccess", "ResourceGroupsandTagEditorReadOnlyAccess",
        "AWSLambdaFullAccess"
    ])
    prod_source_output = codepipeline.Artifact()
    # Prod pipeline: S3 source (deploy_params.zip) -> Greengrass prod deploy.
    codepipeline.Pipeline(
        self, "PipelineProd",
        pipeline_name="iot-gg-cicd-workshop-pipeline-main",
        stages=[
            codepipeline.StageProps(
                stage_name="Source",
                actions=[
                    codepipeline_actions.S3SourceAction(
                        action_name="S3_Source",
                        bucket=prod_deploy_param_bucket,
                        bucket_key="deploy_params.zip",
                        output=prod_source_output)
                ]),
            codepipeline.StageProps(
                stage_name="Deploy_GreenGrass_Prod",
                actions=[
                    codepipeline_actions.CodeBuildAction(
                        action_name="Deploy_Prod",
                        project=cdk_deploy_prod,
                        input=prod_source_output)
                ]),
        ])
    # Grant the build roles the bucket access their buildspecs need:
    # canary writes prod artifacts, prod only reads them.
    prod_source_bucket.grant_read_write(cdk_deploy_canary.role)
    prod_source_bucket.grant_read(cdk_deploy_prod.role)
    prod_deploy_param_bucket.grant_read_write(cdk_deploy_canary.role)
def __init__(
        self,
        scope: core.Stack,
        prefix: str,
        pipeline_params: PipelineParameters,
        lambda_params: LambdaParameters,
        vpc_params: VpcParameters
):
    """
    AWS CDK package that helps deploying a lambda function.

    Wires up a complete CodeCommit -> CodeBuild -> Lambda CI/CD loop:
    a repository seeded with an initial commit, a placeholder Lambda
    function, an artifacts bucket, a CodeBuild project that installs
    dependencies / runs tests / pushes new code to the function, and a
    two-stage CodePipeline tying it together.

    :param scope: A scope in which resources shall be created.
    :param prefix: Prefix for all of your resource IDs and names.
    :param pipeline_params: Parameters, letting you supply ssh key for accessing remote repositories.
    :param lambda_params: Parameters, focusing on the Lambda function itself.
    :param vpc_params: Parameters, focused on Virtual Private Cloud settings.
    """
    # CodeCommit repository to store your function source code.
    self.project_repository = aws_codecommit.Repository(
        scope,
        prefix + 'CiCdLambdaCodeCommitRepo',
        repository_name=prefix + 'CiCdLambdaCodeCommitRepo',
    )
    # The lambda function for which this package is made.
    # Starts with an inline "Hello, World!" body; the pipeline later
    # replaces the code via lambda:UpdateFunctionCode.
    self.function = aws_lambda.Function(
        scope,
        prefix + 'Function',
        code=aws_lambda.Code.from_inline(
            'def runner():\n'
            ' return \'Hello, World!\''
        ),
        handler=lambda_params.lambda_handler,
        runtime=lambda_params.lambda_runtime,
        description=f'Lambda function {prefix}.',
        environment=lambda_params.environment,
        function_name=prefix,
        memory_size=lambda_params.lambda_memory,
        # Caps concurrency at 5 — NOTE(review): hard-coded; presumably a
        # safety default rather than a tuned value. Confirm before reuse.
        reserved_concurrent_executions=5,
        role=lambda_params.execution_role,
        security_groups=vpc_params.security_groups,
        timeout=core.Duration.seconds(lambda_params.lambda_timeout),
        vpc=vpc_params.vpc,
        vpc_subnets=aws_ec2.SubnetSelection(subnets=vpc_params.subnets)
    )
    # Create alarms for the function (only when an SNS topic is supplied).
    if lambda_params.alarms_sns_topic:
        self.alarms = LambdaAlarms(scope, prefix, lambda_params.alarms_sns_topic, self.function)
    else:
        self.alarms = None
    # Convert bucket name to an S3 friendly one
    # (__convert is defined elsewhere on this class).
    bucket_name = self.__convert(prefix + 'CiCdLambdaArtifactsBucket')
    self.bucket = EmptyS3Bucket(
        scope,
        prefix + 'CiCdLambdaDeploymentBucket',
        bucket_name=bucket_name
    )
    # Create a BuildSpec object for CodeBuild, parameterised with the SSH
    # secret and the install/test/pre-build hooks from pipeline_params.
    self.buildspec = BuildSpecObject(
        prefix,
        self.bucket,
        pipeline_params.ssh_params.secret_id,
        pipeline_params.ssh_params.private_key,
        pipeline_params.install_args,
        pipeline_params.test_args,
        pipeline_params.custom_pre_build_commands
    )
    # CodeBuild project, that installs functions dependencies, runs tests and deploys it to Lambda.
    self.code_build_project = aws_codebuild.PipelineProject(
        scope,
        prefix + 'CiCdLambdaCodeBuildProject',
        project_name=prefix + 'CiCdLambdaCodeBuildProject',
        environment=aws_codebuild.BuildEnvironment(
            build_image=aws_codebuild.LinuxBuildImage.STANDARD_3_0,
            compute_type=aws_codebuild.ComputeType.SMALL,
            # privileged is required for builds that use docker-in-docker.
            privileged=True
        ),
        build_spec=aws_codebuild.BuildSpec.from_object(self.buildspec.get_object()),
    )
    # Adding permissions that allow CodeBuild to do the aforementioned things
    # (read/write artifacts and push new code to the Lambda function).
    self.code_build_project.role.add_to_policy(
        statement=aws_iam.PolicyStatement(
            actions=[
                's3:*',
                'lambda:UpdateFunctionCode',
            ],
            resources=['*'],
            effect=aws_iam.Effect.ALLOW)
    )
    # If a secret is provided, we allow CodeBuild to read it.
    if pipeline_params.ssh_params.secret_arn is not None:
        self.code_build_project.role.add_to_policy(
            statement=aws_iam.PolicyStatement(
                actions=[
                    'secretsmanager:GetSecretValue'
                ],
                resources=[pipeline_params.ssh_params.secret_arn],
                effect=aws_iam.Effect.ALLOW)
        )
    # If KMS key is provided, we allow CodeBuild to decrypt using it
    # (needed when the SSH secret is encrypted with a customer-managed key).
    if pipeline_params.ssh_params.kms_key_arn is not None:
        self.code_build_project.role.add_to_policy(
            statement=aws_iam.PolicyStatement(
                actions=[
                    "kms:Decrypt"
                ],
                effect=aws_iam.Effect.ALLOW,
                resources=[pipeline_params.ssh_params.kms_key_arn]
            )
        )
    # Push the initial commit to CodeCommit (custom resource defined elsewhere).
    self.initial_commit = InitialCommit(
        scope, prefix, self.project_repository
    ).get_resource()
    self.source_artifact = aws_codepipeline.Artifact(artifact_name=prefix + 'CiCdLambdaSourceArtifact')
    # CodePipeline source action to read from CodeCommit.
    self.source_action = aws_codepipeline_actions.CodeCommitSourceAction(
        repository=self.project_repository,
        branch='master',
        action_name='CodeCommitSource',
        run_order=1,
        # EVENTS: trigger via CloudWatch Events on push rather than polling.
        trigger=aws_codepipeline_actions.CodeCommitTrigger.EVENTS,
        output=self.source_artifact
    )
    # CodePipeline build action that uses the CodeBuild project.
    self.build_action = aws_codepipeline_actions.CodeBuildAction(
        input=self.source_artifact,
        project=self.code_build_project,
        action_name='BuildAction',
        run_order=1
    )
    # CodePipeline pipeline that executes both actions.
    self.codecommit_to_lambda_pipeline = aws_codepipeline.Pipeline(
        scope,
        prefix + 'CiCdLambdaPipeline',
        pipeline_name=prefix + 'CiCdLambdaPipeline',
        artifact_bucket=self.bucket,
        stages=[
            aws_codepipeline.StageProps(
                stage_name='SourceStage',
                actions=[self.source_action]
            ),
            aws_codepipeline.StageProps(
                stage_name='BuildStage',
                actions=[self.build_action]
            )
        ]
    )
def __init__(
    self,
    scope: core.Construct,
    id: str,
    branch: str,
    sandbox_account: str,
    **kwargs
) -> None:
    """Init the Construct for creating hd-auto-service-catalog.

    Builds the full CI/CD machinery for a Service Catalog portfolio:
    CloudFormation parameters, an artifacts bucket (with an emptying
    custom resource), the CodeCommit repo, a Lambda layer, a CodeBuild
    test project, the portfolio itself plus principal associations, the
    IAM policies for three management Lambdas, and a five-stage
    CodePipeline that tests templates and synchronises the catalog.

    Args:
        scope: CDK Parent Stack aap.py
        id: Name of the stack: "hd-auto-service-catalog"
        branch: string for A/B Deployment
        sandbox_account: Sandbox account id
        **kwargs:
    """
    super().__init__(scope, id, **kwargs)
    #
    # The code that defines your stack goes here
    # def id_generator(size=6, chars=string.ascii_uppercase + string.digits):
    #     string = "".join(random.choice(chars) for _ in range(size)).lower()
    #     return string
    #
    # branch = branch
    #
    # ##############################################################
    # Tagging List
    # ##############################################################
    # Collects every taggable construct; tagged in one pass at the end.
    tagging_list = []
    # ##############################################################
    # Account List
    # ##############################################################
    # account_list = ["431892011317"]
    # ##############################################################
    # Parameters
    # ##############################################################
    # ===============================
    # App name
    app_name = core.CfnParameter(
        self,
        id="AppName-{}".format(branch),
        description="Name of the app",
        type="String",
        default="hd-auto-cicd-service-catalog",
    )
    # ===============================
    # Environment name
    env_name = core.CfnParameter(
        self,
        id="EnvName-{}".format(branch),
        description="Name of the environment",
        type="String",
        default="auto",
    )
    # ===============================
    # IAM Role and Policy parameter (launch-constraint role name)
    role_name = core.CfnParameter(
        self,
        id="ConstraintRoleName-{}".format(branch),
        description="Name of the launch constraint role",
        type="String",
        default="CrossAccountAdmin",
    )
    # ===============================
    # Principal management lambdas (names of functions living in the
    # sandbox account; referenced later in lambda_policy ARNs).
    unassign_lambda = core.CfnParameter(
        self,
        id="UnassignPrincipalLambdaName-{}".format(branch),
        description="Name of the unassign principal management Lambda",
        type="String",
        default="UnassignPrincipalFromServiceCatalog",
    )
    assign_lambda = core.CfnParameter(
        self,
        id="AssignPrincipalLambdaName-{}".format(branch),
        description="Name of the assign principal management Lambda",
        type="String",
        default="AssignPrincipalToServiceCatalog",
    )
    # ===============================
    # Branch name: master/dmz map to themselves, anything else is treated
    # as "<prefix>-<name>" and mapped to feature/<name>.
    if branch == "master":
        branch_name = "master"
    elif branch == "dmz":
        branch_name = "dmz"
    else:
        branch_name = "feature/{}".format(branch.split("-")[1])
    # ===============================
    # Path name (repo folder holding the Service Catalog products)
    path_name = core.CfnParameter(
        self,
        id="Path-{}".format(branch),
        description="CodeCommit repository folder for Service Catalogs Products",
        type="String",
        default="service_catalog/products/",
    )
    # ===============================
    # Path for the configuration INI
    path_ini = core.CfnParameter(
        self,
        id="ConfigINI-{}".format(branch),
        description="Configuration file path",
        type="String",
        default="service_catalog/config/config_{}.ini".format(branch.split("-")[0]),
    )
    # ===============================
    # Path for the template store (S3 prefix for approved templates)
    template_store = core.CfnParameter(
        self,
        id="TemplateStore-{}".format(branch),
        description="S3 Bucket and Folder evaluated CloudFormation Templates",
        type="String",
        default="template-store/",
    )
    # ##############################################################
    # Artifacts Bucket
    # ##############################################################
    artifact_bucket = _s3.Bucket(
        self,
        id="ArtifactsBucket-{}".format(branch),
        bucket_name="my-sandbox-cicd-build-artifacts-{}".format(
            branch.split("-")[0]
        ),
        removal_policy=core.RemovalPolicy.DESTROY,
    )
    # Permissions for the bucket-emptying Lambda below.
    empty_s3_policy = _iam.PolicyStatement(
        effect=_iam.Effect.ALLOW,
        actions=[
            "s3:DeleteBucket",
            "s3:ListBucket",
            "s3:DeleteObjects",
            "s3:DeleteObject",
        ],
        resources=[artifact_bucket.bucket_arn, artifact_bucket.bucket_arn + "/*",],
    )
    # Custom-resource Lambda that empties the bucket so it can be deleted
    # on stack teardown (Lambda.create_lambda is a project helper).
    empty_bucket_lambda = Lambda.create_lambda(
        self,
        name="EmptyArtifactsBucket-{}".format(branch),
        function_name="EmptyArtifactsBucket-{}".format(branch),
        handler="empty_bucket.empty_bucket",
        code_injection_method=_lambda.Code.asset(path="./src/lambda/empty_bucket/"),
        lambda_runtime=_lambda.Runtime.PYTHON_3_7,
        amount_of_memory=128,
        timeout=30,
        amount_of_retries=0,
        rules_to_invoke=None,
        events_to_invoke=None,
        lambda_layers_to_use=None,
        policy_statements=[empty_s3_policy,],
        log_retention=None,
        environment_vars=[],
    )
    cr_empty_bucket = core.CustomResource(
        self,
        id="CR-EmptyBucket-{}".format(branch),
        service_token=empty_bucket_lambda.lambda_function_object.function_arn,
        properties={"BUCKET_NAME": artifact_bucket.bucket_name,},
        removal_policy=core.RemovalPolicy.DESTROY,
    )
    # Ensure the custom resource is deleted before the bucket it empties.
    cr_empty_bucket.node.add_dependency(artifact_bucket)
    tagging_list.append(cr_empty_bucket)
    # Let Service Catalog read approved templates from the template store.
    artifact_bucket.add_to_resource_policy(
        permission=_iam.PolicyStatement(
            effect=_iam.Effect.ALLOW,
            actions=["s3:GetObject"],
            resources=[artifact_bucket.bucket_arn + "/template-store/*",],
            principals=[_iam.ServicePrincipal("servicecatalog"),],
        )
    )
    tagging_list.append(artifact_bucket)
    # ##############################################################
    # Code repo
    # ##############################################################
    # master owns (creates) the repository; other branches import it.
    if branch == "master":
        service_catalog_git = _code.Repository(
            self,
            id="ServiceCatalogGit",
            repository_name="hd-auto-service-catalog",
            description="This git hosts all templates for the ServiceCatalog and the CICD itself.",
        )
        tagging_list.append(service_catalog_git)
    else:
        service_catalog_git = _code.Repository.from_repository_name(
            self,
            id="ServiceCatalogGit",
            repository_name="hd-auto-service-catalog",
        )
        tagging_list.append(service_catalog_git)
    # ##############################################################
    # Lambda Layer
    # ##############################################################
    # Shared dependency layer used by the three CI/CD Lambdas below.
    source_code = _lambda.Code.from_asset("./src/lambda_layer/")
    layer = _lambda.LayerVersion(
        self,
        id="Python3_7_Layer-{}".format(branch),
        code=source_code,
        compatible_runtimes=[_lambda.Runtime.PYTHON_3_7],
    )
    tagging_list.append(layer)
    # ##############################################################
    # CodeBuild Project
    # ##############################################################
    build_project = _codebuild.PipelineProject(
        self,
        id="BuildProject-{}".format(branch),
        project_name="hd-auto-cicd-service-catalog-{}".format(branch),
        description="Build project for the Service Catalog pipeline",
        environment=_codebuild.BuildEnvironment(
            build_image=_codebuild.LinuxBuildImage.STANDARD_4_0, privileged=True
        ),
        cache=_codebuild.Cache.bucket(artifact_bucket, prefix="codebuild-cache"),
        build_spec=_codebuild.BuildSpec.from_source_filename("./buildspec.yaml"),
    )
    tagging_list.append(build_project)
    # CodeBuild IAM permissions to read write to s3
    artifact_bucket.grant_read_write(build_project)
    # Build and create test runs for templates
    build_project.add_to_role_policy(
        statement=_iam.PolicyStatement(
            effect=_iam.Effect.ALLOW,
            not_actions=["aws-portal:*", "organizations:*"],
            resources=["*"],  # No further restriction due to IAM!
        )
    )
    # ##############################################################
    # Service Catalog
    # ##############################################################
    portfolio = _servicecatalog.CfnPortfolio(
        self,
        id="BasicPortfolio-{}".format(branch),
        display_name="hd-mdp-portfolio-{}".format(branch),
        provider_name="MDP-Team",
        accept_language="en",
        description="""
            This portfolio contains AWS Services combined into technical and functional approved architectures.
            You don't need IAM permissions to run those products. You will use them.
            """,
    )
    # Permissions for the portfolio-cleanup Lambda below.
    remove_portfolio_policy = _iam.PolicyStatement(
        effect=_iam.Effect.ALLOW,
        actions=[
            "servicecatalog:SearchProductsAsAdmin",
            "servicecatalog:DeleteProduct",
            "servicecatalog:DeleteConstraint",
            "servicecatalog:ListConstraintsForPortfolio",
            "servicecatalog:DisassociatePrincipalFromPortfolio",
            "servicecatalog:DisassociateProductFromPortfolio",
        ],
        resources=["*",],
    )
    # IAM management on the launch-constraint role in THIS account.
    iam_policy = _iam.PolicyStatement(
        effect=_iam.Effect.ALLOW,
        actions=[
            "iam:GetRole",
            "iam:PassRole",
            "iam:CreateRole",
            "iam:DeleteRole",
            "iam:ListRoles",
            "iam:PutRolePolicy",
            "iam:DeleteRolePolicy",
            "iam:DeletePolicy",
        ],
        resources=[
            "arn:aws:iam::{}:role/{}".format(
                core.Aws.ACCOUNT_ID, role_name.value_as_string
            ),
        ],
    )
    # Custom-resource Lambda that removes products/constraints from the
    # portfolio on stack teardown.
    remove_products_lambda = Lambda.create_lambda(
        self,
        name="RemoveProductsFromPortfolio-{}".format(branch),
        function_name="RemoveProductsFromPortfolio-{}".format(branch),
        handler="remove_portfolio.remove_portfolio",
        code_injection_method=_lambda.Code.asset(
            path="./src/lambda/remove_portfolio/"
        ),
        lambda_runtime=_lambda.Runtime.PYTHON_3_7,
        amount_of_memory=128,
        timeout=30,
        amount_of_retries=0,
        rules_to_invoke=None,
        events_to_invoke=None,
        lambda_layers_to_use=None,
        policy_statements=[remove_portfolio_policy, iam_policy],
        log_retention=None,
        environment_vars=[
            {"Key": "SANDBOX_ACCOUNT_ID", "Value": "{}".format(sandbox_account),}
        ],
    )
    cr_remove_products = core.CustomResource(
        self,
        id="CR-RemoveProductsFromPortfolio-{}".format(branch),
        service_token=remove_products_lambda.lambda_function_object.function_arn,
        properties={"PORTFOLIO_ID": portfolio.ref,},
        removal_policy=core.RemovalPolicy.DESTROY,
    )
    cr_remove_products.node.add_dependency(portfolio)
    iam_role_list = [role_name.value_as_string]
    # NOTE(review): the "master" and non-master branches below currently run
    # identical code; the split exists only for the commented-out portfolio
    # sharing that master is meant to do additionally. Confirm before merging.
    if branch == "master":
        # TODO: Accept Portfolio share principal management
        # for idx, account in enumerate(account_list):
        #     _servicecatalog.CfnPortfolioShare(
        #         self,
        #         id="PortfolioSharing-{}-{}".format(branch, idx),
        #         account_id=account,
        #         portfolio_id=portfolio.ref,
        #         accept_language="en",
        #     )
        for idx, role in enumerate(iam_role_list):
            _servicecatalog.CfnPortfolioPrincipalAssociation(
                self,
                id="PrincipalAssociation-{}-{}".format(branch, idx),
                portfolio_id=portfolio.ref,
                principal_arn="arn:aws:iam::{}:role/{}".format(
                    core.Aws.ACCOUNT_ID, role
                ),
                principal_type="IAM",
                accept_language="en",
            )
        core.CfnOutput(
            self, id="PortfolioId-{}".format(branch), value=portfolio.ref
        )
        tagging_list.append(portfolio)
    else:
        for idx, role in enumerate(iam_role_list):
            _servicecatalog.CfnPortfolioPrincipalAssociation(
                self,
                id="PrincipalAssociation-{}-{}".format(branch, idx),
                portfolio_id=portfolio.ref,
                principal_arn="arn:aws:iam::{}:role/{}".format(
                    core.Aws.ACCOUNT_ID, role
                ),
                principal_type="IAM",
                accept_language="en",
            )
        core.CfnOutput(
            self, id="PortfolioId-{}".format(branch), value=portfolio.ref
        )
        tagging_list.append(portfolio)
    # ##############################################################
    # Lambda Permissions
    # ##############################################################
    # Artifact-bucket access for the sync Lambda.
    s3_policy = _iam.PolicyStatement(
        effect=_iam.Effect.ALLOW,
        actions=[
            "s3:GetObject*",
            "s3:GetBucket*",
            "s3:List*",
            "s3:DeleteObject*",
            "s3:PutObject*",
            "s3:Abort*",
        ],
        resources=[artifact_bucket.bucket_arn, artifact_bucket.bucket_arn + "/*"],
    )
    # Read-only diff/commit access on the repo, region-pinned to eu-central-1.
    codecommit_policy = _iam.PolicyStatement(
        effect=_iam.Effect.ALLOW,
        actions=[
            "codecommit:GetDifferences",
            "codecommit:GetBranch",
            "codecommit:GetCommit",
        ],
        resources=[service_catalog_git.repository_arn],
        conditions={"StringEquals": {"aws:RequestedRegion": "eu-central-1"}},
    )
    codebuild_policy = _iam.PolicyStatement(
        effect=_iam.Effect.ALLOW,
        actions=["codebuild:StartBuild", "codebuild:UpdateProject*"],
        resources=[build_project.project_arn],
        conditions={"StringEquals": {"aws:RequestedRegion": "eu-central-1"}},
    )
    service_catalog_policy = _iam.PolicyStatement(
        effect=_iam.Effect.ALLOW,
        actions=[
            "servicecatalog:CreateProduct",
            "servicecatalog:CreateProvisioningArtifact",
            "servicecatalog:UpdateProvisioningArtifact",
            "servicecatalog:DeleteProvisioningArtifact",
            "servicecatalog:ListProvisioningArtifacts",
            "servicecatalog:ListPortfolios",
            "servicecatalog:SearchProductsAsAdmin",
            "servicecatalog:AssociateProductWithPortfolio",
            "servicecatalog:AssociatePrincipalWithPortfolio",
            "servicecatalog:DisassociatePrincipalFromPortfolio",
            "servicecatalog:DisassociateProductFromPortfolio",
            "servicecatalog:DeleteProduct",
            "servicecatalog:CreatePortfolioShare",
            "servicecatalog:AcceptPortfolioShare",
            "servicecatalog:CreateConstraint",
            "servicecatalog:DeleteConstraint",
            "servicecatalog:ListConstraintsForPortfolio",
        ],
        resources=["*"],
    )
    # Assume the cross-account role in the sandbox account.
    sts_policy = _iam.PolicyStatement(
        effect=_iam.Effect.ALLOW,
        actions=["sts:AssumeRole"],
        resources=[
            "arn:aws:iam::{}:role/{}".format(
                sandbox_account, role_name.value_as_string
            ),
        ],
    )
    codepipeline_policy = _iam.PolicyStatement(
        effect=_iam.Effect.ALLOW,
        actions=[
            "codepipeline:PutJobFailureResult",  # Supports only a wildcard (*) in the policy Resource element.
            "codepipeline:PutJobSuccessResult",  # Supports only a wildcard (*) in the policy Resource element.
        ],
        # https://docs.aws.amazon.com/codepipeline/latest/userguide/permissions-reference.html
        resources=["*"],
        conditions={"StringEquals": {"aws:RequestedRegion": "eu-central-1"}},
    )
    # Manage the two principal-management Lambdas in the sandbox account.
    lambda_policy = _iam.PolicyStatement(
        effect=_iam.Effect.ALLOW,
        actions=[
            "lambda:GetFunction",
            "lambda:CreateFunction",
            "lambda:DeleteFunction",
            "lambda:AddPermission",
            "lambda:RemovePermission",
            "lambda:CreateEventSourceMapping",
            "lambda:DeleteEventSourceMapping",
            "lambda:InvokeFunction",
            "lambda:UpdateFunctionCode",
            "lambda:UpdateFunctionConfiguration",
        ],
        resources=[
            "arn:aws:lambda:{}:{}:function:{}-{}".format(
                core.Aws.REGION,
                sandbox_account,
                unassign_lambda.value_as_string,
                sandbox_account,
            ),
            "arn:aws:lambda:{}:{}:function:{}-{}".format(
                core.Aws.REGION,
                sandbox_account,
                assign_lambda.value_as_string,
                sandbox_account,
            ),
        ],
        conditions={"StringEquals": {"aws:RequestedRegion": "eu-central-1"}},
    )
    # ##############################################################
    # CICD Lambdas
    # ##############################################################
    # ==========================
    # Get Latest Git Meta Data — diffs the last commit and feeds the
    # changed-file lists into the pipeline via its variables namespace.
    git_metadata = Lambda.create_lambda(
        self,
        name="GetLastGitChanges-{}".format(branch),
        function_name="GetLastGitChanges-{}".format(branch,),
        handler="git_metadata.get_changes",
        code_injection_method=_lambda.Code.asset(path="./src/lambda/git_metadata/"),
        lambda_runtime=_lambda.Runtime.PYTHON_3_7,
        amount_of_memory=128,
        timeout=30,
        amount_of_retries=0,
        rules_to_invoke=None,
        events_to_invoke=None,
        lambda_layers_to_use=[layer],
        policy_statements=[
            codecommit_policy,
            codebuild_policy,
            codepipeline_policy,
            service_catalog_policy,
        ],
        log_retention=None,
        environment_vars=[
            {
                "Key": "REPOSITORY_NAME",
                "Value": "{}".format(service_catalog_git.repository_name),
            },
        ],
    )
    # ==========================
    # Principal Management Lambda — assigns/unassigns portfolio principals
    # across accounts.
    principal_management = Lambda.create_lambda(
        self,
        name="PrincipalManagement-{}".format(branch),
        function_name="PrincipalManagement-{}".format(branch),
        handler="principal_management.principal_management",
        code_injection_method=_lambda.Code.asset(
            path="./src/lambda/principal_management/"
        ),
        lambda_runtime=_lambda.Runtime.PYTHON_3_7,
        amount_of_memory=1024,
        timeout=120,
        amount_of_retries=0,
        rules_to_invoke=None,
        events_to_invoke=None,
        lambda_layers_to_use=[layer],
        policy_statements=[
            iam_policy,
            lambda_policy,
            sts_policy,
            service_catalog_policy,
            codepipeline_policy,
            codecommit_policy,
        ],
        log_retention=None,
        environment_vars=[
            {"Key": "SANDBOX_ACCOUNT_ID", "Value": "{}".format(sandbox_account),}
        ],
    )
    # ==========================
    # Sync Service Catalog Lambda — reconciles repo templates with the
    # portfolio (create/update/delete products).
    service_catalog_synchronisation = Lambda.create_lambda(
        self,
        name="UpdateServiceCatalog-{}".format(branch),
        function_name="UpdateServiceCatalog-{}".format(branch),
        handler="sync_catalog.service_catalog_janitor",
        code_injection_method=_lambda.Code.asset(
            path="./src/lambda/update_servicecatalog/"
        ),
        lambda_runtime=_lambda.Runtime.PYTHON_3_7,
        amount_of_memory=1024,
        timeout=120,
        amount_of_retries=0,
        rules_to_invoke=None,
        events_to_invoke=None,
        lambda_layers_to_use=[layer],
        policy_statements=[
            sts_policy,
            service_catalog_policy,
            codepipeline_policy,
            codecommit_policy,
            iam_policy,
            s3_policy,
        ],
        log_retention=None,
        environment_vars=[
            {
                "Key": "LOCAL_ROLE_NAME_SC",
                "Value": "{}".format(role_name.value_as_string),
            },
            {"Key": "SANDBOX_ACCOUNT_ID", "Value": "{}".format(sandbox_account),},
            {
                "Key": "REPOSITORY_NAME",
                "Value": "{}".format(service_catalog_git.repository_name),
            },
            {"Key": "PATH_INI", "Value": "{}".format(path_ini.value_as_string)},
            {"Key": "PATH", "Value": "{}".format(path_name.value_as_string)},
            {"Key": "BUCKET", "Value": "{}".format(artifact_bucket.bucket_name)},
            {
                "Key": "S3_PATH",
                "Value": "{}".format(template_store.value_as_string),
            },
        ],
    )
    # ##############################################################
    # CodePipeline
    # ##############################################################
    # General output
    source_output = _codepipeline.Artifact("git-change")
    tested_source_files = _codepipeline.Artifact("tested-cfn")
    # Five stages: source -> diff via Lambda -> CodeBuild tests ->
    # principal management -> catalog sync. Stage variables flow through
    # the "source" and "filtered_source" namespaces.
    cicd_pipeline = _codepipeline.Pipeline(
        self,
        id="ServiceCatalogPipeline-{}".format(branch),
        pipeline_name="ServiceCatalog-CICD-{}".format(branch),
        artifact_bucket=artifact_bucket,
        stages=[
            _codepipeline.StageProps(
                stage_name="Source_CFN-Templates",
                actions=[
                    _codepipeline_actions.CodeCommitSourceAction(
                        action_name="SourceControlCFNTemplates",
                        output=source_output,
                        repository=service_catalog_git,
                        variables_namespace="source",
                        branch=branch_name,
                    ),
                ],
            ),
            _codepipeline.StageProps(
                stage_name="Getting_CFN-Template",
                actions=[
                    _codepipeline_actions.LambdaInvokeAction(
                        action_name="GettingCFNTemplate",
                        lambda_=git_metadata.lambda_function_object,
                        user_parameters={
                            "before_commit": "",
                            "after_commit": "#{source.CommitId}",
                        },
                        variables_namespace="filtered_source",
                    )
                ],
            ),
            _codepipeline.StageProps(
                stage_name="Testing_CFN-Template",
                actions=[
                    _codepipeline_actions.CodeBuildAction(
                        type=_codepipeline_actions.CodeBuildActionType.BUILD,
                        action_name="TestingCFNTemplates",
                        project=build_project,
                        input=source_output,
                        outputs=[tested_source_files],
                        environment_variables={
                            "PIPELINE_NAME": _codebuild.BuildEnvironmentVariable(
                                value="ServiceCatalog-CICD-{}".format(branch),
                                type=_codebuild.BuildEnvironmentVariableType.PLAINTEXT,
                            ),
                            "FILES_ADDED": _codebuild.BuildEnvironmentVariable(
                                value="#{filtered_source.added_files}",
                                type=_codebuild.BuildEnvironmentVariableType.PLAINTEXT,
                            ),
                            "FILES_MODIFIED": _codebuild.BuildEnvironmentVariable(
                                value="#{filtered_source.modified_files}",
                                type=_codebuild.BuildEnvironmentVariableType.PLAINTEXT,
                            ),
                            "FILES_DELETED": _codebuild.BuildEnvironmentVariable(
                                value="#{filtered_source.deleted_files}",
                                type=_codebuild.BuildEnvironmentVariableType.PLAINTEXT,
                            ),
                            "JOB_ID": _codebuild.BuildEnvironmentVariable(
                                value="#{filtered_source.job_id}",
                                type=_codebuild.BuildEnvironmentVariableType.PLAINTEXT,
                            ),
                            "REPOSITORY_BRANCH": _codebuild.BuildEnvironmentVariable(
                                value="#{source.BranchName}",
                                type=_codebuild.BuildEnvironmentVariableType.PLAINTEXT,
                            ),
                            "REPOSITORY_NAME": _codebuild.BuildEnvironmentVariable(
                                value="#{source.RepositoryName}",
                                type=_codebuild.BuildEnvironmentVariableType.PLAINTEXT,
                            ),
                        },
                    )
                ],
            ),
            _codepipeline.StageProps(
                stage_name="Principal_Management",
                actions=[
                    _codepipeline_actions.LambdaInvokeAction(
                        action_name="PrincipalManagement",
                        lambda_=principal_management.lambda_function_object,
                        user_parameters={
                            "job_id": "#{filtered_source.job_id}",
                            "commit_id": "#{filtered_source.commit_id}",
                            "portfolio_id": portfolio.ref,
                        },
                    )
                ],
            ),
            _codepipeline.StageProps(
                stage_name="Update_Servicecatalog",
                actions=[
                    _codepipeline_actions.LambdaInvokeAction(
                        action_name="UpdateServiceCatalog",
                        lambda_=service_catalog_synchronisation.lambda_function_object,
                        inputs=[source_output],
                        user_parameters={
                            "modified_files": "#{filtered_source.modified_files}",
                            "added_files": "#{filtered_source.added_files}",
                            "deleted_files": "#{filtered_source.deleted_files}",
                            "job_id": "#{filtered_source.job_id}",
                            "commit_id": "#{filtered_source.commit_id}",
                            "portfolio_id": portfolio.ref,
                        },
                    )
                ],
            ),
        ],
    )
    # The pipeline role itself needs branch/commit reads for the source stage.
    cicd_pipeline.add_to_role_policy(
        statement=_iam.PolicyStatement(
            effect=_iam.Effect.ALLOW,
            actions=["codecommit:GetBranch", "codecommit:GetCommit"],
            resources=[service_catalog_git.repository_arn],
        )
    )
    tagging_list.append(cicd_pipeline)
    # ##############################################################
    # Tag resources
    # ##############################################################
    # Tags.tag_resources is a project helper applying app/env tags in bulk.
    Tags.tag_resources(
        resources_list=tagging_list,
        keys_list=["app", "env"],
        values_list=[app_name.value_as_string, env_name.value_as_string],
    )
    # Export the layer ARN for consumers in other stacks.
    _ssm.StringParameter(
        self,
        id="LambdaLayerExport-{}".format(branch),
        parameter_name="/hd/mdp/{}/lambda/layer-pandas-numpy-servicecatalog".format(
            branch
        ),
        description="Lambda Layer ARN",
        string_value=layer.layer_version_arn,
    )
def __init__(self, scope: core.Construct, id: str, stage: str, **kwargs) -> None:
    """CMS deployment pipeline stack.

    Creates a CodeBuild deployment role with a scoped inline policy, the
    environment/state S3 buckets, and a three-stage CodePipeline
    (CodeCommit Source -> DeployApi -> DeployApps).

    :param scope: parent construct.
    :param id: construct id.
    :param stage: deployment stage; "prod" deploys the "master" branch,
        "dev" deploys "development", any other value is used as the
        branch name verbatim.
    """
    super().__init__(scope, id, **kwargs)

    # Map the stage name to the CodeCommit branch it deploys from.
    if stage == "prod":
        branch = "master"
    elif stage == "dev":
        branch = "development"
    else:
        branch = stage

    project_name = self.node.try_get_context('project')

    # Role assumed by both CodeBuild deployment projects below.
    codebuild_role = iam.Role(
        self,
        "CodeBuildDeploymentRole",
        assumed_by=iam.ServicePrincipal(service="codebuild.amazonaws.com"),
        description="Allows CodeBuild to deploy the cms",
        role_name=project_name + "-deployment-role-" + stage,
    )

    # Inline deployment policy attached to the role above. The construct
    # registers itself on the role; no local binding is needed.
    # NOTE: iam.Effect.ALLOW (the enum member) replaces iam.Effect("ALLOW"),
    # which relied on enum-by-value lookup and is fragile.
    iam.Policy(
        self,
        "DeploymentPolicy",
        roles=[codebuild_role],
        policy_name=project_name + "-deployment-policy-" + stage,
        statements=[
            iam.PolicyStatement(
                sid="IamAccess",
                effect=iam.Effect.ALLOW,
                resources=[
                    "arn:aws:iam::" + os.environ["CDK_DEFAULT_ACCOUNT"] + ":*"
                ],
                actions=[
                    "iam:AttachRolePolicy",
                    "iam:CreateRole",
                    "iam:DeleteRole",
                    "iam:DetachRolePolicy",
                    "iam:GetRole",
                    "iam:PassRole",
                ],
            ),
            iam.PolicyStatement(
                sid="LambdaAccess",
                effect=iam.Effect.ALLOW,
                resources=[
                    "arn:aws:lambda:"
                    + os.environ["CDK_DEFAULT_REGION"]
                    + ":"
                    + os.environ["CDK_DEFAULT_ACCOUNT"]
                    + ":function:*",
                ],
                actions=[
                    "lambda:AddPermission",
                    "lambda:AddPermission20150331v2",
                    "lambda:CreateFunction",
                    "lambda:CreateFunction20150331",
                    "lambda:DeleteFunction",
                    "lambda:DeleteFunction20150331",
                    "lambda:GetFunctionConfiguration",
                    "lambda:GetFunctionConfiguration20150331v2",
                    "lambda:UpdateFunctionCode",
                    "lambda:UpdateFunctionCode20150331v2",
                    "lambda:UpdateFunctionConfiguration",
                    "lambda:UpdateFunctionConfiguration20150331v2",
                    "lambda:PutFunctionConcurrency",
                ],
            ),
            iam.PolicyStatement(
                sid="ExternalLambdaPermissions",
                effect=iam.Effect.ALLOW,
                # NOTE(review): hard-coded external account/region for a
                # shared Lambda layer — confirm this is intentional.
                resources=[
                    "arn:aws:lambda:eu-central-1:632417926021:layer:*"
                ],
                actions=[
                    "lambda:GetLayerVersion",
                ],
            ),
            iam.PolicyStatement(
                sid="S3Access",
                effect=iam.Effect.ALLOW,
                resources=["arn:aws:s3:::*"],
                actions=[
                    "s3:CreateBucket",
                    "s3:ListBucket",
                    "s3:PutAccelerateConfiguration",
                    "s3:PutBucketCORS",
                    "s3:PutBucketNotification",
                    "s3:PutObject",
                ],
            ),
            iam.PolicyStatement(
                sid="KmsAccess",
                effect=iam.Effect.ALLOW,
                resources=[
                    "arn:aws:kms:"
                    + os.environ["CDK_DEFAULT_REGION"]
                    + ":"
                    + os.environ["CDK_DEFAULT_ACCOUNT"]
                    + ":key/*"
                ],
                actions=[
                    "kms:CreateGrant",
                    "kms:Decrypt",
                    "kms:DescribeKey",
                    "kms:Encrypt",
                ],
            ),
            iam.PolicyStatement(
                sid="ApiGwAccess",
                effect=iam.Effect.ALLOW,
                resources=[
                    "arn:aws:apigateway:"
                    + os.environ["CDK_DEFAULT_REGION"]
                    + "::*"
                ],
                actions=[
                    "apigateway:DELETE",
                    "apigateway:GET",
                    "apigateway:POST",
                    "apigateway:PUT",
                ],
            ),
            iam.PolicyStatement(
                sid="CloudFrontAccess",
                effect=iam.Effect.ALLOW,
                resources=["*"],
                actions=[
                    "cloudfront:CreateDistribution",
                    "cloudfront:DeleteDistribution",
                    "cloudfront:GetDistributionConfig",
                    "cloudfront:UpdateDistribution",
                ],
            ),
            iam.PolicyStatement(
                sid="CognitoAccess",
                effect=iam.Effect.ALLOW,
                resources=["*"],
                actions=[
                    "cognito-idp:CreateUserPool",
                    "cognito-idp:CreateUserPoolClient",
                    "cognito-idp:DeleteUserPool",
                    "cognito-idp:UpdateUserPoolClient",
                ],
            ),
        ],
    )

    # Buckets the deployment reads/writes; CodeBuild gets read/write access.
    env_bucket = s3.Bucket(
        self,
        "EnvironmentBucket",
        bucket_name=project_name + "-environment-bucket-" + stage,
    )
    env_bucket.grant_read_write(identity=codebuild_role)

    state_bucket = s3.Bucket(
        self,
        "StateBucket",
        bucket_name=project_name + "-state-bucket-" + stage,
    )
    state_bucket.grant_read_write(identity=codebuild_role)

    # CodePipeline: Source -> DeployApi -> DeployApps.
    pipeline = codepipeline.Pipeline(
        self,
        branch + "CodePipeline",
        pipeline_name=project_name + "-cms-pipeline-" + stage,
    )

    # CodeCommit source for the selected branch.
    source_output = codepipeline.Artifact()
    source_action = codepipeline_actions.CodeCommitSourceAction(
        action_name="Source",
        repository=codecommit.Repository.from_repository_name(
            self,
            "CodeCommitRepository",
            repository_name=project_name + "-cms-repository",
        ),
        output=source_output,
        branch=branch,
    )

    # API deployment build.
    deploy_api_output = codepipeline.Artifact()
    deploy_api_project = codebuild.PipelineProject(
        self,
        "DeployApiProject",
        project_name=project_name + "-deploy-api-" + stage,
        build_spec=codebuild.BuildSpec.from_source_filename(
            "buildspec-api.yml"),
        environment=codebuild.BuildEnvironment(
            compute_type=codebuild.ComputeType.MEDIUM,
            build_image=codebuild.LinuxBuildImage.AMAZON_LINUX_2_3,
        ),
        role=codebuild_role,
        environment_variables={
            "ENVIRONMENT": codebuild.BuildEnvironmentVariable(value=stage)
        })
    deploy_api_action = codepipeline_actions.CodeBuildAction(
        action_name="deploy-api",
        input=source_output,
        project=deploy_api_project,
        outputs=[deploy_api_output],
    )

    # Apps deployment build.
    deploy_apps_output = codepipeline.Artifact()
    deploy_apps_project = codebuild.PipelineProject(
        self,
        "DeployAppsProject",
        project_name=project_name + "-deploy-apps-" + stage,
        build_spec=codebuild.BuildSpec.from_source_filename(
            "buildspec-apps.yml"),
        environment=codebuild.BuildEnvironment(
            compute_type=codebuild.ComputeType.MEDIUM,
            build_image=codebuild.LinuxBuildImage.AMAZON_LINUX_2_3,
        ),
        role=codebuild_role,
        environment_variables={
            "ENVIRONMENT": codebuild.BuildEnvironmentVariable(value=stage)
        })
    deploy_apps_action = codepipeline_actions.CodeBuildAction(
        action_name="deploy-apps",
        input=source_output,
        project=deploy_apps_project,
        outputs=[deploy_apps_output],
    )

    # Wire the stages in execution order.
    pipeline.add_stage(stage_name="Source", actions=[source_action])
    pipeline.add_stage(stage_name="DeployApi", actions=[deploy_api_action])
    pipeline.add_stage(stage_name="DeployApps", actions=[deploy_apps_action])
def __init__(self, scope: core.Construct, id: str, eks, redis, rds_cluster, **kwargs) -> None:
    """Multi-architecture build pipeline.

    CodeCommit source feeds two parallel CodeBuild projects (ARM and AMD
    container image builds pushing to ECR), followed by a post-build step.
    """
    super().__init__(scope, id, **kwargs)

    # Keep references consumed by the env-var / role-access helpers.
    self.eks = eks
    self.redis = redis
    self.rds_cluster = rds_cluster

    # Container registry for the built images.
    ecr_repo = ecr.Repository(self, "ECRRep", repository_name="springboot-multiarch")

    # Application source repository; expose its HTTPS clone URL as a stack output.
    code = codecommit.Repository(self, "CodeRep", repository_name="springboot-multiarch")
    core.CfnOutput(self, "CodeCommitOutput", value=code.repository_clone_url_http)

    def make_build_project(construct_id, spec_file, image):
        # Privileged CodeBuild project (docker-in-docker) sharing the
        # common ECR environment variables and role permissions.
        project = codebuild.PipelineProject(
            self,
            construct_id,
            build_spec=codebuild.BuildSpec.from_source_filename(spec_file),
            environment=codebuild.BuildEnvironment(
                build_image=image,
                privileged=True),
            environment_variables=self.get_build_env_vars(ecr_repo))
        self.add_role_access_to_build(project)
        return project

    arm_build = make_build_project(
        "ARMBuild", "cdk/pipeline/armbuild.yml",
        codebuild.LinuxBuildImage.AMAZON_LINUX_2_ARM)
    amd_build = make_build_project(
        "AMDBuild", "cdk/pipeline/amdbuild.yml",
        codebuild.LinuxBuildImage.AMAZON_LINUX_2_3)
    post_build = make_build_project(
        "PostBuild", "cdk/pipeline/post_build.yml",
        codebuild.LinuxBuildImage.AMAZON_LINUX_2_3)

    # Pipeline artifacts connecting the stages.
    source_output = codepipeline.Artifact()
    arm_build_output = codepipeline.Artifact("ARMBuildOutput")
    amd_build_output = codepipeline.Artifact("AMDBuildOutput")
    post_build_output = codepipeline.Artifact("PostBuildOutput")

    source_stage = codepipeline.StageProps(
        stage_name="Source",
        actions=[
            codepipeline_actions.CodeCommitSourceAction(
                action_name="CodeCommit_Source",
                repository=code,
                output=source_output)
        ])
    # ARM and AMD builds run in parallel within the same stage.
    build_stage = codepipeline.StageProps(
        stage_name="Build",
        actions=[
            codepipeline_actions.CodeBuildAction(
                action_name="ARM_Build",
                project=arm_build,
                input=source_output,
                outputs=[arm_build_output]),
            codepipeline_actions.CodeBuildAction(
                action_name="AMD_Build",
                project=amd_build,
                input=source_output,
                outputs=[amd_build_output]),
        ])
    post_stage = codepipeline.StageProps(
        stage_name="PostBuild",
        actions=[
            codepipeline_actions.CodeBuildAction(
                action_name="Post_Build",
                project=post_build,
                input=source_output,
                outputs=[post_build_output])
        ])

    codepipeline.Pipeline(
        self, "Pipeline", stages=[source_stage, build_stage, post_stage])
def create_action(
    scope: core.Construct,
    id: str,
    action_def: Union[CodeCommitAction, CodeBuildAction,
                      CloudFormationCreateUpdateStackAction, ApprovalAction,
                      LambdaInvokeAction, S3SourceAction, ],
):
    """Build a CodePipeline action from a declarative action definition.

    Dispatches on ``action_def["type"]`` (CODECOMMIT, S3_SOURCE, CODEBUILD,
    CLOUDFORMATION, APPROVAL, LAMBDA) and returns the corresponding
    ``aws_codepipeline_actions`` action instance.

    :param scope: construct scope in which supporting resources
        (roles, projects, buckets, ...) are created.
    :param id: prefix used for the construct ids of supporting resources.
    :param action_def: declarative action definition dict.
        NOTE: mutated — ``"name"`` is popped from it.
    :raises ValueError: if ``action_def["type"]`` is not one of the
        supported type strings.
    """
    action_name = action_def.pop("name")
    run_order = action_def.get("run_order", 1)
    variables_namespace = action_def.get("variables_namespace")
    # Optional pre-existing role for the action itself.
    role = (aws_iam.Role.from_role_arn(scope, f"{id}RoleRef",
                                       action_def["role_arn"])
            if "role_arn" in action_def else None)

    if action_def["type"] == "CODECOMMIT":
        action_def = cast(CodeCommitAction, action_def)
        repository = aws_codecommit.Repository.from_repository_name(
            scope, f"{id}Repo", action_def["repository"])
        output = aws_codepipeline.Artifact(action_def["output"])
        return aws_codepipeline_actions.CodeCommitSourceAction(
            action_name=action_name,
            output=output,
            repository=repository,
            branch=action_def.get("branch", "master"),
            run_order=run_order,
            role=role,
            variables_namespace=variables_namespace,
        )
    elif action_def["type"] == "S3_SOURCE":
        action_def = cast(S3SourceAction, action_def)
        output = aws_codepipeline.Artifact(action_def["output"])
        # When the source objects are KMS-encrypted, create a dedicated role
        # and grant it decrypt on the key (overrides any role_arn role).
        if "kms_key_arn" in action_def:
            role = aws_iam.Role(
                scope,
                f"{id}Role",
                assumed_by=aws_iam.AccountRootPrincipal(),
            )
            aws_kms.Key.from_key_arn(
                scope, f"{id}KeyRef",
                key_arn=action_def["kms_key_arn"]).grant_decrypt(role)
        if "bucket" in action_def:
            bucket = aws_s3.Bucket.from_bucket_name(scope,
                                                    f"{id}SourceBucketRef",
                                                    action_def["bucket"])
        else:
            # No bucket given: create a private one and export its name.
            bucket = aws_s3.Bucket(
                scope,
                f"{id}SourceBucket",
                block_public_access=aws_s3.BlockPublicAccess.BLOCK_ALL,
                removal_policy=core.RemovalPolicy.DESTROY,
            )
            core.CfnOutput(scope, f"{id}SourceBucketName",
                           value=bucket.bucket_name)
        return aws_codepipeline_actions.S3SourceAction(
            action_name=action_name,
            output=output,
            run_order=run_order,
            role=role,
            bucket=bucket,
            bucket_key=action_def["key"],
        )
    elif action_def["type"] == "CODEBUILD":
        action_def = cast(CodeBuildAction, action_def)
        # Set up CodeBuild project
        project_params = {
            "build_spec":
                aws_codebuild.BuildSpec.from_source_filename(
                    action_def.get("build_spec", "buildspec.yaml")),
            "timeout":
                core.Duration.minutes(
                    int(action_def.get("timeout_minutes", 60))),
        }
        project_params["environment"] = {
            "build_image": aws_codebuild.LinuxBuildImage.AMAZON_LINUX_2_3
        }
        if "environment" in action_def:
            # build_image / compute_type are given as enum member names;
            # resolve them, then pass any remaining keys through as-is.
            if "build_image" in action_def["environment"]:
                project_params["environment"]["build_image"] = getattr(
                    aws_codebuild.LinuxBuildImage,
                    action_def["environment"].pop("build_image"),
                )
            if "compute_type" in action_def["environment"]:
                project_params["environment"]["compute_type"] = getattr(
                    aws_codebuild.ComputeType,
                    action_def["environment"].pop("compute_type"),
                )
            project_params["environment"].update(**action_def["environment"])
        # SECURITY(review): this role is wide open (*/*) — consider scoping
        # it down to what the buildspec actually needs.
        project_role = aws_iam.Role(
            scope,
            f"{id}CodeBuildRole",
            path="/codebuild/",
            assumed_by=aws_iam.ServicePrincipal(
                service="codebuild.amazonaws.com"),
        )
        project_role.add_to_policy(
            aws_iam.PolicyStatement(actions=["*"],
                                    resources=["*"],
                                    effect=aws_iam.Effect.ALLOW))
        # Variables containing "#" are CodePipeline variable references
        # (e.g. "#{ns.var}") resolved at execution time, so they must be set
        # on the pipeline action; plain values go on the project.
        project_environment_variables = ({
            var_key: aws_codebuild.BuildEnvironmentVariable(
                value=str(var_value),
                type=aws_codebuild.BuildEnvironmentVariableType.PLAINTEXT,
            )
            for var_key, var_value in
            action_def["environment_variables"].items()
            if "#" not in str(var_value)
        } if "environment_variables" in action_def else None)
        project = aws_codebuild.PipelineProject(
            scope,
            f"{id}Project",
            project_name=id,
            role=project_role,
            environment_variables=project_environment_variables,
            **project_params,
        )
        pipeline_environment_variables = ({
            var_key: aws_codebuild.BuildEnvironmentVariable(
                value=str(var_value),
                type=aws_codebuild.BuildEnvironmentVariableType.PLAINTEXT,
            )
            for var_key, var_value in
            action_def["environment_variables"].items()
            if "#" in str(var_value)
        } if "environment_variables" in action_def else None)
        extra_inputs = ([
            aws_codepipeline.Artifact(input_)
            for input_ in action_def["extra_inputs"]
        ] if "extra_inputs" in action_def else None)
        outputs = ([
            aws_codepipeline.Artifact(output)
            for output in action_def["outputs"]
        ] if "outputs" in action_def else None)
        return aws_codepipeline_actions.CodeBuildAction(
            action_name=action_name,
            input=aws_codepipeline.Artifact(action_def["input"]),
            project=project,
            run_order=run_order,
            role=role,
            variables_namespace=variables_namespace,
            environment_variables=pipeline_environment_variables,
            extra_inputs=extra_inputs,
            outputs=outputs,
        )
    elif action_def["type"] == "CLOUDFORMATION":
        action_def = cast(CloudFormationCreateUpdateStackAction, action_def)
        capabilities = None
        if "capabilities" in action_def:
            # BUGFIX: the previous code used capability.lstrip("CAPABILITY_"),
            # but str.lstrip strips a *character set*, not a prefix —
            # "CAPABILITY_IAM".lstrip("CAPABILITY_") == "" because every
            # character of "IAM" is in the strip set, producing KeyError("").
            # Remove the literal prefix instead, and map CAPABILITY_IAM to
            # the CDK enum member name ANONYMOUS_IAM.
            capabilities = []
            for capability in action_def["capabilities"]:
                member = (capability[len("CAPABILITY_"):]
                          if capability.startswith("CAPABILITY_")
                          else capability)
                if member == "IAM":
                    member = "ANONYMOUS_IAM"
                capabilities.append(
                    aws_cloudformation.CloudFormationCapabilities[member])
        return aws_codepipeline_actions.CloudFormationCreateUpdateStackAction(
            action_name=action_name,
            admin_permissions=False,
            stack_name=action_def["stack_name"],
            template_path=aws_codepipeline.ArtifactPath(
                aws_codepipeline.Artifact(action_def["input"]),
                action_def.get("template_path", "template.yaml"),
            ),
            capabilities=capabilities,
            deployment_role=role,
            role=role,
            parameter_overrides=action_def.get("parameter_overrides"),
            run_order=run_order,
            variables_namespace=variables_namespace,
        )
    elif action_def["type"] == "APPROVAL":
        action_def = cast(ApprovalAction, action_def)
        return aws_codepipeline_actions.ManualApprovalAction(
            action_name=action_name,
            run_order=run_order,
            role=role,
            additional_information=action_def.get("additional_information"),
            external_entity_link=action_def.get("external_entity_link"),
            notification_topic=action_def.get("notification_topic"),
            variables_namespace=variables_namespace,
        )
    elif action_def["type"] == "LAMBDA":
        action_def = cast(LambdaInvokeAction, action_def)
        user_parameters = action_def.get("user_parameters")
        return aws_codepipeline_actions.LambdaInvokeAction(
            action_name=action_name,
            run_order=run_order,
            lambda_=aws_lambda.Function.from_function_arn(
                scope, f"{id}Lambda", action_def["function_arn"]),
            user_parameters=user_parameters,
            role=role,
            variables_namespace=variables_namespace,
        )
    else:
        # Previously an unknown type silently returned None and failed
        # later in an obscure place; fail fast instead.
        raise ValueError(f"Unknown action type: {action_def['type']}")