def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    """Stack that provisions a single CodeCommit repository named 'test-repo'."""
    super().__init__(scope, id, **kwargs)
    # Source repository for this project.
    self.repo = codecommit.Repository(
        self,
        "my-repo",
        repository_name="test-repo",
    )
def __init__(self, scope: core.Construct, id: str, eks_base_cluster: eks.Cluster, git_user, git_password) -> None:
    """Create the CodeCommit repo holding Kubernetes infra code and bootstrap FluxCD against it.

    :param scope: Parent construct (accessed for context and region).
    :param id: Construct id.
    :param eks_base_cluster: Existing EKS cluster FluxCD is installed into.
    :param git_user: Git user FluxCD authenticates with.
    :param git_password: Git password for that user.
    """
    super().__init__(scope=scope, id=id)

    name = scope.node.try_get_context("name")
    # NOTE(review): annotated as core.Construct but `.region` is a Stack
    # attribute — looks like the caller passes a Stack; confirm.
    region = scope.region

    # Repository for the Kubernetes infrastructure manifests.
    kubernetes_repo = codecommit.Repository(
        scope=self,
        id=f"{name}-kubernetes-git",
        repository_name=f"kubernetes-infra-{name}",
        description="Kubernetes Infra Code",
    )

    # Grc/HTTPS host path FluxCD will sync from.
    self.k8s_infra_git_host = (
        f"git-codecommit.{region}.amazonaws.com/v1/repos/"
        f"{kubernetes_repo.repository_name}"
    )

    core.CfnOutput(
        scope=self,
        id="k8s_infrastructure_repository",
        value=kubernetes_repo.repository_clone_url_http,
    )

    # Install FluxCD on the cluster, pointed at the new repository.
    fluxcd_cluster.FluxcdCluster(
        scope=self,
        eks_base_cluster=eks_base_cluster,
        git_user=git_user,
        git_password=git_password,
        git_repository=self.k8s_infra_git_host,
    )
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    """Pipeline stack: CodeCommit source -> CDK synth -> deploy stage with endpoint smoke tests."""
    super().__init__(scope, id, **kwargs)

    # Repository that holds the workshop source code.
    repo = codecommit.Repository(
        self, 'WorkshopRepo', repository_name="WorkshopRepo"
    )

    # Artifact for the checked-out source code.
    source_artifact = codepipeline.Artifact()
    # Artifact for the cloud assembly (CloudFormation template + all other assets).
    cloud_assembly_artifact = codepipeline.Artifact()

    pipeline = pipelines.CdkPipeline(
        self,
        'Pipeline',
        cloud_assembly_artifact=cloud_assembly_artifact,
        # Pulls the source artifact from the repository created above.
        source_action=codepipeline_actions.CodeCommitSourceAction(
            action_name='CodeCommit',
            output=source_artifact,
            repository=repo,
        ),
        # Builds the source code into a cloud assembly artifact.
        synth_action=pipelines.SimpleSynthAction(
            install_commands=[
                'npm install -g aws-cdk',           # CDK CLI on CodeBuild
                'pip install -r requirements.txt',  # Python dependencies
            ],
            synth_command='npx cdk synth',
            source_artifact=source_artifact,
            cloud_assembly_artifact=cloud_assembly_artifact,
        ),
    )

    deploy = WorkshopPipelineStage(self, 'Deploy')
    deploy_stage = pipeline.add_application_stage(deploy)

    # Smoke-test the viewer endpoint after deployment.
    deploy_stage.add_actions(
        pipelines.ShellScriptAction(
            action_name='TestViewerEndpoint',
            use_outputs={
                'ENDPOINT_URL': pipeline.stack_output(deploy.hc_viewer_url)
            },
            commands=['curl -Ssf $ENDPOINT_URL'],
        )
    )

    # Smoke-test the API Gateway endpoint and its routes.
    deploy_stage.add_actions(
        pipelines.ShellScriptAction(
            action_name='TestAPIGatewayEndpoint',
            use_outputs={
                'ENDPOINT_URL': pipeline.stack_output(deploy.hc_endpoint)
            },
            commands=[
                'curl -Ssf $ENDPOINT_URL',
                'curl -Ssf $ENDPOINT_URL/hello',
                'curl -Ssf $ENDPOINT_URL/test',
            ],
        )
    )
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    """Blue/green deployment stack: S3 bucket, Lambda switcher, CodeCommit repo and pipeline."""
    super().__init__(scope, id, **kwargs)

    blue_env = self.node.try_get_context("blue_env")
    green_env = self.node.try_get_context("green_env")
    app_name = self.node.try_get_context("app_name")

    # DESTROY lets `cdk destroy` delete the bucket (it still errors if the
    # bucket is non-empty); the default RETAIN would leave it in the account
    # until manually deleted. NOT recommended for production code.
    bucket = s3.Bucket(
        self,
        'BlueGreenBucket',
        removal_policy=core.RemovalPolicy.DESTROY,
    )

    # Lambda that performs the blue/green switch; reads/writes state in the bucket.
    handler = lmbda.Function(
        self,
        'BlueGreenLambda',
        runtime=lmbda.Runtime.PYTHON_3_6,
        code=lmbda.Code.asset('resources'),
        handler='blue_green.lambda_handler',
        environment={'BUCKET': bucket.bucket_name},
    )
    bucket.grant_read_write(handler)

    repo = cc.Repository(
        self,
        'Repository',
        repository_name='MyRepositoryName',
    )

    pipeline = cp.Pipeline(self, 'MyFirstPipeline')

    # Source stage: pull from CodeCommit.
    source_stage = pipeline.add_stage(stage_name='Source')
    source_artifact = cp.Artifact('Source')
    source_stage.add_action(
        cpactions.CodeCommitSourceAction(
            action_name='CodeCommit',
            repository=repo,
            output=source_artifact,
        )
    )

    # Deploy stage: invoke the blue/green Lambda with the environment parameters.
    deploy_stage = pipeline.add_stage(stage_name='Deploy')
    deploy_stage.add_action(
        cpactions.LambdaInvokeAction(
            action_name='InvokeAction',
            lambda_=handler,
            user_parameters={
                'blueEnvironment': blue_env,
                'greenEnvironment': green_env,
                'application': app_name,
            },
            inputs=[source_artifact],
        )
    )
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    """Stack that provisions the project's CMS CodeCommit repository."""
    super().__init__(scope, id, **kwargs)
    project_name = self.node.try_get_context("project")
    # Repository name is derived from the "project" context value.
    cms_repo = codecommit.Repository(
        self,
        "CmsRepository",
        repository_name=project_name + "-cms-repository",
    )
def create_component_source_repository(self) -> Resource:
    """Create the CodeCommit repository that stores the source code used
    inside the Greengrass component.

    Returns:
        Resource: The CodeCommit repository construct.
    """
    # Repository id/name derived from the stack and component identifiers.
    name = f"{self.stack_name}_{self.component_id}_component-source".lower()
    return aws_codecommit.Repository(self, id=name, repository_name=name)
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    """Stack containing a single CodeCommit repository for the project sources."""
    super().__init__(scope, id, **kwargs)

    # [ CodeCommit: Repository ]
    # Repository to store the project in; not bound to a local name because
    # nothing else in this stack references it.
    codecommit.Repository(
        self,
        'CodeCommitRepo',
        repository_name='CodeCommitRepo',
        description='CodeCommitRepo repository',
    )
def __init__(self, scope: core.Construct, id: str, config, **kwargs) -> None:
    """Stack that creates `count` CodeCommit repositories named `<name>-1` .. `<name>-<count>`.

    Context values "name" and "count" take precedence over
    config["Default"]["name"] / config["Default"]["count"].

    :param config: Config mapping providing the "Default" fallbacks.
    """
    super().__init__(scope, id, **kwargs)

    repo_name = self.node.try_get_context("name") or config["Default"]["name"]
    repo_count = self.node.try_get_context("count") or config["Default"]["count"]

    # BUG FIX: the original concatenated `repo_count` directly into the
    # description string, which raises TypeError when the config supplies
    # the count as an int (the code already expects that via int(repo_count)).
    total = int(repo_count)
    for i in range(1, total + 1):
        codecommit.Repository(
            self,
            "Repository" + str(i),
            repository_name=repo_name + '-' + str(i),
            description="Repo number " + str(i) + " of " + str(total),
        )
def __init__(self, scope: Construct, construct_id: str, **kwargs) -> None:
    """Wire a CodeCommit repo, two CodeBuild projects and a Step Functions
    state machine that runs both builds in sequence for PR validation."""
    super().__init__(scope, construct_id, **kwargs)

    repo = codecommit.Repository(
        self,
        "repo",
        repository_name="demorepo",
        description="Repo to test PR with stepfunctions")

    # First project comes from the helper; second is declared inline.
    proj1 = self.new_build_project(repo, "pr_specs/buildspec.yaml", "proj1")
    proj2 = _codebuild.Project(
        self,
        "proj_name",
        badge=True,
        description="Build project for ",
        environment=_codebuild.BuildEnvironment(
            build_image=_codebuild.LinuxBuildImage.STANDARD_5_0,
            compute_type=_codebuild.ComputeType.LARGE,
            privileged=True),
        project_name="proj_name",
        build_spec=_codebuild.BuildSpec.from_source_filename(
            filename="pr_specs/buildspec2.yaml"),
        timeout=Duration.minutes(10),
    )

    # Chain: pass-through input -> proj1 build -> proj2 build.
    input_task = _step_fn.Pass(self, "passstate")
    proj1_tasks = self.new_codebuild_task(proj1)
    proj2_tasks = self.new_codebuild_task(proj2)
    definition = input_task.next(proj1_tasks).next(proj2_tasks)

    # Constructing the StateMachine registers it on the stack; the unused
    # local binding (`_fn`) from the original was removed.
    _step_fn.StateMachine(
        self,
        "statemachine",
        definition=definition,
        state_machine_name="statemachine",
    )
def __init__(self, scope: core.Construct, id: str, **kwargs):
    """CDK pipeline that builds and deploys a generic EC2 instance-role policy."""
    super().__init__(scope, id, **kwargs)

    source_repo = codecommit.Repository(
        self,
        "sourcerepo",
        repository_name='ec2_generic_policy',
        description='Generic EC2 policy for using on EC2 Instance Roles',
    )

    source_artifact = codepipeline.Artifact()
    cloud_assembly_artifact = codepipeline.Artifact()

    pipeline = pipelines.CdkPipeline(
        self,
        'Pipeline',
        cloud_assembly_artifact=cloud_assembly_artifact,
        pipeline_name='EC2RolePolicy',
        # Trigger on repository events on the master branch.
        source_action=cpactions.CodeCommitSourceAction(
            output=source_artifact,
            repository=source_repo,
            branch='master',
            trigger=cpactions.CodeCommitTrigger.EVENTS,
            action_name='OnRepoevent',
            run_order=1,
        ),
        synth_action=pipelines.SimpleSynthAction(
            source_artifact=source_artifact,
            cloud_assembly_artifact=cloud_assembly_artifact,
            install_command=(
                'npm install -g aws-cdk && pip install -r requirements.txt'
            ),
            synth_command='cdk synth',
        ),
    )

    # Production stage pinned to a specific account/region. Add further
    # stages here as required.
    app_env = core.Environment(account="194433038617",
                               region="ap-southeast-2")
    prod_app = Ec2PolicyStage(self, 'Prod', env=app_env)
    prod_stage = pipeline.add_application_stage(prod_app)
def __init__(
        self,
        scope: core.Stack,
        prefix: str,
        pipeline_params: PipelineParameters,
        lambda_params: LambdaParameters,
        vpc_params: VpcParameters
):
    """
    AWS CDK package that helps deploying a lambda function.

    :param scope: A scope in which resources shall be created.
    :param prefix: Prefix for all of your resource IDs and names.
    :param pipeline_params: Parameters, letting you supply ssh key for accessing remote repositories.
    :param lambda_params: Parameters, focusing on the Lambda function itself.
    :param vpc_params: Parameters, focused on Virtual Private Cloud settings.
    """
    # CodeCommit repository that stores the function source code.
    self.project_repository = aws_codecommit.Repository(
        scope,
        prefix + 'CiCdLambdaCodeCommitRepo',
        repository_name=prefix + 'CiCdLambdaCodeCommitRepo',
    )

    # The Lambda function this package manages; starts with a placeholder body
    # that the pipeline later replaces with the real build output.
    self.function = aws_lambda.Function(
        scope,
        prefix + 'Function',
        code=aws_lambda.Code.from_inline(
            'def runner():\n'
            ' return \'Hello, World!\''
        ),
        handler=lambda_params.lambda_handler,
        runtime=lambda_params.lambda_runtime,
        description=f'Lambda function {prefix}.',
        environment=lambda_params.environment,
        function_name=prefix,
        memory_size=lambda_params.lambda_memory,
        reserved_concurrent_executions=5,
        role=lambda_params.execution_role,
        security_groups=vpc_params.security_groups,
        timeout=core.Duration.seconds(lambda_params.lambda_timeout),
        vpc=vpc_params.vpc,
        vpc_subnets=aws_ec2.SubnetSelection(subnets=vpc_params.subnets)
    )

    # Optional CloudWatch alarms for the function.
    if lambda_params.alarms_sns_topic:
        self.alarms = LambdaAlarms(
            scope, prefix, lambda_params.alarms_sns_topic, self.function
        )
    else:
        self.alarms = None

    # Artifacts bucket; the name is first converted to an S3-friendly form.
    bucket_name = self.__convert(prefix + 'CiCdLambdaArtifactsBucket')
    self.bucket = EmptyS3Bucket(
        scope,
        prefix + 'CiCdLambdaDeploymentBucket',
        bucket_name=bucket_name
    )

    # BuildSpec describing install, test and deploy phases for CodeBuild.
    self.buildspec = BuildSpecObject(
        prefix,
        self.bucket,
        pipeline_params.ssh_params.secret_id,
        pipeline_params.ssh_params.private_key,
        pipeline_params.install_args,
        pipeline_params.test_args,
        pipeline_params.custom_pre_build_commands
    )

    # CodeBuild project that installs the function's dependencies, runs tests
    # and deploys it to Lambda.
    self.code_build_project = aws_codebuild.PipelineProject(
        scope,
        prefix + 'CiCdLambdaCodeBuildProject',
        project_name=prefix + 'CiCdLambdaCodeBuildProject',
        environment=aws_codebuild.BuildEnvironment(
            build_image=aws_codebuild.LinuxBuildImage.STANDARD_3_0,
            compute_type=aws_codebuild.ComputeType.SMALL,
            privileged=True
        ),
        build_spec=aws_codebuild.BuildSpec.from_object(self.buildspec.get_object()),
    )

    # Allow CodeBuild to manage artifacts in S3 and update the function code.
    # NOTE(review): 's3:*' on all resources is broad — consider scoping to the
    # artifacts bucket.
    self.code_build_project.role.add_to_policy(
        statement=aws_iam.PolicyStatement(
            actions=[
                's3:*',
                'lambda:UpdateFunctionCode',
            ],
            resources=['*'],
            effect=aws_iam.Effect.ALLOW)
    )

    # If an SSH secret is configured, let CodeBuild read it.
    if pipeline_params.ssh_params.secret_arn is not None:
        self.code_build_project.role.add_to_policy(
            statement=aws_iam.PolicyStatement(
                actions=['secretsmanager:GetSecretValue'],
                resources=[pipeline_params.ssh_params.secret_arn],
                effect=aws_iam.Effect.ALLOW)
        )

    # If the secret is KMS-encrypted, let CodeBuild decrypt with that key.
    if pipeline_params.ssh_params.kms_key_arn is not None:
        self.code_build_project.role.add_to_policy(
            statement=aws_iam.PolicyStatement(
                actions=['kms:Decrypt'],
                effect=aws_iam.Effect.ALLOW,
                resources=[pipeline_params.ssh_params.kms_key_arn]
            )
        )

    # Push the initial commit to CodeCommit.
    self.initial_commit = InitialCommit(
        scope, prefix, self.project_repository
    ).get_resource()

    self.source_artifact = aws_codepipeline.Artifact(
        artifact_name=prefix + 'CiCdLambdaSourceArtifact'
    )

    # CodePipeline source action reading master from the CodeCommit repository.
    self.source_action = aws_codepipeline_actions.CodeCommitSourceAction(
        repository=self.project_repository,
        branch='master',
        action_name='CodeCommitSource',
        run_order=1,
        trigger=aws_codepipeline_actions.CodeCommitTrigger.EVENTS,
        output=self.source_artifact
    )

    # CodePipeline build action executing the CodeBuild project above.
    self.build_action = aws_codepipeline_actions.CodeBuildAction(
        input=self.source_artifact,
        project=self.code_build_project,
        action_name='BuildAction',
        run_order=1
    )

    # Two-stage pipeline: source, then build/deploy.
    self.codecommit_to_lambda_pipeline = aws_codepipeline.Pipeline(
        scope,
        prefix + 'CiCdLambdaPipeline',
        pipeline_name=prefix + 'CiCdLambdaPipeline',
        artifact_bucket=self.bucket,
        stages=[
            aws_codepipeline.StageProps(
                stage_name='SourceStage',
                actions=[self.source_action]
            ),
            aws_codepipeline.StageProps(
                stage_name='BuildStage',
                actions=[self.build_action]
            )
        ]
    )
def __init__(self, scope: core.Construct, id: str,
             lambda_code: _lambda.CfnParametersCode,
             custom_resource: _lambda.CfnParametersCode, **kwargs) -> None:
    """Pipeline stack for the serverless customers service.

    Stages: CodeCommit source -> parallel Maven/CDK builds -> CloudFormation
    deploy that wires the build artifacts into the Lambda stack's parameters.
    """
    super().__init__(scope, id, **kwargs)

    self.lambda_code = lambda_code
    self.custom_resource = custom_resource

    code = _commit.Repository(
        self,
        'CustomerServerlessCode',
        repository_name='spring-petclinic-customers-serverless')

    # Builds the Spring jar with Maven and explodes it into `deploy/`.
    lambda_project = _build.PipelineProject(
        self,
        'CustomerLambdaBuild',
        build_spec=_build.BuildSpec.from_object({
            'version': 0.2,
            'phases': {
                'install': {
                    'runtime-versions': {'java': 'openjdk8'},
                    'commands': []
                },
                'build': {
                    'commands': 'mvn package',
                },
                'post_build': {
                    'commands': [
                        'mkdir deploy',
                        'cp target/spring-petclinic-customers-serverless-2.0.7.RELEASE.jar deploy/',
                        'cd deploy && jar xvf spring-petclinic-customers-serverless-2.0.7.RELEASE.jar',
                        'rm spring-petclinic-customers-serverless-2.0.7.RELEASE.jar',
                    ]
                }
            },
            'artifacts': {
                'base-directory': 'deploy',
                'files': ['**/*']
            },
        }),
        environment=_build.BuildEnvironment(
            build_image=_build.LinuxBuildImage.STANDARD_2_0))

    # Synthesizes the CDK app; emits the Lambda stack template and the
    # custom-resource code as secondary artifacts.
    cdk_project = _build.PipelineProject(
        self,
        'CustomerCdkBuild',
        build_spec=_build.BuildSpec.from_object({
            'version': 0.2,
            'phases': {
                'install': {
                    'runtime-versions': {'python': '3.7', 'nodejs': '10'},
                    'commands': [
                        'npm install -g [email protected]',
                        'pip install aws-cdk.core==1.10.0',
                        'pip install -r requirements.txt'
                    ]
                },
                'build': {
                    'commands': [
                        'cdk synth -o dist',
                    ]
                }
            },
            'artifacts': {
                'secondary-artifacts': {
                    'CdkBuildOutput': {
                        'base-directory': 'dist',
                        'files': ['customer-lambda-stack.template.json']
                    },
                    # NOTE(review): 'CustomRecoureOutput' is a typo, but it
                    # must stay in sync with the Artifact name below.
                    'CustomRecoureOutput': {
                        'base-directory': 'custom-resource-code',
                        'discard-paths': 'yes',
                        'files': ['index.py', 'owner.json', 'cfnresponse.py']
                    }
                }
            }
        }),
        environment=_build.BuildEnvironment(
            build_image=_build.LinuxBuildImage.STANDARD_2_0))

    source_output = _pipeline.Artifact('SourceOutput')
    cdk_build_output = _pipeline.Artifact('CdkBuildOutput')
    lambda_build_output = _pipeline.Artifact('LambdaBuildOutput')
    custom_resource_output = _pipeline.Artifact('CustomRecoureOutput')

    pipeline = _pipeline.Pipeline(
        self,
        'ServerlessPipeline',
        stages=[
            {
                'stageName': 'Source',
                'actions': [
                    _action.CodeCommitSourceAction(
                        action_name='CodeCommit_Source',
                        repository=code,
                        output=source_output)
                ]
            },
            {
                'stageName': 'Build',
                'actions': [
                    _action.CodeBuildAction(
                        action_name='CodeBuild_CDK',
                        project=cdk_project,
                        input=source_output,
                        outputs=[cdk_build_output, custom_resource_output]),
                    _action.CodeBuildAction(
                        action_name='CodeBuild_Lambda',
                        project=lambda_project,
                        input=source_output,
                        outputs=[lambda_build_output])
                ]
            },
            {
                'stageName': 'Deploy',
                'actions': [
                    _action.CloudFormationCreateUpdateStackAction(
                        action_name='Lambda_CFN_Deploy',
                        template_path=cdk_build_output.at_path(
                            'customer-lambda-stack.template.json'),
                        stack_name='customer-lambda-stack',
                        admin_permissions=True,
                        # Point the template's CfnParameters at the freshly
                        # built artifact locations.
                        parameter_overrides={
                            **self.lambda_code.assign(
                                bucket_name=lambda_build_output.bucket_name,
                                object_key=lambda_build_output.object_key),
                            **self.custom_resource.assign(
                                bucket_name=custom_resource_output.bucket_name,
                                object_key=custom_resource_output.object_key)
                        },
                        extra_inputs=[
                            lambda_build_output,
                            custom_resource_output
                        ])
                ]
            }
        ])
def __init__(
    self,
    scope: Construct,
    id: str,
    context: "Context",
    team_context: "TeamContext",
    parameters: Dict[str, Any],
) -> None:
    """Per-team stack: a CodeCommit repo plus an inline policy granting the
    team's EKS pod role full working access to that repo.

    :param context: Orbit environment context (account, region, env name).
    :param team_context: Team context; must carry ``eks_pod_role_arn``.
    :param parameters: Extra parameters (unused in this constructor).
    :raises ValueError: If ``team_context.eks_pod_role_arn`` is None.
    """
    super().__init__(
        scope=scope,
        id=id,
        stack_name=id,
        env=Environment(account=context.account_id, region=context.region),
    )
    # Tag everything in this stack with the orbit environment name.
    Tags.of(scope=cast(IConstruct, self)).add(
        key="Env", value=f"orbit-{context.name}")
    # Team repository, named after the environment and team.
    repo: codecommit.Repository = codecommit.Repository(
        scope=self,
        id="repo",
        repository_name=f"orbit-{context.name}-{team_context.name}",
    )
    if team_context.eks_pod_role_arn is None:
        raise ValueError("Pod Role arn required")
    # Import the team's existing pod role so the policy can be attached to it.
    team_role = iam.Role.from_role_arn(
        scope=self,
        id="team-role",
        role_arn=team_context.eks_pod_role_arn,
        mutable=True,
    )
    team_role.attach_inline_policy(policy=iam.Policy(
        scope=self,
        id="codecommit",
        policy_name="codecommit",
        statements=[
            # Repo-scoped statement: all day-to-day CodeCommit operations
            # (git push/pull, files, branches, comments, PRs, merges,
            # triggers) limited to this team's repository.
            iam.PolicyStatement(
                effect=iam.Effect.ALLOW,
                actions=[
                    "codecommit:CreateBranch",
                    "codecommit:DeleteCommentContent",
                    "codecommit:ListPullRequests",
                    "codecommit:UpdatePullRequestApprovalRuleContent",
                    "codecommit:PutFile",
                    "codecommit:GetPullRequestApprovalStates",
                    "codecommit:CreateCommit",
                    "codecommit:ListTagsForResource",
                    "codecommit:BatchDescribeMergeConflicts",
                    "codecommit:GetCommentsForComparedCommit",
                    "codecommit:DeletePullRequestApprovalRule",
                    "codecommit:GetCommentReactions",
                    "codecommit:GetComment",
                    "codecommit:UpdateComment",
                    "codecommit:MergePullRequestByThreeWay",
                    "codecommit:CreatePullRequest",
                    "codecommit:UpdatePullRequestApprovalState",
                    "codecommit:GetPullRequestOverrideState",
                    "codecommit:PostCommentForPullRequest",
                    "codecommit:GetRepositoryTriggers",
                    "codecommit:UpdatePullRequestDescription",
                    "codecommit:GetObjectIdentifier",
                    "codecommit:BatchGetPullRequests",
                    "codecommit:GetFile",
                    "codecommit:GetUploadArchiveStatus",
                    "codecommit:MergePullRequestBySquash",
                    "codecommit:GetDifferences",
                    "codecommit:GetRepository",
                    "codecommit:GetMergeConflicts",
                    "codecommit:GetMergeCommit",
                    "codecommit:PostCommentForComparedCommit",
                    "codecommit:GitPush",
                    "codecommit:GetMergeOptions",
                    "codecommit:AssociateApprovalRuleTemplateWithRepository",
                    "codecommit:PutCommentReaction",
                    "codecommit:GetTree",
                    "codecommit:BatchAssociateApprovalRuleTemplateWithRepositories",
                    "codecommit:GetReferences",
                    "codecommit:GetBlob",
                    "codecommit:DescribeMergeConflicts",
                    "codecommit:UpdatePullRequestTitle",
                    "codecommit:GetCommit",
                    "codecommit:OverridePullRequestApprovalRules",
                    "codecommit:GetCommitHistory",
                    "codecommit:GetCommitsFromMergeBase",
                    "codecommit:BatchGetCommits",
                    "codecommit:TestRepositoryTriggers",
                    "codecommit:DescribePullRequestEvents",
                    "codecommit:UpdatePullRequestStatus",
                    "codecommit:CreatePullRequestApprovalRule",
                    "codecommit:UpdateDefaultBranch",
                    "codecommit:GetPullRequest",
                    "codecommit:PutRepositoryTriggers",
                    "codecommit:UploadArchive",
                    "codecommit:ListAssociatedApprovalRuleTemplatesForRepository",
                    "codecommit:MergeBranchesBySquash",
                    "codecommit:ListBranches",
                    "codecommit:GitPull",
                    "codecommit:BatchGetRepositories",
                    "codecommit:GetCommentsForPullRequest",
                    "codecommit:BatchDisassociateApprovalRuleTemplateFromRepositories",
                    "codecommit:CancelUploadArchive",
                    "codecommit:GetFolder",
                    "codecommit:PostCommentReply",
                    "codecommit:MergeBranchesByFastForward",
                    "codecommit:CreateUnreferencedMergeCommit",
                    "codecommit:EvaluatePullRequestApprovalRules",
                    "codecommit:MergeBranchesByThreeWay",
                    "codecommit:GetBranch",
                    "codecommit:DisassociateApprovalRuleTemplateFromRepository",
                    "codecommit:MergePullRequestByFastForward",
                    "codecommit:DeleteFile",
                    "codecommit:DeleteBranch",
                ],
                resources=[repo.repository_arn],
            ),
            # Account-wide statement: approval-rule-template and repository
            # listing actions that do not support resource-level scoping.
            iam.PolicyStatement(
                effect=iam.Effect.ALLOW,
                actions=[
                    "codecommit:ListRepositoriesForApprovalRuleTemplate",
                    "codecommit:CreateApprovalRuleTemplate",
                    "codecommit:UpdateApprovalRuleTemplateName",
                    "codecommit:GetApprovalRuleTemplate",
                    "codecommit:ListApprovalRuleTemplates",
                    "codecommit:DeleteApprovalRuleTemplate",
                    "codecommit:ListRepositories",
                    "codecommit:UpdateApprovalRuleTemplateContent",
                    "codecommit:UpdateApprovalRuleTemplateDescription",
                ],
                resources=["*"],
            ),
        ],
    ))
def __init__(self, scope: core.Construct, id: str,
             config: ContainerPipelineConfiguration, **kwargs) -> None:
    """Commit-to-ECR pipeline: lint the Dockerfile, scan the source for
    secrets, then build the Spring image and deploy it."""
    super().__init__(scope, id, **kwargs)

    # Application source repository.
    commit = aws_codecommit.Repository(
        self,
        config.ProjectName + "-apprepo",
        repository_name=config.ProjectName + "-app-repo")

    # Container image repository.
    self.docker_repo = ecr.Repository(
        scope=self,
        id=config.ProjectName,
        removal_policy=core.RemovalPolicy.DESTROY,
        repository_name=config.ProjectName)

    pipeline = codepipeline.Pipeline(
        self,
        "MyPipeline",
        pipeline_name=config.ProjectName + "-commit-to-ecr")

    source_output = codepipeline.Artifact()
    source_action = codepipeline_actions.CodeCommitSourceAction(
        action_name="CodeCommit",
        repository=commit,
        output=source_output)

    # Dockerfile linting project.
    cb_docker_build_lint = aws_codebuild.PipelineProject(
        self,
        "DockerLint",
        project_name=config.ProjectName + "-docker-lint",
        build_spec=aws_codebuild.BuildSpec.from_source_filename(
            filename='configs/buildspec_lint.yml'),
        environment=aws_codebuild.BuildEnvironment(
            build_image=aws_codebuild.LinuxBuildImage.UBUNTU_14_04_NODEJS_10_1_0,
            privileged=True,
        ),
        # Expose the ECR repo URI and project name to the buildspec.
        environment_variables={
            'ecr': aws_codebuild.BuildEnvironmentVariable(
                value=self.docker_repo.repository_uri),
            'project_name': aws_codebuild.BuildEnvironmentVariable(
                value=config.ProjectName)
        },
        description='linting the container dockerfile for best practices',
        timeout=core.Duration.minutes(60),
    )

    # Secret-scanning project for the source repository.
    cb_source_secretscan = aws_codebuild.PipelineProject(
        self,
        "SourceSecretScan",
        project_name=config.ProjectName + "-source-secretscan",
        build_spec=aws_codebuild.BuildSpec.from_source_filename(
            filename='configs/buildspec_secrets.yml'),
        environment=aws_codebuild.BuildEnvironment(
            privileged=True,
            build_image=aws_codebuild.LinuxBuildImage.AMAZON_LINUX_2_3,
        ),
        environment_variables={
            'commituri': aws_codebuild.BuildEnvironmentVariable(
                value=commit.repository_clone_url_http),
            'ecr': aws_codebuild.BuildEnvironmentVariable(
                value=self.docker_repo.repository_uri),
            'project_name': aws_codebuild.BuildEnvironmentVariable(
                value=config.ProjectName)
        },
        description='Scanning source for secrets',
        timeout=core.Duration.minutes(60),
    )

    # NOTE(review): 'codecommit:*' on all resources is broad — consider
    # scoping to this repository's ARN.
    cb_source_secretscan.add_to_role_policy(statement=iam.PolicyStatement(
        resources=['*'],
        actions=['codecommit:*']))

    # Build-and-deploy project for the Spring application image.
    cb_spring_build_deploy = aws_codebuild.PipelineProject(
        self,
        "SpringBuildDeploy",
        project_name=config.ProjectName + "-spring-build-deploy",
        build_spec=aws_codebuild.BuildSpec.from_source_filename(
            filename='configs/spring_build_deploy.yml'),
        environment=aws_codebuild.BuildEnvironment(
            privileged=True,
            build_image=aws_codebuild.LinuxBuildImage.AMAZON_LINUX_2_3,
            compute_type=aws_codebuild.ComputeType.MEDIUM),
        environment_variables={
            'ecr': aws_codebuild.BuildEnvironmentVariable(
                value=self.docker_repo.repository_uri),
            'tag': aws_codebuild.BuildEnvironmentVariable(value="release"),
            'project_name': aws_codebuild.BuildEnvironmentVariable(
                value=config.ProjectName)
        },
        description='Deploy to ECR and Push to Fargate',
        timeout=core.Duration.minutes(60),
    )

    # Grant every CodeBuild project permission to pull images from ECR.
    ecr_read_statement = iam.PolicyStatement(
        actions=[
            "ecr:GetAuthorizationToken",
            "ecr:BatchCheckLayerAvailability",
            "ecr:GetDownloadUrlForLayer",
            "ecr:BatchGetImage",
            "ecr:DescribeRepositories",
            "ecr:DescribeImages",
            "ecr:ListImages",
        ],
        resources=['*'])
    cb_spring_build_deploy.add_to_role_policy(ecr_read_statement)
    cb_docker_build_lint.add_to_role_policy(ecr_read_statement)
    cb_source_secretscan.add_to_role_policy(ecr_read_statement)

    # Stage wiring, in execution order.
    pipeline.add_stage(stage_name="Source", actions=[source_action])
    pipeline.add_stage(
        stage_name='Lint',
        actions=[
            codepipeline_actions.CodeBuildAction(
                action_name='DockerLintImages',
                input=source_output,
                project=cb_docker_build_lint,
                run_order=1,
            )
        ])
    pipeline.add_stage(
        stage_name='SecretScan',
        actions=[
            codepipeline_actions.CodeBuildAction(
                action_name='SourceSecretScanImages',
                input=source_output,
                project=cb_source_secretscan,
                run_order=1,
            )
        ])
    pipeline.add_stage(
        stage_name='BuildAndDeploy',
        actions=[
            codepipeline_actions.CodeBuildAction(
                action_name='SpringBuildAndDeploy',
                input=source_output,
                project=cb_spring_build_deploy,
                run_order=1,
            )
        ])

    self.docker_repo.grant_pull_push(cb_spring_build_deploy)
def __init__(self, scope: core.Construct, id: str, eks, redis, rds_cluster, **kwargs) -> None:
    """Multi-arch image pipeline: parallel ARM/AMD builds, then a post-build
    step (e.g. manifest assembly) against a shared ECR repository."""
    super().__init__(scope, id, **kwargs)

    self.eks = eks
    self.redis = redis
    self.rds_cluster = rds_cluster

    # Target image registry.
    ecr_repo = ecr.Repository(self, "ECRRep",
                              repository_name="springboot-multiarch")

    # Application source repository; clone URL exported for convenience.
    code = codecommit.Repository(self, "CodeRep",
                                 repository_name="springboot-multiarch")
    core.CfnOutput(self, "CodeCommitOutput",
                   value=code.repository_clone_url_http)

    # One CodeBuild project per architecture, plus a post-build project.
    arm_build = codebuild.PipelineProject(
        self,
        "ARMBuild",
        build_spec=codebuild.BuildSpec.from_source_filename(
            "cdk/pipeline/armbuild.yml"),
        environment=codebuild.BuildEnvironment(
            build_image=codebuild.LinuxBuildImage.AMAZON_LINUX_2_ARM,
            privileged=True),
        environment_variables=self.get_build_env_vars(ecr_repo))
    self.add_role_access_to_build(arm_build)

    amd_build = codebuild.PipelineProject(
        self,
        "AMDBuild",
        build_spec=codebuild.BuildSpec.from_source_filename(
            "cdk/pipeline/amdbuild.yml"),
        environment=codebuild.BuildEnvironment(
            build_image=codebuild.LinuxBuildImage.AMAZON_LINUX_2_3,
            privileged=True),
        environment_variables=self.get_build_env_vars(ecr_repo))
    self.add_role_access_to_build(amd_build)

    post_build = codebuild.PipelineProject(
        self,
        "PostBuild",
        build_spec=codebuild.BuildSpec.from_source_filename(
            "cdk/pipeline/post_build.yml"),
        environment=codebuild.BuildEnvironment(
            build_image=codebuild.LinuxBuildImage.AMAZON_LINUX_2_3,
            privileged=True),
        environment_variables=self.get_build_env_vars(ecr_repo))
    self.add_role_access_to_build(post_build)

    # Pipeline wiring: source -> parallel ARM/AMD builds -> post build.
    source_output = codepipeline.Artifact()
    arm_build_output = codepipeline.Artifact("ARMBuildOutput")
    amd_build_output = codepipeline.Artifact("AMDBuildOutput")
    post_build_output = codepipeline.Artifact("PostBuildOutput")

    codepipeline.Pipeline(
        self,
        "Pipeline",
        stages=[
            codepipeline.StageProps(
                stage_name="Source",
                actions=[
                    codepipeline_actions.CodeCommitSourceAction(
                        action_name="CodeCommit_Source",
                        repository=code,
                        output=source_output)
                ]),
            codepipeline.StageProps(
                stage_name="Build",
                actions=[
                    codepipeline_actions.CodeBuildAction(
                        action_name="ARM_Build",
                        project=arm_build,
                        input=source_output,
                        outputs=[arm_build_output]),
                    codepipeline_actions.CodeBuildAction(
                        action_name="AMD_Build",
                        project=amd_build,
                        input=source_output,
                        outputs=[amd_build_output]),
                ]),
            codepipeline.StageProps(
                stage_name="PostBuild",
                actions=[
                    codepipeline_actions.CodeBuildAction(
                        action_name="Post_Build",
                        project=post_build,
                        input=source_output,
                        outputs=[post_build_output])
                ]),
        ])
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    """Recommendations stack: SageMaker notebook role + instance, a CodeCommit
    repo for the Lambda source, the recommendations Lambda, and an API
    Gateway REST endpoint (POST /recommendations with CORS OPTIONS)."""
    super().__init__(scope, id, **kwargs)

    # Note: typo of role name is copied from original workshop
    mysfits_notebook_role = aws_iam.Role(
        self,
        "MysfitsNotbookRole",
        assumed_by=aws_iam.ServicePrincipal("sagemaker.amazonaws.com"),
    )

    # Broad permissions the notebook needs for training: SageMaker, ECR pulls,
    # CloudWatch metrics/logs, and S3 object access.
    mysfits_notebook_policy = aws_iam.PolicyStatement()
    mysfits_notebook_policy.add_actions(
        "sagemaker:*",
        "ecr:GetAuthorizationToken",
        "ecr:GetDownloadUrlForLayer",
        "ecr:BatchGetImage",
        "ecr:BatchCheckLayerAvailability",
        "cloudwatch:PutMetricData",
        "logs:CreateLogGroup",
        "logs:CreateLogStream",
        "logs:DescribeLogStreams",
        "logs:PutLogEvents",
        "logs:GetLogEvents",
        "s3:CreateBucket",
        "s3:ListBucket",
        "s3:GetBucketLocation",
        "s3:GetObject",
        "s3:PutObject",
        "s3:DeleteObject",
    )
    mysfits_notebook_policy.add_all_resources()

    # PassRole restricted to SageMaker as the receiving service.
    mysfits_notebook_pass_role_policy = aws_iam.PolicyStatement()
    mysfits_notebook_pass_role_policy.add_actions("iam:PassRole")
    mysfits_notebook_pass_role_policy.add_all_resources()
    mysfits_notebook_pass_role_policy.add_condition(
        "StringEquals", {"iam:PassedToService": "sagemaker.amazonaws.com"})

    aws_iam.Policy(
        self,
        "MysfitsNotebookPolicy",
        statements=[
            mysfits_notebook_pass_role_policy,
            mysfits_notebook_policy,
        ],
        roles=[mysfits_notebook_role],
    )

    # Registered on the stack for its side effect; the original bound it to an
    # unused local (`notebook_instance`), which has been removed.
    aws_sagemaker.CfnNotebookInstance(
        self,
        "MythicalMysfits-SageMaker-Notebook",
        instance_type="ml.t2.medium",
        role_arn=mysfits_notebook_role.role_arn,
    )

    # Repository for the recommendations Lambda source, with clone URLs exported.
    lambda_repository = aws_codecommit.Repository(
        self,
        "RecommendationsLambdaRepository",
        repository_name="MythicalMysfits-RecommendationsLambdaRepository",
    )
    core.CfnOutput(
        self,
        "recommandationsRepositoryCloneUrlHttp",
        value=lambda_repository.repository_clone_url_http,
        description="Recommendations Lambda Repository Clone Url HTTP",
    )
    core.CfnOutput(
        self,
        "recommandationsRepositoryCloneUrlSsh",
        value=lambda_repository.repository_clone_url_ssh,
        description="Recommendations Lambda Repository Clone Url SSH",
    )

    # The Lambda only needs to invoke the SageMaker endpoint.
    recommendations_lambda_function_policy_statement = aws_iam.PolicyStatement(
    )
    recommendations_lambda_function_policy_statement.add_actions(
        "sagemaker:InvokeEndpoint")
    recommendations_lambda_function_policy_statement.add_all_resources()

    mysfits_recommendations = aws_lambda.Function(
        self,
        "Function",
        handler="recommendations.recommend",
        runtime=aws_lambda.Runtime.PYTHON_3_6,
        description="A microservice backend to a SageMaker endpoint",
        memory_size=128,
        code=aws_lambda.Code.asset(
            os.path.join("..", "..", "lambda-recommendations/service")),
        timeout=core.Duration.seconds(30),
        initial_policy=[recommendations_lambda_function_policy_statement],
    )

    # Role API Gateway assumes when invoking the Lambda.
    questions_api_role = aws_iam.Role(
        self,
        "QuestionsApiRole",
        assumed_by=aws_iam.ServicePrincipal("apigateway.amazonaws.com"),
    )
    api_policy = aws_iam.PolicyStatement()
    api_policy.add_actions("lambda:InvokeFunction")
    api_policy.add_resources(mysfits_recommendations.function_arn)
    aws_iam.Policy(
        self,
        "QuestionsApiPolicy",
        policy_name="questions_api_policy",
        statements=[api_policy],
        roles=[questions_api_role],
    )

    questions_integration = aws_apigateway.LambdaIntegration(
        mysfits_recommendations,
        credentials_role=questions_api_role,
        integration_responses=[
            aws_apigateway.IntegrationResponse(
                status_code="200",
                response_templates={
                    "application/json": '{"status": "OK"}'
                },
            )
        ],
    )

    api = aws_apigateway.LambdaRestApi(
        self,
        "APIEndpoint",
        handler=mysfits_recommendations,
        rest_api_name="Recommendation API Service",
        proxy=False,
    )

    recommendations_method = api.root.add_resource("recommendations")

    # POST /recommendations — invokes the Lambda through the credentialed role.
    recommendations_method.add_method(
        "POST",
        questions_integration,
        method_responses=[
            aws_apigateway.MethodResponse(
                status_code="200",
                response_parameters={
                    "method.response.header.Access-Control-Allow-Headers":
                    True,
                    "method.response.header.Access-Control-Allow-Methods":
                    True,
                    "method.response.header.Access-Control-Allow-Origin":
                    True,
                },
            )
        ],
        authorization_type=aws_apigateway.AuthorizationType.NONE,
    )

    # OPTIONS /recommendations — CORS preflight via a mock integration.
    recommendations_method.add_method(
        "OPTIONS",
        aws_apigateway.MockIntegration(
            integration_responses=[
                aws_apigateway.IntegrationResponse(
                    status_code="200",
                    response_parameters={
                        "method.response.header.Access-Control-Allow-Headers":
                        "'Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token,X-Amz-User-Agent'",
                        "method.response.header.Access-Control-Allow-Origin":
                        "'*'",
                        "method.response.header.Access-Control-Allow-Credentials":
                        "'false'",
                        "method.response.header.Access-Control-Allow-Methods":
                        "'OPTIONS,GET,PUT,POST,DELETE'",
                    },
                )
            ],
            passthrough_behavior=aws_apigateway.PassthroughBehavior.NEVER,
            request_templates={"application/json": '{"statusCode": 200}'},
        ),
        method_responses=[
            aws_apigateway.MethodResponse(
                status_code="200",
                response_parameters={
                    "method.response.header.Access-Control-Allow-Headers":
                    True,
                    "method.response.header.Access-Control-Allow-Methods":
                    True,
                    "method.response.header.Access-Control-Allow-Credentials":
                    True,
                    "method.response.header.Access-Control-Allow-Origin":
                    True,
                },
            )
        ],
    )
def __init__(self, scope: core.Construct, id: str,
             props: KinesisFirehoseStackProps, **kwargs) -> None:
    """Provision the click-stream ingestion pipeline for Mythical Mysfits.

    Resources created (in dependency order):
      * a CodeCommit repository (plus clone-URL outputs) holding the
        stream-processor Lambda source,
      * an S3 bucket Firehose delivers enriched click records into,
      * a Lambda that enriches each click record (reads the mysfits table),
      * a Kinesis Firehose delivery stream wired to that Lambda and bucket,
      * an API Gateway REST API whose ``PUT /clicks`` proxies directly to
        ``firehose:PutRecord``, plus a CORS ``OPTIONS`` mock method.

    Args:
        scope: Parent construct.
        id: Logical id of this stack.
        props: Carries the DynamoDB table (``props.table``) and the API
            Gateway reference (``props.api_gateway``) this stack reads.
        **kwargs: Forwarded to the base Stack.
    """
    super().__init__(scope, id, **kwargs)

    # Source repository for the stream-processor Lambda; both clone URLs
    # are exported so developers can push code to it.
    lambda_repository = aws_codecommit.Repository(
        self,
        "ClicksProcessingLambdaRepository",
        repository_name="MythicalMysfits-ClicksProcessingLambdaRepository",
    )
    core.CfnOutput(
        self,
        "kinesisRepositoryCloneUrlHttp",
        value=lambda_repository.repository_clone_url_http,
        description="Clicks Processing Lambda Repository Clone URL HTTP",
    )
    core.CfnOutput(
        self,
        "kinesisRepositoryCloneUrlSsh",
        value=lambda_repository.repository_clone_url_ssh,
        description="Clicks Processing Lambda Repository Clone URL SSH",
    )

    # Destination bucket for the Firehose delivery stream.
    clicks_destination_bucket = aws_s3.Bucket(self, "Bucket", versioned=True)

    # The processor Lambda only needs read access to the mysfits table.
    lambda_function_policy = aws_iam.PolicyStatement()
    lambda_function_policy.add_actions("dynamodb:GetItem")
    lambda_function_policy.add_resources(props.table.table_arn)

    # Record-enrichment Lambda invoked by Firehose for every batch.
    mysfits_clicks_processor = aws_lambda.Function(
        self,
        "Function",
        handler="streamProcessor.processRecord",
        runtime=aws_lambda.Runtime.PYTHON_3_7,
        description=
        "An Amazon Kinesis Firehose stream processor that enriches click records to not just include a mysfitId, but also other attributes that can be analyzed later.",
        memory_size=128,
        code=aws_lambda.Code.asset("../../lambda-streaming-processor"),
        timeout=core.Duration.seconds(30),
        initial_policy=[lambda_function_policy],
        environment={
            # TODO: this seems better than having the user copy/paste it in,
            # but is it the best way?
            "MYSFITS_API_URL":
            "https://{}.execute-api.{}.amazonaws.com/prod/".format(
                props.api_gateway.ref, core.Aws.REGION)
        },
    )

    # Service role Firehose assumes to write to S3 and invoke the Lambda.
    # external_id pins the role to this account as a confused-deputy guard.
    firehose_delivery_role = aws_iam.Role(
        self,
        "FirehoseDeliveryRole",
        role_name="FirehoseDeliveryRole",
        assumed_by=aws_iam.ServicePrincipal("firehose.amazonaws.com"),
        external_id=core.Aws.ACCOUNT_ID,
    )
    # S3 permissions Firehose needs for multipart delivery into the bucket.
    firehose_delivery_policy_s3_statement = aws_iam.PolicyStatement()
    firehose_delivery_policy_s3_statement.add_actions(
        "s3:AbortMultipartUpload",
        "s3:GetBucketLocation",
        "s3:GetObject",
        "s3:ListBucket",
        "s3:ListBucketMultipartUploads",
        "s3:PutObject",
    )
    firehose_delivery_policy_s3_statement.add_resources(
        clicks_destination_bucket.bucket_arn)
    firehose_delivery_policy_s3_statement.add_resources(
        clicks_destination_bucket.arn_for_objects("*"))
    # Permission to invoke the enrichment Lambda from the delivery stream.
    firehose_delivery_policy_lambda_statement = aws_iam.PolicyStatement()
    firehose_delivery_policy_lambda_statement.add_actions(
        "lambda:InvokeFunction")
    firehose_delivery_policy_lambda_statement.add_resources(
        mysfits_clicks_processor.function_arn)
    firehose_delivery_role.add_to_policy(
        firehose_delivery_policy_s3_statement)
    firehose_delivery_role.add_to_policy(
        firehose_delivery_policy_lambda_statement)

    # L1 delivery stream: buffers up to 60 s / 50 MB, runs every batch
    # through the enrichment Lambda, then lands records under firehose/.
    mysfits_firehose_to_s3 = aws_kinesisfirehose.CfnDeliveryStream(
        self,
        "DeliveryStream",
        extended_s3_destination_configuration=aws_kinesisfirehose.
        CfnDeliveryStream.ExtendedS3DestinationConfigurationProperty(
            bucket_arn=clicks_destination_bucket.bucket_arn,
            buffering_hints=aws_kinesisfirehose.CfnDeliveryStream.
            BufferingHintsProperty(interval_in_seconds=60, size_in_m_bs=50),
            compression_format="UNCOMPRESSED",
            prefix="firehose/",
            role_arn=firehose_delivery_role.role_arn,
            processing_configuration=aws_kinesisfirehose.CfnDeliveryStream.
            ProcessingConfigurationProperty(
                enabled=True,
                processors=[
                    aws_kinesisfirehose.CfnDeliveryStream.ProcessorProperty(
                        parameters=[
                            aws_kinesisfirehose.CfnDeliveryStream.
                            ProcessorParameterProperty(
                                parameter_name="LambdaArn",
                                parameter_value=mysfits_clicks_processor.
                                function_arn,
                            )
                        ],
                        type="Lambda",
                    )
                ],
            ),
        ),
    )
    # Resource policy allowing this stream (and only this account) to
    # invoke the enrichment Lambda.
    aws_lambda.CfnPermission(
        self,
        "Permission",
        action="lambda:InvokeFunction",
        function_name=mysfits_clicks_processor.function_arn,
        principal="firehose.amazonaws.com",
        source_account=core.Aws.ACCOUNT_ID,
        source_arn=mysfits_firehose_to_s3.attr_arn,
    )

    # Role API Gateway assumes to put records onto the delivery stream.
    click_processing_api_role = aws_iam.Role(
        self,
        "ClickProcessingApiRole",
        assumed_by=aws_iam.ServicePrincipal("apigateway.amazonaws.com"),
    )
    api_policy = aws_iam.PolicyStatement()
    api_policy.add_actions("firehose:PutRecord")
    api_policy.add_resources(mysfits_firehose_to_s3.attr_arn)
    aws_iam.Policy(
        self,
        "ClickProcessingApiPolicy",
        policy_name="api_gateway_firehose_proxy_role",
        statements=[api_policy],
        roles=[click_processing_api_role],
    )

    # Public REST front door: PUT /clicks is a direct service integration
    # to firehose:PutRecord (no Lambda in the request path).
    api = aws_apigateway.RestApi(
        self,
        "APIEndpoint",
        rest_api_name="ClickProcessing API Service",
        endpoint_types=[aws_apigateway.EndpointType.REGIONAL],
    )
    clicks = api.root.add_resource("clicks")
    clicks.add_method(
        "PUT",
        aws_apigateway.AwsIntegration(
            service="firehose",
            integration_http_method="POST",
            action="PutRecord",
            options=aws_apigateway.IntegrationOptions(
                connection_type=aws_apigateway.ConnectionType.INTERNET,
                credentials_role=click_processing_api_role,
                integration_responses=[
                    aws_apigateway.IntegrationResponse(
                        status_code="200",
                        response_templates={
                            "application/json": '{"status": "OK"}'
                        },
                        response_parameters={
                            "method.response.header.Access-Control-Allow-Headers":
                            "'Content-Type'",
                            "method.response.header.Access-Control-Allow-Methods":
                            "'OPTIONS,PUT'",
                            "method.response.header.Access-Control-Allow-Origin":
                            "'*'",
                        },
                    )
                ],
                request_parameters={
                    "integration.request.header.Content-Type":
                    "'application/x-amz-json-1.1'"
                },
                # VTL template: wraps the incoming JSON body base64-encoded
                # into a PutRecord call against this delivery stream.
                request_templates={
                    "application/json":
                    """{ "DeliveryStreamName": "%s", "Record": { "Data": "$util.base64Encode($input.json('$'))" }}"""
                    % mysfits_firehose_to_s3.ref
                },
            ),
        ),
        method_responses=[
            aws_apigateway.MethodResponse(
                status_code="200",
                response_parameters={
                    "method.response.header.Access-Control-Allow-Headers":
                    True,
                    "method.response.header.Access-Control-Allow-Methods":
                    True,
                    "method.response.header.Access-Control-Allow-Origin":
                    True,
                },
            )
        ],
    )
    # CORS preflight: mock integration returning the allow-* headers.
    clicks.add_method(
        "OPTIONS",
        aws_apigateway.MockIntegration(
            integration_responses=[
                aws_apigateway.IntegrationResponse(
                    status_code="200",
                    response_parameters={
                        "method.response.header.Access-Control-Allow-Headers":
                        "'Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token,X-Amz-User-Agent'",
                        "method.response.header.Access-Control-Allow-Origin":
                        "'*'",
                        "method.response.header.Access-Control-Allow-Credentials":
                        "'false'",
                        "method.response.header.Access-Control-Allow-Methods":
                        "'OPTIONS,GET,PUT,POST,DELETE'",
                    },
                )
            ],
            passthrough_behavior=aws_apigateway.PassthroughBehavior.NEVER,
            request_templates={"application/json": '{"statusCode": 200}'},
        ),
        method_responses=[
            aws_apigateway.MethodResponse(
                status_code="200",
                response_parameters={
                    "method.response.header.Access-Control-Allow-Headers":
                    True,
                    "method.response.header.Access-Control-Allow-Methods":
                    True,
                    "method.response.header.Access-Control-Allow-Credentials":
                    True,
                    "method.response.header.Access-Control-Allow-Origin":
                    True,
                },
            )
        ],
    )
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    """Static-website delivery stack for the Petclinic sample.

    Wires together:
      * an API Gateway REST API with a CORS-enabled ``/api`` resource,
      * a public S3 website bucket hosting the static front end,
      * a CodeCommit repo + CodeBuild project that rewrites the front end's
        ``scripts/config.js`` so it points at the deployed API URL, and
      * a three-stage CodePipeline (Source -> Build -> Deploy to the bucket).

    Args:
        scope: Parent construct.
        id: Logical id of this stack.
        **kwargs: Forwarded to the base Stack.
    """
    super().__init__(scope, id, **kwargs)

    base_api = _apigw.RestApi(self, 'PetclinicApiGatewayWithCors')
    api_resource = base_api.root.add_resource('api')
    self.add_cors_options(api_resource)

    # Publicly readable website bucket; DESTROY is safe because it only
    # holds rebuildable pipeline output.
    website_bucket = _s3.Bucket(self, 'PetclinicWebsite',
                                website_index_document='index.html',
                                public_read_access=True,
                                removal_policy=core.RemovalPolicy.DESTROY)

    # Warm Lambda function Event rule.
    # NOTE(review): no target is attached to this rule here — presumably a
    # consumer wires it to the Lambda via the exported RuleArn; confirm.
    event_rule = _events.Rule(self, 'PetclinicLambdaWarmRule',
                              schedule=_events.Schedule.rate(
                                  core.Duration.minutes(3)))

    code = _commit.Repository(
        self,
        'ServerlessCode',
        repository_name='spring-petclinic-static-resource')

    # Build step substitutes the live API endpoint into config.js before
    # the static files are shipped to S3.
    build_project = _build.PipelineProject(
        self,
        'StaticWebBuild',
        build_spec=_build.BuildSpec.from_object({
            'version': 0.2,
            'phases': {
                'install': {
                    'runtime-versions': {
                        'java': 'openjdk8'
                    },
                    'commands': []
                },
                'build': {
                    'commands': [
                        'mv scripts/config.js scripts/config.js.origin',
                        'sed -e "s,http://localhost:8081/,$API_ENDPOINT,g" scripts/config.js.origin > scripts/config.js'
                    ]
                },
            },
            'artifacts': {
                'files': '**/*'
            },
        }),
        environment_variables={
            'API_ENDPOINT':
            _build.BuildEnvironmentVariable(value=base_api.url)
        },
        environment=_build.BuildEnvironment(
            build_image=_build.LinuxBuildImage.STANDARD_2_0))

    source_output = _pipeline.Artifact('SourceOutput')
    build_output = _pipeline.Artifact('BuildOutput')
    # Fix: local variable was previously misspelled "pipline".
    pipeline = _pipeline.Pipeline(
        self,
        'ServerlessPipeline',
        stages=[{
            'stageName': 'Source',
            'actions': [
                _action.CodeCommitSourceAction(
                    action_name='CodeCommit_Source',
                    repository=code,
                    output=source_output)
            ]
        }, {
            'stageName': 'Build',
            'actions': [
                _action.CodeBuildAction(action_name='CodeBuild_Static',
                                        project=build_project,
                                        input=source_output,
                                        outputs=[build_output])
            ]
        }, {
            'stageName': 'Deploy',
            'actions': [
                _action.S3DeployAction(action_name='Web_Static_Deploy',
                                       input=build_output,
                                       bucket=website_bucket)
            ]
        }])

    # Stack outputs consumed by other stacks / operators.
    core.CfnOutput(self, 'RuleArn',
                   export_name='RuleArn',
                   value=event_rule.rule_arn)
    core.CfnOutput(self, 'PetclinicApiGatewayWithCorsId',
                   export_name='PetclinicApiGatewayWithCorsId',
                   value=base_api.rest_api_id)
    core.CfnOutput(self, "PetclinicWebsiteUrl",
                   export_name="PetclinicWebsiteUrl",
                   value=website_bucket.bucket_website_url)
def __init__(self, scope: Stack, prefix: str, artifacts_bucket: IBucket,
             source_repository: aws_codecommit.Repository,
             ecr_repository: aws_ecr.Repository, task_def: str, app_spec: str,
             main_listener: aws_elasticloadbalancingv2.CfnListener,
             deployments_listener: aws_elasticloadbalancingv2.CfnListener,
             ecs_cluster: aws_ecs.Cluster, ecs_service: CfnService,
             production_target_group, deployment_target_group):
    """Build the ECR -> ECS blue/green deployment machinery for a Fargate service.

    Creates a CodeDeploy application, a custom-resource-backed deployment
    group, a CodeCommit repo seeded with appspec/taskdef files, and a
    two-stage CodePipeline (ECR image + config sources -> CodeDeploy ECS
    deploy). All created resources are exposed as ``self.`` attributes.

    Args:
        scope: Stack the resources are added to.
        prefix: Name prefix applied to every resource created here.
        artifacts_bucket: Bucket used for pipeline artifacts.
        source_repository: Repo with the service source (passed through to
            the custom DeploymentGroup).
        ecr_repository: Image repository the pipeline watches for new pushes.
        task_def: Task definition (taskdef) file content for blue/green deploys.
        app_spec: AppSpec file content for blue/green deploys.
        main_listener: Production ALB listener.
        deployments_listener: Test-traffic ALB listener used during deploys.
        ecs_cluster: Cluster hosting the service.
        ecs_service: The L1 service being deployed to.
        production_target_group: Production target group.
            NOTE(review): untyped — presumably an ELBv2 target group; confirm.
        deployment_target_group: Replacement (green) target group.
            NOTE(review): untyped — presumably an ELBv2 target group; confirm.
    """
    # CodeDeploy application that owns the blue/green deployment group.
    self.application = aws_codedeploy.EcsApplication(
        scope,
        prefix + 'FargateCodeDeployApplication',
        application_name=prefix + 'FargateCodeDeployApplication',
    )

    # The real deployment group is created through a custom resource
    # (ECS blue/green groups were not natively supported by CDK here).
    self.deployment_group_custom = DeploymentGroup(
        stack=scope,
        prefix=prefix,
        code_repository=source_repository,
        task_definition=task_def,
        app_spec=app_spec,
        ecs_application=self.application,
        main_listener=main_listener,
        deployments_listener=deployments_listener,
        ecs_cluster=ecs_cluster,
        production_target_group=production_target_group,
        deployment_target_group=deployment_target_group).get_resource()

    # The custom resource must only run once the service and cluster exist.
    self.deployment_group_custom.node.add_dependency(ecs_service)
    self.deployment_group_custom.node.add_dependency(ecs_cluster)

    # CDK-side handle to the deployment group created above (by-attributes
    # import, since the group itself is not a CDK construct).
    self.deployment_group = aws_codedeploy.EcsDeploymentGroup.from_ecs_deployment_group_attributes(
        scope,
        prefix + 'FargateDeploymentGroup',
        application=self.application,
        deployment_group_name=prefix + 'FargateDeploymentGroup',
    )
    self.deployment_group.node.add_dependency(self.deployment_group_custom)

    # Repo holding the appspec/taskdef pair the deploy action consumes.
    self.deployment_config_repository = aws_codecommit.Repository(
        scope,
        prefix + 'FargateDeploymentConfigRepository',
        description=
        'Repository containing appspec and taskdef files for ecs code-deploy blue/green deployments.',
        repository_name=prefix.lower() + '-deployment-config')

    # Custom resource that commits the appspec/taskdef files into the repo.
    self.commit_custom = DeploymentConfig(
        stack=scope,
        prefix=prefix,
        code_repository=self.deployment_config_repository,
        task_definition=task_def,
        app_spec=app_spec).get_resource()

    # Pipeline artifacts: the ECR image detail and the config files.
    self.ecr_repository_output_artifact = aws_codepipeline.Artifact(
        'EcsImage')
    self.config_output_artifact = aws_codepipeline.Artifact('EcsConfig')

    # Two-stage pipeline: both sources run in parallel (run_order=1),
    # then CodeDeploy performs the blue/green ECS deployment, substituting
    # the pushed image for the IMAGE1_NAME placeholder in the taskdef.
    self.ecr_to_ecs_pipeline = aws_codepipeline.Pipeline(
        scope,
        prefix + 'FargateEcrToEcsPipeline',
        artifact_bucket=artifacts_bucket,
        pipeline_name=prefix + 'FargateEcrToEcsPipeline',
        stages=[
            aws_codepipeline.StageProps(
                stage_name='SourceStage',
                actions=[
                    aws_codepipeline_actions.EcrSourceAction(
                        action_name='SourceEcrAction',
                        output=self.ecr_repository_output_artifact,
                        repository=ecr_repository,
                        run_order=1,
                    ),
                    aws_codepipeline_actions.CodeCommitSourceAction(
                        action_name='SourceCodeCommitAction',
                        output=self.config_output_artifact,
                        repository=self.deployment_config_repository,
                        branch='master',
                        run_order=1,
                    )
                ]),
            aws_codepipeline.StageProps(
                stage_name='DeployStage',
                actions=[
                    aws_codepipeline_actions.CodeDeployEcsDeployAction(
                        action_name='DeployAction',
                        deployment_group=self.deployment_group,
                        app_spec_template_input=self.
                        config_output_artifact,
                        task_definition_template_input=self.
                        config_output_artifact,
                        container_image_inputs=[
                            aws_codepipeline_actions.
                            CodeDeployEcsContainerImageInput(
                                input=self.ecr_repository_output_artifact,
                                task_definition_placeholder='IMAGE1_NAME')
                        ],
                        run_order=1)
                ])
        ])
    # The pipeline's config source needs the seeded commit to exist first.
    self.ecr_to_ecs_pipeline.node.add_dependency(self.commit_custom)
def __init__(
    self, scope: core.Construct, id: str, branch: str, sandbox_account: str, **kwargs
) -> None:
    """Init the Construct for creating hd-auto-service-catalog.

    Builds the complete CICD machinery for a Service Catalog portfolio:
    CloudFormation parameters, an artifacts bucket (with an emptying
    custom resource), the CodeCommit repo, a Lambda layer, a CodeBuild
    validation project, the portfolio itself with principal associations,
    several helper Lambdas, and the five-stage CodePipeline that keeps
    the catalog in sync with the repo. Resource ids are suffixed with
    *branch* so parallel A/B deployments do not collide.

    Args:
        scope: CDK Parent Stack aap.py
        id: Name of the stack: "hd-auto-service-catalog"
        branch: string for A/B Deployment
        sandbox_account: Sandbox account id
        **kwargs: Forwarded to the base Stack.
    """
    super().__init__(scope, id, **kwargs)

    # ##############################################################
    # Tagging List — collects constructs tagged at the end via Tags.
    # ##############################################################
    tagging_list = []

    # ##############################################################
    # Parameters
    # ##############################################################
    # ===============================
    # App name
    app_name = core.CfnParameter(
        self,
        id="AppName-{}".format(branch),
        description="Name of the app",
        type="String",
        default="hd-auto-cicd-service-catalog",
    )
    # ===============================
    # Environment name
    env_name = core.CfnParameter(
        self,
        id="EnvName-{}".format(branch),
        description="Name of the environment",
        type="String",
        default="auto",
    )
    # ===============================
    # IAM Role and Policy parameter (launch-constraint role name)
    role_name = core.CfnParameter(
        self,
        id="ConstraintRoleName-{}".format(branch),
        description="Name of the launch constraint role",
        type="String",
        default="CrossAccountAdmin",
    )
    # ===============================
    # Principal management lambdas (live in the sandbox account)
    unassign_lambda = core.CfnParameter(
        self,
        id="UnassignPrincipalLambdaName-{}".format(branch),
        description="Name of the unassign principal management Lambda",
        type="String",
        default="UnassignPrincipalFromServiceCatalog",
    )
    assign_lambda = core.CfnParameter(
        self,
        id="AssignPrincipalLambdaName-{}".format(branch),
        description="Name of the assign principal management Lambda",
        type="String",
        default="AssignPrincipalToServiceCatalog",
    )
    # ===============================
    # Branch name: master/dmz track directly; anything else is treated as
    # "feature/<suffix>" derived from the branch id.
    if branch == "master":
        branch_name = "master"
    elif branch == "dmz":
        branch_name = "dmz"
    else:
        branch_name = "feature/{}".format(branch.split("-")[1])
    # ===============================
    # Path name
    path_name = core.CfnParameter(
        self,
        id="Path-{}".format(branch),
        description="CodeCommit repository folder for Service Catalogs Products",
        type="String",
        default="service_catalog/products/",
    )
    # ===============================
    # Path for the configuration INI
    path_ini = core.CfnParameter(
        self,
        id="ConfigINI-{}".format(branch),
        description="Configuration file path",
        type="String",
        default="service_catalog/config/config_{}.ini".format(branch.split("-")[0]),
    )
    # ===============================
    # Path for the template store
    template_store = core.CfnParameter(
        self,
        id="TemplateStore-{}".format(branch),
        description="S3 Bucket and Folder evaluated CloudFormation Templates",
        type="String",
        default="template-store/",
    )

    # ##############################################################
    # Artifacts Bucket
    # ##############################################################
    artifact_bucket = _s3.Bucket(
        self,
        id="ArtifactsBucket-{}".format(branch),
        bucket_name="my-sandbox-cicd-build-artifacts-{}".format(
            branch.split("-")[0]
        ),
        removal_policy=core.RemovalPolicy.DESTROY,
    )
    # Permissions the bucket-emptying Lambda needs so the DESTROY removal
    # policy can actually delete a non-empty bucket.
    empty_s3_policy = _iam.PolicyStatement(
        effect=_iam.Effect.ALLOW,
        actions=[
            "s3:DeleteBucket",
            "s3:ListBucket",
            "s3:DeleteObjects",
            "s3:DeleteObject",
        ],
        resources=[artifact_bucket.bucket_arn, artifact_bucket.bucket_arn + "/*",],
    )
    empty_bucket_lambda = Lambda.create_lambda(
        self,
        name="EmptyArtifactsBucket-{}".format(branch),
        function_name="EmptyArtifactsBucket-{}".format(branch),
        handler="empty_bucket.empty_bucket",
        code_injection_method=_lambda.Code.asset(path="./src/lambda/empty_bucket/"),
        lambda_runtime=_lambda.Runtime.PYTHON_3_7,
        amount_of_memory=128,
        timeout=30,
        amount_of_retries=0,
        rules_to_invoke=None,
        events_to_invoke=None,
        lambda_layers_to_use=None,
        policy_statements=[empty_s3_policy,],
        log_retention=None,
        environment_vars=[],
    )
    # Custom resource that empties the bucket on stack deletion.
    cr_empty_bucket = core.CustomResource(
        self,
        id="CR-EmptyBucket-{}".format(branch),
        service_token=empty_bucket_lambda.lambda_function_object.function_arn,
        properties={"BUCKET_NAME": artifact_bucket.bucket_name,},
        removal_policy=core.RemovalPolicy.DESTROY,
    )
    cr_empty_bucket.node.add_dependency(artifact_bucket)
    tagging_list.append(cr_empty_bucket)
    # Service Catalog must be able to read evaluated templates from the
    # template-store prefix when provisioning products.
    artifact_bucket.add_to_resource_policy(
        permission=_iam.PolicyStatement(
            effect=_iam.Effect.ALLOW,
            actions=["s3:GetObject"],
            resources=[artifact_bucket.bucket_arn + "/template-store/*",],
            principals=[_iam.ServicePrincipal("servicecatalog"),],
        )
    )
    tagging_list.append(artifact_bucket)

    # ##############################################################
    # Code repo — created once on master, imported on all other branches.
    # ##############################################################
    if branch == "master":
        service_catalog_git = _code.Repository(
            self,
            id="ServiceCatalogGit",
            repository_name="hd-auto-service-catalog",
            description="This git hosts all templates for the ServiceCatalog and the CICD itself.",
        )
        tagging_list.append(service_catalog_git)
    else:
        service_catalog_git = _code.Repository.from_repository_name(
            self,
            id="ServiceCatalogGit",
            repository_name="hd-auto-service-catalog",
        )
        tagging_list.append(service_catalog_git)

    # ##############################################################
    # Lambda Layer shared by the CICD Lambdas below.
    # ##############################################################
    source_code = _lambda.Code.from_asset("./src/lambda_layer/")
    layer = _lambda.LayerVersion(
        self,
        id="Python3_7_Layer-{}".format(branch),
        code=source_code,
        compatible_runtimes=[_lambda.Runtime.PYTHON_3_7],
    )
    tagging_list.append(layer)

    # ##############################################################
    # CodeBuild Project — validates/tests the CFN product templates.
    # ##############################################################
    build_project = _codebuild.PipelineProject(
        self,
        id="BuildProject-{}".format(branch),
        project_name="hd-auto-cicd-service-catalog-{}".format(branch),
        description="Build project for the Service Catalog pipeline",
        environment=_codebuild.BuildEnvironment(
            build_image=_codebuild.LinuxBuildImage.STANDARD_4_0, privileged=True
        ),
        cache=_codebuild.Cache.bucket(artifact_bucket, prefix="codebuild-cache"),
        build_spec=_codebuild.BuildSpec.from_source_filename("./buildspec.yaml"),
    )
    tagging_list.append(build_project)
    # CodeBuild IAM permissions to read write to s3
    artifact_bucket.grant_read_write(build_project)
    # Build and create test runs for templates
    build_project.add_to_role_policy(
        statement=_iam.PolicyStatement(
            effect=_iam.Effect.ALLOW,
            not_actions=["aws-portal:*", "organizations:*"],
            resources=["*"],  # No further restriction due to IAM!
        )
    )

    # ##############################################################
    # Service Catalog portfolio
    # ##############################################################
    portfolio = _servicecatalog.CfnPortfolio(
        self,
        id="BasicPortfolio-{}".format(branch),
        display_name="hd-mdp-portfolio-{}".format(branch),
        provider_name="MDP-Team",
        accept_language="en",
        description="""
        This portfolio contains AWS Services combined into technical and functional approved architectures.
        You don't need IAM permissions to run those products. You will use them.
        """,
    )
    # Permissions for the portfolio-cleanup Lambda (runs on delete).
    remove_portfolio_policy = _iam.PolicyStatement(
        effect=_iam.Effect.ALLOW,
        actions=[
            "servicecatalog:SearchProductsAsAdmin",
            "servicecatalog:DeleteProduct",
            "servicecatalog:DeleteConstraint",
            "servicecatalog:ListConstraintsForPortfolio",
            "servicecatalog:DisassociatePrincipalFromPortfolio",
            "servicecatalog:DisassociateProductFromPortfolio",
        ],
        resources=["*",],
    )
    # IAM access scoped to the launch-constraint role only.
    iam_policy = _iam.PolicyStatement(
        effect=_iam.Effect.ALLOW,
        actions=[
            "iam:GetRole",
            "iam:PassRole",
            "iam:CreateRole",
            "iam:DeleteRole",
            "iam:ListRoles",
            "iam:PutRolePolicy",
            "iam:DeleteRolePolicy",
            "iam:DeletePolicy",
        ],
        resources=[
            "arn:aws:iam::{}:role/{}".format(
                core.Aws.ACCOUNT_ID, role_name.value_as_string
            ),
        ],
    )
    remove_products_lambda = Lambda.create_lambda(
        self,
        name="RemoveProductsFromPortfolio-{}".format(branch),
        function_name="RemoveProductsFromPortfolio-{}".format(branch),
        handler="remove_portfolio.remove_portfolio",
        code_injection_method=_lambda.Code.asset(
            path="./src/lambda/remove_portfolio/"
        ),
        lambda_runtime=_lambda.Runtime.PYTHON_3_7,
        amount_of_memory=128,
        timeout=30,
        amount_of_retries=0,
        rules_to_invoke=None,
        events_to_invoke=None,
        lambda_layers_to_use=None,
        policy_statements=[remove_portfolio_policy, iam_policy],
        log_retention=None,
        environment_vars=[
            {"Key": "SANDBOX_ACCOUNT_ID", "Value": "{}".format(sandbox_account),}
        ],
    )
    # Custom resource detaching products/principals before portfolio delete.
    cr_remove_products = core.CustomResource(
        self,
        id="CR-RemoveProductsFromPortfolio-{}".format(branch),
        service_token=remove_products_lambda.lambda_function_object.function_arn,
        properties={"PORTFOLIO_ID": portfolio.ref,},
        removal_policy=core.RemovalPolicy.DESTROY,
    )
    cr_remove_products.node.add_dependency(portfolio)

    iam_role_list = [role_name.value_as_string]
    # NOTE: both branches of this if/else currently do the same principal
    # association; the master branch is reserved for portfolio sharing.
    if branch == "master":
        # TODO: Accept Portfolio share principal management
        # for idx, account in enumerate(account_list):
        #     _servicecatalog.CfnPortfolioShare(
        #         self,
        #         id="PortfolioSharing-{}-{}".format(branch, idx),
        #         account_id=account,
        #         portfolio_id=portfolio.ref,
        #         accept_language="en",
        #     )
        for idx, role in enumerate(iam_role_list):
            _servicecatalog.CfnPortfolioPrincipalAssociation(
                self,
                id="PrincipalAssociation-{}-{}".format(branch, idx),
                portfolio_id=portfolio.ref,
                principal_arn="arn:aws:iam::{}:role/{}".format(
                    core.Aws.ACCOUNT_ID, role
                ),
                principal_type="IAM",
                accept_language="en",
            )
        core.CfnOutput(
            self, id="PortfolioId-{}".format(branch), value=portfolio.ref
        )
        tagging_list.append(portfolio)
    else:
        for idx, role in enumerate(iam_role_list):
            _servicecatalog.CfnPortfolioPrincipalAssociation(
                self,
                id="PrincipalAssociation-{}-{}".format(branch, idx),
                portfolio_id=portfolio.ref,
                principal_arn="arn:aws:iam::{}:role/{}".format(
                    core.Aws.ACCOUNT_ID, role
                ),
                principal_type="IAM",
                accept_language="en",
            )
        core.CfnOutput(
            self, id="PortfolioId-{}".format(branch), value=portfolio.ref
        )
        tagging_list.append(portfolio)

    # ##############################################################
    # Lambda Permissions — statements shared by the CICD Lambdas below.
    # ##############################################################
    s3_policy = _iam.PolicyStatement(
        effect=_iam.Effect.ALLOW,
        actions=[
            "s3:GetObject*",
            "s3:GetBucket*",
            "s3:List*",
            "s3:DeleteObject*",
            "s3:PutObject*",
            "s3:Abort*",
        ],
        resources=[artifact_bucket.bucket_arn, artifact_bucket.bucket_arn + "/*"],
    )
    codecommit_policy = _iam.PolicyStatement(
        effect=_iam.Effect.ALLOW,
        actions=[
            "codecommit:GetDifferences",
            "codecommit:GetBranch",
            "codecommit:GetCommit",
        ],
        resources=[service_catalog_git.repository_arn],
        conditions={"StringEquals": {"aws:RequestedRegion": "eu-central-1"}},
    )
    codebuild_policy = _iam.PolicyStatement(
        effect=_iam.Effect.ALLOW,
        actions=["codebuild:StartBuild", "codebuild:UpdateProject*"],
        resources=[build_project.project_arn],
        conditions={"StringEquals": {"aws:RequestedRegion": "eu-central-1"}},
    )
    service_catalog_policy = _iam.PolicyStatement(
        effect=_iam.Effect.ALLOW,
        actions=[
            "servicecatalog:CreateProduct",
            "servicecatalog:CreateProvisioningArtifact",
            "servicecatalog:UpdateProvisioningArtifact",
            "servicecatalog:DeleteProvisioningArtifact",
            "servicecatalog:ListProvisioningArtifacts",
            "servicecatalog:ListPortfolios",
            "servicecatalog:SearchProductsAsAdmin",
            "servicecatalog:AssociateProductWithPortfolio",
            "servicecatalog:AssociatePrincipalWithPortfolio",
            "servicecatalog:DisassociatePrincipalFromPortfolio",
            "servicecatalog:DisassociateProductFromPortfolio",
            "servicecatalog:DeleteProduct",
            "servicecatalog:CreatePortfolioShare",
            "servicecatalog:AcceptPortfolioShare",
            "servicecatalog:CreateConstraint",
            "servicecatalog:DeleteConstraint",
            "servicecatalog:ListConstraintsForPortfolio",
        ],
        resources=["*"],
    )
    # Cross-account hop into the sandbox via the constraint role.
    sts_policy = _iam.PolicyStatement(
        effect=_iam.Effect.ALLOW,
        actions=["sts:AssumeRole"],
        resources=[
            "arn:aws:iam::{}:role/{}".format(
                sandbox_account, role_name.value_as_string
            ),
        ],
    )
    codepipeline_policy = _iam.PolicyStatement(
        effect=_iam.Effect.ALLOW,
        actions=[
            "codepipeline:PutJobFailureResult",  # Supports only a wildcard (*) in the policy Resource element.
            "codepipeline:PutJobSuccessResult",  # Supports only a wildcard (*) in the policy Resource element.
        ],
        # https://docs.aws.amazon.com/codepipeline/latest/userguide/permissions-reference.html
        resources=["*"],
        conditions={"StringEquals": {"aws:RequestedRegion": "eu-central-1"}},
    )
    # Manage the two principal-management Lambdas in the sandbox account.
    lambda_policy = _iam.PolicyStatement(
        effect=_iam.Effect.ALLOW,
        actions=[
            "lambda:GetFunction",
            "lambda:CreateFunction",
            "lambda:DeleteFunction",
            "lambda:AddPermission",
            "lambda:RemovePermission",
            "lambda:CreateEventSourceMapping",
            "lambda:DeleteEventSourceMapping",
            "lambda:InvokeFunction",
            "lambda:UpdateFunctionCode",
            "lambda:UpdateFunctionConfiguration",
        ],
        resources=[
            "arn:aws:lambda:{}:{}:function:{}-{}".format(
                core.Aws.REGION,
                sandbox_account,
                unassign_lambda.value_as_string,
                sandbox_account,
            ),
            "arn:aws:lambda:{}:{}:function:{}-{}".format(
                core.Aws.REGION,
                sandbox_account,
                assign_lambda.value_as_string,
                sandbox_account,
            ),
        ],
        conditions={"StringEquals": {"aws:RequestedRegion": "eu-central-1"}},
    )

    # ##############################################################
    # CICD Lambdas
    # ##############################################################
    # ==========================
    # Get Latest Git Meta Data — diffs the last commit for the pipeline.
    git_metadata = Lambda.create_lambda(
        self,
        name="GetLastGitChanges-{}".format(branch),
        function_name="GetLastGitChanges-{}".format(branch,),
        handler="git_metadata.get_changes",
        code_injection_method=_lambda.Code.asset(path="./src/lambda/git_metadata/"),
        lambda_runtime=_lambda.Runtime.PYTHON_3_7,
        amount_of_memory=128,
        timeout=30,
        amount_of_retries=0,
        rules_to_invoke=None,
        events_to_invoke=None,
        lambda_layers_to_use=[layer],
        policy_statements=[
            codecommit_policy,
            codebuild_policy,
            codepipeline_policy,
            service_catalog_policy,
        ],
        log_retention=None,
        environment_vars=[
            {
                "Key": "REPOSITORY_NAME",
                "Value": "{}".format(service_catalog_git.repository_name),
            },
        ],
    )
    # ==========================
    # Principal Management Lambda — (un)assigns portfolio principals.
    principal_management = Lambda.create_lambda(
        self,
        name="PrincipalManagement-{}".format(branch),
        function_name="PrincipalManagement-{}".format(branch),
        handler="principal_management.principal_management",
        code_injection_method=_lambda.Code.asset(
            path="./src/lambda/principal_management/"
        ),
        lambda_runtime=_lambda.Runtime.PYTHON_3_7,
        amount_of_memory=1024,
        timeout=120,
        amount_of_retries=0,
        rules_to_invoke=None,
        events_to_invoke=None,
        lambda_layers_to_use=[layer],
        policy_statements=[
            iam_policy,
            lambda_policy,
            sts_policy,
            service_catalog_policy,
            codepipeline_policy,
            codecommit_policy,
        ],
        log_retention=None,
        environment_vars=[
            {"Key": "SANDBOX_ACCOUNT_ID", "Value": "{}".format(sandbox_account),}
        ],
    )
    # ==========================
    # Sync Service Catalog Lambda — reconciles repo content into products.
    service_catalog_synchronisation = Lambda.create_lambda(
        self,
        name="UpdateServiceCatalog-{}".format(branch),
        function_name="UpdateServiceCatalog-{}".format(branch),
        handler="sync_catalog.service_catalog_janitor",
        code_injection_method=_lambda.Code.asset(
            path="./src/lambda/update_servicecatalog/"
        ),
        lambda_runtime=_lambda.Runtime.PYTHON_3_7,
        amount_of_memory=1024,
        timeout=120,
        amount_of_retries=0,
        rules_to_invoke=None,
        events_to_invoke=None,
        lambda_layers_to_use=[layer],
        policy_statements=[
            sts_policy,
            service_catalog_policy,
            codepipeline_policy,
            codecommit_policy,
            iam_policy,
            s3_policy,
        ],
        log_retention=None,
        environment_vars=[
            {
                "Key": "LOCAL_ROLE_NAME_SC",
                "Value": "{}".format(role_name.value_as_string),
            },
            {"Key": "SANDBOX_ACCOUNT_ID", "Value": "{}".format(sandbox_account),},
            {
                "Key": "REPOSITORY_NAME",
                "Value": "{}".format(service_catalog_git.repository_name),
            },
            {"Key": "PATH_INI", "Value": "{}".format(path_ini.value_as_string)},
            {"Key": "PATH", "Value": "{}".format(path_name.value_as_string)},
            {"Key": "BUCKET", "Value": "{}".format(artifact_bucket.bucket_name)},
            {
                "Key": "S3_PATH",
                "Value": "{}".format(template_store.value_as_string),
            },
        ],
    )

    # ##############################################################
    # CodePipeline
    # ##############################################################
    # General output
    source_output = _codepipeline.Artifact("git-change")
    tested_source_files = _codepipeline.Artifact("tested-cfn")

    # Five stages: source -> diff extraction -> template testing ->
    # principal management -> catalog synchronisation. Later stages read
    # the earlier stages' outputs through CodePipeline variable namespaces
    # ("source", "filtered_source").
    cicd_pipeline = _codepipeline.Pipeline(
        self,
        id="ServiceCatalogPipeline-{}".format(branch),
        pipeline_name="ServiceCatalog-CICD-{}".format(branch),
        artifact_bucket=artifact_bucket,
        stages=[
            _codepipeline.StageProps(
                stage_name="Source_CFN-Templates",
                actions=[
                    _codepipeline_actions.CodeCommitSourceAction(
                        action_name="SourceControlCFNTemplates",
                        output=source_output,
                        repository=service_catalog_git,
                        variables_namespace="source",
                        branch=branch_name,
                    ),
                ],
            ),
            _codepipeline.StageProps(
                stage_name="Getting_CFN-Template",
                actions=[
                    _codepipeline_actions.LambdaInvokeAction(
                        action_name="GettingCFNTemplate",
                        lambda_=git_metadata.lambda_function_object,
                        user_parameters={
                            "before_commit": "",
                            "after_commit": "#{source.CommitId}",
                        },
                        variables_namespace="filtered_source",
                    )
                ],
            ),
            _codepipeline.StageProps(
                stage_name="Testing_CFN-Template",
                actions=[
                    _codepipeline_actions.CodeBuildAction(
                        type=_codepipeline_actions.CodeBuildActionType.BUILD,
                        action_name="TestingCFNTemplates",
                        project=build_project,
                        input=source_output,
                        outputs=[tested_source_files],
                        environment_variables={
                            "PIPELINE_NAME": _codebuild.BuildEnvironmentVariable(
                                value="ServiceCatalog-CICD-{}".format(branch),
                                type=_codebuild.BuildEnvironmentVariableType.PLAINTEXT,
                            ),
                            "FILES_ADDED": _codebuild.BuildEnvironmentVariable(
                                value="#{filtered_source.added_files}",
                                type=_codebuild.BuildEnvironmentVariableType.PLAINTEXT,
                            ),
                            "FILES_MODIFIED": _codebuild.BuildEnvironmentVariable(
                                value="#{filtered_source.modified_files}",
                                type=_codebuild.BuildEnvironmentVariableType.PLAINTEXT,
                            ),
                            "FILES_DELETED": _codebuild.BuildEnvironmentVariable(
                                value="#{filtered_source.deleted_files}",
                                type=_codebuild.BuildEnvironmentVariableType.PLAINTEXT,
                            ),
                            "JOB_ID": _codebuild.BuildEnvironmentVariable(
                                value="#{filtered_source.job_id}",
                                type=_codebuild.BuildEnvironmentVariableType.PLAINTEXT,
                            ),
                            "REPOSITORY_BRANCH": _codebuild.BuildEnvironmentVariable(
                                value="#{source.BranchName}",
                                type=_codebuild.BuildEnvironmentVariableType.PLAINTEXT,
                            ),
                            "REPOSITORY_NAME": _codebuild.BuildEnvironmentVariable(
                                value="#{source.RepositoryName}",
                                type=_codebuild.BuildEnvironmentVariableType.PLAINTEXT,
                            ),
                        },
                    )
                ],
            ),
            _codepipeline.StageProps(
                stage_name="Principal_Management",
                actions=[
                    _codepipeline_actions.LambdaInvokeAction(
                        action_name="PrincipalManagement",
                        lambda_=principal_management.lambda_function_object,
                        user_parameters={
                            "job_id": "#{filtered_source.job_id}",
                            "commit_id": "#{filtered_source.commit_id}",
                            "portfolio_id": portfolio.ref,
                        },
                    )
                ],
            ),
            _codepipeline.StageProps(
                stage_name="Update_Servicecatalog",
                actions=[
                    _codepipeline_actions.LambdaInvokeAction(
                        action_name="UpdateServiceCatalog",
                        lambda_=service_catalog_synchronisation.lambda_function_object,
                        inputs=[source_output],
                        user_parameters={
                            "modified_files": "#{filtered_source.modified_files}",
                            "added_files": "#{filtered_source.added_files}",
                            "deleted_files": "#{filtered_source.deleted_files}",
                            "job_id": "#{filtered_source.job_id}",
                            "commit_id": "#{filtered_source.commit_id}",
                            "portfolio_id": portfolio.ref,
                        },
                    )
                ],
            ),
        ],
    )
    # The pipeline role itself needs to read branch/commit metadata.
    cicd_pipeline.add_to_role_policy(
        statement=_iam.PolicyStatement(
            effect=_iam.Effect.ALLOW,
            actions=["codecommit:GetBranch", "codecommit:GetCommit"],
            resources=[service_catalog_git.repository_arn],
        )
    )
    tagging_list.append(cicd_pipeline)

    # ##############################################################
    # Tag resources
    # ##############################################################
    Tags.tag_resources(
        resources_list=tagging_list,
        keys_list=["app", "env"],
        values_list=[app_name.value_as_string, env_name.value_as_string],
    )
    # Export the shared layer ARN for other stacks via SSM.
    _ssm.StringParameter(
        self,
        id="LambdaLayerExport-{}".format(branch),
        parameter_name="/hd/mdp/{}/lambda/layer-pandas-numpy-servicecatalog".format(
            branch
        ),
        description="Lambda Layer ARN",
        string_value=layer.layer_version_arn,
    )
def __init__(self, scope: Construct, id: str, **kwargs) -> None:
    """Pipeline stack for the serverless REST API (python-http-cdk variant).

    Creates a CodeCommit repository (name supplied via the
    ``CodeCommitRepoName`` CloudFormation parameter), a self-mutating
    CdkPipeline that builds and synthesizes the app, a testing stage that
    runs integration tests against the deployed test stack, and a
    production stage gated by a manual approval action.

    NOTE(review): uses the legacy ``pipelines.CdkPipeline`` /
    ``SimpleSynthAction`` / ``ShellScriptAction`` API (pre CDK pipelines GA).
    """
    super().__init__(scope, id, **kwargs)

    # Repository name is a CfnParameter so it can be overridden at deploy time.
    CODECOMMIT_REPO_NAME = cdk.CfnParameter(
        self,
        "CodeCommitRepoName",
        type="String",
        default="serverless-api-pipeline-cdk",
        description="CodeCommit repository with the project code"
    ).value_as_string
    PIPELINE_NAME = "serverless-api-pipeline-cdk"

    # Two artifacts: the raw source checkout, and the synthesized cloud
    # assembly (CloudFormation templates + assets) produced by the synth step.
    source_artifact = codepipeline.Artifact()
    cloud_assembly_artifact = codepipeline.Artifact()

    pipeline = CdkPipeline(
        self,
        "Pipeline",
        pipeline_name=PIPELINE_NAME,
        cloud_assembly_artifact=cloud_assembly_artifact,
        # Source: poll the (newly created) CodeCommit repo's 'main' branch.
        source_action=codepipeline_actions.CodeCommitSourceAction(
            action_name="CodeCommit",
            output=source_artifact,
            branch='main',
            trigger=codepipeline_actions.CodeCommitTrigger.POLL,
            repository=codecommit.Repository(
                self, 'ServerlessApiRepository',
                repository_name=CODECOMMIT_REPO_NAME)),
        # Synth: install CDK + Python deps, then synth into the CodeBuild
        # source dir so the cloud assembly lands where CodePipeline expects it.
        # privileged=True because the build may need Docker (asset bundling).
        synth_action=SimpleSynthAction.standard_npm_synth(
            source_artifact=source_artifact,
            cloud_assembly_artifact=cloud_assembly_artifact,
            environment={'privileged': True},
            install_command=
            'cd ./serverless-rest-api/python-http-cdk; npm install -g aws-cdk; pip install -r requirements.txt; pip install -r ./src/api/requirements.txt ',
            synth_command='cdk synth --output $CODEBUILD_SRC_DIR/cdk.out'))

    # Testing stage: deploy the app, then run pytest integration tests
    # against the deployed stack (stack name passed via pipeline output).
    testing_stage = AppStage(self,
                             'serverless-api-pipeline-cdk-Testing',
                             cognito_stack_name='Cognito')
    pipeline_testing_stage = pipeline.add_application_stage(testing_stage)
    testing_action = ShellScriptAction(
        action_name='IntegrationTest',
        additional_artifacts=[source_artifact],
        commands=[
            'cd ./serverless-rest-api/python-http-cdk',
            'pip install -r ./tests/requirements.txt',
            'pip install -r ./src/api/requirements.txt',
            'python -m pytest tests/integration -v'
        ],
        use_outputs={
            'TEST_APPLICATION_STACK_NAME':
            pipeline.stack_output(testing_stage.api_stack_name)
        },
    )
    pipeline_testing_stage.add_actions(testing_action)
    # Permissions the integration tests need, granted to the test action's
    # CodeBuild project role:
    # - manage test users in any Cognito user pool of this account/region
    testing_action.project.add_to_role_policy(
        iam.PolicyStatement(
            effect=iam.Effect.ALLOW,
            actions=[
                'cognito-idp:AdminDeleteUser',
                'cognito-idp:AdminConfirmSignUp',
                'cognito-idp:AdminAddUserToGroup'
            ],
            resources=[
                f'arn:aws:cognito-idp:{cdk.Aws.REGION}:{cdk.Aws.ACCOUNT_ID}:userpool/*'
            ],
        ))
    # - generate random passwords for test users
    testing_action.project.add_to_role_policy(
        iam.PolicyStatement(
            effect=iam.Effect.ALLOW,
            actions=['secretsmanager:GetRandomPassword'],
            resources=['*'],
        ))
    # - full access to the test stage's DynamoDB tables (prefixed by stage name)
    #   NOTE(review): 'dynamodb:*' is broad; consider narrowing to the actions
    #   the tests actually perform.
    testing_action.project.add_to_role_policy(
        iam.PolicyStatement(
            effect=iam.Effect.ALLOW,
            actions=['dynamodb:*'],
            resources=[
                f'arn:aws:dynamodb:{cdk.Aws.REGION}:{cdk.Aws.ACCOUNT_ID}:table/{testing_stage.stage_name}*'
            ],
        ))
    # - read stack outputs of the app stack and the shared Cognito stack
    testing_action.project.add_to_role_policy(
        iam.PolicyStatement(
            effect=iam.Effect.ALLOW,
            actions=['cloudformation:DescribeStacks'],
            resources=[
                f'arn:aws:cloudformation:{cdk.Aws.REGION}:{cdk.Aws.ACCOUNT_ID}:stack/{testing_stage.stage_name}*/*',
                f'arn:aws:cloudformation:{cdk.Aws.REGION}:{cdk.Aws.ACCOUNT_ID}:stack/{testing_stage.cognito_stack_name}/*'
            ],
        ))

    # Production stage: same app, gated by a manual approval that runs first
    # (run_order=1) before the deployment actions.
    deployment_stage = AppStage(self,
                                'serverless-api-pipeline-cdk-Deployment',
                                cognito_stack_name='Cognito')
    pipeline_deployment_stage = pipeline.add_application_stage(
        deployment_stage)
    pipeline_deployment_stage.add_actions(
        codepipeline_actions.ManualApprovalAction(
            action_name='ApproveProductionDeployment', run_order=1))
def __init__(self, scope: cdk.Construct, construct_id: str,
             ecr_repository: ecr.Repository,
             ecs_service: ecs.FargateService, **kwargs) -> None:
    """CI/CD pipeline for the MythicalMysfits backend service.

    Creates a CodeCommit repository, a CodeBuild project that builds a
    container image and pushes it to *ecr_repository*, and a CodePipeline
    with Source and Build stages. An ECS deploy action for *ecs_service*
    is prepared but its stage is intentionally left disabled (see below).
    """
    super().__init__(scope, construct_id, **kwargs)

    # Source repository for the backend code.
    backend_repository = codecommit.Repository(
        self,
        'BackendRepository',
        repository_name='MythicalMysfits-BackendRepository'
    )

    # Build project; privileged=True so Docker can run inside the build.
    # Account/region are exposed to the buildspec so it can tag/push images.
    # NOTE(review): UBUNTU_14_04_PYTHON_3_5_2 is a long-deprecated CodeBuild
    # image — confirm it still resolves before redeploying.
    codebuild_project = codebuild.PipelineProject(
        self,
        'BuildProject',
        project_name='MythicalMysfitsServiceCodeBuildProject',
        environment=codebuild.BuildEnvironment(
            build_image=codebuild.LinuxBuildImage.UBUNTU_14_04_PYTHON_3_5_2,
            compute_type=codebuild.ComputeType.SMALL,
            environment_variables={
                'AWS_ACCOUNT_ID': codebuild.BuildEnvironmentVariable(
                    type=codebuild.BuildEnvironmentVariableType.PLAINTEXT,
                    value=self.account),
                'AWS_DEFAULT_REGION': codebuild.BuildEnvironmentVariable(
                    type=codebuild.BuildEnvironmentVariableType.PLAINTEXT,
                    value=self.region),
            },
            privileged=True
        )
    )

    # Allow the build role to read (git pull) from the backend repository.
    codebuild_policy_stm = _iam.PolicyStatement()
    codebuild_policy_stm.add_resources(backend_repository.repository_arn)
    codebuild_policy_stm.add_actions(
        "codecommit:ListBranches",
        "codecommit:ListRepositories",
        "codecommit:BatchGetRepositories",
        "codecommit:GitPull"
    )
    codebuild_project.add_to_role_policy(codebuild_policy_stm)
    # And to pull/push images in the service's ECR repository.
    ecr_repository.grant_pull_push(codebuild_project.grant_principal)

    # Source action: trigger on CodeCommit events for the 'main' branch.
    source_output = codepipeline.Artifact()
    source_action = actions.CodeCommitSourceAction(
        action_name='CodeCommit-Source',
        branch='main',
        trigger=actions.CodeCommitTrigger.EVENTS,
        repository=backend_repository,
        output=source_output
    )
    build_output = codepipeline.Artifact()
    build_action = actions.CodeBuildAction(
        action_name='Build',
        input=source_output,
        outputs=[
            build_output
        ],
        project=codebuild_project
    )
    # Deploy action is constructed but not yet attached to a stage — see the
    # commented-out add_stage below.
    deploy_action = actions.EcsDeployAction(
        action_name='DeployAction',
        service=ecs_service,
        input=build_output
    )
    pipeline = codepipeline.Pipeline(
        self,
        'Pipeline',
        pipeline_name='MythicalMysfitsPipeline',
    )
    pipeline.add_stage(stage_name='Source', actions=[source_action])
    pipeline.add_stage(stage_name='Build', actions=[build_action])
    # NOTE(review): the Deploy stage is intentionally disabled — the author
    # reports the following add_stage doesn't work. Root-cause (likely an
    # imagedefinitions.json / deploy-role mismatch) has not been confirmed.
    # pipeline.add_stage(stage_name='Deploy', actions=[deploy_action])

    # Expose clone URLs so developers can push code into the new repository.
    cdk.CfnOutput(self,
                  'BackendRepositoryCloneUrlHttp',
                  description='Backend Repository CloneUrl HTTP',
                  value=backend_repository.repository_clone_url_http)
    cdk.CfnOutput(self,
                  'BackendRepositoryCloneUrlSsh',
                  description='Backend Repository CloneUrl SSH',
                  value=backend_repository.repository_clone_url_ssh)
def __init__(self, scope: core.Construct, id: str, vpc, **kwargs) -> None:
    """CI pipeline for the Graviton2 ASP.NET lab.

    Provisions an ECR repository, a CodeCommit repository, and a two-stage
    CodePipeline (CodeCommit source -> ARM64 CodeBuild docker build) that
    pushes the built image to ECR. Emits the repo's HTTP clone URL.
    """
    super().__init__(scope, id, **kwargs)

    name = "graviton2-aspnet-lab"
    stack = core.Stack.of(self)

    # Image registry the ARM64 build pushes to.
    image_registry = ecr.Repository(scope=self,
                                    id=f"{name}-container",
                                    repository_name=f"{name}")

    # Application source repository.
    app_sources = codecommit.Repository(
        scope=self,
        id=f"{name}-container-git",
        repository_name=f"{name}",
        description=f"Application code")

    # ARM64 (Graviton2) docker build; privileged so Docker runs in-build.
    # The buildspec resolves the target registry from REPO_ECR.
    arm64_builder = codebuild.PipelineProject(
        scope=self,
        id=f"DockerBuild_ARM64",
        environment=dict(
            build_image=codebuild.LinuxBuildImage.AMAZON_LINUX_2_ARM,
            privileged=True),
        environment_variables={
            'REPO_ECR':
            codebuild.BuildEnvironmentVariable(
                value=image_registry.repository_uri),
        },
        build_spec=codebuild.BuildSpec.from_source_filename(
            "arm64-dotnet-buildspec.yml"))

    image_registry.grant_pull_push(arm64_builder)
    # Allow pulling base layers from any ECR repository in this account.
    arm64_builder.add_to_role_policy(
        iam.PolicyStatement(
            effect=iam.Effect.ALLOW,
            actions=[
                "ecr:BatchCheckLayerAvailability",
                "ecr:GetDownloadUrlForLayer",
                "ecr:BatchGetImage"
            ],
            resources=[
                f"arn:{stack.partition}:ecr:{stack.region}:{stack.account}:repository/*"
            ]))

    checkout = codepipeline.Artifact()
    arm64_image_artifact = codepipeline.Artifact("ARM64_BuildOutput")

    delivery = codepipeline.Pipeline(scope=self,
                                     id=f"{name}-container--pipeline",
                                     pipeline_name=f"{name}")
    delivery.add_stage(
        stage_name="Source",
        actions=[
            codepipeline_actions.CodeCommitSourceAction(
                action_name="CodeCommit_Source",
                repository=app_sources,
                output=checkout,
                branch="master")
        ])
    delivery.add_stage(
        stage_name="DockerBuild",
        actions=[
            codepipeline_actions.CodeBuildAction(
                action_name=f"DockerBuild_ARM64",
                project=arm64_builder,
                input=checkout,
                outputs=[arm64_image_artifact])
        ])

    # Clone URL output so developers can push the application code.
    core.CfnOutput(scope=self,
                   id="application_repository",
                   value=app_sources.repository_clone_url_http)
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    """Create the four MythicalMysfits CodeCommit repositories (CDK, Web,
    API, Lambda) and export both an HTTP and an SSH clone URL for each.

    Fixes two copy-paste defects in the outputs: ``WebRepositoryCloneUrlSsh``
    previously emitted the HTTP clone URL, and ``LambdaRepositoryCloneUrlHttp``
    previously emitted the SSH clone URL.
    """
    super().__init__(scope, id, **kwargs)

    cdkRepository = codecommit.Repository(
        self,
        "CDKRepository",
        repository_name="MythicalMysfitsService-Repository-CDK",
    )
    webRepository = codecommit.Repository(
        self,
        "WebRepository",
        repository_name="MythicalMysfitsService-Repository-Web",
    )
    apiRepository = codecommit.Repository(
        self,
        "APIRepository",
        repository_name="MythicalMysfitsService-Repository-API",
    )
    lambdaRepository = codecommit.Repository(
        self,
        "LambdaRepository",
        repository_name="MythicalMysfitsService-Repository-Lambda",
    )

    # One HTTP and one SSH clone URL per repository.
    core.CfnOutput(
        self,
        "CDKRepositoryCloneUrlHttp",
        description="CDK Repository CloneUrl HTTP",
        value=cdkRepository.repository_clone_url_http,
    )
    core.CfnOutput(
        self,
        "CDKRepositoryCloneUrlSsh",
        description="CDK Repository CloneUrl SSH",
        value=cdkRepository.repository_clone_url_ssh,
    )
    core.CfnOutput(
        self,
        "WebRepositoryCloneUrlHttp",
        description="Web Repository CloneUrl HTTP",
        value=webRepository.repository_clone_url_http,
    )
    # BUG FIX: was repository_clone_url_http under the SSH output name.
    core.CfnOutput(
        self,
        "WebRepositoryCloneUrlSsh",
        description="Web Repository CloneUrl SSH",
        value=webRepository.repository_clone_url_ssh,
    )
    core.CfnOutput(
        self,
        "APIRepositoryCloneUrlHttp",
        description="API Repository CloneUrl HTTP",
        value=apiRepository.repository_clone_url_http,
    )
    core.CfnOutput(
        self,
        "APIRepositoryCloneUrlSsh",
        description="API Repository CloneUrl SSH",
        value=apiRepository.repository_clone_url_ssh,
    )
    # BUG FIX: was repository_clone_url_ssh under the HTTP output name.
    core.CfnOutput(
        self,
        "LambdaRepositoryCloneUrlHttp",
        description="Lambda Repository CloneUrl HTTP",
        value=lambdaRepository.repository_clone_url_http,
    )
    core.CfnOutput(
        self,
        "LambdaRepositoryCloneUrlSsh",
        description="Lambda Repository CloneUrl SSH",
        value=lambdaRepository.repository_clone_url_ssh,
    )
def __init__(self, scope: core.Construct, id: str, website: WebsiteConstruct,
             **kwargs) -> None:
    """Build/deploy pipeline for the static website.

    CodeCommit source -> CodeBuild (yarn install + yarn build, artifacts
    from ``dist``) -> S3 deploy into the website's bucket.

    Fixes a defect in the buildspec cache configuration: the cache path was
    ``nodemodules/**/*``, which never matches anything because yarn installs
    dependencies into ``node_modules`` — so the local cache was ineffective.
    """
    super().__init__(scope, id, **kwargs)
    stack = core.Stack.of(self)

    # Repository named after the (lowercased) stack name.
    repo = codecommit.Repository(self,
                                 'Repository',
                                 repository_name=stack.stack_name.lower())

    project = codebuild.PipelineProject(
        self,
        'Builder',
        environment=codebuild.BuildEnvironment(
            build_image=codebuild.LinuxBuildImage.AMAZON_LINUX_2_3,
            compute_type=codebuild.ComputeType.LARGE),
        # Custom local cache: keep node_modules between builds on the host.
        cache=codebuild.Cache.local(codebuild.LocalCacheMode.CUSTOM, ),
        build_spec=codebuild.BuildSpec.from_object({
            'version': 0.2,
            'cache': {
                # BUG FIX: was 'nodemodules/**/*' (typo), which matched nothing.
                'paths': ['node_modules/**/*'],
            },
            'phases': {
                'install': {
                    'runtime-versions': {
                        'nodejs': 12
                    }
                },
                'pre_build': {
                    'commands':
                    ['echo Pre-build started on `date`', 'yarn install']
                },
                'build': {
                    'commands':
                    ['echo Build started on `date`', 'yarn build']
                }
            },
            # Everything yarn emits into dist/ becomes the build artifact.
            'artifacts': {
                'files': ['**/*'],
                'base-directory': 'dist'
            }
        }),
    )

    source_artifact = codepipeline.Artifact('SourceArtifact')
    build_artifact = codepipeline.Artifact('BuildArtifact')

    # Source -> Build -> Deploy (extract build artifact into the site bucket).
    pipeline = codepipeline.Pipeline(
        self,
        'Pipeline',
        cross_account_keys=False,
        restart_execution_on_update=True,
        stages=[
            codepipeline.StageProps(
                stage_name='Source',
                actions=[
                    codepipeline_actions.CodeCommitSourceAction(
                        action_name='Source',
                        repository=repo,
                        output=source_artifact,
                    )
                ]),
            codepipeline.StageProps(
                stage_name='Build',
                actions=[
                    codepipeline_actions.CodeBuildAction(
                        action_name='Build',
                        project=project,
                        input=source_artifact,
                        outputs=[build_artifact],
                    )
                ]),
            codepipeline.StageProps(
                stage_name='Deploy',
                actions=[
                    codepipeline_actions.S3DeployAction(
                        action_name='Deploy',
                        input=build_artifact,
                        bucket=website.bucket,
                        extract=True,
                    )
                ])
        ])
def __init__(self, scope: core.Construct, id: str, UserName="******",
             Repo="default", WebService="default", **kwargs):
    """Per-user web workshop pipeline.

    Creates a shared CodeBuild role, a CodeCommit repository, a CodeBuild
    project, and a three-stage pipeline (Source -> Build -> ECS deploy to
    the user's Fargate web service). All resources are suffixed with
    *UserName* so multiple workshop participants can coexist.

    NOTE(review): *Repo* and *WebService* default to the string "default"
    but are used as helper objects (``getRepositoriesList`` /
    ``getFargateService``) — the defaults would fail at runtime; callers
    presumably always pass real objects. Confirm with call sites.
    """
    super().__init__(scope, id, **kwargs)

    # Role shared by EC2 and CodeBuild; granted pull/push on the user's
    # container repositories below.
    self.My_CodeBuild_Role = _iam.Role(
        self, 'CodeBuildRole-Web-' + UserName,
        assumed_by=_iam.CompositePrincipal(
            _iam.ServicePrincipal('ec2.amazonaws.com'),
            _iam.ServicePrincipal('codebuild.amazonaws.com')))

    # Grant pull/push on every ECR repository the Repo helper knows about.
    # NOTE(review): semantics of getRepositoriesList/getRepositories are
    # defined elsewhere — assumed to yield ECR repository constructs.
    for repo in Repo.getRepositoriesList():
        Repo.getRepositories(repo).grant_pull_push(self.My_CodeBuild_Role)

    self.My_CodeCommit_Web = _codecommit.Repository(
        self,
        "CodeCommit-Web-" + UserName,
        repository_name="Workshop-Web-" + UserName,
        description="CodeCommit for Web Project,Owner:" + UserName)

    # privileged=True: the build runs Docker to produce the service image.
    # NOTE(review): project_name lacks the '-' separator used everywhere
    # else ("CodeBuild-Web" + UserName vs "CodeBuild-Web-" + UserName) —
    # likely a typo, but renaming would replace the deployed project.
    self.My_CodeBuild_Web = _codebuild.PipelineProject(
        self,
        "CodeBuild-Web-" + UserName,
        project_name="CodeBuild-Web" + UserName,
        role=self.My_CodeBuild_Role,
        environment=_codebuild.BuildEnvironment(
            build_image=_codebuild.LinuxBuildImage.STANDARD_2_0,
            privileged=True))

    # Pipeline artifacts.
    # NOTE(review): EcsImage_Web_Source is created but never wired into a
    # stage — appears unused.
    self.CodeCommit_Web_Source = _codepipeline.Artifact(
        "CodeCommit_Web_Source-" + UserName)
    self.EcsImage_Web_Source = _codepipeline.Artifact(
        'EcsImage_Web_Source-' + UserName)
    self.FargateImage_Web_Source = _codepipeline.Artifact(
        'FargateImage_Web_Source-' + UserName)

    # Source (master branch) -> Build (image) -> Deploy (Fargate service).
    self.My_CodePipeline_Web = _codepipeline.Pipeline(
        self,
        "CodePipeline-Web-" + UserName,
        stages=[
            _codepipeline.StageProps(
                stage_name="Source",
                actions=[
                    _codepipeline_actions.CodeCommitSourceAction(
                        action_name="CodeCommit_Web_Source",
                        repository=self.My_CodeCommit_Web,
                        branch="master",
                        output=self.CodeCommit_Web_Source)
                ]),
            _codepipeline.StageProps(
                stage_name="Build",
                actions=[
                    _codepipeline_actions.CodeBuildAction(
                        action_name="CodeCommit_Web_Build",
                        project=self.My_CodeBuild_Web,
                        input=self.CodeCommit_Web_Source,
                        outputs=[self.FargateImage_Web_Source])
                ]),
            _codepipeline.StageProps(
                stage_name="Deploy",
                actions=[
                    _codepipeline_actions.EcsDeployAction(
                        action_name='CodeDeploy_Web_Deploy',
                        service=WebService.getFargateService(
                            "WebApplicationService"),
                        input=self.FargateImage_Web_Source)
                ])
        ])

    # Clone URL so the participant can push their web application code.
    core.CfnOutput(self,
                   "CodeCommit For WebApplication",
                   value=self.My_CodeCommit_Web.repository_clone_url_http)
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    """MythicalMysfits "questions" microservice.

    Resources: a CodeCommit repo for the Lambda code, a DynamoDB table with
    a stream, a Lambda that writes submitted questions to the table, a
    second Lambda (fed by the table stream) that publishes each new
    question to an SNS topic, and an API Gateway REST API (POST /questions
    plus a CORS OPTIONS mock) fronting the first Lambda.

    NOTE(review): reads os.environ["SNS_EMAIL"] at synth time — synthesis
    fails if the variable is unset.
    """
    super().__init__(scope, id, **kwargs)

    # Repository holding the questions Lambda source, with clone URLs exported.
    lambda_repository = aws_codecommit.Repository(
        self,
        "QuestionsLambdaRepository",
        repository_name="MythicalMysfits-QuestionsLambdaRepository",
    )
    core.CfnOutput(
        self,
        "questionsRepositoryCloneUrlHTTP",
        value=lambda_repository.repository_clone_url_http,
        description="Questions Lambda Repository Clone URL HTTP",
    )
    core.CfnOutput(
        self,
        "questionsRepositoryCloneUrlSSH",
        value=lambda_repository.repository_clone_url_ssh,
        description="Questions Lambda Repository Clone URL SSH",
    )

    # Questions table; NEW_IMAGE stream feeds the notification Lambda below.
    table = aws_dynamodb.Table(
        self,
        "Table",
        table_name="MysfitsQuestionsTable",
        partition_key=aws_dynamodb.Attribute(
            name="QuestionId", type=aws_dynamodb.AttributeType.STRING),
        stream=aws_dynamodb.StreamViewType.NEW_IMAGE,
    )

    # Least-privilege statement: the post-question Lambda may only PutItem
    # into this table.
    lambda_function_policy_statement_ddb = aws_iam.PolicyStatement()
    lambda_function_policy_statement_ddb.add_actions("dynamodb:PutItem")
    lambda_function_policy_statement_ddb.add_resources(table.table_arn)

    # X-Ray tracing permissions, shared by both Lambdas.
    lambda_function_policy_statement_xray = aws_iam.PolicyStatement()
    lambda_function_policy_statement_xray.add_actions(
        "xray:PutTraceSegments",
        "xray:PutTelemetryRecords",
        "xray:GetSamplingRules",
        "xray:GetSamplingTargets",
        "xray:GetSamplingStatisticSummaries",
    )
    lambda_function_policy_statement_xray.add_all_resources()

    # Lambda invoked by the API: stores a submitted question in DynamoDB.
    # NOTE(review): Runtime.PYTHON_3_6 and Code.asset() are deprecated in
    # later CDK/Lambda versions — candidates for an upgrade pass.
    mysfits_post_question = aws_lambda.Function(
        self,
        "PostQuestionFunction",
        handler="mysfitsPostQuestion.postQuestion",
        runtime=aws_lambda.Runtime.PYTHON_3_6,
        description=
        "A microservice Lambda function that receives a new question submitted to the MythicalMysfits website from a user and inserts it into a DynamoDB database table.",
        memory_size=128,
        code=aws_lambda.Code.asset(
            os.path.join("..", "..", "lambda-questions",
                         "PostQuestionsService")),
        timeout=core.Duration.seconds(30),
        initial_policy=[
            lambda_function_policy_statement_ddb,
            lambda_function_policy_statement_xray,
        ],
        tracing=aws_lambda.Tracing.ACTIVE,
    )

    # Admin-notification topic; the subscriber email comes from the
    # SNS_EMAIL environment variable.
    topic = aws_sns.Topic(
        self,
        "Topic",
        display_name="MythicalMysfitsQuestionsTopic",
        topic_name="MythicalMysfitsQuestionsTopic",
    )
    topic.add_subscription(subs.EmailSubscription(os.environ["SNS_EMAIL"]))

    # Publish permission for the stream-processing Lambda.
    # NOTE(review): "lamdaa" is a typo in the local variable name (kept to
    # avoid touching code in a documentation pass).
    post_question_lamdaa_function_policy_statement_sns = aws_iam.PolicyStatement(
    )
    post_question_lamdaa_function_policy_statement_sns.add_actions(
        "sns:Publish")
    post_question_lamdaa_function_policy_statement_sns.add_resources(
        topic.topic_arn)

    # Lambda fed by the DynamoDB stream: notifies the site administrator of
    # every new question (batch_size=1, reading from TRIM_HORIZON).
    mysfits_process_question_stream = aws_lambda.Function(
        self,
        "ProcessQuestionStreamFunction",
        handler="mysfitsProcessStream.processStream",
        runtime=aws_lambda.Runtime.PYTHON_3_6,
        description=
        "An AWS Lambda function that will process all new questions posted to mythical mysfits and notify the site administrator of the question that was asked.",
        memory_size=128,
        code=aws_lambda.Code.asset(
            os.path.join("..", "..", "lambda-questions",
                         "ProcessQuestionsStream")),
        timeout=core.Duration.seconds(30),
        initial_policy=[
            post_question_lamdaa_function_policy_statement_sns,
            lambda_function_policy_statement_xray,
        ],
        tracing=aws_lambda.Tracing.ACTIVE,
        environment={"SNS_TOPIC_ARN": topic.topic_arn},
        events=[
            event.DynamoEventSource(
                table,
                starting_position=aws_lambda.StartingPosition.TRIM_HORIZON,
                batch_size=1,
            )
        ],
    )

    # Role API Gateway assumes to invoke the post-question Lambda.
    questions_api_role = aws_iam.Role(
        self,
        "QuestionsApiRole",
        assumed_by=aws_iam.ServicePrincipal("apigateway.amazonaws.com"),
    )
    api_policy = aws_iam.PolicyStatement()
    api_policy.add_actions("lambda:InvokeFunction")
    api_policy.add_resources(mysfits_post_question.function_arn)
    aws_iam.Policy(
        self,
        "QuestionsApiPolicy",
        policy_name="questions_api_policy",
        statements=[api_policy],
        roles=[questions_api_role],
    )

    # Integration using the role above; always maps to a 200 "OK" body.
    questions_integration = aws_apigateway.LambdaIntegration(
        mysfits_post_question,
        credentials_role=questions_api_role,
        integration_responses=[
            aws_apigateway.IntegrationResponse(
                status_code="200",
                response_templates={
                    "application/json": '{"status": "OK"}'
                },
            )
        ],
    )

    # REST API; proxy=False so routes are declared explicitly below.
    api = aws_apigateway.LambdaRestApi(
        self,
        "APIEndpoint",
        handler=mysfits_post_question,
        options=aws_apigateway.RestApiProps(
            rest_api_name="Questions API Server"),
        proxy=False,
    )

    # POST /questions -> the Lambda integration (no auth).
    questions_method = api.root.add_resource("questions")
    questions_method.add_method(
        "POST",
        questions_integration,
        method_responses=[
            aws_apigateway.MethodResponse(status_code="200")
        ],
        authorization_type=aws_apigateway.AuthorizationType.NONE,
    )
    # OPTIONS /questions -> CORS preflight handled by a mock integration.
    questions_method.add_method(
        "OPTIONS",
        aws_apigateway.MockIntegration(
            integration_responses=[
                aws_apigateway.IntegrationResponse(
                    status_code="200",
                    response_parameters={
                        "method.response.header.Access-Control-Allow-Headers":
                        "'Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token,X-Amz-User-Agent'",
                        "method.response.header.Access-Control-Allow-Origin":
                        "'*'",
                        "method.response.header.Access-Control-Allow-Credentials":
                        "'false'",
                        "method.response.header.Access-Control-Allow-Methods":
                        "'OPTIONS,GET,PUT,POST,DELETE'",
                    },
                )
            ],
            passthrough_behavior=aws_apigateway.PassthroughBehavior.NEVER,
            request_templates={"application/json": '{"statusCode": 200}'},
        ),
        method_responses=[
            aws_apigateway.MethodResponse(
                status_code="200",
                response_parameters={
                    "method.response.header.Access-Control-Allow-Headers":
                    True,
                    "method.response.header.Access-Control-Allow-Methods":
                    True,
                    "method.response.header.Access-Control-Allow-Credentials":
                    True,
                    "method.response.header.Access-Control-Allow-Origin":
                    True,
                },
            )
        ],
    )
def __init__(self, scope: core.Construct, id: str, repository_name: str,
             **kwargs) -> None:
    """Run a CodeBuild project on every pull request of a CodeCommit repo.

    A rule on pull-request state changes (created / source branch updated)
    starts the build against the PR's source commit, forwarding PR metadata
    as build environment variables; Lambda handlers report build start and
    result back (presumably as PR comments — defined in the handler code).
    """
    super().__init__(scope, id, **kwargs)

    start_handler = _create_fn_from_folder(
        scope=self,
        folder_name="codebuild_start_fn",
    )
    result_handler = _create_fn_from_folder(
        scope=self,
        folder_name="codebuild_result_fn",
    )

    source_repo = codecommit.Repository(
        scope=self,
        id="Repository",
        repository_name=repository_name,
    )

    pr_build = codebuild.Project(
        scope=self,
        id="PullRequestCodeCommitProject",
        source=codebuild.Source.code_commit(repository=source_repo),
        badge=True,
    )

    # Report build lifecycle events to the Lambda handlers: one handler for
    # start, one shared handler for both terminal outcomes.
    pr_build.on_build_started(
        id="on-build-started",
        target=targets.LambdaFunction(handler=start_handler),
    )
    for attach_rule, rule_id in (
        (pr_build.on_build_succeeded, "on-build-succeeded"),
        (pr_build.on_build_failed, "on-build-failed"),
    ):
        attach_rule(
            id=rule_id,
            target=targets.LambdaFunction(handler=result_handler),
        )

    def _plaintext_var(var_name, event_path):
        # Forward one field of the PR event into the build environment.
        return {
            "name": var_name,
            "value": events.EventField.from_path(event_path),
            "type": "PLAINTEXT",
        }

    # Fire on new PRs and on pushes to an open PR's source branch.
    pr_rule = source_repo.on_pull_request_state_change(
        id="on-pull-request-change",
        event_pattern=events.EventPattern(
            detail={"event": [
                "pullRequestSourceBranchUpdated",
                "pullRequestCreated",
            ]}),
    )
    pr_rule.add_target(
        target=targets.CodeBuildProject(
            project=pr_build,
            event=events.RuleTargetInput.from_object({
                # Build the PR's source commit, not the default branch head.
                "sourceVersion":
                events.EventField.from_path("$.detail.sourceCommit"),
                "artifactsOverride": {"type": "NO_ARTIFACTS"},
                "environmentVariablesOverride": [
                    _plaintext_var("pullRequestId",
                                   "$.detail.pullRequestId"),
                    _plaintext_var("repositoryName",
                                   "$.detail.repositoryNames[0]"),
                    _plaintext_var("sourceCommit",
                                   "$.detail.sourceCommit"),
                    _plaintext_var("destinationCommit",
                                   "$.detail.destinationCommit"),
                ],
            }),
        )
    )
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    """IoT Greengrass CI/CD workshop: canary and production pipelines.

    Canary pipeline: CodeCommit source -> build/package/deploy Lambda ->
    deploy to the Greengrass canary group. Production pipeline: an S3
    source (``deploy_params.zip`` dropped by the canary deploy) -> deploy
    to the Greengrass production group. Bucket names are published to SSM
    so the buildspecs can find them.

    NOTE(review): reads ``kwargs['env'].region`` — instantiation fails
    unless callers pass an ``env`` with a concrete region.
    """
    super().__init__(scope, id, **kwargs)

    # Workshop source repository (canary pipeline source).
    code = codecommit.Repository(
        self, "CodeRepo", repository_name="iot-gg-cicd-workshop-repo")

    # Versioned buckets: one feeding the prod pipeline's S3 source action,
    # one holding the full prod source bundle.
    prod_deploy_param_bucket = s3.Bucket(
        self,
        "ProdDeployBucket",
        versioned=True,
    )
    prod_source_bucket = s3.Bucket(
        self,
        "ProdSourceBucket",
        versioned=True,
    )

    # Publish bucket names to SSM so the canary/prod buildspecs can resolve
    # them at build time (see the get-parameter call in the prod spec below).
    ssm.StringParameter(
        self,
        "ProdSourceBucketParameter",
        parameter_name="/iot-gg-cicd-workshop/s3/prod_source_bucket",
        string_value=prod_source_bucket.bucket_name,
    )
    ssm.StringParameter(
        self,
        "ProdDeployBucketParameter",
        parameter_name="/iot-gg-cicd-workshop/s3/prod_deploy_param_bucket",
        string_value=prod_deploy_param_bucket.bucket_name,
    )

    # Build project driven by the repo's buildspec.yml.
    cdk_build = codebuild.PipelineProject(
        self,
        "Build",
        project_name="iot-gg-cicd-workshop-build",
        build_spec=codebuild.BuildSpec.from_source_filename(
            "buildspec.yml"),
        environment_variables={
            "AWS_DEFAULT_REGION":
            codebuild.BuildEnvironmentVariable(value=kwargs['env'].region)
        })
    # NOTE(review): broad managed policies (workshop convenience, not
    # least-privilege); AWSLambdaFullAccess is a deprecated managed policy.
    add_policies(cdk_build, [
        "AWSCloudFormationFullAccess",
        "AmazonSSMFullAccess",
        "AmazonS3FullAccess",
        "AWSLambdaFullAccess",
        "IAMFullAccess",
    ])

    # Canary deploy project driven by deployspec.yml.
    cdk_deploy_canary = codebuild.PipelineProject(
        self,
        "Deploy",
        project_name="iot-gg-cicd-workshop-deploy-canary",
        build_spec=codebuild.BuildSpec.from_source_filename(
            "deployspec.yml"),
        environment_variables={
            "AWS_DEFAULT_REGION":
            codebuild.BuildEnvironmentVariable(value=kwargs['env'].region)
        })
    add_policies(cdk_deploy_canary, [
        "AWSCloudFormationFullAccess", "AWSGreengrassFullAccess",
        "AmazonSSMFullAccess", "ResourceGroupsandTagEditorReadOnlyAccess",
        "AWSLambdaFullAccess", "AWSIoTFullAccess"
    ])

    # Canary pipeline: Source -> Build_Package_Deploy_Lambda -> canary deploy.
    source_output = codepipeline.Artifact()
    cdk_build_output = codepipeline.Artifact("CdkBuildOutput")
    codepipeline.Pipeline(
        self,
        "Pipeline",
        pipeline_name="iot-gg-cicd-workshop-pipeline-canary",
        stages=[
            codepipeline.StageProps(
                stage_name="Source",
                actions=[
                    codepipeline_actions.CodeCommitSourceAction(
                        action_name="CodeCommit_Source",
                        repository=code,
                        output=source_output)
                ]),
            codepipeline.StageProps(
                stage_name="Build_Package_Deploy_Lambda",
                actions=[
                    codepipeline_actions.CodeBuildAction(
                        action_name="Build_Package_Deploy",
                        project=cdk_build,
                        input=source_output,
                        outputs=[cdk_build_output])
                ]),
            codepipeline.StageProps(
                stage_name="Deploy_GreenGrass_Canary",
                actions=[
                    codepipeline_actions.CodeBuildAction(
                        action_name="Deploy_Canary",
                        project=cdk_deploy_canary,
                        input=cdk_build_output)
                ]),
        ])

    # Prod deploy project with an inline buildspec: fetch the prod bundle
    # from the source bucket (name resolved via SSM), unpack, and deploy to
    # the production Greengrass group.
    cdk_deploy_prod = codebuild.PipelineProject(
        self,
        "DeployProd",
        project_name="iot-gg-cicd-workshop-deploy-main",
        build_spec=codebuild.BuildSpec.from_object(
            dict(
                version="0.2",
                phases=dict(
                    install=dict(commands=[
                        "apt-get install zip",
                        "PROD_SOURCE_BUCKET=$(aws ssm get-parameter --name '/iot-gg-cicd-workshop/s3/prod_source_bucket' --with-decryption --query 'Parameter.Value' --output text)",
                        "aws s3 cp s3://$PROD_SOURCE_BUCKET/prod_deploy.zip prod_deploy.zip",
                        "unzip -o prod_deploy.zip", "ls -la",
                        "make clean init"
                    ]),
                    build=dict(commands=[
                        "ls -la",
                        "make deploy-greengrass-prod",
                    ])),
                artifacts={
                    "base-directory": ".",
                    "files": ["**/*"]
                },
                environment=dict(
                    buildImage=codebuild.LinuxBuildImage.STANDARD_2_0))))
    add_policies(cdk_deploy_prod, [
        "AWSCloudFormationFullAccess", "AWSGreengrassFullAccess",
        "AmazonSSMFullAccess", "ResourceGroupsandTagEditorReadOnlyAccess",
        "AWSLambdaFullAccess"
    ])

    # Prod pipeline: triggered by deploy_params.zip landing in the deploy
    # bucket (written by the canary stage), then deploys to prod.
    prod_source_output = codepipeline.Artifact()
    codepipeline.Pipeline(
        self,
        "PipelineProd",
        pipeline_name="iot-gg-cicd-workshop-pipeline-main",
        stages=[
            codepipeline.StageProps(
                stage_name="Source",
                actions=[
                    codepipeline_actions.S3SourceAction(
                        action_name="S3_Source",
                        bucket=prod_deploy_param_bucket,
                        bucket_key="deploy_params.zip",
                        output=prod_source_output)
                ]),
            codepipeline.StageProps(
                stage_name="Deploy_GreenGrass_Prod",
                actions=[
                    codepipeline_actions.CodeBuildAction(
                        action_name="Deploy_Prod",
                        project=cdk_deploy_prod,
                        input=prod_source_output)
                ]),
        ])

    # Bucket access: canary writes the prod bundle and params; prod only reads.
    prod_source_bucket.grant_read_write(cdk_deploy_canary.role)
    prod_source_bucket.grant_read(cdk_deploy_prod.role)
    prod_deploy_param_bucket.grant_read_write(cdk_deploy_canary.role)
def __init__(self, scope: core.Construct, id: str, vpc: aws_ec2.Vpc,
             ecs_cluster: aws_ecs.Cluster,
             alb: elbv2.ApplicationLoadBalancer,
             albTestListener: elbv2.ApplicationListener,
             albProdListener: elbv2.ApplicationListener,
             blueGroup: elbv2.ApplicationTargetGroup,
             greenGroup: elbv2.ApplicationTargetGroup, **kwargs) -> None:
    """Provision an ECS Blue/Green (CodeDeploy) deployment pipeline.

    Creates the ECR + CodeCommit repositories, IAM roles, CodeDeploy
    application (deployment group via a Lambda-backed custom resource),
    CloudWatch 4xx alarms on both target groups, a dummy Fargate service
    for initial deployment-group creation, the real task definition, and
    a CodePipeline (Source -> Build -> Deploy) wired to the Blue/Green
    deployment group.

    NOTE(fix): the original signature used ``=`` instead of ``:`` for
    ecs_cluster/alb/listeners/target groups, which made the *classes*
    themselves default values. They are now proper required, annotated
    parameters; any previously-working caller already passed them all.
    `vpc` is accepted for interface compatibility but not used here.
    """
    super().__init__(scope, id, **kwargs)

    # --- Deployment constants -------------------------------------------
    # (A dead `ECS_APP_NAME = "Nginx-app",` tuple assignment was removed;
    # it was overwritten below before first use.)
    ECS_DEPLOYMENT_GROUP_NAME = "NginxAppECSBlueGreen"
    ECS_DEPLOYMENT_CONFIG_NAME = "CodeDeployDefault.ECSLinear10PercentEvery1Minutes"
    ECS_TASKSET_TERMINATION_WAIT_TIME = 10  # minutes the old task set lingers
    ECS_TASK_FAMILY_NAME = "Nginx-microservice"
    ECS_APP_NAME = "Nginx-microservice"
    ECS_APP_LOG_GROUP_NAME = "/ecs/Nginx-microservice"
    DUMMY_TASK_FAMILY_NAME = "sample-Nginx-microservice"
    DUMMY_APP_NAME = "sample-Nginx-microservice"
    DUMMY_APP_LOG_GROUP_NAME = "/ecs/sample-Nginx-microservice"
    DUMMY_CONTAINER_IMAGE = "smuralee/nginx"

    # =============================================================================
    # ECR and CodeCommit repositories for the Blue/ Green deployment
    # =============================================================================

    # ECR repository for the docker images
    NginxecrRepo = aws_ecr.Repository(self,
                                      "NginxRepo",
                                      image_scan_on_push=True)

    # CodeCommit repository holding the application source
    NginxCodeCommitrepo = aws_codecommit.Repository(
        self,
        "NginxRepository",
        repository_name=ECS_APP_NAME,
        description="Oussama application hosted on NGINX")

    # =============================================================================
    # CODE BUILD and ECS TASK ROLES for the Blue/ Green deployment
    # =============================================================================

    # IAM role for the Code Build project: needs to push images to ECR
    codeBuildServiceRole = aws_iam.Role(
        self,
        "codeBuildServiceRole",
        assumed_by=aws_iam.ServicePrincipal('codebuild.amazonaws.com'))

    inlinePolicyForCodeBuild = aws_iam.PolicyStatement(
        effect=aws_iam.Effect.ALLOW,
        actions=[
            "ecr:GetAuthorizationToken", "ecr:BatchCheckLayerAvailability",
            "ecr:InitiateLayerUpload", "ecr:UploadLayerPart",
            "ecr:CompleteLayerUpload", "ecr:PutImage"
        ],
        resources=["*"])

    codeBuildServiceRole.add_to_policy(inlinePolicyForCodeBuild)

    # ECS task role (also used as the execution role below)
    ecsTaskRole = aws_iam.Role(
        self,
        "ecsTaskRoleForWorkshop",
        assumed_by=aws_iam.ServicePrincipal('ecs-tasks.amazonaws.com'))

    ecsTaskRole.add_managed_policy(
        aws_iam.ManagedPolicy.from_aws_managed_policy_name(
            "service-role/AmazonECSTaskExecutionRolePolicy"))

    # =============================================================================
    # CODE DEPLOY APPLICATION for the Blue/ Green deployment
    # =============================================================================

    # Creating the code deploy application
    codeDeployApplication = codedeploy.EcsApplication(
        self, "NginxAppCodeDeploy")

    # Creating the code deploy service role
    codeDeployServiceRole = aws_iam.Role(
        self,
        "codeDeployServiceRole",
        assumed_by=aws_iam.ServicePrincipal('codedeploy.amazonaws.com'))

    codeDeployServiceRole.add_managed_policy(
        aws_iam.ManagedPolicy.from_aws_managed_policy_name(
            "AWSCodeDeployRoleForECS"))

    # IAM role for the custom-resource Lambda that manages the
    # CodeDeploy deployment group (not natively supported by CFN for ECS
    # Blue/Green at the time this was written).
    customLambdaServiceRole = aws_iam.Role(
        self,
        "codeDeployCustomLambda",
        assumed_by=aws_iam.ServicePrincipal('lambda.amazonaws.com'))

    inlinePolicyForLambda = aws_iam.PolicyStatement(
        effect=aws_iam.Effect.ALLOW,
        actions=[
            "iam:PassRole", "sts:AssumeRole", "codedeploy:List*",
            "codedeploy:Get*", "codedeploy:UpdateDeploymentGroup",
            "codedeploy:CreateDeploymentGroup",
            "codedeploy:DeleteDeploymentGroup"
        ],
        resources=["*"])

    customLambdaServiceRole.add_managed_policy(
        aws_iam.ManagedPolicy.from_aws_managed_policy_name(
            'service-role/AWSLambdaBasicExecutionRole'))
    customLambdaServiceRole.add_to_policy(inlinePolicyForLambda)

    # Custom resource handler to create the deployment group
    createDeploymentGroupLambda = aws_lambda.Function(
        self,
        'createDeploymentGroupLambda',
        code=aws_lambda.Code.from_asset("custom_resources"),
        runtime=aws_lambda.Runtime.PYTHON_3_8,
        handler='create_deployment_group.handler',
        role=customLambdaServiceRole,
        description="Custom resource to create deployment group",
        memory_size=128,
        timeout=core.Duration.seconds(60))

    # ================================================================================================
    # CloudWatch Alarms for 4XX errors — used by CodeDeploy for rollback
    # ================================================================================================
    blue4xxMetric = aws_cloudwatch.Metric(
        namespace='AWS/ApplicationELB',
        metric_name='HTTPCode_Target_4XX_Count',
        dimensions={
            "TargetGroup": blueGroup.target_group_full_name,
            "LoadBalancer": alb.load_balancer_full_name
        },
        statistic="sum",
        period=core.Duration.minutes(1))

    blueGroupAlarm = aws_cloudwatch.Alarm(
        self,
        "blue4xxErrors",
        alarm_name="Blue_4xx_Alarm",
        alarm_description=
        "CloudWatch Alarm for the 4xx errors of Blue target group",
        metric=blue4xxMetric,
        threshold=1,
        evaluation_periods=1)

    green4xxMetric = aws_cloudwatch.Metric(
        namespace='AWS/ApplicationELB',
        metric_name='HTTPCode_Target_4XX_Count',
        dimensions={
            "TargetGroup": greenGroup.target_group_full_name,
            "LoadBalancer": alb.load_balancer_full_name
        },
        statistic="sum",
        period=core.Duration.minutes(1))

    greenGroupAlarm = aws_cloudwatch.Alarm(
        self,
        "green4xxErrors",
        alarm_name="Green_4xx_Alarm",
        alarm_description=
        "CloudWatch Alarm for the 4xx errors of Green target group",
        metric=green4xxMetric,
        threshold=1,
        evaluation_periods=1)

    # ================================================================================================
    # DUMMY TASK DEFINITION for the initial service creation
    # This is required for the service being made available to create the CodeDeploy Deployment Group
    # ================================================================================================
    sampleTaskDefinition = aws_ecs.FargateTaskDefinition(
        self,
        "sampleTaskDefn",
        family=DUMMY_TASK_FAMILY_NAME,
        cpu=256,
        memory_limit_mib=1024,
        task_role=ecsTaskRole,
        execution_role=ecsTaskRole)

    sampleContainerDefn = sampleTaskDefinition.add_container(
        "sampleAppContainer",
        image=aws_ecs.ContainerImage.from_registry(DUMMY_CONTAINER_IMAGE),
        logging=aws_ecs.AwsLogDriver(log_group=aws_logs.LogGroup(
            self,
            "sampleAppLogGroup",
            log_group_name=DUMMY_APP_LOG_GROUP_NAME,
            removal_policy=core.RemovalPolicy.DESTROY),
                                     stream_prefix=DUMMY_APP_NAME),
        docker_labels={"name": DUMMY_APP_NAME})

    port_mapping = aws_ecs.PortMapping(container_port=80,
                                       protocol=aws_ecs.Protocol.TCP)
    sampleContainerDefn.add_port_mappings(port_mapping)

    # ================================================================================================
    # ECS task definition using ECR image
    # Will be used by the CODE DEPLOY for Blue/Green deployment
    # ================================================================================================
    NginxTaskDefinition = aws_ecs.FargateTaskDefinition(
        self,
        "appTaskDefn",
        family=ECS_TASK_FAMILY_NAME,
        cpu=256,
        memory_limit_mib=1024,
        task_role=ecsTaskRole,
        execution_role=ecsTaskRole)

    NginxcontainerDefinition = NginxTaskDefinition.add_container(
        "NginxAppContainer",
        image=aws_ecs.ContainerImage.from_ecr_repository(
            NginxecrRepo, "latest"),
        logging=aws_ecs.AwsLogDriver(log_group=aws_logs.LogGroup(
            self,
            "NginxAppLogGroup",
            log_group_name=ECS_APP_LOG_GROUP_NAME,
            removal_policy=core.RemovalPolicy.DESTROY),
                                     stream_prefix=ECS_APP_NAME),
        docker_labels={"name": ECS_APP_NAME})

    NginxcontainerDefinition.add_port_mappings(port_mapping)

    # =============================================================================
    # ECS SERVICE for the Blue/ Green deployment
    # =============================================================================
    NginxAppService = aws_ecs.FargateService(
        self,
        "NginxAppService",
        cluster=ecs_cluster,
        task_definition=NginxTaskDefinition,
        health_check_grace_period=core.Duration.seconds(10),
        desired_count=3,
        deployment_controller={
            "type": aws_ecs.DeploymentControllerType.CODE_DEPLOY
        },
        service_name=ECS_APP_NAME)

    # 80 = prod listener traffic, 8080 = test listener traffic
    NginxAppService.connections.allow_from(alb, aws_ec2.Port.tcp(80))
    NginxAppService.connections.allow_from(alb, aws_ec2.Port.tcp(8080))
    NginxAppService.attach_to_application_target_group(blueGroup)

    # =============================================================================
    # CODE DEPLOY - Deployment Group CUSTOM RESOURCE for the Blue/ Green deployment
    # =============================================================================
    core.CustomResource(
        self,
        'customEcsDeploymentGroup',
        service_token=createDeploymentGroupLambda.function_arn,
        properties={
            "ApplicationName": codeDeployApplication.application_name,
            "DeploymentGroupName": ECS_DEPLOYMENT_GROUP_NAME,
            "DeploymentConfigName": ECS_DEPLOYMENT_CONFIG_NAME,
            "ServiceRoleArn": codeDeployServiceRole.role_arn,
            "BlueTargetGroup": blueGroup.target_group_name,
            "GreenTargetGroup": greenGroup.target_group_name,
            "ProdListenerArn": albProdListener.listener_arn,
            "TestListenerArn": albTestListener.listener_arn,
            "EcsClusterName": ecs_cluster.cluster_name,
            "EcsServiceName": NginxAppService.service_name,
            "TerminationWaitTime": ECS_TASKSET_TERMINATION_WAIT_TIME,
            "BlueGroupAlarm": blueGroupAlarm.alarm_name,
            "GreenGroupAlarm": greenGroupAlarm.alarm_name,
        })

    # Import the deployment group created by the custom resource so the
    # pipeline's deploy action can reference it.
    ecsDeploymentGroup = codedeploy.EcsDeploymentGroup.from_ecs_deployment_group_attributes(
        self,
        "ecsDeploymentGroup",
        application=codeDeployApplication,
        deployment_group_name=ECS_DEPLOYMENT_GROUP_NAME,
        deployment_config=codedeploy.EcsDeploymentConfig.
        from_ecs_deployment_config_name(self, "ecsDeploymentConfig",
                                        ECS_DEPLOYMENT_CONFIG_NAME))

    # =============================================================================
    # CODE BUILD PROJECT for the Blue/ Green deployment
    # =============================================================================

    # privileged=True is required to run `docker build` inside CodeBuild
    NginxAppcodebuild = aws_codebuild.Project(
        self,
        "NginxAppCodeBuild",
        role=codeBuildServiceRole,
        environment=aws_codebuild.BuildEnvironment(
            build_image=aws_codebuild.LinuxBuildImage.STANDARD_4_0,
            compute_type=aws_codebuild.ComputeType.SMALL,
            privileged=True,
            environment_variables={
                'REPOSITORY_URI': {
                    'value':
                    NginxecrRepo.repository_uri,
                    'type':
                    aws_codebuild.BuildEnvironmentVariableType.PLAINTEXT
                },
                'TASK_EXECUTION_ARN': {
                    'value':
                    ecsTaskRole.role_arn,
                    'type':
                    aws_codebuild.BuildEnvironmentVariableType.PLAINTEXT
                },
                'TASK_FAMILY': {
                    'value':
                    ECS_TASK_FAMILY_NAME,
                    'type':
                    aws_codebuild.BuildEnvironmentVariableType.PLAINTEXT
                }
            }),
        source=aws_codebuild.Source.code_commit(
            repository=NginxCodeCommitrepo))

    # =============================================================================
    # CODE PIPELINE for Blue/Green ECS deployment
    # =============================================================================

    codePipelineServiceRole = aws_iam.Role(
        self,
        "codePipelineServiceRole",
        assumed_by=aws_iam.ServicePrincipal('codepipeline.amazonaws.com'))

    inlinePolicyForCodePipeline = aws_iam.PolicyStatement(
        effect=aws_iam.Effect.ALLOW,
        actions=[
            "iam:PassRole", "sts:AssumeRole", "codecommit:Get*",
            "codecommit:List*", "codecommit:GitPull",
            "codecommit:UploadArchive", "codecommit:CancelUploadArchive",
            "codebuild:BatchGetBuilds", "codebuild:StartBuild",
            "codedeploy:CreateDeployment", "codedeploy:Get*",
            "codedeploy:RegisterApplicationRevision", "s3:Get*",
            "s3:List*", "s3:PutObject"
        ],
        resources=["*"])

    codePipelineServiceRole.add_to_policy(inlinePolicyForCodePipeline)

    sourceArtifact = codepipeline.Artifact('sourceArtifact')
    buildArtifact = codepipeline.Artifact('buildArtifact')

    # S3 bucket for storing the code pipeline artifacts
    NginxAppArtifactsBucket = s3.Bucket(
        self,
        "NginxAppArtifactsBucket",
        encryption=s3.BucketEncryption.S3_MANAGED,
        block_public_access=s3.BlockPublicAccess.BLOCK_ALL)

    # Bucket policy: require KMS server-side encryption on uploads
    # (CodePipeline encrypts artifacts with KMS) ...
    denyUnEncryptedObjectUploads = aws_iam.PolicyStatement(
        effect=aws_iam.Effect.DENY,
        actions=["s3:PutObject"],
        principals=[aws_iam.AnyPrincipal()],
        resources=[NginxAppArtifactsBucket.bucket_arn + "/*"],
        conditions={
            "StringNotEquals": {
                "s3:x-amz-server-side-encryption": "aws:kms"
            }
        })

    # ... and deny any access over plain HTTP
    denyInsecureConnections = aws_iam.PolicyStatement(
        effect=aws_iam.Effect.DENY,
        actions=["s3:*"],
        principals=[aws_iam.AnyPrincipal()],
        resources=[NginxAppArtifactsBucket.bucket_arn + "/*"],
        conditions={"Bool": {
            "aws:SecureTransport": "false"
        }})

    NginxAppArtifactsBucket.add_to_resource_policy(
        denyUnEncryptedObjectUploads)
    NginxAppArtifactsBucket.add_to_resource_policy(denyInsecureConnections)

    # Code Pipeline - CloudWatch trigger event is created by CDK
    codepipeline.Pipeline(
        self,
        "ecsBlueGreen",
        role=codePipelineServiceRole,
        artifact_bucket=NginxAppArtifactsBucket,
        stages=[
            codepipeline.StageProps(
                stage_name='Source',
                actions=[
                    aws_codepipeline_actions.CodeCommitSourceAction(
                        action_name='Source',
                        repository=NginxCodeCommitrepo,
                        output=sourceArtifact,
                    )
                ]),
            codepipeline.StageProps(
                stage_name='Build',
                actions=[
                    aws_codepipeline_actions.CodeBuildAction(
                        action_name='Build',
                        project=NginxAppcodebuild,
                        input=sourceArtifact,
                        outputs=[buildArtifact])
                ]),
            codepipeline.StageProps(
                stage_name='Deploy',
                actions=[
                    aws_codepipeline_actions.CodeDeployEcsDeployAction(
                        action_name='Deploy',
                        deployment_group=ecsDeploymentGroup,
                        app_spec_template_input=buildArtifact,
                        task_definition_template_input=buildArtifact,
                    )
                ])
        ])

    # =============================================================================
    # Export the outputs
    # =============================================================================
    core.CfnOutput(self,
                   "ecsBlueGreenCodeRepo",
                   description="Demo app code commit repository",
                   export_name="ecsBlueGreenDemoAppRepo",
                   value=NginxCodeCommitrepo.repository_clone_url_http)

    core.CfnOutput(self,
                   "ecsBlueGreenLBDns",
                   description="Load balancer DNS",
                   export_name="ecsBlueGreenLBDns",
                   value=alb.load_balancer_dns_name)