Example #1
 def _create_deploy_stage(self, stage_name: str,
                          input: codepipeline.Artifact):
     """
     A pipeline stage that will finally
     deploy the application container
     into our ECS Fargate service.
     """
     ecs_deploy_action = codepipeline_actions.EcsDeployAction(
         action_name='ECSDeploy_Action',
         input=input,
         service=self.service,
     )
     return {'stageName': stage_name, 'actions': [ecs_deploy_action]}
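
For reference, a minimal standalone sketch of the same idea that returns an explicit codepipeline.StageProps instead of a plain dict; the function name and the ecs.IBaseService parameter are illustrative assumptions, not part of the example above.

# Hypothetical helper (not from the original example): builds the Deploy stage
# props so the caller can pass them straight to codepipeline.Pipeline(stages=[...]).
from aws_cdk import aws_codepipeline as codepipeline
from aws_cdk import aws_codepipeline_actions as codepipeline_actions
from aws_cdk import aws_ecs as ecs


def deploy_stage_props(stage_name: str,
                       build_output: codepipeline.Artifact,
                       service: ecs.IBaseService) -> codepipeline.StageProps:
    # EcsDeployAction rolls the new image out to the given ECS service, reading
    # imagedefinitions.json from the root of the input artifact by default.
    action = codepipeline_actions.EcsDeployAction(
        action_name='ECSDeploy_Action',
        input=build_output,
        service=service,
    )
    return codepipeline.StageProps(stage_name=stage_name, actions=[action])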
Example #2
    def setup_api_pipeline(self):
        """Setup the build pipeline for API.

        Using codepipeline to create a Pipeline with 3 steps
            * Source: CodeCommitSourceAction
            * Build:  CodeBuildActioin
            * Deploy: EcsDeployAction: deploy to ECS service

        Returns
        -------
        aws_codepipeline.Pipeline

        """

        source_output = cp.Artifact()
        build_output = cp.Artifact(self.config.build_output)
        return cp.Pipeline(
            self,
            'ApiPipeline',
            pipeline_name=self.config.api.pipeline,
            stages=[
                cp.StageProps(stage_name='Source',
                              actions=[
                                  cp_actions.CodeCommitSourceAction(
                                      action_name='Source',
                                      repository=self.api_source,
                                      branch='master',
                                      output=source_output,
                                  )
                              ]),
                cp.StageProps(stage_name='Build',
                              actions=[
                                  cp_actions.CodeBuildAction(
                                      action_name='Build',
                                      project=self.api_build_project,
                                      input=source_output,
                                      outputs=[build_output])
                              ]),
                cp.StageProps(
                    stage_name='Deploy',
                    actions=[
                        cp_actions.EcsDeployAction(
                            action_name='Deploy',
                            service=self.service.service,
                            input=build_output,
                            # image_file=build_output.at_path('imagedefinitions.json')
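                            # Left at the default: the action reads imagedefinitions.json from the root of build_output.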
                        )
                    ])
            ])
Example #3
    def __init__(self, scope: cdk.Construct, construct_id: str,
                 **kwargs) -> None:
        super().__init__(scope, construct_id, **kwargs)

        # The code that defines your stack goes here

        vpc = _ec2.Vpc(self,
                       "ecs-vpc",
                       cidr="10.0.0.0/16",
                       nat_gateways=1,
                       max_azs=3)

        clusterAdmin = _iam.Role(self,
                                 "AdminRole",
                                 assumed_by=_iam.AccountRootPrincipal())

        cluster = _ecs.Cluster(self, "ecs-cluster", vpc=vpc)

        logging = _ecs.AwsLogDriver(stream_prefix="ecs-logs")

        taskRole = _iam.Role(
            self,
            f"ecs-taskRole-{cdk.Stack.stack_name}",
            role_name=f"ecs-taskRole-{cdk.Stack.stack_name}",
            assumed_by=_iam.ServicePrincipal("ecs-tasks.amazonaws.com"))

        # ECS Constructs

        executionRolePolicy = _iam.PolicyStatement(
            effect=_iam.Effect.ALLOW,
            resources=['*'],
            actions=[
                "ecr:GetAuthorizationToken", "ecr:BatchCheckLayerAvailability",
                "ecr:GetDownloadUrlForLayer", "ecr:BatchGetImage",
                "logs:CreateLogStream", "logs:PutLogEvents"
            ])

        taskDef = _ecs.FargateTaskDefinition(self,
                                             "ecs-taskdef",
                                             task_role=taskRole)

        taskDef.add_to_execution_role_policy(executionRolePolicy)

        container = taskDef.add_container(
            'flask-app',
            image=_ecs.ContainerImage.from_registry(
                "nikunjv/flask-image:blue"),
            memory_limit_mib=256,
            cpu=256,
            logging=logging)

        container.add_port_mappings(
            _ecs.PortMapping(container_port=5000, protocol=_ecs.Protocol.TCP))

        fargateService = ecs_patterns.ApplicationLoadBalancedFargateService(
            self,
            "ecs-service",
            cluster=cluster,
            task_definition=taskDef,
            public_load_balancer=True,
            desired_count=3,
            listener_port=80)

        scaling = fargateService.service.auto_scale_task_count(max_capacity=6)

        scaling.scale_on_cpu_utilization(
            "CpuScaling",
            target_utilization_percent=10,
            scale_in_cooldown=cdk.Duration.seconds(300),
            scale_out_cooldown=cdk.Duration.seconds(300))

        # PIPELINE CONSTRUCTS

        # ECR Repo

        ecrRepo = ecr.Repository(self, "EcrRepo")

        gitHubSource = codebuild.Source.git_hub(
            owner='samuelhailemariam',
            repo='aws-ecs-fargate-cicd-cdk',
            webhook=True,
            webhook_filters=[
                codebuild.FilterGroup.in_event_of(
                    codebuild.EventAction.PUSH).and_branch_is('main'),
            ])

        # CODEBUILD - project

        project = codebuild.Project(
            self,
            "ECSProject",
            project_name=cdk.Aws.STACK_NAME,
            source=gitHubSource,
            environment=codebuild.BuildEnvironment(
                build_image=codebuild.LinuxBuildImage.AMAZON_LINUX_2_2,
                privileged=True),
            environment_variables={
                "CLUSTER_NAME": {
                    'value': cluster.cluster_name
                },
                "ECR_REPO_URI": {
                    'value': ecrRepo.repository_uri
                }
            },
            build_spec=codebuild.BuildSpec.from_object({
                'version': "0.2",
                'phases': {
                    'pre_build': {
                        'commands': [
                            'env',
                            'export TAG=${CODEBUILD_RESOLVED_SOURCE_VERSION}'
                        ]
                    },
                    'build': {
                        'commands': [
                            'cd docker-app',
                            'docker build -t $ECR_REPO_URI:$TAG .',
                            '$(aws ecr get-login --no-include-email)',
                            'docker push $ECR_REPO_URI:$TAG'
                        ]
                    },
                    'post_build': {
                        'commands': [
                            'echo "In Post-Build Stage"', 'cd ..',
                            "printf '[{\"name\":\"flask-app\",\"imageUri\":\"%s\"}]' $ECR_REPO_URI:$TAG > imagedefinitions.json",
                            "pwd; ls -al; cat imagedefinitions.json"
                        ]
                    }
                },
                'artifacts': {
                    'files': ['imagedefinitions.json']
                }
            }))

        # PIPELINE ACTIONS

        sourceOutput = codepipeline.Artifact()
        buildOutput = codepipeline.Artifact()

        sourceAction = codepipeline_actions.GitHubSourceAction(
            action_name='GitHub_Source',
            owner='samuelhailemariam',
            repo='aws-ecs-fargate-cicd-cdk',
            branch='master',
            oauth_token=cdk.SecretValue.secrets_manager("/my/github/token"),
            output=sourceOutput)

        buildAction = codepipeline_actions.CodeBuildAction(
            action_name='codeBuild',
            project=project,
            input=sourceOutput,
            outputs=[buildOutput])

        manualApprovalAction = codepipeline_actions.ManualApprovalAction(
            action_name='Approve')

        deployAction = codepipeline_actions.EcsDeployAction(
            action_name='DeployAction',
            service=fargateService.service,
            image_file=codepipeline.ArtifactPath(buildOutput,
                                                 'imagedefinitions.json'))

        pipeline = codepipeline.Pipeline(self, "ECSPipeline")

        source_stage = pipeline.add_stage(stage_name="Source",
                                          actions=[sourceAction])

        build_stage = pipeline.add_stage(stage_name="Build",
                                         actions=[buildAction])

        approve_stage = pipeline.add_stage(stage_name="Approve",
                                           actions=[manualApprovalAction])

        deploy_stage = pipeline.add_stage(stage_name="Deploy-to-ECS",
                                          actions=[deployAction])

        ecrRepo.grant_pull_push(project.role)

        project.add_to_role_policy(
            _iam.PolicyStatement(resources=[cluster.cluster_arn],
                                 actions=[
                                     "ecs:DescribeCluster",
                                     "ecr:GetAuthorizationToken",
                                     "ecr:BatchCheckLayerAvailability",
                                     "ecr:BatchGetImage",
                                     "ecr:GetDownloadUrlForLayer"
                                 ]))

        # OUTPUT

        cdk.CfnOutput(
            self,
            "LoadBlancer-DNS",
            value=fargateService.load_balancer.load_balancer_dns_name)
Example #4
def create_pipeline(
    scope: core.Construct,
    stack_name: str,
    ecr_repository: ecr.Repository,
    app_service: ecs.FargateService,
    config: StackConfig,
    worker_service: ecs.FargateService = None,
):

    project = codebuild.PipelineProject(
        scope,
        'build',
        project_name=stack_name,
        description=f'Build project for {stack_name}. Managed by AWS CDK.',
        environment=codebuild.BuildEnvironment(
            privileged=True,
            build_image=codebuild.LinuxBuildImage.AMAZON_LINUX_2_2),
        environment_variables={
            'REPOSITORY_URI':
            codebuild.BuildEnvironmentVariable(
                value=ecr_repository.repository_uri),
        },
        cache=codebuild.Cache.local(codebuild.LocalCacheMode.DOCKER_LAYER,
                                    codebuild.LocalCacheMode.CUSTOM,
                                    codebuild.LocalCacheMode.SOURCE),
        build_spec=codebuild.BuildSpec.from_object({
            'version': '0.2',
            'phases': {
                'pre_build': {
                    'commands': [
                        '$(aws ecr get-login --no-include-email --region $AWS_REGION)',
                        'IMAGE_LATEST=${REPOSITORY_URI}:latest',
                        'IMAGE_VERSION=${REPOSITORY_URI}:${CODEBUILD_RESOLVED_SOURCE_VERSION:0:7}'
                    ]
                },
                'build': {
                    'commands': [
                        f'docker login -u="{config.docker_user}" -p="{config.docker_password}"',
                        'docker build -f Dockerfile.prod -t ${IMAGE_LATEST} .',
                        'docker tag ${IMAGE_LATEST} ${IMAGE_VERSION}'
                    ]
                },
                'post_build': {
                    'commands': [
                        'docker push ${IMAGE_LATEST}',
                        'docker push ${IMAGE_VERSION}',
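                        # imagedefinitions.json maps the container name ('container') to the newly pushed image for the ECS deploy stage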
                        "printf '[{\"name\":\"container\",\"imageUri\":\"%s\"}]' ${IMAGE_VERSION} > imagedefinitions.json"
                    ]
                }
            },
            'artifacts': {
                'files': ['imagedefinitions.json']
            }
        }))
    ecr_repository.grant_pull_push(project)
    source_output = codepipeline.Artifact()
    source_action = actions.GitHubSourceAction(
        action_name='Source',
        owner=config.repo_owner,
        repo=config.repo_name,
        branch=config.repo_branch,
        oauth_token=core.SecretValue.plain_text(config.github_access_token),
        output=source_output,
    )

    build_output = codepipeline.Artifact()
    build_action = actions.CodeBuildAction(
        action_name='Build',
        project=project,
        input=source_output,
        outputs=[build_output],
        type=actions.CodeBuildActionType.BUILD,
    )

    artifact_bucket = s3.Bucket.from_bucket_name(scope, 'artifactBucket',
                                                 config.artifact_bucket)

    deploy_actions = [
        actions.EcsDeployAction(
            action_name='App',
            service=app_service,
            input=build_output,
        )
    ]
    if worker_service:
        deploy_actions.append(
            actions.EcsDeployAction(
                action_name='Worker',
                service=worker_service,
                input=build_output,
            ))

    pipeline = codepipeline.Pipeline(
        scope,
        'pipeline',
        pipeline_name=stack_name,
        restart_execution_on_update=True,
        artifact_bucket=artifact_bucket,
    )
    pipeline.add_stage(
        stage_name='Source',
        actions=[source_action],
    )
    pipeline.add_stage(stage_name='Build', actions=[build_action])
    if config.enable_deploy_approval:
        pipeline.add_stage(stage_name='Approval',
                           actions=[
                               actions.ManualApprovalAction(
                                   action_name='Approve', notify_emails=[])
                           ])
    pipeline.add_stage(
        stage_name='Deploy',
        actions=deploy_actions,
    )
Example #5
    def __init__(self, scope: cdk.Construct, construct_id: str,
                 ecr_repository: ecr.Repository,
                 ecs_service: ecs.FargateService,
                 **kwargs) -> None:
        super().__init__(scope, construct_id, **kwargs)

        backend_repository = codecommit.Repository(
            self, 'BackendRepository',
            repository_name='MythicalMysfits-BackendRepository'
        )

        codebuild_project = codebuild.PipelineProject(
            self, 'BuildProject',
            project_name='MythicalMysfitsServiceCodeBuildProject',
            environment=codebuild.BuildEnvironment(
                build_image=codebuild.LinuxBuildImage.UBUNTU_14_04_PYTHON_3_5_2,
                compute_type=codebuild.ComputeType.SMALL,
                environment_variables={
                    'AWS_ACCOUNT_ID': codebuild.BuildEnvironmentVariable(
                        type=codebuild.BuildEnvironmentVariableType.PLAINTEXT,
                        value=self.account),
                    'AWS_DEFAULT_REGION': codebuild.BuildEnvironmentVariable(
                        type=codebuild.BuildEnvironmentVariableType.PLAINTEXT,
                        value=self.region),
                },
                privileged=True
            )
        )

        codebuild_policy_stm = _iam.PolicyStatement()
        codebuild_policy_stm.add_resources(backend_repository.repository_arn)
        codebuild_policy_stm.add_actions(
            "codecommit:ListBranches",
            "codecommit:ListRepositories",
            "codecommit:BatchGetRepositories",
            "codecommit:GitPull"
        )
        codebuild_project.add_to_role_policy(codebuild_policy_stm)

        ecr_repository.grant_pull_push(codebuild_project.grant_principal)

        source_output = codepipeline.Artifact()
        source_action = actions.CodeCommitSourceAction(
            action_name='CodeCommit-Source',
            branch='main',
            trigger=actions.CodeCommitTrigger.EVENTS,
            repository=backend_repository,
            output=source_output
        )

        build_output = codepipeline.Artifact()
        build_action = actions.CodeBuildAction(
            action_name='Build',
            input=source_output,
            outputs=[
                build_output
            ],
            project=codebuild_project
        )

        deploy_action = actions.EcsDeployAction(
            action_name='DeployAction',
            service=ecs_service,
            input=build_output
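            # no image_file given, so the action defaults to imagedefinitions.json at the root of build_output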
        )

        pipeline = codepipeline.Pipeline(
            self, 'Pipeline',
            pipeline_name='MythicalMysfitsPipeline',
        )
        pipeline.add_stage(stage_name='Source', actions=[source_action])
        pipeline.add_stage(stage_name='Build', actions=[build_action])
        # # the following pipeline.add_stage doesn't work
        # pipeline.add_stage(stage_name='Deploy', actions=[deploy_action])

        cdk.CfnOutput(self, 'BackendRepositoryCloneUrlHttp',
                      description='Backend Repository CloneUrl HTTP',
                      value=backend_repository.repository_clone_url_http)

        cdk.CfnOutput(self, 'BackendRepositoryCloneUrlSsh',
                      description='Backend Repository CloneUrl SSH',
                      value=backend_repository.repository_clone_url_ssh)
Example #6
    def __init__(self,
                 scope: core.Construct,
                 id: str,
                 UserName="******",
                 Repo="default",
                 WebService="default",
                 **kwargs):
        super().__init__(scope, id, **kwargs)

        self.My_CodeBuild_Role = _iam.Role(
            self,
            'CodeBuildRole-Web-' + UserName,
            assumed_by=_iam.CompositePrincipal(
                _iam.ServicePrincipal('ec2.amazonaws.com'),
                _iam.ServicePrincipal('codebuild.amazonaws.com')))

        for repo in Repo.getRepositoriesList():
            Repo.getRepositories(repo).grant_pull_push(self.My_CodeBuild_Role)

        self.My_CodeCommit_Web = _codecommit.Repository(
            self,
            "CodeCommit-Web-" + UserName,
            repository_name="Workshop-Web-" + UserName,
            description="CodeCommit for Web Project,Owner:" + UserName)

        self.My_CodeBuild_Web = _codebuild.PipelineProject(
            self,
            "CodeBuild-Web-" + UserName,
            project_name="CodeBuild-Web" + UserName,
            role=self.My_CodeBuild_Role,
            environment=_codebuild.BuildEnvironment(
                build_image=_codebuild.LinuxBuildImage.STANDARD_2_0,
                privileged=True))

        self.CodeCommit_Web_Source = _codepipeline.Artifact(
            "CodeCommit_Web_Source-" + UserName)
        self.EcsImage_Web_Source = _codepipeline.Artifact(
            'EcsImage_Web_Source-' + UserName)
        self.FargateImage_Web_Source = _codepipeline.Artifact(
            'FargateImage_Web_Source-' + UserName)

        self.My_CodePipeline_Web = _codepipeline.Pipeline(
            self,
            "CodePipeline-Web-" + UserName,
            stages=[
                _codepipeline.StageProps(
                    stage_name="Source",
                    actions=[
                        _codepipeline_actions.CodeCommitSourceAction(
                            action_name="CodeCommit_Web_Source",
                            repository=self.My_CodeCommit_Web,
                            branch="master",
                            output=self.CodeCommit_Web_Source)
                    ]),
                _codepipeline.StageProps(
                    stage_name="Build",
                    actions=[
                        _codepipeline_actions.CodeBuildAction(
                            action_name="CodeCommit_Web_Build",
                            project=self.My_CodeBuild_Web,
                            input=self.CodeCommit_Web_Source,
                            outputs=[self.FargateImage_Web_Source])
                    ]),
                _codepipeline.StageProps(
                    stage_name="Deploy",
                    actions=[
                        _codepipeline_actions.EcsDeployAction(
                            action_name='CodeDeploy_Web_Deploy',
                            service=WebService.getFargateService(
                                "WebApplicationService"),
                            input=self.FargateImage_Web_Source)
                    ])
            ])

        core.CfnOutput(self,
                       "CodeCommit For WebApplication",
                       value=self.My_CodeCommit_Web.repository_clone_url_http)
Example #7
    def __init__(self, scope: core.Construct, id: str, repo_arn: str,
                 cluster: ecs.ICluster, repo: ecr.IRepository,
                 clientFirewall: ec2.ISecurityGroup,
                 docdbClientFirewall: ec2.ISecurityGroup, cmnamespace: str,
                 cmmsk: str, cmddb: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        # service skeleton
        streamproc_task_definition = ecs.FargateTaskDefinition(
            scope=self,
            id="StreamProcTaskDef",
            cpu=1024,
            memory_limit_mib=2048)
        streamproc_container = streamproc_task_definition.add_container(
            id="StreamProcContainer",
            image=ecs.ContainerImage.from_ecr_repository(repository=repo,
                                                         tag='latest'),
            logging=ecs.LogDrivers.aws_logs(stream_prefix="StreamProcessing"),
            environment={
                'NAMESPACE': cmnamespace,
                'MSK_SERVICE': cmmsk,
                'TOPIC_NAME': 'MyTopic',
                'DDB_SERVICE': cmddb
            },
        )
        streamproc_task_definition.add_to_task_role_policy(
            statement=iam.PolicyStatement(
                resources=['*'],
                actions=['servicediscovery:DiscoverInstances']))
        streamproc_task_definition.add_to_task_role_policy(
            statement=iam.PolicyStatement(
                resources=['*'], actions=['kafka:GetBootstrapBrokers']))
        streamproc_service = ecs.FargateService(
            scope=self,
            id="StreamProcessingService",
            task_definition=streamproc_task_definition,
            assign_public_ip=False,
            security_group=clientFirewall,
            cluster=cluster,
            desired_count=1)
        streamproc_scaling = streamproc_service.auto_scale_task_count(
            max_capacity=10)
        streamproc_scaling.scale_on_cpu_utilization(
            "CpuScaling", target_utilization_percent=70)
        ssm.StringParameter(scope=self,
                            id='SSMParamStreamProcImageName',
                            string_value=streamproc_container.container_name,
                            parameter_name='image_streamproc')
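        # The container name is published to SSM, presumably so the build project (granted ssm:GetParameters below) can reference it when writing imagedefinitions.json.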

        # pipeline
        self.cbproject = codebuild.PipelineProject(
            scope=self,
            id='KafkaToDocdbBuildImage',
            cache=codebuild.Cache.local(codebuild.LocalCacheMode.DOCKER_LAYER),
            environment=codebuild.BuildEnvironment(
                build_image=codebuild.LinuxBuildImage.
                UBUNTU_14_04_DOCKER_18_09_0,
                privileged=True,
                compute_type=codebuild.ComputeType.LARGE))
        self.cbproject.add_to_role_policy(statement=iam.PolicyStatement(
            resources=['*'],
            actions=['ssm:GetParameters', 'ecr:GetAuthorizationToken']))
        self.cbproject.add_to_role_policy(statement=iam.PolicyStatement(
            resources=[repo_arn], actions=['ecr:*']))
        self.pipeline = codepipeline.Pipeline(scope=self,
                                              id="KafkaToDocDb",
                                              pipeline_name='KafkaToDocdb')
        self.pipeline.add_stage(
            stage_name='Source',
            actions=[
                actions.CodeCommitSourceAction(
                    repository=codecommit.Repository.from_repository_name(
                        scope=self,
                        id='FargateStreamProcessorRepo',
                        repository_name='FargateStreamProcessor'),
                    action_name="Get-Code",
                    output=codepipeline.Artifact('code'))
            ])
        self.pipeline.add_stage(
            stage_name='Build',
            actions=[
                actions.CodeBuildAction(
                    input=codepipeline.Artifact('code'),
                    project=self.cbproject,
                    outputs=[codepipeline.Artifact('image')],
                    action_name='Build-Image')
            ])
        self.pipeline.add_stage(stage_name='Deploy',
                                actions=[
                                    actions.EcsDeployAction(
                                        service=streamproc_service,
                                        input=codepipeline.Artifact('image'),
                                        action_name='Deploy-Image')
                                ])

        core.CfnOutput(self,
                       "IgnoredOutput",
                       value=docdbClientFirewall.security_group_id)
Example #8
    def __init__(self, scope: core.Construct, id: str, cluster: ecs.ICluster,
                 kafkaClientFirewall: ec2.ISecurityGroup,
                 lbFirewall: ec2.ISecurityGroup, kcrepo: ecr.IRepository,
                 hcrepo: ecr.IRepository, cmnamespace: str, cmmsk: str,
                 vpc: ec2.IVpc, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        # S3 buckets
        self.kafkaConnectBucket = s3.Bucket(
            scope=self,
            id="KafkaConnectBucket",
            block_public_access=s3.BlockPublicAccess.BLOCK_ALL,
            encryption=s3.BucketEncryption.S3_MANAGED)
        core.CfnOutput(scope=self,
                       id="KafkaConnectBucketName",
                       value=self.kafkaConnectBucket.bucket_name)

        # service skeleton
        kc_task_definition = ecs.FargateTaskDefinition(
            scope=self,
            id="KafkaConnectTaskDef",
            cpu=4096,
            memory_limit_mib=8192)
        kc_container = kc_task_definition.add_container(
            id="KafkaConnectContainer",
            image=ecs.ContainerImage.from_ecr_repository(repository=kcrepo,
                                                         tag='latest'),
            logging=ecs.LogDrivers.aws_logs(stream_prefix="KafkaConnect"),
            environment={
                'CONNECT_PLUGIN_PATH': "/usr/share/java",
                'MSK_SERVICE': cmmsk,
                'CONNECT_GROUP_ID': 'KcS3SinkGroup',
                'CONNECT_CONFIG_STORAGE_TOPIC': 'kc_config',
                'CONNECT_OFFSET_STORAGE_TOPIC': 'kc_offset',
                'CONNECT_STATUS_STORAGE_TOPIC': 'kc_status',
                'CONNECT_VALUE_CONVERTER':
                'org.apache.kafka.connect.storage.StringConverter',
                'CONNECT_KEY_CONVERTER':
                'org.apache.kafka.connect.storage.StringConverter',
                'CONNECT_REST_PORT': '8083',
                'CONNECT_CONSUMER_AUTO_OFFSET_RESET': 'latest',
                'CONNECT_OFFSET_FLUSH_INTERVAL_MS': '120000',
                'CONNECT_OFFSET_FLUSH_TIMEOUT_MS': '20000',
                'CONNECT_INTERNAL_KEY_CONVERTER':
                'org.apache.kafka.connect.json.JsonConverter',
                'CONNECT_INTERNAL_VALUE_CONVERTER':
                'org.apache.kafka.connect.json.JsonConverter',
                'CONNECT_INTERNAL_KEY_CONVERTER_SCHEMAS_ENABLE': 'false',
                'CONNECT_INTERNAL_VALUE_CONVERTER_SCHEMAS_ENABLE': 'false',
                'CONNECT_SECURITY_PROTOCOL': 'SSL',
                'CONNECT_CONSUMER_SECURITY_PROTOCOL': 'SSL',
                'CONNECT_PRODUCER_SECURITY_PROTOCOL': 'SSL',
                'REGION': self.region
            })
        kc_container.add_port_mappings(
            ecs.PortMapping(container_port=8083,
                            host_port=8083,
                            protocol=ecs.Protocol.TCP))
        hc_container = kc_task_definition.add_container(
            id="HealthCheckContainer",
            image=ecs.ContainerImage.from_ecr_repository(repository=hcrepo,
                                                         tag='latest'),
            logging=ecs.LogDrivers.aws_logs(stream_prefix="KafkaConnectHc"))
        hc_container.add_port_mappings(
            ecs.PortMapping(container_port=18083,
                            host_port=18083,
                            protocol=ecs.Protocol.TCP))
        kc_task_definition.add_to_task_role_policy(
            statement=iam.PolicyStatement(
                resources=['*'],
                actions=['servicediscovery:DiscoverInstances']))
        kc_task_definition.add_to_task_role_policy(
            statement=iam.PolicyStatement(
                resources=['*'], actions=['kafka:GetBootstrapBrokers']))
        kc_task_definition.add_to_task_role_policy(
            statement=iam.PolicyStatement(resources=[
                self.kafkaConnectBucket.bucket_arn,
                self.kafkaConnectBucket.bucket_arn + '/*'
            ],
                                          actions=['s3:*']))
        kc_svc = ecs.FargateService(scope=self,
                                    id="KafkaConnectSvc",
                                    task_definition=kc_task_definition,
                                    security_group=kafkaClientFirewall,
                                    cluster=cluster,
                                    desired_count=1)
        kc_scaling = kc_svc.auto_scale_task_count(max_capacity=10)
        kc_scaling.scale_on_cpu_utilization("CpuScaling",
                                            target_utilization_percent=70)
        ssm.StringParameter(scope=self,
                            id='SSMParamS3SinkImageName',
                            string_value=kc_container.container_name,
                            parameter_name='image_s3sink')
        ssm.StringParameter(scope=self,
                            id='SSMParamS3SinkHCImageName',
                            string_value=hc_container.container_name,
                            parameter_name='image_s3sink_hc')

        # Create ALB
        self.lb = elbv2.ApplicationLoadBalancer(self,
                                                "KafkaConnectALB",
                                                vpc=vpc,
                                                security_group=lbFirewall,
                                                internet_facing=False)
        listener = self.lb.add_listener(
            "KafkaConnectListener",
            port=8083,
            protocol=elbv2.ApplicationProtocol.HTTP,
            open=False)

        health_check = elbv2.HealthCheck(interval=core.Duration.seconds(120),
                                         path="/",
                                         port='18083',
                                         timeout=core.Duration.seconds(60))

        # Attach ALB to ECS Service
        listener.add_targets(
            "KafkaConnectSvcListener",
            port=8083,
            protocol=elbv2.ApplicationProtocol.HTTP,
            targets=[kc_svc],
            health_check=health_check,
        )
        core.CfnOutput(scope=self,
                       id="KafkaConnectAlbDns",
                       value=self.lb.load_balancer_dns_name)

        # pipeline
        self.cbproject = codebuild.PipelineProject(
            scope=self,
            id='KafkaS3SinkBuildImage',
            cache=codebuild.Cache.local(codebuild.LocalCacheMode.DOCKER_LAYER),
            environment=codebuild.BuildEnvironment(
                build_image=codebuild.LinuxBuildImage.
                UBUNTU_14_04_DOCKER_18_09_0,
                privileged=True,
                compute_type=codebuild.ComputeType.LARGE))
        self.cbproject.add_to_role_policy(statement=iam.PolicyStatement(
            resources=['*'],
            actions=['ssm:GetParameters', 'ecr:GetAuthorizationToken']))
        self.cbproject.add_to_role_policy(
            statement=iam.PolicyStatement(resources=['*'], actions=['ecr:*']))
        self.pipeline = codepipeline.Pipeline(scope=self,
                                              id="KafkaS3Sink",
                                              pipeline_name='KafkaS3Sink')
        self.pipeline.add_stage(
            stage_name='Source',
            actions=[
                actions.CodeCommitSourceAction(
                    repository=codecommit.Repository.from_repository_name(
                        scope=self,
                        id='FargateKcRepo',
                        repository_name='FargateS3Sink'),
                    action_name="Get-Code-Kc",
                    output=codepipeline.Artifact('code')),
                actions.CodeCommitSourceAction(
                    repository=codecommit.Repository.from_repository_name(
                        scope=self,
                        id='FargateHcRepo',
                        repository_name='FargateS3SinkHealthCheck'),
                    action_name="Get-Code-Hc",
                    output=codepipeline.Artifact('codehc'))
            ])
        self.pipeline.add_stage(
            stage_name='Build',
            actions=[
                actions.CodeBuildAction(
                    input=codepipeline.Artifact('code'),
                    extra_inputs=[codepipeline.Artifact('codehc')],
                    project=self.cbproject,
                    outputs=[codepipeline.Artifact('image')],
                    action_name='Build-Image')
            ])
        self.pipeline.add_stage(stage_name='Deploy',
                                actions=[
                                    actions.EcsDeployAction(
                                        service=kc_svc,
                                        input=codepipeline.Artifact('image'),
                                        action_name='Deploy-Image')
                                ])
Example #9
    def __init__(self, app: core.App, id: str, props, **kwargs) -> None:
        super().__init__(app, id, **kwargs)

        # variables
        # Context Variables
        namespace = self.node.try_get_context('namespace')
        application = self.node.try_get_context('application')
        image_name_context = application['image-name']
        code_branch = application['branch']

        # Services from Infra stack
        fargateService = props['container-infra']['fargateService']
        bucket = props['container-infra']['pipeline-bucket']

        # Services from PipelineBase stack
        codecommit = props['pipeline-base']['codecommit']
        codebuild = props['pipeline-base']['codebuild']

        # define the s3 artifact for stages
        source_output = _codepipeline.Artifact()
        build_output = _codepipeline.Artifact()

        ### defining the pipeline stages ###

        # code commit (source) stage
        code_commit_source_action = _codepipeline_actions.CodeCommitSourceAction(
            repository=codecommit,
            branch=code_branch,
            output=source_output,
            trigger=_codepipeline_actions.CodeCommitTrigger.POLL,
            action_name="CodeCommitSource",
            run_order=1,
            variables_namespace=f"{namespace}")
        source_stage = _codepipeline.StageProps(
            stage_name="Source", actions=[code_commit_source_action])

        # code build (build) stage
        code_build_action = _codepipeline_actions.CodeBuildAction(
            action_name='DockerBuildImages',
            input=source_output,
            project=codebuild,
            run_order=1,
            outputs=[build_output])
        build_stage = _codepipeline.StageProps(stage_name="Build",
                                               actions=[code_build_action])

        # code deploy (deploy) stage
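        # image_file (an ArtifactPath) names the build artifact and the imagedefinitions.json file inside it that the deploy action reads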
        deploy_action = _codepipeline_actions.EcsDeployAction(
            action_name="DeployAction",
            service=fargateService.service,
            image_file=_codepipeline.ArtifactPath(build_output,
                                                  "imagedefinitions.json"))
        deploy_stage = _codepipeline.StageProps(stage_name="Deploy",
                                                actions=[deploy_action])

        pipeline = _codepipeline.Pipeline(
            self,
            "Pipeline",
            pipeline_name=f"{namespace}-{image_name_context}-pipeline",
            artifact_bucket=bucket,
            cross_account_keys=False,
            stages=[source_stage, build_stage, deploy_stage])

        # give pipelinerole read write to the bucket
        bucket.grant_read_write(pipeline.role)
        pipeline.add_to_role_policy(
            _iam.PolicyStatement(actions=["s3:*"],
                                 resources=[f"{bucket.bucket_arn}"]))

        # cfn output
        core.CfnOutput(self,
                       "PipelineOut",
                       description="Pipeline",
                       value=pipeline.pipeline_name)
Example #10
    def __init__(self, scope: core.Construct, id: str, props: CiCdStackProps,
                 **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        backend_repository = aws_codecommit.Repository(
            self,
            "BackendRespository",
            repository_name="MythicalMysfits-BackendRepository",
        )

        codebuild_project = aws_codebuild.PipelineProject(
            self,
            "BuildProject",
            project_name="MythicalMysfitsServiceCodeBuildProject",
            environment=aws_codebuild.BuildEnvironment(
                compute_type=aws_codebuild.ComputeType.SMALL,
                build_image=aws_codebuild.LinuxBuildImage.
                UBUNTU_14_04_PYTHON_3_5_2,
                privileged=True,
                environment_variables={
                    "AWS_ACCOUNT_ID":
                    aws_codebuild.BuildEnvironmentVariable(
                        type=aws_codebuild.BuildEnvironmentVariableType.
                        PLAINTEXT,
                        value=core.Aws.ACCOUNT_ID,
                    ),
                    "AWS_DEFAULT_REGION":
                    aws_codebuild.BuildEnvironmentVariable(
                        type=aws_codebuild.BuildEnvironmentVariableType.
                        PLAINTEXT,
                        value=core.Aws.REGION,
                    ),
                },
            ),
        )

        codebuild_policy = aws_iam.PolicyStatement()
        codebuild_policy.add_resources(backend_repository.repository_arn)
        codebuild_policy.add_actions(
            "codecommit:ListBranches",
            "codecommit:ListRepositories",
            "codecommit:BatchGetRepositories",
            "codecommit:GitPull",
        )
        codebuild_project.add_to_role_policy(codebuild_policy)

        props.ecr_repository.grant_pull_push(codebuild_project.grant_principal)

        source_output = aws_codepipeline.Artifact()
        source_action = aws_codepipeline_actions.CodeCommitSourceAction(
            action_name="CodeCommit-Source",
            branch="master",
            trigger=aws_codepipeline_actions.CodeCommitTrigger.POLL,
            repository=backend_repository,
            output=source_output,
        )

        build_output = aws_codepipeline.Artifact()
        build_action = aws_codepipeline_actions.CodeBuildAction(
            action_name="Build",
            input=source_output,
            outputs=[build_output],
            project=codebuild_project,
        )

        deploy_action = aws_codepipeline_actions.EcsDeployAction(
            action_name="DeployAction",
            service=props.ecs_service,
            input=build_output)

        pipeline = aws_codepipeline.Pipeline(
            self, "Pipeline", pipeline_name="MythicalMysfitsPipeline")
        pipeline.add_stage(stage_name="Source", actions=[source_action])
        pipeline.add_stage(stage_name="Build", actions=[build_action])
        pipeline.add_stage(stage_name="Deploy", actions=[deploy_action])

        core.CfnOutput(
            self,
            "BackendRepositoryCloneUrlHttp",
            description="Backend Repository CloneUrl HTTP",
            value=backend_repository.repository_clone_url_http,
        )
        core.CfnOutput(
            self,
            "BackendRepositoryCloneUrlSsh",
            description="Backend Repository CloneUrl SSH",
            value=backend_repository.repository_clone_url_ssh,
        )