Example #1
    def _source(self,
                owner: str,
                repo: str,
                branch: str = 'master',
                connection: str = None,
                oauth: str = None):
        """[summary]

        Args:
            owner (str): Github organization/user
            repo (str): git repository url name
            branch (str): git branch
            connection (str): AWS codebuild connection_arn
            oauth (str): Github oauth token
        """
        artifact = cp.Artifact()

        if not connection and not oauth:
            raise SystemError("No credentials for Github provided")

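        # BitBucketSourceAction is CDK's CodeStar Connections source action;
        # only the connection ARN is consumed here (the oauth token is not used).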
        checkout = cpa.BitBucketSourceAction(
            connection_arn=connection,
            action_name="Source-{}".format(branch),
            output=artifact,
            owner=owner,
            repo=repo,
            branch=branch,
            code_build_clone_output=True)

        self.artifacts['sources'].append(artifact)
        self.actions['sources'].append(checkout)
        self.pipe.add_stage(stage_name='Source@{}'.format(repo),
                            actions=[checkout])
Example #2
    def __init__(self, scope: core.Construct, id: str, params: dict, **kwargs):
        super().__init__(scope, id, **kwargs)

        source_artifact = codepipeline.Artifact()
        cloud_assembly_artifact = codepipeline.Artifact()

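        # CDK Pipelines: pull the source from GitHub through a CodeStar
        # connection, then synthesize the CDK app with SimpleSynthAction.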
        pipeline = pipelines.CdkPipeline(
            self,
            'CdkPipeline',
            cloud_assembly_artifact=cloud_assembly_artifact,
            pipeline_name=params['pipeline_name'],
            source_action=cpactions.BitBucketSourceAction(
                action_name='GithubAction',
                output=source_artifact,
                connection_arn=params['connection_arn'],
                owner=params['github_owner'],
                repo=params['github_repo'],
                branch=params['github_branch']),
            synth_action=pipelines.SimpleSynthAction(
                source_artifact=source_artifact,
                cloud_assembly_artifact=cloud_assembly_artifact,
                role_policy_statements=[
                    aws_iam.PolicyStatement(
                        actions=["secretsmanager:GetSecretValue"],
                        resources=[params['secret_arn']])
                ],
                install_command=('npm install -g aws-cdk && '
                                 'pip install --upgrade pip && '
                                 'pip install -r requirements.txt'),
                synth_command='cdk synth -v -c region=%s -c secret_name=%s' %
                              (params['region'], params['secret_name']),
            ))

        pipeline.add_application_stage(CmnStage(self, 'CMN'))
Example #3
    def __init__(self, pipeline_data: PipelineData):
        super().__init__(pipeline_data.scope,
                         pipeline_data.name,
                         env=pipeline_data.env)
        self.source_artifact = cp.Artifact('Source')
        self.cloud_assembly_artifact = cp.Artifact('CloudAs')
        self.pipeline = CdkPipeline(
            self,
            "Pipeline",
            self_mutating=True,
            cross_account_keys=False,
            cloud_assembly_artifact=self.cloud_assembly_artifact,
            source_action=cpa.BitBucketSourceAction(
                role=iam.LazyRole(
                    self,
                    'SourceRole',
                    assumed_by=iam.AccountPrincipal(self.account),
                    managed_policies=[
                        iam.ManagedPolicy.from_aws_managed_policy_name(
                            'AmazonS3FullAccess')
                    ]),
                action_name="Ship",
                connection_arn=pipeline_data.github_connection_arn,
                owner=pipeline_data.github_owner,
                repo=pipeline_data.repo_name,
                branch=pipeline_data.repo_branch,
                output=self.source_artifact),
            synth_action=SimpleSynthAction(
                install_commands=pipeline_data.synth_install_commands,
                environment=cb.BuildEnvironment(
                    environment_variables={
                        env_key: cb.BuildEnvironmentVariable(
                            value=pipeline_data.build_env[env_key])
                        for env_key in pipeline_data.build_env
                    },
                    build_image=cb.LinuxBuildImage.STANDARD_5_0,
                    compute_type=cb.ComputeType.SMALL,
                    privileged=True),
                synth_command='cdk synth',
                action_name='Synthesize',
                cloud_assembly_artifact=self.cloud_assembly_artifact,
                source_artifact=self.source_artifact))
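        # Escape hatch: walk down to the CfnProject behind the synth action so
        # properties not exposed by the high-level CdkPipeline API can be overridden.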
        pipeline = self.pipeline.node.try_find_child('Pipeline')
        build_stage = pipeline.node.try_find_child('Build')
        synth_action = build_stage.node.try_find_child('Synthesize')
        build_proj = synth_action.node.try_find_child('CdkBuildProject')
        cfn_build_project = build_proj.node.default_child

        # Privileged mode is required to run Docker during the build
        cfn_build_project.add_property_override("Environment.PrivilegedMode",
                                                "true")
        # The aws-cdk default image is standard:4.0; override it to standard:5.0
        cfn_build_project.add_property_override("Environment.Image",
                                                "aws/codebuild/standard:5.0")
        # Only clone the latest commit; skip the full Git history
        cfn_build_project.add_property_override("Source.GitCloneDepth", 1)

        self.pipeline.add_application_stage(pipeline_data.app_stage)
Example #4
    def __init__(self, scope: core.Construct, id: str, params: dict, **kwargs):
        super().__init__(scope, id, **kwargs)

        source_artifact = codepipeline.Artifact()
        cloud_assembly_artifact = codepipeline.Artifact()

        pipeline = pipelines.CdkPipeline(self, 'datapipeline-demo-cd',
            cloud_assembly_artifact=cloud_assembly_artifact,
            pipeline_name=params['pipeline_name'],
            source_action=cpactions.BitBucketSourceAction(
                action_name='GithubAction',
                output=source_artifact,
                connection_arn=params['connection_arn'],
                owner=params['github_owner'],
                repo=params['github_repo'],
                branch=params['github_branch']
            ),
            synth_action=pipelines.SimpleSynthAction(
                source_artifact=source_artifact,
                cloud_assembly_artifact=cloud_assembly_artifact,
                role_policy_statements=[
                    aws_iam.PolicyStatement(
                        actions=["secretsmanager:GetSecretValue"],
                        resources=[params['secret_arn']]
                    ),
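                    # Network-interface permissions, typically needed when the
                    # synth build runs inside a VPC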
                    aws_iam.PolicyStatement(
                        actions=[
                            "ec2:CreateNetworkInterface",
                            "ec2:DescribeAvailabilityZones",
                            "ec2:DescribeInternetGateways",
                            "ec2:DescribeSecurityGroups",
                            "ec2:DescribeSubnets",
                            "ec2:DescribeVpcs",
                            "ec2:DeleteNetworkInterface",
                            "ec2:ModifyNetworkInterfaceAttribute"
                        ],
                        resources=['*']
                    ),
                ],
                install_command='npm install -g aws-cdk && pip install --upgrade pip && pip install -r requirements.txt',
                synth_command='cdk synth -v -c region=%s -c secret_name=%s' % (params['region'], params['secret_name']),
            )
        )

        pipeline.add_application_stage(DeploymentStage(self, 'datapipeline-demo'))
Example #5
    def __init__(self, scope: core.Construct, id: str, **kwargs):
        super().__init__(scope, id, **kwargs)

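        # Pipeline settings are read from a shared Parameters singleton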
        param = Parameters.instance()

        pipeline_name = param.getParameter('pipeline_name')
        connection_arn = param.getParameter('connection_arn')
        github_owner = param.getParameter('github_owner')
        github_repo = param.getParameter('github_repo')
        github_branch = param.getParameter('github_branch')
        secret_arn = param.getParameter('secret_arn')

        source_artifact = codepipeline.Artifact()
        cloud_assembly_artifact = codepipeline.Artifact()

        pipeline = pipelines.CdkPipeline(
            self,
            'CdkPipeline',
            cloud_assembly_artifact=cloud_assembly_artifact,
            pipeline_name=pipeline_name,
            source_action=cpactions.BitBucketSourceAction(
                action_name='GithubAction',
                output=source_artifact,
                connection_arn=connection_arn,
                owner=github_owner,
                repo=github_repo,
                branch=github_branch),
            synth_action=pipelines.SimpleSynthAction(
                source_artifact=source_artifact,
                cloud_assembly_artifact=cloud_assembly_artifact,
                role_policy_statements=[
                    aws_iam.PolicyStatement(
                        actions=["secretsmanager:GetSecretValue"],
                        resources=[secret_arn])
                ],
                install_command=('npm install -g aws-cdk && '
                                 'pip install --upgrade pip && '
                                 'pip install -r requirements.txt'),
                synth_command=(f"cdk synth -v -c region={AppContext.region} "
                               f"-c secret_name={AppContext.secret_name}"),
            ))

        pipeline.add_application_stage(DeployStage(self, 'Deploy'))
Example #6
def generate_pipeline_stages(codebuild_project, role, beanstalk_application,
                             beanstalk_environment, codestar_connection):
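    """Assemble the Source, Build and Deploy StageProps for the CodePipeline."""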
    source_output = codepipeline.Artifact("SourceOutput")
    source_stage = codepipeline.StageProps(
        stage_name="Source",
        actions=[
            codepipeline_actions.BitBucketSourceAction(
                connection_arn=codestar_connection.attr_connection_arn,
                output=source_output,
                repo="santos-devops-challenge-tier1",
                owner="Jayvee1413",
                action_name="Github",
                code_build_clone_output=True,
                run_order=1),
        ],
    )
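    # Build stage: run the CodeBuild project against the cloned source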
    codebuild_output = codepipeline.Artifact("CodebuildOutput")
    codebuild_stage = codepipeline.StageProps(
        stage_name="Build",
        actions=[
            codepipeline_actions.CodeBuildAction(
                input=source_output,
                project=codebuild_project,
                outputs=[codebuild_output],
                action_name="codebuild",
                run_order=2,
            )
        ])
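    # Deploy stage: ship the build output to the Elastic Beanstalk environment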
    deploy_stage = codepipeline.StageProps(
        stage_name="Deploy",
        actions=[
            ElasticBeanStalkDeployAction(
                action_name='Deploy',
                role=role,
                application_name=beanstalk_application.application_name,
                input=codebuild_output,
                environment_name=beanstalk_environment.environment_name,
                run_order=3)
        ])
    return [source_stage, codebuild_stage, deploy_stage]
Example #7
    def __init__(self, scope: core.Construct, construct_id: str, **kwargs) -> None:
        super().__init__(scope, construct_id, **kwargs)

        # The code that defines your stack goes here

        pipeline = codepipeline.Pipeline(
            self, "Pipeline", artifact_bucket=s3.Bucket(self, "ArtifactBucket")
        )

        # Define the 'source' stage to be triggered by a webhook on the GitHub
        # repo for the code. Don't be fooled by the name, it's just a codestar
        # connection in the background. Bitbucket isn't involved.
        source_output = codepipeline.Artifact("SourceOutput")
        github_source = pipeline_actions.BitBucketSourceAction(
            action_name="Github_Source",
            connection_arn=core.SecretValue.secrets_manager(
                secret_id="folksgl_github_connection_arn", json_field="arn"
            ).to_string(),
            repo="sam-cicd-python-template",
            owner="folksgl",
            branch="main",
            output=source_output,
        )
        pipeline.add_stage(stage_name="Source", actions=[github_source])

        # Define the 'build' stage
        build_project = codebuild.PipelineProject(
            scope=self,
            id="Build",
            # Declare the pipeline artifact bucket name as an environment variable
            # so the build can send the deployment package to it.
            environment_variables={
                "PACKAGE_BUCKET": codebuild.BuildEnvironmentVariable(
                    value=pipeline.artifact_bucket.bucket_name,
                    type=codebuild.BuildEnvironmentVariableType.PLAINTEXT,
                )
            },
            environment=codebuild.BuildEnvironment(
                build_image=codebuild.LinuxBuildImage.STANDARD_3_0
            ),
        )
        build_stage_output = codepipeline.Artifact("BuildStageOutput")
        build_action = pipeline_actions.CodeBuildAction(
            action_name="Build",
            project=build_project,
            input=source_output,
            outputs=[build_stage_output],
        )
        pipeline.add_stage(stage_name="Build", actions=[build_action])

        # Define the 'deploy' stage
        stack_name = "gateway-service-python"
        change_set_name = f"{stack_name}-changeset"

        create_change_set = pipeline_actions.CloudFormationCreateReplaceChangeSetAction(
            action_name="CreateChangeSet",
            stack_name=stack_name,
            change_set_name=change_set_name,
            template_path=build_stage_output.at_path("packaged.yaml"),
            admin_permissions=True,
            run_order=1,
        )
        execute_change_set = pipeline_actions.CloudFormationExecuteChangeSetAction(
            action_name="Deploy",
            stack_name=stack_name,
            change_set_name=change_set_name,
            run_order=2,
        )
        pipeline.add_stage(
            stage_name="DevDeployment", actions=[create_change_set, execute_change_set]
        )
Example #8
    def __init__(self, scope: core.Construct, id: str,
                 shared_context: Dict[str, str], **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        self.pipeline_id = f'{id}-cicd-stack'

        artifact_bucket = s3.Bucket(
            scope=self,
            id=f'{id}-artifacts-bucket',
            removal_policy=core.RemovalPolicy.DESTROY,
            auto_delete_objects=True,
            encryption=s3.BucketEncryption.KMS_MANAGED,
            versioned=False,
            lifecycle_rules=[LifecycleRule(expiration=core.Duration.days(2))])

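        # Pipeline backed by the short-lived artifact bucket above; it restarts
        # its own execution whenever the pipeline definition is updated.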
        classifier_pipeline = pipeline.Pipeline(
            scope=self,
            id=f'{id}-pipeline',
            artifact_bucket=artifact_bucket,
            pipeline_name=self.pipeline_id,
            restart_execution_on_update=True,
        )

        source_output = pipeline.Artifact()

        classifier_pipeline.add_stage(
            stage_name='GithubSources',
            actions=[
                actions.BitBucketSourceAction(
                    connection_arn=shared_context['github_connection_arn'],
                    owner=shared_context['github_owner'],
                    repo=shared_context['github_repo'],
                    action_name='SourceCodeRepo',
                    branch='master',
                    output=source_output,
                )
            ])

        self.ecr_repository = ecr.Repository(scope=self, id=f'{id}-ecr-repo')
        self.ecr_repository.add_lifecycle_rule(
            max_image_age=core.Duration.days(7))

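        # Privileged CodeBuild project driven by buildspec.yml; the ECR
        # repository URI is exposed to the build as REPOSITORY_URI.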
        build_project = build.PipelineProject(
            scope=self,
            id=f'{id}-build-project',
            project_name='ClassifierBuildProject',
            description='Build project for the classifier',
            environment=build.BuildEnvironment(
                build_image=build.LinuxBuildImage.STANDARD_3_0,
                privileged=True,
                compute_type=build.ComputeType.MEDIUM),
            environment_variables={
                'REPOSITORY_URI':
                build.BuildEnvironmentVariable(
                    value=self.ecr_repository.repository_uri),
            },
            timeout=core.Duration.minutes(15),
            cache=build.Cache.bucket(artifact_bucket,
                                     prefix='codebuild-cache'),
            build_spec=build.BuildSpec.from_source_filename('buildspec.yml'),
        )

        build_project.add_to_role_policy(
            iam.PolicyStatement(
                actions=[
                    'codebuild:CreateReportGroup', 'codebuild:CreateReport',
                    'codebuild:BatchPutTestCases', 'codebuild:UpdateReport',
                    'codebuild:StartBuild'
                ],
                resources=['*']))

        self.ecr_repository.grant_pull_push(build_project)

        build_output = pipeline.Artifact()

        classifier_pipeline.add_stage(
            stage_name='BuildStage',
            actions=[
                actions.CodeBuildAction(action_name='CodeBuildProjectAction',
                                        input=source_output,
                                        outputs=[build_output],
                                        project=build_project,
                                        type=actions.CodeBuildActionType.BUILD,
                                        run_order=1)
            ])
Example #9
    def __init__(self, scope: core.Construct, id: str, *, artifact_bucket_name: str, **kwargs) -> None:
        """Define the resources for the CodePipeline.
        
        :param scope: the parent construct
        :param id: the logical id
        :param artifact_bucket_name: the bucket name for artifacts passed between code pipeline 
        """
        super().__init__(scope, id, **kwargs)

        stack = core.Stack.of(self)
        account_id = stack.account
        region = stack.region

        repo_name = 'clin-msi'

        # build projects
        environment = cb.BuildEnvironment(build_image=cb.LinuxBuildImage.STANDARD_4_0)
        self._pytest_project = cb.PipelineProject(self, 'UnitTest', 
            project_name=f"{repo_name}-unittest",
            build_spec=cb.BuildSpec.from_object(buildspec_pytest),
            environment=environment)
        self._publish_project = cb.PipelineProject(self, 'Publish',
            project_name=f"{repo_name}-publish",
            build_spec=cb.BuildSpec.from_object(buildspec_publish),
            environment=environment)
        self._publish_project.add_to_role_policy(
            iam.PolicyStatement(
                effect=iam.Effect.ALLOW,
                actions=["ssm:GetParameters"],
                resources=[
                    f"arn:aws:ssm:{region}:{account_id}:parameter/ClinMsi/PyPI/Credentials/Username",
                    f"arn:aws:ssm:{region}:{account_id}:parameter/ClinMsi/PyPI/Credentials/Password"
                ]
            ))

        # actions
        source_output = cp.Artifact()
        source_action = actions.BitBucketSourceAction(
            action_name='GitHub',
            connection_arn=f"arn:aws:codestar-connections:{region}:{account_id}:connection/a859d7f0-0bf3-48f5-bce1-492c9bc08bef",
            output=source_output,
            code_build_clone_output=True,
            owner="nch-igm",
            repo=repo_name,
            branch="master")

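        # Both CodeBuild actions consume the same source artifact; a manual
        # approval gates the publish step.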
        unittest_action = actions.CodeBuildAction(
            action_name='UnittestAction',
            input=source_output,
            project=self._pytest_project)
        publish_action = actions.CodeBuildAction(
            action_name='PyPIPublishAction',
            input=source_output,
            project=self._publish_project)
        manual_approval = actions.ManualApprovalAction(action_name='ApproveToPublish')

        # initialize pipeline
        self._artifact_bucket = s3.Bucket.from_bucket_name(self, 'Bucket', artifact_bucket_name)
        self._pipeline = cp.Pipeline(
            self, 'CodePipeline',
            pipeline_name=repo_name,
            artifact_bucket=self._artifact_bucket)
        self._pipeline.add_to_role_policy(
            iam.PolicyStatement(
                effect=iam.Effect.ALLOW,
                actions=['codebuild:StartBuild', 'kms:PutKeyPolicy'],
                resources=[
                    self._pytest_project.project_arn,
                    self._publish_project.project_arn,
                    self._pipeline.artifact_bucket.bucket_arn
                ]))
        self._pipeline.add_stage(stage_name="Source", actions=[source_action])
        self._pipeline.add_stage(stage_name="UnitTest", actions=[unittest_action])
        self._pipeline.add_stage(stage_name="ManualApprovalForPublish", actions=[manual_approval])
        self._pipeline.add_stage(stage_name="Publish", actions=[publish_action])