def create_codebuild_project(template) -> cb.Project:
    from troposphere.codebuild import Project, Environment, Artifacts, Source

    environment = Environment(
        ComputeType='BUILD_GENERAL1_SMALL',
        Image='aws/codebuild/standard:3.0',
        Type='LINUX_CONTAINER',
    )

    codebuild_role = template.add_resource(
        Role(
            "CodeBuildRole",
            AssumeRolePolicyDocument=Policy(Statement=[
                Statement(Effect=Allow,
                          Action=[AssumeRole],
                          Principal=Principal("Service",
                                              ["codebuild.amazonaws.com"]))
            ]),
            ManagedPolicyArns=[
                'arn:aws:iam::aws:policy/AmazonS3FullAccess',
                'arn:aws:iam::aws:policy/CloudWatchFullAccess',
                'arn:aws:iam::aws:policy/AWSCodeBuildAdminAccess',
            ],
        ))

    # https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codebuild-project-source.html
    return Project("ContinuousCodeBuild",
                   Name="ContinuousCodeBuild",
                   Description='Continuous pipeline',
                   Artifacts=Artifacts(Type='CODEPIPELINE'),
                   Environment=environment,
                   Source=Source(Type='CODEPIPELINE'),
                   ServiceRole=Ref(codebuild_role))
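
The function above relies on module-level imports that the excerpt does not show. A likely set, assuming the names come from troposphere and awacs as in the other examples on this page:

# Assumed module-level imports (not part of the original excerpt):
import troposphere.codebuild as cb
from troposphere import Ref
from troposphere.iam import Role
from awacs.aws import Allow, Policy, Principal, Statement
from awacs.sts import AssumeRole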
Example #2
def build_artifacts(identifier: str, s3_bucketname: str) -> Artifacts:
    """ CodeBuild Artifact and Secondary Artifact creation. """
    artifact = Artifacts(
        Name=f"{identifier}Artifact",
        ArtifactIdentifier=identifier,
        EncryptionDisabled=True,
        Location=s3_bucketname,
        NamespaceType='NONE',  # NOTE: case sensitive
        OverrideArtifactName=False,
        Packaging='ZIP',  # NOTE: case sensitive
        Type='S3')  # NOTE: case sensitive
    return artifact
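
A minimal usage sketch for build_artifacts: it returns a named artifact that can be attached to a project through the SecondaryArtifacts property. The bucket, identifiers, role ARN, and project definition below are hypothetical, and a troposphere version that supports SecondaryArtifacts (and the codebuild classes used elsewhere on this page) is assumed.

# Hypothetical usage; bucket name, identifiers, and ARNs are placeholders.
primary = Artifacts(Type='S3', Location='my-artifact-bucket', Packaging='ZIP',
                    Name='primary', NamespaceType='NONE')
reports = build_artifacts("Reports", "my-artifact-bucket")
project = Project(
    "BuildWithSecondaryArtifacts",
    Name="BuildWithSecondaryArtifacts",
    Artifacts=primary,
    SecondaryArtifacts=[reports],
    Environment=Environment(ComputeType='BUILD_GENERAL1_SMALL',
                            Image='aws/codebuild/standard:4.0',
                            Type='LINUX_CONTAINER'),
    Source=Source(Type='GITHUB',
                  Location='https://github.com/example/repo.git'),
    ServiceRole='arn:aws:iam::123456789012:role/codebuild-role',
)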
def create_template():
    t = Template()

    oauth_token = t.add_parameter(
        Parameter(
            "GitHubOAuthToken",
            Description="Secret for github",
            Type="AWS::SSM::Parameter::Value<String>",
            Default="oauth",
            NoEcho=True,
        ))

    owner = t.add_parameter(Parameter(
        "Owner",
        Type="String",
    ))

    repo = t.add_parameter(Parameter(
        "Repo",
        Type="String",
    ))

    branch = t.add_parameter(Parameter(
        "Branch",
        Type="String",
    ))

    artifact_bucket = t.add_resource(Bucket('ArtifactBucket'))

    cd_role = t.add_resource(code_build_service_role(artifact_bucket))

    code_build = t.add_resource(
        Project(
            'CodeBuild',
            Name=STACK_NAME,
            Artifacts=Artifacts(Type='NO_ARTIFACTS', ),
            Environment=Environment(
                ComputeType='BUILD_GENERAL1_SMALL',
                Image='aws/codebuild/python:3.6.5',
                Type='LINUX_CONTAINER',
            ),
            ServiceRole=Ref(cd_role),
            Source=Source(
                Type='GITHUB',
                Auth=SourceAuth(Type='OAUTH', ),
                Location=
                'https://github.com/karlw00t/aws-encryption-sdk-cli.git',
            ),
            Triggers=ProjectTriggers(Webhook=True, ),
        ))

    return t.to_dict()
def getCodeBuild(name: str, serviceRole: Role,
                 buildspec: List[str]) -> Project:
    env = Environment(ComputeType="BUILD_GENERAL1_SMALL",
                      Image="frolvlad/alpine-python3",
                      Type="LINUX_CONTAINER",
                      PrivilegedMode=False)
    source = Source(Type="CODEPIPELINE", BuildSpec=Join("\n", buildspec))
    artifacts = Artifacts(Type="CODEPIPELINE")
    return Project(alphanum(name),
                   Name=Sub("${AWS::StackName}-" + alphanum(name)),
                   Environment=env,
                   Source=source,
                   Artifacts=artifacts,
                   ServiceRole=Ref(serviceRole))
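
A minimal usage sketch for getCodeBuild; the buildspec lines, the service_role Role object, and the template are hypothetical, and the alphanum() helper used inside the function is assumed to be in scope.

# Hypothetical usage; service_role and template are assumed to exist already.
buildspec_lines = [
    "version: 0.2",
    "phases:",
    "  build:",
    "    commands:",
    "      - echo running unit tests",
]
unit_tests = getCodeBuild("unit tests", service_role, buildspec_lines)
template.add_resource(unit_tests)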
Example #5
def build_project(template=None,
                  section=None,
                  project_name=None,
                  raw_env=None,
                  service_role: str = None) -> Template:
    if template is None:  # avoid sharing a mutable default Template() between calls
        template = Template()
    template.set_version('2010-09-09')
    artifacts = Artifacts(Type='NO_ARTIFACTS')
    env_list = list()

    try:
        logging.debug(f'raw_env is {raw_env}')
        env = raw_env.split(' ')
    except AttributeError:
        env = config.get(section, 'env').split(' ')
        logging.debug(f'Section is {section}')

    for i in env:
        k, v = i.split("=")
        env_list.append({"Name": k, "Value": v})

    environment = Environment(
        ComputeType=config.get(section, 'compute_type'),
        Image=str(config.get(section, 'image')),
        Type=str(config.get(section, 'env_type')),
        PrivilegedMode=True,
        EnvironmentVariables=env_list,
    )

    source = Source(Location=config.get(section, 'source_location'),
                    Type=config.get(section, 'source_type'),
                    GitCloneDepth=config.get(section, 'source_clonedepth'),
                    BuildSpec=config.get(section, 'buildspec'),
                    ReportBuildStatus=True)

    project = Project(
        project_name,
        Artifacts=artifacts,
        Environment=environment,
        Name=project_name,
        TimeoutInMinutes=config.get(section, 'timeout_in_min'),
        ServiceRole=Ref(service_role),
        Source=source,
        SourceVersion=config.get(section, 'source_version'),
        BadgeEnabled=True,
        DependsOn=service_role,
    )
    template.add_resource(project)
    template.add_output(
        [Output(f"CodeBuildProject{project_name}", Value=Ref(project))])
    return template
 def __init__(self, roleCodeBuild):
     self.roleCodeBuild = roleCodeBuild
     self.kMSKeyArn = Ref('KMSKeyArn')
     self.vpcid = Ref("VPCID")
     self.privatesubnetOne = Ref("PrivateSubnetOne")
     self.privatesubnetTwo = Ref("PrivateSubnetTwo")
     self.sgpipeline = Ref("SG")
     self.computeType = 'BUILD_GENERAL1_SMALL'
     self.image = 'aws/codebuild/standard:2.0'
     self.type = 'LINUX_CONTAINER'
     self.artifacts = Artifacts(Type='CODEPIPELINE')
     self.vpcConfig = VpcConfig(
         VpcId=self.vpcid,
         Subnets=[self.privatesubnetOne, self.privatesubnetTwo],
         SecurityGroupIds=[self.sgpipeline])
     self.timeoutInMinutes = 10
Example #7
def __create_build_project(template: Template) -> Project:
    build_spec = template.add_parameter(parameter=Parameter(
        title='BuildSpecPath', Default='buildspec.yml', Type='String'))

    bucket = ImportValue(CommonResource.ExportName.BUCKET_NAME.value)

    build_project = template.add_resource(resource=Project(
        title='SampleBuildProject',
        Name='sample-codepipeline-project',
        Artifacts=Artifacts(Type='CODEPIPELINE', ),
        Source=Source(
            BuildSpec=Ref(build_spec),
            Type='CODEPIPELINE',
        ),
        Environment=Environment(
            ComputeType='BUILD_GENERAL1_SMALL',
            Image='aws/codebuild/amazonlinux2-x86_64-standard:1.0',
            Type='LINUX_CONTAINER',
            EnvironmentVariables=[
                EnvironmentVariable(Name='S3_BUCKET', Value=bucket)
            ]),
        ServiceRole=ImportValue(
            CommonResource.ExportName.CODE_BUILD_SERVICE_ROLE_ARN.value)))
    return build_project
Example #8
def build_template(sierrafile):
    template = Template()

    template.add_version('2010-09-09')

    template.add_metadata(build_interface(sierrafile.extra_params))

    parameters = AttrDict(

        # Network Parameters

        vpc_cidr=template.add_parameter(Parameter(
            'VpcCidr',
            Type='String',
            Default='192.172.0.0/16',
        )),
        subnet1_cidr=template.add_parameter(Parameter(
            'Subnet1Cidr',
            Type='String',
            Default='192.172.1.0/24',
        )),
        subnet2_cidr=template.add_parameter(Parameter(
            'Subnet2Cidr',
            Type='String',
            Default='192.172.2.0/24',
        )),

        # ECS Parameters

        cluster_size=template.add_parameter(Parameter(
            'ClusterSize',
            Type='Number',
            Default=2,
        )),
        instance_type=template.add_parameter(Parameter(
            'InstanceType',
            Type='String',
            Default='t2.medium'
        )),
        key_name=template.add_parameter(Parameter(
            'KeyName',
            Type='AWS::EC2::KeyPair::KeyName',
        )),
        image_id=template.add_parameter(Parameter(
            'ImageId',
            Type='AWS::SSM::Parameter::Value<AWS::EC2::Image::Id>',
            Default=(
                '/aws/service/ecs/optimized-ami'
                '/amazon-linux/recommended/image_id'
            ),
            Description=(
              'An SSM parameter that resolves to a valid AMI ID.'
              ' This is the AMI that will be used to create ECS hosts.'
              ' The default is the current recommended ECS-optimized AMI.'
            )
        )),

        # Other Parameters

        github_token=template.add_parameter(Parameter(
            'GitHubToken',
            Type='String',
            NoEcho=True,
        )),
    )

    # Environment Variable Parameters

    for env_var_param, env_var_name in sierrafile.extra_params:
        template.add_parameter(Parameter(
            env_var_param,
            Type='String',
            NoEcho=True,
        ))

    # Resource Declarations

    # # Network

    network_vpc = template.add_resource(VPC(
        'NetworkVpc',
        CidrBlock=Ref(parameters.vpc_cidr),
        Tags=Tags(Name=Ref('AWS::StackName')),
    ))

    network_ig = template.add_resource(InternetGateway(
        'NetworkInternetGateway',
        Tags=Tags(Name=Ref('AWS::StackName')),
    ))

    vpc_attach = template.add_resource(VPCGatewayAttachment(
        'NetworkInternetGatewayAttachment',
        InternetGatewayId=Ref(network_ig),
        VpcId=Ref(network_vpc),
    ))

    route_table = template.add_resource(RouteTable(
        'NetworkRouteTable',
        VpcId=Ref(network_vpc),
        Tags=Tags(Name=Ref('AWS::StackName')),
    ))

    template.add_resource(Route(
        'NetworkDefaultRoute',
        DependsOn=[vpc_attach.title],
        RouteTableId=Ref(route_table),
        DestinationCidrBlock='0.0.0.0/0',
        GatewayId=Ref(network_ig),
    ))

    subnet1 = template.add_resource(Subnet(
        'NetworkSubnet1',
        VpcId=Ref(network_vpc),
        AvailabilityZone=Select(0, GetAZs()),
        MapPublicIpOnLaunch=True,
        CidrBlock=Ref(parameters.subnet1_cidr),
        Tags=Tags(Name=Sub('${AWS::StackName} (Public)')),
    ))

    subnet2 = template.add_resource(Subnet(
        'NetworkSubnet2',
        VpcId=Ref(network_vpc),
        AvailabilityZone=Select(1, GetAZs()),
        MapPublicIpOnLaunch=True,
        CidrBlock=Ref(parameters.subnet2_cidr),
        Tags=Tags(Name=Sub('${AWS::StackName} (Public)')),
    ))

    template.add_resource(SubnetRouteTableAssociation(
        'NetworkSubnet1RouteTableAssociation',
        RouteTableId=Ref(route_table),
        SubnetId=Ref(subnet1),
    ))

    template.add_resource(SubnetRouteTableAssociation(
        'NetworkSubnet2RouteTableAssociation',
        RouteTableId=Ref(route_table),
        SubnetId=Ref(subnet2),
    ))

    elb = template.add_resource(LoadBalancer(
        ELB_NAME,
        Name=Sub('${AWS::StackName}-elb'),
        Type='network',
        Subnets=[Ref(subnet1), Ref(subnet2)],
    ))

    # # Cluster

    ecs_host_role = template.add_resource(Role(
        'EcsHostRole',
        AssumeRolePolicyDocument=PolicyDocument(
            Statement=[Statement(
                Effect=Allow,
                Principal=Principal('Service', 'ec2.amazonaws.com'),
                Action=[awacs.sts.AssumeRole]
            )],
        ),
        ManagedPolicyArns=[
            'arn:aws:iam::aws:policy/'
            'service-role/AmazonEC2ContainerServiceforEC2Role'
        ]
    ))

    ecs_host_profile = template.add_resource(InstanceProfile(
        'EcsHostInstanceProfile',
        Roles=[Ref(ecs_host_role)]
    ))

    ecs_host_sg = template.add_resource(SecurityGroup(
        'EcsHostSecurityGroup',
        GroupDescription=Sub('${AWS::StackName}-hosts'),
        VpcId=Ref(network_vpc),
        SecurityGroupIngress=[SecurityGroupRule(
            CidrIp='0.0.0.0/0',
            IpProtocol='-1'
        )]
    ))

    cluster = template.add_resource(Cluster(
        'EcsCluster',
        ClusterName=Ref('AWS::StackName')
    ))

    autoscaling_name = 'EcsHostAutoScalingGroup'
    launch_conf_name = 'EcsHostLaunchConfiguration'

    launch_conf = template.add_resource(LaunchConfiguration(
        launch_conf_name,
        ImageId=Ref(parameters.image_id),
        InstanceType=Ref(parameters.instance_type),
        IamInstanceProfile=Ref(ecs_host_profile),
        KeyName=Ref(parameters.key_name),
        SecurityGroups=[Ref(ecs_host_sg)],
        UserData=Base64(Sub(
            '#!/bin/bash\n'
            'yum install -y aws-cfn-bootstrap\n'
            '/opt/aws/bin/cfn-init -v'
            ' --region ${AWS::Region}'
            ' --stack ${AWS::StackName}'
            f' --resource {launch_conf_name}\n'
            '/opt/aws/bin/cfn-signal -e $?'
            ' --region ${AWS::Region}'
            ' --stack ${AWS::StackName}'
            f' --resource {autoscaling_name}\n'
        )),
        Metadata={
            'AWS::CloudFormation::Init': {
                'config': {
                    'commands': {
                        '01_add_instance_to_cluster': {
                            'command': Sub(
                                f'echo ECS_CLUSTER=${{{cluster.title}}}'
                                f' > /etc/ecs/ecs.config'
                            ),
                        }
                    },
                    'files': {
                        '/etc/cfn/cfn-hup.conf': {
                            'mode': 0o400,
                            'owner': 'root',
                            'group': 'root',
                            'content': Sub(
                                '[main]\n'
                                'stack=${AWS::StackId}\n'
                                'region=${AWS::Region}\n'
                            ),
                        },
                        '/etc/cfn/hooks.d/cfn-auto-reloader.conf': {
                            'content': Sub(
                                '[cfn-auto-reloader-hook]\n'
                                'triggers=post.update\n'
                                f'path=Resources.{launch_conf_name}.Metadata'
                                '.AWS::CloudFormation::Init\n'
                                'action=/opt/aws/bin/cfn-init -v'
                                ' --region ${AWS::Region}'
                                ' --stack ${AWS::StackName}'
                                f' --resource {launch_conf_name}\n'
                            ),
                        },
                    },
                    'services': {
                        'sysvinit': {
                            'cfn-hup': {
                                'enabled': True,
                                'ensureRunning': True,
                                'files': [
                                    '/etc/cfn/cfn-hup.conf',
                                    '/etc/cfn/hooks.d/cfn-auto-reloader.conf'
                                ]
                            }
                        }
                    }
                }
            }
        }
    ))

    autoscaling_group = template.add_resource(AutoScalingGroup(
        autoscaling_name,
        VPCZoneIdentifier=[Ref(subnet1), Ref(subnet2)],
        LaunchConfigurationName=Ref(launch_conf),
        DesiredCapacity=Ref(parameters.cluster_size),
        MinSize=Ref(parameters.cluster_size),
        MaxSize=Ref(parameters.cluster_size),
        Tags=[{
            'Key': 'Name',
            'Value': Sub('${AWS::StackName} - ECS Host'),
            'PropagateAtLaunch': True,
        }],
        CreationPolicy=CreationPolicy(
            ResourceSignal=ResourceSignal(Timeout='PT15M'),
        ),
        UpdatePolicy=UpdatePolicy(
            AutoScalingRollingUpdate=AutoScalingRollingUpdate(
                MinInstancesInService=1,
                MaxBatchSize=1,
                PauseTime='PT5M',
                WaitOnResourceSignals=True,
            ),
        ),
    ))

    # # Services

    task_role = template.add_resource(Role(
        'TaskExecutionRole',
        AssumeRolePolicyDocument=PolicyDocument(
            Statement=[Statement(
                Effect=Allow,
                Principal=Principal('Service', 'ecs-tasks.amazonaws.com'),
                Action=[awacs.sts.AssumeRole],
            )],
        ),
        ManagedPolicyArns=[
            'arn:aws:iam::aws:policy/'
            'service-role/AmazonECSTaskExecutionRolePolicy'
        ],
    ))

    artifact_bucket = template.add_resource(Bucket(
        'ArtifactBucket',
        DeletionPolicy='Retain',
    ))

    codebuild_role = template.add_resource(Role(
        'CodeBuildServiceRole',
        Path='/',
        AssumeRolePolicyDocument=PolicyDocument(
            Version='2012-10-17',
            Statement=[
                Statement(
                    Effect=Allow,
                    Principal=Principal(
                        'Service', 'codebuild.amazonaws.com'
                    ),
                    Action=[
                        awacs.sts.AssumeRole,
                    ],
                ),
            ],
        ),
        Policies=[Policy(
            PolicyName='root',
            PolicyDocument=PolicyDocument(
                Version='2012-10-17',
                Statement=[
                    Statement(
                        Resource=['*'],
                        Effect=Allow,
                        Action=[
                            awacs.ssm.GetParameters,
                        ],
                    ),
                    Statement(
                        Resource=['*'],
                        Effect=Allow,
                        Action=[
                            awacs.s3.GetObject,
                            awacs.s3.PutObject,
                            awacs.s3.GetObjectVersion,
                        ],
                    ),
                    Statement(
                        Resource=['*'],
                        Effect=Allow,
                        Action=[
                            awacs.logs.CreateLogGroup,
                            awacs.logs.CreateLogStream,
                            awacs.logs.PutLogEvents,
                        ],
                    ),
                ],
            ),
        )],
    ))

    codepipeline_role = template.add_resource(Role(
        'CodePipelineServiceRole',
        Path='/',
        AssumeRolePolicyDocument=PolicyDocument(
            Version='2012-10-17',
            Statement=[
                Statement(
                    Effect=Allow,
                    Principal=Principal(
                        'Service', 'codepipeline.amazonaws.com'
                    ),
                    Action=[
                        awacs.sts.AssumeRole,
                    ],
                ),
            ],
        ),
        Policies=[Policy(
            PolicyName='root',
            PolicyDocument=PolicyDocument(
                Version='2012-10-17',
                Statement=[
                    Statement(
                        Resource=[
                            Sub(f'${{{artifact_bucket.title}.Arn}}/*')
                        ],
                        Effect=Allow,
                        Action=[
                            awacs.s3.GetBucketVersioning,
                            awacs.s3.GetObject,
                            awacs.s3.GetObjectVersion,
                            awacs.s3.PutObject,
                        ],
                    ),
                    Statement(
                        Resource=['*'],
                        Effect=Allow,
                        Action=[
                            awacs.ecs.DescribeServices,
                            awacs.ecs.DescribeTaskDefinition,
                            awacs.ecs.DescribeTasks,
                            awacs.ecs.ListTasks,
                            awacs.ecs.RegisterTaskDefinition,
                            awacs.ecs.UpdateService,
                            awacs.codebuild.StartBuild,
                            awacs.codebuild.BatchGetBuilds,
                            awacs.iam.PassRole,
                        ],
                    ),
                ],
            ),
        )],
    ))

    log_group = template.add_resource(LogGroup(
        'LogGroup',
        LogGroupName=Sub('/ecs/${AWS::StackName}'),
    ))

    if any(conf.pipeline.enable for conf in sierrafile.services.values()):
        project = template.add_resource(Project(
            'CodeBuildProject',
            Name=Sub('${AWS::StackName}-build'),
            ServiceRole=Ref(codebuild_role),
            Artifacts=Artifacts(Type='CODEPIPELINE'),
            Source=Source(Type='CODEPIPELINE'),
            Environment=Environment(
                ComputeType='BUILD_GENERAL1_SMALL',
                Image='aws/codebuild/docker:17.09.0',
                Type='LINUX_CONTAINER',
            ),
        ))

    for name, settings in sierrafile.services.items():
        task_definition = template.add_resource(TaskDefinition(
            f'{name}TaskDefinition',
            RequiresCompatibilities=['EC2'],
            Cpu=str(settings.container.cpu),
            Memory=str(settings.container.memory),
            NetworkMode='bridge',
            ExecutionRoleArn=Ref(task_role.title),
            ContainerDefinitions=[
                ContainerDefinition(
                    Name=f'{name}',
                    Image=settings.container.image,
                    Memory=str(settings.container.memory),
                    Essential=True,
                    PortMappings=[
                        PortMapping(
                            ContainerPort=settings.container.port,
                            Protocol='tcp',
                        ),
                    ],
                    Environment=[
                        troposphere.ecs.Environment(Name=k, Value=v)
                        for k, v in sierrafile.env_vars.items()
                        if k in settings.get('environment', [])
                    ],
                    LogConfiguration=LogConfiguration(
                        LogDriver='awslogs',
                        Options={
                            'awslogs-region': Ref('AWS::Region'),
                            'awslogs-group': Ref(log_group.title),
                            'awslogs-stream-prefix': Ref('AWS::StackName'),
                        },
                    ),
                ),
            ],
        ))

        target_group = template.add_resource(TargetGroup(
            f'{name}TargetGroup',
            Port=settings.container.port,
            Protocol='TCP',
            VpcId=Ref(network_vpc),
            Tags=Tags(Name=Sub(f'${{AWS::StackName}}-{name}')),
        ))

        listener = template.add_resource(Listener(
            f'{name}ElbListener',
            LoadBalancerArn=Ref(elb),
            Port=settings.container.port,
            Protocol='TCP',
            DefaultActions=[
                Action(TargetGroupArn=Ref(target_group), Type='forward')
            ],
        ))

        service = template.add_resource(Service(
            f'{name}Service',
            Cluster=Ref(cluster),
            ServiceName=f'{name}-service',
            DependsOn=[autoscaling_group.title, listener.title],
            DesiredCount=settings.container.count,
            TaskDefinition=Ref(task_definition),
            LaunchType='EC2',
            LoadBalancers=[
                troposphere.ecs.LoadBalancer(
                    ContainerName=f'{name}',
                    ContainerPort=settings.container.port,
                    TargetGroupArn=Ref(target_group),
                ),
            ],
        ))

        if settings.pipeline.enable:
            pipeline = template.add_resource(Pipeline(
                f'{name}Pipeline',
                RoleArn=GetAtt(codepipeline_role, 'Arn'),
                ArtifactStore=ArtifactStore(
                    Type='S3',
                    Location=Ref(artifact_bucket),
                ),
                Stages=[
                    Stages(
                        Name='Source',
                        Actions=[Actions(
                            Name='Source',
                            ActionTypeId=ActionTypeId(
                                Category='Source',
                                Owner='ThirdParty',
                                Version='1',
                                Provider='GitHub',
                            ),
                            OutputArtifacts=[
                                OutputArtifacts(Name=f'{name}Source'),
                            ],
                            RunOrder='1',
                            Configuration={
                                'Owner': settings.pipeline.user,
                                'Repo': settings.pipeline.repo,
                                'Branch': settings.pipeline.branch,
                                'OAuthToken': Ref(parameters.github_token),
                            },
                        )],
                    ),
                    Stages(
                        Name='Build',
                        Actions=[Actions(
                            Name='Build',
                            ActionTypeId=ActionTypeId(
                                Category='Build',
                                Owner='AWS',
                                Version='1',
                                Provider='CodeBuild',
                            ),
                            InputArtifacts=[
                                InputArtifacts(Name=f'{name}Source'),
                            ],
                            OutputArtifacts=[
                                OutputArtifacts(Name=f'{name}Build'),
                            ],
                            RunOrder='1',
                            Configuration={
                                'ProjectName': Ref(project),
                            },
                        )],
                    ),
                    Stages(
                        Name='Deploy',
                        Actions=[Actions(
                            Name='Deploy',
                            ActionTypeId=ActionTypeId(
                                Category='Deploy',
                                Owner='AWS',
                                Version='1',
                                Provider='ECS',
                            ),
                            InputArtifacts=[
                                InputArtifacts(Name=f'{name}Build')
                            ],
                            RunOrder='1',
                            Configuration={
                                'ClusterName': Ref(cluster),
                                'ServiceName': Ref(service),
                                'FileName': 'image.json',
                            },
                        )],
                    ),
                ],
            ))

            template.add_resource(Webhook(
                f'{name}CodePipelineWebhook',
                Name=Sub(f'${{AWS::StackName}}-{name}-webhook'),
                Authentication='GITHUB_HMAC',
                AuthenticationConfiguration=AuthenticationConfiguration(
                    SecretToken=Ref(parameters.github_token),
                ),
                Filters=[FilterRule(
                    JsonPath='$.ref',
                    MatchEquals=f'refs/heads/{settings.pipeline.branch}'
                )],
                TargetAction='Source',
                TargetPipeline=Ref(pipeline),
                TargetPipelineVersion=1,
                RegisterWithThirdParty=True,
            ))

    return template
Example #9
    commands: 
      - aws codepipeline get-pipeline-state --name "${CODEBUILD_INITIATOR##*/}" --query stageStates[?actionStates[0].latestExecution.externalExecutionId==\`$CODEBUILD_BUILD_ID\`].latestExecution.pipelineExecutionId --output=text > /tmp/execution_id.txt 
      - aws codepipeline get-pipeline-execution --pipeline-name "${CODEBUILD_INITIATOR##*/}" --pipeline-execution-id $(cat /tmp/execution_id.txt) --query 'pipelineExecution.artifactRevisions[0].revisionId' --output=text > /tmp/tag.txt  
      - printf "%s:%s" "$REPOSITORY_URI" "$(cat /tmp/tag.txt)" > /tmp/build_tag.txt 
      - printf '{"tag":"%s"}' "$(cat /tmp/tag.txt)" > /tmp/build.json 
      - $(aws ecr get-login --no-include-email) 
  build: 
    commands: 
      - docker build -t "$(cat /tmp/build_tag.txt)" . 
  post_build: 
    commands: 
      - docker push "$(cat /tmp/build_tag.txt)" 
      - aws ecr batch-get-image --repository-name $REPOSITORY_NAME --image-ids imageTag="$(cat /tmp/tag.txt)" --query 'images[].imageManifest' --output text | tee /tmp/latest_manifest.json 
      - aws ecr put-image --repository-name $REPOSITORY_NAME --image-tag latest --image-manifest $(cat /tmp/latest_manifest.json) 
artifacts: 
  files: /tmp/build.json 
  discard-paths: yes 
"""

t.add_resource(
    Project(
        "CodeBuild",
        Name='HelloWorldContainer',
        Environment=environment,
        ServiceRole=Ref("ServiceRole"),
        Source=Source(Type="CODEPIPELINE", BuildSpec=buildspec),
        Artifacts=Artifacts(Type="CODEPIPELINE", Name="output"),
    ))

print(t.to_json())
Example #10
from troposphere import Template
from troposphere.codebuild import Artifacts, Environment, Project, Source

template = Template()
template.set_version("2010-09-09")

artifacts = Artifacts(Type="NO_ARTIFACTS")

environment = Environment(
    ComputeType="BUILD_GENERAL1_SMALL",
    Image="aws/codebuild/java:openjdk-8",
    Type="LINUX_CONTAINER",
    EnvironmentVariables=[{
        "Name": "APP_NAME",
        "Value": "demo"
    }],
)

source = Source(
    Location="codebuild-demo-test/0123ab9a371ebf0187b0fe5614fbb72c", Type="S3")

project = Project(
    "DemoProject",
    Artifacts=artifacts,
    Environment=environment,
    Name="DemoProject",
    ServiceRole="arn:aws:iam::0123456789:role/codebuild-role",
    Source=source,
)
template.add_resource(project)
Example #11
from troposphere import Template
from troposphere.codebuild import Artifacts, Environment, Source, Project

template = Template()
template.add_version('2010-09-09')

artifacts = Artifacts(Type='NO_ARTIFACTS')

environment = Environment(
    ComputeType='BUILD_GENERAL1_SMALL',
    Image='aws/codebuild/java:openjdk-8',
    Type='LINUX_CONTAINER',
    EnvironmentVariables=[{
        'Name': 'APP_NAME',
        'Value': 'demo'
    }],
)

source = Source(
    Location='codebuild-demo-test/0123ab9a371ebf0187b0fe5614fbb72c', Type='S3')

project = Project(
    "DemoProject",
    Artifacts=artifacts,
    Environment=environment,
    Name='DemoProject',
    ServiceRole='arn:aws:iam::0123456789:role/codebuild-role',
    Source=source,
)
template.add_resource(project)
def add_packer_codebuild_job(t, name, environment=None):
    """ Add the packer AMI build to the codebuild job """
    cfn_name = sanitize_cfn_resource_name(name)
    with open(
            os.path.dirname(os.path.realpath(__file__)) +
            "/buildspecs/packer.yml") as spec:
        build_spec = spec.read()

    codebuild_job_environments = [{'Name': 'CLUSTER_NAME', 'Value': name}]
    if environment:
        codebuild_job_environments.append({
            'Name': 'ENVIRONMENT',
            'Value': environment
        })

    PackerRole = Role(
        "CodeBuildPackerRole",
        AssumeRolePolicyDocument={
            "Statement": [{
                "Effect": "Allow",
                "Action": "sts:AssumeRole",
                "Principal": {
                    "Service":
                    ["codebuild.amazonaws.com", "ec2.amazonaws.com"]
                },
            }]
        },
        Policies=[
            Policy(
                PolicyName="codebuild-packer",
                PolicyDocument={
                    "Statement": [{
                        "Effect":
                        "Allow",
                        "Action": [
                            "ec2:AttachVolume",
                            "ec2:AuthorizeSecurityGroupIngress",
                            "ec2:CopyImage", "ec2:CreateImage",
                            "ec2:CreateKeypair", "ec2:CreateSecurityGroup",
                            "ec2:CreateSnapshot", "ec2:CreateTags",
                            "ec2:CreateVolume", "ec2:DeleteKeyPair",
                            "ec2:DeleteSecurityGroup", "ec2:DeleteSnapshot",
                            "ec2:DeleteVolume", "ec2:DeregisterImage",
                            "ec2:Describe*", "ec2:DetachVolume",
                            "ec2:GetPasswordData", "ec2:ModifyImageAttribute",
                            "ec2:ModifyInstanceAttribute",
                            "ec2:ModifySnapshotAttribute", "ec2:RegisterImage",
                            "ec2:RunInstances", "ec2:StopInstances",
                            "ec2:TerminateInstances", "iam:PassRole"
                        ],
                        "Resource":
                        "*"
                    }, {
                        "Effect": "Allow",
                        "Action": ["logs:*"],
                        "Resource": "*"
                    }, {
                        "Effect":
                        "Allow",
                        "Action": [
                            "ssm:GetParametersByPath", "ssm:GetParameters",
                            "ssm:GetParameter"
                        ],
                        "Resource":
                        ["arn:aws:ssm:*:*:parameter/aws/service/ecs*"]
                    }, {
                        "Effect":
                        "Allow",
                        "Action":
                        "s3:*",
                        "Resource": [{
                            "Fn::Sub":
                            "arn:aws:s3:::ecs-cluster-deployer-${AWS::AccountId}-${AWS::Region}"
                        }, {
                            "Fn::Sub":
                            "arn:aws:s3:::ecs-cluster-deployer-${AWS::AccountId}-${AWS::Region}/*"
                        }]
                    }]
                })
        ])

    PackerInstanceProfile = InstanceProfile(
        "PackerInstanceProfile",
        InstanceProfileName=f"{cfn_name}PackerInstanceProfile",
        Roles=[Ref(PackerRole)])
    environment = Environment(ComputeType="BUILD_GENERAL1_SMALL",
                              Type="LINUX_CONTAINER",
                              Image="aws/codebuild/standard:2.0",
                              EnvironmentVariables=codebuild_job_environments,
                              PrivilegedMode=True)
    PackerCodebuild = Project("PackerAMIBuilder",
                              Name=f"{cfn_name}PackerAMIBuilder",
                              Artifacts=Artifacts(Type='CODEPIPELINE'),
                              Environment=environment,
                              ServiceRole=Ref(PackerRole),
                              Source=Source(Type="CODEPIPELINE",
                                            BuildSpec=Sub(build_spec)))
    t.add_resource(PackerRole)
    t.add_resource(PackerCodebuild)
    t.add_resource(PackerInstanceProfile)
def add_deployer_codebuild_job(t, name, environments):
    """
    Adds deployer role to the codebuild job for ecs cluster deployer
    """
    with open(
            os.path.dirname(os.path.realpath(__file__)) +
            "/buildspecs/cluster_deployer.yml") as spec:
        build_spec = spec.read()

    cfn_name = sanitize_cfn_resource_name(name)

    deployer_role = Role(
        "CodeBuildClusterDeployerRole",
        AssumeRolePolicyDocument={
            "Statement": [{
                "Effect": "Allow",
                "Action": "sts:AssumeRole",
                "Principal": {
                    "Service": ["codebuild.amazonaws.com"]
                }
            }]
        },
        Policies=[
            Policy(
                PolicyName="codebuild-cluster-deployer",
                PolicyDocument={
                    "Statement": [{
                        "Effect":
                        "Allow",
                        "Action": [
                            "ec2:CreateSecurityGroup",
                            "ec2:DeleteSecurityGroup", "ec2:CreateTags",
                            "ec2:AuthorizeSecurityGroupIngress",
                            "ec2:RequestSpotFleet",
                            "ec2:CancelSpotFleetRequests", "ec2:Describe*",
                            "ec2:CreateKeyPair",
                            "ec2:RevokeSecurityGroupIngress", "iam:CreateRole",
                            "iam:CreateInstanceProfile",
                            "iam:DeleteInstanceProfile",
                            "iam:RemoveRoleFromInstanceProfile",
                            "iam:DeleteInstanceProfile",
                            "iam:AddRoleToInstanceProfile", "iam:DeleteRole",
                            "iam:DeleteRolePolicy", "iam:PutRolePolicy",
                            "iam:List*", "iam:Get*", "iam:PassRole", "logs:*",
                            "lambda:*", "events:*", "ecs:*",
                            "cloudformation:ListStacks"
                        ],
                        "Resource":
                        "*"
                    }, {
                        "Effect":
                        "Allow",
                        "Action": [
                            "ssm:PutParameter",
                            "ssm:GetParameter",
                            "ssm:DeleteParameter",
                            "ssm:AddTagsToResource",
                        ],
                        "Resource": [{
                            "Fn::Sub":
                            "arn:aws:ssm:${AWS::Region}:${AWS::AccountId}:parameter/ecs-maestro/*"
                        }]
                    }, {
                        "Effect":
                        "Allow",
                        "Action": ["cloudformation:*"],
                        "Resource":
                        [f"arn:aws:cloudformation:*:*:stack/{name}*"]
                    }, {
                        "Effect":
                        "Allow",
                        "Action": ["s3:*"],
                        "Resource": [{
                            "Fn::Sub":
                            "arn:aws:s3:::ecs-cluster-deployer-${AWS::AccountId}-${AWS::Region}"
                        }, {
                            "Fn::Sub":
                            "arn:aws:s3:::ecs-cluster-deployer-${AWS::AccountId}-${AWS::Region}/*"
                        }]
                    }]
                })
        ])

    for environment in environments:
        t.add_resource(
            Project(f"Deploy{cfn_name}{environment.title()}",
                    Name=f"Deploy{cfn_name}{environment.title()}",
                    Artifacts=Artifacts(Type='CODEPIPELINE'),
                    Environment=Environment(
                        ComputeType="BUILD_GENERAL1_SMALL",
                        Type="LINUX_CONTAINER",
                        Image="aws/codebuild/standard:2.0",
                        EnvironmentVariables=[{
                            'Name': 'CLUSTER_NAME',
                            'Value': name
                        }, {
                            'Name': 'ENVIRONMENT',
                            'Value': environment
                        }, {
                            'Name':
                            'ECS_CLUSTER_DEPLOYER_VERSION',
                            'Value':
                            os.environ.get('ECS_CLUSTER_DEPLOYER_VERSION')
                        }, {
                            'Name':
                            'VALUES_FILE',
                            'Value':
                            os.environ.get('VALUES_FILE', 'infra.yml')
                        }],
                        PrivilegedMode=True),
                    ServiceRole=Ref(deployer_role),
                    Source=Source(Type="CODEPIPELINE", BuildSpec=build_spec)))

    t.add_resource(deployer_role)
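
A minimal usage sketch for the two helpers above; the template, cluster name, and environment names are hypothetical:

# Hypothetical usage; "demo-cluster" and the environment names are placeholders.
t = Template()
add_packer_codebuild_job(t, "demo-cluster", environment="staging")
add_deployer_codebuild_job(t, "demo-cluster", ["staging", "production"])
print(t.to_json())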
Example #14
def create_template():
    t = Template()

    t.add_description("The individual CodeBuild stack for CBuildCI.")

    p_build_description = t.add_parameter(
        Parameter(
            "BuildDescription",
            Description="Used for the CodeBuild project description.",
            Type="String",
        ))

    p_api_lambda_role = t.add_parameter(
        Parameter(
            "ApiLambdaRole",
            Description=
            "The IAM role used by the API lambda function, which will receive permission to monitor builds.",
            Type="String",
        ))

    p_step_lambda_role = t.add_parameter(
        Parameter(
            "StepLambdaRole",
            Description=
            "The IAM role used by the lambda function, which will receive permission to start, stop and monitor builds.",
            Type="String",
        ))

    p_source_bucket = t.add_parameter(
        Parameter(
            "SourceBucket",
            Type="String",
        ))

    p_source_key_prefix = t.add_parameter(
        Parameter(
            "SourceKeyPrefix",
            Type="String",
            Default="github-source/",
        ))

    p_artifact_bucket = t.add_parameter(
        Parameter(
            "ArtifactBucket",
            Type="String",
        ))

    p_artifact_key_prefix = t.add_parameter(
        Parameter(
            "ArtifactKeyPrefix",
            Type="String",
            Default="github-artifacts/",
        ))

    p_cache_bucket = t.add_parameter(Parameter(
        "CacheBucket",
        Type="String",
    ))

    p_cache_key_prefix = t.add_parameter(
        Parameter(
            "CacheKeyPrefix",
            Type="String",
            Default="github-cache/",
        ))

    p_logs_retention_days = t.add_parameter(
        Parameter(
            "LogsRetentionDays",
            Description=
            "Number of days to keep CloudWatch logs for this stack's lambda function.",
            Type="Number",
            Default="30",
        ))

    p_code_build_role_policy_arns = t.add_parameter(
        Parameter(
            "CodeBuildRolePolicyArns",
            Description=
            "Optional list of IAM managed policy ARNs to attach to the CodeBuild role.",
            Type="String",
            Default="-NONE-",
        ))

    p_read_ecr_arns = t.add_parameter(
        Parameter(
            "ReadECRArns",
            Description=
            "ECS Repository ARNs to give CodeBuild permission to pull images from.",
            Type="String",
            Default="-NONE-",
        ))

    p_read_s3_arns = t.add_parameter(
        Parameter(
            "ReadS3Arns",
            Description="S3 ARNs to give CodeBuild permission to S3.",
            Type="String",
            Default="-NONE-",
        ))

    p_read_ssm_param_arns = t.add_parameter(
        Parameter(
            "ReadSSMParamArns",
            Description="SSM parameters to give CodeBuild permission to read.",
            Type="String",
            Default="-NONE-",
        ))

    p_read_kms_arns = t.add_parameter(
        Parameter(
            "ReadKMSArns",
            Description="KMS keys to give CodeBuild permission to decrypt.",
            Type="String",
            Default="-NONE-",
        ))

    p_vpc = t.add_parameter(
        Parameter(
            "VPC",
            Description="Optional VPC to use for CodeBuild.",
            Type="String",
            Default="-NONE-",
        ))

    p_security_groups = t.add_parameter(
        Parameter(
            "SecurityGroups",
            Description="Security groups to use for CodeBuild.",
            Type="String",
            Default="-NONE-",
        ))

    p_subnets = t.add_parameter(
        Parameter(
            "Subnets",
            Description="Subnets to use for CodeBuild.",
            Type="String",
            Default="-NONE-",
        ))

    t.add_condition(
        "HasCodeBuildRolePolicyArns",
        Not(Equals(Ref(p_code_build_role_policy_arns), "-NONE-")),
    )

    t.add_condition(
        "HasReadECRArns",
        Not(Equals(Ref(p_read_ecr_arns), "-NONE-")),
    )

    t.add_condition(
        "HasReadS3Arns",
        Not(Equals(Ref(p_read_s3_arns), "-NONE-")),
    )

    t.add_condition(
        "HasReadSSMParamArns",
        Not(Equals(Ref(p_read_ssm_param_arns), "-NONE-")),
    )

    t.add_condition(
        "HasReadKMSArns",
        Not(Equals(Ref(p_read_kms_arns), "-NONE-")),
    )

    t.add_condition(
        "HasVPC",
        Not(Equals(Ref(p_vpc), "-NONE-")),
    )

    # Replace with custom tags if desired.
    tags = build_tags_list(t)

    r_log_group = t.add_resource(
        LogGroup(
            "CodeBuildLogGroup",
            LogGroupName=Sub("/aws/codebuild/${AWS::StackName}"),
            RetentionInDays=Ref(p_logs_retention_days),
        ))

    r_code_build_role = t.add_resource(
        Role(
            "CodeBuildRole",
            AssumeRolePolicyDocument=PolicyDocument(
                Version="2012-10-17",
                Statement=[
                    Statement(
                        Effect=Allow,
                        Action=[ac_sts.AssumeRole],
                        Principal=Principal("Service",
                                            ["codebuild.amazonaws.com"]),
                    ),
                ],
            ),
            ManagedPolicyArns=If(
                "HasCodeBuildRolePolicyArns",
                Split(",", Ref(p_code_build_role_policy_arns)),
                NoValue,
            ),
            Policies=[
                Policy(
                    PolicyName="code-build-policy",
                    PolicyDocument={
                        "Statement": [
                            Statement(
                                Effect=Allow,
                                Resource=[
                                    GetAtt(r_log_group, "Arn"),
                                ],
                                Action=[
                                    ac_logs.CreateLogGroup,
                                    ac_logs.CreateLogStream,
                                    ac_logs.PutLogEvents,
                                ],
                            ),
                            Statement(
                                Effect=Allow,
                                Resource=[
                                    Sub(
                                        ac_s3.ARN(resource="${%s}/${%s}*" % (
                                            p_source_bucket.title,
                                            p_source_key_prefix.title,
                                        ), )),
                                ],
                                Action=[
                                    ac_s3.GetObject,
                                    ac_s3.GetObjectVersion,
                                ],
                            ),
                            Statement(
                                Effect=Allow,
                                Resource=[
                                    Sub(
                                        ac_s3.ARN(resource="${%s}/${%s}*" % (
                                            p_artifact_bucket.title,
                                            p_artifact_key_prefix.title,
                                        ), )),
                                    Sub(
                                        ac_s3.ARN(resource="${%s}/${%s}*" % (
                                            p_cache_bucket.title,
                                            p_cache_key_prefix.title,
                                        ), )),
                                ],
                                Action=[
                                    ac_s3.GetObject,
                                    ac_s3.GetObjectVersion,
                                    ac_s3.PutObject,
                                ],
                            ),
                            If(
                                "HasReadECRArns",
                                {
                                    "Effect":
                                    Allow,
                                    "Resource":
                                    Split(",", Ref(p_read_ecr_arns)),
                                    "Action": [
                                        ac_ecr.BatchCheckLayerAvailability,
                                        ac_ecr.BatchGetImage,
                                        ac_ecr.GetDownloadUrlForLayer,
                                    ],
                                },
                                NoValue,
                            ),
                            If(
                                "HasReadS3Arns",
                                {
                                    "Effect":
                                    Allow,
                                    "Resource":
                                    Split(",", Ref(p_read_s3_arns)),
                                    "Action": [
                                        ac_s3.ListBucket,
                                        ac_s3.GetObject,
                                        ac_s3.GetObjectVersion,
                                    ],
                                },
                                NoValue,
                            ),
                            If(
                                "HasReadSSMParamArns",
                                {
                                    "Effect":
                                    Allow,
                                    "Resource":
                                    Split(",", Ref(p_read_ssm_param_arns)),
                                    "Action": [
                                        ac_ssm.GetParameter,
                                    ],
                                },
                                NoValue,
                            ),
                            If(
                                "HasReadKMSArns",
                                {
                                    "Effect": Allow,
                                    "Resource": Split(",",
                                                      Ref(p_read_kms_arns)),
                                    "Action": [
                                        ac_kms.Decrypt,
                                    ],
                                },
                                NoValue,
                            ),
                        ]
                    },
                ),
            ],
        ))

    r_code_build = t.add_resource(
        Project(
            "CodeBuild",
            Name=Ref("AWS::StackName"),
            Description=Ref(p_build_description),
            ServiceRole=Ref(r_code_build_role),
            Source=Source(Type="CODEPIPELINE", ),
            Artifacts=Artifacts(Type="CODEPIPELINE", ),
            VpcConfig=If(
                "HasVPC",
                VpcConfig(
                    VpcId=Ref(p_vpc),
                    Subnets=Ref(p_subnets),
                    SecurityGroupIds=Ref(p_security_groups),
                ),
                NoValue,
            ),
            Environment=CodeBuildEnvironment(
                Type="LINUX_CONTAINER",
                ComputeType="BUILD_GENERAL1_SMALL",
                Image="aws/codebuild/ubuntu-base:14.04",
            ),
            Tags=tags,
        ))

    t.add_resource(
        PolicyType(
            "ApiLambdaRolePolicy",
            Roles=[
                Ref(p_api_lambda_role),
            ],
            PolicyName=Sub("${AWS::StackName}-policy"),
            PolicyDocument=PolicyDocument(Statement=[
                Statement(
                    Effect=Allow,
                    Resource=[GetAtt(r_code_build, "Arn")],
                    Action=[
                        ac_codebuild.BatchGetBuilds,
                    ],
                ),
                Statement(
                    Effect=Allow,
                    Resource=[
                        GetAtt(r_log_group, "Arn"),
                    ],
                    Action=[
                        ac_logs.GetLogEvents,
                    ],
                ),
            ], ),
        ))

    t.add_resource(
        PolicyType(
            "StepLambdaRolePolicy",
            Roles=[
                Ref(p_step_lambda_role),
            ],
            PolicyName=Sub("${AWS::StackName}-policy"),
            PolicyDocument=PolicyDocument(Statement=[
                Statement(
                    Effect=Allow,
                    Resource=[GetAtt(r_code_build, "Arn")],
                    Action=[
                        ac_codebuild.StartBuild,
                        ac_codebuild.StopBuild,
                        ac_codebuild.BatchGetBuilds,
                    ],
                ),
            ], ),
        ))

    t.add_output(Output(
        "CodeBuildProjectName",
        Value=Ref(r_code_build),
    ))

    t.add_output(Output(
        "CodeBuildArn",
        Value=GetAtt(r_code_build, "Arn"),
    ))

    return t
Example #15
 def define_artifacts(self, kwargs):
     artifact = Artifacts(Type="NO_ARTIFACTS")
     if 'UseCodePipeline' in kwargs.keys():
         artifact = Artifacts(Type="CODEPIPELINE", Packaging="ZIP")
     return artifact
Example #16
def create_pipeline_template(name) -> Template:
    t = Template()

    github_token = t.add_parameter(Parameter("GithubToken", Type="String"))

    github_owner = t.add_parameter(
        Parameter("GitHubOwner",
                  Type='String',
                  Default='larroy',
                  AllowedPattern="[A-Za-z0-9-_]+"))

    github_repo = t.add_parameter(
        Parameter("GitHubRepo",
                  Type='String',
                  Default='ci',
                  AllowedPattern="[A-Za-z0-9-_]+"))

    github_branch = t.add_parameter(
        Parameter("GitHubBranch",
                  Type='String',
                  Default='master',
                  AllowedPattern="[A-Za-z0-9-_]+"))

    artifact_store_s3_bucket = t.add_resource(Bucket(name + "bucket", ))

    cloudformationrole = t.add_resource(
        Role("CloudformationRole",
             AssumeRolePolicyDocument=PolicyDocument(
                 Version="2012-10-17",
                 Statement=[
                     Statement(
                         Effect=Allow,
                         Action=[AssumeRole],
                         Principal=Principal("Service",
                                             ["cloudformation.amazonaws.com"]))
                 ]),
             ManagedPolicyArns=['arn:aws:iam::aws:policy/AdministratorAccess'
                                ]))

    codepipelinerole = t.add_resource(
        Role("CodePipelineRole",
             AssumeRolePolicyDocument=PolicyDocument(Statement=[
                 Statement(Effect=Allow,
                           Action=[AssumeRole],
                           Principal=Principal("Service",
                                               ["codepipeline.amazonaws.com"]))
             ]),
             ManagedPolicyArns=['arn:aws:iam::aws:policy/AdministratorAccess'
                                ]))

    code_build_role = t.add_resource(
        Role("CodeBuildRole",
             AssumeRolePolicyDocument=PolicyDocument(Statement=[
                 Statement(Effect=Allow,
                           Action=[AssumeRole],
                           Principal=Principal("Service",
                                               ["codebuild.amazonaws.com"]))
             ]),
             ManagedPolicyArns=[
                 'arn:aws:iam::aws:policy/AmazonS3ReadOnlyAccess',
                 'arn:aws:iam::aws:policy/AWSCodeBuildAdminAccess',
                 'arn:aws:iam::aws:policy/CloudWatchFullAccess',
             ]))

    environment = Environment(
        ComputeType='BUILD_GENERAL1_SMALL',
        Image='aws/codebuild/python:3.7.1',
        Type='LINUX_CONTAINER',
    )

    # https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codebuild-project-source.html
    codebuild_project = t.add_resource(
        Project(name,
                Name=name,
                Description='continuous deployment of infrastructure',
                Artifacts=Artifacts(Type='CODEPIPELINE'),
                Environment=environment,
                Source=Source(Type='CODEPIPELINE'),
                ServiceRole=code_build_role.GetAtt('Arn')))

    pipeline = t.add_resource(
        Pipeline(
            name + "Pipeline",
            ArtifactStore=ArtifactStore(
                Type="S3", Location=Ref(artifact_store_s3_bucket)),
            #        DisableInboundStageTransitions = [
            #            DisableInboundStageTransitions(
            #                StageName = "Release",
            #                Reason = "Disabling the transition until "
            #                       "integration tests are completed"
            #            )
            #        ],
            RestartExecutionOnUpdate=True,
            RoleArn=codepipelinerole.GetAtt('Arn'),
            Stages=[
                Stages(Name="Source",
                       Actions=[
                           Actions(Name="SourceAction",
                                   ActionTypeId=ActionTypeId(
                                       Category="Source",
                                       Owner="ThirdParty",
                                       Provider="GitHub",
                                       Version="1",
                                   ),
                                   OutputArtifacts=[
                                       OutputArtifacts(Name="GitHubSourceCode")
                                   ],
                                   Configuration={
                                       'Owner': Ref(github_owner),
                                       'Repo': Ref(github_repo),
                                       'Branch': Ref(github_branch),
                                       'PollForSourceChanges': False,
                                       'OAuthToken': Ref(github_token)
                                   },
                                   RunOrder="1")
                       ]),
                Stages(Name="Build",
                       Actions=[
                           Actions(Name="BuildAction",
                                   ActionTypeId=ActionTypeId(
                                       Category="Build",
                                       Owner="AWS",
                                       Provider="CodeBuild",
                                       Version="1"),
                                   InputArtifacts=[
                                       InputArtifacts(Name="GitHubSourceCode")
                                   ],
                                   OutputArtifacts=[
                                       OutputArtifacts(Name="BuildArtifacts")
                                   ],
                                   Configuration={
                                       'ProjectName': Ref(codebuild_project),
                                   },
                                   RunOrder="1")
                       ]),
            ],
        ))

    t.add_resource(
        Webhook("GitHubWebHook",
                Authentication='GITHUB_HMAC',
                AuthenticationConfiguration=WebhookAuthConfiguration(
                    SecretToken=Ref(github_token)),
                Filters=[
                    WebhookFilterRule(JsonPath='$.ref',
                                      MatchEquals='refs/heads/{Branch}')
                ],
                TargetPipeline=Ref(pipeline),
                TargetAction='Source',
                TargetPipelineVersion=pipeline.GetAtt('Version')))

    return t
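# A minimal usage sketch, assuming the troposphere imports used above; the
# project name "ci" is only an example. Template.to_json() renders the stack
# as CloudFormation JSON.
if __name__ == "__main__":
    print(create_pipeline_template("ci").to_json())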
Example #17
    def __init__(self, prefix: str, lambda_under_deployment: Function,
                 artifacts_bucket: Bucket) -> None:
        """
        Constructor.

        :param prefix: A prefix for pipeline resource names.
        :param lambda_under_deployment: Main AWS Lambda function for which the pipeline should be created.
        :param artifacts_bucket: An S3 bucket in which pipeline builds are saved and read.
        """
        self.lambda_under_deployment = lambda_under_deployment

        self.deployment_group_role = Role(
            prefix + 'DeploymentGroupRole',
            Path='/',
            Policies=[
                Policy(PolicyName=prefix + 'FargateEcsDeploymentGroupPolicy',
                       PolicyDocument={
                           'Version':
                           '2012-10-17',
                           'Statement': [{
                               'Action': [
                                   "lambda:InvokeFunction",
                                   "cloudwatch:DescribeAlarms", "sns:Publish",
                                   "s3:GetObject", "s3:GetObjectMetadata",
                                   "s3:GetObjectVersion", "iam:PassRole"
                               ],
                               "Resource":
                               "*",
                               "Effect":
                               "Allow"
                           }]
                       })
            ],
            AssumeRolePolicyDocument={
                'Version':
                '2012-10-17',
                'Statement': [{
                    'Action': ['sts:AssumeRole'],
                    'Effect': 'Allow',
                    'Principal': {
                        'Service': [
                            'codedeploy.amazonaws.com',
                        ]
                    }
                }]
            },
        )

        self.code_build_role = Role(
            prefix + 'CodeBuildRole',
            Path='/',
            Policies=[
                Policy(PolicyName=prefix + 'CodeBuildPolicy',
                       PolicyDocument={
                           'Version':
                           '2012-10-17',
                           'Statement': [{
                               'Effect': 'Allow',
                               'Action': ['codebuild:*'],
                               'Resource': '*'
                           }, {
                               "Effect": "Allow",
                               "Action": ["logs:*"],
                               "Resource": "arn:aws:logs:*:*:*"
                           }, {
                               'Effect': 'Allow',
                               'Action': ['s3:*'],
                               'Resource': '*'
                           }, {
                               'Effect': 'Allow',
                               'Action': ['ssm:*'],
                               'Resource': '*'
                           }]
                       })
            ],
            AssumeRolePolicyDocument={
                'Version':
                '2012-10-17',
                'Statement': [{
                    'Action': ['sts:AssumeRole'],
                    'Effect': 'Allow',
                    'Principal': {
                        'Service': [
                            'codebuild.amazonaws.com',
                        ]
                    }
                }]
            },
        )

        self.pipeline_role = Role(
            prefix + 'PipelineRole',
            Path='/',
            Policies=[
                Policy(PolicyName=prefix + 'PipelinePolicy',
                       PolicyDocument={
                           'Version':
                           '2012-10-17',
                           'Statement': [{
                               'Effect': 'Allow',
                               'Action': 'codepipeline:*',
                               'Resource': '*'
                           }, {
                               'Effect': 'Allow',
                               'Action': 'codecommit:*',
                               'Resource': '*'
                           }, {
                               'Effect': 'Allow',
                               'Action': 's3:*',
                               'Resource': '*'
                           }, {
                               'Effect': 'Allow',
                               'Action': 'codebuild:*',
                               'Resource': '*'
                           }]
                       })
            ],
            AssumeRolePolicyDocument={
                'Version':
                '2012-10-17',
                'Statement': [{
                    'Action': ['sts:AssumeRole'],
                    'Effect': 'Allow',
                    'Principal': {
                        'Service': [
                            'codepipeline.amazonaws.com',
                        ]
                    }
                }]
            },
        )

        # Create a bucket to which a ci/cd pipeline will save a build output (Lambda deployment package).
        self.lambda_deployment_bucket = Bucket(
            prefix + 'LambdaDeploymentBucket',
            BucketName=prefix.lower() + '.lambda.deployment.bucket',
            AccessControl='Private',
            PublicAccessBlockConfiguration=PublicAccessBlockConfiguration(
                BlockPublicAcls=True,
                BlockPublicPolicy=True,
                IgnorePublicAcls=True,
                RestrictPublicBuckets=True))

        # Create deployment lambda which executes deployment actions against our original AWS Lambda function.
        self.lambda_deployment_bucket_trigger = DeploymentLambda(
            prefix, lambda_under_deployment)

        # We need to create an explicit permission for a S3 bucket so the bucket would be able to
        # invoke deployment lambda function.
        self.invoke_permission = Permission(
            prefix + 'LambdaDeploymentPermission',
            Action='lambda:InvokeFunction',
            FunctionName=GetAtt(self.lambda_deployment_bucket_trigger.function,
                                'Arn'),
            Principal='s3.amazonaws.com',
            SourceArn=GetAtt(self.lambda_deployment_bucket, 'Arn'))

        # Add a notification configuration to our S3 bucket which makes the bucket to fire an event and invoke
        # the deployment function every time an object is created in the bucket.
        self.s3_notification_configuration = CustomS3Notification(
            prefix + 'S3NotificationConfiguration',
            ServiceToken=S3NotificationService().service_token(),
            BucketName=self.lambda_deployment_bucket.BucketName,
            NotificationConfiguration={
                'LambdaFunctionConfigurations': [
                    {
                        'LambdaFunctionArn':
                        GetAtt(self.lambda_deployment_bucket_trigger.function,
                               'Arn'),
                        'Events': [
                            's3:ObjectCreated:*',
                        ],
                    },
                ]
            },
            DependsOn=[
                self.lambda_deployment_bucket.title,
                self.invoke_permission.title,
                self.lambda_deployment_bucket_trigger.function.title
            ])

        # Create a main git repository which fires a ci/cd pipeline every time a code is committed to it.
        self.git_repository = Repository(prefix + 'GitRepository',
                                         RepositoryName=prefix.lower())

        self.code_build_project = Project(
            prefix + 'CodeBuildProject',
            Name=prefix + 'CodeBuildProject',
            Artifacts=Artifacts(Type='CODEPIPELINE'),
            Environment=Environment(
                ComputeType='BUILD_GENERAL1_SMALL',
                Image='aws/codebuild/standard:2.0',
                Type='LINUX_CONTAINER',
                EnvironmentVariables=[],
            ),
            ServiceRole=GetAtt(self.code_build_role, 'Arn'),
            Source=Source(Type='CODEPIPELINE',
                          BuildSpec=self.__read_buildspec()))

        # Our main pipeline for ci/cd.
        self.pipeline = Pipeline(
            prefix + 'Pipeline',
            ArtifactStore=ArtifactStore(Location=artifacts_bucket.BucketName,
                                        Type='S3'),
            Name=prefix + 'Pipeline',
            RoleArn=GetAtt(self.pipeline_role, 'Arn'),
            Stages=[
                Stages(Name='SourceStage',
                       Actions=[
                           Actions(Name='SourceAction',
                                   ActionTypeId=ActionTypeId(
                                       Category='Source',
                                       Owner='AWS',
                                       Version='1',
                                       Provider='CodeCommit'),
                                   OutputArtifacts=[
                                       OutputArtifacts(Name='SourceOutput')
                                   ],
                                   Configuration={
                                       'RepositoryName':
                                       self.git_repository.RepositoryName,
                                       'BranchName': 'master'
                                   },
                                   RunOrder='1')
                       ]),
                Stages(Name='BuildStage',
                       Actions=[
                           Actions(
                               Name='BuildAction',
                               ActionTypeId=ActionTypeId(Category='Build',
                                                         Owner='AWS',
                                                         Version='1',
                                                         Provider='CodeBuild'),
                               InputArtifacts=[
                                   InputArtifacts(Name='SourceOutput')
                               ],
                               OutputArtifacts=[
                                   OutputArtifacts(Name='BuildOutput')
                               ],
                               Configuration={
                                   'ProjectName': self.code_build_project.Name,
                               },
                               RunOrder='1')
                       ]),
                Stages(Name='DeployStage',
                       Actions=[
                           Actions(
                               Name='DeployAction',
                               ActionTypeId=ActionTypeId(Category='Deploy',
                                                         Owner='AWS',
                                                         Version='1',
                                                         Provider='S3'),
                               InputArtifacts=[
                                   InputArtifacts(Name='BuildOutput')
                               ],
                               Configuration={
                                   'BucketName':
                                   self.lambda_deployment_bucket.BucketName,
                                   'Extract': False,
                                   'ObjectKey': 'package.zip'
                               },
                               RunOrder='1')
                       ])
            ],
            DependsOn=[
                artifacts_bucket.title, self.git_repository.title,
                self.code_build_project.title, lambda_under_deployment.title
            ])
Example #18
    def build(self) -> Artifacts:
        checkForNoneValues(self)
        return Artifacts(Type=self._type)
Example #19
def generate_template(d):

    # Set template metadata
    t = Template()
    t.set_version("2010-09-09")
    t.set_description(d["cf_template_description"])

    aws_account_id = Ref("AWS::AccountId")
    aws_region = Ref("AWS::Region")

    # Task definition
    task_definition = t.add_resource(
        TaskDefinition(
            "TaskDefinition",
            Family=Join(
                "",
                [d["env"], "-", d["project_name"], "-", d["service_name"]]),
            RequiresCompatibilities=["FARGATE"],
            Cpu=d["container_cpu"],
            Memory=d["container_memory"],
            NetworkMode="awsvpc",
            ExecutionRoleArn=ImportValue(d["ecs_stack_name"] +
                                         "-ECSClusterRole"),
            ContainerDefinitions=[
                ContainerDefinition(
                    Name=Join("", [
                        d["env"], "-", d["project_name"], "-",
                        d["service_name"]
                    ]),
                    # Must resolve to the ECR repository created below:
                    # <account>.dkr.ecr.<region>.amazonaws.com/<env>-<project>-<service>:latest
                    Image=Join(
                        "",
                        [
                            aws_account_id, ".dkr.ecr.", aws_region,
                            ".amazonaws.com/", d["env"], "-", d["project_name"],
                            "-", d["service_name"], ":latest"
                        ],
                    ),
                    Essential=True,
                    PortMappings=[
                        PortMapping(
                            ContainerPort=d["container_port"],
                            HostPort=d["container_port"],
                        )
                    ],
                    EntryPoint=["sh", "-c"],
                    Command=[d["container_command"]],
                    LogConfiguration=LogConfiguration(
                        LogDriver="awslogs",
                        Options={
                            "awslogs-region":
                            aws_region,
                            "awslogs-group":
                            Join("", [
                                d["env"], "-", d["project_name"], "-",
                                d["service_name"]
                            ]),
                            "awslogs-stream-prefix":
                            "ecs",
                            "awslogs-create-group":
                            "true"
                        }))
            ],
            Tags=Tags(d["tags"],
                      {"Name": d["project_name"] + "-task-definition"}),
        ))

    # ECR
    ecr = t.add_resource(
        Repository(
            "ECR",
            DependsOn="ListenerRule",
            RepositoryName=Join(
                "",
                [d["env"], "-", d["project_name"], "-", d["service_name"]]),
            Tags=Tags(d["tags"], {"Name": d["project_name"] + "-ecr"}),
        ))
    # Target group
    target_group = t.add_resource(
        elb.TargetGroup(
            "TargetGroup",
            Name=Join("", [d["env"], "-", d["service_name"]]),
            HealthCheckIntervalSeconds="30",
            HealthCheckProtocol="HTTP",
            HealthCheckPort=d["container_port"],
            HealthCheckTimeoutSeconds="10",
            HealthyThresholdCount="4",
            HealthCheckPath=d["tg_health_check_path"],
            Matcher=elb.Matcher(HttpCode="200-299"),
            Port=d["container_port"],
            Protocol="HTTP",
            TargetType="ip",
            UnhealthyThresholdCount="3",
            VpcId=ImportValue(d["network_stack_name"] + "-VPCId"),
            Tags=Tags(d["tags"], {"Name": d["project_name"] + "-ecr"}),
        ))
    # Listener rule
    t.add_resource(
        elb.ListenerRule(
            "ListenerRule",
            DependsOn="TargetGroup",
            ListenerArn=ImportValue(d["ecs_stack_name"] + "-ListenerArnHTTP"),
            Conditions=[
                elb.Condition(Field="path-pattern",
                              Values=[d["application_path_api"]])
            ],
            Actions=[
                elb.Action(Type="forward", TargetGroupArn=Ref(target_group))
            ],
            Priority="1",
        ))
    # ECS service
    ecs_service = t.add_resource(
        Service(
            "ECSService",
            ServiceName=Join(
                "",
                [d["env"], "-", d["project_name"], "-", d["service_name"]]),
            DependsOn="pipeline",
            DesiredCount=d["container_desired_tasks_count"],
            TaskDefinition=Ref(task_definition),
            LaunchType="FARGATE",
            NetworkConfiguration=NetworkConfiguration(
                AwsvpcConfiguration=AwsvpcConfiguration(
                    Subnets=[
                        ImportValue(d["network_stack_name"] +
                                    "-PrivateSubnetId1"),
                        ImportValue(d["network_stack_name"] +
                                    "-PrivateSubnetId2"),
                    ],
                    SecurityGroups=[
                        ImportValue(d["ecs_stack_name"] + "-ECSClusterSG")
                    ],
                )),
            LoadBalancers=([
                LoadBalancer(
                    ContainerName=Join(
                        "",
                        [
                            d["env"], "-", d["project_name"], "-",
                            d["service_name"]
                        ],
                    ),
                    ContainerPort=d["container_port"],
                    TargetGroupArn=Ref(target_group),
                )
            ]),
            Cluster=ImportValue(d["ecs_stack_name"] + "-ECSClusterName"),
            Tags=Tags(d["tags"], {"Name": d["project_name"] + "-ecs-service"}),
        ))
    # App Autoscaling target

    # App Autoscaling policy

    # Codebuild project
    codebuild = t.add_resource(
        Project(
            "codebuild",
            Name=Join(
                "",
                [d["env"], "-", d["project_name"], "-", d["service_name"]]),
            DependsOn="ECR",
            ServiceRole=ImportValue(d["ecs_stack_name"] +
                                    "-CodebuildDeveloperRole"),
            Artifacts=Artifacts(
                Name="Build",
                Location=d["artifact_store"],
                Type="S3",
            ),
            Description="Build a docker image and send it to ecr",
            Source=Source(
                BuildSpec="buildspec.yml",
                Type="S3",
                Location=d["artifact_store"] + "/" + d["artifact_name"],
            ),
            Environment=Environment(
                ComputeType="BUILD_GENERAL1_SMALL",
                Image="aws/codebuild/standard:4.0",
                PrivilegedMode=True,
                Type="LINUX_CONTAINER",
                EnvironmentVariables=[
                    EnvironmentVariable(
                        Name="AWS_DEFAULT_REGION",
                        Type="PLAINTEXT",
                        Value=aws_region,
                    ),
                    EnvironmentVariable(
                        Name="SERVICE_NAME",
                        Type="PLAINTEXT",
                        Value=Join(
                            "",
                            [
                                d["env"], "-", d["project_name"], "-",
                                d["service_name"]
                            ],
                        ),
                    ),
                    EnvironmentVariable(
                        Name="IMAGE_URI",
                        Type="PLAINTEXT",
                        Value=Join(
                            "",
                            [
                                aws_account_id,
                                ".dkr.ecr.",
                                aws_region,
                                ".amazonaws.com/",
                                d["env"],
                                "-",
                                d["project_name"],
                                "-",
                                d["service_name"],
                            ],
                        ),
                    ),
                ],
            ),
            Tags=Tags(d["tags"], {"Name": d["project_name"] + "-codebuild"}),
        ))

    # Codepipeline
    pipeline = t.add_resource(
        Pipeline(
            "pipeline",
            Name=Join(
                "",
                [d["env"], "-", d["project_name"], "-", d["service_name"]]),
            RoleArn=ImportValue(d["ecs_stack_name"] + "-CodePipelineRole"),
            Stages=[
                Stages(
                    Name="Source",
                    Actions=[
                        Actions(
                            Name="Source",
                            ActionTypeId=ActionTypeId(
                                Category="Source",
                                Owner="AWS",
                                Version="1",
                                Provider="S3",
                            ),
                            OutputArtifacts=[
                                OutputArtifacts(Name="source_artifact")
                            ],
                            Configuration={
                                "S3Bucket": d["artifact_store"],
                                "S3ObjectKey": d["artifact_name"],
                            },
                            RunOrder="1",
                        )
                    ],
                ),
                Stages(
                    Name="Build",
                    Actions=[
                        Actions(
                            Name="Build",
                            InputArtifacts=[
                                InputArtifacts(Name="source_artifact")
                            ],
                            OutputArtifacts=[
                                OutputArtifacts(Name="build_artifact")
                            ],
                            ActionTypeId=ActionTypeId(
                                Category="Build",
                                Owner="AWS",
                                Version="1",
                                Provider="CodeBuild",
                            ),
                            Configuration={"ProjectName": Ref(codebuild)},
                            RunOrder="1",
                        )
                    ],
                ),
                Stages(
                    Name="Deploy",
                    Actions=[
                        Actions(
                            Name="Deploy",
                            InputArtifacts=[
                                InputArtifacts(Name="build_artifact")
                            ],
                            ActionTypeId=ActionTypeId(
                                Category="Deploy",
                                Owner="AWS",
                                Version="1",
                                Provider="ECS",
                            ),
                            Configuration={
                                "ClusterName":
                                ImportValue(d["ecs_stack_name"] +
                                            "-ECSClusterName"),
                                "ServiceName":
                                Join(
                                    "",
                                    [
                                        d["env"],
                                        "-",
                                        d["project_name"],
                                        "-",
                                        d["service_name"],
                                    ],
                                ),
                                "FileName":
                                "definitions.json",
                            },
                        )
                    ],
                ),
            ],
            ArtifactStore=ArtifactStore(Type="S3",
                                        Location=d["artifact_store"]),
        ))
    # Route53

    # Outputs

    return t
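# A hypothetical settings dict for generate_template() above. The key names are
# exactly those read from `d` inside the function; all values are examples only.
example_settings = {
    "cf_template_description": "Fargate service with an ECR/CodeBuild/CodePipeline deployment",
    "env": "dev",
    "project_name": "demo",
    "service_name": "api",
    "container_cpu": "256",
    "container_memory": "512",
    "container_port": 8080,
    "container_command": "python app.py",
    "container_desired_tasks_count": 1,
    "tg_health_check_path": "/health",
    "application_path_api": "/api/*",
    "ecs_stack_name": "dev-ecs",
    "network_stack_name": "dev-network",
    "artifact_store": "example-artifact-bucket",
    "artifact_name": "source.zip",
    "tags": {"Project": "demo"},
}
# print(generate_template(example_settings).to_json())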
Example #20
def build_project(template=None, section=None, project_name=None, raw_env=None,
                  service_role: str = None) -> Template:
    """ Assemble all the requirements for a Troposphere CodeBuild Project. """
    # Avoid a shared mutable default argument; create a fresh Template per call.
    if template is None:
        template = Template()
    template.set_version('2010-09-09')
    secondary_artifacts = list()

    # Artifact object creation
    if 'artifact_s3_bucket' in config[section]:
        artifacts = build_artifacts(project_name,
                                    config.get(section, 'artifact_s3_bucket'))
        if 'artifact_secondary_identifiers' in config[section]:
            # There can be N number of secondary artifacts
            for arti in config.get(section, 'artifact_secondary_identifiers').split(','):
                secondary_artifacts.append(build_artifacts(arti, config.get(section, 'artifact_s3_bucket')))

    else:
        # One blank Artifact object required.
        artifacts = Artifacts(Type='NO_ARTIFACTS')
    env_list = list()

    # Convert the env: line in the config to a list.
    try:
        logging.debug(f'raw_env is {raw_env}')
        env = raw_env.split(' ')
    except AttributeError:
        env = config.get(section, 'env').split(' ')
        logging.debug(f'Section is {section}')

    # Split the env key/value pairs into dicts,
    # e.g. "STAGE=dev REGION=us-east-1" ->
    #      [{"Name": "STAGE", "Value": "dev"}, {"Name": "REGION", "Value": "us-east-1"}].
    for i in env:
        k, v = i.split("=", 1)  # split on the first '=' so values may contain '='
        env_list.append({"Name": k, "Value": v})

    environment = Environment(
        ComputeType=config.get(section, 'compute_type'),
        Image=str(config.get(section, 'image')),
        Type=str(config.get(section, 'env_type')),
        PrivilegedMode=True,
        EnvironmentVariables=env_list,
    )

    source = Source(
        Location=config.get(section, 'source_location'),
        Type=config.get(section, 'source_type'),
        GitCloneDepth=config.get(section, 'source_clonedepth'),
        BuildSpec=config.get(section, 'buildspec'),
        ReportBuildStatus=True
    )

    # Artifact is required; SecondaryArtifact is optional.
    if secondary_artifacts:
        project = Project(
            project_name,
            Artifacts=artifacts,
            SecondaryArtifacts=secondary_artifacts,
            Environment=environment,
            Name=project_name,
            TimeoutInMinutes=config.get(section, 'timeout_in_min'),
            ServiceRole=Ref(service_role),
            Source=source,
            SourceVersion=config.get(section, 'source_version'),
            BadgeEnabled=True,
            DependsOn=service_role,
        )
    else:
        project = Project(
            project_name,
            Artifacts=artifacts,
            Environment=environment,
            Name=project_name,
            TimeoutInMinutes=config.get(section, 'timeout_in_min'),
            ServiceRole=Ref(service_role),
            Source=source,
            SourceVersion=config.get(section, 'source_version'),
            BadgeEnabled=True,
            DependsOn=service_role,
        )
    template.add_resource(project)
    template.add_output([Output(f"CodeBuildProject{project_name}", Value=Ref(project))])
    return template
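# A hypothetical INI section consumed by build_project() above (key names are the
# ones read via config.get(); values are examples only). The optional
# artifact_s3_bucket / artifact_secondary_identifiers keys switch the project from
# NO_ARTIFACTS to S3 artifacts plus secondary artifacts:
#
#   [my-project]
#   compute_type = BUILD_GENERAL1_SMALL
#   image = aws/codebuild/standard:4.0
#   env_type = LINUX_CONTAINER
#   env = STAGE=dev REGION=us-east-1
#   source_location = https://github.com/example/repo.git
#   source_type = GITHUB
#   source_clonedepth = 1
#   buildspec = buildspec.yml
#   source_version = master
#   timeout_in_min = 60
#   artifact_s3_bucket = example-artifact-bucket
#   artifact_secondary_identifiers = docs,reports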
Example #21
                            Action("ecr", "PutImage")
                        ],
                        Resource=['*'],
                    ),
                ],
            )
        ),
    ]
))

# Generate the build project
CodeBuildProject = t.add_resource(Project(
    "CodeBuildProjectDockerStaticWebsite",
    TimeoutInMinutes=60,
    Artifacts=Artifacts(
        Type='CODEPIPELINE'
    ),
    Cache=ProjectCache(
        Type='NO_CACHE'
    ),
    Environment=Environment(
        ComputeType='BUILD_GENERAL1_SMALL',
        Image='aws/codebuild/standard:2.0',
        Type='LINUX_CONTAINER',
        PrivilegedMode=True,
        ImagePullCredentialsType='CODEBUILD'
    ),
    ServiceRole=GetAtt(CodeBuildServiceRole, "Arn"),
    Name=PROJECT_NAME,
    Source=Source(
        Type='CODEPIPELINE',
Example #22
# Add ecr repo to the cfn template
ml_docker_repo = t.add_resource(
    Docker_Repo('mlrepo', RepositoryName=Ref('mldockerregistrynameparameter')))

# Add codecommit repo
repo = t.add_resource(
    Repository('Repository',
               RepositoryDescription='ML repo',
               RepositoryName=Ref('reponameparameter')))

# Build out the CodeBuild portion of the solution. This stage of the pipeline relies on a Dockerfile being present
# in the CodeCommit repo that the pipeline hands to it, along with a buildspec.yml in the same repo describing the
# build. The buildspec builds a container image from the Dockerfile, tags it with the commit ID that triggered the
# pipeline, and pushes the image to ECR once the build completes.
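# A buildspec.yml along those lines might look like the sketch below (illustrative
# only: the image/repository names are placeholders, AWS_DEFAULT_REGION and
# AWS_ACCOUNT_ID are the variables injected by the Environment defined next, the
# ECR login command assumes AWS CLI v2 in the build image, and
# CODEBUILD_RESOLVED_SOURCE_VERSION is the commit ID CodeBuild exposes for the
# triggering change):
#
#   version: 0.2
#   phases:
#     pre_build:
#       commands:
#         - aws ecr get-login-password --region $AWS_DEFAULT_REGION | docker login --username AWS --password-stdin $AWS_ACCOUNT_ID.dkr.ecr.$AWS_DEFAULT_REGION.amazonaws.com
#     build:
#       commands:
#         - docker build -t ml-image:$CODEBUILD_RESOLVED_SOURCE_VERSION .
#         - docker tag ml-image:$CODEBUILD_RESOLVED_SOURCE_VERSION $AWS_ACCOUNT_ID.dkr.ecr.$AWS_DEFAULT_REGION.amazonaws.com/ml-repo:$CODEBUILD_RESOLVED_SOURCE_VERSION
#     post_build:
#       commands:
#         - docker push $AWS_ACCOUNT_ID.dkr.ecr.$AWS_DEFAULT_REGION.amazonaws.com/ml-repo:$CODEBUILD_RESOLVED_SOURCE_VERSION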
code_build_artifacts = Artifacts(Type='CODEPIPELINE')

environment = Environment(ComputeType='BUILD_GENERAL1_SMALL',
                          Image='aws/codebuild/docker:17.09.0',
                          Type='LINUX_CONTAINER',
                          EnvironmentVariables=[{
                              'Name': 'AWS_DEFAULT_REGION',
                              'Value': Ref('regionparameter'),
                              'Type': 'PLAINTEXT'
                          }, {
                              'Name':
                              'AWS_ACCOUNT_ID',
                              'Value':
                              Ref('accountparameter'),
                              'Type':
                              'PLAINTEXT'