def test_linux_gpu_environment(self):
    environment = codebuild.Environment(
        ComputeType="BUILD_GENERAL1_LARGE",
        Image="aws/codebuild/standard:4.0",
        Type="LINUX_GPU_CONTAINER",
    )
    environment.to_dict()

def test_arm_environment(self):
    environment = codebuild.Environment(
        ComputeType="BUILD_GENERAL1_LARGE",
        Image="aws/codebuild/amazonlinux2-aarch64-standard:1.0",
        Type="ARM_CONTAINER",
    )
    environment.to_dict()

def _create_codebuild_project(self, code_build_role):
    app_package_build = codebuild.Project(
        'AppPackageBuild',
        Artifacts=codebuild.Artifacts(Type='CODEPIPELINE'),
        Name=Sub('${ApplicationName}-build'),
        Environment=codebuild.Environment(
            ComputeType='BUILD_GENERAL1_LARGE',
            Image=Ref('CodeBuildImage'),
            Type='LINUX_CONTAINER',
            EnvironmentVariables=[
                codebuild.EnvironmentVariable(
                    Name='APP_S3_BUCKET',
                    Value=Ref('ApplicationBucket'),
                ),
            ],
        ),
        ServiceRole=code_build_role.GetAtt('Arn'),
        Source=codebuild.Source(
            Type='CODEPIPELINE',
            BuildSpec='pipeline/buildspec.yml',
        ),
    )
    self._t.add_resource(app_package_build)
    return app_package_build

def test_linux_environment(self):
    environment = codebuild.Environment(
        ComputeType='BUILD_GENERAL1_SMALL',
        Image='aws/codebuild/ubuntu-base:14.04',
        Type='LINUX_CONTAINER',
    )
    environment.to_dict()

def _build_project(name: str, action: InputResolver, role: Ref, bucket: Ref, tags: Tags) -> codebuild.Project:
    """Construct a CodeBuild project for the specified action.

    :param name: Logical resource name to use for project
    :param action: Action wrapped in an InputResolver
    :param role: Reference to CodeBuild role
    :param bucket: Reference to application resources bucket
    :param tags: Tags to add to project
    :return: Constructed project
    """
    return codebuild.Project(
        name,
        Name=Sub(f"${{{AWS_STACK_NAME}}}-{name}"),
        ServiceRole=role,
        Artifacts=codebuild.Artifacts(Type="CODEPIPELINE"),
        Source=codebuild.Source(Type="CODEPIPELINE", BuildSpec=action.buildspec),
        Environment=codebuild.Environment(
            ComputeType=action.compute_type,
            Type=action.environment_type,
            Image=action.image,
            EnvironmentVariables=[
                codebuild.EnvironmentVariable(Name="PIPEFORMER_S3_BUCKET", Value=bucket)
            ] + [
                codebuild.EnvironmentVariable(Name=key, Value=value)
                for key, value in action.env.items()
            ],
        ),
        Tags=tags,
    )

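# A minimal usage sketch for _build_project above (not from the original
# module): the SimpleNamespace stands in for an InputResolver-wrapped action,
# and the logical names "CodeBuildRole"/"ResourcesBucket" are illustrative.
def _example_build_project_usage() -> codebuild.Project:
    from types import SimpleNamespace

    from troposphere import Ref, Tags

    action = SimpleNamespace(  # hypothetical stand-in for the real action type
        buildspec="buildspec.yaml",
        compute_type="BUILD_GENERAL1_SMALL",
        environment_type="LINUX_CONTAINER",
        image="aws/codebuild/standard:4.0",
        env={"STAGE": "test"},
    )
    return _build_project(
        name="ExampleBuild",
        action=action,
        role=Ref("CodeBuildRole"),
        bucket=Ref("ResourcesBucket"),
        tags=Tags(Project="example"),
    )
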
def test_linux_gpu_environment(self):
    environment = codebuild.Environment(
        ComputeType='BUILD_GENERAL1_LARGE',
        Image='aws/codebuild/standard:4.0',
        Type='LINUX_GPU_CONTAINER',
    )
    environment.to_dict()

def test_windows_environment(self):
    environment = codebuild.Environment(
        ComputeType='BUILD_GENERAL1_LARGE',
        Image='aws/codebuild/windows-base:1.0',
        Type='WINDOWS_CONTAINER',
    )
    environment.to_dict()

def test_windows_environment(self):
    environment = codebuild.Environment(
        ComputeType="BUILD_GENERAL1_LARGE",
        Image="aws/codebuild/windows-base:1.0",
        Type="WINDOWS_CONTAINER",
    )
    environment.to_dict()

def test_linux_environment(self):
    environment = codebuild.Environment(
        ComputeType="BUILD_GENERAL1_SMALL",
        Image="aws/codebuild/ubuntu-base:14.04",
        Type="LINUX_CONTAINER",
    )
    environment.to_dict()

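# For reference, to_dict() in the tests above serializes the property map and
# runs troposphere's type validation; a sketch of the expected result for the
# Linux environment, assuming current troposphere behaviour:
def example_environment_to_dict():
    environment = codebuild.Environment(
        ComputeType="BUILD_GENERAL1_SMALL",
        Image="aws/codebuild/ubuntu-base:14.04",
        Type="LINUX_CONTAINER",
    )
    assert environment.to_dict() == {
        "ComputeType": "BUILD_GENERAL1_SMALL",
        "Image": "aws/codebuild/ubuntu-base:14.04",
        "Type": "LINUX_CONTAINER",
    }
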
def setUp(self):
    self.context = chaincontext.ChainContext(
        template=troposphere.Template(),
        instance_name='justtestin',
    )
    self.environment = codebuild.Environment(
        ComputeType='BUILD_GENERAL1_SMALL',
        Image='aws/codebuild/python:2.7.12',
        Type='LINUX_CONTAINER',
        EnvironmentVariables=[{'Name': 'TEST_VAR', 'Value': 'demo'}],
    )

def _codebuild_builder(role: iam.Role, application_bucket: s3.Bucket) -> codebuild.Project:
    """Build and return the CodeBuild Project resource to be used to build the decrypt oracle."""
    artifacts = codebuild.Artifacts(Type="CODEPIPELINE")
    environment = codebuild.Environment(
        ComputeType="BUILD_GENERAL1_SMALL",
        Image=CODEBUILD_IMAGE,
        Type="LINUX_CONTAINER",
        EnvironmentVariables=[
            codebuild.EnvironmentVariable(Name="APP_S3_BUCKET", Value=Ref(application_bucket))
        ],
    )
    source = codebuild.Source(Type="CODEPIPELINE", BuildSpec=BUILDSPEC)
    return codebuild.Project(
        "{}Build".format(APPLICATION_NAME),
        Artifacts=artifacts,
        Environment=environment,
        Name=APPLICATION_NAME,
        ServiceRole=Ref(role),
        Source=source,
    )

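# A minimal wiring sketch for _codebuild_builder above (illustrative only;
# the real stack constructs the role and bucket elsewhere):
def _example_codebuild_builder_usage() -> str:
    from troposphere import Template

    template = Template()
    role = template.add_resource(
        iam.Role(
            "ExampleBuildRole",
            AssumeRolePolicyDocument={
                "Version": "2012-10-17",
                "Statement": [
                    {
                        "Effect": "Allow",
                        "Principal": {"Service": "codebuild.amazonaws.com"},
                        "Action": "sts:AssumeRole",
                    }
                ],
            },
        )
    )
    bucket = template.add_resource(s3.Bucket("ExampleAppBucket"))
    template.add_resource(_codebuild_builder(role, bucket))
    return template.to_json()
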
def generate_codebuild_resource(name):
    return codebuild.Project(
        f'RunwayIntegrationTest{name}',
        Artifacts=codebuild.Artifacts(Type='NO_ARTIFACTS'),
        Description=f'{name} runway integration tests',
        Environment=codebuild.Environment(
            ComputeType='BUILD_GENERAL1_SMALL',
            EnvironmentVariables=[
                codebuild.EnvironmentVariable(
                    Name='DEPLOY_ENVIRONMENT',
                    Type='PLAINTEXT',
                    Value=variables['EnvironmentName'].ref,
                ),
                codebuild.EnvironmentVariable(
                    Name='TEST_TO_RUN',
                    Type='PLAINTEXT',
                    Value=name.lower(),
                ),
            ],
            Image='aws/codebuild/standard:2.0',
            Type='LINUX_CONTAINER',
        ),
        Name=f'runway-int-test-{name}',
        ServiceRole=codebuild_role.get_att('Arn'),
        Source=codebuild.Source(Type='GITHUB', Location=variables['GitHubUrl'].ref),
        Triggers=codebuild.ProjectTriggers(
            Webhook=True,
            FilterGroups=[[
                codebuild.WebhookFilter(
                    Type='ACTOR_ACCOUNT_ID',
                    Pattern='|'.join(str(x) for x in GITHUB_ACCOUNT_IDS),
                ),
                codebuild.WebhookFilter(
                    Type='EVENT',
                    Pattern='PULL_REQUEST_CREATED,'
                            'PULL_REQUEST_UPDATED,'
                            'PULL_REQUEST_REOPENED',
                ),
                codebuild.WebhookFilter(Type='BASE_REF', Pattern='^refs/heads/release$'),
                codebuild.WebhookFilter(Type='HEAD_REF', Pattern='^refs/heads/master$'),
            ]],
        ),
    )

def run(self):
    puppet_version = constants.VERSION
    description = f"""Bootstrap template used to configure spoke account for terraform use
{{"version": "{puppet_version}", "framework": "servicecatalog-puppet", "role": "bootstrap-spoke-terraform"}}"""
    service_role = t.Sub(
        "arn:aws:iam::${AWS::AccountId}:role/servicecatalog-puppet/PuppetDeployInSpokeRole"
    )
    template = t.Template(Description=description)
    state = template.add_resource(
        s3.Bucket(
            "state",
            BucketName=t.Sub("sc-puppet-state-${AWS::AccountId}"),
            VersioningConfiguration=s3.VersioningConfiguration(Status="Enabled"),
            BucketEncryption=s3.BucketEncryption(
                ServerSideEncryptionConfiguration=[
                    s3.ServerSideEncryptionRule(
                        ServerSideEncryptionByDefault=s3.ServerSideEncryptionByDefault(
                            SSEAlgorithm="AES256"
                        )
                    )
                ]
            ),
            PublicAccessBlockConfiguration=s3.PublicAccessBlockConfiguration(
                BlockPublicAcls=True,
                BlockPublicPolicy=True,
                IgnorePublicAcls=True,
                RestrictPublicBuckets=True,
            ),
            Tags=t.Tags({"ServiceCatalogPuppet:Actor": "Framework"}),
        )
    )
    template.add_resource(
        s3.BucketPolicy(
            "statePolicy",
            Bucket=t.Ref(state),
            PolicyDocument={
                "Version": "2012-10-17",
                "Statement": [
                    {
                        "Action": ["s3:GetObject*", "s3:PutObject*"],
                        "Principal": {"AWS": self.puppet_account_id},
                        "Resource": t.Join("/", [t.GetAtt(state, "Arn"), "*"]),
                        "Effect": "Allow",
                        "Sid": "AllowPuppet",
                    },
                ],
            },
        )
    )
    execute_build_spec = dict(
        version="0.2",
        phases=dict(
            install=dict(
                commands=[
                    "mkdir -p /root/downloads",
                    "curl -s -qL -o /root/downloads/terraform_${TERRAFORM_VERSION}_linux_amd64.zip https://releases.hashicorp.com/terraform/${TERRAFORM_VERSION}/terraform_${TERRAFORM_VERSION}_linux_amd64.zip",
                    "unzip /root/downloads/terraform_${TERRAFORM_VERSION}_linux_amd64.zip -d /usr/bin/",
                    "chmod +x /usr/bin/terraform",
                    "terraform --version",
                    "aws s3 cp $ZIP source.zip",
                    "unzip source.zip",
                ],
            ),
            pre_build=dict(
                commands=[
                    "aws s3 cp $STATE_FILE terraform.tfstate || echo 'no statefile copied'",
                    'ASSUME_ROLE_ARN="arn:aws:iam::${TARGET_ACCOUNT}:role/servicecatalog-puppet/PuppetRole"',
                    "TEMP_ROLE=$(aws sts assume-role --role-arn $ASSUME_ROLE_ARN --role-session-name terraform)",
                    "export TEMP_ROLE",
                    'export AWS_ACCESS_KEY_ID=$(echo "${TEMP_ROLE}" | jq -r ".Credentials.AccessKeyId")',
                    'export AWS_SECRET_ACCESS_KEY=$(echo "${TEMP_ROLE}" | jq -r ".Credentials.SecretAccessKey")',
                    'export AWS_SESSION_TOKEN=$(echo "${TEMP_ROLE}" | jq -r ".Credentials.SessionToken")',
                    "aws sts get-caller-identity",
                    "terraform init",
                ],
            ),
            build=dict(commands=["terraform apply -auto-approve"]),
            post_build=dict(
                commands=[
                    "terraform output -json > outputs.json",
                    "unset AWS_ACCESS_KEY_ID",
                    "unset AWS_SECRET_ACCESS_KEY",
                    "unset AWS_SESSION_TOKEN",
                    "aws sts get-caller-identity",
                    "aws s3 cp terraform.tfstate $STATE_FILE",
                ]
            ),
        ),
        artifacts=dict(files=["outputs.json"]),
    )
    execute_terraform = dict(
        Name=constants.EXECUTE_TERRAFORM_PROJECT_NAME,
        ServiceRole=service_role,
        Tags=t.Tags.from_dict(**{"ServiceCatalogPuppet:Actor": "Framework"}),
        Artifacts=codebuild.Artifacts(
            Type="S3",
            Location=t.Ref("state"),
            Path="terraform-executions",
            Name="artifacts-execute",
            NamespaceType="BUILD_ID",
        ),
        TimeoutInMinutes=480,
        Environment=codebuild.Environment(
            ComputeType="BUILD_GENERAL1_SMALL",
            Image=constants.CODEBUILD_DEFAULT_IMAGE,
            Type="LINUX_CONTAINER",
            EnvironmentVariables=[
                codebuild.EnvironmentVariable(
                    Name="TERRAFORM_VERSION",
                    Type="PARAMETER_STORE",
                    Value=constants.DEFAULT_TERRAFORM_VERSION_PARAMETER_NAME,
                ),
            ] + [
                codebuild.EnvironmentVariable(
                    Name=name,
                    Type="PLAINTEXT",
                    Value="CHANGE_ME",
                )
                for name in ["TARGET_ACCOUNT", "ZIP", "STATE_FILE"]
            ],
        ),
        Source=codebuild.Source(
            BuildSpec=yaml.safe_dump(execute_build_spec),
            Type="NO_SOURCE",
        ),
        Description="Execute the given terraform in the given account using the given state file",
    )

    # execute
    template.add_resource(
        codebuild.Project("ExecuteTerraformProject", **execute_terraform)
    )

    # execute dry run
    execute_dry_run_terraform = copy.deepcopy(execute_terraform)
    execute_dry_run_terraform["Name"] = constants.EXECUTE_DRY_RUN_TERRAFORM_PROJECT_NAME
    execute_dry_run_terraform["Description"] = execute_dry_run_terraform[
        "Description"
    ].replace("Execute", "DRY RUN of Execute")
    execute_dry_run_build_spec = copy.deepcopy(execute_build_spec)
    execute_dry_run_build_spec["phases"]["build"]["commands"] = [
        "terraform plan -out=plan.bin",
        "terraform show -json plan.bin > plan.json",
    ]
    del execute_dry_run_build_spec["phases"]["post_build"]
    execute_dry_run_build_spec["artifacts"] = dict(files=["plan.bin", "plan.json"])
    execute_dry_run_terraform["Source"] = codebuild.Source(
        BuildSpec=yaml.safe_dump(execute_dry_run_build_spec),
        Type="NO_SOURCE",
    )
    execute_dry_run_terraform["Artifacts"] = codebuild.Artifacts(
        Type="S3",
        Location=t.Ref("state"),
        Path="terraform-executions",
        Name="artifacts-execute-dry-run",
        NamespaceType="BUILD_ID",
    )
    template.add_resource(
        codebuild.Project("ExecuteDryRunTerraformProject", **execute_dry_run_terraform)
    )

    # terminate
    terminate_terraform = copy.deepcopy(execute_terraform)
    terminate_terraform["Name"] = constants.TERMINATE_TERRAFORM_PROJECT_NAME
    terminate_terraform["Description"] = terminate_terraform["Description"].replace(
        "Execute", "Terminate"
    )
    terminate_build_spec = copy.deepcopy(execute_build_spec)
    terminate_build_spec["phases"]["build"]["commands"] = [
        "terraform destroy -auto-approve"
    ]
    terminate_build_spec["phases"]["post_build"]["commands"] = [
        "unset AWS_ACCESS_KEY_ID",
        "unset AWS_SECRET_ACCESS_KEY",
        "unset AWS_SESSION_TOKEN",
        "aws sts get-caller-identity",
        "aws s3 cp terraform.tfstate $STATE_FILE",
    ]
    del terminate_build_spec["artifacts"]
    terminate_terraform["Source"] = codebuild.Source(
        BuildSpec=yaml.safe_dump(terminate_build_spec),
        Type="NO_SOURCE",
    )
    terminate_terraform["Artifacts"] = codebuild.Artifacts(
        Type="S3",
        Location=t.Ref("state"),
        Path="terraform-executions",
        Name="artifacts-terminate",
        NamespaceType="BUILD_ID",
    )
    template.add_resource(
        codebuild.Project("TerminateTerraformProject", **terminate_terraform)
    )

    # terminate dry run
    terminate_dry_run_terraform = copy.deepcopy(execute_terraform)
    terminate_dry_run_terraform["Name"] = constants.TERMINATE_DRY_RUN_TERRAFORM_PROJECT_NAME
    new_description = terminate_dry_run_terraform["Description"].replace(
        "Execute", "DRY RUN of Terminate"
    )
    terminate_dry_run_terraform["Description"] = new_description
    terminate_dry_run_build_spec = copy.deepcopy(execute_build_spec)
    terminate_dry_run_build_spec["phases"]["build"]["commands"] = [
        "terraform plan -destroy -out=plan.bin",
        "terraform show -json plan.bin > plan.json",
    ]
    del terminate_dry_run_build_spec["phases"]["post_build"]
    terminate_dry_run_build_spec["artifacts"] = dict(files=["plan.bin", "plan.json"])
    terminate_dry_run_terraform["Source"] = codebuild.Source(
        BuildSpec=yaml.safe_dump(terminate_dry_run_build_spec),
        Type="NO_SOURCE",
    )
    terminate_dry_run_terraform["Artifacts"] = codebuild.Artifacts(
        Type="S3",
        Location=t.Ref("state"),
        Path="terraform-executions",
        Name="artifacts-terminate-dry-run",
        NamespaceType="BUILD_ID",
    )
    template.add_resource(
        codebuild.Project("TerminateDryRunTerraformProject", **terminate_dry_run_terraform)
    )
    self.write_output(template.to_yaml(), skip_json_dump=True)

            ]
        },
        Roles=[Ref(ServiceRole)],
    ))

# Create CodeBuild Projects
# Image Build
ImageArtifacts = codebuild.Artifacts(
    Type='S3',
    Name='artifacts',
    Location=Ref(S3Bucket),
)
ImageEnvironment = codebuild.Environment(
    ComputeType="BUILD_GENERAL1_SMALL",
    Image="aws/codebuild/docker:17.09.0",
    Type="LINUX_CONTAINER",
    EnvironmentVariables=[{'Name': 'S3BUCKET', 'Value': Ref(S3Bucket)}],
    PrivilegedMode=True,
)
ImageSource = codebuild.Source(
    Location="https://github.com/ethanbrooks/ghost-ecs-fargate-pipeline",
    Type="GITHUB",
    BuildSpec="init-db-lambda/buildspec.yml",
)
ImageProject = codebuild.Project(
    "ImageBuildProject",
    Artifacts=ImageArtifacts,
    Environment=ImageEnvironment,
    Name="init-db-lambda-build",
    ServiceRole=Ref(ServiceRole),

# Image Build
ImageArtifacts = codebuild.Artifacts(Type='NO_ARTIFACTS')
ImageEnvironment = codebuild.Environment(
    ComputeType="BUILD_GENERAL1_SMALL",
    Image="aws/codebuild/docker:1.12.1",
    Type="LINUX_CONTAINER",
    EnvironmentVariables=[
        {'Name': 'AWS_DEFAULT_REGION', 'Value': 'ap-southeast-2'},
        {'Name': 'AWS_ACCOUNT_ID', 'Value': Ref(AWS_ACCOUNT_ID)},
        {'Name': 'IMAGE_REPO_NAME', 'Value': Ref(Repository)},
        {'Name': 'IMAGE_TAG', 'Value': 'latest'},
    ],
    PrivilegedMode=True,
)
ImageSource = codebuild.Source(
    Location="https://github.com/jasonumiker/shibboleth3-aws-duo-config",

def main():
    t = Template(
        "A template to create a load balanced autoscaled Web flask deployment using ansible."
    )
    addMapping(t)

    ### VPC CONFIGURATION ###
    vpc = ec2.VPC(
        "MainVPC",
        CidrBlock="10.1.0.0/16",
    )
    t.add_resource(vpc)
    vpc_id = Ref(vpc)

    subnet_1 = ec2.Subnet(
        "WebAppSubnet1",
        t,
        AvailabilityZone="us-east-1a",
        CidrBlock="10.1.0.0/24",
        MapPublicIpOnLaunch=True,
        VpcId=vpc_id,
    )
    subnet_1_id = Ref(subnet_1)
    subnet_2 = ec2.Subnet(
        "WebAppSubnet2",
        t,
        AvailabilityZone="us-east-1b",
        CidrBlock="10.1.1.0/24",
        MapPublicIpOnLaunch=True,
        VpcId=vpc_id,
    )
    subnet_2_id = Ref(subnet_2)

    ### NETWORKING ###
    igw = ec2.InternetGateway("internetGateway", t)
    gateway_to_internet = ec2.VPCGatewayAttachment(
        "GatewayToInternet",
        t,
        VpcId=vpc_id,
        InternetGatewayId=Ref(igw),
    )
    route_table = ec2.RouteTable(
        "subnetRouteTable",
        t,
        VpcId=vpc_id,
    )
    route_table_id = Ref(route_table)
    internet_route = ec2.Route(
        "routeToInternet",
        t,
        DependsOn=gateway_to_internet,
        DestinationCidrBlock="0.0.0.0/0",
        GatewayId=Ref(igw),
        RouteTableId=route_table_id,
    )
    subnet_1_route_assoc = ec2.SubnetRouteTableAssociation(
        "Subnet1RouteAssociation",
        t,
        RouteTableId=route_table_id,
        SubnetId=Ref(subnet_1),
    )
    subnet_2_route_assoc = ec2.SubnetRouteTableAssociation(
        "Subnet2RouteAssociation",
        t,
        RouteTableId=route_table_id,
        SubnetId=Ref(subnet_2),
    )

    http_ingress = {
        "CidrIp": "0.0.0.0/0",
        "Description": "Allow HTTP traffic in from internet.",
        "IpProtocol": "tcp",
        "FromPort": 80,
        "ToPort": 80,
    }
    ssh_ingress = {
        "CidrIp": "0.0.0.0/0",
        "Description": "Allow SSH traffic in from internet.",
        "IpProtocol": "tcp",
        "FromPort": 22,
        "ToPort": 22,
    }
    elb_sg = ec2.SecurityGroup(
        "elbSecurityGroup",
        t,
        GroupName="WebGroup",
        GroupDescription="Allow web traffic in from internet to ELB",
        VpcId=vpc_id,
        SecurityGroupIngress=[http_ingress],
    )
    ssh_sg = ec2.SecurityGroup(
        "sshSecurityGroup",
        t,
        GroupName="SSHGroup",
        GroupDescription="Allow SSH traffic in from internet",
        VpcId=vpc_id,
        SecurityGroupIngress=[ssh_ingress],
    )
    elb_sg_id = Ref(elb_sg)
    ssh_sg_id = Ref(ssh_sg)

    autoscale_ingress = {
        "SourceSecurityGroupId": elb_sg_id,
        "Description": "Allow web traffic in from ELB",
        "IpProtocol": "tcp",
        "FromPort": 80,
        "ToPort": 80,
    }
    autoscale_sg = ec2.SecurityGroup(
        "WebAutoscaleSG",
        t,
        GroupName="AutoscaleGroup",
        GroupDescription="Allow web traffic in from elb on port 80",
        VpcId=vpc_id,
        SecurityGroupIngress=[autoscale_ingress],
    )
    autoscale_sg_id = Ref(autoscale_sg)

    # BUCKETS
    app_bucket = s3.Bucket(
        "CodeDeployApplicationBucket",
        t,
    )

    ### LOAD BALANCING ###
    Web_elb = elb.LoadBalancer(
        "WebElb",
        t,
        Name="WebElb",  # TODO: Fix for name conflict
        Subnets=[subnet_1_id, subnet_2_id],
        SecurityGroups=[elb_sg_id],
    )
    Web_target_group = elb.TargetGroup(
        "WebTargetGroup",
        t,
        DependsOn=Web_elb,
        HealthCheckPath="/health",
        HealthCheckPort=80,
        HealthCheckProtocol="HTTP",
        Matcher=elb.Matcher(HttpCode="200"),
        Name="NginxTargetGroup",
        Port=80,
        Protocol="HTTP",
        VpcId=vpc_id,
    )
    Web_listener = elb.Listener(
        "WebListener",
        t,
        LoadBalancerArn=Ref(Web_elb),
        DefaultActions=[
            elb.Action(
                "forwardAction",
                TargetGroupArn=Ref(Web_target_group),
                Type="forward",
            )
        ],
        Port=80,
        Protocol="HTTP",
    )

    ### AUTOSCALING ###
    # Everything after sudo -u ubuntu is one command
    # The sudo command is required to properly set file permissions when
    # running the ansible script as it assumes running from non root user
    lc_user_data = Base64(Join("\n", [
        "#!/bin/bash",
        "apt-add-repository -y ppa:ansible/ansible",
        "apt-get update && sudo apt-get -y upgrade",
        "apt-get -y install git",
        "apt-get -y install ansible",
        "cd /home/ubuntu/",
        "sudo -H -u ubuntu bash -c '"
        "export LC_ALL=C.UTF-8 && "
        "export LANG=C.UTF-8 && "
        "ansible-pull -U https://github.com/DameonSmith/aws-meetup-ansible.git --extra-vars \"user=ubuntu\"'",
    ]))

    web_instance_role = iam.Role(
        "webInstanceCodeDeployRole",
        t,
        AssumeRolePolicyDocument={
            'Statement': [{
                'Effect': 'Allow',
                'Principal': {'Service': 'ec2.amazonaws.com'},
                'Action': 'sts:AssumeRole',
            }]
        },
        Policies=[
            iam.Policy(
                PolicyName="CodeDeployS3Policy",
                PolicyDocument=aws.Policy(
                    Version='2012-10-17',
                    Statement=[
                        aws.Statement(
                            Sid='CodeDeployS3',
                            Effect=aws.Allow,
                            Action=[
                                aws_s3.PutObject,
                                aws_s3.GetObject,
                                aws_s3.GetObjectVersion,
                                aws_s3.DeleteObject,
                                aws_s3.ListObjects,
                                aws_s3.ListBucket,
                                aws_s3.ListBucketVersions,
                                aws_s3.ListAllMyBuckets,
                                aws_s3.ListMultipartUploadParts,
                                aws_s3.ListBucketMultipartUploads,
                                aws_s3.ListBucketByTags,
                            ],
                            Resource=[
                                GetAtt(app_bucket, 'Arn'),
                                Join('', [GetAtt(app_bucket, 'Arn'), '/*']),
                                "arn:aws:s3:::aws-codedeploy-us-east-2/*",
                                "arn:aws:s3:::aws-codedeploy-us-east-1/*",
                                "arn:aws:s3:::aws-codedeploy-us-west-1/*",
                                "arn:aws:s3:::aws-codedeploy-us-west-2/*",
                                "arn:aws:s3:::aws-codedeploy-ca-central-1/*",
                                "arn:aws:s3:::aws-codedeploy-eu-west-1/*",
                                "arn:aws:s3:::aws-codedeploy-eu-west-2/*",
                                "arn:aws:s3:::aws-codedeploy-eu-west-3/*",
                                "arn:aws:s3:::aws-codedeploy-eu-central-1/*",
                                "arn:aws:s3:::aws-codedeploy-ap-northeast-1/*",
                                "arn:aws:s3:::aws-codedeploy-ap-northeast-2/*",
                                "arn:aws:s3:::aws-codedeploy-ap-southeast-1/*",
                                "arn:aws:s3:::aws-codedeploy-ap-southeast-2/*",
                                "arn:aws:s3:::aws-codedeploy-ap-south-1/*",
                                "arn:aws:s3:::aws-codedeploy-sa-east-1/*",
                            ],
                        )
                    ],
                ),
            )
        ],
    )
    web_instance_profile = iam.InstanceProfile(
        "webInstanceProfile",
        t,
        Path='/',
        Roles=[Ref(web_instance_role)],
    )
    Web_launch_config = autoscaling.LaunchConfiguration(
        "webLaunchConfig",
        t,
        ImageId=FindInMap("RegionMap", Ref("AWS::Region"), "AMI"),  # TODO: Remove magic string
        SecurityGroups=[ssh_sg_id, autoscale_sg_id],
        IamInstanceProfile=Ref(web_instance_profile),
        InstanceType="t2.micro",
        BlockDeviceMappings=[{
            "DeviceName": "/dev/sdk",
            "Ebs": {"VolumeSize": "10"},
        }],
        UserData=lc_user_data,
        KeyName="advanced-cfn",
    )
    Web_autoscaler = autoscaling.AutoScalingGroup(
        "WebAutoScaler",
        t,
        LaunchConfigurationName=Ref(Web_launch_config),
        MinSize="2",  # TODO: Change to parameter
        MaxSize="2",
        VPCZoneIdentifier=[subnet_2_id, subnet_1_id],
        TargetGroupARNs=[Ref(Web_target_group)],
    )
    t.add_output([
        Output(
            "ALBDNS",
            Description="The DNS name for the application load balancer.",
            Value=GetAtt(Web_elb, "DNSName"),
        )
    ])

    # DEVTOOLS CONFIG
    codebuild_service_role = iam.Role(
        "CMSCodeBuildServiceRole",
        t,
        AssumeRolePolicyDocument={
            'Statement': [{
                'Effect': 'Allow',
                'Principal': {'Service': ['codebuild.amazonaws.com']},
                'Action': ['sts:AssumeRole'],
            }]
        },
        Policies=[
            iam.Policy(
                PolicyName="CloudWatchLogsPolicy",
                PolicyDocument=aws.Policy(
                    Version="2012-10-17",
                    Statement=[
                        aws.Statement(
                            Sid='logs',
                            Effect=aws.Allow,
                            Action=[
                                aws_logs.CreateLogGroup,
                                aws_logs.CreateLogStream,
                                aws_logs.PutLogEvents,
                            ],
                            Resource=['*'],
                        )
                    ],
                ),
            ),
            iam.Policy(
                PolicyName="s3AccessPolicy",
                PolicyDocument=aws.Policy(
                    Version="2012-10-17",
                    Statement=[
                        aws.Statement(
                            Sid='codebuilder',
                            Effect=aws.Allow,
                            Action=[
                                aws_s3.PutObject,
                                aws_s3.GetObject,
                                aws_s3.GetObjectVersion,
                                aws_s3.DeleteObject,
                            ],
                            Resource=[
                                GetAtt(app_bucket, 'Arn'),
                                Join('', [GetAtt(app_bucket, 'Arn'), '/*']),
                            ],
                        )
                    ],
                ),
            ),
        ],
    )
    github_repo = Parameter(
        "GithubRepoLink",
        Description="Name of the repository you wish to connect to codebuild.",
        Type="String",
    )
    artifact_key = Parameter(
        "ArtifactKey",
        Description="The key for the artifact that codebuild creates.",
        Type="String",
    )
    t.add_parameter(github_repo)
    t.add_parameter(artifact_key)

    cms_code_build_project = codebuild.Project(
        "CMSBuild",
        t,
        Name="CMS-Build",
        Artifacts=codebuild.Artifacts(
            Location=Ref(app_bucket),
            Name=Ref(artifact_key),
            NamespaceType="BUILD_ID",
            Type="S3",
            Packaging="ZIP",
        ),
        Description="Code build for CMS",
        Environment=codebuild.Environment(
            ComputeType="BUILD_GENERAL1_SMALL",
            Image="aws/codebuild/python:3.6.5",
            Type="LINUX_CONTAINER",
        ),
        ServiceRole=GetAtt(codebuild_service_role, 'Arn'),
        Source=codebuild.Source(
            "CMSSourceCode",
            Auth=codebuild.SourceAuth("GitHubAuth", Type="OAUTH"),
            Location=Ref(github_repo),
            Type="GITHUB",
        ),
        Triggers=codebuild.ProjectTriggers(Webhook=True),
    )
    codedeploy_service_role = iam.Role(
        "CMSDeploymentGroupServiceRole",
        t,
        AssumeRolePolicyDocument={
            'Statement': [{
                'Effect': 'Allow',
                'Principal': {'Service': ['codedeploy.amazonaws.com']},
                'Action': ['sts:AssumeRole'],
            }]
        },
        Policies=[
            iam.Policy(
                PolicyName="CloudWatchLogsPolicy",
                PolicyDocument=aws.Policy(
                    Version="2012-10-17",
                    Statement=[
                        aws.Statement(
                            Sid='logs',
                            Effect=aws.Allow,
                            Action=[
                                aws_logs.CreateLogGroup,
                                aws_logs.CreateLogStream,
                                aws_logs.PutLogEvents,
                            ],
                            Resource=['*'],
                        )
                    ],
                ),
            ),
            iam.Policy(
                PolicyName="s3AccessPolicy",
                PolicyDocument=aws.Policy(
                    Version="2012-10-17",
                    Statement=[
                        aws.Statement(
                            Sid='codebuilder',
                            Effect=aws.Allow,
                            Action=[
                                aws_s3.PutObject,
                                aws_s3.GetObject,
                                aws_s3.GetObjectVersion,
                                aws_s3.DeleteObject,
                            ],
                            Resource=[
                                GetAtt(app_bucket, 'Arn'),
                                Join('', [GetAtt(app_bucket, 'Arn'), '/*']),
                            ],
                        )
                    ],
                ),
            ),
            iam.Policy(
                PolicyName="autoscalingAccess",
                PolicyDocument=aws.Policy(
                    Version="2012-10-17",
                    Statement=[
                        aws.Statement(
                            Sid='codebuilder',
                            Effect=aws.Allow,
                            Action=[
                                aws.Action('autoscaling', '*'),
                                aws.Action('elasticloadbalancing', '*'),
                            ],
                            Resource=['*'],
                        )
                    ],
                ),
            ),
        ],
    )
    cms_codedeploy_application = codedeploy.Application(
        "CMSCodeDeployApplication",
        t,
    )
    cms_deployment_group = codedeploy.DeploymentGroup(
        "CMSDeploymentGroup",
        t,
        DependsOn=[cms_codedeploy_application],
        ApplicationName=Ref(cms_codedeploy_application),
        AutoScalingGroups=[Ref(Web_autoscaler)],
        LoadBalancerInfo=codedeploy.LoadBalancerInfo(
            "CodeDeployLBInfo",
            TargetGroupInfoList=[
                codedeploy.TargetGroupInfoList(
                    "WebTargetGroup",
                    Name=GetAtt(Web_target_group, "TargetGroupName"),
                )
            ],
        ),
        ServiceRoleArn=GetAtt(codedeploy_service_role, 'Arn'),
    )
    print(t.to_yaml())

def get_template(
    puppet_version,
    all_regions,
    source,
    is_caching_enabled,
    is_manual_approvals: bool,
    scm_skip_creation_of_repo: bool,
    should_validate: bool,
) -> t.Template:
    is_codecommit = source.get("Provider", "").lower() == "codecommit"
    is_github = source.get("Provider", "").lower() == "github"
    is_codestarsourceconnection = (
        source.get("Provider", "").lower() == "codestarsourceconnection"
    )
    is_custom = source.get("Provider", "").lower() == "custom"
    is_s3 = source.get("Provider", "").lower() == "s3"

    description = f"""Bootstrap template used to bring up the main ServiceCatalog-Puppet AWS CodePipeline with dependencies
{{"version": "{puppet_version}", "framework": "servicecatalog-puppet", "role": "bootstrap-master"}}"""

    template = t.Template(Description=description)

    version_parameter = template.add_parameter(
        t.Parameter("Version", Default=puppet_version, Type="String")
    )
    org_iam_role_arn_parameter = template.add_parameter(
        t.Parameter("OrgIamRoleArn", Default="None", Type="String")
    )
    with_manual_approvals_parameter = template.add_parameter(
        t.Parameter(
            "WithManualApprovals",
            Type="String",
            AllowedValues=["Yes", "No"],
            Default="No",
        )
    )
    puppet_code_pipeline_role_permission_boundary_parameter = template.add_parameter(
        t.Parameter(
            "PuppetCodePipelineRolePermissionBoundary",
            Type="String",
            Description="IAM Permission Boundary to apply to the PuppetCodePipelineRole",
            Default=awscs_iam.ARN(resource="policy/AdministratorAccess").data,
        )
    )
    source_role_permissions_boundary_parameter = template.add_parameter(
        t.Parameter(
            "SourceRolePermissionsBoundary",
            Type="String",
            Description="IAM Permission Boundary to apply to the SourceRole",
            Default=awscs_iam.ARN(resource="policy/AdministratorAccess").data,
        )
    )
    puppet_generate_role_permission_boundary_parameter = template.add_parameter(
        t.Parameter(
            "PuppetGenerateRolePermissionBoundary",
            Type="String",
            Description="IAM Permission Boundary to apply to the PuppetGenerateRole",
            Default=awscs_iam.ARN(resource="policy/AdministratorAccess").data,
        )
    )
    puppet_deploy_role_permission_boundary_parameter = template.add_parameter(
        t.Parameter(
            "PuppetDeployRolePermissionBoundary",
            Type="String",
            Description="IAM Permission Boundary to apply to the PuppetDeployRole",
            Default=awscs_iam.ARN(resource="policy/AdministratorAccess").data,
        )
    )
    puppet_provisioning_role_permissions_boundary_parameter = template.add_parameter(
        t.Parameter(
            "PuppetProvisioningRolePermissionsBoundary",
            Type="String",
            Description="IAM Permission Boundary to apply to the PuppetProvisioningRole",
            Default=awscs_iam.ARN(resource="policy/AdministratorAccess").data,
        )
    )
    cloud_formation_deploy_role_permissions_boundary_parameter = template.add_parameter(
        t.Parameter(
            "CloudFormationDeployRolePermissionsBoundary",
            Type="String",
            Description="IAM Permission Boundary to apply to the CloudFormationDeployRole",
            Default=awscs_iam.ARN(resource="policy/AdministratorAccess").data,
        )
    )
    deploy_environment_compute_type_parameter = template.add_parameter(
        t.Parameter(
            "DeployEnvironmentComputeType",
            Type="String",
            Description="The AWS CodeBuild Environment Compute Type",
            Default="BUILD_GENERAL1_SMALL",
        )
    )
    spoke_deploy_environment_compute_type_parameter = template.add_parameter(
        t.Parameter(
            "SpokeDeployEnvironmentComputeType",
            Type="String",
            Description="The AWS CodeBuild Environment Compute Type for spoke execution mode",
            Default="BUILD_GENERAL1_SMALL",
        )
    )
    deploy_num_workers_parameter = template.add_parameter(
        t.Parameter(
            "DeployNumWorkers",
            Type="Number",
            Description="Number of workers that should be used when running a deploy",
            Default=10,
        )
    )
    puppet_role_name_parameter = template.add_parameter(
        t.Parameter("PuppetRoleName", Type="String", Default="PuppetRole")
    )
    puppet_role_path_template_parameter = template.add_parameter(
        t.Parameter("PuppetRolePath", Type="String", Default="/servicecatalog-puppet/")
    )

    template.add_condition(
        "ShouldUseOrgs", t.Not(t.Equals(t.Ref(org_iam_role_arn_parameter), "None"))
    )
    template.add_condition(
        "HasManualApprovals", t.Equals(t.Ref(with_manual_approvals_parameter), "Yes")
    )

    template.add_resource(
        s3.Bucket(
            "StacksRepository",
            BucketName=t.Sub("sc-puppet-stacks-repository-${AWS::AccountId}"),
            VersioningConfiguration=s3.VersioningConfiguration(Status="Enabled"),
            BucketEncryption=s3.BucketEncryption(
                ServerSideEncryptionConfiguration=[
                    s3.ServerSideEncryptionRule(
                        ServerSideEncryptionByDefault=s3.ServerSideEncryptionByDefault(
                            SSEAlgorithm="AES256"
                        )
                    )
                ]
            ),
            PublicAccessBlockConfiguration=s3.PublicAccessBlockConfiguration(
                BlockPublicAcls=True,
                BlockPublicPolicy=True,
                IgnorePublicAcls=True,
                RestrictPublicBuckets=True,
            ),
            Tags=t.Tags({"ServiceCatalogPuppet:Actor": "Framework"}),
        )
    )

    manual_approvals_param = template.add_resource(
        ssm.Parameter(
            "ManualApprovalsParam",
            Type="String",
            Name="/servicecatalog-puppet/manual-approvals",
            Value=t.Ref(with_manual_approvals_parameter),
        )
    )
    template.add_resource(
        ssm.Parameter(
            "SpokeDeployEnvParameter",
            Type="String",
            Name=constants.SPOKE_EXECUTION_MODE_DEPLOY_ENV_PARAMETER_NAME,
            Value=t.Ref(spoke_deploy_environment_compute_type_parameter),
        )
    )
    param = template.add_resource(
        ssm.Parameter(
            "Param",
            Type="String",
            Name="service-catalog-puppet-version",
            Value=t.Ref(version_parameter),
        )
    )
    partition_parameter = template.add_resource(
        ssm.Parameter(
            "PartitionParameter",
            Type="String",
            Name="/servicecatalog-puppet/partition",
            Value=t.Ref("AWS::Partition"),
        )
    )
    puppet_role_name_parameter = template.add_resource(
        ssm.Parameter(
            "PuppetRoleNameParameter",
            Type="String",
            Name="/servicecatalog-puppet/puppet-role/name",
            Value=t.Ref(puppet_role_name_parameter),
        )
    )
    puppet_role_path_parameter = template.add_resource(
        ssm.Parameter(
            "PuppetRolePathParameter",
            Type="String",
            Name="/servicecatalog-puppet/puppet-role/path",
            Value=t.Ref(puppet_role_path_template_parameter),
        )
    )

    share_accept_function_role = template.add_resource(
        iam.Role(
            "ShareAcceptFunctionRole",
            RoleName="ShareAcceptFunctionRole",
            ManagedPolicyArns=[
                t.Sub(
                    "arn:${AWS::Partition}:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole"
                )
            ],
            Path=t.Ref(puppet_role_path_template_parameter),
            Policies=[
                iam.Policy(
                    PolicyName="ServiceCatalogActions",
                    PolicyDocument={
                        "Version": "2012-10-17",
                        "Statement": [
                            {
                                "Action": ["sts:AssumeRole"],
                                "Resource": {
                                    "Fn::Sub": "arn:${AWS::Partition}:iam::*:role${PuppetRolePath}${PuppetRoleName}"
                                },
                                "Effect": "Allow",
                            }
                        ],
                    },
                )
            ],
            AssumeRolePolicyDocument={
                "Version": "2012-10-17",
                "Statement": [
                    {
                        "Action": ["sts:AssumeRole"],
                        "Effect": "Allow",
                        "Principal": {"Service": ["lambda.amazonaws.com"]},
                    }
                ],
            },
        )
    )

    provisioning_role = template.add_resource(
        iam.Role(
            "ProvisioningRole",
            RoleName="PuppetProvisioningRole",
            AssumeRolePolicyDocument={
                "Version": "2012-10-17",
                "Statement": [
                    {
                        "Action": ["sts:AssumeRole"],
                        "Effect": "Allow",
                        "Principal": {"Service": ["codebuild.amazonaws.com"]},
                    },
                    {
                        "Action": ["sts:AssumeRole"],
                        "Effect": "Allow",
                        "Principal": {"AWS": {"Fn::Sub": "${AWS::AccountId}"}},
                    },
                ],
            },
            ManagedPolicyArns=[
                t.Sub("arn:${AWS::Partition}:iam::aws:policy/AdministratorAccess")
            ],
            PermissionsBoundary=t.Ref(
                puppet_provisioning_role_permissions_boundary_parameter
            ),
            Path=t.Ref(puppet_role_path_template_parameter),
        )
    )

    cloud_formation_deploy_role = template.add_resource(
        iam.Role(
            "CloudFormationDeployRole",
            RoleName="CloudFormationDeployRole",
            AssumeRolePolicyDocument={
                "Version": "2012-10-17",
                "Statement": [
                    {
                        "Action": ["sts:AssumeRole"],
                        "Effect": "Allow",
                        "Principal": {"Service": ["cloudformation.amazonaws.com"]},
                    },
                    {
                        "Action": ["sts:AssumeRole"],
                        "Effect": "Allow",
                        "Principal": {"AWS": {"Fn::Sub": "${AWS::AccountId}"}},
                    },
                ],
            },
            ManagedPolicyArns=[
                t.Sub("arn:${AWS::Partition}:iam::aws:policy/AdministratorAccess")
            ],
            PermissionsBoundary=t.Ref(
                cloud_formation_deploy_role_permissions_boundary_parameter
            ),
            Path=t.Ref(puppet_role_path_template_parameter),
        )
    )

    pipeline_role = template.add_resource(
        iam.Role(
            "PipelineRole",
            RoleName="PuppetCodePipelineRole",
            AssumeRolePolicyDocument={
                "Version": "2012-10-17",
                "Statement": [
                    {
                        "Action": ["sts:AssumeRole"],
                        "Effect": "Allow",
                        "Principal": {"Service": ["codepipeline.amazonaws.com"]},
                    }
                ],
            },
            ManagedPolicyArns=[
                t.Sub("arn:${AWS::Partition}:iam::aws:policy/AdministratorAccess")
            ],
            PermissionsBoundary=t.Ref(
                puppet_code_pipeline_role_permission_boundary_parameter
            ),
            Path=t.Ref(puppet_role_path_template_parameter),
        )
    )

    source_role = template.add_resource(
        iam.Role(
            "SourceRole",
            RoleName="PuppetSourceRole",
            AssumeRolePolicyDocument={
                "Version": "2012-10-17",
                "Statement": [
                    {
                        "Action": ["sts:AssumeRole"],
                        "Effect": "Allow",
                        "Principal": {"Service": ["codepipeline.amazonaws.com"]},
                    },
                    {
                        "Action": ["sts:AssumeRole"],
                        "Effect": "Allow",
                        "Principal": {
                            "AWS": {
                                "Fn::Sub": "arn:${AWS::Partition}:iam::${AWS::AccountId}:root"
                            }
                        },
                    },
                ],
            },
            ManagedPolicyArns=[
                t.Sub("arn:${AWS::Partition}:iam::aws:policy/AdministratorAccess")
            ],
            PermissionsBoundary=t.Ref(source_role_permissions_boundary_parameter),
            Path=t.Ref(puppet_role_path_template_parameter),
        )
    )

    dry_run_notification_topic = template.add_resource(
        sns.Topic(
            "DryRunNotificationTopic",
            DisplayName="service-catalog-puppet-dry-run-approvals",
            TopicName="service-catalog-puppet-dry-run-approvals",
            Condition="HasManualApprovals",
        )
    )

    deploy_role = template.add_resource(
        iam.Role(
            "DeployRole",
            RoleName="PuppetDeployRole",
            AssumeRolePolicyDocument={
                "Version": "2012-10-17",
                "Statement": [
                    {
                        "Action": ["sts:AssumeRole"],
                        "Effect": "Allow",
                        "Principal": {"Service": ["codebuild.amazonaws.com"]},
                    }
                ],
            },
            ManagedPolicyArns=[
                t.Sub("arn:${AWS::Partition}:iam::aws:policy/AdministratorAccess")
            ],
            PermissionsBoundary=t.Ref(puppet_deploy_role_permission_boundary_parameter),
            Path=t.Ref(puppet_role_path_template_parameter),
        )
    )

    num_workers_ssm_parameter = template.add_resource(
        ssm.Parameter(
            "NumWorkersSSMParameter",
            Type="String",
            Name="/servicecatalog-puppet/deploy/num-workers",
            Value=t.Sub("${DeployNumWorkers}"),
        )
    )

    parameterised_source_bucket = template.add_resource(
        s3.Bucket(
            "ParameterisedSourceBucket",
            PublicAccessBlockConfiguration=s3.PublicAccessBlockConfiguration(
                IgnorePublicAcls=True,
                BlockPublicPolicy=True,
                BlockPublicAcls=True,
                RestrictPublicBuckets=True,
            ),
            BucketEncryption=s3.BucketEncryption(
                ServerSideEncryptionConfiguration=[
                    s3.ServerSideEncryptionRule(
                        ServerSideEncryptionByDefault=s3.ServerSideEncryptionByDefault(
                            SSEAlgorithm="AES256"
                        )
                    )
                ]
            ),
            Tags=t.Tags.from_dict(**{"ServiceCatalogPuppet:Actor": "Framework"}),
            BucketName=t.Sub("sc-puppet-parameterised-runs-${AWS::AccountId}"),
            VersioningConfiguration=s3.VersioningConfiguration(Status="Enabled"),
        )
    )

    source_stage = codepipeline.Stages(
        Name="Source",
        Actions=[
            codepipeline.Actions(
                RunOrder=1,
                RoleArn=t.GetAtt("SourceRole", "Arn"),
                ActionTypeId=codepipeline.ActionTypeId(
                    Category="Source",
                    Owner="AWS",
                    Version="1",
                    Provider="S3",
                ),
                OutputArtifacts=[
                    codepipeline.OutputArtifacts(Name="ParameterisedSource")
                ],
                Configuration={
                    "S3Bucket": t.Ref(parameterised_source_bucket),
                    "S3ObjectKey": "parameters.zip",
                    "PollForSourceChanges": True,
                },
                Name="ParameterisedSource",
            )
        ],
    )

    install_spec = {
        "runtime-versions": dict(python="3.7"),
        "commands": [
            f"pip install {puppet_version}"
            if "http" in puppet_version
            else f"pip install aws-service-catalog-puppet=={puppet_version}",
        ],
    }

    deploy_env_vars = [
        {
            "Type": "PLAINTEXT",
            "Name": "PUPPET_ACCOUNT_ID",
            "Value": t.Ref("AWS::AccountId"),
        },
        {
            "Type": "PLAINTEXT",
            "Name": "PUPPET_REGION",
            "Value": t.Ref("AWS::Region"),
        },
        {
            "Type": "PARAMETER_STORE",
            "Name": "PARTITION",
            "Value": t.Ref(partition_parameter),
        },
        {
            "Type": "PARAMETER_STORE",
            "Name": "PUPPET_ROLE_NAME",
            "Value": t.Ref(puppet_role_name_parameter),
        },
        {
            "Type": "PARAMETER_STORE",
            "Name": "PUPPET_ROLE_PATH",
            "Value": t.Ref(puppet_role_path_parameter),
        },
    ]

    if is_codecommit:
        template.add_resource(
            codecommit.Repository(
                "CodeRepo",
                RepositoryName=source.get("Configuration").get("RepositoryName"),
                RepositoryDescription="Repo to store the servicecatalog puppet solution",
                DeletionPolicy="Retain",
            )
        )
        source_stage.Actions.append(
            codepipeline.Actions(
                RunOrder=1,
                RoleArn=t.GetAtt("SourceRole", "Arn"),
                ActionTypeId=codepipeline.ActionTypeId(
                    Category="Source",
                    Owner="AWS",
                    Version="1",
                    Provider="CodeCommit",
                ),
                OutputArtifacts=[codepipeline.OutputArtifacts(Name="Source")],
                Configuration={
                    "RepositoryName": source.get("Configuration").get("RepositoryName"),
                    "BranchName": source.get("Configuration").get("BranchName"),
                    "PollForSourceChanges": source.get("Configuration").get(
                        "PollForSourceChanges", True
                    ),
                },
                Name="Source",
            )
        )

    if is_github:
        source_stage.Actions.append(
            codepipeline.Actions(
                RunOrder=1,
                ActionTypeId=codepipeline.ActionTypeId(
                    Category="Source",
                    Owner="ThirdParty",
                    Version="1",
                    Provider="GitHub",
                ),
                OutputArtifacts=[codepipeline.OutputArtifacts(Name="Source")],
                Configuration={
                    "Owner": source.get("Configuration").get("Owner"),
                    "Repo": source.get("Configuration").get("Repo"),
                    "Branch": source.get("Configuration").get("Branch"),
                    "OAuthToken": t.Join(
                        "",
                        [
                            "{{resolve:secretsmanager:",
                            source.get("Configuration").get("SecretsManagerSecret"),
                            ":SecretString:OAuthToken}}",
                        ],
                    ),
                    "PollForSourceChanges": source.get("Configuration").get(
                        "PollForSourceChanges"
                    ),
                },
                Name="Source",
            )
        )

    if is_custom:
        source_stage.Actions.append(
            codepipeline.Actions(
                RunOrder=1,
                ActionTypeId=codepipeline.ActionTypeId(
                    Category="Source",
                    Owner="Custom",
                    Version=source.get("Configuration").get("CustomActionTypeVersion"),
                    Provider=source.get("Configuration").get("CustomActionTypeProvider"),
                ),
                OutputArtifacts=[codepipeline.OutputArtifacts(Name="Source")],
                Configuration={
                    "GitUrl": source.get("Configuration").get("GitUrl"),
                    "Branch": source.get("Configuration").get("Branch"),
                    "PipelineName": t.Sub("${AWS::StackName}-pipeline"),
                },
                Name="Source",
            )
        )
        webhook = codepipeline.Webhook(
            "Webhook",
            Authentication="IP",
            TargetAction="Source",
            AuthenticationConfiguration=codepipeline.WebhookAuthConfiguration(
                AllowedIPRange=source.get("Configuration").get("GitWebHookIpAddress")
            ),
            Filters=[
                codepipeline.WebhookFilterRule(
                    JsonPath="$.changes[0].ref.id",
                    MatchEquals="refs/heads/{Branch}",
                )
            ],
            TargetPipelineVersion=1,
            TargetPipeline=t.Sub("${AWS::StackName}-pipeline"),
        )
        template.add_resource(webhook)
        values_for_sub = {
            "GitUrl": source.get("Configuration").get("GitUrl"),
            "WebhookUrl": t.GetAtt(webhook, "Url"),
        }
        output_to_add = t.Output("WebhookUrl")
        output_to_add.Value = t.Sub("${GitUrl}||${WebhookUrl}", **values_for_sub)
        output_to_add.Export = t.Export(t.Sub("${AWS::StackName}-pipeline"))
        template.add_output(output_to_add)

    if is_codestarsourceconnection:
        source_stage.Actions.append(
            codepipeline.Actions(
                RunOrder=1,
                RoleArn=t.GetAtt("SourceRole", "Arn"),
                ActionTypeId=codepipeline.ActionTypeId(
                    Category="Source",
                    Owner="AWS",
                    Version="1",
                    Provider="CodeStarSourceConnection",
                ),
                OutputArtifacts=[codepipeline.OutputArtifacts(Name="Source")],
                Configuration={
                    "ConnectionArn": source.get("Configuration").get("ConnectionArn"),
                    "FullRepositoryId": source.get("Configuration").get("FullRepositoryId"),
                    "BranchName": source.get("Configuration").get("BranchName"),
                    "OutputArtifactFormat": source.get("Configuration").get(
                        "OutputArtifactFormat"
                    ),
                },
                Name="Source",
            )
        )

    if is_s3:
        bucket_name = source.get("Configuration").get("S3Bucket")
        if not scm_skip_creation_of_repo:
            template.add_resource(
                s3.Bucket(
                    bucket_name,
                    PublicAccessBlockConfiguration=s3.PublicAccessBlockConfiguration(
                        IgnorePublicAcls=True,
                        BlockPublicPolicy=True,
                        BlockPublicAcls=True,
                        RestrictPublicBuckets=True,
                    ),
                    BucketEncryption=s3.BucketEncryption(
                        ServerSideEncryptionConfiguration=[
                            s3.ServerSideEncryptionRule(
                                ServerSideEncryptionByDefault=s3.ServerSideEncryptionByDefault(
                                    SSEAlgorithm="AES256"
                                )
                            )
                        ]
                    ),
                    Tags=t.Tags.from_dict(**{"ServiceCatalogPuppet:Actor": "Framework"}),
                    BucketName=bucket_name,
                    VersioningConfiguration=s3.VersioningConfiguration(Status="Enabled"),
                )
            )
        source_stage.Actions.append(
            codepipeline.Actions(
                RunOrder=1,
                ActionTypeId=codepipeline.ActionTypeId(
                    Category="Source",
                    Owner="AWS",
                    Version="1",
                    Provider="S3",
                ),
                OutputArtifacts=[codepipeline.OutputArtifacts(Name="Source")],
                Configuration={
                    "S3Bucket": bucket_name,
                    "S3ObjectKey": source.get("Configuration").get("S3ObjectKey"),
                    "PollForSourceChanges": source.get("Configuration").get(
                        "PollForSourceChanges"
                    ),
                },
                Name="Source",
            )
        )

    single_account_run_project_build_spec = dict(
        version=0.2,
        phases=dict(
            install=install_spec,
            build={
                "commands": [
                    'echo "single_account: \\"${SINGLE_ACCOUNT_ID}\\"" > parameters.yaml',
                    "cat parameters.yaml",
                    "zip parameters.zip parameters.yaml",
                    "aws s3 cp parameters.zip s3://sc-puppet-parameterised-runs-${PUPPET_ACCOUNT_ID}/parameters.zip",
                ]
            },
            post_build={
                "commands": [
                    "servicecatalog-puppet wait-for-parameterised-run-to-complete",
                ]
            },
        ),
        artifacts=dict(
            name="DeployProject",
            files=[
                "ServiceCatalogPuppet/manifest.yaml",
                "ServiceCatalogPuppet/manifest-expanded.yaml",
                "results/*/*",
                "output/*/*",
                "exploded_results/*/*",
                "tasks.log",
            ],
        ),
    )

    single_account_run_project_args = dict(
        Name="servicecatalog-puppet-single-account-run",
        Description="Runs puppet for a single account - SINGLE_ACCOUNT_ID",
        ServiceRole=t.GetAtt(deploy_role, "Arn"),
        Tags=t.Tags.from_dict(**{"ServiceCatalogPuppet:Actor": "Framework"}),
        Artifacts=codebuild.Artifacts(Type="NO_ARTIFACTS"),
        TimeoutInMinutes=480,
        Environment=codebuild.Environment(
            ComputeType=t.Ref(deploy_environment_compute_type_parameter),
            Image="aws/codebuild/standard:4.0",
            Type="LINUX_CONTAINER",
            EnvironmentVariables=[
                {
                    "Type": "PLAINTEXT",
                    "Name": "SINGLE_ACCOUNT_ID",
                    "Value": "CHANGE_ME",
                },
            ] + deploy_env_vars,
        ),
        Source=codebuild.Source(
            Type="NO_SOURCE",
            BuildSpec=yaml.safe_dump(single_account_run_project_build_spec),
        ),
    )
    single_account_run_project = template.add_resource(
        codebuild.Project("SingleAccountRunProject", **single_account_run_project_args)
    )

    single_account_run_project_build_spec["phases"]["post_build"]["commands"] = [
        "servicecatalog-puppet wait-for-parameterised-run-to-complete --on-complete-url $CALLBACK_URL"
    ]
    single_account_run_project_args["Name"] = "servicecatalog-puppet-single-account-run-with-callback"
    single_account_run_project_args["Description"] = (
        "Runs puppet for a single account - SINGLE_ACCOUNT_ID and then does a http put"
    )
    single_account_run_project_args.get("Environment").EnvironmentVariables.append(
        {
            "Type": "PLAINTEXT",
            "Name": "CALLBACK_URL",
            "Value": "CHANGE_ME",
        }
    )
    single_account_run_project_args["Source"] = codebuild.Source(
        Type="NO_SOURCE",
        BuildSpec=yaml.safe_dump(single_account_run_project_build_spec),
    )
    single_account_run_project_with_callback = template.add_resource(
        codebuild.Project(
            "SingleAccountRunWithCallbackProject", **single_account_run_project_args
        )
    )

    stages = [source_stage]

    if should_validate:
        template.add_resource(
            codebuild.Project(
                "ValidateProject",
                Name="servicecatalog-puppet-validate",
                ServiceRole=t.GetAtt("DeployRole", "Arn"),
                Tags=t.Tags.from_dict(**{"ServiceCatalogPuppet:Actor": "Framework"}),
                Artifacts=codebuild.Artifacts(Type="CODEPIPELINE"),
                TimeoutInMinutes=60,
                Environment=codebuild.Environment(
                    ComputeType="BUILD_GENERAL1_SMALL",
                    Image="aws/codebuild/standard:4.0",
                    Type="LINUX_CONTAINER",
                ),
                Source=codebuild.Source(
                    BuildSpec=yaml.safe_dump(
                        dict(
                            version="0.2",
                            phases={
                                "install": {
                                    "runtime-versions": {"python": "3.7"},
                                    "commands": [
                                        f"pip install {puppet_version}"
                                        if "http" in puppet_version
                                        else f"pip install aws-service-catalog-puppet=={puppet_version}",
                                    ],
                                },
                                "build": {
                                    "commands": [
                                        "servicecatalog-puppet validate manifest.yaml"
                                    ]
                                },
                            },
                        )
                    ),
                    Type="CODEPIPELINE",
                ),
                Description="Validate the manifest.yaml file",
            )
        )
        stages.append(
            codepipeline.Stages(
                Name="Validate",
                Actions=[
                    codepipeline.Actions(
                        InputArtifacts=[
                            codepipeline.InputArtifacts(Name="Source"),
                        ],
                        Name="Validate",
                        ActionTypeId=codepipeline.ActionTypeId(
                            Category="Build",
                            Owner="AWS",
                            Version="1",
                            Provider="CodeBuild",
                        ),
                        OutputArtifacts=[
                            codepipeline.OutputArtifacts(Name="ValidateProject")
                        ],
                        Configuration={
                            "ProjectName": t.Ref("ValidateProject"),
                            "PrimarySource": "Source",
                        },
                        RunOrder=1,
                    ),
                ],
            )
        )

    if is_manual_approvals:
        deploy_stage = codepipeline.Stages(
            Name="Deploy",
            Actions=[
                codepipeline.Actions(
                    InputArtifacts=[
                        codepipeline.InputArtifacts(Name="Source"),
                        codepipeline.InputArtifacts(Name="ParameterisedSource"),
                    ],
                    Name="DryRun",
                    ActionTypeId=codepipeline.ActionTypeId(
                        Category="Build",
                        Owner="AWS",
                        Version="1",
                        Provider="CodeBuild",
                    ),
                    OutputArtifacts=[
                        codepipeline.OutputArtifacts(Name="DryRunProject")
                    ],
                    Configuration={
                        "ProjectName": t.Ref("DryRunProject"),
                        "PrimarySource": "Source",
                    },
                    RunOrder=1,
                ),
                codepipeline.Actions(
                    ActionTypeId=codepipeline.ActionTypeId(
                        Category="Approval",
                        Owner="AWS",
                        Version="1",
                        Provider="Manual",
                    ),
                    Configuration={
                        "NotificationArn": t.Ref("DryRunNotificationTopic"),
                        "CustomData": "Approve when you are happy with the dry run.",
                    },
                    Name="DryRunApproval",
                    RunOrder=2,
                ),
                codepipeline.Actions(
                    InputArtifacts=[
                        codepipeline.InputArtifacts(Name="Source"),
                        codepipeline.InputArtifacts(Name="ParameterisedSource"),
                    ],
                    Name="Deploy",
                    ActionTypeId=codepipeline.ActionTypeId(
                        Category="Build",
                        Owner="AWS",
                        Version="1",
                        Provider="CodeBuild",
                    ),
                    OutputArtifacts=[
                        codepipeline.OutputArtifacts(Name="DeployProject")
                    ],
                    Configuration={
                        "ProjectName": t.Ref("DeployProject"),
                        "PrimarySource": "Source",
                    },
                    RunOrder=3,
                ),
            ],
        )
    else:
        deploy_stage = codepipeline.Stages(
            Name="Deploy",
            Actions=[
                codepipeline.Actions(
                    InputArtifacts=[
                        codepipeline.InputArtifacts(Name="Source"),
                        codepipeline.InputArtifacts(Name="ParameterisedSource"),
                    ],
                    Name="Deploy",
                    ActionTypeId=codepipeline.ActionTypeId(
                        Category="Build",
                        Owner="AWS",
                        Version="1",
                        Provider="CodeBuild",
                    ),
                    OutputArtifacts=[
                        codepipeline.OutputArtifacts(Name="DeployProject")
                    ],
                    Configuration={
                        "ProjectName": t.Ref("DeployProject"),
                        "PrimarySource": "Source",
                        "EnvironmentVariables": '[{"name":"EXECUTION_ID","value":"#{codepipeline.PipelineExecutionId}","type":"PLAINTEXT"}]',
                    },
                    RunOrder=1,
                ),
            ],
        )

    stages.append(deploy_stage)

    pipeline = template.add_resource(
        codepipeline.Pipeline(
            "Pipeline",
            RoleArn=t.GetAtt("PipelineRole", "Arn"),
            Stages=stages,
            Name=t.Sub("${AWS::StackName}-pipeline"),
            ArtifactStore=codepipeline.ArtifactStore(
                Type="S3",
                Location=t.Sub(
                    "sc-puppet-pipeline-artifacts-${AWS::AccountId}-${AWS::Region}"
                ),
            ),
            RestartExecutionOnUpdate=True,
        )
    )

    if is_github:
        template.add_resource(
            codepipeline.Webhook(
                "Webhook",
                AuthenticationConfiguration=codepipeline.WebhookAuthConfiguration(
                    SecretToken=t.Join(
                        "",
                        [
                            "{{resolve:secretsmanager:",
                            source.get("Configuration").get("SecretsManagerSecret"),
                            ":SecretString:SecretToken}}",
                        ],
                    )
                ),
                Filters=[
                    codepipeline.WebhookFilterRule(
                        JsonPath="$.ref",
                        MatchEquals="refs/heads/" + source.get("Configuration").get("Branch"),
                    )
                ],
                Authentication="GITHUB_HMAC",
                TargetPipeline=t.Ref(pipeline),
                TargetAction="Source",
                Name=t.Sub("${AWS::StackName}-webhook"),
                TargetPipelineVersion=t.GetAtt(pipeline, "Version"),
                RegisterWithThirdParty="true",
            )
        )

    deploy_project_build_spec = dict(
        version=0.2,
        phases=dict(
            install={
                "runtime-versions": dict(python="3.7"),
                "commands": [
                    f"pip install {puppet_version}"
                    if "http" in puppet_version
                    else f"pip install aws-service-catalog-puppet=={puppet_version}",
                ],
            },
            pre_build={
                "commands": [
                    "servicecatalog-puppet --info expand --parameter-override-file $CODEBUILD_SRC_DIR_ParameterisedSource/parameters.yaml manifest.yaml",
                ]
            },
            build={
                "commands": [
                    "servicecatalog-puppet --info deploy --num-workers ${NUM_WORKERS} manifest-expanded.yaml",
                ]
            },
        ),
        artifacts=dict(
            name="DeployProject",
            files=[
                "manifest-expanded.yaml",
                "results/*/*",
                "output/*/*",
                "exploded_results/*/*",
                "tasks.log",
            ],
        ),
    )

    deploy_project_args = dict(
        Name="servicecatalog-puppet-deploy",
        ServiceRole=t.GetAtt(deploy_role, "Arn"),
        Tags=t.Tags.from_dict(**{"ServiceCatalogPuppet:Actor": "Framework"}),
        Artifacts=codebuild.Artifacts(Type="CODEPIPELINE"),
        TimeoutInMinutes=480,
        Environment=codebuild.Environment(
            ComputeType=t.Ref(deploy_environment_compute_type_parameter),
            Image="aws/codebuild/standard:4.0",
            Type="LINUX_CONTAINER",
            EnvironmentVariables=[
                {
                    "Type": "PARAMETER_STORE",
                    "Name": "NUM_WORKERS",
                    "Value": t.Ref(num_workers_ssm_parameter),
                },
                {
                    "Type": "PARAMETER_STORE",
                    "Name": "SPOKE_EXECUTION_MODE_DEPLOY_ENV",
                    "Value": constants.SPOKE_EXECUTION_MODE_DEPLOY_ENV_PARAMETER_NAME,
                },
            ] + deploy_env_vars,
        ),
        Source=codebuild.Source(
            Type="CODEPIPELINE",
            BuildSpec=yaml.safe_dump(deploy_project_build_spec),
        ),
        Description="deploys out the products to be deployed",
    )
    deploy_project = template.add_resource(
        codebuild.Project("DeployProject", **deploy_project_args)
    )

    deploy_project_build_spec["phases"]["build"]["commands"] = [
        "servicecatalog-puppet --info dry-run manifest-expanded.yaml"
    ]
    deploy_project_build_spec["artifacts"]["name"] = "DryRunProject"
    deploy_project_args["Name"] = "servicecatalog-puppet-dryrun"
    deploy_project_args["Description"] = "dry run of servicecatalog-puppet-dryrun"
    deploy_project_args["Source"] = codebuild.Source(
        Type="CODEPIPELINE",
        BuildSpec=yaml.safe_dump(deploy_project_build_spec),
    )
    dry_run_project = template.add_resource(
        codebuild.Project("DryRunProject", **deploy_project_args)
    )

    bootstrap_project = template.add_resource(
        codebuild.Project(
            "BootstrapProject",
            Name="servicecatalog-puppet-bootstrap-spokes-in-ou",
            ServiceRole=t.GetAtt("DeployRole", "Arn"),
            Tags=t.Tags.from_dict(**{"ServiceCatalogPuppet:Actor": "Framework"}),
            Artifacts=codebuild.Artifacts(Type="NO_ARTIFACTS"),
            TimeoutInMinutes=60,
            Environment=codebuild.Environment(
                ComputeType="BUILD_GENERAL1_SMALL",
                Image="aws/codebuild/standard:4.0",
                Type="LINUX_CONTAINER",
                EnvironmentVariables=[
                    {"Type": "PLAINTEXT", "Name": "OU_OR_PATH", "Value": "CHANGE_ME"},
                    {
                        "Type": "PLAINTEXT",
                        "Name": "IAM_ROLE_NAME",
                        "Value": "OrganizationAccountAccessRole",
                    },
                    {"Type": "PLAINTEXT", "Name": "IAM_ROLE_ARNS", "Value": ""},
                ],
            ),
            Source=codebuild.Source(
                BuildSpec="version: 0.2\nphases:\n  install:\n    runtime-versions:\n      python: 3.7\n    commands:\n      - pip install aws-service-catalog-puppet\n  build:\n    commands:\n      - servicecatalog-puppet bootstrap-spokes-in-ou $OU_OR_PATH $IAM_ROLE_NAME $IAM_ROLE_ARNS\nartifacts:\n  files:\n    - results/*/*\n    - output/*/*\n  name: BootstrapProject\n",
                Type="NO_SOURCE",
            ),
            Description="Bootstrap all the accounts in an OU",
        )
    )
    template.add_resource(
        codebuild.Project(
            "BootstrapASpokeProject",
            Name="servicecatalog-puppet-bootstrap-spoke",
            ServiceRole=t.GetAtt("DeployRole", "Arn"),
            Tags=t.Tags.from_dict(**{"ServiceCatalogPuppet:Actor": "Framework"}),
            Artifacts=codebuild.Artifacts(Type="NO_ARTIFACTS"),
            TimeoutInMinutes=60,
            Environment=codebuild.Environment(
                ComputeType="BUILD_GENERAL1_SMALL",
                Image="aws/codebuild/standard:4.0",
                Type="LINUX_CONTAINER",
                EnvironmentVariables=[
                    {
                        "Type": "PLAINTEXT",
                        "Name": "PUPPET_ACCOUNT_ID",
                        "Value": t.Sub("${AWS::AccountId}"),
                    },
                    {
                        "Type": "PLAINTEXT",
                        "Name": "ORGANIZATION_ACCOUNT_ACCESS_ROLE_ARN",
                        "Value": "CHANGE_ME",
                    },
                    {
                        "Type": "PLAINTEXT",
                        "Name": "ASSUMABLE_ROLE_IN_ROOT_ACCOUNT",
                        "Value": "CHANGE_ME",
                    },
                ],
            ),
            Source=codebuild.Source(
                BuildSpec=yaml.safe_dump(
                    dict(
                        version=0.2,
                        phases=dict(
                            install=install_spec,
                            build={
                                "commands": [
                                    "servicecatalog-puppet bootstrap-spoke-as ${PUPPET_ACCOUNT_ID} ${ASSUMABLE_ROLE_IN_ROOT_ACCOUNT} ${ORGANIZATION_ACCOUNT_ACCESS_ROLE_ARN}"
                                ]
                            },
                        ),
                    )
                ),
                Type="NO_SOURCE",
            ),
            Description="Bootstrap given account as a spoke",
        )
    )

    cloud_formation_events_queue = template.add_resource(
        sqs.Queue(
            "CloudFormationEventsQueue",
            QueueName="servicecatalog-puppet-cloudformation-events",
            Tags=t.Tags.from_dict(**{"ServiceCatalogPuppet:Actor": "Framework"}),
        )
    )
    cloud_formation_events_queue_policy = template.add_resource(
        sqs.QueuePolicy(
            "CloudFormationEventsQueuePolicy",
            Queues=[t.Ref(cloud_formation_events_queue)],
            PolicyDocument={
                "Id": "AllowSNS",
                "Version": "2012-10-17",
                "Statement": [
                    {
                        "Sid": "allow-send-message",
                        "Effect": "Allow",
                        "Principal": {"AWS": "*"},
                        "Action": ["sqs:SendMessage"],
                        "Resource": "*",
                        "Condition": {
                            "ArnEquals": {
                                "aws:SourceArn": t.Sub(
                                    "arn:${AWS::Partition}:sns:*:${AWS::AccountId}:servicecatalog-puppet-cloudformation-regional-events"
                                )
                            }
                        },
                    }
                ],
            },
        )
    )

    spoke_deploy_bucket = template.add_resource(
        s3.Bucket(
            "SpokeDeployBucket",
            PublicAccessBlockConfiguration=s3.PublicAccessBlockConfiguration(
                IgnorePublicAcls=True,
                BlockPublicPolicy=True,
                BlockPublicAcls=True,
                RestrictPublicBuckets=True,
            ),
            BucketEncryption=s3.BucketEncryption(
                ServerSideEncryptionConfiguration=[
                    s3.ServerSideEncryptionRule(
                        ServerSideEncryptionByDefault=s3.ServerSideEncryptionByDefault(
                            SSEAlgorithm="AES256"
                        )
                    )
                ]
            ),
            Tags=t.Tags.from_dict(**{"ServiceCatalogPuppet:Actor": "Framework"}),
            BucketName=t.Sub("sc-puppet-spoke-deploy-${AWS::AccountId}"),
            VersioningConfiguration=s3.VersioningConfiguration(Status="Enabled"),
        )
    )
    caching_bucket = template.add_resource(
        s3.Bucket(
            "CachingBucket",
            PublicAccessBlockConfiguration=s3.PublicAccessBlockConfiguration(
                BlockPublicAcls=True,
                BlockPublicPolicy=True,
                IgnorePublicAcls=True,
                RestrictPublicBuckets=True,
            ),
            BucketEncryption=s3.BucketEncryption(
                ServerSideEncryptionConfiguration=[
                    s3.ServerSideEncryptionRule(
                        ServerSideEncryptionByDefault=s3.ServerSideEncryptionByDefault(
                            SSEAlgorithm="AES256"
                        )
                    )
                ]
            ),
            Tags=t.Tags.from_dict(**{"ServiceCatalogPuppet:Actor": "Framework"}),
            BucketName=t.Sub(
                "sc-puppet-caching-bucket-${AWS::AccountId}-${AWS::Region}"
            ),
            VersioningConfiguration=s3.VersioningConfiguration(Status="Enabled"),
        )
    )

    template.add_output(
        t.Output(
            "CloudFormationEventsQueueArn",
            Value=t.GetAtt(cloud_formation_events_queue, "Arn"),
        )
    )
    template.add_output(t.Output("Version", Value=t.GetAtt(param, "Value")))
    template.add_output(
        t.Output("ManualApprovalsParam", Value=t.GetAtt(manual_approvals_param, "Value"))
    )

    template.add_resource(
        ssm.Parameter(
            "DefaultTerraformVersion",
            Type="String",
            Name=constants.DEFAULT_TERRAFORM_VERSION_PARAMETER_NAME,
            Value=constants.DEFAULT_TERRAFORM_VERSION_VALUE,
        )
    )

    return template

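# Illustrative invocation of get_template above (argument values are
# placeholders, not defaults from the project):
def _example_get_template_usage() -> str:
    template = get_template(
        puppet_version="0.100.0",
        all_regions=["eu-west-1"],
        source={
            "Provider": "CodeCommit",
            "Configuration": {
                "RepositoryName": "ServiceCatalogPuppet",
                "BranchName": "main",
            },
        },
        is_caching_enabled=False,
        is_manual_approvals=False,
        scm_skip_creation_of_repo=False,
        should_validate=True,
    )
    return template.to_yaml()
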
def generate_codepipeline_template(
    codepipeline_role_name: str,
    codepipeline_role_path: str,
    codebuild_role_name: str,
    codebuild_role_path: str,
    output_format: str,
    migrate_role_arn: str,
) -> str:
    t = troposphere.Template()
    t.set_description(
        "CICD template that runs aws organized migrate for the given branch of the given repo"
    )
    project_name = "AWSOrganized-Migrate"
    repository_name = "AWS-Organized-environment"
    repo = t.add_resource(
        codecommit.Repository("Repository", RepositoryName=repository_name)
    )
    artifact_store = t.add_resource(
        s3.Bucket(
            "ArtifactStore",
            BucketEncryption=s3.BucketEncryption(
                ServerSideEncryptionConfiguration=[
                    s3.ServerSideEncryptionRule(
                        ServerSideEncryptionByDefault=s3.ServerSideEncryptionByDefault(
                            SSEAlgorithm="AES256"
                        )
                    )
                ]
            ),
        )
    )
    codepipeline_role = t.add_resource(
        iam.Role(
            "CodePipelineRole",
            RoleName=codepipeline_role_name,
            Path=codepipeline_role_path,
            ManagedPolicyArns=[
                "arn:aws:iam::aws:policy/AdministratorAccess",
            ],
            Policies=[
                iam.Policy(
                    PolicyName=f"executionpermissions",
                    PolicyDocument=aws.PolicyDocument(
                        Version="2012-10-17",
                        Id=f"executionpermissions",
                        Statement=[
                            aws.Statement(
                                Sid="1",
                                Effect=aws.Allow,
                                Action=[
                                    awscd_codecommit.GitPull,
                                    awscd_codecommit.GetBranch,
                                    awscd_codecommit.GetCommit,
                                    awscd_codecommit.UploadArchive,
                                ],
                                Resource=[troposphere.GetAtt(repo, "Arn")],
                            ),
                            aws.Statement(
                                Sid="2",
                                Effect=aws.Allow,
                                Action=[
                                    awacs_s3.GetBucketPolicy,
                                    awacs_s3.GetBucketVersioning,
                                    awacs_s3.ListBucket,
                                ],
                                Resource=[troposphere.GetAtt(artifact_store, "Arn")],
                            ),
                            aws.Statement(
                                Sid="3",
                                Effect=aws.Allow,
                                Action=[
                                    awacs_s3.GetObject,
                                    awacs_s3.GetObjectVersion,
                                ],
                                Resource=[
                                    troposphere.Join(
                                        ":",
                                        [troposphere.GetAtt(artifact_store, "Arn"), "*"],
                                    )
                                ],
                            ),
                            aws.Statement(
                                Sid="4",
                                Effect=aws.Allow,
                                Action=[
                                    awacs_s3.ListAllMyBuckets,
                                ],
                                Resource=[
                                    troposphere.Join(
                                        ":",
                                        ["arn", troposphere.Partition, "s3:::*"],
                                    )
                                ],
                            ),
                            # aws.Statement(
                            #     Sid="5",
                            #     Effect=aws.Allow,
                            #     Action=[aws.Action("s3", "*")],
                            #     Resource=[
                            #         troposphere.Join(
                            #             ":",
                            #             [troposphere.GetAtt(artifact_store, 'Arn'), "*"],
                            #         )
                            #     ],
                            # ),
                            # aws.Statement(
                            #     Sid="6",
                            #     Effect=aws.Allow,
                            #     Action=[aws.Action("s3", "*")],
                            #     Resource=[troposphere.GetAtt(artifact_store, 'Arn')],
                            # ),
                        ],
                    ),
                )
            ],
            AssumeRolePolicyDocument=aws.PolicyDocument(
                Version="2012-10-17",
                Statement=[
                    aws.Statement(
                        Effect=aws.Allow,
                        Action=[awacs_sts.AssumeRole],
                        Principal=aws.Principal(
                            "Service", ["codepipeline.amazonaws.com"]
                        ),
                    ),
                ],
            ),
        )
    )
    codebuild_role = t.add_resource(
        iam.Role(
            "CodeBuildRole",
            RoleName=codebuild_role_name,
            Path=codebuild_role_path,
            ManagedPolicyArns=[
                "arn:aws:iam::aws:policy/AdministratorAccess",
            ],
            Policies=[
                iam.Policy(
                    PolicyName=f"executionpermissions",
                    PolicyDocument=aws.PolicyDocument(
                        Version="2012-10-17",
                        Id=f"executionpermissions",
                        Statement=[
                            aws.Statement(
                                Sid="1",
                                Effect=aws.Allow,
                                Action=[
                                    awacs_logs.CreateLogGroup,
                                    awacs_logs.CreateLogStream,
                                    awacs_logs.PutLogEvents,
                                ],
                                Resource=[
                                    # "arn:aws:logs:eu-west-1:669925765091:log-group:/aws/codebuild/examplecodebuild",
                                    # "arn:aws:logs:eu-west-1:669925765091:log-group:/aws/codebuild/examplecodebuild:*",
                                    {
                                        "Fn::Sub": [
                                            f"arn:${{AWS::Partition}}:logs:${{AWS::Region}}:${{AWS::AccountId}}:log-group:/aws/codebuild/{project_name}",
                                            {},
                                        ]
                                    },
                                    {
                                        "Fn::Sub": [
                                            f"arn:${{AWS::Partition}}:logs:${{AWS::Region}}:${{AWS::AccountId}}:log-group:/aws/codebuild/{project_name}:*",
                                            {},
                                        ]
                                    },
                                ],
                            ),
                            aws.Statement(
                                Sid="2",
                                Effect=aws.Allow,
                                Action=[
                                    awacs_s3.PutObject,
                                    awacs_s3.GetObject,
                                    awacs_s3.GetObjectVersion,
                                    awacs_s3.GetBucketAcl,
                                    awacs_s3.GetBucketLocation,
                                ],
                                Resource=[
                                    # "arn:aws:s3:::codepipeline-eu-west-1-*",
                                    {
                                        "Fn::Sub": [
                                            f"arn:${{AWS::Partition}}:s3:::codepipeline-${{AWS::Region}}-*",
                                            {},
                                        ]
                                    },
                                ],
                            ),
                            aws.Statement(
                                Sid="3",
                                Effect=aws.Allow,
                                Action=[
                                    awacs_codebuild.CreateReportGroup,
                                    awacs_codebuild.CreateReport,
                                    awacs_codebuild.UpdateReport,
                                    awacs_codebuild.BatchPutTestCases,
                                    awacs_codebuild.BatchPutCodeCoverages,
                                ],
                                Resource=[
                                    # "arn:aws:codebuild:eu-west-1:669925765091:report-group/examplecodebuild-*",
                                    {
                                        "Fn::Sub": [
                                            f"arn:${{AWS::Partition}}:codebuild:${{AWS::Region}}:${{AWS::AccountId}}:report-group/{project_name}-*",
                                            {},
                                        ]
                                    },
                                ],
                            ),
                            aws.Statement(
                                Sid="4",
                                Effect=aws.Allow,
                                Action=[awacs_sts.AssumeRole],
                                Resource=[migrate_role_arn],
                            ),
                            # aws.Statement(
                            #     Sid="5",
                            #     Effect=aws.Allow,
                            #     Action=[aws.Action("s3", "*")],
                            #     Resource=[
                            #         troposphere.Join(
                            #             ":",
                            #             [troposphere.GetAtt(artifact_store, 'Arn'), "*"],
                            #         )
                            #     ],
                            # ),
                            # aws.Statement(
                            #     Sid="6",
                            #     Effect=aws.Allow,
                            #     Action=[aws.Action("s3", "*")],
                            #     Resource=[troposphere.GetAtt(artifact_store, 'Arn')],
                            # ),
                        ],
                    ),
                )
            ],
            AssumeRolePolicyDocument=aws.PolicyDocument(
                Version="2012-10-17",
                Statement=[
                    aws.Statement(
                        Effect=aws.Allow,
                        Action=[awacs_sts.AssumeRole],
                        Principal=aws.Principal("Service", ["codebuild.amazonaws.com"]),
                    ),
                ],
            ),
        )
    )
    project = t.add_resource(
        codebuild.Project(
            "AWSOrganizedMigrate",
            Artifacts=codebuild.Artifacts(Type="CODEPIPELINE"),
            Environment=codebuild.Environment(
                ComputeType="BUILD_GENERAL1_SMALL",
                Image="aws/codebuild/standard:4.0",
                Type="LINUX_CONTAINER",
                EnvironmentVariables=[
                    {
                        "Name": "MIGRATE_ROLE_ARN",
                        "Type": "PLAINTEXT",
                        "Value": migrate_role_arn,
                    }
                ],
            ),
            Name=project_name,
            ServiceRole=troposphere.GetAtt(codebuild_role, "Arn"),
            Source=codebuild.Source(
                Type="CODEPIPELINE",
                BuildSpec=yaml.safe_dump(
                    dict(
                        version="0.2",
                        phases=dict(
                            install={
                                "runtime-versions": dict(python="3.8"),
                                "commands": [
                                    "pip install aws-organized",
                                ],
                            },
                            build={
                                "commands": [
                                    "aws-organized migrate $(MIGRATE_ROLE_ARN)",
                                ],
                            },
                        ),
                        artifacts=dict(
                            files=[
                                "environment",
                            ],
                        ),
                    )
                ),
            ),
        )
    )
    source_actions = codepipeline.Actions(
        Name="SourceAction",
        ActionTypeId=codepipeline.ActionTypeId(
            Category="Source",
            Owner="AWS",
            Version="1",
            Provider="CodeCommit",
        ),
        OutputArtifacts=[codepipeline.OutputArtifacts(Name="SourceOutput")],
        Configuration={
            "RepositoryName": repository_name,
            "BranchName": "master",
            "PollForSourceChanges": "true",
        },
        RunOrder="1",
    )
    pipeline = t.add_resource(
        codepipeline.Pipeline(
            "Pipeline",
            RoleArn=troposphere.GetAtt(codepipeline_role, "Arn"),
            Stages=[
                codepipeline.Stages(
                    Name="Source",
                    Actions=[source_actions],
                ),
                codepipeline.Stages(
                    Name="Migrate",
                    Actions=[
                        codepipeline.Actions(
                            Name="Migrate",
                            InputArtifacts=[
                                codepipeline.InputArtifacts(Name="SourceOutput")
                            ],
                            ActionTypeId=codepipeline.ActionTypeId(
                                Category="Build",
                                Owner="AWS",
                                Version="1",
                                Provider="CodeBuild",
                            ),
                            Configuration={
                                "ProjectName": troposphere.Ref(project),
                                "PrimarySource": "SourceAction",
                            },
                            RunOrder="1",
                        )
                    ],
                ),
            ],
            ArtifactStore=codepipeline.ArtifactStore(
                Type="S3", Location=troposphere.Ref(artifact_store)
            ),
        )
    )
    if output_format == "json":
        return t.to_json()
    else:
        return t.to_yaml()

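# Hypothetical invocation of generate_codepipeline_template; the role names,
# paths and ARN below are illustrative only:
if __name__ == "__main__":
    print(
        generate_codepipeline_template(
            codepipeline_role_name="AWSOrganizedCodePipelineRole",
            codepipeline_role_path="/aws-organized/",
            codebuild_role_name="AWSOrganizedCodeBuildRole",
            codebuild_role_path="/aws-organized/",
            output_format="yaml",
            migrate_role_arn="arn:aws:iam::111111111111:role/MigrateRole",
        )
    )
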
def add_test_resources(test_name): """Add the resources for the given test.""" codebuild_role = template.add_resource( iam.Role( "CodeBuildRole{}".format(test_name), AssumeRolePolicyDocument=make_simple_assume_policy( "codebuild.amazonaws.com"), Policies=IAM_POLICY_BUILDER.build(test_name), )) template.add_resource( codebuild.Project( f"RunwayIntegrationTest{test_name}", Artifacts=codebuild.Artifacts(Type="NO_ARTIFACTS"), Description=f"{test_name} runway integration tests", Environment=codebuild.Environment( ComputeType="BUILD_GENERAL1_SMALL", EnvironmentVariables=[ codebuild.EnvironmentVariable( Name="DEPLOY_ENVIRONMENT", Type="PLAINTEXT", Value=variables["EnvironmentName"].ref, ), codebuild.EnvironmentVariable( Name="TEST_TO_RUN", Type="PLAINTEXT", Value=test_name.lower(), ), codebuild.EnvironmentVariable( # Disable emojis in output. Name="PIPENV_HIDE_EMOJIS", Type="PLAINTEXT", Value="1", ), codebuild.EnvironmentVariable( # disable terminal spinner. Name="PIPENV_NOSPIN", Type="PLAINTEXT", Value="1", ), codebuild.EnvironmentVariable( # Pipenv automatically assumes “yes” at all prompts. Name="PIPENV_YES", Type="PLAINTEXT", Value="1", ), ], Image="aws/codebuild/standard:2.0", Type="LINUX_CONTAINER", ), Name=f"runway-int-test-{test_name}", ServiceRole=codebuild_role.get_att("Arn"), Source=codebuild.Source( Type="GITHUB", Location=variables["GitHubUrl"].ref), Triggers=codebuild.ProjectTriggers( Webhook=True, FilterGroups=[[ codebuild.WebhookFilter( Type="ACTOR_ACCOUNT_ID", Pattern="|".join( str(x) for x in GITHUB_ACCOUNT_IDS), ), codebuild.WebhookFilter( Type="EVENT", Pattern="PULL_REQUEST_CREATED," "PULL_REQUEST_UPDATED," "PULL_REQUEST_REOPENED", ), codebuild.WebhookFilter( Type="BASE_REF", Pattern="^refs/heads/release$"), codebuild.WebhookFilter( Type="HEAD_REF", Pattern="^refs/heads/master$"), ]], ), ))
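# Hypothetical driver for add_test_resources, assuming the module-level
# `template`, `variables`, `IAM_POLICY_BUILDER` and `GITHUB_ACCOUNT_IDS`
# objects the function reads are already defined; test names are illustrative.
for test_name in ("CloudFormation", "Serverless", "Terraform"):
    add_test_resources(test_name)
print(template.to_json())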
def create_template(self): """Create template (main function called by Stacker).""" template = self.template variables = self.get_variables() template.set_version('2010-09-09') template.set_description('App - Build Pipeline') # Resources boundary_arn = Join('', [ 'arn:', Partition, ':iam::', AccountId, ':policy/', variables['RolePermissionsBoundaryName'].ref ]) # Repo image limit is 1000 by default; this lambda function will prune # old images image_param_path = Join( '', ['/', variables['AppPrefix'].ref, '/current-hash']) image_param_arn = Join('', [ 'arn:', Partition, ':ssm:', Region, ':', AccountId, ':parameter', image_param_path ]) ecr_repo_arn = Join('', [ 'arn:', Partition, ':ecr:', Region, ':', AccountId, ':repository/', variables['EcrRepoName'].ref ]) cleanuplambdarole = template.add_resource( iam.Role('CleanupLambdaRole', AssumeRolePolicyDocument=make_simple_assume_policy( 'lambda.amazonaws.com'), ManagedPolicyArns=[ IAM_ARN_PREFIX + 'AWSLambdaBasicExecutionRole' ], PermissionsBoundary=boundary_arn, Policies=[ iam.Policy( PolicyName=Join( '', [variables['AppPrefix'].ref, '-ecrcleanup']), PolicyDocument=PolicyDocument( Version='2012-10-17', Statement=[ Statement(Action=[awacs.ssm.GetParameter], Effect=Allow, Resource=[image_param_arn]), Statement(Action=[ awacs.ecr.DescribeImages, awacs.ecr.BatchDeleteImage ], Effect=Allow, Resource=[ecr_repo_arn]) ])) ])) cleanupfunction = template.add_resource( awslambda.Function( 'CleanupFunction', Description='Cleanup stale ECR images', Code=awslambda.Code( ZipFile=variables['ECRCleanupLambdaFunction']), Environment=awslambda.Environment( Variables={ 'ECR_REPO_NAME': variables['EcrRepoName'].ref, 'SSM_PARAM': image_param_path }), Handler='index.handler', Role=cleanuplambdarole.get_att('Arn'), Runtime='python3.6', Timeout=120)) cleanuprule = template.add_resource( events.Rule('CleanupRule', Description='Regularly invoke CleanupFunction', ScheduleExpression='rate(7 days)', State='ENABLED', Targets=[ events.Target(Arn=cleanupfunction.get_att('Arn'), Id='CleanupFunction') ])) template.add_resource( awslambda.Permission( 'AllowCWLambdaInvocation', FunctionName=cleanupfunction.ref(), Action=awacs.awslambda.InvokeFunction.JSONrepr(), Principal='events.amazonaws.com', SourceArn=cleanuprule.get_att('Arn'))) appsource = template.add_resource( codecommit.Repository( 'AppSource', RepositoryName=Join('-', [variables['AppPrefix'].ref, 'source']))) for i in ['Name', 'Arn']: template.add_output( Output("AppRepo%s" % i, Description="%s of app source repo" % i, Value=appsource.get_att(i))) bucket = template.add_resource( s3.Bucket( 'Bucket', AccessControl=s3.Private, LifecycleConfiguration=s3.LifecycleConfiguration(Rules=[ s3.LifecycleRule(NoncurrentVersionExpirationInDays=90, Status='Enabled') ]), VersioningConfiguration=s3.VersioningConfiguration( Status='Enabled'))) template.add_output( Output('PipelineBucketName', Description='Name of pipeline bucket', Value=bucket.ref())) # This list must be kept in sync between the CodeBuild project and its # role build_name = Join('', [variables['AppPrefix'].ref, '-build']) build_role = template.add_resource( iam.Role( 'BuildRole', AssumeRolePolicyDocument=make_simple_assume_policy( 'codebuild.amazonaws.com'), PermissionsBoundary=boundary_arn, Policies=[ iam.Policy( PolicyName=Join('', [build_name, '-policy']), PolicyDocument=PolicyDocument( Version='2012-10-17', Statement=[ Statement( Action=[awacs.s3.GetObject], Effect=Allow, Resource=[ Join('', [bucket.get_att('Arn'), '/*']) ]), Statement( 
Action=[awacs.ecr.GetAuthorizationToken], Effect=Allow, Resource=['*']), Statement(Action=[ awacs.ecr.BatchCheckLayerAvailability, awacs.ecr.BatchGetImage, awacs.ecr.CompleteLayerUpload, awacs.ecr.DescribeImages, awacs.ecr.GetDownloadUrlForLayer, awacs.ecr.InitiateLayerUpload, awacs.ecr.PutImage, awacs.ecr.UploadLayerPart ], Effect=Allow, Resource=[ecr_repo_arn]), Statement(Action=[ awacs.ssm.GetParameter, awacs.ssm.PutParameter ], Effect=Allow, Resource=[image_param_arn]), Statement(Action=[ awacs.logs.CreateLogGroup, awacs.logs.CreateLogStream, awacs.logs.PutLogEvents ], Effect=Allow, Resource=[ Join('', [ 'arn:', Partition, ':logs:', Region, ':', AccountId, ':log-group:/aws/codebuild/', build_name ] + x) for x in [[':*'], [':*/*']] ]) ])) ])) buildproject = template.add_resource( codebuild.Project( 'BuildProject', Artifacts=codebuild.Artifacts(Type='CODEPIPELINE'), Environment=codebuild.Environment( ComputeType='BUILD_GENERAL1_SMALL', EnvironmentVariables=[ codebuild.EnvironmentVariable( Name='AWS_DEFAULT_REGION', Type='PLAINTEXT', Value=Region), codebuild.EnvironmentVariable(Name='AWS_ACCOUNT_ID', Type='PLAINTEXT', Value=AccountId), codebuild.EnvironmentVariable( Name='IMAGE_REPO_NAME', Type='PLAINTEXT', Value=variables['EcrRepoName'].ref), ], Image='aws/codebuild/docker:18.09.0', Type='LINUX_CONTAINER'), Name=build_name, ServiceRole=build_role.get_att('Arn'), Source=codebuild.Source( Type='CODEPIPELINE', BuildSpec=variables['BuildProjectBuildSpec']))) pipelinerole = template.add_resource( iam.Role( 'PipelineRole', AssumeRolePolicyDocument=make_simple_assume_policy( 'codepipeline.amazonaws.com'), PermissionsBoundary=boundary_arn, Policies=[ iam.Policy( PolicyName=Join('', [build_name, '-pipeline-policy']), PolicyDocument=PolicyDocument( Version='2012-10-17', Statement=[ Statement( Action=[ awacs.codecommit.GetBranch, awacs.codecommit.GetCommit, awacs.codecommit.UploadArchive, awacs.codecommit. GetUploadArchiveStatus, # noqa awacs.codecommit.CancelUploadArchive ], # noqa Effect=Allow, Resource=[appsource.get_att('Arn')]), Statement( Action=[awacs.s3.GetBucketVersioning], Effect=Allow, Resource=[bucket.get_att('Arn')]), Statement( Action=[ awacs.s3.GetObject, awacs.s3.PutObject ], Effect=Allow, Resource=[ Join('', [bucket.get_att('Arn'), '/*']) ]), Statement( Action=[ awacs.codebuild.BatchGetBuilds, awacs.codebuild.StartBuild ], Effect=Allow, Resource=[buildproject.get_att('Arn')]) ])) ])) template.add_resource( codepipeline.Pipeline( 'Pipeline', ArtifactStore=codepipeline.ArtifactStore(Location=bucket.ref(), Type='S3'), Name=build_name, RoleArn=pipelinerole.get_att('Arn'), Stages=[ codepipeline.Stages( Name='Source', Actions=[ codepipeline.Actions( Name='CodeCommit', ActionTypeId=codepipeline.ActionTypeId( Category='Source', Owner='AWS', Provider='CodeCommit', Version='1'), Configuration={ 'RepositoryName': appsource.get_att('Name'), # noqa 'BranchName': 'master' }, OutputArtifacts=[ codepipeline.OutputArtifacts( Name='CodeCommitRepo') ]), ]), codepipeline.Stages( Name='Build', Actions=[ codepipeline.Actions( Name='Build', ActionTypeId=codepipeline.ActionTypeId( Category='Build', Owner='AWS', Provider='CodeBuild', Version='1'), Configuration={ 'ProjectName': buildproject.ref() }, InputArtifacts=[ codepipeline.InputArtifacts( Name='CodeCommitRepo') ]) ]) ]))
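# Minimal sketch of what the Join/list-comprehension in the build role above
# produces for the CodeBuild log-group permissions; region, account and build
# name values here are illustrative assumptions. Two resources are granted,
# one ending in ":*" and one in ":*/*".
from troposphere import Join

log_group_arns = [
    Join('', ['arn:', 'aws', ':logs:', 'us-east-1', ':', '111111111111',
              ':log-group:/aws/codebuild/', 'myapp-build'] + x)
    for x in [[':*'], [':*/*']]
]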
def handle(self, chain_context): self.validate(chain_context) print("Adding action %s to stage %s." % (self.action_name, self.stage_name_to_add)) full_action_name = "%s%s" % (self.stage_name_to_add, self.action_name) policy_name = "%sCodeBuildPolicy" % chain_context.instance_name role_name = "CodeBuildRole%s" % full_action_name codebuild_role = self.get_default_code_build_role( chain_context=chain_context, policy_name=policy_name, role_name=role_name, ) codebuild_role_arn = self.role_arn if self.role_arn else troposphere.GetAtt( codebuild_role, 'Arn') if not self.environment: self.environment = codebuild.Environment( ComputeType='BUILD_GENERAL1_SMALL', Image='aws/codebuild/python:2.7.12', Type='LINUX_CONTAINER', EnvironmentVariables=[ # TODO: allow these to be injectable, or just the whole environment? { 'Name': 'PIPELINE_BUCKET', 'Value': chain_context.metadata[META_PIPELINE_BUCKET_NAME] } ], ) project = self.create_project( prefix=self.prefix, chain_context=chain_context, codebuild_role_arn=codebuild_role_arn, codebuild_environment=self.environment, name=full_action_name, ) code_build_action = cumulus.types.codebuild.buildaction.CodeBuildAction( Name=self.action_name, InputArtifacts=[ codepipeline.InputArtifacts(Name=self.input_artifact_name) ], Configuration={'ProjectName': Ref(project)}, RunOrder="1") chain_context.template.add_resource(codebuild_role) chain_context.template.add_resource(project) template = chain_context.template stage = cumulus.util.template_query.TemplateQuery.get_pipeline_stage_by_name( template=template, stage_name=self.stage_name_to_add, ) # TODO accept a parallel action to the previous action, and don't +1 here. next_run_order = len(stage.Actions) + 1 code_build_action.RunOrder = next_run_order stage.Actions.append(code_build_action)
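# A hedged sketch of supplying a custom environment instead of the
# python:2.7.12 default that handle() falls back to; the image, compute type
# and bucket value below are illustrative assumptions.
custom_environment = codebuild.Environment(
    ComputeType='BUILD_GENERAL1_SMALL',
    Image='aws/codebuild/standard:4.0',
    Type='LINUX_CONTAINER',
    EnvironmentVariables=[
        {'Name': 'PIPELINE_BUCKET', 'Value': 'my-pipeline-bucket'}  # assumed bucket name
    ],
)
# e.g. assign this to the step's `environment` attribute before handle() runs.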
"*" ] }, ]}, Roles=[Ref(ServiceRole)], )) # Create CodeBuild Projects # Image Build BuildArtifacts = codebuild.Artifacts( Type='NO_ARTIFACTS', ) BuildEnvironment = codebuild.Environment( ComputeType="BUILD_GENERAL1_SMALL", Image="aws/codebuild/docker:17.09.0", Type="LINUX_CONTAINER", EnvironmentVariables=[{'Name': 'CODECOMMIT_REPO_ADDR', 'Value': Ref(codecommit_repo_addr)}], ) BuildSource = codebuild.Source( Location="https://github.com/jasonumiker/ghost-ecs-fargate-pipeline", Type="GITHUB", BuildSpec = "init-codecommit/buildspec.yml" ) BuildProject = codebuild.Project( "CodeCommitInitBuildProject", Artifacts=BuildArtifacts, Environment=BuildEnvironment, Name="ghost-pipeline-init-codecommit", ServiceRole=Ref(ServiceRole),
def get_resources() -> list: all_regions = config.get_regions() return [ codebuild.Project( "Validate", Name=VALIDATE_PROJECT_NAME, ServiceRole=t.Sub( "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/servicecatalog-product-factory/DeliveryCodeRole" ), Tags=t.Tags.from_dict( **{"ServiceCatalogPuppet:Actor": "Framework"}), Artifacts=codebuild.Artifacts(Type="CODEPIPELINE"), TimeoutInMinutes=60, Environment=codebuild.Environment( ComputeType=constants.ENVIRONMENT_COMPUTE_TYPE_DEFAULT, Image=constants.ENVIRONMENT_IMAGE_DEFAULT, Type=constants.ENVIRONMENT_TYPE_DEFAULT, EnvironmentVariables=[ codebuild.EnvironmentVariable( Name="TEMPLATE_FORMAT", Type="PLAINTEXT", Value="yaml", ) ], ), Source=codebuild.Source( BuildSpec=t.Sub( yaml.safe_dump( dict( version=0.2, phases=dict(build={ "commands": [ "export FactoryTemplateValidateBucket=$(aws cloudformation list-stack-resources --stack-name servicecatalog-factory --query 'StackResourceSummaries[?LogicalResourceId==`FactoryTemplateValidateBucket`].PhysicalResourceId' --output text)", "aws s3 cp product.template.$TEMPLATE_FORMAT s3://$FactoryTemplateValidateBucket/$CODEBUILD_BUILD_ID.$TEMPLATE_FORMAT", "aws cloudformation validate-template --template-url https://$FactoryTemplateValidateBucket.s3.$AWS_REGION.amazonaws.com/$CODEBUILD_BUILD_ID.$TEMPLATE_FORMAT", ] }, ), artifacts=dict( name=VALIDATE_OUTPUT_ARTIFACT, files=["*", "**/*"], ), ))), Type="CODEPIPELINE", ), Description=t.Sub("Run validate"), ), codebuild.Project( "Deploy", Name=DEPLOY_IN_GOVCLOUD_PROJECT_NAME, ServiceRole=t.Sub( "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/servicecatalog-product-factory/DeliveryCodeRole" ), Tags=t.Tags.from_dict( **{"ServiceCatalogPuppet:Actor": "Framework"}), Artifacts=codebuild.Artifacts(Type="CODEPIPELINE"), TimeoutInMinutes=60, Environment=codebuild.Environment( ComputeType=constants.ENVIRONMENT_COMPUTE_TYPE_DEFAULT, Image=constants.ENVIRONMENT_IMAGE_DEFAULT, Type=constants.ENVIRONMENT_TYPE_DEFAULT, EnvironmentVariables=[ codebuild.EnvironmentVariable( Type="PLAINTEXT", Name="ACCOUNT_ID", Value=t.Sub("${AWS::AccountId}"), ), codebuild.EnvironmentVariable(Name="PIPELINE_NAME", Type="PLAINTEXT", Value="CHANGE_ME"), codebuild.EnvironmentVariable(Name="CODEPIPELINE_ID", Type="PLAINTEXT", Value="CHANGE_ME"), ], ), Source=codebuild.Source( BuildSpec=t.Sub( yaml.safe_dump( dict( version=0.2, phases=dict( install={ "runtime-versions": dict( python="3.7", nodejs=constants. BUILDSPEC_RUNTIME_VERSIONS_NODEJS_DEFAULT, ), "commands": [ f"pip install {constants.VERSION}" if "http" in constants.VERSION else f"pip install aws-service-catalog-factory=={constants.VERSION}", ], }, build={"commands": get_commands_for_deploy()}, ), artifacts={ "name": DEPLOY_OUTPUT_ARTIFACT, "files": ["*", "**/*"], }, ))), Type="CODEPIPELINE", ), Description=t.Sub( "Create a deploy stage for template cloudformation"), ), ]
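# Minimal sketch collecting the shared Validate/Deploy projects above into a
# template (assumes the same module context, including `config.get_regions()`
# being callable and troposphere imported as `t`):
shared_template = t.Template()
for resource in get_resources():
    shared_template.add_resource(resource)
print(shared_template.to_yaml())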
def render( self, template, name, version, description, source, product_ids_by_region, tags, friendly_uid, ) -> str: template_description = f"{friendly_uid}-{version}" tpl = t.Template(Description=template_description) all_regions = product_ids_by_region.keys() source_stage = codepipeline.Stages( Name="Source", Actions=[ dict( codecommit=codepipeline.Actions( RunOrder=1, RoleArn=t.Sub( "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/servicecatalog-product-factory/SourceRole" ), ActionTypeId=codepipeline.ActionTypeId( Category="Source", Owner="AWS", Version="1", Provider="CodeCommit", ), OutputArtifacts=[ codepipeline.OutputArtifacts(Name=SOURCE_OUTPUT_ARTIFACT) ], Configuration={ "RepositoryName": source.get("Configuration").get( "RepositoryName" ), "BranchName": source.get("Configuration").get("BranchName"), "PollForSourceChanges": source.get("Configuration").get( "PollForSourceChanges", True ), }, Name="Source", ), github=codepipeline.Actions( RunOrder=1, ActionTypeId=codepipeline.ActionTypeId( Category="Source", Owner="ThirdParty", Version="1", Provider="GitHub", ), OutputArtifacts=[ codepipeline.OutputArtifacts(Name=SOURCE_OUTPUT_ARTIFACT) ], Configuration={ "Owner": source.get("Configuration").get("Owner"), "Repo": source.get("Configuration").get("Repo"), "Branch": source.get("Configuration").get("Branch"), "OAuthToken": t.Join( "", [ "{{resolve:secretsmanager:", source.get("Configuration").get( "SecretsManagerSecret" ), ":SecretString:OAuthToken}}", ], ), "PollForSourceChanges": source.get("Configuration").get( "PollForSourceChanges" ), }, Name="Source", ), codestarsourceconnection=codepipeline.Actions( RunOrder=1, RoleArn=t.Sub( "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/servicecatalog-product-factory/SourceRole" ), ActionTypeId=codepipeline.ActionTypeId( Category="Source", Owner="AWS", Version="1", Provider="CodeStarSourceConnection", ), OutputArtifacts=[ codepipeline.OutputArtifacts(Name=SOURCE_OUTPUT_ARTIFACT) ], Configuration={ "ConnectionArn": source.get("Configuration").get( "ConnectionArn" ), "FullRepositoryId": source.get("Configuration").get( "FullRepositoryId" ), "BranchName": source.get("Configuration").get("BranchName"), "OutputArtifactFormat": source.get("Configuration").get( "OutputArtifactFormat" ), }, Name="Source", ), s3=codepipeline.Actions( RunOrder=1, ActionTypeId=codepipeline.ActionTypeId( Category="Source", Owner="AWS", Version="1", Provider="S3", ), OutputArtifacts=[ codepipeline.OutputArtifacts(Name=SOURCE_OUTPUT_ARTIFACT) ], Configuration={ "S3Bucket": source.get("Configuration").get("S3Bucket"), "S3ObjectKey": source.get("Configuration").get( "S3ObjectKey" ), "PollForSourceChanges": source.get("Configuration").get( "PollForSourceChanges" ), }, Name="Source", ), ).get(source.get("Provider", "").lower()) ], ) build_project_name = t.Sub("${AWS::StackName}-build") configuration = template.get("Configuration", {}) runtime_versions = dict( nodejs=constants.BUILDSPEC_RUNTIME_VERSIONS_NODEJS_DEFAULT, ) if configuration.get("runtime-versions"): runtime_versions.update(configuration.get("runtime-versions")) extra_commands = list(configuration.get("install", {}).get("commands", [])) tpl.add_resource( codebuild.Project( "BuildProject", Name=build_project_name, ServiceRole=t.Sub( "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/servicecatalog-product-factory/DeliveryCodeRole" ), Tags=t.Tags.from_dict(**{"ServiceCatalogPuppet:Actor": "Framework"}), Artifacts=codebuild.Artifacts(Type="CODEPIPELINE"), TimeoutInMinutes=60, Environment=codebuild.Environment( 
ComputeType=constants.ENVIRONMENT_COMPUTE_TYPE_DEFAULT, Image=constants.ENVIRONMENT_IMAGE_DEFAULT, Type=constants.ENVIRONMENT_TYPE_DEFAULT, EnvironmentVariables=[ {"Type": "PLAINTEXT", "Name": "ACCOUNT_ID", "Value": "CHANGE_ME",}, {"Type": "PLAINTEXT", "Name": "REGION", "Value": "CHANGE_ME",}, {"Type": "PLAINTEXT", "Name": "NAME", "Value": "CHANGE_ME",}, {"Type": "PLAINTEXT", "Name": "VERSION", "Value": "CHANGE_ME",}, {"Type": "PLAINTEXT", "Name": "PROVISIONER_NAME", "Value": "CHANGE_ME",}, {"Type": "PLAINTEXT", "Name": "PROVISIONER_VERSION", "Value": "CHANGE_ME",}, ], ), Source=codebuild.Source( BuildSpec=t.Sub( yaml.safe_dump( dict( version=0.2, phases=dict( install={ "runtime-versions": runtime_versions, "commands": [ f"pip install {constants.VERSION}" if "http" in constants.VERSION else f"pip install aws-service-catalog-factory=={constants.VERSION}", ] + extra_commands }, pre_build={ "commands": [ "npm install", "npm run cdk synth -- --output sct-synth-output", ], }, build={ "commands": [ f"servicecatalog-factory generate-template $PROVISIONER_NAME $PROVISIONER_VERSION $NAME $VERSION . > product.template.yaml", ] }, ), artifacts={ "name": BUILD_OUTPUT_ARTIFACT, "files": ["*", "**/*"], "exclude-paths": ["sct-synth-output/*"], }, ) ) ), Type="CODEPIPELINE", ), Description=t.Sub("Create a build stage for template CDK 1.0.0"), ) ) build_stage = codepipeline.Stages( Name="Build", Actions=[ codepipeline.Actions( InputArtifacts=[ codepipeline.InputArtifacts(Name=SOURCE_OUTPUT_ARTIFACT), ], Name='Build', ActionTypeId=codepipeline.ActionTypeId( Category="Build", Owner="AWS", Version="1", Provider="CodeBuild", ), OutputArtifacts=[ codepipeline.OutputArtifacts(Name=BUILD_OUTPUT_ARTIFACT) ], Configuration={ "ProjectName": build_project_name, "PrimarySource": SOURCE_OUTPUT_ARTIFACT, "EnvironmentVariables": t.Sub( json.dumps( [ dict(name="ACCOUNT_ID", value="${AWS::AccountId}", type="PLAINTEXT"), dict(name="REGION", value="${AWS::Region}", type="PLAINTEXT"), dict(name="PROVISIONER_NAME", value='CDK', type="PLAINTEXT"), dict(name="PROVISIONER_VERSION", value='1.0.0', type="PLAINTEXT"), dict(name="NAME", value=name, type="PLAINTEXT"), dict( name="VERSION", value=version, type="PLAINTEXT" ), ] ) ), }, RunOrder=1, ) ], ) validate_stage = codepipeline.Stages( Name="Validate", Actions=[ codepipeline.Actions( InputArtifacts=[ codepipeline.InputArtifacts(Name=BUILD_OUTPUT_ARTIFACT), ], Name="Validate", ActionTypeId=codepipeline.ActionTypeId( Category="Test", Owner="AWS", Version="1", Provider="CodeBuild", ), OutputArtifacts=[ codepipeline.OutputArtifacts(Name=VALIDATE_OUTPUT_ARTIFACT) ], Configuration={ "ProjectName": shared_resources.VALIDATE_PROJECT_NAME, "PrimarySource": BUILD_OUTPUT_ARTIFACT, }, RunOrder=1, ) ], ) # package_stage = codepipeline.Stages( Name="Package", Actions=[ codepipeline.Actions( InputArtifacts=[ codepipeline.InputArtifacts(Name=BUILD_OUTPUT_ARTIFACT), ], Name="Package", ActionTypeId=codepipeline.ActionTypeId( Category="Build", Owner="AWS", Version="1", Provider="CodeBuild", ), OutputArtifacts=[ codepipeline.OutputArtifacts(Name=PACKAGE_OUTPUT_ARTIFACT) ], Configuration={ "ProjectName": cdk_shared_resources.CDK_PACKAGE_PROJECT_NAME, "PrimarySource": BUILD_OUTPUT_ARTIFACT, "EnvironmentVariables": t.Sub( json.dumps( [ dict( name="PIPELINE_NAME", value="${AWS::StackName}-pipeline", type="PLAINTEXT", ), dict( name="CODEPIPELINE_ID", value="#{codepipeline.PipelineExecutionId}", type="PLAINTEXT", ), dict(name="NAME", value=name, type="PLAINTEXT"), dict( name="VERSION", value=version, 
type="PLAINTEXT" ), dict( name="DESCRIPTION", value=description, type="PLAINTEXT", ), dict( name="TEMPLATE_FORMAT", value="yaml", type="PLAINTEXT", ), dict( name="PROVISIONER", value="CDK/1.0.0", type="PLAINTEXT", ), ] ) ), }, RunOrder=1, ) ], ) deploy_stage = codepipeline.Stages( Name="Deploy", Actions=[ codepipeline.Actions( InputArtifacts=[ codepipeline.InputArtifacts(Name=PACKAGE_OUTPUT_ARTIFACT), ], Name="Deploy", ActionTypeId=codepipeline.ActionTypeId( Category="Build", Owner="AWS", Version="1", Provider="CodeBuild", ), OutputArtifacts=[ codepipeline.OutputArtifacts(Name=DEPLOY_OUTPUT_ARTIFACT) ], Configuration={ "ProjectName": cdk_shared_resources.CDK_DEPLOY_PROJECT_NAME, "PrimarySource": PACKAGE_OUTPUT_ARTIFACT, "EnvironmentVariables": t.Sub( json.dumps( [ dict( name="ACCOUNT_ID", value="${AWS::AccountId}", type="PLAINTEXT", ), dict( name="REGION", value="${AWS::Region}", type="PLAINTEXT", ), dict( name="PIPELINE_NAME", value="${AWS::StackName}-pipeline", type="PLAINTEXT", ), dict( name="CODEPIPELINE_ID", value="#{codepipeline.PipelineExecutionId}", type="PLAINTEXT", ), dict( name="PROVISIONER", value="CDK/1.0.0", type="PLAINTEXT", ), ] ) ), }, RunOrder=1, ) ], ) tpl.add_resource( codepipeline.Pipeline( "Pipeline", RoleArn=t.Sub( "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/servicecatalog-product-factory/CodePipelineRole" ), Stages=[ source_stage, build_stage, validate_stage, package_stage, deploy_stage, ], Name=t.Sub("${AWS::StackName}-pipeline"), ArtifactStores=[ codepipeline.ArtifactStoreMap( Region=region, ArtifactStore=codepipeline.ArtifactStore( Type="S3", Location=t.Sub( "sc-factory-artifacts-${AWS::AccountId}-" + region ), ), ) for region in all_regions ], RestartExecutionOnUpdate=False, ) ) return tpl.to_yaml(clean_up=True)
def main(): # TODO: # - SNS topic is probably not complete # - BuildCopyCFNProject Encryption keys # - build_copy_cfn_project, validate_resource_project TimeoutInMinutes property # - Lambda function execute permissions # - Build role GetBucketTagging permissions. Only used in build-env step, may be obsolete in other scenarios # - Add customer name to CodeCommit repositories # - What to do with the password of the codecommit user? # - InputArtifact is not used in pipeline # - buildspec.env in project definition itself # - CodeCommitUser Permissions on troporepo and CFNvalidaterepo only! # INIT section template = Template() projectName = "dummy-mytestrepo" # PARAMETERS section github_oauth_token_parameter = template.add_parameter( Parameter( "GithubOauthToken", Type="String", Description="Github OAuthToken", NoEcho=True, )) github_owner_parameter = template.add_parameter( Parameter( "GithubOwner", Type="String", Description="Github owner", Default="cta-int", )) github_branch_parameter = template.add_parameter( Parameter( "GithubBranch", Type="String", Description="Github branch", Default="master", )) github_repository_parameter = template.add_parameter( Parameter( "GithubRepository", Type="String", Description="Github repository", Default="aws-bootstrap", )) # RESOURCES section approve_topic = template.add_resource( sns.Topic("ApproveTopic", Subscription=[ sns.Subscription( Endpoint="*****@*****.**", Protocol="email", ) ])) artifact_store_s3_bucket = template.add_resource( s3.Bucket( "ArtifactStoreS3Bucket", AccessControl=s3.Private, )) # ROLES section cloud_formation_role = template.add_resource( iam.Role( "CloudFormationRole", AssumeRolePolicyDocument=Policy( Version="2012-10-17", Statement=[ Statement(Effect=Allow, Principal=Principal( "Service", "cloudformation.amazonaws.com"), Action=[Action("sts", "AssumeRole")]) ]), Path="/", Policies=[ iam.Policy(PolicyName="CloudFormationNestedCFNAccessPolicy", PolicyDocument=Policy(Version="2012-10-17", Statement=[ Statement( Effect=Allow, Action=[Action("*")], Resource=["*"]) ])) ])) code_build_role = template.add_resource( iam.Role( "CodeBuildRole", AssumeRolePolicyDocument=Policy( Version="2012-10-17", Statement=[ Statement( Effect=Allow, Principal=Principal("Service", "codebuild.amazonaws.com"), Action=[Action("sts", "AssumeRole")], ) ]), Path="/", Policies=[ iam.Policy( PolicyName="CodeBuildAccessPolicy", PolicyDocument=Policy( Version="2012-10-17", Statement=[ Statement( Effect=Allow, Action=[ Action("cloudformation", "Get*"), Action("cloudformation", "Describe*"), Action("cloudformation", "List*"), ], Resource=[ Sub("arn:aws:cloudformation:${AWS::Region}:${AWS::AccountId}:stack/${AWS::StackName}*" ), ]), Statement(Effect=Allow, Action=[ Action("ec2", "Describe*"), Action("cloudformation", "ValidateTemplate"), Action("elasticloadbalancing", "Describe*"), Action("autoscaling", "Describe*"), Action("iam", "Get*"), Action("iam", "List*"), Action("logs", "Describe*"), Action("logs", "Get*"), Action("tag", "Get*"), ], Resource=["*"]), Statement( Effect=Allow, Action=[ Action("logs", "CreateLogGroup"), Action("logs", "CreateLogStream"), Action("logs", "PutLogEvents"), ], Resource=[ Sub("arn:aws:logs:${AWS::Region}:${AWS::AccountId}:log-group:/aws/codebuild/*" ), ]), Statement(Effect=Allow, Action=[ Action("lambda", "ListFunctions"), Action("lambda", "InvokeFunction"), ], Resource=[ "*", ]), Statement( Effect=Allow, Action=[ Action("s3", "PutObject"), Action("s3", "GetObject"), Action("s3", "GetObjectVersion"), Action("s3", "ListBucket"), ], 
Resource=[ Sub("arn:aws:s3:::codepipeline-${AWS::Region}-*"), GetAtt(artifact_store_s3_bucket, "Arn"), Join("", [ GetAtt(artifact_store_s3_bucket, "Arn"), "/*" ]), ]), Statement(Effect=Allow, Action=[ Action("s3", "GetBucketTagging"), ], Resource=[ Sub("arn:aws:s3:::*"), ]) ])) ])) code_pipeline_role = template.add_resource( iam.Role( "CodePipelineRole", AssumeRolePolicyDocument=Policy( Version="2012-10-17", Statement=[ Statement( Effect=Allow, Principal=Principal("Service", "codepipeline.amazonaws.com"), Action=[Action("sts", "AssumeRole")], ) ]), Path="/", Policies=[ iam.Policy( PolicyName="CodePipelineAccessPolicy", PolicyDocument=Policy( Version="2012-10-17", Statement=[ Statement(Effect=Allow, Action=[Action("s3", "*")], Resource=[ GetAtt(artifact_store_s3_bucket, "Arn"), Join("", [ GetAtt(artifact_store_s3_bucket, "Arn"), "/*" ]), ]), Statement(Effect=Allow, Action=[ Action("sns", "Publish"), ], Resource=[ Ref(approve_topic), ]), Statement( Effect=Allow, Action=[ Action("codebuild", "StartBuild"), Action("codebuild", "BatchGetBuilds"), ], Resource=[ Sub("arn:aws:codebuild:${AWS::Region}:${AWS::AccountId}:project/codebuild-" + projectName), ]), Statement(Effect=Allow, Action=[ Action("lambda", "ListFunctions"), Action("lambda", "InvokeFunction"), ], Resource=[ "*", ]), Statement( Effect=Allow, Action=[ Action("iam", "PassRole"), ], Resource=[ GetAtt(cloud_formation_role, "Arn"), ], ) ])) ])) code_build_dummy = template.add_resource( codebuild.Project( "CodeBuildDummy", Source=codebuild.Source(Type="CODEPIPELINE"), Artifacts=codebuild.Artifacts(Type="CODEPIPELINE"), Description="Generate cloudformation templates", Environment=codebuild.Environment( ComputeType='BUILD_GENERAL1_SMALL', Image='aws/codebuild/python:3.3.6', Type='LINUX_CONTAINER', ), Name="codebuild-" + projectName, ServiceRole=GetAtt(code_build_role, "Arn"), )) code_pipeline_dummy = template.add_resource( codepipeline.Pipeline( "CodePipelineDummy", Name="pipeline-" + projectName, RoleArn=GetAtt(code_pipeline_role, "Arn"), ArtifactStore=codepipeline.ArtifactStore( Type="S3", Location=Ref(artifact_store_s3_bucket), ), Stages=[ codepipeline.Stages( Name="Source", Actions=[ codepipeline.Actions( Name="Source", ActionTypeId=codepipeline.ActionTypeId( Category="Source", Owner="ThirdParty", Provider="GitHub", Version="1", ), OutputArtifacts=[ codepipeline.OutputArtifacts(Name="Source") ], Configuration={ "Branch": Ref(github_branch_parameter), "Repo": Ref(github_repository_parameter), "PollForSourceChanges": True, "Owner": Ref(github_owner_parameter), "OAuthToken": Ref(github_oauth_token_parameter), }, RunOrder="1", ), ]), codepipeline.Stages( Name="Build", Actions=[ codepipeline.Actions( Name="Build", ActionTypeId=codepipeline.ActionTypeId( Category="Build", Owner="AWS", Provider="CodeBuild", Version="1", ), InputArtifacts=[ codepipeline.InputArtifacts(Name="Source") ], OutputArtifacts=[ codepipeline.OutputArtifacts(Name="Build") ], Configuration={ "ProjectName": Ref(code_build_dummy), }, RunOrder="1", ), ]), codepipeline.Stages( Name="UAT", Actions=[ codepipeline.Actions( Name="CreateUATStack", InputArtifacts=[ codepipeline.InputArtifacts(Name="Build") ], ActionTypeId=codepipeline.ActionTypeId( Category="Invoke", Owner="AWS", Version="1", Provider="Lambda", ), Configuration={ "FunctionName": "lambda-cfn-provider", "UserParameters": Sub( json.dumps({ "ActionMode": "CREATE_UPDATE", "ConfigPath": "Build::config.json", "StackName": projectName + "-UAT", "TemplatePath": "Build::dummy.json", })) }, OutputArtifacts=[ codepipeline.OutputArtifacts( Name="CreateUATStack") ], RunOrder="1", ), codepipeline.Actions( Name="CreatePRODChangeSet", InputArtifacts=[ codepipeline.InputArtifacts(Name="Build") ], ActionTypeId=codepipeline.ActionTypeId( Category="Invoke", Owner="AWS", Version="1", Provider="Lambda", ), Configuration={ "FunctionName": "lambda-cfn-provider", "UserParameters": Sub( json.dumps({ "ActionMode": "CHANGE_SET_REPLACE", "ChangeSetName": projectName + "-PROD-CHANGE-SET", "StackName": projectName + "-PROD", "TemplateConfiguration": "Build::config.json", "TemplatePath": "Build::dummy.json", })) }, OutputArtifacts=[ codepipeline.OutputArtifacts( Name="CreatePRODChangeSet") ], RunOrder="2", ), ]), codepipeline.Stages( Name="PROD-ApproveChangeSet", Actions=[ codepipeline.Actions( Name="ApprovePRODChangeSet", ActionTypeId=codepipeline.ActionTypeId( Category="Approval", Owner="AWS", Version="1", Provider="Manual", ), Configuration={ "NotificationArn": Ref(approve_topic), "CustomData": "Approve deployment in production.", }, RunOrder="1", ), ]), codepipeline.Stages( Name="PROD-ExecuteChangeSet", Actions=[ codepipeline.Actions( Name="ExecutePRODChangeSet", InputArtifacts=[ codepipeline.InputArtifacts(Name="Build") ], ActionTypeId=codepipeline.ActionTypeId( Category="Invoke", Owner="AWS", Version="1", Provider="Lambda", ), Configuration={ "FunctionName": "lambda-cfn-provider", "UserParameters": Sub( json.dumps({ "ActionMode": "CHANGE_SET_EXECUTE", "ChangeSetName": projectName + "-PROD-CHANGE-SET", "StackName": projectName + "-PROD", })) }, OutputArtifacts=[ codepipeline.OutputArtifacts( Name="ExecutePRODChangeSet") ], RunOrder="1", ), ]), ])) # OUTPUT section template.add_output([ Output( "ArtifactStoreS3Bucket", Description="Name of the S3 bucket containing the artifacts of the pipeline(s)", Value=Ref(artifact_store_s3_bucket), Export=Export(projectName + "-ArtifactS3Bucket"), ), Output( "ArtifactStoreS3BucketArn", Description="Arn of the S3 bucket containing the artifacts of the pipeline(s)", Value=GetAtt(artifact_store_s3_bucket, "Arn"), Export=Export(projectName + "-ArtifactS3BucketArn"), ), Output( "CodeBuildRole", Description="Logical name of the role that is used by the CodeBuild projects in the CodePipeline", Value=Ref(code_build_role), Export=Export(projectName + "-CodeBuildRole"), ), Output( "CloudFormationRoleArn", Description="Arn of the role that is used by CloudFormation to create the pipeline stacks", Value=GetAtt(cloud_formation_role, "Arn"), Export=Export(projectName + "-CloudFormationRoleArn"), ), Output( "CodePipelineRoleArn", Description="Arn of the role that is used by the CodePipeline", Value=GetAtt(code_pipeline_role, "Arn"), Export=Export(projectName + "-CodePipelineRoleArn"), ) ]) print(template.to_json())
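# Entry point so the template prints when the module is executed directly
# (a small convenience addition; main() above is otherwise never invoked):
if __name__ == "__main__":
    main()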
def create_template(self): """Create template (main function called by Stacker).""" template = self.template variables = self.get_variables() template.set_version('2010-09-09') template.set_description('Runway CodeBuild Project') # Resources deploy_name_list = [ 'runway-integration-tests-', variables['EnvironmentName'].ref ] # This must match what is in the Terraform # integration tests. This corresponds to the template listed in # integration_tests\test_terraform\tf_state.cfn test_suite_prefix = 'testsuite-tf-state' codebuild_role = template.add_resource( iam.Role( 'CodeBuildRole', AssumeRolePolicyDocument=make_simple_assume_policy( 'codebuild.amazonaws.com'), # todo: drop this broad access in favor of more narrow # permissions (will mean identifying all the needed # permissions across all tests) ManagedPolicyArns=[ 'arn:aws:iam::aws:policy/AdministratorAccess' ], Policies=[ iam.Policy( PolicyName=Join('', deploy_name_list + ['-policy']), PolicyDocument=PolicyDocument( Version='2012-10-17', Statement=[ Statement(Action=[ awacs.logs.CreateLogGroup, awacs.logs.CreateLogStream, awacs.logs.PutLogEvents ], Effect=Allow, Resource=[ Join('', [ 'arn:', Partition, ':logs:', Region, ':', AccountId, ':log-group:/aws/codebuild/' ] + deploy_name_list + ['*'] + x) for x in [[':*'], [':*/*']] ]), Statement( Action=[awacs.sts.AssumeRole], Effect=Allow, Resource=[ Join( '', [ 'arn:', Partition, ':iam::', ALT_TESTING_ACCOUNT_ID, ':role/runway-integration-test-role-', # noqa variables['EnvironmentName'].ref ]) ]), Statement( Action=[Action('cloudformation', '*')], Effect=Allow, Resource=[ Join(':', [ 'arn', Partition, 'cloudformation', Region, AccountId, Sub('stack/${prefix}/*', {'prefix': test_suite_prefix}) ]) ]), Statement( Action=[Action('dynamodb', '*')], Effect=Allow, Resource=[ Join(':', [ 'arn', Partition, 'dynamodb', Region, AccountId, Sub('table/${prefix}-*', {'prefix': test_suite_prefix}) ]) ]), Statement( Action=[Action('s3', '*')], Effect=Allow, Resource=[ Join(':', [ 'arn', Partition, Sub('s3:::${prefix}', {'prefix': test_suite_prefix}) ]), Join(':', [ 'arn', Partition, Sub('s3:::${prefix}/*', {'prefix': test_suite_prefix}) ]) ]), Statement( Action=[Action('sqs', '*')], Effect=Allow, Resource=[ Join(':', [ 'arn', Partition, 'sqs', Region, AccountId, 'terraform-*' ]) ]) ])) ])) template.add_resource( codebuild.Project( 'RunwayIntegrationTests', Artifacts=codebuild.Artifacts(Type='NO_ARTIFACTS'), Environment=codebuild.Environment( ComputeType='BUILD_GENERAL1_SMALL', EnvironmentVariables=[ codebuild.EnvironmentVariable(Name='CI', Type='PLAINTEXT', Value='1'), codebuild.EnvironmentVariable( Name='DEPLOY_ENVIRONMENT', Type='PLAINTEXT', Value=variables['EnvironmentName'].ref) ], Image='aws/codebuild/standard:2.0', Type='LINUX_CONTAINER'), Name=Join('', deploy_name_list), ServiceRole=codebuild_role.get_att('Arn'), Source=codebuild.Source(Type='GITHUB', Location=variables['GitHubUrl'].ref), Triggers=codebuild.ProjectTriggers( Webhook=True, FilterGroups=[[ codebuild.WebhookFilter( Type='ACTOR_ACCOUNT_ID', Pattern='|'.join( str(x) for x in GITHUB_ACCOUNT_IDS)), codebuild.WebhookFilter( Type='EVENT', Pattern='PULL_REQUEST_CREATED,PULL_REQUEST_UPDATED,PULL_REQUEST_REOPENED' # noqa ), codebuild.WebhookFilter( Type='BASE_REF', Pattern='^refs/heads/release$'), codebuild.WebhookFilter(Type='HEAD_REF', Pattern='^refs/heads/master$') ]])))
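# Smoke test, mirroring the to_dict() convention used elsewhere in this
# document, for the webhook trigger shape configured above (the patterns
# here are illustrative):
def test_project_triggers(self):
    triggers = codebuild.ProjectTriggers(
        Webhook=True,
        FilterGroups=[[
            codebuild.WebhookFilter(Type='EVENT', Pattern='PULL_REQUEST_CREATED'),
            codebuild.WebhookFilter(Type='BASE_REF', Pattern='^refs/heads/release$'),
        ]],
    )
    triggers.to_dict()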
def generate_codepipeline_template( codepipeline_role_name: str, codepipeline_role_path: str, codebuild_role_name: str, codebuild_role_path: str, ssm_parameter_prefix: str, scm_provider: str, scm_connection_arn: str, scm_full_repository_id: str, scm_branch_name: str, scm_bucket_name: str, scm_object_key: str, scm_skip_creation_of_repo: str, migrate_role_arn: str, ) -> troposphere.Template: version = pkg_resources.get_distribution("aws-organized").version t = troposphere.Template() t.set_description( "CICD template that runs aws organized migrate for the given branch of the given repo" ) project_name = "AWSOrganized-Migrate" bucket_name = scm_bucket_name if scm_provider.lower() == "codecommit" and scm_skip_creation_of_repo is False: t.add_resource( codecommit.Repository("Repository", RepositoryName=scm_full_repository_id)) if scm_provider.lower() == "s3" and scm_skip_creation_of_repo is False: bucket_name = ( scm_bucket_name if scm_bucket_name else troposphere.Sub("aws-organized-pipeline-source-${AWS::AccountId}")) t.add_resource( s3.Bucket( "Source", BucketName=bucket_name, VersioningConfiguration=s3.VersioningConfiguration( Status="Enabled"), BucketEncryption=s3.BucketEncryption( ServerSideEncryptionConfiguration=[ s3.ServerSideEncryptionRule( ServerSideEncryptionByDefault=s3.ServerSideEncryptionByDefault( SSEAlgorithm="AES256")) ]), )) artifact_store = t.add_resource( s3.Bucket( "ArtifactStore", VersioningConfiguration=s3.VersioningConfiguration( Status="Enabled"), BucketEncryption=s3.BucketEncryption( ServerSideEncryptionConfiguration=[ s3.ServerSideEncryptionRule( ServerSideEncryptionByDefault=s3.ServerSideEncryptionByDefault(SSEAlgorithm="AES256")) ]), )) codepipeline_role = t.add_resource( iam.Role( "CodePipelineRole", RoleName=codepipeline_role_name, Path=codepipeline_role_path, ManagedPolicyArns=["arn:aws:iam::aws:policy/AdministratorAccess"], AssumeRolePolicyDocument=aws.PolicyDocument( Version="2012-10-17", Statement=[ aws.Statement( Effect=aws.Allow, Action=[awacs_sts.AssumeRole], Principal=aws.Principal( "Service", ["codepipeline.amazonaws.com"]), ) ], ), )) codebuild_role = t.add_resource( iam.Role( "CodeBuildRole", RoleName=codebuild_role_name, Path=codebuild_role_path, ManagedPolicyArns=["arn:aws:iam::aws:policy/AdministratorAccess"], AssumeRolePolicyDocument=aws.PolicyDocument( Version="2012-10-17", Statement=[ aws.Statement( Effect=aws.Allow, Action=[awacs_sts.AssumeRole], Principal=aws.Principal("Service", ["codebuild.amazonaws.com"]), ) ], ), )) version_parameter = ssm.Parameter( "versionparameter", Name=f"{ssm_parameter_prefix}/version", Type="String", Value=version, ) t.add_resource(version_parameter) project = t.add_resource( codebuild.Project( "AWSOrganizedMigrate", Artifacts=codebuild.Artifacts(Type="CODEPIPELINE"), Environment=codebuild.Environment( ComputeType="BUILD_GENERAL1_SMALL", Image="aws/codebuild/standard:4.0", Type="LINUX_CONTAINER", EnvironmentVariables=[ { "Name": "MIGRATE_ROLE_ARN", "Type": "PLAINTEXT", "Value": migrate_role_arn, }, { "Name": "Version", "Type": "PARAMETER_STORE", "Value": troposphere.Ref(version_parameter), }, { "Name": "SSM_PARAMETER_PREFIX", "Type": "PLAINTEXT", "Value": ssm_parameter_prefix, }, ], ), Name=project_name, ServiceRole=troposphere.GetAtt(codebuild_role, "Arn"), Source=codebuild.Source( Type="CODEPIPELINE", BuildSpec=yaml.safe_dump( dict( version="0.2", phases=dict( install={ "runtime-versions": dict(python="3.8"), "commands": ["pip install aws-organized==${Version}"], }, build={ "commands": [ "aws-organized migrate --ssm-parameter-prefix $SSM_PARAMETER_PREFIX $MIGRATE_ROLE_ARN" ] }, ), artifacts=dict(files=["environment"]), )), ), )) source_actions = dict( codecommit=codepipeline.Actions( Name="SourceAction", ActionTypeId=codepipeline.ActionTypeId(Category="Source", Owner="AWS", Version="1", Provider="CodeCommit"), OutputArtifacts=[ codepipeline.OutputArtifacts(Name="SourceOutput") ], Configuration={ "RepositoryName": scm_full_repository_id, "BranchName": scm_branch_name, "PollForSourceChanges": "true", }, RunOrder="1", ), codestarsourceconnection=codepipeline.Actions( Name="SourceAction", ActionTypeId=codepipeline.ActionTypeId( Category="Source", Owner="AWS", Version="1", Provider="CodeStarSourceConnection", ), OutputArtifacts=[ codepipeline.OutputArtifacts(Name="SourceOutput") ], Configuration={ "ConnectionArn": scm_connection_arn, "FullRepositoryId": scm_full_repository_id, "BranchName": scm_branch_name, "OutputArtifactFormat": "CODE_ZIP", }, RunOrder="1", ), s3=codepipeline.Actions( Name="SourceAction", ActionTypeId=codepipeline.ActionTypeId(Category="Source", Owner="AWS", Version="1", Provider="S3"), OutputArtifacts=[ codepipeline.OutputArtifacts(Name="SourceOutput") ], Configuration={ "S3Bucket": bucket_name, "S3ObjectKey": scm_object_key, "PollForSourceChanges": True, }, RunOrder="1", ), ).get(scm_provider.lower()) t.add_resource( codepipeline.Pipeline( "Pipeline", RoleArn=troposphere.GetAtt(codepipeline_role, "Arn"), Stages=[ codepipeline.Stages(Name="Source", Actions=[source_actions]), codepipeline.Stages( Name="Migrate", Actions=[ codepipeline.Actions( Name="Migrate", InputArtifacts=[ codepipeline.InputArtifacts( Name="SourceOutput") ], ActionTypeId=codepipeline.ActionTypeId( Category="Build", Owner="AWS", Version="1", Provider="CodeBuild", ), Configuration={ "ProjectName": troposphere.Ref(project), "PrimarySource": "SourceAction", }, RunOrder="1", ) ], ), ], ArtifactStore=codepipeline.ArtifactStore( Type="S3", Location=troposphere.Ref(artifact_store)), )) return t
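# Hedged usage sketch for the newer generate_codepipeline_template above; it
# returns a troposphere.Template rather than a string, and every value below
# is an illustrative assumption:
tpl = generate_codepipeline_template(
    codepipeline_role_name="AWSOrganizedCodePipelineRole",
    codepipeline_role_path="/",
    codebuild_role_name="AWSOrganizedCodeBuildRole",
    codebuild_role_path="/",
    ssm_parameter_prefix="/aws-organized",
    scm_provider="codecommit",
    scm_connection_arn="",
    scm_full_repository_id="AWS-Organized-environment",
    scm_branch_name="main",
    scm_bucket_name="",
    scm_object_key="",
    scm_skip_creation_of_repo=True,  # skip creating the repo in this sketch
    migrate_role_arn="arn:aws:iam::111111111111:role/MigrateRole",  # assumed ARN
)
print(tpl.to_yaml())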
def create_cdk_pipeline(name, version, product_name, product_version, template_config, p) -> t.Template: description = f"""Builds a cdk pipeline {{"version": "{constants.VERSION}", "framework": "servicecatalog-factory", "role": "product-pipeline", "type": "{name}", "version": "{version}"}}""" configuration = template_config.get("Configuration") template = t.Template(Description=description) template.add_parameter(t.Parameter("PuppetAccountId", Type="String")) template.add_parameter( t.Parameter("CDKSupportCDKDeployRequireApproval", Type="String", Default="never")) template.add_parameter( t.Parameter("CDKSupportCDKComputeType", Type="String", Default="BUILD_GENERAL1_SMALL")) template.add_parameter( t.Parameter("CDKSupportCDKDeployImage", Type="String", Default="aws/codebuild/standard:4.0")) template.add_parameter( t.Parameter("CDKSupportCDKToolkitStackName", Type="String", Default="CDKToolKit")) template.add_parameter( t.Parameter( "CDKSupportCDKDeployExtraArgs", Type="String", Default="", Description="Extra args to pass to CDK deploy", )) template.add_parameter( t.Parameter( "CDKSupportStartCDKDeployFunctionArn", Type="String", )) template.add_parameter( t.Parameter( "CDKSupportGetOutputsForGivenCodebuildIdFunctionArn", Type="String", )) template.add_parameter( t.Parameter("CDKSupportIAMRolePaths", Type="String", Default="/servicecatalog-factory-cdk-support/")) template.add_parameter( t.Parameter("CDKSupportCDKDeployRoleName", Type="String", Default="CDKDeployRoleName")) manifest = json.loads(open(f"{p}/{PREFIX}/manifest.json", "r").read()) cdk_deploy_parameter_args = list() for artifact_name, artifact in manifest.get("artifacts", {}).items(): if artifact.get("type") == "aws:cloudformation:stack": artifact_template_file_path = artifact.get("properties", {}).get("templateFile") assert ( artifact_template_file_path ), f"Could not find template file in manifest.json for {artifact_name}" artifact_template = json.loads( open(f"{p}/{PREFIX}/{artifact_template_file_path}", "r").read()) for parameter_name, parameter_details in artifact_template.get( "Parameters", {}).items(): if template.parameters.get(parameter_name) is None: template.add_parameter( t.Parameter(parameter_name, **parameter_details)) cdk_deploy_parameter_args.append( f"--parameters {artifact_name}:{parameter_name}=${{{parameter_name}}}" ) for output_name, output_details in artifact_template.get( "Outputs", {}).items(): if template.outputs.get(output_name) is None: new_output = dict(**output_details) new_output["Value"] = t.GetAtt("GetOutputsCode", output_name) template.add_output(t.Output(output_name, **new_output)) cdk_deploy_parameter_args = " ".join(cdk_deploy_parameter_args) class DeployDetailsCustomResource(cloudformation.AWSCustomObject): resource_type = "Custom::DeployDetails" props = dict() runtime_versions = dict( nodejs=constants.BUILDSPEC_RUNTIME_VERSIONS_NODEJS_DEFAULT, ) if configuration.get("runtime-versions"): runtime_versions.update(configuration.get("runtime-versions")) extra_commands = list(configuration.get("install", {}).get("commands", [])) template.add_resource( codebuild.Project( "CDKDeploy", Name=t.Sub("${AWS::StackName}-deploy"), Description='Run CDK deploy for given source code', ServiceRole=t.Sub( "arn:aws:iam::${AWS::AccountId}:role${CDKSupportIAMRolePaths}${CDKSupportCDKDeployRoleName}" ), Artifacts=codebuild.Artifacts(Type="NO_ARTIFACTS", ), Environment=codebuild.Environment( ComputeType=t.Ref('CDKSupportCDKComputeType'), EnvironmentVariables=[ codebuild.EnvironmentVariable( 
Name="CDK_DEPLOY_REQUIRE_APPROVAL", Type="PLAINTEXT", Value="CHANGE_ME"), codebuild.EnvironmentVariable(Name="CDK_DEPLOY_EXTRA_ARGS", Type="PLAINTEXT", Value="CHANGE_ME"), codebuild.EnvironmentVariable( Name="CDK_TOOLKIT_STACK_NAME", Type="PLAINTEXT", Value="CHANGE_ME"), codebuild.EnvironmentVariable(Name="UId", Type="PLAINTEXT", Value="CHANGE_ME"), codebuild.EnvironmentVariable(Name="PUPPET_ACCOUNT_ID", Type="PLAINTEXT", Value="CHANGE_ME"), codebuild.EnvironmentVariable(Name="REGION", Type="PLAINTEXT", Value=t.Ref("AWS::Region")), codebuild.EnvironmentVariable( Name="CDK_DEPLOY_PARAMETER_ARGS", Type="PLAINTEXT", Value="CHANGE_ME"), codebuild.EnvironmentVariable(Name="ON_COMPLETE_URL", Type="PLAINTEXT", Value="CHANGE_ME"), codebuild.EnvironmentVariable(Name="NAME", Type="PLAINTEXT", Value="CHANGE_ME"), codebuild.EnvironmentVariable(Name="VERSION", Type="PLAINTEXT", Value="CHANGE_ME"), ], Image=t.Ref('CDKSupportCDKDeployImage'), Type="LINUX_CONTAINER", ), Source=codebuild.Source( Type="NO_SOURCE", BuildSpec=t.Sub( yaml.safe_dump( dict( version=0.2, phases=dict( install={ "runtime-versions": runtime_versions, "commands": [ "aws s3 cp s3://sc-factory-artifacts-$PUPPET_ACCOUNT_ID-$REGION/CDK/1.0.0/$NAME/$VERSION/$NAME-$VERSION.zip $NAME-$VERSION.zip", "unzip $NAME-$VERSION.zip", "npm install", ] + extra_commands }, build={ "commands": [ "npm run cdk deploy -- --toolkit-stack-name $CDK_TOOLKIT_STACK_NAME --require-approval $CDK_DEPLOY_REQUIRE_APPROVAL --outputs-file scf_outputs.json $CDK_DEPLOY_EXTRA_ARGS $CDK_DEPLOY_PARAMETER_ARGS '*'", "aws s3 cp scf_outputs.json s3://sc-cdk-artifacts-${AWS::AccountId}/CDK/1.0.0/$NAME/$VERSION/scf_outputs-$CODEBUILD_BUILD_ID.json", ] }, ), artifacts={ "name": "CDKDeploy", "files": ["*", "**/*"], }, ))), ), TimeoutInMinutes=480, )) template.add_resource( codebuild.Project( "CDKDestroy", Name=t.Sub("${AWS::StackName}-destroy"), Description='Run CDK destroy for given source code', ServiceRole=t.Sub( "arn:aws:iam::${AWS::AccountId}:role${CDKSupportIAMRolePaths}${CDKSupportCDKDeployRoleName}" ), Artifacts=codebuild.Artifacts(Type="NO_ARTIFACTS", ), Environment=codebuild.Environment( ComputeType=t.Ref('CDKSupportCDKComputeType'), EnvironmentVariables=[ codebuild.EnvironmentVariable( Name="CDK_DEPLOY_REQUIRE_APPROVAL", Type="PLAINTEXT", Value="CHANGE_ME"), codebuild.EnvironmentVariable(Name="CDK_DEPLOY_EXTRA_ARGS", Type="PLAINTEXT", Value="CHANGE_ME"), codebuild.EnvironmentVariable( Name="CDK_TOOLKIT_STACK_NAME", Type="PLAINTEXT", Value="CHANGE_ME"), codebuild.EnvironmentVariable(Name="UId", Type="PLAINTEXT", Value="CHANGE_ME"), codebuild.EnvironmentVariable(Name="PUPPET_ACCOUNT_ID", Type="PLAINTEXT", Value="CHANGE_ME"), codebuild.EnvironmentVariable(Name="REGION", Type="PLAINTEXT", Value=t.Ref("AWS::Region")), codebuild.EnvironmentVariable( Name="CDK_DEPLOY_PARAMETER_ARGS", Type="PLAINTEXT", Value="CHANGE_ME"), codebuild.EnvironmentVariable(Name="ON_COMPLETE_URL", Type="PLAINTEXT", Value="CHANGE_ME"), codebuild.EnvironmentVariable(Name="NAME", Type="PLAINTEXT", Value="CHANGE_ME"), codebuild.EnvironmentVariable(Name="VERSION", Type="PLAINTEXT", Value="CHANGE_ME"), ], Image=t.Ref('CDKSupportCDKDeployImage'), Type="LINUX_CONTAINER", ), Source=codebuild.Source( Type="NO_SOURCE", BuildSpec=t.Sub( yaml.safe_dump( dict( version=0.2, phases=dict( install={ "runtime-versions": runtime_versions, "commands": [ "aws s3 cp s3://sc-factory-artifacts-$PUPPET_ACCOUNT_ID-$REGION/CDK/1.0.0/$NAME/$VERSION/$NAME-$VERSION.zip $NAME-$VERSION.zip", "unzip $NAME-$VERSION.zip", "npm 
install", ] + extra_commands }, build={ "commands": [ "npm run cdk destroy -- --toolkit-stack-name $CDK_TOOLKIT_STACK_NAME --force --ignore-errors '*'" ] }, ), artifacts={ "name": "CDKDeploy", "files": ["*", "**/*"], }, ))), ), TimeoutInMinutes=480, )) template.add_resource( DeployDetailsCustomResource( "StartCDKDeploy", DependsOn=["CDKDeploy", "CDKDestroy"], ServiceToken=t.Ref("CDKSupportStartCDKDeployFunctionArn"), CreateUpdateProject=t.Ref("CDKDeploy"), DeleteProject=t.Ref("CDKDestroy"), CDK_DEPLOY_EXTRA_ARGS=t.Ref("CDKSupportCDKDeployExtraArgs"), CDK_TOOLKIT_STACK_NAME=t.Ref("CDKSupportCDKToolkitStackName"), PUPPET_ACCOUNT_ID=t.Ref("PuppetAccountId"), CDK_DEPLOY_PARAMETER_ARGS=t.Sub(cdk_deploy_parameter_args), CDK_DEPLOY_REQUIRE_APPROVAL=t.Ref( "CDKSupportCDKDeployRequireApproval"), NAME=product_name, VERSION=product_version, )) template.add_resource( DeployDetailsCustomResource( "GetOutputsCode", DependsOn=[ "StartCDKDeploy", ], ServiceToken=t.Ref( "CDKSupportGetOutputsForGivenCodebuildIdFunctionArn"), CodeBuildBuildId=t.GetAtt("StartCDKDeploy", "BuildId"), BucketName=t.Sub("sc-cdk-artifacts-${AWS::AccountId}"), ObjectKeyPrefix=t.Sub( f"CDK/1.0.0/{product_name}/{product_version}"), )) return template
# Create CodeBuild Projects # Image Build ImageArtifacts = codebuild.Artifacts(Type='S3', Name='artifacts', Location=Ref(S3Bucket)) ImageEnvironment = codebuild.Environment(ComputeType="BUILD_GENERAL1_SMALL", Image="aws/codebuild/docker:17.09.0", Type="LINUX_CONTAINER", EnvironmentVariables=[{ 'Name': 'AWS_ACCOUNT_ID', 'Value': Ref(AWS_ACCOUNT_ID) }, { 'Name': 'IMAGE_REPO_NAME', 'Value': Ref(Repository) }, { 'Name': 'IMAGE_TAG', 'Value': 'latest' }], PrivilegedMode=True) ImageSource = codebuild.Source( Location="https://github.com/jasonumiker/clair-ecs-fargate", Type="GITHUB") ImageProject = codebuild.Project("ImageBuildProject", Artifacts=ImageArtifacts,
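# Smoke test for a privileged Docker build environment like the one above;
# PrivilegedMode=True is what allows the Docker daemon to run inside
# CodeBuild for image builds:
def test_privileged_docker_environment(self):
    environment = codebuild.Environment(
        ComputeType='BUILD_GENERAL1_SMALL',
        Image='aws/codebuild/docker:17.09.0',
        Type='LINUX_CONTAINER',
        PrivilegedMode=True,
    )
    environment.to_dict()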
def get_resources() -> list: all_regions = config.get_regions() return [ codebuild.Project( "CDKPackage100", Name=CDK_PACKAGE_PROJECT_NAME, ServiceRole=t.Sub( "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/servicecatalog-product-factory/DeliveryCodeRole" ), Tags=t.Tags.from_dict( **{"ServiceCatalogPuppet:Actor": "Framework"}), Artifacts=codebuild.Artifacts(Type="CODEPIPELINE"), TimeoutInMinutes=60, Environment=codebuild.Environment( ComputeType=constants.ENVIRONMENT_COMPUTE_TYPE_DEFAULT, Image=constants.ENVIRONMENT_IMAGE_DEFAULT, Type=constants.ENVIRONMENT_TYPE_DEFAULT, EnvironmentVariables=[ { "Type": "PLAINTEXT", "Name": "ACCOUNT_ID", "Value": t.Sub("${AWS::AccountId}"), }, { "Type": "PLAINTEXT", "Name": "NAME", "Value": "CHANGE_ME" }, { "Type": "PLAINTEXT", "Name": "VERSION", "Value": "CHANGE_ME" }, { "Type": "PLAINTEXT", "Name": "CODEPIPELINE_ID", "Value": "CHANGE_ME", }, { "Type": "PLAINTEXT", "Name": "PIPELINE_NAME", "Value": "CHANGE_ME", }, { "Type": "PLAINTEXT", "Name": "TEMPLATE_FORMAT", "Value": "CHANGE_ME", }, ], ), Source=codebuild.Source( BuildSpec=t.Sub( yaml.safe_dump( dict( version=0.2, phases=dict(build={ "commands": [ 'zip -r $NAME-$VERSION.zip . -x "node_modules/*"' ] + [ f"aws cloudformation package --region {region} --template $(pwd)/product.template.yaml --s3-bucket sc-factory-artifacts-$ACCOUNT_ID-{region} --s3-prefix /CDK/1.0.0/$NAME/$VERSION --output-template-file product.template-{region}.yaml" for region in all_regions ] + [ f"aws s3 cp --quiet $NAME-$VERSION.zip s3://sc-factory-artifacts-$ACCOUNT_ID-{region}/CDK/1.0.0/$NAME/$VERSION/$NAME-$VERSION.zip" for region in all_regions ] }, ), artifacts={ "name": PACKAGE_OUTPUT_ARTIFACT, "files": ["product.template-*.yaml"], }, ))), Type="CODEPIPELINE", ), Description=t.Sub("Create a build stage for template CDK 1.0.0"), ), codebuild.Project( "CDKDeploy100", Name=CDK_DEPLOY_PROJECT_NAME, ServiceRole=t.Sub( "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/servicecatalog-product-factory/DeliveryCodeRole" ), Tags=t.Tags.from_dict( **{"ServiceCatalogPuppet:Actor": "Framework"}), Artifacts=codebuild.Artifacts(Type="CODEPIPELINE"), TimeoutInMinutes=60, Environment=codebuild.Environment( ComputeType=constants.ENVIRONMENT_COMPUTE_TYPE_DEFAULT, Image=constants.ENVIRONMENT_IMAGE_DEFAULT, Type=constants.ENVIRONMENT_TYPE_DEFAULT, EnvironmentVariables=[ codebuild.EnvironmentVariable( Type="PLAINTEXT", Name="ACCOUNT_ID", Value=t.Sub("${AWS::AccountId}"), ), codebuild.EnvironmentVariable(Name="PIPELINE_NAME", Type="PLAINTEXT", Value="CHANGE_ME"), codebuild.EnvironmentVariable(Name="CODEPIPELINE_ID", Type="PLAINTEXT", Value="CHANGE_ME"), ], ), Source=codebuild.Source( BuildSpec=t.Sub( yaml.safe_dump( dict( version=0.2, phases=dict( install={ "runtime-versions": dict(python="3.7", ), "commands": [ f"pip install {constants.VERSION}" if "http" in constants.VERSION else f"pip install aws-service-catalog-factory=={constants.VERSION}", ], }, build={"commands": get_commands_for_deploy()}, ), artifacts={ "name": DEPLOY_OUTPUT_ARTIFACT, "files": ["*", "**/*"], }, ))), Type="CODEPIPELINE", ), Description=t.Sub("Create a deploy stage for template CDK 1.0.0"), ), ]
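# The CHANGE_ME values above are deliberate placeholders: at run time the
# pipeline's CodeBuild actions override them through the action's
# EnvironmentVariables configuration. A hedged sketch of such an override
# (assumes `json` is imported in this module; values are illustrative):
deploy_action_configuration = {
    "ProjectName": CDK_DEPLOY_PROJECT_NAME,
    "EnvironmentVariables": t.Sub(
        json.dumps([
            dict(name="PIPELINE_NAME", value="${AWS::StackName}-pipeline", type="PLAINTEXT"),
            dict(name="CODEPIPELINE_ID", value="#{codepipeline.PipelineExecutionId}", type="PLAINTEXT"),
        ])
    ),
}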