def _build_project(name: str, action: InputResolver, role: Ref, bucket: Ref, tags: Tags) -> codebuild.Project:
    """Construct a CodeBuild project for the specified action.

    :param name: Logical resource name to use for project
    :param action: Action wrapped in an InputResolver
    :param role: Reference to CodeBuild role
    :param bucket: Reference to application resources bucket
    :param tags: Tags to add to project
    :return: Constructed project
    """
    # The resources-bucket reference is always exposed first, followed by
    # any action-specific environment variables.
    env_vars = [codebuild.EnvironmentVariable(Name="PIPEFORMER_S3_BUCKET", Value=bucket)]
    for key, value in action.env.items():
        env_vars.append(codebuild.EnvironmentVariable(Name=key, Value=value))

    build_environment = codebuild.Environment(
        ComputeType=action.compute_type,
        Type=action.environment_type,
        Image=action.image,
        EnvironmentVariables=env_vars,
    )
    return codebuild.Project(
        name,
        Name=Sub(f"${{{AWS_STACK_NAME}}}-{name}"),
        ServiceRole=role,
        Artifacts=codebuild.Artifacts(Type="CODEPIPELINE"),
        Source=codebuild.Source(Type="CODEPIPELINE", BuildSpec=action.buildspec),
        Environment=build_environment,
        Tags=tags,
    )
def _create_codebuild_project(self, code_build_role):
    """Create the application-package CodeBuild project and add it to the template.

    :param code_build_role: IAM role resource the project runs as.
    :return: The CodeBuild project resource that was registered.
    """
    build_environment = codebuild.Environment(
        ComputeType='BUILD_GENERAL1_LARGE',
        Image=Ref('CodeBuildImage'),
        Type='LINUX_CONTAINER',
        EnvironmentVariables=[
            # Expose the application bucket so the buildspec can upload to it.
            codebuild.EnvironmentVariable(
                Name='APP_S3_BUCKET',
                Value=Ref('ApplicationBucket'),
            ),
        ],
    )
    project = codebuild.Project(
        'AppPackageBuild',
        Artifacts=codebuild.Artifacts(Type='CODEPIPELINE'),
        Name=Sub('${ApplicationName}-build'),
        Environment=build_environment,
        ServiceRole=code_build_role.GetAtt('Arn'),
        Source=codebuild.Source(
            Type='CODEPIPELINE',
            BuildSpec='pipeline/buildspec.yml',
        ),
    )
    self._t.add_resource(project)
    return project
def generate_codebuild_resource(name):
    """Return a CodeBuild project running the named runway integration test.

    The project is sourced from GitHub and triggered by pull-request webhook
    events from known actor accounts.
    """
    test_environment = codebuild.Environment(
        ComputeType='BUILD_GENERAL1_SMALL',
        EnvironmentVariables=[
            codebuild.EnvironmentVariable(
                Name='DEPLOY_ENVIRONMENT',
                Type='PLAINTEXT',
                Value=variables['EnvironmentName'].ref),
            codebuild.EnvironmentVariable(
                Name='TEST_TO_RUN',
                Type='PLAINTEXT',
                Value=name.lower()),
        ],
        Image='aws/codebuild/standard:2.0',
        Type='LINUX_CONTAINER')
    # All filters in a single group must match: known actor account,
    # PR created/updated/reopened, base branch release, head branch master.
    webhook_filters = [
        codebuild.WebhookFilter(
            Type='ACTOR_ACCOUNT_ID',
            Pattern='|'.join(str(x) for x in GITHUB_ACCOUNT_IDS)),
        codebuild.WebhookFilter(
            Type='EVENT',
            Pattern='PULL_REQUEST_CREATED,'
                    'PULL_REQUEST_UPDATED,'
                    'PULL_REQUEST_REOPENED'),
        codebuild.WebhookFilter(
            Type='BASE_REF',
            Pattern='^refs/heads/release$'),
        codebuild.WebhookFilter(
            Type='HEAD_REF',
            Pattern='^refs/heads/master$'),
    ]
    return codebuild.Project(
        f'RunwayIntegrationTest{name}',
        Artifacts=codebuild.Artifacts(Type='NO_ARTIFACTS'),
        Description=f'{name} runway integration tests',
        Environment=test_environment,
        Name=f'runway-int-test-{name}',
        ServiceRole=codebuild_role.get_att('Arn'),
        Source=codebuild.Source(
            Type='GITHUB',
            Location=variables['GitHubUrl'].ref),
        Triggers=codebuild.ProjectTriggers(
            Webhook=True,
            FilterGroups=[webhook_filters]))
def _codebuild_builder(role: iam.Role, application_bucket: s3.Bucket) -> codebuild.Project:
    """Build and return the CodeBuild Project resource to be used to build the decrypt oracle."""
    return codebuild.Project(
        "{}Build".format(APPLICATION_NAME),
        Artifacts=codebuild.Artifacts(Type="CODEPIPELINE"),
        Environment=codebuild.Environment(
            ComputeType="BUILD_GENERAL1_SMALL",
            Image=CODEBUILD_IMAGE,
            Type="LINUX_CONTAINER",
            EnvironmentVariables=[
                # Expose the application bucket name to the buildspec.
                codebuild.EnvironmentVariable(Name="APP_S3_BUCKET", Value=Ref(application_bucket))
            ],
        ),
        Name=APPLICATION_NAME,
        ServiceRole=Ref(role),
        Source=codebuild.Source(Type="CODEPIPELINE", BuildSpec=BUILDSPEC),
    )
def add_test_resources(test_name):
    """Add the resources for the given test.

    Registers two resources on the module-level ``template``: an IAM role
    that CodeBuild assumes while running the test, and a CodeBuild project
    sourced from GitHub that is triggered by pull-request webhook events.

    :param test_name: Name of the integration test to create resources for.
    """
    codebuild_role = template.add_resource(
        iam.Role(
            "CodeBuildRole{}".format(test_name),
            AssumeRolePolicyDocument=make_simple_assume_policy(
                "codebuild.amazonaws.com"),
            # Per-test inline policy produced by the shared builder.
            Policies=IAM_POLICY_BUILDER.build(test_name),
        ))
    template.add_resource(
        codebuild.Project(
            f"RunwayIntegrationTest{test_name}",
            Artifacts=codebuild.Artifacts(Type="NO_ARTIFACTS"),
            Description=f"{test_name} runway integration tests",
            Environment=codebuild.Environment(
                ComputeType="BUILD_GENERAL1_SMALL",
                EnvironmentVariables=[
                    codebuild.EnvironmentVariable(
                        Name="DEPLOY_ENVIRONMENT",
                        Type="PLAINTEXT",
                        Value=variables["EnvironmentName"].ref,
                    ),
                    codebuild.EnvironmentVariable(
                        Name="TEST_TO_RUN",
                        Type="PLAINTEXT",
                        Value=test_name.lower(),
                    ),
                    codebuild.EnvironmentVariable(
                        # Disable emojis in output.
                        Name="PIPENV_HIDE_EMOJIS",
                        Type="PLAINTEXT",
                        Value="1",
                    ),
                    codebuild.EnvironmentVariable(
                        # Disable terminal spinner.
                        Name="PIPENV_NOSPIN",
                        Type="PLAINTEXT",
                        Value="1",
                    ),
                    codebuild.EnvironmentVariable(
                        # Pipenv automatically assumes "yes" at all prompts.
                        Name="PIPENV_YES",
                        Type="PLAINTEXT",
                        Value="1",
                    ),
                ],
                Image="aws/codebuild/standard:2.0",
                Type="LINUX_CONTAINER",
            ),
            Name=f"runway-int-test-{test_name}",
            ServiceRole=codebuild_role.get_att("Arn"),
            Source=codebuild.Source(
                Type="GITHUB",
                Location=variables["GitHubUrl"].ref),
            # Single filter group: all filters must match — PR events from
            # known actor accounts, head branch master, base branch release.
            Triggers=codebuild.ProjectTriggers(
                Webhook=True,
                FilterGroups=[[
                    codebuild.WebhookFilter(
                        Type="ACTOR_ACCOUNT_ID",
                        Pattern="|".join(
                            str(x) for x in GITHUB_ACCOUNT_IDS),
                    ),
                    codebuild.WebhookFilter(
                        Type="EVENT",
                        Pattern="PULL_REQUEST_CREATED,"
                        "PULL_REQUEST_UPDATED,"
                        "PULL_REQUEST_REOPENED",
                    ),
                    codebuild.WebhookFilter(
                        Type="BASE_REF",
                        Pattern="^refs/heads/release$"),
                    codebuild.WebhookFilter(
                        Type="HEAD_REF",
                        Pattern="^refs/heads/master$"),
                ]],
            ),
        ))
def create_cdk_pipeline(name, version, product_name, product_version, template_config, p) -> t.Template:
    """Build a template that deploys/destroys a synthesised CDK app via CodeBuild.

    Reads the CDK synth output under ``p`` (``manifest.json`` plus the
    synthesised stack templates), mirrors each stack's Parameters and Outputs
    onto this template, and declares CDKDeploy/CDKDestroy CodeBuild projects
    driven by custom resources.

    :param name: Pipeline type name (embedded in the description).
    :param version: Pipeline type version (embedded in the description).
    :param product_name: Product name used for artifact paths.
    :param product_version: Product version used for artifact paths.
    :param template_config: Dict holding a "Configuration" section.
    :param p: Base path containing the CDK synth output directory.
    :return: The constructed troposphere Template.
    """
    description = f"""Builds a cdk pipeline {{"version": "{constants.VERSION}", "framework": "servicecatalog-factory", "role": "product-pipeline", "type": "{name}", "version": "{version}"}}"""
    # NOTE(review): template_config.get("Configuration") can return None,
    # which would make the configuration.get(...) calls below raise —
    # confirm callers always provide a "Configuration" section.
    configuration = template_config.get("Configuration")
    template = t.Template(Description=description)
    template.add_parameter(t.Parameter("PuppetAccountId", Type="String"))
    template.add_parameter(
        t.Parameter("CDKSupportCDKDeployRequireApproval", Type="String", Default="never"))
    template.add_parameter(
        t.Parameter("CDKSupportCDKComputeType", Type="String", Default="BUILD_GENERAL1_SMALL"))
    template.add_parameter(
        t.Parameter("CDKSupportCDKDeployImage", Type="String", Default="aws/codebuild/standard:4.0"))
    template.add_parameter(
        t.Parameter("CDKSupportCDKToolkitStackName", Type="String", Default="CDKToolKit"))
    template.add_parameter(
        t.Parameter(
            "CDKSupportCDKDeployExtraArgs",
            Type="String",
            Default="",
            Description="Extra args to pass to CDK deploy",
        ))
    template.add_parameter(
        t.Parameter(
            "CDKSupportStartCDKDeployFunctionArn",
            Type="String",
        ))
    template.add_parameter(
        t.Parameter(
            "CDKSupportGetOutputsForGivenCodebuildIdFunctionArn",
            Type="String",
        ))
    template.add_parameter(
        t.Parameter("CDKSupportIAMRolePaths", Type="String",
                    Default="/servicecatalog-factory-cdk-support/"))
    template.add_parameter(
        t.Parameter("CDKSupportCDKDeployRoleName", Type="String",
                    Default="CDKDeployRoleName"))

    manifest = json.loads(open(f"{p}/{PREFIX}/manifest.json", "r").read())
    cdk_deploy_parameter_args = list()
    # Mirror every synthesised stack's Parameters and Outputs onto this
    # template, and collect `--parameters stack:name=${name}` args for the
    # cdk deploy command.
    for artifact_name, artifact in manifest.get("artifacts", {}).items():
        if artifact.get("type") == "aws:cloudformation:stack":
            artifact_template_file_path = artifact.get("properties", {}).get("templateFile")
            assert (
                artifact_template_file_path
            ), f"Could not find template file in manifest.json for {artifact_name}"
            artifact_template = json.loads(
                open(f"{p}/{PREFIX}/{artifact_template_file_path}", "r").read())
            for parameter_name, parameter_details in artifact_template.get(
                    "Parameters", {}).items():
                if template.parameters.get(parameter_name) is None:
                    template.add_parameter(
                        t.Parameter(parameter_name, **parameter_details))
                cdk_deploy_parameter_args.append(
                    f"--parameters {artifact_name}:{parameter_name}=${{{parameter_name}}}"
                )
            for output_name, output_details in artifact_template.get(
                    "Outputs", {}).items():
                if template.outputs.get(output_name) is None:
                    new_output = dict(**output_details)
                    # Output values are resolved by the GetOutputsCode custom
                    # resource declared at the end of this function.
                    new_output["Value"] = t.GetAtt("GetOutputsCode", output_name)
                    template.add_output(t.Output(output_name, **new_output))
    cdk_deploy_parameter_args = " ".join(cdk_deploy_parameter_args)

    class DeployDetailsCustomResource(cloudformation.AWSCustomObject):
        """Custom resource type used to start builds and fetch their outputs."""
        resource_type = "Custom::DeployDetails"
        props = dict()

    runtime_versions = dict(
        nodejs=constants.BUILDSPEC_RUNTIME_VERSIONS_NODEJS_DEFAULT,
    )
    if configuration.get("runtime-versions"):
        runtime_versions.update(configuration.get("runtime-versions"))
    extra_commands = list(configuration.get("install", {}).get("commands", []))

    # CodeBuild project that downloads the product zip and runs `cdk deploy`.
    # The CHANGE_ME env var values are overwritten by the custom resource at
    # build-start time.
    template.add_resource(
        codebuild.Project(
            "CDKDeploy",
            Name=t.Sub("${AWS::StackName}-deploy"),
            Description='Run CDK deploy for given source code',
            ServiceRole=t.Sub(
                "arn:aws:iam::${AWS::AccountId}:role${CDKSupportIAMRolePaths}${CDKSupportCDKDeployRoleName}"
            ),
            Artifacts=codebuild.Artifacts(Type="NO_ARTIFACTS", ),
            Environment=codebuild.Environment(
                ComputeType=t.Ref('CDKSupportCDKComputeType'),
                EnvironmentVariables=[
                    codebuild.EnvironmentVariable(
                        Name="CDK_DEPLOY_REQUIRE_APPROVAL",
                        Type="PLAINTEXT",
                        Value="CHANGE_ME"),
                    codebuild.EnvironmentVariable(Name="CDK_DEPLOY_EXTRA_ARGS",
                                                  Type="PLAINTEXT",
                                                  Value="CHANGE_ME"),
                    codebuild.EnvironmentVariable(
                        Name="CDK_TOOLKIT_STACK_NAME",
                        Type="PLAINTEXT",
                        Value="CHANGE_ME"),
                    codebuild.EnvironmentVariable(Name="UId",
                                                  Type="PLAINTEXT",
                                                  Value="CHANGE_ME"),
                    codebuild.EnvironmentVariable(Name="PUPPET_ACCOUNT_ID",
                                                  Type="PLAINTEXT",
                                                  Value="CHANGE_ME"),
                    codebuild.EnvironmentVariable(Name="REGION",
                                                  Type="PLAINTEXT",
                                                  Value=t.Ref("AWS::Region")),
                    codebuild.EnvironmentVariable(
                        Name="CDK_DEPLOY_PARAMETER_ARGS",
                        Type="PLAINTEXT",
                        Value="CHANGE_ME"),
                    codebuild.EnvironmentVariable(Name="ON_COMPLETE_URL",
                                                  Type="PLAINTEXT",
                                                  Value="CHANGE_ME"),
                    codebuild.EnvironmentVariable(Name="NAME",
                                                  Type="PLAINTEXT",
                                                  Value="CHANGE_ME"),
                    codebuild.EnvironmentVariable(Name="VERSION",
                                                  Type="PLAINTEXT",
                                                  Value="CHANGE_ME"),
                ],
                Image=t.Ref('CDKSupportCDKDeployImage'),
                Type="LINUX_CONTAINER",
            ),
            Source=codebuild.Source(
                Type="NO_SOURCE",
                BuildSpec=t.Sub(
                    yaml.safe_dump(
                        dict(
                            version=0.2,
                            phases=dict(
                                install={
                                    "runtime-versions": runtime_versions,
                                    "commands": [
                                        "aws s3 cp s3://sc-factory-artifacts-$PUPPET_ACCOUNT_ID-$REGION/CDK/1.0.0/$NAME/$VERSION/$NAME-$VERSION.zip $NAME-$VERSION.zip",
                                        "unzip $NAME-$VERSION.zip",
                                        "npm install",
                                    ] + extra_commands
                                },
                                build={
                                    "commands": [
                                        "npm run cdk deploy -- --toolkit-stack-name $CDK_TOOLKIT_STACK_NAME --require-approval $CDK_DEPLOY_REQUIRE_APPROVAL --outputs-file scf_outputs.json $CDK_DEPLOY_EXTRA_ARGS $CDK_DEPLOY_PARAMETER_ARGS '*'",
                                        "aws s3 cp scf_outputs.json s3://sc-cdk-artifacts-${AWS::AccountId}/CDK/1.0.0/$NAME/$VERSION/scf_outputs-$CODEBUILD_BUILD_ID.json",
                                    ]
                                },
                            ),
                            artifacts={
                                "name": "CDKDeploy",
                                "files": ["*", "**/*"],
                            },
                        ))),
            ),
            TimeoutInMinutes=480,
        ))

    # Mirror project that runs `cdk destroy`; used on stack deletion.
    template.add_resource(
        codebuild.Project(
            "CDKDestroy",
            Name=t.Sub("${AWS::StackName}-destroy"),
            Description='Run CDK destroy for given source code',
            ServiceRole=t.Sub(
                "arn:aws:iam::${AWS::AccountId}:role${CDKSupportIAMRolePaths}${CDKSupportCDKDeployRoleName}"
            ),
            Artifacts=codebuild.Artifacts(Type="NO_ARTIFACTS", ),
            Environment=codebuild.Environment(
                ComputeType=t.Ref('CDKSupportCDKComputeType'),
                EnvironmentVariables=[
                    codebuild.EnvironmentVariable(
                        Name="CDK_DEPLOY_REQUIRE_APPROVAL",
                        Type="PLAINTEXT",
                        Value="CHANGE_ME"),
                    codebuild.EnvironmentVariable(Name="CDK_DEPLOY_EXTRA_ARGS",
                                                  Type="PLAINTEXT",
                                                  Value="CHANGE_ME"),
                    codebuild.EnvironmentVariable(
                        Name="CDK_TOOLKIT_STACK_NAME",
                        Type="PLAINTEXT",
                        Value="CHANGE_ME"),
                    codebuild.EnvironmentVariable(Name="UId",
                                                  Type="PLAINTEXT",
                                                  Value="CHANGE_ME"),
                    codebuild.EnvironmentVariable(Name="PUPPET_ACCOUNT_ID",
                                                  Type="PLAINTEXT",
                                                  Value="CHANGE_ME"),
                    codebuild.EnvironmentVariable(Name="REGION",
                                                  Type="PLAINTEXT",
                                                  Value=t.Ref("AWS::Region")),
                    codebuild.EnvironmentVariable(
                        Name="CDK_DEPLOY_PARAMETER_ARGS",
                        Type="PLAINTEXT",
                        Value="CHANGE_ME"),
                    codebuild.EnvironmentVariable(Name="ON_COMPLETE_URL",
                                                  Type="PLAINTEXT",
                                                  Value="CHANGE_ME"),
                    codebuild.EnvironmentVariable(Name="NAME",
                                                  Type="PLAINTEXT",
                                                  Value="CHANGE_ME"),
                    codebuild.EnvironmentVariable(Name="VERSION",
                                                  Type="PLAINTEXT",
                                                  Value="CHANGE_ME"),
                ],
                Image=t.Ref('CDKSupportCDKDeployImage'),
                Type="LINUX_CONTAINER",
            ),
            Source=codebuild.Source(
                Type="NO_SOURCE",
                BuildSpec=t.Sub(
                    yaml.safe_dump(
                        dict(
                            version=0.2,
                            phases=dict(
                                install={
                                    "runtime-versions": runtime_versions,
                                    "commands": [
                                        "aws s3 cp s3://sc-factory-artifacts-$PUPPET_ACCOUNT_ID-$REGION/CDK/1.0.0/$NAME/$VERSION/$NAME-$VERSION.zip $NAME-$VERSION.zip",
                                        "unzip $NAME-$VERSION.zip",
                                        "npm install",
                                    ] + extra_commands
                                },
                                build={
                                    "commands": [
                                        "npm run cdk destroy -- --toolkit-stack-name $CDK_TOOLKIT_STACK_NAME --force --ignore-errors '*'"
                                    ]
                                },
                            ),
                            artifacts={
                                "name": "CDKDeploy",
                                "files": ["*", "**/*"],
                            },
                        ))),
            ),
            TimeoutInMinutes=480,
        ))

    # Custom resource that starts CDKDeploy on create/update and CDKDestroy
    # on delete, passing through the parameter/config values.
    template.add_resource(
        DeployDetailsCustomResource(
            "StartCDKDeploy",
            DependsOn=["CDKDeploy", "CDKDestroy"],
            ServiceToken=t.Ref("CDKSupportStartCDKDeployFunctionArn"),
            CreateUpdateProject=t.Ref("CDKDeploy"),
            DeleteProject=t.Ref("CDKDestroy"),
            CDK_DEPLOY_EXTRA_ARGS=t.Ref("CDKSupportCDKDeployExtraArgs"),
            CDK_TOOLKIT_STACK_NAME=t.Ref("CDKSupportCDKToolkitStackName"),
            PUPPET_ACCOUNT_ID=t.Ref("PuppetAccountId"),
            CDK_DEPLOY_PARAMETER_ARGS=t.Sub(cdk_deploy_parameter_args),
            CDK_DEPLOY_REQUIRE_APPROVAL=t.Ref(
                "CDKSupportCDKDeployRequireApproval"),
            NAME=product_name,
            VERSION=product_version,
        ))
    # Custom resource that reads the deploy build's outputs file from S3 so
    # template Outputs can reference them via GetAtt.
    template.add_resource(
        DeployDetailsCustomResource(
            "GetOutputsCode",
            DependsOn=[
                "StartCDKDeploy",
            ],
            ServiceToken=t.Ref(
                "CDKSupportGetOutputsForGivenCodebuildIdFunctionArn"),
            CodeBuildBuildId=t.GetAtt("StartCDKDeploy", "BuildId"),
            BucketName=t.Sub("sc-cdk-artifacts-${AWS::AccountId}"),
            ObjectKeyPrefix=t.Sub(
                f"CDK/1.0.0/{product_name}/{product_version}"),
        ))
    return template
def get_resources() -> list:
    """Return the factory delivery CodeBuild projects.

    Two CODEPIPELINE-sourced projects are returned: ``Validate`` (uploads
    the product template to the validate bucket and runs
    ``cloudformation validate-template``) and ``Deploy`` (installs the
    factory package and runs the deploy commands).
    """
    # NOTE(review): all_regions appears unused in this function — confirm
    # against callers/history before removing.
    all_regions = config.get_regions()
    return [
        codebuild.Project(
            "Validate",
            Name=VALIDATE_PROJECT_NAME,
            ServiceRole=t.Sub(
                "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/servicecatalog-product-factory/DeliveryCodeRole"
            ),
            Tags=t.Tags.from_dict(
                **{"ServiceCatalogPuppet:Actor": "Framework"}),
            Artifacts=codebuild.Artifacts(Type="CODEPIPELINE"),
            TimeoutInMinutes=60,
            Environment=codebuild.Environment(
                ComputeType=constants.ENVIRONMENT_COMPUTE_TYPE_DEFAULT,
                Image=constants.ENVIRONMENT_IMAGE_DEFAULT,
                Type=constants.ENVIRONMENT_TYPE_DEFAULT,
                EnvironmentVariables=[
                    codebuild.EnvironmentVariable(
                        Name="TEMPLATE_FORMAT",
                        Type="PLAINTEXT",
                        Value="yaml",
                    )
                ],
            ),
            Source=codebuild.Source(
                BuildSpec=t.Sub(
                    yaml.safe_dump(
                        dict(
                            version=0.2,
                            phases=dict(build={
                                "commands": [
                                    "export FactoryTemplateValidateBucket=$(aws cloudformation list-stack-resources --stack-name servicecatalog-factory --query 'StackResourceSummaries[?LogicalResourceId==`FactoryTemplateValidateBucket`].PhysicalResourceId' --output text)",
                                    "aws s3 cp product.template.$TEMPLATE_FORMAT s3://$FactoryTemplateValidateBucket/$CODEBUILD_BUILD_ID.$TEMPLATE_FORMAT",
                                    "aws cloudformation validate-template --template-url https://$FactoryTemplateValidateBucket.s3.$AWS_REGION.amazonaws.com/$CODEBUILD_BUILD_ID.$TEMPLATE_FORMAT",
                                ]
                            }, ),
                            artifacts=dict(
                                name=VALIDATE_OUTPUT_ARTIFACT,
                                files=["*", "**/*"],
                            ),
                        ))),
                Type="CODEPIPELINE",
            ),
            Description=t.Sub("Run validate"),
        ),
        codebuild.Project(
            "Deploy",
            Name=DEPLOY_IN_GOVCLOUD_PROJECT_NAME,
            ServiceRole=t.Sub(
                "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/servicecatalog-product-factory/DeliveryCodeRole"
            ),
            Tags=t.Tags.from_dict(
                **{"ServiceCatalogPuppet:Actor": "Framework"}),
            Artifacts=codebuild.Artifacts(Type="CODEPIPELINE"),
            TimeoutInMinutes=60,
            Environment=codebuild.Environment(
                ComputeType=constants.ENVIRONMENT_COMPUTE_TYPE_DEFAULT,
                Image=constants.ENVIRONMENT_IMAGE_DEFAULT,
                Type=constants.ENVIRONMENT_TYPE_DEFAULT,
                # CHANGE_ME values are overridden by the pipeline at run time.
                EnvironmentVariables=[
                    codebuild.EnvironmentVariable(
                        Type="PLAINTEXT",
                        Name="ACCOUNT_ID",
                        Value=t.Sub("${AWS::AccountId}"),
                    ),
                    codebuild.EnvironmentVariable(Name="PIPELINE_NAME",
                                                  Type="PLAINTEXT",
                                                  Value="CHANGE_ME"),
                    codebuild.EnvironmentVariable(Name="CODEPIPELINE_ID",
                                                  Type="PLAINTEXT",
                                                  Value="CHANGE_ME"),
                ],
            ),
            Source=codebuild.Source(
                BuildSpec=t.Sub(
                    yaml.safe_dump(
                        dict(
                            version=0.2,
                            phases=dict(
                                install={
                                    "runtime-versions": dict(
                                        python="3.7",
                                        nodejs=constants.
                                        BUILDSPEC_RUNTIME_VERSIONS_NODEJS_DEFAULT,
                                    ),
                                    # Install from URL when VERSION points at
                                    # an http(s) artifact, otherwise from PyPI.
                                    "commands": [
                                        f"pip install {constants.VERSION}"
                                        if "http" in constants.VERSION
                                        else f"pip install aws-service-catalog-factory=={constants.VERSION}",
                                    ],
                                },
                                build={"commands": get_commands_for_deploy()},
                            ),
                            artifacts={
                                "name": DEPLOY_OUTPUT_ARTIFACT,
                                "files": ["*", "**/*"],
                            },
                        ))),
                Type="CODEPIPELINE",
            ),
            Description=t.Sub(
                "Create a deploy stage for template cloudformation"),
        ),
    ]
def get_resources() -> list:
    """Return the CDK 1.0.0 product pipeline CodeBuild projects.

    Two CODEPIPELINE-sourced projects are returned: ``CDKPackage100`` (zips
    the source and runs ``aws cloudformation package`` for every configured
    region) and ``CDKDeploy100`` (installs the factory package and runs the
    deploy commands).
    """
    all_regions = config.get_regions()
    return [
        codebuild.Project(
            "CDKPackage100",
            Name=CDK_PACKAGE_PROJECT_NAME,
            ServiceRole=t.Sub(
                "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/servicecatalog-product-factory/DeliveryCodeRole"
            ),
            Tags=t.Tags.from_dict(
                **{"ServiceCatalogPuppet:Actor": "Framework"}),
            Artifacts=codebuild.Artifacts(Type="CODEPIPELINE"),
            TimeoutInMinutes=60,
            Environment=codebuild.Environment(
                ComputeType=constants.ENVIRONMENT_COMPUTE_TYPE_DEFAULT,
                Image=constants.ENVIRONMENT_IMAGE_DEFAULT,
                Type=constants.ENVIRONMENT_TYPE_DEFAULT,
                # Plain dicts matching the EnvironmentVariable property
                # shape; CHANGE_ME values are overridden at run time.
                EnvironmentVariables=[
                    {
                        "Type": "PLAINTEXT",
                        "Name": "ACCOUNT_ID",
                        "Value": t.Sub("${AWS::AccountId}"),
                    },
                    {
                        "Type": "PLAINTEXT",
                        "Name": "NAME",
                        "Value": "CHANGE_ME"
                    },
                    {
                        "Type": "PLAINTEXT",
                        "Name": "VERSION",
                        "Value": "CHANGE_ME"
                    },
                    {
                        "Type": "PLAINTEXT",
                        "Name": "CODEPIPELINE_ID",
                        "Value": "CHANGE_ME",
                    },
                    {
                        "Type": "PLAINTEXT",
                        "Name": "PIPELINE_NAME",
                        "Value": "CHANGE_ME",
                    },
                    {
                        "Type": "PLAINTEXT",
                        "Name": "TEMPLATE_FORMAT",
                        "Value": "CHANGE_ME",
                    },
                ],
            ),
            Source=codebuild.Source(
                BuildSpec=t.Sub(
                    yaml.safe_dump(
                        dict(
                            version=0.2,
                            # Zip the source (excluding node_modules), package
                            # the template per region, then upload the zip to
                            # each regional factory artifacts bucket.
                            phases=dict(build={
                                "commands": [
                                    'zip -r $NAME-$VERSION.zip . -x "node_modules/*"'
                                ] + [
                                    f"aws cloudformation package --region {region} --template $(pwd)/product.template.yaml --s3-bucket sc-factory-artifacts-$ACCOUNT_ID-{region} --s3-prefix /CDK/1.0.0/$NAME/$VERSION --output-template-file product.template-{region}.yaml"
                                    for region in all_regions
                                ] + [
                                    f"aws s3 cp --quiet $NAME-$VERSION.zip s3://sc-factory-artifacts-$ACCOUNT_ID-{region}/CDK/1.0.0/$NAME/$VERSION/$NAME-$VERSION.zip"
                                    for region in all_regions
                                ]
                            }, ),
                            artifacts={
                                "name": PACKAGE_OUTPUT_ARTIFACT,
                                "files": ["product.template-*.yaml"],
                            },
                        ))),
                Type="CODEPIPELINE",
            ),
            Description=t.Sub("Create a build stage for template CDK 1.0.0"),
        ),
        codebuild.Project(
            "CDKDeploy100",
            Name=CDK_DEPLOY_PROJECT_NAME,
            ServiceRole=t.Sub(
                "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/servicecatalog-product-factory/DeliveryCodeRole"
            ),
            Tags=t.Tags.from_dict(
                **{"ServiceCatalogPuppet:Actor": "Framework"}),
            Artifacts=codebuild.Artifacts(Type="CODEPIPELINE"),
            TimeoutInMinutes=60,
            Environment=codebuild.Environment(
                ComputeType=constants.ENVIRONMENT_COMPUTE_TYPE_DEFAULT,
                Image=constants.ENVIRONMENT_IMAGE_DEFAULT,
                Type=constants.ENVIRONMENT_TYPE_DEFAULT,
                EnvironmentVariables=[
                    codebuild.EnvironmentVariable(
                        Type="PLAINTEXT",
                        Name="ACCOUNT_ID",
                        Value=t.Sub("${AWS::AccountId}"),
                    ),
                    codebuild.EnvironmentVariable(Name="PIPELINE_NAME",
                                                  Type="PLAINTEXT",
                                                  Value="CHANGE_ME"),
                    codebuild.EnvironmentVariable(Name="CODEPIPELINE_ID",
                                                  Type="PLAINTEXT",
                                                  Value="CHANGE_ME"),
                ],
            ),
            Source=codebuild.Source(
                BuildSpec=t.Sub(
                    yaml.safe_dump(
                        dict(
                            version=0.2,
                            phases=dict(
                                install={
                                    "runtime-versions": dict(python="3.7", ),
                                    # Install from URL when VERSION points at
                                    # an http(s) artifact, otherwise from PyPI.
                                    "commands": [
                                        f"pip install {constants.VERSION}"
                                        if "http" in constants.VERSION
                                        else f"pip install aws-service-catalog-factory=={constants.VERSION}",
                                    ],
                                },
                                build={"commands": get_commands_for_deploy()},
                            ),
                            artifacts={
                                "name": DEPLOY_OUTPUT_ARTIFACT,
                                "files": ["*", "**/*"],
                            },
                        ))),
                Type="CODEPIPELINE",
            ),
            Description=t.Sub("Create a deploy stage for template CDK 1.0.0"),
        ),
    ]
def create_template(self):
    """Create template (main function called by Stacker).

    Builds a CodeBuild role scoped to the integration-test resources plus a
    GitHub-webhook-triggered CodeBuild project that runs the runway
    integration tests.
    """
    template = self.template
    variables = self.get_variables()
    template.set_version('2010-09-09')
    template.set_description('Runway CodeBuild Project')

    # Resources
    deploy_name_list = [
        'runway-integration-tests-', variables['EnvironmentName'].ref
    ]

    # This must match what is in the Terraform
    # integration tests. This corresponds to the template listed in
    # integration_tests\test_terraform\tf_state.cfn
    test_suite_prefix = 'testsuite-tf-state'
    codebuild_role = template.add_resource(
        iam.Role(
            'CodeBuildRole',
            AssumeRolePolicyDocument=make_simple_assume_policy(
                'codebuild.amazonaws.com'),
            # todo: drop this broad access in favor of more narrow
            # permissions (will mean identifying all the needed
            # permissions across all tests)
            ManagedPolicyArns=[
                'arn:aws:iam::aws:policy/AdministratorAccess'
            ],
            Policies=[
                iam.Policy(
                    PolicyName=Join('', deploy_name_list + ['-policy']),
                    PolicyDocument=PolicyDocument(
                        Version='2012-10-17',
                        Statement=[
                            # CloudWatch Logs access for the project's own
                            # log group.
                            Statement(Action=[
                                awacs.logs.CreateLogGroup,
                                awacs.logs.CreateLogStream,
                                awacs.logs.PutLogEvents
                            ],
                                      Effect=Allow,
                                      Resource=[
                                          Join('', [
                                              'arn:', Partition, ':logs:',
                                              Region, ':', AccountId,
                                              ':log-group:/aws/codebuild/'
                                          ] + deploy_name_list + ['*'] + x)
                                          for x in [[':*'], [':*/*']]
                                      ]),
                            # Allow assuming the per-environment test role in
                            # the alternate testing account.
                            Statement(
                                Action=[awacs.sts.AssumeRole],
                                Effect=Allow,
                                Resource=[
                                    Join(
                                        '',
                                        [
                                            'arn:', Partition, ':iam::',
                                            ALT_TESTING_ACCOUNT_ID,
                                            ':role/runway-integration-test-role-',  # noqa
                                            variables['EnvironmentName'].ref
                                        ])
                                ]),
                            # The statements below are scoped to resources
                            # whose names begin with test_suite_prefix.
                            Statement(
                                Action=[Action('cloudformation', '*')],
                                Effect=Allow,
                                Resource=[
                                    Join(':', [
                                        'arn', Partition, 'cloudformation',
                                        Region, AccountId,
                                        Sub('stack/${prefix}/*',
                                            {'prefix': test_suite_prefix})
                                    ])
                                ]),
                            Statement(
                                Action=[Action('dynamodb', '*')],
                                Effect=Allow,
                                Resource=[
                                    Join(':', [
                                        'arn', Partition, 'dynamodb', Region,
                                        AccountId,
                                        Sub('table/${prefix}-*',
                                            {'prefix': test_suite_prefix})
                                    ])
                                ]),
                            Statement(
                                Action=[Action('s3', '*')],
                                Effect=Allow,
                                Resource=[
                                    Join(':', [
                                        'arn', Partition,
                                        Sub('s3:::${prefix}',
                                            {'prefix': test_suite_prefix})
                                    ]),
                                    Join(':', [
                                        'arn', Partition,
                                        Sub('s3:::${prefix}/*',
                                            {'prefix': test_suite_prefix})
                                    ])
                                ]),
                            Statement(
                                Action=[Action('sqs', '*')],
                                Effect=Allow,
                                Resource=[
                                    Join(':', [
                                        'arn', Partition, 'sqs', Region,
                                        AccountId, 'terraform-*'
                                    ])
                                ])
                        ]))
            ]))
    template.add_resource(
        codebuild.Project(
            'RunwayIntegrationTests',
            Artifacts=codebuild.Artifacts(Type='NO_ARTIFACTS'),
            Environment=codebuild.Environment(
                ComputeType='BUILD_GENERAL1_SMALL',
                EnvironmentVariables=[
                    codebuild.EnvironmentVariable(Name='CI',
                                                  Type='PLAINTEXT',
                                                  Value='1'),
                    codebuild.EnvironmentVariable(
                        Name='DEPLOY_ENVIRONMENT',
                        Type='PLAINTEXT',
                        Value=variables['EnvironmentName'].ref)
                ],
                Image='aws/codebuild/standard:2.0',
                Type='LINUX_CONTAINER'),
            Name=Join('', deploy_name_list),
            ServiceRole=codebuild_role.get_att('Arn'),
            Source=codebuild.Source(Type='GITHUB',
                                    Location=variables['GitHubUrl'].ref),
            # Single filter group: all filters must match — PR events from
            # known actor accounts, head branch master, base branch release.
            Triggers=codebuild.ProjectTriggers(
                Webhook=True,
                FilterGroups=[[
                    codebuild.WebhookFilter(
                        Type='ACTOR_ACCOUNT_ID',
                        Pattern='|'.join(
                            str(x) for x in GITHUB_ACCOUNT_IDS)),
                    codebuild.WebhookFilter(
                        Type='EVENT',
                        Pattern=
                        'PULL_REQUEST_CREATED,PULL_REQUEST_UPDATED,PULL_REQUEST_REOPENED'  # noqa
                    ),
                    codebuild.WebhookFilter(
                        Type='BASE_REF',
                        Pattern='^refs/heads/release$'),
                    codebuild.WebhookFilter(Type='HEAD_REF',
                                            Pattern='^refs/heads/master$')
                ]])))
def create_template(self):
    """Create template (main function called by Stacker).

    Builds the app build pipeline: a scheduled ECR-cleanup Lambda, a
    CodeCommit source repository, a versioned artifact bucket, a CodeBuild
    docker-build project, and a two-stage (Source -> Build) CodePipeline.
    """
    template = self.template
    variables = self.get_variables()
    template.set_version('2010-09-09')
    template.set_description('App - Build Pipeline')

    # Resources
    boundary_arn = Join('', [
        'arn:', Partition, ':iam::', AccountId, ':policy/',
        variables['RolePermissionsBoundaryName'].ref
    ])

    # Repo image limit is 1000 by default; this lambda function will prune
    # old images
    image_param_path = Join(
        '', ['/', variables['AppPrefix'].ref, '/current-hash'])
    image_param_arn = Join('', [
        'arn:', Partition, ':ssm:', Region, ':', AccountId, ':parameter',
        image_param_path
    ])
    ecr_repo_arn = Join('', [
        'arn:', Partition, ':ecr:', Region, ':', AccountId, ':repository/',
        variables['EcrRepoName'].ref
    ])
    cleanuplambdarole = template.add_resource(
        iam.Role('CleanupLambdaRole',
                 AssumeRolePolicyDocument=make_simple_assume_policy(
                     'lambda.amazonaws.com'),
                 ManagedPolicyArns=[
                     IAM_ARN_PREFIX + 'AWSLambdaBasicExecutionRole'
                 ],
                 PermissionsBoundary=boundary_arn,
                 Policies=[
                     iam.Policy(
                         PolicyName=Join(
                             '',
                             [variables['AppPrefix'].ref, '-ecrcleanup']),
                         PolicyDocument=PolicyDocument(
                             Version='2012-10-17',
                             Statement=[
                                 Statement(Action=[awacs.ssm.GetParameter],
                                           Effect=Allow,
                                           Resource=[image_param_arn]),
                                 Statement(Action=[
                                     awacs.ecr.DescribeImages,
                                     awacs.ecr.BatchDeleteImage
                                 ],
                                           Effect=Allow,
                                           Resource=[ecr_repo_arn])
                             ]))
                 ]))
    cleanupfunction = template.add_resource(
        awslambda.Function(
            'CleanupFunction',
            Description='Cleanup stale ECR images',
            # Function source is provided inline via a variable.
            Code=awslambda.Code(
                ZipFile=variables['ECRCleanupLambdaFunction']),
            Environment=awslambda.Environment(
                Variables={
                    'ECR_REPO_NAME': variables['EcrRepoName'].ref,
                    'SSM_PARAM': image_param_path
                }),
            Handler='index.handler',
            Role=cleanuplambdarole.get_att('Arn'),
            Runtime='python3.6',
            Timeout=120))
    cleanuprule = template.add_resource(
        events.Rule('CleanupRule',
                    Description='Regularly invoke CleanupFunction',
                    ScheduleExpression='rate(7 days)',
                    State='ENABLED',
                    Targets=[
                        events.Target(Arn=cleanupfunction.get_att('Arn'),
                                      Id='CleanupFunction')
                    ]))
    template.add_resource(
        awslambda.Permission(
            'AllowCWLambdaInvocation',
            FunctionName=cleanupfunction.ref(),
            Action=awacs.awslambda.InvokeFunction.JSONrepr(),
            Principal='events.amazonaws.com',
            SourceArn=cleanuprule.get_att('Arn')))
    appsource = template.add_resource(
        codecommit.Repository(
            'AppSource',
            RepositoryName=Join('-',
                                [variables['AppPrefix'].ref, 'source'])))
    for i in ['Name', 'Arn']:
        template.add_output(
            Output("AppRepo%s" % i,
                   Description="%s of app source repo" % i,
                   Value=appsource.get_att(i)))
    bucket = template.add_resource(
        s3.Bucket(
            'Bucket',
            AccessControl=s3.Private,
            LifecycleConfiguration=s3.LifecycleConfiguration(Rules=[
                s3.LifecycleRule(NoncurrentVersionExpirationInDays=90,
                                 Status='Enabled')
            ]),
            VersioningConfiguration=s3.VersioningConfiguration(
                Status='Enabled')))
    template.add_output(
        Output('PipelineBucketName',
               Description='Name of pipeline bucket',
               Value=bucket.ref()))
    # This list must be kept in sync between the CodeBuild project and its
    # role
    build_name = Join('', [variables['AppPrefix'].ref, '-build'])
    build_role = template.add_resource(
        iam.Role(
            'BuildRole',
            AssumeRolePolicyDocument=make_simple_assume_policy(
                'codebuild.amazonaws.com'),
            PermissionsBoundary=boundary_arn,
            Policies=[
                iam.Policy(
                    PolicyName=Join('', [build_name, '-policy']),
                    PolicyDocument=PolicyDocument(
                        Version='2012-10-17',
                        Statement=[
                            # Read pipeline artifacts from the bucket.
                            Statement(
                                Action=[awacs.s3.GetObject],
                                Effect=Allow,
                                Resource=[
                                    Join('', [bucket.get_att('Arn'), '/*'])
                                ]),
                            # ECR login plus push/pull on the app repo.
                            Statement(
                                Action=[awacs.ecr.GetAuthorizationToken],
                                Effect=Allow,
                                Resource=['*']),
                            Statement(Action=[
                                awacs.ecr.BatchCheckLayerAvailability,
                                awacs.ecr.BatchGetImage,
                                awacs.ecr.CompleteLayerUpload,
                                awacs.ecr.DescribeImages,
                                awacs.ecr.GetDownloadUrlForLayer,
                                awacs.ecr.InitiateLayerUpload,
                                awacs.ecr.PutImage,
                                awacs.ecr.UploadLayerPart
                            ],
                                      Effect=Allow,
                                      Resource=[ecr_repo_arn]),
                            Statement(Action=[
                                awacs.ssm.GetParameter,
                                awacs.ssm.PutParameter
                            ],
                                      Effect=Allow,
                                      Resource=[image_param_arn]),
                            Statement(Action=[
                                awacs.logs.CreateLogGroup,
                                awacs.logs.CreateLogStream,
                                awacs.logs.PutLogEvents
                            ],
                                      Effect=Allow,
                                      Resource=[
                                          Join('', [
                                              'arn:', Partition, ':logs:',
                                              Region, ':', AccountId,
                                              ':log-group:/aws/codebuild/',
                                              build_name
                                          ] + x) for x in [[':*'], [':*/*']]
                                      ])
                        ]))
            ]))
    buildproject = template.add_resource(
        codebuild.Project(
            'BuildProject',
            Artifacts=codebuild.Artifacts(Type='CODEPIPELINE'),
            Environment=codebuild.Environment(
                ComputeType='BUILD_GENERAL1_SMALL',
                EnvironmentVariables=[
                    codebuild.EnvironmentVariable(
                        Name='AWS_DEFAULT_REGION',
                        Type='PLAINTEXT',
                        Value=Region),
                    codebuild.EnvironmentVariable(Name='AWS_ACCOUNT_ID',
                                                  Type='PLAINTEXT',
                                                  Value=AccountId),
                    codebuild.EnvironmentVariable(
                        Name='IMAGE_REPO_NAME',
                        Type='PLAINTEXT',
                        Value=variables['EcrRepoName'].ref),
                ],
                Image='aws/codebuild/docker:18.09.0',
                Type='LINUX_CONTAINER'),
            Name=build_name,
            ServiceRole=build_role.get_att('Arn'),
            Source=codebuild.Source(
                Type='CODEPIPELINE',
                BuildSpec=variables['BuildProjectBuildSpec'])))
    pipelinerole = template.add_resource(
        iam.Role(
            'PipelineRole',
            AssumeRolePolicyDocument=make_simple_assume_policy(
                'codepipeline.amazonaws.com'),
            PermissionsBoundary=boundary_arn,
            Policies=[
                iam.Policy(
                    PolicyName=Join('', [build_name, '-pipeline-policy']),
                    PolicyDocument=PolicyDocument(
                        Version='2012-10-17',
                        Statement=[
                            Statement(
                                Action=[
                                    awacs.codecommit.GetBranch,
                                    awacs.codecommit.GetCommit,
                                    awacs.codecommit.UploadArchive,
                                    awacs.codecommit.GetUploadArchiveStatus,  # noqa
                                    awacs.codecommit.CancelUploadArchive
                                ],  # noqa
                                Effect=Allow,
                                Resource=[appsource.get_att('Arn')]),
                            Statement(
                                Action=[awacs.s3.GetBucketVersioning],
                                Effect=Allow,
                                Resource=[bucket.get_att('Arn')]),
                            Statement(
                                Action=[
                                    awacs.s3.GetObject, awacs.s3.PutObject
                                ],
                                Effect=Allow,
                                Resource=[
                                    Join('', [bucket.get_att('Arn'), '/*'])
                                ]),
                            Statement(
                                Action=[
                                    awacs.codebuild.BatchGetBuilds,
                                    awacs.codebuild.StartBuild
                                ],
                                Effect=Allow,
                                Resource=[buildproject.get_att('Arn')])
                        ]))
            ]))
    template.add_resource(
        codepipeline.Pipeline(
            'Pipeline',
            ArtifactStore=codepipeline.ArtifactStore(Location=bucket.ref(),
                                                     Type='S3'),
            Name=build_name,
            RoleArn=pipelinerole.get_att('Arn'),
            Stages=[
                codepipeline.Stages(
                    Name='Source',
                    Actions=[
                        codepipeline.Actions(
                            Name='CodeCommit',
                            ActionTypeId=codepipeline.ActionTypeId(
                                Category='Source',
                                Owner='AWS',
                                Provider='CodeCommit',
                                Version='1'),
                            Configuration={
                                'RepositoryName':
                                    appsource.get_att('Name'),  # noqa
                                'BranchName': 'master'
                            },
                            OutputArtifacts=[
                                codepipeline.OutputArtifacts(
                                    Name='CodeCommitRepo')
                            ]),
                    ]),
                codepipeline.Stages(
                    Name='Build',
                    Actions=[
                        codepipeline.Actions(
                            Name='Build',
                            ActionTypeId=codepipeline.ActionTypeId(
                                Category='Build',
                                Owner='AWS',
                                Provider='CodeBuild',
                                Version='1'),
                            Configuration={
                                'ProjectName': buildproject.ref()
                            },
                            InputArtifacts=[
                                codepipeline.InputArtifacts(
                                    Name='CodeCommitRepo')
                            ])
                    ])
            ]))
def run(self):
    """Render the spoke-account Terraform bootstrap CloudFormation template.

    Emits: a versioned, encrypted, fully-private S3 bucket for Terraform
    state; a bucket policy granting the puppet (hub) account read/write on
    the state objects; and four CodeBuild projects sharing one base
    configuration — execute, execute dry-run, terminate, terminate dry-run —
    that run Terraform against a target account via an assumed PuppetRole.
    The rendered YAML is written with ``self.write_output``.
    """
    puppet_version = constants.VERSION
    description = f"""Bootstrap template used to configure spoke account for terraform use {{"version": "{puppet_version}", "framework": "servicecatalog-puppet", "role": "bootstrap-spoke-terraform"}}"""
    service_role = t.Sub(
        "arn:aws:iam::${AWS::AccountId}:role/servicecatalog-puppet/PuppetDeployInSpokeRole"
    )
    template = t.Template(Description=description)

    # State bucket: versioned (so state history survives), AES256-encrypted,
    # and with all public access blocked.
    state = template.add_resource(
        s3.Bucket(
            "state",
            BucketName=t.Sub("sc-puppet-state-${AWS::AccountId}"),
            VersioningConfiguration=s3.VersioningConfiguration(Status="Enabled"),
            BucketEncryption=s3.BucketEncryption(
                ServerSideEncryptionConfiguration=[
                    s3.ServerSideEncryptionRule(
                        ServerSideEncryptionByDefault=s3.ServerSideEncryptionByDefault(
                            SSEAlgorithm="AES256"
                        )
                    )
                ]
            ),
            PublicAccessBlockConfiguration=s3.PublicAccessBlockConfiguration(
                BlockPublicAcls=True,
                BlockPublicPolicy=True,
                IgnorePublicAcls=True,
                RestrictPublicBuckets=True,
            ),
            Tags=t.Tags({"ServiceCatalogPuppet:Actor": "Framework"}),
        )
    )

    # Let the puppet account get/put state objects in this spoke's bucket.
    template.add_resource(
        s3.BucketPolicy(
            "statePolicy",
            Bucket=t.Ref(state),
            PolicyDocument={
                "Version": "2012-10-17",
                "Statement": [
                    {
                        "Action": [
                            "s3:GetObject*",
                            "s3:PutObject*",
                        ],
                        "Principal": {"AWS": self.puppet_account_id},
                        "Resource": t.Join("/", [t.GetAtt(state, "Arn"), "*"]),
                        "Effect": "Allow",
                        "Sid": "AllowPuppet",
                    },
                ],
            },
        )
    )

    # Base buildspec (the "execute" flow); the variants below deep-copy this
    # and override only the build/post_build commands and artifacts.
    execute_build_spec = dict(
        version="0.2",
        phases=dict(
            install=dict(
                commands=[
                    "mkdir -p /root/downloads",
                    "curl -s -qL -o /root/downloads/terraform_${TERRAFORM_VERSION}_linux_amd64.zip https://releases.hashicorp.com/terraform/${TERRAFORM_VERSION}/terraform_${TERRAFORM_VERSION}_linux_amd64.zip",
                    "unzip /root/downloads/terraform_${TERRAFORM_VERSION}_linux_amd64.zip -d /usr/bin/",
                    "chmod +x /usr/bin/terraform",
                    "terraform --version",
                    "aws s3 cp $ZIP source.zip",
                    "unzip source.zip",
                ],
            ),
            pre_build=dict(
                commands=[
                    # Missing state file is fine on first run.
                    "aws s3 cp $STATE_FILE terraform.tfstate || echo 'no statefile copied'",
                    'ASSUME_ROLE_ARN="arn:aws:iam::${TARGET_ACCOUNT}:role/servicecatalog-puppet/PuppetRole"',
                    "TEMP_ROLE=$(aws sts assume-role --role-arn $ASSUME_ROLE_ARN --role-session-name terraform)",
                    "export TEMP_ROLE",
                    'export AWS_ACCESS_KEY_ID=$(echo "${TEMP_ROLE}" | jq -r ".Credentials.AccessKeyId")',
                    'export AWS_SECRET_ACCESS_KEY=$(echo "${TEMP_ROLE}" | jq -r ".Credentials.SecretAccessKey")',
                    'export AWS_SESSION_TOKEN=$(echo "${TEMP_ROLE}" | jq -r ".Credentials.SessionToken")',
                    "aws sts get-caller-identity",
                    "terraform init",
                ],
            ),
            build=dict(
                commands=[
                    "terraform apply -auto-approve",
                ]
            ),
            post_build=dict(
                commands=[
                    "terraform output -json > outputs.json",
                    "unset AWS_ACCESS_KEY_ID",
                    "unset AWS_SECRET_ACCESS_KEY",
                    "unset AWS_SESSION_TOKEN",
                    "aws sts get-caller-identity",
                    "aws s3 cp terraform.tfstate $STATE_FILE",
                ]
            ),
        ),
        artifacts=dict(
            files=[
                "outputs.json",
            ],
        ),
    )

    def _s3_artifacts(artifact_name):
        # All four projects publish artifacts to the state bucket under the
        # same path; only the artifact name differs.
        return codebuild.Artifacts(
            Type="S3",
            Location=t.Ref(state),
            Path="terraform-executions",
            Name=artifact_name,
            NamespaceType="BUILD_ID",
        )

    # Base project configuration, reused (via deepcopy) by every variant.
    execute_terraform = dict(
        Name=constants.EXECUTE_TERRAFORM_PROJECT_NAME,
        ServiceRole=service_role,
        Tags=t.Tags.from_dict(**{"ServiceCatalogPuppet:Actor": "Framework"}),
        Artifacts=_s3_artifacts("artifacts-execute"),
        TimeoutInMinutes=480,
        Environment=codebuild.Environment(
            ComputeType="BUILD_GENERAL1_SMALL",
            Image=constants.CODEBUILD_DEFAULT_IMAGE,
            Type="LINUX_CONTAINER",
            EnvironmentVariables=[
                # Terraform version is resolved from SSM at build time.
                codebuild.EnvironmentVariable(
                    Name="TERRAFORM_VERSION",
                    Type="PARAMETER_STORE",
                    Value=constants.DEFAULT_TERRAFORM_VERSION_PARAMETER_NAME,
                ),
            ]
            + [
                # Placeholders; the caller overrides these per execution.
                codebuild.EnvironmentVariable(
                    Name=name,
                    Type="PLAINTEXT",
                    Value="CHANGE_ME",
                )
                for name in ["TARGET_ACCOUNT", "ZIP", "STATE_FILE"]
            ],
        ),
        Source=codebuild.Source(
            BuildSpec=yaml.safe_dump(execute_build_spec),
            Type="NO_SOURCE",
        ),
        Description="Execute the given terraform in the given account using the given state file",
    )

    # execute
    template.add_resource(
        codebuild.Project("ExecuteTerraformProject", **execute_terraform)
    )

    def _derived_project(
        resource_title,
        project_name,
        project_description,
        build_commands,
        artifact_name,
        post_build_commands=None,
        artifact_files=None,
    ):
        """Clone the base project/spec, apply the per-variant overrides and
        register the resulting CodeBuild project on the template.

        ``post_build_commands=None`` removes the post_build phase entirely;
        ``artifact_files=None`` removes the artifacts section entirely.
        """
        spec = copy.deepcopy(execute_build_spec)
        spec["phases"]["build"]["commands"] = build_commands
        if post_build_commands is None:
            del spec["phases"]["post_build"]
        else:
            spec["phases"]["post_build"]["commands"] = post_build_commands
        if artifact_files is None:
            del spec["artifacts"]
        else:
            spec["artifacts"] = dict(files=artifact_files)

        project = copy.deepcopy(execute_terraform)
        project["Name"] = project_name
        project["Description"] = project_description
        project["Source"] = codebuild.Source(
            BuildSpec=yaml.safe_dump(spec),
            Type="NO_SOURCE",
        )
        project["Artifacts"] = _s3_artifacts(artifact_name)
        template.add_resource(codebuild.Project(resource_title, **project))

    base_description = execute_terraform["Description"]

    # execute dry run: plan only, no state write-back, ship the plan files.
    _derived_project(
        "ExecuteDryRunTerraformProject",
        constants.EXECUTE_DRY_RUN_TERRAFORM_PROJECT_NAME,
        base_description.replace("Execute", "DRY RUN of Execute"),
        [
            "terraform plan -out=plan.bin",
            "terraform show -json plan.bin > plan.json",
        ],
        "artifacts-execute-dry-run",
        artifact_files=[
            "plan.bin",
            "plan.json",
        ],
    )

    # terminate: destroy, write state back, no outputs artifact.
    _derived_project(
        "TerminateTerraformProject",
        constants.TERMINATE_TERRAFORM_PROJECT_NAME,
        base_description.replace("Execute", "Terminate"),
        [
            "terraform destroy -auto-approve",
        ],
        "artifacts-terminate",
        post_build_commands=[
            "unset AWS_ACCESS_KEY_ID",
            "unset AWS_SECRET_ACCESS_KEY",
            "unset AWS_SESSION_TOKEN",
            "aws sts get-caller-identity",
            "aws s3 cp terraform.tfstate $STATE_FILE",
        ],
    )

    # terminate dry run: destroy plan only, ship the plan files.
    _derived_project(
        "TerminateDryRunTerraformProject",
        constants.TERMINATE_DRY_RUN_TERRAFORM_PROJECT_NAME,
        base_description.replace("Execute", "DRY RUN of Terminate"),
        [
            "terraform plan -destroy -out=plan.bin",
            "terraform show -json plan.bin > plan.json",
        ],
        "artifacts-terminate-dry-run",
        artifact_files=[
            "plan.bin",
            "plan.json",
        ],
    )

    self.write_output(template.to_yaml(), skip_json_dump=True)