def run(self):
    """Render the spoke-account bootstrap template and write it out as YAML."""
    puppet_version = constants.VERSION
    description = f"""Bootstrap template used to configure spoke account for stack use
{{"version": "{puppet_version}", "framework": "servicecatalog-puppet", "role": "bootstrap-spoke-stack"}}"""
    template = t.Template(Description=description)
    # Trust policy letting CloudFormation assume the role in the spoke account.
    assume_role_policy = {
        "Version": "2012-10-17",
        "Statement": [
            {
                "Action": ["sts:AssumeRole"],
                "Effect": "Allow",
                "Principal": {"Service": ["cloudformation.amazonaws.com"]},
            }
        ],
    }
    stack_role = iam.Role(
        "PuppetStackRole",
        RoleName="PuppetStackRole",
        ManagedPolicyArns=[
            t.Sub("arn:${AWS::Partition}:iam::aws:policy/AdministratorAccess")
        ],
        Path=config.get_puppet_role_path(),
        AssumeRolePolicyDocument=assume_role_policy,
    )
    template.add_resource(stack_role)
    self.write_output(template.to_yaml(), skip_json_dump=True)
def init_template(self, description):
    """Initialize ``self.template`` as a fresh Troposphere template.

    Sets the template format version and adds a placeholder
    WaitConditionHandle resource (so the template is never resource-free).
    """
    tpl = troposphere.Template(Description=description, )
    self.template = tpl
    tpl.set_version()
    placeholder = troposphere.cloudformation.WaitConditionHandle(
        title="EmptyTemplatePlaceholder")
    tpl.add_resource(placeholder)
def process(self, role_arn: str, processor: LambdaType):
    """Assemble the CloudFormation template for this experiment and pass it to *processor*.

    SSM documents are added first (one per run-script action); the FIS
    experiment template referencing them is added afterwards.
    """
    try:
        cf_template = troposphere.Template(self.data["Description"])
        document_names = []
        # First, add SSM documents — one per "run script" action.
        for action in self.data['Actions']:
            if action['Type'] != ACTION_TYPE_IMP_RUN_SCRIPT:
                continue
            document = action.get("Document", {})
            script_path = document.get("Path")
            script_params = document.get("Parameters", [])
            cf_template.add_resource(
                build_ssm_document(
                    self.name,
                    action.get("Name"),
                    self.ssm_document(script_path, script_params),
                ))
            document_names.append(action.get("Name"))
        # Now, create the FIS template that references the documents above.
        cf_template.add_resource(
            build_fis_template(self.name, role_arn, self.data, document_names))
        return processor(
            cf_template_name(self.name),
            self.name,
            cf_template,
            TAG_VALUE_RESOURCE_TYPE_TEMPLATE,
            False,
        )
    except yaml.YAMLError as e:
        # YAML problems surface as a CLI error rather than a traceback.
        cli_error(f'{type(e).__name__}: {e}')
def test_should_not_find_resource_by_type(self):
    """Searching for a resource type absent from the template yields no results."""
    t = troposphere.Template()
    t.add_resource(troposphere.s3.Bucket("thebucket"))
    results = TemplateQuery.get_resource_by_type(t, troposphere.s3.Policy)
    # Bug fix: the original asserted ``results.count(results) == 0``, which
    # counts occurrences of the list *inside itself* and is therefore always
    # 0 — the assertion could never fail. Check the result length instead.
    self.assertEqual(len(results), 0)
def get_template() -> t.Template:
    """Return a template holding every shared product-pipeline resource."""
    tpl = t.Template(Description="Shared resources used by product pipelines")
    combined = shared_resources.get_resources() + cdk_shared_resources.get_resources()
    for res in combined:
        tpl.add_resource(res)
    return tpl
def setUp(self):
    """Create a chain context whose template already contains a pipeline and stage."""
    self.pipeline_name = "ThatPipeline"
    self.deploy_stage_name = "DeployIt"
    self.context = chaincontext.ChainContext(
        template=troposphere.Template(),
        instance_name='justtestin')
    self.context.metadata[META_PIPELINE_BUCKET_POLICY_REF] = "blah"
    TestCloudFormationAction._add_pipeline_and_stage_to_template(
        self.context.template, self.pipeline_name, self.deploy_stage_name)
def main():
    """Build the full template from its sections and print it as JSON."""
    args = parse_args()
    template = troposphere.Template()
    # Section order mirrors a CloudFormation template's layout.
    define_mappings(template)
    define_metadata(template)
    define_parameters(template)
    define_resources(template, args)
    define_outputs(template)
    print(template.to_json())
def create_template_yaml(zone_name, list_of_resources):
    """Render a template with *zone_name* and *list_of_resources* as long-form YAML."""
    template = troposphere.Template()
    template.add_parameter(zone_name)
    # add_resource accepts a list of resources as well as a single one.
    template.add_resource(list_of_resources)
    template.add_output(
        troposphere.Output(
            'Endpoint',
            Description='dummy endpoint required by aodnstack playbook',
            Value='NO_ENDPOINT'))
    return template.to_yaml(long_form=True)
def generate_role_template(
    command: str,
    actions: list,
    role_name: str,
    path: str,
    assuming_account_id: str,
    assuming_resource: str,
    output_format: str,
) -> str:
    """Render a single-role template for running *command*.

    The role carries one inline policy granting *actions* on all resources
    and trusts ``assuming_resource`` in ``assuming_account_id``. The result
    is serialized as JSON when *output_format* is ``"json"``, else as YAML.
    """
    inline_policy = iam.Policy(
        PolicyName=f"{command}-permissions",
        PolicyDocument=aws.PolicyDocument(
            Version="2012-10-17",
            Id=f"{command}-permissions",
            Statement=[
                aws.Statement(
                    Sid="1",
                    Effect=aws.Allow,
                    Action=actions,
                    Resource=["*"],
                ),
            ],
        ),
    )
    trust_policy = aws.Policy(
        Version="2012-10-17",
        Id="AllowAssume",
        Statement=[
            aws.Statement(
                Sid="1",
                Effect=aws.Allow,
                Principal=aws.Principal(
                    "AWS",
                    [IAM_ARN(assuming_resource, "", assuming_account_id)]),
                Action=[awacs_sts.AssumeRole],
            ),
        ],
    )
    template = troposphere.Template()
    template.description = f"Role used to run the {command} command"
    template.add_resource(
        iam.Role(
            title="role",
            RoleName=role_name,
            Path=path,
            Policies=[inline_policy],
            AssumeRolePolicyDocument=trust_policy,
        ))
    return template.to_json() if output_format == "json" else template.to_yaml()
def test_should_find_resource_by_type(self):
    """Looking up a present type returns a non-empty list of matching resources."""
    template = troposphere.Template()
    template.add_resource(troposphere.s3.Bucket("whoCares"))
    matches = TemplateQuery.get_resource_by_type(
        template=template, type_to_find=troposphere.s3.Bucket)
    self.assertIsNotNone(matches)
    self.assertIsInstance(matches, list)
    self.assertIsInstance(matches[0], troposphere.s3.Bucket)
def test_should_build_template_with_required_parameters_added_automatically(
        self):
    """With auto_param_creation enabled, the chain supplies required parameters itself."""
    the_chain = chain.Chain()
    the_chain.add(MockStepWithRequiredParam())
    self.context = chaincontext.ChainContext(
        template=troposphere.Template(),
        instance_name='will_generate_parameters',
        auto_param_creation=True)
    # Should not raise even though no parameters were registered manually.
    the_chain.run(self.context)
def test_StackLookup():
    """StackLookup.get_att with as_reference=False yields a raw Fn::GetAtt dict."""
    # used in cloudformation!
    # Create EC2 Cloudformation template with troposphere
    template = troposphere.Template()
    template.add_version('2010-09-09')
    template.add_description('gcdt unit-tests')
    lambda_lookup_arn = 'lookup:stack:%s:EC2BasicsLambdaArn' % 'dp-dev'
    lookup = StackLookup(template, lambda_lookup_arn)
    # as_reference: Is the parameter a reference (Default) or a string
    vpcid = lookup.get_att('vpcid', as_reference=False)
    assert vpcid.data == {'Fn::GetAtt': ['StackOutput', 'vpcid']}
def test_should_find_resource_by_title(self):
    """A resource added under a title is retrievable by that exact title."""
    wanted_title = "TestingTheNameLookup"
    template = troposphere.Template()
    template.add_resource(troposphere.s3.Bucket(wanted_title))
    resource = TemplateQuery.get_resource_by_title(
        template=template, title=wanted_title)
    self.assertIsNotNone(resource)
    self.assertIsInstance(resource, troposphere.s3.Bucket)
    self.assertEqual(resource.title, wanted_title)
def setUp(self):
    """Prepare a chain context plus the pipeline/stage names used by the tests."""
    self.pipeline_name = "ThatPipeline"
    self.deploy_stage_name = "DeployIt"
    self.source_stage_name = "SourceIt"
    self.context = chaincontext.ChainContext(
        template=troposphere.Template(),
        instance_name='justtestin')
    self.context.metadata[META_PIPELINE_BUCKET_POLICY_REF] = "blah"
    self.context.metadata[META_PIPELINE_BUCKET_NAME] = troposphere.Ref(
        "notabucket")
    TestCodeBuildAction._add_pipeline_and_stage_to_template(
        self.context.template, self.pipeline_name, self.deploy_stage_name)
def generate_role_template(
    command: str,
    actions: list,
    role_name: str,
    path: str,
    assuming_account_id: str,
    assuming_resource: str,
    additional_statements: list = None,
) -> troposphere.Template:
    """Build a template containing one IAM role plus outputs for its name and ARN.

    :param command: used in the template description and inline policy names.
    :param actions: awacs actions granted by the role's inline policy.
    :param role_name: RoleName of the created role.
    :param path: IAM path of the role.
    :param assuming_account_id: account allowed to assume the role.
    :param assuming_resource: IAM resource allowed to assume the role.
    :param additional_statements: extra statements appended to the inline
        policy; defaults to none.
    :returns: the assembled troposphere Template.
    """
    # Fixed a mutable-default-argument idiom: the original declared
    # ``additional_statements: list = []``; normalize from None instead.
    if additional_statements is None:
        additional_statements = []
    t = troposphere.Template()
    t.description = f"Role used to run the {command} command"
    role = iam.Role(
        title="role",
        RoleName=role_name,
        Path=path,
        Policies=[
            iam.Policy(
                PolicyName=f"{command}-permissions",
                PolicyDocument=aws.PolicyDocument(
                    Version="2012-10-17",
                    Id=f"{command}-permissions",
                    Statement=[
                        aws.Statement(Sid="1",
                                      Effect=aws.Allow,
                                      Action=actions,
                                      Resource=["*"])
                    ] + additional_statements,
                ),
            )
        ],
        AssumeRolePolicyDocument=aws.Policy(
            Version="2012-10-17",
            Id="AllowAssume",
            Statement=[
                aws.Statement(
                    Sid="1",
                    Effect=aws.Allow,
                    Principal=aws.Principal(
                        "AWS",
                        [IAM_ARN(assuming_resource, "", assuming_account_id)]),
                    Action=[awacs_sts.AssumeRole],
                )
            ],
        ),
    )
    t.add_resource(role)
    # Expose the role's name and ARN so callers of the stack can consume them.
    t.add_output(troposphere.Output("RoleName", Value=troposphere.Ref(role)))
    t.add_output(
        troposphere.Output("RoleArn", Value=troposphere.GetAtt(role, "Arn")))
    return t
def setUp(self):
    """Create the chain context and a small CodeBuild environment fixture."""
    self.context = chaincontext.ChainContext(
        template=troposphere.Template(),
        instance_name='justtestin',
    )
    env_vars = [{'Name': 'TEST_VAR', 'Value': 'demo'}]
    self.environment = codebuild.Environment(
        ComputeType='BUILD_GENERAL1_SMALL',
        Image='aws/codebuild/python:2.7.12',
        Type='LINUX_CONTAINER',
        EnvironmentVariables=env_vars,
    )
def test_tropo_to_string(self):
    """tropo_to_string serializes assorted troposphere objects without raising."""
    simple_objects = (
        tropo.Template(),
        tropo.Base64('efsdfsdf'),
        tropo.Output('efsdfsdf', Value='dsfsdfs'),
        tropo.Parameter('efsdfsdf', Type='dsfsdfs'),
    )
    for obj in simple_objects:
        utility.tropo_to_string(obj)
    # These constructors recursively call themselves for some reason
    # Don't instantiate directly
    # utility.tropo_to_string(tropo.AWSProperty())
    # utility.tropo_to_string(tropo.AWSAttribute())
    utility.tropo_to_string(
        ec2.Instance("ec2instance",
                     InstanceType="m3.medium",
                     ImageId="ami-951945d0"))
def test_iam_role_and_policies():
    """IAMRoleAndPolicies.build_role yields an AWS::IAM::Role resource."""
    # used in cloudformation!
    # Fixture configuration (renamed local 'iam' to avoid shadowing).
    prefix = 'lambda-'
    principals = ['lambda.amazonaws.com']
    role_path = '/lambda/'
    name = 'embed-player-wrapper'
    policy_lambda = 'arn:aws:iam::aws:policy/service-role/AWSLambdaRole'
    template = troposphere.Template()
    helper = IAMRoleAndPolicies(template, prefix, principals, role_path)
    role_embed_player_wrapper = helper.build_role(
        name, [troposphere.Ref(policy_lambda)])
    assert role_embed_player_wrapper.to_dict()['Type'] == 'AWS::IAM::Role'
def main():
    """Create all resources."""
    template = troposphere.Template()
    for resource in cloudformation.RESOURCES:
        template.add_resource(resource)
    # Push the rendered template to the existing stack and wait for completion.
    stack_name = "cloudformation-kubernetes"
    session = boto3.Session(region_name="ap-southeast-2")
    client = session.client("cloudformation")
    client.update_stack(
        StackName=stack_name,
        TemplateBody=template.to_yaml(clean_up=True),
        Capabilities=["CAPABILITY_IAM", "CAPABILITY_NAMED_IAM"],
    )
    client.get_waiter("stack_update_complete").wait(
        StackName=stack_name, WaiterConfig={"Delay": 5})
def test_should_build_template_with_required_parameters_added_externally(
        self):
    """With auto_param_creation disabled, manually added parameters satisfy the step."""
    the_chain = chain.Chain()
    the_chain.add(MockStepWithRequiredParam())
    self.context = chaincontext.ChainContext(
        template=troposphere.Template(),
        instance_name='wont_generate_parameters',
        auto_param_creation=False)
    # Register the parameters the mock step needs, since the chain won't.
    for param_name in ("NumberOfMinions", "NumberOfEyeballs"):
        self.context.template.add_parameter(
            troposphere.Parameter(param_name, Type="String"))
    the_chain.run(self.context)
def _base_troposphere_template(self):
    """Returns the most basic troposphere template possible."""
    template = troposphere.Template()
    # Every stack built on this base receives Stage and Region parameters.
    stage_param = troposphere.Parameter(
        "Stage",
        Default="dev",
        Description="Name of the Stage",
        Type="String",
    )
    region_param = troposphere.Parameter(
        "Region",
        Description="AWS Region",
        Type="String",
    )
    template.add_parameter(stage_param)
    template.add_parameter(region_param)
    return template
def test_should_not_find_resource_by_title(self):
    """Looking up a title absent from the template raises ValueError."""
    t = troposphere.Template()
    resource_name_to_lookup = "TestingTheNameLookup"
    t.add_resource(troposphere.s3.Bucket(resource_name_to_lookup))
    self.assertRaises(
        ValueError,
        TemplateQuery.get_resource_by_title,
        template=t,
        title="TestingTheNameLookups",
    )
    # Fixes two Python 3 problems in the original second check:
    # - ``assertRaisesRegexp`` is the removed Python 2 alias of
    #   ``assertRaisesRegex``.
    # - the ``callable_obj=`` keyword no longer exists in Python 3's
    #   unittest; the callable must be passed positionally (the old call
    #   raised TypeError before exercising anything).
    self.assertRaisesRegex(
        ValueError,
        "Expected to find.+TestingTheNameLookup",
        TemplateQuery.get_resource_by_title,
        template=t,
        title="TestingTheNameLookups",
    )
CONFIG = { 'app_bucket': { 'name': 'khueue-diary-app', }, 'pipeline_bucket': { 'name': 'khueue-diary-pipeline', 'object_lifetime_days': 30, }, 'log_bucket': { 'name': 'khueue-diary-logs', 'object_lifetime_days': 30, }, } template = troposphere.Template() log_bucket = troposphere.s3.Bucket( 'LogBucket', template=template, BucketName=CONFIG['log_bucket']['name'], AccessControl='LogDeliveryWrite', LifecycleConfiguration=troposphere.s3.LifecycleConfiguration( Rules=[ troposphere.s3.LifecycleRule( Id='DeleteOldObjects', Status='Enabled', ExpirationInDays=CONFIG['log_bucket']['object_lifetime_days'], ), ], ),
import troposphere as tp from troposphere import Ref, Parameter, GetAtt from troposphere.awslambda import Function, Code, Alias from troposphere.iam import PolicyType, Role from awacs.aws import Allow, Statement, Principal, Policy from awacs.sts import AssumeRole t = tp.Template() handler = t.add_parameter( Parameter("LambdaHandler", Type="String", Default="handler", Description="The name of the function (within your source code) " "that Lambda calls to start running your code.")) memory_size = t.add_parameter( Parameter("LambdaMemorySize", Type="Number", Description="The amount of memory, in MB, that is allocated to " "your Lambda function.")) timeout = t.add_parameter( Parameter( "LambdaTimeout", Type="Number", Default="15", Description="The function execution time (in seconds) after which " "Lambda terminates the function. ")) env = t.add_parameter(
def generate_codepipeline_template(
    codepipeline_role_name: str,
    codepipeline_role_path: str,
    codebuild_role_name: str,
    codebuild_role_path: str,
    ssm_parameter_prefix: str,
    scm_provider: str,
    scm_connection_arn: str,
    scm_full_repository_id: str,
    scm_branch_name: str,
    scm_bucket_name: str,
    scm_object_key: str,
    scm_skip_creation_of_repo: str,
    migrate_role_arn: str,
) -> troposphere.Template:
    """Build the CICD template (pipeline + build project + roles) for `aws-organized migrate`.

    Supports three SCM providers — CodeCommit, CodeStarSourceConnection and
    S3 — selected via ``scm_provider``. Returns the assembled troposphere
    Template.

    NOTE(review): ``scm_skip_creation_of_repo`` is annotated ``str`` but is
    compared with ``is False``, so only a literal bool ``False`` triggers
    repo/bucket creation — confirm callers pass a bool.
    """
    # Pin the currently installed aws-organized version; it is stored in SSM
    # and used by the build to install the same version.
    version = pkg_resources.get_distribution("aws-organized").version
    t = troposphere.Template()
    t.set_description(
        "CICD template that runs aws organized migrate for the given branch of the given repo"
    )
    project_name = "AWSOrganized-Migrate"
    bucket_name = scm_bucket_name
    # Optionally create the source repo (CodeCommit) or source bucket (S3),
    # depending on the chosen provider.
    if scm_provider.lower(
    ) == "codecommit" and scm_skip_creation_of_repo is False:
        t.add_resource(
            codecommit.Repository("Repository",
                                  RepositoryName=scm_full_repository_id))
    if scm_provider.lower() == "s3" and scm_skip_creation_of_repo is False:
        # Fall back to a generated, account-scoped bucket name when none given.
        bucket_name = (
            scm_bucket_name if scm_bucket_name else
            troposphere.Sub("aws-organized-pipeline-source-${AWS::AccountId}"))
        t.add_resource(
            s3.Bucket(
                "Source",
                BucketName=bucket_name,
                VersioningConfiguration=s3.VersioningConfiguration(
                    Status="Enabled"),
                BucketEncryption=s3.BucketEncryption(
                    ServerSideEncryptionConfiguration=[
                        s3.ServerSideEncryptionRule(
                            ServerSideEncryptionByDefault=s3.
                            ServerSideEncryptionByDefault(
                                SSEAlgorithm="AES256"))
                    ]),
            ))
    # Versioned, encrypted bucket holding pipeline artifacts.
    artifact_store = t.add_resource(
        s3.Bucket(
            "ArtifactStore",
            VersioningConfiguration=s3.VersioningConfiguration(
                Status="Enabled"),
            BucketEncryption=s3.BucketEncryption(
                ServerSideEncryptionConfiguration=[
                    s3.ServerSideEncryptionRule(
                        ServerSideEncryptionByDefault=s3.
                        ServerSideEncryptionByDefault(SSEAlgorithm="AES256"))
                ]),
        ))
    # Pipeline execution role (admin-managed policy; trusted by CodePipeline).
    codepipeline_role = t.add_resource(
        iam.Role(
            "CodePipelineRole",
            RoleName=codepipeline_role_name,
            Path=codepipeline_role_path,
            ManagedPolicyArns=["arn:aws:iam::aws:policy/AdministratorAccess"],
            AssumeRolePolicyDocument=aws.PolicyDocument(
                Version="2012-10-17",
                Statement=[
                    aws.Statement(
                        Effect=aws.Allow,
                        Action=[awacs_sts.AssumeRole],
                        Principal=aws.Principal(
                            "Service", ["codepipeline.amazonaws.com"]),
                    )
                ],
            ),
        ))
    # Build role (admin-managed policy; trusted by CodeBuild).
    codebuild_role = t.add_resource(
        iam.Role(
            "CodeBuildRole",
            RoleName=codebuild_role_name,
            Path=codebuild_role_path,
            ManagedPolicyArns=["arn:aws:iam::aws:policy/AdministratorAccess"],
            AssumeRolePolicyDocument=aws.PolicyDocument(
                Version="2012-10-17",
                Statement=[
                    aws.Statement(
                        Effect=aws.Allow,
                        Action=[awacs_sts.AssumeRole],
                        Principal=aws.Principal("Service",
                                                ["codebuild.amazonaws.com"]),
                    )
                ],
            ),
        ))
    # Record the tool version in SSM so the build installs the same release.
    version_parameter = ssm.Parameter(
        "versionparameter",
        Name=f"{ssm_parameter_prefix}/version",
        Type="String",
        Value=version,
    )
    t.add_resource(version_parameter)
    # CodeBuild project that pip-installs aws-organized and runs the migrate command.
    project = t.add_resource(
        codebuild.Project(
            "AWSOrganizedMigrate",
            Artifacts=codebuild.Artifacts(Type="CODEPIPELINE"),
            Environment=codebuild.Environment(
                ComputeType="BUILD_GENERAL1_SMALL",
                Image="aws/codebuild/standard:4.0",
                Type="LINUX_CONTAINER",
                EnvironmentVariables=[
                    {
                        "Name": "MIGRATE_ROLE_ARN",
                        "Type": "PLAINTEXT",
                        "Value": migrate_role_arn,
                    },
                    {
                        # Resolved from the SSM parameter created above.
                        "Name": "Version",
                        "Type": "PARAMETER_STORE",
                        "Value": troposphere.Ref(version_parameter),
                    },
                    {
                        "Name": "SSM_PARAMETER_PREFIX",
                        "Type": "PLAINTEXT",
                        "Value": ssm_parameter_prefix,
                    },
                ],
            ),
            Name=project_name,
            ServiceRole=troposphere.GetAtt(codebuild_role, "Arn"),
            Source=codebuild.Source(
                Type="CODEPIPELINE",
                BuildSpec=yaml.safe_dump(
                    dict(
                        version="0.2",
                        phases=dict(
                            install={
                                "runtime-versions": dict(python="3.8"),
                                "commands":
                                ["pip install aws-organized==${Version}"],
                            },
                            build={
                                "commands": [
                                    "aws-organized migrate --ssm-parameter-prefix $SSM_PARAMETER_PREFIX $MIGRATE_ROLE_ARN"
                                ]
                            },
                        ),
                        artifacts=dict(files=["environment"]),
                    )),
            ),
        ))
    # Pick the Source stage action matching the configured SCM provider.
    source_actions = dict(
        codecommit=codepipeline.Actions(
            Name="SourceAction",
            ActionTypeId=codepipeline.ActionTypeId(Category="Source",
                                                   Owner="AWS",
                                                   Version="1",
                                                   Provider="CodeCommit"),
            OutputArtifacts=[
                codepipeline.OutputArtifacts(Name="SourceOutput")
            ],
            Configuration={
                "RepositoryName": scm_full_repository_id,
                "BranchName": scm_branch_name,
                "PollForSourceChanges": "true",
            },
            RunOrder="1",
        ),
        codestarsourceconnection=codepipeline.Actions(
            Name="SourceAction",
            ActionTypeId=codepipeline.ActionTypeId(
                Category="Source",
                Owner="AWS",
                Version="1",
                Provider="CodeStarSourceConnection",
            ),
            OutputArtifacts=[
                codepipeline.OutputArtifacts(Name="SourceOutput")
            ],
            Configuration={
                "ConnectionArn": scm_connection_arn,
                "FullRepositoryId": scm_full_repository_id,
                "BranchName": scm_branch_name,
                "OutputArtifactFormat": "CODE_ZIP",
            },
            RunOrder="1",
        ),
        s3=codepipeline.Actions(
            Name="SourceAction",
            ActionTypeId=codepipeline.ActionTypeId(Category="Source",
                                                   Owner="AWS",
                                                   Version="1",
                                                   Provider="S3"),
            OutputArtifacts=[
                codepipeline.OutputArtifacts(Name="SourceOutput")
            ],
            Configuration={
                "S3Bucket": bucket_name,
                "S3ObjectKey": scm_object_key,
                "PollForSourceChanges": True,
            },
            RunOrder="1",
        ),
    ).get(scm_provider.lower())
    # Two-stage pipeline: Source -> Migrate (the CodeBuild project above).
    t.add_resource(
        codepipeline.Pipeline(
            "Pipeline",
            RoleArn=troposphere.GetAtt(codepipeline_role, "Arn"),
            Stages=[
                codepipeline.Stages(Name="Source", Actions=[source_actions]),
                codepipeline.Stages(
                    Name="Migrate",
                    Actions=[
                        codepipeline.Actions(
                            Name="Migrate",
                            InputArtifacts=[
                                codepipeline.InputArtifacts(
                                    Name="SourceOutput")
                            ],
                            ActionTypeId=codepipeline.ActionTypeId(
                                Category="Build",
                                Owner="AWS",
                                Version="1",
                                Provider="CodeBuild",
                            ),
                            Configuration={
                                "ProjectName": troposphere.Ref(project),
                                "PrimarySource": "SourceAction",
                            },
                            RunOrder="1",
                        )
                    ],
                ),
            ],
            ArtifactStore=codepipeline.ArtifactStore(
                Type="S3", Location=troposphere.Ref(artifact_store)),
        ))
    return t
def generate_codepipeline_template(
    codepipeline_role_name: str,
    codepipeline_role_path: str,
    codebuild_role_name: str,
    codebuild_role_path: str,
    output_format: str,
    migrate_role_arn: str,
) -> str:
    """Build the CICD template (CodeCommit repo, pipeline, build project, roles)
    for running ``aws-organized migrate`` and serialize it.

    Returns JSON when *output_format* is ``"json"``, otherwise YAML.
    """
    t = troposphere.Template()
    t.set_description(
        "CICD template that runs aws organized migrate for the given branch of the given repo"
    )
    project_name = "AWSOrganized-Migrate"
    repository_name = "AWS-Organized-environment"
    # Source repository holding the organization "environment" files.
    repo = t.add_resource(
        codecommit.Repository("Repository", RepositoryName=repository_name))
    # Encrypted bucket for pipeline artifacts.
    artifact_store = t.add_resource(
        s3.Bucket(
            "ArtifactStore",
            BucketEncryption=s3.BucketEncryption(
                ServerSideEncryptionConfiguration=[
                    s3.ServerSideEncryptionRule(
                        ServerSideEncryptionByDefault=s3.
                        ServerSideEncryptionByDefault(SSEAlgorithm="AES256"))
                ]),
        ))
    # Pipeline role: admin managed policy plus scoped inline statements for
    # CodeCommit pulls and artifact-store S3 access.
    codepipeline_role = t.add_resource(
        iam.Role(
            "CodePipelineRole",
            RoleName=codepipeline_role_name,
            Path=codepipeline_role_path,
            ManagedPolicyArns=[
                "arn:aws:iam::aws:policy/AdministratorAccess",
            ],
            Policies=[
                iam.Policy(
                    PolicyName=f"executionpermissions",
                    PolicyDocument=aws.PolicyDocument(
                        Version="2012-10-17",
                        Id=f"executionpermissions",
                        Statement=[
                            # Pull access to the source repository.
                            aws.Statement(
                                Sid="1",
                                Effect=aws.Allow,
                                Action=[
                                    awscd_codecommit.GitPull,
                                    awscd_codecommit.GetBranch,
                                    awscd_codecommit.GetCommit,
                                    awscd_codecommit.UploadArchive,
                                ],
                                Resource=[troposphere.GetAtt(repo, "Arn")],
                            ),
                            # Read access to the artifact bucket itself.
                            aws.Statement(
                                Sid="2",
                                Effect=aws.Allow,
                                Action=[
                                    awacs_s3.GetBucketPolicy,
                                    awacs_s3.GetBucketVersioning,
                                    awacs_s3.ListBucket,
                                ],
                                Resource=[
                                    troposphere.GetAtt(artifact_store, "Arn")
                                ],
                            ),
                            # Read access to objects in the artifact bucket.
                            aws.Statement(
                                Sid="3",
                                Effect=aws.Allow,
                                Action=[
                                    awacs_s3.GetObject,
                                    awacs_s3.GetObjectVersion,
                                ],
                                Resource=[
                                    troposphere.Join(":", [
                                        troposphere.GetAtt(
                                            artifact_store, 'Arn'), "*"
                                    ])
                                ],
                            ),
                            aws.Statement(
                                Sid="4",
                                Effect=aws.Allow,
                                Action=[
                                    awacs_s3.ListAllMyBuckets,
                                ],
                                Resource=[
                                    troposphere.Join(":", [
                                        "arn",
                                        troposphere.Partition,
                                        "s3:::*",
                                    ])
                                ],
                            ),
                            # aws.Statement(
                            #     Sid="5",
                            #     Effect=aws.Allow,
                            #     Action=[
                            #         aws.Action("s3", "*")
                            #     ],
                            #     Resource=[
                            #         troposphere.Join(":", [
                            #             troposphere.GetAtt(artifact_store, 'Arn'),
                            #             "*"
                            #         ])
                            #     ],
                            # ),
                            # aws.Statement(
                            #     Sid="6",
                            #     Effect=aws.Allow,
                            #     Action=[
                            #         aws.Action("s3", "*")
                            #     ],
                            #     Resource=[
                            #         troposphere.GetAtt(artifact_store, 'Arn')
                            #     ],
                            # ),
                        ],
                    ),
                )
            ],
            AssumeRolePolicyDocument=aws.PolicyDocument(
                Version="2012-10-17",
                Statement=[
                    aws.Statement(
                        Effect=aws.Allow,
                        Action=[awacs_sts.AssumeRole],
                        Principal=aws.Principal(
                            "Service", ["codepipeline.amazonaws.com"]),
                    ),
                ],
            ),
        ))
    # Build role: admin managed policy plus inline statements for CloudWatch
    # Logs, pipeline artifact S3 access, CodeBuild reports, and assuming the
    # migrate role.
    codebuild_role = t.add_resource(
        iam.Role(
            "CodeBuildRole",
            RoleName=codebuild_role_name,
            Path=codebuild_role_path,
            ManagedPolicyArns=[
                "arn:aws:iam::aws:policy/AdministratorAccess",
            ],
            Policies=[
                iam.Policy(
                    PolicyName=f"executionpermissions",
                    PolicyDocument=aws.PolicyDocument(
                        Version="2012-10-17",
                        Id=f"executionpermissions",
                        Statement=[
                            aws.Statement(
                                Sid="1",
                                Effect=aws.Allow,
                                Action=[
                                    awacs_logs.CreateLogGroup,
                                    awacs_logs.CreateLogStream,
                                    awacs_logs.PutLogEvents,
                                ],
                                Resource=[
                                    # "arn:aws:logs:eu-west-1:669925765091:log-group:/aws/codebuild/examplecodebuild",
                                    # "arn:aws:logs:eu-west-1:669925765091:log-group:/aws/codebuild/examplecodebuild:*",
                                    {
                                        "Fn::Sub": [
                                            f"arn:${{AWS::Partition}}:logs:${{AWS::Region}}:${{AWS::AccountId}}:log-group:/aws/codebuild/{project_name}",
                                            {},
                                        ]
                                    },
                                    {
                                        "Fn::Sub": [
                                            f"arn:${{AWS::Partition}}:logs:${{AWS::Region}}:${{AWS::AccountId}}:log-group:/aws/codebuild/{project_name}:*",
                                            {},
                                        ]
                                    },
                                ],
                            ),
                            aws.Statement(
                                Sid="2",
                                Effect=aws.Allow,
                                Action=[
                                    awacs_s3.PutObject,
                                    awacs_s3.GetObject,
                                    awacs_s3.GetObjectVersion,
                                    awacs_s3.GetBucketAcl,
                                    awacs_s3.GetBucketLocation,
                                ],
                                Resource=[
                                    # "arn:aws:s3:::codepipeline-eu-west-1-*",
                                    {
                                        "Fn::Sub": [
                                            f"arn:${{AWS::Partition}}:s3:::codepipeline-${{AWS::Region}}-*",
                                            {},
                                        ]
                                    },
                                ],
                            ),
                            aws.Statement(
                                Sid="3",
                                Effect=aws.Allow,
                                Action=[
                                    awacs_codebuild.CreateReportGroup,
                                    awacs_codebuild.CreateReport,
                                    awacs_codebuild.UpdateReport,
                                    awacs_codebuild.BatchPutTestCases,
                                    awacs_codebuild.BatchPutCodeCoverages,
                                ],
                                Resource=[
                                    # "arn:aws:codebuild:eu-west-1:669925765091:report-group/examplecodebuild-*",
                                    {
                                        "Fn::Sub": [
                                            f"arn:${{AWS::Partition}}:codebuild:${{AWS::Region}}:${{AWS::AccountId}}:report-group/{project_name}-*",
                                            {},
                                        ]
                                    },
                                ],
                            ),
                            # Allow the build to assume the migrate role.
                            aws.Statement(Sid="4",
                                          Effect=aws.Allow,
                                          Action=[awacs_sts.AssumeRole],
                                          Resource=[migrate_role_arn]),
                            # aws.Statement(
                            #     Sid="5",
                            #     Effect=aws.Allow,
                            #     Action=[
                            #         aws.Action("s3", "*")
                            #     ],
                            #     Resource=[
                            #         troposphere.Join(":", [
                            #             troposphere.GetAtt(artifact_store, 'Arn'),
                            #             "*"
                            #         ])
                            #     ],
                            # ),
                            # aws.Statement(
                            #     Sid="6",
                            #     Effect=aws.Allow,
                            #     Action=[
                            #         aws.Action("s3", "*")
                            #     ],
                            #     Resource=[
                            #         troposphere.GetAtt(artifact_store, 'Arn')
                            #     ],
                            # ),
                        ],
                    ),
                )
            ],
            AssumeRolePolicyDocument=aws.PolicyDocument(
                Version="2012-10-17",
                Statement=[
                    aws.Statement(
                        Effect=aws.Allow,
                        Action=[awacs_sts.AssumeRole],
                        Principal=aws.Principal("Service",
                                                ["codebuild.amazonaws.com"]),
                    ),
                ],
            ),
        ))
    # CodeBuild project that installs aws-organized and runs the migrate command.
    project = t.add_resource(
        codebuild.Project(
            "AWSOrganizedMigrate",
            Artifacts=codebuild.Artifacts(Type="CODEPIPELINE"),
            Environment=codebuild.Environment(
                ComputeType="BUILD_GENERAL1_SMALL",
                Image="aws/codebuild/standard:4.0",
                Type="LINUX_CONTAINER",
                EnvironmentVariables=[{
                    "Name": "MIGRATE_ROLE_ARN",
                    "Type": "PLAINTEXT",
                    "Value": migrate_role_arn,
                }]),
            Name=project_name,
            ServiceRole=troposphere.GetAtt(codebuild_role, "Arn"),
            Source=codebuild.Source(
                Type="CODEPIPELINE",
                BuildSpec=yaml.safe_dump(
                    dict(
                        version="0.2",
                        phases=dict(
                            install={
                                "runtime-versions": dict(python="3.8"),
                                "commands": [
                                    "pip install aws-organized",
                                ],
                            },
                            build={
                                "commands": [
                                    "aws-organized migrate $(MIGRATE_ROLE_ARN)",
                                ],
                            },
                        ),
                        artifacts=dict(files=[
                            "environment",
                        ], ),
                    )),
            ),
        ))
    # Source stage action pulling the master branch of the CodeCommit repo.
    source_actions = codepipeline.Actions(
        Name="SourceAction",
        ActionTypeId=codepipeline.ActionTypeId(
            Category="Source",
            Owner="AWS",
            Version="1",
            Provider="CodeCommit",
        ),
        OutputArtifacts=[codepipeline.OutputArtifacts(Name="SourceOutput")],
        Configuration={
            "RepositoryName": repository_name,
            "BranchName": "master",
            "PollForSourceChanges": "true",
        },
        RunOrder="1",
    )
    # Two-stage pipeline: Source -> Migrate (the CodeBuild project above).
    pipeline = t.add_resource(
        codepipeline.Pipeline(
            "Pipeline",
            RoleArn=troposphere.GetAtt(codepipeline_role, "Arn"),
            Stages=[
                codepipeline.Stages(
                    Name="Source",
                    Actions=[source_actions],
                ),
                codepipeline.Stages(
                    Name="Migrate",
                    Actions=[
                        codepipeline.Actions(
                            Name="Migrate",
                            InputArtifacts=[
                                codepipeline.InputArtifacts(
                                    Name="SourceOutput")
                            ],
                            ActionTypeId=codepipeline.ActionTypeId(
                                Category="Build",
                                Owner="AWS",
                                Version="1",
                                Provider="CodeBuild",
                            ),
                            Configuration={
                                "ProjectName": troposphere.Ref(project),
                                "PrimarySource": "SourceAction",
                            },
                            RunOrder="1",
                        )
                    ],
                ),
            ],
            ArtifactStore=codepipeline.ArtifactStore(
                Type="S3", Location=troposphere.Ref(artifact_store)),
        ))
    if output_format == "json":
        return t.to_json()
    else:
        return t.to_yaml()
def render(
    self,
    template,
    name,
    version,
    description,
    source,
    product_ids_by_region,
    tags,
    friendly_uid,
) -> str:
    """Render the CDK 1.0.0 product pipeline template as YAML.

    Builds a five-stage CodePipeline (Source, Build, Validate, Package,
    Deploy) plus the Build CodeBuild project, with one artifact store per
    region in ``product_ids_by_region``.

    NOTE(review): ``tags`` is accepted but not referenced anywhere in this
    method — confirm whether that is intentional.
    """
    template_description = f"{friendly_uid}-{version}"
    tpl = t.Template(Description=template_description)
    all_regions = product_ids_by_region.keys()
    # Source stage: pick the action matching source["Provider"] from a dict
    # of pre-built alternatives (codecommit / github / codestar / s3).
    source_stage = codepipeline.Stages(
        Name="Source",
        Actions=[
            dict(
                codecommit=codepipeline.Actions(
                    RunOrder=1,
                    RoleArn=t.Sub(
                        "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/servicecatalog-product-factory/SourceRole"
                    ),
                    ActionTypeId=codepipeline.ActionTypeId(
                        Category="Source",
                        Owner="AWS",
                        Version="1",
                        Provider="CodeCommit",
                    ),
                    OutputArtifacts=[
                        codepipeline.OutputArtifacts(Name=SOURCE_OUTPUT_ARTIFACT)
                    ],
                    Configuration={
                        "RepositoryName": source.get("Configuration").get(
                            "RepositoryName"
                        ),
                        "BranchName": source.get("Configuration").get("BranchName"),
                        "PollForSourceChanges": source.get("Configuration").get(
                            "PollForSourceChanges", True
                        ),
                    },
                    Name="Source",
                ),
                github=codepipeline.Actions(
                    RunOrder=1,
                    ActionTypeId=codepipeline.ActionTypeId(
                        Category="Source",
                        Owner="ThirdParty",
                        Version="1",
                        Provider="GitHub",
                    ),
                    OutputArtifacts=[
                        codepipeline.OutputArtifacts(Name=SOURCE_OUTPUT_ARTIFACT)
                    ],
                    Configuration={
                        "Owner": source.get("Configuration").get("Owner"),
                        "Repo": source.get("Configuration").get("Repo"),
                        "Branch": source.get("Configuration").get("Branch"),
                        # OAuth token is resolved from Secrets Manager at deploy time.
                        "OAuthToken": t.Join(
                            "",
                            [
                                "{{resolve:secretsmanager:",
                                source.get("Configuration").get(
                                    "SecretsManagerSecret"
                                ),
                                ":SecretString:OAuthToken}}",
                            ],
                        ),
                        "PollForSourceChanges": source.get("Configuration").get(
                            "PollForSourceChanges"
                        ),
                    },
                    Name="Source",
                ),
                codestarsourceconnection=codepipeline.Actions(
                    RunOrder=1,
                    RoleArn=t.Sub(
                        "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/servicecatalog-product-factory/SourceRole"
                    ),
                    ActionTypeId=codepipeline.ActionTypeId(
                        Category="Source",
                        Owner="AWS",
                        Version="1",
                        Provider="CodeStarSourceConnection",
                    ),
                    OutputArtifacts=[
                        codepipeline.OutputArtifacts(Name=SOURCE_OUTPUT_ARTIFACT)
                    ],
                    Configuration={
                        "ConnectionArn": source.get("Configuration").get(
                            "ConnectionArn"
                        ),
                        "FullRepositoryId": source.get("Configuration").get(
                            "FullRepositoryId"
                        ),
                        "BranchName": source.get("Configuration").get("BranchName"),
                        "OutputArtifactFormat": source.get("Configuration").get(
                            "OutputArtifactFormat"
                        ),
                    },
                    Name="Source",
                ),
                s3=codepipeline.Actions(
                    RunOrder=1,
                    ActionTypeId=codepipeline.ActionTypeId(
                        Category="Source",
                        Owner="AWS",
                        Version="1",
                        Provider="S3",
                    ),
                    OutputArtifacts=[
                        codepipeline.OutputArtifacts(Name=SOURCE_OUTPUT_ARTIFACT)
                    ],
                    Configuration={
                        "S3Bucket": source.get("Configuration").get("S3Bucket"),
                        "S3ObjectKey": source.get("Configuration").get(
                            "S3ObjectKey"
                        ),
                        "PollForSourceChanges": source.get("Configuration").get(
                            "PollForSourceChanges"
                        ),
                    },
                    Name="Source",
                ),
            ).get(source.get("Provider", "").lower())
        ],
    )
    build_project_name = t.Sub("${AWS::StackName}-build")
    # Merge default runtime versions / extra install commands from the
    # product template's Configuration section.
    configuration = template.get("Configuration", {})
    runtime_versions = dict(
        nodejs=constants.BUILDSPEC_RUNTIME_VERSIONS_NODEJS_DEFAULT,
    )
    if configuration.get("runtime-versions"):
        runtime_versions.update(configuration.get("runtime-versions"))
    extra_commands = list(configuration.get("install", {}).get("commands", []))
    # Build project: installs the factory tooling, synthesizes the CDK app
    # and generates the product CloudFormation template.
    tpl.add_resource(
        codebuild.Project(
            "BuildProject",
            Name=build_project_name,
            ServiceRole=t.Sub(
                "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/servicecatalog-product-factory/DeliveryCodeRole"
            ),
            Tags=t.Tags.from_dict(**{"ServiceCatalogPuppet:Actor": "Framework"}),
            Artifacts=codebuild.Artifacts(Type="CODEPIPELINE"),
            TimeoutInMinutes=60,
            Environment=codebuild.Environment(
                ComputeType=constants.ENVIRONMENT_COMPUTE_TYPE_DEFAULT,
                Image=constants.ENVIRONMENT_IMAGE_DEFAULT,
                Type=constants.ENVIRONMENT_TYPE_DEFAULT,
                # Placeholder values; the pipeline's Build action overrides
                # these via its EnvironmentVariables configuration.
                EnvironmentVariables=[
                    {"Type": "PLAINTEXT", "Name": "ACCOUNT_ID", "Value": "CHANGE_ME",},
                    {"Type": "PLAINTEXT", "Name": "REGION", "Value": "CHANGE_ME",},
                    {"Type": "PLAINTEXT", "Name": "NAME", "Value": "CHANGE_ME",},
                    {"Type": "PLAINTEXT", "Name": "VERSION", "Value": "CHANGE_ME",},
                    {"Type": "PLAINTEXT", "Name": "PROVISIONER_NAME", "Value": "CHANGE_ME",},
                    {"Type": "PLAINTEXT", "Name": "PROVISIONER_VERSION", "Value": "CHANGE_ME",},
                ],
            ),
            Source=codebuild.Source(
                BuildSpec=t.Sub(
                    yaml.safe_dump(
                        dict(
                            version=0.2,
                            phases=dict(
                                install={
                                    "runtime-versions": runtime_versions,
                                    "commands": [
                                        f"pip install {constants.VERSION}"
                                        if "http" in constants.VERSION
                                        else f"pip install aws-service-catalog-factory=={constants.VERSION}",
                                    ]
                                    + extra_commands,
                                },
                                pre_build={
                                    "commands": [
                                        "npm install",
                                        "npm run cdk synth -- --output sct-synth-output",
                                    ],
                                },
                                build={
                                    "commands": [
                                        f"servicecatalog-factory generate-template $PROVISIONER_NAME $PROVISIONER_VERSION $NAME $VERSION . > product.template.yaml",
                                    ]
                                },
                            ),
                            artifacts={
                                "name": BUILD_OUTPUT_ARTIFACT,
                                "files": ["*", "**/*"],
                                "exclude-paths": ["sct-synth-output/*"],
                            },
                        )
                    )
                ),
                Type="CODEPIPELINE",
            ),
            Description=t.Sub("Create a build stage for template CDK 1.0.0"),
        )
    )
    # Build stage: runs the project above with real values substituted in.
    build_stage = codepipeline.Stages(
        Name="Build",
        Actions=[
            codepipeline.Actions(
                InputArtifacts=[
                    codepipeline.InputArtifacts(Name=SOURCE_OUTPUT_ARTIFACT),
                ],
                Name='Build',
                ActionTypeId=codepipeline.ActionTypeId(
                    Category="Build",
                    Owner="AWS",
                    Version="1",
                    Provider="CodeBuild",
                ),
                OutputArtifacts=[
                    codepipeline.OutputArtifacts(Name=BUILD_OUTPUT_ARTIFACT)
                ],
                Configuration={
                    "ProjectName": build_project_name,
                    "PrimarySource": SOURCE_OUTPUT_ARTIFACT,
                    "EnvironmentVariables": t.Sub(
                        json.dumps(
                            [
                                dict(name="ACCOUNT_ID", value="${AWS::AccountId}", type="PLAINTEXT"),
                                dict(name="REGION", value="${AWS::Region}", type="PLAINTEXT"),
                                dict(name="PROVISIONER_NAME", value='CDK', type="PLAINTEXT"),
                                dict(name="PROVISIONER_VERSION", value='1.0.0', type="PLAINTEXT"),
                                dict(name="NAME", value=name, type="PLAINTEXT"),
                                dict(
                                    name="VERSION", value=version, type="PLAINTEXT"
                                ),
                            ]
                        )
                    ),
                },
                RunOrder=1,
            )
        ],
    )
    # Validate stage: runs the shared template-validation project.
    validate_stage = codepipeline.Stages(
        Name="Validate",
        Actions=[
            codepipeline.Actions(
                InputArtifacts=[
                    codepipeline.InputArtifacts(Name=BUILD_OUTPUT_ARTIFACT),
                ],
                Name="Validate",
                ActionTypeId=codepipeline.ActionTypeId(
                    Category="Test",
                    Owner="AWS",
                    Version="1",
                    Provider="CodeBuild",
                ),
                OutputArtifacts=[
                    codepipeline.OutputArtifacts(Name=VALIDATE_OUTPUT_ARTIFACT)
                ],
                Configuration={
                    "ProjectName": shared_resources.VALIDATE_PROJECT_NAME,
                    "PrimarySource": BUILD_OUTPUT_ARTIFACT,
                },
                RunOrder=1,
            )
        ],
    )
    # Package stage: runs the shared CDK package project.
    package_stage = codepipeline.Stages(
        Name="Package",
        Actions=[
            codepipeline.Actions(
                InputArtifacts=[
                    codepipeline.InputArtifacts(Name=BUILD_OUTPUT_ARTIFACT),
                ],
                Name="Package",
                ActionTypeId=codepipeline.ActionTypeId(
                    Category="Build",
                    Owner="AWS",
                    Version="1",
                    Provider="CodeBuild",
                ),
                OutputArtifacts=[
                    codepipeline.OutputArtifacts(Name=PACKAGE_OUTPUT_ARTIFACT)
                ],
                Configuration={
                    "ProjectName": cdk_shared_resources.CDK_PACKAGE_PROJECT_NAME,
                    "PrimarySource": BUILD_OUTPUT_ARTIFACT,
                    "EnvironmentVariables": t.Sub(
                        json.dumps(
                            [
                                dict(
                                    name="PIPELINE_NAME",
                                    value="${AWS::StackName}-pipeline",
                                    type="PLAINTEXT",
                                ),
                                dict(
                                    name="CODEPIPELINE_ID",
                                    value="#{codepipeline.PipelineExecutionId}",
                                    type="PLAINTEXT",
                                ),
                                dict(name="NAME", value=name, type="PLAINTEXT"),
                                dict(
                                    name="VERSION", value=version, type="PLAINTEXT"
                                ),
                                dict(
                                    name="DESCRIPTION",
                                    value=description,
                                    type="PLAINTEXT",
                                ),
                                dict(
                                    name="TEMPLATE_FORMAT",
                                    value="yaml",
                                    type="PLAINTEXT",
                                ),
                                dict(
                                    name="PROVISIONER",
                                    value="CDK/1.0.0",
                                    type="PLAINTEXT",
                                ),
                            ]
                        )
                    ),
                },
                RunOrder=1,
            )
        ],
    )
    # Deploy stage: runs the shared CDK deploy project.
    deploy_stage = codepipeline.Stages(
        Name="Deploy",
        Actions=[
            codepipeline.Actions(
                InputArtifacts=[
                    codepipeline.InputArtifacts(Name=PACKAGE_OUTPUT_ARTIFACT),
                ],
                Name="Deploy",
                ActionTypeId=codepipeline.ActionTypeId(
                    Category="Build",
                    Owner="AWS",
                    Version="1",
                    Provider="CodeBuild",
                ),
                OutputArtifacts=[
                    codepipeline.OutputArtifacts(Name=DEPLOY_OUTPUT_ARTIFACT)
                ],
                Configuration={
                    "ProjectName": cdk_shared_resources.CDK_DEPLOY_PROJECT_NAME,
                    "PrimarySource": PACKAGE_OUTPUT_ARTIFACT,
                    "EnvironmentVariables": t.Sub(
                        json.dumps(
                            [
                                dict(
                                    name="ACCOUNT_ID",
                                    value="${AWS::AccountId}",
                                    type="PLAINTEXT",
                                ),
                                dict(
                                    name="REGION",
                                    value="${AWS::Region}",
                                    type="PLAINTEXT",
                                ),
                                dict(
                                    name="PIPELINE_NAME",
                                    value="${AWS::StackName}-pipeline",
                                    type="PLAINTEXT",
                                ),
                                dict(
                                    name="CODEPIPELINE_ID",
                                    value="#{codepipeline.PipelineExecutionId}",
                                    type="PLAINTEXT",
                                ),
                                dict(
                                    name="PROVISIONER",
                                    value="CDK/1.0.0",
                                    type="PLAINTEXT",
                                ),
                            ]
                        )
                    ),
                },
                RunOrder=1,
            )
        ],
    )
    # Assemble the pipeline with one artifact store per target region.
    tpl.add_resource(
        codepipeline.Pipeline(
            "Pipeline",
            RoleArn=t.Sub(
                "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/servicecatalog-product-factory/CodePipelineRole"
            ),
            Stages=[
                source_stage,
                build_stage,
                validate_stage,
                package_stage,
                deploy_stage,
            ],
            Name=t.Sub("${AWS::StackName}-pipeline"),
            ArtifactStores=[
                codepipeline.ArtifactStoreMap(
                    Region=region,
                    ArtifactStore=codepipeline.ArtifactStore(
                        Type="S3",
                        Location=t.Sub(
                            "sc-factory-artifacts-${AWS::AccountId}-" + region
                        ),
                    ),
                )
                for region in all_regions
            ],
            RestartExecutionOnUpdate=False,
        )
    )
    return tpl.to_yaml(clean_up=True)
def generate_cft():
    """Build the HPCC "HaaS Stack" CloudFormation template.

    Creates a single-subnet VPC with internet access, an IAM instance
    role/profile, one shared security group, and two auto-scaling groups
    (a THOR master and a configurable number of slaves) whose instances
    bootstrap HPCC from user-data and signal completion with cfn-signal.

    Returns:
        dict: the template as a Python dict (``Template.to_dict()``).
        The template JSON is also printed to stdout as a side effect
        (looks like leftover debugging output — TODO confirm callers
        do not rely on it).
    """
    VPC_NETWORK = "10.0.0.0/16"
    VPC_PRIVATE = "10.0.0.0/24"

    t = troposphere.Template()
    t.add_description("HaaS Stack")

    # ------------------------------------------------------------------
    # Stack parameters
    # ------------------------------------------------------------------
    key_name = t.add_parameter(
        troposphere.Parameter(
            "KeyName",
            Type="AWS::EC2::KeyPair::KeyName",
            ConstraintDescription="must be a valid keypair Id",
        ))
    username_and_password = t.add_parameter(
        troposphere.Parameter(
            "UserNameAndPassword",
            Type="String",
            Default="",
            Description=
            "(Optional) Enter like: username/password Used to log into ECL Watch and ECL IDE."
        ))
    cluster_size = t.add_parameter(
        troposphere.Parameter(
            "ClusterSize",
            Type="Number",
            Default="1",
            Description="Number of slave instances to be launched"))
    num_slaves = t.add_parameter(
        troposphere.Parameter(
            "NumberOfSlavesPerNode",
            Type="Number",
            Default="1",
            Description="Number of THOR slave nodes per slave instance"))
    master_instance_type = t.add_parameter(
        troposphere.Parameter(
            "MasterInstanceType",
            Type="String",
            AllowedValues=[
                't2.micro', 'c4.large', 'c4.xlarge', 'c4.2xlarge',
                'm4.large', 'm4.xlarge', 'm4.2xlarge', 'r4.large',
                'r4.xlarge', 'r4.2xlarge'
            ],
            Default="c4.large",
            Description="HPCC Thor Master EC2 instance type"))
    slave_instance_type = t.add_parameter(
        troposphere.Parameter(
            "SlaveInstanceType",
            Type="String",
            AllowedValues=[
                't2.micro', 'c4.large', 'c4.xlarge', 'c4.2xlarge',
                'm4.large', 'm4.xlarge', 'm4.2xlarge', 'r4.large',
                'r4.xlarge', 'r4.2xlarge'
            ],
            Default="c4.large",
            Description="HPCC Thor Slave EC2 instance type"))
    # NOTE(review): declared but never referenced — the two
    # AvailabilityZones usages below are commented out, so this
    # parameter is dead weight in the generated template.
    vpc_availability_zone = t.add_parameter(
        troposphere.Parameter(
            "AvailabilityZone",
            Type="String",
            AllowedValues=['us-east-1d'],
            Default="us-east-1d",
            Description="Availability zone",
        ))

    # Single hard-coded AMI: this stack effectively only supports
    # us-east-1 (FindInMap below would fail in any other region).
    t.add_mapping('RegionMap', {'us-east-1': {'64': 'ami-24c2ee32'}})

    # ------------------------------------------------------------------
    # IAM: instance role + profile
    # ------------------------------------------------------------------
    # NOTE(review): the inline "root" policy grants Action "*" on
    # Resource "*" — every cluster instance gets full admin access.
    # Consider narrowing before production use.
    instance_role = t.add_resource(
        troposphere.iam.Role(
            "HPCCInstanceRoles",
            AssumeRolePolicyDocument=awacs.aws.Policy(Statement=[
                awacs.aws.Statement(Effect=awacs.aws.Allow,
                                    Action=[awacs.sts.AssumeRole],
                                    Principal=awacs.aws.Principal(
                                        "Service", ["ec2.amazonaws.com"]))
            ]),
            Policies=[
                troposphere.iam.Policy(
                    PolicyName="root",
                    PolicyDocument=awacs.aws.Policy(Statement=[
                        awacs.aws.Statement(Effect=awacs.aws.Allow,
                                            Action=[awacs.aws.Action("*")],
                                            Resource=["*"])
                    ]))
            ],
            Path="/"))
    instance_profile = t.add_resource(
        troposphere.iam.InstanceProfile(
            "HPCCInstanceProfile",
            Path="/",
            Roles=[troposphere.Ref(instance_role)]))

    # ------------------------------------------------------------------
    # Networking: VPC, internet gateway, public routing, subnet
    # ------------------------------------------------------------------
    vpc = t.add_resource(
        troposphere.ec2.VPC(
            "HPCCVpc",
            CidrBlock=VPC_NETWORK,
            InstanceTenancy="default",
            EnableDnsSupport=True,
            EnableDnsHostnames=False,
            Tags=troposphere.Tags(Name=troposphere.Ref("AWS::StackName"))))
    internetGateway = t.add_resource(
        troposphere.ec2.InternetGateway(
            "InternetGateway",
            Tags=troposphere.Tags(Name=troposphere.Join(
                "-", [troposphere.Ref("AWS::StackName"), "gateway"]), ),
        ))
    gatewayAttachment = t.add_resource(
        troposphere.ec2.VPCGatewayAttachment(
            "InternetGatewayAttachment",
            InternetGatewayId=troposphere.Ref(internetGateway),
            VpcId=troposphere.Ref(vpc)))
    # public routing table
    publicRouteTable = t.add_resource(
        troposphere.ec2.RouteTable(
            "PublicRouteTable",
            VpcId=troposphere.Ref(vpc),
            Tags=troposphere.Tags(Name=troposphere.Join(
                "-", [troposphere.Ref("AWS::StackName"), "public-rt"]), ),
        ))
    # Default route must wait for the gateway attachment, hence DependsOn.
    internetRoute = t.add_resource(
        troposphere.ec2.Route(
            "RouteToInternet",
            DestinationCidrBlock="0.0.0.0/0",
            GatewayId=troposphere.Ref(internetGateway),
            RouteTableId=troposphere.Ref(publicRouteTable),
            DependsOn=gatewayAttachment.title))
    subnet = t.add_resource(
        troposphere.ec2.Subnet(
            "Subnet",
            CidrBlock=VPC_PRIVATE,
            Tags=troposphere.Tags(Name=troposphere.Join(
                "-", [troposphere.Ref("AWS::StackName"), "subnet"]), ),
            VpcId=troposphere.Ref(vpc)))
    t.add_resource(
        troposphere.ec2.SubnetRouteTableAssociation(
            "SubnetRouteTableAssociation",
            RouteTableId=troposphere.Ref(publicRouteTable),
            SubnetId=troposphere.Ref(subnet)))

    # Cluster placement group for low-latency node-to-node traffic.
    placement_group = t.add_resource(
        troposphere.ec2.PlacementGroup("HPCCPlacementGroup",
                                       Strategy="cluster"))

    # ------------------------------------------------------------------
    # Security group shared by master and slaves
    # ------------------------------------------------------------------
    # NOTE(review): ingress includes tcp 0-65535 and udp 0-65535 from
    # 0.0.0.0/0, which makes the individual port rules redundant and
    # leaves every instance fully open to the internet — flagging for
    # review rather than changing behavior here.
    security_groups = t.add_resource(
        troposphere.ec2.SecurityGroup(
            "HPCCSecurityGroups",
            GroupDescription="Enable SSH and HTTP access on the inbound port",
            SecurityGroupEgress=[
                troposphere.ec2.SecurityGroupRule(
                    IpProtocol="-1",
                    CidrIp="0.0.0.0/0",
                ),
            ],
            SecurityGroupIngress=[
                troposphere.ec2.SecurityGroupRule(
                    IpProtocol="tcp",
                    FromPort=8888,
                    ToPort=8888,
                    CidrIp="0.0.0.0/0",
                ),
                troposphere.ec2.SecurityGroupRule(
                    IpProtocol="tcp",
                    FromPort=9042,
                    ToPort=9042,
                    CidrIp="0.0.0.0/0",
                ),
                troposphere.ec2.SecurityGroupRule(
                    IpProtocol="tcp",
                    FromPort=7000,
                    ToPort=7000,
                    CidrIp="0.0.0.0/0",
                ),
                troposphere.ec2.SecurityGroupRule(
                    IpProtocol="tcp",
                    FromPort=7001,
                    ToPort=7001,
                    CidrIp="0.0.0.0/0",
                ),
                troposphere.ec2.SecurityGroupRule(
                    IpProtocol="tcp",
                    FromPort=7199,
                    ToPort=7199,
                    CidrIp="0.0.0.0/0",
                ),
                troposphere.ec2.SecurityGroupRule(
                    IpProtocol="tcp",
                    FromPort=9160,
                    ToPort=9160,
                    CidrIp="0.0.0.0/0",
                ),
                troposphere.ec2.SecurityGroupRule(
                    IpProtocol="tcp",
                    FromPort=61620,
                    ToPort=61620,
                    CidrIp="0.0.0.0/0",
                ),
                troposphere.ec2.SecurityGroupRule(
                    IpProtocol="tcp",
                    FromPort=61621,
                    ToPort=61621,
                    CidrIp="0.0.0.0/0",
                ),
                troposphere.ec2.SecurityGroupRule(
                    IpProtocol="tcp",
                    FromPort=8002,
                    ToPort=8002,
                    CidrIp="0.0.0.0/0",
                ),
                troposphere.ec2.SecurityGroupRule(
                    IpProtocol="tcp",
                    FromPort=8010,
                    ToPort=8010,
                    CidrIp="0.0.0.0/0",
                ),
                troposphere.ec2.SecurityGroupRule(
                    IpProtocol="tcp",
                    FromPort=8015,
                    ToPort=8015,
                    CidrIp="0.0.0.0/0",
                ),
                troposphere.ec2.SecurityGroupRule(
                    IpProtocol="tcp",
                    FromPort=8145,
                    ToPort=8145,
                    CidrIp="0.0.0.0/0",
                ),
                troposphere.ec2.SecurityGroupRule(
                    IpProtocol="tcp",
                    FromPort=22,
                    ToPort=22,
                    CidrIp="0.0.0.0/0",
                ),
                troposphere.ec2.SecurityGroupRule(
                    IpProtocol="tcp",
                    FromPort=0,
                    ToPort=65535,
                    CidrIp="0.0.0.0/0",
                ),
                troposphere.ec2.SecurityGroupRule(
                    IpProtocol="udp",
                    FromPort=0,
                    ToPort=65535,
                    CidrIp="0.0.0.0/0",
                ),
                troposphere.ec2.SecurityGroupRule(
                    IpProtocol="tcp",
                    FromPort=8050,
                    ToPort=8050,
                    CidrIp="0.0.0.0/0",
                ),
                troposphere.ec2.SecurityGroupRule(
                    IpProtocol="tcp",
                    FromPort=8008,
                    ToPort=8008,
                    CidrIp="0.0.0.0/0",
                ),
                troposphere.ec2.SecurityGroupRule(
                    IpProtocol="tcp",
                    FromPort=9876,
                    ToPort=9876,
                    CidrIp="0.0.0.0/0",
                ),
                troposphere.ec2.SecurityGroupRule(
                    IpProtocol="icmp",
                    FromPort=-1,
                    ToPort=-1,
                    CidrIp="0.0.0.0/0",
                ),
            ],
            VpcId=troposphere.Ref(vpc)))

    # ------------------------------------------------------------------
    # AutoScaling: slave launch config + ASG
    # ------------------------------------------------------------------
    slave_launch_config = t.add_resource(
        troposphere.autoscaling.LaunchConfiguration(
            "SlaveLaunchCfg",
            ImageId=troposphere.FindInMap("RegionMap",
                                          troposphere.Ref("AWS::Region"),
                                          "64"),
            InstanceType=troposphere.Ref(slave_instance_type),
            AssociatePublicIpAddress="True",
            KeyName=troposphere.Ref(key_name),
            SecurityGroups=[troposphere.Ref(security_groups)],
            IamInstanceProfile=troposphere.Ref(instance_profile),
            # User-data: pull the project repo, run the HPCC bootstrap
            # script, then cfn-signal the SlaveASG resource.
            UserData=troposphere.Base64(
                troposphere.Join('\n', [
                    "#!/bin/bash",
                    "exec > >(tee /var/log/user-data.log|logger -t user-data -s 2>/dev/console) 2>&1",
                    "echo [Initialization] starting the slave node",
                    troposphere.Join(" ", [
                        "su - osr /bin/bash -c 'cd /home/osr/project-aws; git pull; /bin/bash scripts/auto_hpcc.sh",
                        troposphere.Ref("AWS::StackName"),
                        troposphere.Ref("AWS::Region"),
                        "'",
                    ]),
                    "echo [Initialization] completed the slave node",
                    "echo SCRIPT: 'Signal stack that setup of HPCC System is complete.'",
                    troposphere.Join(" ", [
                        "/usr/local/bin/cfn-signal -e 0 --stack ",
                        troposphere.Ref("AWS::StackName"),
                        "--resource SlaveASG ",
                        "--region ",
                        troposphere.Ref("AWS::Region")
                    ]),
                    "echo SCRIPT: 'Done signaling stack that setup of HPCC System has completed.'"
                ])),
        ))
    slave_autoscaling_group = t.add_resource(
        troposphere.autoscaling.AutoScalingGroup(
            "SlaveASG",
            DesiredCapacity=troposphere.Ref(cluster_size),
            # @TODO: disable here to support t2.micro for cheap testing
            #PlacementGroup=troposphere.Ref(placement_group),
            LaunchConfigurationName=troposphere.Ref(slave_launch_config),
            # Min == Max == Desired: a fixed-size group, no scaling policies.
            MinSize=troposphere.Ref(cluster_size),
            MaxSize=troposphere.Ref(cluster_size),
            HealthCheckType="EC2",
            HealthCheckGracePeriod="300",
            VPCZoneIdentifier=[troposphere.Ref(subnet)],
            #AvailabilityZones=[troposphere.Ref(vpc_availability_zone)],
            # Tags are propagated to instances (third arg True) so the
            # bootstrap script can discover cluster settings.
            Tags=[
                troposphere.autoscaling.Tag("StackName",
                                            troposphere.Ref("AWS::StackName"),
                                            True),
                troposphere.autoscaling.Tag("slavesPerNode",
                                            troposphere.Ref(num_slaves),
                                            True),
                troposphere.autoscaling.Tag(
                    "UserNameAndPassword",
                    troposphere.Ref(username_and_password), True),
                troposphere.autoscaling.Tag(
                    "Name",
                    troposphere.Join(
                        "-", [troposphere.Ref("AWS::StackName"), "Slave"]),
                    True),
            ],
        ))

    # ------------------------------------------------------------------
    # AutoScaling: master launch config + ASG (always exactly one node)
    # ------------------------------------------------------------------
    master_launch_config = t.add_resource(
        troposphere.autoscaling.LaunchConfiguration(
            "MasterLaunchCfg",
            ImageId=troposphere.FindInMap("RegionMap",
                                          troposphere.Ref("AWS::Region"),
                                          "64"),
            InstanceType=troposphere.Ref(master_instance_type),
            AssociatePublicIpAddress="True",
            KeyName=troposphere.Ref(key_name),
            SecurityGroups=[troposphere.Ref(security_groups)],
            IamInstanceProfile=troposphere.Ref(instance_profile),
            # Same bootstrap flow as the slaves, but signals MasterASG.
            UserData=troposphere.Base64(
                troposphere.Join('\n', [
                    "#!/bin/bash",
                    "exec > >(tee /var/log/user-data.log|logger -t user-data -s 2>/dev/console) 2>&1",
                    "echo [Initialization] starting the master node",
                    troposphere.Join(" ", [
                        "su - osr /bin/bash -c 'cd /home/osr/project-aws; git pull; /bin/bash scripts/auto_hpcc.sh",
                        troposphere.Ref("AWS::StackName"),
                        troposphere.Ref("AWS::Region"),
                        "'",
                    ]),
                    "echo [Initialization] completed the master node",
                    "echo SCRIPT: 'Signal stack that setup of HPCC System is complete.'",
                    troposphere.Join(" ", [
                        "/usr/local/bin/cfn-signal -e 0 --stack ",
                        troposphere.Ref("AWS::StackName"),
                        "--resource MasterASG ",
                        "--region ",
                        troposphere.Ref("AWS::Region")
                    ]),
                    "echo SCRIPT: 'Done signaling stack that setup of HPCC System has completed.'"
                ])),
        ))
    master_autoscaling_group = t.add_resource(
        troposphere.autoscaling.AutoScalingGroup(
            "MasterASG",
            DesiredCapacity="1",  # need to update x -> N+x
            # @TODO: disable here to support t2.micro for cheap testing
            #PlacementGroup=troposphere.Ref(placement_group),
            LaunchConfigurationName=troposphere.Ref(master_launch_config),
            MinSize="1",
            MaxSize="1",
            HealthCheckType="EC2",
            HealthCheckGracePeriod="300",
            VPCZoneIdentifier=[troposphere.Ref(subnet)],
            #AvailabilityZones=[troposphere.Ref(vpc_availability_zone)],
            Tags=[
                troposphere.autoscaling.Tag("StackName",
                                            troposphere.Ref("AWS::StackName"),
                                            True),
                troposphere.autoscaling.Tag("slavesPerNode",
                                            troposphere.Ref(num_slaves),
                                            True),
                troposphere.autoscaling.Tag(
                    "UserNameAndPassword",
                    troposphere.Ref(username_and_password), True),
                troposphere.autoscaling.Tag(
                    "Name",
                    troposphere.Join(
                        "-", [troposphere.Ref("AWS::StackName"), "Master"]),
                    True),
            ],
        ))

    # NOTE(review): stdout dump of the full template — likely debugging
    # leftover; callers get the dict form below.
    print(t.to_json())
    return t.to_dict()
def create_cdk_pipeline(name, version, product_name, product_version,
                        template_config, p) -> t.Template:
    """Generate the CloudFormation template that deploys a CDK product.

    The template wires together two CodeBuild projects (``CDKDeploy`` /
    ``CDKDestroy``) plus two custom resources: ``StartCDKDeploy`` kicks
    off the appropriate project on create/update/delete, and
    ``GetOutputsCode`` fetches the CDK stack outputs that the deploy
    build uploaded to S3 so they can be surfaced as template outputs.

    Args:
        name: product type name embedded in the template description.
        version: product type version embedded in the description.
        product_name: product name passed to the deploy custom resource.
        product_version: product version passed to the custom resource.
        template_config: factory config dict; its "Configuration" key
            supplies runtime-versions and extra install commands.
            NOTE(review): if "Configuration" is absent this is None and
            the ``configuration.get(...)`` calls below will raise —
            presumably upstream validation guarantees the key; confirm.
        p: path to the synthesized CDK output (contains
            ``{PREFIX}/manifest.json`` and the artifact templates).

    Returns:
        t.Template: the assembled troposphere template.
    """
    # NOTE(review): this JSON blob contains the key "version" twice
    # (framework version and product version) — a strict JSON consumer
    # will keep only one of them; flagging rather than renaming, since
    # downstream tooling may match on this exact text.
    description = f"""Builds a cdk pipeline {{"version": "{constants.VERSION}", "framework": "servicecatalog-factory", "role": "product-pipeline", "type": "{name}", "version": "{version}"}}"""
    configuration = template_config.get("Configuration")
    template = t.Template(Description=description)

    # ------------------------------------------------------------------
    # Parameters supplied by the factory / CDK support stack
    # ------------------------------------------------------------------
    template.add_parameter(t.Parameter("PuppetAccountId", Type="String"))
    template.add_parameter(
        t.Parameter("CDKSupportCDKDeployRequireApproval",
                    Type="String",
                    Default="never"))
    template.add_parameter(
        t.Parameter("CDKSupportCDKComputeType",
                    Type="String",
                    Default="BUILD_GENERAL1_SMALL"))
    template.add_parameter(
        t.Parameter("CDKSupportCDKDeployImage",
                    Type="String",
                    Default="aws/codebuild/standard:4.0"))
    template.add_parameter(
        t.Parameter("CDKSupportCDKToolkitStackName",
                    Type="String",
                    Default="CDKToolKit"))
    template.add_parameter(
        t.Parameter(
            "CDKSupportCDKDeployExtraArgs",
            Type="String",
            Default="",
            Description="Extra args to pass to CDK deploy",
        ))
    template.add_parameter(
        t.Parameter(
            "CDKSupportStartCDKDeployFunctionArn",
            Type="String",
        ))
    template.add_parameter(
        t.Parameter(
            "CDKSupportGetOutputsForGivenCodebuildIdFunctionArn",
            Type="String",
        ))
    template.add_parameter(
        t.Parameter("CDKSupportIAMRolePaths",
                    Type="String",
                    Default="/servicecatalog-factory-cdk-support/"))
    template.add_parameter(
        t.Parameter("CDKSupportCDKDeployRoleName",
                    Type="String",
                    Default="CDKDeployRoleName"))

    # ------------------------------------------------------------------
    # Mirror parameters/outputs of every CloudFormation stack artifact
    # listed in the CDK manifest into this template.
    # NOTE(review): file handles from open() are never closed here.
    # ------------------------------------------------------------------
    manifest = json.loads(open(f"{p}/{PREFIX}/manifest.json", "r").read())
    cdk_deploy_parameter_args = list()
    for artifact_name, artifact in manifest.get("artifacts", {}).items():
        if artifact.get("type") == "aws:cloudformation:stack":
            artifact_template_file_path = artifact.get("properties",
                                                       {}).get("templateFile")
            assert (
                artifact_template_file_path
            ), f"Could not find template file in manifest.json for {artifact_name}"
            artifact_template = json.loads(
                open(f"{p}/{PREFIX}/{artifact_template_file_path}",
                     "r").read())
            for parameter_name, parameter_details in artifact_template.get(
                    "Parameters", {}).items():
                # De-duplicate: only add a parameter the first time it
                # appears across artifacts.
                if template.parameters.get(parameter_name) is None:
                    template.add_parameter(
                        t.Parameter(parameter_name, **parameter_details))
                # Forwarded to `cdk deploy --parameters` per artifact.
                cdk_deploy_parameter_args.append(
                    f"--parameters {artifact_name}:{parameter_name}=${{{parameter_name}}}"
                )
            for output_name, output_details in artifact_template.get(
                    "Outputs", {}).items():
                if template.outputs.get(output_name) is None:
                    # Each output's value is resolved at deploy time by
                    # the GetOutputsCode custom resource (defined below).
                    new_output = dict(**output_details)
                    new_output["Value"] = t.GetAtt("GetOutputsCode",
                                                   output_name)
                    template.add_output(t.Output(output_name, **new_output))
    cdk_deploy_parameter_args = " ".join(cdk_deploy_parameter_args)

    # Minimal troposphere wrapper for the Custom::DeployDetails resources;
    # props is empty so arbitrary properties are accepted.
    class DeployDetailsCustomResource(cloudformation.AWSCustomObject):
        resource_type = "Custom::DeployDetails"
        props = dict()

    # Buildspec customisation from the product configuration.
    runtime_versions = dict(
        nodejs=constants.BUILDSPEC_RUNTIME_VERSIONS_NODEJS_DEFAULT, )
    if configuration.get("runtime-versions"):
        runtime_versions.update(configuration.get("runtime-versions"))
    extra_commands = list(configuration.get("install", {}).get("commands",
                                                               []))

    # ------------------------------------------------------------------
    # CodeBuild project that runs `cdk deploy`
    # NOTE(review): ServiceRole hard-codes the `aws` partition, unlike
    # other templates in this codebase that use ${AWS::Partition} —
    # would break in GovCloud/China partitions; confirm intent.
    # ------------------------------------------------------------------
    template.add_resource(
        codebuild.Project(
            "CDKDeploy",
            Name=t.Sub("${AWS::StackName}-deploy"),
            Description='Run CDK deploy for given source code',
            ServiceRole=t.Sub(
                "arn:aws:iam::${AWS::AccountId}:role${CDKSupportIAMRolePaths}${CDKSupportCDKDeployRoleName}"
            ),
            Artifacts=codebuild.Artifacts(Type="NO_ARTIFACTS", ),
            Environment=codebuild.Environment(
                ComputeType=t.Ref('CDKSupportCDKComputeType'),
                # CHANGE_ME placeholders are overridden per-run by the
                # StartCDKDeploy custom resource handler.
                EnvironmentVariables=[
                    codebuild.EnvironmentVariable(
                        Name="CDK_DEPLOY_REQUIRE_APPROVAL",
                        Type="PLAINTEXT",
                        Value="CHANGE_ME"),
                    codebuild.EnvironmentVariable(Name="CDK_DEPLOY_EXTRA_ARGS",
                                                  Type="PLAINTEXT",
                                                  Value="CHANGE_ME"),
                    codebuild.EnvironmentVariable(
                        Name="CDK_TOOLKIT_STACK_NAME",
                        Type="PLAINTEXT",
                        Value="CHANGE_ME"),
                    codebuild.EnvironmentVariable(Name="UId",
                                                  Type="PLAINTEXT",
                                                  Value="CHANGE_ME"),
                    codebuild.EnvironmentVariable(Name="PUPPET_ACCOUNT_ID",
                                                  Type="PLAINTEXT",
                                                  Value="CHANGE_ME"),
                    codebuild.EnvironmentVariable(Name="REGION",
                                                  Type="PLAINTEXT",
                                                  Value=t.Ref("AWS::Region")),
                    codebuild.EnvironmentVariable(
                        Name="CDK_DEPLOY_PARAMETER_ARGS",
                        Type="PLAINTEXT",
                        Value="CHANGE_ME"),
                    codebuild.EnvironmentVariable(Name="ON_COMPLETE_URL",
                                                  Type="PLAINTEXT",
                                                  Value="CHANGE_ME"),
                    codebuild.EnvironmentVariable(Name="NAME",
                                                  Type="PLAINTEXT",
                                                  Value="CHANGE_ME"),
                    codebuild.EnvironmentVariable(Name="VERSION",
                                                  Type="PLAINTEXT",
                                                  Value="CHANGE_ME"),
                ],
                Image=t.Ref('CDKSupportCDKDeployImage'),
                Type="LINUX_CONTAINER",
            ),
            Source=codebuild.Source(
                Type="NO_SOURCE",
                # Buildspec: fetch the product zip from the factory
                # artifact bucket, deploy it with CDK, then upload the
                # stack outputs for GetOutputsCode to pick up.
                BuildSpec=t.Sub(
                    yaml.safe_dump(
                        dict(
                            version=0.2,
                            phases=dict(
                                install={
                                    "runtime-versions": runtime_versions,
                                    "commands": [
                                        "aws s3 cp s3://sc-factory-artifacts-$PUPPET_ACCOUNT_ID-$REGION/CDK/1.0.0/$NAME/$VERSION/$NAME-$VERSION.zip $NAME-$VERSION.zip",
                                        "unzip $NAME-$VERSION.zip",
                                        "npm install",
                                    ] + extra_commands
                                },
                                build={
                                    "commands": [
                                        "npm run cdk deploy -- --toolkit-stack-name $CDK_TOOLKIT_STACK_NAME --require-approval $CDK_DEPLOY_REQUIRE_APPROVAL --outputs-file scf_outputs.json $CDK_DEPLOY_EXTRA_ARGS $CDK_DEPLOY_PARAMETER_ARGS '*'",
                                        "aws s3 cp scf_outputs.json s3://sc-cdk-artifacts-${AWS::AccountId}/CDK/1.0.0/$NAME/$VERSION/scf_outputs-$CODEBUILD_BUILD_ID.json",
                                    ]
                                },
                            ),
                            artifacts={
                                "name": "CDKDeploy",
                                "files": ["*", "**/*"],
                            },
                        ))),
            ),
            TimeoutInMinutes=480,
        ))

    # ------------------------------------------------------------------
    # CodeBuild project that runs `cdk destroy` (used on stack delete)
    # ------------------------------------------------------------------
    template.add_resource(
        codebuild.Project(
            "CDKDestroy",
            Name=t.Sub("${AWS::StackName}-destroy"),
            Description='Run CDK destroy for given source code',
            ServiceRole=t.Sub(
                "arn:aws:iam::${AWS::AccountId}:role${CDKSupportIAMRolePaths}${CDKSupportCDKDeployRoleName}"
            ),
            Artifacts=codebuild.Artifacts(Type="NO_ARTIFACTS", ),
            Environment=codebuild.Environment(
                ComputeType=t.Ref('CDKSupportCDKComputeType'),
                EnvironmentVariables=[
                    codebuild.EnvironmentVariable(
                        Name="CDK_DEPLOY_REQUIRE_APPROVAL",
                        Type="PLAINTEXT",
                        Value="CHANGE_ME"),
                    codebuild.EnvironmentVariable(Name="CDK_DEPLOY_EXTRA_ARGS",
                                                  Type="PLAINTEXT",
                                                  Value="CHANGE_ME"),
                    codebuild.EnvironmentVariable(
                        Name="CDK_TOOLKIT_STACK_NAME",
                        Type="PLAINTEXT",
                        Value="CHANGE_ME"),
                    codebuild.EnvironmentVariable(Name="UId",
                                                  Type="PLAINTEXT",
                                                  Value="CHANGE_ME"),
                    codebuild.EnvironmentVariable(Name="PUPPET_ACCOUNT_ID",
                                                  Type="PLAINTEXT",
                                                  Value="CHANGE_ME"),
                    codebuild.EnvironmentVariable(Name="REGION",
                                                  Type="PLAINTEXT",
                                                  Value=t.Ref("AWS::Region")),
                    codebuild.EnvironmentVariable(
                        Name="CDK_DEPLOY_PARAMETER_ARGS",
                        Type="PLAINTEXT",
                        Value="CHANGE_ME"),
                    codebuild.EnvironmentVariable(Name="ON_COMPLETE_URL",
                                                  Type="PLAINTEXT",
                                                  Value="CHANGE_ME"),
                    codebuild.EnvironmentVariable(Name="NAME",
                                                  Type="PLAINTEXT",
                                                  Value="CHANGE_ME"),
                    codebuild.EnvironmentVariable(Name="VERSION",
                                                  Type="PLAINTEXT",
                                                  Value="CHANGE_ME"),
                ],
                Image=t.Ref('CDKSupportCDKDeployImage'),
                Type="LINUX_CONTAINER",
            ),
            Source=codebuild.Source(
                Type="NO_SOURCE",
                BuildSpec=t.Sub(
                    yaml.safe_dump(
                        dict(
                            version=0.2,
                            phases=dict(
                                install={
                                    "runtime-versions": runtime_versions,
                                    "commands": [
                                        "aws s3 cp s3://sc-factory-artifacts-$PUPPET_ACCOUNT_ID-$REGION/CDK/1.0.0/$NAME/$VERSION/$NAME-$VERSION.zip $NAME-$VERSION.zip",
                                        "unzip $NAME-$VERSION.zip",
                                        "npm install",
                                    ] + extra_commands
                                },
                                build={
                                    "commands": [
                                        "npm run cdk destroy -- --toolkit-stack-name $CDK_TOOLKIT_STACK_NAME --force --ignore-errors '*'"
                                    ]
                                },
                            ),
                            artifacts={
                                "name": "CDKDeploy",
                                "files": ["*", "**/*"],
                            },
                        ))),
            ),
            TimeoutInMinutes=480,
        ))

    # Custom resource: triggers CDKDeploy on create/update and
    # CDKDestroy on delete, forwarding the run-time configuration.
    template.add_resource(
        DeployDetailsCustomResource(
            "StartCDKDeploy",
            DependsOn=["CDKDeploy", "CDKDestroy"],
            ServiceToken=t.Ref("CDKSupportStartCDKDeployFunctionArn"),
            CreateUpdateProject=t.Ref("CDKDeploy"),
            DeleteProject=t.Ref("CDKDestroy"),
            CDK_DEPLOY_EXTRA_ARGS=t.Ref("CDKSupportCDKDeployExtraArgs"),
            CDK_TOOLKIT_STACK_NAME=t.Ref("CDKSupportCDKToolkitStackName"),
            PUPPET_ACCOUNT_ID=t.Ref("PuppetAccountId"),
            CDK_DEPLOY_PARAMETER_ARGS=t.Sub(cdk_deploy_parameter_args),
            CDK_DEPLOY_REQUIRE_APPROVAL=t.Ref(
                "CDKSupportCDKDeployRequireApproval"),
            NAME=product_name,
            VERSION=product_version,
        ))
    # Custom resource: reads the scf_outputs-<build-id>.json uploaded by
    # the deploy build and exposes each CDK output as a GetAtt target.
    template.add_resource(
        DeployDetailsCustomResource(
            "GetOutputsCode",
            DependsOn=[
                "StartCDKDeploy",
            ],
            ServiceToken=t.Ref(
                "CDKSupportGetOutputsForGivenCodebuildIdFunctionArn"),
            CodeBuildBuildId=t.GetAtt("StartCDKDeploy", "BuildId"),
            BucketName=t.Sub("sc-cdk-artifacts-${AWS::AccountId}"),
            ObjectKeyPrefix=t.Sub(
                f"CDK/1.0.0/{product_name}/{product_version}"),
        ))
    return template
def get_template( puppet_version, all_regions, source, is_caching_enabled, is_manual_approvals: bool, scm_skip_creation_of_repo: bool, should_validate: bool, ) -> t.Template: is_codecommit = source.get("Provider", "").lower() == "codecommit" is_github = source.get("Provider", "").lower() == "github" is_codestarsourceconnection = (source.get( "Provider", "").lower() == "codestarsourceconnection") is_custom = (source.get("Provider", "").lower() == "custom") is_s3 = source.get("Provider", "").lower() == "s3" description = f"""Bootstrap template used to bring up the main ServiceCatalog-Puppet AWS CodePipeline with dependencies {{"version": "{puppet_version}", "framework": "servicecatalog-puppet", "role": "bootstrap-master"}}""" template = t.Template(Description=description) version_parameter = template.add_parameter( t.Parameter("Version", Default=puppet_version, Type="String")) org_iam_role_arn_parameter = template.add_parameter( t.Parameter("OrgIamRoleArn", Default="None", Type="String")) with_manual_approvals_parameter = template.add_parameter( t.Parameter( "WithManualApprovals", Type="String", AllowedValues=["Yes", "No"], Default="No", )) puppet_code_pipeline_role_permission_boundary_parameter = template.add_parameter( t.Parameter( "PuppetCodePipelineRolePermissionBoundary", Type="String", Description= "IAM Permission Boundary to apply to the PuppetCodePipelineRole", Default=awscs_iam.ARN(resource="policy/AdministratorAccess").data, )) source_role_permissions_boundary_parameter = template.add_parameter( t.Parameter( "SourceRolePermissionsBoundary", Type="String", Description="IAM Permission Boundary to apply to the SourceRole", Default=awscs_iam.ARN(resource="policy/AdministratorAccess").data, )) puppet_generate_role_permission_boundary_parameter = template.add_parameter( t.Parameter( "PuppetGenerateRolePermissionBoundary", Type="String", Description= "IAM Permission Boundary to apply to the PuppetGenerateRole", 
Default=awscs_iam.ARN(resource="policy/AdministratorAccess").data, )) puppet_deploy_role_permission_boundary_parameter = template.add_parameter( t.Parameter( "PuppetDeployRolePermissionBoundary", Type="String", Description= "IAM Permission Boundary to apply to the PuppetDeployRole", Default=awscs_iam.ARN(resource="policy/AdministratorAccess").data, )) puppet_provisioning_role_permissions_boundary_parameter = template.add_parameter( t.Parameter( "PuppetProvisioningRolePermissionsBoundary", Type="String", Description= "IAM Permission Boundary to apply to the PuppetProvisioningRole", Default=awscs_iam.ARN(resource="policy/AdministratorAccess").data, )) cloud_formation_deploy_role_permissions_boundary_parameter = template.add_parameter( t.Parameter( "CloudFormationDeployRolePermissionsBoundary", Type="String", Description= "IAM Permission Boundary to apply to the CloudFormationDeployRole", Default=awscs_iam.ARN(resource="policy/AdministratorAccess").data, )) deploy_environment_compute_type_parameter = template.add_parameter( t.Parameter( "DeployEnvironmentComputeType", Type="String", Description="The AWS CodeBuild Environment Compute Type", Default="BUILD_GENERAL1_SMALL", )) spoke_deploy_environment_compute_type_parameter = template.add_parameter( t.Parameter( "SpokeDeployEnvironmentComputeType", Type="String", Description= "The AWS CodeBuild Environment Compute Type for spoke execution mode", Default="BUILD_GENERAL1_SMALL", )) deploy_num_workers_parameter = template.add_parameter( t.Parameter( "DeployNumWorkers", Type="Number", Description= "Number of workers that should be used when running a deploy", Default=10, )) puppet_role_name_parameter = template.add_parameter( t.Parameter("PuppetRoleName", Type="String", Default="PuppetRole")) puppet_role_path_template_parameter = template.add_parameter( t.Parameter("PuppetRolePath", Type="String", Default="/servicecatalog-puppet/")) template.add_condition( "ShouldUseOrgs", t.Not(t.Equals(t.Ref(org_iam_role_arn_parameter), 
"None"))) template.add_condition( "HasManualApprovals", t.Equals(t.Ref(with_manual_approvals_parameter), "Yes")) template.add_resource( s3.Bucket( "StacksRepository", BucketName=t.Sub("sc-puppet-stacks-repository-${AWS::AccountId}"), VersioningConfiguration=s3.VersioningConfiguration( Status="Enabled"), BucketEncryption=s3.BucketEncryption( ServerSideEncryptionConfiguration=[ s3.ServerSideEncryptionRule( ServerSideEncryptionByDefault=s3. ServerSideEncryptionByDefault(SSEAlgorithm="AES256")) ]), PublicAccessBlockConfiguration=s3.PublicAccessBlockConfiguration( BlockPublicAcls=True, BlockPublicPolicy=True, IgnorePublicAcls=True, RestrictPublicBuckets=True, ), Tags=t.Tags({"ServiceCatalogPuppet:Actor": "Framework"}), )) manual_approvals_param = template.add_resource( ssm.Parameter( "ManualApprovalsParam", Type="String", Name="/servicecatalog-puppet/manual-approvals", Value=t.Ref(with_manual_approvals_parameter), )) template.add_resource( ssm.Parameter( "SpokeDeployEnvParameter", Type="String", Name=constants.SPOKE_EXECUTION_MODE_DEPLOY_ENV_PARAMETER_NAME, Value=t.Ref(spoke_deploy_environment_compute_type_parameter), )) param = template.add_resource( ssm.Parameter( "Param", Type="String", Name="service-catalog-puppet-version", Value=t.Ref(version_parameter), )) partition_parameter = template.add_resource( ssm.Parameter( "PartitionParameter", Type="String", Name="/servicecatalog-puppet/partition", Value=t.Ref("AWS::Partition"), )) puppet_role_name_parameter = template.add_resource( ssm.Parameter( "PuppetRoleNameParameter", Type="String", Name="/servicecatalog-puppet/puppet-role/name", Value=t.Ref(puppet_role_name_parameter), )) puppet_role_path_parameter = template.add_resource( ssm.Parameter( "PuppetRolePathParameter", Type="String", Name="/servicecatalog-puppet/puppet-role/path", Value=t.Ref(puppet_role_path_template_parameter), )) share_accept_function_role = template.add_resource( iam.Role( "ShareAcceptFunctionRole", RoleName="ShareAcceptFunctionRole", 
ManagedPolicyArns=[ t.Sub( "arn:${AWS::Partition}:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole" ) ], Path=t.Ref(puppet_role_path_template_parameter), Policies=[ iam.Policy( PolicyName="ServiceCatalogActions", PolicyDocument={ "Version": "2012-10-17", "Statement": [{ "Action": ["sts:AssumeRole"], "Resource": { "Fn::Sub": "arn:${AWS::Partition}:iam::*:role${PuppetRolePath}${PuppetRoleName}" }, "Effect": "Allow", }], }, ) ], AssumeRolePolicyDocument={ "Version": "2012-10-17", "Statement": [{ "Action": ["sts:AssumeRole"], "Effect": "Allow", "Principal": { "Service": ["lambda.amazonaws.com"] }, }], }, )) provisioning_role = template.add_resource( iam.Role( "ProvisioningRole", RoleName="PuppetProvisioningRole", AssumeRolePolicyDocument={ "Version": "2012-10-17", "Statement": [ { "Action": ["sts:AssumeRole"], "Effect": "Allow", "Principal": { "Service": ["codebuild.amazonaws.com"] }, }, { "Action": ["sts:AssumeRole"], "Effect": "Allow", "Principal": { "AWS": { "Fn::Sub": "${AWS::AccountId}" } }, }, ], }, ManagedPolicyArns=[ t.Sub( "arn:${AWS::Partition}:iam::aws:policy/AdministratorAccess" ) ], PermissionsBoundary=t.Ref( puppet_provisioning_role_permissions_boundary_parameter), Path=t.Ref(puppet_role_path_template_parameter), )) cloud_formation_deploy_role = template.add_resource( iam.Role( "CloudFormationDeployRole", RoleName="CloudFormationDeployRole", AssumeRolePolicyDocument={ "Version": "2012-10-17", "Statement": [ { "Action": ["sts:AssumeRole"], "Effect": "Allow", "Principal": { "Service": ["cloudformation.amazonaws.com"] }, }, { "Action": ["sts:AssumeRole"], "Effect": "Allow", "Principal": { "AWS": { "Fn::Sub": "${AWS::AccountId}" } }, }, ], }, ManagedPolicyArns=[ t.Sub( "arn:${AWS::Partition}:iam::aws:policy/AdministratorAccess" ) ], PermissionsBoundary=t.Ref( cloud_formation_deploy_role_permissions_boundary_parameter), Path=t.Ref(puppet_role_path_template_parameter), )) pipeline_role = template.add_resource( iam.Role( "PipelineRole", 
RoleName="PuppetCodePipelineRole", AssumeRolePolicyDocument={ "Version": "2012-10-17", "Statement": [{ "Action": ["sts:AssumeRole"], "Effect": "Allow", "Principal": { "Service": ["codepipeline.amazonaws.com"] }, }], }, ManagedPolicyArns=[ t.Sub( "arn:${AWS::Partition}:iam::aws:policy/AdministratorAccess" ) ], PermissionsBoundary=t.Ref( puppet_code_pipeline_role_permission_boundary_parameter), Path=t.Ref(puppet_role_path_template_parameter), )) source_role = template.add_resource( iam.Role( "SourceRole", RoleName="PuppetSourceRole", AssumeRolePolicyDocument={ "Version": "2012-10-17", "Statement": [ { "Action": ["sts:AssumeRole"], "Effect": "Allow", "Principal": { "Service": ["codepipeline.amazonaws.com"] }, }, { "Action": ["sts:AssumeRole"], "Effect": "Allow", "Principal": { "AWS": { "Fn::Sub": "arn:${AWS::Partition}:iam::${AWS::AccountId}:root" } }, }, ], }, ManagedPolicyArns=[ t.Sub( "arn:${AWS::Partition}:iam::aws:policy/AdministratorAccess" ) ], PermissionsBoundary=t.Ref( source_role_permissions_boundary_parameter), Path=t.Ref(puppet_role_path_template_parameter), )) dry_run_notification_topic = template.add_resource( sns.Topic( "DryRunNotificationTopic", DisplayName="service-catalog-puppet-dry-run-approvals", TopicName="service-catalog-puppet-dry-run-approvals", Condition="HasManualApprovals", )) deploy_role = template.add_resource( iam.Role( "DeployRole", RoleName="PuppetDeployRole", AssumeRolePolicyDocument={ "Version": "2012-10-17", "Statement": [{ "Action": ["sts:AssumeRole"], "Effect": "Allow", "Principal": { "Service": ["codebuild.amazonaws.com"] }, }], }, ManagedPolicyArns=[ t.Sub( "arn:${AWS::Partition}:iam::aws:policy/AdministratorAccess" ) ], PermissionsBoundary=t.Ref( puppet_deploy_role_permission_boundary_parameter), Path=t.Ref(puppet_role_path_template_parameter), )) num_workers_ssm_parameter = template.add_resource( ssm.Parameter( "NumWorkersSSMParameter", Type="String", Name="/servicecatalog-puppet/deploy/num-workers", 
Value=t.Sub("${DeployNumWorkers}"), )) parameterised_source_bucket = template.add_resource( s3.Bucket( "ParameterisedSourceBucket", PublicAccessBlockConfiguration=s3.PublicAccessBlockConfiguration( IgnorePublicAcls=True, BlockPublicPolicy=True, BlockPublicAcls=True, RestrictPublicBuckets=True, ), BucketEncryption=s3.BucketEncryption( ServerSideEncryptionConfiguration=[ s3.ServerSideEncryptionRule( ServerSideEncryptionByDefault=s3. ServerSideEncryptionByDefault(SSEAlgorithm="AES256")) ]), Tags=t.Tags.from_dict( **{"ServiceCatalogPuppet:Actor": "Framework"}), BucketName=t.Sub("sc-puppet-parameterised-runs-${AWS::AccountId}"), VersioningConfiguration=s3.VersioningConfiguration( Status="Enabled"), )) source_stage = codepipeline.Stages( Name="Source", Actions=[ codepipeline.Actions( RunOrder=1, RoleArn=t.GetAtt("SourceRole", "Arn"), ActionTypeId=codepipeline.ActionTypeId( Category="Source", Owner="AWS", Version="1", Provider="S3", ), OutputArtifacts=[ codepipeline.OutputArtifacts(Name="ParameterisedSource") ], Configuration={ "S3Bucket": t.Ref(parameterised_source_bucket), "S3ObjectKey": "parameters.zip", "PollForSourceChanges": True, }, Name="ParameterisedSource", ) ], ) install_spec = { "runtime-versions": dict(python="3.7"), "commands": [ f"pip install {puppet_version}" if "http" in puppet_version else f"pip install aws-service-catalog-puppet=={puppet_version}", ], } deploy_env_vars = [ { "Type": "PLAINTEXT", "Name": "PUPPET_ACCOUNT_ID", "Value": t.Ref("AWS::AccountId"), }, { "Type": "PLAINTEXT", "Name": "PUPPET_REGION", "Value": t.Ref("AWS::Region"), }, { "Type": "PARAMETER_STORE", "Name": "PARTITION", "Value": t.Ref(partition_parameter), }, { "Type": "PARAMETER_STORE", "Name": "PUPPET_ROLE_NAME", "Value": t.Ref(puppet_role_name_parameter), }, { "Type": "PARAMETER_STORE", "Name": "PUPPET_ROLE_PATH", "Value": t.Ref(puppet_role_path_parameter), }, ] if is_codecommit: template.add_resource( codecommit.Repository( "CodeRepo", 
RepositoryName=source.get("Configuration").get( "RepositoryName"), RepositoryDescription= "Repo to store the servicecatalog puppet solution", DeletionPolicy="Retain", )) source_stage.Actions.append( codepipeline.Actions( RunOrder=1, RoleArn=t.GetAtt("SourceRole", "Arn"), ActionTypeId=codepipeline.ActionTypeId( Category="Source", Owner="AWS", Version="1", Provider="CodeCommit", ), OutputArtifacts=[codepipeline.OutputArtifacts(Name="Source")], Configuration={ "RepositoryName": source.get("Configuration").get("RepositoryName"), "BranchName": source.get("Configuration").get("BranchName"), "PollForSourceChanges": source.get("Configuration").get("PollForSourceChanges", True), }, Name="Source", )) if is_github: source_stage.Actions.append( codepipeline.Actions( RunOrder=1, ActionTypeId=codepipeline.ActionTypeId( Category="Source", Owner="ThirdParty", Version="1", Provider="GitHub", ), OutputArtifacts=[codepipeline.OutputArtifacts(Name="Source")], Configuration={ "Owner": source.get("Configuration").get("Owner"), "Repo": source.get("Configuration").get("Repo"), "Branch": source.get("Configuration").get("Branch"), "OAuthToken": t.Join( "", [ "{{resolve:secretsmanager:", source.get("Configuration").get( "SecretsManagerSecret"), ":SecretString:OAuthToken}}", ], ), "PollForSourceChanges": source.get("Configuration").get("PollForSourceChanges"), }, Name="Source", )) if is_custom: source_stage.Actions.append( codepipeline.Actions( RunOrder=1, ActionTypeId=codepipeline.ActionTypeId( Category="Source", Owner="Custom", Version=source.get("Configuration").get( "CustomActionTypeVersion"), Provider=source.get("Configuration").get( "CustomActionTypeProvider"), ), OutputArtifacts=[codepipeline.OutputArtifacts(Name="Source")], Configuration={ "GitUrl": source.get("Configuration").get("GitUrl"), "Branch": source.get("Configuration").get("Branch"), "PipelineName": t.Sub("${AWS::StackName}-pipeline"), }, Name="Source", )) webhook = codepipeline.Webhook( "Webhook", Authentication="IP", 
TargetAction="Source", AuthenticationConfiguration=codepipeline.WebhookAuthConfiguration( AllowedIPRange=source.get("Configuration").get( "GitWebHookIpAddress")), Filters=[ codepipeline.WebhookFilterRule( JsonPath="$.changes[0].ref.id", MatchEquals="refs/heads/{Branch}") ], TargetPipelineVersion=1, TargetPipeline=t.Sub("${AWS::StackName}-pipeline"), ) template.add_resource(webhook) values_for_sub = { "GitUrl": source.get("Configuration").get("GitUrl"), "WebhookUrl": t.GetAtt(webhook, "Url"), } output_to_add = t.Output("WebhookUrl") output_to_add.Value = t.Sub("${GitUrl}||${WebhookUrl}", **values_for_sub) output_to_add.Export = t.Export(t.Sub("${AWS::StackName}-pipeline")) template.add_output(output_to_add) if is_codestarsourceconnection: source_stage.Actions.append( codepipeline.Actions( RunOrder=1, RoleArn=t.GetAtt("SourceRole", "Arn"), ActionTypeId=codepipeline.ActionTypeId( Category="Source", Owner="AWS", Version="1", Provider="CodeStarSourceConnection", ), OutputArtifacts=[codepipeline.OutputArtifacts(Name="Source")], Configuration={ "ConnectionArn": source.get("Configuration").get("ConnectionArn"), "FullRepositoryId": source.get("Configuration").get("FullRepositoryId"), "BranchName": source.get("Configuration").get("BranchName"), "OutputArtifactFormat": source.get("Configuration").get("OutputArtifactFormat"), }, Name="Source", )) if is_s3: bucket_name = source.get("Configuration").get("S3Bucket") if not scm_skip_creation_of_repo: template.add_resource( s3.Bucket( bucket_name, PublicAccessBlockConfiguration=s3. PublicAccessBlockConfiguration( IgnorePublicAcls=True, BlockPublicPolicy=True, BlockPublicAcls=True, RestrictPublicBuckets=True, ), BucketEncryption=s3.BucketEncryption( ServerSideEncryptionConfiguration=[ s3.ServerSideEncryptionRule( ServerSideEncryptionByDefault=s3. 
ServerSideEncryptionByDefault( SSEAlgorithm="AES256")) ]), Tags=t.Tags.from_dict( **{"ServiceCatalogPuppet:Actor": "Framework"}), BucketName=bucket_name, VersioningConfiguration=s3.VersioningConfiguration( Status="Enabled"), )) source_stage.Actions.append( codepipeline.Actions( RunOrder=1, ActionTypeId=codepipeline.ActionTypeId( Category="Source", Owner="AWS", Version="1", Provider="S3", ), OutputArtifacts=[codepipeline.OutputArtifacts(Name="Source")], Configuration={ "S3Bucket": bucket_name, "S3ObjectKey": source.get("Configuration").get("S3ObjectKey"), "PollForSourceChanges": source.get("Configuration").get("PollForSourceChanges"), }, Name="Source", )) single_account_run_project_build_spec = dict( version=0.2, phases=dict( install=install_spec, build={ "commands": [ 'echo "single_account: \\"${SINGLE_ACCOUNT_ID}\\"" > parameters.yaml', "cat parameters.yaml", "zip parameters.zip parameters.yaml", "aws s3 cp parameters.zip s3://sc-puppet-parameterised-runs-${PUPPET_ACCOUNT_ID}/parameters.zip", ] }, post_build={ "commands": [ "servicecatalog-puppet wait-for-parameterised-run-to-complete", ] }, ), artifacts=dict( name="DeployProject", files=[ "ServiceCatalogPuppet/manifest.yaml", "ServiceCatalogPuppet/manifest-expanded.yaml", "results/*/*", "output/*/*", "exploded_results/*/*", "tasks.log", ], ), ) single_account_run_project_args = dict( Name="servicecatalog-puppet-single-account-run", Description="Runs puppet for a single account - SINGLE_ACCOUNT_ID", ServiceRole=t.GetAtt(deploy_role, "Arn"), Tags=t.Tags.from_dict(**{"ServiceCatalogPuppet:Actor": "Framework"}), Artifacts=codebuild.Artifacts(Type="NO_ARTIFACTS", ), TimeoutInMinutes=480, Environment=codebuild.Environment( ComputeType=t.Ref(deploy_environment_compute_type_parameter), Image="aws/codebuild/standard:4.0", Type="LINUX_CONTAINER", EnvironmentVariables=[ { "Type": "PLAINTEXT", "Name": "SINGLE_ACCOUNT_ID", "Value": "CHANGE_ME", }, ] + deploy_env_vars, ), Source=codebuild.Source( Type="NO_SOURCE", 
BuildSpec=yaml.safe_dump(single_account_run_project_build_spec), ), ) single_account_run_project = template.add_resource( codebuild.Project("SingleAccountRunProject", **single_account_run_project_args)) single_account_run_project_build_spec["phases"]["post_build"]["commands"] = [ "servicecatalog-puppet wait-for-parameterised-run-to-complete --on-complete-url $CALLBACK_URL" ] single_account_run_project_args[ "Name"] = "servicecatalog-puppet-single-account-run-with-callback" single_account_run_project_args[ "Description"] = "Runs puppet for a single account - SINGLE_ACCOUNT_ID and then does a http put" single_account_run_project_args.get( "Environment").EnvironmentVariables.append({ "Type": "PLAINTEXT", "Name": "CALLBACK_URL", "Value": "CHANGE_ME", }) single_account_run_project_args["Source"] = codebuild.Source( Type="NO_SOURCE", BuildSpec=yaml.safe_dump(single_account_run_project_build_spec), ) single_account_run_project_with_callback = template.add_resource( codebuild.Project("SingleAccountRunWithCallbackProject", **single_account_run_project_args)) stages = [source_stage] if should_validate: template.add_resource( codebuild.Project( "ValidateProject", Name="servicecatalog-puppet-validate", ServiceRole=t.GetAtt("DeployRole", "Arn"), Tags=t.Tags.from_dict( **{"ServiceCatalogPuppet:Actor": "Framework"}), Artifacts=codebuild.Artifacts(Type="CODEPIPELINE"), TimeoutInMinutes=60, Environment=codebuild.Environment( ComputeType="BUILD_GENERAL1_SMALL", Image="aws/codebuild/standard:4.0", Type="LINUX_CONTAINER", ), Source=codebuild.Source( BuildSpec=yaml.safe_dump( dict( version="0.2", phases={ "install": { "runtime-versions": { "python": "3.7", }, "commands": [ f"pip install {puppet_version}" if "http" in puppet_version else f"pip install aws-service-catalog-puppet=={puppet_version}", ], }, "build": { "commands": [ "servicecatalog-puppet validate manifest.yaml" ] }, }, )), Type="CODEPIPELINE", ), Description="Validate the manifest.yaml file", )) stages.append( 
codepipeline.Stages( Name="Validate", Actions=[ codepipeline.Actions( InputArtifacts=[ codepipeline.InputArtifacts(Name="Source"), ], Name="Validate", ActionTypeId=codepipeline.ActionTypeId( Category="Build", Owner="AWS", Version="1", Provider="CodeBuild", ), OutputArtifacts=[ codepipeline.OutputArtifacts( Name="ValidateProject") ], Configuration={ "ProjectName": t.Ref("ValidateProject"), "PrimarySource": "Source", }, RunOrder=1, ), ], )) if is_manual_approvals: deploy_stage = codepipeline.Stages( Name="Deploy", Actions=[ codepipeline.Actions( InputArtifacts=[ codepipeline.InputArtifacts(Name="Source"), codepipeline.InputArtifacts( Name="ParameterisedSource"), ], Name="DryRun", ActionTypeId=codepipeline.ActionTypeId( Category="Build", Owner="AWS", Version="1", Provider="CodeBuild", ), OutputArtifacts=[ codepipeline.OutputArtifacts(Name="DryRunProject") ], Configuration={ "ProjectName": t.Ref("DryRunProject"), "PrimarySource": "Source", }, RunOrder=1, ), codepipeline.Actions( ActionTypeId=codepipeline.ActionTypeId( Category="Approval", Owner="AWS", Version="1", Provider="Manual", ), Configuration={ "NotificationArn": t.Ref("DryRunNotificationTopic"), "CustomData": "Approve when you are happy with the dry run.", }, Name="DryRunApproval", RunOrder=2, ), codepipeline.Actions( InputArtifacts=[ codepipeline.InputArtifacts(Name="Source"), codepipeline.InputArtifacts( Name="ParameterisedSource"), ], Name="Deploy", ActionTypeId=codepipeline.ActionTypeId( Category="Build", Owner="AWS", Version="1", Provider="CodeBuild", ), OutputArtifacts=[ codepipeline.OutputArtifacts(Name="DeployProject") ], Configuration={ "ProjectName": t.Ref("DeployProject"), "PrimarySource": "Source", }, RunOrder=3, ), ], ) else: deploy_stage = codepipeline.Stages( Name="Deploy", Actions=[ codepipeline.Actions( InputArtifacts=[ codepipeline.InputArtifacts(Name="Source"), codepipeline.InputArtifacts( Name="ParameterisedSource"), ], Name="Deploy", ActionTypeId=codepipeline.ActionTypeId( Category="Build", 
Owner="AWS", Version="1", Provider="CodeBuild", ), OutputArtifacts=[ codepipeline.OutputArtifacts(Name="DeployProject") ], Configuration={ "ProjectName": t.Ref("DeployProject"), "PrimarySource": "Source", "EnvironmentVariables": '[{"name":"EXECUTION_ID","value":"#{codepipeline.PipelineExecutionId}","type":"PLAINTEXT"}]', }, RunOrder=1, ), ], ) stages.append(deploy_stage) pipeline = template.add_resource( codepipeline.Pipeline( "Pipeline", RoleArn=t.GetAtt("PipelineRole", "Arn"), Stages=stages, Name=t.Sub("${AWS::StackName}-pipeline"), ArtifactStore=codepipeline.ArtifactStore( Type="S3", Location=t.Sub( "sc-puppet-pipeline-artifacts-${AWS::AccountId}-${AWS::Region}" ), ), RestartExecutionOnUpdate=True, )) if is_github: template.add_resource( codepipeline.Webhook( "Webhook", AuthenticationConfiguration=codepipeline. WebhookAuthConfiguration(SecretToken=t.Join( "", [ "{{resolve:secretsmanager:", source.get("Configuration").get( "SecretsManagerSecret"), ":SecretString:SecretToken}}", ], )), Filters=[ codepipeline.WebhookFilterRule( JsonPath="$.ref", MatchEquals="refs/heads/" + source.get("Configuration").get("Branch"), ) ], Authentication="GITHUB_HMAC", TargetPipeline=t.Ref(pipeline), TargetAction="Source", Name=t.Sub("${AWS::StackName}-webhook"), TargetPipelineVersion=t.GetAtt(pipeline, "Version"), RegisterWithThirdParty="true", )) deploy_project_build_spec = dict( version=0.2, phases=dict( install={ "runtime-versions": dict(python="3.7"), "commands": [ f"pip install {puppet_version}" if "http" in puppet_version else f"pip install aws-service-catalog-puppet=={puppet_version}", ], }, pre_build={ "commands": [ "servicecatalog-puppet --info expand --parameter-override-file $CODEBUILD_SRC_DIR_ParameterisedSource/parameters.yaml manifest.yaml", ] }, build={ "commands": [ "servicecatalog-puppet --info deploy --num-workers ${NUM_WORKERS} manifest-expanded.yaml", ] }, ), artifacts=dict( name="DeployProject", files=[ "manifest-expanded.yaml", "results/*/*", "output/*/*", 
"exploded_results/*/*", "tasks.log", ], ), ) deploy_project_args = dict( Name="servicecatalog-puppet-deploy", ServiceRole=t.GetAtt(deploy_role, "Arn"), Tags=t.Tags.from_dict(**{"ServiceCatalogPuppet:Actor": "Framework"}), Artifacts=codebuild.Artifacts(Type="CODEPIPELINE", ), TimeoutInMinutes=480, Environment=codebuild.Environment( ComputeType=t.Ref(deploy_environment_compute_type_parameter), Image="aws/codebuild/standard:4.0", Type="LINUX_CONTAINER", EnvironmentVariables=[ { "Type": "PARAMETER_STORE", "Name": "NUM_WORKERS", "Value": t.Ref(num_workers_ssm_parameter), }, { "Type": "PARAMETER_STORE", "Name": "SPOKE_EXECUTION_MODE_DEPLOY_ENV", "Value": constants.SPOKE_EXECUTION_MODE_DEPLOY_ENV_PARAMETER_NAME, }, ] + deploy_env_vars, ), Source=codebuild.Source( Type="CODEPIPELINE", BuildSpec=yaml.safe_dump(deploy_project_build_spec), ), Description="deploys out the products to be deployed", ) deploy_project = template.add_resource( codebuild.Project("DeployProject", **deploy_project_args)) deploy_project_build_spec["phases"]["build"]["commands"] = [ "servicecatalog-puppet --info dry-run manifest-expanded.yaml" ] deploy_project_build_spec["artifacts"]["name"] = "DryRunProject" deploy_project_args["Name"] = "servicecatalog-puppet-dryrun" deploy_project_args[ "Description"] = "dry run of servicecatalog-puppet-dryrun" deploy_project_args["Source"] = codebuild.Source( Type="CODEPIPELINE", BuildSpec=yaml.safe_dump(deploy_project_build_spec), ) dry_run_project = template.add_resource( codebuild.Project("DryRunProject", **deploy_project_args)) bootstrap_project = template.add_resource( codebuild.Project( "BootstrapProject", Name="servicecatalog-puppet-bootstrap-spokes-in-ou", ServiceRole=t.GetAtt("DeployRole", "Arn"), Tags=t.Tags.from_dict( **{"ServiceCatalogPuppet:Actor": "Framework"}), Artifacts=codebuild.Artifacts(Type="NO_ARTIFACTS"), TimeoutInMinutes=60, Environment=codebuild.Environment( ComputeType="BUILD_GENERAL1_SMALL", Image="aws/codebuild/standard:4.0", 
Type="LINUX_CONTAINER", EnvironmentVariables=[ { "Type": "PLAINTEXT", "Name": "OU_OR_PATH", "Value": "CHANGE_ME" }, { "Type": "PLAINTEXT", "Name": "IAM_ROLE_NAME", "Value": "OrganizationAccountAccessRole", }, { "Type": "PLAINTEXT", "Name": "IAM_ROLE_ARNS", "Value": "" }, ], ), Source=codebuild.Source( BuildSpec= "version: 0.2\nphases:\n install:\n runtime-versions:\n python: 3.7\n commands:\n - pip install aws-service-catalog-puppet\n build:\n commands:\n - servicecatalog-puppet bootstrap-spokes-in-ou $OU_OR_PATH $IAM_ROLE_NAME $IAM_ROLE_ARNS\nartifacts:\n files:\n - results/*/*\n - output/*/*\n name: BootstrapProject\n", Type="NO_SOURCE", ), Description="Bootstrap all the accounts in an OU", )) template.add_resource( codebuild.Project( "BootstrapASpokeProject", Name="servicecatalog-puppet-bootstrap-spoke", ServiceRole=t.GetAtt("DeployRole", "Arn"), Tags=t.Tags.from_dict( **{"ServiceCatalogPuppet:Actor": "Framework"}), Artifacts=codebuild.Artifacts(Type="NO_ARTIFACTS"), TimeoutInMinutes=60, Environment=codebuild.Environment( ComputeType="BUILD_GENERAL1_SMALL", Image="aws/codebuild/standard:4.0", Type="LINUX_CONTAINER", EnvironmentVariables=[ { "Type": "PLAINTEXT", "Name": "PUPPET_ACCOUNT_ID", "Value": t.Sub("${AWS::AccountId}"), }, { "Type": "PLAINTEXT", "Name": "ORGANIZATION_ACCOUNT_ACCESS_ROLE_ARN", "Value": "CHANGE_ME", }, { "Type": "PLAINTEXT", "Name": "ASSUMABLE_ROLE_IN_ROOT_ACCOUNT", "Value": "CHANGE_ME", }, ], ), Source=codebuild.Source( BuildSpec=yaml.safe_dump( dict( version=0.2, phases=dict( install=install_spec, build={ "commands": [ "servicecatalog-puppet bootstrap-spoke-as ${PUPPET_ACCOUNT_ID} ${ASSUMABLE_ROLE_IN_ROOT_ACCOUNT} ${ORGANIZATION_ACCOUNT_ACCESS_ROLE_ARN}" ] }, ), )), Type="NO_SOURCE", ), Description="Bootstrap given account as a spoke", )) cloud_formation_events_queue = template.add_resource( sqs.Queue( "CloudFormationEventsQueue", QueueName="servicecatalog-puppet-cloudformation-events", Tags=t.Tags.from_dict( 
**{"ServiceCatalogPuppet:Actor": "Framework"}), )) cloud_formation_events_queue_policy = template.add_resource( sqs.QueuePolicy( "CloudFormationEventsQueuePolicy", Queues=[t.Ref(cloud_formation_events_queue)], PolicyDocument={ "Id": "AllowSNS", "Version": "2012-10-17", "Statement": [{ "Sid": "allow-send-message", "Effect": "Allow", "Principal": { "AWS": "*" }, "Action": ["sqs:SendMessage"], "Resource": "*", "Condition": { "ArnEquals": { "aws:SourceArn": t.Sub( "arn:${AWS::Partition}:sns:*:${AWS::AccountId}:servicecatalog-puppet-cloudformation-regional-events" ) } }, }], }, )) spoke_deploy_bucket = template.add_resource( s3.Bucket( "SpokeDeployBucket", PublicAccessBlockConfiguration=s3.PublicAccessBlockConfiguration( IgnorePublicAcls=True, BlockPublicPolicy=True, BlockPublicAcls=True, RestrictPublicBuckets=True, ), BucketEncryption=s3.BucketEncryption( ServerSideEncryptionConfiguration=[ s3.ServerSideEncryptionRule( ServerSideEncryptionByDefault=s3. ServerSideEncryptionByDefault(SSEAlgorithm="AES256")) ]), Tags=t.Tags.from_dict( **{"ServiceCatalogPuppet:Actor": "Framework"}), BucketName=t.Sub("sc-puppet-spoke-deploy-${AWS::AccountId}"), VersioningConfiguration=s3.VersioningConfiguration( Status="Enabled"), )) caching_bucket = template.add_resource( s3.Bucket( "CachingBucket", PublicAccessBlockConfiguration=s3.PublicAccessBlockConfiguration( BlockPublicAcls=True, BlockPublicPolicy=True, IgnorePublicAcls=True, RestrictPublicBuckets=True, ), BucketEncryption=s3.BucketEncryption( ServerSideEncryptionConfiguration=[ s3.ServerSideEncryptionRule( ServerSideEncryptionByDefault=s3. 
ServerSideEncryptionByDefault(SSEAlgorithm="AES256")) ]), Tags=t.Tags.from_dict( **{"ServiceCatalogPuppet:Actor": "Framework"}), BucketName=t.Sub( "sc-puppet-caching-bucket-${AWS::AccountId}-${AWS::Region}"), VersioningConfiguration=s3.VersioningConfiguration( Status="Enabled"), )) template.add_output( t.Output( "CloudFormationEventsQueueArn", Value=t.GetAtt(cloud_formation_events_queue, "Arn"), )) template.add_output(t.Output("Version", Value=t.GetAtt(param, "Value"))) template.add_output( t.Output("ManualApprovalsParam", Value=t.GetAtt(manual_approvals_param, "Value"))) template.add_resource( ssm.Parameter( "DefaultTerraformVersion", Type="String", Name=constants.DEFAULT_TERRAFORM_VERSION_PARAMETER_NAME, Value=constants.DEFAULT_TERRAFORM_VERSION_VALUE, )) return template