class Conditions(object):
    """Namespace for CloudFormation template conditions.

    ``HasConf`` is true when the ``ConfVer`` template parameter is a
    non-empty string (i.e. a configuration version was supplied).
    """

    HasConf = ts.Not(ts.Equals(ts.Ref('ConfVer'), ''))
t = troposphere.Template() t.add_version("2010-09-09") S3ArchiveBucket = t.add_parameter( troposphere.Parameter( "S3ArchiveBucket", Default="", Description= "S3 Bucket where you will store the output archives. If empty, it will be created. An existing bucket needs to notify S3BundlerQueueName on new manifests written by s3grouper.", Type="String", )) NeedsArchiveBucket = t.add_condition( "NeedsArchiveBucket", troposphere.Equals(troposphere.Ref(S3ArchiveBucket), "")) S3ArchivePrefix = t.add_parameter( troposphere.Parameter( "S3ArchivePrefix", Default="archive", Description= "Prefix within S3 Bucket where you will store the output archives", Type="String", )) S3ManifestPrefix = t.add_parameter( troposphere.Parameter( "S3ManifestPrefix", Default="manifests", Description=
def create_codepipeline_cfn(
    self,
    template,
    res_config,
):
    """Add an AWS CodePipeline (plus its IAM service role and policy) to *template*.

    Builds three stages from *res_config*:
      * Source  - from ``res_config.source`` (ManualApproval and/or CodeCommit.Source)
      * Build   - from ``res_config.build`` (ManualApproval and/or CodeBuild.Build)
      * Deploy  - delegated to ``self.init_deploy_stage``

    Returns the ``troposphere.codepipeline.Pipeline`` resource that was added
    to *template*.
    """
    # ------------------------------------------------------------------
    # Source stage: one Actions entry per configured source action.
    # ------------------------------------------------------------------
    source_stage_actions = []
    for action_name in res_config.source.keys():
        action_config = res_config.source[action_name]
        # Manual Approval action
        if action_config.type == 'ManualApproval':
            manual_approval_action = self.init_manual_approval_action(template, action_config)
            source_stage_actions.append(manual_approval_action)
        # CodeCommit source action: CFN Parameters carry the repo/role/branch
        # values resolved from the paco references.
        if action_config.type == 'CodeCommit.Source':
            # NOTE(review): codecommit_repo_arn_param (and the role param below)
            # are referenced unconditionally in the IAM policy further down;
            # a config without a CodeCommit.Source action would raise
            # NameError there -- confirm that configuration is impossible.
            codecommit_repo_arn_param = self.create_cfn_parameter(
                param_type='String',
                name='CodeCommitRepositoryArn',
                description='The Arn of the CodeCommit repository',
                value='{}.codecommit.arn'.format(action_config.paco_ref),
            )
            codecommit_role_arn_param = self.create_cfn_parameter(
                param_type='String',
                name='CodeCommitRoleArn',
                description='The Arn of the CodeCommit Role',
                value='{}.codecommit_role.arn'.format(action_config.paco_ref),
            )
            codecommit_repo_name_param = self.create_cfn_parameter(
                param_type='String',
                name='CodeCommitRepositoryName',
                description='The name of the CodeCommit repository',
                value=action_config.codecommit_repository+'.name',
            )
            deploy_branch_name_param = self.create_cfn_parameter(
                param_type='String',
                name='CodeCommitDeploymentBranchName',
                description='The name of the branch where commits will trigger a build.',
                value=action_config.deployment_branch_name,
            )
            codecommit_source_action = troposphere.codepipeline.Actions(
                Name='CodeCommit',
                ActionTypeId = troposphere.codepipeline.ActionTypeId(
                    Category = 'Source',
                    Owner = 'AWS',
                    Version = '1',
                    Provider = 'CodeCommit'
                ),
                Configuration = {
                    'RepositoryName': troposphere.Ref(codecommit_repo_name_param),
                    'BranchName': troposphere.Ref(deploy_branch_name_param)
                },
                # Artifact consumed by the Build stage below.
                OutputArtifacts = [
                    troposphere.codepipeline.OutputArtifacts(
                        Name = 'CodeCommitArtifact'
                    )
                ],
                RunOrder = action_config.run_order,
                RoleArn = troposphere.Ref(codecommit_role_arn_param)
            )
            source_stage_actions.append(codecommit_source_action)
    source_stage = troposphere.codepipeline.Stages(
        Name="Source",
        Actions = source_stage_actions
    )
    # ------------------------------------------------------------------
    # Build stage: one Actions entry per configured build action.
    # ------------------------------------------------------------------
    build_stage_actions = []
    for action_name in res_config.build.keys():
        action_config = res_config.build[action_name]
        # Manual Approval action
        if action_config.type == 'ManualApproval':
            manual_approval_action = self.init_manual_approval_action(template, action_config)
            build_stage_actions.append(manual_approval_action)
        # CodeBuild build action
        elif action_config.type == 'CodeBuild.Build':
            # Arn is used only to scope the pipeline role's CodeBuild access.
            codebuild_project_arn_param = self.create_cfn_parameter(
                param_type='String',
                name='CodeBuildProjectArn',
                description='The arn of the CodeBuild project',
                value='{}.project.arn'.format(action_config.paco_ref),
            )
            codebuild_build_action = troposphere.codepipeline.Actions(
                Name='CodeBuild',
                ActionTypeId = troposphere.codepipeline.ActionTypeId(
                    Category = 'Build',
                    Owner = 'AWS',
                    Version = '1',
                    Provider = 'CodeBuild'
                ),
                Configuration = {
                    # NOTE(review): ProjectName reuses the resource-name-prefix
                    # parameter -- presumably the CodeBuild project is named
                    # with the same prefix; confirm against the project template.
                    'ProjectName': troposphere.Ref(self.resource_name_prefix_param),
                },
                InputArtifacts = [
                    troposphere.codepipeline.InputArtifacts(
                        Name = 'CodeCommitArtifact'
                    )
                ],
                OutputArtifacts = [
                    troposphere.codepipeline.OutputArtifacts(
                        Name = 'CodeBuildArtifact'
                    )
                ],
                RunOrder = action_config.run_order
            )
            build_stage_actions.append(codebuild_build_action)
    build_stage = troposphere.codepipeline.Stages(
        Name="Build",
        Actions = build_stage_actions
    )
    # ------------------------------------------------------------------
    # Deploy stage plus optional assume-role statements for the pipeline
    # role (either may come back as None).
    # ------------------------------------------------------------------
    [ deploy_stage, s3_deploy_assume_role_statement, codedeploy_deploy_assume_role_statement ] = self.init_deploy_stage(res_config, template)
    # Manual approval enable/disable flag, surfaced as a template condition.
    manual_approval_enabled_param = self.create_cfn_parameter(
        param_type='String',
        name='ManualApprovalEnabled',
        description='Boolean indicating whether a manual approval is enabled or not.',
        value=self.manual_approval_is_enabled,
    )
    template.add_condition(
        'ManualApprovalIsEnabled',
        troposphere.Equals(troposphere.Ref(manual_approval_enabled_param), 'true')
    )
    # ------------------------------------------------------------------
    # CodePipeline service role: assumable by codepipeline.amazonaws.com.
    # ------------------------------------------------------------------
    self.pipeline_service_role_name = self.create_iam_resource_name(
        name_list=[self.res_name_prefix, 'CodePipeline-Service'],
        filter_id='IAM.Role.RoleName'
    )
    pipeline_service_role_res = troposphere.iam.Role(
        title='CodePipelineServiceRole',
        template = template,
        RoleName=self.pipeline_service_role_name,
        AssumeRolePolicyDocument=PolicyDocument(
            Version="2012-10-17",
            Statement=[
                Statement(
                    Effect=Allow,
                    Action=[ AssumeRole ],
                    Principal=Principal("Service", ['codepipeline.amazonaws.com']),
                )
            ]
        )
    )
    # Policy statements granting the pipeline access to CodeCommit,
    # CodePipeline/SNS/S3 listing, CodeBuild, the artifacts bucket, the
    # KMS CMK, and sts:AssumeRole into the CodeCommit role.
    pipeline_policy_statement_list = [
        Statement(
            Sid='CodeCommitAccess',
            Effect=Allow,
            Action=[
                Action('codecommit', 'List*'),
                Action('codecommit', 'Get*'),
                Action('codecommit', 'GitPull'),
                Action('codecommit', 'UploadArchive'),
                Action('codecommit', 'CancelUploadArchive'),
            ],
            Resource=[
                troposphere.Ref(codecommit_repo_arn_param),
            ]
        ),
        Statement(
            Sid='CodePipelineAccess',
            Effect=Allow,
            Action=[
                Action('codepipeline', '*'),
                Action('sns', 'Publish'),
                Action('s3', 'ListAllMyBuckets'),
                Action('s3', 'GetBucketLocation'),
                Action('iam', 'ListRoles'),
                Action('iam', 'PassRole'),
            ],
            Resource=[ '*' ]
        ),
        Statement(
            Sid='CodeBuildAccess',
            Effect=Allow,
            Action=[
                Action('codebuild', 'BatchGetBuilds'),
                Action('codebuild', 'StartBuild')
            ],
            Resource=[ troposphere.Ref(codebuild_project_arn_param) ]
        ),
        Statement(
            Sid='S3Access',
            Effect=Allow,
            Action=[
                Action('s3', 'PutObject'),
                Action('s3', 'GetBucketPolicy'),
                Action('s3', 'GetObject'),
                Action('s3', 'ListBucket'),
            ],
            # Both the bucket itself and its objects.
            Resource=[
                troposphere.Sub('arn:aws:s3:::${ArtifactsBucketName}/*'),
                troposphere.Sub('arn:aws:s3:::${ArtifactsBucketName}')
            ]
        ),
        Statement(
            Sid='KMSCMK',
            Effect=Allow,
            Action=[ Action('kms', 'Decrypt'), ],
            Resource=[ troposphere.Ref(self.cmk_arn_param) ]
        ),
        Statement(
            Sid='CodeCommitAssumeRole',
            Effect=Allow,
            Action=[ Action('sts', 'AssumeRole'), ],
            Resource=[ troposphere.Ref(codecommit_role_arn_param) ]
        ),
    ]
    # Deploy-stage assume-role statements are optional.
    if codedeploy_deploy_assume_role_statement != None:
        pipeline_policy_statement_list.append(codedeploy_deploy_assume_role_statement)
    if s3_deploy_assume_role_statement != None:
        pipeline_policy_statement_list.append(s3_deploy_assume_role_statement)
    troposphere.iam.PolicyType(
        title='CodePipelinePolicy',
        template = template,
        DependsOn = 'CodePipelineServiceRole',
        PolicyName=troposphere.Sub('${ResourceNamePrefix}-CodePipeline-Policy'),
        PolicyDocument=PolicyDocument(
            Statement=pipeline_policy_statement_list,
        ),
        Roles=[troposphere.Ref(pipeline_service_role_res)]
    )
    # ------------------------------------------------------------------
    # Assemble the pipeline. Stage variables are checked against None,
    # although source_stage/build_stage are always set above; only
    # deploy_stage (from init_deploy_stage) can plausibly be None.
    # ------------------------------------------------------------------
    pipeline_stages = []
    if source_stage != None:
        pipeline_stages.append(source_stage)
    if build_stage != None:
        pipeline_stages.append(build_stage)
    if deploy_stage != None:
        pipeline_stages.append(deploy_stage)
    pipeline_res = troposphere.codepipeline.Pipeline(
        title = 'BuildCodePipeline',
        template = template,
        DependsOn='CodePipelinePolicy',
        RoleArn = troposphere.GetAtt(pipeline_service_role_res, 'Arn'),
        Name = troposphere.Ref(self.resource_name_prefix_param),
        Stages = pipeline_stages,
        # Artifacts live in the shared S3 bucket, encrypted with the CMK.
        ArtifactStore = troposphere.codepipeline.ArtifactStore(
            Type = 'S3',
            Location = troposphere.Ref(self.artifacts_bucket_name_param),
            EncryptionKey = troposphere.codepipeline.EncryptionKey(
                Type = 'KMS',
                Id = troposphere.Ref(self.cmk_arn_param),
            )
        )
    )
    return pipeline_res
def get_template( puppet_version, all_regions, source, is_caching_enabled, is_manual_approvals: bool, scm_skip_creation_of_repo: bool, should_validate: bool, ) -> t.Template: is_codecommit = source.get("Provider", "").lower() == "codecommit" is_github = source.get("Provider", "").lower() == "github" is_codestarsourceconnection = (source.get( "Provider", "").lower() == "codestarsourceconnection") is_custom = (source.get("Provider", "").lower() == "custom") is_s3 = source.get("Provider", "").lower() == "s3" description = f"""Bootstrap template used to bring up the main ServiceCatalog-Puppet AWS CodePipeline with dependencies {{"version": "{puppet_version}", "framework": "servicecatalog-puppet", "role": "bootstrap-master"}}""" template = t.Template(Description=description) version_parameter = template.add_parameter( t.Parameter("Version", Default=puppet_version, Type="String")) org_iam_role_arn_parameter = template.add_parameter( t.Parameter("OrgIamRoleArn", Default="None", Type="String")) with_manual_approvals_parameter = template.add_parameter( t.Parameter( "WithManualApprovals", Type="String", AllowedValues=["Yes", "No"], Default="No", )) puppet_code_pipeline_role_permission_boundary_parameter = template.add_parameter( t.Parameter( "PuppetCodePipelineRolePermissionBoundary", Type="String", Description= "IAM Permission Boundary to apply to the PuppetCodePipelineRole", Default=awscs_iam.ARN(resource="policy/AdministratorAccess").data, )) source_role_permissions_boundary_parameter = template.add_parameter( t.Parameter( "SourceRolePermissionsBoundary", Type="String", Description="IAM Permission Boundary to apply to the SourceRole", Default=awscs_iam.ARN(resource="policy/AdministratorAccess").data, )) puppet_generate_role_permission_boundary_parameter = template.add_parameter( t.Parameter( "PuppetGenerateRolePermissionBoundary", Type="String", Description= "IAM Permission Boundary to apply to the PuppetGenerateRole", 
Default=awscs_iam.ARN(resource="policy/AdministratorAccess").data, )) puppet_deploy_role_permission_boundary_parameter = template.add_parameter( t.Parameter( "PuppetDeployRolePermissionBoundary", Type="String", Description= "IAM Permission Boundary to apply to the PuppetDeployRole", Default=awscs_iam.ARN(resource="policy/AdministratorAccess").data, )) puppet_provisioning_role_permissions_boundary_parameter = template.add_parameter( t.Parameter( "PuppetProvisioningRolePermissionsBoundary", Type="String", Description= "IAM Permission Boundary to apply to the PuppetProvisioningRole", Default=awscs_iam.ARN(resource="policy/AdministratorAccess").data, )) cloud_formation_deploy_role_permissions_boundary_parameter = template.add_parameter( t.Parameter( "CloudFormationDeployRolePermissionsBoundary", Type="String", Description= "IAM Permission Boundary to apply to the CloudFormationDeployRole", Default=awscs_iam.ARN(resource="policy/AdministratorAccess").data, )) deploy_environment_compute_type_parameter = template.add_parameter( t.Parameter( "DeployEnvironmentComputeType", Type="String", Description="The AWS CodeBuild Environment Compute Type", Default="BUILD_GENERAL1_SMALL", )) spoke_deploy_environment_compute_type_parameter = template.add_parameter( t.Parameter( "SpokeDeployEnvironmentComputeType", Type="String", Description= "The AWS CodeBuild Environment Compute Type for spoke execution mode", Default="BUILD_GENERAL1_SMALL", )) deploy_num_workers_parameter = template.add_parameter( t.Parameter( "DeployNumWorkers", Type="Number", Description= "Number of workers that should be used when running a deploy", Default=10, )) puppet_role_name_parameter = template.add_parameter( t.Parameter("PuppetRoleName", Type="String", Default="PuppetRole")) puppet_role_path_template_parameter = template.add_parameter( t.Parameter("PuppetRolePath", Type="String", Default="/servicecatalog-puppet/")) template.add_condition( "ShouldUseOrgs", t.Not(t.Equals(t.Ref(org_iam_role_arn_parameter), 
"None"))) template.add_condition( "HasManualApprovals", t.Equals(t.Ref(with_manual_approvals_parameter), "Yes")) template.add_resource( s3.Bucket( "StacksRepository", BucketName=t.Sub("sc-puppet-stacks-repository-${AWS::AccountId}"), VersioningConfiguration=s3.VersioningConfiguration( Status="Enabled"), BucketEncryption=s3.BucketEncryption( ServerSideEncryptionConfiguration=[ s3.ServerSideEncryptionRule( ServerSideEncryptionByDefault=s3. ServerSideEncryptionByDefault(SSEAlgorithm="AES256")) ]), PublicAccessBlockConfiguration=s3.PublicAccessBlockConfiguration( BlockPublicAcls=True, BlockPublicPolicy=True, IgnorePublicAcls=True, RestrictPublicBuckets=True, ), Tags=t.Tags({"ServiceCatalogPuppet:Actor": "Framework"}), )) manual_approvals_param = template.add_resource( ssm.Parameter( "ManualApprovalsParam", Type="String", Name="/servicecatalog-puppet/manual-approvals", Value=t.Ref(with_manual_approvals_parameter), )) template.add_resource( ssm.Parameter( "SpokeDeployEnvParameter", Type="String", Name=constants.SPOKE_EXECUTION_MODE_DEPLOY_ENV_PARAMETER_NAME, Value=t.Ref(spoke_deploy_environment_compute_type_parameter), )) param = template.add_resource( ssm.Parameter( "Param", Type="String", Name="service-catalog-puppet-version", Value=t.Ref(version_parameter), )) partition_parameter = template.add_resource( ssm.Parameter( "PartitionParameter", Type="String", Name="/servicecatalog-puppet/partition", Value=t.Ref("AWS::Partition"), )) puppet_role_name_parameter = template.add_resource( ssm.Parameter( "PuppetRoleNameParameter", Type="String", Name="/servicecatalog-puppet/puppet-role/name", Value=t.Ref(puppet_role_name_parameter), )) puppet_role_path_parameter = template.add_resource( ssm.Parameter( "PuppetRolePathParameter", Type="String", Name="/servicecatalog-puppet/puppet-role/path", Value=t.Ref(puppet_role_path_template_parameter), )) share_accept_function_role = template.add_resource( iam.Role( "ShareAcceptFunctionRole", RoleName="ShareAcceptFunctionRole", 
ManagedPolicyArns=[ t.Sub( "arn:${AWS::Partition}:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole" ) ], Path=t.Ref(puppet_role_path_template_parameter), Policies=[ iam.Policy( PolicyName="ServiceCatalogActions", PolicyDocument={ "Version": "2012-10-17", "Statement": [{ "Action": ["sts:AssumeRole"], "Resource": { "Fn::Sub": "arn:${AWS::Partition}:iam::*:role${PuppetRolePath}${PuppetRoleName}" }, "Effect": "Allow", }], }, ) ], AssumeRolePolicyDocument={ "Version": "2012-10-17", "Statement": [{ "Action": ["sts:AssumeRole"], "Effect": "Allow", "Principal": { "Service": ["lambda.amazonaws.com"] }, }], }, )) provisioning_role = template.add_resource( iam.Role( "ProvisioningRole", RoleName="PuppetProvisioningRole", AssumeRolePolicyDocument={ "Version": "2012-10-17", "Statement": [ { "Action": ["sts:AssumeRole"], "Effect": "Allow", "Principal": { "Service": ["codebuild.amazonaws.com"] }, }, { "Action": ["sts:AssumeRole"], "Effect": "Allow", "Principal": { "AWS": { "Fn::Sub": "${AWS::AccountId}" } }, }, ], }, ManagedPolicyArns=[ t.Sub( "arn:${AWS::Partition}:iam::aws:policy/AdministratorAccess" ) ], PermissionsBoundary=t.Ref( puppet_provisioning_role_permissions_boundary_parameter), Path=t.Ref(puppet_role_path_template_parameter), )) cloud_formation_deploy_role = template.add_resource( iam.Role( "CloudFormationDeployRole", RoleName="CloudFormationDeployRole", AssumeRolePolicyDocument={ "Version": "2012-10-17", "Statement": [ { "Action": ["sts:AssumeRole"], "Effect": "Allow", "Principal": { "Service": ["cloudformation.amazonaws.com"] }, }, { "Action": ["sts:AssumeRole"], "Effect": "Allow", "Principal": { "AWS": { "Fn::Sub": "${AWS::AccountId}" } }, }, ], }, ManagedPolicyArns=[ t.Sub( "arn:${AWS::Partition}:iam::aws:policy/AdministratorAccess" ) ], PermissionsBoundary=t.Ref( cloud_formation_deploy_role_permissions_boundary_parameter), Path=t.Ref(puppet_role_path_template_parameter), )) pipeline_role = template.add_resource( iam.Role( "PipelineRole", 
RoleName="PuppetCodePipelineRole", AssumeRolePolicyDocument={ "Version": "2012-10-17", "Statement": [{ "Action": ["sts:AssumeRole"], "Effect": "Allow", "Principal": { "Service": ["codepipeline.amazonaws.com"] }, }], }, ManagedPolicyArns=[ t.Sub( "arn:${AWS::Partition}:iam::aws:policy/AdministratorAccess" ) ], PermissionsBoundary=t.Ref( puppet_code_pipeline_role_permission_boundary_parameter), Path=t.Ref(puppet_role_path_template_parameter), )) source_role = template.add_resource( iam.Role( "SourceRole", RoleName="PuppetSourceRole", AssumeRolePolicyDocument={ "Version": "2012-10-17", "Statement": [ { "Action": ["sts:AssumeRole"], "Effect": "Allow", "Principal": { "Service": ["codepipeline.amazonaws.com"] }, }, { "Action": ["sts:AssumeRole"], "Effect": "Allow", "Principal": { "AWS": { "Fn::Sub": "arn:${AWS::Partition}:iam::${AWS::AccountId}:root" } }, }, ], }, ManagedPolicyArns=[ t.Sub( "arn:${AWS::Partition}:iam::aws:policy/AdministratorAccess" ) ], PermissionsBoundary=t.Ref( source_role_permissions_boundary_parameter), Path=t.Ref(puppet_role_path_template_parameter), )) dry_run_notification_topic = template.add_resource( sns.Topic( "DryRunNotificationTopic", DisplayName="service-catalog-puppet-dry-run-approvals", TopicName="service-catalog-puppet-dry-run-approvals", Condition="HasManualApprovals", )) deploy_role = template.add_resource( iam.Role( "DeployRole", RoleName="PuppetDeployRole", AssumeRolePolicyDocument={ "Version": "2012-10-17", "Statement": [{ "Action": ["sts:AssumeRole"], "Effect": "Allow", "Principal": { "Service": ["codebuild.amazonaws.com"] }, }], }, ManagedPolicyArns=[ t.Sub( "arn:${AWS::Partition}:iam::aws:policy/AdministratorAccess" ) ], PermissionsBoundary=t.Ref( puppet_deploy_role_permission_boundary_parameter), Path=t.Ref(puppet_role_path_template_parameter), )) num_workers_ssm_parameter = template.add_resource( ssm.Parameter( "NumWorkersSSMParameter", Type="String", Name="/servicecatalog-puppet/deploy/num-workers", 
Value=t.Sub("${DeployNumWorkers}"), )) parameterised_source_bucket = template.add_resource( s3.Bucket( "ParameterisedSourceBucket", PublicAccessBlockConfiguration=s3.PublicAccessBlockConfiguration( IgnorePublicAcls=True, BlockPublicPolicy=True, BlockPublicAcls=True, RestrictPublicBuckets=True, ), BucketEncryption=s3.BucketEncryption( ServerSideEncryptionConfiguration=[ s3.ServerSideEncryptionRule( ServerSideEncryptionByDefault=s3. ServerSideEncryptionByDefault(SSEAlgorithm="AES256")) ]), Tags=t.Tags.from_dict( **{"ServiceCatalogPuppet:Actor": "Framework"}), BucketName=t.Sub("sc-puppet-parameterised-runs-${AWS::AccountId}"), VersioningConfiguration=s3.VersioningConfiguration( Status="Enabled"), )) source_stage = codepipeline.Stages( Name="Source", Actions=[ codepipeline.Actions( RunOrder=1, RoleArn=t.GetAtt("SourceRole", "Arn"), ActionTypeId=codepipeline.ActionTypeId( Category="Source", Owner="AWS", Version="1", Provider="S3", ), OutputArtifacts=[ codepipeline.OutputArtifacts(Name="ParameterisedSource") ], Configuration={ "S3Bucket": t.Ref(parameterised_source_bucket), "S3ObjectKey": "parameters.zip", "PollForSourceChanges": True, }, Name="ParameterisedSource", ) ], ) install_spec = { "runtime-versions": dict(python="3.7"), "commands": [ f"pip install {puppet_version}" if "http" in puppet_version else f"pip install aws-service-catalog-puppet=={puppet_version}", ], } deploy_env_vars = [ { "Type": "PLAINTEXT", "Name": "PUPPET_ACCOUNT_ID", "Value": t.Ref("AWS::AccountId"), }, { "Type": "PLAINTEXT", "Name": "PUPPET_REGION", "Value": t.Ref("AWS::Region"), }, { "Type": "PARAMETER_STORE", "Name": "PARTITION", "Value": t.Ref(partition_parameter), }, { "Type": "PARAMETER_STORE", "Name": "PUPPET_ROLE_NAME", "Value": t.Ref(puppet_role_name_parameter), }, { "Type": "PARAMETER_STORE", "Name": "PUPPET_ROLE_PATH", "Value": t.Ref(puppet_role_path_parameter), }, ] if is_codecommit: template.add_resource( codecommit.Repository( "CodeRepo", 
RepositoryName=source.get("Configuration").get( "RepositoryName"), RepositoryDescription= "Repo to store the servicecatalog puppet solution", DeletionPolicy="Retain", )) source_stage.Actions.append( codepipeline.Actions( RunOrder=1, RoleArn=t.GetAtt("SourceRole", "Arn"), ActionTypeId=codepipeline.ActionTypeId( Category="Source", Owner="AWS", Version="1", Provider="CodeCommit", ), OutputArtifacts=[codepipeline.OutputArtifacts(Name="Source")], Configuration={ "RepositoryName": source.get("Configuration").get("RepositoryName"), "BranchName": source.get("Configuration").get("BranchName"), "PollForSourceChanges": source.get("Configuration").get("PollForSourceChanges", True), }, Name="Source", )) if is_github: source_stage.Actions.append( codepipeline.Actions( RunOrder=1, ActionTypeId=codepipeline.ActionTypeId( Category="Source", Owner="ThirdParty", Version="1", Provider="GitHub", ), OutputArtifacts=[codepipeline.OutputArtifacts(Name="Source")], Configuration={ "Owner": source.get("Configuration").get("Owner"), "Repo": source.get("Configuration").get("Repo"), "Branch": source.get("Configuration").get("Branch"), "OAuthToken": t.Join( "", [ "{{resolve:secretsmanager:", source.get("Configuration").get( "SecretsManagerSecret"), ":SecretString:OAuthToken}}", ], ), "PollForSourceChanges": source.get("Configuration").get("PollForSourceChanges"), }, Name="Source", )) if is_custom: source_stage.Actions.append( codepipeline.Actions( RunOrder=1, ActionTypeId=codepipeline.ActionTypeId( Category="Source", Owner="Custom", Version=source.get("Configuration").get( "CustomActionTypeVersion"), Provider=source.get("Configuration").get( "CustomActionTypeProvider"), ), OutputArtifacts=[codepipeline.OutputArtifacts(Name="Source")], Configuration={ "GitUrl": source.get("Configuration").get("GitUrl"), "Branch": source.get("Configuration").get("Branch"), "PipelineName": t.Sub("${AWS::StackName}-pipeline"), }, Name="Source", )) webhook = codepipeline.Webhook( "Webhook", Authentication="IP", 
TargetAction="Source", AuthenticationConfiguration=codepipeline.WebhookAuthConfiguration( AllowedIPRange=source.get("Configuration").get( "GitWebHookIpAddress")), Filters=[ codepipeline.WebhookFilterRule( JsonPath="$.changes[0].ref.id", MatchEquals="refs/heads/{Branch}") ], TargetPipelineVersion=1, TargetPipeline=t.Sub("${AWS::StackName}-pipeline"), ) template.add_resource(webhook) values_for_sub = { "GitUrl": source.get("Configuration").get("GitUrl"), "WebhookUrl": t.GetAtt(webhook, "Url"), } output_to_add = t.Output("WebhookUrl") output_to_add.Value = t.Sub("${GitUrl}||${WebhookUrl}", **values_for_sub) output_to_add.Export = t.Export(t.Sub("${AWS::StackName}-pipeline")) template.add_output(output_to_add) if is_codestarsourceconnection: source_stage.Actions.append( codepipeline.Actions( RunOrder=1, RoleArn=t.GetAtt("SourceRole", "Arn"), ActionTypeId=codepipeline.ActionTypeId( Category="Source", Owner="AWS", Version="1", Provider="CodeStarSourceConnection", ), OutputArtifacts=[codepipeline.OutputArtifacts(Name="Source")], Configuration={ "ConnectionArn": source.get("Configuration").get("ConnectionArn"), "FullRepositoryId": source.get("Configuration").get("FullRepositoryId"), "BranchName": source.get("Configuration").get("BranchName"), "OutputArtifactFormat": source.get("Configuration").get("OutputArtifactFormat"), }, Name="Source", )) if is_s3: bucket_name = source.get("Configuration").get("S3Bucket") if not scm_skip_creation_of_repo: template.add_resource( s3.Bucket( bucket_name, PublicAccessBlockConfiguration=s3. PublicAccessBlockConfiguration( IgnorePublicAcls=True, BlockPublicPolicy=True, BlockPublicAcls=True, RestrictPublicBuckets=True, ), BucketEncryption=s3.BucketEncryption( ServerSideEncryptionConfiguration=[ s3.ServerSideEncryptionRule( ServerSideEncryptionByDefault=s3. 
ServerSideEncryptionByDefault( SSEAlgorithm="AES256")) ]), Tags=t.Tags.from_dict( **{"ServiceCatalogPuppet:Actor": "Framework"}), BucketName=bucket_name, VersioningConfiguration=s3.VersioningConfiguration( Status="Enabled"), )) source_stage.Actions.append( codepipeline.Actions( RunOrder=1, ActionTypeId=codepipeline.ActionTypeId( Category="Source", Owner="AWS", Version="1", Provider="S3", ), OutputArtifacts=[codepipeline.OutputArtifacts(Name="Source")], Configuration={ "S3Bucket": bucket_name, "S3ObjectKey": source.get("Configuration").get("S3ObjectKey"), "PollForSourceChanges": source.get("Configuration").get("PollForSourceChanges"), }, Name="Source", )) single_account_run_project_build_spec = dict( version=0.2, phases=dict( install=install_spec, build={ "commands": [ 'echo "single_account: \\"${SINGLE_ACCOUNT_ID}\\"" > parameters.yaml', "cat parameters.yaml", "zip parameters.zip parameters.yaml", "aws s3 cp parameters.zip s3://sc-puppet-parameterised-runs-${PUPPET_ACCOUNT_ID}/parameters.zip", ] }, post_build={ "commands": [ "servicecatalog-puppet wait-for-parameterised-run-to-complete", ] }, ), artifacts=dict( name="DeployProject", files=[ "ServiceCatalogPuppet/manifest.yaml", "ServiceCatalogPuppet/manifest-expanded.yaml", "results/*/*", "output/*/*", "exploded_results/*/*", "tasks.log", ], ), ) single_account_run_project_args = dict( Name="servicecatalog-puppet-single-account-run", Description="Runs puppet for a single account - SINGLE_ACCOUNT_ID", ServiceRole=t.GetAtt(deploy_role, "Arn"), Tags=t.Tags.from_dict(**{"ServiceCatalogPuppet:Actor": "Framework"}), Artifacts=codebuild.Artifacts(Type="NO_ARTIFACTS", ), TimeoutInMinutes=480, Environment=codebuild.Environment( ComputeType=t.Ref(deploy_environment_compute_type_parameter), Image="aws/codebuild/standard:4.0", Type="LINUX_CONTAINER", EnvironmentVariables=[ { "Type": "PLAINTEXT", "Name": "SINGLE_ACCOUNT_ID", "Value": "CHANGE_ME", }, ] + deploy_env_vars, ), Source=codebuild.Source( Type="NO_SOURCE", 
BuildSpec=yaml.safe_dump(single_account_run_project_build_spec), ), ) single_account_run_project = template.add_resource( codebuild.Project("SingleAccountRunProject", **single_account_run_project_args)) single_account_run_project_build_spec["phases"]["post_build"]["commands"] = [ "servicecatalog-puppet wait-for-parameterised-run-to-complete --on-complete-url $CALLBACK_URL" ] single_account_run_project_args[ "Name"] = "servicecatalog-puppet-single-account-run-with-callback" single_account_run_project_args[ "Description"] = "Runs puppet for a single account - SINGLE_ACCOUNT_ID and then does a http put" single_account_run_project_args.get( "Environment").EnvironmentVariables.append({ "Type": "PLAINTEXT", "Name": "CALLBACK_URL", "Value": "CHANGE_ME", }) single_account_run_project_args["Source"] = codebuild.Source( Type="NO_SOURCE", BuildSpec=yaml.safe_dump(single_account_run_project_build_spec), ) single_account_run_project_with_callback = template.add_resource( codebuild.Project("SingleAccountRunWithCallbackProject", **single_account_run_project_args)) stages = [source_stage] if should_validate: template.add_resource( codebuild.Project( "ValidateProject", Name="servicecatalog-puppet-validate", ServiceRole=t.GetAtt("DeployRole", "Arn"), Tags=t.Tags.from_dict( **{"ServiceCatalogPuppet:Actor": "Framework"}), Artifacts=codebuild.Artifacts(Type="CODEPIPELINE"), TimeoutInMinutes=60, Environment=codebuild.Environment( ComputeType="BUILD_GENERAL1_SMALL", Image="aws/codebuild/standard:4.0", Type="LINUX_CONTAINER", ), Source=codebuild.Source( BuildSpec=yaml.safe_dump( dict( version="0.2", phases={ "install": { "runtime-versions": { "python": "3.7", }, "commands": [ f"pip install {puppet_version}" if "http" in puppet_version else f"pip install aws-service-catalog-puppet=={puppet_version}", ], }, "build": { "commands": [ "servicecatalog-puppet validate manifest.yaml" ] }, }, )), Type="CODEPIPELINE", ), Description="Validate the manifest.yaml file", )) stages.append( 
codepipeline.Stages( Name="Validate", Actions=[ codepipeline.Actions( InputArtifacts=[ codepipeline.InputArtifacts(Name="Source"), ], Name="Validate", ActionTypeId=codepipeline.ActionTypeId( Category="Build", Owner="AWS", Version="1", Provider="CodeBuild", ), OutputArtifacts=[ codepipeline.OutputArtifacts( Name="ValidateProject") ], Configuration={ "ProjectName": t.Ref("ValidateProject"), "PrimarySource": "Source", }, RunOrder=1, ), ], )) if is_manual_approvals: deploy_stage = codepipeline.Stages( Name="Deploy", Actions=[ codepipeline.Actions( InputArtifacts=[ codepipeline.InputArtifacts(Name="Source"), codepipeline.InputArtifacts( Name="ParameterisedSource"), ], Name="DryRun", ActionTypeId=codepipeline.ActionTypeId( Category="Build", Owner="AWS", Version="1", Provider="CodeBuild", ), OutputArtifacts=[ codepipeline.OutputArtifacts(Name="DryRunProject") ], Configuration={ "ProjectName": t.Ref("DryRunProject"), "PrimarySource": "Source", }, RunOrder=1, ), codepipeline.Actions( ActionTypeId=codepipeline.ActionTypeId( Category="Approval", Owner="AWS", Version="1", Provider="Manual", ), Configuration={ "NotificationArn": t.Ref("DryRunNotificationTopic"), "CustomData": "Approve when you are happy with the dry run.", }, Name="DryRunApproval", RunOrder=2, ), codepipeline.Actions( InputArtifacts=[ codepipeline.InputArtifacts(Name="Source"), codepipeline.InputArtifacts( Name="ParameterisedSource"), ], Name="Deploy", ActionTypeId=codepipeline.ActionTypeId( Category="Build", Owner="AWS", Version="1", Provider="CodeBuild", ), OutputArtifacts=[ codepipeline.OutputArtifacts(Name="DeployProject") ], Configuration={ "ProjectName": t.Ref("DeployProject"), "PrimarySource": "Source", }, RunOrder=3, ), ], ) else: deploy_stage = codepipeline.Stages( Name="Deploy", Actions=[ codepipeline.Actions( InputArtifacts=[ codepipeline.InputArtifacts(Name="Source"), codepipeline.InputArtifacts( Name="ParameterisedSource"), ], Name="Deploy", ActionTypeId=codepipeline.ActionTypeId( Category="Build", 
Owner="AWS", Version="1", Provider="CodeBuild", ), OutputArtifacts=[ codepipeline.OutputArtifacts(Name="DeployProject") ], Configuration={ "ProjectName": t.Ref("DeployProject"), "PrimarySource": "Source", "EnvironmentVariables": '[{"name":"EXECUTION_ID","value":"#{codepipeline.PipelineExecutionId}","type":"PLAINTEXT"}]', }, RunOrder=1, ), ], ) stages.append(deploy_stage) pipeline = template.add_resource( codepipeline.Pipeline( "Pipeline", RoleArn=t.GetAtt("PipelineRole", "Arn"), Stages=stages, Name=t.Sub("${AWS::StackName}-pipeline"), ArtifactStore=codepipeline.ArtifactStore( Type="S3", Location=t.Sub( "sc-puppet-pipeline-artifacts-${AWS::AccountId}-${AWS::Region}" ), ), RestartExecutionOnUpdate=True, )) if is_github: template.add_resource( codepipeline.Webhook( "Webhook", AuthenticationConfiguration=codepipeline. WebhookAuthConfiguration(SecretToken=t.Join( "", [ "{{resolve:secretsmanager:", source.get("Configuration").get( "SecretsManagerSecret"), ":SecretString:SecretToken}}", ], )), Filters=[ codepipeline.WebhookFilterRule( JsonPath="$.ref", MatchEquals="refs/heads/" + source.get("Configuration").get("Branch"), ) ], Authentication="GITHUB_HMAC", TargetPipeline=t.Ref(pipeline), TargetAction="Source", Name=t.Sub("${AWS::StackName}-webhook"), TargetPipelineVersion=t.GetAtt(pipeline, "Version"), RegisterWithThirdParty="true", )) deploy_project_build_spec = dict( version=0.2, phases=dict( install={ "runtime-versions": dict(python="3.7"), "commands": [ f"pip install {puppet_version}" if "http" in puppet_version else f"pip install aws-service-catalog-puppet=={puppet_version}", ], }, pre_build={ "commands": [ "servicecatalog-puppet --info expand --parameter-override-file $CODEBUILD_SRC_DIR_ParameterisedSource/parameters.yaml manifest.yaml", ] }, build={ "commands": [ "servicecatalog-puppet --info deploy --num-workers ${NUM_WORKERS} manifest-expanded.yaml", ] }, ), artifacts=dict( name="DeployProject", files=[ "manifest-expanded.yaml", "results/*/*", "output/*/*", 
"exploded_results/*/*", "tasks.log", ], ), ) deploy_project_args = dict( Name="servicecatalog-puppet-deploy", ServiceRole=t.GetAtt(deploy_role, "Arn"), Tags=t.Tags.from_dict(**{"ServiceCatalogPuppet:Actor": "Framework"}), Artifacts=codebuild.Artifacts(Type="CODEPIPELINE", ), TimeoutInMinutes=480, Environment=codebuild.Environment( ComputeType=t.Ref(deploy_environment_compute_type_parameter), Image="aws/codebuild/standard:4.0", Type="LINUX_CONTAINER", EnvironmentVariables=[ { "Type": "PARAMETER_STORE", "Name": "NUM_WORKERS", "Value": t.Ref(num_workers_ssm_parameter), }, { "Type": "PARAMETER_STORE", "Name": "SPOKE_EXECUTION_MODE_DEPLOY_ENV", "Value": constants.SPOKE_EXECUTION_MODE_DEPLOY_ENV_PARAMETER_NAME, }, ] + deploy_env_vars, ), Source=codebuild.Source( Type="CODEPIPELINE", BuildSpec=yaml.safe_dump(deploy_project_build_spec), ), Description="deploys out the products to be deployed", ) deploy_project = template.add_resource( codebuild.Project("DeployProject", **deploy_project_args)) deploy_project_build_spec["phases"]["build"]["commands"] = [ "servicecatalog-puppet --info dry-run manifest-expanded.yaml" ] deploy_project_build_spec["artifacts"]["name"] = "DryRunProject" deploy_project_args["Name"] = "servicecatalog-puppet-dryrun" deploy_project_args[ "Description"] = "dry run of servicecatalog-puppet-dryrun" deploy_project_args["Source"] = codebuild.Source( Type="CODEPIPELINE", BuildSpec=yaml.safe_dump(deploy_project_build_spec), ) dry_run_project = template.add_resource( codebuild.Project("DryRunProject", **deploy_project_args)) bootstrap_project = template.add_resource( codebuild.Project( "BootstrapProject", Name="servicecatalog-puppet-bootstrap-spokes-in-ou", ServiceRole=t.GetAtt("DeployRole", "Arn"), Tags=t.Tags.from_dict( **{"ServiceCatalogPuppet:Actor": "Framework"}), Artifacts=codebuild.Artifacts(Type="NO_ARTIFACTS"), TimeoutInMinutes=60, Environment=codebuild.Environment( ComputeType="BUILD_GENERAL1_SMALL", Image="aws/codebuild/standard:4.0", 
Type="LINUX_CONTAINER", EnvironmentVariables=[ { "Type": "PLAINTEXT", "Name": "OU_OR_PATH", "Value": "CHANGE_ME" }, { "Type": "PLAINTEXT", "Name": "IAM_ROLE_NAME", "Value": "OrganizationAccountAccessRole", }, { "Type": "PLAINTEXT", "Name": "IAM_ROLE_ARNS", "Value": "" }, ], ), Source=codebuild.Source( BuildSpec= "version: 0.2\nphases:\n install:\n runtime-versions:\n python: 3.7\n commands:\n - pip install aws-service-catalog-puppet\n build:\n commands:\n - servicecatalog-puppet bootstrap-spokes-in-ou $OU_OR_PATH $IAM_ROLE_NAME $IAM_ROLE_ARNS\nartifacts:\n files:\n - results/*/*\n - output/*/*\n name: BootstrapProject\n", Type="NO_SOURCE", ), Description="Bootstrap all the accounts in an OU", )) template.add_resource( codebuild.Project( "BootstrapASpokeProject", Name="servicecatalog-puppet-bootstrap-spoke", ServiceRole=t.GetAtt("DeployRole", "Arn"), Tags=t.Tags.from_dict( **{"ServiceCatalogPuppet:Actor": "Framework"}), Artifacts=codebuild.Artifacts(Type="NO_ARTIFACTS"), TimeoutInMinutes=60, Environment=codebuild.Environment( ComputeType="BUILD_GENERAL1_SMALL", Image="aws/codebuild/standard:4.0", Type="LINUX_CONTAINER", EnvironmentVariables=[ { "Type": "PLAINTEXT", "Name": "PUPPET_ACCOUNT_ID", "Value": t.Sub("${AWS::AccountId}"), }, { "Type": "PLAINTEXT", "Name": "ORGANIZATION_ACCOUNT_ACCESS_ROLE_ARN", "Value": "CHANGE_ME", }, { "Type": "PLAINTEXT", "Name": "ASSUMABLE_ROLE_IN_ROOT_ACCOUNT", "Value": "CHANGE_ME", }, ], ), Source=codebuild.Source( BuildSpec=yaml.safe_dump( dict( version=0.2, phases=dict( install=install_spec, build={ "commands": [ "servicecatalog-puppet bootstrap-spoke-as ${PUPPET_ACCOUNT_ID} ${ASSUMABLE_ROLE_IN_ROOT_ACCOUNT} ${ORGANIZATION_ACCOUNT_ACCESS_ROLE_ARN}" ] }, ), )), Type="NO_SOURCE", ), Description="Bootstrap given account as a spoke", )) cloud_formation_events_queue = template.add_resource( sqs.Queue( "CloudFormationEventsQueue", QueueName="servicecatalog-puppet-cloudformation-events", Tags=t.Tags.from_dict( 
**{"ServiceCatalogPuppet:Actor": "Framework"}), )) cloud_formation_events_queue_policy = template.add_resource( sqs.QueuePolicy( "CloudFormationEventsQueuePolicy", Queues=[t.Ref(cloud_formation_events_queue)], PolicyDocument={ "Id": "AllowSNS", "Version": "2012-10-17", "Statement": [{ "Sid": "allow-send-message", "Effect": "Allow", "Principal": { "AWS": "*" }, "Action": ["sqs:SendMessage"], "Resource": "*", "Condition": { "ArnEquals": { "aws:SourceArn": t.Sub( "arn:${AWS::Partition}:sns:*:${AWS::AccountId}:servicecatalog-puppet-cloudformation-regional-events" ) } }, }], }, )) spoke_deploy_bucket = template.add_resource( s3.Bucket( "SpokeDeployBucket", PublicAccessBlockConfiguration=s3.PublicAccessBlockConfiguration( IgnorePublicAcls=True, BlockPublicPolicy=True, BlockPublicAcls=True, RestrictPublicBuckets=True, ), BucketEncryption=s3.BucketEncryption( ServerSideEncryptionConfiguration=[ s3.ServerSideEncryptionRule( ServerSideEncryptionByDefault=s3. ServerSideEncryptionByDefault(SSEAlgorithm="AES256")) ]), Tags=t.Tags.from_dict( **{"ServiceCatalogPuppet:Actor": "Framework"}), BucketName=t.Sub("sc-puppet-spoke-deploy-${AWS::AccountId}"), VersioningConfiguration=s3.VersioningConfiguration( Status="Enabled"), )) caching_bucket = template.add_resource( s3.Bucket( "CachingBucket", PublicAccessBlockConfiguration=s3.PublicAccessBlockConfiguration( BlockPublicAcls=True, BlockPublicPolicy=True, IgnorePublicAcls=True, RestrictPublicBuckets=True, ), BucketEncryption=s3.BucketEncryption( ServerSideEncryptionConfiguration=[ s3.ServerSideEncryptionRule( ServerSideEncryptionByDefault=s3. 
ServerSideEncryptionByDefault(SSEAlgorithm="AES256")) ]), Tags=t.Tags.from_dict( **{"ServiceCatalogPuppet:Actor": "Framework"}), BucketName=t.Sub( "sc-puppet-caching-bucket-${AWS::AccountId}-${AWS::Region}"), VersioningConfiguration=s3.VersioningConfiguration( Status="Enabled"), )) template.add_output( t.Output( "CloudFormationEventsQueueArn", Value=t.GetAtt(cloud_formation_events_queue, "Arn"), )) template.add_output(t.Output("Version", Value=t.GetAtt(param, "Value"))) template.add_output( t.Output("ManualApprovalsParam", Value=t.GetAtt(manual_approvals_param, "Value"))) template.add_resource( ssm.Parameter( "DefaultTerraformVersion", Type="String", Name=constants.DEFAULT_TERRAFORM_VERSION_PARAMETER_NAME, Value=constants.DEFAULT_TERRAFORM_VERSION_VALUE, )) return template
def init_lb(self, aws_name, template_title):
    """Build the CloudFormation template for an ELBv2 load balancer.

    Generates (via troposphere) the Parameters, Conditions, the
    LoadBalancer resource, its TargetGroups, Listeners (with optional
    extra SSL certificates and listener rules), and either legacy
    Route53 RecordSets or record sets provisioned through the route53
    controller, plus stack Outputs for each created resource.

    :param aws_name: base name used to compute the stack's AWS name.
    :param template_title: title for the troposphere Template.
    """
    self.set_aws_name(aws_name, self.resource_group_name, self.lb_config.name)
    self.network = self.lb_config.env_region_obj.network

    # Init Troposphere template
    self.init_template(template_title)
    # A disabled LB still gets a (mostly empty) template set on the stack;
    # everything below only runs for enabled LBs.
    if not self.lb_config.is_enabled():
        return self.set_template()

    # --- Parameters ---
    # NOTE: given the early return above, lb_enable is always 'true' here;
    # the 'false' branch is kept for the LBIsEnabled condition machinery.
    if self.lb_config.is_enabled():
        lb_enable = 'true'
    else:
        lb_enable = 'false'
    lb_is_enabled_param = self.create_cfn_parameter(
        param_type='String',
        name='LBEnabled',
        description='Enable the LB in this template',
        value=lb_enable)
    vpc_stack = self.env_ctx.get_vpc_stack()
    vpc_param = self.create_cfn_parameter(
        param_type='String',
        name='VPC',
        description='VPC ID',
        value=StackOutputParam('VPC', vpc_stack, 'VPC', self))
    lb_region = self.env_ctx.region
    # 'alb' vs 'nlb' selects which regional hosted zone id lookup to use.
    if self.lb_config.type == 'LBApplication':
        lb_type = 'alb'
    else:
        lb_type = 'nlb'
    lb_hosted_zone_id_param = self.create_cfn_parameter(
        param_type='String',
        name='LBHostedZoneId',
        description='The Regonal AWS Route53 Hosted Zone ID',
        value=self.lb_hosted_zone_id(lb_type, lb_region))

    # Load balancer names are limited to 32 characters:
    # <proj>-<env>-<app>-<lb.name>
    # TODO: Limit each name item to 7 chars
    # Name collision risk: if unique identifying characters are truncated
    # - Add a hash?
    # - Check for duplicates with validating template
    load_balancer_name = self.create_resource_name_join(
        name_list=[
            self.env_ctx.netenv.name,
            self.env_ctx.env.name,
            self.app_id,
            self.resource_group_name,
            self.lb_config.name
        ],
        separator='',
        camel_case=True,
        filter_id='EC2.ElasticLoadBalancingV2.LoadBalancer.Name')
    load_balancer_name_param = self.create_cfn_parameter(
        param_type='String',
        name='LoadBalancerName',
        description='The name of the load balancer',
        value=load_balancer_name)
    scheme_param = self.create_cfn_parameter(
        param_type='String',
        min_length=1,
        max_length=128,
        name='Scheme',
        description='Specify internal to create an internal load balancer with a DNS name that resolves to private IP addresses or internet-facing to create a load balancer with a publicly resolvable DNS name, which resolves to public IP addresses.',
        value=self.lb_config.scheme)

    # Segment SubnetList is a Segment stack Output based on availability zones
    subnet_list_ref = self.network.vpc.segments[self.lb_config.segment].paco_ref + '.subnet_id_list'
    subnet_list_param = self.create_cfn_parameter(
        param_type='List<AWS::EC2::Subnet::Id>',
        name='SubnetList',
        description='A list of subnets where the LBs instances will be provisioned',
        value=subnet_list_ref,
    )

    # Security Groups: only ALBs carry security groups (NLBs do not);
    # hooks may contribute additional SGs for this LB.
    if self.lb_config.type == 'LBApplication':
        sg_group_list = []
        sg_group_list.extend(self.lb_config.security_groups)
        for hook in SECURITY_GROUPS_HOOKS:
            env_config = get_parent_by_interface(self.lb_config, IEnvironmentRegion)
            vpc_id = self.paco_ctx.get_ref(
                f'{env_config.network.vpc.paco_ref}.id').get_outputs_value('VPC')
            hook_sg_list = hook(self.lb_config, self.account_ctx, self.aws_region, vpc_id)
            sg_group_list.extend(hook_sg_list)
        security_group_list_param = self.create_cfn_ref_list_param(
            param_type='List<AWS::EC2::SecurityGroup::Id>',
            name='SecurityGroupList',
            description='A List of security groups to attach to the LB',
            value=sg_group_list,
            ref_attribute='id')
    idle_timeout_param = self.create_cfn_parameter(
        param_type='String',
        name='IdleTimeoutSecs',
        description='The idle timeout value, in seconds.',
        value=self.lb_config.idle_timeout_secs)

    # --- Conditions ---
    self.template.add_condition(
        "LBIsEnabled",
        troposphere.Equals(troposphere.Ref(lb_is_enabled_param), "true"))

    # --- Resources ---
    # LoadBalancer: built as a plain dict then converted with from_dict so
    # optional keys can be added conditionally.
    load_balancer_logical_id = 'LoadBalancer'
    cfn_export_dict = {}
    cfn_export_dict['Name'] = troposphere.Ref(load_balancer_name_param)
    if self.lb_config.type == 'LBApplication':
        lb_v2_type = 'application'
    else:
        lb_v2_type = 'network'
    cfn_export_dict['Type'] = lb_v2_type
    cfn_export_dict['Scheme'] = troposphere.Ref(scheme_param)
    cfn_export_dict['Subnets'] = troposphere.Ref(subnet_list_param)
    # Application Load Balancer specific attributes
    lb_attributes = []
    if self.lb_config.type == 'LBApplication':
        cfn_export_dict['SecurityGroups'] = troposphere.Ref(security_group_list_param)
        lb_attributes.append({
            'Key': 'idle_timeout.timeout_seconds',
            'Value': troposphere.Ref(idle_timeout_param)
        })
    if self.lb_config.enable_access_logs:
        # ToDo: automatically create a bucket when access_logs_bucket is not set
        s3bucket = get_model_obj_from_ref(self.lb_config.access_logs_bucket, self.paco_ctx.project)
        lb_attributes.append({'Key': 'access_logs.s3.enabled', 'Value': 'true'})
        lb_attributes.append({'Key': 'access_logs.s3.bucket', 'Value': s3bucket.get_bucket_name()})
        if self.lb_config.access_logs_prefix:
            lb_attributes.append({'Key': 'access_logs.s3.prefix', 'Value': self.lb_config.access_logs_prefix})
    cfn_export_dict['LoadBalancerAttributes'] = lb_attributes
    lb_resource = troposphere.elasticloadbalancingv2.LoadBalancer.from_dict(
        load_balancer_logical_id, cfn_export_dict)
    lb_resource.Condition = "LBIsEnabled"
    self.template.add_resource(lb_resource)

    # --- Target Groups ---
    # Sorted for deterministic template ordering across runs.
    for target_group_name, target_group in sorted(self.lb_config.target_groups.items()):
        if target_group.is_enabled() == False:
            continue
        target_group_id = self.create_cfn_logical_id(target_group_name)
        target_group_logical_id = 'TargetGroup' + target_group_id
        cfn_export_dict = {}
        # Legacy stacks carry an explicit TG name; newer stacks let CFN name it.
        if self.paco_ctx.legacy_flag('target_group_name_2019_10_29') == True:
            name = self.create_resource_name_join(
                name_list=[load_balancer_name, target_group_id],
                separator='',
                camel_case=True,
                hash_long_names=True,
                filter_id='EC2.ElasticLoadBalancingV2.TargetGroup.Name',
            )
        else:
            name = troposphere.Ref('AWS::NoValue')
        cfn_export_dict['Name'] = name
        cfn_export_dict['HealthCheckIntervalSeconds'] = target_group.health_check_interval
        cfn_export_dict['HealthCheckTimeoutSeconds'] = target_group.health_check_timeout
        cfn_export_dict['HealthyThresholdCount'] = target_group.healthy_threshold
        cfn_export_dict['HealthCheckProtocol'] = target_group.health_check_protocol
        # HTTP(S) health checks additionally take a path and a success matcher
        if target_group.health_check_protocol in ['HTTP', 'HTTPS']:
            cfn_export_dict['HealthCheckPath'] = target_group.health_check_path
            cfn_export_dict['Matcher'] = {'HttpCode': target_group.health_check_http_code}
        # 'traffic-port' is the CFN default; only emit an override
        if target_group.health_check_port != 'traffic-port':
            cfn_export_dict['HealthCheckPort'] = target_group.health_check_port
        if target_group.port != None:
            cfn_export_dict['Port'] = target_group.port
        cfn_export_dict['Protocol'] = target_group.protocol
        cfn_export_dict['UnhealthyThresholdCount'] = target_group.unhealthy_threshold
        cfn_export_dict['TargetGroupAttributes'] = [{
            'Key': 'deregistration_delay.timeout_seconds',
            'Value': str(target_group.connection_drain_timeout)
        }]
        # TODO: Preserve Client IP
        # if self.lb_config.type == 'LBNetwork':
        #     cfn_export_dict['TargetGroupAttributes'].append({
        #         'Key': 'preserve_client_ip.enabled', 'Value': 'false'
        #     })
        cfn_export_dict['VpcId'] = troposphere.Ref(vpc_param)
        # 'instance' is the CFN default target type; only emit an override
        if target_group.target_type != 'instance':
            cfn_export_dict['TargetType'] = target_group.target_type
        target_group_resource = troposphere.elasticloadbalancingv2.TargetGroup.from_dict(
            target_group_logical_id, cfn_export_dict)
        self.template.add_resource(target_group_resource)

        # Target Group Outputs: arn, name and fullname paco refs
        target_group_ref = '.'.join([
            self.lb_config.paco_ref_parts,
            'target_groups',
            target_group_name
        ])
        target_group_arn_ref = '.'.join([target_group_ref, 'arn'])
        self.create_output(
            title='TargetGroupArn' + target_group_id,
            value=troposphere.Ref(target_group_resource),
            ref=target_group_arn_ref)
        target_group_name_ref = '.'.join([target_group_ref, 'name'])
        self.create_output(
            title='TargetGroupName' + target_group_id,
            value=troposphere.GetAtt(target_group_resource, 'TargetGroupName'),
            ref=target_group_name_ref)
        self.create_output(
            title='TargetGroupFullName' + target_group_id,
            value=troposphere.GetAtt(target_group_resource, 'TargetGroupFullName'),
            ref=target_group_ref + '.fullname')

    # --- Listeners ---
    for listener_name, listener in self.lb_config.listeners.items():
        logical_listener_name = self.create_cfn_logical_id('Listener' + listener_name)
        cfn_export_dict = listener.cfn_export_dict
        # Listener - Default Actions: either a fixed HTTP 301 redirect or a
        # forward to the listener's configured target group.
        if listener.redirect != None:
            action = {
                'Type': 'redirect',
                'RedirectConfig': {
                    'Port': str(listener.redirect.port),
                    'Protocol': listener.redirect.protocol,
                    'StatusCode': 'HTTP_301'
                }
            }
        else:
            target_group_id = self.create_cfn_logical_id(listener.target_group)
            action = {
                'Type': 'forward',
                'TargetGroupArn': troposphere.Ref('TargetGroup' + target_group_id)
            }
        cfn_export_dict['DefaultActions'] = [action]
        cfn_export_dict['LoadBalancerArn'] = troposphere.Ref(lb_resource)

        # Listener - SSL Certificates: the first certificate goes on the
        # Listener itself; any further certificates are collected for a
        # separate ListenerCertificate resource below.
        ssl_cert_param_obj_list = []
        unique_listener_cert_name = ""
        if len(listener.ssl_certificates) > 0 and self.lb_config.is_enabled():
            if listener.ssl_policy != '':
                cfn_export_dict['SslPolicy'] = listener.ssl_policy
            cfn_export_dict['Certificates'] = []
            for ssl_cert_idx in range(0, len(listener.ssl_certificates)):
                ssl_cert_param = self.create_cfn_parameter(
                    param_type='String',
                    name='SSLCertificateIdL%sC%d' % (listener_name, ssl_cert_idx),
                    description='The Arn of the SSL Certificate to associate with this Load Balancer',
                    value=listener.ssl_certificates[ssl_cert_idx] + ".arn")
                if ssl_cert_idx == 0:
                    cfn_export_dict['Certificates'] = [{
                        'CertificateArn': troposphere.Ref(ssl_cert_param)
                    }]
                else:
                    # Extra cert refs are accumulated into a name string that is
                    # md5-hashed below to build a stable logical id.
                    unique_listener_cert_name = f'{unique_listener_cert_name}{listener.ssl_certificates[ssl_cert_idx]}'
                    ssl_cert_param_obj_list.append(
                        troposphere.elasticloadbalancingv2.Certificate(
                            CertificateArn=troposphere.Ref(ssl_cert_param)))
        listener_resource = troposphere.elasticloadbalancingv2.Listener.from_dict(
            logical_listener_name, cfn_export_dict)
        self.template.add_resource(listener_resource)

        # ListenerCertificates: attach any additional (non-default) certs
        if len(ssl_cert_param_obj_list) > 0:
            unique_listener_cert_name = utils.md5sum(str_data=unique_listener_cert_name)
            logical_listener_cert_name = self.create_cfn_logical_id_join([
                logical_listener_name, 'Certificate', unique_listener_cert_name
            ])
            troposphere.elasticloadbalancingv2.ListenerCertificate(
                title=logical_listener_cert_name,
                template=self.template,
                Certificates=ssl_cert_param_obj_list,
                ListenerArn=troposphere.Ref(listener_resource))

        # Listener - Rules: forward or redirect rules with host-header and
        # path-pattern conditions.
        if listener.rules != None:
            for rule_name, rule in listener.rules.items():
                if rule.enabled == False:
                    continue
                logical_rule_name = self.create_cfn_logical_id(rule_name)
                cfn_export_dict = {}
                rule_conditions = []
                if rule.rule_type == "forward":
                    logical_target_group_id = self.create_cfn_logical_id(
                        'TargetGroup' + rule.target_group)
                    cfn_export_dict['Actions'] = [{
                        'Type': 'forward',
                        'TargetGroupArn': troposphere.Ref(logical_target_group_id)
                    }]
                    if rule.host != None:
                        rule_conditions.append({
                            'Field': 'host-header',
                            'Values': [rule.host]
                        })
                    if len(rule.path_pattern) > 0:
                        rule_conditions.append({
                            'Field': 'path-pattern',
                            'Values': rule.path_pattern
                        })
                elif rule.rule_type == "redirect":
                    redirect_config = {
                        'Type': 'redirect',
                        'RedirectConfig': {
                            'Host': rule.redirect_host,
                            'StatusCode': 'HTTP_301'
                        }
                    }
                    if rule.redirect_path != None:
                        redirect_config['RedirectConfig']['Path'] = rule.redirect_path
                    cfn_export_dict['Actions'] = [redirect_config]
                    # NOTE(review): unlike the forward branch, rule.host is used
                    # here without a None check — confirm redirect rules always
                    # set host.
                    rule_conditions.append({
                        'Field': 'host-header',
                        'Values': [rule.host]
                    })
                    if len(rule.path_pattern) > 0:
                        rule_conditions.append({
                            'Field': 'path-pattern',
                            'Values': rule.path_pattern
                        })
                cfn_export_dict['Conditions'] = rule_conditions
                cfn_export_dict['ListenerArn'] = troposphere.Ref(logical_listener_name)
                cfn_export_dict['Priority'] = rule.priority
                logical_listener_rule_name = self.create_cfn_logical_id_join(
                    str_list=[logical_listener_name, 'Rule', logical_rule_name])
                listener_rule_resource = troposphere.elasticloadbalancingv2.ListenerRule.from_dict(
                    logical_listener_rule_name, cfn_export_dict)
                listener_rule_resource.Condition = "LBIsEnabled"
                self.template.add_resource(listener_rule_resource)

    # --- Record Sets ---
    # Legacy mode: Route53 RecordSets are created inline in this template.
    if self.paco_ctx.legacy_flag('route53_record_set_2019_10_16'):
        record_set_index = 0
        for lb_dns in self.lb_config.dns:
            if self.lb_config.is_dns_enabled() == True:
                hosted_zone_param = self.create_cfn_parameter(
                    param_type='String',
                    description='LB DNS Hosted Zone ID',
                    name='HostedZoneID%d' % (record_set_index),
                    value=lb_dns.hosted_zone + '.id')
                cfn_export_dict = {}
                cfn_export_dict['HostedZoneId'] = troposphere.Ref(hosted_zone_param)
                cfn_export_dict['Name'] = lb_dns.domain_name
                cfn_export_dict['Type'] = 'A'
                cfn_export_dict['AliasTarget'] = {
                    'DNSName': troposphere.GetAtt(lb_resource, 'DNSName'),
                    'HostedZoneId': troposphere.GetAtt(lb_resource, 'CanonicalHostedZoneID')
                }
                record_set_resource = troposphere.route53.RecordSet.from_dict(
                    'RecordSet' + str(record_set_index), cfn_export_dict)
                record_set_resource.Condition = "LBIsEnabled"
                self.template.add_resource(record_set_resource)
                record_set_index += 1

    # --- Outputs ---
    if self.enabled == True:
        self.create_output(
            title='LoadBalancerArn',
            value=troposphere.Ref(lb_resource),
            ref=self.lb_config.paco_ref_parts + '.arn')
        self.create_output(
            title='LoadBalancerName',
            value=troposphere.GetAtt(lb_resource, 'LoadBalancerName'),
            ref=self.lb_config.paco_ref_parts + '.name')
        self.create_output(
            title='LoadBalancerFullName',
            value=troposphere.GetAtt(lb_resource, 'LoadBalancerFullName'),
            ref=self.lb_config.paco_ref_parts + '.fullname')
        self.create_output(
            title='LoadBalancerCanonicalHostedZoneID',
            value=troposphere.GetAtt(lb_resource, 'CanonicalHostedZoneID'),
            ref=self.lb_config.paco_ref_parts + '.canonicalhostedzoneid')
        self.create_output(
            title='LoadBalancerDNSName',
            value=troposphere.GetAtt(lb_resource, 'DNSName'),
            ref=self.lb_config.paco_ref_parts + '.dnsname',
        )

    # Non-legacy mode: delegate alias record creation to the route53 controller
    # (provisions in the hosted zone's own account, asynchronously).
    if self.paco_ctx.legacy_flag('route53_record_set_2019_10_16') == False:
        route53_ctl = self.paco_ctx.get_controller('route53')
        for lb_dns in self.lb_config.dns:
            if self.lb_config.is_dns_enabled() == True:
                alias_dns_ref = self.lb_config.paco_ref + '.dnsname'
                alias_hosted_zone_ref = self.lb_config.paco_ref + '.canonicalhostedzoneid'
                hosted_zone = get_model_obj_from_ref(lb_dns.hosted_zone, self.paco_ctx.project)
                account_ctx = self.paco_ctx.get_account_context(account_ref=hosted_zone.account)
                route53_ctl.add_record_set(
                    account_ctx,
                    self.aws_region,
                    self.lb_config,
                    enabled=self.lb_config.is_enabled(),
                    dns=lb_dns,
                    record_set_type='Alias',
                    alias_dns_name=alias_dns_ref,
                    alias_hosted_zone_id=alias_hosted_zone_ref,
                    stack_group=self.stack.stack_group,
                    async_stack_provision=True,
                    config_ref=self.lb_config.paco_ref_parts + '.dns')
def create_pipeine_from_sourcebuilddeploy(self, deploy_region):
    """Create a CodePipeline with Source, Build and Deploy stages.

    Builds Source actions (ManualApproval, GitHub.Source, CodeCommit.Source),
    Build actions (ManualApproval, CodeBuild.Build), obtains the Deploy stage
    from init_deploy_stage(), wires a ManualApprovalIsEnabled condition, and
    adds the Pipeline resource to self.template.

    :param deploy_region: region passed through to init_deploy_stage().
    :return: the troposphere.codepipeline.Pipeline resource.
    """
    # CodePipeline
    # Source Actions
    source_stage_actions = []
    # Input artifacts for the Build stage, accumulated across ALL source
    # actions. Fix: this was previously re-initialized inside the loop,
    # which discarded artifacts from earlier source actions whenever a
    # pipeline declared more than one source (e.g. GitHub + CodeCommit).
    self.build_input_artifacts = []
    for action in self.pipeline.source.values():
        # Manual Approval Action
        if action.type == 'ManualApproval':
            manual_approval_action = self.init_manual_approval_action(action)
            source_stage_actions.append(manual_approval_action)
        # GitHub Action
        elif action.type == 'GitHub.Source':
            if action.is_enabled():
                self.github_source_enabled = True
                github_access_token = Reference(action.github_access_token).ref
                #github_token_param = self.create_cfn_parameter(
                #    param_type='AWS::SSM::Parameter::Value<String>',
                #    name='GitHubTokenSSMParameterName',
                #    description='The name of the SSM Parameter with the GitHub OAuth Token',
                #    value=action.github_token_parameter_name,
                #)
                github_owner_param = self.create_cfn_parameter(
                    param_type='String',
                    name='GitHubOwner',
                    description='The name of the GitHub owner',
                    value=action.github_owner,
                )
                github_repo_param = self.create_cfn_parameter(
                    param_type='String',
                    name='GitHubRepository',
                    description='The name of the GitHub Repository',
                    value=action.github_repository,
                )
                github_deploy_branch_name_param = self.create_cfn_parameter(
                    param_type='String',
                    name='GitHubDeploymentBranchName',
                    description='The name of the branch where commits will trigger a build.',
                    value=action.deployment_branch_name,
                )
                github_source_action = troposphere.codepipeline.Actions(
                    Name='GitHub',
                    ActionTypeId=troposphere.codepipeline.ActionTypeId(
                        Category='Source',
                        Owner='ThirdParty',
                        Version='1',
                        Provider='GitHub'),
                    Configuration={
                        'Owner': troposphere.Ref(github_owner_param),
                        'Repo': troposphere.Ref(github_repo_param),
                        'Branch': troposphere.Ref(github_deploy_branch_name_param),
                        # The OAuth token is resolved from Secrets Manager at
                        # deploy time via dynamic reference.
                        'OAuthToken': "{{resolve:secretsmanager:%s}}" % github_access_token,
                        #troposphere.Ref(github_token_param),
                        'PollForSourceChanges': False,
                    },
                    OutputArtifacts=[
                        troposphere.codepipeline.OutputArtifacts(
                            Name='GitHubArtifact')
                    ],
                    RunOrder=action.run_order,
                )
                source_stage_actions.append(github_source_action)
                self.build_input_artifacts.append(
                    troposphere.codepipeline.InputArtifacts(
                        Name='GitHubArtifact'))
        # CodeCommit Action
        elif action.type == 'CodeCommit.Source':
            if action.is_enabled():
                self.codecommit_source_enabled = True
                self.codecommit_repo_arn_param = self.create_cfn_parameter(
                    param_type='String',
                    name='CodeCommitRepositoryArn',
                    description='The Arn of the CodeCommit repository',
                    value='{}.codecommit.arn'.format(action.paco_ref),
                )
                self.codecommit_role_arn_param = self.create_cfn_parameter(
                    param_type='String',
                    name='CodeCommitRoleArn',
                    description='The Arn of the CodeCommit Role',
                    value='{}.codecommit_role.arn'.format(action.paco_ref),
                )
                codecommit_repo_name_param = self.create_cfn_parameter(
                    param_type='String',
                    name='CodeCommitRepositoryName',
                    description='The name of the CodeCommit repository',
                    value=action.codecommit_repository + '.name',
                )
                deploy_branch_name_param = self.create_cfn_parameter(
                    param_type='String',
                    name='CodeCommitDeploymentBranchName',
                    description='The name of the branch where commits will trigger a build.',
                    value=action.deployment_branch_name,
                )
                codecommit_source_action = troposphere.codepipeline.Actions(
                    Name='CodeCommit',
                    ActionTypeId=troposphere.codepipeline.ActionTypeId(
                        Category='Source',
                        Owner='AWS',
                        Version='1',
                        Provider='CodeCommit'),
                    Configuration={
                        'RepositoryName': troposphere.Ref(codecommit_repo_name_param),
                        'BranchName': troposphere.Ref(deploy_branch_name_param)
                    },
                    OutputArtifacts=[
                        troposphere.codepipeline.OutputArtifacts(
                            Name='CodeCommitArtifact')
                    ],
                    RunOrder=action.run_order,
                    RoleArn=troposphere.Ref(self.codecommit_role_arn_param))
                source_stage_actions.append(codecommit_source_action)
                self.build_input_artifacts.append(
                    troposphere.codepipeline.InputArtifacts(
                        Name='CodeCommitArtifact'))
    source_stage = troposphere.codepipeline.Stages(
        Name="Source", Actions=source_stage_actions)

    # Build Actions
    build_stage_actions = []
    for action in self.pipeline.build.values():
        # Manual Approval Action
        if action.type == 'ManualApproval':
            manual_approval_action = self.init_manual_approval_action(action)
            build_stage_actions.append(manual_approval_action)
        # CodeBuild Build Action
        elif action.type == 'CodeBuild.Build':
            self.codebuild_access_enabled = True
            self.codebuild_project_arn_param = self.create_cfn_parameter(
                param_type='String',
                name='CodeBuildProjectArn',
                description='The arn of the CodeBuild project',
                value='{}.project.arn'.format(action.paco_ref),
            )
            codebuild_build_action = troposphere.codepipeline.Actions(
                Name='CodeBuild',
                ActionTypeId=troposphere.codepipeline.ActionTypeId(
                    Category='Build',
                    Owner='AWS',
                    Version='1',
                    Provider='CodeBuild'),
                Configuration={
                    # NOTE(review): ProjectName references the resource name
                    # prefix parameter, not the CodeBuildProjectArn parameter
                    # created above — presumably the project is named after the
                    # resource name prefix; confirm against the CodeBuild
                    # template.
                    'ProjectName': troposphere.Ref(self.resource_name_prefix_param),
                },
                InputArtifacts=self.build_input_artifacts,
                OutputArtifacts=[
                    troposphere.codepipeline.OutputArtifacts(
                        Name='CodeBuildArtifact')
                ],
                RunOrder=action.run_order)
            build_stage_actions.append(codebuild_build_action)
    build_stage = troposphere.codepipeline.Stages(
        Name="Build", Actions=build_stage_actions)

    # Deploy Action: also yields the assume-role statements used elsewhere.
    [
        deploy_stage,
        self.s3_deploy_assume_role_statement,
        self.codedeploy_deploy_assume_role_statement
    ] = self.init_deploy_stage(deploy_region)

    # Manual Deploy Enabled/Disable
    manual_approval_enabled_param = self.create_cfn_parameter(
        param_type='String',
        name='ManualApprovalEnabled',
        description='Boolean indicating whether a manual approval is enabled or not.',
        value=self.manual_approval_is_enabled,
    )
    self.template.add_condition(
        'ManualApprovalIsEnabled',
        troposphere.Equals(troposphere.Ref(manual_approval_enabled_param), 'true'))

    # Assemble the pipeline stages, skipping any stage that is None.
    pipeline_stages = []
    if source_stage != None:
        pipeline_stages.append(source_stage)
    if build_stage != None:
        pipeline_stages.append(build_stage)
    if deploy_stage != None:
        pipeline_stages.append(deploy_stage)

    pipeline_service_role_res = self.add_pipeline_service_role()
    pipeline_res = troposphere.codepipeline.Pipeline(
        title='BuildCodePipeline',
        template=self.template,
        DependsOn='CodePipelinePolicy',
        RoleArn=troposphere.GetAtt(pipeline_service_role_res, 'Arn'),
        Name=troposphere.Ref(self.resource_name_prefix_param),
        Stages=pipeline_stages,
        ArtifactStore=troposphere.codepipeline.ArtifactStore(
            Type='S3',
            Location=troposphere.Ref(self.artifacts_bucket_name_param),
            EncryptionKey=troposphere.codepipeline.EncryptionKey(
                Type='KMS',
                Id=troposphere.Ref(self.cmk_arn_param),
            )))
    return pipeline_res