def _source_bucket() -> s3.Bucket:
    """Build the ``SourceBucket`` resource with SSE-S3 (AES256) default encryption."""
    default_sse = s3.ServerSideEncryptionByDefault(SSEAlgorithm="AES256")
    encryption = s3.BucketEncryption(
        ServerSideEncryptionConfiguration=[
            s3.ServerSideEncryptionRule(ServerSideEncryptionByDefault=default_sse)
        ]
    )
    return s3.Bucket("SourceBucket", BucketEncryption=encryption)
def add_module_bucket(self: Template):
    """Add the private, encrypted ``TerraformModules`` bucket to the template.

    Creates the bucket (SSE-S3 encrypted, all public access blocked) plus a
    bucket policy that denies ``s3:GetObject`` over non-TLS connections
    (``aws:SecureTransport`` is false).

    Side effects:
        * Stores the bucket resource on ``self._bucket``.
        * Adds the bucket and its ``BucketPolicy`` to the template.
    """
    self._bucket = self.add_resource(
        s3.Bucket(
            'TerraformModules',
            AccessControl='Private',
            # Default every object to SSE-S3 (AES256) encryption.
            BucketEncryption=s3.BucketEncryption(
                ServerSideEncryptionConfiguration=[
                    s3.ServerSideEncryptionRule(
                        ServerSideEncryptionByDefault=s3.ServerSideEncryptionByDefault(
                            SSEAlgorithm='AES256'))
                ]),
            # Block every form of public access to the bucket.
            PublicAccessBlockConfiguration=s3.PublicAccessBlockConfiguration(
                BlockPublicAcls=True,
                BlockPublicPolicy=True,
                IgnorePublicAcls=True,
                RestrictPublicBuckets=True)))
    # BUG FIX: the policy previously contained the exact same Deny statement
    # twice (copy-paste duplication). The policy's effect is unchanged with a
    # single statement, so emit it once.
    self.add_resource(
        s3.BucketPolicy(
            'TerraformModulesBucketPolicy',
            Bucket=Ref(self._bucket),
            PolicyDocument=PolicyDocument(
                Version='2012-10-17',
                Statement=[
                    Statement(
                        Effect=Deny,
                        Action=[Action('s3', 'GetObject')],
                        Principal=Principal('*'),
                        Resource=[
                            Join('',
                                 ['arn:aws:s3:::',
                                  Ref(self._bucket), '/*'])
                        ],
                        # Refuse plain-HTTP object reads.
                        Condition=Condition(
                            Bool({'aws:SecureTransport': False})))
                ]),
        ))
def _get_encryption_settings(self) -> s3.BucketEncryption:
    """Get CFNgin Bucket encryption settings.

    Uses SSE-KMS when a truthy ``KMSMasterKeyID`` variable is supplied;
    otherwise falls back to SSE-S3 (AES256).

    Returns:
        S3 Bucket encryption settings.
    """
    key_id = self._vars.get('KMSMasterKeyID')
    if key_id:
        default_args = {'KMSMasterKeyID': key_id, 'SSEAlgorithm': 'aws:kms'}
    else:
        default_args = {'SSEAlgorithm': 'AES256'}
    rule = s3.ServerSideEncryptionRule(
        ServerSideEncryptionByDefault=s3.ServerSideEncryptionByDefault(
            **default_args))
    return s3.BucketEncryption(ServerSideEncryptionConfiguration=[rule])
def resources(self, stack: Stack) -> list[AWSObject]:
    """Construct and return a s3.Bucket and its associated s3.BucketPolicy."""
    # Versioning is "Enabled" only when explicitly requested.
    versioning_status = "Enabled" if self.enable_versioning else "Suspended"
    bucket = s3.Bucket(
        name_to_id(self.name),
        BucketName=self.name,
        # Default all objects to SSE-S3 (AES256) encryption.
        BucketEncryption=s3.BucketEncryption(
            ServerSideEncryptionConfiguration=[
                s3.ServerSideEncryptionRule(
                    ServerSideEncryptionByDefault=s3.ServerSideEncryptionByDefault(
                        SSEAlgorithm="AES256"
                    )
                )
            ]
        ),
        # Block every form of public access.
        PublicAccessBlockConfiguration=s3.PublicAccessBlockConfiguration(
            BlockPublicAcls=True,
            BlockPublicPolicy=True,
            IgnorePublicAcls=True,
            RestrictPublicBuckets=True,
        ),
        VersioningConfiguration=s3.VersioningConfiguration(
            Status=versioning_status
        ),
    )
    policy = s3.BucketPolicy(
        name_to_id(self.name) + "Policy",
        Bucket=self.ref,
        PolicyDocument=self.policy_document.as_dict,
    )
    return [bucket, policy]
def add_s3_bucket(self):
    """Adds an S3 bucket to the template

    :return: the (s3.Bucket) object that has been added
    """
    encryption_rule = s3.ServerSideEncryptionRule(
        ServerSideEncryptionByDefault=s3.ServerSideEncryptionByDefault(
            SSEAlgorithm='AES256',  # alternative is 'aws:kms'
        ))
    bucket = s3.Bucket(
        'Bucket',
        BucketName=self.resource_name(
            PerfTestingTemplate.BUCKET_SHORT_NAME),
        BucketEncryption=s3.BucketEncryption(
            ServerSideEncryptionConfiguration=[encryption_rule]),
        # Keep object history for the perf-testing artifacts.
        VersioningConfiguration=s3.VersioningConfiguration(
            Status='Enabled',
        ))
    self.add_resource(bucket)
    return bucket
def _bucket_kms_encryption(key_arn):
    """Return BucketEncryption settings defaulting objects to SSE-KMS.

    :param key_arn: KMS key (ID or ARN) used as the bucket's default key.
    """
    by_default = s3.ServerSideEncryptionByDefault(
        KMSMasterKeyID=key_arn, SSEAlgorithm='aws:kms')
    rule = s3.ServerSideEncryptionRule(ServerSideEncryptionByDefault=by_default)
    return s3.BucketEncryption(ServerSideEncryptionConfiguration=[rule])
def generate_codepipeline_template(
    codepipeline_role_name: str,
    codepipeline_role_path: str,
    codebuild_role_name: str,
    codebuild_role_path: str,
    ssm_parameter_prefix: str,
    scm_provider: str,
    scm_connection_arn: str,
    scm_full_repository_id: str,
    scm_branch_name: str,
    scm_bucket_name: str,
    scm_object_key: str,
    scm_skip_creation_of_repo: str,
    migrate_role_arn: str,
) -> troposphere.Template:
    """Build the CICD CloudFormation template for ``aws-organized migrate``.

    Produces a CodePipeline (source stage chosen by ``scm_provider``:
    codecommit / codestarsourceconnection / s3) feeding a CodeBuild project
    that runs ``aws-organized migrate`` with ``migrate_role_arn``.

    Returns:
        The assembled ``troposphere.Template``.
    """
    # Pin the buildspec to the currently installed aws-organized version.
    version = pkg_resources.get_distribution("aws-organized").version
    t = troposphere.Template()
    t.set_description(
        "CICD template that runs aws organized migrate for the given branch of the given repo"
    )
    project_name = "AWSOrganized-Migrate"
    bucket_name = scm_bucket_name
    # NOTE(review): scm_skip_creation_of_repo is annotated `str` but compared
    # with `is False`; callers presumably pass an actual bool — confirm.
    if scm_provider.lower(
    ) == "codecommit" and scm_skip_creation_of_repo is False:
        t.add_resource(
            codecommit.Repository("Repository",
                                  RepositoryName=scm_full_repository_id))
    if scm_provider.lower() == "s3" and scm_skip_creation_of_repo is False:
        # Fall back to a generated, account-scoped source bucket name when
        # none was supplied.
        bucket_name = (
            scm_bucket_name if scm_bucket_name else
            troposphere.Sub("aws-organized-pipeline-source-${AWS::AccountId}"))
        t.add_resource(
            s3.Bucket(
                "Source",
                BucketName=bucket_name,
                VersioningConfiguration=s3.VersioningConfiguration(
                    Status="Enabled"),
                BucketEncryption=s3.BucketEncryption(
                    ServerSideEncryptionConfiguration=[
                        s3.ServerSideEncryptionRule(
                            ServerSideEncryptionByDefault=s3.
                            ServerSideEncryptionByDefault(
                                SSEAlgorithm="AES256"))
                    ]),
            ))
    # Versioned, encrypted bucket for pipeline artifacts.
    artifact_store = t.add_resource(
        s3.Bucket(
            "ArtifactStore",
            VersioningConfiguration=s3.VersioningConfiguration(
                Status="Enabled"),
            BucketEncryption=s3.BucketEncryption(
                ServerSideEncryptionConfiguration=[
                    s3.ServerSideEncryptionRule(
                        ServerSideEncryptionByDefault=s3.
                        ServerSideEncryptionByDefault(SSEAlgorithm="AES256"))
                ]),
        ))
    # Role assumed by CodePipeline itself.
    codepipeline_role = t.add_resource(
        iam.Role(
            "CodePipelineRole",
            RoleName=codepipeline_role_name,
            Path=codepipeline_role_path,
            ManagedPolicyArns=["arn:aws:iam::aws:policy/AdministratorAccess"],
            AssumeRolePolicyDocument=aws.PolicyDocument(
                Version="2012-10-17",
                Statement=[
                    aws.Statement(
                        Effect=aws.Allow,
                        Action=[awacs_sts.AssumeRole],
                        Principal=aws.Principal(
                            "Service", ["codepipeline.amazonaws.com"]),
                    )
                ],
            ),
        ))
    # Role assumed by the CodeBuild migrate project.
    codebuild_role = t.add_resource(
        iam.Role(
            "CodeBuildRole",
            RoleName=codebuild_role_name,
            Path=codebuild_role_path,
            ManagedPolicyArns=["arn:aws:iam::aws:policy/AdministratorAccess"],
            AssumeRolePolicyDocument=aws.PolicyDocument(
                Version="2012-10-17",
                Statement=[
                    aws.Statement(
                        Effect=aws.Allow,
                        Action=[awacs_sts.AssumeRole],
                        Principal=aws.Principal("Service",
                                                ["codebuild.amazonaws.com"]),
                    )
                ],
            ),
        ))
    # SSM parameter holding the tool version; the build reads it via
    # PARAMETER_STORE so redeploys pick up version bumps.
    version_parameter = ssm.Parameter(
        "versionparameter",
        Name=f"{ssm_parameter_prefix}/version",
        Type="String",
        Value=version,
    )
    t.add_resource(version_parameter)
    # CodeBuild project that installs aws-organized and runs the migrate
    # command against MIGRATE_ROLE_ARN.
    project = t.add_resource(
        codebuild.Project(
            "AWSOrganizedMigrate",
            Artifacts=codebuild.Artifacts(Type="CODEPIPELINE"),
            Environment=codebuild.Environment(
                ComputeType="BUILD_GENERAL1_SMALL",
                Image="aws/codebuild/standard:4.0",
                Type="LINUX_CONTAINER",
                EnvironmentVariables=[
                    {
                        "Name": "MIGRATE_ROLE_ARN",
                        "Type": "PLAINTEXT",
                        "Value": migrate_role_arn,
                    },
                    {
                        "Name": "Version",
                        "Type": "PARAMETER_STORE",
                        "Value": troposphere.Ref(version_parameter),
                    },
                    {
                        "Name": "SSM_PARAMETER_PREFIX",
                        "Type": "PLAINTEXT",
                        "Value": ssm_parameter_prefix,
                    },
                ],
            ),
            Name=project_name,
            ServiceRole=troposphere.GetAtt(codebuild_role, "Arn"),
            Source=codebuild.Source(
                Type="CODEPIPELINE",
                BuildSpec=yaml.safe_dump(
                    dict(
                        version="0.2",
                        phases=dict(
                            install={
                                "runtime-versions": dict(python="3.8"),
                                "commands":
                                ["pip install aws-organized==${Version}"],
                            },
                            build={
                                "commands": [
                                    "aws-organized migrate --ssm-parameter-prefix $SSM_PARAMETER_PREFIX $MIGRATE_ROLE_ARN"
                                ]
                            },
                        ),
                        artifacts=dict(files=["environment"]),
                    )),
            ),
        ))
    # Pick the source action matching the configured SCM provider; .get()
    # yields None for an unknown provider.
    # NOTE(review): the S3 action uses boolean True for PollForSourceChanges
    # while the CodeCommit action uses the string "true" — confirm both are
    # accepted by CloudFormation as intended.
    source_actions = dict(
        codecommit=codepipeline.Actions(
            Name="SourceAction",
            ActionTypeId=codepipeline.ActionTypeId(Category="Source",
                                                   Owner="AWS",
                                                   Version="1",
                                                   Provider="CodeCommit"),
            OutputArtifacts=[
                codepipeline.OutputArtifacts(Name="SourceOutput")
            ],
            Configuration={
                "RepositoryName": scm_full_repository_id,
                "BranchName": scm_branch_name,
                "PollForSourceChanges": "true",
            },
            RunOrder="1",
        ),
        codestarsourceconnection=codepipeline.Actions(
            Name="SourceAction",
            ActionTypeId=codepipeline.ActionTypeId(
                Category="Source",
                Owner="AWS",
                Version="1",
                Provider="CodeStarSourceConnection",
            ),
            OutputArtifacts=[
                codepipeline.OutputArtifacts(Name="SourceOutput")
            ],
            Configuration={
                "ConnectionArn": scm_connection_arn,
                "FullRepositoryId": scm_full_repository_id,
                "BranchName": scm_branch_name,
                "OutputArtifactFormat": "CODE_ZIP",
            },
            RunOrder="1",
        ),
        s3=codepipeline.Actions(
            Name="SourceAction",
            ActionTypeId=codepipeline.ActionTypeId(Category="Source",
                                                   Owner="AWS",
                                                   Version="1",
                                                   Provider="S3"),
            OutputArtifacts=[
                codepipeline.OutputArtifacts(Name="SourceOutput")
            ],
            Configuration={
                "S3Bucket": bucket_name,
                "S3ObjectKey": scm_object_key,
                "PollForSourceChanges": True,
            },
            RunOrder="1",
        ),
    ).get(scm_provider.lower())
    # Two-stage pipeline: Source -> Migrate (CodeBuild).
    t.add_resource(
        codepipeline.Pipeline(
            "Pipeline",
            RoleArn=troposphere.GetAtt(codepipeline_role, "Arn"),
            Stages=[
                codepipeline.Stages(Name="Source", Actions=[source_actions]),
                codepipeline.Stages(
                    Name="Migrate",
                    Actions=[
                        codepipeline.Actions(
                            Name="Migrate",
                            InputArtifacts=[
                                codepipeline.InputArtifacts(
                                    Name="SourceOutput")
                            ],
                            ActionTypeId=codepipeline.ActionTypeId(
                                Category="Build",
                                Owner="AWS",
                                Version="1",
                                Provider="CodeBuild",
                            ),
                            Configuration={
                                "ProjectName": troposphere.Ref(project),
                                "PrimarySource": "SourceAction",
                            },
                            RunOrder="1",
                        )
                    ],
                ),
            ],
            ArtifactStore=codepipeline.ArtifactStore(
                Type="S3", Location=troposphere.Ref(artifact_store)),
        ))
    return t
def get_template( puppet_version, all_regions, source, is_caching_enabled, is_manual_approvals: bool, scm_skip_creation_of_repo: bool, should_validate: bool, ) -> t.Template: is_codecommit = source.get("Provider", "").lower() == "codecommit" is_github = source.get("Provider", "").lower() == "github" is_codestarsourceconnection = (source.get( "Provider", "").lower() == "codestarsourceconnection") is_custom = (source.get("Provider", "").lower() == "custom") is_s3 = source.get("Provider", "").lower() == "s3" description = f"""Bootstrap template used to bring up the main ServiceCatalog-Puppet AWS CodePipeline with dependencies {{"version": "{puppet_version}", "framework": "servicecatalog-puppet", "role": "bootstrap-master"}}""" template = t.Template(Description=description) version_parameter = template.add_parameter( t.Parameter("Version", Default=puppet_version, Type="String")) org_iam_role_arn_parameter = template.add_parameter( t.Parameter("OrgIamRoleArn", Default="None", Type="String")) with_manual_approvals_parameter = template.add_parameter( t.Parameter( "WithManualApprovals", Type="String", AllowedValues=["Yes", "No"], Default="No", )) puppet_code_pipeline_role_permission_boundary_parameter = template.add_parameter( t.Parameter( "PuppetCodePipelineRolePermissionBoundary", Type="String", Description= "IAM Permission Boundary to apply to the PuppetCodePipelineRole", Default=awscs_iam.ARN(resource="policy/AdministratorAccess").data, )) source_role_permissions_boundary_parameter = template.add_parameter( t.Parameter( "SourceRolePermissionsBoundary", Type="String", Description="IAM Permission Boundary to apply to the SourceRole", Default=awscs_iam.ARN(resource="policy/AdministratorAccess").data, )) puppet_generate_role_permission_boundary_parameter = template.add_parameter( t.Parameter( "PuppetGenerateRolePermissionBoundary", Type="String", Description= "IAM Permission Boundary to apply to the PuppetGenerateRole", 
Default=awscs_iam.ARN(resource="policy/AdministratorAccess").data, )) puppet_deploy_role_permission_boundary_parameter = template.add_parameter( t.Parameter( "PuppetDeployRolePermissionBoundary", Type="String", Description= "IAM Permission Boundary to apply to the PuppetDeployRole", Default=awscs_iam.ARN(resource="policy/AdministratorAccess").data, )) puppet_provisioning_role_permissions_boundary_parameter = template.add_parameter( t.Parameter( "PuppetProvisioningRolePermissionsBoundary", Type="String", Description= "IAM Permission Boundary to apply to the PuppetProvisioningRole", Default=awscs_iam.ARN(resource="policy/AdministratorAccess").data, )) cloud_formation_deploy_role_permissions_boundary_parameter = template.add_parameter( t.Parameter( "CloudFormationDeployRolePermissionsBoundary", Type="String", Description= "IAM Permission Boundary to apply to the CloudFormationDeployRole", Default=awscs_iam.ARN(resource="policy/AdministratorAccess").data, )) deploy_environment_compute_type_parameter = template.add_parameter( t.Parameter( "DeployEnvironmentComputeType", Type="String", Description="The AWS CodeBuild Environment Compute Type", Default="BUILD_GENERAL1_SMALL", )) spoke_deploy_environment_compute_type_parameter = template.add_parameter( t.Parameter( "SpokeDeployEnvironmentComputeType", Type="String", Description= "The AWS CodeBuild Environment Compute Type for spoke execution mode", Default="BUILD_GENERAL1_SMALL", )) deploy_num_workers_parameter = template.add_parameter( t.Parameter( "DeployNumWorkers", Type="Number", Description= "Number of workers that should be used when running a deploy", Default=10, )) puppet_role_name_parameter = template.add_parameter( t.Parameter("PuppetRoleName", Type="String", Default="PuppetRole")) puppet_role_path_template_parameter = template.add_parameter( t.Parameter("PuppetRolePath", Type="String", Default="/servicecatalog-puppet/")) template.add_condition( "ShouldUseOrgs", t.Not(t.Equals(t.Ref(org_iam_role_arn_parameter), 
"None"))) template.add_condition( "HasManualApprovals", t.Equals(t.Ref(with_manual_approvals_parameter), "Yes")) template.add_resource( s3.Bucket( "StacksRepository", BucketName=t.Sub("sc-puppet-stacks-repository-${AWS::AccountId}"), VersioningConfiguration=s3.VersioningConfiguration( Status="Enabled"), BucketEncryption=s3.BucketEncryption( ServerSideEncryptionConfiguration=[ s3.ServerSideEncryptionRule( ServerSideEncryptionByDefault=s3. ServerSideEncryptionByDefault(SSEAlgorithm="AES256")) ]), PublicAccessBlockConfiguration=s3.PublicAccessBlockConfiguration( BlockPublicAcls=True, BlockPublicPolicy=True, IgnorePublicAcls=True, RestrictPublicBuckets=True, ), Tags=t.Tags({"ServiceCatalogPuppet:Actor": "Framework"}), )) manual_approvals_param = template.add_resource( ssm.Parameter( "ManualApprovalsParam", Type="String", Name="/servicecatalog-puppet/manual-approvals", Value=t.Ref(with_manual_approvals_parameter), )) template.add_resource( ssm.Parameter( "SpokeDeployEnvParameter", Type="String", Name=constants.SPOKE_EXECUTION_MODE_DEPLOY_ENV_PARAMETER_NAME, Value=t.Ref(spoke_deploy_environment_compute_type_parameter), )) param = template.add_resource( ssm.Parameter( "Param", Type="String", Name="service-catalog-puppet-version", Value=t.Ref(version_parameter), )) partition_parameter = template.add_resource( ssm.Parameter( "PartitionParameter", Type="String", Name="/servicecatalog-puppet/partition", Value=t.Ref("AWS::Partition"), )) puppet_role_name_parameter = template.add_resource( ssm.Parameter( "PuppetRoleNameParameter", Type="String", Name="/servicecatalog-puppet/puppet-role/name", Value=t.Ref(puppet_role_name_parameter), )) puppet_role_path_parameter = template.add_resource( ssm.Parameter( "PuppetRolePathParameter", Type="String", Name="/servicecatalog-puppet/puppet-role/path", Value=t.Ref(puppet_role_path_template_parameter), )) share_accept_function_role = template.add_resource( iam.Role( "ShareAcceptFunctionRole", RoleName="ShareAcceptFunctionRole", 
ManagedPolicyArns=[ t.Sub( "arn:${AWS::Partition}:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole" ) ], Path=t.Ref(puppet_role_path_template_parameter), Policies=[ iam.Policy( PolicyName="ServiceCatalogActions", PolicyDocument={ "Version": "2012-10-17", "Statement": [{ "Action": ["sts:AssumeRole"], "Resource": { "Fn::Sub": "arn:${AWS::Partition}:iam::*:role${PuppetRolePath}${PuppetRoleName}" }, "Effect": "Allow", }], }, ) ], AssumeRolePolicyDocument={ "Version": "2012-10-17", "Statement": [{ "Action": ["sts:AssumeRole"], "Effect": "Allow", "Principal": { "Service": ["lambda.amazonaws.com"] }, }], }, )) provisioning_role = template.add_resource( iam.Role( "ProvisioningRole", RoleName="PuppetProvisioningRole", AssumeRolePolicyDocument={ "Version": "2012-10-17", "Statement": [ { "Action": ["sts:AssumeRole"], "Effect": "Allow", "Principal": { "Service": ["codebuild.amazonaws.com"] }, }, { "Action": ["sts:AssumeRole"], "Effect": "Allow", "Principal": { "AWS": { "Fn::Sub": "${AWS::AccountId}" } }, }, ], }, ManagedPolicyArns=[ t.Sub( "arn:${AWS::Partition}:iam::aws:policy/AdministratorAccess" ) ], PermissionsBoundary=t.Ref( puppet_provisioning_role_permissions_boundary_parameter), Path=t.Ref(puppet_role_path_template_parameter), )) cloud_formation_deploy_role = template.add_resource( iam.Role( "CloudFormationDeployRole", RoleName="CloudFormationDeployRole", AssumeRolePolicyDocument={ "Version": "2012-10-17", "Statement": [ { "Action": ["sts:AssumeRole"], "Effect": "Allow", "Principal": { "Service": ["cloudformation.amazonaws.com"] }, }, { "Action": ["sts:AssumeRole"], "Effect": "Allow", "Principal": { "AWS": { "Fn::Sub": "${AWS::AccountId}" } }, }, ], }, ManagedPolicyArns=[ t.Sub( "arn:${AWS::Partition}:iam::aws:policy/AdministratorAccess" ) ], PermissionsBoundary=t.Ref( cloud_formation_deploy_role_permissions_boundary_parameter), Path=t.Ref(puppet_role_path_template_parameter), )) pipeline_role = template.add_resource( iam.Role( "PipelineRole", 
RoleName="PuppetCodePipelineRole", AssumeRolePolicyDocument={ "Version": "2012-10-17", "Statement": [{ "Action": ["sts:AssumeRole"], "Effect": "Allow", "Principal": { "Service": ["codepipeline.amazonaws.com"] }, }], }, ManagedPolicyArns=[ t.Sub( "arn:${AWS::Partition}:iam::aws:policy/AdministratorAccess" ) ], PermissionsBoundary=t.Ref( puppet_code_pipeline_role_permission_boundary_parameter), Path=t.Ref(puppet_role_path_template_parameter), )) source_role = template.add_resource( iam.Role( "SourceRole", RoleName="PuppetSourceRole", AssumeRolePolicyDocument={ "Version": "2012-10-17", "Statement": [ { "Action": ["sts:AssumeRole"], "Effect": "Allow", "Principal": { "Service": ["codepipeline.amazonaws.com"] }, }, { "Action": ["sts:AssumeRole"], "Effect": "Allow", "Principal": { "AWS": { "Fn::Sub": "arn:${AWS::Partition}:iam::${AWS::AccountId}:root" } }, }, ], }, ManagedPolicyArns=[ t.Sub( "arn:${AWS::Partition}:iam::aws:policy/AdministratorAccess" ) ], PermissionsBoundary=t.Ref( source_role_permissions_boundary_parameter), Path=t.Ref(puppet_role_path_template_parameter), )) dry_run_notification_topic = template.add_resource( sns.Topic( "DryRunNotificationTopic", DisplayName="service-catalog-puppet-dry-run-approvals", TopicName="service-catalog-puppet-dry-run-approvals", Condition="HasManualApprovals", )) deploy_role = template.add_resource( iam.Role( "DeployRole", RoleName="PuppetDeployRole", AssumeRolePolicyDocument={ "Version": "2012-10-17", "Statement": [{ "Action": ["sts:AssumeRole"], "Effect": "Allow", "Principal": { "Service": ["codebuild.amazonaws.com"] }, }], }, ManagedPolicyArns=[ t.Sub( "arn:${AWS::Partition}:iam::aws:policy/AdministratorAccess" ) ], PermissionsBoundary=t.Ref( puppet_deploy_role_permission_boundary_parameter), Path=t.Ref(puppet_role_path_template_parameter), )) num_workers_ssm_parameter = template.add_resource( ssm.Parameter( "NumWorkersSSMParameter", Type="String", Name="/servicecatalog-puppet/deploy/num-workers", 
Value=t.Sub("${DeployNumWorkers}"), )) parameterised_source_bucket = template.add_resource( s3.Bucket( "ParameterisedSourceBucket", PublicAccessBlockConfiguration=s3.PublicAccessBlockConfiguration( IgnorePublicAcls=True, BlockPublicPolicy=True, BlockPublicAcls=True, RestrictPublicBuckets=True, ), BucketEncryption=s3.BucketEncryption( ServerSideEncryptionConfiguration=[ s3.ServerSideEncryptionRule( ServerSideEncryptionByDefault=s3. ServerSideEncryptionByDefault(SSEAlgorithm="AES256")) ]), Tags=t.Tags.from_dict( **{"ServiceCatalogPuppet:Actor": "Framework"}), BucketName=t.Sub("sc-puppet-parameterised-runs-${AWS::AccountId}"), VersioningConfiguration=s3.VersioningConfiguration( Status="Enabled"), )) source_stage = codepipeline.Stages( Name="Source", Actions=[ codepipeline.Actions( RunOrder=1, RoleArn=t.GetAtt("SourceRole", "Arn"), ActionTypeId=codepipeline.ActionTypeId( Category="Source", Owner="AWS", Version="1", Provider="S3", ), OutputArtifacts=[ codepipeline.OutputArtifacts(Name="ParameterisedSource") ], Configuration={ "S3Bucket": t.Ref(parameterised_source_bucket), "S3ObjectKey": "parameters.zip", "PollForSourceChanges": True, }, Name="ParameterisedSource", ) ], ) install_spec = { "runtime-versions": dict(python="3.7"), "commands": [ f"pip install {puppet_version}" if "http" in puppet_version else f"pip install aws-service-catalog-puppet=={puppet_version}", ], } deploy_env_vars = [ { "Type": "PLAINTEXT", "Name": "PUPPET_ACCOUNT_ID", "Value": t.Ref("AWS::AccountId"), }, { "Type": "PLAINTEXT", "Name": "PUPPET_REGION", "Value": t.Ref("AWS::Region"), }, { "Type": "PARAMETER_STORE", "Name": "PARTITION", "Value": t.Ref(partition_parameter), }, { "Type": "PARAMETER_STORE", "Name": "PUPPET_ROLE_NAME", "Value": t.Ref(puppet_role_name_parameter), }, { "Type": "PARAMETER_STORE", "Name": "PUPPET_ROLE_PATH", "Value": t.Ref(puppet_role_path_parameter), }, ] if is_codecommit: template.add_resource( codecommit.Repository( "CodeRepo", 
RepositoryName=source.get("Configuration").get( "RepositoryName"), RepositoryDescription= "Repo to store the servicecatalog puppet solution", DeletionPolicy="Retain", )) source_stage.Actions.append( codepipeline.Actions( RunOrder=1, RoleArn=t.GetAtt("SourceRole", "Arn"), ActionTypeId=codepipeline.ActionTypeId( Category="Source", Owner="AWS", Version="1", Provider="CodeCommit", ), OutputArtifacts=[codepipeline.OutputArtifacts(Name="Source")], Configuration={ "RepositoryName": source.get("Configuration").get("RepositoryName"), "BranchName": source.get("Configuration").get("BranchName"), "PollForSourceChanges": source.get("Configuration").get("PollForSourceChanges", True), }, Name="Source", )) if is_github: source_stage.Actions.append( codepipeline.Actions( RunOrder=1, ActionTypeId=codepipeline.ActionTypeId( Category="Source", Owner="ThirdParty", Version="1", Provider="GitHub", ), OutputArtifacts=[codepipeline.OutputArtifacts(Name="Source")], Configuration={ "Owner": source.get("Configuration").get("Owner"), "Repo": source.get("Configuration").get("Repo"), "Branch": source.get("Configuration").get("Branch"), "OAuthToken": t.Join( "", [ "{{resolve:secretsmanager:", source.get("Configuration").get( "SecretsManagerSecret"), ":SecretString:OAuthToken}}", ], ), "PollForSourceChanges": source.get("Configuration").get("PollForSourceChanges"), }, Name="Source", )) if is_custom: source_stage.Actions.append( codepipeline.Actions( RunOrder=1, ActionTypeId=codepipeline.ActionTypeId( Category="Source", Owner="Custom", Version=source.get("Configuration").get( "CustomActionTypeVersion"), Provider=source.get("Configuration").get( "CustomActionTypeProvider"), ), OutputArtifacts=[codepipeline.OutputArtifacts(Name="Source")], Configuration={ "GitUrl": source.get("Configuration").get("GitUrl"), "Branch": source.get("Configuration").get("Branch"), "PipelineName": t.Sub("${AWS::StackName}-pipeline"), }, Name="Source", )) webhook = codepipeline.Webhook( "Webhook", Authentication="IP", 
TargetAction="Source", AuthenticationConfiguration=codepipeline.WebhookAuthConfiguration( AllowedIPRange=source.get("Configuration").get( "GitWebHookIpAddress")), Filters=[ codepipeline.WebhookFilterRule( JsonPath="$.changes[0].ref.id", MatchEquals="refs/heads/{Branch}") ], TargetPipelineVersion=1, TargetPipeline=t.Sub("${AWS::StackName}-pipeline"), ) template.add_resource(webhook) values_for_sub = { "GitUrl": source.get("Configuration").get("GitUrl"), "WebhookUrl": t.GetAtt(webhook, "Url"), } output_to_add = t.Output("WebhookUrl") output_to_add.Value = t.Sub("${GitUrl}||${WebhookUrl}", **values_for_sub) output_to_add.Export = t.Export(t.Sub("${AWS::StackName}-pipeline")) template.add_output(output_to_add) if is_codestarsourceconnection: source_stage.Actions.append( codepipeline.Actions( RunOrder=1, RoleArn=t.GetAtt("SourceRole", "Arn"), ActionTypeId=codepipeline.ActionTypeId( Category="Source", Owner="AWS", Version="1", Provider="CodeStarSourceConnection", ), OutputArtifacts=[codepipeline.OutputArtifacts(Name="Source")], Configuration={ "ConnectionArn": source.get("Configuration").get("ConnectionArn"), "FullRepositoryId": source.get("Configuration").get("FullRepositoryId"), "BranchName": source.get("Configuration").get("BranchName"), "OutputArtifactFormat": source.get("Configuration").get("OutputArtifactFormat"), }, Name="Source", )) if is_s3: bucket_name = source.get("Configuration").get("S3Bucket") if not scm_skip_creation_of_repo: template.add_resource( s3.Bucket( bucket_name, PublicAccessBlockConfiguration=s3. PublicAccessBlockConfiguration( IgnorePublicAcls=True, BlockPublicPolicy=True, BlockPublicAcls=True, RestrictPublicBuckets=True, ), BucketEncryption=s3.BucketEncryption( ServerSideEncryptionConfiguration=[ s3.ServerSideEncryptionRule( ServerSideEncryptionByDefault=s3. 
ServerSideEncryptionByDefault( SSEAlgorithm="AES256")) ]), Tags=t.Tags.from_dict( **{"ServiceCatalogPuppet:Actor": "Framework"}), BucketName=bucket_name, VersioningConfiguration=s3.VersioningConfiguration( Status="Enabled"), )) source_stage.Actions.append( codepipeline.Actions( RunOrder=1, ActionTypeId=codepipeline.ActionTypeId( Category="Source", Owner="AWS", Version="1", Provider="S3", ), OutputArtifacts=[codepipeline.OutputArtifacts(Name="Source")], Configuration={ "S3Bucket": bucket_name, "S3ObjectKey": source.get("Configuration").get("S3ObjectKey"), "PollForSourceChanges": source.get("Configuration").get("PollForSourceChanges"), }, Name="Source", )) single_account_run_project_build_spec = dict( version=0.2, phases=dict( install=install_spec, build={ "commands": [ 'echo "single_account: \\"${SINGLE_ACCOUNT_ID}\\"" > parameters.yaml', "cat parameters.yaml", "zip parameters.zip parameters.yaml", "aws s3 cp parameters.zip s3://sc-puppet-parameterised-runs-${PUPPET_ACCOUNT_ID}/parameters.zip", ] }, post_build={ "commands": [ "servicecatalog-puppet wait-for-parameterised-run-to-complete", ] }, ), artifacts=dict( name="DeployProject", files=[ "ServiceCatalogPuppet/manifest.yaml", "ServiceCatalogPuppet/manifest-expanded.yaml", "results/*/*", "output/*/*", "exploded_results/*/*", "tasks.log", ], ), ) single_account_run_project_args = dict( Name="servicecatalog-puppet-single-account-run", Description="Runs puppet for a single account - SINGLE_ACCOUNT_ID", ServiceRole=t.GetAtt(deploy_role, "Arn"), Tags=t.Tags.from_dict(**{"ServiceCatalogPuppet:Actor": "Framework"}), Artifacts=codebuild.Artifacts(Type="NO_ARTIFACTS", ), TimeoutInMinutes=480, Environment=codebuild.Environment( ComputeType=t.Ref(deploy_environment_compute_type_parameter), Image="aws/codebuild/standard:4.0", Type="LINUX_CONTAINER", EnvironmentVariables=[ { "Type": "PLAINTEXT", "Name": "SINGLE_ACCOUNT_ID", "Value": "CHANGE_ME", }, ] + deploy_env_vars, ), Source=codebuild.Source( Type="NO_SOURCE", 
BuildSpec=yaml.safe_dump(single_account_run_project_build_spec), ), ) single_account_run_project = template.add_resource( codebuild.Project("SingleAccountRunProject", **single_account_run_project_args)) single_account_run_project_build_spec["phases"]["post_build"]["commands"] = [ "servicecatalog-puppet wait-for-parameterised-run-to-complete --on-complete-url $CALLBACK_URL" ] single_account_run_project_args[ "Name"] = "servicecatalog-puppet-single-account-run-with-callback" single_account_run_project_args[ "Description"] = "Runs puppet for a single account - SINGLE_ACCOUNT_ID and then does a http put" single_account_run_project_args.get( "Environment").EnvironmentVariables.append({ "Type": "PLAINTEXT", "Name": "CALLBACK_URL", "Value": "CHANGE_ME", }) single_account_run_project_args["Source"] = codebuild.Source( Type="NO_SOURCE", BuildSpec=yaml.safe_dump(single_account_run_project_build_spec), ) single_account_run_project_with_callback = template.add_resource( codebuild.Project("SingleAccountRunWithCallbackProject", **single_account_run_project_args)) stages = [source_stage] if should_validate: template.add_resource( codebuild.Project( "ValidateProject", Name="servicecatalog-puppet-validate", ServiceRole=t.GetAtt("DeployRole", "Arn"), Tags=t.Tags.from_dict( **{"ServiceCatalogPuppet:Actor": "Framework"}), Artifacts=codebuild.Artifacts(Type="CODEPIPELINE"), TimeoutInMinutes=60, Environment=codebuild.Environment( ComputeType="BUILD_GENERAL1_SMALL", Image="aws/codebuild/standard:4.0", Type="LINUX_CONTAINER", ), Source=codebuild.Source( BuildSpec=yaml.safe_dump( dict( version="0.2", phases={ "install": { "runtime-versions": { "python": "3.7", }, "commands": [ f"pip install {puppet_version}" if "http" in puppet_version else f"pip install aws-service-catalog-puppet=={puppet_version}", ], }, "build": { "commands": [ "servicecatalog-puppet validate manifest.yaml" ] }, }, )), Type="CODEPIPELINE", ), Description="Validate the manifest.yaml file", )) stages.append( 
codepipeline.Stages( Name="Validate", Actions=[ codepipeline.Actions( InputArtifacts=[ codepipeline.InputArtifacts(Name="Source"), ], Name="Validate", ActionTypeId=codepipeline.ActionTypeId( Category="Build", Owner="AWS", Version="1", Provider="CodeBuild", ), OutputArtifacts=[ codepipeline.OutputArtifacts( Name="ValidateProject") ], Configuration={ "ProjectName": t.Ref("ValidateProject"), "PrimarySource": "Source", }, RunOrder=1, ), ], )) if is_manual_approvals: deploy_stage = codepipeline.Stages( Name="Deploy", Actions=[ codepipeline.Actions( InputArtifacts=[ codepipeline.InputArtifacts(Name="Source"), codepipeline.InputArtifacts( Name="ParameterisedSource"), ], Name="DryRun", ActionTypeId=codepipeline.ActionTypeId( Category="Build", Owner="AWS", Version="1", Provider="CodeBuild", ), OutputArtifacts=[ codepipeline.OutputArtifacts(Name="DryRunProject") ], Configuration={ "ProjectName": t.Ref("DryRunProject"), "PrimarySource": "Source", }, RunOrder=1, ), codepipeline.Actions( ActionTypeId=codepipeline.ActionTypeId( Category="Approval", Owner="AWS", Version="1", Provider="Manual", ), Configuration={ "NotificationArn": t.Ref("DryRunNotificationTopic"), "CustomData": "Approve when you are happy with the dry run.", }, Name="DryRunApproval", RunOrder=2, ), codepipeline.Actions( InputArtifacts=[ codepipeline.InputArtifacts(Name="Source"), codepipeline.InputArtifacts( Name="ParameterisedSource"), ], Name="Deploy", ActionTypeId=codepipeline.ActionTypeId( Category="Build", Owner="AWS", Version="1", Provider="CodeBuild", ), OutputArtifacts=[ codepipeline.OutputArtifacts(Name="DeployProject") ], Configuration={ "ProjectName": t.Ref("DeployProject"), "PrimarySource": "Source", }, RunOrder=3, ), ], ) else: deploy_stage = codepipeline.Stages( Name="Deploy", Actions=[ codepipeline.Actions( InputArtifacts=[ codepipeline.InputArtifacts(Name="Source"), codepipeline.InputArtifacts( Name="ParameterisedSource"), ], Name="Deploy", ActionTypeId=codepipeline.ActionTypeId( Category="Build", 
Owner="AWS", Version="1", Provider="CodeBuild", ), OutputArtifacts=[ codepipeline.OutputArtifacts(Name="DeployProject") ], Configuration={ "ProjectName": t.Ref("DeployProject"), "PrimarySource": "Source", "EnvironmentVariables": '[{"name":"EXECUTION_ID","value":"#{codepipeline.PipelineExecutionId}","type":"PLAINTEXT"}]', }, RunOrder=1, ), ], ) stages.append(deploy_stage) pipeline = template.add_resource( codepipeline.Pipeline( "Pipeline", RoleArn=t.GetAtt("PipelineRole", "Arn"), Stages=stages, Name=t.Sub("${AWS::StackName}-pipeline"), ArtifactStore=codepipeline.ArtifactStore( Type="S3", Location=t.Sub( "sc-puppet-pipeline-artifacts-${AWS::AccountId}-${AWS::Region}" ), ), RestartExecutionOnUpdate=True, )) if is_github: template.add_resource( codepipeline.Webhook( "Webhook", AuthenticationConfiguration=codepipeline. WebhookAuthConfiguration(SecretToken=t.Join( "", [ "{{resolve:secretsmanager:", source.get("Configuration").get( "SecretsManagerSecret"), ":SecretString:SecretToken}}", ], )), Filters=[ codepipeline.WebhookFilterRule( JsonPath="$.ref", MatchEquals="refs/heads/" + source.get("Configuration").get("Branch"), ) ], Authentication="GITHUB_HMAC", TargetPipeline=t.Ref(pipeline), TargetAction="Source", Name=t.Sub("${AWS::StackName}-webhook"), TargetPipelineVersion=t.GetAtt(pipeline, "Version"), RegisterWithThirdParty="true", )) deploy_project_build_spec = dict( version=0.2, phases=dict( install={ "runtime-versions": dict(python="3.7"), "commands": [ f"pip install {puppet_version}" if "http" in puppet_version else f"pip install aws-service-catalog-puppet=={puppet_version}", ], }, pre_build={ "commands": [ "servicecatalog-puppet --info expand --parameter-override-file $CODEBUILD_SRC_DIR_ParameterisedSource/parameters.yaml manifest.yaml", ] }, build={ "commands": [ "servicecatalog-puppet --info deploy --num-workers ${NUM_WORKERS} manifest-expanded.yaml", ] }, ), artifacts=dict( name="DeployProject", files=[ "manifest-expanded.yaml", "results/*/*", "output/*/*", 
"exploded_results/*/*", "tasks.log", ], ), ) deploy_project_args = dict( Name="servicecatalog-puppet-deploy", ServiceRole=t.GetAtt(deploy_role, "Arn"), Tags=t.Tags.from_dict(**{"ServiceCatalogPuppet:Actor": "Framework"}), Artifacts=codebuild.Artifacts(Type="CODEPIPELINE", ), TimeoutInMinutes=480, Environment=codebuild.Environment( ComputeType=t.Ref(deploy_environment_compute_type_parameter), Image="aws/codebuild/standard:4.0", Type="LINUX_CONTAINER", EnvironmentVariables=[ { "Type": "PARAMETER_STORE", "Name": "NUM_WORKERS", "Value": t.Ref(num_workers_ssm_parameter), }, { "Type": "PARAMETER_STORE", "Name": "SPOKE_EXECUTION_MODE_DEPLOY_ENV", "Value": constants.SPOKE_EXECUTION_MODE_DEPLOY_ENV_PARAMETER_NAME, }, ] + deploy_env_vars, ), Source=codebuild.Source( Type="CODEPIPELINE", BuildSpec=yaml.safe_dump(deploy_project_build_spec), ), Description="deploys out the products to be deployed", ) deploy_project = template.add_resource( codebuild.Project("DeployProject", **deploy_project_args)) deploy_project_build_spec["phases"]["build"]["commands"] = [ "servicecatalog-puppet --info dry-run manifest-expanded.yaml" ] deploy_project_build_spec["artifacts"]["name"] = "DryRunProject" deploy_project_args["Name"] = "servicecatalog-puppet-dryrun" deploy_project_args[ "Description"] = "dry run of servicecatalog-puppet-dryrun" deploy_project_args["Source"] = codebuild.Source( Type="CODEPIPELINE", BuildSpec=yaml.safe_dump(deploy_project_build_spec), ) dry_run_project = template.add_resource( codebuild.Project("DryRunProject", **deploy_project_args)) bootstrap_project = template.add_resource( codebuild.Project( "BootstrapProject", Name="servicecatalog-puppet-bootstrap-spokes-in-ou", ServiceRole=t.GetAtt("DeployRole", "Arn"), Tags=t.Tags.from_dict( **{"ServiceCatalogPuppet:Actor": "Framework"}), Artifacts=codebuild.Artifacts(Type="NO_ARTIFACTS"), TimeoutInMinutes=60, Environment=codebuild.Environment( ComputeType="BUILD_GENERAL1_SMALL", Image="aws/codebuild/standard:4.0", 
Type="LINUX_CONTAINER", EnvironmentVariables=[ { "Type": "PLAINTEXT", "Name": "OU_OR_PATH", "Value": "CHANGE_ME" }, { "Type": "PLAINTEXT", "Name": "IAM_ROLE_NAME", "Value": "OrganizationAccountAccessRole", }, { "Type": "PLAINTEXT", "Name": "IAM_ROLE_ARNS", "Value": "" }, ], ), Source=codebuild.Source( BuildSpec= "version: 0.2\nphases:\n install:\n runtime-versions:\n python: 3.7\n commands:\n - pip install aws-service-catalog-puppet\n build:\n commands:\n - servicecatalog-puppet bootstrap-spokes-in-ou $OU_OR_PATH $IAM_ROLE_NAME $IAM_ROLE_ARNS\nartifacts:\n files:\n - results/*/*\n - output/*/*\n name: BootstrapProject\n", Type="NO_SOURCE", ), Description="Bootstrap all the accounts in an OU", )) template.add_resource( codebuild.Project( "BootstrapASpokeProject", Name="servicecatalog-puppet-bootstrap-spoke", ServiceRole=t.GetAtt("DeployRole", "Arn"), Tags=t.Tags.from_dict( **{"ServiceCatalogPuppet:Actor": "Framework"}), Artifacts=codebuild.Artifacts(Type="NO_ARTIFACTS"), TimeoutInMinutes=60, Environment=codebuild.Environment( ComputeType="BUILD_GENERAL1_SMALL", Image="aws/codebuild/standard:4.0", Type="LINUX_CONTAINER", EnvironmentVariables=[ { "Type": "PLAINTEXT", "Name": "PUPPET_ACCOUNT_ID", "Value": t.Sub("${AWS::AccountId}"), }, { "Type": "PLAINTEXT", "Name": "ORGANIZATION_ACCOUNT_ACCESS_ROLE_ARN", "Value": "CHANGE_ME", }, { "Type": "PLAINTEXT", "Name": "ASSUMABLE_ROLE_IN_ROOT_ACCOUNT", "Value": "CHANGE_ME", }, ], ), Source=codebuild.Source( BuildSpec=yaml.safe_dump( dict( version=0.2, phases=dict( install=install_spec, build={ "commands": [ "servicecatalog-puppet bootstrap-spoke-as ${PUPPET_ACCOUNT_ID} ${ASSUMABLE_ROLE_IN_ROOT_ACCOUNT} ${ORGANIZATION_ACCOUNT_ACCESS_ROLE_ARN}" ] }, ), )), Type="NO_SOURCE", ), Description="Bootstrap given account as a spoke", )) cloud_formation_events_queue = template.add_resource( sqs.Queue( "CloudFormationEventsQueue", QueueName="servicecatalog-puppet-cloudformation-events", Tags=t.Tags.from_dict( 
**{"ServiceCatalogPuppet:Actor": "Framework"}), )) cloud_formation_events_queue_policy = template.add_resource( sqs.QueuePolicy( "CloudFormationEventsQueuePolicy", Queues=[t.Ref(cloud_formation_events_queue)], PolicyDocument={ "Id": "AllowSNS", "Version": "2012-10-17", "Statement": [{ "Sid": "allow-send-message", "Effect": "Allow", "Principal": { "AWS": "*" }, "Action": ["sqs:SendMessage"], "Resource": "*", "Condition": { "ArnEquals": { "aws:SourceArn": t.Sub( "arn:${AWS::Partition}:sns:*:${AWS::AccountId}:servicecatalog-puppet-cloudformation-regional-events" ) } }, }], }, )) spoke_deploy_bucket = template.add_resource( s3.Bucket( "SpokeDeployBucket", PublicAccessBlockConfiguration=s3.PublicAccessBlockConfiguration( IgnorePublicAcls=True, BlockPublicPolicy=True, BlockPublicAcls=True, RestrictPublicBuckets=True, ), BucketEncryption=s3.BucketEncryption( ServerSideEncryptionConfiguration=[ s3.ServerSideEncryptionRule( ServerSideEncryptionByDefault=s3. ServerSideEncryptionByDefault(SSEAlgorithm="AES256")) ]), Tags=t.Tags.from_dict( **{"ServiceCatalogPuppet:Actor": "Framework"}), BucketName=t.Sub("sc-puppet-spoke-deploy-${AWS::AccountId}"), VersioningConfiguration=s3.VersioningConfiguration( Status="Enabled"), )) caching_bucket = template.add_resource( s3.Bucket( "CachingBucket", PublicAccessBlockConfiguration=s3.PublicAccessBlockConfiguration( BlockPublicAcls=True, BlockPublicPolicy=True, IgnorePublicAcls=True, RestrictPublicBuckets=True, ), BucketEncryption=s3.BucketEncryption( ServerSideEncryptionConfiguration=[ s3.ServerSideEncryptionRule( ServerSideEncryptionByDefault=s3. 
ServerSideEncryptionByDefault(SSEAlgorithm="AES256")) ]), Tags=t.Tags.from_dict( **{"ServiceCatalogPuppet:Actor": "Framework"}), BucketName=t.Sub( "sc-puppet-caching-bucket-${AWS::AccountId}-${AWS::Region}"), VersioningConfiguration=s3.VersioningConfiguration( Status="Enabled"), )) template.add_output( t.Output( "CloudFormationEventsQueueArn", Value=t.GetAtt(cloud_formation_events_queue, "Arn"), )) template.add_output(t.Output("Version", Value=t.GetAtt(param, "Value"))) template.add_output( t.Output("ManualApprovalsParam", Value=t.GetAtt(manual_approvals_param, "Value"))) template.add_resource( ssm.Parameter( "DefaultTerraformVersion", Type="String", Name=constants.DEFAULT_TERRAFORM_VERSION_PARAMETER_NAME, Value=constants.DEFAULT_TERRAFORM_VERSION_VALUE, )) return template
def resources(self, stack: Stack) -> list[AWSObject]:
    """Construct and return a s3.Bucket and its associated s3.BucketPolicy."""
    extra_resources = []

    # Versioning is either explicitly enabled or suspended.
    versioning_status = "Enabled" if self.enable_versioning else "Suspended"

    # Deny every form of public access on the bucket.
    public_access_block = s3.PublicAccessBlockConfiguration(
        BlockPublicAcls=True,
        BlockPublicPolicy=True,
        IgnorePublicAcls=True,
        RestrictPublicBuckets=True,
    )

    # Default server-side encryption, when one is configured.
    encryption = None
    if self.default_bucket_encryption:
        default_rule = s3.ServerSideEncryptionRule(
            ServerSideEncryptionByDefault=s3.ServerSideEncryptionByDefault(
                SSEAlgorithm=self.default_bucket_encryption.value
            )
        )
        encryption = s3.BucketEncryption(
            ServerSideEncryptionConfiguration=[default_rule]
        )

    # Optional lifecycle configuration.
    lifecycle = None
    if self.lifecycle_rules:
        lifecycle = s3.LifecycleConfiguration(
            name_to_id(self.name) + "LifeCycleConfig", Rules=self.lifecycle_rules
        )

    # Notification setup may contribute additional resources (e.g. topics).
    notification_config, notification_resources = self.notification_setup
    extra_resources.extend(notification_resources)

    # Only forward attributes that carry a truthy value; the rest are
    # omitted entirely from the generated bucket.
    candidate_attrs = {
        "BucketName": self.name,
        "BucketEncryption": encryption,
        "PublicAccessBlockConfiguration": public_access_block,
        "VersioningConfiguration": s3.VersioningConfiguration(
            Status=versioning_status
        ),
        "LifecycleConfiguration": lifecycle,
        "NotificationConfiguration": notification_config,
        "DependsOn": self.depends_on,
    }
    bucket_attrs = {key: val for key, val in candidate_attrs.items() if val}

    return [
        s3.Bucket(name_to_id(self.name), **bucket_attrs),
        s3.BucketPolicy(
            name_to_id(self.name) + "Policy",
            Bucket=self.ref,
            PolicyDocument=self.policy_document.as_dict,
        ),
        *extra_resources,
    ]
def generate_codepipeline_template(
    codepipeline_role_name: str,
    codepipeline_role_path: str,
    codebuild_role_name: str,
    codebuild_role_path: str,
    output_format: str,
    migrate_role_arn: str,
) -> str:
    """Generate a CloudFormation template for an AWS-Organized migrate pipeline.

    The template contains a CodeCommit repository, an encrypted S3 artifact
    store, IAM roles for CodePipeline and CodeBuild, a CodeBuild project that
    runs ``aws-organized migrate`` and the CodePipeline wiring them together.

    :param codepipeline_role_name: name of the IAM role assumed by CodePipeline
    :param codepipeline_role_path: IAM path for the CodePipeline role
    :param codebuild_role_name: name of the IAM role assumed by CodeBuild
    :param codebuild_role_path: IAM path for the CodeBuild role
    :param output_format: "json" for JSON output; anything else yields YAML
    :param migrate_role_arn: ARN of the role assumed to run the migration
    :return: the rendered template as a string
    """
    t = troposphere.Template()
    t.set_description(
        "CICD template that runs aws organized migrate for the given branch of the given repo"
    )
    project_name = "AWSOrganized-Migrate"
    repository_name = "AWS-Organized-environment"
    repo = t.add_resource(
        codecommit.Repository("Repository", RepositoryName=repository_name))
    # Artifact store bucket, encrypted at rest with AES256.
    artifact_store = t.add_resource(
        s3.Bucket(
            "ArtifactStore",
            BucketEncryption=s3.BucketEncryption(
                ServerSideEncryptionConfiguration=[
                    s3.ServerSideEncryptionRule(
                        ServerSideEncryptionByDefault=s3.
                        ServerSideEncryptionByDefault(SSEAlgorithm="AES256"))
                ]),
        ))
    codepipeline_role = t.add_resource(
        iam.Role(
            "CodePipelineRole",
            RoleName=codepipeline_role_name,
            Path=codepipeline_role_path,
            ManagedPolicyArns=[
                "arn:aws:iam::aws:policy/AdministratorAccess",
            ],
            Policies=[
                iam.Policy(
                    PolicyName="executionpermissions",
                    PolicyDocument=aws.PolicyDocument(
                        Version="2012-10-17",
                        Id="executionpermissions",
                        Statement=[
                            # Pull the source repository.
                            aws.Statement(
                                Sid="1",
                                Effect=aws.Allow,
                                Action=[
                                    awscd_codecommit.GitPull,
                                    awscd_codecommit.GetBranch,
                                    awscd_codecommit.GetCommit,
                                    awscd_codecommit.UploadArchive,
                                ],
                                Resource=[troposphere.GetAtt(repo, "Arn")],
                            ),
                            # Bucket-level access to the artifact store.
                            aws.Statement(
                                Sid="2",
                                Effect=aws.Allow,
                                Action=[
                                    awacs_s3.GetBucketPolicy,
                                    awacs_s3.GetBucketVersioning,
                                    awacs_s3.ListBucket,
                                ],
                                Resource=[
                                    troposphere.GetAtt(artifact_store, "Arn")
                                ],
                            ),
                            # Object-level access.  S3 object ARNs are
                            # "<bucket-arn>/<key>", so join with "/"; the
                            # previous ":" produced "...:bucket:*", which
                            # matches no object.
                            aws.Statement(
                                Sid="3",
                                Effect=aws.Allow,
                                Action=[
                                    awacs_s3.GetObject,
                                    awacs_s3.GetObjectVersion,
                                ],
                                Resource=[
                                    troposphere.Join("/", [
                                        troposphere.GetAtt(
                                            artifact_store, "Arn"), "*"
                                    ])
                                ],
                            ),
                            # Listing all buckets requires the s3 wildcard ARN.
                            aws.Statement(
                                Sid="4",
                                Effect=aws.Allow,
                                Action=[
                                    awacs_s3.ListAllMyBuckets,
                                ],
                                Resource=[
                                    troposphere.Join(":", [
                                        "arn",
                                        troposphere.Partition,
                                        "s3:::*",
                                    ])
                                ],
                            ),
                        ],
                    ),
                )
            ],
            AssumeRolePolicyDocument=aws.PolicyDocument(
                Version="2012-10-17",
                Statement=[
                    aws.Statement(
                        Effect=aws.Allow,
                        Action=[awacs_sts.AssumeRole],
                        Principal=aws.Principal(
                            "Service", ["codepipeline.amazonaws.com"]),
                    ),
                ],
            ),
        ))
    codebuild_role = t.add_resource(
        iam.Role(
            "CodeBuildRole",
            RoleName=codebuild_role_name,
            Path=codebuild_role_path,
            ManagedPolicyArns=[
                "arn:aws:iam::aws:policy/AdministratorAccess",
            ],
            Policies=[
                iam.Policy(
                    PolicyName="executionpermissions",
                    PolicyDocument=aws.PolicyDocument(
                        Version="2012-10-17",
                        Id="executionpermissions",
                        Statement=[
                            # Write build logs to the project's log group.
                            aws.Statement(
                                Sid="1",
                                Effect=aws.Allow,
                                Action=[
                                    awacs_logs.CreateLogGroup,
                                    awacs_logs.CreateLogStream,
                                    awacs_logs.PutLogEvents,
                                ],
                                Resource=[
                                    {
                                        "Fn::Sub": [
                                            f"arn:${{AWS::Partition}}:logs:${{AWS::Region}}:${{AWS::AccountId}}:log-group:/aws/codebuild/{project_name}",
                                            {},
                                        ]
                                    },
                                    {
                                        "Fn::Sub": [
                                            f"arn:${{AWS::Partition}}:logs:${{AWS::Region}}:${{AWS::AccountId}}:log-group:/aws/codebuild/{project_name}:*",
                                            {},
                                        ]
                                    },
                                ],
                            ),
                            # Read/write pipeline artifacts in the default
                            # CodePipeline buckets.
                            aws.Statement(
                                Sid="2",
                                Effect=aws.Allow,
                                Action=[
                                    awacs_s3.PutObject,
                                    awacs_s3.GetObject,
                                    awacs_s3.GetObjectVersion,
                                    awacs_s3.GetBucketAcl,
                                    awacs_s3.GetBucketLocation,
                                ],
                                Resource=[
                                    {
                                        "Fn::Sub": [
                                            f"arn:${{AWS::Partition}}:s3:::codepipeline-${{AWS::Region}}-*",
                                            {},
                                        ]
                                    },
                                ],
                            ),
                            # Publish CodeBuild test/coverage reports.
                            aws.Statement(
                                Sid="3",
                                Effect=aws.Allow,
                                Action=[
                                    awacs_codebuild.CreateReportGroup,
                                    awacs_codebuild.CreateReport,
                                    awacs_codebuild.UpdateReport,
                                    awacs_codebuild.BatchPutTestCases,
                                    awacs_codebuild.BatchPutCodeCoverages,
                                ],
                                Resource=[
                                    {
                                        "Fn::Sub": [
                                            f"arn:${{AWS::Partition}}:codebuild:${{AWS::Region}}:${{AWS::AccountId}}:report-group/{project_name}-*",
                                            {},
                                        ]
                                    },
                                ],
                            ),
                            # Assume the migration role in the target account.
                            aws.Statement(Sid="4",
                                          Effect=aws.Allow,
                                          Action=[awacs_sts.AssumeRole],
                                          Resource=[migrate_role_arn]),
                        ],
                    ),
                )
            ],
            AssumeRolePolicyDocument=aws.PolicyDocument(
                Version="2012-10-17",
                Statement=[
                    aws.Statement(
                        Effect=aws.Allow,
                        Action=[awacs_sts.AssumeRole],
                        Principal=aws.Principal("Service",
                                                ["codebuild.amazonaws.com"]),
                    ),
                ],
            ),
        ))
    # CodeBuild project that installs aws-organized and runs the migration.
    project = t.add_resource(
        codebuild.Project(
            "AWSOrganizedMigrate",
            Artifacts=codebuild.Artifacts(Type="CODEPIPELINE"),
            Environment=codebuild.Environment(
                ComputeType="BUILD_GENERAL1_SMALL",
                Image="aws/codebuild/standard:4.0",
                Type="LINUX_CONTAINER",
                EnvironmentVariables=[{
                    "Name": "MIGRATE_ROLE_ARN",
                    "Type": "PLAINTEXT",
                    "Value": migrate_role_arn,
                }]),
            Name=project_name,
            ServiceRole=troposphere.GetAtt(codebuild_role, "Arn"),
            Source=codebuild.Source(
                Type="CODEPIPELINE",
                BuildSpec=yaml.safe_dump(
                    dict(
                        version="0.2",
                        phases=dict(
                            install={
                                "runtime-versions": dict(python="3.8"),
                                "commands": [
                                    "pip install aws-organized",
                                ],
                            },
                            build={
                                "commands": [
                                    # "$MIGRATE_ROLE_ARN" expands the env var;
                                    # "$(MIGRATE_ROLE_ARN)" was shell command
                                    # substitution and would fail at build time.
                                    "aws-organized migrate $MIGRATE_ROLE_ARN",
                                ],
                            },
                        ),
                        artifacts=dict(files=[
                            "environment",
                        ], ),
                    )),
            ),
        ))
    source_actions = codepipeline.Actions(
        Name="SourceAction",
        ActionTypeId=codepipeline.ActionTypeId(
            Category="Source",
            Owner="AWS",
            Version="1",
            Provider="CodeCommit",
        ),
        OutputArtifacts=[codepipeline.OutputArtifacts(Name="SourceOutput")],
        Configuration={
            "RepositoryName": repository_name,
            "BranchName": "master",
            "PollForSourceChanges": "true",
        },
        RunOrder="1",
    )
    # Two-stage pipeline: pull the repo, then run the migrate project.
    t.add_resource(
        codepipeline.Pipeline(
            "Pipeline",
            RoleArn=troposphere.GetAtt(codepipeline_role, "Arn"),
            Stages=[
                codepipeline.Stages(
                    Name="Source",
                    Actions=[source_actions],
                ),
                codepipeline.Stages(
                    Name="Migrate",
                    Actions=[
                        codepipeline.Actions(
                            Name="Migrate",
                            InputArtifacts=[
                                codepipeline.InputArtifacts(
                                    Name="SourceOutput")
                            ],
                            ActionTypeId=codepipeline.ActionTypeId(
                                Category="Build",
                                Owner="AWS",
                                Version="1",
                                Provider="CodeBuild",
                            ),
                            Configuration={
                                "ProjectName": troposphere.Ref(project),
                                "PrimarySource": "SourceAction",
                            },
                            RunOrder="1",
                        )
                    ],
                ),
            ],
            ArtifactStore=codepipeline.ArtifactStore(
                Type="S3", Location=troposphere.Ref(artifact_store)),
        ))
    if output_format == "json":
        return t.to_json()
    return t.to_yaml()
# Environment-scoped names for the SQS queues and IAM users, e.g.
# "<env>-notifications"; resolved by CloudFormation at deploy time.
notifications_queue_name_variable = Join(
    '-', [Ref(environment_parameter), 'notifications'])
search_queue_name_variable = Join('-', [Ref(environment_parameter), 'search'])
ci_user_name_variable = Join('-', ['ci-api', Ref(environment_parameter)])
api_user_name_variable = Join('-', ['api', Ref(environment_parameter)])

# ==================================================
# Resources.
# ==================================================

# Private, AES256-encrypted, versioned application bucket.
bucket_resource = template.add_resource(
    s3.Bucket(
        'Bucket',
        AccessControl='Private',
        BucketEncryption=s3.BucketEncryption(
            ServerSideEncryptionConfiguration=[
                s3.ServerSideEncryptionRule(ServerSideEncryptionByDefault=s3.
                                            ServerSideEncryptionByDefault(
                                                SSEAlgorithm='AES256'))
            ]),
        BucketName=bucket_name_variable,
        VersioningConfiguration=s3.VersioningConfiguration(Status='Enabled')))

# Work queues, one per concern (default jobs, notifications, search indexing).
default_queue_resource = template.add_resource(
    sqs.Queue('DefaultQueue', QueueName=default_queue_name_variable))

notifications_queue_resource = template.add_resource(
    sqs.Queue('NotificationsQueue',
              QueueName=notifications_queue_name_variable))

search_queue_resource = template.add_resource(
    sqs.Queue('SearchQueue', QueueName=search_queue_name_variable))
def ssm_ansible():
    """Build the CloudFormation template for Ansible-over-SSM provisioning.

    Creates an encrypted S3 code bucket, a Lambda function triggered by EC2
    instance state-change CloudWatch events that runs Ansible playbooks via
    SSM, and the IAM role and invoke permission wiring them together.

    The rendered YAML is written to ``ssm_ansible.yml`` next to this module
    and also returned as a string.
    """
    template = Template()
    stackname = Ref("AWS::StackName")

    template.add_parameter(Parameter(
        "LambdaZipBucket",
        Type="String",
        Description="Bucket which has Lambda Zip file",
        Default="lambda-zip-bucket",
        # Fixed: the message previously ended with a stray apostrophe.
        ConstraintDescription="Must be an existing bucket."
    ))

    # Bucket holding the playbooks referenced by the Lambda's environment.
    ansible_bucket = s3.Bucket(
        'AnsibleBucket',
        BucketName=Join("", [stackname, "-code-bucket"]),
        BucketEncryption=s3.BucketEncryption(
            'AnsibleCodeBucketEncryption',
            ServerSideEncryptionConfiguration=[
                s3.ServerSideEncryptionRule(
                    ServerSideEncryptionByDefault=s3.ServerSideEncryptionByDefault(
                        'Default',
                        SSEAlgorithm='AES256'
                    )
                )
            ]
        )
    )

    # Execution role: full SSM, EC2 DescribeInstances, CloudWatch Logs.
    ansible_lambda_role = iam.Role(
        'AnsibleLambdaRole',
        RoleName=Join("", [stackname, "-AnsibleLambdaSSM"]),
        Policies=[
            Policy(
                PolicyName="AnsibleLambdaSSM",
                PolicyDocument=PolicyDocument(
                    Statement=[
                        Statement(
                            Effect=Allow,
                            Action=[
                                Action("ssm", "*"),
                                Action("ec2", "DescribeInstances")
                            ],
                            Resource=[
                                "*"
                            ]
                        ),
                        Statement(
                            Effect=Allow,
                            Action=[
                                Action("logs", "CreateLogGroup"),
                                Action("logs", "CreateLogStream"),
                                Action("logs", "PutLogEvents")
                            ],
                            Resource=[
                                "arn:aws:logs:*:*:*"
                            ]
                        )
                    ]
                )
            )
        ],
        AssumeRolePolicyDocument=PolicyDocument(
            Statement=[
                Statement(
                    Effect=Allow,
                    Action=[
                        Action("sts", "AssumeRole")
                    ],
                    Principal=Principal("Service", "lambda.amazonaws.com")
                )
            ]
        )
    )

    # The Lambda: code comes from the zip bucket parameter; the playbooks to
    # run are passed via environment variables.
    ansible_lambda = awslambda.Function(
        'AnsibleLambda',
        FunctionName=Join("", [stackname, "-ansible-ssm"]),
        Handler="lambda_function.lambda_handler",
        Runtime="python3.7",
        Role=GetAtt(ansible_lambda_role, "Arn"),
        Code=Code(
            S3Key="ansible_ssm_lambda.zip",
            S3Bucket=Ref("LambdaZipBucket")
        ),
        Environment=Environment(
            Variables={
                'code_bucket': Ref(ansible_bucket),
                'provision_playbook': "provision-playbook.yml",
                'update_playbook': "update-playbook.yml"
            }
        )
    )

    # Fire the Lambda whenever an instance enters "running" or "terminated".
    cloudwatch_event_rule_trigger_lambda = events.Rule(
        'CloudWatchEventRuleLambda',
        Description="Capture when instance stat changes to 'running' or 'terminated'.",
        EventPattern={
            "source": [
                "aws.ec2"
            ],
            "detail-type": [
                "EC2 Instance State-change Notification"
            ],
            "detail": {
                "state": [
                    "running",
                    "terminated"
                ]
            }
        },
        Name=Join("", [stackname, "-instance-state"]),
        Targets=[
            events.Target(
                'TriggerTarget',
                Arn=GetAtt(ansible_lambda, "Arn"),
                Id="TriggerTarget"
            )
        ]
    )

    # Allow the events rule to invoke the function.
    cloudwatch_lambda_permission = awslambda.Permission(
        'CloudWatchLambdaPermission',
        Action="lambda:InvokeFunction",
        FunctionName=Ref(ansible_lambda),
        Principal="events.amazonaws.com",
        SourceArn=GetAtt(cloudwatch_event_rule_trigger_lambda, "Arn")
    )

    template.add_resource(ansible_bucket)
    template.add_resource(ansible_lambda_role)
    template.add_resource(ansible_lambda)
    template.add_resource(cloudwatch_event_rule_trigger_lambda)
    template.add_resource(cloudwatch_lambda_permission)

    # Persist the template beside this module for check-in/inspection.
    with open(os.path.dirname(os.path.realpath(__file__)) + '/ssm_ansible.yml',
              'w') as cf_file:
        cf_file.write(template.to_yaml())

    return template.to_yaml()
def run(self):
    """Generate and write the spoke-account Terraform bootstrap template.

    The template provides a versioned, encrypted, non-public state bucket
    (readable/writable by the puppet account) plus four CodeBuild projects:
    execute, execute dry run, terminate and terminate dry run — all derived
    from one shared project definition and build spec via ``copy.deepcopy``.
    """
    puppet_version = constants.VERSION
    description = f"""Bootstrap template used to configure spoke account for terraform use
{{"version": "{puppet_version}", "framework": "servicecatalog-puppet", "role": "bootstrap-spoke-terraform"}}"""
    service_role = t.Sub(
        "arn:aws:iam::${AWS::AccountId}:role/servicecatalog-puppet/PuppetDeployInSpokeRole"
    )
    template = t.Template(Description=description)
    # Versioned, encrypted bucket holding terraform state files.
    state = template.add_resource(
        s3.Bucket(
            "state",
            BucketName=t.Sub("sc-puppet-state-${AWS::AccountId}"),
            VersioningConfiguration=s3.VersioningConfiguration(
                Status="Enabled"),
            BucketEncryption=s3.BucketEncryption(
                ServerSideEncryptionConfiguration=[
                    s3.ServerSideEncryptionRule(
                        ServerSideEncryptionByDefault=s3.
                        ServerSideEncryptionByDefault(
                            SSEAlgorithm="AES256"))
                ]),
            PublicAccessBlockConfiguration=s3.
            PublicAccessBlockConfiguration(
                BlockPublicAcls=True,
                BlockPublicPolicy=True,
                IgnorePublicAcls=True,
                RestrictPublicBuckets=True,
            ),
            Tags=t.Tags({"ServiceCatalogPuppet:Actor": "Framework"}),
        ))
    # Let the hub (puppet) account read and write the state objects.
    template.add_resource(
        s3.BucketPolicy(
            "statePolicy",
            Bucket=t.Ref(state),
            PolicyDocument={
                "Version": "2012-10-17",
                "Statement": [
                    {
                        "Action": [
                            "s3:GetObject*",
                            "s3:PutObject*",
                        ],
                        "Principal": {
                            "AWS": self.puppet_account_id
                        },
                        "Resource": t.Join("/",
                                           [t.GetAtt(state, "Arn"), "*"]),
                        "Effect": "Allow",
                        "Sid": "AllowPuppet",
                    },
                ],
            },
        ))
    # Base build spec: install terraform, assume the PuppetRole in the
    # target account, apply, then push outputs and state back to S3.
    # The dry-run/terminate variants below deepcopy and tweak it.
    execute_build_spec = dict(
        version="0.2",
        phases=dict(
            install=dict(commands=[
                "mkdir -p /root/downloads",
                "curl -s -qL -o /root/downloads/terraform_${TERRAFORM_VERSION}_linux_amd64.zip https://releases.hashicorp.com/terraform/${TERRAFORM_VERSION}/terraform_${TERRAFORM_VERSION}_linux_amd64.zip",
                "unzip /root/downloads/terraform_${TERRAFORM_VERSION}_linux_amd64.zip -d /usr/bin/",
                "chmod +x /usr/bin/terraform",
                "terraform --version",
                "aws s3 cp $ZIP source.zip",
                "unzip source.zip",
            ], ),
            pre_build=dict(commands=[
                "aws s3 cp $STATE_FILE terraform.tfstate || echo 'no statefile copied'",
                'ASSUME_ROLE_ARN="arn:aws:iam::${TARGET_ACCOUNT}:role/servicecatalog-puppet/PuppetRole"',
                "TEMP_ROLE=$(aws sts assume-role --role-arn $ASSUME_ROLE_ARN --role-session-name terraform)",
                "export TEMP_ROLE",
                'export AWS_ACCESS_KEY_ID=$(echo "${TEMP_ROLE}" | jq -r ".Credentials.AccessKeyId")',
                'export AWS_SECRET_ACCESS_KEY=$(echo "${TEMP_ROLE}" | jq -r ".Credentials.SecretAccessKey")',
                'export AWS_SESSION_TOKEN=$(echo "${TEMP_ROLE}" | jq -r ".Credentials.SessionToken")',
                "aws sts get-caller-identity",
                "terraform init",
            ], ),
            build=dict(commands=[
                "terraform apply -auto-approve",
            ]),
            post_build=dict(commands=[
                "terraform output -json > outputs.json",
                "unset AWS_ACCESS_KEY_ID",
                "unset AWS_SECRET_ACCESS_KEY",
                "unset AWS_SESSION_TOKEN",
                "aws sts get-caller-identity",
                "aws s3 cp terraform.tfstate $STATE_FILE",
            ]),
        ),
        artifacts=dict(files=[
            "outputs.json",
        ], ),
    )
    # Base project definition; TARGET_ACCOUNT/ZIP/STATE_FILE are overridden
    # per build when the project is started.
    execute_terraform = dict(
        Name=constants.EXECUTE_TERRAFORM_PROJECT_NAME,
        ServiceRole=service_role,
        Tags=t.Tags.from_dict(
            **{"ServiceCatalogPuppet:Actor": "Framework"}),
        Artifacts=codebuild.Artifacts(
            Type="S3",
            Location=t.Ref("state"),
            Path="terraform-executions",
            Name="artifacts-execute",
            NamespaceType="BUILD_ID",
        ),
        TimeoutInMinutes=480,
        Environment=codebuild.Environment(
            ComputeType="BUILD_GENERAL1_SMALL",
            Image=constants.CODEBUILD_DEFAULT_IMAGE,
            Type="LINUX_CONTAINER",
            EnvironmentVariables=[
                codebuild.EnvironmentVariable(
                    Name="TERRAFORM_VERSION",
                    Type="PARAMETER_STORE",
                    Value=constants.DEFAULT_TERRAFORM_VERSION_PARAMETER_NAME,
                ),
            ] + [
                codebuild.EnvironmentVariable(
                    Name=name,
                    Type="PLAINTEXT",
                    Value="CHANGE_ME",
                ) for name in ["TARGET_ACCOUNT", "ZIP", "STATE_FILE"]
            ],
        ),
        Source=codebuild.Source(
            BuildSpec=yaml.safe_dump(execute_build_spec),
            Type="NO_SOURCE",
        ),
        Description=
        "Execute the given terraform in the given account using the given state file",
    )
    # execute
    template.add_resource(
        codebuild.Project("ExecuteTerraformProject", **execute_terraform))
    # execute dry run: plan only, nothing written back
    execute_dry_run_terraform = copy.deepcopy(execute_terraform)
    execute_dry_run_terraform[
        "Name"] = constants.EXECUTE_DRY_RUN_TERRAFORM_PROJECT_NAME
    execute_dry_run_terraform["Description"] = execute_dry_run_terraform[
        "Description"].replace("Execute", "DRY RUN of Execute")
    execute_dry_run_build_spec = copy.deepcopy(execute_build_spec)
    execute_dry_run_build_spec["phases"]["build"]["commands"] = [
        "terraform plan -out=plan.bin",
        "terraform show -json plan.bin > plan.json",
    ]
    del execute_dry_run_build_spec["phases"]["post_build"]
    execute_dry_run_build_spec["artifacts"] = dict(files=[
        "plan.bin",
        "plan.json",
    ], )
    execute_dry_run_terraform["Source"] = codebuild.Source(
        BuildSpec=yaml.safe_dump(execute_dry_run_build_spec),
        Type="NO_SOURCE",
    )
    execute_dry_run_terraform["Artifacts"] = codebuild.Artifacts(
        Type="S3",
        Location=t.Ref("state"),
        Path="terraform-executions",
        Name="artifacts-execute-dry-run",
        NamespaceType="BUILD_ID",
    )
    template.add_resource(
        codebuild.Project("ExecuteDryRunTerraformProject",
                          **execute_dry_run_terraform))
    # terminate: destroy and push the emptied state back
    terminate_terraform = copy.deepcopy(execute_terraform)
    terminate_terraform[
        "Name"] = constants.TERMINATE_TERRAFORM_PROJECT_NAME
    terminate_terraform["Description"] = terminate_terraform[
        "Description"].replace("Execute", "Terminate")
    terminate_build_spec = copy.deepcopy(execute_build_spec)
    terminate_build_spec["phases"]["build"]["commands"] = [
        "terraform destroy -auto-approve"
    ]
    terminate_build_spec["phases"]["post_build"]["commands"] = [
        "unset AWS_ACCESS_KEY_ID",
        "unset AWS_SECRET_ACCESS_KEY",
        "unset AWS_SESSION_TOKEN",
        "aws sts get-caller-identity",
        "aws s3 cp terraform.tfstate $STATE_FILE",
    ]
    del terminate_build_spec["artifacts"]
    terminate_terraform["Source"] = codebuild.Source(
        BuildSpec=yaml.safe_dump(terminate_build_spec),
        Type="NO_SOURCE",
    )
    terminate_terraform["Artifacts"] = codebuild.Artifacts(
        Type="S3",
        Location=t.Ref("state"),
        Path="terraform-executions",
        Name="artifacts-terminate",
        NamespaceType="BUILD_ID",
    )
    template.add_resource(
        codebuild.Project("TerminateTerraformProject",
                          **terminate_terraform))
    # terminate dry run: destroy plan only
    # (locals renamed from the misspelled "termminate_*")
    terminate_dry_run_terraform = copy.deepcopy(execute_terraform)
    terminate_dry_run_terraform[
        "Name"] = constants.TERMINATE_DRY_RUN_TERRAFORM_PROJECT_NAME
    new_description = terminate_dry_run_terraform["Description"].replace(
        "Execute", "DRY RUN of Terminate")
    terminate_dry_run_terraform["Description"] = new_description
    terminate_dry_run_build_spec = copy.deepcopy(execute_build_spec)
    terminate_dry_run_build_spec["phases"]["build"]["commands"] = [
        "terraform plan -destroy -out=plan.bin",
        "terraform show -json plan.bin > plan.json",
    ]
    del terminate_dry_run_build_spec["phases"]["post_build"]
    terminate_dry_run_build_spec["artifacts"] = dict(files=[
        "plan.bin",
        "plan.json",
    ], )
    terminate_dry_run_terraform["Source"] = codebuild.Source(
        BuildSpec=yaml.safe_dump(terminate_dry_run_build_spec),
        Type="NO_SOURCE",
    )
    terminate_dry_run_terraform["Artifacts"] = codebuild.Artifacts(
        Type="S3",
        Location=t.Ref("state"),
        Path="terraform-executions",
        Name="artifacts-terminate-dry-run",
        NamespaceType="BUILD_ID",
    )
    template.add_resource(
        codebuild.Project("TerminateDryRunTerraformProject",
                          **terminate_dry_run_terraform))
    self.write_output(template.to_yaml(), skip_json_dump=True)
def get_template(version: str, default_region_value: str) -> t.Template:
    """Build the bootstrap-master-region template for ServiceCatalog-Puppet.

    Creates the home-region and regional-version SSM parameters, the
    versioned/encrypted pipeline artifact bucket and the regional SNS topic
    that forwards CloudFormation events to the home-region SQS queue.

    Args:
        version: Framework version recorded in the template description and
            the regional version SSM parameter.
        default_region_value: The puppet home region; used to address the
            ``servicecatalog-puppet-cloudformation-events`` SQS queue.

    Returns:
        The assembled troposphere Template.
    """
    description = f"""Bootstrap template used to bootstrap a region of ServiceCatalog-Puppet master
{{"version": "{version}", "framework": "servicecatalog-puppet", "role": "bootstrap-master-region"}}"""
    template = t.Template(Description=description)
    version_parameter = template.add_parameter(
        t.Parameter("Version", Default=version, Type="String")
    )
    default_region_value_parameter = template.add_parameter(
        t.Parameter("DefaultRegionValue", Default=default_region_value, Type="String")
    )
    # Record the home region so other components can discover it via SSM.
    template.add_resource(
        ssm.Parameter(
            "DefaultRegionParam",
            Name="/servicecatalog-puppet/home-region",
            Type="String",
            Value=t.Ref(default_region_value_parameter),
            Tags={"ServiceCatalogPuppet:Actor": "Framework"},
        )
    )
    # Version marker for this regional bootstrap; exported as an output below.
    version_ssm_parameter = template.add_resource(
        ssm.Parameter(
            "Param",
            Name="service-catalog-puppet-regional-version",
            Type="String",
            Value=t.Ref(version_parameter),
            Tags={"ServiceCatalogPuppet:Actor": "Framework"},
        )
    )
    # Encrypted, versioned, non-public bucket for pipeline artifacts.
    template.add_resource(
        s3.Bucket(
            "PipelineArtifactBucket",
            BucketName=t.Sub(
                "sc-puppet-pipeline-artifacts-${AWS::AccountId}-${AWS::Region}"
            ),
            VersioningConfiguration=s3.VersioningConfiguration(Status="Enabled"),
            BucketEncryption=s3.BucketEncryption(
                ServerSideEncryptionConfiguration=[
                    s3.ServerSideEncryptionRule(
                        ServerSideEncryptionByDefault=s3.ServerSideEncryptionByDefault(
                            SSEAlgorithm="AES256"
                        )
                    )
                ]
            ),
            PublicAccessBlockConfiguration=s3.PublicAccessBlockConfiguration(
                BlockPublicAcls=True,
                BlockPublicPolicy=True,
                IgnorePublicAcls=True,
                RestrictPublicBuckets=True,
            ),
            Tags=t.Tags({"ServiceCatalogPuppet:Actor": "Framework"}),
        )
    )
    # Regional topic whose messages are forwarded to the home-region queue.
    regional_product_topic = template.add_resource(
        sns.Topic(
            "RegionalProductTopic",
            DisplayName="servicecatalog-puppet-cloudformation-regional-events",
            TopicName="servicecatalog-puppet-cloudformation-regional-events",
            Subscription=[
                sns.Subscription(
                    Endpoint=t.Sub(
                        "arn:${AWS::Partition}:sqs:${DefaultRegionValue}:${AWS::AccountId}:servicecatalog-puppet-cloudformation-events"
                    ),
                    Protocol="sqs",
                )
            ],
        ),
    )
    template.add_output(
        t.Output("Version", Value=t.GetAtt(version_ssm_parameter, "Value"))
    )
    template.add_output(
        t.Output("RegionalProductTopic", Value=t.Ref(regional_product_topic))
    )
    return template