def log_group(config) -> Optional[ResourceOutput]:
    """Generate a CloudWatch Logs log group resource for CodeBuild"""
    sub_config = config.get("config", {})
    log_group_config = sub_config.get("codebuild", {}).get("log_group", {})

    if not log_group_config.get("enabled") or not log_group_config.get("create"):
        return None

    resource_properties = {
        "KmsKeyId": parse_value("${KmsKeyArn}", KmsKeyArn=sub_config["kms_key_arn"]),
        "LogGroupName": parse_value(
            "${LogGroupName}", LogGroupName=log_group_config.get("name", "AWS::NoValue")
        ),
        "RetentionInDays": parse_value(
            "${Retention}", Retention=log_group_config.get("retention", "AWS::NoValue")
        ),
    }

    return ResourceOutput(
        definition={
            LOGICAL_ID: {
                "Type": "AWS::Logs::LogGroup",
                "Properties": resource_properties,
            }
        },
        logical_id=LOGICAL_ID,
    )
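# Illustrative sketch (not part of the module): given the codebuild.log_group config
#   {"enabled": True, "create": True, "name": "my-pipeline", "retention": 30}
# and assuming the module-level LOGICAL_ID constant is "LogGroup" and parse_value()
# passes plain strings/scalars through unchanged (as the single-value test below shows),
# the returned definition would look roughly like:
_EXAMPLE_LOG_GROUP_DEFINITION = {
    "LogGroup": {
        "Type": "AWS::Logs::LogGroup",
        "Properties": {
            "KmsKeyId": "arn:aws:kms:eu-west-1:111111111111:key/example",
            "LogGroupName": "my-pipeline",
            "RetentionInDays": 30,
        },
    }
}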
def test_parse_value_single_value():
    """Tests parse_value() when passed a single value"""
    assert config.parse_value("${Value}", Value="my-value") == "my-value"
    assert config.parse_value("${Value}", Value="AWS::NoValue") == {
        "Ref": "AWS::NoValue"
    }
def pipeline(config, role_logical_id: str) -> ResourceOutput:
    """Generate a CodePipeline Pipeline resource"""
    sub_config = config.get("config", {})

    sources = config.get("sources", [])
    if not sources:
        raise KeyError("At least one source must be supplied")

    source_names = [source["name"] for source in sources]
    if not all(source_names):
        raise KeyError("All sources must have a name key + value")

    codebuild_stages = [
        {
            "Name": stage["name"],
            "Actions": [
                codebuild_action_definition(action, source_names)
                for action in stage.get("actions", [])
            ],
        }
        for stage in filter(
            lambda stage: stage.get("enabled"), config.get("stages", [])
        )
    ]

    resource_properties = {
        "ArtifactStore": {
            "EncryptionKey": {
                "Id": parse_value("${KmsKeyArn}", KmsKeyArn=sub_config["kms_key_arn"]),
                "Type": "KMS",
            },
            "Location": parse_value(
                "${BucketName}",
                BucketName=sub_config["s3_bucket"],
            ),
            "Type": "S3",
        },
        "RestartExecutionOnUpdate": sub_config.get("codepipeline", {}).get(
            "restart_execution_on_update"
        ),
        "RoleArn": {"Fn::GetAtt": [role_logical_id, "Arn"]},
        "Stages": [
            {
                "Name": "Source",
                "Actions": [source_action_definition(source) for source in sources],
            },
            *codebuild_stages,
        ],
    }

    return ResourceOutput(
        definition={
            LOGICAL_ID: {
                "Type": "AWS::CodePipeline::Pipeline",
                "Properties": resource_properties,
            }
        },
        logical_id=LOGICAL_ID,
    )
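# Illustrative sketch of the config shape these generators consume (assumed example,
# assembled from the keys the functions in this module actually read; the action keys
# mirror what project() and generate_source_config() expect):
_EXAMPLE_CONFIG = {
    "config": {
        "s3_bucket": "my-artifact-bucket",
        "kms_key_arn": "import:KmsKeyArn",
        "codepipeline": {"restart_execution_on_update": True},
        "codebuild": {
            "log_group": {"enabled": True, "create": True, "name": "my-pipeline"}
        },
    },
    "sources": [
        {"name": "app", "from": "CodeCommit", "repository": "my-repo", "branch": "main"}
    ],
    "stages": [
        {
            "name": "Build",
            "enabled": True,
            "actions": [
                {
                    "name": "build",
                    "image": "aws/codebuild/standard:5.0",
                    "compute_type": "BUILD_GENERAL1_SMALL",
                    "commands": ["make build"],
                }
            ],
        }
    ],
}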
def cloudwatch_events(
    config, cloudwatch_events_role_logical_id: str, codepipeline_logical_id: str
) -> ResourceOutput:
    """Generate a CloudWatch Event to detect source changes"""
    sources = config.get("sources", [])
    source_pattern = re.compile(r"[\W_]+")

    resources = {}
    for source in sources:
        if not is_codecommit_with_event_source(source):
            continue

        # Generate a CFN event
        logical_id = f"{source_pattern.sub('', source['name'])}PushEventRule"
        resources[logical_id] = {
            "Type": "AWS::Events::Rule",
            "Properties": {
                "EventPattern": {
                    "source": ["aws.codecommit"],
                    "detail-type": ["CodeCommit Repository State Change"],
                    "resources": [
                        parse_value(
                            "arn:aws:codecommit:${AWS::Region}:${AWS::AccountId}:${Repository}",
                            Repository=source["repository"],
                        )
                    ],
                    "detail": {
                        "event": ["referenceCreated", "referenceUpdated"],
                        "referenceType": ["branch"],
                        "referenceName": [
                            parse_value("${BranchName}", BranchName=source["branch"])
                        ],
                    },
                },
                "Targets": [
                    {
                        "Arn": {
                            "Fn::Sub": f"arn:aws:codepipeline:${{AWS::Region}}:${{AWS::AccountId}}:${{{codepipeline_logical_id}}}"
                        },
                        "RoleArn": {
                            "Fn::GetAtt": [cloudwatch_events_role_logical_id, "Arn"]
                        },
                        "Id": {"Fn::Sub": f"${{AWS::StackName}}-{logical_id}"},
                    }
                ],
            },
        }

    return ResourceOutput(definition=resources, logical_id="")
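# Illustrative sketch: shows how the [\W_]+ pattern above sanitises a source name into
# a valid CloudFormation logical ID ("my-app_v2" is a hypothetical source name).
def _example_push_event_rule_logical_id() -> str:
    import re  # local import keeps this illustrative sketch self-contained

    pattern = re.compile(r"[\W_]+")
    logical_id = f"{pattern.sub('', 'my-app_v2')}PushEventRule"
    assert logical_id == "myappv2PushEventRule"
    return logical_id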
def generate_source_config(project_config) -> Dict[str, Any]:
    """Generate a source config entry for a project config"""
    source: Dict[str, Any] = {"Type": "CODEPIPELINE"}

    if project_config.get("buildspec"):
        source["BuildSpec"] = parse_value(
            "${BuildSpec}", BuildSpec=project_config["buildspec"]
        )
    elif project_config.get("commands"):
        template = {
            "version": 0.2,
            "phases": {"build": {"commands": project_config["commands"]}},
        }

        artifacts = project_config.get("artifacts")
        if artifacts:
            template.update({"artifacts": {"files": artifacts}})

        source["BuildSpec"] = convert_to_yaml(template)

    return source
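# Illustrative sketch (assumed example, not from the project): for a commands-based
# project config such as {"commands": ["make test", "make build"], "artifacts": ["dist/**/*"]},
# generate_source_config() builds this buildspec template before passing it to
# convert_to_yaml():
_EXAMPLE_INLINE_BUILDSPEC = {
    "version": 0.2,
    "phases": {"build": {"commands": ["make test", "make build"]}},
    "artifacts": {"files": ["dist/**/*"]},
}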
def test_parse_value_import():
    """Tests parse_value() when passed an import"""
    assert config.parse_value("${Value}", Value="import:MyImport") == {
        "Fn::Sub": ("${Value}", {"Value": {"Fn::ImportValue": "MyImport"}})
    }
    assert config.parse_value("Prefix-${Value}-Suffix", Value="import:MyImport") == {
        "Fn::Sub": (
            "Prefix-${Value}-Suffix",
            {"Value": {"Fn::ImportValue": "MyImport"}},
        )
    }
def source_action_definition(source: Dict[str, str]) -> SourceDefinition:
    """Get a Source's CodePipeline Action definition"""
    definition: SourceDefinition = {
        "Name": source["name"],
        "ActionTypeId": {
            "Category": "Source",
            "Owner": "AWS",
            "Provider": "CodeCommit",
            "Version": 1,
        },
        "Configuration": {
            "BranchName": parse_value("${BranchName}", BranchName=source["branch"]),
        },
        "OutputArtifacts": [{"Name": sanitise_artifact_name(source["name"])}],
    }

    repository = parse_value("${RepositoryName}", RepositoryName=source["repository"])

    if source["from"] == "CodeCommit":
        definition["Configuration"].update(
            {
                "RepositoryName": repository,
                "PollForSourceChanges": source.get("poll_for_source_changes"),
            }
        )

    if source["from"] == "CodeStarConnection":
        definition["ActionTypeId"].update({"Provider": "CodeStarSourceConnection"})
        definition["Configuration"].update(
            {
                "ConnectionArn": parse_value(
                    "${ConnectionArn}", ConnectionArn=source.get("connection_arn")
                ),
                "FullRepositoryId": repository,
            }
        )

    return definition
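# Illustrative sketch: for a CodeCommit source
#   {"name": "app", "from": "CodeCommit", "repository": "my-repo", "branch": "main"}
# and assuming parse_value() returns plain strings unchanged (per the single-value test)
# and sanitise_artifact_name("app") simply yields "app", the returned definition would
# be roughly:
_EXAMPLE_SOURCE_ACTION = {
    "Name": "app",
    "ActionTypeId": {
        "Category": "Source",
        "Owner": "AWS",
        "Provider": "CodeCommit",
        "Version": 1,
    },
    "Configuration": {
        "BranchName": "main",
        "RepositoryName": "my-repo",
        "PollForSourceChanges": None,
    },
    "OutputArtifacts": [{"Name": "app"}],
}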
def project(
    project_config,
    sub_config: dict,
    role_logical_id: str,
    log_group_logical_id: Optional[str] = None,
) -> ResourceOutput:
    """Generate a CodeBuild project resource"""
    logical_id = generate_logical_id(project_config["name"])

    environment_variables = project_config.get("environment", {})
    environment_variables.setdefault("AWS_DEFAULT_REGION", "AWS::Region")
    environment_variables.setdefault("AWS_REGION", "AWS::Region")

    image_credential_type = (
        "SERVICE_ROLE" if is_ecr(project_config["image"]) else "CODEBUILD"
    )

    resource_properties = {
        "Artifacts": {"Type": "CODEPIPELINE"},
        "Environment": {
            "ComputeType": parse_value(
                "${ComputeType}", ComputeType=project_config["compute_type"]
            ),
            "Image": parse_value("${Image}", Image=project_config["image"]),
            "ImagePullCredentialsType": image_credential_type,
            "EnvironmentVariables": [
                {"Name": key, "Value": parse_value("${Value}", Value=value)}
                for key, value in environment_variables.items()
            ],
            "PrivilegedMode": False,
            "Type": "LINUX_CONTAINER",
        },
        "ServiceRole": {"Fn::GetAtt": [role_logical_id, "Arn"]},
        "Source": generate_source_config(project_config),
        "EncryptionKey": parse_value(
            "${KmsKeyArn}", KmsKeyArn=sub_config["kms_key_arn"]
        ),
    }

    log_group = sub_config.get("codebuild", {}).get("log_group", {})
    if log_group.get("enabled"):
        log_group_name = parse_value("${GroupName}", GroupName=log_group.get("name"))
        if log_group_logical_id:
            log_group_name = {"Ref": log_group_logical_id}

        resource_properties.update(
            {
                "LogsConfig": {
                    "CloudWatchLogs": {
                        "GroupName": log_group_name,
                        "Status": "ENABLED",
                    }
                }
            }
        )
    else:
        resource_properties.update(
            {"LogsConfig": {"CloudWatchLogs": {"Status": "DISABLED"}}}
        )

    return ResourceOutput(
        definition={
            logical_id: {
                "Type": "AWS::CodeBuild::Project",
                "Properties": resource_properties,
            }
        },
        logical_id=logical_id,
    )
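# Illustrative sketch: with project_config["environment"] = {"STAGE": "prod"}, the
# EnvironmentVariables list above becomes roughly the following, assuming parse_value()
# turns "AWS::*" pseudo-parameter values into Refs (an inference from the AWS::NoValue
# test) and passes plain strings through unchanged:
_EXAMPLE_ENVIRONMENT_VARIABLES = [
    {"Name": "STAGE", "Value": "prod"},
    {"Name": "AWS_DEFAULT_REGION", "Value": {"Ref": "AWS::Region"}},
    {"Name": "AWS_REGION", "Value": {"Ref": "AWS::Region"}},
]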
def codebuild_role(
    config, log_group_logical_id: Optional[str] = None
) -> ResourceOutput:
    """Generate a CodeBuild role + policy resources"""
    sub_config = config.get("config", {})
    permissions = []

    # Add CW Logs perms
    log_group = sub_config.get("codebuild", {}).get("log_group", {})
    if log_group.get("enabled"):
        log_group_arn: Union[str, FnSub, Ref, FnGetAtt] = parse_value(
            "arn:aws:logs:${AWS::Region}:${AWS::AccountId}:log-group:${LogGroupName}:*",
            LogGroupName=log_group.get("name"),
        )
        if log_group_logical_id:
            log_group_arn = {"Fn::GetAtt": [log_group_logical_id, "Arn"]}

        permissions.append(
            iam_permission(
                ["logs:CreateLogStream", "logs:PutLogEvents"],
                [log_group_arn],
            )
        )

    images = set()
    for stage in config.get("stages", []):
        for action in stage.get("actions", []):
            images.add(action["image"])

    image_arns = sorted(get_ecr_arns(list(images)))
    if image_arns:
        permissions.extend(
            [
                iam_permission(["ecr:GetAuthorizationToken"], ["*"]),
                iam_permission(
                    ["ecr:BatchGetImage", "ecr:GetDownloadUrlForLayer"],
                    image_arns,  # type: ignore
                ),
            ]
        )

    # Add S3 and KMS perms
    permissions.extend(
        [
            iam_permission(
                copy(S3_BUCKET_PERMISSIONS),
                [
                    parse_value(
                        "arn:aws:s3:::${BucketName}",
                        BucketName=sub_config["s3_bucket"],
                    ),
                    parse_value(
                        "arn:aws:s3:::${BucketName}/*",
                        BucketName=sub_config["s3_bucket"],
                    ),
                ],
            ),
            iam_permission(
                copy(KMS_KEY_PERMISSIONS),
                [
                    parse_value(
                        "${KmsKeyArn}",
                        KmsKeyArn=sub_config["kms_key_arn"],
                    )
                ],
            ),
        ]
    )

    # Add any additionally specified IAM perms
    iam = sub_config.get("iam")
    if iam:
        permissions.extend(iam)

    return ResourceOutput(
        definition={
            **generate_role(
                "CodeBuildRole", "codebuild.amazonaws.com", ["CodeBuildPolicy"]
            ),
            **generate_managed_policy("CodeBuildPolicy", permissions),
        },
        logical_id="CodeBuildRole",
    )
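# Illustrative sketch: iam_permission() is defined elsewhere in the project; the role
# generators above only assume it returns something shaped like an IAM policy statement.
# The dict below is a hypothetical example of that shape, not confirmed by this file:
_EXAMPLE_IAM_STATEMENT = {
    "Effect": "Allow",
    "Action": ["logs:CreateLogStream", "logs:PutLogEvents"],
    "Resource": [{"Fn::GetAtt": ["LogGroup", "Arn"]}],
}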
def codepipeline_role(config, codebuild_projects: List[str]) -> ResourceOutput:
    """Generate a CodePipeline role + policy resources"""
    sub_config = config.get("config", {})

    permissions = [
        iam_permission(
            copy(S3_BUCKET_PERMISSIONS),
            [
                parse_value(
                    "arn:aws:s3:::${BucketName}",
                    BucketName=str(sub_config["s3_bucket"]),
                ),
                parse_value(
                    "arn:aws:s3:::${BucketName}/*",
                    BucketName=str(sub_config["s3_bucket"]),
                ),
            ],
        ),
        iam_permission(
            copy(KMS_KEY_PERMISSIONS),
            [
                parse_value(
                    "${KmsKeyArn}",
                    KmsKeyArn=sub_config["kms_key_arn"],
                )
            ],
        ),
        iam_permission(
            copy(CODEPIPELINE_CODEBUILD_PERMISSIONS),
            [
                {"Fn::GetAtt": [codebuild_project, "Arn"]}
                for codebuild_project in codebuild_projects
            ],
        ),
    ]

    # Add Source perms
    codecommit_projects: List[Union[str, FnSub, FnGetAtt, Ref]] = []
    codestar_connection_arns: Set[str] = set()
    for source in config.get("sources", []):
        if source["from"] == "CodeCommit":
            codecommit_projects.append(
                parse_value(
                    "arn:aws:codecommit:${AWS::Region}:${AWS::AccountId}:${RepositoryName}",
                    RepositoryName=source["repository"],
                )
            )
        elif source["from"] == "CodeStarConnection":
            if not source.get("connection_arn"):
                raise RuntimeError(
                    f"Source {source['name']} uses CodeStar Connections, but does not specify a connection_arn"
                )
            codestar_connection_arns.add(source.get("connection_arn"))
        else:
            raise NotImplementedError(
                f"Source type '{source['from']}' is not supported yet"
            )

    if codecommit_projects:
        permissions.append(
            iam_permission(
                [
                    "codecommit:GetBranch",
                    "codecommit:GetCommit",
                    "codecommit:GetUploadArchiveStatus",
                    "codecommit:UploadArchive",
                    "codecommit:GitPull",
                ],
                codecommit_projects,
            )
        )

    if codestar_connection_arns:
        permissions.append(
            iam_permission(
                ["codestar-connections:UseConnection"],
                [
                    parse_value(
                        "${ConnectionArn}",
                        ConnectionArn=connection_arn,
                    )
                    for connection_arn in sorted(codestar_connection_arns)
                ],
            )
        )

    return ResourceOutput(
        definition={
            **generate_role(
                "CodePipelineRole", "codepipeline.amazonaws.com", ["CodePipelinePolicy"]
            ),
            **generate_managed_policy("CodePipelinePolicy", permissions),
        },
        logical_id="CodePipelineRole",
    )
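# Usage sketch (hypothetical wiring, not taken from the project): each generator above
# returns a ResourceOutput whose .definition is a dict keyed by logical ID, so a
# template builder elsewhere in the project can merge them into one Resources section:
#
#     cb_role = codebuild_role(config)
#     cp_role = codepipeline_role(config, codebuild_projects=["MyProject"])
#     template = {
#         "AWSTemplateFormatVersion": "2010-09-09",
#         "Resources": {**cb_role.definition, **cp_role.definition},
#     }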