def create_invoke_lambda_custom_resource(
    scope,  # NOSONAR:S107 this function is designed to take many arguments
    id,
    lambda_function_arn,
    lambda_function_name,
    blueprint_bucket,
    custom_resource_properties,
):
    """
    create_invoke_lambda_custom_resource creates a custom resource to invoke a lambda function

    :scope: CDK Construct scope that's needed to create CDK resources
    :id: the logicalId of the CDK resource
    :lambda_function_arn: arn of the lambda function to be invoked (str)
    :lambda_function_name: name of the lambda function to be invoked (str)
    :blueprint_bucket: CDK object of the blueprint bucket that contains resources for BYOM pipeline
    :custom_resource_properties: user provided properties (dict)

    :return: CDK Custom Resource
    """
    custom_resource_lambda_fn = lambda_.Function(
        scope,
        id,
        code=lambda_.Code.from_bucket(
            blueprint_bucket,
            "blueprints/byom/lambdas/invoke_lambda_custom_resource.zip"),
        handler="index.handler",
        runtime=lambda_.Runtime.PYTHON_3_8,
        timeout=core.Duration.minutes(5),
    )

    custom_resource_lambda_fn.add_to_role_policy(
        iam.PolicyStatement(
            actions=[
                "lambda:InvokeFunction",
            ],
            resources=[lambda_function_arn],
        ))
    custom_resource_lambda_fn.node.default_child.cfn_options.metadata = suppress_lambda_policies(
    )

    invoke_lambda_custom_resource = core.CustomResource(
        scope,
        f"{id}CustomeResource",
        service_token=custom_resource_lambda_fn.function_arn,
        properties={
            "function_name": lambda_function_name,
            "message": f"Invoking lambda function: {lambda_function_name}",
            **custom_resource_properties,
        },
        resource_type="Custom::InvokeLambda",
    )

    return invoke_lambda_custom_resource
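

# A hedged usage sketch for the helper above: the stack, ARNs, and property
# values here are illustrative assumptions, not taken from the source.
def _example_invoke_lambda_usage(stack, blueprint_bucket):
    # the invoked function's name/ARN would normally come from another
    # construct or a CfnParameter in the same stack
    return create_invoke_lambda_custom_resource(
        scope=stack,
        id="InvokeExampleLambda",
        lambda_function_arn="arn:aws:lambda:us-east-1:111111111111:function:example-fn",
        lambda_function_name="example-fn",
        blueprint_bucket=blueprint_bucket,
        custom_resource_properties={"example_key": "example_value"},
    )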
    def __init__(self,
                 scope: core.Construct,
                 id: str,
                 *,
                 multi_account=False,
                 **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        # Get stack parameters:
        notification_email = create_notification_email_parameter(self)
        git_address = create_git_address_parameter(self)
        # Get the optional S3 assets bucket to use
        existing_bucket = create_existing_bucket_parameter(self)
        # Get the optional Amazon ECR repository to use
        existing_ecr_repo = create_existing_ecr_repo_parameter(self)
        # create only if multi_account template
        if multi_account:
            # create development parameters
            account_type = "development"
            dev_account_id = create_account_id_parameter(
                self, "DEV_ACCOUNT_ID", account_type)
            dev_org_id = create_org_id_parameter(self, "DEV_ORG_ID",
                                                 account_type)
            # create staging parameters
            account_type = "staging"
            staging_account_id = create_account_id_parameter(
                self, "STAGING_ACCOUNT_ID", account_type)
            staging_org_id = create_org_id_parameter(self, "STAGING_ORG_ID",
                                                     account_type)
            # create production parameters
            account_type = "production"
            prod_account_id = create_account_id_parameter(
                self, "PROD_ACCOUNT_ID", account_type)
            prod_org_id = create_org_id_parameter(self, "PROD_ORG_ID",
                                                  account_type)

        # Conditions
        git_address_provided = create_git_address_provided_condition(
            self, git_address)

        # client provided an existing S3 bucket name, to be used for assets
        existing_bucket_provided = create_existing_bucket_provided_condition(
            self, existing_bucket)

        # client provided an existing Amazon ECR name
        existing_ecr_provided = create_existing_ecr_provided_condition(
            self, existing_ecr_repo)

        # S3 bucket needs to be created for assets
        create_new_bucket = create_new_bucket_condition(self, existing_bucket)

        # Amazon ECR repository needs to be created for custom algorithms
        create_new_ecr_repo = create_new_ecr_repo_condition(
            self, existing_ecr_repo)

        # Constants
        pipeline_stack_name = "mlops-pipeline"

        # CDK Resources setup
        access_logs_bucket = s3.Bucket(
            self,
            "accessLogs",
            encryption=s3.BucketEncryption.S3_MANAGED,
            block_public_access=s3.BlockPublicAccess.BLOCK_ALL,
        )

        # Apply secure transfer bucket policy
        apply_secure_bucket_policy(access_logs_bucket)

        # This is a logging bucket.
        access_logs_bucket.node.default_child.cfn_options.metadata = suppress_s3_access_policy(
        )

        # Import the user-provided S3 bucket, if any. s3.Bucket.from_bucket_arn is used instead of
        # s3.Bucket.from_bucket_name to allow cross-account buckets.
        client_existing_bucket = s3.Bucket.from_bucket_arn(
            self,
            "ClientExistingBucket",
            f"arn:aws:s3:::{existing_bucket.value_as_string.strip()}",
        )

        # Create the resource if existing_bucket_provided condition is True
        core.Aspects.of(client_existing_bucket).add(
            ConditionalResources(existing_bucket_provided))

        # Import user provided Amazon ECR repository

        client_ecr_repo = ecr.Repository.from_repository_name(
            self, "ClientExistingECRRepo", existing_ecr_repo.value_as_string)
        # Create the resource if existing_ecr_provided condition is True
        core.Aspects.of(client_ecr_repo).add(
            ConditionalResources(existing_ecr_provided))

        # Creating assets bucket so that users can upload ML Models to it.
        assets_bucket = s3.Bucket(
            self,
            "pipeline-assets-" + str(uuid.uuid4()),
            versioned=True,
            encryption=s3.BucketEncryption.S3_MANAGED,
            server_access_logs_bucket=access_logs_bucket,
            server_access_logs_prefix="assets_bucket_access_logs",
            block_public_access=s3.BlockPublicAccess.BLOCK_ALL,
        )

        # Apply secure transport bucket policy
        apply_secure_bucket_policy(assets_bucket)
        s3_actions = ["s3:GetObject", "s3:ListBucket"]
        # if multi account
        if multi_account:
            # add permissions for other accounts to access the assets bucket

            assets_bucket.add_to_resource_policy(
                iam.PolicyStatement(
                    effect=iam.Effect.ALLOW,
                    actions=s3_actions,
                    principals=[
                        iam.AccountPrincipal(dev_account_id.value_as_string),
                        iam.AccountPrincipal(
                            staging_account_id.value_as_string),
                        iam.AccountPrincipal(prod_account_id.value_as_string),
                    ],
                    resources=[
                        assets_bucket.bucket_arn,
                        f"{assets_bucket.bucket_arn}/*"
                    ],
                ))

        # Create the resource if create_new_bucket condition is True
        core.Aspects.of(assets_bucket).add(
            ConditionalResources(create_new_bucket))

        # Get assets S3 bucket's name/arn, based on the condition
        assets_s3_bucket_name = core.Fn.condition_if(
            existing_bucket_provided.logical_id,
            client_existing_bucket.bucket_name,
            assets_bucket.bucket_name,
        ).to_string()

        # Creating Amazon ECR repository
        ecr_repo = ecr.Repository(self, "ECRRepo", image_scan_on_push=True)

        # if multi account
        if multi_account:
            # add permissions for other accounts to pull images
            ecr_repo.add_to_resource_policy(
                iam.PolicyStatement(
                    effect=iam.Effect.ALLOW,
                    actions=[
                        "ecr:DescribeImages",
                        "ecr:DescribeRepositories",
                        "ecr:GetDownloadUrlForLayer",
                        "ecr:BatchGetImage",
                        "ecr:BatchCheckLayerAvailability",
                    ],
                    principals=[
                        iam.AccountPrincipal(dev_account_id.value_as_string),
                        iam.AccountPrincipal(
                            staging_account_id.value_as_string),
                        iam.AccountPrincipal(prod_account_id.value_as_string),
                    ],
                ))
        # Create the resource if create_new_ecr condition is True
        core.Aspects.of(ecr_repo).add(
            ConditionalResources(create_new_ecr_repo))

        # Get ECR repo's name based on the condition
        ecr_repo_name = core.Fn.condition_if(
            existing_ecr_provided.logical_id,
            client_ecr_repo.repository_name,
            ecr_repo.repository_name,
        ).to_string()

        # Get ECR repo's arn based on the condition
        ecr_repo_arn = core.Fn.condition_if(
            existing_ecr_provided.logical_id,
            client_ecr_repo.repository_arn,
            ecr_repo.repository_arn,
        ).to_string()

        blueprints_bucket_name = "blueprint-repository-" + str(uuid.uuid4())
        blueprint_repository_bucket = s3.Bucket(
            self,
            blueprints_bucket_name,
            encryption=s3.BucketEncryption.S3_MANAGED,
            server_access_logs_bucket=access_logs_bucket,
            server_access_logs_prefix=blueprints_bucket_name,
            block_public_access=s3.BlockPublicAccess.BLOCK_ALL,
        )
        # Apply secure transport bucket policy
        apply_secure_bucket_policy(blueprint_repository_bucket)

        # if multi account
        if multi_account:
            # add permissions for other accounts to access the blueprint bucket
            blueprint_repository_bucket.add_to_resource_policy(
                iam.PolicyStatement(
                    effect=iam.Effect.ALLOW,
                    actions=s3_actions,
                    principals=[
                        iam.AccountPrincipal(dev_account_id.value_as_string),
                        iam.AccountPrincipal(
                            staging_account_id.value_as_string),
                        iam.AccountPrincipal(prod_account_id.value_as_string),
                    ],
                    resources=[
                        blueprint_repository_bucket.bucket_arn,
                        f"{blueprint_repository_bucket.bucket_arn}/*"
                    ],
                ))

        # Custom resource to copy source bucket content to blueprints bucket
        custom_resource_lambda_fn = lambda_.Function(
            self,
            "CustomResourceLambda",
            code=lambda_.Code.from_asset("lambdas/custom_resource"),
            handler="index.on_event",
            runtime=lambda_.Runtime.PYTHON_3_8,
            environment={
                "source_bucket": "https://%%BUCKET_NAME%%-" + core.Aws.REGION +
                ".s3.amazonaws.com/%%SOLUTION_NAME%%/%%VERSION%%",
                "destination_bucket": blueprint_repository_bucket.bucket_name,
                "LOG_LEVEL": "INFO",
            },
            timeout=core.Duration.seconds(60),
        )

        custom_resource_lambda_fn.node.default_child.cfn_options.metadata = suppress_lambda_policies(
        )
        blueprint_repository_bucket.grant_write(custom_resource_lambda_fn)
        custom_resource = core.CustomResource(
            self,
            "CustomResourceCopyAssets",
            service_token=custom_resource_lambda_fn.function_arn,
        )
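        # ensure the blueprints bucket exists before the copy custom resource runs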
        custom_resource.node.add_dependency(blueprint_repository_bucket)
        # IAM policies setup ###
        cloudformation_role = iam.Role(
            self,
            "mlopscloudformationrole",
            assumed_by=iam.ServicePrincipal("cloudformation.amazonaws.com"),
        )
        lambda_invoke_action = "lambda:InvokeFunction"
        # Cloudformation policy setup
        orchestrator_policy = iam.Policy(
            self,
            "lambdaOrchestratorPolicy",
            statements=[
                iam.PolicyStatement(
                    actions=[
                        "cloudformation:CreateStack",
                        "cloudformation:DeleteStack",
                        "cloudformation:UpdateStack",
                        "cloudformation:ListStackResources",
                    ],
                    resources=[
                        (f"arn:{core.Aws.PARTITION}:cloudformation:{core.Aws.REGION}:"
                         f"{core.Aws.ACCOUNT_ID}:stack/{pipeline_stack_name}*/*"
                         ),
                    ],
                ),
                iam.PolicyStatement(
                    actions=[
                        "iam:CreateRole",
                        "iam:DeleteRole",
                        "iam:DeleteRolePolicy",
                        "iam:GetRole",
                        "iam:GetRolePolicy",
                        "iam:PassRole",
                        "iam:PutRolePolicy",
                        "iam:AttachRolePolicy",
                        "iam:DetachRolePolicy",
                    ],
                    resources=[
                        f"arn:{core.Aws.PARTITION}:iam::{core.Aws.ACCOUNT_ID}:role/{pipeline_stack_name}*"
                    ],
                ),
                iam.PolicyStatement(
                    actions=[
                        "ecr:CreateRepository",
                        "ecr:DescribeRepositories",
                    ],
                    resources=[
                        (f"arn:{core.Aws.PARTITION}:ecr:{core.Aws.REGION}:"
                         f"{core.Aws.ACCOUNT_ID}:repository/{ecr_repo_name}")
                    ],
                ),
                iam.PolicyStatement(
                    actions=[
                        "codebuild:CreateProject",
                        "codebuild:DeleteProject",
                        "codebuild:BatchGetProjects",
                    ],
                    resources=[
                        (f"arn:{core.Aws.PARTITION}:codebuild:{core.Aws.REGION}:"
                         f"{core.Aws.ACCOUNT_ID}:project/ContainerFactory*"),
                        (f"arn:{core.Aws.PARTITION}:codebuild:{core.Aws.REGION}:"
                         f"{core.Aws.ACCOUNT_ID}:project/VerifySagemaker*"),
                        (f"arn:{core.Aws.PARTITION}:codebuild:{core.Aws.REGION}:"
                         f"{core.Aws.ACCOUNT_ID}:report-group/*"),
                    ],
                ),
                iam.PolicyStatement(
                    actions=[
                        "lambda:CreateFunction",
                        "lambda:DeleteFunction",
                        lambda_invoke_action,
                        "lambda:PublishLayerVersion",
                        "lambda:DeleteLayerVersion",
                        "lambda:GetLayerVersion",
                        "lambda:GetFunctionConfiguration",
                        "lambda:GetFunction",
                        "lambda:AddPermission",
                        "lambda:RemovePermission",
                        "lambda:UpdateFunctionConfiguration",
                    ],
                    resources=[
                        f"arn:{core.Aws.PARTITION}:lambda:{core.Aws.REGION}:{core.Aws.ACCOUNT_ID}:layer:*",
                        f"arn:{core.Aws.PARTITION}:lambda:{core.Aws.REGION}:{core.Aws.ACCOUNT_ID}:function:*",
                    ],
                ),
                iam.PolicyStatement(
                    actions=s3_actions,
                    resources=[
                        blueprint_repository_bucket.bucket_arn,
                        blueprint_repository_bucket.arn_for_objects("*"),
                        f"arn:{core.Aws.PARTITION}:s3:::{assets_s3_bucket_name}/*",
                    ],
                ),
                iam.PolicyStatement(
                    actions=[
                        "codepipeline:CreatePipeline",
                        "codepipeline:UpdatePipeline",
                        "codepipeline:DeletePipeline",
                        "codepipeline:GetPipeline",
                        "codepipeline:GetPipelineState",
                    ],
                    resources=
                    [(f"arn:{core.Aws.PARTITION}:codepipeline:{core.Aws.REGION}:"
                      f"{core.Aws.ACCOUNT_ID}:{pipeline_stack_name}*")],
                ),
                iam.PolicyStatement(
                    actions=[
                        "apigateway:POST",
                        "apigateway:PATCH",
                        "apigateway:DELETE",
                        "apigateway:GET",
                        "apigateway:PUT",
                    ],
                    resources=[
                        f"arn:{core.Aws.PARTITION}:apigateway:{core.Aws.REGION}::/restapis/*",
                        f"arn:{core.Aws.PARTITION}:apigateway:{core.Aws.REGION}::/restapis",
                        f"arn:{core.Aws.PARTITION}:apigateway:{core.Aws.REGION}::/account",
                        f"arn:{core.Aws.PARTITION}:apigateway:{core.Aws.REGION}::/usageplans",
                        f"arn:{core.Aws.PARTITION}:apigateway:{core.Aws.REGION}::/usageplans/*",
                    ],
                ),
                iam.PolicyStatement(
                    actions=[
                        "logs:CreateLogGroup",
                        "logs:DescribeLogGroups",
                    ],
                    resources=[
                        f"arn:{core.Aws.PARTITION}:logs:{core.Aws.REGION}:{core.Aws.ACCOUNT_ID}:log-group:*",
                    ],
                ),
                iam.PolicyStatement(
                    actions=[
                        "s3:CreateBucket",
                        "s3:PutEncryptionConfiguration",
                        "s3:PutBucketVersioning",
                        "s3:PutBucketPublicAccessBlock",
                        "s3:PutBucketLogging",
                    ],
                    resources=[f"arn:{core.Aws.PARTITION}:s3:::*"],
                ),
                iam.PolicyStatement(
                    actions=[
                        "s3:PutObject",
                    ],
                    resources=[
                        f"arn:{core.Aws.PARTITION}:s3:::{assets_s3_bucket_name}/*"
                    ],
                ),
                iam.PolicyStatement(
                    actions=[
                        "sns:CreateTopic",
                        "sns:DeleteTopic",
                        "sns:Subscribe",
                        "sns:Unsubscribe",
                        "sns:GetTopicAttributes",
                        "sns:SetTopicAttributes",
                    ],
                    resources=
                    [(f"arn:{core.Aws.PARTITION}:sns:{core.Aws.REGION}:{core.Aws.ACCOUNT_ID}:"
                      f"{pipeline_stack_name}*-*PipelineNotification*")],
                ),
                iam.PolicyStatement(
                    actions=[
                        "events:PutRule",
                        "events:DescribeRule",
                        "events:PutTargets",
                        "events:RemoveTargets",
                        "events:DeleteRule",
                        "events:PutEvents",
                    ],
                    resources=[
                        f"arn:{core.Aws.PARTITION}:events:{core.Aws.REGION}:{core.Aws.ACCOUNT_ID}:rule/*",
                        f"arn:{core.Aws.PARTITION}:events:{core.Aws.REGION}:{core.Aws.ACCOUNT_ID}:event-bus/*",
                    ],
                ),
            ],
        )
        orchestrator_policy.attach_to_role(cloudformation_role)

        # Lambda function IAM setup
        lambda_passrole_policy = iam.PolicyStatement(
            actions=["iam:PassRole"], resources=[cloudformation_role.role_arn])
        # create sagemaker layer
        sm_layer = sagemaker_layer(self, blueprint_repository_bucket)
        # make sure the sagemaker code is uploaded first to the blueprints bucket
        sm_layer.node.add_dependency(custom_resource)
        # API Gateway and lambda setup to enable provisioning pipelines through API calls
        provisioner_apigw_lambda = aws_apigateway_lambda.ApiGatewayToLambda(
            self,
            "PipelineOrchestration",
            lambda_function_props={
                "runtime": lambda_.Runtime.PYTHON_3_8,
                "handler": "index.handler",
                "code":
                lambda_.Code.from_asset("lambdas/pipeline_orchestration"),
                "layers": [sm_layer],
                "timeout": core.Duration.minutes(10),
            },
            api_gateway_props={
                "defaultMethodOptions": {
                    "authorizationType": apigw.AuthorizationType.IAM,
                },
                "restApiName": f"{core.Aws.STACK_NAME}-orchestrator",
                "proxy": False,
                "dataTraceEnabled": True,
            },
        )

        # add lambda suppressions
        provisioner_apigw_lambda.lambda_function.node.default_child.cfn_options.metadata = suppress_lambda_policies(
        )

        provision_resource = provisioner_apigw_lambda.api_gateway.root.add_resource(
            "provisionpipeline")
        provision_resource.add_method("POST")
        status_resource = provisioner_apigw_lambda.api_gateway.root.add_resource(
            "pipelinestatus")
        status_resource.add_method("POST")
        blueprint_repository_bucket.grant_read(
            provisioner_apigw_lambda.lambda_function)
        provisioner_apigw_lambda.lambda_function.add_to_role_policy(
            lambda_passrole_policy)
        orchestrator_policy.attach_to_role(
            provisioner_apigw_lambda.lambda_function.role)

        # Environment variables setup
        provisioner_apigw_lambda.lambda_function.add_environment(
            key="BLUEPRINT_BUCKET_URL",
            value=str(blueprint_repository_bucket.bucket_regional_domain_name),
        )
        provisioner_apigw_lambda.lambda_function.add_environment(
            key="BLUEPRINT_BUCKET",
            value=str(blueprint_repository_bucket.bucket_name))
        provisioner_apigw_lambda.lambda_function.add_environment(
            key="ACCESS_BUCKET", value=str(access_logs_bucket.bucket_name))
        provisioner_apigw_lambda.lambda_function.add_environment(
            key="ASSETS_BUCKET", value=str(assets_s3_bucket_name))
        provisioner_apigw_lambda.lambda_function.add_environment(
            key="CFN_ROLE_ARN", value=str(cloudformation_role.role_arn))
        provisioner_apigw_lambda.lambda_function.add_environment(
            key="PIPELINE_STACK_NAME", value=pipeline_stack_name)
        provisioner_apigw_lambda.lambda_function.add_environment(
            key="NOTIFICATION_EMAIL", value=notification_email.value_as_string)
        provisioner_apigw_lambda.lambda_function.add_environment(
            key="REGION", value=core.Aws.REGION)
        provisioner_apigw_lambda.lambda_function.add_environment(
            key="IS_MULTI_ACCOUNT", value=str(multi_account))

        # if multi account
        if multi_account:
            provisioner_apigw_lambda.lambda_function.add_environment(
                key="DEV_ACCOUNT_ID", value=dev_account_id.value_as_string)
            provisioner_apigw_lambda.lambda_function.add_environment(
                key="DEV_ORG_ID", value=dev_org_id.value_as_string)

            provisioner_apigw_lambda.lambda_function.add_environment(
                key="STAGING_ACCOUNT_ID",
                value=staging_account_id.value_as_string)
            provisioner_apigw_lambda.lambda_function.add_environment(
                key="STAGING_ORG_ID", value=staging_org_id.value_as_string)

            provisioner_apigw_lambda.lambda_function.add_environment(
                key="PROD_ACCOUNT_ID", value=prod_account_id.value_as_string)
            provisioner_apigw_lambda.lambda_function.add_environment(
                key="PROD_ORG_ID", value=prod_org_id.value_as_string)

        provisioner_apigw_lambda.lambda_function.add_environment(
            key="ECR_REPO_NAME", value=ecr_repo_name)

        provisioner_apigw_lambda.lambda_function.add_environment(
            key="ECR_REPO_ARN", value=ecr_repo_arn)

        provisioner_apigw_lambda.lambda_function.add_environment(
            key="LOG_LEVEL", value="DEBUG")
        cfn_policy_for_lambda = orchestrator_policy.node.default_child
        cfn_policy_for_lambda.cfn_options.metadata = {
            "cfn_nag": {
                "rules_to_suppress": [{
                    "id":
                    "W76",
                    "reason":
                    "A complex IAM policy is required for this resource.",
                }]
            }
        }

        # Codepipeline with Git source definitions ###
        source_output = codepipeline.Artifact()
        # processing git_address to retrieve repo name
        repo_name_split = core.Fn.split("/", git_address.value_as_string)
        repo_name = core.Fn.select(5, repo_name_split)
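        # a CodeCommit HTTPS clone URL, e.g.
        # "https://git-codecommit.us-east-1.amazonaws.com/v1/repos/my-repo",
        # splits on "/" into ["https:", "", "git-codecommit...", "v1", "repos", "my-repo"],
        # so index 5 selects the repository name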
        # getting codecommit repo cdk object using 'from_repository_name'
        repo = codecommit.Repository.from_repository_name(
            self, "AWSMLOpsFrameworkRepository", repo_name)
        codebuild_project = codebuild.PipelineProject(
            self,
            "Take config file",
            build_spec=codebuild.BuildSpec.from_object({
                "version": "0.2",
                "phases": {
                    "build": {
                        "commands": [
                            "ls -a",
                            "aws lambda invoke --function-name " +
                            provisioner_apigw_lambda.lambda_function.
                            function_name +
                            " --payload fileb://mlops-config.json response.json"
                            + " --invocation-type RequestResponse",
                        ]
                    }
                },
            }),
        )
        # Defining a CodePipeline pipeline with CodeCommit as source
        codecommit_pipeline = codepipeline.Pipeline(
            self,
            "MLOpsCodeCommitPipeline",
            stages=[
                codepipeline.StageProps(
                    stage_name="Source",
                    actions=[
                        codepipeline_actions.CodeCommitSourceAction(
                            action_name="CodeCommit",
                            repository=repo,
                            branch="main",
                            output=source_output,
                        )
                    ],
                ),
                codepipeline.StageProps(
                    stage_name="TakeConfig",
                    actions=[
                        codepipeline_actions.CodeBuildAction(
                            action_name="provision_pipeline",
                            input=source_output,
                            outputs=[],
                            project=codebuild_project,
                        )
                    ],
                ),
            ],
            cross_account_keys=False,
        )
        codecommit_pipeline.add_to_role_policy(
            iam.PolicyStatement(
                actions=[lambda_invoke_action],
                resources=[
                    provisioner_apigw_lambda.lambda_function.function_arn
                ],
            ))
        codebuild_project.add_to_role_policy(
            iam.PolicyStatement(
                actions=[lambda_invoke_action],
                resources=[
                    provisioner_apigw_lambda.lambda_function.function_arn
                ],
            ))
        pipeline_child_nodes = codecommit_pipeline.node.find_all()
        pipeline_child_nodes[1].node.default_child.cfn_options.metadata = {
            "cfn_nag": {
                "rules_to_suppress": [
                    {
                        "id":
                        "W35",
                        "reason":
                        "This is a managed bucket generated by CDK for codepipeline.",
                    },
                    {
                        "id":
                        "W51",
                        "reason":
                        "This is a managed bucket generated by CDK for codepipeline.",
                    },
                ]
            }
        }

        # Custom resource for operational metrics ###
        metrics_mapping = core.CfnMapping(
            self,
            "AnonymousData",
            mapping={"SendAnonymousData": {
                "Data": "Yes"
            }})
        metrics_condition = core.CfnCondition(
            self,
            "AnonymousDatatoAWS",
            expression=core.Fn.condition_equals(
                metrics_mapping.find_in_map("SendAnonymousData", "Data"),
                "Yes"),
        )

        helper_function = lambda_.Function(
            self,
            "SolutionHelper",
            code=lambda_.Code.from_asset("lambdas/solution_helper"),
            handler="lambda_function.handler",
            runtime=lambda_.Runtime.PYTHON_3_8,
            timeout=core.Duration.seconds(60),
        )

        helper_function.node.default_child.cfn_options.metadata = suppress_lambda_policies(
        )
        create_id_function = core.CustomResource(
            self,
            "CreateUniqueID",
            service_token=helper_function.function_arn,
            properties={"Resource": "UUID"},
            resource_type="Custom::CreateUUID",
        )

        send_data_function = core.CustomResource(
            self,
            "SendAnonymousData",
            service_token=helper_function.function_arn,
            properties={
                "Resource": "AnonymousMetric",
                "UUID": create_id_function.get_att_string("UUID"),
                "gitSelected": git_address.value_as_string,
                "Region": core.Aws.REGION,
                "SolutionId": "SO0136",
                "Version": "%%VERSION%%",
            },
            resource_type="Custom::AnonymousData",
        )

        core.Aspects.of(helper_function).add(
            ConditionalResources(metrics_condition))
        core.Aspects.of(create_id_function).add(
            ConditionalResources(metrics_condition))
        core.Aspects.of(send_data_function).add(
            ConditionalResources(metrics_condition))

        # If user chooses Git as pipeline provision type, create codepipeline with Git repo as source
        core.Aspects.of(repo).add(ConditionalResources(git_address_provided))
        core.Aspects.of(codecommit_pipeline).add(
            ConditionalResources(git_address_provided))
        core.Aspects.of(codebuild_project).add(
            ConditionalResources(git_address_provided))

        # Create Template Interface
        parameters_list = [
            notification_email.logical_id,
            git_address.logical_id,
            existing_bucket.logical_id,
            existing_ecr_repo.logical_id,
        ]

        # if multi account
        if multi_account:
            parameters_list.extend([
                dev_account_id.logical_id,
                dev_org_id.logical_id,
                staging_account_id.logical_id,
                staging_org_id.logical_id,
                prod_account_id.logical_id,
                prod_org_id.logical_id,
            ])

        parameters_labels = {
            f"{notification_email.logical_id}": {
                "default": "Notification Email (Required)"
            },
            f"{git_address.logical_id}": {
                "default": "CodeCommit Repo URL Address (Optional)"
            },
            f"{existing_bucket.logical_id}": {
                "default": "Name of an Existing S3 Bucket (Optional)"
            },
            f"{existing_ecr_repo.logical_id}": {
                "default":
                "Name of an Existing Amazon ECR repository (Optional)"
            },
        }

        if multi_account:
            parameters_labels.update({
                f"{dev_account_id.logical_id}": {
                    "default": "Development Account ID (Required)"
                },
                f"{dev_org_id.logical_id}": {
                    "default":
                    "Development Account Organizational Unit ID (Required)"
                },
                f"{staging_account_id.logical_id}": {
                    "default": "Staging Account ID (Required)"
                },
                f"{staging_org_id.logical_id}": {
                    "default":
                    "Staging Account Organizational Unit ID (Required)"
                },
                f"{prod_account_id.logical_id}": {
                    "default": "Production Account ID (Required)"
                },
                f"{prod_org_id.logical_id}": {
                    "default":
                    "Production Account Organizational Unit ID (Required)"
                },
            })
        self.template_options.metadata = {
            "AWS::CloudFormation::Interface": {
                "ParameterGroups": [{
                    "Label": {
                        "default": "MLOps Framework Settings"
                    },
                    "Parameters": paramaters_list,
                }],
                "ParameterLabels":
                paramaters_labels,
            }
        }
        # Outputs #
        core.CfnOutput(
            self,
            id="BlueprintsBucket",
            value=
            f"https://s3.console.aws.amazon.com/s3/buckets/{blueprint_repository_bucket.bucket_name}",
            description="S3 Bucket to upload MLOps Framework Blueprints",
        )
        core.CfnOutput(
            self,
            id="AssetsBucket",
            value=
            f"https://s3.console.aws.amazon.com/s3/buckets/{assets_s3_bucket_name}",
            description="S3 Bucket to upload model artifact",
        )
        core.CfnOutput(
            self,
            id="ECRRepoName",
            value=ecr_repo_name,
            description="Amazon ECR repository's name",
        )
        core.CfnOutput(
            self,
            id="ECRRepoArn",
            value=ecr_repo_arn,
            description="Amazon ECR repository's arn",
        )
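
# A hedged sketch of how a stack built from the __init__ above might be
# synthesized; the class name `MLOpsStack` and the stack id are assumptions.
#
#   app = core.App()
#   MLOpsStack(app, "aws-mlops-framework", multi_account=True)
#   app.synth()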
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        # Parameters #
        assets_bucket_name = create_assets_bucket_name_parameter(self)
        blueprint_bucket_name = create_blueprint_bucket_name_parameter(self)
        custom_algorithms_ecr_repo_arn = create_custom_algorithms_ecr_repo_arn_parameter(
            self)
        kms_key_arn = create_kms_key_arn_parameter(self)
        algorithm_image_uri = create_algorithm_image_uri_parameter(self)
        model_name = create_model_name_parameter(self)
        model_artifact_location = create_model_artifact_location_parameter(
            self)
        data_capture_location = create_data_capture_location_parameter(self)
        inference_instance = create_inference_instance_parameter(self)

        # Conditions
        custom_algorithms_ecr_repo_arn_provided = create_custom_algorithms_ecr_repo_arn_provided_condition(
            self, custom_algorithms_ecr_repo_arn)
        kms_key_arn_provided = create_kms_key_arn_provided_condition(
            self, kms_key_arn)

        # Resources #
        # getting blueprint bucket object from its name - will be used later in the stack
        blueprint_bucket = s3.Bucket.from_bucket_name(
            self, "BlueprintBucket", blueprint_bucket_name.value_as_string)

        # provision api gateway and lambda for inference using solution constructs
        inference_api_gateway = aws_apigateway_lambda.ApiGatewayToLambda(
            self,
            "BYOMInference",
            lambda_function_props={
                "runtime":
                lambda_.Runtime.PYTHON_3_8,
                "handler":
                "main.handler",
                "code":
                lambda_.Code.from_bucket(
                    blueprint_bucket, "blueprints/byom/lambdas/inference.zip"),
            },
            api_gateway_props={
                "defaultMethodOptions": {
                    "authorizationType": apigw.AuthorizationType.IAM,
                },
                "restApiName": f"{core.Aws.STACK_NAME}-inference",
                "proxy": False,
            },
        )
        # add suppressions
        inference_api_gateway.lambda_function.node.default_child.cfn_options.metadata = suppress_lambda_policies(
        )
        provision_resource = inference_api_gateway.api_gateway.root.add_resource(
            "inference")
        provision_resource.add_method("POST")

        # create Sagemaker role
        sagemaker_role = create_sagemaker_role(
            self,
            "MLOpsRealtimeSagemakerRole",
            custom_algorithms_ecr_arn=custom_algorithms_ecr_repo_arn.
            value_as_string,
            kms_key_arn=kms_key_arn.value_as_string,
            assets_bucket_name=assets_bucket_name.value_as_string,
            input_bucket_name=assets_bucket_name.value_as_string,
            input_s3_location=assets_bucket_name.value_as_string,
            output_s3_location=data_capture_location.value_as_string,
            ecr_repo_arn_provided_condition=
            custom_algorithms_ecr_repo_arn_provided,
            kms_key_arn_provided_condition=kms_key_arn_provided,
        )

        # create sagemaker model
        sagemaker_model = create_sagemaker_model(
            self,
            "MLOpsSagemakerModel",
            execution_role=sagemaker_role,
            primary_container={
                "image":
                algorithm_image_uri.value_as_string,
                "modelDataUrl":
                f"s3://{assets_bucket_name.value_as_string}/{model_artifact_location.value_as_string}",
            },
            tags=[{
                "key": "model_name",
                "value": model_name.value_as_string
            }],
        )

        # Create SageMaker EndpointConfig
        sagemaker_endpoint_config = create_sagemaker_endpoint_config(
            self,
            "MLOpsSagemakerEndpointConfig",
            sagemaker_model.attr_model_name,
            model_name.value_as_string,
            inference_instance.value_as_string,
            data_capture_location.value_as_string,
            core.Fn.condition_if(kms_key_arn_provided.logical_id,
                                 kms_key_arn.value_as_string,
                                 core.Aws.NO_VALUE).to_string(),
        )

        # create a dependency on the model
        sagemaker_endpoint_config.add_depends_on(sagemaker_model)

        # create Sagemaker endpoint
        sagemaker_endpoint = create_sagemaker_endpoint(
            self,
            "MLOpsSagemakerEndpoint",
            sagemaker_endpoint_config.attr_endpoint_config_name,
            model_name.value_as_string,
        )

        # add dependency on endpoint config
        sagemaker_endpoint.add_depends_on(sagemaker_endpoint_config)

        # Create the Lambda to SageMaker endpoint integration
        LambdaToSagemakerEndpoint(
            self,
            "LambdaSagmakerEndpoint",
            existing_sagemaker_endpoint_obj=sagemaker_endpoint,
            existing_lambda_obj=inference_api_gateway.lambda_function,
        )

        # Outputs #
        core.CfnOutput(
            self,
            id="SageMakerModelName",
            value=sagemaker_model.attr_model_name,
        )
        core.CfnOutput(
            self,
            id="SageMakerEndpointConfigName",
            value=sagemaker_endpoint_config.attr_endpoint_config_name,
        )
        core.CfnOutput(
            self,
            id="SageMakerEndpointName",
            value=sagemaker_endpoint.attr_endpoint_name,
        )
        core.CfnOutput(
            self,
            id="EndpointDataCaptureLocation",
            value=
            f"https://s3.console.aws.amazon.com/s3/buckets/{data_capture_location.value_as_string}/",
            description=
            "Endpoint data capture location (to be used by Model Monitor)",
        )
def batch_transform(
    scope,  # NOSONAR:S107 this function is designed to take many arguments
    id,
    blueprint_bucket,
    assets_bucket,
    model_name,
    inference_instance,
    batch_input_bucket,
    batch_inference_data,
    batch_job_output_location,
    kms_key_arn,
    sm_layer,
):
    """
    batch_transform creates a Lambda function that creates a SageMaker batch transform job

    :scope: CDK Construct scope that's needed to create CDK resources
    :id: the logicalId of the CDK resource
    :blueprint_bucket: CDK object of the blueprint bucket that contains resources for BYOM pipeline
    :assets_bucket: the bucket cdk object where pipeline assets are stored
    :model_name: name of the sagemaker model to be created, in the form of a CDK CfnParameter object
    :inference_instance: compute instance type for the sagemaker inference endpoint, in the form of
    a CDK CfnParameter object
    :batch_input_bucket: bucket name where the batch data is stored
    :batch_inference_data: location of the batch inference data in assets bucket, in the form of
    a CDK CfnParameter object
    :batch_job_output_location: S3 bucket location where the result of the batch job will be stored
    :kms_key_arn: optional KMS key ARN used to encrypt the job's output and instance volume.
    :sm_layer: sagemaker lambda layer
    :return: Lambda function
    """
    s3_read = s3_policy_read(
        list(
            set([
                f"arn:aws:s3:::{assets_bucket.bucket_name}",
                f"arn:aws:s3:::{assets_bucket.bucket_name}/*",
                f"arn:aws:s3:::{batch_input_bucket}",
                f"arn:aws:s3:::{batch_inference_data}",
            ])))
    s3_write = s3_policy_write([
        f"arn:aws:s3:::{batch_job_output_location}/*",
    ])

    batch_transform_permissions = batch_transform_policy()

    lambda_role = create_service_role(
        scope,
        "batch_transform_lambda_role",
        "lambda.amazonaws.com",
        ("Role that creates a lambda function assumes to create a sagemaker batch transform "
         "job in the aws mlops pipeline."),
    )

    lambda_role.add_to_policy(batch_transform_permissions)
    lambda_role.add_to_policy(s3_read)
    lambda_role.add_to_policy(s3_write)
    add_logs_policy(lambda_role)

    batch_transform_lambda = lambda_.Function(
        scope,
        id,
        runtime=lambda_.Runtime.PYTHON_3_8,
        handler="main.handler",
        layers=[sm_layer],
        role=lambda_role,
        code=lambda_.Code.from_bucket(
            blueprint_bucket, "blueprints/byom/lambdas/batch_transform.zip"),
        environment={
            "model_name": model_name,
            "inference_instance": inference_instance,
            "assets_bucket": assets_bucket.bucket_name,
            "batch_inference_data": batch_inference_data,
            "batch_job_output_location": batch_job_output_location,
            "kms_key_arn": kms_key_arn,
            "LOG_LEVEL": "INFO",
        },
    )

    batch_transform_lambda.node.default_child.cfn_options.metadata = suppress_lambda_policies(
    )

    return batch_transform_lambda
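

# A hedged usage sketch for batch_transform; bucket objects, the layer, and
# all parameter values are illustrative assumptions, not from the source.
def _example_batch_transform_usage(stack, blueprint_bucket, assets_bucket, sm_layer):
    return batch_transform(
        scope=stack,
        id="BatchTransformLambda",
        blueprint_bucket=blueprint_bucket,
        assets_bucket=assets_bucket,
        model_name="example-model",
        inference_instance="ml.m5.large",
        batch_input_bucket="example-input-bucket",
        batch_inference_data="example-input-bucket/data/input.csv",
        batch_job_output_location="example-output-bucket/batch-output",
        kms_key_arn="",  # optional; empty string assumed to mean "no key"
        sm_layer=sm_layer,
    )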
def create_stackset_action(
    scope,  # NOSONAR:S107 this function is designed to take many arguments
    action_name,
    blueprint_bucket,
    source_output,
    artifact,
    template_file,
    stage_params_file,
    account_ids,
    org_ids,
    regions,
    assets_bucket,
    stack_name,
):
    """
    create_stackset_action creates an invokeLambda action to be added to an AWS CodePipeline stage

    :scope: CDK Construct scope that's needed to create CDK resources
    :action_name: name of the StackSet action
    :blueprint_bucket: CDK object of the blueprint bucket that contains resources for BYOM pipeline
    :source_output: CDK object of the Source action's output
    :artifact: name of the input artifact to the StackSet action
    :template_file: name of the Cloudformation template to be deployed
    :stage_params_file: name of the template parameters for the stage
    :account_ids: list of AWS accounts where the stack will be deployed
    :org_ids: list of AWS organizational unit ids where the stack will be deployed
    :regions: list of regions where the stack will be deployed
    :assets_bucket: the bucket cdk object where pipeline assets are stored
    :stack_name: name of the stack to be deployed
    :return: codepipeline invokeLambda action in a form of a CDK object that can be attached to a codepipeline stage
    """
    # creating a role for the Lambda function that creates/updates the CloudFormation StackSet
    lambda_role = create_service_role(
        scope,
        f"{action_name}_role",
        lambda_service,
        "The role that is assumed by create_update_cf_stackset Lambda function.",
    )
    # make the stackset name unique
    stack_name = f"{stack_name}-{str(uuid.uuid4())[:8]}"
    # cloudformation stackset permissions
    cloudformation_stackset_permissions = cloudformation_stackset_policy(
        stack_name)
    cloudformation_stackset_instances_permissions = cloudformation_stackset_instances_policy(
        stack_name)

    lambda_role.add_to_policy(cloudformation_stackset_permissions)
    lambda_role.add_to_policy(cloudformation_stackset_instances_permissions)
    add_logs_policy(lambda_role)

    # defining the lambda function that gets invoked in this stage
    create_update_cf_stackset_lambda = lambda_.Function(
        scope,
        f"{action_name}_stackset_lambda",
        runtime=lambda_.Runtime.PYTHON_3_8,
        handler="main.lambda_handler",
        role=lambda_role,
        code=lambda_.Code.from_bucket(
            blueprint_bucket,
            "blueprints/byom/lambdas/create_update_cf_stackset.zip"),
        timeout=core.Duration.minutes(15),
    )

    create_update_cf_stackset_lambda.node.default_child.cfn_options.metadata = suppress_lambda_policies(
    )
    role_child_nodes = create_update_cf_stackset_lambda.role.node.find_all()
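    # index 2 is assumed to be the role's CDK-generated default inline policy
    # in the construct tree returned by find_all()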
    role_child_nodes[
        2].node.default_child.cfn_options.metadata = suppress_pipeline_policy(
        )

    # Create codepipeline action
    create_stackset_action = codepipeline_actions.LambdaInvokeAction(
        action_name=action_name,
        inputs=[source_output],
        variables_namespace=f"{action_name}-namespace",
        lambda_=create_update_cf_stackset_lambda,
        user_parameters={
            "stackset_name": stack_name,
            "artifact": artifact,
            "template_file": template_file,
            "stage_params_file": stage_params_file,
            "accound_ids": accound_ids,
            "org_ids": org_ids,
            "regions": regions,
        },
        run_order=1,
    )
    return (create_update_cf_stackset_lambda.function_arn,
            create_stackset_action)
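

# A hedged usage sketch for create_stackset_action; the pipeline artifacts and
# parameter values are illustrative assumptions, not from the source.
def _example_stackset_stage(stack, blueprint_bucket, assets_bucket, source_output):
    lambda_arn, stackset_action = create_stackset_action(
        scope=stack,
        action_name="DeployDevStackSet",
        blueprint_bucket=blueprint_bucket,
        source_output=source_output,
        artifact=source_output.artifact_name,
        template_file="template.yaml",
        stage_params_file="dev-params.json",
        account_ids=["111111111111"],
        org_ids=["ou-example-12345678"],
        regions=["us-east-1"],
        assets_bucket=assets_bucket,
        stack_name="example-stack",
    )
    # the action can then be attached to a stage, e.g.
    # codepipeline.StageProps(stage_name="DeployDev", actions=[stackset_action])
    return lambda_arn, stackset_action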
def create_data_baseline_job(
    scope,  # NOSONAR:S107 this function is designed to take many arguments
    blueprint_bucket,
    assets_bucket,
    baseline_job_name,
    training_data_location,
    baseline_job_output_location,
    endpoint_name,
    instance_type,
    instance_volume_size,
    max_runtime_seconds,
    kms_key_arn,
    kms_key_arn_provided_condition,
    stack_name,
):
    """
    create_data_baseline_job creates a Lambda function that creates a SageMaker data baseline processing job

    :scope: CDK Construct scope that's needed to create CDK resources
    :blueprint_bucket: CDK object of the blueprint bucket that contains resources for BYOM pipeline
    :assets_bucket: the bucket cdk object where pipeline assets are stored
    :baseline_job_name: name of the baseline job to be created
    :training_data_location: location of the training data used to train the deployed model
    :baseline_job_output_location: S3 prefix in the S3 assets bucket to store the output of the job
    :endpoint_name: name of the deployed SageMaker endpoint to be monitored
    :instance_type: compute instance type for the baseline job, in the form of a CDK CfnParameter object
    :instance_volume_size: volume size of the EC2 instance
    :max_runtime_seconds: max time the job is allowed to run
    :kms_key_arn: kms key arn to encrypt the baseline job's output
    :kms_key_arn_provided_condition: CDK CfnCondition indicating whether a KMS key ARN was provided
    :stack_name: model monitor stack name
    :return: Lambda function
    """
    s3_read = s3_policy_read([
        f"arn:aws:s3:::{assets_bucket.bucket_name}",
        f"arn:aws:s3:::{assets_bucket.bucket_name}/{training_data_location}",
    ])
    s3_write = s3_policy_write([
        f"arn:aws:s3:::{baseline_job_output_location}/*",
    ])

    create_baseline_job_policy = sagemaker_baseline_job_policy(
        baseline_job_name)
    sagemaker_logs_policy = sagemaker_logs_metrics_policy_document(
        scope, "BaselineLogsMetrcis")

    # Kms Key permissions
    kms_policy = kms_policy_document(scope, "BaselineKmsPolicy", kms_key_arn)
    # add condition to the KMS policy
    core.Aspects.of(kms_policy).add(
        ConditionalResources(kms_key_arn_provided_condition))

    # create sagemaker role
    sagemaker_role = create_service_role(
        scope,
        "create_baseline_sagemaker_role",
        "sagemaker.amazonaws.com",
        "Role that is create sagemaker model Lambda function assumes to create a baseline job.",
    )
    # attach the conditional policies
    kms_policy.attach_to_role(sagemaker_role)

    # create a trust relation to assume the Role
    sagemaker_role.add_to_policy(
        iam.PolicyStatement(actions=["sts:AssumeRole"],
                            resources=[sagemaker_role.role_arn]))
    # creating a role so that this lambda can create a baseline job
    lambda_role = create_service_role(
        scope,
        "create_baseline_job_lambda_role",
        lambda_service,
        "Role that is create_data_baseline_job Lambda function assumes to create a baseline job in the pipeline.",
    )

    sagemaker_logs_policy.attach_to_role(sagemaker_role)
    sagemaker_role.add_to_policy(create_baseline_job_policy)
    sagemaker_role.add_to_policy(s3_read)
    sagemaker_role.add_to_policy(s3_write)
    sagemaker_role_nodes = sagemaker_role.node.find_all()
    sagemaker_role_nodes[
        2].node.default_child.cfn_options.metadata = suppress_pipeline_policy(
        )
    lambda_role.add_to_policy(
        iam.PolicyStatement(actions=["iam:PassRole"],
                            resources=[sagemaker_role.role_arn]))
    lambda_role.add_to_policy(create_baseline_job_policy)
    lambda_role.add_to_policy(s3_write)
    lambda_role.add_to_policy(s3_read)
    add_logs_policy(lambda_role)

    # defining the lambda function that gets invoked in this stage
    create_baseline_job_lambda = lambda_.Function(
        scope,
        "create_data_baseline_job",
        runtime=lambda_.Runtime.PYTHON_3_8,
        handler=lambda_handler,
        role=lambda_role,
        code=lambda_.Code.from_bucket(
            blueprint_bucket,
            "blueprints/byom/lambdas/create_data_baseline_job.zip"),
        environment={
            "BASELINE_JOB_NAME": baseline_job_name,
            "ASSETS_BUCKET": assets_bucket.bucket_name,
            "SAGEMAKER_ENDPOINT_NAME": endpoint_name,
            "TRAINING_DATA_LOCATION": training_data_location,
            "BASELINE_JOB_OUTPUT_LOCATION": baseline_job_output_location,
            "INSTANCE_TYPE": instance_type,
            "INSTANCE_VOLUME_SIZE": instance_volume_size,
            "MAX_RUNTIME_SECONDS": max_runtime_seconds,
            "ROLE_ARN": sagemaker_role.role_arn,
            "KMS_KEY_ARN": kms_key_arn,
            "STACK_NAME": stack_name,
            "LOG_LEVEL": "INFO",
        },
        timeout=core.Duration.minutes(10),
    )

    create_baseline_job_lambda.node.default_child.cfn_options.metadata = suppress_lambda_policies(
    )
    role_child_nodes = create_baseline_job_lambda.role.node.find_all()
    role_child_nodes[
        2].node.default_child.cfn_options.metadata = suppress_pipeline_policy(
        )

    return create_baseline_job_lambda
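

# A hedged usage sketch for create_data_baseline_job; parameter values and the
# KMS condition object are illustrative assumptions, not from the source.
def _example_data_baseline_job(stack, blueprint_bucket, assets_bucket, kms_condition):
    return create_data_baseline_job(
        scope=stack,
        blueprint_bucket=blueprint_bucket,
        assets_bucket=assets_bucket,
        baseline_job_name="example-baseline-job",
        training_data_location="training/data.csv",
        baseline_job_output_location="example-bucket/baseline-output",
        endpoint_name="example-endpoint",
        instance_type="ml.m5.large",
        instance_volume_size="20",
        max_runtime_seconds="3600",
        kms_key_arn="",  # optional; empty string assumed to mean "no key"
        kms_key_arn_provided_condition=kms_condition,
        stack_name="example-monitor-stack",
    )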