def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    """Define a blue/green deployment pipeline stack.

    Creates an S3 bucket, a Lambda that performs the blue/green switch, a
    CodeCommit repository, and a two-stage CodePipeline (Source -> Deploy)
    whose deploy stage invokes the Lambda with the blue/green environment
    names taken from the CDK context.

    :scope: parent construct (usually the CDK app)
    :id: construct id of this stack
    :**kwargs: forwarded to the base stack class
    """
    super().__init__(scope, id, **kwargs)

    # Environment names and app name come from `cdk.json` context / `-c` flags;
    # any of these may be None if the context key is absent.
    blue_env = self.node.try_get_context("blue_env")
    green_env = self.node.try_get_context("green_env")
    app_name = self.node.try_get_context("app_name")

    bucket = s3.Bucket(
        self,
        'BlueGreenBucket',
        # The default removal policy is RETAIN, which means that cdk
        # destroy will not attempt to delete the new bucket, and it will
        # remain in your account until manually deleted. By setting the
        # policy to DESTROY, cdk destroy will attempt to delete the bucket,
        # but will error if the bucket is not empty.
        removal_policy=core.RemovalPolicy.DESTROY  # NOT recommended for production code
    )

    handler = lmbda.Function(
        self,
        'BlueGreenLambda',
        # NOTE(review): PYTHON_3_6 is end-of-life; upgrading changes the
        # deployed runtime, so it is left as-is here — confirm before bumping.
        runtime=lmbda.Runtime.PYTHON_3_6,
        # `Code.asset` is deprecated in CDK v1; `from_asset` is the
        # supported equivalent and behaves identically.
        code=lmbda.Code.from_asset('resources'),
        handler='blue_green.lambda_handler',
        environment={'BUCKET': bucket.bucket_name})
    bucket.grant_read_write(handler)

    repo = cc.Repository(
        self,
        'Repository',
        repository_name='MyRepositoryName',
    )

    # Source stage: pull from the CodeCommit repository.
    pipeline = cp.Pipeline(self, 'MyFirstPipeline')
    source_stage = pipeline.add_stage(stage_name='Source')
    source_artifact = cp.Artifact('Source')
    source_action = cpactions.CodeCommitSourceAction(
        action_name='CodeCommit', repository=repo, output=source_artifact)
    source_stage.add_action(source_action)

    # Deploy stage: invoke the blue/green Lambda with the context values.
    deploy_stage = pipeline.add_stage(stage_name='Deploy')
    lambda_action = cpactions.LambdaInvokeAction(
        action_name='InvokeAction',
        lambda_=handler,
        user_parameters={
            'blueEnvironment': blue_env,
            'greenEnvironment': green_env,
            'application': app_name
        },
        inputs=[source_artifact])
    deploy_stage.add_action(lambda_action)
def _create_set_experiment_info_env_step(self) -> Resource:
    """Add the 'SetExperimentInfoEnvs' pipeline stage.

    The stage invokes the experiment-info Lambda, handing it the evaluation
    report S3 location exported by the train step; the Lambda's outputs are
    published under the ``experimentInfo`` variables namespace for use by
    later stages.

    NOTE(review): annotated to return ``Resource`` but nothing is returned —
    confirm whether the stage or action was meant to be the return value.
    """
    env_stage = self.pipeline.add_stage(stage_name="SetExperimentInfoEnvs")
    invoke_action = codepipeline_actions.LambdaInvokeAction(
        action_name=f"{self.name_prefix}-set-experiment-info-env-action",
        lambda_=self.set_experiment_info_env_lambdaFn,
        variables_namespace="experimentInfo",
        user_parameters={
            'EVAL_REPORT_S3': '#{trainStep.EVAL_REPORT_S3}',
        },
    )
    env_stage.add_action(invoke_action)
def _create_post_process_step(self) -> Resource:
    """Add the 'PostProcess' pipeline stage.

    The stage invokes the post-processing Lambda with the experiment
    identifiers (from the ``experimentInfo`` namespace) and the training-job
    outputs (from the ``trainStep`` namespace).

    NOTE(review): annotated to return ``Resource`` but nothing is returned —
    confirm whether the stage or action was meant to be the return value.
    """
    post_stage = self.pipeline.add_stage(stage_name="PostProcess")
    post_action = codepipeline_actions.LambdaInvokeAction(
        action_name=f"{self.name_prefix}-postprocess-action",
        lambda_=self.postprocess_lambdaFn,
        user_parameters={
            'EXPERIMENT_NAME': '#{experimentInfo.experiment_name}',
            'RUN_ID': '#{experimentInfo.run_id}',
            'TRAIN_JOB_NAME': '#{trainStep.TRAIN_JOB_NAME}',
            'TRAINED_MODEL_S3': '#{trainStep.TRAINED_MODEL_S3}',
        },
    )
    post_stage.add_action(post_action)
def create_monitoring_schedule(
    scope,
    blueprint_bucket,
    assets_bucket,
    baseline_job_output_location,
    baseline_job_name,
    monitoring_schedual_name,
    monitoring_output_location,
    schedule_expression,
    endpoint_name,
    instance_type,
    instance_volume_size,
    max_runtime_seconds,
    monitoring_type,
    stack_name,
):
    """
    create_monitoring_schedule creates a model monitoring schedule in a lambda invoked codepipeline action

    :scope: CDK Construct scope that's needed to create CDK resources
    :blueprint_bucket: CDK object of the blueprint bucket that contains resources for BYOM pipeline
    :assets_bucket: the bucket cdk object where pipeline assets are stored
    :baseline_job_output_location: S3 prefix in the S3 assets bucket to store the output of the job
    :baseline_job_name: name of the baseline job
    :monitoring_schedual_name: name of the monitoring schedule to be created
        (parameter name is misspelled but kept for caller compatibility)
    :monitoring_output_location: S3 location where monitoring output is stored
    :schedule_expression: cron job expression
    :endpoint_name: name of the deployed SageMaker endpoint to be monitored
    :instance_type: compute instance type for the baseline job, in the form of a CDK CfnParameter object
    :instance_volume_size: volume size of the EC2 instance
    :monitoring_type: type of monitoring to be created
    :max_runtime_seconds: max time the job is allowed to run
    :stack_name: name of the model monitoring stack
    :return: tuple of (lambda function ARN, codepipeline LambdaInvokeAction);
        the action can be attached to a codepipeline stage
    """
    # Permissions needed to manage the monitoring schedule and inspect the
    # monitored endpoint / baseline processing job.
    create_monitoring_schedule_policy = iam.PolicyStatement(
        actions=[
            "sagemaker:DescribeEndpointConfig",
            "sagemaker:DescribeEndpoint",
            "sagemaker:CreateMonitoringSchedule",
            "sagemaker:DescribeMonitoringSchedule",
            "sagemaker:StopMonitoringSchedule",
            "sagemaker:DeleteMonitoringSchedule",
            "sagemaker:DescribeProcessingJob",
        ],
        resources=[
            (f"arn:{core.Aws.PARTITION}:sagemaker:{core.Aws.REGION}:{core.Aws.ACCOUNT_ID}:"
             f"endpoint/{endpoint_name.value_as_string}*"),
            (f"arn:{core.Aws.PARTITION}:sagemaker:{core.Aws.REGION}:{core.Aws.ACCOUNT_ID}:"
             f"endpoint-config/{endpoint_name.value_as_string}*"),
            (f"arn:{core.Aws.PARTITION}:sagemaker:{core.Aws.REGION}:{core.Aws.ACCOUNT_ID}:"
             f"monitoring-schedule/{monitoring_schedual_name.value_as_string}"),
            (f"arn:{core.Aws.PARTITION}:sagemaker:{core.Aws.REGION}:{core.Aws.ACCOUNT_ID}:"
             f"processing-job/{baseline_job_name.value_as_string}"),
        ],
    )
    # Read/write access to the assets bucket for the monitoring job I/O.
    s3_policy = iam.PolicyStatement(
        actions=[
            "s3:ListBucket",
            "s3:GetObject",
            "s3:PutObject",
        ],
        resources=[
            assets_bucket.bucket_arn,
            assets_bucket.arn_for_objects("*"),
        ],
    )
    # CloudWatch metric/log permissions for the SageMaker job itself.
    sagemaker_logs_policy = iam.PolicyStatement(
        actions=[
            "cloudwatch:PutMetricData",
            "logs:CreateLogStream",
            "logs:PutLogEvents",
            "logs:CreateLogGroup",
            "logs:DescribeLogStreams",
        ],
        resources=["*"],
    )
    # create sagemaker role (construct id "scheduale" is misspelled but kept:
    # changing it would change the CloudFormation logical id)
    sagemaker_role = iam.Role(
        scope,
        "create_monitoring_scheduale_sagemaker_role",
        assumed_by=iam.ServicePrincipal("sagemaker.amazonaws.com"),
        description="Role that is create sagemaker model Lambda function assumes to create a model in the pipeline.",
    )
    # create a trust relation to assume the Role
    sagemaker_role.add_to_policy(
        iam.PolicyStatement(actions=["sts:AssumeRole"],
                            resources=[sagemaker_role.role_arn]))
    # creating a role so that this lambda can create a baseline job
    lambda_role = iam.Role(
        scope,
        "create_monitoring_scheduale_role",
        assumed_by=iam.ServicePrincipal("lambda.amazonaws.com"),
        description="Role that is create_data_baseline_job Lambda function assumes to create a baseline job in the pipeline.",
    )
    sagemaker_role.add_to_policy(create_monitoring_schedule_policy)
    sagemaker_role.add_to_policy(sagemaker_logs_policy)
    sagemaker_role.add_to_policy(s3_policy)
    sagemaker_role_nodes = sagemaker_role.node.find_all()
    # NOTE(review): index [2] relies on construct-tree ordering to reach the
    # role's default policy resource — fragile; verify if the CDK version changes.
    sagemaker_role_nodes[2].node.default_child.cfn_options.metadata = suppress_pipeline_policy()
    # The Lambda must be allowed to pass the SageMaker role to the schedule.
    lambda_role.add_to_policy(
        iam.PolicyStatement(actions=["iam:PassRole"],
                            resources=[sagemaker_role.role_arn]))
    lambda_role.add_to_policy(create_monitoring_schedule_policy)
    lambda_role.add_to_policy(s3_policy)
    add_logs_policy(lambda_role)
    # defining the lambda function that gets invoked in this stage
    # (construct id "create_moniroring_schedule" is misspelled but kept — it is
    # part of the CloudFormation logical id)
    create_moniroring_schedule_lambda = lambda_.Function(
        scope,
        "create_moniroring_schedule",
        runtime=lambda_.Runtime.PYTHON_3_8,
        handler="main.handler",
        role=lambda_role,
        code=lambda_.Code.from_bucket(
            blueprint_bucket,
            "blueprints/byom/lambdas/create_model_monitoring_schedule.zip"),
        environment={
            "BASELINE_JOB_NAME": baseline_job_name.value_as_string,
            "BASELINE_JOB_OUTPUT_LOCATION": baseline_job_output_location.value_as_string,
            "ASSETS_BUCKET": assets_bucket.bucket_name,
            "SAGEMAKER_ENDPOINT_NAME": f"{endpoint_name.value_as_string}",
            "MONITORING_SCHEDULE_NAME": monitoring_schedual_name.value_as_string,
            "MONITORING_OUTPUT_LOCATION": monitoring_output_location.value_as_string,
            "SCHEDULE_EXPRESSION": schedule_expression.value_as_string,
            "INSTANCE_TYPE": instance_type.value_as_string,
            "INSTANCE_VOLUME_SIZE": instance_volume_size.value_as_string,
            "MAX_RUNTIME_SECONDS": max_runtime_seconds.value_as_string,
            "ROLE_ARN": sagemaker_role.role_arn,
            "MONITORING_TYPE": monitoring_type.value_as_string,
            "STACK_NAME": stack_name,
            "LOG_LEVEL": "INFO",
        },
        timeout=core.Duration.minutes(10),
    )
    create_moniroring_schedule_lambda.node.default_child.cfn_options.metadata = suppress_cloudwatch_policy()
    role_child_nodes = create_moniroring_schedule_lambda.role.node.find_all()
    # NOTE(review): same fragile [2] construct-tree index as above.
    role_child_nodes[2].node.default_child.cfn_options.metadata = suppress_pipeline_policy()
    # Create codepipeline action
    create_moniroring_schedule_action = codepipeline_actions.LambdaInvokeAction(
        action_name="create_monitoring_schedule",
        inputs=[],
        outputs=[],
        variables_namespace="monitoring_schedule",
        lambda_=create_moniroring_schedule_lambda,
        run_order=2,  # this runs second in the deploy stage
    )
    return (create_moniroring_schedule_lambda.function_arn,
            create_moniroring_schedule_action)
def create_model(
    scope,
    blueprint_bucket,
    assets_bucket,
    model_name,
    model_artifact_location,
    custom_container,
    model_framework,
    model_framework_version,
    container_uri,
    sm_layer,
):
    """
    create_model creates a sagemaker model in a lambda invoked codepipeline action

    :scope: CDK Construct scope that's needed to create CDK resources
    :blueprint_bucket: CDK object of the blueprint bucket that contains resources for BYOM pipeline
    :assets_bucket: the bucket cdk object where pipeline assets are stored
    :model_name: name of the sagemaker model to be created, in the form of a CDK CfnParameter object
    :model_artifact_location: path to the model artifact in the S3 bucket: assets_bucket
    :custom_container: whether the model is a custom algorithm or a sagemaker algorithm, in the form
    of a CDK CfnParameter object
    :model_framework: name of the framework if the model is a sagemaker algorithm, in the form
    of a CDK CfnParameter object
    :model_framework_version: version of the framework if the model is a sagemaker algorithm, in the form
    of a CDK CfnParameter object
    :container_uri: URI for the container registry that stores the model if the model is a custom algorithm
    :sm_layer: sagemaker lambda layer
    :return: tuple of (lambda function ARN, codepipeline LambdaInvokeAction);
        the action can be attached to a codepipeline stage
    """
    create_model_policy = iam.PolicyStatement(
        actions=[
            "sagemaker:CreateModel",
            "sagemaker:DescribeModel",
            "sagemaker:DeleteModel",
        ],
        resources=[
            # Lambda that uses this policy requires access to all objects in the assets bucket
            f"arn:{core.Aws.PARTITION}:s3:::{assets_bucket.bucket_name}/*",
            (f"arn:{core.Aws.PARTITION}:sagemaker:{core.Aws.REGION}:{core.Aws.ACCOUNT_ID}"
             f":model/{model_name.value_as_string}"),
        ],
    )
    s3_policy = iam.PolicyStatement(
        actions=[
            "s3:GetObject",
            "s3:PutObject",
            "s3:ListBucket",
        ],
        resources=[
            assets_bucket.arn_for_objects("*"),
            assets_bucket.bucket_arn
        ],
    )
    # creating this policy for sagemaker create endpoint in custom model
    ecr_policy = iam.PolicyStatement(
        actions=[
            "ecr:BatchGetImage",
            "ecr:BatchCheckLayerAvailability",
            "ecr:DescribeImages",
            "ecr:DescribeRepositories",
            "ecr:GetDownloadUrlForLayer",
        ],
        resources=[
            f"arn:{core.Aws.PARTITION}:ecr:{core.Aws.REGION}:{core.Aws.ACCOUNT_ID}:repository/awsmlopsmodels*"
        ],
    )
    ecr_token_policy = iam.PolicyStatement(
        actions=["ecr:GetAuthorizationToken"],
        resources=["*"],  # GetAuthorizationToken can not be bound to resources other than *
    )
    # creating a role for the lambda function so that it can create a model in sagemaker
    sagemaker_role = iam.Role(
        scope,
        "create_model_sagemaker_role",
        assumed_by=iam.ServicePrincipal("sagemaker.amazonaws.com"),
        description="Role that is create sagemaker model Lambda function assumes to create a model in the pipeline.",
    )
    lambda_role = iam.Role(
        scope,
        "create_model_lambda_role",
        assumed_by=iam.ServicePrincipal("lambda.amazonaws.com"),
        description="Role that is create sagemaker model Lambda function assumes to create a model in the pipeline.",
    )
    sagemaker_role.add_to_policy(create_model_policy)
    sagemaker_role.add_to_policy(s3_policy)
    sagemaker_role.add_to_policy(ecr_policy)
    sagemaker_role.add_to_policy(ecr_token_policy)
    sagemaker_role_nodes = sagemaker_role.node.find_all()
    # NOTE(review): index [2] relies on construct-tree ordering to reach the
    # role's default policy resource — fragile; verify if the CDK version changes.
    sagemaker_role_nodes[2].node.default_child.cfn_options.metadata = suppress_ecr_policy()
    # The Lambda must be allowed to pass the SageMaker role to the model.
    lambda_role.add_to_policy(
        iam.PolicyStatement(actions=["iam:PassRole"],
                            resources=[sagemaker_role.role_arn]))
    lambda_role.add_to_policy(create_model_policy)
    lambda_role.add_to_policy(s3_policy)
    add_logs_policy(lambda_role)
    # defining the lambda function that gets invoked by codepipeline in this step
    create_sagemaker_model = lambda_.Function(
        scope,
        "create_sagemaker_model",
        runtime=lambda_.Runtime.PYTHON_3_8,
        handler="main.handler",
        timeout=core.Duration.seconds(60),
        code=lambda_.Code.from_bucket(
            blueprint_bucket,
            "blueprints/byom/lambdas/create_sagemaker_model.zip"),
        layers=[sm_layer],
        role=lambda_role,
        environment={
            "custom_container": custom_container.value_as_string,
            "model_framework": model_framework.value_as_string,
            "model_framework_version": model_framework_version.value_as_string,
            "model_name": model_name.value_as_string,
            "model_artifact_location": assets_bucket.s3_url_for_object(
                model_artifact_location.value_as_string),
            "create_model_role_arn": sagemaker_role.role_arn,
            "container_uri": container_uri,
            "LOG_LEVEL": "INFO",
        },
    )
    create_sagemaker_model.node.default_child.cfn_options.metadata = suppress_cloudwatch_policy()
    role_child_nodes = create_sagemaker_model.role.node.find_all()
    # NOTE(review): same fragile [2] construct-tree index as above.
    role_child_nodes[2].node.default_child.cfn_options.metadata = suppress_pipeline_policy()
    # creating the codepipeline action that invokes create model lambda
    create_sagemaker_model_action = codepipeline_actions.LambdaInvokeAction(
        action_name="create_sagemaker_model",
        inputs=[],
        outputs=[],
        lambda_=create_sagemaker_model,
        run_order=1,  # runs first in the Deploy stage
    )
    return (create_sagemaker_model.function_arn, create_sagemaker_model_action)
def batch_transform(
    scope,
    blueprint_bucket,
    assets_bucket,
    model_name,
    inference_instance,
    batch_inference_data,
    sm_layer,
):
    """
    batch_transform creates a sagemaker batch transform job in a lambda invoked codepipeline action

    :scope: CDK Construct scope that's needed to create CDK resources
    :blueprint_bucket: CDK object of the blueprint bucket that contains resources for BYOM pipeline
    :assets_bucket: the bucket cdk object where pipeline assets are stored
    :model_name: name of the sagemaker model to be created, in the form of a CDK CfnParameter object
    :inference_instance: compute instance type for the sagemaker inference endpoint, in the form
    of a CDK CfnParameter object
    :batch_inference_data: location of the batch inference data in assets bucket, in the form
    of a CDK CfnParameter object
    :sm_layer: sagemaker lambda layer
    :return: tuple of (lambda function ARN, codepipeline LambdaInvokeAction);
        the action can be attached to a codepipeline stage
    """
    # Permissions for the Lambda: start the transform job and read/write the
    # assets bucket.
    batch_transform_policy = iam.PolicyStatement(
        actions=[
            "sagemaker:CreateTransformJob",
            "s3:ListBucket",
            "s3:GetObject",
            "s3:PutObject",
        ],
        resources=[
            assets_bucket.bucket_arn,
            assets_bucket.arn_for_objects("*"),
            (f"arn:{core.Aws.PARTITION}:sagemaker:{core.Aws.REGION}:{core.Aws.ACCOUNT_ID}:"
             f"transform-job/{model_name.value_as_string}-*"),
        ],
    )
    lambda_role = iam.Role(
        scope,
        "batch_transform_lambda_role",
        assumed_by=iam.ServicePrincipal("lambda.amazonaws.com"),
        description=("Role that creates a lambda function assumes to create a sagemaker batch transform "
                     "job in the aws mlops pipeline."),
    )
    lambda_role.add_to_policy(batch_transform_policy)
    lambda_role.add_to_policy(codepipeline_policy())
    add_logs_policy(lambda_role)
    # defining batch transform lambda function
    # (local renamed from `batch_transform` so it no longer shadows this
    # function; the construct id string is unchanged to preserve the
    # CloudFormation logical id)
    batch_transform_lambda = lambda_.Function(
        scope,
        "batch_transform",
        runtime=lambda_.Runtime.PYTHON_3_8,
        handler="main.handler",
        layers=[sm_layer],
        role=lambda_role,
        code=lambda_.Code.from_bucket(
            blueprint_bucket,
            "blueprints/byom/lambdas/batch_transform.zip"),
        environment={
            "model_name": model_name.value_as_string,
            "inference_instance": inference_instance.value_as_string,
            "assets_bucket": assets_bucket.bucket_name,
            "batch_inference_data": batch_inference_data.value_as_string,
            "LOG_LEVEL": "INFO",
        },
    )
    batch_transform_lambda.node.default_child.cfn_options.metadata = suppress_cloudwatch_policy()
    role_child_nodes = batch_transform_lambda.role.node.find_all()
    # NOTE(review): index [2] relies on construct-tree ordering to reach the
    # role's default policy resource — fragile; verify if the CDK version changes.
    role_child_nodes[2].node.default_child.cfn_options.metadata = suppress_pipeline_policy()
    batch_transform_action = codepipeline_actions.LambdaInvokeAction(
        action_name="batch_transform",
        inputs=[],
        outputs=[],
        variables_namespace="batch_transform",
        lambda_=batch_transform_lambda,
        run_order=2,  # this runs second in the deploy stage
    )
    return (batch_transform_lambda.function_arn, batch_transform_action)
def create_endpoint(scope, blueprint_bucket, assets_bucket, model_name,
                    inference_instance):
    """
    create_endpoint creates a sagemaker inference endpoint in a lambda invoked codepipeline action

    :scope: CDK Construct scope that's needed to create CDK resources
    :blueprint_bucket: CDK object of the blueprint bucket that contains resources for BYOM pipeline
    :assets_bucket: the bucket cdk object where pipeline assets are stored
    :model_name: name of the sagemaker model to be created, in the form of a CDK CfnParameter object
    :inference_instance: compute instance type for the sagemaker inference endpoint, in the form
    of a CDK CfnParameter object
    :return: tuple of (lambda function ARN, codepipeline LambdaInvokeAction);
        the action can be attached to a codepipeline stage
    """
    # Endpoint + endpoint-config lifecycle permissions, scoped to the names
    # derived from the model name.
    create_endpoint_policy = iam.PolicyStatement(
        actions=[
            "sagemaker:CreateEndpoint",
            "sagemaker:CreateEndpointConfig",
            "sagemaker:DeleteEndpointConfig",
            "sagemaker:DescribeEndpointConfig",
            "sagemaker:DescribeEndpoint",
        ],
        resources=[
            (f"arn:{core.Aws.PARTITION}:sagemaker:{core.Aws.REGION}:{core.Aws.ACCOUNT_ID}:"
             f"endpoint/{model_name.value_as_string}-endpoint"),
            (f"arn:{core.Aws.PARTITION}:sagemaker:{core.Aws.REGION}:{core.Aws.ACCOUNT_ID}:"
             f"endpoint-config/{model_name.value_as_string}-endpoint-config"),
        ],
    )
    # creating a role so that this lambda can create a sagemaker endpoint and endpoint config
    lambda_role = iam.Role(
        scope,
        "create_endpoint_lambda_role",
        assumed_by=iam.ServicePrincipal("lambda.amazonaws.com"),
        description="Role that is create sagemaker model Lambda function assumes to create a model in the pipeline.",
    )
    lambda_role.add_to_policy(create_endpoint_policy)
    add_logs_policy(lambda_role)
    # defining the lambda function that gets invoked in this stage
    create_sagemaker_endpoint = lambda_.Function(
        scope,
        "create_sagemaker_endpoint",
        runtime=lambda_.Runtime.PYTHON_3_8,
        handler="main.handler",
        role=lambda_role,
        code=lambda_.Code.from_bucket(
            blueprint_bucket,
            "blueprints/byom/lambdas/create_sagemaker_endpoint.zip"),
        environment={
            "model_name": model_name.value_as_string,
            "inference_instance": inference_instance.value_as_string,
            "assets_bucket": assets_bucket.bucket_name,
            "LOG_LEVEL": "INFO",
        },
        timeout=core.Duration.minutes(10),
    )
    create_sagemaker_endpoint.node.default_child.cfn_options.metadata = suppress_cloudwatch_policy()
    role_child_nodes = create_sagemaker_endpoint.role.node.find_all()
    # NOTE(review): index [2] relies on construct-tree ordering to reach the
    # role's default policy resource — fragile; verify if the CDK version changes.
    role_child_nodes[2].node.default_child.cfn_options.metadata = suppress_pipeline_policy()
    create_endpoint_action = codepipeline_actions.LambdaInvokeAction(
        action_name="create_sagemaker_endpoint",
        inputs=[],
        outputs=[],
        variables_namespace="sagemaker_endpoint",
        lambda_=create_sagemaker_endpoint,
        run_order=2,  # this runs second in the deploy stage
    )
    return (create_sagemaker_endpoint.function_arn, create_endpoint_action)
def configure_inference(scope, blueprint_bucket):
    """
    configure_inference updates inference lambda function's environment variables and puts the value for
    Sagemaker endpoint URI as a lambda invoked codepipeline action

    :scope: CDK Construct scope that's needed to create CDK resources
    :blueprint_bucket: CDK object of the blueprint bucket that contains resources for BYOM pipeline
    :return: tuple of (lambda function ARN, codepipeline LambdaInvokeAction);
        the action can be attached to a codepipeline stage
    """
    # provision api gateway and lambda for inference using solution constructs
    inference_api_gateway = aws_apigateway_lambda.ApiGatewayToLambda(
        scope,
        "BYOMInference",
        lambda_function_props={
            "runtime": lambda_.Runtime.PYTHON_3_8,
            "handler": "main.handler",
            "code": lambda_.Code.from_bucket(blueprint_bucket, "blueprints/byom/lambdas/inference.zip"),
        },
        api_gateway_props={
            "defaultMethodOptions": {
                "authorizationType": apigw.AuthorizationType.IAM,
            },
            "restApiName": f"{core.Aws.STACK_NAME}-inference",
            "proxy": False,
        },
    )
    # Expose POST /inference on the new API.
    provision_resource = inference_api_gateway.api_gateway.root.add_resource("inference")
    provision_resource.add_method("POST")
    # The inference Lambda may invoke any endpoint in the account/region.
    inference_api_gateway.lambda_function.add_to_role_policy(
        iam.PolicyStatement(
            actions=[
                "sagemaker:InvokeEndpoint",
            ],
            resources=[
                f"arn:{core.Aws.PARTITION}:sagemaker:{core.Aws.REGION}:{core.Aws.ACCOUNT_ID}:endpoint/*",
            ],
        )
    )
    # lambda function that gets invoked from codepipeline
    configure_inference_lambda = lambda_.Function(
        scope,
        "configure_inference_lambda",
        runtime=lambda_.Runtime.PYTHON_3_8,
        handler="main.handler",
        code=lambda_.Code.from_bucket(blueprint_bucket, "blueprints/byom/lambdas/configure_inference_lambda.zip"),
        environment={
            "inference_lambda_arn": inference_api_gateway.lambda_function.function_arn,
            "LOG_LEVEL": "INFO",
        },
    )
    configure_inference_lambda.node.default_child.cfn_options.metadata = suppress_cloudwatch_policy()
    # iam permissions to respond to codepipeline and update inference lambda
    configure_inference_lambda.add_to_role_policy(
        iam.PolicyStatement(
            actions=[
                "lambda:UpdateFunctionConfiguration",
            ],
            resources=[inference_api_gateway.lambda_function.function_arn],
        )
    )
    configure_inference_lambda.add_to_role_policy(codepipeline_policy())
    role_child_nodes = configure_inference_lambda.role.node.find_all()
    # NOTE(review): index [2] relies on construct-tree ordering to reach the
    # role's default policy resource — fragile; verify if the CDK version changes.
    role_child_nodes[2].node.default_child.cfn_options.metadata = {
        "cfn_nag": {
            "rules_to_suppress": [
                {
                    "id": "W12",
                    "reason": (
                        "The codepipeline permissions PutJobSuccessResult and PutJobFailureResult "
                        "are not able to be bound to resources."
                    ),
                }
            ]
        }
    }
    # configuring codepipeline action to invoke the lambda
    configure_inference_action = codepipeline_actions.LambdaInvokeAction(
        action_name="configure_inference_lambda",
        inputs=[],
        outputs=[],
        # passing the parameter from the last stage in pipeline
        # NOTE(review): a list wrapping the dict — other actions in this file
        # pass a plain dict; confirm the target Lambda expects a JSON list.
        user_parameters=[{"endpointName": "#{sagemaker_endpoint.endpointName}"}],
        lambda_=configure_inference_lambda,
    )
    return (configure_inference_lambda.function_arn, configure_inference_action)
def create_action(
    scope: core.Construct,
    id: str,
    action_def: Union[CodeCommitAction, CodeBuildAction,
                      CloudFormationCreateUpdateStackAction, ApprovalAction,
                      LambdaInvokeAction, S3SourceAction, ],
):
    """Build a concrete CodePipeline action from a declarative definition.

    ``action_def["type"]`` selects the action class; the remaining keys
    configure it. Returns the constructed aws_codepipeline_actions action,
    or None for an unrecognized "type".

    NOTE: ``action_def.pop("name")`` mutates the caller's dict.

    :scope: construct scope for any resources created on behalf of the action
    :id: unique construct-id prefix for resources created here
    :action_def: typed-dict action definition (see the Union in the signature)
    :return: a CodePipeline action instance, or None for an unknown type
    """
    action_name = action_def.pop("name")
    run_order = action_def.get("run_order", 1)
    variables_namespace = action_def.get("variables_namespace")
    # Optional pre-existing role, shared by all action types.
    role = (aws_iam.Role.from_role_arn(scope, f"{id}RoleRef", action_def["role_arn"])
            if "role_arn" in action_def else None)
    if action_def["type"] == "CODECOMMIT":
        action_def = cast(CodeCommitAction, action_def)
        repository = aws_codecommit.Repository.from_repository_name(
            scope, f"{id}Repo", action_def["repository"])
        output = aws_codepipeline.Artifact(action_def["output"])
        return aws_codepipeline_actions.CodeCommitSourceAction(
            action_name=action_name,
            output=output,
            repository=repository,
            branch=action_def.get("branch", "master"),
            run_order=run_order,
            role=role,
            variables_namespace=variables_namespace,
        )
    elif action_def["type"] == "S3_SOURCE":
        action_def = cast(S3SourceAction, action_def)
        output = aws_codepipeline.Artifact(action_def["output"])
        if "kms_key_arn" in action_def:
            # Replaces any role_arn-derived role so the action can decrypt
            # the KMS-encrypted source.
            role = aws_iam.Role(
                scope,
                f"{id}Role",
                assumed_by=aws_iam.AccountRootPrincipal(),
            )
            aws_kms.Key.from_key_arn(
                scope, f"{id}KeyRef",
                key_arn=action_def["kms_key_arn"]).grant_decrypt(role)
        if "bucket" in action_def:
            bucket = aws_s3.Bucket.from_bucket_name(
                scope, f"{id}SourceBucketRef", action_def["bucket"])
        else:
            bucket = aws_s3.Bucket(
                scope,
                f"{id}SourceBucket",
                block_public_access=aws_s3.BlockPublicAccess.BLOCK_ALL,
                removal_policy=core.RemovalPolicy.DESTROY,
            )
            core.CfnOutput(scope, f"{id}SourceBucketName", value=bucket.bucket_name)
        return aws_codepipeline_actions.S3SourceAction(
            action_name=action_name,
            output=output,
            run_order=run_order,
            role=role,
            bucket=bucket,
            bucket_key=action_def["key"],
        )
    elif action_def["type"] == "CODEBUILD":
        action_def = cast(CodeBuildAction, action_def)
        # Set up CodeBuild project
        project_params = {
            "build_spec": aws_codebuild.BuildSpec.from_source_filename(
                action_def.get("build_spec", "buildspec.yaml")),
            "timeout": core.Duration.minutes(int(action_def.get("timeout_minutes", 60))),
        }
        project_params["environment"] = {
            "build_image": aws_codebuild.LinuxBuildImage.AMAZON_LINUX_2_3
        }
        if "environment" in action_def:
            if "build_image" in action_def["environment"]:
                project_params["environment"]["build_image"] = getattr(
                    aws_codebuild.LinuxBuildImage,
                    action_def["environment"].pop("build_image"),
                )
            if "compute_type" in action_def["environment"]:
                project_params["environment"]["compute_type"] = getattr(
                    aws_codebuild.ComputeType,
                    action_def["environment"].pop("compute_type"),
                )
            project_params["environment"].update(**action_def["environment"])
        project_role = aws_iam.Role(
            scope,
            f"{id}CodeBuildRole",
            path="/codebuild/",
            assumed_by=aws_iam.ServicePrincipal(service="codebuild.amazonaws.com"),
        )
        # TODO(review): "*" on "*" is an admin grant for the build role;
        # scope this down to what the builds actually need.
        project_role.add_to_policy(
            aws_iam.PolicyStatement(actions=["*"],
                                    resources=["*"],
                                    effect=aws_iam.Effect.ALLOW))
        # Plain values become project-level env vars; values containing '#'
        # (pipeline variable references) are passed per-execution instead.
        project_environment_variables = ({
            var_key: aws_codebuild.BuildEnvironmentVariable(
                value=str(var_value),
                type=aws_codebuild.BuildEnvironmentVariableType.PLAINTEXT,
            )
            for var_key, var_value in action_def["environment_variables"].items()
            if "#" not in str(var_value)
        } if "environment_variables" in action_def else None)
        project = aws_codebuild.PipelineProject(
            scope,
            f"{id}Project",
            project_name=id,
            role=project_role,
            environment_variables=project_environment_variables,
            **project_params,
        )
        pipeline_environment_variables = ({
            var_key: aws_codebuild.BuildEnvironmentVariable(
                value=str(var_value),
                type=aws_codebuild.BuildEnvironmentVariableType.PLAINTEXT,
            )
            for var_key, var_value in action_def["environment_variables"].items()
            if "#" in str(var_value)
        } if "environment_variables" in action_def else None)
        extra_inputs = ([
            aws_codepipeline.Artifact(input_)
            for input_ in action_def["extra_inputs"]
        ] if "extra_inputs" in action_def else None)
        outputs = ([
            aws_codepipeline.Artifact(output)
            for output in action_def["outputs"]
        ] if "outputs" in action_def else None)
        return aws_codepipeline_actions.CodeBuildAction(
            action_name=action_name,
            input=aws_codepipeline.Artifact(action_def["input"]),
            project=project,
            run_order=run_order,
            role=role,
            variables_namespace=variables_namespace,
            environment_variables=pipeline_environment_variables,
            extra_inputs=extra_inputs,
            outputs=outputs,
        )
    elif action_def["type"] == "CLOUDFORMATION":
        action_def = cast(CloudFormationCreateUpdateStackAction, action_def)

        def _capability(name):
            # Map a CloudFormation capability string to the CDK enum member.
            # Bug fix: the previous `name.lstrip("CAPABILITY_")` stripped a
            # *character set*, not a prefix, so "CAPABILITY_IAM" became ""
            # and "CAPABILITY_AUTO_EXPAND" became "UTO_EXPAND" — both raised
            # KeyError on the enum lookup.
            member = (name[len("CAPABILITY_"):]
                      if name.startswith("CAPABILITY_") else name)
            # The enum names plain CAPABILITY_IAM "ANONYMOUS_IAM".
            if member == "IAM":
                member = "ANONYMOUS_IAM"
            return aws_cloudformation.CloudFormationCapabilities[member]

        return aws_codepipeline_actions.CloudFormationCreateUpdateStackAction(
            action_name=action_name,
            admin_permissions=False,
            stack_name=action_def["stack_name"],
            template_path=aws_codepipeline.ArtifactPath(
                aws_codepipeline.Artifact(action_def["input"]),
                action_def.get("template_path", "template.yaml"),
            ),
            capabilities=[
                _capability(capability)
                for capability in action_def["capabilities"]
            ] if "capabilities" in action_def else None,
            deployment_role=role,
            role=role,
            parameter_overrides=action_def.get("parameter_overrides"),
            run_order=run_order,
            variables_namespace=variables_namespace,
        )
    elif action_def["type"] == "APPROVAL":
        action_def = cast(ApprovalAction, action_def)
        return aws_codepipeline_actions.ManualApprovalAction(
            action_name=action_name,
            run_order=run_order,
            role=role,
            additional_information=action_def.get("additional_information"),
            external_entity_link=action_def.get("external_entity_link"),
            notification_topic=action_def.get("notification_topic"),
            variables_namespace=variables_namespace,
        )
    elif action_def["type"] == "LAMBDA":
        action_def = cast(LambdaInvokeAction, action_def)
        user_parameters = action_def.get("user_parameters")
        return aws_codepipeline_actions.LambdaInvokeAction(
            action_name=action_name,
            run_order=run_order,
            lambda_=aws_lambda.Function.from_function_arn(
                scope, f"{id}Lambda", action_def["function_arn"]),
            user_parameters=user_parameters,
            role=role,
            variables_namespace=variables_namespace,
        )
def __init__(
    self, scope: core.Construct, id: str, branch: str, sandbox_account: str, **kwargs
) -> None:
    """Init the Construct for creating hd-auto-service-catalog.

    Builds the branch-suffixed CICD stack for the Service Catalog: parameters,
    artifact bucket (with cleanup custom resource), CodeCommit repo reference,
    Lambda layer, CodeBuild project, the Service Catalog portfolio with its
    principal associations, the CICD Lambdas, and the CodePipeline wiring them
    together. All taggable resources are tagged in one pass at the end.

    Args:
        scope: CDK parent stack app.py
        id: Name of the stack: "hd-auto-service-catalog"
        branch: string for A/B Deployment; used as a suffix on construct ids
            so several branch stacks can coexist in one account
        sandbox_account: Sandbox account id
        **kwargs: forwarded to the parent construct
    """
    super().__init__(scope, id, **kwargs)

    # ##############################################################
    # Tagging List
    # ##############################################################
    # Taggable resources are collected here and tagged in a single pass
    # at the very end of this constructor (Tags.tag_resources).
    tagging_list = []

    # ##############################################################
    # Parameters
    # ##############################################################
    # ===============================
    # App name
    app_name = core.CfnParameter(
        self,
        id="AppName-{}".format(branch),
        description="Name of the app",
        type="String",
        default="hd-auto-cicd-service-catalog",
    )
    # ===============================
    # Environment name
    env_name = core.CfnParameter(
        self,
        id="EnvName-{}".format(branch),
        description="Name of the environment",
        type="String",
        default="auto",
    )
    # ===============================
    # IAM Role and Policy parameter
    role_name = core.CfnParameter(
        self,
        id="ConstraintRoleName-{}".format(branch),
        description="Name of the launch constraint role",
        type="String",
        default="CrossAccountAdmin",
    )
    # ===============================
    # Principal management lambdas
    unassign_lambda = core.CfnParameter(
        self,
        id="UnassignPrincipalLambdaName-{}".format(branch),
        description="Name of the unassign principal management Lambda",
        type="String",
        default="UnassignPrincipalFromServiceCatalog",
    )
    assign_lambda = core.CfnParameter(
        self,
        id="AssignPrincipalLambdaName-{}".format(branch),
        description="Name of the assign principal management Lambda",
        type="String",
        default="AssignPrincipalToServiceCatalog",
    )
    # ===============================
    # Branch name
    # NOTE(review): feature branches are assumed to look like
    # "feature-<name>"; a branch value without a dash would raise
    # IndexError here — confirm against the deployment tooling.
    if branch == "master":
        branch_name = "master"
    elif branch == "dmz":
        branch_name = "dmz"
    else:
        branch_name = "feature/{}".format(branch.split("-")[1])
    # ===============================
    # Path name
    path_name = core.CfnParameter(
        self,
        id="Path-{}".format(branch),
        description="CodeCommit repository folder for Service Catalogs Products",
        type="String",
        default="service_catalog/products/",
    )
    # ===============================
    # Path for the configuration INI
    path_ini = core.CfnParameter(
        self,
        id="ConfigINI-{}".format(branch),
        description="Configuration file path",
        type="String",
        default="service_catalog/config/config_{}.ini".format(branch.split("-")[0]),
    )
    # ===============================
    # Path for the template store
    template_store = core.CfnParameter(
        self,
        id="TemplateStore-{}".format(branch),
        description="S3 Bucket and Folder evaluated CloudFormation Templates",
        type="String",
        default="template-store/",
    )

    # ##############################################################
    # Artifacts Bucket
    # ##############################################################
    artifact_bucket = _s3.Bucket(
        self,
        id="ArtifactsBucket-{}".format(branch),
        bucket_name="my-sandbox-cicd-build-artifacts-{}".format(
            branch.split("-")[0]
        ),
        removal_policy=core.RemovalPolicy.DESTROY,
    )
    # Permissions for the cleanup lambda to empty (and delete) the bucket
    # on stack teardown — DESTROY alone fails on a non-empty bucket.
    empty_s3_policy = _iam.PolicyStatement(
        effect=_iam.Effect.ALLOW,
        actions=[
            "s3:DeleteBucket",
            "s3:ListBucket",
            "s3:DeleteObjects",
            "s3:DeleteObject",
        ],
        resources=[artifact_bucket.bucket_arn, artifact_bucket.bucket_arn + "/*"],
    )
    empty_bucket_lambda = Lambda.create_lambda(
        self,
        name="EmptyArtifactsBucket-{}".format(branch),
        function_name="EmptyArtifactsBucket-{}".format(branch),
        handler="empty_bucket.empty_bucket",
        code_injection_method=_lambda.Code.asset(path="./src/lambda/empty_bucket/"),
        lambda_runtime=_lambda.Runtime.PYTHON_3_7,
        amount_of_memory=128,
        timeout=30,
        amount_of_retries=0,
        rules_to_invoke=None,
        events_to_invoke=None,
        lambda_layers_to_use=None,
        policy_statements=[empty_s3_policy],
        log_retention=None,
        environment_vars=[],
    )
    cr_empty_bucket = core.CustomResource(
        self,
        id="CR-EmptyBucket-{}".format(branch),
        service_token=empty_bucket_lambda.lambda_function_object.function_arn,
        properties={"BUCKET_NAME": artifact_bucket.bucket_name},
        removal_policy=core.RemovalPolicy.DESTROY,
    )
    cr_empty_bucket.node.add_dependency(artifact_bucket)
    tagging_list.append(cr_empty_bucket)
    # Service Catalog itself must be able to read evaluated templates
    # from the template store prefix.
    artifact_bucket.add_to_resource_policy(
        permission=_iam.PolicyStatement(
            effect=_iam.Effect.ALLOW,
            actions=["s3:GetObject"],
            resources=[artifact_bucket.bucket_arn + "/template-store/*"],
            principals=[_iam.ServicePrincipal("servicecatalog")],
        )
    )
    tagging_list.append(artifact_bucket)

    # ##############################################################
    # Code repo
    # ##############################################################
    # The repository is created once (master stack) and only referenced
    # by every other branch stack.
    if branch == "master":
        service_catalog_git = _code.Repository(
            self,
            id="ServiceCatalogGit",
            repository_name="hd-auto-service-catalog",
            description="This git hosts all templates for the ServiceCatalog and the CICD itself.",
        )
    else:
        service_catalog_git = _code.Repository.from_repository_name(
            self,
            id="ServiceCatalogGit",
            repository_name="hd-auto-service-catalog",
        )
    tagging_list.append(service_catalog_git)

    # ##############################################################
    # Lambda Layer
    # ##############################################################
    source_code = _lambda.Code.from_asset("./src/lambda_layer/")
    layer = _lambda.LayerVersion(
        self,
        id="Python3_7_Layer-{}".format(branch),
        code=source_code,
        compatible_runtimes=[_lambda.Runtime.PYTHON_3_7],
    )
    tagging_list.append(layer)

    # ##############################################################
    # CodeBuild Project
    # ##############################################################
    build_project = _codebuild.PipelineProject(
        self,
        id="BuildProject-{}".format(branch),
        project_name="hd-auto-cicd-service-catalog-{}".format(branch),
        description="Build project for the Service Catalog pipeline",
        environment=_codebuild.BuildEnvironment(
            build_image=_codebuild.LinuxBuildImage.STANDARD_4_0, privileged=True
        ),
        cache=_codebuild.Cache.bucket(artifact_bucket, prefix="codebuild-cache"),
        build_spec=_codebuild.BuildSpec.from_source_filename("./buildspec.yaml"),
    )
    tagging_list.append(build_project)
    # CodeBuild IAM permissions to read write to s3
    artifact_bucket.grant_read_write(build_project)
    # Build and create test runs for templates.
    # NOTE(review): ALLOW on not_actions over "*" grants everything except
    # billing/organizations — extremely broad, kept as-is per the original
    # inline justification below.
    build_project.add_to_role_policy(
        statement=_iam.PolicyStatement(
            effect=_iam.Effect.ALLOW,
            not_actions=["aws-portal:*", "organizations:*"],
            resources=["*"],  # No further restriction due to IAM!
        )
    )

    # ##############################################################
    # Service Catalog
    # ##############################################################
    portfolio = _servicecatalog.CfnPortfolio(
        self,
        id="BasicPortfolio-{}".format(branch),
        display_name="hd-mdp-portfolio-{}".format(branch),
        provider_name="MDP-Team",
        accept_language="en",
        description="""
            This portfolio contains AWS Services combined into technical and functional approved architectures.
            You don't need IAM permissions to run those products. You will use them.
        """,
    )
    # Permissions for the cleanup lambda to strip products/constraints off
    # the portfolio so cdk destroy can remove it.
    remove_portfolio_policy = _iam.PolicyStatement(
        effect=_iam.Effect.ALLOW,
        actions=[
            "servicecatalog:SearchProductsAsAdmin",
            "servicecatalog:DeleteProduct",
            "servicecatalog:DeleteConstraint",
            "servicecatalog:ListConstraintsForPortfolio",
            "servicecatalog:DisassociatePrincipalFromPortfolio",
            "servicecatalog:DisassociateProductFromPortfolio",
        ],
        resources=["*"],
    )
    iam_policy = _iam.PolicyStatement(
        effect=_iam.Effect.ALLOW,
        actions=[
            "iam:GetRole",
            "iam:PassRole",
            "iam:CreateRole",
            "iam:DeleteRole",
            "iam:ListRoles",
            "iam:PutRolePolicy",
            "iam:DeleteRolePolicy",
            "iam:DeletePolicy",
        ],
        resources=[
            "arn:aws:iam::{}:role/{}".format(
                core.Aws.ACCOUNT_ID, role_name.value_as_string
            ),
        ],
    )
    remove_products_lambda = Lambda.create_lambda(
        self,
        name="RemoveProductsFromPortfolio-{}".format(branch),
        function_name="RemoveProductsFromPortfolio-{}".format(branch),
        handler="remove_portfolio.remove_portfolio",
        code_injection_method=_lambda.Code.asset(
            path="./src/lambda/remove_portfolio/"
        ),
        lambda_runtime=_lambda.Runtime.PYTHON_3_7,
        amount_of_memory=128,
        timeout=30,
        amount_of_retries=0,
        rules_to_invoke=None,
        events_to_invoke=None,
        lambda_layers_to_use=None,
        policy_statements=[remove_portfolio_policy, iam_policy],
        log_retention=None,
        environment_vars=[
            {"Key": "SANDBOX_ACCOUNT_ID", "Value": "{}".format(sandbox_account)}
        ],
    )
    cr_remove_products = core.CustomResource(
        self,
        id="CR-RemoveProductsFromPortfolio-{}".format(branch),
        service_token=remove_products_lambda.lambda_function_object.function_arn,
        properties={"PORTFOLIO_ID": portfolio.ref},
        removal_policy=core.RemovalPolicy.DESTROY,
    )
    cr_remove_products.node.add_dependency(portfolio)

    iam_role_list = [role_name.value_as_string]
    # TODO: Accept Portfolio share principal management (CfnPortfolioShare
    #       per shared account) — pending, master branch only.
    # The original code ran byte-identical association logic in separate
    # master/non-master if/else arms; merged into a single path.
    for idx, role in enumerate(iam_role_list):
        _servicecatalog.CfnPortfolioPrincipalAssociation(
            self,
            id="PrincipalAssociation-{}-{}".format(branch, idx),
            portfolio_id=portfolio.ref,
            principal_arn="arn:aws:iam::{}:role/{}".format(
                core.Aws.ACCOUNT_ID, role
            ),
            principal_type="IAM",
            accept_language="en",
        )
    core.CfnOutput(self, id="PortfolioId-{}".format(branch), value=portfolio.ref)
    tagging_list.append(portfolio)

    # ##############################################################
    # Lambda Permissions
    # ##############################################################
    s3_policy = _iam.PolicyStatement(
        effect=_iam.Effect.ALLOW,
        actions=[
            "s3:GetObject*",
            "s3:GetBucket*",
            "s3:List*",
            "s3:DeleteObject*",
            "s3:PutObject*",
            "s3:Abort*",
        ],
        resources=[artifact_bucket.bucket_arn, artifact_bucket.bucket_arn + "/*"],
    )
    codecommit_policy = _iam.PolicyStatement(
        effect=_iam.Effect.ALLOW,
        actions=[
            "codecommit:GetDifferences",
            "codecommit:GetBranch",
            "codecommit:GetCommit",
        ],
        resources=[service_catalog_git.repository_arn],
        conditions={"StringEquals": {"aws:RequestedRegion": "eu-central-1"}},
    )
    codebuild_policy = _iam.PolicyStatement(
        effect=_iam.Effect.ALLOW,
        actions=["codebuild:StartBuild", "codebuild:UpdateProject*"],
        resources=[build_project.project_arn],
        conditions={"StringEquals": {"aws:RequestedRegion": "eu-central-1"}},
    )
    service_catalog_policy = _iam.PolicyStatement(
        effect=_iam.Effect.ALLOW,
        actions=[
            "servicecatalog:CreateProduct",
            "servicecatalog:CreateProvisioningArtifact",
            "servicecatalog:UpdateProvisioningArtifact",
            "servicecatalog:DeleteProvisioningArtifact",
            "servicecatalog:ListProvisioningArtifacts",
            "servicecatalog:ListPortfolios",
            "servicecatalog:SearchProductsAsAdmin",
            "servicecatalog:AssociateProductWithPortfolio",
            "servicecatalog:AssociatePrincipalWithPortfolio",
            "servicecatalog:DisassociatePrincipalFromPortfolio",
            "servicecatalog:DisassociateProductFromPortfolio",
            "servicecatalog:DeleteProduct",
            "servicecatalog:CreatePortfolioShare",
            "servicecatalog:AcceptPortfolioShare",
            "servicecatalog:CreateConstraint",
            "servicecatalog:DeleteConstraint",
            "servicecatalog:ListConstraintsForPortfolio",
        ],
        resources=["*"],
    )
    sts_policy = _iam.PolicyStatement(
        effect=_iam.Effect.ALLOW,
        actions=["sts:AssumeRole"],
        resources=[
            "arn:aws:iam::{}:role/{}".format(
                sandbox_account, role_name.value_as_string
            ),
        ],
    )
    codepipeline_policy = _iam.PolicyStatement(
        effect=_iam.Effect.ALLOW,
        # Both actions support only a wildcard (*) in the policy Resource
        # element, see
        # https://docs.aws.amazon.com/codepipeline/latest/userguide/permissions-reference.html
        actions=[
            "codepipeline:PutJobFailureResult",
            "codepipeline:PutJobSuccessResult",
        ],
        resources=["*"],
        conditions={"StringEquals": {"aws:RequestedRegion": "eu-central-1"}},
    )
    lambda_policy = _iam.PolicyStatement(
        effect=_iam.Effect.ALLOW,
        actions=[
            "lambda:GetFunction",
            "lambda:CreateFunction",
            "lambda:DeleteFunction",
            "lambda:AddPermission",
            "lambda:RemovePermission",
            "lambda:CreateEventSourceMapping",
            "lambda:DeleteEventSourceMapping",
            "lambda:InvokeFunction",
            "lambda:UpdateFunctionCode",
            "lambda:UpdateFunctionConfiguration",
        ],
        resources=[
            "arn:aws:lambda:{}:{}:function:{}-{}".format(
                core.Aws.REGION,
                sandbox_account,
                unassign_lambda.value_as_string,
                sandbox_account,
            ),
            "arn:aws:lambda:{}:{}:function:{}-{}".format(
                core.Aws.REGION,
                sandbox_account,
                assign_lambda.value_as_string,
                sandbox_account,
            ),
        ],
        conditions={"StringEquals": {"aws:RequestedRegion": "eu-central-1"}},
    )

    # ##############################################################
    # CICD Lambdas
    # ##############################################################
    # ==========================
    # Get Latest Git Meta Data
    git_metadata = Lambda.create_lambda(
        self,
        name="GetLastGitChanges-{}".format(branch),
        function_name="GetLastGitChanges-{}".format(branch),
        handler="git_metadata.get_changes",
        code_injection_method=_lambda.Code.asset(path="./src/lambda/git_metadata/"),
        lambda_runtime=_lambda.Runtime.PYTHON_3_7,
        amount_of_memory=128,
        timeout=30,
        amount_of_retries=0,
        rules_to_invoke=None,
        events_to_invoke=None,
        lambda_layers_to_use=[layer],
        policy_statements=[
            codecommit_policy,
            codebuild_policy,
            codepipeline_policy,
            service_catalog_policy,
        ],
        log_retention=None,
        environment_vars=[
            {
                "Key": "REPOSITORY_NAME",
                "Value": "{}".format(service_catalog_git.repository_name),
            },
        ],
    )
    # ==========================
    # Principal Management Lambda
    principal_management = Lambda.create_lambda(
        self,
        name="PrincipalManagement-{}".format(branch),
        function_name="PrincipalManagement-{}".format(branch),
        handler="principal_management.principal_management",
        code_injection_method=_lambda.Code.asset(
            path="./src/lambda/principal_management/"
        ),
        lambda_runtime=_lambda.Runtime.PYTHON_3_7,
        amount_of_memory=1024,
        timeout=120,
        amount_of_retries=0,
        rules_to_invoke=None,
        events_to_invoke=None,
        lambda_layers_to_use=[layer],
        policy_statements=[
            iam_policy,
            lambda_policy,
            sts_policy,
            service_catalog_policy,
            codepipeline_policy,
            codecommit_policy,
        ],
        log_retention=None,
        environment_vars=[
            {"Key": "SANDBOX_ACCOUNT_ID", "Value": "{}".format(sandbox_account)}
        ],
    )
    # ==========================
    # Sync Service Catalog Lambda
    service_catalog_synchronisation = Lambda.create_lambda(
        self,
        name="UpdateServiceCatalog-{}".format(branch),
        function_name="UpdateServiceCatalog-{}".format(branch),
        handler="sync_catalog.service_catalog_janitor",
        code_injection_method=_lambda.Code.asset(
            path="./src/lambda/update_servicecatalog/"
        ),
        lambda_runtime=_lambda.Runtime.PYTHON_3_7,
        amount_of_memory=1024,
        timeout=120,
        amount_of_retries=0,
        rules_to_invoke=None,
        events_to_invoke=None,
        lambda_layers_to_use=[layer],
        policy_statements=[
            sts_policy,
            service_catalog_policy,
            codepipeline_policy,
            codecommit_policy,
            iam_policy,
            s3_policy,
        ],
        log_retention=None,
        environment_vars=[
            {
                "Key": "LOCAL_ROLE_NAME_SC",
                "Value": "{}".format(role_name.value_as_string),
            },
            {"Key": "SANDBOX_ACCOUNT_ID", "Value": "{}".format(sandbox_account)},
            {
                "Key": "REPOSITORY_NAME",
                "Value": "{}".format(service_catalog_git.repository_name),
            },
            {"Key": "PATH_INI", "Value": "{}".format(path_ini.value_as_string)},
            {"Key": "PATH", "Value": "{}".format(path_name.value_as_string)},
            {"Key": "BUCKET", "Value": "{}".format(artifact_bucket.bucket_name)},
            {
                "Key": "S3_PATH",
                "Value": "{}".format(template_store.value_as_string),
            },
        ],
    )

    # ##############################################################
    # CodePipeline
    # ##############################################################
    # General output
    source_output = _codepipeline.Artifact("git-change")
    tested_source_files = _codepipeline.Artifact("tested-cfn")
    cicd_pipeline = _codepipeline.Pipeline(
        self,
        id="ServiceCatalogPipeline-{}".format(branch),
        pipeline_name="ServiceCatalog-CICD-{}".format(branch),
        artifact_bucket=artifact_bucket,
        stages=[
            _codepipeline.StageProps(
                stage_name="Source_CFN-Templates",
                actions=[
                    _codepipeline_actions.CodeCommitSourceAction(
                        action_name="SourceControlCFNTemplates",
                        output=source_output,
                        repository=service_catalog_git,
                        variables_namespace="source",
                        branch=branch_name,
                    ),
                ],
            ),
            _codepipeline.StageProps(
                stage_name="Getting_CFN-Template",
                actions=[
                    _codepipeline_actions.LambdaInvokeAction(
                        action_name="GettingCFNTemplate",
                        lambda_=git_metadata.lambda_function_object,
                        user_parameters={
                            "before_commit": "",
                            "after_commit": "#{source.CommitId}",
                        },
                        variables_namespace="filtered_source",
                    )
                ],
            ),
            _codepipeline.StageProps(
                stage_name="Testing_CFN-Template",
                actions=[
                    _codepipeline_actions.CodeBuildAction(
                        type=_codepipeline_actions.CodeBuildActionType.BUILD,
                        action_name="TestingCFNTemplates",
                        project=build_project,
                        input=source_output,
                        outputs=[tested_source_files],
                        environment_variables={
                            "PIPELINE_NAME": _codebuild.BuildEnvironmentVariable(
                                value="ServiceCatalog-CICD-{}".format(branch),
                                type=_codebuild.BuildEnvironmentVariableType.PLAINTEXT,
                            ),
                            "FILES_ADDED": _codebuild.BuildEnvironmentVariable(
                                value="#{filtered_source.added_files}",
                                type=_codebuild.BuildEnvironmentVariableType.PLAINTEXT,
                            ),
                            "FILES_MODIFIED": _codebuild.BuildEnvironmentVariable(
                                value="#{filtered_source.modified_files}",
                                type=_codebuild.BuildEnvironmentVariableType.PLAINTEXT,
                            ),
                            "FILES_DELETED": _codebuild.BuildEnvironmentVariable(
                                value="#{filtered_source.deleted_files}",
                                type=_codebuild.BuildEnvironmentVariableType.PLAINTEXT,
                            ),
                            "JOB_ID": _codebuild.BuildEnvironmentVariable(
                                value="#{filtered_source.job_id}",
                                type=_codebuild.BuildEnvironmentVariableType.PLAINTEXT,
                            ),
                            "REPOSITORY_BRANCH": _codebuild.BuildEnvironmentVariable(
                                value="#{source.BranchName}",
                                type=_codebuild.BuildEnvironmentVariableType.PLAINTEXT,
                            ),
                            "REPOSITORY_NAME": _codebuild.BuildEnvironmentVariable(
                                value="#{source.RepositoryName}",
                                type=_codebuild.BuildEnvironmentVariableType.PLAINTEXT,
                            ),
                        },
                    )
                ],
            ),
            _codepipeline.StageProps(
                stage_name="Principal_Management",
                actions=[
                    _codepipeline_actions.LambdaInvokeAction(
                        action_name="PrincipalManagement",
                        lambda_=principal_management.lambda_function_object,
                        user_parameters={
                            "job_id": "#{filtered_source.job_id}",
                            "commit_id": "#{filtered_source.commit_id}",
                            "portfolio_id": portfolio.ref,
                        },
                    )
                ],
            ),
            _codepipeline.StageProps(
                stage_name="Update_Servicecatalog",
                actions=[
                    _codepipeline_actions.LambdaInvokeAction(
                        action_name="UpdateServiceCatalog",
                        lambda_=service_catalog_synchronisation.lambda_function_object,
                        inputs=[source_output],
                        user_parameters={
                            "modified_files": "#{filtered_source.modified_files}",
                            "added_files": "#{filtered_source.added_files}",
                            "deleted_files": "#{filtered_source.deleted_files}",
                            "job_id": "#{filtered_source.job_id}",
                            "commit_id": "#{filtered_source.commit_id}",
                            "portfolio_id": portfolio.ref,
                        },
                    )
                ],
            ),
        ],
    )
    cicd_pipeline.add_to_role_policy(
        statement=_iam.PolicyStatement(
            effect=_iam.Effect.ALLOW,
            actions=["codecommit:GetBranch", "codecommit:GetCommit"],
            resources=[service_catalog_git.repository_arn],
        )
    )
    tagging_list.append(cicd_pipeline)

    # ##############################################################
    # Tag resources
    # ##############################################################
    Tags.tag_resources(
        resources_list=tagging_list,
        keys_list=["app", "env"],
        values_list=[app_name.value_as_string, env_name.value_as_string],
    )
    # Export the layer ARN via SSM so other stacks can consume it.
    _ssm.StringParameter(
        self,
        id="LambdaLayerExport-{}".format(branch),
        parameter_name="/hd/mdp/{}/lambda/layer-pandas-numpy-servicecatalog".format(
            branch
        ),
        description="Lambda Layer ARN",
        string_value=layer.layer_version_arn,
    )
def __init__(self, scope: core.Construct, id: str,
             artifact_bucket: s3.Bucket,
             static_website_bucket: s3.Bucket,
             backend_fn: _lambda.Function,
             api: apigateway.LambdaRestApi,
             **kwargs) -> None:
    """Assemble the deploy pipeline for the backend Lambda and static client.

    Sources "Server/main.zip" and "Client/src.zip" from ``artifact_bucket``,
    builds the client with CodeBuild (baking in the API URL), then invokes a
    helper Lambda that swaps the backend function's code and deploys the
    built client into the website bucket.

    Args:
        scope: Parent construct.
        id: Construct id.
        artifact_bucket: Bucket holding the server zip and client sources.
        static_website_bucket: Target bucket for the built client assets.
        backend_fn: Lambda whose code is replaced on each deploy.
        api: REST API whose URL is injected into the client build.
        **kwargs: Forwarded to the parent construct.
    """
    super().__init__(scope, id, **kwargs)

    # Helper Lambda that copies Server/main.zip onto the backend function.
    update_fn = _lambda.Function(
        scope=self,
        id="source-update-function",
        runtime=_lambda.Runtime.PYTHON_3_8,
        handler="index.handler",
        timeout=core.Duration.seconds(10),
        code=_lambda.Code.from_asset(os.path.join("lambdas", "updateSource")),
    )
    # It may rewrite exactly one function's code ...
    update_fn.add_to_role_policy(
        statement=iam.PolicyStatement(
            actions=["lambda:UpdateFunctionCode"],
            resources=[backend_fn.function_arn],
        )
    )
    # ... and read exactly one object from the artifact bucket.
    update_fn.add_to_role_policy(
        statement=iam.PolicyStatement(
            actions=["s3:GetObject"],
            resources=[artifact_bucket.bucket_arn + "/Server/main.zip"],
        )
    )

    # Codepipeline
    pipeline = codepipeline.Pipeline(
        scope=self,
        id="deploy-pipeline",
        restart_execution_on_update=True,
    )

    server_source = codepipeline.Artifact()
    client_source = codepipeline.Artifact()
    pipeline.add_stage(
        stage_name="Source",
        actions=[
            codepipeline_actions.S3SourceAction(
                action_name="LambdaSource",
                bucket=artifact_bucket,
                bucket_key="Server/main.zip",
                output=server_source,
            ),
            codepipeline_actions.S3SourceAction(
                action_name="ClientSource",
                bucket=artifact_bucket,
                bucket_key="Client/src.zip",
                output=client_source,
            ),
        ],
    )

    # Buildspec for the client: inject the API URL, run the tests, and
    # emit the production build directory as the artifact.
    client_build_spec = {
        "version": "0.2",
        "env": {
            "variables": {
                "REACT_APP_AUTH_URL": api.url,
            }
        },
        "phases": {
            "install": {"commands": ["npm install -g yarn"]},
            "build": {"commands": ["npm install", "yarn test", "yarn build"]},
        },
        "artifacts": {
            "base-directory": "build",
            "files": ["**/*"],
        },
    }

    client_build_output = codepipeline.Artifact()
    pipeline.add_stage(
        stage_name="Build",
        actions=[
            codepipeline_actions.CodeBuildAction(
                action_name="ClientBuild",
                project=codebuild.Project(
                    scope=self,
                    id="codebuild-client",
                    build_spec=codebuild.BuildSpec.from_object(client_build_spec),
                ),
                input=client_source,
                outputs=[client_build_output],
            )
        ],
    )

    pipeline.add_stage(
        stage_name="Deploy",
        actions=[
            codepipeline_actions.LambdaInvokeAction(
                action_name="UpdateSource",
                lambda_=update_fn,
                inputs=[server_source],
                user_parameters={
                    "functionName": backend_fn.function_name,
                    "sourceBucket": artifact_bucket.bucket_name,
                    "sourceKey": "Server/main.zip",
                },
            ),
            codepipeline_actions.S3DeployAction(
                action_name="DeployClient",
                bucket=static_website_bucket,
                input=client_build_output,
                extract=True,
            ),
        ],
    )
def create_stackset_action(
    scope,  # NOSONAR:S107 this function is designed to take many arguments
    action_name,
    blueprint_bucket,
    source_output,
    artifact,
    template_file,
    stage_params_file,
    accound_ids,
    org_ids,
    regions,
    assets_bucket,
    stack_name,
):
    """Create an invokeLambda action (for CloudFormation StackSets) to be added to an AWS Codepipeline stage.

    :scope: CDK Construct scope that's needed to create CDK resources
    :action_name: name of the StackSet action
    :blueprint_bucket: CDK object of the blueprint bucket that contains resources for BYOM pipeline
    :source_output: CDK object of the Source action's output
    :artifact: name of the input artifact to the StackSet action
    :template_file: name of the CloudFormation template to be deployed
    :stage_params_file: name of the template parameters for the stage
    :accound_ids: list of AWS accounts where the stack will be deployed
    :org_ids: list of AWS organizational ids where the stack will be deployed
    :regions: list of regions where the stack will be deployed
    :assets_bucket: the bucket cdk object where pipeline assets are stored
    :stack_name: name of the stack to be deployed
    :return: tuple of (ARN of the invoked Lambda, codepipeline invokeLambda action as a CDK
        object that can be attached to a codepipeline stage)
    """
    # Execution role assumed by the StackSet-managing Lambda.
    stackset_lambda_role = create_service_role(
        scope,
        f"{action_name}_role",
        lambda_service,
        "The role that is assumed by create_update_cf_stackset Lambda function.",
    )

    # Suffix with a random fragment so repeated deployments get unique
    # StackSet names.
    stack_name = f"{stack_name}-{str(uuid.uuid4())[:8]}"

    # CloudFormation StackSet permissions, scoped to the generated name.
    stackset_permissions = cloudformation_stackset_policy(stack_name)
    instances_permissions = cloudformation_stackset_instances_policy(stack_name)
    stackset_lambda_role.add_to_policy(stackset_permissions)
    stackset_lambda_role.add_to_policy(instances_permissions)
    add_logs_policy(stackset_lambda_role)

    # Lambda that performs the create/update of the StackSet when the
    # pipeline stage runs.
    stackset_lambda = lambda_.Function(
        scope,
        f"{action_name}_stackset_lambda",
        runtime=lambda_.Runtime.PYTHON_3_8,
        handler="main.lambda_handler",
        role=stackset_lambda_role,
        code=lambda_.Code.from_bucket(
            blueprint_bucket,
            "blueprints/byom/lambdas/create_update_cf_stackset.zip",
        ),
        timeout=core.Duration.minutes(15),
    )
    # Suppress linter findings on the generated policy resources.
    stackset_lambda.node.default_child.cfn_options.metadata = (
        suppress_lambda_policies()
    )
    # NOTE(review): index 2 depends on the CDK construct-tree layout of the
    # role's children — fragile across CDK upgrades; verify after bumping CDK.
    role_children = stackset_lambda.role.node.find_all()
    role_children[2].node.default_child.cfn_options.metadata = (
        suppress_pipeline_policy()
    )

    # Create codepipeline action
    invoke_action = codepipeline_actions.LambdaInvokeAction(
        action_name=action_name,
        inputs=[source_output],
        variables_namespace=f"{action_name}-namespace",
        lambda_=stackset_lambda,
        user_parameters={
            "stackset_name": stack_name,
            "artifact": artifact,
            "template_file": template_file,
            "stage_params_file": stage_params_file,
            "accound_ids": accound_ids,
            "org_ids": org_ids,
            "regions": regions,
        },
        run_order=1,
    )
    return (stackset_lambda.function_arn, invoke_action)