def add_pipeline_trigger(self, trigger_type, trigger_config):
    """Attach an additional trigger to this pipeline.

    Args:
        trigger_type: Key into ``self._accepted_triggers`` naming the kind
            of trigger to create (e.g. the CodeArtifact trigger).
        trigger_config: Dict describing the trigger. For CodeArtifact it
            must contain ``"repository"`` and may contain ``"package"``;
            when ``"package"`` is omitted the rule matches all packages in
            the repository.

    Raises:
        Exception: If ``trigger_type`` is not one of the accepted triggers.
    """
    if trigger_type not in self._accepted_triggers:
        # Lazy %-style args so formatting only happens when the record
        # is actually emitted.
        LOGGER.error(
            "%s is not currently supported. Supported values are: %s",
            trigger_type,
            self._accepted_triggers.keys(),
        )
        raise Exception(
            f"{trigger_type} is not currently supported as a pipeline trigger"
        )
    trigger_type = self._accepted_triggers[trigger_type]

    if trigger_type == self.CODEARTIFACT_TRIGGER:
        repository = trigger_config["repository"]
        package = trigger_config.get("package", "all")
        details = {"repositoryName": repository}
        if trigger_config.get("package"):
            details["packageName"] = trigger_config["package"]
        _eventbridge.Rule(
            self,
            f"codeartifact-pipeline-trigger-{repository}-{package}",
            event_pattern=_eventbridge.EventPattern(
                source=["aws.codeartifact"],
                detail_type=["CodeArtifact Package Version State Change"],
                detail=details,
            ),
            targets=[
                _eventbridge_targets.CodePipeline(
                    pipeline=_codepipeline.Pipeline.from_pipeline_arn(
                        self,
                        # Bug fix: the id must be unique per trigger — a
                        # fixed id ("imported") raised a duplicate
                        # construct id error when this method was called
                        # more than once on the same construct.
                        f"imported-{repository}-{package}",
                        pipeline_arn=self.cfn.ref,
                    )
                )
            ],
        )
def __init__(self, scope: core.Construct, id: str, params: dict, **kwargs):  # pylint: disable=W0622
    """Create the event rules that drive the given pipeline.

    Depending on ``params`` this wires up: a CodeCommit change trigger,
    an SNS notification rule for pipeline state changes, completion
    triggers that start downstream pipelines, and a schedule trigger.
    """
    super().__init__(scope, id, **kwargs)
    name = params["name"]
    pipeline_ref = _codepipeline.Pipeline.from_pipeline_arn(
        self, 'pipeline', params["pipeline"])
    source = params.get('source', {})

    if source.get('account_id') and source.get('provider') == 'codecommit':
        # Start the pipeline whenever the master branch of the source
        # CodeCommit repository is created or updated.
        repo_arn = (
            f"arn:aws:codecommit:{ADF_DEPLOYMENT_REGION}:"
            f"{params['source']['account_id']}:"
            f"{params['source']['repo_name']}"
        )
        change_rule = _events.Rule(
            self,
            f'trigger_{name}',
            description=f"Triggers {name} on changes in source CodeCommit repository",
            event_pattern=_events.EventPattern(
                resources=[repo_arn],
                source=["aws.codecommit"],
                detail_type=['CodeCommit Repository State Change'],
                detail={
                    "event": ["referenceCreated", "referenceUpdated"],
                    "referenceType": ["branch"],
                    "referenceName": ["master"],
                },
            ),
        )
        change_rule.add_target(_targets.CodePipeline(pipeline=pipeline_ref))

    if params.get('topic_arn'):
        # Forward FAILED/STARTED/SUCCEEDED state changes to the given
        # SNS topic.
        topic = _sns.Topic.from_topic_arn(self, 'topic_arn', params["topic_arn"])
        state_rule = _events.Rule(
            self,
            f'pipeline_state_{name}',
            description=f"{name} | Trigger notifications based on pipeline state changes",
            enabled=True,
            event_pattern=_events.EventPattern(
                detail={
                    "state": ["FAILED", "STARTED", "SUCCEEDED"],
                    "pipeline": [f"{ADF_PIPELINE_PREFIX}{name}"],
                },
                detail_type=["CodePipeline Pipeline Execution State Change"],
                source=["aws.codepipeline"],
            ),
        )
        # Need to parse and get the pipeline: "$.detail.pipeline"
        # state: "$.detail.state"
        state_rule.add_target(
            _targets.SnsTopic(
                topic=topic,
                message=_events.RuleTargetInput.from_text(
                    f"The pipeline {_events.EventField.from_path('$.detail.pipeline')}"
                    f" from account {_events.EventField.account}"
                    f" has {_events.EventField.from_path('$.detail.state')}"
                    f" at {_events.EventField.time}."
                ),
            )
        )

    if params.get('completion_trigger'):
        # There might be other types of completion triggers later, eg lambda..
        downstream_pipelines = params['completion_trigger'].get('pipelines', [])
        for index, downstream in enumerate(downstream_pipelines):
            completion_rule = _events.Rule(
                self,
                f'completion_{downstream}',
                description=f"Triggers {downstream} on completion of {params['pipeline']}",
                enabled=True,
                event_pattern=_events.EventPattern(
                    detail={
                        "state": ["SUCCEEDED"],
                        "pipeline": [f"{ADF_PIPELINE_PREFIX}{name}"],
                    },
                    detail_type=["CodePipeline Pipeline Execution State Change"],
                    source=["aws.codepipeline"],
                ),
            )
            downstream_ref = _codepipeline.Pipeline.from_pipeline_arn(
                self,
                f'pipeline-{index}',
                f"arn:aws:codepipeline:{ADF_DEPLOYMENT_REGION}:"
                f"{ADF_DEPLOYMENT_ACCOUNT_ID}:"
                f"{ADF_PIPELINE_PREFIX}{downstream}",
            )
            completion_rule.add_target(
                _targets.CodePipeline(pipeline=downstream_ref))

    if params.get('schedule'):
        schedule_rule = _events.Rule(
            self,
            f'schedule_{name}',
            description=f"Triggers {name} on a schedule of {params['schedule']}",
            enabled=True,
            schedule=_events.Schedule.expression(params['schedule']),
        )
        schedule_rule.add_target(_targets.CodePipeline(pipeline=pipeline_ref))
def __init__(self, scope: core.Construct, id: str, params: dict, **kwargs):  # pylint: disable=W0622
    """Create the event rules that drive the given pipeline.

    Depending on ``params`` this wires up: a CodeCommit change trigger
    (when the source opts in via ``trigger_on_changes`` and is not
    polling), an SNS notification rule for pipeline state changes,
    completion triggers that start downstream pipelines, and a schedule
    trigger.
    """
    super().__init__(scope, id, **kwargs)
    # pylint: disable=no-value-for-parameter
    stack = core.Stack.of(self)
    _pipeline = _codepipeline.Pipeline.from_pipeline_arn(
        self, 'pipeline', params["pipeline"])
    _source = params.get('source', {})
    _source_account = _source.get('account_id')
    _provider = _source.get('provider')
    _add_trigger_on_changes = (
        _provider == 'codecommit'
        and _source_account
        and _source.get('trigger_on_changes')
        and not _source.get('poll_for_changes')
    )
    name = params.get('name')

    if _add_trigger_on_changes:
        # Bug fix: only read these keys inside the branch — reading them
        # unconditionally raised KeyError for pipelines without a
        # 'source' mapping.
        account_id = params['source']['account_id']
        repo_name = params['source']['repo_name']
        _event = _events.Rule(
            self,
            f'trigger_{name}',
            description=f'Triggers {name} on changes in source CodeCommit repository',
            event_pattern=_events.EventPattern(
                resources=[
                    f'arn:{stack.partition}:codecommit:{ADF_DEPLOYMENT_REGION}:{account_id}:{repo_name}'
                ],
                source=["aws.codecommit"],
                detail_type=['CodeCommit Repository State Change'],
                detail={
                    "event": ["referenceCreated", "referenceUpdated"],
                    "referenceType": ["branch"],
                    "referenceName": [params['source']['branch']],
                },
            ),
        )
        _event.add_target(_targets.CodePipeline(pipeline=_pipeline))

    if params.get('topic_arn'):
        # pylint: disable=no-value-for-parameter
        _topic = _sns.Topic.from_topic_arn(self, 'topic_arn', params["topic_arn"])
        _event = _events.Rule(
            self,
            f'pipeline_state_{name}',
            description=f"{name} | Trigger notifications based on pipeline state changes",
            enabled=True,
            event_pattern=_events.EventPattern(
                detail={
                    "state": ["FAILED", "STARTED", "SUCCEEDED"],
                    "pipeline": [
                        f"{ADF_PIPELINE_PREFIX}{name}",
                    ],
                },
                detail_type=["CodePipeline Pipeline Execution State Change"],
                source=["aws.codepipeline"],
            ),
        )
        _event.add_target(
            _targets.SnsTopic(
                topic=_topic,
                message=_events.RuleTargetInput.from_text(
                    # Need to parse and get the pipeline: "$.detail.pipeline"
                    # state: "$.detail.state"
                    f"The pipeline {_events.EventField.from_path('$.detail.pipeline')} "
                    f"from account {_events.EventField.account} "
                    f"has {_events.EventField.from_path('$.detail.state')} "
                    f"at {_events.EventField.time}."
                ),
            )
        )

    if params.get('completion_trigger'):
        # There might be other types of completion triggers later, eg lambda..
        for index, pipeline in enumerate(params['completion_trigger'].get(
                'pipelines', [])):
            _event = _events.Rule(
                self,
                f'completion_{pipeline}',
                # Bug fix: the 'f' prefix was missing, so the literal
                # text "{pipeline}" ended up in the rule description.
                description=f"Triggers {pipeline} on completion of {params['pipeline']}",
                enabled=True,
                event_pattern=_events.EventPattern(
                    detail={
                        "state": ["SUCCEEDED"],
                        "pipeline": [
                            f"{ADF_PIPELINE_PREFIX}{name}",
                        ],
                    },
                    detail_type=["CodePipeline Pipeline Execution State Change"],
                    source=["aws.codepipeline"],
                ),
            )
            # pylint: disable=no-value-for-parameter
            _completion_pipeline = _codepipeline.Pipeline.from_pipeline_arn(
                self,
                f'pipeline-{index}',
                f'arn:{stack.partition}:codepipeline:'
                f'{ADF_DEPLOYMENT_REGION}:{ADF_DEPLOYMENT_ACCOUNT_ID}:'
                f'{ADF_PIPELINE_PREFIX}{pipeline}')
            _event.add_target(
                _targets.CodePipeline(pipeline=_completion_pipeline))

    if params.get('schedule'):
        _event = _events.Rule(
            self,
            f'schedule_{params["name"]}',
            description=f"Triggers {params['name']} on a schedule of {params['schedule']}",
            enabled=True,
            # pylint: disable=no-value-for-parameter
            schedule=_events.Schedule.expression(params['schedule']),
        )
        _target_pipeline = _targets.CodePipeline(pipeline=_pipeline)
        _event.add_target(_target_pipeline)
def __init__(
    self,
    scope: core.Construct,
    construct_id: str,
    # deployment_asset: s3_assets.Asset,
    **kwargs,
) -> None:
    """Provision the SageMaker A/B-testing deployment pipeline.

    Creates the CloudFormation parameters required by SageMaker custom
    project templates, seeds a CodeCommit repository from an S3 bucket/key,
    builds the CDK app and a model-registration step with CodeBuild, and
    wires a CodePipeline that is triggered by Model Registry and CodeCommit
    events.
    """
    super().__init__(scope, construct_id, **kwargs)

    # Create required parameters for SageMaker projects.
    # see: https://docs.aws.amazon.com/sagemaker/latest/dg/sagemaker-projects-templates-custom.html
    # see also: https://docs.aws.amazon.com/cdk/latest/guide/parameters.html
    project_name = core.CfnParameter(
        self,
        "SageMakerProjectName",
        type="String",
        description="The name of the SageMaker project.",
        min_length=1,
        max_length=32,
    )
    project_id = core.CfnParameter(
        self,
        "SageMakerProjectId",
        type="String",
        min_length=1,
        max_length=16,
        description="Service generated Id of the project.",
    )
    stage_name = core.CfnParameter(
        self,
        "StageName",
        type="String",
        min_length=1,
        max_length=8,
        description="The stage name.",
        default="dev",
    )
    seed_bucket = core.CfnParameter(
        self,
        "CodeCommitSeedBucket",
        type="String",
        description="The optional s3 seed bucket",
        min_length=1,
    )
    seed_key = core.CfnParameter(
        self,
        "CodeCommitSeedKey",
        type="String",
        description="The optional s3 seed key",
        min_length=1,
    )
    register_lambda = core.CfnParameter(
        self,
        "RegisterLambda",
        type="String",
        description="The AWS Lambda to invoke when registering this model",
        min_length=1,
    )

    # Get the service catalog role for all permissions (if None CDK will
    # create new roles). CodeBuild and CodePipeline resources need to
    # start with "sagemaker-" to be within the default policy.
    service_catalog_role = aws_iam.Role.from_role_arn(
        self,
        "PipelineRole",
        f"arn:{self.partition}:iam::{self.account}:role/service-role/AmazonSageMakerServiceCatalogProductsUseRole",
    )

    # Define the repository name and branch
    branch_name = "main"

    # Create source repo from seed bucket/key
    repo = codecommit.CfnRepository(
        self,
        "CodeRepo",
        repository_name="sagemaker-{}-repo".format(project_name.value_as_string),
        repository_description="Amazon SageMaker A/B testing pipeline",
        code=codecommit.CfnRepository.CodeProperty(
            s3=codecommit.CfnRepository.S3Property(
                bucket=seed_bucket.value_as_string,
                key=seed_key.value_as_string,
                object_version=None,
            ),
            branch_name=branch_name,
        ),
        tags=[
            core.CfnTag(key="sagemaker:deployment-stage",
                        value=stage_name.value_as_string),
            core.CfnTag(key="sagemaker:project-id",
                        value=project_id.value_as_string),
            core.CfnTag(key="sagemaker:project-name",
                        value=project_name.value_as_string),
        ],
    )

    # Reference the newly created repository
    code = codecommit.Repository.from_repository_name(
        self, "ImportedRepo", repo.attr_name)

    cdk_build = codebuild.PipelineProject(
        self,
        "CdkBuild",
        project_name="sagemaker-{}-cdk-{}".format(
            project_name.value_as_string, stage_name.value_as_string),
        role=service_catalog_role,
        build_spec=codebuild.BuildSpec.from_object(
            dict(
                version="0.2",
                phases=dict(
                    install=dict(commands=[
                        "npm install aws-cdk",
                        "npm update",
                        "python -m pip install -r requirements.txt",
                    ]),
                    build=dict(commands=[
                        "npx cdk synth -o dist --path-metadata false",
                    ]),
                ),
                artifacts={
                    "base-directory": "dist",
                    "files": ["*.template.json"],
                },
                # NOTE(review): "environment" is not a buildspec key
                # (buildspec uses "env"; the build image is normally the
                # `environment=` argument of PipelineProject), so this
                # entry is likely ignored — confirm and move if intended.
                environment=dict(
                    buildImage=codebuild.LinuxBuildImage.AMAZON_LINUX_2_3,
                ),
            )),
        environment_variables={
            "SAGEMAKER_PROJECT_NAME": codebuild.BuildEnvironmentVariable(
                value=project_name.value_as_string),
            "SAGEMAKER_PROJECT_ID": codebuild.BuildEnvironmentVariable(
                value=project_id.value_as_string),
            "STAGE_NAME": codebuild.BuildEnvironmentVariable(
                value=stage_name.value_as_string),
        },
    )

    register_build = codebuild.PipelineProject(
        self,
        "RegisterBuild",
        project_name="sagemaker-{}-register-{}".format(
            project_name.value_as_string, stage_name.value_as_string),
        role=service_catalog_role,
        build_spec=codebuild.BuildSpec.from_object(
            dict(
                version="0.2",
                phases=dict(build=dict(commands=[
                    "python register.py > output.txt",
                ]), ),
                artifacts={
                    "files": ["output.txt"],
                },
                # NOTE(review): same misplaced "environment" key as in
                # CdkBuild above — confirm intent.
                environment=dict(
                    buildImage=codebuild.LinuxBuildImage.AMAZON_LINUX_2_3,
                ),
            )),
        environment_variables={
            "SAGEMAKER_PROJECT_NAME": codebuild.BuildEnvironmentVariable(
                value=project_name.value_as_string),
            "STAGE_NAME": codebuild.BuildEnvironmentVariable(
                value=stage_name.value_as_string),
            "REGISTER_LAMBDA": codebuild.BuildEnvironmentVariable(
                value=register_lambda.value_as_string),
        },
    )

    source_output = codepipeline.Artifact()
    cdk_build_output = codepipeline.Artifact()
    register_build_output = codepipeline.Artifact()

    # Create the s3 artifact (name must be < 63 chars)
    s3_artifact = s3.Bucket(
        self,
        "S3Artifact",
        bucket_name="sagemaker-{}-artifact-{}-{}".format(
            project_id.value_as_string, stage_name.value_as_string,
            self.region),
        removal_policy=core.RemovalPolicy.DESTROY,
    )

    deploy_pipeline = codepipeline.Pipeline(
        self,
        "Pipeline",
        role=service_catalog_role,
        artifact_bucket=s3_artifact,
        pipeline_name="sagemaker-{}-pipeline-{}".format(
            project_name.value_as_string, stage_name.value_as_string),
        stages=[
            codepipeline.StageProps(
                stage_name="Source",
                actions=[
                    codepipeline_actions.CodeCommitSourceAction(
                        action_name="CodeCommit_Source",
                        repository=code,
                        # Trigger rules are created explicitly below.
                        trigger=codepipeline_actions.CodeCommitTrigger.NONE,
                        event_role=service_catalog_role,
                        output=source_output,
                        branch=branch_name,
                        role=service_catalog_role,
                    )
                ],
            ),
            codepipeline.StageProps(
                stage_name="Build",
                actions=[
                    codepipeline_actions.CodeBuildAction(
                        action_name="CDK_Build",
                        project=cdk_build,
                        input=source_output,
                        outputs=[
                            cdk_build_output,
                        ],
                        role=service_catalog_role,
                    ),
                ],
            ),
            codepipeline.StageProps(
                stage_name="Deploy",
                actions=[
                    codepipeline_actions.CloudFormationCreateUpdateStackAction(
                        action_name="SageMaker_CFN_Deploy",
                        run_order=1,
                        template_path=cdk_build_output.at_path(
                            "ab-testing-sagemaker.template.json"),
                        stack_name="sagemaker-{}-deploy-{}".format(
                            project_name.value_as_string,
                            stage_name.value_as_string),
                        admin_permissions=False,
                        role=service_catalog_role,
                        deployment_role=service_catalog_role,
                        replace_on_failure=True,
                    ),
                ],
            ),
            codepipeline.StageProps(
                stage_name="Register",
                actions=[
                    codepipeline_actions.CodeBuildAction(
                        action_name="Register_Build",
                        project=register_build,
                        input=source_output,
                        outputs=[
                            register_build_output,
                        ],
                        role=service_catalog_role,
                    ),
                ],
            ),
        ],
    )

    # Add deploy rule to target the code pipeline when a model package
    # is approved in either the champion or challenger package group.
    deploy_rule = events.Rule(
        self,
        "DeployRule",
        rule_name="sagemaker-{}-model-{}".format(
            project_name.value_as_string, stage_name.value_as_string),
        description="Rule to trigger a deployment when SageMaker Model registry is updated with a new model package. For example, a new model package is registered with Registry",
        event_pattern=events.EventPattern(
            source=["aws.sagemaker"],
            detail_type=["SageMaker Model Package State Change"],
            detail={
                "ModelPackageGroupName": [
                    f"{project_name.value_as_string}-champion",
                    f"{project_name.value_as_string}-challenger",
                ]
            },
        ),
        targets=[
            targets.CodePipeline(
                pipeline=deploy_pipeline,
                event_role=service_catalog_role,
            )
        ],
    )

    code_rule = events.Rule(
        self,
        "CodeRule",
        rule_name="sagemaker-{}-code-{}".format(
            project_name.value_as_string, stage_name.value_as_string),
        # Bug fix: the description was copy-pasted from DeployRule and
        # talked about the Model Registry even though this rule matches
        # CodeCommit repository events.
        description="Rule to trigger a deployment when the CodeCommit repository is updated. For example, a new commit is pushed to the tracked branch",
        event_pattern=events.EventPattern(
            source=["aws.codecommit"],
            detail_type=["CodeCommit Repository State Change"],
            detail={
                "event": ["referenceCreated", "referenceUpdated"],
                "referenceType": ["branch"],
                "referenceName": [branch_name],
            },
            resources=[code.repository_arn],
        ),
        targets=[
            targets.CodePipeline(
                pipeline=deploy_pipeline,
                event_role=service_catalog_role,
            )
        ],
    )