Example #1
def test_join_expressions():
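    # With no "on" argument the delimiter defaults to "". Parameters, step
    # properties, and execution variables each serialize to a {"Get": ...}
    # expression, and a nested Join stays a nested Std:Join.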
    assert Join(
        values=[
            "foo",
            ParameterFloat(name="MyFloat"),
            ParameterInteger(name="MyInt"),
            ParameterString(name="MyStr"),
            Properties(path="Steps.foo.OutputPath.S3Uri"),
            ExecutionVariables.PIPELINE_EXECUTION_ID,
            Join(on=",", values=[1, "a", False, 1.1]),
        ]
    ).expr == {
        "Std:Join": {
            "On": "",
            "Values": [
                "foo",
                {"Get": "Parameters.MyFloat"},
                {"Get": "Parameters.MyInt"},
                {"Get": "Parameters.MyStr"},
                {"Get": "Steps.foo.OutputPath.S3Uri"},
                {"Get": "Execution.PipelineExecutionId"},
                {"Std:Join": {"On": ",", "Values": [1, "a", False, 1.1]}},
            ],
        },
    }
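Example #2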
def test_join_primitives_default_on():
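    # Primitive values pass through unchanged; the delimiter defaults to "".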
    assert Join(values=[1, "a", False, 1.1]).expr == {
        "Std:Join": {
            "On": "",
            "Values": [1, "a", False, 1.1],
        },
    }
Example #3
    def get_top_model_s3_uri(self,
                             top_k: int,
                             s3_bucket: str,
                             prefix: str = ""):
        """Get the model artifact s3 uri from the top performing training jobs.

        Args:
            top_k (int): the index of the top performing training job
                tuning step stores up to 50 top performing training jobs, hence
                a valid top_k value is from 0 to 49. The best training job
                model is at index 0
            s3_bucket (str): the s3 bucket to store the training job output artifact
            prefix (str): the s3 key prefix to store the training job output artifact
        """
        values = ["s3:/", s3_bucket]
        if prefix != "" and prefix is not None:
            values.append(prefix)

        return Join(
            on="/",
            values=values + [
                self.properties.TrainingJobSummaries[top_k].TrainingJobName,
                "output/model.tar.gz",
            ],
        )
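
A minimal usage sketch (hedged: the tuning step, bucket, and prefix below are illustrative assumptions, not from the original):

# Assume step_tuning is an existing sagemaker.workflow.steps.TuningStep.
best_model_uri = step_tuning.get_top_model_s3_uri(
    top_k=0,  # index 0 holds the best-performing training job
    s3_bucket="my-bucket",
    prefix="tuning/output",
)

Example #4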
def test_join_primitives():
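    # Same inputs as the default-delimiter test, but with an explicit "," separator.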
    assert Join(on=",", values=[1, "a", False, 1.1]).expr == {
        "Std:Join": {
            "On": ",",
            "Values": [1, "a", False, 1.1],
        },
    }
Example #5
def test_fail_step_with_join_fn_in_error_message():
    param = ParameterInteger(name="MyInt", default_value=2)
    cond = ConditionEquals(left=param, right=1)
    step_cond = ConditionStep(
        name="CondStep",
        conditions=[cond],
        if_steps=[],
        else_steps=[],
    )
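    # The condition's Outcome is only resolved at runtime, so it is composed
    # into the error message with Join rather than string concatenation.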
    step_fail = FailStep(
        name="FailStep",
        error_message=Join(on=": ",
                           values=[
                               "Failed due to xxx == yyy returns",
                               step_cond.properties.Outcome
                           ]),
    )
    pipeline = Pipeline(
        name="MyPipeline",
        steps=[step_cond, step_fail],
        parameters=[param],
    )

    _expected_dsl = [
        {
            "Name": "CondStep",
            "Type": "Condition",
            "Arguments": {
                "Conditions": [{
                    "Type": "Equals",
                    "LeftValue": {
                        "Get": "Parameters.MyInt"
                    },
                    "RightValue": 1
                }],
                "IfSteps": [],
                "ElseSteps": [],
            },
        },
        {
            "Name": "FailStep",
            "Type": "Fail",
            "Arguments": {
                "ErrorMessage": {
                    "Std:Join": {
                        "On":
                        ": ",
                        "Values": [
                            "Failed due to xxx == yyy returns",
                            {
                                "Get": "Steps.CondStep.Outcome"
                            },
                        ],
                    }
                }
            },
        },
    ]

    assert json.loads(pipeline.definition())["Steps"] == _expected_dsl
Example #6
def test_add_func_of_join():
    func_join1 = Join(values=[1, "a"])
    param = ParameterInteger(name="MyInteger", default_value=3)

    with pytest.raises(TypeError) as error:
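        # "+" concatenation of pipeline variables is rejected at the SDK level.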
        func_join1 + param

    assert str(error.value) == "Pipeline variables do not support concatenation."
Example #7
def test_implicit_value_on_join():
    func = Join(values=[1, "a", False, 1.1])
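    # Implicit casts would need the concrete value, which only exists at runtime.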

    with pytest.raises(TypeError) as error:
        str(func)
    assert str(error.value) == "Pipeline variables do not support __str__ operation."

    with pytest.raises(TypeError) as error:
        int(func)
    assert str(error.value) == "Pipeline variables do not support __int__ operation."

    with pytest.raises(TypeError) as error:
        float(func)
    assert str(error.value) == "Pipeline variables do not support __float__ operation."
Example #8
    def to_string(self):
        """Prompt the pipeline to convert the pipeline variable to String in runtime"""
        from sagemaker.workflow.functions import Join

        return Join(on="", values=[self])
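
For instance (a hedged sketch; the parameter name is illustrative):

param_int = ParameterInteger(name="MyInteger", default_value=3)
# Equivalent to Join(on="", values=[param_int]); renders as a Std:Join in the DSL.
message = Join(on=" ", values=["the value is", param_int.to_string()])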
Example #9
def test_string_builtin_funcs_that_return_bool_on_join():
    func = Join(on=",", values=["s3:/", "my-bucket", "a"])
    # The Join is only parsed at runtime by the pipeline backend, so these
    # string built-ins cannot be evaluated in the SDK and simply return False.
    assert not func.startswith("s3")
    assert not func.endswith("s3")
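Example #10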
def test_training_step_with_output_path_as_join(
    sagemaker_session, role, tf_full_version, tf_full_py_version, pipeline_name, region_name
):
    base_dir = os.path.join(DATA_DIR, "dummy_tensor")
    input_path = sagemaker_session.upload_data(
        path=base_dir, key_prefix="integ-test-data/estimator/training"
    )
    inputs = TrainingInput(s3_data=input_path)

    instance_count = ParameterInteger(name="InstanceCount", default_value=1)
    instance_type = ParameterString(name="InstanceType", default_value="ml.m5.xlarge")
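    # Resolved at runtime to s3://<default-bucket>/<pipeline-name>Train.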
    output_path = Join(
        on="/", values=["s3:/", f"{sagemaker_session.default_bucket()}", f"{pipeline_name}Train"]
    )

    image_uri = image_uris.retrieve("factorization-machines", sagemaker_session.boto_region_name)
    estimator = Estimator(
        image_uri=image_uri,
        role=role,
        instance_count=instance_count,
        instance_type=instance_type,
        sagemaker_session=sagemaker_session,
        output_path=output_path,
    )
    estimator.set_hyperparameters(
        num_factors=10, feature_dim=784, mini_batch_size=100, predictor_type="binary_classifier"
    )
    step_train = TrainingStep(
        name="MyTrain",
        estimator=estimator,
        inputs=inputs,
    )

    pipeline = Pipeline(
        name=pipeline_name,
        parameters=[instance_count, instance_type],
        steps=[step_train],
        sagemaker_session=sagemaker_session,
    )

    try:
        response = pipeline.create(role)
        create_arn = response["PipelineArn"]

        assert re.match(
            rf"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}",
            create_arn,
        )

        execution = pipeline.start(parameters={})
        assert re.match(
            rf"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}/execution/",
            execution.arn,
        )
        try:
            execution.wait(delay=30, max_attempts=60)
        except WaiterError:
            pass
        execution_steps = execution.list_steps()

        assert len(execution_steps) == 1
        assert execution_steps[0]["StepName"] == "MyTrain"
    finally:
        try:
            pipeline.delete()
        except Exception:
            pass
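Example #11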
def test_two_step_fail_pipeline_with_join_fn(sagemaker_session, role,
                                             pipeline_name):
    param = ParameterInteger(name="MyInt", default_value=2)
    cond = ConditionEquals(left=param, right=1)
    step_cond = ConditionStep(
        name="CondStep",
        conditions=[cond],
        if_steps=[],
        else_steps=[],
    )
    step_fail = FailStep(
        name="FailStep",
        error_message=Join(on=": ",
                           values=[
                               "Failed due to xxx == yyy returns",
                               step_cond.properties.Outcome
                           ]),
    )
    pipeline = Pipeline(
        name=pipeline_name,
        steps=[step_cond, step_fail],
        sagemaker_session=sagemaker_session,
        parameters=[param],
    )

    try:
        response = pipeline.create(role)
        pipeline_arn = response["PipelineArn"]
        execution = pipeline.start(parameters={"MyInt": 3})
        response = execution.describe()
        assert response["PipelineArn"] == pipeline_arn

        try:
            execution.wait(delay=30, max_attempts=60)
        except WaiterError:
            pass
        execution_steps = execution.list_steps()

        assert len(execution_steps) == 2
        for execution_step in execution_steps:
            if execution_step["StepName"] == "CondStep":
                assert execution_step["StepStatus"] == "Succeeded"
                continue
            assert execution_step["StepName"] == "FailStep"
            assert execution_step["StepStatus"] == "Failed"
            assert execution_step["FailureReason"] == "Failed due to xxx == yyy returns: false"
            metadata = execution_step["Metadata"]["Fail"]
            assert metadata["ErrorMessage"] == "Failed due to xxx == yyy returns: false"

        # Check FailureReason field in ListPipelineExecutions
        executions = sagemaker_session.sagemaker_client.list_pipeline_executions(
            PipelineName=pipeline.name)["PipelineExecutionSummaries"]

        assert len(executions) == 1
        assert executions[0]["PipelineExecutionStatus"] == "Failed"
        assert ("Step failure: One or multiple steps failed"
                in executions[0]["PipelineExecutionFailureReason"])
    finally:
        try:
            pipeline.delete()
        except Exception:
            pass
Example #12
def test_pipeline_execution_with_custom_experiment_config(
    sagemaker_session,
    smclient,
    role,
    sklearn_latest_version,
    cpu_instance_type,
    pipeline_name,
    athena_dataset_definition,
):
    instance_count = ParameterInteger(name="InstanceCount", default_value=2)
    script_path = os.path.join(DATA_DIR, "dummy_script.py")
    input_file_path = os.path.join(DATA_DIR, "dummy_input.txt")
    inputs = [
        ProcessingInput(source=input_file_path,
                        destination="/opt/ml/processing/inputs/"),
        ProcessingInput(dataset_definition=athena_dataset_definition),
    ]

    sklearn_processor = SKLearnProcessor(
        framework_version=sklearn_latest_version,
        role=role,
        instance_type=cpu_instance_type,
        instance_count=instance_count,
        command=["python3"],
        sagemaker_session=sagemaker_session,
        base_job_name="test-sklearn",
    )

    step_sklearn = ProcessingStep(
        name="sklearn-process",
        processor=sklearn_processor,
        inputs=inputs,
        code=script_path,
    )

    experiment_name = f"my-experiment-{int(time.time() * 10**7)}"

    pipeline = Pipeline(
        name=pipeline_name,
        parameters=[instance_count],
        pipeline_experiment_config=PipelineExperimentConfig(
            experiment_name=experiment_name,
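            # Suffix the trial name with the execution ID so each run gets its own trial.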
            trial_name=Join(
                on="-",
                values=["my-trial", ExecutionVariables.PIPELINE_EXECUTION_ID]),
        ),
        steps=[step_sklearn],
        sagemaker_session=sagemaker_session,
    )

    try:
        pipeline.create(role)
        execution = pipeline.start(parameters={})

        try:
            execution.wait(delay=30, max_attempts=3)
        except WaiterError:
            pass
        execution_steps = execution.list_steps()
        assert len(execution_steps) == 1
        assert execution_steps[0]["StepName"] == "sklearn-process"

        execution_id = execution.arn.split("/")[-1]

        # trial components
        trial_components = smclient.list_trial_components(
            TrialName=f"my-trial-{execution_id}")
        assert len(trial_components["TrialComponentSummaries"]) == 1

        # trial details
        trial = smclient.describe_trial(TrialName=f"my-trial-{execution_id}")
        assert experiment_name == trial["ExperimentName"]
    finally:
        try:
            pipeline.delete()
        except Exception:
            pass
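Example #13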
def test_steps_with_map_params_pipeline(
    sagemaker_session,
    role,
    script_dir,
    pipeline_name,
    region_name,
    athena_dataset_definition,
):
    instance_count = ParameterInteger(name="InstanceCount", default_value=2)
    framework_version = "0.20.0"
    instance_type = ParameterString(name="InstanceType", default_value="ml.m5.xlarge")
    output_prefix = ParameterString(name="OutputPrefix", default_value="output")
    input_data = f"s3://sagemaker-sample-data-{region_name}/processing/census/census-income.csv"

    sklearn_processor = SKLearnProcessor(
        framework_version=framework_version,
        instance_type=instance_type,
        instance_count=instance_count,
        base_job_name="test-sklearn",
        sagemaker_session=sagemaker_session,
        role=role,
    )
    step_process = ProcessingStep(
        name="my-process",
        display_name="ProcessingStep",
        description="description for Processing step",
        processor=sklearn_processor,
        inputs=[
            ProcessingInput(source=input_data, destination="/opt/ml/processing/input"),
            ProcessingInput(dataset_definition=athena_dataset_definition),
        ],
        outputs=[
            ProcessingOutput(output_name="train_data", source="/opt/ml/processing/train"),
            ProcessingOutput(
                output_name="test_data",
                source="/opt/ml/processing/test",
                destination=Join(
                    on="/",
                    values=[
                        "s3:/",
                        sagemaker_session.default_bucket(),
                        "test-sklearn",
                        output_prefix,
                        ExecutionVariables.PIPELINE_EXECUTION_ID,
                    ],
                ),
            ),
        ],
        code=os.path.join(script_dir, "preprocessing.py"),
    )

    sklearn_train = SKLearn(
        framework_version=framework_version,
        entry_point=os.path.join(script_dir, "train.py"),
        instance_type=instance_type,
        sagemaker_session=sagemaker_session,
        role=role,
        hyperparameters={
            "batch-size": 500,
            "epochs": 5,
        },
    )
    step_train = TrainingStep(
        name="my-train",
        display_name="TrainingStep",
        description="description for Training step",
        estimator=sklearn_train,
        inputs=TrainingInput(
            s3_data=step_process.properties.ProcessingOutputConfig.Outputs[
                "train_data"
            ].S3Output.S3Uri
        ),
    )

    model = Model(
        image_uri=sklearn_train.image_uri,
        model_data=step_train.properties.ModelArtifacts.S3ModelArtifacts,
        sagemaker_session=sagemaker_session,
        role=role,
    )
    model_inputs = CreateModelInput(
        instance_type="ml.m5.large",
        accelerator_type="ml.eia1.medium",
    )
    step_model = CreateModelStep(
        name="my-model",
        display_name="ModelStep",
        description="description for Model step",
        model=model,
        inputs=model_inputs,
    )

    # Condition step for evaluating model quality and branching execution
    cond_lte = ConditionGreaterThanOrEqualTo(
        left=step_train.properties.HyperParameters["batch-size"],
        right=6.0,
    )

    step_cond = ConditionStep(
        name="CustomerChurnAccuracyCond",
        conditions=[cond_lte],
        if_steps=[],
        else_steps=[step_model],
    )

    pipeline = Pipeline(
        name=pipeline_name,
        parameters=[instance_type, instance_count, output_prefix],
        steps=[step_process, step_train, step_cond],
        sagemaker_session=sagemaker_session,
    )

    definition = json.loads(pipeline.definition())
    assert definition["Version"] == "2020-12-01"

    steps = definition["Steps"]
    assert len(steps) == 3
    training_args = {}
    condition_args = {}
    for step in steps:
        if step["Type"] == "Training":
            training_args = step["Arguments"]
        if step["Type"] == "Condition":
            condition_args = step["Arguments"]

    assert training_args["InputDataConfig"][0]["DataSource"]["S3DataSource"]["S3Uri"] == {
        "Get": "Steps.my-process.ProcessingOutputConfig.Outputs['train_data'].S3Output.S3Uri"
    }
    assert condition_args["Conditions"][0]["LeftValue"] == {
        "Get": "Steps.my-train.HyperParameters['batch-size']"
    }

    try:
        response = pipeline.create(role)
        create_arn = response["PipelineArn"]
        assert re.match(
            rf"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}",
            create_arn,
        )

    finally:
        try:
            pipeline.delete()
        except Exception:
            pass
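Example #14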
def test_pipeline_variable_in_pipeline_definition(sagemaker_session):
    param_str = ParameterString(name="MyString", default_value="1")
    param_int = ParameterInteger(name="MyInteger", default_value=3)

    property_file = PropertyFile(
        name="name",
        output_name="result",
        path="output",
    )
    json_get_func2 = JsonGet(
        step_name="my-step",
        property_file=property_file,
        json_path="my-json-path",
    )
    prop = Properties("Steps.MyStep", "DescribeProcessingJobResponse")

    cond = ConditionGreaterThan(left=param_str, right=param_int.to_string())
    step_fail = FailStep(
        name="MyFailStep",
        error_message=Join(
            on=" ",
            values=[
                "Execution failed due to condition check fails, see:",
                json_get_func2.to_string(),
                prop.ProcessingOutputConfig.Outputs["MyOutputName"].S3Output.S3Uri.to_string(),
                param_int,
            ],
        ),
    )
    step_cond = ConditionStep(
        name="MyCondStep",
        conditions=[cond],
        if_steps=[],
        else_steps=[step_fail],
    )
    pipeline = Pipeline(
        name="MyPipeline",
        parameters=[param_str, param_int],
        steps=[step_cond],
        sagemaker_session=sagemaker_session,
    )

    dsl = json.loads(pipeline.definition())
    assert dsl["Parameters"] == [
        {
            "Name": "MyString",
            "Type": "String",
            "DefaultValue": "1"
        },
        {
            "Name": "MyInteger",
            "Type": "Integer",
            "DefaultValue": 3
        },
    ]
    assert len(dsl["Steps"]) == 1
    assert dsl["Steps"][0] == {
        "Name": "MyCondStep",
        "Type": "Condition",
        "Arguments": {
            "Conditions": [
                {
                    "Type": "GreaterThan",
                    "LeftValue": {
                        "Get": "Parameters.MyString"
                    },
                    "RightValue": {
                        "Std:Join": {
                            "On": "",
                            "Values": [{
                                "Get": "Parameters.MyInteger"
                            }],
                        },
                    },
                },
            ],
            "IfSteps": [],
            "ElseSteps": [{
                "Name": "MyFailStep",
                "Type": "Fail",
                "Arguments": {
                    "ErrorMessage": {
                        "Std:Join": {
                            "On":
                            " ",
                            "Values": [
                                "Execution failed due to condition check fails, see:",
                                {
                                    "Std:Join": {
                                        "On":
                                        "",
                                        "Values": [
                                            {
                                                "Std:JsonGet": {
                                                    "PropertyFile": {
                                                        "Get":
                                                        "Steps.my-step.PropertyFiles.name"
                                                    },
                                                    "Path": "my-json-path",
                                                }
                                            },
                                        ],
                                    },
                                },
                                {
                                    "Std:Join": {
                                        "On":
                                        "",
                                        "Values": [
                                            {
                                                "Get":
                                                "Steps.MyStep.ProcessingOutputConfig."
                                                +
                                                "Outputs['MyOutputName'].S3Output.S3Uri"
                                            },
                                        ],
                                    },
                                },
                                {
                                    "Get": "Parameters.MyInteger"
                                },
                            ],
                        }
                    }
                },
            }],
        },
    }
Example #15
def test_to_string_on_join():
    func = Join(values=[1, "a", False, 1.1])
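    # A Join is already string-typed, so to_string() returns the Join itself.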

    assert func.to_string() == func
Example #16
def test_three_step_definition(
    sagemaker_session,
    region_name,
    role,
    script_dir,
    pipeline_name,
    athena_dataset_definition,
):
    framework_version = "0.20.0"
    instance_type = ParameterString(name="InstanceType",
                                    default_value="ml.m5.xlarge")
    instance_count = ParameterInteger(name="InstanceCount", default_value=1)
    output_prefix = ParameterString(name="OutputPrefix",
                                    default_value="output")

    input_data = f"s3://sagemaker-sample-data-{region_name}/processing/census/census-income.csv"

    sklearn_processor = SKLearnProcessor(
        framework_version=framework_version,
        instance_type=instance_type,
        instance_count=instance_count,
        base_job_name="test-sklearn",
        sagemaker_session=sagemaker_session,
        role=role,
    )
    step_process = ProcessingStep(
        name="my-process",
        processor=sklearn_processor,
        inputs=[
            ProcessingInput(source=input_data,
                            destination="/opt/ml/processing/input"),
            ProcessingInput(dataset_definition=athena_dataset_definition),
        ],
        outputs=[
            ProcessingOutput(output_name="train_data",
                             source="/opt/ml/processing/train"),
            ProcessingOutput(
                output_name="test_data",
                source="/opt/ml/processing/test",
                destination=Join(
                    on="/",
                    values=[
                        "s3:/",
                        sagemaker_session.default_bucket(),
                        "test-sklearn",
                        output_prefix,
                        ExecutionVariables.PIPELINE_EXECUTION_ID,
                    ],
                ),
            ),
        ],
        code=os.path.join(script_dir, "preprocessing.py"),
    )

    sklearn_train = SKLearn(
        framework_version=framework_version,
        entry_point=os.path.join(script_dir, "train.py"),
        instance_type=instance_type,
        sagemaker_session=sagemaker_session,
        role=role,
    )
    step_train = TrainingStep(
        name="my-train",
        estimator=sklearn_train,
        inputs=TrainingInput(
            s3_data=step_process.properties.ProcessingOutputConfig.Outputs[
                "train_data"].S3Output.S3Uri),
    )

    model = Model(
        image_uri=sklearn_train.image_uri,
        model_data=step_train.properties.ModelArtifacts.S3ModelArtifacts,
        sagemaker_session=sagemaker_session,
        role=role,
    )
    model_inputs = CreateModelInput(
        instance_type="ml.m5.large",
        accelerator_type="ml.eia1.medium",
    )
    step_model = CreateModelStep(
        name="my-model",
        model=model,
        inputs=model_inputs,
    )

    pipeline = Pipeline(
        name=pipeline_name,
        parameters=[instance_type, instance_count, output_prefix],
        steps=[step_process, step_train, step_model],
        sagemaker_session=sagemaker_session,
    )

    definition = json.loads(pipeline.definition())
    assert definition["Version"] == "2020-12-01"

    assert set(tuple(param.items())
               for param in definition["Parameters"]) == set([
                   tuple({
                       "Name": "InstanceType",
                       "Type": "String",
                       "DefaultValue": "ml.m5.xlarge"
                   }.items()),
                   tuple({
                       "Name": "InstanceCount",
                       "Type": "Integer",
                       "DefaultValue": 1
                   }.items()),
                   tuple({
                       "Name": "OutputPrefix",
                       "Type": "String",
                       "DefaultValue": "output"
                   }.items()),
               ])

    steps = definition["Steps"]
    assert len(steps) == 3

    names_and_types = []
    processing_args = {}
    training_args = {}
    model_args = {}
    for step in steps:
        names_and_types.append((step["Name"], step["Type"]))
        if step["Type"] == "Processing":
            processing_args = step["Arguments"]
        if step["Type"] == "Training":
            training_args = step["Arguments"]
        if step["Type"] == "Model":
            model_args = step["Arguments"]

    assert set(names_and_types) == set([
        ("my-process", "Processing"),
        ("my-train", "Training"),
        ("my-model", "Model"),
    ])

    assert processing_args["ProcessingResources"]["ClusterConfig"] == {
        "InstanceType": {
            "Get": "Parameters.InstanceType"
        },
        "InstanceCount": {
            "Get": "Parameters.InstanceCount"
        },
        "VolumeSizeInGB": 30,
    }

    assert training_args["ResourceConfig"] == {
        "InstanceCount": 1,
        "InstanceType": {
            "Get": "Parameters.InstanceType"
        },
        "VolumeSizeInGB": 30,
    }
    assert training_args["InputDataConfig"][0]["DataSource"]["S3DataSource"][
        "S3Uri"] == {
            "Get":
            "Steps.my-process.ProcessingOutputConfig.Outputs['train_data'].S3Output.S3Uri"
        }
    assert model_args["PrimaryContainer"]["ModelDataUrl"] == {
        "Get": "Steps.my-train.ModelArtifacts.S3ModelArtifacts"
    }
    try:
        response = pipeline.create(role)
        create_arn = response["PipelineArn"]
        assert re.match(
            fr"arn:aws:sagemaker:{region_name}:\d{{12}}:pipeline/{pipeline_name}",
            create_arn,
        )
    finally:
        try:
            pipeline.delete()
        except Exception:
            pass
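Example #17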
def test_ppl_var_to_string_and_add(sagemaker_session, role, pipeline_name):
    param_str = ParameterString(name="MyString", default_value="1")
    param_int = ParameterInteger(name="MyInteger", default_value=3)

    cond = ConditionGreaterThan(left=param_str, right=param_int.to_string())
    step_cond = ConditionStep(
        name="CondStep",
        conditions=[cond],
        if_steps=[],
        else_steps=[],
    )
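    # The boolean Outcome property renders as "true"/"false" in the joined message.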
    join_fn1 = Join(
        on=" ",
        values=[
            "condition greater than check return:",
            step_cond.properties.Outcome.to_string(),
            "and left side param str is",
            param_str,
            "and right side param int is",
            param_int,
        ],
    )

    step_fail = FailStep(
        name="FailStep",
        error_message=join_fn1,
    )
    pipeline = Pipeline(
        name=pipeline_name,
        parameters=[param_str, param_int],
        steps=[step_cond, step_fail],
        sagemaker_session=sagemaker_session,
    )

    try:
        response = pipeline.create(role)
        pipeline_arn = response["PipelineArn"]
        execution = pipeline.start()
        response = execution.describe()
        assert response["PipelineArn"] == pipeline_arn

        try:
            execution.wait(delay=30, max_attempts=60)
        except WaiterError:
            pass
        execution_steps = execution.list_steps()

        assert len(execution_steps) == 2
        for execution_step in execution_steps:
            if execution_step["StepName"] == "CondStep":
                assert execution_step["StepStatus"] == "Succeeded"
                continue
            assert execution_step["StepName"] == "FailStep"
            assert execution_step["StepStatus"] == "Failed"
            assert (
                execution_step["FailureReason"] ==
                "condition greater than check return: false "
                "and left side param str is 1 and right side param int is 3")

        # Update the integer param to flip the condition step's outcome.
        execution = pipeline.start(parameters={"MyInteger": 0})
        try:
            execution.wait(delay=30, max_attempts=60)
        except WaiterError:
            pass
        execution_steps = execution.list_steps()

        assert len(execution_steps) == 2
        for execution_step in execution_steps:
            if execution_step["StepName"] == "CondStep":
                assert execution_step["StepStatus"] == "Succeeded"
                continue
            assert execution_step["StepName"] == "FailStep"
            assert execution_step["StepStatus"] == "Failed"
            assert (
                execution_step["FailureReason"] ==
                "condition greater than check return: true "
                "and left side param str is 1 and right side param int is 0")
    finally:
        try:
            pipeline.delete()
        except Exception:
            pass