def test_deploy_cli_creates_sagemaker_transform_job_and_s3_resources_with_expected_names_from_s3(
        pretrained_model, sagemaker_client):
    """Deploy a transform job via the CLI from an S3 model URI and verify that
    the expected SageMaker transform job, model, and S3 artifacts are created.
    """
    local_model_path = _download_artifact_from_uri(pretrained_model.model_uri)
    artifact_path = "model"
    region_name = sagemaker_client.meta.region_name
    default_bucket = mfs._get_default_s3_bucket(region_name)
    s3_artifact_repo = S3ArtifactRepository(f"s3://{default_bucket}")
    s3_artifact_repo.log_artifacts(local_model_path,
                                   artifact_path=artifact_path)
    model_s3_uri = f"s3://{default_bucket}/{pretrained_model.model_path}"

    job_name = "test-job"
    result = CliRunner(env={
        "LC_ALL": "en_US.UTF-8",
        "LANG": "en_US.UTF-8"
    }).invoke(
        mfscli.commands,
        [
            "deploy-transform-job",
            "--job-name",
            job_name,
            "--model-uri",
            model_s3_uri,
            "--input-data-type",
            "Some Data Type",
            "--input-uri",
            "Some Input Uri",
            "--content-type",
            "Some Content Type",
            "--output-path",
            "Some Output Path",
            "--archive",
        ],
    )
    assert result.exit_code == 0

    # region_name and default_bucket were computed above; no need to re-fetch.
    s3_client = boto3.client("s3", region_name=region_name)
    transform_job_description = sagemaker_client.describe_transform_job(
        TransformJobName=job_name)
    model_name = transform_job_description["ModelName"]
    assert model_name in [
        model["ModelName"]
        for model in sagemaker_client.list_models()["Models"]
    ]
    object_names = [
        entry["Key"]
        for entry in s3_client.list_objects(Bucket=default_bucket)["Contents"]
    ]
    # Generator expression avoids materializing a throwaway list inside any().
    assert any(model_name in object_name for object_name in object_names)
    assert job_name in [
        transform_job["TransformJobName"] for transform_job in
        sagemaker_client.list_transform_jobs()["TransformJobSummaries"]
    ]
# --- Example 2 ---
def test_deploy_cli_creates_sagemaker_and_s3_resources_with_expected_names_and_env_from_s3(
        pretrained_model, sagemaker_client):
    """Deploy an endpoint via the CLI from an S3 model URI and verify the
    expected SageMaker endpoint/model/config, S3 artifacts, and serving
    environment variables are created.
    """
    local_model_path = _download_artifact_from_uri(pretrained_model.model_uri)
    artifact_path = "model"
    region_name = sagemaker_client.meta.region_name
    default_bucket = mfs._get_default_s3_bucket(region_name)
    s3_artifact_repo = S3ArtifactRepository(f"s3://{default_bucket}")
    s3_artifact_repo.log_artifacts(local_model_path,
                                   artifact_path=artifact_path)
    model_s3_uri = f"s3://{default_bucket}/{pretrained_model.model_path}"

    app_name = "test-app"
    result = CliRunner(env={
        "LC_ALL": "en_US.UTF-8",
        "LANG": "en_US.UTF-8"
    }).invoke(
        mfscli.commands,
        [
            "deploy", "-a", app_name, "-m", model_s3_uri, "--mode",
            mfs.DEPLOYMENT_MODE_CREATE
        ],
    )
    assert result.exit_code == 0

    # region_name and default_bucket were computed above; no need to re-fetch.
    s3_client = boto3.client("s3", region_name=region_name)
    endpoint_description = sagemaker_client.describe_endpoint(
        EndpointName=app_name)
    endpoint_production_variants = endpoint_description["ProductionVariants"]
    assert len(endpoint_production_variants) == 1
    # The deployment names the production variant after the SageMaker model.
    model_name = endpoint_production_variants[0]["VariantName"]
    assert model_name in [
        model["ModelName"]
        for model in sagemaker_client.list_models()["Models"]
    ]
    object_names = [
        entry["Key"]
        for entry in s3_client.list_objects(Bucket=default_bucket)["Contents"]
    ]
    # Generator expressions avoid materializing throwaway lists inside any().
    assert any(model_name in object_name for object_name in object_names)
    assert any(
        app_name in config["EndpointConfigName"]
        for config in sagemaker_client.list_endpoint_configs()["EndpointConfigs"]
    )
    assert app_name in [
        endpoint["EndpointName"]
        for endpoint in sagemaker_client.list_endpoints()["Endpoints"]
    ]
    model_environment = sagemaker_client.describe_model(
        ModelName=model_name)["PrimaryContainer"]["Environment"]
    assert model_environment == {
        "MLFLOW_DEPLOYMENT_FLAVOR_NAME": "python_function",
        "SERVING_ENVIRONMENT": "SageMaker",
    }
# --- Example 3 ---
def test_deploy_cli_creates_sagemaker_and_s3_resources_with_expected_names_from_s3(
        pretrained_model, sagemaker_client):
    """Deploy an endpoint via the CLI from an S3 model URI and verify the
    expected SageMaker endpoint/model/config and S3 artifacts are created.
    """
    local_model_path = _download_artifact_from_uri(pretrained_model.model_uri)
    artifact_path = "model"
    region_name = sagemaker_client.meta.region_name
    default_bucket = mfs._get_default_s3_bucket(region_name)
    # Double-quoted strings for consistency with the rest of the module.
    s3_artifact_repo = S3ArtifactRepository(f"s3://{default_bucket}")
    s3_artifact_repo.log_artifacts(local_model_path,
                                   artifact_path=artifact_path)
    model_s3_uri = f"s3://{default_bucket}/{pretrained_model.model_path}"

    app_name = "test-app"
    result = CliRunner(env={
        "LC_ALL": "en_US.UTF-8",
        "LANG": "en_US.UTF-8"
    }).invoke(mfscli.commands, [
        "deploy",
        "-a",
        app_name,
        "-m",
        model_s3_uri,
        "--mode",
        mfs.DEPLOYMENT_MODE_CREATE,
    ])
    assert result.exit_code == 0

    # region_name and default_bucket were computed above; no need to re-fetch.
    s3_client = boto3.client("s3", region_name=region_name)
    endpoint_description = sagemaker_client.describe_endpoint(
        EndpointName=app_name)
    endpoint_production_variants = endpoint_description["ProductionVariants"]
    assert len(endpoint_production_variants) == 1
    model_name = endpoint_production_variants[0]["VariantName"]
    assert model_name in [
        model["ModelName"]
        for model in sagemaker_client.list_models()["Models"]
    ]
    object_names = [
        entry["Key"]
        for entry in s3_client.list_objects(Bucket=default_bucket)["Contents"]
    ]
    # Generator expressions avoid materializing throwaway lists inside any().
    assert any(model_name in object_name for object_name in object_names)
    assert any(
        app_name in config["EndpointConfigName"]
        for config in sagemaker_client.list_endpoint_configs()["EndpointConfigs"]
    )
    assert app_name in [
        endpoint["EndpointName"]
        for endpoint in sagemaker_client.list_endpoints()["Endpoints"]
    ]
# --- Example 4 ---
def test_deploy_creates_sagemaker_and_s3_resources_with_expected_names_from_local(
        pretrained_model, sagemaker_client):
    """Deploy an endpoint via the Python API from a local model URI and verify
    the expected SageMaker endpoint/model/config and S3 artifacts are created.
    """
    app_name = "test-app"
    mfs.deploy(app_name=app_name,
               model_uri=pretrained_model.model_uri,
               mode=mfs.DEPLOYMENT_MODE_CREATE)

    region_name = sagemaker_client.meta.region_name
    s3_client = boto3.client("s3", region_name=region_name)
    default_bucket = mfs._get_default_s3_bucket(region_name)
    endpoint_description = sagemaker_client.describe_endpoint(
        EndpointName=app_name)
    endpoint_production_variants = endpoint_description["ProductionVariants"]
    assert len(endpoint_production_variants) == 1
    model_name = endpoint_production_variants[0]["VariantName"]
    assert model_name in [
        model["ModelName"]
        for model in sagemaker_client.list_models()["Models"]
    ]
    object_names = [
        entry["Key"]
        for entry in s3_client.list_objects(Bucket=default_bucket)["Contents"]
    ]
    # Generator expressions avoid materializing throwaway lists inside any().
    assert any(model_name in object_name for object_name in object_names)
    assert any(
        app_name in config["EndpointConfigName"]
        for config in sagemaker_client.list_endpoint_configs()["EndpointConfigs"]
    )
    assert app_name in [
        endpoint["EndpointName"]
        for endpoint in sagemaker_client.list_endpoints()["Endpoints"]
    ]
def test_deploy_cli_creates_sagemaker_and_s3_resources_with_expected_names_and_env_from_local(
    pretrained_model, sagemaker_client
):
    """Deploy an endpoint via the CLI helper from a local model URI and verify
    the expected endpoint/model/config, S3 artifacts, and serving environment.
    """
    app_name = "test-app"
    region_name = sagemaker_client.meta.region_name
    create_sagemaker_deployment_through_cli(app_name, pretrained_model.model_uri, region_name)

    s3_client = boto3.client("s3", region_name=region_name)
    default_bucket = mfs._get_default_s3_bucket(region_name)
    endpoint_description = sagemaker_client.describe_endpoint(EndpointName=app_name)
    endpoint_production_variants = endpoint_description["ProductionVariants"]
    assert len(endpoint_production_variants) == 1
    model_name = endpoint_production_variants[0]["VariantName"]
    assert model_name in [model["ModelName"] for model in sagemaker_client.list_models()["Models"]]
    object_names = [
        entry["Key"] for entry in s3_client.list_objects(Bucket=default_bucket)["Contents"]
    ]
    # Generator expressions avoid materializing throwaway lists inside any().
    assert any(model_name in object_name for object_name in object_names)
    assert any(
        app_name in config["EndpointConfigName"]
        for config in sagemaker_client.list_endpoint_configs()["EndpointConfigs"]
    )
    assert app_name in [
        endpoint["EndpointName"] for endpoint in sagemaker_client.list_endpoints()["Endpoints"]
    ]
    model_environment = sagemaker_client.describe_model(ModelName=model_name)["PrimaryContainer"][
        "Environment"
    ]
    assert model_environment == {
        "MLFLOW_DEPLOYMENT_FLAVOR_NAME": "python_function",
        "SERVING_ENVIRONMENT": "SageMaker",
    }
def test_delete_deployment_synchronous_with_archiving_only_deletes_endpoint(
    pretrained_model, sagemaker_client, sagemaker_deployment_client
):
    """Synchronously deleting a deployment with ``archive=True`` removes the
    endpoint but leaves endpoint configs, models, and S3 artifacts in place.
    """
    endpoint_name = "test-app"
    region = sagemaker_client.meta.region_name

    sagemaker_deployment_client.create_deployment(
        name=endpoint_name,
        model_uri=pretrained_model.model_uri,
        config={"region_name": region},
    )
    sagemaker_deployment_client.delete_deployment(
        name=endpoint_name,
        config={"archive": True, "synchronous": True, "region_name": region},
    )

    bucket = mfs._get_default_s3_bucket(region)
    remaining_objects = boto3.client("s3", region_name=region).list_objects_v2(
        Bucket=bucket
    )

    # Only the endpoint itself should be gone; everything else is archived.
    assert remaining_objects["KeyCount"] > 0
    assert len(sagemaker_client.list_endpoints()["Endpoints"]) == 0
    assert len(sagemaker_client.list_endpoint_configs()["EndpointConfigs"]) > 0
    assert len(sagemaker_client.list_models()["Models"]) > 0
def test_deploy_creates_sagemaker_transform_job_and_s3_resources_with_expected_names_from_local(
        pretrained_model, sagemaker_client):
    """Deploy a transform job via the Python API from a local model URI and
    verify the expected transform job, model, and S3 artifacts are created.
    """
    job_name = "test-job"
    mfs.deploy_transform_job(
        job_name=job_name,
        model_uri=pretrained_model.model_uri,
        s3_input_data_type="Some Data Type",
        s3_input_uri="Some Input Uri",
        content_type="Some Content Type",
        s3_output_path="Some Output Path",
        archive=True,
    )

    region_name = sagemaker_client.meta.region_name
    s3_client = boto3.client("s3", region_name=region_name)
    default_bucket = mfs._get_default_s3_bucket(region_name)
    transform_job_description = sagemaker_client.describe_transform_job(
        TransformJobName=job_name)
    model_name = transform_job_description["ModelName"]
    assert model_name in [
        model["ModelName"]
        for model in sagemaker_client.list_models()["Models"]
    ]
    object_names = [
        entry["Key"]
        for entry in s3_client.list_objects(Bucket=default_bucket)["Contents"]
    ]
    # Generator expression avoids materializing a throwaway list inside any().
    assert any(model_name in object_name for object_name in object_names)
    assert job_name in [
        transform_job["TransformJobName"] for transform_job in
        sagemaker_client.list_transform_jobs()["TransformJobSummaries"]
    ]
# --- Example 8 ---
def test_deploy_creates_sagemaker_and_s3_resources_with_expected_names(
        pretrained_model, sagemaker_client):
    """Deploy an endpoint via the legacy ``model_path``/``run_id`` API and
    verify the expected SageMaker endpoint/model/config and S3 artifacts.
    """
    app_name = "test-app"
    mfs.deploy(app_name=app_name,
               model_path=pretrained_model.model_path,
               run_id=pretrained_model.run_id,
               mode=mfs.DEPLOYMENT_MODE_CREATE)

    region_name = sagemaker_client.meta.region_name
    s3_client = boto3.client("s3", region_name=region_name)
    default_bucket = mfs._get_default_s3_bucket(region_name)
    object_names = [
        entry["Key"]
        for entry in s3_client.list_objects(Bucket=default_bucket)["Contents"]
    ]
    # Generator expressions avoid materializing throwaway lists inside any().
    assert any(
        pretrained_model.run_id in object_name for object_name in object_names
    )
    assert any(
        app_name in model["ModelName"]
        for model in sagemaker_client.list_models()["Models"]
    )
    assert any(
        app_name in config["EndpointConfigName"]
        for config in sagemaker_client.list_endpoint_configs()["EndpointConfigs"]
    )
    assert app_name in [
        endpoint["EndpointName"]
        for endpoint in sagemaker_client.list_endpoints()["Endpoints"]
    ]
def test_deploy_cli_updates_sagemaker_and_s3_resources_in_replace_mode(
    pretrained_model, sagemaker_client
):
    """Update an existing CLI deployment in replace mode and verify the
    replaced endpoint's model/config, S3 artifacts, and serving environment.
    """
    app_name = "test-app"
    region_name = sagemaker_client.meta.region_name
    create_sagemaker_deployment_through_cli(app_name, pretrained_model.model_uri, region_name)

    result = CliRunner(env={"LC_ALL": "en_US.UTF-8", "LANG": "en_US.UTF-8"}).invoke(
        cli_commands,
        [
            "update",
            "--target",
            f"sagemaker:/{region_name}",
            "--name",
            app_name,
            "--model-uri",
            pretrained_model.model_uri,
        ],
    )
    assert result.exit_code == 0

    s3_client = boto3.client("s3", region_name=region_name)
    default_bucket = mfs._get_default_s3_bucket(region_name)
    endpoint_description = sagemaker_client.describe_endpoint(EndpointName=app_name)
    endpoint_production_variants = endpoint_description["ProductionVariants"]
    assert len(endpoint_production_variants) == 1
    model_name = endpoint_production_variants[0]["VariantName"]
    assert model_name in [model["ModelName"] for model in sagemaker_client.list_models()["Models"]]
    object_names = [
        entry["Key"] for entry in s3_client.list_objects(Bucket=default_bucket)["Contents"]
    ]
    # Generator expressions avoid materializing throwaway lists inside any().
    assert any(model_name in object_name for object_name in object_names)
    assert any(
        app_name in config["EndpointConfigName"]
        for config in sagemaker_client.list_endpoint_configs()["EndpointConfigs"]
    )
    assert app_name in [
        endpoint["EndpointName"] for endpoint in sagemaker_client.list_endpoints()["Endpoints"]
    ]
    model_environment = sagemaker_client.describe_model(ModelName=model_name)["PrimaryContainer"][
        "Environment"
    ]
    assert model_environment == {
        "MLFLOW_DEPLOYMENT_FLAVOR_NAME": "python_function",
        "SERVING_ENVIRONMENT": "SageMaker",
    }
# --- Example 10 ---
def test_deploy_in_replace_mode_with_archiving_does_not_delete_resources(
        pretrained_model, sagemaker_client):
    """Replacing a deployment with ``archive=True`` must retain every S3
    object, endpoint config, and model that existed before the replacement.
    """
    region_name = sagemaker_client.meta.region_name
    sagemaker_backend = get_sagemaker_backend(region_name)
    sagemaker_backend.set_endpoint_update_latency(5)

    app_name = "test-app"
    mfs.deploy(app_name=app_name,
               model_path=pretrained_model.model_path,
               run_id=pretrained_model.run_id,
               mode=mfs.DEPLOYMENT_MODE_CREATE)

    s3_client = boto3.client("s3", region_name=region_name)
    default_bucket = mfs._get_default_s3_bucket(region_name)
    object_names_before_replacement = [
        entry["Key"]
        for entry in s3_client.list_objects(Bucket=default_bucket)["Contents"]
    ]
    endpoint_configs_before_replacement = [
        config["EndpointConfigName"]
        for config in sagemaker_client.list_endpoint_configs()["EndpointConfigs"]
    ]
    models_before_replacement = [
        model["ModelName"] for model in sagemaker_client.list_models()["Models"]
    ]

    # Re-log the pretrained model under a new run so the replacement points at
    # a different model artifact than the original deployment.
    sk_model = mlflow.sklearn.load_model(
        path=pretrained_model.model_path, run_id=pretrained_model.run_id)
    new_artifact_path = "model"
    with mlflow.start_run():
        mlflow.sklearn.log_model(sk_model=sk_model, artifact_path=new_artifact_path)
        new_run_id = mlflow.active_run().info.run_uuid
    mfs.deploy(app_name=app_name,
               model_path=new_artifact_path,
               run_id=new_run_id,
               mode=mfs.DEPLOYMENT_MODE_REPLACE,
               archive=True,
               synchronous=True)

    object_names_after_replacement = [
        entry["Key"]
        for entry in s3_client.list_objects(Bucket=default_bucket)["Contents"]
    ]
    endpoint_configs_after_replacement = [
        config["EndpointConfigName"]
        for config in sagemaker_client.list_endpoint_configs()["EndpointConfigs"]
    ]
    models_after_replacement = [
        model["ModelName"] for model in sagemaker_client.list_models()["Models"]
    ]
    # Set containment replaces the O(n*m) nested ``all``/``in`` list scans.
    assert set(object_names_before_replacement) <= set(object_names_after_replacement)
    assert set(endpoint_configs_before_replacement) <= set(endpoint_configs_after_replacement)
    assert set(models_before_replacement) <= set(models_after_replacement)
def test_create_deployment_creates_sagemaker_and_s3_resources_with_expected_names_and_env_from_s3(
    pretrained_model, sagemaker_client, sagemaker_deployment_client
):
    """Create a deployment from an S3 model URI via the deployment client and
    verify the expected endpoint/model/config, S3 artifacts, and serving env.
    """
    local_model_path = _download_artifact_from_uri(pretrained_model.model_uri)
    artifact_path = "model"
    region_name = sagemaker_client.meta.region_name
    default_bucket = mfs._get_default_s3_bucket(region_name)
    s3_artifact_repo = S3ArtifactRepository(f"s3://{default_bucket}")
    s3_artifact_repo.log_artifacts(local_model_path, artifact_path=artifact_path)
    model_s3_uri = f"s3://{default_bucket}/{pretrained_model.model_path}"

    name = "test-app"
    sagemaker_deployment_client.create_deployment(
        name=name,
        model_uri=model_s3_uri,
    )

    endpoint_description = sagemaker_client.describe_endpoint(EndpointName=name)
    endpoint_production_variants = endpoint_description["ProductionVariants"]
    assert len(endpoint_production_variants) == 1
    model_name = endpoint_production_variants[0]["VariantName"]
    assert model_name in [model["ModelName"] for model in sagemaker_client.list_models()["Models"]]

    s3_client = boto3.client("s3", region_name=region_name)
    object_names = [
        entry["Key"] for entry in s3_client.list_objects(Bucket=default_bucket)["Contents"]
    ]
    # Generator expressions avoid materializing throwaway lists inside any().
    assert any(model_name in object_name for object_name in object_names)
    assert any(
        name in config["EndpointConfigName"]
        for config in sagemaker_client.list_endpoint_configs()["EndpointConfigs"]
    )
    assert name in [
        endpoint["EndpointName"] for endpoint in sagemaker_client.list_endpoints()["Endpoints"]
    ]
    model_environment = sagemaker_client.describe_model(ModelName=model_name)["PrimaryContainer"][
        "Environment"
    ]
    assert model_environment == {
        "MLFLOW_DEPLOYMENT_FLAVOR_NAME": "python_function",
        "SERVING_ENVIRONMENT": "SageMaker",
    }
# --- Example 12 ---
def test_deploy_cli_creates_sagemaker_and_s3_resources_with_expected_names_from_local(
        pretrained_model, sagemaker_client):
    """Deploy an endpoint via the CLI from a local model URI and verify the
    expected SageMaker endpoint/model/config and S3 artifacts are created.
    """
    app_name = "test-app"
    result = CliRunner(env={
        "LC_ALL": "en_US.UTF-8",
        "LANG": "en_US.UTF-8"
    }).invoke(
        mfscli.commands,
        [
            "deploy",
            "-a",
            app_name,
            "-m",
            pretrained_model.model_uri,
            "--mode",
            mfs.DEPLOYMENT_MODE_CREATE,
        ],
    )
    assert result.exit_code == 0

    region_name = sagemaker_client.meta.region_name
    s3_client = boto3.client("s3", region_name=region_name)
    default_bucket = mfs._get_default_s3_bucket(region_name)
    endpoint_description = sagemaker_client.describe_endpoint(
        EndpointName=app_name)
    endpoint_production_variants = endpoint_description["ProductionVariants"]
    assert len(endpoint_production_variants) == 1
    model_name = endpoint_production_variants[0]["VariantName"]
    assert model_name in [
        model["ModelName"]
        for model in sagemaker_client.list_models()["Models"]
    ]
    object_names = [
        entry["Key"]
        for entry in s3_client.list_objects(Bucket=default_bucket)["Contents"]
    ]
    # Generator expressions avoid materializing throwaway lists inside any().
    assert any(model_name in object_name for object_name in object_names)
    assert any(
        app_name in config["EndpointConfigName"]
        for config in sagemaker_client.list_endpoint_configs()["EndpointConfigs"]
    )
    assert app_name in [
        endpoint["EndpointName"]
        for endpoint in sagemaker_client.list_endpoints()["Endpoints"]
    ]
# --- Example 13 ---
def test_deploy_creates_sagemaker_and_s3_resources_with_expected_names_from_s3(
        pretrained_model, sagemaker_client):
    """Deploy an endpoint via the Python API from an S3 model URI and verify
    the expected SageMaker endpoint/model/config and S3 artifacts are created.
    """
    local_model_path = _download_artifact_from_uri(pretrained_model.model_uri)
    artifact_path = "model"
    region_name = sagemaker_client.meta.region_name
    default_bucket = mfs._get_default_s3_bucket(region_name)
    s3_artifact_repo = S3ArtifactRepository(f"s3://{default_bucket}")
    s3_artifact_repo.log_artifacts(local_model_path,
                                   artifact_path=artifact_path)
    model_s3_uri = f"s3://{default_bucket}/{pretrained_model.model_path}"

    app_name = "test-app"
    mfs.deploy(app_name=app_name,
               model_uri=model_s3_uri,
               mode=mfs.DEPLOYMENT_MODE_CREATE)

    endpoint_description = sagemaker_client.describe_endpoint(
        EndpointName=app_name)
    endpoint_production_variants = endpoint_description["ProductionVariants"]
    assert len(endpoint_production_variants) == 1
    model_name = endpoint_production_variants[0]["VariantName"]
    assert model_name in [
        model["ModelName"]
        for model in sagemaker_client.list_models()["Models"]
    ]

    s3_client = boto3.client("s3", region_name=region_name)
    object_names = [
        entry["Key"]
        for entry in s3_client.list_objects(Bucket=default_bucket)["Contents"]
    ]
    # Generator expressions avoid materializing throwaway lists inside any().
    assert any(model_name in object_name for object_name in object_names)
    assert any(
        app_name in config["EndpointConfigName"]
        for config in sagemaker_client.list_endpoint_configs()["EndpointConfigs"]
    )
    assert app_name in [
        endpoint["EndpointName"]
        for endpoint in sagemaker_client.list_endpoints()["Endpoints"]
    ]
def test_deploy_creates_sagemaker_transform_job_and_s3_resources_with_expected_names_from_s3(
        pretrained_model, sagemaker_client):
    """Deploy a transform job via the Python API from an S3 model URI and
    verify the expected transform job, model, and S3 artifacts are created.
    """
    local_model_path = _download_artifact_from_uri(pretrained_model.model_uri)
    artifact_path = "model"
    region_name = sagemaker_client.meta.region_name
    default_bucket = mfs._get_default_s3_bucket(region_name)
    s3_artifact_repo = S3ArtifactRepository(f"s3://{default_bucket}")
    s3_artifact_repo.log_artifacts(local_model_path,
                                   artifact_path=artifact_path)
    model_s3_uri = f"s3://{default_bucket}/{pretrained_model.model_path}"

    job_name = "test-job"
    mfs.deploy_transform_job(
        job_name=job_name,
        model_uri=model_s3_uri,
        s3_input_data_type="Some Data Type",
        s3_input_uri="Some Input Uri",
        content_type="Some Content Type",
        s3_output_path="Some Output Path",
        archive=True,
    )

    transform_job_description = sagemaker_client.describe_transform_job(
        TransformJobName=job_name)
    model_name = transform_job_description["ModelName"]
    assert model_name in [
        model["ModelName"]
        for model in sagemaker_client.list_models()["Models"]
    ]

    s3_client = boto3.client("s3", region_name=region_name)
    object_names = [
        entry["Key"]
        for entry in s3_client.list_objects(Bucket=default_bucket)["Contents"]
    ]
    # Generator expression avoids materializing a throwaway list inside any().
    assert any(model_name in object_name for object_name in object_names)
    assert job_name in [
        transform_job["TransformJobName"] for transform_job in
        sagemaker_client.list_transform_jobs()["TransformJobSummaries"]
    ]
# --- Example 15 ---
def test_deploy_cli_creates_sagemaker_transform_job_and_s3_resources_with_expected_names_from_local(
    pretrained_model, sagemaker_client
):
    """Deploy a transform job via the CLI from a local model URI and verify the
    expected SageMaker transform job, model, and S3 artifacts are created.
    """
    job_name = "test-job"
    cli_env = {"LC_ALL": "en_US.UTF-8", "LANG": "en_US.UTF-8"}
    cli_args = [
        "deploy-transform-job",
        "--job-name",
        job_name,
        "--model-uri",
        pretrained_model.model_uri,
        "--input-data-type",
        "Some Data Type",
        "--input-uri",
        "Some Input Uri",
        "--content-type",
        "Some Content Type",
        "--output-path",
        "Some Output Path",
        "--archive",
    ]
    result = CliRunner(env=cli_env).invoke(mfscli.commands, cli_args)
    assert result.exit_code == 0

    region = sagemaker_client.meta.region_name
    bucket = mfs._get_default_s3_bucket(region)
    job_description = sagemaker_client.describe_transform_job(TransformJobName=job_name)
    created_model = job_description["ModelName"]

    known_models = [m["ModelName"] for m in sagemaker_client.list_models()["Models"]]
    assert created_model in known_models

    s3 = boto3.client("s3", region_name=region)
    object_keys = [obj["Key"] for obj in s3.list_objects(Bucket=bucket)["Contents"]]
    assert any(created_model in key for key in object_keys)

    listed_job_names = [
        job["TransformJobName"]
        for job in sagemaker_client.list_transform_jobs()["TransformJobSummaries"]
    ]
    assert job_name in listed_job_names