Example #1
def test_pytorch_logger(integration_test_url, project_name, use_google_oauth):
    merlin.set_url(integration_test_url, use_google_oauth=use_google_oauth)
    merlin.set_project(project_name)
    merlin.set_model("pytorch-logger", ModelType.PYTORCH)

    model_dir = "test/pytorch-model"

    undeploy_all_version()

    logger = Logger(model=LoggerConfig(enabled=True, mode=LoggerMode.REQUEST))
    with merlin.new_model_version() as v:
        merlin.log_pytorch_model(model_dir=model_dir)
        endpoint = merlin.deploy(logger=logger)

    model_config = endpoint.logger.model
    assert model_config is not None
    assert model_config.enabled
    assert model_config.mode == LoggerMode.REQUEST

    transformer_config = endpoint.logger.transformer
    assert transformer_config is None

    resp = requests.post(f"{endpoint.url}", json=request_json)

    assert resp.status_code == 200
    assert resp.json() is not None
    assert len(resp.json()["predictions"]) == len(request_json["instances"])

    undeploy_all_version()
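
The module-level fixtures and helpers these examples rely on (request_json, undeploy_all_version, the pyfunc model classes, and so on) are defined elsewhere in the test suite. A minimal sketch of the two most common ones, using only merlin calls already shown in the examples; the payload values are placeholders, not taken from the source:

# Hypothetical sketch (assumption): module-level test fixtures used by the
# deployment examples. The iris-style payload matches the assertions on
# resp.json()["predictions"]; the cleanup helper undeploys every version of
# the active model and skips versions without a running endpoint.
import merlin

request_json = {
    "instances": [
        [2.8, 1.0, 6.8, 0.4],  # placeholder feature rows
        [0.1, 0.5, 1.8, 2.4],
    ]
}

def undeploy_all_version():
    for version in merlin.active_model().list_version():
        try:
            merlin.undeploy(version)
        except Exception:
            # The version may not be deployed; ignore it and continue.
            pass
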
Example #2
File: merlin.py  Project: zhangchi1/merlin
def deploy(env, model_name, model_type, model_dir, project, url, min_replica,
           max_replica, cpu_request, memory_request):

    merlin.set_url(url)

    target_env = merlin.get_environment(env)

    resource_request = target_env.default_resource_request
    if min_replica is not None:
        resource_request.min_replica = int(min_replica)
    if max_replica is not None:
        resource_request.max_replica = int(max_replica)
    if cpu_request is not None:
        resource_request.cpu_request = cpu_request
    if memory_request is not None:
        resource_request.memory_request = memory_request

    merlin.set_project(project)
    merlin.set_model(model_name, ModelType(model_type))

    with merlin.new_model_version() as v:
        merlin.log_model(model_dir=model_dir)

    try:
        endpoint = merlin.deploy(v, env, resource_request)
        if endpoint:
            print('Model deployed to {}'.format(endpoint))
    except Exception as e:
        print(e)
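
Example #23 below drives this function through a click runner with --env, --model-type, --model-dir, --model-name, --project and --url options. A hedged sketch of how such wiring could look; the group definition, the command name, and the resource-related flags are assumptions, not taken from the source:

# Hypothetical sketch (assumption): click wiring for the deploy function above.
import click

@click.group()
def cli():
    pass

@cli.command("deploy")
@click.option("--env")
@click.option("--model-name")
@click.option("--model-type")
@click.option("--model-dir")
@click.option("--project")
@click.option("--url")
@click.option("--min-replica", default=None)
@click.option("--max-replica", default=None)
@click.option("--cpu-request", default=None)
@click.option("--memory-request", default=None)
def deploy_command(env, model_name, model_type, model_dir, project, url,
                   min_replica, max_replica, cpu_request, memory_request):
    # The body would delegate to the deploy() function shown in Example #2.
    pass

if __name__ == "__main__":
    cli()
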
Example #3
def test_feast_enricher(integration_test_url, project_name, use_google_oauth):
    merlin.set_url(integration_test_url, use_google_oauth=use_google_oauth)
    merlin.set_project(project_name)
    merlin.set_model("feast-enricher", ModelType.PYFUNC)

    undeploy_all_version()
    with merlin.new_model_version() as v:
        v.log_pyfunc_model(
            model_instance=EchoModel(),
            conda_env="test/pyfunc/env.yaml",
            code_dir=["test"],
            artifacts={},
        )

    transformer_config_path = os.path.join("test/transformer",
                                           "feast_enricher.yaml")
    transformer = StandardTransformer(config_file=transformer_config_path,
                                      enabled=True)

    request_json = {"driver_id": "1000"}
    endpoint = merlin.deploy(v, transformer=transformer)
    resp = requests.post(f"{endpoint.url}", json=request_json)

    assert resp.status_code == 200
    assert resp.json() is not None
    feast_features = resp.json()["feast_features"]
    assert feast_features is not None
    assert pd.DataFrame(feast_features) is not None

    merlin.undeploy(v)
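
EchoModel is defined in the test package rather than in this snippet. Judging from how the transformer tests inspect the response body, it simply returns the incoming request. A sketch under the assumption that merlin.model exposes a PyFuncModel base class with initialize/infer hooks:

# Hypothetical sketch (assumption): a pyfunc model that echoes its request so
# tests can inspect what the transformer or Feast enricher produced.
from merlin.model import PyFuncModel

class EchoModel(PyFuncModel):
    def initialize(self, artifacts):
        # No artifacts are needed; the model is stateless.
        pass

    def infer(self, request, **kwargs):
        return request
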
Example #4
def test_model_version_with_labels(integration_test_url, project_name,
                                   use_google_oauth):
    merlin.set_url(integration_test_url, use_google_oauth=use_google_oauth)
    merlin.set_project(project_name)
    merlin.set_model("sklearn-labels", ModelType.SKLEARN)

    model_dir = "test/sklearn-model"
    MODEL_FILE = "model.joblib"

    undeploy_all_version()

    with merlin.new_model_version(labels={"model": "T-800"}) as v:
        clf = svm.SVC(gamma="scale")
        iris = load_iris()
        X, y = iris.data, iris.target
        clf.fit(X, y)
        dump(clf, os.path.join(model_dir, MODEL_FILE))

        # Upload the serialized model to MLP
        merlin.log_model(model_dir=model_dir)
        assert len(v.labels) == 1
        assert v.labels["model"] == "T-800"

    merlin_active_model = merlin.active_model()
    all_versions = merlin_active_model.list_version(
        labels={"model": ["T-800"]})
    for version in all_versions:
        assert version.labels["model"] == "T-800"

    should_not_exist_versions = merlin_active_model.list_version(
        labels={"model": ["T-1000"]})
    assert len(should_not_exist_versions) == 0
Example #5
def test_tensorflow(integration_test_url, project_name, use_google_oauth):
    merlin.set_url(integration_test_url, use_google_oauth=use_google_oauth)
    merlin.set_project(project_name)
    merlin.set_model("tensorflow-sample", ModelType.TENSORFLOW)

    model_dir = "test/tensorflow-model"

    undeploy_all_version()

    with merlin.new_model_version() as v:
        merlin.log_model(model_dir=model_dir)

    endpoint = merlin.deploy(v)
    request_json = {
        "signature_name": "predict",
        "instances": [
            {"sepal_length": 2.8, "sepal_width": 1.0, "petal_length": 6.8,
             "petal_width": 0.4},
            {"sepal_length": 0.1, "sepal_width": 0.5, "petal_length": 1.8,
             "petal_width": 2.4}
        ]
    }
    resp = requests.post(f"{endpoint.url}", json=request_json)

    assert resp.status_code == 200
    assert resp.json() is not None
    assert len(resp.json()['predictions']) == len(request_json['instances'])

    merlin.undeploy(v)
    sleep(5)
    resp = requests.post(f"{endpoint.url}", json=request_json)

    assert resp.status_code == 404
Example #6
def test_new_model_version(url, project, model, version, mock_oauth,
                           use_google_oauth):
    # Expect an exception when creating a new model version while the client
    # and project have not been set
    with pytest.raises(Exception):
        with merlin.new_model_version() as v:
            print(v)

    merlin.set_url(url, use_google_oauth=use_google_oauth)

    with pytest.raises(Exception):
        with merlin.new_model_version() as v:
            print(v)

    _mock_get_project_call(project)
    merlin.set_project(project.name)

    with pytest.raises(Exception):
        with merlin.new_model_version() as v:
            print(v)

    _mock_get_model_call(project, model)
    merlin.set_model(model.name, model.type)

    _mock_new_model_version_call(model, version)
    with merlin.new_model_version() as v:
        assert v is not None
        assert isinstance(v, ModelVersion)

        assert v.mlflow_run_id == version.mlflow_run_id
Example #7
def test_mlflow_tracking(integration_test_url, project_name, use_google_oauth):
    merlin.set_url(integration_test_url, use_google_oauth=use_google_oauth)
    merlin.set_project(project_name)
    merlin.set_model("mlflow-tracking", ModelType.PYTORCH)

    model_dir = "test/pytorch-model"

    undeploy_all_version()

    with merlin.new_model_version() as v:
        merlin.log_pytorch_model(model_dir=model_dir)
        merlin.log_param("model_type", "pytorch")
        merlin.log_param("iteration", 5)

        merlin.set_tag("version", "v1.0")
        merlin.set_tag("build", "latest")
        merlin.set_tag("team_id", 1)

        merlin.log_metric("model_loaded", 10.23)

        assert merlin.get_param("model_type") == "pytorch"
        # Params logged as integers come back as strings
        assert merlin.get_param("iteration") == '5'
        assert merlin.get_param("random_key") is None

        assert merlin.get_tag("version") == "v1.0"
        assert merlin.get_tag("xxx") is None
        # Tags set with integer values come back as strings
        assert merlin.get_tag("team_id") == "1"

        assert merlin.get_metric("model_loaded") == 10.23
        assert merlin.get_metric("response_time") is None

        assert merlin.list_tag() == {
            "version": "v1.0", "build": "latest", "team_id": "1"}
Example #8
def test_sklearn(integration_test_url, project_name, use_google_oauth):
    merlin.set_url(integration_test_url, use_google_oauth=use_google_oauth)
    merlin.set_project(project_name)
    merlin.set_model("sklearn-sample", ModelType.SKLEARN)

    model_dir = "test/sklearn-model"
    MODEL_FILE = "model.joblib"

    undeploy_all_version()

    with merlin.new_model_version() as v:
        clf = svm.SVC(gamma='scale')
        iris = load_iris()
        X, y = iris.data, iris.target
        clf.fit(X, y)
        dump(clf, os.path.join(model_dir, MODEL_FILE))

        # Upload the serialized model to MLP
        merlin.log_model(model_dir=model_dir)

    endpoint = merlin.deploy(v)
    resp = requests.post(f"{endpoint.url}", json=request_json)

    assert resp.status_code == 200
    assert resp.json() is not None
    assert len(resp.json()['predictions']) == len(request_json['instances'])

    merlin.undeploy(v)
    sleep(5)
    resp = requests.post(f"{endpoint.url}", json=request_json)

    assert resp.status_code == 404
Example #9
def test_pyfunc_env_vars(integration_test_url, project_name, use_google_oauth):
    merlin.set_url(integration_test_url, use_google_oauth=use_google_oauth)
    merlin.set_project(project_name)
    merlin.set_model("pyfunc-env-vars-sample", ModelType.PYFUNC)

    undeploy_all_version()
    with merlin.new_model_version() as v:
        v.log_pyfunc_model(model_instance=EnvVarModel(),
                           conda_env="test/pyfunc/env.yaml",
                           code_dir=["test"],
                           artifacts={})

    env_vars = {"WORKERS": "8", "ENV_VAR_1": "1", "ENV_VAR_2": "2"}
    endpoint = merlin.deploy(v, env_vars=env_vars)
    resp = requests.post(f"{endpoint.url}", json=request_json)

    assert resp.status_code == 200
    assert resp.json() is not None
    assert resp.json()['workers'] == "8"
    assert resp.json()['env_var_1'] == "1"
    assert resp.json()['env_var_2'] == "2"
    assert env_vars.items() <= endpoint.env_vars.items()

    merlin.undeploy(v)
    sleep(5)
    resp = requests.post(f"{endpoint.url}", json=request_json)

    assert resp.status_code == 404
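
EnvVarModel is likewise defined in the test package. The assertions on workers, env_var_1 and env_var_2 suggest it surfaces those environment variables in its response, roughly as follows (same PyFuncModel assumption as the EchoModel sketch above):

# Hypothetical sketch (assumption): a pyfunc model that reports selected
# environment variables so the test can verify env_vars passed to deploy().
import os
from merlin.model import PyFuncModel

class EnvVarModel(PyFuncModel):
    def initialize(self, artifacts):
        pass

    def infer(self, request, **kwargs):
        return {
            "workers": os.environ.get("WORKERS"),
            "env_var_1": os.environ.get("ENV_VAR_1"),
            "env_var_2": os.environ.get("ENV_VAR_2"),
        }
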
Example #10
def test_pyfunc(integration_test_url, project_name, use_google_oauth):
    merlin.set_url(integration_test_url, use_google_oauth=use_google_oauth)
    merlin.set_project(project_name)
    merlin.set_model("pyfunc-sample", ModelType.PYFUNC)

    undeploy_all_version()
    with merlin.new_model_version() as v:
        iris = load_iris()
        y = iris['target']
        X = iris['data']
        xgb_path = train_xgboost_model(X, y)
        sklearn_path = train_sklearn_model(X, y)

        v.log_pyfunc_model(model_instance=EnsembleModel(),
                           conda_env="test/pyfunc/env.yaml",
                           code_dir=["test"],
                           artifacts={
                               "xgb_model": xgb_path,
                               "sklearn_model": sklearn_path
                           })

    endpoint = merlin.deploy(v)
    sleep(5)

    resp = requests.post(f"{endpoint.url}", json=request_json)

    assert resp.status_code == 200
    assert resp.json() is not None
    assert len(resp.json()['predictions']) == len(request_json['instances'])

    merlin.undeploy(v)
    sleep(5)
    resp = requests.post(f"{endpoint.url}", json=request_json)

    assert resp.status_code == 404
Example #11
def test_transformer_pytorch_logger(integration_test_url, project_name,
                                    use_google_oauth):
    merlin.set_url(integration_test_url, use_google_oauth=use_google_oauth)
    merlin.set_project(project_name)
    merlin.set_model("transformer-logger", ModelType.PYTORCH)

    model_dir = "test/transformer"

    undeploy_all_version()

    resource_request = ResourceRequest(1, 1, "100m", "200Mi")
    transformer = Transformer(
        "gcr.io/kubeflow-ci/kfserving/image-transformer:latest",
        resource_request=resource_request,
    )

    logger = Logger(
        model=LoggerConfig(enabled=True, mode=LoggerMode.ALL),
        transformer=LoggerConfig(enabled=True, mode=LoggerMode.ALL),
    )
    with merlin.new_model_version() as v:
        merlin.log_pytorch_model(model_dir=model_dir)
        endpoint = merlin.deploy(transformer=transformer, logger=logger)

    assert endpoint.logger is not None

    model_config = endpoint.logger.model
    assert model_config is not None
    assert model_config.enabled
    assert model_config.mode == LoggerMode.ALL

    transformer_config = endpoint.logger.transformer
    assert transformer_config is not None
    assert transformer_config.enabled
    assert transformer_config.mode == LoggerMode.ALL

    with open(os.path.join("test/transformer", "input.json"), "r") as f:
        req = json.load(f)

    sleep(5)
    resp = requests.post(f"{endpoint.url}", json=req)

    assert resp.status_code == 200
    assert resp.json() is not None
    assert len(resp.json()["predictions"]) == len(req["instances"])

    model_endpoint = merlin.serve_traffic({endpoint: 100})
    sleep(5)
    resp = requests.post(f"{model_endpoint.url}", json=req)

    assert resp.status_code == 200
    assert resp.json() is not None
    assert len(resp.json()["predictions"]) == len(req["instances"])

    # Undeploying a model version that is serving traffic must fail
    with pytest.raises(Exception):
        assert merlin.undeploy(v)

    # Undeploy other running model version endpoints
    undeploy_all_version()
Example #12
def test_custom_model_with_artifact(integration_test_url, project_name,
                                    use_google_oauth):
    merlin.set_url(integration_test_url, use_google_oauth=use_google_oauth)
    merlin.set_project(project_name)
    merlin.set_model("custom-w-artifact", ModelType.CUSTOM)
    undeploy_all_version()

    resource_request = ResourceRequest(1, 1, "1", "1Gi")
    model_dir = "test/custom-model"
    BST_FILE = "model.bst"

    iris = load_iris()
    y = iris["target"]
    X = iris["data"]
    dtrain = xgb.DMatrix(X, label=y)
    param = {
        "max_depth": 6,
        "eta": 0.1,
        "silent": 1,
        "nthread": 4,
        "num_class": 10,
        "objective": "multi:softmax",
    }
    xgb_model = xgb.train(params=param, dtrain=dtrain)
    model_file = os.path.join(model_dir, BST_FILE)
    xgb_model.save_model(model_file)

    with merlin.new_model_version() as v:
        v.log_custom_model(
            image="ghcr.io/tiopramayudi/custom-predictor-go:v0.2",
            model_dir=model_dir)

    endpoint = merlin.deploy(v,
                             resource_request=resource_request,
                             env_vars={"MODEL_FILE_NAME": BST_FILE})

    sleep(5)
    resp = requests.post(f"{endpoint.url}", json=request_json)

    assert resp.status_code == 200
    assert resp.json() is not None
    assert resp.json()["predictions"] is not None

    model_endpoint = merlin.serve_traffic({endpoint: 100})
    sleep(5)
    resp = requests.post(f"{model_endpoint.url}", json=request_json)

    assert resp.status_code == 200
    assert resp.json() is not None
    assert resp.json()["predictions"] is not None

    # Undeploying a model version that is serving traffic must fail
    with pytest.raises(Exception):
        assert merlin.undeploy(v)

    # Undeploy other running model version endpoints
    undeploy_all_version()
Example #13
def test_standard_transformer_without_feast(integration_test_url, project_name,
                                            use_google_oauth):
    merlin.set_url(integration_test_url, use_google_oauth=use_google_oauth)
    merlin.set_project(project_name)
    merlin.set_model("std-transformer", ModelType.PYFUNC)

    undeploy_all_version()
    with merlin.new_model_version() as v:
        v.log_pyfunc_model(
            model_instance=EchoModel(),
            conda_env="test/pyfunc/env.yaml",
            code_dir=["test"],
            artifacts={},
        )

    transformer_config_path = os.path.join(
        "test/transformer", "standard_transformer_no_feast.yaml")
    transformer = StandardTransformer(config_file=transformer_config_path,
                                      enabled=True)

    endpoint = merlin.deploy(v, transformer=transformer)
    request_json = {
        "drivers": [{
            "id": 1,
            "name": "driver-1",
            "vehicle": "motorcycle",
            "previous_vehicle": "suv",
            "rating": 4
        }, {
            "id": 2,
            "name": "driver-2",
            "vehicle": "sedan",
            "previous_vehicle": "mpv",
            "rating": 3
        }],
        "customer": {
            "id": 1111
        },
    }
    resp = requests.post(f"{endpoint.url}", json=request_json)

    assert resp.status_code == 200
    assert resp.json() is not None
    exp_resp = {
        "instances": {
            "columns": [
                "customer_id", "name", "rank", "rating", "vehicle",
                "previous_vehicle"
            ],
            "data": [[1111, "driver-2", 2.5, 0.5, 2, 3],
                     [1111, "driver-1", -2.5, 0.75, 0, 1]],
        }
    }

    assert resp.json()["instances"] == exp_resp["instances"]
    merlin.undeploy(v)
Example #14
def test_list_environment(url, mock_oauth, use_google_oauth):
    merlin.set_url(url, use_google_oauth=use_google_oauth)

    _mock_list_environment_call()

    envs = merlin.list_environment()

    assert len(envs) == 2
    assert envs[0].name == env_1.name
    assert envs[1].name == env_2.name
Example #15
def test_get_default_environment(url, mock_oauth, use_google_oauth):
    merlin.set_url(url, use_google_oauth=use_google_oauth)

    _mock_list_environment_call()

    env = merlin.get_default_environment()

    assert env is not None
    assert env.name == env_1.name
    assert env.is_default
Example #16
def test_standard_transformer_with_feast(integration_test_url, project_name,
                                         use_google_oauth):
    merlin.set_url(integration_test_url, use_google_oauth=use_google_oauth)
    merlin.set_project(project_name)
    merlin.set_model("std-transformer-feast", ModelType.PYFUNC)

    undeploy_all_version()
    with merlin.new_model_version() as v:
        v.log_pyfunc_model(
            model_instance=EchoModel(),
            conda_env="test/pyfunc/env.yaml",
            code_dir=["test"],
            artifacts={},
        )

    transformer_config_path = os.path.join(
        "test/transformer", "standard_transformer_with_feast.yaml")
    transformer = StandardTransformer(config_file=transformer_config_path,
                                      enabled=True)

    endpoint = merlin.deploy(v, transformer=transformer)
    request_json = {
        "drivers": [
            {
                "id": "1234",
                "name": "driver-1"
            },
            {
                "id": "5678",
                "name": "driver-2"
            },
        ],
        "customer": {
            "id": 1111
        },
    }
    resp = requests.post(f"{endpoint.url}", json=request_json)

    assert resp.status_code == 200
    assert resp.json() is not None
    exp_resp = {
        "instances": {
            "columns": [
                "rank",
                "driver_id",
                "customer_id",
                "merlin_test_driver_features:test_int32",
                "merlin_test_driver_features:test_float",
            ],
            "data": [[0, "1234", 1111, -1, 0], [1, "5678", 1111, -1, 0]],
        }
    }

    assert resp.json()["instances"] == exp_resp["instances"]
    merlin.undeploy(v)
Example #17
def test_get_environment(url, mock_oauth, use_google_oauth):
    merlin.set_url(url, use_google_oauth=use_google_oauth)

    _mock_list_environment_call()

    env = merlin.get_environment(env_1.name)
    assert env is not None
    assert env.name == env_1.name

    env = merlin.get_environment("undefined_env")
    assert env is None
Example #18
def test_resource_request(integration_test_url, project_name,
                          use_google_oauth):
    merlin.set_url(integration_test_url, use_google_oauth=use_google_oauth)
    merlin.set_project(project_name)
    merlin.set_model("resource-request", ModelType.XGBOOST)

    model_dir = "test/xgboost-model"
    BST_FILE = "model.bst"

    envs = merlin.list_environment()
    assert len(envs) >= 1

    default_env = merlin.get_default_environment()
    assert default_env is not None

    undeploy_all_version()
    with merlin.new_model_version() as v:
        iris = load_iris()
        y = iris['target']
        X = iris['data']
        dtrain = xgb.DMatrix(X, label=y)
        param = {
            'max_depth': 6,
            'eta': 0.1,
            'silent': 1,
            'nthread': 4,
            'num_class': 10,
            'objective': 'multi:softmax'
        }
        xgb_model = xgb.train(params=param, dtrain=dtrain)
        model_file = os.path.join(model_dir, BST_FILE)
        xgb_model.save_model(model_file)

        # Upload the serialized model to MLP
        merlin.log_model(model_dir=model_dir)

        resource_request = ResourceRequest(1, 1, "100m", "200Mi")
        endpoint = merlin.deploy(v,
                                 environment_name=default_env.name,
                                 resource_request=resource_request)

    sleep(5)
    resp = requests.post(f"{endpoint.url}", json=request_json)

    assert resp.status_code == 200
    assert resp.json() is not None
    assert len(resp.json()['predictions']) == len(request_json['instances'])

    merlin.undeploy(v)
    sleep(5)
    resp = requests.post(f"{endpoint.url}", json=request_json)

    assert resp.status_code == 404
Example #19
def test_batch_pyfunc_v2_batch(integration_test_url, project_name,
                               service_account, use_google_oauth,
                               batch_bigquery_source, batch_bigquery_sink,
                               batch_gcs_staging_bucket):
    merlin.set_url(integration_test_url, use_google_oauth=use_google_oauth)
    merlin.set_project(project_name)
    merlin.set_model("batch-iris", ModelType.PYFUNC_V2)
    service_account_name = "*****@*****.**"
    _create_secret(merlin.active_project(), service_account_name,
                   service_account)

    clf = svm.SVC(gamma='scale')
    iris = load_iris()
    X, y = iris.data, iris.target
    clf.fit(X, y)
    joblib.dump(clf, MODEL_PATH)
    # Create new version of the model
    mdl = merlin.active_model()
    v = mdl.new_model_version()
    v.start()
    # Upload the serialized model to MLP
    v.log_pyfunc_model(model_instance=IrisClassifier(),
                       conda_env=ENV_PATH,
                       code_dir=["test"],
                       artifacts={MODEL_PATH_ARTIFACT_KEY: MODEL_PATH})

    v.finish()

    bq_source = BigQuerySource(batch_bigquery_source,
                               features=[
                                   "sepal_length", "sepal_width",
                                   "petal_length", "petal_width"
                               ])
    bq_sink = BigQuerySink(batch_bigquery_sink,
                           staging_bucket=batch_gcs_staging_bucket,
                           result_column="prediction",
                           save_mode=SaveMode.OVERWRITE)
    job_config = PredictionJobConfig(source=bq_source,
                                     sink=bq_sink,
                                     service_account_name=service_account_name,
                                     env_vars={"ALPHA": "0.2"})
    job = v.create_prediction_job(job_config=job_config)

    assert job.status == JobStatus.COMPLETED

    job = v.create_prediction_job(job_config=job_config, sync=False)
    while job.status == JobStatus.PENDING:
        sleep(20)
        job = job.refresh()
    job = job.stop()

    assert job.status == JobStatus.TERMINATED
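
MODEL_PATH, ENV_PATH, MODEL_PATH_ARTIFACT_KEY and IrisClassifier come from the test module. A sketch of the batch model, assuming merlin.model exposes a PyFuncV2Model base class whose infer receives a feature DataFrame; the constant values are placeholders:

# Hypothetical sketch (assumption): the pyfunc-v2 model and constants used in
# the batch prediction example above.
import joblib
from merlin.model import PyFuncV2Model

MODEL_PATH = "model.joblib"              # placeholder path
MODEL_PATH_ARTIFACT_KEY = "model_path"   # placeholder artifact key
ENV_PATH = "test/pyfunc/env.yaml"

class IrisClassifier(PyFuncV2Model):
    def initialize(self, artifacts):
        # Load the joblib-dumped classifier that was logged as an artifact.
        self._model = joblib.load(artifacts[MODEL_PATH_ARTIFACT_KEY])

    def infer(self, model_input):
        # model_input is assumed to be a DataFrame of the four iris features.
        return self._model.predict(model_input)
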
Example #20
def test_set_project(url, project, mock_oauth, use_google_oauth):
    # expect exception when setting project but client is not set
    with pytest.raises(Exception):
        merlin.set_project(project.name)

    _mock_get_project_call(project)

    merlin.set_url(url, use_google_oauth=use_google_oauth)
    merlin.set_project(project.name)

    assert merlin.active_project().name == project.name
    assert merlin.active_project().id == project.id
    assert (merlin.active_project().mlflow_tracking_url
            == project.mlflow_tracking_url)
Example #21
def test_set_traffic(integration_test_url, project_name, use_google_oauth):
    merlin.set_url(integration_test_url, use_google_oauth=use_google_oauth)
    merlin.set_project(project_name)
    merlin.set_model("set-traffic-sample", ModelType.SKLEARN)

    model_dir = "test/sklearn-model"
    MODEL_FILE = "model.joblib"

    undeploy_all_version()

    with merlin.new_model_version() as v:
        clf = svm.SVC(gamma='scale')
        iris = load_iris()
        X, y = iris.data, iris.target
        clf.fit(X, y)
        dump(clf, os.path.join(model_dir, MODEL_FILE))

        # Upload the serialized model to MLP
        merlin.log_model(model_dir=model_dir)
        endpoint = merlin.deploy(v)

    sleep(5)
    resp = requests.post(f"{endpoint.url}", json=request_json)

    assert resp.status_code == 200
    assert resp.json() is not None
    assert len(resp.json()['predictions']) == len(request_json['instances'])

    # Undeploy deployed model version
    merlin.undeploy(v)
    sleep(5)

    # Redeploy and set traffic
    merlin.deploy(v)

    endpoint = merlin.set_traffic({v: 100})
    sleep(5)
    resp = requests.post(f"{endpoint.url}", json=request_json)

    assert resp.status_code == 200
    assert resp.json() is not None
    assert len(resp.json()['predictions']) == len(request_json['instances'])

    # Undeploying a model version that is serving traffic must fail
    with pytest.raises(Exception):
        assert merlin.undeploy(v)

    # Undeploy other running model version endpoints
    undeploy_all_version()
Example #22
def test_xgboost(integration_test_url, project_name, use_google_oauth):
    merlin.set_url(integration_test_url, use_google_oauth=use_google_oauth)
    merlin.set_project(project_name)
    merlin.set_model("xgboost-sample", ModelType.XGBOOST)
    v = _get_latest_version(merlin.active_model())
    port = _get_free_port()
    p = Process(target=v.start_server, kwargs={"port": port, "build_image": True})
    p.start()
    _wait_server_ready(f"http://{host}:{port}")
    resp = requests.post(_get_local_endpoint(v, port), json=request_json)

    assert resp.status_code == 200
    assert resp.json() is not None
    assert len(resp.json()['predictions']) == len(request_json['instances'])
    p.terminate()
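
_get_latest_version, _get_free_port, _wait_server_ready, _get_local_endpoint and host are local-server test helpers assumed by this snippet and by Example #27. A rough sketch of the generic ones; the KFServing-style predict path and the model-version attributes used to build it are assumptions:

# Hypothetical sketches (assumptions) of the local-server test helpers.
import socket
from time import sleep
import requests

host = "localhost"

def _get_free_port():
    # Ask the OS for an unused TCP port.
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
        s.bind(("", 0))
        return s.getsockname()[1]

def _wait_server_ready(url, timeout_seconds=300):
    # Poll the server until it responds or the timeout expires.
    waited = 0
    while waited < timeout_seconds:
        try:
            if requests.get(url).status_code == 200:
                return
        except requests.exceptions.ConnectionError:
            pass
        sleep(5)
        waited += 5
    raise TimeoutError("model server did not become ready")

def _get_local_endpoint(model_version, port):
    # Assumed KFServing-style predict path for the locally served version.
    return (f"http://{host}:{port}/v1/models/"
            f"{model_version.model.name}-{model_version.id}:predict")
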
Example #23
def test_cli_deployment_undeployment(deployment_info, runner,
                                     use_google_oauth):

    model_name = 'cli-test'
    merlin.set_url(deployment_info['url'], use_google_oauth=use_google_oauth)
    merlin.set_project(deployment_info['project'])
    merlin.set_model(model_name, ModelType.SKLEARN)

    undeploy_all_version()

    # Deployment
    result = runner.invoke(cli, [
        'deploy', '--env', deployment_info['env'], '--model-type',
        deployment_info['model_type'], '--model-dir',
        deployment_info['model_dir'], '--model-name', model_name, '--project',
        deployment_info['project'], '--url', deployment_info['url']
    ])

    if result.exception:
        traceback.print_exception(*result.exc_info)

    test_deployed_model_version = result.output.split('\n')[0].split(' ')[-1]

    # Get latest deployed model's version
    merlin.set_url(deployment_info['url'], use_google_oauth=use_google_oauth)
    merlin.set_project(deployment_info['project'])
    merlin.set_model(model_name, ModelType.SKLEARN)

    merlin_active_model = merlin.active_model()
    all_versions = merlin_active_model.list_version()

    latest_version = all_versions[0]

    # Undeployment
    undeploy_result = runner.invoke(cli, [
        'undeploy', '--model-version', test_deployed_model_version,
        '--model-name', model_name, '--project', deployment_info['project'],
        '--url', deployment_info['url']
    ])
    if undeploy_result.exception:
        traceback.print_exception(*undeploy_result.exc_info)

    planned_output = "Deleting deployment of model {} version {}".format(
        model_name, test_deployed_model_version)
    received_output = undeploy_result.output.split(' from')[0]

    assert latest_version._id == int(test_deployed_model_version)
    assert received_output == planned_output
Example #24
def test_stop_serving_traffic(integration_test_url, project_name,
                              use_google_oauth):
    merlin.set_url(integration_test_url, use_google_oauth=use_google_oauth)
    merlin.set_project(project_name)
    merlin.set_model("stop-serving-traffic", ModelType.SKLEARN)

    model_dir = "test/sklearn-model"
    MODEL_FILE = "model.joblib"

    undeploy_all_version()

    with merlin.new_model_version() as v:
        clf = svm.SVC(gamma='scale')
        iris = load_iris()
        X, y = iris.data, iris.target
        clf.fit(X, y)
        dump(clf, os.path.join(model_dir, MODEL_FILE))

        # Upload the serialized model to MLP
        merlin.log_model(model_dir=model_dir)
        endpoint = merlin.deploy(v)

    sleep(5)
    resp = requests.post(f"{endpoint.url}", json=request_json)

    assert resp.status_code == 200
    assert resp.json() is not None
    assert len(resp.json()['predictions']) == len(request_json['instances'])

    model_endpoint = merlin.serve_traffic({endpoint: 100})
    sleep(5)
    resp = requests.post(f"{model_endpoint.url}", json=request_json)

    assert resp.status_code == 200
    assert resp.json() is not None
    assert len(resp.json()['predictions']) == len(request_json['instances'])

    merlin.stop_serving_traffic(model_endpoint.environment_name)

    endpoints = merlin.list_model_endpoints()
    for endpoint in endpoints:
        if endpoint.environment_name == model_endpoint.environment_name:
            assert endpoint.status == Status.TERMINATED

    # Undeploy other running model version endpoints
    undeploy_all_version()
Example #25
def test_new_model_version_with_labels(url, project, model, version,
                                       mock_oauth, use_google_oauth):
    merlin.set_url(url, use_google_oauth=use_google_oauth)
    _mock_get_project_call(project)
    merlin.set_project(project.name)
    _mock_get_model_call(project, model)
    merlin.set_model(model.name, model.type)

    # Insert labels
    labels = {"model": "T-800", "software": "skynet"}
    _mock_new_model_version_call(model, version, labels)

    with merlin.new_model_version(labels=labels) as v:
        assert v is not None
        assert isinstance(v, ModelVersion)

        assert v.mlflow_run_id == version.mlflow_run_id
        for key, value in v.labels.items():
            assert labels[key] == value
Example #26
def test_mlflow_methods(url, project, model, version, mock_oauth,
                        use_google_oauth):
    _mock_get_project_call(project)
    _mock_get_model_call(project, model)
    _mock_new_model_version_call(model, version)

    merlin.set_url(url, use_google_oauth=use_google_oauth)
    merlin.set_project(project.name)
    merlin.set_model(model.name, model.type)
    with merlin.new_model_version() as v:
        merlin.log_metric("metric", 0.1)
        merlin.log_param("param", "value")
        merlin.set_tag("tag", "value")
    run_id = v.mlflow_run_id
    run = mlflow.get_run(run_id=run_id)

    assert run.data.metrics["metric"] == 0.1
    assert run.data.params["param"] == "value"
    assert run.data.tags["tag"] == "value"
Example #27
def test_pytorch(integration_test_url, project_name, use_google_oauth):
    merlin.set_url(integration_test_url, use_google_oauth=use_google_oauth)
    merlin.set_project(project_name)
    merlin.set_model("pytorch-sample", ModelType.PYTORCH)
    model_dir = "test/pytorch-model"

    with merlin.new_model_version() as v:
        merlin.log_pytorch_model(model_dir=model_dir)

    port = _get_free_port()
    p = Process(target=v.start_server, kwargs={"port": port, "build_image": True})
    p.start()
    _wait_server_ready(f"http://{host}:{port}")
    resp = requests.post(_get_local_endpoint(v, port), json=request_json)

    assert resp.status_code == 200
    assert resp.json() is not None
    assert len(resp.json()['predictions']) == len(request_json['instances'])
    p.terminate()
Example #28
def test_pytorch(integration_test_url, project_name, use_google_oauth):
    merlin.set_url(integration_test_url, use_google_oauth=use_google_oauth)
    merlin.set_project(project_name)
    merlin.set_model("pytorch-sample", ModelType.PYTORCH)

    model_dir = "test/pytorch-model"

    undeploy_all_version()

    with merlin.new_model_version() as v:
        merlin.log_pytorch_model(model_dir=model_dir)
        endpoint = merlin.deploy()

    resp = requests.post(f"{endpoint.url}", json=request_json)

    assert resp.status_code == 200
    assert resp.json() is not None
    assert len(resp.json()["predictions"]) == len(request_json["instances"])

    merlin.undeploy(v)
Example #29
def test_set_model(url, project, model, mock_oauth, use_google_oauth):
    # expect exception when setting model but client and project is not set
    with pytest.raises(Exception):
        merlin.set_model(model.name, model.type)

    merlin.set_url(url, use_google_oauth=use_google_oauth)

    with pytest.raises(Exception):
        merlin.set_model(model.name, model.type)

    _mock_get_project_call(project)
    merlin.set_project(project.name)

    _mock_get_model_call(project, model)
    merlin.set_model(model.name, model.type)

    assert merlin.active_model().name == model.name
    assert merlin.active_model().type == model.type
    assert merlin.active_model().id == model.id
    assert (merlin.active_model().mlflow_experiment_id
            == model.mlflow_experiment_id)
Example #30
File: merlin.py  Project: zhangchi1/merlin
def undeploy(model_name, model_version, project, url):

    merlin.set_url(url)
    merlin.set_project(project)
    merlin.set_model(model_name)

    merlin_active_model = merlin.active_model()
    all_versions = merlin_active_model.list_version()

    try:
        wanted_model_info = [
            model_info for model_info in all_versions
            if model_info._id == int(model_version)
        ][0]
    except Exception as e:
        print(e)
        print('Model version {} was not found.'.format(model_version))
        return

    try:
        merlin.undeploy(wanted_model_info)
    except Exception as e:
        print(e)