Example #1
def test_transformer_pytorch_logger(integration_test_url, project_name,
                                    use_google_oauth):
    merlin.set_url(integration_test_url, use_google_oauth=use_google_oauth)
    merlin.set_project(project_name)
    merlin.set_model("transformer-logger", ModelType.PYTORCH)

    model_dir = "test/transformer"

    undeploy_all_version()

    resource_request = ResourceRequest(1, 1, "100m", "200Mi")  # min/max replicas, CPU request, memory request
    transformer = Transformer(
        "gcr.io/kubeflow-ci/kfserving/image-transformer:latest",
        resource_request=resource_request,
    )

    logger = Logger(
        model=LoggerConfig(enabled=True, mode=LoggerMode.ALL),
        transformer=LoggerConfig(enabled=True, mode=LoggerMode.ALL),
    )
    with merlin.new_model_version() as v:
        merlin.log_pytorch_model(model_dir=model_dir)
        endpoint = merlin.deploy(transformer=transformer, logger=logger)

    assert endpoint.logger is not None

    model_config = endpoint.logger.model
    assert model_config is not None
    assert model_config.enabled
    assert model_config.mode == LoggerMode.ALL

    transformer_config = endpoint.logger.transformer
    assert transformer_config is not None
    assert transformer_config.enabled
    assert transformer_config.mode == LoggerMode.ALL

    with open(os.path.join(model_dir, "input.json"), "r") as f:
        req = json.load(f)

    # Brief pause to let the newly deployed endpoint become reachable
    sleep(5)
    resp = requests.post(f"{endpoint.url}", json=req)

    assert resp.status_code == 200
    assert resp.json() is not None
    assert len(resp.json()["predictions"]) == len(req["instances"])

    model_endpoint = merlin.serve_traffic({endpoint: 100})
    # Brief pause to let traffic routing to the model endpoint take effect
    sleep(5)
    resp = requests.post(f"{model_endpoint.url}", json=req)

    assert resp.status_code == 200
    assert resp.json() is not None
    assert len(resp.json()["predictions"]) == len(req["instances"])

    # Trying to undeploy the serving model version must fail
    with pytest.raises(Exception):
        merlin.undeploy(v)

    # Undeploy other running model version endpoints
    undeploy_all_version()
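
A note on the request payload: the test reads its body from test/transformer/input.json, and the only structure the assertions depend on is an "instances" list in the request and a "predictions" list of equal length in the response. The file itself is not shown here; a minimal sketch of a payload in that shape (the values are invented, and the module-level request_json used in later examples presumably follows the same shape) could be:

# Hypothetical contents of test/transformer/input.json; the real fixture file may differ.
req = {
    "instances": [
        [6.8, 2.8, 4.8, 1.4],
        [6.0, 3.4, 4.5, 1.6],
    ]
}
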
Example #2
def test_pytorch_logger(integration_test_url, project_name, use_google_oauth):
    merlin.set_url(integration_test_url, use_google_oauth=use_google_oauth)
    merlin.set_project(project_name)
    merlin.set_model("pytorch-logger", ModelType.PYTORCH)

    model_dir = "test/pytorch-model"

    undeploy_all_version()

    logger = Logger(model=LoggerConfig(enabled=True, mode=LoggerMode.REQUEST))
    with merlin.new_model_version() as v:
        merlin.log_pytorch_model(model_dir=model_dir)
        endpoint = merlin.deploy(logger=logger)

    model_config = endpoint.logger.model
    assert model_config is not None
    assert model_config.enabled
    assert model_config.mode == LoggerMode.REQUEST

    transformer_config = endpoint.logger.transformer
    assert transformer_config is None

    resp = requests.post(f"{endpoint.url}", json=request_json)

    assert resp.status_code == 200
    assert resp.json() is not None
    assert len(resp.json()["predictions"]) == len(request_json["instances"])

    undeploy_all_version()
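
Both logger tests call undeploy_all_version() before deploying, but the helper is not part of these excerpts. A rough sketch of what it likely does, assuming the SDK exposes merlin.active_model() and a list_version() method on the model (both are assumptions; only merlin.undeploy() appears in the examples):

# Hypothetical reconstruction of the undeploy_all_version() helper used above.
def undeploy_all_version():
    model = merlin.active_model()         # assumed accessor for the model set via merlin.set_model()
    for version in model.list_version():  # assumed listing of the model's versions
        try:
            merlin.undeploy(version)      # merlin.undeploy() is shown in the examples
        except Exception:
            pass                          # the version may simply not be deployed
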
Example #3
def test_mlflow_tracking(integration_test_url, project_name, use_google_oauth):
    merlin.set_url(integration_test_url, use_google_oauth=use_google_oauth)
    merlin.set_project(project_name)
    merlin.set_model("mlflow-tracking", ModelType.PYTORCH)

    model_dir = "test/pytorch-model"

    undeploy_all_version()

    with merlin.new_model_version() as v:
        merlin.log_pytorch_model(model_dir=model_dir)
        merlin.log_param("model_type", "pytorch")
        merlin.log_param("iteration", 5)

        merlin.set_tag("version", "v1.0")
        merlin.set_tag("build", "latest")
        merlin.set_tag("team_id", 1)

        merlin.log_metric("model_loaded", 10.23)

        assert merlin.get_param("model_type") == "pytorch"
        # Params are stored as strings, so the original integer comes back stringified
        assert merlin.get_param("iteration") == '5'
        assert merlin.get_param("random_key") is None

        assert merlin.get_tag("version") == "v1.0"
        assert merlin.get_tag("xxx") is None
        # Tags are stored as strings, so the original integer comes back stringified
        assert merlin.get_tag("team_id") == "1"

        assert merlin.get_metric("model_loaded") == 10.23
        assert merlin.get_metric("response_time") is None

        assert merlin.list_tag() == {
            "version": "v1.0", "build": "latest", "team_id": "1"}
Example #4
def test_pytorch(integration_test_url, project_name, use_google_oauth):
    merlin.set_url(integration_test_url, use_google_oauth=use_google_oauth)
    merlin.set_project(project_name)
    merlin.set_model("pytorch-sample", ModelType.PYTORCH)
    model_dir = "test/pytorch-model"

    with merlin.new_model_version() as v:
        merlin.log_pytorch_model(model_dir=model_dir)

    port = _get_free_port()
    p = Process(target=v.start_server, kwargs={"port": port, "build_image": True})
    p.start()
    _wait_server_ready(f"http://{host}:{port}")
    resp = requests.post(_get_local_endpoint(v, port), json=request_json)

    assert resp.status_code == 200
    assert resp.json() is not None
    assert len(resp.json()['predictions']) == len(request_json['instances'])
    p.terminate()
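
This example relies on three helpers that are not shown (_get_free_port, _wait_server_ready, _get_local_endpoint) and a module-level host, presumably "localhost". A possible reconstruction, where the predict URL format and the model_version attributes are assumptions rather than confirmed SDK behavior:

import socket
import time
import requests

host = "localhost"  # assumed value of the module-level constant used above

def _get_free_port():
    # Ask the OS for an unused port by binding to port 0.
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
        sock.bind(("", 0))
        return sock.getsockname()[1]

def _wait_server_ready(url, timeout_second=300, tick_second=5):
    # Poll the locally started model server until it answers, or give up.
    deadline = time.time() + timeout_second
    while time.time() < deadline:
        try:
            if requests.get(url).status_code == 200:
                return
        except requests.exceptions.ConnectionError:
            pass
        time.sleep(tick_second)
    raise TimeoutError(f"server at {url} did not become ready within {timeout_second}s")

def _get_local_endpoint(model_version, port):
    # Hypothetical KFServing-style predict path; the real helper may build the URL differently.
    return f"http://{host}:{port}/v1/models/{model_version.model.name}-{model_version.id}:predict"
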
Example #5
def test_pytorch(integration_test_url, project_name, use_google_oauth):
    merlin.set_url(integration_test_url, use_google_oauth=use_google_oauth)
    merlin.set_project(project_name)
    merlin.set_model("pytorch-sample", ModelType.PYTORCH)

    model_dir = "test/pytorch-model"

    undeploy_all_version()

    with merlin.new_model_version() as v:
        merlin.log_pytorch_model(model_dir=model_dir)
        endpoint = merlin.deploy()

    resp = requests.post(f"{endpoint.url}", json=request_json)

    assert resp.status_code == 200
    assert resp.json() is not None
    assert len(resp.json()["predictions"]) == len(request_json["instances"])

    merlin.undeploy(v)
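
Every example takes the integration_test_url, project_name, and use_google_oauth pytest fixtures, which are defined elsewhere (most likely in a conftest.py). One way they could be wired up, with the environment variable names and defaults being pure assumptions for illustration:

# Hypothetical conftest.py for the fixtures used throughout these examples.
import os
import pytest

@pytest.fixture
def integration_test_url():
    # Assumed env var name and default; the real setup may use pytest CLI options instead.
    return os.environ.get("MERLIN_API_URL", "http://localhost:8080")

@pytest.fixture
def project_name():
    return os.environ.get("MERLIN_PROJECT", "integration-test")

@pytest.fixture
def use_google_oauth():
    return os.environ.get("MERLIN_USE_GOOGLE_OAUTH", "false").lower() == "true"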