Example #1
async def test_name_fallback(model_folder: str, model_repository: ModelRepository):
    # Write a model-settings.json file with the "name" field removed
    model_settings = ModelSettings()
    model_settings_path = os.path.join(model_folder, DEFAULT_MODEL_SETTINGS_FILENAME)
    with open(model_settings_path, "w") as model_settings_file:
        d = model_settings.dict()
        del d["name"]
        d["implementation"] = get_import_path(d["implementation"])
        json.dump(d, model_settings_file)

    model_settings = model_repository._load_model_settings(model_settings_path)
    assert model_settings.name == os.path.basename(model_folder)
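
The assertion above pins down the fallback rule: when the settings file carries no `name`, the loader derives one from the folder that contains it. A minimal sketch of that rule (the helper name is illustrative, not MLServer's API):

import os

def fallback_model_name(model_settings_path: str) -> str:
    # The folder holding model-settings.json doubles as the default model name
    model_folder = os.path.dirname(model_settings_path)
    return os.path.basename(model_folder)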
Example #2
def test_model_settings_from_env(monkeypatch):
    model_name = "foo-model"
    model_version = "v0.1.0"
    model_uri = "/mnt/models/my-model"

    monkeypatch.setenv("mlserver_model_name", model_name)
    monkeypatch.setenv("mlserver_model_version", model_version)
    monkeypatch.setenv("mlserver_model_uri", model_uri)

    model_settings = ModelSettings()
    model_settings.parameters = ModelParameters()

    assert model_settings.name == model_name
    assert model_settings.version == model_version
    assert model_settings.parameters.uri == model_uri
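
This works because `ModelSettings` and `ModelParameters` are pydantic `BaseSettings` subclasses, so unset fields are filled from environment variables. A minimal sketch of that mechanism, assuming MLServer's `MLSERVER_MODEL_` prefix (the class below is illustrative, not MLServer's own):

from pydantic import BaseSettings  # pydantic v1 API, as used by MLServer at the time

class SketchSettings(BaseSettings):
    class Config:
        # Matching is case-insensitive, which is why the lowercase names
        # passed to monkeypatch.setenv above are still picked up
        env_prefix = "MLSERVER_MODEL_"

    name: str = ""
    version: str = ""

# With MLSERVER_MODEL_NAME=foo-model exported, SketchSettings().name == "foo-model"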
Example #3
def case_custom_model(custom_model: Model) -> ModelSettings:
    save(custom_model, save_env=False)
    model_uri = custom_model.details.local_folder

    return ModelSettings(
        name="custom-model",
        parameters=ModelParameters(uri=model_uri),
    )
Example #4
def case_wrapped_class_instance(inference_pipeline_class) -> ModelSettings:
    save(inference_pipeline_class, save_env=False)
    model_uri = inference_pipeline_class.pipeline.details.local_folder

    return ModelSettings(
        name="wrapped-class-instance",
        parameters=ModelParameters(uri=model_uri),
    )
Example #5
async def mlserver_runtime(model_settings: ModelSettings) -> InferenceRuntime:
    if is_lazy(model_settings):
        # NOTE: Sometimes pytest-cases may return a "LazyValue"
        model_settings = model_settings.get(request_or_item=mlserver_runtime)

    _runtime = InferenceRuntime(model_settings)
    await _runtime.load()

    return _runtime
Example #6
def case_wrapped_class(inference_pipeline_class) -> ModelSettings:
    MyClass = inference_pipeline_class.__class__

    save(MyClass, save_env=False)
    model_uri = MyClass.pipeline.details.local_folder

    return ModelSettings(
        name="wrapped-class",
        parameters=ModelParameters(uri=model_uri),
    )
Example #7
async def xgboost_model(xgboost_model_uri: str) -> XGBoostModel:
    model_settings = ModelSettings(
        name="xgboost-model",
        version="v1.2.3",
        parameters=ModelParameters(uri=xgboost_model_uri),
    )
    model = XGBoostModel(model_settings)
    await model.load()  # MLModel.load() is a coroutine, so it must be awaited

    return model
Example #8
async def sklearn_model(sklearn_model_uri: str) -> SKLearnModel:
    model_settings = ModelSettings(
        name="sklearn-model",
        version="v1.2.3",
        parameters=ModelParameters(uri=sklearn_model_uri),
    )
    model = SKLearnModel(model_settings)
    await model.load()  # MLModel.load() is a coroutine, so it must be awaited

    return model
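
A hedged sketch of how fixtures like the two above are typically exercised; the payload below (a single flat integer tensor) is an assumption for illustration, not taken from the source:

from mlserver.types import InferenceRequest, RequestInput

async def test_sklearn_predict(sklearn_model: SKLearnModel):
    # Illustrative V2 dataplane payload; real shape/datatype depend on the trained model
    payload = InferenceRequest(
        inputs=[
            RequestInput(name="input-0", shape=[1, 3], datatype="INT32", data=[1, 2, 3])
        ]
    )
    response = await sklearn_model.predict(payload)

    assert len(response.outputs) == 1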
Example #9
async def test_ready(data_plane, model_registry, ready):
    model_settings = ModelSettings(
        name="sum-model-2",
        parameters=ModelParameters(version="v1.2.3"),
    )
    new_model = SumModel(model_settings)
    await model_registry.load(new_model)

    new_model.ready = ready

    all_ready = await data_plane.ready()

    assert all_ready == ready
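
The `ready` argument suggests the test is driven through both outcomes. A reconstructed parametrization, assumed rather than shown in the source:

import pytest

# Drives `ready` through both values, so the test checks that
# DataPlane.ready() mirrors the readiness of every registered model
@pytest.mark.parametrize("ready", [True, False])
async def test_ready(data_plane, model_registry, ready):
    ...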
Example #10
def case_async_custom_model() -> ModelSettings:
    @aio.model(name="async-custom-model", platform=ModelFramework.Custom)
    async def _custom_model(payload: np.ndarray) -> np.ndarray:
        return payload.sum(keepdims=True)

    save(_custom_model, save_env=False)
    model_uri = _custom_model.details.local_folder

    return ModelSettings(
        name="async-custom-model",
        parameters=ModelParameters(uri=model_uri),
    )
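
The `case_*` functions in Examples #3, #4, #6, and #10 follow the pytest-cases naming convention. A hedged sketch of how a test would consume them; `parametrize_with_cases` is the real pytest-cases API, while the test body is illustrative:

from pytest_cases import parametrize_with_cases

@parametrize_with_cases("model_settings", cases=".")  # picks up case_* in this module
def test_model_settings_uri(model_settings: ModelSettings):
    assert model_settings.parameters.uri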
Example #11
def model_settings_pytorch_fixed(pytorch_model_uri) -> ModelSettings:
    return ModelSettings(
        name="mlflow-model",
        parameters=ModelParameters(uri=pytorch_model_uri),
    )
Example #12
def model_settings(model_uri: str) -> ModelSettings:
    return ModelSettings(
        name="mlflow-model",
        parameters=ModelParameters(uri=model_uri),
    )
Example #13
def model_settings(model_uri: str) -> ModelSettings:
    return ModelSettings(
        name="xgboost-model",
        parameters=ModelParameters(uri=model_uri, version="v1.2.3"),
    )
Example #14
def model_settings(pipeline_uri: str) -> ModelSettings:
    return ModelSettings(
        name="sum-pipeline",
        parameters=ModelParameters(uri=pipeline_uri),
    )
Example #15
File: conftest.py Project: kz33/MLServer
def xgboost_model_settings(xgboost_model_uri: str) -> ModelSettings:
    return ModelSettings(
        name="xgboost-model",
        version="v1.2.3",
        parameters=ModelParameters(uri=xgboost_model_uri),
    )
Example #16
File: conftest.py Project: kz33/MLServer
def sklearn_model_settings(sklearn_model_uri: str) -> ModelSettings:
    return ModelSettings(
        name="sklearn-model",
        version="v1.2.3",
        parameters=ModelParameters(uri=sklearn_model_uri),
    )