Example #1
def test_xgboost_save_load(booster_params, metadata,
                           save_proc):  # noqa # pylint: disable

    labels = {"stage": "dev"}

    def custom_f(x: int) -> int:
        return x + 1

    bentomodel = save_proc(booster_params,
                           metadata,
                           labels=labels,
                           custom_objects={"func": custom_f})
    assert bentomodel.info.metadata is not None
    assert_have_file_extension(bentomodel.path, ".json")
    for k in labels.keys():
        assert labels[k] == bentomodel.info.labels[k]
    assert bentomodel.custom_objects["func"](3) == custom_f(3)

    xgb_loaded = bentoml.xgboost.load(bentomodel.tag,
                                      booster_params=booster_params)
    config = json.loads(xgb_loaded.save_config())
    if not booster_params:
        assert config["learner"]["generic_param"]["nthread"] == str(
            psutil.cpu_count())
    else:
        assert config["learner"]["generic_param"]["nthread"] == str(2)
    assert isinstance(xgb_loaded, xgb.Booster)
    assert predict_df(xgb_loaded, test_df) == 1
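
Every example on this page relies on a shared `assert_have_file_extension` test helper. A minimal sketch of what such a helper might look like, assuming the saved model path is a directory and the check is simply that some file inside it carries the expected extension (this implementation is an illustration, not the library's own code):

import os


def assert_have_file_extension(directory: str, ext: str) -> None:
    # Hypothetical helper: assert that at least one file in `directory`
    # ends with the expected extension, e.g. ".json" or ".pkl".
    files = os.listdir(directory)
    assert any(f.endswith(ext) for f in files), f"no {ext} file found in {directory}"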
Example #2
def test_statsmodels_save_load(metadata, holt_model):  # noqa # pylint: disable

    labels = {"stage": "dev"}

    def custom_f(x: int) -> int:
        return x + 1

    tag = bentoml.statsmodels.save(
        TEST_MODEL_NAME,
        holt_model,
        metadata=metadata,
        labels=labels,
        custom_objects={"func": custom_f},
    )
    bentomodel = bentoml.models.get(tag)
    assert bentomodel.info.metadata is not None
    assert_have_file_extension(bentomodel.path, ".pkl")
    for k in labels.keys():
        assert labels[k] == bentomodel.info.labels[k]
    assert bentomodel.custom_objects["func"](3) == custom_f(3)

    statsmodels_loaded = bentoml.statsmodels.load(tag)

    assert isinstance(
        statsmodels_loaded,
        statsmodels.tsa.holtwinters.results.HoltWintersResultsWrapper,
    )

    np.testing.assert_array_equal(holt_model.predict(),
                                  statsmodels_loaded.predict())
Example #3
def test_tensorflow_v1_save_load(
    tf1_model_path: Callable[[], Generator[str, None, None]]
):

    labels = {"stage": "dev"}

    def custom_f(x: int) -> int:
        return x + 1

    tag = bentoml.tensorflow_v1.save(
        "tensorflow_test",
        tf1_model_path,
        labels=labels,
        custom_objects={"func": custom_f},
    )
    bentomodel = bentoml.models.get(tag)
    assert_have_file_extension(bentomodel.path, ".pb")
    for k in labels.keys():
        assert labels[k] == bentomodel.info.labels[k]
    assert bentomodel.custom_objects["func"](3) == custom_f(3)

    tf1_loaded = bentoml.tensorflow_v1.load("tensorflow_test")
    with tf.get_default_graph().as_default():
        tf.global_variables_initializer()
        prediction = _model_dunder_call(tf1_loaded, test_tensor)
        assert prediction.shape == (1, )
Example #4
def test_tensorflow_v2_save_load(
    mcls: "tf_ext.Module",
    tensor: "tf_ext.TensorLike",
    predict_fn: Callable[
        ["tf_ext.AutoTrackable", "tf_ext.TensorLike"], "tf_ext.TensorLike"
    ],
    is_ragged: bool,
):

    labels = {"stage": "dev"}

    def custom_f(x: int) -> int:
        return x + 1

    tag = bentoml.tensorflow.save(
        MODEL_NAME, mcls, labels=labels, custom_objects={"func": custom_f}
    )
    bentomodel = bentoml.models.get(tag)
    assert_have_file_extension(bentomodel.path, ".pb")
    for k in labels.keys():
        assert labels[k] == bentomodel.info.labels[k]
    assert bentomodel.custom_objects["func"](3) == custom_f(3)

    model = bentoml.tensorflow.load(MODEL_NAME)
    output = predict_fn(model, tensor)
    if is_ragged:
        assert all(output.numpy() == np.array([[15.0]] * 3))
    else:
        assert all(output.numpy() == np.array([[15.0]]))
Example #5
def test_pl_save_load():

    labels = {"stage": "dev"}

    def custom_f(x: int) -> int:
        return x + 1

    model: "pl.LightningModule" = AdditionModel()
    tag = bentoml.pytorch_lightning.save(
        "pytorch_lightning_test",
        model,
        labels=labels,
        custom_objects={"func": custom_f},
    )
    bentomodel = bentoml.models.get(tag)
    assert_have_file_extension(bentomodel.path, ".pt")
    assert bentomodel.info.context.get(
        "model_format") == "pytorch_lightning:v1"

    pl_loaded: "pl.LightningModule" = bentoml.pytorch_lightning.load(tag)
    for k in labels.keys():
        assert labels[k] == bentomodel.info.labels[k]
    assert bentomodel.custom_objects["func"](3) == custom_f(3)

    assert predict_df(pl_loaded, test_df) == [[6, 5, 4, 3]]
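
The expected prediction `[[6, 5, 4, 3]]` suggests that the `AdditionModel` fixture simply adds 1 to each element of its input. A minimal sketch of such a module, with the class body inferred from the assertion rather than taken from the test suite:

import pytorch_lightning as pl
import torch


class AdditionModel(pl.LightningModule):
    # Hypothetical LightningModule: returns its input with 1 added element-wise,
    # so an input row of [5, 4, 3, 2] predicts [6, 5, 4, 3].
    def forward(self, inputs: torch.Tensor) -> torch.Tensor:
        return torch.add(inputs, 1)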
Example #6
def test_catboost_save_load(
    model_params: t.Dict[str, t.Any],
    metadata: t.Dict[str, t.Any],
) -> None:

    labels = {"stage": "dev"}

    def custom_f(x: int) -> int:
        return x + 1

    tag = save_procedure(
        model_params,
        metadata,
        labels=labels,
        custom_objects={"func": custom_f},
    )
    _model = bentoml.models.get(tag)
    assert _model.info.metadata is not None
    assert_have_file_extension(_model.path, ".cbm")

    cbt_loaded = bentoml.catboost.load(_model.tag, model_params=model_params)
    assert isinstance(cbt_loaded, CatBoostClassifier)
    assert cbt_loaded.predict(test_df) == np.array([1])
    for k in labels.keys():
        assert labels[k] == _model.info.labels[k]
    assert _model.custom_objects["func"](3) == custom_f(3)
Example #7
def test_lightgbm_sklearn_save_load(save_sklearn_proc):
    bentomodel = save_sklearn_proc(None)
    assert_have_file_extension(bentomodel.path, ".pkl")

    sklearn_loaded = bentoml.lightgbm.load(bentomodel.tag)

    assert isinstance(sklearn_loaded, lgb.LGBMClassifier)
    assert sklearn_loaded.predict(np.array([[0] * 10] * 10)).any() == np.array(
        [0])
Example #8
def test_paddle_save_load(train_paddle_model, input_spec):
    tag = bentoml.paddle.save(
        "linear_model",
        train_paddle_model,
        input_spec=input_spec,
    )
    info = bentoml.models.get(tag)
    assert_have_file_extension(info.path, ".pdmodel")
    loaded = bentoml.paddle.load(tag)
    res1 = predict_df(loaded, test_df)
    res2 = np.array([[0.9003858]], dtype=np.float32)
    assert np.isclose(res1, res2).all()
Example #9
def test_mlflow_save_load():
    (model, data) = sklearn_model_data()
    uri = Path(current_file, "sklearn_clf")
    if not uri.exists():
        mlflow.sklearn.save_model(model, uri.resolve())
    tag = bentoml.mlflow.import_from_uri(MODEL_NAME, str(uri.resolve()))
    model_info = bentoml.models.get(tag)
    assert_have_file_extension(os.path.join(model_info.path, "sklearn_clf"),
                               ".pkl")

    loaded = bentoml.mlflow.load(tag)
    np.testing.assert_array_equal(loaded.predict(data), res_arr)  # noqa
Example #10
def test_keras_save_load(
    model: "keras.Model",
    kwargs: t.Dict[str, t.Any],
) -> None:
    tag = bentoml.keras.save(MODEL_NAME, model, **kwargs)
    model_info = bentoml.models.get(tag)
    if kwargs.get("custom_objects") is not None:
        assert_have_file_extension(model_info.path, ".pkl")
    if kwargs["store_as_json_and_weights"]:
        assert_have_file_extension(model_info.path, ".json")
        if kwargs["save_format"] == "h5":
            assert_have_file_extension(model_info.path, ".hdf5")
    else:
        if kwargs["save_format"] == "h5":
            assert_have_file_extension(model_info.path, ".h5")
    if not TF2:
        session = bentoml.keras.get_session()
        # Initialize variables in the graph/model
        session.run(tf.global_variables_initializer())
        with session.as_default():
            loaded = bentoml.keras.load(tag)
            predict_assert_equal(loaded)
    else:
        loaded = bentoml.keras.load(tag)
        predict_assert_equal(loaded)
Example #11
def test_pytorch_save_load(models):

    labels = {"stage": "dev"}

    def custom_f(x: int) -> int:
        return x + 1

    tag = models(labels=labels, custom_objects={"func": custom_f})
    bentomodel = bentoml.models.get(tag)
    assert_have_file_extension(bentomodel.path, ".pt")
    assert bentomodel.info.context.get("model_format") == "torch.save:v1"
    for k in labels.keys():
        assert labels[k] == bentomodel.info.labels[k]
    assert bentomodel.custom_objects["func"](3) == custom_f(3)

    pytorch_loaded: nn.Module = bentoml.pytorch.load(tag)
    assert predict_df(pytorch_loaded, test_df) == 5.0
Example #12
def test_lightgbm_save_load(metadata, save_proc):

    labels = {"stage": "dev"}

    def custom_f(x: int) -> int:
        return x + 1

    bentomodel = save_proc(metadata,
                           labels=labels,
                           custom_objects={"func": custom_f})
    assert bentomodel.info.metadata is not None
    for k in labels.keys():
        assert labels[k] == bentomodel.info.labels[k]
    assert bentomodel.custom_objects["func"](3) == custom_f(3)
    assert_have_file_extension(bentomodel.path, ".txt")

    lgb_loaded = bentoml.lightgbm.load(bentomodel.tag)

    assert isinstance(lgb_loaded, lgb.basic.Booster)
    assert lgb_loaded.predict(np.array([[0]])) == np.array([0.0])
Example #13
def test_sklearn_save_load(metadata: t.Dict[str, t.Any]) -> None:

    labels = {"stage": "dev"}

    def custom_f(x: int) -> int:
        return x + 1

    _, data = sklearn_model_data(clf=RandomForestClassifier)
    tag = save_procedure(metadata, labels=labels, custom_objects={"func": custom_f})
    bentomodel = bentoml.models.get(tag)
    assert bentomodel.info.metadata is not None
    assert_have_file_extension(bentomodel.path, ".pkl")
    for k in labels.keys():
        assert labels[k] == bentomodel.info.labels[k]
    assert bentomodel.custom_objects["func"](3) == custom_f(3)

    loaded = bentoml.sklearn.load(bentomodel.tag)

    assert isinstance(loaded, RandomForestClassifier)

    np.testing.assert_array_equal(loaded.predict(data), res_arr)
Example #14
def test_pycaret_save_load(
    get_pycaret_data, metadata, save_proc
):  # noqa # pylint: disable

    labels = {"stage": "dev"}

    def custom_f(x: int) -> int:
        return x + 1

    _, test_data = get_pycaret_data
    bentomodel = save_proc(metadata, labels=labels, custom_objects={"func": custom_f})
    assert bentomodel.info.metadata is not None
    assert_have_file_extension(bentomodel.path, ".pkl")
    for k in labels.keys():
        assert labels[k] == bentomodel.info.labels[k]
    assert bentomodel.custom_objects["func"](3) == custom_f(3)

    pycaret_loaded = bentoml.pycaret.load(
        bentomodel.tag,
    )
    assert isinstance(pycaret_loaded, sklearn.pipeline.Pipeline)
    assert predict_model(pycaret_loaded, data=test_data)["Score"][0] == 0.7609
Example #15
def test_onnxmlir_save_load(
    compile_model,
    tmpdir,
):  # noqa

    labels = {"stage": "dev"}

    def custom_f(x: int) -> int:
        return x + 1

    model = os.path.join(tmpdir, "model.so")
    tag = bentoml.onnxmlir.save("onnx_model_tests",
                                model,
                                labels=labels,
                                custom_objects={"func": custom_f})
    bentomodel = bentoml.models.get(tag)
    assert "compiled_path" in bentomodel.info.options
    assert_have_file_extension(str(bentomodel.path), ".so")
    for k in labels.keys():
        assert labels[k] == bentomodel.info.labels[k]
    assert bentomodel.custom_objects["func"](3) == custom_f(3)

    session = bentoml.onnxmlir.load(tag)
    assert predict_df(session, test_df)[0] == np.array([[15.0]])
Example #16
def test_fastai_save_pack(tmpdir):
    pack_models(tmpdir)
    assert_have_file_extension(tmpdir, ".pkl")

    loaded_fastai: "Learner" = FastAIModel.load(tmpdir)
    assert predict_df(loaded_fastai, test_df) == 5.0