Example 1
import bentoml
import pandas as pd

def test_pytorch_lightning_model_artifact():
    # PytorchLightningService, TorchLightningModel and export_service_bundle
    # come from the surrounding test suite.
    svc = PytorchLightningService()
    model = TorchLightningModel()
    svc.pack('model', model)

    # Export the bundle, reload it, and verify inference on the reloaded copy.
    with export_service_bundle(svc) as saved_path:
        svc_loaded = bentoml.load(saved_path)
        result = svc_loaded.predict(pd.DataFrame([[5, 4, 3, 2]]))
        assert result.tolist() == [[6, 5, 4, 3]]
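All of these examples lean on export_service_bundle, a helper from BentoML's test suite rather than the public API. A minimal sketch of what such a helper can look like, assuming the BentoML 0.x save_to_dir API (the actual implementation may differ):

import contextlib
import tempfile

import bentoml

@contextlib.contextmanager
def export_service_bundle(bento_service):
    # Save the service into a temporary directory and yield its path;
    # the directory is deleted when the with-block exits.
    with tempfile.TemporaryDirectory() as temp_dir:
        bentoml.save_to_dir(bento_service, temp_dir)
        yield temp_dir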
Example 2
def test_keras_artifact_loaded(svc):
    # 'svc' is a fixture providing a service with a packed Keras model;
    # 'test_data' is defined at module level in the test suite.
    with export_service_bundle(svc) as saved_path:
        loaded = bentoml.load(saved_path)
        assert (
            loaded.predict([test_data]) == 15.0
        ), 'Inference with predict on the reloaded Keras artifact does not match expected'
        assert (
            loaded.predict2([test_data]) == 15.0
        ), 'Inference with predict2 on the reloaded Keras artifact does not match expected'
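Examples 2 and 3 depend on a svc fixture that is not shown. One plausible shape for the underlying service class, assuming BentoML 0.13-era APIs (KerasModelArtifact, DataframeInput); the class name here is illustrative, and the TF2 service in Example 3 would follow the same pattern with three @bentoml.api methods:

import bentoml
from bentoml.adapters import DataframeInput
from bentoml.frameworks.keras import KerasModelArtifact

@bentoml.env(infer_pip_packages=True)
@bentoml.artifacts([KerasModelArtifact('model')])
class KerasExampleService(bentoml.BentoService):
    # Two separately named inference APIs over the same packed model,
    # matching the predict/predict2 calls in the test above.
    @bentoml.api(input=DataframeInput(), batch=True)
    def predict(self, df):
        return self.artifacts.model.predict(df)

    @bentoml.api(input=DataframeInput(), batch=True)
    def predict2(self, df):
        return self.artifacts.model.predict(df)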
Example 3
def test_tensorflow_2_artifact_loaded(svc):
    # Here 'svc' packs a TensorFlow 2 model exposing three prediction APIs;
    # 'test_tensor' and 'ragged_data' are module-level test inputs.
    with export_service_bundle(svc) as saved_path:
        svc_loaded = bentoml.load(saved_path)
        assert (
            svc_loaded.predict1(test_tensor) == 15.0
        ), 'Inference with predict1 on the reloaded TF2 artifact does not match expected'
        assert (
            svc_loaded.predict2(test_tensor) == 15.0
        ), 'Inference with predict2 on the reloaded TF2 artifact does not match expected'
        # predict3 returns a tensor, so compare element-wise and reduce with .all().
        assert (
            (svc_loaded.predict3(ragged_data) == 15.0).numpy().all()
        ), 'Inference with predict3 on the reloaded TF2 artifact does not match expected'
Example 4
@pytest.fixture()  # decorator assumed: the yield pattern marks this as a pytest fixture
def image(svc, clean_context):
    # Build a Docker image from the exported bundle; enter_context keeps it
    # alive until the shared clean_context stack closes at teardown.
    with export_service_bundle(svc) as saved_path:
        yield clean_context.enter_context(build_api_server_docker_image(saved_path))
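clean_context is likewise not defined in these snippets. A minimal sketch, assuming it is a session-scoped contextlib.ExitStack shared across fixtures (an assumption, not confirmed by the examples):

import contextlib
import pytest

@pytest.fixture(scope='session')
def clean_context():
    # Contexts registered via enter_context() stay open until the stack
    # itself closes at the end of the test session.
    with contextlib.ExitStack() as stack:
        yield stack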
Example 5
@pytest.fixture()  # decorator assumed; otherwise pytest would collect this as a test
def test_svc_bundle(clean_context, test_svc):
    # Enter the bundle context on the shared stack so the temp directory outlives this call.
    return clean_context.enter_context(export_service_bundle(test_svc))
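A hypothetical consumer of this fixture (the test name and assertion are illustrative): given the bundle path, it can reload and exercise the service:

def test_bundle_can_be_loaded(test_svc_bundle):
    # Hypothetical test: reload the exported bundle from its path.
    loaded = bentoml.load(test_svc_bundle)
    assert loaded is not None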
Example 6
@pytest.fixture()  # decorator assumed, as in Example 4
def image(test_svc, clean_context):
    # Same pattern as Example 4, but tagging the built image "example_service".
    with export_service_bundle(test_svc) as bundle_dir:
        yield clean_context.enter_context(
            build_api_server_docker_image(bundle_dir, "example_service"))
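And a hypothetical consumer of the image fixture, assuming build_api_server_docker_image yields a docker-py Image object (the snippets do not show its return type):

def test_image_is_tagged(image):
    # Hypothetical check: a built docker-py Image exposes its tags.
    assert image.tags, 'expected the built API server image to carry a tag'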