Example #1
def tf_serving(mode, models, required_models, verbose):
    # Deferred import: TensorFlow is an optional modelkit dependency.
    from modelkit.utils.tensorflow import deploy_tf_models

    # _configure_from_cli_arguments is a private helper defined alongside
    # this command in modelkit's CLI module; it builds a ModelLibrary from
    # the command-line arguments.
    service = _configure_from_cli_arguments(models, required_models,
                                            {"lazy_loading": True})

    deploy_tf_models(service, mode, verbose=verbose)
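In modelkit this function backs a CLI command. A minimal sketch of how it might be wired up with click; the decorator details are assumptions inferred from the signature, and the mode choices come from Example #3 below:

import click


@click.command("tf-serving")
@click.argument(
    "mode", type=click.Choice(["local-docker", "local-process", "remote"])
)
@click.option("--models", "-m", multiple=True)
@click.option("--required-models", "-r", multiple=True)
@click.option("--verbose", is_flag=True)
def tf_serving_cmd(mode, models, required_models, verbose):
    """Configure a ModelLibrary from CLI arguments and write the
    TF Serving configuration for the given deployment mode."""
    tf_serving(mode, models, required_models, verbose)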
Example #2
import os
import subprocess

# Import paths follow modelkit's layout (assumed for this snippet).
from modelkit.core.models.tensorflow_model import TensorflowModel
from modelkit.utils.tensorflow import connect_tf_serving, deploy_tf_models


def tf_serving_fixture(request, lib, deployment="docker"):
    cmd = [
        "--port=8500",
        "--rest_api_port=8501",
    ]

    # this cannot be easily tested in the CI because it requires installing
    # tf serving as a process
    if deployment == "process":  # pragma: no cover
        deploy_tf_models(lib, "local-process", config_name="testing")
        proc = subprocess.Popen([
            "tensorflow_model_server",
            "--model_config_file="
            f"{os.environ['MODELKIT_ASSETS_DIR']}/testing.config",
        ] + cmd)

        def finalize():
            proc.terminate()

    else:
        deploy_tf_models(lib, "local-docker", config_name="testing")
        # kill previous tfserving container (if any)
        subprocess.call(
            ["docker", "rm", "-f", "modelkit-tfserving-tests"],
            stderr=subprocess.DEVNULL,
        )
        # start tfserving as docker container
        tfserving_proc = subprocess.Popen([
            "docker",
            "run",
            "--name",
            "modelkit-tfserving-tests",
            "--volume",
            f"{os.environ['MODELKIT_ASSETS_DIR']}:/config",
            "-p",
            "8500:8500",
            "-p",
            "8501:8501",
            "tensorflow/serving:2.4.0",
            "--model_config_file=/config/testing.config",
        ] + cmd)

        def finalize():
            subprocess.call(["docker", "kill", "modelkit-tfserving-tests"])
            tfserving_proc.terminate()

    request.addfinalizer(finalize)
    # Connect to the TF Serving instance (retrying until it is reachable),
    # probing with the first TensorflowModel among the library's required models.
    connect_tf_serving(
        next(
            x for x in lib.required_models
            if issubclass(lib.configuration[x].model_type, TensorflowModel)
        ),
        "localhost",
        8500,
        "grpc",
    )
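In a test suite this helper would typically be wrapped in a session-scoped pytest fixture. A minimal sketch; the fixture name is illustrative and DummyTFModel stands in for any TensorflowModel subclass (as in Example #3):

import pytest

from modelkit import ModelLibrary


@pytest.fixture(scope="session")
def tf_serving(request):
    # DummyTFModel as defined in Example #3 (illustrative).
    lib = ModelLibrary(models=[DummyTFModel], settings={"lazy_loading": True})
    tf_serving_fixture(request, lib, deployment="docker")
    return lib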
Example #3
import os
import shutil
import tempfile

import numpy as np
import pytest

# Import paths follow modelkit's layout (assumed for this snippet).
from modelkit import ModelLibrary, testing
from modelkit.core.models.tensorflow_model import TensorflowModel
from modelkit.utils.tensorflow import deploy_tf_models

# Assumed: TEST_DIR points at the test package directory, as defined in
# the test suite's conftest.
TEST_DIR = os.path.dirname(__file__)


def test_deploy_tf_models(monkeypatch):
    class DummyTFModel(TensorflowModel):
        CONFIGURATIONS = {
            "dummy_tf_model": {
                "asset": "dummy_tf_model:0.0",
                "model_settings": {
                    "output_dtypes": {
                        "lambda": np.float32
                    },
                    "output_tensor_mapping": {
                        "lambda": "nothing"
                    },
                    "output_shapes": {
                        "lambda": (3, 2, 1)
                    },
                },
            }
        }

    # "remote" mode needs the storage settings (bucket/prefix/provider)
    # that are only configured further down, hence the ValueError here.
    with pytest.raises(ValueError):
        lib = ModelLibrary(models=[DummyTFModel],
                           settings={"lazy_loading": True})
        deploy_tf_models(lib, "remote", "remote")

    ref = testing.ReferenceText(
        os.path.join(TEST_DIR, "testdata", "tf_configs"))
    with tempfile.TemporaryDirectory() as tmp_dir:
        monkeypatch.setenv("MODELKIT_ASSETS_DIR", tmp_dir)
        monkeypatch.setenv("MODELKIT_STORAGE_BUCKET", TEST_DIR)
        monkeypatch.setenv("MODELKIT_STORAGE_PREFIX", "testdata")
        monkeypatch.setenv("MODELKIT_STORAGE_PROVIDER", "local")

        shutil.copytree(os.path.join(TEST_DIR, "testdata"),
                        os.path.join(tmp_dir, "testdata"))
        os.makedirs(
            os.path.join(tmp_dir, "testdata", "dummy_tf_model_sub", "0.0"))
        lib = ModelLibrary(models=[DummyTFModel],
                           settings={"lazy_loading": True})
        deploy_tf_models(lib, "local-docker", "local-docker")
        with open(os.path.join(tmp_dir, "local-docker.config")) as f:
            ref.assert_equal("local-docker.config", f.read())

        deploy_tf_models(lib, "remote", "remote")
        with open(os.path.join(tmp_dir, "remote.config")) as f:
            config_data = f.read().replace(TEST_DIR, "STORAGE_BUCKET")
            ref.assert_equal("remote.config", config_data)

        # local-process mode depends on the tmp dir above and on the
        # platform, so its output cannot be compared against a reference
        deploy_tf_models(lib, "local-process", "local-process")
Example #4
import pytest

# Import paths follow modelkit's layout (assumed for this snippet).
from modelkit import ModelLibrary
from modelkit.core.models.tensorflow_model import TensorflowModel
from modelkit.utils.tensorflow import deploy_tf_models


def test_deploy_tf_models_no_asset():
    np = pytest.importorskip("numpy")

    class DummyTFModelNoAsset(TensorflowModel):
        CONFIGURATIONS = {
            "dummy_non_tf_model": {
                "model_settings": {
                    "output_dtypes": {
                        "lambda": np.float32
                    },
                    "output_tensor_mapping": {
                        "lambda": "nothing"
                    },
                    "output_shapes": {
                        "lambda": (3, 2, 1)
                    },
                }
            }
        }

    lib = ModelLibrary(models=DummyTFModelNoAsset,
                       settings={"lazy_loading": True})
    # A TensorflowModel configured without an asset has no saved-model
    # directory to point TF Serving at, so deployment fails.
    with pytest.raises(ValueError):
        deploy_tf_models(lib, "local-docker")
Example #5
from modelkit import Model, ModelLibrary
from modelkit.utils.tensorflow import deploy_tf_models


def test_deploy_tf_models_no_tf_model():
    class DummyNonTFModel(Model):
        CONFIGURATIONS = {"dummy_non_tf_model": {}}

    lib = ModelLibrary(models=DummyNonTFModel, settings={"lazy_loading": True})
    # With no TensorflowModel in the library there is nothing to deploy;
    # the call completes without raising.
    deploy_tf_models(lib, "local-docker")