Example #1
def test_eval_model_zip(bioimageio_model_bytes):
    with ZipFile(bioimageio_model_bytes) as zf:
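        # preserve_batch_dim=True keeps the batch ("b") axis in the metadata asserted below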
        pipeline = eval_model_zip(zf, devices=["cpu"], preserve_batch_dim=True)

    assert pipeline.input_axes == "bcyx"
    assert pipeline.output_axes == "bcyx"
    assert pipeline.input_shape == [("b", 1), ("c", 1), ("y", 512), ("x", 512)]
    assert pipeline.halo == [("b", 0), ("c", 0), ("y", 32), ("x", 32)]
Example #2
def test_eval_onnx_model_zip_predict(bioimageio_unet2d_onnx_bytes,
                                     bioimageio_unet2d_onnx_test_data):
    with ZipFile(bioimageio_unet2d_onnx_bytes) as zf:
        adapter = eval_model_zip(zf, devices=["cpu"], preserve_batch_dim=True)
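        # With the batch axis preserved, the adapter is fed a 4-d ("b", "c", ...) input below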
        test_input = xarray.DataArray(
            np.load(bioimageio_unet2d_onnx_test_data["test_input"]),
            dims=("b", "c", "x", "y"),
        )
        # TODO: Figure out why test output doesn't match result
        adapter.forward(test_input)
Example #3
def test_eval_model_zip_metadata_with_batch_dim(
        bioimageio_unet2d_torchscript_bytes,
        bioimageio_unet2d_torchscript_test_data):
    with ZipFile(bioimageio_unet2d_torchscript_bytes) as zf:
        pipeline = eval_model_zip(zf, devices=["cpu"], preserve_batch_dim=True)
        assert pipeline.input_axes == "bcyx"
        assert pipeline.output_axes == "bcyx"
        assert pipeline.input_shape == [("b", 1), ("c", 1), ("y", 512), ("x", 512)]
        assert pipeline.halo == [("b", 0), ("c", 0), ("y", 32), ("x", 32)]
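The metadata asserted in these tests is plain Python data, so callers can consume it directly. As a hedged illustration (the helper below is hypothetical and not part of the tested API), one common way to read a halo is as the border to discard on each side of the input:

def valid_region(input_shape, halo):
    # Hypothetical helper: trim the per-axis halo from both borders of the
    # reported input shape. Assumes halo entries count pixels per border.
    halo_by_axis = dict(halo)
    return [(axis, size - 2 * halo_by_axis.get(axis, 0)) for axis, size in input_shape]

# With the metadata asserted above this yields
# [("b", 1), ("c", 1), ("y", 448), ("x", 448)], i.e. 512 - 2 * 32 per spatial axis.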
Example #4
    def __init__(self, model_zip: bytes, devices: List[str]) -> None:
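        # Optional cache directory for the unpacked model, configured via the
        # PYBIO_CACHE_PATH environment variable.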
        cache_path = os.getenv("PYBIO_CACHE_PATH", None)
        if cache_path is not None:
            cache_path = Path(cache_path)

        with zipfile.ZipFile(io.BytesIO(model_zip)) as model_file:
            self._model = eval_model_zip(model_file, devices, cache_path=cache_path)

        self._datasets = {}
        self._worker = base.SessionBackend(self._model)
Example #5
def test_eval_tensorflow_model_zip(pybio_dummy_tensorflow_model_bytes, cache_path):
    with ZipFile(pybio_dummy_tensorflow_model_bytes) as zf:
        pipeline = eval_model_zip(zf, devices=["cpu"], cache_path=cache_path)
        assert isinstance(pipeline, PredictionPipeline)

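        # The dummy TensorFlow model is expected to map the zero input to a
        # constant array filled with 42, which the assertion below checks.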
        test_input = xr.DataArray(np.zeros(shape=(1, 128, 128)), dims=("c", "y", "x"))
        out_arr = np.ones(shape=(1, 128, 128))
        out_arr.fill(42)
        test_output = xr.DataArray(out_arr, dims=("c", "y", "x"))
        result = pipeline.forward(test_input)
        xr.testing.assert_equal(result, test_output)
Example #6
def test_eval_model_zip_metadata_no_batch_dim(
        pybio_unet2d_torchscript_bytes, pybio_unet2d_torchscript_test_data,
        cache_path):
    with ZipFile(pybio_unet2d_torchscript_bytes) as zf:
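        # preserve_batch_dim=False drops the batch axis, so the metadata below
        # reports "cyx" rather than "bcyx"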
        pipeline = eval_model_zip(zf,
                                  devices=["cpu"],
                                  cache_path=cache_path,
                                  preserve_batch_dim=False)
        assert pipeline.name == "UNet 2D Nuclei Broad"
        assert pipeline.input_axes == "cyx"
        assert pipeline.output_axes == "cyx"
        assert pipeline.input_shape == [("c", 1), ("y", 512), ("x", 512)]
        assert pipeline.halo == [("c", 0), ("y", 32), ("x", 32)]
Example #7
def test_eval_torchscript_model_zip_predict(
        bioimageio_unet2d_torchscript_bytes,
        bioimageio_unet2d_torchscript_test_data):
    with ZipFile(bioimageio_unet2d_torchscript_bytes) as zf:
        pipeline = eval_model_zip(zf, devices=["cpu"], preserve_batch_dim=True)
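        # Load the test input/output pair shipped with the model and check the
        # prediction against the reference to 4 decimal places.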
        test_input = xarray.DataArray(
            np.load(bioimageio_unet2d_torchscript_test_data["test_input"]).astype(np.float32),
            dims=("b", "c", "x", "y"),
        )
        test_output = np.load(bioimageio_unet2d_torchscript_test_data["test_output"])
        result = pipeline.forward(test_input)
        assert_array_almost_equal(result.data, test_output, decimal=4)
Example #8
def test_eval_onnx_model_zip(pybio_unet2d_onnx_bytes, cache_path):
    with ZipFile(pybio_unet2d_onnx_bytes) as zf:
        adapter = eval_model_zip(zf, devices=["cpu"], cache_path=cache_path)
        assert isinstance(adapter, PredictionPipeline)
Example #9
def test_eval_model_zip(pybio_model_bytes, cache_path):
    with ZipFile(pybio_model_bytes) as zf:
        exemplum = eval_model_zip(zf, devices=["cpu"], cache_path=cache_path)
        assert isinstance(exemplum, PredictionPipeline)
Example #10
def test_eval_tensorflow_model_zip(pybio_dummy_tensorflow_model_bytes,
                                   cache_path):
    with ZipFile(pybio_dummy_tensorflow_model_bytes) as zf:
        exemplum = eval_model_zip(zf, devices=["cpu"], cache_path=cache_path)
        assert isinstance(exemplum, ModelAdapter)
Example #11
File: process.py  Project: FynnBe/tiktorch
    def __init__(self, model_zip: bytes, devices: List[str]) -> None:
        with zipfile.ZipFile(io.BytesIO(model_zip)) as model_file:
            self._model = eval_model_zip(model_file, devices)

        self._datasets = {}
        self._worker = base.SessionBackend(self._model)
Example #12
def test_eval_onnx_model_zip(bioimageio_unet2d_onnx_bytes):
    with ZipFile(bioimageio_unet2d_onnx_bytes) as zf:
        adapter = eval_model_zip(zf, devices=["cpu"])
        assert isinstance(adapter, PredictionPipeline)
Example #13
def test_eval_model_zip(bioimageio_model_bytes):
    with ZipFile(bioimageio_model_bytes) as zf:
        exemplum = eval_model_zip(zf, devices=["cpu"])
        assert isinstance(exemplum, PredictionPipeline)
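Read together, the examples above suggest a common calling pattern: open the packaged model with ZipFile, evaluate it into a PredictionPipeline with eval_model_zip, and feed it an xarray.DataArray whose dims match the axes the pipeline reports. A minimal sketch along those lines (the model path, the zero-valued input, and the import note are assumptions, not taken from the tests):

import numpy as np
import xarray
from zipfile import ZipFile

# eval_model_zip is the function exercised throughout these examples;
# import it from wherever your project exposes it.
with ZipFile("model.zip") as zf:  # placeholder path
    pipeline = eval_model_zip(zf, devices=["cpu"], preserve_batch_dim=True)

# Build an input matching the axes and shape reported by the pipeline.
shape = [size for _, size in pipeline.input_shape]
test_input = xarray.DataArray(np.zeros(shape, dtype=np.float32), dims=tuple(pipeline.input_axes))
result = pipeline.forward(test_input)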