Example #1
# assumed imports for this snippet: os, torch, shutil.copyfile, and the
# ruamel-based yaml instance from bioimageio.spec.shared
import os
from shutil import copyfile

import torch
from bioimageio.spec.shared import yaml


def _write_depedencies(export_folder, dependencies):
    dep_path = os.path.join(export_folder, "environment.yaml")
    if dependencies is None:
        ver = torch.__version__
        major, minor = list(map(int, ver.split(".")[:2]))
        assert major == 1
        # the torch zip serialization format changed in torch 1.6, so models
        # exported with newer versions need at least torch 1.6 to load
        torch_min_version = "1.6" if minor >= 6 else "1.0"
        dependencies = {
            "channels": ["pytorch", "conda-forge"],
            "name": "torch-em-deploy",
            "dependencies": [f"pytorch>={torch_min_version},<2.0"]
        }
        with open(dep_path, "w") as f:
            yaml.dump(dependencies, f)
    else:
        assert os.path.exists(dependencies)
        # open the file explicitly; the shared yaml instance expects a stream or Path
        with open(dependencies) as f:
            dep = yaml.load(f)
        assert "channels" in dep
        assert "name" in dep
        assert "dependencies" in dep
        copyfile(dependencies, dep_path)
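A minimal usage sketch (the folder name is hypothetical; passing None auto-generates the environment file from the installed torch version):

os.makedirs("./exported", exist_ok=True)
_write_depedencies("./exported", None)  # writes ./exported/environment.yaml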
Example #2
# likely a pytest fixture body (decorator stripped in this listing; assumed
# import: the shared yaml instance); "request" carries the parametrized version
def get_unet2d_nuclei_broad(unet2d_nuclei_broad_base_path, request) -> dict:
    if request.param == "v0_4_5":
        v = ""
    else:
        v = f"_{request.param}"

    f_name = f"rdf{v}.yaml"
    path = unet2d_nuclei_broad_base_path / f_name
    return yaml.load(path)
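A sketch of how the parameter resolves to a file name; the current format version maps to the plain rdf.yaml, older versions get a suffix (the "v0_4_0" value is hypothetical):

for param in ("v0_4_5", "v0_4_0"):
    v = "" if param == "v0_4_5" else f"_{param}"
    print(f"rdf{v}.yaml")  # -> rdf.yaml, rdf_v0_4_0.yaml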
Example #3
# assumed imports: pathlib.Path, traceback, the shared yaml instance, and the
# helpers test_model and write_summary
def write_test_summaries(rdf_dir: Path, resource_id: str, version_id: str,
                         summaries_dir: Path, postfix: str):
    for rdf_path in rdf_dir.glob(f"{resource_id}/{version_id}/rdf.yaml"):
        test_name = f"reproduce test outputs with ilastik {postfix}"
        error = None
        status = None
        reason = None
        try:
            rdf = yaml.load(rdf_path)
        except Exception as e:
            error = f"Unable to load rdf: {e}"
            status = "failed"
            rdf = {}

        rd_id = rdf.get("id")
        if rd_id is None or not isinstance(rd_id, str):
            print(
                f"::warning file=scripts/test_with_ilastik.py,line=37,endline=41,title=Invalid RDF::"
                f"Missing/invalid 'id' in rdf {str(rdf_path.relative_to(rdf_dir).parent)}"
            )
            continue

        if rdf.get("type") != "model":
            status = "skipped"
            reason = "not a model RDF"

        # the keys of the "weights" entry are the available weight format names
        weight_formats = list(rdf.get("weights", []))
        if not isinstance(weight_formats, list) or not weight_formats:
            status = "failed"
            error = f"Missing/invalid weight formats for {rd_id}"

        if status:
            # write single test summary
            write_summary(summaries_dir / rd_id /
                          f"test_summary_{postfix}.yaml",
                          name=test_name,
                          status=status,
                          error=error,
                          reason=reason)
            continue

        # write test summary for each weight format
        for weight_format in weight_formats:
            try:
                summary = test_model(rdf_path, weight_format=weight_format)
            except Exception as e:
                summary = dict(error=str(e),
                               traceback=traceback.format_tb(e.__traceback__))

            summary["name"] = f"{test_name} using {weight_format} weights"
            write_summary(
                summaries_dir / rd_id /
                f"test_summary_{weight_format}_{postfix}.yaml", **summary)
Example #4
# assumed import: the shared yaml instance; tmp_path is the pytest tmp_path fixture
def test_update_rdf_using_paths(unet2d_nuclei_broad_base_path, tmp_path):
    from bioimageio.spec.commands import update_rdf

    in_path = unet2d_nuclei_broad_base_path / "rdf.yaml"
    assert in_path.exists()
    update_path = tmp_path / "update.yaml"
    yaml.dump(
        dict(name="updated",
             outputs=[{
                 "name": "updated",
                 "halo": ["KEEP", "DROP", 0, 9, 9]
             }]), update_path)
    out_path = tmp_path / "output.yaml"
    update_rdf(in_path, update_path, out_path)
    actual = yaml.load(out_path)
    assert actual["name"] == "updated"
    assert actual["outputs"][0]["name"] == "updated"
    assert actual["outputs"][0]["halo"] == [0, 0, 9, 9]
Example #5
def export_model():
    import imageio
    import h5py
    from shutil import copyfile
    from torch_em.util import export_biomageio_model, get_default_citations
    from bioimageio.spec.shared import yaml

    with h5py.File("./data/gt_image_000.h5", "r") as f:
        input_data = f["raw/serum_IgG/s0"][:256, :256]
    imageio.imwrite("./cover.jpg", input_data)

    doc = "Example Model: Different Output Shape"
    cite = get_default_citations(model="UNet2d")

    export_biomageio_model(
        "./checkpoints/diff-output-shape",
        "./exported",
        input_data=input_data,
        authors=[{"name": "Constantin Pape; @constantinpape"}],
        tags=["segmentation"],
        license="CC-BY-4.0",
        documentation=doc,
        git_repo="https://github.com/constantinpape/torch-em.git",
        cite=cite,
        covers=["./cover.jpg"],
        input_optional_parameters=False
    )

    rdf_path = "./exported/rdf.yaml"
    with open(rdf_path, "r") as f:
        rdf = yaml.load(f)

    # update the shape descriptions
    rdf["inputs"][0]["shape"] = {"min": [1, 1, 32, 32], "step": [0, 0, 16, 16]}
    rdf["outputs"][0]["shape"] = {"reference_input": "input", "offset": [0, 0, 0, 0], "scale": [1, 1, 0.5, 0.5]}

    # update the network description
    rdf["source"] = "./resize_unet.py:ResizeUNet"
    rdf["kwargs"] = dict(in_channels=1, out_channels=1, depth=3, initial_features=16)
    copyfile("./resize_unet.py", "./exported/resize_unet.py")

    with open(rdf_path, "w") as f:
        yaml.dump(rdf, f)
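A sketch of how the parametrized shapes above resolve, assuming the bioimageio spec 0.4 rules (valid input shapes are min + k * step; output[i] = scale[i] * input[i] + 2 * offset[i]):

min_shape, step = [1, 1, 32, 32], [0, 0, 16, 16]
input_shape = [m + 2 * s for m, s in zip(min_shape, step)]  # k = 2 -> [1, 1, 64, 64]
scale, offset = [1, 1, 0.5, 0.5], [0, 0, 0, 0]
output_shape = [int(sc * i + 2 * o) for sc, i, o in zip(scale, input_shape, offset)]
assert output_shape == [1, 1, 32, 32]  # the spatial dimensions are halved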
Example #6
# assumed imports: subprocess and the shared yaml instance; this is the CLI
# counterpart of the test in Example #4
def test_update_rdf(unet2d_nuclei_broad_base_path, tmp_path):
    in_path = unet2d_nuclei_broad_base_path / "rdf.yaml"
    assert in_path.exists()
    update_path = tmp_path / "update.yaml"
    yaml.dump(
        dict(name="updated",
             outputs=[{
                 "name": "updated",
                 "halo": ["KEEP", "DROP", 0, 9, 9]
             }]), update_path)
    out_path = tmp_path / "output.yaml"
    ret = subprocess.run([
        "bioimageio", "update-rdf",
        str(in_path),
        str(update_path),
        str(out_path)
    ])
    assert ret.returncode == 0
    actual = yaml.load(out_path)
    assert actual["name"] == "updated"
    assert actual["outputs"][0]["name"] == "updated"
    assert actual["outputs"][0]["halo"] == [0, 0, 9, 9]
Example #7
def export_model():
    import h5py
    from torch_em.util import export_biomageio_model, get_default_citations
    from bioimageio.spec.shared import yaml

    with h5py.File("./data/gt_image_000.h5", "r") as f:
        input_data = f["raw/serum_IgG/s0"][:256, :256]

    doc = "Example Model: Fixed Shape"
    cite = get_default_citations(model="UNet2d")

    export_biomageio_model(
        "./checkpoints/fixed-shape",
        "./exported",
        input_data=input_data,
        authors=[{
            "name": "Constantin Pape; @constantinpape"
        }],
        tags=["segmentation"],
        license="CC-BY-4.0",
        documentation=doc,
        git_repo="https://github.com/constantinpape/torch-em.git",
        cite=cite,
        input_optional_parameters=False)

    shape = (1, 1) + input_data.shape
    assert len(shape) == 4

    # replace the shape
    rdf_path = "./exported/rdf.yaml"
    with open(rdf_path, "r") as f:
        rdf = yaml.load(f)
    rdf["inputs"][0]["shape"] = shape
    rdf["outputs"][0]["shape"] = shape
    with open(rdf_path, "w") as f:
        yaml.dump(rdf, f)
Example #8
def export_model():
    import imageio
    import h5py
    from copy import deepcopy
    from shutil import copyfile
    from torch_em.util import add_weight_formats, export_biomageio_model, get_default_citations
    from bioimageio.spec.shared import yaml

    with h5py.File("./data/gt_image_000.h5", "r") as f:
        input_data = [
            f["raw/serum_IgG/s0"][:256, :256],
            f["raw/nuclei/s0"][:256, :256],
        ]
    imageio.imwrite("./cover.jpg", input_data[0])

    doc = "Example Model: Different Output Shape"
    cite = get_default_citations(model="UNet2d")

    export_biomageio_model(
        "./checkpoints/multi-tensor",
        "./exported",
        input_data=input_data,
        authors=[{
            "name": "Constantin Pape; @constantinpape"
        }],
        tags=["segmentation"],
        license="CC-BY-4.0",
        documentation=doc,
        git_repo="https://github.com/constantinpape/torch-em.git",
        cite=cite,
        covers=["./cover.jpg"],
        input_optional_parameters=False)
    add_weight_formats("./exported", ["onnx", "torchscript"])

    rdf_path = "./exported/rdf.yaml"
    with open(rdf_path, "r") as f:
        rdf = yaml.load(f)

    # update the inputs / output descriptions
    rdf["inputs"][0]["name"] = "input0"
    rdf["inputs"][0]["shape"] = {"min": [1, 1, 32, 32], "step": [0, 0, 16, 16]}

    input1 = deepcopy(rdf["inputs"][0])
    input1["name"] = "input1"
    rdf["inputs"].append(input1)

    rdf["outputs"][0]["name"] = "output0"
    rdf["outputs"][0]["shape"] = {
        "reference_input": "input0",
        "offset": [0, 0, 0, 0],
        "scale": [1, 1, 1, 1]
    }

    output1 = deepcopy(rdf["outputs"][0])
    output1["name"] = "output1"
    output1["shape"]["reference_input"] = "input1"
    rdf["outputs"].append(output1)

    # update the network description
    rdf["source"] = "./multi_tensor_unet.py:MultiTensorUNet"
    rdf["kwargs"] = dict(in_channels=2,
                         out_channels=2,
                         depth=3,
                         initial_features=16)
    copyfile("./multi_tensor_unet.py", "./exported/multi_tensor_unet.py")

    with open(rdf_path, "w") as f:
        yaml.dump(rdf, f)
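A quick sanity check one might run on the rewritten RDF (hypothetical, mirroring the edits above):

import pathlib
from bioimageio.spec.shared import yaml

rdf = yaml.load(pathlib.Path("./exported/rdf.yaml"))
assert [ipt["name"] for ipt in rdf["inputs"]] == ["input0", "input1"]
assert [out["shape"]["reference_input"] for out in rdf["outputs"]] == ["input0", "input1"]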
Example #9
# likely a pytest fixture body (assumed imports: pathlib and the shared yaml instance)
def unet2d_nuclei_broad_collection():
    return yaml.load(
        pathlib.Path(__file__).parent /
        "../example_specs/collections/unet2d_nuclei_broad_coll/rdf.yaml")
Example #10
# likely a pytest fixture body (assumed import: the shared yaml instance)
def invalid_rdf_v0_4_0_duplicate_tensor_names(unet2d_nuclei_broad_base_path):
    return yaml.load(unet2d_nuclei_broad_base_path /
                     "invalid_rdf_v0_4_0_duplicate_tensor_names.yaml")