Example #1
    def __init__(
        self,
        *,
        filesystem: JsonableFilesystem,
        info_dir: PurePosixPath,  # path to the directory containing the info file
        scale: PrecomputedChunksScale,
        dtype: "np.dtype[Any]",
        num_channels: int,
    ):
        self.info_dir = info_dir
        self.scale = scale

        shape = Shape5D(x=scale.size[0],
                        y=scale.size[1],
                        z=scale.size[2],
                        c=num_channels)
        location = Point5D(x=scale.voxel_offset[0],
                           y=scale.voxel_offset[1],
                           z=scale.voxel_offset[2])
        interval = shape.to_interval5d(offset=location)
        chunk_sizes_5d = [
            Shape5D(x=cs[0], y=cs[1], z=cs[2], c=num_channels)
            for cs in scale.chunk_sizes
        ]

        super().__init__(
            filesystem=filesystem,
            path=info_dir,
            tile_shape=chunk_sizes_5d[0],  #FIXME
            interval=interval,
            dtype=dtype,
        )
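
Note: the scale metadata stores size and voxel_offset as plain 3-tuples; the constructor above lifts them into 5D. A minimal sketch of that conversion (hypothetical values, using only the ndstructs API already used in these examples):

from ndstructs.point5D import Point5D, Shape5D

shape = Shape5D(x=100, y=200, z=1, c=3)      # scale.size plus num_channels
location = Point5D(x=10, y=20, z=0)          # scale.voxel_offset
interval = shape.to_interval5d(offset=location)
assert interval.start == location and interval.shape == shape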
Example #2
    def __init__(
        self,
        *,
        key: PurePosixPath,
        size: Tuple[int, int, int],
        resolution: Tuple[int, int, int],
        voxel_offset: Tuple[int, int, int],
        chunk_sizes: Tuple[Tuple[int, int, int], ...],
        encoding: PrecomputedChunksEncoder,
        num_channels: int,
    ):
        super().__init__(key=key,
                         size=size,
                         resolution=resolution,
                         voxel_offset=voxel_offset,
                         chunk_sizes=chunk_sizes,
                         encoding=encoding)
        self.num_channels = num_channels
        self.shape = Shape5D(x=self.size[0],
                             y=self.size[1],
                             z=self.size[2],
                             c=num_channels)
        self.location = Point5D(x=self.voxel_offset[0],
                                y=self.voxel_offset[1],
                                z=self.voxel_offset[2])
        self.interval = self.shape.to_interval5d(offset=self.location)
        self.chunk_sizes_5d = [
            Shape5D(x=cs[0], y=cs[1], z=cs[2], c=num_channels)
            for cs in self.chunk_sizes
        ]
Example #3
    @classmethod
    def from_json_data(cls,
                       data: JsonValue,
                       location_override: Optional[Point5D] = None
                       ) -> "N5DatasetAttributes":
        raw_attributes = ensureJsonObject(data)

        dimensions = ensureJsonIntArray(raw_attributes.get("dimensions"))
        blockSize = ensureJsonIntArray(raw_attributes.get("blockSize"))
        axes = raw_attributes.get("axes")
        if axes is None:
            axiskeys = guess_axiskeys(dimensions)
        else:
            axiskeys = "".join(ensureJsonStringArray(axes)[::-1]).lower()
        location = raw_attributes.get("location")
        if location is None:
            location_5d = Point5D.zero()
        else:
            location_5d = Point5D.zero(
                **dict(zip(axiskeys,
                           ensureJsonIntArray(location)[::-1])))

        return N5DatasetAttributes(
            blockSize=Shape5D.create(raw_shape=blockSize[::-1],
                                     axiskeys=axiskeys),
            dimensions=Shape5D.create(raw_shape=dimensions[::-1],
                                      axiskeys=axiskeys),
            dataType=np.dtype(ensureJsonString(raw_attributes.get(
                "dataType"))).newbyteorder(">"),  # type: ignore
            axiskeys=axiskeys,
            compression=N5Compressor.from_json_data(
                raw_attributes["compression"]),
            location=location_override or location_5d,
        )
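
Note: N5 stores "dimensions" and "axes" fastest-axis-first, which is why the parser above reverses both before building C-ordered Shape5D values. A sketch of just that reversal, over a hypothetical attributes.json payload:

raw_attributes = {"dimensions": [7, 10, 20], "axes": ["x", "y", "z"]}  # hypothetical
axiskeys = "".join(raw_attributes["axes"][::-1]).lower()
dimensions_c_order = raw_attributes["dimensions"][::-1]
assert axiskeys == "zyx" and dimensions_c_order == [20, 10, 7]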
Example #4
def test_n5_attributes():
    attributes = N5DatasetAttributes(
        dimensions=Shape5D(x=100, y=200),
        blockSize=Shape5D(x=10, y=20),
        c_axiskeys="yx",
        dataType=np.dtype("uint16").newbyteorder(">"),
        compression=GzipCompressor(level=3))

    reserialized_attributes = N5DatasetAttributes.from_json_data(
        attributes.to_json_data())
    assert reserialized_attributes == attributes
    assert attributes.to_json_data()["axes"] == ("x", "y")
Example #5
    def __init__(
        self,
        *,
        path: PurePosixPath,
        location: Point5D = Point5D.zero(),
        filesystem: JsonableFilesystem,
        tile_shape: Optional[Shape5D] = None,
        spatial_resolution: Optional[Tuple[int, int, int]] = None,
    ):
        raw_data: "np.ndarray[Any, Any]" = skimage.io.imread(filesystem.openbin(path.as_posix())) # type: ignore
        c_axiskeys_on_disk = "yxc"[: len(raw_data.shape)]
        self._data = Array5D(raw_data, axiskeys=c_axiskeys_on_disk, location=location)

        if tile_shape is None:
            tile_shape = Shape5D.hypercube(256).to_interval5d().clamped(self._data.shape).shape

        super().__init__(
            c_axiskeys_on_disk=c_axiskeys_on_disk,
            filesystem=filesystem,
            path=path,
            dtype=self._data.dtype,
            interval=self._data.interval,
            tile_shape=tile_shape,
            spatial_resolution=spatial_resolution,
        )
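
Note: the default tile_shape above is a 256-voxel hypercube clamped to the image's own extent, so an image smaller than 256 pixels per side gets a single full-image tile. A minimal sketch, assuming ndstructs behaves as in the snippet:

from ndstructs.point5D import Shape5D

data_shape = Shape5D(x=100, y=80)  # hypothetical small image
tile_shape = Shape5D.hypercube(256).to_interval5d().clamped(data_shape).shape
assert tile_shape == data_shape  # one tile covers the whole image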
Example #6
def test_writing_to_precomputed_chunks():
    tmp_path = create_tmp_dir(prefix="test_writing_to_precomputed_chunks")
    datasource = ArrayDataSource(data=data, tile_shape=Shape5D(x=10, y=10))
    scale = PrecomputedChunksScale.from_datasource(
        datasource=datasource,
        key=PurePosixPath("my_test_data"),
        encoding=RawEncoder())
    sink_path = PurePosixPath("mytest.precomputed")
    filesystem = OsFs(tmp_path.as_posix())

    datasink = PrecomputedChunksScaleSink(
        filesystem=filesystem,
        info_dir=sink_path,
        scale=scale,
        dtype=datasource.dtype,
        num_channels=datasource.shape.c,
    )
    creation_result = datasink.create()
    if isinstance(creation_result, Exception):
        raise creation_result

    for tile in datasource.roi.get_datasource_tiles():
        datasink.write(tile.retrieve())

    precomp_datasource = PrecomputedChunksDataSource(
        path=sink_path, filesystem=filesystem, resolution=scale.resolution)
    reloaded_data = precomp_datasource.retrieve()
    assert reloaded_data == data
Example #7
def test_retrieve_roi_smaller_than_tile():
    # fmt: off
    data = Array5D(np.asarray([
        [[   1,    2,    3,    4,     5],
         [   6,    7,    8,    9,    10],
         [  11,   12,   13,   14,    15],
         [  16,   17,   18,   19,    20]],

        [[ 100,  200,  300,  400,   500],
         [ 600,  700,  800,  900,  1000],
         [1100, 1200, 1300, 1400,  1500],
         [1600, 1700, 1800, 1900,  2000]],
    ]).astype(np.uint32), axiskeys="cyx")

    expected_cyx = np.asarray([
        [[ 100,  200,  300,  400],
         [ 600,  700,  800,  900],
         [1100, 1200, 1300, 1400],
         [1600, 1700, 1800, 1900]]
    ])
    # fmt: on
    path = PurePosixPath(create_n5(data, chunk_size=Shape5D(c=2, y=4, x=4)))
    ds = N5DataSource(path=path / "data", filesystem=OsFs("/"))
    smaller_than_tile = ds.retrieve(c=1, y=(0, 4), x=(0, 4))
    assert np.all(smaller_than_tile.raw("cyx") == expected_cyx)
Example #8
def test_h5_datasource():
    data_2d = Array5D(np.arange(100).reshape(10, 10), axiskeys="yx")
    h5_path = create_h5(data_2d, axiskeys_style="vigra", chunk_shape=Shape5D(x=3, y=3))
    ds = H5DataSource(outer_path=h5_path, inner_path=PurePosixPath("/data"), filesystem=OsFs("/"))
    assert ds.shape == data_2d.shape
    assert ds.tile_shape == Shape5D(x=3, y=3)

    slc = ds.interval.updated(x=(0, 3), y=(0, 2))
    assert (ds.retrieve(slc).raw("yx") == data_2d.cut(slc).raw("yx")).all()

    data_3d = Array5D(np.arange(10 * 10 * 10).reshape(10, 10, 10), axiskeys="zyx")
    h5_path = create_h5(data_3d, axiskeys_style="vigra", chunk_shape=Shape5D(x=3, y=3))
    ds = H5DataSource(outer_path=h5_path, inner_path=PurePosixPath("/data"), filesystem=OsFs("/"))
    assert ds.shape == data_3d.shape
    assert ds.tile_shape == Shape5D(x=3, y=3)

    slc = ds.interval.updated(x=(0, 3), y=(0, 2), z=3)
    assert (ds.retrieve(slc).raw("yxz") == data_3d.cut(slc).raw("yxz")).all()
Example #9
def test_data_roi_get_tiles_can_clamp_to_datasource_tiles():
    # fmt: off
    data = Array5D(np.asarray([
        [1,  2,  3,  4,  5],
        [6,  7,  8,  9,  10],
        [11, 12, 13, 14, 15],
        [16, 17, 18, 19, 20],
    ]).astype(np.uint8), axiskeys="yx")
    # fmt: on

    ds = ArrayDataSource(data=data, tile_shape=Shape5D(x=2, y=2))
    data_slice = DataRoi(datasource=ds, x=(1, 4), y=(0, 3))

    # fmt: off
    dataslice_expected_data = Array5D(np.asarray([
        [2,  3,  4],
        [7,  8,  9],
        [12, 13, 14]
    ]).astype(np.uint8), axiskeys="yx", location=Point5D.zero(x=1))
    # fmt: on

    assert data_slice.retrieve() == dataslice_expected_data

    # fmt: off
    dataslice_expected_slices = [
        Array5D(np.asarray([
            [1, 2],
            [6, 7]
        ]).astype(np.uint8), axiskeys="yx", location=Point5D.zero()),

        Array5D(np.asarray([
            [3,  4],
            [8,  9],
        ]).astype(np.uint8), axiskeys="yx", location=Point5D.zero(x=2)),

        Array5D(np.asarray([
            [11, 12],
            [16, 17],
        ]).astype(np.uint8), axiskeys="yx", location=Point5D.zero(y=2)),

        Array5D(np.asarray([
            [13, 14],
            [18, 19],
        ]).astype(np.uint8), axiskeys="yx", location=Point5D.zero(x=2, y=2))
    ]
    # fmt: on
    expected_slice_dict = {a.interval: a for a in dataslice_expected_slices}
    for piece in data_slice.get_datasource_tiles(clamp_to_datasource=True):
        expected_data = expected_slice_dict.pop(piece.interval)
        assert expected_data == piece.retrieve()
    assert len(expected_slice_dict) == 0
Example #10
    def __init__(self,
                 *,
                 outer_path: PurePosixPath,
                 inner_path: PurePosixPath,
                 location: Point5D = Point5D.zero(),
                 filesystem: JsonableFilesystem,
                 spatial_resolution: Optional[Tuple[int, int, int]] = None):
        self.outer_path = outer_path
        self.inner_path = inner_path
        self.filesystem = filesystem
        binfile = filesystem.openbin(outer_path.as_posix())
        # FIXME: h5py might not like this if the filesystem isn't OSFS
        f = h5py.File(binfile, "r")  # type: ignore
        try:
            dataset = f[inner_path.as_posix()]
            if not isinstance(dataset, h5py.Dataset):
                raise ValueError(f"{inner_path} is not a Dataset")
            self.axiskeys = self.getAxisKeys(dataset)
            self._dataset = dataset
            tile_shape = Shape5D.create(raw_shape=self._dataset.chunks
                                        or self._dataset.shape,
                                        axiskeys=self.axiskeys)
            base_url = Url.parse(filesystem.geturl(outer_path.as_posix()))
            assert base_url is not None
            super().__init__(
                c_axiskeys_on_disk=self.axiskeys,
                tile_shape=tile_shape,
                interval=Shape5D.create(
                    raw_shape=self._dataset.shape,
                    axiskeys=self.axiskeys).to_interval5d(location),
                dtype=self._dataset.dtype,
                spatial_resolution=spatial_resolution or (1, 1, 1),  # FIXME
                filesystem=filesystem,
                path=self.outer_path)
        except Exception:
            f.close()
            raise
Example #11
def create_h5(array: Array5D, axiskeys_style: str, chunk_shape: Optional[Shape5D] = None, axiskeys: str = "xyztc"):
    raw_chunk_shape = (chunk_shape or Shape5D() * 2).clamped(maximum=array.shape).to_tuple(axiskeys)

    path = tempfile.mkstemp()[1] + ".h5"
    f = h5py.File(path, "w")
    ds = f.create_dataset("data", chunks=raw_chunk_shape, data=array.raw(axiskeys))
    if axiskeys_style == "dims":
        for key, dim in zip(axiskeys, ds.dims):
            dim.label = key
    elif axiskeys_style == "vigra":
        type_flags = {"x": 2, "y": 2, "z": 2, "t": 2, "c": 1}
        axistags = [{"key": key, "typeflags": type_flags[key], "resolution": 0, "description": ""} for key in axiskeys]
        ds.attrs["axistags"] = json.dumps({"axes": axistags})
    else:
        raise Exception(f"Bad axiskeys_style: {axiskeys_style}")
    f.close()  # flush to disk and release the handle so readers can open the file

    return PurePosixPath(path)
Example #12
    def __init__(
        self,
        *,
        data: Array5D,
        tile_shape: Optional[Shape5D] = None,
        spatial_resolution: Optional[Tuple[int, int, int]] = None,
    ):
        self._data = data
        if tile_shape is None:
            tile_shape = Shape5D.hypercube(256).to_interval5d().clamped(
                self._data.shape).shape
        super().__init__(
            dtype=self._data.dtype,
            tile_shape=tile_shape,
            interval=self._data.interval,
            spatial_resolution=spatial_resolution,
        )
Example #13
    def __call__(self, roi: DataRoi) -> FeatureData:
        haloed_roi = roi.enlarged(self.halo)
        source_data = self.preprocessor(haloed_roi)

        step_shape: Shape5D = Shape5D(
            c=1,
            t=1,
            x=1 if self.axis_2d == "x" else source_data.shape.x,
            y=1 if self.axis_2d == "y" else source_data.shape.y,
            z=1 if self.axis_2d == "z" else source_data.shape.z,
        )

        out = Array5D.allocate(
            interval=roi.updated(c=(roi.c[0] * self.channel_multiplier,
                                    roi.c[1] * self.channel_multiplier)),
            dtype=numpy.dtype("float32"),
            axiskeys=source_data.axiskeys.replace("c", "") + "c",  # fastfilters puts channel last
        )

        for data_slice in source_data.split(step_shape):
            source_axes = "zyx"
            if self.axis_2d:
                source_axes = source_axes.replace(self.axis_2d, "")

            raw_data: "ndarray[Any, dtype[float32]]" = data_slice.raw(
                source_axes).astype(numpy.float32)
            raw_feature_data: "ndarray[Any, dtype[float32]]" = self.filter_fn(
                raw_data)

            feature_data = FeatureData(
                raw_feature_data,
                axiskeys=(source_axes + "c"
                          if len(raw_feature_data.shape) > len(source_axes)
                          else source_axes),
                location=data_slice.location.updated(
                    c=data_slice.location.c * self.channel_multiplier),
            )
            out.set(feature_data, autocrop=True)
        out.setflags(write=False)
        return FeatureData(
            out.raw(out.axiskeys),
            axiskeys=out.axiskeys,
            location=out.location,
        )
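
Note: step_shape sweeps the haloed data one plane at a time along axis_2d while the other spatial axes stay whole, so the 2D filter always sees complete planes. A toy illustration for axis_2d == "z", assuming Array5D.split behaves as used above:

import numpy as np
from ndstructs.array5D import Array5D
from ndstructs.point5D import Shape5D

vol = Array5D(np.zeros((4, 8, 8), dtype=np.float32), axiskeys="zyx")
step_shape = Shape5D(c=1, t=1, x=vol.shape.x, y=vol.shape.y, z=1)
planes = list(vol.split(step_shape))
assert len(planes) == 4 and all(p.shape.z == 1 for p in planes)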
Example #14
def test_skimage_datasource_tiles():
    bs = DataRoi(SkimageDataSource(path=png_image, filesystem=OsFs("/")))
    num_checked_tiles = 0
    for tile in bs.split(Shape5D(x=2, y=2)):
        if tile == Interval5D.zero(x=(0, 2), y=(0, 2)):
            expected_raw = raw_0_2x0_2y
        elif tile == Interval5D.zero(x=(0, 2), y=(2, 4)):
            expected_raw = raw_0_2x2_4y
        elif tile == Interval5D.zero(x=(2, 4), y=(0, 2)):
            expected_raw = raw_2_4x0_2y
        elif tile == Interval5D.zero(x=(2, 4), y=(2, 4)):
            expected_raw = raw_2_4x2_4y
        elif tile == Interval5D.zero(x=(4, 5), y=(0, 2)):
            expected_raw = raw_4_5x0_2y
        elif tile == Interval5D.zero(x=(4, 5), y=(2, 4)):
            expected_raw = raw_4_5x2_4y
        else:
            raise Exception(f"Unexpected tile {tile}")
        assert (tile.retrieve().raw("yx") == expected_raw).all()
        num_checked_tiles += 1
    assert num_checked_tiles == 6
Example #15
def test_n5_datasink():
    tmp_path = create_tmp_dir(prefix="test_n5_datasink")
    sink = N5DatasetSink(
        filesystem=OsFs(tmp_path.as_posix()),
        outer_path=PurePosixPath("test_n5_datasink.n5"),
        inner_path=PurePosixPath("/data"),
        attributes=N5DatasetAttributes(
            dimensions=datasource.shape,
            blockSize=Shape5D(x=10, y=10),
            c_axiskeys=data.axiskeys,  #FIXME: double check this
            dataType=datasource.dtype,
            compression=RawCompressor(),
            location=Point5D.zero(x=7, y=13)))
    sink_writer = sink.create()
    assert not isinstance(sink_writer, Exception)
    for tile in DataRoi(datasource).split(sink.tile_shape):
        sink_writer.write(tile.retrieve().translated(Point5D.zero(x=7, y=13)))

    n5ds = N5DataSource(filesystem=sink.filesystem, path=sink.full_path)
    saved_data = n5ds.retrieve()
    assert saved_data.location == Point5D.zero(x=7, y=13)
    assert saved_data == data
Example #16
def test_n5_datasource():
    # fmt: off
    data = Array5D(np.asarray([
        [1,  2,  3,  4,  5 ],
        [6,  7,  8,  9,  10],
        [11, 12, 13, 14, 15],
        [16, 17, 18, 19, 20]
    ]).astype(np.uint8), axiskeys="yx")
    # fmt: on

    path = PurePosixPath(create_n5(data, chunk_size=Shape5D(x=2, y=2)))
    ds = N5DataSource(path=path / "data", filesystem=OsFs("/"))
    assert ds.shape == data.shape

    # fmt: off
    expected_raw_piece = Array5D(np.asarray([
        [1, 2, 3],
        [6, 7, 8]
    ]).astype(np.uint8), axiskeys="yx")
    # fmt: on
    assert ds.retrieve(x=(0, 3), y=(0, 2)) == expected_raw_piece

    ds2 = pickle.loads(pickle.dumps(ds))
    assert ds2.retrieve(x=(0, 3), y=(0, 2)) == expected_raw_piece
Example #17
def test_writing_to_offset_precomputed_chunks():
    tmp_path = create_tmp_dir(
        prefix="test_writing_to_offset_precomputed_chunks")
    data_at_1000_1000 = data.translated(
        Point5D(x=1000, y=1000) - data.location)
    datasource = ArrayDataSource(data=data_at_1000_1000,
                                 tile_shape=Shape5D(x=10, y=10))
    scale = PrecomputedChunksScale.from_datasource(
        datasource=datasource,
        key=PurePosixPath("my_test_data"),
        encoding=RawEncoder())
    sink_path = PurePosixPath("mytest.precomputed")
    filesystem = OsFs(tmp_path.as_posix())

    print(f"\n\n will write to '{filesystem.geturl(sink_path.as_posix())}' ")

    datasink = PrecomputedChunksScaleSink(
        filesystem=filesystem,
        info_dir=sink_path,
        scale=scale,
        num_channels=datasource.shape.c,
        dtype=datasource.dtype,
    )
    creation_result = datasink.create()
    if isinstance(creation_result, Exception):
        raise creation_result

    for tile in datasource.roi.get_datasource_tiles():
        datasink.write(tile.retrieve())

    precomp_datasource = PrecomputedChunksDataSource(
        path=sink_path, filesystem=filesystem, resolution=scale.resolution)

    reloaded_data = precomp_datasource.retrieve(
        interval=data_at_1000_1000.interval)
    assert (reloaded_data.raw("xyz") == data.raw("xyz")).all()
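
Note: the translation above, Point5D(x=1000, y=1000) - data.location, moves the array so its origin lands exactly at (1000, 1000) regardless of where it currently sits. A small sketch of the same move on a toy array:

import numpy as np
from ndstructs.array5D import Array5D
from ndstructs.point5D import Point5D

a = Array5D(np.zeros((4, 5), dtype=np.uint8), axiskeys="yx")
moved = a.translated(Point5D(x=1000, y=1000) - a.location)
assert moved.location.x == 1000 and moved.location.y == 1000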
Example #18
async def main():
    ds = SkimageDataSource(
        filesystem=HttpFs(read_url=Url.parse("https://app.ilastik.org/")),
        path=Path("api/images/c_cells_1.png"))

    async with aiohttp.ClientSession() as session:

        print(f"Creating new session--------------")
        async with session.post(f"https://app.ilastik.org/api/session",
                                json={"session_duration": 30}) as response:
            response.raise_for_status()
            session_data: Dict[str, Any] = await response.json()
            session_id = session_data["id"]
        print(
            f"Done creating session: {json.dumps(session_data)} <<<<<<<<<<<<<<<<<<"
        )

        for _ in range(10):
            response = await session.get(
                f"https://app.ilastik.org/api/session/{session_id}")
            response.raise_for_status()
            session_status = await response.json()
            if session_status["status"] == "ready":
                session_url = session_status["url"]
                break
            print(f"Session {session_id} is notready yet")
            await asyncio.sleep(2)
        else:
            raise RuntimeError("Given up waiting on session")

        async with session.ws_connect(f"{session_url}/ws") as ws:
            asyncio.get_event_loop().create_task(read_server_status(ws))
            print("sending some feature extractors=======")
            await ws.send_json({
                "feature_selection_applet": [
                    {
                        "__class__": "GaussianSmoothing",
                        "sigma": 0.3,
                        "axis_2d": "z"
                    },
                    {
                        "__class__": "HessianOfGaussianEigenvalues",
                        "scale": 0.7,
                        "axis_2d": "z"
                    },
                ]
            })
            print("done sending feature extractors<<<<<")

            asyncio.get_event_loop().create_task(read_server_status(ws))
            print("sending some annotations=======")
            brush_strokes = [
                Annotation.interpolate_from_points(
                    voxels=[Point5D.zero(x=140, y=150),
                            Point5D.zero(x=145, y=155)],
                    color=Color(r=np.uint8(0), g=np.uint8(255), b=np.uint8(0)),
                    raw_data=ds),
                Annotation.interpolate_from_points(
                    voxels=[Point5D.zero(x=238, y=101),
                            Point5D.zero(x=229, y=139)],
                    color=Color(r=np.uint8(0), g=np.uint8(255), b=np.uint8(0)),
                    raw_data=ds),
                Annotation.interpolate_from_points(
                    voxels=[Point5D.zero(x=283, y=87),
                            Point5D.zero(x=288, y=92)],
                    color=Color(r=np.uint8(255), g=np.uint8(0), b=np.uint8(0)),
                    raw_data=ds),
                Annotation.interpolate_from_points(
                    voxels=[Point5D.zero(x=274, y=168),
                            Point5D.zero(x=256, y=191)],
                    color=Color(r=np.uint8(255), g=np.uint8(0), b=np.uint8(0)),
                    raw_data=ds),
            ]
            await ws.send_json(
                {"brushing_applet": [a.to_json_data() for a in brush_strokes]})
            print("done sending annotations<<<<<")

        from base64 import b64encode
        encoded_ds: str = b64encode(
            json.dumps(ds.to_json_value()).encode("utf8"),
            altchars=b'-_').decode("utf8")

        response_tasks = {}
        for tile in ds.roi.get_tiles(tile_shape=Shape5D(x=256, y=256, c=2),
                                     tiles_origin=Point5D.zero()):
            url = f"{session_url}/predictions/raw_data={encoded_ds}/run_id=123456/data/{tile.x[0]}-{tile.x[1]}_{tile.y[0]}-{tile.y[1]}_0-1"
            print(f"---> Requesting {url}")
            response_tasks[tile] = session.get(url)

        for tile, resp in response_tasks.items():
            async with resp as response:
                print("Status:", response.status)
                print("Content-type:", response.headers['content-type'])

                tile_bytes = await response.content.read()
                print(f"Got predictions<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<")

                raw_data = np.frombuffer(tile_bytes, dtype=np.uint8).reshape(
                    2, tile.shape.y, tile.shape.x)
                a = Array5D(raw_data, axiskeys="cyx")
                # a.show_channels()

            global finished
            finished = True
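
Note: encoded_ds packs the datasource's JSON description into URL-safe base64 (altchars=b'-_' replaces '+' and '/', which would break the URL path). A standard-library sketch of the round trip, with a hypothetical payload:

import json
from base64 import b64decode, b64encode

payload = {"url": "https://app.ilastik.org/api/images/c_cells_1.png"}  # hypothetical
encoded = b64encode(json.dumps(payload).encode("utf8"), altchars=b"-_").decode("utf8")
assert json.loads(b64decode(encoded.encode("utf8"), altchars=b"-_")) == payload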
Example #19
import numpy as np

from ndstructs.point5D import Point5D, Shape5D
from ndstructs.array5D import Array5D

from webilastik.datasink.n5_dataset_sink import N5DatasetSink
from webilastik.datasink.precomputed_chunks_sink import PrecomputedChunksScaleSink
from webilastik.datasource import DataRoi
from webilastik.datasource.array_datasource import ArrayDataSource
from webilastik.datasource.n5_datasource import N5DataSource
from webilastik.datasource.precomputed_chunks_info import PrecomputedChunksScale, RawEncoder
from webilastik.datasource.n5_attributes import GzipCompressor, N5DatasetAttributes, RawCompressor
from webilastik.datasource.precomputed_chunks_datasource import PrecomputedChunksDataSource

data = Array5D(np.arange(20 * 10 * 7).reshape(20, 10, 7), axiskeys="xyz")
data.setflags(write=False)

datasource = ArrayDataSource(data=data, tile_shape=Shape5D(x=10, y=10))


def test_n5_attributes():
    attributes = N5DatasetAttributes(
        dimensions=Shape5D(x=100, y=200),
        blockSize=Shape5D(x=10, y=20),
        c_axiskeys="yx",
        dataType=np.dtype("uint16").newbyteorder(">"),
        compression=GzipCompressor(level=3))

    reserialized_attributes = N5DatasetAttributes.from_json_data(
        attributes.to_json_data())
    assert reserialized_attributes == attributes
    assert attributes.to_json_data()["axes"] == ("x", "y")
Example #20
def test_sequence_datasource():
    # fmt: off
    img1_data = Array5D(np.asarray([
       [[100, 101, 102, 103, 104],
        [105, 106, 107, 108, 109],
        [110, 111, 112, 113, 114],
        [115, 116, 117, 118, 119]],

       [[120, 121, 122, 123, 124],
        [125, 126, 127, 128, 129],
        [130, 131, 132, 133, 134],
        [135, 136, 137, 138, 139]],

       [[140, 141, 142, 143, 144],
        [145, 146, 147, 148, 149],
        [150, 151, 152, 153, 154],
        [155, 156, 157, 158, 159]]
    ]), axiskeys="cyx")

    img2_data = Array5D(np.asarray([
       [[200, 201, 202, 203, 204],
        [205, 206, 207, 208, 209],
        [210, 211, 212, 213, 214],
        [215, 216, 217, 218, 219]],

       [[220, 221, 222, 223, 224],
        [225, 226, 227, 228, 229],
        [230, 231, 232, 233, 234],
        [235, 236, 237, 238, 239]],

       [[240, 241, 242, 243, 244],
        [245, 246, 247, 248, 249],
        [250, 251, 252, 253, 254],
        [255, 256, 257, 258, 259]]
    ]), axiskeys="cyx")

    img3_data = Array5D(np.asarray([
       [[300, 301, 302, 303, 304],
        [305, 306, 307, 308, 309],
        [310, 311, 312, 313, 314],
        [315, 316, 317, 318, 319]],

       [[320, 321, 322, 323, 324],
        [325, 326, 327, 328, 329],
        [330, 331, 332, 333, 334],
        [335, 336, 337, 338, 339]],

       [[340, 341, 342, 343, 344],
        [345, 346, 347, 348, 349],
        [350, 351, 352, 353, 354],
        [355, 356, 357, 358, 359]]
    ]), axiskeys="cyx")

    expected_x_2_4__y_1_3 = Array5D(np.asarray([
      [[[107, 108],
        [112, 113]],

       [[127, 128],
        [132, 133]],

       [[147, 148],
        [152, 153]]],


      [[[207, 208],
        [212, 213]],

       [[227, 228],
        [232, 233]],

       [[247, 248],
        [252, 253]]],


      [[[307, 308],
        [312, 313]],

       [[327, 328],
        [332, 333]],

       [[347, 348],
        [352, 353]]],
    ]), axiskeys="zcyx")
    # fmt: on
    slice_x_2_4__y_1_3 = {"x": (2, 4), "y": (1, 3)}

    h5_outer_paths = [
        # create_n5(img1_data, axiskeys="cyx"),
        create_h5(img1_data, axiskeys_style="dims", axiskeys="cyx"),
        # create_n5(img2_data, axiskeys="cyx"),
        create_h5(img2_data, axiskeys_style="dims", axiskeys="cyx"),
        # create_n5(img3_data, axiskeys="cyx"),
        create_h5(img3_data, axiskeys_style="dims", axiskeys="cyx"),
    ]

    def stack_h5s(stack_axis: str) -> List[H5DataSource]:
        offset = Point5D.zero()
        stack: List[H5DataSource] = []
        for outer_path in h5_outer_paths:
            stack.append(H5DataSource(outer_path=outer_path, inner_path=PurePosixPath("/data"), filesystem=OsFs("/"), location=offset))
            offset += Point5D.zero(**{stack_axis: stack[-1].shape[stack_axis]})
        return stack

    seq_ds = SequenceDataSource(datasources=stack_h5s("z"), stack_axis="z")
    assert seq_ds.shape == Shape5D(x=5, y=4, c=3, z=3)
    data = seq_ds.retrieve(**slice_x_2_4__y_1_3)
    assert (expected_x_2_4__y_1_3.raw("xyzc") == data.raw("xyzc")).all()

    seq_ds = SequenceDataSource(datasources=stack_h5s("z"), stack_axis="z")
    data = seq_ds.retrieve(**slice_x_2_4__y_1_3)
    assert (expected_x_2_4__y_1_3.raw("xyzc") == data.raw("xyzc")).all()
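
Note: stack_h5s places each datasource right after the previous one by accumulating an offset along the stack axis, which is how three z=1 volumes combine into the z=3 shape asserted above. A toy version of that accumulation:

from ndstructs.point5D import Point5D

offset = Point5D.zero()
placements = []
for z_depth in (1, 1, 1):  # each h5 volume above is one voxel deep in z
    placements.append(offset)
    offset += Point5D.zero(z=z_depth)
assert [p.z for p in placements] == [0, 1, 2]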
Example #21
def test_neighboring_tiles():
    # fmt: off
    arr = Array5D(np.asarray([
        [10, 11, 12,   20, 21, 22,   30],
        [13, 14, 15,   23, 24, 25,   33],
        [16, 17, 18,   26, 27, 28,   36],

        [40, 41, 42,   50, 51, 52,   60],
        [43, 44, 45,   53, 54, 55,   63],
        [46, 47, 48,   56, 57, 58,   66],

        [70, 71, 72,   80, 81, 82,   90],
        [73, 74, 75,   83, 84, 85,   93],
        [76, 77, 78,   86, 87, 88,   96],

        [0,   1,  2,    3,  4,  5,    6]], dtype=np.uint8), axiskeys="yx")

    ds = SkimageDataSource(path=create_png(arr), filesystem=OsFs("/"))

    fifties_slice = DataRoi(ds, x=(3, 6), y=(3, 6))
    expected_fifties_slice = Array5D(np.asarray([
        [50, 51, 52],
        [53, 54, 55],
        [56, 57, 58]
    ]), axiskeys="yx")
    # fmt: on

    top_slice = DataRoi(ds, x=(3, 6), y=(0, 3))
    bottom_slice = DataRoi(ds, x=(3, 6), y=(6, 9))

    right_slice = DataRoi(ds, x=(6, 7), y=(3, 6))
    left_slice = DataRoi(ds, x=(0, 3), y=(3, 6))

    # fmt: off
    fifties_neighbor_data = {
        top_slice: Array5D(np.asarray([
            [20, 21, 22],
            [23, 24, 25],
            [26, 27, 28]
        ]), axiskeys="yx"),

        right_slice: Array5D(np.asarray([
            [60],
            [63],
            [66]
        ]), axiskeys="yx"),

        bottom_slice: Array5D(np.asarray([
            [80, 81, 82],
            [83, 84, 85],
            [86, 87, 88]
        ]), axiskeys="yx"),

        left_slice: Array5D(np.asarray([
            [40, 41, 42],
            [43, 44, 45],
            [46, 47, 48]
        ]), axiskeys="yx"),
    }

    # fmt: on

    assert (fifties_slice.retrieve().raw("yx") == expected_fifties_slice.raw("yx")).all()

    for neighbor in fifties_slice.get_neighboring_tiles(tile_shape=Shape5D(x=3, y=3)):
        try:
            expected_slice = fifties_neighbor_data.pop(neighbor)
            assert (expected_slice.raw("yx") == neighbor.retrieve().raw("yx")).all()
        except KeyError:
            print(f"\nWas searching for ", neighbor, "\n")
            for k in fifties_neighbor_data.keys():
                print("--->>> ", k)
    assert len(fifties_neighbor_data) == 0
Example #22
def test_pixel_classification_workflow():
    brushing_applet = BrushingApplet("brushing_applet")
    feature_selection_applet = FeatureSelectionApplet(
        "feature_selection_applet", datasources=brushing_applet.datasources)
    pixel_classifier_applet = PixelClassificationApplet(
        "pixel_classifier_applet",
        feature_extractors=feature_selection_applet.feature_extractors,
        annotations=brushing_applet.annotations)
    # wf = PixelClassificationWorkflow(
    #     feature_selection_applet=feature_selection_applet,
    #     brushing_applet=brushing_applet,
    #     pixel_classifier_applet=pixel_classifier_applet,
    #     predictions_export_applet=predictions_export_applet
    # )

    # GUI creates a datasource somewhere...
    ds = SkimageDataSource(Path("public/images/c_cells_1.png"),
                           filesystem=OsFs("."),
                           tile_shape=Shape5D(x=400, y=400))

    # GUI creates some feature extractors
    feature_selection_applet.feature_extractors.set_value(
        [
            GaussianSmoothing.from_ilp_scale(scale=0.3, axis_2d="z"),
            HessianOfGaussianEigenvalues.from_ilp_scale(scale=0.7,
                                                        axis_2d="z"),
        ],
        confirmer=dummy_confirmer)

    # GUI creates some annotations
    brush_strokes = [
        Annotation.interpolate_from_points(
            voxels=[Point5D.zero(x=140, y=150),
                    Point5D.zero(x=145, y=155)],
            color=Color(r=np.uint8(0), g=np.uint8(0), b=np.uint8(255)),
            raw_data=ds),
        Annotation.interpolate_from_points(
            voxels=[Point5D.zero(x=238, y=101),
                    Point5D.zero(x=229, y=139)],
            color=Color(r=np.uint8(0), g=np.uint8(0), b=np.uint8(255)),
            raw_data=ds),
        Annotation.interpolate_from_points(
            voxels=[Point5D.zero(x=283, y=87),
                    Point5D.zero(x=288, y=92)],
            color=Color(r=np.uint8(255), g=np.uint8(0), b=np.uint8(0)),
            raw_data=ds),
        Annotation.interpolate_from_points(
            voxels=[Point5D.zero(x=274, y=168),
                    Point5D.zero(x=256, y=191)],
            color=Color(r=np.uint8(255), g=np.uint8(0), b=np.uint8(0)),
            raw_data=ds),
    ]
    brushing_applet.annotations.set_value(brush_strokes,
                                          confirmer=dummy_confirmer)

    # preds = predictions_export_applet.compute(DataRoi(ds))

    classifier = pixel_classifier_applet.pixel_classifier()
    executor = HashingExecutor(num_workers=8)

    # calculate predictions on arbitrary data
    preds = executor.submit(classifier.compute, ds.roi)
    preds.result().as_uint8().show_channels()

    # for png_bytes in preds.to_z_slice_pngs():
    #     path = f"/tmp/junk_test_image_{uuid.uuid4()}.png"
    #     with open(path, "wb") as outfile:
    #         outfile.write(png_bytes.getbuffer())
    #     os.system(f"gimp {path}")

    # calculate predictions on just a piece of arbitrary data
    exported_tile = executor.submit(
        classifier.compute, DataRoi(datasource=ds, x=(100, 200), y=(100, 200)))
    exported_tile.result().show_channels()

    # wf.save_as(Path("/tmp/blas.ilp"))

    # try removing a brush stroke
    brushing_applet.annotations.set_value(brush_strokes[1:],
                                          confirmer=dummy_confirmer)
    assert tuple(brushing_applet.annotations()) == tuple(brush_strokes[1:])
Example #23
    @classmethod
    def parse(cls, group: h5py.Group, raw_data_sources: Mapping[int, "FsDataSource | None"]) -> "IlpPixelClassificationGroup":
        LabelColors = ensure_color_list(group, "LabelColors")
        LabelNames = ensure_encoded_string_list(group, "LabelNames")
        class_to_color: Mapping[np.uint8, Color] = {np.uint8(i): color for i, color in enumerate(LabelColors, start=1)}

        label_classes: Dict[Color, Label] = {color: Label(name=name, color=color, annotations=[]) for name, color in zip(LabelNames, LabelColors)}
        LabelSets = ensure_group(group, "LabelSets")
        for lane_key in LabelSets.keys():
            if not lane_key.startswith("labels"):
                continue
            lane_index = int(lane_key.replace("labels", ""))
            lane_label_blocks = ensure_group(LabelSets, lane_key)
            if len(lane_label_blocks.keys()) == 0:
                continue
            raw_data = raw_data_sources.get(lane_index)
            if raw_data is None:
                raise IlpParsingError(f"No datasource for lane {lane_index:03d}")
            for block_name in lane_label_blocks.keys():
                if not block_name.startswith("block"):
                    continue
                block = ensure_dataset(lane_label_blocks, block_name)
                block_data = block[()]
                if not isinstance(block_data, np.ndarray):
                    raise IlpParsingError("Expected annotation block to contain a ndarray")

                raw_axistags = block.attrs.get("axistags")
                if not isinstance(raw_axistags, str):
                    raise IlpParsingError(f"Expected axistags to be a str, found {raw_axistags}")
                axistags = AxisTags.fromJSON(raw_axistags)
                axiskeys = "".join(axistags.keys())

                if "blockSlice" not in block.attrs:
                    raise IlpParsingError(f"Expected 'blockSlice' in attrs from {block.name}")
                blockSlice = block.attrs["blockSlice"]
                if not isinstance(blockSlice, str):
                    raise IlpParsingError(f"Expected 'blockSlice'' to be a str, found {blockSlice}")
                # import pydevd; pydevd.settrace()
                blockSpans: Sequence[List[str]] = [span_str.split(":") for span_str in blockSlice[1:-1].split(",")]
                blockInterval = Interval5D.zero(**{
                    key: (int(span[0]), int(span[1]))
                    for key, span in zip(axiskeys, blockSpans)
                })

                block_5d = Array5D(block_data, axiskeys=axiskeys)
                for color_5d in block_5d.unique_colors().split(shape=Shape5D(x=1, c=block_5d.shape.c)):
                    color_index = np.uint8(color_5d.raw("c")[0])
                    if color_index == np.uint8(0): # background
                        continue
                    color = class_to_color.get(color_index)
                    if color is None:
                        raise IlpParsingError(f"Could not find a label color for index {color_index}")
                    annotation_data: "np.ndarray[Any, np.dtype[np.uint8]]" = block_5d.color_filtered(color=color_5d).raw(axiskeys)
                    annotation = Annotation(
                        annotation_data.astype(np.dtype(bool)),
                        location=blockInterval.start,
                        axiskeys=axiskeys, # FIXME: what if the user changed the axiskeys in the data source?
                        raw_data=raw_data,
                    )

                    label_classes[color].annotations.append(annotation)

        ClassifierFactory = ensure_bytes(group, "ClassifierFactory")
        if ClassifierFactory != VIGRA_ILP_CLASSIFIER_FACTORY:
            raise IlpParsingError(f"Expecting ClassifierFactory to be pickled ParallelVigraRfLazyflowClassifierFactory, found {ClassifierFactory}")
        if "ClassifierForests" in group:
            ClassifierForests = ensure_group(group, "ClassifierForests")
            forests: List[VigraRandomForest] = []
            for forest_key in sorted(ClassifierForests.keys()):
                if not forest_key.startswith("Forest"):
                    continue
                forest = VigraRandomForest(group.file.filename, f"{ClassifierForests.name}/{forest_key}")
                # forest_bytes = ensure_bytes(ClassifierForests, forest_key)
                # forest = h5_bytes_to_vigra_forest(h5_bytes=VigraForestH5Bytes(forest_bytes))
                forests.append(forest)

            feature_names = ensure_encoded_string_list(ClassifierForests, "feature_names")
            feature_extractors, expected_num_channels = cls.ilp_filters_and_expected_num_channels_from_names(feature_names)

            classifier = VigraPixelClassifier(
                feature_extractors=feature_extractors,
                forest_h5_bytes=[vigra_forest_to_h5_bytes(forest) for forest in forests],
                num_classes=len([label for label in label_classes.values() if not label.is_empty()]),
                num_input_channels=expected_num_channels,
            )
        else:
            classifier = None

        return IlpPixelClassificationGroup(
            classifier=classifier,
            labels=list(label_classes.values()),
        )
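
Note: the blockSlice parsing above turns ilastik's "[start:stop,...]" span strings into an Interval5D. A toy run of the same logic over a hypothetical value, assuming axiskeys "yx":

from ndstructs.point5D import Interval5D

blockSlice = "[0:4,1:3]"  # hypothetical attrs value
blockSpans = [span_str.split(":") for span_str in blockSlice[1:-1].split(",")]
blockInterval = Interval5D.zero(**{
    key: (int(span[0]), int(span[1]))
    for key, span in zip("yx", blockSpans)
})
assert blockInterval.y[0] == 0 and blockInterval.y[1] == 4
assert blockInterval.x[0] == 1 and blockInterval.x[1] == 3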