Esempio n. 1
0
def test_paint_point():
    """Check that Array5D.paint_point mutates exactly one voxel in place."""
    # fmt: off
    # Two-channel 4x4 image, axes ordered channel-y-x.
    img = Array5D(numpy.asarray([
        [[100,   0,   0,  100],
         [ 0,   17,   0,    0],
         [ 0,    0,  17,    0],
         [ 0,    0,   0,    0]],

        [[200,   0,   0, 200],
         [  0,  40,   0,   0],
         [  0,   0,  40,   0],
         [  0,   0,   0,   0]]
    ]), axiskeys="cyx")
    # fmt: on

    # fmt: off
    # Same data, but with (c=0,y=0,x=0) set to 107 and (c=1,y=1,x=2) set to 123.
    expected_painted = Array5D(
        numpy.asarray(
            [
                [[107, 0, 0, 100], [0, 17, 0, 0], [0, 0, 17, 0], [0, 0, 0, 0]],
                [[200, 0, 0, 200], [0, 40, 123, 0], [0, 0, 40, 0], [0, 0, 0, 0]],
            ]
        ),
        axiskeys="cyx",
    )
    # fmt: on

    img.paint_point(Point5D.zero(c=0, y=0, x=0), value=107)
    img.paint_point(Point5D.zero(c=1, y=1, x=2), value=123)
    assert img == expected_painted
Esempio n. 2
0
def test_combine():
    """Combining located pieces must reassemble the full reference array."""
    # fmt: off
    # Full 9x6 reference containing three separate regions labeled 7, 3 and 5.
    arr = Array5D(numpy.asarray([
        [7, 7, 0, 0, 0, 0],
        [7, 7, 0, 0, 0, 0],
        [7, 0, 0, 0, 0, 0],
        [0, 0, 0, 3, 0, 0],
        [0, 0, 3, 3, 3, 0],
        [0, 0, 0, 3, 0, 0],
        [0, 0, 0, 0, 0, 0],
        [0, 0, 0, 0, 0, 0],
        [0, 0, 5, 5, 0, 0]]), axiskeys="yx")

    # Each piece carries its own location so combine() can place it correctly.
    piece1 = Array5D(numpy.asarray([
        [7, 7,],
        [7, 7,],
        [7, 0,]]), axiskeys="yx", location=Point5D.zero())

    piece2 = Array5D(numpy.asarray([
        [0, 3, 0, 0],
        [3, 3, 3, 0],
        [0, 3, 0, 0]]), axiskeys="yx", location=Point5D.zero(y=3, x=2))

    piece3 = Array5D(numpy.asarray([
        [5, 5]]), axiskeys="yx", location=Point5D.zero(y=8, x=2))
    # fmt: on

    combined = piece1.combine([piece2, piece3])
    assert (combined.raw("yx") == arr.raw("yx")).all()
Esempio n. 3
0
def test_slice_enclosing():
    """Slice5D.enclosing must return the smallest slice containing all points.

    Each axis of the result spans from the minimum to the maximum coordinate
    seen across the points (half-open, hence the ``+ 1``).
    """
    p1 = Point5D.zero(x=-13, y=40)
    p2 = Point5D.zero(z=-1, c=6)
    p3 = Point5D.zero(t=3, x=4)
    p4 = Point5D.zero(t=100, y=400)

    # Bug fix: the t axis (spanned by p3 and p4) was missing from the expected
    # slice, and the assertion only checked truthiness of the return value
    # instead of comparing it with the expected slice.
    expected_slice = Slice5D(
        x=slice(-13, 4 + 1),
        y=slice(40, 400 + 1),
        z=slice(-1, -1 + 1),
        t=slice(3, 100 + 1),
        c=slice(6, 6 + 1),
    )
    assert Slice5D.enclosing([p1, p2, p3, p4]) == expected_slice
Esempio n. 4
0
def test_allocation():
    """Allocating from a Slice5D yields an array with the slice's extent and offset."""
    positive_roi = Slice5D.zero(x=slice(100, 200), y=slice(200, 300))
    allocated = Array5D.allocate(positive_roi, numpy.uint8)
    assert allocated.shape == Shape5D(x=100, y=100)
    assert allocated.location == Point5D.zero(x=100, y=200)

    # Negative start coordinates are legal and simply widen the extent.
    negative_roi = Slice5D.zero(x=slice(-100, 200), y=slice(200, 300))
    allocated = Array5D.allocate(negative_roi, numpy.uint8)
    assert allocated.shape == Shape5D(x=300, y=100)
    assert allocated.location == Point5D.zero(x=-100, y=200)
Esempio n. 5
0
def test_data_roi_get_tiles_can_clamp_to_datasource_tiles():
    """get_datasource_tiles(clamp_to_datasource=True) must yield exactly the
    datasource tiles overlapping the roi, clamped to the datasource bounds."""
    # fmt: off
    # 4x5 source image, tiled into 2x2 blocks below.
    data = Array5D(np.asarray([
        [1, 2, 3, 4, 5],
        [6, 7, 8, 9, 10],
        [11, 12, 13, 14, 15],
        [16, 17, 18, 19, 20],
    ]).astype(np.uint8),
                   axiskeys="yx")
    # fmt: on

    ds = ArrayDataSource.from_array5d(data, tile_shape=Shape5D(x=2, y=2))
    data_slice = DataRoi(datasource=ds, x=(1, 4), y=(0, 3))

    # fmt: off
    # The roi itself: columns 1-3 of rows 0-2, located at x=1.
    dataslice_expected_data = Array5D(np.asarray([[2, 3, 4], [7, 8, 9],
                                                  [12, 13,
                                                   14]]).astype(np.uint8),
                                      axiskeys="yx",
                                      location=Point5D.zero(x=1))
    # fmt: on

    assert data_slice.retrieve() == dataslice_expected_data

    # fmt: off
    # The four 2x2 source tiles that intersect the roi.
    dataslice_expected_slices = [
        Array5D(np.asarray([[1, 2], [6, 7]]).astype(np.uint8),
                axiskeys="yx",
                location=Point5D.zero()),
        Array5D(np.asarray([
            [3, 4],
            [8, 9],
        ]).astype(np.uint8),
                axiskeys="yx",
                location=Point5D.zero(x=2)),
        Array5D(np.asarray([
            [11, 12],
            [16, 17],
        ]).astype(np.uint8),
                axiskeys="yx",
                location=Point5D.zero(y=2)),
        Array5D(np.asarray([
            [13, 14],
            [18, 19],
        ]).astype(np.uint8),
                axiskeys="yx",
                location=Point5D.zero(x=2, y=2))
    ]
    # fmt: on
    # Every produced tile must match one expected slice, keyed by its interval...
    expected_slice_dict = {a.interval: a for a in dataslice_expected_slices}
    for piece in data_slice.get_datasource_tiles(clamp_to_datasource=True):
        expected_data = expected_slice_dict.pop(piece.interval)
        assert expected_data == piece.retrieve()
    # ...and no expected slice may be left unproduced.
    assert len(expected_slice_dict) == 0
Esempio n. 6
0
def test_setting_rois():
    """Array5D.set writes a located piece into the larger array in place;
    with autocrop=True, parts of the piece falling outside are discarded."""
    # fmt: off
    # 2-channel 4x5 base data ("cyx"): channel 0 positive, channel 1 negated.
    raw = numpy.asarray([
        [[1,   2,   3,   4,   5 ],
         [6,   7,   8,   9,   10],
         [11,  12,  13,  14,  15],
         [16,  17,  18,  19,  20]],

        [[-1,  -2,  -3,  -4,  -5 ],
         [-6,  -7,  -8,  -9,  -10],
         [-11, -12, -13, -14, -15],
         [-16, -17, -18, -19, -20]],
    ])

    # A 2x2 patch aimed at channel 1, rows 1-2, columns 2-3.
    piece = Array5D(numpy.asarray([
        [100, 200],
        [300, 400]
    ]), "yx", location=Point5D.zero(x=2, y=1, c=1))

    expected_cyx_raw_with_piece = numpy.asarray([
        [[1,   2,   3,   4,   5 ],
         [6,   7,   8,   9,   10],
         [11,  12,  13,  14,  15],
         [16,  17,  18,  19,  20]],

        [[-1,  -2,  -3,  -4,  -5 ],
         [-6,  -7,  100, 200,  -10],
         [-11, -12, 300, 400, -15],
         [-16, -17, -18, -19, -20]],
    ])

    # This 2x3 patch at x=3 sticks out past the array's right edge (width 5),
    # so it can only be applied with autocrop=True.
    extrapolating_piece = Array5D(numpy.asarray([
        [111, 222, 333],
        [444, 555, 6661]
    ]), "yx", location=Point5D.zero(x=3, y=2, c=0))

    expected_cyx_raw_with_extrapolating_piece = numpy.asarray([
        [[1,   2,   3,   4,    5 ],
         [6,   7,   8,   9,    10],
         [11,  12,  13,  111,  222],
         [16,  17,  18,  444,  555]],

        [[-1,  -2,  -3,  -4,   -5 ],
         [-6,  -7,  100, 200,  -10],
         [-11, -12, 300, 400,  -15],
         [-16, -17, -18, -19,  -20]],
    ])
    # fmt: on
    arr = Array5D(raw, "cyx")
    arr.set(piece)
    assert (arr.raw("cyx") == expected_cyx_raw_with_piece).all()

    arr.set(extrapolating_piece, autocrop=True)
    assert (arr.raw("cyx") == expected_cyx_raw_with_extrapolating_piece).all()
Esempio n. 7
0
 def stack_h5s(stack_axis: str) -> List[H5DataSource]:
     """Open every h5 outer path as a datasource, laying them end to end along stack_axis."""
     sources: List[H5DataSource] = []
     running_offset = Point5D.zero()
     for path in h5_outer_paths:
         source = H5DataSource(
             outer_path=path,
             inner_path=PurePosixPath("/data"),
             filesystem=OsFs("/"),
             location=running_offset,
         )
         sources.append(source)
         # Advance the offset by this layer's extent along the stacking axis.
         running_offset += Point5D.zero(**{stack_axis: source.shape[stack_axis]})
     return sources
Esempio n. 8
0
 def __init__(self,
              *,
              outer_path: Path,
              inner_path: PurePosixPath,
              location: Point5D = Point5D.zero(),
              filesystem: JsonableFilesystem):
     """Open the HDF5 dataset at inner_path inside the file at outer_path.

     Raises:
         ValueError: if inner_path does not point to an h5py.Dataset.
     """
     self.outer_path = outer_path
     self.inner_path = inner_path
     self.filesystem = filesystem
     binfile = filesystem.openbin(outer_path.as_posix())
     f = h5py.File(binfile, "r")
     try:
         dataset = f[inner_path.as_posix()]
         if not isinstance(dataset, h5py.Dataset):
             raise ValueError(f"{inner_path} is not a Dataset")
         axiskeys = self.getAxisKeys(dataset)
         self._dataset = cast(h5py.Dataset, dataset)
         # Use the chunk layout as tile shape; unchunked datasets fall back
         # to their full shape.
         tile_shape = Shape5D.create(raw_shape=self._dataset.chunks
                                     or self._dataset.shape,
                                     axiskeys=axiskeys)
         super().__init__(
             tile_shape=tile_shape,
             interval=Shape5D.create(
                 raw_shape=self._dataset.shape,
                 axiskeys=axiskeys).to_interval5d(location),
             dtype=self._dataset.dtype,
             axiskeys=axiskeys,
         )
     except Exception:
         # Close the file on any failure; bare `raise` (instead of the
         # original `raise e`) re-raises with the original traceback intact.
         f.close()
         raise
Esempio n. 9
0
 def __init__(self,
              arr: np.ndarray,
              axiskeys: str,
              channel_colors: Sequence[Color],
              location: Point5D = Point5D.zero()):
     """Array5D-backed image with an associated Color per channel.

     channel_colors: presumably one entry per channel of arr — not validated
     here; stored as a tuple so the mapping cannot be mutated afterwards.
     """
     super().__init__(arr, axiskeys, location=location)
     self.channel_colors = tuple(channel_colors)
Esempio n. 10
0
 def __init__(self,
              path: Path,
              *,
              location: Point5D = Point5D.zero(),
              filesystem: FS):
     """Open an HDF5-backed datasource at path inside filesystem.

     On any failure during setup the underlying h5 file is closed before
     the exception propagates.
     """
     self._dataset: Optional[h5py.Dataset] = None
     try:
         self._dataset, outer_path, inner_path = self.openDataset(
             path, filesystem=filesystem)
         axiskeys = self.getAxisKeys(self._dataset)
         # Use the chunk layout as tile shape; unchunked datasets fall
         # back to their full shape.
         tile_shape = Shape5D.create(raw_shape=self._dataset.chunks
                                     or self._dataset.shape,
                                     axiskeys=axiskeys)
         super().__init__(
             url=filesystem.desc(outer_path.as_posix()) + "/" +
             inner_path.as_posix(),
             tile_shape=tile_shape,
             shape=Shape5D.create(raw_shape=self._dataset.shape,
                                  axiskeys=axiskeys),
             dtype=self._dataset.dtype,
             name=self._dataset.file.filename.split("/")[-1] +
             self._dataset.name,
             location=location,
             axiskeys=axiskeys,
         )
     except Exception as e:
         # Don't leak the open h5 file if construction fails partway.
         if self._dataset:
             self._dataset.file.close()
         raise e
Esempio n. 11
0
    def __init__(self,
                 path: Path,
                 *,
                 location: Point5D = Point5D.zero(),
                 filesystem: FS):
        """Open an N5 dataset rooted at path (a ".n5" directory).

        Reads the dataset's "attributes.json" to derive shape, block size,
        axis order, dtype and compression.

        Raises:
            UnsupportedUrlException: if the url contains no ".n5" segment.
            NotImplementedError: if the compression type has no registered
                decompressor in N5Block.DECOMPRESSORS.
        """
        url = filesystem.geturl(path.as_posix())
        match = re.search(r"[^/]+\.n5/.*$", url, re.IGNORECASE)
        if not match:
            raise UnsupportedUrlException(url)
        name = match.group(0)
        self.filesystem = filesystem.opendir(path.as_posix())

        with self.filesystem.openbin("attributes.json", "r") as f:
            attributes_json_bytes = f.read()
        attributes = json.loads(attributes_json_bytes.decode("utf8"))

        # Dimensions, block size and axes are all reversed — presumably the
        # attributes file stores them in x-first order; verify against the
        # N5 spec if touching this.
        dimensions = attributes["dimensions"][::-1]
        blockSize = attributes["blockSize"][::-1]
        # "axes" is optional in the attributes; guess from dimensionality
        # when absent.
        axiskeys = "".join(attributes["axes"]).lower(
        )[::-1] if "axes" in attributes else guess_axiskeys(dimensions)

        super().__init__(
            url=url,
            name=name,
            tile_shape=Shape5D.create(raw_shape=blockSize, axiskeys=axiskeys),
            shape=Shape5D.create(raw_shape=dimensions, axiskeys=axiskeys),
            # dtype is forced to big-endian byte order.
            dtype=np.dtype(attributes["dataType"]).newbyteorder(">"),
            location=location,
            axiskeys=axiskeys,
        )
        self.compression_type = attributes["compression"]["type"]
        if self.compression_type not in N5Block.DECOMPRESSORS.keys():
            raise NotImplementedError(
                f"Don't know how to decompress from {self.compression_type}")
Esempio n. 12
0
 def __init__(self, path: Path, *, location: Point5D = Point5D.zero(), filesystem: FS):
     """Load an image via skimage; unparseable files raise UnsupportedUrlException."""
     posix_path = path.as_posix()
     try:
         raw_data = skimage.io.imread(filesystem.openbin(posix_path))
     except ValueError:
         # skimage raises ValueError for formats it cannot parse.
         raise UnsupportedUrlException(path)
     # 2D data is "yx"; a third dimension is treated as color channels.
     axiskeys = "yxc"[:raw_data.ndim]
     super().__init__(url=filesystem.desc(posix_path), data=raw_data, axiskeys=axiskeys, location=location)
Esempio n. 13
0
def test_cut():
    """cut() takes global coordinates and records the piece's location;
    local_cut() indexes relative to the piece's own origin."""
    # fmt: off
    raw = numpy.asarray([
        [1,  2,  3,  4,  5],
        [6,  7,  8,  9,  10],
        [11, 12, 13, 14, 15],
        [16, 17, 18, 19, 20],
    ])
    # Columns 1-2 of every row (y=1:3 in "zy" axis order).
    expected_piece = numpy.asarray([
        [2,  3],
        [7,  8],
        [12, 13],
        [17, 18]
    ])
    # Global column y=2 only.
    expected_global_sub_piece = numpy.asarray([
        [3],
        [8],
        [13],
        [18]
    ])
    # fmt: on
    arr = Array5D(raw, "zy")
    piece = arr.cut(Slice5D(y=slice(1, 3)))
    assert (piece.raw("zy") == expected_piece).all()
    # The piece remembers where it came from in the global frame.
    assert piece.location == Point5D.zero(y=1)

    global_sub_piece = piece.cut(Slice5D(y=2))
    assert (global_sub_piece.raw("zy") == expected_global_sub_piece).all()

    # Local y=1 addresses the same column as global y=2, since the piece
    # starts at global y=1.
    local_sub_piece = piece.local_cut(Slice5D(y=1))
    assert (local_sub_piece.raw("zy") == global_sub_piece.raw("zy")).all()
Esempio n. 14
0
def test_zero_factory_method_defaults_coords_to_zero():
    """Point5D.zero fills every axis not explicitly given with 0."""
    point = Point5D.zero(x=123, c=456)
    # Explicitly-passed coordinates are kept...
    assert point.x == 123
    assert point.c == 456
    # ...and the remaining axes default to zero.
    for axis in ("y", "z", "t"):
        assert getattr(point, axis) == 0
Esempio n. 15
0
 def create(cls, path: Path, *, location: Point5D = Point5D.zero(), filesystem: Optional[FS] = None) -> "DataSource":
     """Instantiate the first datasource class that accepts path.

     When called on DataSource itself, every class in cls.REGISTRY is tried
     in order; when called on a concrete subclass, only that subclass is
     tried. A missing filesystem defaults to the OS filesystem at the
     path's anchor.

     Raises:
         UnsupportedUrlException: if no candidate class supports the path.
     """
     filesystem = filesystem or OSFS(path.anchor)
     # `is` rather than `==`: this is an identity check on the class object.
     for klass in cls.REGISTRY if cls is DataSource else [cls]:
         try:
             return klass(path, location=location, filesystem=filesystem)
         except UnsupportedUrlException:
             pass
     raise UnsupportedUrlException(path)
Esempio n. 16
0
    def __init__(
            self,
            paths: List[Path],
            *,
            stack_axis: str,
            layer_axiskeys: Union[str, Sequence[str]] = "",
            location: Point5D = Point5D.zero(),
            filesystems: Sequence[FS] = (),
    ):
        """Stack one datasource per path along stack_axis.

        Each layer is offset along stack_axis by the cumulative extent of
        the layers before it.

        Raises:
            ValueError: if the layers disagree in shape on any non-stacking
                axis, or in dtype.
        """
        layer_axiskeys = layer_axiskeys or [""] * len(paths)
        assert len(layer_axiskeys) == len(paths)
        self.stack_axis = stack_axis
        self.layers: List[DataSource] = []
        self.layer_offsets: List[int] = []
        layer_offset = Point5D.zero()
        # zip_longest pads missing filesystems with None; DataSource.create
        # is expected to tolerate a None filesystem.
        for layer_path, layer_fs in itertools.zip_longest(paths, filesystems):
            layer = DataSource.create(layer_path,
                                      location=layer_offset,
                                      filesystem=layer_fs)
            self.layers.append(layer)
            self.layer_offsets.append(layer_offset[stack_axis])
            layer_offset += Point5D.zero(
                **{stack_axis: layer.shape[stack_axis]})

        # All layers must agree on every axis except the stacking one
        # (normalize the stacking axis to 1 before comparing shapes).
        if len(
                set(
                    layer.shape.with_coord(**{stack_axis: 1})
                    for layer in self.layers)) > 1:
            raise ValueError(
                "Provided files have different dimensions on the non-stacking axis"
            )
        if any(layer.dtype != self.layers[0].dtype for layer in self.layers):
            raise ValueError("All layers must have the same data type!")

        stack_size = sum(layer.shape[self.stack_axis] for layer in self.layers)
        full_shape = self.layers[0].shape.with_coord(
            **{self.stack_axis: stack_size})

        super().__init__(
            url=":".join(p.as_posix() for p in paths),
            shape=full_shape,
            name="Stack from " + ":".join(p.name for p in paths),
            dtype=self.layers[0].dtype,
            location=location,
            # Put the stacking axis first in the reported axis ordering.
            axiskeys=stack_axis + Point5D.LABELS.replace(stack_axis, ""),
        )
Esempio n. 17
0
def test_n5_datasink(tmp_path: Path, data: Array5D, datasource: DataSource):
    """Data written through N5DatasetSink round-trips via N5DataSource,
    including the sink's configured location offset (x=7, y=13)."""
    sink = N5DatasetSink.create(filesystem=OsFs(tmp_path.as_posix()),
                                outer_path=Path("test_n5_datasink.n5"),
                                inner_path=PurePosixPath("/data"),
                                attributes=N5DatasetAttributes(
                                    dimensions=datasource.shape,
                                    blockSize=Shape5D(x=10, y=10),
                                    axiskeys=datasource.axiskeys,
                                    dataType=datasource.dtype,
                                    compression=RawCompressor(),
                                    location=Point5D.zero(x=7, y=13)))
    # Shift each tile to match the sink's location before writing.
    for tile in DataRoi(datasource).split(sink.tile_shape):
        sink.write(tile.retrieve().translated(Point5D.zero(x=7, y=13)))

    n5ds = N5DataSource(filesystem=sink.filesystem, path=sink.path)
    saved_data = n5ds.retrieve()
    assert saved_data.location == Point5D.zero(x=7, y=13)
    assert saved_data == data
Esempio n. 18
0
 def show(self):
     """Display this sample's tile with its in-bounds axis-neighbours blanked
     and the sample position itself highlighted at 255."""
     image = self.data_tile.retrieve().cut(copy=True)
     for axis in "xyz":
         step = Point5D.zero(**{axis: 1})
         for neighbour in (self.position + step, self.position - step):
             # Only paint neighbours that actually fall inside the tile.
             if image.interval.contains(Interval5D.enclosing([neighbour])):
                 image.paint_point(neighbour, 0)
     image.paint_point(self.position, 255)
     image.show_images()
Esempio n. 19
0
 def from_array5d(cls,
                  arr: Array5D,
                  *,
                  tile_shape: Optional[Shape5D] = None,
                  location: Point5D = Point5D.zero()):
     """Build a datasource backed by arr's raw data in canonical axis order."""
     raw = arr.raw(Point5D.LABELS)
     return cls(
         data=raw,
         axiskeys=Point5D.LABELS,
         location=location,
         tile_shape=tile_shape,
     )
Esempio n. 20
0
def test_point_arithmetic():
    """Point5D arithmetic must agree elementwise with numpy arithmetic."""
    p = Point5D(x=100, y=200, z=300, t=400, c=500)
    assert p + Point5D.zero(x=100) == Point5D(x=200, y=200, z=300, t=400, c=500)
    assert p + Point5D.inf(x=100) == Point5D.inf(x=200)
    assert p + Point5D(x=1, y=2, z=3, t=4, c=5) == Point5D(x=101, y=202, z=303, t=404, c=505)

    other = Point5D(x=1, y=2, z=3, t=4, c=5)
    other_np = other.to_np(Point5D.LABELS)
    for dunder in ("__add__", "__sub__", "__mul__", "__floordiv__"):
        # The Point5D operator result, viewed as numpy, must match the result
        # of applying the same operator to the raw numpy vectors.
        reference = getattr(p.to_np(Point5D.LABELS), dunder)(other_np)
        assert all(getattr(p, dunder)(other).to_np(Point5D.LABELS) == reference)
Esempio n. 21
0
 def from_json_value(cls, value: JsonValue) -> "SkimageDataSource":
     """Deserialize a SkimageDataSource from its JSON representation."""
     value_obj = ensureJsonObject(value)
     raw_location = value_obj.get("location")
     raw_tile_shape = value_obj.get("tile_shape")
     path = Path(ensureJsonString(value_obj.get("path")))
     # Location is optional in the serialized form; default to the origin.
     if raw_location is None:
         location = Point5D.zero()
     else:
         location = Point5D.from_json_value(raw_location)
     filesystem = JsonableFilesystem.from_json_value(value_obj.get("filesystem"))
     # tile_shape is likewise optional.
     if raw_tile_shape is None:
         tile_shape = None
     else:
         tile_shape = Shape5D.from_json_value(raw_tile_shape)
     return SkimageDataSource(path=path,
                              location=location,
                              filesystem=filesystem,
                              tile_shape=tile_shape)
Esempio n. 22
0
 def from_json_value(cls, value: JsonValue) -> "H5DataSource":
     """Deserialize an H5DataSource from its JSON representation."""
     value_obj = ensureJsonObject(value)
     raw_location = value_obj.get("location")
     outer_path = Path(ensureJsonString(value_obj.get("outer_path")))
     inner_path = PurePosixPath(ensureJsonString(value_obj.get("inner_path")))
     filesystem = JsonableFilesystem.from_json_value(value_obj.get("filesystem"))
     # Location is optional in the serialized form; default to the origin.
     if raw_location is None:
         location = Point5D.zero()
     else:
         location = Point5D.from_json_value(raw_location)
     return H5DataSource(
         outer_path=outer_path,
         inner_path=inner_path,
         filesystem=filesystem,
         location=location,
     )
Esempio n. 23
0
 def compute(self, roi: DataRoi) -> FeatureData:
     """Run every extractor on roi and concatenate their outputs along the channel axis."""
     parts: List[FeatureData] = []
     channel_offset = 0
     for extractor in self.extractors:
         # Shift each result past the channels already produced so the
         # features don't overlap when combined.
         part = extractor.compute(roi).translated(Point5D.zero(c=channel_offset))
         channel_offset += part.shape.c
         parts.append(part)
     combined = Array5D.combine(parts)
     return FeatureData(
         arr=combined.raw(combined.axiskeys),
         axiskeys=combined.axiskeys,
         location=combined.location,
     )
Esempio n. 24
0
 def get_neighboring_tiles(self,
                           tile_shape: Shape5D = None
                           ) -> Iterator["DataSourceSlice"]:
     """Yield the tiles directly adjacent to this tile, one step along each
     axis in both directions, clamped to the datasource's full extent.

     NOTE(review): the None default makes tile_shape implicitly Optional;
     consider annotating it as Optional[Shape5D].
     """
     if not self.is_defined():
         return self.defined().get_neighboring_tiles(tile_shape=tile_shape)
     tile_shape = tile_shape or self.tile_shape
     assert self.is_tile(tile_shape)
     for axis in Point5D.LABELS:
         # One tile-sized step forwards and backwards along this axis.
         for axis_offset in (tile_shape[axis], -tile_shape[axis]):
             offset = Point5D.zero(**{axis: axis_offset})
             neighbor = self.translated(offset).clamped(self.full())
             # Skip neighbours clamped away to nothing, and self itself.
             if neighbor.shape.hypervolume > 0 and neighbor != self:
                 yield neighbor
Esempio n. 25
0
 def __init__(self,
              arr: np.ndarray,
              *,
              axiskeys: str,
              location: Point5D = Point5D.zero(),
              color: Color,
              raw_data: DataSource):
     """Boolean annotation mask over raw_data; must lie within its bounds.

     Raises:
         AnnotationOutOfBounds: if the mask extends past raw_data's interval.
     """
     # The mask is stored as booleans regardless of the input dtype.
     super().__init__(arr.astype(bool), axiskeys=axiskeys, location=location)
     if not raw_data.interval.contains(self.interval):
         raise AnnotationOutOfBounds(annotation_roi=self.interval, raw_data=raw_data)
     self.color = color
     self.raw_data = raw_data
Esempio n. 26
0
 def __init__(self,
              path: Path,
              *,
              location: Point5D = Point5D.zero(),
              filesystem: JsonableFilesystem,
              tile_shape: Optional[Shape5D] = None):
     """Read the image at path through filesystem and expose it as a datasource."""
     self.path = path
     self.filesystem = filesystem
     raw_data: np.ndarray = skimage.io.imread(
         filesystem.openbin(path.as_posix()))  # type: ignore
     # 2D images are "yx"; a third dimension is treated as color channels.
     axiskeys = "yxc"[:raw_data.ndim]
     super().__init__(data=raw_data,
                      axiskeys=axiskeys,
                      location=location,
                      tile_shape=tile_shape)
Esempio n. 27
0
 def __init__(
     self,
     url: str = "",
     *,
     data: np.ndarray,
     axiskeys: str,
     tile_shape: Optional[Shape5D] = None,
     location: Point5D = Point5D.zero(),
 ):
     """In-memory datasource wrapping data as an Array5D.

     If no url is given, a synthetic memory:// url derived from the array's
     id is used.
     """
     self._data = Array5D(data, axiskeys=axiskeys)
     super().__init__(
         # Bug fix: the fallback used to be a plain (non-f) string literal
         # with a stray trailing "]", so every anonymous source got the
         # verbatim url "memory://{id(data)}]".
         url=url or f"memory://{id(data)}",
         shape=self._data.shape,
         dtype=self._data.dtype,
         tile_shape=tile_shape,
         location=location,
         axiskeys=axiskeys,
     )
Esempio n. 28
0
 def __init__(
         self,
         *,
         data: np.ndarray,
         axiskeys: str,
         tile_shape: Optional[Shape5D] = None,
         location: Point5D = Point5D.zero(),
 ):
     """Wrap an in-memory array as a datasource.

     When no tile_shape is given, a 256-hypercube clamped to the data's own
     extent is used, so the default tile never exceeds the data.
     """
     self._data = Array5D(data, axiskeys=axiskeys, location=location)
     if tile_shape is None:
         default_tile = Shape5D.hypercube(256).to_interval5d().clamped(self._data.shape)
         tile_shape = default_tile.shape
     super().__init__(
         dtype=self._data.dtype,
         tile_shape=tile_shape,
         interval=self._data.interval,
         axiskeys=axiskeys,
     )
Esempio n. 29
0
 def __init__(
     self,
     url: str,
     *,
     tile_shape: Optional[Shape5D] = None,
     dtype: np.dtype,
     name: str = "",
     shape: Shape5D,
     location: Point5D = Point5D.zero(),
     axiskeys: str,
 ):
     """Common datasource state: url, tiling, dtype, name, extent and placement."""
     self.url = url
     # Default to 256-hypercube tiles, clamped so a tile never exceeds the
     # full shape.
     requested_tile = tile_shape or Shape5D.hypercube(256)
     self.tile_shape = requested_tile.to_slice_5d().clamped(shape.to_slice_5d()).shape
     self.dtype = dtype
     # Fall back to the last url path segment when no explicit name is given.
     self.name = name or self.url.split("/")[-1]
     self.shape = shape
     self.roi = shape.to_slice_5d(offset=location)
     self.location = location
     self.axiskeys = axiskeys
Esempio n. 30
0
    def __init__(self,
                 path: Path,
                 *,
                 location: Point5D = Point5D.zero(),
                 chunk_size: Optional[Shape5D] = None,
                 filesystem: FS):
        """A DataSource that handles Neuroglancer's precomputed chunks.

        path: a path all the way down to the scale, i.e., if some scale has
                "key": "my_scale"
              then your path should end in "my_scale"
        chunk_size: a valid chunk_size for the scale selected by 'path'

        Raises:
            NotImplementedError: if the scale uses an encoding other than "raw".
        """
        self.filesystem = filesystem.opendir(path.parent.as_posix())
        self.info = PrecomputedChunksInfo.load(path=Path("info"),
                                               filesystem=self.filesystem)
        self.scale = self.info.get_scale(key=path.name)
        super().__init__(
            url="precomputed://" + filesystem.desc(path.as_posix()),
            tile_shape=self.scale.get_tile_shape_5d(
                self.info.num_channels, tile_shape_hint=chunk_size),
            shape=self.scale.get_shape_5d(self.info.num_channels),
            dtype=self.info.data_type,
            name=path.name,
            location=location,
            # externally reported axiskeys are always c-ordered
            axiskeys=self.scale.axiskeys[::-1],
        )
        encoding_type = self.scale.encoding
        if encoding_type == "raw":
            # Raw encoding needs no (de)compression.
            noop = lambda data: data
            self.decompressor = noop
            self.compressor = noop
        else:
            # Bug fix: the message previously referenced the undefined name
            # `compression_type`, which raised NameError instead of the
            # intended NotImplementedError.
            raise NotImplementedError(
                f"Don't know how to decompress {encoding_type}")