def test_paint_point():
    """Painting single points must change exactly the targeted voxels."""
    # fmt: off
    img = Array5D(numpy.asarray([
        [[100,   0,   0, 100],
         [  0,  17,   0,   0],
         [  0,   0,  17,   0],
         [  0,   0,   0,   0]],
        [[200,   0,   0, 200],
         [  0,  40,   0,   0],
         [  0,   0,  40,   0],
         [  0,   0,   0,   0]],
    ]), axiskeys="cyx")

    expected_painted = Array5D(numpy.asarray([
        [[107,   0,   0, 100],
         [  0,  17,   0,   0],
         [  0,   0,  17,   0],
         [  0,   0,   0,   0]],
        [[200,   0,   0, 200],
         [  0,  40, 123,   0],
         [  0,   0,  40,   0],
         [  0,   0,   0,   0]],
    ]), axiskeys="cyx")
    # fmt: on

    img.paint_point(Point5D.zero(c=0, y=0, x=0), value=107)
    img.paint_point(Point5D.zero(c=1, y=1, x=2), value=123)
    assert img == expected_painted
def from_json_value(cls, data: JsonValue) -> "Annotation":
    """Deserialize an Annotation from its JSON representation.

    Rasterizes the listed voxels into a boolean scribbling mask covering
    their bounding box; raises ValueError if that box spans more than one
    channel.
    """
    data_dict = ensureJsonObject(data)
    voxels: Sequence[Point5D] = [
        Point5D.from_json_value(v) for v in ensureJsonArray(data_dict.get("voxels"))
    ]
    color = Color.from_json_data(data_dict.get("color"))
    raw_data = DataSource.from_json_value(data_dict.get("raw_data"))

    start = Point5D.min_coords(voxels)
    # +1 because slice.stop is exclusive, but max_point is inclusive
    stop = Point5D.max_coords(voxels) + 1
    scribbling_roi = Interval5D.create_from_start_stop(start=start, stop=stop)
    if scribbling_roi.shape.c != 1:
        raise ValueError(f"Annotations must not span multiple channels: {voxels}")

    scribblings = Array5D.allocate(scribbling_roi, dtype=np.dtype(bool), value=False)
    for voxel in voxels:
        scribblings.paint_point(point=voxel, value=True)

    return cls(scribblings._data, axiskeys=scribblings.axiskeys, color=color, raw_data=raw_data, location=start)
def interpolate_from_points(cls, color: Color, voxels: Sequence[Point5D], raw_data: DataSource):
    """Build an Annotation by rasterizing segments between consecutive voxels.

    Unlike plain point painting, each voxel is connected to the previous one
    via interpolate_until, so free-hand strokes come out contiguous.
    """
    start = Point5D.min_coords(voxels)
    # +1 because slice.stop is exclusive, but max_point is inclusive
    stop = Point5D.max_coords(voxels) + 1
    scribbling_roi = Interval5D.create_from_start_stop(start=start, stop=stop)
    if scribbling_roi.shape.c != 1:
        raise ValueError(f"Annotations must not span multiple channels: {voxels}")

    scribblings = Array5D.allocate(scribbling_roi, dtype=np.dtype(bool), value=False)
    anchor = voxels[0]
    for voxel in voxels:
        # Paint the whole segment from the previous voxel up to this one.
        for interp_voxel in anchor.interpolate_until(voxel):
            scribblings.paint_point(point=interp_voxel, value=True)
        anchor = voxel

    return cls(scribblings._data, axiskeys=scribblings.axiskeys, color=color, raw_data=raw_data, location=start)
def test_combine():
    """combine() must overlay pieces at their stated locations onto one array."""
    # fmt: off
    expected = Array5D(numpy.asarray([
        [7, 7, 0, 0, 0, 0],
        [7, 7, 0, 0, 0, 0],
        [7, 0, 0, 0, 0, 0],
        [0, 0, 0, 3, 0, 0],
        [0, 0, 3, 3, 3, 0],
        [0, 0, 0, 3, 0, 0],
        [0, 0, 0, 0, 0, 0],
        [0, 0, 0, 0, 0, 0],
        [0, 0, 5, 5, 0, 0]]), axiskeys="yx")

    piece1 = Array5D(numpy.asarray([
        [7, 7],
        [7, 7],
        [7, 0]]), axiskeys="yx", location=Point5D.zero())

    piece2 = Array5D(numpy.asarray([
        [0, 3, 0, 0],
        [3, 3, 3, 0],
        [0, 3, 0, 0]]), axiskeys="yx", location=Point5D.zero(y=3, x=2))

    piece3 = Array5D(numpy.asarray([
        [5, 5]]), axiskeys="yx", location=Point5D.zero(y=8, x=2))
    # fmt: on

    combined = piece1.combine([piece2, piece3])
    assert (combined.raw("yx") == expected.raw("yx")).all()
def test_allocation():
    """Allocation from a slice records both the shape and the global location."""
    arr = Array5D.allocate(Slice5D.zero(x=slice(100, 200), y=slice(200, 300)), numpy.uint8)
    assert arr.shape == Shape5D(x=100, y=100)
    assert arr.location == Point5D.zero(x=100, y=200)

    # Negative starts are allowed and must be preserved in the location.
    arr = Array5D.allocate(Slice5D.zero(x=slice(-100, 200), y=slice(200, 300)), numpy.uint8)
    assert arr.shape == Shape5D(x=300, y=100)
    assert arr.location == Point5D.zero(x=-100, y=200)
def test_slice_enlarge():
    """enlarged() pads each bounded axis by the per-axis radius."""
    radius = Point5D(x=1, y=2, z=3, t=4, c=5)

    slc = Slice5D(x=slice(10, 100), y=slice(20, 200))
    assert slc.enlarged(radius=radius) == Slice5D(x=slice(9, 101), y=slice(18, 202))

    # With explicit singleton z/t/c axes, those axes grow as well.
    slc = Slice5D(x=slice(10, 100), y=slice(20, 200), z=0, t=0, c=0)
    assert slc.enlarged(radius=radius) == Slice5D(
        x=slice(9, 101), y=slice(18, 202), z=slice(-3, 4), t=slice(-4, 5), c=slice(-5, 6)
    )
def test_slice_enclosing():
    """Slice5D.enclosing must produce the tightest slice containing all points."""
    p1 = Point5D.zero(x=-13, y=40)
    p2 = Point5D.zero(z=-1, c=6)
    p3 = Point5D.zero(t=3, x=4)
    p4 = Point5D.zero(t=100, y=400)
    # Point5D.zero defaults unspecified axes to 0, so those zeros take part in
    # the per-axis min/max too; stop is exclusive, hence max + 1.
    expected_slice = Slice5D(
        x=slice(-13, 4 + 1),
        y=slice(0, 400 + 1),
        z=slice(-1, 0 + 1),
        t=slice(0, 100 + 1),
        c=slice(0, 6 + 1),
    )
    # BUG FIX: the original asserted only the truthiness of the returned slice
    # (any non-empty object passes), leaving expected_slice unused. Compare
    # against the expected value so regressions are actually caught.
    assert Slice5D.enclosing([p1, p2, p3, p4]) == expected_slice
def test_slice_translation():
    """translated() shifts every bounded axis by the point's coordinates."""
    slc = Slice5D(x=slice(10, 100), y=slice(20, 200))
    assert slc.translated(Point5D(x=1, y=2, z=3, t=4, c=5)) == Slice5D(x=slice(11, 101), y=slice(22, 202))

    # Singleton axes shift too, including by large negative offsets.
    slc = Slice5D(x=slice(10, 100), y=slice(20, 200), z=0, t=0, c=0)
    assert slc.translated(Point5D(x=-1, y=-2, z=-3, t=-4, c=-5000)) == Slice5D(
        x=slice(9, 99), y=slice(18, 198), z=slice(-3, -2), t=slice(-4, -3), c=slice(-5000, -4999)
    )
def test_setting_rois():
    """set() writes a piece at its location; autocrop clips out-of-bounds pieces."""
    # fmt: off
    raw = numpy.asarray([
        [[  1,   2,   3,   4,   5],
         [  6,   7,   8,   9,  10],
         [ 11,  12,  13,  14,  15],
         [ 16,  17,  18,  19,  20]],
        [[ -1,  -2,  -3,  -4,  -5],
         [ -6,  -7,  -8,  -9, -10],
         [-11, -12, -13, -14, -15],
         [-16, -17, -18, -19, -20]],
    ])

    piece = Array5D(numpy.asarray([
        [100, 200],
        [300, 400]]), "yx", location=Point5D.zero(x=2, y=1, c=1))

    expected_cyx_raw_with_piece = numpy.asarray([
        [[  1,   2,   3,   4,   5],
         [  6,   7,   8,   9,  10],
         [ 11,  12,  13,  14,  15],
         [ 16,  17,  18,  19,  20]],
        [[ -1,  -2,  -3,  -4,  -5],
         [ -6,  -7, 100, 200, -10],
         [-11, -12, 300, 400, -15],
         [-16, -17, -18, -19, -20]],
    ])

    extrapolating_piece = Array5D(numpy.asarray([
        [111, 222, 333],
        [444, 555, 6661]]), "yx", location=Point5D.zero(x=3, y=2, c=0))

    expected_cyx_raw_with_extrapolating_piece = numpy.asarray([
        [[  1,   2,   3,   4,   5],
         [  6,   7,   8,   9,  10],
         [ 11,  12,  13, 111, 222],
         [ 16,  17,  18, 444, 555]],
        [[ -1,  -2,  -3,  -4,  -5],
         [ -6,  -7, 100, 200, -10],
         [-11, -12, 300, 400, -15],
         [-16, -17, -18, -19, -20]],
    ])
    # fmt: on

    arr = Array5D(raw, "cyx")
    arr.set(piece)
    assert (arr.raw("cyx") == expected_cyx_raw_with_piece).all()

    # This piece sticks out of the array; autocrop clips it to the array bounds.
    arr.set(extrapolating_piece, autocrop=True)
    assert (arr.raw("cyx") == expected_cyx_raw_with_extrapolating_piece).all()
def test_data_roi_get_tiles_can_clamp_to_datasource_tiles():
    # A 4x5 source split into 2x2 tiles; the roi below straddles tile borders.
    # fmt: off
    data = Array5D(np.asarray([
        [1,  2,  3,  4,  5],
        [6,  7,  8,  9,  10],
        [11, 12, 13, 14, 15],
        [16, 17, 18, 19, 20],
    ]).astype(np.uint8), axiskeys="yx")
    # fmt: on

    ds = ArrayDataSource.from_array5d(data, tile_shape=Shape5D(x=2, y=2))
    data_slice = DataRoi(datasource=ds, x=(1, 4), y=(0, 3))

    # Sanity check: retrieving the roi yields its sub-rectangle, located at x=1.
    # fmt: off
    dataslice_expected_data = Array5D(np.asarray([
        [2,  3,  4],
        [7,  8,  9],
        [12, 13, 14]]).astype(np.uint8), axiskeys="yx", location=Point5D.zero(x=1))
    # fmt: on

    assert data_slice.retrieve() == dataslice_expected_data

    # Clamped tiles are the datasource's own 2x2 tiles intersecting the roi,
    # cropped to the datasource bounds (no padding past the data edge).
    # fmt: off
    dataslice_expected_slices = [
        Array5D(np.asarray([
            [1, 2],
            [6, 7]]).astype(np.uint8), axiskeys="yx", location=Point5D.zero()),
        Array5D(np.asarray([
            [3, 4],
            [8, 9],
        ]).astype(np.uint8), axiskeys="yx", location=Point5D.zero(x=2)),
        Array5D(np.asarray([
            [11, 12],
            [16, 17],
        ]).astype(np.uint8), axiskeys="yx", location=Point5D.zero(y=2)),
        Array5D(np.asarray([
            [13, 14],
            [18, 19],
        ]).astype(np.uint8), axiskeys="yx", location=Point5D.zero(x=2, y=2))
    ]
    # fmt: on
    expected_slice_dict = {a.interval: a for a in dataslice_expected_slices}
    for piece in data_slice.get_datasource_tiles(clamp_to_datasource=True):
        # Each produced tile must match the expected data at its interval...
        expected_data = expected_slice_dict.pop(piece.interval)
        assert expected_data == piece.retrieve()
    # ...and every expected tile must have been produced exactly once.
    assert len(expected_slice_dict) == 0
def stack_h5s(stack_axis: str) -> List[H5DataSource]:
    """Open every h5 fixture and stack the sources along ``stack_axis``.

    Each source is placed right after the previous one by accumulating the
    previous layer's extent along the stacking axis into its location.
    """
    stack: List[H5DataSource] = []
    offset = Point5D.zero()
    for outer_path in h5_outer_paths:
        source = H5DataSource(
            outer_path=outer_path,
            inner_path=PurePosixPath("/data"),
            filesystem=OsFs("/"),
            location=offset,
        )
        stack.append(source)
        offset += Point5D.zero(**{stack_axis: source.shape[stack_axis]})
    return stack
def from_json_value(cls, value: JsonValue) -> "H5DataSource":
    """Deserialize an H5DataSource; a missing location defaults to the origin."""
    value_obj = ensureJsonObject(value)
    raw_location = value_obj.get("location")
    location = Point5D.zero() if raw_location is None else Point5D.from_json_value(raw_location)
    return H5DataSource(
        outer_path=Path(ensureJsonString(value_obj.get("outer_path"))),
        inner_path=PurePosixPath(ensureJsonString(value_obj.get("inner_path"))),
        filesystem=JsonableFilesystem.from_json_value(value_obj.get("filesystem")),
        location=location,
    )
def from_json_value(cls, value: JsonValue) -> "SkimageDataSource":
    """Deserialize a SkimageDataSource.

    Missing "location" defaults to the origin; missing "tile_shape" is passed
    through as None so the constructor picks its own default.
    """
    value_obj = ensureJsonObject(value)
    raw_location = value_obj.get("location")
    raw_tile_shape = value_obj.get("tile_shape")
    return SkimageDataSource(
        path=Path(ensureJsonString(value_obj.get("path"))),
        location=Point5D.zero() if raw_location is None else Point5D.from_json_value(raw_location),
        filesystem=JsonableFilesystem.from_json_value(value_obj.get("filesystem")),
        tile_shape=None if raw_tile_shape is None else Shape5D.from_json_value(raw_tile_shape),
    )
def __init__(self, path: Path, *, location: Point5D = Point5D.zero(), filesystem: FS):
    """Load an image via skimage and expose it as an in-memory data source."""
    try:
        raw_data = skimage.io.imread(filesystem.openbin(path.as_posix()))
    except ValueError:
        # skimage raises ValueError for formats it cannot read.
        raise UnsupportedUrlException(path)
    # 2D images map to "yx"; 3D ones additionally get a channel axis.
    axiskeys = "yxc"[: len(raw_data.shape)]
    super().__init__(
        url=filesystem.desc(path.as_posix()),
        data=raw_data,
        axiskeys=axiskeys,
        location=location,
    )
def __init__(self, arr: np.ndarray, axiskeys: str, channel_colors: Sequence[Color], location: Point5D = Point5D.zero()):
    """Array that carries a color for each of its channels."""
    super().__init__(arr, axiskeys, location=location)
    # Stored as a tuple so the per-channel color assignment is immutable.
    self.channel_colors = tuple(channel_colors)
def test_writing_to_offset_precomputed_chunks(tmp_path: Path, data: Array5D):
    # Round-trip: write a datasource located away from the origin into a
    # precomputed-chunks sink, then read it back and compare.
    datasource = ArrayDataSource.from_array5d(data, tile_shape=Shape5D(x=10, y=10), location=Point5D(x=1000, y=1000))
    scale = PrecomputedChunksScale.from_datasource(datasource=datasource, key=Path("my_test_data"), encoding=RawEncoder())
    sink_path = Path("mytest.precomputed")
    filesystem = OsFs(tmp_path.as_posix())

    info = PrecomputedChunksInfo(
        data_type=datasource.dtype,
        type_="image",
        num_channels=datasource.shape.c,
        scales=tuple([scale]),
    )
    # Only one scale was declared, so the sink for it is scale_sinks[0].
    datasink = PrecomputedChunksSink.create(
        filesystem=filesystem,
        base_path=sink_path,
        info=info,
    ).scale_sinks[0]

    # Write the source tile by tile.
    for tile in datasource.roi.get_datasource_tiles():
        datasink.write(tile.retrieve())

    # Re-open what was written through a fresh datasource and verify the
    # voxel data survived the round trip.
    precomp_datasource = PrecomputedChunksDataSource(path=sink_path, filesystem=filesystem, resolution=scale.resolution)
    reloaded_data = precomp_datasource.retrieve()
    assert (reloaded_data.raw("xyz") == data.raw("xyz")).all()  # type: ignore
def test_ininf_factory_method_defaults_coords_to_ninf():
    """Point5D.ninf fills every unspecified axis with negative infinity."""
    p = Point5D.ninf(c=123, y=456)
    assert (p.x, p.y, p.z, p.t, p.c) == (Point5D.NINF, 456, Point5D.NINF, Point5D.NINF, 123)
def test_one_factory_method_defaults_coords_to_one():
    """Point5D.one fills every unspecified axis with 1."""
    p = Point5D.one(x=123, c=456)
    assert (p.x, p.y, p.z, p.t, p.c) == (123, 1, 1, 1, 456)
def test_zero_factory_method_defaults_coords_to_zero():
    """Point5D.zero fills every unspecified axis with 0."""
    p = Point5D.zero(x=123, c=456)
    assert (p.x, p.y, p.z, p.t, p.c) == (123, 0, 0, 0, 456)
def test_labeled_coords_constructor_property_assignment():
    """Keyword-constructed coordinates must round-trip through the properties."""
    p = Point5D(x=1, y=2, z=3, t=4, c=5)
    for expected, axis in enumerate("xyztc", start=1):
        assert getattr(p, axis) == expected
def __init__(self, *, outer_path: Path, inner_path: PurePosixPath, location: Point5D = Point5D.zero(), filesystem: JsonableFilesystem):
    # Datasource backed by a single dataset inside an HDF5 file.
    self.outer_path = outer_path
    self.inner_path = inner_path
    self.filesystem = filesystem
    binfile = filesystem.openbin(outer_path.as_posix())
    f = h5py.File(binfile, "r")
    try:
        dataset = f[inner_path.as_posix()]
        if not isinstance(dataset, h5py.Dataset):
            raise ValueError(f"{inner_path} is not a Dataset")
        axiskeys = self.getAxisKeys(dataset)
        self._dataset = cast(h5py.Dataset, dataset)
        # Use the HDF5 chunk shape as the tile shape; contiguous datasets
        # report chunks=None, so fall back to the full dataset shape.
        tile_shape = Shape5D.create(raw_shape=self._dataset.chunks or self._dataset.shape, axiskeys=axiskeys)
        super().__init__(
            tile_shape=tile_shape,
            interval=Shape5D.create(raw_shape=self._dataset.shape, axiskeys=axiskeys).to_interval5d(location),
            dtype=self._dataset.dtype,
            axiskeys=axiskeys,
        )
    except Exception as e:
        # Don't leak the open file handle if anything above fails; on
        # success the file is intentionally kept open for later reads.
        f.close()
        raise e
def __init__(self, path: Path, *, location: Point5D = Point5D.zero(), filesystem: FS):
    # Datasource backed by an h5py dataset located by probing `path`
    # (openDataset splits it into the file's outer path and inner path).
    self._dataset: Optional[h5py.Dataset] = None
    try:
        self._dataset, outer_path, inner_path = self.openDataset(path, filesystem=filesystem)
        axiskeys = self.getAxisKeys(self._dataset)
        # Use the HDF5 chunk shape as the tile shape; contiguous datasets
        # report chunks=None, so fall back to the full dataset shape.
        tile_shape = Shape5D.create(raw_shape=self._dataset.chunks or self._dataset.shape, axiskeys=axiskeys)
        super().__init__(
            url=filesystem.desc(outer_path.as_posix()) + "/" + inner_path.as_posix(),
            tile_shape=tile_shape,
            shape=Shape5D.create(raw_shape=self._dataset.shape, axiskeys=axiskeys),
            dtype=self._dataset.dtype,
            name=self._dataset.file.filename.split("/")[-1] + self._dataset.name,
            location=location,
            axiskeys=axiskeys,
        )
    except Exception as e:
        # Close the underlying HDF5 file if initialization failed after the
        # dataset was already opened.
        if self._dataset:
            self._dataset.file.close()
        raise e
def test_cut():
    """cut() slices in global coordinates; local_cut() in piece-local ones."""
    # fmt: off
    raw = numpy.asarray([
        [1,  2,  3,  4,  5],
        [6,  7,  8,  9,  10],
        [11, 12, 13, 14, 15],
        [16, 17, 18, 19, 20],
    ])
    expected_piece = numpy.asarray([
        [2,  3],
        [7,  8],
        [12, 13],
        [17, 18],
    ])
    expected_global_sub_piece = numpy.asarray([
        [3],
        [8],
        [13],
        [18],
    ])
    # fmt: on

    arr = Array5D(raw, "zy")
    piece = arr.cut(Slice5D(y=slice(1, 3)))
    assert (piece.raw("zy") == expected_piece).all()
    assert piece.location == Point5D.zero(y=1)

    # Cutting the piece again still uses global coordinates...
    global_sub_piece = piece.cut(Slice5D(y=2))
    assert (global_sub_piece.raw("zy") == expected_global_sub_piece).all()

    # ...while local_cut is relative to the piece's own origin.
    local_sub_piece = piece.local_cut(Slice5D(y=1))
    assert (local_sub_piece.raw("zy") == global_sub_piece.raw("zy")).all()
def __init__(self, path: Path, *, location: Point5D = Point5D.zero(), filesystem: FS):
    # Datasource for N5 containers; `path` must point inside a *.n5 directory.
    url = filesystem.geturl(path.as_posix())
    match = re.search(r"[^/]+\.n5/.*$", url, re.IGNORECASE)
    if not match:
        raise UnsupportedUrlException(url)
    name = match.group(0)
    self.filesystem = filesystem.opendir(path.as_posix())
    with self.filesystem.openbin("attributes.json", "r") as f:
        attributes_json_bytes = f.read()
    attributes = json.loads(attributes_json_bytes.decode("utf8"))

    # N5 lists dimensions fastest-axis-first; reverse to match this
    # codebase's slowest-axis-first convention.
    dimensions = attributes["dimensions"][::-1]
    blockSize = attributes["blockSize"][::-1]
    # "axes" is optional in attributes.json; guess from the dimensionality
    # when it is absent.
    axiskeys = "".join(attributes["axes"]).lower()[::-1] if "axes" in attributes else guess_axiskeys(dimensions)

    super().__init__(
        url=url,
        name=name,
        tile_shape=Shape5D.create(raw_shape=blockSize, axiskeys=axiskeys),
        shape=Shape5D.create(raw_shape=dimensions, axiskeys=axiskeys),
        # N5 payloads are big-endian, hence the explicit byte order.
        dtype=np.dtype(attributes["dataType"]).newbyteorder(">"),
        location=location,
        axiskeys=axiskeys,
    )

    self.compression_type = attributes["compression"]["type"]
    if self.compression_type not in N5Block.DECOMPRESSORS.keys():
        raise NotImplementedError(f"Don't know how to decompress from {self.compression_type}")
def create(cls, path: Path, *, location: Point5D = Point5D.zero(), filesystem: Optional[FS] = None) -> "DataSource":
    """Try every registered DataSource class until one accepts ``path``.

    Called on the DataSource base class, all registered subclasses are tried
    in order; called on a concrete subclass, only that class is tried.
    Raises UnsupportedUrlException if no class can open the path.
    """
    fs = filesystem or OSFS(path.anchor)
    candidates = cls.REGISTRY if cls == DataSource else [cls]
    for klass in candidates:
        try:
            return klass(path, location=location, filesystem=fs)
        except UnsupportedUrlException:
            pass
    raise UnsupportedUrlException(path)
def from_json_value(cls, value: JsonValue) -> "N5DataSource":
    """Deserialize an N5DataSource from JSON.

    A missing "location" defaults to the origin, matching the behavior of
    the H5DataSource and SkimageDataSource deserializers.
    """
    value_obj = ensureJsonObject(value)
    raw_location = value_obj.get("location")
    return N5DataSource(
        path=Path(ensureJsonString(value_obj.get("path"))),
        filesystem=JsonableFilesystem.from_json_value(value_obj.get("filesystem")),
        # BUG FIX: the original passed None through as the location when
        # "location" was absent (`raw_location if raw_location is None else
        # ...`); default to Point5D.zero() like the sibling deserializers.
        location=Point5D.zero() if raw_location is None else Point5D.from_json_value(raw_location),
    )
def __init__(
    self,
    paths: List[Path],
    *,
    stack_axis: str,
    layer_axiskeys: Union[str, Sequence[str]] = "",
    location: Point5D = Point5D.zero(),
    filesystems: Sequence[FS] = (),
):
    # Datasource that concatenates one datasource per path along `stack_axis`.
    layer_axiskeys = layer_axiskeys or [""] * len(paths)
    assert len(layer_axiskeys) == len(paths)
    self.stack_axis = stack_axis
    self.layers: List[DataSource] = []
    self.layer_offsets: List[int] = []
    layer_offset = Point5D.zero()
    # NOTE(review): zip_longest fills missing filesystems with None —
    # presumably DataSource.create falls back to a default FS then; confirm.
    for layer_path, layer_fs in itertools.zip_longest(paths, filesystems):
        layer = DataSource.create(layer_path, location=layer_offset, filesystem=layer_fs)
        self.layers.append(layer)
        self.layer_offsets.append(layer_offset[stack_axis])
        # Place the next layer right after this one along the stacking axis.
        layer_offset += Point5D.zero(**{stack_axis: layer.shape[stack_axis]})

    # All layers must agree on every axis except the stacking axis...
    if len(set(layer.shape.with_coord(**{stack_axis: 1}) for layer in self.layers)) > 1:
        raise ValueError("Provided files have different dimensions on the non-stacking axis")
    # ...and on the data type.
    if any(layer.dtype != self.layers[0].dtype for layer in self.layers):
        raise ValueError("All layers must have the same data type!")

    stack_size = sum(layer.shape[self.stack_axis] for layer in self.layers)
    full_shape = self.layers[0].shape.with_coord(**{self.stack_axis: stack_size})

    super().__init__(
        url=":".join(p.as_posix() for p in paths),
        shape=full_shape,
        name="Stack from " + ":".join(p.name for p in paths),
        dtype=self.layers[0].dtype,
        location=location,
        # Put the stacking axis first in the axis ordering.
        axiskeys=stack_axis + Point5D.LABELS.replace(stack_axis, ""),
    )
def from_array5d(cls, arr: Array5D, *, tile_shape: Optional[Shape5D] = None, location: Point5D = Point5D.zero()):
    """Wrap an Array5D in a datasource, reading it out in canonical axis order."""
    return cls(
        data=arr.raw(Point5D.LABELS),
        axiskeys=Point5D.LABELS,
        location=location,
        tile_shape=tile_shape,
    )
def show(self):
    # Debug helper: display this tile with the current position highlighted.
    # NOTE(review): cut(copy=True) with no roi presumably copies the whole
    # tile so painting below doesn't mutate cached data — confirm.
    data = self.data_tile.retrieve().cut(copy=True)
    for axis in "xyz":
        increment = Point5D.zero(**{axis: 1})
        # Darken the 6-connected neighbors of the position that fall inside
        # the tile, so the highlighted voxel stands out.
        for pos in (self.position + increment, self.position - increment):
            if data.interval.contains(Interval5D.enclosing([pos])):
                data.paint_point(pos, 0)
    # Mark the position itself with the maximum intensity.
    data.paint_point(self.position, 255)
    data.show_images()
def test_from_start_stop():
    """create_from_start_stop builds per-axis slices: start inclusive, stop exclusive."""
    start = Point5D(x=10, y=20, z=30, t=40, c=50)
    stop = start + 10
    slc = Slice5D.create_from_start_stop(start, stop)
    assert slc == Slice5D(
        x=slice(10, 20), y=slice(20, 30), z=slice(30, 40), t=slice(40, 50), c=slice(50, 60)
    )