def test_label(self):
    """A label group should match both the Label and Multiscales specs."""
    # Local renamed from ``zarr`` to avoid shadowing the ``zarr`` package
    # that other methods in this file use (e.g. ``zarr.group``).
    loc = parse_url(str(self.path + "/labels/coins"))
    assert loc is not None
    node = Node(loc, list())
    self.matches(node, {Label, Multiscales})
    multiscales = self.get_spec(node, Multiscales)
    assert multiscales.lookup("multiscales", [])
def test_top_level(self):
    """The top-level image should match the Multiscales and OMERO specs."""
    # Local renamed from ``zarr`` to avoid shadowing the ``zarr`` package
    # that other methods in this file use (e.g. ``zarr.group``).
    loc = parse_url(str(self.path))
    assert loc is not None
    node = Node(loc, list())
    self.matches(node, {Multiscales, OMERO})
    multiscales = self.get_spec(node, Multiscales)
    assert multiscales.lookup("multiscales", [])
def assert_data(self, path, shape, fmt, mode="r"):
    """Open ``path`` and verify it is a multiscales image of the expected
    ``shape`` and ``fmt`` containing non-trivial (non-zero) pixel data."""
    location = parse_url(path, mode=mode, fmt=fmt)
    assert location
    assert location.fmt == fmt
    first_node = list(Reader(location)())[0]
    assert Multiscales.matches(first_node.zarr)
    highest_resolution = first_node.data[0]
    assert highest_resolution.shape == shape
    assert np.max(highest_resolution) > 0
def create_data(self, shape, fmt=None, dtype=np.uint8, mean_val=128):
    """Create a writeable store at ``self.path`` with an empty ``test``
    group and return a reproducible random image.

    :param shape: shape of the generated array
    :param fmt: OME-Zarr format to write with; defaults to a fresh
        ``CurrentFormat()``.  The previous ``fmt=CurrentFormat()``
        default created one instance at class-definition time and
        shared it across every call — the classic mutable-default
        pitfall.
    :param dtype: numpy dtype of the returned array
    :param mean_val: mean of the Poisson distribution used for pixels
    :return: a ``numpy`` array of ``shape`` and ``dtype``
    """
    if fmt is None:
        fmt = CurrentFormat()
    self.store = parse_url(self.path, mode="w", fmt=fmt).store
    self.root = zarr.group(store=self.store)
    self.group = self.root.create_group("test")
    # Seeded generator keeps the test data deterministic across runs.
    rng = np.random.default_rng(0)
    return rng.poisson(mean_val, size=shape).astype(dtype)
def test_writer(self, shape, scaler):
    """Write an image with the default format and read it back."""
    img = self.create_data(shape)
    write_image(image=img, group=self.group, chunks=(128, 128), scaler=scaler)

    # Verify by reading the written group back through the Reader.
    read_node = list(Reader(parse_url(f"{self.path}/test"))())[0]
    assert Multiscales.matches(read_node.zarr)
    highest_resolution = read_node.data[0]
    assert highest_resolution.shape == shape
    assert highest_resolution.chunks == ((1,), (2,), (1,), (128, 128), (128, 128))
def test_writer(self, shape, scaler, format_version):
    """Write an image under a specific format version and read it back."""
    img = self.create_data(shape)
    fmt = format_version()
    axes = "tczyx"[-len(shape):]
    write_image(
        image=img,
        group=self.group,
        chunks=(128, 128),
        scaler=scaler,
        fmt=fmt,
        axes=axes,
    )

    # Verify by reading the written group back through the Reader.
    read_node = list(Reader(parse_url(f"{self.path}/test"))())[0]
    assert Multiscales.matches(read_node.zarr)
    if fmt.version in ("0.1", "0.2"):
        # v0.1 and v0.2 MUST be 5D
        assert read_node.data[0].ndim == 5
    else:
        assert read_node.data[0].shape == shape
    assert np.allclose(img, read_node.data[0][...].compute())
def save(self, masks: List[omero.model.Shape], name: str) -> None:
    """
    Save the masks/labels. In case of plate, make sure to set_image first.

    :param masks: The masks
    :param name: The name
    :return: None
    """
    # Figure out whether we can flatten some dimensions
    # (a dimension whose only value across all shapes is None is unused).
    unique_dims: Dict[str, Set[int]] = {
        "T": {unwrap(mask.theT) for shapes in masks for mask in shapes},
        "C": {unwrap(mask.theC) for shapes in masks for mask in shapes},
        "Z": {unwrap(mask.theZ) for shapes in masks for mask in shapes},
    }
    ignored_dimensions: Set[str] = set()
    print(f"Unique dimensions: {unique_dims}")
    for d in "TCZ":
        if unique_dims[d] == {None}:
            ignored_dimensions.add(d)

    # Output container name is derived from the plate or image ID.
    if self.plate:
        filename = f"{self.plate.id}.zarr"
    else:
        filename = f"{self.image.id}.zarr"

    # Verify that we are linking this mask to a real ome-zarr
    source_image = self.source_image
    source_image_link = self.source_image
    if source_image is None:
        # Assume that we're using the output directory
        source_image = filename
        source_image_link = "../.."  # Drop "labels/0"

    if self.plate:
        assert self.plate_path, "Need image path within the plate"
        source_image = f"{source_image}/{self.plate_path}"
        current_path = f"{self.plate_path}/{self.path}"
    else:
        current_path = self.path

    # The source image must exist and carry multiscales metadata; its
    # pyramid depth determines how many label levels we generate below.
    print(f"source_image {source_image}")
    src = parse_url(source_image)
    assert src, "Source image does not exist"
    input_pyramid = Node(src, [])
    assert input_pyramid.load(Multiscales), "No multiscales metadata found"
    input_pyramid_levels = len(input_pyramid.data)

    root = zarr_open(filename)

    # Reuse the labels group if it already exists, otherwise create it.
    if current_path in root.group_keys():
        out_labels = getattr(root, current_path)
    else:
        out_labels = root.require_group(current_path)

    # Collapse each ignored dimension to size 1 in the output shape.
    _mask_shape: List[int] = list(self.image_shape)
    for d in ignored_dimensions:
        _mask_shape[DIMENSION_ORDER[d]] = 1
    mask_shape: Tuple[int, ...] = tuple(_mask_shape)
    del _mask_shape
    print(f"Ignoring dimensions {ignored_dimensions}")

    # NOTE(review): ``assert`` statements are stripped under ``python -O``;
    # consider raising explicitly for these validation checks — TODO confirm.
    if self.style not in ("labeled", "split"):
        assert False, "6d has been removed"

    # Create and store binary data
    labels, fill_colors, properties = self.masks_to_labels(
        masks,
        mask_shape,
        ignored_dimensions,
        check_overlaps=True,
    )
    # Downsample the label image to match the source pyramid depth,
    # using nearest-neighbour so label values are never interpolated.
    scaler = Scaler(max_layer=input_pyramid_levels)
    label_pyramid = scaler.nearest(labels)
    pyramid_grp = out_labels.require_group(name)

    write_multiscale(label_pyramid, pyramid_grp)  # TODO: dtype, chunks, overwite

    # Specify and store metadata.  The colors/properties lists are
    # referenced by ``image_label`` and filled in place below.
    image_label_colors: List[JSONDict] = []
    label_properties: List[JSONDict] = []
    image_label = {
        "version": "0.2",
        "colors": image_label_colors,
        "properties": label_properties,
        "source": {"image": source_image_link},
    }
    if properties:
        for label_value, props_dict in sorted(properties.items()):
            new_dict: Dict = {"label-value": label_value, **props_dict}
            label_properties.append(new_dict)
    if fill_colors:
        for label_value, rgba_int in sorted(fill_colors.items()):
            image_label_colors.append(
                {"label-value": label_value, "rgba": int_to_rgba_255(rgba_int)}
            )
    # TODO: move to write method
    pyramid_grp.attrs["image-label"] = image_label

    # Register with labels metadata
    print(f"Created (unknown)/{current_path}/{name}")
    attrs = out_labels.attrs.asdict()
    # TODO: could temporarily support "masks" here as well
    if "labels" in attrs:
        if name not in attrs["labels"]:
            attrs["labels"].append(name)
    else:
        attrs["labels"] = [name]
    out_labels.attrs.update(attrs)
def test_label(self):
    """Reading a label group should yield exactly three nodes."""
    label_path = str(self.path.join("labels", "coins"))
    nodes = list(Reader(parse_url(label_path))())
    assert len(nodes) == 3
def test_image(self):
    """Reading the image root should yield exactly three nodes."""
    nodes = list(Reader(parse_url(str(self.path)))())
    assert len(nodes) == 3
def initdir(self, tmpdir):
    # Fixture: create a fresh "data" directory for this test, open it as
    # a writeable store, and prepare an empty "test" group for images.
    self.path = pathlib.Path(tmpdir.mkdir("data"))
    self.store = parse_url(self.path, mode="w").store
    self.root = zarr.group(store=self.store)
    self.group = self.root.create_group("test")
def test_label(self):
    """A label path should load as a Node with data and metadata."""
    label_path = str(self.path.join("labels", "coins"))
    label_node = Node(parse_url(label_path), list())
    assert label_node.data
    assert label_node.metadata
def test_image(self):
    """The image root should load as a Node with data and metadata."""
    image_node = Node(parse_url(str(self.path)), list())
    assert image_node.data
    assert image_node.metadata
def test_labels(self):
    """The labels group should match the Labels spec."""
    # Local renamed from ``zarr`` to avoid shadowing the ``zarr`` package
    # that other methods in this file use (e.g. ``zarr.group``).
    loc = parse_url(str(self.path + "/labels"))
    assert loc is not None
    node = Node(loc, list())
    self.matches(node, {Labels})