def save(self, masks: List[omero.model.Shape], name: str) -> None:
    """
    Save the masks/labels. In case of plate, make sure to set_image first.

    Rasterizes the OMERO shapes into a labeled image, writes it as a
    multiscale pyramid under a ``labels``-style group in the output zarr,
    and attaches ``image-label`` metadata (colors, properties, source link).

    :param masks: The masks
    :param name: The name
    :return: None
    """
    # Figure out whether we can flatten some dimensions:
    # collect the set of T/C/Z plane indices used across all shapes.
    # NOTE(review): iteration is `for shapes in masks for mask in shapes`,
    # so `masks` appears to be an iterable of shape-groups — confirm the
    # declared List[Shape] annotation against callers.
    unique_dims: Dict[str, Set[int]] = {
        "T": {unwrap(mask.theT) for shapes in masks for mask in shapes},
        "C": {unwrap(mask.theC) for shapes in masks for mask in shapes},
        "Z": {unwrap(mask.theZ) for shapes in masks for mask in shapes},
    }
    ignored_dimensions: Set[str] = set()
    print(f"Unique dimensions: {unique_dims}")
    # A dimension is ignorable only when *every* shape left it unset (None).
    for d in "TCZ":
        if unique_dims[d] == {None}:
            ignored_dimensions.add(d)

    # Output container is named after the plate id, else the image id.
    if self.plate:
        filename = f"{self.plate.id}.zarr"
    else:
        filename = f"{self.image.id}.zarr"

    # Verify that we are linking this mask to a real ome-zarr
    source_image = self.source_image
    source_image_link = self.source_image
    if source_image is None:
        # Assume that we're using the output directory
        source_image = filename
        source_image_link = "../.."  # Drop "labels/0"
    if self.plate:
        # Plate layout: the image lives at a row/col/field path inside
        # the plate, so both the source and the label path are nested.
        assert self.plate_path, "Need image path within the plate"
        source_image = f"{source_image}/{self.plate_path}"
        current_path = f"{self.plate_path}/{self.path}"
    else:
        current_path = self.path

    print(f"source_image {source_image}")
    src = parse_url(source_image)
    assert src, "Source image does not exist"
    # Load the source pyramid so the label pyramid gets the same depth.
    input_pyramid = Node(src, [])
    assert input_pyramid.load(Multiscales), "No multiscales metadata found"
    input_pyramid_levels = len(input_pyramid.data)

    root = zarr_open(filename)
    # Reuse the group if it already exists, otherwise create it.
    # NOTE(review): the existing-group branch uses getattr(root, path),
    # which presumably resolves nested paths via zarr attribute access —
    # confirm this works for multi-segment plate paths.
    if current_path in root.group_keys():
        out_labels = getattr(root, current_path)
    else:
        out_labels = root.require_group(current_path)

    # Collapse ignored dimensions to extent 1 in the label array shape.
    _mask_shape: List[int] = list(self.image_shape)
    for d in ignored_dimensions:
        _mask_shape[DIMENSION_ORDER[d]] = 1
    mask_shape: Tuple[int, ...] = tuple(_mask_shape)
    del _mask_shape
    print(f"Ignoring dimensions {ignored_dimensions}")

    # NOTE(review): `assert` is stripped under `python -O`; a raised
    # ValueError would be a safer guard for unsupported styles.
    if self.style not in ("labeled", "split"):
        assert False, "6d has been removed"

    # Create and store binary data
    labels, fill_colors, properties = self.masks_to_labels(
        masks,
        mask_shape,
        ignored_dimensions,
        check_overlaps=True,
    )
    scaler = Scaler(max_layer=input_pyramid_levels)
    label_pyramid = scaler.nearest(labels)
    pyramid_grp = out_labels.require_group(name)
    write_multiscale(label_pyramid, pyramid_grp)  # TODO: dtype, chunks, overwite

    # Specify and store metadata.
    # The two lists are embedded in `image_label` first and filled in
    # afterwards — mutation order matters here.
    image_label_colors: List[JSONDict] = []
    label_properties: List[JSONDict] = []
    image_label = {
        "version": "0.2",
        "colors": image_label_colors,
        "properties": label_properties,
        "source": {"image": source_image_link},
    }
    if properties:
        for label_value, props_dict in sorted(properties.items()):
            new_dict: Dict = {"label-value": label_value, **props_dict}
            label_properties.append(new_dict)
    if fill_colors:
        for label_value, rgba_int in sorted(fill_colors.items()):
            image_label_colors.append({
                "label-value": label_value,
                "rgba": int_to_rgba_255(rgba_int),
            })
    # TODO: move to write method
    pyramid_grp.attrs["image-label"] = image_label

    # Register with labels metadata
    # NOTE(review): "(unknown)" in this message looks like a placeholder
    # where the container filename was intended — confirm upstream.
    print(f"Created (unknown)/{current_path}/{name}")
    attrs = out_labels.attrs.asdict()
    # TODO: could temporarily support "masks" here as well
    if "labels" in attrs:
        if name not in attrs["labels"]:
            attrs["labels"].append(name)
    else:
        attrs["labels"] = [name]
    out_labels.attrs.update(attrs)
def test_zoom(self, shape):
    """Downscale with ``Scaler.zoom`` and validate the resulting pyramid."""
    sample = self.create_data(shape)
    pyramid = Scaler().zoom(sample)
    self.check_downscaled(pyramid, shape)
def test_local_mean(self, shape):
    """Downscale with ``Scaler.local_mean`` and validate the resulting pyramid."""
    sample = self.create_data(shape)
    pyramid = Scaler().local_mean(sample)
    self.check_downscaled(pyramid, shape)
def test_gaussian(self, shape):
    """Downscale with ``Scaler.gaussian`` and validate the resulting pyramid."""
    sample = self.create_data(shape)
    pyramid = Scaler().gaussian(sample)
    self.check_downscaled(pyramid, shape)
def test_nearest(self, shape):
    """Downscale with ``Scaler.nearest`` and validate the resulting pyramid."""
    sample = self.create_data(shape)
    pyramid = Scaler().nearest(sample)
    self.check_downscaled(pyramid, shape)
def scaler(self, request):
    """Parametrized fixture: yield a ``Scaler`` when the param is truthy, else ``None``."""
    return Scaler() if request.param else None