def test_custom_initialization() -> None:
    """Check the Vec3Int convenience constructors and basic arithmetic.

    Verifies that zeros/ones/full produce the expected component values and
    that element-wise subtraction and scalar multiplication behave consistently.
    """
    # Constructor shortcuts yield the expected component-wise values.
    assert Vec3Int.zeros() == Vec3Int(0, 0, 0)
    assert Vec3Int.ones() == Vec3Int(1, 1, 1)
    assert Vec3Int.full(4) == Vec3Int(4, 4, 4)

    # Arithmetic identities tying the constructors together.
    assert Vec3Int.ones() - Vec3Int.ones() == Vec3Int.zeros()
    assert Vec3Int.full(4) == Vec3Int.ones() * 4
def merge_with_fallback_layer(
    output_path: Path,
    volume_annotation_path: Path,
    segmentation_layer_path: Path,
) -> MagView:
    """Merge a volume annotation into a copy of its fallback segmentation layer.

    Creates a new dataset at ``output_path`` (shallow copy of the dataset
    containing ``segmentation_layer_path``, with a deep copy of the
    segmentation layer itself so it can be mutated), then overwrites the
    copied segmentation data with the annotated bounding boxes read from
    ``volume_annotation_path``, chunk by chunk.

    Args:
        output_path: Target location for the merged dataset; must not exist yet.
        volume_annotation_path: Path to the volume annotation data to merge in.
        segmentation_layer_path: Path to the fallback segmentation layer.

    Returns:
        The MagView of the merged output layer (finest mag of the fallback layer).
    """
    assert not output_path.exists(), f"Dataset at {output_path} already exists"

    # Prepare output dataset by creating a shallow copy of the dataset
    # determined by segmentation_layer_path, but do a deep copy of
    # segmentation_layer_path itself (so that we can mutate it).
    input_segmentation_dataset = wk.Dataset.open(segmentation_layer_path.parent)
    time_start("Prepare output dataset")
    output_dataset = input_segmentation_dataset.shallow_copy_dataset(
        output_path,
        name=output_path.name,
        make_relative=True,
        layers_to_ignore=[segmentation_layer_path.name],
    )
    output_layer = output_dataset.add_copy_layer(
        segmentation_layer_path, segmentation_layer_path.name
    )
    time_stop("Prepare output dataset")

    input_segmentation_mag = input_segmentation_dataset.get_layer(
        segmentation_layer_path.name
    ).get_finest_mag()

    with temporary_annotation_view(volume_annotation_path) as input_annotation_layer:
        input_annotation_mag = input_annotation_layer.get_finest_mag()
        # Bounding boxes of the annotated regions, converted into the
        # annotation's mag so they index voxels directly.
        annotation_bboxes = [
            bbox.in_mag(input_annotation_mag._mag)
            for bbox in input_annotation_mag.get_bounding_boxes_on_disk()
        ]
        output_mag = output_layer.get_mag(input_segmentation_mag.mag)

        # Process one shard-aligned cube at a time so each output shard is
        # read and written exactly once.
        cube_size = output_mag.info.chunk_size[0] * output_mag.info.chunks_per_shard[0]
        chunks_with_bboxes = BoundingBox.group_boxes_with_aligned_mag(
            annotation_bboxes, Mag(cube_size)
        )

        assert (
            input_annotation_mag.info.chunks_per_shard == Vec3Int.ones()
        ), "volume annotation must have file_len=1"
        assert (
            input_annotation_mag.info.voxel_type
            == input_segmentation_mag.info.voxel_type
        ), "Volume annotation must have same dtype as fallback layer"

        for chunk_count, (chunk, chunk_bboxes) in enumerate(
            chunks_with_bboxes.items(), start=1
        ):
            # Lazy %-style args avoid formatting when the level is disabled.
            logger.info("Processing chunk %s...", chunk_count)

            time_start("Read chunk")
            # Drop the leading channel axis; the buffer is a 3D voxel cube.
            data_buffer = output_mag.read(chunk.topleft, chunk.size)[0, :, :, :]
            time_stop("Read chunk")

            time_start("Read/merge bboxes")
            for bbox in chunk_bboxes:
                read_data = input_annotation_mag.read(bbox.topleft, bbox.size)
                # Translate the bbox into chunk-local coordinates before
                # overwriting the corresponding region of the buffer.
                data_buffer[bbox.offset(-chunk.topleft).to_slices()] = read_data
            time_stop("Read/merge bboxes")

            time_start("Write chunk")
            output_mag.write(data_buffer, chunk.topleft)
            time_stop("Write chunk")
    return output_mag