def test_h5_datasource():
    data_2d = Array5D(np.arange(100).reshape(10, 10), axiskeys="yx")
    h5_path = create_h5(data_2d, axiskeys_style="vigra", chunk_shape=Shape5D(x=3, y=3))
    ds = H5DataSource(outer_path=h5_path, inner_path=PurePosixPath("/data"), filesystem=OsFs("/"))
    assert ds.shape == data_2d.shape
    assert ds.tile_shape == Shape5D(x=3, y=3)

    slc = ds.interval.updated(x=(0, 3), y=(0, 2))
    assert (ds.retrieve(slc).raw("yx") == data_2d.cut(slc).raw("yx")).all()  # type: ignore

    data_3d = Array5D(np.arange(10 * 10 * 10).reshape(10, 10, 10), axiskeys="zyx")
    h5_path = create_h5(data_3d, axiskeys_style="vigra", chunk_shape=Shape5D(x=3, y=3))
    ds = H5DataSource(outer_path=h5_path, inner_path=PurePosixPath("/data"), filesystem=OsFs("/"))
    assert ds.shape == data_3d.shape
    assert ds.tile_shape == Shape5D(x=3, y=3)

    slc = ds.interval.updated(x=(0, 3), y=(0, 2), z=3)
    assert (ds.retrieve(slc).raw("yxz") == data_3d.cut(slc).raw("yxz")).all()  # type: ignore

def test_writing_to_precomputed_chunks():
    tmp_path = create_tmp_dir(prefix="test_writing_to_precomputed_chunks")
    datasource = ArrayDataSource(data=data, tile_shape=Shape5D(x=10, y=10))
    scale = PrecomputedChunksScale.from_datasource(
        datasource=datasource, key=PurePosixPath("my_test_data"), encoding=RawEncoder())
    sink_path = PurePosixPath("mytest.precomputed")
    filesystem = OsFs(tmp_path.as_posix())

    datasink = PrecomputedChunksScaleSink(
        filesystem=filesystem,
        info_dir=sink_path,
        scale=scale,
        dtype=datasource.dtype,
        num_channels=datasource.shape.c,
    )
    creation_result = datasink.create()
    if isinstance(creation_result, Exception):
        raise creation_result

    for tile in datasource.roi.get_datasource_tiles():
        datasink.write(tile.retrieve())

    precomp_datasource = PrecomputedChunksDataSource(
        path=sink_path, filesystem=filesystem, resolution=scale.resolution)
    reloaded_data = precomp_datasource.retrieve()
    assert reloaded_data == data

def test_distributed_n5_datasink():
    tmp_path = create_tmp_dir(prefix="test_distributed_n5_datasink")
    filesystem = OsFs(tmp_path.as_posix())
    outer_path = PurePosixPath("test_distributed_n5_datasink.n5")
    inner_path = PurePosixPath("/data")
    full_path = PurePosixPath("test_distributed_n5_datasink.n5/data")
    attributes = N5DatasetAttributes(
        dimensions=datasource.shape,
        blockSize=datasource.tile_shape,
        c_axiskeys=data.axiskeys,  # FIXME: double check this
        dataType=datasource.dtype,
        compression=RawCompressor())

    sink = N5DatasetSink(outer_path=outer_path, inner_path=inner_path, filesystem=filesystem, attributes=attributes)
    sink_writer = sink.create()
    assert not isinstance(sink_writer, Exception)
    # four handles to the same dataset, emulating multiple distributed writers
    sink_writers = [sink_writer] * 4

    for idx, piece in enumerate(DataRoi(datasource).default_split()):
        writer = sink_writers[idx % len(sink_writers)]
        writer.write(piece.retrieve())

    n5ds = N5DataSource(filesystem=filesystem, path=full_path)
    assert n5ds.retrieve() == data

def test_writing_to_precomputed_chunks(tmp_path: Path, data: Array5D):
    datasource = ArrayDataSource.from_array5d(data, tile_shape=Shape5D(x=10, y=10))
    scale = PrecomputedChunksScale.from_datasource(
        datasource=datasource, key=Path("my_test_data"), encoding=RawEncoder())
    info = PrecomputedChunksInfo(
        data_type=datasource.dtype,
        type_="image",
        num_channels=datasource.shape.c,
        scales=tuple([scale]),
    )
    sink_path = Path("mytest.precomputed")
    filesystem = OsFs(tmp_path.as_posix())

    datasink = PrecomputedChunksSink.create(
        filesystem=filesystem,
        base_path=sink_path,
        info=info,
    ).scale_sinks[0]

    for tile in datasource.roi.get_datasource_tiles():
        datasink.write(tile.retrieve())

    precomp_datasource = PrecomputedChunksDataSource(
        path=sink_path, filesystem=filesystem, resolution=scale.resolution)
    reloaded_data = precomp_datasource.retrieve()
    assert reloaded_data == data

def test_distributed_n5_datasink(tmp_path: Path, data: Array5D, datasource: DataSource):
    filesystem = OsFs(tmp_path.as_posix())
    outer_path = Path("test_distributed_n5_datasink.n5")
    inner_path = PurePosixPath("/data")
    full_path = Path("test_distributed_n5_datasink.n5/data")
    attributes = N5DatasetAttributes(
        dimensions=datasource.shape,
        blockSize=datasource.tile_shape,
        axiskeys=datasource.axiskeys,
        dataType=datasource.dtype,
        compression=RawCompressor())

    # one sink creates the dataset, the others open it, emulating distributed writers
    sinks = [
        N5DatasetSink.create(outer_path=outer_path, inner_path=inner_path, filesystem=filesystem, attributes=attributes),
        N5DatasetSink.open(path=full_path, filesystem=filesystem),
        N5DatasetSink.open(path=full_path, filesystem=filesystem),
        N5DatasetSink.open(path=full_path, filesystem=filesystem),
    ]

    for idx, piece in enumerate(DataRoi(datasource).default_split()):
        sink = sinks[idx % len(sinks)]
        sink.write(piece.retrieve())

    n5ds = N5DataSource(filesystem=filesystem, path=full_path)
    assert n5ds.retrieve() == data

@classmethod
def from_ilp(
    cls,
    *,
    ilp_path: Path,
    on_async_change: Callable[[], None],
    executor: Executor,
    priority_executor: PriorityExecutor,
    allowed_protocols: "Sequence[Protocol] | None" = None,
) -> "PixelClassificationWorkflow | Exception":
    allowed_protocols = allowed_protocols or (Protocol.HTTP, Protocol.HTTPS)
    with h5py.File(ilp_path, "r") as f:
        parsing_result = IlpPixelClassificationWorkflowGroup.parse(
            group=f,
            ilp_fs=OsFs("/"),
            allowed_protocols=allowed_protocols,
        )
        if isinstance(parsing_result, Exception):
            return parsing_result
        return PixelClassificationWorkflow(
            on_async_change=on_async_change,
            executor=executor,
            priority_executor=priority_executor,
            feature_extractors=set(parsing_result.FeatureSelections.feature_extractors),
            labels=parsing_result.PixelClassification.labels,
            pixel_classifier=parsing_result.PixelClassification.classifier,
        )

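# Note on the constructor above: like the parse call it wraps, from_ilp follows
# an error-as-value convention. Failures are returned as Exception values rather
# than raised, so callers must check the result with isinstance(result, Exception)
# before using the workflow (the same pattern appears in the sink tests above).
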
def test_retrieve_roi_smaller_than_tile():
    # fmt: off
    data = Array5D(np.asarray([
        [[   1,    2,    3,    4,    5],
         [   6,    7,    8,    9,   10],
         [  11,   12,   13,   14,   15],
         [  16,   17,   18,   19,   20]],

        [[ 100,  200,  300,  400,  500],
         [ 600,  700,  800,  900, 1000],
         [1100, 1200, 1300, 1400, 1500],
         [1600, 1700, 1800, 1900, 2000]],
    ]).astype(np.uint32), axiskeys="cyx")

    expected_cyx = np.asarray([
        [[ 100,  200,  300,  400],
         [ 600,  700,  800,  900],
         [1100, 1200, 1300, 1400],
         [1600, 1700, 1800, 1900]]
    ])
    # fmt: on

    path = PurePosixPath(create_n5(data, chunk_size=Shape5D(c=2, y=4, x=4)))
    ds = N5DataSource(path=path / "data", filesystem=OsFs("/"))

    smaller_than_tile = ds.retrieve(c=1, y=(0, 4), x=(0, 4))
    assert np.all(smaller_than_tile.raw("cyx") == expected_cyx)

def test_neighboring_tiles():
    # fmt: off
    arr = Array5D(np.asarray([
        [10, 11, 12,   20, 21, 22,   30],
        [13, 14, 15,   23, 24, 25,   33],
        [16, 17, 18,   26, 27, 28,   36],

        [40, 41, 42,   50, 51, 52,   60],
        [43, 44, 45,   53, 54, 55,   63],
        [46, 47, 48,   56, 57, 58,   66],

        [70, 71, 72,   80, 81, 82,   90],
        [73, 74, 75,   83, 84, 85,   93],
        [76, 77, 78,   86, 87, 88,   96],

        [ 0,  1,  2,    3,  4,  5,    6]], dtype=np.uint8), axiskeys="yx")

    ds = SkimageDataSource(path=create_png(arr), filesystem=OsFs("/"))

    fifties_slice = DataRoi(ds, x=(3, 6), y=(3, 6))
    expected_fifties_slice = Array5D(np.asarray([
        [50, 51, 52],
        [53, 54, 55],
        [56, 57, 58]]), axiskeys="yx")
    # fmt: on

    top_slice = DataRoi(ds, x=(3, 6), y=(0, 3))
    bottom_slice = DataRoi(ds, x=(3, 6), y=(6, 9))
    right_slice = DataRoi(ds, x=(6, 7), y=(3, 6))
    left_slice = DataRoi(ds, x=(0, 3), y=(3, 6))

    # fmt: off
    fifties_neighbor_data = {
        top_slice: Array5D(np.asarray([
            [20, 21, 22],
            [23, 24, 25],
            [26, 27, 28]]), axiskeys="yx"),

        right_slice: Array5D(np.asarray([
            [60],
            [63],
            [66]]), axiskeys="yx"),

        bottom_slice: Array5D(np.asarray([
            [80, 81, 82],
            [83, 84, 85],
            [86, 87, 88]]), axiskeys="yx"),

        left_slice: Array5D(np.asarray([
            [40, 41, 42],
            [43, 44, 45],
            [46, 47, 48]]), axiskeys="yx"),
    }
    # fmt: on

    assert (fifties_slice.retrieve().raw("yx") == expected_fifties_slice.raw("yx")).all()  # type: ignore

    # each neighbor must match exactly one expected slice; pop() ensures no duplicates
    for neighbor in fifties_slice.get_neighboring_tiles(tile_shape=Shape5D(x=3, y=3)):
        try:
            expected_slice = fifties_neighbor_data.pop(neighbor)
            assert (expected_slice.raw("yx") == neighbor.retrieve().raw("yx")).all()  # type: ignore
        except KeyError:
            print(f"\nWas searching for {neighbor}\n")
            for k in fifties_neighbor_data.keys():
                print("--->>> ", k)
    assert len(fifties_neighbor_data) == 0

def from_url(url: Url) -> "JsonableFilesystem | Exception":
    from webilastik.filesystem.osfs import OsFs
    from webilastik.filesystem.bucket_fs import BucketFs
    from webilastik.filesystem.http_fs import HttpFs

    if url.protocol == Protocol.FILE:
        return OsFs(url.path.as_posix())
    if url.raw.startswith(BucketFs.API_URL.raw):
        return BucketFs.try_from_url(url=url)
    return HttpFs.try_from_url(url)

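# A minimal usage sketch for the dispatch above (not from the original source).
# Url.parse() and the JsonableFilesystem class name are assumptions here --
# check webilastik.utility.url and webilastik.filesystem for the real API.
def example_filesystem_from_url():
    url = Url.parse("file:///home/user/data")  # hypothetical constructor
    assert url is not None
    fs = JsonableFilesystem.from_url(url)
    if isinstance(fs, Exception):
        raise fs  # errors come back as values, never raised
    return fs  # a file:// URL dispatches to OsFs rooted at the URL's path
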
def stack_h5s(stack_axis: str) -> List[H5DataSource]:
    offset = Point5D.zero()
    stack: List[H5DataSource] = []
    for outer_path in h5_outer_paths:
        stack.append(H5DataSource(
            outer_path=outer_path, inner_path=PurePosixPath("/data"), filesystem=OsFs("/"), location=offset))
        offset += Point5D.zero(**{stack_axis: stack[-1].shape[stack_axis]})
    return stack

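# Example of the accumulation in stack_h5s: stacking three 10-voxel-deep volumes
# with stack_axis="z" places them at z=0, z=10 and z=20, since the running offset
# grows by each source's extent along the stack axis.
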
def test_n5_datasink(tmp_path: Path, data: Array5D, datasource: DataSource):
    sink = N5DatasetSink.create(
        filesystem=OsFs(tmp_path.as_posix()),
        outer_path=Path("test_n5_datasink.n5"),
        inner_path=PurePosixPath("/data"),
        attributes=N5DatasetAttributes(
            dimensions=datasource.shape,
            blockSize=Shape5D(x=10, y=10),
            axiskeys=datasource.axiskeys,
            dataType=datasource.dtype,
            compression=RawCompressor(),
            location=Point5D.zero(x=7, y=13)))

    for tile in DataRoi(datasource).split(sink.tile_shape):
        sink.write(tile.retrieve().translated(Point5D.zero(x=7, y=13)))

    n5ds = N5DataSource(filesystem=sink.filesystem, path=sink.path)
    saved_data = n5ds.retrieve()
    assert saved_data.location == Point5D.zero(x=7, y=13)
    assert saved_data == data

def create_n5(
    array: Array5D,
    *,
    axiskeys: Optional[str] = None,
    chunk_size: Shape5D,
    compression: N5Compressor = RawCompressor(),
):
    path = Path(tempfile.mkstemp()[1] + ".n5")
    sink = N5DatasetSink.create(
        outer_path=path,
        inner_path=PurePosixPath("/data"),
        filesystem=OsFs("/"),
        attributes=N5DatasetAttributes(
            dimensions=array.shape,
            blockSize=chunk_size,
            axiskeys=axiskeys or array.axiskeys,
            dataType=array.dtype,
            compression=compression,
        ))
    for tile in array.split(chunk_size):
        sink.write(tile)
    return path.as_posix()

def test_n5_datasource():
    # fmt: off
    data = Array5D(np.asarray([
        [ 1,  2,  3,  4,  5],
        [ 6,  7,  8,  9, 10],
        [11, 12, 13, 14, 15],
        [16, 17, 18, 19, 20]]).astype(np.uint8), axiskeys="yx")
    # fmt: on

    path = Path(create_n5(data, chunk_size=Shape5D(x=2, y=2)))
    ds = N5DataSource(path=path / "data", filesystem=OsFs("/"))
    assert ds.shape == data.shape

    # fmt: off
    expected_raw_piece = Array5D(np.asarray([
        [1, 2, 3],
        [6, 7, 8]]).astype(np.uint8), axiskeys="yx")
    # fmt: on
    assert ds.retrieve(x=(0, 3), y=(0, 2)) == expected_raw_piece

    ds2 = pickle.loads(pickle.dumps(ds))
    assert ds2.retrieve(x=(0, 3), y=(0, 2)) == expected_raw_piece

def test_skimage_datasource_tiles(png_image: Path):
    bs = DataRoi(SkimageDataSource(png_image, filesystem=OsFs("/")))
    num_checked_tiles = 0
    for tile in bs.split(Shape5D(x=2, y=2)):
        if tile == Interval5D.zero(x=(0, 2), y=(0, 2)):
            expected_raw = raw_0_2x0_2y
        elif tile == Interval5D.zero(x=(0, 2), y=(2, 4)):
            expected_raw = raw_0_2x2_4y
        elif tile == Interval5D.zero(x=(2, 4), y=(0, 2)):
            expected_raw = raw_2_4x0_2y
        elif tile == Interval5D.zero(x=(2, 4), y=(2, 4)):
            expected_raw = raw_2_4x2_4y
        elif tile == Interval5D.zero(x=(4, 5), y=(0, 2)):
            expected_raw = raw_4_5x0_2y
        elif tile == Interval5D.zero(x=(4, 5), y=(2, 4)):
            expected_raw = raw_4_5x2_4y
        else:
            raise Exception(f"Unexpected tile {tile}")
        assert (tile.retrieve().raw("yx") == expected_raw).all()
        num_checked_tiles += 1
    assert num_checked_tiles == 6

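# Note for the test above: split() also yields the partial tiles at the image
# border, so a 5x4 image split into 2x2 tiles produces six tiles in total, with
# the x=(4, 5) tiles being only one pixel wide.
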
def test_n5_datasink():
    tmp_path = create_tmp_dir(prefix="test_n5_datasink")
    sink = N5DatasetSink(
        filesystem=OsFs(tmp_path.as_posix()),
        outer_path=PurePosixPath("test_n5_datasink.n5"),
        inner_path=PurePosixPath("/data"),
        attributes=N5DatasetAttributes(
            dimensions=datasource.shape,
            blockSize=Shape5D(x=10, y=10),
            c_axiskeys=data.axiskeys,  # FIXME: double check this
            dataType=datasource.dtype,
            compression=RawCompressor(),
            location=Point5D.zero(x=7, y=13)))
    sink_writer = sink.create()
    assert not isinstance(sink_writer, Exception)

    for tile in DataRoi(datasource).split(sink.tile_shape):
        sink_writer.write(tile.retrieve().translated(Point5D.zero(x=7, y=13)))

    n5ds = N5DataSource(filesystem=sink.filesystem, path=sink.full_path)
    saved_data = n5ds.retrieve()
    assert saved_data.location == Point5D.zero(x=7, y=13)
    assert saved_data == data

def test_writing_to_offset_precomputed_chunks():
    tmp_path = create_tmp_dir(prefix="test_writing_to_offset_precomputed_chunks")
    data_at_1000_1000 = data.translated(Point5D(x=1000, y=1000) - data.location)
    datasource = ArrayDataSource(data=data_at_1000_1000, tile_shape=Shape5D(x=10, y=10))
    scale = PrecomputedChunksScale.from_datasource(
        datasource=datasource, key=PurePosixPath("my_test_data"), encoding=RawEncoder())
    sink_path = PurePosixPath("mytest.precomputed")
    filesystem = OsFs(tmp_path.as_posix())

    print(f"\n\n will write to '{filesystem.geturl(sink_path.as_posix())}' ")

    datasink = PrecomputedChunksScaleSink(
        filesystem=filesystem,
        info_dir=sink_path,
        scale=scale,
        num_channels=datasource.shape.c,
        dtype=datasource.dtype,
    )
    creation_result = datasink.create()
    if isinstance(creation_result, Exception):
        raise creation_result

    for tile in datasource.roi.get_datasource_tiles():
        datasink.write(tile.retrieve())

    precomp_datasource = PrecomputedChunksDataSource(
        path=sink_path, filesystem=filesystem, resolution=scale.resolution)
    reloaded_data = precomp_datasource.retrieve(interval=data_at_1000_1000.interval)
    assert (reloaded_data.raw("xyz") == data.raw("xyz")).all()

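# Note for the test above: because the data was translated to (x=1000, y=1000)
# before writing, the read-back passes data_at_1000_1000.interval explicitly so
# that the retrieved block lines up with the original, un-translated data for
# the raw() comparison.
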
def get_sample_c_cells_datasource() -> SkimageDataSource:
    return SkimageDataSource(
        filesystem=OsFs(get_project_root_dir().as_posix()),
        path=PurePosixPath("public/images/c_cells_1.png"))

from pathlib import PurePosixPath

from ndstructs.point5D import Interval5D, Point5D
from ndstructs.array5D import Array5D
import numpy as np

from webilastik.annotations.annotation import Annotation
from webilastik.classifiers.pixel_classifier import VigraPixelClassifier
from webilastik.datasink.precomputed_chunks_sink import PrecomputedChunksScaleSink
from webilastik.datasource.precomputed_chunks_info import PrecomputedChunksScale, RawEncoder
from webilastik.datasource.skimage_datasource import SkimageDataSource
from webilastik.features.ilp_filter import IlpGaussianSmoothing
from webilastik.filesystem.osfs import OsFs

# some sample data to work on. DataSource implementations are tile-based.
data_source = SkimageDataSource(
    filesystem=OsFs(project_root_dir.as_posix()),  # filesystem could also be HttpFs, BucketFs, etc.
    path=PurePosixPath("public/images/c_cells_1.png")
)

feature_extractors = [
    # computes in 2D, slicing along axis_2d. Set axis_2d to None to compute in 3D.
    IlpGaussianSmoothing(ilp_scale=0.3, axis_2d="z"),
    IlpGaussianSmoothing(ilp_scale=0.7, axis_2d="z"),
    IlpGaussianSmoothing(ilp_scale=1.0, axis_2d="z"),
    IlpGaussianSmoothing(ilp_scale=1.6, axis_2d="z"),
    IlpGaussianSmoothing(ilp_scale=3.5, axis_2d="z"),
    IlpGaussianSmoothing(ilp_scale=5.0, axis_2d="z"),
    IlpGaussianSmoothing(ilp_scale=10.0, axis_2d="z"),
]

label_classes = [

_ = argparser.add_argument("--datasource", choices=["brain", "c_cells"], default="brain") _ = argparser.add_argument("--num-tiles") args = argparser.parse_args() executor: Executor = get_executor(hint="server_tile_handler") selected_feature_extractors: Sequence[ JsonableFeatureExtractor] = args.extractors num_tiles = None if args.num_tiles is None else int(args.num_tiles) mouse_datasources: List[DataSource] = [ PrecomputedChunksDataSource( filesystem=OsFs( Path(__file__).joinpath("../../public/images/").as_posix()), path=PurePosixPath(f"mouse{i}.precomputed"), resolution=(1, 1, 1)) for i in range(1, 3 + 1) ] if args.datasource == "brain": datasource = PrecomputedChunksDataSource( filesystem=OsFs( Path(__file__).joinpath("../../public/images/").as_posix()), path=PurePosixPath(f"mouse1.precomputed"), resolution=(1, 1, 1)) class1_annotations = [ Annotation.from_voxels( voxels=[ Point5D(x=2156, y=1326, z=0), Point5D(x=2157, y=1326, z=0),
def test_pixel_classification_ilp_serialization():
    sample_trained_ilp_path = Path("tests/projects/TrainedPixelClassification.ilp")
    output_ilp_path = Path("/tmp/rewritten.ilp")

    with h5py.File(sample_trained_ilp_path, "r") as f:
        sample_workflow_data = IlpPixelClassificationWorkflowGroup.parse(
            group=f,
            ilp_fs=OsFs("."),
            allowed_protocols=[Protocol.FILE],
        )
        assert not isinstance(sample_workflow_data, Exception)
        with open(output_ilp_path, "wb") as rewritten:
            _ = rewritten.write(sample_workflow_data.to_h5_file_bytes())
    shutil.copy("tests/projects/c_cells_1.png", output_ilp_path.parent.joinpath("c_cells_1.png"))

    with h5py.File(output_ilp_path, "r") as rewritten:
        reloaded_data = IlpPixelClassificationWorkflowGroup.parse(
            group=rewritten,
            ilp_fs=OsFs("/"),
            allowed_protocols=[Protocol.FILE],
        )
        assert not isinstance(reloaded_data, Exception)

        loaded_feature_extractors = reloaded_data.FeatureSelections.feature_extractors
        assert IlpGaussianSmoothing(ilp_scale=0.3, axis_2d="z") in loaded_feature_extractors
        assert IlpLaplacianOfGaussian(ilp_scale=0.7, axis_2d="z") in loaded_feature_extractors
        assert IlpGaussianGradientMagnitude(ilp_scale=1.0, axis_2d="z") in loaded_feature_extractors
        assert IlpDifferenceOfGaussians(ilp_scale=1.6, axis_2d="z") in loaded_feature_extractors
        assert IlpStructureTensorEigenvalues(ilp_scale=3.5, axis_2d="z") in loaded_feature_extractors
        assert IlpHessianOfGaussianEigenvalues(ilp_scale=5.0, axis_2d="z") in loaded_feature_extractors
        assert len(loaded_feature_extractors) == 6

        loaded_labels = reloaded_data.PixelClassification.labels
        for label in loaded_labels:
            loaded_points: Set[Point5D] = set()
            for a in label.annotations:
                loaded_points.update(a.to_points())
            if label.color == Color(r=np.uint8(255), g=np.uint8(225), b=np.uint8(25)):
                assert loaded_points == set([
                    Point5D(x=200, y=200), Point5D(x=201, y=201), Point5D(x=202, y=202)])
            elif label.color == Color(r=np.uint8(0), g=np.uint8(130), b=np.uint8(200)):
                assert loaded_points == set([
                    Point5D(x=400, y=400), Point5D(x=401, y=401), Point5D(x=402, y=402)])
            else:
                assert False, f"Unexpected label color: {label.color}"

    some_executor = ProcessPoolExecutor(max_workers=2)
    priority_executor = PriorityExecutor(executor=some_executor, max_active_job_steps=2)

    workflow = PixelClassificationWorkflow.from_ilp(
        allowed_protocols=[Protocol.FILE],
        executor=some_executor,
        priority_executor=priority_executor,
        ilp_path=output_ilp_path,
        on_async_change=lambda: None,
    )
    assert not isinstance(workflow, Exception)

    print("These are the deserialized brush strokes:")
    from pprint import pprint
    pprint(workflow.brushing_applet.label_classes())

    annotation_raw_data = workflow.brushing_applet.labels()[0].annotations[0].raw_data
    expected_annotation1 = Annotation.from_voxels(
        voxels=[Point5D(x=200, y=200), Point5D(x=201, y=201), Point5D(x=202, y=202)],
        raw_data=annotation_raw_data,
    )
    expected_annotation2 = Annotation.from_voxels(
        voxels=[Point5D(x=400, y=400), Point5D(x=401, y=401), Point5D(x=402, y=402)],
        raw_data=annotation_raw_data,
    )

    assert set(
        a
        for annotations in workflow.brushing_applet.label_classes().values()
        for a in annotations
    ) == set([expected_annotation1, expected_annotation2])

    res = workflow.brushing_applet.remove_annotation(
        user_prompt=dummy_prompt, label_name="Label 1", annotation=expected_annotation1)
    print(res)
    pprint(workflow.brushing_applet.label_classes())

    priority_executor.shutdown()
    some_executor.shutdown()

def test_pixel_classification_workflow():
    brushing_applet = BrushingApplet("brushing_applet")
    feature_selection_applet = FeatureSelectionApplet(
        "feature_selection_applet", datasources=brushing_applet.datasources)
    pixel_classifier_applet = PixelClassificationApplet(
        "pixel_classifier_applet",
        feature_extractors=feature_selection_applet.feature_extractors,
        annotations=brushing_applet.annotations)
    # wf = PixelClassificationWorkflow(
    #     feature_selection_applet=feature_selection_applet,
    #     brushing_applet=brushing_applet,
    #     pixel_classifier_applet=pixel_classifier_applet,
    #     predictions_export_applet=predictions_export_applet
    # )

    # GUI creates a datasource somewhere...
    ds = SkimageDataSource(
        Path("public/images/c_cells_1.png"), filesystem=OsFs("."), tile_shape=Shape5D(x=400, y=400))

    # GUI creates some feature extractors
    feature_selection_applet.feature_extractors.set_value(
        [
            GaussianSmoothing.from_ilp_scale(scale=0.3, axis_2d="z"),
            HessianOfGaussianEigenvalues.from_ilp_scale(scale=0.7, axis_2d="z"),
        ],
        confirmer=dummy_confirmer)

    # GUI creates some annotations
    brush_strokes = [
        Annotation.interpolate_from_points(
            voxels=[Point5D.zero(x=140, y=150), Point5D.zero(x=145, y=155)],
            color=Color(r=np.uint8(0), g=np.uint8(0), b=np.uint8(255)),
            raw_data=ds),
        Annotation.interpolate_from_points(
            voxels=[Point5D.zero(x=238, y=101), Point5D.zero(x=229, y=139)],
            color=Color(r=np.uint8(0), g=np.uint8(0), b=np.uint8(255)),
            raw_data=ds),
        Annotation.interpolate_from_points(
            voxels=[Point5D.zero(x=283, y=87), Point5D.zero(x=288, y=92)],
            color=Color(r=np.uint8(255), g=np.uint8(0), b=np.uint8(0)),
            raw_data=ds),
        Annotation.interpolate_from_points(
            voxels=[Point5D.zero(x=274, y=168), Point5D.zero(x=256, y=191)],
            color=Color(r=np.uint8(255), g=np.uint8(0), b=np.uint8(0)),
            raw_data=ds),
    ]
    brushing_applet.annotations.set_value(brush_strokes, confirmer=dummy_confirmer)

    # preds = predictions_export_applet.compute(DataRoi(ds))

    classifier = pixel_classifier_applet.pixel_classifier()
    executor = HashingExecutor(num_workers=8)

    # calculate predictions on arbitrary data
    preds = executor.submit(classifier.compute, ds.roi)
    preds.result().as_uint8().show_channels()
    # for png_bytes in preds.to_z_slice_pngs():
    #     path = f"/tmp/junk_test_image_{uuid.uuid4()}.png"
    #     with open(path, "wb") as outfile:
    #         outfile.write(png_bytes.getbuffer())
    #     os.system(f"gimp {path}")

    # calculate predictions on just a piece of arbitrary data
    exported_tile = executor.submit(
        classifier.compute, DataRoi(datasource=ds, x=(100, 200), y=(100, 200)))
    exported_tile.result().show_channels()

    # wf.save_as(Path("/tmp/blas.ilp"))

    # try removing a brush stroke
    brushing_applet.annotations.set_value(brush_strokes[1:], confirmer=dummy_confirmer)
    assert tuple(brushing_applet.annotations()) == tuple(brush_strokes[1:])

def get_test_output_osfs() -> OsFs:
    test_dir_path = get_tmp_dir() / f"test-{time.monotonic()}/"
    os.makedirs(test_dir_path, exist_ok=True)
    return OsFs(str(test_dir_path))