def test_speed():
    """Assert that a ParallelPipeline is faster than the sequential equivalent."""
    # Sequential baseline: two nested Unpack levels plus a Sleep per object.
    with Pipeline() as pipeline:
        level1 = Unpack(range(N_STEPS))
        level2 = Unpack(range(N_STEPS))
        Sleep()

    with Timer("sequential") as t:
        expected_result = [
            (obj[level1], obj[level2]) for obj in pipeline.transform_stream()
        ]
    # NOTE(review): elapsed is read after the Timer context exits — assumes
    # Timer.elapsed is valid post-exit; confirm against Timer's contract.
    elapsed_sequential = t.elapsed

    # Parallel variant: the inner level runs inside a 4-worker ParallelPipeline.
    with Pipeline() as pipeline:
        level1 = Unpack(range(N_STEPS))
        with ParallelPipeline(4) as pp:
            level2 = Unpack(range(N_STEPS))
            Sleep()

    with Timer("parallel") as t:
        result = [
            (obj[level1], obj[level2]) for obj in pipeline.transform_stream()
        ]
    elapsed_parallel = t.elapsed

    # Parallelization must preserve stream order and content, and be faster.
    assert result == expected_result
    assert elapsed_parallel < elapsed_sequential
def test_Format():
    """Assert Format appends positional/keyword arguments in the right order."""
    # Positional args come first, then _args, then _kwargs, then **kwargs.
    fmt = "{},{},{},{},{},{},{a},{b},{c},{d}"
    args = (1, 2, 3)
    _args = (4, 5, 6)
    _kwargs = {"a": 7, "b": 8}
    kwargs = {"c": 9, "d": 10}

    with Pipeline() as pipeline:
        result = Format(fmt, *args, _args=_args, _kwargs=_kwargs, **kwargs)

    stream = pipeline.transform_stream()
    obj = next(stream)
    assert obj[result] == "1,2,3,4,5,6,7,8,9,10"

    # Assert that **kwargs take precedence over the _kwargs mapping.
    fmt = "{a},{b}"
    _kwargs = {"a": 1, "b": 2}
    kwargs = {"a": 3, "b": 4}

    with Pipeline() as pipeline:
        result = Format(fmt, _kwargs=_kwargs, **kwargs)

    stream = pipeline.transform_stream()
    obj = next(stream)
    assert obj[result] == "3,4"
def test_ecotaxa(tmp_path):
    """Round-trip an EcoTaxa archive: write with EcotaxaWriter, read back with EcotaxaReader."""
    archive_fn = tmp_path / "ecotaxa.zip"
    print(archive_fn)

    # Create an archive of 10 objects with metadata and binary blob images.
    with Pipeline() as p:
        i = Unpack(range(10))
        meta = Call(dict, i=i, foo="Sömé UTF-8 ſtríng…")
        image = BinaryBlobs()
        image_name = Format("image_{}.png", i)
        EcotaxaWriter(archive_fn, (image_name, image), meta)

    result = [o.to_dict(meta=meta, image=image) for o in p.transform_stream()]

    # Read the archive back.
    with Pipeline() as p:
        image, meta = EcotaxaReader(archive_fn)

    roundtrip_result = [
        o.to_dict(meta=meta, image=image) for o in p.transform_stream()
    ]

    # Metadata fields and image data must survive the round trip unchanged.
    for meta_field in ("i", "foo"):
        assert [o["meta"][meta_field] for o in result] == [
            o["meta"][meta_field] for o in roundtrip_result
        ]

    assert_equal(
        [o["image"] for o in result], [o["image"] for o in roundtrip_result]
    )
def test_KeyError():
    """Assert that a KeyError raised inside a worker propagates to the caller."""
    with Pipeline() as pipeline:
        with ParallelPipeline(4) as pp:
            KeyErrorRaiser()

    # The exception raised in a worker process must surface on run().
    with pytest.raises(KeyError, match="foo"):
        pipeline.run()
def test_DrawContours():
    """Smoke-test DrawContours on a thresholded camera image."""
    image = skimage.data.camera()

    with Pipeline() as pipeline:
        mask = ThresholdConst(image, 255)
        result = DrawContours(image, mask, (0, 255, 0))

    pipeline.run()
def test_num_workers(num_workers):
    """Smoke-test ParallelPipeline with a parametrized worker count."""
    with Pipeline() as pipeline:
        level1 = Unpack(range(N_STEPS))
        with ParallelPipeline(num_workers) as pp:
            level2 = Unpack(range(N_STEPS))

    pipeline.run()
def test_Find(data_path):
    """Smoke-test Find on the test images directory."""
    d = data_path / "images"

    with Pipeline() as pipeline:
        result = Find(d, [".png"])

    stream = pipeline.transform_stream()
    pipeline.run()
def test_Glob(data_path):
    """Smoke-test Glob with a wildcard pattern."""
    d = data_path / "images/*.png"

    with Pipeline() as pipeline:
        result = Glob(d, True)

    stream = pipeline.transform_stream()
    pipeline.run()
def test_FindRegions():
    """Smoke-test FindRegions on a thresholded camera image."""
    image = skimage.data.camera()

    with Pipeline() as pipeline:
        mask = ThresholdConst(image, 255)
        result = FindRegions(mask, image, 0, 100, padding=10)

    pipeline.run()
def test_PiCamera():
    """Smoke-test the PiCameraReader source (requires camera hardware)."""
    with Pipeline() as p:
        frame = PiCameraReader()
        # Only capture 10 frames
        Slice(10)

    p.run()
def test_ImageProperties():
    """Assert that ExtractROI with padding 0 reproduces the full source image."""
    with Pipeline() as pipeline:
        image = Unpack([skimage.data.camera()])
        mask = ThresholdConst(image, 255)
        region = ImageProperties(mask, image)
        image2 = ExtractROI(image, region, 0)

    for obj in pipeline.transform_stream():
        assert_equal(obj[image], obj[image2])
def test_ThresholdConst():
    """Assert that an all-255 image thresholded at 255 yields an empty mask."""
    images = [skimage.data.camera(), np.zeros((10, 10), np.uint8) + 255]

    with Pipeline() as pipeline:
        image = Unpack(images)
        mask = ThresholdConst(image, 255)

    objects = list(pipeline.transform_stream())

    # Second input is uniformly 255, so no pixel exceeds the threshold.
    assert not objects[1][mask].any()
def test_Call():
    """Assert that Call forwards positional arguments and captures the return value."""

    def foo(bar, baz):
        return bar, baz

    with Pipeline() as pipeline:
        result = Call(foo, 1, 2)

    obj, *_ = list(pipeline.transform_stream())
    assert obj[result] == (1, 2)
def test_StreamBuffer():
    """Assert that StreamBuffer passes objects through unchanged and in order."""
    with Pipeline() as pipeline:
        item = Unpack(range(10))
        # Node registers itself with the pipeline on construction; the original
        # bound it to `result` only to immediately shadow it, so drop the binding.
        StreamBuffer(1)

    stream = pipeline.transform_stream()
    result = [o[item] for o in stream]

    assert result == list(range(10))
def test_RGB2Gray():
    """Assert that RGB2Gray produces a 2-D (single-channel) image."""
    image = skimage.data.astronaut()

    with Pipeline() as pipeline:
        result = RGB2Gray(image)

    stream = pipeline.transform_stream()
    obj = next(stream)

    assert obj[result].ndim == 2
def test_vignette_corrector_no_channel():
    """Smoke-test VignettingCorrector on grayscale (channel-less) images."""
    with Pipeline() as pipeline:
        img_fn = Unpack(image_fns)
        image = Call(imread, img_fn, as_gray=True)
        result = VignettingCorrector(image)

    stream = pipeline.transform_stream()
    obj = next(stream)
def test_ImageWriter(tmp_path):
    """Smoke-test ImageWriter writing a camera image to a temp path."""
    d = tmp_path / "sub"
    d.mkdir()
    p = d / "new.jpg"
    image = skimage.data.camera()

    with Pipeline() as pipeline:
        result = ImageWriter(p, image)

    pipeline.run()
def test_Enumerate():
    """Assert that Enumerate numbers stream objects in source order."""
    with Pipeline() as pipeline:
        a = Unpack(range(10))
        i = Enumerate()

    stream = pipeline.transform_stream()

    # Unpack emits 0..9 in order, so the running index must match the value.
    for obj in stream:
        assert obj[a] == obj[i]
def test_ExtractROI():
    """Smoke-test ExtractROI with the default and an explicit fill value."""
    with Pipeline() as pipeline:
        image = Unpack([skimage.data.camera()])
        mask = ThresholdConst(image, 255)
        regions = FindRegions(mask, image)

        ExtractROI(image, regions)
        ExtractROI(image, regions, 0.5)

    pipeline.run()
def test_Gray2RGB():
    """Assert that Gray2RGB produces a 3-channel image."""
    image = skimage.data.camera()

    with Pipeline() as pipeline:
        result = Gray2RGB(image)

    stream = pipeline.transform_stream()
    obj = next(stream)

    assert obj[result].ndim == 3
    assert obj[result].shape[-1] == 3
def test_StreamBuffer():
    """Assert StreamBuffer preserves order, content and the remaining-count hint."""
    with Pipeline() as pipeline:
        item = Unpack(range(10))
        StreamBuffer(1)

    stream = pipeline.transform_stream()
    objects = [o for o in stream]

    # Buffering must not lose the stream-length hint of the Unpack source.
    assert objects[0].n_remaining_hint == 10
    assert [o[item] for o in objects] == list(range(10))
def test_DrawContoursOnParent():
    """Smoke-test DrawContoursOnParent over regions of a thresholded image."""
    image = skimage.data.camera()

    with Pipeline() as pipeline:
        mask = ThresholdConst(image, 255)
        regions = FindRegions(mask, image, 0, 100, padding=10)
        output_ref = image
        result = DrawContoursOnParent(
            image, mask, output_ref, regions, (0, 255, 0)
        )

    stream = pipeline.transform_stream()
    pipeline.run()
def test_exception_worker():
    """Assert that an exception raised inside a worker propagates to run()."""
    with Pipeline() as pipeline:
        level1 = Unpack(range(N_STEPS))
        with ParallelPipeline(4) as pp:
            Sleep()
            Raiser()

    with pytest.raises(SomeException, match="foo"):
        pipeline.run()
def test_TQDM():
    """Assert that the TQDM progress bar passes the stream through unchanged."""
    # Assert that the progress bar works with stream
    with Pipeline() as pipeline:
        item = Unpack(range(10))
        # Node registers itself on construction; the original bound it to
        # `result` only to immediately shadow it, so drop the binding.
        TQDM("Description")

    stream = pipeline.transform_stream()
    result = [o[item] for o in stream]

    assert result == list(range(10))
def test_ecotaxa(tmp_path, ext):
    """Round-trip an EcoTaxa archive including per-table (object/acq/process/sample) metadata."""
    archive_fn = tmp_path / ("ecotaxa" + ext)
    print(archive_fn)

    # Create an archive of 10 objects with metadata and binary blob images.
    with Pipeline() as p:
        i = Unpack(range(10))
        meta = Call(dict, i=i, foo="Sömé UTF-8 ſtríng…")
        image = BinaryBlobs()
        image_name = Format("image_{}.png", i)
        EcotaxaWriter(
            archive_fn,
            (image_name, image),
            meta,
            object_meta={"foo": 0},
            acq_meta={"foo": 1},
            process_meta={"foo": 2},
            sample_meta={"foo": 3},
        )

    result = [o.to_dict(meta=meta, image=image) for o in p.transform_stream()]

    # Read the archive back.
    with Pipeline() as p:
        image, meta = EcotaxaReader(archive_fn)

    roundtrip_result = [
        o.to_dict(meta=meta, image=image) for o in p.transform_stream()
    ]

    # Per-object metadata must survive the round trip unchanged.
    for meta_field in ("i", "foo"):
        assert [o["meta"][meta_field] for o in result] == [
            o["meta"][meta_field] for o in roundtrip_result
        ]

    # Each table's static metadata is stored under its EcoTaxa prefix.
    for i, prefix in enumerate(("object_", "acq_", "process_", "sample_")):
        assert [o["meta"][prefix + "foo"] for o in result] == [
            i for _ in roundtrip_result
        ]

    assert_equal(
        [o["image"] for o in result], [o["image"] for o in roundtrip_result]
    )
def test_worker_die():
    """Assert that a worker killed by SIGKILL is reported as a RuntimeError."""
    with Pipeline() as pipeline:
        level1 = Unpack(range(N_STEPS))
        with ParallelPipeline(4):
            # Each worker kills itself hard, simulating an OOM-kill or crash.
            Call(lambda: os.kill(os.getpid(), signal.SIGKILL))

    with pytest.raises(
        RuntimeError, match=r"Worker \d+ died unexpectedly. Exit code: -SIGKILL"
    ):
        pipeline.run()
def test_Unpack():
    """Assert Unpack emits all values in order and exposes a stream-length hint."""
    values = list(range(10))

    with Pipeline() as pipeline:
        value = Unpack(values)

    stream = pipeline.transform_stream()
    objects = [o for o in stream]

    assert objects[0].n_remaining_hint == 10
    assert values == [o[value] for o in objects]
def test_Slice():
    """Assert that Slice truncates and sub-ranges the stream like itertools.islice."""
    items = "ABCDEFG"

    # Assert that the stream is sliced
    with Pipeline() as pipeline:
        result = Slice(2)

    stream = pipeline.transform_stream(items)
    obj = list(stream)
    assert obj == ["A", "B"]

    # Assert that the stream is sliced from the specified start and end
    with Pipeline() as pipeline:
        result = Slice(2, 4)

    stream = pipeline.transform_stream(items)
    obj = list(stream)
    assert obj == ["C", "D"]
def test_Unpack():
    """Assert that Unpack emits every input value in order."""
    values = list(range(10))

    with Pipeline() as pipeline:
        value = Unpack(values)

    stream = pipeline.transform_stream()
    result = [o[value] for o in stream]

    assert values == result
def test_Node():
    """Assert Node construction rules and output passing."""
    # Assert that Node checks for the existence of a pipeline
    with pytest.raises(RuntimeError):
        TestNode(1, 2, 3)

    # Assert that Node checks for the existence of transform
    with Pipeline() as pipeline:
        TestNodeNoTransform()
    with pytest.raises(AttributeError):
        pipeline.run()

    # Assert that parameters and outputs are passed as expected
    with Pipeline() as pipeline:
        a, b, c = TestNode(1, 2, 3)

    obj, *_ = list(pipeline.transform_stream())
    assert obj[a] == 1
    assert obj[b] == 2
    assert obj[c] == 3