Example #1
def test_num_workers(num_workers):
    with Pipeline() as pipeline:
        level1 = Unpack(range(N_STEPS))
        with ParallelPipeline(num_workers) as pp:
            level2 = Unpack(range(N_STEPS))

    pipeline.run()
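
Several of these snippets (Examples #1–#3, #6, #15 and #28) come from the MorphoCut test suite, so N_STEPS, Sleep, Raiser, SomeException, N, DURATION_INNER and DURATION_OUTER are test helpers rather than part of the MorphoCut API. A minimal, hypothetical sketch that makes them self-contained, built on the library's Call node, could look like this (the real helpers may be implemented differently):

import time

from morphocut import Call

N_STEPS = 100  # arbitrary stream length used by the tests


class SomeException(Exception):
    """Marker exception used to test error propagation."""


def Sleep(duration=0.01):
    # Hypothetical stand-in: pause once for every object in the stream.
    return Call(time.sleep, duration)


def Raiser():
    # Hypothetical stand-in: fail for every object, so the tests can assert
    # that the exception propagates out of pipeline.run().
    def _raise():
        raise SomeException("foo")

    return Call(_raise)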
Example #2
def test_speed():

    with Pipeline() as pipeline:
        level1 = Unpack(range(N_STEPS))
        level2 = Unpack(range(N_STEPS))
        Sleep()

    with Timer("sequential") as t:
        expected_result = [
            (obj[level1], obj[level2]) for obj in pipeline.transform_stream()
        ]

    elapsed_sequential = t.elapsed

    with Pipeline() as pipeline:
        level1 = Unpack(range(N_STEPS))
        with ParallelPipeline(4) as pp:
            level2 = Unpack(range(N_STEPS))
            Sleep()

    with Timer("parallel") as t:
        result = [(obj[level1], obj[level2]) for obj in pipeline.transform_stream()]

    elapsed_parallel = t.elapsed

    assert result == expected_result

    assert elapsed_parallel < elapsed_sequential
Example #3
def test_exception_upstream():

    with Pipeline() as pipeline:
        level1 = Unpack(range(N_STEPS))
        Raiser()
        with ParallelPipeline(4) as pp:
            level2 = Unpack(range(N_STEPS))

    with pytest.raises(SomeException, match="foo"):
        pipeline.run()
Example #4
def test_FilterVariables():
    values = list(range(10))

    with Pipeline() as pipeline:
        a = Unpack(values)
        b = Unpack(values)
        FilterVariables(b)

    stream = list(pipeline.transform_stream())

    for o in stream:
        assert a not in o
        assert b in o
Example #5
def test_ecotaxa(tmp_path):
    archive_fn = tmp_path / "ecotaxa.zip"
    print(archive_fn)

    # Create an archive
    with Pipeline() as p:
        i = Unpack(range(10))

        meta = Call(dict, i=i, foo="Sömé UTF-8 ſtríng…")
        image = BinaryBlobs()
        image_name = Format("image_{}.png", i)

        EcotaxaWriter(archive_fn, (image_name, image), meta)

    result = [o.to_dict(meta=meta, image=image) for o in p.transform_stream()]

    # Read the archive
    with Pipeline() as p:
        image, meta = EcotaxaReader(archive_fn)

    roundtrip_result = [
        o.to_dict(meta=meta, image=image) for o in p.transform_stream()
    ]

    for meta_field in ("i", "foo"):
        assert [o["meta"][meta_field] for o in result
                ] == [o["meta"][meta_field] for o in roundtrip_result]

    assert_equal([o["image"] for o in result],
                 [o["image"] for o in roundtrip_result])
Example #6
def test_exception_main_thread():

    with Pipeline() as pipeline:
        level1 = Unpack(range(N_STEPS))
        with ParallelPipeline(4) as pp:
            level2 = Unpack(range(N_STEPS))
            Sleep()

    stream = pipeline.transform_stream()

    try:
        with pytest.raises(SomeException):
            for i, obj in enumerate(stream):
                if i == 10:
                    raise SomeException()
    finally:
        stream.close()
Example #7
def test_StreamBuffer():
    with Pipeline() as pipeline:
        item = Unpack(range(10))
        StreamBuffer(1)

    stream = pipeline.transform_stream()
    result = [o[item] for o in stream]

    assert result == list(range(10))
Example #8
def test_Enumerate():
    with Pipeline() as pipeline:
        a = Unpack(range(10))
        i = Enumerate()

    stream = pipeline.transform_stream()

    for obj in stream:
        assert obj[a] == obj[i]
Example #9
def test_ImageProperties():
    with Pipeline() as pipeline:
        image = Unpack([skimage.data.camera()])
        mask = ThresholdConst(image, 255)
        region = ImageProperties(mask, image)
        image2 = ExtractROI(image, region, 0)

    for obj in pipeline.transform_stream():
        assert_equal(obj[image], obj[image2])
Example #10
def test_ThresholdConst():
    images = [skimage.data.camera(), np.zeros((10, 10), np.uint8) + 255]
    with Pipeline() as pipeline:
        image = Unpack(images)
        mask = ThresholdConst(image, 255)

    objects = list(pipeline.transform_stream())

    assert not objects[1][mask].any()
Example #11
def test_ExtractROI():
    with Pipeline() as pipeline:
        image = Unpack([skimage.data.camera()])
        mask = ThresholdConst(image, 255)
        regions = FindRegions(mask, image)
        ExtractROI(image, regions)
        ExtractROI(image, regions, 0.5)

    pipeline.run()
Example #12
def test_vignette_corrector_no_channel(image_fns):

    with Pipeline() as pipeline:
        img_fn = Unpack(image_fns)
        image = Call(imread, img_fn, as_gray=True)
        result = VignettingCorrector(image)

    stream = pipeline.transform_stream()
    obj = next(stream)
Example #13
def test_TQDM():
    # Assert that the progress bar works with stream
    with Pipeline() as pipeline:
        item = Unpack(range(10))
        TQDM("Description")

    stream = pipeline.transform_stream()
    result = [o[item] for o in stream]

    assert result == list(range(10))
Example #14
def test_StreamBuffer():
    with Pipeline() as pipeline:
        item = Unpack(range(10))
        StreamBuffer(1)

    stream = pipeline.transform_stream()
    objects = [o for o in stream]

    assert objects[0].n_remaining_hint == 10
    assert [o[item] for o in objects] == list(range(10))
Example #15
def test_worker_die():

    with Pipeline() as pipeline:
        level1 = Unpack(range(N_STEPS))
        with ParallelPipeline(4):
            Call(lambda: os.kill(os.getpid(), signal.SIGKILL))

    with pytest.raises(
            RuntimeError,
            match=r"Worker \d+ died unexpectedly. Exit code: -SIGKILL"):
        pipeline.run()
Example #16
def test_Unpack():
    values = list(range(10))

    with Pipeline() as pipeline:
        value = Unpack(values)

    stream = pipeline.transform_stream()

    result = [o[value] for o in stream]

    assert values == result
Example #17
def test_Unpack():
    values = list(range(10))

    with Pipeline() as pipeline:
        value = Unpack(values)

    stream = pipeline.transform_stream()
    objects = [o for o in stream]

    assert objects[0].n_remaining_hint == 10
    assert values == [o[value] for o in objects]
Example #18
def test_CalculateZooProcessFeatures(prefix):
    with Pipeline() as p:
        i = Unpack(range(10))
        mask = BinaryBlobs()
        image = NoiseImage(mask.shape)

        regionprops = FindRegions(mask, image)

        CalculateZooProcessFeatures(regionprops, prefix=prefix)

    p.run()
Example #19
def test_Filter():
    values = list(range(10))

    with Pipeline() as pipeline:
        value = Unpack(values)
        Filter(lambda obj: obj[value] % 2 == 0)

    stream = pipeline.transform_stream()

    result = [o[value] for o in stream]

    assert [v for v in values if v % 2 == 0] == result
Example #20
def test_Pack():
    values = list(range(10))

    with Pipeline() as pipeline:
        value = Unpack(values)
        values_packed = Pack(2, value)

    stream = pipeline.transform_stream()

    result = [o[values_packed] for o in stream]

    assert [(0, 1), (2, 3), (4, 5), (6, 7), (8, 9)] == result
Example #21
def test_Pack():
    values = list(range(10))

    with Pipeline() as pipeline:
        value = Unpack(values)
        values_packed = Pack(2, value)

    objects = [o for o in pipeline.transform_stream()]

    # TODO:
    # assert objects[0].n_remaining_hint == 5

    assert [(0, 1), (2, 3), (4, 5), (6, 7),
            (8, 9)] == [o[values_packed] for o in objects]
Example #22
def test_Filter():
    values = list(range(10))

    with Pipeline() as pipeline:
        value = Unpack(values)
        Filter(lambda obj: obj[value] % 2 == 0)

    objects = [o for o in pipeline.transform_stream()]

    assert [v for v in values if v % 2 == 0] == [o[value] for o in objects]

    # n_remaining_hint of last object is 1
    n_remaining = [o.n_remaining_hint for o in objects]
    assert n_remaining[-1] == 1
Example #23
def test_Filter_highlevel():
    values = list(range(10))

    with Pipeline() as pipeline:
        value = Unpack(values)
        predicate = value % 2 == 0
        Filter(predicate)

    objects = [o for o in pipeline.transform_stream()]

    assert [v for v in values if v % 2 == 0] == [o[value] for o in objects]

    # n_remaining_hint of last object is 1
    n_remaining = [o.n_remaining_hint for o in objects]
    assert n_remaining[-1] == 1
Example #24
def test_PrintObjects(capsys):
    values = list(range(10))

    with Pipeline() as pipeline:
        value = Unpack(values)
        PrintObjects(value)

    # TODO: Capture output and compare

    # https://docs.pytest.org/en/latest/capture.html#accessing-captured-output-from-a-test-function
    # pipeline.run()
    stream = pipeline.transform_stream()
    result = [o[value] for o in stream]

    captured = capsys.readouterr()
    print(captured.out)
    assert result == [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
Example #25
def test_FindRegions(warn_empty, recwarn):
    images = [skimage.data.camera(), np.zeros((10, 10), np.uint8) + 255]
    with Pipeline() as pipeline:
        image = Unpack(images)
        mask = ThresholdConst(image, 255)
        result = FindRegions(mask,
                             image,
                             0,
                             100,
                             padding=10,
                             warn_empty=warn_empty)

    pipeline.run()

    if warn_empty:
        w = recwarn.pop(UserWarning)
        assert re.search(r"^(Image|foo) did not contain any objects.$",
                         str(w.message))
Example #26
def test_ecotaxa(tmp_path, ext):
    archive_fn = tmp_path / ("ecotaxa" + ext)
    print(archive_fn)

    # Create an archive
    with Pipeline() as p:
        i = Unpack(range(10))

        meta = Call(dict, i=i, foo="Sömé UTF-8 ſtríng…")
        image = BinaryBlobs()
        image_name = Format("image_{}.png", i)

        EcotaxaWriter(
            archive_fn,
            (image_name, image),
            meta,
            object_meta={"foo": 0},
            acq_meta={"foo": 1},
            process_meta={"foo": 2},
            sample_meta={"foo": 3},
        )

    result = [o.to_dict(meta=meta, image=image) for o in p.transform_stream()]

    # Read the archive
    with Pipeline() as p:
        image, meta = EcotaxaReader(archive_fn)

    roundtrip_result = [
        o.to_dict(meta=meta, image=image) for o in p.transform_stream()
    ]

    for meta_field in ("i", "foo"):
        assert [o["meta"][meta_field] for o in result
                ] == [o["meta"][meta_field] for o in roundtrip_result]

    for i, prefix in enumerate(("object_", "acq_", "process_", "sample_")):
        assert [o["meta"][prefix + "foo"]
                for o in result] == [i for _ in roundtrip_result]

    assert_equal([o["image"] for o in result],
                 [o["image"] for o in roundtrip_result])
Example #27
def test_Progress(monkeypatch: pytest.MonkeyPatch):
    # Monkeypatch tqdm so that we can extract tqdm instance attributes
    tqdm_instance: List[Optional[tqdm.tqdm]] = [None]
    tqdm_cls = tqdm.tqdm

    def mock_tqdm(*args, **kwargs):
        tqdm_instance[0] = tqdm_cls(*args, **kwargs)
        return tqdm_instance[0]

    monkeypatch.setattr(tqdm, "tqdm", mock_tqdm)

    # Assert that the progress bar works with stream
    with Pipeline() as pipeline:
        item = Unpack(range(10))
        result = Progress("Description")

    stream = pipeline.transform_stream()
    result = [o[item] for o in stream]

    assert result == list(range(10))
    assert tqdm_instance[0].total == 10
Example #28
def test_Profile():

    with Pipeline() as pipeline:
        Unpack(range(N))

        # Sleep beforehand to make sure that only the inner Sleep is profiled.
        Sleep(DURATION_OUTER)

        with Profile("Sleep") as profile_sleep:
            Sleep(DURATION_INNER)

        # Sleep afterwards to make sure that only the inner Sleep is profiled.
        Sleep(DURATION_OUTER)

    objects = list(pipeline.transform_stream())

    assert len(objects) == N

    overhead = profile_sleep._average - DURATION_INNER
    print(f"Overhead {overhead:g}s")

    assert DURATION_INNER <= profile_sleep._average < DURATION_OUTER
Example #29
import os.path

from morphocut import Call, Pipeline
from morphocut.contrib.ecotaxa import EcotaxaWriter
from morphocut.contrib.zooprocess import CalculateZooProcessFeatures
from morphocut.file import Glob
from morphocut.image import FindRegions, ImageReader
from morphocut.parallel import ParallelPipeline
from morphocut.str import Format
from morphocut.stream import Enumerate, Unpack

# First, a Pipeline is defined that contains all operations
# that should be carried out on the objects of the stream.
with Pipeline() as p:
    # Corresponds to `for base_path in ["/path/a", "/path/b", "/path/c"]:`
    base_path = Unpack(["/path/a", "/path/b", "/path/c"])

    # Number the objects in the stream
    running_number = Enumerate()

    # Call calls regular Python functions.
    # Here, a subpath is appended to base_path.
    pattern = Call(os.path.join, base_path, "subpath/to/input/files/*.jpg")

    # Corresponds to `for path in glob(pattern):`
    path = Glob(pattern)

    # Remove path and extension from the filename
    source_basename = Call(lambda x: os.path.splitext(os.path.basename(x))[0], path)

    with ParallelPipeline():