Example #1
def test_unique_history_dict():
    source = Stream()
    s = source.unique(history=2)
    L = s.sink_to_list()

    a = {'hi': 'world'}
    b = {'hi': 'bar'}
    c = {'foo': 'bar'}

    source.emit(a)
    source.emit(b)
    source.emit(a)
    source.emit(b)
    source.emit(a)
    source.emit(b)

    assert L == [a, b]

    source.emit(c)
    source.emit(b)

    assert L == [a, b, c]

    source.emit(a)

    assert L == [a, b, c, a]
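
# --- Illustrative sketch (not from the test suite) ---
# A plain-Python rendering of the bounded-history deduplication the test
# above exercises: remember the last `history` distinct values and drop
# anything equal to one of them. Membership uses equality rather than
# hashing, which is why unhashable dicts work. unique_with_history is a
# hypothetical helper, not streamz API.
from collections import deque

def unique_with_history(values, history=2):
    seen = deque(maxlen=history)  # bounded memory of recent distinct values
    out = []
    for v in values:
        if v not in seen:  # equality-based check handles unhashable dicts
            out.append(v)
            seen.append(v)
    return out

assert unique_with_history([{1: 1}, {1: 1}, {2: 2}], history=2) == [{1: 1}, {2: 2}]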
Example #2
def test_buffer(c, s, a, b):
    source = Stream(asynchronous=True)
    L = (
        source.scatter()
        .map(slowinc, delay=0.5)
        .buffer(5)
        .gather()
        .sink_to_list()
    )

    start = time.time()
    for i in range(5):
        yield source.emit(i)
    end = time.time()
    assert end - start < 0.5

    for i in range(5, 10):
        yield source.emit(i)

    end2 = time.time()
    assert end2 - start > (0.5 / 3)

    while len(L) < 10:
        yield gen.sleep(0.01)
        assert time.time() - start < 5

    assert L == list(map(inc, range(10)))

    assert source.loop == c.loop
Example #3
def test_latest():
    source = Stream(asynchronous=True)

    L = []

    @gen.coroutine
    def slow_write(x):
        yield gen.sleep(0.050)
        L.append(x)

    s = source.map(inc).latest().map(slow_write)  # noqa: F841

    source.emit(1)
    yield gen.sleep(0.010)
    source.emit(2)
    source.emit(3)

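    # latest() keeps only the most recent pending value: while slow_write is
    # busy with inc(1) == 2, the emission of 2 is superseded by 3, so only
    # inc(3) == 4 is processed next.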
    start = time()
    while len(L) < 2:
        yield gen.sleep(0.01)
        assert time() < start + 3
    assert L == [2, 4]

    yield gen.sleep(0.060)
    assert L == [2, 4]
Example #4
def test_amorphous_pipeline():
    pdf = Stream()
    ns = amorphsivity_pipeline(pdf)
    L = ns["amorphsivity"].sink_to_list()
    a = np.ones(10)
    pdf.emit(a)
    assert L[0] == np.sum(a[6:])
Example #5
def test_align_stream_syntax():
    a = Stream()
    b = Stream()
    z = a.align_event_streams(b)
    sl = z.sink_to_list()
    # TODO: use real run engine here
    for n, d, dd in zip(
        ["start", "descriptor", "event", "stop"],
        [
            {"a": "hi", "b": {"hi": "world"}, "uid": "hi", "time": 123},
            {"bla": "foo", "uid": "abc"},
            {"data": "now", "descriptor": "abc"},
            {"stop": "doc"},
        ],
        [
            {"a": "hi2", "b": {"hi2": "world"}},
            {"bla": "foo", "uid": "123"},
            {"data": "now", "descriptor": "123"},
            {"stop": "doc"},
        ],
    ):
        a.emit((n, d))
        b.emit((n, dd))

    assert len(sl) == 4
    assert sl[0][1].get("b") == {"hi": "world", "hi2": "world"}
    assert "original_start_time" in sl[0][1]
Example #6
def test_execution_order():
    L = []
    for i in range(5):
        s = Stream()
        b = s.pluck(1)
        a = s.pluck(0)
        li = a.combine_latest(b, emit_on=a).sink_to_list()
        z = [(1, "red"), (2, "blue"), (3, "green")]
        for zz in z:
            s.emit(zz)
        L.append((li,))
    for ll in L:
        assert ll == L[0]

    L2 = []
    for i in range(5):
        s = Stream()
        a = s.pluck(0)
        b = s.pluck(1)
        li = a.combine_latest(b, emit_on=a).sink_to_list()
        z = [(1, "red"), (2, "blue"), (3, "green")]
        for zz in z:
            s.emit(zz)
        L2.append((li,))
    for ll, ll2 in zip(L, L2):
        assert ll2 == L2[0]
        assert ll != ll2
Example #7
def test_same_hdr_many_times(hw, RE):
    source = Stream()
    fes1 = FromEventStream("start", ("number",), source, principle=True)
    fes2 = FromEventStream("event", ("data", "motor"), source, principle=True)

    out1 = fes1.map(op.add, 1)
    out2 = fes2.combine_latest(out1, emit_on=0).starmap(op.mul)

    a = ToEventStream(out1, ("out1",))
    b = ToEventStream(out2, ("out2",))

    la = a.sink_to_list()
    lb = b.sink_to_list()

    L = []
    RE.subscribe(lambda *x: L.append(x))
    RE(count([hw.motor], md={"number": 5}))

    for i in range(1, 3):
        for ll in L:
            source.emit(ll)
        for lst in [la, lb]:
            o1 = [z[0] for z in lst]
            o2 = ["start", "descriptor", "event", "stop"] * i
            assert o1 == o2
Example #8
def test_separate_thread_without_time(loop, thread):
    assert thread.is_alive()
    source = Stream(loop=loop)
    L = source.map(inc).sink_to_list()

    for i in range(10):
        source.emit(i)
        assert L[-1] == i + 1
Example #9
def test_partition():
    source = Stream()
    L = source.partition(2).sink_to_list()

    for i in range(10):
        source.emit(i)

    assert L == [(0, 1), (2, 3), (4, 5), (6, 7), (8, 9)]
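
# --- Illustrative sketch (not from the test suite) ---
# An assumed stand-alone equivalent of partition(2): buffer incoming items
# and emit a tuple each time the buffer fills. partition_list is a
# hypothetical helper; a trailing partial buffer is never emitted.
def partition_list(values, n):
    buf, out = [], []
    for v in values:
        buf.append(v)
        if len(buf) == n:
            out.append(tuple(buf))
            buf = []
    return out

assert partition_list(range(10), 2) == [(0, 1), (2, 3), (4, 5), (6, 7), (8, 9)]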
Example #10
def test_filter_none():
    source = Stream()
    L = source.filter(None).sink_to_list()

    for i in range(10):
        source.emit(i % 3)

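    # Like the builtin filter(None, ...), a None predicate keeps truthy values.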
    assert L == [1, 2, 1, 2, 1, 2]
Example #11
def test_filter():
    source = Stream()
    L = source.filter(lambda x: x % 2 == 0).sink_to_list()

    for i in range(10):
        source.emit(i)

    assert L == [0, 2, 4, 6, 8]
Example #12
def test_remove():
    source = Stream()
    L = source.remove(lambda x: x % 2 == 0).sink_to_list()

    for i in range(10):
        source.emit(i)

    assert L == [1, 3, 5, 7, 9]
Example #13
def test_zip_same():
    a = Stream()
    b = a.zip(a)
    L = b.sink_to_list()

    a.emit(1)
    a.emit(2)
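    # Both zip inputs are the same stream, so each element pairs with itself.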
    assert L == [(1, 1), (2, 2)]
Example #14
def test_unique_list():
    source = Stream()
    L = source.unique(history=1).sink_to_list()

    source.emit(["a"])
    source.emit(["a"])
    source.emit(["b"])

    assert L == [["a"], ["b"]]
Example #15
def test_unique_dict():
    source = Stream()
    L = source.unique().sink_to_list()

    source.emit({"a": 1})
    source.emit({"a": 1})
    source.emit({"b": 1})

    assert L == [{"a": 1}, {"b": 1}]
Example #16
def test_unique():
    source = Stream()
    L = source.unique().sink_to_list()

    source.emit(1)
    source.emit(2)
    source.emit(1)

    assert L == [1, 2]
Example #17
def test_frequencies():
    source = Stream()
    L = source.frequencies().sink_to_list()

    source.emit("a")
    source.emit("b")
    source.emit("a")

    assert L[-1] == {"a": 2, "b": 1}
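
# --- Illustrative sketch (not from the test suite) ---
# frequencies() behaves like a running tally that emits the updated table
# after each element; a plain Counter reproduces the snapshots the test sees.
from collections import Counter

counts = Counter()
snapshots = []
for item in ["a", "b", "a"]:
    counts[item] += 1
    snapshots.append(dict(counts))  # one snapshot per emission, like sink_to_list

assert snapshots == [{"a": 1}, {"a": 1, "b": 1}, {"a": 2, "b": 1}]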
Example #18
def test_map():
    def add(x=0, y=0):
        return x + y

    source = Stream()
    L = source.map(add, y=10).sink_to_list()

    source.emit(1)

    assert L[0] == 11
Example #19
def test_zip_latest_first():
    a = Stream()
    b = Stream()
    c = a.zip_latest(b).starmap(operator.sub)
    d = a.zip_latest(b, first=True).starmap(operator.add)
    L = c.union(d).sink_to_list()

    a.emit(1)
    b.emit(1)
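    # first=True appears to move d ahead of c in b's update order, so the
    # sum (2) is emitted before the difference (0).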
    assert L == [2, 0]
Example #20
def test_filter_args_kwargs():
    def f(x, y, z=False):
        print(y)
        print(z)
        return y and z

    source = Stream()
    L = source.filter(f, True, z=True).sink_to_list()
    source.emit(1)
    assert L[0] == 1
Example #21
def test_starmap():
    def add(x=0, y=0):
        return x + y

    source = Stream()
    L = source.starmap(add).sink_to_list()

    source.emit((1, 10))

    assert L[0] == 11
Example #22
def test_sink_to_file():
    with tmpfile() as fn:
        source = Stream()
        with sink_to_file(fn, source) as f:
            source.emit("a")
            source.emit("b")

        with open(fn) as f:
            data = f.read()

        assert data == "a\nb\n"
Example #23
def test_sync_2(loop):
    with cluster() as (s, [a, b]):
        with Client(s["address"], loop=loop):  # noqa
            source = Stream()
            L = source.scatter().map(inc).gather().sink_to_list()

            for i in range(10):
                source.emit(i)
                assert len(L) == i + 1

            assert L == list(map(inc, range(10)))
Example #24
def test_kwargs():
    source = Stream()

    def f(acc, x, y=None):
        acc = acc + x + y
        return acc

    L = source.scan(f, y=10).sink_to_list()
    for i in range(3):
        source.emit(i)

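    # The first element (0) seeds the accumulator without calling f; then
    # f(0, 1, y=10) == 11 and f(11, 2, y=10) == 23.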
    assert L == [0, 11, 23]
Example #25
def test_star_sink():
    L = []

    def add(x, y):
        L.append(x + y)

    source = Stream()
    source.starsink(add)

    source.emit((1, 10))

    assert L[0] == 11
Example #26
def test_combine_latest_first():
    a = Stream()
    b = Stream()
    c = a.zip(b)

    z = c.starmap(operator.add)
    zz = z.combine_latest(b, emit_on=0, first=b)
    L = zz.sink_to_list()

    a.emit(1)
    b.emit(1)
    assert len(L) == 1
Example #27
def test_scan():
    source = Stream()

    def f(acc, i):
        acc = acc + i
        return acc, acc

    L = source.scan(f, returns_state=True).sink_to_list()
    for i in range(3):
        source.emit(i)

    assert L == [0, 1, 3]
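
# --- Illustrative sketch (not from the test suite) ---
# With returns_state=True the accumulator returns (new_state, emitted_value)
# as a pair. scan_returns_state is a hypothetical stand-in that assumes the
# streamz convention of seeding the state with the first element:
def scan_returns_state(values, f):
    out, state, seeded = [], None, False
    for v in values:
        if not seeded:
            state, seeded = v, True  # first element becomes the state and is emitted
            out.append(v)
        else:
            state, emitted = f(state, v)
            out.append(emitted)
    return out

assert scan_returns_state(range(3), lambda acc, i: (acc + i, acc + i)) == [0, 1, 3]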
Example #28
def test_zip(c, s, a, b):
    a = Stream(asynchronous=True)
    b = Stream(asynchronous=True)
    c = scatter(a).zip(scatter(b))

    L = c.gather().sink_to_list()

    yield a.emit(1)
    yield b.emit("a")
    yield a.emit(2)
    yield b.emit("b")

    assert L == [(1, "a"), (2, "b")]
Example #29
def test_accumulate():
    a = Stream()
    b = a.accumulate(lambda x, y: x + y)
    L = b.sink_to_list()
    LL = []

    for i in range(10):
        a.emit(i)
        if len(LL) == 0:
            LL.append(i)
        else:
            LL.append(i + LL[-1])

    assert L == LL
Example #30
def test_destroy():
    source = Stream()
    s = source.map(inc)
    L = s.sink_to_list()

    source.emit(1)
    assert L == [2]

    s.destroy()
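    # destroy() unhooks the node from its upstream, as the asserts below check.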

    assert not list(source.downstreams)
    assert not s.upstreams
    source.emit(2)
    assert L == [2]
Example #31
    def run_exp(delay):  # pragma: no cover
        time.sleep(delay)
        print("running exp")

        p = Publisher(proxy[0], prefix=b"raw")
        RE.subscribe(p)

        # Tiny fake pipeline
        pp = Publisher(proxy[0], prefix=b"an")
        raw_source = Stream()
        SimpleFromEventStream(
            "event",
            ("data", "img"),
            raw_source.starmap(Retrieve({"NPY_SEQ": NumpySeqHandler})),
            principle=True,
        ).map(lambda x: x * 2).SimpleToEventStream(
            ("img2",), analysis_stage="pdf"
        ).starsink(
            pp
        )
        RE.subscribe(lambda *x: raw_source.emit(x))

        RE(bp.count([hw.img], md=dict(analysis_stage="raw")))
        print("finished exp")
        p.close()
Example #32
def run_server(
    data_dir,
    outbound_proxy_address=glbl_dict["outbound_proxy_address"],
    prefix=b"an",
):
    """Start up the databroker server for analyzed data.

    Parameters
    ----------
    data_dir : str
        The directory to save the array data into.
    outbound_proxy_address : str, optional
        The address and port of the zmq proxy. Defaults to
        ``glbl_dict["outbound_proxy_address"]``
    prefix : bytes or list of bytes, optional
        The Publisher channels to listen to. Defaults to
        ``b"an"``
    """

    d = RemoteDispatcher(outbound_proxy_address, prefix=prefix)
    an_broker = glbl_dict["an_db"]

    an_source = Stream()
    an_source.Store(data_dir, NpyWriter).starsink(an_broker.insert)

    rr = RunRouter(
        [
            lambda x: (lambda *nd: an_source.emit(nd))
            if x.get("analysis_stage", None) == "pdf"
            else None,
            lambda x: (lambda *nd: an_source.emit(nd))
            if x.get("analysis_stage", None) == "integration"
            else None,
        ]
    )

    d.subscribe(rr)

    print("Starting DB Server")
    d.start()
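
# --- Illustrative sketch (not from this module) ---
# The RunRouter factories above follow a simple contract: take a run-start
# document and return a per-document callback, or None to ignore the run.
# make_factory is a hypothetical helper that makes the contract explicit.
def make_factory(stage, emit):
    def factory(start_doc):
        if start_doc.get("analysis_stage", None) == stage:
            return lambda *name_doc: emit(name_doc)  # forward (name, doc) pairs
        return None
    return factory

collected = []
cb = make_factory("pdf", collected.append)({"analysis_stage": "pdf"})
cb("event", {"data": "now"})
assert collected == [("event", {"data": "now"})]
assert make_factory("pdf", collected.append)({"analysis_stage": "raw"}) is None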
Example #33
def run_server(
    prefix=None,
    outbound_proxy_address=glbl_dict["outbound_proxy_address"],
    inbound_proxy_address=glbl_dict["inbound_proxy_address"],
    _publisher=None,
    **kwargs
):
    """Start up the QOI server

    Parameters
    ----------
    prefix : bytes or list of bytes, optional
        The Publisher channels to listen to. Defaults to
        ``[b"an", b"raw"]``
    outbound_proxy_address : str, optional
        The address and port of the zmq proxy. Defaults to
        ``glbl_dict["outbound_proxy_address"]``
    inbound_proxy_address : str, optional
        The inbound ip address for the ZMQ server. Defaults to the value
        from the global dict
    _publisher : optional
        Publisher to use instead of one built from
        ``inbound_proxy_address``; mainly useful for testing
    kwargs : Any, optional
        Additional keyword arguments passed through to ``link``
    """
    if prefix is None:
        prefix = [b"an", b"raw"]

    d = RemoteDispatcher(outbound_proxy_address, prefix=prefix)
    install_qt_kicker(loop=d.loop)

    if _publisher is None:
        an_with_ind_pub = Publisher(inbound_proxy_address, prefix=b"qoi")
    else:
        an_with_ind_pub = _publisher

    raw_source = Stream()

    # create amorphous pipeline
    amorphous_ns = link(
        *[amorphsivity_fem, amorphsivity_pipeline, amorphsivity_tem],
        source=Stream(),
        **kwargs
    )
    # Combine the data outputs with the raw independent data
    amorphous_ns.update(
        to_event_stream_with_ind(
            move_to_first(raw_source.starmap(StripDepVar())),
            *[
                node
                for node in amorphous_ns.values()
                if isinstance(node, SimpleToEventStream)
            ],
            publisher=an_with_ind_pub
        )
    )

    rr = RunRouter(
        [
            # Parenthesizing the inner lambda makes the conditional pick the
            # callback (or None) once per run, instead of evaluating inside
            # the callback on every document; .get guards start documents
            # that lack the key.
            lambda x: (lambda *y: raw_source.emit(y))
            if x.get("analysis_stage", "") == "raw"
            else None,
            lambda x: (lambda *y: amorphous_ns["source"].emit(y))
            if x.get("analysis_stage", "") == "pdf"
            else None,
        ]
    )
    d.subscribe(rr)
    print("Starting QOI Server")
    d.start()
Example #34
raw_output3 = SimpleFromEventStream("event", ("data", "img"), raw_source)

pipeline = (
    raw_output.union(raw_output2, raw_output3.map(np.sum))
    .map(lambda x: x ** 2)
    .accumulate(lambda x, y: x + y)
)

res = SimpleToEventStream(pipeline, ("result",))

merge = AlignEventStreams(raw_source.starmap(StripDepVar()), res)
merge.sink(pprint)
# send to viz server
merge.starsink(p)

RE.subscribe(lambda *x: raw_source.emit(x))
RE.subscribe(lambda *x: p(*x))
RE.subscribe(lambda *x: time.sleep(.1))
RE.subscribe(lambda *x: time.sleep(1), "stop")

RE(
    pchain(
        bp.scan([hw.noisy_det], hw.motor, 0, 10, 10),
        bp.grid_scan(
            [hw.ab_det],
            hw.motor,
            0,
            5,
            5,
            hw.motor2,
            0,
Example #35
def run_server(
    folder,
    outbound_proxy_address=glbl_dict["outbound_proxy_address"],
    prefix=None,
    handlers=None,
):
    """Start up the portable databroker server

    Parameters
    ----------
    folder : str
        The location where to save the portable databrokers
    outbound_proxy_address : str, optional
        The address and port of the zmq proxy. Defaults to
        ``glbl_dict["outbound_proxy_address"]``
    prefix : bytes or list of bytes, optional
        The Publisher channels to listen to. Defaults to
        ``[b"an", b"raw"]``
    handlers : dict, optional
        The map between handler specs and handler classes, defaults to
        the map used by the experimental databroker if possible
    """
    # TODO: convert to bytestrings if needed
    # TODO: maybe separate this into different processes?
    # TODO: support multiple locations for folders
    if prefix is None:
        prefix = [b"an", b"raw"]
    d = RemoteDispatcher(outbound_proxy_address, prefix=prefix)
    portable_folder = folder
    portable_configs = {}
    for folder_name in ["an", "raw"]:
        fn = os.path.join(portable_folder, folder_name)
        os.makedirs(fn, exist_ok=True)
        # if the path doesn't exist then make the databrokers
        with open(
            os.path.join(portable_folder, f"{folder_name}.yml"), "w"
        ) as f:
            f.write(portable_template.format(folder_name))
        print(portable_template.format(folder_name))

        print(fn)
        portable_configs[folder_name] = yaml.load(
            io.StringIO(portable_template.format(fn)),
            Loader=yaml.SafeLoader,  # explicit Loader; bare yaml.load is deprecated
        )
        os.makedirs(os.path.join(fn, "data"), exist_ok=True)

    # TODO: add more files here, eg. a databroker readme/tutorial
    with open(os.path.join(portable_folder, "db_load.py"), "w") as f:
        f.write(load_script)
    an_broker = Broker.from_config(portable_configs["an"])

    an_source = Stream()
    zed = an_source.Store(
        os.path.join(
            portable_configs["an"]["metadatastore"]["config"]["directory"],
            "data",
        ),
        NpyWriter,
    )
    zed.starsink(an_broker.insert)

    raw_broker = Broker.from_config(portable_configs["raw"])
    if handlers is None:
        handlers = raw_broker.reg.handler_reg

    raw_source = Stream()
    raw_source.starmap(
        ExportCallback(
            os.path.join(
                portable_configs["raw"]["metadatastore"]["config"][
                    "directory"
                ],
                "data",
            ),
            handler_reg=handlers,
        )
    ).starsink(raw_broker.insert)

    rr = RunRouter(
        [
            lambda x: (lambda *nd: raw_source.emit(nd))
            if x.get("analysis_stage", "") == "raw"
            else None
        ]
        + [
            lambda x: (lambda *nd: an_source.emit(nd))
            if x.get("analysis_stage", None) == "pdf"
            else None,
            lambda x: (lambda *nd: an_source.emit(nd))
            if x.get("analysis_stage", None) == "integration"
            else None,
        ]
    )

    d.subscribe(rr)

    print("Starting Portable DB Server")
    d.start()