Example #1
def test_dbfriendly(RE, hw):
    source = Stream()
    t = FromEventStream("event", ("data", "motor"), source, principle=True)
    z = t.map(op.add, 1)
    n = ToEventStream(z, "out").DBFriendly()
    d = n.pluck(1).sink_to_list()

    RE.subscribe(unstar(source.emit))

    RE(scan([hw.motor], hw.motor, 0, 9, 10))

    assert isinstance(d[0]["graph"], dict)
    h1 = d[0].get("graph_hash")
    assert h1

    d.clear()
    RE(scan([hw.motor], hw.motor, 0, 9, 10))

    h2 = d[0].get("graph_hash")
    assert h1 == h2
    assert len(d) == 10 + 3

    d.clear()
    z.args = (2,)
    RE(scan([hw.motor], hw.motor, 0, 9, 10))

    h2 = d[0].get("graph_hash")
    assert h1 != h2
    assert len(d) == 10 + 3
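
This test checks that wrapping the ToEventStream node with .DBFriendly() puts a serialized copy of the pipeline graph and a graph_hash into the emitted start document, that the hash is stable across identical runs, and that it changes once the pipeline is altered (here by rebinding z.args). Below is a minimal sketch of the same wiring outside the pytest fixtures; the import paths and the RunEngine/simulated-hardware setup are assumptions and may differ between shed, rapidz, and bluesky versions.

# Minimal sketch, assuming these import locations (they vary by version).
import operator as op

from bluesky import RunEngine
from bluesky.plans import scan
from ophyd.sim import hw as make_simulated_hardware
from rapidz import Stream
from shed import FromEventStream, ToEventStream
from shed.utils import unstar  # assumed location of unstar

hw = make_simulated_hardware()   # namespace with hw.motor, hw.det, ...
RE = RunEngine()

source = Stream()
node = FromEventStream("event", ("data", "motor"), source, principle=True)
out = ToEventStream(node.map(op.add, 1), "out")
docs = out.pluck(1).sink_to_list()   # collect the translated documents

RE.subscribe(unstar(source.emit))    # push (name, doc) pairs into the pipeline
RE(scan([hw.motor], hw.motor, 0, 9, 10))
assert len(docs) == 10 + 3           # start, descriptor, 10 events, stop
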
Example #2
def test_execution_order():
    def data():
        suid = str(uuid.uuid4())
        duid = str(uuid.uuid4())
        yield "start", {"hi": "world", "uid": suid}
        yield "descriptor", {
            "name": "hi",
            "data_keys": {"ct"},
            "uid": duid,
            "run_start": suid,
        }
        for i in range(10):
            yield "event", {
                "uid": str(uuid.uuid4()),
                "data": {"ct": i},
                "descriptor": duid,
            }
        duid = str(uuid.uuid4())
        yield "descriptor", {
            "name": "not hi",
            "data_keys": {"ct"},
            "uid": duid,
            "run_start": suid,
        }
        for i in range(100, 110):
            yield "event", {
                "uid": str(uuid.uuid4()),
                "data": {"ct": i},
                "descriptor": duid,
            }
        yield "stop", {"uid": str(uuid.uuid4()), "run_start": suid}

    source = FromEventStream("event", ("data", "ct"), principle=True)
    p = source.map(op.add, 1)
    pp = p.ToEventStream("ctp1")
    ppp = p.map(op.mul, 2)
    l1 = ppp.sink_to_list()
    pppp = ppp.ToEventStream("ctp2")
    l2 = ppp.map(lambda *x: time.time()).sink_to_list()
    assert next(iter(p.downstreams)) is pp
    assert next(iter(ppp.downstreams)) is pppp
    for d in data():
        source.update(d)
    ex_l = [(i + 1) * 2 for i in range(10)] + [
        (i + 1) * 2 for i in range(100, 110)
    ]
    assert l1 == ex_l
    assert all((v == pppp.start_uid for _, v in pppp.times))
    t = sorted([t for t, _ in pppp.times])
    # the ToEventStream node executes first, so its timestamps precede the sink's
    assert all((v < v2 for v, v2 in zip(t, l2)))
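
This test pins down execution order: ToEventStream nodes attached to a principle FromEventStream run before the other downstream nodes, so the timestamps recorded in pppp.times all precede the wall-clock times captured by the later map/sink, and the uid recorded alongside them matches pppp.start_uid. The hand-written data() generator stands in for a bluesky document stream (start, descriptor, events, a second descriptor, more events, stop). Below is a condensed, hedged sketch of driving a FromEventStream directly with (name, document) tuples; the import location is an assumption.

# Condensed sketch of the synthetic-document pattern, assuming the shed
# import location below (it varies by version).
import operator as op
import uuid

from shed import FromEventStream, ToEventStream

def simple_run(values):
    # Yield a minimal start/descriptor/events/stop document stream.
    suid, duid = str(uuid.uuid4()), str(uuid.uuid4())
    yield "start", {"uid": suid, "hi": "world"}
    yield "descriptor", {"uid": duid, "run_start": suid,
                         "name": "primary", "data_keys": {"ct"}}
    for v in values:
        yield "event", {"uid": str(uuid.uuid4()), "descriptor": duid,
                        "data": {"ct": v}}
    yield "stop", {"uid": str(uuid.uuid4()), "run_start": suid}

node = FromEventStream("event", ("data", "ct"), principle=True)
translated = ToEventStream(node.map(op.add, 1), ("ctp1",)).sink_to_list()

for name_doc in simple_run(range(5)):
    node.update(name_doc)    # feed raw documents straight into the node
# translated now holds (name, document) pairs whose event data carries ct + 1
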
Example #3
def test_merkle_hash():
    source = Stream()
    t = FromEventStream("event", ("data", "motor"), source, principle=True)
    assert t.principle

    n = ToEventStream(t, ("ct",), data_key_md={"ct": {"units": "arb"}})
    h = merkle_hash(n)
    assert h

    tt = FromEventStream("event", ("data", "motor"), source, principle=True)

    nn = ToEventStream(tt, ("ct",), data_key_md={"ct": {"units": "arb"}})
    assert h == merkle_hash(nn)
    assert h != merkle_hash(tt)

    tt = FromEventStream("event", ("data", "motor"), source, principle=True)

    z = tt.map(op.add, 1)
    zz = tt.map(op.sub, 1)
    j = z.zip(zz)

    nn = ToEventStream(j, ("ct",), data_key_md={"ct": {"units": "arb"}})
    order_1_hash = merkle_hash(nn)

    tt = FromEventStream("event", ("data", "motor"), source, principle=True)

    zz = tt.map(op.sub, 1)
    z = tt.map(op.add, 1)
    j = z.zip(zz)

    nn = ToEventStream(j, ("ct",), data_key_md={"ct": {"units": "arb"}})
    order_2_hash = merkle_hash(nn)
    assert order_1_hash != order_2_hash

    tt = FromEventStream("event", ("data", "motor"), source, principle=True)

    z = tt.map(op.add, 1)
    zz = tt.map(op.sub, 1)
    j = zz.zip(z)

    nn = ToEventStream(j, ("ct",), data_key_md={"ct": {"units": "arb"}})
    order_3_hash = merkle_hash(nn)
    assert order_1_hash != order_3_hash
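
The merkle_hash test shows that structurally identical pipelines hash to the same value, while changing the operations, the order in which branches are created, or the order of the zip changes the hash, which is what makes it usable as a provenance or cache key for a pipeline. A hedged sketch of using it that way follows; the import locations are assumptions (merkle_hash may live in a utility module depending on the shed version), and the cache dict is a hypothetical stand-in for a real persistence layer.

# Sketch of merkle_hash as a cache key; import paths are assumptions.
import operator as op

from rapidz import Stream
from shed import FromEventStream, ToEventStream, merkle_hash

source = Stream()
node = FromEventStream("event", ("data", "motor"), source, principle=True)
out = ToEventStream(node.map(op.add, 1), ("ct",))

pipeline_key = merkle_hash(out)   # identical graphs give identical keys

results_cache = {}                # hypothetical store keyed by pipeline hash
if pipeline_key not in results_cache:
    # Only wire up a collector (and later run data through the graph)
    # when this exact pipeline has not been seen before.
    results_cache[pipeline_key] = out.pluck(1).sink_to_list()
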
Example #4
    source).map(lambda x: 'detector_calibration_server_uid' in x).connect(
        is_calibration_img))
# Only pass through new calibrations (prevents us from recalculating cals)
(FromEventStream('start', ('calibration_md', ),
                 source).unique(history=1).connect(geo_input))

start_timestamp = FromEventStream('start', ('time', ), source)

# Clean out the cached darks and backgrounds on start
# so that this will run regardless of background/dark status
# note that we get the proper data (if it exists) downstream
start_docs.sink(lambda x: raw_background_dark.emit(0.0))
start_docs.sink(lambda x: raw_background.emit(0.0))
start_docs.sink(lambda x: raw_foreground_dark.emit(0.0))

bg_query = (start_docs.map(query_background, db=db))
bg_docs = (
    bg_query.zip(start_docs)
    .starmap(temporal_prox)
    .filter(lambda x: x != [])
    .map(lambda x: x[0].documents(fill=True))
    .flatten())

# Get foreground dark
fg_dark_query = (start_docs.map(query_dark, db=db))
fg_dark_query.filter(lambda x: x == []).sink(lambda x: print('No dark found!'))
(FromEventStream(
    'event', ('data', image_name),
    fg_dark_query
    .filter(lambda x: x != [])
    .map(lambda x: x if not isinstance(x, list) else x[0])
    .map(lambda x: x.documents(fill=True))
    .flatten())
    .map(np.float32)
    .connect(raw_foreground_dark))

# Get bg dark
bg_dark_query = (FromEventStream('start', (), bg_docs).map(query_dark, db=db))
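
These pipeline fragments come from an xpdAn-style image-reduction graph: on every start document the cached dark and background frames are zeroed, the databroker is queried for matching dark/background runs, and any hits are re-emitted as filled documents into the correction nodes (darks taken within the same run can also come from a dedicated 'dark' event stream, as in the next example). The sketch below isolates the reset-caches-on-start idiom, with plain rapidz Streams standing in for the pipeline's real cache nodes; the import locations are assumptions.

# Sketch of the reset-caches-on-start idiom; stand-in Stream nodes replace
# the pipeline's real cache nodes, and the import locations are assumptions.
from rapidz import Stream
from shed import FromEventStream

source = Stream()                       # raw (name, document) pairs
start_docs = FromEventStream("start", (), source)

raw_foreground_dark = Stream()          # stand-ins for the cached frames
raw_background_dark = Stream()
raw_background = Stream()

# Zero every cache when a new run starts so stale darks/backgrounds are
# never combined with the new run's data.
start_docs.sink(lambda x: raw_background_dark.emit(0.0))
start_docs.sink(lambda x: raw_background.emit(0.0))
start_docs.sink(lambda x: raw_foreground_dark.emit(0.0))
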
Example #5
            'raw_event': e,
            'raw_start': s,
            'raw_descriptor': d,
            'human_timestamp': _timestampstr(s['time'])
        }))

# If there is a new calibration uid, invalidate our current calibration cache
start_timestamp = FromEventStream('start', ('time', ), source)

# Clean out the cached darks and backgrounds on start
# so that this will run regardless of background/dark status
# note that we get the proper data (if it exists) downstream
start_docs.sink(lambda x: raw_foreground_dark.emit(0.0))

# Get foreground dark
fg_dark_query = (start_docs.map(query_dark, db=db))
fg_dark_query.filter(lambda x: x != [] and isinstance(x, list)).sink(print)
fg_dark_query.filter(lambda x: x == []).sink(lambda x: print('No dark found!'))
(FromEventStream(
    'event', ('data', image_name),
    fg_dark_query
    .filter(lambda x: x != [])
    .map(lambda x: x if not isinstance(x, list) else x[0])
    .map(lambda x: x.documents(fill=True))
    .flatten())
    .map(np.float32)
    .connect(raw_foreground_dark))
(FromEventStream('event', ('data', image_name),
                 source,
                 event_stream_name='dark').map(
                     np.float32).connect(raw_foreground_dark))

# Get foreground
(FromEventStream('event', ('data', image_name),