Example #1
def test_dbfriendly(RE, hw):
    source = Stream()
    t = FromEventStream("event", ("data", "motor"), source, principle=True)
    z = t.map(op.add, 1)
    n = ToEventStream(z, "out").DBFriendly()
    d = n.pluck(1).sink_to_list()

    RE.subscribe(unstar(source.emit))

    RE(scan([hw.motor], hw.motor, 0, 9, 10))

    assert isinstance(d[0]["graph"], dict)
    h1 = d[0].get("graph_hash")
    assert h1

    d.clear()
    RE(scan([hw.motor], hw.motor, 0, 9, 10))

    h2 = d[0].get("graph_hash")
    assert h1 == h2
    assert len(d) == 10 + 3  # 10 events + start, descriptor, and stop

    d.clear()
    z.args = (2,)
    RE(scan([hw.motor], hw.motor, 0, 9, 10))

    h2 = d[0].get("graph_hash")
    assert h1 != h2
    assert len(d) == 10 + 3
Example #2
def test_from_event_model(RE, hw):
    source = Stream()
    t = FromEventStream("event", ("data", "motor"), source, principle=True)
    L = t.sink_to_list()

    RE.subscribe(unstar(source.emit))
    RE.subscribe(print)

    RE(scan([hw.motor], hw.motor, 0, 9, 10))

    assert len(L) == 10
    for i, ll in enumerate(L):
        assert i == ll
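
These tests hand RE.subscribe the callback unstar(source.emit); the RunEngine calls its callbacks as cb(name, doc), while Stream.emit wants a single (name, doc) tuple. A minimal sketch of what unstar must therefore do (Example #8 below uses the equivalent inline lambda):

def unstar(func):
    # Repack the RunEngine's (name, doc) positional arguments into the
    # single tuple that func (here source.emit) expects.
    def wrapper(*args):
        return func(args)
    return wrapper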
Example #3
def test_execution_order():
    def data():
        suid = str(uuid.uuid4())
        duid = str(uuid.uuid4())
        yield "start", {"hi": "world", "uid": suid}
        yield "descriptor", {
            "name": "hi",
            "data_keys": {"ct"},
            "uid": duid,
            "run_start": suid,
        }
        for i in range(10):
            yield "event", {
                "uid": str(uuid.uuid4()),
                "data": {"ct": i},
                "descriptor": duid,
            }
        duid = str(uuid.uuid4())
        yield "descriptor", {
            "name": "not hi",
            "data_keys": {"ct"},
            "uid": duid,
            "run_start": suid,
        }
        for i in range(100, 110):
            yield "event", {
                "uid": str(uuid.uuid4()),
                "data": {"ct": i},
                "descriptor": duid,
            }
        yield "stop", {"uid": str(uuid.uuid4()), "run_start": suid}

    source = FromEventStream("event", ("data", "ct"), principle=True)
    p = source.map(op.add, 1)
    pp = p.ToEventStream("ctp1")
    ppp = p.map(op.mul, 2)
    l1 = ppp.sink_to_list()
    pppp = ppp.ToEventStream("ctp2")
    l2 = ppp.map(lambda *x: time.time()).sink_to_list()
    assert next(iter(p.downstreams)) is pp
    assert next(iter(ppp.downstreams)) is pppp
    for d in data():
        source.update(d)
    ex_l = [(i + 1) * 2 for i in range(10)] + [
        (i + 1) * 2 for i in range(100, 110)
    ]
    assert l1 == ex_l
    assert all((v == pppp.start_uid for _, v in pppp.times))
    t = sorted([t for t, _ in pppp.times])
    # ToEventStream nodes execute before other downstream nodes, so each
    # recorded emission time should precede the matching sink timestamp
    assert all((v < v2 for v, v2 in zip(t, l2)))
Example #4
def test_walk_up():
    raw = Stream()
    a_translation = FromEventStream("start", ("time",), raw, principle=True)
    b_translation = FromEventStream("event", ("data", "pe1_image"), raw)

    d = b_translation.zip_latest(a_translation)
    dd = d.map(op.truediv)
    e = ToEventStream(dd, ("data",))

    g = nx.DiGraph()
    walk_to_translation(e, g)
    att = []
    for node, attrs in g.nodes.items():
        att.append(attrs["stream"])
    s = {a_translation, b_translation, d, dd, e}
    assert s == set(att)
    assert {_hash_or_uid(k) for k in s} == set(g.nodes)
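
walk_to_translation keys the graph's nodes by _hash_or_uid, as the last assertion shows. A plausible sketch of that helper (an assumption about its body, consistent with how it is used here):

def _hash_or_uid(node):
    # Use the node's uid when it has one, else fall back to hash()
    return getattr(node, "uid", hash(node))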
Example #5
def test_replay_export_test():
    def y():
        suid = str(uuid.uuid4())
        yield ("start", {"uid": suid, "time": time.time()})
        duid = str(uuid.uuid4())
        yield (
            "descriptor",
            {
                "uid": duid,
                "run_start": suid,
                "name": "primary",
                "data_keys": {"det_image": {"dtype": "int", "units": "arb"}},
                "time": time.time(),
            },
        )
        for i in range(5):
            yield (
                "event",
                {
                    "uid": str(uuid.uuid4()),
                    "data": {"det_image": i},
                    "timestamps": {"det_image": time.time()},
                    "seq_num": i + 1,
                    "time": time.time(),
                    "descriptor": duid,
                },
            )
        yield (
            "stop",
            {"uid": str(uuid.uuid4()), "time": time.time(), "run_start": suid},
        )

    print("build graph")
    g1 = FromEventStream(
        "event", ("data", "det_image"), principle=True, stream_name="g1"
    )
    g11 = FromEventStream("event", ("data", "det_image"), stream_name="g11")
    g11_1 = g1.zip(g11)
    g2 = g11_1.starmap(op.mul).map(np.log)
    g = g2.SimpleToEventStream(("img2",))
    from pprint import pprint

    g.sink(pprint)
    L = g.sink_to_list()

    print("run experiment")
    for yy in y():
        print(yy[0])
        g11.update(yy)
        g1.update(yy)
    assert L[-1][1]["run_start"]
Example #6
def test_from_event_model_stream_name2():
    def data():
        suid = str(uuid.uuid4())
        duid = str(uuid.uuid4())
        yield "start", {"hi": "world", "uid": suid}
        yield "descriptor", {
            "name": "hi",
            "data_keys": {"ct"},
            "uid": duid,
            "run_start": suid,
        }
        for i in range(10):
            yield "event", {
                "uid": str(uuid.uuid4()),
                "data": {"ct": i},
                "descriptor": duid,
            }
        duid = str(uuid.uuid4())
        yield "descriptor", {
            "name": "not hi",
            "data_keys": {"ct"},
            "uid": duid,
            "run_start": suid,
        }
        for i in range(100, 110):
            yield "event", {
                "uid": str(uuid.uuid4()),
                "data": {"ct": i},
                "descriptor": duid,
            }
        yield "stop", {"uid": str(uuid.uuid4()), "run_start": suid}

    g = data()
    source = Stream()
    t = FromEventStream(
        "event", ("data", "ct"), source, event_stream_name="not hi"
    )
    L = t.sink_to_list()

    for gg in g:
        source.emit(gg)

    assert len(L) == 10
    for i, ll in enumerate(L):
        assert i + 100 == ll
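
The event_stream_name="not hi" argument is why only the second batch of events (100-109) arrives. Conceptually (a sketch, not SHED's actual code), the node remembers which descriptor uids carry the requested name and admits only events that reference them:

matching = set()

def accept(name, doc, event_stream_name="not hi"):
    # Track descriptors belonging to the named event stream, then pass
    # only events whose 'descriptor' field points at one of them.
    if name == "descriptor" and doc.get("name") == event_stream_name:
        matching.add(doc["uid"])
    return name == "event" and doc["descriptor"] in matching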
Example #7
def test_to_event_model(RE, hw):
    source = Stream()
    t = FromEventStream("event", ("data", "motor"), source, principle=True)
    assert t.principle

    n = ToEventStream(t, ("ct",))
    tt = t.sink_to_list()
    p = n.pluck(0).sink_to_list()
    d = n.pluck(1).sink_to_list()

    RE.subscribe(unstar(source.emit))
    RE.subscribe(print)

    RE(scan([hw.motor], hw.motor, 0, 9, 10))

    assert tt
    assert set(p) == {"start", "stop", "event", "descriptor"}
    assert d[1]["hints"] == {"analyzer": {"fields": ["ct"]}}
    assert d[-1]["run_start"]
Example #8
def test_db_insertion(RE, hw):
    db = Broker.named("temp")

    source = Stream()
    n0 = FromEventStream("event", ("data", "motor"), source, principle=True)
    n1 = ToEventStream(n0, "motor")
    n1.DBFriendly().starsink(db.v1.insert)

    RE.subscribe(lambda *x: source.emit(x))  # inline equivalent of unstar(source.emit)
    RE(scan([hw.motor], hw.motor, 0, 1, 2))

    assert db[-1]
Example #9
def test_no_stop(hw, RE):

    source = Stream().filter(lambda x: x[0] != "stop")
    t = FromEventStream("event", ("data",), source, principle=True)

    n = ToEventStream(t)
    p = n.pluck(0).sink_to_list()
    d = n.pluck(1).sink_to_list()

    RE.subscribe(unstar(source.emit))
    RE.subscribe(print)

    RE(scan([hw.motor], hw.motor, 0, 9, 10))
    RE(scan([hw.motor], hw.motor, 0, 9, 10))

    assert set(p) == {"start", "stop", "event", "descriptor"}
    assert d[1]["hints"] == {
        "analyzer": {"fields": ["motor", "motor_setpoint"]}
    }
    assert d[2]["data"] == {"motor_setpoint": 0, "motor": 0}
Example #10
def test_to_event_model_dict(RE, hw):
    source = Stream()
    t = FromEventStream("event", ("data",), source, principle=True)

    n = ToEventStream(t)
    p = n.pluck(0).sink_to_list()
    d = n.pluck(1).sink_to_list()

    n.sink(print)
    RE.subscribe(unstar(source.emit))
    RE.subscribe(print)

    RE(scan([hw.motor], hw.motor, 0, 9, 10))

    print(d[1]["hints"])
    assert set(p) == {"start", "stop", "event", "descriptor"}
    assert d[1]["hints"] == {
        "analyzer": {"fields": ["motor", "motor_setpoint"]}
    }
    assert d[2]["data"] == {"motor_setpoint": 0, "motor": 0}
    assert d[-1]["run_start"]
Example #11
File: prov.py  Project: zthatch/SHED
def y():
    suid = str(uuid.uuid4())
    yield ('start', {'uid': suid, 'time': time.time()})
    duid = str(uuid.uuid4())
    yield ('descriptor', {
        'uid': duid,
        'run_start': suid,
        'name': 'primary',
        'data_keys': {'det_image': {'dtype': 'int', 'units': 'arb'}},
        'time': time.time()
    })
    for i in range(5):
        yield ('event', {
            'uid': str(uuid.uuid4()),
            'data': {'det_image': i},
            'timestamps': {'det_image': time.time()},
            'seq_num': i + 1,
            'time': time.time(),
            'descriptor': duid
        })
    yield ('stop', {
        'uid': str(uuid.uuid4()),
        'time': time.time(),
        'run_start': suid
    })


print('build graph')
g1 = FromEventStream('event', (
    'data',
    'det_image',
),
                     principle=True,
                     stream_name='g1')
g11 = FromEventStream('event', (
    'data',
    'det_image',
), stream_name='g11')
g11_1 = g1.zip(g11)
g2 = g11_1.starmap(op.mul)
g = g2.ToEventStream(('img2', ))
dbf = g.DBFriendly()
dbf.starsink(db.insert)

print('run experiment')
for yy in y():
    db.insert(*yy)
    # feed the documents through the pipeline as well
    g11.update(yy)
    g1.update(yy)
Example #12
File: average.py  Project: zthatch/SHED
from bluesky.plans import count
from shed.translation import FromEventStream, ToEventStream
from streamz import Stream
from bluesky.callbacks import LivePlot
import matplotlib.pyplot as plt

# Create callbacks for plotting
fig, ax = plt.subplots()
lp = LivePlot('average', ax=ax)
lp2 = LivePlot('binned', ax=ax)
# lp3 = LivePlot('noisy_det', ax=ax)

# Create a graph
source = Stream()
# Convert from raw event model to data
fes = FromEventStream('event', ('data', 'noisy_det'), source, principle=True)

# Averaging graph
adder = fes.accumulate(lambda x, y: x + y)
counter = fes.accumulate(lambda s, x: s + 1, start=0)
averager = adder.zip(counter).map(lambda x: x[0] / x[1])

# Binned averaging
sw = fes.sliding_window(2).map(sum).map(lambda x: x / 2)

# Convert back to Event Model
tes1 = ToEventStream(averager, ('average', ))
tes2 = ToEventStream(sw, ('binned', ))

# sink to plotting
tes1.sink(lambda x: lp(*x))
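
The excerpt stops before the second plot is connected and the pipeline is driven. A minimal way to finish it, assuming bluesky's RunEngine and ophyd's simulated hardware (both assumptions; they are not part of this file):

from bluesky import RunEngine
from ophyd.sim import hw  # simulated noisy_det; an assumption here

tes2.sink(lambda x: lp2(*x))  # mirror the tes1 -> lp wiring

hw = hw()
RE = RunEngine()
RE.subscribe(lambda *x: source.emit(x))  # same trick as unstar(source.emit)
RE(count([hw.noisy_det], 10))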
Example #13

def astype(x, ret_type='float32'):
    return x.astype(ret_type)


def pipeline(raw_source):
    b = (
        raw_source.map(astype).map(np.sum)
        # .sink(print)
        .ToEventStream(('sum', )).DBFriendly().starsink(db2.insert))
    return locals()


namespace = link(pipeline,
                 raw_source=FromEventStream('event', ('data', 'pe1_image'),
                                            principle=True))

# vis = False
vis = True
# source.visualize(source_node=True)
for hdr in [db[-1]]:
    for e in hdr.documents(fill=True):
        if e[0] == 'start':
            e[1].update(composition_string='EuTiO3')
        if e[0] == 'event' and vis:
            plt.pause(.1)
        if e[0] == 'event':
            if e[1]['seq_num'] > 3:
                continue
        namespace['raw_source'].update(e)
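
The link helper used above folds pipeline factories into one shared namespace: each factory is called with the names it asks for, and the locals() it returns are merged back in so later factories (and the caller, via namespace['raw_source']) can reach the nodes. A conceptual sketch, not the library's actual implementation:

import inspect

def link_sketch(*factories, **namespace):
    for factory in factories:
        wanted = inspect.signature(factory).parameters
        kwargs = {k: v for k, v in namespace.items() if k in wanted}
        namespace.update(factory(**kwargs))  # fold returned locals() back in
    return namespace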
Example #14
from xpdconf.conf import glbl_dict
from xpdtools.pipelines.raw_pipeline import (raw_foreground_dark,
                                             raw_foreground,
                                             dark_corrected_foreground)

image_name = glbl_dict['image_field']
db = glbl_dict['exp_db']
calibration_md_folder = {'folder': 'xpdAcq_calib_info.yml'}

filler = Filler(db=db)
# Build the general pipeline from the raw_pipeline
raw_source = Stream(stream_name='raw source')

# TODO: change this when new dark logic comes
# Check that the data isn't a dark
dk_uid = (FromEventStream(
    'start', (), upstream=raw_source).map(lambda x: 'sc_dk_field_uid' in x))
# Fill the raw event stream
source = (
    raw_source.combine_latest(dk_uid).filter(lambda x: x[1]).pluck(0)
    # Filler returns None for resource/datum data
    .starmap(filler).filter(lambda x: x is not None))
# Get all the documents
start_docs = FromEventStream('start', (), source)
descriptor_docs = FromEventStream('descriptor', (),
                                  source,
                                  event_stream_name='primary')
event_docs = FromEventStream('event', (), source, event_stream_name='primary')
all_docs = (event_docs.combine_latest(
    start_docs, descriptor_docs, emit_on=0).starmap(
        lambda e, s, d: {
            'raw_event': e,
Example #15
def test_merkle_hash():
    source = Stream()
    t = FromEventStream("event", ("data", "motor"), source, principle=True)
    assert t.principle

    n = ToEventStream(t, ("ct",), data_key_md={"ct": {"units": "arb"}})
    h = merkle_hash(n)
    assert h

    tt = FromEventStream("event", ("data", "motor"), source, principle=True)

    nn = ToEventStream(tt, ("ct",), data_key_md={"ct": {"units": "arb"}})
    assert h == merkle_hash(nn)
    assert h != merkle_hash(tt)

    tt = FromEventStream("event", ("data", "motor"), source, principle=True)

    z = tt.map(op.add, 1)
    zz = tt.map(op.sub, 1)
    j = z.zip(zz)

    nn = ToEventStream(j, ("ct",), data_key_md={"ct": {"units": "arb"}})
    order_1_hash = merkle_hash(nn)

    tt = FromEventStream("event", ("data", "motor"), source, principle=True)

    zz = tt.map(op.sub, 1)
    z = tt.map(op.add, 1)
    j = z.zip(zz)

    nn = ToEventStream(j, ("ct",), data_key_md={"ct": {"units": "arb"}})
    order_2_hash = merkle_hash(nn)
    assert order_1_hash != order_2_hash

    tt = FromEventStream("event", ("data", "motor"), source, principle=True)

    z = tt.map(op.add, 1)
    zz = tt.map(op.sub, 1)
    j = zz.zip(z)

    nn = ToEventStream(j, ("ct",), data_key_md={"ct": {"units": "arb"}})
    order_3_hash = merkle_hash(nn)
    assert order_1_hash != order_3_hash
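
merkle_hash evidently digests a node together with the hashes of everything upstream of it, in order, which is why reordering the map nodes or the zip arguments changes the result while rebuilding an identical chain does not. A conceptual sketch of such a hash (an illustration, not SHED's implementation):

import hashlib

def merkle_hash_sketch(node):
    # Combine this node's identity and arguments with the ordered
    # hashes of its upstream nodes, Merkle-tree style.
    payload = repr((type(node).__name__, getattr(node, "args", ())))
    for upstream in getattr(node, "upstreams", ()):
        payload += merkle_hash_sketch(upstream)
    return hashlib.sha256(payload.encode()).hexdigest()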
Example #16
File: main.py  Project: Sasaank/xpdAn
from xpdtools.pipelines.raw_pipeline import (
    mask_setting,  # noqa: F401
)
from xpdtools.tools import overlay_mask

image_name = glbl_dict['image_field']
db = glbl_dict['exp_db']
calibration_md_folder = {'folder': 'xpdAcq_calib_info.yml'}

filler = Filler(db=db)
# Build the general pipeline from the raw_pipeline
raw_source = Stream(stream_name='raw source')

# TODO: change this when new dark logic comes
# Check that the data isn't a dark
dk_uid = (FromEventStream(
    'start', (), upstream=raw_source).map(lambda x: 'sc_dk_field_uid' in x))
# Fill the raw event stream
source = (
    raw_source.combine_latest(dk_uid).filter(lambda x: x[1]).pluck(0)
    # Filler returns None for resource/datum data
    .starmap(filler).filter(lambda x: x is not None))
# Get all the documents
start_docs = FromEventStream('start', (), source)
descriptor_docs = FromEventStream('descriptor', (),
                                  source,
                                  event_stream_name='primary')
event_docs = FromEventStream('event', (), source, event_stream_name='primary')
all_docs = (event_docs.combine_latest(
    start_docs, descriptor_docs, emit_on=0, first=True).starmap(
        lambda e, s, d: {
            'raw_event': e,
Example #17
File: hpc_prov.py  Project: zthatch/SHED
from dask.distributed import Client
from dask_jobqueue import PBSCluster
from pprint import pprint
cluster = PBSCluster()
cluster.scale(10)  # Ask for ten workers

client = Client(cluster)

db = Broker.named('temp')

# Now this pipeline runs using HPC resources
source = Stream()
(FromEventStream('event', 'motor1', upstream=source)
 .scatter()
 .map(op.add, 1)
 .buffer(8)
 .gather()
 .ToEventStream('result').DBFriendly().starsink(db.insert))

RE = RunEngine()
RE.subscribe(lambda *x: source.emit(x))

RE(bp.count([hw.motor1], 1))

from shed.replay import replay
from rapidz.graph import _clean_text, readable_graph

# get the graph and data
graph, parents, data, vs = replay(db, db[-1])

# make a graph with human readable names