Example No. 1
def test_main_pipeline(
    exp_db,
    fast_tmp_dir,
    start_uid3,
    start_uid1,
    start_uid2,
    background,
    exception,
    pe2
):
    namespace = link(
        *pipeline_order, raw_source=Stream(stream_name="raw source"),
        db=exp_db,
    )
    iq_em = ToEventStream(
        namespace["mean"].combine_latest(namespace["q"], emit_on=0),
        ("iq", "q"))
    iq_em.sink(print)

    limg = []
    move_to_first(namespace["bg_corrected_img"].sink(lambda x: limg.append(x)))
    lbgc = namespace["mean"].sink_to_list()
    lpdf = namespace["iq_comp"].sink_to_list()
    t0 = time.time()
    # Pick which stored run to replay through the pipeline.
    if background:
        uid = start_uid1
    elif pe2:
        uid = start_uid2
    else:
        uid = -1
    # Replay the run's documents into the raw source; when ``exception`` is
    # set, the start document gets a bogus wavelength so that downstream
    # processing raises ValueError, which the test deliberately swallows.
    for nd in exp_db[uid].documents(fill=True):
        name, doc = nd
        if name == "start":
            if exception:
                doc["bt_wavelength"] = "bla"
            nd = (name, doc)
        try:
            namespace["raw_source"].emit(nd)
        except ValueError:
            pass
    t1 = time.time()
    print(t1 - t0)
    n_events = len(list(exp_db[-1].events()))
    # Every event should yield a background-corrected image; the mean and
    # iq_comp sinks only receive data when the start document was valid.
    assert len(limg) == n_events
    if exception:
        assert_lbgc = 0
    else:
        assert_lbgc = n_events
    assert len(lbgc) == assert_lbgc
    assert len(lpdf) == assert_lbgc
    assert iq_em.state == "stopped"
    destroy_pipeline(namespace["raw_source"])
    del namespace
    limg.clear()
    lbgc.clear()
    lpdf.clear()
Example No. 2
def max_intensity_mean(mean_max, q_at_mean_max, **kwargs):
    max_tes = SimpleToEventStream(
        mean_max.combine_latest(q_at_mean_max, emit_on=0),
        ("iq_max", "q_iq_max"),
        analysis_stage="max",
    )
    return locals()
Example No. 3
def max_gr_mean(gr_max, r_at_gr_max, **kwargs):
    max_pdf_tes = SimpleToEventStream(
        gr_max.combine_latest(r_at_gr_max, emit_on=0),
        ("pdf_max", "r_pdf_max"),
        analysis_stage="max_pdf",
    )
    return locals()
Example No. 4
def to_event_model(data, output_info, md=None):
    """Take an iterable of data and put it into the event model

    Parameters
    ----------
    data : iterable
        The data to be inserted
    output_info : list of tuple
        The name of the data and information to put into the descriptor
    md : dict, optional
        Metadata to put into the start document

    Yields
    ------
    name : str
        Name of the document
    document : dict
        The document itself

    Notes
    -----
    This is only for demonstration/example use, do not use for production.
    """
    if md is None:
        md = {}
    else:
        md = md.copy()
    # add some metadata
    md.update({"source": "to_event_model"})
    source = Stream()
    fes = SimpleFromEventStream("start", (), source, principle=True)
    tes = SimpleToEventStream(fes, output_info, **md)

    start = None
    for d in data:
        if not start:
            # Emit the start and descriptor documents only once, for the
            # first datum, then switch to plain events.
            yield tes.start(d)
            yield tes.descriptor(d)
            start = True
        yield tes.event(d)
    yield "stop", tes._create_stop(d)
Example No. 5
install_kicker()
bec = BestEffortCallback()
bec.enable_plots()
hw = hw()
RE = RunEngine()
# build the pipeline
raw_source = Stream()
raw_output = SimpleFromEventStream('event', ('data', 'det_a'),
                                   raw_source,
                                   principle=True)
raw_output2 = SimpleFromEventStream('event', ('data', 'noisy_det'), raw_source)

pipeline = raw_output.union(raw_output2).map(lambda x: 1).accumulate(
    lambda x, y: x + y)

res = SimpleToEventStream(pipeline, ('result', ))

merge = AlignEventStreams(res, raw_source)
merge.starsink(bec)

RE.subscribe(lambda *x: raw_source.emit(x))
RE(
    pchain(
        bp.scan([hw.noisy_det], hw.motor, 0, 10, 10),
        bp.grid_scan([hw.ab_det],
                     hw.motor,
                     0,
                     10,
                     10,
                     hw.motor2,
                     0,
Example No. 6
    name="img",
    labels={"detectors"},
)
RE = RunEngine()
# build the pipeline
raw_source = Stream()
raw_output = SimpleFromEventStream("event", ("data", "det_a"),
                                   raw_source,
                                   principle=True)
raw_output2 = SimpleFromEventStream("event", ("data", "noisy_det"), raw_source)
raw_output3 = SimpleFromEventStream("event", ("data", "img"), raw_source)

pipeline = (
    raw_output.union(raw_output2, raw_output3.map(np.sum))
    .map(lambda x: x**2)
    .accumulate(lambda x, y: x + y)
)

res = SimpleToEventStream(pipeline, ("result", ))

merge = AlignEventStreams(raw_source.starmap(StripDepVar()), res)
merge.sink(pprint)
# send to viz server
merge.starsink(p)

RE.subscribe(lambda *x: raw_source.emit(x))
RE.subscribe(lambda *x: p(*x))
RE.subscribe(lambda *x: time.sleep(.1))
RE.subscribe(lambda *x: time.sleep(1), "stop")

RE(
    pchain(
        bp.scan([hw.noisy_det], hw.motor, 0, 10, 10),
        bp.grid_scan(
Example No. 7
def amorphsivity_tem(amorphsivity, **kwargs):
    amorphsivity_em = SimpleToEventStream(amorphsivity, "amorphsivity",
                                          analysis_stage='amorphsivity')
    return locals()
Example No. 8
def tes_radiograph(norm_img, ave_img, **kwargs):
    norm_img_tes = SimpleToEventStream(norm_img, ("normalized_img", ),
                                       analysis_stage="norm_img")
    ave_img_tes = SimpleToEventStream(ave_img, ("averaged_img", ),
                                      analysis_stage="ave_img")
    return locals()
Example No. 9
def z_score_tem(z_score, **kwargs):
    z_score_tes = SimpleToEventStream(z_score, ("z_score", ),
                                      analysis_stage="z_score")
    return locals()
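
The factory functions in the last few examples (max_intensity_mean,
amorphsivity_tem, tes_radiograph, z_score_tem) all return locals() so that a
chaining helper such as the link used in Example No. 1 can thread one chunk's
outputs into the next chunk's keyword arguments. A rough sketch of that
pattern, using a simplified stand-in for link and a hypothetical upstream
chunk (not the exact xpdtools helper):

from streamz import Stream  # assumption: any streamz-style Stream works here


def link(*chunks, **namespace):
    # Call each pipeline chunk with everything gathered so far and fold its
    # returned locals() back into the shared namespace.
    for chunk in chunks:
        namespace.update(chunk(**namespace))
    return namespace


def z_score_source(**kwargs):
    # Hypothetical upstream chunk that provides the "z_score" node consumed
    # by z_score_tem above.
    z_score = Stream(stream_name="z_score")
    return locals()


namespace = link(z_score_source, z_score_tem)
# namespace now holds both the raw "z_score" node and the "z_score_tes"
# translation node built by z_score_tem.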