Example #1
def clear_geo_gen(source, geometry_img_shape, **kwargs):
    # If a new calibration uid arrives, invalidate the current calibration cache
    a = FromEventStream("start", ("detector_calibration_client_uid",), source)
    move_to_first(a)
    a.unique(history=1).sink(
        lambda x: geometry_img_shape.lossless_buffer.clear()
    )
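A minimal sketch of exercising this chunk, assuming the rapidz Stream used throughout these examples; the SimpleNamespace cache holder and the start document below are hypothetical stand-ins:

from types import SimpleNamespace
from rapidz import Stream

# hypothetical stand-in for the object carrying the calibration cache
shape = SimpleNamespace(lossless_buffer={"stale": "calibration"})

source = Stream(stream_name="raw source")
clear_geo_gen(source, geometry_img_shape=shape)

# a start document with a new calibration uid should clear the cache
source.emit(("start", {"detector_calibration_client_uid": "new-uid"}))
assert not shape.lossless_buffer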
Example #2
def test_main_pipeline(
    exp_db,
    fast_tmp_dir,
    start_uid3,
    start_uid1,
    start_uid2,
    background,
    exception,
    pe2
):
    namespace = link(
        *pipeline_order,
        raw_source=Stream(stream_name="raw source"),
        db=exp_db,
    )
    iq_em = ToEventStream(
        namespace["mean"].combine_latest(namespace["q"], emit_on=0),
        ("iq", "q"))
    iq_em.sink(print)

    limg = []
    move_to_first(namespace["bg_corrected_img"].sink(lambda x: limg.append(x)))
    lbgc = namespace["mean"].sink_to_list()
    lpdf = namespace["iq_comp"].sink_to_list()
    t0 = time.time()
    # Select which stored run to replay based on the test parameters
    if background:
        uid = start_uid1
    elif pe2:
        uid = start_uid2
    else:
        uid = -1
    for nd in exp_db[uid].documents(fill=True):
        name, doc = nd
        if name == "start":
            if exception:
                # a bogus wavelength should make the pipeline raise
                doc["bt_wavelength"] = "bla"
            nd = (name, doc)
        try:
            namespace["raw_source"].emit(nd)
        except ValueError:
            pass
    t1 = time.time()
    print(t1 - t0)
    n_events = len(list(exp_db[-1].events()))
    assert len(limg) == n_events
    # downstream results only appear when no exception was injected
    if exception:
        assert_lbgc = 0
    else:
        assert_lbgc = n_events
    assert len(lbgc) == assert_lbgc
    assert len(lpdf) == assert_lbgc
    assert iq_em.state == "stopped"
    destroy_pipeline(namespace["raw_source"])
    del namespace
    limg.clear()
    lbgc.clear()
    lpdf.clear()
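The background, exception, and pe2 flags arrive as pytest fixtures; a plausible, purely hypothetical parametrization (the project's actual conftest is not shown here):

import pytest

@pytest.fixture(params=[True, False])
def background(request):
    return request.param

@pytest.fixture(params=[True, False])
def exception(request):
    return request.param

@pytest.fixture(params=[True, False])
def pe2(request):
    return request.param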
Example #3
def test_align_interrupted(RE, hw):
    a = Stream()
    b = FromEventStream("event", ("data", "img"), a, principle=True).map(
        op.add, 1
    )
    b.sink(print)
    c = ToEventStream(b, ("out",))
    z = move_to_first(a.AlignEventStreams(c))
    sl = z.sink_to_list()

    L = []

    RE.subscribe(lambda *x: L.append(x))

    RE(count([hw.img]))

    for nd in L:
        name, doc = nd
        # cause an exception
        if name == "event":
            doc["data"]["img"] = "hi"
        try:
            a.emit((name, doc))
        except TypeError:
            pass
    assert {"start", "stop"} == set(list(zip(*sl))[0])
    # check that the buffers have not been cleared yet
    sl.clear()
    # Any elements left in the buffers must be cleared once new start
    # documents come in.
    for nd in L:
        name, doc = nd
        # this time emit valid data, so no exception is raised
        if name == "event":
            doc["data"]["img"] = 1
        a.emit((name, doc))
        if name == "start":
            # now buffers should be clear
            assert not any(
                [b for n, tb in z.true_buffers.items() for u, b in tb.items()]
            )
    assert {"start", "descriptor", "event", "stop"} == set(list(zip(*sl))[0])
    # now buffers should be clear (as all docs were emitted)
    assert not any(
        [b for n, tb in z.true_buffers.items() for u, b in tb.items()]
    )
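The set(list(zip(*sl))[0]) idiom above just collects the document names out of the (name, doc) pairs captured by the sink; in isolation:

sl = [("start", {}), ("event", {}), ("stop", {})]
names = set(list(zip(*sl))[0])
assert names == {"start", "event", "stop"}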
Example #4
def create_analysis_pipeline(
    order,
    stage_blacklist=(),
    publisher=Publisher(glbl_dict["inbound_proxy_address"], prefix=b"an"),
    **kwargs,
):
    """Create the analysis pipeline from an list of chunks and pipeline kwargs

    Parameters
    ----------
    order : list of functions
        The list of pipeline chunk functions
    kwargs : Any
        The kwargs to pass to the pipeline creation

    Returns
    -------
    namespace : dict
        The namespace of the pipeline

    """
    namespace = link(
        *order, raw_source=Stream(stream_name="raw source"), **kwargs
    )
    source = namespace["source"]

    # do inspection of pipeline for ToEventModel nodes, maybe?
    # for analyzed data with independent data (vis and save)
    # strip the dependent vars from the raw data
    raw_stripped = move_to_first(source.starmap(StripDepVar()))
    namespace.update(
        to_event_stream_with_ind(
            raw_stripped,
            *[
                node
                for node in namespace.values()
                if isinstance(node, SimpleToEventStream)
                and node.md.get("analysis_stage", None) not in stage_blacklist
            ],
            publisher=publisher,
        )
    )

    return namespace
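A hedged usage sketch: my_chunk is a hypothetical pipeline chunk following the locals()-returning convention of the other examples, and print stands in for the publisher so the sketch does not need a live ZMQ proxy (with no SimpleToEventStream nodes in the chunk, the publish step is assumed to be a no-op):

def my_chunk(raw_source, **kwargs):
    # trivial chunk: expose the raw stream under the expected name
    source = raw_source
    return locals()

ns = create_analysis_pipeline(
    [my_chunk],
    stage_blacklist=("mask",),  # hypothetical stage to suppress
    publisher=print,
)
ns["raw_source"].emit(("start", {"uid": "abc"}))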
Example #5
def create_analysis_pipeline(
    order, inbound_proxy_address=glbl_dict["inbound_proxy_address"], **kwargs
):
    """Create the analysis pipeline from an list of chunks and pipeline kwargs

    Parameters
    ----------
    order : list of functions
        The list of pipeline chunk functions
    kwargs : Any
        The kwargs to pass to the pipeline creation

    Returns
    -------
    namespace : dict
        The namespace of the pipeline

    """
    namespace = link(
        *order, raw_source=Stream(stream_name="raw source"), **kwargs
    )
    source = namespace["source"]

    # do inspection of pipeline for ToEventModel nodes, maybe?
    # for analyzed data with independent data (vis and save)
    an_with_ind_pub = Publisher(inbound_proxy_address, prefix=b"an")
    # strip the dependent vars from the raw data
    raw_stripped = move_to_first(source.starmap(StripDepVar()))
    namespace.update(
        to_event_stream_with_ind(
            raw_stripped,
            *[
                node
                for node in namespace.values()
                if isinstance(node, SimpleToEventStream)
            ],
            publisher=an_with_ind_pub
        )
    )

    return namespace
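The only difference from the previous variant is that the Publisher is built inside the function from an address rather than injected, so callers override the address instead of the object. Reusing the hypothetical my_chunk from the sketch above (a ZMQ proxy must be listening at the hypothetical address):

ns = create_analysis_pipeline(
    [my_chunk], inbound_proxy_address="localhost:5577"
)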
Example #6
def test_align_buffering(RE, hw):
    # gate: while False, the filter below drops every document, so the
    # first run only fills the alignment buffers
    zz = {"data": False}
    a = Stream()
    b = FromEventStream(
        "event",
        ("data", "motor"),
        a.filter(lambda x: zz["data"]),
        principle=True,
    ).map(op.add, 1)
    c = ToEventStream(b, ("out",))
    z = move_to_first(a.AlignEventStreams(c))
    sl = z.sink_to_list()

    RE.subscribe(lambda *x: a.emit(x))

    # first run: gate closed, so its documents are buffered, never aligned
    RE(scan([hw.img], hw.motor, 0, 10, 10, md={"hello": "world"}))
    zz["data"] = True
    sl.clear()
    # second run: documents flow through; the first run's metadata must
    # not leak into the aligned output
    RE(scan([hw.img], hw.motor, 0, 10, 10))

    assert "hello" not in sl[0][1]
Example #7
File: tomo.py Project: xpdAcq/xpdAn
def tomo_event_stream(
    source, rec, sinogram, *, qoi_name, rec_3D=None, **kwargs
):
    raw_stripped = move_to_first(source.starmap(StripDepVar()))

    rec_tes = SimpleToEventStream(
        rec, (f"{qoi_name}_tomo",), analysis_stage=f"{qoi_name}_tomo"
    ).LastCache()

    # If we have a 3D reconstruction translate it
    if rec_3D:
        rec_3D_tes = SimpleToEventStream(
            rec_3D,
            (f"{qoi_name}_tomo_3D",),
            analysis_stage=f"{qoi_name}_tomo_3D",
        ).LastCache()

    sinogram_tes = SimpleToEventStream(
        sinogram,
        (f"{qoi_name}_sinogram",),
        analysis_stage=f"{qoi_name}_sinogram",
    ).LastCache()

    return locals()
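A sketch of standing this chunk up with bare rapidz streams as hypothetical upstreams; the returned locals() dict exposes the translation nodes by name (assumes the shed SimpleToEventStream/LastCache API shown above):

from rapidz import Stream

source = Stream(stream_name="raw source")
rec, sinogram = Stream(), Stream()

ns = tomo_event_stream(source, rec, sinogram, qoi_name="mean")
ns["rec_tes"].sink(print)       # translated reconstruction documents
ns["sinogram_tes"].sink(print)  # translated sinogram documents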
Example #8
def run_server(
    prefix=None,
    outbound_proxy_address=glbl_dict["outbound_proxy_address"],
    inbound_proxy_address=glbl_dict["inbound_proxy_address"],
    _publisher=None,
    **kwargs
):
    """Start up the QOI server

    Parameters
    ----------
    prefix : bytes or list of bytes, optional
        The Publisher channels to listen to. Defaults to
        ``[b"an", b"raw"]``
    outbound_proxy_address : str, optional
        The address and port of the zmq proxy. Defaults to
        ``glbl_dict["outbound_proxy_address"]``
    inbound_proxy_address : str, optional
        The inbound ip address for the ZMQ server. Defaults to the value
        from the global dict
    _publisher : callable, optional
        If given, used in place of the default ``Publisher`` built from
        ``inbound_proxy_address``
    kwargs : Any
        Kwargs passed to the pipeline creation
    """
    if prefix is None:
        prefix = [b"an", b"raw"]

    d = RemoteDispatcher(outbound_proxy_address, prefix=prefix)
    install_qt_kicker(loop=d.loop)

    if _publisher is None:
        an_with_ind_pub = Publisher(inbound_proxy_address, prefix=b"qoi")
    else:
        an_with_ind_pub = _publisher

    raw_source = Stream()

    # create amorphous pipeline
    amorphous_ns = link(
        amorphsivity_fem, amorphsivity_pipeline, amorphsivity_tem,
        source=Stream(),
        **kwargs
    )
    # Combine the data outputs with the raw independent data
    amorphous_ns.update(
        to_event_stream_with_ind(
            move_to_first(raw_source.starmap(StripDepVar())),
            *[
                node
                for node in amorphous_ns.values()
                if isinstance(node, SimpleToEventStream)
            ],
            publisher=an_with_ind_pub
        )
    )

    # note: the stage check runs inside the per-document callback, so each
    # factory always returns a callback
    rr = RunRouter(
        [
            lambda x: lambda *y: (
                raw_source.emit(y) if x["analysis_stage"] == "raw" else None
            ),
            lambda x: lambda *y: (
                amorphous_ns["source"].emit(y)
                if x["analysis_stage"] == "pdf"
                else None
            ),
        ]
    )
    d.subscribe(rr)
    print("Starting QOI Server")
    d.start()
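Invocation is then a single call; note that d.start() blocks, install_qt_kicker assumes a Qt event loop is available, and the outbound proxy from glbl_dict must be running. The print stand-in (hypothetical usage) avoids opening the inbound connection:

run_server(prefix=[b"an"], _publisher=print)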
Example #9
def clear_comp(source, iq_comp, **kwargs):
    # Clear the composition on every start document
    # FIXME: Needs to go after the iq_comp is defined
    a = FromEventStream("start", (), source)
    move_to_first(a)
    a.sink(lambda x: clear_combine_latest(iq_comp, 1))
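As with clear_geo_gen above, a minimal sketch; iq_comp is assumed to be a combine_latest node whose cached slot clear_combine_latest resets (per the comment in the chunk):

from rapidz import Stream

source = Stream(stream_name="raw source")
iq_comp = Stream().combine_latest(Stream(), emit_on=0)

clear_comp(source, iq_comp)
# every start document resets the cached composition
source.emit(("start", {"uid": "xyz"}))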