Example 1
def test_raw_pipeline(mask_s):
    # link the pipeline up
    namespace = link(*pipeline_order, **g_namespace)

    is_calibration_img = namespace["is_calibration_img"]
    geo_input = namespace["geo_input"]
    img_counter = namespace["img_counter"]
    namespace["mask_setting"]["setting"] = mask_s

    pdf = namespace["pdf"]
    raw_background_dark = namespace["raw_background_dark"]
    raw_background = namespace["raw_background"]
    raw_foreground_dark = namespace["raw_foreground_dark"]
    composition = namespace["composition"]
    raw_foreground = namespace["raw_foreground"]
    sl = pdf.sink_to_list()
    L = namespace["geometry"].sink_to_list()
    ml = namespace["mask"].sink_to_list()

    is_calibration_img.emit(False)
    a = geo.getPyFAI()
    geo_input.emit(a)
    for s in [raw_background_dark, raw_background, raw_foreground_dark]:
        s.emit(np.zeros(img.shape))
    composition.emit("Au")
    img_counter.emit(1)
    raw_foreground.emit(img)
    destroy_pipeline(raw_foreground)
    del namespace
    assert len(L) == 1
    assert ml
    assert len(sl) == 1
    sl.clear()
    L.clear()
    ml.clear()
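
A note on the pattern above: sink_to_list attaches a list that accumulates
everything a node emits, which is how these tests capture pipeline output.
A minimal self-contained sketch of that contract, assuming only the plain
rapidz Stream API:

from rapidz import Stream

s = Stream()
collected = s.sink_to_list()  # every value emitted on s lands in this list
for i in range(3):
    s.emit(i)
assert collected == [0, 1, 2]
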
Example 2
def test_main_pipeline(exp_db, fast_tmp_dir, start_uid3, start_uid1,
                       background, exception):
    namespace = link(*(pipeline_order + save_pipeline_order),
                     raw_source=Stream(stream_name="raw source"),
                     db=exp_db)
    namespace["save_kwargs"].update({"base_folder": fast_tmp_dir})
    raw_source = namespace["raw_source"]

    t0 = time.time()
    if background:
        uid = start_uid1
    else:
        uid = -1

    for nd in exp_db[uid].documents(fill=True):
        name, doc = nd
        if name == "start":
            if exception:
                doc["bt_wavelength"] = "bla"
            nd = (name, doc)
        try:
            raw_source.emit(nd)
        except ValueError:
            pass
    if background:
        name = "kapton"
    else:
        name = "Au"
    t1 = time.time()
    print(t1 - t0)
    n_events = len(list(exp_db[-1].events()))

    for root, dirs, files in os.walk(fast_tmp_dir):
        level = root.replace(fast_tmp_dir, "").count(os.sep)
        indent = " " * 4 * level
        print("{}{}/".format(indent, os.path.basename(root)))
        subindent = " " * 4 * (level + 1)
        for f in files:
            print("{}{}".format(subindent, f))
    time.sleep(2)
    print(os.listdir(fast_tmp_dir))
    print(os.listdir(os.path.join(fast_tmp_dir, name)))
    assert name in os.listdir(fast_tmp_dir)
    if exception:
        output_list = ["dark_sub", "mask"]
    else:
        output_list = ["dark_sub", "mask", "iq", "itth", "pdf"]
    for f in output_list:
        assert f in os.listdir(os.path.join(fast_tmp_dir, name))
        if f == "mask":
            assert (len(os.listdir(os.path.join(fast_tmp_dir, name,
                                                f))) == n_events * 2)
        else:
            assert (len(os.listdir(os.path.join(fast_tmp_dir, name,
                                                f))) == n_events)
    assert "{}_{:.6}.yaml".format(name,
                                  exp_db[uid].start["uid"][:6]) in os.listdir(
                                      os.path.join(fast_tmp_dir, name, "meta"))
Example 3
def test_main_pipeline(
    exp_db,
    fast_tmp_dir,
    start_uid3,
    start_uid1,
    start_uid2,
    background,
    exception,
    pe2
):
    namespace = link(
        *pipeline_order, raw_source=Stream(stream_name="raw source"),
        db=exp_db,
    )
    iq_em = ToEventStream(
        namespace["mean"].combine_latest(namespace["q"], emit_on=0),
        ("iq", "q"))
    iq_em.sink(print)

    limg = []
    move_to_first(namespace["bg_corrected_img"].sink(lambda x: limg.append(x)))
    lbgc = namespace["mean"].sink_to_list()
    lpdf = namespace["iq_comp"].sink_to_list()
    t0 = time.time()
    if background:
        uid = start_uid1
    elif pe2:
        uid = start_uid2
    else:
        uid = -1
    for nd in exp_db[uid].documents(fill=True):
        name, doc = nd
        if name == "start":
            if exception:
                doc["bt_wavelength"] = "bla"
            nd = (name, doc)
        try:
            namespace["raw_source"].emit(nd)
        except ValueError:
            pass
    t1 = time.time()
    print(t1 - t0)
    n_events = len(list(exp_db[-1].events()))
    assert len(limg) == n_events
    if exception:
        assert_lbgc = 0
    else:
        assert_lbgc = n_events
    assert len(lbgc) == assert_lbgc
    assert len(lpdf) == assert_lbgc
    assert iq_em.state == "stopped"
    destroy_pipeline(namespace["raw_source"])
    del namespace
    limg.clear()
    lbgc.clear()
    lpdf.clear()
Example 4
def tomo_callback_factory(doc, publisher, handler_reg, **kwargs):
    # TODO: Eventually extract from plan hints?
    if doc.get("tomo", {}).get("type", None) == "pencil":
        if len(doc['motors']) == 2:
            po = pencil_order
        else:
            po = d3_pencil_order
        return PencilTomoCallback(
            lambda **inner_kwargs: link(*po, **inner_kwargs),
            publisher,
            **kwargs,
        )
    elif doc.get("tomo", {}).get("type", None) == "full_field":
        return FullFieldTomoCallback(
            lambda **inner_kwargs: link(*full_field_order, **inner_kwargs),
            publisher,
            handler_reg=handler_reg,
            **kwargs,
        )
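
The factory above dispatches purely on keys in the start document. A
hypothetical document that would select the two-motor pencil branch (the
publisher and the empty handler registry are stand-ins, not part of the
original example):

start_doc = {"tomo": {"type": "pencil"}, "motors": ["x", "th"]}
# my_publisher is a stand-in Publisher; handler_reg is unused on this branch
cb = tomo_callback_factory(start_doc, publisher=my_publisher, handler_reg={})
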
Example 5
def raw_pipeline_parallel():
    # link the pipeline up
    gg_namespace = dict(g_namespace)
    s_ns = {
        k: v.scatter(backend="thread")
        for k, v in gg_namespace.items() if isinstance(v, Stream)
    }
    gg_namespace.update(
        {"_" + k: v
         for k, v in gg_namespace.items() if isinstance(v, Stream)})
    gg_namespace.update(s_ns)
    namespace = link(*pipeline_order[:-1], **gg_namespace)

    geo_input = namespace["_geo_input"]
    composition = namespace["_composition"]

    raw_background_dark = namespace["_raw_background_dark"]
    raw_background = namespace["_raw_background"]
    raw_foreground_dark = namespace["_raw_foreground_dark"]
    raw_foreground = namespace["_raw_foreground"]

    print(type(namespace["raw_foreground"]))

    a = namespace["mean"]
    futures = a.sink_to_list()
    b = a.buffer(10)
    g = b.gather()
    # g.sink(lambda x: print("gathered data", time.time()))
    LL = g.map(lambda x: time.time()).sink_to_list()
    L = g.sink_to_list()

    a = geo.getPyFAI()
    geo_input.emit(a)
    composition.emit("Au1.0")
    for s in [raw_background_dark, raw_background, raw_foreground_dark]:
        s.emit(np.zeros(img.shape))
    ii = 10
    t0 = time.time()
    for i in range(ii):
        rimg = np.random.random(img.shape)
        raw_foreground.emit(img + rimg)
    while len(L) < ii:
        time.sleep(.01)

    time_diff = [LL[i] - LL[i - 1] for i in range(1, ii)]
    print(max(time_diff), min(time_diff), sum(time_diff) / len(time_diff))
    # print([l - min(LL) for l in LL])
    print([l - t0 for l in LL])
    print(max([l - t0 for l in LL]) / ii)
    destroy_pipeline(raw_foreground)
    del namespace
    futures.clear()
    L.clear()
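
The scatter/buffer/gather trio above is the usual rapidz parallelism idiom:
scatter(backend="thread") submits each datum to the backend and emits
futures (which is why the sink_to_list on the scattered mean node collects
futures), buffer(10) keeps up to ten of them in flight, and gather resolves
them back into concrete results for the downstream timing sinks.
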
Example 6
def make_pipeline(_output_sinks=True):
    # link the pipeline up
    namespace = link(*(pipeline_order + [median_gen, std_gen, z_score_gen]),
                     **general_namespace)

    polarization_array = namespace["polarization_array"]
    mask = namespace["mask"]
    mean = namespace["mean"]
    q = namespace["q"]
    geometry = namespace["geometry"]
    dark_corrected_foreground = namespace["dark_corrected_foreground"]
    dark_corrected_background = namespace["dark_corrected_background"]
    mask_kwargs = namespace["mask_kwargs"]
    mask_setting = namespace["mask_setting"]

    median = namespace["median"]
    std = namespace["std"]
    z_score = namespace["z_score"]

    # Modify graph
    # create filename nodes
    filename_source = Stream(stream_name="filename")
    filename_node = filename_source.map(lambda x: os.path.splitext(x)[0])
    # write out mask
    mask.combine_latest(
        filename_node,
        emit_on=0).sink(lambda x: fit2d_save(np.flipud(x[0]), x[1]))
    mask.combine_latest(
        filename_node,
        emit_on=0).sink(lambda x: np.save(x[1] + "_mask.npy", x[0]))

    if _output_sinks:
        outs = [q, mean, median, std]
        out_tup = tuple([[] for _ in outs])
        out_sinks = tuple([k.sink(L.append) for k, L in zip(outs, out_tup)])

    (mean.zip(q).combine_latest(
        filename_node, emit_on=0).map(lambda l: (*l[0], l[1])).sink(
            lambda x: save_output(x[1], x[0], x[2], "Q")))
    (median.zip(q).combine_latest(
        filename_node, emit_on=0).map(lambda l: (*l[0], l[1])).sink(
            lambda x: save_output(x[1], x[0], x[2] + "_median", "Q")))
    (std.zip(q).combine_latest(
        filename_node, emit_on=0).map(lambda l: (*l[0], l[1])).sink(
            lambda x: save_output(x[1], x[0], x[2] + "_std", "Q")))
    (z_score.combine_latest(
        filename_node, emit_on=0).starsink(lambda img, n: tifffile.imsave(
            n + "_zscore.tif", data=img.astype(np.float32))))
    # When running from a terminal, pass _output_sinks=False so outputs
    # are not accumulated in lists (too much memory)
    return locals()
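
A hypothetical driving sequence for the pipeline built above (geo and img
mirror the other examples, and since make_pipeline returns its locals(),
the inner link namespace is reachable under the "namespace" key):

ns = make_pipeline(_output_sinks=False)
ns["filename_source"].emit("sample_0001.tiff")  # names all saved files
ns["geometry"].emit(geo)
ns["namespace"]["raw_foreground"].emit(img)
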
Example 7
def test_double_link():
    def make_a(**kwargs):
        in_a = Stream()
        out_a = in_a.map(lambda x: x + 1)
        return locals()

    def make_b(out_a, **kwargs):
        out_b = out_a.map(lambda x: x * 2)
        return locals()

    def make_c(out_a, out_b, **kwargs):
        out_c = out_a.zip(out_b).map(sum)
        return locals()

    ab = link(make_a, make_b)
    abc = link(make_c, **ab)
    assert set(abc.keys()) == {'in_a', 'out_a', 'out_b', 'out_c'}
    L = ab["out_b"].sink_to_list()
    L2 = abc["out_c"].sink_to_list()
    for i in range(10):
        ab["in_a"].emit(i)
    assert L == [(i + 1) * 2 for i in range(10)]
    assert L2 == [((i + 1) * 2) + i + 1 for i in range(10)]
Example 8
def test_tiff_pipeline(
    exp_db, fast_tmp_dir, start_uid3, start_uid1, background, exception
):
    namespace = link(
        *pipeline_order, raw_source=Stream(stream_name="raw source"),
        db=exp_db
    )
    namespace["save_kwargs"].update({"base_folder": fast_tmp_dir})
    raw_source = namespace["raw_source"]

    t0 = time.time()
    if background:
        uid = start_uid1
    else:
        uid = -1

    for nd in exp_db[uid].documents():
        name, doc = nd
        if name == "start":
            if exception:
                doc["bt_wavelength"] = "bla"
            nd = (name, doc)
        try:
            raw_source.emit(nd)
        except ValueError:
            pass
    if background:
        name = "kapton"
    else:
        name = "Au"
    t1 = time.time()
    print(t1 - t0)
    n_events = len(list(exp_db[-1].events()))
    for root, dirs, files in os.walk(fast_tmp_dir):
        level = root.replace(fast_tmp_dir, "").count(os.sep)
        indent = " " * 4 * level
        print("{}{}/".format(indent, os.path.basename(root)))
        subindent = " " * 4 * (level + 1)
        for f in files:
            print("{}{}".format(subindent, f))
    print(os.listdir(fast_tmp_dir))
    print(os.listdir(os.path.join(fast_tmp_dir, name)))
    assert name in os.listdir(fast_tmp_dir)
    for f in ["dark_sub"]:
        assert f in os.listdir(os.path.join(fast_tmp_dir, name))
        assert len(os.listdir(os.path.join(fast_tmp_dir, name, f))) == n_events
    assert "{}_{:.6}.yaml".format(
        name, exp_db[uid].start["uid"][:6]
    ) in os.listdir(os.path.join(fast_tmp_dir, name, "meta"))
Example 9
def test_link():
    def make_a():
        source = Stream()
        out_a = source.map(lambda x: x + 1)
        return locals()

    def make_b(out_a, **kwargs):
        out_b = out_a.map(lambda x: x * 2)
        return locals()

    ns = link(make_a, make_b)
    L = ns["out_b"].sink_to_list()
    for i in range(10):
        ns["source"].emit(i)
    assert L == [(i + 1) * 2 for i in range(10)]
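
test_link here and test_double_link in Example 7 pin down the contract of
link: each chunk function is called with the accumulated namespace as
keyword arguments, and the locals() it returns are merged back in. A
minimal re-implementation of that idea, for reference only (an assumption;
the real rapidz.link.link is more careful about name handling):

def link_sketch(*chunks, **namespace):
    for chunk in chunks:
        new = chunk(**namespace)
        # drop the catch-all **kwargs entry so it does not pollute the
        # namespace, matching the key set asserted in test_double_link
        new.pop("kwargs", None)
        namespace.update(new)
    return namespace
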
Example 10
def create_analysis_pipeline(
    order,
    stage_blacklist=(),
    publisher=Publisher(glbl_dict["inbound_proxy_address"], prefix=b"an"),
    **kwargs,
):
    """Create the analysis pipeline from an list of chunks and pipeline kwargs

    Parameters
    ----------
    order : list of functions
        The list of pipeline chunk functions
    stage_blacklist : tuple of str, optional
        Analysis stages whose ToEventStream nodes are not published
    publisher : Publisher, optional
        The publisher used to emit the resulting documents
    kwargs : Any
        The kwargs to pass to the pipeline creation

    Returns
    -------
    namespace : dict
        The namespace of the pipeline

    """
    namespace = link(
        *order, raw_source=Stream(stream_name="raw source"), **kwargs
    )
    source = namespace["source"]

    # do inspection of pipeline for ToEventModel nodes, maybe?
    # for analyzed data with independent data (vis and save)
    # strip the dependent vars from the raw data
    raw_stripped = move_to_first(source.starmap(StripDepVar()))
    namespace.update(
        to_event_stream_with_ind(
            raw_stripped,
            *[
                node
                for node in namespace.values()
                if isinstance(node, SimpleToEventStream)
                and node.md.get("analysis_stage", None) not in stage_blacklist
            ],
            publisher=publisher,
        )
    )

    return namespace
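
Hypothetical usage of the factory above (my_pipeline_order, the "calib"
stage name, and hdr are stand-ins for a real chunk list, a real analysis
stage, and a databroker header):

ns = create_analysis_pipeline(my_pipeline_order, stage_blacklist=("calib",))
for name_doc in hdr.documents(fill=True):
    ns["raw_source"].emit(name_doc)  # (name, doc) pairs, as in the tests
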
Example 11
def test_raw_pipeline_parallel(n):
    # caplog.set_level(logging.CRITICAL)
    # link the pipeline up
    gg_namespace = dict(g_namespace)
    s_ns = {
        k: v.scatter(backend="thread")
        for k, v in gg_namespace.items() if isinstance(v, Stream)
    }
    gg_namespace.update(
        {"_" + k: v
         for k, v in gg_namespace.items() if isinstance(v, Stream)})
    gg_namespace.update(s_ns)
    namespace = link(*pipeline_order, **gg_namespace)

    geo_input = namespace["_geo_input"]

    raw_background_dark = namespace["_raw_background_dark"]
    raw_background = namespace["_raw_background"]
    raw_foreground_dark = namespace["_raw_foreground_dark"]
    raw_foreground = namespace["_raw_foreground"]

    a = namespace[n]
    futures = a.sink_to_list()
    b = a.buffer(10)
    g = b.gather()
    g.sink(lambda x: print("gathered data", time.time()))
    L = g.sink_to_list()

    a = geo.getPyFAI()
    yield geo_input.emit(a)
    for s in [raw_background_dark, raw_background, raw_foreground_dark]:
        yield s.emit(np.zeros(img.shape))
    ii = 2
    for i in range(ii):
        rimg = np.random.random(img.shape)
        yield raw_foreground.emit(img + rimg)
    while len(L) < ii:
        yield gen.sleep(.01)

    destroy_pipeline(raw_foreground)
    del namespace
    futures.clear()
    L.clear()
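
The yield statements and gen.sleep mark this as a Tornado coroutine test;
presumably it runs under a gen_test-style decorator that drives the event
loop while the scattered futures resolve.
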
Example 12
def test_tomo_piecewise_pipeline(rand_size):
    ns = dict(
        qoi=Stream(),
        x=Stream(),
        th=Stream(),
        th_dim=Stream(),
        x_dim=Stream(),
        th_extents=Stream(),
        x_extents=Stream(),
        center=Stream(),
    )
    x_linspace = np.linspace(0, 5, 6)
    th_linspace = np.linspace(0, 180, 6)

    ns["th_dimension"] = len(th_linspace)
    ns["x_dimension"] = len(x_linspace)

    ns.update(**link(*[tomo_prep, tomo_pipeline_piecewise], **ns))

    L = ns["rec"].sink_to_list()

    ns["th_dim"].emit(len(th_linspace))
    ns["x_dim"].emit(len(x_linspace))
    ns["th_extents"].emit([0, 180])
    ns["x_extents"].emit([x_linspace[0], x_linspace[-1]])
    ns["center"].emit(2.5)

    # np.random.seed(42)

    for x in x_linspace:
        for th in th_linspace:
            ns["x"].emit(x)
            ns["th"].emit(th)
            ns["qoi"].emit(np.random.random(rand_size))

    assert len(L) == len(x_linspace) * len(th_linspace)
    if rand_size:
        assert L[-1].shape == (*rand_size, len(x_linspace), len(th_linspace))
    else:
        assert L[-1].shape == (len(x_linspace), len(th_linspace))
    destroy_pipeline(ns["qoi"])
    del ns
    L.clear()
Example 13
def create_analysis_pipeline(
    order, inbound_proxy_address=glbl_dict["inbound_proxy_address"], **kwargs
):
    """Create the analysis pipeline from an list of chunks and pipeline kwargs

    Parameters
    ----------
    order : list of functions
        The list of pipeline chunk functions
    inbound_proxy_address : str, optional
        The address the internal Publisher publishes documents to
    kwargs : Any
        The kwargs to pass to the pipeline creation

    Returns
    -------
    namespace : dict
        The namespace of the pipeline

    """
    namespace = link(
        *order, raw_source=Stream(stream_name="raw source"), **kwargs
    )
    source = namespace["source"]

    # do inspection of pipeline for ToEventModel nodes, maybe?
    # for analyzed data with independent data (vis and save)
    an_with_ind_pub = Publisher(inbound_proxy_address, prefix=b"an")
    # strip the dependent vars from the raw data
    raw_stripped = move_to_first(source.starmap(StripDepVar()))
    namespace.update(
        to_event_stream_with_ind(
            raw_stripped,
            *[
                node
                for node in namespace.values()
                if isinstance(node, SimpleToEventStream)
            ],
            publisher=an_with_ind_pub
        )
    )

    return namespace
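
Unlike the variant in Example 10, this version builds its Publisher
internally from inbound_proxy_address instead of accepting one as a
parameter, and it publishes every SimpleToEventStream node with no stage
blacklist.
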
Example 14
def test_qoi_pipeline():
    # link the pipeline up
    namespace = link(*(pipeline_order + [max_intensity_mean, max_gr_mean]),
                     **g_namespace)

    geometry = namespace["geometry"]

    mean_max = namespace["mean_max"]
    raw_background_dark = namespace["raw_background_dark"]
    raw_background = namespace["raw_background"]
    raw_foreground_dark = namespace["raw_foreground_dark"]
    raw_foreground = namespace["raw_foreground"]

    sl = mean_max.sink_to_list()
    geometry.emit(geo)
    for s in [raw_background_dark, raw_background, raw_foreground_dark]:
        s.emit(np.zeros(img.shape))
    raw_foreground.emit(img)
    del namespace
    destroy_pipeline(raw_foreground)
    assert len(sl) == 1
    sl.clear()
Example 15
def test_extra_pipeline():
    # link the pipeline up
    namespace = link(*(pipeline_order + [median_gen, std_gen, z_score_gen]),
                     **g_namespace)

    geometry = namespace["geometry"]

    z_score = namespace["z_score"]
    raw_background_dark = namespace["raw_background_dark"]
    raw_background = namespace["raw_background"]
    raw_foreground_dark = namespace["raw_foreground_dark"]
    raw_foreground = namespace["raw_foreground"]

    sl = z_score.sink_to_list()
    geometry.emit(geo)
    for s in [raw_background_dark, raw_background, raw_foreground_dark]:
        s.emit(np.zeros(img.shape))
    raw_foreground.emit(img)
    del namespace
    destroy_pipeline(raw_foreground)
    assert len(sl) == 1
    sl.clear()
Example 16
def start_analysis(save=True, vis=True, **kwargs):
    """Start analysis pipeline [Depreciated]

    Parameters
    ----------
    mask_kwargs : dict
        The kwargs passed to the masking, see xpdtools.tools.mask_img
    pdf_kwargs : dict
        The kwargs passed to the pdf generator, see xpdtools.tools.pdf_getter
    fq_kwargs : dict
        The kwargs passed to the fq generator, see xpdtools.tools.fq_getter
    mask_setting : dict
        The setting of the mask
    save_template : str
        The template string for file saving
    base_folder : str
        The base folder for saving files
    """
    warn(DeprecationWarning("Use the server instead"))
    # TODO: also start up grave vis, maybe?
    d = RemoteDispatcher(glbl_dict["outbound_proxy_address"])
    install_qt_kicker(
        loop=d.loop
    )  # This may need to be d._loop depending on tag
    # copy so the += below does not mutate the module-level pipeline_order
    order = list(pipeline_order)
    if save:
        order += save_pipeline_order
    if vis:
        order += [vis_pipeline]
    namespace = link(
        *order, raw_source=Stream(stream_name="raw source"), **kwargs
    )
    raw_source = namespace["raw_source"]
    d.subscribe(lambda *x: raw_source.emit(x))
    print("Starting Analysis Server")
    d.start()
Example 17
import numpy as np
from tifffile import imread
import pyFAI
from xpdtools.pipelines.raw_pipeline import pipeline_order, namespace

from profilehooks import profile
from rapidz.link import link

namespace["mask_setting"].update(setting="first")

namespace = link(*pipeline_order, **namespace)

# dark_corrected_background.sink(print)
# pol_corrected_img_zip.sink(print)
# mask.sink(print)
# binner.sink(print)
# mean.sink(print)

geo = pyFAI.load("test.poni")
img = imread("test.tiff")

namespace["geometry"].emit(geo)
namespace["composition"].emit("Au")
for n in [
        namespace["raw_background_dark"],
        namespace["raw_background"],
        namespace["raw_foreground_dark"],
]:
    n.emit(np.zeros(img.shape))

Example 18
                stream_name="raw_foreground",
            )
            for image_name in image_names
        ]
    ).map(np.float32)
    raw_source.starsink(StartStopCallback())
    return locals()


pipeline_order = [
    start_gen,
    image_process,
    calibration,
    clear_geo_gen,
    save_cal,
    scattering_correction,
    gen_mask,
    integration,
    pdf_gen,
    clear_comp,
]

# If run as a script, visualize the pipeline
if __name__ == "__main__":  # pragma: no cover
    from rapidz import Stream
    from rapidz.link import link

    raw_source = Stream(stream_name="raw_source")
    ns = link(*pipeline_order, raw_source=raw_source)
    ns["raw_source"].visualize(source_node=True)
Example 19

def astype(x, ret_type='float32'):
    return x.astype(ret_type)


def pipeline(raw_source):
    b = (raw_source.map(astype).map(np.sum)
         # .sink(print)
         .ToEventStream(('sum', )).DBFriendly().starsink(db2.insert)
         )
    return locals()


namespace = link(pipeline,
                 raw_source=FromEventStream('event', ('data', 'pe1_image'),
                                            principle=True))

# vis = False
vis = True
# source.visualize(source_node=True)
# '''
for hdr in list((db[-1], )):
    for e in hdr.documents(fill=True):
        if e[0] == 'start':
            e[1].update(composition_string='EuTiO3')
        if e[0] == 'event' and vis:
            plt.pause(.1)
        if e[0] == 'event':
            if e[1]['seq_num'] > 3:
                continue
Example 20
def run_server(
    prefix=None,
    outbound_proxy_address=glbl_dict["outbound_proxy_address"],
    inbound_proxy_address=glbl_dict["inbound_proxy_address"],
    _publisher=None,
    **kwargs
):
    """Start up the QOI server

    Parameters
    ----------
    prefix : bytes or list of bytes, optional
        The Publisher channels to listen to. Defaults to
        ``[b"an", b"raw"]``
    outbound_proxy_address : str, optional
        The address and port of the zmq proxy. Defaults to
        ``glbl_dict["outbound_proxy_address"]``
    inbound_proxy_address : str, optional
        The inbound IP address for the ZMQ server. Defaults to the value
        from the global dict
    """
    if prefix is None:
        prefix = [b"an", b"raw"]

    d = RemoteDispatcher(outbound_proxy_address, prefix=prefix)
    install_qt_kicker(loop=d.loop)

    if _publisher is None:
        an_with_ind_pub = Publisher(inbound_proxy_address, prefix=b"qoi")
    else:
        an_with_ind_pub = _publisher

    raw_source = Stream()

    # create amorphous pipeline
    amorphous_ns = link(
        *[amorphsivity_fem, amorphsivity_pipeline, amorphsivity_tem],
        source=Stream(),
        **kwargs
    )
    # Combine the data outputs with the raw independent data
    amorphous_ns.update(
        to_event_stream_with_ind(
            move_to_first(raw_source.starmap(StripDepVar())),
            *[
                node
                for node in amorphous_ns.values()
                if isinstance(node, SimpleToEventStream)
            ],
            publisher=an_with_ind_pub
        )
    )

    rr = RunRouter(
        [
            lambda x: (lambda *y: raw_source.emit(y))
            if x["analysis_stage"] == "raw"
            else None,
            lambda x: (lambda *y: amorphous_ns["source"].emit(y))
            if x["analysis_stage"] == "pdf"
            else None,
        ]
    )
    d.subscribe(rr)
    print("Starting QOI Server")
    d.start()
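
A hypothetical invocation; the proxy addresses default to the values in
glbl_dict, and a test can inject its own publisher through the private
_publisher hook:

run_server(prefix=[b"an", b"raw"])  # blocks, dispatching documents until killed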