Example 1
    def make_and_start_dispatcher(queue):  # pragma: no cover
        def put_in_queue(name, doc):
            print('putting ', name, 'in queue')
            queue.put((name, doc))

        d = RemoteDispatcher('127.0.0.1:5568', prefix=[b'sb', b'not_sb'])
        d.subscribe(put_in_queue)
        print("REMOTE IS READY TO START")
        d.loop.call_later(9, d.stop)
        d.start()
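
These dispatcher helpers are meant to run in a child process while the parent publishes documents; a minimal sketch of launching one, assuming a proxy is already forwarding on port 5568:

import multiprocessing

queue = multiprocessing.Queue()
dispatcher_proc = multiprocessing.Process(
    target=make_and_start_dispatcher, args=(queue,)
)
dispatcher_proc.start()
# Documents whose topic matches the prefix (b'sb' or b'not_sb') arrive
# on the queue as (name, doc) pairs until d.stop() fires after 9 seconds.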
Example 2
    def make_and_start_dispatcher(queue):  # pragma: no cover
        def put_in_queue(name, doc):
            print('putting ', name, 'in queue')
            queue.put((name, doc))

        d = RemoteDispatcher('127.0.0.1:5568', deserializer=cloudpickle.loads)
        d.subscribe(put_in_queue)
        print("REMOTE IS READY TO START")
        d.loop.call_later(9, d.stop)
        d.start()
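
For the ``cloudpickle.loads`` deserializer above to decode anything, the sending side must serialize with cloudpickle as well; a sketch of the matching publisher (the inbound port 5567 is an assumption):

from bluesky.callbacks.zmq import Publisher
import cloudpickle

# Serialize documents with cloudpickle so the RemoteDispatcher above,
# configured with deserializer=cloudpickle.loads, can decode them.
publisher = Publisher('127.0.0.1:5567', serializer=cloudpickle.dumps)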
Example 3
def run_server(
    handlers=None,
    prefix=None,
    outbound_proxy_address=glbl_dict["outbound_proxy_address"],
):
    """Start up the visualization server

    Parameters
    ----------
    handlers : dict
        The map between handler specs and handler classes, defaults to
        the map used by the experimental databroker if possible
    prefix : bytes or list of bytes, optional
        The Publisher channels to listen to. Defaults to
        ``[b"an", b"raw"]``
    outbound_proxy_address : str, optional
        The address and port of the zmq proxy. Defaults to
        ``glbl_dict["outbound_proxy_address"]``
    """

    if handlers is None:
        for db in ["exp_db", "an_db"]:
            if db in glbl_dict:
                handlers = glbl_dict[db].reg.handler_reg
                break

    # Apply the documented default channels when none were given.
    if prefix is None:
        prefix = [b"an", b"raw"]

    d = RemoteDispatcher(outbound_proxy_address, prefix=prefix)
    install_qt_kicker(loop=d.loop)

    func_l = [
        lambda x: if_correct_start(
            LiveImage(
                handler_reg=handlers,
                cmap="viridis",
                norm=SymLogNorm(1),
                limit_func=lambda x: (np.nanmin(x), np.nanmax(x)),
            ),
            x,
        ),
        lambda x: LiveWaterfall(),
    ]
    if Live3DView:
        func_l.append(lambda x: Live3DView() if 'tomo' in x['analysis_stage'] else None)
    func_l.append(
        lambda x: BestEffortCallback(table_enabled=False, overplot=False)
    )
    rr = RunRouter(func_l)

    d.subscribe(rr)
    print("Starting Viz Server")
    d.start()
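
In practice these ``run_server`` functions are exposed as command line entry points; a minimal sketch using python-fire (the wrapper name ``run_main`` is an assumption):

import fire

def run_main():
    # Turn run_server's keyword arguments into command line flags,
    # e.g. --prefix or --outbound_proxy_address.
    fire.Fire(run_server)

if __name__ == "__main__":
    run_main()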
Example 4
def run_server(
    outbound_proxy_address=glbl_dict["outbound_proxy_address"],
    inbound_proxy_address=glbl_dict["inbound_proxy_address"],
    outbound_prefix=(b"an", b"qoi"),
    inbound_prefix=b"tomo",
    _publisher=None,
    **kwargs,
):
    """Server for performing tomographic reconstructions

    Parameters
    ----------
    outbound_proxy_address : str, optional
        The outbound ip address for the ZMQ server. Defaults to the value
        from the global dict
    inbound_proxy_address : str, optional
        The inbound ip address for the ZMQ server. Defaults to the value
        from the global dict
    outbound_prefix : bytes or sequence of bytes, optional
        The data channels to listen to
    inbound_prefix : bytes, optional
        The data channel to publish to
    kwargs : dict
        kwargs passed to the reconstruction; for instance ``algorithm`` could
        be passed in with the associated tomopy algorithm to change the
        reconstruction algorithm from gridrec to something else.

    """
    print(kwargs)
    db = glbl_dict["exp_db"]
    handler_reg = db.reg.handler_reg
    publisher = Publisher(inbound_proxy_address, prefix=inbound_prefix)

    if _publisher:
        publisher = _publisher

    rr = RunRouter(
        [
            lambda x: tomo_callback_factory(
                x, publisher=publisher, handler_reg=handler_reg, **kwargs
            )
        ]
    )

    d = RemoteDispatcher(outbound_proxy_address, prefix=outbound_prefix)
    install_qt_kicker(loop=d.loop)

    d.subscribe(rr)
    print("Starting Tomography Server")
    d.start()
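
Per the docstring, extra kwargs flow through to the reconstruction; a hypothetical call that swaps the tomopy algorithm:

# Hypothetical: change the reconstruction algorithm from the default
# gridrec to ART; the kwarg is forwarded via tomo_callback_factory.
run_server(algorithm="art")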
Example 5
def run_server(
    data_dir,
    outbound_proxy_address=glbl_dict["outbound_proxy_address"],
    prefix=b"an",
):
    """Start up the databroker server for analyzed data.

    Parameters
    ----------
    data_dir : str
        The directory to save the array data into.
    outbound_proxy_address : str, optional
        The address and port of the zmq proxy. Defaults to
        ``glbl_dict["outbound_proxy_address"]``
    prefix : bytes or list of bytes, optional
        The Publisher channels to listen to. Defaults to
        ``b"an"``
    """

    d = RemoteDispatcher(outbound_proxy_address, prefix=prefix)
    an_broker = glbl_dict["an_db"]

    an_source = Stream()
    an_source.Store(data_dir, NpyWriter).starsink(an_broker.insert)

    rr = RunRouter(
        [
            lambda x: (lambda *nd: an_source.emit(nd))
            if x.get("analysis_stage", None) == "pdf"
            else None,
            lambda x: (lambda *nd: an_source.emit(nd))
            if x.get("analysis_stage", None) == "integration"
            else None,
        ]
    )

    d.subscribe(rr)

    print("Starting DB Server")
    d.start()
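
A hypothetical invocation; ``data_dir`` is the only required argument, and the proxy address and prefix fall back to the documented defaults:

run_server("/tmp/an_db_data")  # hypothetical directory for the array data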
Example 6
def test_zmq_components():
    # The test `test_zmq` runs Proxy and RemoteDispatcher in a separate
    # process, which coverage misses.
    pid = os.getpid()

    def delayed_sigint(delay):  # pragma: no cover
        time.sleep(delay)
        os.kill(pid, signal.SIGINT)

    proxy = Proxy(5567, 5568)
    assert not proxy.closed
    threading.Thread(target=delayed_sigint, args=(5,)).start()
    try:
        proxy.start()
        # delayed_sigint stops the proxy
    except KeyboardInterrupt:
        ...
    assert proxy.closed
    with pytest.raises(RuntimeError):
        proxy.start()

    proxy = Proxy()  # random port
    threading.Thread(target=delayed_sigint, args=(5,)).start()
    try:
        proxy.start()
        # delayed_sigint stops the proxy
    except KeyboardInterrupt:
        ...

    repr(proxy)

    # test that two ways of specifying address are equivalent
    d = RemoteDispatcher('localhost:5555')
    assert d.address == ('localhost', 5555)
    d = RemoteDispatcher(('localhost', 5555))
    assert d.address == ('localhost', 5555)

    repr(d)
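
When ``Proxy()`` is constructed without arguments it binds to OS-assigned ports, which can be read back from the instance; a short sketch (attribute names per bluesky's zmq ``Proxy``):

proxy = Proxy()           # bind to two randomly assigned ports
print(proxy.in_port)      # port Publishers should connect to
print(proxy.out_port)     # port RemoteDispatchers should connect to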
Example 7
def start_analysis(save=True, vis=True, **kwargs):
    """Start analysis pipeline [Depreciated]

    Parameters
    ----------
    mask_kwargs : dict
        The kwargs passed to the masking see xpdtools.tools.mask_img
    pdf_kwargs : dict
        The kwargs passed to the pdf generator, see xpdtools.tools.pdf_getter
    fq_kwargs : dict
        The kwargs passed to the fq generator, see xpdtools.tools.fq_getter
    mask_setting : dict
        The setting of the mask
    save_template : str
        The template string for file saving
    base_folder : str
        The base folder for saving files
    """
    warn(DeprecationWarning("Use the server instead"))
    # TODO: also start up grave vis, maybe?
    d = RemoteDispatcher(glbl_dict["outbound_proxy_address"])
    install_qt_kicker(
        loop=d.loop
    )  # This may need to be d._loop depending on tag
    # Copy so that ``+=`` below does not mutate the module-level list.
    order = list(pipeline_order)
    if save:
        order += save_pipeline_order
    if vis:
        order += [vis_pipeline]
    namespace = link(
        *order, raw_source=Stream(stream_name="raw source"), **kwargs
    )
    raw_source = namespace["raw_source"]
    d.subscribe(lambda *x: raw_source.emit(x))
    print("Starting Analysis Server")
    d.start()
Example 8
def run_server(base_folders=None,
               template=base_template,
               outbound_proxy_address=glbl_dict["outbound_proxy_address"],
               db_names=("exp_db", "an_db"),
               prefix=None):
    """Run file saving server

    Parameters
    ----------
    base_folders : list of str or str, optional
        Either a list of strings for base folders to save data into or a
        single str for a base folder to save data into.

        Defaults to the value of ``glbl_dict["tiff_base"]``.
    template : str, optional
        The string used as a template for the file names. Please see the
        :ref:`xpdan_callbacks` module docs for more information on the
        templating.

        Defaults to::

          "{base_folder}/{folder_prefix}/"
          "{start[analysis_stage]}/{start[sample_name]}_{human_timestamp}"
          "_{__independent_vars__}{start[uid]:.6}_{event[seq_num]:04d}{ext}"
    outbound_proxy_address : str, optional
        The address of the ZMQ proxy
    db_names : iterable of str, optional
        The names of the databases in the ``glbl_dict`` to use for the
        data loading handlers
    prefix : bytes or list of bytes, optional
        The zmq topics to listen on. Defaults to ``[b'an', b'raw']``
    """
    if prefix is None:
        prefix = [b'an', b'raw']
    if base_folders is None:
        base_folders = []

    if isinstance(base_folders, str):
        base_folders = [base_folders]
    if isinstance(base_folders, tuple):
        base_folders = list(base_folders)
    if isinstance(glbl_dict["tiff_base"], str):
        glbl_dict["tiff_base"] = [glbl_dict["tiff_base"]]

    base_folders += glbl_dict["tiff_base"]
    # TODO: support other protocols? (REST, maybe)
    d = RemoteDispatcher(outbound_proxy_address, prefix=prefix)
    dbs = [glbl_dict[k] for k in db_names if k in glbl_dict]
    handlers = {}
    for db in dbs:
        handlers.update(db.reg.handler_reg)
    print(base_folders)

    rr = RunRouter(
        [setup_saver],
        base_folders=base_folders,
        template=template,
        handler_reg=handlers,
    )

    d.subscribe(rr)
    print("Starting Save Server")
    d.start()
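
A hypothetical invocation that adds one extra save location on top of ``glbl_dict["tiff_base"]`` and only listens to analyzed data:

run_server(
    base_folders="/tmp/extra_tiff_base",  # hypothetical extra location
    prefix=[b"an"],
)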
Example 9
def run_server(
    order=order,
    radiogram_order=radiogram_order,
    db=glbl_dict["exp_db"],
    outbound_proxy_address=glbl_dict["outbound_proxy_address"],
    inbound_proxy_address=glbl_dict["inbound_proxy_address"],
    diffraction_dets=glbl_dict["diffraction_dets"],
    radiogram_dets=glbl_dict["radiogram_dets"],
    prefix=b"raw",
    inbound_prefix=b"an",
    zscore=False,
    stage_blacklist=(),
    _publisher=None,
    **kwargs,
):
    """Function to run the analysis server.

    Parameters
    ----------
    order : list, optional
        The order of pipeline chunk functions to be called. Defaults to the
        standard order, ``xpdan.startup.analysis_server.order``
    radiogram_order : list, optional
        The order of pipeline chunk functions to be called for radiograph
        analysis. Defaults to the standard order,
        ``xpdan.startup.analysis_server.radiogram_order``
    db : databroker.Broker instance, optional
        The databroker to pull data from. This is used for accessing dark and
        background data. Defaults to the location listed in the
        ``xpdconf.conf.glbl_dict``.
    outbound_proxy_address : str, optional
        The location of the ZMQ proxy sending data to this server. Defaults
        to the location listed in the ``xpdconf.conf.glbl_dict``.
    inbound_proxy_address : str, optional
        The inbound ip address for the ZMQ server. Defaults to the value
        from the global dict
    diffraction_dets : list of str, optional
        The detectors used for diffraction, defaults to
        ``glbl_dict["diffraction_dets"]``, pulled from the config file.
    radiogram_dets : list of str, optional
        The detectors used for radiographs, defaults to
        ``glbl_dict["radiogram_dets"]``, pulled from the config file.
    prefix : bytes or list of bytes, optional
        Which publisher(s) to listen to for data. Defaults to ``b"raw"``
    inbound_prefix : bytes, optional
        The prefix for outbound data, defaults to ``b"an"``
    zscore : bool, optional
        If True compute Z-Score, defaults to False
    stage_blacklist : list of str, optional
        Stages to not publish. Defaults to an empty tuple. Not publishing
        some of the large memory datasets (mask, mask_overlay,
        bg_corrected_img, dark_sub) could speed up data processing by cutting
        down on process communication times. Note that blacklisting some of
        these (particularly mask and dark_sub) will cause some files to not be
        written out by the ``save_server``.
    kwargs : Any
        Keyword arguments passed into the pipeline creation. These are used
        to modify the data processing.

        If using the default pipeline these include:

        - ``bg_scale`` : The background scale factor. Defaults to ``1``.
        - ``calib_setting`` : The calibration setting. If set to
          ``{"setting": False}`` the user will not be prompted to perform
          calibration on calibration samples. This is useful for skipping
          calibration when re-analyzing an entire experiment.
        - ``polarization_factor`` : The polarization factor used to correct
          the image. Defaults to ``.99``.
        - ``mask_setting`` : The setting for the frequency of the mask. If
          set to ``{'setting': 'auto'}`` each image gets a mask generated
          for it, if set to ``{'setting': 'first'}`` only the first image
          in the series has a mask generated for it and all subsequent
          images in the series use that mask, and if set to
          ``{'setting': 'none'}`` no image is masked. Defaults to
          ``{'setting': 'auto'}``.
        - ``mask_kwargs`` : The keyword arguments passed to
          ``xpdtools.tools.mask_img``. Defaults to ``dict(edge=30,
          lower_thresh=0.0, upper_thresh=None, alpha=3, auto_type="median",
          tmsk=None)``.
        - kwargs passed to PDFgetx3. Please see the PDFgetx3 documentation:
          https://www.diffpy.org/doc/pdfgetx/2.0.0/options.html#pdf-parameters
    """
    print(kwargs)
    db.prepare_hook = lambda x, y: copy.deepcopy(y)

    publisher = Publisher(inbound_proxy_address, prefix=inbound_prefix)

    if _publisher:
        publisher = _publisher
    if "db" not in kwargs:
        kwargs.update(db=db)

    d = RemoteDispatcher(
        outbound_proxy_address,
        # accept the raw data
        prefix=prefix,
    )
    install_qt_kicker(loop=d.loop)

    if zscore:
        _order = z_score_order
    else:
        _order = order
    rr = RunRouter(
        [diffraction_router],
        xrd_namespace=create_analysis_pipeline(
            order=_order,
            stage_blacklist=stage_blacklist,
            publisher=publisher,
            **kwargs,
        ),
        diffraction_dets=diffraction_dets,
    )

    rr2 = RunRouter(
        [radiogram_router],
        order=radiogram_order,
        radiogram_dets=radiogram_dets,
        publisher=publisher,
        **kwargs,
    )

    d.subscribe(rr)
    d.subscribe(rr2)
    print("Starting Analysis Server")
    d.start()
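
A hypothetical call overriding a few of the documented pipeline kwargs:

run_server(
    stage_blacklist=("mask_overlay",),  # skip a large in-memory stage
    mask_setting={"setting": "first"},  # mask only the first image
    polarization_factor=0.99,
)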
Example 10
def run_server(
    order=order,
    radiogram_order=radiogram_order,
    db=glbl_dict["exp_db"],
    outbound_proxy_address=glbl_dict["outbound_proxy_address"],
    inbound_proxy_address=glbl_dict["inbound_proxy_address"],
    diffraction_dets=glbl_dict["diffraction_dets"],
    radiogram_dets=glbl_dict["radiogram_dets"],
    prefix=b"raw",
    zscore=False,
    **kwargs
):
    """Function to run the analysis server.

    Parameters
    ----------
    order : list, optional
        The order of pipeline chunk functions to be called. Defaults to the
        standard order, ``xpdan.startup.analysis_server.order``
    radiogram_order : list, optional
        The order of pipeline chunk functions to be called for radiograph
        analysis. Defaults to the standard order,
        ``xpdan.startup.analysis_server.radiogram_order``
    db : databroker.Broker instance, optional
        The databroker to pull data from. This is used for accessing dark and
        background data. Defaults to the location listed in the
        ``xpdconf.conf.glbl_dict``.
    outbound_proxy_address : str, optional
        The location of the ZMQ proxy sending data to this server. Defaults
        to the location listed in the ``xpdconf.conf.glbl_dict``.
    inbound_proxy_address : str, optional
        The inbound ip address for the ZMQ server. Defaults to the value
        from the global dict
    diffraction_dets : list of str, optional
        The detectors used for diffraction, defaults to
        ``glbl_dict["diffraction_dets"]``, pulled from the config file.
    radiogram_dets : list of str, optional
        The detectors used for radiographs, defaults to
        ``glbl_dict["radiogram_dets"]``, pulled from the config file.
    prefix : bytes or list of bytes, optional
        Which publisher(s) to listen to for data. Defaults to ``b"raw"``
    zscore : bool, optional
        If True compute Z-Score, defaults to False
    kwargs : Any
        Keyword arguments passed into the pipeline creation. These are used
        to modify the data processing.

        If using the default pipeline these include:

        - ``bg_scale`` : The background scale factor. Defaults to ``1``.
        - ``calib_setting`` : The calibration setting. If set to
          ``{"setting": False}`` the user will not be prompted to perform
          calibration on calibration samples. This is useful for skipping
          calibration when re-analyzing an entire experiment.
        - ``polarization_factor`` : The polarization factor used to correct
          the image. Defaults to ``.99``.
        - ``mask_setting`` : The setting for the frequency of the mask. If
          set to ``{'setting': 'auto'}`` each image gets a mask generated
          for it, if set to ``{'setting': 'first'}`` only the first image
          in the series has a mask generated for it and all subsequent
          images in the series use that mask, and if set to
          ``{'setting': 'none'}`` no image is masked. Defaults to
          ``{'setting': 'auto'}``.
        - ``mask_kwargs`` : The keyword arguments passed to
          ``xpdtools.tools.mask_img``. Defaults to ``dict(edge=30,
          lower_thresh=0.0, upper_thresh=None, alpha=3, auto_type="median",
          tmsk=None)``.
        - kwargs passed to PDFgetx3. Please see the PDFgetx3 documentation:
          https://www.diffpy.org/doc/pdfgetx/2.0.0/options.html#pdf-parameters
    """
    print(kwargs)
    db.prepare_hook = lambda x, y: copy.deepcopy(y)

    if "db" not in kwargs:
        kwargs.update(db=db)

    d = RemoteDispatcher(
        outbound_proxy_address,
        # accept the raw data
        prefix=prefix,
    )
    install_qt_kicker(loop=d.loop)

    # The two branches differed only in the pipeline order; pick it once.
    _order = z_score_order if zscore else order
    rr = RunRouter(
        [diffraction_router],
        order=_order,
        diffraction_dets=diffraction_dets,
        inbound_proxy_address=inbound_proxy_address,
    )

    rr2 = RunRouter(
        [radiogram_router],
        order=radiogram_order,
        radiogram_dets=radiogram_dets,
        inbound_proxy_address=inbound_proxy_address,
    )

    d.subscribe(rr)
    d.subscribe(rr2)
    print("Starting Analysis Server")
    d.start()
Example 11
def run_server(
    prefix=None,
    outbound_proxy_address=glbl_dict["outbound_proxy_address"],
    inbound_proxy_address=glbl_dict["inbound_proxy_address"],
    _publisher=None,
    **kwargs
):
    """Start up the QOI server

    Parameters
    ----------
    prefix : bytes or list of bytes, optional
        The Publisher channels to listen to. Defaults to
        ``[b"an", b"raw"]``
    outbound_proxy_address : str, optional
        The address and port of the zmq proxy. Defaults to
        ``glbl_dict["outbound_proxy_address"]``
    inbound_proxy_address : str, optional
        The inbound ip address for the ZMQ server. Defaults to the value
        from the global dict
    kwargs : Any
        Keyword arguments passed into the pipeline creation via ``link``
    """
    if prefix is None:
        prefix = [b"an", b"raw"]

    d = RemoteDispatcher(outbound_proxy_address, prefix=prefix)
    install_qt_kicker(loop=d.loop)

    if _publisher is None:
        an_with_ind_pub = Publisher(inbound_proxy_address, prefix=b"qoi")
    else:
        an_with_ind_pub = _publisher

    raw_source = Stream()

    # create amorphous pipeline
    amorphous_ns = link(
        *[amorphsivity_fem, amorphsivity_pipeline, amorphsivity_tem],
        source=Stream(),
        **kwargs
    )
    # Combine the data outputs with the raw independent data
    amorphous_ns.update(
        to_event_stream_with_ind(
            move_to_first(raw_source.starmap(StripDepVar())),
            *[
                node
                for node in amorphous_ns.values()
                if isinstance(node, SimpleToEventStream)
            ],
            publisher=an_with_ind_pub
        )
    )

    rr = RunRouter(
        [
            # Parenthesize the inner lambdas so the router returns None
            # (rather than a callback) for non-matching starts, and use
            # .get so starts without an analysis_stage are skipped.
            lambda x: (lambda *y: raw_source.emit(y))
            if x.get("analysis_stage") == "raw"
            else None,
            lambda x: (lambda *y: amorphous_ns["source"].emit(y))
            if x.get("analysis_stage") == "pdf"
            else None,
        ]
    )
    d.subscribe(rr)
    print("Starting QOI Server")
    d.start()
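
Downstream consumers can pick up the published QOI documents by listening on the ``b"qoi"`` prefix this server publishes to; a sketch:

from bluesky.callbacks.zmq import RemoteDispatcher

qoi_dispatcher = RemoteDispatcher(
    glbl_dict["outbound_proxy_address"], prefix=b"qoi"
)
qoi_dispatcher.subscribe(lambda name, doc: print(name))
qoi_dispatcher.start()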
Example 12
def run_server(
    folder,
    outbound_proxy_address=glbl_dict["outbound_proxy_address"],
    prefix=None,
    handlers=None,
):
    """Start up the portable databroker server

    Parameters
    ----------
    folder : str
        The location where to save the portable databrokers
    outbound_proxy_address : str, optional
        The address and port of the zmq proxy. Defaults to
        ``glbl_dict["outbound_proxy_address"]``
    prefix : bytes or list of bytes, optional
        The Publisher channels to listen to. Defaults to
        ``[b"an", b"raw"]``
    handlers : dict
        The map between handler specs and handler classes, defaults to
        the map used by the experimental databroker if possible
    """
    # TODO: convert to bytestrings if needed
    # TODO: maybe separate this into different processes?
    # TODO: support multiple locations for folders
    if prefix is None:
        prefix = [b"an", b"raw"]
    d = RemoteDispatcher(outbound_proxy_address, prefix=prefix)
    portable_folder = folder
    portable_configs = {}
    for folder_name in ["an", "raw"]:
        fn = os.path.join(portable_folder, folder_name)
        os.makedirs(fn, exist_ok=True)
        # if the path doesn't exist then make the databrokers
        with open(os.path.join(portable_folder, f"{folder_name}.yml"),
                  "w") as f:
            f.write(portable_template.format(folder_name))
        print(portable_template.format(folder_name))

        print(fn)
        portable_configs[folder_name] = yaml.safe_load(
            io.StringIO(portable_template.format(fn)))
        os.makedirs(os.path.join(fn, "data"), exist_ok=True)

    # TODO: add more files here, eg. a databroker readme/tutorial
    with open(os.path.join(portable_folder, "db_load.py"), "w") as f:
        f.write(load_script)
    an_broker = Broker.from_config(portable_configs["an"])

    an_source = Stream()
    zed = an_source.Store(
        os.path.join(
            portable_configs["an"]["metadatastore"]["config"]["directory"],
            "data",
        ),
        NpyWriter,
    )
    zed.starsink(an_broker.insert)

    raw_broker = Broker.from_config(portable_configs["raw"])
    if handlers is None:
        handlers = raw_broker.reg.handler_reg

    raw_source = Stream()
    raw_source.starmap(
        ExportCallback(
            os.path.join(
                portable_configs["raw"]["metadatastore"]["config"]
                ["directory"],
                "data",
            ),
            handler_reg=handlers,
        )).starsink(raw_broker.insert)

    rr = RunRouter([
        lambda x: (lambda *nd: raw_source.emit(nd))
        if x.get("analysis_stage", "") == "raw" else None
    ] + [
        lambda x: (lambda *nd: an_source.emit(nd))
        if x.get("analysis_stage", None) == "pdf" else None,
        lambda x: (lambda *nd: an_source.emit(nd))
        if x.get("analysis_stage", None) == "integration" else None,
    ])

    d.subscribe(rr)

    print("Starting Portable DB Server")
    d.start()
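
Once the server has written the portable configuration, the brokers can be reopened elsewhere from the generated yml files; a sketch (the path is hypothetical):

from databroker import Broker
import yaml

# Load the config that run_server wrote out into a standalone Broker.
with open("/path/to/portable/an.yml") as f:  # hypothetical location
    config = yaml.safe_load(f)
an_db = Broker.from_config(config)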