Example No. 1
from databroker.v1 import from_config
from databroker.v0 import Broker
from .. import load_config

name = 'xfp'
v0_catalog = Broker.from_config(load_config(f'{name}/{name}.yml'))
v1_catalog = from_config(load_config(f'{name}/{name}.yml'))
catalog = from_config(load_config(f'{name}/{name}.yml')).v2
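# A minimal usage sketch (hypothetical), assuming the xfp catalog contains at
# least one run; the three objects above expose it through the v0, v1 and v2 APIs:
v0_header = v0_catalog[-1]          # v0: most recent run as a Header
v1_table = v1_catalog[-1].table()   # v1: event data as a pandas DataFrame
v2_run = catalog[-1]                # v2: a BlueskyRun
v2_data = v2_run.primary.read()     # v2: primary stream as an xarray Dataset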
Example No. 2
from databroker.v1 import from_config
from databroker.v0 import Broker
from .. import load_config_central

name = 'qas'
v0_central = Broker.from_config(load_config_central(f'{name}/{name}.yml', name))
v1_central = from_config(load_config_central(f'{name}/{name}.yml', name))
central = from_config(load_config_central(f'{name}/{name}.yml', name)).v2
Example No. 3
try:
    from databroker.v0 import Broker
except ModuleNotFoundError:
    from databroker import Broker

from databroker._core import register_builtin_handlers

#  srx detector, to be moved to filestore
# from databroker.assets.handlers import Xspress3HDF5Handler
from databroker.assets.handlers import HandlerBase

import h5py  # used by the BulkXSPRESS handler below
import logging

logger = logging.getLogger(__name__)

db = Broker.named("srx")
try:
    register_builtin_handlers(db.reg)
except Exception as ex:
    logger.error(f"Error while registering default SRX handlers: {ex}")


class BulkXSPRESS(HandlerBase):
    HANDLER_NAME = "XPS3_FLY"

    def __init__(self, resource_fn):
        self._handle = h5py.File(resource_fn, "r")

    def __call__(self):
        return self._handle["entry/instrument/detector/data"][:]
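# A minimal sketch (hypothetical) of registering the handler above so that
# filled reads can resolve "XPS3_FLY" resources; the field name is a placeholder:
db.reg.register_handler(BulkXSPRESS.HANDLER_NAME, BulkXSPRESS, overwrite=True)
hdr = db[-1]                                        # assumes at least one run exists
fluor_frames = list(hdr.data("fluor", fill=True))   # "fluor" is an assumed field name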
Example No. 4
from ophyd.signal import EpicsSignal, EpicsSignalBase
from bluesky_queueserver import is_re_worker_active

EpicsSignalBase.set_defaults(timeout=10, connection_timeout=10)  # new style
EpicsSignal.set_defaults(timeout=10, connection_timeout=10)  # new style

import nslsii
from datetime import datetime

# Register bluesky IPython magics.
if not is_re_worker_active():
    from bluesky.magics import BlueskyMagics

    get_ipython().register_magics(BlueskyMagics)

from bluesky.preprocessors import stage_decorator, run_decorator
from databroker.v0 import Broker

db = Broker.named("fxi")
del Broker

nslsii.configure_base(get_ipython().user_ns, db, bec=True)

# RE is created and injected into the IPython user namespace by nslsii.configure_base() above.
nslsii.configure_kafka_publisher(RE, "fxi")

# The following plan stubs should not be imported directly in the global namespace.
#   Otherwise Queue Server will not be able to load the startup files.
del one_1d_step
del one_nd_step
del one_shot

# Make new RE.md storage available in old environments.
from pathlib import Path
Example No. 5
try:
    from databroker.v0 import Broker
except ModuleNotFoundError:
    from databroker import Broker

from databroker._core import register_builtin_handlers

#  srx detector, to be moved to filestore
# from databroker.assets.handlers import Xspress3HDF5Handler
from databroker.assets.handlers import HandlerBase

import h5py  # used by the BulkXSPRESS handler below
import logging

logger = logging.getLogger(__name__)

db = Broker.named("xfm")
try:
    register_builtin_handlers(db.reg)
except Exception as ex:
    logger.error(f"Error while registering default SRX handlers: {ex}")


class BulkXSPRESS(HandlerBase):
    HANDLER_NAME = "XPS3_FLY"

    def __init__(self, resource_fn):
        self._handle = h5py.File(resource_fn, "r")

    def __call__(self):
        return self._handle["entry/instrument/detector/data"][:]
Example No. 6
try:
    from databroker.v0 import Broker
except ModuleNotFoundError:
    from databroker import Broker

from databroker._core import register_builtin_handlers

#  srx detector, to be moved to filestore
# from databroker.assets.handlers import Xspress3HDF5Handler
from databroker.assets.handlers import HandlerBase

import h5py  # used by the BulkXSPRESS handler below
import logging

logger = logging.getLogger(__name__)

db = Broker.named("tes")
try:
    register_builtin_handlers(db.reg)
except Exception as ex:
    logger.error(f"Error while registering default SRX handlers: {ex}")


class BulkXSPRESS(HandlerBase):
    HANDLER_NAME = "XPS3_FLY"

    def __init__(self, resource_fn):
        self._handle = h5py.File(resource_fn, "r")

    def __call__(self):
        return self._handle["entry/instrument/detector/data"][:]
Example No. 7
from pathlib import Path
from timeit import default_timer as timer
import shlex, subprocess

import bluesky.plan_stubs as bps
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
plt.ion()

from ophyd import (ProsilicaDetector, SingleTrigger, Component as Cpt, Device,
                   EpicsSignal, EpicsSignalRO, ImagePlugin, StatsPlugin, ROIPlugin,
                   DeviceStatus)


from databroker import Broker

db_archiver = Broker.named('iss-archiver')
arch_iss = db_archiver.event_sources_by_name['arch_iss']

# args = shlex.split('python /home/xf08id/.ipython/profile_sample-environment/iocs/ioc_ramping.py')
# args = shlex.split('conda activate collection-2021-1.2; gnome-terminal -e "python /home/xf08id/.ipython/profile_sample-environment/iocs/ioc_ramping.py"')
# args = shlex.split('"python /home/xf08id/.ipython/profile_sample-environment/iocs/ioc_ramping.py"')
ioc_args = shlex.split('gnome-terminal -- python /home/xf08id/.ipython/profile_sample-environment/iocs/ioc_ramping.py')
ioc_process = subprocess.Popen(ioc_args)


def get_pid(input_args):
    input = ['pidof'] + input_args
    return list(map(int, subprocess.check_output(input).split()))[0]

def kill_pid_ioc():
    pid_ioc = get_pid(ioc_args)
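# A minimal sketch (hypothetical) of stopping the spawned IOC through the Popen
# handle; the pidof-based helper above exists because terminating the
# gnome-terminal wrapper may leave the python child process running:
def stop_ioc(process=ioc_process, timeout=5):
    process.terminate()
    try:
        process.wait(timeout=timeout)
    except subprocess.TimeoutExpired:
        process.kill()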
Example No. 8
from ophyd.signal import EpicsSignalBase
# EpicsSignalBase.set_default_timeout(timeout=10, connection_timeout=10)  # old style
EpicsSignalBase.set_defaults(timeout=10, connection_timeout=10)  # new style

import nslsii
from datetime import datetime

# Register bluesky IPython magics.
from bluesky.magics import BlueskyMagics

get_ipython().register_magics(BlueskyMagics)

from bluesky.preprocessors import stage_decorator, run_decorator
from databroker.v0 import Broker
db = Broker.named('fxi')
del Broker

nslsii.configure_base(get_ipython().user_ns, db, bec=True)

# Make new RE.md storage available in old environments.
from pathlib import Path

import appdirs

try:
    from bluesky.utils import PersistentDict
except ImportError:
    import msgpack
    import msgpack_numpy
    import zict
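# A typical continuation of the RE.md block above (hypothetical sketch), assuming
# PersistentDict imported successfully and RE was created by configure_base():
runengine_metadata_dir = Path(appdirs.user_data_dir(appname="bluesky")) / "runengine_metadata"
RE.md = PersistentDict(str(runengine_metadata_dir))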
Example No. 9
try:
    from databroker.v0 import Broker
except ModuleNotFoundError:
    from databroker import Broker

try:
    from pyxrf.api_dev import db
except ImportError:
    db = None
    print("Error importing pyXRF. Continuing without import.")

if not db:
    # Register the data broker
    try:
        db = Broker.named("srx")
    except AttributeError:
        db = Broker.named("temp")
        print("Using temporary databroker.")

import pyxrf  # needed for the version string below

pyxrf_version = pyxrf.__version__


def _extract_metadata_from_header(hdr):
    """
    Extract metadata from the start and stop documents. Metadata extracted from other
    documents in the scan is beamline-specific and is added to the dictionary later.
    """
    start_document = hdr.start

    mdata = ScanMetadataXRF()
Example No. 10
try:
    from databroker.v0 import Broker
except ModuleNotFoundError:
    from databroker import Broker

from hxntools.handlers.xspress3 import Xspress3HDF5Handler
from hxntools.handlers.timepix import TimepixHDF5Handler

db = Broker.named("hxn")
# db_analysis = Broker.named('hxn_analysis')

db.reg.register_handler(Xspress3HDF5Handler.HANDLER_NAME,
                        Xspress3HDF5Handler,
                        overwrite=True)
db.reg.register_handler(TimepixHDF5Handler._handler_name,
                        TimepixHDF5Handler,
                        overwrite=True)
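# A minimal usage sketch (hypothetical), assuming the hxn catalog holds at least
# one run whose external assets are covered by the handlers registered above:
hdr = db[-1]
df = hdr.table(fill=True)  # fill=True resolves datum references via the handlers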
Example No. 11
def build_db_from_config():
    config = get_db_config()
    db = Broker.from_config(config)
    return db
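# A sketch (hypothetical) of the kind of dictionary get_db_config() would need to
# return for Broker.from_config(); all values here are placeholders:
example_config = {
    "description": "example mongo-backed broker",
    "metadatastore": {
        "module": "databroker.headersource.mongo",
        "class": "MDS",
        "config": {
            "host": "localhost",
            "port": 27017,
            "database": "metadatastore",
            "timezone": "US/Eastern",
        },
    },
    "assets": {
        "module": "databroker.assets.mongo",
        "class": "Registry",
        "config": {
            "host": "localhost",
            "port": 27017,
            "database": "filestore",
        },
    },
}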
Example No. 12
def build_db_from_init():
    mds = build_mds_from_config()
    assets = build_assets_from_config()
    event_sources = build_event_sources_from_config()
    db = Broker(mds, assets, event_sources=event_sources)
    return db
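# A sketch (hypothetical) of what the build_* helpers above might construct; the
# MDS(config, auth=...) call mirrors Example No. 13 below, and the values are placeholders:
from databroker.headersource.mongo import MDS
from databroker.assets.mongo import Registry

mds = MDS({"host": "localhost", "port": 27017,
           "database": "metadatastore", "timezone": "US/Eastern"}, auth=False)
assets = Registry({"host": "localhost", "port": 27017, "database": "filestore"})
db = Broker(mds, assets)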
Example No. 13
            raise RuntimeError('validate not implemented yet')

        d_ids = [res_uid + '/' + str(datum_count+j) for j in range(len(dkwargs_table))]
        datum_counts[res_uid] = datum_count + len(dkwargs_table)

        dkwargs_table = pd.DataFrame(dkwargs_table)
        datum_kwarg_list = [dict(r) for _, r in dkwargs_table.iterrows()]

        method_name = "bulk_register_datum_table"

        self._bulk_insert_datum(self._datum_col, resource_uid, d_ids, datum_kwarg_list)
        return d_ids


mds_db1 = MDS(_mds_config_db1, auth=False)
db1 = Broker(mds_db1, CompositeRegistry(_fs_config_db1))


# wrapper for two databases
class CompositeBroker(Broker):
    """wrapper for two databases"""

    # databroker.headersource.MDSROTemplate
    def _bulk_insert_events(self, event_col, descriptor, events, validate, ts):

        descriptor_uid = doc_or_uid_to_uid(descriptor)

        to_write = []
        for ev in events:
            data = dict(ev['data'])