Example #1
def main(args=None):
    parser = create_parser()
    ns = parser.parse_args(args)
    nd = ns.__dict__
    if ns.cmd in FACILITY_PARSER_MAP:
        db = Broker.named(nd['db_name'])
        db_path = db.get_config()['metadatastore']['config']['directory']
        writer = NpyWriter(db.fs, db_path)
        for n, d in FACILITY_PARSER_MAP[ns.cmd]['cmd'](nd['input_data']):
            if n == 'descriptor':
                for k in ['tof', 'intensity', 'error']:
                    d['data_keys'][k]['external'] = True
            if n == 'event':
                for k in ['tof', 'intensity', 'error']:
                    d['data'][k] = writer.write(d['data'][k])
                    d['filled'][k] = False
            print(n)
            pprint(d)
            db.insert(n, d)
    else:
        db_config_path = os.path.expanduser('~/.config/databroker/'
                                            '{}.yaml'.format(nd['name']))
        path = os.path.expanduser(nd['path'])
        config = {'description': 'lightweight personal database',
                  'metadatastore': {'module': 'databroker.headersource.sqlite',
                                    'class': 'MDS',
                                    'config': {'directory': path,
                                               'timezone': 'US/Eastern'}},
                  'assets': {'module': 'databroker.assets.sqlite',
                             'class': 'Registry',
                             'config': {'dbpath': os.path.join(
                                 path, 'database.sql')}}}
        os.makedirs(path, exist_ok=True)
        os.makedirs(os.path.split(db_config_path)[0], exist_ok=True)
        with open(db_config_path, 'w', encoding='utf8') as f:
            yaml.dump(config, f)
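Runs inserted through the first branch can be read back through the same Broker; a minimal retrieval sketch (the configuration name below is a placeholder for whatever was passed as db_name):

from databroker import Broker

db = Broker.named('my_db_name')   # placeholder configuration name
hdr = db[-1]                      # most recent run
tbl = hdr.table(fill=True)        # fill=True loads the externally stored
                                  # 'tof', 'intensity' and 'error' arrays
print(tbl.columns)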
Example #2
# import nslsii

# Register bluesky IPython magics.
#from bluesky.magics import BlueskyMagics
#get_ipython().register_magics(BlueskyMagics)

#nslsii.configure_base(get_ipython().user_ns, 'amx')
import bluesky.plans as bp

from bluesky.run_engine import RunEngine
from bluesky.utils import get_history
RE = RunEngine(get_history())
import os
beamline = os.environ["BEAMLINE_ID"]
from databroker import Broker
db = Broker.named(beamline)

RE.subscribe(db.insert)

# from bluesky.callbacks.best_effort import BestEffortCallback
# bec = BestEffortCallback()
# RE.subscribe(bec)


# convenience imports
# from ophyd.commands import *
from bluesky.callbacks import *
# from bluesky.spec_api import *
# from bluesky.global_state import gs, abort, stop, resume
# from databroker import (DataBroker as db, get_events, get_images,
#                                                 get_table, get_fields, restream, process)
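With RE subscribed to db.insert, every plan executed through the RunEngine is captured by the Broker; a brief usage sketch with a simulated detector from ophyd.sim (an assumption for illustration only, not part of the original startup file):

from ophyd.sim import det          # simulated detector, for illustration

RE(bp.count([det], num=3))         # run a simple counting plan
hdr = db[-1]                       # the run is immediately retrievable
print(hdr.table())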
Example #3
from databroker import Broker
db = Broker.named("iss")

import sys
sys.path.insert(0, '/home/xf08id/Repos/workflows')
import interpolation

data = dict()
data['requester'] = "xf08id-ws02"
#data['uid'] = "55f14401-8c60-4474-a24e-62b7722c933c"
data['uid'] = db[-1].start['uid']

store = data.copy()
signal = None
context = None

interpolation.create_req_func(data, store, signal, context)
interpolation.process_run_func(data, store, signal, context)
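The uid taken from db[-1].start can equally be used to look the run up again; a small sketch of both lookup forms:

hdr_latest = db[-1]                 # most recent run
hdr_by_uid = db[data['uid']]        # the same run, addressed by its uid
print(hdr_by_uid.start['uid'], hdr_by_uid.start.get('scan_id'))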
Example #4
import h5py
from databroker import Broker
from databroker._core import register_builtin_handlers

db = Broker.named('xfm')
register_builtin_handlers(db.reg)


# srx detector, to be moved to filestore
from databroker.assets.handlers import Xspress3HDF5Handler, HandlerBase

class BulkXSPRESS(HandlerBase):
    HANDLER_NAME = 'XPS3_FLY'
    def __init__(self, resource_fn):
        self._handle = h5py.File(resource_fn, 'r')

    def __call__(self):
        return self._handle['entry/instrument/detector/data'][:]

db.reg.register_handler(BulkXSPRESS.HANDLER_NAME, BulkXSPRESS,
                        overwrite=True)

class ZebraHDF5Handler(HandlerBase):
    HANDLER_NAME = 'ZEBRA_HDF51'
    def __init__(self, resource_fn):
        self._handle = h5py.File(resource_fn, 'r')

    def __call__(self, column):
        return self._handle[column][:]

class SISHDF5Handler(HandlerBase):
Example #5
from databroker import Broker

db = Broker.named('hxn')
db_analysis = Broker.named('hxn_analysis')

from hxntools.handlers.xspress3 import Xspress3HDF5Handler
from hxntools.handlers.timepix import TimepixHDF5Handler

db.reg.register_handler(Xspress3HDF5Handler.HANDLER_NAME,
                        Xspress3HDF5Handler)
db.reg.register_handler(TimepixHDF5Handler._handler_name,
                        TimepixHDF5Handler, overwrite=True)
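With the handlers registered, externally stored detector data is filled transparently when events are read back; a short sketch (the field name 'fluor' is only an illustrative placeholder):

hdr = db[-1]
# fill=True routes datum ids through the registered HDF5 handlers
for frame in hdr.data('fluor', fill=True):
    print(frame.shape)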

Example #6
from databroker import Broker

db = Broker.named('hxn')
#db_analysis = Broker.named('hxn_analysis')

from hxntools.handlers.xspress3 import Xspress3HDF5Handler
from hxntools.handlers.timepix import TimepixHDF5Handler

db.reg.register_handler(Xspress3HDF5Handler.HANDLER_NAME,
                        Xspress3HDF5Handler)
db.reg.register_handler(TimepixHDF5Handler._handler_name,
                        TimepixHDF5Handler, overwrite=True)

Example #7
# Set up a RunEngine and use metadata backed by a sqlite file.
from bluesky import RunEngine
from bluesky.utils import get_history
RE = RunEngine(get_history())

# Set up a Broker.
from databroker import Broker
db = Broker.named('csx')

# Subscribe metadatastore to documents.
# If this is removed, data is not saved to metadatastore.
RE.subscribe(db.insert)

# Set up SupplementalData.
from bluesky import SupplementalData
sd = SupplementalData()
RE.preprocessors.append(sd)

# Add a progress bar.
from bluesky.utils import ProgressBarManager
pbar_manager = ProgressBarManager()
RE.waiting_hook = pbar_manager

# Register bluesky IPython magics.
from bluesky.magics import BlueskyMagics
get_ipython().register_magics(BlueskyMagics)

# Set up the BestEffortCallback.
from bluesky.callbacks.best_effort import BestEffortCallback
bec = BestEffortCallback()
RE.subscribe(bec)
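A typical session on top of this startup file records baseline readings through sd and gets live feedback from bec; a short sketch using simulated hardware from ophyd.sim (assumed here for illustration only):

from ophyd.sim import det, motor
from bluesky.plans import scan

sd.baseline = [motor]               # read motor before and after every run
RE(scan([det], motor, -1, 1, 5))    # BestEffortCallback prints a live table
print(db[-1].table())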
Example #8
import os
import shutil
import tempfile

import yaml
from databroker import Broker
try:
    db = Broker.named('xpd')
except NameError:
    from xpdsim import db
import logging
from pkg_resources import resource_filename as rs_fn

logger = logging.getLogger(__name__)
pytest_dir = rs_fn('xpdan', 'tests')


def load_configuration(name):
    """
    Load configuration data from a cascading series of locations.

    The precedence order is (highest priority last):

    1. The conda environment
       - CONDA_ENV/etc/{name}.yaml (if CONDA_ETC_ is defined for the env)
    2. The shipped version
    3. At the system level
       - /etc/{name}.yml
    4. In the user's home directory
       - ~/.config/{name}.yml
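The body of load_configuration is not part of this excerpt; a minimal sketch of such a cascading lookup, assuming later locations in the precedence list override earlier ones, that missing files are simply skipped, and that the shipped copy lives next to the test resources:

def _load_configuration_sketch(name):
    # candidate files, lowest priority first
    candidates = []
    if 'CONDA_ETC_' in os.environ:
        candidates.append(os.path.join(os.environ['CONDA_ETC_'],
                                       '{}.yaml'.format(name)))
    candidates += [
        os.path.join(pytest_dir, '{}.yml'.format(name)),   # shipped copy (assumed location)
        '/etc/{}.yml'.format(name),
        os.path.expanduser('~/.config/{}.yml'.format(name)),
    ]
    config = {}
    for path in candidates:
        if os.path.exists(path):
            with open(path) as f:
                config.update(yaml.safe_load(f) or {})
    return config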
Example #9
    from bluesky.utils import install_qt_kicker
    install_qt_kicker()
    print("Insalling Qt Kicker...")

# Make ophyd listen to pyepics.
from ophyd import setup_ophyd
setup_ophyd()

# Set up a RunEngine and use metadata backed by a sqlite file.
from bluesky import RunEngine
from bluesky.utils import get_history
RE = RunEngine(get_history())

# Set up a Broker.
from databroker import Broker
db = Broker.named('amx')

# Subscribe metadatastore to documents.
# If this is removed, data is not saved to metadatastore.

# Note: this creates a fresh RunEngine, replacing the RE constructed above
# from get_history().
from bluesky import RunEngine
RE = RunEngine()

abort = RE.abort
resume = RE.resume
stop = RE.stop

RE.subscribe(db.insert)

# Set up SupplementalData.
from bluesky import SupplementalData
Example #10
import time
import sys
from bluesky.simulators import summarize_plan


# Set up a RunEngine with an empty, in-memory metadata dictionary.
from bluesky import RunEngine
from bluesky.utils import get_history
RE = RunEngine({})

# Set up a Broker.
from databroker import Broker
db = Broker.named('iss')
db_analysis = Broker.named('iss-analysis')

# Subscribe metadatastore to documents.
# If this is removed, data is not saved to metadatastore.
RE.subscribe(db.insert)

# Set up SupplementalData.
from bluesky import SupplementalData
sd = SupplementalData()
RE.preprocessors.append(sd)

# Add a progress bar.
from timeit import default_timer as timer


from bluesky.utils import ProgressBarManager
pbar_manager = ProgressBarManager()
#RE.waiting_hook = pbar_manager
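summarize_plan, imported above, prints a plan's step-by-step messages without executing anything on the hardware; a brief sketch with a simulated detector (an assumption, purely for illustration):

from ophyd.sim import det
from bluesky.plans import count

# Prints the sequence of reads the plan would perform, without running it
summarize_plan(count([det], num=3))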
Example #11
    from bluesky.utils import install_qt_kicker
    install_qt_kicker()
    print("Installing Qt Kicker...")

# Make ophyd listen to pyepics.
from ophyd import setup_ophyd
setup_ophyd()

# Set up a RunEngine and use metadata backed by a sqlite file.
from bluesky import RunEngine
from bluesky.utils import get_history
RE = RunEngine(get_history())

# Set up a Broker.
from databroker import Broker
db = Broker.named('fmx')

# Subscribe metadatastore to documents.
# If this is removed, data is not saved to metadatastore.
RE.subscribe(db.insert)

# Set up SupplementalData.
from bluesky import SupplementalData
sd = SupplementalData()
RE.preprocessors.append(sd)

# Add a progress bar.
# from bluesky.utils import ProgressBarManager
# pbar_manager = ProgressBarManager()
# RE.waiting_hook = pbar_manager
Example #12
def interpolate_and_save(db_name,
                         db_analysis_name,
                         uid,
                         mono_name='mono1_enc',
                         pulses_per_degree=None):
    ''' Interpolate measured data and save to an analysis store.

        Parameters
        ----------
        # TODO : change to config (don't rely on Broker.named, which explores
        #     the local directory)
        db_name : str
            the name of the database (in /etc/databroker/name.yml)
        db_analysis_name : str
            the name of the analysis database (in /etc/databroker/name.yml)
        uid : str
            the uid of the data set
        mono_name : str
            the monochromator encoder name. Defaults to 'mono1_enc'
        pulses_per_degree : float
            pulses per degree of the monochromator encoder.
            Defaults to the current setup at QAS.

        Returns
        -------
        result : dict
            dictionary holding the interpolated data ('interp_df',
            'interp_df_filename'), the binned data ('bin_df',
            'bin_df_filename') and the 'scan_id'
    '''
    # the pulses per degree, hard coded for now
    # TODO : Make a signal to pb1.enc1
    # and have it passed at configuration_attrs
    # (which results in data in descriptor)
    if pulses_per_degree is None:
        ppd = 23600 * 400 / 360
    else:
        ppd = pulses_per_degree

    db = Broker.named(db_name)
    hdr = db[uid]
    start = hdr.start
    if 'e0' not in start:
        e0 = 8979
        print("Warning, e0 not in start, setting to Cu: {}".format(e0))
    else:
        e0 = float(hdr.start['e0'])

    db_analysis = Broker.named(db_analysis_name)

    # the important part of Bruno's code that does the interpolation
    gen_parser = xasdata.XASdataGeneric(ppd, db=db, mono_name=mono_name)
    gen_parser.load(uid)
    # data saves in gen_parser.interp_df
    gen_parser.interpolate()

    # useful command for debugging, looking at energy
    # this is automatically run by gen_parser
    #energy = encoder2energy(res[:,3], ppd)

    PREFIX = "/nsls2/xf07bm/data/interpolated_data"
    write_path_template = PREFIX + '/%Y/%m/%d/'
    DIRECTORY = datetime.now().strftime(write_path_template)
    scan_id = hdr.start['scan_id']

    md = hdr.start
    filename = 'xas_' + md.get("name", str(uuid4())[:6]) + "_" + str(scan_id)
    os.makedirs(DIRECTORY, exist_ok=True)
    filepath = DIRECTORY

    # file is exported
    fileout = gen_parser.export_trace(filename, filepath)
    call(['chmod', '774', fileout])

    bin_df, bin_df_filename = bin_data(gen_parser,
                                       fileout,
                                       e0,
                                       scan_id=scan_id)

    result = dict(bin_df=bin_df,
                  bin_df_filename=bin_df_filename,
                  interp_df=gen_parser.interp_df,
                  interp_df_filename=fileout,
                  scan_id=scan_id)

    return result
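A hypothetical call, with placeholder database names (the docstring points at the QAS setup) and the uid of the most recent run:

from databroker import Broker

db_raw = Broker.named('qas')        # placeholder configuration names
uid = db_raw[-1].start['uid']
result = interpolate_and_save('qas', 'qas-analysis', uid)
print(result['interp_df_filename'], result['bin_df_filename'])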
Example #13
    def run(self):
        """
        Overrides the `run()` function of the `multiprocessing.Process` class. Called
        by the `start` method.
        """
        logging.basicConfig(level=max(logging.WARNING, self._log_level))
        logging.getLogger(__name__).setLevel(self._log_level)

        success = True

        from .profile_tools import set_re_worker_active, clear_re_worker_active

        # Set the environment variable indicating that RE Worker is active. Status may be
        #   checked using 'is_re_worker_active()' in startup scripts or modules.
        set_re_worker_active()

        from .plan_monitoring import RunList, CallbackRegisterRun

        # Initialization should be done before communication is enabled.
        self._active_run_list = RunList()

        self._comm_to_manager.add_method(self._request_state_handler,
                                         "request_state")
        self._comm_to_manager.add_method(self._request_plan_report_handler,
                                         "request_plan_report")
        self._comm_to_manager.add_method(self._request_run_list_handler,
                                         "request_run_list")
        self._comm_to_manager.add_method(self._command_close_env_handler,
                                         "command_close_env")
        self._comm_to_manager.add_method(self._command_confirm_exit_handler,
                                         "command_confirm_exit")
        self._comm_to_manager.add_method(self._command_run_plan_handler,
                                         "command_run_plan")
        self._comm_to_manager.add_method(self._command_pause_plan_handler,
                                         "command_pause_plan")
        self._comm_to_manager.add_method(self._command_continue_plan_handler,
                                         "command_continue_plan")
        self._comm_to_manager.add_method(self._command_reset_worker_handler,
                                         "command_reset_worker")
        self._comm_to_manager.start()

        self._exit_event = threading.Event()
        self._exit_confirmed_event = threading.Event()
        self._re_report_lock = threading.Lock()

        from bluesky import RunEngine
        from bluesky.run_engine import get_bluesky_event_loop
        from bluesky.callbacks.best_effort import BestEffortCallback
        from bluesky_kafka import Publisher as kafkaPublisher
        from bluesky.utils import PersistentDict

        from .profile_tools import global_user_namespace

        # TODO: TC - Do you think that the following code may be included in RE.__init__()
        #   (for Python 3.8 and above)
        # Setting the default event loop is needed to make the code work with Python 3.8.
        loop = get_bluesky_event_loop()
        asyncio.set_event_loop(loop)

        try:
            keep_re = self._config_dict["keep_re"]
            startup_dir = self._config_dict.get("startup_dir", None)
            startup_module_name = self._config_dict.get(
                "startup_module_name", None)
            startup_script_path = self._config_dict.get(
                "startup_script_path", None)

            self._re_namespace = load_worker_startup_code(
                startup_dir=startup_dir,
                startup_module_name=startup_module_name,
                startup_script_path=startup_script_path,
                keep_re=keep_re,
            )

            if keep_re and ("RE" not in self._re_namespace):
                raise RuntimeError(
                    "Run Engine is not created in the startup code and 'keep_re' option is activated."
                )
            self._existing_plans = plans_from_nspace(self._re_namespace)
            self._existing_devices = devices_from_nspace(self._re_namespace)
            logger.info("Startup code loading was completed")

        except Exception as ex:
            logger.exception(
                "Failed to start RE Worker environment. Error while loading startup code: %s.",
                str(ex),
            )
            success = False

        # Load lists of allowed plans and devices
        logger.info("Loading the lists of allowed plans and devices ...")
        path_pd = self._config_dict["existing_plans_and_devices_path"]
        path_ug = self._config_dict["user_group_permissions_path"]
        try:
            self._allowed_plans, self._allowed_devices = load_allowed_plans_and_devices(
                path_existing_plans_and_devices=path_pd,
                path_user_group_permissions=path_ug)
        except Exception as ex:
            logger.exception(
                "Error occurred while loading lists of allowed plans and devices from '%s': %s",
                path_pd, str(ex))

        if success:
            logger.info("Instantiating and configuring Run Engine ...")

            try:
                # Make RE namespace available to the plan code.
                global_user_namespace.set_user_namespace(
                    user_ns=self._re_namespace, use_ipython=False)

                if self._config_dict["keep_re"]:
                    # Copy references from the namespace
                    self._RE = self._re_namespace["RE"]
                    self._db = self._re_namespace.get("RE", None)
                else:
                    # Instantiate a new Run Engine and Data Broker (if needed)
                    md = {}
                    if self._config_dict["use_persistent_metadata"]:
                        # This code is temporarily copied from 'nslsii' before better solution for keeping
                        #   continuous sequence Run ID is found. TODO: continuous sequence of Run IDs.
                        directory = os.path.expanduser("~/.config/bluesky/md")
                        os.makedirs(directory, exist_ok=True)
                        md = PersistentDict(directory)

                    self._RE = RunEngine(md)
                    self._re_namespace["RE"] = self._RE

                    def factory(name, doc):
                        # Documents from each run are routed to an independent
                        #   instance of BestEffortCallback
                        bec = BestEffortCallback()
                        return [bec], []

                    # Subscribe to Best Effort Callback in the way that works with multi-run plans.
                    rr = RunRouter([factory])
                    self._RE.subscribe(rr)

                    # Subscribe RE to databroker if config file name is provided
                    self._db = None
                    if "databroker" in self._config_dict:
                        config_name = self._config_dict["databroker"].get(
                            "config", None)
                        if config_name:
                            logger.info(
                                "Subscribing RE to Data Broker using configuration '%s'.",
                                config_name)
                            from databroker import Broker

                            self._db = Broker.named(config_name)
                            self._re_namespace["db"] = self._db

                            self._RE.subscribe(self._db.insert)

                # Subscribe Run Engine to 'CallbackRegisterRun'. This callback is used internally
                #   by the worker process to keep track of the runs that are open and closed.
                run_reg_cb = CallbackRegisterRun(
                    run_list=self._active_run_list)
                self._RE.subscribe(run_reg_cb)

                if "kafka" in self._config_dict:
                    logger.info(
                        "Subscribing to Kafka: topic '%s', servers '%s'",
                        self._config_dict["kafka"]["topic"],
                        self._config_dict["kafka"]["bootstrap"],
                    )
                    kafka_publisher = kafkaPublisher(
                        topic=self._config_dict["kafka"]["topic"],
                        bootstrap_servers=self._config_dict["kafka"]["bootstrap"],
                        key="kafka-unit-test-key",
                        # work with a single broker
                        producer_config={
                            "acks": 1,
                            "enable.idempotence": False,
                            "request.timeout.ms": 5000
                        },
                        serializer=partial(msgpack.dumps, default=mpn.encode),
                    )
                    self._RE.subscribe(kafka_publisher)

                if "zmq_data_proxy_addr" in self._config_dict:
                    from bluesky.callbacks.zmq import Publisher

                    publisher = Publisher(
                        self._config_dict["zmq_data_proxy_addr"])
                    self._RE.subscribe(publisher)

                self._execution_queue = queue.Queue()

                self._state["environment_state"] = "ready"

            except BaseException as ex:
                success = False
                logger.exception(
                    "Error occurred while initializing the environment: %s.",
                    str(ex))

        if success:
            logger.info("RE Environment is ready")
            self._execute_in_main_thread()
        else:
            self._exit_event.set()

        logger.info("Environment is waiting to be closed ...")
        self._state["environment_state"] = "closing"

        # Wait until confirmation is received from RE Manager
        while not self._exit_confirmed_event.is_set():
            ttime.sleep(0.02)

        # Clear the environment variable indicating that RE Worker is active. It is an optional step
        #   since the process is about to close, but we still do it for consistency.
        clear_re_worker_active()

        self._RE = None

        self._comm_to_manager.stop()

        logger.info("Run Engine environment was closed successfully")