Example #1
def db(request):
    """Return a data broker
    """
    from portable_mds.sqlite.mds import MDS
    from filestore.utils import install_sentinels
    import filestore.fs
    from databroker import Broker
    import tempfile
    import shutil
    from uuid import uuid4
    td = tempfile.mkdtemp()
    db_name = "fs_testing_v1_disposable_{}".format(str(uuid4()))
    test_conf = dict(database=db_name, host='localhost', port=27017)
    install_sentinels(test_conf, 1)
    fs = filestore.fs.FileStoreMoving(test_conf, version=1)

    def delete_dm():
        print("DROPPING DB")
        fs._connection.drop_database(db_name)

    request.addfinalizer(delete_dm)

    def delete_tmpdir():
        shutil.rmtree(td)

    request.addfinalizer(delete_tmpdir)

    return Broker(MDS({'directory': td, 'timezone': 'US/Eastern'}), fs)
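A minimal sketch of how a test might consume this fixture, assuming it is registered with @pytest.fixture (for example in a conftest.py); the test name and assertion below are hypothetical:

def test_uses_disposable_broker(db):
    # "db" is the Broker built by the fixture above; the temporary MongoDB
    # database and sqlite directory are dropped by the registered finalizers.
    assert db.mds is not None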
Example #2
def build_pymongo_backed_broker(request):
    '''Provide a function-scoped Broker backed by temporary MongoDB
    databases on localhost:27017 with the v1 schema.
    '''
    from ..headersource.mongo import MDS
    from ..assets.utils import create_test_database
    from ..assets.mongo import Registry

    db_name = "mds_testing_disposable_{}".format(str(uuid.uuid4()))
    md_test_conf = dict(database=db_name,
                        host='localhost',
                        port=27017,
                        timezone='US/Eastern',
                        version=1)
    mds = MDS(md_test_conf, auth=False)

    db_name = "fs_testing_base_disposable_{uid}"
    fs_test_conf = create_test_database(host='localhost',
                                        port=27017,
                                        version=1,
                                        db_template=db_name)
    fs = Registry(fs_test_conf)

    def delete_fs():
        print("DROPPING DB")
        fs._connection.drop_database(fs_test_conf['database'])
        mds._connection.drop_database(md_test_conf['database'])

    request.addfinalizer(delete_fs)

    return Broker(mds, fs)
Example #3
def test_filter_fields(mds_all):
    mds = mds_all
    temperature_ramp.run(mds)
    db = Broker(mds, fs=None)
    hdr = db[-1]
    unwanted_fields = ['point_det']
    out = nexus.filter_fields(hdr, unwanted_fields)
    # The original field list is ('point_det', 'boolean_det', 'ccd_det_info', 'Tsam');
    # only ('boolean_det', 'ccd_det_info', 'Tsam') should remain after filtering.
    assert len(out) == 3
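The filtering itself boils down to a set difference over the field names; a standalone illustration of the idea (this is not the actual nexus.filter_fields implementation):

all_fields = {'point_det', 'boolean_det', 'ccd_det_info', 'Tsam'}
unwanted = {'point_det'}
remaining = all_fields - unwanted
assert remaining == {'boolean_det', 'ccd_det_info', 'Tsam'}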
Example #4
def test_hdf5_export_with_fields_single(mds_all):
    """
    Test the hdf5 export with a single header and
    verify the output is correct; fields kwd is used.
    """
    mds = mds_all
    temperature_ramp.run(mds)
    db = Broker(mds, fs=None)
    hdr = db[-1]
    fname = tempfile.NamedTemporaryFile()
    hdf5.export(hdr, fname.name, mds, fields=['point_dev'])
    shallow_header_verify(fname.name, hdr, mds, fields=['point_dev'])
Example #5
def test_hdf5_export_single_stream_name(mds_all):
    """
    Test the hdf5 export with a single header and
    verify the output is correct; the stream_name kwd is used.
    """
    mds = mds_all
    temperature_ramp.run(mds)
    db = Broker(mds, fs=None)
    hdr = db[-1]
    fname = tempfile.NamedTemporaryFile()
    hdf5.export(hdr, fname.name, mds, stream_name='primary')
    shallow_header_verify(fname.name, hdr, mds, stream_name='primary')
Example #6
def test_nexus_export_single_no_uid(mds_all):
    """
    Test the NeXus HDF5 export with a single header and
    verify the output is correct. No uid is used.
    """
    mds = mds_all
    temperature_ramp.run(mds)
    db = Broker(mds, fs=None)
    hdr = db[-1]
    fname = tempfile.NamedTemporaryFile()
    nexus.export(hdr, fname.name, mds, use_uid=False)
    shallow_header_verify(fname.name, hdr, mds, use_uid=False)
    validate_basic_NeXus_structure(fname.name)
Example #7
def build_client_backend_broker(request):
    from ..headersource.client import MDS
    from ..assets.utils import create_test_database
    from ..assets.mongo import Registry
    import requests.exceptions
    from random import randint
    import ujson

    port = randint(9000, 60000)
    testing_config = dict(mongohost='localhost',
                          mongoport=27017,
                          database='mds_test' + str(uuid.uuid4()),
                          serviceport=port,
                          tzone='US/Eastern')

    proc = start_md_server(testing_config)

    tmds = MDS({'host': 'localhost', 'port': port, 'timezone': 'US/Eastern'})
    db_name = "fs_testing_base_disposable_{uid}"
    fs_test_conf = create_test_database(host='localhost',
                                        port=27017,
                                        version=1,
                                        db_template=db_name)
    fs = Registry(fs_test_conf)

    def tear_down():
        stop_md_server(proc, testing_config)

    request.addfinalizer(tear_down)

    base_url = 'http://{}:{}/'.format('localhost',
                                      testing_config['serviceport'])
    # Wait here until the server responds. Time out after 1 minute.
    TIMEOUT = 60  # seconds
    startup_time = time.time()
    url = base_url + 'run_start'
    message = dict(query={}, signature='find_run_starts')
    print("Waiting up to 60 seconds for the server to start up....")
    while True:
        if time.time() - startup_time > TIMEOUT:
            raise Exception("Server startup timed out.")
        try:
            r = requests.get(url, params=ujson.dumps(message))
        except requests.exceptions.ConnectionError:
            time.sleep(1)
            continue
        else:
            break
    print("Server is up!")

    return Broker(tmds, fs)
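The polling loop above is a general wait-until-ready pattern; a reusable sketch of the same idea (the helper name and defaults here are made up):

import time
import requests

def wait_for_http(url, timeout=60, interval=1):
    # Poll the URL until it answers; raise if it is still down after
    # timeout seconds. Mirrors the startup wait in the fixture above.
    deadline = time.time() + timeout
    while True:
        try:
            requests.get(url)
            return
        except requests.exceptions.ConnectionError:
            if time.time() > deadline:
                raise Exception("Server startup timed out.")
            time.sleep(interval)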
Example #8
def test_hdf5_export_list(mds_all):
    """
    Test the hdf5 export with a list of headers and
    verify the output is correct
    """
    mds = mds_all
    temperature_ramp.run(mds)
    temperature_ramp.run(mds)
    db = Broker(mds, fs=None)
    hdrs = db[-2:]
    fname = tempfile.NamedTemporaryFile()
    # test exporting a list of headers
    hdf5.export(hdrs, fname.name, mds)
    for hdr in hdrs:
        shallow_header_verify(fname.name, hdr, mds)
Example #9
def test_nexus_export_list(mds_all):
    """
    Test the NeXus HDF5 export with a list of headers and
    verify the output is correct
    """
    mds = mds_all
    temperature_ramp.run(mds)
    temperature_ramp.run(mds)
    db = Broker(mds, fs=None)
    hdrs = db[-2:]
    fname = tempfile.NamedTemporaryFile()
    # test exporting a list of headers
    nexus.export(hdrs, fname.name, mds)
    for hdr in hdrs:
        shallow_header_verify(fname.name, hdr, mds)
        validate_basic_NeXus_structure(fname.name)
Example #10
def init_db(host, port, mdsname, fsname, handlers=None):
    ''' Initialize a database.

        This is the general function used in SciStreams to set up a Broker
        from read-only metadatastore and filestore clients.

        Parameters
        ----------
        host : str
            the host ip address (usually localhost)
        port : int
            the port to connect on
        mdsname : str
            the database name for the metadatastore
        fsname : str
            the database name for the filestore
        handlers : dict, optional
            mapping of handler key to handler function

        Returns
        -------
        db : Broker
            the initialized database

        Notes
        -----
        The returned object contains open sockets and cannot be pickled.
        For distributed computing it is recommended to initialize it in a
        local library on each worker.
    '''
    mds_conf = {
        'host': host,
        'port': port,
        'database': mdsname,
        'timezone': 'US/Eastern',
    }

    reg_conf = {'host': host, 'port': port, 'database': fsname}

    mds = MDSRO(mds_conf)
    reg = RegistryRO(reg_conf)

    if handlers is not None:
        for handler_key, handler_function in handlers.items():
            reg.register_handler(handler_key, handler_function, overwrite=True)

    db = Broker(mds, reg)

    return db
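A hedged usage sketch for init_db; the host, port, and database names below are placeholders, and a reachable MongoDB server is assumed:

# Placeholders only; point these at a real metadatastore/filestore pair.
db = init_db('localhost', 27017, 'example-datastore', 'example-filestore')
hdr = db[-1]  # most recent run in that metadatastore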
Example #11
from pygerm.ophyd import GeRM
from pygerm.handler import GeRMHandler, BinaryGeRMHandler

import numpy as np
import pandas as pd
from lmfit import Model

# generic configuration; this is already set up on the beamline
reg = Registry({'dbpath': '/tmp/fs.sqlite'})
reg.register_handler('GeRM', GeRMHandler)
reg.register_handler('BinaryGeRM', BinaryGeRMHandler)

mds = MDS({'directory': '/tmp/mds.sqlite', 'timezone': 'US/Eastern'})

db = Broker(mds, reg=reg)

RE = bs.RunEngine()
RE.subscribe(db.insert)

# create the GeRM object
germ = GeRM('XF:28IDC-ES:1{Det:GeRM1}',
            name='germ',
            read_attrs=[
                'filepath', 'last_file', 'chip', 'chan', 'td', 'pd', 'ts',
                'count'
            ],
            configuration_attrs=['frametime'])

# gaussian fit
Example #12
from filestore.fs import FileStoreRO
from databroker import Broker
mds_config = {'host': 'localhost',
              'port': 27017,
              'database': 'metadatastore-production-v1',
              'timezone': 'US/Eastern'}
fs_config = {'host': 'localhost',
             'port': 27017,
             'database': 'filestore-production-v1'}
mds = MDS(mds_config)
# mds_readonly = MDSRO(mds_config)
# Since we aren't writing any files at the moment, use the read-only
# FileStore client.
fs_readonly = FileStoreRO(fs_config)

db = Broker(mds, fs_readonly)


# Subscribe metadatastore to documents.
# If this is removed, data is not saved to metadatastore.
from bluesky.global_state import gs
gs.RE.subscribe('all', mds.insert)

# Import matplotlib and put it in interactive mode.
import matplotlib.pyplot as plt
plt.ion()

# Make plots update live while scans run.
from bluesky.utils import install_qt_kicker
install_qt_kicker()
Example #13
def make_broker(dirname):
    mds = MDS({'directory': dirname,
               'timezone': tzlocal.get_localzone().zone})
    fs = FileStore({'dbpath': os.path.join(dirname, 'filestore.db')})
    return Broker(mds, fs)
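A usage sketch, assuming the sqlite-backed MDS and FileStore classes imported in Example #14; the temporary directory here is illustrative:

import tempfile

tmpdir = tempfile.mkdtemp()
db = make_broker(tmpdir)   # throwaway Broker rooted at tmpdir
# The broker can then be subscribed to a RunEngine, e.g. RE.subscribe(db.insert)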
Example #14
ip = get_ipython()
ip.enable_matplotlib('notebook')
del get_ipython, ip

# Set up a databroker aimed at the hidden directory ~/.data-cache.
import os
import tzlocal
from portable_mds.sqlite.mds import MDS
from portable_fs.sqlite.fs import FileStore
from databroker import Broker

dirname = os.path.expanduser('~/.data-cache/')
mds = MDS({'directory': dirname,
           'timezone': tzlocal.get_localzone().zone})
fs = FileStore({'dbpath': os.path.join(dirname, 'filestore.db')})
db = Broker(mds, fs)
del dirname, mds, fs, MDS, FileStore, Broker, tzlocal  # clean up namespace

from bluesky.global_state import gs
RE = gs.RE  # alias for convenience

# Subscribe metadatastore to documents.
# If this is removed, data is not saved to metadatastore.
gs.RE.subscribe('all', db.mds.insert)

# Import matplotlib and put it in interactive mode.
import matplotlib.pyplot as plt
plt.ion()

# Make plots live-update while scans run.
from bluesky.utils import install_nb_kicker
install_nb_kicker()
Example #15
        ret = d_ids
        return ret


_mds_config = {
    'host': 'xf03id-ca1',
    'port': 27017,
    'database': 'datastore-new',
    'timezone': 'US/Eastern'
}
mds = MDS(_mds_config, auth=False)

_fs_config = {'host': 'xf03id-ca1', 'port': 27017, 'database': 'filestore-new'}

db_new = Broker(mds, CompositeRegistry(_fs_config))

_mds_config_old = {
    'host': 'xf03id-ca1',
    'port': 27017,
    'database': 'datastore',
    'timezone': 'US/Eastern'
}
mds_old = MDS(_mds_config_old, auth=False)

_fs_config_old = {'host': 'xf03id-ca1', 'port': 27017, 'database': 'filestore'}

db_old = Broker(mds_old, CompositeRegistry(_fs_config_old))

### Cluster Broker
Example #16
from metadatastore.mds import MDS
# from metadataclient.mds import MDS
from databroker import Broker
from databroker.core import register_builtin_handlers
from filestore.fs import FileStore

# pull from /etc/metadatastore/connection.yaml
mds = MDS({'host': 'xf05id-ca1',
           'database': 'datastore',
           'port': 27017,
           'timezone': 'US/Eastern'}, auth=False)

# pull configuration from /etc/filestore/connection.yaml
db = Broker(mds, FileStore({'host': 'xf05id-ca1',
                            'database': 'filestore',
                            'port': 27017,
                            'timezone': 'US/Eastern',
                            }))
register_builtin_handlers(db.fs)


# srx detector, to be moved to filestore
import h5py  # needed by BulkXSPRESS.__init__ below
from filestore.handlers import Xspress3HDF5Handler, HandlerBase
class BulkXSPRESS(HandlerBase):
    HANDLER_NAME = 'XPS3_FLY'
    def __init__(self, resource_fn):
        self._handle = h5py.File(resource_fn, 'r')

    def __call__(self):
        return self._handle['entry/instrument/detector/data'][:]
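A handler like this becomes usable once it is registered with the broker's asset registry under its HANDLER_NAME; a hedged sketch using the register_handler call seen elsewhere in these examples:

# Make 'XPS3_FLY' resources retrievable through this broker; overwrite=True
# replaces any previously registered handler for the same key.
db.fs.register_handler(BulkXSPRESS.HANDLER_NAME, BulkXSPRESS, overwrite=True)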
Example #17
#from hxntools.handlers import register
#import filestore
from metadatastore.mds import MDS
from databroker import Broker
from filestore.fs import FileStore

# database #1
_mds_config = {'host': 'xf03id-ca1',
               'port': 27017,
               'database': 'datastore-new',
               'timezone': 'US/Eastern'}
mds = MDS(_mds_config)
_fs_config = {'host': 'xf03id-ca1',
              'port': 27017,
              'database': 'filestore-new'}
db1 = Broker(mds, FileStore(_fs_config))

# database #2
_mds_config = {'host': 'xf03id-ca1',
               'port': 27017,
               'database': 'datastore-1',
               'timezone': 'US/Eastern'}
mds = MDS(_mds_config)
_fs_config = {'host': 'xf03id-ca1',
              'port': 27017,
              'database': 'filestore-1'}
db2 = Broker(mds, FileStore(_fs_config))

# database old
_mds_config_old = {'host': 'xf03id-ca1',
                   'port': 27017,
Example #18
def build_db_from_init():
    mds = build_mds_from_config()
    assets = build_assets_from_config()
    event_sources = build_event_sources_from_config()
    db = Broker(mds, assets, event_sources=event_sources)
    return db
Example #19
from metadatastore.mds import MDS  # or: from metadataclient.mds import MDS
from databroker import Broker
from databroker.core import register_builtin_handlers
from filestore.fs import FileStore

# pull from /etc/metadatastore/connection.yaml or
# /home/BLUSER/.config/metadatastore/connection.yml
mds = MDS({'host': 'xf21id1-ca1',
           'database': 'metadatastore',
           'port': 27017,
           'timezone': 'US/Eastern'}, auth=False)
# mds = MDS({'host': CA, 'port': 7770})

# pull configuration from /etc/filestore/connection.yaml or
# /home/BLUSER/.config/filestore/connection.yml
db = Broker(mds, FileStore({'host': 'xf21id1-ca1',
                            'port': 27017,
                            'database': 'filestore'}))
register_builtin_handlers(db.fs)

# Subscribe metadatastore to documents.
# If this is removed, data is not saved to metadatastore.
from bluesky.global_state import gs
gs.RE.subscribe('all', mds.insert)

# At the end of every run, verify that files were saved and
# print a confirmation message.
#from bluesky.callbacks.broker import verify_files_saved
#gs.RE.subscribe('stop', post_run(verify_files_saved))

# Import matplotlib and put it in interactive mode.
import matplotlib.pyplot as plt
Example #20
RE = CustomRunEngine()
gs.RE = RE

mds = MDS(
    {
        'host': 'xf16idc-ca',
        'database': 'metadatastore_production_v1',
        'port': 27017,
        'timezone': 'US/Eastern'
    },
    auth=False)

db = Broker(
    mds,
    FileStore({
        'host': 'xf16idc-ca',
        'database': 'filestore',
        'port': 27017
    }))

register_builtin_handlers(db.fs)
RE.subscribe('all', mds.insert)

if is_ipython():
    # FIXME: Remove this once we migrate to PYTHON 3.5
    from IPython import get_ipython
    from IPython.core.pylabtools import backend2gui
    from matplotlib import get_backend
    ip = get_ipython()
    ipython_gui_name = backend2gui.get(get_backend())
    if ipython_gui_name: