예제 #1
0
def build_pymongo_backed_broker():
    """Provide a function-level scoped MDS instance talking to a
    temporary database on localhost:27017 with v1 schema.

    Returns
    -------
    Broker
        Backed by disposable metadatastore and filestore databases,
        pre-populated (via module-level ``insert_imgs``) with one
        2048x2048 image.
    """
    import uuid

    from databroker.broker import Broker
    from metadatastore.mds import MDS
    from filestore.utils import create_test_database
    from filestore.fs import FileStore
    from filestore.handlers import NpyHandler

    # str.format stringifies the UUID; no explicit str() needed.
    db_name = "mds_testing_disposable_{}".format(uuid.uuid4())
    mds_test_conf = dict(database=db_name, host='localhost',
                         port=27017, timezone='US/Eastern')
    try:
        # nasty details: to save MacOS user
        mds = MDS(mds_test_conf, 1, auth=False)
    except TypeError:
        # older MDS versions do not accept an ``auth`` keyword
        mds = MDS(mds_test_conf, 1)

    db_name = "fs_testing_base_disposable_{}".format(uuid.uuid4())
    fs_test_conf = create_test_database(host='localhost',
                                        port=27017,
                                        version=1,
                                        db_template=db_name)
    fs = FileStore(fs_test_conf, version=1)
    fs.register_handler('npy', NpyHandler)

    db = Broker(mds, fs)
    # ``insert_imgs`` is expected to be defined at module level.
    insert_imgs(db.mds, db.fs, 1, (2048, 2048))

    return db
예제 #2
0
def test_export(broker_factory, RE):
    """Exercise ``Broker.export``: first a metadata-only export, then an
    export that also copies externally stored files between two
    disposable brokers.
    """
    from databroker.broker import Broker
    from filestore.fs import FileStoreRO

    # Subclass ReaderWithFSHandler to implement get_file_list, required for
    # file copying. This should be added upstream in bluesky.
    class Handler(ReaderWithFSHandler):
        def get_file_list(self, datum_kwarg_gen):
            # One file name per datum; assumes every kwargs dict carries
            # an ``index`` key -- TODO confirm against the handler spec.
            return ['{name}_{index}.npy'.format(name=self._name, **kwargs)
                    for kwargs in datum_kwarg_gen]

    db1 = broker_factory()
    db2 = broker_factory()
    # Old-style RunEngine subscription signature (document name first).
    RE.subscribe('all', db1.mds.insert)

    # test mds only
    uid, = RE(count([det]))
    db1.export(db1[uid], db2)
    assert db2[uid] == db1[uid]
    assert list(db2.get_events(db2[uid])) == list(db1.get_events(db1[uid]))

    # test file copying
    if not hasattr(db1.fs, 'copy_files'):
        raise pytest.skip("This filestore does not implement copy_files.")

    dir1 = tempfile.mkdtemp()
    dir2 = tempfile.mkdtemp()
    # Detector that saves its image through filestore into dir1.
    detfs = ReaderWithFileStore('detfs', {'image': lambda: np.ones((5, 5))},
                                fs=db1.fs, save_path=dir1)
    uid, = RE(count([detfs]))

    # Use a read only filestore
    mds2 = db1.mds
    fs2 = db1.fs
    fs3 = FileStoreRO(fs2.config, version=1)
    db1 = Broker(fs=fs3, mds=mds2)

    db1.fs.register_handler('RWFS_NPY', Handler)
    db2.fs.register_handler('RWFS_NPY', Handler)

    # export reports each (source, destination) path pair it copied
    (from_path, to_path), = db1.export(db1[uid], db2, new_root=dir2)
    assert os.path.dirname(from_path) == dir1
    assert os.path.dirname(to_path) == dir2
    assert db2[uid] == db1[uid]
    image1, = db1.get_images(db1[uid], 'image')
    image2, = db2.get_images(db2[uid], 'image')
    # NOTE(review): the two images are retrieved but never compared --
    # likely truncated source; consider asserting their equality.
예제 #3
0
def test_spec_to_document(sf, mds_all, scan_ids):
    """Round-trip a spec file through document generation and insertion.

    Converts *sf* (a path, ``spec.Specfile``, or ``spec.Specscan``) into
    documents, inserts them into *mds_all*, then verifies headers come
    back without duplication and with the expected event counts.
    """
    # Dispatch table from document name to insert function.
    # (Renamed from ``map`` to avoid shadowing the builtin.)
    inserters = {
        'start': mds_all.insert_run_start,
        'stop': mds_all.insert_run_stop,
        'descriptor': mds_all.insert_descriptor,
        'event': mds_all.insert_event,
    }
    start_uids = []

    db = Broker(mds_all, fs=None)

    for document_name, document in spec.spec_to_document(
            sf, mds_all, scan_ids=scan_ids, validate=True):
        document = dict(document)
        # ``_name`` is bookkeeping, not part of the insert signature.
        del document['_name']
        if not isinstance(document_name, str):
            document_name = document_name.name
        # insert the documents
        if document_name == 'start':
            document['beamline_id'] = 'test'
            start_uids.append(document['uid'])
        inserters[document_name](**document)

    # make sure we are not trying to add duplicates
    assert len(start_uids) == len(set(start_uids))

    # smoketest the retrieval
    hdrs = []
    for uid in start_uids:
        hdr = db[uid]
        # make sure we only get one back
        assert isinstance(hdr, Header)
        hdrs.append(hdr)

    # make sure we are not getting duplicates back out
    hdr_uids = [hdr.start.uid for hdr in hdrs]
    assert len(hdr_uids) == len(set(hdr_uids))

    # Normalize ``sf`` into an iterable of scans before zipping.
    if isinstance(sf, spec.Specscan):
        sf = [sf]
    if isinstance(sf, str):
        sf = spec.Specfile(sf)
    for hdr, specscan in zip(hdrs, sf):
        for descriptor in hdr.descriptors:
            ev = list(mds_all.get_events_generator(descriptor))
            if descriptor.get('name') == 'baseline':
                # we better only have one baseline event
                assert len(ev) == 1
            else:
                assert len(specscan.scan_data) == len(ev)
예제 #4
0
def mds_all(request):
    '''Provide a function-level scoped metadatastore instance talking to
    temporary database on localhost:27017 with focus on v1.

    The disposable database is dropped by a pytest finalizer when the
    test that requested this fixture finishes.
    '''
    db_name = "mds_testing_disposable_{}".format(uuid.uuid4())
    # NOTE(review): credentials are hard-coded here; confirm this is a
    # test-only throwaway account.
    test_conf = dict(database=db_name,
                     host='localhost',
                     port=27017,
                     timezone='US/Eastern',
                     mongo_user='******',
                     mongo_pwd='jerry')
    mds = MDS(test_conf, auth=AUTH)

    db = Broker(mds, fs=None)

    def delete_dm():
        # Drop the disposable database once the test is done.
        print("DROPPING DB")
        mds._connection.drop_database(db_name)

    request.addfinalizer(delete_dm)

    return db.mds
예제 #5
0
# Live-visualization demo: replay data from a local databroker through
# the xpdAn main pipeline.
from bluesky.callbacks.broker import LiveImage
import numpy as np
import zmq.asyncio as zmq_asyncio
from bluesky.utils import install_qt_kicker

# from xpdan.tools import better_mask_img

# Configuration for the read-only metadatastore/registry pair.
# NOTE(review): ``tzlocal``, ``os``, ``MDSRO``, ``RegistryRO``,
# ``AreaDetectorTiffHandler``, ``Broker``, ``TemporaryDirectory`` and
# ``conf_main_pipeline`` are presumably imported elsewhere in this
# file -- confirm.
d = {
    'directory': '/home/christopher/live_demo_data',
    'timezone': tzlocal.get_localzone().zone,
    'dbpath': os.path.join('/home/christopher/live_demo_data', 'filestore')
}
mds = MDSRO(d)
fs = RegistryRO(d)
fs.register_handler('AD_TIFF', AreaDetectorTiffHandler)
db = Broker(mds=mds, reg=fs)
td = TemporaryDirectory()
# Build the analysis pipeline, writing intermediates to the temp dir.
source = conf_main_pipeline(
    db,
    td.name,
    # vis=False,
    write_to_disk=False,
    # mask_setting=None
)

# a = LiveImage('pe1_image')
# Event loop plus Qt kicker so Matplotlib windows stay responsive.
loop = zmq_asyncio.ZMQEventLoop()
install_qt_kicker(loop=loop)
def put_in_queue(nd):
예제 #6
0
File: run_main.py  Project: xpdAcq/xpdAn
"""Example for XPD data"""
import os

import matplotlib.pyplot as plt
# pull from local data, not needed at beamline
from databroker.broker import Broker
from databroker._core import temp_config
import numpy as np
from tempfile import TemporaryDirectory
import copy
from pprint import pprint

db = Broker.named('live_demo_data')
db.prepare_hook = lambda x, y: copy.deepcopy(y)
td = TemporaryDirectory()

tmp = {'assets': {'config': {'dbpath': '/tmp/tmp5ucwapzn/assets.sqlite'}, 'class': 'Registry', 'module': 'databroker.assets.sqlite'}, 'description': 'temporary', 'metadatastore': {'config': {'directory': '/tmp/tmp5ucwapzn', 'timezone': 'US/Eastern'}, 'class': 'MDS', 'module': 'databroker.headersource.sqlite'}}

print(tmp)
db2 = Broker.from_config(tmp)

from xpdconf.conf import glbl_dict
glbl_dict.update(exp_db=db)

from rapidz import Stream
from rapidz.link import link

from shed.translation import ToEventStream, FromEventStream


def astype(x, ret_type='float32'):
예제 #7
0
File: pub.py  Project: chiahaoliu/xpdAn
# Replay the most recent run's documents over a 0MQ Publisher,
# throttled so downstream consumers can keep up.
import time
import multiprocessing
from shed.event_streams import istar
from shed.utils import to_event_model
import copy
import numpy as np

# from xpdan.tools import better_mask_img

# NOTE(review): ``tzlocal``, ``os``, ``MDSRO``, ``RegistryRO``,
# ``AreaDetectorTiffHandler``, ``Broker`` and ``Publisher`` are
# presumably imported elsewhere in this file -- confirm.
d = {'directory': '/home/christopher/live_demo_data',
     'timezone': tzlocal.get_localzone().zone,
     'dbpath': os.path.join('/home/christopher/live_demo_data', 'filestore')}
mds = MDSRO(d)
fs = RegistryRO(d)
fs.register_handler('AD_TIFF', AreaDetectorTiffHandler)
db = Broker(mds=mds, reg=fs)

p = Publisher('127.0.0.1:5567')  # noqa
# Hand out deep copies of documents so consumers may mutate them.
db.prepare_hook = lambda a, x: copy.deepcopy(x)
# '''
for name, doc in db[-1].documents():
    p(name, doc)
    # pause between documents to simulate live acquisition pacing
    time.sleep(10)
#    input()
'''
g = to_event_model([np.random.random((10, 10)) for _ in range(10)],
                   output_info=[('pe1_image', {'dtype': 'array',
                                               'shape': (10, 10)})])
for name, doc in g:
    print(doc)
    p(name, doc)
예제 #8
0
# Ingest a PAL-XFEL HDF5 file into a lightweight sqlite databroker,
# externalizing image data through an Npy writer.
from pprint import pprint  # was missing: pprint() is called below

from databroker.broker import Broker
from shed.savers import NpyWriter

from shed_sidewinder.pal_xfel import parse_hdf5

db_path = '/path/to/db'
config = {'description': 'lightweight personal database',
          'metadatastore': {'module': 'databroker.headersource.sqlite',
                            'class': 'MDS',
                            'config': {'directory': db_path,
                                       'timezone': 'US/Eastern'}},
          'assets': {'module': 'databroker.assets.sqlite',
                     'class': 'Registry',
                     'config': {'dbpath': db_path + '/database.sql'}}}

db = Broker.from_config(config)
writer = NpyWriter(db.fs, db_path)

for n, d in parse_hdf5('/path/to/hdf5.file'):
    if n == 'descriptor':
        # mark the image data key as stored externally (via filestore)
        d['data_keys']['image']['external'] = True
    if n == 'event':
        # write the image to disk and replace it with a datum reference
        d['data']['image'] = writer.write(d['data']['image'])
        d['filled']['image'] = False
    print(n)
    pprint(d)
    db.insert(n, d)


# from bluesky.callbacks.broker import LiveImage
# import matplotlib.pyplot as plt
예제 #9
0
# Start the xpdAn analysis server against the live demo database.
import copy

from databroker.broker import Broker
from xpdan.startup.analysis_server import order, run_server

db = Broker.named("live_demo_data")


def _deep_copy_doc(name, doc):
    # Hand out a deep copy of each document so consumers may mutate it.
    return copy.deepcopy(doc)


db.prepare_hook = _deep_copy_doc

run_server(order=order, db=db, mask_setting={"setting": "first"})
예제 #10
0
"""Example for XPD data"""
import os

import matplotlib.pyplot as plt
# pull from local data, not needed at beamline
from databroker.broker import Broker
from databroker._core import temp_config
import numpy as np
from tempfile import TemporaryDirectory
import copy
from pprint import pprint

db = Broker.named('live_demo_data')
db.prepare_hook = lambda x, y: copy.deepcopy(y)
td = TemporaryDirectory()

tmp = {
    'assets': {
        'config': {
            'dbpath': '/tmp/tmp5ucwapzn/assets.sqlite'
        },
        'class': 'Registry',
        'module': 'databroker.assets.sqlite'
    },
    'description': 'temporary',
    'metadatastore': {
        'config': {
            'directory': '/tmp/tmp5ucwapzn',
            'timezone': 'US/Eastern'
        },
        'class': 'MDS',
예제 #11
0
File: run_main.py  Project: eaculb/xpdAn
# Build and visualize the xpdAn main pipeline over the demo databroker.
from databroker.headersource.sqlite import MDSRO
from xpdan.pipelines.main import conf_main_pipeline
from tempfile import TemporaryDirectory
import copy

# from xpdan.tools import better_mask_img

# NOTE(review): ``tzlocal``, ``os``, ``RegistryRO``,
# ``AreaDetectorTiffHandler`` and ``Broker`` are presumably imported
# elsewhere in this file -- confirm.
d = {
    'directory': '/home/christopher/live_demo_data',
    'timezone': tzlocal.get_localzone().zone,
    'dbpath': os.path.join('/home/christopher/live_demo_data', 'filestore')
}
mds = MDSRO(d)
fs = RegistryRO(d)
fs.register_handler('AD_TIFF', AreaDetectorTiffHandler)
db = Broker(mds=mds, reg=fs)
# Hand out deep copies of documents so consumers may mutate them.
db.prepare_hook = lambda x, y: copy.deepcopy(y)
td = TemporaryDirectory()

vis = False
# vis = True
# Assemble the pipeline; intermediates go to the temp dir, nothing to disk.
source = conf_main_pipeline(
    db,
    td.name,
    vis=vis,
    write_to_disk=False,
    # verbose=True
)
# Render the pipeline graph.
source.visualize()
'''
seen = False
예제 #12
0
        'class': 'MDS',
        'config': {
            'directory': db_path,
            'timezone': 'US/Eastern'
        }
    },
    'assets': {
        'module': 'databroker.assets.sqlite',
        'class': 'Registry',
        'config': {
            'dbpath': db_path + '/database.sql'
        }
    }
}

# ``config`` and ``db_path`` are defined above (start of this snippet
# is truncated); build the broker and an Npy writer for image data.
db = Broker.from_config(config)
writer = NpyWriter(db.fs, db_path)

for n, d in parse_hdf5('/path/to/hdf5.file'):
    if n == 'descriptor':
        # mark the image data key as stored externally (via filestore)
        d['data_keys']['image']['external'] = True
    if n == 'event':
        # write the image to disk and replace it with a datum reference
        d['data']['image'] = writer.write(d['data']['image'])
        d['filled']['image'] = False
    print(n)
    pprint(d)
    db.insert(n, d)

# from bluesky.callbacks.broker import LiveImage
# import matplotlib.pyplot as plt
예제 #13
0
File: run_main.py  Project: Sasaank/xpdAn
"""Example for XPD data"""
import os

import matplotlib.pyplot as plt
# pull from local data, not needed at beamline
from databroker.broker import Broker
from tempfile import TemporaryDirectory
import copy
from pprint import pprint

db = Broker.named('live_demo_data')
db.prepare_hook = lambda x, y: copy.deepcopy(y)
td = TemporaryDirectory()

from xpdconf.conf import glbl_dict
glbl_dict.update(exp_db=db)

from xpdan.pipelines.main import *
from xpdan.pipelines.qoi import *
from xpdan.pipelines.vis import *

# vis = False
vis = True
# source.visualize(source_node=True)
# source.visualize(source_node=False)
# '''
for hdr in list((db[-1], )):
    for e in hdr.documents(fill=True):
        if e[0] == 'start':
            e[1].update(composition_string='EuTiO3')
        if e[0] == 'event' and vis: