def test_uri(RE, hw):
    bad_meta_config1 = {'uri': 'mongodb://localhost',
                        'host': 'localhost',
                        'database': 'mds_database_placeholder'}
    bad_meta_config2 = {'uri': 'mongodb://localhost',
                        'port': 27017,
                        'database': 'mds_database_placeholder'}
    meta_config = {'uri': 'mongodb://localhost',
                   'database': 'mds_database_placeholder'}
    asset_config = {'uri': 'mongodb://localhost',
                    'database': 'assets_database_placeholder'}
    config = copy.deepcopy(EXAMPLE)
    config['metadatastore']['config'] = bad_meta_config1
    config['assets']['config'] = asset_config
    # A 'uri' may not be combined with a 'host' or a 'port'.
    with pytest.raises(InvalidConfig):
        broker = Broker.from_config(config)
    config['metadatastore']['config'] = bad_meta_config2
    with pytest.raises(InvalidConfig):
        broker = Broker.from_config(config)
    config['metadatastore']['config'] = meta_config
    broker = Broker.from_config(config)
    RE.subscribe(broker.insert)
    uid, = get_uids(RE(count([hw.det])))
    run = broker[uid]
    config['api_version'] = 0
    broker = Broker.from_config(config)
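# Several tests in this section refer to an EXAMPLE config that is not shown
# here. The following is a minimal sketch of its likely shape, inferred from
# the assertions in these tests (the 'FOO' handler in
# test_handler_registration, the root_map values in test_root_map, and the
# mongo-backed metadatastore/assets sections); the module paths and
# placeholder values are assumptions, not the actual test fixture:
EXAMPLE = {
    'description': 'DESCRIPTION_PLACEHOLDER',
    'metadatastore': {
        'module': 'databroker.headersource.mongo',
        'class': 'MDS',
        'config': {
            'host': 'localhost',
            'port': 27017,
            'database': 'mds_database_placeholder',
            'timezone': 'US/Eastern',
        },
    },
    'assets': {
        'module': 'databroker.assets.mongo',
        'class': 'Registry',
        'config': {
            'host': 'localhost',
            'port': 27017,
            'database': 'assets_database_placeholder',
        },
    },
    'handlers': {
        # hypothetical entry; test_handler_registration expects a 'FOO' spec
        'FOO': {'module': 'databroker.tests.test_config',
                'class': 'DummyHandler'},
    },
    # values asserted in test_root_map
    'root_map': {'foo': 'bar', 'boo': 'far'},
}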
def test_handler_registration():
    db = Broker.from_config(EXAMPLE)
    assert 'AD_HDF5' in db.reg.handler_reg  # builtin
    assert 'FOO' in db.reg.handler_reg  # specified by config
    db = Broker.from_config(EXAMPLE, auto_register=False)
    assert 'AD_HDF5' not in db.reg.handler_reg  # builtin
    assert 'FOO' in db.reg.handler_reg  # specified by config
def test_temp_config():
    c = temp_config()
    db = Broker.from_config(c)
    uid = str(uuid.uuid4())
    db.insert('start', {'uid': uid, 'time': 0})
    db.insert('stop', {'uid': str(uuid.uuid4()), 'time': 1,
                       'run_start': uid})
    db[-1]
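# For reference, temp_config() returns a sqlite-backed config pointing at a
# fresh temporary directory. A sketch of its likely shape, assuming the same
# layout as build_sim_db below -- the only key the fixtures here actually
# rely on is config['metadatastore']['config']['directory']:
#
# {
#     'metadatastore': {
#         'module': 'databroker.headersource.sqlite',
#         'class': 'MDS',
#         'config': {'directory': '<tempdir>', 'timezone': 'US/Eastern'},
#     },
#     'assets': {
#         'module': 'databroker.assets.sqlite',
#         'class': 'Registry',
#         'config': {'dbpath': '<tempdir>/assets.sqlite'},
#     },
# }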
def build_sqlite_backed_broker(request):
    """Uses mongoquery + sqlite -- no pymongo or mongo server anywhere"""
    config = temp_config()
    tempdir = config['metadatastore']['config']['directory']

    def cleanup():
        shutil.rmtree(tempdir)

    request.addfinalizer(cleanup)

    return Broker.from_config(config)
def test_transforms(RE, hw):
    transforms = {'transforms': {
        'start': 'databroker.tests.test_v2.transform.transform',
        'stop': 'databroker.tests.test_v2.transform.transform',
        'resource': 'databroker.tests.test_v2.transform.transform',
        'descriptor': 'databroker.tests.test_v2.transform.transform'}}
    config = {**EXAMPLE, **transforms}
    broker = Broker.from_config(config)
    RE.subscribe(broker.insert)
    uid, = get_uids(RE(count([hw.det])))
    run = broker[uid]
    for name, doc in run.documents(fill='false'):
        if name in {'start', 'stop', 'resource', 'descriptor'}:
            assert doc.get('test_key') == 'test_value'
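# The 'transforms' entries above are dotted paths to callables that
# Broker.from_config applies to documents of the named types before they are
# returned. A minimal sketch of what
# databroker.tests.test_v2.transform.transform would have to do to satisfy
# the assertion above (the actual implementation is not shown in this
# section):
def transform(doc):
    doc = dict(doc)  # avoid mutating the stored document in place
    doc['test_key'] = 'test_value'  # the key the test asserts on
    return doc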
def test_from_config():
    broker = Broker.from_config(EXAMPLE)
    config = broker.get_config()
    print(config)
    # we explicitly test for parts we know should be accepted
    mds_example = EXAMPLE['metadatastore']['config']
    reg_example = EXAMPLE['assets']['config']
    root_map_example = EXAMPLE['root_map']
    mds_config = config['metadatastore']
    reg_config = config['assets']
    root_map_config = config['root_map']
    assert mds_example == mds_config
    assert reg_example == reg_config
    assert root_map_example == root_map_config
def build_sim_db(sim_db_dir=None):
    if not sim_db_dir:
        sim_db_dir = tempfile.mkdtemp()
    config = {
        'metadatastore': {
            'module': 'databroker.headersource.sqlite',
            'class': 'MDS',
            'config': {
                'directory': sim_db_dir,
                'timezone': 'US/Eastern'
            }
        },
        'assets': {
            'module': 'databroker.assets.sqlite',
            'class': 'Registry',
            'config': {
                'dbpath': os.path.join(sim_db_dir, 'assets.sqlite')
            }
        }
    }
    return sim_db_dir, Broker.from_config(config)
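# A hypothetical usage of build_sim_db: create a throwaway sqlite-backed
# broker, use it, and remove the backing directory when done.
import shutil

sim_db_dir, db = build_sim_db()
try:
    pass  # insert documents / run queries against db here
finally:
    shutil.rmtree(sim_db_dir)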
def db_all(request):
    '''Provide a function-scoped metadatastore instance talking to a
    temporary database on localhost:27017, with focus on v1.
    '''
    db_name1 = "mds_testing_disposable_{}".format(str(uuid.uuid4()))
    db_name2 = "mds_testing_disposable_{}".format(str(uuid.uuid4()))
    test_config = {
        'metadatastore': {
            'module': 'databroker.headersource.mongo',
            'class': 'MDS',
            'config': {
                'host': 'localhost',
                'port': 27017,
                'database': db_name1,
                'timezone': 'US/Eastern'
            }
        },
        'assets': {
            'module': 'databroker.assets.mongo',
            'class': 'Registry',
            'config': {
                'host': 'localhost',
                'port': 27017,
                'database': db_name2
            }
        }
    }
    db = Broker.from_config(test_config)

    def delete_dm():
        print("DROPPING DB")
        db.mds._connection.drop_database(db_name1)
        db.mds._connection.drop_database(db_name2)

    request.addfinalizer(delete_dm)
    return db
def run_server(
    folder,
    outbound_proxy_address=glbl_dict["outbound_proxy_address"],
    prefix=None,
    handlers=None,
):
    """Start up the portable databroker server

    Parameters
    ----------
    folder : str
        The location where to save the portable databrokers
    outbound_proxy_address : str, optional
        The address and port of the zmq proxy. Defaults to
        ``glbl_dict["outbound_proxy_address"]``
    prefix : bytes or list of bytes, optional
        The Publisher channels to listen to. Defaults to
        ``[b"an", b"raw"]``
    handlers : dict
        The map between handler specs and handler classes, defaults
        to the map used by the experimental databroker if possible
    """
    # TODO: convert to bytestrings if needed
    # TODO: maybe separate this into different processes?
    # TODO: support multiple locations for folders
    if prefix is None:
        prefix = [b"an", b"raw"]
    d = RemoteDispatcher(outbound_proxy_address, prefix=prefix)
    portable_folder = folder
    portable_configs = {}
    for folder_name in ["an", "raw"]:
        fn = os.path.join(portable_folder, folder_name)
        os.makedirs(fn, exist_ok=True)
        # if the path doesn't exist then make the databrokers
        with open(
            os.path.join(portable_folder, f"{folder_name}.yml"), "w"
        ) as f:
            f.write(portable_template.format(folder_name))
        print(portable_template.format(folder_name))
        print(fn)
        portable_configs[folder_name] = yaml.load(
            io.StringIO(portable_template.format(fn))
        )
        os.makedirs(os.path.join(fn, "data"), exist_ok=True)

    # TODO: add more files here, e.g. a databroker readme/tutorial
    with open(os.path.join(portable_folder, "db_load.py"), "w") as f:
        f.write(load_script)
    an_broker = Broker.from_config(portable_configs["an"])

    an_source = Stream()
    zed = an_source.Store(
        os.path.join(
            portable_configs["an"]["metadatastore"]["config"]["directory"],
            "data",
        ),
        NpyWriter,
    )
    zed.starsink(an_broker.insert)

    raw_broker = Broker.from_config(portable_configs["raw"])
    if handlers is None:
        handlers = raw_broker.reg.handler_reg

    raw_source = Stream()
    raw_source.starmap(
        ExportCallback(
            os.path.join(
                portable_configs["raw"]["metadatastore"]["config"]["directory"],
                "data",
            ),
            handler_reg=handlers,
        )
    ).starsink(raw_broker.insert)

    rr = RunRouter(
        [
            lambda x: (lambda *nd: raw_source.emit(nd))
            if x.get("analysis_stage", "") == "raw"
            else None,
            lambda x: (lambda *nd: an_source.emit(nd))
            if x.get("analysis_stage", None) == "pdf"
            else None,
            lambda x: (lambda *nd: an_source.emit(nd))
            if x.get("analysis_stage", None) == "integration"
            else None,
        ]
    )
    d.subscribe(rr)
    print("Starting Portable DB Server")
    d.start()
def test_root_map():
    db = Broker.from_config(EXAMPLE)
    assert 'foo' in db.reg.root_map
    assert db.reg.root_map['foo'] == EXAMPLE['root_map']['foo']
    assert db.reg.root_map['boo'] == EXAMPLE['root_map']['boo']
import shutil

from ophyd.sim import det4, motor1, motor2, motor3
from bluesky import RunEngine
from bluesky.callbacks.best_effort import BestEffortCallback
from databroker.tests.utils import temp_config
from databroker import Broker

# db setup
config = temp_config()
tempdir = config['metadatastore']['config']['directory']

def cleanup():
    shutil.rmtree(tempdir)

db = Broker.from_config(config)

RE = RunEngine({})

# subscribe BEC
bec = BestEffortCallback()
RE.subscribe(bec)
RE.subscribe(db.insert)

# move motors to a reproducible location
RE(mov(motor1, 0))
RE(mov(motor2, 0))
RE(relative_outer_product_scan([det4],
                               motor1, -1, 0, 10,
                               motor2, -2, 0, 20, True))
def test_from_config():
    broker = Broker.from_config(EXAMPLE)
    config = broker.get_config()
    assert EXAMPLE == config
def test_auto_register():
    db_auto = Broker.from_config(temp_config())
    db_manual = Broker.from_config(temp_config(), auto_register=False)
    assert db_auto.reg.handler_reg
    assert not db_manual.reg.handler_reg
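# With auto_register=False the registry starts empty, but handlers can still
# be registered by hand via Registry.register_handler. A minimal sketch,
# assuming a hypothetical DummyHandler class (not part of the tests above):
class DummyHandler:
    def __init__(self, resource_path, **kwargs):
        self._path = resource_path

    def __call__(self, **kwargs):
        return None  # a real handler would load and return the data


db = Broker.from_config(temp_config(), auto_register=False)
db.reg.register_handler('DUMMY_SPEC', DummyHandler)
assert 'DUMMY_SPEC' in db.reg.handler_reg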
def main():
    server = "otz"
    otz_config = {
        "description": "heavyweight shared database",
        "metadatastore": {
            "module": "databroker.headersource.mongo",
            "class": "MDS",
            "config": {
                "host": server,
                "port": 27017,
                "database": "metadatastore-production-v1",
                "timezone": "US/Central"
            }
        },
        "assets": {
            "module": "databroker.assets.mongo",
            "class": "Registry",
            "config": {
                "host": server,
                "port": 27017,
                "database": "filestore-production-v1"
            }
        }
    }
    otz_db = Broker.from_config(otz_config)

    tbl = pyRestTable.Table()
    tbl.labels = "beamline uid plan_name".split()
    headers = otz_db(since='2018-09-01', until='2018-11-06')
    for h in headers:
        row = (h.start["beamline_id"],
               h.start["login_id"],
               h.start["plan_name"])
        tbl.addRow(row)
    print(tbl)

    tbl = pyRestTable.Table()
    tbl.labels = "date/time # uid plan_name".split()
    headers = otz_db(beamline_id='3-ID')
    for h in headers:
        t_float = h.start["time"]
        dt = datetime.datetime.fromtimestamp(t_float)
        row = [dt, h.start["scan_id"], h.start["uid"], h.start["plan_name"]]
        tbl.addRow(row)
    print(tbl)

    # build a local sqlite database for testing
    test_dir = "/tmp/bluesky"
    if not os.path.exists(test_dir):
        os.makedirs(test_dir)
    test_config = {
        "description": "lightweight personal database",
        "metadatastore": {
            "module": "databroker.headersource.sqlite",
            "class": "MDS",
            "config": {
                "directory": test_dir,
                "timezone": "US/Central"
            }
        },
        "assets": {
            "module": "databroker.assets.sqlite",
            "class": "Registry",
            "config": {
                "dbpath": test_dir + "/database.sql"
            }
        }
    }
    test_db = Broker.from_config(test_config)

    # test the export from otz using a local sqlite db
    if os.path.exists(test_dir):
        shutil.rmtree(test_dir)
    os.makedirs(test_dir)
    for h in otz_db.stream(headers):
        tag, doc = h
        print(tag)
        if tag == "descriptor":
            print(sorted(doc.data_keys.keys()))
        elif tag == "event":
            print(doc.seq_num)
        test_db.insert(tag, doc)
        if tag == "descriptor":
            print(sorted(doc.data_keys.keys()))
        elif tag == "event":
            print(sorted(doc.data_keys.keys()))
def test_root_map():
    db = Broker.from_config(EXAMPLE)
    assert 'foo' in db.reg.root_map
    assert db.reg.root_map['foo'] == 'bar'
    assert db.reg.root_map['boo'] == 'far'
def test_auto_register():
    db_auto = Broker.from_config(EXAMPLE)
    db_manual = Broker.from_config(EXAMPLE, auto_register=False)
    assert 'AD_HDF5' in db_auto.reg.handler_reg
    assert 'AD_HDF5' not in db_manual.reg.handler_reg
def __init__(self, *args, **kwargs):
    super().__init__(*args, **kwargs)

    # PV names
    self.pv_names = loadPV()

    # ZMQ ports
    self.zmqSendPort = 5301
    self.zmqRecvPort = 5201
    try:
        self.zmqSendSock = CONTEXT.socket(zmq.PUB)
        self.zmqSendSock.bind("tcp://*:" + str(self.zmqSendPort))
    except zmq.ZMQError:
        self.zmqSendSock = None
        print("Failed to bind to socket : {}".format(self.zmqSendPort))

    # MainWindow title
    self.setWindowTitle("DataViewer")

    # Initialize
    self._dragging = False
    self._last_y_axis_type = 0
    self._last_tab_index = 0
    self.plot_type = 'measure'
    self.start_timer = 0
    self.settings = {}
    self.settings['E0'] = 8333
    self.settings['sdd'] = False
    self.settings['ratio'] = 0.7
    self.settings['plot_type'] = 'measure'
    self.settings['blink'] = False
    self.settings['scanCounts'] = 1

    # DataBroker
    self.dbv1 = Broker.from_config(config)
    self.db = self.dbv1.v2

    # Main QWidget
    main_panel = qt.QWidget(self)
    main_panel.setLayout(qt.QVBoxLayout())
    self.setCentralWidget(main_panel)

    # Status widget
    self.status = StatusWidget(self)
    self.status.setSizePolicy(qt.QSizePolicy.Expanding, qt.QSizePolicy.Fixed)

    # Initialize plot
    self.plot = Plot1DCustom(self, 'mpl')
    self.plot.getLegendsDockWidget().show()
    self.plot.setBackgroundColor('#FCF9F6')
    self.plot.setGraphXLabel("Energy [eV]")
    self.plot.setGraphYLabel("Counts [Arbs.]")
    self.plot.setDataMargins(0.01, 0.01, 0.01, 0.01)

    # Layout
    main_panel.layout().addWidget(self.status)
    main_panel.layout().addWidget(self.plot)

    # Adjust the operation graph's margins (left, bottom, width, height)
    self.plot._backend.ax.set_position([0.1, 0.05, 0.83, 0.93])
    self.plot._backend.ax2.set_position([0.1, 0.05, 0.83, 0.93])

    # self.updatePlotThread = UpdatePlotThread(self)
    # self.updatePlotThread.daemon = True
    # self.updatePlotThread.start()

    self.updatePlotThread = TestThread(self)
    self.updatePlotThread.daemon = True
    self.updatePlotThread.start()

    # Upper panel for safety
    self.status.abortButton.clicked.connect(self.abortScan)

    # Manage zoom history of plot
    self.status.x_axis_type_combo_box.currentIndexChanged.connect(
        self.clearZoomHistory)
    self.status.y_axis_type_combo_box.currentIndexChanged.connect(
        self.clearZoomHistory)

    # Start ZMQ recv thread
    self.zmqRecvThread = QThreadFuture(self.receiveZmq)
    self.zmqRecvThread.start()

    if self.status:
        # RunEngine notifier
        self._dummyIndex = 0
        self.notifyTimer = qt.QTimer()
        self.notifyTimer.timeout.connect(self._notifyColor)
        self.notifyTimer.start(1000)

    # As a thread that monitors the DCM moving state, check the case
    # where it is normally positioned but is displayed as moving.
    # self.checkDcmThread = CheckDcmThread()
    # self.checkDcmThread.daemon = True
    # self.checkDcmThread.start()

    # Connections
    self.status.num_of_history_spin_box.valueChanged.connect(
        self.updatePlotThread.trigger)
    self.status.x_axis_type_combo_box.currentIndexChanged.connect(
        self.updatePlotThread.trigger)
    self.status.y_axis_type_combo_box.currentIndexChanged.connect(
        self.updatePlotThread.trigger)
    self.status.derivativeCB.stateChanged.connect(
        self.updatePlotThread.trigger)

    # Check for dragging
    self.plot.sigPlotSignal.connect(self.checkDragging)

    # Initial query
    self.sendZmq('ViewerInitialized')
def test_from_config():
    Broker.from_config(EXAMPLE)
            'database': 'metadatastore_production_v1',
            'timezone': 'US/Eastern'
        },
    },
    'assets': {
        'module': 'databroker.assets.mongo',
        'class': 'Registry',
        'config': {
            'host': 'xf16idc-ca',
            'port': 27017,
            'database': 'filestore',
        },
    },
}

db = Broker.from_config(db_config)
bec = None

#import bluesky
#from ophyd import *
#from ophyd.commands import *

def reload_macros(file='~/.ipython/profile_collection/startup/99-macros.py'):
    ipy = get_ipython()
    ipy.magic('run -i ' + file)

def is_ipython():
    ip = True
    if 'ipykernel' in sys.modules:
import shutil

from bluesky import RunEngine
from bluesky.callbacks.best_effort import BestEffortCallback
from databroker.tests.utils import temp_config
from databroker import Broker

# db setup
config = temp_config()
tempdir = config['metadatastore']['config']['directory']

def cleanup():
    shutil.rmtree(tempdir)

db = Broker.from_config(config)

RE = RunEngine({})

# subscribe BEC
bec = BestEffortCallback()
RE.subscribe(bec)
RE.subscribe(db.insert)

# move motors to a reproducible location
RE(mov(motor1, 0))
RE(mov(motor2, 0))

RE(relative_outer_product_scan([det4],
                               motor1, -1, 0, 10,
                               motor2, -2, 0, 20, True))
RE(outer_product_scan([det4],
                      motor1, -1, 0, 10,
                      motor2, -2, 0, 20, True))
def build_db_from_config():
    config = get_db_config()
    db = Broker.from_config(config)
    return db
'''
Author: Jorge Diaz
Exercise 8
'''
import os
import uuid

import humanize
import pandas as pd

from databroker.tests.utils import temp_config
from databroker import Broker

# this will create a temporary databroker object with nothing in it
db = Broker.from_config(temp_config())

# These are the transformations necessary to convert the hex values
# that come from the ADCs into units of volts.
fc = 7.62939453125e-05

def adc2counts(x):
    return ((int(x, 16) >> 8) - 0x40000) * fc \
        if (int(x, 16) >> 8) > 0x1FFFF else (int(x, 16) >> 8) * fc

def enc2counts(x):
    return int(x) if int(x) <= 0 else -(int(x) ^ 0xffffff - 1)

class PizzaBoxANHandler():
    def __init__(self, resource_path, chunk_size=1024):
        shutter_control=True,
        auto_load_calib=True,
        calib_config_name=glbl_dict["calib_config_name"],
        # instrument config
        det_image_field=glbl_dict["image_fields"],
        all_folders=ALL_FOLDERS,
        userscript_dir=USERSCRIPT_DIR,
        _exclude_dir=[HOME_DIR, BLCONFIG_DIR, YAML_DIR],
        archive_base_dir=ARCHIVE_BASE_DIR,
    ))

if glbl_dict["exp_broker_name"] == "xpd_sim_databroker":
    with open(sim_db_config_path, "r") as f:
        db_config = yaml.safe_load(f)
    db_config["metadatastore"]["config"]["directory"] = db_config[
        "metadatastore"]["config"]["directory"].format(**glbl_dict)
    db_config["assets"]["config"]["dbpath"] = db_config["assets"]["config"][
        "dbpath"].format(**glbl_dict)
    glbl_dict["exp_db"] = Broker.from_config(db_config)
else:
    glbl_dict["exp_db"] = Broker.named(glbl_dict["exp_broker_name"])

glbl_dict.update({
    k: os.path.join(glbl_dict["base_dir"], glbl_dict[z])
    for k, z in zip(
        ["home_dir", "blconfig_dir"],
        ["home_dir_name", "blconfig_dir_name"],
    )
})