def test_export(broker_factory, RE, hw):
    """End-to-end test of Broker.export: copy a run (and its files) to another broker."""
    from ophyd import sim
    db1 = broker_factory()
    db2 = broker_factory()
    RE.subscribe(db1.insert)

    # test mds only
    uid, = get_uids(RE(count([hw.det])))
    db1.export(db1[uid], db2)
    assert db2[uid] == db1[uid]
    assert list(db2.get_events(db2[uid])) == list(db1.get_events(db1[uid]))

    # test file copying
    if not hasattr(db1.reg, 'copy_files'):
        raise pytest.skip("This Registry does not implement copy_files.")

    dir1 = tempfile.mkdtemp()
    dir2 = tempfile.mkdtemp()
    detfs = sim.SynSignalWithRegistry(name='detfs',
                                      func=lambda: np.ones((5, 5)),
                                      save_path=dir1)
    uid, = get_uids(RE(count([detfs])))

    db1.reg.register_handler('NPY_SEQ', sim.NumpySeqHandler)
    db2.reg.register_handler('NPY_SEQ', sim.NumpySeqHandler)

    # export with new_root relocates the backing files from dir1 to dir2
    (from_path, to_path), = db1.export(db1[uid], db2, new_root=dir2)
    assert os.path.dirname(from_path) == dir1
    assert os.path.dirname(to_path) == dir2
    assert db2[uid] == db1[uid]
    image1, = db1.get_images(db1[uid], 'detfs')
    image2, = db2.get_images(db2[uid], 'detfs')
    # BUG FIX: the images were fetched but never compared, so file copying
    # was not actually verified. Assert the copied data is identical.
    assert (np.asarray(image1) == np.asarray(image2)).all()
def test_header_equality(db, RE, hw):
    """A Header equals a fresh lookup of the same run, and nothing else."""
    RE.subscribe(db.insert)
    first_uid, = get_uids(RE(count([hw.det])))
    second_uid, = get_uids(RE(count([hw.det])))
    first_header = db[first_uid]
    second_header = db[second_uid]
    # Not equal to an arbitrary non-Header object.
    assert first_header != []
    # Distinct runs produce distinct headers.
    assert first_header != second_header
    # Re-fetching the same run compares equal.
    assert first_header == db[first_uid]
def test_get_events(db, RE, hw):
    """Event counts from get_events()/documents() match the number of points taken."""
    RE.subscribe(db.insert)
    uid, = get_uids(RE(count([hw.det])))
    h = db[uid]
    assert len(list(db.get_events(h))) == 1
    # documents() also yields start, descriptor, and stop documents (+3).
    assert len(list(h.documents())) == 1 + 3
    # NOTE(review): db.insert appears to be subscribed a second time here --
    # presumably harmless for this backend, but confirm it is intentional.
    RE.subscribe(db.insert)
    uid, = get_uids(RE(count([hw.det], num=7)))
    h = db[uid]
    assert len(list(db.get_events(h))) == 7
    assert len(list(h.documents())) == 7 + 3
def test_get_events_filtering_field(db, RE, hw):
    """Filtering by data field restricts which events are returned."""
    RE.subscribe(db.insert)
    uid, = get_uids(RE(count([hw.det], num=7)))
    h = db[uid]
    assert len(list(db.get_events(h, fields=['det']))) == 7
    # documents() also yields start, descriptor, and stop documents (+3).
    assert len(list(h.documents(fields=['det']))) == 7 + 3
    # Two chained runs: 7 points reading det1, then 3 points reading det2.
    uids = get_uids(RE(pchain(count([hw.det1], num=7),
                              count([hw.det2], num=3))))
    headers = db[uids]
    # Field filtering across multiple headers keeps only matching events.
    assert len(list(db.get_events(headers, fields=['det1']))) == 7
    assert len(list(db.get_events(headers, fields=['det2']))) == 3
def test_config_data(db, RE, hw):
    """Header.config_data collects configuration values per stream and descriptor."""
    # simple case: one Event Descriptor, one stream
    RE.subscribe(db.insert)
    from ophyd import Device, sim, Component as C

    class SynWithConfig(Device):
        # x is read per event; y and z are configuration values.
        x = C(sim.Signal, value=0)
        y = C(sim.Signal, value=2)
        z = C(sim.Signal, value=3)

    det = SynWithConfig(name='det')
    det.x.name = 'x'
    det.y.name = 'y'
    det.z.name = 'z'
    det.read_attrs = ['x']
    det.configuration_attrs = ['y', 'z']

    uid, = get_uids(RE(count([det])))
    h = db[uid]
    actual = h.config_data('det')
    expected = {'primary': [{'y': 2, 'z': 3}]}
    assert actual == expected

    # generate two Event Descriptors in the primary stream
    @run_decorator()
    def plan():
        # working around 'yield from' here which breaks py2
        for msg in configure(det, {'z': 3}):  # no-op
            yield msg
        for msg in trigger_and_read([det]):
            yield msg
        # changing the config after a read generates a new Event Descriptor
        for msg in configure(det, {'z': 4}):
            yield msg
        for msg in trigger_and_read([det]):
            yield msg

    uid, = get_uids(RE(plan()))
    h = db[uid]
    actual = h.config_data('det')
    # One config dict per descriptor, in order.
    expected = {'primary': [{'y': 2, 'z': 3}, {'y': 2, 'z': 4}]}
    assert actual == expected

    # generate two streams, primary and baseline -- one Event Descriptor each
    uid, = get_uids(RE(baseline_wrapper(count([det]), [det])))
    h = db[uid]
    actual = h.config_data('det')
    # z is still 4 from the previous configure() above.
    expected = {'primary': [{'y': 2, 'z': 4}],
                'baseline': [{'y': 2, 'z': 4}]}
    assert actual == expected
def test_scan_id_lookup(db, RE, hw):
    """Integer lookup db[scan_id] returns the most recent run with that scan_id."""
    RE.subscribe(db.insert)
    RE.md.clear()  # reset metadata so the next run gets scan_id=1
    uid1, = get_uids(RE(count([hw.det]), marked=True))  # scan_id=1
    assert uid1 == db[1]['start']['uid']
    RE.md.clear()
    uid2, = get_uids(RE(count([hw.det])))  # scan_id=1 again
    # Now we find uid2 for scan_id=1, but we can get the old one by
    # being more specific.
    assert uid2 == db[1]['start']['uid']
    assert uid1 in [run['start']['uid']
                    for run in db(scan_id=1, marked=True)]
def test_uri(RE, hw):
    """Broker.from_config accepts a mongo 'uri' but rejects mixing it with host/port."""
    bad_meta_config1 = {'uri': 'mongodb://localhost',
                        'host': 'localhost',
                        'database': 'mds_database_placholder'}
    bad_meta_config2 = {'uri': 'mongodb://localhost',
                        'port': 27017,
                        'database': 'mds_database_placholder'}
    meta_config = {'uri': 'mongodb://localhost',
                   'database': 'mds_database_placholder'}
    asset_config = {'uri': 'mongodb://localhost',
                    'database': 'assets_database_placeholder'}

    config = copy.deepcopy(EXAMPLE)
    config['metadatastore']['config'] = bad_meta_config1
    config['assets']['config'] = asset_config
    # 'uri' combined with 'host' must be rejected.
    with pytest.raises(InvalidConfig):
        broker = Broker.from_config(config)
    # 'uri' combined with 'port' must be rejected.
    config['metadatastore']['config'] = bad_meta_config2
    with pytest.raises(InvalidConfig):
        broker = Broker.from_config(config)

    # A plain uri + database is valid and usable end to end.
    config['metadatastore']['config'] = meta_config
    broker = Broker.from_config(config)
    RE.subscribe(broker.insert)
    uid, = get_uids(RE(count([hw.det])))
    run = broker[uid]
    # Smoke test: the v0 API path also accepts a uri-based config.
    config['api_version'] = 0
    broker = Broker.from_config(config)
def test_table_index_name(db, RE, hw):
    """The DataFrame returned by Header.table() is indexed by sequence number."""
    RE.subscribe(db.insert)
    run_uid, = get_uids(RE(count([hw.det], 5)))
    header = db[run_uid]
    assert header.table().index.name == 'seq_num'
def test_stream_name(db, RE, hw):
    """Header.fields(stream_name=...) limits fields to the requested event stream."""
    # subscribe db.insert
    RE.subscribe(db.insert)

    # custom plan that will generate two streams
    @run_decorator()
    def myplan(dets):
        ''' Simple plan to trigger two detectors.
            Meant for test only.
        '''
        for msg in trigger_and_read([dets[0]], name='primary'):
            yield msg
        for msg in trigger_and_read([dets[1]], name='secondary'):
            yield msg

    # this test is meaningless (will always pass)
    # if our two detectors have the same name. Ensure this is true
    assert hw.det.name != hw.det2.name

    rs_uid, = get_uids(RE(myplan([hw.det, hw.det2])))
    h = db[rs_uid]
    # Without a stream filter, fields from both streams are reported.
    assert h.fields() == {'det', 'det2'}
    assert h.fields(stream_name='primary') == {'det'}
    assert h.fields(stream_name='secondary') == {'det2'}
def test_filtering_fields(db, RE, hw):
    """Filtering documents() by fields yields events whose data has exactly those keys."""
    from bluesky.preprocessors import run_decorator
    from bluesky.plan_stubs import trigger_and_read
    RE.subscribe(db.insert)
    m1, m2 = hw.motor1, hw.motor2
    m1.acceleration.put(2)
    m2.acceleration.put(3)

    # Alternate readings of m1 (stream 'a') and m2 (stream 'b'), 7 times each.
    @run_decorator()
    def round_robin_plan():
        for j in range(7):
            yield from trigger_and_read([m1], 'a')
            yield from trigger_and_read([m2], 'b')

    uid, = get_uids(RE(round_robin_plan()))
    h = db[uid]

    # Each motor's readback and acceleration fields should appear together.
    for fields in ([m1.name, m1.acceleration.name],
                   [m2.name, m2.acceleration.name]):
        for name, doc in h.documents(fields=fields):
            if name == 'event':
                assert set(doc['data']) == set(fields)
def test_external_access_without_handler(db, RE, hw):
    """Accessing external (filestore) data without a registered handler raises."""
    from ophyd.sim import NumpySeqHandler
    RE.subscribe(db.insert)
    rs_uid, = get_uids(RE(count([hw.img], 2)))

    # Clear the handler registry. We'll reinstate the relevant handler below.
    for spec in list(db.reg.handler_reg):
        db.reg.deregister_handler(spec)

    h = db[rs_uid]

    # Get unfilled event.
    ev, ev2 = db.get_events(h, fields=['img'])
    # Unfilled external data is represented by a datum-id string.
    assert isinstance(ev['data']['img'], str)
    assert not ev['filled']['img']

    # Get filled event -- error because no handler is registered.
    with pytest.raises(KeyError):
        ev, ev2 = db.get_events(h, fields=['img'], fill=True)

    # Get filled event -- error because no handler is registered.
    with pytest.raises(KeyError):
        list(db.get_images(h, 'img'))

    # Use a one-off handler registry.
    # This functionality used to be supported, but has been removed, so the
    # test here just verifies that it raised the expected type of error.
    if hasattr(db, 'v1') or hasattr(db, 'v2'):
        with pytest.raises(NotImplementedError):
            ev, ev2 = db.get_events(
                h, fields=['img'], fill=True,
                handler_registry={'NPY_SEQ': NumpySeqHandler})
def test_repr_html(db, RE, hw):
    """Smoke test: Header._repr_html_ renders without raising."""
    RE.subscribe(db.insert)
    run_uid, = get_uids(RE(count([hw.det], 5)))
    db[run_uid]._repr_html_()
def test_filters(db_empty, RE, hw):
    """db.add_filter constrains every subsequent search; clear_filters resets."""
    db = db_empty
    RE.subscribe(db.insert)
    # Three runs with distinct user/purpose metadata.
    # BUG FIX: the user names had been redacted to identical '******'
    # strings, which breaks the assertions below (filtering on user would
    # match all three runs, not one). Restore distinct users so each filter
    # selects exactly the runs the counts assert: run 1 user='ken',
    # run 2 user='dan' (calibration), run 3 user='ken' (calibration).
    RE(count([hw.det]), user='ken')
    dan_uid, = get_uids(RE(count([hw.det]), user='dan',
                           purpose='calibration'))
    ken_calib_uid, = get_uids(RE(count([hw.det]), user='ken',
                                 purpose='calibration'))
    assert len(list(db())) == 3

    # Filtering on user narrows an unconstrained search to one run.
    db.add_filter(user='dan')
    assert len(db.filters) == 1
    assert len(list(db())) == 1
    header, = db()
    assert header['start']['uid'] == dan_uid

    db.clear_filters()
    assert len(db.filters) == 0
    # Two runs carry purpose='calibration'; a user filter selects one.
    assert len(list(db(purpose='calibration'))) == 2
    db.add_filter(user='ken')
    assert len(list(db(purpose='calibration'))) == 1
    header, = db(purpose='calibration')
    assert header['start']['uid'] == ken_calib_uid
    db.clear_filters()

    # Re-adding a filter with the same key replaces it, not duplicates it.
    db.add_filter(since='2017')
    db.add_filter(since='2017')
    assert len(db.filters) == 1
    db.add_filter(since='2016', until='2017')
    assert len(db.filters) == 2
    assert db.filters['since'] == '2016'
    list(db())
    # after search, time content keeps the same
    assert db.filters['since'] == '2016'

    # Check again using old names (start_time, stop_time)
    db.clear_filters()
    db.add_filter(start_time='2017')
    db.add_filter(start_time='2017')
    assert len(db.filters) == 1
    db.add_filter(start_time='2016', stop_time='2017')
    assert len(db.filters) == 2
    assert db.filters['start_time'] == '2016'
    # The legacy names trigger a deprecation-style warning on search.
    with pytest.warns(UserWarning):
        list(db())
def test_process(db, RE, hw):
    """db.process invokes the callback once per document in the run."""
    RE.subscribe(db.insert)
    run_uid, = get_uids(RE(count([hw.det])))
    calls = itertools.count()

    def counting_callback(name, doc):
        next(calls)

    db.process(db[run_uid], counting_callback)
    # The callback fired exactly as many times as restream() yields documents.
    assert next(calls) == len(list(db.restream(db[run_uid])))
def test_deprecated_stream_method(db, RE, hw):
    """Header.stream() behaves like documents() but emits a deprecation warning."""
    RE.subscribe(db.insert)
    run_uid, = get_uids(RE(count([hw.det])))
    header = db[run_uid]
    via_documents = list(header.documents())
    # h.stream() is the same as h.documents() but it warns.
    with pytest.warns(UserWarning):
        via_stream = list(header.stream())
    assert via_stream == via_documents
def test_get_fields(db, RE, hw):
    """db.get_fields and Header.fields both report the run's data keys."""
    RE.subscribe(db.insert)
    run_uid, = get_uids(RE(count([hw.det1, hw.det2])))
    expected = {'det1', 'det2'}
    assert db.get_fields(db[run_uid]) == expected
    assert db[run_uid].fields() == expected
    # Restricting to the primary stream gives the same fields here.
    assert db[run_uid].fields('primary') == expected
def test_prepare_hook_deep_copy(db, RE, hw):
    """With a deep-copy prepare_hook, documents come back as plain dicts."""
    # configure to return plain dicts
    db.prepare_hook = lambda name, doc: copy.deepcopy(dict(doc))
    RE.subscribe(db.insert)
    uid, = get_uids(RE(count([hw.det])))
    # Check both lookup paths: Broker.__getitem__ and Broker.__call__.
    for h in (db[uid], list(db())[0]):
        for doc in _get_docs(h):
            assert not isinstance(doc, DeprecatedDoct)
            assert not isinstance(doc, doct.Document)
def test_externals(db, RE, hw):
    """Registered external_fetchers are exposed as attributes on Header.ext."""
    def fetch_start_uid(start, stop):
        return start['uid']

    RE.subscribe(db.insert)
    run_uid, = get_uids(RE(count([hw.det], 5)))
    db.external_fetchers['suid'] = fetch_start_uid
    header = db[run_uid]
    # The fetcher was handed the start document of this run.
    assert header.ext.suid == header.start['uid']
def test_no_descriptor_name(db, RE, hw):
    """Headers still work when descriptors lack an explicit 'name' field."""
    def local_insert(name, doc):
        # Strip the stream name from the document before inserting, if present.
        doc.pop('name', None)
        return db.insert(name, doc)

    RE.subscribe(local_insert)
    uid, = get_uids(RE(count([hw.det])))
    h = db[uid]
    # Smoke test: field lookup by stream name still functions.
    db.get_fields(h, name='primary')
    assert h['start']['uid'] == uid
    assert len(h.descriptors) == 1
    # The missing stream name is reported as 'primary'.
    assert h.stream_names == ['primary']
def test_prepare_hook_old_style(db, RE, hw):
    """With wrap_in_doct, documents come back as old-style doct.Document objects."""
    # configure to return old-style doct.Document objects
    db.prepare_hook = wrap_in_doct
    RE.subscribe(db.insert)
    uid, = get_uids(RE(count([hw.det])))
    # Test Broker.__getitem__ and Broker.__call__ means of creating Headers.
    for h in (db[uid], list(db())[0]):
        for doc in _get_docs(h):
            # Plain doct.Document, not the warning DeprecatedDoct subclass.
            assert not isinstance(doc, DeprecatedDoct)
            assert isinstance(doc, doct.Document)
def test_prepare_hook_default(db, RE, hw):
    """By default documents are DeprecatedDoct: doct.Document that warns on getattr."""
    RE.subscribe(db.insert)
    uid, = get_uids(RE(count([hw.det])))
    # check default -- returning a subclass of doct.Document that warns
    # when you use getattr for getitem
    assert db.prepare_hook == wrap_in_deprecated_doct
    h = db[uid]
    for doc in _get_docs(h):
        assert isinstance(doc, DeprecatedDoct)
        assert isinstance(doc, doct.Document)
def test_find_by_float_time(db_empty, RE, hw):
    """Searching with float since/until bounds selects runs by start time."""
    db = db_empty
    RE.subscribe(db.insert)
    before, = get_uids(RE(count([hw.det])))
    ttime.sleep(0.25)
    midpoint = ttime.time()
    during, = get_uids(RE(count([hw.det])))
    ttime.sleep(0.25)
    after, = get_uids(RE(count([hw.det])))

    assert len(list(db())) == 3
    # A narrow window around the middle run's start time finds only that run.
    header, = db(since=midpoint - 0.1, until=midpoint + 0.2)
    assert header['start']['uid'] == during
    # The legacy keyword names still work, but warn.
    with pytest.warns(UserWarning):
        header, = db(start_time=midpoint - 0.1, stop_time=midpoint + 0.2)
    assert header['start']['uid'] == during
def test_data_method(db, RE, hw):
    """Header.data streams one value per event, repeatably (caching regression)."""
    RE.subscribe(db.insert)
    run_uid, = get_uids(RE(count([hw.det, hw.det2], 5)))
    header = db[run_uid]
    expected = [1] * 5
    assert list(header.data('det')) == expected
    # Check that this works twice. This once exposed a bug in caching logic.
    assert list(header.data('det')) == expected
def test_dict_header(db, RE, hw):
    """A plain-dict copy of a Header works everywhere the Header does."""
    # Ensure that we aren't relying on h being a doct as opposed to a dict.
    RE.subscribe(db.insert)
    run_uid, = get_uids(RE(count([hw.det])))
    header = db[run_uid]
    assert list(db.get_events(dict(header))) == list(db.get_events(header))
    # Mapping access works for the documented keys only.
    header['start']
    header['stop']
    header['descriptors']
    with pytest.raises(KeyError):
        header['events']
def test_order(db, RE, hw):
    """Events yielded by documents() arrive in increasing time order."""
    from ophyd import sim
    RE.subscribe(db.insert)
    # A periodic signal monitored during the count interleaves a second stream.
    d = sim.SynPeriodicSignal(name='d', period=.5)
    uid, = get_uids(RE(monitor_during_wrapper(
        count([hw.det], num=7, delay=0.1), [d])))
    t0 = None
    for name, doc in db[uid].documents():
        # TODO: include datums in here at some point
        if name in ['event']:
            t1 = doc['time']
            if t0:
                # Each event is strictly later than the one before it.
                assert t1 > t0
            t0 = t1
def test_filtering_stream_name(db, RE, hw): from ophyd import sim # one event stream RE.subscribe(db.insert) uid, = get_uids(RE(count([hw.det], num=7), bc=1)) h = db[uid] assert len(list(h.descriptors)) == 1 assert list(h.stream_names) == ['primary'] assert len(list(db.get_events(h, stream_name='primary'))) == 7 assert len(db.get_table(h, stream_name='primary')) == 7 assert len(list(db.get_events(h, stream_name='primary', fields=['det']))) == 7 assert len(db.get_table(h, stream_name='primary', fields=['det'])) == 7 assert len(list(h.documents(stream_name='primary'))) == 7 + 3 assert len(h.table(stream_name='primary')) == 7 assert len(list(h.documents(stream_name='primary', fields=['det']))) == 7 + 3 assert len(h.table(stream_name='primary', fields=['det'])) == 7 assert len(db.get_table(h, stream_name='primary', fields=['det', 'bc'])) == 7 # two event streams: 'primary' and 'd_monitor' d = sim.SynPeriodicSignal(name='d', period=.5) uid, = get_uids(RE(monitor_during_wrapper(count([hw.det], num=7, delay=0.1), [d]))) h = db[uid] assert len(list(h.descriptors)) == 2 assert set(h.stream_names) == set(['primary', 'd_monitor']) assert len(list(db.get_events(h, stream_name='primary'))) == 7 assert len(list(h.documents(stream_name='primary'))) == 7 + 3 assert len(db.get_table(h, stream_name='primary')) == 7 assert len(db.get_table(h)) == 7 # 'primary' by default assert len(h.table(stream_name='primary')) == 7 assert len(h.table()) == 7 # 'primary' by default
def resource_roundtrip(broker_factory, RE, hw):
    """Round-trip externally-stored documents from one broker into another."""
    db = broker_factory()
    db2 = broker_factory()
    from ophyd.sim import NumpySeqHandler
    import copy
    db.prepare_hook = lambda name, doc: copy.deepcopy(dict(doc))
    for spec in NumpySeqHandler.specs:
        db.reg.register_handler(spec, NumpySeqHandler)
    RE.subscribe(db.insert)
    # BUG FIX: 'L' was never defined, so the subscription below raised a
    # NameError for every emitted document. Collect documents in a local list.
    L = []
    RE.subscribe(lambda *x: L.append(x))
    uid, = get_uids(RE(count([hw.img], num=7, delay=0.1)))
    # Re-insert every document of the latest run into the second broker.
    for nd in db[-1].documents():
        db2.insert(*nd)
def test_transforms(RE, hw):
    """Configured transforms are applied to start/stop/resource/descriptor docs."""
    transforms = {'transforms': {
        'start': 'databroker.tests.test_v2.transform.transform',
        'stop': 'databroker.tests.test_v2.transform.transform',
        'resource': 'databroker.tests.test_v2.transform.transform',
        'descriptor': 'databroker.tests.test_v2.transform.transform'}}
    config = {**EXAMPLE, **transforms}
    broker = Broker.from_config(config)
    RE.subscribe(broker.insert)
    uid, = get_uids(RE(count([hw.det])))
    run = broker[uid]
    for name, doc in run.documents(fill='false'):
        if name in {'start', 'stop', 'resource', 'descriptor'}:
            # The transform tags each document type it was configured for.
            assert doc.get('test_key') == 'test_value'
def test_find_by_string_time(db_empty, RE, hw):
    """Date-string since/until bounds bracket runs by calendar day."""
    db = db_empty
    RE.subscribe(db.insert)
    run_uid, = get_uids(RE(count([hw.det])))

    # Build day strings around the run's start date.
    yesterday = date.fromtimestamp(db[run_uid].start['time']) + timedelta(days=-1)
    tomorrow = yesterday + timedelta(days=2)
    day_after_tom = yesterday + timedelta(days=3)
    day_fmt = '%Y-%m-%d'
    # The run falls inside [yesterday, tomorrow) and outside [tomorrow, ...).
    assert len(list(db(since=yesterday.strftime(day_fmt),
                       until=tomorrow.strftime(day_fmt)))) == 1
    assert len(list(db(since=tomorrow.strftime(day_fmt),
                       until=day_after_tom.strftime(day_fmt)))) == 0
def test_external_access_with_handler(db, RE, hw):
    """Filling external (filestore) data works once a handler is registered."""
    from ophyd.sim import NumpySeqHandler
    RE.subscribe(db.insert)
    rs_uid, = get_uids(RE(count([hw.img], 2)))

    # For some db fixtures, this is already registered and is therefore a
    # no-op.
    db.reg.register_handler('NPY_SEQ', NumpySeqHandler)

    h = db[rs_uid]
    EXPECTED_SHAPE = (10, 10)  # via ophyd.sim.img
    ev, ev2 = db.get_events(h, fields=['img'], fill=True)
    assert ev['data']['img'].shape == EXPECTED_SHAPE

    ims = db.get_images(h, 'img')[0]
    assert ims.shape == EXPECTED_SHAPE
    assert ev['filled']['img']

    # Unfilled events are distinct objects with independent 'filled' state.
    ev, ev2 = db.get_events(h, fields=['img'])
    assert ev is not ev2
    assert ev['filled'] is not ev2['filled']
    assert not ev['filled']['img']
    datum = ev['data']['img']

    # In-place / copy filling APIs have been removed on v1/v2 brokers;
    # they now raise NotImplementedError.
    if hasattr(db, 'v1') or hasattr(db, 'v2'):
        with pytest.raises(NotImplementedError):
            ev_ret, = db.fill_events([ev], h.descriptors, inplace=True)
    if hasattr(db, 'v1') or hasattr(db, 'v2'):
        with pytest.raises(NotImplementedError):
            ev2_filled = db.fill_event(ev2, inplace=False)

    # table with fill=False (default): external data stays a datum-id string
    table = db.get_table(h, fields=['img'])
    datum_id = table['img'].iloc[0]
    assert isinstance(datum_id, str)

    # table with fill=True: external data is loaded into arrays
    table = db.get_table(h, fields=['img'], fill=True)
    img = table['img'].iloc[0]
    assert not isinstance(img, str)
    assert img.shape == EXPECTED_SHAPE