def test_count_delay_argument():
    # num=7 but delay only provides 5 entries
    with pytest.raises(ValueError):
        # count raises ValueError when the delay generator is exhausted
        list(count([det], num=7, delay=(2 ** i for i in range(5))))

    # num=6 with 5 delays between readings should produce 6 readings
    msgs = count([det], num=6, delay=(2 ** i for i in range(5)))
    read_count = len([msg for msg in msgs if msg.command == "read"])
    assert read_count == 6

    # num=5 with 5 delays should produce 5 readings
    msgs = count([det], num=5, delay=(2 ** i for i in range(5)))
    read_count = len([msg for msg in msgs if msg.command == "read"])
    assert read_count == 5

    # num=4 with 5 delays should produce 4 readings
    msgs = count([det], num=4, delay=(2 ** i for i in range(5)))
    read_count = len([msg for msg in msgs if msg.command == "read"])
    assert read_count == 4

    # num=None with 5 delays should produce 6 readings
    msgs = count([det], num=None, delay=(2 ** i for i in range(5)))
    read_count = len([msg for msg in msgs if msg.command == "read"])
    assert read_count == 6
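# A minimal sketch (not part of the test suite above) of the rule that
# test_count_delay_argument exercises: bp.count consumes num - 1 entries
# from a delay iterable (one delay between each pair of readings), so a
# 5-entry generator supports at most num=6, and num=None runs until the
# delays run out. Assumes bluesky and ophyd are installed.
from bluesky.plans import count
from ophyd.sim import det

msgs = list(count([det], num=6, delay=(0.1 for _ in range(5))))
assert sum(msg.command == "read" for msg in msgs) == 6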
def test_data_key(db, RE):
    RE.subscribe('all', db.mds.insert)
    RE(count([det1]))
    RE(count([det1, det2]))
    result1 = db(data_key='det1')
    result2 = db(data_key='det2')
    assert len(result1) == 2
    assert len(result2) == 1
def test_mixture(hw):
    D = SupplementalData(baseline=[hw.det2],
                         flyers=[hw.flyer1],
                         monitors=[hw.rand])
    original = list(count([hw.det]))
    processed = list(D(count([hw.det])))
    # 2 messages for the monitor, 5 for the flyer, 10 for the baseline
    assert len(processed) == 2 + 5 + 10 + len(original)
def run_exp(delay):  # pragma: no cover
    time.sleep(delay)
    print("running exp")
    p = Publisher(proxy[0], prefix=b"raw")
    RE.subscribe(p)
    det = SynSignal(func=lambda: np.ones(10), name="gr")
    RE(bp.count([det], md=dict(analysis_stage="raw")))
    RE(bp.count([det], md=dict(analysis_stage="pdf")))
def test_get_events(db, RE):
    RE.subscribe('all', db.mds.insert)
    uid, = RE(count([det]))
    h = db[uid]
    assert len(list(db.get_events(h))) == 1

    uid, = RE(count([det], num=7))
    h = db[uid]
    assert len(list(db.get_events(h))) == 7
def test_monitors(hw):
    det = hw.det
    rand = hw.rand
    rand2 = hw.rand2

    # no-op
    D = SupplementalData()
    original = list(count([det]))
    processed = list(D(count([det])))
    assert len(processed) == len(original)

    # one monitor (each monitor adds a 'monitor' and an 'unmonitor' message)
    D.monitors.append(rand)
    original = list(count([det]))
    processed = list(D(count([det])))
    assert len(processed) == 2 + len(original)

    # two monitors
    D.monitors.append(rand2)
    processed = list(D(count([det])))
    assert len(processed) == 4 + len(original)

    # two monitors applied to a plan with consecutive runs
    original = list(count([det])) + list(count([det]))
    processed = list(D(pchain(count([det]), count([det]))))
    assert len(processed) == 8 + len(original)
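# A quick way (a sketch, not from the original tests) to see exactly which
# messages SupplementalData inserts around a plan: print the message
# sequence with bluesky's plan summarizer. Assumes bluesky and ophyd.
from bluesky.plans import count
from bluesky.preprocessors import SupplementalData
from bluesky.simulators import summarize_plan
from ophyd.sim import hw

devices = hw()
D = SupplementalData(monitors=[devices.rand])
summarize_plan(D(count([devices.det])))  # shows the monitor/unmonitor pair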
def insert_imgs(
    RE, reg, n, shape, save_dir, detector_name="pe1_image", **kwargs
):
    """
    Insert images into mds and fs for testing

    Parameters
    ----------
    RE : bluesky.run_engine.RunEngine instance
    reg : Registry instance
    n : int
        Number of images to take
    shape : tuple of ints
        The shape of the resulting images
    save_dir : str
        The directory in which to save the images
    detector_name : str, optional
        Name given to the simulated detectors. Defaults to "pe1_image".

    Returns
    -------
    uid :
        The uid(s) returned by the RunEngine for the light images
    """
    # Create detectors
    dark_det = sim.SynSignalWithRegistry(
        name=detector_name,
        func=lambda: (np.random.random(shape) * 65535).astype("uint16"),
        reg=reg,
        save_path=save_dir,
    )
    light_det = sim.SynSignalWithRegistry(
        name=detector_name,
        func=lambda: (np.random.random(shape) * 65535).astype("uint16"),
        reg=reg,
        save_path=save_dir,
    )
    beamtime_uid = str(uuid4())
    base_md = dict(
        beamtime_uid=beamtime_uid,
        calibration_md=pyFAI_calib,
        bt_wavelength=0.1847,
        folder_tag_list=["sample_name"],
        **kwargs
    )
    # Insert the dark images
    dark_md = base_md.copy()
    dark_md.update(name="test-dark", is_dark=True)
    dark_uid = RE(count([dark_det], num=1), **dark_md)

    # Insert the light images
    light_md = base_md.copy()
    light_md.update(name="test", sc_dk_field_uid=dark_uid)
    uid = RE(count([light_det], num=n), **light_md)
    return uid
def test_get_events_filtering_field(db, RE):
    RE.subscribe('all', db.mds.insert)
    uid, = RE(count([det], num=7))
    h = db[uid]
    assert len(list(db.get_events(h, fields=['det']))) == 7

    with pytest.raises(ValueError):
        list(db.get_events(h, fields=['not_a_field']))

    uids = RE(pchain(count([det1], num=7), count([det2], num=3)))
    headers = db[uids]
    assert len(list(db.get_events(headers, fields=['det1']))) == 7
    assert len(list(db.get_events(headers, fields=['det2']))) == 3
def test_count(RE, hw):
    det = hw.det
    motor = hw.motor
    actual_intensity = []
    col = collector('det', actual_intensity)
    motor.set(0)
    plan = bp.count([det])
    RE(plan, {'event': col})
    assert actual_intensity[0] == 1.

    # multiple counts, via updating attribute
    actual_intensity = []
    col = collector('det', actual_intensity)
    plan = bp.count([det], num=3, delay=0.05)
    RE(plan, {'event': col})
    assert actual_intensity == [1., 1., 1.]
def test_scan_id_lookup(db, RE):
    RE.subscribe('all', db.mds.insert)

    RE.md.clear()
    uid1, = RE(count([det]), marked=True)  # scan_id=1
    assert uid1 == db[1]['start']['uid']

    RE.md.clear()
    uid2, = RE(count([det]))  # scan_id=1 again
    # Now we find uid2 for scan_id=1, but we can get the old one by
    # being more specific.
    assert uid2 == db[1]['start']['uid']
    assert uid1 == db(scan_id=1, marked=True)[0]['start']['uid']
def test_configuration(db, RE):
    det_with_conf = Reader('det_with_conf', {'a': lambda: 1, 'b': lambda: 2})
    RE.subscribe('all', db.mds.insert)
    uid, = RE(count([det_with_conf]), c=3)
    h = db[uid]

    # check that config is not included by default
    ev = next(db.get_events(h))
    assert set(ev['data'].keys()) == set(['a', 'b'])

    # find config in descriptor['configuration']
    ev = next(db.get_events(h, fields=['a', 'b']))
    assert 'b' in ev['data']
    assert ev['data']['b'] == 2
    assert 'b' in ev['timestamps']

    # find config in start doc
    ev = next(db.get_events(h, fields=['a', 'c']))
    assert 'c' in ev['data']
    assert ev['data']['c'] == 3
    assert 'c' in ev['timestamps']

    # find config in stop doc
    ev = next(db.get_events(h, fields=['a', 'exit_status']))
    assert 'exit_status' in ev['data']
    assert ev['data']['exit_status'] == 'success'
    assert 'exit_status' in ev['timestamps']
def test_raise_conditions(key, db, RE):
    RE.subscribe('all', db.mds.insert)
    for _ in range(5):
        RE(count([det]))
    with pytest.raises(ValueError):
        db[key]
def run_exp(delay):  # pragma: no cover
    time.sleep(delay)
    print("running exp")
    p = Publisher(proxy[0], prefix=b"raw")
    RE.subscribe(p)

    # Tiny fake pipeline
    pp = Publisher(proxy[0], prefix=b"an")
    raw_source = Stream()
    SimpleFromEventStream(
        "event",
        ("data", "img"),
        raw_source.starmap(Retrieve({"NPY_SEQ": NumpySeqHandler})),
        principle=True,
    ).map(lambda x: x * 2).SimpleToEventStream(
        ("img2",), analysis_stage="pdf"
    ).starsink(pp)
    RE.subscribe(lambda *x: raw_source.emit(x))

    RE(bp.count([hw.img], md=dict(analysis_stage="raw")))
    print("finished exp")
    p.close()
def test_hints(RE):
    class Detector:
        def __init__(self, name):
            self.name = name
            self.parent = None
            self.hints = {'vis': 'placeholder'}

        def read(self):
            return {}

        def describe(self):
            return {}

        def read_configuration(self):
            return {}

        def describe_configuration(self):
            return {}

    det = Detector('det')
    collector = []
    RE(count([det]), {'descriptor': lambda name, doc: collector.append(doc)})
    doc = collector.pop()
    assert doc['hints']['det'] == {'vis': 'placeholder'}
def test_find_by_float_time(db, RE):
    RE.subscribe('all', db.mds.insert)
    before, = RE(count([det]))
    ttime.sleep(0.25)
    t = ttime.time()
    during, = RE(count([det]))
    ttime.sleep(0.25)
    after, = RE(count([det]))

    # Three runs in total were saved.
    assert len(db()) == 3

    # We'll find the middle one by specifying a time window around its
    # start time.
    header, = db(start_time=t - 0.1, stop_time=t + 0.2)
    assert header['start']['uid'] == during
def take_dark():
    """a plan for taking a single dark frame"""
    print('INFO: closing shutter...')
    yield from bp.abs_set(glbl.shutter, 0)
    if glbl.shutter_control:
        yield from bp.sleep(2)
    print('INFO: taking dark frame....')
    # up to this stage, glbl.pe1c has been configured so the exposure time
    # is correct
    acq_time = glbl.area_det.cam.acquire_time.get()
    num_frame = glbl.area_det.images_per_set.get()
    computed_exposure = acq_time * num_frame

    # update md
    _md = {'sp_time_per_frame': acq_time,
           'sp_num_frames': num_frame,
           'sp_computed_exposure': computed_exposure,
           'sp_type': 'ct',
           # 'sp_uid': str(uuid.uuid4()),  # dark plan doesn't need uid
           'sp_plan_name': 'dark_{}'.format(computed_exposure),
           'dark_frame': True}
    c = bp.count([glbl.area_det], md=_md)
    yield from bp.subs_wrapper(c, {'stop': [_update_dark_dict_list]})
    print('opening shutter...')
    yield from bp.abs_set(glbl.shutter, 1)
    if glbl.shutter_control:
        yield from bp.sleep(2)
def test_flyer_with_collect_asset_documents(RE):
    from ophyd.sim import det, new_trivial_flyer, trivial_flyer
    from bluesky.preprocessors import fly_during_wrapper

    assert hasattr(new_trivial_flyer, 'collect_asset_docs')
    assert not hasattr(trivial_flyer, 'collect_asset_docs')
    RE(fly_during_wrapper(count([det], num=5),
                          [new_trivial_flyer, trivial_flyer]))
def run_exp(delay):  # pragma: no cover
    time.sleep(delay)
    print("running exp")
    p = Publisher(proxy[0], prefix=b"raw")
    RE.subscribe(p)
    RE(bp.count([hw.img], md=dict(analysis_stage="raw")))
def test_export(broker_factory, RE):
    from databroker.broker import Broker
    from filestore.fs import FileStoreRO

    # Subclass ReaderWithFSHandler to implement get_file_list, required for
    # file copying. This should be added upstream in bluesky.
    class Handler(ReaderWithFSHandler):
        def get_file_list(self, datum_kwarg_gen):
            return ['{name}_{index}.npy'.format(name=self._name, **kwargs)
                    for kwargs in datum_kwarg_gen]

    db1 = broker_factory()
    db2 = broker_factory()
    RE.subscribe('all', db1.mds.insert)

    # test mds only
    uid, = RE(count([det]))
    db1.export(db1[uid], db2)
    assert db2[uid] == db1[uid]
    assert list(db2.get_events(db2[uid])) == list(db1.get_events(db1[uid]))

    # test file copying
    if not hasattr(db1.fs, 'copy_files'):
        raise pytest.skip("This filestore does not implement copy_files.")

    dir1 = tempfile.mkdtemp()
    dir2 = tempfile.mkdtemp()
    detfs = ReaderWithFileStore('detfs', {'image': lambda: np.ones((5, 5))},
                                fs=db1.fs, save_path=dir1)
    uid, = RE(count([detfs]))

    # Use a read-only filestore
    mds2 = db1.mds
    fs2 = db1.fs
    fs3 = FileStoreRO(fs2.config, version=1)
    db1 = Broker(fs=fs3, mds=mds2)

    db1.fs.register_handler('RWFS_NPY', Handler)
    db2.fs.register_handler('RWFS_NPY', Handler)

    (from_path, to_path), = db1.export(db1[uid], db2, new_root=dir2)
    assert os.path.dirname(from_path) == dir1
    assert os.path.dirname(to_path) == dir2
    assert db2[uid] == db1[uid]
    image1, = db1.get_images(db1[uid], 'image')
    image2, = db2.get_images(db2[uid], 'image')
    # the copied image should match the original
    assert np.array_equal(image1, image2)
def test_filled(RE, hw, db):
    collector = []

    def collect(name, doc):
        if name == 'event':
            collector.append(doc)

    RE(count([hw.det]), collect)
    event, = collector
    assert event['filled'] == {}

    collector.clear()
    hw.img.reg = db.reg
    RE(count([hw.img]), collect)
    event, = collector
    assert event['filled'] == {'img': False}
def test_process(db, RE):
    RE.subscribe('all', db.mds.insert)
    uid, = RE(count([det]))
    c = itertools.count()

    def f(name, doc):
        next(c)

    db.process(db[uid], f)
    assert next(c) == len(list(db.restream(db[uid])))
def test_process(db, RE, hw):
    RE.subscribe(db.insert)
    uid, = RE(count([hw.det]))
    c = itertools.count()

    def f(name, doc):
        next(c)

    db.process(db[uid], f)
    assert next(c) == len(list(db.restream(db[uid])))
def run_exp(delay):  # pragma: no cover
    time.sleep(delay)
    print("running exp")
    p = Publisher(proxy[0], prefix=b"raw")
    RE.subscribe(p)
    RE.subscribe(db.insert)
    dark, = RE(bp.count([hw.img], md=dict(analysis_stage="raw")))
    flat, = RE(bp.count([hw.img], md=dict(analysis_stage="raw")))
    RE(
        bp.count(
            [hw.img],
            md=dict(
                analysis_stage="raw",
                sc_dk_field_uid=dark,
                sc_flat_field_uid=flat,
            ),
        )
    )
def test_deprecated_stream_method(db, RE, hw):
    RE.subscribe(db.insert)
    uid, = RE(count([hw.det]))
    h = db[uid]

    # h.stream() is the same as h.documents(), but it warns
    expected = list(h.documents())
    with pytest.warns(UserWarning):
        actual = list(h.stream())
    assert actual == expected
def insert_imgs(RE, reg, n, shape, save_dir=tempfile.mkdtemp(), **kwargs):
    """
    Insert images into mds and fs for testing

    Parameters
    ----------
    RE : bluesky.run_engine.RunEngine instance
    reg : Registry instance
    n : int
        Number of images to take
    shape : tuple of ints
        The shape of the resulting images
    save_dir : str, optional
        The directory in which to save the images. Note that the default
        is evaluated once, at import time, as is usual for Python default
        arguments.

    Returns
    -------
    uid :
        The uid(s) returned by the RunEngine for the light images
    """
    # Create detectors
    dark_det = ReaderWithRegistry('pe1_image',
                                  {'pe1_image': lambda: np.ones(shape)},
                                  reg=reg, save_path=save_dir)
    light_det = ReaderWithRegistry('pe1_image',
                                   {'pe1_image': lambda: np.ones(shape)},
                                   reg=reg, save_path=save_dir)

    beamtime_uid = str(uuid4())
    base_md = dict(beamtime_uid=beamtime_uid,
                   calibration_md=pyFAI_calib,
                   bt_wavelength=0.1847,
                   **kwargs)

    # Insert the dark images
    dark_md = base_md.copy()
    dark_md.update(name='test-dark', is_dark=True)
    dark_uid = RE(count([dark_det], num=1), **dark_md)

    # Insert the light images
    light_md = base_md.copy()
    light_md.update(name='test', sc_dk_field_uid=dark_uid)
    uid = RE(count([light_det], num=n), **light_md)
    return uid
def test_full_text_search(db, RE):
    RE.subscribe('all', db.mds.insert)
    uid, = RE(count([det]), foo='some words')
    RE(count([det]))
    assert len(db()) == 2

    try:
        db('some words')
    except NotImplementedError:
        raise pytest.skip("This mongo-like backend does not support $text.")

    assert len(db('some words')) == 1
    header, = db('some words')
    assert header['start']['uid'] == uid

    # Full text search does *not* apply to keys.
    assert len(db('foo')) == 0
def test_max_age(RE):
    """
    Test that a dark frame is reused until it expires, and then re-taken.
    """
    dark_frame_preprocessor = bluesky_darkframes.DarkFramePreprocessor(
        dark_plan=dark_plan, detector=det, max_age=1)
    RE.preprocessors.append(dark_frame_preprocessor)

    # The first execution adds something to the cache.
    RE(count([det]))
    assert len(dark_frame_preprocessor.cache) == 1
    state, = dark_frame_preprocessor.cache

    # A second execution reuses the cache entry and adds nothing.
    RE(count([det]))
    assert len(dark_frame_preprocessor.cache) == 1
    dark_frame_preprocessor.get_snapshot(state)

    # Wait for it to age out.
    time.sleep(1.01)
    with pytest.raises(bluesky_darkframes.NoMatchingSnapshot):
        dark_frame_preprocessor.get_snapshot(state)
def test_get_fields(db, RE, hw):
    RE.subscribe(db.insert)
    uid, = RE(count([hw.det1, hw.det2]))
    actual = db.get_fields(db[uid])
    expected = set(['det1', 'det2'])
    assert actual == expected

    actual = db[uid].fields()
    assert actual == expected

    actual = db[uid].fields('primary')
    assert actual == expected
def test_mid_scan_dark_frames(RE):
    dark_frame_preprocessor = bluesky_darkframes.DarkFramePreprocessor(
        dark_plan=dark_plan, detector=det, max_age=0)
    RE.preprocessors.append(dark_frame_preprocessor)

    def verify_three_dark_frames(name, doc):
        if name == 'stop':
            assert doc['num_events']['dark'] == 3

    RE(count([det], 3), verify_three_dark_frames)
def motscansw(seconds, motor, start, stop, steps,
              samplename='motscan', sampleid=''):
    # TODO: do it more generally
    # yield from bps.mv(sw_det.setexp, seconds)
    yield from bps.mv(sw_det.waxs.cam.acquire_time, seconds)
    yield from bps.mv(sw_det.saxs.cam.acquire_time, seconds)
    md = RE.md
    md['sample'] = samplename
    md['sampleid'] = sampleid
    first_scan_id = None
    dt = datetime.now()
    formatted_date = dt.strftime('%Y-%m-%d')
    for i, pos in enumerate(np.linspace(start, stop, steps)):
        yield from bps.mv(motor, pos)
        uid = (yield from bp.count([sw_det], md=md))
        hdr = db[uid]
        quick_view(hdr)
        if i == 0:
            first_scan_id = hdr.start['scan_id']
            dt = datetime.fromtimestamp(hdr.start['time'])
            formatted_date = dt.strftime('%Y-%m-%d')
        tiff_series.export(hdr.documents(fill=True),
                           file_prefix=('{start[institution]}/'
                                        '{start[user]}/'
                                        '{start[project]}/'
                                        f'{formatted_date}/'
                                        f'{first_scan_id}-'
                                        '{start[scan_id]}'
                                        '-{start[sample]}-'
                                        f'{pos:.2f}-'),
                           directory='Z:/images/users/')
        csv.export(hdr.documents(stream_name='baseline'),
                   file_prefix=('{institution}/'
                                '{user}/'
                                '{project}/'
                                f'{formatted_date}/'
                                f'{first_scan_id}-'
                                '{scan_id}-{sample}-'
                                f'{pos:.2f}-'),
                   directory='Z:/images/users/')
        csv.export(
            hdr.documents(stream_name='Izero Mesh Drain Current_monitor'),
            file_prefix=('{institution}/'
                         '{user}/'
                         '{project}/'
                         f'{formatted_date}/'
                         f'{first_scan_id}-'
                         '{scan_id}-{sample}-'
                         f'{pos:.2f}-'),
            directory='Z:/images/users/')
def test_export_events(RE, hw):
    '''Test that suitcase.csv.export works on events.
    '''
    collector = []

    def collect(name, doc):
        collector.append((name, doc))

    RE.subscribe(collect)
    RE(count([hw.det], 5))

    with tempfile.NamedTemporaryFile(mode='w') as f:
        # We don't actually need f itself, just a filepath to template on.
        meta, *csvs = export(collector, f.name)
    csv, = csvs

    docs = (doc for name, doc in collector)
    start, descriptor, *events, stop = docs
    expected = {}
    expected_dict = {'data': {'det': [], 'seq_num': []}, 'time': []}
    for event in events:
        expected_dict['data']['det'].append(event['data']['det'])
        expected_dict['data']['seq_num'].append(event['seq_num'])
        expected_dict['time'].append(event['time'])
    expected['events'] = pandas.DataFrame(expected_dict['data'],
                                          index=expected_dict['time'])
    expected['events'].index.name = 'time'

    with open(meta) as f:
        actual = json.load(f)

    # Convert lists to tuples so they compare equal in the asserts below.
    for dims in actual['start']['hints']['dimensions']:
        new_dims = []
        for dim in dims:
            if type(dim) is list:
                new_dims.append(tuple(dim))
            else:
                new_dims.append(dim)
        actual['start']['hints']['dimensions'] = [tuple(new_dims)]

    expected.update({
        'start': start,
        'stop': stop,
        'descriptors': {'primary': [descriptor]}
    })
    actual['events'] = pandas.read_csv(csv, index_col=0)

    assert actual.keys() == expected.keys()
    assert actual['start'] == expected['start']
    assert actual['descriptors'] == expected['descriptors']
    assert actual['stop'] == expected['stop']
    assert_frame_equal(expected['events'], actual['events'])
def generate_example_data(callback):
    from ophyd.sim import det, motor1, motor2, motor3

    motor1.set(3.1).wait()
    motor2.set(-1000.02).wait()
    motor3.set(5.01).wait()
    RE = RunEngine()
    sd = SupplementalData(baseline=[motor1, motor2, motor3])
    RE.preprocessors.append(sd)
    RE.md["operator"] = "Dmitri"
    RE(count([det], 5, delay=0.05), RewriteTimes("2020-01-01 9:00", callback))
    RE(count([det], 5, delay=0.05), RewriteTimes("2020-01-01 9:05", callback))
    RE(count([det], 5, delay=0.05), RewriteTimes("2020-01-01 9:07", callback))
    RE(count([det], 5, delay=0.05), RewriteTimes("2020-02-01 9:00", callback))
    RE(count([det], 5, delay=0.05), RewriteTimes("2020-02-01 9:05", callback))
    RE(count([det], 5, delay=0.05), RewriteTimes("2020-02-01 13:00", callback))
    RE(count([det], 5, delay=0.05), RewriteTimes("2020-02-01 15:00", callback))
    RE(count([det], 5, delay=0.05), RewriteTimes("2020-02-01 15:05", callback))
    RE(
        count([det], 5, delay=0.05),
        RewriteTimes("2020-02-01 15:07", callback),
        operator="Michael",
    )
    RE(
        count([det], 5, delay=0.05),
        RewriteTimes("2020-02-01 15:08", callback),
        operator="Michael",
    )
    _generate_newton_data(
        RE,
        callback,
        [
            "2020-02-02 9:00",
            "2020-02-02 10:00",
            "2020-02-02 12:00",
            "2020-02-02 13:00",
            "2020-02-02 15:00",
            "2020-02-02 17:00",
            "2020-02-02 19:00",
        ],
    )
def test_no_descriptor_name(db, RE, hw):
    def local_insert(name, doc):
        doc.pop('name', None)
        return db.insert(name, doc)

    RE.subscribe(local_insert)
    uid, = get_uids(RE(count([hw.det])))
    h = db[uid]
    db.get_fields(h, name='primary')
    assert h['start']['uid'] == uid
    assert len(h.descriptors) == 1
    assert h.stream_names == ['primary']
def test_disable(RE):
    dark_frame_preprocessor = bluesky_darkframes.DarkFramePreprocessor(
        dark_plan=dark_plan, detector=det, max_age=3)
    RE.preprocessors.append(dark_frame_preprocessor)
    dark_frame_preprocessor.disable()

    def verify_no_dark_stream(name, doc):
        if name == 'stop':
            assert 'dark' not in doc['num_events']

    RE(count([det]), verify_no_dark_stream)
def run_exp(delay):  # pragma: no cover
    time.sleep(delay)
    print("running exp")
    p = Publisher(proxy[0], prefix=b"raw")
    RE.subscribe(p)
    z = np.zeros(10)
    z[3] = 1
    x = SynSignal(func=lambda: np.arange(10), name="x")
    y = SynSignal(func=lambda: z, name="y")
    RE(bp.count([x, y], md=dict(analysis_stage="raw")))
def sleep_and_count(detectors, waitTime=2):
    """Sleep for waitTime seconds and then count.

    :param detectors: ophyd counters and signals
    :param waitTime: sleep time in seconds
    """
    # wait before counting
    yield from bps.sleep(waitTime)
    # take a single reading of the detectors
    yield from bp.count(detectors)
def ct(self, line):
    if line.strip():
        dets = eval(line, self.shell.user_ns)
    else:
        dets = self.detectors
    plan = bp.count(dets)
    print("[This data will not be saved. "
          "Use the RunEngine to collect data.]")
    self.RE(plan, _ct_callback)
    self._ensure_idle()
    return None
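# Usage sketch (an assumption, not from the source): when this ct method is
# registered as an IPython line magic, in the style of
# bluesky.magics.BlueskyMagics, the line text is evaluated as an expression
# in the user namespace, so both of these forms work:
#
#   In [1]: %ct               # counts the magic's default self.detectors
#   In [2]: %ct [det1, det2]  # evaluates the expression and counts those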
def test_prepare_hook_deep_copy(db, RE, hw):
    # configure to return plain dicts
    db.prepare_hook = lambda name, doc: copy.deepcopy(dict(doc))
    RE.subscribe(db.insert)
    uid, = RE(count([hw.det]))
    for h in (db[uid], list(db())[0]):
        for doc in _get_docs(h):
            assert not isinstance(doc, DeprecatedDoct)
            assert not isinstance(doc, doct.Document)
def test_partial_uid_lookup(db, RE):
    RE.subscribe('all', db.mds.insert)

    # Create enough runs that there are two that begin with the same char.
    for _ in range(50):
        RE(count([det]))

    with pytest.raises(ValueError):
        # Some letter will happen to be the first letter of more than
        # one uid.
        for first_letter in string.ascii_lowercase:
            db[first_letter]
def test_partial_uid_lookup(db, RE, hw):
    RE.subscribe(db.insert)

    # Create enough runs that there are two that begin with the same char.
    for _ in range(50):
        RE(count([hw.det]))

    with pytest.raises(ValueError):
        # Some letter will happen to be the first letter of more than
        # one uid.
        for first_letter in string.ascii_lowercase:
            db[first_letter]
def test_export_noroot(broker_factory, RE, tmpdir, hw):
    from ophyd import sim

    class BrokenSynRegistry(sim.SynSignalWithRegistry):
        def stage(self):
            self._file_stem = sim.short_uid()
            self._path_stem = os.path.join(self.save_path, self._file_stem)
            self._datum_counter = itertools.count()
            # This is temporarily more complicated than it will be in the
            # future. It needs to support old configurations that have a
            # registry.
            resource = {'spec': self._spec,
                        'root': '',
                        'resource_path': self._path_stem,
                        'resource_kwargs': {},
                        'path_semantics': os.name}
            # If a Registry is set, we need to allow it to generate the uid
            # for us.
            if self.reg is not None:
                # register_resource has accidentally different parameter
                # names...
                self._resource_uid = self.reg.register_resource(
                    rpath=resource['resource_path'],
                    rkwargs=resource['resource_kwargs'],
                    root=resource['root'],
                    spec=resource['spec'],
                    path_semantics=resource['path_semantics'])
            # If a Registry is not set, we need to generate the uid.
            else:
                self._resource_uid = sim.new_uid()
            resource['uid'] = self._resource_uid
            self._asset_docs_cache.append(('resource', resource))

    dir1 = str(tmpdir.mkdir('a'))
    dir2 = str(tmpdir.mkdir('b'))
    db1 = broker_factory()
    db2 = broker_factory()
    detfs = BrokenSynRegistry(name='detfs',
                              func=lambda: np.ones((5, 5)),
                              reg=db1.reg, save_path=dir1)
    db1.reg.register_handler('NPY_SEQ', sim.NumpySeqHandler)
    db2.reg.register_handler('NPY_SEQ', sim.NumpySeqHandler)
    RE.subscribe(db1.insert)
    uid, = RE(count([detfs], num=3))
    file_pairs = db1.export(db1[uid], db2, new_root=dir2)
    for from_path, to_path in file_pairs:
        assert os.path.dirname(from_path) == dir1
        assert os.path.dirname(to_path) == os.path.join(dir2, dir1[1:])
    assert db2[uid] == db1[uid]
    image1s = db1.get_images(db1[uid], 'detfs')
    image2s = db2.get_images(db2[uid], 'detfs')
    for im1, im2 in zip(image1s, image2s):
        assert np.array_equal(im1, im2)
def test_externals(db, RE, hw):
    def external_fetcher(start, stop):
        return start['uid']

    RE.subscribe(db.insert)
    uid, = RE(count([hw.det], 5))
    db.external_fetchers['suid'] = external_fetcher
    h = db[uid]
    assert h.ext.suid == h.start['uid']
def snapsw(seconds, samplename='', sampleid='', num_images=1, dark=0):
    # TODO: do it more generally
    # yield from bps.mv(sw_det.setexp, seconds)
    yield from bps.mv(sw_det.waxs.cam.acquire_time, seconds)
    yield from bps.mv(sw_det.saxs.cam.acquire_time, seconds)
    yield from bps.mv(sw_det.waxs.cam.shutter_close_delay, 200)
    yield from bps.mv(sw_det.saxs.cam.shutter_close_delay, 200)
    yield from bps.mv(sw_det.waxs.cam.shutter_open_delay, 200)
    yield from bps.mv(sw_det.saxs.cam.shutter_open_delay, 200)
    if dark:
        yield from bps.mv(sw_det.saxs.cam.shutter_mode, 0)
        if samplename == "":
            samplename = "dark"
    else:
        yield from bps.mv(sw_det.saxs.cam.shutter_mode, 2)
        if samplename == "":
            samplename = "snap"
    md = RE.md
    md['sample'] = samplename
    md['sampleid'] = sampleid
    md['exptime'] = seconds
    uid = (yield from bp.count([sw_det], num=num_images, md=md))
    hdr = db[uid]
    quick_view(hdr)
    dt = datetime.fromtimestamp(hdr.start['time'])
    formatted_date = dt.strftime('%Y-%m-%d')
    energy = hdr.table(stream_name='baseline')['Beamline Energy_energy'][1]
    tiff_series.export(hdr.documents(fill=True),
                       file_prefix=('{start[institution]}/'
                                    '{start[user]}/'
                                    '{start[project]}/'
                                    f'{formatted_date}/'
                                    '{start[scan_id]}-'
                                    '{start[sample]}-'
                                    f'{energy:.2f}eV-'),
                       directory='Z:/images/users/')
    csv.export(hdr.documents(stream_name='baseline'),
               file_prefix=('{institution}/'
                            '{user}/'
                            '{project}/'
                            f'{formatted_date}/'
                            '{scan_id}-'
                            '{sample}-'
                            f'{energy:.2f}eV-'),
               directory='Z:/images/users/')
    csv.export(hdr.documents(stream_name='Izero Mesh Drain Current_monitor'),
               file_prefix=('{institution}/'
                            '{user}/'
                            '{project}/'
                            f'{formatted_date}/'
                            '{scan_id}-'
                            '{sample}-'
                            f'{energy:.2f}eV-'),
               directory='Z:/images/users/')
def test_num_events(RE, hw, db):
    RE.subscribe(db.insert)
    uid1, = RE(count([]))
    h = db[uid1]
    assert h.stop['num_events'] == {}

    uid2, = RE(count([hw.det], 5))
    h = db[uid2]
    assert h.stop['num_events'] == {'primary': 5}

    # A baseline reading is taken at the start and end of each run, so the
    # 'baseline' stream always has 2 events.
    sd = SupplementalData(baseline=[hw.det])
    RE.preprocessors.append(sd)
    uid3, = RE(count([]))
    h = db[uid3]
    assert h.stop['num_events'] == {'baseline': 2}

    uid4, = RE(count([hw.det], 5))
    h = db[uid4]
    assert h.stop['num_events'] == {'primary': 5, 'baseline': 2}
def get_catalog():
    RE = RunEngine()

    directory = tempfile.TemporaryDirectory().name
    for i in range(1, 5):
        with Serializer(directory) as serializer:
            RE(scan([det], motor, -1, 1, 5 * i), serializer)
        with Serializer(directory) as serializer:
            RE(count([random_img], 3), serializer)

    catalog = BlueskyMsgpackCatalog(f"{directory}/*.msgpack")
    return catalog
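# A brief usage sketch (not from the original source) for the catalog that
# get_catalog returns, assuming databroker's catalog API: look up the most
# recent run and read its 'primary' stream as an xarray Dataset.
catalog = get_catalog()
run = catalog[-1]           # most recent run, as with db[-1] in v1
data = run.primary.read()   # xarray Dataset of the 'primary' stream
print(list(data.data_vars))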
def test_prepare_hook_old_style(db, RE, hw):
    # configure to return old-style doct.Document objects
    db.prepare_hook = wrap_in_doct
    RE.subscribe(db.insert)
    uid, = RE(count([hw.det]))
    # Test Broker.__getitem__ and Broker.__call__ means of creating Headers.
    for h in (db[uid], list(db())[0]):
        for doc in _get_docs(h):
            assert not isinstance(doc, DeprecatedDoct)
            assert isinstance(doc, doct.Document)
def test_nexus_export_single(db_all, RE):
    """
    Test the NeXus HDF5 export with a single header and verify that the
    output is correct.
    """
    RE.subscribe(db_all.insert)
    RE(count([det], 5, delay=0.1), owner="Tom")
    hdr = db_all[-1]
    fname = tempfile.NamedTemporaryFile()
    nexus.export(hdr, fname.name, db=db_all)
    shallow_header_verify(fname.name, hdr, db_all)
    validate_basic_NeXus_structure(fname.name)
def count_dets(_dets, _full_md):
    _count_plan = bp.count(_dets, md=_full_md)
    _count_plan = bpp.subs_wrapper(_count_plan, LiveTable(_dets))
    # Make sure the shutter is closed at the end, even if the plan fails.
    _count_plan = bpp.finalize_wrapper(
        _count_plan,
        bps.abs_set(xpd_configuration['shutter'],
                    XPD_SHUTTER_CONF['close'],
                    wait=True))
    yield from bps.abs_set(xpd_configuration['shutter'],
                           XPD_SHUTTER_CONF['open'], wait=True)
    yield from _count_plan
def test_broker_base_no_unpack(RE, hw, db):
    class BrokerChecker(BrokerCallbackBase):
        def __init__(self, field, *, db=None):
            super().__init__(field, db=db)

        def event(self, doc):
            super().event(doc)
            assert isinstance(doc["data"][self.fields[0]], np.ndarray)

    bc = BrokerChecker(("img",), db=db)
    RE.subscribe(bc)
    RE(count([hw.direct_img]))
def _ct_dark(detectors, gain_bit_input, gain_bit_dict):
    yield from bps.mv(fccd.cam.fcric_gain, gain_bit_input)
    # if _gain_bit_input != 0:
    #     yield from bps.sleep(fccd.cam.acquire_period.value*2.01)
    # This has to be 2 until we can selectively remove dark images
    get_fastccd_images()
    print('\n\nGain bit set to {} for a gain value of {}\n'.format(
        gain_bit_input, gain_bit_dict.get(gain_bit_input)))
    # TODO use md csxtools dark correction
    yield from bp.count(detectors, md={'fccd': {
        'image': 'dark',
        'gain': gain_bit_dict.get(gain_bit_input)}})
def test_find_by_string_time(db, RE):
    RE.subscribe('all', db.mds.insert)
    uid, = RE(count([det]))

    today = date.today()
    tomorrow = today + timedelta(days=1)
    today_str = today.strftime('%Y-%m-%d')
    tomorrow_str = tomorrow.strftime('%Y-%m-%d')
    day_after_tom = today + timedelta(days=2)
    day_after_tom_str = day_after_tom.strftime('%Y-%m-%d')

    assert len(db(start_time=today_str, stop_time=tomorrow_str)) == 1
    assert len(db(start_time=tomorrow_str, stop_time=day_after_tom_str)) == 0
def test_find_by_float_time(db_empty, RE, hw):
    db = db_empty
    RE.subscribe(db.insert)
    before, = RE(count([hw.det]))
    ttime.sleep(0.25)
    t = ttime.time()
    during, = RE(count([hw.det]))
    ttime.sleep(0.25)
    after, = RE(count([hw.det]))

    assert len(list(db())) == 3

    # We'll find the middle one by specifying a time window around its
    # start time.
    header, = db(since=t - 0.1, until=t + 0.2)
    assert header['start']['uid'] == during

    # Test the old names.
    with pytest.warns(UserWarning):
        header, = db(start_time=t - 0.1, stop_time=t + 0.2)
    assert header['start']['uid'] == during
def test_prepare_hook_default(db, RE, hw):
    RE.subscribe(db.insert)
    uid, = RE(count([hw.det]))
    # Check the default: a subclass of doct.Document that warns when you
    # use getattr in place of getitem.
    assert db.prepare_hook == wrap_in_deprecated_doct
    h = db[uid]
    for doc in _get_docs(h):
        assert isinstance(doc, DeprecatedDoct)
        assert isinstance(doc, doct.Document)
def test_plan_md(RE, hw):
    mutable = []
    md = {'color': 'red'}

    def collector(name, doc):
        mutable.append(doc)

    # test generator
    mutable.clear()
    RE(count([hw.det], md=md), collector)
    assert 'color' in mutable[0]

    # test Plan with explicit __init__
    mutable.clear()
    RE(bp.count([hw.det], md=md), collector)
    assert 'color' in mutable[0]

    # test Plan with implicit __init__ (created via metaclass)
    mutable.clear()
    RE(bp.scan([hw.det], hw.motor, 1, 2, 2, md=md), collector)
    assert 'color' in mutable[0]
def test_old_dark_plan_signature(RE):
    """
    In bluesky-darkframes < 0.4.0, we expected dark_plan to take no args.
    Now we expect it to accept the detector as an argument. Check that the
    old usage still works, but warns.
    """
    def old_dark_plan():
        return (yield from dark_plan(det))

    with pytest.warns(UserWarning, match="dark_plan"):
        dark_frame_preprocessor = bluesky_darkframes.DarkFramePreprocessor(
            dark_plan=old_dark_plan, detector=det, max_age=3)
    RE.preprocessors.append(dark_frame_preprocessor)

    def verify_one_dark_frame(name, doc):
        if name == 'stop':
            assert doc['num_events']['dark'] == 1

    RE(count([det]), verify_one_dark_frame)
    RE(count([det], 3), verify_one_dark_frame)
def test_export(RE, hw, tmpdir, db):
    rt = ExportCallback(str(tmpdir) + "/export_test",
                        handler_reg={"NPY_SEQ": NumpySeqHandler})
    RE.subscribe(lambda n, d: db.insert(*rt(n, d)))
    RE(count([hw.img], 1))
    for (n, d), (n2, d2) in zip(db[-1].documents(fill=True),
                                db[-1].documents()):
        if n == "event":
            # The filled document carries the image data; the unfilled one
            # still holds a datum reference.
            data = d["data"]["img"]
            np.testing.assert_allclose(data, np.ones((10, 10)))
            assert d["data"]["img"] != d2["data"]["img"]
def dark_light_plan(dets, shutter=fs, md={}):
    '''
    Simple acquisition plan:
    - Close shutter, take a dark image; open shutter, take a light image.

    dets : detectors to read from
    motors : motors to take readings from (not fully implemented yet)
    fs : fast shutter, high is closed
    sample_name : the sample name

    Example usage:
    >>> RE(dark_light_plan())
    '''
    if I0 not in dets:
        dets.append(I0)
    if I1 not in dets:
        dets.append(I1)

    start_time = time.time()
    uids = []

    # close fast shutter, take a dark
    yield from bps.mv(fs, 1)
    mdd = md.copy()
    mdd.update(im_type='dark')
    uid = yield from bp.count(dets, md=mdd)
    uids.append(uid)

    # open fast shutter, take a light
    yield from bps.mv(fs, 0)
    mdl = md.copy()
    mdl.update(im_type='primary')
    uid = yield from bp.count(dets, md=mdl)
    uids.append(uid)

    end_time = time.time()
    print(f'Duration: {end_time - start_time:.3f} sec')
    plot_dark_corrected(db[uids])
    return uids