def test_rank_models():
    RE = RunEngine()
    # Create accurate fit
    motor = SynAxis(name='motor')
    det = SynSignal(name='centroid',
                    func=lambda: 5 * motor.read()['motor']['value'] + 2)
    fit1 = LinearFit('centroid', 'motor', update_every=None, name='Accurate')
    RE(scan([det], motor, -1, 1, 50), fit1)
    # Create inaccurate fit
    det2 = SynSignal(name='centroid',
                     func=lambda: 25 * motor.read()['motor']['value'] + 2)
    fit2 = LinearFit('centroid', 'motor', update_every=None, name='Inaccurate')
    RE(scan([det2], motor, -1, 1, 50), fit2)
    # Create mildly inaccurate fit
    det3 = SynSignal(name='centroid',
                     func=lambda: 12 * motor.read()['motor']['value'] + 2)
    fit3 = LinearFit('centroid', 'motor', update_every=None,
                     name='Mildly Inaccurate')
    RE(scan([det3], motor, -1, 1, 50), fit3)
    # Rank models against the target value at x=4
    ranking = rank_models([fit2, fit1, fit3], target=22, x=4)
    assert ranking[0] == fit1
    assert ranking[1] == fit3
    assert ranking[2] == fit2
def describe_plans(self):
    from bluesky import RunEngine
    import bluesky.plans as bp
    from bluesky.callbacks.fitting import PeakStats

    start, end = 10, 13
    min_step = 0.01
    max_step = 0.15
    min_change = 1
    steps = 10
    dets = [
        self.DEVICES['inj_kicker'].current,
        self.DEVICES['inj_kicker'].offset
    ]
    indep = self.DEVICES['inj_kicker'].offset
    plan = bp.scan(dets, indep, start, end, steps)
    self.PLANS.append(plan)

    start, end = 11, 12
    steps = 30
    plan2 = bp.scan(dets, indep, start, end, steps)
    self.PLANS.append(plan2)
def test_rank_models():
    RE = RunEngine()
    # Create accurate fit
    motor = Mover('motor', {'motor': lambda x: x}, {'x': 0})
    det = Reader('det',
                 {'centroid': lambda: 5 * motor.read()['motor']['value'] + 2})
    fit1 = LinearFit('centroid', 'motor', update_every=None, name='Accurate')
    RE(scan([det], motor, -1, 1, 50), fit1)
    # Create inaccurate fit
    det2 = Reader(
        'det', {'centroid': lambda: 25 * motor.read()['motor']['value'] + 2})
    fit2 = LinearFit('centroid', 'motor', update_every=None, name='Inaccurate')
    RE(scan([det2], motor, -1, 1, 50), fit2)
    # Create mildly inaccurate fit
    det3 = Reader(
        'det', {'centroid': lambda: 12 * motor.read()['motor']['value'] + 2})
    fit3 = LinearFit('centroid', 'motor', update_every=None,
                     name='Mildly Inaccurate')
    RE(scan([det3], motor, -1, 1, 50), fit3)
    # Rank models
    ranking = rank_models([fit2, fit1, fit3], target=22, x=4)
    assert ranking[0] == fit1
    assert ranking[1] == fit3
    assert ranking[2] == fit2
def test_to_event_model_new_api_e_stop(RE, hw):
    source = Stream()
    t = FromEventStream("event", ("data", "motor"), source, principle=True)
    assert t.principle

    n = simple_to_event_stream_new_api(
        {t: {"data_keys": {"ct": {"units": "arb", "precision": 2}}}}
    )
    tt = t.sink_to_list()
    p = n.pluck(0).sink_to_list()
    d = n.pluck(1).sink_to_list()

    def f(*x):
        # Drop the stop document to emulate an emergency stop.
        if x[0] == "stop":
            return
        source.emit(x)

    RE.subscribe(f)

    RE(scan([hw.motor], hw.motor, 0, 9, 10))
    rs = d[0]["uid"]
    assert tt
    assert set(p) == {"start", "event", "descriptor"}
    assert d[1]["hints"] == {"analyzer": {"fields": ["ct"]}}
    assert d[1]["data_keys"]["ct"]["units"] == "arb"

    ll = len(d)

    RE(scan([hw.motor], hw.motor, 0, 9, 10))
    assert d[ll]["run_start"] == rs
    assert set(p) == {"start", "stop", "event", "descriptor"}
def test_dbfriendly(RE, hw):
    source = Stream()
    t = FromEventStream("event", ("data", "motor"), source, principle=True)
    z = t.map(op.add, 1)
    n = ToEventStream(z, "out").DBFriendly()
    d = n.pluck(1).sink_to_list()
    RE.subscribe(unstar(source.emit))

    RE(scan([hw.motor], hw.motor, 0, 9, 10))
    assert isinstance(d[0]["graph"], dict)
    h1 = d[0].get("graph_hash")
    assert h1

    d.clear()
    RE(scan([hw.motor], hw.motor, 0, 9, 10))
    h2 = d[0].get("graph_hash")
    assert h1 == h2
    assert len(d) == 10 + 3

    d.clear()
    z.args = (2,)
    RE(scan([hw.motor], hw.motor, 0, 9, 10))
    h2 = d[0].get("graph_hash")
    assert h1 != h2
    assert len(d) == 10 + 3
def test_disable(RE, hw):
    det, motor = hw.ab_det, hw.motor
    bec = BestEffortCallback()
    RE.subscribe(bec)

    bec.disable_table()

    RE(scan([det], motor, 1, 5, 5))
    assert bec._table is None

    bec.enable_table()

    RE(scan([det], motor, 1, 5, 5))
    assert bec._table is not None

    bec.peaks.com
    bec.peaks['com']
    assert ast.literal_eval(repr(bec.peaks)) == vars(bec.peaks)

    bec.clear()
    assert bec._table is None

    # smoke test
    bec.disable_plots()
    bec.enable_plots()
    bec.disable_baseline()
    bec.enable_baseline()
    bec.disable_heading()
    bec.enable_heading()
def test_check_limits(hw):
    det = hw.det
    motor = hw.motor
    # The motor object does not currently implement limits.
    # Use an assert to help us out if this changes in the future.
    assert not hasattr(motor, 'limits')
    # # check_limits should warn if it can't find check_value
    # TODO: Is there _any_ object to test?
    # with pytest.warns(UserWarning):
    #     check_limits(scan([det], motor, -1, 1, 3))

    # monkey-patch some limits
    motor.limits = (-2, 2)
    # check_limits should do nothing here
    check_limits(scan([det], motor, -1, 1, 3))

    # check_limits should error if limits are exceeded only if object raises
    # this object does not raise
    check_limits(scan([det], motor, -3, 3, 3))

    # check_limits should raise if limits are equal only if object raises
    # this object does not raise
    motor.limits = (2, 2)
    check_limits(scan([det], motor, -1, 1, 3))
def generate_example_catalog(data_path):
    data_path = Path(data_path)

    def factory(name, doc):
        serializer = Serializer(data_path / 'abc')
        serializer('start', doc)
        return [serializer], []

    RE = RunEngine()
    sd = SupplementalData()
    RE.preprocessors.append(sd)
    sd.baseline.extend([motor1, motor2])
    rr = RunRouter([factory])
    RE.subscribe(rr)
    RE(count([det]))
    RE(count([noisy_det], 5))
    RE(scan([det], motor, -1, 1, 7))
    RE(grid_scan([det4], motor1, -1, 1, 4, motor2, -1, 1, 7, False))
    RE(scan([det], motor, -1, 1, motor2, -1, 1, 5))
    RE(count([noisy_det, det], 5))
    RE(count([random_img], 5))
    RE(count([img], 5))

    def factory(name, doc):
        serializer = Serializer(data_path / 'xyz')
        serializer('start', doc)
        return [serializer], []

    RE = RunEngine()
    rr = RunRouter([factory])
    RE.subscribe(rr)
    RE(count([det], 3))

    catalog_filepath = data_path / 'catalog.yml'
    with open(catalog_filepath, 'w') as file:
        file.write(f'''
sources:
  abc:
    description: Some imaginary beamline
    driver: bluesky-jsonl-catalog
    container: catalog
    args:
      paths: {Path(data_path) / 'abc' / '*.jsonl'}
      handler_registry:
        NPY_SEQ: ophyd.sim.NumpySeqHandler
    metadata:
      beamline: "00-ID"
  xyz:
    description: Some imaginary beamline
    driver: bluesky-jsonl-catalog
    container: catalog
    args:
      paths: {Path(data_path) / 'xyz' / '*.jsonl'}
      handler_registry:
        NPY_SEQ: ophyd.sim.NumpySeqHandler
    metadata:
      beamline: "99-ID"
''')
    return str(catalog_filepath)
def my_plan():
    motor = hw.motor
    det = hw.det
    motor.delay = 1

    plan = bp.scan([det], motor, -5, 5, 25)
    plan = subs_wrapper(bp.scan([det], motor, -5, 5, 25),
                        LivePlot(det.name, motor.name))
    return (yield from plan)
def test_per_step(RE, hw):
    # Check default behavior, using one motor and then two.
    RE(scan([hw.det], hw.motor, -1, 1, 3, per_step=one_nd_step))
    RE(
        scan([hw.det], hw.motor, -1, 1, hw.motor2, -1, 1, 3,
             per_step=one_nd_step))
    RE(inner_product_scan([hw.det], 3, hw.motor, -1, 1, per_step=one_nd_step))
    RE(
        inner_product_scan([hw.det], 3, hw.motor, -1, 1, hw.motor2, -1, 1,
                           per_step=one_nd_step))
    # Check that scan still accepts the old one_1d_step signature:
    RE(scan([hw.det], hw.motor, -1, 1, 3, per_step=one_1d_step))
    RE(rel_scan([hw.det], hw.motor, -1, 1, 3, per_step=one_1d_step))

    # Test that various error paths include a useful error message identifying
    # that the problem is with 'per_step':

    # You can't use the one_1d_step signature with more than one motor.
    with pytest.raises(TypeError) as excinfo:
        RE(
            scan([hw.det], hw.motor, -1, 1, hw.motor2, -1, 1, 3,
                 per_step=one_1d_step))
    assert excinfo.match("Signature of per_step assumes 1D trajectory")

    # The signature must be either like one_1d_step or one_nd_step:
    def bad_sig(detectors, mtr, step):
        ...

    with pytest.raises(TypeError) as excinfo:
        RE(scan([hw.det], hw.motor, -1, 1, 3, per_step=bad_sig))
    assert excinfo.match("per_step must be a callable with the signature")
def test_scan_num(RE, hw):
    RE(bp.scan([hw.det], hw.motor1, -1, 1, num=1))
    RE(bp.scan([hw.det], hw.motor1, -1, 1, num=1.0))
    with pytest.raises(ValueError):
        RE(bp.scan([hw.det], hw.motor1, -1, 1, num=0))
    with pytest.raises(ValueError):
        RE(bp.scan([hw.det], hw.motor1, -1, 1, num=0.5))
    with pytest.raises(ValueError):
        RE(bp.scan([hw.det], hw.motor1, -1, 1, num=float('nan')))
def test_daq_step_scan_args(hw, daq, daq_step_scan):
    """
    Basic args and message inspection tests.

    Can I decorate a scan? Can I call a decorated scan at all?
    Does a decorated scan produce the messages I expect?
    """
    logger.debug('test_daq_step_scan_args')

    def assert_daq_messages(msg_list):
        """
        Make sure the msg_list is properly mutated.

        Checks for a daq configure message with a controls arg.
        Checks for a daq trigger/read in every bundle.
        """
        found_configure = False
        found_trigger = False
        found_read = False

        for msg in msg_list:
            if msg.command == 'configure' and msg.obj is daq:
                found_configure = True
                assert msg.kwargs['controls'] == [hw.motor]
            elif msg.command == 'trigger' and msg.obj is daq:
                found_trigger = True
            elif msg.command == 'read' and msg.obj is daq:
                found_read = True

        assert found_configure, 'Did not find daq configure in msg list.'
        assert found_trigger, 'Did not find daq trigger in msg list.'
        assert found_read, 'Did not find daq read in msg list.'

    daq_with_det = list(daq_step_scan([hw.det], hw.motor, 0, 10, 11,
                                      events=10, record=False, use_l3t=True))
    assert_daq_messages(daq_with_det)
    daq_none_det = list(daq_step_scan([], hw.motor, 0, 10, 11,
                                      events=10, record=False, use_l3t=True))
    assert_daq_messages(daq_none_det)

    def assert_no_lost_msg(daq_msg_list, nodaq_msg_list):
        """
        Make sure no message from the original plan is lost.
        """
        daq_without_daq = [msg for msg in daq_msg_list if msg.obj is not daq]
        assert daq_without_daq == nodaq_msg_list

    # Compare the daq-decorated plans (minus daq messages) against the
    # equivalent undecorated plans.
    nodaq_with_det = list(bp.scan([hw.det], hw.motor, 0, 10, 11))
    assert_no_lost_msg(daq_with_det, nodaq_with_det)
    nodaq_none_det = list(bp.scan([], hw.motor, 0, 10, 11))
    assert_no_lost_msg(daq_none_det, nodaq_none_det)
def test_old_module_name(hw):
    det = hw.det
    motor = hw.motor
    motor1 = hw.motor1
    motor2 = hw.motor2
    from bluesky.plan_tools import (print_summary, print_summary_wrapper,
                                    plot_raster_path)
    with pytest.warns(UserWarning):
        print_summary(scan([det], motor, -1, 1, 10))
    with pytest.warns(UserWarning):
        list(print_summary_wrapper(scan([det], motor, -1, 1, 10)))
    with pytest.warns(UserWarning):
        plan = grid_scan([det], motor1, -5, 5, 10, motor2, -7, 7, 15, True)
        plot_raster_path(plan, 'motor1', 'motor2', probe_size=.3)
def _generate_simulation_data():
    """Private function to insert data into exp_db."""
    if os.environ['XPDAN_SETUP'] != str(2):
        raise RuntimeError("ONLY insert data if you are running "
                           "simulation")
    # simulated det
    pe1c = SimulatedPE1C('pe1c',
                         {'pe1_image': lambda: np.random.randn(25, 25)})
    # TODO : add md schema later
    RE = RunEngine({})
    RE.subscribe(an_glbl['exp_db'].db.insert, 'all')
    RE(count([pe1c]))
    RE(scan([pe1c], motor, 1, 5, 5))
    RE(scan([pe1c], motor, 1, 10, 10))
def test_SaveTiff(RE, hw, tmpdir):
    sbc = SaveTiff(
        handler_reg={"NPY_SEQ": NumpySeqHandler},
        template="{base_folder}/{folder_prefix}/{start[hello]}{"
                 "__independent_vars__}{ext}",
        base_folders=tmpdir.strpath,
    )
    L = []
    RE.subscribe(lambda *x: L.append(x))
    RE(
        bp.scan(
            [hw.img],
            hw.motor,
            0,
            10,
            1,
            md={
                "hello": "world",
                "folder_tag_list": ["a", "b", "c"],
                "a": "a",
                "b": "b",
                "c": "c",
            },
        )
    )
    for n, d in L:
        sbc(n, d)
        if n == "event":
            assert os.path.exists(
                tmpdir.strpath + "/a/b/c//world_motor_0,000_arb_img.tiff"
            )
def _gen(self):
    return scan(self.detectors, self.motor, self.start, self.stop,
                self.num, md=self.md)
def scan_gui_plan():
    yield from scan(self.dets, self.motor,
                    self.start.value(), self.stop.value(),
                    self.steps.value(),
                    md={'created_by': 'GUI'})
def scan_bpm(bpm_num, bpm_motor, start, end, step):
    detector_name = 'bpm' + str(bpm_num)
    device_name = 'mbpm' + str(bpm_num)
    motor_name = '_'.join((device_name, bpm_motor))
    detector = globals()[detector_name]
    device = globals()[device_name]
    motor = getattr(device, bpm_motor)
    egu = motor.motor_egu.get()

    plan = bp.scan([detector], motor, start, end, step)
    result_uid = RE(plan)

    table = db[result_uid].table()
    channels = ['A', 'B', 'C', 'D']
    ydata = [table['_'.join((detector_name, ch.lower()))] for ch in channels]
    xdata = table[motor_name]

    plt.xlabel('Motor position ({})'.format(egu))
    plt.ylabel('Currents')
    plt.title('BPM {} Currents vs Motor Position'.format(bpm_num))
    plots = [
        plt.plot(xdata, ydata[i], label=ch)
        for i, ch in enumerate(channels)
    ]
    plt.legend(channels)
    return result_uid
def test_live_fit_plot(fresh_RE):
    RE = fresh_RE
    try:
        import lmfit
    except ImportError:
        raise pytest.skip('requires lmfit')

    def gaussian(x, A, sigma, x0):
        return A * np.exp(-(x - x0)**2 / (2 * sigma**2))

    model = lmfit.Model(gaussian)
    init_guess = {
        'A': 2,
        'sigma': lmfit.Parameter('sigma', 3, min=0),
        'x0': -0.2
    }
    livefit = LiveFit(model, 'det', {'x': 'motor'}, init_guess,
                      update_every=50)
    lfplot = LiveFitPlot(livefit, color='r')
    lplot = LivePlot('det', 'motor', ax=plt.gca(), marker='o', ls='none')
    RE(scan([det], motor, -1, 1, 50), [lplot, lfplot])
    expected = {'A': 1, 'sigma': 1, 'x0': 0}
    for k, v in expected.items():
        assert np.allclose(livefit.result.values[k], v, atol=1e-6)
def test_live_fit_plot(RE, hw):
    try:
        import lmfit
    except ImportError:
        raise pytest.skip("requires lmfit")

    def gaussian(x, A, sigma, x0):
        return A * np.exp(-(x - x0)**2 / (2 * sigma**2))

    model = lmfit.Model(gaussian)
    init_guess = {
        "A": 2,
        "sigma": lmfit.Parameter("sigma", 3, min=0),
        "x0": -0.2,
    }
    livefit = LiveFit(model, "det", {"x": "motor"}, init_guess,
                      update_every=50)
    lfplot = LiveFitPlot(livefit, color="r")
    lplot = LivePlot("det", "motor", ax=plt.gca(), marker="o", ls="none")
    RE(scan([hw.det], hw.motor, -1, 1, 50), [lplot, lfplot])
    expected = {"A": 1, "sigma": 1, "x0": 0}
    for k, v in expected.items():
        assert np.allclose(livefit.result.values[k], v, atol=1e-6)
def test_scan_vars(RE, daq):
    logger.debug('test_scan_vars')

    daq.configure(events=120)
    scan_vars = ScanVars('TST', name='tst', RE=RE)
    scan_vars.enable()

    check = CheckVals(scan_vars)
    RE.subscribe(check)

    check.plan = 'scan'
    RE(
        scan([det1, det2],
             motor1, 0, 10,
             motor2, 20, 0,
             motor3, 0, 1,
             motor, 0, 1, 11))

    check.plan = 'count'
    RE(count([det1, det2], 11))

    def custom(detector):
        for i in range(3):
            yield from create()
            yield from read(detector)
            yield from save()

    check.plan = 'custom'
    daq.configure(duration=4)
    RE(stage_wrapper(run_wrapper(custom(det1)), [det1]))

    scan_vars.disable()

    # Last, let's force an otherwise uncaught error to cover the catch-all
    # try-except block to make sure the log message doesn't error
    scan_vars.start({'motors': 4})
def energy_scan(start, stop, num, flyers=None, name='', **metadata):
    """
    Example
    -------
    >>> RE(energy_scan(11350, 11450, 2))
    """
    if flyers is None:
        flyers = [pb9.enc1, pba2.adc6, pba1.adc7]

    def inner():
        md = {'plan_args': {}, 'plan_name': 'step scan', 'name': name}
        md.update(**metadata)
        yield from bps.open_run(md=md)

        # Start with a step scan.
        plan = bp.scan([hhm_en.energy], hhm_en.energy, start, stop, num,
                       md={'name': name})
        # Wrap it in a fly scan with the Pizza Box.
        plan = bpp.fly_during_wrapper(plan, flyers)
        # Working around a bug in fly_during_wrapper: stage and unstage the
        # pizza box manually.
        for flyer in flyers:
            yield from bps.stage(flyer)
        yield from bps.stage(hhm)
        plan = bpp.pchain(plan)
        yield from plan
def test_to_event_model_new_api_multi(RE, hw):
    source = Stream()
    stop = FromEventStream("stop", (), source)
    t = FromEventStream(
        "event", ("data", "motor"), source, principle=True, stream_name="hi"
    )
    assert t.principle

    tt = t.zip(stop)
    n = simple_to_event_stream_new_api(
        {
            t: {"data_keys": {"ct": {"units": "arb", "precision": 2}}},
            tt: {
                "name": "final",
                "data_keys": {"ct": {"units": "arb", "precision": 2}},
            },
        },
        hello="world",
    )
    tt = t.sink_to_list()
    p = n.pluck(0).sink_to_list()
    d = n.pluck(1).sink_to_list()

    RE.subscribe(unstar(source.emit))
    RE.subscribe(print)

    RE(scan([hw.motor], hw.motor, 0, 9, 10))

    assert tt
    assert set(p) == {"start", "stop", "event", "descriptor"}
    assert d[0]["hello"] == "world"
    assert d[1]["hints"] == {"analyzer": {"fields": ["ct"]}}
    assert d[1]["data_keys"]["ct"]["units"] == "arb"
    assert d[-3]["name"] == "final"
    assert d[-1]["run_start"]
def test_to_event_model_new_api_multi_parent(RE, hw):
    source = Stream()
    t = FromEventStream("event", ("data", "motor"), source, principle=True)
    t2 = FromEventStream("event", ("data", "motor"), source, principle=True)
    assert t.principle

    n = simple_to_event_stream_new_api(
        {
            t.zip(t2).pluck(0): {
                "data_keys": {"ct": {"units": "arb", "precision": 2}}
            }
        }
    )
    tt = t.sink_to_list()
    p = n.pluck(0).sink_to_list()
    d = n.pluck(1).sink_to_list()

    RE.subscribe(unstar(source.emit))
    RE.subscribe(print)

    RE(scan([hw.motor], hw.motor, 0, 9, 10))

    assert tt
    assert set(p) == {"start", "stop", "event", "descriptor"}
    assert d[1]["hints"] == {"analyzer": {"fields": ["ct"]}}
    assert d[1]["data_keys"]["ct"]["units"] == "arb"
    assert d[-1]["run_start"]
def cam_scan(detectors, camera, motor, start, stop, num, md=None,
             idle_time=1):
    def per_step(dets, motor, step):
        yield from one_1d_step(dets, motor, step)
        yield from bp.abs_set(camera, 1, wait=True)
        yield from bp.abs_set(camera, 0, wait=True)
        yield from bp.sleep(idle_time)

    if md is None:
        md = {}
    md = ChainMap(
        md,
        {'plan_args': {'detectors': list(map(repr, detectors)),
                       'num': num,
                       'motor': repr(motor),
                       'start': start,
                       'stop': stop,
                       'per_step': repr(per_step),
                       'idle_time': float(idle_time)},
         'plan_name': 'cam_scan',
         })

    return (yield from bp.subs_wrapper(
        bp.scan(detectors, motor, start, stop, num,
                per_step=per_step, md=md),
        LiveTable(detectors + [motor]))
    )
def basic_scan():
    print('start basic scan')
    basic_scan_plan = scan([], tomo_stage.finex_top, 0, 10, 5)
    basic_scan_gen = yield from basic_scan_plan
    print('done with basic scan')
    return basic_scan_gen
def test_last_cache(RE, hw):
    source = Stream()
    t = FromEventStream("event", ("data", "motor"), source, principle=True)
    assert t.principle

    n = ToEventStream(
        t, ("ct",), data_key_md={"ct": {"units": "arb"}}
    ).LastCache()
    tt = t.sink_to_list()
    names = n.pluck(0).sink_to_list()
    docs = n.pluck(1).sink_to_list()

    RE.subscribe(unstar(source.emit))
    RE.subscribe(print)

    RE(scan([hw.motor], hw.motor, 0, 9, 10))

    assert len(docs) == 10 + 3 + 2
    assert names[-3] == "descriptor"
    assert names[-2] == "event"
    assert tt
    assert set(names) == {"start", "stop", "event", "descriptor"}
    assert docs[1]["hints"] == {"analyzer": {"fields": ["ct"]}}
    assert docs[1]["data_keys"]["ct"]["units"] == "arb"
    assert docs[-1]["run_start"]
def test_full_field_tomo_pipeline(RE, hw, db):
    L = []
    rr = RunRouter(
        [lambda x: tomo_callback_factory(x,
                                         publisher=lambda *x: L.append(x),
                                         handler_reg=db.reg.handler_reg)]
    )
    RE.subscribe(rr)
    direct_img = SynSignal(
        func=lambda: np.array(np.random.random((10, 10))),
        name="img",
        labels={"detectors"},
    )
    RE(
        bp.scan(
            [direct_img],
            hw.motor1,
            0,
            180,
            30,
            md={
                "tomo": {
                    "type": "full_field",
                    "rotation": "motor1",
                    "center": 0.0,
                }
            },
        )
    )
    # det1
    # sinogram and recon
    # 30 events + start, stop, descriptor
    assert len(L) == (30 + 2 + 1 + 2) * 2
    assert len(L[7][1]["data"]["img_tomo"].shape) == 3
    assert len(L[6][1]["data"]["img_sinogram"].shape) == 3
def test_linear_fit():
    # Create RunEngine
    RE = RunEngine()
    # Expected values of fit
    expected = {'slope': 5, 'intercept': 2}
    # Create simulated devices
    motor = Mover('motor', {'motor': lambda x: x}, {'x': 0})
    det = Reader('det',
                 {'centroid': lambda: 5 * motor.read()['motor']['value'] + 2})
    # Assemble fitting callback
    cb = LinearFit('centroid', 'motor', update_every=None)
    # Scan through variables
    RE(scan([det], motor, -1, 1, 50), cb)
    # Check accuracy of fit
    for k, v in expected.items():
        assert np.allclose(cb.result.values[k], v, atol=1e-6)
    # Check we create an accurate estimate
    assert np.allclose(cb.eval(x=10), 52, atol=1e-5)
    assert np.allclose(cb.eval(motor=10), 52, atol=1e-5)
    assert np.allclose(cb.backsolve(52)['x'], 10, atol=1e-5)
def newport_dscan(self, motor, start, end, nsteps, nEvents, record=None,
                  use_l3t=False, post=False):
    self.cleanup_RE()
    daq.configure(nEvents, record=record, controls=[motor], use_l3t=use_l3t)
    currPos = motor.wm()
    # remove backlash for small scans
    motor.mvr(-.1, wait=True)
    try:
        RE(scan([daq], motor, currPos + start, currPos + end, nsteps))
    except Exception:
        logger.debug('RE Exit', exc_info=True)
    finally:
        self.cleanup_RE()
    # move back to starting point and remove backlash
    motor.mv(currPos, wait=True)
    motor.mvr(-0.1, wait=True)
    motor.mv(currPos)
    if post:
        run = get_run()
        message = ('scan {name} from {min1:.3f} to {max1:.3f} '
                   'in {num1} steps').format(name=motor.name,
                                             min1=start + currPos,
                                             max1=end + currPos,
                                             num1=nsteps)
        self.elog.post(message, run=int(run))
def MED(init_gas, other_gas, minT, maxT, num_steps, num_steady, num_trans,
        num_loops=2):
    """
    1. Start flowing the initial gas.
    2. Scan the temperature from minT to maxT in `num_steps` evenly-spaced
       steps.
    3. Hold temperature at maxT and take `num_steady` images.
    4. Repeat (2) and (3) `num_loops` times.
    5. Switch the gas to `other_gas` and take `num_trans` acquisitions.
    6. Switch it back and take another `num_trans` acquisitions.

    Example
    -------
    Set the gasses. They can be in any order; it has nothing to do with the
    order they are used in the plan.

    >>> gas.gas_list = ['O2', 'CO2']

    Optionally, preview the plan.

    >>> print_summary(MED('O2', 'CO2', 200, 300, 21, 20, 60))

    Execute it.

    >>> RE(MED('O2', 'CO2', 200, 300, 21, 20, 60))
    """
    # Step 1
    yield from abs_set(gas, init_gas)
    # Steps 2 and 3 in a loop.
    for _ in range(num_loops):
        yield from subs_wrapper(scan([pe1, gas.current_gas], eurotherm,
                                     minT, maxT, num_steps),
                                LiveTable([eurotherm, gas.current_gas]))
        yield from subs_wrapper(count([pe1], num_steady), LiveTable([]))
    # Step 5
    yield from abs_set(gas, other_gas)
    yield from subs_wrapper(count([pe1], num_trans), LiveTable([]))
    # Step 6
    yield from abs_set(gas, init_gas)
    yield from subs_wrapper(count([pe1], num_trans), LiveTable([]))
def stepping_ct(dets, exposure, motor, start, stop, *, md=None, num=3):
    """Take data at several points along the y-direction"""
    _md = md or {}
    sp_md = yield from _xpd_pre_plan(dets, exposure)
    _md.update(sp_md)
    return (yield from bp.scan(dets, motor, start, stop, num, md=_md))
def ascan_wimagerh5_slow(self, imagerh5, motor, start, end, nsteps, nEvents,
                         record=True):
    plan_duration = (nsteps * nEvents / 120. + 0.3 * (nsteps - 1) + 4) * 10
    try:
        imagerh5.prepare(nSec=plan_duration)
    except Exception:
        print('imager preparation failed')
        return
    daq.configure(nEvents, record=record, controls=[motor])
    this_plan = scan([daq], motor, start, end, nsteps)
    # we assume DAQ runs at 120Hz (event code 40 or 140),
    # a DAQ transition time of 0.3 seconds,
    # a DAQ start time of about 1 sec,
    # two extra seconds,
    # and one extra second to wait for the hdf5 file to start being written
    imagerh5.write()
    time.sleep(1)
    RE(this_plan)
    imagerh5.write_stop()
def Tramp(dets, exposure, Tstart, Tstop, Tstep, *, md=None):
    """
    Scan over the temperature controller in steps.

    Temperature steps are defined by a starting point, a stopping point and
    a step size.

    Parameters
    ----------
    dets : list
        list of 'readable' objects
    exposure : float
        exposure time at each temperature step in seconds
    Tstart : float
        starting point of temperature sequence
    Tstop : float
        stopping point of temperature sequence
    Tstep : float
        step size between Tstart and Tstop of this sequence
    md : dict, optional
        extra metadata

    Note
    ----
    The temperature controller that is driven will always be the one
    configured in global state. Please refer to http://xpdacq.github.io
    for more information.
    """
    pe1c, = dets
    if md is None:
        md = {}
    # setting up area_detector
    (num_frame, acq_time, computed_exposure) = _configure_pe1c(exposure)
    # compute Nsteps
    (Nsteps, computed_step_size) = _nstep(Tstart, Tstop, Tstep)
    # update md
    _md = ChainMap(md,
                   {'sp_time_per_frame': acq_time,
                    'sp_num_frames': num_frame,
                    'sp_requested_exposure': exposure,
                    'sp_computed_exposure': computed_exposure,
                    'sp_type': 'Tramp',
                    'sp_startingT': Tstart,
                    'sp_endingT': Tstop,
                    'sp_requested_Tstep': Tstep,
                    'sp_computed_Tstep': computed_step_size,
                    'sp_Nsteps': Nsteps,
                    # need a name that shows all parameters values
                    # 'sp_name': 'Tramp_<exposure_time>',
                    'sp_uid': str(uuid.uuid4()),
                    'sp_plan_name': 'Tramp'})
    plan = bp.scan([glbl.area_det], glbl.temp_controller, Tstart, Tstop,
                   Nsteps, md=_md)
    plan = bp.subs_wrapper(plan,
                           LiveTable([glbl.area_det, glbl.temp_controller]))
    yield from plan
def test_hints(RE, hw):
    motor = hw.motor
    expected_hint = {'fields': [motor.name]}
    assert motor.hints == expected_hint
    collector = []

    def collect(*args):
        collector.append(args)

    RE(scan([], motor, 1, 2, 2), {'descriptor': collect})
    name, doc = collector.pop()
    assert doc['hints'][motor.name] == expected_hint
def test_peak_statistics(RE):
    """peak statistics calculation on a simple gaussian function"""
    x = 'motor'
    y = 'det'
    ps = PeakStats(x, y)
    RE.subscribe(ps)
    RE(scan([det], motor, -5, 5, 100))

    fwhm_gauss = 2 * np.sqrt(2 * np.log(2))  # theoretical value with std=1
    assert np.allclose(ps.cen, 0, atol=1e-6)
    assert np.allclose(ps.com, 0, atol=1e-6)
    assert np.allclose(ps.fwhm, fwhm_gauss, atol=1e-2)
def test_per_step(RE, hw):
    # Check default behavior, using one motor and then two.
    RE(scan([hw.det], hw.motor, -1, 1, 3, per_step=one_nd_step))
    RE(scan([hw.det], hw.motor, -1, 1, hw.motor2, -1, 1, 3,
            per_step=one_nd_step))
    RE(inner_product_scan([hw.det], 3, hw.motor, -1, 1, per_step=one_nd_step))
    RE(inner_product_scan([hw.det], 3, hw.motor, -1, 1, hw.motor2, -1, 1,
                          per_step=one_nd_step))
    # Check that scan still accepts the old one_1d_step signature:
    RE(scan([hw.det], hw.motor, -1, 1, 3, per_step=one_1d_step))
    RE(rel_scan([hw.det], hw.motor, -1, 1, 3, per_step=one_1d_step))

    # Test that various error paths include a useful error message identifying
    # that the problem is with 'per_step':

    # You can't use the one_1d_step signature with more than one motor.
    with pytest.raises(TypeError) as exc:
        RE(scan([hw.det], hw.motor, -1, 1, hw.motor2, -1, 1, 3,
                per_step=one_1d_step))
    assert "Signature of per_step assumes 1D trajectory" in str(exc)

    # The signature must be either like one_1d_step or one_nd_step:
    def bad_sig(detectors, mtr, step):
        ...

    with pytest.raises(TypeError) as exc:
        RE(scan([hw.det], hw.motor, -1, 1, 3, per_step=bad_sig))
    assert "per_step must be a callable with the signature" in str(exc)
def test_peak_statistics_compare_chx(RE):
    """This test focuses on a gaussian function with noise."""
    s = np.random.RandomState(1)
    noisy_det_fix = SynGauss('noisy_det_fix', motor, 'motor', center=0,
                             Imax=1, noise='uniform', sigma=1,
                             noise_multiplier=0.1, random_state=s)
    x = 'motor'
    y = 'noisy_det_fix'
    ps = PeakStats(x, y)
    RE.subscribe(ps)

    RE(scan([noisy_det_fix], motor, -5, 5, 100))
    ps_chx = get_ps(ps.x_data, ps.y_data)

    assert np.allclose(ps.cen, ps_chx['cen'], atol=1e-6)
    assert np.allclose(ps.com, ps_chx['com'], atol=1e-6)
    assert np.allclose(ps.fwhm, ps_chx['fwhm'], atol=1e-6)
def test_SaveBaseClass(RE, hw, tmpdir):
    sbc = SaveBaseClass(
        "{base_folder}/{folder_prefix}/{start[hello]}{"
        "__independent_vars__}",
        handler_reg={},
        base_folders=tmpdir.strpath,
    )
    L = []
    RE.subscribe(lambda *x: L.append(x))
    RE(
        bp.scan(
            [hw.direct_img],
            hw.motor,
            0,
            10,
            1,
            md={
                "hello": "world",
                "folder_tag_list": ["a", "b", "c"],
                "a": "a",
                "b": "b",
                "c": "c",
            },
        )
    )

    name_param = {
        "start": (
            "start_template",
            "{base_folder}/a/b/c//world{__independent_vars__}",
        ),
        "event": (
            "filenames",
            [f"{tmpdir.strpath}/a/b/c//world_motor_0,000_arb_"],
        ),
    }
    for n, d in L:
        sbc(n, d)
        key = name_param.get(n, "")
        if key:
            assert getattr(sbc, key[0], "") == name_param[n][1]
def test_live_fit():
    try:
        import lmfit
    except ImportError:
        raise pytest.skip('requires lmfit')

    def gaussian(x, A, sigma, x0):
        return A * np.exp(-(x - x0)**2 / (2 * sigma**2))

    model = lmfit.Model(gaussian)
    init_guess = {'A': 2,
                  'sigma': lmfit.Parameter('sigma', 3, min=0),
                  'x0': -0.2}
    cb = LiveFit(model, 'det', {'x': 'motor'}, init_guess)

    RE(scan([det], motor, -1, 1, 100), cb)
    # results are in cb.result.values
    expected = {'A': 1, 'sigma': 1, 'x0': 0}
    for k, v in expected.items():
        assert np.allclose(cb.result.values[k], v, atol=1e-6)
def test_plan_md(RE, hw):
    mutable = []
    md = {'color': 'red'}

    def collector(name, doc):
        mutable.append(doc)

    # test generator
    mutable.clear()
    RE(count([hw.det], md=md), collector)
    assert 'color' in mutable[0]

    # test Plan with explicit __init__
    mutable.clear()
    RE(bp.count([hw.det], md=md), collector)
    assert 'color' in mutable[0]

    # test Plan with implicit __init__ (created via metaclass)
    mutable.clear()
    RE(bp.scan([hw.det], hw.motor, 1, 2, 2, md=md), collector)
    assert 'color' in mutable[0]
def test_live_fit_plot(RE, hw):
    try:
        import lmfit
    except ImportError:
        raise pytest.skip('requires lmfit')

    def gaussian(x, A, sigma, x0):
        return A * np.exp(-(x - x0) ** 2 / (2 * sigma ** 2))

    model = lmfit.Model(gaussian)
    init_guess = {'A': 2,
                  'sigma': lmfit.Parameter('sigma', 3, min=0),
                  'x0': -0.2}
    livefit = LiveFit(model, 'det', {'x': 'motor'}, init_guess,
                      update_every=50)
    lfplot = LiveFitPlot(livefit, color='r')
    lplot = LivePlot('det', 'motor', ax=plt.gca(), marker='o', ls='none')
    RE(scan([hw.det], hw.motor, -1, 1, 50), [lplot, lfplot])
    expected = {'A': 1, 'sigma': 1, 'x0': 0}
    for k, v in expected.items():
        assert np.allclose(livefit.result.values[k], v, atol=1e-6)
def test_save_server(RE, hw, tmpdir):
    L = []
    RE.subscribe(lambda *x: L.append(x))
    RE.subscribe(
        RunRouter(
            [setup_saver],
            base_folders=tmpdir.strpath,
            template="{base_folder}/{folder_prefix}/"
                     "{start[analysis_stage]}/"
                     "{start[sample_name]}_"
                     "{human_timestamp}_"
                     "{__independent_vars__}"
                     "{start[uid]:.6}_"
                     "{event[seq_num]:04d}{ext}",
            handler_reg={"NPY_SEQ": NumpySeqHandler},
        )
    )
    RE(
        bp.scan(
            [hw.img],
            hw.motor,
            0,
            10,
            1,
            md={
                "sample_name": "world",
                "folder_tag_list": ["a", "b", "c"],
                "a": "a",
                "b": "b",
                "c": "c",
                "analysis_stage": "dark_sub",
            },
        )
    )
    for n, d in L:
        if n == "event":
            start = L[0][1]
            s = (
                f"/a/b/c//{start['analysis_stage']}/"
                f"world_{_timestampstr(start['time'])}_motor_0,000_arb_"
                f"{start['uid']:.6}_{d['seq_num']:04d}_img.tiff"
            )
            assert os.path.exists(tmpdir.strpath + s)
def test_live_fit(RE, hw):
    try:
        import lmfit
    except ImportError:
        raise pytest.skip("requires lmfit")

    def gaussian(x, A, sigma, x0):
        return A * np.exp(-(x - x0) ** 2 / (2 * sigma ** 2))

    model = lmfit.Model(gaussian)
    init_guess = {
        "A": 2,
        "sigma": lmfit.Parameter("sigma", 3, min=0),
        "x0": -0.2,
    }
    cb = LiveFit(model, "det", {"x": "motor"}, init_guess, update_every=50)

    RE(scan([hw.det], hw.motor, -1, 1, 50), cb)
    # results are in cb.result.values
    expected = {"A": 1, "sigma": 1, "x0": 0}
    for k, v in expected.items():
        assert np.allclose(cb.result.values[k], v, atol=1e-6)
def test_strip_dep_var(RE, hw):
    L = []
    LL = []
    RE.subscribe(lambda *x: L.append(x))
    sdv = StripDepVar()
    RE.subscribe(lambda *x: LL.append(sdv(*x)))

    RE(scan([hw.ab_det], hw.motor1, 0, 10, 10))

    for (n1, d1), (n2, d2) in zip(L, LL):
        assert n1 == n2
        if n1 == "descriptor":
            assert d1 != d2
            for k in ["data_keys", "hints", "configuration", "object_keys"]:
                for kk in ["det"]:
                    assert kk not in d2[k]
        elif n1 == "event":
            assert d1 != d2
            for k in ["data", "timestamps"]:
                for kk in ["det_a", "det_b"]:
                    assert kk not in d2[k]
        else:
            assert d1 == d2
def run_exp(delay):  # pragma: no cover
    time.sleep(delay)
    print("running exp")

    p = Publisher(proxy[0], prefix=b"an")
    RE.subscribe(p)

    det = SynSignal(func=lambda: np.ones((10, 10)), name="gr")
    RE(
        bp.scan(
            [det],
            hw.motor1,
            0,
            2,
            2,
            md={
                "tomo": {
                    "type": "full_field",
                    "rotation": "motor1",
                    "center": 1,
                }
            },
        )
    )
def generate_data(RE):
    # This adds {'proposal_id': 1} to all future runs, unless overridden.
    RE.md['proposal_id'] = 1
    RE(count([det]))
    RE(scan([det], motor, 1, 5, 5))
    RE(scan([det], motor, 1, 10, 10))

    RE.md['proposal_id'] = 2
    RE(count([det]))
    RE(scan([det], motor, -1, 1, 5))
    RE(relative_scan([det], motor, 1, 10, 10))
    RE(scan([det], motor, -1, 1, 1000))

    RE.md['proposal_id'] = 3
    # This adds {'operator': 'Ken'} to all future runs, unless overridden.
    RE.md['operator'] = 'Ken'
    RE(count([det]), purpose='calibration', sample='A')
    RE(scan([det], motor, 1, 10, 10), operator='Dan')  # temporarily overrides Ken
    RE(count([det]), sample='A')  # (now back to Ken)
    RE(count([det]), sample='B')

    RE.md['operator'] = 'Dan'
    RE(count([det]), purpose='calibration')
    RE(scan([det], motor, 1, 10, 10))

    del RE.md['operator']  # clean up by un-setting operator
def test_with_baseline(RE, hw):
    bec = BestEffortCallback()
    RE.subscribe(bec)
    sd = SupplementalData(baseline=[hw.det])
    RE.preprocessors.append(sd)
    RE(scan([hw.ab_det], hw.motor, 1, 5, 5))
def escan(*args, **kwargs):
    return (yield from bp.scan(*args, per_step=one_1d_step_pseudo_shutter,
                               **kwargs))
def test_simple(RE, hw):
    bec = BestEffortCallback()
    RE.subscribe(bec)
    RE(scan([hw.ab_det], hw.motor, 1, 5, 5))
def generate_example_catalog(data_path):
    data_path = Path(data_path)

    def factory(name, doc):
        serializer = Serializer(data_path / 'abc')
        serializer('start', doc)
        return [serializer], []

    RE = RunEngine()
    sd = SupplementalData()
    RE.preprocessors.append(sd)
    sd.baseline.extend([motor1, motor2])
    rr = RunRouter([factory])
    RE.subscribe(rr)
    RE(count([det]))
    RE(count([noisy_det], 5))
    RE(scan([det], motor, -1, 1, 7))
    RE(grid_scan([det4], motor1, -1, 1, 4, motor2, -1, 1, 7, False))
    RE(scan([det], motor, -1, 1, motor2, -1, 1, 5))
    RE(count([noisy_det, det], 5))
    # RE(count([img], 5))

    def factory(name, doc):
        serializer = Serializer(data_path / 'xyz')
        serializer('start', doc)
        return [serializer], []

    RE = RunEngine()
    rr = RunRouter([factory])
    RE.subscribe(rr)
    RE(count([det], 3))

    catalog_filepath = data_path / 'catalog.yml'
    with open(catalog_filepath, 'w') as file:
        file.write(f'''
plugins:
  source:
    - module: intake_bluesky
sources:
  abc:
    description: Some imaginary beamline
    driver: intake_bluesky.jsonl.BlueskyJSONLCatalog
    container: catalog
    args:
      paths: {Path(data_path) / 'abc' / '*.jsonl'}
      handler_registry:
        NPY_SEQ: ophyd.sim.NumpySeqHandler
    metadata:
      beamline: "00-ID"
  xyz:
    description: Some imaginary beamline
    driver: intake_bluesky.jsonl.BlueskyJSONLCatalog
    container: catalog
    args:
      paths: {Path(data_path) / 'xyz' / '*.jsonl'}
      handler_registry:
        NPY_SEQ: ophyd.sim.NumpySeqHandler
    metadata:
      beamline: "99-ID"
''')
    return str(catalog_filepath)
def absolute_scan(dets, motor, start, finish, intervals, time=None, *,
                  md=None):
    yield from _pre_scan(dets, total_points=intervals + 1, count_time=time)
    return (yield from plans.scan(dets, motor, start, finish, intervals,
                                  md=md))
def cscan(*args, **kwargs):
    return (yield from bp.scan(*args, per_step=one_1d_step_check_beam,
                               **kwargs))
res = SimpleToEventStream(pipeline, ("result",))
merge = AlignEventStreams(raw_source.starmap(StripDepVar()), res)
merge.sink(pprint)
# send to viz server
merge.starsink(p)

RE.subscribe(lambda *x: raw_source.emit(x))
RE.subscribe(lambda *x: p(*x))
RE.subscribe(lambda *x: time.sleep(.1))
RE.subscribe(lambda *x: time.sleep(1), "stop")

RE(
    pchain(
        bp.scan([hw.noisy_det], hw.motor, 0, 10, 10),
        bp.grid_scan(
            [hw.ab_det],
            hw.motor,
            0,
            5,
            5,
            hw.motor2,
            0,
            5,
            5,
            True,
            per_step=one_nd_step,
        ),
        bp.grid_scan(
            [hw.ab_det],
def inner():
    yield from bp.scan([camera], motor, start, end, steps)
def test_blank_hints(RE, hw):
    bec = BestEffortCallback()
    RE.subscribe(bec)
    RE(scan([hw.ab_det], hw.motor, 1, 5, 5, md={'hints': {}}))