def test_serp_scan():
    """Note: run this standalone, not inside mfx hutch python."""
    import numpy as np
    from bluesky import RunEngine
    from bluesky.callbacks.best_effort import BestEffortCallback
    from ophyd.sim import motor1, motor2
    from ophyd.status import StatusBase
    from pcdsdaq.daq import Daq
    from pcdsdaq.sim import set_sim_mode

    class FakeSeq:
        def trigger(self):
            print('Triggered the sequencer!')
            status = StatusBase()
            status.set_finished()
            return status

    set_sim_mode(True)
    RE = RunEngine({})
    bec = BestEffortCallback()
    RE.subscribe(bec)
    seq = FakeSeq()
    daq = Daq(RE=RE)
    RE(serp_seq_scan(motor1, np.arange(100, 200, 10), motor2, [0, 100], seq))

def test_count_sirepo_simulation(sirepo_guest_session):
    with sirepo_guest_session(simulation_type="srw") as sirepo_session:
        simulation_table = sirepo_session.simulation_list()

        # pick a known simulation
        simulation_id = simulation_table[
            "/Light Source Facilities/NSLS-II/NSLS-II SRX beamline"][
            "NSLS-II SRX beamline"]
        srx_simulation_data = sirepo_session.simulation_data(
            simulation_id=simulation_id)

        sirepo_simulation_class = build_sirepo_simulation(
            sirepo_simulation_data=srx_simulation_data)

        # this function will store all documents
        # published by the RunEngine in a list
        published_bluesky_documents = list()

        def store_published_document(name, document):
            published_bluesky_documents.append((name, document))

        RE = RunEngine()
        RE.subscribe(store_published_document)

        sirepo_simulation = sirepo_simulation_class(name="srx")

        run_id = RE(count([sirepo_simulation]))

        assert len(published_bluesky_documents) > 0

def snapshot_cli():
    """
    given a list of PVs on the command line, snapshot and print report

    EXAMPLES::

        snapshot.py pv1 [more pvs ...]
        snapshot.py `cat pvlist.txt`

    Note that these are equivalent::

        snapshot.py rpi5bf5:0:humidity rpi5bf5:0:temperature
        snapshot.py rpi5bf5:0:{humidity,temperature}
    """
    from bluesky import RunEngine

    args = get_args()

    md = OrderedDict(purpose="archive a set of EPICS PVs")
    md.update(parse_metadata(args))

    obj_dict = APS_utils.connect_pvlist(args.EPICS_PV, wait=False)
    time.sleep(2)   # FIXME: allow time to connect

    db = Broker.named(args.broker_config)

    RE = RunEngine({})
    RE.subscribe(db.insert)

    uuid_list = RE(APS_plans.snapshot(obj_dict.values(), md=md))

    if args.report:
        snap = list(db(uuid_list[0]))[0]
        APS_callbacks.SnapshotReport().print_report(snap)

def run(self):
    """
    Overrides the `run()` function of the `multiprocessing.Process` class.
    Called by the `start` method.
    """
    self._exit_event.clear()

    self._RE = RunEngine({})

    bec = BestEffortCallback()
    self._RE.subscribe(bec)

    # db = Broker.named('temp')
    self._RE.subscribe(self._db.insert)

    self._execution_queue = queue.Queue()

    self._thread_conn = threading.Thread(
        target=self._receive_packet_thread,
        name="RE Worker Receive")
    self._thread_conn.start()

    # Now make the main thread busy
    self._execute_in_main_thread()

    self._thread_conn.join()

    del self._RE

def __init__(self, **kwargs):
    super(QRunEngine, self).__init__()

    self.RE = RunEngine(context_managers=[],
                        during_task=DuringTask(),
                        **kwargs)
    self.RE.subscribe(self.sigDocumentYield.emit)

    # TODO: pull from settings plugin
    from suitcase.mongo_normalized import Serializer
    # TODO: create single databroker db
    # python-dotenv stores name-value pairs in .env (add to .gitignore)
    username = os.getenv("USER_MONGO")
    pw = os.getenv("PASSWD_MONGO")
    try:
        self.RE.subscribe(
            Serializer(
                f"mongodb://{username}:{pw}@localhost:27017/mds?authsource=mds",
                f"mongodb://{username}:{pw}@localhost:27017/fs?authsource=fs"
            ))
    except OperationFailure as err:
        msg.notifyMessage("Could not connect to local mongo database.",
                          title="xicam.Acquire Error",
                          level=msg.ERROR)
        msg.logError(err)

    self.queue = PriorityQueue()
    self.process_queue()

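# A hedged sketch of the dotenv setup assumed above: python-dotenv's
# load_dotenv() reads a local .env file (kept out of version control) so the
# os.getenv() calls can find USER_MONGO and PASSWD_MONGO. The values in the
# example .env below are invented.
#
#     # .env
#     USER_MONGO=bluesky
#     PASSWD_MONGO=changeme
#
from dotenv import load_dotenv
load_dotenv()  # populates os.environ from .env before os.getenv() is called
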
def RE():
    """
    Standard logging RunEngine
    """
    RE = RunEngine({})
    collector = MsgCollector(msg_hook=run_engine_logger.debug)
    RE.msg_hook = collector
    return RE

def macro_sweep_test(target):
    logging.info(
        'macro_sweep_test initiated with target {:0.4f}'.format(target))
    RE = RunEngine({})
    bec = BestEffortCallback()
    RE.subscribe(bec)
    RE.waiting_hook = ProgressBarManager()
    RE(run_wrapper(rel_smooth_sweep_test(tst_23, target)))

def RE():
    loop = asyncio.new_event_loop()
    loop.set_debug(True)
    RE = RunEngine({}, loop=loop)
    yield RE
    if RE.state != 'idle':
        RE.halt()

def run_plans(self):
    RE = RunEngine({})
    RE.log.setLevel(logging.INFO)
    publisher = Publisher('localhost:5567')
    RE.subscribe(publisher)
    for plan in self.PLANS:
        print('Starting Scan...')
        RE(plan)
        print('Scan Done...')

def test_basic_functionality():
    "A simple test demonstrating validation failure and success"
    handler_registry.clear()
    RE = RunEngine()
    rr = RunRouter([validator_factory_raising])
    RE.subscribe(rr)
    # This should fail because there is no handler registered.
    with pytest.raises(UndefinedAssetSpecification):
        RE(count([img]))
    # Register the handler...
    handler_registry.update({'NPY_SEQ': NumpySeqHandler})
    # ...and now the validator should be satisfied.
    RE(count([img]))

def macro_RSXS_smooth_sweep(stroke_height, stroke_spacing, n_strokes,
                            both_directions=True):
    """
    macro_RSXS_smooth_sweep

    This method wraps up the bluesky/ophyd code and allows users to drive
    the LU20 experiment with minimal code overhead. It contains the
    following bluesky plan.

    This bluesky plan moves a 2-axis actuator across multiple traversals of
    a sample. The plan traverses the entirety of the stroke_height (y-axis)
    and after each traversal, steps in the x-axis by the stroke_spacing. It
    may be configured to scan in only a single direction and shutter the
    beam for the opposite direction. This removes the shutter at the
    beginning of the plan and reinserts it at the end. At the end of the
    plan, the sample is moved to its original y-axis position but with an
    x-axis position ready for the next run.

    For more details about the path, see the documentation of the
    xy_sequencer, the method that generates the sample's path.

    Parameters
    ----------
    stroke_height : float
        Vertical distance (y-axis) of each stroke.

    stroke_spacing : float
        Horizontal distance between individual strokes.

    n_strokes : int
        Number of strokes to complete.

    both_directions : bool, optional
        Defaults to True. If this value is true the beam will be scanned
        across the sample while moving in both vertical directions. If
        false, the beam is only scanned in a single direction.
    """
    RE = RunEngine({})
    bec = BestEffortCallback()
    RE.subscribe(bec)
    RE.waiting_hook = ProgressBarManager()
    RE(
        run_wrapper(
            rel_smooth_sweep(mot_x=rsxs_sample_x,
                             mot_y=rsxs_sample_y,
                             shutter=shutter,
                             stroke_height=stroke_height,
                             stroke_spacing=stroke_spacing,
                             n_strokes=n_strokes,
                             both_directions=both_directions)))

def run_publisher(in_port, data_path):
    """
    Acquire data in an infinite loop and publish it.
    """
    import asyncio
    from bluesky.callbacks.zmq import Publisher
    from suitcase.jsonl import Serializer
    from ophyd.sim import noisy_det, motor1, motor2
    from bluesky.plans import count
    from bluesky.preprocessors import SupplementalData
    from bluesky.plan_stubs import sleep

    publisher = Publisher(f'localhost:{in_port}')
    RE = RunEngine(loop=asyncio.new_event_loop())
    sd = SupplementalData()
    RE.preprocessors.append(sd)
    sd.baseline.extend([motor1, motor2])
    RE.subscribe(publisher)

    def factory(name, doc):
        serializer = Serializer(data_path / 'abc')
        serializer('start', doc)
        return [serializer], []

    rr = RunRouter([factory])
    RE.subscribe(rr)

    def infinite_plan():
        while True:
            yield from sleep(3)
            yield from count([noisy_det], 20, delay=0.5)

    try:
        RE(infinite_plan())
    finally:
        RE.halt()

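# A minimal companion sketch, not part of the original snippet: the documents
# published above can be consumed in another process with bluesky's ZMQ
# RemoteDispatcher. The out_port argument is an assumption and must match the
# outbound port of the 0MQ proxy that run_publisher feeds.
def run_consumer(out_port):
    from bluesky.callbacks.zmq import RemoteDispatcher

    dispatcher = RemoteDispatcher(f'localhost:{out_port}')
    # Print each document's name as it arrives.
    dispatcher.subscribe(lambda name, doc: print(name))
    dispatcher.start()  # blocks, dispatching documents until interrupted
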
def generate_example_catalog(data_path):
    data_path = Path(data_path)

    def factory(name, doc):
        serializer = Serializer(data_path / 'abc')
        serializer('start', doc)
        return [serializer], []

    RE = RunEngine()
    sd = SupplementalData()
    RE.preprocessors.append(sd)
    sd.baseline.extend([motor1, motor2])
    rr = RunRouter([factory])
    RE.subscribe(rr)
    RE(count([det]))
    RE(count([noisy_det], 5))
    RE(scan([det], motor, -1, 1, 7))
    RE(grid_scan([det4],
                 motor1, -1, 1, 4,
                 motor2, -1, 1, 7, False))
    RE(scan([det], motor, -1, 1, motor2, -1, 1, 5))
    RE(count([noisy_det, det], 5))
    RE(count([random_img], 5))
    RE(count([img], 5))

    def factory(name, doc):
        serializer = Serializer(data_path / 'xyz')
        serializer('start', doc)
        return [serializer], []

    RE = RunEngine()
    rr = RunRouter([factory])
    RE.subscribe(rr)
    RE(count([det], 3))

    catalog_filepath = data_path / 'catalog.yml'
    with open(catalog_filepath, 'w') as file:
        file.write(f'''
sources:
  abc:
    description: Some imaginary beamline
    driver: bluesky-jsonl-catalog
    container: catalog
    args:
      paths: {Path(data_path) / 'abc' / '*.jsonl'}
      handler_registry:
        NPY_SEQ: ophyd.sim.NumpySeqHandler
    metadata:
      beamline: "00-ID"
  xyz:
    description: Some imaginary beamline
    driver: bluesky-jsonl-catalog
    container: catalog
    args:
      paths: {Path(data_path) / 'xyz' / '*.jsonl'}
      handler_registry:
        NPY_SEQ: ophyd.sim.NumpySeqHandler
    metadata:
      beamline: "99-ID"
''')
    return str(catalog_filepath)

def _generate_simulation_data():
    """ private function to insert data into exp_db """
    if os.environ['XPDAN_SETUP'] != str(2):
        raise RuntimeError("ONLY insert data if you are running "
                           "simulation")
    # simulated det
    pe1c = SimulatedPE1C('pe1c',
                         {'pe1_image': lambda: np.random.randn(25, 25)})
    # TODO: add md schema later
    RE = RunEngine({})
    RE.subscribe(an_glbl['exp_db'].db.insert, 'all')
    RE(count([pe1c]))
    RE(scan([pe1c], motor, 1, 5, 5))
    RE(scan([pe1c], motor, 1, 10, 10))

def test_rank_models():
    RE = RunEngine()
    # Create accurate fit
    motor = SynAxis(name='motor')
    det = SynSignal(name='centroid',
                    func=lambda: 5 * motor.read()['motor']['value'] + 2)
    fit1 = LinearFit('centroid', 'motor', update_every=None,
                     name='Accurate')
    RE(scan([det], motor, -1, 1, 50), fit1)
    # Create inaccurate fit
    det2 = SynSignal(name='centroid',
                     func=lambda: 25 * motor.read()['motor']['value'] + 2)
    fit2 = LinearFit('centroid', 'motor', update_every=None,
                     name='Inaccurate')
    RE(scan([det2], motor, -1, 1, 50), fit2)
    # Create mildly inaccurate fit
    det3 = SynSignal(name='centroid',
                     func=lambda: 12 * motor.read()['motor']['value'] + 2)
    fit3 = LinearFit('centroid', 'motor', update_every=None,
                     name='Mildly Inaccurate')
    RE(scan([det3], motor, -1, 1, 50), fit3)
    # Rank models
    ranking = rank_models([fit2, fit1, fit3], target=22, x=4)
    assert ranking[0] == fit1
    assert ranking[1] == fit3
    assert ranking[2] == fit2

def test_simple_scan_nd():
    RE = RunEngine()
    hardware1 = yaqc_bluesky.Device(39423)
    hardware2 = yaqc_bluesky.Device(39424)
    sensor = yaqc_bluesky.Device(39425)
    cy = cycler(hardware1, [1, 2, 3]) * cycler(hardware2, [4, 5, 6])
    RE(scan_nd([sensor], cy))

def test_linear_fit():
    # Create RunEngine
    RE = RunEngine()
    # Expected values of fit
    expected = {'slope': 5, 'intercept': 2}
    # Create simulated devices
    motor = Mover('motor', {'motor': lambda x: x}, {'x': 0})
    det = Reader('det',
                 {'centroid': lambda: 5 * motor.read()['motor']['value'] + 2})
    # Assemble fitting callback
    cb = LinearFit('centroid', 'motor', update_every=None)
    # Scan through variables
    RE(scan([det], motor, -1, 1, 50), cb)
    # Check accuracy of fit
    for k, v in expected.items():
        assert np.allclose(cb.result.values[k], v, atol=1e-6)
    # Check we create an accurate estimate
    assert np.allclose(cb.eval(x=10), 52, atol=1e-5)
    assert np.allclose(cb.eval(motor=10), 52, atol=1e-5)
    assert np.allclose(cb.backsolve(52)['x'], 10, atol=1e-5)

def test_multi_fit():
    # Create RunEngine
    RE = RunEngine()
    # Expected values of fit
    expected = {'x0': 5, 'x1': 4, 'x2': 3}
    # Create simulated devices
    m1 = Mover('m1', {'m1': lambda x: x}, {'x': 0})
    m2 = Mover('m2', {'m2': lambda x: x}, {'x': 0})
    det = Reader(
        'det',
        {'centroid': lambda: 5 + 4 * m1.read()['m1']['value']
                             + 3 * m2.read()['m2']['value']})
    # Assemble fitting callback
    cb = MultiPitchFit('centroid', ('m1', 'm2'), update_every=None)
    # Scan through variables
    RE(outer_product_scan([det],
                          m1, -1, 1, 10,
                          m2, -1, 1, 10, False), cb)
    # Check accuracy of fit
    print(cb.result.fit_report())
    for k, v in expected.items():
        assert np.allclose(cb.result.values[k], v, atol=1e-6)
    # Check we create an accurate estimate
    assert np.allclose(cb.eval(a0=5, a1=10), 55, atol=1e-5)
    assert np.allclose(cb.backsolve(55, a1=10)['a0'], 5, atol=1e-5)
    assert np.allclose(cb.backsolve(55, a0=5)['a1'], 10, atol=1e-5)

def test_multi_fit():
    RE = RunEngine()
    # Expected values of fit
    expected = {'x0': 5, 'x1': 4, 'x2': 3}
    m1 = SynAxis(name='m1')
    m2 = SynAxis(name='m2')
    det = SynSignal(name='centroid',
                    func=lambda: 5 + 4 * m1.read()['m1']['value']
                                 + 3 * m2.read()['m2']['value'])
    # Assemble fitting callback
    cb = MultiPitchFit('centroid', ('m1', 'm2'), update_every=None)
    RE(outer_product_scan([det],
                          m1, -1, 1, 10,
                          m2, -1, 1, 10, False), cb)
    # Check accuracy of fit
    logger.debug(cb.result.fit_report())
    for k, v in expected.items():
        assert np.allclose(cb.result.values[k], v, atol=1e-6)
    # Check we create an accurate estimate
    assert np.allclose(cb.eval(a0=5, a1=10), 55, atol=1e-5)
    assert np.allclose(cb.backsolve(55, a1=10)['a0'], 5, atol=1e-5)
    assert np.allclose(cb.backsolve(55, a0=5)['a1'], 10, atol=1e-5)

def test_rank_models():
    RE = RunEngine()
    # Create accurate fit
    motor = Mover('motor', {'motor': lambda x: x}, {'x': 0})
    det = Reader('det',
                 {'centroid': lambda: 5 * motor.read()['motor']['value'] + 2})
    fit1 = LinearFit('centroid', 'motor', update_every=None,
                     name='Accurate')
    RE(scan([det], motor, -1, 1, 50), fit1)
    # Create inaccurate fit
    det2 = Reader(
        'det',
        {'centroid': lambda: 25 * motor.read()['motor']['value'] + 2})
    fit2 = LinearFit('centroid', 'motor', update_every=None,
                     name='Inaccurate')
    RE(scan([det2], motor, -1, 1, 50), fit2)
    # Create mildly inaccurate fit
    det3 = Reader(
        'det',
        {'centroid': lambda: 12 * motor.read()['motor']['value'] + 2})
    fit3 = LinearFit('centroid', 'motor', update_every=None,
                     name='Mildly Inaccurate')
    RE(scan([det3], motor, -1, 1, 50), fit3)
    # Rank models
    ranking = rank_models([fit2, fit1, fit3], target=22, x=4)
    assert ranking[0] == fit1
    assert ranking[1] == fit3
    assert ranking[2] == fit2

class RunEngineTraitType(TraitType):
    info_text = 'a RunEngine instance'
    default_value = RunEngine(get_history())

    def validate(self, obj, value):
        if not isinstance(value, RunEngine):
            self.error(obj, value)
        return value

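# A minimal usage sketch, assuming a traitlets-based owner class; the class
# name SessionConfig is hypothetical. The trait supplies a history-backed
# RunEngine by default and rejects any value that is not a RunEngine.
from traitlets import HasTraits

class SessionConfig(HasTraits):
    run_engine = RunEngineTraitType()

config = SessionConfig()           # uses the default RunEngine(get_history())
config.run_engine = RunEngine({})  # accepted: a RunEngine instance
# config.run_engine = 42           # would raise a TraitError via self.error()
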
def main():
    parser = argparse.ArgumentParser(description="TES horizontal feedback")
    parser.add_argument("FBref", type=float,
                        help="Target position. For example, -137000.")
    args = parser.parse_args()
    # config_bluesky_logging(level='INFO')
    RE = RunEngine()
    RE(plan(args.FBref))

def test_fly_scan_smoke():
    seq = SimSequencer('ECS:TST:100', name='seq')
    RE = RunEngine()

    # Create a plan where we fly for a second
    def plan():
        yield from fly_during_wrapper(run_wrapper(sleep(1)), [seq])

    # Run the plan
    RE(plan())

def run_pb(flyers, root='/tmp', collection_time=10):
    # make some filenames
    filenames = list()
    for flyer in flyers:
        filenames.append(root + "/" + str(uuid4())[:8] + "." + flyer.name)

    RE = RunEngine()
    RE(set_and_fly(filenames, flyers, sleeptime=collection_time))
    return filenames

def generate_example_data(callback):
    from ophyd.sim import det, motor1, motor2, motor3

    motor1.set(3.1).wait()
    motor2.set(-1000.02).wait()
    motor3.set(5.01).wait()
    RE = RunEngine()
    sd = SupplementalData(baseline=[motor1, motor2, motor3])
    RE.preprocessors.append(sd)
    RE.md["operator"] = "Dmitri"
    RE(count([det], 5, delay=0.05), RewriteTimes("2020-01-01 9:00", callback))
    RE(count([det], 5, delay=0.05), RewriteTimes("2020-01-01 9:05", callback))
    RE(count([det], 5, delay=0.05), RewriteTimes("2020-01-01 9:07", callback))
    RE(count([det], 5, delay=0.05), RewriteTimes("2020-02-01 9:00", callback))
    RE(count([det], 5, delay=0.05), RewriteTimes("2020-02-01 9:05", callback))
    RE(count([det], 5, delay=0.05), RewriteTimes("2020-02-01 13:00", callback))
    RE(count([det], 5, delay=0.05), RewriteTimes("2020-02-01 15:00", callback))
    RE(count([det], 5, delay=0.05), RewriteTimes("2020-02-01 15:05", callback))
    RE(
        count([det], 5, delay=0.05),
        RewriteTimes("2020-02-01 15:07", callback),
        operator="Michael",
    )
    RE(
        count([det], 5, delay=0.05),
        RewriteTimes("2020-02-01 15:08", callback),
        operator="Michael",
    )
    _generate_newton_data(
        RE,
        callback,
        [
            "2020-02-02 9:00",
            "2020-02-02 10:00",
            "2020-02-02 12:00",
            "2020-02-02 13:00",
            "2020-02-02 15:00",
            "2020-02-02 17:00",
            "2020-02-02 19:00",
        ],
    )

def get_catalog():
    RE = RunEngine()

    directory = tempfile.TemporaryDirectory().name
    with Serializer(directory) as serializer:
        RE(count([img]), serializer)

    with Serializer(directory) as serializer:
        RE(count([img], 3), serializer)

    catalog = BlueskyMsgpackCatalog(f"{directory}/*.msgpack")
    return catalog

def test_my_list_grid_scan2():
    xpd_configuration["shutter"] = shctl1
    motor = hw().motor
    plan = my_list_grid_scan(xpd_pe1c,
                             motor, [1.],
                             cs700, [300., 400., 500.],
                             acquire_time=0.2,
                             images_per_set=5,
                             wait_for_step=0.)
    RE = RunEngine()
    RE(plan)

def run(self):
    """
    Overrides the `run()` function of the `multiprocessing.Process` class.
    Called by the `start` method.
    """
    self._exit_event = threading.Event()

    # TODO: TC - Do you think that the following code may be included in
    #   RE.__init__() (for Python 3.8 and above)?
    # Setting the default event loop is needed to make the code work with
    #   Python 3.8.
    loop = get_bluesky_event_loop()
    asyncio.set_event_loop(loop)

    self._RE = RunEngine({})

    bec = BestEffortCallback()
    self._RE.subscribe(bec)

    # db = Broker.named('temp')
    self._RE.subscribe(self._db.insert)

    self._execution_queue = queue.Queue()

    self._thread_conn = threading.Thread(
        target=self._receive_packet_thread,
        name="RE Worker Receive")
    self._thread_conn.start()

    # Environment is initialized: send a report
    msg = {"type": "report", "value": {"action": "environment_created"}}
    self._conn.send(msg)

    # Now make the main thread busy
    self._execute_in_main_thread()

    self._thread_conn.join()

    del self._RE

    # Finally send a report
    msg = {"type": "report", "value": {"action": "environment_closed"}}
    self._conn.send(msg)

def _md(md):
    RE = RunEngine(md)
    scan = simple_scan(motor)
    assert_raises(KeyError, RE, scan)  # missing owner, beamline_id
    scan = simple_scan(motor)
    assert_raises(KeyError, RE, scan, owner='dan')
    RE(scan, owner='dan', beamline_id='his desk',
       group='some group', config={})  # this should work
    RE(scan)  # and now this should work, reusing metadata

    RE.md.clear()
    scan = simple_scan(motor)
    assert_raises(KeyError, RE, scan)

    # We can prime the md directly.
    RE.md['owner'] = 'dan'
    RE.md['group'] = 'some group'
    RE.md['config'] = {}
    RE.md['beamline_id'] = 'his desk'
    RE(scan)

    # Do optional values persist?
    RE(scan, project='sitting')
    RE(scan, subs={'start': validate_dict_cb('project', 'sitting')})

    # Persistent values are white-listed, so this should not persist.
    RE(scan, mood='excited')
    RE(scan, subs={'start': validate_dict_cb_opposite('mood')})

    # Add 'mood' to the whitelist and check that it persists.
    RE.persistent_fields.append('mood')
    assert_in('mood', RE.persistent_fields)
    RE(scan, mood='excited')
    RE(scan, subs={'start': validate_dict_cb('mood', 'excited')})

    # Remove 'project' from the whitelist and check that it stops persisting.
    RE.persistent_fields.remove('project')
    assert_not_in('project', RE.persistent_fields)
    RE(scan, project='standing')
    RE(scan)
    RE(scan, subs={'start': validate_dict_cb_opposite('project')})

    # Removing a field required by our Document spec is not allowed.
    assert_raises(ValueError, RE.persistent_fields.remove, 'beamline_id')

def main():
    act = MimicActuator(name='act')
    bk_dev = Bookkeeping(name='bk')
    dets = [bk_dev]

    RE = RunEngine({})
    RE.log.setLevel(logging.DEBUG)

    lt = LiveTable([
        bk_dev.status.name, bk_dev.target.name,
        act.setpoint.name, act.readback.name
    ])

    RE(bp.scan(dets, act, 1, 5, 5, per_step=solve_stub), lt)

def test_ims_stage_in_plan(fake_ims):
    logger.debug('test_ims_stage_in_plan')
    RE = RunEngine()
    m = fake_ims

    def plan():
        yield from open_run()
        yield from stage(m)
        yield from unstage(m)
        yield from close_run()

    RE(plan())

def _md(md):
    RE = RunEngine(md)
    RE.ignore_callback_exceptions = False

    scan = simple_scan(motor)
    assert_raises(KeyError, RE, scan)  # missing owner, beamline_id
    scan = simple_scan(motor)
    assert_raises(KeyError, RE, scan, owner='dan')
    scan = simple_scan(motor)
    RE(scan, owner='dan', beamline_id='his desk',
       group='some group', config={})  # this should work
    scan = simple_scan(motor)
    assert_raises(KeyError, RE, scan)  # this should fail; none was persisted

    RE.md['owner'] = 'dan'
    RE.md['group'] = 'some group'
    RE.md['config'] = {}
    RE.md['beamline_id'] = 'his desk'
    scan = simple_scan(motor)
    RE(scan)  # this should work

    RE.md.clear()
    scan = simple_scan(motor)
    assert_raises(KeyError, RE, scan)

    # We can prime the md directly.
    RE.md['owner'] = 'dan'
    RE.md['group'] = 'some group'
    RE.md['config'] = {}
    RE.md['beamline_id'] = 'his desk'
    scan = simple_scan(motor)
    RE(scan)

    # Check persistence.
    scan = simple_scan(motor)
    RE(scan, project='sitting')
    # 'project' should not persist
    scan = simple_scan(motor)
    RE(scan, subs={'start': [validate_dict_cb_opposite('project')]})

    # ...unless we add it to RE.md
    RE.md['project'] = 'sitting'
    scan = simple_scan(motor)
    RE(scan, subs={'start': [validate_dict_cb('project', 'sitting')]})

    # New values of 'project' passed in the call override the value in md...
    scan = simple_scan(motor)
    RE(scan, project='standing',
       subs={'start': [validate_dict_cb('project', 'standing')]})

    # ...but they do not update the value in md.
    assert_equal(RE.md['project'], 'sitting')

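# A hedged sketch of the validator helpers assumed by the two _md tests above;
# their real definitions are not shown in this collection. Each returns a
# start-document callback asserting the presence or absence of a metadata key.
def validate_dict_cb(key, value):
    def callback(name, doc):
        assert doc[key] == value
    return callback


def validate_dict_cb_opposite(key):
    def callback(name, doc):
        assert key not in doc
    return callback
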
# This adds {'proposal_id': 1} to all future runs, unless overridden.
RE.md['proposal_id'] = 1
RE(count([det]))
RE(scan([det], motor, 1, 5, 5))
RE(scan([det], motor, 1, 10, 10))
RE.md['proposal_id'] = 2
RE(count([det]))
RE(scan([det], motor, -1, 1, 5))
RE(relative_scan([det], motor, 1, 10, 10))
RE(scan([det], motor, -1, 1, 1000))
RE.md['proposal_id'] = 3

# This adds {'operator': 'Ken'} to all future runs, unless overridden.
RE.md['operator'] = 'Ken'
RE(count([det]), purpose='calibration', sample='A')
RE(scan([det], motor, 1, 10, 10), operator='Dan')  # temporarily overrides Ken
RE(count([det]), sample='A')  # (now back to Ken)
RE(count([det]), sample='B')
RE.md['operator'] = 'Dan'
RE(count([det]), purpose='calibration')
RE(scan([det], motor, 1, 10, 10))
del RE.md['operator']  # clean up by un-setting operator


if __name__ == '__main__':
    db = make_broker(os.path.expanduser('~/.data-cache/'))
    RE = RunEngine({})
    RE.subscribe('all', db.mds.insert)
    generate_data(RE)

import time
import sys

from bluesky.simulators import summarize_plan

# Set up a RunEngine and use metadata backed by a sqlite file.
from bluesky import RunEngine
from bluesky.utils import get_history
RE = RunEngine({})

# Set up a Broker.
from databroker import Broker
db = Broker.named('iss')
db_analysis = Broker.named('iss-analysis')

# Subscribe metadatastore to documents.
# If this is removed, data is not saved to metadatastore.
RE.subscribe(db.insert)

# Set up SupplementalData.
from bluesky import SupplementalData
sd = SupplementalData()
RE.preprocessors.append(sd)

# Add a progress bar.
from timeit import default_timer as timer
from bluesky.utils import ProgressBarManager
pbar_manager = ProgressBarManager()
# RE.waiting_hook = pbar_manager

def generate_example_catalog(data_path):
    data_path = Path(data_path)

    def factory(name, doc):
        serializer = Serializer(data_path / 'abc')
        serializer('start', doc)
        return [serializer], []

    RE = RunEngine()
    sd = SupplementalData()
    RE.preprocessors.append(sd)
    sd.baseline.extend([motor1, motor2])
    rr = RunRouter([factory])
    RE.subscribe(rr)
    RE(count([det]))
    RE(count([noisy_det], 5))
    RE(scan([det], motor, -1, 1, 7))
    RE(grid_scan([det4],
                 motor1, -1, 1, 4,
                 motor2, -1, 1, 7, False))
    RE(scan([det], motor, -1, 1, motor2, -1, 1, 5))
    RE(count([noisy_det, det], 5))
    # RE(count([img], 5))

    def factory(name, doc):
        serializer = Serializer(data_path / 'xyz')
        serializer('start', doc)
        return [serializer], []

    RE = RunEngine()
    rr = RunRouter([factory])
    RE.subscribe(rr)
    RE(count([det], 3))

    catalog_filepath = data_path / 'catalog.yml'
    with open(catalog_filepath, 'w') as file:
        file.write(f'''
plugins:
  source:
    - module: intake_bluesky
sources:
  abc:
    description: Some imaginary beamline
    driver: intake_bluesky.jsonl.BlueskyJSONLCatalog
    container: catalog
    args:
      paths: {Path(data_path) / 'abc' / '*.jsonl'}
      handler_registry:
        NPY_SEQ: ophyd.sim.NumpySeqHandler
    metadata:
      beamline: "00-ID"
  xyz:
    description: Some imaginary beamline
    driver: intake_bluesky.jsonl.BlueskyJSONLCatalog
    container: catalog
    args:
      paths: {Path(data_path) / 'xyz' / '*.jsonl'}
      handler_registry:
        NPY_SEQ: ophyd.sim.NumpySeqHandler
    metadata:
      beamline: "99-ID"
''')
    return str(catalog_filepath)

from databroker.tests.utils import temp_config
from databroker import Broker

# db setup
config = temp_config()
tempdir = config['metadatastore']['config']['directory']

def cleanup():
    shutil.rmtree(tempdir)

db = Broker.from_config(config)

RE = RunEngine({})
# subscribe BEC
bec = BestEffortCallback()
RE.subscribe(bec)
RE.subscribe(db.insert)

# move motors to a reproducible location
RE(mov(motor1, 0))
RE(mov(motor2, 0))
RE(relative_outer_product_scan([det4],
                               motor1, -1, 0, 10,
                               motor2, -2, 0, 20, True))
RE(outer_product_scan([det4],
                      motor1, -1, 0, 10,
                      motor2, -2, 0, 20, True))

# Set up a RunEngine and use metadata backed by a sqlite file.
from bluesky import RunEngine
from bluesky.utils import get_history
RE = RunEngine(get_history())

# Set up a Broker.
from databroker import Broker
db = Broker.named('csx')

# Subscribe metadatastore to documents.
# If this is removed, data is not saved to metadatastore.
RE.subscribe(db.insert)

# Set up SupplementalData.
from bluesky import SupplementalData
sd = SupplementalData()
RE.preprocessors.append(sd)

# Add a progress bar.
from bluesky.utils import ProgressBarManager
pbar_manager = ProgressBarManager()
RE.waiting_hook = pbar_manager

# Register bluesky IPython magics.
from bluesky.magics import BlueskyMagics
get_ipython().register_magics(BlueskyMagics)

# Set up the BestEffortCallback.
from bluesky.callbacks.best_effort import BestEffortCallback
bec = BestEffortCallback()
RE.subscribe(bec)

# (fragment: the tail of a custom per_step function; its def line is elided
# in this collection)
yield from move()
plt.pause(.001)
yield from trigger_and_read(list(detectors) + list(motors))


install_kicker()
p = Publisher(glbl_dict["inbound_proxy_address"])
hw = hw()
import numpy as np

rand_img = SynSignal(
    func=lambda: np.array(np.random.random((10, 10))),
    name="img",
    labels={"detectors"},
)
RE = RunEngine()

# build the pipeline
raw_source = Stream()
raw_output = SimpleFromEventStream(
    "event", ("data", "det_a"), raw_source, principle=True
)
raw_output2 = SimpleFromEventStream("event", ("data", "noisy_det"), raw_source)
raw_output3 = SimpleFromEventStream("event", ("data", "img"), raw_source)

pipeline = (
    raw_output.union(raw_output2, raw_output3.map(np.sum))
    .map(lambda x: x ** 2)
    .accumulate(lambda x, y: x + y)
)

res = SimpleToEventStream(pipeline, ("result",))