def setup_test_run_engine():
    """Return a RunEngine pre-populated with the standard test metadata.

    The owner/group/config/beamline_id values mimic what a real
    deployment would configure before running plans.
    """
    engine = RunEngine()
    test_metadata = (
        ('owner', 'test_owner'),
        ('group', 'Grant No. 12345'),
        ('config', {'detector_model': 'XYZ', 'pixel_size': 10}),
        ('beamline_id', 'test_beamline'),
    )
    for key, value in test_metadata:
        engine.md[key] = value
    return engine
def RE(request):
    """pytest fixture: a RunEngine with a BestEffortCallback on a fresh debug loop."""
    event_loop = asyncio.new_event_loop()
    event_loop.set_debug(True)
    engine = RunEngine({}, loop=event_loop)
    # Live table / plot feedback for every run.
    engine.subscribe(best_effort.BestEffortCallback())
    return engine
def setup_test_run_engine():
    """Create a RunEngine carrying the historical test metadata.

    The metadata configured here used to be required for the RE to be
    usable.  Now it is all optional, but maintained for legacy reasons.
    """
    engine = RunEngine()
    legacy_md = (
        ('owner', 'test_owner'),
        ('group', 'Grant No. 12345'),
        ('config', {'detector_model': 'XYZ', 'pixel_size': 10}),
        ('beamline_id', 'test_beamline'),
    )
    for key, value in legacy_md:
        engine.md[key] = value
    return engine
def fresh_RE(request):
    """pytest fixture: a RunEngine on a brand-new debug event loop.

    Callback exceptions are not ignored, so test failures surface
    immediately.  A finalizer halts any non-idle run and spins the loop
    once so pending callbacks are flushed before the next test.
    """
    loop = asyncio.new_event_loop()
    loop.set_debug(True)
    RE = RunEngine({}, loop=loop)
    RE.ignore_callback_exceptions = False

    def clean_event_loop():
        if RE.state != 'idle':
            RE.halt()
        # BUG FIX: asyncio.Event(loop=...) was deprecated in 3.8 and removed
        # in Python 3.10 (it raises TypeError there).  Create the event
        # without the keyword; it binds to the loop that awaits it inside
        # run_until_complete.
        ev = asyncio.Event()
        ev.set()
        loop.run_until_complete(ev.wait())

    request.addfinalizer(clean_event_loop)
    return RE
class RunEngineTraitType(TraitType):
    """Traitlets trait that accepts only RunEngine instances."""

    info_text = 'a RunEngine instance'
    # NOTE(review): instantiating a RunEngine at class-definition time is
    # heavyweight and shared by every owner of this trait — confirm intended.
    default_value = RunEngine(dict())

    def validate(self, obj, value):
        """Return *value* unchanged when it is a RunEngine; error otherwise."""
        if isinstance(value, RunEngine):
            return value
        # TraitType.error raises TraitError with a standard message.
        self.error(obj, value)
def RE(request):
    """pytest fixture: a RunEngine on a private debug event loop.

    Teardown halts any non-idle run and spins the loop once so pending
    callbacks are flushed before the loop is discarded.
    """
    loop = asyncio.new_event_loop()
    loop.set_debug(True)
    RE = RunEngine({}, loop=loop)

    def clean_event_loop():
        if RE.state != 'idle':
            RE.halt()
        # BUG FIX: asyncio.Event(loop=...) was deprecated in 3.8 and removed
        # in Python 3.10 (raises TypeError).  The event now binds to the
        # loop that awaits it inside run_until_complete.
        ev = asyncio.Event()
        ev.set()
        loop.run_until_complete(ev.wait())

    request.addfinalizer(clean_event_loop)
    return RE
def RE(request):
    """Parametrized pytest fixture: RunEngine with call_returns_result toggled.

    Runs on a private debug event loop.  Teardown halts any active run,
    stops the loop thread-safely, joins the engine's loop thread, and
    closes the loop.
    """
    event_loop = asyncio.new_event_loop()
    event_loop.set_debug(True)
    engine = RunEngine({}, call_returns_result=request.param, loop=event_loop)

    def _teardown():
        if engine.state not in ('idle', 'panicked'):
            try:
                engine.halt()
            except TransitionError:
                pass
        event_loop.call_soon_threadsafe(event_loop.stop)
        # Join the RunEngine's loop thread before closing the loop.
        engine._th.join()
        event_loop.close()

    request.addfinalizer(_teardown)
    return engine
def RE(request):
    """pytest fixture: RunEngine on a private debug event loop.

    Teardown halts any active run, stops the loop thread-safely, joins the
    engine's loop thread (bluesky >= 1.6.0 only) and closes the loop.
    """
    event_loop = asyncio.new_event_loop()
    event_loop.set_debug(True)
    engine = RunEngine({}, loop=event_loop)

    def _teardown():
        if engine.state not in ('idle', 'panicked'):
            try:
                engine.halt()
            except TransitionError:
                pass
        event_loop.call_soon_threadsafe(event_loop.stop)
        # bluesky >= 1.6.0 runs the loop in a private thread; join it first.
        if LooseVersion(bluesky.__version__) >= LooseVersion('1.6.0'):
            engine._th.join()
        event_loop.close()

    request.addfinalizer(_teardown)
    return engine
def bluesky_utils():
    """Assemble a RunEngine/Broker pair for SRW-based simulations.

    Returns
    -------
    tuple
        ``(RunEngine, Broker)`` — the engine streams its documents into
        the broker and into a plot-less BestEffortCallback; SRW file
        handlers are registered on the broker's registry.
    """
    # setup run engine
    engine = RunEngine({})
    callback = best_effort.BestEffortCallback()
    callback.disable_plots()
    engine.subscribe(callback)

    # setup databroker and mongo
    broker = Broker.named("local")
    # Sentinels may already be installed from a previous session; this is
    # best-effort, so failures are ignored.
    try:
        databroker.assets.utils.install_sentinels(broker.reg.config, version=1)
    except Exception:
        pass

    # setup file handler
    engine.subscribe(broker.insert)
    broker.reg.register_handler("srw", SRWFileHandler, overwrite=True)
    broker.reg.register_handler("SIREPO_FLYER", SRWFileHandler, overwrite=True)
    return engine, broker
def utils():
    """Build a RunEngine/Broker pair and the on-disk data tree for flyer data.

    Returns
    -------
    tuple
        ``(RunEngine, Broker)`` — documents flow into the broker and a
        BestEffortCallback; the ``srw`` handler is registered and a
        year-based directory tree is created under /tmp/sirepo_flyer_data.
    """
    # bluesky RunEngine
    engine = RunEngine({})
    engine.subscribe(best_effort.BestEffortCallback())

    # Mongo Backend
    broker = Broker.named("local")
    # Sentinels may already exist; install is best-effort.
    try:
        databroker.assets.utils.install_sentinels(broker.reg.config, version=1)
    except Exception:
        pass

    # update database info
    engine.subscribe(broker.insert)
    broker.reg.register_handler("srw", SRWFileHandler, overwrite=True)

    # store data
    root_dir = "/tmp/sirepo_flyer_data"
    _ = make_dir_tree(datetime.datetime.now().year, base_path=root_dir)
    return engine, broker
# Make plots update live while scans run. from bluesky.utils import install_qt_kicker install_qt_kicker() # import nslsii # Register bluesky IPython magics. #from bluesky.magics import BlueskyMagics #get_ipython().register_magics(BlueskyMagics) #nslsii.configure_base(get_ipython().user_ns, 'amx') import bluesky.plans as bp from bluesky.run_engine import RunEngine from bluesky.utils import get_history RE = RunEngine(get_history()) beamline = os.environ["BEAMLINE_ID"] from databroker import Broker db = Broker.named(beamline) RE.subscribe(db.insert) # from bluesky.callbacks.best_effort import BestEffortCallback # bec = BestEffortCallback() # RE.subscribe(bec) # convenience imports # from ophyd.commands import * from bluesky.callbacks import * # from bluesky.spec_api import *
class glbl():
    # Central configuration object: class attributes mirror the module-level
    # constants so the rest of the package reads settings from one place.
    beamline_host_name = BEAMLINE_HOST_NAME
    base = BASE_DIR
    home = HOME_DIR
    _export_tar_dir = _EXPORT_TAR_DIR
    xpdconfig = BLCONFIG_DIR
    import_dir = IMPORT_DIR
    config_base = CONFIG_BASE
    tiff_base = TIFF_BASE
    usrScript_dir = USERSCRIPT_DIR
    yaml_dir = YAML_DIR
    allfolders = ALL_FOLDERS
    archive_dir = USER_BACKUP_DIR
    dk_yaml = DARK_YAML_NAME
    dk_window = DARK_WINDOW
    frame_acq_time = FRAME_ACQUIRE_TIME
    auto_dark = True
    owner = OWNER
    beamline_id = BEAMLINE_ID
    group = GROUP
    _allowed_scanplan_type = ALLOWED_SCANPLAN_TYPE

    # logic to assign correct objects depends on simulation or real experiment
    # NOTE(review): the original formatting was lost; this conditional is
    # assumed to live inside the class body (so owner/beamline_id/group
    # resolve against the attributes above) — TODO confirm against history.
    if not simulation:
        from bluesky.run_engine import RunEngine
        from bluesky.register_mds import register_mds
        # import real object as other names to avoid possible
        # self-referencing later
        from bluesky import Msg as msg
        from bluesky.plans import Count as count
        from bluesky.plans import AbsScanPlan as absScanPlan
        from databroker import DataBroker
        from databroker import get_images as getImages
        from databroker import get_events as getEvents
        from bluesky.callbacks import LiveTable as livetable
        from bluesky.callbacks.broker import verify_files_saved as verifyFiles
        from ophyd import EpicsSignalRO, EpicsSignal
        from bluesky.suspenders import SuspendFloor

        ring_current = EpicsSignalRO('SR:OPS-BI{DCCT:1}I:Real-I',
                                     name='ring_current')
        xpdRE = RunEngine()
        xpdRE.md['owner'] = owner
        xpdRE.md['beamline_id'] = beamline_id
        xpdRE.md['group'] = group
        register_mds(xpdRE)
        # Suspend runs when the storage-ring current falls below 90% of its
        # value at startup; wait 1200 s after recovery before resuming.
        beamdump_sus = SuspendFloor(ring_current, ring_current.get() * 0.9,
                                    resume_thresh=ring_current.get() * 0.9,
                                    sleep=1200)
        #xpdRE.install_suspender(beamdump_sus)  # don't enable it until beam is back

        # real imports
        Msg = msg
        Count = count
        db = DataBroker
        LiveTable = livetable
        get_events = getEvents
        get_images = getImages
        AbsScanPlan = absScanPlan
        verify_files_saved = verifyFiles

        # real collection objects (populated elsewhere at runtime)
        area_det = None
        temp_controller = None
        shutter = None
    else:
        simulation = True
        ARCHIVE_BASE_DIR = os.path.join(BASE_DIR,
                                        'userSimulationArchive')
        # mock imports
        Msg = MagicMock()
        Count = MagicMock()
        AbsScanPlan = MagicMock()
        db = MagicMock()
        get_events = MagicMock()
        get_images = MagicMock()
        LiveTable = mock_livetable
        verify_files_saved = MagicMock()
        # mock collection objects
        xpdRE = MagicMock()
        temp_controller = MagicMock()
        shutter = mock_shutter()
        area_det = MagicMock()
        area_det.cam = MagicMock()
        area_det.cam.acquire_time = MagicMock()
        area_det.cam.acquire_time.put = MagicMock(return_value=0.1)
        area_det.cam.acquire_time.get = MagicMock(return_value=0.1)
        area_det.number_of_sets = MagicMock()
        area_det.number_of_sets.put = MagicMock(return_value=1)
        print('==== Simulation being created in current directory:{} ===='.
              format(BASE_DIR))
        # (fragment) tail of an enclosing detector callable whose `def` lies
        # outside this view; the indentation below is assumed — TODO confirm.
        out = proj2[int(v), :, int(vv)]
        print(v, vv, mmm.get()[0])
        time.sleep(.5)
        return np.squeeze(out)


f = FullField()
# det = SynSignal(f, name="img", labels={"detectors"})
det = SynSignalWithRegistry(f, name="img", labels={"detectors"},)
det.kind = "hinted"
#g = Pencil()
#det2 = SynSignal(g, name="img", labels={"detectors"})
#det2.kind = "hinted"

# RunEngine publishing 'raw' documents to the inbound 0MQ proxy.
RE = RunEngine()
RE.md['analysis_stage'] = 'raw'
p = Publisher(glbl_dict["inbound_proxy_address"], prefix=b"raw")
t = RE.subscribe(p)
# RE.subscribe(print)

# Build scan: starting from {0, 90}, repeatedly insert midpoints (8 passes),
# producing a binary-subdivision ordering of angles in [0, 180).
l = [0, 90]
for i in range(8):
    ll = l.copy()
    # Half of the smallest nonzero angle currently in the list.
    interval = sorted(set(ll))[1] / 2
    for lll in ll:
        j = lll + interval
        j = round(j, 0)
        if j not in l and j < 180:
            l.append(j)
def fresh_RE(request):
    """pytest fixture: RunEngine on a new debug event loop; callback errors fatal."""
    event_loop = asyncio.new_event_loop()
    event_loop.set_debug(True)
    engine = RunEngine({}, loop=event_loop)
    # Surface callback exceptions instead of swallowing them during tests.
    engine.ignore_callback_exceptions = False
    return engine
import datetime
import json  # noqa F401
import databroker
import matplotlib.pyplot as plt
import numpy as np  # noqa F401
from bluesky.callbacks import best_effort
from bluesky.run_engine import RunEngine
from databroker import Broker
from ophyd.utils import make_dir_tree
from sirepo_bluesky.shadow_handler import ShadowFileHandler
from sirepo_bluesky.srw_handler import SRWFileHandler

# RunEngine with live table/plot feedback.
RE = RunEngine({})
bec = best_effort.BestEffortCallback()
RE.subscribe(bec)

# MongoDB backend:
db = Broker.named('local')  # mongodb backend
try:
    # Sentinels may already be installed from a previous session.
    databroker.assets.utils.install_sentinels(db.reg.config, version=1)
except Exception:
    pass

# Insert every emitted document into the broker and register file handlers.
RE.subscribe(db.insert)
db.reg.register_handler('srw', SRWFileHandler, overwrite=True)
# db.reg.register_handler('shadow', ShadowFileHandler, overwrite=True)
db.reg.register_handler('SIREPO_FLYER', SRWFileHandler, overwrite=True)

# Interactive plotting so figures update as scans run.
plt.ion()
f = FullField() # det = SynSignal(f, name="img", labels={"detectors"}) det = SynSignalWithRegistry( f, name="img", labels={"detectors"}, ) det.kind = "hinted" #g = Pencil() #det2 = SynSignal(g, name="img", labels={"detectors"}) #det2.kind = "hinted" RE = RunEngine() RE.md['analysis_stage'] = 'raw' p = Publisher(glbl_dict["inbound_proxy_address"], prefix=b"raw") t = RE.subscribe(p) # RE.subscribe(print) # Build scan l = [0, 90] for i in range(8): ll = l.copy() interval = sorted(set(ll))[1] / 2 for lll in ll: j = lll + interval j = round(j, 0) if j not in l and j < 180: l.append(j)
def x348_scan_tool(N, M, start_pt, n_pt, m_pt, start=0, stop=None, delay=.15,
                   use_seq=True):
    """
    Single function for running the x348 snake scan with minimal python
    overhead.

    All (X,Y,Z) coordinates for the motors are specified using the motor's
    position readout when the sample is at the given point.  E.g. the start
    point is found by aligning the starting sample in the beam and copying
    the motors' current positions.  All of the (X,Y,Z) coordinates use this
    system.  None of the (X,Y,Z) coordinates are relative.

    N corresponds with the horizontal (X) axis.
    M corresponds with the horizontal (Y) axis.

    Parameters
    ----------
    N : int
        The number of samples in the N direction.
    M : int
        The number of samples in the M direction.
    start_pt : np.array or list
        3-length array specifying the starting point (X,Y,Z) of the motors.
        This is used for calibrating the scan area.
    n_pt : np.array or list
        3-length array specifying the location (X,Y,Z) of the last point in
        the N direction in the same row as the start_pt.  This is used for
        calibrating the scan area.
    m_pt : np.array or list
        3-length array specifying the location (X,Y,Z) of the last point in
        the M direction in the same column as the start_pt.  This is used
        for calibrating the scan area.
    start : int
        Index of the first point to scan.  Index numbers are allotted
        starting at zero and counting down the samples in the order that
        the 'snakey path' will visit these points.  Indexing starts at 0.
    stop : int or None
        Index of the end of the scan.  This value is EXCLUSIVE.  E.g. using
        a start value of 3 and a stop value of 6 means that sample indexes
        3, 4, and 5 will be scanned but NOT 6.  If None is passed, only the
        index listed by 'start' will be scanned.  Defaults to None.
    delay : float
        Time to wait for the sequencer to complete.  Units are in seconds.
        Defaults to .15s.
    use_seq : bool
        Defaults to True.  Set this to False to disable the sequencer (and
        hence the beam if it's in burst mode) for a run.  This is good for
        simulating runs before they happen.
    """
    # Normalize calibration points so downstream math can rely on ndarrays.
    start_pt = np.array(start_pt)
    n_pt = np.array(n_pt)
    m_pt = np.array(m_pt)
    sq = SetSequencer("ECS:SYS0:1", name="Event Sequencer")
    pal = McgranePalette(name="mcgpal", N=N, M=M, chip_spacing=0,
                         chip_dims=[N])
    pal._accept_calibration(
        start_pt=start_pt,
        n_pt=n_pt,
        m_pt=m_pt,
    )
    #pal.x_motor.move(start_pt[0])
    #pal.y_motor.move(start_pt[1])
    #pal.z_motor.move(start_pt[2])
    RE = RunEngine({})
    # NOTE(review): `use_seq` is accepted and documented above but never
    # forwarded to x348_scan — confirm whether the plan should receive it.
    RE(run_wrapper(x348_scan(
        pal,
        sq,
        start,
        stop,
        sequencer_delay=delay,
    )))
# Import matplotlib and put it in interactive mode. import matplotlib.pyplot as plt plt.ion() # import nslsii # Register bluesky IPython magics. #from bluesky.magics import BlueskyMagics #get_ipython().register_magics(BlueskyMagics) #nslsii.configure_base(get_ipython().user_ns, 'amx') import bluesky.plans as bp from bluesky.run_engine import RunEngine from bluesky.utils import get_history, PersistentDict RE = RunEngine() beamline = os.environ["BEAMLINE_ID"] configdir = os.environ['CONFIGDIR'] RE.md = PersistentDict('%s%s_bluesky_config' % (configdir, beamline)) from databroker import Broker db = Broker.named(beamline) RE.subscribe(db.insert) # from bluesky.callbacks.best_effort import BestEffortCallback # bec = BestEffortCallback() # RE.subscribe(bec) # convenience imports # from ophyd.commands import * from bluesky.callbacks import *
# Create a graph source = Stream() # Convert from raw event model to data fes = FromEventStream('event', ('data', 'noisy_det'), source, principle=True) # Averageing graph adder = fes.accumulate(lambda x, y: x + y) counter = fes.accumulate(lambda s, x: s + 1, start=0) averager = adder.zip(counter).map(lambda x: x[0] / x[1]) # Binned averaging sw = fes.sliding_window(2).map(sum).map(lambda x: x / 2) # Convert back to Event Model tes1 = ToEventStream(averager, ('average', )) tes2 = ToEventStream(sw, ('binned', )) # sink to plotting tes1.sink(lambda x: lp(*x)) tes2.sink(lambda x: lp2(*x)) # Run the scan RE = RunEngine() t = RE.subscribe(lambda *x: source.emit(x)) # RE.subscribe(lp3) # RE.subscribe(print) source.visualize(source_node=True) RE(count([hw().noisy_det], 100)) plt.show()
simmotor2.read()
simmotor2.set(1)
# RE(scan([detector],simmotor1,0,14,10))

#Flyer
from ophyd.sim import hw
from bluesky.run_engine import RunEngine
from databroker import temp_config, Broker
from bluesky.plans import fly
import bluesky.plans as bp

# NOTE: rebinds the imported factory name `hw` to the namespace it returns.
hw = hw()
flying_zebra = hw.flyer1
db = Broker.named('temp')
RE = RunEngine()
RE.subscribe(db.insert)
# Two fly runs; each produces its own run in the broker.
RE(fly([flying_zebra]))
RE(fly([flying_zebra]))
hdr = db[-1]
hdr.stream_names
hdr.table('stream_name')
hw.direct_img
hw.det.exposure_time = 1
RE(bp.count([hw.det], num=3))
db[-1].table()