Example 1
def run():
    """
    # setup for simulated IOC
    EPICS_CA_ADDR_LIST=127.0.0.1:5064
    EPICS_CA_AUTO_ADDR_LIST=NO

    Run mongo.

    Run simulated IOC.
    """
    # arg_parser = argparse.ArgumentParser()
    # arg_parser.add_argument("--agent-name", required=True, type=str)
    # arg_parser.add_argument("--episode-count", required=True, type=int)

    # args = arg_parser.parse_args()

    RE = RunEngine()

    bec = BestEffortCallback()

    RE.subscribe(bec)

    db = catalog["mad"]  # this is set up by entrypoint

    RE.subscribe(db.v1.insert)

    tiff_sim_detector = NewPerkinElmerDetector(prefix="Sim{{det1}}:",
                                               name="tiff_sim_detector")
    RE(count([tiff_sim_detector]))
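
The docstring above lists the channel-access environment for the simulated IOC. A minimal sketch of applying those settings from Python before any devices connect (the values come from the docstring; setting them via os.environ rather than the shell is an assumption about how the session is launched):

import os

# Restrict EPICS channel access to the local simulated IOC
# (values taken from the docstring above). These must be set
# before ophyd/EPICS creates any channels.
os.environ["EPICS_CA_ADDR_LIST"] = "127.0.0.1:5064"
os.environ["EPICS_CA_AUTO_ADDR_LIST"] = "NO"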
Example 2
    def run(self):
        """
        Overrides the `run()` function of the `multiprocessing.Process` class. Called
        by the `start` method.
        """
        self._exit_event.clear()

        self._RE = RunEngine({})

        bec = BestEffortCallback()
        self._RE.subscribe(bec)

        # db = Broker.named('temp')
        self._RE.subscribe(self._db.insert)

        self._execution_queue = queue.Queue()

        self._thread_conn = threading.Thread(
            target=self._receive_packet_thread, name="RE Worker Receive")
        self._thread_conn.start()

        # Now make the main thread busy
        self._execute_in_main_thread()

        self._thread_conn.join()

        del self._RE
Example 3
def test_serp_scan():
    """Note: run this standalone, not inside mfx hutch python."""
    import numpy as np
    from bluesky import RunEngine
    from bluesky.callbacks.best_effort import BestEffortCallback
    from ophyd.sim import motor1, motor2
    from ophyd.status import StatusBase
    from pcdsdaq.daq import Daq
    from pcdsdaq.sim import set_sim_mode

    class FakeSeq:
        def trigger(self):
            print('Triggered the sequencer!')
            status = StatusBase()
            status.set_finished()
            return status

    set_sim_mode(True)
    RE = RunEngine({})
    bec = BestEffortCallback()
    RE.subscribe(bec)
    seq = FakeSeq()
    daq = Daq(RE=RE)

    RE(serp_seq_scan(motor1, np.arange(100, 200, 10), motor2, [0, 100], seq))
Example 4
def test_plans(RE, pln, args, kwargs, hw):
    args = tuple(getattr(hw, v, v) if isinstance(v, str) else v for v in args)
    det = hw.det
    bec = BestEffortCallback()
    RE.subscribe(bec)
    RE.subscribe(checker)
    RE(pln([det], *args, **kwargs))
Example 5
def test_with_baseline(fresh_RE):
    RE = fresh_RE
    det = Detector()
    bec = BestEffortCallback()
    RE.subscribe(bec)
    sd = SupplementalData(baseline=[det3])
    RE.preprocessors.append(sd)
    RE(scan([det], motor, 1, 5, 5))
Example 6
def test_plans_motors_no_hints(RE, pln, args, kwargs, hw):
    args = tuple(getattr(hw, v, v) if isinstance(v, str) else v for v in args)
    det = hw.det
    for v in args:
        assert not hasattr(v, 'hints')
    bec = BestEffortCallback()
    RE.subscribe(bec)
    RE(pln([det], *args, **kwargs))
Example 7
def macro_sweep_test(target):
    logging.info(
        'macro_sweep_test initiated with target {:0.4f}'.format(target))
    RE = RunEngine({})
    bec = BestEffortCallback()
    RE.subscribe(bec)
    RE.waiting_hook = ProgressBarManager()
    RE(run_wrapper(rel_smooth_sweep_test(tst_23, target)))
Example 8
def test_multirun_nested_plan(capsys, caplog, RE, hw):
    # This test only checks if the plan runs without crashing. If BEC crashes,
    #   the plan will still run, but data will not be displayed.
    @bpp.set_run_key_decorator(run="inner_run")
    def plan_inner():
        yield from grid_scan([hw.det4], hw.motor1, 0, 1, 1, hw.motor2, 0, 1, 2,
                             True)

    def sequence():
        for n in range(5):
            yield from bps.mov(hw.motor, n * 0.1 + 1)
            yield from bps.trigger_and_read([hw.det1])

    @bpp.set_run_key_decorator(run="outer_run")
    @bpp.stage_decorator([hw.det1, hw.motor])
    @bpp.run_decorator(md={})
    def plan_outer():
        yield from sequence()
        # Call inner plan from within the plan
        yield from plan_inner()
        # Run another set of commands
        yield from sequence()

    # The first part is expected to fail: a plain BestEffortCallback cannot handle two
    #   simultaneous runs, so we check that the expected error message is printed.
    bec = BestEffortCallback()
    bec_token = RE.subscribe(bec)
    RE(plan_outer())

    captured = capsys.readouterr()

    # Check for the number of runs (the number of times UID is printed in the output)
    scan_uid_substr = "Persistent Unique Scan ID"
    n_runs = captured.out.count(scan_uid_substr)
    assert n_runs == 2, "scan output contains incorrect number of runs"
    # Check that the expected error message is printed when the callback fails. The same
    #   substring is used in the second part of the test to verify that BEC did not fail.
    err_msg_substr = "is being suppressed to not interrupt plan execution"
    assert err_msg_substr in str(caplog.text), \
        "Best Effort Callback failed, but expected error message was not printed"

    RE.unsubscribe(bec_token)
    caplog.clear()

    # The second test should succeed, i.e. the error message should not be printed
    def factory(name, doc):
        bec = BestEffortCallback()
        return [bec], []

    rr = RunRouter([factory])
    RE.subscribe(rr)
    RE(plan_outer())

    captured = capsys.readouterr()
    n_runs = captured.out.count(scan_uid_substr)
    assert n_runs == 2, "scan output contains incorrect number of runs"
    assert err_msg_substr not in caplog.text, \
        "Best Effort Callback failed while executing nested plans"
Example 9
def test_underhinted_plan(RE, hw):
    bec = BestEffortCallback()
    RE.subscribe(bec)

    @bpp.run_decorator()
    def broken_plan(dets):
        yield from bps.trigger_and_read(dets)

    RE(broken_plan([hw.det]))
Example 10
def get_runengine(db=None):
    """
    Return an instance of RunEngine.  It is recommended to have only
    one RunEngine per session.
    """
    RE = RunEngine({})
    db = metadata_db if db is None else db
    RE.subscribe(db.insert)
    RE.subscribe(BestEffortCallback())
    return RE
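
A minimal usage sketch for the factory above, assuming the module-level metadata_db is configured and using a simulated detector from ophyd.sim for illustration:

from bluesky.plans import count
from ophyd.sim import det  # simulated detector, for illustration only

RE = get_runengine()     # returns a RunEngine with metadata_db.insert and a BestEffortCallback subscribed
RE(count([det], num=3))  # the BestEffortCallback prints a live table for the three readings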
Example 11
def test_plans_motor_empty_hints(RE, pln, args, kwargs, hw):
    args = tuple(getattr(hw, v, v) if isinstance(v, str) else v for v in args)
    for v in args:
        if hasattr(v, 'hints'):
            v.hints = {}
            assert v.hints == {}
    det = hw.det
    bec = BestEffortCallback()
    RE.subscribe(bec)
    RE(pln([det], *args, **kwargs))
Example 12
    def __init__(self, mode='debug'):
        self.RE = bluesky.RunEngine({})
        self.db = databroker.Broker.named("mongodb_config")
        self.RE.subscribe(self.db.insert)
        self.RE.subscribe(BestEffortCallback())

        self._mode = mode
        from apstools.devices import ApsMachineParametersDevice
        self._aps = ApsMachineParametersDevice(name="APS")
        self.shutter = Experiment.get_shutter(mode)
        self.suspend_shutter = SuspendFloor(self.shutter.pss_state, 1)
Example 13
def replay(headers, callback=None, sort=True):
    """
    replay the document stream from one (or more) scans (headers)

    PARAMETERS

    headers: scan or [scan]
        Scan(s) to be replayed through callback.
        A *scan* is an instance of a Bluesky `databroker.Header`.
        see: https://nsls-ii.github.io/databroker/api.html?highlight=header#header-api

    callback: callable or None
        The Bluesky callback to handle the stream of documents from a scan.
        If `None`, then use the `bec` (BestEffortCallback) from the IPython shell.
        (default:``None``)

    sort: bool
        Sort the headers chronologically if True.
        (default:``True``)

    *new in apstools release 1.1.11*
    """
    callback = callback or ipython_shell_namespace().get(
        "bec",  # get from IPython shell
        BestEffortCallback(),  # make one, if we must
    )
    _headers = headers  # do not mutate the input arg
    if isinstance(_headers, databroker.Header):
        _headers = [_headers]

    def increasing_time_sorter(run):
        return run.start["time"]

    def decreasing_time_sorter(run):
        """Default for databroker v0 results."""
        return -run.start["time"]

    sorter = {
        True: increasing_time_sorter,
        False: decreasing_time_sorter
    }[sort]

    for h in sorted(_headers, key=sorter):
        if not isinstance(h, databroker.Header):
            raise TypeError(
                f"Must be a databroker Header: received: {type(h)}: |{h}|")
        cmd = _rebuild_scan_command(h.start)
        logger.debug(f"{cmd}")

        # at last, this is where the real action happens
        for k, doc in h.documents():  # get the stream
            callback(k, doc)  # play it through the callback
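
A usage sketch for replay, assuming a databroker v1 catalog configured under the (hypothetical) name "mongodb_config"; the run selection db[-1] is illustrative:

import databroker
from bluesky.callbacks.best_effort import BestEffortCallback

db = databroker.Broker.named("mongodb_config")  # hypothetical catalog name
bec = BestEffortCallback()

# Re-emit the documents of the most recent run through the BEC,
# reproducing its table and plots after the fact.
replay(db[-1], callback=bec)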
Example 14
def macro_RSXS_smooth_sweep(stroke_height,
                            stroke_spacing,
                            n_strokes,
                            both_directions=True):
    """
    macro_RSXS_smooth_sweep

    This method wraps up the bluesky/ophyd code and allows users to drive
    the LU20 experiment with minimal code overhead. It contains the following
    bluesky plan.

    This bluesky plan moves a 2-axis actuator across multiple traversals of a
    sample. The plan traverses the entirety of the stroke_height (y-axis) and,
    after each traversal, steps in the x-axis by the stroke_spacing. It may be
    configured to scan in only a single direction and shutter the beam for the
    opposite direction. The plan removes the shutter at the beginning and
    reinserts it at the end. At the end of the plan, the sample is moved to
    its original y-axis position but with an x-axis position ready for the next
    run. For more details about the path, see the documentation of
    xy_sequencer, the method that generates the sample's path.

    Parameters
    ----------
    stroke_height : float
        Vertical distance (y-axis) of each stroke.

    stroke_spacing : float
        Horizontal distance between individual strokes.

    n_strokes : int
        Number of strokes to complete.

    both_directions : bool, optional
        Defaults to True. If this value is true the beam will be scanned across
        the sample while moving in both vertical directions. If false, the beam
        is only scanned in a single direction.
    """

    RE = RunEngine({})
    bec = BestEffortCallback()
    RE.subscribe(bec)
    RE.waiting_hook = ProgressBarManager()
    RE(
        run_wrapper(
            rel_smooth_sweep(mot_x=rsxs_sample_x,
                             mot_y=rsxs_sample_y,
                             shutter=shutter,
                             stroke_height=stroke_height,
                             stroke_spacing=stroke_spacing,
                             n_strokes=n_strokes,
                             both_directions=both_directions)))
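
A hypothetical invocation of the wrapper above; the numbers are illustrative and assume the LU20 motors and shutter referenced in the body are already configured:

# Ten vertical strokes of height 5.0, stepping 0.5 between strokes
# (motor units), scanning the beam in both vertical directions.
macro_RSXS_smooth_sweep(stroke_height=5.0,
                        stroke_spacing=0.5,
                        n_strokes=10,
                        both_directions=True)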
Example 15
def test_bec_peak_stats_derivative_and_stats(RE, hw):
    bec = BestEffortCallback(calc_derivative_and_stats=True)
    RE.subscribe(bec)

    c = DocCollector()
    RE.subscribe(c.insert)

    res = RE(scan([hw.ab_det], hw.motor, 1, 5, 5))

    if RE.call_returns_result:
        uid = res.run_start_uids[0]
    else:
        uid = res[0]

    desc_uid = c.descriptor[uid][0]["uid"]
    ps = bec._peak_stats[desc_uid]["det_a"]

    assert hasattr(ps, "derivative_stats")

    fields = ["min", "max", "com", "cen", "fwhm", "crossings"]
    der_fields = ["x", "y"] + fields
    for field in der_fields:
        assert hasattr(ps.derivative_stats,
                       field), f"{field} is not an attribute of ps.der"

    assert isinstance(ps.__repr__(), str)

    # These imports are needed by the `eval` below:
    from numpy import array  # noqa F401
    from collections import OrderedDict  # noqa F401

    out = eval(str(ps))
    assert isinstance(out, dict)
    for key in ("stats", "derivative_stats"):
        assert key in out

    for field in fields:
        stats_value = getattr(ps.stats, field)
        out_value = out["stats"][field]
        if stats_value is not None:
            assert np.allclose(stats_value, out_value)
        else:
            assert stats_value == out_value

    for field in der_fields:
        stats_value = getattr(ps.derivative_stats, field)
        out_value = out["derivative_stats"][field]
        if stats_value is not None:
            assert np.allclose(stats_value, out_value)
        else:
            assert stats_value == out_value
Example 16
    def __init__(self,
                 queue,
                 teleport,
                 *scan_widgets,
                 live_widget=None,
                 **kwargs):
        super().__init__(**kwargs)
        self.label = label = StartLabel()
        self.queue = queue
        self.teleport = teleport
        self.md_parameters = MetaDataEntry(name='Metadata')
        self.md_widget = ParameterTree()
        self.md_widget.setParameters(self.md_parameters)
        outmost_layout = QtWidgets.QHBoxLayout()

        input_layout = QtWidgets.QVBoxLayout()
        outmost_layout.addLayout(input_layout)

        input_layout.addWidget(label)
        self.tabs = TabScanSelector(*scan_widgets)

        input_layout.addWidget(self.tabs)
        for sw in scan_widgets:
            sw.md_parameters = self.md_parameters

        self.go_button = QtWidgets.QPushButton('SCAN!')
        self.md_button = QtWidgets.QPushButton('edit metadata')
        input_layout.addWidget(self.md_button)
        input_layout.addWidget(self.go_button)

        self.teleport.name_doc.connect(label.doc_consumer)

        self.cbr = Dispatcher()
        self.bec = BestEffortCallback()
        self.teleport.name_doc.connect(
            lambda name, doc: self.cbr.process(DocumentNames(name), doc))
        self.cbr.subscribe(self.bec)

        def runner():
            self.queue.put(self.tabs.get_plan())

        self.go_button.clicked.connect(runner)
        self.md_button.clicked.connect(self.md_widget.show)

        if live_widget is None:
            live_widget = LivePlaceholder()
        self.live_widget = live_widget
        self.teleport.name_doc.connect(live_widget.doc_consumer)
        outmost_layout.addWidget(live_widget)

        self.setLayout(outmost_layout)
Example 17
def test_plot_ints(RE):
    from ophyd import Signal
    from bluesky.callbacks.best_effort import BestEffortCallback
    from bluesky.plans import count
    import bluesky.plan_stubs as bps

    bec = BestEffortCallback()
    RE.subscribe(bec)

    s = Signal(name='s')
    RE(bps.mov(s, int(0)))
    assert s.describe()['s']['dtype'] == 'integer'
    s.kind = 'hinted'
    with pytest.warns(None) as record:
        RE(count([s], num=35))

    assert len(record) == 0
Example 18
def getRunEngine(db=None):
    """
    Return an instance of RunEngine.  It is recommended to have only
    one RunEngine per session.
    """
    RE = RunEngine({})
    db = metadata_db if db is None else db
    RE.subscribe(db.insert)
    RE.subscribe(BestEffortCallback())
    RE.md['beamline_id'] = 'APS 6-BM-A'
    RE.md['proposal_id'] = 'internal test'
    RE.md['pid'] = os.getpid()
    RE.md['login_id'] = USERNAME + '@' + HOSTNAME
    RE.md['BLUESKY_VERSION'] = bluesky.__version__
    RE.md['OPHYD_VERSION'] = ophyd.__version__
    RE.md['apstools_VERSION'] = apstools.__version__
    RE.md['SESSION_STARTED'] = datetime.isoformat(datetime.now(), " ")
    return RE
Example 19
    def run(self):
        """
        Overrides the `run()` function of the `multiprocessing.Process` class. Called
        by the `start` method.
        """
        self._exit_event = threading.Event()

        # TODO: TC - Do you think that the following code may be included in RE.__init__()
        #   (for Python 3.8 and above)
        # Setting the default event loop is needed to make the code work with Python 3.8.
        loop = get_bluesky_event_loop()
        asyncio.set_event_loop(loop)

        self._RE = RunEngine({})

        bec = BestEffortCallback()
        self._RE.subscribe(bec)

        # db = Broker.named('temp')
        self._RE.subscribe(self._db.insert)

        self._execution_queue = queue.Queue()

        self._thread_conn = threading.Thread(
            target=self._receive_packet_thread, name="RE Worker Receive")
        self._thread_conn.start()

        # Environment is initialized: send a report
        msg = {"type": "report", "value": {"action": "environment_created"}}
        self._conn.send(msg)

        # Now make the main thread busy
        self._execute_in_main_thread()

        self._thread_conn.join()

        del self._RE

        # Finally send a report
        msg = {"type": "report", "value": {"action": "environment_closed"}}
        self._conn.send(msg)
Example 20
def test_push_start_document(capsys):
    """ Pass the start document to BEC and verify if the scan information is printed correctly"""

    bec = BestEffortCallback()

    uid = new_uid()
    time = ttime.time()
    scan_id = 113435  # Just some arbitrary number

    # Include minimum information needed to print the header
    bec("start", {"scan_id": scan_id, "time": time, "uid": uid})

    captured = capsys.readouterr()
    assert f"Transient Scan ID: {scan_id}" in captured.out, \
        "BestEffortCallback: Scan ID is not printed correctly"

    tt = datetime.fromtimestamp(time).utctimetuple()
    assert f"Time: {ttime.strftime('%Y-%m-%d %H:%M:%S', tt)}" in captured.out, \
        "BestEffortCallback: Scan time is not printed correctly"
    assert f"Persistent Unique Scan ID: '{uid}'" in captured.out, \
        "BestEffortCallback: Scan UID is not printed correctly"
Example 21
def run():
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument("--agent-name", required=True, type=str)
    arg_parser.add_argument("--episode-count", required=True, type=int)

    args = arg_parser.parse_args()

    RE = RunEngine()

    bec = BestEffortCallback()

    RE.subscribe(bec)

    db = Broker.named("bluesky-cartpole")

    # insert bluesky documents into databroker
    RE.subscribe(db.insert)

    RE(
        train_cartpole_agent(agent_name=args.agent_name,
                             episode_count=args.episode_count))
Example 22
def getRunEngine(db=None):
    """
    Return an instance of RunEngine.  It is recommended to have only
    one RunEngine per session.
    """
    RE = RunEngine({})
    db = db or metadata_db
    RE.subscribe(db.insert)
    RE.subscribe(BestEffortCallback())
    RE.md['beamline_id'] = 'APS 6-BM-A'
    RE.md['proposal_id'] = 'internal test'
    RE.md['pid'] = os.getpid()
    RE.md['login_id'] = USERNAME + '@' + HOSTNAME
    RE.md['versions'] = {}
    RE.md['versions']['apstools'] = apstools.__version__
    RE.md['versions']['bluesky'] = bluesky.__version__
    RE.md['versions']['databroker'] = databroker.__version__
    RE.md['versions']['matplotlib'] = matplotlib.__version__
    RE.md['versions']['numpy'] = np.__version__
    RE.md['versions']['ophyd'] = ophyd.__version__
    RE.md['SESSION_STARTED'] = datetime.isoformat(datetime.now(), " ")
    return RE
Example 23
def test_plot_prune_fifo(RE, hw):
    bec = BestEffortCallback()
    RE.subscribe(bec)

    num_pruned = 2

    # create the LivePlot
    RE(bps.repeater(num_pruned, scan, [hw.ab_det], hw.motor, 1, 5, 5))

    # test it
    assert len(bec._live_plots) == 1

    # get the reference key for our LivePlot dict
    uuid = next(iter(bec._live_plots))
    assert len(bec._live_plots[uuid]) == 1

    # get reference key for our detector
    det_name = next(iter(bec._live_plots[uuid]))
    # should be same as hw.ab_det.a.name (`.a` comes from .read_attrs[0]), prove it now
    assert det_name == hw.ab_det.a.name

    # get the LivePlot object
    lp = bec._live_plots[uuid][det_name]

    assert lp is not None
    assert len(lp.ax.lines) == num_pruned

    # prune the LivePlot (no effect here: we already have exactly the number of lines to keep)
    bec.plot_prune_fifo(num_pruned, hw.motor, hw.ab_det.a)
    assert len(lp.ax.lines) == num_pruned

    # add more lines to the LivePlot
    RE(bps.repeater(num_pruned, scan, [hw.ab_det], hw.motor, 1, 5, 5))

    # get the LivePlot object, again, in case the UUID was changed
    assert len(bec._live_plots) == 1
    uuid = next(iter(bec._live_plots))
    lp = bec._live_plots[uuid][det_name]
    assert lp is not None
    assert len(lp.ax.lines) == num_pruned * 2

    # prune again, this time reduces number of lines
    bec.plot_prune_fifo(num_pruned, hw.motor, hw.ab_det.a)
    assert len(lp.ax.lines) == num_pruned
Example 24
from bluesky import RunEngine
from bluesky.plans import scan
from bluesky.callbacks.best_effort import BestEffortCallback
from ophyd.sim import SynGauss

from status import StatusAxis

# Create simulated devices
motor = StatusAxis(name='motor')
det = SynGauss('det', motor, 'motor', center=0, Imax=1, sigma=1)
det.kind = 'hinted'
motor.prefix = 'fake:Prefix'
# Create our RunEngine
RE = RunEngine()
RE.subscribe(BestEffortCallback())
Example 25
from bluesky import RunEngine
from bluesky.callbacks.best_effort import BestEffortCallback
from bluesky.utils import install_kicker

bec = BestEffortCallback()

# RunEngine's first argument is the metadata dictionary, not a callback;
# subscribe the BestEffortCallback instead.
RE = RunEngine({})
RE.subscribe(bec)

install_kicker()
Example 26
        for motor, pos in step.items():
            if pos == pos_cache[motor]:
                # This step does not move this motor.
                continue
            yield from abs_set(motor, pos, group=grp)
            pos_cache[motor] = pos
        yield from wait(group=grp)

    motors = step.keys()
    yield from move()
    plt.pause(.001)
    yield from trigger_and_read(list(detectors) + list(motors))


install_kicker()
bec = BestEffortCallback()
bec.enable_plots()
hw = hw()
RE = RunEngine()
# build the pipeline
raw_source = Stream()
raw_output = SimpleFromEventStream('event', ('data', 'det_a'),
                                   raw_source,
                                   principle=True)
raw_output2 = SimpleFromEventStream('event', ('data', 'noisy_det'), raw_source)

pipeline = raw_output.union(raw_output2).map(lambda x: 1).accumulate(
    lambda x, y: x + y)

res = SimpleToEventStream(pipeline, ('result', ))
Example 27
# Add a progress bar.
from timeit import default_timer as timer


from bluesky.utils import ProgressBarManager
pbar_manager = ProgressBarManager()
#RE.waiting_hook = pbar_manager

# Register bluesky IPython magics.
from bluesky.magics import BlueskyMagics
get_ipython().register_magics(BlueskyMagics)

# Set up the BestEffortCallback.
from bluesky.callbacks.best_effort import BestEffortCallback
bec = BestEffortCallback()
bec.disable_plots()
bec.disable_table()
RE.subscribe(bec)
peaks = bec.peaks  # just as alias for less typing


# Make plots update live while scans run.
from bluesky.utils import install_qt_kicker
install_qt_kicker()

from pathlib import Path
from historydict import HistoryDict

try:
    RE.md = HistoryDict('/nsls2/xf08id/metadata/bluesky_history.db')
Example 28
def test_with_baseline(RE, hw):
    bec = BestEffortCallback()
    RE.subscribe(bec)
    sd = SupplementalData(baseline=[hw.det])
    RE.preprocessors.append(sd)
    RE(scan([hw.ab_det], hw.motor, 1, 5, 5))
Example 29
def test_blank_hints(RE, hw):
    bec = BestEffortCallback()
    RE.subscribe(bec)
    RE(scan([hw.ab_det], hw.motor, 1, 5, 5, md={'hints': {}}))
Example 30
def test_disable(RE, hw):
    det, motor = hw.ab_det, hw.motor
    bec = BestEffortCallback()
    RE.subscribe(bec)

    bec.disable_table()

    RE(scan([det], motor, 1, 5, 5))
    assert bec._table is None

    bec.enable_table()

    RE(scan([det], motor, 1, 5, 5))
    assert bec._table is not None

    bec.peaks.com
    bec.peaks['com']
    assert ast.literal_eval(repr(bec.peaks)) == vars(bec.peaks)

    bec.clear()
    assert bec._table is None

    # smoke test
    bec.disable_plots()
    bec.enable_plots()
    bec.disable_baseline()
    bec.enable_baseline()
    bec.disable_heading()
    bec.enable_heading()
Example 31
def test_simple(RE, hw):
    bec = BestEffortCallback()
    RE.subscribe(bec)
    RE(scan([hw.ab_det], hw.motor, 1, 5, 5))
Example 32
from bluesky import SupplementalData
sd = SupplementalData()
RE.preprocessors.append(sd)

# Add a progress bar.
from bluesky.utils import ProgressBarManager
pbar_manager = ProgressBarManager()
RE.waiting_hook = pbar_manager

# Register bluesky IPython magics.
from bluesky.magics import BlueskyMagics
get_ipython().register_magics(BlueskyMagics)

# Set up the BestEffortCallback.
from bluesky.callbacks.best_effort import BestEffortCallback
bec = BestEffortCallback()
RE.subscribe(bec)
peaks = bec.peaks  # just as alias for less typing
bec.disable_baseline()

# At the end of every run, verify that files were saved and
# print a confirmation message.
from bluesky.callbacks.broker import verify_files_saved
# RE.subscribe(post_run(verify_files_saved), 'stop')

# Import matplotlib and put it in interactive mode.
import matplotlib.pyplot as plt
plt.ion()

# Make plots update live while scans run.
from bluesky.utils import install_qt_kicker