Example #1
def generate_example_catalog(data_path):
    data_path = Path(data_path)

    def factory(name, doc):
        serializer = Serializer(data_path / 'abc')
        serializer('start', doc)
        return [serializer], []

    RE = RunEngine()
    sd = SupplementalData()
    RE.preprocessors.append(sd)
    sd.baseline.extend([motor1, motor2])
    rr = RunRouter([factory])
    RE.subscribe(rr)
    RE(count([det]))
    RE(count([noisy_det], 5))
    RE(scan([det], motor, -1, 1, 7))
    RE(grid_scan([det4], motor1, -1, 1, 4, motor2, -1, 1, 7, False))
    RE(scan([det], motor, -1, 1, motor2, -1, 1, 5))
    RE(count([noisy_det, det], 5))
    RE(count([random_img], 5))
    RE(count([img], 5))

    def factory(name, doc):
        serializer = Serializer(data_path / 'xyz')
        serializer('start', doc)
        return [serializer], []

    RE = RunEngine()
    rr = RunRouter([factory])
    RE.subscribe(rr)
    RE(count([det], 3))

    catalog_filepath = data_path / 'catalog.yml'
    with open(catalog_filepath, 'w') as file:
        file.write(f'''
sources:
  abc:
    description: Some imaginary beamline
    driver: bluesky-jsonl-catalog
    container: catalog
    args:
      paths: {Path(data_path) / 'abc' / '*.jsonl'}
      handler_registry:
        NPY_SEQ: ophyd.sim.NumpySeqHandler
    metadata:
      beamline: "00-ID"
  xyz:
    description: Some imaginary beamline
    driver: bluesky-jsonl-catalog
    container: catalog
    args:
      paths: {Path(data_path) / 'xyz' / '*.jsonl'}
      handler_registry:
        NPY_SEQ: ophyd.sim.NumpySeqHandler
    metadata:
      beamline: "99-ID"
''')
    return str(catalog_filepath)
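The catalog file returned above can be opened like any other intake catalog. A minimal usage sketch (not part of the original function), assuming intake and databroker's `bluesky-jsonl-catalog` driver are installed:

import intake

catalog_filepath = generate_example_catalog('/tmp/example_data')
cat = intake.open_catalog(catalog_filepath)
print(list(cat))            # entry names: ['abc', 'xyz']
abc_runs = cat['abc']       # a catalog of the runs serialized above
print(len(list(abc_runs)))  # one uid per saved run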
Example #2
    def run(self):
        """
        Overrides the `run()` function of the `multiprocessing.Process` class. Called
        by the `start` method.
        """
        self._exit_event.clear()

        self._RE = RunEngine({})

        bec = BestEffortCallback()
        self._RE.subscribe(bec)

        # db = Broker.named('temp')
        self._RE.subscribe(self._db.insert)

        self._execution_queue = queue.Queue()

        self._thread_conn = threading.Thread(
            target=self._receive_packet_thread, name="RE Worker Receive")
        self._thread_conn.start()

        # Now make the main thread busy
        self._execute_in_main_thread()

        self._thread_conn.join()

        del self._RE
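The body above relies on two members that are not shown here: callables placed on self._execution_queue are drained on the main thread by self._execute_in_main_thread(). A hypothetical sketch of that companion method, assuming the queue holds plain callables:

    def _execute_in_main_thread(self):
        # Hypothetical sketch: run queued callables on this process's main
        # thread until the exit event is set.
        while not self._exit_event.is_set():
            try:
                func = self._execution_queue.get(timeout=0.1)
            except queue.Empty:
                continue
            func()  # e.g. a closure that invokes self._RE(plan)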
Example #3
    def __init__(self, **kwargs):
        super(QRunEngine, self).__init__()

        self.RE = RunEngine(context_managers=[],
                            during_task=DuringTask(),
                            **kwargs)
        self.RE.subscribe(self.sigDocumentYield.emit)

        # TODO: pull from settings plugin
        from suitcase.mongo_normalized import Serializer
        # TODO: create a single databroker db
        # python-dotenv stores name-value pairs in .env (add .env to .gitignore)
        username = os.getenv("USER_MONGO")
        pw = os.getenv("PASSWD_MONGO")
        try:
            self.RE.subscribe(
                Serializer(
                    f"mongodb://{username}:{pw}@localhost:27017/mds?authsource=mds",
                    f"mongodb://{username}:{pw}@localhost:27017/fs?authsource=fs"
                ))
        except OperationFailure as err:
            msg.notifyMessage("Could not connect to local mongo database.",
                              title="xicam.Acquire Error",
                              level=msg.ERROR)
            msg.logError(err)

        self.queue = PriorityQueue()
        self.process_queue()
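As the comments suggest, the Mongo credentials are expected to come from environment variables, typically loaded from a .env file with python-dotenv. A small sketch of that setup (an assumption, not shown in the original):

# .env (keep out of version control; add it to .gitignore)
# USER_MONGO=myuser
# PASSWD_MONGO=mypassword

from dotenv import load_dotenv

load_dotenv()  # populates os.environ so os.getenv("USER_MONGO") works above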
Example #4
def spawn_RE(*, loop=None, **kwargs):
    RE = RunEngine(context_managers=[], **kwargs)
    queue = _get_asyncio_queue(RE.loop)()
    t = Teleporter()

    async def get_next_message(msg):
        return await queue.async_get()

    RE.register_command('next_plan', get_next_message)

    def forever_plan():
        while True:
            plan = yield Msg('next_plan')
            try:
                yield from plan
            except GeneratorExit:
                raise
            except Exception as ex:
                print(f'things went sideways \n{ex}')

    def thread_task():
        RE(forever_plan())

    thread = threading.Thread(target=thread_task, daemon=True, name='RE')
    thread.start()

    RE.subscribe(t.name_doc.emit)

    return RE, queue, thread, t
Example #5
def test_simple_scan_nd():
    RE = RunEngine()
    hardware1 = yaqc_bluesky.Device(39423)
    hardware2 = yaqc_bluesky.Device(39424)
    sensor = yaqc_bluesky.Device(39425)
    cy = cycler(hardware1, [1, 2, 3]) * cycler(hardware2, [4, 5, 6])
    RE(scan_nd([sensor], cy))
Example #6
def test_multi_fit():
    RE = RunEngine()

    # Expected values of fit
    expected = {'x0': 5, 'x1': 4, 'x2': 3}

    m1 = SynAxis(name='m1')
    m2 = SynAxis(name='m2')
    det = SynSignal(name='centroid',
                    func=lambda: (5 + 4 * m1.read()['m1']['value']
                                  + 3 * m2.read()['m2']['value']))

    # Assemble fitting callback
    cb = MultiPitchFit('centroid', ('m1', 'm2'), update_every=None)

    RE(outer_product_scan([det], m1, -1, 1, 10, m2, -1, 1, 10, False), cb)

    # Check accuracy of fit
    logger.debug(cb.result.fit_report())
    for k, v in expected.items():
        assert np.allclose(cb.result.values[k], v, atol=1e-6)

    # Check we create an accurate estimate
    assert np.allclose(cb.eval(a0=5, a1=10), 55, atol=1e-5)
    assert np.allclose(cb.backsolve(55, a1=10)['a0'], 5, atol=1e-5)
    assert np.allclose(cb.backsolve(55, a0=5)['a1'], 10, atol=1e-5)
Example #7
def test_multi_fit():
    #Create RunEngine
    RE = RunEngine()

    # Expected values of fit
    expected = {'x0': 5, 'x1': 4, 'x2': 3}

    #Create simulated devices
    m1 = Mover('m1', {'m1': lambda x: x}, {'x': 0})
    m2 = Mover('m2', {'m2': lambda x: x}, {'x': 0})
    det = Reader(
        'det',
        {'centroid': lambda: (5 + 4 * m1.read()['m1']['value']
                              + 3 * m2.read()['m2']['value'])})

    #Assemble fitting callback
    cb = MultiPitchFit('centroid', ('m1', 'm2'), update_every=None)

    #Scan through variables
    RE(outer_product_scan([det], m1, -1, 1, 10, m2, -1, 1, 10, False), cb)
    #Check accuracy of fit
    print(cb.result.fit_report())
    for k, v in expected.items():
        assert np.allclose(cb.result.values[k], v, atol=1e-6)

    #Check we create an accurate estimate
    assert np.allclose(cb.eval(a0=5, a1=10), 55, atol=1e-5)
    assert np.allclose(cb.backsolve(55, a1=10)['a0'], 5, atol=1e-5)
    assert np.allclose(cb.backsolve(55, a0=5)['a1'], 10, atol=1e-5)
Example #8
def test_linear_fit():
    #Create RunEngine
    RE = RunEngine()

    # Expected values of fit
    expected = {'slope': 5, 'intercept': 2}

    #Create simulated devices
    motor = Mover('motor', {'motor': lambda x: x}, {'x': 0})
    det = Reader('det',
                 {'centroid': lambda: 5 * motor.read()['motor']['value'] + 2})

    #Assemble fitting callback
    cb = LinearFit('centroid', 'motor', update_every=None)

    #Scan through variables
    RE(scan([det], motor, -1, 1, 50), cb)

    #Check accuracy of fit
    for k, v in expected.items():
        assert np.allclose(cb.result.values[k], v, atol=1e-6)

    #Check we create an accurate estimate
    assert np.allclose(cb.eval(x=10), 52, atol=1e-5)
    assert np.allclose(cb.eval(motor=10), 52, atol=1e-5)
    assert np.allclose(cb.backsolve(52)['x'], 10, atol=1e-5)
Example #9
def snapshot_cli():
    """
    Given a list of PVs on the command line, take a snapshot and print a report.
    
    EXAMPLES::
    
        snapshot.py pv1 [more pvs ...]
        snapshot.py `cat pvlist.txt`

    Note that these are equivalent::

        snapshot.py rpi5bf5:0:humidity rpi5bf5:0:temperature
        snapshot.py rpi5bf5:0:{humidity,temperature}

    """
    from bluesky import RunEngine

    args = get_args()
    
    md = OrderedDict(purpose="archive a set of EPICS PVs")
    md.update(parse_metadata(args))

    obj_dict = APS_utils.connect_pvlist(args.EPICS_PV, wait=False)
    time.sleep(2)   # FIXME: allow time to connect
    
    db = Broker.named(args.broker_config)
    RE = RunEngine({})
    RE.subscribe(db.insert)

    uuid_list = RE(APS_plans.snapshot(obj_dict.values(), md=md))
    
    if args.report:
        snap = list(db(uuid_list[0]))[0]
        APS_callbacks.SnapshotReport().print_report(snap)
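Because every document is inserted into the Broker, the archived snapshot can be retrieved again later. A sketch using the databroker v1 API (the configuration name here is hypothetical):

from databroker import Broker

db = Broker.named("mongodb_config")  # hypothetical broker configuration
header = db[-1]                      # the snapshot run archived above
print(header.table())                # PV readings as a pandas DataFrame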
Example #10
def test_rank_models():
    RE = RunEngine()

    #Create accurate fit
    motor = Mover('motor', {'motor': lambda x: x}, {'x': 0})
    det = Reader('det',
                 {'centroid': lambda: 5 * motor.read()['motor']['value'] + 2})
    fit1 = LinearFit('centroid', 'motor', update_every=None, name='Accurate')
    RE(scan([det], motor, -1, 1, 50), fit1)

    #Create inaccurate fit
    det2 = Reader(
        'det', {'centroid': lambda: 25 * motor.read()['motor']['value'] + 2})
    fit2 = LinearFit('centroid', 'motor', update_every=None, name='Inaccurate')
    RE(scan([det2], motor, -1, 1, 50), fit2)

    #Create inaccurate fit
    det3 = Reader(
        'det', {'centroid': lambda: 12 * motor.read()['motor']['value'] + 2})
    fit3 = LinearFit('centroid',
                     'motor',
                     update_every=None,
                     name='Mildly Inaccurate')
    RE(scan([det3], motor, -1, 1, 50), fit3)

    #Rank models
    ranking = rank_models([fit2, fit1, fit3], target=22, x=4)
    assert ranking[0] == fit1
    assert ranking[1] == fit3
    assert ranking[2] == fit2
Example #11
def test_count_sirepo_simulation(sirepo_guest_session):
    with sirepo_guest_session(simulation_type="srw") as sirepo_session:
        simulation_table = sirepo_session.simulation_list()
        # pick a known simulation
        simulation_id = simulation_table[
            "/Light Source Facilities/NSLS-II/NSLS-II SRX beamline"][
                "NSLS-II SRX beamline"]
        srx_simulation_data = sirepo_session.simulation_data(
            simulation_id=simulation_id)
        sirepo_simulation_class = build_sirepo_simulation(
            sirepo_simulation_data=srx_simulation_data)

    # this function will store all documents
    # published by the RunEngine in a list
    published_bluesky_documents = list()

    def store_published_document(name, document):
        published_bluesky_documents.append((name, document))

    RE = RunEngine()
    RE.subscribe(store_published_document)

    sirepo_simulation = sirepo_simulation_class(name="srx")
    run_id = RE(count([sirepo_simulation]))

    assert len(published_bluesky_documents) > 0
Example #12
def test_serp_scan():
    """Note: run this standalone, not inside mfx hutch python."""
    import numpy as np
    from bluesky import RunEngine
    from bluesky.callbacks.best_effort import BestEffortCallback
    from ophyd.sim import motor1, motor2
    from ophyd.status import StatusBase
    from pcdsdaq.daq import Daq
    from pcdsdaq.sim import set_sim_mode

    class FakeSeq:
        def trigger(self):
            print('Triggered the sequencer!')
            status = StatusBase()
            status.set_finished()
            return status

    set_sim_mode(True)
    RE = RunEngine({})
    bec = BestEffortCallback()
    RE.subscribe(bec)
    seq = FakeSeq()
    daq = Daq(RE=RE)

    RE(serp_seq_scan(motor1, np.arange(100, 200, 10), motor2, [0, 100], seq))
Example #13
def test_rank_models():
    RE = RunEngine()

    # Create accurate fit
    motor = SynAxis(name='motor')
    det = SynSignal(name='centroid',
                    func=lambda: 5 * motor.read()['motor']['value'] + 2)
    fit1 = LinearFit('centroid', 'motor', update_every=None, name='Accurate')
    RE(scan([det], motor, -1, 1, 50), fit1)

    # Create inaccurate fit
    det2 = SynSignal(name='centroid',
                     func=lambda: 25 * motor.read()['motor']['value'] + 2)
    fit2 = LinearFit('centroid', 'motor', update_every=None, name='Inaccurate')
    RE(scan([det2], motor, -1, 1, 50), fit2)

    # Create inaccurate fit
    det3 = SynSignal(name='centroid',
                     func=lambda: 12 * motor.read()['motor']['value'] + 2)
    fit3 = LinearFit('centroid',
                     'motor',
                     update_every=None,
                     name='Mildly Inaccurate')
    RE(scan([det3], motor, -1, 1, 50), fit3)

    # Rank models
    ranking = rank_models([fit2, fit1, fit3], target=22, x=4)
    assert ranking[0] == fit1
    assert ranking[1] == fit3
    assert ranking[2] == fit2
Example #14
def run_publisher(in_port, data_path, quiet=False):
    """
    Acquire data in an infinite loop and publish it.
    """
    publisher = Publisher(f"localhost:{in_port}")
    RE = RunEngine(loop=asyncio.new_event_loop())
    sd = SupplementalData()
    RE.preprocessors.append(sd)
    sd.baseline.extend([motor1, motor2])
    RE.subscribe(publisher)

    def factory(name, doc):
        serializer = Serializer(data_path / "abc", flush=True)
        return [serializer], []

    rr = RunRouter([factory])
    RE.subscribe(rr)
    if not quiet:
        RE.subscribe(LiveTable(["motor", "det"]))

    motor.delay = 0.2
    det.kind = "hinted"

    def infinite_plan():
        while True:
            for i in range(1, 5):
                yield from sleep(2)
                yield from scan([det], motor, -1, 1, 5 * i)

    # Just as a convenience, avoid collision with scan_ids of runs in Catalog.
    RE.md["scan_id"] = 100
    try:
        RE(infinite_plan())
    finally:
        RE.halt()
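On the consuming side, the documents published over 0MQ can be received with a RemoteDispatcher. A minimal sketch, assuming a bluesky 0MQ proxy forwards from in_port to an outbound port (5578 is an assumption):

from bluesky.callbacks.zmq import RemoteDispatcher
from bluesky.callbacks.best_effort import BestEffortCallback

dispatcher = RemoteDispatcher("localhost:5578")  # the proxy's outbound port
dispatcher.subscribe(BestEffortCallback())
dispatcher.start()  # blocks, rendering tables/plots as documents arrive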
Example #15
def run():
    """
    Setup for the simulated IOC:

        EPICS_CA_ADDR_LIST=127.0.0.1:5064
        EPICS_CA_AUTO_ADDR_LIST=NO

    Run mongo, then run the simulated IOC.
    """
    # arg_parser = argparse.ArgumentParser()
    # arg_parser.add_argument("--agent-name", required=True, type=str)
    # arg_parser.add_argument("--episode-count", required=True, type=int)

    # args = arg_parser.parse_args()

    RE = RunEngine()

    bec = BestEffortCallback()

    RE.subscribe(bec)

    db = catalog["mad"]  # this is set up by entrypoint

    RE.subscribe(db.v1.insert)

    tiff_sim_detector = NewPerkinElmerDetector(prefix="Sim{{det1}}:",
                                               name="tiff_sim_detector")
    RE(count([tiff_sim_detector]))
Example #16
def run_publisher(in_port, data_path):
    """
    Acquire data in an infinite loop and publish it.
    """
    import asyncio
    from bluesky.callbacks.zmq import Publisher
    from suitcase.jsonl import Serializer
    from ophyd.sim import noisy_det, motor1, motor2
    from bluesky.plans import count
    from bluesky.preprocessors import SupplementalData
    from bluesky.plan_stubs import sleep
    publisher = Publisher(f'localhost:{in_port}')
    RE = RunEngine(loop=asyncio.new_event_loop())
    sd = SupplementalData()
    RE.preprocessors.append(sd)
    sd.baseline.extend([motor1, motor2])
    RE.subscribe(publisher)

    def factory(name, doc):
        serializer = Serializer(data_path / 'abc')
        serializer('start', doc)
        return [serializer], []

    rr = RunRouter([factory])
    RE.subscribe(rr)

    def infinite_plan():
        while True:
            yield from sleep(3)
            yield from count([noisy_det], 20, delay=0.5)

    try:
        RE(infinite_plan())
    finally:
        RE.halt()
Example #17
def RE():
    """
    Standard logging RunEngine
    """
    RE = RunEngine({})
    collector = MsgCollector(msg_hook=run_engine_logger.debug)
    RE.msg_hook = collector
    return RE
Example #18
def macro_sweep_test(target):
    logging.info(
        'macro_sweep_test initiated with target {:0.4f}'.format(target))
    RE = RunEngine({})
    bec = BestEffortCallback()
    RE.subscribe(bec)
    RE.waiting_hook = ProgressBarManager()
    RE(run_wrapper(rel_smooth_sweep_test(tst_23, target)))
Example #19
def RE():
    loop = asyncio.new_event_loop()
    loop.set_debug(True)
    RE = RunEngine({}, loop=loop)

    yield RE

    if RE.state != 'idle':
        RE.halt()
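A test then requests the fixture by name. A sketch of such a test; det and count come from ophyd.sim and bluesky.plans and are assumptions, not part of the fixture:

from bluesky.plans import count
from ophyd.sim import det

def test_count_with_clean_loop(RE):
    uids = RE(count([det], 2))
    assert len(uids) == 1  # one run, so one start uid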
Example #20
class RunEngineTraitType(TraitType):

    info_text = 'a RunEngine instance'
    default_value = RunEngine(get_history())

    def validate(self, obj, value):
        if not isinstance(value, RunEngine):
            self.error(obj, value)
        return value
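As with any traitlets TraitType, the class above is meant to be declared on a HasTraits subclass, where assignment is then validated against RunEngine. A usage sketch (the Session class is illustrative only):

from traitlets import HasTraits

class Session(HasTraits):
    RE = RunEngineTraitType()

session = Session()           # falls back to the default RunEngine(get_history())
session.RE = RunEngine({})    # accepted
session.RE = "not an engine"  # raises TraitError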
Example #21
def run_pb(flyers, root='/tmp', collection_time=10):
    # make some filenames
    filenames = list()
    for flyer in flyers:
        filenames.append(root + "/" + str(uuid4())[:8] + "." + flyer.name)

    RE = RunEngine()
    RE(set_and_fly(filenames, flyers, sleeptime=collection_time))

    return filenames
Example #22
def test_fly_scan_smoke():
    seq = SimSequencer('ECS:TST:100', name='seq')
    RE = RunEngine()

    # Create a plan where we fly for a second
    def plan():
        yield from fly_during_wrapper(run_wrapper(sleep(1)), [seq])

    # Run the plan
    RE(plan())
Example #23
def main():
    parser = argparse.ArgumentParser(description="TES horizontal feedback")
    parser.add_argument("FBref",
                        type=float,
                        help="Target position. For example, -137000.")
    args = parser.parse_args()
    # config_bluesky_logging(level='INFO')

    RE = RunEngine()
    RE(plan(args.FBref))
Example #24
def get_runengine(db=None):
    """
    Return an instance of RunEngine.  It is recommended to have only
    one RunEngine per session.
    """
    RE = RunEngine({})
    db = metadata_db if db is None else db
    RE.subscribe(db.insert)
    RE.subscribe(BestEffortCallback())
    return RE
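A usage sketch of the helper above; det and count are taken from ophyd.sim and bluesky.plans and are assumptions, not part of the original snippet:

from bluesky.plans import count
from ophyd.sim import det

RE = get_runengine()     # subscribed to metadata_db.insert and a BestEffortCallback
RE(count([det], num=3))  # documents are inserted and a live table is printed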
Example #25
    def run_plans(self):
        RE = RunEngine({})
        RE.log.setLevel(logging.INFO)

        publisher = Publisher('localhost:5567')
        RE.subscribe(publisher)

        for plan in self.PLANS:
            print('Starting Scan...')
            RE(plan)
            print('Scan Done...')
Example #26
def test_my_list_grid_scan2():
    xpd_configuration["shutter"] = shctl1
    motor = hw().motor
    plan = my_list_grid_scan(xpd_pe1c,
                             motor, [1.],
                             cs700, [300., 400., 500.],
                             acquire_time=0.2,
                             images_per_set=5,
                             wait_for_step=0.)
    RE = RunEngine()
    RE(plan)
Example #27
def get_catalog():
    RE = RunEngine()

    directory = tempfile.TemporaryDirectory().name
    with Serializer(directory) as serializer:
        RE(count([img]), serializer)
    with Serializer(directory) as serializer:
        RE(count([img], 3), serializer)

    catalog = BlueskyMsgpackCatalog(f"{directory}/*.msgpack")
    return catalog
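Reading data back out of the msgpack-backed catalog might look like the following sketch, using databroker v2-style access (an assumption about the installed databroker version):

catalog = get_catalog()
run = catalog[-1]          # the most recent run (the 3-frame count)
data = run.primary.read()  # xarray Dataset of the 'primary' stream
print(data["img"].shape)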
Example #28
def get_catalog():
    RE = RunEngine()

    directory = tempfile.TemporaryDirectory().name
    for i in range(1, 5):
        with Serializer(directory) as serializer:
            RE(scan([det], motor, -1, 1, 5 * i), serializer)
    with Serializer(directory) as serializer:
        RE(count([random_img], 3), serializer)

    catalog = BlueskyMsgpackCatalog(f"{directory}/*.msgpack")
    return catalog
Example #29
def main():
    act = MimicActuator(name='act')
    bk_dev = Bookkeeping(name='bk')
    dets = [bk_dev]
    RE = RunEngine({})
    RE.log.setLevel(logging.DEBUG)

    lt = LiveTable([
        bk_dev.status.name, bk_dev.target.name, act.setpoint.name,
        act.readback.name
    ])
    RE(bp.scan(dets, act, 1, 5, 5, per_step=solve_stub), lt)
Example #30
def test_ims_stage_in_plan(fake_ims):
    logger.debug('test_ims_stage_in_plan')
    RE = RunEngine()
    m = fake_ims

    def plan():
        yield from open_run()
        yield from stage(m)
        yield from unstage(m)
        yield from close_run()

    RE(plan())