Example #1
    def run_exp(delay):  # pragma: no cover
        time.sleep(delay)
        print("running exp")

        # Publish the raw documents to the proxy under the b"raw" prefix.
        p = Publisher(proxy[0], prefix=b"raw")
        RE.subscribe(p)

        # Tiny fake pipeline: pull each image out of the raw event stream,
        # double it, and republish the result under the b"an" prefix.
        pp = Publisher(proxy[0], prefix=b"an")
        raw_source = Stream()
        SimpleFromEventStream(
            "event",
            ("data", "img"),
            # Fill externally stored data using the numpy-sequence handler.
            raw_source.starmap(Retrieve({"NPY_SEQ": NumpySeqHandler})),
            principle=True,
        ).map(lambda x: x * 2).SimpleToEventStream(
            ("img2",), analysis_stage="pdf"
        ).starsink(pp)
        RE.subscribe(lambda *x: raw_source.emit(x))

        RE(bp.count([hw.img], md=dict(analysis_stage="raw")))
        print("finished exp")
        p.close()
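
A note on context: run_exp closes over names defined elsewhere in its test module. RE (a RunEngine), proxy (the 0MQ proxy's address pair), hw (ophyd's simulated hardware), and db (a databroker Broker, used in Example #6) arrive as pytest fixtures. A plausible import block for this example; the module paths for Retrieve and the Simple* nodes are assumptions about shed's layout, not confirmed from the source:

    # Assumed imports, not confirmed from the source module.
    import time

    import bluesky.plans as bp
    from bluesky.callbacks.zmq import Publisher
    from ophyd.sim import NumpySeqHandler
    from rapidz import Stream  # assumed: the streamz fork shed builds on
    from shed.simple import (  # assumed module path
        Retrieve,
        SimpleFromEventStream,
        SimpleToEventStream,
    )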
Example #2
    def run_exp(delay):  # pragma: no cover
        time.sleep(delay)
        print("running exp")

        p = Publisher(proxy[0], prefix=b"an")
        RE.subscribe(p)

        # Older grid_scan signature: motor, start, stop, num per dimension,
        # with a trailing snake bool for every dimension after the first.
        RE(
            bp.grid_scan(
                [hw.noisy_det],
                hw.motor3, 0, 2, 2,
                hw.motor1, 0, 2, 2, True,
                hw.motor2, 0, 2, 2, True,
                md={
                    "tomo": {
                        "type": "pencil",
                        "rotation": "motor1",
                        "translation": "motor2",
                        "stack": "motor3",
                        "center": 1,
                    }
                },
            ))
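
Newer bluesky releases moved the snaking booleans into a snake_axes keyword. A sketch of the equivalent call under that signature, assuming the same fixtures:

    # Sketch only: the keyword-based grid_scan signature.
    RE(
        bp.grid_scan(
            [hw.noisy_det],
            hw.motor3, 0, 2, 2,
            hw.motor1, 0, 2, 2,
            hw.motor2, 0, 2, 2,
            snake_axes=[hw.motor1, hw.motor2],
            md={
                "tomo": {
                    "type": "pencil",
                    "rotation": "motor1",
                    "translation": "motor2",
                    "stack": "motor3",
                    "center": 1,
                }
            },
        )
    )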
Example #3
    def run_exp(delay):  # pragma: no cover
        time.sleep(delay)
        print("running exp")

        p = Publisher(proxy[0], prefix=b"raw")
        RE.subscribe(p)
        RE(bp.count([hw.img], md=dict(analysis_stage="raw")))
Example #4
    def run_exp(delay):  # pragma: no cover
        time.sleep(delay)
        print("running exp")

        p = Publisher(proxy[0], prefix=b"raw")
        RE.subscribe(p)
        # Count one synthetic detector twice, tagging the runs as different
        # analysis stages so downstream consumers can tell them apart.
        det = SynSignal(func=lambda: np.ones(10), name="gr")
        RE(bp.count([det], md=dict(analysis_stage="raw")))
        RE(bp.count([det], md=dict(analysis_stage="pdf")))
Example #5
    def run_exp(delay):  # pragma: no cover
        time.sleep(delay)
        print("running exp")

        p = Publisher(proxy[0], prefix=b"raw")
        RE.subscribe(p)
        z = np.zeros(10)
        z[3] = 1  # single spike, so (x, y) traces a delta function at x=3
        x = SynSignal(func=lambda: np.arange(10), name="x")
        y = SynSignal(func=lambda: z, name="y")
        RE(bp.count([x, y], md=dict(analysis_stage="raw")))
Example #6
    def run_exp(delay):  # pragma: no cover
        time.sleep(delay)
        print("running exp")

        p = Publisher(proxy[0], prefix=b"raw")
        RE.subscribe(p)
        RE.subscribe(db.insert)
        # RE returns the uid(s) of the run(s) it executed; capture the dark
        # and flat-field run uids so the light run can reference them.
        dark, = RE(bp.count([hw.img], md=dict(analysis_stage="raw")))
        flat, = RE(bp.count([hw.img], md=dict(analysis_stage="raw")))
        RE(
            bp.count(
                [hw.img],
                md=dict(
                    analysis_stage="raw",
                    sc_dk_field_uid=dark,
                    sc_flat_field_uid=flat,
                ),
            ))
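
A downstream correction step could use those uids to pull the reference frames back out of the Broker. A sketch against the databroker v1 API; the 'img' field name and the exact correction are illustrative assumptions:

    # Sketch only (databroker v1 API).
    light_hdr = db[-1]  # the most recent run: the light-field count
    dark_hdr = db[light_hdr.start['sc_dk_field_uid']]
    dark_img = dark_hdr.table(fill=True)['img'][1]   # seq_num starts at 1
    corrected = light_hdr.table(fill=True)['img'][1] - dark_img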
Example #7
    def run_exp(delay):  # pragma: no cover
        time.sleep(delay)
        print("running exp")

        p = Publisher(proxy[0], prefix=b"an")
        RE.subscribe(p)

        det = SynSignal(func=lambda: np.ones((10, 10)), name="gr")
        RE(
            bp.scan(
                [det],
                hw.motor1, 0, 2, 2,  # motor, start, stop, num
                md={
                    "tomo": {
                        "type": "full_field",
                        "rotation": "motor1",
                        "center": 1,
                    }
                },
            ))
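
On the receiving side, a consumer subscribes with the matching prefix. A sketch of a dispatcher for the b"an" documents, assuming the proxy fixture exposes an (in, out) address pair so that proxy[1] is the side dispatchers connect to:

    # Hypothetical consumer; the proxy[1] address is an assumption.
    from bluesky.callbacks.zmq import RemoteDispatcher

    d = RemoteDispatcher(proxy[1], prefix=b"an")
    d.subscribe(lambda name, doc: print(name))
    d.start()  # blocks and runs the dispatcher's event loop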
Example #8
def test_zmq_multi_prefix(RE, hw):
    # COMPONENT 1
    # Run a 0MQ proxy on a separate process.
    def start_proxy():  # pragma: no cover
        Proxy(5567, 5568).start()

    proxy_proc = multiprocessing.Process(target=start_proxy, daemon=True)
    proxy_proc.start()
    time.sleep(5)  # Give this plenty of time to start up.

    # COMPONENT 2
    # Run a Publisher and a RunEngine in this main process.

    p = Publisher('127.0.0.1:5567', prefix=b'sb')  # noqa
    p2 = Publisher('127.0.0.1:5567', prefix=b'not_sb')  # noqa
    RE.subscribe(p)
    RE.subscribe(p2)

    # COMPONENT 3
    # Run a RemoteDispatcher on another separate process. Pass the documents
    # it receives over a Queue to this process, so we can count them for our
    # test.

    def make_and_start_dispatcher(queue):  # pragma: no cover
        def put_in_queue(name, doc):
            print('putting ', name, 'in queue')
            queue.put((name, doc))

        d = RemoteDispatcher('127.0.0.1:5568', prefix=[b'sb', b'not_sb'])
        d.subscribe(put_in_queue)
        print("REMOTE IS READY TO START")
        d.loop.call_later(9, d.stop)
        d.start()

    queue = multiprocessing.Queue()
    dispatcher_proc = multiprocessing.Process(target=make_and_start_dispatcher,
                                              daemon=True,
                                              args=(queue, ))
    dispatcher_proc.start()
    time.sleep(5)  # As above, give this plenty of time to start.

    # Generate documents. Each Publisher will send them to the proxy
    # over port 5567, and the proxy will forward them to the
    # RemoteDispatcher over port 5568. The RemoteDispatcher will push them
    # into the queue, where we can verify that they round-tripped.

    local_accumulator = []

    def local_cb(name, doc):
        # Two Publishers are subscribed, so the dispatcher will receive each
        # document twice; record it twice locally so the accumulators match.
        local_accumulator.append((name, doc))
        local_accumulator.append((name, doc))

    # Check that numpy stuff is sanitized by putting some in the start doc.
    md = {
        'stuff': {
            'nested': np.array([1, 2, 3])
        },
        'scalar_stuff': np.float64(3),
        'array_stuff': np.ones((3, 3))
    }

    # The Msg-based variant would put md in the start document; it is left
    # disabled here, so md goes unused with the plain count below.
    # RE([Msg('open_run', **md), Msg('close_run')], local_cb)
    RE(count([hw.det]), local_cb)
    time.sleep(1)

    # Get the documents from the queue (or timeout --- test will fail)
    remote_accumulator = []
    for i in range(len(local_accumulator)):
        remote_accumulator.append(queue.get(timeout=2))
    p.close()
    p2.close()
    proxy_proc.terminate()
    dispatcher_proc.terminate()
    proxy_proc.join()
    dispatcher_proc.join()
    assert remote_accumulator == local_accumulator
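
For reference, a plausible module header for these zmq tests; RE and hw arrive as pytest fixtures (a RunEngine and ophyd's simulated hardware) rather than module-level names:

    # Assumed imports; the originals live at the top of the test module.
    import multiprocessing
    import time

    import cloudpickle
    import numpy as np
    from bluesky import Msg
    from bluesky.callbacks.zmq import Proxy, Publisher, RemoteDispatcher
    from bluesky.plans import count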
Example #9
def test_zmq_no_RE_newserializer(RE):
    # COMPONENT 1
    # Run a 0MQ proxy on a separate process.
    def start_proxy():  # pragma: no cover
        Proxy(5567, 5568).start()

    proxy_proc = multiprocessing.Process(target=start_proxy, daemon=True)
    proxy_proc.start()
    time.sleep(5)  # Give this plenty of time to start up.

    # COMPONENT 2
    # Create a Publisher in this main process. Unlike the other tests, it is
    # never subscribed to the RunEngine; documents are fed to it by hand below.

    p = Publisher('127.0.0.1:5567', serializer=cloudpickle.dumps)  # noqa

    # COMPONENT 3
    # Run a RemoteDispatcher on another separate process. Pass the documents
    # it receives over a Queue to this process, so we can count them for our
    # test.

    def make_and_start_dispatcher(queue):  # pragma: no cover
        def put_in_queue(name, doc):
            print('putting ', name, 'in queue')
            queue.put((name, doc))

        d = RemoteDispatcher('127.0.0.1:5568', deserializer=cloudpickle.loads)
        d.subscribe(put_in_queue)
        print("REMOTE IS READY TO START")
        d.loop.call_later(9, d.stop)
        d.start()

    queue = multiprocessing.Queue()
    dispatcher_proc = multiprocessing.Process(target=make_and_start_dispatcher,
                                              daemon=True,
                                              args=(queue, ))
    dispatcher_proc.start()
    time.sleep(5)  # As above, give this plenty of time to start.

    # Generate two documents. The Publisher will send them to the proxy
    # over port 5567, and the proxy will forward them to the
    # RemoteDispatcher over port 5568. The RemoteDispatcher will push them
    # into the queue, where we can verify that they round-tripped.

    local_accumulator = []

    def local_cb(name, doc):
        local_accumulator.append((name, doc))

    RE([Msg('open_run'), Msg('close_run')], local_cb)

    # This time the Publisher isn't attached to an RE. Send the documents
    # manually. (The idea is, these might have come from a Broker instead...)
    for name, doc in local_accumulator:
        p(name, doc)
    time.sleep(1)

    # Get the two documents from the queue (or timeout --- test will fail)
    remote_accumulator = []
    for i in range(2):
        remote_accumulator.append(queue.get(timeout=2))
    p.close()
    proxy_proc.terminate()
    dispatcher_proc.terminate()
    proxy_proc.join()
    dispatcher_proc.join()
    assert remote_accumulator == local_accumulator
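
The serializer and deserializer only need to invert each other; cloudpickle is one choice that handles the numpy payloads. A sketch of the same wiring with the standard library's pickle instead, on the same addresses:

    import pickle

    # Same round-trip contract, different codec.
    p = Publisher('127.0.0.1:5567', serializer=pickle.dumps)
    d = RemoteDispatcher('127.0.0.1:5568', deserializer=pickle.loads)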