Example #1
def build_runs_from_documents(document_generator):
    """
    Build BlueskyRuns from a stream of documents --- (name, doc) pairs.

    This will work for any number of runs, including 0 (empty generator).

    Parameters
    ----------
    document_generator: Iterable[Tuple[String, Dict]]
        Iterable of ``(name, doc)`` pairs.

    Returns
    -------
    List[BlueskyRun]
    """
    collected_runs = []

    def run_factory(name, doc):
        # One DocumentCache per run; when the cache sees the 'start'
        # document it emits a 'started' event and we wrap it in a run.
        cache = DocumentCache()

        def on_run_started(event):
            collected_runs.append(BlueskyRun(cache))

        cache.events.started.connect(on_run_started)
        return [cache], []

    router = event_model.RunRouter([run_factory])
    for name_doc_pair in document_generator:
        router(*name_doc_pair)
    return collected_runs
Example #2
def test_subfactory_callback_exception():
    """
    Test that RunRouter._start_to_descriptors and RunRouter._descriptor_to_start
    are updated in the case that a subfactory callback raises an Exception.
    """
    def always_raising_callback(name, doc):
        """A callback that always raises Exception."""
        raise Exception()

    def raising_subfactory(descriptor_doc_name, descriptor_doc):
        """A subfactory that always returns one always-raising callback."""
        return [always_raising_callback]

    def factory(start_doc_name, start_doc):
        """A factory returning no callbacks and one raising subfactory."""
        return ([], [raising_subfactory])

    rr = event_model.RunRouter([factory])

    rr('start', {'time': 0, 'uid': 'abcdef'})

    # The subfactory's callback blows up on the descriptor...
    with pytest.raises(Exception):
        rr('descriptor', {'run_start': 'abcdef', 'uid': 'ghijkl'})

    # ...but the router's internal bookkeeping must still be updated.
    assert rr._start_to_descriptors['abcdef'] == ['ghijkl']
    assert rr._descriptor_to_start['ghijkl'] == 'abcdef'

    rr.event({'descriptor': 'ghijkl', 'uid': 'mnopqr'})
Example #3
def stream_documents_into_runs(add_run):
    """
    Convert a flat stream of documents to "live" BlueskyRuns.

    Parameters
    ----------
    add_run : callable
        This will be called as ``add_run(run: BlueskyRun)`` each time a 'start'
        document is received.

    Returns
    -------
    callback : callable
        This should be subscribed to a callback registry that calls it like
        ``callback(name, doc)``.

    Examples
    --------

    This is used for connecting something that emits a flat stream of documents
    to something that wants to receive BlueskyRuns.

    Append to a plain list.

    >>> from bluesky import RunEngine
    >>> RE = RunEngine()
    >>> runs = []
    >>> RE.subscribe(stream_documents_into_runs(runs.append))

    Or, more usefully to an observable list.

    >>> from bluesky_widgets.models.utils import RunList
    >>> runs = RunList()
    >>> RE.subscribe(stream_documents_into_runs(runs.append))

    Add runs to a model with an ``add_run`` method. For example, it might be a
    model that generates figures.

    >>> from bluesky_widgets.models.plot_builders import AutoLines
    >>> model = AutoLines()

    >>> RE.subscribe(stream_documents_into_runs(model.add_run))
    """
    def run_factory(name, doc):
        # Each run gets its own cache; wrap it in a BlueskyRun as soon as
        # the 'start' document lands and hand it to the caller.
        cache = DocumentCache()

        def on_started(event):
            add_run(BlueskyRun(cache))

        cache.events.started.connect(on_started)
        return [cache], []

    return event_model.RunRouter(
        [run_factory],
        handler_registry={"NPY_SEQ": NumpySeqHandler},
    )
Example #4
def test_subfactory():
    # this test targeted the bug described in issue #170
    docs_seen_by_factory = defaultdict(list)
    docs_seen_by_subfactory = defaultdict(list)

    def factory(name, start_doc):
        """Return one factory-level callback and one subfactory."""
        def record_factory_doc(name, doc):
            docs_seen_by_factory[name].append(doc)

        def record_subfactory_doc(name, doc):
            docs_seen_by_subfactory[name].append(doc)

        def subfactory(name, descriptor_doc):
            return [record_subfactory_doc]

        return [record_factory_doc], [subfactory]

    rr = event_model.RunRouter([factory])

    run_bundle = event_model.compose_run()
    rr("start", run_bundle.start_doc)
    # Only the factory callback has seen anything so far.
    assert len(docs_seen_by_factory) == 1
    assert len(docs_seen_by_factory["start"]) == 1
    assert docs_seen_by_factory["start"] == [run_bundle.start_doc]
    assert len(docs_seen_by_subfactory) == 0

    descriptor_bundle = run_bundle.compose_descriptor(
        data_keys={
            "motor": {"shape": [], "dtype": "number", "source": "..."},
        },
        name="primary",
    )
    rr("descriptor", descriptor_bundle.descriptor_doc)
    assert len(docs_seen_by_factory) == 2
    assert len(docs_seen_by_factory["start"]) == 1
    assert docs_seen_by_factory["start"] == [run_bundle.start_doc]
    assert len(docs_seen_by_factory["descriptor"]) == 1
    assert docs_seen_by_factory["descriptor"] == [
        descriptor_bundle.descriptor_doc
    ]

    # The subfactory callback must have received the start document
    # retroactively, followed by the descriptor that created it.
    assert len(docs_seen_by_subfactory) == 2
    assert len(docs_seen_by_subfactory["start"]) == 1
    assert docs_seen_by_subfactory["start"] == [run_bundle.start_doc]
    assert len(docs_seen_by_subfactory["descriptor"]) == 1
    assert docs_seen_by_subfactory["descriptor"] == [
        descriptor_bundle.descriptor_doc
    ]

    rr("stop", run_bundle.compose_stop())

    # Internal per-run state is cleaned up once the run has stopped.
    assert len(rr._start_to_start_doc) == 0
Example #5
def test_same_start_doc_twice():
    "If the user sends us the same uid twice, raise helpfully."
    rr = event_model.RunRouter([])
    original = {'time': 0, 'uid': 'stuff'}
    rr('start', original)
    # Exact same object is rejected.
    with pytest.raises(ValueError):
        rr('start', original)
    # A distinct object with the same content is rejected too.
    with pytest.raises(ValueError):
        rr('start', dict(original))
    # Same uid with different content is still a duplicate.
    with pytest.raises(ValueError):
        rr('start', {'time': 1, 'uid': 'stuff'})
Example #6
def save_example_data():
    """
    Run this from repo root to re-generate data.

    python -c "import bluesky_tutorial_utils; bluesky_tutorial_utils.save_example_data()"
    """
    import suitcase.jsonl

    def serializer_factory(name, doc):
        # `directory` is a module-level path; one Serializer per run.
        return [suitcase.jsonl.Serializer(str(directory))], []

    router = event_model.RunRouter([serializer_factory])
    generate_example_data(router)
def test_with_run_router(tmp_path, md):
    # use a directory that does not exist to test that it will be created
    output_dir_path = tmp_path / Path("doesnotexist")

    def factory(name, doc):
        return [
            nxsas.Serializer(file_prefix="doesnotexist/",
                             directory=output_dir_path)
        ], []

    rr = event_model.RunRouter([factory])

    start_doc_md = dict(rsoxs_start_doc)
    start_doc_md.update(md)
    # compose_run will raise an exception if "time" and "uid" are in the metadata
    del start_doc_md["time"]
    del start_doc_md["uid"]
    start_doc, compose_descriptor, compose_resource, compose_stop = (
        event_model.compose_run(metadata=start_doc_md)
    )
    rr("start", start_doc)

    descriptor_md = dict(rsoxs_descriptor_en_doc)
    # compose_descriptor will raise an exception if "run_start" is in the metadata
    descriptor_md.pop("run_start")
    descriptor_doc, compose_event, compose_event_page = compose_descriptor(
        **descriptor_md)
    rr("descriptor", descriptor_doc)

    event_md = dict(rsoxs_event_page_en_doc)
    # event_md["seq_num"] = [1]
    # the descriptor uid will interfere with compose_event
    event_md.pop("descriptor")
    rr("event", compose_event(**event_md))

    rr("stop", compose_stop())

    # Exactly one output file should have been created (and the
    # directory itself created on demand).
    print(os.listdir(path=output_dir_path))
    assert len(os.listdir(path=output_dir_path)) == 1
def start_worker(export_dir, kafka_bootstrap_servers, kafka_topics):
    """
    Subscribe an ExampleWorker-producing RunRouter to a Kafka dispatcher
    and start consuming documents.

    Blocks in ``dispatcher.start()`` until the dispatcher is stopped.

    Parameters
    ----------
    export_dir :
        Export directory forwarded to the worker factory.
    kafka_bootstrap_servers :
        Kafka bootstrap servers for the RemoteDispatcher.
    kafka_topics :
        Kafka topics to subscribe to.
    """
    def worker_factory(name, start_doc, export_dir):
        # export_dir is accepted for future use; ExampleWorker does not
        # currently consume it.
        example_worker = ExampleWorker()
        return [example_worker], []

    dispatcher = bluesky_kafka.RemoteDispatcher(
        topics=kafka_topics,
        group_id="iss-example-worker",
        bootstrap_servers=kafka_bootstrap_servers,
        #deserializer=msgpack.loads,
    )

    rr = event_model.RunRouter(
        # Bug fix: bind the export_dir *argument*, not the literal string
        # "export_dir", so the factory sees the caller's directory.
        [partial(worker_factory, export_dir=export_dir)],
        handler_registry={
            "AD_TIFF": databroker.assets.handlers.AreaDetectorTiffHandler,
            "NPY_SEQ": ophyd.sim.NumpySeqHandler,
        },
    )
    dispatcher.subscribe(rr)
    dispatcher.start()
Example #9
def test_run_router(tmp_path):
    """
    Exercise RunRouter end-to-end against one synthetic run.

    Covers: no factories, a factory that rejects all runs, a factory that
    accepts all runs, a subfactory keyed on the descriptor's stream name,
    factories relying on pre-1.14.0 behavior (expected to warn), and
    filling of external data via handler_registry / fill_or_fail.
    """
    # Compose a full run: start, a 'primary' descriptor with an external
    # image key, a 'baseline' descriptor, a TIFF resource + datum, one
    # event per stream, and a stop document.
    bundle = event_model.compose_run()
    docs = []
    start_doc, compose_descriptor, compose_resource, compose_stop = bundle
    docs.append(('start', start_doc))
    bundle = compose_descriptor(data_keys={
        'motor': {
            'shape': [],
            'dtype': 'number',
            'source': '...'
        },
        'image': {
            'shape': [512, 512],
            'dtype': 'number',
            'source': '...',
            'external': 'FILESTORE:'
        }
    },
                                name='primary')
    primary_descriptor_doc, compose_primary_event, compose_event_page = bundle
    docs.append(('descriptor', primary_descriptor_doc))
    bundle = compose_descriptor(
        data_keys={'motor': {
            'shape': [],
            'dtype': 'number',
            'source': '...'
        }},
        name='baseline')
    baseline_descriptor_doc, compose_baseline_event, compose_event_page = bundle
    docs.append(('descriptor', baseline_descriptor_doc))
    bundle = compose_resource(spec='TIFF',
                              root=str(tmp_path),
                              resource_path='stack.tiff',
                              resource_kwargs={})
    resource_doc, compose_datum, compose_datum_page = bundle
    docs.append(('resource', resource_doc))
    datum_doc = compose_datum(datum_kwargs={'slice': 5})
    docs.append(('datum', datum_doc))
    # The 'image' value points at the datum and starts out unfilled.
    primary_event_doc = compose_primary_event(data={
        'motor': 0,
        'image': datum_doc['datum_id']
    },
                                              timestamps={
                                                  'motor': 0,
                                                  'image': 0
                                              },
                                              filled={'image': False})
    docs.append(('event', primary_event_doc))
    baseline_event_doc = compose_baseline_event(data={'motor': 0},
                                                timestamps={'motor': 0})
    docs.append(('event', baseline_event_doc))
    stop_doc = compose_stop()
    docs.append(('stop', stop_doc))

    # Empty list of factories. Just make sure nothing blows up.
    rr = event_model.RunRouter([])
    for name, doc in docs:
        rr(name, doc)

    # A factory that rejects all runs.
    def null_factory(name, doc):
        return [], []

    rr = event_model.RunRouter([null_factory])
    for name, doc in docs:
        rr(name, doc)

    # A factory that accepts all runs.
    collected = []

    def collector(name, doc):
        # Normalize pages back to single events/datum so the collected
        # stream can be compared directly with `docs`.
        if name == 'event_page':
            name = 'event'
            doc, = event_model.unpack_event_page(doc)
        elif name == 'datum_page':
            name = 'datum'
            doc, = event_model.unpack_datum_page(doc)
        collected.append((name, doc))

    def all_factory(name, doc):
        return [collector], []

    rr = event_model.RunRouter([all_factory])
    for name, doc in docs:
        rr(name, doc)

    assert collected == docs
    collected.clear()

    # A factory that returns a subfactory interested in 'baseline' only.
    def subfactory(name, doc):
        if doc.get('name') == 'baseline':
            return [collector]
        return []

    def factory_with_subfactory_only(name, doc):
        return [], [subfactory]

    rr = event_model.RunRouter([factory_with_subfactory_only])
    for name, doc in docs:
        rr(name, doc)

    expected_item = ('event', baseline_event_doc)
    unexpected_item = ('event', primary_event_doc)
    assert expected_item in collected
    assert unexpected_item not in collected
    collected.clear()

    # Test factory that expects old (pre-1.14.0) RunRouter behavior.

    collected_header_docs = {}

    class LocalException3(Exception):
        ...

    def header_collector(name, doc):
        # Raise if the same header document is delivered twice, which is
        # what a factory written for the pre-1.14.0 behavior would see.
        if name in ('start', 'stop', 'descriptor'):
            key = (name, doc['uid'])
            if key in collected_header_docs:
                raise LocalException3
            collected_header_docs[key] = doc

    def all_factory(name, doc):
        header_collector(name, doc)
        return [header_collector], []

    rr = event_model.RunRouter([all_factory])
    with pytest.warns(UserWarning,
                      match='1.14.0'), pytest.raises(LocalException3):
        for name, doc in docs:
            rr(name, doc)

    collected_header_docs.clear()

    # Test subfactory that expects old (pre-1.14.0) RunRouter behavior.

    def factory_with_subfactory_only(name, doc):
        header_collector(name, doc)

        def subfactory(name, doc):
            if doc.get('name') == 'baseline':
                header_collector(name, doc)
                return [header_collector]
            return []

        return [], [subfactory]

    rr = event_model.RunRouter([factory_with_subfactory_only])
    with pytest.warns(UserWarning,
                      match='1.14.0'), pytest.raises(LocalException3):
        for name, doc in docs:
            rr(name, doc)

    collected_header_docs.clear()

    # Test RunRouter with handler_registry.

    class FakeTiffHandler:
        # Stands in for a real TIFF handler; returns fixed pixel data.
        def __init__(self, resource_path):
            assert resource_path == str(tmp_path / "stack.tiff")

        def __call__(self, slice):
            return numpy.ones((5, 5))

    reg = {'TIFF': FakeTiffHandler}

    def check_filled(name, doc):
        if name == 'event_page':
            for is_filled in doc['filled'].values():
                assert all(is_filled)
        elif name == 'event':
            for is_filled in doc['filled'].values():
                assert is_filled

    def check_not_filled(name, doc):
        if name == 'event_page':
            for is_filled in doc['filled'].values():
                assert not any(is_filled)
        elif name == 'event':
            for is_filled in doc['filled'].values():
                assert not is_filled

    def check_filled_factory(name, doc):
        return [check_filled], []

    def check_not_filled_factory(name, doc):
        return [check_not_filled], []

    # If reg is missing our spec (or just not given) docs pass through
    # unfilled.
    rr = event_model.RunRouter([check_not_filled_factory])
    for name, doc in docs:
        rr(name, doc)

    # If fill_or_fail is set to True and reg is missing our spec (or just not
    # given) we raise.
    rr = event_model.RunRouter([check_not_filled_factory], fill_or_fail=True)
    with pytest.raises(event_model.UndefinedAssetSpecification):
        for name, doc in docs:
            rr(name, doc)

    # If spec is provided, docs are filled, regardless of fill_or_fail.
    rr = event_model.RunRouter([check_filled_factory], reg)
    for name, doc in docs:
        rr(name, doc)

    rr = event_model.RunRouter([check_filled_factory], reg, fill_or_fail=True)
    for name, doc in docs:
        rr(name, doc)
Example #10
def test_run_router():
    """
    Exercise RunRouter against one synthetic run.

    Covers: no factories, a factory that rejects all runs, a factory that
    accepts all runs (and itself inspects each document), and a subfactory
    keyed on the descriptor's stream name.
    """
    # Compose a full run: start, a 'primary' descriptor with an external
    # image key, a 'baseline' descriptor, a TIFF resource + datum, one
    # event per stream, and a stop document.
    bundle = event_model.compose_run()
    docs = []
    start_doc, compose_descriptor, compose_resource, compose_stop = bundle
    docs.append(('start', start_doc))
    bundle = compose_descriptor(data_keys={
        'motor': {
            'shape': [],
            'dtype': 'number',
            'source': '...'
        },
        'image': {
            'shape': [512, 512],
            'dtype': 'number',
            'source': '...',
            'external': 'FILESTORE:'
        }
    },
                                name='primary')
    primary_descriptor_doc, compose_primary_event, compose_event_page = bundle
    docs.append(('descriptor', primary_descriptor_doc))
    bundle = compose_descriptor(
        data_keys={'motor': {
            'shape': [],
            'dtype': 'number',
            'source': '...'
        }},
        name='baseline')
    baseline_descriptor_doc, compose_baseline_event, compose_event_page = bundle
    docs.append(('descriptor', baseline_descriptor_doc))
    bundle = compose_resource(spec='TIFF',
                              root='/tmp',
                              resource_path='stack.tiff',
                              resource_kwargs={})
    resource_doc, compose_datum, compose_datum_page = bundle
    docs.append(('resource', resource_doc))
    datum_doc = compose_datum(datum_kwargs={'slice': 5})
    docs.append(('datum', datum_doc))
    # The 'image' value points at the datum and is marked unfilled.
    primary_event_doc = compose_primary_event(data={
        'motor': 0,
        'image': datum_doc['datum_id']
    },
                                              timestamps={
                                                  'motor': 0,
                                                  'image': 0
                                              },
                                              filled={'image': False})
    docs.append(('event', primary_event_doc))
    baseline_event_doc = compose_baseline_event(data={'motor': 0},
                                                timestamps={'motor': 0})
    docs.append(('event', baseline_event_doc))
    stop_doc = compose_stop()
    docs.append(('stop', stop_doc))

    # Empty list of factories. Just make sure nothing blows up.
    rr = event_model.RunRouter([])
    for name, doc in docs:
        rr(name, doc)

    # A factory that rejects all runs.
    def null_factory(name, doc):
        return [], []

    rr = event_model.RunRouter([null_factory])
    for name, doc in docs:
        rr(name, doc)

    # A factory that accepts all runs.
    collected = []

    def collector(name, doc):
        # Normalize pages back to single events/datum so the collected
        # stream can be compared directly with `docs`.
        if name == 'event_page':
            name = 'event'
            doc, = event_model.unpack_event_page(doc)
        elif name == 'datum_page':
            name = 'datum'
            doc, = event_model.unpack_datum_page(doc)
        collected.append((name, doc))

    def all_factory(name, doc):
        # NOTE(review): the factory records the start doc itself, which is
        # the pre-1.14.0 convention where callbacks did not receive it.
        collector(name, doc)
        return [collector], []

    rr = event_model.RunRouter([all_factory])
    for name, doc in docs:
        rr(name, doc)

    assert collected == docs
    collected.clear()

    # A factory that returns a subfactory interested in 'baseline' only.
    def subfactory(name, doc):
        if doc.get('name') == 'baseline':
            return [collector]
        return []

    def factory_with_subfactory_only(name, doc):
        return [], [subfactory]

    rr = event_model.RunRouter([factory_with_subfactory_only])
    for name, doc in docs:
        rr(name, doc)

    expected_item = ('event', baseline_event_doc)
    unexpected_item = ('event', primary_event_doc)
    assert expected_item in collected
    assert unexpected_item not in collected
    collected.clear()