# Example #1
def test_run(RE, tmp_path):
    """Run an empty count plan on the RunEngine and export the captured documents."""
    print(RE.md)

    collected = []

    def _collect(name, doc):
        # accumulate every (name, document) pair the RunEngine emits
        collected.append((name, doc))

    RE.subscribe(_collect)

    RE(count([]), md={"techniques": []})

    # at minimum the start and stop documents must have been emitted
    assert len(collected) > 0

    nxsas.export(gen=collected, directory=tmp_path)
def test_descriptor_nexus_metadata(tmp_path):
    """Export a start/descriptor/stop run and check the descriptor group in the HDF5 file."""
    # compose the 'run start' document and its factory functions
    run_bundle = event_model.compose_run(metadata=start_doc_md)
    start_doc, compose_descriptor, compose_resource, compose_stop = run_bundle

    print(start_doc)
    documents = [("start", start_doc)]

    descriptor_doc, _, _ = compose_descriptor(**descriptor_doc_md)
    documents.append(("descriptor", descriptor_doc))

    stop_doc = compose_stop()
    print(stop_doc)
    documents.append(("stop", stop_doc))

    artifacts = nxsas.export(documents, tmp_path)

    assert len(artifacts["stream_data"]) == 1
    print(artifacts)

    output_filepath = artifacts["stream_data"][0]
    assert output_filepath.exists()

    with h5py.File(output_filepath, "r") as h5f:
        assert "bluesky" in h5f
        print(list(h5f["bluesky"]))

        descriptors = h5f["bluesky"]["descriptors"]
        assert "primary" in descriptors
        print(list(descriptors["primary"]))
def test_event_page_nexus_metadata(tmp_path):
    """Export a run containing one event page and verify the primary-stream datasets."""
    # compose the 'run start' document and its factory functions
    run_bundle = event_model.compose_run(metadata=start_doc_md)
    start_doc, compose_descriptor, compose_resource, compose_stop = run_bundle

    print(start_doc)
    documents = [("start", start_doc)]

    descriptor_doc, compose_event, compose_event_page = compose_descriptor(
        **descriptor_doc_md
    )
    documents.append(("descriptor", descriptor_doc))

    event_page = compose_event_page(**event_page_md)
    documents.append(("event_page", event_page))

    stop_doc = compose_stop()
    print(stop_doc)
    documents.append(("stop", stop_doc))

    artifacts = nxsas.export(documents, tmp_path)

    assert len(artifacts["stream_data"]) == 1
    print(artifacts)

    output_filepath = artifacts["stream_data"][0]
    assert output_filepath.exists()

    with h5py.File(output_filepath, "r") as h5f:
        assert "bluesky" in h5f
        print(list(h5f["bluesky"]))

        assert "primary" in h5f["bluesky"]["events"]

        primary_data = h5f["bluesky"]["events"]["primary"]["data"]

        assert "random_walk:dt" in primary_data
        assert primary_data["random_walk:dt"].shape == (1,)
        assert primary_data["random_walk:dt"][()] == np.array([-1.0])

        assert "random_walk:x" in primary_data
        assert primary_data["random_walk:x"].shape == (1,)
        assert primary_data["random_walk:x"][()] == np.array([1.9221013521832928])
# Example #4
def test_descriptor_nexus_metadata(caplog, tmp_path):
    """Export a run built from recorded RSoXS documents and check descriptor metadata."""
    caplog.set_level(logging.DEBUG, logger="suitcase.nxsas")

    # merge the recorded start document with the technique metadata;
    # compose_run supplies "time" and "uid" itself, so drop the recorded ones
    start_doc_md = {**rsoxs_start_doc, **techniques_md}
    start_doc_md.pop("time")
    start_doc_md.pop("uid")

    # compose the 'run start' document and its factory functions
    run_bundle = event_model.compose_run(metadata=start_doc_md)
    start_doc, compose_descriptor, compose_resource, compose_stop = run_bundle

    documents = [("start", start_doc)]

    # compose_descriptor will raise an exception if "run_start" is in the metadata
    descriptor_doc_md = dict(rsoxs_descriptor_en_doc)
    descriptor_doc_md.pop("run_start")
    descriptor_doc, _, _ = compose_descriptor(**descriptor_doc_md)
    documents.append(("descriptor", descriptor_doc))

    stop_doc = compose_stop()
    documents.append(("stop", stop_doc))

    artifacts = nxsas.export(documents, tmp_path)

    assert len(artifacts["stream_data"]) == 1

    output_filepath = artifacts["stream_data"][0]
    assert output_filepath.exists()

    with h5py.File(output_filepath, "r") as h5f:
        assert "bluesky" in h5f
        print(list(h5f["bluesky"]))

        descriptors = h5f["bluesky"]["descriptors"]
        assert "primary" in descriptors
        assert "data_keys" in descriptors["primary"]
        assert "en_energy" in descriptors["primary"]["data_keys"]
# Example #5
def export_h5_file(
    output_directory,
    desc_data_keys,
    event_data_and_timestamps_list=None,
    event_page_data_and_timestamps_list=None,
):
    """Compose a one-stream run, export it with nxsas, and return the output file path.

    Parameters
    ----------
    output_directory : path-like
        directory handed to ``nxsas.export``
    desc_data_keys : dict
        ``data_keys`` for the "primary" descriptor
    event_data_and_timestamps_list : list of dict, optional
        each dict supplies "data" and "timestamps" for one event
    event_page_data_and_timestamps_list : list of dict, optional
        each dict is unpacked into ``compose_event_page``

    Returns
    -------
    the single path found in ``artifacts["stream_data"]``
    """
    run_bundle = event_model.compose_run(metadata={"md": {"techniques": []}})
    start_doc, compose_descriptor, compose_resource, compose_stop = run_bundle

    document_list = [("start", start_doc)]

    (
        primary_descriptor_doc,
        compose_primary_event,
        compose_primary_event_page,
    ) = compose_descriptor(data_keys=desc_data_keys, name="primary")
    document_list.append(("descriptor", primary_descriptor_doc))

    # individual events, if requested
    for event_md in event_data_and_timestamps_list or []:
        event = compose_primary_event(
            data=event_md["data"], timestamps=event_md["timestamps"]
        )
        document_list.append(("event", event))

    # event pages, if requested
    for event_page_md in event_page_data_and_timestamps_list or []:
        event_page = compose_primary_event_page(**event_page_md)
        document_list.append(("event_page", event_page))

    document_list.append(("stop", compose_stop()))

    artifacts = nxsas.export(gen=document_list, directory=output_directory)

    assert len(artifacts["stream_data"]) == 1
    return artifacts["stream_data"][0]
def test_start_nexus_metadata(tmp_path):
    """Export a start/stop-only run and verify the copied start-document metadata."""
    # compose the 'run start' document and its factory functions
    run_bundle = event_model.compose_run(metadata=start_doc_md)
    start_doc, compose_descriptor, compose_resource, compose_stop = run_bundle
    print(start_doc)

    stop_doc = compose_stop()
    print(stop_doc)

    documents = [("start", start_doc), ("stop", stop_doc)]
    artifacts = nxsas.export(documents, tmp_path)

    assert len(artifacts["stream_data"]) == 1
    print(artifacts)

    output_filepath = artifacts["stream_data"][0]
    assert output_filepath.exists()

    with h5py.File(output_filepath, "r") as h5f:
        assert "bluesky" in h5f
        print(list(h5f["bluesky"]))

        start_group = h5f["bluesky"]["start"]
        assert len(start_group) == 15
        assert start_group["detectors"][()] == ["random_walk:x"]
        assert start_group["motors"][()] == ["random_walk:dt"]
        assert start_group["num_intervals"][()] == 2
        assert start_group["num_points"][()] == 3
        assert start_group["plan_name"][()] == "scan"
        assert start_group["plan_pattern"][()] == "inner_product"
        assert start_group["plan_pattern_module"][()] == "bluesky.plan_patterns"
        assert start_group["plan_type"][()] == "generator"
        assert start_group["scan_id"][()] == 2
        assert start_group["time"][()] == start_doc["time"]
        assert start_group["uid"][()] == start_doc["uid"]

        assert len(h5f["bluesky"]) == 4
        assert "hints" in start_group
        assert "dimensions" in start_group["hints"]
        # the "dimensions" attribute has been jsonified because it is complicated
        dimensions = start_group["hints"]["dimensions"][()]
        assert dimensions == '[[["random_walk:dt"], "primary"]]'
        assert json.loads(dimensions) == [[["random_walk:dt"], "primary"]]

        assert "md" in start_group

        assert "plan_args" in start_group
        assert start_group["plan_args"]["args"][()] == json.dumps(
            start_doc["plan_args"]["args"]
        )

        assert "plan_pattern_args" in start_group
# Example #7
def test_start_nexus_metadata(caplog, tmp_path):
    """Export a start/stop-only run from recorded RSoXS documents and verify the start group."""
    caplog.set_level(logging.DEBUG, logger="suitcase.nxsas")

    # merge the recorded start document with the technique metadata;
    # compose_run will raise an exception if "time" or "uid" are in the metadata
    start_doc_md = {**rsoxs_start_doc, **techniques_md}
    start_doc_md.pop("time")
    start_doc_md.pop("uid")

    # compose the 'run start' document and its factory functions
    run_bundle = event_model.compose_run(metadata=start_doc_md)
    start_doc, compose_descriptor, compose_resource, compose_stop = run_bundle
    stop_doc = compose_stop()

    documents = [("start", start_doc), ("stop", stop_doc)]
    artifacts = nxsas.export(documents, tmp_path)

    assert len(artifacts["stream_data"]) == 1

    output_filepath = artifacts["stream_data"][0]
    assert output_filepath.exists()

    with h5py.File(output_filepath, "r") as h5f:
        assert "bluesky" in h5f
        print(list(h5f["bluesky"]))

        assert "start" in h5f["bluesky"]
        start_group = h5f["bluesky"]["start"]
        assert len(start_group) == 42
        assert len(h5f["bluesky"].attrs) == 0
        assert all(start_group["detectors"][()] == ["Synced", "en_energy"])
        assert all(
            start_group["motors"][()] == ["WAXS Exposure", "SAXS Exposure", "en"]
        )
        assert start_group["num_intervals"][()] == 127
        assert start_group["num_points"][()] == 128
        assert start_group["plan_name"][()] == "full_carbon_scan_nd"
        assert start_group["plan_type"][()] == "generator"
        assert start_group["scan_id"][()] == 6852
        assert start_group["time"][()] == start_doc["time"]
        assert start_group["uid"][()] == start_doc["uid"]

        assert len(h5f["bluesky"]) == 4
        assert "hints" in start_group
        # the "dimensions" attribute has been jsonified because it is complicated
        assert "dimensions" in start_group["hints"]

        assert "md" in start_group

        assert "plan_args" in start_group
        assert "detectors" in start_group["plan_args"]
# Example #8
def test_event_page_nexus_metadata(tmp_path):
    """Export a run with one event built from recorded RSoXS documents and
    verify both the bluesky data and the NeXus entry structure."""
    # merge the recorded start document with the technique metadata;
    # compose_run will throw an exception if "time" and "uid" are in the metadata
    start_doc_md = {**rsoxs_start_doc, **techniques_md}
    start_doc_md.pop("time")
    start_doc_md.pop("uid")

    # compose the 'run start' document and its factory functions
    run_bundle = event_model.compose_run(metadata=start_doc_md)
    start_doc, compose_descriptor, compose_resource, compose_stop = run_bundle

    documents = [("start", start_doc)]

    # compose_descriptor will raise an exception if "run_start" is in the metadata
    descriptor_doc_md = dict(rsoxs_descriptor_en_doc)
    descriptor_doc_md.pop("run_start")
    descriptor_doc, compose_event, compose_event_page = compose_descriptor(
        **descriptor_doc_md
    )
    documents.append(("descriptor", descriptor_doc))

    # the recorded descriptor uid will interfere with compose_event
    event_md = dict(rsoxs_event_page_en_doc)
    event_md.pop("descriptor")
    event_doc = compose_event(**event_md)
    documents.append(("event", event_doc))

    stop_doc = compose_stop()
    documents.append(("stop", stop_doc))

    artifacts = nxsas.export(documents, tmp_path)

    assert len(artifacts["stream_data"]) == 1

    output_filepath = artifacts["stream_data"][0]
    assert output_filepath.exists()

    with h5py.File(output_filepath, "r") as h5f:
        assert "bluesky" in h5f

        assert "primary" in h5f["bluesky"]["events"]

        primary_data = h5f["bluesky"]["events"]["primary"]["data"]
        assert "en_energy" in primary_data
        assert primary_data["en_energy"].shape == (1,)
        assert primary_data["en_energy"][()] == [270.0012299]

        # now test the NeXus structure
        assert "entry" in h5f
        entry = h5f["entry"]
        assert len(entry.attrs) == 2
        assert entry.attrs["NX_Class"] == "NXEntry"
        assert entry.attrs["default"] == "data"

        assert "end_time" in entry
        end_time = entry["end_time"]
        assert isinstance(end_time, h5py.Dataset)
        print(f"end_time: {end_time}")
        assert end_time[()] == stop_doc["time"]
        assert len(end_time.attrs) == 4
def do_not_test_export(tmp_path, example_data):
    """Exercise the exporter on the myriad cases parametrized in example_data.

    Prefixed with ``do_not_`` so pytest does not collect it.
    """
    nxsas.export(example_data(), tmp_path)