def test_static_data_reaches_file(docker_compose):
    producer = create_producer()
    sleep(10)
    # Start file writing
    job_id = publish_run_start_message(
        producer,
        "commands/nexus_structure_static.json",
        "output_file_static.nxs",
        start_time=int(docker_compose),
    )

    # Give it some time to accumulate data
    sleep(10)
    # Stop file writing
    publish_run_stop_message(producer, job_id=job_id)

    filepath = "output-files/output_file_static.nxs"
    with OpenNexusFileWhenAvailable(filepath) as file:
        assert not file.swmr_mode
        assert file["entry/start_time"][()] == "2016-04-12T02:58:52"
        assert file["entry/end_time"][()] == "2016-04-12T03:29:11"
        assert file["entry/duration"][()] == 1817.0
        assert file["entry/features"][0] == 10138143369737381149
        assert file["entry/user_1/affiliation"][()] == "ISIS, STFC"
        location = file["entry/instrument/monitor1/transformations/location"]
        assert np.allclose(location.attrs["vector"], np.array([0.0, 0.0, -1.0]))
        assert location.attrs["transformation_type"] == "translation"
Example #2
def test_data_reaches_file(docker_compose_fw):
    producer = create_producer()
    sleep(20)
    # Start file writing
    send_writer_command(
        os.path.join("filewriter_tests", "commands",
                     "example-json-command.json"),
        producer,
        start_time=docker_compose_fw,
    )
    producer.flush()
    # Give it some time to accumulate data
    sleep(10)
    # Stop file writing
    send_writer_command(
        os.path.join("filewriter_tests", "commands", "stop-command.json"),
        producer)
    sleep(10)
    send_writer_command(
        os.path.join("filewriter_tests", "commands", "writer-exit.json"),
        producer)
    producer.flush()

    filepath = os.path.join("filewriter_tests", "output-files",
                            "output_file.nxs")
    with OpenNexusFileWhenAvailable(filepath) as file:
        # Static checks
        assert not file.swmr_mode
        assert file["entry/start_time"][...] == "2016-04-12T02:58:52"
        assert file["entry/end_time"][...] == "2016-04-12T03:29:11"
        assert file["entry/duration"][...] == 1817.0
        assert file["entry/features"][0] == 10138143369737381149
        assert file["entry/user_1/affiliation"][...] == "ISIS, STFC"
        location = file["entry/instrument/monitor1/transformations/location"]
        assert np.allclose(location.attrs["vector"], np.array([0.0, 0.0, -1.0]))
        assert location.attrs["transformation_type"] == "translation"

        # Streamed checks
        # Ev42 event data (Detector_1)
        assert file["entry/detector_1_events/event_id"][0] == 99406
        assert file["entry/detector_1_events/event_id"][1] == 98345
        # f142 Sample env (Sample)
        assert np.isclose(
            21.0, file["entry/sample/sample_env_logs/Det_Temp_RRB/value"][0])
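
# create_producer is assumed to construct a confluent_kafka Producer pointed at the
# broker started by docker-compose. A minimal sketch; the broker address and config
# values are illustrative assumptions rather than the tests' real configuration.
from confluent_kafka import Producer


def create_producer(bootstrap_servers="localhost:9092"):
    return Producer({
        "bootstrap.servers": bootstrap_servers,
        # Allow large messages such as serialised NeXus structures in run-start commands
        "message.max.bytes": 100_000_000,
    })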
Example #3
def test_two_different_writer_modules_with_same_flatbuffer_id(docker_compose):
    producer = create_producer()
    start_time = unix_time_milliseconds(datetime.utcnow()) - 10000
    for i in range(10):
        publish_f142_message(
            producer,
            "TEST_sampleEnv",
            int(start_time + i * 1000),
            source_name="test_source_1",
        )
        publish_f142_message(
            producer,
            "TEST_sampleEnv",
            int(start_time + i * 1000),
            source_name="test_source_2",
        )
    check(producer.flush(5) == 0, "Unable to flush kafka messages.")
    # Start file writing
    publish_run_start_message(
        producer,
        "commands/nexus_structure_multiple_modules.json",
        "output_file_multiple_modules.nxs",
        start_time=int(start_time),
        stop_time=int(start_time + 5 * 1000),
    )
    # Give it some time to accumulate data
    sleep(10)

    filepath = "output-files/output_file_multiple_modules.nxs"
    with OpenNexusFileWhenAvailable(filepath) as file:
        assert (
            len(file["entry/sample/dataset1/time"][:]) > 0
            and len(file["entry/sample/dataset1/value"][:]) > 0
        ), "f142 module should have written this dataset, it should have written a value and time"

        assert (
            "cue_timestamp_zero" not in file["entry/sample/dataset2"]
        ), "f142_test module should have written this dataset, it writes cue_index but no cue_timestamp_zero"
        assert (len(file["entry/sample/dataset2/cue_index"][:]) >
                0), "Expected index values, found none."
        for i in range(len(file["entry/sample/dataset2/cue_index"][:])):
            assert (file["entry/sample/dataset2/cue_index"][i] == i
                    ), "Expect consecutive integers to be written by f142_test"
Example #4
def test_long_run(docker_compose_long_running):
    producer = create_producer()
    sleep(20)
    # Start file writing
    send_writer_command(
        os.path.join("filewriter_tests", "commands", "longrunning.json"),
        producer,
        topic="TEST_writerCommandLR",
        start_time=docker_compose_long_running,
    )
    producer.flush()
    sleep(10)
    # Minimum length of the test is determined by (pv_updates * 3) + 10 seconds
    pv_updates = 6000
    # range is exclusive of the last number, so in order to get 1 to pv_updates we need to use pv_updates+1
    for i in range(1, pv_updates + 1):
        change_pv_value("SIMPLE:DOUBLE", i)
        sleep(3)

    send_writer_command(
        os.path.join("filewriter_tests", "commands", "stop-command-lr.json"),
        producer,
        topic="TEST_writerCommandLR",
    )
    producer.flush()
    sleep(30)

    filepath = os.path.join("filewriter_tests", "output-files",
                            "output_file_lr.nxs")
    with OpenNexusFileWhenAvailable(filepath) as file:
        counter = 1
        # check values are consecutive
        for value in file["entry/cont_data/value"]:
            assert isclose(value, counter)
            counter += 1

    # check that the last value is the same as the number of updates
    assert counter == pv_updates + 1

    with open(os.path.join("logs", "lr_status_messages.log"), "w+") as file:
        status_messages = consume_everything("TEST_writerStatus")
        for msg in status_messages:
            file.write(str(msg.value(), encoding="utf-8") + "\n")
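
# change_pv_value is assumed to write a new value to the EPICS PV served by the IOC
# in the docker-compose environment. A sketch using pyepics; caproto or a channel
# access CLI would do equally well, and the timeout is an illustrative assumption.
from epics import caput


def change_pv_value(pv_name, value):
    # wait=True blocks until the IOC has processed the write
    caput(pv_name, value, wait=True, timeout=5)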
Example #5
def test_static_data_reaches_file(docker_compose_fw):
    producer = create_producer()
    sleep(20)
    # Start file writing
    send_writer_command(
        os.path.join("filewriter_tests", "commands", "static-data-add.json"),
        producer,
        start_time=docker_compose_fw,
    )
    producer.flush()
    # Give it some time to accumulate data
    sleep(10)
    # Stop file writing
    send_writer_command(
        os.path.join("filewriter_tests", "commands", "static-data-stop.json"), producer
    )
    sleep(10)
    send_writer_command(
        os.path.join("filewriter_tests", "commands", "writer-exit.json"), producer
    )
    producer.flush()

    filepath = os.path.join(
        "filewriter_tests", "output-files", "output_file_static.nxs"
    )
    with OpenNexusFileWhenAvailable(filepath) as file:
        assert not file.swmr_mode
        assert file["entry/start_time"].value == "2016-04-12T02:58:52"
        assert file["entry/end_time"].value == "2016-04-12T03:29:11"
        assert file["entry/duration"].value == 1817.0
        assert file["entry/features"][0] == 10138143369737381149
        assert file["entry/user_1/affiliation"].value == "ISIS, STFC"
        assert np.allclose(
            file["entry/instrument/monitor1/transformations/location"].attrs["vector"],
            np.array([0.0, 0.0, -1.0]),
        )
        assert (
            file["entry/instrument/monitor1/transformations/location"].attrs[
                "transformation_type"
            ]
            == "translation"
        )
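
# send_writer_command is assumed to load a JSON command file, optionally patch the
# start/stop time into it, and publish it on the file writer's command topic. A
# sketch; the default topic name and the "start_time"/"stop_time" keys are
# assumptions about the command format used by these tests.
import json


def send_writer_command(filepath, producer, topic="TEST_writerCommand",
                        start_time=None, stop_time=None):
    with open(filepath, "r") as command_file:
        command = json.load(command_file)
    if start_time is not None:
        command["start_time"] = int(start_time)
    if stop_time is not None:
        command["stop_time"] = int(stop_time)
    producer.produce(topic, json.dumps(command).encode("utf-8"))
    producer.poll(0)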
Example #6
def test_ep00(docker_compose):
    producer = create_producer()
    topic = "TEST_epicsConnectionStatus"
    sleep(10)

    # Start file writing
    job_id = publish_run_start_message(
        producer,
        "commands/nexus_structure_epics_status.json",
        "output_file_ep00.nxs",
        start_time=current_unix_time_ms(),
    )
    sleep(5)
    first_timestamp = current_unix_time_ms()
    publish_ep00_message(producer, topic, EventType.NEVER_CONNECTED,
                         first_timestamp)
    second_timestamp = current_unix_time_ms()
    publish_ep00_message(producer,
                         topic,
                         EventType.CONNECTED,
                         kafka_timestamp=second_timestamp)

    # Give it some time to accumulate data
    sleep(10)

    # Stop file writing
    publish_run_stop_message(producer,
                             job_id,
                             stop_time=current_unix_time_ms())

    filepath = "output-files/output_file_ep00.nxs"
    with OpenNexusFileWhenAvailable(filepath) as file:
        assert file["EpicsConnectionStatus/connection_status_time"][
            0] == milliseconds_to_nanoseconds(first_timestamp)
        assert file["EpicsConnectionStatus/connection_status"][
            0] == b"NEVER_CONNECTED"
        assert file["EpicsConnectionStatus/connection_status_time"][
            1] == milliseconds_to_nanoseconds(second_timestamp)
        assert file["EpicsConnectionStatus/connection_status"][
            1] == b"CONNECTED"
Example #7
def test_long_run(docker_compose_long_running):
    producer = create_producer()
    sleep(20)
    # Start file writing
    job_id = publish_run_start_message(
        producer,
        "commands/nexus_structure_long_running.json",
        nexus_filename="output_file_lr.nxs",
        topic="TEST_writerCommandLR",
        start_time=int(docker_compose_long_running),
    )
    sleep(10)
    # Minimum length of the test is determined by (pv_updates * 3) + 10 seconds
    pv_updates = 6000
    # range is exclusive of the last number, so in order to get 1 to pv_updates we need to use pv_updates+1
    for i in range(1, pv_updates + 1):
        change_pv_value("SIMPLE:DOUBLE", i)
        sleep(3)

    publish_run_stop_message(producer,
                             job_id=job_id,
                             topic="TEST_writerCommandLR")
    sleep(30)

    filepath = "output-files/output_file_lr.nxs"
    with OpenNexusFileWhenAvailable(filepath) as file:
        counter = 1
        # check values are consecutive
        for value in file["entry/cont_data/value"]:
            assert isclose(value, counter)
            counter += 1

    # check that the last value is the same as the number of updates
    assert counter == pv_updates + 1

    with open("logs/lr_status_messages.log", "w+") as file:
        status_messages = consume_everything("TEST_writerStatus")
        for msg in status_messages:
            file.write(str(deserialise_x5f2(msg.value())) + "\n")
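
# consume_everything is assumed to drain whatever is currently on a topic so the
# status messages can be dumped to a log file. A sketch using confluent_kafka; the
# fresh group id, timeouts and "stop after a quiet period" policy are assumptions.
import uuid

from confluent_kafka import Consumer


def consume_everything(topic, bootstrap_servers="localhost:9092"):
    consumer = Consumer({
        "bootstrap.servers": bootstrap_servers,
        "group.id": uuid.uuid4().hex,  # fresh group so we always read from the start
        "auto.offset.reset": "earliest",
    })
    consumer.subscribe([topic])
    messages = []
    try:
        while True:
            msg = consumer.poll(timeout=5.0)
            if msg is None:
                break  # nothing received for 5 s: assume the topic is drained
            if msg.error() is None:
                messages.append(msg)
    finally:
        consumer.close()
    return messages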
Example #8
    stop_time = 1_560_330_000_148
    # Ask to write 147 messages from the middle of the 200 messages we published
    publish_run_start_message(
        producer,
        "commands/nexus_structure_historical.json",
        "output_file_of_historical_data.nxs",
        start_time=start_time,
        stop_time=stop_time,
        topic=command_topic,
    )

    sleep(20)
    # The command also includes a stream for topic TEST_emptyTopic, which exists but contains no data; the
    # file writer should recognise that the topic is empty and close the corresponding streamer without problems.
    filepath = "output-files/output_file_of_historical_data.nxs"
    with OpenNexusFileWhenAvailable(filepath) as file:
        # Expect one value per ms over the inclusive start-to-stop range (stop_time - start_time + 1 values),
        # plus one message written just before the start and one just after the stop, hence the +3
        assert file["entry/historical_data_1/time"].len() == (
            stop_time - start_time + 3
        ), "Expected there to be one message per millisecond recorded between specified start and stop time"
        assert file["entry/historical_data_2/time"].len() == (
            stop_time - start_time + 3
        ), "Expected there to be one message per millisecond recorded between specified start and stop time"

        # EPICS alarms
        assert (
            file["entry/historical_data_1/alarm_status"].len() == 2
        ), "Expected two recorded changes in EPICS alarm status"
        assert (
            file["entry/historical_data_1/alarm_severity"].len() == 2
        ), "Expected two recorded changes in EPICS alarm severity"