Example #1
    def test_raises_not_implemented_error_when_trying_to_serialise_numpy_complex_number_type(
        self,
    ):
        complex_log = {
            "source_name": "some_source",
            # np.complex128 is NumPy's complex scalar type
            # (the np.complex alias was removed from NumPy)
            "value": np.complex128(3 + 4j),
            "timestamp_unix_ns": 1585332414000000000,
        }
        with pytest.raises(NotImplementedError):
            serialise_f142(**complex_log)
Example #2
async def test_data_returned_if_multiple_slow_metadata_msgs_exceed_buffer(
        queues):
    data_queue, worker_instruction_queue, test_message_queue = queues
    run_info_topic = "fake_topic"
    test_instrument_name = "DATA_STREAM_TEST"

    # The Kafka topics to get metadata from are recorded as "stream" objects in
    # the nexus_structure field of the run start message
    f142_source_name = "f142_source"
    f142_log_name = "f142_log"
    streams = [
        Stream(f"/entry/{f142_log_name}", "f142_topic", f142_source_name,
               "f142", "double", "m"),
    ]

    first_f142_value = 26.1236
    f142_timestamp = 123456  # ns after epoch
    first_message = serialise_f142(first_f142_value, f142_source_name,
                                   f142_timestamp)
    second_f142_value = 62.721
    second_message = serialise_f142(second_f142_value, f142_source_name,
                                    f142_timestamp)

    test_stream_args = TEST_STREAM_ARGS.copy()
    test_stream_args["slow_metadata_buffer_size"] = 1
    test_stream_args["topics"] = None
    n_chunks = 0
    reached_asserts = False
    async for data in _data_stream(data_queue,
                                   worker_instruction_queue,
                                   run_info_topic=run_info_topic,
                                   query_consumer=FakeQueryConsumer(
                                       test_instrument_name, streams=streams),
                                   halt_after_n_data_chunks=3,
                                   **test_stream_args,
                                   test_message_queue=test_message_queue):
        # The zeroth chunk (n_chunks == 0) contains data
        # from the run start message
        if n_chunks == 0:
            test_message_queue.put(FakeMessage(first_message))
            test_message_queue.put(FakeMessage(second_message))
        elif n_chunks == 1:
            # Contains data from first message
            assert isclose(data.attrs[f142_source_name].value.values[0],
                           first_f142_value)
        elif n_chunks == 2:
            # Contains data from second message
            assert isclose(data.attrs[f142_source_name].value.values[0],
                           second_f142_value)
            reached_asserts = True
        n_chunks += 1

    assert reached_asserts
Example #3
async def test_data_stream_returns_data_from_multiple_slow_metadata_messages(
        queues):
    data_queue, worker_instruction_queue, test_message_queue = queues
    run_info_topic = "fake_topic"
    test_instrument_name = "DATA_STREAM_TEST"

    # The Kafka topics to get metadata from are recorded as "stream" objects in
    # the nexus_structure field of the run start message
    f142_source_name = "f142_source"
    f142_log_name = "f142_log"
    streams = [
        Stream(f"/entry/{f142_log_name}", "f142_topic", f142_source_name,
               "f142", "double", "m"),
    ]

    test_stream_args = TEST_STREAM_ARGS.copy()
    test_stream_args["topics"] = None
    n_chunks = 0
    async for data in _data_stream(data_queue,
                                   worker_instruction_queue,
                                   run_info_topic=run_info_topic,
                                   query_consumer=FakeQueryConsumer(
                                       test_instrument_name, streams=streams),
                                   halt_after_n_data_chunks=2,
                                   **test_stream_args,
                                   test_message_queue=test_message_queue):
        data_from_stream = data

        if n_chunks == 0:
            # Fake receiving a Kafka message for each metadata schema
            # Do this after the run start message has been parsed, so that
            # a metadata buffer will have been created for each data source
            # described in the start message.
            f142_value_1 = 26.1236
            f142_timestamp_1 = 123456  # ns after epoch
            f142_test_message = serialise_f142(f142_value_1, f142_source_name,
                                               f142_timestamp_1)
            test_message_queue.put(FakeMessage(f142_test_message))
            f142_value_2 = 2.725
            f142_timestamp_2 = 234567  # ns after epoch
            f142_test_message = serialise_f142(f142_value_2, f142_source_name,
                                               f142_timestamp_2)
            test_message_queue.put(FakeMessage(f142_test_message))

        n_chunks += 1

    assert np.allclose(data_from_stream.attrs[f142_source_name].value.values,
                       np.array([f142_value_1, f142_value_2]))
    assert np.array_equal(
        data_from_stream.attrs[f142_source_name].value.coords['time'].values,
        np.array([f142_timestamp_1, f142_timestamp_2],
                 dtype=np.dtype('datetime64[ns]')))
Example #4
def publish_f142_message(
    producer: Producer,
    topic: str,
    kafka_timestamp: Optional[int] = None,
    source_name: Optional[str] = None,
    alarm_status: Optional[int] = None,
    alarm_severity: Optional[int] = None,
):
    """
    Publish an f142 message to a given topic.
    Optionally set the timestamp in the kafka header to allow, for example, fake "historical" data.
    :param producer: Producer to publish the message with
    :param topic: Name of topic to publish to
    :param kafka_timestamp: Timestamp to set in the Kafka header (milliseconds after unix epoch)
    :param source_name: Name of the source in the f142 message
    :param alarm_status: EPICS alarm status, use enum-like class from streaming_data_types.fbschemas.logdata_f142.AlarmStatus
    :param alarm_severity: EPICS alarm severity, use enum-like class from streaming_data_types.fbschemas.logdata_f142.AlarmSeverity
    """
    if source_name is None:
        source_name = "fw-test-helpers"
    value = 42
    f142_message = serialise_f142(
        value,
        source_name,
        _millseconds_to_nanoseconds(kafka_timestamp),
        alarm_status,
        alarm_severity,
    )
    producer.produce(topic, f142_message, timestamp=kafka_timestamp)
    producer.poll(0)
    producer.flush()
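
A minimal usage sketch for the helper above, assuming confluent_kafka's Producer (the broker address and topic name are placeholders; the alarm enums are imported from the paths given in the docstring):

from confluent_kafka import Producer
from streaming_data_types.fbschemas.logdata_f142.AlarmSeverity import AlarmSeverity
from streaming_data_types.fbschemas.logdata_f142.AlarmStatus import AlarmStatus

producer = Producer({"bootstrap.servers": "localhost:9092"})
# Publish a value with a fake "historical" Kafka timestamp (ms after unix epoch)
publish_f142_message(
    producer,
    "test_topic",
    kafka_timestamp=1585332414000,
    source_name="fw-test-helpers",
    alarm_status=AlarmStatus.NO_ALARM,
    alarm_severity=AlarmSeverity.NO_ALARM,
)

The helper already calls poll() and flush(), so no extra flushing is needed after the call.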
Example #5
    def test_if_buffer_has_wrong_id_then_throws(self):
        buf = serialise_f142(**self.original_entry)

        # Manually hack the id
        buf = bytearray(buf)
        buf[4:8] = b"1234"

        with pytest.raises(RuntimeError):
            deserialise_f142(buf)
Example #6
    def test_serialises_and_deserialises_integer_f142_message_correctly(self):
        buf = serialise_f142(**self.original_entry)
        deserialised_tuple = deserialise_f142(buf)

        assert deserialised_tuple.source_name == self.original_entry["source_name"]
        assert deserialised_tuple.value == self.original_entry["value"]
        assert (
            deserialised_tuple.timestamp_unix_ns
            == self.original_entry["timestamp_unix_ns"]
        )
Example #7
    def test_epics_alarms_default_to_no_change_when_not_provided_to_serialiser(self):
        float_log = {
            "source_name": "some_source",
            "value": 1.234,
            "timestamp_unix_ns": 1585332414000000000,
        }
        buf = serialise_f142(**float_log)
        deserialised_tuple = deserialise_f142(buf)

        assert deserialised_tuple.alarm_status == AlarmStatus.NO_CHANGE
        assert deserialised_tuple.alarm_severity == AlarmSeverity.NO_CHANGE
Example #8
def to_f142(dev_name, dev_value, dev_severity, timestamp_ns):
    """Convert the device information in to an f142 FlatBuffer.

    :param dev_name: the device name
    :param dev_value: the device's value
    :param dev_severity: the device's status
    :param timestamp_ns: the associated timestamp in nanoseconds
    :return: FlatBuffer representation of data
    """
    # Alarm status is not relevant for NICOS but we have to send something
    return serialise_f142(dev_value, dev_name, timestamp_ns,
                          AlarmStatus.NO_ALARM, dev_severity)
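
A round-trip sketch for the conversion above, using the deserialiser from the same streaming-data-types package (the device name and value are illustrative):

import time

from streaming_data_types.fbschemas.logdata_f142.AlarmSeverity import AlarmSeverity
from streaming_data_types.logdata_f142 import deserialise_f142

buf = to_f142("T_sample", 293.15, AlarmSeverity.NO_ALARM, time.time_ns())
log = deserialise_f142(buf)
assert log.source_name == "T_sample"
assert log.alarm_severity == AlarmSeverity.NO_ALARM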
Example #9
    def test_serialises_and_deserialises_numpy_array_integers_correctly(self):
        array_log = {
            "source_name": "some_source",
            "value": np.array([1, 2, 3]),
            "timestamp_unix_ns": 1585332414000000000,
        }
        buf = serialise_f142(**array_log)
        deserialised_tuple = deserialise_f142(buf)

        assert deserialised_tuple.source_name == array_log["source_name"]
        assert np.array_equal(deserialised_tuple.value, array_log["value"])
        assert deserialised_tuple.timestamp_unix_ns == array_log["timestamp_unix_ns"]
Example #10
    def test_serialises_and_deserialises_string_f142_message_correctly(self):
        string_log = {
            "source_name": "some_source",
            "value": "some_string",
            "timestamp_unix_ns": 1585332414000000000,
        }
        buf = serialise_f142(**string_log)
        deserialised_tuple = deserialise_f142(buf)

        assert deserialised_tuple.source_name == string_log["source_name"]
        assert deserialised_tuple.value == string_log["value"]
        assert deserialised_tuple.timestamp_unix_ns == string_log["timestamp_unix_ns"]
Example #11
    def test_serialises_and_deserialises_scalar_ndarray_f142_message_correctly(self):
        numpy_log = {
            "source_name": "some_source",
            "value": np.array(42),
            "timestamp_unix_ns": 1585332414000000000,
        }
        buf = serialise_f142(**numpy_log)
        deserialised_tuple = deserialise_f142(buf)

        assert deserialised_tuple.source_name == numpy_log["source_name"]
        assert deserialised_tuple.value == np.array(numpy_log["value"])
        assert deserialised_tuple.timestamp_unix_ns == numpy_log["timestamp_unix_ns"]
Example #12
    def test_serialises_and_deserialises_epics_alarms_correctly(self):
        float_log = {
            "source_name": "some_source",
            "value": 1.234,
            "timestamp_unix_ns": 1585332414000000000,
            "alarm_status": AlarmStatus.HIHI,
            "alarm_severity": AlarmSeverity.MAJOR,
        }
        buf = serialise_f142(**float_log)
        deserialised_tuple = deserialise_f142(buf)

        assert deserialised_tuple.alarm_status == float_log["alarm_status"]
        assert deserialised_tuple.alarm_severity == float_log["alarm_severity"]
Example #13
    def test_serialises_and_deserialises_native_list_correctly(self):
        list_log = {
            "source_name": "some_source",
            "value": [1, 2, 3],
            "timestamp_unix_ns": 1585332414000000000,
        }
        buf = serialise_f142(**list_log)
        deserialised_tuple = deserialise_f142(buf)

        assert deserialised_tuple.source_name == list_log["source_name"]
        # Array values are output as numpy array
        assert np.array_equal(deserialised_tuple.value, np.array(list_log["value"]))
        assert deserialised_tuple.timestamp_unix_ns == list_log["timestamp_unix_ns"]
Example #14
    def test_serialises_and_deserialises_numpy_array_preserves_integer_type_correctly(
        self,
    ):
        array_log = {
            "source_name": "some_source",
            "value": np.array([1, 2, 3], dtype=np.uint16),
            "timestamp_unix_ns": 1585332414000000000,
        }
        buf = serialise_f142(**array_log)
        deserialised_tuple = deserialise_f142(buf)

        assert np.array_equal(deserialised_tuple.value, array_log["value"])
        assert deserialised_tuple.value.dtype == array_log["value"].dtype
Example #15
def publish_f142_message(
    producer: KafkaProducer,
    topic: str,
    data: np.ndarray,
    source_name: str,
    timestamp_ns: int,
    alarm_status: Optional[AlarmStatus] = None,
    alarm_severity: Optional[AlarmSeverity] = None,
):
    """
    Publish an f142 message to a given topic.
    :param producer: Kafka producer to publish update with
    :param topic: Name of topic to publish to
    :param data: Value of the PV update
    :param source_name: Name of the PV
    :param timestamp_ns: Timestamp for value (nanoseconds after unix epoch)
    :param alarm_status: EPICS alarm status of the PV
    :param alarm_severity: EPICS alarm severity of the PV
    """
    if alarm_status is None:
        f142_message = serialise_f142(
            value=data, source_name=source_name, timestamp_unix_ns=timestamp_ns,
        )
    else:
        f142_message = serialise_f142(
            value=data,
            source_name=source_name,
            timestamp_unix_ns=timestamp_ns,
            alarm_status=alarm_status,
            alarm_severity=alarm_severity,
        )
    producer.produce(
        topic,
        f142_message,
        key=source_name,
        timestamp_ms=_nanoseconds_to_milliseconds(timestamp_ns),
    )
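
The _nanoseconds_to_milliseconds helper is not shown in this example; a plausible one-line sketch of what it does (an assumption, not the original implementation) is:

def _nanoseconds_to_milliseconds(timestamp_ns: int) -> int:
    # Kafka header timestamps are milliseconds after the unix epoch
    return int(timestamp_ns) // 1_000_000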
Example #16
    def create_f142_message(self, timestamp, tag, value):
        timestamp_ns = timestamp_to_nsecs(timestamp)
        if isinstance(value, str):
            # Override the current implementation as it appears wrong:
            # it converts a ubyte array to a ushort array
            bvalue = np.array(list(value.encode('utf-8')), dtype=np.ubyte)
            builder, source = _setup_builder(tag)
            _serialise_ubytearray(builder, bvalue, source)
            buff = _complete_buffer(
                builder,
                timestamp_ns,
            )

        else:
            buff = serialise_f142(value, tag, timestamp_ns)
        return buff
Example #17
import os
import time
from datetime import datetime

import numpy as np
from PIL import Image
from kafka import KafkaProducer
from streaming_data_types.area_detector_ADAr import serialise_ADAr
from streaming_data_types.logdata_f142 import serialise_f142

# image_key values (NXtomo convention: 0 = projection, 1 = flat field,
# 2 = dark field, 3 = invalid)
projection = 0
flat_field = 1
dark_field = 2
invalid = 3

if __name__ == '__main__':
    kafka_producer = KafkaProducer(bootstrap_servers='localhost:9092')
    idx = 150
    for i in range(1, 201):
        idx += 1
        path_to_image = os.path.join('..', 'Lego1', f'tomo_{i:04d}.tif')
        image = Image.open(path_to_image)
        img_array = np.array(image)
        serialized_output = serialise_ADAr('image_source', idx, datetime.now(),
                                           img_array)
        rotation_angle = serialise_f142(i, 'rotation_angle', time.time_ns())
        image_key = serialise_f142(projection, 'image_key', time.time_ns())
        kafka_producer.send('odin_topic', serialized_output)
        kafka_producer.send('odin_topic', rotation_angle)
        kafka_producer.send('odin_topic', image_key)
        time.sleep(0.1)
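
A consumer-side sketch for the script above, assuming the same kafka-python package and broker; the 4-byte schema id at bytes 4-8 of each buffer (as seen in Example #5) is used to pick the f142 messages out of the mixed topic:

from kafka import KafkaConsumer
from streaming_data_types.logdata_f142 import deserialise_f142

consumer = KafkaConsumer('odin_topic',
                         bootstrap_servers='localhost:9092',
                         auto_offset_reset='earliest',
                         consumer_timeout_ms=5000)  # stop after 5 s without messages
for message in consumer:
    # Only deserialise messages carrying the f142 flatbuffer id
    if message.value[4:8] == b"f142":
        log = deserialise_f142(message.value)
        print(log.source_name, log.value)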

Example #18
async def test_data_stream_returns_metadata(queues):
    data_queue, worker_instruction_queue, test_message_queue = queues
    run_info_topic = "fake_topic"
    test_instrument_name = "DATA_STREAM_TEST"

    # The Kafka topics to get metadata from are recorded as "stream" objects in
    # the nexus_structure field of the run start message
    # There are currently 3 metadata schemas; their flatbuffer ids are
    # f142, senv and tdct
    f142_source_name = "f142_source"
    f142_log_name = "f142_log"
    senv_source_name = "senv_source"
    senv_log_name = "senv_log"
    tdct_source_name = "tdct_source"
    tdct_log_name = "tdct_log"
    streams = [
        Stream(f"/entry/{f142_log_name}", "f142_topic", f142_source_name,
               "f142", "double", "m"),
        Stream(f"/entry/{senv_log_name}", "senv_topic", senv_source_name,
               "senv", "double", "m"),
        Stream(f"/entry/{tdct_log_name}", "tdct_topic", tdct_source_name,
               "tdct")
    ]

    test_stream_args = TEST_STREAM_ARGS.copy()
    test_stream_args["topics"] = None
    n_chunks = 0
    async for data in _data_stream(data_queue,
                                   worker_instruction_queue,
                                   run_info_topic=run_info_topic,
                                   query_consumer=FakeQueryConsumer(
                                       test_instrument_name, streams=streams),
                                   halt_after_n_data_chunks=2,
                                   **test_stream_args,
                                   test_message_queue=test_message_queue):
        data_from_stream = data

        if n_chunks == 0:
            # Fake receiving a Kafka message for each metadata schema
            # Do this after the run start message has been parsed, so that
            # a metadata buffer will have been created for each data source
            # described in the start message.
            f142_value = 26.1236
            f142_timestamp = 123456  # ns after epoch
            f142_test_message = serialise_f142(f142_value, f142_source_name,
                                               f142_timestamp)
            test_message_queue.put(FakeMessage(f142_test_message))
            senv_values = np.array([26, 127, 52])
            senv_timestamp_ns = 123000  # ns after epoch
            senv_timestamp = datetime.datetime.fromtimestamp(
                senv_timestamp_ns * 1e-9, datetime.timezone.utc)
            senv_time_between_samples = 100  # ns
            senv_test_message = serialise_senv(senv_source_name, -1,
                                               senv_timestamp,
                                               senv_time_between_samples, 0,
                                               senv_values, Location.Start)
            test_message_queue.put(FakeMessage(senv_test_message))
            tdct_timestamps = np.array([1234, 2345, 3456])  # ns
            tdct_test_message = serialise_tdct(tdct_source_name,
                                               tdct_timestamps)
            test_message_queue.put(FakeMessage(tdct_test_message))

        n_chunks += 1

    assert isclose(data_from_stream.attrs[f142_source_name].value.values[0],
                   f142_value)
    assert data_from_stream.attrs[f142_source_name].value.coords[
        'time'].values[0] == np.array(f142_timestamp,
                                      dtype=np.dtype('datetime64[ns]'))
    assert np.array_equal(
        data_from_stream.attrs[senv_source_name].value.values, senv_values)
    senv_expected_timestamps = np.array(
        [
            senv_timestamp_ns,
            senv_timestamp_ns + senv_time_between_samples,
            senv_timestamp_ns + (2 * senv_time_between_samples)
        ],
        dtype=np.dtype('datetime64[ns]'))
    assert np.array_equal(
        data_from_stream.attrs[senv_source_name].value.coords['time'].values,
        senv_expected_timestamps)
    assert np.array_equal(
        data_from_stream.attrs[tdct_source_name].value.values, tdct_timestamps)
Example #19
async def test_stream_loop_exits_if_stop_time_reached_and_later_message_seen(
        queues):
    data_queue, worker_instruction_queue, test_message_queue = queues
    run_info_topic = "fake_topic"
    test_instrument_name = "DATA_STREAM_TEST"

    # The Kafka topics to get metadata from are recorded as "stream" objects in
    # the nexus_structure field of the run start message
    f142_source_name = "f142_source"
    f142_log_name = "f142_log"
    streams = [
        Stream(f"/entry/{f142_log_name}", "f142_topic", f142_source_name,
               "f142", "double", "m"),
    ]

    test_stream_args = TEST_STREAM_ARGS.copy()
    test_stream_args["topics"] = None
    # System time is already after this stop time so the stream will stop
    # as soon as it sees the end of partition or a message with a
    # timestamp after the stop time
    stop_time_in_past = datetime.datetime(2017, 11, 28, 23, 55, 59, 342380)
    n_chunks = 0
    async for data in _data_stream(data_queue,
                                   worker_instruction_queue,
                                   run_info_topic=run_info_topic,
                                   query_consumer=FakeQueryConsumer(
                                       test_instrument_name,
                                       stop_time=stop_time_in_past,
                                       streams=streams),
                                   **test_stream_args,
                                   test_message_queue=test_message_queue,
                                   end_at=StopTime.END_OF_RUN):
        if n_chunks == 0:
            # Publish a message with a timestamp before the stop time
            f142_value_1 = 26.1236
            timestamp_before_stop_dt = datetime.datetime(
                2017, 11, 28, 23, 55, 50, 0)
            # Convert to integer nanoseconds
            # (for timestamp in message payload)
            timestamp_before_stop_ns = int(
                timestamp_before_stop_dt.timestamp() * 1_000_000_000)
            # Convert to integer milliseconds
            # (for Kafka message header)
            timestamp_before_stop_ms = int(
                timestamp_before_stop_dt.timestamp() * 1_000)
            f142_test_message = serialise_f142(f142_value_1, f142_source_name,
                                               timestamp_before_stop_ns)
            test_message_queue.put(
                FakeMessage(f142_test_message,
                            timestamp=timestamp_before_stop_ms))
        elif n_chunks == 1:
            # The data from the first message will be returned
            assert np.allclose(data.attrs[f142_source_name].value.values,
                               np.array([f142_value_1]))
            assert np.array_equal(
                data.attrs[f142_source_name].value.coords['time'].values,
                np.array([timestamp_before_stop_ns],
                         dtype=np.dtype('datetime64[ns]')))

            # Publish message with timestamp after stop time, this will trigger
            # the consumer to stop and data_stream to exit.
            # A TimeoutError would occur if the functionality is broken.
            f142_value_2 = 2.725
            timestamp_after_stop_dt = datetime.datetime(
                2017, 11, 28, 23, 56, 50, 0)
            timestamp_after_stop_ns = int(timestamp_after_stop_dt.timestamp() *
                                          1_000_000_000)
            timestamp_after_stop_ms = int(timestamp_after_stop_dt.timestamp() *
                                          1_000)
            f142_test_message = serialise_f142(f142_value_2, f142_source_name,
                                               timestamp_after_stop_ns)
            test_message_queue.put(
                FakeMessage(f142_test_message,
                            timestamp=timestamp_after_stop_ms))

        n_chunks += 1
Example #20
def create_f142_buffer(value, source_name='mypv'):
    return serialise_f142(value, source_name)
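
A quick round-trip sketch for the helper above, checking the default source name:

from streaming_data_types.logdata_f142 import deserialise_f142

buf = create_f142_buffer(3.14)
log = deserialise_f142(buf)
assert log.source_name == 'mypv'
assert log.value == 3.14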