def test_if_buffer_has_wrong_id_then_throws(self):
    """Deserialising a buffer whose schema id has been corrupted must raise RuntimeError."""
    corrupted = bytearray(serialise_f142(**self.original_entry))
    # Overwrite the 4-byte file identifier (offset 4..8) with garbage
    corrupted[4:8] = b"1234"
    with pytest.raises(RuntimeError):
        deserialise_f142(corrupted)
def test_update_handler_publishes_periodic_update():
    """With a periodic update interval set, more messages than the single
    monitor-triggered one should be published over time."""
    fake_producer = FakeProducer()
    fake_context = FakeContext()
    timestamp_s = 1.1  # seconds from unix epoch
    source = "source_name"
    value = -3
    scalar_type = "i"
    period_ms = 10

    handler = PVAUpdateHandler(fake_producer, fake_context, source, "output_topic", "f142", period_ms)  # type: ignore
    fake_context.call_monitor_callback_with_fake_pv_update(
        NTScalar(scalar_type, valueAlarm=True).wrap(value, timestamp=timestamp_s)
    )

    assert fake_producer.published_payload is not None
    output = deserialise_f142(fake_producer.published_payload)
    assert output.value == value
    assert output.source_name == source

    # Give the periodic timer a few cycles to fire
    sleep(0.05)
    assert (
        fake_producer.messages_published > 1
    ), "Expected more than the 1 message from triggered update due to periodic updates being active"

    handler.stop()
def test_update_handler_publishes_enum_update():
    """An NTEnum update should be forwarded with the selected choice string,
    not the integer index, as the value."""
    fake_producer = FakeProducer()
    fake_context = FakeContext()
    selected_index = 0
    selected_choice = "choice0"
    timestamp_s = 1.1  # seconds from unix epoch
    source = "source_name"

    handler = PVAUpdateHandler(fake_producer, fake_context, source, "output_topic", "f142")  # type: ignore
    fake_context.call_monitor_callback_with_fake_pv_update(
        NTEnum(valueAlarm=True).wrap(
            {
                "index": selected_index,
                "choices": [selected_choice, "choice1", "choice2"],
            },
            timestamp=timestamp_s,
        )
    )

    assert fake_producer.published_payload is not None
    output = deserialise_f142(fake_producer.published_payload)
    assert output.value == selected_choice
    assert output.source_name == source

    handler.stop()
def test_update_handler_publishes_alarm_update():
    """EPICS alarm status and severity carried by a CA update should be
    forwarded in the published f142 message."""
    fake_producer = FakeProducer()
    fake_context = FakeContext()
    value = 42
    caproto_type = ChannelType.TIME_INT
    numpy_type = np.int32
    source = "source_name"
    status_low = 6  # AlarmStatus.LOW
    severity_minor = 1  # AlarmSeverity.MINOR

    handler = CAUpdateHandler(fake_producer, fake_context, source, "output_topic", "f142")  # type: ignore
    fake_context.call_monitor_callback_with_fake_pv_update(
        ReadNotifyResponse(
            np.array([value]).astype(numpy_type),
            caproto_type,
            1,
            1,
            1,
            metadata=(status_low, severity_minor, TimeStamp(4, 0)),
        )
    )

    assert fake_producer.published_payload is not None
    output = deserialise_f142(fake_producer.published_payload)
    assert output.value == value
    assert output.source_name == source
    assert output.alarm_status == AlarmStatus.LOW
    assert output.alarm_severity == AlarmSeverity.MINOR

    handler.stop()
def check_expected_value(log_data_buffer: bytes, pv_name: str, expected_value: Any = None):
    """
    Checks the message name (PV) and value type (type of PV), and, optionally, the value.

    :param log_data_buffer: Received message payload
    :param pv_name: PV/channel name
    :param expected_value: The expected PV value from the message, can be a list or tuple for arrays
    """
    log_data = deserialise_f142(log_data_buffer)
    assert log_data.source_name == pv_name
    assert log_data.timestamp_unix_ns > 0

    # Value check is optional
    if expected_value is None:
        return

    # Guard clauses instead of nested if/else; isinstance instead of type() comparison
    if isinstance(expected_value, (list, tuple)):
        # Array values: element-wise comparison with float tolerance
        assert np.allclose(log_data.value, np.array(expected_value))
    elif isinstance(expected_value, float):
        # Avoid exact equality on floats
        assert isclose(log_data.value, expected_value)
    else:
        assert (
            log_data.value == expected_value
        ), f"Expected {expected_value}, got {log_data.value}"
def test_update_handler_publishes_int_update(pv_value, pv_caproto_type, pv_numpy_type):
    """A CA integer update should be published with matching value and source name."""
    fake_producer = FakeProducer()
    fake_context = FakeContext()
    source = "source_name"

    handler = CAUpdateHandler(fake_producer, fake_context, source, "output_topic", "f142")  # type: ignore
    fake_context.call_monitor_callback_with_fake_pv_update(
        ReadNotifyResponse(
            np.array([pv_value]).astype(pv_numpy_type),
            pv_caproto_type,
            1,
            1,
            1,
            metadata=(0, 0, TimeStamp(4, 0)),
        )
    )

    assert fake_producer.published_payload is not None
    output = deserialise_f142(fake_producer.published_payload)
    assert output.value == pv_value
    assert output.source_name == source

    handler.stop()
def test_update_handler_publishes_periodic_update():
    """With a periodic update interval set on the CA handler, more messages
    than the single monitor-triggered one should be published over time."""
    fake_producer = FakeProducer()
    fake_context = FakeContext()
    value = 42
    caproto_type = ChannelType.TIME_INT
    numpy_type = np.int32
    source = "source_name"
    period_ms = 10

    handler = CAUpdateHandler(fake_producer, fake_context, source, "output_topic", "f142", period_ms)  # type: ignore
    fake_context.call_monitor_callback_with_fake_pv_update(
        ReadNotifyResponse(
            np.array([value]).astype(numpy_type),
            caproto_type,
            1,
            1,
            1,
            metadata=(0, 0, TimeStamp(4, 0)),
        )
    )

    assert fake_producer.published_payload is not None
    output = deserialise_f142(fake_producer.published_payload)
    assert output.value == value
    assert output.source_name == source

    # Give the periodic timer a few cycles to fire
    sleep(0.05)
    assert (
        fake_producer.messages_published > 1
    ), "Expected more than the 1 message from triggered update due to periodic updates being active"

    handler.stop()
def test_serialises_and_deserialises_integer_f142_message_correctly(self):
    """Round-trip an integer log entry and check all fields survive unchanged."""
    expected = self.original_entry
    round_tripped = deserialise_f142(serialise_f142(**expected))
    assert round_tripped.source_name == expected["source_name"]
    assert round_tripped.value == expected["value"]
    assert round_tripped.timestamp_unix_ns == expected["timestamp_unix_ns"]
def test_epics_alarms_default_to_no_change_when_not_provided_to_serialiser(self):
    """Omitting alarm fields at serialise time should yield NO_CHANGE after deserialisation."""
    entry = {
        "source_name": "some_source",
        "value": 1.234,
        "timestamp_unix_ns": 1585332414000000000,
    }
    round_tripped = deserialise_f142(serialise_f142(**entry))
    assert round_tripped.alarm_status == AlarmStatus.NO_CHANGE
    assert round_tripped.alarm_severity == AlarmSeverity.NO_CHANGE
def check_expected_alarm_status(
    log_data_buffer: bytes,
    expected_status: AlarmStatus,
    expected_severity: AlarmSeverity,
):
    """Deserialise the payload and assert its alarm severity and status match expectations."""
    log_data = deserialise_f142(log_data_buffer)
    actual_severity = log_data.alarm_severity
    actual_status = log_data.alarm_status
    assert (
        actual_severity == expected_severity
    ), f"Actual alarm severity: {actual_severity}, Expected alarm severity: {expected_severity}"
    assert (
        actual_status == expected_status
    ), f"Actual alarm status: {actual_status}, Expected alarm status: {expected_status}"
def test_serialises_and_deserialises_numpy_array_integers_correctly(self):
    """Round-trip a numpy integer array and check source, value, and timestamp."""
    entry = {
        "source_name": "some_source",
        "value": np.array([1, 2, 3]),
        "timestamp_unix_ns": 1585332414000000000,
    }
    round_tripped = deserialise_f142(serialise_f142(**entry))
    assert round_tripped.source_name == entry["source_name"]
    assert np.array_equal(round_tripped.value, entry["value"])
    assert round_tripped.timestamp_unix_ns == entry["timestamp_unix_ns"]
def test_serialises_and_deserialises_string_f142_message_correctly(self):
    """Round-trip a string-valued log entry and check all fields survive."""
    entry = {
        "source_name": "some_source",
        "value": "some_string",
        "timestamp_unix_ns": 1585332414000000000,
    }
    round_tripped = deserialise_f142(serialise_f142(**entry))
    assert round_tripped.source_name == entry["source_name"]
    assert round_tripped.value == entry["value"]
    assert round_tripped.timestamp_unix_ns == entry["timestamp_unix_ns"]
def test_serialises_and_deserialises_scalar_ndarray_f142_message_correctly(self):
    """Round-trip a 0-dimensional (scalar) ndarray value and check all fields survive."""
    entry = {
        "source_name": "some_source",
        "value": np.array(42),
        "timestamp_unix_ns": 1585332414000000000,
    }
    round_tripped = deserialise_f142(serialise_f142(**entry))
    assert round_tripped.source_name == entry["source_name"]
    assert round_tripped.value == np.array(entry["value"])
    assert round_tripped.timestamp_unix_ns == entry["timestamp_unix_ns"]
def test_serialises_and_deserialises_numpy_array_preserves_integer_type_correctly(
    self,
):
    """Round-tripping an array must preserve not just values but the integer dtype."""
    entry = {
        "source_name": "some_source",
        "value": np.array([1, 2, 3], dtype=np.uint16),
        "timestamp_unix_ns": 1585332414000000000,
    }
    round_tripped = deserialise_f142(serialise_f142(**entry))
    assert np.array_equal(round_tripped.value, entry["value"])
    assert round_tripped.value.dtype == entry["value"].dtype
def test_serialises_and_deserialises_native_list_correctly(self):
    """Round-trip a native Python list; the value comes back as a numpy array."""
    entry = {
        "source_name": "some_source",
        "value": [1, 2, 3],
        "timestamp_unix_ns": 1585332414000000000,
    }
    round_tripped = deserialise_f142(serialise_f142(**entry))
    assert round_tripped.source_name == entry["source_name"]
    # Array values are output as numpy array
    assert np.array_equal(round_tripped.value, np.array(entry["value"]))
    assert round_tripped.timestamp_unix_ns == entry["timestamp_unix_ns"]
def test_serialises_and_deserialises_epics_alarms_correctly(self):
    """Alarm status and severity supplied to the serialiser should round-trip intact."""
    entry = {
        "source_name": "some_source",
        "value": 1.234,
        "timestamp_unix_ns": 1585332414000000000,
        "alarm_status": AlarmStatus.HIHI,
        "alarm_severity": AlarmSeverity.MAJOR,
    }
    round_tripped = deserialise_f142(serialise_f142(**entry))
    assert round_tripped.alarm_status == entry["alarm_status"]
    assert round_tripped.alarm_severity == entry["alarm_severity"]
def test_update_handler_publishes_f142_update():
    """Triggering the fake handler's timer callback should publish an f142
    message carrying the configured source name."""
    fake_producer = FakeProducer()
    source = "source_name"

    handler = FakeUpdateHandler(fake_producer, source, "output_topic", "f142", 100)  # type: ignore
    handler._timer_callback()

    assert fake_producer.published_payload is not None
    output = deserialise_f142(fake_producer.published_payload)
    assert output.source_name == source

    handler.stop()
def test_update_handler_does_not_include_alarm_details_if_unchanged_in_subsequent_updates(
):
    """A repeated PVA update with an identical alarm should be published with
    NO_CHANGE alarm fields in the second message."""
    fake_producer = FakeProducer()
    fake_context = FakeContext()
    timestamp_s = 1.1  # seconds from unix epoch
    source = "source_name"
    value = -3
    scalar_type = "i"
    status_record = 4  # Indicates RECORD alarm, we map the alarm message to a specific alarm status to forward
    severity_minor = 1  # AlarmSeverity.MINOR
    message = "HIGH_ALARM"

    handler = PVAUpdateHandler(fake_producer, fake_context, source, "output_topic", "f142")  # type: ignore
    # Deliver the same alarmed update twice; the second carries an unchanged alarm
    for _ in range(2):
        fake_context.call_monitor_callback_with_fake_pv_update(
            NTScalar(scalar_type, valueAlarm=True).wrap(
                {
                    "value": value,
                    "alarm": {
                        "status": status_record,
                        "severity": severity_minor,
                        "message": message,
                    },
                },
                timestamp=timestamp_s,
            )
        )

    assert fake_producer.messages_published == 2
    output = deserialise_f142(fake_producer.published_payload)
    assert output.alarm_status == AlarmStatus.NO_CHANGE
    assert output.alarm_severity == AlarmSeverity.NO_CHANGE

    handler.stop()
def check_multiple_expected_values(message_list, expected_values):
    """
    Checks for expected PV values in multiple messages.
    Note: not order/time-specific, and requires PVs to have different names.

    :param message_list: A list of flatbuffers objects
    :param expected_values: A dict with PV names as keys for expected value types and values
    """
    seen_sources = set()
    for buffer in message_list:
        source = deserialise_f142(buffer).source_name
        # Each message must belong to a known PV, and each PV may appear only once
        assert source in expected_values and source not in seen_sources
        seen_sources.add(source)
        check_expected_value(buffer, source, expected_values[source])
def test_update_handler_publishes_enum_update():
    """An ENUM CA update triggers resubscription as STRING; only the
    subsequent string update should be published."""
    fake_producer = FakeProducer()
    fake_context = FakeContext()
    source = "source_name"

    handler = CAUpdateHandler(fake_producer, fake_context, source, "output_topic", "f142")  # type: ignore
    metadata = (0, 0, TimeStamp(4, 0))
    # Nothing gets published when ENUM type update is received, the handler will resubscribe using STRING
    # type as the string is more useful to forwarder to the filewriter than the enum int
    fake_context.call_monitor_callback_with_fake_pv_update(
        ReadNotifyResponse(
            np.array([0]),
            ChannelType.TIME_ENUM,
            1,
            1,
            1,
            metadata=metadata,
        )
    )
    # Second update, with STRING type
    enum_string_value = "ENUM_STRING"
    fake_context.call_monitor_callback_with_fake_pv_update(
        ReadNotifyResponse(
            [enum_string_value.encode("utf8")],
            ChannelType.TIME_STRING,
            1,
            1,
            1,
            metadata=metadata,
        )
    )

    assert (
        fake_producer.messages_published == 1
    ), "Only expected a single message with string payload, not the original enum update"
    assert fake_producer.published_payload is not None
    output = deserialise_f142(fake_producer.published_payload)
    assert output.value == enum_string_value
    assert output.source_name == source

    handler.stop()
def test_update_handler_publishes_int_update(pv_value, pv_type):
    """A PVA scalar update should be published with matching value and source name."""
    fake_producer = FakeProducer()
    fake_context = FakeContext()
    timestamp_s = 1.1  # seconds from unix epoch
    source = "source_name"

    handler = PVAUpdateHandler(fake_producer, fake_context, source, "output_topic", "f142")  # type: ignore
    fake_context.call_monitor_callback_with_fake_pv_update(
        NTScalar(pv_type, valueAlarm=True).wrap(pv_value, timestamp=timestamp_s)
    )

    assert fake_producer.published_payload is not None
    output = deserialise_f142(fake_producer.published_payload)
    assert output.value == pv_value
    assert output.source_name == source

    handler.stop()
def test_update_handler_does_not_include_alarm_details_if_unchanged_in_subsequent_updates(
):
    """A repeated CA update with an identical alarm should be published with
    NO_CHANGE alarm fields in the second message."""
    fake_producer = FakeProducer()
    fake_context = FakeContext()
    value = 42
    caproto_type = ChannelType.TIME_INT
    numpy_type = np.int32
    source = "source_name"
    status_low = 6  # AlarmStatus.LOW
    severity_minor = 1  # AlarmSeverity.MINOR

    handler = CAUpdateHandler(fake_producer, fake_context, source, "output_topic", "f142")  # type: ignore
    metadata = (status_low, severity_minor, TimeStamp(4, 0))
    # Deliver the same alarmed update twice; the second carries an unchanged alarm
    for _ in range(2):
        fake_context.call_monitor_callback_with_fake_pv_update(
            ReadNotifyResponse(
                np.array([value]).astype(numpy_type),
                caproto_type,
                1,
                1,
                1,
                metadata=metadata,
            )
        )

    assert fake_producer.messages_published == 2
    output = deserialise_f142(fake_producer.published_payload)
    assert output.alarm_status == AlarmStatus.NO_CHANGE
    assert output.alarm_severity == AlarmSeverity.NO_CHANGE

    handler.stop()
def test_forwarder_sends_idle_pv_updates(docker_compose_idle_updates):
    """Idle (periodic) updates should keep arriving and must always carry the
    current EPICS alarm state rather than NO_CHANGE."""
    data_topic = "TEST_forwarderData_idle_updates"
    sleep(5)
    producer = ProducerWrapper("localhost:9092", CONFIG_TOPIC, data_topic)
    producer.add_config([PVDOUBLE])

    consumer = create_consumer()
    consumer.subscribe([data_topic])
    sleep(10)

    # Expect several periodic messages even though the PV value never changes
    for _ in range(3):
        msg, _ = poll_for_valid_message(consumer)
        check_expected_value(msg, PVDOUBLE, 0)
        log_data = deserialise_f142(msg)
        assert (
            log_data.alarm_status != AlarmStatus.NO_CHANGE
        ), "Expect logs from periodic updates to always contain the current EPICS alarm status"
        assert (
            log_data.alarm_severity != AlarmSeverity.NO_CHANGE
        ), "Expect logs from periodic updates to always contain the current EPICS alarm severity"

    consumer.close()
    producer.stop_all_pvs()
def test_update_handler_publishes_alarm_update():
    """A PVA update carrying alarm details should be forwarded with the mapped
    alarm status and severity in the f142 message."""
    fake_producer = FakeProducer()
    fake_context = FakeContext()
    value = 42
    scalar_type = "i"
    timestamp_s = 1.1  # seconds from unix epoch
    source = "source_name"
    status_record = 4  # Indicates RECORD alarm, we map the alarm message to a specific alarm status to forward
    severity_minor = 1  # AlarmSeverity.MINOR
    message = "HIGH_ALARM"

    handler = PVAUpdateHandler(fake_producer, fake_context, source, "output_topic", "f142")  # type: ignore
    fake_context.call_monitor_callback_with_fake_pv_update(
        NTScalar(scalar_type, valueAlarm=True).wrap(
            {
                "value": value,
                "alarm": {
                    "status": status_record,
                    "severity": severity_minor,
                    "message": message,
                },
            },
            timestamp=timestamp_s,
        )
    )

    assert fake_producer.published_payload is not None
    output = deserialise_f142(fake_producer.published_payload)
    assert output.value == value
    assert output.source_name == source
    assert output.alarm_status == AlarmStatus.HIGH
    assert output.alarm_severity == AlarmSeverity.MINOR

    handler.stop()