def __init__(self, ip_address, port=DLT_DAEMON_TCP_PORT, use_proxy=False, **kwargs):
    """Initialize the DLT Broker

    :param str ip_address: IP address of the DLT Daemon
    :param str port: Port of the DLT Daemon
    :param bool use_proxy: Ignored - compatibility option
    :param **kwargs: All other args passed to DLTMessageHandler
    """
    # Remember connection parameters (used for logging / later inspection).
    self._ip_address = ip_address
    self._port = port
    self._filename = kwargs.get("filename")

    # - handlers init: shared stop flag and the two inter-process queues.
    self.mp_stop_flag = Event()
    self.filter_queue = Queue()
    self.message_queue = Queue()

    # Forward connection settings to the message-handler process;
    # keep a caller-supplied timeout, otherwise fall back to the default.
    kwargs["ip_address"] = ip_address
    kwargs["port"] = port
    kwargs.setdefault("timeout", DLT_CLIENT_TIMEOUT)

    self.msg_handler = DLTMessageHandler(self.filter_queue, self.message_queue, self.mp_stop_flag, kwargs)
    self.context_handler = DLTContextHandler(self.filter_queue, self.message_queue)
def setUp(self):
    """Create a fresh DLTContextHandler wired to new queues for every test."""
    if not six.PY2:
        # Python 3 multiprocessing queue objects require an explicit context.
        self.ctx = multiprocessing.get_context()
        self.filter_queue = mp_queue(ctx=self.ctx)
        self.message_queue = mp_queue(ctx=self.ctx)
    else:
        self.filter_queue = mp_queue()
        self.message_queue = mp_queue()
    self.handler = DLTContextHandler(self.filter_queue, self.message_queue)
def test_make_send_filter_msg():
    """Test to generate a filter message"""
    ctx_handler = DLTContextHandler(MagicMock(), MagicMock())
    msg_queue = MagicMock()
    apid_ctid_filters = [("APID", "CTID")]
    register = True

    # The handler identifies a context queue by its id().
    expected = (id(msg_queue), apid_ctid_filters, register)
    eq_(ctx_handler._make_send_filter_msg(msg_queue, apid_ctid_filters, register), expected)
def test_make_send_filter_msg_with_ack_queue():
    """Test to generate a filter message with ack queue setting"""
    ctx_handler = DLTContextHandler(MagicMock(), MagicMock())
    msg_queue = MagicMock()
    ack_queue = MagicMock()
    apid_ctid_filters = [("APID", "CTID")]
    register = True

    # With an ack queue the message carries both queue ids.
    expected = (id(msg_queue), id(ack_queue), apid_ctid_filters, register)
    eq_(
        ctx_handler._make_send_filter_msg(
            msg_queue, apid_ctid_filters, register, context_filter_ack_queue=ack_queue),
        expected,
    )
def __init__(self, ip_address, port=DLT_DAEMON_TCP_PORT, use_proxy=False, enable_dlt_time=False, **kwargs):
    """Initialize the DLT Broker

    :param str ip_address: IP address of the DLT Daemon. Defaults to TCP connection,
        unless a multicast address is used. In that case an UDP multicast connection
        will be used
    :param str port: Port of the DLT Daemon
    :param bool use_proxy: Ignored - compatibility option
    :param bool enable_dlt_time: Record the latest dlt message timestamp if enabled.
    :param **kwargs: All other args passed to DLTMessageHandler
    """
    # - dlt-time share memory init: None means dlt-time tracking is disabled.
    self._dlt_time_value = DLTTimeValue() if enable_dlt_time else None

    # Remember connection parameters (used for logging / later inspection).
    self._ip_address = ip_address
    self._port = port
    self._filename = kwargs.get("filename")

    # - handlers init: shared stop flag plus the two inter-process queues.
    self.mp_stop_flag = Event()
    self.filter_queue = Queue()
    self.message_queue = Queue()

    # Forward connection settings to the message-handler process.
    kwargs["ip_address"] = ip_address
    kwargs["port"] = port
    kwargs.setdefault("timeout", DLT_CLIENT_TIMEOUT)

    self.msg_handler = DLTMessageHandler(
        self.filter_queue,
        self.message_queue,
        self.mp_stop_flag,
        kwargs,
        dlt_time_value=self._dlt_time_value,
    )
    self.context_handler = DLTContextHandler(self.filter_queue, self.message_queue)
class DLTBroker(object):
    """DLT Broker class manages receiving and filtering of DLT Messages"""

    def __init__(self, ip_address, port=3490, use_proxy=False, **kwargs):
        """Initialize the DLT Broker

        :param str ip_address: IP address of the DLT Daemon
        :param str port: Port of the DLT Daemon
        :param bool use_proxy: Ignored - compatibility option
        :param **kwargs: All other args passed to DLTMessageHandler
        """
        # - handlers init
        self.mp_stop_flag = Event()
        self.filter_queue = Queue()
        self.message_queue = Queue()
        kwargs["ip_address"] = ip_address
        # BUGFIX: forward the port to DLTMessageHandler as well. Previously only
        # ip_address/timeout were passed, so a caller-supplied non-default port
        # was stored in self._port (and logged) but never actually used for the
        # connection.
        kwargs["port"] = port
        kwargs["timeout"] = kwargs.get("timeout", DLT_CLIENT_TIMEOUT)
        self.msg_handler = DLTMessageHandler(self.filter_queue, self.message_queue, self.mp_stop_flag, kwargs)
        self.context_handler = DLTContextHandler(self.filter_queue, self.message_queue)
        self._ip_address = ip_address
        self._port = port
        self._filename = kwargs.get("filename")

    def start(self):
        """DLTBroker main worker method"""
        logger.debug("Starting DLTBroker with parameters: use_proxy=%s, ip_address=%s, port=%s, filename=%s",
                     False, self._ip_address, self._port, self._filename)
        self.msg_handler.start()
        self.context_handler.start()
        # - ensure we don't block on join_thread() in stop()
        # https://docs.python.org/2.7/library/multiprocessing.html#multiprocessing.Queue.cancel_join_thread
        self.filter_queue.cancel_join_thread()
        self.message_queue.cancel_join_thread()

    def add_context(self, context_queue, filters=None):
        """Register context

        :param Queue context_queue: The queue to which new messages will be added
        :param tuple filters: An list of tuples (eg: [(apid, ctid)]) used to filter
                              messages that go into this queue.
        """
        if filters is None:
            filters = [(None, None)]

        if not isinstance(filters, (tuple, list)):
            raise RuntimeError("Context queue filters must be a tuple."
                               " Ex. (('SYS', 'JOUR'), ('AUDI', 'CAPI'))")

        self.context_handler.register(context_queue, filters)

    def remove_context(self, context_queue):
        """Unregister context

        :param Queue context_queue: The queue to unregister.
        """
        self.context_handler.unregister(context_queue)

    def stop(self):
        """Stop the broker"""
        logger.info("Stopping DLTContextHandler and DLTMessageHandler")

        # - stop the DLTMessageHandler process and DLTContextHandler thread
        self.mp_stop_flag.set()
        self.context_handler.stop()

        logger.debug("Waiting on DLTContextHandler and DLTMessageHandler")
        self.context_handler.join()
        # The handler process may already have exited after seeing the stop
        # flag; only terminate (and then join) if it is still alive.
        if self.msg_handler.is_alive():
            try:
                self.msg_handler.terminate()
            except OSError:
                pass
            else:
                self.msg_handler.join()
        logger.debug("DLTBroker execution done")

    # pylint: disable=invalid-name
    def isAlive(self):
        """Backwards compatibility method

        Called from mtee.testing.connectors.tools.broker_assert. Will need to be
        replaced in MTEE eventually.
        """
        return any((self.msg_handler.is_alive(), self.context_handler.is_alive()))
def setUp(self):
    """Build a DLTContextHandler backed by fresh multiprocessing queues."""
    self.filter_queue, self.message_queue = mp_queue(), mp_queue()
    self.handler = DLTContextHandler(self.filter_queue, self.message_queue)
class TestDLTContextHandler(unittest.TestCase):
    """Unit tests for DLTContextHandler registration, unregistration and
    message dispatching between the shared message queue and context queues."""

    def setUp(self):
        self.filter_queue = mp_queue()
        self.message_queue = mp_queue()
        self.handler = DLTContextHandler(self.filter_queue, self.message_queue)

    def test_init(self):
        self.assertFalse(self.handler.stop_flag.is_set())
        self.assertFalse(self.handler.is_alive())
        self.assertTrue(self.handler.filter_queue.empty())
        self.assertTrue(self.handler.message_queue.empty())

    def test_register_no_filter(self):
        queue = Queue()
        queue_id = id(queue)

        self.handler.register(queue)

        # When no filter is specified, filter (None, None) should be
        # added (ie: match all messages)
        self.assertIn(queue_id, self.handler.context_map)
        self.assertEqual(self.handler.context_map[queue_id], (queue, [(None, None)]))
        self.assertEqual(self.handler.filter_queue.get(), (queue_id, [(None, None)], True))

    def test_register_single_filter(self):
        queue = Queue()
        queue_id = id(queue)
        filters = ("SYS", "JOUR")

        self.handler.register(queue, filters)

        # Specified, filter should be added to filter_queue
        self.assertIn(queue_id, self.handler.context_map)
        self.assertEqual(self.handler.context_map[queue_id], (queue, filters))
        self.assertEqual(self.handler.filter_queue.get(), (queue_id, filters, True))

    def test_register_similar_filters(self):
        queue0 = Queue()
        queue_id0 = id(queue0)
        filters0 = ("SYS", "JOUR")

        queue1 = Queue()
        queue_id1 = id(queue1)
        filters1 = ("SYS", "JOUR")

        self.handler.register(queue0, filters0)
        self.handler.register(queue1, filters1)

        # Each queue should have a unique entry in the context_map and
        # filter_queue (even if they have the same filter)
        self.assertIn(queue_id0, self.handler.context_map)
        self.assertIn(queue_id1, self.handler.context_map)
        self.assertEqual(self.handler.context_map[queue_id0], (queue0, filters0))
        self.assertEqual(self.handler.context_map[queue_id1], (queue1, filters1))
        self.assertEqual(self.handler.filter_queue.get(), (queue_id0, filters0, True))
        self.assertEqual(self.handler.filter_queue.get(), (queue_id1, filters1, True))

    def test_unregister(self):
        queue = Queue()
        queue_id = id(queue)
        filters = ("SYS", "JOUR")

        self.handler.register(queue, filters)
        self.assertIn(queue_id, self.handler.context_map)
        self.assertEqual(self.handler.filter_queue.get(), (queue_id, filters, True))

        self.handler.unregister(queue)
        self.assertNotIn(queue_id, self.handler.context_map)
        self.assertEqual(self.handler.filter_queue.get(), (queue_id, filters, False))

    def test_run_no_messages(self):
        try:
            self.handler.start()
            time.sleep(0.2)
            self.handler.stop()
            self.assertTrue(self.handler.stop_flag.is_set())
            self.assertFalse(self.handler.is_alive())
        # BUGFIX: was a bare `except:` which also swallows KeyboardInterrupt
        # and SystemExit; narrowed to Exception.
        except Exception:
            self.fail()

    def test_run_single_context_queue(self):
        queue = Queue()
        queue_id = id(queue)
        filters = ("DA1", "DC1")
        self.handler.register(queue, filters)
        self.handler.start()

        # - simulate feeding of messages into the message_queue
        for _ in range(10):
            self.handler.message_queue.put((queue_id, create_messages(stream_one)))

        try:
            for _ in range(10):
                queue.get(timeout=0.01)
        except Empty:
            # - we should not get an Empty for exactly 10 messages
            self.fail()
        finally:
            self.handler.stop()

    def test_run_multiple_context_queue(self):
        self.handler.start()

        queue0 = Queue()
        queue_id0 = id(queue0)
        filters0 = ("DA1", "DC1")
        self.handler.register(queue0, filters0)

        queue1 = Queue()
        queue_id1 = id(queue1)
        filters1 = ("SYS", "JOUR")
        self.handler.register(queue1, filters1)

        # - queue with no filter
        queue2 = Queue()
        queue_id2 = id(queue2)
        self.handler.register(queue2)

        # - simulate feeding of messages into the message_queue
        for _ in range(10):
            for message in create_messages(stream_multiple, from_file=True):
                queue_id = queue_id0 if message.apid == 'DA1' else queue_id1
                self.handler.message_queue.put((queue_id, message))
                # - simulate feeding of all messages for the queue with
                # no filter.
                self.handler.message_queue.put((queue_id2, message))

        try:
            da1_messages = []
            sys_messages = []
            all_messages = []
            for _ in range(10):
                da1_messages.append(queue0.get(timeout=0.01))
                sys_messages.append(queue1.get(timeout=0.01))
                all_messages.append(queue2.get(timeout=0.01))

            # these queues should not get any messages from other queues
            self.assertTrue(all(msg.apid == 'DA1' for msg in da1_messages))
            self.assertTrue(all(msg.apid == 'SYS' for msg in sys_messages))
            # this queues should get all messages
            self.assertFalse(all(msg.apid == 'DA1' for msg in all_messages) or
                             all(msg.apid == 'SYS' for msg in all_messages))
        except Empty:
            # - we should not get an Empty for at least 10 messages
            self.fail()
        finally:
            self.handler.stop()

    def test_run_unregister_with_unread_messages(self):
        self.handler.start()

        queue = Queue()
        queue_id = id(queue)
        filters = ("DA1", "DC1")
        self.handler.register(queue, filters)
        self.assertIn(queue_id, self.handler.context_map)
        self.handler.unregister(queue)

        # - simulate feeding of messages into the message_queue
        for _ in range(3):
            self.handler.message_queue.put((queue_id, create_messages(stream_one)))

        try:
            self.assertNotIn(queue_id, self.handler.context_map)
            # allow some time for the thread to read all messages
            time.sleep(0.5)
            self.assertTrue(self.handler.message_queue.empty())
            self.assertTrue(queue.empty())
        finally:
            self.handler.stop()
class DLTBroker(object):
    """DLT Broker class manages receiving and filtering of DLT Messages
    """

    def __init__(self, ip_address, port=DLT_DAEMON_TCP_PORT, use_proxy=False, enable_dlt_time=False, **kwargs):
        """Initialize the DLT Broker

        :param str ip_address: IP address of the DLT Daemon. Defaults to TCP connection,
            unless a multicast address is used. In that case an UDP multicast connection
            will be used
        :param str port: Port of the DLT Daemon
        :param bool use_proxy: Ignored - compatibility option
        :param bool enable_dlt_time: Record the latest dlt message timestamp if enabled.
        :param **kwargs: All other args passed to DLTMessageHandler
        """
        # - dlt-time share memory init
        # None disables dlt_time(); otherwise the handler process writes the
        # latest message timestamp into this shared value.
        self._dlt_time_value = DLTTimeValue() if enable_dlt_time else None

        # - handlers init
        # Event/queues are shared between this process and the handler process.
        self.mp_stop_flag = Event()
        self.filter_queue = Queue()
        self.message_queue = Queue()
        # Connection settings are forwarded to the handler via kwargs;
        # a caller-supplied timeout wins over the default.
        kwargs["ip_address"] = ip_address
        kwargs["port"] = port
        kwargs["timeout"] = kwargs.get("timeout", DLT_CLIENT_TIMEOUT)
        self.msg_handler = DLTMessageHandler(
            self.filter_queue,
            self.message_queue,
            self.mp_stop_flag,
            kwargs,
            dlt_time_value=self._dlt_time_value,
        )
        self.context_handler = DLTContextHandler(self.filter_queue, self.message_queue)
        # Kept for logging/inspection only.
        self._ip_address = ip_address
        self._port = port
        self._filename = kwargs.get("filename")

    def start(self):
        """DLTBroker main worker method"""
        # NOTE(review): `ip` is presumably the stdlib `ipaddress` module
        # (imported elsewhere in this file) — confirm.
        logger.debug(
            "Starting DLTBroker with parameters: use_proxy=%s, ip_address=%s, port=%s, filename=%s, multicast=%s",
            False, self._ip_address, self._port, self._filename,
            ip.ip_address(self._ip_address).is_multicast)

        if self._dlt_time_value:
            logger.debug("Enable dlt time for DLTBroker.")

        self.msg_handler.start()
        self.context_handler.start()

        # - ensure we don't block on join_thread() in stop()
        # https://docs.python.org/2.7/library/multiprocessing.html#multiprocessing.Queue.cancel_join_thread
        self.filter_queue.cancel_join_thread()
        self.message_queue.cancel_join_thread()

    def add_context(self, context_queue, filters=None):
        """Register context

        :param Queue context_queue: The queue to which new messages will be added
        :param tuple filters: An list of tuples (eg: [(apid, ctid)]) used to filter
                              messages that go into this queue.
        """
        if filters is None:
            # (None, None) matches every apid/ctid pair.
            filters = [(None, None)]

        if not isinstance(filters, (tuple, list)):
            raise RuntimeError("Context queue filters must be a tuple."
                               " Ex. (('SYS', 'JOUR'), ('AUDI', 'CAPI'))")

        self.context_handler.register(context_queue, filters)

    def remove_context(self, context_queue):
        """Unregister context

        :param Queue context_queue: The queue to unregister.
        """
        self.context_handler.unregister(context_queue)

    def stop(self):
        """Stop the broker"""
        logger.info("Stopping DLTContextHandler and DLTMessageHandler")

        logger.debug("Stop DLTMessageHandler")
        # The handler process polls this flag and exits on its own.
        self.mp_stop_flag.set()

        logger.debug("Stop DLTContextHandler")
        self.context_handler.stop()

        logger.debug("Waiting on DLTContextHandler ending")
        self.context_handler.join()

        logger.debug("Waiting on DLTMessageHandler ending")
        # Only force-terminate if the process did not exit via the stop flag;
        # join only after a successful terminate() call.
        if self.msg_handler.is_alive():
            try:
                self.msg_handler.terminate()
            except OSError:
                pass
            else:
                self.msg_handler.join()

        logger.debug("DLTBroker execution done")

    # pylint: disable=invalid-name
    def isAlive(self):
        """Backwards compatibility method

        Called from mtee.testing.connectors.tools.broker_assert. Will need to
        be replaced in MTEE eventually.
        """
        return any(
            (self.msg_handler.is_alive(), self.context_handler.is_alive()))

    def dlt_time(self):
        """Get time for the last dlt message

        The value is seconds from 1970/1/1 0:00:00

        :rtype: float
        """
        if self._dlt_time_value:
            return self._dlt_time_value.timestamp

        raise RuntimeError("Getting dlt time function is not enabled")
def __init__(self, ip_address, port=DLT_DAEMON_TCP_PORT, use_proxy=False,
             enable_dlt_time=False, enable_filter_set_ack=False, filter_set_ack_timeout=2.0,
             ignore_filter_set_ack_timeout=False, **kwargs):
    """Initialize the DLT Broker

    :param str ip_address: IP address of the DLT Daemon. Defaults to TCP connection,
        unless a multicast address is used. In that case an UDP multicast connection
        will be used
    :param str port: Port of the DLT Daemon
    :param bool use_proxy: Ignored - compatibility option
    :param bool enable_dlt_time: Record the latest dlt message timestamp if enabled.
    :param bool enable_filter_set_ack: Wait an ack message when sending a filter-setting message
    :param float filter_set_ack_timeout: Waiting time for the ack message
    :param bool ignore_filter_set_ack_timeout: Ignore the timeout when the value is True
    :param **kwargs: All other args passed to DLTMessageHandler
    """
    # - dlt-time share memory init: None disables dlt-time tracking.
    self._dlt_time_value = DLTTimeValue() if enable_dlt_time else None

    # Remember connection parameters (for logging / later inspection).
    self._ip_address = ip_address
    self._port = port
    self._filename = kwargs.get("filename")

    # - handlers init: shared stop flag plus the two inter-process queues.
    self.mp_stop_flag = Event()
    self.filter_queue = Queue()
    self.message_queue = Queue()

    # - filter ack queue setting
    self.enable_filter_set_ack = enable_filter_set_ack
    self.ignore_filter_set_ack_timeout = ignore_filter_set_ack_timeout
    self.filter_set_ack_timeout = filter_set_ack_timeout

    # Optional[multiprocessing.Queue[Tuple[int, bool]]]
    # int presents queue id, bool presents enable or not
    self.filter_ack_queue = Queue() if enable_filter_set_ack else None
    self.filter_ack_msg_handler = (
        DLTFilterAckMessageHandler(self.filter_ack_queue) if enable_filter_set_ack else None)

    # Forward connection settings to the message-handler process.
    kwargs["ip_address"] = ip_address
    kwargs["port"] = port
    kwargs.setdefault("timeout", DLT_CLIENT_TIMEOUT)

    self.msg_handler = DLTMessageHandler(
        self.filter_queue,
        self.message_queue,
        self.mp_stop_flag,
        kwargs,
        dlt_time_value=self._dlt_time_value,
        filter_ack_queue=self.filter_ack_queue,
    )
    self.context_handler = DLTContextHandler(self.filter_queue, self.message_queue)
class DLTBroker(object):
    """DLT Broker class manages receiving and filtering of DLT Messages"""

    def __init__(self, ip_address, port=DLT_DAEMON_TCP_PORT, use_proxy=False,
                 enable_dlt_time=False, enable_filter_set_ack=False, filter_set_ack_timeout=2.0,
                 ignore_filter_set_ack_timeout=False, **kwargs):
        """Initialize the DLT Broker

        :param str ip_address: IP address of the DLT Daemon. Defaults to TCP connection,
            unless a multicast address is used. In that case an UDP multicast connection
            will be used
        :param str port: Port of the DLT Daemon
        :param bool use_proxy: Ignored - compatibility option
        :param bool enable_dlt_time: Record the latest dlt message timestamp if enabled.
        :param bool enable_filter_set_ack: Wait an ack message when sending a filter-setting message
        :param float filter_set_ack_timeout: Waiting time for the ack message
        :param bool ignore_filter_set_ack_timeout: Ignore the timeout when the value is True
        :param **kwargs: All other args passed to DLTMessageHandler
        """
        # - dlt-time share memory init (None disables dlt_time())
        self._dlt_time_value = DLTTimeValue() if enable_dlt_time else None

        # - handlers init
        self.mp_stop_flag = Event()
        self.filter_queue = Queue()
        self.message_queue = Queue()

        # - filter ack queue setting
        self.enable_filter_set_ack = enable_filter_set_ack
        self.ignore_filter_set_ack_timeout = ignore_filter_set_ack_timeout
        self.filter_set_ack_timeout = filter_set_ack_timeout
        if enable_filter_set_ack:
            # Optional[multiprocessing.Queue[Tuple[int, bool]]]
            # int presents queue id, bool presents enable or not
            self.filter_ack_queue = Queue()
            self.filter_ack_msg_handler = DLTFilterAckMessageHandler(
                self.filter_ack_queue)
        else:
            self.filter_ack_queue = None
            self.filter_ack_msg_handler = None

        kwargs["ip_address"] = ip_address
        kwargs["port"] = port
        kwargs["timeout"] = kwargs.get("timeout", DLT_CLIENT_TIMEOUT)
        self.msg_handler = DLTMessageHandler(
            self.filter_queue,
            self.message_queue,
            self.mp_stop_flag,
            kwargs,
            dlt_time_value=self._dlt_time_value,
            filter_ack_queue=self.filter_ack_queue,
        )
        self.context_handler = DLTContextHandler(self.filter_queue, self.message_queue)

        # Kept for logging/inspection only.
        self._ip_address = ip_address
        self._port = port
        self._filename = kwargs.get("filename")

    def start(self):
        """DLTBroker main worker method"""
        logger.debug(
            "Starting DLTBroker with parameters: use_proxy=%s, ip_address=%s, port=%s, filename=%s, multicast=%s",
            False, self._ip_address, self._port, self._filename,
            ip.ip_address(self._ip_address).is_multicast)

        if self._dlt_time_value:
            logger.debug("Enable dlt time for DLTBroker.")

        self.msg_handler.start()
        self.context_handler.start()
        if self.enable_filter_set_ack:
            self.filter_ack_msg_handler.start()

        # - ensure we don't block on join_thread() in stop()
        # https://docs.python.org/2.7/library/multiprocessing.html#multiprocessing.Queue.cancel_join_thread
        self.filter_queue.cancel_join_thread()
        self.message_queue.cancel_join_thread()
        if self.enable_filter_set_ack:
            self.filter_ack_queue.cancel_join_thread()

    def _recv_filter_set_ack(self, context_filter_ack_queue, required_response):
        """Wait for a filter-setting ack and compare it to the expected value.

        :param context_filter_ack_queue: Queue on which the ack is expected
        :param required_response: Value the ack must equal to count as success
        :returns: True on a matching ack, False on a mismatched ack, None on an
            ignored timeout
        :raises queue.Empty: On timeout when ignore_filter_set_ack_timeout is False
        """
        try:
            resp = context_filter_ack_queue.get(
                timeout=self.filter_set_ack_timeout)

            if resp != required_response:
                logger.debug(
                    "Filter-setting ack response not matched: %s, expected: %s",
                    resp, required_response)
                return False

            return True
        except tqueue.Empty:
            if self.ignore_filter_set_ack_timeout:
                logger.info("Timeout for getting filter-setting ack: %s, %s",
                            id(context_filter_ack_queue), required_response)
                return None

            # BUGFIX: re-raise with a bare `raise` (preserves the original
            # traceback); the old `raise err` was followed by an unreachable
            # `return False`, which has been removed as dead code.
            raise

    def add_context(self, context_queue, filters=None):
        """Register context

        :param Queue context_queue: The queue to which new messages will be added
        :param tuple filters: An list of tuples (eg: [(apid, ctid)]) used to filter
                              messages that go into this queue.
        """
        filters = filters or [(None, None)]

        if not isinstance(filters, (tuple, list)):
            raise RuntimeError(
                "Context queue filters must be a tuple. Ex. (('SYS', 'JOUR'), ('AUDI', 'CAPI'))"
            )

        if self.enable_filter_set_ack:
            logger.debug("Send a filter-setting message with requesting ack")
            with create_filter_ack_queue(
                    self.filter_ack_msg_handler) as context_filter_ack_queue:
                self.context_handler.register(
                    context_queue, filters,
                    context_filter_ack_queue=context_filter_ack_queue)
                if not self._recv_filter_set_ack(context_filter_ack_queue, True):
                    logger.warning((
                        "Could not receive filter-setting messge ack. It's possible that DLTClient client does "
                        "not start. If it's a test case. It might be an error. For now, Run it anyway. "
                        "filters: %s, queue_id: %s"), filters, id(context_queue))
        else:
            self.context_handler.register(context_queue, filters)

    def remove_context(self, context_queue):
        """Unregister context

        :param Queue context_queue: The queue to unregister.
        """
        self.context_handler.unregister(context_queue)

    def stop(self):
        """Stop the broker"""
        logger.info("Stopping DLTContextHandler and DLTMessageHandler")

        logger.debug("Stop DLTMessageHandler")
        # The handler process polls this flag and exits on its own.
        self.mp_stop_flag.set()

        logger.debug("Stop DLTContextHandler")
        self.context_handler.stop()

        logger.debug("Waiting on DLTContextHandler ending")
        self.context_handler.join()

        if self.enable_filter_set_ack:
            logger.debug("Stop DLTFilterAckMessageHandler")
            self.filter_ack_msg_handler.stop()
            logger.debug("Waiting on DLTFilterAckMessageHandler ending")
            self.filter_ack_msg_handler.join()

        logger.debug("Waiting on DLTMessageHandler ending")
        # Only force-terminate if the process did not exit via the stop flag;
        # join only after a successful terminate() call.
        if self.msg_handler.is_alive():
            try:
                self.msg_handler.terminate()
            except OSError:
                pass
            else:
                self.msg_handler.join()

        logger.debug("DLTBroker execution done")

    # pylint: disable=invalid-name
    def isAlive(self):
        """Backwards compatibility method

        Called from mtee.testing.connectors.tools.broker_assert. Will need to
        be replaced in MTEE eventually.
        """
        return any(
            (self.msg_handler.is_alive(), self.context_handler.is_alive()))

    def dlt_time(self):
        """Get time for the last dlt message

        The value is seconds from 1970/1/1 0:00:00

        :rtype: float
        """
        if self._dlt_time_value:
            return self._dlt_time_value.timestamp

        raise RuntimeError("Getting dlt time function is not enabled")