Example #1
    def test_internal_messages_not_republished(self, mp, caplog):
        """
        Verify that a message event is not published if the event originates
        from an internal source.
        """
        helper = PubSubHelper()

        work_q = MPQueue(ctx=mp)
        # TEST is the default component name assigned in
        # _proc_worker_wrapper_helper. This message should not be published to pypubsub
        msg = EventMessage(
            "TEST",
            "PUBSUB",
            dict(topic=topics.request.procedure.list,
                 kwargs={"request_id": "123"}),
        )
        work_q.put(msg)

        _proc_worker_wrapper_helper(mp,
                                    caplog,
                                    ScriptWorker,
                                    args=(work_q, ),
                                    expect_shutdown_evt=True)

        msgs_on_topic = helper.messages_on_topic(topics.request.procedure.list)
        assert len(msgs_on_topic) == 0

    def test_external_messages_are_published_locally(self, mp_fixture, caplog):
        """
        Verify that a message event is published if the event originates from
        an external source.
        """
        pubsub.pub.unsubAll()
        helper = PubSubHelper()

        work_q = MPQueue(ctx=mp_fixture)
        msg = EventMessage(
            "EXTERNAL COMPONENT",
            "PUBSUB",
            dict(topic=topics.request.procedure.list,
                 kwargs={"request_id": "123"}),
        )
        work_q.put(msg)

        with mock.patch.object(pubsub.pub, "unsubAll", return_value=[]):
            _proc_worker_wrapper_helper(
                mp_fixture,
                caplog,
                EventBusWorker,
                args=(work_q, ),
                expect_shutdown_evt=True,
            )

        assert topics.request.procedure.list in helper.topic_list
        work_q.safe_close()

def assert_command_request_and_response(mp_fixture, caplog, mock_method,
                                        request_topic, response_topic, cmd):
    pubsub.pub.unsubAll()
    helper = PubSubHelper()

    work_q = MPQueue(ctx=mp_fixture)
    msg = EventMessage(
        "UNITTEST",
        "PUBSUB",
        dict(topic=request_topic, kwargs={
            "request_id": "1234",
            "cmd": cmd
        }),
    )
    work_q.put(msg)
    event = mp_fixture.Event()

    mock_method.side_effect = partial(set_event, event)
    with mock.patch.object(pubsub.pub, "unsubAll", return_value=[]):
        _proc_worker_wrapper_helper(
            mp_fixture,
            caplog,
            ScriptExecutionServiceWorker,
            args=(work_q, mp_fixture),
            expect_shutdown_evt=True,
        )

    assert event.is_set()
    mock_method.assert_called_once()
    assert mock_method.call_args[0][0] == cmd

    assert helper.topic_list == [request_topic, response_topic]

    work_q.safe_close()
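
# The PubSubHelper used throughout these tests is a test utility whose
# implementation is not included in this excerpt. The sketch below is an
# assumption of how such a helper could work: it subscribes to every pypubsub
# topic and records (topic name, kwargs) pairs so that tests can assert on
# topic_list, messages and messages_on_topic(...).
class PubSubHelperSketch:
    """Hypothetical recorder of pypubsub traffic (illustrative only)."""

    def __init__(self):
        self.messages = []  # list of (topic name, kwargs dict) tuples
        # pub.ALL_TOPICS subscribes the callback to every topic; a
        # pub.AUTO_TOPIC default makes pypubsub pass in the Topic object
        pubsub.pub.subscribe(self.respond, pubsub.pub.ALL_TOPICS)

    def respond(self, topic=pubsub.pub.AUTO_TOPIC, **kwargs):
        self.messages.append((topic.getName(), kwargs))

    @property
    def topic_list(self):
        return [name for name, _ in self.messages]

    def messages_on_topic(self, topic):
        # the real helper may compare topic objects differently; str() is an
        # assumption made for this sketch
        return [kwargs for name, kwargs in self.messages if name == str(topic)]
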
Example #4
    def __init__(
        self,
        name: str,
        startup_event: multiprocessing.Event,
        shutdown_event: multiprocessing.Event,
        event_q: mptools.MPQueue,
        work_q: mptools.MPQueue,
        *args,
        scan_counter: Optional[multiprocessing.Value] = None,
        environment: Optional[Environment] = None,
        **kwargs,
    ):
        # Message is rolled by hand and sent via a direct message to the
        # ProcessManager as we want to announce CREATING at the earliest
        # possible moment; we can't announce via pypubsub just yet as the
        # intraprocess<->interprocess republish function is not registered
        # till later in the construction process
        msg = EventMessage(
            msg_src=name,
            msg_type="PUBSUB",
            msg=dict(
                topic="procedure.lifecycle.statechange",
                kwargs=dict(new_state=ProcedureState.CREATING),
            ),
        )
        event_q.put(msg)

        self.name = name

        self._scan_counter = scan_counter
        self._environment = environment
        self.work_q = work_q

        # user_module will be set on LOAD message
        self.user_module = None

        super().__init__(name, startup_event, shutdown_event, event_q, *args,
                         **kwargs)

        # AT2-591. The forked process inherits all subscriptions of the
        # parent, which we do not want to maintain in this child process. This
        # could be done before super().__init__() at the expense of losing the
        # log message, as logging is set up in the super constructor
        unsubscribed = pub.unsubAll()
        self.log(
            logging.DEBUG,
            "Unsubscribed %s pypubsub subscriptions in Procedure #%s (PID=%s)",
            len(unsubscribed),
            self.name,
            os.getpid(),
        )

        # Register a callback function so that all pypubsub messages broadcast
        # in this process are also queued for distribution to remote processes
        pub.subscribe(self.republish, pub.ALL_TOPICS)
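
    # The republish callback registered above is not part of this excerpt. The
    # sketch below is an assumption, not the actual implementation: wrap each
    # local pypubsub message in an EventMessage and queue it so that it can be
    # forwarded to other processes. self.event_q is assumed to be set by the
    # mptools ProcWorker base constructor.
    def republish(self, topic=pub.AUTO_TOPIC, **kwargs):
        # tagging the message with msg_src=self.name lets the receiving side
        # discard events that originated here, avoiding a republishing loop
        msg = EventMessage(
            msg_src=self.name,
            msg_type="PUBSUB",
            msg=dict(topic=topic.getName(), kwargs=kwargs),
        )
        self.event_q.put(msg)
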
Example #5
def test_drain_queue(mp_fixture):
    q = MPQueue(ctx=mp_fixture)

    items = list(q.drain())
    assert items == []

    expected = [f"ITEM{idx}" for idx in range(10)]
    for item in expected:
        q.put(item)

    items = list(q.drain())
    assert items == expected

    num_left = q.safe_close()
    assert num_left == 0
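
# MPQueue.drain() is not shown in this excerpt. A minimal standalone sketch of
# the behaviour exercised above, assuming an mptools-style safe_get() that
# returns None when the queue is empty:
def drain_sketch(q):
    """Hypothetical equivalent of MPQueue.drain(): yield items until empty."""
    item = q.safe_get()
    while item is not None:
        yield item
        item = q.safe_get()
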
Example #6
    def test_on_load(self, mock_module_fn, mp, caplog):
        """ """
        mock_module_fn.side_effect = MagicMock()
        script = GitScript("git://test.py", GitArgs())
        evt = EventMessage("test", "LOAD", script)

        work_q = MPQueue(ctx=mp)
        work_q.put(evt)

        _proc_worker_wrapper_helper(mp,
                                    caplog,
                                    ScriptWorker,
                                    args=(work_q, ),
                                    expect_shutdown_evt=True)
        mock_module_fn.assert_called_once_with(script)
Example #7
def test_mpqueue_get(mp_fixture):
    q = MPQueue(ctx=mp_fixture)

    item = q.safe_get(None)
    assert item is None

    q.put("ITEM1")
    q.put("ITEM2")

    assert q.safe_get(0.02) == "ITEM1"
    assert q.safe_get(0.02) == "ITEM2"
    assert q.safe_get(0.02) is None
    assert q.safe_get(None) is None

    num_left = q.safe_close()
    assert num_left == 0
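
# safe_get() itself is also not shown here. A sketch of the semantics the test
# above relies on: timeout=None means a non-blocking get, otherwise block for
# up to the timeout, and return None instead of raising when nothing arrives.
def safe_get_sketch(q, timeout=None):
    """Hypothetical stand-in for MPQueue.safe_get()."""
    import queue

    try:
        if timeout is None:
            return q.get(block=False)
        return q.get(block=True, timeout=timeout)
    except queue.Empty:
        return None
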
def test_main_loop_ends_on_fatal_message(mp_fixture):
    """
    Main loop should terminate when a fatal message is received.
    """
    mock_ctx = mock.MagicMock()

    event_q = MPQueue(ctx=mp_fixture)
    event_q.put(EventMessage("TEST", "FATAL", msg="foo"))
    event_q.put(EventMessage("TEST", "END", msg="foo"))
    mock_ctx.event_queue = event_q

    mock_ctx.shutdown_event.is_set.return_value = False

    main_loop(mock_ctx, [])

    assert event_q.safe_close() == 1

def test_main_loop_ignores_and_logs_events_of_unknown_types(mp_fixture):
    """
    Loop should log events it doesn't know how to handle.
    """
    mock_ctx = mock.MagicMock()

    event_q = MPQueue(ctx=mp_fixture)
    event_q.put(EventMessage("TEST", "FOO", msg="1"))
    mock_ctx.event_queue = event_q

    # one processing loop before shutdown is set, at which point the loop
    # should exit having consumed and logged the single unknown event
    mock_ctx.shutdown_event.is_set.side_effect = [False, True]

    main_loop(mock_ctx, [])

    event_q.safe_close()
    mock_ctx.log.assert_called_once()
    assert "Unknown Event" in mock_ctx.log.call_args[0][1]

    def test_internal_messages_not_republished(self, mp_fixture, caplog):
        """
        Verify that a message event is not published if the event originates
        from an internal source.
        """
        pubsub.pub.unsubAll()
        helper = PubSubHelper()

        work_q = MPQueue(ctx=mp_fixture)
        # TEST is the default component name assigned in
        # _proc_worker_wrapper_helper. This message should be ignored.
        msg = EventMessage(
            "TEST",
            "PUBSUB",
            dict(topic=topics.request.procedure.list,
                 kwargs={"request_id": "123"}),
        )

        work_q.put(msg)
        # But coming from NONTEST, this message should be republished.
        msg = EventMessage(
            "NONTEST",
            "PUBSUB",
            dict(topic=topics.request.procedure.list,
                 kwargs={"request_id": "456"}),
        )
        work_q.put(msg)

        with mock.patch.object(pubsub.pub, "unsubAll", return_value=[]):
            _proc_worker_wrapper_helper(
                mp_fixture,
                caplog,
                EventBusWorker,
                args=(work_q, ),
                expect_shutdown_evt=True,
            )

        assert len(helper.messages) == 1
        assert helper.messages[0][1] == dict(msg_src="NONTEST",
                                             request_id="456")

        work_q.safe_close()
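
    # A rough sketch (an assumption, not the real code) of the filtering this
    # test relies on: EventBusWorker.main_func republishes an external PUBSUB
    # event on the local pypubsub bus, passing msg_src through as a kwarg, but
    # drops events whose msg_src matches its own name since those have already
    # been seen locally, e.g.
    #
    #     def main_func(self, evt):
    #         if evt.msg_src != self.name:
    #             payload = evt.msg
    #             pub.sendMessage(payload["topic"],
    #                             msg_src=evt.msg_src,
    #                             **payload["kwargs"])
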
    def test_handles_request_to_list_invalid_id(self, mp_fixture, caplog):
        """
        The ValueError raised when SES.summarise is given an invalid PID should be handled.
        """
        pubsub.pub.unsubAll()
        helper = PubSubHelper()

        work_q = MPQueue(ctx=mp_fixture)
        msg = EventMessage(
            "TEST_SUMMARY",
            "PUBSUB",
            dict(topic=topics.request.procedure.list,
                 kwargs={"request_id": "123"}),
        )
        work_q.put(msg)

        with mock.patch(
                "ska_oso_oet.procedure.application.main.ScriptExecutionService.summarise"
        ) as mock_cls:
            with mock.patch.object(pubsub.pub, "unsubAll", return_value=[]):
                mock_cls.side_effect = ValueError
                _proc_worker_wrapper_helper(
                    mp_fixture,
                    caplog,
                    ScriptExecutionServiceWorker,
                    args=(work_q, mp_fixture),
                    expect_shutdown_evt=True,
                )

        mock_cls.assert_called_once()

        assert helper.topic_list == [
            topics.request.procedure.list,  # list requested
            topics.procedure.pool.list,  # response published
        ]
        assert helper.messages[1][1] == dict(msg_src="TEST",
                                             request_id="123",
                                             result=[])

        work_q.safe_close()
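
    # Together with test_list_method_called below, the expected behaviour is:
    # call SES.summarise, publish the result on topics.procedure.pool.list, and
    # translate a ValueError (unknown PID) into an empty result rather than
    # letting the worker crash. A hypothetical sketch of such a handler, with
    # assumed names:
    #
    #     def list(self, msg_src, request_id, pids=None):
    #         try:
    #             summaries = self.ses.summarise(pids)
    #         except ValueError:
    #             summaries = []
    #         pub.sendMessage(topics.procedure.pool.list,
    #                         msg_src=self.name,
    #                         request_id=request_id,
    #                         result=summaries)
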
Example #12
    def test_script_worker_calls_correct_function_on_message_type(
            self, mock_env_fn, mock_load_fn, mock_run_fn, caplog):
        mp = multiprocessing.get_context()
        script = GitScript("git://test.py", GitArgs())
        env_evt = EventMessage("test", "ENV", script)
        load_evt = EventMessage("test", "LOAD", script)
        run_evt = EventMessage("test", "RUN", ("init", None))
        work_q = MPQueue(ctx=mp)
        work_q.put(env_evt)
        work_q.put(load_evt)
        work_q.put(run_evt)

        _proc_worker_wrapper_helper(mp,
                                    caplog,
                                    ScriptWorker,
                                    args=(work_q, ),
                                    expect_shutdown_evt=True)
        env_args, _ = mock_env_fn.call_args
        assert env_args[0].msg_type == env_evt.msg_type
        mock_env_fn.assert_called_once()

        load_args, _ = mock_load_fn.call_args
        assert load_args[0].msg_type == load_evt.msg_type
        mock_load_fn.assert_called_once()

        run_args, _ = mock_run_fn.call_args
        assert run_args[0].msg_type == run_evt.msg_type
        mock_run_fn.assert_called_once()
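
# A sketch (handler names are assumptions) of the kind of dispatch the test
# above verifies: ScriptWorker routes each work queue message to a handler
# chosen by the message's msg_type.
def dispatch_sketch(worker, evt):
    """Hypothetical msg_type -> handler dispatch for a ScriptWorker message."""
    handlers = {
        "ENV": worker.on_env,    # prepare the script environment
        "LOAD": worker.on_load,  # load the user module
        "RUN": worker.on_run,    # run a function from the loaded module
    }
    handlers[evt.msg_type](evt)
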
def test_main_loop_adds_pubsub_messages_to_event_queues(mp_fixture):
    """
    PUBSUB messages should be added to event queues.
    """
    mock_ctx = mock.MagicMock()

    event_q = MPQueue(ctx=mp_fixture)
    event_q.put(EventMessage("TEST", "PUBSUB", msg="1"))
    event_q.put(EventMessage("TEST", "PUBSUB", msg="2"))
    event_q.put(EventMessage("TEST", "PUBSUB", msg="3"))
    event_q.put(EventMessage("TEST", "END", msg="foo"))
    mock_ctx.event_queue = event_q

    # the shutdown event is never reported as set, so the loop runs until the
    # END message is processed, forwarding the three PUBSUB messages first
    mock_ctx.shutdown_event.is_set.return_value = False

    q1 = MPQueue(ctx=mp_fixture)
    q2 = MPQueue(ctx=mp_fixture)

    main_loop(mock_ctx, [q1, q2])

    assert q1.safe_close() == 3
    assert q2.safe_close() == 3

    event_q.safe_close()

    def test_list_method_called(self, mp_fixture, caplog):
        """
        SES.summarise should be called when a 'request.procedure.list' message is received.
        """
        pubsub.pub.unsubAll()
        helper = PubSubHelper()

        work_q = MPQueue(ctx=mp_fixture)
        msg = EventMessage(
            "TEST_SUMMARY",
            "PUBSUB",
            dict(topic=topics.request.procedure.list,
                 kwargs={"request_id": "123"}),
        )
        work_q.put(msg)
        event = mp_fixture.Event()

        with mock.patch(
                "ska_oso_oet.procedure.application.main.ScriptExecutionService.summarise"
        ) as mock_cls:
            with mock.patch.object(pubsub.pub, "unsubAll", return_value=[]):
                mock_cls.side_effect = partial(set_event, event)
                _proc_worker_wrapper_helper(
                    mp_fixture,
                    caplog,
                    ScriptExecutionServiceWorker,
                    args=(work_q, mp_fixture),
                    expect_shutdown_evt=True,
                )

        assert event.is_set() is True
        mock_cls.assert_called_once()

        assert helper.topic_list == [
            topics.request.procedure.list,  # list requested
            topics.procedure.pool.list,  # response published
        ]

        work_q.safe_close()
Example #15
    def test_external_messages_are_published_locally(self, mp, caplog):
        """
        Verify that a message event is published if the event originates from
        an external source.
        """
        work_q = MPQueue(ctx=mp)
        msg = EventMessage(
            "EXTERNAL COMPONENT",
            "PUBSUB",
            dict(topic=topics.request.procedure.list,
                 kwargs={"request_id": "123"}),
        )
        work_q.put(msg)
        _proc_worker_wrapper_helper(mp,
                                    caplog,
                                    ScriptWorker,
                                    args=(work_q, ),
                                    expect_shutdown_evt=True)

        # there's no easy way to assert that the external event was republished
        # on an independent pypubsub bus. Workaround is to assert that the
        # republishing code was run via the log message
        assert "Republishing external event: EXTERNAL COMPONENT" in caplog.text
Example #16
def test_queue_proc_worker(mp_fixture, caplog):
    work_q = MPQueue(ctx=mp_fixture)
    work_q.put(1)
    work_q.put(2)
    work_q.put(3)
    work_q.put(4)
    work_q.put("END")
    work_q.put(5)

    items = _proc_worker_wrapper_helper(
        mp_fixture,
        caplog,
        QueueProcWorkerTest,
        args=(work_q, ),
        expect_shutdown_evt=False,
    )
    assert len(items) == 4
    assert items == [f"DONE {idx + 1}" for idx in range(4)]
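
# QueueProcWorkerTest builds on an mptools-style QueueProcWorker whose loop is
# not shown in this excerpt. A sketch of that control flow, which is why the
# "END" sentinel stops processing and the fifth item is never handled:
def queue_worker_loop_sketch(work_q, shutdown_event, main_func):
    """Hypothetical QueueProcWorker dispatch loop."""
    while not shutdown_event.is_set():
        item = work_q.safe_get()
        if not item:
            continue
        if item == "END":
            break
        main_func(item)
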
def test_main_loop_ends_when_shutdown_event_is_set(mp_fixture):
    """
    Main loop should terminate when shutdown event is set.
    """
    mock_ctx = mock.MagicMock()

    event_q = MPQueue(ctx=mp_fixture)
    event_q.put(EventMessage("TEST", "PUBSUB", msg="foo"))
    event_q.put(EventMessage("TEST", "PUBSUB", msg="foo"))
    event_q.put(EventMessage("TEST", "PUBSUB", msg="foo"))
    event_q.put(EventMessage("TEST", "END", msg="foo"))
    mock_ctx.event_queue = event_q

    # two processing loops run before shutdown is set, at which point the loop
    # should exit with two messages still in the event queue
    mock_ctx.shutdown_event.is_set.side_effect = [False, False, True]

    main_loop(mock_ctx, [])

    assert event_q.safe_close() == 2
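
# The main_loop under test is not included in this excerpt. Based on the
# behaviour exercised by the tests above, a hypothetical implementation could
# look roughly like this (a sketch only; details are assumptions):
def main_loop_sketch(main_ctx, event_queues):
    import logging

    while not main_ctx.shutdown_event.is_set():
        event = main_ctx.event_queue.safe_get()
        if not event:
            continue
        if event.msg_type == "PUBSUB":
            # fan PUBSUB events out to every registered worker queue
            for q in event_queues:
                q.put(event)
        elif event.msg_type in ("FATAL", "END"):
            # both message types terminate the loop, leaving any later
            # events on the queue (hence the safe_close() counts above)
            break
        else:
            main_ctx.log(logging.ERROR, "Unknown Event: %s", event)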