Example no. 1
def disconnect_from_rabbit(rabbit: RabbitMQApi, attempts: int = 3) -> None:
    """Disconnect from RabbitMQ, retrying up to ``attempts`` times.

    :param rabbit: the RabbitMQApi instance to disconnect from.
    :param attempts: maximum number of disconnection attempts before the
        last error is re-raised.
    :raises Exception: the last disconnection error, once every attempt
        has failed.
    """
    tries = 0

    while tries < attempts:
        try:
            rabbit.disconnect()
            return
        except Exception as e:
            tries += 1
            # Message fixed: we are disconnecting *from* rabbit, not "to".
            print("Could not disconnect from rabbit. Attempts so "
                  "far: {}".format(tries))
            print(e)
            if tries >= attempts:
                # Bare raise preserves the original traceback.
                raise
Example no. 2
class TestDataTransformersManager(unittest.TestCase):
    def setUp(self) -> None:
        """Build the fixtures shared by every test in this class."""
        # Logger is disabled so the tests run silently.
        self.dummy_logger = logging.getLogger('Dummy')
        self.dummy_logger.disabled = True
        self.connection_check_time_interval = timedelta(seconds=0)
        # NOTE(review): assumes a RabbitMQ broker is reachable at
        # env.RABBIT_IP for the integration-style tests below.
        self.rabbit_ip = env.RABBIT_IP
        self.rabbitmq = RabbitMQApi(
            self.dummy_logger,
            self.rabbit_ip,
            connection_check_time_interval=self.connection_check_time_interval)
        self.manager_name = 'test_data_transformers_manager'
        self.test_queue_name = 'Test Queue'
        self.test_data_str = 'test data'
        self.test_timestamp = datetime(2012, 1, 1).timestamp()
        self.test_heartbeat = {
            'component_name': 'Test Component',
            'is_alive': True,
            'timestamp': self.test_timestamp,
        }
        # Daemon processes so any left-over process dies with the test run.
        self.dummy_process1 = Process(target=infinite_fn, args=())
        self.dummy_process1.daemon = True
        self.dummy_process2 = Process(target=infinite_fn, args=())
        self.dummy_process2.daemon = True
        self.dummy_process3 = Process(target=infinite_fn, args=())
        self.dummy_process3.daemon = True
        # Example of a populated manager state (one entry per transformer).
        self.transformer_process_dict_example = {
            SYSTEM_DATA_TRANSFORMER_NAME: self.dummy_process1,
            GITHUB_DATA_TRANSFORMER_NAME: self.dummy_process2,
        }
        self.test_manager = DataTransformersManager(self.dummy_logger,
                                                    self.manager_name,
                                                    self.rabbitmq)
        self.test_exception = PANICException('test_exception', 1)

    def tearDown(self) -> None:
        """Remove broker state created by the tests and drop all fixtures."""
        # Delete any queues and exchanges which are common across many tests
        try:
            self.test_manager.rabbitmq.connect()

            # Declare them before just in case there are tests which do not
            # use these queues and exchanges
            self.test_manager.rabbitmq.queue_declare(
                queue=self.test_queue_name,
                durable=True,
                exclusive=False,
                auto_delete=False,
                passive=False)
            self.test_manager.rabbitmq.queue_declare(DT_MAN_INPUT_QUEUE, False,
                                                     True, False, False)
            self.test_manager.rabbitmq.exchange_declare(
                HEALTH_CHECK_EXCHANGE, 'topic', False, True, False, False)

            # Purge before deleting so no stray messages survive.
            self.test_manager.rabbitmq.queue_purge(self.test_queue_name)
            self.test_manager.rabbitmq.queue_purge(DT_MAN_INPUT_QUEUE)
            self.test_manager.rabbitmq.queue_delete(self.test_queue_name)
            self.test_manager.rabbitmq.queue_delete(DT_MAN_INPUT_QUEUE)
            self.test_manager.rabbitmq.exchange_delete(HEALTH_CHECK_EXCHANGE)
            self.test_manager.rabbitmq.disconnect()
        except Exception as e:
            # Best-effort cleanup: a failure here must not fail the test.
            print("Deletion of queues and exchanges failed: {}".format(e))

        self.dummy_logger = None
        self.rabbitmq = None
        self.dummy_process1 = None
        self.dummy_process2 = None
        self.dummy_process3 = None
        self.test_manager = None
        self.test_exception = None
        self.transformer_process_dict_example = None

    def test_str_returns_name(self) -> None:
        """str() of the manager must be the manager's name."""
        expected_name = self.manager_name
        self.assertEqual(expected_name, str(self.test_manager))

    def test_name_returns_name(self) -> None:
        """The ``name`` property must return the manager's name."""
        expected_name = self.manager_name
        self.assertEqual(expected_name, self.test_manager.name)

    def test_transformer_process_dict_returns_transformer_process_dict(
            self) -> None:
        """The property must expose the internal process dict unchanged."""
        expected_dict = self.transformer_process_dict_example
        self.test_manager._transformer_process_dict = expected_dict
        self.assertEqual(expected_dict,
                         self.test_manager.transformer_process_dict)

    def test_initialise_rabbitmq_initializes_everything_as_expected(
            self) -> None:
        """_initialise_rabbitmq must open a confirmed channel and declare the
        health-check exchange plus the manager's input queue with a consumer.
        """
        try:
            # To make sure that there is no connection/channel already
            # established
            self.assertIsNone(self.rabbitmq.connection)
            self.assertIsNone(self.rabbitmq.channel)

            # To make sure that the exchange and queue have not already been
            # declared
            self.rabbitmq.connect()
            self.test_manager.rabbitmq.queue_delete(DT_MAN_INPUT_QUEUE)
            self.test_manager.rabbitmq.exchange_delete(HEALTH_CHECK_EXCHANGE)
            self.rabbitmq.disconnect()

            self.test_manager._initialise_rabbitmq()

            # Perform checks that the connection has been opened, marked as open
            # and that the delivery confirmation variable is set.
            self.assertTrue(self.test_manager.rabbitmq.is_connected)
            self.assertTrue(self.test_manager.rabbitmq.connection.is_open)
            self.assertTrue(
                self.test_manager.rabbitmq.channel._delivery_confirmation)

            # Check whether the exchange and queue have been created by
            # sending messages with the same routing keys as for the queue. We
            # will also check if the size of the queue is 0 to confirm that
            # basic_consume was called (it will store the msg in the component
            # memory immediately). If one of the exchange or queue is not
            # created, then either an exception will be thrown or the queue size
            # would be 1. Note when deleting the exchange in the beginning we
            # also released every binding, hence there is no other queue bound
            # with the same routing key to any exchange at this point.
            self.test_manager.rabbitmq.basic_publish_confirm(
                exchange=HEALTH_CHECK_EXCHANGE,
                routing_key=DT_MAN_INPUT_ROUTING_KEY,
                body=self.test_data_str,
                is_body_dict=False,
                properties=pika.BasicProperties(delivery_mode=2),
                mandatory=True)

            # Re-declare queue to get the number of messages
            res = self.test_manager.rabbitmq.queue_declare(
                DT_MAN_INPUT_QUEUE, False, True, False, False)
            self.assertEqual(0, res.method.message_count)
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @mock.patch.object(RabbitMQApi, "start_consuming")
    def test_listen_for_data_calls_start_consuming(
            self, mock_start_consuming) -> None:
        """_listen_for_data must delegate to RabbitMQApi.start_consuming."""
        mock_start_consuming.return_value = None

        self.test_manager._listen_for_data()

        mock_start_consuming.assert_called_once()

    def test_send_heartbeat_sends_a_heartbeat_correctly(self) -> None:
        """_send_heartbeat must publish exactly one heartbeat message that
        round-trips unchanged through the health-check exchange.
        """
        # This test creates a queue which receives messages with the same
        # routing key as the ones sent by send_heartbeat, and checks that the
        # heartbeat is received
        try:
            self.test_manager._initialise_rabbitmq()

            # Delete the queue before to avoid messages in the queue on error.
            self.test_manager.rabbitmq.queue_delete(self.test_queue_name)

            res = self.test_manager.rabbitmq.queue_declare(
                queue=self.test_queue_name,
                durable=True,
                exclusive=False,
                auto_delete=False,
                passive=False)
            self.assertEqual(0, res.method.message_count)
            self.test_manager.rabbitmq.queue_bind(
                queue=self.test_queue_name,
                exchange=HEALTH_CHECK_EXCHANGE,
                routing_key='heartbeat.manager')
            self.test_manager._send_heartbeat(self.test_heartbeat)

            # By re-declaring the queue again we can get the number of messages
            # in the queue.
            res = self.test_manager.rabbitmq.queue_declare(
                queue=self.test_queue_name,
                durable=True,
                exclusive=False,
                auto_delete=False,
                passive=True)
            self.assertEqual(1, res.method.message_count)

            # Check that the message received is actually the HB
            _, _, body = self.test_manager.rabbitmq.basic_get(
                self.test_queue_name)
            self.assertEqual(self.test_heartbeat, json.loads(body))
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @mock.patch.object(multiprocessing.Process, "start")
    def test_start_transformers_processes_creates_correct_sys_proc_first_time(
            self, mock_start) -> None:
        """On first start-up a correct system transformer process is built."""
        mock_start.return_value = None

        self.test_manager._start_transformers_processes()

        new_proc = self.test_manager.transformer_process_dict[
            SYSTEM_DATA_TRANSFORMER_NAME]
        self.assertTrue(new_proc.daemon)
        self.assertEqual(0, len(new_proc._args))
        self.assertEqual(start_system_data_transformer, new_proc._target)

    @mock.patch.object(multiprocessing.Process, "start")
    def test_start_transformers_processes_creates_correct_sys_proc_if_not_alive(
            self, mock_start) -> None:
        """A dead system transformer process must be re-created correctly."""
        mock_start.return_value = None

        # A non-empty state mocks the case where creation is not being done
        # for the first time.
        self.test_manager._transformer_process_dict = \
            self.transformer_process_dict_example

        self.test_manager._start_transformers_processes()

        recreated_proc = self.test_manager.transformer_process_dict[
            SYSTEM_DATA_TRANSFORMER_NAME]
        self.assertTrue(recreated_proc.daemon)
        self.assertEqual(0, len(recreated_proc._args))
        self.assertEqual(start_system_data_transformer, recreated_proc._target)

    @mock.patch.object(multiprocessing.Process, "start")
    @mock.patch.object(multiprocessing, "Process")
    def test_start_transformers_processes_stores_sys_proc_correct_first_time(
            self, mock_init, mock_start) -> None:
        """The newly created system process must be stored in the state."""
        mock_start.return_value = None
        mock_init.return_value = self.dummy_process3

        self.test_manager._start_transformers_processes()

        stored_procs = self.test_manager._transformer_process_dict
        # Only the system transformer entry is under test here.
        del stored_procs[GITHUB_DATA_TRANSFORMER_NAME]
        self.assertEqual(
            {SYSTEM_DATA_TRANSFORMER_NAME: self.dummy_process3}, stored_procs)

    @mock.patch.object(multiprocessing.Process, "start")
    @mock.patch.object(multiprocessing, "Process")
    def test_start_transformers_processes_stores_sys_proc_correct_if_not_alive(
            self, mock_init, mock_start) -> None:
        """A re-created system process must replace the dead one in state."""
        mock_start.return_value = None
        mock_init.return_value = self.dummy_process3

        # A non-empty state mocks the case where creation is not being done
        # for the first time.
        self.test_manager._transformer_process_dict = \
            self.transformer_process_dict_example

        self.test_manager._start_transformers_processes()

        stored_procs = self.test_manager._transformer_process_dict
        # Only the system transformer entry is under test here.
        del stored_procs[GITHUB_DATA_TRANSFORMER_NAME]
        self.assertEqual(
            {SYSTEM_DATA_TRANSFORMER_NAME: self.dummy_process3}, stored_procs)

    @mock.patch("src.data_transformers.starters.create_logger")
    def test_start_transformers_processes_starts_system_process_if_first_time(
            self, mock_create_logger) -> None:
        """On first start-up the system transformer process is really started
        (this test spawns actual child processes).
        """
        mock_create_logger.return_value = self.dummy_logger
        self.test_manager._start_transformers_processes()

        # We need to sleep to give some time for the data transformer to be
        # initialized, otherwise the process would not terminate
        time.sleep(1)

        system_data_trans_process = self.test_manager.transformer_process_dict[
            SYSTEM_DATA_TRANSFORMER_NAME]
        github_data_trans_process = self.test_manager.transformer_process_dict[
            GITHUB_DATA_TRANSFORMER_NAME]
        self.assertTrue(system_data_trans_process.is_alive())

        # Clean up both spawned processes so they do not outlive the test.
        system_data_trans_process.terminate()
        system_data_trans_process.join()
        github_data_trans_process.terminate()
        github_data_trans_process.join()

    @mock.patch("src.data_transformers.starters.create_logger")
    def test_start_transformers_processes_starts_system_process_if_not_alive(
            self, mock_create_logger) -> None:
        """A dead system transformer process is really restarted (this test
        spawns actual child processes).
        """
        mock_create_logger.return_value = self.dummy_logger

        # Make the state non-empty to mock the case when creation is not being
        # done for the first time
        self.test_manager._transformer_process_dict = \
            self.transformer_process_dict_example

        self.test_manager._start_transformers_processes()

        # We need to sleep to give some time for the data transformer to be
        # initialized, otherwise the process would not terminate
        time.sleep(1)

        system_data_trans_process = self.test_manager.transformer_process_dict[
            SYSTEM_DATA_TRANSFORMER_NAME]
        github_data_trans_process = self.test_manager.transformer_process_dict[
            GITHUB_DATA_TRANSFORMER_NAME]
        self.assertTrue(system_data_trans_process.is_alive())

        # Clean up both spawned processes so they do not outlive the test.
        system_data_trans_process.terminate()
        system_data_trans_process.join()
        github_data_trans_process.terminate()
        github_data_trans_process.join()

    @mock.patch.object(multiprocessing.Process, "start")
    def test_start_transformers_processes_creates_correct_git_proc_first_time(
            self, mock_start) -> None:
        """On first start-up a correct GitHub transformer process is built."""
        mock_start.return_value = None

        self.test_manager._start_transformers_processes()

        new_proc = self.test_manager.transformer_process_dict[
            GITHUB_DATA_TRANSFORMER_NAME]
        self.assertTrue(new_proc.daemon)
        self.assertEqual(0, len(new_proc._args))
        self.assertEqual(start_github_data_transformer, new_proc._target)

    @mock.patch.object(multiprocessing.Process, "start")
    def test_start_transformers_processes_creates_correct_git_proc_if_not_alive(
            self, mock_start) -> None:
        """A dead GitHub transformer process must be re-created correctly."""
        mock_start.return_value = None

        # A non-empty state mocks the case where creation is not being done
        # for the first time.
        self.test_manager._transformer_process_dict = \
            self.transformer_process_dict_example

        self.test_manager._start_transformers_processes()

        recreated_proc = self.test_manager.transformer_process_dict[
            GITHUB_DATA_TRANSFORMER_NAME]
        self.assertTrue(recreated_proc.daemon)
        self.assertEqual(0, len(recreated_proc._args))
        self.assertEqual(start_github_data_transformer, recreated_proc._target)

    @mock.patch.object(multiprocessing.Process, "start")
    @mock.patch.object(multiprocessing, "Process")
    def test_start_transformers_processes_stores_github_proc_correct_first_time(
            self, mock_init, mock_start) -> None:
        """The newly created GitHub process must be stored in the state."""
        mock_start.return_value = None
        mock_init.return_value = self.dummy_process3

        self.test_manager._start_transformers_processes()

        stored_procs = self.test_manager._transformer_process_dict
        # Only the GitHub transformer entry is under test here.
        del stored_procs[SYSTEM_DATA_TRANSFORMER_NAME]
        self.assertEqual(
            {GITHUB_DATA_TRANSFORMER_NAME: self.dummy_process3}, stored_procs)

    @mock.patch.object(multiprocessing.Process, "start")
    @mock.patch.object(multiprocessing, "Process")
    def test_start_transformers_processes_stores_git_proc_correct_if_not_alive(
            self, mock_init, mock_start) -> None:
        """A re-created GitHub process must replace the dead one in state."""
        mock_start.return_value = None
        mock_init.return_value = self.dummy_process3

        # A non-empty state mocks the case where creation is not being done
        # for the first time.
        self.test_manager._transformer_process_dict = \
            self.transformer_process_dict_example

        self.test_manager._start_transformers_processes()

        stored_procs = self.test_manager._transformer_process_dict
        # Only the GitHub transformer entry is under test here.
        del stored_procs[SYSTEM_DATA_TRANSFORMER_NAME]
        self.assertEqual(
            {GITHUB_DATA_TRANSFORMER_NAME: self.dummy_process3}, stored_procs)

    @mock.patch("src.data_transformers.starters.create_logger")
    def test_start_transformers_processes_starts_github_process_if_first_time(
            self, mock_create_logger) -> None:
        """On first start-up the GitHub transformer process is really started
        (this test spawns actual child processes).
        """
        mock_create_logger.return_value = self.dummy_logger
        self.test_manager._start_transformers_processes()

        # We need to sleep to give some time for the data transformer to be
        # initialized, otherwise the process would not terminate
        time.sleep(1)

        system_data_trans_process = self.test_manager.transformer_process_dict[
            SYSTEM_DATA_TRANSFORMER_NAME]
        github_data_trans_process = self.test_manager.transformer_process_dict[
            GITHUB_DATA_TRANSFORMER_NAME]
        self.assertTrue(github_data_trans_process.is_alive())

        # Clean up both spawned processes so they do not outlive the test.
        system_data_trans_process.terminate()
        system_data_trans_process.join()
        github_data_trans_process.terminate()
        github_data_trans_process.join()

    @mock.patch("src.data_transformers.starters.create_logger")
    def test_start_transformers_processes_starts_github_process_if_not_alive(
            self, mock_create_logger) -> None:
        """A dead GitHub transformer process is really restarted (this test
        spawns actual child processes).
        """
        mock_create_logger.return_value = self.dummy_logger

        # Make the state non-empty to mock the case when creation is not being
        # done for the first time
        self.test_manager._transformer_process_dict = \
            self.transformer_process_dict_example

        self.test_manager._start_transformers_processes()

        # We need to sleep to give some time for the data transformer to be
        # initialized, otherwise the process would not terminate
        time.sleep(1)

        system_data_trans_process = self.test_manager.transformer_process_dict[
            SYSTEM_DATA_TRANSFORMER_NAME]
        github_data_trans_process = self.test_manager.transformer_process_dict[
            GITHUB_DATA_TRANSFORMER_NAME]
        self.assertTrue(github_data_trans_process.is_alive())

        # Clean up both spawned processes so they do not outlive the test.
        system_data_trans_process.terminate()
        system_data_trans_process.join()
        github_data_trans_process.terminate()
        github_data_trans_process.join()

    @mock.patch.object(multiprocessing.Process, "is_alive")
    @mock.patch.object(multiprocessing.Process, "start")
    def test_start_transformers_processes_does_not_start_any_process_if_alive(
            self, mock_start, mock_is_alive) -> None:
        """Nothing is (re)started while every stored process is alive."""
        mock_is_alive.return_value = True
        mock_start.return_value = None

        # A non-empty state mocks processes having been created previously.
        existing_state = self.transformer_process_dict_example
        self.test_manager._transformer_process_dict = existing_state

        self.test_manager._start_transformers_processes()

        mock_start.assert_not_called()

    @freeze_time("2012-01-01")
    @mock.patch.object(DataTransformersManager, "_send_heartbeat")
    @mock.patch.object(multiprocessing.Process, "is_alive")
    def test_process_ping_sends_a_valid_hb_if_all_processes_are_alive(
            self, mock_is_alive, mock_send_hb) -> None:
        """A ping must produce a heartbeat listing every process as running."""
        # We will perform this test by checking that send_hb is called with the
        # correct heartbeat. The actual sending was already tested above. Note
        # we will mock is_alive by setting it to always return true to avoid
        # creating processes
        mock_is_alive.return_value = True
        mock_send_hb.return_value = None
        try:
            # Some of the variables below are needed as parameters for the
            # process_ping function
            self.test_manager._initialise_rabbitmq()
            blocking_channel = self.test_manager.rabbitmq.channel
            method = pika.spec.Basic.Deliver(
                routing_key=DT_MAN_INPUT_ROUTING_KEY)
            body = 'ping'
            properties = pika.spec.BasicProperties()

            # Make the state non-empty to mock the fact that the processes were
            # already created
            self.test_manager._transformer_process_dict = \
                self.transformer_process_dict_example

            self.test_manager._process_ping(blocking_channel, method,
                                            properties, body)

            # Timestamp is deterministic thanks to freeze_time above.
            expected_hb = {
                'component_name':
                self.test_manager.name,
                'running_processes':
                [SYSTEM_DATA_TRANSFORMER_NAME, GITHUB_DATA_TRANSFORMER_NAME],
                'dead_processes': [],
                'timestamp':
                datetime.now().timestamp()
            }
            mock_send_hb.assert_called_once_with(expected_hb)
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @freeze_time("2012-01-01")
    @mock.patch.object(DataTransformersManager, "_send_heartbeat")
    @mock.patch.object(DataTransformersManager,
                       "_start_transformers_processes")
    @mock.patch.object(multiprocessing.Process, "is_alive")
    @mock.patch.object(multiprocessing.Process, "join")
    def test_process_ping_sends_a_valid_hb_if_some_processes_alive_some_dead(
            self, mock_join, mock_is_alive, mock_start_trans,
            mock_send_hb) -> None:
        """The heartbeat must split processes into running and dead lists."""
        # We will perform this test by checking that send_hb is called with the
        # correct heartbeat as the actual sending was already tested above. Note
        # we will mock is_alive by setting it to first return true and then
        # return false (note we only have two processes). By this we can avoid
        # creating processes.
        mock_is_alive.side_effect = [True, False]
        mock_send_hb.return_value = None
        mock_join.return_value = None
        mock_start_trans.return_value = None
        try:
            # Some of the variables below are needed as parameters for the
            # process_ping function
            self.test_manager._initialise_rabbitmq()
            blocking_channel = self.test_manager.rabbitmq.channel
            method = pika.spec.Basic.Deliver(
                routing_key=DT_MAN_INPUT_ROUTING_KEY)
            body = 'ping'
            properties = pika.spec.BasicProperties()

            # Make the state non-empty to mock the fact that the processes were
            # already created
            self.test_manager._transformer_process_dict = \
                self.transformer_process_dict_example

            self.test_manager._process_ping(blocking_channel, method,
                                            properties, body)

            # Timestamp is deterministic thanks to freeze_time above.
            expected_hb = {
                'component_name': self.test_manager.name,
                'running_processes': [SYSTEM_DATA_TRANSFORMER_NAME],
                'dead_processes': [GITHUB_DATA_TRANSFORMER_NAME],
                'timestamp': datetime.now().timestamp()
            }
            mock_send_hb.assert_called_once_with(expected_hb)
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @freeze_time("2012-01-01")
    @mock.patch.object(DataTransformersManager, "_send_heartbeat")
    @mock.patch.object(DataTransformersManager,
                       "_start_transformers_processes")
    @mock.patch.object(multiprocessing.Process, "is_alive")
    @mock.patch.object(multiprocessing.Process, "join")
    def test_process_ping_sends_a_valid_hb_if_all_processes_dead(
            self, mock_join, mock_is_alive, mock_start_trans,
            mock_send_hb) -> None:
        """A ping must produce a heartbeat listing every process as dead."""
        # We will perform this test by checking that send_hb is called with the
        # correct heartbeat as the actual sending was already tested above. Note
        # we will mock is_alive by setting it to always return False. By this we
        # can avoid creating processes.
        mock_is_alive.return_value = False
        mock_send_hb.return_value = None
        mock_join.return_value = None
        mock_start_trans.return_value = None
        try:
            # Some of the variables below are needed as parameters for the
            # process_ping function
            self.test_manager._initialise_rabbitmq()
            blocking_channel = self.test_manager.rabbitmq.channel
            method = pika.spec.Basic.Deliver(
                routing_key=DT_MAN_INPUT_ROUTING_KEY)
            body = 'ping'
            properties = pika.spec.BasicProperties()

            # Make the state non-empty to mock the fact that the processes were
            # already created
            self.test_manager._transformer_process_dict = \
                self.transformer_process_dict_example

            self.test_manager._process_ping(blocking_channel, method,
                                            properties, body)

            # Timestamp is deterministic thanks to freeze_time above.
            expected_hb = {
                'component_name':
                self.test_manager.name,
                'running_processes': [],
                'dead_processes':
                [SYSTEM_DATA_TRANSFORMER_NAME, GITHUB_DATA_TRANSFORMER_NAME],
                'timestamp':
                datetime.now().timestamp()
            }
            mock_send_hb.assert_called_once_with(expected_hb)
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @freeze_time("2012-01-01")
    @mock.patch.object(DataTransformersManager, "_send_heartbeat")
    @mock.patch.object(DataTransformersManager,
                       "_start_transformers_processes")
    @mock.patch.object(multiprocessing.Process, "is_alive")
    @mock.patch.object(multiprocessing.Process, "join")
    def test_process_ping_restarts_dead_processes_all_dead(
            self, mock_join, mock_is_alive, mock_start_trans,
            mock_send_hb) -> None:
        """When every process is dead, a ping must trigger a restart."""
        # We will perform this test by checking that
        # _start_transformers_processes is called, as the actual restarting
        # logic was already tested above. Note we will mock is_alive by setting
        # it to always return False. By this we can avoid creating processes.
        mock_is_alive.return_value = False
        mock_send_hb.return_value = None
        mock_join.return_value = None
        mock_start_trans.return_value = None
        try:
            # Some of the variables below are needed as parameters for the
            # process_ping function
            self.test_manager._initialise_rabbitmq()
            blocking_channel = self.test_manager.rabbitmq.channel
            method = pika.spec.Basic.Deliver(
                routing_key=DT_MAN_INPUT_ROUTING_KEY)
            body = 'ping'
            properties = pika.spec.BasicProperties()

            # Make the state non-empty to mock the fact that the processes were
            # already created
            self.test_manager._transformer_process_dict = \
                self.transformer_process_dict_example

            self.test_manager._process_ping(blocking_channel, method,
                                            properties, body)

            mock_start_trans.assert_called_once()
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @freeze_time("2012-01-01")
    @mock.patch.object(DataTransformersManager, "_send_heartbeat")
    @mock.patch.object(DataTransformersManager,
                       "_start_transformers_processes")
    @mock.patch.object(multiprocessing.Process, "is_alive")
    @mock.patch.object(multiprocessing.Process, "join")
    def test_process_ping_restarts_dead_processes_some_alive_some_dead(
            self, mock_join, mock_is_alive, mock_start_trans,
            mock_send_hb) -> None:
        """When at least one process is dead, a ping must trigger a restart."""
        # We will perform this test by checking that
        # _start_transformers_processes is called, as the actual restarting
        # logic was already tested above. Note we will mock is_alive by setting
        # it to first return False and then True. By this we can avoid creating
        # processes.
        mock_is_alive.side_effect = [False, True]
        mock_send_hb.return_value = None
        mock_join.return_value = None
        mock_start_trans.return_value = None
        try:
            # Some of the variables below are needed as parameters for the
            # process_ping function
            self.test_manager._initialise_rabbitmq()
            blocking_channel = self.test_manager.rabbitmq.channel
            method = pika.spec.Basic.Deliver(
                routing_key=DT_MAN_INPUT_ROUTING_KEY)
            body = 'ping'
            properties = pika.spec.BasicProperties()

            # Make the state non-empty to mock the fact that the processes were
            # already created
            self.test_manager._transformer_process_dict = \
                self.transformer_process_dict_example

            self.test_manager._process_ping(blocking_channel, method,
                                            properties, body)

            mock_start_trans.assert_called_once()
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @freeze_time("2012-01-01")
    @mock.patch.object(DataTransformersManager, "_send_heartbeat")
    @mock.patch.object(DataTransformersManager,
                       "_start_transformers_processes")
    @mock.patch.object(multiprocessing.Process, "is_alive")
    @mock.patch.object(multiprocessing.Process, "join")
    def test_process_ping_does_not_restart_dead_processes_if_all_alive(
            self, mock_join, mock_is_alive, mock_start_trans,
            mock_send_hb) -> None:
        """When every process is alive, a ping must not trigger a restart."""
        # We will perform this test by checking that
        # _start_transformers_processes is called, as the actual restarting
        # logic was already tested above. Note we will mock is_alive by setting
        # it to always return True. By this we can avoid creating processes.
        mock_is_alive.return_value = True
        mock_send_hb.return_value = None
        mock_join.return_value = None
        mock_start_trans.return_value = None
        try:
            # Some of the variables below are needed as parameters for the
            # process_ping function
            self.test_manager._initialise_rabbitmq()
            blocking_channel = self.test_manager.rabbitmq.channel
            method = pika.spec.Basic.Deliver(
                routing_key=DT_MAN_INPUT_ROUTING_KEY)
            body = 'ping'
            properties = pika.spec.BasicProperties()

            # Make the state non-empty to mock the fact that the processes were
            # already created
            self.test_manager._transformer_process_dict = \
                self.transformer_process_dict_example

            self.test_manager._process_ping(blocking_channel, method,
                                            properties, body)

            mock_start_trans.assert_not_called()
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @freeze_time("2012-01-01")
    @mock.patch.object(DataTransformersManager, "_send_heartbeat")
    @mock.patch.object(multiprocessing.Process, "is_alive")
    @mock.patch.object(multiprocessing.Process, "start")
    @mock.patch.object(multiprocessing, 'Process')
    def test_process_ping_does_not_send_hb_if_processing_fails(
            self, mock_process, mock_start, mock_is_alive,
            mock_send_hb) -> None:
        # We will perform this test by checking that _send_heartbeat is not
        # called. Note we will generate an exception from is_alive
        mock_is_alive.side_effect = self.test_exception
        mock_send_hb.return_value = None
        mock_start.return_value = None
        # NOTE(review): assigning a Process instance to side_effect is
        # unusual — side_effect is normally a callable, iterable or
        # exception. It is harmless here because is_alive raises before
        # the Process mock is ever invoked, but return_value may have been
        # intended — confirm.
        mock_process.side_effect = self.dummy_process1
        try:
            # Some of the variables below are needed as parameters for the
            # process_ping function
            self.test_manager._initialise_rabbitmq()
            blocking_channel = self.test_manager.rabbitmq.channel
            method = pika.spec.Basic.Deliver(
                routing_key=DT_MAN_INPUT_ROUTING_KEY)
            body = 'ping'
            properties = pika.spec.BasicProperties()

            # Make the state non-empty to mock the fact that the processes were
            # already created
            self.test_manager._transformer_process_dict = \
                self.transformer_process_dict_example

            self.test_manager._process_ping(blocking_channel, method,
                                            properties, body)

            mock_send_hb.assert_not_called()
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @mock.patch.object(DataTransformersManager, "_send_heartbeat")
    def test_proc_ping_send_hb_does_not_raise_msg_not_del_exce_if_hb_not_routed(
            self, mock_send_hb) -> None:
        # A MessageWasNotDeliveredException raised by _send_heartbeat must be
        # swallowed by _process_ping. Were it to propagate, the except clause
        # below would catch it and fail the test.
        mock_send_hb.side_effect = MessageWasNotDeliveredException('test')
        try:
            # Build the parameters _process_ping expects from pika.
            self.test_manager._initialise_rabbitmq()
            channel = self.test_manager.rabbitmq.channel
            deliver = pika.spec.Basic.Deliver(
                routing_key=DT_MAN_INPUT_ROUTING_KEY)
            props = pika.spec.BasicProperties()

            self.test_manager._process_ping(channel, deliver, props, 'ping')
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @mock.patch.object(DataTransformersManager, "_send_heartbeat")
    def test_process_ping_send_hb_raises_amqp_connection_err_on_connection_err(
            self, mock_send_hb) -> None:
        # AMQP connection errors must propagate out of _process_ping so the
        # caller can re-establish the connection.
        mock_send_hb.side_effect = pika.exceptions.AMQPConnectionError('test')
        try:
            # Build the parameters _process_ping expects from pika.
            self.test_manager._initialise_rabbitmq()
            channel = self.test_manager.rabbitmq.channel
            deliver = pika.spec.Basic.Deliver(
                routing_key=DT_MAN_INPUT_ROUTING_KEY)
            props = pika.spec.BasicProperties()

            self.assertRaises(pika.exceptions.AMQPConnectionError,
                              self.test_manager._process_ping,
                              channel, deliver, props, 'ping')
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @mock.patch.object(DataTransformersManager, "_send_heartbeat")
    def test_process_ping_send_hb_raises_amqp_channel_err_on_channel_err(
            self, mock_send_hb) -> None:
        # AMQP channel errors must propagate out of _process_ping so the
        # caller can recover the channel.
        mock_send_hb.side_effect = pika.exceptions.AMQPChannelError('test')
        try:
            # Build the parameters _process_ping expects from pika.
            self.test_manager._initialise_rabbitmq()
            channel = self.test_manager.rabbitmq.channel
            deliver = pika.spec.Basic.Deliver(
                routing_key=DT_MAN_INPUT_ROUTING_KEY)
            props = pika.spec.BasicProperties()

            self.assertRaises(pika.exceptions.AMQPChannelError,
                              self.test_manager._process_ping,
                              channel, deliver, props, 'ping')
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @mock.patch.object(DataTransformersManager, "_send_heartbeat")
    def test_process_ping_send_hb_raises_exception_on_unexpected_exception(
            self, mock_send_hb) -> None:
        # Unexpected exceptions from _send_heartbeat must propagate out of
        # _process_ping rather than being silently swallowed.
        mock_send_hb.side_effect = self.test_exception
        try:
            # Build the parameters _process_ping expects from pika.
            self.test_manager._initialise_rabbitmq()
            channel = self.test_manager.rabbitmq.channel
            deliver = pika.spec.Basic.Deliver(
                routing_key=DT_MAN_INPUT_ROUTING_KEY)
            props = pika.spec.BasicProperties()

            self.assertRaises(PANICException, self.test_manager._process_ping,
                              channel, deliver, props, 'ping')
        except Exception as e:
            self.fail("Test failed: {}".format(e))
Ejemplo n.º 3
0
def _initialise_and_declare_config_queues() -> None:
    """Declare the config exchange and every configuration queue/binding.

    Blocks until the whole initialisation procedure completes successfully,
    retrying every RE_INITIALISE_SLEEPING_PERIOD seconds on RabbitMQ
    unavailability, channel/connection errors, or any unexpected exception.
    """
    dummy_logger = logging.getLogger('Dummy')

    # (queue name, [routing keys to bind it with]) — this table replaces six
    # near-identical declare/bind stanzas and addresses the old TODO about
    # centralising the queue configuration; it could still be moved to
    # constant.py for project-wide reuse.
    queue_bindings = [
        (ALERT_ROUTER_CONFIGS_QUEUE_NAME, ['channels.*']),
        (SYSTEM_ALERTERS_MANAGER_CONFIGS_QUEUE_NAME,
         ['chains.*.*.alerts_config', 'general.alerts_config']),
        (CHANNELS_MANAGER_CONFIGS_QUEUE_NAME, ['channels.*']),
        (GITHUB_MONITORS_MANAGER_CONFIGS_QUEUE_NAME,
         ['chains.*.*.repos_config', 'general.repos_config']),
        (SYSTEM_MONITORS_MANAGER_CONFIGS_QUEUE_NAME,
         ['chains.*.*.nodes_config', 'general.systems_config']),
        (STORE_CONFIGS_QUEUE_NAME, ['#']),
    ]

    while True:
        try:
            rabbitmq = RabbitMQApi(dummy_logger, env.RABBIT_IP)
            log_and_print(
                "Connecting with RabbitMQ to create and bind "
                "configuration queues.", dummy_logger)
            ret = rabbitmq.connect()
            if ret == -1:
                log_and_print(
                    "RabbitMQ is temporarily unavailable. Re-trying in {} "
                    "seconds.".format(RE_INITIALISE_SLEEPING_PERIOD),
                    dummy_logger)
                time.sleep(RE_INITIALISE_SLEEPING_PERIOD)
                continue

            # Config exchange declaration
            log_and_print("Creating {} exchange.".format(CONFIG_EXCHANGE),
                          dummy_logger)
            rabbitmq.exchange_declare(CONFIG_EXCHANGE, 'topic', False, True,
                                      False, False)

            # Declare every configuration queue and bind it to the config
            # exchange with each of its routing keys. All queues share the
            # same declaration flags (non-passive, durable, non-exclusive,
            # no auto-delete) as in the original per-queue stanzas.
            for queue_name, routing_keys in queue_bindings:
                log_and_print(
                    "Creating queue '{}'".format(queue_name), dummy_logger)
                rabbitmq.queue_declare(queue_name, False, True, False, False)
                for routing_key in routing_keys:
                    log_and_print(
                        "Binding queue '{}' to '{}' exchange with routing "
                        "key {}.".format(queue_name, CONFIG_EXCHANGE,
                                         routing_key), dummy_logger)
                    rabbitmq.queue_bind(queue_name, CONFIG_EXCHANGE,
                                        routing_key)

            ret = rabbitmq.disconnect()
            if ret == -1:
                log_and_print(
                    "RabbitMQ is temporarily unavailable. Re-trying in {} "
                    "seconds.".format(RE_INITIALISE_SLEEPING_PERIOD),
                    dummy_logger)
                time.sleep(RE_INITIALISE_SLEEPING_PERIOD)
                continue

            log_and_print(
                "Configuration queues initialisation procedure has "
                "completed successfully. Disconnecting with "
                "RabbitMQ.", dummy_logger)
            break
        except pika.exceptions.AMQPChannelError as e:
            log_and_print(
                "Channel error while initialising the configuration "
                "queues: {}. Re-trying in {} "
                "seconds.".format(repr(e), RE_INITIALISE_SLEEPING_PERIOD),
                dummy_logger)
            time.sleep(RE_INITIALISE_SLEEPING_PERIOD)
        except pika.exceptions.AMQPConnectionError as e:
            log_and_print(
                "RabbitMQ connection error while initialising the "
                "configuration queues: {}. Re-trying in {} "
                "seconds.".format(repr(e), RE_INITIALISE_SLEEPING_PERIOD),
                dummy_logger)
            time.sleep(RE_INITIALISE_SLEEPING_PERIOD)
        except Exception as e:
            log_and_print(
                "Unexpected exception while initialising the "
                "configuration queues: {}. Re-trying in {} "
                "seconds.".format(repr(e), RE_INITIALISE_SLEEPING_PERIOD),
                dummy_logger)
            time.sleep(RE_INITIALISE_SLEEPING_PERIOD)
Ejemplo n.º 4
0
class TestGitHubMonitor(unittest.TestCase):
    def setUp(self) -> None:
        # Fixtures shared by every test: a muted logger, a RabbitMQ API
        # wrapper pointing at the test broker, an example repo config, and
        # the GitHubMonitor instance under test.
        self.dummy_logger = logging.getLogger('Dummy')
        self.dummy_logger.disabled = True
        # Zero interval so the connection state is re-checked on every call.
        self.connection_check_time_interval = timedelta(seconds=0)
        self.rabbit_ip = env.RABBIT_IP
        self.rabbitmq = RabbitMQApi(
            self.dummy_logger,
            self.rabbit_ip,
            connection_check_time_interval=self.connection_check_time_interval)
        self.monitor_name = 'test_monitor'
        self.monitoring_period = 10
        self.repo_id = 'test_repo_id'
        self.parent_id = 'test_parent_id'
        self.repo_name = 'test_repo'
        self.monitor_repo = True
        self.releases_page = 'test_url'
        self.routing_key = 'test_routing_key'
        self.test_data_str = 'test data'
        self.test_data_dict = {
            'test_key_1': 'test_val_1',
            'test_key_2': 'test_val_2',
        }
        self.test_heartbeat = {
            'component_name': 'Test Component',
            'is_alive': True,
            'timestamp': datetime(2012, 1, 1).timestamp(),
        }
        self.test_queue_name = 'Test Queue'
        # In the real retrieved data there are more fields, but these are the
        # only ones that interest us so far.
        self.retrieved_metrics_example = [
            {
                'name': 'First Release 😮',
                'tag_name': 'v1.0.0'
            },
            {
                'name': 'Release Candidate 1',
                'tag_name': 'v0.1.0'
            },
        ]
        # Expected shape after processing: keyed by stringified list index,
        # with 'name' exposed as 'release_name'.
        self.processed_data_example = {
            '0': {
                'release_name': 'First Release 😮',
                'tag_name': 'v1.0.0'
            },
            '1': {
                'release_name': 'Release Candidate 1',
                'tag_name': 'v0.1.0'
            },
        }
        self.test_exception = PANICException('test_exception', 1)
        self.repo_config = RepoConfig(self.repo_id, self.parent_id,
                                      self.repo_name, self.monitor_repo,
                                      self.releases_page)
        self.test_monitor = GitHubMonitor(self.monitor_name, self.repo_config,
                                          self.dummy_logger,
                                          self.monitoring_period,
                                          self.rabbitmq)

    def tearDown(self) -> None:
        # Delete any queues and exchanges which are common across many tests
        try:
            self.test_monitor.rabbitmq.connect()

            # Declare them before just in case there are tests which do not
            # use these queues and exchanges
            self.test_monitor.rabbitmq.queue_declare(
                queue=self.test_queue_name,
                durable=True,
                exclusive=False,
                auto_delete=False,
                passive=False)
            self.test_monitor.rabbitmq.exchange_declare(
                HEALTH_CHECK_EXCHANGE, 'topic', False, True, False, False)
            self.test_monitor.rabbitmq.exchange_declare(
                RAW_DATA_EXCHANGE, 'direct', False, True, False, False)

            # Remove the shared queue (purged first) and both exchanges.
            self.test_monitor.rabbitmq.queue_purge(self.test_queue_name)
            self.test_monitor.rabbitmq.queue_delete(self.test_queue_name)
            self.test_monitor.rabbitmq.exchange_delete(RAW_DATA_EXCHANGE)
            self.test_monitor.rabbitmq.exchange_delete(HEALTH_CHECK_EXCHANGE)
            self.test_monitor.rabbitmq.disconnect()
        except Exception as e:
            # Best-effort cleanup: a failure here must not fail the test run.
            print("Deletion of queues and exchanges failed: {}".format(e))

        # Drop fixture references so each test starts from a fresh setUp.
        self.dummy_logger = None
        self.rabbitmq = None
        self.test_exception = None
        self.repo_config = None
        self.test_monitor = None

    def test_str_returns_monitor_name(self) -> None:
        """str() on the monitor must yield its configured name."""
        self.assertEqual(str(self.test_monitor), self.monitor_name)

    def test_get_monitor_period_returns_monitor_period(self) -> None:
        """The monitor_period property must expose the configured period."""
        self.assertEqual(self.test_monitor.monitor_period,
                         self.monitoring_period)

    def test_get_monitor_name_returns_monitor_name(self) -> None:
        """The monitor_name property must expose the configured name."""
        self.assertEqual(self.test_monitor.monitor_name, self.monitor_name)

    def test_repo_config_returns_repo_config(self) -> None:
        """The repo_config property must expose the configured RepoConfig."""
        self.assertEqual(self.test_monitor.repo_config, self.repo_config)

    def test_initialise_rabbitmq_initialises_everything_as_expected(
            self) -> None:
        """_initialise_rabbitmq must open the connection, enable delivery
        confirmation, and declare both the raw-data and health-check
        exchanges."""
        try:
            # To make sure that there is no connection/channel already
            # established
            self.assertIsNone(self.rabbitmq.connection)
            self.assertIsNone(self.rabbitmq.channel)

            # To make sure that the exchanges have not already been declared
            self.rabbitmq.connect()
            self.rabbitmq.exchange_delete(RAW_DATA_EXCHANGE)
            self.rabbitmq.exchange_delete(HEALTH_CHECK_EXCHANGE)
            self.rabbitmq.disconnect()

            self.test_monitor._initialise_rabbitmq()

            # Perform checks that the connection has been opened, marked as open
            # and that the delivery confirmation variable is set.
            self.assertTrue(self.test_monitor.rabbitmq.is_connected)
            self.assertTrue(self.test_monitor.rabbitmq.connection.is_open)
            self.assertTrue(
                self.test_monitor.rabbitmq.channel._delivery_confirmation)

            # Check whether the exchange has been creating by sending messages
            # to it. If this fails an exception is raised, hence the test fails.
            self.test_monitor.rabbitmq.basic_publish_confirm(
                exchange=RAW_DATA_EXCHANGE,
                routing_key=self.routing_key,
                body=self.test_data_str,
                is_body_dict=False,
                properties=pika.BasicProperties(delivery_mode=2),
                mandatory=False)
            self.test_monitor.rabbitmq.basic_publish_confirm(
                exchange=HEALTH_CHECK_EXCHANGE,
                routing_key=self.routing_key,
                body=self.test_data_str,
                is_body_dict=False,
                properties=pika.BasicProperties(delivery_mode=2),
                mandatory=False)
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @mock.patch.object(GitHubMonitor, "_process_retrieved_data")
    @mock.patch.object(GitHubMonitor, "_process_error")
    def test_process_data_calls_process_error_on_retrieval_error(
            self, mock_process_error, mock_process_retrieved_data) -> None:
        # The actual processing of the data is not under test here.
        mock_process_error.return_value = self.test_data_dict

        self.test_monitor._process_data(self.test_data_dict, True,
                                        self.test_exception)

        # On a retrieval error only the error path must run: _process_error
        # exactly once, _process_retrieved_data never.
        mock_process_error.assert_called_once()
        mock_process_retrieved_data.assert_not_called()

    @mock.patch.object(GitHubMonitor, "_process_retrieved_data")
    @mock.patch.object(GitHubMonitor, "_process_error")
    def test_process_data_calls_process_retrieved_data_on_retrieval_success(
            self, mock_process_error, mock_process_retrieved_data) -> None:
        # Do not test the processing of data for now
        mock_process_retrieved_data.return_value = self.test_data_dict

        self.test_monitor._process_data(self.test_data_dict, False, None)

        # Test passes if _process_retrieved_data is called once and
        # _process_error is not called. (The previous comment here was
        # copy-pasted from the error-path test and stated the inverse.)
        self.assertEqual(0, mock_process_error.call_count)
        self.assertEqual(1, mock_process_retrieved_data.call_count)

    def test_send_heartbeat_sends_a_heartbeat_correctly(self) -> None:
        # This test creates a queue which receives messages with the same
        # routing key as the ones sent by send_heartbeat, and checks that the
        # heartbeat is received
        try:
            self.test_monitor._initialise_rabbitmq()

            # Delete the queue before to avoid messages in the queue on error.
            self.test_monitor.rabbitmq.queue_delete(self.test_queue_name)

            res = self.test_monitor.rabbitmq.queue_declare(
                queue=self.test_queue_name,
                durable=True,
                exclusive=False,
                auto_delete=False,
                passive=False)
            self.assertEqual(0, res.method.message_count)
            # 'heartbeat.worker' must match the routing key used internally
            # by _send_heartbeat — presumably defined in the monitor; verify
            # against the implementation if this binding stops matching.
            self.test_monitor.rabbitmq.queue_bind(
                queue=self.test_queue_name,
                exchange=HEALTH_CHECK_EXCHANGE,
                routing_key='heartbeat.worker')
            self.test_monitor._send_heartbeat(self.test_heartbeat)

            # By re-declaring the queue again we can get the number of messages
            # in the queue.
            res = self.test_monitor.rabbitmq.queue_declare(
                queue=self.test_queue_name,
                durable=True,
                exclusive=False,
                auto_delete=False,
                passive=True)
            self.assertEqual(1, res.method.message_count)

            # Check that the message received is actually the HB
            _, _, body = self.test_monitor.rabbitmq.basic_get(
                self.test_queue_name)
            self.assertEqual(self.test_heartbeat, json.loads(body))
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    def test_display_data_returns_the_correct_string(self) -> None:
        """_display_data must render the dict as JSON without ASCII-escaping
        (so non-ASCII release names survive intact)."""
        expected = json.dumps(self.processed_data_example,
                              ensure_ascii=False).encode('utf8').decode()
        self.assertEqual(
            expected,
            self.test_monitor._display_data(self.processed_data_example))

    @freeze_time("2012-01-01")
    @mock.patch.object(GitHubMonitor, "_process_error")
    def test_process_error_returns_expected_data(self) -> None:
        """_process_error must wrap the exception in the 'error' envelope
        with repo meta-data and the frozen timestamp."""
        expected_output = {
            'error': {
                'meta_data': {
                    'monitor_name': self.test_monitor.monitor_name,
                    'repo_name': self.test_monitor.repo_config.repo_name,
                    'repo_id': self.test_monitor.repo_config.repo_id,
                    'repo_parent_id': self.test_monitor.repo_config.parent_id,
                    'time': datetime(2012, 1, 1).timestamp()
                },
                'message': self.test_exception.message,
                'code': self.test_exception.code,
            }
        }
        actual_output = self.test_monitor._process_error(self.test_exception)
        self.assertEqual(actual_output, expected_output)

    @freeze_time("2012-01-01")
    def test_process_retrieved_data_returns_expected_data(self) -> None:
        """_process_retrieved_data must wrap the processed metrics in the
        'result' envelope with repo meta-data and the frozen timestamp."""
        expected_output = {
            'result': {
                'meta_data': {
                    'monitor_name': self.test_monitor.monitor_name,
                    'repo_name': self.test_monitor.repo_config.repo_name,
                    'repo_id': self.test_monitor.repo_config.repo_id,
                    'repo_parent_id': self.test_monitor.repo_config.parent_id,
                    'time': datetime(2012, 1, 1).timestamp()
                },
                'data': self.processed_data_example,
            }
        }
        actual_output = self.test_monitor._process_retrieved_data(
            self.retrieved_metrics_example)
        self.assertEqual(expected_output, actual_output)

    def test_send_data_sends_data_correctly(self) -> None:
        # This test creates a queue which receives messages with the same
        # routing key as the ones sent by send_data, and checks that the
        # data is received
        try:
            self.test_monitor._initialise_rabbitmq()

            # Delete the queue before to avoid messages in the queue on error.
            self.test_monitor.rabbitmq.queue_delete(self.test_queue_name)

            res = self.test_monitor.rabbitmq.queue_declare(
                queue=self.test_queue_name,
                durable=True,
                exclusive=False,
                auto_delete=False,
                passive=False)
            self.assertEqual(0, res.method.message_count)
            # 'github' must match the routing key used by _send_data.
            self.test_monitor.rabbitmq.queue_bind(queue=self.test_queue_name,
                                                  exchange=RAW_DATA_EXCHANGE,
                                                  routing_key='github')

            self.test_monitor._send_data(self.processed_data_example)

            # By re-declaring the queue again we can get the number of messages
            # in the queue.
            res = self.test_monitor.rabbitmq.queue_declare(
                queue=self.test_queue_name,
                durable=True,
                exclusive=False,
                auto_delete=False,
                passive=True)
            self.assertEqual(1, res.method.message_count)

            # Check that the message received is actually the processed data
            _, _, body = self.test_monitor.rabbitmq.basic_get(
                self.test_queue_name)
            self.assertEqual(self.processed_data_example, json.loads(body))
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @freeze_time("2012-01-01")
    @mock.patch.object(GitHubMonitor, "_get_data")
    def test_monitor_sends_data_and_hb_if_data_retrieve_and_processing_success(
            self, mock_get_data) -> None:
        """On a fully successful round, _monitor must publish exactly two
        messages — the processed data, then the heartbeat — both bound here
        into one queue so their order and content can be asserted."""
        expected_output_data = {
            'result': {
                'meta_data': {
                    'monitor_name': self.test_monitor.monitor_name,
                    'repo_name': self.test_monitor.repo_config.repo_name,
                    'repo_id': self.test_monitor.repo_config.repo_id,
                    'repo_parent_id': self.test_monitor.repo_config.parent_id,
                    'time': datetime(2012, 1, 1).timestamp()
                },
                'data': self.processed_data_example,
            }
        }
        expected_output_hb = {
            'component_name': self.test_monitor.monitor_name,
            'is_alive': True,
            'timestamp': datetime(2012, 1, 1).timestamp()
        }

        try:
            mock_get_data.return_value = self.retrieved_metrics_example
            self.test_monitor._initialise_rabbitmq()

            # Delete the queue before to avoid messages in the queue on error.
            self.test_monitor.rabbitmq.queue_delete(self.test_queue_name)

            res = self.test_monitor.rabbitmq.queue_declare(
                queue=self.test_queue_name,
                durable=True,
                exclusive=False,
                auto_delete=False,
                passive=False)
            self.assertEqual(0, res.method.message_count)
            self.test_monitor.rabbitmq.queue_bind(queue=self.test_queue_name,
                                                  exchange=RAW_DATA_EXCHANGE,
                                                  routing_key='github')
            self.test_monitor.rabbitmq.queue_bind(
                queue=self.test_queue_name,
                exchange=HEALTH_CHECK_EXCHANGE,
                routing_key='heartbeat.worker')

            self.test_monitor._monitor()

            # By re-declaring the queue again we can get the number of messages
            # in the queue.
            res = self.test_monitor.rabbitmq.queue_declare(
                queue=self.test_queue_name,
                durable=True,
                exclusive=False,
                auto_delete=False,
                passive=True)
            # There must be 2 messages in the queue, the heartbeat and the
            # processed data
            self.assertEqual(2, res.method.message_count)

            # Check that the message received is actually the processed data
            _, _, body = self.test_monitor.rabbitmq.basic_get(
                self.test_queue_name)
            self.assertEqual(expected_output_data, json.loads(body))

            # Check that the message received is actually the HB
            _, _, body = self.test_monitor.rabbitmq.basic_get(
                self.test_queue_name)
            self.assertEqual(expected_output_hb, json.loads(body))
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @mock.patch.object(GitHubMonitor, "_process_data")
    @mock.patch.object(GitHubMonitor, "_get_data")
    def test_monitor_sends_no_data_and_hb_if_data_ret_success_and_proc_fails(
            self, mock_get_data, mock_process_data) -> None:
        """If processing raises after a successful retrieval, _monitor must
        publish nothing — neither data nor a heartbeat."""
        mock_process_data.side_effect = self.test_exception
        mock_get_data.return_value = self.retrieved_metrics_example
        try:
            self.test_monitor._initialise_rabbitmq()

            # Delete the queue before to avoid messages in the queue on error.
            self.test_monitor.rabbitmq.queue_delete(self.test_queue_name)

            res = self.test_monitor.rabbitmq.queue_declare(
                queue=self.test_queue_name,
                durable=True,
                exclusive=False,
                auto_delete=False,
                passive=False)
            self.assertEqual(0, res.method.message_count)
            self.test_monitor.rabbitmq.queue_bind(queue=self.test_queue_name,
                                                  exchange=RAW_DATA_EXCHANGE,
                                                  routing_key='github')
            self.test_monitor.rabbitmq.queue_bind(
                queue=self.test_queue_name,
                exchange=HEALTH_CHECK_EXCHANGE,
                routing_key='heartbeat.worker')

            self.test_monitor._monitor()

            # By re-declaring the queue again we can get the number of messages
            # in the queue.
            res = self.test_monitor.rabbitmq.queue_declare(
                queue=self.test_queue_name,
                durable=True,
                exclusive=False,
                auto_delete=False,
                passive=True)
            # There must be 0 messages in the queue.
            self.assertEqual(0, res.method.message_count)
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @mock.patch.object(GitHubMonitor, "_get_data")
    def test_monitor_sends_no_data_and_no_hb_on_get_data_unexpected_exception(
            self, mock_get_data) -> None:
        """An unexpected retrieval error must propagate and publish nothing."""
        mock_get_data.side_effect = self.test_exception
        try:
            self.test_monitor._initialise_rabbitmq()

            # Start from a freshly declared, empty queue so that stale
            # messages cannot influence the final count.
            self.test_monitor.rabbitmq.queue_delete(self.test_queue_name)
            queue_res = self.test_monitor.rabbitmq.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=False)
            self.assertEqual(0, queue_res.method.message_count)

            # Route both raw data and worker heartbeats to the test queue.
            self.test_monitor.rabbitmq.queue_bind(
                queue=self.test_queue_name, exchange=RAW_DATA_EXCHANGE,
                routing_key='github')
            self.test_monitor.rabbitmq.queue_bind(
                queue=self.test_queue_name, exchange=HEALTH_CHECK_EXCHANGE,
                routing_key='heartbeat.worker')

            self.assertRaises(PANICException, self.test_monitor._monitor)

            # Passive re-declaration reports the message count, which must
            # still be zero.
            queue_res = self.test_monitor.rabbitmq.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=True)
            self.assertEqual(0, queue_res.method.message_count)
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @freeze_time("2012-01-01")
    @mock.patch.object(GitHubMonitor, "_get_data")
    def test_monitor_sends_gh_api_call_exception_data_and_hb_on_api_call_err(
            self, mock_get_data) -> None:
        """A GitHub API error payload must result in error data plus a
        heartbeat being published.
        """
        api_call_err_return = {
            "message": "Not Found",
            "documentation_url":
                "https://docs.github.com/rest/reference/repos#list-releases",
        }
        mock_get_data.return_value = api_call_err_return
        data_ret_exception = GitHubAPICallException(
            api_call_err_return['message'])

        # Time is frozen by the decorator, so the published timestamps are
        # deterministic.
        frozen_time = datetime(2012, 1, 1).timestamp()
        expected_output_data = {
            'error': {
                'meta_data': {
                    'monitor_name': self.test_monitor.monitor_name,
                    'repo_name': self.test_monitor.repo_config.repo_name,
                    'repo_id': self.test_monitor.repo_config.repo_id,
                    'repo_parent_id': self.test_monitor.repo_config.parent_id,
                    'time': frozen_time,
                },
                'message': data_ret_exception.message,
                'code': data_ret_exception.code,
            }
        }
        expected_output_hb = {
            'component_name': self.test_monitor.monitor_name,
            'is_alive': True,
            'timestamp': frozen_time,
        }
        try:
            self.test_monitor._initialise_rabbitmq()

            # Fresh, empty queue bound to both exchanges so every published
            # message lands where the test can read it.
            self.test_monitor.rabbitmq.queue_delete(self.test_queue_name)
            queue_res = self.test_monitor.rabbitmq.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=False)
            self.assertEqual(0, queue_res.method.message_count)
            self.test_monitor.rabbitmq.queue_bind(
                queue=self.test_queue_name, exchange=RAW_DATA_EXCHANGE,
                routing_key='github')
            self.test_monitor.rabbitmq.queue_bind(
                queue=self.test_queue_name, exchange=HEALTH_CHECK_EXCHANGE,
                routing_key='heartbeat.worker')

            self.test_monitor._monitor()

            # Passive re-declare exposes the count: exactly two messages, the
            # exception details and the heartbeat.
            queue_res = self.test_monitor.rabbitmq.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=True)
            self.assertEqual(2, queue_res.method.message_count)

            # The first message is the error data ...
            _, _, payload = self.test_monitor.rabbitmq.basic_get(
                self.test_queue_name)
            self.assertEqual(expected_output_data, json.loads(payload))

            # ... and the second is the heartbeat.
            _, _, payload = self.test_monitor.rabbitmq.basic_get(
                self.test_queue_name)
            self.assertEqual(expected_output_hb, json.loads(payload))
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @freeze_time("2012-01-01")
    @mock.patch.object(GitHubMonitor, "_get_data")
    def test_monitor_sends_exception_data_and_hb_on_expected_exceptions(
            self, mock_get_data) -> None:
        """For every expected retrieval error, _monitor must publish the
        corresponding PANIC exception as error data together with a heartbeat.
        """
        json_decode_error = json.JSONDecodeError(msg='test error',
                                                 doc='test',
                                                 pos=2)
        # Maps each raised retrieval error to the PANIC exception whose
        # message/code must appear in the published error data.
        errors_exceptions_dict = {
            ReqConnectionError('test'):
            CannotAccessGitHubPageException(self.repo_config.releases_page),
            ReadTimeout('test'):
            CannotAccessGitHubPageException(self.repo_config.releases_page),
            IncompleteRead('test'):
            DataReadingException(self.monitor_name,
                                 self.repo_config.releases_page),
            ChunkedEncodingError('test'):
            DataReadingException(self.monitor_name,
                                 self.repo_config.releases_page),
            ProtocolError('test'):
            DataReadingException(self.monitor_name,
                                 self.repo_config.releases_page),
            json_decode_error:
            JSONDecodeException(json_decode_error)
        }
        try:
            self.test_monitor._initialise_rabbitmq()
            # Run the whole publish/consume cycle once per error scenario.
            for error, data_ret_exception in errors_exceptions_dict.items():
                mock_get_data.side_effect = error
                # Timestamps are deterministic because time is frozen by the
                # decorator at 2012-01-01.
                expected_output_data = {
                    'error': {
                        'meta_data': {
                            'monitor_name': self.test_monitor.monitor_name,
                            'repo_name':
                            self.test_monitor.repo_config.repo_name,
                            'repo_id': self.test_monitor.repo_config.repo_id,
                            'repo_parent_id':
                            self.test_monitor.repo_config.parent_id,
                            'time': datetime(2012, 1, 1).timestamp()
                        },
                        'message': data_ret_exception.message,
                        'code': data_ret_exception.code,
                    }
                }
                expected_output_hb = {
                    'component_name': self.test_monitor.monitor_name,
                    'is_alive': True,
                    'timestamp': datetime(2012, 1, 1).timestamp()
                }
                # Delete the queue before to avoid messages in the queue on
                # error.
                self.test_monitor.rabbitmq.queue_delete(self.test_queue_name)

                res = self.test_monitor.rabbitmq.queue_declare(
                    queue=self.test_queue_name,
                    durable=True,
                    exclusive=False,
                    auto_delete=False,
                    passive=False)
                self.assertEqual(0, res.method.message_count)
                self.test_monitor.rabbitmq.queue_bind(
                    queue=self.test_queue_name,
                    exchange=RAW_DATA_EXCHANGE,
                    routing_key='github')
                self.test_monitor.rabbitmq.queue_bind(
                    queue=self.test_queue_name,
                    exchange=HEALTH_CHECK_EXCHANGE,
                    routing_key='heartbeat.worker')

                self.test_monitor._monitor()

                # By re-declaring the queue again we can get the number of
                # messages in the queue.
                res = self.test_monitor.rabbitmq.queue_declare(
                    queue=self.test_queue_name,
                    durable=True,
                    exclusive=False,
                    auto_delete=False,
                    passive=True)
                # There must be 2 messages in the queue, the heartbeat and the
                # processed data
                self.assertEqual(2, res.method.message_count)

                # Check that the message received is actually the processed data
                _, _, body = self.test_monitor.rabbitmq.basic_get(
                    self.test_queue_name)
                self.assertEqual(expected_output_data, json.loads(body))

                # Check that the message received is actually the HB
                _, _, body = self.test_monitor.rabbitmq.basic_get(
                    self.test_queue_name)
                self.assertEqual(expected_output_hb, json.loads(body))
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @mock.patch.object(GitHubMonitor, "_get_data")
    def test_monitor_raises_msg_not_delivered_exception_if_data_not_routed(
            self, mock_get_data) -> None:
        """With no queue bound for the data, _monitor must raise
        MessageWasNotDeliveredException.
        """
        mock_get_data.return_value = self.retrieved_metrics_example
        try:
            self.test_monitor._initialise_rabbitmq()
            self.assertRaises(MessageWasNotDeliveredException,
                              self.test_monitor._monitor)
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @freeze_time("2012-01-01")
    @mock.patch.object(GitHubMonitor, "_get_data")
    def test_monitor_raises_msg_not_del_except_if_hb_not_routed_and_sends_data(
            self, mock_get_data) -> None:
        """If only the data route exists, the processed data is delivered but
        the unroutable heartbeat raises MessageWasNotDeliveredException.
        """
        mock_get_data.return_value = self.retrieved_metrics_example
        # Time is frozen by the decorator, making the timestamp deterministic.
        frozen_time = datetime(2012, 1, 1).timestamp()
        expected_output_data = {
            'result': {
                'meta_data': {
                    'monitor_name': self.test_monitor.monitor_name,
                    'repo_name': self.test_monitor.repo_config.repo_name,
                    'repo_id': self.test_monitor.repo_config.repo_id,
                    'repo_parent_id': self.test_monitor.repo_config.parent_id,
                    'time': frozen_time,
                },
                'data': self.processed_data_example,
            }
        }
        try:
            self.test_monitor._initialise_rabbitmq()

            # Fresh, empty queue bound only to the raw-data exchange; the
            # heartbeat route is deliberately left out.
            self.test_monitor.rabbitmq.queue_delete(self.test_queue_name)
            queue_res = self.test_monitor.rabbitmq.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=False)
            self.assertEqual(0, queue_res.method.message_count)
            self.test_monitor.rabbitmq.queue_bind(
                queue=self.test_queue_name, exchange=RAW_DATA_EXCHANGE,
                routing_key='github')

            self.assertRaises(MessageWasNotDeliveredException,
                              self.test_monitor._monitor)

            # Passive re-declare exposes the count: exactly one message, the
            # processed data.
            queue_res = self.test_monitor.rabbitmq.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=True)
            self.assertEqual(1, queue_res.method.message_count)

            _, _, payload = self.test_monitor.rabbitmq.basic_get(
                self.test_queue_name)
            self.assertEqual(expected_output_data, json.loads(payload))
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @mock.patch.object(GitHubMonitor, "_send_data")
    @mock.patch.object(GitHubMonitor, "_get_data")
    def test_monitor_send_data_raises_amqp_channel_error_on_channel_error(
            self, mock_get_data, mock_send_data) -> None:
        """An AMQP channel error raised while sending data must propagate."""
        mock_get_data.return_value = self.retrieved_metrics_example
        mock_send_data.side_effect = pika.exceptions.AMQPChannelError('test')
        try:
            self.test_monitor._initialise_rabbitmq()
            self.assertRaises(pika.exceptions.AMQPChannelError,
                              self.test_monitor._monitor)
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @freeze_time("2012-01-01")
    @mock.patch.object(GitHubMonitor, "_send_heartbeat")
    @mock.patch.object(GitHubMonitor, "_get_data")
    def test_monitor_send_hb_raises_amqp_chan_err_on_chan_err_and_sends_data(
            self, mock_get_data, mock_send_heartbeat) -> None:
        """A channel error while sending the heartbeat must propagate, but the
        processed data must already have been delivered.
        """
        mock_get_data.return_value = self.retrieved_metrics_example
        mock_send_heartbeat.side_effect = \
            pika.exceptions.AMQPChannelError('test')
        # Time is frozen by the decorator, making the timestamp deterministic.
        frozen_time = datetime(2012, 1, 1).timestamp()
        expected_output_data = {
            'result': {
                'meta_data': {
                    'monitor_name': self.test_monitor.monitor_name,
                    'repo_name': self.test_monitor.repo_config.repo_name,
                    'repo_id': self.test_monitor.repo_config.repo_id,
                    'repo_parent_id': self.test_monitor.repo_config.parent_id,
                    'time': frozen_time,
                },
                'data': self.processed_data_example,
            }
        }
        try:
            self.test_monitor._initialise_rabbitmq()

            # Fresh, empty queue receiving both data and heartbeat traffic.
            self.test_monitor.rabbitmq.queue_delete(self.test_queue_name)
            queue_res = self.test_monitor.rabbitmq.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=False)
            self.assertEqual(0, queue_res.method.message_count)
            self.test_monitor.rabbitmq.queue_bind(
                queue=self.test_queue_name, exchange=RAW_DATA_EXCHANGE,
                routing_key='github')
            self.test_monitor.rabbitmq.queue_bind(
                queue=self.test_queue_name, exchange=HEALTH_CHECK_EXCHANGE,
                routing_key='heartbeat.worker')

            self.assertRaises(pika.exceptions.AMQPChannelError,
                              self.test_monitor._monitor)

            # Passive re-declare exposes the count: exactly one message, the
            # processed data.
            queue_res = self.test_monitor.rabbitmq.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=True)
            self.assertEqual(1, queue_res.method.message_count)

            _, _, payload = self.test_monitor.rabbitmq.basic_get(
                self.test_queue_name)
            self.assertEqual(expected_output_data, json.loads(payload))
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @mock.patch.object(GitHubMonitor, "_send_data")
    @mock.patch.object(GitHubMonitor, "_get_data")
    def test_monitor_send_data_raises_amqp_conn_error_on_conn_error(
            self, mock_get_data, mock_send_data) -> None:
        """An AMQP connection error raised while sending data must
        propagate.
        """
        mock_get_data.return_value = self.retrieved_metrics_example
        mock_send_data.side_effect = pika.exceptions.AMQPConnectionError(
            'test')
        try:
            self.test_monitor._initialise_rabbitmq()
            self.assertRaises(pika.exceptions.AMQPConnectionError,
                              self.test_monitor._monitor)
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @freeze_time("2012-01-01")
    @mock.patch.object(GitHubMonitor, "_send_heartbeat")
    @mock.patch.object(GitHubMonitor, "_get_data")
    def test_monitor_send_hb_raises_amqp_conn_err_on_conn_err_and_sends_data(
            self, mock_get_data, mock_send_heartbeat) -> None:
        """A connection error while sending the heartbeat must propagate, but
        the processed data must already have been delivered.
        """
        mock_get_data.return_value = self.retrieved_metrics_example
        mock_send_heartbeat.side_effect = \
            pika.exceptions.AMQPConnectionError('test')
        # Time is frozen by the decorator, making the timestamp deterministic.
        frozen_time = datetime(2012, 1, 1).timestamp()
        expected_output_data = {
            'result': {
                'meta_data': {
                    'monitor_name': self.test_monitor.monitor_name,
                    'repo_name': self.test_monitor.repo_config.repo_name,
                    'repo_id': self.test_monitor.repo_config.repo_id,
                    'repo_parent_id': self.test_monitor.repo_config.parent_id,
                    'time': frozen_time,
                },
                'data': self.processed_data_example,
            }
        }
        try:
            self.test_monitor._initialise_rabbitmq()

            # Fresh, empty queue receiving both data and heartbeat traffic.
            self.test_monitor.rabbitmq.queue_delete(self.test_queue_name)
            queue_res = self.test_monitor.rabbitmq.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=False)
            self.assertEqual(0, queue_res.method.message_count)
            self.test_monitor.rabbitmq.queue_bind(
                queue=self.test_queue_name, exchange=RAW_DATA_EXCHANGE,
                routing_key='github')
            self.test_monitor.rabbitmq.queue_bind(
                queue=self.test_queue_name, exchange=HEALTH_CHECK_EXCHANGE,
                routing_key='heartbeat.worker')

            self.assertRaises(pika.exceptions.AMQPConnectionError,
                              self.test_monitor._monitor)

            # Passive re-declare exposes the count: exactly one message, the
            # processed data.
            queue_res = self.test_monitor.rabbitmq.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=True)
            self.assertEqual(1, queue_res.method.message_count)

            _, _, payload = self.test_monitor.rabbitmq.basic_get(
                self.test_queue_name)
            self.assertEqual(expected_output_data, json.loads(payload))
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @mock.patch.object(GitHubMonitor, "_send_data")
    @mock.patch.object(GitHubMonitor, "_get_data")
    def test_monitor_does_not_send_hb_and_data_if_send_data_fails(
            self, mock_get_data, mock_send_data) -> None:
        """If sending the data fails with any of the listed exceptions, the
        exception must propagate and neither data nor heartbeat may reach the
        queue.
        """
        mock_get_data.return_value = self.retrieved_metrics_example
        # Maps each exception instance used as the _send_data side effect to
        # the exception type _monitor is expected to raise.
        exception_types_dict = \
            {
                Exception('test'): Exception,
                pika.exceptions.AMQPConnectionError('test'):
                    pika.exceptions.AMQPConnectionError,
                pika.exceptions.AMQPChannelError('test'):
                    pika.exceptions.AMQPChannelError,
                MessageWasNotDeliveredException('test'):
                    MessageWasNotDeliveredException
            }
        try:
            self.test_monitor._initialise_rabbitmq()
            # Run the whole publish/consume cycle once per exception scenario.
            for exception, exception_type in exception_types_dict.items():
                mock_send_data.side_effect = exception
                self.test_monitor.rabbitmq.queue_delete(self.test_queue_name)

                res = self.test_monitor.rabbitmq.queue_declare(
                    queue=self.test_queue_name,
                    durable=True,
                    exclusive=False,
                    auto_delete=False,
                    passive=False)
                self.assertEqual(0, res.method.message_count)
                self.test_monitor.rabbitmq.queue_bind(
                    queue=self.test_queue_name,
                    exchange=HEALTH_CHECK_EXCHANGE,
                    routing_key='heartbeat.worker')
                self.test_monitor.rabbitmq.queue_bind(
                    queue=self.test_queue_name,
                    exchange=RAW_DATA_EXCHANGE,
                    routing_key='github')

                self.assertRaises(exception_type, self.test_monitor._monitor)

                # By re-declaring the queue again we can get the number of
                # messages in the queue.
                res = self.test_monitor.rabbitmq.queue_declare(
                    queue=self.test_queue_name,
                    durable=True,
                    exclusive=False,
                    auto_delete=False,
                    passive=True)
                # There must be no messages in the queue.
                self.assertEqual(0, res.method.message_count)
        except Exception as e:
            self.fail("Test failed: {}".format(e))
# Example no. 5
class TestConfigStore(unittest.TestCase):
    """Test suite for the ConfigStore component; fixtures are built in setUp."""
    def setUp(self) -> None:
        """Create the RabbitMQ/Redis connections, the ConfigStore under test,
        the exchanges/queues it uses, and a set of sample configuration
        payloads shared by the tests.
        """
        # Disabled dummy logger so the components under test do not pollute
        # the test output.
        self.dummy_logger = logging.getLogger('Dummy')
        self.dummy_logger.disabled = True
        self.connection_check_time_interval = timedelta(seconds=0)
        self.rabbit_ip = env.RABBIT_IP
        # Rabbit connection handed to the store under test.
        self.rabbitmq = RabbitMQApi(
            self.dummy_logger,
            self.rabbit_ip,
            connection_check_time_interval=self.connection_check_time_interval)

        # Separate rabbit connection used directly by the tests (e.g. for the
        # test queue) so test traffic does not share the store's channel.
        self.test_rabbit_manager = RabbitMQApi(
            self.dummy_logger,
            self.rabbit_ip,
            connection_check_time_interval=self.connection_check_time_interval)

        # Redis connection parameters come from the environment.
        self.redis_db = env.REDIS_DB
        self.redis_host = env.REDIS_IP
        self.redis_port = env.REDIS_PORT
        self.redis_namespace = env.UNIQUE_ALERTER_IDENTIFIER
        self.redis = RedisApi(self.dummy_logger, self.redis_db,
                              self.redis_host, self.redis_port, '',
                              self.redis_namespace,
                              self.connection_check_time_interval)

        # Mongo connection parameters (connection itself is not opened here).
        self.mongo_ip = env.DB_IP
        self.mongo_db = env.DB_NAME
        self.mongo_port = env.DB_PORT

        # The component under test.
        self.test_store_name = 'store name'
        self.test_store = ConfigStore(self.test_store_name, self.dummy_logger,
                                      self.rabbitmq)

        self.routing_key = 'heartbeat.worker'
        self.test_queue_name = 'test queue'

        # Declare the exchanges/queue the store consumes from and bind the
        # configs queue to the chains routing key.
        connect_to_rabbit(self.rabbitmq)
        self.rabbitmq.exchange_declare(HEALTH_CHECK_EXCHANGE, 'topic', False,
                                       True, False, False)
        self.rabbitmq.exchange_declare(CONFIG_EXCHANGE, 'topic', False, True,
                                       False, False)
        self.rabbitmq.queue_declare(STORE_CONFIGS_QUEUE_NAME, False, True,
                                    False, False)
        self.rabbitmq.queue_bind(STORE_CONFIGS_QUEUE_NAME, CONFIG_EXCHANGE,
                                 STORE_CONFIGS_ROUTING_KEY_CHAINS)

        # The test's own queue captures heartbeats published by the store.
        connect_to_rabbit(self.test_rabbit_manager)
        self.test_rabbit_manager.queue_declare(self.test_queue_name, False,
                                               True, False, False)
        self.test_rabbit_manager.queue_bind(self.test_queue_name,
                                            HEALTH_CHECK_EXCHANGE,
                                            self.routing_key)

        self.test_parent_id = 'parent_id'
        self.test_config_type = 'config_type'

        self.test_data_str = 'test data'
        self.test_exception = PANICException('test_exception', 1)

        self.last_monitored = datetime(2012, 1, 1).timestamp()

        # Routing keys covering chain-specific, general and channel configs.
        self.routing_key_1 = 'chains.cosmos.cosmos.nodes_config'
        self.routing_key_2 = 'chains.cosmos.cosmos.alerts_config'
        self.routing_key_3 = 'chains.cosmos.cosmos.repos_config'

        self.routing_key_4 = 'general.repos_config'
        self.routing_key_5 = 'general.alerts_config'
        self.routing_key_6 = 'general.systems_config'

        self.routing_key_7 = 'channels.email_config'
        self.routing_key_8 = 'channels.pagerduty_config'
        self.routing_key_9 = 'channels.opsgenie_config'
        self.routing_key_10 = 'channels.telegram_config'
        self.routing_key_11 = 'channels.twilio_config'

        # Sample nodes configuration, keyed by node id.
        self.nodes_config_1 = {
            "node_3e0a5189-f474-4120-a0a4-d5ab817c0504": {
                "id": "node_3e0a5189-f474-4120-a0a4-d5ab817c0504",
                "parent_id": "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548",
                "name": "cosmos_sentry_1(46.166.146.165:9100)",
                "monitor_tendermint": "false",
                "monitor_rpc": "false",
                "monitor_prometheus": "false",
                "exporter_url": "http://46.166.146.165:9100/metrics",
                "monitor_system": "true",
                "is_validator": "false",
                "monitor_node": "true",
                "is_archive_node": "true",
                "use_as_data_source": "true"
            },
            "node_f8ebf267-9b53-4aa1-9c45-e84a9cba5fbc": {
                "id": "node_f8ebf267-9b53-4aa1-9c45-e84a9cba5fbc",
                "parent_id": "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548",
                "name": "cosmos_sentry_2(172.16.151.10:9100)",
                "monitor_tendermint": "false",
                "monitor_rpc": "false",
                "monitor_prometheus": "false",
                "exporter_url": "http://172.16.151.10:9100/metrics",
                "monitor_system": "true",
                "is_validator": "false",
                "monitor_node": "true",
                "is_archive_node": "true",
                "use_as_data_source": "true"
            }
        }

        # Sample GitHub repos configuration, keyed by repo id.
        self.repos_config_1 = {
            "repo_4ea76d87-d291-4b68-88af-da2bd1e16e2e": {
                "id": "repo_4ea76d87-d291-4b68-88af-da2bd1e16e2e",
                "parent_id": "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548",
                "repo_name": "tendermint/tendermint/",
                "monitor_repo": "true"
            },
            "repo_83713022-4155-420b-ada1-73a863f58282": {
                "id": "repo_83713022-4155-420b-ada1-73a863f58282",
                "parent_id": "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548",
                "repo_name": "SimplyVC/panic_cosmos/",
                "monitor_repo": "true"
            }
        }

        # Sample alerts configuration with warning/critical thresholds.
        self.alerts_config_1 = {
            "1": {
                "name": "open_file_descriptors",
                "enabled": "true",
                "parent_id": "GLOBAL",
                "critical_threshold": "95",
                "critical_repeat": "300",
                "critical_enabled": "true",
                "warning_threshold": "85",
                "warning_enabled": "true"
            },
            "2": {
                "name": "system_cpu_usage",
                "enabled": "true",
                "parent_id": "GLOBAL",
                "critical_threshold": "95",
                "critical_repeat": "300",
                "critical_enabled": "true",
                "warning_threshold": "85",
                "warning_enabled": "true"
            },
            "3": {
                "name": "system_storage_usage",
                "enabled": "true",
                "parent_id": "GLOBAL",
                "critical_threshold": "95",
                "critical_repeat": "300",
                "critical_enabled": "true",
                "warning_threshold": "85",
                "warning_enabled": "true"
            },
            "4": {
                "name": "system_ram_usage",
                "enabled": "true",
                "parent_id": "GLOBAL",
                "critical_threshold": "95",
                "critical_repeat": "300",
                "critical_enabled": "true",
                "warning_threshold": "85",
                "warning_enabled": "true"
            },
            "5": {
                "name": "system_is_down",
                "enabled": "true",
                "parent_id": "GLOBAL",
                "critical_threshold": "200",
                "critical_repeat": "300",
                "critical_enabled": "true",
                "warning_threshold": "0",
                "warning_enabled": "true"
            }
        }

        # Sample systems configuration, keyed by system id.
        self.systems_config_1 = {
            "system_1d026af1-6cab-403d-8256-c8faa462930a": {
                "id": "system_1d026af1-6cab-403d-8256-c8faa462930a",
                "parent_id": "GLOBAL",
                "name": "matic_full_node_nl(172.26.10.137:9100)",
                "exporter_url": "http://172.26.10.137:9100/metrics",
                "monitor_system": "true"
            },
            "system_a51b3a33-cb3f-4f53-a657-8a5a0efe0822": {
                "id": "system_a51b3a33-cb3f-4f53-a657-8a5a0efe0822",
                "parent_id": "GLOBAL",
                "name": "matic_full_node_mt(172.16.152.137:9100)",
                "exporter_url": "http://172.16.152.137:9100/metrics",
                "monitor_system": "true"
            }
        }

        # Sample alert-channel configurations (dummy credentials).
        self.telegram_config_1 = {
            "telegram_8431a28e-a2ce-4e9b-839c-299b62e3d5b9": {
                "id": "telegram_8431a28e-a2ce-4e9b-839c-299b62e3d5b9",
                "channel_name": "telegram_chat_1",
                "bot_token": "1277777773:AAF-78AENtsYXxxdqTL3Ip987N7gmIKJaBE",
                "chat_id": "-759538717",
                "info": "true",
                "warning": "true",
                "critical": "true",
                "error": "true",
                "alerts": "false",
                "commands": "false",
                "parent_ids":
                "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548,chain_name_94aafe04-8287-463a-8416-0401852b3ca2,GLOBAL",
                "parent_names": "cosmos,kusama,GLOBAL"
            }
        }

        self.twilio_config_1 = {
            "twilio_a7016a6b-9394-4584-abe3-5a5c434b6b7c": {
                "id": "twilio_a7016a6b-9394-4584-abe3-5a5c434b6b7c",
                "channel_name": "twilio_caller_main",
                "account_sid": "ACb77777284e97e49eb2260aada0220e12",
                "auth_token": "d19f777777a0b8e274470d599e5bcc5e8",
                "twilio_phone_no": "+19893077770",
                "twilio_phone_numbers_to_dial_valid": "+35697777380",
                "parent_ids":
                "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548,chain_name_94aafe04-8287-463a-8416-0401852b3ca2,GLOBAL",
                "parent_names": "cosmos,kusama,GLOBAL"
            }
        }

        self.pagerduty_config_1 = {
            "pagerduty_4092d0ed-ac45-462b-b62a-89cffd4833cc": {
                "id": "pagerduty_4092d0ed-ac45-462b-b62a-89cffd4833cc",
                "channel_name": "pager_duty_1",
                "api_token": "meVp_vyQybcX7dA3o1fS",
                "integration_key": "4a520ce3577777ad89a3518096f3a5189",
                "info": "true",
                "warning": "true",
                "critical": "true",
                "error": "true",
                "parent_ids":
                "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548,chain_name_94aafe04-8287-463a-8416-0401852b3ca2,GLOBAL",
                "parent_names": "cosmos,kusama,GLOBAL"
            }
        }

        self.opsgenie_config_1 = {
            "opsgenie_9550bee1-5880-41f6-bdcf-a289472d7c35": {
                "id": "opsgenie_9550bee1-5880-41f6-bdcf-a289472d7c35",
                "channel_name": "ops_genie_main",
                "api_token": "77777777-0708-4b7e-a46f-496c85fa0b06",
                "eu": "true",
                "info": "true",
                "warning": "true",
                "critical": "true",
                "error": "true",
                "parent_ids":
                "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548,chain_name_94aafe04-8287-463a-8416-0401852b3ca2,GLOBAL",
                "parent_names": "cosmos,kusama,GLOBAL"
            }
        }

        self.email_config_1 = {
            "email_01b23d79-10f5-4815-a11f-034f53974b23": {
                "id": "email_01b23d79-10f5-4815-a11f-034f53974b23",
                "channel_name": "main_email_channel",
                "port": "25",
                "smtp": "exchange.olive.com",
                "email_from": "*****@*****.**",
                "emails_to": "*****@*****.**",
                "info": "true",
                "warning": "true",
                "critical": "true",
                "error": "true",
                "parent_ids":
                "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548,chain_name_94aafe04-8287-463a-8416-0401852b3ca2,GLOBAL",
                "parent_names": "cosmos,kusama,GLOBAL"
            }
        }

        # Payload with a top-level key the store does not recognise.
        self.config_data_unexpected = {"unexpected": {}}

    def tearDown(self) -> None:
        """Purge RabbitMQ queues/exchanges, wipe Redis, and drop references.

        NOTE(review): relies on attributes (self.redis,
        self.test_rabbit_manager, ...) created in setUp, which is outside
        this view — confirm against the full file.
        """
        # Delete everything this suite may have declared so no broker state
        # leaks into the next test; deletion requires a live connection.
        connect_to_rabbit(self.rabbitmq)
        delete_queue_if_exists(self.rabbitmq, STORE_CONFIGS_QUEUE_NAME)
        delete_exchange_if_exists(self.rabbitmq, CONFIG_EXCHANGE)
        delete_exchange_if_exists(self.rabbitmq, HEALTH_CHECK_EXCHANGE)
        disconnect_from_rabbit(self.rabbitmq)

        # The auxiliary connection owns only the test queue.
        connect_to_rabbit(self.test_rabbit_manager)
        delete_queue_if_exists(self.test_rabbit_manager, self.test_queue_name)
        disconnect_from_rabbit(self.test_rabbit_manager)

        # Clear every Redis key written during the test, then release all
        # fixture references so each test starts from a clean slate.
        self.redis.delete_all_unsafe()
        self.redis = None
        self.dummy_logger = None
        self.connection_check_time_interval = None
        self.rabbitmq = None
        self.test_rabbit_manager = None

    def test__str__returns_name_correctly(self) -> None:
        """str(store) should yield the store's configured name."""
        rendered = str(self.test_store)
        self.assertEqual(self.test_store_name, rendered)

    def test_name_property_returns_name_correctly(self) -> None:
        """The name property should expose the configured store name."""
        exposed_name = self.test_store.name
        self.assertEqual(self.test_store_name, exposed_name)

    def test_mongo_ip_property_returns_mongo_ip_correctly(self) -> None:
        """mongo_ip should expose the IP the store was configured with."""
        exposed_ip = self.test_store.mongo_ip
        self.assertEqual(self.mongo_ip, exposed_ip)

    def test_mongo_db_property_returns_mongo_db_correctly(self) -> None:
        """mongo_db should expose the database the store was configured
        with."""
        exposed_db = self.test_store.mongo_db
        self.assertEqual(self.mongo_db, exposed_db)

    def test_mongo_port_property_returns_mongo_port_correctly(self) -> None:
        """mongo_port should expose the port the store was configured with."""
        exposed_port = self.test_store.mongo_port
        self.assertEqual(self.mongo_port, exposed_port)

    def test_redis_property_returns_redis_correctly(self) -> None:
        """The store's redis attribute should be of the same RedisApi type
        as the fixture's redis instance."""
        expected_type = type(self.redis)
        actual_type = type(self.test_store.redis)
        self.assertEqual(expected_type, actual_type)

    def test_mongo_property_returns_none_when_mongo_not_init(self) -> None:
        """mongo stays None until a Mongo connection is initialised."""
        mongo_api = self.test_store.mongo
        self.assertEqual(None, mongo_api)

    def test_initialise_rabbitmq_initialises_everything_as_expected(
            self) -> None:
        """_initialise_rabbitmq must open a confirmed-delivery connection,
        declare both the CONFIG and HEALTH_CHECK exchanges, and declare/bind
        the store-configs queue.
        """
        try:
            # To make sure that the exchanges have not already been declared
            self.rabbitmq.connect()
            self.rabbitmq.exchange_delete(HEALTH_CHECK_EXCHANGE)
            self.rabbitmq.exchange_delete(CONFIG_EXCHANGE)
            self.rabbitmq.disconnect()

            self.test_store._initialise_rabbitmq()

            # Perform checks that the connection has been opened, marked as open
            # and that the delivery confirmation variable is set.
            self.assertTrue(self.test_store.rabbitmq.is_connected)
            self.assertTrue(self.test_store.rabbitmq.connection.is_open)
            self.assertTrue(
                self.test_store.rabbitmq.channel._delivery_confirmation)

            # Check whether the producing exchanges have been created by
            # using passive=True. If this check fails an exception is raised
            # automatically.
            self.test_store.rabbitmq.exchange_declare(CONFIG_EXCHANGE,
                                                      passive=True)
            self.test_store.rabbitmq.exchange_declare(HEALTH_CHECK_EXCHANGE,
                                                      passive=True)

            # Check whether the exchange has been created by sending messages
            # to it. If this fails an exception is raised, hence the test fails.
            self.test_store.rabbitmq.basic_publish_confirm(
                exchange=HEALTH_CHECK_EXCHANGE,
                routing_key=self.routing_key,
                body=self.test_data_str,
                is_body_dict=False,
                properties=pika.BasicProperties(delivery_mode=2),
                mandatory=False)
            # Check whether the exchange has been created by sending messages
            # to it. If this fails an exception is raised, hence the test fails.
            self.test_store.rabbitmq.basic_publish_confirm(
                exchange=CONFIG_EXCHANGE,
                routing_key=STORE_CONFIGS_ROUTING_KEY_CHAINS,
                body=self.test_data_str,
                is_body_dict=False,
                properties=pika.BasicProperties(delivery_mode=2),
                mandatory=False)

            # Re-declare queue to get the number of messages. Only the message
            # published to CONFIG_EXCHANGE should have been routed to it.
            res = self.test_store.rabbitmq.queue_declare(
                STORE_CONFIGS_QUEUE_NAME, False, True, False, False)

            self.assertEqual(1, res.method.message_count)
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @mock.patch("src.data_store.stores.store.RedisApi.hset", autospec=True)
    def test_process_redis_store_does_nothing_on_error_key(self,
                                                           mock_hset) -> None:
        """An unrecognised config key must not trigger any Redis write."""
        self.test_store._process_redis_store(
            self.test_parent_id, self.config_data_unexpected)
        mock_hset.assert_not_called()

    @parameterized.expand([
        ("self.nodes_config_1", "self.routing_key_1"),
        ("self.alerts_config_1", "self.routing_key_2"),
        ("self.repos_config_1", "self.routing_key_3"),
        ("self.repos_config_1", "self.routing_key_4"),
        ("self.alerts_config_1", "self.routing_key_5"),
        ("self.systems_config_1", "self.routing_key_6"),
        ("self.email_config_1", "self.routing_key_7"),
        ("self.pagerduty_config_1", "self.routing_key_8"),
        ("self.opsgenie_config_1", "self.routing_key_9"),
        ("self.telegram_config_1", "self.routing_key_10"),
        ("self.twilio_config_1", "self.routing_key_11"),
    ])
    @mock.patch("src.data_store.stores.store.RabbitMQApi.basic_ack",
                autospec=True)
    @mock.patch("src.data_store.stores.store.Store._send_heartbeat",
                autospec=True)
    def test_process_data_saves_in_redis(self, mock_config_data,
                                         mock_routing_key, mock_send_hb,
                                         mock_ack) -> None:
        """_process_data must persist the received config in Redis under its
        routing key, ack the message and send a heartbeat.

        The parameterized arguments are attribute-path strings, so they are
        resolved with eval() against self (test-controlled input only — not
        untrusted data).
        """
        self.rabbitmq.connect()
        mock_ack.return_value = None
        try:
            data = eval(mock_config_data)
            routing_key = eval(mock_routing_key)

            self.test_store._initialise_rabbitmq()

            blocking_channel = self.test_store.rabbitmq.channel
            # Reuse the already-evaluated routing key instead of eval'ing the
            # parameter string a second time.
            method_chains = pika.spec.Basic.Deliver(routing_key=routing_key)

            properties = pika.spec.BasicProperties()
            self.test_store._process_data(blocking_channel, method_chains,
                                          properties,
                                          json.dumps(data).encode())
            mock_ack.assert_called_once()
            mock_send_hb.assert_called_once()

            # The stored config must round-trip from Redis unchanged.
            self.assertEqual(
                data,
                json.loads(
                    self.redis.get(
                        Keys.get_config(routing_key)).decode("utf-8")))

        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @freeze_time("2012-01-01")
    @mock.patch("src.data_store.stores.store.RabbitMQApi.basic_ack",
                autospec=True)
    @mock.patch(
        "src.data_store.stores.config.ConfigStore._process_redis_store",
        autospec=True)
    def test_process_data_sends_heartbeat_correctly(self,
                                                    mock_process_redis_store,
                                                    mock_basic_ack) -> None:
        """After successfully processing a config message, _process_data must
        publish exactly one valid heartbeat on the health-check exchange."""
        mock_basic_ack.return_value = None
        try:
            self.test_rabbit_manager.connect()
            self.test_store._initialise_rabbitmq()

            # Start from an empty, freshly declared queue bound to the
            # heartbeat routing key so only the new heartbeat can land in it.
            self.test_rabbit_manager.queue_delete(self.test_queue_name)
            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name,
                durable=True,
                exclusive=False,
                auto_delete=False,
                passive=False)
            self.assertEqual(0, res.method.message_count)

            self.test_rabbit_manager.queue_bind(queue=self.test_queue_name,
                                                exchange=HEALTH_CHECK_EXCHANGE,
                                                routing_key=self.routing_key)

            blocking_channel = self.test_store.rabbitmq.channel
            method_chains = pika.spec.Basic.Deliver(
                routing_key=self.routing_key_1)

            properties = pika.spec.BasicProperties()
            self.test_store._process_data(
                blocking_channel, method_chains, properties,
                json.dumps(self.nodes_config_1).encode())

            # Re-declaring the queue passively returns the current message
            # count: exactly one heartbeat is expected.
            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name,
                durable=True,
                exclusive=False,
                auto_delete=False,
                passive=True)
            self.assertEqual(1, res.method.message_count)

            # The timestamp is deterministic thanks to @freeze_time above.
            heartbeat_test = {
                'component_name': self.test_store_name,
                'is_alive': True,
                'timestamp': datetime(2012, 1, 1).timestamp()
            }

            _, _, body = self.test_rabbit_manager.basic_get(
                self.test_queue_name)
            self.assertEqual(heartbeat_test, json.loads(body))
            mock_process_redis_store.assert_called_once()
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @mock.patch("src.data_store.stores.store.RabbitMQApi.basic_ack",
                autospec=True)
    def test_process_data_doesnt_send_heartbeat_on_processing_error(
            self, mock_basic_ack) -> None:
        """If processing fails (forced here by delivering with
        routing_key=None), no heartbeat may reach the health-check exchange."""
        mock_basic_ack.return_value = None
        try:
            self.test_rabbit_manager.connect()
            self.test_store._initialise_rabbitmq()

            # Start from an empty queue bound to the heartbeat routing key so
            # any heartbeat that is (wrongly) sent would be observable.
            self.test_rabbit_manager.queue_delete(self.test_queue_name)
            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name,
                durable=True,
                exclusive=False,
                auto_delete=False,
                passive=False)
            self.assertEqual(0, res.method.message_count)

            self.test_rabbit_manager.queue_bind(queue=self.test_queue_name,
                                                exchange=HEALTH_CHECK_EXCHANGE,
                                                routing_key=self.routing_key)

            blocking_channel = self.test_store.rabbitmq.channel
            # A None routing key makes _process_data fail internally.
            method_chains = pika.spec.Basic.Deliver(routing_key=None)

            properties = pika.spec.BasicProperties()
            self.test_store._process_data(
                blocking_channel, method_chains, properties,
                json.dumps(self.nodes_config_1).encode())

            # Passive re-declare returns the message count: still zero.
            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name,
                durable=True,
                exclusive=False,
                auto_delete=False,
                passive=True)
            self.assertEqual(0, res.method.message_count)
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @parameterized.expand([
        ("self.nodes_config_1", "self.routing_key_1"),
        ("self.alerts_config_1", "self.routing_key_2"),
        ("self.repos_config_1", "self.routing_key_3"),
        ("self.repos_config_1", "self.routing_key_4"),
        ("self.alerts_config_1", "self.routing_key_5"),
        ("self.systems_config_1", "self.routing_key_6"),
        ("self.email_config_1", "self.routing_key_7"),
        ("self.pagerduty_config_1", "self.routing_key_8"),
        ("self.opsgenie_config_1", "self.routing_key_9"),
        ("self.telegram_config_1", "self.routing_key_10"),
        ("self.twilio_config_1", "self.routing_key_11"),
    ])
    @mock.patch("src.data_store.stores.store.RabbitMQApi.basic_ack",
                autospec=True)
    @mock.patch("src.data_store.stores.store.Store._send_heartbeat",
                autospec=True)
    def test_process_data_saves_in_redis_then_removes_it_on_empty_config(
            self, mock_config_data, mock_routing_key, mock_send_hb,
            mock_ack) -> None:
        """A config message is stored under its routing key; a subsequent
        empty ({}) config on the same key must delete the stored entry.

        The parameterized arguments are attribute-path strings resolved with
        eval() against self (test-controlled input only).
        """
        self.rabbitmq.connect()
        mock_ack.return_value = None
        try:
            data = eval(mock_config_data)
            routing_key = eval(mock_routing_key)

            self.test_store._initialise_rabbitmq()

            blocking_channel = self.test_store.rabbitmq.channel
            method_chains = pika.spec.Basic.Deliver(routing_key=routing_key)

            properties = pika.spec.BasicProperties()
            self.test_store._process_data(blocking_channel, method_chains,
                                          properties,
                                          json.dumps(data).encode())
            mock_ack.assert_called_once()
            mock_send_hb.assert_called_once()

            # First delivery: the config must round-trip from Redis unchanged.
            self.assertEqual(
                data,
                json.loads(
                    self.redis.get(
                        Keys.get_config(routing_key)).decode("utf-8")))

            # Second delivery with an empty config on the same routing key.
            self.test_store._process_data(blocking_channel, method_chains,
                                          properties,
                                          json.dumps({}).encode())

            # The previously stored entry must now be gone.
            self.assertEqual(None,
                             self.redis.get(Keys.get_config(routing_key)))

        except Exception as e:
            self.fail("Test failed: {}".format(e))
Ejemplo n.º 6
0
class TestStoreManager(unittest.TestCase):
    def setUp(self) -> None:
        """Build a StoreManager with two independent Rabbit connections and
        pre-declare the exchange/queues the tests rely on."""
        self.dummy_logger = logging.getLogger('Dummy')
        self.dummy_logger.disabled = True
        # Zero interval so connection liveness is re-checked on every call.
        self.connection_check_time_interval = timedelta(seconds=0)
        self.rabbitmq = RabbitMQApi(
            self.dummy_logger,
            env.RABBIT_IP,
            connection_check_time_interval=self.connection_check_time_interval)

        # Separate connection used by the tests' own assertions so they do
        # not interfere with the manager's channel.
        self.test_rabbit_manager = RabbitMQApi(
            self.dummy_logger,
            env.RABBIT_IP,
            connection_check_time_interval=self.connection_check_time_interval)

        self.manager_name = 'test_store_manager'
        self.routing_key = 'heartbeat.manager'
        self.test_queue_name = 'test queue'
        self.test_store_manager = StoreManager(self.dummy_logger,
                                               self.manager_name,
                                               self.rabbitmq)

        # Adding dummy process (daemon so it dies with the test run)
        self.dummy_process = Process(target=infinite_fn, args=())
        self.dummy_process.daemon = True

        # Declare the exchange/queues up-front and bind the manager's input
        # queue to the health-check exchange.
        connect_to_rabbit(self.rabbitmq)
        connect_to_rabbit(self.test_rabbit_manager)
        self.rabbitmq.exchange_declare(HEALTH_CHECK_EXCHANGE, 'topic', False,
                                       True, False, False)
        self.rabbitmq.queue_declare(DATA_STORE_MAN_INPUT_QUEUE, False, True,
                                    False, False)
        self.test_rabbit_manager.queue_declare(self.test_queue_name, False,
                                               True, False, False)
        self.rabbitmq.queue_bind(DATA_STORE_MAN_INPUT_QUEUE,
                                 HEALTH_CHECK_EXCHANGE,
                                 DATA_STORE_MAN_INPUT_ROUTING_KEY)

        self.test_data_str = 'test data'
        # Timestamp matches the @freeze_time("2012-01-01") used by the tests.
        self.test_heartbeat = {
            'component_name': self.manager_name,
            'is_alive': True,
            'timestamp': datetime(2012, 1, 1).timestamp(),
        }
        self.test_exception = PANICException('test_exception', 1)

    def tearDown(self) -> None:
        """Remove the declared queue/exchange and drop all references so no
        RabbitMQ state leaks into the next test."""
        connect_to_rabbit(self.rabbitmq)
        delete_queue_if_exists(self.rabbitmq, DATA_STORE_MAN_INPUT_QUEUE)
        delete_exchange_if_exists(self.rabbitmq, HEALTH_CHECK_EXCHANGE)
        disconnect_from_rabbit(self.rabbitmq)

        # The auxiliary connection owns only the test queue.
        connect_to_rabbit(self.test_rabbit_manager)
        delete_queue_if_exists(self.test_rabbit_manager, self.test_queue_name)
        disconnect_from_rabbit(self.test_rabbit_manager)

        self.dummy_logger = None
        self.dummy_process = None
        self.connection_check_time_interval = None
        self.rabbitmq = None
        self.test_rabbit_manager = None

    def test__str__returns_name_correctly(self) -> None:
        """str(manager) should yield the manager's configured name."""
        rendered = str(self.test_store_manager)
        self.assertEqual(self.manager_name, rendered)

    def test_name_property_returns_name_correctly(self) -> None:
        """The name property should expose the configured manager name."""
        exposed = self.test_store_manager.name
        self.assertEqual(self.manager_name, exposed)

    def test_logger_property_returns_logger_correctly(self) -> None:
        """The logger property should expose the logger given at
        construction."""
        exposed = self.test_store_manager.logger
        self.assertEqual(self.dummy_logger, exposed)

    def test_rabbitmq_property_returns_rabbitmq_correctly(self) -> None:
        """The rabbitmq property should expose the RabbitMQApi given at
        construction."""
        exposed = self.test_store_manager.rabbitmq
        self.assertEqual(self.rabbitmq, exposed)

    def test_initialise_rabbitmq_initialises_everything_as_expected(
            self) -> None:
        """_initialise_rabbitmq must open a confirmed-delivery connection and
        declare the HEALTH_CHECK exchange."""
        try:
            # To make sure that the exchanges have not already been declared
            self.rabbitmq.connect()
            self.rabbitmq.exchange_delete(HEALTH_CHECK_EXCHANGE)
            self.rabbitmq.disconnect()

            self.test_store_manager._initialise_rabbitmq()

            # Perform checks that the connection has been opened, marked as open
            # and that the delivery confirmation variable is set.
            self.assertTrue(self.test_store_manager.rabbitmq.is_connected)
            self.assertTrue(
                self.test_store_manager.rabbitmq.connection.is_open)
            self.assertTrue(self.test_store_manager.rabbitmq.channel.
                            _delivery_confirmation)

            # Check whether the producing exchanges have been created by
            # using passive=True. If this check fails an exception is raised
            # automatically.
            self.test_store_manager.rabbitmq.exchange_declare(
                HEALTH_CHECK_EXCHANGE, passive=True)

            self.test_rabbit_manager.queue_declare(self.test_queue_name, False,
                                                   True, False, False)
            self.test_rabbit_manager.queue_bind(self.test_queue_name,
                                                HEALTH_CHECK_EXCHANGE,
                                                self.routing_key)

            # Check whether the exchange has been created by sending messages
            # to it. If this fails an exception is raised, hence the test fails.
            self.test_store_manager.rabbitmq.basic_publish_confirm(
                exchange=HEALTH_CHECK_EXCHANGE,
                routing_key=self.routing_key,
                body=self.test_data_str,
                is_body_dict=False,
                properties=pika.BasicProperties(delivery_mode=2),
                mandatory=False)

            # Re-declaring the queue returns the message count: exactly one
            # message should have been routed to it.
            res = self.test_rabbit_manager.queue_declare(
                self.test_queue_name, False, True, False, False)
            self.assertEqual(1, res.method.message_count)
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    def test_send_heartbeat_sends_a_heartbeat_correctly(self) -> None:
        """_send_heartbeat must publish self.test_heartbeat, unchanged, on
        the health-check exchange."""
        # This test creates a queue which receives messages with the same
        # routing key as the ones sent by send_heartbeat, and checks that the
        # heartbeat is received
        try:
            self.test_store_manager._initialise_rabbitmq()
            self.test_rabbit_manager.connect()

            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name,
                durable=True,
                exclusive=False,
                auto_delete=False,
                passive=False)
            self.assertEqual(0, res.method.message_count)
            self.test_store_manager.rabbitmq.queue_bind(
                queue=self.test_queue_name,
                exchange=HEALTH_CHECK_EXCHANGE,
                routing_key=self.routing_key)
            self.test_store_manager._send_heartbeat(self.test_heartbeat)

            # By re-declaring the queue again we can get the number of messages
            # in the queue.
            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name,
                durable=True,
                exclusive=False,
                auto_delete=False,
                passive=True)
            self.assertEqual(1, res.method.message_count)

            # Check that the message received is actually the HB
            _, _, body = self.test_rabbit_manager.basic_get(
                self.test_queue_name)
            self.assertEqual(self.test_heartbeat, json.loads(body))
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @mock.patch.object(multiprocessing.Process, "start")
    def test_start_stores_processes_starts_system_store_correctly(
            self, mock_start) -> None:
        """The manager must register a daemon process targeting
        start_system_store with no positional arguments."""
        mock_start.return_value = None

        self.test_store_manager._start_stores_processes()

        # Inspect the registered (but not actually started) process entry.
        spawned = self.test_store_manager._store_process_dict[
            SYSTEM_STORE_NAME]

        self.assertTrue(spawned.daemon)
        self.assertEqual(0, len(spawned._args))
        self.assertEqual(start_system_store, spawned._target)

    @mock.patch.object(multiprocessing.Process, "start")
    def test_start_stores_processes_starts_github_store_correctly(
            self, mock_start) -> None:
        """The manager must register a daemon process targeting
        start_github_store with no positional arguments."""
        mock_start.return_value = None

        self.test_store_manager._start_stores_processes()

        # Inspect the registered (but not actually started) process entry.
        spawned = self.test_store_manager._store_process_dict[
            GITHUB_STORE_NAME]

        self.assertTrue(spawned.daemon)
        self.assertEqual(0, len(spawned._args))
        self.assertEqual(start_github_store, spawned._target)

    @mock.patch.object(multiprocessing.Process, "start")
    def test_start_stores_processes_starts_alert_store_correctly(
            self, mock_start) -> None:
        """The manager must register a daemon process targeting
        start_alert_store with no positional arguments."""
        mock_start.return_value = None

        self.test_store_manager._start_stores_processes()

        # Inspect the registered (but not actually started) process entry.
        spawned = self.test_store_manager._store_process_dict[
            ALERT_STORE_NAME]

        self.assertTrue(spawned.daemon)
        self.assertEqual(0, len(spawned._args))
        self.assertEqual(start_alert_store, spawned._target)

    @mock.patch.object(multiprocessing.Process, "start")
    def test_start_stores_processes_starts_config_store_correctly(
            self, mock_start) -> None:
        """The manager must register a daemon process targeting
        start_config_store with no positional arguments."""
        mock_start.return_value = None

        self.test_store_manager._start_stores_processes()

        # Inspect the registered (but not actually started) process entry.
        spawned = self.test_store_manager._store_process_dict[
            CONFIG_STORE_NAME]

        self.assertTrue(spawned.daemon)
        self.assertEqual(0, len(spawned._args))
        self.assertEqual(start_config_store, spawned._target)

    @mock.patch("src.data_store.starters.create_logger")
    def test_start_stores_processes_starts_the_processes_correctly(
            self, mock_create_logger) -> None:
        """With only the logger mocked, _start_stores_processes must spawn
        four live store processes (system, github, alert, config)."""
        mock_create_logger.return_value = self.dummy_logger
        self.test_store_manager._start_stores_processes()

        # We need to sleep to give some time for the stores to be initialised,
        # otherwise the process would not terminate
        time.sleep(1)

        # Each store process must be alive; terminate/join to clean up.
        new_system_process = self.test_store_manager._store_process_dict[
            SYSTEM_STORE_NAME]
        self.assertTrue(new_system_process.is_alive())
        new_system_process.terminate()
        new_system_process.join()

        new_github_process = self.test_store_manager._store_process_dict[
            GITHUB_STORE_NAME]
        self.assertTrue(new_github_process.is_alive())
        new_github_process.terminate()
        new_github_process.join()

        new_alert_process = self.test_store_manager._store_process_dict[
            ALERT_STORE_NAME]
        self.assertTrue(new_alert_process.is_alive())
        new_alert_process.terminate()
        new_alert_process.join()

        new_config_process = self.test_store_manager._store_process_dict[
            CONFIG_STORE_NAME]
        self.assertTrue(new_config_process.is_alive())
        new_config_process.terminate()
        new_config_process.join()

    @freeze_time("2012-01-01")
    @mock.patch("src.data_store.starters.create_logger")
    @mock.patch.object(RabbitMQApi, "basic_ack")
    def test_process_ping_sends_a_valid_hb_if_process_is_alive(
            self, mock_ack, mock_create_logger) -> None:
        """A ping while all four store processes are alive must produce a
        heartbeat listing them all as running and none as dead."""
        # This test creates a queue which receives messages with the same
        # routing key as the ones sent by send_heartbeat, and checks that the
        # received heartbeat is valid.
        mock_create_logger.return_value = self.dummy_logger
        mock_ack.return_value = None
        try:
            self.test_store_manager._initialise_rabbitmq()
            self.test_store_manager._start_stores_processes()

            # Give time for the processes to start
            time.sleep(1)

            self.test_rabbit_manager.queue_declare(queue=self.test_queue_name,
                                                   durable=True,
                                                   exclusive=False,
                                                   auto_delete=False,
                                                   passive=False)
            # Delete the queue before to avoid messages in the queue on error.
            self.test_rabbit_manager.queue_delete(self.test_queue_name)

            # initialise the delivery objects and an empty, bound queue
            blocking_channel = self.test_store_manager.rabbitmq.channel
            properties = pika.spec.BasicProperties()
            method_hb = pika.spec.Basic.Deliver(routing_key=self.routing_key)
            body = 'ping'
            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name,
                durable=True,
                exclusive=False,
                auto_delete=False,
                passive=False)
            self.assertEqual(0, res.method.message_count)
            self.test_rabbit_manager.queue_bind(queue=self.test_queue_name,
                                                exchange=HEALTH_CHECK_EXCHANGE,
                                                routing_key=self.routing_key)
            self.test_store_manager._process_ping(blocking_channel, method_hb,
                                                  properties, body)

            # Allow the heartbeat to reach the queue before counting.
            time.sleep(1)

            # By re-declaring the queue again we can get the number of messages
            # in the queue.
            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name,
                durable=True,
                exclusive=False,
                auto_delete=False,
                passive=True)
            self.assertEqual(1, res.method.message_count)
            # Timestamp is deterministic thanks to @freeze_time above.
            expected_output = {
                "component_name":
                self.manager_name,
                "dead_processes": [],
                "running_processes": [
                    SYSTEM_STORE_NAME, GITHUB_STORE_NAME, ALERT_STORE_NAME,
                    CONFIG_STORE_NAME
                ],
                "timestamp":
                datetime(2012, 1, 1).timestamp()
            }
            # Check that the message received is a valid HB
            _, _, body = self.test_rabbit_manager.basic_get(
                self.test_queue_name)
            self.assertEqual(expected_output, json.loads(body))

            # Clean before test finishes
            self.test_store_manager._store_process_dict[
                SYSTEM_STORE_NAME].terminate()
            self.test_store_manager._store_process_dict[
                GITHUB_STORE_NAME].terminate()
            self.test_store_manager._store_process_dict[
                ALERT_STORE_NAME].terminate()
            self.test_store_manager._store_process_dict[
                CONFIG_STORE_NAME].terminate()
            self.test_store_manager._store_process_dict[
                SYSTEM_STORE_NAME].join()
            self.test_store_manager._store_process_dict[
                GITHUB_STORE_NAME].join()
            self.test_store_manager._store_process_dict[ALERT_STORE_NAME].join(
            )
            self.test_store_manager._store_process_dict[
                CONFIG_STORE_NAME].join()
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @freeze_time("2012-01-01")
    @mock.patch("src.data_store.starters.create_logger")
    @mock.patch.object(RabbitMQApi, "basic_ack")
    def test_process_ping_sends_a_valid_hb_if_all_processes_are_dead(
            self, mock_ack, mock_create_logger) -> None:
        # This test creates a queue which receives messages with the same
        # routing key as the ones sent by send_heartbeat, and checks that the
        # received heartbeat is valid.
        mock_create_logger.return_value = self.dummy_logger
        mock_ack.return_value = None
        try:
            self.test_store_manager._initialise_rabbitmq()
            self.test_store_manager._start_stores_processes()

            # Give time for the processes to start
            time.sleep(1)

            self.test_store_manager._store_process_dict[
                SYSTEM_STORE_NAME].terminate()
            self.test_store_manager._store_process_dict[
                GITHUB_STORE_NAME].terminate()
            self.test_store_manager._store_process_dict[
                ALERT_STORE_NAME].terminate()
            self.test_store_manager._store_process_dict[
                CONFIG_STORE_NAME].terminate()
            self.test_store_manager._store_process_dict[
                SYSTEM_STORE_NAME].join()
            self.test_store_manager._store_process_dict[
                GITHUB_STORE_NAME].join()
            self.test_store_manager._store_process_dict[ALERT_STORE_NAME].join(
            )
            self.test_store_manager._store_process_dict[
                CONFIG_STORE_NAME].join()

            # Time for processes to terminate
            time.sleep(1)

            # Delete the queue before to avoid messages in the queue on error.
            self.test_rabbit_manager.queue_delete(self.test_queue_name)

            # initialise
            blocking_channel = self.test_store_manager.rabbitmq.channel
            properties = pika.spec.BasicProperties()
            method_hb = pika.spec.Basic.Deliver(routing_key=self.routing_key)
            body = 'ping'
            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name,
                durable=True,
                exclusive=False,
                auto_delete=False,
                passive=False)
            self.assertEqual(0, res.method.message_count)
            self.test_rabbit_manager.queue_bind(queue=self.test_queue_name,
                                                exchange=HEALTH_CHECK_EXCHANGE,
                                                routing_key=self.routing_key)
            self.test_store_manager._process_ping(blocking_channel, method_hb,
                                                  properties, body)

            time.sleep(1)

            # By re-declaring the queue again we can get the number of messages
            # in the queue.
            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name,
                durable=True,
                exclusive=False,
                auto_delete=False,
                passive=True)
            self.assertEqual(1, res.method.message_count)
            expected_output = {
                "component_name":
                self.manager_name,
                "dead_processes": [
                    SYSTEM_STORE_NAME, GITHUB_STORE_NAME, ALERT_STORE_NAME,
                    CONFIG_STORE_NAME
                ],
                "running_processes": [],
                "timestamp":
                datetime(2012, 1, 1).timestamp()
            }
            # Check that the message received is a valid HB
            _, _, body = self.test_rabbit_manager.basic_get(
                self.test_queue_name)
            self.assertEqual(expected_output, json.loads(body))

            # Clean before test finishes
            self.test_store_manager._store_process_dict[
                SYSTEM_STORE_NAME].terminate()
            self.test_store_manager._store_process_dict[
                GITHUB_STORE_NAME].terminate()
            self.test_store_manager._store_process_dict[
                ALERT_STORE_NAME].terminate()
            self.test_store_manager._store_process_dict[
                CONFIG_STORE_NAME].terminate()
            self.test_store_manager._store_process_dict[
                SYSTEM_STORE_NAME].join()
            self.test_store_manager._store_process_dict[
                GITHUB_STORE_NAME].join()
            self.test_store_manager._store_process_dict[ALERT_STORE_NAME].join(
            )
            self.test_store_manager._store_process_dict[
                CONFIG_STORE_NAME].join()
            self.rabbitmq.disconnect()
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @freeze_time("2012-01-01")
    @mock.patch.object(RabbitMQApi, "basic_ack")
    @mock.patch("src.data_store.starters.create_logger")
    @mock.patch.object(StoreManager, "_send_heartbeat")
    def test_process_ping_restarts_dead_processes(self, send_hb_mock,
                                                  mock_create_logger,
                                                  mock_ack) -> None:
        """A ping must re-spawn every store process that is found dead."""
        send_hb_mock.return_value = None
        mock_create_logger.return_value = self.dummy_logger
        mock_ack.return_value = None
        store_names = [SYSTEM_STORE_NAME, GITHUB_STORE_NAME, ALERT_STORE_NAME,
                       CONFIG_STORE_NAME]
        try:
            self.test_store_manager._initialise_rabbitmq()
            self.test_store_manager._start_stores_processes()

            # Give time for the processes to start
            time.sleep(1)

            # Kill every store process, then wait for each one to exit.
            for name in store_names:
                self.test_store_manager._store_process_dict[name].terminate()
            for name in store_names:
                self.test_store_manager._store_process_dict[name].join()

            # Give time for the processes to terminate
            time.sleep(1)

            # Confirm that every store process is now dead.
            for name in store_names:
                self.assertFalse(self.test_store_manager
                                 ._store_process_dict[name].is_alive())

            # Build and deliver a ping; the handler should detect the dead
            # processes and restart them.
            blocking_channel = self.test_store_manager.rabbitmq.channel
            properties = pika.spec.BasicProperties()
            method_hb = pika.spec.Basic.Deliver(routing_key=self.routing_key)
            body = 'ping'
            self.test_store_manager._process_ping(blocking_channel, method_hb,
                                                  properties, body)

            # Give time for the processes to start
            time.sleep(1)

            # Every store process must be alive again.
            for name in store_names:
                self.assertTrue(self.test_store_manager
                                ._store_process_dict[name].is_alive())

            # Clean before test finishes
            for name in store_names:
                self.test_store_manager._store_process_dict[name].terminate()
            for name in store_names:
                self.test_store_manager._store_process_dict[name].join()
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @mock.patch.object(multiprocessing.Process, "is_alive")
    @mock.patch.object(multiprocessing.Process, "start")
    @mock.patch.object(multiprocessing, 'Process')
    def test_process_ping_does_not_send_hb_if_processing_fails(
            self, mock_process, mock_start, is_alive_mock) -> None:
        """No heartbeat may reach the queue when ping processing raises.

        `is_alive` is mocked to raise, so `_process_ping` fails before the
        heartbeat publish is ever attempted; the bound queue must stay empty.
        """
        is_alive_mock.side_effect = self.test_exception
        mock_start.return_value = None
        mock_process.side_effect = self.dummy_process
        try:
            self.test_store_manager._initialise_rabbitmq()
            self.test_store_manager._start_stores_processes()

            time.sleep(1)
            # Drop any stale messages so the count below starts from zero.
            self.test_rabbit_manager.queue_delete(self.test_queue_name)

            # Freshly declare the queue and bind it to the heartbeat key.
            declare_res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name,
                durable=True,
                exclusive=False,
                auto_delete=False,
                passive=False)
            self.assertEqual(0, declare_res.method.message_count)
            self.test_rabbit_manager.queue_bind(queue=self.test_queue_name,
                                                exchange=HEALTH_CHECK_EXCHANGE,
                                                routing_key=self.routing_key)

            channel = self.test_store_manager.rabbitmq.channel
            deliver_method = pika.spec.Basic.Deliver(
                routing_key=self.routing_key)
            self.test_store_manager._process_ping(
                channel, deliver_method, pika.spec.BasicProperties(), 'ping')

            time.sleep(1)
            # A passive re-declare exposes the current message count, which
            # must still be zero since no heartbeat was sent.
            declare_res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name,
                durable=True,
                exclusive=False,
                auto_delete=False,
                passive=True)
            self.assertEqual(0, declare_res.method.message_count)
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    def test_proc_ping_send_hb_does_not_raise_msg_not_del_exce_if_hb_not_routed(
            self) -> None:
        """A heartbeat published with an unroutable key must not raise."""
        try:
            self.test_store_manager._initialise_rabbitmq()
            self.test_store_manager._start_stores_processes()

            time.sleep(1)

            # Deliver a ping whose routing key no queue is bound to, so the
            # resulting heartbeat cannot be routed anywhere.
            channel = self.test_store_manager.rabbitmq.channel
            deliver_method = pika.spec.Basic.Deliver(
                routing_key='heartbeat.manager')

            # Must complete without raising, despite the undeliverable HB.
            self.test_store_manager._process_ping(
                channel, deliver_method, pika.spec.BasicProperties(), 'ping')
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @parameterized.expand([("pika.exceptions.AMQPChannelError('test')",
                            "pika.exceptions.AMQPChannelError"),
                           ("self.test_exception", "PANICException"),
                           ("pika.exceptions.AMQPConnectionError",
                            "pika.exceptions.AMQPConnectionError")])
    @mock.patch.object(StoreManager, "_send_heartbeat")
    def test_process_ping_send_hb_raises_exceptions(self, param_input,
                                                    param_expected,
                                                    hb_mock) -> None:
        """Exceptions raised while sending the heartbeat must propagate."""
        # The parameterized strings are eval'd here so `self` is in scope.
        hb_mock.side_effect = eval(param_input)
        try:
            self.test_store_manager._initialise_rabbitmq()

            # Build a minimal, valid ping message.
            channel = self.test_store_manager.rabbitmq.channel
            deliver_method = pika.spec.Basic.Deliver(
                routing_key=self.routing_key)
            properties = pika.spec.BasicProperties()

            self.assertRaises(eval(param_expected),
                              self.test_store_manager._process_ping,
                              channel, deliver_method, properties, 'ping')
        except Exception as e:
            self.fail("Test failed: {}".format(e))
Ejemplo n.º 7
0
class TestGithubStore(unittest.TestCase):
    """Integration tests for GithubStore.

    These tests run against live RabbitMQ and Redis instances whose
    locations are read from the environment, so they are integration
    tests rather than pure unit tests.
    """

    def setUp(self) -> None:
        # Disabled logger: the tests assert on behaviour, not on log output.
        self.dummy_logger = logging.getLogger('Dummy')
        self.dummy_logger.disabled = True
        self.connection_check_time_interval = timedelta(seconds=0)
        self.rabbit_ip = env.RABBIT_IP
        # Rabbit interface injected into the store under test.
        self.rabbitmq = RabbitMQApi(
            self.dummy_logger,
            self.rabbit_ip,
            connection_check_time_interval=self.connection_check_time_interval)

        # Separate Rabbit interface used by the tests to inspect queues.
        self.test_rabbit_manager = RabbitMQApi(
            self.dummy_logger,
            self.rabbit_ip,
            connection_check_time_interval=self.connection_check_time_interval)

        self.redis_db = env.REDIS_DB
        self.redis_host = env.REDIS_IP
        self.redis_port = env.REDIS_PORT
        self.redis_namespace = env.UNIQUE_ALERTER_IDENTIFIER
        self.redis = RedisApi(self.dummy_logger, self.redis_db,
                              self.redis_host, self.redis_port, '',
                              self.redis_namespace,
                              self.connection_check_time_interval)

        # Mongo settings are only read for the property tests below; no
        # Mongo connection is opened here.
        self.mongo_ip = env.DB_IP
        self.mongo_db = env.DB_NAME
        self.mongo_port = env.DB_PORT

        self.test_store_name = 'store name'
        self.test_store = GithubStore(self.test_store_name, self.dummy_logger,
                                      self.rabbitmq)

        self.routing_key = 'heartbeat.worker'
        self.test_queue_name = 'test queue'

        # Declare the exchanges/queues the store produces to and consumes
        # from, so every test starts from a known topology.
        connect_to_rabbit(self.rabbitmq)
        self.rabbitmq.exchange_declare(HEALTH_CHECK_EXCHANGE, 'topic', False,
                                       True, False, False)
        self.rabbitmq.exchange_declare(STORE_EXCHANGE, 'direct', False, True,
                                       False, False)
        self.rabbitmq.queue_declare(GITHUB_STORE_INPUT_QUEUE, False, True,
                                    False, False)
        self.rabbitmq.queue_bind(GITHUB_STORE_INPUT_QUEUE, STORE_EXCHANGE,
                                 GITHUB_STORE_INPUT_ROUTING_KEY)

        # Test-side queue bound to the heartbeat routing key so the tests
        # can observe heartbeats emitted by the store.
        connect_to_rabbit(self.test_rabbit_manager)
        self.test_rabbit_manager.queue_declare(self.test_queue_name, False,
                                               True, False, False)
        self.test_rabbit_manager.queue_bind(self.test_queue_name,
                                            HEALTH_CHECK_EXCHANGE,
                                            self.routing_key)

        self.test_data_str = 'test data'
        self.test_exception = PANICException('test_exception', 1)

        # Two dummy repositories used across the data fixtures below.
        self.repo_name = 'simplyvc/panic/'
        self.repo_id = 'test_repo_id'
        self.parent_id = 'test_parent_id'

        self.repo_name_2 = 'simplyvc/panic_oasis/'
        self.repo_id_2 = 'test_repo_id_2'
        self.parent_id_2 = 'test_parent_id_2'

        self.last_monitored = datetime(2012, 1, 1).timestamp()
        # Well-formed monitor result for repo 1 (release count increased).
        self.github_data_1 = {
            "result": {
                "meta_data": {
                    "repo_name": self.repo_name,
                    "repo_id": self.repo_id,
                    "repo_parent_id": self.parent_id,
                    "last_monitored": self.last_monitored
                },
                "data": {
                    "no_of_releases": {
                        "current": 5,
                        "previous": 4,
                    }
                }
            }
        }
        # Well-formed monitor result for repo 1 (release count unchanged).
        self.github_data_2 = {
            "result": {
                "meta_data": {
                    "repo_name": self.repo_name,
                    "repo_id": self.repo_id,
                    "repo_parent_id": self.parent_id,
                    "last_monitored": self.last_monitored
                },
                "data": {
                    "no_of_releases": {
                        "current": 5,
                        "previous": 5,
                    }
                }
            }
        }
        # Well-formed monitor result for repo 2.
        self.github_data_3 = {
            "result": {
                "meta_data": {
                    "repo_name": self.repo_name_2,
                    "repo_id": self.repo_id_2,
                    "repo_parent_id": self.parent_id_2,
                    "last_monitored": self.last_monitored
                },
                "data": {
                    "no_of_releases": {
                        "current": 8,
                        "previous": 1,
                    }
                }
            }
        }
        # Monitor error payload: stores must ignore it (no Redis write).
        self.github_data_error = {
            "error": {
                "meta_data": {
                    "repo_name": self.repo_name,
                    "repo_id": self.repo_id,
                    "repo_parent_id": self.parent_id,
                    "time": self.last_monitored
                },
                "code": "5006",
                "message": "error message"
            }
        }
        # Malformed result: expected keys are misplaced, triggering KeyError
        # inside the store's processing.
        self.github_data_key_error = {
            "result": {
                "data": {
                    "repo_name": self.repo_name_2,
                    "repo_id": self.repo_id_2,
                    "repo_parent_id": self.parent_id_2,
                    "last_monitored": self.last_monitored
                },
                "wrong_data": {
                    "no_of_releases": {
                        "current": 8,
                        "previous": 1,
                    }
                }
            }
        }
        # Payload with neither "result" nor "error" top-level key.
        self.github_data_unexpected = {"unexpected": {}}

    def tearDown(self) -> None:
        # Remove the Rabbit topology created in setUp so tests stay isolated.
        connect_to_rabbit(self.rabbitmq)
        delete_queue_if_exists(self.rabbitmq, GITHUB_STORE_INPUT_QUEUE)
        delete_exchange_if_exists(self.rabbitmq, STORE_EXCHANGE)
        delete_exchange_if_exists(self.rabbitmq, HEALTH_CHECK_EXCHANGE)
        disconnect_from_rabbit(self.rabbitmq)

        connect_to_rabbit(self.test_rabbit_manager)
        delete_queue_if_exists(self.test_rabbit_manager, self.test_queue_name)
        disconnect_from_rabbit(self.test_rabbit_manager)

        # Wipe Redis so stored metrics do not leak between tests.
        self.redis.delete_all_unsafe()
        self.redis = None
        self.dummy_logger = None
        self.connection_check_time_interval = None
        self.rabbitmq = None
        self.test_rabbit_manager = None

    def test__str__returns_name_correctly(self) -> None:
        """str(store) must be the name given at construction."""
        self.assertEqual(self.test_store_name, str(self.test_store))

    def test_name_property_returns_name_correctly(self) -> None:
        """The name property must echo the constructor argument."""
        self.assertEqual(self.test_store_name, self.test_store.name)

    def test_mongo_ip_property_returns_mongo_ip_correctly(self) -> None:
        """mongo_ip must match the environment-configured DB IP."""
        self.assertEqual(self.mongo_ip, self.test_store.mongo_ip)

    def test_mongo_db_property_returns_mongo_db_correctly(self) -> None:
        """mongo_db must match the environment-configured DB name."""
        self.assertEqual(self.mongo_db, self.test_store.mongo_db)

    def test_mongo_port_property_returns_mongo_port_correctly(self) -> None:
        """mongo_port must match the environment-configured DB port."""
        self.assertEqual(self.mongo_port, self.test_store.mongo_port)

    def test_redis_property_returns_redis_correctly(self) -> None:
        """The store's redis property must be a RedisApi instance."""
        self.assertEqual(type(self.redis), type(self.test_store.redis))

    def test_mongo_property_returns_none_when_mongo_not_init(self) -> None:
        """mongo must be None until it is explicitly initialised."""
        self.assertEqual(None, self.test_store.mongo)

    def test_initialise_rabbitmq_initialises_everything_as_expected(
            self) -> None:
        """_initialise_rabbitmq must connect, enable delivery confirmation
        and declare both the store and health-check exchanges."""
        try:
            # To make sure that the exchanges have not already been declared
            self.rabbitmq.connect()
            self.rabbitmq.exchange_delete(HEALTH_CHECK_EXCHANGE)
            self.rabbitmq.exchange_delete(STORE_EXCHANGE)
            self.rabbitmq.disconnect()

            self.test_store._initialise_rabbitmq()

            # Perform checks that the connection has been opened, marked as open
            # and that the delivery confirmation variable is set.
            self.assertTrue(self.test_store.rabbitmq.is_connected)
            self.assertTrue(self.test_store.rabbitmq.connection.is_open)
            self.assertTrue(
                self.test_store.rabbitmq.channel._delivery_confirmation)

            # Check whether the producing exchanges have been created by
            # using passive=True. If this check fails an exception is raised
            # automatically.
            self.test_store.rabbitmq.exchange_declare(STORE_EXCHANGE,
                                                      passive=True)
            self.test_store.rabbitmq.exchange_declare(HEALTH_CHECK_EXCHANGE,
                                                      passive=True)

            # Check whether the exchange has been created by sending messages
            # to it. If this fails an exception is raised, hence the test fails.
            self.test_store.rabbitmq.basic_publish_confirm(
                exchange=HEALTH_CHECK_EXCHANGE,
                routing_key=self.routing_key,
                body=self.test_data_str,
                is_body_dict=False,
                properties=pika.BasicProperties(delivery_mode=2),
                mandatory=False)
            # Check whether the exchange has been created by sending messages
            # to it. If this fails an exception is raised, hence the test fails.
            self.test_store.rabbitmq.basic_publish_confirm(
                exchange=STORE_EXCHANGE,
                routing_key=GITHUB_STORE_INPUT_ROUTING_KEY,
                body=self.test_data_str,
                is_body_dict=False,
                properties=pika.BasicProperties(delivery_mode=2),
                mandatory=False)

            # Re-declare queue to get the number of messages
            res = self.test_store.rabbitmq.queue_declare(
                GITHUB_STORE_INPUT_QUEUE, False, True, False, False)

            self.assertEqual(1, res.method.message_count)
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @parameterized.expand([
        ("self.github_data_1", ),
        ("self.github_data_2", ),
        ("self.github_data_3", ),
    ])
    @mock.patch.object(RedisApi, "hset_multiple")
    def test_process_redis_store_redis_is_called_correctly(
            self, mock_github_data, mock_hset_multiple) -> None:
        """_process_redis_store must hset the releases metric and the
        last_monitored timestamp under the parent hash."""

        data = eval(mock_github_data)
        self.test_store._process_redis_store(data)

        meta_data = data['result']['meta_data']
        repo_id = meta_data['repo_id']
        parent_id = meta_data['repo_parent_id']
        metrics = data['result']['data']

        call_1 = call(
            Keys.get_hash_parent(parent_id), {
                Keys.get_github_no_of_releases(repo_id):
                str(metrics['no_of_releases']),
                Keys.get_github_last_monitored(repo_id):
                str(meta_data['last_monitored']),
            })
        mock_hset_multiple.assert_has_calls([call_1])

    @mock.patch("src.data_store.stores.store.RedisApi.hset_multiple",
                autospec=True)
    def test_process_redis_store_does_nothing_on_error_key(
            self, mock_hset_multiple) -> None:
        """Error payloads must not be written to Redis."""
        self.test_store._process_redis_store(self.github_data_error)
        mock_hset_multiple.assert_not_called()

    def test_process_redis_store_raises_exception_on_unexpected_key(
            self) -> None:
        """Payloads lacking result/error keys must raise."""
        self.assertRaises(ReceivedUnexpectedDataException,
                          self.test_store._process_redis_store,
                          self.github_data_unexpected)

    @parameterized.expand([
        ("self.github_data_1", ),
        ("self.github_data_2", ),
        ("self.github_data_3", ),
    ])
    def test_process_redis_store_redis_stores_correctly(
            self, mock_github_data) -> None:
        """The stored Redis values must round-trip back unchanged."""

        data = eval(mock_github_data)
        self.test_store._process_redis_store(data)

        meta_data = data['result']['meta_data']
        repo_id = meta_data['repo_id']
        parent_id = meta_data['repo_parent_id']
        metrics = data['result']['data']

        self.assertEqual(
            str(metrics['no_of_releases']),
            self.redis.hget(
                Keys.get_hash_parent(parent_id),
                Keys.get_github_no_of_releases(repo_id)).decode("utf-8"))
        self.assertEqual(
            str(meta_data['last_monitored']),
            self.redis.hget(
                Keys.get_hash_parent(parent_id),
                Keys.get_github_last_monitored(repo_id)).decode("utf-8"))

    @parameterized.expand([
        ("self.github_data_1", ),
        ("self.github_data_2", ),
        ("self.github_data_3", ),
    ])
    @mock.patch("src.data_store.stores.store.RabbitMQApi.basic_ack",
                autospec=True)
    @mock.patch("src.data_store.stores.store.Store._send_heartbeat",
                autospec=True)
    def test_process_data_saves_in_redis(self, mock_github_data, mock_send_hb,
                                         mock_ack) -> None:
        """_process_data on valid data must ack, heartbeat and persist the
        metrics to Redis."""
        self.rabbitmq.connect()
        mock_ack.return_value = None
        try:
            self.test_store._initialise_rabbitmq()
            data = eval(mock_github_data)

            blocking_channel = self.test_store.rabbitmq.channel
            method_chains = pika.spec.Basic.Deliver(
                routing_key=GITHUB_STORE_INPUT_ROUTING_KEY)

            properties = pika.spec.BasicProperties()
            self.test_store._process_data(blocking_channel, method_chains,
                                          properties,
                                          json.dumps(data).encode())
            mock_ack.assert_called_once()
            mock_send_hb.assert_called_once()

            meta_data = data['result']['meta_data']
            repo_id = meta_data['repo_id']
            parent_id = meta_data['repo_parent_id']
            metrics = data['result']['data']

            self.assertEqual(
                str(metrics['no_of_releases']),
                self.redis.hget(
                    Keys.get_hash_parent(parent_id),
                    Keys.get_github_no_of_releases(repo_id)).decode("utf-8"))
            self.assertEqual(
                str(meta_data['last_monitored']),
                self.redis.hget(
                    Keys.get_hash_parent(parent_id),
                    Keys.get_github_last_monitored(repo_id)).decode("utf-8"))
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @parameterized.expand([
        ("KeyError", "self.github_data_key_error "),
        ("ReceivedUnexpectedDataException", "self.github_data_unexpected"),
    ])
    @mock.patch("src.data_store.stores.store.RabbitMQApi.basic_ack",
                autospec=True)
    @mock.patch("src.data_store.stores.store.Store._send_heartbeat",
                autospec=True)
    def test_process_data_with_bad_data_does_raises_exceptions(
            self, mock_error, mock_bad_data, mock_send_hb, mock_ack) -> None:
        """Bad payloads must still be acked but never heartbeated, and
        _process_redis_store must raise the matching exception."""
        self.rabbitmq.connect()
        mock_ack.return_value = None
        try:
            self.test_store._initialise_rabbitmq()

            blocking_channel = self.test_store.rabbitmq.channel
            method_chains = pika.spec.Basic.Deliver(
                routing_key=GITHUB_STORE_INPUT_ROUTING_KEY)

            properties = pika.spec.BasicProperties()
            self.test_store._process_data(
                blocking_channel, method_chains, properties,
                json.dumps(self.github_data_unexpected).encode())
            self.assertRaises(eval(mock_error),
                              self.test_store._process_redis_store,
                              eval(mock_bad_data))
            mock_ack.assert_called_once()
            mock_send_hb.assert_not_called()
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @freeze_time("2012-01-01")
    @mock.patch("src.data_store.stores.store.RabbitMQApi.basic_ack",
                autospec=True)
    @mock.patch(
        "src.data_store.stores.github.GithubStore._process_redis_store",
        autospec=True)
    def test_process_data_sends_heartbeat_correctly(self,
                                                    mock_process_redis_store,
                                                    mock_basic_ack) -> None:
        """On success _process_data must publish exactly one heartbeat with
        the store's name, alive flag and (frozen) timestamp."""

        mock_basic_ack.return_value = None
        try:
            self.test_rabbit_manager.connect()
            self.test_store._initialise_rabbitmq()

            # Recreate the queue empty so the message count below is exact.
            self.test_rabbit_manager.queue_delete(self.test_queue_name)
            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name,
                durable=True,
                exclusive=False,
                auto_delete=False,
                passive=False)
            self.assertEqual(0, res.method.message_count)

            self.test_rabbit_manager.queue_bind(queue=self.test_queue_name,
                                                exchange=HEALTH_CHECK_EXCHANGE,
                                                routing_key=self.routing_key)

            blocking_channel = self.test_store.rabbitmq.channel
            method_chains = pika.spec.Basic.Deliver(
                routing_key=GITHUB_STORE_INPUT_ROUTING_KEY)

            properties = pika.spec.BasicProperties()
            self.test_store._process_data(
                blocking_channel, method_chains, properties,
                json.dumps(self.github_data_1).encode())

            # Passive re-declare exposes the heartbeat message count.
            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name,
                durable=True,
                exclusive=False,
                auto_delete=False,
                passive=True)
            self.assertEqual(1, res.method.message_count)

            heartbeat_test = {
                'component_name': self.test_store_name,
                'is_alive': True,
                'timestamp': datetime(2012, 1, 1).timestamp()
            }

            _, _, body = self.test_rabbit_manager.basic_get(
                self.test_queue_name)
            self.assertEqual(heartbeat_test, json.loads(body))
            mock_process_redis_store.assert_called_once()
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @mock.patch("src.data_store.stores.store.RabbitMQApi.basic_ack",
                autospec=True)
    def test_process_data_doesnt_send_heartbeat_on_processing_error(
            self, mock_basic_ack) -> None:
        """When processing fails, no heartbeat may reach the queue."""

        mock_basic_ack.return_value = None
        try:
            self.test_rabbit_manager.connect()
            self.test_store._initialise_rabbitmq()

            # Recreate the queue empty so the message count below is exact.
            self.test_rabbit_manager.queue_delete(self.test_queue_name)
            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name,
                durable=True,
                exclusive=False,
                auto_delete=False,
                passive=False)
            self.assertEqual(0, res.method.message_count)

            self.test_rabbit_manager.queue_bind(queue=self.test_queue_name,
                                                exchange=HEALTH_CHECK_EXCHANGE,
                                                routing_key=self.routing_key)

            blocking_channel = self.test_store.rabbitmq.channel
            method_chains = pika.spec.Basic.Deliver(
                routing_key=GITHUB_STORE_INPUT_ROUTING_KEY)

            properties = pika.spec.BasicProperties()
            self.test_store._process_data(
                blocking_channel, method_chains, properties,
                json.dumps(self.github_data_unexpected).encode())

            # Queue must still be empty: the bad payload aborted processing
            # before the heartbeat publish.
            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name,
                durable=True,
                exclusive=False,
                auto_delete=False,
                passive=True)
            self.assertEqual(0, res.method.message_count)
        except Exception as e:
            self.fail("Test failed: {}".format(e))
Ejemplo n.º 8
0
class TestSystemMonitor(unittest.TestCase):
    def setUp(self) -> None:
        """Build a SystemMonitor wired to a RabbitMQ instance, along with
        example retrieved/processed metric fixtures used throughout the tests.
        """
        # Logging is disabled so that test output stays clean.
        self.dummy_logger = logging.getLogger('Dummy')
        self.dummy_logger.disabled = True
        self.connection_check_time_interval = timedelta(seconds=0)
        self.rabbit_ip = env.RABBIT_IP
        self.rabbitmq = RabbitMQApi(
            self.dummy_logger, self.rabbit_ip,
            connection_check_time_interval=self.connection_check_time_interval)
        # Configuration values of the monitor and system under test.
        self.monitor_name = 'test_monitor'
        self.monitoring_period = 10
        self.system_id = 'test_system_id'
        self.parent_id = 'test_parent_id'
        self.system_name = 'test_system'
        self.monitor_system = True
        self.node_exporter_url = 'test_url'
        self.routing_key = 'test_routing_key'
        self.test_data_str = 'test data'
        self.test_data_dict = {
            'test_key_1': 'test_val_1',
            'test_key_2': 'test_val_2',
        }
        self.test_heartbeat = {
            'component_name': 'Test Component',
            'is_alive': True,
            'timestamp': datetime(2012, 1, 1).timestamp(),
        }
        self.test_queue_name = 'Test Queue'
        # Prometheus/node-exporter metric names the monitor is expected to
        # track, in the order exposed by SystemMonitor.metrics_to_monitor.
        self.metrics_to_monitor = [
            'process_cpu_seconds_total', 'go_memstats_alloc_bytes',
            'go_memstats_alloc_bytes_total', 'process_virtual_memory_bytes',
            'process_max_fds', 'process_open_fds', 'node_cpu_seconds_total',
            'node_filesystem_avail_bytes', 'node_filesystem_size_bytes',
            'node_memory_MemTotal_bytes', 'node_memory_MemAvailable_bytes',
            'node_network_transmit_bytes_total',
            'node_network_receive_bytes_total',
            'node_disk_io_time_seconds_total']
        # Example raw data as returned by _get_data: scalar metrics map to a
        # float, labelled metrics map to {json-encoded label set: value}.
        self.retrieved_metrics_example = {
            'go_memstats_alloc_bytes': 2003024.0,
            'go_memstats_alloc_bytes_total': 435777412600.0,
            'node_cpu_seconds_total': {
                '{"cpu": "0", "mode": "idle"}': 3626110.54,
                '{"cpu": "0", "mode": "iowait"}': 16892.07,
                '{"cpu": "0", "mode": "irq"}': 0.0,
                '{"cpu": "0", "mode": "nice"}': 131.77,
                '{"cpu": "0", "mode": "softirq"}': 8165.66,
                '{"cpu": "0", "mode": "steal"}': 0.0,
                '{"cpu": "0", "mode": "system"}': 46168.15,
                '{"cpu": "0", "mode": "user"}': 238864.68,
                '{"cpu": "1", "mode": "idle"}': 3630087.24,
                '{"cpu": "1", "mode": "iowait"}': 17084.42,
                '{"cpu": "1", "mode": "irq"}': 0.0,
                '{"cpu": "1", "mode": "nice"}': 145.18,
                '{"cpu": "1", "mode": "softirq"}': 5126.93,
                '{"cpu": "1", "mode": "steal"}': 0.0,
                '{"cpu": "1", "mode": "system"}': 46121.4,
                '{"cpu": "1", "mode": "user"}': 239419.51},
            'node_disk_io_time_seconds_total': {
                '{"device": "dm-0"}': 38359.0,
                '{"device": "sda"}': 38288.0,
                '{"device": "sr0"}': 0.0},
            'node_filesystem_avail_bytes': {
                '{"device": "/dev/mapper/ubuntu--vg-ubuntu--lv", '
                '"fstype": "ext4", "mountpoint": "/"}': 57908170752.0,
                '{"device": "/dev/sda2", "fstype": "ext4", '
                '"mountpoint": "/boot"}': 729411584.0,
                '{"device": "lxcfs", "fstype": "fuse.lxcfs", '
                '"mountpoint": "/var/lib/lxcfs"}': 0.0,
                '{"device": "tmpfs", "fstype": "tmpfs", '
                '"mountpoint": "/run"}': 207900672.0,
                '{"device": "tmpfs", "fstype": "tmpfs", "mountpoint": '
                '"/run/lock"}': 5242880.0},
            'node_filesystem_size_bytes': {
                '{"device": "/dev/mapper/ubuntu--vg-ubuntu--lv", "fstype": '
                '"ext4", "mountpoint": "/"}': 104560844800.0,
                '{"device": "/dev/sda2", "fstype": "ext4", "mountpoint": '
                '"/boot"}': 1023303680.0,
                '{"device": "lxcfs", "fstype": "fuse.lxcfs", "mountpoint": '
                '"/var/lib/lxcfs"}': 0.0,
                '{"device": "tmpfs", "fstype": "tmpfs", "mountpoint": "/run"}':
                    209027072.0,
                '{"device": "tmpfs", "fstype": "tmpfs", "mountpoint": '
                '"/run/lock"}': 5242880.0},
            'node_memory_MemAvailable_bytes': 1377767424.0,
            'node_memory_MemTotal_bytes': 2090237952.0,
            'node_network_receive_bytes_total': {
                '{"device": "ens160"}': 722358765622.0,
                '{"device": "lo"}': 381405.0},
            'node_network_transmit_bytes_total': {
                '{"device": "ens160"}': 1011571824152.0,
                '{"device": "lo"}': 381405.0},
            'process_cpu_seconds_total': 2786.82,
            'process_max_fds': 1024.0,
            'process_open_fds': 8.0,
            'process_virtual_memory_bytes': 118513664.0}
        # The values _process_retrieved_data is expected to derive from the
        # raw metrics above (percentages/aggregates per metric).
        self.processed_data_example = {
            'process_cpu_seconds_total': 2786.82,
            'process_memory_usage': 0.0,
            'virtual_memory_usage': 118513664.0,
            'open_file_descriptors': 0.78125,
            'system_cpu_usage': 7.85,
            'system_ram_usage': 34.09,
            'system_storage_usage': 44.37,
            'network_transmit_bytes_total': 1011572205557.0,
            'network_receive_bytes_total': 722359147027.0,
            'disk_io_time_seconds_total': 76647.0,
        }
        self.test_exception = PANICException('test_exception', 1)
        self.system_config = SystemConfig(self.system_id, self.parent_id,
                                          self.system_name, self.monitor_system,
                                          self.node_exporter_url)
        self.test_monitor = SystemMonitor(self.monitor_name, self.system_config,
                                          self.dummy_logger,
                                          self.monitoring_period, self.rabbitmq)

    def tearDown(self) -> None:
        """Remove broker-side state (queues/exchanges) created by the tests
        and drop references to the fixtures built in setUp.
        """
        # Delete any queues and exchanges which are common across many tests
        try:
            self.test_monitor.rabbitmq.connect()

            # Declare them before just in case there are tests which do not
            # use these queues and exchanges
            self.test_monitor.rabbitmq.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=False
            )
            self.test_monitor.rabbitmq.exchange_declare(
                HEALTH_CHECK_EXCHANGE, 'topic', False, True, False, False)
            self.test_monitor.rabbitmq.exchange_declare(
                RAW_DATA_EXCHANGE, 'direct', False, True, False, False)

            # Purge before delete so leftover messages never linger.
            self.test_monitor.rabbitmq.queue_purge(self.test_queue_name)
            self.test_monitor.rabbitmq.queue_delete(self.test_queue_name)
            self.test_monitor.rabbitmq.exchange_delete(RAW_DATA_EXCHANGE)
            self.test_monitor.rabbitmq.exchange_delete(HEALTH_CHECK_EXCHANGE)
            self.test_monitor.rabbitmq.disconnect()
        except Exception as e:
            # Best-effort cleanup: failure here must not fail the test run.
            print("Deletion of queues and exchanges failed: {}".format(e))

        self.dummy_logger = None
        self.rabbitmq = None
        self.test_exception = None
        self.system_config = None
        self.test_monitor = None

    def test_str_returns_monitor_name(self) -> None:
        """str() applied to the monitor must yield its configured name."""
        actual = str(self.test_monitor)
        self.assertEqual(self.monitor_name, actual)

    def test_get_monitor_period_returns_monitor_period(self) -> None:
        """The monitor_period property must echo the period given in setUp."""
        period = self.test_monitor.monitor_period
        self.assertEqual(self.monitoring_period, period)

    def test_get_monitor_name_returns_monitor_name(self) -> None:
        """The monitor_name property must echo the name given in setUp."""
        name = self.test_monitor.monitor_name
        self.assertEqual(self.monitor_name, name)

    def test_system_config_returns_system_config(self) -> None:
        """The system_config property must return the config passed to the
        monitor's constructor.
        """
        config = self.test_monitor.system_config
        self.assertEqual(self.system_config, config)

    def test_metrics_to_monitor_returns_metrics_to_monitor(self) -> None:
        """The monitor must expose exactly the expected metric-name list."""
        exposed = self.test_monitor.metrics_to_monitor
        self.assertEqual(self.metrics_to_monitor, exposed)

    def test_initialise_rabbitmq_initialises_everything_as_expected(
            self) -> None:
        """_initialise_rabbitmq must open a connection, enable delivery
        confirmation, and declare both the RAW_DATA and HEALTH_CHECK
        exchanges. NOTE(review): integration test — needs a live broker.
        """
        try:
            # To make sure that there is no connection/channel already
            # established
            self.assertIsNone(self.rabbitmq.connection)
            self.assertIsNone(self.rabbitmq.channel)

            # To make sure that the exchanges have not already been declared
            self.rabbitmq.connect()
            self.rabbitmq.exchange_delete(RAW_DATA_EXCHANGE)
            self.rabbitmq.exchange_delete(HEALTH_CHECK_EXCHANGE)
            self.rabbitmq.disconnect()

            self.test_monitor._initialise_rabbitmq()

            # Perform checks that the connection has been opened, marked as open
            # and that the delivery confirmation variable is set.
            self.assertTrue(self.test_monitor.rabbitmq.is_connected)
            self.assertTrue(self.test_monitor.rabbitmq.connection.is_open)
            self.assertTrue(
                self.test_monitor.rabbitmq.channel._delivery_confirmation)

            # Check whether the exchange has been creating by sending messages
            # to it. If this fails an exception is raised hence the test fails.
            self.test_monitor.rabbitmq.basic_publish_confirm(
                exchange=RAW_DATA_EXCHANGE, routing_key=self.routing_key,
                body=self.test_data_str, is_body_dict=False,
                properties=pika.BasicProperties(delivery_mode=2),
                mandatory=False)
            self.test_monitor.rabbitmq.basic_publish_confirm(
                exchange=HEALTH_CHECK_EXCHANGE, routing_key=self.routing_key,
                body=self.test_data_str, is_body_dict=False,
                properties=pika.BasicProperties(delivery_mode=2),
                mandatory=False)
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @mock.patch.object(SystemMonitor, "_process_retrieved_data")
    @mock.patch.object(SystemMonitor, "_process_error")
    def test_process_data_calls_process_error_on_retrieval_error(
            self, mock_process_error, mock_process_retrieved_data) -> None:
        """When retrieval failed, _process_data must route through the
        error-processing path only.
        """
        # The actual processing of data is exercised by other tests.
        mock_process_error.return_value = self.test_data_dict

        self.test_monitor._process_data(self.test_data_dict, True,
                                        self.test_exception)

        # Only the error path should have run: _process_error exactly once,
        # _process_retrieved_data never.
        mock_process_error.assert_called_once()
        mock_process_retrieved_data.assert_not_called()

    @mock.patch.object(SystemMonitor, "_process_retrieved_data")
    @mock.patch.object(SystemMonitor, "_process_error")
    def test_process_data_calls_process_retrieved_data_on_retrieval_success(
            self, mock_process_error, mock_process_retrieved_data) -> None:
        """When retrieval succeeded, _process_data must route through the
        data-processing path only.
        """
        # Do not test the processing of data for now
        mock_process_retrieved_data.return_value = self.test_data_dict

        self.test_monitor._process_data(self.test_data_dict, False, None)

        # Test passes if _process_retrieved_data is called once and
        # _process_error is not called (the original comment here stated the
        # inverse of the assertions below).
        self.assertEqual(0, mock_process_error.call_count)
        self.assertEqual(1, mock_process_retrieved_data.call_count)

    def test_send_heartbeat_sends_a_heartbeat_correctly(self) -> None:
        """_send_heartbeat must publish exactly one message, with the
        heartbeat payload, routed via HEALTH_CHECK_EXCHANGE with key
        'heartbeat.worker'. NOTE(review): integration test — needs a broker.
        """
        # This test creates a queue which receives messages with the same
        # routing key as the ones sent by send_heartbeat, and checks that the
        # heartbeat is received
        try:
            self.test_monitor._initialise_rabbitmq()

            # Delete the queue before to avoid messages in the queue on error.
            self.test_monitor.rabbitmq.queue_delete(self.test_queue_name)

            res = self.test_monitor.rabbitmq.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=False
            )
            self.assertEqual(0, res.method.message_count)
            self.test_monitor.rabbitmq.queue_bind(
                queue=self.test_queue_name, exchange=HEALTH_CHECK_EXCHANGE,
                routing_key='heartbeat.worker')
            self.test_monitor._send_heartbeat(self.test_heartbeat)

            # By re-declaring the queue again we can get the number of messages
            # in the queue.
            res = self.test_monitor.rabbitmq.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=True
            )
            self.assertEqual(1, res.method.message_count)

            # Check that the message received is actually the HB
            _, _, body = self.test_monitor.rabbitmq.basic_get(
                self.test_queue_name)
            self.assertEqual(self.test_heartbeat, json.loads(body))
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    def test_display_data_returns_the_correct_string(self) -> None:
        """_display_data must render the processed metrics as a single
        comma-separated 'key=value' string, in the fixed metric order.
        """
        ordered_keys = [
            'process_cpu_seconds_total', 'process_memory_usage',
            'virtual_memory_usage', 'open_file_descriptors',
            'system_cpu_usage', 'system_ram_usage', 'system_storage_usage',
            'network_transmit_bytes_total', 'network_receive_bytes_total',
            'disk_io_time_seconds_total',
        ]
        expected_output = ", ".join(
            "{}={}".format(key, self.processed_data_example[key])
            for key in ordered_keys)

        actual_output = self.test_monitor._display_data(
            self.processed_data_example)
        self.assertEqual(expected_output, actual_output)

    @freeze_time("2012-01-01")
    def test_process_error_returns_expected_data(self) -> None:
        expected_output = {
            'error': {
                'meta_data': {
                    'monitor_name': self.test_monitor.monitor_name,
                    'system_name': self.test_monitor.system_config.system_name,
                    'system_id': self.test_monitor.system_config.system_id,
                    'system_parent_id':
                        self.test_monitor.system_config.parent_id,
                    'time': datetime(2012, 1, 1).timestamp()
                },
                'message': self.test_exception.message,
                'code': self.test_exception.code,
            }
        }
        actual_output = self.test_monitor._process_error(self.test_exception)
        self.assertEqual(actual_output, expected_output)

    @freeze_time("2012-01-01")
    def test_process_retrieved_data_returns_expected_data(self) -> None:
        expected_output = {
            'result': {
                'meta_data': {
                    'monitor_name': self.test_monitor.monitor_name,
                    'system_name': self.test_monitor.system_config.system_name,
                    'system_id': self.test_monitor.system_config.system_id,
                    'system_parent_id':
                        self.test_monitor.system_config.parent_id,
                    'time': datetime(2012, 1, 1).timestamp()
                },
                'data': self.processed_data_example,
            }
        }

        actual_output = self.test_monitor._process_retrieved_data(
            self.retrieved_metrics_example)
        self.assertEqual(expected_output, actual_output)

    def test_send_data_sends_data_correctly(self) -> None:
        """_send_data must publish exactly one message, with the processed
        data payload, routed via RAW_DATA_EXCHANGE with key 'system'.
        NOTE(review): integration test — needs a live broker.
        """
        # This test creates a queue which receives messages with the same
        # routing key as the ones sent by send_data, and checks that the
        # data is received
        try:
            self.test_monitor._initialise_rabbitmq()

            # Delete the queue before to avoid messages in the queue on error.
            self.test_monitor.rabbitmq.queue_delete(self.test_queue_name)

            res = self.test_monitor.rabbitmq.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=False
            )
            self.assertEqual(0, res.method.message_count)
            self.test_monitor.rabbitmq.queue_bind(
                queue=self.test_queue_name, exchange=RAW_DATA_EXCHANGE,
                routing_key='system')

            self.test_monitor._send_data(self.processed_data_example)

            # By re-declaring the queue again we can get the number of messages
            # in the queue.
            res = self.test_monitor.rabbitmq.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=True
            )
            self.assertEqual(1, res.method.message_count)

            # Check that the message received is actually the processed data
            _, _, body = self.test_monitor.rabbitmq.basic_get(
                self.test_queue_name)
            self.assertEqual(self.processed_data_example, json.loads(body))
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @freeze_time("2012-01-01")
    @mock.patch.object(SystemMonitor, "_get_data")
    def test_monitor_sends_data_and_hb_if_data_retrieve_and_processing_success(
            self, mock_get_data) -> None:
        expected_output_data = {
            'result': {
                'meta_data': {
                    'monitor_name': self.test_monitor.monitor_name,
                    'system_name': self.test_monitor.system_config.system_name,
                    'system_id': self.test_monitor.system_config.system_id,
                    'system_parent_id':
                        self.test_monitor.system_config.parent_id,
                    'time': datetime(2012, 1, 1).timestamp()
                },
                'data': self.processed_data_example,
            }
        }
        expected_output_hb = {
            'component_name': self.test_monitor.monitor_name,
            'is_alive': True,
            'timestamp': datetime(2012, 1, 1).timestamp()
        }

        try:
            mock_get_data.return_value = self.retrieved_metrics_example
            self.test_monitor._initialise_rabbitmq()

            # Delete the queue before to avoid messages in the queue on error.
            self.test_monitor.rabbitmq.queue_delete(self.test_queue_name)

            res = self.test_monitor.rabbitmq.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=False
            )
            self.assertEqual(0, res.method.message_count)
            self.test_monitor.rabbitmq.queue_bind(
                queue=self.test_queue_name, exchange=RAW_DATA_EXCHANGE,
                routing_key='system')
            self.test_monitor.rabbitmq.queue_bind(
                queue=self.test_queue_name, exchange=HEALTH_CHECK_EXCHANGE,
                routing_key='heartbeat.worker')

            self.test_monitor._monitor()

            # By re-declaring the queue again we can get the number of messages
            # in the queue.
            res = self.test_monitor.rabbitmq.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=True
            )
            # There must be 2 messages in the queue, the heartbeat and the
            # processed data
            self.assertEqual(2, res.method.message_count)

            # Check that the message received is actually the processed data
            _, _, body = self.test_monitor.rabbitmq.basic_get(
                self.test_queue_name)
            self.assertEqual(expected_output_data, json.loads(body))

            # Check that the message received is actually the HB
            _, _, body = self.test_monitor.rabbitmq.basic_get(
                self.test_queue_name)
            self.assertEqual(expected_output_hb, json.loads(body))
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @mock.patch.object(SystemMonitor, "_process_data")
    @mock.patch.object(SystemMonitor, "_get_data")
    def test_monitor_sends_no_data_and_hb_if_data_ret_success_and_proc_fails(
            self, mock_get_data, mock_process_data) -> None:
        """If retrieval succeeds but processing raises, _monitor must publish
        nothing at all — neither data nor a heartbeat.
        NOTE(review): integration test — needs a live broker.
        """
        mock_process_data.side_effect = self.test_exception
        mock_get_data.return_value = self.retrieved_metrics_example
        try:
            self.test_monitor._initialise_rabbitmq()

            # Delete the queue before to avoid messages in the queue on error.
            self.test_monitor.rabbitmq.queue_delete(self.test_queue_name)

            res = self.test_monitor.rabbitmq.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=False
            )
            self.assertEqual(0, res.method.message_count)
            # Bind to both exchanges so any publish at all would be captured.
            self.test_monitor.rabbitmq.queue_bind(
                queue=self.test_queue_name, exchange=RAW_DATA_EXCHANGE,
                routing_key='system')
            self.test_monitor.rabbitmq.queue_bind(
                queue=self.test_queue_name, exchange=HEALTH_CHECK_EXCHANGE,
                routing_key='heartbeat.worker')

            self.test_monitor._monitor()

            # By re-declaring the queue again we can get the number of messages
            # in the queue.
            res = self.test_monitor.rabbitmq.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=True
            )
            # There must be 0 messages in the queue.
            self.assertEqual(0, res.method.message_count)
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @mock.patch.object(SystemMonitor, "_get_data")
    def test_monitor_sends_no_data_and_no_hb_on_get_data_unexpected_exception(
            self, mock_get_data) -> None:
        """An unexpected exception from _get_data must propagate out of
        _monitor, and nothing may be published on either exchange.
        NOTE(review): integration test — needs a live broker.
        """
        mock_get_data.side_effect = self.test_exception
        try:
            self.test_monitor._initialise_rabbitmq()

            # Delete the queue before to avoid messages in the queue on error.
            self.test_monitor.rabbitmq.queue_delete(self.test_queue_name)

            res = self.test_monitor.rabbitmq.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=False
            )
            self.assertEqual(0, res.method.message_count)
            # Bind to both exchanges so any publish at all would be captured.
            self.test_monitor.rabbitmq.queue_bind(
                queue=self.test_queue_name, exchange=RAW_DATA_EXCHANGE,
                routing_key='system')
            self.test_monitor.rabbitmq.queue_bind(
                queue=self.test_queue_name, exchange=HEALTH_CHECK_EXCHANGE,
                routing_key='heartbeat.worker')

            self.assertRaises(PANICException, self.test_monitor._monitor)

            # By re-declaring the queue again we can get the number of messages
            # in the queue.
            res = self.test_monitor.rabbitmq.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=True
            )
            # There must be 0 messages in the queue.
            self.assertEqual(0, res.method.message_count)
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @freeze_time("2012-01-01")
    @mock.patch.object(SystemMonitor, "_get_data")
    def test_monitor_sends_exception_data_and_hb_on_expected_exceptions(
            self, mock_get_data) -> None:
        errors_exceptions_dict = {
            ReqConnectionError('test'): SystemIsDownException(
                self.test_monitor.system_config.system_name),
            ReadTimeout('test'): SystemIsDownException(
                self.test_monitor.system_config.system_name),
            IncompleteRead('test'): DataReadingException(
                self.test_monitor.monitor_name,
                self.test_monitor.system_config.system_name),
            ChunkedEncodingError('test'): DataReadingException(
                self.test_monitor.monitor_name,
                self.test_monitor.system_config.system_name),
            ProtocolError('test'): DataReadingException(
                self.test_monitor.monitor_name,
                self.test_monitor.system_config.system_name),
            InvalidURL('test'): InvalidUrlException(
                self.test_monitor.system_config.node_exporter_url),
            InvalidSchema('test'): InvalidUrlException(
                self.test_monitor.system_config.node_exporter_url),
            MissingSchema('test'): InvalidUrlException(
                self.test_monitor.system_config.node_exporter_url),
            MetricNotFoundException('test_metric', 'test_endpoint'):
                MetricNotFoundException('test_metric', 'test_endpoint')
        }
        try:
            self.test_monitor._initialise_rabbitmq()
            for error, data_ret_exception in errors_exceptions_dict.items():
                mock_get_data.side_effect = error
                expected_output_data = {
                    'error': {
                        'meta_data': {
                            'monitor_name': self.test_monitor.monitor_name,
                            'system_name':
                                self.test_monitor.system_config.system_name,
                            'system_id':
                                self.test_monitor.system_config.system_id,
                            'system_parent_id':
                                self.test_monitor.system_config.parent_id,
                            'time': datetime(2012, 1, 1).timestamp()
                        },
                        'message': data_ret_exception.message,
                        'code': data_ret_exception.code,
                    }
                }
                expected_output_hb = {
                    'component_name': self.test_monitor.monitor_name,
                    'is_alive': True,
                    'timestamp': datetime(2012, 1, 1).timestamp()
                }
                # Delete the queue before to avoid messages in the queue on
                # error.
                self.test_monitor.rabbitmq.queue_delete(self.test_queue_name)

                res = self.test_monitor.rabbitmq.queue_declare(
                    queue=self.test_queue_name, durable=True, exclusive=False,
                    auto_delete=False, passive=False
                )
                self.assertEqual(0, res.method.message_count)
                self.test_monitor.rabbitmq.queue_bind(
                    queue=self.test_queue_name, exchange=RAW_DATA_EXCHANGE,
                    routing_key='system')
                self.test_monitor.rabbitmq.queue_bind(
                    queue=self.test_queue_name, exchange=HEALTH_CHECK_EXCHANGE,
                    routing_key='heartbeat.worker')

                self.test_monitor._monitor()

                # By re-declaring the queue again we can get the number of
                # messages in the queue.
                res = self.test_monitor.rabbitmq.queue_declare(
                    queue=self.test_queue_name, durable=True, exclusive=False,
                    auto_delete=False, passive=True
                )
                # There must be 2 messages in the queue, the heartbeat and the
                # processed data
                self.assertEqual(2, res.method.message_count)

                # Check that the message received is actually the processed data
                _, _, body = self.test_monitor.rabbitmq.basic_get(
                    self.test_queue_name)
                self.assertEqual(expected_output_data, json.loads(body))

                # Check that the message received is actually the HB
                _, _, body = self.test_monitor.rabbitmq.basic_get(
                    self.test_queue_name)
                self.assertEqual(expected_output_hb, json.loads(body))
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @mock.patch.object(SystemMonitor, "_get_data")
    def test_monitor_raises_msg_not_delivered_exception_if_data_not_routed(
            self, mock_get_data) -> None:
        """With no queue bound to the data exchange, _monitor's publish is
        unroutable and must raise MessageWasNotDeliveredException.
        """
        mock_get_data.return_value = self.retrieved_metrics_example
        try:
            self.test_monitor._initialise_rabbitmq()

            # No queue was bound, so the published data has nowhere to go.
            with self.assertRaises(MessageWasNotDeliveredException):
                self.test_monitor._monitor()
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @freeze_time("2012-01-01")
    @mock.patch.object(SystemMonitor, "_get_data")
    def test_monitor_raises_msg_not_del_except_if_hb_not_routed_and_sends_data(
            self, mock_get_data) -> None:
        mock_get_data.return_value = self.retrieved_metrics_example
        expected_output_data = {
            'result': {
                'meta_data': {
                    'monitor_name': self.test_monitor.monitor_name,
                    'system_name': self.test_monitor.system_config.system_name,
                    'system_id': self.test_monitor.system_config.system_id,
                    'system_parent_id':
                        self.test_monitor.system_config.parent_id,
                    'time': datetime(2012, 1, 1).timestamp()
                },
                'data': self.processed_data_example,
            }
        }
        try:
            self.test_monitor._initialise_rabbitmq()

            self.test_monitor.rabbitmq.queue_delete(self.test_queue_name)

            res = self.test_monitor.rabbitmq.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=False
            )
            self.assertEqual(0, res.method.message_count)
            self.test_monitor.rabbitmq.queue_bind(
                queue=self.test_queue_name, exchange=RAW_DATA_EXCHANGE,
                routing_key='system')

            self.assertRaises(MessageWasNotDeliveredException,
                              self.test_monitor._monitor)

            # By re-declaring the queue again we can get the number of
            # messages in the queue.
            res = self.test_monitor.rabbitmq.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=True
            )
            # There must be 1 message in the queue, the processed data
            self.assertEqual(1, res.method.message_count)

            # Check that the message received is actually the processed data
            _, _, body = self.test_monitor.rabbitmq.basic_get(
                self.test_queue_name)
            self.assertEqual(expected_output_data, json.loads(body))
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @mock.patch.object(SystemMonitor, "_send_data")
    @mock.patch.object(SystemMonitor, "_get_data")
    def test_monitor_send_data_raises_amqp_channel_error_on_channel_error(
            self, mock_get_data, mock_send_data) -> None:
        """An AMQPChannelError raised while sending data must propagate out
        of _monitor unchanged.
        """
        mock_get_data.return_value = self.retrieved_metrics_example
        mock_send_data.side_effect = pika.exceptions.AMQPChannelError('test')
        try:
            self.test_monitor._initialise_rabbitmq()

            with self.assertRaises(pika.exceptions.AMQPChannelError):
                self.test_monitor._monitor()
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @freeze_time("2012-01-01")
    @mock.patch.object(SystemMonitor, "_send_heartbeat")
    @mock.patch.object(SystemMonitor, "_get_data")
    def test_monitor_send_hb_raises_amqp_chan_err_on_chan_err_and_sends_data(
            self, mock_get_data, mock_send_heartbeat) -> None:
        """If sending the heartbeat raises an AMQPChannelError, `_monitor`
        must still have published the processed data first, and then
        propagate the channel error to the caller.
        """
        mock_get_data.return_value = self.retrieved_metrics_example
        mock_send_heartbeat.side_effect = \
            pika.exceptions.AMQPChannelError('test')
        # The raw-data message `_monitor` is expected to publish; 'time' is
        # deterministic because of the freeze_time decorator above.
        expected_output_data = {
            'result': {
                'meta_data': {
                    'monitor_name': self.test_monitor.monitor_name,
                    'system_name': self.test_monitor.system_config.system_name,
                    'system_id': self.test_monitor.system_config.system_id,
                    'system_parent_id':
                        self.test_monitor.system_config.parent_id,
                    'time': datetime(2012, 1, 1).timestamp()
                },
                'data': self.processed_data_example,
            }
        }
        try:
            self.test_monitor._initialise_rabbitmq()

            # Start from an empty, freshly declared queue so that the message
            # counts asserted below are meaningful.
            self.test_monitor.rabbitmq.queue_delete(self.test_queue_name)

            res = self.test_monitor.rabbitmq.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=False
            )
            self.assertEqual(0, res.method.message_count)
            # Bind to both exchanges: the heartbeat must NOT arrive (it is the
            # step that raises), whereas the raw data must.
            self.test_monitor.rabbitmq.queue_bind(
                queue=self.test_queue_name, exchange=HEALTH_CHECK_EXCHANGE,
                routing_key='heartbeat.worker')
            self.test_monitor.rabbitmq.queue_bind(
                queue=self.test_queue_name, exchange=RAW_DATA_EXCHANGE,
                routing_key='system')

            self.assertRaises(pika.exceptions.AMQPChannelError,
                              self.test_monitor._monitor)

            # By re-declaring the queue again we can get the number of
            # messages in the queue.
            res = self.test_monitor.rabbitmq.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=True
            )
            # There must be 1 message in the queue, the processed data
            self.assertEqual(1, res.method.message_count)

            # Check that the message received is actually the processed data
            _, _, body = self.test_monitor.rabbitmq.basic_get(
                self.test_queue_name)
            self.assertEqual(expected_output_data, json.loads(body))
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @mock.patch.object(SystemMonitor, "_send_data")
    @mock.patch.object(SystemMonitor, "_get_data")
    def test_monitor_send_data_raises_amqp_conn_error_on_conn_error(
            self, mock_get_data, mock_send_data) -> None:
        """`_monitor` must let an AMQPConnectionError raised by `_send_data`
        propagate to its caller."""
        mock_get_data.return_value = self.retrieved_metrics_example
        mock_send_data.side_effect = pika.exceptions.AMQPConnectionError('test')
        try:
            self.test_monitor._initialise_rabbitmq()

            with self.assertRaises(pika.exceptions.AMQPConnectionError):
                self.test_monitor._monitor()
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @freeze_time("2012-01-01")
    @mock.patch.object(SystemMonitor, "_send_heartbeat")
    @mock.patch.object(SystemMonitor, "_get_data")
    def test_monitor_send_hb_raises_amqp_conn_err_on_conn_err_and_sends_data(
            self, mock_get_data, mock_send_heartbeat) -> None:
        """If sending the heartbeat raises an AMQPConnectionError, `_monitor`
        must still have published the processed data first, and then
        propagate the connection error to the caller.
        """
        mock_get_data.return_value = self.retrieved_metrics_example
        mock_send_heartbeat.side_effect = \
            pika.exceptions.AMQPConnectionError('test')
        # The raw-data message `_monitor` is expected to publish; 'time' is
        # deterministic because of the freeze_time decorator above.
        expected_output_data = {
            'result': {
                'meta_data': {
                    'monitor_name': self.test_monitor.monitor_name,
                    'system_name': self.test_monitor.system_config.system_name,
                    'system_id': self.test_monitor.system_config.system_id,
                    'system_parent_id':
                        self.test_monitor.system_config.parent_id,
                    'time': datetime(2012, 1, 1).timestamp()
                },
                'data': self.processed_data_example,
            }
        }
        try:
            self.test_monitor._initialise_rabbitmq()

            # Start from an empty, freshly declared queue so that the message
            # counts asserted below are meaningful.
            self.test_monitor.rabbitmq.queue_delete(self.test_queue_name)

            res = self.test_monitor.rabbitmq.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=False
            )
            self.assertEqual(0, res.method.message_count)
            # Bind to both exchanges: the heartbeat must NOT arrive (it is the
            # step that raises), whereas the raw data must.
            self.test_monitor.rabbitmq.queue_bind(
                queue=self.test_queue_name, exchange=HEALTH_CHECK_EXCHANGE,
                routing_key='heartbeat.worker')
            self.test_monitor.rabbitmq.queue_bind(
                queue=self.test_queue_name, exchange=RAW_DATA_EXCHANGE,
                routing_key='system')

            self.assertRaises(pika.exceptions.AMQPConnectionError,
                              self.test_monitor._monitor)

            # By re-declaring the queue again we can get the number of
            # messages in the queue.
            res = self.test_monitor.rabbitmq.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=True
            )
            # There must be 1 message in the queue, the processed data
            self.assertEqual(1, res.method.message_count)

            # Check that the message received is actually the processed data
            _, _, body = self.test_monitor.rabbitmq.basic_get(
                self.test_queue_name)
            self.assertEqual(expected_output_data, json.loads(body))
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @mock.patch.object(SystemMonitor, "_send_data")
    @mock.patch.object(SystemMonitor, "_get_data")
    def test_monitor_does_not_send_hb_and_data_if_send_data_fails(
            self, mock_get_data, mock_send_data) -> None:
        """For every failure mode of `_send_data`, `_monitor` must publish
        neither the raw data nor the heartbeat, and must re-raise the same
        exception type.
        """
        mock_get_data.return_value = self.retrieved_metrics_example
        # Map each exception instance used as the mock side effect to the
        # type expected to propagate out of `_monitor`.
        exception_types_dict = \
            {
                Exception('test'): Exception,
                pika.exceptions.AMQPConnectionError('test'):
                    pika.exceptions.AMQPConnectionError,
                pika.exceptions.AMQPChannelError('test'):
                    pika.exceptions.AMQPChannelError,
                MessageWasNotDeliveredException('test'):
                    MessageWasNotDeliveredException
            }
        try:
            self.test_monitor._initialise_rabbitmq()
            for exception, exception_type in exception_types_dict.items():
                mock_send_data.side_effect = exception
                # Reset the queue on every iteration so message counts are
                # independent between failure modes.
                self.test_monitor.rabbitmq.queue_delete(
                    self.test_queue_name)

                res = self.test_monitor.rabbitmq.queue_declare(
                    queue=self.test_queue_name, durable=True, exclusive=False,
                    auto_delete=False, passive=False
                )
                self.assertEqual(0, res.method.message_count)
                # Bind to both exchanges to capture anything `_monitor`
                # might (incorrectly) publish.
                self.test_monitor.rabbitmq.queue_bind(
                    queue=self.test_queue_name,
                    exchange=HEALTH_CHECK_EXCHANGE,
                    routing_key='heartbeat.worker')
                self.test_monitor.rabbitmq.queue_bind(
                    queue=self.test_queue_name, exchange=RAW_DATA_EXCHANGE,
                    routing_key='system')

                self.assertRaises(exception_type, self.test_monitor._monitor)

                # By re-declaring the queue again we can get the number of
                # messages in the queue.
                res = self.test_monitor.rabbitmq.queue_declare(
                    queue=self.test_queue_name, durable=True,
                    exclusive=False, auto_delete=False, passive=True
                )
                # There must be no messages in the queue.
                self.assertEqual(0, res.method.message_count)
        except Exception as e:
            self.fail("Test failed: {}".format(e))
Ejemplo n.º 9
0
class TestAlertStore(unittest.TestCase):
    def setUp(self) -> None:
        """Build real RabbitMQ/Mongo/Redis clients, the AlertStore under
        test, and the alert fixtures shared by the tests in this class.

        Requires live RabbitMQ, MongoDB and Redis instances reachable via
        the `env` settings; `tearDown` removes everything created here.
        """
        self.dummy_logger = logging.getLogger('Dummy')
        self.dummy_logger.disabled = True
        # Zero interval so connection-alive checks never delay the tests.
        self.connection_check_time_interval = timedelta(seconds=0)
        self.rabbit_ip = env.RABBIT_IP
        self.rabbitmq = RabbitMQApi(
            self.dummy_logger,
            self.rabbit_ip,
            connection_check_time_interval=self.connection_check_time_interval)

        # Second, independent connection used by tests to observe what the
        # store publishes without sharing the store's channel.
        self.test_rabbit_manager = RabbitMQApi(
            self.dummy_logger,
            self.rabbit_ip,
            connection_check_time_interval=self.connection_check_time_interval)

        self.mongo_ip = env.DB_IP
        self.mongo_db = env.DB_NAME
        self.mongo_port = env.DB_PORT

        self.mongo = MongoApi(logger=self.dummy_logger.getChild(
            MongoApi.__name__),
                              db_name=self.mongo_db,
                              host=self.mongo_ip,
                              port=self.mongo_port)

        self.redis_db = env.REDIS_DB
        self.redis_host = env.REDIS_IP
        self.redis_port = env.REDIS_PORT
        self.redis_namespace = env.UNIQUE_ALERTER_IDENTIFIER
        self.redis = RedisApi(self.dummy_logger, self.redis_db,
                              self.redis_host, self.redis_port, '',
                              self.redis_namespace,
                              self.connection_check_time_interval)

        # The component under test.
        self.test_store_name = 'store name'
        self.test_store = AlertStore(self.test_store_name, self.dummy_logger,
                                     self.rabbitmq)

        self.routing_key = 'heartbeat.worker'
        self.test_queue_name = 'test queue'

        # Pre-declare the exchanges/queue the store expects so individual
        # tests can publish/consume immediately.
        connect_to_rabbit(self.rabbitmq)
        self.rabbitmq.exchange_declare(HEALTH_CHECK_EXCHANGE, 'topic', False,
                                       True, False, False)
        self.rabbitmq.exchange_declare(STORE_EXCHANGE, 'direct', False, True,
                                       False, False)
        self.rabbitmq.queue_declare(ALERT_STORE_INPUT_QUEUE, False, True,
                                    False, False)
        self.rabbitmq.queue_bind(ALERT_STORE_INPUT_QUEUE, STORE_EXCHANGE,
                                 ALERT_STORE_INPUT_ROUTING_KEY)

        # Observer queue bound to the health-check exchange for heartbeats.
        connect_to_rabbit(self.test_rabbit_manager)
        self.test_rabbit_manager.queue_declare(self.test_queue_name, False,
                                               True, False, False)
        self.test_rabbit_manager.queue_bind(self.test_queue_name,
                                            HEALTH_CHECK_EXCHANGE,
                                            self.routing_key)

        self.test_data_str = 'test data'
        self.test_exception = PANICException('test_exception', 1)

        self.parent_id = 'test_parent_id'

        # Fixture fields for three distinct alerts sharing one parent/metric.
        self.alert_id = 'test_alert_id'
        self.origin_id = 'test_origin_id'
        self.alert_name = 'test_alert'
        self.metric = 'system_is_down'
        self.severity = 'warning'
        self.message = 'alert message'
        self.value = 'alert_code_1'

        self.alert_id_2 = 'test_alert_id_2'
        self.origin_id_2 = 'test_origin_id_2'
        self.alert_name_2 = 'test_alert_2'
        self.severity_2 = 'critical'
        self.message_2 = 'alert message 2'
        self.value_2 = 'alert_code_2'

        self.alert_id_3 = 'test_alert_id_3'
        self.origin_id_3 = 'test_origin_id_3'
        self.alert_name_3 = 'test_alert_3'
        self.severity_3 = 'info'
        self.message_3 = 'alert message 3'
        self.value_3 = 'alert_code_3'

        self.last_monitored = datetime(2012, 1, 1).timestamp()
        self.none = None

        self.alert_data_1 = {
            'parent_id': self.parent_id,
            'origin_id': self.origin_id,
            'alert_code': {
                'name': self.alert_name,
                'value': self.value,
            },
            'severity': self.severity,
            'metric': self.metric,
            'message': self.message,
            'timestamp': self.last_monitored,
        }
        self.alert_data_2 = {
            'parent_id': self.parent_id,
            'origin_id': self.origin_id_2,
            'alert_code': {
                'name': self.alert_name_2,
                'value': self.value_2,
            },
            'severity': self.severity_2,
            'metric': self.metric,
            'message': self.message_2,
            'timestamp': self.last_monitored,
        }
        self.alert_data_3 = {
            'parent_id': self.parent_id,
            'origin_id': self.origin_id_3,
            'alert_code': {
                'name': self.alert_name_3,
                'value': self.value_3,
            },
            'severity': self.severity_3,
            'metric': self.metric,
            'message': self.message_3,
            'timestamp': self.last_monitored,
        }
        # Malformed payloads used by the error-path tests.
        self.alert_data_key_error = {"result": {"data": {}, "data2": {}}}
        self.alert_data_unexpected = {"unexpected": {}}

    def tearDown(self) -> None:
        """Delete the queues/exchanges declared in setUp and wipe the Redis
        and Mongo data written by the tests so runs stay isolated."""
        connect_to_rabbit(self.rabbitmq)
        delete_queue_if_exists(self.rabbitmq, ALERT_STORE_INPUT_QUEUE)
        delete_exchange_if_exists(self.rabbitmq, STORE_EXCHANGE)
        delete_exchange_if_exists(self.rabbitmq, HEALTH_CHECK_EXCHANGE)
        disconnect_from_rabbit(self.rabbitmq)

        connect_to_rabbit(self.test_rabbit_manager)
        delete_queue_if_exists(self.test_rabbit_manager, self.test_queue_name)
        disconnect_from_rabbit(self.test_rabbit_manager)

        # Drop references so nothing leaks between tests.
        self.dummy_logger = None
        self.connection_check_time_interval = None
        self.rabbitmq = None
        self.test_rabbit_manager = None
        self.redis.delete_all_unsafe()
        self.redis = None
        self.mongo.drop_collection(self.parent_id)
        self.mongo = None
        self.test_store = None

    def test__str__returns_name_correctly(self) -> None:
        """Casting the store to str yields the name it was created with."""
        actual = str(self.test_store)
        self.assertEqual(self.test_store_name, actual)

    def test_name_property_returns_name_correctly(self) -> None:
        """The `name` property exposes the name given at construction."""
        actual = self.test_store.name
        self.assertEqual(self.test_store_name, actual)

    def test_mongo_ip_property_returns_mongo_ip_correctly(self) -> None:
        """The `mongo_ip` property matches the configured Mongo host."""
        actual = self.test_store.mongo_ip
        self.assertEqual(self.mongo_ip, actual)

    def test_mongo_db_property_returns_mongo_db_correctly(self) -> None:
        """The `mongo_db` property matches the configured database name."""
        actual = self.test_store.mongo_db
        self.assertEqual(self.mongo_db, actual)

    def test_mongo_port_property_returns_mongo_port_correctly(self) -> None:
        """The `mongo_port` property matches the configured Mongo port."""
        actual = self.test_store.mongo_port
        self.assertEqual(self.mongo_port, actual)

    def test_mongo_property_returns_mongo(self) -> None:
        """The `mongo` property exposes a MongoApi-typed client."""
        expected_type = type(self.mongo)
        self.assertEqual(expected_type, type(self.test_store.mongo))

    def test_redis_property_returns_redis_correctly(self) -> None:
        """The `redis` property exposes a RedisApi-typed client."""
        expected_type = type(self.redis)
        self.assertEqual(expected_type, type(self.test_store.redis))

    def test_initialise_rabbitmq_initialises_everything_as_expected(
            self) -> None:
        """`_initialise_rabbitmq` must open a confirmed-delivery connection
        and declare both the store and health-check exchanges plus the
        store's input queue."""
        try:
            # To make sure that the exchanges have not already been declared
            self.rabbitmq.connect()
            self.rabbitmq.queue_delete(ALERT_STORE_INPUT_QUEUE)
            self.test_rabbit_manager.queue_delete(self.test_queue_name)
            self.rabbitmq.exchange_delete(HEALTH_CHECK_EXCHANGE)
            self.rabbitmq.exchange_delete(STORE_EXCHANGE)
            self.rabbitmq.disconnect()

            self.test_store._initialise_rabbitmq()

            # Perform checks that the connection has been opened, marked as open
            # and that the delivery confirmation variable is set.
            self.assertTrue(self.test_store.rabbitmq.is_connected)
            self.assertTrue(self.test_store.rabbitmq.connection.is_open)
            self.assertTrue(
                self.test_store.rabbitmq.channel._delivery_confirmation)

            # Check whether the producing exchanges have been created by
            # using passive=True. If this check fails an exception is raised
            # automatically.
            self.test_store.rabbitmq.exchange_declare(STORE_EXCHANGE,
                                                      passive=True)
            self.test_store.rabbitmq.exchange_declare(HEALTH_CHECK_EXCHANGE,
                                                      passive=True)

            # Check whether the exchange has been creating by sending messages
            # to it. If this fails an exception is raised, hence the test fails.
            self.test_store.rabbitmq.basic_publish_confirm(
                exchange=HEALTH_CHECK_EXCHANGE,
                routing_key=self.routing_key,
                body=self.test_data_str,
                is_body_dict=False,
                properties=pika.BasicProperties(delivery_mode=2),
                mandatory=False)

            # Check whether the exchange has been creating by sending messages
            # to it. If this fails an exception is raised, hence the test fails.
            self.test_store.rabbitmq.basic_publish_confirm(
                exchange=STORE_EXCHANGE,
                routing_key=ALERT_STORE_INPUT_ROUTING_KEY,
                body=self.test_data_str,
                is_body_dict=False,
                properties=pika.BasicProperties(delivery_mode=2),
                mandatory=False)

            # Re-declare queue to get the number of messages
            res = self.test_store.rabbitmq.queue_declare(
                ALERT_STORE_INPUT_QUEUE, False, True, False, False)

            # Only the STORE_EXCHANGE publish is routed to the input queue.
            self.assertEqual(1, res.method.message_count)
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @parameterized.expand([
        ("KeyError", "alert_data_key_error"),
    ])
    @mock.patch("src.data_store.stores.store.RabbitMQApi.basic_ack",
                autospec=True)
    @mock.patch("src.data_store.stores.store.Store._send_heartbeat",
                autospec=True)
    def test_process_data_with_bad_data_does_raises_exceptions(
            self, mock_error, mock_bad_data, mock_send_hb, mock_ack) -> None:
        """Badly-shaped alert data must raise from the store processors.

        `_process_data` itself swallows processing errors (it acks the
        message and skips the heartbeat), so the exception is asserted by
        calling `_process_mongo_store` directly with the bad fixture.
        """
        # Resolve the parameterized names explicitly instead of eval()-ing
        # strings (the old value even carried a stray trailing space).
        expected_exceptions = {"KeyError": KeyError}
        mock_ack.return_value = None
        try:
            self.test_store._initialise_rabbitmq()

            blocking_channel = self.test_store.rabbitmq.channel
            method_chains = pika.spec.Basic.Deliver(
                routing_key=ALERT_STORE_INPUT_ROUTING_KEY)

            properties = pika.spec.BasicProperties()
            # Unexpected data is consumed without raising: acked, no
            # heartbeat sent.
            self.test_store._process_data(
                blocking_channel, method_chains, properties,
                json.dumps(self.alert_data_unexpected).encode())
            self.assertRaises(expected_exceptions[mock_error],
                              self.test_store._process_mongo_store,
                              getattr(self, mock_bad_data))
            mock_ack.assert_called_once()
            mock_send_hb.assert_not_called()
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @freeze_time("2012-01-01")
    @mock.patch("src.data_store.stores.store.RabbitMQApi.basic_ack",
                autospec=True)
    @mock.patch("src.data_store.stores.alert.AlertStore._process_redis_store",
                autospec=True)
    @mock.patch("src.data_store.stores.alert.AlertStore._process_mongo_store",
                autospec=True)
    def test_process_data_sends_heartbeat_correctly(self,
                                                    mock_process_mongo_store,
                                                    mock_process_redis_store,
                                                    mock_basic_ack) -> None:
        """After successfully processing an alert, `_process_data` must
        publish exactly one well-formed heartbeat on the health-check
        exchange and call both store processors once."""

        mock_basic_ack.return_value = None
        try:
            self.test_rabbit_manager.connect()
            self.test_store._initialise_rabbitmq()

            # Fresh observer queue bound to the heartbeat routing key.
            self.test_rabbit_manager.queue_delete(self.test_queue_name)
            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name,
                durable=True,
                exclusive=False,
                auto_delete=False,
                passive=False)
            self.assertEqual(0, res.method.message_count)

            self.test_rabbit_manager.queue_bind(queue=self.test_queue_name,
                                                exchange=HEALTH_CHECK_EXCHANGE,
                                                routing_key=self.routing_key)

            blocking_channel = self.test_store.rabbitmq.channel
            method_chains = pika.spec.Basic.Deliver(
                routing_key=ALERT_STORE_INPUT_ROUTING_KEY)

            properties = pika.spec.BasicProperties()
            self.test_store._process_data(
                blocking_channel, method_chains, properties,
                json.dumps(self.alert_data_1).encode())

            # Passive re-declare just to read the message count.
            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name,
                durable=True,
                exclusive=False,
                auto_delete=False,
                passive=True)
            self.assertEqual(1, res.method.message_count)

            # Timestamp is deterministic thanks to freeze_time above.
            heartbeat_test = {
                'component_name': self.test_store_name,
                'is_alive': True,
                'timestamp': datetime(2012, 1, 1).timestamp()
            }

            _, _, body = self.test_rabbit_manager.basic_get(
                self.test_queue_name)
            self.assertEqual(heartbeat_test, json.loads(body))
            mock_process_mongo_store.assert_called_once()
            mock_process_redis_store.assert_called_once()
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @mock.patch("src.data_store.stores.store.RabbitMQApi.basic_ack",
                autospec=True)
    def test_process_data_doesnt_send_heartbeat_on_processing_error(
            self, mock_basic_ack) -> None:
        """When processing fails (unexpected payload shape), `_process_data`
        must not publish a heartbeat."""

        mock_basic_ack.return_value = None
        try:
            self.test_rabbit_manager.connect()
            self.test_store._initialise_rabbitmq()

            # Fresh observer queue bound to the heartbeat routing key.
            self.test_rabbit_manager.queue_delete(self.test_queue_name)
            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name,
                durable=True,
                exclusive=False,
                auto_delete=False,
                passive=False)
            self.assertEqual(0, res.method.message_count)

            self.test_rabbit_manager.queue_bind(queue=self.test_queue_name,
                                                exchange=HEALTH_CHECK_EXCHANGE,
                                                routing_key=self.routing_key)

            blocking_channel = self.test_store.rabbitmq.channel
            method_chains = pika.spec.Basic.Deliver(
                routing_key=ALERT_STORE_INPUT_ROUTING_KEY)

            properties = pika.spec.BasicProperties()
            # The malformed payload makes processing fail internally.
            self.test_store._process_data(
                blocking_channel, method_chains, properties,
                json.dumps(self.alert_data_unexpected).encode())

            # Passive re-declare just to read the message count: no
            # heartbeat must have been routed to the observer queue.
            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name,
                durable=True,
                exclusive=False,
                auto_delete=False,
                passive=True)
            self.assertEqual(0, res.method.message_count)
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @mock.patch.object(MongoApi, "update_one")
    def test_process_mongo_store_calls_update_one(self,
                                                  mock_update_one) -> None:
        """Storing one alert must issue exactly one Mongo update_one call."""
        alert = self.alert_data_1
        self.test_store._process_mongo_store(alert)
        mock_update_one.assert_called_once()

    @mock.patch.object(RedisApi, "hset")
    def test_process_redis_store_calls_hset(self, mock_hset) -> None:
        """Storing one alert must issue exactly one Redis hset call."""
        alert = self.alert_data_1
        self.test_store._process_redis_store(alert)
        mock_hset.assert_called_once()

    @parameterized.expand([
        ("alert_data_1", ),
        ("alert_data_2", ),
        ("alert_data_3", ),
    ])
    @freeze_time("2012-01-01")
    @mock.patch.object(MongoApi, "update_one")
    def test_process_mongo_store_calls_mongo_correctly(
            self, fixture_name, mock_update_one) -> None:
        """`_process_mongo_store` must call update_one with the expected
        filter and update documents for each alert fixture."""
        # Resolve the fixture by attribute name instead of eval()-ing the
        # parameterized string.
        data = getattr(self, fixture_name)
        self.test_store._process_mongo_store(data)

        call_1 = call(data['parent_id'], {
            'doc_type': 'alert',
            'n_alerts': {
                '$lt': 1000
            }
        }, {
            '$push': {
                'alerts': {
                    'origin': data['origin_id'],
                    'alert_name': data['alert_code']['name'],
                    'severity': data['severity'],
                    'metric': data['metric'],
                    'message': data['message'],
                    'timestamp': str(data['timestamp']),
                }
            },
            '$min': {
                'first': data['timestamp']
            },
            '$max': {
                'last': data['timestamp']
            },
            '$inc': {
                'n_alerts': 1
            },
        })
        mock_update_one.assert_has_calls([call_1])

    @parameterized.expand([
        ("alert_data_1", ),
        ("alert_data_2", ),
        ("alert_data_3", ),
    ])
    @freeze_time("2012-01-01")
    @mock.patch.object(RedisApi, "hset")
    def test_process_redis_store_calls_redis_correctly(self, fixture_name,
                                                       mock_hset) -> None:
        """`_process_redis_store` must hset the alert severity/message under
        the parent hash and the metric-specific alert key."""
        # Resolve the fixture by attribute name instead of eval()-ing the
        # parameterized string.
        data = getattr(self, fixture_name)
        self.test_store._process_redis_store(data)

        metric_data = {
            'severity': data['severity'],
            'message': data['message']
        }
        key = data['origin_id']

        # Resolve e.g. Keys.get_alert_system_is_down without eval().
        key_builder = getattr(Keys, 'get_alert_{}'.format(data['metric']))
        call_1 = call(Keys.get_hash_parent(data['parent_id']),
                      key_builder(key),
                      json.dumps(metric_data))
        mock_hset.assert_has_calls([call_1])

    @parameterized.expand([
        ("self.alert_data_1", ),
        ("self.alert_data_2", ),
        ("self.alert_data_3", ),
    ])
    @freeze_time("2012-01-01")
    @mock.patch("src.data_store.stores.store.RabbitMQApi.basic_ack",
                autospec=True)
    @mock.patch("src.data_store.stores.alert.AlertStore._process_redis_store",
                autospec=True)
    @mock.patch("src.data_store.stores.store.Store._send_heartbeat",
                autospec=True)
    @mock.patch.object(MongoApi, "update_one")
    def test_process_data_calls_mongo_correctly(self, mock_system_data,
                                                mock_update_one, mock_send_hb,
                                                mock_process_redis_store,
                                                mock_ack) -> None:
        """End-to-end through `_process_data`: the Mongo update_one call must
        receive the expected filter and update documents, with ack and
        heartbeat each issued once.

        Note: mock parameters are injected bottom-up from the patch
        decorators, after the parameterized fixture string.
        """

        mock_ack.return_value = None
        try:
            self.test_store._initialise_rabbitmq()

            # The parameterized value is an attribute path string, e.g.
            # "self.alert_data_1"; eval resolves it to the fixture dict.
            data = eval(mock_system_data)
            blocking_channel = self.test_store.rabbitmq.channel
            method_chains = pika.spec.Basic.Deliver(
                routing_key=ALERT_STORE_INPUT_ROUTING_KEY)

            properties = pika.spec.BasicProperties()
            self.test_store._process_data(blocking_channel, method_chains,
                                          properties,
                                          json.dumps(data).encode())

            mock_ack.assert_called_once()
            mock_send_hb.assert_called_once()

            # Expected Mongo call: filter on the parent document (capped at
            # 1000 alerts) and push the alert while updating the counters.
            call_1 = call(data['parent_id'], {
                'doc_type': 'alert',
                'n_alerts': {
                    '$lt': 1000
                }
            }, {
                '$push': {
                    'alerts': {
                        'origin': data['origin_id'],
                        'alert_name': data['alert_code']['name'],
                        'severity': data['severity'],
                        'metric': data['metric'],
                        'message': data['message'],
                        'timestamp': str(data['timestamp']),
                    }
                },
                '$min': {
                    'first': data['timestamp']
                },
                '$max': {
                    'last': data['timestamp']
                },
                '$inc': {
                    'n_alerts': 1
                },
            })
            mock_update_one.assert_has_calls([call_1])
            mock_process_redis_store.assert_called_once()
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @parameterized.expand([
        ("self.alert_data_1", ),
        ("self.alert_data_2", ),
        ("self.alert_data_3", ),
    ])
    @freeze_time("2012-01-01")
    @mock.patch("src.data_store.stores.store.RabbitMQApi.basic_ack",
                autospec=True)
    @mock.patch("src.data_store.stores.alert.AlertStore._process_mongo_store",
                autospec=True)
    @mock.patch("src.data_store.stores.store.Store._send_heartbeat",
                autospec=True)
    @mock.patch.object(RedisApi, "hset")
    def test_process_data_calls_redis_correctly(self, mock_system_data,
                                                mock_hset, mock_send_hb,
                                                mock_process_mongo_store,
                                                mock_ack) -> None:
        """End-to-end through `_process_data`: the Redis hset call must
        receive the parent hash, metric-specific alert key and serialized
        severity/message payload, with ack and heartbeat each issued once.

        Note: mock parameters are injected bottom-up from the patch
        decorators, after the parameterized fixture string.
        """

        mock_ack.return_value = None
        try:
            self.test_store._initialise_rabbitmq()

            # The parameterized value is an attribute path string, e.g.
            # "self.alert_data_1"; eval resolves it to the fixture dict.
            data = eval(mock_system_data)
            blocking_channel = self.test_store.rabbitmq.channel
            method_chains = pika.spec.Basic.Deliver(
                routing_key=ALERT_STORE_INPUT_ROUTING_KEY)

            properties = pika.spec.BasicProperties()
            self.test_store._process_data(blocking_channel, method_chains,
                                          properties,
                                          json.dumps(data).encode())

            mock_ack.assert_called_once()
            mock_send_hb.assert_called_once()

            metric_data = {
                'severity': data['severity'],
                'message': data['message']
            }
            key = data['origin_id']

            # The inner eval builds e.g. Keys.get_alert_system_is_down(key)
            # from the fixture's metric name.
            call_1 = call(
                Keys.get_hash_parent(data['parent_id']),
                eval('Keys.get_alert_{}(key)'.format(data['metric'])),
                json.dumps(metric_data))
            mock_hset.assert_has_calls([call_1])
            mock_process_mongo_store.assert_called_once()
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @parameterized.expand([
        ("alert_data_1", ),
        ("alert_data_2", ),
        ("alert_data_3", ),
    ])
    def test_process_mongo_store_mongo_stores_correctly(
            self, fixture_name) -> None:
        """Against a live Mongo, `_process_mongo_store` must persist all
        alert fields into the parent document's alerts array."""
        # Resolve the fixture by attribute name instead of eval()-ing the
        # parameterized string.
        data = getattr(self, fixture_name)
        self.test_store._process_mongo_store(data)

        documents = self.mongo.get_all(data['parent_id'])
        document = documents[0]
        expected = [
            'alert', 1,
            str(data['origin_id']),
            str(data['alert_code']['name']),
            str(data['severity']),
            str(data['metric']),
            str(data['message']),
            str(data['timestamp'])
        ]
        actual = [
            document['doc_type'], document['n_alerts'],
            document['alerts'][0]['origin'],
            document['alerts'][0]['alert_name'],
            document['alerts'][0]['severity'], document['alerts'][0]['metric'],
            document['alerts'][0]['message'],
            document['alerts'][0]['timestamp']
        ]

        self.assertListEqual(expected, actual)

    @parameterized.expand([
        ("self.alert_data_1", ),
        ("self.alert_data_2", ),
        ("self.alert_data_3", ),
    ])
    def test_process_redis_store_redis_stores_correctly(
            self, data_attr) -> None:
        """_process_redis_store must save severity/message under the alert key.

        `data_attr` is the attribute path of the alert fixture to use,
        e.g. "self.alert_data_1".
        """
        alert = eval(data_attr)
        self.test_store._process_redis_store(alert)

        # `key` must keep this name: the eval'd expression below refers to it.
        key = alert['origin_id']
        redis_key = eval('Keys.get_alert_{}(key)'.format(alert['metric']))
        stored_data = self.redis.hget(
            Keys.get_hash_parent(alert['parent_id']), redis_key)

        self.assertEqual(
            {'severity': alert['severity'], 'message': alert['message']},
            json.loads(stored_data))

    @parameterized.expand([
        ("self.alert_data_1", ),
        ("self.alert_data_2", ),
        ("self.alert_data_3", ),
    ])
    @mock.patch("src.data_store.stores.store.RabbitMQApi.basic_ack",
                autospec=True)
    @mock.patch("src.data_store.stores.alert.AlertStore._process_redis_store",
                autospec=True)
    @mock.patch("src.data_store.stores.store.Store._send_heartbeat",
                autospec=True)
    def test_process_data_results_stores_in_mongo_correctly(
            self, mock_system_data, mock_send_hb, mock_process_redis_store,
            mock_ack) -> None:
        """_process_data must persist the received alert to Mongo.

        The redis path, heartbeat and ack are mocked out; Mongo is real, so
        the stored document is fetched back and compared field by field.
        """
        mock_ack.return_value = None
        try:
            self.test_store._initialise_rabbitmq()

            # `mock_system_data` holds the attribute path of the alert
            # fixture, e.g. "self.alert_data_1".
            data = eval(mock_system_data)
            blocking_channel = self.test_store.rabbitmq.channel
            method_chains = pika.spec.Basic.Deliver(
                routing_key=ALERT_STORE_INPUT_ROUTING_KEY)

            properties = pika.spec.BasicProperties()
            self.test_store._process_data(blocking_channel, method_chains,
                                          properties,
                                          json.dumps(data).encode())

            # The alert must be stored exactly once and acknowledged.
            mock_process_redis_store.assert_called_once()
            mock_ack.assert_called_once()
            mock_send_hb.assert_called_once()

            documents = self.mongo.get_all(data['parent_id'])
            document = documents[0]
            expected = [
                'alert', 1,
                str(data['origin_id']),
                str(data['alert_code']['name']),
                str(data['severity']),
                str(data['message']),
                str(data['timestamp'])
            ]
            actual = [
                document['doc_type'], document['n_alerts'],
                document['alerts'][0]['origin'],
                document['alerts'][0]['alert_name'],
                document['alerts'][0]['severity'],
                document['alerts'][0]['message'],
                document['alerts'][0]['timestamp']
            ]

            self.assertListEqual(expected, actual)
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @parameterized.expand([
        ("self.alert_data_1", ),
        ("self.alert_data_2", ),
        ("self.alert_data_3", ),
    ])
    @mock.patch("src.data_store.stores.store.RabbitMQApi.basic_ack",
                autospec=True)
    @mock.patch("src.data_store.stores.alert.AlertStore._process_mongo_store",
                autospec=True)
    @mock.patch("src.data_store.stores.store.Store._send_heartbeat",
                autospec=True)
    def test_process_data_results_stores_in_redis_correctly(
            self, mock_system_data, mock_send_hb, mock_process_mongo_store,
            mock_ack) -> None:
        """_process_data must persist the received alert to Redis.

        The mongo path, heartbeat and ack are mocked out; Redis is real, so
        the stored hash entry is fetched back and compared.
        """
        mock_ack.return_value = None
        try:
            self.test_store._initialise_rabbitmq()

            # `mock_system_data` holds the attribute path of the alert
            # fixture, e.g. "self.alert_data_1".
            data = eval(mock_system_data)
            blocking_channel = self.test_store.rabbitmq.channel
            method_chains = pika.spec.Basic.Deliver(
                routing_key=ALERT_STORE_INPUT_ROUTING_KEY)

            properties = pika.spec.BasicProperties()
            self.test_store._process_data(blocking_channel, method_chains,
                                          properties,
                                          json.dumps(data).encode())

            mock_process_mongo_store.assert_called_once()
            mock_ack.assert_called_once()
            mock_send_hb.assert_called_once()

            # `key` must keep this name: the eval'd expression below uses it.
            key = data['origin_id']

            stored_data = self.redis.hget(
                Keys.get_hash_parent(data['parent_id']),
                eval('Keys.get_alert_{}(key)'.format(data['metric'])))

            expected_data = {
                'severity': data['severity'],
                'message': data['message']
            }

            self.assertEqual(expected_data, json.loads(stored_data))
        except Exception as e:
            self.fail("Test failed: {}".format(e))
# Ejemplo n.º 10
# 0
class TestGithubAlertersManager(unittest.TestCase):
    def setUp(self) -> None:
        """Build a fresh manager plus rabbit and process fixtures per test."""
        self.dummy_logger = logging.getLogger('Dummy')
        self.dummy_logger.disabled = True
        self.connection_check_time_interval = timedelta(seconds=0)

        def _rabbit_api() -> RabbitMQApi:
            # Both API instances share the logger and a zero check interval.
            return RabbitMQApi(
                self.dummy_logger, env.RABBIT_IP,
                connection_check_time_interval=self
                .connection_check_time_interval)

        self.rabbitmq = _rabbit_api()
        self.test_rabbit_manager = _rabbit_api()

        self.manager_name = 'test_github_alerters_manager'
        self.test_queue_name = 'Test Queue'
        self.test_data_str = 'test data'
        self.test_heartbeat = {
            'component_name': self.manager_name,
            'is_alive': True,
            'timestamp': datetime(2012, 1, 1).timestamp(),
        }
        self.github_alerter_name = GITHUB_ALERTER_NAME

        # Three daemonised placeholder processes used by individual tests.
        placeholders = []
        for _ in range(3):
            proc = Process(target=infinite_fn, args=())
            proc.daemon = True
            placeholders.append(proc)
        (self.dummy_process1, self.dummy_process2,
         self.dummy_process3) = placeholders

        self.test_manager = GithubAlerterManager(
            self.dummy_logger, self.manager_name, self.rabbitmq)
        self.test_exception = PANICException('test_exception', 1)

    def tearDown(self) -> None:
        """Best-effort removal of shared queues/exchanges, then drop fixtures.

        Cleanup failures are printed, never raised, so they cannot mask the
        outcome of the test that just ran.
        """
        try:
            self.test_rabbit_manager.connect()
            self.test_manager.rabbitmq.connect()
            # Declare each queue in case it hasn't been declared already,
            # then purge and delete it.
            for queue in (self.test_queue_name, GITHUB_MANAGER_INPUT_QUEUE):
                self.test_manager.rabbitmq.queue_declare(
                    queue=queue, durable=True, exclusive=False,
                    auto_delete=False, passive=False
                )
                self.test_manager.rabbitmq.queue_purge(queue)
                self.test_manager.rabbitmq.queue_delete(queue)
            self.test_manager.rabbitmq.exchange_delete(HEALTH_CHECK_EXCHANGE)
            self.test_manager.rabbitmq.disconnect()
            self.test_rabbit_manager.disconnect()
        except Exception as e:
            # Previously printed "Test failed", which was misleading: an
            # exception here is a cleanup failure, not a test failure.
            print("Error in tearDown cleanup: {}".format(e))

        self.dummy_logger = None
        self.rabbitmq = None
        self.test_manager = None
        self.test_exception = None
        self.test_rabbit_manager = None

        self.dummy_process1 = None
        self.dummy_process2 = None
        self.dummy_process3 = None

    def test_str_returns_manager_name(self) -> None:
        """str(manager) must be the name it was constructed with."""
        self.assertEqual(self.manager_name, str(self.test_manager))

    def test_name_returns_manager_name(self) -> None:
        """The name property must expose the constructor-supplied name."""
        expected_name = self.manager_name
        self.assertEqual(expected_name, self.test_manager.name)

    @mock.patch.object(RabbitMQApi, "start_consuming")
    def test_listen_for_data_calls_start_consuming(
            self, mock_start_consuming) -> None:
        """_listen_for_data must delegate to RabbitMQApi.start_consuming."""
        mock_start_consuming.return_value = None
        self.test_manager._listen_for_data()
        mock_start_consuming.assert_called_once()

    @mock.patch.object(GithubAlerterManager, "_process_ping")
    def test_initialise_rabbitmq_initialises_everything_as_expected(
            self, mock_process_ping) -> None:
        """_initialise_rabbitmq must open the connection, enable delivery
        confirmation and declare/bind the manager's exchange and queue.
        """
        mock_process_ping.return_value = None
        try:
            self.test_rabbit_manager.connect()
            # To make sure that there is no connection/channel already
            # established
            self.assertIsNone(self.rabbitmq.connection)
            self.assertIsNone(self.rabbitmq.channel)

            # To make sure that the exchanges and queues have not already been
            # declared
            self.test_manager.rabbitmq.connect()
            self.test_manager.rabbitmq.queue_delete(GITHUB_MANAGER_INPUT_QUEUE)
            self.test_manager.rabbitmq.exchange_delete(HEALTH_CHECK_EXCHANGE)

            self.test_manager._initialise_rabbitmq()

            # Perform checks that the connection has been opened, marked as open
            # and that the delivery confirmation variable is set.
            self.assertTrue(self.test_manager.rabbitmq.is_connected)
            self.assertTrue(self.test_manager.rabbitmq.connection.is_open)
            self.assertTrue(
                self.test_manager.rabbitmq.channel._delivery_confirmation)

            # Check whether the exchanges and queues have been created by
            # sending messages with the same routing keys as for the queues. We
            # will also check if the size of the queues is 0 to confirm that
            # basic_consume was called (it will store the msg in the component
            # memory immediately). If one of the exchanges or queues is not
            # created, then either an exception will be thrown or the queue size
            # would be 1. Note when deleting the exchanges in the beginning we
            # also released every binding, hence there is no other queue bound
            # with the same routing key to any exchange at this point.
            self.test_rabbit_manager.basic_publish_confirm(
                exchange=HEALTH_CHECK_EXCHANGE,
                routing_key=GITHUB_MANAGER_INPUT_ROUTING_KEY,
                body=self.test_data_str, is_body_dict=False,
                properties=pika.BasicProperties(delivery_mode=2),
                mandatory=True)

            # Re-declare queue to get the number of messages
            res = self.test_rabbit_manager.queue_declare(
                GITHUB_MANAGER_INPUT_QUEUE, False, True, False, False)
            self.assertEqual(0, res.method.message_count)
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    def test_send_heartbeat_sends_a_heartbeat_correctly(self) -> None:
        """_send_heartbeat must publish the heartbeat dict to the health-check
        exchange with the 'heartbeat.manager' routing key.
        """
        # This test creates a queue which receives messages with the same
        # routing key as the ones sent by send_heartbeat, and checks that the
        # heartbeat is received
        try:
            self.test_manager._initialise_rabbitmq()
            self.test_rabbit_manager.connect()

            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=False
            )
            self.assertEqual(0, res.method.message_count)
            self.test_manager.rabbitmq.queue_bind(
                queue=self.test_queue_name, exchange=HEALTH_CHECK_EXCHANGE,
                routing_key='heartbeat.manager')
            self.test_manager._send_heartbeat(self.test_heartbeat)

            # By re-declaring the queue again we can get the number of messages
            # in the queue.
            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=True
            )
            self.assertEqual(1, res.method.message_count)

            # Check that the message received is actually the HB
            _, _, body = self.test_rabbit_manager.basic_get(
                self.test_queue_name)
            self.assertEqual(self.test_heartbeat, json.loads(body))
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @mock.patch.object(multiprocessing.Process, "start")
    def test_create_and_start_alerter_process_creates_the_correct_process(
            self, mock_start) -> None:
        """The manager must register a daemonised, arg-less alerter process
        targeting start_github_alerter.
        """
        mock_start.return_value = None

        self.test_manager._start_alerters_processes()

        registered = self.test_manager.alerter_process_dict[
            GITHUB_ALERTER_NAME]

        # NOTE(review): inspects Process internals (_args/_target) since the
        # public API does not expose them.
        self.assertTrue(registered.daemon)
        self.assertEqual(0, len(registered._args))
        self.assertEqual(start_github_alerter, registered._target)

    @mock.patch("src.alerter.alerter_starters.create_logger")
    def test_start_alerters_process_starts_the_process(
            self, mock_create_logger) -> None:
        """_start_alerters_processes must leave a live alerter process."""
        mock_create_logger.return_value = self.dummy_logger
        self.test_manager._start_alerters_processes()

        # Give the alerter time to initialise, otherwise the process would
        # not terminate.
        time.sleep(1)

        alerter_process = self.test_manager.alerter_process_dict[
            GITHUB_ALERTER_NAME]
        self.assertTrue(alerter_process.is_alive())

        alerter_process.terminate()
        alerter_process.join()

    @freeze_time("2012-01-01")
    @mock.patch.object(RabbitMQApi, "basic_ack")
    @mock.patch.object(multiprocessing.Process, "is_alive")
    @mock.patch.object(multiprocessing.Process, "start")
    @mock.patch.object(multiprocessing.Process, "join")
    @mock.patch.object(multiprocessing.Process, "terminate")
    def test_process_ping_sends_a_valid_hb_if_process_is_alive(
            self, mock_terminate, mock_join, mock_start, mock_is_alive,
            mock_ack) -> None:
        """A ping while the alerter is alive must produce a heartbeat listing
        the alerter under running_processes and none under dead_processes.
        """
        # This test creates a queue which receives messages with the same
        # routing key as the ones sent by send_heartbeat, and checks that the
        # received heartbeat is valid.
        mock_ack.return_value = None
        mock_is_alive.return_value = True
        mock_start.return_value = None
        mock_join.return_value = None
        mock_terminate.return_value = None
        try:
            self.test_manager._initialise_rabbitmq()
            self.test_manager._start_alerters_processes()

            # Delete the queue before to avoid messages in the queue on error.
            self.test_manager.rabbitmq.queue_delete(self.test_queue_name)

            # initialise
            blocking_channel = self.test_manager.rabbitmq.channel
            properties = pika.spec.BasicProperties()
            method_hb = pika.spec.Basic.Deliver(routing_key='heartbeat.manager')
            body = 'ping'
            res = self.test_manager.rabbitmq.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=False
            )
            self.assertEqual(0, res.method.message_count)
            self.test_manager.rabbitmq.queue_bind(
                queue=self.test_queue_name, exchange=HEALTH_CHECK_EXCHANGE,
                routing_key='heartbeat.manager')
            self.test_manager._process_ping(blocking_channel, method_hb,
                                            properties, body)

            # By re-declaring the queue again we can get the number of messages
            # in the queue.
            res = self.test_manager.rabbitmq.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=True
            )
            self.assertEqual(1, res.method.message_count)
            # Timestamp is deterministic thanks to freeze_time above.
            expected_output = {
                "component_name": self.manager_name,
                "dead_processes": [],
                "running_processes": [GITHUB_ALERTER_NAME],
                "timestamp": datetime(2012, 1, 1).timestamp()
            }
            # Check that the message received is a valid HB
            _, _, body = self.test_manager.rabbitmq.basic_get(
                self.test_queue_name)
            self.assertEqual(expected_output, json.loads(body))
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @freeze_time("2012-01-01")
    @mock.patch.object(RabbitMQApi, "basic_ack")
    @mock.patch.object(multiprocessing.Process, "is_alive")
    @mock.patch.object(multiprocessing.Process, "start")
    @mock.patch.object(multiprocessing.Process, "join")
    @mock.patch.object(multiprocessing.Process, "terminate")
    def test_process_ping_sends_a_valid_hb_if_process_is_dead(
            self, mock_terminate, mock_join, mock_start, mock_is_alive,
            mock_ack) -> None:
        """A ping while the alerter is dead must produce a heartbeat listing
        the alerter under dead_processes and none under running_processes.
        """
        # This test creates a queue which receives messages with the same
        # routing key as the ones sent by send_heartbeat, and checks that the
        # received heartbeat is valid.
        mock_ack.return_value = None
        mock_is_alive.return_value = False
        mock_start.return_value = None
        mock_join.return_value = None
        mock_terminate.return_value = None
        try:
            self.test_manager._initialise_rabbitmq()
            self.test_manager._start_alerters_processes()

            # Delete the queue before to avoid messages in the queue on error.
            self.test_manager.rabbitmq.queue_delete(self.test_queue_name)

            # initialise
            blocking_channel = self.test_manager.rabbitmq.channel
            properties = pika.spec.BasicProperties()
            method_hb = pika.spec.Basic.Deliver(routing_key='heartbeat.manager')
            body = 'ping'
            res = self.test_manager.rabbitmq.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=False
            )
            self.assertEqual(0, res.method.message_count)
            self.test_manager.rabbitmq.queue_bind(
                queue=self.test_queue_name, exchange=HEALTH_CHECK_EXCHANGE,
                routing_key='heartbeat.manager')
            self.test_manager._process_ping(blocking_channel, method_hb,
                                            properties, body)

            # By re-declaring the queue again we can get the number of messages
            # in the queue.
            res = self.test_manager.rabbitmq.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=True
            )
            self.assertEqual(1, res.method.message_count)
            # Timestamp is deterministic thanks to freeze_time above.
            expected_output = {
                "component_name": self.manager_name,
                "dead_processes": [GITHUB_ALERTER_NAME],
                "running_processes": [],
                "timestamp": datetime(2012, 1, 1).timestamp()
            }
            # Check that the message received is a valid HB
            _, _, body = self.test_manager.rabbitmq.basic_get(
                self.test_queue_name)
            self.assertEqual(expected_output, json.loads(body))
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @freeze_time("2012-01-01")
    @mock.patch.object(RabbitMQApi, "basic_ack")
    @mock.patch("src.alerter.alerter_starters.create_logger")
    @mock.patch.object(GithubAlerterManager, "_send_heartbeat")
    def test_process_ping_restarts_dead_processes(
            self, send_hb_mock, mock_create_logger, mock_ack) -> None:
        """_process_ping must restart an alerter process it finds dead."""
        send_hb_mock.return_value = None
        mock_create_logger.return_value = self.dummy_logger
        mock_ack.return_value = None
        try:
            self.test_manager._initialise_rabbitmq()
            self.test_manager._start_alerters_processes()

            # Give time for the processes to start
            time.sleep(1)

            # Automate the case when having all processes dead
            self.test_manager.alerter_process_dict[
                GITHUB_ALERTER_NAME].terminate()
            self.test_manager.alerter_process_dict[GITHUB_ALERTER_NAME].join()

            # Give time for the processes to terminate
            time.sleep(1)

            # Check that that the processes have terminated
            self.assertFalse(self.test_manager.alerter_process_dict[
                                 GITHUB_ALERTER_NAME].is_alive())

            # initialise
            blocking_channel = self.test_manager.rabbitmq.channel
            properties = pika.spec.BasicProperties()
            method_hb = pika.spec.Basic.Deliver(routing_key='heartbeat.manager')
            body = 'ping'
            self.test_manager._process_ping(blocking_channel, method_hb,
                                            properties, body)

            # Give time for the processes to start
            time.sleep(1)

            # The dead process must have been replaced by a live one.
            self.assertTrue(self.test_manager.alerter_process_dict[
                                GITHUB_ALERTER_NAME].is_alive())

            # Clean before test finishes
            self.test_manager.alerter_process_dict[
                GITHUB_ALERTER_NAME].terminate()
            self.test_manager.alerter_process_dict[GITHUB_ALERTER_NAME].join()
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @mock.patch.object(multiprocessing.Process, "is_alive")
    @mock.patch.object(multiprocessing.Process, "start")
    @mock.patch.object(multiprocessing, 'Process')
    def test_process_ping_does_not_send_hb_if_processing_fails(
            self, mock_process, mock_start, is_alive_mock) -> None:
        """No heartbeat may be published when ping processing raises."""
        # This test creates a queue which receives messages with the same
        # routing key as the ones sent by send_heartbeat. In this test we will
        # check that no heartbeat is sent when mocking a raised exception.
        is_alive_mock.side_effect = self.test_exception
        mock_start.return_value = None
        # NOTE(review): side_effect is set to a Process instance, not a
        # callable/exception — presumably just a stand-in; verify intent.
        mock_process.side_effect = self.dummy_process1
        try:
            self.test_manager._initialise_rabbitmq()
            self.test_manager._start_alerters_processes()

            # NOTE(review): this declare is immediately undone by the delete
            # below; the queue is re-declared afterwards anyway.
            self.test_manager.rabbitmq.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=False
            )

            # Delete the queue before to avoid messages in the queue on error.
            self.test_manager.rabbitmq.queue_delete(self.test_queue_name)

            # initialise
            blocking_channel = self.test_manager.rabbitmq.channel
            method = pika.spec.Basic.Deliver(routing_key='heartbeat.manager')
            properties = pika.spec.BasicProperties()
            body = 'ping'
            res = self.test_manager.rabbitmq.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=False
            )
            self.assertEqual(0, res.method.message_count)
            self.test_manager.rabbitmq.queue_bind(
                queue=self.test_queue_name, exchange=HEALTH_CHECK_EXCHANGE,
                routing_key='heartbeat.manager')
            self.test_manager._process_ping(blocking_channel, method,
                                            properties, body)

            # By re-declaring the queue again we can get the number of messages
            # in the queue.
            res = self.test_manager.rabbitmq.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=True
            )
            # Still empty: processing failed, so no heartbeat was routed here.
            self.assertEqual(0, res.method.message_count)
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    def test_proc_ping_send_hb_does_not_raise_msg_not_del_exce_if_hb_not_routed(
            self) -> None:
        """A heartbeat that cannot be routed must not raise from
        _process_ping.
        """
        try:
            self.test_manager._initialise_rabbitmq()
            self.test_manager._start_alerters_processes()

            # Deliberately no queue is bound to the heartbeat routing key.
            channel = self.test_manager.rabbitmq.channel
            deliver_method = pika.spec.Basic.Deliver(
                routing_key='heartbeat.manager')
            self.test_manager._process_ping(
                channel, deliver_method, pika.spec.BasicProperties(), 'ping')
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @parameterized.expand([
        ("pika.exceptions.AMQPChannelError('test')",
         "pika.exceptions.AMQPChannelError"),
        ("self.test_exception", "PANICException"),
    ])
    @mock.patch.object(GithubAlerterManager, "_send_heartbeat")
    def test_process_ping_send_hb_raises_exceptions(
            self, param_input, param_expected, hb_mock) -> None:
        """Exceptions raised while sending the heartbeat must propagate out
        of _process_ping. Both params are expressions evaluated with `self`
        in scope.
        """
        hb_mock.side_effect = eval(param_input)
        try:
            self.test_manager._initialise_rabbitmq()

            channel = self.test_manager.rabbitmq.channel
            deliver_method = pika.spec.Basic.Deliver(
                routing_key='heartbeat.manager')
            properties = pika.spec.BasicProperties()

            self.assertRaises(
                eval(param_expected), self.test_manager._process_ping,
                channel, deliver_method, properties, 'ping')
        except Exception as e:
            self.fail("Test failed: {}".format(e))