def setUp(self) -> None:
    """Build the fixtures used by the GitHub alerters manager tests."""
    self.dummy_logger = logging.getLogger('Dummy')
    self.dummy_logger.disabled = True
    self.connection_check_time_interval = timedelta(seconds=0)
    self.rabbitmq = RabbitMQApi(
        self.dummy_logger, env.RABBIT_IP,
        connection_check_time_interval=self.connection_check_time_interval)
    self.manager_name = 'test_github_alerters_manager'
    self.test_queue_name = 'Test Queue'
    self.test_data_str = 'test data'
    self.test_heartbeat = {
        'component_name': self.manager_name,
        'is_alive': True,
        'timestamp': datetime(2012, 1, 1).timestamp(),
    }
    self.github_alerter_name = GITHUB_ALERTER_NAME

    # Dummy daemon processes which can be safely started/terminated by tests.
    self.dummy_process1 = Process(target=infinite_fn, args=())
    self.dummy_process1.daemon = True
    self.dummy_process2 = Process(target=infinite_fn, args=())
    self.dummy_process2.daemon = True
    self.dummy_process3 = Process(target=infinite_fn, args=())
    self.dummy_process3.daemon = True

    # A separate rabbit interface so tests can consume what the manager sends.
    self.test_rabbit_manager = RabbitMQApi(
        self.dummy_logger, env.RABBIT_IP,
        connection_check_time_interval=self.connection_check_time_interval)
    self.test_manager = GithubAlerterManager(
        self.dummy_logger, self.manager_name, self.rabbitmq)
    self.test_exception = PANICException('test_exception', 1)
def setUp(self) -> None:
    """Build the fixtures used by the data transformers manager tests."""
    self.dummy_logger = logging.getLogger('Dummy')
    self.dummy_logger.disabled = True
    self.connection_check_time_interval = timedelta(seconds=0)
    self.rabbit_ip = env.RABBIT_IP
    self.rabbitmq = RabbitMQApi(
        self.dummy_logger, self.rabbit_ip,
        connection_check_time_interval=self.connection_check_time_interval)
    self.manager_name = 'test_data_transformers_manager'
    self.test_queue_name = 'Test Queue'
    self.test_data_str = 'test data'
    self.test_timestamp = datetime(2012, 1, 1).timestamp()
    self.test_heartbeat = {
        'component_name': 'Test Component',
        'is_alive': True,
        'timestamp': self.test_timestamp,
    }

    # Dummy daemon processes standing in for the transformer processes.
    self.dummy_process1 = Process(target=infinite_fn, args=())
    self.dummy_process1.daemon = True
    self.dummy_process2 = Process(target=infinite_fn, args=())
    self.dummy_process2.daemon = True
    self.dummy_process3 = Process(target=infinite_fn, args=())
    self.dummy_process3.daemon = True
    self.transformer_process_dict_example = {
        SYSTEM_DATA_TRANSFORMER_NAME: self.dummy_process1,
        GITHUB_DATA_TRANSFORMER_NAME: self.dummy_process2,
    }
    self.test_manager = DataTransformersManager(
        self.dummy_logger, self.manager_name, self.rabbitmq)
    self.test_exception = PANICException('test_exception', 1)
def setUp(self) -> None:
    """Build the fixtures used by the GitHub monitor tests."""
    self.dummy_logger = logging.getLogger('Dummy')
    self.dummy_logger.disabled = True
    self.connection_check_time_interval = timedelta(seconds=0)
    self.rabbit_ip = env.RABBIT_IP
    self.rabbitmq = RabbitMQApi(
        self.dummy_logger, self.rabbit_ip,
        connection_check_time_interval=self.connection_check_time_interval)
    self.monitor_name = 'test_monitor'
    self.monitoring_period = 10
    self.repo_id = 'test_repo_id'
    self.parent_id = 'test_parent_id'
    self.repo_name = 'test_repo'
    self.monitor_repo = True
    self.releases_page = 'test_url'
    self.routing_key = 'test_routing_key'
    self.test_data_str = 'test data'
    self.test_data_dict = {
        'test_key_1': 'test_val_1',
        'test_key_2': 'test_val_2',
    }
    self.test_heartbeat = {
        'component_name': 'Test Component',
        'is_alive': True,
        'timestamp': datetime(2012, 1, 1).timestamp(),
    }
    self.test_queue_name = 'Test Queue'

    # In the real retrieved data there are more fields, but these are the
    # only ones that interest us so far.
    self.retrieved_metrics_example = [
        {
            'name': 'First Release 😮',
            'tag_name': 'v1.0.0'
        },
        {
            'name': 'Release Candidate 1',
            'tag_name': 'v0.1.0'
        },
    ]
    self.processed_data_example = {
        '0': {
            'release_name': 'First Release 😮',
            'tag_name': 'v1.0.0'
        },
        '1': {
            'release_name': 'Release Candidate 1',
            'tag_name': 'v0.1.0'
        },
    }
    self.test_exception = PANICException('test_exception', 1)
    self.repo_config = RepoConfig(self.repo_id, self.parent_id,
                                  self.repo_name, self.monitor_repo,
                                  self.releases_page)
    self.test_monitor = GitHubMonitor(self.monitor_name, self.repo_config,
                                      self.dummy_logger,
                                      self.monitoring_period, self.rabbitmq)
def setUp(self) -> None:
    """Build the fixtures for the store manager tests and pre-declare the
    rabbit exchanges/queues the tests rely on."""
    self.dummy_logger = logging.getLogger('Dummy')
    self.dummy_logger.disabled = True
    self.connection_check_time_interval = timedelta(seconds=0)
    self.rabbitmq = RabbitMQApi(
        self.dummy_logger, env.RABBIT_IP,
        connection_check_time_interval=self.connection_check_time_interval)
    self.test_rabbit_manager = RabbitMQApi(
        self.dummy_logger, env.RABBIT_IP,
        connection_check_time_interval=self.connection_check_time_interval)
    self.manager_name = 'test_store_manager'
    self.routing_key = 'heartbeat.manager'
    self.test_queue_name = 'test queue'
    self.test_store_manager = StoreManager(self.dummy_logger,
                                           self.manager_name, self.rabbitmq)

    # Adding dummy process
    self.dummy_process = Process(target=infinite_fn, args=())
    self.dummy_process.daemon = True

    # Declare the exchanges/queues and bindings used throughout the tests.
    connect_to_rabbit(self.rabbitmq)
    connect_to_rabbit(self.test_rabbit_manager)
    self.rabbitmq.exchange_declare(HEALTH_CHECK_EXCHANGE, 'topic', False,
                                   True, False, False)
    self.rabbitmq.queue_declare(DATA_STORE_MAN_INPUT_QUEUE, False, True,
                                False, False)
    self.test_rabbit_manager.queue_declare(self.test_queue_name, False, True,
                                           False, False)
    self.rabbitmq.queue_bind(DATA_STORE_MAN_INPUT_QUEUE,
                             HEALTH_CHECK_EXCHANGE,
                             DATA_STORE_MAN_INPUT_ROUTING_KEY)
    self.test_data_str = 'test data'
    self.test_heartbeat = {
        'component_name': self.manager_name,
        'is_alive': True,
        'timestamp': datetime(2012, 1, 1).timestamp(),
    }
    self.test_exception = PANICException('test_exception', 1)
class TestOpsgenieAlertsHandler(unittest.TestCase):
    """Tests for the Opsgenie alerts handler: queue management, rabbit
    initialisation, alert processing, heartbeats and alert sending."""

    def setUp(self) -> None:
        self.test_handler_name = 'test_opsgenie_alerts_handler'
        self.dummy_logger = logging.getLogger('Dummy')
        self.dummy_logger.disabled = True
        self.connection_check_time_interval = timedelta(seconds=0)
        self.rabbit_ip = env.RABBIT_IP
        self.rabbitmq = RabbitMQApi(
            self.dummy_logger, self.rabbit_ip,
            connection_check_time_interval=self.connection_check_time_interval)
        self.test_api_key = 'test api key'
        self.test_channel_name = 'test_opgenie_channel'
        self.test_channel_id = 'test_opsgenie_id12345'
        self.test_channel_logger = self.dummy_logger.getChild('dummy_channel')
        self.test_api = OpsgenieApi(self.test_api_key, True)
        self.test_channel = OpsgenieChannel(self.test_channel_name,
                                            self.test_channel_id,
                                            self.test_channel_logger,
                                            self.test_api)
        self.test_queue_size = 1000
        self.test_max_attempts = 5
        self.test_alert_validity_threshold = 300
        self.test_opsgenie_alerts_handler = OpsgenieAlertsHandler(
            self.test_handler_name, self.dummy_logger, self.rabbitmq,
            self.test_channel, self.test_queue_size, self.test_max_attempts,
            self.test_alert_validity_threshold)
        self.test_data_str = "this is a test string"
        self.test_rabbit_queue_name = 'Test Queue'
        self.test_timestamp = 45676565.556
        self.test_heartbeat = {
            'component_name': 'Test Component',
            'is_alive': True,
            'timestamp': self.test_timestamp,
        }
        self.test_system_name = 'test_system'
        self.test_percentage_usage = 50
        self.test_panic_severity = 'WARNING'
        self.test_parent_id = 'parent_1234'
        self.test_system_id = 'system_id32423'
        self.test_alert = OpenFileDescriptorsIncreasedAboveThresholdAlert(
            self.test_system_name, self.test_percentage_usage,
            self.test_panic_severity, self.test_timestamp,
            self.test_panic_severity, self.test_parent_id,
            self.test_system_id)
        self.test_alerts_queue = Queue(self.test_queue_size)

    def tearDown(self) -> None:
        # Delete any queues and exchanges which are common across many tests
        connect_to_rabbit(self.test_opsgenie_alerts_handler.rabbitmq)
        delete_queue_if_exists(self.test_opsgenie_alerts_handler.rabbitmq,
                               self.test_rabbit_queue_name)
        delete_queue_if_exists(
            self.test_opsgenie_alerts_handler.rabbitmq,
            self.test_opsgenie_alerts_handler._opsgenie_alerts_handler_queue)
        delete_exchange_if_exists(self.test_opsgenie_alerts_handler.rabbitmq,
                                  HEALTH_CHECK_EXCHANGE)
        delete_exchange_if_exists(self.test_opsgenie_alerts_handler.rabbitmq,
                                  ALERT_EXCHANGE)
        disconnect_from_rabbit(self.test_opsgenie_alerts_handler.rabbitmq)

        self.dummy_logger = None
        self.test_channel_logger = None
        self.rabbitmq = None
        self.test_alert = None
        self.test_channel = None
        self.test_api = None
        self.test_opsgenie_alerts_handler = None
        self.test_alerts_queue = None

    def test__str__returns_handler_name(self) -> None:
        self.assertEqual(self.test_handler_name,
                         str(self.test_opsgenie_alerts_handler))

    def test_handler_name_returns_handler_name(self) -> None:
        self.assertEqual(self.test_handler_name,
                         self.test_opsgenie_alerts_handler.handler_name)

    @mock.patch.object(RabbitMQApi, "start_consuming")
    def test_listen_for_data_calls_start_consuming(
            self, mock_start_consuming) -> None:
        mock_start_consuming.return_value = None
        self.test_opsgenie_alerts_handler._listen_for_data()
        mock_start_consuming.assert_called_once_with()

    def test_opsgenie_channel_returns_associated_opsgenie_channel(
            self) -> None:
        self.assertEqual(self.test_channel,
                         self.test_opsgenie_alerts_handler.opsgenie_channel)

    def test_alerts_queue_returns_the_alerts_queue(self) -> None:
        self.test_opsgenie_alerts_handler._alerts_queue = \
            self.test_alerts_queue
        self.assertEqual(self.test_alerts_queue,
                         self.test_opsgenie_alerts_handler.alerts_queue)

    def test_init_initialises_handler_correctly(self) -> None:
        # In this test we will check that all fields that do not have a getter
        # were initialised correctly, as the previous tests test the getters.
        self.assertEqual(
            self.test_queue_size,
            self.test_opsgenie_alerts_handler.alerts_queue.maxsize)
        self.assertEqual(self.test_max_attempts,
                         self.test_opsgenie_alerts_handler._max_attempts)
        self.assertEqual(
            self.test_alert_validity_threshold,
            self.test_opsgenie_alerts_handler._alert_validity_threshold)
        self.assertEqual(
            'opsgenie_{}_alerts_handler_queue'.format(self.test_channel_id),
            self.test_opsgenie_alerts_handler._opsgenie_alerts_handler_queue)
        self.assertEqual(
            'channel.{}'.format(self.test_channel_id),
            self.test_opsgenie_alerts_handler._opsgenie_channel_routing_key)

    @mock.patch.object(RabbitMQApi, "basic_qos")
    def test_initialise_rabbitmq_initialises_rabbit_correctly(
            self, mock_basic_qos) -> None:
        try:
            # To make sure that there is no connection/channel already
            # established
            self.assertIsNone(self.rabbitmq.connection)
            self.assertIsNone(self.rabbitmq.channel)

            # To make sure that the exchanges and queues have not already been
            # declared
            connect_to_rabbit(self.rabbitmq)
            self.test_opsgenie_alerts_handler.rabbitmq.queue_delete(
                self.test_opsgenie_alerts_handler.
                _opsgenie_alerts_handler_queue)
            self.test_opsgenie_alerts_handler.rabbitmq.exchange_delete(
                HEALTH_CHECK_EXCHANGE)
            self.test_opsgenie_alerts_handler.rabbitmq.exchange_delete(
                ALERT_EXCHANGE)
            disconnect_from_rabbit(self.rabbitmq)

            self.test_opsgenie_alerts_handler._initialise_rabbitmq()

            # Perform checks that the connection has been opened and marked as
            # open, that the delivery confirmation variable is set and
            # basic_qos called successfully.
            self.assertTrue(
                self.test_opsgenie_alerts_handler.rabbitmq.is_connected)
            self.assertTrue(
                self.test_opsgenie_alerts_handler.rabbitmq.connection.is_open)
            self.assertTrue(
                self.test_opsgenie_alerts_handler.rabbitmq.channel.
                _delivery_confirmation)
            mock_basic_qos.assert_called_once_with(
                prefetch_count=self.test_queue_size / 5)

            # Check whether the producing exchanges have been created by
            # using passive=True. If this check fails an exception is raised
            # automatically.
            self.test_opsgenie_alerts_handler.rabbitmq.exchange_declare(
                HEALTH_CHECK_EXCHANGE, passive=True)

            # Check whether the consuming exchanges and queues have been
            # creating by sending messages with the same routing keys as for
            # the bindings. We will also check if the size of the queues is 0
            # to confirm that basic_consume was called (it will store the msg
            # in the component memory immediately). If one of the exchanges or
            # queues is not created or basic_consume is not called, then
            # either an exception will be thrown or the queue size would be 1
            # respectively. Note when deleting the exchanges in the beginning
            # we also released every binding, hence there are no other queue
            # binded with the same routing key to any exchange at this point.
            self.test_opsgenie_alerts_handler.rabbitmq.basic_publish_confirm(
                exchange=ALERT_EXCHANGE,
                routing_key=self.test_opsgenie_alerts_handler.
                _opsgenie_channel_routing_key,
                body=self.test_data_str, is_body_dict=False,
                properties=pika.BasicProperties(delivery_mode=2),
                mandatory=True)

            # Re-declare queue to get the number of messages
            res = self.test_opsgenie_alerts_handler.rabbitmq.queue_declare(
                self.test_opsgenie_alerts_handler.
                _opsgenie_alerts_handler_queue, False, True, False, False)
            self.assertEqual(0, res.method.message_count)
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    def test_send_heartbeat_sends_a_heartbeat_correctly(self) -> None:
        # This test creates a queue which receives messages with the same
        # routing key as the ones set by send_heartbeat, and checks that the
        # heartbeat is received
        try:
            self.test_opsgenie_alerts_handler._initialise_rabbitmq()

            # Delete the queue before to avoid messages in the queue on error.
            self.test_opsgenie_alerts_handler.rabbitmq.queue_delete(
                self.test_rabbit_queue_name)
            res = self.test_opsgenie_alerts_handler.rabbitmq.queue_declare(
                queue=self.test_rabbit_queue_name, durable=True,
                exclusive=False, auto_delete=False, passive=False)
            self.assertEqual(0, res.method.message_count)
            self.test_opsgenie_alerts_handler.rabbitmq.queue_bind(
                queue=self.test_rabbit_queue_name,
                exchange=HEALTH_CHECK_EXCHANGE,
                routing_key='heartbeat.worker')
            self.test_opsgenie_alerts_handler._send_heartbeat(
                self.test_heartbeat)

            # By re-declaring the queue again we can get the number of
            # messages in the queue.
            res = self.test_opsgenie_alerts_handler.rabbitmq.queue_declare(
                queue=self.test_rabbit_queue_name, durable=True,
                exclusive=False, auto_delete=False, passive=True)
            self.assertEqual(1, res.method.message_count)

            # Check that the message received is actually the HB
            _, _, body = self.test_opsgenie_alerts_handler.rabbitmq.basic_get(
                self.test_rabbit_queue_name)
            self.assertEqual(self.test_heartbeat, json.loads(body))
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @mock.patch.object(Queue, "empty")
    @mock.patch.object(OpsgenieAlertsHandler, "_send_alerts")
    @mock.patch.object(OpsgenieAlertsHandler, "_place_alert_on_queue")
    @mock.patch.object(RabbitMQApi, "basic_ack")
    def test_process_alert_places_data_on_queue_if_no_processing_errors(
            self, mock_basic_ack, mock_place_alert, mock_send_alerts,
            mock_empty) -> None:
        # Setting it to non empty so that there is no attempt to send the
        # heartbeat
        mock_empty.return_value = False
        mock_place_alert.return_value = None
        mock_basic_ack.return_value = None
        mock_send_alerts.return_value = None
        try:
            self.test_opsgenie_alerts_handler._initialise_rabbitmq()
            blocking_channel = \
                self.test_opsgenie_alerts_handler.rabbitmq.channel
            method = pika.spec.Basic.Deliver(
                routing_key=self.test_opsgenie_alerts_handler.
                _opsgenie_channel_routing_key)
            body = json.dumps(self.test_alert.alert_data)
            properties = pika.spec.BasicProperties()

            # Send alert
            self.test_opsgenie_alerts_handler._process_alert(
                blocking_channel, method, properties, body)

            args, _ = mock_place_alert.call_args
            self.assertEqual(self.test_alert.alert_data, args[0].alert_data)
            self.assertEqual(1, len(args))
        except Exception as e:
            self.fail("Test failed: {}".format(e))

        mock_basic_ack.assert_called_once()

    @mock.patch.object(Queue, "empty")
    @mock.patch.object(OpsgenieAlertsHandler, "_send_alerts")
    @mock.patch.object(OpsgenieAlertsHandler, "_place_alert_on_queue")
    @mock.patch.object(RabbitMQApi, "basic_ack")
    def test_process_alert_does_not_place_data_on_queue_if_processing_errors(
            self, mock_basic_ack, mock_place_alert, mock_send_alerts,
            mock_empty) -> None:
        # Setting it to non empty so that there is no attempt to send the
        # heartbeat
        mock_empty.return_value = False
        mock_place_alert.return_value = None
        mock_basic_ack.return_value = None
        mock_send_alerts.return_value = None
        try:
            self.test_opsgenie_alerts_handler._initialise_rabbitmq()
            blocking_channel = \
                self.test_opsgenie_alerts_handler.rabbitmq.channel
            method = pika.spec.Basic.Deliver(
                routing_key=self.test_opsgenie_alerts_handler.
                _opsgenie_channel_routing_key)
            data_to_send = copy.deepcopy(self.test_alert.alert_data)
            del data_to_send['message']
            body = json.dumps(data_to_send)
            properties = pika.spec.BasicProperties()

            # Send alert
            self.test_opsgenie_alerts_handler._process_alert(
                blocking_channel, method, properties, body)

            mock_place_alert.assert_not_called()
        except Exception as e:
            self.fail("Test failed: {}".format(e))

        mock_basic_ack.assert_called_once()

    @mock.patch.object(Queue, "empty")
    @mock.patch.object(OpsgenieAlertsHandler, "_send_alerts")
    @mock.patch.object(OpsgenieAlertsHandler, "_place_alert_on_queue")
    @mock.patch.object(RabbitMQApi, "basic_ack")
    def test_process_alert_sends_data_waiting_in_queue_if_processing_errors(
            self, mock_basic_ack, mock_place_alert, mock_send_alerts,
            mock_empty) -> None:
        # Setting it to non empty so that there is no attempt to send the
        # heartbeat
        mock_empty.return_value = False
        mock_place_alert.return_value = None
        mock_basic_ack.return_value = None
        mock_send_alerts.return_value = None
        try:
            self.test_opsgenie_alerts_handler._initialise_rabbitmq()
            blocking_channel = \
                self.test_opsgenie_alerts_handler.rabbitmq.channel
            method = pika.spec.Basic.Deliver(
                routing_key=self.test_opsgenie_alerts_handler.
                _opsgenie_channel_routing_key)
            data_to_send = copy.deepcopy(self.test_alert.alert_data)
            del data_to_send['message']
            body = json.dumps(data_to_send)
            properties = pika.spec.BasicProperties()

            # Send alert
            self.test_opsgenie_alerts_handler._process_alert(
                blocking_channel, method, properties, body)

            mock_send_alerts.assert_called_once_with()
        except Exception as e:
            self.fail("Test failed: {}".format(e))

        mock_basic_ack.assert_called_once()

    @mock.patch.object(Queue, "empty")
    @mock.patch.object(OpsgenieAlertsHandler, "_send_alerts")
    @mock.patch.object(OpsgenieAlertsHandler, "_place_alert_on_queue")
    @mock.patch.object(RabbitMQApi, "basic_ack")
    def test_process_alert_sends_data_waiting_in_queue_if_no_processing_errors(
            self, mock_basic_ack, mock_place_alert, mock_send_alerts,
            mock_empty) -> None:
        # Setting it to non empty so that there is no attempt to send the
        # heartbeat
        mock_empty.return_value = False
        mock_place_alert.return_value = None
        mock_basic_ack.return_value = None
        mock_send_alerts.return_value = None
        try:
            self.test_opsgenie_alerts_handler._initialise_rabbitmq()
            blocking_channel = \
                self.test_opsgenie_alerts_handler.rabbitmq.channel
            method = pika.spec.Basic.Deliver(
                routing_key=self.test_opsgenie_alerts_handler.
                _opsgenie_channel_routing_key)
            body = json.dumps(self.test_alert.alert_data)
            properties = pika.spec.BasicProperties()

            # Send alert
            self.test_opsgenie_alerts_handler._process_alert(
                blocking_channel, method, properties, body)

            mock_send_alerts.assert_called_once_with()
        except Exception as e:
            self.fail("Test failed: {}".format(e))

        mock_basic_ack.assert_called_once()

    @parameterized.expand([
        (AMQPConnectionError, AMQPConnectionError('test'),),
        (AMQPChannelError, AMQPChannelError('test'),),
        (Exception, Exception('test'),),
        (PANICException, PANICException('test', 4000)),
    ])
    @mock.patch.object(Queue, "empty")
    @mock.patch.object(OpsgenieAlertsHandler, "_send_alerts")
    @mock.patch.object(OpsgenieAlertsHandler, "_place_alert_on_queue")
    @mock.patch.object(RabbitMQApi, "basic_ack")
    def test_process_alert_raises_exception_if_sending_data_from_queue_error(
            self, error_class, error_instance, mock_basic_ack,
            mock_place_alert, mock_send_alerts, mock_empty) -> None:
        # Setting it to non empty so that there is no attempt to send the
        # heartbeat
        mock_empty.return_value = False
        mock_place_alert.return_value = None
        mock_basic_ack.return_value = None
        mock_send_alerts.side_effect = error_instance
        try:
            self.test_opsgenie_alerts_handler._initialise_rabbitmq()
            blocking_channel = \
                self.test_opsgenie_alerts_handler.rabbitmq.channel
            method = pika.spec.Basic.Deliver(
                routing_key=self.test_opsgenie_alerts_handler.
                _opsgenie_channel_routing_key)
            body = json.dumps(self.test_alert.alert_data)
            properties = pika.spec.BasicProperties()

            self.assertRaises(
                error_class,
                self.test_opsgenie_alerts_handler._process_alert,
                blocking_channel, method, properties, body)
        except Exception as e:
            self.fail("Test failed: {}".format(e))

        mock_basic_ack.assert_called_once()

    @freeze_time("2012-01-01")
    @mock.patch.object(Queue, "empty")
    @mock.patch.object(OpsgenieAlertsHandler, "_send_heartbeat")
    @mock.patch.object(OpsgenieAlertsHandler, "_send_alerts")
    @mock.patch.object(OpsgenieAlertsHandler, "_place_alert_on_queue")
    @mock.patch.object(RabbitMQApi, "basic_ack")
    def test_process_alert_sends_hb_if_data_sent_and_no_processing_errors(
            self, mock_basic_ack, mock_place_alert, mock_send_alerts,
            mock_send_heartbeat, mock_empty) -> None:
        # Setting it to non empty so that there is no attempt to send the
        # heartbeat
        mock_empty.return_value = True
        mock_place_alert.return_value = None
        mock_basic_ack.return_value = None
        mock_send_alerts.return_value = None
        mock_send_heartbeat.return_value = None
        try:
            self.test_opsgenie_alerts_handler._initialise_rabbitmq()
            blocking_channel = \
                self.test_opsgenie_alerts_handler.rabbitmq.channel
            method = pika.spec.Basic.Deliver(
                routing_key=self.test_opsgenie_alerts_handler.
                _opsgenie_channel_routing_key)
            body = json.dumps(self.test_alert.alert_data)
            properties = pika.spec.BasicProperties()

            # Send alert
            self.test_opsgenie_alerts_handler._process_alert(
                blocking_channel, method, properties, body)

            expected_heartbeat = {
                'component_name': self.test_handler_name,
                'is_alive': True,
                'timestamp': datetime.now().timestamp()
            }
            mock_send_heartbeat.assert_called_once_with(expected_heartbeat)
        except Exception as e:
            self.fail("Test failed: {}".format(e))

        mock_basic_ack.assert_called_once()

    @mock.patch.object(Queue, "empty")
    @mock.patch.object(OpsgenieAlertsHandler, "_send_heartbeat")
    @mock.patch.object(OpsgenieAlertsHandler, "_send_alerts")
    @mock.patch.object(OpsgenieAlertsHandler, "_place_alert_on_queue")
    @mock.patch.object(RabbitMQApi, "basic_ack")
    def test_process_alert_does_not_send_hb_if_not_all_data_sent_from_queue(
            self, mock_basic_ack, mock_place_alert, mock_send_alerts,
            mock_send_heartbeat, mock_empty) -> None:
        mock_empty.return_value = False
        mock_place_alert.return_value = None
        mock_basic_ack.return_value = None
        mock_send_alerts.return_value = None
        mock_send_heartbeat.return_value = None
        try:
            self.test_opsgenie_alerts_handler._initialise_rabbitmq()
            blocking_channel = \
                self.test_opsgenie_alerts_handler.rabbitmq.channel
            method = pika.spec.Basic.Deliver(
                routing_key=self.test_opsgenie_alerts_handler.
                _opsgenie_channel_routing_key)
            body = json.dumps(self.test_alert.alert_data)
            properties = pika.spec.BasicProperties()

            # First test with a valid alert
            self.test_opsgenie_alerts_handler._process_alert(
                blocking_channel, method, properties, body)

            # Test with an invalid alert dict
            invalid_alert = copy.deepcopy(self.test_alert.alert_data)
            del invalid_alert['message']
            body = json.dumps(invalid_alert)
            self.test_opsgenie_alerts_handler._process_alert(
                blocking_channel, method, properties, body)

            mock_send_heartbeat.assert_not_called()
        except Exception as e:
            self.fail("Test failed: {}".format(e))

        args, _ = mock_basic_ack.call_args
        self.assertEqual(2, len(args))

    @parameterized.expand([
        (True,),
        (False,),
    ])
    @mock.patch.object(Queue, "empty")
    @mock.patch.object(OpsgenieAlertsHandler, "_send_heartbeat")
    @mock.patch.object(OpsgenieAlertsHandler, "_send_alerts")
    @mock.patch.object(OpsgenieAlertsHandler, "_place_alert_on_queue")
    @mock.patch.object(RabbitMQApi, "basic_ack")
    def test_process_alert_does_not_send_hb_if_processing_error(
            self, is_queue_empty, mock_basic_ack, mock_place_alert,
            mock_send_alerts, mock_send_heartbeat, mock_empty) -> None:
        mock_empty.return_value = is_queue_empty
        mock_place_alert.return_value = None
        mock_basic_ack.return_value = None
        mock_send_alerts.return_value = None
        mock_send_heartbeat.return_value = None
        try:
            self.test_opsgenie_alerts_handler._initialise_rabbitmq()
            blocking_channel = \
                self.test_opsgenie_alerts_handler.rabbitmq.channel
            method = pika.spec.Basic.Deliver(
                routing_key=self.test_opsgenie_alerts_handler.
                _opsgenie_channel_routing_key)
            invalid_alert = copy.deepcopy(self.test_alert.alert_data)
            del invalid_alert['message']
            body = json.dumps(invalid_alert)
            properties = pika.spec.BasicProperties()

            # Send alert
            self.test_opsgenie_alerts_handler._process_alert(
                blocking_channel, method, properties, body)

            mock_send_heartbeat.assert_not_called()
        except Exception as e:
            self.fail("Test failed: {}".format(e))

        mock_basic_ack.assert_called_once()

    @parameterized.expand([
        (True, AMQPConnectionError, AMQPConnectionError('test'),),
        (True, AMQPChannelError, AMQPChannelError('test'),),
        (True, Exception, Exception('test'),),
        (True, PANICException, PANICException('test', 4000)),
        (False, AMQPConnectionError, AMQPConnectionError('test'),),
        (False, AMQPChannelError, AMQPChannelError('test'),),
        (False, Exception, Exception('test'),),
        (False, PANICException, PANICException('test', 4000)),
    ])
    @mock.patch.object(Queue, "empty")
    @mock.patch.object(OpsgenieAlertsHandler, "_send_heartbeat")
    @mock.patch.object(OpsgenieAlertsHandler, "_send_alerts")
    @mock.patch.object(OpsgenieAlertsHandler, "_place_alert_on_queue")
    @mock.patch.object(RabbitMQApi, "basic_ack")
    def test_process_alert_does_not_send_hb_if_error_raised_when_sending_data(
            self, is_queue_empty, error_class, error_instance, mock_basic_ack,
            mock_place_alert, mock_send_alerts, mock_send_heartbeat,
            mock_empty) -> None:
        mock_empty.return_value = is_queue_empty
        mock_place_alert.return_value = None
        mock_basic_ack.return_value = None
        mock_send_alerts.side_effect = error_instance
        mock_send_heartbeat.return_value = None
        try:
            self.test_opsgenie_alerts_handler._initialise_rabbitmq()
            blocking_channel = \
                self.test_opsgenie_alerts_handler.rabbitmq.channel
            method = pika.spec.Basic.Deliver(
                routing_key=self.test_opsgenie_alerts_handler.
                _opsgenie_channel_routing_key)
            body = json.dumps(self.test_alert.alert_data)
            properties = pika.spec.BasicProperties()

            # Send with a valid alert
            self.assertRaises(
                error_class,
                self.test_opsgenie_alerts_handler._process_alert,
                blocking_channel, method, properties, body)

            # Test with an invalid alert
            invalid_alert = copy.deepcopy(self.test_alert.alert_data)
            del invalid_alert['message']
            body = json.dumps(invalid_alert)
            self.assertRaises(
                error_class,
                self.test_opsgenie_alerts_handler._process_alert,
                blocking_channel, method, properties, body)

            mock_send_heartbeat.assert_not_called()
        except Exception as e:
            self.fail("Test failed: {}".format(e))

        args, _ = mock_basic_ack.call_args
        self.assertEqual(2, len(args))

    @mock.patch.object(Queue, "empty")
    @mock.patch.object(OpsgenieAlertsHandler, "_send_heartbeat")
    @mock.patch.object(OpsgenieAlertsHandler, "_send_alerts")
    @mock.patch.object(OpsgenieAlertsHandler, "_place_alert_on_queue")
    @mock.patch.object(RabbitMQApi, "basic_ack")
    def test_process_alert_does_not_raise_msg_not_delivered_exception(
            self, mock_basic_ack, mock_place_alert, mock_send_alerts,
            mock_send_heartbeat, mock_empty) -> None:
        mock_basic_ack.return_value = None
        mock_place_alert.return_value = None
        mock_send_alerts.return_value = None
        mock_empty.return_value = True
        mock_send_heartbeat.side_effect = MessageWasNotDeliveredException(
            'test')
        try:
            self.test_opsgenie_alerts_handler._initialise_rabbitmq()
            blocking_channel = \
                self.test_opsgenie_alerts_handler.rabbitmq.channel
            method = pika.spec.Basic.Deliver(
                routing_key=self.test_opsgenie_alerts_handler.
                _opsgenie_channel_routing_key)
            body = json.dumps(self.test_alert.alert_data)
            properties = pika.spec.BasicProperties()

            # This would raise a MessageWasNotDeliveredException if raised,
            # hence the test would fail
            self.test_opsgenie_alerts_handler._process_alert(
                blocking_channel, method, properties, body)
        except Exception as e:
            self.fail("Test failed: {}".format(e))

        mock_basic_ack.assert_called_once()

    @parameterized.expand([
        (AMQPConnectionError, AMQPConnectionError('test'),),
        (AMQPChannelError, AMQPChannelError('test'),),
        (Exception, Exception('test'),),
    ])
    @mock.patch.object(Queue, "empty")
    @mock.patch.object(OpsgenieAlertsHandler, "_send_heartbeat")
    @mock.patch.object(OpsgenieAlertsHandler, "_send_alerts")
    @mock.patch.object(OpsgenieAlertsHandler, "_place_alert_on_queue")
    @mock.patch.object(RabbitMQApi, "basic_ack")
    def test_process_alert_raises_error_if_raised_by_send_hb(
            self, exception_class, exception_instance, mock_basic_ack,
            mock_place_alert, mock_send_alerts, mock_send_heartbeat,
            mock_empty) -> None:
        # For this test we will check for channel, connection and unexpected
        # errors.
        mock_basic_ack.return_value = None
        mock_place_alert.return_value = None
        mock_send_alerts.return_value = None
        mock_send_heartbeat.side_effect = exception_instance
        mock_empty.return_value = True
        try:
            self.test_opsgenie_alerts_handler._initialise_rabbitmq()
            blocking_channel = \
                self.test_opsgenie_alerts_handler.rabbitmq.channel
            method = pika.spec.Basic.Deliver(
                routing_key=self.test_opsgenie_alerts_handler.
                _opsgenie_channel_routing_key)
            body = json.dumps(self.test_alert.alert_data)
            properties = pika.spec.BasicProperties()

            self.assertRaises(
                exception_class,
                self.test_opsgenie_alerts_handler._process_alert,
                blocking_channel, method, properties, body)
        except Exception as e:
            self.fail("Test failed: {}".format(e))

        mock_basic_ack.assert_called_once()

    def test_place_alert_on_queue_places_alert_on_queue_if_queue_not_full(
            self) -> None:
        # Use a smaller queue in this case for simplicity
        test_queue = Queue(3)
        self.test_opsgenie_alerts_handler._alerts_queue = test_queue
        test_queue.put('item1')
        test_queue.put('item2')

        self.test_opsgenie_alerts_handler._place_alert_on_queue(
            self.test_alert)

        all_queue_items = list(test_queue.queue)
        self.assertEqual(['item1', 'item2', self.test_alert], all_queue_items)

    def test_place_alert_on_queue_removes_oldest_and_places_if_queue_full(
            self) -> None:
        # Use a smaller queue in this case for simplicity
        test_queue = Queue(3)
        self.test_opsgenie_alerts_handler._alerts_queue = test_queue
        test_queue.put('item1')
        test_queue.put('item2')
        test_queue.put('item3')

        self.test_opsgenie_alerts_handler._place_alert_on_queue(
            self.test_alert)

        all_queue_items = list(test_queue.queue)
        self.assertEqual(['item2', 'item3', self.test_alert], all_queue_items)

    @mock.patch.object(Queue, "empty")
    @mock.patch.object(Queue, "get")
    @mock.patch.object(logging, "debug")
    @mock.patch.object(logging, "info")
    @mock.patch.object(logging, "warning")
    @mock.patch.object(logging, "critical")
    @mock.patch.object(logging, "error")
    @mock.patch.object(logging, "exception")
    def test_send_alerts_does_nothing_if_queue_is_empty(
            self, mock_exception, mock_error, mock_critical, mock_warning,
            mock_info, mock_debug, mock_get, mock_empty) -> None:
        mock_empty.return_value = True

        self.test_opsgenie_alerts_handler._send_alerts()

        mock_critical.assert_not_called()
        mock_info.assert_not_called()
        mock_warning.assert_not_called()
        mock_debug.assert_not_called()
        mock_get.assert_not_called()
        mock_exception.assert_not_called()
        mock_error.assert_not_called()

    @freeze_time("2012-01-01")
    @mock.patch.object(OpsgenieChannel, "alert")
    def test_send_alerts_discards_old_alerts_and_sends_the_recent(
            self, mock_alert) -> None:
        mock_alert.return_value = RequestStatus.SUCCESS
        test_alert_old1 = OpenFileDescriptorsIncreasedAboveThresholdAlert(
            self.test_system_name, self.test_percentage_usage,
            self.test_panic_severity,
            datetime.now().timestamp() -
            self.test_alert_validity_threshold - 1,
            self.test_panic_severity, self.test_parent_id,
            self.test_system_id)
        test_alert_recent1 = OpenFileDescriptorsIncreasedAboveThresholdAlert(
            self.test_system_name, self.test_percentage_usage,
            self.test_panic_severity,
            datetime.now().timestamp() - self.test_alert_validity_threshold,
            self.test_panic_severity, self.test_parent_id,
            self.test_system_id)
        test_alert_recent2 = OpenFileDescriptorsIncreasedAboveThresholdAlert(
            self.test_system_name, self.test_percentage_usage,
            self.test_panic_severity, datetime.now().timestamp(),
            self.test_panic_severity, self.test_parent_id,
            self.test_system_id)
        test_alert_recent3 = OpenFileDescriptorsIncreasedAboveThresholdAlert(
            self.test_system_name, self.test_percentage_usage,
            self.test_panic_severity,
            datetime.now().timestamp() -
            self.test_alert_validity_threshold + 4,
            self.test_panic_severity, self.test_parent_id,
            self.test_system_id)
        test_queue = Queue(4)
        self.test_opsgenie_alerts_handler._alerts_queue = test_queue
        test_queue.put(test_alert_old1)
        test_queue.put(test_alert_recent1)
        test_queue.put(test_alert_recent2)
        test_queue.put(test_alert_recent3)

        self.test_opsgenie_alerts_handler._send_alerts()

        self.assertTrue(
            self.test_opsgenie_alerts_handler.alerts_queue.empty())
        expected_calls = [
            call(test_alert_recent1),
            call(test_alert_recent2),
            call(test_alert_recent3)
        ]
        actual_calls = mock_alert.call_args_list
        self.assertEqual(expected_calls, actual_calls)

    @parameterized.expand([
        ([RequestStatus.SUCCESS], 1,),
        ([RequestStatus.FAILED, RequestStatus.SUCCESS], 2,),
        ([
             RequestStatus.FAILED, RequestStatus.FAILED,
             RequestStatus.SUCCESS
         ], 3,),
        ([
             RequestStatus.FAILED, RequestStatus.FAILED,
             RequestStatus.FAILED, RequestStatus.SUCCESS
         ], 4,),
        ([
             RequestStatus.FAILED, RequestStatus.FAILED,
             RequestStatus.FAILED, RequestStatus.FAILED,
             RequestStatus.SUCCESS
         ], 5,),
        ([
             RequestStatus.FAILED, RequestStatus.FAILED,
             RequestStatus.FAILED, RequestStatus.FAILED,
             RequestStatus.FAILED, RequestStatus.SUCCESS
         ], 5,),
    ])
    @freeze_time("2012-01-01")
    @mock.patch.object(RabbitMQApi, "connection")
    @mock.patch.object(OpsgenieChannel, "alert")
    def test_send_alerts_attempts_to_send_alert_for_up_to_max_attempts_times(
            self, alert_request_status_list, expected_no_calls, mock_alert,
            mock_connection) -> None:
        mock_alert.side_effect = alert_request_status_list
        mock_connection.return_value.sleep.return_value = None
        test_alert = OpenFileDescriptorsIncreasedAboveThresholdAlert(
            self.test_system_name, self.test_percentage_usage,
            self.test_panic_severity, datetime.now().timestamp(),
            self.test_panic_severity, self.test_parent_id,
            self.test_system_id)
        test_queue = Queue(4)
        self.test_opsgenie_alerts_handler._alerts_queue = test_queue
        test_queue.put(test_alert)

        self.test_opsgenie_alerts_handler._send_alerts()

        expected_calls = []
        for _ in range(expected_no_calls):
            expected_calls.append(call(test_alert))
        actual_calls = mock_alert.call_args_list
        self.assertEqual(expected_calls, actual_calls)

    @parameterized.expand([
        ([RequestStatus.SUCCESS],),
        ([RequestStatus.FAILED, RequestStatus.SUCCESS],),
        ([RequestStatus.FAILED, RequestStatus.FAILED,
          RequestStatus.SUCCESS],),
        ([
             RequestStatus.FAILED, RequestStatus.FAILED,
             RequestStatus.FAILED, RequestStatus.SUCCESS
         ],),
        ([
             RequestStatus.FAILED, RequestStatus.FAILED,
             RequestStatus.FAILED, RequestStatus.FAILED,
             RequestStatus.SUCCESS
         ],),
    ])
    @freeze_time("2012-01-01")
    @mock.patch.object(RabbitMQApi, "connection")
    @mock.patch.object(OpsgenieChannel, "alert")
    def test_send_alerts_removes_alert_if_it_was_successfully_sent(
            self, alert_request_status_list, mock_alert,
            mock_connection) -> None:
        mock_alert.side_effect = alert_request_status_list
        mock_connection.return_value.sleep.return_value = None
        test_alert = OpenFileDescriptorsIncreasedAboveThresholdAlert(
            self.test_system_name, self.test_percentage_usage,
            self.test_panic_severity, datetime.now().timestamp(),
            self.test_panic_severity, self.test_parent_id,
            self.test_system_id)
        test_queue = Queue(4)
        self.test_opsgenie_alerts_handler._alerts_queue = test_queue
        test_queue.put(test_alert)

        self.test_opsgenie_alerts_handler._send_alerts()

        self.assertTrue(
            self.test_opsgenie_alerts_handler.alerts_queue.empty())

    @freeze_time("2012-01-01")
    @mock.patch.object(RabbitMQApi, "connection")
    @mock.patch.object(OpsgenieChannel, "alert")
    def test_send_alerts_stops_sending_if_an_alert_is_not_successfully_sent(
            self, mock_alert, mock_connection) -> None:
        mock_alert.return_value = RequestStatus.FAILED
        mock_connection.return_value.sleep.return_value = None
        test_alert_1 = OpenFileDescriptorsIncreasedAboveThresholdAlert(
            self.test_system_name, self.test_percentage_usage,
            self.test_panic_severity, datetime.now().timestamp(),
            self.test_panic_severity, self.test_parent_id,
            self.test_system_id)
        test_alert_2 = OpenFileDescriptorsIncreasedAboveThresholdAlert(
            self.test_system_name, self.test_percentage_usage,
            self.test_panic_severity, datetime.now().timestamp() + 1,
            self.test_panic_severity, self.test_parent_id,
            self.test_system_id)
        test_queue = Queue(4)
        self.test_opsgenie_alerts_handler._alerts_queue = test_queue
        test_queue.put(test_alert_1)
        test_queue.put(test_alert_2)

        self.test_opsgenie_alerts_handler._send_alerts()

        self.assertFalse(
            self.test_opsgenie_alerts_handler.alerts_queue.empty())
        self.assertEqual(
            2, self.test_opsgenie_alerts_handler.alerts_queue.qsize())
        self.assertEqual(
            test_alert_1,
            self.test_opsgenie_alerts_handler.alerts_queue.queue[0])
        self.assertEqual(
            test_alert_2,
            self.test_opsgenie_alerts_handler.alerts_queue.queue[1])
def setUp(self) -> None:
    """Build the fixtures for the ConfigStore tests.

    Creates a disabled logger, two live RabbitMQ connections (one for the
    store under test, one for the test to publish/consume with), Redis and
    Mongo connection parameters from the environment, the ConfigStore
    itself, the exchanges/queues it consumes from, and a set of literal
    config payloads (one per routing key) used by the tests.
    NOTE(review): this connects to real RabbitMQ — the broker must be up.
    """
    # Logging is disabled so test output stays clean.
    self.dummy_logger = logging.getLogger('Dummy')
    self.dummy_logger.disabled = True
    # Zero interval: connection liveness is re-checked on every use.
    self.connection_check_time_interval = timedelta(seconds=0)
    self.rabbit_ip = env.RABBIT_IP
    # Connection used internally by the store under test.
    self.rabbitmq = RabbitMQApi(
        self.dummy_logger, self.rabbit_ip,
        connection_check_time_interval=self.connection_check_time_interval)
    # Separate connection for the test itself, so test traffic does not
    # interfere with the store's channel.
    self.test_rabbit_manager = RabbitMQApi(
        self.dummy_logger, self.rabbit_ip,
        connection_check_time_interval=self.connection_check_time_interval)
    # Redis/Mongo parameters come from the environment.
    self.redis_db = env.REDIS_DB
    self.redis_host = env.REDIS_IP
    self.redis_port = env.REDIS_PORT
    self.redis_namespace = env.UNIQUE_ALERTER_IDENTIFIER
    self.redis = RedisApi(self.dummy_logger, self.redis_db,
                          self.redis_host, self.redis_port, '',
                          self.redis_namespace,
                          self.connection_check_time_interval)
    self.mongo_ip = env.DB_IP
    self.mongo_db = env.DB_NAME
    self.mongo_port = env.DB_PORT
    self.test_store_name = 'store name'
    # The component under test.
    self.test_store = ConfigStore(self.test_store_name, self.dummy_logger,
                                  self.rabbitmq)
    self.routing_key = 'heartbeat.worker'
    self.test_queue_name = 'test queue'
    # Declare the exchanges/queue the store consumes configs from.
    connect_to_rabbit(self.rabbitmq)
    self.rabbitmq.exchange_declare(HEALTH_CHECK_EXCHANGE, 'topic', False,
                                   True, False, False)
    self.rabbitmq.exchange_declare(CONFIG_EXCHANGE, 'topic', False, True,
                                   False, False)
    self.rabbitmq.queue_declare(STORE_CONFIGS_QUEUE_NAME, False, True,
                                False, False)
    self.rabbitmq.queue_bind(STORE_CONFIGS_QUEUE_NAME, CONFIG_EXCHANGE,
                             STORE_CONFIGS_ROUTING_KEY_CHAINS)
    # Queue the test uses to observe heartbeats on the health-check
    # exchange.
    connect_to_rabbit(self.test_rabbit_manager)
    self.test_rabbit_manager.queue_declare(self.test_queue_name, False,
                                           True, False, False)
    self.test_rabbit_manager.queue_bind(self.test_queue_name,
                                        HEALTH_CHECK_EXCHANGE,
                                        self.routing_key)
    self.test_parent_id = 'parent_id'
    self.test_config_type = 'config_type'
    self.test_data_str = 'test data'
    self.test_exception = PANICException('test_exception', 1)
    self.last_monitored = datetime(2012, 1, 1).timestamp()
    # One routing key per config category the store handles.
    self.routing_key_1 = 'chains.cosmos.cosmos.nodes_config'
    self.routing_key_2 = 'chains.cosmos.cosmos.alerts_config'
    self.routing_key_3 = 'chains.cosmos.cosmos.repos_config'
    self.routing_key_4 = 'general.repos_config'
    self.routing_key_5 = 'general.alerts_config'
    self.routing_key_6 = 'general.systems_config'
    self.routing_key_7 = 'channels.email_config'
    self.routing_key_8 = 'channels.pagerduty_config'
    self.routing_key_9 = 'channels.opsgenie_config'
    self.routing_key_10 = 'channels.telegram_config'
    self.routing_key_11 = 'channels.twilio_config'
    # Example nodes config (routing_key_1 payload). All values are strings
    # because configs arrive in INI-style string form.
    self.nodes_config_1 = {
        "node_3e0a5189-f474-4120-a0a4-d5ab817c0504": {
            "id": "node_3e0a5189-f474-4120-a0a4-d5ab817c0504",
            "parent_id": "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548",
            "name": "cosmos_sentry_1(46.166.146.165:9100)",
            "monitor_tendermint": "false",
            "monitor_rpc": "false",
            "monitor_prometheus": "false",
            "exporter_url": "http://46.166.146.165:9100/metrics",
            "monitor_system": "true",
            "is_validator": "false",
            "monitor_node": "true",
            "is_archive_node": "true",
            "use_as_data_source": "true"
        },
        "node_f8ebf267-9b53-4aa1-9c45-e84a9cba5fbc": {
            "id": "node_f8ebf267-9b53-4aa1-9c45-e84a9cba5fbc",
            "parent_id": "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548",
            "name": "cosmos_sentry_2(172.16.151.10:9100)",
            "monitor_tendermint": "false",
            "monitor_rpc": "false",
            "monitor_prometheus": "false",
            "exporter_url": "http://172.16.151.10:9100/metrics",
            "monitor_system": "true",
            "is_validator": "false",
            "monitor_node": "true",
            "is_archive_node": "true",
            "use_as_data_source": "true"
        }
    }
    # Example GitHub repos config (routing_key_3 payload).
    self.repos_config_1 = {
        "repo_4ea76d87-d291-4b68-88af-da2bd1e16e2e": {
            "id": "repo_4ea76d87-d291-4b68-88af-da2bd1e16e2e",
            "parent_id": "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548",
            "repo_name": "tendermint/tendermint/",
            "monitor_repo": "true"
        },
        "repo_83713022-4155-420b-ada1-73a863f58282": {
            "id": "repo_83713022-4155-420b-ada1-73a863f58282",
            "parent_id": "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548",
            "repo_name": "SimplyVC/panic_cosmos/",
            "monitor_repo": "true"
        }
    }
    # Example alerts config: one numbered section per system metric.
    self.alerts_config_1 = {
        "1": {
            "name": "open_file_descriptors",
            "enabled": "true",
            "parent_id": "GLOBAL",
            "critical_threshold": "95",
            "critical_repeat": "300",
            "critical_enabled": "true",
            "warning_threshold": "85",
            "warning_enabled": "true"
        },
        "2": {
            "name": "system_cpu_usage",
            "enabled": "true",
            "parent_id": "GLOBAL",
            "critical_threshold": "95",
            "critical_repeat": "300",
            "critical_enabled": "true",
            "warning_threshold": "85",
            "warning_enabled": "true"
        },
        "3": {
            "name": "system_storage_usage",
            "enabled": "true",
            "parent_id": "GLOBAL",
            "critical_threshold": "95",
            "critical_repeat": "300",
            "critical_enabled": "true",
            "warning_threshold": "85",
            "warning_enabled": "true"
        },
        "4": {
            "name": "system_ram_usage",
            "enabled": "true",
            "parent_id": "GLOBAL",
            "critical_threshold": "95",
            "critical_repeat": "300",
            "critical_enabled": "true",
            "warning_threshold": "85",
            "warning_enabled": "true"
        },
        "5": {
            "name": "system_is_down",
            "enabled": "true",
            "parent_id": "GLOBAL",
            "critical_threshold": "200",
            "critical_repeat": "300",
            "critical_enabled": "true",
            "warning_threshold": "0",
            "warning_enabled": "true"
        }
    }
    # Example systems config (routing_key_6 payload).
    self.systems_config_1 = {
        "system_1d026af1-6cab-403d-8256-c8faa462930a": {
            "id": "system_1d026af1-6cab-403d-8256-c8faa462930a",
            "parent_id": "GLOBAL",
            "name": "matic_full_node_nl(172.26.10.137:9100)",
            "exporter_url": "http://172.26.10.137:9100/metrics",
            "monitor_system": "true"
        },
        "system_a51b3a33-cb3f-4f53-a657-8a5a0efe0822": {
            "id": "system_a51b3a33-cb3f-4f53-a657-8a5a0efe0822",
            "parent_id": "GLOBAL",
            "name": "matic_full_node_mt(172.16.152.137:9100)",
            "exporter_url": "http://172.16.152.137:9100/metrics",
            "monitor_system": "true"
        }
    }
    # Example channel configs (dummy/obfuscated credentials, test-only).
    self.telegram_config_1 = {
        "telegram_8431a28e-a2ce-4e9b-839c-299b62e3d5b9": {
            "id": "telegram_8431a28e-a2ce-4e9b-839c-299b62e3d5b9",
            "channel_name": "telegram_chat_1",
            "bot_token": "1277777773:AAF-78AENtsYXxxdqTL3Ip987N7gmIKJaBE",
            "chat_id": "-759538717",
            "info": "true",
            "warning": "true",
            "critical": "true",
            "error": "true",
            "alerts": "false",
            "commands": "false",
            "parent_ids": "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548,chain_name_94aafe04-8287-463a-8416-0401852b3ca2,GLOBAL",
            "parent_names": "cosmos,kusama,GLOBAL"
        }
    }
    self.twilio_config_1 = {
        "twilio_a7016a6b-9394-4584-abe3-5a5c434b6b7c": {
            "id": "twilio_a7016a6b-9394-4584-abe3-5a5c434b6b7c",
            "channel_name": "twilio_caller_main",
            "account_sid": "ACb77777284e97e49eb2260aada0220e12",
            "auth_token": "d19f777777a0b8e274470d599e5bcc5e8",
            "twilio_phone_no": "+19893077770",
            "twilio_phone_numbers_to_dial_valid": "+35697777380",
            "parent_ids": "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548,chain_name_94aafe04-8287-463a-8416-0401852b3ca2,GLOBAL",
            "parent_names": "cosmos,kusama,GLOBAL"
        }
    }
    self.pagerduty_config_1 = {
        "pagerduty_4092d0ed-ac45-462b-b62a-89cffd4833cc": {
            "id": "pagerduty_4092d0ed-ac45-462b-b62a-89cffd4833cc",
            "channel_name": "pager_duty_1",
            "api_token": "meVp_vyQybcX7dA3o1fS",
            "integration_key": "4a520ce3577777ad89a3518096f3a5189",
            "info": "true",
            "warning": "true",
            "critical": "true",
            "error": "true",
            "parent_ids": "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548,chain_name_94aafe04-8287-463a-8416-0401852b3ca2,GLOBAL",
            "parent_names": "cosmos,kusama,GLOBAL"
        }
    }
    self.opsgenie_config_1 = {
        "opsgenie_9550bee1-5880-41f6-bdcf-a289472d7c35": {
            "id": "opsgenie_9550bee1-5880-41f6-bdcf-a289472d7c35",
            "channel_name": "ops_genie_main",
            "api_token": "77777777-0708-4b7e-a46f-496c85fa0b06",
            "eu": "true",
            "info": "true",
            "warning": "true",
            "critical": "true",
            "error": "true",
            "parent_ids": "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548,chain_name_94aafe04-8287-463a-8416-0401852b3ca2,GLOBAL",
            "parent_names": "cosmos,kusama,GLOBAL"
        }
    }
    self.email_config_1 = {
        "email_01b23d79-10f5-4815-a11f-034f53974b23": {
            "id": "email_01b23d79-10f5-4815-a11f-034f53974b23",
            "channel_name": "main_email_channel",
            "port": "25",
            "smtp": "exchange.olive.com",
            "email_from": "*****@*****.**",
            "emails_to": "*****@*****.**",
            "info": "true",
            "warning": "true",
            "critical": "true",
            "error": "true",
            "parent_ids": "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548,chain_name_94aafe04-8287-463a-8416-0401852b3ca2,GLOBAL",
            "parent_names": "cosmos,kusama,GLOBAL"
        }
    }
    # Payload with a shape the store does not recognise (error-path tests).
    self.config_data_unexpected = {"unexpected": {}}
def setUp(self) -> None:
    """Build the fixtures for the GithubStore tests.

    Wires up a disabled logger, two live RabbitMQ connections, Redis/Mongo
    parameters from the environment, the GithubStore under test with its
    input queue on the store exchange, and literal GitHub monitor payloads
    covering the result, error, key-error and unexpected cases.
    NOTE(review): this connects to real RabbitMQ — the broker must be up.
    """
    # Logging is disabled so test output stays clean.
    self.dummy_logger = logging.getLogger('Dummy')
    self.dummy_logger.disabled = True
    # Zero interval: connection liveness is re-checked on every use.
    self.connection_check_time_interval = timedelta(seconds=0)
    self.rabbit_ip = env.RABBIT_IP
    # Connection used internally by the store under test.
    self.rabbitmq = RabbitMQApi(
        self.dummy_logger, self.rabbit_ip,
        connection_check_time_interval=self.connection_check_time_interval)
    # Separate connection for the test itself.
    self.test_rabbit_manager = RabbitMQApi(
        self.dummy_logger, self.rabbit_ip,
        connection_check_time_interval=self.connection_check_time_interval)
    self.redis_db = env.REDIS_DB
    self.redis_host = env.REDIS_IP
    self.redis_port = env.REDIS_PORT
    self.redis_namespace = env.UNIQUE_ALERTER_IDENTIFIER
    self.redis = RedisApi(self.dummy_logger, self.redis_db,
                          self.redis_host, self.redis_port, '',
                          self.redis_namespace,
                          self.connection_check_time_interval)
    self.mongo_ip = env.DB_IP
    self.mongo_db = env.DB_NAME
    self.mongo_port = env.DB_PORT
    self.test_store_name = 'store name'
    # The component under test.
    self.test_store = GithubStore(self.test_store_name, self.dummy_logger,
                                  self.rabbitmq)
    self.routing_key = 'heartbeat.worker'
    self.test_queue_name = 'test queue'
    # Declare the store's input exchange/queue (direct exchange, durable).
    connect_to_rabbit(self.rabbitmq)
    self.rabbitmq.exchange_declare(HEALTH_CHECK_EXCHANGE, 'topic', False,
                                   True, False, False)
    self.rabbitmq.exchange_declare(STORE_EXCHANGE, 'direct', False, True,
                                   False, False)
    self.rabbitmq.queue_declare(GITHUB_STORE_INPUT_QUEUE, False, True,
                                False, False)
    self.rabbitmq.queue_bind(GITHUB_STORE_INPUT_QUEUE, STORE_EXCHANGE,
                             GITHUB_STORE_INPUT_ROUTING_KEY)
    # Queue the test uses to observe heartbeats.
    connect_to_rabbit(self.test_rabbit_manager)
    self.test_rabbit_manager.queue_declare(self.test_queue_name, False,
                                           True, False, False)
    self.test_rabbit_manager.queue_bind(self.test_queue_name,
                                        HEALTH_CHECK_EXCHANGE,
                                        self.routing_key)
    self.test_data_str = 'test data'
    self.test_exception = PANICException('test_exception', 1)
    # Two repos so tests can exercise per-repo keying.
    self.repo_name = 'simplyvc/panic/'
    self.repo_id = 'test_repo_id'
    self.parent_id = 'test_parent_id'
    self.repo_name_2 = 'simplyvc/panic_oasis/'
    self.repo_id_2 = 'test_repo_id_2'
    self.parent_id_2 = 'test_parent_id_2'
    self.last_monitored = datetime(2012, 1, 1).timestamp()
    # Well-formed payload: release count increased (5 from 4).
    self.github_data_1 = {
        "result": {
            "meta_data": {
                "repo_name": self.repo_name,
                "repo_id": self.repo_id,
                "repo_parent_id": self.parent_id,
                "last_monitored": self.last_monitored
            },
            "data": {
                "no_of_releases": {
                    "current": 5,
                    "previous": 4,
                }
            }
        }
    }
    # Well-formed payload: release count unchanged.
    self.github_data_2 = {
        "result": {
            "meta_data": {
                "repo_name": self.repo_name,
                "repo_id": self.repo_id,
                "repo_parent_id": self.parent_id,
                "last_monitored": self.last_monitored
            },
            "data": {
                "no_of_releases": {
                    "current": 5,
                    "previous": 5,
                }
            }
        }
    }
    # Well-formed payload for the second repo.
    self.github_data_3 = {
        "result": {
            "meta_data": {
                "repo_name": self.repo_name_2,
                "repo_id": self.repo_id_2,
                "repo_parent_id": self.parent_id_2,
                "last_monitored": self.last_monitored
            },
            "data": {
                "no_of_releases": {
                    "current": 8,
                    "previous": 1,
                }
            }
        }
    }
    # Monitor error payload (uses "time" rather than "last_monitored").
    self.github_data_error = {
        "error": {
            "meta_data": {
                "repo_name": self.repo_name,
                "repo_id": self.repo_id,
                "repo_parent_id": self.parent_id,
                "time": self.last_monitored
            },
            "code": "5006",
            "message": "error message"
        }
    }
    # Malformed payload: "meta_data"/"data" keys replaced, so processing
    # should raise a KeyError.
    self.github_data_key_error = {
        "result": {
            "data": {
                "repo_name": self.repo_name_2,
                "repo_id": self.repo_id_2,
                "repo_parent_id": self.parent_id_2,
                "last_monitored": self.last_monitored
            },
            "wrong_data": {
                "no_of_releases": {
                    "current": 8,
                    "previous": 1,
                }
            }
        }
    }
    # Payload with a shape the store does not recognise at all.
    self.github_data_unexpected = {"unexpected": {}}
def setUp(self) -> None:
    """Build the fixtures for the SystemMonitor tests.

    Creates a disabled logger and a RabbitMQ connection, the SystemConfig
    describing the monitored system, the SystemMonitor under test, plus a
    raw node-exporter metrics sample and the processed values the monitor
    is expected to derive from it.
    """
    # Logging is disabled so test output stays clean.
    self.dummy_logger = logging.getLogger('Dummy')
    self.dummy_logger.disabled = True
    # Zero interval: connection liveness is re-checked on every use.
    self.connection_check_time_interval = timedelta(seconds=0)
    self.rabbit_ip = env.RABBIT_IP
    self.rabbitmq = RabbitMQApi(
        self.dummy_logger, self.rabbit_ip,
        connection_check_time_interval=self.connection_check_time_interval)
    self.monitor_name = 'test_monitor'
    # Monitoring period in seconds (passed to SystemMonitor below).
    self.monitoring_period = 10
    self.system_id = 'test_system_id'
    self.parent_id = 'test_parent_id'
    self.system_name = 'test_system'
    self.monitor_system = True
    self.node_exporter_url = 'test_url'
    self.routing_key = 'test_routing_key'
    self.test_data_str = 'test data'
    self.test_data_dict = {
        'test_key_1': 'test_val_1',
        'test_key_2': 'test_val_2',
    }
    self.test_heartbeat = {
        'component_name': 'Test Component',
        'is_alive': True,
        'timestamp': datetime(2012, 1, 1).timestamp(),
    }
    self.test_queue_name = 'Test Queue'
    # The node-exporter/process metrics the monitor scrapes.
    self.metrics_to_monitor = [
        'process_cpu_seconds_total', 'go_memstats_alloc_bytes',
        'go_memstats_alloc_bytes_total', 'process_virtual_memory_bytes',
        'process_max_fds', 'process_open_fds', 'node_cpu_seconds_total',
        'node_filesystem_avail_bytes', 'node_filesystem_size_bytes',
        'node_memory_MemTotal_bytes', 'node_memory_MemAvailable_bytes',
        'node_network_transmit_bytes_total',
        'node_network_receive_bytes_total',
        'node_disk_io_time_seconds_total']
    # Raw scrape sample: labelled metrics are keyed by their JSON-encoded
    # label set, scalar metrics map straight to a float.
    self.retrieved_metrics_example = {
        'go_memstats_alloc_bytes': 2003024.0,
        'go_memstats_alloc_bytes_total': 435777412600.0,
        'node_cpu_seconds_total': {
            '{"cpu": "0", "mode": "idle"}': 3626110.54,
            '{"cpu": "0", "mode": "iowait"}': 16892.07,
            '{"cpu": "0", "mode": "irq"}': 0.0,
            '{"cpu": "0", "mode": "nice"}': 131.77,
            '{"cpu": "0", "mode": "softirq"}': 8165.66,
            '{"cpu": "0", "mode": "steal"}': 0.0,
            '{"cpu": "0", "mode": "system"}': 46168.15,
            '{"cpu": "0", "mode": "user"}': 238864.68,
            '{"cpu": "1", "mode": "idle"}': 3630087.24,
            '{"cpu": "1", "mode": "iowait"}': 17084.42,
            '{"cpu": "1", "mode": "irq"}': 0.0,
            '{"cpu": "1", "mode": "nice"}': 145.18,
            '{"cpu": "1", "mode": "softirq"}': 5126.93,
            '{"cpu": "1", "mode": "steal"}': 0.0,
            '{"cpu": "1", "mode": "system"}': 46121.4,
            '{"cpu": "1", "mode": "user"}': 239419.51},
        'node_disk_io_time_seconds_total': {
            '{"device": "dm-0"}': 38359.0,
            '{"device": "sda"}': 38288.0,
            '{"device": "sr0"}': 0.0},
        'node_filesystem_avail_bytes': {
            '{"device": "/dev/mapper/ubuntu--vg-ubuntu--lv", '
            '"fstype": "ext4", "mountpoint": "/"}': 57908170752.0,
            '{"device": "/dev/sda2", "fstype": "ext4", '
            '"mountpoint": "/boot"}': 729411584.0,
            '{"device": "lxcfs", "fstype": "fuse.lxcfs", '
            '"mountpoint": "/var/lib/lxcfs"}': 0.0,
            '{"device": "tmpfs", "fstype": "tmpfs", '
            '"mountpoint": "/run"}': 207900672.0,
            '{"device": "tmpfs", "fstype": "tmpfs", "mountpoint": '
            '"/run/lock"}': 5242880.0},
        'node_filesystem_size_bytes': {
            '{"device": "/dev/mapper/ubuntu--vg-ubuntu--lv", "fstype": '
            '"ext4", "mountpoint": "/"}': 104560844800.0,
            '{"device": "/dev/sda2", "fstype": "ext4", "mountpoint": '
            '"/boot"}': 1023303680.0,
            '{"device": "lxcfs", "fstype": "fuse.lxcfs", "mountpoint": '
            '"/var/lib/lxcfs"}': 0.0,
            '{"device": "tmpfs", "fstype": "tmpfs", "mountpoint": "/run"}':
                209027072.0,
            '{"device": "tmpfs", "fstype": "tmpfs", "mountpoint": '
            '"/run/lock"}': 5242880.0},
        'node_memory_MemAvailable_bytes': 1377767424.0,
        'node_memory_MemTotal_bytes': 2090237952.0,
        'node_network_receive_bytes_total': {
            '{"device": "ens160"}': 722358765622.0,
            '{"device": "lo"}': 381405.0},
        'node_network_transmit_bytes_total': {
            '{"device": "ens160"}': 1011571824152.0,
            '{"device": "lo"}': 381405.0},
        'process_cpu_seconds_total': 2786.82,
        'process_max_fds': 1024.0,
        'process_open_fds': 8.0,
        'process_virtual_memory_bytes': 118513664.0}
    # The values the monitor is expected to compute from the sample above
    # (usages as percentages, totals summed across labels).
    self.processed_data_example = {
        'process_cpu_seconds_total': 2786.82,
        'process_memory_usage': 0.0,
        'virtual_memory_usage': 118513664.0,
        'open_file_descriptors': 0.78125,
        'system_cpu_usage': 7.85,
        'system_ram_usage': 34.09,
        'system_storage_usage': 44.37,
        'network_transmit_bytes_total': 1011572205557.0,
        'network_receive_bytes_total': 722359147027.0,
        'disk_io_time_seconds_total': 76647.0,
    }
    self.test_exception = PANICException('test_exception', 1)
    self.system_config = SystemConfig(self.system_id, self.parent_id,
                                      self.system_name,
                                      self.monitor_system,
                                      self.node_exporter_url)
    # The component under test.
    self.test_monitor = SystemMonitor(self.monitor_name,
                                      self.system_config,
                                      self.dummy_logger,
                                      self.monitoring_period,
                                      self.rabbitmq)
def setUp(self) -> None:
    """Build the fixtures for the AlertStore tests.

    Wires up a disabled logger, two live RabbitMQ connections, Mongo and
    Redis clients from environment settings, the AlertStore under test
    with its input queue on the store exchange, and three literal alert
    payloads (warning/critical/info on the same metric) plus malformed
    payloads for the error paths.
    NOTE(review): this connects to real RabbitMQ — the broker must be up.
    """
    # Logging is disabled so test output stays clean.
    self.dummy_logger = logging.getLogger('Dummy')
    self.dummy_logger.disabled = True
    # Zero interval: connection liveness is re-checked on every use.
    self.connection_check_time_interval = timedelta(seconds=0)
    self.rabbit_ip = env.RABBIT_IP
    # Connection used internally by the store under test.
    self.rabbitmq = RabbitMQApi(
        self.dummy_logger, self.rabbit_ip,
        connection_check_time_interval=self.connection_check_time_interval)
    # Separate connection for the test itself.
    self.test_rabbit_manager = RabbitMQApi(
        self.dummy_logger, self.rabbit_ip,
        connection_check_time_interval=self.connection_check_time_interval)
    self.mongo_ip = env.DB_IP
    self.mongo_db = env.DB_NAME
    self.mongo_port = env.DB_PORT
    # Mongo client the tests use to inspect what the store persisted.
    self.mongo = MongoApi(logger=self.dummy_logger.getChild(
        MongoApi.__name__), db_name=self.mongo_db, host=self.mongo_ip,
        port=self.mongo_port)
    self.redis_db = env.REDIS_DB
    self.redis_host = env.REDIS_IP
    self.redis_port = env.REDIS_PORT
    self.redis_namespace = env.UNIQUE_ALERTER_IDENTIFIER
    self.redis = RedisApi(self.dummy_logger, self.redis_db,
                          self.redis_host, self.redis_port, '',
                          self.redis_namespace,
                          self.connection_check_time_interval)
    self.test_store_name = 'store name'
    # The component under test.
    self.test_store = AlertStore(self.test_store_name, self.dummy_logger,
                                 self.rabbitmq)
    self.routing_key = 'heartbeat.worker'
    self.test_queue_name = 'test queue'
    # Declare the store's input exchange/queue (direct exchange, durable).
    connect_to_rabbit(self.rabbitmq)
    self.rabbitmq.exchange_declare(HEALTH_CHECK_EXCHANGE, 'topic', False,
                                   True, False, False)
    self.rabbitmq.exchange_declare(STORE_EXCHANGE, 'direct', False, True,
                                   False, False)
    self.rabbitmq.queue_declare(ALERT_STORE_INPUT_QUEUE, False, True,
                                False, False)
    self.rabbitmq.queue_bind(ALERT_STORE_INPUT_QUEUE, STORE_EXCHANGE,
                             ALERT_STORE_INPUT_ROUTING_KEY)
    # Queue the test uses to observe heartbeats.
    connect_to_rabbit(self.test_rabbit_manager)
    self.test_rabbit_manager.queue_declare(self.test_queue_name, False,
                                           True, False, False)
    self.test_rabbit_manager.queue_bind(self.test_queue_name,
                                        HEALTH_CHECK_EXCHANGE,
                                        self.routing_key)
    self.test_data_str = 'test data'
    self.test_exception = PANICException('test_exception', 1)
    # Identifiers for three distinct alerts on the same parent/metric.
    self.parent_id = 'test_parent_id'
    self.alert_id = 'test_alert_id'
    self.origin_id = 'test_origin_id'
    self.alert_name = 'test_alert'
    self.metric = 'system_is_down'
    self.severity = 'warning'
    self.message = 'alert message'
    self.value = 'alert_code_1'
    self.alert_id_2 = 'test_alert_id_2'
    self.origin_id_2 = 'test_origin_id_2'
    self.alert_name_2 = 'test_alert_2'
    self.severity_2 = 'critical'
    self.message_2 = 'alert message 2'
    self.value_2 = 'alert_code_2'
    self.alert_id_3 = 'test_alert_id_3'
    self.origin_id_3 = 'test_origin_id_3'
    self.alert_name_3 = 'test_alert_3'
    self.severity_3 = 'info'
    self.message_3 = 'alert message 3'
    self.value_3 = 'alert_code_3'
    self.last_monitored = datetime(2012, 1, 1).timestamp()
    self.none = None
    # Three well-formed alert payloads differing only in origin/severity/
    # message, all against the same parent and metric.
    self.alert_data_1 = {
        'parent_id': self.parent_id,
        'origin_id': self.origin_id,
        'alert_code': {
            'name': self.alert_name,
            'value': self.value,
        },
        'severity': self.severity,
        'metric': self.metric,
        'message': self.message,
        'timestamp': self.last_monitored,
    }
    self.alert_data_2 = {
        'parent_id': self.parent_id,
        'origin_id': self.origin_id_2,
        'alert_code': {
            'name': self.alert_name_2,
            'value': self.value_2,
        },
        'severity': self.severity_2,
        'metric': self.metric,
        'message': self.message_2,
        'timestamp': self.last_monitored,
    }
    self.alert_data_3 = {
        'parent_id': self.parent_id,
        'origin_id': self.origin_id_3,
        'alert_code': {
            'name': self.alert_name_3,
            'value': self.value_3,
        },
        'severity': self.severity_3,
        'metric': self.metric,
        'message': self.message_3,
        'timestamp': self.last_monitored,
    }
    # Malformed payloads: missing the expected keys, and fully unexpected.
    self.alert_data_key_error = {"result": {"data": {}, "data2": {}}}
    self.alert_data_unexpected = {"unexpected": {}}