def delete_queue_if_exists(rabbit: RabbitMQApi, queue_name: str) -> None:
    """Purge and delete ``queue_name`` on the broker, if it exists.

    The queue's existence is probed with a passive declare; when the
    broker reports that the queue is unknown it closes the channel, and
    that error is caught so the clean-up becomes a no-op.
    """
    try:
        # passive=True only checks for existence — it raises
        # ChannelClosedByBroker instead of creating the queue.
        rabbit.queue_declare(queue_name, passive=True)
        rabbit.queue_purge(queue_name)
        rabbit.queue_delete(queue_name)
    except pika.exceptions.ChannelClosedByBroker:
        print("Queue {} does not exist - don't need to close".format(
            queue_name
        ))
class TestConfigStore(unittest.TestCase):
    """Integration tests for ``ConfigStore``.

    NOTE(review): these tests run against the live RabbitMQ, Redis and
    Mongo endpoints taken from ``env`` (RABBIT_IP, REDIS_IP, DB_IP, ...)
    rather than mocks, so those services must be reachable for the suite
    to pass.
    """

    def setUp(self) -> None:
        """Build the store under test plus the exchanges/queues it uses.

        Two separate Rabbit connections are created: ``self.rabbitmq`` is
        handed to the store under test, while ``self.test_rabbit_manager``
        is used by the tests themselves to declare/inspect queues.
        """
        self.dummy_logger = logging.getLogger('Dummy')
        self.dummy_logger.disabled = True
        self.connection_check_time_interval = timedelta(seconds=0)
        self.rabbit_ip = env.RABBIT_IP
        self.rabbitmq = RabbitMQApi(
            self.dummy_logger, self.rabbit_ip,
            connection_check_time_interval=self.connection_check_time_interval)
        self.test_rabbit_manager = RabbitMQApi(
            self.dummy_logger, self.rabbit_ip,
            connection_check_time_interval=self.connection_check_time_interval)
        self.redis_db = env.REDIS_DB
        self.redis_host = env.REDIS_IP
        self.redis_port = env.REDIS_PORT
        self.redis_namespace = env.UNIQUE_ALERTER_IDENTIFIER
        self.redis = RedisApi(self.dummy_logger, self.redis_db,
                              self.redis_host, self.redis_port, '',
                              self.redis_namespace,
                              self.connection_check_time_interval)
        self.mongo_ip = env.DB_IP
        self.mongo_db = env.DB_NAME
        self.mongo_port = env.DB_PORT
        self.test_store_name = 'store name'
        self.test_store = ConfigStore(self.test_store_name,
                                      self.dummy_logger, self.rabbitmq)
        self.routing_key = 'heartbeat.worker'
        self.test_queue_name = 'test queue'
        # Declare the exchanges/queue the store publishes to and the test
        # queue used to capture heartbeats.
        connect_to_rabbit(self.rabbitmq)
        self.rabbitmq.exchange_declare(HEALTH_CHECK_EXCHANGE, 'topic', False,
                                       True, False, False)
        self.rabbitmq.exchange_declare(CONFIG_EXCHANGE, 'topic', False,
                                       True, False, False)
        self.rabbitmq.queue_declare(STORE_CONFIGS_QUEUE_NAME, False, True,
                                    False, False)
        self.rabbitmq.queue_bind(STORE_CONFIGS_QUEUE_NAME, CONFIG_EXCHANGE,
                                 STORE_CONFIGS_ROUTING_KEY_CHAINS)
        connect_to_rabbit(self.test_rabbit_manager)
        self.test_rabbit_manager.queue_declare(self.test_queue_name, False,
                                               True, False, False)
        self.test_rabbit_manager.queue_bind(self.test_queue_name,
                                            HEALTH_CHECK_EXCHANGE,
                                            self.routing_key)
        self.test_parent_id = 'parent_id'
        self.test_config_type = 'config_type'
        self.test_data_str = 'test data'
        self.test_exception = PANICException('test_exception', 1)
        self.last_monitored = datetime(2012, 1, 1).timestamp()
        # Routing keys covering every config category the store handles:
        # chain-specific, general, and each alerting channel type.
        self.routing_key_1 = 'chains.cosmos.cosmos.nodes_config'
        self.routing_key_2 = 'chains.cosmos.cosmos.alerts_config'
        self.routing_key_3 = 'chains.cosmos.cosmos.repos_config'
        self.routing_key_4 = 'general.repos_config'
        self.routing_key_5 = 'general.alerts_config'
        self.routing_key_6 = 'general.systems_config'
        self.routing_key_7 = 'channels.email_config'
        self.routing_key_8 = 'channels.pagerduty_config'
        self.routing_key_9 = 'channels.opsgenie_config'
        self.routing_key_10 = 'channels.telegram_config'
        self.routing_key_11 = 'channels.twilio_config'
        # Sample config payloads, one per routing key category. All values
        # are strings, mirroring how configs arrive over the wire.
        self.nodes_config_1 = {
            "node_3e0a5189-f474-4120-a0a4-d5ab817c0504": {
                "id": "node_3e0a5189-f474-4120-a0a4-d5ab817c0504",
                "parent_id": "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548",
                "name": "cosmos_sentry_1(46.166.146.165:9100)",
                "monitor_tendermint": "false",
                "monitor_rpc": "false",
                "monitor_prometheus": "false",
                "exporter_url": "http://46.166.146.165:9100/metrics",
                "monitor_system": "true",
                "is_validator": "false",
                "monitor_node": "true",
                "is_archive_node": "true",
                "use_as_data_source": "true"
            },
            "node_f8ebf267-9b53-4aa1-9c45-e84a9cba5fbc": {
                "id": "node_f8ebf267-9b53-4aa1-9c45-e84a9cba5fbc",
                "parent_id": "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548",
                "name": "cosmos_sentry_2(172.16.151.10:9100)",
                "monitor_tendermint": "false",
                "monitor_rpc": "false",
                "monitor_prometheus": "false",
                "exporter_url": "http://172.16.151.10:9100/metrics",
                "monitor_system": "true",
                "is_validator": "false",
                "monitor_node": "true",
                "is_archive_node": "true",
                "use_as_data_source": "true"
            }
        }
        self.repos_config_1 = {
            "repo_4ea76d87-d291-4b68-88af-da2bd1e16e2e": {
                "id": "repo_4ea76d87-d291-4b68-88af-da2bd1e16e2e",
                "parent_id": "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548",
                "repo_name": "tendermint/tendermint/",
                "monitor_repo": "true"
            },
            "repo_83713022-4155-420b-ada1-73a863f58282": {
                "id": "repo_83713022-4155-420b-ada1-73a863f58282",
                "parent_id": "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548",
                "repo_name": "SimplyVC/panic_cosmos/",
                "monitor_repo": "true"
            }
        }
        self.alerts_config_1 = {
            "1": {
                "name": "open_file_descriptors",
                "enabled": "true",
                "parent_id": "GLOBAL",
                "critical_threshold": "95",
                "critical_repeat": "300",
                "critical_enabled": "true",
                "warning_threshold": "85",
                "warning_enabled": "true"
            },
            "2": {
                "name": "system_cpu_usage",
                "enabled": "true",
                "parent_id": "GLOBAL",
                "critical_threshold": "95",
                "critical_repeat": "300",
                "critical_enabled": "true",
                "warning_threshold": "85",
                "warning_enabled": "true"
            },
            "3": {
                "name": "system_storage_usage",
                "enabled": "true",
                "parent_id": "GLOBAL",
                "critical_threshold": "95",
                "critical_repeat": "300",
                "critical_enabled": "true",
                "warning_threshold": "85",
                "warning_enabled": "true"
            },
            "4": {
                "name": "system_ram_usage",
                "enabled": "true",
                "parent_id": "GLOBAL",
                "critical_threshold": "95",
                "critical_repeat": "300",
                "critical_enabled": "true",
                "warning_threshold": "85",
                "warning_enabled": "true"
            },
            "5": {
                "name": "system_is_down",
                "enabled": "true",
                "parent_id": "GLOBAL",
                "critical_threshold": "200",
                "critical_repeat": "300",
                "critical_enabled": "true",
                "warning_threshold": "0",
                "warning_enabled": "true"
            }
        }
        self.systems_config_1 = {
            "system_1d026af1-6cab-403d-8256-c8faa462930a": {
                "id": "system_1d026af1-6cab-403d-8256-c8faa462930a",
                "parent_id": "GLOBAL",
                "name": "matic_full_node_nl(172.26.10.137:9100)",
                "exporter_url": "http://172.26.10.137:9100/metrics",
                "monitor_system": "true"
            },
            "system_a51b3a33-cb3f-4f53-a657-8a5a0efe0822": {
                "id": "system_a51b3a33-cb3f-4f53-a657-8a5a0efe0822",
                "parent_id": "GLOBAL",
                "name": "matic_full_node_mt(172.16.152.137:9100)",
                "exporter_url": "http://172.16.152.137:9100/metrics",
                "monitor_system": "true"
            }
        }
        self.telegram_config_1 = {
            "telegram_8431a28e-a2ce-4e9b-839c-299b62e3d5b9": {
                "id": "telegram_8431a28e-a2ce-4e9b-839c-299b62e3d5b9",
                "channel_name": "telegram_chat_1",
                "bot_token": "1277777773:AAF-78AENtsYXxxdqTL3Ip987N7gmIKJaBE",
                "chat_id": "-759538717",
                "info": "true",
                "warning": "true",
                "critical": "true",
                "error": "true",
                "alerts": "false",
                "commands": "false",
                "parent_ids":
                    "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548,chain_name_94aafe04-8287-463a-8416-0401852b3ca2,GLOBAL",
                "parent_names": "cosmos,kusama,GLOBAL"
            }
        }
        self.twilio_config_1 = {
            "twilio_a7016a6b-9394-4584-abe3-5a5c434b6b7c": {
                "id": "twilio_a7016a6b-9394-4584-abe3-5a5c434b6b7c",
                "channel_name": "twilio_caller_main",
                "account_sid": "ACb77777284e97e49eb2260aada0220e12",
                "auth_token": "d19f777777a0b8e274470d599e5bcc5e8",
                "twilio_phone_no": "+19893077770",
                "twilio_phone_numbers_to_dial_valid": "+35697777380",
                "parent_ids":
                    "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548,chain_name_94aafe04-8287-463a-8416-0401852b3ca2,GLOBAL",
                "parent_names": "cosmos,kusama,GLOBAL"
            }
        }
        self.pagerduty_config_1 = {
            "pagerduty_4092d0ed-ac45-462b-b62a-89cffd4833cc": {
                "id": "pagerduty_4092d0ed-ac45-462b-b62a-89cffd4833cc",
                "channel_name": "pager_duty_1",
                "api_token": "meVp_vyQybcX7dA3o1fS",
                "integration_key": "4a520ce3577777ad89a3518096f3a5189",
                "info": "true",
                "warning": "true",
                "critical": "true",
                "error": "true",
                "parent_ids":
                    "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548,chain_name_94aafe04-8287-463a-8416-0401852b3ca2,GLOBAL",
                "parent_names": "cosmos,kusama,GLOBAL"
            }
        }
        self.opsgenie_config_1 = {
            "opsgenie_9550bee1-5880-41f6-bdcf-a289472d7c35": {
                "id": "opsgenie_9550bee1-5880-41f6-bdcf-a289472d7c35",
                "channel_name": "ops_genie_main",
                "api_token": "77777777-0708-4b7e-a46f-496c85fa0b06",
                "eu": "true",
                "info": "true",
                "warning": "true",
                "critical": "true",
                "error": "true",
                "parent_ids":
                    "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548,chain_name_94aafe04-8287-463a-8416-0401852b3ca2,GLOBAL",
                "parent_names": "cosmos,kusama,GLOBAL"
            }
        }
        self.email_config_1 = {
            "email_01b23d79-10f5-4815-a11f-034f53974b23": {
                "id": "email_01b23d79-10f5-4815-a11f-034f53974b23",
                "channel_name": "main_email_channel",
                "port": "25",
                "smtp": "exchange.olive.com",
                "email_from": "*****@*****.**",
                "emails_to": "*****@*****.**",
                "info": "true",
                "warning": "true",
                "critical": "true",
                "error": "true",
                "parent_ids":
                    "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548,chain_name_94aafe04-8287-463a-8416-0401852b3ca2,GLOBAL",
                "parent_names": "cosmos,kusama,GLOBAL"
            }
        }
        # Payload whose top-level key is not a recognised config id; used
        # to exercise the error path in _process_redis_store.
        self.config_data_unexpected = {"unexpected": {}}

    def tearDown(self) -> None:
        """Remove all Rabbit queues/exchanges and flush Redis state."""
        connect_to_rabbit(self.rabbitmq)
        delete_queue_if_exists(self.rabbitmq, STORE_CONFIGS_QUEUE_NAME)
        delete_exchange_if_exists(self.rabbitmq, CONFIG_EXCHANGE)
        delete_exchange_if_exists(self.rabbitmq, HEALTH_CHECK_EXCHANGE)
        disconnect_from_rabbit(self.rabbitmq)
        connect_to_rabbit(self.test_rabbit_manager)
        delete_queue_if_exists(self.test_rabbit_manager, self.test_queue_name)
        disconnect_from_rabbit(self.test_rabbit_manager)
        self.redis.delete_all_unsafe()
        self.redis = None
        self.dummy_logger = None
        self.connection_check_time_interval = None
        self.rabbitmq = None
        self.test_rabbit_manager = None

    def test__str__returns_name_correctly(self) -> None:
        """str(store) must be the store's configured name."""
        self.assertEqual(self.test_store_name, str(self.test_store))

    def test_name_property_returns_name_correctly(self) -> None:
        self.assertEqual(self.test_store_name, self.test_store.name)

    def test_mongo_ip_property_returns_mongo_ip_correctly(self) -> None:
        self.assertEqual(self.mongo_ip, self.test_store.mongo_ip)

    def test_mongo_db_property_returns_mongo_db_correctly(self) -> None:
        self.assertEqual(self.mongo_db, self.test_store.mongo_db)

    def test_mongo_port_property_returns_mongo_port_correctly(self) -> None:
        self.assertEqual(self.mongo_port, self.test_store.mongo_port)

    def test_redis_property_returns_redis_correctly(self) -> None:
        # Compare types, not instances: the store builds its own RedisApi.
        self.assertEqual(type(self.redis), type(self.test_store.redis))

    def test_mongo_property_returns_none_when_mongo_not_init(self) -> None:
        self.assertEqual(None, self.test_store.mongo)

    def test_initialise_rabbitmq_initialises_everything_as_expected(
            self) -> None:
        """_initialise_rabbitmq must connect, enable delivery confirmation
        and declare both exchanges the store publishes to."""
        try:
            # To make sure that the exchanges have not already been declared
            self.rabbitmq.connect()
            self.rabbitmq.exchange_delete(HEALTH_CHECK_EXCHANGE)
            self.rabbitmq.exchange_delete(CONFIG_EXCHANGE)
            self.rabbitmq.disconnect()
            self.test_store._initialise_rabbitmq()
            # Perform checks that the connection has been opened, marked as
            # open and that the delivery confirmation variable is set.
            self.assertTrue(self.test_store.rabbitmq.is_connected)
            self.assertTrue(self.test_store.rabbitmq.connection.is_open)
            self.assertTrue(
                self.test_store.rabbitmq.channel._delivery_confirmation)
            # Check whether the producing exchanges have been created by
            # using passive=True. If this check fails an exception is raised
            # automatically.
            self.test_store.rabbitmq.exchange_declare(CONFIG_EXCHANGE,
                                                      passive=True)
            self.test_store.rabbitmq.exchange_declare(HEALTH_CHECK_EXCHANGE,
                                                      passive=True)
            # Check whether the exchange has been created by sending messages
            # to it. If this fails an exception is raised, hence the test
            # fails.
            self.test_store.rabbitmq.basic_publish_confirm(
                exchange=HEALTH_CHECK_EXCHANGE, routing_key=self.routing_key,
                body=self.test_data_str, is_body_dict=False,
                properties=pika.BasicProperties(delivery_mode=2),
                mandatory=False)
            # Check whether the exchange has been created by sending messages
            # to it. If this fails an exception is raised, hence the test
            # fails.
            self.test_store.rabbitmq.basic_publish_confirm(
                exchange=CONFIG_EXCHANGE,
                routing_key=STORE_CONFIGS_ROUTING_KEY_CHAINS,
                body=self.test_data_str, is_body_dict=False,
                properties=pika.BasicProperties(delivery_mode=2),
                mandatory=False)
            # Re-declare queue to get the number of messages
            res = self.test_store.rabbitmq.queue_declare(
                STORE_CONFIGS_QUEUE_NAME, False, True, False, False)
            self.assertEqual(1, res.method.message_count)
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @mock.patch("src.data_store.stores.store.RedisApi.hset", autospec=True)
    def test_process_redis_store_does_nothing_on_error_key(self,
                                                           mock_hset) -> None:
        """An unrecognised config key must not touch Redis at all."""
        self.test_store._process_redis_store(self.test_parent_id,
                                             self.config_data_unexpected)
        mock_hset.assert_not_called()

    @parameterized.expand([
        ("self.nodes_config_1", "self.routing_key_1"),
        ("self.alerts_config_1", "self.routing_key_2"),
        ("self.repos_config_1", "self.routing_key_3"),
        ("self.repos_config_1", "self.routing_key_4"),
        ("self.alerts_config_1", "self.routing_key_5"),
        ("self.systems_config_1", "self.routing_key_6"),
        ("self.email_config_1", "self.routing_key_7"),
        ("self.pagerduty_config_1", "self.routing_key_8"),
        ("self.opsgenie_config_1", "self.routing_key_9"),
        ("self.telegram_config_1", "self.routing_key_10"),
        ("self.twilio_config_1", "self.routing_key_11"),
    ])
    @mock.patch("src.data_store.stores.store.RabbitMQApi.basic_ack",
                autospec=True)
    @mock.patch("src.data_store.stores.store.Store._send_heartbeat",
                autospec=True)
    def test_process_data_saves_in_redis(self, mock_config_data,
                                         mock_routing_key, mock_send_hb,
                                         mock_ack) -> None:
        """For every config category, _process_data must ack, heartbeat and
        persist the payload in Redis under the routing-key-derived key.

        The parameterized args are attribute names as strings, resolved
        with eval() against self.
        """
        self.rabbitmq.connect()
        mock_ack.return_value = None
        try:
            data = eval(mock_config_data)
            routing_key = eval(mock_routing_key)
            self.test_store._initialise_rabbitmq()
            blocking_channel = self.test_store.rabbitmq.channel
            method_chains = pika.spec.Basic.Deliver(
                routing_key=eval(mock_routing_key))
            properties = pika.spec.BasicProperties()
            self.test_store._process_data(blocking_channel,
                                          method_chains,
                                          properties,
                                          json.dumps(data).encode())
            mock_ack.assert_called_once()
            mock_send_hb.assert_called_once()
            self.assertEqual(
                data,
                json.loads(
                    self.redis.get(
                        Keys.get_config(routing_key)).decode("utf-8")))
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @freeze_time("2012-01-01")
    @mock.patch("src.data_store.stores.store.RabbitMQApi.basic_ack",
                autospec=True)
    @mock.patch(
        "src.data_store.stores.config.ConfigStore._process_redis_store",
        autospec=True)
    def test_process_data_sends_heartbeat_correctly(self,
                                                    mock_process_redis_store,
                                                    mock_basic_ack) -> None:
        """After processing a valid config, exactly one heartbeat with the
        frozen timestamp must arrive on the health-check exchange."""
        mock_basic_ack.return_value = None
        try:
            self.test_rabbit_manager.connect()
            self.test_store._initialise_rabbitmq()
            # Start from an empty, freshly bound queue so the message count
            # below is unambiguous.
            self.test_rabbit_manager.queue_delete(self.test_queue_name)
            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=False)
            self.assertEqual(0, res.method.message_count)
            self.test_rabbit_manager.queue_bind(queue=self.test_queue_name,
                                                exchange=HEALTH_CHECK_EXCHANGE,
                                                routing_key=self.routing_key)
            blocking_channel = self.test_store.rabbitmq.channel
            method_chains = pika.spec.Basic.Deliver(
                routing_key=self.routing_key_1)
            properties = pika.spec.BasicProperties()
            self.test_store._process_data(
                blocking_channel, method_chains, properties,
                json.dumps(self.nodes_config_1).encode())
            # Re-declare passively to read the message count.
            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=True)
            self.assertEqual(1, res.method.message_count)
            heartbeat_test = {
                'component_name': self.test_store_name,
                'is_alive': True,
                'timestamp': datetime(2012, 1, 1).timestamp()
            }
            _, _, body = self.test_rabbit_manager.basic_get(
                self.test_queue_name)
            self.assertEqual(heartbeat_test, json.loads(body))
            mock_process_redis_store.assert_called_once()
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @mock.patch("src.data_store.stores.store.RabbitMQApi.basic_ack",
                autospec=True)
    def test_process_data_doesnt_send_heartbeat_on_processing_error(
            self, mock_basic_ack) -> None:
        """A message with no routing key must fail processing and therefore
        produce no heartbeat on the health-check queue."""
        mock_basic_ack.return_value = None
        try:
            self.test_rabbit_manager.connect()
            self.test_store._initialise_rabbitmq()
            self.test_rabbit_manager.queue_delete(self.test_queue_name)
            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=False)
            self.assertEqual(0, res.method.message_count)
            self.test_rabbit_manager.queue_bind(queue=self.test_queue_name,
                                                exchange=HEALTH_CHECK_EXCHANGE,
                                                routing_key=self.routing_key)
            blocking_channel = self.test_store.rabbitmq.channel
            # routing_key=None triggers the processing-error path.
            method_chains = pika.spec.Basic.Deliver(routing_key=None)
            properties = pika.spec.BasicProperties()
            self.test_store._process_data(
                blocking_channel, method_chains, properties,
                json.dumps(self.nodes_config_1).encode())
            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=True)
            self.assertEqual(0, res.method.message_count)
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @parameterized.expand([
        ("self.nodes_config_1", "self.routing_key_1"),
        ("self.alerts_config_1", "self.routing_key_2"),
        ("self.repos_config_1", "self.routing_key_3"),
        ("self.repos_config_1", "self.routing_key_4"),
        ("self.alerts_config_1", "self.routing_key_5"),
        ("self.systems_config_1", "self.routing_key_6"),
        ("self.email_config_1", "self.routing_key_7"),
        ("self.pagerduty_config_1", "self.routing_key_8"),
        ("self.opsgenie_config_1", "self.routing_key_9"),
        ("self.telegram_config_1", "self.routing_key_10"),
        ("self.twilio_config_1", "self.routing_key_11"),
    ])
    @mock.patch("src.data_store.stores.store.RabbitMQApi.basic_ack",
                autospec=True)
    @mock.patch("src.data_store.stores.store.Store._send_heartbeat",
                autospec=True)
    def test_process_data_saves_in_redis_then_removes_it_on_empty_config(
            self, mock_config_data, mock_routing_key, mock_send_hb,
            mock_ack) -> None:
        """Persisting a config then receiving an empty ``{}`` payload for
        the same routing key must delete the stored value from Redis."""
        self.rabbitmq.connect()
        mock_ack.return_value = None
        try:
            data = eval(mock_config_data)
            routing_key = eval(mock_routing_key)
            self.test_store._initialise_rabbitmq()
            blocking_channel = self.test_store.rabbitmq.channel
            method_chains = pika.spec.Basic.Deliver(routing_key=routing_key)
            properties = pika.spec.BasicProperties()
            self.test_store._process_data(blocking_channel,
                                          method_chains,
                                          properties,
                                          json.dumps(data).encode())
            mock_ack.assert_called_once()
            mock_send_hb.assert_called_once()
            self.assertEqual(
                data,
                json.loads(
                    self.redis.get(
                        Keys.get_config(routing_key)).decode("utf-8")))
            # An empty config for the same key acts as a removal.
            self.test_store._process_data(blocking_channel,
                                          method_chains,
                                          properties,
                                          json.dumps({}).encode())
            self.assertEqual(None,
                             self.redis.get(Keys.get_config(routing_key)))
        except Exception as e:
            self.fail("Test failed: {}".format(e))
class TestStoreManager(unittest.TestCase): def setUp(self) -> None: self.dummy_logger = logging.getLogger('Dummy') self.dummy_logger.disabled = True self.connection_check_time_interval = timedelta(seconds=0) self.rabbitmq = RabbitMQApi( self.dummy_logger, env.RABBIT_IP, connection_check_time_interval=self.connection_check_time_interval) self.test_rabbit_manager = RabbitMQApi( self.dummy_logger, env.RABBIT_IP, connection_check_time_interval=self.connection_check_time_interval) self.manager_name = 'test_store_manager' self.routing_key = 'heartbeat.manager' self.test_queue_name = 'test queue' self.test_store_manager = StoreManager(self.dummy_logger, self.manager_name, self.rabbitmq) # Adding dummy process self.dummy_process = Process(target=infinite_fn, args=()) self.dummy_process.daemon = True connect_to_rabbit(self.rabbitmq) connect_to_rabbit(self.test_rabbit_manager) self.rabbitmq.exchange_declare(HEALTH_CHECK_EXCHANGE, 'topic', False, True, False, False) self.rabbitmq.queue_declare(DATA_STORE_MAN_INPUT_QUEUE, False, True, False, False) self.test_rabbit_manager.queue_declare(self.test_queue_name, False, True, False, False) self.rabbitmq.queue_bind(DATA_STORE_MAN_INPUT_QUEUE, HEALTH_CHECK_EXCHANGE, DATA_STORE_MAN_INPUT_ROUTING_KEY) self.test_data_str = 'test data' self.test_heartbeat = { 'component_name': self.manager_name, 'is_alive': True, 'timestamp': datetime(2012, 1, 1).timestamp(), } self.test_exception = PANICException('test_exception', 1) def tearDown(self) -> None: connect_to_rabbit(self.rabbitmq) delete_queue_if_exists(self.rabbitmq, DATA_STORE_MAN_INPUT_QUEUE) delete_exchange_if_exists(self.rabbitmq, HEALTH_CHECK_EXCHANGE) disconnect_from_rabbit(self.rabbitmq) connect_to_rabbit(self.test_rabbit_manager) delete_queue_if_exists(self.test_rabbit_manager, self.test_queue_name) disconnect_from_rabbit(self.test_rabbit_manager) self.dummy_logger = None self.dummy_process = None self.connection_check_time_interval = None self.rabbitmq = None 
self.test_rabbit_manager = None def test__str__returns_name_correctly(self) -> None: self.assertEqual(self.manager_name, str(self.test_store_manager)) def test_name_property_returns_name_correctly(self) -> None: self.assertEqual(self.manager_name, self.test_store_manager.name) def test_logger_property_returns_logger_correctly(self) -> None: self.assertEqual(self.dummy_logger, self.test_store_manager.logger) def test_rabbitmq_property_returns_rabbitmq_correctly(self) -> None: self.assertEqual(self.rabbitmq, self.test_store_manager.rabbitmq) def test_initialise_rabbitmq_initialises_everything_as_expected( self) -> None: try: # To make sure that the exchanges have not already been declared self.rabbitmq.connect() self.rabbitmq.exchange_delete(HEALTH_CHECK_EXCHANGE) self.rabbitmq.disconnect() self.test_store_manager._initialise_rabbitmq() # Perform checks that the connection has been opened, marked as open # and that the delivery confirmation variable is set. self.assertTrue(self.test_store_manager.rabbitmq.is_connected) self.assertTrue( self.test_store_manager.rabbitmq.connection.is_open) self.assertTrue(self.test_store_manager.rabbitmq.channel. _delivery_confirmation) # Check whether the producing exchanges have been created by # using passive=True. If this check fails an exception is raised # automatically. self.test_store_manager.rabbitmq.exchange_declare( HEALTH_CHECK_EXCHANGE, passive=True) self.test_rabbit_manager.queue_declare(self.test_queue_name, False, True, False, False) self.test_rabbit_manager.queue_bind(self.test_queue_name, HEALTH_CHECK_EXCHANGE, self.routing_key) # Check whether the exchange has been creating by sending messages # to it. If this fails an exception is raised, hence the test fails. 
self.test_store_manager.rabbitmq.basic_publish_confirm( exchange=HEALTH_CHECK_EXCHANGE, routing_key=self.routing_key, body=self.test_data_str, is_body_dict=False, properties=pika.BasicProperties(delivery_mode=2), mandatory=False) res = self.test_rabbit_manager.queue_declare( self.test_queue_name, False, True, False, False) self.assertEqual(1, res.method.message_count) except Exception as e: self.fail("Test failed: {}".format(e)) def test_send_heartbeat_sends_a_heartbeat_correctly(self) -> None: # This test creates a queue which receives messages with the same # routing key as the ones sent by send_heartbeat, and checks that the # heartbeat is received try: self.test_store_manager._initialise_rabbitmq() self.test_rabbit_manager.connect() res = self.test_rabbit_manager.queue_declare( queue=self.test_queue_name, durable=True, exclusive=False, auto_delete=False, passive=False) self.assertEqual(0, res.method.message_count) self.test_store_manager.rabbitmq.queue_bind( queue=self.test_queue_name, exchange=HEALTH_CHECK_EXCHANGE, routing_key=self.routing_key) self.test_store_manager._send_heartbeat(self.test_heartbeat) # By re-declaring the queue again we can get the number of messages # in the queue. 
res = self.test_rabbit_manager.queue_declare( queue=self.test_queue_name, durable=True, exclusive=False, auto_delete=False, passive=True) self.assertEqual(1, res.method.message_count) # Check that the message received is actually the HB _, _, body = self.test_rabbit_manager.basic_get( self.test_queue_name) self.assertEqual(self.test_heartbeat, json.loads(body)) except Exception as e: self.fail("Test failed: {}".format(e)) @mock.patch.object(multiprocessing.Process, "start") def test_start_stores_processes_starts_system_store_correctly( self, mock_start) -> None: mock_start.return_value = None self.test_store_manager._start_stores_processes() new_entry_process = self.test_store_manager._store_process_dict[ SYSTEM_STORE_NAME] self.assertTrue(new_entry_process.daemon) self.assertEqual(0, len(new_entry_process._args)) self.assertEqual(start_system_store, new_entry_process._target) @mock.patch.object(multiprocessing.Process, "start") def test_start_stores_processes_starts_github_store_correctly( self, mock_start) -> None: mock_start.return_value = None self.test_store_manager._start_stores_processes() new_entry_process = self.test_store_manager._store_process_dict[ GITHUB_STORE_NAME] self.assertTrue(new_entry_process.daemon) self.assertEqual(0, len(new_entry_process._args)) self.assertEqual(start_github_store, new_entry_process._target) @mock.patch.object(multiprocessing.Process, "start") def test_start_stores_processes_starts_alert_store_correctly( self, mock_start) -> None: mock_start.return_value = None self.test_store_manager._start_stores_processes() new_entry_process = self.test_store_manager._store_process_dict[ ALERT_STORE_NAME] self.assertTrue(new_entry_process.daemon) self.assertEqual(0, len(new_entry_process._args)) self.assertEqual(start_alert_store, new_entry_process._target) @mock.patch.object(multiprocessing.Process, "start") def test_start_stores_processes_starts_config_store_correctly( self, mock_start) -> None: mock_start.return_value = None 
self.test_store_manager._start_stores_processes() new_entry_process = self.test_store_manager._store_process_dict[ CONFIG_STORE_NAME] self.assertTrue(new_entry_process.daemon) self.assertEqual(0, len(new_entry_process._args)) self.assertEqual(start_config_store, new_entry_process._target) @mock.patch("src.data_store.starters.create_logger") def test_start_stores_processes_starts_the_processes_correctly( self, mock_create_logger) -> None: mock_create_logger.return_value = self.dummy_logger self.test_store_manager._start_stores_processes() # We need to sleep to give some time for the stores to be initialised, # otherwise the process would not terminate time.sleep(1) new_system_process = self.test_store_manager._store_process_dict[ SYSTEM_STORE_NAME] self.assertTrue(new_system_process.is_alive()) new_system_process.terminate() new_system_process.join() new_github_process = self.test_store_manager._store_process_dict[ GITHUB_STORE_NAME] self.assertTrue(new_github_process.is_alive()) new_github_process.terminate() new_github_process.join() new_alert_process = self.test_store_manager._store_process_dict[ ALERT_STORE_NAME] self.assertTrue(new_alert_process.is_alive()) new_alert_process.terminate() new_alert_process.join() new_config_process = self.test_store_manager._store_process_dict[ CONFIG_STORE_NAME] self.assertTrue(new_config_process.is_alive()) new_config_process.terminate() new_config_process.join() @freeze_time("2012-01-01") @mock.patch("src.data_store.starters.create_logger") @mock.patch.object(RabbitMQApi, "basic_ack") def test_process_ping_sends_a_valid_hb_if_process_is_alive( self, mock_ack, mock_create_logger) -> None: # This test creates a queue which receives messages with the same # routing key as the ones sent by send_heartbeat, and checks that the # received heartbeat is valid. 
mock_create_logger.return_value = self.dummy_logger mock_ack.return_value = None try: self.test_store_manager._initialise_rabbitmq() self.test_store_manager._start_stores_processes() # Give time for the processes to start time.sleep(1) self.test_rabbit_manager.queue_declare(queue=self.test_queue_name, durable=True, exclusive=False, auto_delete=False, passive=False) # Delete the queue before to avoid messages in the queue on error. self.test_rabbit_manager.queue_delete(self.test_queue_name) # initialise blocking_channel = self.test_store_manager.rabbitmq.channel properties = pika.spec.BasicProperties() method_hb = pika.spec.Basic.Deliver(routing_key=self.routing_key) body = 'ping' res = self.test_rabbit_manager.queue_declare( queue=self.test_queue_name, durable=True, exclusive=False, auto_delete=False, passive=False) self.assertEqual(0, res.method.message_count) self.test_rabbit_manager.queue_bind(queue=self.test_queue_name, exchange=HEALTH_CHECK_EXCHANGE, routing_key=self.routing_key) self.test_store_manager._process_ping(blocking_channel, method_hb, properties, body) time.sleep(1) # By re-declaring the queue again we can get the number of messages # in the queue. 
res = self.test_rabbit_manager.queue_declare( queue=self.test_queue_name, durable=True, exclusive=False, auto_delete=False, passive=True) self.assertEqual(1, res.method.message_count) expected_output = { "component_name": self.manager_name, "dead_processes": [], "running_processes": [ SYSTEM_STORE_NAME, GITHUB_STORE_NAME, ALERT_STORE_NAME, CONFIG_STORE_NAME ], "timestamp": datetime(2012, 1, 1).timestamp() } # Check that the message received is a valid HB _, _, body = self.test_rabbit_manager.basic_get( self.test_queue_name) self.assertEqual(expected_output, json.loads(body)) # Clean before test finishes self.test_store_manager._store_process_dict[ SYSTEM_STORE_NAME].terminate() self.test_store_manager._store_process_dict[ GITHUB_STORE_NAME].terminate() self.test_store_manager._store_process_dict[ ALERT_STORE_NAME].terminate() self.test_store_manager._store_process_dict[ CONFIG_STORE_NAME].terminate() self.test_store_manager._store_process_dict[ SYSTEM_STORE_NAME].join() self.test_store_manager._store_process_dict[ GITHUB_STORE_NAME].join() self.test_store_manager._store_process_dict[ALERT_STORE_NAME].join( ) self.test_store_manager._store_process_dict[ CONFIG_STORE_NAME].join() except Exception as e: self.fail("Test failed: {}".format(e)) @freeze_time("2012-01-01") @mock.patch("src.data_store.starters.create_logger") @mock.patch.object(RabbitMQApi, "basic_ack") def test_process_ping_sends_a_valid_hb_if_all_processes_are_dead( self, mock_ack, mock_create_logger) -> None: # This test creates a queue which receives messages with the same # routing key as the ones sent by send_heartbeat, and checks that the # received heartbeat is valid. 
        mock_create_logger.return_value = self.dummy_logger
        mock_ack.return_value = None
        try:
            self.test_store_manager._initialise_rabbitmq()
            self.test_store_manager._start_stores_processes()

            # Give time for the processes to start
            time.sleep(1)

            # Kill every store process so the subsequent ping reports them all
            # as dead.
            self.test_store_manager._store_process_dict[
                SYSTEM_STORE_NAME].terminate()
            self.test_store_manager._store_process_dict[
                GITHUB_STORE_NAME].terminate()
            self.test_store_manager._store_process_dict[
                ALERT_STORE_NAME].terminate()
            self.test_store_manager._store_process_dict[
                CONFIG_STORE_NAME].terminate()
            self.test_store_manager._store_process_dict[
                SYSTEM_STORE_NAME].join()
            self.test_store_manager._store_process_dict[
                GITHUB_STORE_NAME].join()
            self.test_store_manager._store_process_dict[
                ALERT_STORE_NAME].join()
            self.test_store_manager._store_process_dict[
                CONFIG_STORE_NAME].join()

            # Time for processes to terminate
            time.sleep(1)

            # Delete the queue before to avoid messages in the queue on error.
            self.test_rabbit_manager.queue_delete(self.test_queue_name)

            # initialise
            blocking_channel = self.test_store_manager.rabbitmq.channel
            properties = pika.spec.BasicProperties()
            method_hb = pika.spec.Basic.Deliver(routing_key=self.routing_key)
            body = 'ping'
            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=False)
            self.assertEqual(0, res.method.message_count)
            self.test_rabbit_manager.queue_bind(
                queue=self.test_queue_name, exchange=HEALTH_CHECK_EXCHANGE,
                routing_key=self.routing_key)
            self.test_store_manager._process_ping(blocking_channel, method_hb,
                                                  properties, body)
            time.sleep(1)

            # By re-declaring the queue again we can get the number of messages
            # in the queue.
            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=True)
            self.assertEqual(1, res.method.message_count)

            # All four stores were terminated above, so the heartbeat must list
            # every store as dead and none as running.
            expected_output = {
                "component_name": self.manager_name,
                "dead_processes": [
                    SYSTEM_STORE_NAME, GITHUB_STORE_NAME, ALERT_STORE_NAME,
                    CONFIG_STORE_NAME
                ],
                "running_processes": [],
                "timestamp": datetime(2012, 1, 1).timestamp()
            }
            # Check that the message received is a valid HB
            _, _, body = self.test_rabbit_manager.basic_get(
                self.test_queue_name)
            self.assertEqual(expected_output, json.loads(body))

            # Clean before test finishes.  NOTE(review): _process_ping restarts
            # dead processes, hence the second round of terminate/join here —
            # confirm against StoreManager._process_ping.
            self.test_store_manager._store_process_dict[
                SYSTEM_STORE_NAME].terminate()
            self.test_store_manager._store_process_dict[
                GITHUB_STORE_NAME].terminate()
            self.test_store_manager._store_process_dict[
                ALERT_STORE_NAME].terminate()
            self.test_store_manager._store_process_dict[
                CONFIG_STORE_NAME].terminate()
            self.test_store_manager._store_process_dict[
                SYSTEM_STORE_NAME].join()
            self.test_store_manager._store_process_dict[
                GITHUB_STORE_NAME].join()
            self.test_store_manager._store_process_dict[
                ALERT_STORE_NAME].join()
            self.test_store_manager._store_process_dict[
                CONFIG_STORE_NAME].join()
            self.rabbitmq.disconnect()
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @freeze_time("2012-01-01")
    @mock.patch.object(RabbitMQApi, "basic_ack")
    @mock.patch("src.data_store.starters.create_logger")
    @mock.patch.object(StoreManager, "_send_heartbeat")
    def test_process_ping_restarts_dead_processes(self, send_hb_mock,
                                                  mock_create_logger,
                                                  mock_ack) -> None:
        """Processing a ping must restart every dead store process.

        All four store processes are terminated; after ``_process_ping`` runs,
        each process is expected to be alive again.
        """
        send_hb_mock.return_value = None
        mock_create_logger.return_value = self.dummy_logger
        mock_ack.return_value = None
        try:
            self.test_store_manager._initialise_rabbitmq()
            self.test_store_manager._start_stores_processes()

            # Give time for the processes to start
            time.sleep(1)

            self.test_store_manager._store_process_dict[
                SYSTEM_STORE_NAME].terminate()
            self.test_store_manager._store_process_dict[
                GITHUB_STORE_NAME].terminate()
            self.test_store_manager._store_process_dict[
                ALERT_STORE_NAME].terminate()
            self.test_store_manager._store_process_dict[
                CONFIG_STORE_NAME].terminate()
            self.test_store_manager._store_process_dict[
                SYSTEM_STORE_NAME].join()
            self.test_store_manager._store_process_dict[
                GITHUB_STORE_NAME].join()
            self.test_store_manager._store_process_dict[
                ALERT_STORE_NAME].join()
            self.test_store_manager._store_process_dict[
                CONFIG_STORE_NAME].join()

            # Give time for the processes to terminate
            time.sleep(1)

            # Check that the processes have terminated
            self.assertFalse(self.test_store_manager.
                             _store_process_dict[SYSTEM_STORE_NAME].is_alive())
            self.assertFalse(self.test_store_manager.
                             _store_process_dict[GITHUB_STORE_NAME].is_alive())
            self.assertFalse(self.test_store_manager.
                             _store_process_dict[ALERT_STORE_NAME].is_alive())
            self.assertFalse(self.test_store_manager.
                             _store_process_dict[CONFIG_STORE_NAME].is_alive())

            # initialise
            blocking_channel = self.test_store_manager.rabbitmq.channel
            properties = pika.spec.BasicProperties()
            method_hb = pika.spec.Basic.Deliver(routing_key=self.routing_key)
            body = 'ping'
            self.test_store_manager._process_ping(blocking_channel, method_hb,
                                                  properties, body)

            # Give time for the processes to start
            time.sleep(1)

            self.assertTrue(self.test_store_manager.
                            _store_process_dict[SYSTEM_STORE_NAME].is_alive())
            self.assertTrue(self.test_store_manager.
                            _store_process_dict[GITHUB_STORE_NAME].is_alive())
            self.assertTrue(self.test_store_manager.
                            _store_process_dict[ALERT_STORE_NAME].is_alive())
            self.assertTrue(self.test_store_manager.
                            _store_process_dict[CONFIG_STORE_NAME].is_alive())

            # Clean before test finishes
            self.test_store_manager._store_process_dict[
                SYSTEM_STORE_NAME].terminate()
            self.test_store_manager._store_process_dict[
                GITHUB_STORE_NAME].terminate()
            self.test_store_manager._store_process_dict[
                ALERT_STORE_NAME].terminate()
            self.test_store_manager._store_process_dict[
                CONFIG_STORE_NAME].terminate()
            self.test_store_manager._store_process_dict[
                SYSTEM_STORE_NAME].join()
            self.test_store_manager._store_process_dict[
                GITHUB_STORE_NAME].join()
            self.test_store_manager._store_process_dict[
                ALERT_STORE_NAME].join()
            self.test_store_manager._store_process_dict[
                CONFIG_STORE_NAME].join()
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @mock.patch.object(multiprocessing.Process, "is_alive")
    @mock.patch.object(multiprocessing.Process, "start")
    @mock.patch.object(multiprocessing, 'Process')
    def test_process_ping_does_not_send_hb_if_processing_fails(
            self, mock_process, mock_start, is_alive_mock) -> None:
        """No heartbeat is published when ping processing raises.

        ``is_alive`` is mocked to raise, so ``_process_ping`` fails before it
        can send a heartbeat; the test queue must stay empty.
        """
        # This test creates a queue which receives messages with the same
        # routing key as the ones sent by send_heartbeat. In this test we will
        # check that no heartbeat is sent when mocking a raised exception.
        is_alive_mock.side_effect = self.test_exception
        mock_start.return_value = None
        mock_process.side_effect = self.dummy_process
        try:
            self.test_store_manager._initialise_rabbitmq()
            self.test_store_manager._start_stores_processes()
            time.sleep(1)

            # Delete the queue before to avoid messages in the queue on error.
            self.test_rabbit_manager.queue_delete(self.test_queue_name)

            # initialise
            blocking_channel = self.test_store_manager.rabbitmq.channel
            method = pika.spec.Basic.Deliver(routing_key=self.routing_key)
            properties = pika.spec.BasicProperties()
            body = 'ping'
            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=False)
            self.assertEqual(0, res.method.message_count)
            self.test_rabbit_manager.queue_bind(
                queue=self.test_queue_name, exchange=HEALTH_CHECK_EXCHANGE,
                routing_key=self.routing_key)
            self.test_store_manager._process_ping(blocking_channel, method,
                                                  properties, body)
            time.sleep(1)

            # By re-declaring the queue again we can get the number of messages
            # in the queue.
            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=True)
            self.assertEqual(0, res.method.message_count)
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    def test_proc_ping_send_hb_does_not_raise_msg_not_del_exce_if_hb_not_routed(
            self) -> None:
        """An unroutable heartbeat must not raise out of ``_process_ping``."""
        try:
            self.test_store_manager._initialise_rabbitmq()
            self.test_store_manager._start_stores_processes()
            time.sleep(1)

            # initialise
            blocking_channel = self.test_store_manager.rabbitmq.channel
            # 'heartbeat.manager' has no queue bound to it in this test, so the
            # heartbeat sent in response to the ping cannot be routed.
            method = pika.spec.Basic.Deliver(routing_key='heartbeat.manager')
            properties = pika.spec.BasicProperties()
            body = 'ping'
            self.test_store_manager._process_ping(blocking_channel, method,
                                                  properties, body)
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @parameterized.expand([
        ("pika.exceptions.AMQPChannelError('test')",
         "pika.exceptions.AMQPChannelError"),
        ("self.test_exception", "PANICException"),
        ("pika.exceptions.AMQPConnectionError",
         "pika.exceptions.AMQPConnectionError")
    ])
    @mock.patch.object(StoreManager, "_send_heartbeat")
    def test_process_ping_send_hb_raises_exceptions(
            self, param_input, param_expected, hb_mock) -> None:
        """Exceptions raised by ``_send_heartbeat`` must propagate.

        The parameterized strings are evaluated with ``eval`` into the raised
        side effect and the expected exception type respectively.
        """
        hb_mock.side_effect = eval(param_input)
        try:
            self.test_store_manager._initialise_rabbitmq()

            # initialise
            blocking_channel = self.test_store_manager.rabbitmq.channel
            method = pika.spec.Basic.Deliver(routing_key=self.routing_key)
            properties = pika.spec.BasicProperties()
            body = 'ping'
            self.assertRaises(eval(param_expected),
                              self.test_store_manager._process_ping,
                              blocking_channel, method, properties, body)
        except Exception as e:
            self.fail("Test failed: {}".format(e))
class TestGithubStore(unittest.TestCase):
    """Integration tests for ``GithubStore``.

    These tests exercise the store against live RabbitMQ and Redis instances
    (addresses taken from the environment), so they require those services to
    be reachable when the suite runs.
    """

    def setUp(self) -> None:
        # Disabled logger: the APIs under test require a logger but test output
        # should stay quiet.
        self.dummy_logger = logging.getLogger('Dummy')
        self.dummy_logger.disabled = True
        self.connection_check_time_interval = timedelta(seconds=0)
        self.rabbit_ip = env.RABBIT_IP
        self.rabbitmq = RabbitMQApi(
            self.dummy_logger, self.rabbit_ip,
            connection_check_time_interval=self.connection_check_time_interval)
        # Separate connection used by the tests themselves to inspect queues,
        # so test traffic does not share the store's channel.
        self.test_rabbit_manager = RabbitMQApi(
            self.dummy_logger, self.rabbit_ip,
            connection_check_time_interval=self.connection_check_time_interval)
        self.redis_db = env.REDIS_DB
        self.redis_host = env.REDIS_IP
        self.redis_port = env.REDIS_PORT
        self.redis_namespace = env.UNIQUE_ALERTER_IDENTIFIER
        self.redis = RedisApi(self.dummy_logger, self.redis_db,
                              self.redis_host, self.redis_port, '',
                              self.redis_namespace,
                              self.connection_check_time_interval)
        self.mongo_ip = env.DB_IP
        self.mongo_db = env.DB_NAME
        self.mongo_port = env.DB_PORT
        self.test_store_name = 'store name'
        self.test_store = GithubStore(self.test_store_name, self.dummy_logger,
                                      self.rabbitmq)
        self.routing_key = 'heartbeat.worker'
        self.test_queue_name = 'test queue'
        connect_to_rabbit(self.rabbitmq)
        self.rabbitmq.exchange_declare(HEALTH_CHECK_EXCHANGE, 'topic', False,
                                       True, False, False)
        self.rabbitmq.exchange_declare(STORE_EXCHANGE, 'direct', False, True,
                                       False, False)
        self.rabbitmq.queue_declare(GITHUB_STORE_INPUT_QUEUE, False, True,
                                    False, False)
        self.rabbitmq.queue_bind(GITHUB_STORE_INPUT_QUEUE, STORE_EXCHANGE,
                                 GITHUB_STORE_INPUT_ROUTING_KEY)
        connect_to_rabbit(self.test_rabbit_manager)
        self.test_rabbit_manager.queue_declare(self.test_queue_name, False,
                                               True, False, False)
        self.test_rabbit_manager.queue_bind(self.test_queue_name,
                                            HEALTH_CHECK_EXCHANGE,
                                            self.routing_key)
        self.test_data_str = 'test data'
        self.test_exception = PANICException('test_exception', 1)
        self.repo_name = 'simplyvc/panic/'
        self.repo_id = 'test_repo_id'
        self.parent_id = 'test_parent_id'
        self.repo_name_2 = 'simplyvc/panic_oasis/'
        self.repo_id_2 = 'test_repo_id_2'
        self.parent_id_2 = 'test_parent_id_2'
        self.last_monitored = datetime(2012, 1, 1).timestamp()
        # Well-formed monitor result: release count increased.
        self.github_data_1 = {
            "result": {
                "meta_data": {
                    "repo_name": self.repo_name,
                    "repo_id": self.repo_id,
                    "repo_parent_id": self.parent_id,
                    "last_monitored": self.last_monitored
                },
                "data": {
                    "no_of_releases": {
                        "current": 5,
                        "previous": 4,
                    }
                }
            }
        }
        # Well-formed monitor result: release count unchanged.
        self.github_data_2 = {
            "result": {
                "meta_data": {
                    "repo_name": self.repo_name,
                    "repo_id": self.repo_id,
                    "repo_parent_id": self.parent_id,
                    "last_monitored": self.last_monitored
                },
                "data": {
                    "no_of_releases": {
                        "current": 5,
                        "previous": 5,
                    }
                }
            }
        }
        # Well-formed monitor result for a second repo.
        self.github_data_3 = {
            "result": {
                "meta_data": {
                    "repo_name": self.repo_name_2,
                    "repo_id": self.repo_id_2,
                    "repo_parent_id": self.parent_id_2,
                    "last_monitored": self.last_monitored
                },
                "data": {
                    "no_of_releases": {
                        "current": 8,
                        "previous": 1,
                    }
                }
            }
        }
        # Error payload: carries an 'error' key instead of 'result'.
        self.github_data_error = {
            "error": {
                "meta_data": {
                    "repo_name": self.repo_name,
                    "repo_id": self.repo_id,
                    "repo_parent_id": self.parent_id,
                    "time": self.last_monitored
                },
                "code": "5006",
                "message": "error message"
            }
        }
        # Malformed payload: 'meta_data'/'data' keys replaced, triggering a
        # KeyError inside the store.
        self.github_data_key_error = {
            "result": {
                "data": {
                    "repo_name": self.repo_name_2,
                    "repo_id": self.repo_id_2,
                    "repo_parent_id": self.parent_id_2,
                    "last_monitored": self.last_monitored
                },
                "wrong_data": {
                    "no_of_releases": {
                        "current": 8,
                        "previous": 1,
                    }
                }
            }
        }
        # Payload with neither 'result' nor 'error' at the top level.
        self.github_data_unexpected = {"unexpected": {}}

    def tearDown(self) -> None:
        # Remove every queue/exchange created in setUp so each test starts
        # from a clean broker state, then drop the Redis test keys.
        connect_to_rabbit(self.rabbitmq)
        delete_queue_if_exists(self.rabbitmq, GITHUB_STORE_INPUT_QUEUE)
        delete_exchange_if_exists(self.rabbitmq, STORE_EXCHANGE)
        delete_exchange_if_exists(self.rabbitmq, HEALTH_CHECK_EXCHANGE)
        disconnect_from_rabbit(self.rabbitmq)
        connect_to_rabbit(self.test_rabbit_manager)
        delete_queue_if_exists(self.test_rabbit_manager, self.test_queue_name)
        disconnect_from_rabbit(self.test_rabbit_manager)
        self.redis.delete_all_unsafe()
        self.redis = None
        self.dummy_logger = None
        self.connection_check_time_interval = None
        self.rabbitmq = None
        self.test_rabbit_manager = None

    def test__str__returns_name_correctly(self) -> None:
        self.assertEqual(self.test_store_name, str(self.test_store))

    def test_name_property_returns_name_correctly(self) -> None:
        self.assertEqual(self.test_store_name, self.test_store.name)

    def test_mongo_ip_property_returns_mongo_ip_correctly(self) -> None:
        self.assertEqual(self.mongo_ip, self.test_store.mongo_ip)

    def test_mongo_db_property_returns_mongo_db_correctly(self) -> None:
        self.assertEqual(self.mongo_db, self.test_store.mongo_db)

    def test_mongo_port_property_returns_mongo_port_correctly(self) -> None:
        self.assertEqual(self.mongo_port, self.test_store.mongo_port)

    def test_redis_property_returns_redis_correctly(self) -> None:
        self.assertEqual(type(self.redis), type(self.test_store.redis))

    def test_mongo_property_returns_none_when_mongo_not_init(self) -> None:
        # GithubStore does not use Mongo, so the property stays None.
        self.assertEqual(None, self.test_store.mongo)

    def test_initialise_rabbitmq_initialises_everything_as_expected(
            self) -> None:
        """``_initialise_rabbitmq`` must declare exchanges/queues and enable
        delivery confirmations on an open connection."""
        try:
            # To make sure that the exchanges have not already been declared
            self.rabbitmq.connect()
            self.rabbitmq.exchange_delete(HEALTH_CHECK_EXCHANGE)
            self.rabbitmq.exchange_delete(STORE_EXCHANGE)
            self.rabbitmq.disconnect()

            self.test_store._initialise_rabbitmq()

            # Perform checks that the connection has been opened, marked as open
            # and that the delivery confirmation variable is set.
            self.assertTrue(self.test_store.rabbitmq.is_connected)
            self.assertTrue(self.test_store.rabbitmq.connection.is_open)
            self.assertTrue(
                self.test_store.rabbitmq.channel._delivery_confirmation)

            # Check whether the producing exchanges have been created by
            # using passive=True. If this check fails an exception is raised
            # automatically.
            self.test_store.rabbitmq.exchange_declare(STORE_EXCHANGE,
                                                      passive=True)
            self.test_store.rabbitmq.exchange_declare(HEALTH_CHECK_EXCHANGE,
                                                      passive=True)

            # Check whether the exchange has been created by sending messages
            # to it. If this fails an exception is raised, hence the test fails.
            self.test_store.rabbitmq.basic_publish_confirm(
                exchange=HEALTH_CHECK_EXCHANGE, routing_key=self.routing_key,
                body=self.test_data_str, is_body_dict=False,
                properties=pika.BasicProperties(delivery_mode=2),
                mandatory=False)

            # Check whether the exchange has been created by sending messages
            # to it. If this fails an exception is raised, hence the test fails.
            self.test_store.rabbitmq.basic_publish_confirm(
                exchange=STORE_EXCHANGE,
                routing_key=GITHUB_STORE_INPUT_ROUTING_KEY,
                body=self.test_data_str, is_body_dict=False,
                properties=pika.BasicProperties(delivery_mode=2),
                mandatory=False)

            # Re-declare queue to get the number of messages
            res = self.test_store.rabbitmq.queue_declare(
                GITHUB_STORE_INPUT_QUEUE, False, True, False, False)
            self.assertEqual(1, res.method.message_count)
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @parameterized.expand([
        ("self.github_data_1",),
        ("self.github_data_2",),
        ("self.github_data_3",),
    ])
    @mock.patch.object(RedisApi, "hset_multiple")
    def test_process_redis_store_redis_is_called_correctly(
            self, mock_github_data, mock_hset_multiple) -> None:
        """Redis ``hset_multiple`` is called with the expected hash and
        fields (parameterized fixture name is resolved via ``eval``)."""
        data = eval(mock_github_data)
        self.test_store._process_redis_store(data)
        meta_data = data['result']['meta_data']
        repo_id = meta_data['repo_id']
        parent_id = meta_data['repo_parent_id']
        metrics = data['result']['data']
        call_1 = call(
            Keys.get_hash_parent(parent_id), {
                Keys.get_github_no_of_releases(repo_id):
                    str(metrics['no_of_releases']),
                Keys.get_github_last_monitored(repo_id):
                    str(meta_data['last_monitored']),
            })
        mock_hset_multiple.assert_has_calls([call_1])

    @mock.patch("src.data_store.stores.store.RedisApi.hset_multiple",
                autospec=True)
    def test_process_redis_store_does_nothing_on_error_key(
            self, mock_hset_multiple) -> None:
        # An 'error' payload must not be written to Redis.
        self.test_store._process_redis_store(self.github_data_error)
        mock_hset_multiple.assert_not_called()

    def test_process_redis_store_raises_exception_on_unexpected_key(
            self) -> None:
        self.assertRaises(ReceivedUnexpectedDataException,
                          self.test_store._process_redis_store,
                          self.github_data_unexpected)

    @parameterized.expand([
        ("self.github_data_1",),
        ("self.github_data_2",),
        ("self.github_data_3",),
    ])
    def test_process_redis_store_redis_stores_correctly(
            self, mock_github_data) -> None:
        """End-to-end: values written by the store are readable from the real
        Redis instance."""
        data = eval(mock_github_data)
        self.test_store._process_redis_store(data)
        meta_data = data['result']['meta_data']
        repo_id = meta_data['repo_id']
        parent_id = meta_data['repo_parent_id']
        metrics = data['result']['data']
        self.assertEqual(
            str(metrics['no_of_releases']),
            self.redis.hget(
                Keys.get_hash_parent(parent_id),
                Keys.get_github_no_of_releases(repo_id)).decode("utf-8"))
        self.assertEqual(
            str(meta_data['last_monitored']),
            self.redis.hget(
                Keys.get_hash_parent(parent_id),
                Keys.get_github_last_monitored(repo_id)).decode("utf-8"))

    @parameterized.expand([
        ("self.github_data_1",),
        ("self.github_data_2",),
        ("self.github_data_3",),
    ])
    @mock.patch("src.data_store.stores.store.RabbitMQApi.basic_ack",
                autospec=True)
    @mock.patch("src.data_store.stores.store.Store._send_heartbeat",
                autospec=True)
    def test_process_data_saves_in_redis(self, mock_github_data, mock_send_hb,
                                         mock_ack) -> None:
        """``_process_data`` acks, sends a heartbeat and persists to Redis."""
        self.rabbitmq.connect()
        mock_ack.return_value = None
        try:
            self.test_store._initialise_rabbitmq()
            data = eval(mock_github_data)
            blocking_channel = self.test_store.rabbitmq.channel
            method_chains = pika.spec.Basic.Deliver(
                routing_key=GITHUB_STORE_INPUT_ROUTING_KEY)
            properties = pika.spec.BasicProperties()
            self.test_store._process_data(blocking_channel, method_chains,
                                          properties,
                                          json.dumps(data).encode())
            mock_ack.assert_called_once()
            mock_send_hb.assert_called_once()

            meta_data = data['result']['meta_data']
            repo_id = meta_data['repo_id']
            parent_id = meta_data['repo_parent_id']
            metrics = data['result']['data']
            self.assertEqual(
                str(metrics['no_of_releases']),
                self.redis.hget(
                    Keys.get_hash_parent(parent_id),
                    Keys.get_github_no_of_releases(repo_id)).decode("utf-8"))
            self.assertEqual(
                str(meta_data['last_monitored']),
                self.redis.hget(
                    Keys.get_hash_parent(parent_id),
                    Keys.get_github_last_monitored(repo_id)).decode("utf-8"))
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @parameterized.expand([
        # NOTE(review): the trailing space inside the first fixture string is
        # preserved deliberately — it is harmless to eval().
        ("KeyError", "self.github_data_key_error "),
        ("ReceivedUnexpectedDataException", "self.github_data_unexpected"),
    ])
    @mock.patch("src.data_store.stores.store.RabbitMQApi.basic_ack",
                autospec=True)
    @mock.patch("src.data_store.stores.store.Store._send_heartbeat",
                autospec=True)
    def test_process_data_with_bad_data_does_raises_exceptions(
            self, mock_error, mock_bad_data, mock_send_hb, mock_ack) -> None:
        """Bad payloads are acked but raise in ``_process_redis_store`` and
        produce no heartbeat."""
        self.rabbitmq.connect()
        mock_ack.return_value = None
        try:
            self.test_store._initialise_rabbitmq()
            blocking_channel = self.test_store.rabbitmq.channel
            method_chains = pika.spec.Basic.Deliver(
                routing_key=GITHUB_STORE_INPUT_ROUTING_KEY)
            properties = pika.spec.BasicProperties()
            self.test_store._process_data(
                blocking_channel, method_chains, properties,
                json.dumps(self.github_data_unexpected).encode())
            self.assertRaises(eval(mock_error),
                              self.test_store._process_redis_store,
                              eval(mock_bad_data))
            mock_ack.assert_called_once()
            mock_send_hb.assert_not_called()
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @freeze_time("2012-01-01")
    @mock.patch("src.data_store.stores.store.RabbitMQApi.basic_ack",
                autospec=True)
    @mock.patch(
        "src.data_store.stores.github.GithubStore._process_redis_store",
        autospec=True)
    def test_process_data_sends_heartbeat_correctly(self,
                                                    mock_process_redis_store,
                                                    mock_basic_ack) -> None:
        """A successful ``_process_data`` publishes exactly one valid
        heartbeat on the health-check exchange."""
        mock_basic_ack.return_value = None
        try:
            self.test_rabbit_manager.connect()
            self.test_store._initialise_rabbitmq()
            self.test_rabbit_manager.queue_delete(self.test_queue_name)
            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=False)
            self.assertEqual(0, res.method.message_count)
            self.test_rabbit_manager.queue_bind(
                queue=self.test_queue_name, exchange=HEALTH_CHECK_EXCHANGE,
                routing_key=self.routing_key)
            blocking_channel = self.test_store.rabbitmq.channel
            method_chains = pika.spec.Basic.Deliver(
                routing_key=GITHUB_STORE_INPUT_ROUTING_KEY)
            properties = pika.spec.BasicProperties()
            self.test_store._process_data(
                blocking_channel, method_chains, properties,
                json.dumps(self.github_data_1).encode())
            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=True)
            self.assertEqual(1, res.method.message_count)
            heartbeat_test = {
                'component_name': self.test_store_name,
                'is_alive': True,
                'timestamp': datetime(2012, 1, 1).timestamp()
            }
            _, _, body = self.test_rabbit_manager.basic_get(
                self.test_queue_name)
            self.assertEqual(heartbeat_test, json.loads(body))
            mock_process_redis_store.assert_called_once()
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @mock.patch("src.data_store.stores.store.RabbitMQApi.basic_ack",
                autospec=True)
    def test_process_data_doesnt_send_heartbeat_on_processing_error(
            self, mock_basic_ack) -> None:
        """A payload that fails processing must leave the heartbeat queue
        empty."""
        mock_basic_ack.return_value = None
        try:
            self.test_rabbit_manager.connect()
            self.test_store._initialise_rabbitmq()
            self.test_rabbit_manager.queue_delete(self.test_queue_name)
            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=False)
            self.assertEqual(0, res.method.message_count)
            self.test_rabbit_manager.queue_bind(
                queue=self.test_queue_name, exchange=HEALTH_CHECK_EXCHANGE,
                routing_key=self.routing_key)
            blocking_channel = self.test_store.rabbitmq.channel
            method_chains = pika.spec.Basic.Deliver(
                routing_key=GITHUB_STORE_INPUT_ROUTING_KEY)
            properties = pika.spec.BasicProperties()
            self.test_store._process_data(
                blocking_channel, method_chains, properties,
                json.dumps(self.github_data_unexpected).encode())
            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=True)
            self.assertEqual(0, res.method.message_count)
        except Exception as e:
            self.fail("Test failed: {}".format(e))
class TestAlertStore(unittest.TestCase):
    """Integration tests for ``AlertStore``.

    Exercises the store against live RabbitMQ, Redis and MongoDB instances
    (addresses taken from the environment).
    """

    def setUp(self) -> None:
        # Disabled logger: the APIs under test require a logger but test output
        # should stay quiet.
        self.dummy_logger = logging.getLogger('Dummy')
        self.dummy_logger.disabled = True
        self.connection_check_time_interval = timedelta(seconds=0)
        self.rabbit_ip = env.RABBIT_IP
        self.rabbitmq = RabbitMQApi(
            self.dummy_logger, self.rabbit_ip,
            connection_check_time_interval=self.connection_check_time_interval)
        # Separate connection used by the tests themselves to inspect queues.
        self.test_rabbit_manager = RabbitMQApi(
            self.dummy_logger, self.rabbit_ip,
            connection_check_time_interval=self.connection_check_time_interval)
        self.mongo_ip = env.DB_IP
        self.mongo_db = env.DB_NAME
        self.mongo_port = env.DB_PORT
        self.mongo = MongoApi(logger=self.dummy_logger.getChild(
            MongoApi.__name__), db_name=self.mongo_db, host=self.mongo_ip,
            port=self.mongo_port)
        self.redis_db = env.REDIS_DB
        self.redis_host = env.REDIS_IP
        self.redis_port = env.REDIS_PORT
        self.redis_namespace = env.UNIQUE_ALERTER_IDENTIFIER
        self.redis = RedisApi(self.dummy_logger, self.redis_db,
                              self.redis_host, self.redis_port, '',
                              self.redis_namespace,
                              self.connection_check_time_interval)
        self.test_store_name = 'store name'
        self.test_store = AlertStore(self.test_store_name, self.dummy_logger,
                                     self.rabbitmq)
        self.routing_key = 'heartbeat.worker'
        self.test_queue_name = 'test queue'
        connect_to_rabbit(self.rabbitmq)
        self.rabbitmq.exchange_declare(HEALTH_CHECK_EXCHANGE, 'topic', False,
                                       True, False, False)
        self.rabbitmq.exchange_declare(STORE_EXCHANGE, 'direct', False, True,
                                       False, False)
        self.rabbitmq.queue_declare(ALERT_STORE_INPUT_QUEUE, False, True,
                                    False, False)
        self.rabbitmq.queue_bind(ALERT_STORE_INPUT_QUEUE, STORE_EXCHANGE,
                                 ALERT_STORE_INPUT_ROUTING_KEY)
        connect_to_rabbit(self.test_rabbit_manager)
        self.test_rabbit_manager.queue_declare(self.test_queue_name, False,
                                               True, False, False)
        self.test_rabbit_manager.queue_bind(self.test_queue_name,
                                            HEALTH_CHECK_EXCHANGE,
                                            self.routing_key)
        self.test_data_str = 'test data'
        self.test_exception = PANICException('test_exception', 1)
        # Three alert fixtures sharing one parent_id/metric, differing in
        # origin, severity and message.
        self.parent_id = 'test_parent_id'
        self.alert_id = 'test_alert_id'
        self.origin_id = 'test_origin_id'
        self.alert_name = 'test_alert'
        self.metric = 'system_is_down'
        self.severity = 'warning'
        self.message = 'alert message'
        self.value = 'alert_code_1'
        self.alert_id_2 = 'test_alert_id_2'
        self.origin_id_2 = 'test_origin_id_2'
        self.alert_name_2 = 'test_alert_2'
        self.severity_2 = 'critical'
        self.message_2 = 'alert message 2'
        self.value_2 = 'alert_code_2'
        self.alert_id_3 = 'test_alert_id_3'
        self.origin_id_3 = 'test_origin_id_3'
        self.alert_name_3 = 'test_alert_3'
        self.severity_3 = 'info'
        self.message_3 = 'alert message 3'
        self.value_3 = 'alert_code_3'
        self.last_monitored = datetime(2012, 1, 1).timestamp()
        self.none = None
        self.alert_data_1 = {
            'parent_id': self.parent_id,
            'origin_id': self.origin_id,
            'alert_code': {
                'name': self.alert_name,
                'value': self.value,
            },
            'severity': self.severity,
            'metric': self.metric,
            'message': self.message,
            'timestamp': self.last_monitored,
        }
        self.alert_data_2 = {
            'parent_id': self.parent_id,
            'origin_id': self.origin_id_2,
            'alert_code': {
                'name': self.alert_name_2,
                'value': self.value_2,
            },
            'severity': self.severity_2,
            'metric': self.metric,
            'message': self.message_2,
            'timestamp': self.last_monitored,
        }
        self.alert_data_3 = {
            'parent_id': self.parent_id,
            'origin_id': self.origin_id_3,
            'alert_code': {
                'name': self.alert_name_3,
                'value': self.value_3,
            },
            'severity': self.severity_3,
            'metric': self.metric,
            'message': self.message_3,
            'timestamp': self.last_monitored,
        }
        # Malformed payload (missing the expected alert fields -> KeyError).
        self.alert_data_key_error = {"result": {"data": {}, "data2": {}}}
        # Payload with an unrecognised top-level key.
        self.alert_data_unexpected = {"unexpected": {}}

    def tearDown(self) -> None:
        # Remove broker state created in setUp, then clear Redis keys and drop
        # the Mongo test collection.
        connect_to_rabbit(self.rabbitmq)
        delete_queue_if_exists(self.rabbitmq, ALERT_STORE_INPUT_QUEUE)
        delete_exchange_if_exists(self.rabbitmq, STORE_EXCHANGE)
        delete_exchange_if_exists(self.rabbitmq, HEALTH_CHECK_EXCHANGE)
        disconnect_from_rabbit(self.rabbitmq)
        connect_to_rabbit(self.test_rabbit_manager)
        delete_queue_if_exists(self.test_rabbit_manager, self.test_queue_name)
        disconnect_from_rabbit(self.test_rabbit_manager)
        self.dummy_logger = None
        self.connection_check_time_interval = None
        self.rabbitmq = None
        self.test_rabbit_manager = None
        self.redis.delete_all_unsafe()
        self.redis = None
        self.mongo.drop_collection(self.parent_id)
        self.mongo = None
        self.test_store = None

    def test__str__returns_name_correctly(self) -> None:
        self.assertEqual(self.test_store_name, str(self.test_store))

    def test_name_property_returns_name_correctly(self) -> None:
        self.assertEqual(self.test_store_name, self.test_store.name)

    def test_mongo_ip_property_returns_mongo_ip_correctly(self) -> None:
        self.assertEqual(self.mongo_ip, self.test_store.mongo_ip)

    def test_mongo_db_property_returns_mongo_db_correctly(self) -> None:
        self.assertEqual(self.mongo_db, self.test_store.mongo_db)

    def test_mongo_port_property_returns_mongo_port_correctly(self) -> None:
        self.assertEqual(self.mongo_port, self.test_store.mongo_port)

    def test_mongo_property_returns_mongo(self) -> None:
        # AlertStore uses Mongo, so the property is an initialised MongoApi.
        self.assertEqual(type(self.mongo), type(self.test_store.mongo))

    def test_redis_property_returns_redis_correctly(self) -> None:
        self.assertEqual(type(self.redis), type(self.test_store.redis))

    def test_initialise_rabbitmq_initialises_everything_as_expected(
            self) -> None:
        """``_initialise_rabbitmq`` must declare exchanges/queues and enable
        delivery confirmations on an open connection."""
        try:
            # To make sure that the exchanges have not already been declared
            self.rabbitmq.connect()
            self.rabbitmq.queue_delete(ALERT_STORE_INPUT_QUEUE)
            self.test_rabbit_manager.queue_delete(self.test_queue_name)
            self.rabbitmq.exchange_delete(HEALTH_CHECK_EXCHANGE)
            self.rabbitmq.exchange_delete(STORE_EXCHANGE)
            self.rabbitmq.disconnect()

            self.test_store._initialise_rabbitmq()

            # Perform checks that the connection has been opened, marked as open
            # and that the delivery confirmation variable is set.
            self.assertTrue(self.test_store.rabbitmq.is_connected)
            self.assertTrue(self.test_store.rabbitmq.connection.is_open)
            self.assertTrue(
                self.test_store.rabbitmq.channel._delivery_confirmation)

            # Check whether the producing exchanges have been created by
            # using passive=True. If this check fails an exception is raised
            # automatically.
            self.test_store.rabbitmq.exchange_declare(STORE_EXCHANGE,
                                                      passive=True)
            self.test_store.rabbitmq.exchange_declare(HEALTH_CHECK_EXCHANGE,
                                                      passive=True)

            # Check whether the exchange has been created by sending messages
            # to it. If this fails an exception is raised, hence the test fails.
            self.test_store.rabbitmq.basic_publish_confirm(
                exchange=HEALTH_CHECK_EXCHANGE, routing_key=self.routing_key,
                body=self.test_data_str, is_body_dict=False,
                properties=pika.BasicProperties(delivery_mode=2),
                mandatory=False)

            # Check whether the exchange has been created by sending messages
            # to it. If this fails an exception is raised, hence the test fails.
            self.test_store.rabbitmq.basic_publish_confirm(
                exchange=STORE_EXCHANGE,
                routing_key=ALERT_STORE_INPUT_ROUTING_KEY,
                body=self.test_data_str, is_body_dict=False,
                properties=pika.BasicProperties(delivery_mode=2),
                mandatory=False)

            # Re-declare queue to get the number of messages
            res = self.test_store.rabbitmq.queue_declare(
                ALERT_STORE_INPUT_QUEUE, False, True, False, False)
            self.assertEqual(1, res.method.message_count)
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @parameterized.expand([
        # NOTE(review): the trailing space inside the fixture string is
        # preserved deliberately — it is harmless to eval().
        ("KeyError", "self.alert_data_key_error "),
    ])
    @mock.patch("src.data_store.stores.store.RabbitMQApi.basic_ack",
                autospec=True)
    @mock.patch("src.data_store.stores.store.Store._send_heartbeat",
                autospec=True)
    def test_process_data_with_bad_data_does_raises_exceptions(
            self, mock_error, mock_bad_data, mock_send_hb, mock_ack) -> None:
        """Bad payloads are acked but raise in ``_process_mongo_store`` and
        produce no heartbeat."""
        mock_ack.return_value = None
        try:
            self.test_store._initialise_rabbitmq()
            blocking_channel = self.test_store.rabbitmq.channel
            method_chains = pika.spec.Basic.Deliver(
                routing_key=ALERT_STORE_INPUT_ROUTING_KEY)
            properties = pika.spec.BasicProperties()
            self.test_store._process_data(
                blocking_channel, method_chains, properties,
                json.dumps(self.alert_data_unexpected).encode())
            self.assertRaises(eval(mock_error),
                              self.test_store._process_mongo_store,
                              eval(mock_bad_data))
            mock_ack.assert_called_once()
            mock_send_hb.assert_not_called()
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @freeze_time("2012-01-01")
    @mock.patch("src.data_store.stores.store.RabbitMQApi.basic_ack",
                autospec=True)
    @mock.patch("src.data_store.stores.alert.AlertStore._process_redis_store",
                autospec=True)
    @mock.patch("src.data_store.stores.alert.AlertStore._process_mongo_store",
                autospec=True)
    def test_process_data_sends_heartbeat_correctly(self,
                                                    mock_process_mongo_store,
                                                    mock_process_redis_store,
                                                    mock_basic_ack) -> None:
        """A successful ``_process_data`` publishes exactly one valid
        heartbeat on the health-check exchange."""
        mock_basic_ack.return_value = None
        try:
            self.test_rabbit_manager.connect()
            self.test_store._initialise_rabbitmq()
            self.test_rabbit_manager.queue_delete(self.test_queue_name)
            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=False)
            self.assertEqual(0, res.method.message_count)
            self.test_rabbit_manager.queue_bind(
                queue=self.test_queue_name, exchange=HEALTH_CHECK_EXCHANGE,
                routing_key=self.routing_key)
            blocking_channel = self.test_store.rabbitmq.channel
            method_chains = pika.spec.Basic.Deliver(
                routing_key=ALERT_STORE_INPUT_ROUTING_KEY)
            properties = pika.spec.BasicProperties()
            self.test_store._process_data(
                blocking_channel, method_chains, properties,
                json.dumps(self.alert_data_1).encode())
            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=True)
            self.assertEqual(1, res.method.message_count)
            heartbeat_test = {
                'component_name': self.test_store_name,
                'is_alive': True,
                'timestamp': datetime(2012, 1, 1).timestamp()
            }
            _, _, body = self.test_rabbit_manager.basic_get(
                self.test_queue_name)
            self.assertEqual(heartbeat_test, json.loads(body))
            mock_process_mongo_store.assert_called_once()
            mock_process_redis_store.assert_called_once()
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @mock.patch("src.data_store.stores.store.RabbitMQApi.basic_ack",
                autospec=True)
    def test_process_data_doesnt_send_heartbeat_on_processing_error(
            self, mock_basic_ack) -> None:
        """A payload that fails processing must leave the heartbeat queue
        empty."""
        mock_basic_ack.return_value = None
        try:
            self.test_rabbit_manager.connect()
            self.test_store._initialise_rabbitmq()
            self.test_rabbit_manager.queue_delete(self.test_queue_name)
            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=False)
            self.assertEqual(0, res.method.message_count)
            self.test_rabbit_manager.queue_bind(
                queue=self.test_queue_name, exchange=HEALTH_CHECK_EXCHANGE,
                routing_key=self.routing_key)
            blocking_channel = self.test_store.rabbitmq.channel
            method_chains = pika.spec.Basic.Deliver(
                routing_key=ALERT_STORE_INPUT_ROUTING_KEY)
            properties = pika.spec.BasicProperties()
            self.test_store._process_data(
                blocking_channel, method_chains, properties,
                json.dumps(self.alert_data_unexpected).encode())
            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=True)
            self.assertEqual(0, res.method.message_count)
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @mock.patch.object(MongoApi, "update_one")
    def test_process_mongo_store_calls_update_one(self,
                                                  mock_update_one) -> None:
        self.test_store._process_mongo_store(self.alert_data_1)
        mock_update_one.assert_called_once()

    @mock.patch.object(RedisApi, "hset")
    def test_process_redis_store_calls_hset(self, mock_hset) -> None:
        self.test_store._process_redis_store(self.alert_data_1)
        mock_hset.assert_called_once()

    @parameterized.expand([
        ("self.alert_data_1",),
        ("self.alert_data_2",),
        ("self.alert_data_3",),
    ])
    @freeze_time("2012-01-01")
    @mock.patch.object(MongoApi, "update_one")
    def test_process_mongo_store_calls_mongo_correctly(
            self, mock_system_data, mock_update_one) -> None:
        """``update_one`` receives the capped-document filter ($lt 1000) and
        the $push/$min/$max/$inc update for the alert."""
        data = eval(mock_system_data)
        self.test_store._process_mongo_store(data)
        call_1 = call(
            data['parent_id'], {
                'doc_type': 'alert',
                'n_alerts': {
                    '$lt': 1000
                }
            }, {
                '$push': {
                    'alerts': {
                        'origin': data['origin_id'],
                        'alert_name': data['alert_code']['name'],
                        'severity': data['severity'],
                        'metric': data['metric'],
                        'message': data['message'],
                        'timestamp': str(data['timestamp']),
                    }
                },
                '$min': {
                    'first': data['timestamp']
                },
                '$max': {
                    'last': data['timestamp']
                },
                '$inc': {
                    'n_alerts': 1
                },
            })
        mock_update_one.assert_has_calls([call_1])

    @parameterized.expand([
        ("self.alert_data_1",),
        ("self.alert_data_2",),
        ("self.alert_data_3",),
    ])
    @freeze_time("2012-01-01")
    @mock.patch.object(RedisApi, "hset")
    def test_process_redis_store_calls_redis_correctly(
            self, mock_system_data, mock_hset) -> None:
        """``hset`` receives the parent hash, the metric-specific alert key
        (resolved dynamically via ``eval`` on the metric name) and the JSON
        severity/message payload."""
        data = eval(mock_system_data)
        self.test_store._process_redis_store(data)
        metric_data = {
            'severity': data['severity'],
            'message': data['message']
        }
        key = data['origin_id']
        call_1 = call(Keys.get_hash_parent(data['parent_id']),
                      eval('Keys.get_alert_{}(key)'.format(data['metric'])),
                      json.dumps(metric_data))
        mock_hset.assert_has_calls([call_1])

    @parameterized.expand([
        ("self.alert_data_1",),
        ("self.alert_data_2",),
        ("self.alert_data_3",),
    ])
    @freeze_time("2012-01-01")
    @mock.patch("src.data_store.stores.store.RabbitMQApi.basic_ack",
                autospec=True)
    @mock.patch("src.data_store.stores.alert.AlertStore._process_redis_store",
                autospec=True)
    @mock.patch("src.data_store.stores.store.Store._send_heartbeat",
                autospec=True)
    @mock.patch.object(MongoApi, "update_one")
    def test_process_data_calls_mongo_correctly(
            self, mock_system_data, mock_update_one, mock_send_hb,
            mock_process_redis_store, mock_ack) -> None:
        """End-to-end ``_process_data``: acks, heartbeats and forwards the
        alert to Mongo with the expected update document."""
        mock_ack.return_value = None
        try:
            self.test_store._initialise_rabbitmq()
            data = eval(mock_system_data)
            blocking_channel = self.test_store.rabbitmq.channel
            method_chains = pika.spec.Basic.Deliver(
                routing_key=ALERT_STORE_INPUT_ROUTING_KEY)
            properties = pika.spec.BasicProperties()
            self.test_store._process_data(blocking_channel, method_chains,
                                          properties,
                                          json.dumps(data).encode())
            mock_ack.assert_called_once()
            mock_send_hb.assert_called_once()

            call_1 = call(
                data['parent_id'], {
                    'doc_type': 'alert',
                    'n_alerts': {
                        '$lt': 1000
                    }
                }, {
                    '$push': {
                        'alerts': {
                            'origin': data['origin_id'],
                            'alert_name': data['alert_code']['name'],
                            'severity': data['severity'],
                            'metric': data['metric'],
                            'message': data['message'],
                            'timestamp': str(data['timestamp']),
                        }
                    },
                    '$min': {
                        'first': data['timestamp']
                    },
                    '$max': {
                        'last': data['timestamp']
                    },
                    '$inc': {
                        'n_alerts': 1
                    },
                })
            mock_update_one.assert_has_calls([call_1])
            mock_process_redis_store.assert_called_once()
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @parameterized.expand([
        ("self.alert_data_1",),
        ("self.alert_data_2",),
        ("self.alert_data_3",),
    ])
    @freeze_time("2012-01-01")
    @mock.patch("src.data_store.stores.store.RabbitMQApi.basic_ack",
                autospec=True)
    @mock.patch("src.data_store.stores.alert.AlertStore._process_mongo_store",
                autospec=True)
    @mock.patch("src.data_store.stores.store.Store._send_heartbeat",
                autospec=True)
    @mock.patch.object(RedisApi, "hset")
    def test_process_data_calls_redis_correctly(
            self, mock_system_data, mock_hset, mock_send_hb,
            mock_process_mongo_store, mock_ack) -> None:
        """End-to-end ``_process_data``: acks, heartbeats and forwards the
        alert to Redis with the expected hash/key/payload."""
        mock_ack.return_value = None
        try:
            self.test_store._initialise_rabbitmq()
            data = eval(mock_system_data)
            blocking_channel = self.test_store.rabbitmq.channel
            method_chains = pika.spec.Basic.Deliver(
                routing_key=ALERT_STORE_INPUT_ROUTING_KEY)
            properties = pika.spec.BasicProperties()
            self.test_store._process_data(blocking_channel, method_chains,
                                          properties,
                                          json.dumps(data).encode())
            mock_ack.assert_called_once()
            mock_send_hb.assert_called_once()

            metric_data = {
                'severity': data['severity'],
                'message': data['message']
            }
            key = data['origin_id']
            call_1 = call(
                Keys.get_hash_parent(data['parent_id']),
eval('Keys.get_alert_{}(key)'.format(data['metric'])), json.dumps(metric_data)) mock_hset.assert_has_calls([call_1]) mock_process_mongo_store.assert_called_once() except Exception as e: self.fail("Test failed: {}".format(e)) @parameterized.expand([ ("self.alert_data_1", ), ("self.alert_data_2", ), ("self.alert_data_3", ), ]) def test_process_mongo_store_mongo_stores_correctly( self, mock_system_data) -> None: data = eval(mock_system_data) self.test_store._process_mongo_store(data) documents = self.mongo.get_all(data['parent_id']) document = documents[0] expected = [ 'alert', 1, str(data['origin_id']), str(data['alert_code']['name']), str(data['severity']), str(data['metric']), str(data['message']), str(data['timestamp']) ] actual = [ document['doc_type'], document['n_alerts'], document['alerts'][0]['origin'], document['alerts'][0]['alert_name'], document['alerts'][0]['severity'], document['alerts'][0]['metric'], document['alerts'][0]['message'], document['alerts'][0]['timestamp'] ] self.assertListEqual(expected, actual) @parameterized.expand([ ("self.alert_data_1", ), ("self.alert_data_2", ), ("self.alert_data_3", ), ]) def test_process_redis_store_redis_stores_correctly( self, mock_system_data) -> None: data = eval(mock_system_data) self.test_store._process_redis_store(data) key = data['origin_id'] stored_data = self.redis.hget( Keys.get_hash_parent(data['parent_id']), eval('Keys.get_alert_{}(key)'.format(data['metric']))) expected_data = { 'severity': data['severity'], 'message': data['message'] } self.assertEqual(expected_data, json.loads(stored_data)) @parameterized.expand([ ("self.alert_data_1", ), ("self.alert_data_2", ), ("self.alert_data_3", ), ]) @mock.patch("src.data_store.stores.store.RabbitMQApi.basic_ack", autospec=True) @mock.patch("src.data_store.stores.alert.AlertStore._process_redis_store", autospec=True) @mock.patch("src.data_store.stores.store.Store._send_heartbeat", autospec=True) def test_process_data_results_stores_in_mongo_correctly( self, 
mock_system_data, mock_send_hb, mock_process_redis_store, mock_ack) -> None: mock_ack.return_value = None try: self.test_store._initialise_rabbitmq() data = eval(mock_system_data) blocking_channel = self.test_store.rabbitmq.channel method_chains = pika.spec.Basic.Deliver( routing_key=ALERT_STORE_INPUT_ROUTING_KEY) properties = pika.spec.BasicProperties() self.test_store._process_data(blocking_channel, method_chains, properties, json.dumps(data).encode()) mock_process_redis_store.assert_called_once() mock_ack.assert_called_once() mock_send_hb.assert_called_once() documents = self.mongo.get_all(data['parent_id']) document = documents[0] expected = [ 'alert', 1, str(data['origin_id']), str(data['alert_code']['name']), str(data['severity']), str(data['message']), str(data['timestamp']) ] actual = [ document['doc_type'], document['n_alerts'], document['alerts'][0]['origin'], document['alerts'][0]['alert_name'], document['alerts'][0]['severity'], document['alerts'][0]['message'], document['alerts'][0]['timestamp'] ] self.assertListEqual(expected, actual) except Exception as e: self.fail("Test failed: {}".format(e)) @parameterized.expand([ ("self.alert_data_1", ), ("self.alert_data_2", ), ("self.alert_data_3", ), ]) @mock.patch("src.data_store.stores.store.RabbitMQApi.basic_ack", autospec=True) @mock.patch("src.data_store.stores.alert.AlertStore._process_mongo_store", autospec=True) @mock.patch("src.data_store.stores.store.Store._send_heartbeat", autospec=True) def test_process_data_results_stores_in_redis_correctly( self, mock_system_data, mock_send_hb, mock_process_mongo_store, mock_ack) -> None: mock_ack.return_value = None try: self.test_store._initialise_rabbitmq() data = eval(mock_system_data) blocking_channel = self.test_store.rabbitmq.channel method_chains = pika.spec.Basic.Deliver( routing_key=ALERT_STORE_INPUT_ROUTING_KEY) properties = pika.spec.BasicProperties() self.test_store._process_data(blocking_channel, method_chains, properties, 
json.dumps(data).encode()) mock_process_mongo_store.assert_called_once() mock_ack.assert_called_once() mock_send_hb.assert_called_once() key = data['origin_id'] stored_data = self.redis.hget( Keys.get_hash_parent(data['parent_id']), eval('Keys.get_alert_{}(key)'.format(data['metric']))) expected_data = { 'severity': data['severity'], 'message': data['message'] } self.assertEqual(expected_data, json.loads(stored_data)) except Exception as e: self.fail("Test failed: {}".format(e))