def delete_exchange_if_exists(rabbit: RabbitMQApi, exchange_name: str) -> None:
    """Delete ``exchange_name`` on the given RabbitMQ connection, tolerating
    its absence.

    A passive declare is used as an existence probe: it raises instead of
    creating the exchange when the exchange is missing, in which case the
    deletion is skipped and a note is printed.
    """
    try:
        # Passive declare raises ChannelClosedByBroker if the exchange does
        # not exist; only on success do we go ahead and delete it.
        rabbit.exchange_declare(exchange_name, passive=True)
        rabbit.exchange_delete(exchange_name)
    except pika.exceptions.ChannelClosedByBroker:
        # Broker closes the channel for an unknown exchange - nothing to do.
        print("Exchange {} does not exist - don't need to close".format(
            exchange_name))
def _initialise_and_declare_config_queues() -> None:
    """Create the config exchange and declare/bind every configuration queue.

    Keeps retrying (sleeping ``RE_INITIALISE_SLEEPING_PERIOD`` seconds between
    attempts) until the whole procedure completes successfully, so callers can
    rely on the queues existing once this returns.
    """
    # Table of (queue name, routing keys to bind on CONFIG_EXCHANGE).
    # Driving the declare/bind work from this table (instead of one
    # copy-pasted stanza per queue, as the old TODO noted) makes the
    # set-up easier to maintain.
    queue_bindings = (
        (ALERT_ROUTER_CONFIGS_QUEUE_NAME,
         ['channels.*']),
        (SYSTEM_ALERTERS_MANAGER_CONFIGS_QUEUE_NAME,
         ['chains.*.*.alerts_config', 'general.alerts_config']),
        (CHANNELS_MANAGER_CONFIGS_QUEUE_NAME,
         ['channels.*']),
        (GITHUB_MONITORS_MANAGER_CONFIGS_QUEUE_NAME,
         ['chains.*.*.repos_config', 'general.repos_config']),
        (SYSTEM_MONITORS_MANAGER_CONFIGS_QUEUE_NAME,
         ['chains.*.*.nodes_config', 'general.systems_config']),
        (STORE_CONFIGS_QUEUE_NAME,
         ['#']),
    )

    dummy_logger = logging.getLogger('Dummy')
    while True:
        try:
            rabbitmq = RabbitMQApi(dummy_logger, env.RABBIT_IP)
            log_and_print(
                "Connecting with RabbitMQ to create and bind "
                "configuration queues.", dummy_logger)
            ret = rabbitmq.connect()
            if ret == -1:
                # connect() signals temporary unavailability with -1 rather
                # than an exception, so retry explicitly.
                log_and_print(
                    "RabbitMQ is temporarily unavailable. Re-trying in {} "
                    "seconds.".format(RE_INITIALISE_SLEEPING_PERIOD),
                    dummy_logger)
                time.sleep(RE_INITIALISE_SLEEPING_PERIOD)
                continue

            # Config exchange declaration
            log_and_print("Creating {} exchange.".format(CONFIG_EXCHANGE),
                          dummy_logger)
            rabbitmq.exchange_declare(CONFIG_EXCHANGE, 'topic', False, True,
                                      False, False)

            # Declare every configuration queue and bind it with each of its
            # routing keys.
            for queue_name, routing_keys in queue_bindings:
                log_and_print("Creating queue '{}'".format(queue_name),
                              dummy_logger)
                rabbitmq.queue_declare(queue_name, False, True, False, False)
                for routing_key in routing_keys:
                    log_and_print(
                        "Binding queue '{}' to '{}' exchange with routing "
                        "key {}.".format(queue_name, CONFIG_EXCHANGE,
                                         routing_key), dummy_logger)
                    rabbitmq.queue_bind(queue_name, CONFIG_EXCHANGE,
                                        routing_key)

            ret = rabbitmq.disconnect()
            if ret == -1:
                log_and_print(
                    "RabbitMQ is temporarily unavailable. Re-trying in {} "
                    "seconds.".format(RE_INITIALISE_SLEEPING_PERIOD),
                    dummy_logger)
                time.sleep(RE_INITIALISE_SLEEPING_PERIOD)
                continue

            log_and_print(
                "Configuration queues initialisation procedure has "
                "completed successfully. Disconnecting with "
                "RabbitMQ.", dummy_logger)
            break
        except pika.exceptions.AMQPChannelError as e:
            log_and_print(
                "Channel error while initialising the configuration "
                "queues: {}. Re-trying in {} "
                "seconds.".format(repr(e), RE_INITIALISE_SLEEPING_PERIOD),
                dummy_logger)
            time.sleep(RE_INITIALISE_SLEEPING_PERIOD)
        except pika.exceptions.AMQPConnectionError as e:
            log_and_print(
                "RabbitMQ connection error while initialising the "
                "configuration queues: {}. Re-trying in {} "
                "seconds.".format(repr(e), RE_INITIALISE_SLEEPING_PERIOD),
                dummy_logger)
            time.sleep(RE_INITIALISE_SLEEPING_PERIOD)
        except Exception as e:
            log_and_print(
                "Unexpected exception while initialising the "
                "configuration queues: {}. Re-trying in {} "
                "seconds.".format(repr(e), RE_INITIALISE_SLEEPING_PERIOD),
                dummy_logger)
            time.sleep(RE_INITIALISE_SLEEPING_PERIOD)
class TestConfigStore(unittest.TestCase):
    """Integration tests for the ConfigStore data-store component.

    NOTE(review): these tests talk to live RabbitMQ and Redis instances whose
    addresses come from the environment (``env.*``), so setUp declares real
    exchanges/queues and tearDown deletes them again.
    """

    def setUp(self) -> None:
        # The components under test require a logger, but test output is not
        # wanted, hence a disabled dummy logger.
        self.dummy_logger = logging.getLogger('Dummy')
        self.dummy_logger.disabled = True
        self.connection_check_time_interval = timedelta(seconds=0)
        self.rabbit_ip = env.RABBIT_IP
        # Two independent connections: one handed to the store under test,
        # one used by the test itself to inspect queues and messages.
        self.rabbitmq = RabbitMQApi(
            self.dummy_logger, self.rabbit_ip,
            connection_check_time_interval=self.connection_check_time_interval)
        self.test_rabbit_manager = RabbitMQApi(
            self.dummy_logger, self.rabbit_ip,
            connection_check_time_interval=self.connection_check_time_interval)
        self.redis_db = env.REDIS_DB
        self.redis_host = env.REDIS_IP
        self.redis_port = env.REDIS_PORT
        self.redis_namespace = env.UNIQUE_ALERTER_IDENTIFIER
        self.redis = RedisApi(self.dummy_logger, self.redis_db,
                              self.redis_host, self.redis_port, '',
                              self.redis_namespace,
                              self.connection_check_time_interval)
        self.mongo_ip = env.DB_IP
        self.mongo_db = env.DB_NAME
        self.mongo_port = env.DB_PORT
        self.test_store_name = 'store name'
        self.test_store = ConfigStore(self.test_store_name,
                                      self.dummy_logger, self.rabbitmq)
        self.routing_key = 'heartbeat.worker'
        self.test_queue_name = 'test queue'
        connect_to_rabbit(self.rabbitmq)
        self.rabbitmq.exchange_declare(HEALTH_CHECK_EXCHANGE, 'topic', False,
                                       True, False, False)
        self.rabbitmq.exchange_declare(CONFIG_EXCHANGE, 'topic', False, True,
                                       False, False)
        self.rabbitmq.queue_declare(STORE_CONFIGS_QUEUE_NAME, False, True,
                                    False, False)
        self.rabbitmq.queue_bind(STORE_CONFIGS_QUEUE_NAME, CONFIG_EXCHANGE,
                                 STORE_CONFIGS_ROUTING_KEY_CHAINS)
        connect_to_rabbit(self.test_rabbit_manager)
        self.test_rabbit_manager.queue_declare(self.test_queue_name, False,
                                               True, False, False)
        self.test_rabbit_manager.queue_bind(self.test_queue_name,
                                            HEALTH_CHECK_EXCHANGE,
                                            self.routing_key)
        self.test_parent_id = 'parent_id'
        self.test_config_type = 'config_type'
        self.test_data_str = 'test data'
        self.test_exception = PANICException('test_exception', 1)
        self.last_monitored = datetime(2012, 1, 1).timestamp()
        # One routing key per configuration category the store handles.
        self.routing_key_1 = 'chains.cosmos.cosmos.nodes_config'
        self.routing_key_2 = 'chains.cosmos.cosmos.alerts_config'
        self.routing_key_3 = 'chains.cosmos.cosmos.repos_config'
        self.routing_key_4 = 'general.repos_config'
        self.routing_key_5 = 'general.alerts_config'
        self.routing_key_6 = 'general.systems_config'
        self.routing_key_7 = 'channels.email_config'
        self.routing_key_8 = 'channels.pagerduty_config'
        self.routing_key_9 = 'channels.opsgenie_config'
        self.routing_key_10 = 'channels.telegram_config'
        self.routing_key_11 = 'channels.twilio_config'
        # Sample configuration payloads, one per routing key above.
        self.nodes_config_1 = {
            "node_3e0a5189-f474-4120-a0a4-d5ab817c0504": {
                "id": "node_3e0a5189-f474-4120-a0a4-d5ab817c0504",
                "parent_id": "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548",
                "name": "cosmos_sentry_1(46.166.146.165:9100)",
                "monitor_tendermint": "false",
                "monitor_rpc": "false",
                "monitor_prometheus": "false",
                "exporter_url": "http://46.166.146.165:9100/metrics",
                "monitor_system": "true",
                "is_validator": "false",
                "monitor_node": "true",
                "is_archive_node": "true",
                "use_as_data_source": "true"
            },
            "node_f8ebf267-9b53-4aa1-9c45-e84a9cba5fbc": {
                "id": "node_f8ebf267-9b53-4aa1-9c45-e84a9cba5fbc",
                "parent_id": "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548",
                "name": "cosmos_sentry_2(172.16.151.10:9100)",
                "monitor_tendermint": "false",
                "monitor_rpc": "false",
                "monitor_prometheus": "false",
                "exporter_url": "http://172.16.151.10:9100/metrics",
                "monitor_system": "true",
                "is_validator": "false",
                "monitor_node": "true",
                "is_archive_node": "true",
                "use_as_data_source": "true"
            }
        }
        self.repos_config_1 = {
            "repo_4ea76d87-d291-4b68-88af-da2bd1e16e2e": {
                "id": "repo_4ea76d87-d291-4b68-88af-da2bd1e16e2e",
                "parent_id": "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548",
                "repo_name": "tendermint/tendermint/",
                "monitor_repo": "true"
            },
            "repo_83713022-4155-420b-ada1-73a863f58282": {
                "id": "repo_83713022-4155-420b-ada1-73a863f58282",
                "parent_id": "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548",
                "repo_name": "SimplyVC/panic_cosmos/",
                "monitor_repo": "true"
            }
        }
        self.alerts_config_1 = {
            "1": {
                "name": "open_file_descriptors",
                "enabled": "true",
                "parent_id": "GLOBAL",
                "critical_threshold": "95",
                "critical_repeat": "300",
                "critical_enabled": "true",
                "warning_threshold": "85",
                "warning_enabled": "true"
            },
            "2": {
                "name": "system_cpu_usage",
                "enabled": "true",
                "parent_id": "GLOBAL",
                "critical_threshold": "95",
                "critical_repeat": "300",
                "critical_enabled": "true",
                "warning_threshold": "85",
                "warning_enabled": "true"
            },
            "3": {
                "name": "system_storage_usage",
                "enabled": "true",
                "parent_id": "GLOBAL",
                "critical_threshold": "95",
                "critical_repeat": "300",
                "critical_enabled": "true",
                "warning_threshold": "85",
                "warning_enabled": "true"
            },
            "4": {
                "name": "system_ram_usage",
                "enabled": "true",
                "parent_id": "GLOBAL",
                "critical_threshold": "95",
                "critical_repeat": "300",
                "critical_enabled": "true",
                "warning_threshold": "85",
                "warning_enabled": "true"
            },
            "5": {
                "name": "system_is_down",
                "enabled": "true",
                "parent_id": "GLOBAL",
                "critical_threshold": "200",
                "critical_repeat": "300",
                "critical_enabled": "true",
                "warning_threshold": "0",
                "warning_enabled": "true"
            }
        }
        self.systems_config_1 = {
            "system_1d026af1-6cab-403d-8256-c8faa462930a": {
                "id": "system_1d026af1-6cab-403d-8256-c8faa462930a",
                "parent_id": "GLOBAL",
                "name": "matic_full_node_nl(172.26.10.137:9100)",
                "exporter_url": "http://172.26.10.137:9100/metrics",
                "monitor_system": "true"
            },
            "system_a51b3a33-cb3f-4f53-a657-8a5a0efe0822": {
                "id": "system_a51b3a33-cb3f-4f53-a657-8a5a0efe0822",
                "parent_id": "GLOBAL",
                "name": "matic_full_node_mt(172.16.152.137:9100)",
                "exporter_url": "http://172.16.152.137:9100/metrics",
                "monitor_system": "true"
            }
        }
        self.telegram_config_1 = {
            "telegram_8431a28e-a2ce-4e9b-839c-299b62e3d5b9": {
                "id": "telegram_8431a28e-a2ce-4e9b-839c-299b62e3d5b9",
                "channel_name": "telegram_chat_1",
                "bot_token": "1277777773:AAF-78AENtsYXxxdqTL3Ip987N7gmIKJaBE",
                "chat_id": "-759538717",
                "info": "true",
                "warning": "true",
                "critical": "true",
                "error": "true",
                "alerts": "false",
                "commands": "false",
                "parent_ids": "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548,chain_name_94aafe04-8287-463a-8416-0401852b3ca2,GLOBAL",
                "parent_names": "cosmos,kusama,GLOBAL"
            }
        }
        self.twilio_config_1 = {
            "twilio_a7016a6b-9394-4584-abe3-5a5c434b6b7c": {
                "id": "twilio_a7016a6b-9394-4584-abe3-5a5c434b6b7c",
                "channel_name": "twilio_caller_main",
                "account_sid": "ACb77777284e97e49eb2260aada0220e12",
                "auth_token": "d19f777777a0b8e274470d599e5bcc5e8",
                "twilio_phone_no": "+19893077770",
                "twilio_phone_numbers_to_dial_valid": "+35697777380",
                "parent_ids": "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548,chain_name_94aafe04-8287-463a-8416-0401852b3ca2,GLOBAL",
                "parent_names": "cosmos,kusama,GLOBAL"
            }
        }
        self.pagerduty_config_1 = {
            "pagerduty_4092d0ed-ac45-462b-b62a-89cffd4833cc": {
                "id": "pagerduty_4092d0ed-ac45-462b-b62a-89cffd4833cc",
                "channel_name": "pager_duty_1",
                "api_token": "meVp_vyQybcX7dA3o1fS",
                "integration_key": "4a520ce3577777ad89a3518096f3a5189",
                "info": "true",
                "warning": "true",
                "critical": "true",
                "error": "true",
                "parent_ids": "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548,chain_name_94aafe04-8287-463a-8416-0401852b3ca2,GLOBAL",
                "parent_names": "cosmos,kusama,GLOBAL"
            }
        }
        self.opsgenie_config_1 = {
            "opsgenie_9550bee1-5880-41f6-bdcf-a289472d7c35": {
                "id": "opsgenie_9550bee1-5880-41f6-bdcf-a289472d7c35",
                "channel_name": "ops_genie_main",
                "api_token": "77777777-0708-4b7e-a46f-496c85fa0b06",
                "eu": "true",
                "info": "true",
                "warning": "true",
                "critical": "true",
                "error": "true",
                "parent_ids": "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548,chain_name_94aafe04-8287-463a-8416-0401852b3ca2,GLOBAL",
                "parent_names": "cosmos,kusama,GLOBAL"
            }
        }
        self.email_config_1 = {
            "email_01b23d79-10f5-4815-a11f-034f53974b23": {
                "id": "email_01b23d79-10f5-4815-a11f-034f53974b23",
                "channel_name": "main_email_channel",
                "port": "25",
                "smtp": "exchange.olive.com",
                "email_from": "*****@*****.**",
                "emails_to": "*****@*****.**",
                "info": "true",
                "warning": "true",
                "critical": "true",
                "error": "true",
                "parent_ids": "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548,chain_name_94aafe04-8287-463a-8416-0401852b3ca2,GLOBAL",
                "parent_names": "cosmos,kusama,GLOBAL"
            }
        }
        # A payload shape the store does not recognise; used to verify that
        # processing errors do not write to Redis.
        self.config_data_unexpected = {"unexpected": {}}

    def tearDown(self) -> None:
        # Remove all broker-side state created in setUp, then drop references
        # so each test starts from a clean slate.
        connect_to_rabbit(self.rabbitmq)
        delete_queue_if_exists(self.rabbitmq, STORE_CONFIGS_QUEUE_NAME)
        delete_exchange_if_exists(self.rabbitmq, CONFIG_EXCHANGE)
        delete_exchange_if_exists(self.rabbitmq, HEALTH_CHECK_EXCHANGE)
        disconnect_from_rabbit(self.rabbitmq)
        connect_to_rabbit(self.test_rabbit_manager)
        delete_queue_if_exists(self.test_rabbit_manager, self.test_queue_name)
        disconnect_from_rabbit(self.test_rabbit_manager)
        self.redis.delete_all_unsafe()
        self.redis = None
        self.dummy_logger = None
        self.connection_check_time_interval = None
        self.rabbitmq = None
        self.test_rabbit_manager = None

    def test__str__returns_name_correctly(self) -> None:
        self.assertEqual(self.test_store_name, str(self.test_store))

    def test_name_property_returns_name_correctly(self) -> None:
        self.assertEqual(self.test_store_name, self.test_store.name)

    def test_mongo_ip_property_returns_mongo_ip_correctly(self) -> None:
        self.assertEqual(self.mongo_ip, self.test_store.mongo_ip)

    def test_mongo_db_property_returns_mongo_db_correctly(self) -> None:
        self.assertEqual(self.mongo_db, self.test_store.mongo_db)

    def test_mongo_port_property_returns_mongo_port_correctly(self) -> None:
        self.assertEqual(self.mongo_port, self.test_store.mongo_port)

    def test_redis_property_returns_redis_correctly(self) -> None:
        self.assertEqual(type(self.redis), type(self.test_store.redis))

    def test_mongo_property_returns_none_when_mongo_not_init(self) -> None:
        self.assertEqual(None, self.test_store.mongo)

    def test_initialise_rabbitmq_initialises_everything_as_expected(
            self) -> None:
        try:
            # To make sure that the exchanges have not already been declared
            self.rabbitmq.connect()
            self.rabbitmq.exchange_delete(HEALTH_CHECK_EXCHANGE)
            self.rabbitmq.exchange_delete(CONFIG_EXCHANGE)
            self.rabbitmq.disconnect()

            self.test_store._initialise_rabbitmq()

            # Perform checks that the connection has been opened, marked as open
            # and that the delivery confirmation variable is set.
            self.assertTrue(self.test_store.rabbitmq.is_connected)
            self.assertTrue(self.test_store.rabbitmq.connection.is_open)
            self.assertTrue(
                self.test_store.rabbitmq.channel._delivery_confirmation)

            # Check whether the producing exchanges have been created by
            # using passive=True. If this check fails an exception is raised
            # automatically.
            self.test_store.rabbitmq.exchange_declare(CONFIG_EXCHANGE,
                                                      passive=True)
            self.test_store.rabbitmq.exchange_declare(HEALTH_CHECK_EXCHANGE,
                                                      passive=True)

            # Check whether the exchange has been created by sending messages
            # to it. If this fails an exception is raised, hence the test fails.
            self.test_store.rabbitmq.basic_publish_confirm(
                exchange=HEALTH_CHECK_EXCHANGE,
                routing_key=self.routing_key, body=self.test_data_str,
                is_body_dict=False,
                properties=pika.BasicProperties(delivery_mode=2),
                mandatory=False)

            # Check whether the exchange has been created by sending messages
            # to it. If this fails an exception is raised, hence the test fails.
            self.test_store.rabbitmq.basic_publish_confirm(
                exchange=CONFIG_EXCHANGE,
                routing_key=STORE_CONFIGS_ROUTING_KEY_CHAINS,
                body=self.test_data_str, is_body_dict=False,
                properties=pika.BasicProperties(delivery_mode=2),
                mandatory=False)

            # Re-declare queue to get the number of messages
            res = self.test_store.rabbitmq.queue_declare(
                STORE_CONFIGS_QUEUE_NAME, False, True, False, False)
            self.assertEqual(1, res.method.message_count)
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @mock.patch("src.data_store.stores.store.RedisApi.hset", autospec=True)
    def test_process_redis_store_does_nothing_on_error_key(self,
                                                           mock_hset) -> None:
        # An unrecognised payload must not result in any Redis write.
        self.test_store._process_redis_store(self.test_parent_id,
                                             self.config_data_unexpected)
        mock_hset.assert_not_called()

    @parameterized.expand([
        ("self.nodes_config_1", "self.routing_key_1"),
        ("self.alerts_config_1", "self.routing_key_2"),
        ("self.repos_config_1", "self.routing_key_3"),
        ("self.repos_config_1", "self.routing_key_4"),
        ("self.alerts_config_1", "self.routing_key_5"),
        ("self.systems_config_1", "self.routing_key_6"),
        ("self.email_config_1", "self.routing_key_7"),
        ("self.pagerduty_config_1", "self.routing_key_8"),
        ("self.opsgenie_config_1", "self.routing_key_9"),
        ("self.telegram_config_1", "self.routing_key_10"),
        ("self.twilio_config_1", "self.routing_key_11"),
    ])
    @mock.patch("src.data_store.stores.store.RabbitMQApi.basic_ack",
                autospec=True)
    @mock.patch("src.data_store.stores.store.Store._send_heartbeat",
                autospec=True)
    def test_process_data_saves_in_redis(self, mock_config_data,
                                         mock_routing_key, mock_send_hb,
                                         mock_ack) -> None:
        # Parameterized arguments arrive as attribute-name strings and are
        # resolved with eval() because the fixtures only exist after setUp.
        self.rabbitmq.connect()
        mock_ack.return_value = None
        try:
            data = eval(mock_config_data)
            routing_key = eval(mock_routing_key)

            self.test_store._initialise_rabbitmq()
            blocking_channel = self.test_store.rabbitmq.channel
            method_chains = pika.spec.Basic.Deliver(
                routing_key=eval(mock_routing_key))

            properties = pika.spec.BasicProperties()
            self.test_store._process_data(blocking_channel, method_chains,
                                          properties,
                                          json.dumps(data).encode())
            mock_ack.assert_called_once()
            mock_send_hb.assert_called_once()

            self.assertEqual(
                data, json.loads(
                    self.redis.get(
                        Keys.get_config(routing_key)).decode("utf-8")))
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @freeze_time("2012-01-01")
    @mock.patch("src.data_store.stores.store.RabbitMQApi.basic_ack",
                autospec=True)
    @mock.patch(
        "src.data_store.stores.config.ConfigStore._process_redis_store",
        autospec=True)
    def test_process_data_sends_heartbeat_correctly(self,
                                                    mock_process_redis_store,
                                                    mock_basic_ack) -> None:
        mock_basic_ack.return_value = None
        try:
            self.test_rabbit_manager.connect()
            self.test_store._initialise_rabbitmq()

            self.test_rabbit_manager.queue_delete(self.test_queue_name)
            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=False)
            self.assertEqual(0, res.method.message_count)
            self.test_rabbit_manager.queue_bind(queue=self.test_queue_name,
                                                exchange=HEALTH_CHECK_EXCHANGE,
                                                routing_key=self.routing_key)

            blocking_channel = self.test_store.rabbitmq.channel
            method_chains = pika.spec.Basic.Deliver(
                routing_key=self.routing_key_1)

            properties = pika.spec.BasicProperties()
            self.test_store._process_data(
                blocking_channel, method_chains, properties,
                json.dumps(self.nodes_config_1).encode())

            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=True)
            self.assertEqual(1, res.method.message_count)

            # Time is frozen at 2012-01-01 so the heartbeat timestamp is
            # deterministic.
            heartbeat_test = {
                'component_name': self.test_store_name,
                'is_alive': True,
                'timestamp': datetime(2012, 1, 1).timestamp()
            }

            _, _, body = self.test_rabbit_manager.basic_get(
                self.test_queue_name)
            self.assertEqual(heartbeat_test, json.loads(body))
            mock_process_redis_store.assert_called_once()
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @mock.patch("src.data_store.stores.store.RabbitMQApi.basic_ack",
                autospec=True)
    def test_process_data_doesnt_send_heartbeat_on_processing_error(
            self, mock_basic_ack) -> None:
        mock_basic_ack.return_value = None
        try:
            self.test_rabbit_manager.connect()
            self.test_store._initialise_rabbitmq()

            self.test_rabbit_manager.queue_delete(self.test_queue_name)
            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=False)
            self.assertEqual(0, res.method.message_count)
            self.test_rabbit_manager.queue_bind(queue=self.test_queue_name,
                                                exchange=HEALTH_CHECK_EXCHANGE,
                                                routing_key=self.routing_key)

            blocking_channel = self.test_store.rabbitmq.channel
            # A None routing key makes processing fail, so no heartbeat
            # should be published.
            method_chains = pika.spec.Basic.Deliver(routing_key=None)

            properties = pika.spec.BasicProperties()
            self.test_store._process_data(
                blocking_channel, method_chains, properties,
                json.dumps(self.nodes_config_1).encode())

            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=True)
            self.assertEqual(0, res.method.message_count)
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @parameterized.expand([
        ("self.nodes_config_1", "self.routing_key_1"),
        ("self.alerts_config_1", "self.routing_key_2"),
        ("self.repos_config_1", "self.routing_key_3"),
        ("self.repos_config_1", "self.routing_key_4"),
        ("self.alerts_config_1", "self.routing_key_5"),
        ("self.systems_config_1", "self.routing_key_6"),
        ("self.email_config_1", "self.routing_key_7"),
        ("self.pagerduty_config_1", "self.routing_key_8"),
        ("self.opsgenie_config_1", "self.routing_key_9"),
        ("self.telegram_config_1", "self.routing_key_10"),
        ("self.twilio_config_1", "self.routing_key_11"),
    ])
    @mock.patch("src.data_store.stores.store.RabbitMQApi.basic_ack",
                autospec=True)
    @mock.patch("src.data_store.stores.store.Store._send_heartbeat",
                autospec=True)
    def test_process_data_saves_in_redis_then_removes_it_on_empty_config(
            self, mock_config_data, mock_routing_key, mock_send_hb,
            mock_ack) -> None:
        self.rabbitmq.connect()
        mock_ack.return_value = None
        try:
            data = eval(mock_config_data)
            routing_key = eval(mock_routing_key)

            self.test_store._initialise_rabbitmq()
            blocking_channel = self.test_store.rabbitmq.channel
            method_chains = pika.spec.Basic.Deliver(routing_key=routing_key)

            properties = pika.spec.BasicProperties()
            self.test_store._process_data(blocking_channel, method_chains,
                                          properties,
                                          json.dumps(data).encode())
            mock_ack.assert_called_once()
            mock_send_hb.assert_called_once()

            self.assertEqual(
                data, json.loads(
                    self.redis.get(
                        Keys.get_config(routing_key)).decode("utf-8")))

            # An empty config for the same routing key must delete the
            # previously stored entry.
            self.test_store._process_data(blocking_channel, method_chains,
                                          properties,
                                          json.dumps({}).encode())
            self.assertEqual(None,
                             self.redis.get(Keys.get_config(routing_key)))
        except Exception as e:
            self.fail("Test failed: {}".format(e))
class TestStoreManager(unittest.TestCase): def setUp(self) -> None: self.dummy_logger = logging.getLogger('Dummy') self.dummy_logger.disabled = True self.connection_check_time_interval = timedelta(seconds=0) self.rabbitmq = RabbitMQApi( self.dummy_logger, env.RABBIT_IP, connection_check_time_interval=self.connection_check_time_interval) self.test_rabbit_manager = RabbitMQApi( self.dummy_logger, env.RABBIT_IP, connection_check_time_interval=self.connection_check_time_interval) self.manager_name = 'test_store_manager' self.routing_key = 'heartbeat.manager' self.test_queue_name = 'test queue' self.test_store_manager = StoreManager(self.dummy_logger, self.manager_name, self.rabbitmq) # Adding dummy process self.dummy_process = Process(target=infinite_fn, args=()) self.dummy_process.daemon = True connect_to_rabbit(self.rabbitmq) connect_to_rabbit(self.test_rabbit_manager) self.rabbitmq.exchange_declare(HEALTH_CHECK_EXCHANGE, 'topic', False, True, False, False) self.rabbitmq.queue_declare(DATA_STORE_MAN_INPUT_QUEUE, False, True, False, False) self.test_rabbit_manager.queue_declare(self.test_queue_name, False, True, False, False) self.rabbitmq.queue_bind(DATA_STORE_MAN_INPUT_QUEUE, HEALTH_CHECK_EXCHANGE, DATA_STORE_MAN_INPUT_ROUTING_KEY) self.test_data_str = 'test data' self.test_heartbeat = { 'component_name': self.manager_name, 'is_alive': True, 'timestamp': datetime(2012, 1, 1).timestamp(), } self.test_exception = PANICException('test_exception', 1) def tearDown(self) -> None: connect_to_rabbit(self.rabbitmq) delete_queue_if_exists(self.rabbitmq, DATA_STORE_MAN_INPUT_QUEUE) delete_exchange_if_exists(self.rabbitmq, HEALTH_CHECK_EXCHANGE) disconnect_from_rabbit(self.rabbitmq) connect_to_rabbit(self.test_rabbit_manager) delete_queue_if_exists(self.test_rabbit_manager, self.test_queue_name) disconnect_from_rabbit(self.test_rabbit_manager) self.dummy_logger = None self.dummy_process = None self.connection_check_time_interval = None self.rabbitmq = None 
self.test_rabbit_manager = None def test__str__returns_name_correctly(self) -> None: self.assertEqual(self.manager_name, str(self.test_store_manager)) def test_name_property_returns_name_correctly(self) -> None: self.assertEqual(self.manager_name, self.test_store_manager.name) def test_logger_property_returns_logger_correctly(self) -> None: self.assertEqual(self.dummy_logger, self.test_store_manager.logger) def test_rabbitmq_property_returns_rabbitmq_correctly(self) -> None: self.assertEqual(self.rabbitmq, self.test_store_manager.rabbitmq) def test_initialise_rabbitmq_initialises_everything_as_expected( self) -> None: try: # To make sure that the exchanges have not already been declared self.rabbitmq.connect() self.rabbitmq.exchange_delete(HEALTH_CHECK_EXCHANGE) self.rabbitmq.disconnect() self.test_store_manager._initialise_rabbitmq() # Perform checks that the connection has been opened, marked as open # and that the delivery confirmation variable is set. self.assertTrue(self.test_store_manager.rabbitmq.is_connected) self.assertTrue( self.test_store_manager.rabbitmq.connection.is_open) self.assertTrue(self.test_store_manager.rabbitmq.channel. _delivery_confirmation) # Check whether the producing exchanges have been created by # using passive=True. If this check fails an exception is raised # automatically. self.test_store_manager.rabbitmq.exchange_declare( HEALTH_CHECK_EXCHANGE, passive=True) self.test_rabbit_manager.queue_declare(self.test_queue_name, False, True, False, False) self.test_rabbit_manager.queue_bind(self.test_queue_name, HEALTH_CHECK_EXCHANGE, self.routing_key) # Check whether the exchange has been creating by sending messages # to it. If this fails an exception is raised, hence the test fails. 
self.test_store_manager.rabbitmq.basic_publish_confirm( exchange=HEALTH_CHECK_EXCHANGE, routing_key=self.routing_key, body=self.test_data_str, is_body_dict=False, properties=pika.BasicProperties(delivery_mode=2), mandatory=False) res = self.test_rabbit_manager.queue_declare( self.test_queue_name, False, True, False, False) self.assertEqual(1, res.method.message_count) except Exception as e: self.fail("Test failed: {}".format(e)) def test_send_heartbeat_sends_a_heartbeat_correctly(self) -> None: # This test creates a queue which receives messages with the same # routing key as the ones sent by send_heartbeat, and checks that the # heartbeat is received try: self.test_store_manager._initialise_rabbitmq() self.test_rabbit_manager.connect() res = self.test_rabbit_manager.queue_declare( queue=self.test_queue_name, durable=True, exclusive=False, auto_delete=False, passive=False) self.assertEqual(0, res.method.message_count) self.test_store_manager.rabbitmq.queue_bind( queue=self.test_queue_name, exchange=HEALTH_CHECK_EXCHANGE, routing_key=self.routing_key) self.test_store_manager._send_heartbeat(self.test_heartbeat) # By re-declaring the queue again we can get the number of messages # in the queue. 
            # Passive re-declare: read the queue's message count without
            # altering the queue's state.
            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=True)
            self.assertEqual(1, res.method.message_count)

            # Check that the message received is actually the HB
            _, _, body = self.test_rabbit_manager.basic_get(
                self.test_queue_name)
            self.assertEqual(self.test_heartbeat, json.loads(body))
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @mock.patch.object(multiprocessing.Process, "start")
    def test_start_stores_processes_starts_system_store_correctly(
            self, mock_start) -> None:
        """The system store entry must be a daemon process with no positional
        args and start_system_store as its target."""
        mock_start.return_value = None
        self.test_store_manager._start_stores_processes()
        new_entry_process = self.test_store_manager._store_process_dict[
            SYSTEM_STORE_NAME]
        self.assertTrue(new_entry_process.daemon)
        self.assertEqual(0, len(new_entry_process._args))
        self.assertEqual(start_system_store, new_entry_process._target)

    @mock.patch.object(multiprocessing.Process, "start")
    def test_start_stores_processes_starts_github_store_correctly(
            self, mock_start) -> None:
        """The github store entry must be a daemon process with no positional
        args and start_github_store as its target."""
        mock_start.return_value = None
        self.test_store_manager._start_stores_processes()
        new_entry_process = self.test_store_manager._store_process_dict[
            GITHUB_STORE_NAME]
        self.assertTrue(new_entry_process.daemon)
        self.assertEqual(0, len(new_entry_process._args))
        self.assertEqual(start_github_store, new_entry_process._target)

    @mock.patch.object(multiprocessing.Process, "start")
    def test_start_stores_processes_starts_alert_store_correctly(
            self, mock_start) -> None:
        """The alert store entry must be a daemon process with no positional
        args and start_alert_store as its target."""
        mock_start.return_value = None
        self.test_store_manager._start_stores_processes()
        new_entry_process = self.test_store_manager._store_process_dict[
            ALERT_STORE_NAME]
        self.assertTrue(new_entry_process.daemon)
        self.assertEqual(0, len(new_entry_process._args))
        self.assertEqual(start_alert_store, new_entry_process._target)

    @mock.patch.object(multiprocessing.Process, "start")
    def test_start_stores_processes_starts_config_store_correctly(
            self, mock_start) -> None:
        """The config store entry must be a daemon process with no positional
        args and start_config_store as its target."""
        mock_start.return_value = None
        self.test_store_manager._start_stores_processes()
        new_entry_process = self.test_store_manager._store_process_dict[
            CONFIG_STORE_NAME]
        self.assertTrue(new_entry_process.daemon)
        self.assertEqual(0, len(new_entry_process._args))
        self.assertEqual(start_config_store, new_entry_process._target)

    @mock.patch("src.data_store.starters.create_logger")
    def test_start_stores_processes_starts_the_processes_correctly(
            self, mock_create_logger) -> None:
        """All four store processes must actually be running after
        _start_stores_processes(); each is terminated and joined to clean up.
        """
        mock_create_logger.return_value = self.dummy_logger
        self.test_store_manager._start_stores_processes()

        # We need to sleep to give some time for the stores to be initialised,
        # otherwise the process would not terminate
        time.sleep(1)

        new_system_process = self.test_store_manager._store_process_dict[
            SYSTEM_STORE_NAME]
        self.assertTrue(new_system_process.is_alive())
        new_system_process.terminate()
        new_system_process.join()

        new_github_process = self.test_store_manager._store_process_dict[
            GITHUB_STORE_NAME]
        self.assertTrue(new_github_process.is_alive())
        new_github_process.terminate()
        new_github_process.join()

        new_alert_process = self.test_store_manager._store_process_dict[
            ALERT_STORE_NAME]
        self.assertTrue(new_alert_process.is_alive())
        new_alert_process.terminate()
        new_alert_process.join()

        new_config_process = self.test_store_manager._store_process_dict[
            CONFIG_STORE_NAME]
        self.assertTrue(new_config_process.is_alive())
        new_config_process.terminate()
        new_config_process.join()

    @freeze_time("2012-01-01")
    @mock.patch("src.data_store.starters.create_logger")
    @mock.patch.object(RabbitMQApi, "basic_ack")
    def test_process_ping_sends_a_valid_hb_if_process_is_alive(
            self, mock_ack, mock_create_logger) -> None:
        # This test creates a queue which receives messages with the same
        # routing key as the ones sent by send_heartbeat, and checks that the
        # received heartbeat is valid.
        mock_create_logger.return_value = self.dummy_logger
        mock_ack.return_value = None
        try:
            self.test_store_manager._initialise_rabbitmq()
            self.test_store_manager._start_stores_processes()

            # Give time for the processes to start
            time.sleep(1)

            self.test_rabbit_manager.queue_declare(queue=self.test_queue_name,
                                                   durable=True,
                                                   exclusive=False,
                                                   auto_delete=False,
                                                   passive=False)
            # Delete the queue before to avoid messages in the queue on error.
            self.test_rabbit_manager.queue_delete(self.test_queue_name)

            # initialise
            blocking_channel = self.test_store_manager.rabbitmq.channel
            properties = pika.spec.BasicProperties()
            method_hb = pika.spec.Basic.Deliver(routing_key=self.routing_key)
            body = 'ping'
            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=False)
            self.assertEqual(0, res.method.message_count)
            self.test_rabbit_manager.queue_bind(queue=self.test_queue_name,
                                                exchange=HEALTH_CHECK_EXCHANGE,
                                                routing_key=self.routing_key)
            self.test_store_manager._process_ping(blocking_channel, method_hb,
                                                  properties, body)
            time.sleep(1)

            # By re-declaring the queue again we can get the number of messages
            # in the queue.
            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=True)
            self.assertEqual(1, res.method.message_count)

            # All four stores were started above, so the HB must report every
            # store as running and none as dead.
            expected_output = {
                "component_name": self.manager_name,
                "dead_processes": [],
                "running_processes": [
                    SYSTEM_STORE_NAME, GITHUB_STORE_NAME, ALERT_STORE_NAME,
                    CONFIG_STORE_NAME
                ],
                "timestamp": datetime(2012, 1, 1).timestamp()
            }
            # Check that the message received is a valid HB
            _, _, body = self.test_rabbit_manager.basic_get(
                self.test_queue_name)
            self.assertEqual(expected_output, json.loads(body))

            # Clean before test finishes
            self.test_store_manager._store_process_dict[
                SYSTEM_STORE_NAME].terminate()
            self.test_store_manager._store_process_dict[
                GITHUB_STORE_NAME].terminate()
            self.test_store_manager._store_process_dict[
                ALERT_STORE_NAME].terminate()
            self.test_store_manager._store_process_dict[
                CONFIG_STORE_NAME].terminate()
            self.test_store_manager._store_process_dict[
                SYSTEM_STORE_NAME].join()
            self.test_store_manager._store_process_dict[
                GITHUB_STORE_NAME].join()
            self.test_store_manager._store_process_dict[
                ALERT_STORE_NAME].join()
            self.test_store_manager._store_process_dict[
                CONFIG_STORE_NAME].join()
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @freeze_time("2012-01-01")
    @mock.patch("src.data_store.starters.create_logger")
    @mock.patch.object(RabbitMQApi, "basic_ack")
    def test_process_ping_sends_a_valid_hb_if_all_processes_are_dead(
            self, mock_ack, mock_create_logger) -> None:
        # This test creates a queue which receives messages with the same
        # routing key as the ones sent by send_heartbeat, and checks that the
        # received heartbeat is valid.
        mock_create_logger.return_value = self.dummy_logger
        mock_ack.return_value = None
        try:
            self.test_store_manager._initialise_rabbitmq()
            self.test_store_manager._start_stores_processes()

            # Give time for the processes to start
            time.sleep(1)

            # Kill every store before pinging so that the HB reports all of
            # them as dead.
            self.test_store_manager._store_process_dict[
                SYSTEM_STORE_NAME].terminate()
            self.test_store_manager._store_process_dict[
                GITHUB_STORE_NAME].terminate()
            self.test_store_manager._store_process_dict[
                ALERT_STORE_NAME].terminate()
            self.test_store_manager._store_process_dict[
                CONFIG_STORE_NAME].terminate()
            self.test_store_manager._store_process_dict[
                SYSTEM_STORE_NAME].join()
            self.test_store_manager._store_process_dict[
                GITHUB_STORE_NAME].join()
            self.test_store_manager._store_process_dict[
                ALERT_STORE_NAME].join()
            self.test_store_manager._store_process_dict[
                CONFIG_STORE_NAME].join()

            # Time for processes to terminate
            time.sleep(1)

            # Delete the queue before to avoid messages in the queue on error.
            self.test_rabbit_manager.queue_delete(self.test_queue_name)

            # initialise
            blocking_channel = self.test_store_manager.rabbitmq.channel
            properties = pika.spec.BasicProperties()
            method_hb = pika.spec.Basic.Deliver(routing_key=self.routing_key)
            body = 'ping'
            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=False)
            self.assertEqual(0, res.method.message_count)
            self.test_rabbit_manager.queue_bind(queue=self.test_queue_name,
                                                exchange=HEALTH_CHECK_EXCHANGE,
                                                routing_key=self.routing_key)
            self.test_store_manager._process_ping(blocking_channel, method_hb,
                                                  properties, body)
            time.sleep(1)

            # By re-declaring the queue again we can get the number of messages
            # in the queue.
            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=True)
            self.assertEqual(1, res.method.message_count)

            expected_output = {
                "component_name": self.manager_name,
                "dead_processes": [
                    SYSTEM_STORE_NAME, GITHUB_STORE_NAME, ALERT_STORE_NAME,
                    CONFIG_STORE_NAME
                ],
                "running_processes": [],
                "timestamp": datetime(2012, 1, 1).timestamp()
            }
            # Check that the message received is a valid HB
            _, _, body = self.test_rabbit_manager.basic_get(
                self.test_queue_name)
            self.assertEqual(expected_output, json.loads(body))

            # Clean before test finishes. Note: _process_ping restarts dead
            # processes, hence the second terminate/join round here.
            self.test_store_manager._store_process_dict[
                SYSTEM_STORE_NAME].terminate()
            self.test_store_manager._store_process_dict[
                GITHUB_STORE_NAME].terminate()
            self.test_store_manager._store_process_dict[
                ALERT_STORE_NAME].terminate()
            self.test_store_manager._store_process_dict[
                CONFIG_STORE_NAME].terminate()
            self.test_store_manager._store_process_dict[
                SYSTEM_STORE_NAME].join()
            self.test_store_manager._store_process_dict[
                GITHUB_STORE_NAME].join()
            self.test_store_manager._store_process_dict[
                ALERT_STORE_NAME].join()
            self.test_store_manager._store_process_dict[
                CONFIG_STORE_NAME].join()
            self.rabbitmq.disconnect()
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @freeze_time("2012-01-01")
    @mock.patch.object(RabbitMQApi, "basic_ack")
    @mock.patch("src.data_store.starters.create_logger")
    @mock.patch.object(StoreManager, "_send_heartbeat")
    def test_process_ping_restarts_dead_processes(self, send_hb_mock,
                                                  mock_create_logger,
                                                  mock_ack) -> None:
        """After every store process is killed, a ping must bring all four
        back to life."""
        send_hb_mock.return_value = None
        mock_create_logger.return_value = self.dummy_logger
        mock_ack.return_value = None
        try:
            self.test_store_manager._initialise_rabbitmq()
            self.test_store_manager._start_stores_processes()

            # Give time for the processes to start
            time.sleep(1)

            self.test_store_manager._store_process_dict[
                SYSTEM_STORE_NAME].terminate()
            self.test_store_manager._store_process_dict[
                GITHUB_STORE_NAME].terminate()
            self.test_store_manager._store_process_dict[
                ALERT_STORE_NAME].terminate()
            self.test_store_manager._store_process_dict[
                CONFIG_STORE_NAME].terminate()
            self.test_store_manager._store_process_dict[
                SYSTEM_STORE_NAME].join()
            self.test_store_manager._store_process_dict[
                GITHUB_STORE_NAME].join()
            self.test_store_manager._store_process_dict[
                ALERT_STORE_NAME].join()
            self.test_store_manager._store_process_dict[
                CONFIG_STORE_NAME].join()

            # Give time for the processes to terminate
            time.sleep(1)

            # Check that that the processes have terminated
            self.assertFalse(self.test_store_manager.
                             _store_process_dict[SYSTEM_STORE_NAME].is_alive())
            self.assertFalse(self.test_store_manager.
                             _store_process_dict[GITHUB_STORE_NAME].is_alive())
            self.assertFalse(self.test_store_manager.
                             _store_process_dict[ALERT_STORE_NAME].is_alive())
            self.assertFalse(self.test_store_manager.
                             _store_process_dict[CONFIG_STORE_NAME].is_alive())

            # initialise
            blocking_channel = self.test_store_manager.rabbitmq.channel
            properties = pika.spec.BasicProperties()
            method_hb = pika.spec.Basic.Deliver(routing_key=self.routing_key)
            body = 'ping'
            self.test_store_manager._process_ping(blocking_channel, method_hb,
                                                  properties, body)

            # Give time for the processes to start
            time.sleep(1)

            self.assertTrue(self.test_store_manager.
                            _store_process_dict[SYSTEM_STORE_NAME].is_alive())
            self.assertTrue(self.test_store_manager.
                            _store_process_dict[GITHUB_STORE_NAME].is_alive())
            self.assertTrue(self.test_store_manager.
                            _store_process_dict[ALERT_STORE_NAME].is_alive())
            self.assertTrue(self.test_store_manager.
                            _store_process_dict[CONFIG_STORE_NAME].is_alive())

            # Clean before test finishes
            self.test_store_manager._store_process_dict[
                SYSTEM_STORE_NAME].terminate()
            self.test_store_manager._store_process_dict[
                GITHUB_STORE_NAME].terminate()
            self.test_store_manager._store_process_dict[
                ALERT_STORE_NAME].terminate()
            self.test_store_manager._store_process_dict[
                CONFIG_STORE_NAME].terminate()
            self.test_store_manager._store_process_dict[
                SYSTEM_STORE_NAME].join()
            self.test_store_manager._store_process_dict[
                GITHUB_STORE_NAME].join()
            self.test_store_manager._store_process_dict[
                ALERT_STORE_NAME].join()
            self.test_store_manager._store_process_dict[
                CONFIG_STORE_NAME].join()
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @mock.patch.object(multiprocessing.Process, "is_alive")
    @mock.patch.object(multiprocessing.Process, "start")
    @mock.patch.object(multiprocessing, 'Process')
    def test_process_ping_does_not_send_hb_if_processing_fails(
            self, mock_process, mock_start, is_alive_mock) -> None:
        # This test creates a queue which receives messages with the same
        # routing key as the ones sent by send_heartbeat. In this test we will
        # check that no heartbeat is sent when mocking a raised exception.
        is_alive_mock.side_effect = self.test_exception
        mock_start.return_value = None
        mock_process.side_effect = self.dummy_process
        try:
            self.test_store_manager._initialise_rabbitmq()
            self.test_store_manager._start_stores_processes()
            time.sleep(1)

            # Delete the queue before to avoid messages in the queue on error.
            self.test_rabbit_manager.queue_delete(self.test_queue_name)

            # initialise
            blocking_channel = self.test_store_manager.rabbitmq.channel
            method = pika.spec.Basic.Deliver(routing_key=self.routing_key)
            properties = pika.spec.BasicProperties()
            body = 'ping'
            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=False)
            self.assertEqual(0, res.method.message_count)
            self.test_rabbit_manager.queue_bind(queue=self.test_queue_name,
                                                exchange=HEALTH_CHECK_EXCHANGE,
                                                routing_key=self.routing_key)
            self.test_store_manager._process_ping(blocking_channel, method,
                                                  properties, body)
            time.sleep(1)

            # By re-declaring the queue again we can get the number of messages
            # in the queue. Zero messages means no HB was sent.
            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=True)
            self.assertEqual(0, res.method.message_count)
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    def test_proc_ping_send_hb_does_not_raise_msg_not_del_exce_if_hb_not_routed(
            self) -> None:
        """A HB published with an unroutable key must not raise a
        message-not-delivered exception."""
        try:
            self.test_store_manager._initialise_rabbitmq()
            self.test_store_manager._start_stores_processes()
            time.sleep(1)

            # initialise
            blocking_channel = self.test_store_manager.rabbitmq.channel
            method = pika.spec.Basic.Deliver(routing_key='heartbeat.manager')
            properties = pika.spec.BasicProperties()
            body = 'ping'
            self.test_store_manager._process_ping(blocking_channel, method,
                                                  properties, body)
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @parameterized.expand([("pika.exceptions.AMQPChannelError('test')",
                            "pika.exceptions.AMQPChannelError"),
                           ("self.test_exception", "PANICException"),
                           ("pika.exceptions.AMQPConnectionError",
                            "pika.exceptions.AMQPConnectionError")])
    @mock.patch.object(StoreManager, "_send_heartbeat")
    def test_process_ping_send_hb_raises_exceptions(self, param_input,
                                                    param_expected,
                                                    hb_mock) -> None:
        """Exceptions raised while sending the HB must propagate out of
        _process_ping. Parameters are eval'd test-local expressions."""
        hb_mock.side_effect = eval(param_input)
        try:
            self.test_store_manager._initialise_rabbitmq()

            # initialise
            blocking_channel = self.test_store_manager.rabbitmq.channel
            method = pika.spec.Basic.Deliver(routing_key=self.routing_key)
            properties = pika.spec.BasicProperties()
            body = 'ping'
            self.assertRaises(eval(param_expected),
                              self.test_store_manager._process_ping,
                              blocking_channel, method, properties, body)
        except Exception as e:
            self.fail("Test failed: {}".format(e))
class TestGithubStore(unittest.TestCase):
    """Integration tests for GithubStore.

    These tests require live RabbitMQ and Redis instances (configured via
    env); setUp declares the store/health-check exchanges and input queue,
    and tearDown deletes them and flushes Redis.
    """

    def setUp(self) -> None:
        self.dummy_logger = logging.getLogger('Dummy')
        self.dummy_logger.disabled = True
        self.connection_check_time_interval = timedelta(seconds=0)
        self.rabbit_ip = env.RABBIT_IP
        self.rabbitmq = RabbitMQApi(
            self.dummy_logger, self.rabbit_ip,
            connection_check_time_interval=self.connection_check_time_interval)
        self.test_rabbit_manager = RabbitMQApi(
            self.dummy_logger, self.rabbit_ip,
            connection_check_time_interval=self.connection_check_time_interval)
        self.redis_db = env.REDIS_DB
        self.redis_host = env.REDIS_IP
        self.redis_port = env.REDIS_PORT
        self.redis_namespace = env.UNIQUE_ALERTER_IDENTIFIER
        self.redis = RedisApi(self.dummy_logger, self.redis_db,
                              self.redis_host, self.redis_port, '',
                              self.redis_namespace,
                              self.connection_check_time_interval)
        self.mongo_ip = env.DB_IP
        self.mongo_db = env.DB_NAME
        self.mongo_port = env.DB_PORT
        self.test_store_name = 'store name'
        self.test_store = GithubStore(self.test_store_name, self.dummy_logger,
                                      self.rabbitmq)
        self.routing_key = 'heartbeat.worker'
        self.test_queue_name = 'test queue'
        connect_to_rabbit(self.rabbitmq)
        self.rabbitmq.exchange_declare(HEALTH_CHECK_EXCHANGE, 'topic', False,
                                       True, False, False)
        self.rabbitmq.exchange_declare(STORE_EXCHANGE, 'direct', False, True,
                                       False, False)
        self.rabbitmq.queue_declare(GITHUB_STORE_INPUT_QUEUE, False, True,
                                    False, False)
        self.rabbitmq.queue_bind(GITHUB_STORE_INPUT_QUEUE, STORE_EXCHANGE,
                                 GITHUB_STORE_INPUT_ROUTING_KEY)
        connect_to_rabbit(self.test_rabbit_manager)
        self.test_rabbit_manager.queue_declare(self.test_queue_name, False,
                                               True, False, False)
        self.test_rabbit_manager.queue_bind(self.test_queue_name,
                                            HEALTH_CHECK_EXCHANGE,
                                            self.routing_key)
        self.test_data_str = 'test data'
        self.test_exception = PANICException('test_exception', 1)
        self.repo_name = 'simplyvc/panic/'
        self.repo_id = 'test_repo_id'
        self.parent_id = 'test_parent_id'
        self.repo_name_2 = 'simplyvc/panic_oasis/'
        self.repo_id_2 = 'test_repo_id_2'
        self.parent_id_2 = 'test_parent_id_2'
        self.last_monitored = datetime(2012, 1, 1).timestamp()
        # Fixture payloads: _1 has a release-count change, _2 has no change,
        # _3 is a different repo.
        self.github_data_1 = {
            "result": {
                "meta_data": {
                    "repo_name": self.repo_name,
                    "repo_id": self.repo_id,
                    "repo_parent_id": self.parent_id,
                    "last_monitored": self.last_monitored
                },
                "data": {
                    "no_of_releases": {
                        "current": 5,
                        "previous": 4,
                    }
                }
            }
        }
        self.github_data_2 = {
            "result": {
                "meta_data": {
                    "repo_name": self.repo_name,
                    "repo_id": self.repo_id,
                    "repo_parent_id": self.parent_id,
                    "last_monitored": self.last_monitored
                },
                "data": {
                    "no_of_releases": {
                        "current": 5,
                        "previous": 5,
                    }
                }
            }
        }
        self.github_data_3 = {
            "result": {
                "meta_data": {
                    "repo_name": self.repo_name_2,
                    "repo_id": self.repo_id_2,
                    "repo_parent_id": self.parent_id_2,
                    "last_monitored": self.last_monitored
                },
                "data": {
                    "no_of_releases": {
                        "current": 8,
                        "previous": 1,
                    }
                }
            }
        }
        # Error payload: top-level "error" key instead of "result".
        self.github_data_error = {
            "error": {
                "meta_data": {
                    "repo_name": self.repo_name,
                    "repo_id": self.repo_id,
                    "repo_parent_id": self.parent_id,
                    "time": self.last_monitored
                },
                "code": "5006",
                "message": "error message"
            }
        }
        # Malformed payload: "meta_data"/"data" keys replaced, so processing
        # should raise KeyError.
        self.github_data_key_error = {
            "result": {
                "data": {
                    "repo_name": self.repo_name_2,
                    "repo_id": self.repo_id_2,
                    "repo_parent_id": self.parent_id_2,
                    "last_monitored": self.last_monitored
                },
                "wrong_data": {
                    "no_of_releases": {
                        "current": 8,
                        "previous": 1,
                    }
                }
            }
        }
        # Neither "result" nor "error" at top level.
        self.github_data_unexpected = {"unexpected": {}}

    def tearDown(self) -> None:
        connect_to_rabbit(self.rabbitmq)
        delete_queue_if_exists(self.rabbitmq, GITHUB_STORE_INPUT_QUEUE)
        delete_exchange_if_exists(self.rabbitmq, STORE_EXCHANGE)
        delete_exchange_if_exists(self.rabbitmq, HEALTH_CHECK_EXCHANGE)
        disconnect_from_rabbit(self.rabbitmq)
        connect_to_rabbit(self.test_rabbit_manager)
        delete_queue_if_exists(self.test_rabbit_manager, self.test_queue_name)
        disconnect_from_rabbit(self.test_rabbit_manager)
        self.redis.delete_all_unsafe()
        self.redis = None
        self.dummy_logger = None
        self.connection_check_time_interval = None
        self.rabbitmq = None
        self.test_rabbit_manager = None

    def test__str__returns_name_correctly(self) -> None:
        self.assertEqual(self.test_store_name, str(self.test_store))

    def test_name_property_returns_name_correctly(self) -> None:
        self.assertEqual(self.test_store_name, self.test_store.name)

    def test_mongo_ip_property_returns_mongo_ip_correctly(self) -> None:
        self.assertEqual(self.mongo_ip, self.test_store.mongo_ip)

    def test_mongo_db_property_returns_mongo_db_correctly(self) -> None:
        self.assertEqual(self.mongo_db, self.test_store.mongo_db)

    def test_mongo_port_property_returns_mongo_port_correctly(self) -> None:
        self.assertEqual(self.mongo_port, self.test_store.mongo_port)

    def test_redis_property_returns_redis_correctly(self) -> None:
        self.assertEqual(type(self.redis), type(self.test_store.redis))

    def test_mongo_property_returns_none_when_mongo_not_init(self) -> None:
        self.assertEqual(None, self.test_store.mongo)

    def test_initialise_rabbitmq_initialises_everything_as_expected(
            self) -> None:
        try:
            # To make sure that the exchanges have not already been declared
            self.rabbitmq.connect()
            self.rabbitmq.exchange_delete(HEALTH_CHECK_EXCHANGE)
            self.rabbitmq.exchange_delete(STORE_EXCHANGE)
            self.rabbitmq.disconnect()

            self.test_store._initialise_rabbitmq()

            # Perform checks that the connection has been opened, marked as open
            # and that the delivery confirmation variable is set.
            self.assertTrue(self.test_store.rabbitmq.is_connected)
            self.assertTrue(self.test_store.rabbitmq.connection.is_open)
            self.assertTrue(
                self.test_store.rabbitmq.channel._delivery_confirmation)

            # Check whether the producing exchanges have been created by
            # using passive=True. If this check fails an exception is raised
            # automatically.
            self.test_store.rabbitmq.exchange_declare(STORE_EXCHANGE,
                                                      passive=True)
            self.test_store.rabbitmq.exchange_declare(HEALTH_CHECK_EXCHANGE,
                                                      passive=True)

            # Check whether the exchange has been creating by sending messages
            # to it. If this fails an exception is raised, hence the test fails.
            self.test_store.rabbitmq.basic_publish_confirm(
                exchange=HEALTH_CHECK_EXCHANGE, routing_key=self.routing_key,
                body=self.test_data_str, is_body_dict=False,
                properties=pika.BasicProperties(delivery_mode=2),
                mandatory=False)

            # Check whether the exchange has been creating by sending messages
            # to it. If this fails an exception is raised, hence the test fails.
            self.test_store.rabbitmq.basic_publish_confirm(
                exchange=STORE_EXCHANGE,
                routing_key=GITHUB_STORE_INPUT_ROUTING_KEY,
                body=self.test_data_str, is_body_dict=False,
                properties=pika.BasicProperties(delivery_mode=2),
                mandatory=False)

            # Re-declare queue to get the number of messages
            res = self.test_store.rabbitmq.queue_declare(
                GITHUB_STORE_INPUT_QUEUE, False, True, False, False)
            self.assertEqual(1, res.method.message_count)
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @parameterized.expand([
        ("self.github_data_1", ),
        ("self.github_data_2", ),
        ("self.github_data_3", ),
    ])
    @mock.patch.object(RedisApi, "hset_multiple")
    def test_process_redis_store_redis_is_called_correctly(
            self, mock_github_data, mock_hset_multiple) -> None:
        """hset_multiple must be called with the parent hash and the
        release-count/last-monitored fields. Parameter is an eval'd fixture
        name."""
        data = eval(mock_github_data)
        self.test_store._process_redis_store(data)

        meta_data = data['result']['meta_data']
        repo_id = meta_data['repo_id']
        parent_id = meta_data['repo_parent_id']
        metrics = data['result']['data']

        call_1 = call(
            Keys.get_hash_parent(parent_id), {
                Keys.get_github_no_of_releases(repo_id):
                    str(metrics['no_of_releases']),
                Keys.get_github_last_monitored(repo_id):
                    str(meta_data['last_monitored']),
            })
        mock_hset_multiple.assert_has_calls([call_1])

    @mock.patch("src.data_store.stores.store.RedisApi.hset_multiple",
                autospec=True)
    def test_process_redis_store_does_nothing_on_error_key(
            self, mock_hset_multiple) -> None:
        self.test_store._process_redis_store(self.github_data_error)
        mock_hset_multiple.assert_not_called()

    def test_process_redis_store_raises_exception_on_unexpected_key(
            self) -> None:
        self.assertRaises(ReceivedUnexpectedDataException,
                          self.test_store._process_redis_store,
                          self.github_data_unexpected)

    @parameterized.expand([
        ("self.github_data_1", ),
        ("self.github_data_2", ),
        ("self.github_data_3", ),
    ])
    def test_process_redis_store_redis_stores_correctly(
            self, mock_github_data) -> None:
        """After _process_redis_store, Redis must hold the stringified
        release counts and last-monitored timestamp under the parent hash."""
        data = eval(mock_github_data)
        self.test_store._process_redis_store(data)

        meta_data = data['result']['meta_data']
        repo_id = meta_data['repo_id']
        parent_id = meta_data['repo_parent_id']
        metrics = data['result']['data']

        self.assertEqual(
            str(metrics['no_of_releases']),
            self.redis.hget(
                Keys.get_hash_parent(parent_id),
                Keys.get_github_no_of_releases(repo_id)).decode("utf-8"))
        self.assertEqual(
            str(meta_data['last_monitored']),
            self.redis.hget(
                Keys.get_hash_parent(parent_id),
                Keys.get_github_last_monitored(repo_id)).decode("utf-8"))

    @parameterized.expand([
        ("self.github_data_1", ),
        ("self.github_data_2", ),
        ("self.github_data_3", ),
    ])
    @mock.patch("src.data_store.stores.store.RabbitMQApi.basic_ack",
                autospec=True)
    @mock.patch("src.data_store.stores.store.Store._send_heartbeat",
                autospec=True)
    def test_process_data_saves_in_redis(self, mock_github_data, mock_send_hb,
                                         mock_ack) -> None:
        """_process_data on a valid payload must ack, send a HB, and persist
        the metrics to Redis."""
        self.rabbitmq.connect()
        mock_ack.return_value = None
        try:
            self.test_store._initialise_rabbitmq()
            data = eval(mock_github_data)

            blocking_channel = self.test_store.rabbitmq.channel
            method_chains = pika.spec.Basic.Deliver(
                routing_key=GITHUB_STORE_INPUT_ROUTING_KEY)

            properties = pika.spec.BasicProperties()
            self.test_store._process_data(blocking_channel, method_chains,
                                          properties,
                                          json.dumps(data).encode())
            mock_ack.assert_called_once()
            mock_send_hb.assert_called_once()

            meta_data = data['result']['meta_data']
            repo_id = meta_data['repo_id']
            parent_id = meta_data['repo_parent_id']
            metrics = data['result']['data']

            self.assertEqual(
                str(metrics['no_of_releases']),
                self.redis.hget(
                    Keys.get_hash_parent(parent_id),
                    Keys.get_github_no_of_releases(repo_id)).decode("utf-8"))
            self.assertEqual(
                str(meta_data['last_monitored']),
                self.redis.hget(
                    Keys.get_hash_parent(parent_id),
                    Keys.get_github_last_monitored(repo_id)).decode("utf-8"))
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @parameterized.expand([
        ("KeyError", "self.github_data_key_error "),
        ("ReceivedUnexpectedDataException", "self.github_data_unexpected"),
    ])
    @mock.patch("src.data_store.stores.store.RabbitMQApi.basic_ack",
                autospec=True)
    @mock.patch("src.data_store.stores.store.Store._send_heartbeat",
                autospec=True)
    def test_process_data_with_bad_data_does_raises_exceptions(
            self, mock_error, mock_bad_data, mock_send_hb, mock_ack) -> None:
        """Bad payloads must be acked without a HB, and _process_redis_store
        must raise the parameterized exception for them.

        NOTE(review): _process_data below is always fed
        self.github_data_unexpected; the parameterized bad data is only
        exercised via the direct _process_redis_store assertRaises — confirm
        this is intended.
        """
        self.rabbitmq.connect()
        mock_ack.return_value = None
        try:
            self.test_store._initialise_rabbitmq()

            blocking_channel = self.test_store.rabbitmq.channel
            method_chains = pika.spec.Basic.Deliver(
                routing_key=GITHUB_STORE_INPUT_ROUTING_KEY)

            properties = pika.spec.BasicProperties()
            self.test_store._process_data(
                blocking_channel, method_chains, properties,
                json.dumps(self.github_data_unexpected).encode())

            self.assertRaises(eval(mock_error),
                              self.test_store._process_redis_store,
                              eval(mock_bad_data))
            mock_ack.assert_called_once()
            mock_send_hb.assert_not_called()
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @freeze_time("2012-01-01")
    @mock.patch("src.data_store.stores.store.RabbitMQApi.basic_ack",
                autospec=True)
    @mock.patch(
        "src.data_store.stores.github.GithubStore._process_redis_store",
        autospec=True)
    def test_process_data_sends_heartbeat_correctly(self,
                                                    mock_process_redis_store,
                                                    mock_basic_ack) -> None:
        """A successfully processed payload must result in exactly one valid
        HB on the health-check exchange."""
        mock_basic_ack.return_value = None
        try:
            self.test_rabbit_manager.connect()
            self.test_store._initialise_rabbitmq()

            self.test_rabbit_manager.queue_delete(self.test_queue_name)
            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=False)
            self.assertEqual(0, res.method.message_count)

            self.test_rabbit_manager.queue_bind(queue=self.test_queue_name,
                                                exchange=HEALTH_CHECK_EXCHANGE,
                                                routing_key=self.routing_key)

            blocking_channel = self.test_store.rabbitmq.channel
            method_chains = pika.spec.Basic.Deliver(
                routing_key=GITHUB_STORE_INPUT_ROUTING_KEY)

            properties = pika.spec.BasicProperties()
            self.test_store._process_data(
                blocking_channel, method_chains, properties,
                json.dumps(self.github_data_1).encode())

            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=True)
            self.assertEqual(1, res.method.message_count)

            heartbeat_test = {
                'component_name': self.test_store_name,
                'is_alive': True,
                'timestamp': datetime(2012, 1, 1).timestamp()
            }

            _, _, body = self.test_rabbit_manager.basic_get(
                self.test_queue_name)
            self.assertEqual(heartbeat_test, json.loads(body))
            mock_process_redis_store.assert_called_once()
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @mock.patch("src.data_store.stores.store.RabbitMQApi.basic_ack",
                autospec=True)
    def test_process_data_doesnt_send_heartbeat_on_processing_error(
            self, mock_basic_ack) -> None:
        """An unexpected payload must not produce a HB message."""
        mock_basic_ack.return_value = None
        try:
            self.test_rabbit_manager.connect()
            self.test_store._initialise_rabbitmq()

            self.test_rabbit_manager.queue_delete(self.test_queue_name)
            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=False)
            self.assertEqual(0, res.method.message_count)

            self.test_rabbit_manager.queue_bind(queue=self.test_queue_name,
                                                exchange=HEALTH_CHECK_EXCHANGE,
                                                routing_key=self.routing_key)

            blocking_channel = self.test_store.rabbitmq.channel
            method_chains = pika.spec.Basic.Deliver(
                routing_key=GITHUB_STORE_INPUT_ROUTING_KEY)

            properties = pika.spec.BasicProperties()
            self.test_store._process_data(
                blocking_channel, method_chains, properties,
                json.dumps(self.github_data_unexpected).encode())

            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=True)
            self.assertEqual(0, res.method.message_count)
        except Exception as e:
            self.fail("Test failed: {}".format(e))
class TestConfigsManager(unittest.TestCase):
    """Integration tests for ConfigsManager (requires live RabbitMQ)."""

    def setUp(self) -> None:
        self.CONFIG_MANAGER_NAME = "Config Manager"
        self.config_manager_logger = logging.getLogger("test_config_manager")
        self.config_manager_logger.disabled = True
        self.rabbit_logger = logging.getLogger("test_rabbit")
        self.rabbit_logger.disabled = True
        self.config_directory = "config"
        file_patterns = ["*.ini"]
        rabbit_ip = env.RABBIT_IP

        self.test_config_manager = ConfigsManager(self.CONFIG_MANAGER_NAME,
                                                  self.config_manager_logger,
                                                  self.config_directory,
                                                  rabbit_ip,
                                                  file_patterns=file_patterns)
        self.rabbitmq = RabbitMQApi(
            self.rabbit_logger, rabbit_ip,
            connection_check_time_interval=timedelta(seconds=0))

    def tearDown(self) -> None:
        # flush and consume all from rabbit queues and exchanges
        connect_to_rabbit(self.rabbitmq)
        queues = [CONFIG_PING_QUEUE]
        for queue in queues:
            delete_queue_if_exists(self.rabbitmq, queue)

        exchanges = [CONFIG_EXCHANGE, HEALTH_CHECK_EXCHANGE]
        for exchange in exchanges:
            delete_exchange_if_exists(self.rabbitmq, exchange)

        disconnect_from_rabbit(self.rabbitmq)
        self.rabbitmq = None
        self.test_config_manager._rabbitmq = None
        self.test_config_manager._heartbeat_rabbit = None
        self.test_config_manager = None

    def test_instance_created(self):
        self.assertIsNotNone(self.test_config_manager)

    def test_name_returns_component_name(self):
        self.assertEqual(self.CONFIG_MANAGER_NAME,
                         self.test_config_manager.name)

    @parameterized.expand([
        (CONFIG_PING_QUEUE, ),
    ])
    @mock.patch.object(RabbitMQApi, "confirm_delivery")
    @mock.patch.object(RabbitMQApi, "basic_consume")
    @mock.patch.object(RabbitMQApi, "basic_qos")
    def test__initialise_rabbit_initialises_queues(
            self, queue_to_check: str, mock_basic_qos: MagicMock,
            mock_basic_consume: MagicMock, mock_confirm_delivery: MagicMock):
        """Each expected queue must exist (passive declare) after
        _initialise_rabbitmq."""
        mock_basic_consume.return_value = None
        mock_confirm_delivery.return_value = None
        try:
            connect_to_rabbit(self.rabbitmq)
            # Testing this separately since this is a critical function
            self.test_config_manager._initialise_rabbitmq()
            mock_basic_qos.assert_called_once()
            mock_basic_consume.assert_called_once()
            mock_confirm_delivery.assert_called()
            self.rabbitmq.queue_declare(queue_to_check, passive=True)
        except pika.exceptions.ConnectionClosedByBroker:
            self.fail("Queue {} was not declared".format(queue_to_check))
        finally:
            disconnect_from_rabbit(self.test_config_manager._rabbitmq)
            disconnect_from_rabbit(self.test_config_manager._heartbeat_rabbit)

    @parameterized.expand([
        (CONFIG_EXCHANGE, ),
        (HEALTH_CHECK_EXCHANGE, ),
    ])
    @mock.patch.object(RabbitMQApi, "confirm_delivery")
    @mock.patch.object(RabbitMQApi, "basic_consume")
    @mock.patch.object(RabbitMQApi, "basic_qos")
    def test__initialise_rabbit_initialises_exchanges(
            self, exchange_to_check: str, mock_basic_qos: MagicMock,
            mock_basic_consume: MagicMock, mock_confirm_delivery: MagicMock):
        """Each expected exchange must exist (passive declare) after
        _initialise_rabbitmq."""
        mock_basic_consume.return_value = None
        mock_confirm_delivery.return_value = None
        try:
            connect_to_rabbit(self.rabbitmq)
            # Testing this separately since this is a critical function
            self.test_config_manager._initialise_rabbitmq()
            mock_basic_qos.assert_called_once()
            mock_basic_consume.assert_called()
            mock_confirm_delivery.assert_called()
            self.rabbitmq.exchange_declare(exchange_to_check, passive=True)
        except pika.exceptions.ConnectionClosedByBroker:
            self.fail("Exchange {} was not declared".format(exchange_to_check))
        finally:
            disconnect_from_rabbit(self.test_config_manager._rabbitmq)
            disconnect_from_rabbit(self.test_config_manager._heartbeat_rabbit)

    @mock.patch.object(RabbitMQApi, "connect_till_successful", autospec=True)
    def test__connect_to_rabbit(self, mock_connect: MagicMock):
        """_connect_to_rabbit must connect both the main and heartbeat
        channels (two connect calls)."""
        mock_connect.return_value = None
        self.test_config_manager._connect_to_rabbit()
        mock_connect.assert_called()
        self.assertEqual(2, mock_connect.call_count)
        self.assertTrue(self.test_config_manager.connected_to_rabbit)

    @mock.patch.object(RabbitMQApi, "disconnect_till_successful",
                       autospec=True)
    def test_disconnect_from_rabbit(self, mock_disconnect: MagicMock):
        """disconnect_from_rabbit must disconnect both channels (two
        disconnect calls)."""
        mock_disconnect.return_value = None
        self.test_config_manager._connected_to_rabbit = True
        self.test_config_manager.disconnect_from_rabbit()
        mock_disconnect.assert_called()
        self.assertEqual(2, mock_disconnect.call_count)
        self.assertFalse(self.test_config_manager.connected_to_rabbit)

    @freeze_time("1997-08-15T10:21:33.000030")
    @mock.patch.object(PollingObserver, "is_alive", autospec=True)
    def test__process_ping_sends_valid_hb(self, mock_is_alive: MagicMock):
        """A ping must produce exactly one HB with the frozen timestamp and
        is_alive=True."""
        mock_is_alive.return_value = True
        expected_output = {
            'component_name': self.CONFIG_MANAGER_NAME,
            'is_alive': True,
            'timestamp': datetime(year=1997, month=8, day=15, hour=10,
                                  minute=21, second=33,
                                  microsecond=30).timestamp()
        }
        HEARTBEAT_QUEUE = "hb_test"
        try:
            connect_to_rabbit(self.rabbitmq)
            self.rabbitmq.exchange_declare(HEALTH_CHECK_EXCHANGE, "topic",
                                           False, True, False, False)

            queue_res = self.rabbitmq.queue_declare(queue=HEARTBEAT_QUEUE,
                                                    durable=True,
                                                    exclusive=False,
                                                    auto_delete=False,
                                                    passive=False)
            self.assertEqual(0, queue_res.method.message_count)

            self.rabbitmq.queue_bind(HEARTBEAT_QUEUE, HEALTH_CHECK_EXCHANGE,
                                     "heartbeat.*")
            self.test_config_manager._initialise_rabbitmq()

            blocking_channel = self.test_config_manager._rabbitmq.channel
            method_chains = pika.spec.Basic.Deliver(routing_key="ping")

            properties = pika.spec.BasicProperties()
            self.test_config_manager._process_ping(blocking_channel,
                                                   method_chains, properties,
                                                   b"ping")

            # By re-declaring the queue again we can get the number of messages
            # in the queue.
            queue_res = self.rabbitmq.queue_declare(queue=HEARTBEAT_QUEUE,
                                                    durable=True,
                                                    exclusive=False,
                                                    auto_delete=False,
                                                    passive=True)
            self.assertEqual(1, queue_res.method.message_count)

            # Check that the message received is a valid HB
            _, _, body = self.rabbitmq.basic_get(HEARTBEAT_QUEUE)
            self.assertDictEqual(expected_output, json.loads(body))
        finally:
            delete_queue_if_exists(self.rabbitmq, HEARTBEAT_QUEUE)
            delete_exchange_if_exists(self.rabbitmq, HEALTH_CHECK_EXCHANGE)
            disconnect_from_rabbit(self.test_config_manager._rabbitmq)
            disconnect_from_rabbit(self.test_config_manager._heartbeat_rabbit)

    @parameterized.expand([
        (FileCreatedEvent("test_config"), """
[test_section_1]
test_field_1=Hello
test_field_2=
test_field_3=10
test_field_4=true
""", {
            "DEFAULT": {},
            "test_section_1": {
                "test_field_1": "Hello",
                "test_field_2": "",
                "test_field_3": "10",
                "test_field_4": "true"
            }
        }),
        (FileModifiedEvent("test_config"), """
[test_section_1]
test_field_1=Hello
test_field_2=
test_field_3=10
test_field_4=true

[test_section_2]
test_field_1=OK
test_field_2=Bye
test_field_3=4
test_field_4=off
""", {
            "DEFAULT": {},
            "test_section_1": {
                "test_field_1": "Hello",
                "test_field_2": "",
                "test_field_3": "10",
                "test_field_4": "true"
            },
            "test_section_2": {
                "test_field_1": "OK",
                "test_field_2": "Bye",
                "test_field_3": "4",
                "test_field_4": "off"
            }
        }),
        (FileDeletedEvent("test_config"), "", {}),
    ])
    @mock.patch.object(ConfigParser, "read", autospec=True)
    @mock.patch.object(ConfigsManager, "_send_data", autospec=True)
    @mock.patch("src.utils.routing_key.get_routing_key", autospec=True)
    def test__on_event_thrown(self, event_to_trigger: FileSystemEvent,
                              config_file_input: str, expected_dict: Dict,
                              mock_get_routing_key: MagicMock,
                              mock_send_data: MagicMock,
                              mock_config_parser: MagicMock):
        """Created/modified/deleted file events must send the parsed config
        dict with the routing key derived from the file path."""
        TEST_ROUTING_KEY = "test_config"

        def read_config_side_effect(cp: ConfigParser, *args,
                                    **kwargs) -> None:
            """
            cp would be "self" in the context of this function being injected.
            """
            cp.read_string(config_file_input)

        mock_get_routing_key.return_value = TEST_ROUTING_KEY
        mock_send_data.return_value = None
        mock_config_parser.side_effect = read_config_side_effect

        self.test_config_manager._on_event_thrown(event_to_trigger)
        mock_get_routing_key.assert_called_once()
        mock_send_data.assert_called_once_with(self.test_config_manager,
                                               expected_dict,
                                               TEST_ROUTING_KEY)

    @parameterized.expand([
        ({}, ),
        ({
            "DEFAULT": {},
            "test_section_1": {
                "test_field_1": "Hello",
                "test_field_2": "",
                "test_field_3": "10",
                "test_field_4": "true"
            }
        }, ),
        ({
            "DEFAULT": {},
            "test_section_1": {
                "test_field_1": "Hello",
                "test_field_2": "",
                "test_field_3": "10",
                "test_field_4": "true"
            },
            "test_section_2": {
                "test_field_1": "OK",
                "test_field_2": "Bye",
                "test_field_3": "4",
                "test_field_4": "off"
            }
        }, ),
    ])
    def test_send_data(self, config: Dict[str, Any]):
        """_send_data must publish the config dict to CONFIG_EXCHANGE with
        the given routing key, unmodified."""
        route_key = "test.route"
        CONFIG_QUEUE = "hb_test"
        try:
            self.test_config_manager._initialise_rabbitmq()
            connect_to_rabbit(self.rabbitmq)

            queue_res = self.rabbitmq.queue_declare(queue=CONFIG_QUEUE,
                                                    durable=True,
                                                    exclusive=False,
                                                    auto_delete=False,
                                                    passive=False)
            self.assertEqual(0, queue_res.method.message_count)

            self.rabbitmq.queue_bind(CONFIG_QUEUE, CONFIG_EXCHANGE, route_key)
            # deepcopy so the later equality check is against an unshared dict
            self.test_config_manager._send_data(copy.deepcopy(config),
                                                route_key)

            # By re-declaring the queue again we can get the number of messages
            # in the queue.
            queue_res = self.rabbitmq.queue_declare(queue=CONFIG_QUEUE,
                                                    durable=True,
                                                    exclusive=False,
                                                    auto_delete=False,
                                                    passive=True)
            self.assertEqual(1, queue_res.method.message_count)

            # Check that the message received is what's expected
            _, _, body = self.rabbitmq.basic_get(CONFIG_QUEUE)
            self.assertDictEqual(config, json.loads(body))
        finally:
            delete_queue_if_exists(self.rabbitmq, CONFIG_QUEUE)
            disconnect_from_rabbit(self.test_config_manager._rabbitmq)
            disconnect_from_rabbit(self.test_config_manager._heartbeat_rabbit)

    @mock.patch.object(ConfigsManager, "_initialise_rabbitmq", autospec=True)
    @mock.patch.object(ConfigsManager, "foreach_config_file", autospec=True)
    @mock.patch.object(PollingObserver, "start", autospec=True)
    @mock.patch.object(RabbitMQApi, "start_consuming", autospec=True)
    def test_start_not_watching(self, mock_start_consuming: MagicMock,
                                mock_observer_start: MagicMock,
                                mock_foreach: MagicMock,
                                mock_initialise_rabbit: MagicMock):
        """When not yet watching, start() must initialise rabbit, process
        existing config files, start the observer, and begin consuming."""
        self.test_config_manager._watching = False
        mock_foreach.return_value = None
        mock_initialise_rabbit.return_value = None
        mock_observer_start.return_value = None
        mock_start_consuming.return_value = None

        self.test_config_manager.start()
        mock_initialise_rabbit.assert_called_once()
        mock_foreach.assert_called_once()
        mock_observer_start.assert_called_once()
        mock_start_consuming.assert_called_once()

        disconnect_from_rabbit(self.test_config_manager._rabbitmq)
        disconnect_from_rabbit(self.test_config_manager._heartbeat_rabbit)

    @mock.patch.object(ConfigsManager, "_initialise_rabbitmq", autospec=True)
    @mock.patch.object(ConfigsManager, "foreach_config_file", autospec=True)
    @mock.patch.object(PollingObserver, "start", autospec=True)
    def test_start_after_watching(self, mock_observer_start: MagicMock,
                                  mock_foreach: MagicMock,
                                  mock_initialise_rabbit: MagicMock):
        self.test_config_manager._watching = True
        mock_foreach.return_value = None
        mock_initialise_rabbit.return_value = None
        mock_observer_start.return_value = None

        self.test_config_manager.start()
mock_initialise_rabbit.assert_called_once() mock_foreach.assert_called_once() mock_observer_start.assert_not_called() disconnect_from_rabbit(self.test_config_manager._rabbitmq) disconnect_from_rabbit(self.test_config_manager._heartbeat_rabbit) @mock.patch('sys.exit', autospec=True) @mock.patch.object(ConfigsManager, "disconnect_from_rabbit", autospec=True) def test__on_terminate_when_not_observing(self, mock_disconnect: MagicMock, mock_sys_exit: MagicMock): mock_disconnect.return_value = None mock_sys_exit.return_value = None # We mock the stack frame since we don't need it. mock_signal = MagicMock() mock_stack_frame = MagicMock() self.test_config_manager._on_terminate(mock_signal, mock_stack_frame) self.assertFalse(self.test_config_manager._watching) mock_disconnect.assert_called_once() mock_sys_exit.assert_called_once() @mock.patch('sys.exit', autospec=True) @mock.patch.object(ConfigsManager, "disconnect_from_rabbit", autospec=True) @mock.patch.object(PollingObserver, "stop", autospec=True) @mock.patch.object(PollingObserver, "join", autospec=True) def test__on_terminate_when_observing(self, mock_join: MagicMock, mock_stop: MagicMock, mock_disconnect: MagicMock, mock_sys_exit: MagicMock): mock_join.return_value = None mock_stop.return_value = None mock_disconnect.return_value = None mock_sys_exit.return_value = None self.test_config_manager._watching = True # We mock the signal and stack frame since we don't need them. 
mock_signal = MagicMock() mock_stack_frame = MagicMock() self.test_config_manager._on_terminate(mock_signal, mock_stack_frame) self.assertFalse(self.test_config_manager._watching) mock_disconnect.assert_called_once() mock_stop.assert_called_once() mock_join.assert_called_once() mock_sys_exit.assert_called_once() @mock.patch("os.path.join", autospec=True) @mock.patch("os.walk", autospec=True) def test_foreach_config_file(self, mock_os_walk: MagicMock, mock_os_path_join: MagicMock): def os_walk_fn(directory: str): file_system = [ ('/foo', ('bar', ), ('baz.ini', )), ('/foo/bar', (), ('spam.ini', 'eggs.txt')), ] for root, dirs, files in file_system: yield root, dirs, files def test_callback(input: str) -> None: self.assertIn(input, ['/foo/baz.ini', '/foo/bar/spam.ini']) mock_os_walk.side_effect = os_walk_fn mock_os_path_join.side_effect = lambda x, y: x + "/" + y self.test_config_manager.foreach_config_file(test_callback)
class ConfigsManager(PublisherSubscriberComponent):
    """
    This class reads all configurations and sends them over to the "config"
    topic in Rabbit MQ. Updated configs are sent as well.

    Two RabbitMQ connections are held: `self.rabbitmq` publishes configs to
    CONFIG_EXCHANGE, while `self._heartbeat_rabbit` consumes pings and
    publishes heartbeats on HEALTH_CHECK_EXCHANGE.
    """

    def __init__(self, name: str, logger: logging.Logger, config_directory:
                 str, rabbit_ip: str,
                 file_patterns: Optional[List[str]] = None,
                 ignore_file_patterns: Optional[List[str]] = None,
                 ignore_directories: bool = True, case_sensitive: bool = False):
        """
        Constructs the ConfigsManager instance
        :param name: The component's name (used in heartbeats and logs)
        :param logger: Parent logger; child loggers are derived from it
        :param config_directory: The root config directory to watch. This is
            searched recursively.
        :param rabbit_ip: Host of the RabbitMQ broker
        :param file_patterns: The file patterns in the directory to watch.
            Defaults to all ini files
        :param ignore_file_patterns: Any file patterns to ignore.
            Defaults to None
        :param ignore_directories: Whether changes in directories should be
            ignored. Default: True
        :param case_sensitive: Whether the patterns in `file_patterns` and
            `ignore_file_patterns` are case sensitive. Defaults to False
        """
        if not file_patterns:
            file_patterns = ['*.ini']

        self._name = name
        self._config_directory = config_directory
        self._file_patterns = file_patterns
        self._watching = False
        self._connected_to_rabbit = False

        logger.debug("Creating config RabbitMQ connection")
        rabbitmq = RabbitMQApi(
            logger.getChild("config_{}".format(RabbitMQApi.__name__)),
            host=rabbit_ip)

        super().__init__(logger, rabbitmq)

        self._logger.debug("Creating heartbeat RabbitMQ connection")
        self._heartbeat_rabbit = RabbitMQApi(
            logger.getChild("heartbeat_{}".format(RabbitMQApi.__name__)),
            host=rabbit_ip)

        # Watchdog polling observer that fires _on_event_thrown for any
        # matching file change under config_directory (recursively).
        self._event_handler = ConfigFileEventHandler(
            self._logger.getChild(ConfigFileEventHandler.__name__),
            self._on_event_thrown,
            file_patterns,
            ignore_file_patterns,
            ignore_directories,
            case_sensitive)
        self._observer = PollingObserver()
        self._observer.schedule(self._event_handler, config_directory,
                                recursive=True)

    def __str__(self) -> str:
        return self.name

    @property
    def name(self) -> str:
        return self._name

    def _initialise_rabbitmq(self) -> None:
        """
        Sets up both RabbitMQ channels: enables delivery confirmation and
        declares CONFIG_EXCHANGE on the config connection; enables delivery
        confirmation, declares HEALTH_CHECK_EXCHANGE, and declares/binds the
        ping queue with its consumer on the heartbeat connection.

        Retries forever on AMQP channel errors; connection errors are logged
        and re-raised.
        """
        while True:
            try:
                self._connect_to_rabbit()
                self._logger.info("Connected to Rabbit")
                self.rabbitmq.confirm_delivery()
                # Fixed: the implicit string concatenation was missing a
                # space and logged "configsRabbitMQ channel".
                self._logger.info("Enabled delivery confirmation on configs "
                                  "RabbitMQ channel")

                self.rabbitmq.exchange_declare(CONFIG_EXCHANGE, 'topic', False,
                                               True, False, False)
                self._logger.info("Declared %s exchange in Rabbit",
                                  CONFIG_EXCHANGE)

                self._heartbeat_rabbit.confirm_delivery()
                # Fixed: same missing-space defect ("heartbeatRabbitMQ").
                self._logger.info("Enabled delivery confirmation on heartbeat "
                                  "RabbitMQ channel")

                self._heartbeat_rabbit.exchange_declare(
                    HEALTH_CHECK_EXCHANGE, 'topic', False, True, False, False)
                self._logger.info("Declared %s exchange in Rabbit",
                                  HEALTH_CHECK_EXCHANGE)

                # Fixed: the message had an unbalanced quote ("'%s") and
                # logged _HEARTBEAT_ROUTING_KEY although the queue is bound
                # with 'ping' below; log the key actually used.
                self._logger.info(
                    "Creating and binding queue '%s' to exchange '%s' with "
                    "routing key '%s'", CONFIG_PING_QUEUE,
                    HEALTH_CHECK_EXCHANGE, 'ping')

                self._heartbeat_rabbit.queue_declare(CONFIG_PING_QUEUE, False,
                                                     True, False, False)
                self._logger.debug("Declared '%s' queue", CONFIG_PING_QUEUE)

                self._heartbeat_rabbit.queue_bind(CONFIG_PING_QUEUE,
                                                  HEALTH_CHECK_EXCHANGE,
                                                  'ping')
                self._logger.debug("Bound queue '%s' to exchange '%s'",
                                   CONFIG_PING_QUEUE, HEALTH_CHECK_EXCHANGE)

                # Pre-fetch count is set to 300 (round() on the literal was
                # a no-op and has been removed).
                prefetch_count = 300
                self._heartbeat_rabbit.basic_qos(prefetch_count=prefetch_count)
                self._logger.debug("Declaring consuming intentions")
                self._heartbeat_rabbit.basic_consume(CONFIG_PING_QUEUE,
                                                     self._process_ping, True,
                                                     False, None)
                break
            except (ConnectionNotInitialisedException,
                    AMQPConnectionError) as connection_error:
                # Should be impossible, but since exchange_declare can throw
                # it we shall ensure to log that the error passed through here
                # too.
                self._logger.error(
                    "Something went wrong that meant a connection was not made"
                )
                self._logger.error(connection_error.message)
                raise connection_error
            except AMQPChannelError:
                # This error would have already been logged by the RabbitMQ
                # logger and handled by RabbitMQ. As a result we don't need to
                # do anything here, just re-try.
                time.sleep(RE_INITIALISE_SLEEPING_PERIOD)

    def _connect_to_rabbit(self) -> None:
        """Connects both RabbitMQ connections (blocking until successful),
        unless already connected."""
        if not self._connected_to_rabbit:
            self._logger.info("Connecting to the config RabbitMQ")
            self.rabbitmq.connect_till_successful()
            self._logger.info("Connected to config RabbitMQ")
            self._logger.info("Connecting to the heartbeat RabbitMQ")
            self._heartbeat_rabbit.connect_till_successful()
            self._logger.info("Connected to heartbeat RabbitMQ")
            self._connected_to_rabbit = True
        else:
            self._logger.info(
                "Already connected to RabbitMQ, will not connect again")

    def disconnect_from_rabbit(self) -> None:
        """Disconnects both RabbitMQ connections (blocking until successful),
        unless already disconnected."""
        if self._connected_to_rabbit:
            self._logger.info("Disconnecting from the config RabbitMQ")
            self.rabbitmq.disconnect_till_successful()
            self._logger.info("Disconnected from the config RabbitMQ")
            self._logger.info("Disconnecting from the heartbeat RabbitMQ")
            self._heartbeat_rabbit.disconnect_till_successful()
            self._logger.info("Disconnected from the heartbeat RabbitMQ")
            self._connected_to_rabbit = False
        else:
            self._logger.info("Already disconnected from RabbitMQ")

    def _send_heartbeat(self, data_to_send: dict) -> None:
        """Publishes a persistent, mandatory heartbeat message on the
        health-check exchange with delivery confirmation."""
        self._logger.debug("Sending heartbeat to the %s exchange",
                           HEALTH_CHECK_EXCHANGE)
        self._logger.debug("Sending %s", data_to_send)
        self._heartbeat_rabbit.basic_publish_confirm(
            exchange=HEALTH_CHECK_EXCHANGE,
            routing_key=_HEARTBEAT_ROUTING_KEY, body=data_to_send,
            is_body_dict=True,
            properties=pika.BasicProperties(delivery_mode=2), mandatory=True)
        self._logger.debug("Sent heartbeat to %s exchange",
                           HEALTH_CHECK_EXCHANGE)

    def _process_ping(self, ch: BlockingChannel,
                      method: pika.spec.Basic.Deliver,
                      properties: pika.spec.BasicProperties,
                      body: bytes) -> None:
        """Consumer callback for the ping queue: answers each ping with a
        heartbeat reporting the observer's aliveness. Delivery failures are
        logged but never raised, as heartbeats must be real-time."""
        self._logger.debug("Received %s. Let's pong", body)
        try:
            heartbeat = {
                'component_name': self.name,
                'is_alive': self._observer.is_alive(),
                'timestamp': datetime.now().timestamp(),
            }
            self._send_heartbeat(heartbeat)
        except MessageWasNotDeliveredException as e:
            # Log the message and do not raise it as heartbeats must be
            # real-time
            self._logger.error("Error when sending heartbeat")
            self._logger.exception(e)

    def _send_data(self, config: Dict[str, Any], route_key: str) -> None:
        """
        Publishes `config` to CONFIG_EXCHANGE under `route_key`, retrying
        until the broker confirms delivery. Reconnects on connection errors
        and re-initialises RabbitMQ on channel errors.
        """
        self._logger.debug("Sending %s with routing key %s", config, route_key)
        while True:
            try:
                self._logger.debug(
                    "Attempting to send config with routing key %s", route_key)
                # We need to definitely send this
                self.rabbitmq.basic_publish_confirm(
                    CONFIG_EXCHANGE, route_key, config, mandatory=True,
                    is_body_dict=True,
                    properties=BasicProperties(delivery_mode=2))
                self._logger.info("Configuration update sent")
                break
            except MessageWasNotDeliveredException as mwnde:
                self._logger.error("Config was not successfully sent with "
                                   "routing key %s", route_key)
                self._logger.exception(mwnde)
                self._logger.info("Will attempt sending the config again with "
                                  "routing key %s", route_key)
                # Sleep on the connection so heartbeats keep flowing while
                # we back off before the retry.
                self.rabbitmq.connection.sleep(10)
            except (ConnectionNotInitialisedException,
                    AMQPConnectionError) as connection_error:
                # If the connection is not initialised or there is a connection
                # error, we need to restart the connection and try it again
                self._logger.error("There has been a connection error")
                self._logger.exception(connection_error)
                self._logger.info("Restarting the connection")
                self._connected_to_rabbit = False

                # Wait some time before reconnecting and then retrying
                time.sleep(RE_INITIALISE_SLEEPING_PERIOD)
                self._connect_to_rabbit()

                self._logger.info("Connection restored, will attempt sending "
                                  "the config with routing key %s", route_key)
            except AMQPChannelError:
                # This error would have already been logged by the RabbitMQ
                # logger and handled by RabbitMQ. Since a new channel is
                # created we need to re-initialise RabbitMQ
                self._initialise_rabbitmq()

    def _on_event_thrown(self, event: FileSystemEvent) -> None:
        """
        When an event is thrown, it reads the config and sends it as a dict
        via rabbitmq to the config exchange of type topic with the routing key
        determined by the relative file path.
        :param event: The event passed by watchdog
        :return None
        """
        self._logger.debug("Event thrown: %s", event)
        self._logger.info("Detected a config %s in %s", event.event_type,
                          event.src_path)

        if event.event_type == "deleted":
            # A deleted config is propagated as an empty dict.
            self._logger.debug("Creating empty dict")
            config_dict = {}
        else:
            config = ConfigParser()

            self._logger.debug("Reading configuration")
            try:
                config.read(event.src_path)
            except (DuplicateSectionError, DuplicateOptionError,
                    InterpolationError, ParsingError) as e:
                self._logger.error(e.message)
                # When the config is invalid, we do nothing and discard this
                # event.
                return None

            self._logger.debug("Config read successfully")
            config_dict = {key: dict(config[key]) for key in config}

        self._logger.debug("Config converted to dict: %s", config_dict)
        # Since the watcher is configured to watch files in
        # self._config_directory we only need check that (for get_routing_key)
        config_folder = os.path.normpath(self._config_directory)

        key = routing_key.get_routing_key(event.src_path, config_folder)
        self._logger.debug("Sending config %s to RabbitMQ with routing key %s",
                           config_dict, key)

        self._send_data(config_dict, key)

    @property
    def config_directory(self) -> str:
        return self._config_directory

    @property
    def watching(self) -> bool:
        return self._watching

    @property
    def connected_to_rabbit(self) -> bool:
        return self._connected_to_rabbit

    def start(self) -> None:
        """
        This method is used to start rabbit and the observer and begin
        watching the config files. It also sends the configuration files for
        the first time
        :return None
        """
        log_and_print("{} started.".format(self), self._logger)

        self._initialise_rabbitmq()

        def do_first_run_event(name: str) -> None:
            # Synthesise a first-run event per config file so every existing
            # config is pushed once at startup.
            event = FileSystemEvent(name)
            event.event_type = _FIRST_RUN_EVENT
            self._on_event_thrown(event)

        self._logger.info("Throwing first run event for all config files")
        self.foreach_config_file(do_first_run_event)

        if not self._watching:
            self._logger.info("Starting config file observer")
            self._observer.start()
            self._watching = True
        else:
            self._logger.info("File observer is already running")

        self._logger.debug("Config file observer started")
        self._connect_to_rabbit()
        self._listen_for_data()

    def _listen_for_data(self) -> None:
        """Blocks consuming ping messages on the heartbeat connection."""
        self._logger.info("Starting the config ping listener")
        self._heartbeat_rabbit.start_consuming()

    def _on_terminate(self, signum: int, stack: FrameType) -> None:
        """
        This method is used to stop the observer and join the threads
        """
        log_and_print(
            "{} is terminating. Connections with RabbitMQ will be closed, and "
            "afterwards the process will exit.".format(self), self._logger)

        if self._watching:
            self._logger.info("Stopping config file observer")
            self._observer.stop()
            self._observer.join()
            self._watching = False
            self._logger.debug("Config file observer stopped")
        else:
            self._logger.info("Config file observer already stopped")

        self.disconnect_from_rabbit()
        log_and_print("{} terminated.".format(self), self._logger)
        sys.exit()

    def foreach_config_file(self, callback: Callable[[str], None]) -> None:
        """
        Runs a function over all the files being watched by this class
        :param callback: The function to watch. Must accept a string for the
            file path as {config_directory} + {file path}
        :return: Nothing
        """
        for root, dirs, files in os.walk(self.config_directory):
            for name in files:
                if any([fnmatch.fnmatch(name, pattern)
                        for pattern in self._file_patterns]):
                    callback(os.path.join(root, name))
class TestAlertStore(unittest.TestCase):
    def setUp(self) -> None:
        """Builds the Rabbit/Mongo/Redis clients, the AlertStore under test,
        the store input queue/exchanges and the sample alert payloads."""
        self.dummy_logger = logging.getLogger('Dummy')
        self.dummy_logger.disabled = True
        self.connection_check_time_interval = timedelta(seconds=0)
        self.rabbit_ip = env.RABBIT_IP
        self.rabbitmq = RabbitMQApi(
            self.dummy_logger, self.rabbit_ip,
            connection_check_time_interval=self.connection_check_time_interval)
        self.test_rabbit_manager = RabbitMQApi(
            self.dummy_logger, self.rabbit_ip,
            connection_check_time_interval=self.connection_check_time_interval)

        self.mongo_ip = env.DB_IP
        self.mongo_db = env.DB_NAME
        self.mongo_port = env.DB_PORT
        self.mongo = MongoApi(
            logger=self.dummy_logger.getChild(MongoApi.__name__),
            db_name=self.mongo_db, host=self.mongo_ip, port=self.mongo_port)

        self.redis_db = env.REDIS_DB
        self.redis_host = env.REDIS_IP
        self.redis_port = env.REDIS_PORT
        self.redis_namespace = env.UNIQUE_ALERTER_IDENTIFIER
        self.redis = RedisApi(self.dummy_logger, self.redis_db,
                              self.redis_host, self.redis_port, '',
                              self.redis_namespace,
                              self.connection_check_time_interval)

        self.test_store_name = 'store name'
        self.test_store = AlertStore(self.test_store_name, self.dummy_logger,
                                     self.rabbitmq)

        # Declare the store input queue/exchanges and a helper queue bound to
        # the health-check exchange for capturing heartbeats.
        self.routing_key = 'heartbeat.worker'
        self.test_queue_name = 'test queue'
        connect_to_rabbit(self.rabbitmq)
        self.rabbitmq.exchange_declare(HEALTH_CHECK_EXCHANGE, 'topic', False,
                                       True, False, False)
        self.rabbitmq.exchange_declare(STORE_EXCHANGE, 'direct', False, True,
                                       False, False)
        self.rabbitmq.queue_declare(ALERT_STORE_INPUT_QUEUE, False, True,
                                    False, False)
        self.rabbitmq.queue_bind(ALERT_STORE_INPUT_QUEUE, STORE_EXCHANGE,
                                 ALERT_STORE_INPUT_ROUTING_KEY)
        connect_to_rabbit(self.test_rabbit_manager)
        self.test_rabbit_manager.queue_declare(self.test_queue_name, False,
                                               True, False, False)
        self.test_rabbit_manager.queue_bind(self.test_queue_name,
                                            HEALTH_CHECK_EXCHANGE,
                                            self.routing_key)

        self.test_data_str = 'test data'
        self.test_exception = PANICException('test_exception', 1)

        self.parent_id = 'test_parent_id'

        # Three sample alerts sharing parent_id/metric but differing in
        # origin, name, severity, message and alert code value.
        self.alert_id = 'test_alert_id'
        self.origin_id = 'test_origin_id'
        self.alert_name = 'test_alert'
        self.metric = 'system_is_down'
        self.severity = 'warning'
        self.message = 'alert message'
        self.value = 'alert_code_1'

        self.alert_id_2 = 'test_alert_id_2'
        self.origin_id_2 = 'test_origin_id_2'
        self.alert_name_2 = 'test_alert_2'
        self.severity_2 = 'critical'
        self.message_2 = 'alert message 2'
        self.value_2 = 'alert_code_2'

        self.alert_id_3 = 'test_alert_id_3'
        self.origin_id_3 = 'test_origin_id_3'
        self.alert_name_3 = 'test_alert_3'
        self.severity_3 = 'info'
        self.message_3 = 'alert message 3'
        self.value_3 = 'alert_code_3'

        self.last_monitored = datetime(2012, 1, 1).timestamp()
        self.none = None

        self.alert_data_1 = {
            'parent_id': self.parent_id,
            'origin_id': self.origin_id,
            'alert_code': {
                'name': self.alert_name,
                'value': self.value,
            },
            'severity': self.severity,
            'metric': self.metric,
            'message': self.message,
            'timestamp': self.last_monitored,
        }
        self.alert_data_2 = {
            'parent_id': self.parent_id,
            'origin_id': self.origin_id_2,
            'alert_code': {
                'name': self.alert_name_2,
                'value': self.value_2,
            },
            'severity': self.severity_2,
            'metric': self.metric,
            'message': self.message_2,
            'timestamp': self.last_monitored,
        }
        self.alert_data_3 = {
            'parent_id': self.parent_id,
            'origin_id': self.origin_id_3,
            'alert_code': {
                'name': self.alert_name_3,
                'value': self.value_3,
            },
            'severity': self.severity_3,
            'metric': self.metric,
            'message': self.message_3,
            'timestamp': self.last_monitored,
        }
        # Malformed payloads used by the error-path tests.
        self.alert_data_key_error = {"result": {"data": {}, "data2": {}}}
        self.alert_data_unexpected = {"unexpected": {}}

    def tearDown(self) -> None:
        """Removes the queues/exchanges and wipes Redis/Mongo test data."""
        connect_to_rabbit(self.rabbitmq)
        delete_queue_if_exists(self.rabbitmq, ALERT_STORE_INPUT_QUEUE)
        delete_exchange_if_exists(self.rabbitmq, STORE_EXCHANGE)
        delete_exchange_if_exists(self.rabbitmq, HEALTH_CHECK_EXCHANGE)
        disconnect_from_rabbit(self.rabbitmq)

        connect_to_rabbit(self.test_rabbit_manager)
        delete_queue_if_exists(self.test_rabbit_manager, self.test_queue_name)
        disconnect_from_rabbit(self.test_rabbit_manager)

        self.dummy_logger = None
        self.connection_check_time_interval = None
        self.rabbitmq = None
        self.test_rabbit_manager = None

        self.redis.delete_all_unsafe()
        self.redis = None
        self.mongo.drop_collection(self.parent_id)
        self.mongo = None
        self.test_store = None

    def test__str__returns_name_correctly(self) -> None:
        self.assertEqual(self.test_store_name, str(self.test_store))

    def test_name_property_returns_name_correctly(self) -> None:
        self.assertEqual(self.test_store_name, self.test_store.name)

    def test_mongo_ip_property_returns_mongo_ip_correctly(self) -> None:
        self.assertEqual(self.mongo_ip, self.test_store.mongo_ip)

    def test_mongo_db_property_returns_mongo_db_correctly(self) -> None:
        self.assertEqual(self.mongo_db, self.test_store.mongo_db)

    def test_mongo_port_property_returns_mongo_port_correctly(self) -> None:
        self.assertEqual(self.mongo_port, self.test_store.mongo_port)

    def test_mongo_property_returns_mongo(self) -> None:
        self.assertEqual(type(self.mongo), type(self.test_store.mongo))

    def test_redis_property_returns_redis_correctly(self) -> None:
        self.assertEqual(type(self.redis), type(self.test_store.redis))

    def test_initialise_rabbitmq_initialises_everything_as_expected(
            self) -> None:
        """_initialise_rabbitmq must open/confirm the channel and declare
        both exchanges plus the alert-store input queue."""
        try:
            # To make sure that the exchanges have not already been declared
            self.rabbitmq.connect()
            self.rabbitmq.queue_delete(ALERT_STORE_INPUT_QUEUE)
            self.test_rabbit_manager.queue_delete(self.test_queue_name)
            self.rabbitmq.exchange_delete(HEALTH_CHECK_EXCHANGE)
            self.rabbitmq.exchange_delete(STORE_EXCHANGE)
            self.rabbitmq.disconnect()

            self.test_store._initialise_rabbitmq()

            # Perform checks that the connection has been opened, marked as
            # open and that the delivery confirmation variable is set.
            self.assertTrue(self.test_store.rabbitmq.is_connected)
            self.assertTrue(self.test_store.rabbitmq.connection.is_open)
            self.assertTrue(
                self.test_store.rabbitmq.channel._delivery_confirmation)

            # Check whether the producing exchanges have been created by
            # using passive=True. If this check fails an exception is raised
            # automatically.
            self.test_store.rabbitmq.exchange_declare(STORE_EXCHANGE,
                                                      passive=True)
            self.test_store.rabbitmq.exchange_declare(HEALTH_CHECK_EXCHANGE,
                                                      passive=True)

            # Check whether the exchange has been creating by sending messages
            # to it. If this fails an exception is raised, hence the test
            # fails.
            self.test_store.rabbitmq.basic_publish_confirm(
                exchange=HEALTH_CHECK_EXCHANGE, routing_key=self.routing_key,
                body=self.test_data_str, is_body_dict=False,
                properties=pika.BasicProperties(delivery_mode=2),
                mandatory=False)

            # Check whether the exchange has been creating by sending messages
            # to it. If this fails an exception is raised, hence the test
            # fails.
            self.test_store.rabbitmq.basic_publish_confirm(
                exchange=STORE_EXCHANGE,
                routing_key=ALERT_STORE_INPUT_ROUTING_KEY,
                body=self.test_data_str, is_body_dict=False,
                properties=pika.BasicProperties(delivery_mode=2),
                mandatory=False)

            # Re-declare queue to get the number of messages
            res = self.test_store.rabbitmq.queue_declare(
                ALERT_STORE_INPUT_QUEUE, False, True, False, False)
            self.assertEqual(1, res.method.message_count)
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @parameterized.expand([
        ("KeyError", "self.alert_data_key_error "),
    ])
    @mock.patch("src.data_store.stores.store.RabbitMQApi.basic_ack",
                autospec=True)
    @mock.patch("src.data_store.stores.store.Store._send_heartbeat",
                autospec=True)
    def test_process_data_with_bad_data_does_raises_exceptions(
            self, mock_error, mock_bad_data, mock_send_hb, mock_ack) -> None:
        """Bad data must be acked but must not produce a heartbeat, and the
        mongo processor must raise the named exception on it."""
        mock_ack.return_value = None
        try:
            self.test_store._initialise_rabbitmq()
            blocking_channel = self.test_store.rabbitmq.channel
            method_chains = pika.spec.Basic.Deliver(
                routing_key=ALERT_STORE_INPUT_ROUTING_KEY)
            properties = pika.spec.BasicProperties()
            self.test_store._process_data(
                blocking_channel, method_chains, properties,
                json.dumps(self.alert_data_unexpected).encode())

            self.assertRaises(eval(mock_error),
                              self.test_store._process_mongo_store,
                              eval(mock_bad_data))
            mock_ack.assert_called_once()
            mock_send_hb.assert_not_called()
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @freeze_time("2012-01-01")
    @mock.patch("src.data_store.stores.store.RabbitMQApi.basic_ack",
                autospec=True)
    @mock.patch("src.data_store.stores.alert.AlertStore._process_redis_store",
                autospec=True)
    @mock.patch("src.data_store.stores.alert.AlertStore._process_mongo_store",
                autospec=True)
    def test_process_data_sends_heartbeat_correctly(self,
                                                    mock_process_mongo_store,
                                                    mock_process_redis_store,
                                                    mock_basic_ack) -> None:
        """After processing valid data, a heartbeat with the store name and
        the frozen timestamp must be published."""
        mock_basic_ack.return_value = None
        try:
            self.test_rabbit_manager.connect()
            self.test_store._initialise_rabbitmq()

            self.test_rabbit_manager.queue_delete(self.test_queue_name)
            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=False)
            self.assertEqual(0, res.method.message_count)
            self.test_rabbit_manager.queue_bind(
                queue=self.test_queue_name, exchange=HEALTH_CHECK_EXCHANGE,
                routing_key=self.routing_key)

            blocking_channel = self.test_store.rabbitmq.channel
            method_chains = pika.spec.Basic.Deliver(
                routing_key=ALERT_STORE_INPUT_ROUTING_KEY)
            properties = pika.spec.BasicProperties()
            self.test_store._process_data(
                blocking_channel, method_chains, properties,
                json.dumps(self.alert_data_1).encode())

            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=True)
            self.assertEqual(1, res.method.message_count)

            heartbeat_test = {
                'component_name': self.test_store_name,
                'is_alive': True,
                'timestamp': datetime(2012, 1, 1).timestamp()
            }

            _, _, body = self.test_rabbit_manager.basic_get(
                self.test_queue_name)
            self.assertEqual(heartbeat_test, json.loads(body))
            mock_process_mongo_store.assert_called_once()
            mock_process_redis_store.assert_called_once()
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @mock.patch("src.data_store.stores.store.RabbitMQApi.basic_ack",
                autospec=True)
    def test_process_data_doesnt_send_heartbeat_on_processing_error(
            self, mock_basic_ack) -> None:
        """No heartbeat must be published when processing fails."""
        mock_basic_ack.return_value = None
        try:
            self.test_rabbit_manager.connect()
            self.test_store._initialise_rabbitmq()

            self.test_rabbit_manager.queue_delete(self.test_queue_name)
            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=False)
            self.assertEqual(0, res.method.message_count)
            self.test_rabbit_manager.queue_bind(
                queue=self.test_queue_name, exchange=HEALTH_CHECK_EXCHANGE,
                routing_key=self.routing_key)

            blocking_channel = self.test_store.rabbitmq.channel
            method_chains = pika.spec.Basic.Deliver(
                routing_key=ALERT_STORE_INPUT_ROUTING_KEY)
            properties = pika.spec.BasicProperties()
            self.test_store._process_data(
                blocking_channel, method_chains, properties,
                json.dumps(self.alert_data_unexpected).encode())

            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name, durable=True, exclusive=False,
                auto_delete=False, passive=True)
            self.assertEqual(0, res.method.message_count)
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @mock.patch.object(MongoApi, "update_one")
    def test_process_mongo_store_calls_update_one(self,
                                                  mock_update_one) -> None:
        self.test_store._process_mongo_store(self.alert_data_1)
        mock_update_one.assert_called_once()

    @mock.patch.object(RedisApi, "hset")
    def test_process_redis_store_calls_hset(self, mock_hset) -> None:
        self.test_store._process_redis_store(self.alert_data_1)
        mock_hset.assert_called_once()

    @parameterized.expand([
        ("self.alert_data_1", ),
        ("self.alert_data_2", ),
        ("self.alert_data_3", ),
    ])
    @freeze_time("2012-01-01")
    @mock.patch.object(MongoApi, "update_one")
    def test_process_mongo_store_calls_mongo_correctly(
            self, mock_system_data, mock_update_one) -> None:
        """The mongo update must push the alert fields and maintain the
        first/last/n_alerts bookkeeping on the parent document."""
        data = eval(mock_system_data)
        self.test_store._process_mongo_store(data)
        call_1 = call(data['parent_id'], {
            'doc_type': 'alert',
            'n_alerts': {'$lt': 1000}
        }, {
            '$push': {
                'alerts': {
                    'origin': data['origin_id'],
                    'alert_name': data['alert_code']['name'],
                    'severity': data['severity'],
                    'metric': data['metric'],
                    'message': data['message'],
                    'timestamp': str(data['timestamp']),
                }
            },
            '$min': {'first': data['timestamp']},
            '$max': {'last': data['timestamp']},
            '$inc': {'n_alerts': 1},
        })
        mock_update_one.assert_has_calls([call_1])

    @parameterized.expand([
        ("self.alert_data_1", ),
        ("self.alert_data_2", ),
        ("self.alert_data_3", ),
    ])
    @freeze_time("2012-01-01")
    @mock.patch.object(RedisApi, "hset")
    def test_process_redis_store_calls_redis_correctly(self, mock_system_data,
                                                       mock_hset) -> None:
        """The redis hset must target the parent hash with the metric key
        and store severity+message as JSON."""
        data = eval(mock_system_data)
        self.test_store._process_redis_store(data)
        metric_data = {'severity': data['severity'],
                       'message': data['message']}
        key = data['origin_id']
        call_1 = call(Keys.get_hash_parent(data['parent_id']),
                      eval('Keys.get_alert_{}(key)'.format(data['metric'])),
                      json.dumps(metric_data))
        mock_hset.assert_has_calls([call_1])

    @parameterized.expand([
        ("self.alert_data_1", ),
        ("self.alert_data_2", ),
        ("self.alert_data_3", ),
    ])
    @freeze_time("2012-01-01")
    @mock.patch("src.data_store.stores.store.RabbitMQApi.basic_ack",
                autospec=True)
    @mock.patch("src.data_store.stores.alert.AlertStore._process_redis_store",
                autospec=True)
    @mock.patch("src.data_store.stores.store.Store._send_heartbeat",
                autospec=True)
    @mock.patch.object(MongoApi, "update_one")
    def test_process_data_calls_mongo_correctly(self, mock_system_data,
                                                mock_update_one, mock_send_hb,
                                                mock_process_redis_store,
                                                mock_ack) -> None:
        """End-to-end: _process_data must ack, heartbeat and issue the same
        mongo update as _process_mongo_store."""
        mock_ack.return_value = None
        try:
            self.test_store._initialise_rabbitmq()

            data = eval(mock_system_data)
            blocking_channel = self.test_store.rabbitmq.channel
            method_chains = pika.spec.Basic.Deliver(
                routing_key=ALERT_STORE_INPUT_ROUTING_KEY)
            properties = pika.spec.BasicProperties()
            self.test_store._process_data(blocking_channel, method_chains,
                                          properties,
                                          json.dumps(data).encode())
            mock_ack.assert_called_once()
            mock_send_hb.assert_called_once()

            call_1 = call(data['parent_id'], {
                'doc_type': 'alert',
                'n_alerts': {'$lt': 1000}
            }, {
                '$push': {
                    'alerts': {
                        'origin': data['origin_id'],
                        'alert_name': data['alert_code']['name'],
                        'severity': data['severity'],
                        'metric': data['metric'],
                        'message': data['message'],
                        'timestamp': str(data['timestamp']),
                    }
                },
                '$min': {'first': data['timestamp']},
                '$max': {'last': data['timestamp']},
                '$inc': {'n_alerts': 1},
            })
            mock_update_one.assert_has_calls([call_1])
            mock_process_redis_store.assert_called_once()
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @parameterized.expand([
        ("self.alert_data_1", ),
        ("self.alert_data_2", ),
        ("self.alert_data_3", ),
    ])
    @freeze_time("2012-01-01")
    @mock.patch("src.data_store.stores.store.RabbitMQApi.basic_ack",
                autospec=True)
    @mock.patch("src.data_store.stores.alert.AlertStore._process_mongo_store",
                autospec=True)
    @mock.patch("src.data_store.stores.store.Store._send_heartbeat",
                autospec=True)
    @mock.patch.object(RedisApi, "hset")
    def test_process_data_calls_redis_correctly(self, mock_system_data,
                                                mock_hset, mock_send_hb,
                                                mock_process_mongo_store,
                                                mock_ack) -> None:
        """End-to-end: _process_data must ack, heartbeat and issue the same
        redis hset as _process_redis_store."""
        mock_ack.return_value = None
        try:
            self.test_store._initialise_rabbitmq()

            data = eval(mock_system_data)
            blocking_channel = self.test_store.rabbitmq.channel
            method_chains = pika.spec.Basic.Deliver(
                routing_key=ALERT_STORE_INPUT_ROUTING_KEY)
            properties = pika.spec.BasicProperties()
            self.test_store._process_data(blocking_channel, method_chains,
                                          properties,
                                          json.dumps(data).encode())
            mock_ack.assert_called_once()
            mock_send_hb.assert_called_once()

            metric_data = {'severity': data['severity'],
                           'message': data['message']}
            key = data['origin_id']
            call_1 = call(
                Keys.get_hash_parent(data['parent_id']),
                eval('Keys.get_alert_{}(key)'.format(data['metric'])),
                json.dumps(metric_data))
            mock_hset.assert_has_calls([call_1])
            mock_process_mongo_store.assert_called_once()
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @parameterized.expand([
        ("self.alert_data_1", ),
        ("self.alert_data_2", ),
        ("self.alert_data_3", ),
    ])
    def test_process_mongo_store_mongo_stores_correctly(
            self, mock_system_data) -> None:
        """Against a live Mongo: the stored document must contain exactly
        the alert fields that were processed."""
        data = eval(mock_system_data)
        self.test_store._process_mongo_store(data)

        documents = self.mongo.get_all(data['parent_id'])
        document = documents[0]
        expected = ['alert', 1, str(data['origin_id']),
                    str(data['alert_code']['name']), str(data['severity']),
                    str(data['metric']), str(data['message']),
                    str(data['timestamp'])]
        actual = [document['doc_type'], document['n_alerts'],
                  document['alerts'][0]['origin'],
                  document['alerts'][0]['alert_name'],
                  document['alerts'][0]['severity'],
                  document['alerts'][0]['metric'],
                  document['alerts'][0]['message'],
                  document['alerts'][0]['timestamp']]
        self.assertListEqual(expected, actual)

    @parameterized.expand([
        ("self.alert_data_1", ),
        ("self.alert_data_2", ),
        ("self.alert_data_3", ),
    ])
    def test_process_redis_store_redis_stores_correctly(
            self, mock_system_data) -> None:
        """Against a live Redis: the stored hash field must round-trip the
        severity and message as JSON."""
        data = eval(mock_system_data)
        self.test_store._process_redis_store(data)

        key = data['origin_id']
        stored_data = self.redis.hget(
            Keys.get_hash_parent(data['parent_id']),
            eval('Keys.get_alert_{}(key)'.format(data['metric'])))

        expected_data = {'severity': data['severity'],
                         'message': data['message']}

        self.assertEqual(expected_data, json.loads(stored_data))

    @parameterized.expand([
        ("self.alert_data_1", ),
        ("self.alert_data_2", ),
        ("self.alert_data_3", ),
    ])
    @mock.patch("src.data_store.stores.store.RabbitMQApi.basic_ack",
                autospec=True)
    @mock.patch("src.data_store.stores.alert.AlertStore._process_redis_store",
                autospec=True)
    @mock.patch("src.data_store.stores.store.Store._send_heartbeat",
                autospec=True)
    def test_process_data_results_stores_in_mongo_correctly(
            self, mock_system_data, mock_send_hb, mock_process_redis_store,
            mock_ack) -> None:
        """End-to-end against a live Mongo: _process_data must persist the
        alert document, ack and heartbeat."""
        mock_ack.return_value = None
        try:
            self.test_store._initialise_rabbitmq()

            data = eval(mock_system_data)
            blocking_channel = self.test_store.rabbitmq.channel
            method_chains = pika.spec.Basic.Deliver(
                routing_key=ALERT_STORE_INPUT_ROUTING_KEY)
            properties = pika.spec.BasicProperties()
            self.test_store._process_data(blocking_channel, method_chains,
                                          properties,
                                          json.dumps(data).encode())
            mock_process_redis_store.assert_called_once()
            mock_ack.assert_called_once()
            mock_send_hb.assert_called_once()

            documents = self.mongo.get_all(data['parent_id'])
            document = documents[0]
            expected = ['alert', 1, str(data['origin_id']),
                        str(data['alert_code']['name']),
                        str(data['severity']), str(data['message']),
                        str(data['timestamp'])]
            actual = [document['doc_type'], document['n_alerts'],
                      document['alerts'][0]['origin'],
                      document['alerts'][0]['alert_name'],
                      document['alerts'][0]['severity'],
                      document['alerts'][0]['message'],
                      document['alerts'][0]['timestamp']]
            self.assertListEqual(expected, actual)
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @parameterized.expand([
        ("self.alert_data_1", ),
        ("self.alert_data_2", ),
        ("self.alert_data_3", ),
    ])
    @mock.patch("src.data_store.stores.store.RabbitMQApi.basic_ack",
                autospec=True)
    @mock.patch("src.data_store.stores.alert.AlertStore._process_mongo_store",
                autospec=True)
    @mock.patch("src.data_store.stores.store.Store._send_heartbeat",
                autospec=True)
    def test_process_data_results_stores_in_redis_correctly(
            self, mock_system_data, mock_send_hb, mock_process_mongo_store,
            mock_ack) -> None:
        mock_ack.return_value = None
        try:
            self.test_store._initialise_rabbitmq()

            data = eval(mock_system_data)
            blocking_channel = self.test_store.rabbitmq.channel
            method_chains = pika.spec.Basic.Deliver(
                routing_key=ALERT_STORE_INPUT_ROUTING_KEY)
            properties = pika.spec.BasicProperties()
            self.test_store._process_data(blocking_channel, method_chains,
                                          properties,
json.dumps(data).encode()) mock_process_mongo_store.assert_called_once() mock_ack.assert_called_once() mock_send_hb.assert_called_once() key = data['origin_id'] stored_data = self.redis.hget( Keys.get_hash_parent(data['parent_id']), eval('Keys.get_alert_{}(key)'.format(data['metric']))) expected_data = { 'severity': data['severity'], 'message': data['message'] } self.assertEqual(expected_data, json.loads(stored_data)) except Exception as e: self.fail("Test failed: {}".format(e))