def test_send_data(self, config: Dict[str, Any]):
    """Check that _send_data publishes the given config on the CONFIG exchange.

    A throw-away queue is bound to the exchange with the test routing key;
    after publishing, the queue must hold exactly one message whose JSON body
    equals the original config dict.
    """
    routing_key = "test.route"
    queue_name = "hb_test"
    try:
        self.test_config_manager._initialise_rabbitmq()
        connect_to_rabbit(self.rabbitmq)
        declare_res = self.rabbitmq.queue_declare(
            queue=queue_name, durable=True, exclusive=False,
            auto_delete=False, passive=False)
        self.assertEqual(0, declare_res.method.message_count)
        self.rabbitmq.queue_bind(queue_name, CONFIG_EXCHANGE, routing_key)
        self.test_config_manager._send_data(copy.deepcopy(config),
                                            routing_key)

        # A passive re-declare returns the queue's current message count.
        declare_res = self.rabbitmq.queue_declare(
            queue=queue_name, durable=True, exclusive=False,
            auto_delete=False, passive=True)
        self.assertEqual(1, declare_res.method.message_count)

        # The delivered payload must round-trip back to the original config.
        _, _, body = self.rabbitmq.basic_get(queue_name)
        self.assertDictEqual(config, json.loads(body))
    finally:
        delete_queue_if_exists(self.rabbitmq, queue_name)
        disconnect_from_rabbit(self.test_config_manager._rabbitmq)
        disconnect_from_rabbit(self.test_config_manager._heartbeat_rabbit)
def tearDown(self) -> None:
    """Delete rabbit artefacts shared across tests and drop all fixtures."""
    rabbitmq = self.test_telegram_commands_handler.rabbitmq
    connect_to_rabbit(rabbitmq)
    delete_queue_if_exists(rabbitmq, self.test_rabbit_queue_name)
    delete_queue_if_exists(
        rabbitmq,
        self.test_telegram_commands_handler
        ._telegram_commands_handler_queue)
    delete_exchange_if_exists(rabbitmq, HEALTH_CHECK_EXCHANGE)
    disconnect_from_rabbit(rabbitmq)

    self.dummy_logger = None
    self.test_channel_logger = None
    self.test_command_handlers_logger = None
    self.rabbitmq = None
    self.cmd_handlers_rabbit = None
    self.test_alert = None
    self.test_channel = None
    self.test_api = None
    self.redis = None
    self.mongo = None
    # Stop the telegram updater thread before discarding the handler,
    # otherwise it keeps running in the background between tests.
    self.test_telegram_commands_handler._updater.stop()
    self.test_telegram_command_handlers = None
    self.test_telegram_commands_handler = None
def test__process_ping_sends_valid_hb(self, mock_is_alive: MagicMock):
    """A ping processed by _process_ping must yield one valid heartbeat.

    The heartbeat is captured on a temporary queue bound to
    HEALTH_CHECK_EXCHANGE with the 'heartbeat.*' pattern and compared
    field-for-field against the expected payload.
    """
    mock_is_alive.return_value = True
    expected_output = {
        'component_name': self.CONFIG_MANAGER_NAME,
        'is_alive': True,
        'timestamp': datetime(year=1997, month=8, day=15, hour=10,
                              minute=21, second=33,
                              microsecond=30).timestamp()
    }
    queue_name = "hb_test"
    try:
        connect_to_rabbit(self.rabbitmq)
        self.rabbitmq.exchange_declare(HEALTH_CHECK_EXCHANGE, "topic",
                                       False, True, False, False)
        res = self.rabbitmq.queue_declare(
            queue=queue_name, durable=True, exclusive=False,
            auto_delete=False, passive=False)
        self.assertEqual(0, res.method.message_count)
        self.rabbitmq.queue_bind(queue_name, HEALTH_CHECK_EXCHANGE,
                                 "heartbeat.*")
        self.test_config_manager._initialise_rabbitmq()

        channel = self.test_config_manager._rabbitmq.channel
        deliver = pika.spec.Basic.Deliver(routing_key="ping")
        props = pika.spec.BasicProperties()
        self.test_config_manager._process_ping(channel, deliver, props,
                                               b"ping")

        # A passive re-declare returns the queue's current message count.
        res = self.rabbitmq.queue_declare(
            queue=queue_name, durable=True, exclusive=False,
            auto_delete=False, passive=True)
        self.assertEqual(1, res.method.message_count)

        # The queued message must be exactly the expected heartbeat.
        _, _, body = self.rabbitmq.basic_get(queue_name)
        self.assertDictEqual(expected_output, json.loads(body))
    finally:
        delete_queue_if_exists(self.rabbitmq, queue_name)
        delete_exchange_if_exists(self.rabbitmq, HEALTH_CHECK_EXCHANGE)
        disconnect_from_rabbit(self.test_config_manager._rabbitmq)
        disconnect_from_rabbit(self.test_config_manager._heartbeat_rabbit)
def test_initialise_rabbitmq_initialises_rabbit_correctly(self) -> None:
    """Verify _initialise_rabbitmq opens the connection, enables delivery
    confirmation, declares/binds the handler's queue and starts consuming.
    """
    try:
        # To make sure that there is no connection/channel already
        # established
        self.assertIsNone(self.rabbitmq.connection)
        self.assertIsNone(self.rabbitmq.channel)

        # To make sure that the exchanges and queues have not already been
        # declared
        connect_to_rabbit(self.rabbitmq)
        self.test_telegram_commands_handler.rabbitmq.queue_delete(
            self.test_telegram_commands_handler.
            _telegram_commands_handler_queue)
        self.test_telegram_commands_handler.rabbitmq.exchange_delete(
            HEALTH_CHECK_EXCHANGE)
        disconnect_from_rabbit(self.rabbitmq)

        self.test_telegram_commands_handler._initialise_rabbitmq()

        # Perform checks that the connection has been opened and marked as
        # open, that the delivery confirmation variable is set and basic_qos
        # called successfully.
        self.assertTrue(
            self.test_telegram_commands_handler.rabbitmq.is_connected)
        self.assertTrue(self.test_telegram_commands_handler.rabbitmq.
                        connection.is_open)
        self.assertTrue(self.test_telegram_commands_handler.rabbitmq.
                        channel._delivery_confirmation)

        # Check whether the consuming exchanges and queues have been
        # created by sending messages with the same routing keys as for the
        # bindings. We will also check if the size of the queues is 0 to
        # confirm that basic_consume was called (it will store the msg in
        # the component memory immediately). If one of the exchanges or
        # queues is not created or basic_consume is not called, then either
        # an exception will be thrown or the queue size would be 1
        # respectively. Note when deleting the exchanges in the beginning we
        # also released every binding, hence there are no other queue binded
        # with the same routing key to any exchange at this point.
        self.test_telegram_commands_handler.rabbitmq.basic_publish_confirm(
            exchange=HEALTH_CHECK_EXCHANGE,
            routing_key=TCH_INPUT_ROUTING_KEY, body=self.test_data_str,
            is_body_dict=False,
            properties=pika.BasicProperties(delivery_mode=2),
            mandatory=True)
        # Re-declare queue to get the number of messages
        res = self.test_telegram_commands_handler.rabbitmq.queue_declare(
            self.test_telegram_commands_handler.
            _telegram_commands_handler_queue, False, True, False, False)
        self.assertEqual(0, res.method.message_count)
    except Exception as e:
        self.fail("Test failed: {}".format(e))
def tearDown(self) -> None:
    """Remove the rabbit artefacts created by the tests and clear fixtures."""
    connect_to_rabbit(self.rabbitmq)
    delete_queue_if_exists(self.rabbitmq, DATA_STORE_MAN_INPUT_QUEUE)
    delete_exchange_if_exists(self.rabbitmq, HEALTH_CHECK_EXCHANGE)
    disconnect_from_rabbit(self.rabbitmq)

    connect_to_rabbit(self.test_rabbit_manager)
    delete_queue_if_exists(self.test_rabbit_manager, self.test_queue_name)
    disconnect_from_rabbit(self.test_rabbit_manager)

    # Drop every fixture reference so each test starts from a clean slate.
    for attr in ('dummy_logger', 'dummy_process',
                 'connection_check_time_interval', 'rabbitmq',
                 'test_rabbit_manager'):
        setattr(self, attr, None)
def tearDown(self) -> None:
    """Tear down rabbit/redis state used by the config-store tests."""
    connect_to_rabbit(self.rabbitmq)
    delete_queue_if_exists(self.rabbitmq, STORE_CONFIGS_QUEUE_NAME)
    for exchange in (CONFIG_EXCHANGE, HEALTH_CHECK_EXCHANGE):
        delete_exchange_if_exists(self.rabbitmq, exchange)
    disconnect_from_rabbit(self.rabbitmq)

    connect_to_rabbit(self.test_rabbit_manager)
    delete_queue_if_exists(self.test_rabbit_manager, self.test_queue_name)
    disconnect_from_rabbit(self.test_rabbit_manager)

    # Wipe redis before dropping the reference so no state leaks across tests.
    self.redis.delete_all_unsafe()
    self.redis = None
    self.dummy_logger = None
    self.connection_check_time_interval = None
    self.rabbitmq = None
    self.test_rabbit_manager = None
def tearDown(self) -> None:
    """Flush every queue and exchange used by these tests, then reset state."""
    connect_to_rabbit(self.rabbitmq)
    delete_queue_if_exists(self.rabbitmq, CONFIG_PING_QUEUE)
    delete_exchange_if_exists(self.rabbitmq, CONFIG_EXCHANGE)
    delete_exchange_if_exists(self.rabbitmq, HEALTH_CHECK_EXCHANGE)
    disconnect_from_rabbit(self.rabbitmq)

    # Detach the manager's rabbit interfaces before discarding the manager.
    self.rabbitmq = None
    self.test_config_manager._rabbitmq = None
    self.test_config_manager._heartbeat_rabbit = None
    self.test_config_manager = None
def setUp(self) -> None:
    """Create the StoreManager under test together with the rabbit
    connections, exchange/queue bindings and dummy fixtures it needs.
    """
    self.dummy_logger = logging.getLogger('Dummy')
    self.dummy_logger.disabled = True
    # Zero interval so connection liveness is re-checked on every use.
    self.connection_check_time_interval = timedelta(seconds=0)
    self.rabbitmq = RabbitMQApi(
        self.dummy_logger, env.RABBIT_IP,
        connection_check_time_interval=self.connection_check_time_interval)
    # Separate connection used by the tests to observe published messages.
    self.test_rabbit_manager = RabbitMQApi(
        self.dummy_logger, env.RABBIT_IP,
        connection_check_time_interval=self.connection_check_time_interval)
    self.manager_name = 'test_store_manager'
    self.routing_key = 'heartbeat.manager'
    self.test_queue_name = 'test queue'
    self.test_store_manager = StoreManager(self.dummy_logger,
                                           self.manager_name,
                                           self.rabbitmq)
    # Adding dummy process
    self.dummy_process = Process(target=infinite_fn, args=())
    self.dummy_process.daemon = True
    connect_to_rabbit(self.rabbitmq)
    connect_to_rabbit(self.test_rabbit_manager)
    self.rabbitmq.exchange_declare(HEALTH_CHECK_EXCHANGE, 'topic', False,
                                   True, False, False)
    self.rabbitmq.queue_declare(DATA_STORE_MAN_INPUT_QUEUE, False, True,
                                False, False)
    self.test_rabbit_manager.queue_declare(self.test_queue_name, False,
                                           True, False, False)
    self.rabbitmq.queue_bind(DATA_STORE_MAN_INPUT_QUEUE,
                             HEALTH_CHECK_EXCHANGE,
                             DATA_STORE_MAN_INPUT_ROUTING_KEY)
    self.test_data_str = 'test data'
    self.test_heartbeat = {
        'component_name': self.manager_name,
        'is_alive': True,
        'timestamp': datetime(2012, 1, 1).timestamp(),
    }
    self.test_exception = PANICException('test_exception', 1)
def tearDown(self) -> None:
    """Tear down rabbit, redis and mongo state used by the alert-store tests."""
    connect_to_rabbit(self.rabbitmq)
    delete_queue_if_exists(self.rabbitmq, ALERT_STORE_INPUT_QUEUE)
    for exchange in (STORE_EXCHANGE, HEALTH_CHECK_EXCHANGE):
        delete_exchange_if_exists(self.rabbitmq, exchange)
    disconnect_from_rabbit(self.rabbitmq)

    connect_to_rabbit(self.test_rabbit_manager)
    delete_queue_if_exists(self.test_rabbit_manager, self.test_queue_name)
    disconnect_from_rabbit(self.test_rabbit_manager)

    self.dummy_logger = None
    self.connection_check_time_interval = None
    self.rabbitmq = None
    self.test_rabbit_manager = None
    # Wipe backing stores before dropping their references.
    self.redis.delete_all_unsafe()
    self.redis = None
    self.mongo.drop_collection(self.parent_id)
    self.mongo = None
    self.test_store = None
def tearDown(self) -> None:
    """Delete shared rabbit artefacts and drop every fixture reference."""
    rabbitmq = self.test_console_alerts_handler.rabbitmq
    connect_to_rabbit(rabbitmq)
    delete_queue_if_exists(rabbitmq, self.test_rabbit_queue_name)
    delete_queue_if_exists(
        rabbitmq,
        self.test_console_alerts_handler._console_alerts_handler_queue)
    for exchange in (HEALTH_CHECK_EXCHANGE, ALERT_EXCHANGE):
        delete_exchange_if_exists(rabbitmq, exchange)
    disconnect_from_rabbit(rabbitmq)

    self.dummy_logger = None
    self.test_channel_logger = None
    self.rabbitmq = None
    self.test_channel = None
    self.test_console_alerts_handler = None
    self.test_alert = None
def test__initialise_rabbit_initialises_queues(
        self, queue_to_check: str, mock_basic_qos: MagicMock,
        mock_basic_consume: MagicMock, mock_confirm_delivery: MagicMock):
    """Ensure _initialise_rabbitmq declares the expected queue and wires
    up qos / consume / delivery-confirmation on the channel.
    """
    mock_confirm_delivery.return_value = None
    mock_basic_consume.return_value = None
    try:
        connect_to_rabbit(self.rabbitmq)
        # This is a critical function, so it is exercised on its own here.
        self.test_config_manager._initialise_rabbitmq()
        mock_confirm_delivery.assert_called()
        mock_basic_consume.assert_called_once()
        mock_basic_qos.assert_called_once()
        # A passive declare raises if the queue does not already exist.
        self.rabbitmq.queue_declare(queue_to_check, passive=True)
    except pika.exceptions.ConnectionClosedByBroker:
        self.fail("Queue {} was not declared".format(queue_to_check))
    finally:
        disconnect_from_rabbit(self.test_config_manager._rabbitmq)
        disconnect_from_rabbit(self.test_config_manager._heartbeat_rabbit)
def setUp(self) -> None:
    """Build the ConfigStore under test plus rabbit/redis connections and
    a large set of sample config payloads (nodes, repos, alerts, systems
    and channel configurations) used across the test cases.
    """
    self.dummy_logger = logging.getLogger('Dummy')
    self.dummy_logger.disabled = True
    # Zero interval so connection liveness is re-checked on every use.
    self.connection_check_time_interval = timedelta(seconds=0)
    self.rabbit_ip = env.RABBIT_IP
    self.rabbitmq = RabbitMQApi(
        self.dummy_logger, self.rabbit_ip,
        connection_check_time_interval=self.connection_check_time_interval)
    # Separate connection used by the tests to observe published messages.
    self.test_rabbit_manager = RabbitMQApi(
        self.dummy_logger, self.rabbit_ip,
        connection_check_time_interval=self.connection_check_time_interval)
    self.redis_db = env.REDIS_DB
    self.redis_host = env.REDIS_IP
    self.redis_port = env.REDIS_PORT
    self.redis_namespace = env.UNIQUE_ALERTER_IDENTIFIER
    self.redis = RedisApi(self.dummy_logger, self.redis_db,
                          self.redis_host, self.redis_port, '',
                          self.redis_namespace,
                          self.connection_check_time_interval)
    self.mongo_ip = env.DB_IP
    self.mongo_db = env.DB_NAME
    self.mongo_port = env.DB_PORT
    self.test_store_name = 'store name'
    self.test_store = ConfigStore(self.test_store_name, self.dummy_logger,
                                  self.rabbitmq)
    self.routing_key = 'heartbeat.worker'
    self.test_queue_name = 'test queue'
    connect_to_rabbit(self.rabbitmq)
    self.rabbitmq.exchange_declare(HEALTH_CHECK_EXCHANGE, 'topic', False,
                                   True, False, False)
    self.rabbitmq.exchange_declare(CONFIG_EXCHANGE, 'topic', False, True,
                                   False, False)
    self.rabbitmq.queue_declare(STORE_CONFIGS_QUEUE_NAME, False, True,
                                False, False)
    self.rabbitmq.queue_bind(STORE_CONFIGS_QUEUE_NAME, CONFIG_EXCHANGE,
                             STORE_CONFIGS_ROUTING_KEY_CHAINS)
    connect_to_rabbit(self.test_rabbit_manager)
    self.test_rabbit_manager.queue_declare(self.test_queue_name, False,
                                           True, False, False)
    self.test_rabbit_manager.queue_bind(self.test_queue_name,
                                        HEALTH_CHECK_EXCHANGE,
                                        self.routing_key)
    self.test_parent_id = 'parent_id'
    self.test_config_type = 'config_type'
    self.test_data_str = 'test data'
    self.test_exception = PANICException('test_exception', 1)
    self.last_monitored = datetime(2012, 1, 1).timestamp()
    # Routing keys covering every config category the store listens on.
    self.routing_key_1 = 'chains.cosmos.cosmos.nodes_config'
    self.routing_key_2 = 'chains.cosmos.cosmos.alerts_config'
    self.routing_key_3 = 'chains.cosmos.cosmos.repos_config'
    self.routing_key_4 = 'general.repos_config'
    self.routing_key_5 = 'general.alerts_config'
    self.routing_key_6 = 'general.systems_config'
    self.routing_key_7 = 'channels.email_config'
    self.routing_key_8 = 'channels.pagerduty_config'
    self.routing_key_9 = 'channels.opsgenie_config'
    self.routing_key_10 = 'channels.telegram_config'
    self.routing_key_11 = 'channels.twilio_config'
    # Sample chain nodes config (two sentry nodes).
    self.nodes_config_1 = {
        "node_3e0a5189-f474-4120-a0a4-d5ab817c0504": {
            "id": "node_3e0a5189-f474-4120-a0a4-d5ab817c0504",
            "parent_id": "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548",
            "name": "cosmos_sentry_1(46.166.146.165:9100)",
            "monitor_tendermint": "false",
            "monitor_rpc": "false",
            "monitor_prometheus": "false",
            "exporter_url": "http://46.166.146.165:9100/metrics",
            "monitor_system": "true",
            "is_validator": "false",
            "monitor_node": "true",
            "is_archive_node": "true",
            "use_as_data_source": "true"
        },
        "node_f8ebf267-9b53-4aa1-9c45-e84a9cba5fbc": {
            "id": "node_f8ebf267-9b53-4aa1-9c45-e84a9cba5fbc",
            "parent_id": "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548",
            "name": "cosmos_sentry_2(172.16.151.10:9100)",
            "monitor_tendermint": "false",
            "monitor_rpc": "false",
            "monitor_prometheus": "false",
            "exporter_url": "http://172.16.151.10:9100/metrics",
            "monitor_system": "true",
            "is_validator": "false",
            "monitor_node": "true",
            "is_archive_node": "true",
            "use_as_data_source": "true"
        }
    }
    # Sample chain repos config (two GitHub repos).
    self.repos_config_1 = {
        "repo_4ea76d87-d291-4b68-88af-da2bd1e16e2e": {
            "id": "repo_4ea76d87-d291-4b68-88af-da2bd1e16e2e",
            "parent_id": "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548",
            "repo_name": "tendermint/tendermint/",
            "monitor_repo": "true"
        },
        "repo_83713022-4155-420b-ada1-73a863f58282": {
            "id": "repo_83713022-4155-420b-ada1-73a863f58282",
            "parent_id": "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548",
            "repo_name": "SimplyVC/panic_cosmos/",
            "monitor_repo": "true"
        }
    }
    # Sample alerts config covering the five system metrics.
    self.alerts_config_1 = {
        "1": {
            "name": "open_file_descriptors",
            "enabled": "true",
            "parent_id": "GLOBAL",
            "critical_threshold": "95",
            "critical_repeat": "300",
            "critical_enabled": "true",
            "warning_threshold": "85",
            "warning_enabled": "true"
        },
        "2": {
            "name": "system_cpu_usage",
            "enabled": "true",
            "parent_id": "GLOBAL",
            "critical_threshold": "95",
            "critical_repeat": "300",
            "critical_enabled": "true",
            "warning_threshold": "85",
            "warning_enabled": "true"
        },
        "3": {
            "name": "system_storage_usage",
            "enabled": "true",
            "parent_id": "GLOBAL",
            "critical_threshold": "95",
            "critical_repeat": "300",
            "critical_enabled": "true",
            "warning_threshold": "85",
            "warning_enabled": "true"
        },
        "4": {
            "name": "system_ram_usage",
            "enabled": "true",
            "parent_id": "GLOBAL",
            "critical_threshold": "95",
            "critical_repeat": "300",
            "critical_enabled": "true",
            "warning_threshold": "85",
            "warning_enabled": "true"
        },
        "5": {
            "name": "system_is_down",
            "enabled": "true",
            "parent_id": "GLOBAL",
            "critical_threshold": "200",
            "critical_repeat": "300",
            "critical_enabled": "true",
            "warning_threshold": "0",
            "warning_enabled": "true"
        }
    }
    # Sample general systems config (two full nodes).
    self.systems_config_1 = {
        "system_1d026af1-6cab-403d-8256-c8faa462930a": {
            "id": "system_1d026af1-6cab-403d-8256-c8faa462930a",
            "parent_id": "GLOBAL",
            "name": "matic_full_node_nl(172.26.10.137:9100)",
            "exporter_url": "http://172.26.10.137:9100/metrics",
            "monitor_system": "true"
        },
        "system_a51b3a33-cb3f-4f53-a657-8a5a0efe0822": {
            "id": "system_a51b3a33-cb3f-4f53-a657-8a5a0efe0822",
            "parent_id": "GLOBAL",
            "name": "matic_full_node_mt(172.16.152.137:9100)",
            "exporter_url": "http://172.16.152.137:9100/metrics",
            "monitor_system": "true"
        }
    }
    # Sample channel configs (telegram/twilio/pagerduty/opsgenie/email).
    self.telegram_config_1 = {
        "telegram_8431a28e-a2ce-4e9b-839c-299b62e3d5b9": {
            "id": "telegram_8431a28e-a2ce-4e9b-839c-299b62e3d5b9",
            "channel_name": "telegram_chat_1",
            "bot_token": "1277777773:AAF-78AENtsYXxxdqTL3Ip987N7gmIKJaBE",
            "chat_id": "-759538717",
            "info": "true",
            "warning": "true",
            "critical": "true",
            "error": "true",
            "alerts": "false",
            "commands": "false",
            "parent_ids":
                "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548,chain_name_94aafe04-8287-463a-8416-0401852b3ca2,GLOBAL",
            "parent_names": "cosmos,kusama,GLOBAL"
        }
    }
    self.twilio_config_1 = {
        "twilio_a7016a6b-9394-4584-abe3-5a5c434b6b7c": {
            "id": "twilio_a7016a6b-9394-4584-abe3-5a5c434b6b7c",
            "channel_name": "twilio_caller_main",
            "account_sid": "ACb77777284e97e49eb2260aada0220e12",
            "auth_token": "d19f777777a0b8e274470d599e5bcc5e8",
            "twilio_phone_no": "+19893077770",
            "twilio_phone_numbers_to_dial_valid": "+35697777380",
            "parent_ids":
                "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548,chain_name_94aafe04-8287-463a-8416-0401852b3ca2,GLOBAL",
            "parent_names": "cosmos,kusama,GLOBAL"
        }
    }
    self.pagerduty_config_1 = {
        "pagerduty_4092d0ed-ac45-462b-b62a-89cffd4833cc": {
            "id": "pagerduty_4092d0ed-ac45-462b-b62a-89cffd4833cc",
            "channel_name": "pager_duty_1",
            "api_token": "meVp_vyQybcX7dA3o1fS",
            "integration_key": "4a520ce3577777ad89a3518096f3a5189",
            "info": "true",
            "warning": "true",
            "critical": "true",
            "error": "true",
            "parent_ids":
                "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548,chain_name_94aafe04-8287-463a-8416-0401852b3ca2,GLOBAL",
            "parent_names": "cosmos,kusama,GLOBAL"
        }
    }
    self.opsgenie_config_1 = {
        "opsgenie_9550bee1-5880-41f6-bdcf-a289472d7c35": {
            "id": "opsgenie_9550bee1-5880-41f6-bdcf-a289472d7c35",
            "channel_name": "ops_genie_main",
            "api_token": "77777777-0708-4b7e-a46f-496c85fa0b06",
            "eu": "true",
            "info": "true",
            "warning": "true",
            "critical": "true",
            "error": "true",
            "parent_ids":
                "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548,chain_name_94aafe04-8287-463a-8416-0401852b3ca2,GLOBAL",
            "parent_names": "cosmos,kusama,GLOBAL"
        }
    }
    self.email_config_1 = {
        "email_01b23d79-10f5-4815-a11f-034f53974b23": {
            "id": "email_01b23d79-10f5-4815-a11f-034f53974b23",
            "channel_name": "main_email_channel",
            "port": "25",
            "smtp": "exchange.olive.com",
            "email_from": "*****@*****.**",
            "emails_to": "*****@*****.**",
            "info": "true",
            "warning": "true",
            "critical": "true",
            "error": "true",
            "parent_ids":
                "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548,chain_name_94aafe04-8287-463a-8416-0401852b3ca2,GLOBAL",
            "parent_names": "cosmos,kusama,GLOBAL"
        }
    }
    # Payload whose top-level key the store does not recognise.
    self.config_data_unexpected = {"unexpected": {}}
def setUp(self) -> None:
    """Build the GithubStore under test plus rabbit/redis connections and
    sample GitHub monitor payloads (results, error, malformed).
    """
    self.dummy_logger = logging.getLogger('Dummy')
    self.dummy_logger.disabled = True
    # Zero interval so connection liveness is re-checked on every use.
    self.connection_check_time_interval = timedelta(seconds=0)
    self.rabbit_ip = env.RABBIT_IP
    self.rabbitmq = RabbitMQApi(
        self.dummy_logger, self.rabbit_ip,
        connection_check_time_interval=self.connection_check_time_interval)
    # Separate connection used by the tests to observe published messages.
    self.test_rabbit_manager = RabbitMQApi(
        self.dummy_logger, self.rabbit_ip,
        connection_check_time_interval=self.connection_check_time_interval)
    self.redis_db = env.REDIS_DB
    self.redis_host = env.REDIS_IP
    self.redis_port = env.REDIS_PORT
    self.redis_namespace = env.UNIQUE_ALERTER_IDENTIFIER
    self.redis = RedisApi(self.dummy_logger, self.redis_db,
                          self.redis_host, self.redis_port, '',
                          self.redis_namespace,
                          self.connection_check_time_interval)
    self.mongo_ip = env.DB_IP
    self.mongo_db = env.DB_NAME
    self.mongo_port = env.DB_PORT
    self.test_store_name = 'store name'
    self.test_store = GithubStore(self.test_store_name, self.dummy_logger,
                                  self.rabbitmq)
    self.routing_key = 'heartbeat.worker'
    self.test_queue_name = 'test queue'
    connect_to_rabbit(self.rabbitmq)
    self.rabbitmq.exchange_declare(HEALTH_CHECK_EXCHANGE, 'topic', False,
                                   True, False, False)
    self.rabbitmq.exchange_declare(STORE_EXCHANGE, 'direct', False, True,
                                   False, False)
    self.rabbitmq.queue_declare(GITHUB_STORE_INPUT_QUEUE, False, True,
                                False, False)
    self.rabbitmq.queue_bind(GITHUB_STORE_INPUT_QUEUE, STORE_EXCHANGE,
                             GITHUB_STORE_INPUT_ROUTING_KEY)
    connect_to_rabbit(self.test_rabbit_manager)
    self.test_rabbit_manager.queue_declare(self.test_queue_name, False,
                                           True, False, False)
    self.test_rabbit_manager.queue_bind(self.test_queue_name,
                                        HEALTH_CHECK_EXCHANGE,
                                        self.routing_key)
    self.test_data_str = 'test data'
    self.test_exception = PANICException('test_exception', 1)
    # Identifiers for two monitored repositories.
    self.repo_name = 'simplyvc/panic/'
    self.repo_id = 'test_repo_id'
    self.parent_id = 'test_parent_id'
    self.repo_name_2 = 'simplyvc/panic_oasis/'
    self.repo_id_2 = 'test_repo_id_2'
    self.parent_id_2 = 'test_parent_id_2'
    self.last_monitored = datetime(2012, 1, 1).timestamp()
    # Result payload: release count increased (5 from 4).
    self.github_data_1 = {
        "result": {
            "meta_data": {
                "repo_name": self.repo_name,
                "repo_id": self.repo_id,
                "repo_parent_id": self.parent_id,
                "last_monitored": self.last_monitored
            },
            "data": {
                "no_of_releases": {
                    "current": 5,
                    "previous": 4,
                }
            }
        }
    }
    # Result payload: release count unchanged.
    self.github_data_2 = {
        "result": {
            "meta_data": {
                "repo_name": self.repo_name,
                "repo_id": self.repo_id,
                "repo_parent_id": self.parent_id,
                "last_monitored": self.last_monitored
            },
            "data": {
                "no_of_releases": {
                    "current": 5,
                    "previous": 5,
                }
            }
        }
    }
    # Result payload for the second repository.
    self.github_data_3 = {
        "result": {
            "meta_data": {
                "repo_name": self.repo_name_2,
                "repo_id": self.repo_id_2,
                "repo_parent_id": self.parent_id_2,
                "last_monitored": self.last_monitored
            },
            "data": {
                "no_of_releases": {
                    "current": 8,
                    "previous": 1,
                }
            }
        }
    }
    # Error payload as produced when monitoring fails.
    self.github_data_error = {
        "error": {
            "meta_data": {
                "repo_name": self.repo_name,
                "repo_id": self.repo_id,
                "repo_parent_id": self.parent_id,
                "time": self.last_monitored
            },
            "code": "5006",
            "message": "error message"
        }
    }
    # Malformed payload: expected keys missing/renamed to trigger KeyError.
    self.github_data_key_error = {
        "result": {
            "data": {
                "repo_name": self.repo_name_2,
                "repo_id": self.repo_id_2,
                "repo_parent_id": self.parent_id_2,
                "last_monitored": self.last_monitored
            },
            "wrong_data": {
                "no_of_releases": {
                    "current": 8,
                    "previous": 1,
                }
            }
        }
    }
    # Payload whose top-level key the store does not recognise.
    self.github_data_unexpected = {"unexpected": {}}
def setUp(self) -> None:
    """Build the AlertStore under test plus rabbit/redis/mongo connections
    and three sample alerts (warning, critical, info) with malformed
    variants.
    """
    self.dummy_logger = logging.getLogger('Dummy')
    self.dummy_logger.disabled = True
    # Zero interval so connection liveness is re-checked on every use.
    self.connection_check_time_interval = timedelta(seconds=0)
    self.rabbit_ip = env.RABBIT_IP
    self.rabbitmq = RabbitMQApi(
        self.dummy_logger, self.rabbit_ip,
        connection_check_time_interval=self.connection_check_time_interval)
    # Separate connection used by the tests to observe published messages.
    self.test_rabbit_manager = RabbitMQApi(
        self.dummy_logger, self.rabbit_ip,
        connection_check_time_interval=self.connection_check_time_interval)
    self.mongo_ip = env.DB_IP
    self.mongo_db = env.DB_NAME
    self.mongo_port = env.DB_PORT
    self.mongo = MongoApi(logger=self.dummy_logger.getChild(
        MongoApi.__name__), db_name=self.mongo_db, host=self.mongo_ip,
        port=self.mongo_port)
    self.redis_db = env.REDIS_DB
    self.redis_host = env.REDIS_IP
    self.redis_port = env.REDIS_PORT
    self.redis_namespace = env.UNIQUE_ALERTER_IDENTIFIER
    self.redis = RedisApi(self.dummy_logger, self.redis_db,
                          self.redis_host, self.redis_port, '',
                          self.redis_namespace,
                          self.connection_check_time_interval)
    self.test_store_name = 'store name'
    self.test_store = AlertStore(self.test_store_name, self.dummy_logger,
                                 self.rabbitmq)
    self.routing_key = 'heartbeat.worker'
    self.test_queue_name = 'test queue'
    connect_to_rabbit(self.rabbitmq)
    self.rabbitmq.exchange_declare(HEALTH_CHECK_EXCHANGE, 'topic', False,
                                   True, False, False)
    self.rabbitmq.exchange_declare(STORE_EXCHANGE, 'direct', False, True,
                                   False, False)
    self.rabbitmq.queue_declare(ALERT_STORE_INPUT_QUEUE, False, True,
                                False, False)
    self.rabbitmq.queue_bind(ALERT_STORE_INPUT_QUEUE, STORE_EXCHANGE,
                             ALERT_STORE_INPUT_ROUTING_KEY)
    connect_to_rabbit(self.test_rabbit_manager)
    self.test_rabbit_manager.queue_declare(self.test_queue_name, False,
                                           True, False, False)
    self.test_rabbit_manager.queue_bind(self.test_queue_name,
                                        HEALTH_CHECK_EXCHANGE,
                                        self.routing_key)
    self.test_data_str = 'test data'
    self.test_exception = PANICException('test_exception', 1)
    # Fields for three sample alerts of differing severities.
    self.parent_id = 'test_parent_id'
    self.alert_id = 'test_alert_id'
    self.origin_id = 'test_origin_id'
    self.alert_name = 'test_alert'
    self.metric = 'system_is_down'
    self.severity = 'warning'
    self.message = 'alert message'
    self.value = 'alert_code_1'
    self.alert_id_2 = 'test_alert_id_2'
    self.origin_id_2 = 'test_origin_id_2'
    self.alert_name_2 = 'test_alert_2'
    self.severity_2 = 'critical'
    self.message_2 = 'alert message 2'
    self.value_2 = 'alert_code_2'
    self.alert_id_3 = 'test_alert_id_3'
    self.origin_id_3 = 'test_origin_id_3'
    self.alert_name_3 = 'test_alert_3'
    self.severity_3 = 'info'
    self.message_3 = 'alert message 3'
    self.value_3 = 'alert_code_3'
    self.last_monitored = datetime(2012, 1, 1).timestamp()
    self.none = None
    # Warning alert payload.
    self.alert_data_1 = {
        'parent_id': self.parent_id,
        'origin_id': self.origin_id,
        'alert_code': {
            'name': self.alert_name,
            'value': self.value,
        },
        'severity': self.severity,
        'metric': self.metric,
        'message': self.message,
        'timestamp': self.last_monitored,
    }
    # Critical alert payload.
    self.alert_data_2 = {
        'parent_id': self.parent_id,
        'origin_id': self.origin_id_2,
        'alert_code': {
            'name': self.alert_name_2,
            'value': self.value_2,
        },
        'severity': self.severity_2,
        'metric': self.metric,
        'message': self.message_2,
        'timestamp': self.last_monitored,
    }
    # Info alert payload.
    self.alert_data_3 = {
        'parent_id': self.parent_id,
        'origin_id': self.origin_id_3,
        'alert_code': {
            'name': self.alert_name_3,
            'value': self.value_3,
        },
        'severity': self.severity_3,
        'metric': self.metric,
        'message': self.message_3,
        'timestamp': self.last_monitored,
    }
    # Malformed payloads used to exercise the store's error handling.
    self.alert_data_key_error = {"result": {"data": {}, "data2": {}}}
    self.alert_data_unexpected = {"unexpected": {}}