Example #1
def save_github_repo_to_redis(redis: RedisApi, github_repo: GitHubRepo) -> None:
    redis_hash = Keys.get_hash_parent(github_repo.parent_id)
    repo_id = github_repo.repo_id
    redis.hset_multiple(redis_hash, {
        Keys.get_github_no_of_releases(repo_id): github_repo.no_of_releases,
        Keys.get_github_last_monitored(repo_id): github_repo.last_monitored,
    })
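The written fields can be read back individually from the same parent hash. A minimal read-back sketch, assuming the RedisApi.hget signature used in Example #8 below (the function name load_github_last_monitored is illustrative, not from the repo):

def load_github_last_monitored(redis: RedisApi, github_repo: GitHubRepo):
    # hget with an explicit default mirrors the usage in Example #8; the
    # returned value is raw bytes and may need decoding by the caller.
    redis_hash = Keys.get_hash_parent(github_repo.parent_id)
    return redis.hget(redis_hash,
                      Keys.get_github_last_monitored(github_repo.repo_id),
                      default=None)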
Example #2
def save_system_to_redis(redis: RedisApi, system: System) -> None:
    redis_hash = Keys.get_hash_parent(system.parent_id)
    system_id = system.system_id
    redis.hset_multiple(redis_hash, {
        Keys.get_system_process_cpu_seconds_total(system_id):
            system.process_cpu_seconds_total,
        Keys.get_system_process_memory_usage(system_id):
            system.process_memory_usage,
        Keys.get_system_virtual_memory_usage(system_id):
            system.virtual_memory_usage,
        Keys.get_system_open_file_descriptors(system_id):
            system.open_file_descriptors,
        Keys.get_system_system_cpu_usage(system_id):
            system.system_cpu_usage,
        Keys.get_system_system_ram_usage(system_id):
            system.system_ram_usage,
        Keys.get_system_system_storage_usage(system_id):
            system.system_storage_usage,
        Keys.get_system_network_transmit_bytes_per_second(system_id):
            system.network_transmit_bytes_per_second,
        Keys.get_system_network_receive_bytes_per_second(system_id):
            system.network_receive_bytes_per_second,
        Keys.get_system_network_transmit_bytes_total(system_id):
            system.network_transmit_bytes_total,
        Keys.get_system_network_receive_bytes_total(system_id):
            system.network_receive_bytes_total,
        Keys.get_system_disk_io_time_seconds_in_interval(system_id):
            system.disk_io_time_seconds_in_interval,
        Keys.get_system_disk_io_time_seconds_total(system_id):
            system.disk_io_time_seconds_total,
        Keys.get_system_last_monitored(system_id): system.last_monitored,
        Keys.get_system_went_down_at(system_id): system.went_down_at,
    })
Example #3
    def setUp(self) -> None:
        self.dummy_logger = logging.getLogger('Dummy')
        self.dummy_logger.disabled = True
        self.transformer_display_name = 'Test Data Transformer'
        self.transformer_module_name = 'TestDataTransformer'
        self.connection_check_time_interval = timedelta(seconds=0)
        self.rabbit_ip = env.RABBIT_IP
        self.rabbitmq = RabbitMQApi(
            self.dummy_logger,
            self.rabbit_ip,
            connection_check_time_interval=self.connection_check_time_interval)
        self.redis_db = env.REDIS_DB
        self.redis_host = env.REDIS_IP
        self.redis_port = env.REDIS_PORT
        self.redis_namespace = env.UNIQUE_ALERTER_IDENTIFIER
        self.redis = RedisApi(self.dummy_logger, self.redis_db,
                              self.redis_host, self.redis_port, '',
                              self.redis_namespace,
                              self.connection_check_time_interval)
        self.github_dt_name = 'test_github_data_transformer'
        self.github_dt_publishing_queue_size = 1000
        self.publishing_queue_github_dt = Queue(
            self.github_dt_publishing_queue_size)
        self.test_github_dt = GitHubDataTransformer(
            self.github_dt_name, self.dummy_logger, self.redis, self.rabbitmq,
            self.github_dt_publishing_queue_size)
        self.test_github_dt._publishing_queue = self.publishing_queue_github_dt
        self.system_dt_name = 'test_system_data_transformer'
        self.system_dt_publishing_queue_size = 1001
        self.publishing_queue_system_dt = Queue(
            self.system_dt_publishing_queue_size)
        self.test_system_dt = SystemDataTransformer(
            self.system_dt_name, self.dummy_logger, self.redis, self.rabbitmq,
            self.system_dt_publishing_queue_size)
        self.test_system_dt._publishing_queue = self.publishing_queue_system_dt
Example #4
def _initialise_component_redis(component_display_name: str,
                                component_logger: logging.Logger) -> RedisApi:
    # Try initialising the Redis API until successful. This is done
    # separately to avoid cases where Redis creation fails and we then
    # attempt to use it.
    while True:
        try:
            redis_db = env.REDIS_DB
            redis_port = env.REDIS_PORT
            redis_host = env.REDIS_IP
            unique_alerter_identifier = env.UNIQUE_ALERTER_IDENTIFIER

            redis = RedisApi(logger=component_logger.getChild(
                RedisApi.__name__),
                             db=redis_db,
                             host=redis_host,
                             port=redis_port,
                             namespace=unique_alerter_identifier)
            break
        except Exception as e:
            msg = get_initialisation_error_message(component_display_name, e)
            log_and_print(msg, component_logger)
            # sleep before trying again
            time.sleep(RE_INITIALISE_SLEEPING_PERIOD)

    return redis
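Examples #4, #5 and #7 share the same retry-until-successful initialisation pattern. A generic, self-contained sketch of that pattern (the helper name is illustrative, and the sleeping period is an assumed value; the real constant lives in the repo):

import logging
import time

RE_INITIALISE_SLEEPING_PERIOD = 10  # assumed value


def initialise_until_successful(factory, logger: logging.Logger):
    # Keep calling the factory until it returns without raising, sleeping
    # between attempts so that transient failures do not busy-loop.
    while True:
        try:
            return factory()
        except Exception as e:
            logger.exception(e)
            time.sleep(RE_INITIALISE_SLEEPING_PERIOD)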
Example #5
def _initialise_telegram_commands_handler(
        bot_token: str, bot_chat_id: str, channel_id: str, channel_name: str,
        associated_chains: Dict) -> TelegramCommandsHandler:
    # Handler display name based on channel name
    handler_display_name = TELEGRAM_COMMANDS_HANDLER_NAME_TEMPLATE.format(
        channel_name)
    handler_logger = _initialise_channel_handler_logger(
        handler_display_name, TelegramCommandsHandler.__name__)

    # Try initialising handler until successful
    while True:
        try:
            telegram_bot = TelegramBotApi(bot_token, bot_chat_id)

            telegram_channel = TelegramChannel(
                channel_name, channel_id,
                handler_logger.getChild(TelegramChannel.__name__),
                telegram_bot)

            cmd_handlers_logger = handler_logger.getChild(
                TelegramCommandHandlers.__name__)
            cmd_handlers_rabbitmq = RabbitMQApi(
                logger=cmd_handlers_logger.getChild(RabbitMQApi.__name__),
                host=env.RABBIT_IP)
            cmd_handlers_redis = RedisApi(
                logger=cmd_handlers_logger.getChild(RedisApi.__name__),
                host=env.REDIS_IP,
                db=env.REDIS_DB,
                port=env.REDIS_PORT,
                namespace=env.UNIQUE_ALERTER_IDENTIFIER)
            cmd_handlers_mongo = MongoApi(logger=cmd_handlers_logger.getChild(
                MongoApi.__name__),
                                          host=env.DB_IP,
                                          db_name=env.DB_NAME,
                                          port=env.DB_PORT)

            cmd_handlers = TelegramCommandHandlers(
                TELEGRAM_COMMAND_HANDLERS_NAME, cmd_handlers_logger,
                associated_chains, telegram_channel, cmd_handlers_rabbitmq,
                cmd_handlers_redis, cmd_handlers_mongo)
            handler_rabbitmq = RabbitMQApi(logger=handler_logger.getChild(
                RabbitMQApi.__name__),
                                           host=env.RABBIT_IP)

            telegram_commands_handler = TelegramCommandsHandler(
                handler_display_name, handler_logger, handler_rabbitmq,
                telegram_channel, cmd_handlers)
            log_and_print(
                "Successfully initialised {}".format(handler_display_name),
                handler_logger)
            break
        except Exception as e:
            msg = get_initialisation_error_message(handler_display_name, e)
            log_and_print(msg, handler_logger)
            # sleep before trying again
            time.sleep(RE_INITIALISE_SLEEPING_PERIOD)

    return telegram_commands_handler
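An illustrative call of this initialiser (all argument values are placeholders, and the final start() call is an assumption about how the surrounding manager uses the returned handler):

handler = _initialise_telegram_commands_handler(
    bot_token='<bot_token>', bot_chat_id='<bot_chat_id>',
    channel_id='telegram_8431a28e', channel_name='telegram_chat_1',
    associated_chains={'chain_name_7f4bc842': 'cosmos'})
handler.start()  # assumed entry point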
Example #6
    def __init__(self, name: str, logger: Logger, rabbit_ip: str,
                 redis_ip: str, redis_db: int, redis_port: int,
                 unique_alerter_identifier: str, enable_console_alerts: bool,
                 enable_log_alerts: bool):

        self._name = name
        self._redis = RedisApi(logger.getChild(RedisApi.__name__),
                               host=redis_ip,
                               db=redis_db,
                               port=redis_port,
                               namespace=unique_alerter_identifier)
        self._enable_console_alerts = enable_console_alerts
        self._enable_log_alerts = enable_log_alerts

        self._config = {}

        super().__init__(
            logger,
            RabbitMQApi(logger=logger.getChild(RabbitMQApi.__name__),
                        host=rabbit_ip),
            env.ALERT_ROUTER_PUBLISHING_QUEUE_SIZE)
Example #7
def _initialise_transformer_redis(
        transformer_name: str, transformer_logger: logging.Logger) -> RedisApi:
    # Try initialising the Redis API until successful. This is done
    # separately to avoid cases where Redis creation fails and we then
    # attempt to use it.
    while True:
        try:
            redis = RedisApi(logger=transformer_logger.getChild(
                RedisApi.__name__),
                             db=env.REDIS_DB,
                             host=env.REDIS_IP,
                             port=env.REDIS_PORT,
                             namespace=env.UNIQUE_ALERTER_IDENTIFIER)
            break
        except Exception as e:
            msg = get_initialisation_error_message(transformer_name, e)
            log_and_print(msg, transformer_logger)
            # sleep before trying again
            time.sleep(RE_INITIALISE_SLEEPING_PERIOD)

    return redis
Example #8
class AlertRouter(QueuingPublisherSubscriberComponent):
    def __init__(self, name: str, logger: Logger, rabbit_ip: str,
                 redis_ip: str, redis_db: int, redis_port: int,
                 unique_alerter_identifier: str, enable_console_alerts: bool,
                 enable_log_alerts: bool):

        self._name = name
        self._redis = RedisApi(logger.getChild(RedisApi.__name__),
                               host=redis_ip,
                               db=redis_db,
                               port=redis_port,
                               namespace=unique_alerter_identifier)
        self._enable_console_alerts = enable_console_alerts
        self._enable_log_alerts = enable_log_alerts

        self._config = {}

        super().__init__(
            logger,
            RabbitMQApi(logger=logger.getChild(RabbitMQApi.__name__),
                        host=rabbit_ip),
            env.ALERT_ROUTER_PUBLISHING_QUEUE_SIZE)

    def __str__(self) -> str:
        return self.name

    @property
    def name(self) -> str:
        return self._name

    def _initialise_rabbitmq(self) -> None:
        """
        Initialises the rabbit connection and the exchanges needed
        :return: None
        """
        self._rabbitmq.connect_till_successful()
        self._logger.info("Setting delivery confirmation on RabbitMQ channel")
        self._rabbitmq.confirm_delivery()

        # Pre-fetch count is set to a fifth of the maximum queue size
        prefetch_count = round(self._publishing_queue.maxsize / 5)
        self._rabbitmq.basic_qos(prefetch_count=prefetch_count)

        self._declare_exchange_and_bind_queue(ALERT_ROUTER_CONFIGS_QUEUE_NAME,
                                              CONFIG_EXCHANGE, 'topic',
                                              'channels.*')
        self._rabbitmq.basic_consume(queue=ALERT_ROUTER_CONFIGS_QUEUE_NAME,
                                     on_message_callback=self._process_configs,
                                     auto_ack=False,
                                     exclusive=False,
                                     consumer_tag=None)

        self._declare_exchange_and_bind_queue(_ALERT_ROUTER_INPUT_QUEUE_NAME,
                                              ALERT_EXCHANGE, 'topic',
                                              'alert_router.*')
        self._rabbitmq.basic_consume(queue=_ALERT_ROUTER_INPUT_QUEUE_NAME,
                                     on_message_callback=self._process_alert,
                                     auto_ack=False,
                                     exclusive=False,
                                     consumer_tag=None)

        # Declare store exchange just in case it hasn't been declared
        # yet
        self._rabbitmq.exchange_declare(exchange=STORE_EXCHANGE,
                                        exchange_type='direct',
                                        passive=False,
                                        durable=True,
                                        auto_delete=False,
                                        internal=False)

        self._declare_exchange_and_bind_queue(_HEARTBEAT_QUEUE_NAME,
                                              HEALTH_CHECK_EXCHANGE, 'topic',
                                              'ping')

        self._logger.debug("Declaring consuming intentions")
        self._rabbitmq.basic_consume(_HEARTBEAT_QUEUE_NAME, self._process_ping,
                                     True, False, None)

    def _declare_exchange_and_bind_queue(self, queue_name: str,
                                         exchange_name: str,
                                         exchange_type: str,
                                         routing_key: str) -> None:
        """
        Declares the specified exchange and queue, and binds the queue to the
        exchange
        :param queue_name: The queue to declare and bind to the exchange
        :param exchange_name: The exchange to declare and bind the queue to
        :param exchange_type: The type of the exchange, e.g. 'topic'
        :param routing_key: The routing key with which to bind the queue
        :return: None
        """
        self._logger.info("Creating %s exchange", exchange_name)
        self._rabbitmq.exchange_declare(exchange_name, exchange_type, False,
                                        True, False, False)
        self._logger.info("Creating and binding queue for %s exchange",
                          exchange_name)
        self._logger.debug("Creating queue %s", queue_name)
        self._rabbitmq.queue_declare(queue_name, False, True, False, False)
        self._logger.debug("Binding queue %s to %s exchange", queue_name,
                           exchange_name)
        self._rabbitmq.queue_bind(queue_name, exchange_name, routing_key)

    def _process_configs(self, ch: BlockingChannel,
                         method: pika.spec.Basic.Deliver,
                         properties: pika.spec.BasicProperties,
                         body: bytes) -> None:

        recv_config = ConfigParser()
        recv_config.read_dict(json.loads(body))
        config_filename = method.routing_key

        self._logger.info("Received a new configuration from %s",
                          config_filename)
        self._logger.debug("recv_config = %s", recv_config)

        previous_config = self._config.get(config_filename, None)
        self._config[config_filename] = {}

        # Only take from the config if it is not empty
        if recv_config:
            # Taking what we need, and checking types
            try:
                for key in recv_config.sections():
                    self._config[config_filename][key] = self.extract_config(
                        recv_config[key], config_filename)
            except (NoOptionError, NoSectionError,
                    MissingKeyInConfigException) as missing_error:
                self._logger.error(
                    "The configuration file %s is missing some configs",
                    config_filename)
                self._logger.error(missing_error.message)
                self._logger.warning(
                    "The previous configuration will be used instead")
                self._config[config_filename] = previous_config
            except Exception as e:
                self._logger.error("Encountered an error when reading the "
                                   "configuration files")
                self._logger.exception(e)
                self._logger.warning(
                    "The previous configuration will be used instead")
                self._config[config_filename] = previous_config
            self._logger.debug(self._config)

        self._rabbitmq.basic_ack(method.delivery_tag, False)
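
    # Illustrative shape of self._config after processing (derived from
    # extract_config below; keys and values abbreviated):
    # {
    #     'channels.telegram_config': {
    #         'telegram_8431a28e-...': {
    #             'id': 'telegram_8431a28e-...',
    #             'parent_ids': ['chain_name_7f4bc842-...', ...],
    #             'info': True, 'warning': True,
    #             'error': True, 'critical': True,
    #         },
    #     },
    # }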

    def _process_alert(self, ch: BlockingChannel,
                       method: pika.spec.Basic.Deliver,
                       properties: pika.spec.BasicProperties,
                       body: bytes) -> None:
        recv_alert: Dict = {}
        has_error: bool = False
        send_to_ids: List[str] = []
        try:
            # Placed in try-except in case of malformed JSON
            recv_alert = json.loads(body)

            if recv_alert and 'severity' in recv_alert:
                self._logger.debug("Received an alert to route")
                self._logger.debug("recv_alert = %s", recv_alert)
                # Where to route this alert to

                self._logger.debug("Checking if alert is muted")
                is_all_muted = self.is_all_muted(recv_alert.get('severity'))
                is_chain_severity_muted = self.is_chain_severity_muted(
                    recv_alert.get('parent_id'), recv_alert.get('severity'))

                if is_all_muted or is_chain_severity_muted:
                    self._logger.info("This alert has been muted")
                    self._logger.info(
                        "is_all_muted=%s, is_chain_severity_muted=%s",
                        is_all_muted, is_chain_severity_muted)
                else:
                    self._logger.info("Obtaining list of channels to alert")
                    self._logger.info([
                        channel.get('id')
                        for channel_type in self._config.values()
                        for channel in channel_type.values()
                    ])
                    send_to_ids = [
                        channel.get('id')
                        for channel_type in self._config.values()
                        for channel in channel_type.values()
                        if channel.get(recv_alert.get('severity').lower())
                        and recv_alert.get('parent_id') in channel.get(
                            'parent_ids')
                    ]

                    self._logger.debug("send_to_ids = %s", send_to_ids)
        except JSONDecodeError as json_e:
            self._logger.error("Alert was not a valid JSON object")
            self._logger.exception(json_e)
            has_error = True
        except Exception as e:
            self._logger.error("Error when processing alert: %s", recv_alert)
            self._logger.exception(e)
            has_error = True

        self._rabbitmq.basic_ack(method.delivery_tag, False)

        if not has_error:
            # This will be empty if the alert was muted
            for channel_id in send_to_ids:
                send_alert: Dict = {**recv_alert, 'destination_id': channel_id}

                self._logger.debug("Queuing %s to be sent to %s", send_alert,
                                   channel_id)

                self._push_to_queue(send_alert,
                                    ALERT_EXCHANGE,
                                    "channel.{}".format(channel_id),
                                    mandatory=False)
                self._logger.debug(_ROUTED_ALERT_QUEUED_LOG_MESSAGE)

            # Enqueue once to the console
            if self._enable_console_alerts:
                self._logger.debug("Queuing %s to be sent to console",
                                   recv_alert)
                self._push_to_queue({
                    **recv_alert, 'destination_id': "console"
                },
                                    ALERT_EXCHANGE,
                                    "channel.console",
                                    mandatory=True)
                self._logger.debug(_ROUTED_ALERT_QUEUED_LOG_MESSAGE)

            if self._enable_log_alerts:
                self._logger.debug("Queuing %s to be sent to the alerts log",
                                   recv_alert)

                self._push_to_queue({
                    **recv_alert, 'destination_id': "log"
                },
                                    ALERT_EXCHANGE,
                                    "channel.log",
                                    mandatory=True)
                self._logger.debug(_ROUTED_ALERT_QUEUED_LOG_MESSAGE)

            # Enqueue once to the data store
            self._push_to_queue(recv_alert,
                                STORE_EXCHANGE,
                                "alert",
                                mandatory=True)

            self._logger.debug("Alert routed successfully")

        # Send any data waiting in the publishing queue
        try:
            self._send_data()
        except MessageWasNotDeliveredException as e:
            # Log the exception but do not raise it, as the message is still
            # residing in the publishing queue.
            self._logger.exception(e)

    def _send_heartbeat(self, data_to_send: dict) -> None:
        self._rabbitmq.basic_publish_confirm(
            exchange=HEALTH_CHECK_EXCHANGE,
            routing_key='heartbeat.worker',
            body=data_to_send,
            is_body_dict=True,
            properties=pika.BasicProperties(delivery_mode=2),
            mandatory=True)
        self._logger.debug("Sent heartbeat to %s exchange",
                           HEALTH_CHECK_EXCHANGE)

    def _process_ping(self, ch: BlockingChannel,
                      method: pika.spec.Basic.Deliver,
                      properties: pika.spec.BasicProperties,
                      body: bytes) -> None:

        self._logger.debug("Received %s. Let's pong", body)
        try:
            heartbeat = {
                'component_name': self.name,
                'is_alive': True,
                'timestamp': datetime.now().timestamp(),
            }

            self._send_heartbeat(heartbeat)
        except MessageWasNotDeliveredException as e:
            # Log the exception but do not raise it, as heartbeats must be
            # real-time
            self._logger.error("Problem sending heartbeat")
            self._logger.exception(e)

    def start(self) -> None:
        log_and_print("{} started.".format(self), self._logger)
        self._initialise_rabbitmq()
        while True:
            try:
                # Before listening for new data, send any data waiting in the
                # publishing queue. If the message is not routed, start
                # consuming and perform the sending later.
                try:
                    self._send_data()
                except MessageWasNotDeliveredException as e:
                    self._logger.exception(e)

                self._listen_for_data()
            except (pika.exceptions.AMQPConnectionError,
                    pika.exceptions.AMQPChannelError) as e:
                # If we have either a channel error or a connection error,
                # the channel is reset, so we need to re-initialise the
                # connection or channel settings
                raise e
            except Exception as e:
                self._logger.exception(e)
                raise e

    def _listen_for_data(self) -> None:
        self._logger.info("Starting the alert router listeners")
        self._rabbitmq.start_consuming()

    def _on_terminate(self, signum: int, stack: FrameType) -> None:
        log_and_print(
            "{} is terminating. Connections with RabbitMQ will be "
            "closed, and afterwards the process will exit.".format(self),
            self._logger)
        self.disconnect_from_rabbit()
        log_and_print("{} terminated.".format(self), self._logger)
        sys.exit()

    def is_all_muted(self, severity: str) -> bool:
        self._logger.debug("Getting mute_all key")
        alerter_mute_key = Keys.get_alerter_mute()

        self._logger.debug("Getting severities mute status")
        severities_muted = json.loads(
            self._redis.get(alerter_mute_key, default=b"{}"))
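        # severities_muted is e.g. {'WARNING': True, 'CRITICAL': False}
        # (shape inferred from the lookup below)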
        return bool(severities_muted.get(severity, False))

    def is_chain_severity_muted(self, parent_id: str, severity: str) -> bool:
        self._logger.debug("Getting chain mute key")
        mute_alerts_key = Keys.get_chain_mute_alerts()

        self._logger.debug("Getting chain hashes")
        chain_hash = Keys.get_hash_parent(parent_id)

        self._logger.debug("Getting severities mute status")
        severities_muted = json.loads(
            self._redis.hget(chain_hash, mute_alerts_key, default=b"{}"))

        return bool(severities_muted.get(severity, False))

    @staticmethod
    def extract_config(section: SectionProxy,
                       config_filename: str) -> Dict[str, str]:
        AlertRouter.validate_config_fields_existence(section, config_filename)

        if "twilio" in config_filename:
            return {
                'id': section.get('id'),
                'parent_ids': [
                    x for x in section.get('parent_ids').split(",")
                    if x.strip()
                ],
                'info': False,
                'warning': False,
                'error': False,
                'critical': True,
            }

        return {
            'id': section.get('id'),
            'parent_ids': [
                x for x in section.get('parent_ids').split(",") if x.strip()
            ],
            'info': section.getboolean('info'),
            'warning': section.getboolean('warning'),
            'error': section.getboolean('error'),
            'critical': section.getboolean('critical'),
        }

    @staticmethod
    def validate_config_fields_existence(section: SectionProxy,
                                         config_filename: str) -> None:
        keys_expected = {'id', 'parent_ids'}
        if 'twilio' not in config_filename:
            keys_expected |= {'info', 'warning', 'error', 'critical'}
        for key in keys_expected:
            if key not in section:
                raise MissingKeyInConfigException(key, config_filename)
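A hypothetical instantiation of the router, wiring the constructor parameters to the environment values seen throughout these examples (the display name and the boolean flags are placeholders; the real entry point lives elsewhere in the repo):

alert_router = AlertRouter(
    'Alert Router', logging.getLogger('alert_router'), env.RABBIT_IP,
    env.REDIS_IP, env.REDIS_DB, env.REDIS_PORT,
    env.UNIQUE_ALERTER_IDENTIFIER, enable_console_alerts=True,
    enable_log_alerts=True)
alert_router.start()  # consumes configs and alerts until terminated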
Example #9
    def setUp(self) -> None:
        self.test_handler_name = 'test_telegram_commands_handler'
        self.dummy_logger = logging.getLogger('Dummy')
        self.dummy_logger.disabled = True
        self.connection_check_time_interval = timedelta(seconds=0)
        self.rabbit_ip = env.RABBIT_IP
        self.rabbitmq = RabbitMQApi(
            self.dummy_logger,
            self.rabbit_ip,
            connection_check_time_interval=self.connection_check_time_interval)
        self.test_channel_name = 'test_telegram_channel'
        self.test_channel_id = 'test_telegram_id12345'
        self.test_channel_logger = self.dummy_logger.getChild('dummy_channel')
        self.test_bot_token = '1234567891:ABC-67ABCrfZFdddqRT5Gh837T2rtUFHgTY'
        self.test_bot_chat_id = 'test_bot_chat_id'
        self.test_base_url = \
            "https://api.telegram.org/bot" + self.test_bot_token
        self.test_api = TelegramBotApi(self.test_bot_token,
                                       self.test_bot_chat_id)
        self.test_channel = TelegramChannel(self.test_channel_name,
                                            self.test_channel_id,
                                            self.test_channel_logger,
                                            self.test_api)
        self.cmd_handlers_rabbit = RabbitMQApi(
            logger=self.dummy_logger.getChild(RabbitMQApi.__name__),
            host=self.rabbit_ip,
            connection_check_time_interval=self.connection_check_time_interval)
        self.redis = RedisApi(logger=self.dummy_logger.getChild(
            RedisApi.__name__),
                              host=env.REDIS_IP,
                              db=env.REDIS_DB,
                              port=env.REDIS_PORT,
                              namespace=env.UNIQUE_ALERTER_IDENTIFIER)
        self.mongo = MongoApi(logger=self.dummy_logger.getChild(
            MongoApi.__name__),
                              host=env.DB_IP,
                              db_name=env.DB_NAME,
                              port=env.DB_PORT)
        self.test_command_handlers_logger = self.dummy_logger.getChild(
            'command_handlers')
        self.test_chain_1 = 'Kusama'
        self.test_chain_2 = 'Cosmos'
        self.test_chain_3 = 'Test_Chain'
        self.test_chain1_id = 'kusama1234'
        self.test_chain2_id = 'cosmos1234'
        self.test_chain3_id = 'test_chain11123'
        self.test_associated_chains = {
            self.test_chain1_id: self.test_chain_1,
            self.test_chain2_id: self.test_chain_2,
            self.test_chain3_id: self.test_chain_3
        }
        self.test_telegram_command_handlers = TelegramCommandHandlers(
            self.test_handler_name, self.test_command_handlers_logger,
            self.test_associated_chains, self.test_channel,
            self.cmd_handlers_rabbit, self.redis, self.mongo)
        self.test_telegram_commands_handler = TelegramCommandsHandler(
            self.test_handler_name, self.dummy_logger, self.rabbitmq,
            self.test_channel, self.test_telegram_command_handlers)
        self.test_data_str = "this is a test string"
        self.test_rabbit_queue_name = 'Test Queue'
        self.test_timestamp = 45676565.556
        self.test_heartbeat = {
            'component_name': 'Test Component',
            'timestamp': self.test_timestamp,
        }
        self.test_system_name = 'test_system'
        self.test_percentage_usage = 50
        self.test_panic_severity = 'WARNING'
        self.test_parent_id = 'parent_1234'
        self.test_system_id = 'system_id32423'
        self.test_alert = OpenFileDescriptorsIncreasedAboveThresholdAlert(
            self.test_system_name, self.test_percentage_usage,
            self.test_panic_severity, self.test_timestamp,
            self.test_panic_severity, self.test_parent_id, self.test_system_id)
Example #10
    def setUp(self) -> None:
        self.dummy_logger = logging.getLogger('Dummy')
        self.dummy_logger.disabled = True
        self.connection_check_time_interval = timedelta(seconds=0)
        self.rabbit_ip = env.RABBIT_IP
        self.rabbitmq = RabbitMQApi(
            self.dummy_logger,
            self.rabbit_ip,
            connection_check_time_interval=self.connection_check_time_interval)

        self.test_rabbit_manager = RabbitMQApi(
            self.dummy_logger,
            self.rabbit_ip,
            connection_check_time_interval=self.connection_check_time_interval)

        self.redis_db = env.REDIS_DB
        self.redis_host = env.REDIS_IP
        self.redis_port = env.REDIS_PORT
        self.redis_namespace = env.UNIQUE_ALERTER_IDENTIFIER
        self.redis = RedisApi(self.dummy_logger, self.redis_db,
                              self.redis_host, self.redis_port, '',
                              self.redis_namespace,
                              self.connection_check_time_interval)

        self.mongo_ip = env.DB_IP
        self.mongo_db = env.DB_NAME
        self.mongo_port = env.DB_PORT

        self.test_store_name = 'store name'
        self.test_store = ConfigStore(self.test_store_name, self.dummy_logger,
                                      self.rabbitmq)

        self.routing_key = 'heartbeat.worker'
        self.test_queue_name = 'test queue'

        connect_to_rabbit(self.rabbitmq)
        self.rabbitmq.exchange_declare(HEALTH_CHECK_EXCHANGE, 'topic', False,
                                       True, False, False)
        self.rabbitmq.exchange_declare(CONFIG_EXCHANGE, 'topic', False, True,
                                       False, False)
        self.rabbitmq.queue_declare(STORE_CONFIGS_QUEUE_NAME, False, True,
                                    False, False)
        self.rabbitmq.queue_bind(STORE_CONFIGS_QUEUE_NAME, CONFIG_EXCHANGE,
                                 STORE_CONFIGS_ROUTING_KEY_CHAINS)

        connect_to_rabbit(self.test_rabbit_manager)
        self.test_rabbit_manager.queue_declare(self.test_queue_name, False,
                                               True, False, False)
        self.test_rabbit_manager.queue_bind(self.test_queue_name,
                                            HEALTH_CHECK_EXCHANGE,
                                            self.routing_key)

        self.test_parent_id = 'parent_id'
        self.test_config_type = 'config_type'

        self.test_data_str = 'test data'
        self.test_exception = PANICException('test_exception', 1)

        self.last_monitored = datetime(2012, 1, 1).timestamp()

        self.routing_key_1 = 'chains.cosmos.cosmos.nodes_config'
        self.routing_key_2 = 'chains.cosmos.cosmos.alerts_config'
        self.routing_key_3 = 'chains.cosmos.cosmos.repos_config'

        self.routing_key_4 = 'general.repos_config'
        self.routing_key_5 = 'general.alerts_config'
        self.routing_key_6 = 'general.systems_config'

        self.routing_key_7 = 'channels.email_config'
        self.routing_key_8 = 'channels.pagerduty_config'
        self.routing_key_9 = 'channels.opsgenie_config'
        self.routing_key_10 = 'channels.telegram_config'
        self.routing_key_11 = 'channels.twilio_config'

        self.nodes_config_1 = {
            "node_3e0a5189-f474-4120-a0a4-d5ab817c0504": {
                "id": "node_3e0a5189-f474-4120-a0a4-d5ab817c0504",
                "parent_id": "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548",
                "name": "cosmos_sentry_1(46.166.146.165:9100)",
                "monitor_tendermint": "false",
                "monitor_rpc": "false",
                "monitor_prometheus": "false",
                "exporter_url": "http://46.166.146.165:9100/metrics",
                "monitor_system": "true",
                "is_validator": "false",
                "monitor_node": "true",
                "is_archive_node": "true",
                "use_as_data_source": "true"
            },
            "node_f8ebf267-9b53-4aa1-9c45-e84a9cba5fbc": {
                "id": "node_f8ebf267-9b53-4aa1-9c45-e84a9cba5fbc",
                "parent_id": "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548",
                "name": "cosmos_sentry_2(172.16.151.10:9100)",
                "monitor_tendermint": "false",
                "monitor_rpc": "false",
                "monitor_prometheus": "false",
                "exporter_url": "http://172.16.151.10:9100/metrics",
                "monitor_system": "true",
                "is_validator": "false",
                "monitor_node": "true",
                "is_archive_node": "true",
                "use_as_data_source": "true"
            }
        }

        self.repos_config_1 = {
            "repo_4ea76d87-d291-4b68-88af-da2bd1e16e2e": {
                "id": "repo_4ea76d87-d291-4b68-88af-da2bd1e16e2e",
                "parent_id": "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548",
                "repo_name": "tendermint/tendermint/",
                "monitor_repo": "true"
            },
            "repo_83713022-4155-420b-ada1-73a863f58282": {
                "id": "repo_83713022-4155-420b-ada1-73a863f58282",
                "parent_id": "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548",
                "repo_name": "SimplyVC/panic_cosmos/",
                "monitor_repo": "true"
            }
        }

        self.alerts_config_1 = {
            "1": {
                "name": "open_file_descriptors",
                "enabled": "true",
                "parent_id": "GLOBAL",
                "critical_threshold": "95",
                "critical_repeat": "300",
                "critical_enabled": "true",
                "warning_threshold": "85",
                "warning_enabled": "true"
            },
            "2": {
                "name": "system_cpu_usage",
                "enabled": "true",
                "parent_id": "GLOBAL",
                "critical_threshold": "95",
                "critical_repeat": "300",
                "critical_enabled": "true",
                "warning_threshold": "85",
                "warning_enabled": "true"
            },
            "3": {
                "name": "system_storage_usage",
                "enabled": "true",
                "parent_id": "GLOBAL",
                "critical_threshold": "95",
                "critical_repeat": "300",
                "critical_enabled": "true",
                "warning_threshold": "85",
                "warning_enabled": "true"
            },
            "4": {
                "name": "system_ram_usage",
                "enabled": "true",
                "parent_id": "GLOBAL",
                "critical_threshold": "95",
                "critical_repeat": "300",
                "critical_enabled": "true",
                "warning_threshold": "85",
                "warning_enabled": "true"
            },
            "5": {
                "name": "system_is_down",
                "enabled": "true",
                "parent_id": "GLOBAL",
                "critical_threshold": "200",
                "critical_repeat": "300",
                "critical_enabled": "true",
                "warning_threshold": "0",
                "warning_enabled": "true"
            }
        }

        self.systems_config_1 = {
            "system_1d026af1-6cab-403d-8256-c8faa462930a": {
                "id": "system_1d026af1-6cab-403d-8256-c8faa462930a",
                "parent_id": "GLOBAL",
                "name": "matic_full_node_nl(172.26.10.137:9100)",
                "exporter_url": "http://172.26.10.137:9100/metrics",
                "monitor_system": "true"
            },
            "system_a51b3a33-cb3f-4f53-a657-8a5a0efe0822": {
                "id": "system_a51b3a33-cb3f-4f53-a657-8a5a0efe0822",
                "parent_id": "GLOBAL",
                "name": "matic_full_node_mt(172.16.152.137:9100)",
                "exporter_url": "http://172.16.152.137:9100/metrics",
                "monitor_system": "true"
            }
        }

        self.telegram_config_1 = {
            "telegram_8431a28e-a2ce-4e9b-839c-299b62e3d5b9": {
                "id": "telegram_8431a28e-a2ce-4e9b-839c-299b62e3d5b9",
                "channel_name": "telegram_chat_1",
                "bot_token": "1277777773:AAF-78AENtsYXxxdqTL3Ip987N7gmIKJaBE",
                "chat_id": "-759538717",
                "info": "true",
                "warning": "true",
                "critical": "true",
                "error": "true",
                "alerts": "false",
                "commands": "false",
                "parent_ids":
                "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548,chain_name_94aafe04-8287-463a-8416-0401852b3ca2,GLOBAL",
                "parent_names": "cosmos,kusama,GLOBAL"
            }
        }

        self.twilio_config_1 = {
            "twilio_a7016a6b-9394-4584-abe3-5a5c434b6b7c": {
                "id": "twilio_a7016a6b-9394-4584-abe3-5a5c434b6b7c",
                "channel_name": "twilio_caller_main",
                "account_sid": "ACb77777284e97e49eb2260aada0220e12",
                "auth_token": "d19f777777a0b8e274470d599e5bcc5e8",
                "twilio_phone_no": "+19893077770",
                "twilio_phone_numbers_to_dial_valid": "+35697777380",
                "parent_ids":
                "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548,chain_name_94aafe04-8287-463a-8416-0401852b3ca2,GLOBAL",
                "parent_names": "cosmos,kusama,GLOBAL"
            }
        }

        self.pagerduty_config_1 = {
            "pagerduty_4092d0ed-ac45-462b-b62a-89cffd4833cc": {
                "id": "pagerduty_4092d0ed-ac45-462b-b62a-89cffd4833cc",
                "channel_name": "pager_duty_1",
                "api_token": "meVp_vyQybcX7dA3o1fS",
                "integration_key": "4a520ce3577777ad89a3518096f3a5189",
                "info": "true",
                "warning": "true",
                "critical": "true",
                "error": "true",
                "parent_ids":
                "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548,chain_name_94aafe04-8287-463a-8416-0401852b3ca2,GLOBAL",
                "parent_names": "cosmos,kusama,GLOBAL"
            }
        }

        self.opsgenie_config_1 = {
            "opsgenie_9550bee1-5880-41f6-bdcf-a289472d7c35": {
                "id": "opsgenie_9550bee1-5880-41f6-bdcf-a289472d7c35",
                "channel_name": "ops_genie_main",
                "api_token": "77777777-0708-4b7e-a46f-496c85fa0b06",
                "eu": "true",
                "info": "true",
                "warning": "true",
                "critical": "true",
                "error": "true",
                "parent_ids":
                "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548,chain_name_94aafe04-8287-463a-8416-0401852b3ca2,GLOBAL",
                "parent_names": "cosmos,kusama,GLOBAL"
            }
        }

        self.email_config_1 = {
            "email_01b23d79-10f5-4815-a11f-034f53974b23": {
                "id": "email_01b23d79-10f5-4815-a11f-034f53974b23",
                "channel_name": "main_email_channel",
                "port": "25",
                "smtp": "exchange.olive.com",
                "email_from": "*****@*****.**",
                "emails_to": "*****@*****.**",
                "info": "true",
                "warning": "true",
                "critical": "true",
                "error": "true",
                "parent_ids":
                "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548,chain_name_94aafe04-8287-463a-8416-0401852b3ca2,GLOBAL",
                "parent_names": "cosmos,kusama,GLOBAL"
            }
        }

        self.config_data_unexpected = {"unexpected": {}}
Example #11
class TestConfigStore(unittest.TestCase):
    def setUp(self) -> None:
        self.dummy_logger = logging.getLogger('Dummy')
        self.dummy_logger.disabled = True
        self.connection_check_time_interval = timedelta(seconds=0)
        self.rabbit_ip = env.RABBIT_IP
        self.rabbitmq = RabbitMQApi(
            self.dummy_logger,
            self.rabbit_ip,
            connection_check_time_interval=self.connection_check_time_interval)

        self.test_rabbit_manager = RabbitMQApi(
            self.dummy_logger,
            self.rabbit_ip,
            connection_check_time_interval=self.connection_check_time_interval)

        self.redis_db = env.REDIS_DB
        self.redis_host = env.REDIS_IP
        self.redis_port = env.REDIS_PORT
        self.redis_namespace = env.UNIQUE_ALERTER_IDENTIFIER
        self.redis = RedisApi(self.dummy_logger, self.redis_db,
                              self.redis_host, self.redis_port, '',
                              self.redis_namespace,
                              self.connection_check_time_interval)

        self.mongo_ip = env.DB_IP
        self.mongo_db = env.DB_NAME
        self.mongo_port = env.DB_PORT

        self.test_store_name = 'store name'
        self.test_store = ConfigStore(self.test_store_name, self.dummy_logger,
                                      self.rabbitmq)

        self.routing_key = 'heartbeat.worker'
        self.test_queue_name = 'test queue'

        connect_to_rabbit(self.rabbitmq)
        self.rabbitmq.exchange_declare(HEALTH_CHECK_EXCHANGE, 'topic', False,
                                       True, False, False)
        self.rabbitmq.exchange_declare(CONFIG_EXCHANGE, 'topic', False, True,
                                       False, False)
        self.rabbitmq.queue_declare(STORE_CONFIGS_QUEUE_NAME, False, True,
                                    False, False)
        self.rabbitmq.queue_bind(STORE_CONFIGS_QUEUE_NAME, CONFIG_EXCHANGE,
                                 STORE_CONFIGS_ROUTING_KEY_CHAINS)

        connect_to_rabbit(self.test_rabbit_manager)
        self.test_rabbit_manager.queue_declare(self.test_queue_name, False,
                                               True, False, False)
        self.test_rabbit_manager.queue_bind(self.test_queue_name,
                                            HEALTH_CHECK_EXCHANGE,
                                            self.routing_key)

        self.test_parent_id = 'parent_id'
        self.test_config_type = 'config_type'

        self.test_data_str = 'test data'
        self.test_exception = PANICException('test_exception', 1)

        self.last_monitored = datetime(2012, 1, 1).timestamp()

        self.routing_key_1 = 'chains.cosmos.cosmos.nodes_config'
        self.routing_key_2 = 'chains.cosmos.cosmos.alerts_config'
        self.routing_key_3 = 'chains.cosmos.cosmos.repos_config'

        self.routing_key_4 = 'general.repos_config'
        self.routing_key_5 = 'general.alerts_config'
        self.routing_key_6 = 'general.systems_config'

        self.routing_key_7 = 'channels.email_config'
        self.routing_key_8 = 'channels.pagerduty_config'
        self.routing_key_9 = 'channels.opsgenie_config'
        self.routing_key_10 = 'channels.telegram_config'
        self.routing_key_11 = 'channels.twilio_config'

        self.nodes_config_1 = {
            "node_3e0a5189-f474-4120-a0a4-d5ab817c0504": {
                "id": "node_3e0a5189-f474-4120-a0a4-d5ab817c0504",
                "parent_id": "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548",
                "name": "cosmos_sentry_1(46.166.146.165:9100)",
                "monitor_tendermint": "false",
                "monitor_rpc": "false",
                "monitor_prometheus": "false",
                "exporter_url": "http://46.166.146.165:9100/metrics",
                "monitor_system": "true",
                "is_validator": "false",
                "monitor_node": "true",
                "is_archive_node": "true",
                "use_as_data_source": "true"
            },
            "node_f8ebf267-9b53-4aa1-9c45-e84a9cba5fbc": {
                "id": "node_f8ebf267-9b53-4aa1-9c45-e84a9cba5fbc",
                "parent_id": "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548",
                "name": "cosmos_sentry_2(172.16.151.10:9100)",
                "monitor_tendermint": "false",
                "monitor_rpc": "false",
                "monitor_prometheus": "false",
                "exporter_url": "http://172.16.151.10:9100/metrics",
                "monitor_system": "true",
                "is_validator": "false",
                "monitor_node": "true",
                "is_archive_node": "true",
                "use_as_data_source": "true"
            }
        }

        self.repos_config_1 = {
            "repo_4ea76d87-d291-4b68-88af-da2bd1e16e2e": {
                "id": "repo_4ea76d87-d291-4b68-88af-da2bd1e16e2e",
                "parent_id": "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548",
                "repo_name": "tendermint/tendermint/",
                "monitor_repo": "true"
            },
            "repo_83713022-4155-420b-ada1-73a863f58282": {
                "id": "repo_83713022-4155-420b-ada1-73a863f58282",
                "parent_id": "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548",
                "repo_name": "SimplyVC/panic_cosmos/",
                "monitor_repo": "true"
            }
        }

        self.alerts_config_1 = {
            "1": {
                "name": "open_file_descriptors",
                "enabled": "true",
                "parent_id": "GLOBAL",
                "critical_threshold": "95",
                "critical_repeat": "300",
                "critical_enabled": "true",
                "warning_threshold": "85",
                "warning_enabled": "true"
            },
            "2": {
                "name": "system_cpu_usage",
                "enabled": "true",
                "parent_id": "GLOBAL",
                "critical_threshold": "95",
                "critical_repeat": "300",
                "critical_enabled": "true",
                "warning_threshold": "85",
                "warning_enabled": "true"
            },
            "3": {
                "name": "system_storage_usage",
                "enabled": "true",
                "parent_id": "GLOBAL",
                "critical_threshold": "95",
                "critical_repeat": "300",
                "critical_enabled": "true",
                "warning_threshold": "85",
                "warning_enabled": "true"
            },
            "4": {
                "name": "system_ram_usage",
                "enabled": "true",
                "parent_id": "GLOBAL",
                "critical_threshold": "95",
                "critical_repeat": "300",
                "critical_enabled": "true",
                "warning_threshold": "85",
                "warning_enabled": "true"
            },
            "5": {
                "name": "system_is_down",
                "enabled": "true",
                "parent_id": "GLOBAL",
                "critical_threshold": "200",
                "critical_repeat": "300",
                "critical_enabled": "true",
                "warning_threshold": "0",
                "warning_enabled": "true"
            }
        }

        self.systems_config_1 = {
            "system_1d026af1-6cab-403d-8256-c8faa462930a": {
                "id": "system_1d026af1-6cab-403d-8256-c8faa462930a",
                "parent_id": "GLOBAL",
                "name": "matic_full_node_nl(172.26.10.137:9100)",
                "exporter_url": "http://172.26.10.137:9100/metrics",
                "monitor_system": "true"
            },
            "system_a51b3a33-cb3f-4f53-a657-8a5a0efe0822": {
                "id": "system_a51b3a33-cb3f-4f53-a657-8a5a0efe0822",
                "parent_id": "GLOBAL",
                "name": "matic_full_node_mt(172.16.152.137:9100)",
                "exporter_url": "http://172.16.152.137:9100/metrics",
                "monitor_system": "true"
            }
        }

        self.telegram_config_1 = {
            "telegram_8431a28e-a2ce-4e9b-839c-299b62e3d5b9": {
                "id": "telegram_8431a28e-a2ce-4e9b-839c-299b62e3d5b9",
                "channel_name": "telegram_chat_1",
                "bot_token": "1277777773:AAF-78AENtsYXxxdqTL3Ip987N7gmIKJaBE",
                "chat_id": "-759538717",
                "info": "true",
                "warning": "true",
                "critical": "true",
                "error": "true",
                "alerts": "false",
                "commands": "false",
                "parent_ids":
                "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548,chain_name_94aafe04-8287-463a-8416-0401852b3ca2,GLOBAL",
                "parent_names": "cosmos,kusama,GLOBAL"
            }
        }

        self.twilio_config_1 = {
            "twilio_a7016a6b-9394-4584-abe3-5a5c434b6b7c": {
                "id": "twilio_a7016a6b-9394-4584-abe3-5a5c434b6b7c",
                "channel_name": "twilio_caller_main",
                "account_sid": "ACb77777284e97e49eb2260aada0220e12",
                "auth_token": "d19f777777a0b8e274470d599e5bcc5e8",
                "twilio_phone_no": "+19893077770",
                "twilio_phone_numbers_to_dial_valid": "+35697777380",
                "parent_ids":
                "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548,chain_name_94aafe04-8287-463a-8416-0401852b3ca2,GLOBAL",
                "parent_names": "cosmos,kusama,GLOBAL"
            }
        }

        self.pagerduty_config_1 = {
            "pagerduty_4092d0ed-ac45-462b-b62a-89cffd4833cc": {
                "id": "pagerduty_4092d0ed-ac45-462b-b62a-89cffd4833cc",
                "channel_name": "pager_duty_1",
                "api_token": "meVp_vyQybcX7dA3o1fS",
                "integration_key": "4a520ce3577777ad89a3518096f3a5189",
                "info": "true",
                "warning": "true",
                "critical": "true",
                "error": "true",
                "parent_ids":
                "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548,chain_name_94aafe04-8287-463a-8416-0401852b3ca2,GLOBAL",
                "parent_names": "cosmos,kusama,GLOBAL"
            }
        }

        self.opsgenie_config_1 = {
            "opsgenie_9550bee1-5880-41f6-bdcf-a289472d7c35": {
                "id": "opsgenie_9550bee1-5880-41f6-bdcf-a289472d7c35",
                "channel_name": "ops_genie_main",
                "api_token": "77777777-0708-4b7e-a46f-496c85fa0b06",
                "eu": "true",
                "info": "true",
                "warning": "true",
                "critical": "true",
                "error": "true",
                "parent_ids":
                "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548,chain_name_94aafe04-8287-463a-8416-0401852b3ca2,GLOBAL",
                "parent_names": "cosmos,kusama,GLOBAL"
            }
        }

        self.email_config_1 = {
            "email_01b23d79-10f5-4815-a11f-034f53974b23": {
                "id": "email_01b23d79-10f5-4815-a11f-034f53974b23",
                "channel_name": "main_email_channel",
                "port": "25",
                "smtp": "exchange.olive.com",
                "email_from": "*****@*****.**",
                "emails_to": "*****@*****.**",
                "info": "true",
                "warning": "true",
                "critical": "true",
                "error": "true",
                "parent_ids":
                "chain_name_7f4bc842-21b1-4bcb-8ab9-d86e08149548,chain_name_94aafe04-8287-463a-8416-0401852b3ca2,GLOBAL",
                "parent_names": "cosmos,kusama,GLOBAL"
            }
        }

        self.config_data_unexpected = {"unexpected": {}}

    def tearDown(self) -> None:
        connect_to_rabbit(self.rabbitmq)
        delete_queue_if_exists(self.rabbitmq, STORE_CONFIGS_QUEUE_NAME)
        delete_exchange_if_exists(self.rabbitmq, CONFIG_EXCHANGE)
        delete_exchange_if_exists(self.rabbitmq, HEALTH_CHECK_EXCHANGE)
        disconnect_from_rabbit(self.rabbitmq)

        connect_to_rabbit(self.test_rabbit_manager)
        delete_queue_if_exists(self.test_rabbit_manager, self.test_queue_name)
        disconnect_from_rabbit(self.test_rabbit_manager)

        self.redis.delete_all_unsafe()
        self.redis = None
        self.dummy_logger = None
        self.connection_check_time_interval = None
        self.rabbitmq = None
        self.test_rabbit_manager = None

    def test__str__returns_name_correctly(self) -> None:
        self.assertEqual(self.test_store_name, str(self.test_store))

    def test_name_property_returns_name_correctly(self) -> None:
        self.assertEqual(self.test_store_name, self.test_store.name)

    def test_mongo_ip_property_returns_mongo_ip_correctly(self) -> None:
        self.assertEqual(self.mongo_ip, self.test_store.mongo_ip)

    def test_mongo_db_property_returns_mongo_db_correctly(self) -> None:
        self.assertEqual(self.mongo_db, self.test_store.mongo_db)

    def test_mongo_port_property_returns_mongo_port_correctly(self) -> None:
        self.assertEqual(self.mongo_port, self.test_store.mongo_port)

    def test_redis_property_returns_redis_correctly(self) -> None:
        self.assertEqual(type(self.redis), type(self.test_store.redis))

    def test_mongo_property_returns_none_when_mongo_not_init(self) -> None:
        self.assertEqual(None, self.test_store.mongo)

    def test_initialise_rabbitmq_initialises_everything_as_expected(
            self) -> None:
        try:
            # To make sure that the exchanges have not already been declared
            self.rabbitmq.connect()
            self.rabbitmq.exchange_delete(HEALTH_CHECK_EXCHANGE)
            self.rabbitmq.exchange_delete(CONFIG_EXCHANGE)
            self.rabbitmq.disconnect()

            self.test_store._initialise_rabbitmq()

            # Check that the connection has been opened and marked as open,
            # and that the delivery confirmation variable is set.
            self.assertTrue(self.test_store.rabbitmq.is_connected)
            self.assertTrue(self.test_store.rabbitmq.connection.is_open)
            self.assertTrue(
                self.test_store.rabbitmq.channel._delivery_confirmation)

            # Check whether the producing exchanges have been created by
            # using passive=True. If this check fails an exception is raised
            # automatically.
            self.test_store.rabbitmq.exchange_declare(CONFIG_EXCHANGE,
                                                      passive=True)
            self.test_store.rabbitmq.exchange_declare(HEALTH_CHECK_EXCHANGE,
                                                      passive=True)

            # Check whether the exchange has been created by sending messages
            # to it. If this fails an exception is raised, hence the test fails.
            self.test_store.rabbitmq.basic_publish_confirm(
                exchange=HEALTH_CHECK_EXCHANGE,
                routing_key=self.routing_key,
                body=self.test_data_str,
                is_body_dict=False,
                properties=pika.BasicProperties(delivery_mode=2),
                mandatory=False)
            # Check whether the exchange has been created by sending messages
            # to it. If this fails an exception is raised, hence the test fails.
            self.test_store.rabbitmq.basic_publish_confirm(
                exchange=CONFIG_EXCHANGE,
                routing_key=STORE_CONFIGS_ROUTING_KEY_CHAINS,
                body=self.test_data_str,
                is_body_dict=False,
                properties=pika.BasicProperties(delivery_mode=2),
                mandatory=False)

            # Re-declare queue to get the number of messages
            res = self.test_store.rabbitmq.queue_declare(
                STORE_CONFIGS_QUEUE_NAME, False, True, False, False)

            self.assertEqual(1, res.method.message_count)
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @mock.patch("src.data_store.stores.store.RedisApi.hset", autospec=True)
    def test_process_redis_store_does_nothing_on_error_key(self,
                                                           mock_hset) -> None:
        self.test_store._process_redis_store(self.test_parent_id,
                                             self.config_data_unexpected)
        mock_hset.assert_not_called()

    @parameterized.expand([
        ("self.nodes_config_1", "self.routing_key_1"),
        ("self.alerts_config_1", "self.routing_key_2"),
        ("self.repos_config_1", "self.routing_key_3"),
        ("self.repos_config_1", "self.routing_key_4"),
        ("self.alerts_config_1", "self.routing_key_5"),
        ("self.systems_config_1", "self.routing_key_6"),
        ("self.email_config_1", "self.routing_key_7"),
        ("self.pagerduty_config_1", "self.routing_key_8"),
        ("self.opsgenie_config_1", "self.routing_key_9"),
        ("self.telegram_config_1", "self.routing_key_10"),
        ("self.twilio_config_1", "self.routing_key_11"),
    ])
    @mock.patch("src.data_store.stores.store.RabbitMQApi.basic_ack",
                autospec=True)
    @mock.patch("src.data_store.stores.store.Store._send_heartbeat",
                autospec=True)
    def test_process_data_saves_in_redis(self, mock_config_data,
                                         mock_routing_key, mock_send_hb,
                                         mock_ack) -> None:
        self.rabbitmq.connect()
        mock_ack.return_value = None
        try:
            data = eval(mock_config_data)
            routing_key = eval(mock_routing_key)

            self.test_store._initialise_rabbitmq()

            blocking_channel = self.test_store.rabbitmq.channel
            method_chains = pika.spec.Basic.Deliver(routing_key=routing_key)

            properties = pika.spec.BasicProperties()
            self.test_store._process_data(blocking_channel, method_chains,
                                          properties,
                                          json.dumps(data).encode())
            mock_ack.assert_called_once()
            mock_send_hb.assert_called_once()

            self.assertEqual(
                data,
                json.loads(
                    self.redis.get(
                        Keys.get_config(routing_key)).decode("utf-8")))

        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @freeze_time("2012-01-01")
    @mock.patch("src.data_store.stores.store.RabbitMQApi.basic_ack",
                autospec=True)
    @mock.patch(
        "src.data_store.stores.config.ConfigStore._process_redis_store",
        autospec=True)
    def test_process_data_sends_heartbeat_correctly(self,
                                                    mock_process_redis_store,
                                                    mock_basic_ack) -> None:

        mock_basic_ack.return_value = None
        try:
            self.test_rabbit_manager.connect()
            self.test_store._initialise_rabbitmq()

            self.test_rabbit_manager.queue_delete(self.test_queue_name)
            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name,
                durable=True,
                exclusive=False,
                auto_delete=False,
                passive=False)
            self.assertEqual(0, res.method.message_count)

            self.test_rabbit_manager.queue_bind(queue=self.test_queue_name,
                                                exchange=HEALTH_CHECK_EXCHANGE,
                                                routing_key=self.routing_key)

            blocking_channel = self.test_store.rabbitmq.channel
            method_chains = pika.spec.Basic.Deliver(
                routing_key=self.routing_key_1)

            properties = pika.spec.BasicProperties()
            self.test_store._process_data(
                blocking_channel, method_chains, properties,
                json.dumps(self.nodes_config_1).encode())

            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name,
                durable=True,
                exclusive=False,
                auto_delete=False,
                passive=True)
            self.assertEqual(1, res.method.message_count)

            heartbeat_test = {
                'component_name': self.test_store_name,
                'is_alive': True,
                'timestamp': datetime(2012, 1, 1).timestamp()
            }

            _, _, body = self.test_rabbit_manager.basic_get(
                self.test_queue_name)
            self.assertEqual(heartbeat_test, json.loads(body))
            mock_process_redis_store.assert_called_once()
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @mock.patch("src.data_store.stores.store.RabbitMQApi.basic_ack",
                autospec=True)
    def test_process_data_doesnt_send_heartbeat_on_processing_error(
            self, mock_basic_ack) -> None:

        mock_basic_ack.return_value = None
        try:
            self.test_rabbit_manager.connect()
            self.test_store._initialise_rabbitmq()

            self.test_rabbit_manager.queue_delete(self.test_queue_name)
            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name,
                durable=True,
                exclusive=False,
                auto_delete=False,
                passive=False)
            self.assertEqual(0, res.method.message_count)

            self.test_rabbit_manager.queue_bind(queue=self.test_queue_name,
                                                exchange=HEALTH_CHECK_EXCHANGE,
                                                routing_key=self.routing_key)

            blocking_channel = self.test_store.rabbitmq.channel
            method_chains = pika.spec.Basic.Deliver(routing_key=None)

            properties = pika.spec.BasicProperties()
            self.test_store._process_data(
                blocking_channel, method_chains, properties,
                json.dumps(self.nodes_config_1).encode())

            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name,
                durable=True,
                exclusive=False,
                auto_delete=False,
                passive=True)
            self.assertEqual(0, res.method.message_count)
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @parameterized.expand([
        ("self.nodes_config_1", "self.routing_key_1"),
        ("self.alerts_config_1", "self.routing_key_2"),
        ("self.repos_config_1", "self.routing_key_3"),
        ("self.repos_config_1", "self.routing_key_4"),
        ("self.alerts_config_1", "self.routing_key_5"),
        ("self.systems_config_1", "self.routing_key_6"),
        ("self.email_config_1", "self.routing_key_7"),
        ("self.pagerduty_config_1", "self.routing_key_8"),
        ("self.opsgenie_config_1", "self.routing_key_9"),
        ("self.telegram_config_1", "self.routing_key_10"),
        ("self.twilio_config_1", "self.routing_key_11"),
    ])
    @mock.patch("src.data_store.stores.store.RabbitMQApi.basic_ack",
                autospec=True)
    @mock.patch("src.data_store.stores.store.Store._send_heartbeat",
                autospec=True)
    def test_process_data_saves_in_redis_then_removes_it_on_empty_config(
            self, mock_config_data, mock_routing_key, mock_send_hb,
            mock_ack) -> None:

        self.rabbitmq.connect()
        mock_ack.return_value = None
        try:
            data = eval(mock_config_data)
            routing_key = eval(mock_routing_key)

            self.test_store._initialise_rabbitmq()

            blocking_channel = self.test_store.rabbitmq.channel
            method_chains = pika.spec.Basic.Deliver(routing_key=routing_key)

            properties = pika.spec.BasicProperties()
            self.test_store._process_data(blocking_channel, method_chains,
                                          properties,
                                          json.dumps(data).encode())
            mock_ack.assert_called_once()
            mock_send_hb.assert_called_once()

            self.assertEqual(
                data,
                json.loads(
                    self.redis.get(
                        Keys.get_config(routing_key)).decode("utf-8")))

            self.test_store._process_data(blocking_channel, method_chains,
                                          properties,
                                          json.dumps({}).encode())

            self.assertEqual(None,
                             self.redis.get(Keys.get_config(routing_key)))

        except Exception as e:
            self.fail("Test failed: {}".format(e))
Example #13
class TestGithubStore(unittest.TestCase):
    def setUp(self) -> None:
        self.dummy_logger = logging.getLogger('Dummy')
        self.dummy_logger.disabled = True
        self.connection_check_time_interval = timedelta(seconds=0)
        self.rabbit_ip = env.RABBIT_IP
        self.rabbitmq = RabbitMQApi(
            self.dummy_logger,
            self.rabbit_ip,
            connection_check_time_interval=self.connection_check_time_interval)

        self.test_rabbit_manager = RabbitMQApi(
            self.dummy_logger,
            self.rabbit_ip,
            connection_check_time_interval=self.connection_check_time_interval)

        self.redis_db = env.REDIS_DB
        self.redis_host = env.REDIS_IP
        self.redis_port = env.REDIS_PORT
        self.redis_namespace = env.UNIQUE_ALERTER_IDENTIFIER
        self.redis = RedisApi(self.dummy_logger, self.redis_db,
                              self.redis_host, self.redis_port, '',
                              self.redis_namespace,
                              self.connection_check_time_interval)

        self.mongo_ip = env.DB_IP
        self.mongo_db = env.DB_NAME
        self.mongo_port = env.DB_PORT

        self.test_store_name = 'store name'
        self.test_store = GithubStore(self.test_store_name, self.dummy_logger,
                                      self.rabbitmq)

        self.routing_key = 'heartbeat.worker'
        self.test_queue_name = 'test queue'

        connect_to_rabbit(self.rabbitmq)
        self.rabbitmq.exchange_declare(HEALTH_CHECK_EXCHANGE, 'topic', False,
                                       True, False, False)
        self.rabbitmq.exchange_declare(STORE_EXCHANGE, 'direct', False, True,
                                       False, False)
        self.rabbitmq.queue_declare(GITHUB_STORE_INPUT_QUEUE, False, True,
                                    False, False)
        self.rabbitmq.queue_bind(GITHUB_STORE_INPUT_QUEUE, STORE_EXCHANGE,
                                 GITHUB_STORE_INPUT_ROUTING_KEY)

        connect_to_rabbit(self.test_rabbit_manager)
        self.test_rabbit_manager.queue_declare(self.test_queue_name, False,
                                               True, False, False)
        self.test_rabbit_manager.queue_bind(self.test_queue_name,
                                            HEALTH_CHECK_EXCHANGE,
                                            self.routing_key)

        self.test_data_str = 'test data'
        self.test_exception = PANICException('test_exception', 1)

        self.repo_name = 'simplyvc/panic/'
        self.repo_id = 'test_repo_id'
        self.parent_id = 'test_parent_id'

        self.repo_name_2 = 'simplyvc/panic_oasis/'
        self.repo_id_2 = 'test_repo_id_2'
        self.parent_id_2 = 'test_parent_id_2'

        self.last_monitored = datetime(2012, 1, 1).timestamp()
        self.github_data_1 = {
            "result": {
                "meta_data": {
                    "repo_name": self.repo_name,
                    "repo_id": self.repo_id,
                    "repo_parent_id": self.parent_id,
                    "last_monitored": self.last_monitored
                },
                "data": {
                    "no_of_releases": {
                        "current": 5,
                        "previous": 4,
                    }
                }
            }
        }
        self.github_data_2 = {
            "result": {
                "meta_data": {
                    "repo_name": self.repo_name,
                    "repo_id": self.repo_id,
                    "repo_parent_id": self.parent_id,
                    "last_monitored": self.last_monitored
                },
                "data": {
                    "no_of_releases": {
                        "current": 5,
                        "previous": 5,
                    }
                }
            }
        }
        self.github_data_3 = {
            "result": {
                "meta_data": {
                    "repo_name": self.repo_name_2,
                    "repo_id": self.repo_id_2,
                    "repo_parent_id": self.parent_id_2,
                    "last_monitored": self.last_monitored
                },
                "data": {
                    "no_of_releases": {
                        "current": 8,
                        "previous": 1,
                    }
                }
            }
        }
        self.github_data_error = {
            "error": {
                "meta_data": {
                    "repo_name": self.repo_name,
                    "repo_id": self.repo_id,
                    "repo_parent_id": self.parent_id,
                    "time": self.last_monitored
                },
                "code": "5006",
                "message": "error message"
            }
        }
        self.github_data_key_error = {
            "result": {
                "data": {
                    "repo_name": self.repo_name_2,
                    "repo_id": self.repo_id_2,
                    "repo_parent_id": self.parent_id_2,
                    "last_monitored": self.last_monitored
                },
                "wrong_data": {
                    "no_of_releases": {
                        "current": 8,
                        "previous": 1,
                    }
                }
            }
        }
        self.github_data_unexpected = {"unexpected": {}}

    def tearDown(self) -> None:
        connect_to_rabbit(self.rabbitmq)
        delete_queue_if_exists(self.rabbitmq, GITHUB_STORE_INPUT_QUEUE)
        delete_exchange_if_exists(self.rabbitmq, STORE_EXCHANGE)
        delete_exchange_if_exists(self.rabbitmq, HEALTH_CHECK_EXCHANGE)
        disconnect_from_rabbit(self.rabbitmq)

        connect_to_rabbit(self.test_rabbit_manager)
        delete_queue_if_exists(self.test_rabbit_manager, self.test_queue_name)
        disconnect_from_rabbit(self.test_rabbit_manager)

        self.redis.delete_all_unsafe()
        self.redis = None
        self.dummy_logger = None
        self.connection_check_time_interval = None
        self.rabbitmq = None
        self.test_rabbit_manager = None

    def test__str__returns_name_correctly(self) -> None:
        self.assertEqual(self.test_store_name, str(self.test_store))

    def test_name_property_returns_name_correctly(self) -> None:
        self.assertEqual(self.test_store_name, self.test_store.name)

    def test_mongo_ip_property_returns_mongo_ip_correctly(self) -> None:
        self.assertEqual(self.mongo_ip, self.test_store.mongo_ip)

    def test_mongo_db_property_returns_mongo_db_correctly(self) -> None:
        self.assertEqual(self.mongo_db, self.test_store.mongo_db)

    def test_mongo_port_property_returns_mongo_port_correctly(self) -> None:
        self.assertEqual(self.mongo_port, self.test_store.mongo_port)

    def test_redis_property_returns_redis_correctly(self) -> None:
        self.assertEqual(type(self.redis), type(self.test_store.redis))

    def test_mongo_property_returns_none_when_mongo_not_init(self) -> None:
        self.assertEqual(None, self.test_store.mongo)

    def test_initialise_rabbitmq_initialises_everything_as_expected(
            self) -> None:
        try:
            # To make sure that the exchanges have not already been declared
            self.rabbitmq.connect()
            self.rabbitmq.exchange_delete(HEALTH_CHECK_EXCHANGE)
            self.rabbitmq.exchange_delete(STORE_EXCHANGE)
            self.rabbitmq.disconnect()

            self.test_store._initialise_rabbitmq()

            # Check that the connection has been opened and marked as open,
            # and that the delivery confirmation variable is set.
            self.assertTrue(self.test_store.rabbitmq.is_connected)
            self.assertTrue(self.test_store.rabbitmq.connection.is_open)
            self.assertTrue(
                self.test_store.rabbitmq.channel._delivery_confirmation)

            # Check whether the producing exchanges have been created by
            # using passive=True. If this check fails an exception is raised
            # automatically.
            self.test_store.rabbitmq.exchange_declare(STORE_EXCHANGE,
                                                      passive=True)
            self.test_store.rabbitmq.exchange_declare(HEALTH_CHECK_EXCHANGE,
                                                      passive=True)

            # Check whether the exchange has been created by sending messages
            # to it. If this fails an exception is raised, hence the test fails.
            self.test_store.rabbitmq.basic_publish_confirm(
                exchange=HEALTH_CHECK_EXCHANGE,
                routing_key=self.routing_key,
                body=self.test_data_str,
                is_body_dict=False,
                properties=pika.BasicProperties(delivery_mode=2),
                mandatory=False)
            # Check whether the exchange has been created by sending messages
            # to it. If this fails an exception is raised, hence the test fails.
            self.test_store.rabbitmq.basic_publish_confirm(
                exchange=STORE_EXCHANGE,
                routing_key=GITHUB_STORE_INPUT_ROUTING_KEY,
                body=self.test_data_str,
                is_body_dict=False,
                properties=pika.BasicProperties(delivery_mode=2),
                mandatory=False)

            # Re-declare queue to get the number of messages
            res = self.test_store.rabbitmq.queue_declare(
                GITHUB_STORE_INPUT_QUEUE, False, True, False, False)

            self.assertEqual(1, res.method.message_count)
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @parameterized.expand([
        ("self.github_data_1", ),
        ("self.github_data_2", ),
        ("self.github_data_3", ),
    ])
    @mock.patch.object(RedisApi, "hset_multiple")
    def test_process_redis_store_redis_is_called_correctly(
            self, mock_github_data, mock_hset_multiple) -> None:

        data = eval(mock_github_data)
        self.test_store._process_redis_store(data)

        meta_data = data['result']['meta_data']
        repo_id = meta_data['repo_id']
        parent_id = meta_data['repo_parent_id']
        metrics = data['result']['data']

        call_1 = call(
            Keys.get_hash_parent(parent_id), {
                Keys.get_github_no_of_releases(repo_id):
                str(metrics['no_of_releases']),
                Keys.get_github_last_monitored(repo_id):
                str(meta_data['last_monitored']),
            })
        mock_hset_multiple.assert_has_calls([call_1])
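    # Note: assert_has_calls([call_1]) passes as long as the expected call
    # appears, in order, within mock_hset_multiple.mock_calls, possibly
    # alongside other calls; unittest.mock's call() objects compare by their
    # positional and keyword arguments.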

    @mock.patch("src.data_store.stores.store.RedisApi.hset_multiple",
                autospec=True)
    def test_process_redis_store_does_nothing_on_error_key(
            self, mock_hset_multiple) -> None:
        self.test_store._process_redis_store(self.github_data_error)
        mock_hset_multiple.assert_not_called()

    def test_process_redis_store_raises_exception_on_unexpected_key(
            self) -> None:
        self.assertRaises(ReceivedUnexpectedDataException,
                          self.test_store._process_redis_store,
                          self.github_data_unexpected)

    @parameterized.expand([
        ("self.github_data_1", ),
        ("self.github_data_2", ),
        ("self.github_data_3", ),
    ])
    def test_process_redis_store_redis_stores_correctly(
            self, mock_github_data) -> None:

        data = eval(mock_github_data)
        self.test_store._process_redis_store(data)

        meta_data = data['result']['meta_data']
        repo_id = meta_data['repo_id']
        parent_id = meta_data['repo_parent_id']
        metrics = data['result']['data']

        self.assertEqual(
            str(metrics['no_of_releases']),
            self.redis.hget(
                Keys.get_hash_parent(parent_id),
                Keys.get_github_no_of_releases(repo_id)).decode("utf-8"))
        self.assertEqual(
            str(meta_data['last_monitored']),
            self.redis.hget(
                Keys.get_hash_parent(parent_id),
                Keys.get_github_last_monitored(repo_id)).decode("utf-8"))
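    # Note: RedisApi.hget returns raw bytes here (as redis clients typically
    # do), hence the .decode("utf-8") before comparing against the stringified
    # metric values.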

    @parameterized.expand([
        ("self.github_data_1", ),
        ("self.github_data_2", ),
        ("self.github_data_3", ),
    ])
    @mock.patch("src.data_store.stores.store.RabbitMQApi.basic_ack",
                autospec=True)
    @mock.patch("src.data_store.stores.store.Store._send_heartbeat",
                autospec=True)
    def test_process_data_saves_in_redis(self, mock_github_data, mock_send_hb,
                                         mock_ack) -> None:
        self.rabbitmq.connect()
        mock_ack.return_value = None
        try:
            self.test_store._initialise_rabbitmq()
            data = eval(mock_github_data)

            blocking_channel = self.test_store.rabbitmq.channel
            method_chains = pika.spec.Basic.Deliver(
                routing_key=GITHUB_STORE_INPUT_ROUTING_KEY)

            properties = pika.spec.BasicProperties()
            self.test_store._process_data(blocking_channel, method_chains,
                                          properties,
                                          json.dumps(data).encode())
            mock_ack.assert_called_once()
            mock_send_hb.assert_called_once()

            meta_data = data['result']['meta_data']
            repo_id = meta_data['repo_id']
            parent_id = meta_data['repo_parent_id']
            metrics = data['result']['data']

            self.assertEqual(
                str(metrics['no_of_releases']),
                self.redis.hget(
                    Keys.get_hash_parent(parent_id),
                    Keys.get_github_no_of_releases(repo_id)).decode("utf-8"))
            self.assertEqual(
                str(meta_data['last_monitored']),
                self.redis.hget(
                    Keys.get_hash_parent(parent_id),
                    Keys.get_github_last_monitored(repo_id)).decode("utf-8"))
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @parameterized.expand([
        ("KeyError", "self.github_data_key_error "),
        ("ReceivedUnexpectedDataException", "self.github_data_unexpected"),
    ])
    @mock.patch("src.data_store.stores.store.RabbitMQApi.basic_ack",
                autospec=True)
    @mock.patch("src.data_store.stores.store.Store._send_heartbeat",
                autospec=True)
    def test_process_data_with_bad_data_raises_exceptions(
            self, mock_error, mock_bad_data, mock_send_hb, mock_ack) -> None:
        self.rabbitmq.connect()
        mock_ack.return_value = None
        try:
            self.test_store._initialise_rabbitmq()

            blocking_channel = self.test_store.rabbitmq.channel
            method_chains = pika.spec.Basic.Deliver(
                routing_key=GITHUB_STORE_INPUT_ROUTING_KEY)

            properties = pika.spec.BasicProperties()
            self.test_store._process_data(
                blocking_channel, method_chains, properties,
                json.dumps(eval(mock_bad_data)).encode())
            self.assertRaises(eval(mock_error),
                              self.test_store._process_redis_store,
                              eval(mock_bad_data))
            mock_ack.assert_called_once()
            mock_send_hb.assert_not_called()
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @freeze_time("2012-01-01")
    @mock.patch("src.data_store.stores.store.RabbitMQApi.basic_ack",
                autospec=True)
    @mock.patch(
        "src.data_store.stores.github.GithubStore._process_redis_store",
        autospec=True)
    def test_process_data_sends_heartbeat_correctly(self,
                                                    mock_process_redis_store,
                                                    mock_basic_ack) -> None:

        mock_basic_ack.return_value = None
        try:
            self.test_rabbit_manager.connect()
            self.test_store._initialise_rabbitmq()

            self.test_rabbit_manager.queue_delete(self.test_queue_name)
            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name,
                durable=True,
                exclusive=False,
                auto_delete=False,
                passive=False)
            self.assertEqual(0, res.method.message_count)

            self.test_rabbit_manager.queue_bind(queue=self.test_queue_name,
                                                exchange=HEALTH_CHECK_EXCHANGE,
                                                routing_key=self.routing_key)

            blocking_channel = self.test_store.rabbitmq.channel
            method_chains = pika.spec.Basic.Deliver(
                routing_key=GITHUB_STORE_INPUT_ROUTING_KEY)

            properties = pika.spec.BasicProperties()
            self.test_store._process_data(
                blocking_channel, method_chains, properties,
                json.dumps(self.github_data_1).encode())

            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name,
                durable=True,
                exclusive=False,
                auto_delete=False,
                passive=True)
            self.assertEqual(1, res.method.message_count)

            heartbeat_test = {
                'component_name': self.test_store_name,
                'is_alive': True,
                'timestamp': datetime(2012, 1, 1).timestamp()
            }

            _, _, body = self.test_rabbit_manager.basic_get(
                self.test_queue_name)
            self.assertEqual(heartbeat_test, json.loads(body))
            mock_process_redis_store.assert_called_once()
        except Exception as e:
            self.fail("Test failed: {}".format(e))

    @mock.patch("src.data_store.stores.store.RabbitMQApi.basic_ack",
                autospec=True)
    def test_process_data_doesnt_send_heartbeat_on_processing_error(
            self, mock_basic_ack) -> None:

        mock_basic_ack.return_value = None
        try:
            self.test_rabbit_manager.connect()
            self.test_store._initialise_rabbitmq()

            self.test_rabbit_manager.queue_delete(self.test_queue_name)
            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name,
                durable=True,
                exclusive=False,
                auto_delete=False,
                passive=False)
            self.assertEqual(0, res.method.message_count)

            self.test_rabbit_manager.queue_bind(queue=self.test_queue_name,
                                                exchange=HEALTH_CHECK_EXCHANGE,
                                                routing_key=self.routing_key)

            blocking_channel = self.test_store.rabbitmq.channel
            method_chains = pika.spec.Basic.Deliver(
                routing_key=GITHUB_STORE_INPUT_ROUTING_KEY)

            properties = pika.spec.BasicProperties()
            self.test_store._process_data(
                blocking_channel, method_chains, properties,
                json.dumps(self.github_data_unexpected).encode())

            res = self.test_rabbit_manager.queue_declare(
                queue=self.test_queue_name,
                durable=True,
                exclusive=False,
                auto_delete=False,
                passive=True)
            self.assertEqual(0, res.method.message_count)
        except Exception as e:
            self.fail("Test failed: {}".format(e))
Example #14
 def setUp(self) -> None:
     self.dummy_logger = logging.getLogger('Dummy')
     self.dummy_logger.disabled = True
     self.handler_display_name = 'Test Channel Handler'
     self.handler_module_name = 'TestChannelHandler'
     self.connection_check_time_interval = timedelta(seconds=0)
     self.rabbit_ip = env.RABBIT_IP
     self.rabbitmq = RabbitMQApi(
         self.dummy_logger,
         self.rabbit_ip,
         connection_check_time_interval=self.connection_check_time_interval)
     self.telegram_channel_name = 'test_telegram_channel'
     self.telegram_channel_id = 'test_telegram_id12345'
     self.telegram_channel_logger = self.dummy_logger.getChild(
         'telegram_channel_logger')
     self.bot_token = '1234567891:ABC-67ABCrfZFdddqRT5Gh837T2rtUFHgTY'
     self.bot_chat_id = 'test_bot_chat_id'
     self.telegram_base_url = "https://api.telegram.org/bot" + self.bot_token
     self.telegram_bot_api = TelegramBotApi(self.bot_token,
                                            self.bot_chat_id)
     self.telegram_channel = TelegramChannel(self.telegram_channel_name,
                                             self.telegram_channel_id,
                                             self.telegram_channel_logger,
                                             self.telegram_bot_api)
     self.test_queue_size = 1000
     self.test_max_attempts = 5
     self.test_alert_validity_threshold = 300
     self.telegram_alerts_handler = TelegramAlertsHandler(
         self.handler_display_name, self.dummy_logger, self.rabbitmq,
         self.telegram_channel, self.test_queue_size,
         self.test_max_attempts, self.test_alert_validity_threshold)
     self.cmd_handlers_rabbit = RabbitMQApi(
         logger=self.dummy_logger.getChild(RabbitMQApi.__name__),
         host=self.rabbit_ip,
         connection_check_time_interval=self.connection_check_time_interval)
     self.redis = RedisApi(logger=self.dummy_logger.getChild(
         RedisApi.__name__),
                           host=env.REDIS_IP,
                           db=env.REDIS_DB,
                           port=env.REDIS_PORT,
                           namespace=env.UNIQUE_ALERTER_IDENTIFIER)
     self.mongo = MongoApi(logger=self.dummy_logger.getChild(
         MongoApi.__name__),
                           host=env.DB_IP,
                           db_name=env.DB_NAME,
                           port=env.DB_PORT)
     self.command_handlers_logger = self.dummy_logger.getChild(
         TelegramCommandHandlers.__name__)
     self.test_chain_1 = 'Kusama'
     self.test_chain_2 = 'Cosmos'
     self.test_chain_3 = 'Test_Chain'
     self.test_chain1_id = 'kusama1234'
     self.test_chain2_id = 'cosmos1234'
     self.test_chain3_id = 'test_chain11123'
     self.test_associated_chains = {
         self.test_chain1_id: self.test_chain_1,
         self.test_chain2_id: self.test_chain_2,
         self.test_chain3_id: self.test_chain_3
     }
     self.telegram_command_handlers = TelegramCommandHandlers(
         self.handler_display_name, self.command_handlers_logger,
         self.test_associated_chains, self.telegram_channel,
         self.cmd_handlers_rabbit, self.redis, self.mongo)
     self.telegram_commands_handler = TelegramCommandsHandler(
         self.handler_display_name, self.dummy_logger, self.rabbitmq,
         self.telegram_channel, self.telegram_command_handlers)
     self.twilio_channel_name = 'test_twilio_channel'
     self.twilio_channel_id = 'test_twilio_id12345'
     self.twilio_channel_logger = self.dummy_logger.getChild(
         'twilio_channel')
     self.account_sid = 'test_account_sid'
     self.auth_token = 'test_auth_token'
     self.call_from = '+35699999999'
     self.call_to = ['+35611111111', '+35644545454', '+35634343434']
     self.twiml = '<Response><Reject/></Response>'
     self.twiml_is_url = False
     self.twilio_api = TwilioApi(self.account_sid, self.auth_token)
     self.twilio_channel = TwilioChannel(self.twilio_channel_name,
                                         self.twilio_channel_id,
                                         self.twilio_channel_logger,
                                         self.twilio_api)
     self.twilio_alerts_handler = TwilioAlertsHandler(
         self.handler_display_name, self.dummy_logger, self.rabbitmq,
         self.twilio_channel, self.call_from, self.call_to, self.twiml,
         self.twiml_is_url, self.test_max_attempts,
         self.test_alert_validity_threshold)
     self.integration_key = 'test_integration_key'
     self.pagerduty_channel_name = 'test_pagerduty_channel'
     self.pagerduty_channel_id = 'test_pagerduty_id12345'
     self.pagerduty_channel_logger = self.dummy_logger.getChild('pagerduty')
     self.pagerduty_api = PagerDutyApi(self.integration_key)
     self.pagerduty_channel = PagerDutyChannel(
         self.pagerduty_channel_name, self.pagerduty_channel_id,
         self.pagerduty_channel_logger, self.pagerduty_api)
     self.pagerduty_alerts_handler = PagerDutyAlertsHandler(
         self.handler_display_name, self.dummy_logger, self.rabbitmq,
         self.pagerduty_channel, self.test_queue_size,
         self.test_max_attempts, self.test_alert_validity_threshold)
     self.email_channel_name = 'test_email_channel'
     self.email_channel_id = 'test_email1234'
     self.email_channel_logger = self.dummy_logger.getChild('email_channel')
     self.emails_to = [
         '*****@*****.**', '*****@*****.**', '*****@*****.**'
     ]
     self.smtp = 'test smtp server'
     self.sender = 'test sender'
     self.username = '******'
     self.password = '******'
     self.port = 10
     self.email_api = EmailApi(self.smtp, self.sender, self.username,
                               self.password, self.port)
     self.email_channel = EmailChannel(self.email_channel_name,
                                       self.email_channel_id,
                                       self.email_channel_logger,
                                       self.emails_to, self.email_api)
     self.email_alerts_handler = EmailAlertsHandler(
         self.handler_display_name, self.dummy_logger, self.rabbitmq,
         self.email_channel, self.test_queue_size, self.test_max_attempts,
         self.test_alert_validity_threshold)
     self.api_key = 'test api key'
     self.opsgenie_channel_name = 'test_opgenie_channel'
     self.opsgenie_channel_id = 'test_opsgenie_id12345'
     self.opsgenie_channel_logger = self.dummy_logger.getChild(
         'opsgenie_channel')
     self.eu_host = True
     self.opsgenie_api = OpsgenieApi(self.api_key, self.eu_host)
     self.opsgenie_channel = OpsgenieChannel(self.opsgenie_channel_name,
                                             self.opsgenie_channel_id,
                                             self.opsgenie_channel_logger,
                                             self.opsgenie_api)
     self.opsgenie_alerts_handler = OpsgenieAlertsHandler(
         self.handler_display_name, self.dummy_logger, self.rabbitmq,
         self.opsgenie_channel, self.test_queue_size,
         self.test_max_attempts, self.test_alert_validity_threshold)
     self.console_channel_name = 'test_console_channel'
     self.console_channel_id = 'test_console1234'
     self.console_channel_logger = self.dummy_logger.getChild(
         'console_channel')
     self.console_channel = ConsoleChannel(self.console_channel_name,
                                           self.console_channel_id,
                                           self.console_channel_logger)
     self.console_alerts_handler = ConsoleAlertsHandler(
         self.handler_display_name, self.dummy_logger, self.rabbitmq,
         self.console_channel)
     self.log_channel_name = 'test_logger_channel'
     self.log_channel_id = 'test_logger1234'
     self.log_channel_logger = self.dummy_logger.getChild('log_channel')
     self.alerts_logger = self.dummy_logger.getChild('alerts_logger')
     self.log_channel = LogChannel(self.log_channel_name,
                                   self.log_channel_id,
                                   self.log_channel_logger,
                                   self.alerts_logger)
     self.log_alerts_handler = LogAlertsHandler(self.handler_display_name,
                                                self.dummy_logger,
                                                self.rabbitmq,
                                                self.log_channel)
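
The fixture above derives every component logger with Logger.getChild; as a
standalone illustration of the dotted names this produces (standard-library
behaviour, independent of this project):

import logging

parent = logging.getLogger('Dummy')
child = parent.getChild('telegram_channel_logger')
print(child.name)  # prints 'Dummy.telegram_channel_logger'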