예제 #1
0
 def _process_redis_store(self, routing_key: str, data: Dict) -> None:
     """Persist a config under its routing key, or drop it when empty.

     A truthy ``data`` dict is serialised to JSON and stored in Redis
     under the config key derived from ``routing_key``.  A falsy value
     signals that the config was removed upstream, so any previously
     saved entry is deleted instead.
     """
     if not data:
         # Nothing to save -- clear out whatever was stored before.
         self.logger.debug("Removing the saved config for key %s .",
                           routing_key)
         if self.redis.exists(Keys.get_config(routing_key)):
             self.redis.remove(Keys.get_config(routing_key))
         return

     self.logger.debug("Saving for %s the data=%s.", routing_key, data)
     self.redis.set(Keys.get_config(routing_key), json.dumps(data))
예제 #2
0
    def test_process_data_saves_in_redis(self, mock_config_data,
                                         mock_routing_key, mock_send_hb,
                                         mock_ack) -> None:
        """After `_process_data`, the config must be retrievable from Redis.

        Feeds a serialised config through `_process_data` and checks that
        the message was acked, a heartbeat was sent, and the exact config
        dict round-trips through Redis under its routing key.
        """
        self.rabbitmq.connect()
        mock_ack.return_value = None
        try:
            # The parameterised fixtures arrive as source strings.
            expected_config = eval(mock_config_data)
            config_routing_key = eval(mock_routing_key)

            self.test_store._initialise_rabbitmq()

            channel = self.test_store.rabbitmq.channel
            deliver_method = pika.spec.Basic.Deliver(
                routing_key=eval(mock_routing_key))
            props = pika.spec.BasicProperties()

            self.test_store._process_data(
                channel, deliver_method, props,
                json.dumps(expected_config).encode())

            mock_ack.assert_called_once()
            mock_send_hb.assert_called_once()

            stored = self.redis.get(
                Keys.get_config(config_routing_key)).decode("utf-8")
            self.assertEqual(expected_config, json.loads(stored))

        except Exception as e:
            self.fail("Test failed: {}".format(e))
예제 #3
0
    def _process_redis_error_store(self, data: Dict) -> None:
        """Record a system's downtime timestamp in Redis.

        Only errors whose code matches the "system is down" exception are
        persisted; every other error code is ignored here.
        """
        meta_data = data['meta_data']
        name = meta_data['system_name']
        # The reference exception is built purely to compare error codes.
        downtime_exception = SystemIsDownException(name)

        if data['code'] != downtime_exception.code:
            return

        metrics = data['data']
        self.logger.debug("Saving %s state: _went_down_at=%s", name,
                          metrics['went_down_at'])
        self.redis.hset(Keys.get_hash_parent(meta_data['system_parent_id']),
                        Keys.get_system_went_down_at(meta_data['system_id']),
                        str(metrics['went_down_at']))
예제 #4
0
파일: alert.py 프로젝트: SimplyVC/panic
 def _process_redis_store(self, alert: Dict) -> None:
     """Persist an alert's severity and message in Redis.

     The payload is stored in the parent's hash under a metric-specific
     key produced by the matching ``Keys.get_alert_<metric>`` helper.

     :param alert: alert dict with 'severity', 'message', 'origin_id',
         'parent_id' and 'metric' entries.
     """
     metric_data = {
         'severity': alert['severity'],
         'message': alert['message']
     }
     key = alert['origin_id']
     # Resolve the key builder by name instead of eval(): identical result
     # for valid metric names, but no arbitrary-code-execution surface if
     # the 'metric' field is ever attacker-controlled.
     alert_key_fn = getattr(Keys, 'get_alert_{}'.format(alert['metric']))
     self.redis.hset(Keys.get_hash_parent(alert['parent_id']),
                     alert_key_fn(key),
                     json.dumps(metric_data))
예제 #5
0
파일: github.py 프로젝트: SimplyVC/panic
    def _process_redis_result_store(self, data: Dict) -> None:
        """Save a repo's latest monitoring result into its parent's hash.

        Stores the release count and the monitor's last-monitored
        timestamp, both stringified, under repo-specific keys.
        """
        meta_data = data['meta_data']
        repo_id = meta_data['repo_id']
        metrics = data['data']

        self.logger.debug(
            "Saving %s state: _no_of_releases=%s, _last_monitored=%s",
            meta_data['repo_name'], metrics['no_of_releases'],
            meta_data['last_monitored'])

        # Both values live in the parent's hash, stringified for Redis.
        fields = {
            Keys.get_github_no_of_releases(repo_id):
                str(metrics['no_of_releases']),
            Keys.get_github_last_monitored(repo_id):
                str(meta_data['last_monitored']),
        }
        self.redis.hset_multiple(
            Keys.get_hash_parent(meta_data['repo_parent_id']), fields)
예제 #6
0
    def test_process_redis_store_redis_is_called_correctly(
            self, mock_github_data, mock_hset_multiple) -> None:
        """`_process_redis_store` must issue the expected hset_multiple call."""
        github_data = eval(mock_github_data)
        self.test_store._process_redis_store(github_data)

        meta = github_data['result']['meta_data']
        result_metrics = github_data['result']['data']
        repo_id = meta['repo_id']

        expected_call = call(
            Keys.get_hash_parent(meta['repo_parent_id']), {
                Keys.get_github_no_of_releases(repo_id):
                str(result_metrics['no_of_releases']),
                Keys.get_github_last_monitored(repo_id):
                str(meta['last_monitored']),
            })
        mock_hset_multiple.assert_has_calls([expected_call])
예제 #7
0
    def test_process_redis_store_redis_stores_correctly(
            self, mock_github_data) -> None:
        """The stored Redis hash fields must match the processed payload."""
        github_data = eval(mock_github_data)
        self.test_store._process_redis_store(github_data)

        meta = github_data['result']['meta_data']
        result_metrics = github_data['result']['data']
        repo_id = meta['repo_id']

        stored_releases = self.redis.hget(
            Keys.get_hash_parent(meta['repo_parent_id']),
            Keys.get_github_no_of_releases(repo_id)).decode("utf-8")
        self.assertEqual(str(result_metrics['no_of_releases']),
                         stored_releases)

        stored_last_monitored = self.redis.hget(
            Keys.get_hash_parent(meta['repo_parent_id']),
            Keys.get_github_last_monitored(repo_id)).decode("utf-8")
        self.assertEqual(str(meta['last_monitored']), stored_last_monitored)
예제 #8
0
    def _process_heartbeat(self, ch: BlockingChannel,
                           method: pika.spec.Basic.Deliver,
                           properties: pika.spec.BasicProperties, body: bytes) \
            -> None:
        """Store a worker/manager heartbeat in Redis, then record this
        handler's own heartbeat.

        The message is acked whether or not processing succeeded, so a bad
        heartbeat is logged and dropped rather than redelivered forever.

        NOTE(review): ``json.loads(body)`` runs outside the try block, so a
        non-JSON body raises before the ack -- confirm upstream guarantees
        well-formed payloads.
        """
        heartbeat = json.loads(body)
        self.logger.debug("Received %s. Now processing this data.", heartbeat)

        try:
            # Only worker/manager heartbeats are recognised; anything else
            # on this queue is treated as unexpected data.
            if method.routing_key == 'heartbeat.worker' or \
                    method.routing_key == 'heartbeat.manager':
                component_name = heartbeat['component_name']

                key_heartbeat = Keys.get_component_heartbeat(component_name)
                transformed_heartbeat = json.dumps(heartbeat)
                # Presumably buffers the data in state when the Redis write
                # fails (per the helper's name) -- verify against the helper.
                self._save_to_redis_and_add_to_state_if_fail(
                    key_heartbeat, transformed_heartbeat)

                # Presumably retries data that previously failed to reach
                # Redis -- verify against the helper.
                self._dump_unsavable_redis_data()

                self.logger.debug("Successfully processed %s", heartbeat)
            else:
                raise ReceivedUnexpectedDataException(
                    "{}: _process_heartbeat".format(self))
        except Exception as e:
            # Log-and-continue: the ack below still runs, so the message is
            # never redelivered.
            self.logger.error("Error when processing %s", heartbeat)
            self.logger.exception(e)

        self.rabbitmq.basic_ack(method.delivery_tag, False)

        # Finally, publish this handler's own liveness heartbeat to Redis.
        self.logger.debug("Saving %s heartbeat to Redis", self)
        key_heartbeat = Keys.get_component_heartbeat(self.name)
        handler_heartbeat = {
            'component_name': self.name,
            'timestamp': datetime.now().timestamp()
        }
        transformed_handler_heartbeat = json.dumps(handler_heartbeat)
        ret = self.redis.set(key_heartbeat, transformed_handler_heartbeat)
        if ret is None:
            self.logger.error("Could not save %s=%s to Redis.", key_heartbeat,
                              transformed_handler_heartbeat)
예제 #9
0
파일: github.py 프로젝트: SimplyVC/panic
    def load_state(self, repo: Union[System, GitHubRepo]) \
            -> Union[System, GitHubRepo]:
        """Restore a repo's state from Redis.

        The current in-memory value is passed to Redis as the default, so
        if Redis is down the state is effectively left unchanged.

        :param repo: the repo whose state should be reloaded.
        :return: the same ``repo`` instance, updated in place.
        """
        self.logger.debug("Loading the state of %s from Redis", repo)
        redis_hash = Keys.get_hash_parent(repo.parent_id)
        repo_id = repo.repo_id

        # Each metric: read from Redis with the current value as fallback,
        # convert the stored string, then write the result back.
        raw_no_of_releases = self.redis.hget(
            redis_hash, Keys.get_github_no_of_releases(repo_id),
            repo.no_of_releases)
        no_of_releases = convert_to_int_if_not_none(raw_no_of_releases, None)
        repo.set_no_of_releases(no_of_releases)

        raw_last_monitored = self.redis.hget(
            redis_hash, Keys.get_github_last_monitored(repo_id),
            repo.last_monitored)
        last_monitored = convert_to_float_if_not_none(raw_last_monitored,
                                                      None)
        repo.set_last_monitored(last_monitored)

        self.logger.debug(
            "Restored %s state: _no_of_releases=%s, _last_monitored=%s", repo,
            no_of_releases, last_monitored)

        return repo
예제 #10
0
    def test_process_data_saves_in_redis(self, mock_github_data, mock_send_hb,
                                         mock_ack) -> None:
        """End-to-end: `_process_data` must ack, heartbeat, and persist."""
        self.rabbitmq.connect()
        mock_ack.return_value = None
        try:
            self.test_store._initialise_rabbitmq()
            # The parameterised fixture arrives as a source string.
            github_data = eval(mock_github_data)

            channel = self.test_store.rabbitmq.channel
            deliver_method = pika.spec.Basic.Deliver(
                routing_key=GITHUB_STORE_INPUT_ROUTING_KEY)
            props = pika.spec.BasicProperties()

            self.test_store._process_data(
                channel, deliver_method, props,
                json.dumps(github_data).encode())
            mock_ack.assert_called_once()
            mock_send_hb.assert_called_once()

            meta = github_data['result']['meta_data']
            result_metrics = github_data['result']['data']
            repo_id = meta['repo_id']

            stored_releases = self.redis.hget(
                Keys.get_hash_parent(meta['repo_parent_id']),
                Keys.get_github_no_of_releases(repo_id)).decode("utf-8")
            self.assertEqual(str(result_metrics['no_of_releases']),
                             stored_releases)

            stored_last_monitored = self.redis.hget(
                Keys.get_hash_parent(meta['repo_parent_id']),
                Keys.get_github_last_monitored(repo_id)).decode("utf-8")
            self.assertEqual(str(meta['last_monitored']),
                             stored_last_monitored)
        except Exception as e:
            self.fail("Test failed: {}".format(e))
예제 #11
0
    def test_process_redis_store_calls_redis_correctly(self, mock_system_data,
                                                       mock_hset) -> None:
        """`_process_redis_store` must hset the alert's key and payload.

        The metric-specific key builder is resolved with ``getattr``
        instead of ``eval`` -- identical for valid metric names, without
        executing data-derived source code.
        """
        data = eval(mock_system_data)
        self.test_store._process_redis_store(data)

        metric_data = {
            'severity': data['severity'],
            'message': data['message']
        }
        key = data['origin_id']
        alert_key_fn = getattr(Keys, 'get_alert_{}'.format(data['metric']))

        call_1 = call(Keys.get_hash_parent(data['parent_id']),
                      alert_key_fn(key),
                      json.dumps(metric_data))
        mock_hset.assert_has_calls([call_1])
예제 #12
0
    def test_process_redis_store_redis_stores_correctly(
            self, mock_system_data) -> None:
        """The alert's severity/message must round-trip through Redis."""
        data = eval(mock_system_data)
        self.test_store._process_redis_store(data)

        key = data['origin_id']
        # getattr instead of eval: same lookup of Keys.get_alert_<metric>,
        # without evaluating data-derived source code.
        alert_key_fn = getattr(Keys, 'get_alert_{}'.format(data['metric']))

        stored_data = self.redis.hget(
            Keys.get_hash_parent(data['parent_id']), alert_key_fn(key))

        expected_data = {
            'severity': data['severity'],
            'message': data['message']
        }

        self.assertEqual(expected_data, json.loads(stored_data))
예제 #13
0
    def test_process_data_results_stores_in_redis_correctly(
            self, mock_system_data, mock_send_hb, mock_process_mongo_store,
            mock_ack) -> None:
        """End-to-end: `_process_data` must ack, heartbeat, and store the
        alert under its metric-specific Redis key."""
        mock_ack.return_value = None
        try:
            self.test_store._initialise_rabbitmq()

            data = eval(mock_system_data)
            blocking_channel = self.test_store.rabbitmq.channel
            method_chains = pika.spec.Basic.Deliver(
                routing_key=ALERT_STORE_INPUT_ROUTING_KEY)

            properties = pika.spec.BasicProperties()
            self.test_store._process_data(blocking_channel, method_chains,
                                          properties,
                                          json.dumps(data).encode())

            mock_process_mongo_store.assert_called_once()
            mock_ack.assert_called_once()
            mock_send_hb.assert_called_once()

            key = data['origin_id']
            # getattr instead of eval: resolve Keys.get_alert_<metric>
            # without executing data-derived source code.
            alert_key_fn = getattr(Keys,
                                   'get_alert_{}'.format(data['metric']))

            stored_data = self.redis.hget(
                Keys.get_hash_parent(data['parent_id']), alert_key_fn(key))

            expected_data = {
                'severity': data['severity'],
                'message': data['message']
            }

            self.assertEqual(expected_data, json.loads(stored_data))
        except Exception as e:
            self.fail("Test failed: {}".format(e))
예제 #14
0
    def load_state(self, system: Union[System, GitHubRepo]) \
            -> Union[System, GitHubRepo]:
        """Restore a system's metrics from Redis into ``system``.

        For every metric the current in-memory value is passed to Redis as
        the default, so if Redis is down the state is effectively left
        unchanged. Each stored string is converted back to a float
        (``None`` stays ``None``) before being written into the system
        object via its setter.

        :param system: the system whose state should be reloaded.
        :return: the same ``system`` instance, updated in place.
        """
        self.logger.debug("Loading the state of %s from Redis", system)
        redis_hash = Keys.get_hash_parent(system.parent_id)
        system_id = system.system_id

        # Every metric follows the same naming pattern:
        #   current value:  system.<name>
        #   redis key:      Keys.get_system_<name>(system_id)
        #   setter:         system.set_<name>(value)
        # so the fifteen copy-pasted load stanzas collapse into one loop.
        # All of these metrics are floats, and the tuple preserves both the
        # original load order and the order of the summary log below.
        metric_names = (
            'process_cpu_seconds_total',
            'process_memory_usage',
            'virtual_memory_usage',
            'open_file_descriptors',
            'system_cpu_usage',
            'system_ram_usage',
            'system_storage_usage',
            'network_transmit_bytes_per_second',
            'network_receive_bytes_per_second',
            'network_transmit_bytes_total',
            'network_receive_bytes_total',
            'disk_io_time_seconds_in_interval',
            'disk_io_time_seconds_total',
            'last_monitored',
            'went_down_at',
        )

        loaded = {}
        for name in metric_names:
            # The current state doubles as the Redis default, so a Redis
            # outage leaves the metric untouched.
            state_value = getattr(system, name)
            redis_value = self.redis.hget(
                redis_hash,
                getattr(Keys, 'get_system_' + name)(system_id),
                state_value)
            value = convert_to_float_if_not_none(redis_value, None)
            getattr(system, 'set_' + name)(value)
            loaded[name] = value

        self.logger.debug(
            "Restored %s state: _process_cpu_seconds_total=%s, "
            "_process_memory_usage=%s, _virtual_memory_usage=%s, "
            "_open_file_descriptors=%s, _system_cpu_usage=%s, "
            "_system_ram_usage=%s, _system_storage_usage=%s, "
            "_network_transmit_bytes_per_second=%s, "
            "_network_receive_bytes_per_second=%s, "
            "_network_transmit_bytes_total=%s, "
            "_network_receive_bytes_total=%s, "
            "_disk_io_time_seconds_in_interval=%s, "
            "_disk_io_time_seconds_total=%s, _last_monitored=%s, "
            "_went_down_at=%s", system,
            *(loaded[name] for name in metric_names))

        return system
예제 #15
0
    def _process_redis_result_store(self, data: Dict) -> None:
        """Persist a system's full monitoring snapshot into Redis.

        Every metric is written (stringified) into the parent's hash under
        its own ``Keys.get_system_<metric>`` key, plus the monitor's
        ``last_monitored`` timestamp taken from the metadata.
        """
        meta_data = data['meta_data']
        system_id = meta_data['system_id']
        metrics = data['data']

        # All per-system metrics, in the exact order they are logged and
        # written to the Redis hash.
        metric_names = (
            'process_cpu_seconds_total',
            'process_memory_usage',
            'virtual_memory_usage',
            'open_file_descriptors',
            'system_cpu_usage',
            'system_ram_usage',
            'system_storage_usage',
            'network_transmit_bytes_per_second',
            'network_receive_bytes_per_second',
            'network_receive_bytes_total',
            'network_transmit_bytes_total',
            'disk_io_time_seconds_total',
            'disk_io_time_seconds_in_interval',
            'went_down_at',
        )

        self.logger.debug(
            'Saving %s state: _process_cpu_seconds_total=%s, '
            '_process_memory_usage=%s, _virtual_memory_usage=%s, '
            '_open_file_descriptors=%s, _system_cpu_usage=%s, '
            '_system_ram_usage=%s, _system_storage_usage=%s, '
            '_network_transmit_bytes_per_second=%s, '
            '_network_receive_bytes_per_second=%s, '
            '_network_receive_bytes_total=%s, '
            '_network_transmit_bytes_total=%s, '
            '_disk_io_time_seconds_total=%s, '
            '_disk_io_time_seconds_in_interval=%s, _went_down_at=%s, '
            '_last_monitored=%s', meta_data['system_name'],
            *(metrics[name] for name in metric_names),
            meta_data['last_monitored'])

        # Each metric maps to Keys.get_system_<name>(system_id); everything
        # is stored as a string, with last_monitored appended from metadata.
        fields = {
            getattr(Keys, 'get_system_' + name)(system_id):
                str(metrics[name])
            for name in metric_names
        }
        fields[Keys.get_system_last_monitored(system_id)] = \
            str(meta_data['last_monitored'])

        self.redis.hset_multiple(
            Keys.get_hash_parent(meta_data['system_parent_id']), fields)