def setUp(self):
    self._halt_event = Event()
    self._redis_queue = gevent.queue.Queue()
    # Start the sink greenlet that drains the stats queue into Redis.
    self._redis_sink = OperationalStatsRedisSink(self._halt_event,
                                                 self._redis_queue,
                                                 _node_name)
    self._redis_sink.link_exception(_unhandled_greenlet_exception)
    self._redis_sink.start()
    self._redis_connection = create_redis_connection()
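The fixture above assumes a create_redis_connection() factory that is not shown in this listing. A minimal sketch of what it might look like, assuming the redis-py client and an optional host override (used later by _process_one_node in Example 4); the real module may differ:

import os
import redis

def create_redis_connection(host=None, port=6379, db=0):
    # Hypothetical sketch: return a Redis client for the given host,
    # defaulting to the local node when no host is supplied.
    if host is None:
        host = os.environ.get("REDIS_HOST", "localhost")
    return redis.StrictRedis(host=host, port=port, db=db)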
Example 3
    def _run(self):
        self._redis_connection = create_redis_connection()

        self._log.debug("start halt_event loop")
        while not self._halt_event.is_set():
            try:
                # Wake up once a second so the loop notices halt_event
                # even when the queue is idle.
                key, entry = self._redis_queue.get(block=True, timeout=1.0)
            except gevent.queue.Empty:
                continue
            self.store(key, entry)

        self._log.debug("end halt_event loop")
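The loop above hands each queued (key, entry) pair to self.store(), whose definition is not shown in this listing. One plausible sketch, assuming each entry is a (collection_id, count) pair accumulated into a Redis hash, which would be consistent with the hgetall() read in Example 4:

    def store(self, key, entry):
        # Hypothetical sketch of the store() method called by _run().
        # Assumes entry is a (collection_id, count) pair and folds it
        # into the Redis hash stored at `key`.
        collection_id, count = entry
        self._redis_connection.hincrby(key, collection_id, count)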
Example 4
def _process_one_node(node_name,
                      node_id,
                      timestamp_cutoff,
                      dedupe_set,
                      collection_ops_accounting_rows,
                      new_dedupes,
                      node_keys_processed):
    """Read one node's stats keys from Redis, aggregate the per-collection
    counts into collection_ops_accounting_rows, and record which keys were
    processed so the caller can dedupe and delete them."""
    log = logging.getLogger("_process_one_node")
    redis_connection = create_redis_connection(host=node_name)
    search_key = compute_search_key(node_name)
    keys = redis_connection.keys(search_key)
    log.debug("found {0} keys from {1}".format(len(keys), search_key))
    
    value_dict = dict()

    for key_bytes in keys:
        key = key_bytes.decode("utf-8")

        # the key embeds its own node name, timestamp, and stat name
        key_node_name, timestamp, partial_key = parse_key(key)

        if timestamp > timestamp_cutoff:
            log.debug("ignoring recent key {0}".format(key))
            continue

        node_keys_processed.append(key)
        if key in dedupe_set:
            log.debug("ignoring duplicate key {0}".format(key))
            continue

        log.info("node = {0}, key = {1}".format(key_node_name, key))

        hash_dict = redis_connection.hgetall(key)
        for collection_id_bytes, count_bytes in hash_dict.items():
            collection_id = int(collection_id_bytes)
            count = int(count_bytes)

            log.info("    collection_id = {0}, count = {1}".format(
                collection_id, count))

            value_key = (timestamp, collection_id, )
            if value_key not in value_dict:
                # First occurrence of this (timestamp, collection_id)
                # pair: start a fresh accounting row.
                value_dict[value_key] = _collection_ops_accounting_row(
                    node_id, collection_id, timestamp)

            value_dict[value_key][partial_key] += count
        new_dedupes.append((node_id, key, ))

    collection_ops_accounting_rows.extend(value_dict.values())
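_process_one_node relies on two helpers, compute_search_key() and parse_key(), whose definitions are not shown. A hedged sketch, assuming keys are dot-separated "<node_name>.<epoch_seconds>.<partial_key>" strings so that timestamp compares numerically against timestamp_cutoff; the real key layout may differ:

def compute_search_key(node_name):
    # Glob pattern matching every stats key published by one node.
    return "{0}.*".format(node_name)

def parse_key(key):
    # Assumed layout: "<node_name>.<epoch_seconds>.<partial_key>"
    key_node_name, timestamp_str, partial_key = key.split(".", 2)
    return key_node_name, int(timestamp_str), partial_key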
Example 5
def _remove_processed_keys(node_name, keys_processed):
    log = logging.getLogger("_remove_processed_keys")
    redis_connection = create_redis_connection(host=node_name)
    for key in keys_processed:
        log.info("removing {0}".format(key))
        redis_connection.delete(key)
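With many processed keys, one DELETE round trip per key adds up. A possible variant, not in the original, that batches the deletes using redis-py's pipeline support:

def _remove_processed_keys_batched(node_name, keys_processed):
    # Sketch of a batched variant: queue every delete in one pipeline
    # and send them to the node in a single round trip.
    redis_connection = create_redis_connection(host=node_name)
    pipeline = redis_connection.pipeline()
    for key in keys_processed:
        pipeline.delete(key)
    pipeline.execute()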