def list(self):
    """Generator pulling messages from the Redis stream and yielding decoded tiles.

    Blocks up to the configured timeout waiting for new messages; when none
    arrive, tries to claim messages pending on dead consumers, and stops
    entirely when the queue is drained and ``_stop_if_empty`` is set.
    """
    processed = 0
    while True:
        responses = self._redis.xreadgroup(
            groupname=STREAM_GROUP, consumername=CONSUMER_NAME,
            streams={self._name: '>'}, count=1, block=round(self._timeout_ms))
        if not responses:
            # Nothing fresh: try to steal stale pending messages and reset the gauge.
            responses = self._claim_olds()
            stats.set_gauge(['redis', self._name_str, 'nb_messages'], 0)
        if responses is None and self._stop_if_empty:
            break
        if not responses:
            continue
        for stream_name, stream_entries in responses:
            assert stream_name == self._name
            for entry_id, fields in stream_entries:
                try:
                    yield decode_message(fields[b'message'], from_redis=True, sqs_message=entry_id)
                except Exception:
                    # A corrupt message must not kill the consumer loop.
                    logger.warning('Failed decoding the Redis message', exc_info=True)
                    stats.increment_counter(['redis', self._name_str, 'decode_error'])
                processed += 1
                if processed % 100 == 0:
                    # Periodic refresh of the queue-length gauge.
                    stats.set_gauge(['redis', self._name_str, 'nb_messages'],
                                    self._redis.xlen(name=self._name))
def list(self):
    """Yield tiles decoded from messages read off the Redis stream (generator)."""
    count = 0  # messages processed so far, drives the periodic gauge refresh
    while True:
        # Block until a new message arrives on the stream or the timeout expires.
        queues = self._redis.xreadgroup(groupname=STREAM_GROUP, consumername=CONSUMER_NAME,
                                        streams={self._name: '>'}, count=1,
                                        block=round(self._timeout_ms))
        if not queues:
            # No fresh message: try to claim messages pending on dead consumers.
            queues = self._claim_olds()
            stats.set_gauge(['redis', self._name_str, 'nb_messages'], 0)
        if queues is None and self._stop_if_empty:
            # _claim_olds() found nothing either and we are configured to stop.
            break
        if queues:
            for redis_message in queues:
                queue_name, queue_messages = redis_message
                assert queue_name == self._name
                for message in queue_messages:
                    id_, body = message
                    try:
                        tile = decode_message(body[b'message'], from_redis=True, sqs_message=id_)
                        yield tile
                    except Exception:
                        # Keep consuming even when one message is corrupt.
                        logger.warning('Failed decoding the Redis message', exc_info=True)
                        stats.increment_counter(['redis', self._name_str, 'decode_error'])
                    count += 1
                    if count % 100 == 0:
                        # Periodically refresh the queue-length gauge.
                        stats.set_gauge(['redis', self._name_str, 'nb_messages'],
                                        self._redis.xlen(name=self._name))
def _check_roundtrip() -> None:
    """Emit a uniquely-named log line and wait until it shows up in Elasticsearch.

    Reports the measured ingestion time as the ``roundtrip`` gauge; on timeout,
    reports twice ``LOG_TIMEOUT`` as a sentinel value.
    """
    check_uuid = str(uuid.uuid4())
    # emit the log we are going to look for; the UUID in the logger name
    # makes the search query unambiguous
    logger_name = LOGGER_NAME + "." + check_uuid
    logger = logging.getLogger(logger_name)
    logger.setLevel(logging.INFO)
    logger.info("Test roundtrip")
    query = {"query": {"match_phrase": {"logger_name": logger_name}}}
    start = time.monotonic()
    while time.monotonic() < start + LOG_TIMEOUT:
        r = requests.post(SEARCH_URL, json=query, headers=SEARCH_HEADERS)
        r.raise_for_status()
        # Renamed from `json` to avoid shadowing the stdlib module.
        content = r.json()
        found = content['hits']['total']
        if isinstance(found, dict):
            # Elasticsearch >= 7 wraps the total in an object ({"value": N, ...});
            # the sibling implementation already handles this form.
            found = found['value']
        if found > 0:
            LOG.info("Found the test log line.")
            stats.set_gauge(['roundtrip'], time.monotonic() - start)
            return
        else:
            LOG.info("Didn't find the test log line. Wait 1s...")
            time.sleep(1)
    LOG.warning("Timeout waiting for the test log line")
    stats.set_gauge(['roundtrip'], LOG_TIMEOUT * 2)
def hello_get(request):
    """Fetch the hello row through the slave (read-only) DB session."""
    # Only the SQL read is timed.
    with timer_context(['sql', 'read_hello']):
        row = models.DBSession.query(models.Hello).first()
    # Exercise the stats API (counter + tagged gauge).
    increment_counter(['test', 'counter'])
    gauge_tags = {'value': 24, 'toto': 'tutu'}
    set_gauge(['test', 'gauge/s'], 42, tags=gauge_tags)
    return {'value': row.value}
def hello_get(_):
    """Read the hello value using the slave (read-only) DB session."""
    # Time only the SQL read.
    with timer_context(["sql", "read_hello"]):
        hello = models.DBSession.query(models.Hello).first()
    # Exercise the stats API so the metrics pipeline can be checked end to end.
    increment_counter(["test", "counter"])
    set_gauge(["test", "gauge/s"], 42, tags={"value": 24, "toto": "tutu"})
    return {"value": hello.value}
def main() -> None:
    """Report the age of the newest log entry, then optionally run the
    Elasticsearch roundtrip check when LOG_TIMEOUT is configured."""
    with stats.outcome_timer_context(['get_max_timestamp']):
        newest = _max_timestamp()
    current = datetime.datetime.now(newest.tzinfo)
    log_age = round((current - newest).total_seconds())
    LOG.info("Last log age: %ss", log_age)
    stats.set_gauge(['max_age'], log_age)
    if 'LOG_TIMEOUT' in os.environ:
        _check_roundtrip()
def _redis_status(gene):
    """Print the approximate Redis queue backlog and report it as a gauge."""
    conf = gene.config['redis']
    queue_name = conf['queue']
    # Queue names are stored with a 'queue_' prefix; add it when missing.
    if not queue_name.startswith('queue_'):
        queue_name = 'queue_' + queue_name
    prefix = ['redis', queue_name]
    with stats.timer_context(prefix + ['get_stats']):
        connection = redis.StrictRedis.from_url(conf['url'])
        backlog = connection.llen(queue_name)
    print("Approximate number of tiles to generate: {nb_messages}".format(
        nb_messages=backlog))
    stats.set_gauge(prefix + ['nb_messages'], backlog)
def get_status(self):
    """Return a map of human-readable statistics about the tile queue."""
    # Read from the replica to keep load off the master.
    queue_length = self._slave.xlen(self._name)
    pending_info = self._slave.xpending(self._name, STREAM_GROUP)
    errors = self._get_errors()
    stats.set_gauge(["redis", self._name_str, "nb_messages"], queue_length)
    status = {
        "Approximate number of tiles to generate": queue_length,
        "Approximate number of generating tiles": pending_info["pending"],
        "Tiles in error": ", ".join(errors),
    }
    return status
def get_status(self):
    """Return a map of human-readable statistics about the Redis tile queue."""
    # Total number of entries currently in the stream.
    nb_messages = self._redis.xlen(self._name)
    # Messages delivered to consumers of the group but not yet acknowledged.
    pending = self._redis.xpending(self._name, STREAM_GROUP)
    tiles_in_error = self._get_errors()
    stats.set_gauge(['redis', self._name_str, 'nb_messages'], nb_messages)
    return {
        "Approximate number of tiles to generate": nb_messages,
        "Approximate number of generating tiles": pending['pending'],
        "Tiles in error": ', '.join(tiles_in_error)
    }
def _check_roundtrip() -> None:
    """Emit a uniquely-tagged log line and wait for it to appear in Elasticsearch.

    Measures the ingestion roundtrip time and reports it as the ``roundtrip``
    gauge; on timeout, reports twice ``LOG_TIMEOUT`` as a sentinel.
    """
    check_uuid = str(uuid.uuid4())
    # emit the log we are going to look for
    logger_name = LOGGER_NAME + "." + check_uuid
    logger = logging.getLogger(logger_name)
    logger.setLevel(logging.INFO)
    logger.info("Test roundtrip")
    query = {"query": {"match_phrase": {"log.logger": logger_name}}}
    start = time.monotonic()
    while time.monotonic() < start + LOG_TIMEOUT:
        r = None
        exception = None
        for _ in range(
                int(os.environ.get("C2CWSGIUTILS_CHECK_ES_TRYNUMBER", 10))):
            try:
                r = requests.post(SEARCH_URL, json=query, headers=SEARCH_HEADERS)
                exception = None
                if r.ok:
                    # Bug fix: the original `continue`d on success, re-issuing
                    # the same request until the retry budget was exhausted.
                    break
            except requests.exceptions.RequestException as e:
                # Bug fix: the original evaluated `r.ok` even when the request
                # raised, hitting an unbound `r` on a first-attempt failure.
                logger.exception("Error on querying Elasticsearch")
                exception = e
            time.sleep(float(os.environ.get("C2CWSGIUTILS_CHECK_ES_SLEEP", 1)))
        if exception is not None:
            # Every attempt raised: surface the last network error.
            raise exception
        r.raise_for_status()
        # Renamed from `json` to avoid shadowing the stdlib module.
        content = r.json()
        found = content["hits"]["total"]
        if isinstance(found, dict):
            # Elasticsearch >= 7 wraps the total in an object.
            found = found["value"]
        if found > 0:
            LOG.info("Found the test log line.")
            stats.set_gauge(["roundtrip"], time.monotonic() - start)
            return
        else:
            LOG.info("Didn't find the test log line. Wait 1s...")
            time.sleep(1)
    LOG.warning("Timeout waiting for the test log line")
    stats.set_gauge(["roundtrip"], LOG_TIMEOUT * 2)
def main() -> None:
    """Run the command."""
    try:
        # Bootstrap the application from the standard command-line options.
        parser = argparse.ArgumentParser(description="Check logs on Elasticsearch")
        c2cwsgiutils.setup_process.fill_arguments(parser)
        options = parser.parse_args()
        c2cwsgiutils.setup_process.bootstrap_application_from_options(options)
        with stats.outcome_timer_context(["get_max_timestamp"]):
            newest = _max_timestamp()
        current = datetime.datetime.now(newest.tzinfo)
        log_age = round((current - newest).total_seconds())
        LOG.info("Last log age: %ss", log_age)
        stats.set_gauge(["max_age"], log_age)
        if "LOG_TIMEOUT" in os.environ:
            _check_roundtrip()
    except:  # pylint: disable=bare-except
        # Top-level boundary: log everything and fail with a non-zero exit code.
        LOG.exception("Exception during run")
        sys.exit(1)
def status(gene):  # pragma: no cover
    """Print the SQS queue status and report the backlog size as a gauge."""
    # get SQS status
    stats_prefix = ['SQS', gene.config.get('sqs', {}).get('queue', 'unknown')]
    # Time only the round-trip that fetches the queue attributes.
    with stats.timer_context(stats_prefix + ['get_stats']):
        queue = gene.get_sqs_queue()
        queue.load()
    attributes = dict(queue.attributes)
    # SQS returns epoch seconds as strings; make them human-readable.
    attributes["CreatedTimestamp"] = time.ctime(int(attributes["CreatedTimestamp"]))
    attributes["LastModifiedTimestamp"] = time.ctime(int(attributes["LastModifiedTimestamp"]))
    print(
        """Approximate number of tiles to generate: {ApproximateNumberOfMessages}
    Approximate number of generating tiles: {ApproximateNumberOfMessagesNotVisible}
    Delay in seconds: {DelaySeconds}
    Receive message wait time in seconds: {ReceiveMessageWaitTimeSeconds}
    Visibility timeout in seconds: {VisibilityTimeout}
    Queue creation date: {CreatedTimestamp}
    Last modification in tile queue: {LastModifiedTimestamp}""".format(**attributes)
    )
    stats.set_gauge(stats_prefix + ['nb_messages'],
                    int(attributes['ApproximateNumberOfMessages']))
def list(self) -> Iterator[Tile]:
    """Yield tiles decoded from the Redis stream (generator); blocks for new messages."""
    count = 0  # messages processed so far, drives the periodic gauge refresh
    while True:
        # Block on the master until a new message arrives or the timeout expires.
        queues = self._master.xreadgroup(
            groupname=STREAM_GROUP,
            consumername=CONSUMER_NAME,
            streams={self._name: ">"},
            count=1,
            block=round(self._timeout_ms),
        )
        logger.debug("Get %d new elements", len(queues))
        if not queues:
            # No fresh message: try to claim messages pending on dead consumers.
            queues = self._claim_olds()
            if queues is None:
                # Queue really empty: zero out the gauges.
                stats.set_gauge(["redis", self._name_str, "nb_messages"], 0)
                stats.set_gauge(["redis", self._name_str, "pending"], 0)
        if queues is None and self._stop_if_empty:
            break
        if queues:
            for redis_message in queues:
                queue_name, queue_messages = redis_message
                assert queue_name == self._name
                for message in queue_messages:
                    id_, body = message
                    try:
                        tile = decode_message(body[b"message"], from_redis=True, sqs_message=id_)
                        yield tile
                    except Exception:
                        # Keep consuming even when one message is corrupt.
                        logger.warning("Failed decoding the Redis message", exc_info=True)
                        stats.increment_counter(
                            ["redis", self._name_str, "decode_error"])
                    count += 1
                    if count % 10 == 0:
                        # Every 10 messages, refresh the queue gauges from the
                        # read replica to keep load off the master.
                        stats.set_gauge(
                            ["redis", self._name_str, "nb_messages"],
                            self._slave.xlen(name=self._name),
                        )
                        pending = self._slave.xpending(
                            self._name, STREAM_GROUP)  # type: ignore
                        stats.set_gauge(["redis", self._name_str, "pending"],
                                        pending["pending"])