# prometheus metrics state

# Duration of each API request, labelled by resource and HTTP method.
request_summary = Summary(
    'reststore_api_request_duration_seconds',
    'Time spent processing api request',
    ['resource', 'method'])


def request_timer(*labels):
    """Return a timing context for the request summary.

    ``labels`` are forwarded to ``Summary.labels`` (resource, method);
    the returned object times the enclosed request when used as a
    context manager or decorator.
    """
    return request_summary.labels(*labels).time()


# Current number of stored files, labelled by store name.
file_count_gauge = Gauge(
    'reststore_stored_files',
    'Number of files in reststore',
    ['store'])
# HACK: monkey-patch the gauge's internal sample hook so file counts are
# computed lazily at scrape time instead of being set explicitly.  The
# lambda defers the lookup of ``_counts``, which is defined further down.
# NOTE(review): relies on a private prometheus-client attribute — confirm
# it still exists in the pinned client version.
file_count_gauge._samples = lambda: _counts()

# Size of files moving through the store, labelled by store and
# direction (stored/fetched).
file_size_summary = Summary(
    'reststore_file_size_bytes',
    'Size of files stored/fetched in bytes',
    ['store', 'direction'])

# unfortunately do not have a way to query for current
# filestores, so we accumulate them as requests are seen
known_file_stores = set()


def _counts():
    """Build one gauge sample per known file store.

    Returns a list of ``(name_suffix, labels, value)`` tuples —
    presumably the shape prometheus-client's private ``_samples`` hook
    expects; verify against the installed client version.
    """
    return [('', {'store': store}, len(_get_files(store)))
            for store in known_file_stores]
from restq import config

# prometheus metrics state

# Duration of each API request, labelled by resource and HTTP method.
request_summary = Summary(
    'restq_api_request_duration_seconds',
    'Time spent processing api request',
    ['resource', 'method'])


def request_timer(*labels):
    """Return a timing context for the request summary.

    ``labels`` are forwarded to ``Summary.labels`` (resource, method);
    the returned object times the enclosed request when used as a
    context manager or decorator.
    """
    return request_summary.labels(*labels).time()


# Jobs currently queued, labelled by realm and queue.
job_gauge = Gauge(
    'restq_queued_jobs',
    'Number of jobs in restq realms/queues',
    ['realm', 'queue'])
# HACK: monkey-patch the gauge's internal sample hook so job counts are
# computed lazily at scrape time.  The lambda defers name lookup of the
# helper, which is defined further down in this module.
# NOTE(review): relies on a private prometheus-client attribute — confirm
# it still exists in the pinned client version.
job_gauge._samples = lambda: _get_job_stats()

# Tags currently present, labelled by realm.
tag_gauge = Gauge(
    'restq_queued_tags',
    'Number of tags in restq realms',
    ['realm'])
tag_gauge._samples = lambda: _get_tag_stats()


def _get_job_stats():
    """Build one gauge sample per (realm, queue) pair.

    Reads ``realms.get_status()`` and returns a list of
    ``(name_suffix, labels, value)`` tuples — presumably the shape
    prometheus-client's private ``_samples`` hook expects; verify
    against the installed client version.
    """
    status = realms.get_status()
    return [('', {'realm': name, 'queue': str(queue)}, count)
            for name, detail in status.items()
            for queue, count in detail['queues'].items()]


def _get_tag_stats():
    # NOTE(review): function body is truncated in this chunk — only the
    # visible portion is reproduced here; the remainder continues in the
    # original file.
    d = realms.get_status()