Example #1
def collect_snmp(config, host, port=161):
  """Scrape a host and return prometheus text format for it"""

  start = time.time()
  metrics = {}
  for metric in config['metrics']:
    metrics[metric['name']] = Metric(metric['name'], 'SNMP OID {0}'.format(metric['oid']), 'untyped')

  values = walk_oids(host, port, config['walk'])
  oids = {}
  for oid, value in values:
    oids[tuple(oid)] = value

  for oid, value in oids.items():
    for metric in config['metrics']:
      prefix = oid_to_tuple(metric['oid'])
      if oid[:len(prefix)] == prefix:
        value = float(value)
        indexes = oid[len(prefix):]
        labels = parse_indexes(indexes, metric.get('indexes', {}), metric.get('lookups', {}), oids)
        metrics[metric['name']].add_sample(metric['name'], value=value, labels=labels)

  class Collector():
    def collect(self):
      return metrics.values()
  registry = CollectorRegistry()
  registry.register(Collector())
  duration = Gauge('snmp_scrape_duration_seconds', 'Time this SNMP scrape took, in seconds', registry=registry)
  duration.set(time.time() - start)
  walked = Gauge('snmp_oids_walked', 'Number of oids walked in this scrape', registry=registry)
  walked.set(len(oids))
  return generate_latest(registry)
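Example #1 (and several later snippets) hands a throwaway CollectorRegistry a custom collector object whose collect() returns pre-built Metric instances. A minimal, self-contained sketch of that pattern, assuming only that prometheus_client is installed; the metric name and OID below are made up for illustration:

from prometheus_client import CollectorRegistry, generate_latest
from prometheus_client.core import Metric


class StaticCollector:
    """Yield one pre-built Metric whenever the registry is scraped."""

    def collect(self):
        metric = Metric('example_snmp_value', 'SNMP OID 1.3.6.1.2.1.1.3.0', 'untyped')
        metric.add_sample('example_snmp_value', value=42.0, labels={'index': '0'})
        yield metric


registry = CollectorRegistry()
registry.register(StaticCollector())
print(generate_latest(registry).decode('utf-8'))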
Example #2
def collect_snmp(config, host, port=161):
  """Scrape a host and return prometheus text format for it"""

  metrics = {}
  for metric in config['metrics']:
    metrics[metric['name']] = Metric(metric['name'], 'SNMP OID {0}'.format(metric['oid']), 'untyped')

  values = walk_oids(host, port, config['walk'])
  oids = {}
  for oid, value in values:
    oids[tuple(oid)] = value

  for oid, value in oids.items():
    for metric in config['metrics']:
      prefix = oid_to_tuple(metric['oid'])
      if oid[:len(prefix)] == prefix:
        value = float(value)
        indexes = oid[len(prefix):]
        labels = parse_indexes(indexes, metric.get('indexes', {}), oids)
        metrics[metric['name']].add_sample(metric['name'], value=value, labels=labels)

  class Collector():
    def collect(self):
      return metrics.values()
  registry = CollectorRegistry()
  registry.register(Collector())
  return generate_latest(registry)
class TestCounter(unittest.TestCase):

    def setUp(self):
        self.registry = CollectorRegistry()

    def test_initialize(self):
        c = Counter('test_value', 'Testing roller', registry=self.registry)
        r = CounterRoller(c, registry=self.registry)

        self.assertEqual(r.name, 'test_value_sum_rolled')

    def test_initialize_errors(self):
        # Raise error because tried to use wrong type of item
        def wrong_type_exception():
            h = Histogram('test_value', 'Testing roller', registry=self.registry)
            roller = CounterRoller(h, registry=self.registry)
        self.assertRaises(ValueError, wrong_type_exception)

        # Update seconds must be > 0
        def update_seconds_lt_1_exception():
            c = Counter('test_value', 'Testing roller', registry=self.registry)
            roller = CounterRoller(c, registry=self.registry, options={
                'update_seconds': 0
            })
        self.assertRaises(ValueError, update_seconds_lt_1_exception)

        # Update seconds must be a multiple of 1
        def update_seconds_not_divisible_by_1_exception():
            c = Counter('test_value', 'Testing roller', registry=self.registry)
            roller = CounterRoller(c, registry=self.registry, options={
                'update_seconds': 2.5
            })
        self.assertRaises(ValueError, update_seconds_not_divisible_by_1_exception)

    def test_collect(self):
        c = Counter('test_value', 'Testing roller', registry=self.registry)
        r = CounterRoller(c, registry=self.registry)

        r.collect()
        nchecks = 0
        for m in self.registry.collect():
            if m.name.endswith('sum_rolled'):
                for name, labels, val in m.samples:
                    self.assertEqual(val, 0.0)
                    nchecks += 1
        self.assertTrue(nchecks > 0)

        c.inc()
        c.inc(1.5)
        r.collect()

        nchecks = 0
        for m in self.registry.collect():
            if m.name.endswith('sum_rolled'):
                for name, labels, val in m.samples:
                    self.assertEqual(val, 2.5)
                    nchecks += 1
        self.assertTrue(nchecks > 0)
Example #4
def collect_snmp(config, host, port=161):
  """Scrape a host and return prometheus text format for it"""

  start = time.time()
  metrics = {}
  for metric in config['metrics']:
    metrics[metric['name']] = Metric(metric['name'], 'SNMP OID {0}'.format(metric['oid']), 'untyped')

  do_bulkget = 'bulkget' not in config or config['bulkget']
  values = walk_oids(host, port, config['walk'], config.get('community', 'public'), do_bulkget)

  oids = {}
  for oid, value in values:
    oids[oid_to_tuple(oid)] = value

  # Netsnmp doesn't tell us if an error has occurred, so
  # try to spot one by the absence of results.
  if not oids:
    raise Exception("No OIDs returned, device not responding?")

  # Build a tree from the rules based on oid for faster lookup.
  metric_tree = {}
  for metric in config['metrics']:
    prefix = oid_to_tuple(metric['oid'])
    head = metric_tree
    for i in prefix:
      head.setdefault('children', {})
      head['children'].setdefault(i, {})
      head = head['children'][i]
    head['entry'] = metric

  for oid, value in oids.items():
    head = metric_tree
    for i in oid:
      head = head.get('children', {}).get(i)
      if not head:
        break
      if 'entry' in head:
        metric = head['entry']

        prefix = oid_to_tuple(metric['oid'])
        value = float(value)
        indexes = oid[len(prefix):]
        labels = parse_indexes(indexes, metric.get('indexes', {}), metric.get('lookups', {}), oids)
        metrics[metric['name']].add_sample(metric['name'], value=value, labels=labels)
        break

  class Collector():
    def collect(self):
      return metrics.values()
  registry = CollectorRegistry()
  registry.register(Collector())
  duration = Gauge('snmp_scrape_duration_seconds', 'Time this SNMP scrape took, in seconds', registry=registry)
  duration.set(time.time() - start)
  walked = Gauge('snmp_oids_walked', 'Number of oids walked in this scrape', registry=registry)
  walked.set(len(oids))
  return generate_latest(registry)
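The prefix tree that Example #4 builds from the rules is plain nested dictionaries, so it can be exercised on its own. A sketch with a simplified, inlined oid_to_tuple and two made-up interface-counter rules:

def oid_to_tuple(oid_str):
    # Simplified stand-in: split a dotted OID string into a tuple of ints.
    return tuple(int(part) for part in oid_str.split('.'))

rules = [
    {'oid': '1.3.6.1.2.1.2.2.1.10', 'name': 'ifInOctets'},
    {'oid': '1.3.6.1.2.1.2.2.1.16', 'name': 'ifOutOctets'},
]

# Build the tree: each node maps the next OID component to a child node and
# stores the matching rule under 'entry' at the prefix's last component.
metric_tree = {}
for rule in rules:
    head = metric_tree
    for component in oid_to_tuple(rule['oid']):
        head = head.setdefault('children', {}).setdefault(component, {})
    head['entry'] = rule

def lookup(oid):
    """Walk the tree along the OID; return the first rule whose prefix matches."""
    head = metric_tree
    for component in oid:
        head = head.get('children', {}).get(component)
        if head is None:
            return None
        if 'entry' in head:
            return head['entry']
    return None

print(lookup(oid_to_tuple('1.3.6.1.2.1.2.2.1.10.3')))  # -> the ifInOctets rule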
Example #5
class TestSummary(unittest.TestCase):
    def setUp(self):
        self.registry = CollectorRegistry()
        self.summary = Summary('s', 'help', registry=self.registry)

    def test_summary(self):
        self.assertEqual(0, self.registry.get_sample_value('s_count'))
        self.assertEqual(0, self.registry.get_sample_value('s_sum'))
        self.summary.observe(10)
        self.assertEqual(1, self.registry.get_sample_value('s_count'))
        self.assertEqual(10, self.registry.get_sample_value('s_sum'))

    def test_function_decorator(self):
        self.assertEqual(0, self.registry.get_sample_value('s_count'))

        @self.summary.time()
        def f():
            pass

        f()
        self.assertEqual(1, self.registry.get_sample_value('s_count'))

    def test_block_decorator(self):
        self.assertEqual(0, self.registry.get_sample_value('s_count'))
        with self.summary.time():
            pass
        self.assertEqual(1, self.registry.get_sample_value('s_count'))
Example #6
def collect_snmp(config, host, port=161):
  """Scrape a host and return prometheus text format for it"""

  start = time.time()
  metrics = {}
  for metric in config['metrics']:
    prom_type = metric.get('metric_type', 'gauge')
    prom_help = metric.get('metric_help', 'SNMP OID {0}'.format(metric.get('oid', 'NaN')))
    metrics[metric['name']] = Metric(metric['name'], prom_help, prom_type)
  values = walk_oids(host, port, config['walk'], config.get('community', 'public'), config.get('timeout', 5), config.get('retries', 3))
  oids = {}
  for oid, value in values:
    key = oid_to_tuple(oid)
    # Keep the first non-empty value seen for each OID.
    if key not in oids or (not oids[key] and value):
      oids[key] = value

  for oid, value in oids.items():
    for metric in config['metrics']:
      prefix = oid_to_tuple(metric['oid'])
      if oid[:len(prefix)] == prefix:
        try:
          value = float(value)
        except ValueError as e:
          print(e)
          value = 0.0

        indexes = oid[len(prefix):]
        labels = parse_indexes(indexes, metric.get('indexes', {}), metric.get('lookups', {}), oids)
        metrics[metric['name']].add_sample(metric['name'], value=value, labels=labels)

  class Collector():
    def collect(self):
      return metrics.values()
  registry = CollectorRegistry()
  registry.register(Collector())
  duration = Gauge('snmp_scrape_duration_seconds', 'Time this SNMP scrape took, in seconds', registry=registry)
  duration.set(time.time() - start)
  walked = Gauge('snmp_oids_walked', 'Number of oids walked in this scrape', registry=registry)
  walked.set(len(oids))
  return generate_latest(registry)
Example #7
class TestGenerateText(unittest.TestCase):
    def setUp(self):
        self.registry = CollectorRegistry()

    def test_counter(self):
        c = Counter('cc', 'A counter', registry=self.registry)
        c.inc()
        self.assertEqual(b'# HELP cc A counter\n# TYPE cc counter\ncc 1.0\n', generate_latest(self.registry))

    def test_gauge(self):
        g = Gauge('gg', 'A gauge', registry=self.registry)
        g.set(17)
        self.assertEqual(b'# HELP gg A gauge\n# TYPE gg gauge\ngg 17.0\n', generate_latest(self.registry))

    def test_summary(self):
        s = Summary('ss', 'A summary', ['a', 'b'], registry=self.registry)
        s.labels('c', 'd').observe(17)
        self.assertEqual(b'# HELP ss A summary\n# TYPE ss summary\nss_count{a="c",b="d"} 1.0\nss_sum{a="c",b="d"} 17.0\n', generate_latest(self.registry))

    def test_unicode(self):
        c = Counter('cc', '\u4500', ['l'], registry=self.registry)
        c.labels('\u4500').inc()
        self.assertEqual(b'# HELP cc \xe4\x94\x80\n# TYPE cc counter\ncc{l="\xe4\x94\x80"} 1.0\n', generate_latest(self.registry))

    def test_escaping(self):
        c = Counter('cc', 'A\ncount\\er', ['a'], registry=self.registry)
        c.labels('\\x\n"').inc(1)
        self.assertEqual(b'# HELP cc A\\ncount\\\\er\n# TYPE cc counter\ncc{a="\\\\x\\n\\""} 1.0\n', generate_latest(self.registry))

    def test_nonnumber(self):
        class MyNumber():
            def __repr__(self):
              return "MyNumber(123)"
            def __float__(self):
              return 123.0
        class MyCollector():
            def collect(self):
                metric = Metric("nonnumber", "Non number", 'untyped')
                metric.add_sample("nonnumber", {}, MyNumber())
                yield metric
        self.registry.register(MyCollector())
        self.assertEqual(b'# HELP nonnumber Non number\n# TYPE nonnumber untyped\nnonnumber 123.0\n', generate_latest(self.registry))
Example #8
class TestCounter(unittest.TestCase):
    def setUp(self):
        self.registry = CollectorRegistry()
        self.counter = Counter('c', 'help', registry=self.registry)

    def test_increment(self):
        self.assertEqual(0, self.registry.get_sample_value('c'))
        self.counter.inc()
        self.assertEqual(1, self.registry.get_sample_value('c'))
        self.counter.inc(7)
        self.assertEqual(8, self.registry.get_sample_value('c'))

    def test_negative_increment_raises(self):
        self.assertRaises(ValueError, self.counter.inc, -1)

    def test_function_decorator(self):
        @self.counter.count_exceptions(ValueError)
        def f(r):
            if r:
                raise ValueError
            else:
                raise TypeError

        try:
            f(False)
        except TypeError:
            pass
        self.assertEqual(0, self.registry.get_sample_value('c'))

        try:
            f(True)
        except ValueError:
            raised = True
        self.assertEqual(1, self.registry.get_sample_value('c'))

    def test_block_decorator(self):
        with self.counter.count_exceptions():
            pass
        self.assertEqual(0, self.registry.get_sample_value('c'))

        raised = False
        try:
            with self.counter.count_exceptions():
                raise ValueError
        except:
            raised = True
        self.assertTrue(raised)
        self.assertEqual(1, self.registry.get_sample_value('c'))
def parse_config():
    registry = CollectorRegistry()
    config = configparser.ConfigParser()
    config.read_file(
        open(
            os.environ.get(
                "BORG_REPO_EXPORTER_CONFIG_PATH",
                "/usr/local/etc/borg_repo_exporter/config.conf",
            )))
    output_path = config.get(
        "borg_repo_exporter",
        "output_path",
        fallback="/var/tmp/node_exporter/borg_repo_exporter.prom",
    )
    instance = config.get("borg_repo_exporter",
                          "instance",
                          fallback=socket.gethostname())

    collectors = []
    gauges = {}
    for section in config.sections():
        if section == "borg_repo_exporter":
            continue
        c = config[section]
        passphrase = c["passphrase"]
        path = c["path"]
        ignorelist = c.get("ignorelist", "").split(",")
        prefix = c.get("prefix", "")
        collectors.append(
            BorgCollector(gauges, registry, passphrase, path, ignorelist,
                          prefix))
    return {
        "collectors": collectors,
        "output_path": output_path,
        "registry": registry,
        "instance": instance,
    }
Example #10
 def __init__(self, name, session=None):
     self.config = Config()
     fh = logging.handlers.WatchedFileHandler(self.config.LOG_FILE)
     formatter = logging.Formatter(
         '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
     fh.setFormatter(formatter)
     self.logger = logging.Logger(name=name)
     self.logger.addHandler(fh)
     self.logger.setLevel(getattr(logging, self.config.LOG_LEVEL))
     self.name = name
     self.redis = redis.Redis(host='localhost',
                              port=6379,
                              db=0,
                              health_check_interval=30)
     if self.config.PUSH_PROMETHEUS:
         self.prom_registry = CollectorRegistry()
         self.prom_gauges = {}
     if not session:
         self.session = requests.Session()
     else:
         self.session = session
     self.get_bearer()
     self.logger.debug("DO_POST = %s", self.config.DO_POST)
     self.logger.debug("PUSH_PROMETHEUS = %s", self.config.PUSH_PROMETHEUS)
     if self.config.DO_POST:
         self.create_source_and_type()
     if not self.data_types:
         self.data_types = {
             'default': {
                 'name': 'Default data type',
                 'units': 'default',
                 'description': 'Base class built in type'
             }
         }
     self.data_queue = []
     self.logger.debug("Init completed")
     self.last_observation = None
Example #11
    def notify_success(self, source, hostname, filename, stats):
        registry = CollectorRegistry()

        s = Summary('backup_size',
                    'Size of backup file in bytes',
                    registry=registry)
        s.observe(stats.size)
        s = Summary(
            'backup_dumptime',
            'Time taken to dump and compress/encrypt backup in seconds',
            registry=registry)
        s.observe(stats.dumptime)
        s = Summary('backup_uploadtime',
                    'Time taken to upload backup in seconds',
                    registry=registry)
        s.observe(stats.uploadtime)
        if stats.retained_copies is not None:
            g = Gauge('backup_retained_copies',
                      'Number of retained backups found on destination',
                      registry=registry)
            g.set(stats.retained_copies)
        g = Gauge('backup_timestamp',
                  'Time backup completed as seconds-since-the-epoch',
                  registry=registry)
        g.set_to_current_time()

        def auth_handler(url, method, timeout, headers, data):
            return basic_auth_handler(url, method, timeout, headers, data,
                                      self.username, self.password)

        push_to_gateway(self.url,
                        job=source.id,
                        registry=registry,
                        handler=auth_handler)

        logging.info("Pushed metrics for job '%s' to gateway (%s)" %
                     (source.id, self.url))
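Example #11 wires HTTP basic auth into push_to_gateway through a small wrapper around basic_auth_handler. A minimal sketch of that call in isolation, with a placeholder gateway address, job name, and credentials:

from prometheus_client import CollectorRegistry, Gauge, push_to_gateway
from prometheus_client.exposition import basic_auth_handler


def auth_handler(url, method, timeout, headers, data):
    # Same signature push_to_gateway expects; credentials are appended here.
    return basic_auth_handler(url, method, timeout, headers, data,
                              'backup-user', 'backup-password')


registry = CollectorRegistry()
Gauge('backup_timestamp', 'Time backup completed as seconds-since-the-epoch',
      registry=registry).set_to_current_time()
push_to_gateway('pushgateway.example.com:9091', job='example_backup',
                registry=registry, handler=auth_handler)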
Example #12
def push(reporter_name, reporter_value, report_type):
    if push.failures >= RETRIES:
        return

    registry = CollectorRegistry()

    if report_type is ReportType.metric:
        label_names = ['metric_name', 'push_gateway_type']
        label_values = [reporter_name, 'metric']
        gauge_name = REPORTER_PUSH_GATEWAY_METRIC_PREFIX + "_" + reporter_name
        gauge_value = reporter_value

    else:
        label_names = ['param_name', 'param_value', 'push_gateway_type']
        label_values = [reporter_name, reporter_value, 'parameter']
        gauge_name = REPORTER_PUSH_GATEWAY_METRIC_PARAMETER + "_" + reporter_name
        gauge_value = 1

    gauge = Gauge(name=gauge_name,
                  documentation="",
                  labelnames=label_names,
                  registry=registry)

    gauge.labels(*label_values).set(gauge_value)

    try:
        pushadd_to_gateway(
            gateway=os.environ[GATEWAY_URL_KEY],
            job=PUSH_GATEWAY_JOB_NAME,
            registry=registry,
            grouping_key={GROUPING_KEY: os.environ[GROUPING_KEY]})

        push.failures = 0
    except IOError as e:
        runai.utils.log.error('Failed pushing registry to push gateway (%s)',
                              e)
        push.failures += 1
Example #13
def generate_prometheus_exporter_from_storage():
	registry = CollectorRegistry()
	common_p_metric = ['yhat', 'yhat_lower', 'yhat_upper']
	t_gap = timedelta(minutes=3)
	# cst_tz = timezone('Asia/Shanghai')
	# tn = datetime.now().replace(tzinfo=cst_tz)
	tn = datetime.now()
	stn_str = (tn-t_gap).strftime('%Y-%m-%d %H:%M')
	etn_str = (tn+t_gap).strftime('%Y-%m-%d %H:%M')
	g_map = {}
	if not os.path.exists(TMP_FILE) or time.time() - os.path.getmtime(TMP_FILE) > (4 * 60):
		for x in storage.read_all():
			nearest_index = x['data'].index.get_loc(tn, method='nearest')
			if nearest_index >= (len(x['data'])-1):
				continue
			# print(len(x['data'].to_csv(index=True)))
			for pm in common_p_metric:
				#metric = "%s_%s_%d" % (pm, x['model'].register_metric.name, x['model'].pk)
				metric = "%s_%s" % (pm, x['model'].register_metric.name)
				if metric not in g_map:
					g_map[metric] = Gauge(
						name=metric,
						documentation=metric,
						labelnames=list([y.k for y in x['model'].labels.all()]),
						# labelvalues=list([y.v for y in x['model'].labels.all()]),
						registry=registry
					)

				print(str(x['model'].labels.all()))
				try:
					g_map[metric].labels(**({y.k: y.v for y in x['model'].labels.all()})).set(x['data'].iloc[[nearest_index]][pm])
				except ValueError as e:
					logging.error("[pt:%d]wrong label: %s" % (x['model'].pk, str(e)))

		write_to_textfile(TMP_FILE, registry)
	with open(TMP_FILE, 'r') as fd:
		return fd.read()
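Example #13 persists the registry with write_to_textfile so the node_exporter textfile collector can pick it up. A minimal sketch of that write/read cycle, using a placeholder path and metric:

from prometheus_client import CollectorRegistry, Gauge, write_to_textfile

registry = CollectorRegistry()
Gauge('forecast_yhat', 'Latest forecast value (placeholder)',
      registry=registry).set(1.0)

# Render the registry in text exposition format at the given path.
write_to_textfile('/tmp/forecast_example.prom', registry)

with open('/tmp/forecast_example.prom') as fd:
    print(fd.read())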
def collect(queue_name, dryrun=False):
    """Collect and push autopkgtest queue depth metrics."""
    queue_details = QUEUES_JSON[queue_name]

    for release in queue_details:
        for arch in queue_details[release]:
            count = len(queue_details[release][arch])
            print('%s %s: %i' % (release.title(), arch, count))

    if not dryrun:
        print('Pushing data...')
        registry = CollectorRegistry()

        for release in queue_details:
            for arch in queue_details[release]:
                count = len(queue_details[release][arch])
                Gauge('autopkgtest_queue_size_%s_%s_%s' %
                      (queue_name, release, arch),
                      "Autopkgtest queue size",
                      None,
                      registry=registry).set(count)

        util.push2gateway('foundations-autopkgtest-%s' %
                          queue_name, registry)
Example #15
def init_prometheus(prometheus_hostname, node_hostname):
    from prometheus_client import CollectorRegistry, Gauge

    metrics = Metrics()

    metrics.registry = CollectorRegistry()
    metrics.grouping_keys = {}
    metrics.hostname = prometheus_hostname
    metric_current_tps_name = "exonum_node_tps_current"
    metric_avg_tps_name = "exonum_node_tps_average"
    metric_current_height_name = "exonum_node_current_height"
    metrics.metric_avg_tps = Gauge(metric_avg_tps_name,
                                   "Exonum's node average TPS",
                                   registry=metrics.registry)
    metrics.metric_current_height = Gauge(
        metric_current_height_name,
        "Exonum's node current height",
        registry=metrics.registry,
    )
    metrics.metric_current_tps = Gauge(metric_current_tps_name,
                                       "Exonum's node current TPS",
                                       registry=metrics.registry)
    metrics.grouping_keys["instance"] = urlparse(node_hostname).netloc
    return metrics
Example #16
def collect(team_name, dryrun=False):
    """Push upload data."""
    date = datetime.now().date().strftime('%Y-%m-%d')
    results = generate_upload_report(date, team_name)
    print('%s: %s' % (date, results))

    if not dryrun:
        print('Pushing data...')
        registry = CollectorRegistry()

        Gauge('{}_uploads_daily_dev_total'.format(team_name),
              'Uploads to dev release',
              None,
              registry=registry).set(results['dev'])

        Gauge('{}_uploads_daily_sru_total'.format(team_name),
              'Uploads to supported release (SRU)',
              None,
              registry=registry).set(results['sru'])

        if team_name == 'server':
            util.push2gateway('upload', registry)
        else:
            util.push2gateway('upload-%s' % team_name, registry)
Example #17
    def setUp(self):
        self.registry = CollectorRegistry()
        self.counter = Gauge('g', 'help', registry=self.registry)
        self.requests = requests = []

        class TestHandler(BaseHTTPRequestHandler):
            def do_PUT(self):
                self.send_response(201)
                length = int(self.headers['content-length'])
                requests.append((self, self.rfile.read(length)))

            do_POST = do_PUT
            do_DELETE = do_PUT

        httpd = HTTPServer(('', 0), TestHandler)
        self.address = ':'.join([str(x) for x in httpd.server_address])

        class TestServer(threading.Thread):
            def run(self):
                httpd.handle_request()

        self.server = TestServer()
        self.server.daemon = True
        self.server.start()
Example #18
def lambda_handler(event, context):
    # Create the registry
    registry = CollectorRegistry()
    thread = threading.Thread(target=gather_metrics_data, args=(registry, ))
    stop_threads = False
    thread.start()

    # actual lambda handler code
    for record in event['Records']:
        bucket = record['s3']['bucket']['name']
        key = record['s3']['object']['key']
        original_img = '/tmp/original-{}'.format(key)
        temp_img = '/tmp/temp-{}'.format(key)

        s3_client.download_file(bucket, key, original_img)

        for size_px in THUMBNAIL_SIZES_PX:
            resize_image(original_img, temp_img, size_px)
            new_name_key = key.replace(".jpg", "_" + str(size_px) + ".jpg")
            s3_client.upload_file(temp_img, '{}-resized'.format(bucket),
                                  new_name_key)

    stop_threads = True
    thread.join()
Example #19
    def setUp(self):
        self.registry = CollectorRegistry()

        self.data = ''

        class TCPHandler(SocketServer.BaseRequestHandler):
            def handle(s):
                self.data = s.request.recv(1024)

        server = SocketServer.TCPServer(('', 0), TCPHandler)

        class ServingThread(threading.Thread):
            def run(self):
                server.handle_request()
                server.socket.close()

        self.t = ServingThread()
        self.t.start()

        # Explicitly use localhost as the target host, since connecting to 0.0.0.0 fails on Windows
        self.address = ('localhost', server.server_address[1])
        self.gb = GraphiteBridge(self.address,
                                 self.registry,
                                 _timer=fake_timer)
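Example #19's setUp only stands up a throwaway TCP server for GraphiteBridge to talk to. Against a real Graphite host the bridge is used as in this minimal sketch; the host name and prefix are placeholders:

from prometheus_client import CollectorRegistry, Gauge
from prometheus_client.bridge.graphite import GraphiteBridge

registry = CollectorRegistry()
Gauge('queue_depth', 'Jobs waiting in the queue', registry=registry).set(7)

gb = GraphiteBridge(('graphite.example.com', 2003), registry=registry)
gb.push(prefix='example')   # one-shot push over plain TCP
# gb.start(10.0)            # or push every 10 seconds from a daemon thread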
Example #20
    def __init__(self):
        self.registry = CollectorRegistry()
        self.prodstack = {}
        with open(config['cache_file'], 'rb') as f:
            self.prodstack = pickle.load(f)[0]

        self.tenant_map = {
            t['id']: t['name']
            for t in self.prodstack['tenants']
        }
        self.network_map = {
            n['id']: n['name']
            for n in self.prodstack['networks']
        }
        self.subnet_map = {
            n['id']: {
                'name': n['name'],
                'pool': n['allocation_pools']
            }
            for n in self.prodstack['subnets']
        }
        self.routers = self.prodstack['routers']
        self.ports = self.prodstack['ports']
        self.floating_ips = self.prodstack['floatingips']
Example #21
    def get_stats(self):
        registry = CollectorRegistry()
        labels = ['region', 'host', 'service', 'state']
        services_stats_cache = self.get_cache_data()
        for services_stat in services_stats_cache:
            try:
                stat_gauge = Gauge(
                    self.gauge_name_sanitize(
                        services_stat['stat_name']),
                    'Openstack Nova Service statistic',
                    labels,
                    registry=registry)
                label_values = [self.osclient.region,
                                services_stat.get('host', ''),
                                services_stat.get('service', ''),
                                services_stat.get('state', '')]
                stat_gauge.labels(*label_values).set(services_stat['stat_value'])
            except ValueError:
                logger.debug('Unchanged value for stat {} already present in '
                             'nova services registry for host {}; ignoring.'
                            .format(services_stat['stat_name'],
                                    services_stat['host']))

        return generate_latest(registry)
Example #22
    def create_timeseries(self, data):
        """Create Prometheus timeseries.

        Args:
            data (dict): Metric data.

        Returns:
            object: Metric descriptor.
        """
        name = data['name']
        description = data['description']
        prometheus_push_url = data['url']
        prometheus_push_job_name = data.get('job', DEFAULT_PUSHGATEWAY_JOB)
        value = data['value']

        # Write timeseries w/ metric labels.
        labels = data['labels']
        registry = CollectorRegistry()
        gauge = Gauge(name,
                      description,
                      registry=registry,
                      labelnames=labels.keys())
        gauge.labels(*labels.values()).set(value)

        # Handle headers
        handler = default_handler
        if 'username' in data and 'password' in data:
            self.username = data['username']
            self.password = data['password']
            handler = PrometheusExporter.auth_handler

        return push_to_gateway(prometheus_push_url,
                               job=prometheus_push_job_name,
                               grouping_key=labels,
                               registry=registry,
                               handler=handler)
Example #23
    def process(self, file):
        self.parse_config()

        prom_labels = ["database", "task"]
        r = CollectorRegistry()
        db_backup_ok = Gauge("backup_ok", "Full Backup of mysql or pgsql "
                                          "databases.",
                             ["database"],
                             registry=r)
        db_backup_time_spent = Gauge("backup_time_spent",
                                     "Time spent with a task on backup.",
                                     prom_labels,
                                     registry=r)

        last_success = Gauge("backup_last_success_unixtime",
                             "Last time a backup job successfully finished",
                             ["database"],
                             registry=r)

        for i in dumpy.base.PROMETHEUS_MONIT_STATUS[self.db]:
            db_backup_time_spent.labels(self.db, i['task']).set(i['spent_time'])

        if dumpy.base.FILE_EXISTS_ON_S3:
            db_backup_ok.labels(self.db).set(1)
        elif False in dumpy.base.FAIL_STATE:
            db_backup_ok.labels(self.db).set(0)
        else:
            db_backup_ok.labels(self.db).set(1)
            last_success.labels(self.db).set_to_current_time()

        try:
            push_to_gateway(self.host, self.job_name, registry=r)
        except BaseException as e:
            logger.error("%s - %s - %s" % (self.db, self.__class__.__name__, e))

        return file
Example #24
    def expose(self, app: Flask, endpoint: str = "/metrics") -> "self":
        """Exposes Prometheus metrics by adding endpoint to the given app.

        **Important**: There are many different ways to expose metrics. This is 
        just one of them, suited for both multiprocess and singleprocess mode. 
        Refer to the Prometheus Python client documentation for more information.

        :param app: Flask app where the endpoint should be added to.
        :param endpoint: Route of the endpoint. Defaults to "/metrics".
        :return: self.
        """

        from prometheus_client import (CONTENT_TYPE_LATEST, REGISTRY,
                                       CollectorRegistry, generate_latest,
                                       multiprocess)

        if "prometheus_multiproc_dir" in os.environ:
            pmd = os.environ["prometheus_multiproc_dir"]
            if os.path.isdir(pmd):
                registry = CollectorRegistry()
                multiprocess.MultiProcessCollector(registry)
            else:
                raise ValueError(
                    f"Env var prometheus_multiproc_dir='{pmd}' not a directory."
                )
        else:
            registry = REGISTRY

        @app.route(endpoint)
        def metrics():
            data = generate_latest(registry)
            headers = {
                "Content-Type": CONTENT_TYPE_LATEST,
                "Content-Length": str(len(data)),
            }
            return data, 200, headers
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--outfile',
                        metavar='FILE.prom',
                        help='Output file (stdout)')
    parser.add_argument('--statsfile',
                        metavar='FILE',
                        help='vhtcpd stats file (%(default)s)',
                        default='/tmp/vhtcpd.stats')
    parser.add_argument('-d',
                        '--debug',
                        action='store_true',
                        help='Enable debug logging (false)')
    args = parser.parse_args()

    if args.debug:
        logging.basicConfig(level=logging.DEBUG)
    else:
        logging.basicConfig(level=logging.INFO)

    if args.outfile and not args.outfile.endswith('.prom'):
        parser.error('Output file does not end with .prom')

    if args.statsfile:
        if args.statsfile == '-':
            infile = sys.stdin
        else:
            infile = open(args.statsfile, 'r')

    registry = CollectorRegistry()
    collect_vhtcp_stats(infile, registry)

    if args.outfile:
        write_to_textfile(args.outfile, registry)
    else:
        sys.stdout.buffer.write(generate_latest(registry))
def test_ursula_info_metrics_collector(test_registry, blockchain_ursulas,
                                       agency):
    ursula = random.choice(blockchain_ursulas)
    collector = UrsulaInfoMetricsCollector(ursula=ursula)

    collector_registry = CollectorRegistry()
    prefix = 'test_ursula_info_metrics_collector'
    collector.initialize(metrics_prefix=prefix, registry=collector_registry)
    collector.collect()

    known_nodes = collector_registry.get_sample_value(
        'test_ursula_info_metrics_collector_known_nodes')
    assert known_nodes == len(ursula.known_nodes)

    availability_score = collector_registry.get_sample_value(
        'test_ursula_info_metrics_collector_availability_score')
    assert availability_score == (ursula._availability_tracker.score if
                                  (ursula._availability_tracker
                                   and ursula._availability_tracker.running)
                                  else -1)

    policies_held = collector_registry.get_sample_value(
        'test_ursula_info_metrics_collector_policies_held')
    assert policies_held == 0

    work_orders = collector_registry.get_sample_value(
        'test_ursula_info_metrics_collector_work_orders')
    assert work_orders == 0

    mode = 'running' if ursula._learning_task.running else 'stopped'
    learning_mode = collector_registry.get_sample_value(
        'test_ursula_info_metrics_collector_node_discovery',
        labels={
            'test_ursula_info_metrics_collector_node_discovery': f'{mode}'
        })
    assert learning_mode == 1
Example #27
class TestMetricWrapper(unittest.TestCase):
    def setUp(self):
        self.registry = CollectorRegistry()
        self.counter = Counter('c', 'help', labelnames=['l'], registry=self.registry)
        self.two_labels = Counter('two', 'help', labelnames=['a', 'b'], registry=self.registry)

    def test_child(self):
        self.counter.labels('x').inc()
        self.assertEqual(1, self.registry.get_sample_value('c', {'l': 'x'}))
        self.two_labels.labels('x', 'y').inc(2)
        self.assertEqual(2, self.registry.get_sample_value('two', {'a': 'x', 'b': 'y'}))

    def test_remove(self):
        self.counter.labels('x').inc()
        self.counter.labels('y').inc(2)
        self.assertEqual(1, self.registry.get_sample_value('c', {'l': 'x'}))
        self.assertEqual(2, self.registry.get_sample_value('c', {'l': 'y'}))
        self.counter.remove('x')
        self.assertEqual(None, self.registry.get_sample_value('c', {'l': 'x'}))
        self.assertEqual(2, self.registry.get_sample_value('c', {'l': 'y'}))

    def test_incorrect_label_count_raises(self):
        self.assertRaises(ValueError, self.counter.labels)
        self.assertRaises(ValueError, self.counter.labels, 'a', 'b')
        self.assertRaises(ValueError, self.counter.remove)
        self.assertRaises(ValueError, self.counter.remove, 'a', 'b')

    def test_labels_coerced_to_string(self):
        self.counter.labels(None).inc()
        self.counter.labels({'l': None}).inc()
        self.assertEqual(2, self.registry.get_sample_value('c', {'l': 'None'}))

        self.counter.remove(None)
        self.assertEqual(None, self.registry.get_sample_value('c', {'l': 'None'}))

    def test_non_string_labels_raises(self):
        class Test(object):
            __str__ = None
        self.assertRaises(TypeError, self.counter.labels, Test())
        self.assertRaises(TypeError, self.counter.labels, {'l': Test()})

    def test_namespace_subsystem_concatenated(self):
        c = Counter('c', 'help', namespace='a', subsystem='b', registry=self.registry)
        c.inc()
        self.assertEqual(1, self.registry.get_sample_value('a_b_c'))

    def test_labels_by_dict(self):
        self.counter.labels({'l': 'x'}).inc()
        self.assertEqual(1, self.registry.get_sample_value('c', {'l': 'x'}))
        self.assertRaises(ValueError, self.counter.labels, {'l': 'x', 'm': 'y'})
        self.assertRaises(ValueError, self.counter.labels, {'m': 'y'})
        self.assertRaises(ValueError, self.counter.labels, {})
        self.two_labels.labels({'a': 'x', 'b': 'y'}).inc()
        self.assertEqual(1, self.registry.get_sample_value('two', {'a': 'x', 'b': 'y'}))
        self.assertRaises(ValueError, self.two_labels.labels, {'a': 'x', 'b': 'y', 'c': 'z'})
        self.assertRaises(ValueError, self.two_labels.labels, {'a': 'x', 'c': 'z'})
        self.assertRaises(ValueError, self.two_labels.labels, {'b': 'y', 'c': 'z'})
        self.assertRaises(ValueError, self.two_labels.labels, {'c': 'z'})
        self.assertRaises(ValueError, self.two_labels.labels, {})

    def test_invalid_names_raise(self):
        self.assertRaises(ValueError, Counter, '', 'help')
        self.assertRaises(ValueError, Counter, '^', 'help')
        self.assertRaises(ValueError, Counter, '', 'help', namespace='&')
        self.assertRaises(ValueError, Counter, '', 'help', subsystem='(')
        self.assertRaises(ValueError, Counter, 'c', '', labelnames=['^'])
        self.assertRaises(ValueError, Counter, 'c', '', labelnames=['__reserved'])
        self.assertRaises(ValueError, Summary, 'c', '', labelnames=['quantile'])
Example #28
from thoth.common import init_logging
from thoth.common import OpenShift
from thoth.common import __version__ as __common__version__

from thoth.storages import __version__ as __storage__version__
from thoth.storages import GraphDatabase


__version__ = "0.1.0"
__service_version__ = (
    f"{__version__}+storage.{__storage__version__}.common.{__common__version__}"
)

init_logging()
prometheus_registry = CollectorRegistry()

_GRAPH_DB = GraphDatabase()
_GRAPH_DB.connect()

_OPENSHIFT = OpenShift()

_LOGGER = logging.getLogger("thoth.graph_refresh_job")
_LOG_SOLVER = os.environ.get("THOTH_LOG_SOLVER") == "DEBUG"
_LOG_REVSOLVER = os.environ.get("THOTH_LOG_REVSOLVER") == "DEBUG"
THOTH_MY_NAMESPACE = os.getenv("NAMESPACE", "thoth-test-core")

_THOTH_METRICS_PUSHGATEWAY_URL = os.getenv("PROMETHEUS_PUSHGATEWAY_URL")
_METRIC_RUNTIME = Gauge(
    "graph_refresh_job_runtime_seconds",
    "Runtime of graph refresh job in seconds.",
    registry=prometheus_registry,
)
Example #29
class BroadcastWebsocketStats():
    def __init__(self, local_hostname, remote_hostname):
        self._local_hostname = local_hostname
        self._remote_hostname = remote_hostname
        self._registry = CollectorRegistry()

        # TODO: More robust replacement
        self.name = self.safe_name(self._local_hostname)
        self.remote_name = self.safe_name(self._remote_hostname)

        self._messages_received_total = Counter(f'awx_{self.remote_name}_messages_received_total',
                                                'Number of messages received, to be forwarded, by the broadcast websocket system',
                                                registry=self._registry)
        self._messages_received = Gauge(f'awx_{self.remote_name}_messages_received',
                                        'Number forwarded messages received by the broadcast websocket system, for the duration of the current connection',
                                        registry=self._registry)
        self._connection = Enum(f'awx_{self.remote_name}_connection',
                                'Websocket broadcast connection',
                                states=['disconnected', 'connected'],
                                registry=self._registry)
        self._connection_start = Gauge(f'awx_{self.remote_name}_connection_start',
                                       'Time the connection was established',
                                       registry=self._registry)

        self._messages_received_per_minute = Gauge(f'awx_{self.remote_name}_messages_received_per_minute',
                                                   'Messages received per minute',
                                                   registry=self._registry)
        self._internal_messages_received_per_minute = FixedSlidingWindow()

    def safe_name(self, s):
        # Replace all non alpha-numeric characters with _
        return re.sub('[^0-9a-zA-Z]+', '_', s)

    def unregister(self):
        self._registry.unregister(f'awx_{self.remote_name}_messages_received')
        self._registry.unregister(f'awx_{self.remote_name}_connection')

    def record_message_received(self):
        self._internal_messages_received_per_minute.record()
        self._messages_received.inc()
        self._messages_received_total.inc()

    def record_connection_established(self):
        self._connection.state('connected')
        self._connection_start.set_to_current_time()
        self._messages_received.set(0)

    def record_connection_lost(self):
        self._connection.state('disconnected')

    def get_connection_duration(self):
        return (datetime.datetime.now() - self._connection_established_ts).total_seconds()

    def render(self):
        msgs_per_min = self._internal_messages_received_per_minute.render()
        self._messages_received_per_minute.set(msgs_per_min)

    def serialize(self):
        self.render()

        registry_data = generate_latest(self._registry).decode('UTF-8')
        return registry_data
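Example #29 is the only snippet here that uses the Enum metric type. A minimal sketch of how an Enum registers and renders, with placeholder names:

from prometheus_client import CollectorRegistry, Enum, generate_latest

registry = CollectorRegistry()
connection = Enum('example_connection', 'Websocket broadcast connection state',
                  states=['disconnected', 'connected'], registry=registry)
connection.state('connected')

# Renders one sample per state, with value 1.0 for the active one.
print(generate_latest(registry).decode('utf-8'))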
 def metrics(self, **kwargs):
     registry = kwargs.pop('registry', CollectorRegistry(auto_describe=True))
     return PrometheusMetrics(self.app, registry=registry, **kwargs)
class TestHistogram(unittest.TestCase):

    def setUp(self):
        self.registry = CollectorRegistry()

    def get_rolled_samples(self):
        """Get all 'rolled' type gauges in the current registry
        """
        for m in self.registry.collect():
            if m.name.endswith('_rolled'):
                for name, labels, val in m.samples:
                    yield name, labels, val

    def get_hist_samples(self):
        """Get all histogram buckets in the current registry
        """
        for m in self.registry.collect():
            if m.name == 'test_value':
                for name, labels, val in m.samples:
                    if name.endswith('_bucket'):
                        yield name, labels, val

    def test_initialize(self):
        h = Histogram('test_value', 'Testing roller', registry=self.registry)
        roller = HistogramRoller(h, registry=self.registry)

        n_buckets = 0
        for name, _, _ in self.get_hist_samples():
            if name.endswith('_bucket'):
                n_buckets += 1

        n_created_gauges = 0
        for _, _, _ in self.get_rolled_samples():
            n_created_gauges += 1

        # Check that roller gauges don't exist until values are added
        self.assertTrue(n_buckets > 0)
        self.assertTrue(n_created_gauges == 0)

        self.assertEqual(roller.name, 'test_value_sum_rolled')

    def test_initialize_errors(self):
        # Raise error because tried to use wrong type of item
        def wrong_type_exception():
            c = Counter('test_value', 'Testing roller', registry=self.registry)
            roller = HistogramRoller(c, registry=self.registry)
        self.assertRaises(ValueError, wrong_type_exception)

        # Update seconds must be > 0
        def update_seconds_lt_1_exception():
            h = Histogram('test_value', 'Testing roller', registry=self.registry)
            roller = HistogramRoller(h, registry=self.registry, options={
                'update_seconds': 0
            })
        self.assertRaises(ValueError, update_seconds_lt_1_exception)

        # Update seconds must be a multiple of 1
        def update_seconds_not_divisible_by_1_exception():
            h = Histogram('test_value', 'Testing roller', registry=self.registry)
            roller = HistogramRoller(h, registry=self.registry, options={
                'update_seconds': 2.5
            })
        self.assertRaises(ValueError, update_seconds_not_divisible_by_1_exception)

    def test_collect(self):
        h = Histogram('test_value', 'Testing roller', registry=self.registry)
        roller = HistogramRoller(h, registry=self.registry)

        # Get values
        roller.collect()

        n_buckets = 0
        for _, _, _ in self.get_hist_samples():
            n_buckets += 1

        n_created_gauges = 0
        for _, _, _ in self.get_rolled_samples():
            n_created_gauges += 1

        self.assertTrue(n_buckets > 0)
        self.assertTrue(n_created_gauges > 0)
        self.assertEqual(n_buckets, n_created_gauges)

        # Check that roller values are still 0.0 after initial collection
        for name, labels, value in self.get_rolled_samples():
            self.assertEqual(value, 0.0)

        # Add some samples
        for i in range(100):
            h.observe(pow(2, i/10 - 2))

        # Collect histogram values
        hist_values = dict()
        for name, labels, value in self.get_hist_samples():
            hist_values[labels['le']] = value

        # Rolled values stay at 0.0 until the roller collects again
        for name, labels, value in self.get_rolled_samples():
            self.assertEqual(value, 0.0)

        roller.collect()

        for name, labels, value in self.get_rolled_samples():
            self.assertEqual(value, hist_values[labels['le']])

    def test_customize_reducer(self):
        h = Histogram('test_value', 'Testing roller', registry=self.registry)
        roller_max = HistogramRoller(h, registry=self.registry, options={
            'reducer': 'max'
        })
        roller_min = HistogramRoller(h, registry=self.registry, options={
            'reducer': 'sum'
        })

        def always_one(*args, **kwargs):
            return 1
        roller_one = HistogramRoller(h, registry=self.registry, options={
            'reducer': always_one
        })


        for state in [2.6, 4.7, 3.8, 2.8]:
            h.observe(state)
            roller_max.collect()
            roller_min.collect()
            roller_one.collect()

        # Deltas = 1, 1, 1
        nchecks = 0
        for m in self.registry.collect():
            if m.name.endswith('max_rolled'):
                for name, labels, val in m.samples:
                    if labels['le'] == '5.0':
                        nchecks += 1
                        self.assertEqual(val, 1.0)
        self.assertTrue(nchecks > 0)

        nchecks = 0
        for m in self.registry.collect():
            if m.name.endswith('sum_rolled'):
                for name, labels, val in m.samples:
                    if labels['le'] == '5.0':
                        self.assertEqual(val, 3.0)
                        nchecks += 1
        self.assertTrue(nchecks > 0)

        nchecks = 0
        for m in self.registry.collect():
            if m.name.endswith('always_one_rolled'):
                for name, labels, val in m.samples:
                    if labels['le'] == '5.0':
                        self.assertEqual(val, 1.0)
                        nchecks += 1
        self.assertTrue(nchecks > 0)
Example #32
 def setUp(self):
     self.registry = CollectorRegistry()
     self.gauge = Gauge('g', 'help', registry=self.registry)
Example #33
class TestGauge(unittest.TestCase):
    def setUp(self):
        self.registry = CollectorRegistry()
        self.gauge = Gauge('g', 'help', registry=self.registry)

    def test_gauge(self):
        self.assertEqual(0, self.registry.get_sample_value('g'))
        self.gauge.inc()
        self.assertEqual(1, self.registry.get_sample_value('g'))
        self.gauge.dec(3)
        self.assertEqual(-2, self.registry.get_sample_value('g'))
        self.gauge.set(9)
        self.assertEqual(9, self.registry.get_sample_value('g'))

    def test_function_decorator(self):
        self.assertEqual(0, self.registry.get_sample_value('g'))

        @self.gauge.track_inprogress()
        def f():
            self.assertEqual(1, self.registry.get_sample_value('g'))

        f()
        self.assertEqual(0, self.registry.get_sample_value('g'))

    def test_block_decorator(self):
        self.assertEqual(0, self.registry.get_sample_value('g'))
        with self.gauge.track_inprogress():
            self.assertEqual(1, self.registry.get_sample_value('g'))
        self.assertEqual(0, self.registry.get_sample_value('g'))

    def test_gauge_function(self):
        x = {}
        self.gauge.set_function(lambda: len(x))
        self.assertEqual(0, self.registry.get_sample_value('g'))
        self.gauge.inc()
        self.assertEqual(0, self.registry.get_sample_value('g'))
        x['a'] = None
        self.assertEqual(1, self.registry.get_sample_value('g'))
Example #34
class GaugePrometheusTests(unittest.TestCase):  # pytype: disable=module-attr
    """Tests the GaugePortStatsPrometheusPoller update method"""

    prom_client = gauge_prom.GaugePrometheusClient(reg=CollectorRegistry())

    @staticmethod
    def parse_prom_output(output):
        """Parses the port stats from prometheus into a dictionary"""

        parsed_output = {}
        for line in output.split('\n'):
            # discard comments and stats not related to port stats
            if line.startswith('#') or not line.startswith(
                    gauge_prom.PROM_PORT_PREFIX):
                continue

            index = line.find('{')
            # get the stat name e.g. of_port_rx_bytes and strip 'of_port_'
            prefix = gauge_prom.PROM_PORT_PREFIX + gauge_prom.PROM_PREFIX_DELIM
            stat_name = line[0:index].replace(prefix, '')
            # get the labels within {}
            labels = line[index + 1:line.find('}')].split(',')

            for label in labels:
                lab_name, lab_val = label.split('=', 1)
                lab_val = lab_val.replace('"', '')
                if lab_name == 'dp_id':
                    dp_id = int(lab_val, 16)
                elif lab_name == 'port':
                    port_name = lab_val

            key = (dp_id, port_name)
            stat_val = line.split(' ')[-1]
            if key not in parsed_output:
                parsed_output[key] = []

            parsed_output[key].append((stat_name, float(stat_val)))

        return parsed_output

    @staticmethod
    def get_prometheus_stats(addr, port):
        """Attempts to contact the prometheus server
        at the address to grab port stats."""

        url = 'http://{}:{}'.format(addr, port)
        session = requests.Session()
        adapter = requests.adapters.HTTPAdapter(max_retries=10)
        session.mount('http://', adapter)
        return session.get(url).text

    def test_poller(self):
        """Test the update method to see if it pushes port stats"""

        datapath = create_mock_datapath(2)

        conf = mock.Mock(dp=datapath,
                         type='',
                         interval=1,
                         prometheus_port=9303,
                         prometheus_addr='localhost',
                         use_test_thread=True)

        prom_poller = gauge_prom.GaugePortStatsPrometheusPoller(
            conf, '__name__', self.prom_client)
        prom_poller._running = True
        msg = port_stats_msg(datapath)
        prom_poller.update(time.time(), msg)

        prom_lines = self.get_prometheus_stats(conf.prometheus_addr,
                                               conf.prometheus_port)
        prom_lines = self.parse_prom_output(prom_lines)

        for port_num, port in datapath.ports.items():
            port_stats = msg.body[int(port_num) - 1]
            stats = prom_lines[(datapath.dp_id, port.name)]
            stats_found = set()

            for stat_name, stat_val in stats:
                self.assertAlmostEqual(stat_val,
                                       getattr(port_stats, stat_name))
                stats_found.add(stat_name)

            self.assertEqual(stats_found, set(gauge_prom.PROM_PORT_VARS))

    def test_port_state(self):
        """Test the update method to see if it pushes port state"""

        datapath = create_mock_datapath(2)

        conf = mock.Mock(dp=datapath,
                         type='',
                         interval=1,
                         prometheus_port=9303,
                         prometheus_addr='localhost',
                         use_test_thread=True)

        prom_poller = gauge_prom.GaugePortStatePrometheusPoller(
            conf, '__name__', self.prom_client)
        prom_poller._running = True
        reasons = [
            ofproto.OFPPR_ADD, ofproto.OFPPR_DELETE, ofproto.OFPPR_MODIFY
        ]
        for i in range(1, len(conf.dp.ports) + 1):

            msg = port_state_msg(conf.dp, i, reasons[i - 1])
            port_name = conf.dp.ports[i].name
            rcv_time = int(time.time())
            prom_poller.update(rcv_time, msg)

            prom_lines = self.get_prometheus_stats(conf.prometheus_addr,
                                                   conf.prometheus_port)
            prom_lines = self.parse_prom_output(prom_lines)

            stats = prom_lines[(datapath.dp_id, port_name)]
            stats_found = set()

            for stat_name, stat_val in stats:
                msg_data = msg if stat_name == 'reason' else msg.desc
                self.assertAlmostEqual(stat_val, getattr(msg_data, stat_name))
                stats_found.add(stat_name)

            self.assertEqual(stats_found, set(gauge_prom.PROM_PORT_STATE_VARS))

    def test_flow_stats(self):
        """Check the update method of the GaugeFlowTablePrometheusPoller class"""

        datapath = create_mock_datapath(2)

        conf = mock.Mock(dp=datapath,
                         type='',
                         interval=1,
                         prometheus_port=9303,
                         prometheus_addr='localhost',
                         use_test_thread=True)

        prom_poller = gauge_prom.GaugeFlowTablePrometheusPoller(
            conf, '__name__', self.prom_client)
        rcv_time = int(time.time())
        instructions = [parser.OFPInstructionGotoTable(1)]
        msg = flow_stats_msg(conf.dp, instructions)
        prom_poller.update(rcv_time, msg)
Example #35
 def setUp(self):
     self.registry = CollectorRegistry()
     self.counter = Counter('c', 'help', labelnames=['l'], registry=self.registry)
     self.two_labels = Counter('two', 'help', labelnames=['a', 'b'], registry=self.registry)
Example #36
 def setUp(self):
     self.registry = CollectorRegistry()
     self.counter = Counter('c', 'help', registry=self.registry)

class ScrapeCollector:
    def __init__(self):
        pass

    def collect(self):
        return HydraScrapeImporter(scrape()).collect()

def scrape(cached=None):
    if cached:
        with open(cached) as f:
            return json.load(f)
    else:
        print("Scraping")
        return requests.get(
            'https://hydra.nixos.org/queue-runner-status',
            headers={
                "Content-Type": "application/json"
            }
        ).json()

registry = CollectorRegistry()

registry.register(ScrapeCollector())

if __name__ == '__main__':
    # Start up the server to expose the metrics.
    start_http_server(9200, registry=registry)
    # Generate some requests.
    while True:
        time.sleep(30)

class TestProcessCollector(unittest.TestCase):
    def setUp(self):
        self.registry = CollectorRegistry()
        self.test_proc = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'proc')

    def test_working(self):
        collector = ProcessCollector(proc=self.test_proc, pid=lambda: 26231, registry=self.registry)
        collector._ticks = 100

        self.assertEqual(17.21, self.registry.get_sample_value('process_cpu_seconds_total'))
        self.assertEqual(56274944.0, self.registry.get_sample_value('process_virtual_memory_bytes'))
        self.assertEqual(8114176, self.registry.get_sample_value('process_resident_memory_bytes'))
        self.assertEqual(1418184099.75, self.registry.get_sample_value('process_start_time_seconds'))
        self.assertEqual(2048.0, self.registry.get_sample_value('process_max_fds'))
        self.assertEqual(5.0, self.registry.get_sample_value('process_open_fds'))
        self.assertEqual(None, self.registry.get_sample_value('process_fake_namespace'))

    def test_namespace(self):
        collector = ProcessCollector(proc=self.test_proc, pid=lambda: 26231, registry=self.registry, namespace='n')
        collector._ticks = 100

        self.assertEqual(17.21, self.registry.get_sample_value('n_process_cpu_seconds_total'))
        self.assertEqual(56274944.0, self.registry.get_sample_value('n_process_virtual_memory_bytes'))
        self.assertEqual(8114176, self.registry.get_sample_value('n_process_resident_memory_bytes'))
        self.assertEqual(1418184099.75, self.registry.get_sample_value('n_process_start_time_seconds'))
        self.assertEqual(2048.0, self.registry.get_sample_value('n_process_max_fds'))
        self.assertEqual(5.0, self.registry.get_sample_value('n_process_open_fds'))
        self.assertEqual(None, self.registry.get_sample_value('process_cpu_seconds_total'))

    def test_working_584(self):
        collector = ProcessCollector(proc=self.test_proc, pid=lambda: "584\n", registry=self.registry)
        collector._ticks = 100

        self.assertEqual(0.0, self.registry.get_sample_value('process_cpu_seconds_total'))
        self.assertEqual(10395648.0, self.registry.get_sample_value('process_virtual_memory_bytes'))
        self.assertEqual(634880, self.registry.get_sample_value('process_resident_memory_bytes'))
        self.assertEqual(1418291667.75, self.registry.get_sample_value('process_start_time_seconds'))
        self.assertEqual(None, self.registry.get_sample_value('process_max_fds'))
        self.assertEqual(None, self.registry.get_sample_value('process_open_fds'))

    def test_working_fake_pid(self):
        collector = ProcessCollector(proc=self.test_proc, pid=lambda: 123, registry=self.registry)
        collector._ticks = 100

        self.assertEqual(None, self.registry.get_sample_value('process_cpu_seconds_total'))
        self.assertEqual(None, self.registry.get_sample_value('process_virtual_memory_bytes'))
        self.assertEqual(None, self.registry.get_sample_value('process_resident_memory_bytes'))
        self.assertEqual(None, self.registry.get_sample_value('process_start_time_seconds'))
        self.assertEqual(None, self.registry.get_sample_value('process_max_fds'))
        self.assertEqual(None, self.registry.get_sample_value('process_open_fds'))
        self.assertEqual(None, self.registry.get_sample_value('process_fake_namespace'))
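
The tests above point ProcessCollector at a bundled fake proc directory so the expected values stay stable. Outside of tests the collector is normally pointed at the live /proc of a Linux host; a brief sketch (the 'myapp' namespace is just an example):

from prometheus_client import CollectorRegistry
from prometheus_client.process_collector import ProcessCollector

registry = CollectorRegistry()
# Reads /proc/self/stat, /proc/self/limits and /proc/self/fd of the current
# process on each scrape (Linux only; yields no samples elsewhere).
ProcessCollector(namespace='myapp', registry=registry)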
Example #40
0
def metrics():
    registry = CollectorRegistry()
    multiprocess.MultiProcessCollector(registry)
    return Response(generate_latest(registry), mimetype=CONTENT_TYPE_LATEST)
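
Example #40 only aggregates metrics that the worker processes have already written to disk; multiprocess mode additionally requires the PROMETHEUS_MULTIPROC_DIR environment variable (prometheus_multiproc_dir in older releases) to name a writable directory shared by all workers, plus a hook to clean up after dead workers. A sketch of those companion pieces, assuming a gunicorn deployment:

# gunicorn config file (illustrative)
from prometheus_client import multiprocess

def child_exit(server, worker):
    # gunicorn server hook: remove the dead worker's per-process sample files
    multiprocess.mark_process_dead(worker.pid)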
Example #41
0
class TestGenerateText(unittest.TestCase):
    def setUp(self):
        self.registry = CollectorRegistry()

    def test_counter(self):
        c = Counter('cc', 'A counter', registry=self.registry)
        c.inc()
        self.assertEqual(b'# HELP cc A counter\n# TYPE cc counter\ncc 1.0\n', generate_latest(self.registry))

    def test_gauge(self):
        g = Gauge('gg', 'A gauge', registry=self.registry)
        g.set(17)
        self.assertEqual(b'# HELP gg A gauge\n# TYPE gg gauge\ngg 17.0\n', generate_latest(self.registry))

    def test_summary(self):
        s = Summary('ss', 'A summary', ['a', 'b'], registry=self.registry)
        s.labels('c', 'd').observe(17)
        self.assertEqual(b'# HELP ss A summary\n# TYPE ss summary\nss_count{a="c",b="d"} 1.0\nss_sum{a="c",b="d"} 17.0\n', generate_latest(self.registry))

    @unittest.skipIf(sys.version_info < (2, 7), "Test requires Python 2.7+.")
    def test_histogram(self):
        s = Histogram('hh', 'A histogram', registry=self.registry)
        s.observe(0.05)
        self.assertEqual(b'''# HELP hh A histogram
# TYPE hh histogram
hh_bucket{le="0.005"} 0.0
hh_bucket{le="0.01"} 0.0
hh_bucket{le="0.025"} 0.0
hh_bucket{le="0.05"} 1.0
hh_bucket{le="0.075"} 1.0
hh_bucket{le="0.1"} 1.0
hh_bucket{le="0.25"} 1.0
hh_bucket{le="0.5"} 1.0
hh_bucket{le="0.75"} 1.0
hh_bucket{le="1.0"} 1.0
hh_bucket{le="2.5"} 1.0
hh_bucket{le="5.0"} 1.0
hh_bucket{le="7.5"} 1.0
hh_bucket{le="10.0"} 1.0
hh_bucket{le="+Inf"} 1.0
hh_count 1.0
hh_sum 0.05
''', generate_latest(self.registry))

    def test_unicode(self):
        c = Counter('cc', '\u4500', ['l'], registry=self.registry)
        c.labels('\u4500').inc()
        self.assertEqual(b'# HELP cc \xe4\x94\x80\n# TYPE cc counter\ncc{l="\xe4\x94\x80"} 1.0\n', generate_latest(self.registry))

    def test_escaping(self):
        c = Counter('cc', 'A\ncount\\er', ['a'], registry=self.registry)
        c.labels('\\x\n"').inc(1)
        self.assertEqual(b'# HELP cc A\\ncount\\\\er\n# TYPE cc counter\ncc{a="\\\\x\\n\\""} 1.0\n', generate_latest(self.registry))

    def test_nonnumber(self):
        class MyNumber():
            def __repr__(self):
              return "MyNumber(123)"
            def __float__(self):
              return 123.0
        class MyCollector():
            def collect(self):
                metric = Metric("nonnumber", "Non number", 'untyped')
                metric.add_sample("nonnumber", {}, MyNumber())
                yield metric
        self.registry.register(MyCollector())
        self.assertEqual(b'# HELP nonnumber Non number\n# TYPE nonnumber untyped\nnonnumber 123.0\n', generate_latest(self.registry))
Example #42
0
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Author: Ray
# @Time: 2019/11/5 10:40

from prometheus_client import Gauge, generate_latest, CollectorRegistry
from flask import Response, Flask
import os
import json
import time, datetime
import re

app = Flask(__name__)

# Define the metrics registry
REGISTRY = CollectorRegistry()
fastdfs_tracker_server_info = Gauge('fastdfs_tracker_server_info',
                                    "fastdfs_tracker_server_info", ['tracker'],
                                    registry=REGISTRY)
fastdfs_group_info = Gauge('fastdfs_group_info',
                           "fastdfs_group_info", ['group'],
                           registry=REGISTRY)
fastdfs_storage_server_count = Gauge('fastdfs_storage_server_count',
                                     "fastdfs_storage_server_count", ['group'],
                                     registry=REGISTRY)
fastdfs_active_server_count = Gauge('fastdfs_active_server_count',
                                    "fastdfs_active_server_count", ['group'],
                                    registry=REGISTRY)
fastdfs_disk_total_space = Gauge('fastdfs_disk_total_space',
                                 "fastdfs_disk_total_space", ['group'],
                                 registry=REGISTRY)
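
The snippet in Example #42 stops before the Flask view that serves REGISTRY. A minimal sketch of such a route (the handler below is an assumption, not part of the original script):

@app.route('/metrics')
def metrics():
    # 'text/plain; version=0.0.4; charset=utf-8' is the value of
    # prometheus_client.CONTENT_TYPE_LATEST.
    return Response(generate_latest(REGISTRY),
                    mimetype='text/plain; version=0.0.4; charset=utf-8')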
Example #43
0
class TestGenerateText(unittest.TestCase):
    def setUp(self):
        self.registry = CollectorRegistry()

    def test_counter(self):
        c = Counter("cc", "A counter", registry=self.registry)
        c.inc()
        self.assertEqual(b"# HELP cc A counter\n# TYPE cc counter\ncc 1.0\n", generate_latest(self.registry))

    def test_gauge(self):
        g = Gauge("gg", "A gauge", registry=self.registry)
        g.set(17)
        self.assertEqual(b"# HELP gg A gauge\n# TYPE gg gauge\ngg 17.0\n", generate_latest(self.registry))

    def test_summary(self):
        s = Summary("ss", "A summary", ["a", "b"], registry=self.registry)
        s.labels("c", "d").observe(17)
        self.assertEqual(
            b'# HELP ss A summary\n# TYPE ss summary\nss_count{a="c",b="d"} 1.0\nss_sum{a="c",b="d"} 17.0\n',
            generate_latest(self.registry),
        )

    def test_histogram(self):
        s = Histogram("hh", "A histogram", registry=self.registry)
        s.observe(0.05)
        self.assertEqual(
            b"""# HELP hh A histogram
# TYPE hh histogram
hh_bucket{le="0.005"} 0.0
hh_bucket{le="0.01"} 0.0
hh_bucket{le="0.025"} 0.0
hh_bucket{le="0.05"} 1.0
hh_bucket{le="0.075"} 1.0
hh_bucket{le="0.1"} 1.0
hh_bucket{le="0.25"} 1.0
hh_bucket{le="0.5"} 1.0
hh_bucket{le="0.75"} 1.0
hh_bucket{le="1.0"} 1.0
hh_bucket{le="2.5"} 1.0
hh_bucket{le="5.0"} 1.0
hh_bucket{le="7.5"} 1.0
hh_bucket{le="10.0"} 1.0
hh_bucket{le="+Inf"} 1.0
hh_count 1.0
hh_sum 0.05
""",
            generate_latest(self.registry),
        )

    def test_unicode(self):
        c = Counter("cc", "\u4500", ["l"], registry=self.registry)
        c.labels("\u4500").inc()
        self.assertEqual(
            b'# HELP cc \xe4\x94\x80\n# TYPE cc counter\ncc{l="\xe4\x94\x80"} 1.0\n', generate_latest(self.registry)
        )

    def test_escaping(self):
        c = Counter("cc", "A\ncount\\er", ["a"], registry=self.registry)
        c.labels('\\x\n"').inc(1)
        self.assertEqual(
            b'# HELP cc A\\ncount\\\\er\n# TYPE cc counter\ncc{a="\\\\x\\n\\""} 1.0\n', generate_latest(self.registry)
        )

    def test_nonnumber(self):
        class MyNumber:
            def __repr__(self):
                return "MyNumber(123)"

            def __float__(self):
                return 123.0

        class MyCollector:
            def collect(self):
                metric = Metric("nonnumber", "Non number", "untyped")
                metric.add_sample("nonnumber", {}, MyNumber())
                yield metric

        self.registry.register(MyCollector())
        self.assertEqual(
            b"# HELP nonnumber Non number\n# TYPE nonnumber untyped\nnonnumber 123.0\n", generate_latest(self.registry)
        )
Example #44
0
def collect_pve(config, host, options: CollectorsOptions):
    """Scrape a host and return prometheus text format for it"""

    pve = ProxmoxAPI(host, **config)

    registry = CollectorRegistry()
    if options.status:
        registry.register(StatusCollector(pve))
    if options.resources:
        registry.register(ClusterResourcesCollector(pve))
    if options.node:
        registry.register(ClusterNodeCollector(pve))
    if options.cluster:
        registry.register(ClusterInfoCollector(pve))
    if options.config:
        registry.register(ClusterNodeConfigCollector(pve))
    if options.version:
        registry.register(VersionCollector(pve))

    return generate_latest(registry)
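
A hedged usage sketch for collect_pve: CollectorsOptions is not shown here, so SimpleNamespace stands in for an object with the boolean attributes the function reads, and the host and credentials are placeholders:

from types import SimpleNamespace

options = SimpleNamespace(status=True, resources=True, node=True,
                          cluster=False, config=False, version=True)
pve_config = {'user': 'monitor@pve', 'password': 'secret', 'verify_ssl': False}
print(collect_pve(pve_config, 'pve.example.org', options).decode())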
Example #45
0
class AllMetrics:
    def __init__(self, registry):
        # libp2p_peers
        self.libp2p_peers = Gauge('libp2p_peers', 'Tracks number of libp2p peers', registry=registry)  # noqa: E501

        # On slot transition
        self.beacon_slot = Gauge('beacon_slot', 'Latest slot of the beacon chain state', registry=registry)  # noqa: E501

        # On block transition
        self.beacon_head_slot = Gauge('beacon_head_slot', 'Slot of the head block of the beacon chain', registry=registry)  # noqa: E501
        self.beacon_head_root = Gauge('beacon_head_root', 'Root of the head block of the beacon chain', registry=registry)  # noqa: E501

        # On epoch transition
        self.beacon_previous_justified_epoch = Gauge('beacon_previous_justified_epoch', 'Current previously justified epoch', registry=registry)  # noqa: E501
        self.beacon_previous_justified_root = Gauge('beacon_previous_justified_root', 'Current previously justified root', registry=registry)  # noqa: E501
        self.beacon_current_justified_epoch = Gauge('beacon_current_justified_epoch', 'Current justified epoch', registry=registry)  # noqa: E501
        self.beacon_current_justified_root = Gauge('beacon_current_justified_root', 'Current justified root', registry=registry)  # noqa: E501
        self.beacon_finalized_epoch = Gauge('beacon_finalized_epoch', 'Current finalized epoch', registry=registry)  # noqa: E501
        self.beacon_finalized_root = Gauge('beacon_finalized_root', 'Current finalized root', registry=registry)  # noqa: E501

        #
        # Other
        #

        # Validator
        self.validator_proposed_blocks = Counter("validator_proposed_blocks", "counter of proposed blocks", registry=registry)  # noqa: E501
        self.validator_sent_attestation = Counter("validator_sent_attestation", "counter of attested", registry=registry)  # noqa: E501


registry = CollectorRegistry()
metrics = AllMetrics(registry)
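
Once AllMetrics is constructed, the individual gauges and counters are updated directly and the shared registry can be exposed over HTTP; a short sketch (the port and the values are arbitrary):

from prometheus_client import start_http_server

start_http_server(8008, registry=registry)   # serve /metrics for this registry
metrics.beacon_slot.set(123456)              # e.g. on every slot transition
metrics.validator_sent_attestation.inc()     # e.g. after a successful attestation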
Example #46
0
def metrics():
    return Response(
        generate_latest(MultiProcessCollector(CollectorRegistry())),
        mimetype=CONTENT_TYPE_LATEST,
    )
Example #47
0
 def setUp(self):
     self.registry = CollectorRegistry()
     self.histogram = Histogram('h', 'help', registry=self.registry)
     self.labels = Histogram('hl', 'help', ['l'], registry=self.registry)
Example #48
0
def metrics():
    registry = CollectorRegistry()
    multiprocess.MultiProcessCollector(registry)
    return generate_latest(registry)
Example #49
0
 def setUp(self):
     self.registry = CollectorRegistry()
     self.summary = Summary('s', 'help', registry=self.registry)
Example #50
0
import logging
import os

from prometheus_client import CollectorRegistry
from prometheus_client import Gauge
from prometheus_client import Counter
from prometheus_client import push_to_gateway

from thoth.common import init_logging
from thoth.common import OpenShift
from thoth.common import __version__ as __common__version__

__version__ = f"0.7.0+common.{__common__version__}"

init_logging()

_LOGGER = logging.getLogger("thoth.cleanup_job")
_DEFAULT_TTL = parse_ttl(os.getenv("THOTH_CLEANUP_DEFAULT_TTL") or "7d")
_CLEANUP_LABEL_SELECTOR = "mark=cleanup"
_PROMETHEUS_REGISTRY = CollectorRegistry()
_THOTH_METRICS_PUSHGATEWAY_URL = os.getenv("PROMETHEUS_PUSHGATEWAY_URL")
_METRIC_RUNTIME = Gauge("thoth_cleanup_job_runtime_seconds",
                        "Runtime of cleanup job in seconds.", [],
                        registry=_PROMETHEUS_REGISTRY)
_METRIC_INFO = Gauge("thoth_cleanup_job_info",
                     "Thoth Cleanup Job information", ["version"],
                     registry=_PROMETHEUS_REGISTRY)

_METRIC_DELETED_BUILDCONFIGS = Counter(
    "thoth_cleanup_job_buildconfigs",
    "Buildconfigs cleaned up.",
    ["namespace", "component", "resource"],
    registry=_PROMETHEUS_REGISTRY,
)
_METRIC_DELETED_IMAGESTREAMS = Counter(
Example #51
0
class TestHistogram(unittest.TestCase):
    def setUp(self):
        self.registry = CollectorRegistry()
        self.histogram = Histogram('h', 'help', registry=self.registry)
        self.labels = Histogram('hl', 'help', ['l'], registry=self.registry)

    def test_histogram(self):
        self.assertEqual(0, self.registry.get_sample_value('h_bucket', {'le': '1.0'}))
        self.assertEqual(0, self.registry.get_sample_value('h_bucket', {'le': '2.5'}))
        self.assertEqual(0, self.registry.get_sample_value('h_bucket', {'le': '5.0'}))
        self.assertEqual(0, self.registry.get_sample_value('h_bucket', {'le': '+Inf'}))
        self.assertEqual(0, self.registry.get_sample_value('h_count'))
        self.assertEqual(0, self.registry.get_sample_value('h_sum'))

        self.histogram.observe(2)
        self.assertEqual(0, self.registry.get_sample_value('h_bucket', {'le': '1.0'}))
        self.assertEqual(1, self.registry.get_sample_value('h_bucket', {'le': '2.5'}))
        self.assertEqual(1, self.registry.get_sample_value('h_bucket', {'le': '5.0'}))
        self.assertEqual(1, self.registry.get_sample_value('h_bucket', {'le': '+Inf'}))
        self.assertEqual(1, self.registry.get_sample_value('h_count'))
        self.assertEqual(2, self.registry.get_sample_value('h_sum'))

        self.histogram.observe(2.5)
        self.assertEqual(0, self.registry.get_sample_value('h_bucket', {'le': '1.0'}))
        self.assertEqual(2, self.registry.get_sample_value('h_bucket', {'le': '2.5'}))
        self.assertEqual(2, self.registry.get_sample_value('h_bucket', {'le': '5.0'}))
        self.assertEqual(2, self.registry.get_sample_value('h_bucket', {'le': '+Inf'}))
        self.assertEqual(2, self.registry.get_sample_value('h_count'))
        self.assertEqual(4.5, self.registry.get_sample_value('h_sum'))

        self.histogram.observe(float("inf"))
        self.assertEqual(0, self.registry.get_sample_value('h_bucket', {'le': '1.0'}))
        self.assertEqual(2, self.registry.get_sample_value('h_bucket', {'le': '2.5'}))
        self.assertEqual(2, self.registry.get_sample_value('h_bucket', {'le': '5.0'}))
        self.assertEqual(3, self.registry.get_sample_value('h_bucket', {'le': '+Inf'}))
        self.assertEqual(3, self.registry.get_sample_value('h_count'))
        self.assertEqual(float("inf"), self.registry.get_sample_value('h_sum'))

    def test_setting_buckets(self):
        h = Histogram('h', 'help', registry=None, buckets=[0, 1, 2])
        self.assertEqual([0.0, 1.0, 2.0, float("inf")], h._upper_bounds)

        h = Histogram('h', 'help', registry=None, buckets=[0, 1, 2, float("inf")])
        self.assertEqual([0.0, 1.0, 2.0, float("inf")], h._upper_bounds)

        self.assertRaises(ValueError, Histogram, 'h', 'help', registry=None, buckets=[])
        self.assertRaises(ValueError, Histogram, 'h', 'help', registry=None, buckets=[float("inf")])
        self.assertRaises(ValueError, Histogram, 'h', 'help', registry=None, buckets=[3, 1])

    def test_labels(self):
        self.labels.labels('a').observe(2)
        self.assertEqual(0, self.registry.get_sample_value('hl_bucket', {'le': '1.0', 'l': 'a'}))
        self.assertEqual(1, self.registry.get_sample_value('hl_bucket', {'le': '2.5', 'l': 'a'}))
        self.assertEqual(1, self.registry.get_sample_value('hl_bucket', {'le': '5.0', 'l': 'a'}))
        self.assertEqual(1, self.registry.get_sample_value('hl_bucket', {'le': '+Inf', 'l': 'a'}))
        self.assertEqual(1, self.registry.get_sample_value('hl_count', {'l': 'a'}))
        self.assertEqual(2, self.registry.get_sample_value('hl_sum', {'l': 'a'}))

    def test_function_decorator(self):
        self.assertEqual(0, self.registry.get_sample_value('h_count'))
        self.assertEqual(0, self.registry.get_sample_value('h_bucket', {'le': '+Inf'}))

        @self.histogram.time()
        def f():
            pass

        f()
        self.assertEqual(1, self.registry.get_sample_value('h_count'))
        self.assertEqual(1, self.registry.get_sample_value('h_bucket', {'le': '+Inf'}))

    def test_block_decorator(self):
        self.assertEqual(0, self.registry.get_sample_value('h_count'))
        self.assertEqual(0, self.registry.get_sample_value('h_bucket', {'le': '+Inf'}))
        with self.histogram.time():
            pass
        self.assertEqual(1, self.registry.get_sample_value('h_count'))
        self.assertEqual(1, self.registry.get_sample_value('h_bucket', {'le': '+Inf'}))
Example #52
0
    def test_gauge_config(self):
        """Test Gauge minimal config."""
        faucet_conf1 = """
vlans:
   100:
       description: "100"
dps:
   dp1:
       dp_id: 0x1
       interfaces:
           1:
               description: "1"
               native_vlan: 100
"""
        faucet_conf2 = """
vlans:
   100:
       description: "200"
dps:
   dp1:
       dp_id: 0x1
       interfaces:
           2:
               description: "2"
               native_vlan: 100
"""
        os.environ['FAUCET_CONFIG'] = os.path.join(self.tmpdir, 'faucet.yaml')
        self._write_config(os.environ['FAUCET_CONFIG'], faucet_conf1)
        os.environ['GAUGE_CONFIG'] = os.path.join(self.tmpdir, 'gauge.yaml')
        gauge_conf = """
faucet_configs:
   - '%s'
watchers:
    port_status_poller:
        type: 'port_state'
        all_dps: True
        db: 'prometheus'
    port_stats_poller:
        type: 'port_stats'
        all_dps: True
        interval: 10
        db: 'prometheus'
    flow_table_poller:
        type: 'flow_table'
        all_dps: True
        interval: 60
        db: 'prometheus'
dbs:
    prometheus:
        type: 'prometheus'
        prometheus_addr: '0.0.0.0'
        prometheus_port: 0
""" % os.environ['FAUCET_CONFIG']
        self._write_config(os.environ['GAUGE_CONFIG'], gauge_conf)
        self.os_ken_app = gauge.Gauge(dpset={}, reg=CollectorRegistry())
        self.os_ken_app.reload_config(None)
        self.assertFalse(self.os_ken_app._config_files_changed())
        self.assertTrue(self.os_ken_app.watchers)
        self.os_ken_app.reload_config(None)
        self.assertTrue(self.os_ken_app.watchers)
        self.assertFalse(self.os_ken_app._config_files_changed())
        # Load a new FAUCET config.
        self._write_config(os.environ['FAUCET_CONFIG'], faucet_conf2)
        self.assertTrue(self.os_ken_app._config_files_changed())
        self.os_ken_app.reload_config(None)
        self.assertTrue(self.os_ken_app.watchers)
        self.assertFalse(self.os_ken_app._config_files_changed())
        # Load an invalid Gauge config
        self._write_config(os.environ['GAUGE_CONFIG'], 'invalid')
        self.assertTrue(self.os_ken_app._config_files_changed())
        self.os_ken_app.reload_config(None)
        self.assertTrue(self.os_ken_app.watchers)
        # Keep trying to load a valid version.
        self.assertTrue(self.os_ken_app._config_files_changed())
        # Load good Gauge config back
        self._write_config(os.environ['GAUGE_CONFIG'], gauge_conf)
        self.assertTrue(self.os_ken_app._config_files_changed())
        self.os_ken_app.reload_config(None)
        self.assertTrue(self.os_ken_app.watchers)
        self.assertFalse(self.os_ken_app._config_files_changed())
Example #53
0
 def setUp(self):
     self.registry = CollectorRegistry()