def get_stats(self):
    """Render cached Nova service statistics as Prometheus text exposition.

    Builds one GaugeMetricFamily per distinct ``stat_name`` in the cache,
    registers each through OSCollector, and returns the generated output.
    """
    registry = CollectorRegistry()
    labels = ['region', 'host', 'service', 'state']
    services_stats_cache = self.get_cache_data()
    # Sort so rows sharing a stat_name are contiguous; the loop below
    # flushes one gauge per contiguous run.
    services_stats_cache.sort(key=self.take_stat_name)
    REGISTRY_FLAG = ''
    stat_gauge = None
    for services_stat in services_stats_cache:
        label_values = [self.osclient.region,
                        services_stat.get('host', ''),
                        services_stat.get('service', ''),
                        services_stat.get('state', '')]
        if REGISTRY_FLAG != services_stat['stat_name']:
            # New stat_name: flush the previous gauge (if any) and start fresh.
            if REGISTRY_FLAG:
                registry.register(OSCollector(stat_gauge))
            stat_gauge = GaugeMetricFamily(
                self.gauge_name_sanitize(services_stat['stat_name']),
                'Openstack Nova Service statistic',
                labels=labels)
            REGISTRY_FLAG = services_stat['stat_name']
        stat_gauge.add_metric(label_values, services_stat['stat_value'])
    # Fix: only register the trailing gauge when at least one row was seen.
    # The original unconditionally registered OSCollector([]) on an empty
    # cache, exposing a bogus collector wrapping an empty list.
    if REGISTRY_FLAG:
        registry.register(OSCollector(stat_gauge))
    return generate_latest(registry)
def __init__(
    self,
    name: str,
    documentation: str,
    buckets: Iterable[float],
    registry: CollectorRegistry = REGISTRY,
):
    """
    Args:
        name: base name of metric to be exported to Prometheus. (a _bucket
            suffix will be added.)
        documentation: help text for the metric
        buckets: The top bounds of the buckets to report
        registry: metric registry to register with

    Raises:
        ValueError: if ``buckets`` is not in ascending order
    """
    self._name = name
    self._documentation = documentation

    # the tops of the buckets
    self._bucket_bounds = [float(b) for b in buckets]
    if self._bucket_bounds != sorted(self._bucket_bounds):
        raise ValueError("Buckets not in sorted order")

    # Fix: an empty `buckets` iterable previously raised IndexError on
    # self._bucket_bounds[-1]; always guarantee a terminal +Inf bucket.
    if not self._bucket_bounds or self._bucket_bounds[-1] != float("inf"):
        self._bucket_bounds.append(float("inf"))

    # We initially set this to None. We won't report metrics until
    # this has been initialised after a successful data update
    self._metric: Optional[GaugeHistogramMetricFamily] = None

    registry.register(self)
def collect_snmp(config, host, port=161):
    """Scrape a host and return prometheus text format for it"""
    # Track wall-clock time so the scrape duration can be exported too.
    start = time.time()
    metrics = {}
    # One untyped Metric per configured rule, keyed by metric name.
    for metric in config['metrics']:
        metrics[metric['name']] = Metric(metric['name'],
                                         'SNMP OID {0}'.format(metric['oid']),
                                         'untyped')
    values = walk_oids(host, port, config['walk'])
    oids = {}
    # Collapse the walk into a dict keyed by OID tuple; later duplicates win.
    for oid, value in values:
        oids[tuple(oid)] = value
    # Match every walked OID against every rule by prefix; the OID suffix
    # past the prefix carries the table indexes used to build labels.
    for oid, value in oids.items():
        for metric in config['metrics']:
            prefix = oid_to_tuple(metric['oid'])
            if oid[:len(prefix)] == prefix:
                value = float(value)
                indexes = oid[len(prefix):]
                labels = parse_indexes(indexes, metric.get('indexes', {}),
                                       metric.get('lookups', {}), oids)
                metrics[metric['name']].add_sample(metric['name'],
                                                   value=value, labels=labels)

    # Minimal collector that just replays the pre-built metrics.
    class Collector():
        def collect(self):
            return metrics.values()
    registry = CollectorRegistry()
    registry.register(Collector())
    duration = Gauge('snmp_scrape_duration_seconds',
                     'Time this SNMP scrape took, in seconds',
                     registry=registry)
    duration.set(time.time() - start)
    walked = Gauge('snmp_oids_walked',
                   'Number of oids walked in this scrape',
                   registry=registry)
    walked.set(len(oids))
    return generate_latest(registry)
def route_metrics():
    """Produce FlashArray or FlashBlade metrics, selected by request path.

    Query parameters ``endpoint`` and ``apitoken`` are required (400 if
    missing); collector construction failure yields 500.
    """
    endp = request.args.get('endpoint', None)
    atok = request.args.get('apitoken', None)
    path = request.path
    if path == '/metrics/flasharray':
        collector = 'FlasharrayCollector'
    elif path == '/metrics/flashblade':
        collector = 'FlashbladeCollector'
    else:
        abort(400)
    if endp is None or atok is None:
        abort(400)
    _reg = CollectorRegistry()
    try:
        if collector == 'FlasharrayCollector':
            _reg.register(FlasharrayCollector(endp, atok))
        else:
            _reg.register(FlashbladeCollector(endp, atok))
    except Exception as e:
        # Fix: Logger.warn is a deprecated alias for Logger.warning.
        app.logger.warning('%s: %s', collector, str(e))
        abort(500)
    resp = make_response(generate_latest(_reg), 200)
    resp.headers['Content-type'] = CONTENT_TYPE_LATEST
    return resp
def collect_snmp(config, host, port=161):
    """Scrape a host and return prometheus text format for it"""
    metrics = {}
    # One untyped Metric per configured rule, keyed by metric name.
    for metric in config['metrics']:
        metrics[metric['name']] = Metric(metric['name'],
                                         'SNMP OID {0}'.format(metric['oid']),
                                         'untyped')
    values = walk_oids(host, port, config['walk'])
    oids = {}
    # Collapse the walk into a dict keyed by OID tuple; later duplicates win.
    for oid, value in values:
        oids[tuple(oid)] = value
    # Prefix-match every walked OID against every rule; the suffix past the
    # prefix carries the table indexes used to build labels.
    for oid, value in oids.items():
        for metric in config['metrics']:
            prefix = oid_to_tuple(metric['oid'])
            if oid[:len(prefix)] == prefix:
                value = float(value)
                indexes = oid[len(prefix):]
                labels = parse_indexes(indexes, metric.get('indexes', {}), oids)
                metrics[metric['name']].add_sample(metric['name'],
                                                   value=value, labels=labels)

    # Minimal collector that just replays the pre-built metrics.
    class Collector():
        def collect(self):
            return metrics.values()
    registry = CollectorRegistry()
    registry.register(Collector())
    return generate_latest(registry)
def generate_latest_metrics(self, request):
    """ gets the latest metrics """
    # Pick the config section; fall back to 'default' for unknown sections.
    section = request.args.get('section', ['default'])[0]
    if section not in self.config.keys():
        log("{} is not a valid section, using default".format(section))
        section = 'default'

    # Resolve the vSphere host: config first, then ?target=, then
    # ?vsphere_host=. NOTE(review): 'section' is looked up with a str key
    # while 'target'/'vsphere_host' use bytes keys — presumably Twisted
    # request.args; confirm the str lookup actually matches.
    if self.config[section].get('vsphere_host') and self.config[
            section].get('vsphere_host') != "None":
        vsphere_host = self.config[section].get('vsphere_host')
    elif request.args.get(b'target', [None])[0]:
        vsphere_host = request.args.get(b'target', [None])[0].decode('utf-8')
    elif request.args.get(b'vsphere_host', [None])[0]:
        vsphere_host = request.args.get(b'vsphere_host')[0].decode('utf-8')
    else:
        # No host resolvable: report the error and finish the request early.
        request.setResponseCode(500)
        log("No vsphere_host or target defined")
        request.write(b'No vsphere_host or target defined!\n')
        request.finish()
        return

    registry = CollectorRegistry()
    registry.register(
        VmwareCollector(
            vsphere_host,
            self.config[section]['vsphere_user'],
            self.config[section]['vsphere_password'],
            self.config[section]['collect_only'],
            self.config[section]['ignore_ssl'],
        ))
    output = generate_latest(registry)
    request.write(output)
    request.finish()
def get_stats(self):
    """Render cached API-check results as Prometheus text exposition.

    Builds one ``check_<service>_api`` gauge per service (status values:
    fail = 0, ok = 1, unknown = 2) and returns the generated output.
    """
    registry = CollectorRegistry()
    labels = ['region', 'url', 'service']
    check_api_data_cache = self.get_cache_data()
    # Sort so rows for the same service are contiguous; the loop flushes
    # one gauge per contiguous run.
    check_api_data_cache.sort(key=self.take_service_name)
    REGISTRY_FLAG = ''
    check_gauge = None
    for check_api_data in check_api_data_cache:
        label_values = [
            check_api_data['region'], check_api_data['url'],
            check_api_data['service']
        ]
        gauge_name = self.gauge_name_sanitize("check_{}_api".format(
            check_api_data['service']))
        if REGISTRY_FLAG != gauge_name:
            # New service: flush the previous gauge (if any) and start fresh.
            if REGISTRY_FLAG:
                registry.register(OSCollector(check_gauge))
            check_gauge = GaugeMetricFamily(
                gauge_name,
                'Openstack API check. fail = 0, ok = 1 and unknown = 2',
                labels=labels)
            REGISTRY_FLAG = gauge_name
        check_gauge.add_metric(label_values, check_api_data['status'])
    # Fix: only register the trailing gauge when at least one row was seen.
    # The original unconditionally registered OSCollector([]) on an empty
    # cache, exposing a bogus collector wrapping an empty list.
    if REGISTRY_FLAG:
        registry.register(OSCollector(check_gauge))
    return generate_latest(registry)
def test_multiple_machines_in_multiple_subscriptions(enable_login):
    """One VM in each of two subscriptions yields one labelled sample per
    subscription in the generated exposition output."""
    # Stub the Azure list-VMs endpoint for each subscription.
    responses.add(
        method='GET',
        url='https://management.azure.com/subscriptions/SUBSCRIPTION_A/providers/Microsoft.Compute/virtualMachines?api-version=2017-03-30',
        match_querystring=True,
        json={'value': [
            {
                'id': '/subscriptions/SUBSCRIPTION_A/resourceGroups/RESOURCE_GROUP/providers/Microsoft.Compute/virtualMachines/NAME',
                'location': 'WESTEUROPE',
                'properties': {'hardwareProfile': {'vmSize': 'SIZE'}}
            }
        ]})
    responses.add(
        method='GET',
        url='https://management.azure.com/subscriptions/SUBSCRIPTION_B/providers/Microsoft.Compute/virtualMachines?api-version=2017-03-30',
        match_querystring=True,
        json={'value': [
            {
                'id': '/subscriptions/SUBSCRIPTION_B/resourceGroups/RESOURCE_GROUP/providers/Microsoft.Compute/virtualMachines/NAME',
                'location': 'WESTEUROPE',
                'properties': {'hardwareProfile': {'vmSize': 'SIZE'}}
            }
        ]})
    registry = CollectorRegistry()
    c = AzureAllocatedVMCollector('app_id', 'app_secret', 'tenant_id',
                                  ['SUBSCRIPTION_A', 'SUBSCRIPTION_B'],
                                  'SERIES_NAME')
    registry.register(c)
    # Scrape and check one series line per subscription is present.
    result = generate_latest(registry).decode('utf8').split('\n')
    assert 'SERIES_NAME{location="WESTEUROPE",resource_group="RESOURCE_GROUP",subscription="SUBSCRIPTION_A",vm_size="SIZE"} 1.0' in result
    assert 'SERIES_NAME{location="WESTEUROPE",resource_group="RESOURCE_GROUP",subscription="SUBSCRIPTION_B",vm_size="SIZE"} 1.0' in result
class Promethus():
    """Builds face-traffic gauges and pushes them to a Prometheus Pushgateway.

    NOTE(review): the class name is a misspelling of "Prometheus"; kept
    unchanged because callers reference it.
    """

    def create_screen(self, promethus_url, job_name='response_num', metric_name='traffic1'):
        """Create the per-screen gauge (with an extra 'emotions' label) and
        its own registry."""
        self.job = job_name
        self.promethus_url = promethus_url
        # Container holding all Metrics, maintained as Name -> Metric
        # (key -> value) pairs.
        self.registry = CollectorRegistry()
        self.face_total = Gauge(metric_name,
                                'Total response cout of diff age and gender',
                                ['instance', 'deviceid', 'age', 'gender', 'emotions'])
        self.registry.register(self.face_total)

    def create_device(self, promethus_url, job_name='response_num', metric_name='traffic2'):
        """Create the per-device gauge (no 'emotions' label) and its own
        registry."""
        self.job = job_name
        self.promethus_url = promethus_url
        self.registry = CollectorRegistry()
        self.face_total = Gauge(metric_name,
                                'Total response cout of diff age and gender',
                                ['instance', 'deviceid', 'age', 'gender'])
        self.registry.register(self.face_total)

    def push_prometheus(self):
        """Push the registry to the gateway, then zero all label children so
        the next interval starts from a clean count."""
        try:
            prometheus_client.push_to_gateway(self.promethus_url,
                                              job=self.job,
                                              registry=self.registry,
                                              timeout=3)
            # Reset the accumulated counts for every label combination.
            for label_text in self.face_total._metrics:
                self.face_total._metrics[label_text].set(0)
            # Unregister all collectors
            # for register in list(self.registry._collector_to_names):
            #     self.registry.unregister(register)
        except Exception as e:
            logging.error('push_to_gateway error %s' % e)
def main(logger):
    """Wire up config, metrics, DB session and event producer, then run.

    Registers shutdown hooks (in order: push metrics, close DB, close
    producer) and runs the main loop inside a session guard.
    """
    config = _init_config()

    # Register all collected metrics on a private registry so they can be
    # pushed to the Pushgateway on shutdown.
    registry = CollectorRegistry()
    for metric in COLLECTED_METRICS:
        registry.register(metric)
    job = _prometheus_job(config.kubernetes_namespace)
    prometheus_shutdown = partial(push_to_gateway,
                                  config.prometheus_pushgateway, job, registry)
    register_shutdown(prometheus_shutdown, "Pushing metrics")

    Session = _init_db(config)
    session = Session()
    # Dispose the engine (not just the session) when shutting down.
    register_shutdown(session.get_bind().dispose, "Closing database")

    event_producer = EventProducer(config)
    register_shutdown(event_producer.close, "Closing producer")

    shutdown_handler = ShutdownHandler()
    shutdown_handler.register()
    # session_guard commits/rolls back around the main loop.
    with session_guard(session):
        run(config, logger, session, event_producer, shutdown_handler)
def collect(self, params):
    """Load the YAML config, scrape it with a P4Collector, and return the
    Prometheus text exposition for the result."""
    with open(self._config_path, 'r') as cfg_file:
        cfg = yaml.safe_load(cfg_file)
    reg = CollectorRegistry()
    reg.register(P4Collector(cfg, params))
    return generate_latest(reg)
def start_http(port): # Create the registry print "starting port" + str(port) from prometheus_client.core import REGISTRY try: myCollector = MyCollector(port) reg = CollectorRegistry(auto_describe=True) reg.register(myCollector) def handler(*args, **kwargs): MyMetricHandler(reg, *args, **kwargs) #server=start_http_server(port,handler) #myCollector=MyCollector(port) #reg=CollectorRegistry(auto_describe=True) #reg.register(myCollector) server = HTTPServer(('', port), handler) server.serve_forever() #while True: time.sleep(1) except (KeyboardInterrupt, SystemExit): print "stop" finally: sys.exit(0) server.shutdown() server.close()
def route_array(array_type, m_type):
    """Produce FlashArray and FlashBlade metrics.

    Unknown metric types fall back to 'all'; unknown array types 404;
    collector construction failure yields 500.
    """
    collector = None
    if array_type == 'flasharray':
        if m_type not in ['array', 'volumes', 'hosts', 'pods']:
            m_type = 'all'
        collector = FlasharrayCollector
    elif array_type == 'flashblade':
        if m_type not in ['array', 'clients', 'usage']:
            m_type = 'all'
        collector = FlashbladeCollector
    else:
        abort(404)
    registry = CollectorRegistry()
    try:
        endpoint = request.args.get('endpoint', None)
        token = auth.current_user()
        registry.register(collector(endpoint, token, m_type))
    except Exception as e:
        # Fix: Logger.warn is a deprecated alias for Logger.warning.
        app.logger.warning('%s: %s', collector.__name__, str(e))
        abort(500)
    resp = make_response(generate_latest(registry), 200)
    resp.headers['Content-type'] = CONTENT_TYPE_LATEST
    return resp
def main():
    """Collect Jenkins metrics once and push them to a Pushgateway."""
    args = parse_args()
    # NOTE(review): `port` is parsed but never used below — presumably left
    # over from a local HTTP-server mode; confirm before removing.
    port = int(args.port)
    registry = CollectorRegistry()
    registry.register(
        JenkinsCollector(args.jenkins, args.user, args.password,
                         args.insecure))
    # NOTE(review): the Pushgateway address is hard-coded; consider making
    # it a CLI argument.
    push_to_gateway('13.66.189.221:9091', job='jenkins', registry=registry)
def collect_esl(config, host):
    """Scrape a host and return prometheus text format for it (asinc)"""
    esl_port = config.get('port', 8021)
    esl_password = config.get('password', 'ClueCon')
    reg = CollectorRegistry()
    reg.register(ChannelCollector(host, esl_port, esl_password))
    return generate_latest(reg)
def collect_snmp(config, host, port=161):
    """Scrape a host and return prometheus text format for it"""
    # Track wall-clock time so the scrape duration can be exported too.
    start = time.time()
    metrics = {}
    # One untyped Metric per configured rule, keyed by metric name.
    for metric in config['metrics']:
        metrics[metric['name']] = Metric(metric['name'],
                                         'SNMP OID {0}'.format(metric['oid']),
                                         'untyped')
    # Bulk GET is the default unless the config explicitly disables it.
    do_bulkget = 'bulkget' not in config or config['bulkget']
    values = walk_oids(host, port, config['walk'],
                       config.get('community', 'public'), do_bulkget)
    oids = {}
    for oid, value in values:
        oids[oid_to_tuple(oid)] = value
    # Netsnmp doesn't tell us if an error has occured, so
    # try to spot it by no results.
    if not oids:
        raise Exception("No OIDs returned, device not responding?")

    # Build a tree from the rules based on oid for faster lookup.
    metric_tree = {}
    for metric in config['metrics']:
        prefix = oid_to_tuple(metric['oid'])
        head = metric_tree
        for i in prefix:
            head.setdefault('children', {})
            head['children'].setdefault(i, {})
            head = head['children'][i]
        head['entry'] = metric

    # Descend the tree one OID component at a time; the first node carrying
    # an 'entry' is the (longest-prefix) matching rule.
    for oid, value in oids.items():
        head = metric_tree
        for i in oid:
            head = head.get('children', {}).get(i)
            if not head:
                break
            if 'entry' in head:
                metric = head['entry']
                prefix = oid_to_tuple(metric['oid'])
                value = float(value)
                # The OID suffix past the prefix carries the table indexes.
                indexes = oid[len(prefix):]
                labels = parse_indexes(indexes, metric.get('indexes', {}),
                                       metric.get('lookups', {}), oids)
                metrics[metric['name']].add_sample(metric['name'],
                                                   value=value, labels=labels)
                break

    # Minimal collector that just replays the pre-built metrics.
    class Collector():
        def collect(self):
            return metrics.values()
    registry = CollectorRegistry()
    registry.register(Collector())
    duration = Gauge('snmp_scrape_duration_seconds',
                     'Time this SNMP scrape took, in seconds',
                     registry=registry)
    duration.set(time.time() - start)
    walked = Gauge('snmp_oids_walked',
                   'Number of oids walked in this scrape',
                   registry=registry)
    walked.set(len(oids))
    return generate_latest(registry)
class MetricsRegistry:
    """A registry for metrics."""

    # Underlying prometheus_client registry holding every metric.
    registry: CollectorRegistry

    def __init__(self):
        self.registry = CollectorRegistry(auto_describe=True)
        # Name -> Metric map for metrics created via create_metrics().
        self._metrics: Dict[str, Metric] = {}

    def create_metrics(self,
                       configs: Iterable[MetricConfig]) -> Dict[str, Metric]:
        """Create Prometheus metrics from a list of MetricConfigs."""
        metrics: Dict[str, Metric] = {
            config.name: self._register_metric(config)
            for config in configs
        }
        self._metrics.update(metrics)
        return metrics

    def get_metric(self,
                   name: str,
                   labels: Optional[Dict[str, str]] = None) -> Metric:
        """Return a metric, optionally configured with labels."""
        metric = self._metrics[name]
        if labels:
            return metric.labels(**labels)
        return metric

    def get_metrics(self) -> Dict[str, Metric]:
        """Return a dict mapping names to metrics."""
        return self._metrics.copy()

    def register_additional_collector(self, collector):
        """Register an additional collector or metric.

        Metric(s) for the collector will not be included in the result of
        get_metrics.
        """
        self.registry.register(collector)

    def generate_metrics(self) -> bytes:
        """Generate text with metrics values from the registry."""
        return bytes(generate_latest(self.registry))

    def _register_metric(self, config: MetricConfig) -> Metric:
        # Translate MetricConfig options into constructor kwargs for the
        # concrete metric class, dropping unknown keys.
        metric_type = METRIC_TYPES[config.type]
        options = {
            metric_type.options[key]: value
            for key, value in config.config.items()
            if key in metric_type.options
        }
        return metric_type.cls(config.name,
                               config.description,
                               registry=self.registry,
                               **options)
def __init__(self, template_file: str, device: AbstractDevice,
             registry: CollectorRegistry):
    """Parse the TextFSM template, remember the device, and (optionally)
    register this collector on the given registry."""
    with open(template_file, "r") as tmpl:
        self._parser = textfsm.TextFSM(tmpl)
    self._device = device
    # A falsy registry means "do not self-register".
    if registry:
        registry.register(self)
def __init__(self, registry: CollectorRegistry, namespace: str = "",
             gc=gc) -> None:
    """Register this collector, but only on CPython with gc.get_stats()
    available; otherwise stay inert."""
    supported = (hasattr(gc, "get_stats")
                 and platform.python_implementation() == "CPython")
    if not supported:
        return
    if namespace:
        self.namespace = f"{namespace}_"
    registry.register(self)
def setup_bridge(prom, retry, graphite):
    """Build a GraphiteBridge that forwards scrapes of `prom` to the given
    Graphite (host, port) pair."""
    reg = CollectorRegistry()
    reg.register(ScrapeCollector(prom, retry))
    logging.info('Scrape target: %s', prom)
    logging.info('Graphite server: %s:%i', *graphite)
    return GraphiteBridge(graphite, registry=reg)
class TestGenerateText(unittest.TestCase):
    """Checks generate_latest() output for each metric type on a private
    registry."""

    def setUp(self):
        self.registry = CollectorRegistry()

    def test_counter(self):
        c = Counter('cc', 'A counter', registry=self.registry)
        c.inc()
        self.assertEqual(b'# HELP cc A counter\n# TYPE cc counter\ncc 1.0\n',
                         generate_latest(self.registry))

    def test_gauge(self):
        g = Gauge('gg', 'A gauge', registry=self.registry)
        g.set(17)
        self.assertEqual(b'# HELP gg A gauge\n# TYPE gg gauge\ngg 17.0\n',
                         generate_latest(self.registry))

    def test_summary(self):
        s = Summary('ss', 'A summary', ['a', 'b'], registry=self.registry)
        s.labels('c', 'd').observe(17)
        self.assertEqual(
            b'# HELP ss A summary\n# TYPE ss summary\nss_count{a="c",b="d"} 1.0\nss_sum{a="c",b="d"} 17.0\n',
            generate_latest(self.registry))

    def test_unicode(self):
        # Non-ASCII help text and label values are emitted UTF-8 encoded.
        c = Counter('cc', '\u4500', ['l'], registry=self.registry)
        c.labels('\u4500').inc()
        self.assertEqual(
            b'# HELP cc \xe4\x94\x80\n# TYPE cc counter\ncc{l="\xe4\x94\x80"} 1.0\n',
            generate_latest(self.registry))

    def test_escaping(self):
        # Backslashes and newlines must be escaped in HELP and label values.
        c = Counter('cc', 'A\ncount\\er', ['a'], registry=self.registry)
        c.labels('\\x\n"').inc(1)
        self.assertEqual(
            b'# HELP cc A\\ncount\\\\er\n# TYPE cc counter\ncc{a="\\\\x\\n\\""} 1.0\n',
            generate_latest(self.registry))

    def test_nonnumber(self):
        # Sample values only need to be float()-convertible.
        class MyNumber():
            def __repr__(self):
                return "MyNumber(123)"

            def __float__(self):
                return 123.0

        class MyCollector():
            def collect(self):
                metric = Metric("nonnumber", "Non number", 'untyped')
                metric.add_sample("nonnumber", {}, MyNumber())
                yield metric
        self.registry.register(MyCollector())
        self.assertEqual(
            b'# HELP nonnumber Non number\n# TYPE nonnumber untyped\nnonnumber 123.0\n',
            generate_latest(self.registry))
def collect_snmp(config, host, port=161):
    """Scrape a host and return prometheus text format for it"""
    # Track wall-clock time so the scrape duration can be exported too.
    start = time.time()
    metrics = {}
    # One untyped Metric per configured rule, keyed by metric name.
    for metric in config['metrics']:
        metrics[metric['name']] = Metric(metric['name'],
                                         'SNMP OID {0}'.format(metric['oid']),
                                         'untyped')
    values = walk_oids(host, port, config['walk'],
                       config.get('community', 'public'))
    oids = {}
    for oid, value in values:
        oids[oid_to_tuple(oid)] = value
    # Netsnmp doesn't tell us if an error has occured, so
    # try to spot it by no results.
    if not oids:
        raise Exception("No OIDs returned, device not responding?")

    # Build a tree from the rules based on oid for faster lookup.
    metric_tree = {}
    for metric in config['metrics']:
        prefix = oid_to_tuple(metric['oid'])
        head = metric_tree
        for i in prefix:
            head.setdefault('children', {})
            head['children'].setdefault(i, {})
            head = head['children'][i]
        head['entry'] = metric

    # Descend the tree one OID component at a time; the first node carrying
    # an 'entry' is the (longest-prefix) matching rule.
    for oid, value in oids.items():
        head = metric_tree
        for i in oid:
            head = head.get('children', {}).get(i)
            if not head:
                break
            if 'entry' in head:
                metric = head['entry']
                prefix = oid_to_tuple(metric['oid'])
                value = float(value)
                # The OID suffix past the prefix carries the table indexes.
                indexes = oid[len(prefix):]
                labels = parse_indexes(indexes, metric.get('indexes', {}),
                                       metric.get('lookups', {}), oids)
                metrics[metric['name']].add_sample(metric['name'],
                                                   value=value, labels=labels)
                break

    # Minimal collector that just replays the pre-built metrics.
    class Collector():
        def collect(self):
            return metrics.values()
    registry = CollectorRegistry()
    registry.register(Collector())
    duration = Gauge('snmp_scrape_duration_seconds',
                     'Time this SNMP scrape took, in seconds',
                     registry=registry)
    duration.set(time.time() - start)
    walked = Gauge('snmp_oids_walked',
                   'Number of oids walked in this scrape',
                   registry=registry)
    walked.set(len(oids))
    return generate_latest(registry)
def metrics():
    """Scrape Azure EA billing data and return it as a Prometheus response;
    502 if the scrape fails."""
    timeout = _get_timeout()
    cfg = current_app.config
    collector = AzureEABillingCollector(
        cfg['PROMETHEUS_METRIC_NAME'],
        cfg['ENROLLMENT_NUMBER'],
        cfg['BILLING_API_ACCESS_KEY'],
        timeout)
    registry = CollectorRegistry()
    registry.register(collector)
    try:
        content = generate_latest(registry)
    except Exception as e:
        abort(Response("Scrape failed: {}".format(e), status=502))
    return content, 200, {'Content-Type': CONTENT_TYPE_LATEST}
def collect_push_metrics_cpu(event, context):
    """Continuously sample CPU utilisation and push it to the Pushgateway.

    Runs forever; pushes under job '<function_name>_cpu'.
    """
    # Fix: build the registry and register the metric ONCE instead of
    # re-creating both on every loop iteration.
    registry = CollectorRegistry()
    registry.register(cpu_metric)
    job = context.function_name + '_cpu'
    while True:
        time.sleep(0.5)
        # NOTE(review): each set() overwrites the previous value, so only
        # the LAST core's percentage survives (and `c` is unused) — confirm
        # whether a per-core labelled metric was intended.
        for c, p in enumerate(psutil.cpu_percent(interval=1, percpu=True)):
            cpu_metric.set(p)
        push_to_gateway(push_gateway_host, job=job, registry=registry)
def collect_push_metrics_ram(event, context):
    """Continuously sample used RAM (bytes) and push it to the Pushgateway.

    Runs forever; pushes under job '<function_name>_ram'.
    """
    # Fix: build the registry and register the metric ONCE instead of
    # re-creating both on every loop iteration.
    registry = CollectorRegistry()
    registry.register(ram_metric)
    job = context.function_name + '_ram'
    while True:
        time.sleep(0.5)
        ram = psutil.virtual_memory()
        ram_metric.set(ram.used)
        push_to_gateway(push_gateway_host, job=job, registry=registry)
def collect_pve(config, host):
    """Scrape a Proxmox VE host and return prometheus text format for it."""
    pve = ProxmoxAPI(host, **config)
    registry = CollectorRegistry()
    # Register every Proxmox collector against the same API client.
    collector_classes = (
        StatusCollector,
        ClusterResourcesCollector,
        ClusterNodeCollector,
        ClusterInfoCollector,
        ClusterNodeConfigCollector,
        VersionCollector,
    )
    for collector_cls in collector_classes:
        registry.register(collector_cls(pve))
    return generate_latest(registry)
class MetricsRegistry:
    """A registry for metrics."""

    def __init__(self):
        self.registry = CollectorRegistry(auto_describe=True)
        # Name -> metric map for metrics created via create_metrics().
        self._metrics = {}

    def create_metrics(self, configs):
        """Create Prometheus metrics from a list of MetricConfigs."""
        metrics = {
            config.name: self._register_metric(config)
            for config in configs
        }
        self._metrics.update(metrics)
        return metrics

    def get_metric(self, name, labels=None):
        """Return a metric, optionally configured with labels."""
        metric = self._metrics[name]
        if labels:
            return metric.labels(**labels)
        return metric

    def get_metrics(self):
        """Return a dict mapping names to metrics."""
        return self._metrics.copy()

    def register_additional_collector(self, collector):
        """Register an additional collector or metric.

        Metric(s) for the collector will not be included in the result of
        get_metrics.
        """
        self.registry.register(collector)

    def generate_metrics(self):
        """Generate text with metrics values from the registry."""
        return generate_latest(self.registry)

    def _register_metric(self, config):
        # Translate MetricConfig options into constructor kwargs for the
        # concrete metric class, dropping unknown keys.
        metric_info = METRIC_TYPES[config.type]
        options = {
            metric_info['options'][key]: value
            for key, value in config.config.items()
            if key in metric_info['options']
        }
        return metric_info['class'](config.name,
                                    config.description,
                                    registry=self.registry,
                                    **options)
def test_failing_requests(api_url, enrollment, status):
    """HTTP error statuses from the billing API surface as requests.HTTPError
    during collection (i.e. generate_latest)."""
    registry = CollectorRegistry()
    c = AzureEABillingCollector('cloud_costs', enrollment, 'abc123xyz', 42.3)
    registry.register(c)

    # Stub the billing endpoint with the parametrised failure status.
    responses.add(
        method='GET',
        url=api_url,
        match_querystring=True,
        status=status
    )

    with pytest.raises(requests.HTTPError):
        generate_latest(registry)
def test_base_reservation(enable_login, single_order, reservation):
    """A single reservation order yields both the quantity series and the
    next-expiration timestamp series."""
    # Stub the reservations listing for the single order.
    responses.add(
        method='GET',
        url='https://management.azure.com/providers/Microsoft.Capacity/reservationOrders/SINGLE_ORDER/reservations?api-version=2017-11-01',
        match_querystring=True,
        json={'value': [reservation]})
    registry = CollectorRegistry()
    c = AzureReservedVMCollector('app_id', 'app_secret', 'tenant_id',
                                 'SERIES_NAME')
    registry.register(c)
    result = generate_latest(registry).decode('utf8').split('\n')
    assert 'SERIES_NAME{duration="1-year",location="WESTEUROPE",subscription="SOME_SUBSCRIPTION",vm_size="SIZE"} 42.0' in result
    assert 'SERIES_NAME_next_expiration{duration="1-year",location="WESTEUROPE",subscription="SOME_SUBSCRIPTION",vm_size="SIZE"} 1546300800.0' in result
def do_GET(self):
    """Mutate the demo metrics, then serve both the per-request registry and
    the global registry in the negotiated exposition format.

    Relies on module-level `c` (counter), `e` (enum/state), `h` (histogram),
    `collectors` and `global_registry`.
    """
    c.inc()
    e.state(random.choice(['starting', 'running', 'stopped']))
    h.observe(random.randint(1, 11))
    # A fresh registry per request, populated from the shared collectors.
    registry = CollectorRegistry(auto_describe=True)
    for i in collectors:
        registry.register(i)
    # Pick text vs OpenMetrics encoding from the Accept header.
    encoder, content_type = choose_encoder(self.headers.get('Accept'))
    output = encoder(registry)
    output2 = encoder(global_registry)
    self.send_response(200)
    self.send_header('Content-Type', content_type)
    self.end_headers()
    # Both payloads are concatenated into one response body.
    self.wfile.write(output)
    self.wfile.write(output2)
def collect_es(name, config, host, kerberos, tls, uname=None, pword=None):
    """Execute a search against Elasticsearch and return prometheus text
    format for it.

    Auth resolution: explicit uname/pword (basic auth) wins, else Kerberos
    if requested, else unauthenticated. Raises on connection failure or a
    non-200 query response.
    """
    # Fix: the original request code was syntactically broken (a stray
    # `data=` clause outside the call) and passed `auth(uname, pword)`
    # positionally, which would have raised TypeError. Build the URL,
    # payload and auth once and issue a single, well-formed request.
    s = 's' if tls else ''
    url = "http{}://{}/{}/_search".format(s, host, config['index'])
    payload = json.dumps({"query": config['query'], "size": 0})
    if uname and pword:
        request_auth = (uname, pword)
    elif kerberos:
        request_auth = HTTPKerberosAuth()
    else:
        request_auth = None
    try:
        r = requests.get(url, data=payload, auth=request_auth)
    except requests.exceptions.RequestException:
        # Narrowed from a bare `except:` so programming errors still surface.
        raise Exception('Cannot connect to Elasticsearch host: {}'.format(host))

    if r.status_code == 200:
        count = r.json()['hits']['total']
        successful = r.json()['_shards']['successful']
        failed = r.json()['_shards']['failed']
        total = r.json()['_shards']['total']
        duration = float(r.json()['took']) / 1000  # ES reports millis
        timed_out = r.json()['timed_out']
    else:
        raise Exception('Query failed: {}'.format(r.json()))

    metrics = {}
    metrics['results'] = Metric('es_search_results_total',
                                'Number of matching results from Elasticsearch',
                                'gauge')
    metrics['results'].add_sample('es_search_results_total', value=count,
                                  labels={'search': name})
    metrics['successful'] = Metric('es_search_shards_successful_total',
                                   'Number of shards where the query returned successfully',
                                   'gauge')
    metrics['successful'].add_sample('es_search_shards_successful_total',
                                     value=successful, labels={'search': name})
    metrics['failed'] = Metric('es_search_shards_failed_total',
                               'Number of shards where the query failed',
                               'gauge')
    metrics['failed'].add_sample('es_search_shards_failed_total',
                                 value=failed, labels={'search': name})
    metrics['total'] = Metric('es_search_shards_total',
                              'Number of shards queried', 'gauge')
    metrics['total'].add_sample('es_search_shards_total', value=total,
                                labels={'search': name})
    metrics['timed_out'] = Metric('es_search_timed_out',
                                  'Did the query time out', 'gauge')
    metrics['timed_out'].add_sample('es_search_timed_out', value=timed_out,
                                    labels={'search': name})
    metrics['duration'] = Metric('es_search_duration_seconds',
                                 'Time Elasticsearch search took, in seconds',
                                 'gauge')
    metrics['duration'].add_sample('es_search_duration_seconds',
                                   value=duration, labels={'search': name})

    # Minimal collector that just replays the pre-built metrics.
    class Collector():
        def collect(self):
            return metrics.values()
    registry = CollectorRegistry()
    registry.register(Collector())
    return generate_latest(registry)
def collect_tftp(host, port_str='69', tftp_file='pxelinux.0'):
    """
    Collect tftp-probing data from a host and return prometheus formatted stats
    """
    # Track wall-clock time so probe duration can be exported.
    start = time.time()
    metrics = {}
    res = tftp_probe(host=host, port=port_str, tftp_file=tftp_file)
    # pp.pprint(res)

    # noinspection PyMethodMayBeStatic
    class Collector(object):
        """
        Collector-class with necessary callback-functions.
        """

        def collect(self):
            """
            This is called on every web-request.
            """
            return metrics.values()

    registry = CollectorRegistry()
    registry.register(Collector())
    probe_duration = Gauge(
        'tftp_probe_duration_seconds',
        'Returns how long the probe took to return in seconds',
        registry=registry)
    probe_success = Gauge('tftp_probe_success',
                          'Displays whether or not the probe was a success',
                          registry=registry)
    probe_content_length = Gauge('tftp_probe_content_length_bytes',
                                 'Content bytes received',
                                 registry=registry)
    probe_dl_speed = Gauge('tftp_probe_dl_speed',
                           'Returns the download speed in KB/s',
                           registry=registry)
    probe_duration.set(time.time() - start)
    # Only set the gauges the probe actually reported; missing keys leave
    # the corresponding gauge at its default.
    if res:
        if 'status' in res:
            probe_success.set(res['status'])
        if 'rec_bytes' in res:
            probe_content_length.set(res['rec_bytes'])
        if 'speed_KBpS' in res:
            probe_dl_speed.set(res['speed_KBpS'])
    return generate_latest(registry)
def route_flasharray(m_type: str):
    """Produce FlashArray metrics.

    Unknown metric types fall back to 'all'; collector construction failure
    yields 500.
    """
    if m_type not in ['array', 'volumes', 'hosts', 'pods']:
        m_type = 'all'
    collector = FlasharrayCollector
    registry = CollectorRegistry()
    try:
        endpoint = request.args.get('endpoint', None)
        token = request.args.get('apitoken', None)
        registry.register(collector(endpoint, token, m_type))
    except Exception as e:
        # Fix: Logger.warn is a deprecated alias for Logger.warning.
        app.logger.warning('%s: %s', collector.__name__, str(e))
        abort(500)
    resp = make_response(generate_latest(registry), 200)
    resp.headers['Content-type'] = CONTENT_TYPE_LATEST
    return resp
def collect_snmp(config, host, port=161):
    """Scrape a host and return prometheus text format for it.

    Metric type/help default to 'gauge' / an OID-derived string when not
    configured; non-numeric walked values are recorded as 0.0.
    """
    start = time.time()
    metrics = {}
    for metric in config['metrics']:
        # Idiom: dict.get replaces the `x if k in d else default` dance.
        prom_type = metric.get('metric_type', 'gauge')
        prom_help = metric.get(
            'metric_help',
            'SNMP OID {0}'.format(metric.get('oid', "NaN")))
        metrics[metric['name']] = Metric(metric['name'], prom_help, prom_type)
    values = walk_oids(host, port, config['walk'],
                       config.get('community', 'public'),
                       config.get('timeout', 5), config.get('retries', 3))
    oids = {}
    for oid, value in values:
        key = oid_to_tuple(oid)
        # Fix: replaced `(not x) or x == None` with the equivalent `not x`
        # (`not x` is already True for None) and dropped the `== None`
        # comparison. Semantics: keep the first non-empty value for an OID;
        # an empty placeholder may be overwritten by real data.
        if key in oids:
            if not oids[key] and value:
                oids[key] = value
        else:
            oids[key] = value
    for oid, value in oids.items():
        for metric in config['metrics']:
            prefix = oid_to_tuple(metric['oid'])
            if oid[:len(prefix)] == prefix:
                try:
                    value = float(value)
                except ValueError as e:
                    # Non-numeric SNMP value: log and export as 0.0.
                    print(e)
                    value = 0.0
                indexes = oid[len(prefix):]
                labels = parse_indexes(indexes, metric.get('indexes', {}),
                                       metric.get('lookups', {}), oids)
                metrics[metric['name']].add_sample(metric['name'],
                                                   value=value, labels=labels)

    # Minimal collector that just replays the pre-built metrics.
    class Collector():
        def collect(self):
            return metrics.values()
    registry = CollectorRegistry()
    registry.register(Collector())
    duration = Gauge('snmp_scrape_duration_seconds',
                     'Time this SNMP scrape took, in seconds',
                     registry=registry)
    duration.set(time.time() - start)
    walked = Gauge('snmp_oids_walked',
                   'Number of oids walked in this scrape',
                   registry=registry)
    walked.set(len(oids))
    return generate_latest(registry)
class TestGenerateText(unittest.TestCase):
    """Checks generate_latest() output for each metric type on a private
    registry."""

    def setUp(self):
        self.registry = CollectorRegistry()

    def test_counter(self):
        c = Counter('cc', 'A counter', registry=self.registry)
        c.inc()
        self.assertEqual(b'# HELP cc A counter\n# TYPE cc counter\ncc 1.0\n',
                         generate_latest(self.registry))

    def test_gauge(self):
        g = Gauge('gg', 'A gauge', registry=self.registry)
        g.set(17)
        self.assertEqual(b'# HELP gg A gauge\n# TYPE gg gauge\ngg 17.0\n',
                         generate_latest(self.registry))

    def test_summary(self):
        s = Summary('ss', 'A summary', ['a', 'b'], registry=self.registry)
        s.labels('c', 'd').observe(17)
        self.assertEqual(
            b'# HELP ss A summary\n# TYPE ss summary\nss_count{a="c",b="d"} 1.0\nss_sum{a="c",b="d"} 17.0\n',
            generate_latest(self.registry))

    def test_unicode(self):
        # Non-ASCII help text and label values are emitted UTF-8 encoded.
        c = Counter('cc', '\u4500', ['l'], registry=self.registry)
        c.labels('\u4500').inc()
        self.assertEqual(
            b'# HELP cc \xe4\x94\x80\n# TYPE cc counter\ncc{l="\xe4\x94\x80"} 1.0\n',
            generate_latest(self.registry))

    def test_escaping(self):
        # Backslashes and newlines must be escaped in HELP and label values.
        c = Counter('cc', 'A\ncount\\er', ['a'], registry=self.registry)
        c.labels('\\x\n"').inc(1)
        self.assertEqual(
            b'# HELP cc A\\ncount\\\\er\n# TYPE cc counter\ncc{a="\\\\x\\n\\""} 1.0\n',
            generate_latest(self.registry))

    def test_nonnumber(self):
        # Sample values only need to be float()-convertible.
        class MyNumber():
            def __repr__(self):
                return "MyNumber(123)"

            def __float__(self):
                return 123.0

        class MyCollector():
            def collect(self):
                metric = Metric("nonnumber", "Non number", 'untyped')
                metric.add_sample("nonnumber", {}, MyNumber())
                yield metric
        self.registry.register(MyCollector())
        self.assertEqual(
            b'# HELP nonnumber Non number\n# TYPE nonnumber untyped\nnonnumber 123.0\n',
            generate_latest(self.registry))
    def __init__(self):
        pass

    def collect(self):
        # Fetch the live Hydra status and delegate metric construction to
        # the importer. NOTE(review): these methods belong to an enclosing
        # class (registered below as ScrapeCollector) whose header is
        # outside this view.
        return HydraScrapeImporter(scrape()).collect()


def scrape(cached=None):
    """Return Hydra queue-runner status JSON.

    Reads from the `cached` file path when given; otherwise fetches from
    hydra.nixos.org.
    """
    if cached:
        with open(cached) as f:
            return json.load(f)
    else:
        print("Scraping")
        return requests.get(
            'https://hydra.nixos.org/queue-runner-status',
            headers={
                "Content-Type": "application/json"
            }
        ).json()


# Module-level registry exposing the scrape collector.
registry = CollectorRegistry()
registry.register(ScrapeCollector())

if __name__ == '__main__':
    # Start up the server to expose the metrics.
    start_http_server(9200, registry=registry)
    # Generate some requests.
    while True:
        time.sleep(30)
class TestGenerateText(unittest.TestCase):
    """Byte-exact tests of generate_latest() text exposition output.

    Covers counter, gauge, summary and histogram rendering plus the
    unicode, escaping and float()-convertible-value edge cases.
    """

    def setUp(self):
        # One registry per test keeps cases independent.
        self.registry = CollectorRegistry()

    def test_counter(self):
        counter = Counter("cc", "A counter", registry=self.registry)
        counter.inc()
        self.assertEqual(b"# HELP cc A counter\n# TYPE cc counter\ncc 1.0\n", generate_latest(self.registry))

    def test_gauge(self):
        gauge = Gauge("gg", "A gauge", registry=self.registry)
        gauge.set(17)
        self.assertEqual(b"# HELP gg A gauge\n# TYPE gg gauge\ngg 17.0\n", generate_latest(self.registry))

    def test_summary(self):
        summary = Summary("ss", "A summary", ["a", "b"], registry=self.registry)
        summary.labels("c", "d").observe(17)
        self.assertEqual(
            b'# HELP ss A summary\n# TYPE ss summary\nss_count{a="c",b="d"} 1.0\nss_sum{a="c",b="d"} 17.0\n',
            generate_latest(self.registry),
        )

    def test_histogram(self):
        # A single observation lands in every bucket at or above 0.05,
        # including the implicit +Inf bucket.
        histogram = Histogram("hh", "A histogram", registry=self.registry)
        histogram.observe(0.05)
        self.assertEqual(
            b"""# HELP hh A histogram
# TYPE hh histogram
hh_bucket{le="0.005"} 0.0
hh_bucket{le="0.01"} 0.0
hh_bucket{le="0.025"} 0.0
hh_bucket{le="0.05"} 1.0
hh_bucket{le="0.075"} 1.0
hh_bucket{le="0.1"} 1.0
hh_bucket{le="0.25"} 1.0
hh_bucket{le="0.5"} 1.0
hh_bucket{le="0.75"} 1.0
hh_bucket{le="1.0"} 1.0
hh_bucket{le="2.5"} 1.0
hh_bucket{le="5.0"} 1.0
hh_bucket{le="7.5"} 1.0
hh_bucket{le="10.0"} 1.0
hh_bucket{le="+Inf"} 1.0
hh_count 1.0
hh_sum 0.05
""",
            generate_latest(self.registry),
        )

    def test_unicode(self):
        # Non-ASCII help text and label values come out UTF-8 encoded.
        counter = Counter("cc", "\u4500", ["l"], registry=self.registry)
        counter.labels("\u4500").inc()
        self.assertEqual(
            b'# HELP cc \xe4\x94\x80\n# TYPE cc counter\ncc{l="\xe4\x94\x80"} 1.0\n', generate_latest(self.registry)
        )

    def test_escaping(self):
        # Newlines/backslashes escape in help; quotes too inside labels.
        counter = Counter("cc", "A\ncount\\er", ["a"], registry=self.registry)
        counter.labels('\\x\n"').inc(1)
        self.assertEqual(
            b'# HELP cc A\\ncount\\\\er\n# TYPE cc counter\ncc{a="\\\\x\\n\\""} 1.0\n', generate_latest(self.registry)
        )

    def test_nonnumber(self):
        # Any float()-convertible object is accepted as a sample value.
        class MyNumber:
            def __repr__(self):
                return "MyNumber(123)"

            def __float__(self):
                return 123.0

        class MyCollector:
            def collect(self):
                metric = Metric("nonnumber", "Non number", "untyped")
                metric.add_sample("nonnumber", {}, MyNumber())
                yield metric

        self.registry.register(MyCollector())
        self.assertEqual(
            b"# HELP nonnumber Non number\n# TYPE nonnumber untyped\nnonnumber 123.0\n", generate_latest(self.registry)
        )
class TestGenerateText(unittest.TestCase):
    """Verifies the exact bytes generate_latest() emits per metric type.

    The histogram case is gated on Python 2.7+ (older interpreters lack
    what the test relies on); the skip marker is preserved as-is.
    """

    def setUp(self):
        # New registry for every test: no cross-test metric pollution.
        self.registry = CollectorRegistry()

    def test_counter(self):
        counter = Counter('cc', 'A counter', registry=self.registry)
        counter.inc()
        self.assertEqual(
            b'# HELP cc A counter\n# TYPE cc counter\ncc 1.0\n',
            generate_latest(self.registry))

    def test_gauge(self):
        gauge = Gauge('gg', 'A gauge', registry=self.registry)
        gauge.set(17)
        self.assertEqual(
            b'# HELP gg A gauge\n# TYPE gg gauge\ngg 17.0\n',
            generate_latest(self.registry))

    def test_summary(self):
        summary = Summary('ss', 'A summary', ['a', 'b'], registry=self.registry)
        summary.labels('c', 'd').observe(17)
        self.assertEqual(
            b'# HELP ss A summary\n# TYPE ss summary\nss_count{a="c",b="d"} 1.0\nss_sum{a="c",b="d"} 17.0\n',
            generate_latest(self.registry))

    @unittest.skipIf(sys.version_info < (2, 7), "Test requires Python 2.7+.")
    def test_histogram(self):
        # One observation at 0.05 fills that bucket and all wider ones.
        histogram = Histogram('hh', 'A histogram', registry=self.registry)
        histogram.observe(0.05)
        self.assertEqual(b'''# HELP hh A histogram
# TYPE hh histogram
hh_bucket{le="0.005"} 0.0
hh_bucket{le="0.01"} 0.0
hh_bucket{le="0.025"} 0.0
hh_bucket{le="0.05"} 1.0
hh_bucket{le="0.075"} 1.0
hh_bucket{le="0.1"} 1.0
hh_bucket{le="0.25"} 1.0
hh_bucket{le="0.5"} 1.0
hh_bucket{le="0.75"} 1.0
hh_bucket{le="1.0"} 1.0
hh_bucket{le="2.5"} 1.0
hh_bucket{le="5.0"} 1.0
hh_bucket{le="7.5"} 1.0
hh_bucket{le="10.0"} 1.0
hh_bucket{le="+Inf"} 1.0
hh_count 1.0
hh_sum 0.05
''', generate_latest(self.registry))

    def test_unicode(self):
        # Non-ASCII help and label values are UTF-8 encoded on output.
        counter = Counter('cc', '\u4500', ['l'], registry=self.registry)
        counter.labels('\u4500').inc()
        self.assertEqual(
            b'# HELP cc \xe4\x94\x80\n# TYPE cc counter\ncc{l="\xe4\x94\x80"} 1.0\n',
            generate_latest(self.registry))

    def test_escaping(self):
        # Escaping rules: \n and \\ in help; \n, \\ and " in label values.
        counter = Counter('cc', 'A\ncount\\er', ['a'], registry=self.registry)
        counter.labels('\\x\n"').inc(1)
        self.assertEqual(
            b'# HELP cc A\\ncount\\\\er\n# TYPE cc counter\ncc{a="\\\\x\\n\\""} 1.0\n',
            generate_latest(self.registry))

    def test_nonnumber(self):
        # Sample values need only support float() conversion.
        class MyNumber():
            def __repr__(self):
                return "MyNumber(123)"

            def __float__(self):
                return 123.0

        class MyCollector():
            def collect(self):
                metric = Metric("nonnumber", "Non number", 'untyped')
                metric.add_sample("nonnumber", {}, MyNumber())
                yield metric

        self.registry.register(MyCollector())
        self.assertEqual(
            b'# HELP nonnumber Non number\n# TYPE nonnumber untyped\nnonnumber 123.0\n',
            generate_latest(self.registry))