Example #1
def countMetrics(job, instance, targetFile, content):
    metrics = parser.text_string_to_metric_families(content)
    for metric in metrics:
        metricsCountWriter.writerow([
            job, instance, metric.name,
            len(metric.samples), targetFile, metric.documentation
        ])
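Example #1 relies on a parser import and a metricsCountWriter CSV writer defined elsewhere in its project. A minimal sketch of that assumed setup (the file name and header row are hypothetical) could be:

import csv
from prometheus_client import parser

# Hypothetical CSV sink for the per-family counts written by countMetrics above.
metrics_count_file = open("metrics_count.csv", "w", newline="")
metricsCountWriter = csv.writer(metrics_count_file)
metricsCountWriter.writerow(
    ["job", "instance", "metric", "samples", "target_file", "help"])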
Example #2
def test_multiple_record(cleanup_agent):
    # Make sure Prometheus exports data properly when multiple points with
    # the same name are reported.
    TOTAL_POINTS = 10
    NAME = "TEST"
    values = list(range(TOTAL_POINTS))
    tags = [{"TAG_KEY": str(i)} for i in range(TOTAL_POINTS)]
    timestamps = list(range(TOTAL_POINTS))
    points = []

    for i in range(TOTAL_POINTS):
        points.append(
            generate_metrics_point(name=NAME,
                                   value=values[i],
                                   timestamp=timestamps[i],
                                   tags=tags[i]))
    for point in points:
        metrics_agent.record_metrics_points([point])

    # Make sure data is available at prometheus.
    response = requests.get("http://localhost:{}".format(
        metrics_agent.metrics_export_port))
    response.raise_for_status()

    sample_values = []
    for line in response.text.split("\n"):
        for family in text_string_to_metric_families(line):
            metric_name = family.name
            name_without_prefix = metric_name.split("_")[1]
            if name_without_prefix != NAME:
                continue
            # Lines for recorded metrics values.
            for sample in family.samples:
                sample_values.append(sample.value)
    assert sample_values == [point.value for point in points]
Example #3
    def text_metric(self, met):
        '''process text value from etcd'''
        return_data = {}

        content = self.call_etcd_api(met['path'])
        if content:
            for metric in text_string_to_metric_families(content):
                # skipping histogram and summary types unless we find a good way to add them to zabbix (unlikely)
                if metric.type in ['histogram', 'summary']:
                    continue
                elif metric.type in ['counter', 'gauge'
                                     ] and metric.name in met['values']:
                    zab_metric_name = met['prefix'] + metric.name.replace(
                        '_', '.')
                    if len(metric.samples) > 1:
                        if met['values'][metric.name]:
                            sub_key = met['values'][metric.name]
                        for singlemetric in metric.samples:
                            return_data['{0}.{1}'.format(
                                zab_metric_name,
                                singlemetric[1][sub_key])] = singlemetric[2]
                    else:
                        return_data[zab_metric_name] = metric.samples[0][2]
                else:
                    if self.args.debug:
                        print('Got unknown type of metric from etcd, skipping it: ({0}) '.format(
                            metric.type))

        return return_data
Example #4
    def prometheus_flat(self, include_keys=None):
        t = self.__request().text
        result = {}
        for prom_family in text_string_to_metric_families(t):
            # prom_family.name - name
            # prom_family.type - type (gauge, summary, ...)
            # prom_family.samples - samples
            for prom_sample in prom_family.samples:
                # prom_sample[0] = name
                # prom_sample[1] = labels dict (label name -> label value)
                # prom_sample[2] = value

                if len(prom_sample[1]) == 0:
                    # no labels
                    if include_keys is None or len(include_keys) == 0 or prom_sample[0] in include_keys:
                        result[prom_sample[0]] = prom_sample[2]
                else:
                    labels_key_list = []
                    for label in sorted(prom_sample[1]):
                        labels_key_list.append(label)
                        labels_key_list.append(prom_sample[1][label])
                    key = '.'.join(labels_key_list)

                    full_name = '%s.%s' % (prom_sample[0], key)
                    if include_keys is None or len(include_keys) == 0 or full_name in include_keys:
                        metrics = result.setdefault(prom_sample[0], {})
                        metrics[key] = prom_sample[2]
        return result
Example #5
 def test_compliance_checks_statuses(self):
     model_counts = {}
     for i in range(3):
         count = i + 1
         cc = self._force_compliance_check(count=count)
         model_counts[cc.model] = count
     response = self._make_authenticated_request()
     self.assertEqual(response.status_code, 200)
     for family in text_string_to_metric_families(
             response.content.decode("utf-8")):
         if family.name != "zentral_compliance_checks_statuses_bucket":
             continue
         else:
             self.assertEqual(len(family.samples), 3 * 7)
             for sample in family.samples:
                 self.assertEqual(sample.labels["status"], Status.OK.name)
                 le = sample.labels["le"]
                 if le in ("1", "7", "14"):
                     self.assertEqual(sample.value, 0)
                 else:
                     self.assertEqual(sample.value,
                                      model_counts[sample.labels["model"]])
             break
     else:
         raise AssertionError("could not find expected metric family")
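The _bucket samples and le labels asserted on above come straight from the parser. A minimal standalone sketch (with a made-up histogram exposition) shows the sample shapes such tests iterate over:

from prometheus_client.parser import text_string_to_metric_families

HISTOGRAM_TEXT = """# TYPE request_seconds histogram
# HELP request_seconds help
request_seconds_bucket{le="1.0"} 2.0
request_seconds_bucket{le="+Inf"} 3.0
request_seconds_count 3.0
request_seconds_sum 4.5
"""

for family in text_string_to_metric_families(HISTOGRAM_TEXT):
    for sample in family.samples:
        # e.g. request_seconds_bucket {'le': '1.0'} 2.0
        print(sample.name, sample.labels, sample.value)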
Example #6
def parse_func(metrics):

    for family in text_string_to_metric_families(metrics):
        for sample in family.samples:

            # TODO: figure out how to support summary/histogram types
            if family.type == 'summary':
                #print(family.type)
                pass
            elif family.type == 'histogram':
                #print(family.type)
                pass
            else:
                val = collectd.Values()
                val.plugin = ENDPOINT_NAME
                val.interval = INTERVAL
                val.type = family.type

                # TODO: support multiple labels

                if len(sample.labels) > 0:
                    # Use distinct names so the label loop does not read as shadowing val.
                    joined = ''.join(
                        label_key + '_' + str(label_value)
                        for label_key, label_value in sample.labels.items())
                    val.type_instance = sample.name + '.' + joined
                else:
                    val.type_instance = sample.name

                #print(f'type_instance: {val.type_instance}')

                val.values = [sample.value]
                val.dispatch()
Example #7
def scrape_prometheus(endpoints, retries=3, err_output_file=sys.stdout):
    """Scrape a list of Prometheus/FAUCET/Gauge endpoints and aggregate results."""
    metrics = []
    for endpoint in endpoints:
        content = None
        err = None
        for _ in range(retries):
            try:
                if endpoint.startswith('http'):
                    response = requests.get(endpoint)
                    if response.status_code == requests.status_codes.codes.ok: # pylint: disable=no-member
                        content = response.content.decode('utf-8', 'strict')
                        break
                else:
                    response = urllib.request.urlopen(endpoint) # pytype: disable=module-attr
                    content = response.read().decode('utf-8', 'strict')
                    break
            except (requests.exceptions.ConnectionError, ValueError) as exception:
                err = exception
                time.sleep(1)
        if err is not None:
            err_output_file.write(str(err))
            return None
        try:
            endpoint_metrics = parser.text_string_to_metric_families(
                content)
            metrics.extend(endpoint_metrics)
        except ValueError as err:
            err_output_file.write(str(err))
            return None
    return metrics
Example #8
    def test_simple_counter(self):
        families = text_string_to_metric_families("""# TYPE a counter
# HELP a help
a 1
""")
        self.assertEqual([CounterMetricFamily("a", "help", value=1)],
                         list(families))
Example #9
    def test_tabs(self):
        families = text_string_to_metric_families("""#\tTYPE\ta\tcounter
#\tHELP\ta\thelp
a\t1
""")
        self.assertEqual([CounterMetricFamily("a", "help", value=1)],
                         list(families))
Example #10
def text_string_to_metric_families_map(
    text: str,
) -> typing.Mapping[str, prometheus_client.Metric]:
    # typing.Generator needs three type parameters; an Iterator annotation is enough here.
    families: typing.Iterator[
        prometheus_client.Metric
    ] = parser.text_string_to_metric_families(text)
    return {f.name: f for f in families}
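A possible call against a made-up exposition string:

families = text_string_to_metric_families_map(
    "# TYPE queue_depth gauge\nqueue_depth 12.0\n")
assert families["queue_depth"].samples[0].value == 12.0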
Example #11
 def test_prometheus_metrics_with_machine_snapshot(self):
     tree = {
         "source": {"module": "tests.zentral.io", "name": "Zentral Tests"},
         "serial_number": "0123456789",
         "os_version": {'name': 'OS X', 'major': 10, 'minor': 11, 'patch': 1},
         "osx_app_instances": [
             {'app': {'bundle_id': 'io.zentral.baller',
                      'bundle_name': 'Baller.app',
                      'bundle_version': '123',
                      'bundle_version_str': '1.2.3'},
              'bundle_path': "/Applications/Baller.app"}
         ]
     }
     _, ms = MachineSnapshotCommit.objects.commit_machine_snapshot_tree(tree)
     source_id = ms.source.pk
     response = self.client.get(reverse("inventory:prometheus_metrics"),
                                HTTP_AUTHORIZATION="Bearer CHANGE ME!!!")
     labels_dict = {}
     for family in text_string_to_metric_families(response.content.decode('utf-8')):
         self.assertEqual(len(family.samples), 1)
         sample = family.samples[0]
         self.assertEqual(sample.value, 1)  # only one machine in inventory
         labels_dict[sample.name] = sample.labels
     self.assertEqual(labels_dict['zentral_inventory_osx_apps'],
                      {'name': 'Baller.app',
                       'source': 'tests.zentral.io#{}'.format(source_id),
                       'version_str': '1.2.3'})
     self.assertEqual(labels_dict['zentral_inventory_os_versions'],
                      {'build': '_',
                       'major': '10',
                       'minor': '11',
                       'name': 'OS X',
                       'patch': '1',
                       'source': 'tests.zentral.io#{}'.format(source_id)})
Example #12
    def test_simple_gauge(self):
        families = text_string_to_metric_families("""# TYPE a gauge
# HELP a help
a 1
""")
        self.assertEqual([GaugeMetricFamily("a", "help", value=1)],
                         list(families))
Example #13
    def read_metric(self, met):
        ''' read a prometheus endpoint and create data for monitoring systems'''
        return_data = {}
        content = self.call_api(met['url'])
        if content is not None:
            for metric in text_string_to_metric_families(content):
                # skipping histogram and summary types unless we find a good way to add them to zabbix (unlikely)
                if metric.type in ['histogram', 'summary']:
                    continue
                elif metric.type in ['counter', 'gauge']:
                    if metric.name in met['metrics']:
                        zmetric_name = '{}.{}'.format(
                            met['name'], metric.name.replace('_', '.'))
                        logger.debug('Sending: %s - %s', zmetric_name,
                                     metric.samples[0][2])
                        return_data[zmetric_name] = metric.samples[0][2]
                    else:
                        logger.debug(
                            'We are skipping metric, not requested: %s',
                            metric.name)
                else:
                    logger.error('Unknown metric type: %s - %s', metric.type,
                                 metric.name)

        return return_data
Example #14
 def report_endpoint_metrics(self,
                             metric_endpoint,
                             endpoint_dimensions,
                             endpoint_whitelist=None,
                             endpoint_metric_types=None):
     # Hit metric endpoint
     try:
         result = requests.get(metric_endpoint,
                               timeout=self.connection_timeout)
     except Exception as e:
         self.log.error(
             "Could not get metrics from {} with error {}".format(
                 metric_endpoint, e))
     else:
         result_content_type = result.headers['Content-Type']
         if "text/plain" in result_content_type:
             try:
                 metric_families = text_string_to_metric_families(
                     result.text)
                 self._send_metrics(metric_families, endpoint_dimensions,
                                    endpoint_whitelist,
                                    endpoint_metric_types)
             except Exception as e:
                 self.log.error(
                     "Error parsing data from {} with error {}".format(
                         metric_endpoint, e))
         else:
             self.log.error("Unsupported content type - {}".format(
                 result_content_type))
Example #15
def get_server_metric(metric_name):
    metrics = list(
        text_string_to_metric_families(
            requests.get("http://localhost:50052/metrics", timeout=5).text))
    target_metric = list(filter(lambda x: x.name == metric_name, metrics))
    assert len(target_metric) == 1
    return target_metric[0]
Example #16
    def metrics(self):  # pragma: no cover
        """
        get the modelized metrics parsed by prometheus_client
        """
        from prometheus_client.parser import text_string_to_metric_families

        return text_string_to_metric_families(self.metrics_raw())
Example #17
def main():
  mqttc = mqtt.Client()
  mqttc.connect(MQTT_BROKER)

  starttime=time.time()
  while True:
    metrics = requests.get(METRICS_ENDPOINT).text
    for family in text_string_to_metric_families(metrics):
      for sample in family.samples:
        j = {
            'name': sample[0],
            'labels': sample[1],
            'value': sample[2],
            'timestamp': sample[3],
            'exemplar': sample[4]
          }
        topic = Template(MQTT_TOPIC).render(sample=j)
        print(topic)
        print(f"{json.dumps(j, indent=2)}")
        mqttc.publish(topic, json.dumps(j), qos=1, retain=True)
    if float(UPDATE_INTERVAL) == 0:
      break
    sleep = max(0, float(UPDATE_INTERVAL) - ((time.time() - starttime) % float(UPDATE_INTERVAL)))
    if sleep > 1:
      print('{0}    Sleeping {1:00.0f} seconds'.format(datetime.datetime.now().isoformat(), sleep), flush=True)
      time.sleep(sleep)
  mqttc.disconnect()
Example #18
    def test_empty_help(self):
        families = text_string_to_metric_families("""# TYPE a counter
# HELP a
a 1
""")
        self.assertEqual([CounterMetricFamily("a", "", value=1)],
                         list(families))
Example #19
    def test_multiple_trailing_commas(self):
        text = """# TYPE a counter
# HELP a help
a{foo="bar",, } 1
"""
        self.assertRaises(ValueError,
                          lambda: list(text_string_to_metric_families(text)))
Example #20
    def prometheus_flat(self, include_keys=None):
        t = self.__request().text
        result = {}
        for prom_family in text_string_to_metric_families(t):
            # prom_family.name - name
            # prom_family.type - type (gauge, summary, ...)
            # prom_family.samples - samples
            for prom_sample in prom_family.samples:
                # prom_sample[0] = name
                # prom_sample[1] = labels dict (label name -> label value)
                # prom_sample[2] = value

                if len(prom_sample[1]) == 0:
                    # no labels
                    if include_keys is None or len(
                            include_keys
                    ) == 0 or prom_sample[0] in include_keys:
                        result[prom_sample[0]] = prom_sample[2]
                else:
                    labels_key_list = []
                    for label in sorted(prom_sample[1]):
                        labels_key_list.append(label)
                        labels_key_list.append(prom_sample[1][label])
                    key = '.'.join(labels_key_list)

                    full_name = '%s.%s' % (prom_sample[0], key)
                    if include_keys is None or len(
                            include_keys) == 0 or full_name in include_keys:
                        metrics = result.setdefault(prom_sample[0], {})
                        metrics[key] = prom_sample[2]
        return result
Example #21
def get_prometheus_metrics_data(endpoint: str, requested_metrics: list,
                                logger: logging.Logger) -> Dict:
    response = {}
    if len(requested_metrics) == 0:
        raise NoMetricsGivenException("No metrics given when requesting"
                                      "prometheus data from " + endpoint)

    metrics = get_prometheus(endpoint, logger)
    for family in text_string_to_metric_families(metrics):
        for sample in family.samples:
            if sample.name in requested_metrics:
                if sample.name not in response:
                    if sample.labels != {}:
                        response[sample.name] = {}
                        response[sample.name][json.dumps(sample.labels)] = \
                            sample.value
                    else:
                        response[sample.name] = sample.value
                else:
                    if sample.labels != {}:
                        response[sample.name][json.dumps(sample.labels)] = \
                            sample.value
                    else:
                        response[sample.name] = sample.value + \
                                                response[sample.name]

    # Raises a meaningful exception if some requested metrics are not found at
    # the endpoint
    missing_metrics = set(requested_metrics) - set(response)
    for metric in missing_metrics:
        raise MetricNotFoundException(metric, endpoint)

    return response
Example #22
    def from_url(cls, metrics_url):
        response = requests.get(metrics_url, timeout=10)
        timestamp = time.time()
        response.raise_for_status()
        prometheus_string = response.content.decode('utf8')
        prometheus_metrics = dict(
            map(lambda m: (m.name, m),
                parser.text_string_to_metric_families(prometheus_string)))

        fold_sample = lambda key: fold(prometheus_metrics, key, lambda m: int(
            m.samples[0].value), 0)

        total_blocks = fold_sample('near_block_processed')
        memory_usage = fold_sample('near_memory_usage_bytes')
        total_transactions = fold_sample('near_transaction_processed')
        blocks_per_second = fold_sample('near_blocks_per_minute') / 60.0

        block_processing_time_samples = prometheus_metrics[
            'near_block_processing_time'].samples

        block_processing_time = {}
        for sample in block_processing_time_samples:
            if 'le' in sample.labels:
                bound = sample.labels['le']
                block_processing_time[f'le {bound}'] = int(sample.value)

        return cls(total_blocks, memory_usage, total_transactions,
                   block_processing_time, timestamp, blocks_per_second)
Example #23
 def test_active_machines(self):
     for age in (2, 22, 31):
         ms = MunkiState.objects.create(
             machine_serial_number=get_random_string())
         MunkiState.objects.filter(pk=ms.pk).update(
             last_seen=datetime.utcnow() - timedelta(days=age))
     response = self._make_authenticated_request()
     self.assertEqual(response.status_code, 200)
     for family in text_string_to_metric_families(
             response.content.decode("utf-8")):
         if family.name != "zentral_munki_active_machines_bucket":
             continue
         else:
             self.assertEqual(len(family.samples), 7)
             for sample in family.samples:
                 le = sample.labels["le"]
                 if le == "1":
                     self.assertEqual(sample.value, 0)
                 elif le in ("7", "14"):
                     self.assertEqual(sample.value, 1)
                 elif le == "30":
                     self.assertEqual(sample.value, 2)
                 else:
                     self.assertEqual(sample.value, 3)
             break
     else:
         raise AssertionError("could not find expected metric family")
Example #24
def scrape_to_samples(scrape, user, push_time=None):
    if push_time is None:
        push_time = timezone.now()

    for family in text_string_to_metric_families(scrape):
        for s in family.samples:
            labels = labels_from_sample(s)
            widget, created = models.Widget.objects.lookup_or_create(
                labels,
                owner=user,
                defaults={
                    "title": s.name,
                    "timestamp": push_time
                })
            if created:
                logger.debug("Created widget %s", widget)
                yield "Created widget %s" % widget

            sample = widget.sample_set.create(timestamp=push_time,
                                              value=s.value)
            yield "Appended sample to %s" % widget

            # Manually call here since bulk_create does not call our signals
            tasks.update_chart(widget.pk)

            logger.debug("%s", sample)
Example #25
def scrape_prometheus(endpoints, retries=3):
    """Scrape a list of Prometheus/FAUCET/Gauge endpoints and aggregate results."""
    metrics = []
    for endpoint in endpoints:
        content = None
        err = None
        for _ in range(retries):
            try:
                if endpoint.startswith('http'):
                    response = requests.get(endpoint)
                    if response.status_code == requests.status_codes.codes.ok:  # pylint: disable=no-member
                        content = response.content.decode('utf-8', 'strict')
                        break
                else:
                    response = urllib.request.urlopen(endpoint)  # pytype: disable=module-attr
                    content = response.read().decode('utf-8', 'strict')
                    break
            except requests.exceptions.ConnectionError as exception:
                err = exception
                time.sleep(1)
        if err is not None:
            print(err)
            return None
        endpoint_metrics = parser.text_string_to_metric_families(content)
        metrics.extend(endpoint_metrics)
    return metrics
Example #26
async def get(name, port, host="127.0.0.1"):
    """
    Get the value of a metric by requesting it from the prometheus web server.

    Parameters
    ----------
    name : str
        Name of the metric
    port : int
        Port of the prometheus server
    host : str
        Host running the prometheus server (default "127.0.0.1")

    Returns
    -------
    Value of the metric
    """
    async with aiohttp.ClientSession() as session:
        async with session.get(f"http://{host}:{port}") as resp:
            resp.raise_for_status()
            metrics = await resp.text()
    for family in text_string_to_metric_families(metrics):
        if family.name == name:
            return family.samples[0].value
    raise InternalError(
        f"Couldn't find metric {name} in response from coco's prometheus client."
    )
Example #27
 def get_stats_sync(cls):
     """
     Stringified version of all the stats
     """
     redis_conn = redis.Redis.from_url(settings.BROKER_URL)
     stats_str = redis_conn.get(BROADCAST_WEBSOCKET_REDIS_KEY_NAME) or b''
     return parser.text_string_to_metric_families(stats_str.decode('UTF-8'))
Example #28
 def test_prometheus_metrics_with_machine_snapshot(self):
     tree = {
         "source": {"module": "tests.zentral.io", "name": "Zentral Tests"},
         "machine": {"serial_number": "0123456789"},
         "os_version": {'name': 'OS X', 'major': 10, 'minor': 11, 'patch': 1},
         "osx_app_instances": [
             {'app': {'bundle_id': 'io.zentral.baller',
                      'bundle_name': 'Baller.app',
                      'bundle_version': '123',
                      'bundle_version_str': '1.2.3'},
              'bundle_path': "/Applications/Baller.app"}
         ]
     }
     ms, _ = MachineSnapshot.objects.commit(tree)
     source_id = ms.source.pk
     response = self.client.get(reverse("inventory:prometheus_metrics"))
     labels_dict = {}
     for family in text_string_to_metric_families(response.content.decode('utf-8')):
         self.assertEqual(len(family.samples), 1)
         name, labels, value = family.samples[0]
         self.assertEqual(value, 1)  # only one machine in inventory
         labels_dict[name] = labels
     self.assertEqual(labels_dict['zentral_inventory_osx_apps'],
                      {'name': 'Baller.app',
                       'source': 'tests.zentral.io#{}'.format(source_id),
                       'version_str': '1.2.3'})
     self.assertEqual(labels_dict['zentral_inventory_os_versions'],
                      {'build': '_',
                       'major': '10',
                       'minor': '11',
                       'name': 'OS X',
                       'patch': '1',
                       'source': 'tests.zentral.io#{}'.format(source_id)})
Example #29
    def add_datapoints(self, raw_metrics):
        """Add all of the data points for a node

        :param str raw_metrics: The metrics content

        """
        hasMetrics = False
        if not raw_metrics:
            return
        for family in text_string_to_metric_families(raw_metrics):
            for sample in family.samples:
                hasMetrics = True
                if (self.INCLUDE_CONFIG_KEY not in self.config or sample.name
                        in self.config[self.INCLUDE_CONFIG_KEY]):
                    if (self.EXCLUDE_CONFIG_KEY in self.config and sample.name
                            in self.config[self.EXCLUDE_CONFIG_KEY]):
                        LOGGER.debug('Ignoring sample: %r', sample)
                    else:
                        name = reduce(
                            (lambda k, i: k + '/' + i[0] + '/' + i[1]),
                            sample.labels.items(), sample.name)
                        if (self.GAUGES_CONFIG_KEY in self.config
                                and sample.name
                                in self.config[self.GAUGES_CONFIG_KEY]):
                            self.add_gauge_value(name, sample.name,
                                                 sample.value)
                        else:
                            self.add_derive_value(name, sample.name,
                                                  sample.value)
        if not hasMetrics:
            LOGGER.debug('Metrics output: %r', raw_metrics)
Example #30
def check_stats(filename, debug, profile):
    topology = Topology.load_topology_from_file(filename)
    failures = []

    for node in topology.nodes.values():
        if not node.stats_enable:
            continue
        stats = requests.get(f"http://localhost:{node.stats_port}/metrics")
        metrics = {
            metric.name: metric
            for metric in text_string_to_metric_families(stats.text)
            if metric.name in RECEPTOR_METRICS
        }
        expected_connected_peers = len([
            n for n in topology.nodes.values() if node.name in n.connections
        ]) + len(node.connections)
        connected_peers = metrics["connected_peers"].samples[0].value
        if expected_connected_peers != connected_peers:
            failures.append(
                f"Node '{node.name}' was expected to have "
                f"{expected_connected_peers} connections, but it reported to "
                f" have {connected_peers}")
    if failures:
        print("\n".join(failures))
        sys.exit(127)
Example #31
    def test_empty_brackets(self):
        families = text_string_to_metric_families("""# TYPE a counter
# HELP a help
a{} 1
""")
        self.assertEqualMetrics([CounterMetricFamily("a", "help", value=1)],
                                list(families))
Example #32
    def test_type_help_switched(self):
        families = text_string_to_metric_families("""# HELP a help
# TYPE a counter
a 1
""")
        self.assertEqual([CounterMetricFamily("a", "help", value=1)],
                         list(families))
Example #33
def get_oasis_prometheus(endpoint: str, params: list, logger: logging.Logger) \
        -> dict:
    response = {}
    if len(params) == 0:
        raise NoParametersGivenException('no parameters given for ' + endpoint)

    metrics = get_prometheus(endpoint, logger)
    for family in text_string_to_metric_families(metrics):
        for sample in family.samples:
            if sample.name in params:
                if sample.name not in response:
                    if sample.labels != {}:
                        response[sample.name] = {}
                        response[sample.name][json.dumps(sample.labels)] = \
                            sample.value
                    else:
                        response[sample.name] = sample.value
                else:
                    if sample.labels != {}:
                        response[sample.name][json.dumps(sample.labels)] = \
                            sample.value
                    else:
                        response[sample.name] = sample.value + \
                                                response[sample.name]

    # Alert on the metrics that had not been retrieved from prometheus
    difference = set(params).difference(set(response))
    for i in difference:
        raise MetricNotFoundException('metric ' + i + ' not found at endpoint' \
                                                      ' ' + endpoint)

    return response
Example #34
    def prometheus(self):
        t = self.__request().text
        samples_by_name = defaultdict(list)

        for l in text_string_to_metric_families(t):
            for s in l.samples:
                samples_by_name[s[0]].append((s[1],s[2]))

        return samples_by_name
Example #35
 def eval_prometheus_line(etime, line):
     ret = []
     for family in text_string_to_metric_families(line):
         for sample in family.samples:
             dims = []
             for kv in sample[1].items():
                 dims.append("%s=%s" % kv)
             m = Metric("docker.{0}".format(*sample), etime, sample[2], dims)
             ret.append(m)
     return ret
Example #36
    def _retrieve_and_parse_metrics(self, url, timeout, collect_response_time, instance_name):
        """
        Metrics from prometheus come in plain text from the endpoint and therefore need to be parsed.
        To do that, the prometheus client's text_string_to_metric_families method is used. That method returns a
        generator object.

        The method consumes the metrics from the endpoint:
            # HELP container_cpu_system_seconds_total Cumulative system cpu time consumed in seconds.
            # TYPE container_cpu_system_seconds_total counter
            container_cpu_system_seconds_total{id="/",name="/"} 1.59578817e+06
            ....
        and produces metric family elements (returned from the generator) with the following attributes:
            name          -> e.g. ' container_cpu_system_seconds_total '
            documentation -> e.g. ' Cumulative system cpu time consumed in seconds. '
            type          -> e.g. ' counter '
            samples       -> e.g. ' [.. ,("container_cpu_system_seconds_total", {id="/",name="/"}, 1.59578817e+06),
                                      ('container_cpu_system_seconds_total', {u'id': u'/docker', u'name': u'/docker'},
                                      922.66),
                                    ..] '

        :param url: the url of the prometheus metrics
        :return: metric_families iterable
        """

        timer = util.Timer()

        try:
            response = requests.get(url, timeout=timeout)

            # report response time first, even when there is HTTP errors
            if collect_response_time:
                # Stop the timer as early as possible
                running_time = timer.total()
                self.gauge('monasca.agent.collect_time', running_time, dimensions={'agent_check': 'influxdb',
                                                                                   'instance': instance_name})

            response.raise_for_status()
            body = response.text
        except RequestException:
            self.log.exception("Retrieving metrics from endpoint %s failed", url)
            self.rate('monasca.agent.collect_errors', 1, dimensions={'agent_check': 'prometheus',
                                                                     'instance': instance_name})
            return []

        metric_families = prometheus_client_parser.text_string_to_metric_families(body)
        return metric_families
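For illustration, a tiny standalone sketch of the attributes described in the docstring, using a made-up metric:

from prometheus_client.parser import text_string_to_metric_families

text = ("# HELP jobs_running Number of running jobs.\n"
        "# TYPE jobs_running gauge\n"
        "jobs_running 3.0\n")
for family in text_string_to_metric_families(text):
    print(family.name)           # jobs_running
    print(family.documentation)  # Number of running jobs.
    print(family.type)           # gauge
    print(family.samples)        # e.g. [Sample(name='jobs_running', labels={}, value=3.0, ...)]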
Example #37
def test_metrics():
    try:
        from prometheus_client.parser import text_string_to_metric_families
    except ImportError:
        pytest.skip('need prometheus_client installed')

    client = get_client()
    response = client.get('/_status/test/prometheus',
                          environ_overrides={'REMOTE_ADDR': b'127.0.0.1'})
    assert response.status_code == 200

    response = client.get('/_status/metrics',
                          environ_overrides={'REMOTE_ADDR': b'1.2.3.4'})
    assert response.status_code == 403
    response = client.get('/_status/metrics',
                          environ_overrides={'REMOTE_ADDR': b'127.0.0.1'})
    assert response.status_code == 200
    assert list(text_string_to_metric_families(response.data.decode()))
Example #38
    def metrics(self):
        """
        Return API server metrics in prometheus format.

        :return: Cluster metrics.
        :rtype: dict
        """
        url = self.__client.config.cluster['server'] + '/metrics'
        response = self.__client.session.get(url)

        response.raise_for_status()

        samples_by_name = defaultdict(list)

        for l in text_string_to_metric_families(response.text):
            for s in l.samples:
                samples_by_name[s[0]].append((s[1], s[2]))

        return samples_by_name
Example #39
    def metric_check(self):
        """ collect certain metrics from the /metrics API call """

        print "\nPerforming /metrics check..."
        response = self.ora.get('/metrics', rtype='text')

        for metric_type in text_string_to_metric_families(response):

            # Collect the apiserver_request_latencies_summary{resource="pods",verb="LIST",quantiles in /metrics
            # Collect the apiserver_request_latencies_summary{resource="pods",verb="WATCHLIST",quantiles in /metrics
            if metric_type.name == 'apiserver_request_latencies_summary':
                key_str = 'openshift.master.apiserver.latency.summary'
                for sample in metric_type.samples:
                    if (sample[1]['resource'] == 'pods'
                            and 'quantile' in sample[1]
                            and 'LIST' in sample[1]['verb']):
                        curr_key_str = key_str + ".pods.quantile.%s.%s" % (sample[1]['verb'],
                                                                           sample[1]['quantile'].split('.')[1])

                        if math.isnan(sample[2]):
                            value = 0
                        else:
                            value = sample[2]

                        self.zagg_sender.add_zabbix_keys({curr_key_str.lower(): int(value/1000)})

            # Collect the scheduler_e2e_scheduling_latency_microseconds{quantiles in /metrics
            if metric_type.name == 'scheduler_e2e_scheduling_latency_microseconds':
                for sample in metric_type.samples:
                    if 'quantile' in sample[1]:
                        key_str = 'openshift.master.scheduler.e2e.scheduling.latency'
                        curr_key_str = key_str + ".quantile.%s" % (sample[1]['quantile'].split('.')[1])

                        if math.isnan(sample[2]):
                            value = 0
                        else:
                            value = sample[2]

                        self.zagg_sender.add_zabbix_keys({curr_key_str.lower(): int(value/1000)})

        self.zagg_sender.add_zabbix_keys({'openshift.master.metric.ping': 1})
Example #40
def parse_prometheus_data(text):
    """Parse prometheus-formatted text to the python objects

    Args:
        text (str): prometheus-formatted data

    Returns:
        dict: parsed data as python dictionary
    """
    metrics = {}
    for family in text_string_to_metric_families(text):
        for sample in family.samples:
            key, data, val = (sample.name, sample.labels, sample.value)
            if data:
                data['value'] = val
                if key in metrics:
                    metrics[key].append(data)
                else:
                    metrics[key] = [data]
            else:
                metrics[key] = val

    return metrics
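A quick illustration of the shapes this function returns, with made-up input (unlabeled metrics map to a bare value, labeled metrics to a list of label dicts):

text = ("# TYPE up gauge\n"
        "up 1.0\n"
        "# TYPE temperature_celsius gauge\n"
        'temperature_celsius{room="kitchen"} 21.5\n')
print(parse_prometheus_data(text))
# {'up': 1.0, 'temperature_celsius': [{'room': 'kitchen', 'value': 21.5}]}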
Example #41
 def report_endpoint_metrics(self, metric_endpoint, endpoint_dimensions, endpoint_whitelist=None,
                             endpoint_metric_types=None, report_pod_label_owner=False):
     # Hit metric endpoint
     try:
         result = requests.get(metric_endpoint, timeout=self.connection_timeout)
     except Exception as e:
         self.log.error("Could not get metrics from {} with error {}".format(metric_endpoint, e))
     else:
         result_content_type = result.headers['Content-Type']
         if "text/plain" in result_content_type:
             try:
                 metric_families = text_string_to_metric_families(result.text)
                 self._send_metrics(
                     metric_families,
                     endpoint_dimensions,
                     endpoint_whitelist,
                     endpoint_metric_types,
                     report_pod_label_owner)
             except Exception as e:
                 self.log.error(
                     "Error parsing data from {} with error {}".format(
                         metric_endpoint, e))
         else:
             self.log.error("Unsupported content type - {}".format(result_content_type))
Example #42
from prometheus_client.parser import text_string_to_metric_families

for family in text_string_to_metric_families(u"counter_total 1.0\n"):
  for sample in family.samples:
    print("Name: {0} Labels: {1} Value: {2}".format(*sample))
Example #43
 def get_count(response):
     for family in text_string_to_metric_families(response.text):
         if family.name == 'test':
             return family.samples[0][2]