Example #1
    def test_counter_to_prometheus(self):
        meter = get_meter_provider().get_meter(__name__)
        metric = meter.create_counter(
            "test@name",
            "testdesc",
            "unit",
            int,
        )
        labels = {"environment@": "staging", "os": "Windows"}
        key_labels = get_dict_as_key(labels)
        aggregator = SumAggregator()
        aggregator.update(123)
        aggregator.take_checkpoint()
        record = ExportRecord(metric, key_labels, aggregator,
                              get_meter_provider().resource)
        collector = CustomCollector("testprefix")
        collector.add_metrics_data([record])

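        # Prometheus forbids "@" in metric and label names, so the collector
        # sanitizes "test@name" and "environment@" to "test_name" and "environment_".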
        for prometheus_metric in collector.collect():
            self.assertEqual(type(prometheus_metric), CounterMetricFamily)
            self.assertEqual(prometheus_metric.name, "testprefix_test_name")
            self.assertEqual(prometheus_metric.documentation, "testdesc")
            self.assertTrue(len(prometheus_metric.samples) == 1)
            self.assertEqual(prometheus_metric.samples[0].value, 123)
            self.assertTrue(len(prometheus_metric.samples[0].labels) == 2)
            self.assertEqual(
                prometheus_metric.samples[0].labels["environment_"], "staging")
            self.assertEqual(prometheus_metric.samples[0].labels["os"],
                             "Windows")
Example #2
 def test_invalid_metric(self):
     meter = get_meter_provider().get_meter(__name__)
     metric = StubMetric("tesname", "testdesc", "unit", int, meter)
     labels = {"environment": "staging"}
     key_labels = get_dict_as_key(labels)
     record = ExportRecord(metric, key_labels, None,
                           get_meter_provider().resource)
     collector = CustomCollector("testprefix")
     collector.add_metrics_data([record])
     with self.assertLogs("opentelemetry.exporter.prometheus", level="WARNING"):
         collector.collect()
 def setUp(self):
     super().setUp()
     self.exporter = PrometheusRemoteWriteMetricsExporter(
         endpoint="http://localhost:9009/api/prom/push",
         headers={"X-Scope-Org-ID": "5"},
     )
     self.labels = {"environment": "testing"}
     self.meter = self.meter_provider.get_meter(__name__)
     metrics.get_meter_provider().start_pipeline(
         self.meter,
         self.exporter,
         1,
     )
 def test_get_collector_point(self):
     aggregator = aggregate.SumAggregator()
     int_counter = self._meter.create_counter(
         "testName",
         "testDescription",
         "unit",
         int,
     )
     float_counter = self._meter.create_counter(
         "testName",
         "testDescription",
         "unit",
         float,
     )
     valuerecorder = self._meter.create_valuerecorder(
         "testName",
         "testDescription",
         "unit",
         float,
     )
     result = metrics_exporter.get_collector_point(
         ExportRecord(
             int_counter,
             self._key_labels,
             aggregator,
             metrics.get_meter_provider().resource,
         ))
     self.assertIsInstance(result, metrics_pb2.Point)
     self.assertIsInstance(result.timestamp, Timestamp)
     self.assertEqual(result.int64_value, 0)
     aggregator.update(123.5)
     aggregator.take_checkpoint()
     result = metrics_exporter.get_collector_point(
         ExportRecord(
             float_counter,
             self._key_labels,
             aggregator,
             metrics.get_meter_provider().resource,
         ))
     self.assertEqual(result.double_value, 123.5)
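     # A ValueRecorder-backed record is not a counter, so converting it is
     # expected to raise a TypeError.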
     self.assertRaises(
         TypeError,
         metrics_exporter.get_collector_point,
         ExportRecord(
             valuerecorder,
             self._key_labels,
             aggregator,
             metrics.get_meter_provider().resource,
         ),
     )
Example #5
    def test_export(self):
        mock_client = mock.MagicMock()
        mock_export = mock.MagicMock()
        mock_client.Export = mock_export
        host_name = "testHostName"
        collector_exporter = metrics_exporter.OpenCensusMetricsExporter(
            client=mock_client, host_name=host_name
        )
        test_metric = self._meter.create_counter(
            "testname", "testdesc", "unit", int, self._labels.keys(),
        )
        record = MetricRecord(
            test_metric,
            self._key_labels,
            aggregate.SumAggregator(),
            metrics.get_meter_provider().resource,
        )

        result = collector_exporter.export([record])
        self.assertIs(result, MetricsExportResult.SUCCESS)
        # pylint: disable=unsubscriptable-object
        export_arg = mock_export.call_args[0]
        service_request = next(export_arg[0])
        output_metrics = getattr(service_request, "metrics")
        output_node = getattr(service_request, "node")
        self.assertEqual(len(output_metrics), 1)
        self.assertIsNotNone(getattr(output_node, "library_info"))
        self.assertIsNotNone(getattr(output_node, "service_info"))
        output_identifier = getattr(output_node, "identifier")
        self.assertEqual(
            getattr(output_identifier, "host_name"), "testHostName"
        )
    def test_counter_to_prometheus(self):
        meter = get_meter_provider().get_meter(__name__)
        metric = meter.create_metric(
            "test@name",
            "testdesc",
            "unit",
            int,
            metrics.Counter,
            ["environment@", "os"],
        )
        kvp = {"environment@": "staging", "os": "Windows"}
        label_set = meter.get_label_set(kvp)
        aggregator = CounterAggregator()
        aggregator.update(123)
        aggregator.take_checkpoint()
        record = MetricRecord(aggregator, label_set, metric)
        collector = CustomCollector("testprefix")
        collector.add_metrics_data([record])

        for prometheus_metric in collector.collect():
            self.assertEqual(type(prometheus_metric), CounterMetricFamily)
            self.assertEqual(prometheus_metric.name, "testprefix_test_name")
            self.assertEqual(prometheus_metric.documentation, "testdesc")
            self.assertTrue(len(prometheus_metric.samples) == 1)
            self.assertEqual(prometheus_metric.samples[0].value, 123)
            self.assertTrue(len(prometheus_metric.samples[0].labels) == 2)
            self.assertEqual(
                prometheus_metric.samples[0].labels["environment_"], "staging")
            self.assertEqual(prometheus_metric.samples[0].labels["os"],
                             "Windows")
Example #7
 @classmethod
 def setUpClass(cls):
     cls.original_tracer_provider = trace_api.get_tracer_provider()
     result = cls.create_tracer_provider()
     cls.tracer_provider, cls.memory_exporter = result
     trace_api.set_tracer_provider(cls.tracer_provider)
     cls.original_meter_provider = metrics_api.get_meter_provider()
     result = cls.create_meter_provider()
     cls.meter_provider, cls.memory_metrics_exporter = result
     metrics_api.set_meter_provider(cls.meter_provider)
Example #8
 def test_min_max_sum_aggregator_to_prometheus(self):
     meter = get_meter_provider().get_meter(__name__)
     metric = meter.create_valuerecorder("test@name", "testdesc", "unit",
                                         int, [])
     labels = {}
     key_labels = get_dict_as_key(labels)
     aggregator = MinMaxSumCountAggregator()
     aggregator.update(123)
     aggregator.update(456)
     aggregator.take_checkpoint()
     record = ExportRecord(metric, key_labels, aggregator,
                           get_meter_provider().resource)
     collector = CustomCollector("testprefix")
     collector.add_metrics_data([record])
     result_bytes = generate_latest(collector)
     result = result_bytes.decode("utf-8")
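     # Two values (123 and 456) were recorded, so count == 2 and sum == 579.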
     self.assertIn("testprefix_test_name_count 2.0", result)
     self.assertIn("testprefix_test_name_sum 579.0", result)
 def test_invalid_metric(self):
     meter = get_meter_provider().get_meter(__name__)
     metric = meter.create_metric("tesname", "testdesc", "unit", int,
                                  TestMetric)
     kvp = {"environment": "staging"}
     label_set = meter.get_label_set(kvp)
     record = MetricRecord(None, label_set, metric)
     collector = CustomCollector("testprefix")
     collector.add_metrics_data([record])
     with self.assertLogs("opentelemetry.ext.prometheus", level="WARNING"):
         collector.collect()
Example #10
 def test_export(self):
     with self._registry_register_patch:
         record = ExportRecord(
             self._test_metric,
             self._labels_key,
             SumAggregator(),
             get_meter_provider().resource,
         )
         exporter = PrometheusMetricsExporter()
         result = exporter.export([record])
         # pylint: disable=protected-access
         self.assertEqual(len(exporter._collector._metrics_to_export), 1)
         self.assertIs(result, MetricsExportResult.SUCCESS)
    def setUp(self):
        set_meter_provider(metrics.MeterProvider())
        self._meter = get_meter_provider().get_meter(__name__)
        self._test_metric = self._meter.create_counter(
            "testname", "testdesc", "unit", int,
        )
        labels = {"environment": "staging"}
        self._labels_key = get_dict_as_key(labels)

        self._mock_registry_register = mock.Mock()
        self._registry_register_patch = mock.patch(
            "prometheus_client.core.REGISTRY.register",
            side_effect=self._mock_registry_register,
        )
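
# Not part of the tests above -- a minimal sketch of how the Prometheus
# exporter exercised in this example is typically wired up in an application.
# The port, address, and prefix values below are illustrative assumptions.
from prometheus_client import start_http_server

from opentelemetry import metrics
from opentelemetry.exporter.prometheus import PrometheusMetricsExporter
from opentelemetry.sdk.metrics import MeterProvider

# Serve the Prometheus scrape endpoint; start_pipeline registers the
# exporter's collector with the prometheus_client registry.
start_http_server(port=8000, addr="localhost")
metrics.set_meter_provider(MeterProvider())
meter = metrics.get_meter(__name__)
metrics.get_meter_provider().start_pipeline(
    meter, PrometheusMetricsExporter(prefix="testprefix"), 5
)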
Example #12
 @classmethod
 def setUpClass(cls):
     cls.original_tracer_provider = trace_api.get_tracer_provider()
     result = cls.create_tracer_provider()
     cls.tracer_provider, cls.memory_exporter = result
     # This is done because set_tracer_provider cannot override the
     # current tracer provider.
     trace_api._TRACER_PROVIDER = None  # pylint: disable=protected-access
     trace_api.set_tracer_provider(cls.tracer_provider)
     cls.original_meter_provider = metrics_api.get_meter_provider()
     result = cls.create_meter_provider()
     cls.meter_provider, cls.memory_metrics_exporter = result
     # This is done because set_meter_provider cannot override the
     # current meter provider.
     metrics_api._METER_PROVIDER = None  # pylint: disable=protected-access
     metrics_api.set_meter_provider(cls.meter_provider)
    def setUp(self):
        set_meter_provider(metrics.MeterProvider())
        self._meter = get_meter_provider().get_meter(__name__)
        self._test_metric = self._meter.create_metric(
            "testname",
            "testdesc",
            "unit",
            int,
            metrics.Counter,
            ["environment"],
        )
        kvp = {"environment": "staging"}
        self._test_label_set = self._meter.get_label_set(kvp)

        self._mock_registry_register = mock.Mock()
        self._registry_register_patch = mock.patch(
            "prometheus_client.core.REGISTRY.register",
            side_effect=self._mock_registry_register,
        )
"""
This example shows how to export metrics to the OpenTelemetry Collector.
"""

from opentelemetry import metrics
from opentelemetry.exporter.opencensus.metrics_exporter import (
    OpenCensusMetricsExporter,
)
from opentelemetry.sdk.metrics import MeterProvider

exporter = OpenCensusMetricsExporter(service_name="basic-service",
                                     endpoint="localhost:55678")

metrics.set_meter_provider(MeterProvider())
meter = metrics.get_meter(__name__)
metrics.get_meter_provider().start_pipeline(meter, exporter, 5)

requests_counter = meter.create_counter(
    name="requests",
    description="number of requests",
    unit="1",
    value_type=int,
)

staging_labels = {"environment": "staging"}
requests_counter.add(25, staging_labels)

print("Metrics are available now at http://localhost:9090/graph")
input("Press any key to exit...")
"""
from opentelemetry import metrics
from opentelemetry.sdk.metrics import MeterProvider
from opentelemetry.sdk.metrics.export import ConsoleMetricsExporter
from opentelemetry.sdk.metrics.export.aggregate import (
    HistogramAggregator,
    LastValueAggregator,
    MinMaxSumCountAggregator,
    SumAggregator,
)
from opentelemetry.sdk.metrics.view import View, ViewConfig

# Use the MeterProvider implementation from the SDK package
metrics.set_meter_provider(MeterProvider())
meter = metrics.get_meter(__name__)
metrics.get_meter_provider().start_pipeline(meter, ConsoleMetricsExporter(), 5)

requests_counter = meter.create_counter(
    name="requests",
    description="number of requests",
    unit="1",
    value_type=int,
)

requests_size = meter.create_valuerecorder(
    name="requests_size",
    description="size of requests",
    unit="1",
    value_type=int,
)
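
# The fragment above stops after creating its instruments. As a hedged sketch
# (the exact View/register_view signature here is an assumption, not taken from
# this file), the Views API imported above could be applied to them like this:
requests_counter_view = View(
    requests_counter,
    SumAggregator,
    label_keys=["environment"],
    view_config=ViewConfig.LABEL_KEYS,
)
requests_size_view = View(
    requests_size,
    MinMaxSumCountAggregator,
    label_keys=["environment"],
    view_config=ViewConfig.LABEL_KEYS,
)
meter.register_view(requests_counter_view)
meter.register_view(requests_size_view)

# Record some values; labels are passed as a dict, matching the other examples.
requests_counter.add(25, {"environment": "staging"})
requests_size.record(100, {"environment": "staging"})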

import time

from opentelemetry import metrics
from opentelemetry.exporter.cloud_monitoring import (
    CloudMonitoringMetricsExporter,
)
from opentelemetry.sdk.metrics import Counter, MeterProvider
from opentelemetry.sdk.resources import get_aggregated_resources
from opentelemetry.tools.resource_detector import GoogleCloudResourceDetector

# MUST be run on Google Cloud infrastructure!
# Detect resources from the environment
resources = get_aggregated_resources(
    [GoogleCloudResourceDetector(raise_on_error=True)]
)

metrics.set_meter_provider(MeterProvider(resource=resources))
meter = metrics.get_meter(__name__)
metrics.get_meter_provider().start_pipeline(
    meter, CloudMonitoringMetricsExporter(), 5
)

requests_counter = meter.create_metric(
    name="request_counter_with_resource",
    description="number of requests",
    unit="1",
    value_type=int,
    metric_type=Counter,
)

staging_labels = {"environment": "staging"}

for i in range(20):
    requests_counter.add(25, staging_labels)
    time.sleep(10)
Example #17
    def test_translate_to_collector(self):
        test_metric = self._meter.create_counter(
            "testname", "testdesc", "unit", int, self._labels.keys()
        )
        aggregator = aggregate.SumAggregator()
        aggregator.update(123)
        aggregator.take_checkpoint()
        record = MetricRecord(
            test_metric,
            self._key_labels,
            aggregator,
            metrics.get_meter_provider().resource,
        )
        start_timestamp = Timestamp()
        output_metrics = metrics_exporter.translate_to_collector(
            [record], start_timestamp,
        )
        self.assertEqual(len(output_metrics), 1)
        self.assertIsInstance(output_metrics[0], metrics_pb2.Metric)
        self.assertEqual(output_metrics[0].metric_descriptor.name, "testname")
        self.assertEqual(
            output_metrics[0].metric_descriptor.description, "testdesc"
        )
        self.assertEqual(output_metrics[0].metric_descriptor.unit, "unit")
        self.assertEqual(
            output_metrics[0].metric_descriptor.type,
            metrics_pb2.MetricDescriptor.CUMULATIVE_INT64,
        )
        self.assertEqual(
            len(output_metrics[0].metric_descriptor.label_keys), 2
        )
        self.assertEqual(
            output_metrics[0].metric_descriptor.label_keys[0].key,
            "environment",
        )
        self.assertEqual(
            output_metrics[0].metric_descriptor.label_keys[1].key, "number",
        )

        self.assertIsNotNone(output_metrics[0].resource)
        self.assertEqual(
            output_metrics[0].resource.type, "",
        )
        self.assertEqual(
            output_metrics[0].resource.labels["key_with_str_value"],
            self._resource_labels["key_with_str_value"],
        )
        self.assertIsInstance(
            output_metrics[0].resource.labels["key_with_int_val"], str,
        )
        self.assertEqual(
            output_metrics[0].resource.labels["key_with_int_val"],
            str(self._resource_labels["key_with_int_val"]),
        )
        self.assertIsInstance(
            output_metrics[0].resource.labels["key_with_true"], str,
        )
        self.assertEqual(
            output_metrics[0].resource.labels["key_with_true"],
            str(self._resource_labels["key_with_true"]),
        )

        self.assertEqual(len(output_metrics[0].timeseries), 1)
        self.assertEqual(len(output_metrics[0].timeseries[0].label_values), 2)
        self.assertEqual(
            output_metrics[0].timeseries[0].start_timestamp, start_timestamp
        )
        self.assertEqual(
            output_metrics[0].timeseries[0].label_values[0].has_value, True
        )
        self.assertEqual(
            output_metrics[0].timeseries[0].label_values[0].value, "staging"
        )
        self.assertEqual(len(output_metrics[0].timeseries[0].points), 1)
        self.assertEqual(
            output_metrics[0].timeseries[0].points[0].timestamp.seconds,
            record.aggregator.last_update_timestamp // 1000000000,
        )
        self.assertEqual(
            output_metrics[0].timeseries[0].points[0].timestamp.nanos,
            record.aggregator.last_update_timestamp % 1000000000,
        )
        self.assertEqual(
            output_metrics[0].timeseries[0].points[0].int64_value, 123
        )
"""
This module shows how to enable collection and export of HTTP metrics
produced by instrumentations.
"""
import requests

from opentelemetry import metrics
from opentelemetry.instrumentation.requests import RequestsInstrumentor
from opentelemetry.sdk.metrics import MeterProvider
from opentelemetry.sdk.metrics.export import ConsoleMetricsExporter

# Sets the global MeterProvider instance
metrics.set_meter_provider(MeterProvider())

# Exporter to export metrics to the console
exporter = ConsoleMetricsExporter()

# Instrument the requests library
RequestsInstrumentor().instrument()

# Start collecting and exporting metrics related to the requests library
metrics.get_meter_provider().start_pipeline(RequestsInstrumentor().meter,
                                            exporter, 5)

response = requests.get("http://example.com")

input("...\n")
Example #19
import datetime
import os

import requests
from flask import Flask

from opentelemetry import metrics, trace
from opentelemetry.exporter.otlp.metrics_exporter import OTLPMetricsExporter
from opentelemetry.exporter.otlp.trace_exporter import OTLPSpanExporter
from opentelemetry.instrumentation.requests import RequestsInstrumentor
from opentelemetry.sdk.metrics.export import ConsoleMetricsExporter
from opentelemetry.sdk.resources import Resource
from opentelemetry.sdk.trace.export import BatchExportSpanProcessor

app = Flask("api")


@app.route('/book/<username>')
def hello_world(username):
    status = requests.post(os.getenv("BOOK_SVC"), json={
        "card": "VISA",
        "name": username,
        "date": datetime.datetime.today().strftime('%Y-%m-%d')
    })
    if status.ok:
        resp = status.json()
        return resp
    else:
        return 'bad request!', 400


if __name__ == '__main__':
    resource = Resource({"service.name": "gateway"})

    trace.get_tracer_provider().resource = resource
    trace.get_tracer_provider().add_span_processor(
        BatchExportSpanProcessor(OTLPSpanExporter(os.getenv("OTC_HOST")))
    )

    metrics.get_meter_provider().resource = resource

    # Instrument the requests library so its metrics are collected
    RequestsInstrumentor().instrument()
    metrics.get_meter_provider().start_pipeline(
        RequestsInstrumentor().meter, ConsoleMetricsExporter(), 1
    )

    app.run(debug=True, host='0.0.0.0')