Code Example #1
File: main.py Project: T-Aruga/gke-tutorial
def homePage() -> (str, int):
    # start timer
    # [START monitoring_sli_metrics_opencensus_latency]
    start_time = time.perf_counter()
    # [START monitoring_sli_metrics_opencensus_counts]
    mmap = stats_recorder.new_measurement_map()
    # [END monitoring_sli_metrics_opencensus_latency]
    # count request
    mmap.measure_int_put(m_request_count, 1)
    # fail 10% of the time
    # [START monitoring_sli_metrics_opencensus_latency]
    if random.randint(0, 100) > 90:
        # [END monitoring_sli_metrics_opencensus_latency]
        mmap.measure_int_put(m_failed_request_count, 1)
        # [END monitoring_sli_metrics_opencensus_counts]
        # [START monitoring_sli_metrics_opencensus_latency]
        response_latency = time.perf_counter() - start_time
        mmap.measure_float_put(m_response_latency, response_latency)
        # [START monitoring_sli_metrics_opencensus_counts]
        tmap = tag_map_module.TagMap()
        mmap.record(tmap)
        # [END monitoring_sli_metrics_opencensus_latency]
        return ("error!", 500)
        # [END monitoring_sli_metrics_opencensus_counts]
    else:
        random_delay = random.randint(0, 5000) / 1000
        # delay for a bit to vary latency measurement
        time.sleep(random_delay)
        # record latency
        response_latency = time.perf_counter() - start_time
        mmap.measure_float_put(m_response_latency, response_latency)
        tmap = tag_map_module.TagMap()
        mmap.record(tmap)
        return ("home page", 200)
Code Example #2
File: modifying.py Project: tonitopark/rcs_log_howto
def main():
    # Enable metrics
    # Set the interval in seconds in which you want to send metrics
    exporter = metrics_exporter.new_metrics_exporter(
        connection_string='InstrumentationKey=<Your Key>')
    exporter.add_telemetry_processor(callback_function)
    view_manager.register_exporter(exporter)

    view_manager.register_view(CARROTS_VIEW)
    mmap = stats_recorder.new_measurement_map()
    tmap = tag_map_module.TagMap()

    mmap.measure_int_put(CARROTS_MEASURE, 0)
    mmap.record(tmap)
    # The default export interval is 15.0 seconds. Your application should
    # run for at least that long so the exporter has a chance to send the
    # recorded metrics; the sleeps below take care of that.
    time.sleep(60)

    tmap = tag_map_module.TagMap()

    mmap.measure_int_put(CARROTS_MEASURE, 1000)
    mmap.record(tmap)

    time.sleep(60)

    print("Done recording metrics")
Code Example #3
    def test_create_timeseries_multiple_tag_values(self,
                                                   monitoring_resource_mock):
        view_manager, stats_recorder, exporter = \
            self.setup_create_timeseries_test()

        view_manager.register_view(VIDEO_SIZE_VIEW)

        measure_map = stats_recorder.new_measurement_map()

        # Add first point with one tag value
        tag_map = tag_map_module.TagMap()
        tag_map.insert(FRONTEND_KEY, tag_value_module.TagValue("1200"))
        measure_map.measure_int_put(VIDEO_SIZE_MEASURE, 25 * MiB)
        measure_map.record(tag_map)

        # Add second point with different tag value
        tag_map = tag_map_module.TagMap()
        tag_map.insert(FRONTEND_KEY, tag_value_module.TagValue("1400"))
        measure_map.measure_int_put(VIDEO_SIZE_MEASURE, 12 * MiB)
        measure_map.record(tag_map)

        v_data = measure_map.measure_to_view_map.get_view(
            VIDEO_SIZE_VIEW_NAME, None)

        time_series_list = exporter.create_time_series_list(v_data, "", "")

        self.assertEqual(len(time_series_list), 2)
        ts_by_frontend = {
            ts.metric.labels.get(FRONTEND_KEY_CLEAN): ts
            for ts in time_series_list
        }
        self.assertEqual(set(ts_by_frontend.keys()), {"1200", "1400"})
        ts1 = ts_by_frontend["1200"]
        ts2 = ts_by_frontend["1400"]

        # Verify first time series
        self.assertEqual(ts1.resource.type, "global")
        self.assertEqual(
            ts1.metric.type,
            "custom.googleapis.com/opencensus/my.org/views/video_size_test2")
        self.assertIsNotNone(ts1.resource)

        self.assertEqual(len(ts1.points), 1)
        value1 = ts1.points[0].value
        self.assertEqual(value1.distribution_value.count, 1)
        self.assertEqual(value1.distribution_value.mean, 25 * MiB)

        # Verify second time series
        self.assertEqual(ts2.resource.type, "global")
        self.assertEqual(
            ts2.metric.type,
            "custom.googleapis.com/opencensus/my.org/views/video_size_test2")
        self.assertIsNotNone(ts2.resource)

        self.assertEqual(len(ts2.points), 1)
        value2 = ts2.points[0].value
        self.assertEqual(value2.distribution_value.count, 1)
        self.assertEqual(value2.distribution_value.mean, 12 * MiB)
Code Example #4
    def test_value(self):
        key = 'key1'
        value = 'value1'
        tag_map = tag_map_module.TagMap(tags=[{key: value}])
        test_val = tag_map.get_value(key)
        self.assertEqual(test_val, value)

        value_1 = None
        tag_map = tag_map_module.TagMap(tags=[{key: value_1}])
        with self.assertRaises(KeyError):
            tag_map.get_value(key=key)
Code Example #5
    def test_prometheus_stats(self):

        method_key = tag_key_module.TagKey("method")
        request_count_measure = measure_module.MeasureInt(
            "request_count", "number of requests", "1")
        request_count_view_name = "request_count_view"
        count_agg = aggregation_module.CountAggregation()
        request_count_view = view_module.View(
            request_count_view_name,
            "number of requests broken down by methods", [method_key],
            request_count_measure, count_agg)
        stats = stats_module.stats
        view_manager = stats.view_manager
        stats_recorder = stats.stats_recorder

        exporter = prometheus.new_stats_exporter(
            prometheus.Options(namespace="opencensus", port=9303))
        view_manager.register_exporter(exporter)

        view_manager.register_view(request_count_view)

        time.sleep(random.randint(1, 10) / 1000.0)

        method_value_1 = tag_value_module.TagValue("some method")
        tag_map_1 = tag_map_module.TagMap()
        tag_map_1.insert(method_key, method_value_1)
        measure_map_1 = stats_recorder.new_measurement_map()
        measure_map_1.measure_int_put(request_count_measure, 1)
        measure_map_1.record(tag_map_1)

        method_value_2 = tag_value_module.TagValue("some other method")
        tag_map_2 = tag_map_module.TagMap()
        tag_map_2.insert(method_key, method_value_2)
        measure_map_2 = stats_recorder.new_measurement_map()
        measure_map_2.measure_int_put(request_count_measure, 1)
        measure_map_2.record(tag_map_2)
        measure_map_2.record(tag_map_2)

        if sys.version_info > (3, 0):
            import urllib.request
            contents = urllib.request.urlopen(
                "http://localhost:9303/metrics").read()
        else:
            import urllib2
            contents = urllib2.urlopen("http://localhost:9303/metrics").read()

        self.assertIn(b'# TYPE opencensus_request_count_view_total counter',
                      contents)
        self.assertIn(
            b'opencensus_request_count_view_total'
            b'{method="some method"} 1.0', contents)
        self.assertIn(
            b'opencensus_request_count_view_total'
            b'{method="some other method"} 2.0', contents)
Code Example #6
    def test_create_batched_time_series_with_many(self, monitor_resource_mock):
        client = mock.Mock()

        # First view with 3
        view_name1 = "view-name1"
        view1 = view_module.View(view_name1, "test description", ['test'],
                                 VIDEO_SIZE_MEASURE,
                                 aggregation_module.LastValueAggregation())
        v_data1 = view_data_module.ViewData(view=view1,
                                            start_time=TEST_TIME_STR,
                                            end_time=TEST_TIME_STR)
        v_data1.record(context=tag_map_module.TagMap({'test': '1'}),
                       value=7,
                       timestamp=None)
        v_data1.record(context=tag_map_module.TagMap({'test': '2'}),
                       value=5,
                       timestamp=None)
        v_data1.record(context=tag_map_module.TagMap({'test': '3'}),
                       value=3,
                       timestamp=None)

        # Second view with 2
        view_name2 = "view-name2"
        view2 = view_module.View(view_name2, "test description", ['test'],
                                 VIDEO_SIZE_MEASURE,
                                 aggregation_module.LastValueAggregation())
        v_data2 = view_data_module.ViewData(view=view2,
                                            start_time=TEST_TIME_STR,
                                            end_time=TEST_TIME_STR)
        v_data2.record(context=tag_map_module.TagMap({'test': '1'}),
                       value=7,
                       timestamp=None)
        v_data2.record(context=tag_map_module.TagMap({'test': '2'}),
                       value=5,
                       timestamp=None)

        view_data = [v_data1, v_data2]
        view_data = [
            metric_utils.view_data_to_metric(vd, TEST_TIME) for vd in view_data
        ]

        option = stackdriver.Options(project_id="project-test")
        exporter = stackdriver.StackdriverStatsExporter(options=option,
                                                        client=client)

        time_series_batches = exporter.create_batched_time_series(view_data, 2)

        self.assertEqual(len(time_series_batches), 3)
        [tsb1, tsb2, tsb3] = time_series_batches
        self.assertEqual(len(tsb1), 2)
        self.assertEqual(len(tsb2), 2)
        self.assertEqual(len(tsb3), 1)
Code Example #7
    def _parse_tags(self, buffer):
        tag_context = tag_map_module.TagMap()
        limit = len(buffer)
        total_chars = 0
        i = 1
        while i < limit:
            field_id = buffer[i] if six.PY3 else ord(buffer[i])
            if field_id == TAG_FIELD_ID:
                i += 1
                key = self._decode_string(buffer, i)
                i += len(key)
                total_chars += len(key)
                i += 1
                val = self._decode_string(buffer, i)
                i += len(val)
                total_chars += len(val)
                i += 1
                if total_chars > \
                        TAG_MAP_SERIALIZED_SIZE_LIMIT:  # pragma: NO COVER
                    logging.warning("Size of the tag context exceeds maximum")
                    break
                else:
                    tag_context.insert(str(key), str(val))
            else:
                break
        return tag_context
Code Example #8
    def test_create_timeseries(self):
        client = mock.Mock()
        start_time = datetime.utcnow()
        end_time = datetime.utcnow()

        option = stackdriver.Options(project_id="project-test",
                                     resource="global")
        exporter = stackdriver.StackdriverStatsExporter(options=option,
                                                        client=client)

        stats = stats_module.Stats()
        view_manager = stats.view_manager
        stats_recorder = stats.stats_recorder

        if len(view_manager.measure_to_view_map.exporters) > 0:
            view_manager.unregister_exporter(
                view_manager.measure_to_view_map.exporters[0])

        view_manager.register_exporter(exporter)

        view_manager.register_view(VIDEO_SIZE_VIEW)

        tag_value = tag_value_module.TagValue("1200")
        tag_map = tag_map_module.TagMap()
        tag_map.insert(FRONTEND_KEY, tag_value)
        measure_map = stats_recorder.new_measurement_map()
        measure_map.measure_int_put(VIDEO_SIZE_MEASURE, 25 * MiB)

        measure_map.record(tag_map)

        v_data = measure_map.measure_to_view_map.get_view(
            VIDEO_SIZE_VIEW_NAME, None)

        time_series = exporter.create_time_series_list(v_data, "")
        self.assertIsNotNone(time_series)
        time_series = exporter.create_time_series_list(v_data, "global")
        self.assertIsNotNone(time_series)
Code Example #9
    def track_metric(self, metric_name, metric_value):
        if self.appinsights_key:
            print("Tracking metric:" + metric_name + ", Value: " +
                  str(metric_value))

            if metric_name not in self.metrics:
                metrics_measure = measure_module.MeasureInt(
                    metric_name, metric_name, metric_name)
                metrics_view = view_module.View(
                    metric_name,
                    metric_name,
                    [],
                    metrics_measure,
                    aggregation_module.LastValueAggregation(
                        value=metric_value),
                )

                view_manager.register_view(metrics_view)
                mmap = stats_recorder.new_measurement_map()
                tmap = tag_map_module.TagMap()

                self.metrics[metric_name] = {
                    "measure": metrics_measure,
                    "measurement_map": mmap,
                    "tag_map": tmap,
                }

            measure = self.metrics[metric_name]["measure"]
            mmap = self.metrics[metric_name]["measurement_map"]
            tmap = self.metrics[metric_name]["tag_map"]
            print("Putting metric:" + metric_name + ", Value: " +
                  str(metric_value))
            mmap.measure_int_put(measure, metric_value)
            mmap.record(tmap)
Code Example #10
    def report_metric_with_run_tagging(self,
                                       name: str,
                                       value: float,
                                       description=""):
        """Report a metric value to the AML run and to AppInsights, and tag the parent run with the metric.
        Please note tags are mutable. By default, this method reports to AML parent run.
        e.g. Condensed_Binocular.report_metric(name, value)
        :param name: The name of the metric.
        :param value: The value to be reported.
        :type value: Float or integer.
        :param description: An optional description about the metric.
        :param report_to_parent: Mark True if you want to report to AML parent run.
        """
        # Report to AML
        self.run.log(name, value)
        if not self.offline_run:
            self.run.parent.log(name, value)
            self.run.parent.tag(name, value)

        # Report to AppInsights
        measurement_map = (
            stats_module.stats.stats_recorder.new_measurement_map())
        tag_map = tag_map_module.TagMap()
        measure = measure_module.MeasureFloat(name, description)
        self.set_view(name, description, measure)
        measurement_map.measure_float_put(measure, value)
        measurement_map.record(tag_map)
Code Example #11
    def track_metric(self, metric_name, metric_value):
        if self.appinsights_key:
            print("Tracking metric:" + metric_name + ", Value: " +
                  str(metric_value))

            if metric_name not in self.metrics:
                metrics_measure = measure_module.MeasureInt(
                    metric_name, metric_name, metric_name)
                metrics_view = view_module.View(
                    metric_name, metric_name, [], metrics_measure,
                    aggregation_module.LastValueAggregation(
                        value=metric_value))

                view_manager.register_view(metrics_view)
                mmap = stats_recorder.new_measurement_map()
                tmap = tag_map_module.TagMap()

                self.metrics[metric_name] = {
                    'measure': metrics_measure,
                    'measurement_map': mmap,
                    'tag_map': tmap
                }

            measure = self.metrics[metric_name]['measure']
            mmap = self.metrics[metric_name]['measurement_map']
            tmap = self.metrics[metric_name]['tag_map']
            print("Putting metric:" + metric_name + ", Value: " +
                  str(metric_value))
            mmap.measure_int_put(measure, metric_value)
            mmap.record(tmap)
Code Example #12
    def track_metric(self, metric_name, metric_value):
        try:
            if self.appinsights_key:
                if metric_name not in self.metrics:
                    metrics_measure = measure_module.MeasureInt(
                        metric_name, metric_name, metric_name)
                    metrics_view = view_module.View(
                        metric_name,
                        metric_name,
                        [],
                        metrics_measure,
                        aggregation_module.LastValueAggregation(
                            value=metric_value),
                    )

                    self.view_manager.register_view(metrics_view)
                    mmap = self.stats_recorder.new_measurement_map()
                    tmap = tag_map_module.TagMap()

                    self.metrics[metric_name] = {
                        "measure": metrics_measure,
                        "measurement_map": mmap,
                        "tag_map": tmap,
                    }

                measure = self.metrics[metric_name]["measure"]
                mmap = self.metrics[metric_name]["measurement_map"]
                tmap = self.metrics[metric_name]["tag_map"]
                mmap.measure_int_put(measure, metric_value)
                mmap.record(tmap)
        except Exception as e:
            print("Exception when tracking a metric:")
            print(e)
Code Example #13
    def trace_and_record_stats(self, method_name, fn, *args, **kwargs):
        __TRACER = execution_context.get_opencensus_tracer() or noop_tracer.NoopTracer()
        __STATS_RECORDER = stats.Stats().stats_recorder

        start_time = time.time()

        tags = tag_map_module.TagMap()
        tags.insert(key_method, tag_value_module.TagValue(method_name))
        mm = __STATS_RECORDER.new_measurement_map()

        with __TRACER.span(name=method_name) as span:
            try:
                result = fn(*args, **kwargs)
            except Exception as e:  # an error to record
                span.status = Status.from_exception(e)
                # TODO: (@odeke-em) perhaps shorten the exception when added as a tag here?
                tags.insert(key_error, e.__str__())
                # Then finally after recording the exception, re-raise it.
                raise
            else:  # Success
                tags.insert(key_status, "ok")
                return result
            finally:
                latency_ms = (time.time() - start_time) * 1000
                mm.measure_float_put(m_latency_ms, latency_ms)
                mm.measure_int_put(m_calls, 1)
                mm.record(tags)
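
The wrapper above assumes tag keys and measures that are defined at module scope in the original file. A minimal sketch, with the key and measure names mirroring the snippet and the units and descriptions assumed:

from opencensus.stats import measure as measure_module
from opencensus.tags import tag_key as tag_key_module

# Tag keys used to label each recorded call.
key_method = tag_key_module.TagKey("method")
key_error = tag_key_module.TagKey("error")
key_status = tag_key_module.TagKey("status")

# Measures recorded for every traced call.
m_latency_ms = measure_module.MeasureFloat(
    "latency", "latency of traced calls", "ms")
m_calls = measure_module.MeasureInt(
    "calls", "number of traced calls", "1")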
Code Example #14
    def test_create_batched_time_series(self, monitor_resource_mock):
        client = mock.Mock()
        v_data = view_data_module.ViewData(view=VIDEO_SIZE_VIEW,
                                           start_time=TEST_TIME_STR,
                                           end_time=TEST_TIME_STR)
        v_data.record(context=tag_map_module.TagMap(), value=2, timestamp=None)
        view_data = [v_data]

        option = stackdriver.Options(project_id="project-test")
        exporter = stackdriver.StackdriverStatsExporter(options=option,
                                                        client=client)

        view_data = [metric_utils.view_data_to_metric(view_data[0], TEST_TIME)]

        time_series_batches = exporter.create_batched_time_series(view_data, 1)

        self.assertEqual(len(time_series_batches), 1)
        [time_series_batch] = time_series_batches
        self.assertEqual(len(time_series_batch), 1)
        [time_series] = time_series_batch
        self.assertEqual(
            time_series.metric.type,
            'custom.googleapis.com/opencensus/' + VIDEO_SIZE_VIEW_NAME)
        self.check_labels(time_series.metric.labels, {},
                          include_opencensus=True)
Code Example #15
    def report_metric(self,
                      name: str,
                      value: float,
                      description="",
                      report_to_parent: bool = False):
        """Report a metric value to the AML run and to AppInsights.
        e.g. Condensed_Binocular.report_metric(name, value)
        :param name: The name of the metric.
        :param value: The value to be reported.
        :type value: Float or integer.
        :param description: An optional description about the metric.
        :param report_to_parent: Mark True if you want to report to AML parent run.
        """
        # Report to AML
        self.run.log(name, value)
        if report_to_parent and not self.offline_run:
            self.run.parent.log(name, value)

        # Report to AppInsights
        measurement_map = (
            stats_module.stats.stats_recorder.new_measurement_map())
        tag_map = tag_map_module.TagMap()
        measure = measure_module.MeasureFloat(name, description)
        self.set_view(name, description, measure)
        measurement_map.measure_float_put(measure, value)
        measurement_map.record(tag_map)
Code Example #16
def record_values(view_data_objects, tags, value=1, count=1):
    tag_map = tag_map_module.TagMap(tags)
    for view_data_object in view_data_objects:
        for _ in range(count):
            # Timestamp here is only used to record exemplars. It is safe to
            # leave it as None
            view_data_object.record(tag_map, value, None)
Code Example #17
def main():
    address = os.environ.get("ZENOSS_ADDRESS", zenoss.DEFAULT_ADDRESS)
    api_key = os.environ.get("ZENOSS_API_KEY")
    if not api_key:
        sys.exit("ZENOSS_API_KEY must be set")

    # Create Zenoss exporter.
    exporter = zenoss.new_stats_exporter(options=zenoss.Options(
        address=address, api_key=api_key, source="app.example.com"),
                                         interval=10)

    # Register Zenoss exporter.
    view_manager.register_exporter(exporter)

    # Register our example view.
    view_manager.register_view(latency_view)

    # Prepare measurement map, and tag map we can reuse for each sample.
    measurement_map = stats_recorder.new_measurement_map()
    tag_map = tag_map_module.TagMap()

    # Record one random measurement each second for 100 seconds.
    print("Recording measurements (CTRL-C to stop):")
    while True:
        ms = random.random() * 5 * 1000
        print("  - latency {}".format(ms))
        measurement_map.measure_float_put(m_latency_ms, ms)
        measurement_map.record(tag_map)
        time.sleep(1)
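
main() above records against a latency measure and view defined at module level, using the view_manager and stats_recorder from the global stats object. A minimal sketch of that setup follows; the measure name, view name, and bucket boundaries are illustrative, not taken from the original project.

import os
import random
import sys
import time

from opencensus.stats import aggregation as aggregation_module
from opencensus.stats import measure as measure_module
from opencensus.stats import stats as stats_module
from opencensus.stats import view as view_module
from opencensus.tags import tag_map as tag_map_module

# `zenoss` in the snippet refers to the Zenoss stats exporter module shipped
# in the opencensus-ext-zenoss package.

stats = stats_module.stats
view_manager = stats.view_manager
stats_recorder = stats.stats_recorder

m_latency_ms = measure_module.MeasureFloat(
    "latency", "request latency", "ms")
latency_view = view_module.View(
    "latency_distribution", "distribution of request latencies", [],
    m_latency_ms,
    aggregation_module.DistributionAggregation(
        [25.0, 50.0, 100.0, 250.0, 500.0, 1000.0, 2500.0, 5000.0]))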
Code Example #18
    def test_emit(self):
        options = prometheus.Options(namespace="opencensus", port=9005)
        stats = stats_module.Stats()
        view_manager = stats.view_manager
        stats_recorder = stats.stats_recorder
        exporter = prometheus.new_stats_exporter(options)
        view_manager.register_exporter(exporter)
        view_manager.register_view(VIDEO_SIZE_VIEW)
        tag_value = tag_value_module.TagValue(str(1000))
        tag_map = tag_map_module.TagMap()
        tag_map.insert(FRONTEND_KEY, tag_value)
        measure_map = stats_recorder.new_measurement_map()
        measure_map.measure_int_put(VIDEO_SIZE_MEASURE, 25 * MiB)
        measure_map.record(tag_map)
        exporter.export([
            exporter.collector.view_data[(
                'opencensus_my.org/views/video_size_test2-my.org'
                '/keys/frontend')]
        ])

        self.assertIsInstance(
            exporter.collector.view_data[(
                'opencensus_my.org/views/video_size_test2-my.org'
                '/keys/frontend')], view_data_module.ViewData)
        self.assertEqual(REGISTERED_VIEW2, exporter.collector.registered_views)
        self.assertEqual(options, exporter.options)
        self.assertEqual(options.registry, exporter.gatherer)
        self.assertIsNotNone(exporter.collector)
        self.assertIsNotNone(exporter.transport)
Code Example #19
    def track_metric(self, metric_name, metric_value):
        try:
            if self.appinsights_key:
                if metric_name not in self.metrics:
                    metrics_measure = measure_module.MeasureInt(
                        metric_name, metric_name, metric_name)
                    metrics_view = view_module.View(
                        metric_name, metric_name, [], metrics_measure,
                        aggregation_module.LastValueAggregation(
                            value=metric_value))

                    self.view_manager.register_view(metrics_view)
                    mmap = self.stats_recorder.new_measurement_map()
                    tmap = tag_map_module.TagMap()

                    self.metrics[metric_name] = {
                        'measure': metrics_measure,
                        'measurement_map': mmap,
                        'tag_map': tmap
                    }

                measure = self.metrics[metric_name]['measure']
                mmap = self.metrics[metric_name]['measurement_map']
                tmap = self.metrics[metric_name]['tag_map']
                mmap.measure_int_put(measure, metric_value)
                mmap.record(tmap)
        except Exception as e:
            print('Exception when tracking a metric:')
            print(e)
Code Example #20
File: main.py Project: zyxue/opencensus-python
def main():
    stats = stats_module.stats
    view_manager = stats.view_manager
    stats_recorder = stats.stats_recorder

    # Register view.
    view_manager.register_view(VIDEO_SIZE_VIEW)

    # Sleep for [0, 10] milliseconds to fake work.
    time.sleep(random.randint(1, 10) / 1000.0)

    # Process video.
    # Record the processed video size.
    tag_value = tag_value_module.TagValue("mobile-ios9.3.5")
    tag_map = tag_map_module.TagMap()
    tag_map.insert(FRONTEND_KEY, tag_value)
    measure_map = stats_recorder.new_measurement_map()
    measure_map.measure_int_put(VIDEO_SIZE_MEASURE, 25 * MiB)
    measure_map.record(tag_map)

    # Get aggregated stats and print it to console.
    view_data = view_manager.get_view(VIDEO_SIZE_VIEW_NAME)
    pprint(vars(view_data))
    for k, v in view_data._tag_value_aggregation_data_map.items():
        pprint(k)
        pprint(vars(v))
Code Example #21
def main():
    stats = stats_module.Stats()
    view_manager = stats.view_manager
    stats_recorder = stats.stats_recorder

    exporter = prometheus.new_stats_exporter(
        prometheus.Options(namespace="opencensus"))
    view_manager.register_exporter(exporter)

    # Register view.
    view_manager.register_view(VIDEO_SIZE_VIEW)

    # Sleep for [0, 10] milliseconds to fake work.
    time.sleep(random.randint(1, 10) / 1000.0)

    # Process video.
    # Record the processed video size.
    tag_value = tag_value_module.TagValue(str(random.randint(1, 10000)))
    tag_map = tag_map_module.TagMap()
    tag_map.insert(FRONTEND_KEY, tag_value)
    measure_map = stats_recorder.new_measurement_map()
    measure_map.measure_int_put(VIDEO_SIZE_MEASURE, 25 * MiB)
    measure_map.record(tag_map)

    # Get aggregated stats and print it to console.
    view_data = view_manager.get_view(VIDEO_SIZE_VIEW_NAME)
    pprint(vars(view_data))
    for k, v in view_data.tag_value_aggregation_data_map.items():
        pprint(k)
        pprint(vars(v))

    # Keep the process alive so the data stays available at the Prometheus
    # endpoint (localhost:8000/metrics).
    while True:
        time.sleep(1)
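
Examples #20 and #21 (and the Stackdriver and Prometheus tests above) assume a shared set of module-level constants for the video-size view. A minimal sketch of those constants; the key and view names are taken from the metric types asserted in the tests, while the measure name, description, and bucket boundaries are assumptions.

from opencensus.stats import aggregation as aggregation_module
from opencensus.stats import measure as measure_module
from opencensus.stats import view as view_module
from opencensus.tags import tag_key as tag_key_module

MiB = 1 << 20

FRONTEND_KEY = tag_key_module.TagKey("my.org/keys/frontend")
VIDEO_SIZE_MEASURE = measure_module.MeasureInt(
    "my.org/measures/video_size_test2", "size of processed videos", "By")
VIDEO_SIZE_VIEW_NAME = "my.org/views/video_size_test2"
VIDEO_SIZE_VIEW = view_module.View(
    VIDEO_SIZE_VIEW_NAME, "processed video size over time", [FRONTEND_KEY],
    VIDEO_SIZE_MEASURE,
    aggregation_module.DistributionAggregation([16.0 * MiB, 256.0 * MiB]))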
Code Example #22
File: metrics_agent.py Project: sanderland/ray
    def _record(self, metric_point: MetricPoint,
                measurement_map: MeasurementMap):
        """Record a single metric point to export.

        NOTE: When this method is called, the caller should acquire a lock.

        Args:
            metric_point(MetricPoint): metric point defined in common.proto
            measurement_map(MeasurementMap): Measurement map to record metrics.
        """
        metric_name = metric_point.metric_name
        tags = metric_point.tags

        metric = self._registry.get(metric_name)
        # Metrics should be always registered dynamically.
        assert metric

        tag_map = tag_map_module.TagMap()
        for key, value in tags.items():
            tag_key = tag_key_module.TagKey(key)
            tag_value = tag_value_module.TagValue(value)
            tag_map.insert(tag_key, tag_value)

        metric_value = metric_point.value
        measurement_map.measure_float_put(metric.measure, metric_value)
        # NOTE: When we record this metric, timestamp will be renewed.
        measurement_map.record(tag_map)
Code Example #23
    def test_stats_record_async(self):
        # We use a suffix to avoid colliding with cached objects
        suffix = str(os.getpid())

        tag_key = "SampleKeyAsyncTest%s" % suffix
        measure_name = "SampleMeasureNameAsyncTest%s" % suffix
        measure_description = "SampleDescriptionAsyncTest%s" % suffix
        view_name = "SampleViewNameAsyncTest%s" % suffix
        view_description = "SampleViewDescriptionAsyncTest%s" % suffix

        FRONTEND_KEY_ASYNC = tag_key_module.TagKey(tag_key)
        VIDEO_SIZE_MEASURE_ASYNC = measure_module.MeasureInt(
            measure_name, measure_description, "By")
        VIDEO_SIZE_VIEW_NAME_ASYNC = view_name
        VIDEO_SIZE_DISTRIBUTION_ASYNC =\
            aggregation_module.DistributionAggregation(
                [0.0, 16.0 * MiB, 256.0 * MiB]
            )
        VIDEO_SIZE_VIEW_ASYNC = view_module.View(
            VIDEO_SIZE_VIEW_NAME_ASYNC, view_description, [FRONTEND_KEY_ASYNC],
            VIDEO_SIZE_MEASURE_ASYNC, VIDEO_SIZE_DISTRIBUTION_ASYNC)

        stats = stats_module.Stats()
        view_manager = stats.view_manager
        stats_recorder = stats.stats_recorder

        exporter = stackdriver.new_stats_exporter(
            stackdriver.Options(project_id=PROJECT))
        view_manager.register_exporter(exporter)

        # Register view.
        view_manager.register_view(VIDEO_SIZE_VIEW_ASYNC)

        # Sleep for [0, 10] milliseconds to fake work.
        time.sleep(random.randint(1, 10) / 1000.0)

        # Process video.
        # Record the processed video size.
        tag_value = tag_value_module.TagValue("1200")
        tag_map = tag_map_module.TagMap()
        tag_map.insert(FRONTEND_KEY_ASYNC, tag_value)
        measure_map = stats_recorder.new_measurement_map()
        measure_map.measure_int_put(VIDEO_SIZE_MEASURE_ASYNC, 25 * MiB)

        measure_map.record(tag_map)

        @retry(wait_fixed=RETRY_WAIT_PERIOD,
               stop_max_attempt_number=RETRY_MAX_ATTEMPT)
        def get_metric_descriptors(self, exporter, view_description):
            name = exporter.client.project_path(PROJECT)
            list_metrics_descriptors = exporter.client.list_metric_descriptors(
                name)
            element = next((element for element in list_metrics_descriptors
                            if element.description == view_description), None)
            self.assertIsNotNone(element)
            self.assertEqual(element.description, view_description)
            self.assertEqual(element.unit, "By")

        get_metric_descriptors(self, exporter, view_description)
Code Example #24
    def test_delete(self):
        key = 'key1'
        tag_map = tag_map_module.TagMap(tags=[{
            'key1': 'value1',
            'key2': 'value2'
        }])
        tag_map.delete(key=key)
        self.assertEqual(tag_map.map, {'key2': 'value2'})
Code Example #25
    def test_insert(self):
        test_key = 'key1'
        test_value = 'value1'
        tag_map = tag_map_module.TagMap()
        tag_map.insert(key=test_key, value=test_value)
        self.assertEqual({'key1': 'value1'}, tag_map.map)
        tag_map.insert(key=test_key, value=test_value)
        self.assertEqual({'key1': 'value1'}, tag_map.map)
Code Example #26
def tag_and_record(mmap, metrics_info):
    # apply same tags to every metric in batch
    tag_value_isp = tag_value.TagValue(metrics_info['client']['isp'])
    tag_value_server_host = tag_value.TagValue(metrics_info['server']['host'])
    tagmap = tag_map.TagMap()
    tagmap.insert(tag_key_isp, tag_value_isp)
    tagmap.insert(tag_key_server_host, tag_value_server_host)
    logger.debug("tagmap: %s", tagmap.map)
    mmap.record(tagmap)
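
tag_and_record() above assumes the tag modules are imported under short aliases and that the two tag keys exist at module scope. A minimal sketch; the key names are inferred from the variable names rather than taken from the original project.

import logging

from opencensus.tags import tag_key
from opencensus.tags import tag_map
from opencensus.tags import tag_value

logger = logging.getLogger(__name__)

tag_key_isp = tag_key.TagKey("isp")
tag_key_server_host = tag_key.TagKey("server_host")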
Code Example #27
    def test_update(self):
        key = 'key1'
        value = 'value1'
        tag_map = tag_map_module.TagMap(tags=[{'key1': 'value2'}])
        tag_map.update(key=key, value=value)
        self.assertEqual({'key1': 'value1'}, tag_map.map)
        key_2 = 'key2'
        tag_map.update(key=key_2, value=value)
        self.assertEqual({'key1': 'value1'}, tag_map.map)
Code Example #28
    def test_create_timeseries_with_resource(self, monitor_resource_mock):
        client = mock.Mock()

        option = stackdriver.Options(project_id="project-test",
                                     resource="global")
        exporter = stackdriver.StackdriverStatsExporter(options=option,
                                                        client=client)

        stats = stats_module.Stats()
        view_manager = stats.view_manager
        stats_recorder = stats.stats_recorder

        if len(view_manager.measure_to_view_map.exporters) > 0:
            view_manager.unregister_exporter(
                view_manager.measure_to_view_map.exporters[0])

        view_manager.register_exporter(exporter)

        view_manager.register_view(VIDEO_SIZE_VIEW)

        tag_value = tag_value_module.TagValue("1200")
        tag_map = tag_map_module.TagMap()
        tag_map.insert(FRONTEND_KEY, tag_value)
        measure_map = stats_recorder.new_measurement_map()
        measure_map.measure_int_put(VIDEO_SIZE_MEASURE, 25 * MiB)

        measure_map.record(tag_map)

        v_data = measure_map.measure_to_view_map.get_view(
            VIDEO_SIZE_VIEW_NAME, None)

        mocked_labels = {
            'instance_id': 'my-instance',
            'project_id': 'my-project',
            'zone': 'us-east1',
            'pod_id': 'localhost',
            'namespace_id': 'namespace'
        }

        monitor_resource_mock.return_value = mock.Mock()
        monitor_resource_mock.return_value.resource_type = 'gce_instance'
        monitor_resource_mock.return_value.get_resource_labels.return_value = \
            mocked_labels

        time_series = exporter.create_time_series_list(v_data, "", "")
        self.assertEqual(time_series.resource.type, "gce_instance")
        self.assertEqual(
            time_series.metric.type,
            "custom.googleapis.com/opencensus/my.org/views/video_size_test2")
        self.assertIsNotNone(time_series)

        time_series = exporter.create_time_series_list(v_data, "global", "")
        self.assertEqual(
            time_series.metric.type,
            "custom.googleapis.com/opencensus/my.org/views/video_size_test2")
        self.assertIsNotNone(time_series)
Code Example #29
    def test_set_and_get_tag_map(self):
        key = tag_key_module.TagKey('key')
        value = tag_value_module.TagValue('value')
        tag_map = tag_map_module.TagMap()
        tag_map.insert(key, value)

        execution_context.set_current_tag_map(tag_map)

        result = execution_context.get_current_tag_map()

        self.assertEqual(result, tag_map)
Code Example #30
    def test_handle_upload_with_data(self, monitor_resource_mock):
        client = mock.Mock()
        v_data = view_data_module.ViewData(view=VIDEO_SIZE_VIEW,
                                           start_time=TEST_TIME,
                                           end_time=TEST_TIME)
        v_data.record(context=tag_map_module.TagMap(), value=2, timestamp=None)
        view_data = [v_data]
        option = stackdriver.Options(project_id="project-test")
        exporter = stackdriver.StackdriverStatsExporter(options=option,
                                                        client=client)
        exporter.handle_upload(view_data)
        self.assertTrue(client.create_time_series.called)