def setup_open_census():
    """Wire OpenCensus stats onto the Flask ``app``: create the
    response-time measure, tag keys, a LastValue view, and a
    Stackdriver exporter (project from the PROJECT_ID env var)."""
    stats_stats = stats.Stats()

    app.m_response_ms = measure_module.MeasureFloat(
        "flask_response_time", "The request duration", "ms")
    app.key_method = tag_key_module.TagKey("method")
    # Create the status key
    app.key_status = tag_key_module.TagKey("status")
    # Create the error key
    app.key_error = tag_key_module.TagKey("error")

    app.view_manager = stats_stats.view_manager
    app.stats_recorder = stats_stats.stats_recorder

    latency_view = view.View(
        "response_time",
        "The time it took to respond",
        [app.key_method, app.key_status, app.key_error],
        app.m_response_ms,
        aggregation.LastValueAggregation())

    app.exporter = stackdriver.new_stats_exporter(
        options=stackdriver.Options(project_id=os.getenv('PROJECT_ID')))
    app.view_manager.register_exporter(app.exporter)
    app.view_manager.register_view(latency_view)
def trace_and_record_stats(self, method_name, fn, *args, **kwargs):
    """Invoke ``fn(*args, **kwargs)`` inside a tracing span, recording
    latency and call-count measurements tagged with the method name and
    an ok/error status.

    Returns whatever ``fn`` returns; re-raises any exception ``fn``
    raises after tagging it on the span and in the stats tags.
    """
    __TRACER = execution_context.get_opencensus_tracer() or noop_tracer.NoopTracer()
    __STATS_RECORDER = stats.Stats().stats_recorder
    start_time = time.time()
    tags = tag_map_module.TagMap()
    tags.insert(key_method, tag_value_module.TagValue(method_name))
    mm = __STATS_RECORDER.new_measurement_map()
    with __TRACER.span(name=method_name) as span:
        try:
            result = fn(*args, **kwargs)
        except Exception as e:  # an error to record
            span.status = Status.from_exception(e)
            # TODO: (@odeke-em) perhaps shorten the exception when added as a tag here?
            tags.insert(key_error, e.__str__())
            # Re-raise after recording; bare ``raise`` preserves the
            # original traceback (``raise e`` would rewrite it).
            raise
        else:
            # BUG FIX: the original did ``return fn(...)`` inside the
            # ``try`` block, so this ``else`` branch never executed and
            # the "ok" status tag was never recorded. Tag success first,
            # then return the captured result.
            tags.insert(key_status, "ok")
            return result
        finally:
            # Always record latency and the call count, success or not.
            latency_ms = (time.time() - start_time) * 1000
            mm.measure_float_put(m_latency_ms, latency_ms)
            mm.measure_int_put(m_calls, 1)
            mm.record(tags)
def init_app(self, app):
    """Attach tracing and stats to ``app``: resolve the configured
    exporter, register the response-time view, and install metrics."""
    self.app = app

    params = self.app.config.get('OPENCENSUS_TRACE_PARAMS', {})
    self.blacklist_paths = params.get(BLACKLIST_PATHS, self.blacklist_paths)
    transport = params.get(TRANSPORT, sync.SyncTransport)

    # If we were handed an exporter *class*, instantiate it; an already
    # constructed exporter instance is used as-is.
    if inspect.isclass(self.exporter):
        if self.exporter.__name__ == 'StackdriverExporter':
            _project_id = params.get(GCP_EXPORTER_PROJECT, None)
            self.exporter = self.exporter(
                project_id=_project_id, transport=transport)
        else:
            self.exporter = self.exporter(transport=transport)

    stats_stats = stats.Stats()
    self.app.stats_recorder = stats_stats.stats_recorder
    self.app.view_manager = stats_stats.view_manager

    response_time_view = view.View(
        "response_time",
        "The time it took to respond",
        [self.app.key_method, self.app.key_status, self.app.key_error],
        self.app.m_response_ms,
        aggregation.LastValueAggregation())
    self.app.view_manager.register_exporter(self.exporter)
    self.app.view_manager.register_view(response_time_view)

    self.setup_metrics()
def main():
    """Record one fake video-size measurement, print the aggregated view
    data, then block forever so Prometheus can keep scraping
    localhost:8000/metrics."""
    stats = stats_module.Stats()
    view_manager = stats.view_manager
    stats_recorder = stats.stats_recorder
    exporter = prometheus.new_stats_exporter(
        prometheus.Options(namespace="opencensus"))
    view_manager.register_exporter(exporter)
    # Register view.
    view_manager.register_view(VIDEO_SIZE_VIEW)

    # Sleep for [0, 10] milliseconds to fake work.
    time.sleep(random.randint(1, 10) / 1000.0)

    # Process video.
    # Record the processed video size.
    tag_value = tag_value_module.TagValue(str(random.randint(1, 10000)))
    tag_map = tag_map_module.TagMap()
    tag_map.insert(FRONTEND_KEY, tag_value)
    measure_map = stats_recorder.new_measurement_map()
    measure_map.measure_int_put(VIDEO_SIZE_MEASURE, 25 * MiB)
    measure_map.record(tag_map)

    # Get aggregated stats and print it to console.
    view_data = view_manager.get_view(VIDEO_SIZE_VIEW_NAME)
    pprint(vars(view_data))
    for k, v in view_data.tag_value_aggregation_data_map.items():
        pprint(k)
        pprint(vars(v))

    # Prevent main from exiting to see the data on prometheus
    # localhost:8000/metrics.
    # BUG FIX: the original ``while True: pass`` busy-waited and pinned
    # a CPU core; sleeping blocks just as well while yielding the CPU.
    while True:
        time.sleep(60)
def test_emit(self):
    """Exporting a recorded measurement should leave ViewData in the
    collector and keep the exporter's options/gatherer/transport wired."""
    options = prometheus.Options(namespace="opencensus", port=9005)
    stats = stats_module.Stats()
    view_manager = stats.view_manager
    stats_recorder = stats.stats_recorder

    exporter = prometheus.new_stats_exporter(options)
    view_manager.register_exporter(exporter)
    view_manager.register_view(VIDEO_SIZE_VIEW)

    tag_map = tag_map_module.TagMap()
    tag_map.insert(FRONTEND_KEY, tag_value_module.TagValue(str(1000)))
    measure_map = stats_recorder.new_measurement_map()
    measure_map.measure_int_put(VIDEO_SIZE_MEASURE, 25 * MiB)
    measure_map.record(tag_map)

    # The collector keys ViewData by "<namespace>_<view>-<columns>".
    view_data_key = ('opencensus_my.org/views/video_size_test2-my.org'
                     '/keys/frontend')
    exporter.export([exporter.collector.view_data[view_data_key]])

    self.assertIsInstance(exporter.collector.view_data[view_data_key],
                          view_data_module.ViewData)
    self.assertEqual(REGISTERED_VIEW2, exporter.collector.registered_views)
    self.assertEqual(options, exporter.options)
    self.assertEqual(options.registry, exporter.gatherer)
    self.assertIsNotNone(exporter.collector)
    self.assertIsNotNone(exporter.transport)
def main():
    """Record one fake video-size measurement tagged with a device name
    and print the aggregated view data to the console."""
    stats = stats_module.Stats()
    view_manager = stats.view_manager
    stats_recorder = stats.stats_recorder

    # Register view.
    view_manager.register_view(VIDEO_SIZE_VIEW)

    # Sleep for [0, 10] milliseconds to fake work.
    time.sleep(random.randint(1, 10) / 1000.0)

    # Process video.
    # Record the processed video size.
    tag_value = tag_value_module.TagValue("mobile-ios9.3.5")
    tag_map = tag_map_module.TagMap()
    tag_map.insert(FRONTEND_KEY, tag_value)
    measure_map = stats_recorder.new_measurement_map()
    measure_map.measure_int_put(VIDEO_SIZE_MEASURE, 25 * MiB)
    measure_map.record(tag_map)

    # Get aggregated stats and print it to console.
    view_data = view_manager.get_view(VIDEO_SIZE_VIEW_NAME)
    pprint(vars(view_data))
    # FIX: use the public ``tag_value_aggregation_data_map`` property
    # (as the sibling example does) rather than reaching into the
    # private ``_tag_value_aggregation_data_map`` attribute.
    for k, v in view_data.tag_value_aggregation_data_map.items():
        pprint(k)
        pprint(vars(v))
def test_create_timeseries(self):
    """create_time_series_list should produce data both with the default
    ("") and an explicit "global" resource option."""
    client = mock.Mock()
    # FIX: removed unused ``start_time``/``end_time`` locals — the
    # original called datetime.utcnow() twice and never read the results.
    option = stackdriver.Options(project_id="project-test",
                                 resource="global")
    exporter = stackdriver.StackdriverStatsExporter(options=option,
                                                    client=client)

    stats = stats_module.Stats()
    view_manager = stats.view_manager
    stats_recorder = stats.stats_recorder

    # Drop any exporter left registered by a previous test.
    if len(view_manager.measure_to_view_map.exporters) > 0:
        view_manager.unregister_exporter(
            view_manager.measure_to_view_map.exporters[0])
    view_manager.register_exporter(exporter)
    view_manager.register_view(VIDEO_SIZE_VIEW)

    tag_value = tag_value_module.TagValue("1200")
    tag_map = tag_map_module.TagMap()
    tag_map.insert(FRONTEND_KEY, tag_value)
    measure_map = stats_recorder.new_measurement_map()
    measure_map.measure_int_put(VIDEO_SIZE_MEASURE, 25 * MiB)
    measure_map.record(tag_map)

    v_data = measure_map.measure_to_view_map.get_view(
        VIDEO_SIZE_VIEW_NAME, None)

    time_serie = exporter.create_time_series_list(v_data, "")
    self.assertIsNotNone(time_serie)
    time_serie = exporter.create_time_series_list(v_data, "global")
    self.assertIsNotNone(time_serie)
def test_stats_record_async(self):
    """End-to-end async export: record one measurement and poll until
    Stackdriver exposes the matching metric descriptor."""
    # A per-process suffix keeps this run's measures/views from
    # colliding with cached objects left by other test processes.
    suffix = str(os.getpid())
    tag_key = "SampleKeyAsyncTest%s" % suffix
    measure_name = "SampleMeasureNameAsyncTest%s" % suffix
    measure_description = "SampleDescriptionAsyncTest%s" % suffix
    view_name = "SampleViewNameAsyncTest%s" % suffix
    view_description = "SampleViewDescriptionAsyncTest%s" % suffix

    FRONTEND_KEY_ASYNC = tag_key_module.TagKey(tag_key)
    VIDEO_SIZE_MEASURE_ASYNC = measure_module.MeasureInt(
        measure_name, measure_description, "By")
    VIDEO_SIZE_VIEW_NAME_ASYNC = view_name
    VIDEO_SIZE_DISTRIBUTION_ASYNC = \
        aggregation_module.DistributionAggregation(
            [0.0, 16.0 * MiB, 256.0 * MiB])
    VIDEO_SIZE_VIEW_ASYNC = view_module.View(
        VIDEO_SIZE_VIEW_NAME_ASYNC, view_description,
        [FRONTEND_KEY_ASYNC], VIDEO_SIZE_MEASURE_ASYNC,
        VIDEO_SIZE_DISTRIBUTION_ASYNC)

    stats = stats_module.Stats()
    view_manager = stats.view_manager
    stats_recorder = stats.stats_recorder

    exporter = stackdriver.new_stats_exporter(
        stackdriver.Options(project_id=PROJECT))
    view_manager.register_exporter(exporter)
    # Register view.
    view_manager.register_view(VIDEO_SIZE_VIEW_ASYNC)

    # Sleep for [0, 10] milliseconds to fake work.
    time.sleep(random.randint(1, 10) / 1000.0)

    # Process video, then record the processed video size.
    tag_map = tag_map_module.TagMap()
    tag_map.insert(FRONTEND_KEY_ASYNC, tag_value_module.TagValue("1200"))
    measure_map = stats_recorder.new_measurement_map()
    measure_map.measure_int_put(VIDEO_SIZE_MEASURE_ASYNC, 25 * MiB)
    measure_map.record(tag_map)

    @retry(wait_fixed=RETRY_WAIT_PERIOD,
           stop_max_attempt_number=RETRY_MAX_ATTEMPT)
    def get_metric_descriptors(self, exporter, view_description):
        # Retried: descriptors appear on the backend asynchronously.
        name = exporter.client.project_path(PROJECT)
        list_metrics_descriptors = exporter.client.list_metric_descriptors(
            name)
        element = next(
            (element for element in list_metrics_descriptors
             if element.description == view_description), None)
        self.assertIsNotNone(element)
        self.assertEqual(element.description, view_description)
        self.assertEqual(element.unit, "By")

    get_metric_descriptors(self, exporter, view_description)
def stats():
    """Return the process-wide Stats object, creating it lazily.

    On first use, a Stackdriver exporter is registered too — but only
    when running on GAE outside of tests.
    """
    global _stats
    if not _stats:
        fresh = stats_module.Stats()
        if environment.in_gae() and not environment.in_test():
            exporter = stackdriver.new_stats_exporter(
                stackdriver.Options(project_id=metadata.project_id(), ))
            fresh.view_manager.register_exporter(exporter)
        _stats = fresh
    return _stats
def test_create_timeseries_with_resource(self, monitor_resource_mock):
    """When the monitored-resource detector reports a GCE instance, the
    exported time series should carry the gce_instance resource type."""
    client = mock.Mock()
    option = stackdriver.Options(project_id="project-test",
                                 resource="global")
    exporter = stackdriver.StackdriverStatsExporter(options=option,
                                                    client=client)

    stats = stats_module.Stats()
    view_manager = stats.view_manager
    stats_recorder = stats.stats_recorder

    # Drop any exporter left registered by a previous test.
    if len(view_manager.measure_to_view_map.exporters) > 0:
        view_manager.unregister_exporter(
            view_manager.measure_to_view_map.exporters[0])
    view_manager.register_exporter(exporter)
    view_manager.register_view(VIDEO_SIZE_VIEW)

    tag_value = tag_value_module.TagValue("1200")
    tag_map = tag_map_module.TagMap()
    tag_map.insert(FRONTEND_KEY, tag_value)
    measure_map = stats_recorder.new_measurement_map()
    measure_map.measure_int_put(VIDEO_SIZE_MEASURE, 25 * MiB)
    measure_map.record(tag_map)

    v_data = measure_map.measure_to_view_map.get_view(
        VIDEO_SIZE_VIEW_NAME, None)

    mocked_labels = {
        'instance_id': 'my-instance',
        'project_id': 'my-project',
        'zone': 'us-east1',
        'pod_id': 'localhost',
        'namespace_id': 'namespace'
    }
    monitor_resource_mock.return_value = mock.Mock()
    monitor_resource_mock.return_value.resource_type = 'gce_instance'
    monitor_resource_mock.return_value.get_resource_labels.return_value = mocked_labels

    # FIX: assertEquals is a deprecated alias (removed in Python 3.12);
    # use assertEqual throughout.
    time_series = exporter.create_time_series_list(v_data, "", "")
    self.assertEqual(time_series.resource.type, "gce_instance")
    self.assertEqual(
        time_series.metric.type,
        "custom.googleapis.com/opencensus/my.org/views/video_size_test2")
    self.assertIsNotNone(time_series)

    time_series = exporter.create_time_series_list(v_data, "global", "")
    self.assertEqual(
        time_series.metric.type,
        "custom.googleapis.com/opencensus/my.org/views/video_size_test2")
    self.assertIsNotNone(time_series)
def test_prometheus_stats(self):
    """Requests recorded once and twice for two methods should surface
    as counter samples 1.0 and 2.0 on the Prometheus scrape endpoint."""
    method_key = tag_key_module.TagKey("method")
    request_count_measure = measure_module.MeasureInt(
        "request_count", "number of requests", "1")
    request_count_view_name = "request_count_view"
    count_agg = aggregation_module.CountAggregation()
    request_count_view = view_module.View(
        request_count_view_name,
        "number of requests broken down by methods",
        [method_key], request_count_measure, count_agg)

    stats = stats_module.Stats()
    view_manager = stats.view_manager
    stats_recorder = stats.stats_recorder
    exporter = prometheus.new_stats_exporter(
        prometheus.Options(namespace="opencensus", port=9303))
    view_manager.register_exporter(exporter)
    view_manager.register_view(request_count_view)

    time.sleep(random.randint(1, 10) / 1000.0)

    def record_requests(method_name, times):
        # Record ``times`` observations tagged with ``method_name``.
        tmap = tag_map_module.TagMap()
        tmap.insert(method_key, tag_value_module.TagValue(method_name))
        mmap = stats_recorder.new_measurement_map()
        mmap.measure_int_put(request_count_measure, 1)
        for _ in range(times):
            mmap.record(tmap)

    record_requests("some method", 1)
    record_requests("some other method", 2)

    if sys.version_info > (3, 0):
        import urllib.request
        contents = urllib.request.urlopen(
            "http://localhost:9303/metrics").read()
    else:
        import urllib2
        contents = urllib2.urlopen("http://localhost:9303/metrics").read()

    self.assertIn(b'# TYPE opencensus_request_count_view counter',
                  contents)
    self.assertIn(
        b'opencensus_request_count_view'
        b'{method="some method"} 1.0', contents)
    self.assertIn(
        b'opencensus_request_count_view'
        b'{method="some other method"} 2.0', contents)
def test_prometheus_stats(self):
    """A single recorded video-size measurement should appear as a
    counter sample on the Prometheus scrape endpoint."""
    # Imports are function-local so this test is self-contained.
    import random
    import sys
    import time

    from opencensus.stats import aggregation as aggregation_module
    from opencensus.stats.exporters import prometheus_exporter as prometheus
    from opencensus.stats import measure as measure_module
    from opencensus.stats import stats as stats_module
    from opencensus.stats import view as view_module
    from opencensus.tags import tag_key as tag_key_module
    from opencensus.tags import tag_map as tag_map_module
    from opencensus.tags import tag_value as tag_value_module

    MiB = 1 << 20
    FRONTEND_KEY = tag_key_module.TagKey("my.org/keys/frontend")
    VIDEO_SIZE_MEASURE = measure_module.MeasureInt(
        "my.org/measures/video_size", "size of processed videos", "By")
    VIDEO_SIZE_VIEW_NAME = "my.org/views/video_size"
    VIDEO_SIZE_DISTRIBUTION = aggregation_module.CountAggregation(
        256.0 * MiB)
    VIDEO_SIZE_VIEW = view_module.View(
        VIDEO_SIZE_VIEW_NAME, "processed video size over time",
        [FRONTEND_KEY], VIDEO_SIZE_MEASURE, VIDEO_SIZE_DISTRIBUTION)

    stats = stats_module.Stats()
    view_manager = stats.view_manager
    stats_recorder = stats.stats_recorder

    exporter = prometheus.new_stats_exporter(
        prometheus.Options(namespace="opencensus", port=9303))
    view_manager.register_exporter(exporter)
    view_manager.register_view(VIDEO_SIZE_VIEW)

    # Fake some work before recording.
    time.sleep(random.randint(1, 10) / 1000.0)

    tag_map = tag_map_module.TagMap()
    tag_map.insert(FRONTEND_KEY,
                   tag_value_module.TagValue(str(random.randint(1, 10000))))
    measure_map = stats_recorder.new_measurement_map()
    measure_map.measure_int_put(VIDEO_SIZE_MEASURE, 25 * MiB)
    measure_map.record(tag_map)

    if sys.version_info > (3, 0):
        import urllib.request
        contents = urllib.request.urlopen(
            "http://localhost:9303/metrics").read()
    else:
        import urllib2
        contents = urllib2.urlopen("http://localhost:9303/metrics").read()

    self.assertIn(b'# TYPE opencensus_my.org/views/video_size counter',
                  contents)
    self.assertIn(b'opencensus_my.org/views/video_size 268435456.0',
                  contents)
def test_create_timeseries_float_tagvalue(self, monitor_resource_mock):
    """A float SumAggregation view should export exactly one point whose
    value is the aggregation's seed plus the recorded measurement."""
    client = mock.Mock()
    option = stackdriver.Options(project_id="project-test",
                                 resource="global")
    exporter = stackdriver.StackdriverStatsExporter(options=option,
                                                    client=client)

    stats = stats_module.Stats()
    view_manager = stats.view_manager
    stats_recorder = stats.stats_recorder

    # Clear any exporter left behind by a previous test.
    if len(view_manager.measure_to_view_map.exporters) > 0:
        view_manager.unregister_exporter(
            view_manager.measure_to_view_map.exporters[0])
    view_manager.register_exporter(exporter)

    agg_3 = aggregation_module.SumAggregation(sum=2.2)
    view_name3 = "view-name3"
    new_view3 = view_module.View(
        view_name3, "processed video size over time",
        [FRONTEND_KEY_FLOAT], VIDEO_SIZE_MEASURE_FLOAT, agg_3)
    view_manager.register_view(new_view3)

    tag_map = tag_map_module.TagMap()
    tag_map.insert(FRONTEND_KEY_FLOAT, tag_value_module.TagValue("1200"))
    measure_map = stats_recorder.new_measurement_map()
    measure_map.measure_float_put(VIDEO_SIZE_MEASURE_FLOAT, 25 * MiB)
    measure_map.record(tag_map)

    v_data = measure_map.measure_to_view_map.get_view(view_name3, None)
    time_series_list = exporter.create_time_series_list(
        v_data, "global", "")

    self.assertEqual(len(time_series_list), 1)
    [time_series] = time_series_list
    self.assertEqual(time_series.metric.type,
                     "custom.googleapis.com/opencensus/view-name3")
    self.assertCorrectLabels(time_series.metric.labels,
                             {FRONTEND_KEY_FLOAT_CLEAN: "1200"},
                             include_opencensus=True)
    self.assertIsNotNone(time_series.resource)

    self.assertEqual(len(time_series.points), 1)
    expected_value = monitoring_v3.types.TypedValue()
    # 2.2 seed from the SumAggregation plus the 25 MiB recorded above.
    expected_value.double_value = 2.2 + 25 * MiB
    self.assertEqual(time_series.points[0].value, expected_value)
def test_stackdriver_register_exporter(self):
    """After clearing leftovers, registering one exporter should leave
    exactly one exporter on the measure-to-view map."""
    stats = stats_module.Stats()
    view_manager = stats.view_manager

    exporter = mock.Mock()
    # Clear any exporter left behind by a previous test.
    if len(view_manager.measure_to_view_map.exporters) > 0:
        view_manager.unregister_exporter(
            view_manager.measure_to_view_map.exporters[0])
    view_manager.register_exporter(exporter)

    self.assertEqual(len(view_manager.measure_to_view_map.exporters), 1)
def setup_open_census():
    """Register the response-time view on ``app`` and start a Prometheus
    exporter serving the "flask_app" namespace on port 8000."""
    stats_stats = stats.Stats()
    app.view_manager = stats_stats.view_manager
    app.stats_recorder = stats_stats.stats_recorder

    latency_view = view.View(
        "response_time",
        "The time it took to respond",
        [key_method, key_status, key_error],
        m_response_ms,
        aggregation.LastValueAggregation())

    app.exporter = prometheus.new_stats_exporter(
        prometheus.Options(namespace="flask_app", port=8000))
    app.view_manager.register_exporter(app.exporter)
    app.view_manager.register_view(latency_view)
def test_create_timeseries_last_value_float_tagvalue(
        self, monitor_resource_mock):
    """A LastValue view exported with a custom metric prefix should carry
    the prefixed metric type."""
    client = mock.Mock()
    option = stackdriver.Options(project_id="project-test",
                                 resource="global")
    exporter = stackdriver.StackdriverStatsExporter(options=option,
                                                    client=client)

    stats = stats_module.Stats()
    view_manager = stats.view_manager
    stats_recorder = stats.stats_recorder

    # Drop any exporter left registered by a previous test.
    if len(view_manager.measure_to_view_map.exporters) > 0:
        view_manager.unregister_exporter(
            view_manager.measure_to_view_map.exporters[0])
    view_manager.register_exporter(exporter)

    agg_1 = aggregation_module.LastValueAggregation(value=2)
    view_name1 = "view-name1"
    new_view1 = view_module.View(
        view_name1, "processed video size over time",
        [FRONTEND_KEY_FLOAT], VIDEO_SIZE_MEASURE_FLOAT, agg_1)
    view_manager.register_view(new_view1)

    # NOTE(review): the tag key and measure below are the INT variants
    # while the registered view uses the FLOAT key/measure — looks like
    # it exercises the non-matching-record path, but confirm intent.
    tag_value_int = tag_value_module.TagValue("Abc")
    tag_map = tag_map_module.TagMap()
    tag_map.insert(FRONTEND_KEY_INT, tag_value_int)
    measure_map = stats_recorder.new_measurement_map()
    measure_map.measure_int_put(VIDEO_SIZE_MEASURE, 25 * MiB)
    measure_map.record(tag_map)

    v_data = measure_map.measure_to_view_map.get_view(view_name1, None)
    time_series = exporter.create_time_series_list(
        v_data, "global", "kubernetes.io/myorg")
    # FIX: assertEquals is a deprecated alias (removed in Python 3.12);
    # use assertEqual.
    self.assertEqual(time_series.metric.type,
                     "kubernetes.io/myorg/view-name1")
    self.assertIsNotNone(time_series)
def enable_metrics_views():
    """Register the pymemcache call-count and latency-distribution views
    with the global view manager so recorded measurements aggregate."""
    calls_view = view_module.View(
        "pymemcache/calls", "The number of calls",
        [key_method, key_error, key_status], m_calls,
        aggregation_module.CountAggregation())

    latency_view = view_module.View(
        "pymemcache/latency", "The distribution of the latencies",
        [key_method, key_error, key_status], m_latency_ms,
        aggregation_module.DistributionAggregation([
            # Latency in buckets:
            # [>=0ms, >=5ms, >=10ms, >=25ms, >=40ms, >=50ms, >=75ms,
            #  >=100ms, >=200ms, >=400ms, >=600ms, >=800ms, >=1s, >=2s,
            #  >=4s, >=6s, >=10s, >=20s]
            0, 5, 10, 25, 40, 50, 75, 100, 200, 400, 600, 800, 1000,
            2000, 4000, 6000, 10000, 20000
        ]))

    view_manager = stats.Stats().view_manager
    view_manager.register_view(calls_view)
    view_manager.register_view(latency_view)
def test_create_timeseries_float_tagvalue(self, monitor_resource_mock):
    """A float SumAggregation view exported to the "global" resource
    should carry the custom.googleapis.com metric type."""
    client = mock.Mock()
    option = stackdriver.Options(project_id="project-test",
                                 resource="global")
    exporter = stackdriver.StackdriverStatsExporter(options=option,
                                                    client=client)

    stats = stats_module.Stats()
    view_manager = stats.view_manager
    stats_recorder = stats.stats_recorder

    # Drop any exporter left registered by a previous test.
    if len(view_manager.measure_to_view_map.exporters) > 0:
        view_manager.unregister_exporter(
            view_manager.measure_to_view_map.exporters[0])
    view_manager.register_exporter(exporter)

    agg_2 = aggregation_module.SumAggregation(sum=2.2)
    view_name2 = "view-name2"
    new_view2 = view_module.View(
        view_name2, "processed video size over time",
        [FRONTEND_KEY_FLOAT], VIDEO_SIZE_MEASURE_FLOAT, agg_2)
    view_manager.register_view(new_view2)

    tag_value_float = tag_value_module.TagValue("1200")
    tag_map = tag_map_module.TagMap()
    tag_map.insert(FRONTEND_KEY_FLOAT, tag_value_float)
    measure_map = stats_recorder.new_measurement_map()
    measure_map.measure_float_put(VIDEO_SIZE_MEASURE_FLOAT, 25 * MiB)
    measure_map.record(tag_map)

    v_data = measure_map.measure_to_view_map.get_view(view_name2, None)
    time_series = exporter.create_time_series_list(v_data, "global", "")
    # FIX: assertEquals is a deprecated alias (removed in Python 3.12);
    # use assertEqual.
    self.assertEqual(time_series.metric.type,
                     "custom.googleapis.com/opencensus/view-name2")
    self.assertIsNotNone(time_series)
def test_create_timeseries(self, monitor_resource_mock):
    """Default resource should export as "global"; a metric prefix
    should replace the custom.googleapis.com prefix."""
    client = mock.Mock()
    option = stackdriver.Options(project_id="project-test",
                                 resource="global")
    exporter = stackdriver.StackdriverStatsExporter(options=option,
                                                    client=client)

    stats = stats_module.Stats()
    view_manager = stats.view_manager
    stats_recorder = stats.stats_recorder

    # Drop any exporter left registered by a previous test.
    if len(view_manager.measure_to_view_map.exporters) > 0:
        view_manager.unregister_exporter(
            view_manager.measure_to_view_map.exporters[0])
    view_manager.register_exporter(exporter)
    view_manager.register_view(VIDEO_SIZE_VIEW)

    tag_value = tag_value_module.TagValue("1200")
    tag_map = tag_map_module.TagMap()
    tag_map.insert(FRONTEND_KEY, tag_value)
    measure_map = stats_recorder.new_measurement_map()
    measure_map.measure_int_put(VIDEO_SIZE_MEASURE, 25 * MiB)
    measure_map.record(tag_map)

    v_data = measure_map.measure_to_view_map.get_view(
        VIDEO_SIZE_VIEW_NAME, None)

    # FIX: assertEquals is a deprecated alias (removed in Python 3.12);
    # use assertEqual throughout.
    time_series = exporter.create_time_series_list(v_data, "", "")
    self.assertEqual(time_series.resource.type, "global")
    self.assertEqual(
        time_series.metric.type,
        "custom.googleapis.com/opencensus/my.org/views/video_size_test2")
    self.assertIsNotNone(time_series)

    time_series = exporter.create_time_series_list(
        v_data, "global", "kubernetes.io/myorg")
    self.assertEqual(time_series.metric.type,
                     "kubernetes.io/myorg/my.org/views/video_size_test2")
    self.assertIsNotNone(time_series)
def setup_create_timeseries_test(self):
    """Build a Stats/exporter pair against a mocked client and return
    ``(view_manager, stats_recorder, exporter)`` for time-series tests."""
    client = mock.Mock()
    execution_context.clear()

    option = stackdriver.Options(project_id="project-test",
                                 resource="global")
    exporter = stackdriver.StackdriverStatsExporter(options=option,
                                                    client=client)

    stats = stats_module.Stats()
    view_manager = stats.view_manager
    stats_recorder = stats.stats_recorder

    # Make sure no exporter from a previous test is still registered.
    if len(view_manager.measure_to_view_map.exporters) > 0:
        view_manager.unregister_exporter(
            view_manager.measure_to_view_map.exporters[0])
    view_manager.register_exporter(exporter)

    return view_manager, stats_recorder, exporter
def test_get_metrics(self):
    """Test that Stats converts recorded values into metrics."""
    stats = stats_module.Stats()

    # Check that metrics are empty before view registration
    initial_metrics = list(stats.get_metrics())
    self.assertEqual(initial_metrics, [])

    # Build a mock view whose descriptor says "cumulative distribution".
    mock_measure = Mock(spec=measure.MeasureFloat)
    mock_md = Mock(spec=metric_descriptor.MetricDescriptor)
    mock_md.type =\
        metric_descriptor.MetricDescriptorType.CUMULATIVE_DISTRIBUTION
    mock_view = Mock(spec=view.View)
    mock_view.measure = mock_measure
    mock_view.get_metric_descriptor.return_value = mock_md
    mock_view.columns = ['k1']
    stats.view_manager.measure_to_view_map.register_view(mock_view, Mock())

    # Check that metrics are still empty until we record
    empty_metrics = list(stats.get_metrics())
    self.assertEqual(empty_metrics, [])

    mm = stats.stats_recorder.new_measurement_map()
    # Inject the measurement directly rather than via measure_float_put.
    mm._measurement_map = {mock_measure: 1.0}
    mock_view.aggregation = aggregation.DistributionAggregation()
    tm = tag_map.TagMap()
    tm.insert('k1', 'v1')
    mm.record(tm)

    # One recorded value -> one metric, one time series, one
    # distribution-valued point.
    metrics = list(stats.get_metrics())
    self.assertEqual(len(metrics), 1)
    [metric] = metrics
    self.assertEqual(len(metric.time_series), 1)
    [ts] = metric.time_series
    self.assertEqual(len(ts.points), 1)
    [point] = ts.points
    self.assertTrue(isinstance(point.value, value.ValueDistribution))
def setUp(self):
    """Create a fresh Stats instance and register every pip-check view."""
    self._fake_pip_path = os.path.join(
        os.path.dirname(__file__), 'fake_pip.py')
    self._stats = stats_module.Stats()
    for each_view in views.ALL_VIEWS:
        self._stats.view_manager.register_view(each_view)
def test_constructor(self):
    """The public view_manager/stats_recorder properties should expose
    the underlying private attributes."""
    instance = stats_module.Stats()
    self.assertEqual(instance._view_manager, instance.view_manager)
    self.assertEqual(instance._stats_recorder, instance.stats_recorder)
import logging
import os
import time

from opencensus import tags
from opencensus.stats import aggregation
from opencensus.stats import measure
from opencensus.stats import stats
from opencensus.stats import view
from opencensus.stats.exporters import stackdriver_exporter
from opencensus.stats.exporters.base import StatsExporter
from opencensus.tags import execution_context
from opencensus.tags.propagation import binary_serializer

_logger = logging.getLogger('fireci.stats')

# Process-wide Stats instance shared by the helpers in this module.
STATS = stats.Stats()

# Measures: per-stage latency in milliseconds and a success/failure flag.
_m_latency = measure.MeasureFloat("latency", "The latency in milliseconds",
                                  "ms")
_m_success = measure.MeasureInt("success", "Indicated success or failure.",
                                "1")

# Tag key identifying which CI stage a measurement belongs to.
_key_stage = tags.TagKey("stage")

# All tag keys attached to recorded measurements.
_TAGS = [
    _key_stage,
    tags.TagKey("repo_owner"),
    tags.TagKey("repo_name"),
    tags.TagKey("pull_number"),
    tags.TagKey("job_name"),
]
import sys import wsgiref.simple_server import pip_checker import views from google import auth as google_auth from opencensus.stats import stats as stats_module from opencensus.stats.exporters import stackdriver_exporter PYTHON_VERSION_TO_COMMAND = { '2': ['python2', '-m', 'pip'], '3': ['python3', '-m', 'pip'], } STATS = stats_module.Stats() app = flask.Flask(__name__) def _get_project_id(): # get project id from default setting try: _, project_id = google_auth.default() except google_auth.exceptions.DefaultCredentialsError: raise ValueError("Couldn't find Google Cloud credentials, set the " "project ID with 'gcloud set project'") return project_id def _enable_exporter(): """Create and register the stackdriver exporter.
from opencensus.tags import tag_map as tag_map_module
from opencensus.tags import tag_value as tag_value_module

MiB = 1 << 20  # 1 mebibyte in bytes

# Tag key identifying the frontend that produced a measurement.
FRONTEND_KEY = tag_key_module.TagKey("my.org/keys/frontend")

# Measure: size of processed videos, in bytes.
VIDEO_SIZE_MEASURE = measure_module.MeasureInt(
    "my.org/measure/video_size_test2", "size of processed videos", "By")
VIDEO_SIZE_VIEW_NAME = "my.org/views/video_size_test2"
# Distribution buckets at 0, 16 MiB and 256 MiB boundaries.
VIDEO_SIZE_DISTRIBUTION = aggregation_module.DistributionAggregation(
    [0.0, 16.0 * MiB, 256.0 * MiB])
VIDEO_SIZE_VIEW = view_module.View(VIDEO_SIZE_VIEW_NAME,
                                   "processed video size over time",
                                   [FRONTEND_KEY],
                                   VIDEO_SIZE_MEASURE,
                                   VIDEO_SIZE_DISTRIBUTION)

stats = stats_module.Stats()
view_manager = stats.view_manager
stats_recorder = stats.stats_recorder

exporter = stackdriver.new_stats_exporter(
    stackdriver.Options(project_id="opencenus-node"))
view_manager.register_exporter(exporter)

# Register view.
view_manager.register_view(VIDEO_SIZE_VIEW)

# Sleep for [0, 10] milliseconds to fake work.
time.sleep(random.randint(1, 10) / 1000.0)

# Process video.
# Record the processed video size.
r = requests.get('http://metadata/computeMetadata/v1/project/project-id', headers={'Metadata-Flavor': 'Google'}, timeout=2) return r.text app = Flask(__name__) PARAM_TAG = 'param' m_requests = measure.MeasureInt("app/requests", "Number of requests", "1") requests_view = view.View("opencensus-example/app/requests", "The sum of requests", [PARAM_TAG], m_requests, aggregation.SumAggregation()) op_stats = stats.Stats() exporter = stackdriver_exporter.new_stats_exporter( stackdriver_exporter.Options(project_id=project_id())) op_stats.view_manager.register_exporter(exporter) op_stats.view_manager.register_view(requests_view) @app.route('/') def hello_world(): mmap = op_stats.stats_recorder.new_measurement_map() mmap.measure_int_put(m_requests, 1) tmap = TagMap() param = request.args.get('param') if param: tmap.insert(PARAM_TAG, param) mmap.record(tmap)