def test_stats_record_async(self):
    """Record a distribution sample and verify its exported descriptor."""
    # A per-process suffix keeps every name unique so objects cached by
    # the backend from earlier runs cannot interfere with this test.
    suffix = str(os.getpid())
    tag_key = "SampleKeyAsyncTest%s" % suffix
    measure_name = "SampleMeasureNameAsyncTest%s" % suffix
    measure_description = "SampleDescriptionAsyncTest%s" % suffix
    view_name = "SampleViewNameAsyncTest%s" % suffix
    view_description = "SampleViewDescriptionAsyncTest%s" % suffix

    frontend_key = tag_key_module.TagKey(tag_key)
    video_size_measure = measure_module.MeasureInt(
        measure_name, measure_description, "By")
    video_size_distribution = aggregation_module.DistributionAggregation(
        [0.0, 16.0 * MiB, 256.0 * MiB])
    video_size_view = view_module.View(
        view_name, view_description, [frontend_key],
        video_size_measure, video_size_distribution)

    stats = stats_module.Stats()
    view_manager = stats.view_manager
    stats_recorder = stats.stats_recorder

    exporter = stackdriver.new_stats_exporter(
        stackdriver.Options(project_id=PROJECT))
    view_manager.register_exporter(exporter)
    view_manager.register_view(video_size_view)

    # Sleep a few milliseconds to fake work (video processing).
    time.sleep(random.randint(1, 10) / 1000.0)

    # Record one processed-video-size sample tagged with the frontend key.
    tag_map = tag_map_module.TagMap()
    tag_map.insert(frontend_key, tag_value_module.TagValue("1200"))
    measure_map = stats_recorder.new_measurement_map()
    measure_map.measure_int_put(video_size_measure, 25 * MiB)
    measure_map.record(tag_map)

    @retry(wait_fixed=RETRY_WAIT_PERIOD,
           stop_max_attempt_number=RETRY_MAX_ATTEMPT)
    def get_metric_descriptors(self, exporter, view_description):
        # The export happens asynchronously, hence the retry wrapper.
        name = exporter.client.project_path(PROJECT)
        descriptors = exporter.client.list_metric_descriptors(name)
        match = next((d for d in descriptors
                      if d.description == view_description), None)
        self.assertIsNotNone(match)
        self.assertEqual(match.description, view_description)
        self.assertEqual(match.unit, "By")

    get_metric_descriptors(self, exporter, view_description)
def stats():
    """Return the process-wide Stats object, creating it on first use."""
    global _stats
    if _stats:
        return _stats
    new_stats = stats_module.Stats()
    # Only export to Stackdriver when actually running on GAE, never
    # during tests — otherwise the collected data stays local.
    if environment.in_gae() and not environment.in_test():
        exporter = stackdriver.new_stats_exporter(
            stackdriver.Options(project_id=metadata.project_id(),))
        new_stats.view_manager.register_exporter(exporter)
    _stats = new_stats
    return _stats
def test_not_blank_project(self):
    """A non-blank project id yields a StackdriverStatsExporter."""
    target = ('opencensus.stats.exporters.stackdriver_exporter'
              '.monitoring_v3.MetricServiceClient')
    with mock.patch(target, _Client):
        exporter_created = stackdriver.new_stats_exporter(
            stackdriver.Options(project_id=1))
    self.assertIsInstance(
        exporter_created, stackdriver.StackdriverStatsExporter)
def test_singleton_with_params(self):
    """Labels passed via Options end up on the created exporter."""
    default_labels = {'key1': 'value1'}
    target = ('opencensus.stats.exporters.stackdriver_exporter'
              '.monitoring_v3.MetricServiceClient')
    with mock.patch(target, _Client):
        exporter_created = stackdriver.new_stats_exporter(
            stackdriver.Options(
                project_id=1,
                default_monitoring_labels=default_labels))
    self.assertEqual(exporter_created.default_labels, default_labels)
def _enable_exporter():
    """Create and register the stackdriver exporter.

    For any data to be exported to stackdriver, an exporter needs to be
    created and registered with the view manager. Collected data will be
    reported via all the registered exporters. By not creating and
    registering an exporter, all collected data will stay local and will
    not appear on stackdriver.
    """
    options = stackdriver_exporter.Options(project_id=_get_project_id())
    STATS.view_manager.register_exporter(
        stackdriver_exporter.new_stats_exporter(options))
def _new_exporter():
    """Initialize a metrics exporter.

    Prefers a Stackdriver exporter; falls back to a StdoutExporter when
    no Google Cloud default credentials are available.
    """
    try:
        _, project_id = google.auth.default()
        options = stackdriver_exporter.Options(
            project_id=project_id, resource='global')
        return stackdriver_exporter.new_stats_exporter(options)
    except google.auth.exceptions.DefaultCredentialsError:
        _logger.exception("Using stdout exporter")
        return StdoutExporter()
def test_client_info_user_agent(self):
    """Check that the monitoring client sets a user agent.

    The user agent should include the library version. Note that this
    assumes MetricServiceClient calls ClientInfo.to_user_agent to attach
    the user agent as metadata to metric service API calls.
    """
    target = ('opencensus.stats.exporters.stackdriver_exporter'
              '.monitoring_v3.MetricServiceClient')
    with mock.patch(target, _Client):
        exporter = stackdriver.new_stats_exporter(
            stackdriver.Options(project_id=1))
    user_agent = exporter.client.client_info.to_user_agent()
    self.assertIn(stackdriver.get_user_agent_slug(), user_agent)
def main():
    """Export 100 random latency samples to Stackdriver, one per second."""
    # Wire the Stackdriver exporter into the view manager and register
    # the latency view so recorded measurements get exported.
    exporter = stackdriver_exporter.new_stats_exporter(
        stackdriver_exporter.Options(project_id=project_id))
    view_manager.register_exporter(exporter)
    view_manager.register_view(latency_view)

    measurement_map = stats_recorder.new_measurement_map()
    tag_map = tag_map_module.TagMap()
    for i in range(100):
        # Fake latency in the [0, 5000) ms range.
        ms = random.random() * 5 * 1000
        print("Latency {0}:{1}".format(i, ms))
        measurement_map.measure_float_put(m_latency_ms, ms)
        measurement_map.record(tag_map)
        time.sleep(1)
    print("Done recording metrics")
# Tag key used to break recorded sizes down by frontend.
FRONTEND_KEY = tag_key_module.TagKey("my.org/keys/frontend")

# Integer measure: size of processed videos, in bytes.
VIDEO_SIZE_MEASURE = measure_module.MeasureInt(
    "my.org/measure/video_size_test2", "size of processed videos", "By")
VIDEO_SIZE_VIEW_NAME = "my.org/views/video_size_test2"
VIDEO_SIZE_DISTRIBUTION = aggregation_module.DistributionAggregation(
    [0.0, 16.0 * MiB, 256.0 * MiB])
VIDEO_SIZE_VIEW = view_module.View(
    VIDEO_SIZE_VIEW_NAME, "processed video size over time",
    [FRONTEND_KEY], VIDEO_SIZE_MEASURE, VIDEO_SIZE_DISTRIBUTION)

stats = stats_module.Stats()
view_manager = stats.view_manager
stats_recorder = stats.stats_recorder

# Export collected stats to Stackdriver and register the view.
exporter = stackdriver.new_stats_exporter(
    stackdriver.Options(project_id="opencenus-node"))
view_manager.register_exporter(exporter)
view_manager.register_view(VIDEO_SIZE_VIEW)

# Sleep a few milliseconds to fake work (video processing).
time.sleep(random.randint(1, 10) / 1000.0)

# Record the processed video size.
tag_value = tag_value_module.TagValue(str(1200))
tag_map = tag_map_module.TagMap()
tag_map.insert(FRONTEND_KEY, tag_value)
measure_map = stats_recorder.new_measurement_map()
measure_map.measure_int_put(VIDEO_SIZE_MEASURE, 25 * MiB)
def test_not_blank_project(self):
    """A non-blank project id produces a StackdriverStatsExporter."""
    options = stackdriver.Options(project_id=1)
    exporter_created = stackdriver.new_stats_exporter(options)
    self.assertIsInstance(
        exporter_created, stackdriver.StackdriverStatsExporter)
def test_singleton_with_params(self):
    """default_monitoring_labels passed in Options reach the exporter."""
    default_labels = {'key1': 'value1'}
    options = stackdriver.Options(
        project_id=1, default_monitoring_labels=default_labels)
    exporter_created = stackdriver.new_stats_exporter(options)
    self.assertEqual(exporter_created.default_labels, default_labels)
# NOTE(review): this chunk begins mid-statement — the first line below closes
# a DistributionAggregation(...) call whose opening lies before this view.
[0.0, 16.0 * MiB, 256.0 * MiB])
# View of processed video size over time, bucketed by the distribution above
# and broken down by the frontend tag key.
VIDEO_SIZE_VIEW = view_module.View(
    VIDEO_SIZE_VIEW_NAME, "processed video size over time",
    [FRONTEND_KEY], VIDEO_SIZE_MEASURE, VIDEO_SIZE_DISTRIBUTION)
stats = stats_module.Stats()
view_manager = stats.view_manager
stats_recorder = stats.stats_recorder
# Resolve the project id from application-default credentials; fail fast
# with an actionable message when none are configured.
try:
    _, project_id = google.auth.default()
except google.auth.exceptions.DefaultCredentialsError:
    raise ValueError("Couldn't find Google Cloud credentials, set the "
                     "project ID with 'gcloud set project'")
# Export collected stats to Stackdriver.
exporter = stackdriver.new_stats_exporter(
    stackdriver.Options(project_id=project_id))
view_manager.register_exporter(exporter)
# Register view.
view_manager.register_view(VIDEO_SIZE_VIEW)
# Sleep for [1, 10] milliseconds to fake work (randint is inclusive).
time.sleep(random.randint(1, 10) / 1000.0)
# Process video.
# Record the processed video size.
tag_value = tag_value_module.TagValue(str(1200))
tag_map = tag_map_module.TagMap()
tag_map.insert(FRONTEND_KEY, tag_value)
measure_map = stats_recorder.new_measurement_map()
measure_map.measure_int_put(VIDEO_SIZE_MEASURE, 25 * MiB)