def test_constructor_defaults(self):
    """A MeasureInt built without a unit stores name/description and unit=None."""
    name = "testName"
    description = "testMeasure"
    measure = measure_module.MeasureInt(name=name, description=description)
    # Also pin the explicitly-passed fields, not just the defaulted one.
    self.assertEqual(name, measure.name)
    self.assertEqual(description, measure.description)
    # assertIsNone is the idiomatic unittest check (identity, clearer failure
    # message than assertEqual(None, ...)).
    self.assertIsNone(measure.unit)
def test_prometheus_stats(self):
    """End-to-end test: record tagged counts through a view and verify the
    Prometheus exporter serves them over its HTTP scrape endpoint.

    NOTE(review): starts a real exporter on localhost:9303 and performs live
    HTTP requests — assumes the port is free; not hermetic.
    """
    # View setup: count requests, broken down by the "method" tag.
    method_key = tag_key_module.TagKey("method")
    request_count_measure = measure_module.MeasureInt(
        "request_count", "number of requests", "1")
    request_count_view_name = "request_count_view"
    count_agg = aggregation_module.CountAggregation()
    request_count_view = view_module.View(
        request_count_view_name,
        "number of requests broken down by methods",
        [method_key], request_count_measure, count_agg)
    stats = stats_module.stats
    view_manager = stats.view_manager
    stats_recorder = stats.stats_recorder
    # The exporter starts an HTTP server on port 9303 that the assertions
    # below scrape.
    exporter = prometheus.new_stats_exporter(
        prometheus.Options(namespace="opencensus", port=9303))
    view_manager.register_exporter(exporter)
    view_manager.register_view(request_count_view)
    # Sleep for [1, 10] milliseconds to fake work.
    time.sleep(random.randint(1, 10) / 1000.0)

    # Record one measurement tagged method="some method".
    method_value_1 = tag_value_module.TagValue("some method")
    tag_map_1 = tag_map_module.TagMap()
    tag_map_1.insert(method_key, method_value_1)
    measure_map_1 = stats_recorder.new_measurement_map()
    measure_map_1.measure_int_put(request_count_measure, 1)
    measure_map_1.record(tag_map_1)

    # Record twice with method="some other method" — expected count 2.0 below.
    method_value_2 = tag_value_module.TagValue("some other method")
    tag_map_2 = tag_map_module.TagMap()
    tag_map_2.insert(method_key, method_value_2)
    measure_map_2 = stats_recorder.new_measurement_map()
    measure_map_2.measure_int_put(request_count_measure, 1)
    measure_map_2.record(tag_map_2)
    measure_map_2.record(tag_map_2)

    # Scrape the exporter; urllib location differs between Python 2 and 3.
    if sys.version_info > (3, 0):
        import urllib.request
        contents = urllib.request.urlopen(
            "http://localhost:9303/metrics").read()
    else:
        import urllib2
        contents = urllib2.urlopen("http://localhost:9303/metrics").read()

    # CountAggregation views are exported as Prometheus counters, namespaced
    # and suffixed "_total", with one time series per tag-value combination.
    self.assertIn(b'# TYPE opencensus_request_count_view_total counter',
                  contents)
    self.assertIn(
        b'opencensus_request_count_view_total'
        b'{method="some method"} 1.0', contents)
    self.assertIn(
        b'opencensus_request_count_view_total'
        b'{method="some other method"} 2.0', contents)
def test_constructor_explicit(self):
    """Constructor arguments are stored verbatim on the measure."""
    measure = measure_module.MeasureInt(name="testName",
                                        description="testMeasure",
                                        unit="testUnit")
    self.assertEqual("testName", measure.name)
    self.assertEqual("testMeasure", measure.description)
    self.assertEqual("testUnit", measure.unit)
def test_stats_record_sync(self):
    """Record a distribution measurement and export it synchronously to
    Stackdriver, then verify the metric descriptor exists.

    NOTE(review): talks to the live Stackdriver API for project PROJECT —
    not hermetic.
    """
    # We are using a suffix in order to prevent cached objects.
    # BUG FIX: was os.getgid() (the group id, constant for a given user),
    # so the suffix did not actually vary between runs; use the process id,
    # matching test_stats_record_async.
    suffix = str(os.getpid())
    tag_key = "SampleKeySyncTest%s" % suffix
    measure_name = "SampleMeasureNameSyncTest%s" % suffix
    measure_description = "SampleDescriptionSyncTest%s" % suffix
    view_name = "SampleViewNameSyncTest%s" % suffix
    view_description = "SampleViewDescriptionSyncTest%s" % suffix

    FRONTEND_KEY = tag_key_module.TagKey(tag_key)
    VIDEO_SIZE_MEASURE = measure_module.MeasureInt(
        measure_name, measure_description, "By")
    VIDEO_SIZE_VIEW_NAME = view_name
    VIDEO_SIZE_DISTRIBUTION = aggregation_module.DistributionAggregation(
        [0.0, 16.0 * MiB, 256.0 * MiB])
    VIDEO_SIZE_VIEW = view_module.View(
        VIDEO_SIZE_VIEW_NAME, view_description, [FRONTEND_KEY],
        VIDEO_SIZE_MEASURE, VIDEO_SIZE_DISTRIBUTION)

    stats = stats_module.stats
    view_manager = stats.view_manager
    stats_recorder = stats.stats_recorder

    # Explicit client so we export synchronously below instead of relying on
    # the background transport thread.
    client = monitoring_v3.MetricServiceClient()
    exporter = stackdriver.StackdriverStatsExporter(
        options=stackdriver.Options(project_id=PROJECT), client=client)
    view_manager.register_exporter(exporter)

    # Register view.
    view_manager.register_view(VIDEO_SIZE_VIEW)

    # Sleep for [1, 10] milliseconds to fake work.
    time.sleep(random.randint(1, 10) / 1000.0)

    # Process video. Record the processed video size.
    tag_value = tag_value_module.TagValue("1200")
    tag_map = tag_map_module.TagMap()
    tag_map.insert(FRONTEND_KEY, tag_value)
    measure_map = stats_recorder.new_measurement_map()
    measure_map.measure_int_put(VIDEO_SIZE_MEASURE, 25 * MiB)
    measure_map.record(tag_map)

    # Push directly rather than waiting for the export interval.
    exporter.export_metrics(stats_module.stats.get_metrics())

    # Sleep for [1, 10] milliseconds to fake wait.
    time.sleep(random.randint(1, 10) / 1000.0)

    self.check_sd_md(exporter, view_description)
def test_stats_record_async(self):
    """Record a distribution measurement and let the Stackdriver exporter's
    background transport push it, then verify the metric descriptor exists.

    NOTE(review): talks to the live Stackdriver API for project PROJECT —
    not hermetic.
    """
    # We are using a suffix in order to prevent cached objects.
    suffix = str(os.getpid())
    tag_key = "SampleKeyAsyncTest%s" % suffix
    measure_name = "SampleMeasureNameAsyncTest%s" % suffix
    measure_description = "SampleDescriptionAsyncTest%s" % suffix
    view_name = "SampleViewNameAsyncTest%s" % suffix
    view_description = "SampleViewDescriptionAsyncTest%s" % suffix

    frontend_key = tag_key_module.TagKey(tag_key)
    video_size_measure = measure_module.MeasureInt(
        measure_name, measure_description, "By")
    video_size_distribution = aggregation_module.DistributionAggregation(
        [0.0, 16.0 * MiB, 256.0 * MiB])
    video_size_view = view_module.View(
        view_name, view_description, [frontend_key],
        video_size_measure, video_size_distribution)

    stats = stats_module.stats
    view_manager = stats.view_manager
    stats_recorder = stats.stats_recorder

    exporter = stackdriver.new_stats_exporter(
        stackdriver.Options(project_id=PROJECT))
    view_manager.register_exporter(exporter)

    # Register view.
    view_manager.register_view(video_size_view)

    # Sleep for [1, 10] milliseconds to fake work.
    time.sleep(random.randint(1, 10) / 1000.0)

    # Process video. Record the processed video size.
    tag_map = tag_map_module.TagMap()
    tag_map.insert(frontend_key, tag_value_module.TagValue("1200"))
    measure_map = stats_recorder.new_measurement_map()
    measure_map.measure_int_put(video_size_measure, 25 * MiB)
    measure_map.record(tag_map)

    # Give the exporter thread enough time to export exactly once.
    time.sleep(transport.DEFAULT_INTERVAL * 1.5)

    self.check_sd_md(exporter, view_description)
def test_record_with_attachment(self):
    """Recording with attachments stores them on the exemplar kept for the
    distribution bucket that the recorded value falls into."""
    boundaries = [1, 2, 3]
    distribution_aggregation = aggregation_module.DistributionAggregation(
        boundaries=boundaries)
    name = "testName"
    description = "testMeasure"
    unit = "testUnit"
    measure = measure_module.MeasureInt(
        name=name, description=description, unit=unit)
    columns = ["key1", "key2"]
    view = view_module.View(
        name=name, description=description, columns=columns,
        measure=measure, aggregation=distribution_aggregation)
    start_time = datetime.utcnow()
    attachments = {"One": "one", "Two": "two"}
    end_time = datetime.utcnow()
    view_data = view_data_module.ViewData(
        view=view, start_time=start_time, end_time=end_time)
    # BUG FIX: was `mock.Mock` (the class itself, no parentheses); setting
    # .map on the class leaked state onto every Mock. Use an instance.
    context = mock.Mock()
    context.map = {'key1': 'val1', 'key2': 'val2'}
    # Renamed from `time` to avoid shadowing the stdlib time module.
    timestamp = utils.to_iso_str()
    value = 1
    view_data.record(context=context, value=value, timestamp=timestamp,
                     attachments=attachments)
    tag_values = view_data.get_tag_values(
        tags=context.map, columns=view.columns)
    tuple_vals = tuple(tag_values)

    self.assertEqual(['val1', 'val2'], tag_values)
    self.assertIsNotNone(view_data.tag_value_aggregation_data_map)
    # assertIn gives a clearer failure message than assertTrue(x in y).
    self.assertIn(tuple_vals, view_data.tag_value_aggregation_data_map)
    self.assertIsNotNone(
        view_data.tag_value_aggregation_data_map[tuple_vals])
    # value=1 with boundaries [1, 2, 3] lands in bucket index 1, so the
    # attachments must appear on that bucket's exemplar.
    self.assertEqual(
        attachments,
        view_data.tag_value_aggregation_data_map[tuple_vals]
        .exemplars[1].attachments)
from opencensus.stats import aggregation as aggregation_module from opencensus.stats import measure_oc as measure_module from opencensus.stats import stats as stats_module from opencensus.stats import view as view_module from opencensus.stats import view_data as view_data_module from opencensus.tags import tag_key as tag_key_module from opencensus.tags import tag_map as tag_map_module from opencensus.tags import tag_value as tag_value_module MiB = 1 << 20 FRONTEND_KEY = tag_key_module.TagKey("myorg_keys_frontend") FRONTEND_KEY_FLOAT = tag_key_module.TagKey("myorg_keys_frontend_FLOAT") FRONTEND_KEY_INT = tag_key_module.TagKey("myorg_keys_frontend_INT") FRONTEND_KEY_STR = tag_key_module.TagKey("myorg_keys_frontend_INT") VIDEO_SIZE_MEASURE = measure_module.MeasureInt( "myorg_measure_video_size_test2", "size of processed videos", "By") VIDEO_SIZE_MEASURE_FLOAT = measure_module.MeasureFloat( "myorg_measure_video_size_test_float", "size of processed videos float", "By") VIDEO_SIZE_VIEW_NAME = "myorg_views_video_size_test2" VIDEO_SIZE_DISTRIBUTION = aggregation_module.DistributionAggregation( [16.0 * MiB, 256.0 * MiB]) VIDEO_SIZE_VIEW = view_module.View(VIDEO_SIZE_VIEW_NAME, "processed video size over time", [FRONTEND_KEY], VIDEO_SIZE_MEASURE, VIDEO_SIZE_DISTRIBUTION) REGISTERED_VIEW = { 'test1_myorg_views_video_size_test2': { 'documentation': 'processed video size over time',
import random import time from pprint import pprint from opencensus.stats import aggregation as aggregation_module from opencensus.stats import measure_oc as measure_module from opencensus.stats import stats as stats_module from opencensus.stats import view as view_module from opencensus.tags import tag_key as tag_key_module from opencensus.tags import tag_map as tag_map_module from opencensus.tags import tag_value as tag_value_module MiB = 1 << 20 FRONTEND_KEY = tag_key_module.TagKey("my.org/keys/frontend") VIDEO_SIZE_MEASURE = measure_module.MeasureInt("my.org/measures/video_size", "size of processed videos", "By") VIDEO_SIZE_VIEW_NAME = "my.org/views/video_size" VIDEO_SIZE_DISTRIBUTION = aggregation_module.DistributionAggregation( [0.0, 16.0 * MiB, 256.0 * MiB]) VIDEO_SIZE_VIEW = view_module.View(VIDEO_SIZE_VIEW_NAME, "processed video size over time", [FRONTEND_KEY], VIDEO_SIZE_MEASURE, VIDEO_SIZE_DISTRIBUTION) def main(): stats = stats_module.stats view_manager = stats.view_manager stats_recorder = stats.stats_recorder
# limitations under the License. import time from opencensus.ext.azure import metrics_exporter from opencensus.stats import aggregation as aggregation_module from opencensus.stats import measure_oc as measure_module from opencensus.stats import stats as stats_module from opencensus.stats import view as view_module from opencensus.tags import tag_map as tag_map_module stats = stats_module.stats view_manager = stats.view_manager stats_recorder = stats.stats_recorder CARROTS_MEASURE = measure_module.MeasureInt("carrots", "number of carrots", "carrots") CARROTS_VIEW = view_module.View("carrots_view", "number of carrots", [], CARROTS_MEASURE, aggregation_module.CountAggregation()) def main(): # Enable metrics # Set the interval in seconds in which you want to send metrics # TODO: you need to specify the instrumentation key in a connection string # and place it in the APPLICATIONINSIGHTS_CONNECTION_STRING # environment variable. exporter = metrics_exporter.new_metrics_exporter() view_manager.register_exporter(exporter) view_manager.register_view(CARROTS_VIEW)