def test_create_timeseries_float_tagvalue(self, monitor_resource_mock):
    """Sum-aggregated float view exports one series whose point value is
    the aggregation seed (2.2) plus the recorded measurement."""
    client = mock.Mock()
    option = stackdriver.Options(project_id="project-test",
                                 resource="global")
    exporter = stackdriver.StackdriverStatsExporter(options=option,
                                                    client=client)
    stats = stats_module.Stats()
    view_manager = stats.view_manager
    stats_recorder = stats.stats_recorder
    # Drop any exporter left registered by a previous test before
    # installing ours.
    if len(view_manager.measure_to_view_map.exporters) > 0:
        view_manager.unregister_exporter(
            view_manager.measure_to_view_map.exporters[0])
    view_manager.register_exporter(exporter)
    agg_3 = aggregation_module.SumAggregation(sum=2.2)
    view_name3 = "view-name3"
    new_view3 = view_module.View(view_name3,
                                 "processed video size over time",
                                 [FRONTEND_KEY_FLOAT],
                                 VIDEO_SIZE_MEASURE_FLOAT, agg_3)
    view_manager.register_view(new_view3)
    tag_value_float = tag_value_module.TagValue("1200")
    tag_map = tag_map_module.TagMap()
    tag_map.insert(FRONTEND_KEY_FLOAT, tag_value_float)
    measure_map = stats_recorder.new_measurement_map()
    measure_map.measure_float_put(VIDEO_SIZE_MEASURE_FLOAT, 25 * MiB)
    measure_map.record(tag_map)
    v_data = measure_map.measure_to_view_map.get_view(view_name3, None)
    # Empty metric-prefix argument falls back to the default
    # custom.googleapis.com/opencensus/ prefix.
    time_series_list = exporter.create_time_series_list(
        v_data, "global", "")
    self.assertEqual(len(time_series_list), 1)
    [time_series] = time_series_list
    self.assertEqual(time_series.metric.type,
                     "custom.googleapis.com/opencensus/view-name3")
    self.assertCorrectLabels(time_series.metric.labels,
                             {FRONTEND_KEY_FLOAT_CLEAN: "1200"},
                             include_opencensus=True)
    self.assertIsNotNone(time_series.resource)
    self.assertEqual(len(time_series.points), 1)
    expected_value = monitoring_v3.types.TypedValue()
    # Sum seed (2.2) + one recorded measurement (25 MiB).
    expected_value.double_value = 2.2 + 25 * MiB
    self.assertEqual(time_series.points[0].value, expected_value)
def test_stats_record_sync(self):
    """End-to-end synchronous export: record one measurement and check
    the metric descriptor shows up in Stackdriver."""
    # Suffix names with the process id so reruns don't hit objects cached
    # by a previous run.  This matches test_stats_record_async; the
    # original used os.getgid(), but the group id is identical across
    # runs on the same machine and so never varied.
    suffix = str(os.getpid())
    tag_key = "SampleKeySyncTest%s" % suffix
    measure_name = "SampleMeasureNameSyncTest%s" % suffix
    measure_description = "SampleDescriptionSyncTest%s" % suffix
    view_name = "SampleViewNameSyncTest%s" % suffix
    view_description = "SampleViewDescriptionSyncTest%s" % suffix
    FRONTEND_KEY = tag_key_module.TagKey(tag_key)
    VIDEO_SIZE_MEASURE = measure_module.MeasureInt(measure_name,
                                                   measure_description,
                                                   "By")
    VIDEO_SIZE_VIEW_NAME = view_name
    VIDEO_SIZE_DISTRIBUTION = aggregation_module.DistributionAggregation(
        [0.0, 16.0 * MiB, 256.0 * MiB])
    VIDEO_SIZE_VIEW = view_module.View(VIDEO_SIZE_VIEW_NAME,
                                       view_description, [FRONTEND_KEY],
                                       VIDEO_SIZE_MEASURE,
                                       VIDEO_SIZE_DISTRIBUTION)
    stats = stats_module.stats
    view_manager = stats.view_manager
    stats_recorder = stats.stats_recorder
    client = monitoring_v3.MetricServiceClient()
    exporter = stackdriver.StackdriverStatsExporter(
        options=stackdriver.Options(project_id=PROJECT), client=client)
    view_manager.register_exporter(exporter)
    # Register view.
    view_manager.register_view(VIDEO_SIZE_VIEW)
    # Sleep for [0, 10] milliseconds to fake work.
    time.sleep(random.randint(1, 10) / 1000.0)
    # Process video.
    # Record the processed video size.
    tag_value = tag_value_module.TagValue("1200")
    tag_map = tag_map_module.TagMap()
    tag_map.insert(FRONTEND_KEY, tag_value)
    measure_map = stats_recorder.new_measurement_map()
    measure_map.measure_int_put(VIDEO_SIZE_MEASURE, 25 * MiB)
    measure_map.record(tag_map)
    # Synchronous path: push the current metrics directly.
    exporter.export_metrics(stats_module.stats.get_metrics())
    # Sleep for [0, 10] milliseconds to fake wait.
    time.sleep(random.randint(1, 10) / 1000.0)
    self.check_sd_md(exporter, view_description)
def test_create_timeseries_multiple_tags(self):
    """Check that exporter creates timeseries for multiple tag values.

    create_time_series_list should return a time series for each set of
    values in the tag value aggregation map.
    """
    agg = aggregation_module.CountAggregation(
        aggregation_type=aggregation_module.Type.COUNT)
    view = view_module.View(
        name="example.org/test_view",
        description="example.org/test_view",
        columns=[
            tag_key_module.TagKey('color'),
            tag_key_module.TagKey('shape')
        ],
        measure=mock.Mock(),
        aggregation=agg,
    )
    v_data = view_data_module.ViewData(
        view=view,
        start_time=TEST_TIME_STR,
        end_time=TEST_TIME_STR,
    )
    rs_count = aggregation_data_module.CountAggregationData(10)
    bc_count = aggregation_data_module.CountAggregationData(20)
    # Inject two aggregated rows directly, bypassing the record path.
    v_data._tag_value_aggregation_data_map = {
        ('red', 'square'): rs_count,
        ('blue', 'circle'): bc_count,
    }
    v_data = metric_utils.view_data_to_metric(v_data, TEST_TIME)
    exporter = stackdriver.StackdriverStatsExporter()
    time_series_list = exporter.create_time_series_list(v_data)
    self.assertEqual(len(time_series_list), 2)
    self.assertEqual(len(time_series_list[0].points), 1)
    self.assertEqual(len(time_series_list[1].points), 1)
    # Output order is not guaranteed, so key the series by 'color' label.
    ts_by_color = {
        ts.metric.labels.get('color'): ts
        for ts in time_series_list
    }
    rs_ts = ts_by_color['red']
    bc_ts = ts_by_color['blue']
    self.assertEqual(rs_ts.metric.labels.get('shape'), 'square')
    self.assertEqual(bc_ts.metric.labels.get('shape'), 'circle')
    self.assertEqual(rs_ts.points[0].value.int64_value, 10)
    self.assertEqual(bc_ts.points[0].value.int64_value, 20)
def test_create_metric_descriptor_count(self):
    """A count-aggregated view yields a non-None metric descriptor."""
    count_agg = aggregation_module.CountAggregation(count=2)
    count_view = view_module.View(
        "view-count",
        "processed video size over time",
        [FRONTEND_KEY],
        VIDEO_SIZE_MEASURE,
        count_agg,
    )
    opts = stackdriver.Options(
        project_id="project-test", metric_prefix="teste")
    exporter = stackdriver.StackdriverStatsExporter(
        options=opts, client=mock.Mock())
    self.assertIsNotNone(exporter.create_metric_descriptor(count_view))
def test_record_with_multi_keys(self):
    """Recording under distinct tag-value tuples aggregates each tuple
    separately; repeating a tuple accumulates into its existing entry."""
    measure = mock.Mock()
    sum_aggregation = aggregation_module.SumAggregation()
    view = view_module.View("test_view", "description", ['key1', 'key2'],
                            measure, sum_aggregation)
    start_time = datetime.utcnow()
    end_time = datetime.utcnow()
    view_data = view_data_module.ViewData(view=view,
                                          start_time=start_time,
                                          end_time=end_time)
    context = mock.Mock()
    context.map = {'key1': 'val1', 'key2': 'val2'}
    time = datetime.utcnow().isoformat() + 'Z'
    value = 1
    # No data has been recorded yet.
    self.assertEqual({}, view_data.tag_value_aggregation_data_map)
    view_data.record(context=context, value=value, timestamp=time)
    tag_values = view_data.get_tag_values(tags=context.map,
                                          columns=view.columns)
    tuple_vals = tuple(tag_values)
    self.assertEqual(['val1', 'val2'], tag_values)
    self.assertIsNotNone(view_data.tag_value_aggregation_data_map)
    self.assertTrue(tuple_vals in view_data.tag_value_aggregation_data_map)
    self.assertIsNotNone(
        view_data.tag_value_aggregation_data_map[tuple_vals])
    sum_data = view_data.tag_value_aggregation_data_map.get(tuple_vals)
    self.assertEqual(1, sum_data.sum_data)
    # A second context differing in key1 creates a separate entry.
    context_2 = mock.Mock()
    context_2.map = {'key1': 'val3', 'key2': 'val2'}
    time_2 = datetime.utcnow().isoformat() + 'Z'
    value_2 = 2
    view_data.record(context=context_2, value=value_2, timestamp=time_2)
    tag_values_2 = view_data.get_tag_values(tags=context_2.map,
                                            columns=view.columns)
    tuple_vals_2 = tuple(tag_values_2)
    self.assertEqual(['val3', 'val2'], tag_values_2)
    self.assertTrue(
        tuple_vals_2 in view_data.tag_value_aggregation_data_map)
    sum_data_2 = view_data.tag_value_aggregation_data_map.get(tuple_vals_2)
    self.assertEqual(2, sum_data_2.sum_data)
    time_3 = datetime.utcnow().isoformat() + 'Z'
    value_3 = 3
    # Use the same context {'key1': 'val1', 'key2': 'val2'}.
    # Record to entry [(val1, val2), sum=1].
    view_data.record(context=context, value=value_3, timestamp=time_3)
    self.assertEqual(4, sum_data.sum_data)
    # The other entry should remain unchanged.
    self.assertEqual(2, sum_data_2.sum_data)
def test_create_metric_descriptor_base(self):
    """create_metric_descriptor must reject the abstract BaseAggregation."""
    base_view = view_module.View(
        "view-base",
        "processed video size over time",
        [FRONTEND_KEY],
        VIDEO_SIZE_MEASURE,
        aggregation_module.BaseAggregation(),
    )
    exporter = stackdriver.StackdriverStatsExporter(
        options=stackdriver.Options(
            project_id="project-test", metric_prefix="teste"),
        client=mock.Mock(),
    )
    self.assertRaises(
        Exception, exporter.create_metric_descriptor, base_view)
def test_create_metric_descriptor_last_value_float(self):
    """A last-value view over a float measure yields a descriptor."""
    last_value_view = view_module.View(
        "view-base",
        "processed video size over time",
        [FRONTEND_KEY],
        VIDEO_SIZE_MEASURE_FLOAT,
        aggregation_module.LastValueAggregation(),
    )
    exporter = stackdriver.StackdriverStatsExporter(
        options=stackdriver.Options(
            project_id="project-test", metric_prefix="teste"),
        client=mock.Mock(),
    )
    self.assertIsNotNone(
        exporter.create_metric_descriptor(last_value_view))
def test_create_metric_descriptor_sum_float(self):
    """A sum view over a float measure yields a descriptor."""
    sum_view = view_module.View(
        "view-sum-float",
        "processed video size over time",
        [FRONTEND_KEY_FLOAT],
        VIDEO_SIZE_MEASURE_FLOAT,
        aggregation_module.SumAggregation(sum=2),
    )
    exporter = stackdriver.StackdriverStatsExporter(
        options=stackdriver.Options(
            project_id="project-test", metric_prefix="teste"),
        client=mock.Mock(),
    )
    self.assertIsNotNone(exporter.create_metric_descriptor(sum_view))
def create_measurement_view(self, measurement_name):
    """Create a response-time measure, register a last-value view over
    it, and return the measure."""
    test_id_key = tag_key_module.TagKey("TEST_ID")
    response_measure = measure_module.MeasureInt(
        f"gw_m_{measurement_name}_response",
        "response time of the home page",
        "s")
    last_value_view = view_module.View(
        f"views_{measurement_name}_response",
        f"glasswall {measurement_name} response time",
        [test_id_key],
        response_measure,
        aggregation_module.LastValueAggregation())
    # Make the view visible to registered exporters.
    self.view_manager.register_view(last_value_view)
    return response_measure
def test_constructor(self):
    """View stores name, description, columns, measure and aggregation."""
    fake_measure = mock.Mock()
    fake_aggregation = mock.Mock()
    built = view_module.View(
        name="testName",
        description="testMeasure",
        columns=["testTagKey1", "testTagKey2"],
        measure=fake_measure,
        aggregation=fake_aggregation,
    )
    self.assertEqual("testName", built.name)
    self.assertEqual("testMeasure", built.description)
    self.assertEqual(["testTagKey1", "testTagKey2"], built.columns)
    self.assertEqual(fake_measure, built.measure)
    self.assertEqual(fake_aggregation, built.aggregation)
def configure():
    """Globally enables metrics collection.

    Idempotent: repeated calls after the first are no-ops so exporters
    and views are never registered twice.
    """
    global _METRICS_ENABLED
    if _METRICS_ENABLED:
        return
    _METRICS_ENABLED = True
    STATS.view_manager.register_exporter(_new_exporter())
    latency_view = view.View(
        "fireci/latency", "Latency of fireci execution stages", _TAGS,
        _m_latency, aggregation.LastValueAggregation())
    success_view = view.View(
        "fireci/success", "Success indication of fireci execution stages",
        _TAGS, _m_success, aggregation.LastValueAggregation())
    STATS.view_manager.register_view(latency_view)
    STATS.view_manager.register_view(success_view)
    # Seed the ambient tag map from environment variables named after the
    # tags upper-cased (tag "foo" reads env var "FOO").
    context = tags.TagMap()
    for tag in _TAGS:
        if tag.upper() in os.environ:
            context.insert(tag, tags.TagValue(os.environ[tag.upper()]))
    execution_context.set_current_tag_map(context)
def test_stats_record_async(self):
    """End-to-end async export: record one measurement and wait for the
    exporter's background thread to push it."""
    # We are using suffix in order to prevent cached objects
    suffix = str(os.getpid())
    tag_key = "SampleKeyAsyncTest%s" % suffix
    measure_name = "SampleMeasureNameAsyncTest%s" % suffix
    measure_description = "SampleDescriptionAsyncTest%s" % suffix
    view_name = "SampleViewNameAsyncTest%s" % suffix
    view_description = "SampleViewDescriptionAsyncTest%s" % suffix
    FRONTEND_KEY_ASYNC = tag_key_module.TagKey(tag_key)
    VIDEO_SIZE_MEASURE_ASYNC = measure_module.MeasureInt(
        measure_name, measure_description, "By")
    VIDEO_SIZE_VIEW_NAME_ASYNC = view_name
    VIDEO_SIZE_DISTRIBUTION_ASYNC =\
        aggregation_module.DistributionAggregation(
            [0.0, 16.0 * MiB, 256.0 * MiB]
        )
    VIDEO_SIZE_VIEW_ASYNC = view_module.View(
        VIDEO_SIZE_VIEW_NAME_ASYNC, view_description, [FRONTEND_KEY_ASYNC],
        VIDEO_SIZE_MEASURE_ASYNC, VIDEO_SIZE_DISTRIBUTION_ASYNC)
    stats = stats_module.stats
    view_manager = stats.view_manager
    stats_recorder = stats.stats_recorder
    # new_stats_exporter starts the periodic background transport.
    exporter = stackdriver.new_stats_exporter(
        stackdriver.Options(project_id=PROJECT))
    view_manager.register_exporter(exporter)
    # Register view.
    view_manager.register_view(VIDEO_SIZE_VIEW_ASYNC)
    # Sleep for [0, 10] milliseconds to fake work.
    time.sleep(random.randint(1, 10) / 1000.0)
    # Process video.
    # Record the processed video size.
    tag_value = tag_value_module.TagValue("1200")
    tag_map = tag_map_module.TagMap()
    tag_map.insert(FRONTEND_KEY_ASYNC, tag_value)
    measure_map = stats_recorder.new_measurement_map()
    measure_map.measure_int_put(VIDEO_SIZE_MEASURE_ASYNC, 25 * MiB)
    measure_map.record(tag_map)
    # Give the exporter thread enough time to export exactly once
    time.sleep(transport.DEFAULT_INTERVAL * 1.5)
    self.check_sd_md(exporter, view_description)
def test_record_with_attachment(self):
    """Attachments passed to record() end up on the stored exemplar."""
    boundaries = [1, 2, 3]
    distribution = {1: "test"}
    distribution_aggregation = aggregation_module.DistributionAggregation(
        boundaries=boundaries, distribution=distribution)
    name = "testName"
    description = "testMeasure"
    unit = "testUnit"
    measure = measure_module.MeasureInt(name=name,
                                        description=description,
                                        unit=unit)
    description = "testMeasure"
    columns = ["key1", "key2"]
    view = view_module.View(name=name,
                            description=description,
                            columns=columns,
                            measure=measure,
                            aggregation=distribution_aggregation)
    start_time = datetime.utcnow()
    attachments = {"One": "one", "Two": "two"}
    end_time = datetime.utcnow()
    view_data = view_data_module.ViewData(view=view,
                                          start_time=start_time,
                                          end_time=end_time)
    # Bug fix: the original assigned the Mock *class* (mock.Mock) instead
    # of an instance; setting .map on it mutated the shared class and
    # leaked state into every other test using Mock.
    context = mock.Mock()
    context.map = {'key1': 'val1', 'key2': 'val2'}
    time = datetime.utcnow().isoformat() + 'Z'
    value = 1
    view_data.record(context=context, value=value, timestamp=time,
                     attachments=attachments)
    tag_values = view_data.get_tag_values(tags=context.map,
                                          columns=view.columns)
    tuple_vals = tuple(tag_values)
    self.assertEqual(['val1', 'val2'], tag_values)
    self.assertIsNotNone(view_data.tag_value_aggregation_data_map)
    self.assertTrue(tuple_vals in view_data.tag_value_aggregation_data_map)
    self.assertIsNotNone(
        view_data.tag_value_aggregation_data_map[tuple_vals])
    # Value 1 falls in bucket index 1 of boundaries [1, 2, 3]; the
    # exemplar recorded there must carry the attachments verbatim.
    self.assertEqual(
        attachments,
        view_data.tag_value_aggregation_data_map[tuple_vals].
        exemplars[1].attachments)
def __init__(self, export_metrics=False):
    """Set up the check-request counter metric.

    Args:
        export_metrics: when True, also build and register an Azure
            Application Insights exporter from the configured
            connection string.
    """
    self.nb_check_requests = measure_module.MeasureInt(
        "nb_check_requests",
        "number of dates check requests for all weeks",
        "nb")
    self.nb_check_requests_view = view_module.View(
        "nb_check_requests view",
        "number of dates check requests for all weeks",
        [],
        self.nb_check_requests,
        aggregation_module.CountAggregation())
    view_manager.register_view(self.nb_check_requests_view)
    # Measurement map and tag map reused for every recording.
    self.mmap = stats_recorder.new_measurement_map()
    self.tmap = tag_map_module.TagMap()
    if export_metrics:
        exporter = metrics_exporter.new_metrics_exporter(
            connection_string=azure_insights.connection_string)
        view_manager.register_exporter(exporter)
def setup_open_census():
    """Wire OpenCensus stats and a Prometheus exporter onto the app."""
    stats_stats = stats.Stats()
    app.view_manager = stats_stats.view_manager
    app.stats_recorder = stats_stats.stats_recorder
    response_time_view = view.View(
        "response_time",
        "The time it took to respond",
        [key_method, key_status, key_error],
        m_response_ms,
        aggregation.LastValueAggregation())
    # Serves /metrics for Prometheus scraping on port 8000.
    app.exporter = prometheus.new_stats_exporter(
        prometheus.Options(namespace="flask_app", port=8000))
    app.view_manager.register_exporter(app.exporter)
    app.view_manager.register_view(response_time_view)
def test_collector_to_metric_invalid_dist(self):
    """to_metric raises ValueError for an unrecognized aggregation type."""
    agg = mock.Mock()
    view = view_module.View(VIDEO_SIZE_VIEW_NAME,
                            "processed video size over time",
                            [FRONTEND_KEY], VIDEO_SIZE_MEASURE, agg)
    registry = mock.Mock()
    options = prometheus.Options("test1", 8001, "localhost", registry)
    collector = prometheus.Collector(options=options)
    collector.register_view(view)
    desc = collector.registered_views[list(REGISTERED_VIEW)[0]]
    # assertRaisesRegexp is a deprecated alias removed in recent Python;
    # assertRaisesRegex is the supported spelling.
    with self.assertRaisesRegex(
            ValueError,
            'unsupported aggregation type <class \'mock.mock.Mock\'>'):
        collector.to_metric(desc=desc, tag_values=[None], agg_data=agg)
def test_create_metric_descriptor_sum_int(self):
    """Sum aggregation over an int measure yields a metric descriptor."""
    client = mock.Mock()
    # Removed unused start_time/end_time locals from the original.
    option = stackdriver.Options(project_id="project-test",
                                 metric_prefix="teste")
    view_name_sum_int = "view-sum-int"
    agg_sum = aggregation_module.SumAggregation(sum=2)
    view_sum_int = view_module.View(view_name_sum_int,
                                    "processed video size over time",
                                    [FRONTEND_KEY], VIDEO_SIZE_MEASURE,
                                    agg_sum)
    exporter = stackdriver.StackdriverStatsExporter(options=option,
                                                    client=client)
    desc = exporter.create_metric_descriptor(view_sum_int)
    # assertIsNotNone matches the sibling descriptor tests and reports a
    # clearer failure than assertNotEqual(desc, None).
    self.assertIsNotNone(desc)
def test_create_timeseries_from_distribution(self):
    """Check for explicit 0-bound bucket for SD export."""
    agg = aggregation_module.DistributionAggregation(
        aggregation_type=aggregation_module.Type.DISTRIBUTION)
    view = view_module.View(
        name="example.org/test_view",
        description="example.org/test_view",
        columns=['tag_key'],
        measure=mock.Mock(),
        aggregation=agg,
    )
    v_data = view_data_module.ViewData(
        view=view,
        start_time=TEST_TIME_STR,
        end_time=TEST_TIME_STR,
    )
    # Aggregation over (10 * range(10)) for buckets [2, 4, 6, 8]
    dad = aggregation_data_module.DistributionAggregationData(
        mean_data=4.5,
        count_data=100,
        sum_of_sqd_deviations=825,
        counts_per_bucket=[20, 20, 20, 20, 20],
        bounds=[2, 4, 6, 8],
        exemplars={mock.Mock() for ii in range(5)})
    v_data._tag_value_aggregation_data_map = {('tag_value', ): dad}
    v_data = metric_utils.view_data_to_metric(v_data, TEST_TIME)
    exporter = stackdriver.StackdriverStatsExporter()
    time_series_list = exporter.create_time_series_list(v_data)
    self.assertEqual(len(time_series_list), 1)
    [time_series] = time_series_list
    self.check_labels(time_series.metric.labels,
                      {'tag_key': 'tag_value'},
                      include_opencensus=True)
    self.assertEqual(len(time_series.points), 1)
    [point] = time_series.points
    dv = point.value.distribution_value
    self.assertEqual(100, dv.count)
    self.assertEqual(825.0, dv.sum_of_squared_deviation)
    # Stackdriver requires an explicit leading 0 bound; the exporter must
    # prepend it (with a 0 count) to the original buckets.
    self.assertEqual([0, 20, 20, 20, 20, 20], dv.bucket_counts)
    self.assertEqual([0, 2, 4, 6, 8],
                     dv.bucket_options.explicit_buckets.bounds)
def test_create_timeseries_last_value_float_tagvalue(
        self, monitor_resource_mock):
    """A last-value float view exports a time series under a custom
    metric prefix."""
    client = mock.Mock()
    option = stackdriver.Options(project_id="project-test",
                                 resource="global")
    exporter = stackdriver.StackdriverStatsExporter(options=option,
                                                    client=client)
    stats = stats_module.Stats()
    view_manager = stats.view_manager
    stats_recorder = stats.stats_recorder
    if len(view_manager.measure_to_view_map.exporters) > 0:
        view_manager.unregister_exporter(
            view_manager.measure_to_view_map.exporters[0])
    view_manager.register_exporter(exporter)
    agg_1 = aggregation_module.LastValueAggregation(value=2)
    view_name1 = "view-name1"
    new_view1 = view_module.View(view_name1,
                                 "processed video size over time",
                                 [FRONTEND_KEY_FLOAT],
                                 VIDEO_SIZE_MEASURE_FLOAT, agg_1)
    view_manager.register_view(new_view1)
    # Record against the same tag key and measure the view was registered
    # with; the original tagged FRONTEND_KEY_INT and recorded
    # VIDEO_SIZE_MEASURE, so no data ever reached view-name1.
    tag_value = tag_value_module.TagValue("Abc")
    tag_map = tag_map_module.TagMap()
    tag_map.insert(FRONTEND_KEY_FLOAT, tag_value)
    measure_map = stats_recorder.new_measurement_map()
    measure_map.measure_float_put(VIDEO_SIZE_MEASURE_FLOAT, 25 * MiB)
    measure_map.record(tag_map)
    v_data = measure_map.measure_to_view_map.get_view(view_name1, None)
    # create_time_series_list returns a list of time series; the original
    # accessed .metric on the list itself, which raises AttributeError.
    time_series_list = exporter.create_time_series_list(
        v_data, "global", "kubernetes.io/myorg")
    self.assertEqual(len(time_series_list), 1)
    [time_series] = time_series_list
    # assertEquals is a deprecated alias of assertEqual.
    self.assertEqual(time_series.metric.type,
                     "kubernetes.io/myorg/view-name1")
    self.assertIsNotNone(time_series)
def test_collector_to_metric_last_value(self):
    # Last-value aggregation should surface as a Prometheus gauge.
    agg = aggregation_module.LastValueAggregation(256)
    view = view_module.View(VIDEO_SIZE_VIEW_NAME,
                            "processed video size over time",
                            [FRONTEND_KEY], VIDEO_SIZE_MEASURE, agg)
    registry = mock.Mock()
    view_data = mock.Mock()
    options = prometheus.Options("test1", 8001, "localhost", registry)
    collector = prometheus.Collector(options=options, view_data=view_data)
    collector.register_view(view)
    desc = collector.registered_views[list(REGISTERED_VIEW)[0]]
    # NOTE(review): sibling tests call to_metric(desc=, tag_values=,
    # agg_data=) while this one passes view= — confirm which signature
    # the Collector under test actually exposes.
    metric = collector.to_metric(desc=desc, view=view)
    self.assertEqual(desc['name'], metric.name)
    self.assertEqual(desc['documentation'], metric.documentation)
    self.assertEqual('gauge', metric.type)
    self.assertEqual(1, len(metric.samples))
def test_view_to_metric_descriptor(self):
    """The derived MetricDescriptor mirrors the view's fields."""
    fake_measure = mock.Mock(spec=measure.MeasureFloat)
    fake_agg = mock.Mock(spec=aggregation.SumAggregation)
    fake_agg.aggregation_type = aggregation.Type.SUM
    test_view = view.View(
        "name", "description", ["tk1", "tk2"], fake_measure, fake_agg)
    md = metric_utils.view_to_metric_descriptor(test_view)
    self.assertTrue(
        isinstance(md, metric_descriptor.MetricDescriptor))
    self.assertEqual(md.name, test_view.name)
    self.assertEqual(md.description, test_view.description)
    self.assertEqual(md.unit, test_view.measure.unit)
    # SUM over a float measure maps to a cumulative double.
    self.assertEqual(
        md.type, metric_descriptor.MetricDescriptorType.CUMULATIVE_DOUBLE)
    # Label keys line up one-to-one with the view's columns.
    for label_key, column in zip(md.label_keys, test_view.columns):
        self.assertEqual(label_key.key, column)
def test_export_double_point_value(self):
    """A recorded float sum is exported as a double_value point."""
    view = view_module.View('', '', [FRONTEND_KEY], VIDEO_SIZE_MEASURE,
                            aggregation_module.SumAggregation())
    v_data = view_data_module.ViewData(view=view,
                                       start_time=TEST_TIME_STR,
                                       end_time=TEST_TIME_STR)
    v_data.record(context=tag_map_module.TagMap(), value=2.5,
                  timestamp=None)
    view_data = [v_data]
    view_data = [metric_utils.view_data_to_metric(view_data[0],
                                                  TEST_TIME)]
    # The RPC handler is mocked; we only inspect what export_metrics
    # hands to handler.send.
    handler = mock.Mock(spec=ocagent.ExportRpcHandler)
    ocagent.StatsExporter(handler).export_metrics(view_data)
    self.assertEqual(
        handler.send.call_args[0]
        [0].metrics[0].timeseries[0].points[0].double_value,
        2.5)
def test_collector_collect_with_none_label_value(self):
    """A None tag value is exported as an empty label string."""
    last_value_agg = aggregation_module.LastValueAggregation(256)
    test_view = view_module.View(
        "new_view", "processed video size over time",
        [FRONTEND_KEY], VIDEO_SIZE_MEASURE, last_value_agg)
    collector = prometheus.Collector(
        options=prometheus.Options("test3", 8001, "localhost",
                                   mock.Mock()))
    collector.register_view(test_view)
    metric = collector.to_metric(
        desc=collector.registered_views['test3_new_view'],
        tag_values=[None],
        agg_data=last_value_agg.aggregation_data)
    self.assertEqual(1, len(metric.samples))
    # Sample is a namedtuple
    # ('Sample', ['name', 'labels', 'value', 'timestamp', 'exemplar'])
    self.assertEqual({"myorg_keys_frontend": ""}, metric.samples[0][1])
def test_create_timeseries_float_tagvalue(self, monitor_resource_mock):
    """Sum-aggregated float view exports under the default custom metric
    prefix."""
    client = mock.Mock()
    option = stackdriver.Options(project_id="project-test",
                                 resource="global")
    exporter = stackdriver.StackdriverStatsExporter(options=option,
                                                    client=client)
    stats = stats_module.Stats()
    view_manager = stats.view_manager
    stats_recorder = stats.stats_recorder
    if len(view_manager.measure_to_view_map.exporters) > 0:
        view_manager.unregister_exporter(
            view_manager.measure_to_view_map.exporters[0])
    view_manager.register_exporter(exporter)
    agg_2 = aggregation_module.SumAggregation(sum=2.2)
    view_name2 = "view-name2"
    new_view2 = view_module.View(view_name2,
                                 "processed video size over time",
                                 [FRONTEND_KEY_FLOAT],
                                 VIDEO_SIZE_MEASURE_FLOAT, agg_2)
    view_manager.register_view(new_view2)
    tag_value_float = tag_value_module.TagValue("1200")
    tag_map = tag_map_module.TagMap()
    tag_map.insert(FRONTEND_KEY_FLOAT, tag_value_float)
    measure_map = stats_recorder.new_measurement_map()
    measure_map.measure_float_put(VIDEO_SIZE_MEASURE_FLOAT, 25 * MiB)
    measure_map.record(tag_map)
    v_data = measure_map.measure_to_view_map.get_view(view_name2, None)
    # create_time_series_list returns a list of time series; the original
    # accessed .metric on the list itself, which raises AttributeError.
    time_series_list = exporter.create_time_series_list(
        v_data, "global", "")
    self.assertEqual(len(time_series_list), 1)
    [time_series] = time_series_list
    # assertEquals is a deprecated alias of assertEqual.
    self.assertEqual(time_series.metric.type,
                     "custom.googleapis.com/opencensus/view-name2")
    self.assertIsNotNone(time_series)
def test_create_timeseries_disjoint_tags(self, monitoring_resoure_mock):
    """Tags not declared as view columns are dropped from the exported
    series; only the view's own keys appear as labels."""
    view_manager, stats_recorder, exporter = \
        self.setup_create_timeseries_test()
    # Register view with two tags
    view_name = "view-name"
    view = view_module.View(view_name, "test description",
                            [FRONTEND_KEY, FRONTEND_KEY_FLOAT],
                            VIDEO_SIZE_MEASURE,
                            aggregation_module.SumAggregation())
    view_manager.register_view(view)
    # Add point with one tag in common and one different tag
    measure_map = stats_recorder.new_measurement_map()
    tag_map = tag_map_module.TagMap()
    tag_map.insert(FRONTEND_KEY, tag_value_module.TagValue("1200"))
    tag_map.insert(FRONTEND_KEY_STR, tag_value_module.TagValue("1800"))
    measure_map.measure_int_put(VIDEO_SIZE_MEASURE, 25 * MiB)
    measure_map.record(tag_map)
    v_data = measure_map.measure_to_view_map.get_view(view_name, None)
    v_data = metric_utils.view_data_to_metric(v_data, TEST_TIME)
    time_series_list = exporter.create_time_series_list(v_data)
    self.assertEqual(len(time_series_list), 1)
    [time_series] = time_series_list
    # Verify first time series
    self.assertEqual(time_series.resource.type, "global")
    self.assertEqual(time_series.metric.type,
                     "custom.googleapis.com/opencensus/" + view_name)
    self.check_labels(time_series.metric.labels,
                      {FRONTEND_KEY_CLEAN: "1200"},
                      include_opencensus=True)
    self.assertIsNotNone(time_series.resource)
    self.assertEqual(len(time_series.points), 1)
    expected_value = monitoring_v3.types.TypedValue()
    # TODO: #565
    expected_value.double_value = 25.0 * MiB
    self.assertEqual(time_series.points[0].value, expected_value)
def test_prometheus_stats(self):
    """Integration test: record a count metric and scrape it back from
    the Prometheus exporter's local HTTP endpoint."""
    MiB = 1 << 20
    FRONTEND_KEY = tag_key_module.TagKey("my.org/keys/frontend")
    VIDEO_SIZE_MEASURE = measure_module.MeasureInt(
        "my.org/measures/video_size", "size of processed videos", "By")
    VIDEO_SIZE_VIEW_NAME = "my.org/views/video_size"
    VIDEO_SIZE_DISTRIBUTION = aggregation_module.CountAggregation(
        256.0 * MiB)
    VIDEO_SIZE_VIEW = view_module.View(VIDEO_SIZE_VIEW_NAME,
                                       "processed video size over time",
                                       [FRONTEND_KEY],
                                       VIDEO_SIZE_MEASURE,
                                       VIDEO_SIZE_DISTRIBUTION)
    stats = stats_module.Stats()
    view_manager = stats.view_manager
    stats_recorder = stats.stats_recorder
    exporter = prometheus.new_stats_exporter(
        prometheus.Options(namespace="opencensus", port=9303))
    view_manager.register_exporter(exporter)
    view_manager.register_view(VIDEO_SIZE_VIEW)
    time.sleep(random.randint(1, 10) / 1000.0)
    tag_value = tag_value_module.TagValue(str(random.randint(1, 10000)))
    tag_map = tag_map_module.TagMap()
    tag_map.insert(FRONTEND_KEY, tag_value)
    measure_map = stats_recorder.new_measurement_map()
    measure_map.measure_int_put(VIDEO_SIZE_MEASURE, 25 * MiB)
    measure_map.record(tag_map)
    # Scrape the exporter's own endpoint; urllib module layout differs
    # between Python 2 and 3.
    if sys.version_info > (3, 0):
        import urllib.request
        contents = urllib.request.urlopen(
            "http://localhost:9303/metrics").read()
    else:
        import urllib2
        contents = urllib2.urlopen("http://localhost:9303/metrics").read()
    self.assertIn(b'# TYPE opencensus_my.org/views/video_size counter',
                  contents)
    self.assertIn(b'opencensus_my.org/views/video_size 268435456.0',
                  contents)
def test_collector_to_metric_sum(self):
    """Sum aggregation maps to the 'unknown' (untyped) Prometheus
    metric family."""
    sum_agg = aggregation_module.SumAggregation(256.0)
    sum_view = view_module.View(
        VIDEO_SIZE_VIEW_NAME, "processed video size over time",
        [FRONTEND_KEY], VIDEO_SIZE_MEASURE, sum_agg)
    collector = prometheus.Collector(
        options=prometheus.Options("test1", 8001, "localhost",
                                   mock.Mock()))
    collector.register_view(sum_view)
    desc = collector.registered_views[list(REGISTERED_VIEW)[0]]
    metric = collector.to_metric(
        desc=desc,
        tag_values=[None],
        agg_data=sum_agg.new_aggregation_data(VIDEO_SIZE_MEASURE))
    self.assertEqual(desc['name'], metric.name)
    self.assertEqual(desc['documentation'], metric.documentation)
    self.assertEqual('unknown', metric.type)
    self.assertEqual(1, len(metric.samples))
def test_collector_collect(self):
    """to_metric renders a last-value view as a gauge with one sample
    carrying the sanitized tag label."""
    last_value_agg = aggregation_module.LastValueAggregation(256)
    test_view = view_module.View(
        "new_view", "processed video size over time",
        [FRONTEND_KEY], VIDEO_SIZE_MEASURE, last_value_agg)
    collector = prometheus.Collector(
        options=prometheus.Options("test2", 8001, "localhost",
                                   mock.Mock()))
    collector.register_view(test_view)
    desc = collector.registered_views['test2_new_view']
    metric = collector.to_metric(
        desc=desc,
        tag_values=[tag_value_module.TagValue("value")],
        agg_data=last_value_agg.aggregation_data)
    self.assertEqual(desc['name'], metric.name)
    self.assertEqual(desc['documentation'], metric.documentation)
    self.assertEqual('gauge', metric.type)
    self.assertEqual(
        [Sample(metric.name, {"myorg_keys_frontend": "value"}, 256)],
        metric.samples)
def test_record_with_none_context(self):
    """Recording without a context aggregates under all-None tag
    values."""
    sum_view = view_module.View(
        "test_view", "description", ['key1', 'key2'],
        mock.Mock(spec=measure_module.MeasureInt),
        aggregation_module.SumAggregation())
    view_data = view_data_module.ViewData(
        view=sum_view,
        start_time=datetime.utcnow(),
        end_time=datetime.utcnow())
    view_data.record(context=None, value=4,
                     timestamp=utils.to_iso_str(), attachments=None)
    tag_values = view_data.get_tag_values(tags={},
                                          columns=sum_view.columns)
    self.assertEqual([None, None], tag_values)
    key = tuple(tag_values)
    self.assertTrue(key in view_data.tag_value_aggregation_data_map)
    self.assertEqual(
        4, view_data.tag_value_aggregation_data_map.get(key).sum_data)
def test_create_timeseries_last_value_float_tagvalue(
        self, monitor_resource_mock):
    """Last-value float view exports one series whose point carries the
    most recently recorded value under a custom metric prefix."""
    view_manager, stats_recorder, exporter = \
        self.setup_create_timeseries_test()
    agg_2 = aggregation_module.LastValueAggregation(value=2.2 * MiB)
    view_name2 = "view-name2"
    new_view2 = view_module.View(view_name2,
                                 "processed video size over time",
                                 [FRONTEND_KEY_FLOAT],
                                 VIDEO_SIZE_MEASURE_FLOAT, agg_2)
    view_manager.register_view(new_view2)
    tag_value_float = tag_value_module.TagValue("Abc")
    tag_map = tag_map_module.TagMap()
    tag_map.insert(FRONTEND_KEY_FLOAT, tag_value_float)
    measure_map = stats_recorder.new_measurement_map()
    measure_map.measure_float_put(VIDEO_SIZE_MEASURE_FLOAT, 25.7 * MiB)
    measure_map.record(tag_map)
    v_data = measure_map.measure_to_view_map.get_view(view_name2, None)
    time_series_list = exporter.create_time_series_list(
        v_data, "global", "kubernetes.io/myorg")
    self.assertEqual(len(time_series_list), 1)
    time_series = time_series_list[0]
    self.assertEqual(time_series.metric.type,
                     "kubernetes.io/myorg/view-name2")
    self.assertCorrectLabels(time_series.metric.labels,
                             {FRONTEND_KEY_FLOAT_CLEAN: "Abc"},
                             include_opencensus=True)
    self.assertIsNotNone(time_series.resource)
    self.assertEqual(len(time_series.points), 1)
    expected_value = monitoring_v3.types.TypedValue()
    # Last-value: the recorded 25.7 MiB replaces the 2.2 MiB seed.
    expected_value.double_value = 25.7 * MiB
    self.assertEqual(time_series.points[0].value, expected_value)