def test_export_metrics(self):
    """Export one gauge-int64 metric and verify the series sent to SD.

    Fix: the series argument of create_time_series was extracted twice
    from call_args; read it once and destructure the saved value.
    """
    lv = label_value.LabelValue('val')
    val = value.ValueLong(value=123)
    dt = datetime(2019, 3, 20, 21, 34, 0, 537954)
    pp = point.Point(value=val, timestamp=dt)
    ts = [
        time_series.TimeSeries(label_values=[lv], points=[pp],
                               start_timestamp=utils.to_iso_str(dt))
    ]
    desc = metric_descriptor.MetricDescriptor(
        name='name',
        description='description',
        unit='unit',
        type_=metric_descriptor.MetricDescriptorType.GAUGE_INT64,
        label_keys=[label_key.LabelKey('key', 'description')])
    mm = metric.Metric(descriptor=desc, time_series=ts)

    exporter = stackdriver.StackdriverStatsExporter(client=mock.Mock())
    exporter.export_metrics([mm])

    self.assertEqual(exporter.client.create_time_series.call_count, 1)
    # create_time_series(name, series): the series list is the second
    # positional argument.  Grab it once instead of re-reading call_args.
    sd_args = exporter.client.create_time_series.call_args[0][1]
    self.assertEqual(len(sd_args), 1)
    [sd_arg] = sd_args
    self.assertEqual(sd_arg.points[0].value.int64_value, 123)
def test_create_batched_time_series(self, monitor_resource_mock):
    """Batching a single time series yields one batch with one entry."""
    client = mock.Mock()
    recorded = view_data_module.ViewData(view=VIDEO_SIZE_VIEW,
                                         start_time=TEST_TIME_STR,
                                         end_time=TEST_TIME_STR)
    recorded.record(context=tag_map_module.TagMap(), value=2,
                    timestamp=None)
    metrics = [metric_utils.view_data_to_metric(recorded, TEST_TIME)]

    exporter = stackdriver.StackdriverStatsExporter(
        options=stackdriver.Options(project_id="project-test"),
        client=client)
    batches = exporter.create_batched_time_series(metrics, 1)

    self.assertEqual(len(batches), 1)
    [batch] = batches
    self.assertEqual(len(batch), 1)
    [series] = batch
    self.assertEqual(
        series.metric.type,
        'custom.googleapis.com/opencensus/' + VIDEO_SIZE_VIEW_NAME)
    self.check_labels(series.metric.labels, {}, include_opencensus=True)
def test_handle_upload_no_data(self):
    """Uploading None must not touch the monitoring API."""
    client = mock.Mock()
    exporter = stackdriver.StackdriverStatsExporter(
        options=stackdriver.Options(project_id="project-test"),
        client=client)
    exporter.handle_upload(None)
    self.assertFalse(client.create_time_series.called)
def test_create_metric_descriptor(self):
    """A descriptor is produced for the distribution view with a prefix."""
    client = mock.Mock()
    exporter = stackdriver.StackdriverStatsExporter(
        options=stackdriver.Options(project_id="project-test",
                                    metric_prefix="teste"),
        client=client)
    descriptor = exporter.create_metric_descriptor(VIDEO_SIZE_VIEW)
    self.assertIsNotNone(descriptor)
def test_constructor_param(self):
    """Options passed to the constructor are exposed unchanged."""
    project_id = 1
    exporter = stackdriver.StackdriverStatsExporter(
        options=stackdriver.Options(
            project_id=project_id,
            default_monitoring_labels={'key1': 'value1'}))
    self.assertEqual(exporter.options.project_id, project_id)
def test_on_register_view(self):
    """Registering a real view creates its descriptor; None is a no-op."""
    client = mock.Mock()
    exporter = stackdriver.StackdriverStatsExporter(
        options=stackdriver.Options(project_id="project-test"),
        client=client)
    exporter.on_register_view(VIDEO_SIZE_VIEW)
    exporter.on_register_view(None)
    self.assertTrue(client.create_metric_descriptor.called)
def test_export_no_data(self):
    """Exporting None must not forward anything to the transport."""
    client = mock.Mock()
    transport = mock.Mock()
    exporter = stackdriver.StackdriverStatsExporter(
        options=stackdriver.Options(project_id="project-test"),
        client=client,
        transport=transport)
    exporter.export(None)
    self.assertFalse(exporter.transport.export.called)
def test_create_timeseries_float_tagvalue(self, monitor_resource_mock):
    """A float-measure view exports one series with a summed double value."""
    client = mock.Mock()
    option = stackdriver.Options(project_id="project-test",
                                 resource="global")
    exporter = stackdriver.StackdriverStatsExporter(options=option,
                                                    client=client)
    stats = stats_module.stats
    view_manager = stats.view_manager
    stats_recorder = stats.stats_recorder
    # Stats state is global; drop any exporter left over from an earlier
    # test before registering ours.
    if len(view_manager.measure_to_view_map.exporters) > 0:
        view_manager.unregister_exporter(
            view_manager.measure_to_view_map.exporters[0])
    view_manager.register_exporter(exporter)
    # Sum aggregation seeded with 2.2, so the exported value is the seed
    # plus the single recorded measurement.
    agg_3 = aggregation_module.SumAggregation(sum=2.2)
    view_name3 = "view-name3"
    new_view3 = view_module.View(view_name3,
                                 "processed video size over time",
                                 [FRONTEND_KEY_FLOAT],
                                 VIDEO_SIZE_MEASURE_FLOAT, agg_3)
    view_manager.register_view(new_view3)
    tag_value_float = tag_value_module.TagValue("1200")
    tag_map = tag_map_module.TagMap()
    tag_map.insert(FRONTEND_KEY_FLOAT, tag_value_float)
    measure_map = stats_recorder.new_measurement_map()
    measure_map.measure_float_put(VIDEO_SIZE_MEASURE_FLOAT, 25 * MiB)
    measure_map.record(tag_map)
    v_data = measure_map.measure_to_view_map.get_view(view_name3, None)
    v_data = metric_utils.view_data_to_metric(v_data, TEST_TIME)
    time_series_list = exporter.create_time_series_list(v_data)
    self.assertEqual(len(time_series_list), 1)
    [time_series] = time_series_list
    self.assertEqual(time_series.metric.type,
                     "custom.googleapis.com/opencensus/view-name3")
    self.check_labels(time_series.metric.labels,
                      {FRONTEND_KEY_FLOAT_CLEAN: "1200"},
                      include_opencensus=True)
    self.assertIsNotNone(time_series.resource)
    self.assertEqual(len(time_series.points), 1)
    expected_value = monitoring_v3.types.TypedValue()
    # seeded sum (2.2) + one recorded measurement (25 MiB)
    expected_value.double_value = 2.2 + 25 * MiB
    self.assertEqual(time_series.points[0].value, expected_value)
def test_handle_upload_with_data(self, monitor_resource_mock):
    """Uploading recorded view data results in a create_time_series call."""
    client = mock.Mock()
    recorded = view_data_module.ViewData(view=VIDEO_SIZE_VIEW,
                                         start_time=TEST_TIME,
                                         end_time=TEST_TIME)
    recorded.record(context=tag_map_module.TagMap(), value=2,
                    timestamp=None)
    exporter = stackdriver.StackdriverStatsExporter(
        options=stackdriver.Options(project_id="project-test"),
        client=client)
    exporter.handle_upload([recorded])
    self.assertTrue(client.create_time_series.called)
def test_create_batched_time_series_with_many(self, monitor_resource_mock):
    """Five series batched with batch size 2 yield batches of 2, 2 and 1."""
    client = mock.Mock()

    def build_view_data(view_name, tagged_values):
        # LastValue view producing one series per distinct tag value.
        new_view = view_module.View(
            view_name, "test description", ['test'], VIDEO_SIZE_MEASURE,
            aggregation_module.LastValueAggregation())
        vd = view_data_module.ViewData(view=new_view,
                                       start_time=TEST_TIME_STR,
                                       end_time=TEST_TIME_STR)
        for tag, val in tagged_values:
            vd.record(context=tag_map_module.TagMap({'test': tag}),
                      value=val, timestamp=None)
        return vd

    # First view contributes 3 series, second view contributes 2.
    v_data1 = build_view_data("view-name1", [('1', 7), ('2', 5), ('3', 3)])
    v_data2 = build_view_data("view-name2", [('1', 7), ('2', 5)])
    metrics = [
        metric_utils.view_data_to_metric(vd, TEST_TIME)
        for vd in (v_data1, v_data2)
    ]

    exporter = stackdriver.StackdriverStatsExporter(
        options=stackdriver.Options(project_id="project-test"),
        client=client)
    batches = exporter.create_batched_time_series(metrics, 2)

    self.assertEqual(len(batches), 3)
    [tsb1, tsb2, tsb3] = batches
    self.assertEqual(len(tsb1), 2)
    self.assertEqual(len(tsb2), 2)
    self.assertEqual(len(tsb3), 1)
def test_stats_record_sync(self):
    """Record one measurement and push metrics to live Stackdriver.

    Integration test: uses a real MetricServiceClient against PROJECT,
    so it needs valid credentials. Verification is delegated to
    check_sd_md at the end.
    """
    # We are using sufix in order to prevent cached objects
    sufix = str(os.getgid())
    tag_key = "SampleKeySyncTest%s" % sufix
    measure_name = "SampleMeasureNameSyncTest%s" % sufix
    measure_description = "SampleDescriptionSyncTest%s" % sufix
    view_name = "SampleViewNameSyncTest%s" % sufix
    view_description = "SampleViewDescriptionSyncTest%s" % sufix
    FRONTEND_KEY = tag_key_module.TagKey(tag_key)
    VIDEO_SIZE_MEASURE = measure_module.MeasureInt(measure_name,
                                                   measure_description,
                                                   "By")
    VIDEO_SIZE_VIEW_NAME = view_name
    VIDEO_SIZE_DISTRIBUTION = aggregation_module.DistributionAggregation(
        [0.0, 16.0 * MiB, 256.0 * MiB])
    VIDEO_SIZE_VIEW = view_module.View(VIDEO_SIZE_VIEW_NAME,
                                       view_description, [FRONTEND_KEY],
                                       VIDEO_SIZE_MEASURE,
                                       VIDEO_SIZE_DISTRIBUTION)
    stats = stats_module.stats
    view_manager = stats.view_manager
    stats_recorder = stats.stats_recorder
    # Real client: the export below goes to the live monitoring API.
    client = monitoring_v3.MetricServiceClient()
    exporter = stackdriver.StackdriverStatsExporter(
        options=stackdriver.Options(project_id=PROJECT), client=client)
    view_manager.register_exporter(exporter)
    # Register view.
    view_manager.register_view(VIDEO_SIZE_VIEW)
    # Sleep for [0, 10] milliseconds to fake work.
    time.sleep(random.randint(1, 10) / 1000.0)
    # Process video.
    # Record the processed video size.
    tag_value = tag_value_module.TagValue("1200")
    tag_map = tag_map_module.TagMap()
    tag_map.insert(FRONTEND_KEY, tag_value)
    measure_map = stats_recorder.new_measurement_map()
    measure_map.measure_int_put(VIDEO_SIZE_MEASURE, 25 * MiB)
    measure_map.record(tag_map)
    # Export synchronously; get_metrics drains the registered views.
    exporter.export_metrics(stats_module.stats.get_metrics())
    # Sleep for [0, 10] milliseconds to fake wait.
    time.sleep(random.randint(1, 10) / 1000.0)
    # Assert the metric descriptor is visible in Stackdriver.
    self.check_sd_md(exporter, view_description)
def test_create_timeseries_multiple_tags(self):
    """Check that exporter creates timeseries for multiple tag values.

    create_time_series_list should return a time series for each set of
    values in the tag value aggregation map.
    """
    view = view_module.View(
        name="example.org/test_view",
        description="example.org/test_view",
        columns=[
            tag_key_module.TagKey('color'),
            tag_key_module.TagKey('shape')
        ],
        measure=mock.Mock(),
        aggregation=aggregation_module.CountAggregation(
            aggregation_type=aggregation_module.Type.COUNT),
    )
    v_data = view_data_module.ViewData(
        view=view,
        start_time=TEST_TIME_STR,
        end_time=TEST_TIME_STR,
    )
    # Two distinct tag combinations carrying different counts.
    v_data._tag_value_aggregation_data_map = {
        ('red', 'square'):
            aggregation_data_module.CountAggregationData(10),
        ('blue', 'circle'):
            aggregation_data_module.CountAggregationData(20),
    }
    metric_data = metric_utils.view_data_to_metric(v_data, TEST_TIME)

    exporter = stackdriver.StackdriverStatsExporter()
    series_list = exporter.create_time_series_list(metric_data)

    self.assertEqual(len(series_list), 2)
    for series in series_list:
        self.assertEqual(len(series.points), 1)
    by_color = {
        series.metric.labels.get('color'): series
        for series in series_list
    }
    self.assertEqual(by_color['red'].metric.labels.get('shape'), 'square')
    self.assertEqual(by_color['blue'].metric.labels.get('shape'), 'circle')
    self.assertEqual(by_color['red'].points[0].value.int64_value, 10)
    self.assertEqual(by_color['blue'].points[0].value.int64_value, 20)
def test_export_with_data(self):
    """Exporting non-empty view data forwards it to the transport."""
    client = mock.Mock()
    transport = mock.Mock()
    v_data = view_data_module.ViewData(view=VIDEO_SIZE_VIEW,
                                       start_time=TEST_TIME,
                                       end_time=TEST_TIME)
    exporter = stackdriver.StackdriverStatsExporter(
        options=stackdriver.Options(project_id="project-test"),
        client=client,
        transport=transport)
    exporter.export([v_data])
    self.assertTrue(exporter.transport.export.called)
def test_create_metric_descriptor_count(self):
    """A descriptor can be built for a count-aggregated view."""
    client = mock.Mock()
    exporter = stackdriver.StackdriverStatsExporter(
        options=stackdriver.Options(project_id="project-test",
                                    metric_prefix="teste"),
        client=client)
    count_view = view_module.View(
        "view-count", "processed video size over time", [FRONTEND_KEY],
        VIDEO_SIZE_MEASURE, aggregation_module.CountAggregation(count=2))
    self.assertIsNotNone(exporter.create_metric_descriptor(count_view))
def test_create_metric_descriptor_sum_float(self):
    """A descriptor can be built for a float sum-aggregated view."""
    client = mock.Mock()
    exporter = stackdriver.StackdriverStatsExporter(
        options=stackdriver.Options(project_id="project-test",
                                    metric_prefix="teste"),
        client=client)
    sum_view = view_module.View(
        "view-sum-float", "processed video size over time",
        [FRONTEND_KEY_FLOAT], VIDEO_SIZE_MEASURE_FLOAT,
        aggregation_module.SumAggregation(sum=2))
    self.assertIsNotNone(exporter.create_metric_descriptor(sum_view))
def test_create_metric_descriptor_base(self):
    """A bare BaseAggregation has no SD mapping and must raise."""
    client = mock.Mock()
    exporter = stackdriver.StackdriverStatsExporter(
        options=stackdriver.Options(project_id="project-test",
                                    metric_prefix="teste"),
        client=client)
    base_view = view_module.View(
        "view-base", "processed video size over time", [FRONTEND_KEY],
        VIDEO_SIZE_MEASURE, aggregation_module.BaseAggregation())
    self.assertRaises(Exception, exporter.create_metric_descriptor,
                      base_view)
def test_create_metric_descriptor_last_value_float(self):
    """A descriptor can be built for a float last-value view."""
    client = mock.Mock()
    exporter = stackdriver.StackdriverStatsExporter(
        options=stackdriver.Options(project_id="project-test",
                                    metric_prefix="teste"),
        client=client)
    last_value_view = view_module.View(
        "view-base", "processed video size over time", [FRONTEND_KEY],
        VIDEO_SIZE_MEASURE_FLOAT,
        aggregation_module.LastValueAggregation())
    self.assertIsNotNone(
        exporter.create_metric_descriptor(last_value_view))
def test_get_metric_descriptor_bad_type(self):
    """An unrecognized OC descriptor type must raise TypeError."""
    exporter = stackdriver.StackdriverStatsExporter(
        options=stackdriver.Options(project_id='project_id'),
        client=mock.Mock())
    bad_type_oc_md = metric_descriptor.MetricDescriptor(
        name='name',
        description='description',
        unit='unit',
        # Need a valid type to create the descriptor
        type_=metric_descriptor.MetricDescriptorType.GAUGE_INT64,
        label_keys=[label_key.LabelKey('key', 'description')])
    # Corrupt the type after construction to exercise the error path.
    bad_type_oc_md._type = 100
    with self.assertRaises(TypeError):
        exporter.get_metric_descriptor(bad_type_oc_md)
def test_create_timeseries_from_distribution(self):
    """Check for explicit 0-bound bucket for SD export."""
    view = view_module.View(
        name="example.org/test_view",
        description="example.org/test_view",
        columns=['tag_key'],
        measure=mock.Mock(),
        aggregation=aggregation_module.DistributionAggregation(
            aggregation_type=aggregation_module.Type.DISTRIBUTION),
    )
    v_data = view_data_module.ViewData(
        view=view,
        start_time=TEST_TIME_STR,
        end_time=TEST_TIME_STR,
    )
    # Aggregation over (10 * range(10)) for buckets [2, 4, 6, 8]
    distribution = aggregation_data_module.DistributionAggregationData(
        mean_data=4.5,
        count_data=100,
        sum_of_sqd_deviations=825,
        counts_per_bucket=[20, 20, 20, 20, 20],
        bounds=[2, 4, 6, 8],
        exemplars={mock.Mock() for _ in range(5)})
    v_data._tag_value_aggregation_data_map = {
        ('tag_value', ): distribution
    }
    metric_data = metric_utils.view_data_to_metric(v_data, TEST_TIME)

    exporter = stackdriver.StackdriverStatsExporter()
    series_list = exporter.create_time_series_list(metric_data)

    self.assertEqual(len(series_list), 1)
    [series] = series_list
    self.check_labels(series.metric.labels, {'tag_key': 'tag_value'},
                      include_opencensus=True)
    self.assertEqual(len(series.points), 1)
    [pt] = series.points
    dv = pt.value.distribution_value
    self.assertEqual(100, dv.count)
    self.assertEqual(825.0, dv.sum_of_squared_deviation)
    # SD requires an explicit 0 lower bound, with a corresponding
    # (empty) underflow bucket prepended to the counts.
    self.assertEqual([0, 20, 20, 20, 20, 20], dv.bucket_counts)
    self.assertEqual([0, 2, 4, 6, 8],
                     dv.bucket_options.explicit_buckets.bounds)
def test_create_timeseries_something(self):
    """Check that exporter creates timeseries for multiple tag values.

    create_time_series_list should return a time series for each set of
    values in the tag value aggregation map.
    """
    v_data = mock.Mock(spec=view_data_module.ViewData)
    v_data.view.name = "example.org/test_view"
    v_data.view.columns = [
        tag_key_module.TagKey('color'),
        tag_key_module.TagKey('shape')
    ]
    v_data.view.aggregation.aggregation_type = \
        aggregation_module.Type.COUNT
    v_data.start_time = TEST_TIME
    v_data.end_time = TEST_TIME
    # Two tag combinations carrying different counts.
    v_data.tag_value_aggregation_data_map = {
        ('red', 'square'):
            aggregation_data_module.CountAggregationData(10),
        ('blue', 'circle'):
            aggregation_data_module.CountAggregationData(20),
    }

    exporter = stackdriver.StackdriverStatsExporter(
        options=mock.Mock(),
        client=mock.Mock(),
    )
    series_list = exporter.create_time_series_list(v_data, "", "")

    self.assertEqual(len(series_list), 2)
    for series in series_list:
        self.assertEqual(len(series.points), 1)
    by_color = {
        series.metric.labels.get('color'): series
        for series in series_list
    }
    self.assertEqual(by_color['red'].metric.labels.get('shape'), 'square')
    self.assertEqual(by_color['blue'].metric.labels.get('shape'), 'circle')
    self.assertEqual(by_color['red'].points[0].value.int64_value, 10)
    self.assertEqual(by_color['blue'].points[0].value.int64_value, 20)
def test_get_metric_descriptor_custom_prefix(self):
    """A custom metric prefix appears in the SD type and name."""
    exporter = stackdriver.StackdriverStatsExporter(
        options=stackdriver.Options(
            default_monitoring_labels={'dk': 'dd'},
            metric_prefix='metric_prefix',
            project_id='project_id'),
        client=mock.Mock())
    oc_md = metric_descriptor.MetricDescriptor(
        name='name',
        description='description',
        unit='unit',
        type_=metric_descriptor.MetricDescriptorType.GAUGE_INT64,
        label_keys=[label_key.LabelKey('ck', 'cd')])
    sd_md = exporter.get_metric_descriptor(oc_md)
    self.assertIn('metric_prefix', sd_md.type)
    self.assertIn('metric_prefix', sd_md.name)
def setup_create_timeseries_test(self):
    """Build a fresh Stats/exporter pair for the timeseries tests."""
    client = mock.Mock()
    execution_context.clear()
    exporter = stackdriver.StackdriverStatsExporter(
        options=stackdriver.Options(project_id="project-test",
                                    resource="global"),
        client=client)
    stats = stats_module.Stats()
    view_manager = stats.view_manager
    stats_recorder = stats.stats_recorder
    # Stats state is shared; drop any exporter a previous test left
    # behind before registering ours.
    if view_manager.measure_to_view_map.exporters:
        view_manager.unregister_exporter(
            view_manager.measure_to_view_map.exporters[0])
    view_manager.register_exporter(exporter)
    return view_manager, stats_recorder, exporter
def test_register_metric_descriptor(self):
    """Registering the same descriptor twice only creates it once."""
    exporter = stackdriver.StackdriverStatsExporter(
        options=stackdriver.Options(metric_prefix='metric_prefix',
                                    project_id='project_id'),
        client=mock.Mock())
    oc_md = metric_descriptor.MetricDescriptor(
        name='name',
        description='description',
        unit='unit',
        type_=metric_descriptor.MetricDescriptorType.GAUGE_INT64,
        label_keys=[label_key.LabelKey('key', 'description')])
    for _ in range(2):
        # The second registration must hit the cache, not the API.
        exporter.register_metric_descriptor(oc_md)
        self.assertEqual(
            exporter.client.create_metric_descriptor.call_count, 1)
def test_create_timeseries_from_distribution(self):
    """Check for explicit 0-bound bucket for SD export."""
    v_data = mock.Mock(spec=view_data_module.ViewData)
    v_data.view.name = "example.org/test_view"
    v_data.view.columns = ['tag_key']
    v_data.view.aggregation.aggregation_type = \
        aggregation_module.Type.DISTRIBUTION
    v_data.start_time = TEST_TIME
    v_data.end_time = TEST_TIME
    # Aggregation over (10 * range(10)) for buckets [2, 4, 6, 8]
    dad = aggregation_data_module.DistributionAggregationData(
        mean_data=4.5,
        count_data=100,
        sum_of_sqd_deviations=825,
        counts_per_bucket=[20, 20, 20, 20, 20],
        bounds=[2, 4, 6, 8],
        exemplars={mock.Mock() for ii in range(5)})
    v_data.tag_value_aggregation_data_map = {('tag_value', ): dad}
    exporter = stackdriver.StackdriverStatsExporter(
        options=mock.Mock(),
        client=mock.Mock(),
    )
    time_series_list = exporter.create_time_series_list(v_data, "", "")
    self.assertEqual(len(time_series_list), 1)
    [time_series] = time_series_list
    self.assertCorrectLabels(time_series.metric.labels,
                             {'tag_key': 'tag_value'},
                             include_opencensus=True)
    self.assertEqual(len(time_series.points), 1)
    [point] = time_series.points
    dv = point.value.distribution_value
    self.assertEqual(100, dv.count)
    self.assertEqual(4.5, dv.mean)
    self.assertEqual(825.0, dv.sum_of_squared_deviation)
    # SD requires an explicit 0 lower bound with a corresponding
    # (empty) underflow bucket prepended to the counts.
    self.assertEqual([0, 20, 20, 20, 20, 20], dv.bucket_counts)
    self.assertEqual([0, 2, 4, 6, 8],
                     dv.bucket_options.explicit_buckets.bounds)
def test_create_timeseries_invalid_aggregation(self):
    """An aggregation of type NONE cannot be exported and must raise."""
    v_data = mock.Mock(spec=view_data_module.ViewData)
    v_data.view.name = "example.org/base_view"
    v_data.view.columns = [tag_key_module.TagKey('base_key')]
    v_data.view.aggregation.aggregation_type = \
        aggregation_module.Type.NONE
    v_data.start_time = TEST_TIME
    v_data.end_time = TEST_TIME
    v_data.tag_value_aggregation_data_map = {
        (None, ): aggregation_data_module.BaseAggregationData(10),
    }
    exporter = stackdriver.StackdriverStatsExporter(
        options=mock.Mock(),
        client=mock.Mock(),
    )
    self.assertRaises(TypeError, exporter.create_time_series_list,
                      v_data, "", "")
def test_get_metric_descriptor(self):
    """OC GAUGE_INT64 maps to an SD GAUGE/INT64 descriptor, no API call."""
    exporter = stackdriver.StackdriverStatsExporter(
        options=stackdriver.Options(
            default_monitoring_labels={'dk': 'dd'},
            project_id='project_id'),
        client=mock.Mock())
    oc_md = metric_descriptor.MetricDescriptor(
        name='name',
        description='description',
        unit='unit',
        type_=metric_descriptor.MetricDescriptorType.GAUGE_INT64,
        label_keys=[label_key.LabelKey('ck', 'cd')])
    sd_md = exporter.get_metric_descriptor(oc_md)
    self.assertEqual(
        sd_md.metric_kind,
        monitoring_v3.enums.MetricDescriptor.MetricKind.GAUGE)
    self.assertEqual(
        sd_md.value_type,
        monitoring_v3.enums.MetricDescriptor.ValueType.INT64)
    self.assertIsInstance(sd_md, monitoring_v3.types.MetricDescriptor)
    # get_metric_descriptor only converts; it must not register anything.
    exporter.client.create_metric_descriptor.assert_not_called()
def test_stats_record_sync(self):
    """Record a measurement over the sync transport and poll live SD.

    Integration test: uses a real MetricServiceClient against PROJECT
    with the synchronous transport, then retries until the metric
    descriptor is visible (Stackdriver is eventually consistent).
    """
    # We are using sufix in order to prevent cached objects
    sufix = str(os.getgid())
    tag_key = "SampleKeySyncTest%s" % sufix
    measure_name = "SampleMeasureNameSyncTest%s" % sufix
    measure_description = "SampleDescriptionSyncTest%s" % sufix
    view_name = "SampleViewNameSyncTest%s" % sufix
    view_description = "SampleViewDescriptionSyncTest%s" % sufix
    FRONTEND_KEY = tag_key_module.TagKey(tag_key)
    VIDEO_SIZE_MEASURE = measure_module.MeasureInt(measure_name,
                                                   measure_description,
                                                   "By")
    VIDEO_SIZE_VIEW_NAME = view_name
    VIDEO_SIZE_DISTRIBUTION = aggregation_module.DistributionAggregation(
        [0.0, 16.0 * MiB, 256.0 * MiB])
    VIDEO_SIZE_VIEW = view_module.View(VIDEO_SIZE_VIEW_NAME,
                                       view_description, [FRONTEND_KEY],
                                       VIDEO_SIZE_MEASURE,
                                       VIDEO_SIZE_DISTRIBUTION)
    stats = stats_module.Stats()
    view_manager = stats.view_manager
    stats_recorder = stats.stats_recorder
    # Real client + synchronous transport: records are exported inline.
    client = monitoring_v3.MetricServiceClient()
    exporter = stackdriver.StackdriverStatsExporter(
        options=stackdriver.Options(project_id=PROJECT),
        client=client,
        transport=sync.SyncTransport)
    view_manager.register_exporter(exporter)
    # Register view.
    view_manager.register_view(VIDEO_SIZE_VIEW)
    # Sleep for [0, 10] milliseconds to fake work.
    time.sleep(random.randint(1, 10) / 1000.0)
    # Process video.
    # Record the processed video size.
    tag_value = tag_value_module.TagValue("1200")
    tag_map = tag_map_module.TagMap()
    tag_map.insert(FRONTEND_KEY, tag_value)
    measure_map = stats_recorder.new_measurement_map()
    measure_map.measure_int_put(VIDEO_SIZE_MEASURE, 25 * MiB)
    measure_map.record(tag_map)
    # Sleep for [0, 10] milliseconds to fake wait.
    time.sleep(random.randint(1, 10) / 1000.0)

    # Retry until Stackdriver has ingested the new descriptor.
    @retry(wait_fixed=RETRY_WAIT_PERIOD,
           stop_max_attempt_number=RETRY_MAX_ATTEMPT)
    def get_metric_descriptors(self, exporter, view_description):
        # Look up our descriptor by its (unique) description.
        name = exporter.client.project_path(PROJECT)
        list_metrics_descriptors = exporter.client.list_metric_descriptors(
            name)
        element = next((element for element in list_metrics_descriptors
                        if element.description == view_description), None)
        self.assertIsNotNone(element)
        self.assertEqual(element.description, view_description)
        self.assertEqual(element.unit, "By")

    get_metric_descriptors(self, exporter, view_description)
def test_constructor(self):
    """A default-constructed exporter starts with no client."""
    exporter = stackdriver.StackdriverStatsExporter()
    self.assertIsNone(exporter.client)
def test_create_timeseries_with_resource(self, monitor_resource_mock):
    """Detected monitored resources map onto the right SD resource type.

    Exercises four environments in sequence by swapping the mocked
    resource detector: gce_instance, gke_container, aws_ec2_instance,
    and an unknown environment that falls back to 'global'.
    """
    client = mock.Mock()
    execution_context.clear()
    option = stackdriver.Options(project_id="project-test", resource="")
    exporter = stackdriver.StackdriverStatsExporter(options=option,
                                                    client=client)
    stats = stats_module.stats
    view_manager = stats.view_manager
    stats_recorder = stats.stats_recorder
    # Stats state is global; drop any exporter left by an earlier test.
    if len(view_manager.measure_to_view_map.exporters) > 0:
        view_manager.unregister_exporter(
            view_manager.measure_to_view_map.exporters[0])
    view_manager.register_exporter(exporter)
    view_manager.register_view(VIDEO_SIZE_VIEW)
    tag_value = tag_value_module.TagValue("1200")
    tag_map = tag_map_module.TagMap()
    tag_map.insert(FRONTEND_KEY, tag_value)
    measure_map = stats_recorder.new_measurement_map()
    measure_map.measure_int_put(VIDEO_SIZE_MEASURE, 25 * MiB)
    measure_map.record(tag_map)
    v_data = measure_map.measure_to_view_map.get_view(
        VIDEO_SIZE_VIEW_NAME, None)
    v_data = metric_utils.view_data_to_metric(v_data, TEST_TIME)
    # check for gce_instance monitored resource
    mocked_labels = {
        'instance_id': 'my-instance',
        'project_id': 'my-project',
        'zone': 'us-east1',
        'pod_id': 'localhost',
        'namespace_id': 'namespace'
    }
    mock_resource = mock.Mock()
    mock_resource.get_type.return_value = 'gce_instance'
    mock_resource.get_labels.return_value = mocked_labels
    monitor_resource_mock.return_value = mock_resource
    time_series_list = exporter.create_time_series_list(v_data)
    self.assertEqual(len(time_series_list), 1)
    time_series = time_series_list[0]
    self.assertEqual(time_series.resource.type, "gce_instance")
    # Only GCE-relevant labels survive; pod/namespace ids are dropped.
    self.check_labels(
        time_series.resource.labels, {
            'instance_id': 'my-instance',
            'project_id': 'my-project',
            'zone': 'us-east1',
        })
    self.assertEqual(
        time_series.metric.type,
        "custom.googleapis.com/opencensus/my.org/views/video_size_test2")
    self.assertIsNotNone(time_series)
    # A second conversion of the same data must be stable.
    time_series_list = exporter.create_time_series_list(v_data)
    self.assertEqual(len(time_series_list), 1)
    time_series = time_series_list[0]
    self.assertEqual(
        time_series.metric.type,
        "custom.googleapis.com/opencensus/my.org/views/video_size_test2")
    # check for gke_container monitored resource
    mocked_labels = {
        'instance_id': 'my-instance',
        'project_id': 'my-project',
        'zone': 'us-east1',
        'pod_id': 'localhost',
        'cluster_name': 'cluster',
        'namespace_id': 'namespace'
    }
    mock_resource = mock.Mock()
    mock_resource.get_type.return_value = 'gke_container'
    mock_resource.get_labels.return_value = mocked_labels
    monitor_resource_mock.return_value = mock_resource
    time_series_list = exporter.create_time_series_list(v_data)
    self.assertEqual(len(time_series_list), 1)
    time_series = time_series_list[0]
    # gke_container is exported under SD's newer k8s_container type,
    # with labels renamed accordingly (zone->location, pod_id->pod_name,
    # namespace_id->namespace_name).
    self.assertEqual(time_series.resource.type, "k8s_container")
    self.check_labels(
        time_series.resource.labels, {
            'project_id': 'my-project',
            'location': 'us-east1',
            'cluster_name': 'cluster',
            'pod_name': 'localhost',
            'namespace_name': 'namespace',
        })
    self.assertEqual(
        time_series.metric.type,
        "custom.googleapis.com/opencensus/my.org/views/video_size_test2")
    self.assertIsNotNone(time_series)
    # check for aws_ec2_instance monitored resource
    mocked_labels = {
        'instance_id': 'my-instance',
        'aws_account': 'my-project',
        'region': 'us-east1',
    }
    mock_resource = mock.Mock()
    mock_resource.get_type.return_value = 'aws_ec2_instance'
    mock_resource.get_labels.return_value = mocked_labels
    monitor_resource_mock.return_value = mock_resource
    time_series_list = exporter.create_time_series_list(v_data)
    self.assertEqual(len(time_series_list), 1)
    time_series = time_series_list[0]
    self.assertEqual(time_series.resource.type, "aws_ec2_instance")
    # AWS regions are exported with the 'aws:' prefix.
    self.check_labels(
        time_series.resource.labels, {
            'instance_id': 'my-instance',
            'aws_account': 'my-project',
            'region': 'aws:us-east1',
        })
    self.assertEqual(
        time_series.metric.type,
        "custom.googleapis.com/opencensus/my.org/views/video_size_test2")
    self.assertIsNotNone(time_series)
    # check for out of box monitored resource
    mock_resource = mock.Mock()
    mock_resource.get_type.return_value = ''
    mock_resource.get_labels.return_value = mock.Mock()
    monitor_resource_mock.return_value = mock_resource
    time_series_list = exporter.create_time_series_list(v_data)
    self.assertEqual(len(time_series_list), 1)
    time_series = time_series_list[0]
    # Unknown environments fall back to the 'global' resource with no
    # resource labels.
    self.assertEqual(time_series.resource.type, 'global')
    self.check_labels(time_series.resource.labels, {})
    self.assertEqual(
        time_series.metric.type,
        "custom.googleapis.com/opencensus/my.org/views/video_size_test2")
    self.assertIsNotNone(time_series)
def test_set_default_labels(self):
    """set_default_labels stores the given mapping on the exporter."""
    exporter = stackdriver.StackdriverStatsExporter()
    labels = {'key': 'value'}
    exporter.set_default_labels(labels)
    self.assertEqual(exporter.default_labels, labels)