def test_create_batched_time_series(self, monitor_resource_mock):
    """A single converted metric batched with size 1 yields one batch of one."""
    mock_client = mock.Mock()
    vd = view_data_module.ViewData(view=VIDEO_SIZE_VIEW,
                                   start_time=TEST_TIME_STR,
                                   end_time=TEST_TIME_STR)
    vd.record(context=tag_map_module.TagMap(), value=2, timestamp=None)

    exporter = stackdriver.StackdriverStatsExporter(
        options=stackdriver.Options(project_id="project-test"),
        client=mock_client)

    metrics = [metric_utils.view_data_to_metric(vd, TEST_TIME)]
    batches = exporter.create_batched_time_series(metrics, 1)

    self.assertEqual(len(batches), 1)
    [batch] = batches
    self.assertEqual(len(batch), 1)
    [series] = batch
    self.assertEqual(
        series.metric.type,
        'custom.googleapis.com/opencensus/' + VIDEO_SIZE_VIEW_NAME)
    self.check_labels(series.metric.labels, {}, include_opencensus=True)
def test_handle_upload_no_data(self):
    """Uploading None must not touch the monitoring API."""
    mock_client = mock.Mock()
    exporter = stackdriver.StackdriverStatsExporter(
        options=stackdriver.Options(project_id="project-test"),
        client=mock_client)
    exporter.handle_upload(None)
    self.assertFalse(mock_client.create_time_series.called)
def setup_open_census():
    """Wire OpenCensus stats machinery and a Stackdriver exporter onto `app`."""
    census_stats = stats.Stats()
    app.m_response_ms = measure_module.MeasureFloat(
        "flask_response_time", "The request duration", "ms")
    # Tag keys used to slice the response-time view.
    app.key_method = tag_key_module.TagKey("method")
    app.key_status = tag_key_module.TagKey("status")
    app.key_error = tag_key_module.TagKey("error")
    app.view_manager = census_stats.view_manager
    app.stats_recorder = census_stats.stats_recorder
    response_time_view = view.View(
        "response_time",
        "The time it took to respond",
        [app.key_method, app.key_status, app.key_error],
        app.m_response_ms,
        aggregation.LastValueAggregation())
    app.exporter = stackdriver.new_stats_exporter(
        options=stackdriver.Options(project_id=os.getenv('PROJECT_ID')))
    app.view_manager.register_exporter(app.exporter)
    app.view_manager.register_view(response_time_view)
def test_stats_record_async(self):
    """Record a distribution measurement and poll SD for the descriptor."""
    # A per-process suffix keeps names unique so cached objects from
    # previous runs don't interfere.
    suffix = str(os.getpid())
    tag_key = "SampleKeyAsyncTest%s" % suffix
    measure_name = "SampleMeasureNameAsyncTest%s" % suffix
    measure_description = "SampleDescriptionAsyncTest%s" % suffix
    view_name = "SampleViewNameAsyncTest%s" % suffix
    view_description = "SampleViewDescriptionAsyncTest%s" % suffix

    key_async = tag_key_module.TagKey(tag_key)
    measure_async = measure_module.MeasureInt(
        measure_name, measure_description, "By")
    distribution_async = aggregation_module.DistributionAggregation(
        [0.0, 16.0 * MiB, 256.0 * MiB])
    view_async = view_module.View(
        view_name, view_description, [key_async], measure_async,
        distribution_async)

    stats = stats_module.Stats()
    view_manager = stats.view_manager
    stats_recorder = stats.stats_recorder

    exporter = stackdriver.new_stats_exporter(
        stackdriver.Options(project_id=PROJECT))
    view_manager.register_exporter(exporter)
    view_manager.register_view(view_async)

    # Sleep for [0, 10] milliseconds to fake work.
    time.sleep(random.randint(1, 10) / 1000.0)

    # Record the processed video size.
    tag_map = tag_map_module.TagMap()
    tag_map.insert(key_async, tag_value_module.TagValue("1200"))
    measure_map = stats_recorder.new_measurement_map()
    measure_map.measure_int_put(measure_async, 25 * MiB)
    measure_map.record(tag_map)

    @retry(wait_fixed=RETRY_WAIT_PERIOD,
           stop_max_attempt_number=RETRY_MAX_ATTEMPT)
    def get_metric_descriptors(self, exporter, view_description):
        # The descriptor may take a moment to appear server-side; retry
        # until it shows up in the listing.
        name = exporter.client.project_path(PROJECT)
        descriptors = exporter.client.list_metric_descriptors(name)
        match = next(
            (d for d in descriptors if d.description == view_description),
            None)
        self.assertIsNotNone(match)
        self.assertEqual(match.description, view_description)
        self.assertEqual(match.unit, "By")

    get_metric_descriptors(self, exporter, view_description)
def test_create_metric_descriptor(self):
    """A descriptor can be built from a distribution view."""
    exporter = stackdriver.StackdriverStatsExporter(
        options=stackdriver.Options(project_id="project-test",
                                    metric_prefix="teste"),
        client=mock.Mock())
    descriptor = exporter.create_metric_descriptor(VIDEO_SIZE_VIEW)
    self.assertIsNotNone(descriptor)
def test_constructor_param(self):
    """Options passed to the constructor are retained on the exporter."""
    project_id = 1
    labels = {'key1': 'value1'}
    exporter = stackdriver.StackdriverStatsExporter(
        options=stackdriver.Options(
            project_id=project_id,
            default_monitoring_labels=labels))
    self.assertEqual(exporter.options.project_id, project_id)
def test_on_register_view(self):
    """Registering a view creates a descriptor; a None view is a no-op."""
    mock_client = mock.Mock()
    exporter = stackdriver.StackdriverStatsExporter(
        options=stackdriver.Options(project_id="project-test"),
        client=mock_client)
    exporter.on_register_view(VIDEO_SIZE_VIEW)
    exporter.on_register_view(None)
    self.assertTrue(mock_client.create_metric_descriptor.called)
def test_export_no_data(self):
    """Exporting None must not reach the transport."""
    mock_client = mock.Mock()
    mock_transport = mock.Mock()
    exporter = stackdriver.StackdriverStatsExporter(
        options=stackdriver.Options(project_id="project-test"),
        client=mock_client,
        transport=mock_transport)
    exporter.export(None)
    self.assertFalse(exporter.transport.export.called)
def test_not_blank_project(self):
    """A non-blank project id produces a real exporter instance."""
    patched_client = mock.patch(
        ('opencensus.ext.stackdriver.stats_exporter'
         '.monitoring_v3.MetricServiceClient'),
        _Client)
    with patched_client:
        exporter_created = stackdriver.new_stats_exporter(
            stackdriver.Options(project_id=1))
    self.assertIsInstance(exporter_created,
                          stackdriver.StackdriverStatsExporter)
def test_export_empty(self, mock_stats, mock_client):
    """Check that we don't attempt to export empty metric sets."""
    mock_stats.get_metrics.return_value = []
    with MockGetExporterThread() as mget:
        exporter = stackdriver.new_stats_exporter(
            stackdriver.Options(project_id=1))
        mget.transport.step()
    # With nothing to export, neither API should have been called.
    exporter.client.create_metric_descriptor.assert_not_called()
    exporter.client.create_time_series.assert_not_called()
def test_create_timeseries_float_tagvalue(self, monitor_resource_mock):
    """A float sum aggregation converts to a single double-valued series."""
    mock_client = mock.Mock()
    exporter = stackdriver.StackdriverStatsExporter(
        options=stackdriver.Options(project_id="project-test",
                                    resource="global"),
        client=mock_client)

    stats = stats_module.stats
    view_manager = stats.view_manager
    stats_recorder = stats.stats_recorder
    # Make sure ours is the only registered exporter.
    if len(view_manager.measure_to_view_map.exporters) > 0:
        view_manager.unregister_exporter(
            view_manager.measure_to_view_map.exporters[0])
    view_manager.register_exporter(exporter)

    sum_agg = aggregation_module.SumAggregation(sum=2.2)
    view_name3 = "view-name3"
    view_manager.register_view(
        view_module.View(view_name3, "processed video size over time",
                         [FRONTEND_KEY_FLOAT], VIDEO_SIZE_MEASURE_FLOAT,
                         sum_agg))

    tag_map = tag_map_module.TagMap()
    tag_map.insert(FRONTEND_KEY_FLOAT, tag_value_module.TagValue("1200"))
    measure_map = stats_recorder.new_measurement_map()
    measure_map.measure_float_put(VIDEO_SIZE_MEASURE_FLOAT, 25 * MiB)
    measure_map.record(tag_map)

    v_data = measure_map.measure_to_view_map.get_view(view_name3, None)
    converted = metric_utils.view_data_to_metric(v_data, TEST_TIME)

    series_list = exporter.create_time_series_list(converted)
    self.assertEqual(len(series_list), 1)
    [series] = series_list
    self.assertEqual(series.metric.type,
                     "custom.googleapis.com/opencensus/view-name3")
    self.check_labels(series.metric.labels,
                      {FRONTEND_KEY_FLOAT_CLEAN: "1200"},
                      include_opencensus=True)
    self.assertIsNotNone(series.resource)

    self.assertEqual(len(series.points), 1)
    expected_value = monitoring_v3.types.TypedValue()
    # Aggregation was seeded with 2.2, then one measurement was recorded.
    expected_value.double_value = 2.2 + 25 * MiB
    self.assertEqual(series.points[0].value, expected_value)
def test_handle_upload_with_data(self, monitor_resource_mock):
    """Uploading recorded view data calls through to create_time_series."""
    mock_client = mock.Mock()
    vd = view_data_module.ViewData(view=VIDEO_SIZE_VIEW,
                                   start_time=TEST_TIME,
                                   end_time=TEST_TIME)
    vd.record(context=tag_map_module.TagMap(), value=2, timestamp=None)
    exporter = stackdriver.StackdriverStatsExporter(
        options=stackdriver.Options(project_id="project-test"),
        client=mock_client)
    exporter.handle_upload([vd])
    self.assertTrue(mock_client.create_time_series.called)
def test_singleton_with_params(self):
    """Default monitoring labels survive exporter construction."""
    labels = {'key1': 'value1'}
    patched_client = mock.patch(
        ('opencensus.ext.stackdriver.stats_exporter'
         '.monitoring_v3.MetricServiceClient'),
        _Client)
    with patched_client:
        exporter_created = stackdriver.new_stats_exporter(
            stackdriver.Options(project_id=1,
                                default_monitoring_labels=labels))
    self.assertEqual(exporter_created.default_labels, labels)
def test_create_batched_time_series_with_many(self, monitor_resource_mock):
    """Five series batched in twos come out as batches of 2, 2, 1."""
    mock_client = mock.Mock()

    # First view with three recorded tag values.
    view1 = view_module.View("view-name1", "test description", ['test'],
                             VIDEO_SIZE_MEASURE,
                             aggregation_module.LastValueAggregation())
    v_data1 = view_data_module.ViewData(view=view1,
                                        start_time=TEST_TIME_STR,
                                        end_time=TEST_TIME_STR)
    v_data1.record(context=tag_map_module.TagMap({'test': '1'}), value=7,
                   timestamp=None)
    v_data1.record(context=tag_map_module.TagMap({'test': '2'}), value=5,
                   timestamp=None)
    v_data1.record(context=tag_map_module.TagMap({'test': '3'}), value=3,
                   timestamp=None)

    # Second view with two.
    view2 = view_module.View("view-name2", "test description", ['test'],
                             VIDEO_SIZE_MEASURE,
                             aggregation_module.LastValueAggregation())
    v_data2 = view_data_module.ViewData(view=view2,
                                        start_time=TEST_TIME_STR,
                                        end_time=TEST_TIME_STR)
    v_data2.record(context=tag_map_module.TagMap({'test': '1'}), value=7,
                   timestamp=None)
    v_data2.record(context=tag_map_module.TagMap({'test': '2'}), value=5,
                   timestamp=None)

    metrics = [metric_utils.view_data_to_metric(vd, TEST_TIME)
               for vd in (v_data1, v_data2)]

    exporter = stackdriver.StackdriverStatsExporter(
        options=stackdriver.Options(project_id="project-test"),
        client=mock_client)
    batches = exporter.create_batched_time_series(metrics, 2)

    self.assertEqual(len(batches), 3)
    [batch1, batch2, batch3] = batches
    self.assertEqual(len(batch1), 2)
    self.assertEqual(len(batch2), 2)
    self.assertEqual(len(batch3), 1)
def test_stats_record_sync(self):
    """Record a distribution measurement and export it synchronously.

    Exports via export_metrics() against the real monitoring client,
    then verifies the metric descriptor appears in Stackdriver.
    """
    # Use a per-process suffix so repeated runs don't collide with
    # cached/already-registered objects.  Fixed: this previously used
    # os.getgid(), which is the same for every run on a machine and so
    # provided no uniqueness; os.getpid() matches the async twin test.
    suffix = str(os.getpid())
    tag_key = "SampleKeySyncTest%s" % suffix
    measure_name = "SampleMeasureNameSyncTest%s" % suffix
    measure_description = "SampleDescriptionSyncTest%s" % suffix
    view_name = "SampleViewNameSyncTest%s" % suffix
    view_description = "SampleViewDescriptionSyncTest%s" % suffix

    frontend_key = tag_key_module.TagKey(tag_key)
    video_size_measure = measure_module.MeasureInt(
        measure_name, measure_description, "By")
    video_size_distribution = aggregation_module.DistributionAggregation(
        [0.0, 16.0 * MiB, 256.0 * MiB])
    video_size_view = view_module.View(
        view_name, view_description, [frontend_key],
        video_size_measure, video_size_distribution)

    stats = stats_module.stats
    view_manager = stats.view_manager
    stats_recorder = stats.stats_recorder

    client = monitoring_v3.MetricServiceClient()
    exporter = stackdriver.StackdriverStatsExporter(
        options=stackdriver.Options(project_id=PROJECT), client=client)
    view_manager.register_exporter(exporter)
    view_manager.register_view(video_size_view)

    # Sleep for [0, 10] milliseconds to fake work.
    time.sleep(random.randint(1, 10) / 1000.0)

    # Record the processed video size.
    tag_map = tag_map_module.TagMap()
    tag_map.insert(frontend_key, tag_value_module.TagValue("1200"))
    measure_map = stats_recorder.new_measurement_map()
    measure_map.measure_int_put(video_size_measure, 25 * MiB)
    measure_map.record(tag_map)

    exporter.export_metrics(stats_module.stats.get_metrics())

    # Sleep for [0, 10] milliseconds to fake wait.
    time.sleep(random.randint(1, 10) / 1000.0)

    self.check_sd_md(exporter, view_description)
def test_export_with_data(self):
    """Exporting non-empty view data hands it to the transport."""
    mock_client = mock.Mock()
    mock_transport = mock.Mock()
    vd = view_data_module.ViewData(view=VIDEO_SIZE_VIEW,
                                   start_time=TEST_TIME,
                                   end_time=TEST_TIME)
    exporter = stackdriver.StackdriverStatsExporter(
        options=stackdriver.Options(project_id="project-test"),
        client=mock_client,
        transport=mock_transport)
    exporter.export([vd])
    self.assertTrue(exporter.transport.export.called)
def test_create_metric_descriptor_count(self):
    """A descriptor can be built from a count-aggregated view."""
    count_view = view_module.View(
        "view-count", "processed video size over time",
        [FRONTEND_KEY], VIDEO_SIZE_MEASURE,
        aggregation_module.CountAggregation(count=2))
    exporter = stackdriver.StackdriverStatsExporter(
        options=stackdriver.Options(project_id="project-test",
                                    metric_prefix="teste"),
        client=mock.Mock())
    descriptor = exporter.create_metric_descriptor(count_view)
    self.assertIsNotNone(descriptor)
def test_stats_record_async(self):
    """Record a measurement and let the background transport export it."""
    # A per-process suffix keeps names unique so cached objects from
    # previous runs don't interfere.
    suffix = str(os.getpid())
    tag_key = "SampleKeyAsyncTest%s" % suffix
    measure_name = "SampleMeasureNameAsyncTest%s" % suffix
    measure_description = "SampleDescriptionAsyncTest%s" % suffix
    view_name = "SampleViewNameAsyncTest%s" % suffix
    view_description = "SampleViewDescriptionAsyncTest%s" % suffix

    key_async = tag_key_module.TagKey(tag_key)
    measure_async = measure_module.MeasureInt(
        measure_name, measure_description, "By")
    distribution_async = aggregation_module.DistributionAggregation(
        [0.0, 16.0 * MiB, 256.0 * MiB])
    view_async = view_module.View(
        view_name, view_description, [key_async], measure_async,
        distribution_async)

    stats = stats_module.stats
    view_manager = stats.view_manager
    stats_recorder = stats.stats_recorder

    exporter = stackdriver.new_stats_exporter(
        stackdriver.Options(project_id=PROJECT))
    view_manager.register_exporter(exporter)
    view_manager.register_view(view_async)

    # Sleep for [0, 10] milliseconds to fake work.
    time.sleep(random.randint(1, 10) / 1000.0)

    # Record the processed video size.
    tag_map = tag_map_module.TagMap()
    tag_map.insert(key_async, tag_value_module.TagValue("1200"))
    measure_map = stats_recorder.new_measurement_map()
    measure_map.measure_int_put(measure_async, 25 * MiB)
    measure_map.record(tag_map)

    # Give the exporter thread enough time to export exactly once
    time.sleep(transport.DEFAULT_INTERVAL * 1.5)

    self.check_sd_md(exporter, view_description)
def test_create_metric_descriptor_sum_float(self):
    """A descriptor can be built from a float sum-aggregated view."""
    sum_float_view = view_module.View(
        "view-sum-float", "processed video size over time",
        [FRONTEND_KEY_FLOAT], VIDEO_SIZE_MEASURE_FLOAT,
        aggregation_module.SumAggregation(sum=2))
    exporter = stackdriver.StackdriverStatsExporter(
        options=stackdriver.Options(project_id="project-test",
                                    metric_prefix="teste"),
        client=mock.Mock())
    descriptor = exporter.create_metric_descriptor(sum_float_view)
    self.assertIsNotNone(descriptor)
def test_create_metric_descriptor_base(self):
    """BaseAggregation is not a concrete aggregation, so conversion raises."""
    base_view = view_module.View(
        "view-base", "processed video size over time",
        [FRONTEND_KEY], VIDEO_SIZE_MEASURE,
        aggregation_module.BaseAggregation())
    exporter = stackdriver.StackdriverStatsExporter(
        options=stackdriver.Options(project_id="project-test",
                                    metric_prefix="teste"),
        client=mock.Mock())
    self.assertRaises(Exception,
                      exporter.create_metric_descriptor,
                      base_view)
def test_create_metric_descriptor_last_value_float(self):
    """A descriptor can be built from a float last-value view."""
    last_value_view = view_module.View(
        "view-base", "processed video size over time",
        [FRONTEND_KEY], VIDEO_SIZE_MEASURE_FLOAT,
        aggregation_module.LastValueAggregation())
    exporter = stackdriver.StackdriverStatsExporter(
        options=stackdriver.Options(project_id="project-test",
                                    metric_prefix="teste"),
        client=mock.Mock())
    descriptor = exporter.create_metric_descriptor(last_value_view)
    self.assertIsNotNone(descriptor)
def test_get_metric_descriptor_bad_type(self):
    """An unknown OC descriptor type must be rejected with TypeError."""
    exporter = stackdriver.StackdriverStatsExporter(
        options=stackdriver.Options(project_id='project_id'),
        client=mock.Mock())
    # Need a valid type to create the descriptor; corrupt it afterwards.
    bad_type_oc_md = metric_descriptor.MetricDescriptor(
        name='name',
        description='description',
        unit='unit',
        type_=metric_descriptor.MetricDescriptorType.GAUGE_INT64,
        label_keys=[label_key.LabelKey('key', 'description')])
    bad_type_oc_md._type = 100
    with self.assertRaises(TypeError):
        exporter.get_metric_descriptor(bad_type_oc_md)
def test_client_info_user_agent(self):
    """Check that the monitoring client sets a user agent.

    The user agent should include the library version. Note that this
    assumes MetricServiceClient calls ClientInfo.to_user_agent to
    attach the user agent as metadata to metric service API calls.
    """
    patched_client = mock.patch(
        'opencensus.ext.stackdriver.stats_exporter.monitoring_v3'
        '.MetricServiceClient',
        _Client)
    with patched_client:
        exporter = stackdriver.new_stats_exporter(
            stackdriver.Options(project_id=1))
    self.assertIn(stackdriver.get_user_agent_slug(),
                  exporter.client.client_info.to_user_agent())
def test_export_single_metric(self, mock_stats, mock_client):
    """Check that we can export a set of a single metric."""
    dt = datetime(2019, 3, 20, 21, 34, 0, 537954)
    ts = [
        time_series.TimeSeries(
            label_values=[label_value.LabelValue('val')],
            points=[point.Point(value=value.ValueLong(value=123),
                                timestamp=dt)],
            start_timestamp=utils.to_iso_str(dt))
    ]
    desc = metric_descriptor.MetricDescriptor(
        name='name2',
        description='description2',
        unit='unit2',
        type_=metric_descriptor.MetricDescriptorType.GAUGE_INT64,
        label_keys=[label_key.LabelKey('key', 'description')])
    mock_stats.get_metrics.return_value = [
        metric.Metric(descriptor=desc, time_series=ts)
    ]

    with MockGetExporterThread() as mget:
        exporter = stackdriver.new_stats_exporter(
            stackdriver.Options(project_id=1))
        mget.transport.step()

    exporter.client.create_metric_descriptor.assert_called()
    self.assertEqual(
        exporter.client.create_metric_descriptor.call_count, 1)
    # Second positional argument is the converted SD descriptor.
    md_call_arg = exporter.client.create_metric_descriptor.call_args[0][1]
    self.assertEqual(
        md_call_arg.metric_kind,
        monitoring_v3.enums.MetricDescriptor.MetricKind.GAUGE)
    self.assertEqual(
        md_call_arg.value_type,
        monitoring_v3.enums.MetricDescriptor.ValueType.INT64)

    exporter.client.create_time_series.assert_called()
    self.assertEqual(exporter.client.create_time_series.call_count, 1)
    ts_call_arg = exporter.client.create_time_series.call_args[0][1]
    self.assertEqual(len(ts_call_arg), 1)
    self.assertEqual(len(ts_call_arg[0].points), 1)
    self.assertEqual(ts_call_arg[0].points[0].value.int64_value, 123)
def main():
    """Record 100 fake latency samples, one per second, to Stackdriver."""
    # Enable metrics
    exporter = stackdriver.new_stats_exporter(
        stackdriver.Options(project_id=project_id))
    view_manager.register_exporter(exporter)
    view_manager.register_view(latency_view)

    mmap = stats_recorder.new_measurement_map()
    tmap = tag_map_module.TagMap()
    for i in range(100):
        ms = random.random() * 5 * 1000
        print("Latency {0}:{1}".format(i, ms))
        mmap.measure_float_put(m_latency_ms, ms)
        mmap.record(tmap)
        time.sleep(1)

    print("Done recording metrics")
def test_get_metric_descriptor_custom_prefix(self):
    """A configured metric_prefix shows up in the SD type and name."""
    exporter = stackdriver.StackdriverStatsExporter(
        options=stackdriver.Options(
            default_monitoring_labels={'dk': 'dd'},
            metric_prefix='metric_prefix',
            project_id='project_id'),
        client=mock.Mock())
    oc_md = metric_descriptor.MetricDescriptor(
        name='name',
        description='description',
        unit='unit',
        type_=metric_descriptor.MetricDescriptorType.GAUGE_INT64,
        label_keys=[label_key.LabelKey('ck', 'cd')])
    sd_md = exporter.get_metric_descriptor(oc_md)
    self.assertIn('metric_prefix', sd_md.type)
    self.assertIn('metric_prefix', sd_md.name)
def setup_create_timeseries_test(self):
    """Build a fresh Stats stack with a mock-backed exporter registered.

    Returns (view_manager, stats_recorder, exporter).
    """
    mock_client = mock.Mock()
    execution_context.clear()
    exporter = stackdriver.StackdriverStatsExporter(
        options=stackdriver.Options(project_id="project-test",
                                    resource="global"),
        client=mock_client)

    stats = stats_module.Stats()
    view_manager = stats.view_manager
    stats_recorder = stats.stats_recorder
    # Make sure ours is the only registered exporter.
    if len(view_manager.measure_to_view_map.exporters) > 0:
        view_manager.unregister_exporter(
            view_manager.measure_to_view_map.exporters[0])
    view_manager.register_exporter(exporter)

    return view_manager, stats_recorder, exporter
def test_register_metric_descriptor(self):
    """Registering the same descriptor twice only hits the API once."""
    exporter = stackdriver.StackdriverStatsExporter(
        options=stackdriver.Options(metric_prefix='metric_prefix',
                                    project_id='project_id'),
        client=mock.Mock())
    oc_md = metric_descriptor.MetricDescriptor(
        name='name',
        description='description',
        unit='unit',
        type_=metric_descriptor.MetricDescriptorType.GAUGE_INT64,
        label_keys=[label_key.LabelKey('key', 'description')])

    exporter.register_metric_descriptor(oc_md)
    self.assertEqual(
        exporter.client.create_metric_descriptor.call_count, 1)
    # Second registration is served from the exporter's cache.
    exporter.register_metric_descriptor(oc_md)
    self.assertEqual(
        exporter.client.create_metric_descriptor.call_count, 1)
def test_get_metric_descriptor(self):
    """Converting an OC descriptor yields the right SD kind and type."""
    exporter = stackdriver.StackdriverStatsExporter(
        options=stackdriver.Options(
            default_monitoring_labels={'dk': 'dd'},
            project_id='project_id'),
        client=mock.Mock())
    oc_md = metric_descriptor.MetricDescriptor(
        name='name',
        description='description',
        unit='unit',
        type_=metric_descriptor.MetricDescriptorType.GAUGE_INT64,
        label_keys=[label_key.LabelKey('ck', 'cd')])
    sd_md = exporter.get_metric_descriptor(oc_md)
    self.assertEqual(
        sd_md.metric_kind,
        monitoring_v3.enums.MetricDescriptor.MetricKind.GAUGE)
    self.assertEqual(
        sd_md.value_type,
        monitoring_v3.enums.MetricDescriptor.ValueType.INT64)
    self.assertIsInstance(sd_md, monitoring_v3.types.MetricDescriptor)
    # Conversion alone must not register anything with the API.
    exporter.client.create_metric_descriptor.assert_not_called()
# Flask app wiring: attach the OpenCensus view manager/recorder to `app`,
# register the response-time view with a Stackdriver exporter, install the
# OpenCensusFlask middleware (with its own exporter), expose a single route,
# and run the dev server when executed directly.
# NOTE(review): the leading `app.*` statements appear to be the tail of
# setup_open_census whose `def` is outside this chunk — left untouched;
# confirm indentation against the full file before reformatting.
app.view_manager = stats_stats.view_manager app.stats_recorder = stats_stats.stats_recorder response_time_view = view.View( "response_time", "The time it took to respond", [app.key_method, app.key_status, app.key_error], app.m_response_ms, aggregation.LastValueAggregation()) app.exporter = stackdriver.new_stats_exporter(options=stackdriver.Options( project_id=os.getenv('PROJECT_ID'))) app.view_manager.register_exporter(app.exporter) app.view_manager.register_view(response_time_view) '''A more complex flask app making use of middleware and the open census lib to measure metrics in stackdriver''' middleware = OpenCensusFlask( app, exporter=stackdriver.new_stats_exporter(options=stackdriver.Options( project_id=os.getenv('PROJECT_ID')))) @app.route('/', methods=["GET", "POST"]) @export_response_time def hello(): return 'Hello world!' if __name__ == '__main__': app.run(host='localhost', port=8080)