def test_create_timeseries_last_value_float_tagvalue(
        self, monitor_resource_mock):
    """Exported LastValue series should carry the custom metric prefix.

    Registers a LastValue view, records one measurement, and checks that
    create_time_series_list applies the "kubernetes.io/myorg" prefix to
    the exported metric type.
    """
    client = mock.Mock()
    option = stackdriver.Options(project_id="project-test",
                                 resource="global")
    exporter = stackdriver.StackdriverStatsExporter(options=option,
                                                    client=client)

    stats = stats_module.Stats()
    view_manager = stats.view_manager
    stats_recorder = stats.stats_recorder

    # Start from a clean registration so earlier tests don't leak an
    # exporter into this one.
    if len(view_manager.measure_to_view_map.exporters) > 0:
        view_manager.unregister_exporter(
            view_manager.measure_to_view_map.exporters[0])
    view_manager.register_exporter(exporter)

    agg_1 = aggregation_module.LastValueAggregation(value=2)
    view_name1 = "view-name1"
    new_view1 = view_module.View(view_name1,
                                 "processed video size over time",
                                 [FRONTEND_KEY_FLOAT],
                                 VIDEO_SIZE_MEASURE_FLOAT, agg_1)
    view_manager.register_view(new_view1)

    # NOTE(review): the view is keyed on FRONTEND_KEY_FLOAT /
    # VIDEO_SIZE_MEASURE_FLOAT but the recording below uses the *_INT
    # key and the int measure — presumably intentional to exercise the
    # mismatch path; confirm before "fixing".
    tag_value_int = tag_value_module.TagValue("Abc")
    tag_map = tag_map_module.TagMap()
    tag_map.insert(FRONTEND_KEY_INT, tag_value_int)
    measure_map = stats_recorder.new_measurement_map()
    measure_map.measure_int_put(VIDEO_SIZE_MEASURE, 25 * MiB)
    measure_map.record(tag_map)

    v_data = measure_map.measure_to_view_map.get_view(view_name1, None)

    time_series = exporter.create_time_series_list(v_data, "global",
                                                   "kubernetes.io/myorg")
    # assertEquals is a deprecated alias; use assertEqual.
    self.assertEqual(time_series.metric.type,
                     "kubernetes.io/myorg/view-name1")
    self.assertIsNotNone(time_series)
def main():
    """Record 100 fake latency samples and export them to Stackdriver."""
    # Enable metrics
    exporter = stackdriver_exporter.new_stats_exporter(
        stackdriver_exporter.Options(project_id=project_id))
    view_manager.register_exporter(exporter)
    view_manager.register_view(latency_view)

    measurement_map = stats_recorder.new_measurement_map()
    empty_tags = tag_map_module.TagMap()

    for idx in range(100):
        latency_ms = random.random() * 5 * 1000
        print("Latency {0}:{1}".format(idx, latency_ms))
        measurement_map.measure_float_put(m_latency_ms, latency_ms)
        measurement_map.record(empty_tags)
        time.sleep(1)

    print("Done recording metrics")
def test_create_timeseries_float_tagvalue(self, monitor_resource_mock):
    """A float measurement under a Sum view should export with the
    default custom.googleapis.com/opencensus metric prefix."""
    client = mock.Mock()
    option = stackdriver.Options(project_id="project-test",
                                 resource="global")
    exporter = stackdriver.StackdriverStatsExporter(options=option,
                                                    client=client)

    stats = stats_module.Stats()
    view_manager = stats.view_manager
    stats_recorder = stats.stats_recorder

    # Drop any exporter registered by a previous test.
    if len(view_manager.measure_to_view_map.exporters) > 0:
        view_manager.unregister_exporter(
            view_manager.measure_to_view_map.exporters[0])
    view_manager.register_exporter(exporter)

    agg_2 = aggregation_module.SumAggregation(sum=2.2)
    view_name2 = "view-name2"
    new_view2 = view_module.View(view_name2,
                                 "processed video size over time",
                                 [FRONTEND_KEY_FLOAT],
                                 VIDEO_SIZE_MEASURE_FLOAT, agg_2)
    view_manager.register_view(new_view2)

    tag_value_float = tag_value_module.TagValue("1200")
    tag_map = tag_map_module.TagMap()
    tag_map.insert(FRONTEND_KEY_FLOAT, tag_value_float)
    measure_map = stats_recorder.new_measurement_map()
    measure_map.measure_float_put(VIDEO_SIZE_MEASURE_FLOAT, 25 * MiB)
    measure_map.record(tag_map)

    v_data = measure_map.measure_to_view_map.get_view(view_name2, None)

    # Empty metric prefix -> the default OpenCensus prefix is applied.
    time_series = exporter.create_time_series_list(v_data, "global", "")
    # assertEquals is a deprecated alias; use assertEqual.
    self.assertEqual(time_series.metric.type,
                     "custom.googleapis.com/opencensus/view-name2")
    self.assertIsNotNone(time_series)
def test_create_timeseries(self, monitor_resource_mock):
    """create_time_series_list should honor a custom metric prefix and
    fall back to the OpenCensus prefix and "global" resource otherwise."""
    client = mock.Mock()
    option = stackdriver.Options(project_id="project-test",
                                 resource="global")
    exporter = stackdriver.StackdriverStatsExporter(options=option,
                                                    client=client)

    stats = stats_module.Stats()
    view_manager = stats.view_manager
    stats_recorder = stats.stats_recorder

    # Drop any exporter registered by a previous test.
    if len(view_manager.measure_to_view_map.exporters) > 0:
        view_manager.unregister_exporter(
            view_manager.measure_to_view_map.exporters[0])
    view_manager.register_exporter(exporter)
    view_manager.register_view(VIDEO_SIZE_VIEW)

    tag_value = tag_value_module.TagValue("1200")
    tag_map = tag_map_module.TagMap()
    tag_map.insert(FRONTEND_KEY, tag_value)
    measure_map = stats_recorder.new_measurement_map()
    measure_map.measure_int_put(VIDEO_SIZE_MEASURE, 25 * MiB)
    measure_map.record(tag_map)

    v_data = measure_map.measure_to_view_map.get_view(
        VIDEO_SIZE_VIEW_NAME, None)

    # No resource / no prefix: defaults apply.
    time_series = exporter.create_time_series_list(v_data, "", "")
    # assertEquals is a deprecated alias; use assertEqual.
    self.assertEqual(time_series.resource.type, "global")
    self.assertEqual(
        time_series.metric.type,
        "custom.googleapis.com/opencensus/my.org/views/video_size_test2")
    self.assertIsNotNone(time_series)

    # A custom prefix replaces the default OpenCensus one.
    time_series = exporter.create_time_series_list(v_data, "global",
                                                   "kubernetes.io/myorg")
    self.assertEqual(time_series.metric.type,
                     "kubernetes.io/myorg/my.org/views/video_size_test2")
    self.assertIsNotNone(time_series)
def setup_create_timeseries_test(self):
    """Build a fresh Stats stack wired to a mock-backed exporter.

    Returns a (view_manager, stats_recorder, exporter) triple for the
    create-timeseries tests to use.
    """
    mock_client = mock.Mock()
    execution_context.clear()
    exporter = stackdriver.StackdriverStatsExporter(
        options=stackdriver.Options(project_id="project-test",
                                    resource="global"),
        client=mock_client)

    stats = stats_module.Stats()
    view_manager = stats.view_manager
    stats_recorder = stats.stats_recorder

    # Make sure no exporter from a previous test is still registered.
    registered = view_manager.measure_to_view_map.exporters
    if len(registered) > 0:
        view_manager.unregister_exporter(registered[0])
    view_manager.register_exporter(exporter)

    return view_manager, stats_recorder, exporter
def test_create_batched_time_series_with_many(self, monitor_resource_mock):
    """Five series batched two-at-a-time yield three batches (2, 2, 1)."""
    client = mock.Mock()

    # First view with 3 recorded tag values.
    name_one = "view-name1"
    view_one = view_module.View(name_one, "test description", ['test'],
                                VIDEO_SIZE_MEASURE,
                                aggregation_module.LastValueAggregation())
    data_one = view_data_module.ViewData(view=view_one,
                                         start_time=TEST_TIME,
                                         end_time=TEST_TIME)
    for tag, val in (('1', 7), ('2', 5), ('3', 3)):
        data_one.record(context=tag_map_module.TagMap({'test': tag}),
                        value=val, timestamp=None)

    # Second view with 2 recorded tag values.
    name_two = "view-name2"
    view_two = view_module.View(name_two, "test description", ['test'],
                                VIDEO_SIZE_MEASURE,
                                aggregation_module.LastValueAggregation())
    data_two = view_data_module.ViewData(view=view_two,
                                         start_time=TEST_TIME,
                                         end_time=TEST_TIME)
    for tag, val in (('1', 7), ('2', 5)):
        data_two.record(context=tag_map_module.TagMap({'test': tag}),
                        value=val, timestamp=None)

    exporter = stackdriver.StackdriverStatsExporter(
        options=stackdriver.Options(project_id="project-test"),
        client=client)

    batches = exporter.create_batched_time_series([data_one, data_two], 2)

    self.assertEqual(len(batches), 3)
    first, second, third = batches
    self.assertEqual(len(first), 2)
    self.assertEqual(len(second), 2)
    self.assertEqual(len(third), 1)
def test_create_batched_time_series(self, monitor_resource_mock):
    """One view datum batches into exactly one single-element batch."""
    client = mock.Mock()
    datum = view_data_module.ViewData(view=VIDEO_SIZE_VIEW,
                                      start_time=TEST_TIME,
                                      end_time=TEST_TIME)
    datum.record(context=tag_map_module.TagMap(), value=2, timestamp=None)

    exporter = stackdriver.StackdriverStatsExporter(
        options=stackdriver.Options(project_id="project-test"),
        client=client)

    batches = exporter.create_batched_time_series([datum], 1)
    self.assertEqual(len(batches), 1)
    [only_batch] = batches
    self.assertEqual(len(only_batch), 1)
    [series] = only_batch
    self.assertEqual(
        series.metric.type,
        'custom.googleapis.com/opencensus/' + VIDEO_SIZE_VIEW_NAME)
    self.assertCorrectLabels(series.metric.labels, {},
                             include_opencensus=True)
def test_default_monitoring_labels(self):
    """Options should expose the default_monitoring_labels it was given."""
    labels = {'key1': 'value1'}
    opts = stackdriver.Options(default_monitoring_labels=labels)
    self.assertEqual(opts.default_monitoring_labels, labels)
def test_options_parameters(self):
    """Options should store the project_id and metric_prefix it was given."""
    opts = stackdriver.Options(project_id="project-id",
                               metric_prefix="sample")
    self.assertEqual(opts.project_id, "project-id")
    self.assertEqual(opts.metric_prefix, "sample")
def test_default_monitoring_labels_blank(self):
    """When not supplied, default_monitoring_labels stays None."""
    opts = stackdriver.Options()
    self.assertIsNone(opts.default_monitoring_labels)
def test_options_blank(self):
    """A bare Options object defaults project_id and resource to ''."""
    opts = stackdriver.Options()
    self.assertEqual(opts.project_id, "")
    self.assertEqual(opts.resource, "")
def test_stats_record_sync(self):
    """End-to-end check: record a measurement synchronously and verify the
    metric descriptor shows up via the live Monitoring API.

    NOTE(review): this test does real network I/O against project
    ``PROJECT`` (monitoring_v3.MetricServiceClient) — it is an
    integration test, not a unit test.
    """
    # We are using sufix in order to prevent cached objects
    sufix = str(os.getgid())
    tag_key = "SampleKeySyncTest%s" % sufix
    measure_name = "SampleMeasureNameSyncTest%s" % sufix
    measure_description = "SampleDescriptionSyncTest%s" % sufix
    view_name = "SampleViewNameSyncTest%s" % sufix
    view_description = "SampleViewDescriptionSyncTest%s" % sufix

    # Build a view with a distribution aggregation over video sizes.
    FRONTEND_KEY = tag_key_module.TagKey(tag_key)
    VIDEO_SIZE_MEASURE = measure_module.MeasureInt(
        measure_name, measure_description, "By")
    VIDEO_SIZE_VIEW_NAME = view_name
    VIDEO_SIZE_DISTRIBUTION = aggregation_module.DistributionAggregation(
        [0.0, 16.0 * MiB, 256.0 * MiB])
    VIDEO_SIZE_VIEW = view_module.View(VIDEO_SIZE_VIEW_NAME,
                                       view_description, [FRONTEND_KEY],
                                       VIDEO_SIZE_MEASURE,
                                       VIDEO_SIZE_DISTRIBUTION)

    stats = stats_module.Stats()
    view_manager = stats.view_manager
    stats_recorder = stats.stats_recorder

    # Synchronous transport: export happens on record, not in background.
    client = monitoring_v3.MetricServiceClient()
    exporter = stackdriver.StackdriverStatsExporter(
        options=stackdriver.Options(project_id=PROJECT),
        client=client,
        transport=sync.SyncTransport)
    view_manager.register_exporter(exporter)

    # Register view.
    view_manager.register_view(VIDEO_SIZE_VIEW)

    # Sleep for [0, 10] milliseconds to fake work.
    time.sleep(random.randint(1, 10) / 1000.0)

    # Process video.
    # Record the processed video size.
    tag_value = tag_value_module.TagValue("1200")
    tag_map = tag_map_module.TagMap()
    tag_map.insert(FRONTEND_KEY, tag_value)
    measure_map = stats_recorder.new_measurement_map()
    measure_map.measure_int_put(VIDEO_SIZE_MEASURE, 25 * MiB)
    measure_map.record(tag_map)

    # Sleep for [0, 10] milliseconds to fake wait.
    time.sleep(random.randint(1, 10) / 1000.0)

    # Retried because descriptor creation on the backend is eventually
    # consistent; polls until the descriptor is visible or attempts run out.
    @retry(wait_fixed=RETRY_WAIT_PERIOD,
           stop_max_attempt_number=RETRY_MAX_ATTEMPT)
    def get_metric_descriptors(self, exporter, view_description):
        # Find the descriptor we just exported by its unique description.
        name = exporter.client.project_path(PROJECT)
        list_metrics_descriptors = exporter.client.list_metric_descriptors(
            name)
        element = next((element for element in list_metrics_descriptors
                        if element.description == view_description), None)
        self.assertIsNotNone(element)
        self.assertEqual(element.description, view_description)
        self.assertEqual(element.unit, "By")

    get_metric_descriptors(self, exporter, view_description)
def test_create_timeseries_with_resource(self, monitor_resource_mock):
    """Monitored-resource detection should map each mocked environment
    (gce_instance, gke_container, aws_ec2_instance, none) onto the
    matching Stackdriver resource type and labels."""
    client = mock.Mock()
    option = stackdriver.Options(project_id="project-test",
                                 resource="global")
    exporter = stackdriver.StackdriverStatsExporter(options=option,
                                                    client=client)

    stats = stats_module.Stats()
    view_manager = stats.view_manager
    stats_recorder = stats.stats_recorder

    # Drop any exporter registered by a previous test.
    if len(view_manager.measure_to_view_map.exporters) > 0:
        view_manager.unregister_exporter(
            view_manager.measure_to_view_map.exporters[0])
    view_manager.register_exporter(exporter)
    view_manager.register_view(VIDEO_SIZE_VIEW)

    tag_value = tag_value_module.TagValue("1200")
    tag_map = tag_map_module.TagMap()
    tag_map.insert(FRONTEND_KEY, tag_value)
    measure_map = stats_recorder.new_measurement_map()
    measure_map.measure_int_put(VIDEO_SIZE_MEASURE, 25 * MiB)
    measure_map.record(tag_map)

    v_data = measure_map.measure_to_view_map.get_view(
        VIDEO_SIZE_VIEW_NAME, None)

    # check for gce_instance monitored resource
    mocked_labels = {
        'instance_id': 'my-instance',
        'project_id': 'my-project',
        'zone': 'us-east1',
        'pod_id': 'localhost',
        'namespace_id': 'namespace'
    }
    monitor_resource_mock.return_value = mock.Mock()
    monitor_resource_mock.return_value.resource_type = 'gce_instance'
    monitor_resource_mock.return_value.get_resource_labels.return_value =\
        mocked_labels

    time_series = exporter.create_time_series_list(v_data, "", "")
    # assertEquals is a deprecated alias; use assertEqual throughout.
    self.assertEqual(time_series.resource.type, "gce_instance")
    self.assertEqual(
        time_series.metric.type,
        "custom.googleapis.com/opencensus/my.org/views/video_size_test2")
    self.assertIsNotNone(time_series)
    self.assertEqual(time_series.resource.labels['instance_id'],
                     'my-instance')
    self.assertEqual(time_series.resource.labels['project_id'],
                     'my-project')
    self.assertEqual(time_series.resource.labels['zone'], 'us-east1')

    # Passing an explicit resource string keeps the metric prefix default.
    time_series = exporter.create_time_series_list(v_data, "global", "")
    self.assertEqual(
        time_series.metric.type,
        "custom.googleapis.com/opencensus/my.org/views/video_size_test2")
    self.assertIsNotNone(time_series)

    # check for gke_container monitored resource
    mocked_labels = {
        'instance_id': 'my-instance',
        'project_id': 'my-project',
        'zone': 'us-east1',
        'pod_id': 'localhost',
        'cluster_name': 'cluster',
        'namespace_id': 'namespace'
    }
    monitor_resource_mock.return_value = mock.Mock()
    monitor_resource_mock.return_value.resource_type = 'gke_container'
    monitor_resource_mock.return_value.get_resource_labels.return_value =\
        mocked_labels

    time_series = exporter.create_time_series_list(v_data, "", "")
    # gke_container is exported under the k8s_container resource type,
    # with zone/pod_id/namespace_id remapped to the k8s label names.
    self.assertEqual(time_series.resource.type, "k8s_container")
    self.assertEqual(
        time_series.metric.type,
        "custom.googleapis.com/opencensus/my.org/views/video_size_test2")
    self.assertIsNotNone(time_series)
    self.assertEqual(time_series.resource.labels['project_id'],
                     'my-project')
    self.assertEqual(time_series.resource.labels['location'], 'us-east1')
    self.assertEqual(time_series.resource.labels['pod_name'], 'localhost')
    self.assertEqual(time_series.resource.labels['namespace_name'],
                     'namespace')
    self.assertEqual(time_series.resource.labels['container_name'], '')

    # check for aws_ec2_instance monitored resource
    mocked_labels = {
        'instance_id': 'my-instance',
        'aws_account': 'my-project',
        'region': 'us-east1',
    }
    monitor_resource_mock.return_value = mock.Mock()
    monitor_resource_mock.return_value.resource_type = 'aws_ec2_instance'
    monitor_resource_mock.return_value.get_resource_labels.return_value =\
        mocked_labels

    time_series = exporter.create_time_series_list(v_data, "", "")
    self.assertEqual(time_series.resource.type, "aws_ec2_instance")
    self.assertEqual(
        time_series.metric.type,
        "custom.googleapis.com/opencensus/my.org/views/video_size_test2")
    self.assertIsNotNone(time_series)
    self.assertEqual(time_series.resource.labels['instance_id'],
                     'my-instance')
    self.assertEqual(time_series.resource.labels['aws_account'],
                     'my-project')
    # The exporter prefixes AWS regions with "aws:".
    self.assertEqual(time_series.resource.labels['region'],
                     'aws:us-east1')

    # check for out of box monitored resource
    monitor_resource_mock.return_value = mock.Mock()
    monitor_resource_mock.return_value.resource_type = ''
    monitor_resource_mock.return_value.get_resource_labels.return_value =\
        mock.Mock()

    time_series = exporter.create_time_series_list(v_data, "", "")
    # Unknown environment falls back to the "global" resource.
    self.assertEqual(time_series.resource.type, 'global')
    self.assertEqual(
        time_series.metric.type,
        "custom.googleapis.com/opencensus/my.org/views/video_size_test2")
    self.assertIsNotNone(time_series)
# NOTE(review): this span appears to be the interior of an example script
# (its beginning — measure/key/constant definitions — is outside this view);
# only comments are added here.

# Distribution view over processed video sizes, keyed by the frontend tag.
VIDEO_SIZE_VIEW = view_module.View(
    VIDEO_SIZE_VIEW_NAME, "processed video size over time",
    [FRONTEND_KEY], VIDEO_SIZE_MEASURE, VIDEO_SIZE_DISTRIBUTION)
stats = stats_module.Stats()
view_manager = stats.view_manager
stats_recorder = stats.stats_recorder

# Resolve the project from Application Default Credentials; fail with a
# actionable message when no credentials are configured.
try:
    _, project_id = google.auth.default()
except google.auth.exceptions.DefaultCredentialsError:
    raise ValueError("Couldn't find Google Cloud credentials, set the "
                     "project ID with 'gcloud set project'")

exporter = stackdriver.new_stats_exporter(
    stackdriver.Options(project_id=project_id))
view_manager.register_exporter(exporter)

# Register view.
view_manager.register_view(VIDEO_SIZE_VIEW)

# Sleep for [0, 10] milliseconds to fake work.
time.sleep(random.randint(1, 10) / 1000.0)

# Process video.
# Record the processed video size.
tag_value = tag_value_module.TagValue(str(1200))
tag_map = tag_map_module.TagMap()
tag_map.insert(FRONTEND_KEY, tag_value)
measure_map = stats_recorder.new_measurement_map()
measure_map.measure_int_put(VIDEO_SIZE_MEASURE, 25 * MiB)
def test_singleton_with_params(self):
    """new_stats_exporter should pass default_monitoring_labels through."""
    labels = {'key1': 'value1'}
    created = stackdriver.new_stats_exporter(
        stackdriver.Options(project_id=1,
                            default_monitoring_labels=labels))
    self.assertEqual(created.default_labels, labels)
def test_blank_project(self):
    """An empty project_id must make new_stats_exporter raise."""
    blank_options = stackdriver.Options(project_id="")
    self.assertRaises(Exception, stackdriver.new_stats_exporter,
                      blank_options)
def test_not_blank_project(self):
    """A non-empty project_id yields a StackdriverStatsExporter."""
    created = stackdriver.new_stats_exporter(
        stackdriver.Options(project_id=1))
    self.assertIsInstance(created, stackdriver.StackdriverStatsExporter)
# NOTE(review): this span appears to be the interior of an example script
# whose surrounding definitions (FRONTEND_KEY, MiB, imports) are outside
# this view; only comments are added here.

# Measure, view name, and distribution buckets for processed video sizes.
VIDEO_SIZE_MEASURE = measure_module.MeasureInt(
    "my.org/measure/video_size_test2", "size of processed videos", "By")
VIDEO_SIZE_VIEW_NAME = "my.org/views/video_size_test2"
VIDEO_SIZE_DISTRIBUTION = aggregation_module.DistributionAggregation(
    [0.0, 16.0 * MiB, 256.0 * MiB])
VIDEO_SIZE_VIEW = view_module.View(VIDEO_SIZE_VIEW_NAME,
                                   "processed video size over time",
                                   [FRONTEND_KEY], VIDEO_SIZE_MEASURE,
                                   VIDEO_SIZE_DISTRIBUTION)

stats = stats_module.Stats()
view_manager = stats.view_manager
stats_recorder = stats.stats_recorder

exporter = stackdriver.new_stats_exporter(
    stackdriver.Options(project_id="opencenus-node"))
view_manager.register_exporter(exporter)

# Register view.
view_manager.register_view(VIDEO_SIZE_VIEW)

# Sleep for [0, 10] milliseconds to fake work.
time.sleep(random.randint(1, 10) / 1000.0)

# Process video.
# Record the processed video size.
tag_value = tag_value_module.TagValue(str(1200))
tag_map = tag_map_module.TagMap()
tag_map.insert(FRONTEND_KEY, tag_value)
measure_map = stats_recorder.new_measurement_map()
measure_map.measure_int_put(VIDEO_SIZE_MEASURE, 25 * MiB)
        # NOTE(review): the opening of this call (and its enclosing
        # function) lies outside this view; `timeout=2)` closes a request
        # made above and `r.text` is that response's body.
        timeout=2)
    return r.text


app = Flask(__name__)

# Count incoming requests, tagged by the optional 'param' query argument.
PARAM_TAG = 'param'
m_requests = measure.MeasureInt("app/requests", "Number of requests", "1")
requests_view = view.View("opencensus-example/app/requests",
                          "The sum of requests", [PARAM_TAG], m_requests,
                          aggregation.SumAggregation())

# Wire the stats pipeline to the Stackdriver exporter at import time.
op_stats = stats.Stats()
exporter = stackdriver_exporter.new_stats_exporter(
    stackdriver_exporter.Options(project_id=project_id()))
op_stats.view_manager.register_exporter(exporter)
op_stats.view_manager.register_view(requests_view)


@app.route('/')
def hello_world():
    # Record one request; tag it with ?param=... when present.
    mmap = op_stats.stats_recorder.new_measurement_map()
    mmap.measure_int_put(m_requests, 1)
    tmap = TagMap()
    param = request.args.get('param')
    if param:
        tmap.insert(PARAM_TAG, param)
    mmap.record(tmap)
    return 'Hello, World!'