def test_format_legacy_trace_json(self):
    trace_id = '2dd43a1d6b2549c6bc2a1a54c2fc0b05'
    span_data = span_data_module.SpanData(
        name='root',
        context=span_context.SpanContext(
            trace_id=trace_id,
            span_id='6e0c63257de34c92'),
        span_id='6e0c63257de34c92',
        parent_span_id='6e0c63257de34c93',
        attributes={'key1': 'value1'},
        start_time=utils.to_iso_str(),
        end_time=utils.to_iso_str(),
        stack_trace=stack_trace.StackTrace(stack_trace_hash_id='111'),
        links=[link.Link('1111', span_id='6e0c63257de34c92')],
        status=status.Status(code=0, message='pok'),
        annotations=[
            time_event.Annotation(
                timestamp=datetime.datetime(1970, 1, 1),
                description='description')
        ],
        message_events=[
            time_event.MessageEvent(
                timestamp=datetime.datetime(1970, 1, 1),
                id=0,
            )
        ],
        same_process_as_parent_span=False,
        child_span_count=0,
        span_kind=0,
    )
    trace_json = span_data_module.format_legacy_trace_json([span_data])
    self.assertEqual(trace_json.get('traceId'), trace_id)
    self.assertEqual(len(trace_json.get('spans')), 1)
def test_record_with_multi_keys(self):
    measure = mock.Mock(spec=measure_module.MeasureInt)
    sum_aggregation = aggregation_module.SumAggregation()
    view = view_module.View("test_view", "description", ['key1', 'key2'],
                            measure, sum_aggregation)
    start_time = datetime.utcnow()
    end_time = datetime.utcnow()
    view_data = view_data_module.ViewData(view=view,
                                          start_time=start_time,
                                          end_time=end_time)
    context = mock.Mock()
    context.map = {'key1': 'val1', 'key2': 'val2'}
    time = utils.to_iso_str()
    value = 1
    self.assertEqual({}, view_data.tag_value_aggregation_data_map)

    view_data.record(context=context,
                     value=value,
                     timestamp=time,
                     attachments=None)
    tag_values = view_data.get_tag_values(tags=context.map,
                                          columns=view.columns)
    tuple_vals = tuple(tag_values)
    self.assertEqual(['val1', 'val2'], tag_values)
    self.assertIsNotNone(view_data.tag_value_aggregation_data_map)
    self.assertTrue(tuple_vals in view_data.tag_value_aggregation_data_map)
    self.assertIsNotNone(
        view_data.tag_value_aggregation_data_map[tuple_vals])
    sum_data = view_data.tag_value_aggregation_data_map.get(tuple_vals)
    self.assertEqual(1, sum_data.sum_data)

    context_2 = mock.Mock()
    context_2.map = {'key1': 'val3', 'key2': 'val2'}
    time_2 = utils.to_iso_str()
    value_2 = 2
    view_data.record(context=context_2,
                     value=value_2,
                     timestamp=time_2,
                     attachments=None)
    tag_values_2 = view_data.get_tag_values(tags=context_2.map,
                                            columns=view.columns)
    tuple_vals_2 = tuple(tag_values_2)
    self.assertEqual(['val3', 'val2'], tag_values_2)
    self.assertTrue(
        tuple_vals_2 in view_data.tag_value_aggregation_data_map)
    sum_data_2 = view_data.tag_value_aggregation_data_map.get(tuple_vals_2)
    self.assertEqual(2, sum_data_2.sum_data)

    time_3 = utils.to_iso_str()
    value_3 = 3
    # Use the same context {'key1': 'val1', 'key2': 'val2'}.
    # Record to entry [(val1, val2), sum=1].
    view_data.record(context=context,
                     value=value_3,
                     timestamp=time_3,
                     attachments=None)
    self.assertEqual(4, sum_data.sum_data)
    # The other entry should remain unchanged.
    self.assertEqual(2, sum_data_2.sum_data)
def test_export_metrics(self):
    lv = label_value.LabelValue('val')
    val = value.ValueLong(value=123)
    dt = datetime(2019, 3, 20, 21, 34, 0, 537954)
    pp = point.Point(value=val, timestamp=dt)
    ts = [
        time_series.TimeSeries(label_values=[lv], points=[pp],
                               start_timestamp=utils.to_iso_str(dt))
    ]
    desc = metric_descriptor.MetricDescriptor(
        name='name',
        description='description',
        unit='unit',
        type_=metric_descriptor.MetricDescriptorType.GAUGE_INT64,
        label_keys=[label_key.LabelKey('key', 'description')])
    mm = metric.Metric(descriptor=desc, time_series=ts)

    exporter = stackdriver.StackdriverStatsExporter(client=mock.Mock())
    exporter.export_metrics([mm])

    self.assertEqual(exporter.client.create_time_series.call_count, 1)
    sd_args = exporter.client.create_time_series.call_args[0][1]
    self.assertEqual(len(sd_args), 1)
    [sd_arg] = sd_args
    self.assertEqual(sd_arg.points[0].value.int64_value, 123)
def test_record_with_missing_key_in_context(self):
    measure = mock.Mock(spec=measure_module.MeasureInt)
    sum_aggregation = aggregation_module.SumAggregation()
    view = view_module.View("test_view", "description", ['key1', 'key2'],
                            measure, sum_aggregation)
    start_time = datetime.utcnow()
    end_time = datetime.utcnow()
    view_data = view_data_module.ViewData(view=view,
                                          start_time=start_time,
                                          end_time=end_time)
    context = mock.Mock()
    context.map = {
        'key1': 'val1',
        'key3': 'val3'
    }  # key2 is not in the context.
    time = utils.to_iso_str()
    value = 4
    view_data.record(context=context,
                     value=value,
                     timestamp=time,
                     attachments=None)
    tag_values = view_data.get_tag_values(tags=context.map,
                                          columns=view.columns)
    tuple_vals = tuple(tag_values)
    self.assertEqual(['val1', None], tag_values)
    self.assertTrue(tuple_vals in view_data.tag_value_aggregation_data_map)
    sum_data = view_data.tag_value_aggregation_data_map.get(tuple_vals)
    self.assertEqual(4, sum_data.sum_data)
def record(self, tags=None):
    """Record all the measures at the same time, with a tag map.

    The tag map can either be passed explicitly to this method or read
    implicitly from the current runtime context.
    """
    if tags is None:
        tags = TagContext.get()

    if self._invalid:
        logger.warning("Measurement map has included negative value "
                       "measurements, refusing to record")
        return

    for measure, value in self.measurement_map.items():
        if value < 0:
            self._invalid = True
            logger.warning("Dropping values, value to record must be "
                           "non-negative")
            logger.info("Measure '{}' has negative value ({}), refusing "
                        "to record measurements from {}".format(
                            measure.name, value, self))
            return

    self.measure_to_view_map.record(tags=tags,
                                    measurement_map=self.measurement_map,
                                    timestamp=utils.to_iso_str(),
                                    attachments=self.attachments)
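# Hedged usage sketch for the record() method above. The module paths, the
# stats singleton `stats_module.stats`, and the measurement-map helpers follow
# the opencensus stats API as commonly documented; treat the exact names here
# as assumptions, not as the canonical example.
from opencensus.stats import measure as measure_module
from opencensus.stats import stats as stats_module
from opencensus.tags import tag_map as tag_map_module

m_latency = measure_module.MeasureFloat("task_latency", "task latency", "ms")
mmap = stats_module.stats.stats_recorder.new_measurement_map()
mmap.measure_float_put(m_latency, 12.3)
# A negative value would mark the map invalid and be dropped, per record() above.
mmap.record(tag_map_module.TagMap())  # with tags=None, TagContext.get() is used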
def test_create_span_data(self):
    span_data_module.SpanData(
        name='root',
        context=None,
        span_id='6e0c63257de34c92',
        parent_span_id='6e0c63257de34c93',
        attributes={'key1': 'value1'},
        start_time=utils.to_iso_str(),
        end_time=utils.to_iso_str(),
        stack_trace=None,
        links=None,
        status=None,
        time_events=None,
        same_process_as_parent_span=None,
        child_span_count=None,
        span_kind=0,
    )
def test_constructor_explicit(self):
    span_id = 'test_span_id'
    span_name = 'test_span_name'
    parent_span = mock.Mock()
    start_time = utils.to_iso_str()
    end_time = utils.to_iso_str()
    attributes = {
        'http.status_code': '200',
        'component': 'HTTP load balancer',
    }
    annotations = [mock.Mock()]
    message_events = [mock.Mock()]
    links = [mock.Mock()]
    stack_trace = mock.Mock()
    status = mock.Mock()
    context_tracer = mock.Mock()

    span = self._make_one(
        name=span_name,
        parent_span=parent_span,
        attributes=attributes,
        start_time=start_time,
        end_time=end_time,
        span_id=span_id,
        stack_trace=stack_trace,
        annotations=annotations,
        message_events=message_events,
        links=links,
        status=status,
        context_tracer=context_tracer)

    self.assertEqual(span.name, span_name)
    self.assertEqual(span.span_id, span_id)
    self.assertEqual(span.parent_span, parent_span)
    self.assertEqual(span.attributes, attributes)
    self.assertEqual(span.start_time, start_time)
    self.assertEqual(span.end_time, end_time)
    self.assertEqual(list(span.annotations), annotations)
    self.assertEqual(list(span.message_events), message_events)
    self.assertEqual(span.stack_trace, stack_trace)
    self.assertEqual(list(span.links), links)
    self.assertEqual(span.status, status)
    self.assertEqual(span.children, [])
    self.assertEqual(span.context_tracer, context_tracer)
def test_constructor(self):
    timestamp = datetime.datetime.utcnow()
    message_event = mock.Mock()
    time_event = time_event_module.TimeEvent(timestamp=timestamp,
                                             message_event=message_event)
    self.assertEqual(time_event.timestamp, utils.to_iso_str(timestamp))
    self.assertEqual(time_event.message_event, message_event)
def __init__(self, timestamp, annotation=None, message_event=None):
    self.timestamp = utils.to_iso_str(timestamp)
    if annotation is not None and message_event is not None:
        raise ValueError("A TimeEvent can contain either an Annotation "
                         "object or a MessageEvent object, but not both.")
    self.annotation = annotation
    self.message_event = message_event
def test_datetime_str_to_proto_ts_conversion(self):
    now = datetime.utcnow()
    delta = now - datetime(1970, 1, 1)
    expected_seconds = int(delta.total_seconds())
    expected_nanos = delta.microseconds * 1000
    proto_ts = utils.proto_ts_from_datetime_str(
        common_utils.to_iso_str(now))
    self.assertEqual(proto_ts.seconds, expected_seconds)
    self.assertEqual(proto_ts.nanos, expected_nanos)
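# A minimal sketch of the conversion exercised by the test above, assuming the
# helper parses the ISO 8601 string into a google.protobuf Timestamp. The actual
# opencensus helper may differ in detail; this is illustrative only.
from google.protobuf import timestamp_pb2


def proto_ts_from_datetime_str(dt_str):
    """Convert an ISO 8601 UTC string (e.g. output of to_iso_str) to a Timestamp."""
    ts = timestamp_pb2.Timestamp()
    ts.FromJsonString(dt_str)  # accepts RFC 3339 / ISO 8601 with a trailing 'Z'
    return ts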
def test_constructor_explicit(self):
    span_id = 'test_span_id'
    span_name = 'test_span_name'
    parent_span = mock.Mock()
    start_time = utils.to_iso_str()
    end_time = utils.to_iso_str()
    attributes = {
        'http.status_code': '200',
        'component': 'HTTP load balancer',
    }
    time_events = mock.Mock()
    links = mock.Mock()
    stack_trace = mock.Mock()
    status = mock.Mock()
    context_tracer = mock.Mock()

    span = self._make_one(
        name=span_name,
        parent_span=parent_span,
        attributes=attributes,
        start_time=start_time,
        end_time=end_time,
        span_id=span_id,
        stack_trace=stack_trace,
        time_events=time_events,
        links=links,
        status=status,
        context_tracer=context_tracer)

    self.assertEqual(span.name, span_name)
    self.assertIsNotNone(span.span_id)
    self.assertEqual(span.attributes, {})
    self.assertEqual(span.start_time, start_time)
    self.assertEqual(span.end_time, end_time)
    self.assertEqual(span.time_events, time_events)
    self.assertEqual(span.stack_trace, stack_trace)
    self.assertEqual(span.links, [])
    self.assertEqual(span.status, status)
    self.assertEqual(span.children, [])
    self.assertEqual(span.context_tracer, context_tracer)
def test_span_data_immutable(self):
    span_data = span_data_module.SpanData(
        name='root',
        context=None,
        span_id='6e0c63257de34c92',
        parent_span_id='6e0c63257de34c93',
        attributes={'key1': 'value1'},
        start_time=utils.to_iso_str(),
        end_time=utils.to_iso_str(),
        stack_trace=None,
        links=None,
        status=None,
        time_events=None,
        same_process_as_parent_span=None,
        child_span_count=None,
        span_kind=0,
    )
    with self.assertRaises(AttributeError):
        span_data.name = 'child'
    with self.assertRaises(AttributeError):
        span_data.new_attr = 'a'
def test_record_with_attachment(self):
    boundaries = [1, 2, 3]
    distribution = {1: "test"}
    distribution_aggregation = aggregation_module.DistributionAggregation(
        boundaries=boundaries, distribution=distribution)
    name = "testName"
    description = "testMeasure"
    unit = "testUnit"
    measure = measure_module.MeasureInt(name=name,
                                        description=description,
                                        unit=unit)
    columns = ["key1", "key2"]
    view = view_module.View(name=name,
                            description=description,
                            columns=columns,
                            measure=measure,
                            aggregation=distribution_aggregation)
    start_time = datetime.utcnow()
    attachments = {"One": "one", "Two": "two"}
    end_time = datetime.utcnow()
    view_data = view_data_module.ViewData(view=view,
                                          start_time=start_time,
                                          end_time=end_time)
    context = mock.Mock()
    context.map = {'key1': 'val1', 'key2': 'val2'}
    time = utils.to_iso_str()
    value = 1

    view_data.record(context=context,
                     value=value,
                     timestamp=time,
                     attachments=attachments)
    tag_values = view_data.get_tag_values(tags=context.map,
                                          columns=view.columns)
    tuple_vals = tuple(tag_values)

    self.assertEqual(['val1', 'val2'], tag_values)
    self.assertIsNotNone(view_data.tag_value_aggregation_data_map)
    self.assertTrue(tuple_vals in view_data.tag_value_aggregation_data_map)
    self.assertIsNotNone(
        view_data.tag_value_aggregation_data_map[tuple_vals])
    self.assertEqual(
        attachments, view_data.tag_value_aggregation_data_map[tuple_vals]
        .exemplars[1].attachments)
def __init__(self, timestamp, id, type=None, uncompressed_size_bytes=None,
             compressed_size_bytes=None):
    self.timestamp = utils.to_iso_str(timestamp)

    if type is None:
        type = Type.TYPE_UNSPECIFIED

    if compressed_size_bytes is None and \
            uncompressed_size_bytes is not None:
        compressed_size_bytes = uncompressed_size_bytes

    self.id = id
    self.type = type
    self.uncompressed_size_bytes = uncompressed_size_bytes
    self.compressed_size_bytes = compressed_size_bytes
def test_format_time_event_json_annotation(self):
    timestamp = datetime.datetime.utcnow()
    mock_annotation = 'test annotation'
    annotation = mock.Mock()
    annotation.format_annotation_json.return_value = mock_annotation
    time_event = time_event_module.TimeEvent(timestamp=timestamp,
                                             annotation=annotation)
    time_event_json = time_event.format_time_event_json()
    expected_time_event_json = {
        'time': utils.to_iso_str(timestamp),
        'annotation': mock_annotation
    }
    self.assertEqual(time_event_json, expected_time_event_json)
def test_format_time_event_json_message_event(self):
    timestamp = datetime.datetime.utcnow()
    mock_message_event = 'test message event'
    message_event = mock.Mock()
    message_event.format_message_event_json.return_value = \
        mock_message_event
    time_event = time_event_module.TimeEvent(timestamp=timestamp,
                                             message_event=message_event)
    time_event_json = time_event.format_time_event_json()
    expected_time_event_json = {
        'time': utils.to_iso_str(timestamp),
        'message_event': mock_message_event
    }
    self.assertEqual(time_event_json, expected_time_event_json)
def test_export_single_metric(self, mock_stats, mock_client):
    """Check that we can export a set containing a single metric."""
    lv = label_value.LabelValue('val')
    val = value.ValueLong(value=123)
    dt = datetime(2019, 3, 20, 21, 34, 0, 537954)
    pp = point.Point(value=val, timestamp=dt)
    ts = [
        time_series.TimeSeries(label_values=[lv], points=[pp],
                               start_timestamp=utils.to_iso_str(dt))
    ]
    desc = metric_descriptor.MetricDescriptor(
        name='name2',
        description='description2',
        unit='unit2',
        type_=metric_descriptor.MetricDescriptorType.GAUGE_INT64,
        label_keys=[label_key.LabelKey('key', 'description')])
    mm = metric.Metric(descriptor=desc, time_series=ts)

    mock_stats.get_metrics.return_value = [mm]

    with MockGetExporterThread() as mget:
        exporter = stackdriver.new_stats_exporter(
            stackdriver.Options(project_id=1))
        mget.transport.step()

    exporter.client.create_metric_descriptor.assert_called()
    self.assertEqual(exporter.client.create_metric_descriptor.call_count, 1)
    md_call_arg = \
        exporter.client.create_metric_descriptor.call_args[0][1]
    self.assertEqual(md_call_arg.metric_kind,
                     monitoring_v3.enums.MetricDescriptor.MetricKind.GAUGE)
    self.assertEqual(md_call_arg.value_type,
                     monitoring_v3.enums.MetricDescriptor.ValueType.INT64)

    exporter.client.create_time_series.assert_called()
    self.assertEqual(exporter.client.create_time_series.call_count, 1)
    ts_call_arg = exporter.client.create_time_series.call_args[0][1]
    self.assertEqual(len(ts_call_arg), 1)
    self.assertEqual(len(ts_call_arg[0].points), 1)
    self.assertEqual(ts_call_arg[0].points[0].value.int64_value, 123)
def test_record(self):
    view = mock.Mock()
    view.columns = ['key1']
    view.aggregation = mock.Mock()
    start_time = datetime.utcnow()
    end_time = datetime.utcnow()
    view_data = view_data_module.ViewData(view=view,
                                          start_time=start_time,
                                          end_time=end_time)
    context = mock.Mock()
    context.map = {'key1': 'val1', 'key2': 'val2'}
    time = utils.to_iso_str()
    value = 1
    self.assertEqual({}, view_data.tag_value_aggregation_data_map)

    view_data.record(context=context, value=value, timestamp=time)
    tag_values = view_data.get_tag_values(tags=context.map,
                                          columns=view.columns)
    tuple_vals = tuple(tag_values)
    self.assertEqual(['val1'], tag_values)
    self.assertIsNotNone(view_data.tag_value_aggregation_data_map)
    self.assertTrue(tuple_vals in view_data.tag_value_aggregation_data_map)
    self.assertIsNotNone(
        view_data.tag_value_aggregation_data_map[tuple_vals])
    self.assertIsNotNone(
        view_data.tag_value_aggregation_data_map.get(tuple_vals).add(
            value))

    view_data.record(context=context, value=value, timestamp=time)
    tag_values.append('val2')
    tuple_vals_2 = tuple(['val2'])
    self.assertFalse(
        tuple_vals_2 in view_data.tag_value_aggregation_data_map)
    view_data.tag_value_aggregation_data_map[
        tuple_vals_2] = view.aggregation
    self.assertEqual(
        view_data.tag_value_aggregation_data_map.get(tuple_vals_2),
        view_data.view.aggregation)
    self.assertIsNotNone(
        view_data.tag_value_aggregation_data_map.get(tuple_vals_2).add(
            value))
def create_metric():
    lv = label_value.LabelValue('val')
    val = value.ValueLong(value=123)
    dt = datetime(2019, 3, 20, 21, 34, 0, 537954)
    pp = point.Point(value=val, timestamp=dt)
    ts = [
        time_series.TimeSeries(label_values=[lv], points=[pp],
                               start_timestamp=utils.to_iso_str(dt))
    ]
    desc = metric_descriptor.MetricDescriptor(
        name='name',
        description='description',
        unit='unit',
        type_=metric_descriptor.MetricDescriptorType.GAUGE_INT64,
        label_keys=[label_key.LabelKey('key', 'description')])
    return metric.Metric(descriptor=desc, time_series=ts)
def test_record_with_none_context(self):
    measure = mock.Mock()
    sum_aggregation = aggregation_module.SumAggregation()
    view = view_module.View("test_view", "description", ['key1', 'key2'],
                            measure, sum_aggregation)
    start_time = datetime.utcnow()
    end_time = datetime.utcnow()
    view_data = view_data_module.ViewData(view=view,
                                          start_time=start_time,
                                          end_time=end_time)
    time = utils.to_iso_str()
    value = 4
    view_data.record(context=None,
                     value=value,
                     timestamp=time,
                     attachments=None)
    tag_values = view_data.get_tag_values(tags={}, columns=view.columns)
    tuple_vals = tuple(tag_values)
    self.assertEqual([None, None], tag_values)
    self.assertTrue(tuple_vals in view_data.tag_value_aggregation_data_map)
    sum_data = view_data.tag_value_aggregation_data_map.get(tuple_vals)
    self.assertEqual(4, sum_data.sum_data)
def timestamp_to_iso_str(timestamp):
    """Convert a POSIX timestamp (seconds since the epoch) to an ISO 8601 string."""
    return to_iso_str(datetime.datetime.utcfromtimestamp(timestamp))
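# For context: to_iso_str (used throughout these snippets, usually as
# utils.to_iso_str) is assumed to render a UTC datetime as an ISO 8601 string
# with a trailing 'Z', defaulting to the current time when no argument is
# given. A minimal sketch under that assumption:
import datetime


def to_iso_str(ts=None):
    """Return an ISO 8601 string for a UTC datetime (now, if none is given)."""
    if ts is None:
        ts = datetime.datetime.utcnow()
    return ts.isoformat() + 'Z'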
def start(self):
    """Set the start time for a span."""
    self.start_time = utils.to_iso_str()
def end(self):
    """Set the end time for the view data."""
    self._end_time = utils.to_iso_str()
def start(self):
    """Set the start time for the view data."""
    self._start_time = utils.to_iso_str()
def __init__(self):
    self.time = utils.to_iso_str()
    if execution_context.get_measure_to_view_map() == {}:
        execution_context.set_measure_to_view_map(MeasureToViewMap())
    self._measure_view_map = execution_context.get_measure_to_view_map()
"my.org/measure/video_size_test_2", "size of processed videos", "By") VIDEO_SIZE_MEASURE_FLOAT = measure_module.MeasureFloat( "my.org/measure/video_size_test-float", "size of processed videos-float", "By") VIDEO_SIZE_VIEW_NAME = "my.org/views/video_size_test2" VIDEO_SIZE_DISTRIBUTION = aggregation_module.DistributionAggregation( [16.0 * MiB, 256.0 * MiB]) VIDEO_SIZE_VIEW = view_module.View(VIDEO_SIZE_VIEW_NAME, "processed video size over time", [FRONTEND_KEY], VIDEO_SIZE_MEASURE, VIDEO_SIZE_DISTRIBUTION) TEST_TIME = datetime(2018, 12, 25, 1, 2, 3, 4) TEST_TIME_STR = utils.to_iso_str(TEST_TIME) class TestStatsExporter(unittest.TestCase): def test_export_view_data(self): v_data = view_data_module.ViewData(view=VIDEO_SIZE_VIEW, start_time=TEST_TIME_STR, end_time=TEST_TIME_STR) v_data.record(context=tag_map_module.TagMap(), value=2, timestamp=None) view_data = [v_data] view_data = [metric_utils.view_data_to_metric(view_data[0], TEST_TIME)] handler = mock.Mock(spec=ocagent.ExportRpcHandler) ocagent.StatsExporter(handler).export_metrics(view_data) self.assertEqual(
VIDEO_SIZE_MEASURE_2 = measure_module.MeasureInt(
    "my.org/measure/video_size_test_2", "size of processed videos", "By")
VIDEO_SIZE_MEASURE_FLOAT = measure_module.MeasureFloat(
    "my.org/measure/video_size_test-float",
    "size of processed videos-float", "By")

VIDEO_SIZE_VIEW_NAME = "my.org/views/video_size_test2"
VIDEO_SIZE_DISTRIBUTION = aggregation_module.DistributionAggregation(
    [16.0 * MiB, 256.0 * MiB])
VIDEO_SIZE_VIEW = view_module.View(VIDEO_SIZE_VIEW_NAME,
                                   "processed video size over time",
                                   [FRONTEND_KEY], VIDEO_SIZE_MEASURE,
                                   VIDEO_SIZE_DISTRIBUTION)

TEST_TIME = utils.to_iso_str(datetime(2018, 12, 25, 1, 2, 3, 4))


class _Client(object):
    def __init__(self, client_info=None):
        self.client_info = client_info


class TestOptions(unittest.TestCase):
    def test_options_blank(self):
        option = stackdriver.Options()
        self.assertEqual(option.project_id, "")
        self.assertEqual(option.resource, "")

    def test_options_parameters(self):
def finish(self):
    """Set the end time for a span."""
    self.end_time = utils.to_iso_str()
def __init__(self, timestamp, description, attributes=None):
    self.timestamp = utils.to_iso_str(timestamp)
    self.description = description
    self.attributes = attributes