def _generate_proto():
  """Generate MetricsPayload protos for global_monitor.send().

  Packs every (target, metric, fields, value) entry from the in-memory
  metrics store into MetricsPayload protos.

  Yields:
    metrics_pb2.MetricsPayload protos, each holding at most
    METRICS_DATA_LENGTH_LIMIT MetricsData entries so no single payload
    exceeds what the endpoint accepts.
  """
  proto = metrics_pb2.MetricsPayload()

  # Key: Target, value: MetricsCollection.
  collections = {}

  # Key: (Target, metric name) tuple, value: MetricsDataSet.
  data_sets = {}

  count = 0
  for (target, metric, start_time, end_time,
       fields_values) in state.store.get_all():
    # .items() (not the Python-2-only .iteritems()) keeps this working on
    # both Python 2 and Python 3.
    for fields, value in fields_values.items():
      if count >= METRICS_DATA_LENGTH_LIMIT:
        # Flush the full payload and reset the bookkeeping maps, which
        # point into the proto that was just yielded.
        yield proto
        proto = metrics_pb2.MetricsPayload()
        collections.clear()
        data_sets.clear()
        count = 0

      if target not in collections:
        collections[target] = proto.metrics_collection.add()
        target.populate_target_pb(collections[target])
      collection = collections[target]

      key = (target, metric.name)
      new_data_set = None
      if key not in data_sets:
        new_data_set = metrics_pb2.MetricsDataSet()
        metric.populate_data_set(new_data_set)

      data = metrics_pb2.MetricsData()
      metric.populate_data(data, start_time, end_time, fields, value)

      # All required data protos have been successfully populated. Now we can
      # insert them in serialized proto and bookkeeping data structures.
      if new_data_set is not None:
        collection.metrics_data_set.add().CopyFrom(new_data_set)
        data_sets[key] = collection.metrics_data_set[-1]
      data_sets[key].data.add().CopyFrom(data)
      count += 1

  if count > 0:
    yield proto
def test_populate_fields(self):
  """_populate_fields writes one typed field proto per declared field."""
  data = metrics_pb2.MetricsData()
  m = metrics.Metric('test', 'test', [
      metrics.IntegerField('a'),
      metrics.BooleanField('b'),
      metrics.StringField('c'),
  ])
  m._populate_fields(data, (1, True, 'test'))

  # (expected name, proto value attribute, expected value) per field.
  expected = [
      ('a', 'int64_value', 1),
      ('b', 'bool_value', True),
      ('c', 'string_value', 'test'),
  ]
  self.assertEqual(len(expected), len(data.field))
  for field_pb, (name, attr, value) in zip(data.field, expected):
    self.assertEqual(name, field_pb.name)
    self.assertEqual(value, getattr(field_pb, attr))
def test_init_service_account(self, gc, discovery, instrumented_http):
  """PubSubMonitor authenticates from a service-account JSON key file.

  Verifies the credential file is read, scoped credentials are created and
  used to authorize the instrumented http object, and the pubsub client and
  topic path are built from the project/topic arguments.
  """
  m_open = mock.mock_open(read_data='{"type": "service_account"}')
  creds = gc.from_stream.return_value
  scoped_creds = creds.create_scoped.return_value
  http_mock = instrumented_http.return_value
  metric1 = metrics_pb2.MetricsData(name='m1')
  with mock.patch('infra_libs.ts_mon.common.monitors.open',
                  m_open, create=True):
    mon = monitors.PubSubMonitor('/path/to/creds.p8.json', 'myproject',
                                 'mytopic')
  mon.send(metric1)

  m_open.assert_called_once_with('/path/to/creds.p8.json', 'r')
  creds.create_scoped.assert_called_once_with(monitors.PubSubMonitor._SCOPES)
  scoped_creds.authorize.assert_called_once_with(http_mock)
  discovery.build.assert_called_once_with('pubsub', 'v1', http=http_mock)
  # assertEqual: assertEquals is a deprecated alias.
  self.assertEqual(mon._topic, 'projects/myproject/topics/mytopic')
def test_init_storage(self, storage, discovery, instrumented_http):
  """PubSubMonitor uses cached credentials fetched from Storage.

  With a non-service-account key file ('{}'), credentials come from the
  Storage object and are used directly to authorize the http object.
  """
  storage_inst = mock.Mock()
  storage.return_value = storage_inst
  creds = storage_inst.get.return_value
  m_open = mock.mock_open(read_data='{}')
  http_mock = instrumented_http.return_value
  metric1 = metrics_pb2.MetricsData(name='m1')
  with mock.patch('infra_libs.ts_mon.common.monitors.open',
                  m_open, create=True):
    mon = monitors.PubSubMonitor('/path/to/creds.p8.json', 'myproject',
                                 'mytopic')
  mon.send(metric1)

  m_open.assert_called_once_with('/path/to/creds.p8.json', 'r')
  storage_inst.get.assert_called_once_with()
  creds.authorize.assert_called_once_with(http_mock)
  discovery.build.assert_called_once_with('pubsub', 'v1', http=http_mock)
  # assertEqual: assertEquals is a deprecated alias.
  self.assertEqual(mon._topic, 'projects/myproject/topics/mytopic')
def test_populate_custom(self):
  """Non-canonical bucketers serialize as CUSTOM_PARAMETERIZED specs."""
  pb = metrics_pb2.MetricsData()
  m = metrics.DistributionMetric('test')

  # Geometric bucketer with a non-canonical growth factor of 4.
  m._populate_value(
      pb, distribution.Distribution(distribution.GeometricBucketer(4)), 1234)
  # assertEqual: assertEquals is a deprecated alias.
  self.assertEqual(pb.distribution.spec_type,
                   metrics_pb2.PrecomputedDistribution.CUSTOM_PARAMETERIZED)
  self.assertEqual(0, pb.distribution.width)
  self.assertEqual(4, pb.distribution.growth_factor)
  self.assertEqual(100, pb.distribution.num_buckets)

  # Fixed-width bucketer with width 10.
  m._populate_value(
      pb, distribution.Distribution(distribution.FixedWidthBucketer(10)),
      1234)
  self.assertEqual(pb.distribution.spec_type,
                   metrics_pb2.PrecomputedDistribution.CUSTOM_PARAMETERIZED)
  self.assertEqual(10, pb.distribution.width)
  self.assertEqual(0, pb.distribution.growth_factor)
  self.assertEqual(100, pb.distribution.num_buckets)
def test_populate_canonical(self):
  """Canonical geometric bucketers map to the matching canonical spec enums."""
  pb = metrics_pb2.MetricsData()
  m = metrics.DistributionMetric('test')

  # Default geometric bucketer -> powers of 10^0.2.
  m._populate_value(
      pb, distribution.Distribution(distribution.GeometricBucketer()), 1234)
  # assertEqual: assertEquals is a deprecated alias.
  self.assertEqual(pb.distribution.spec_type,
                   metrics_pb2.PrecomputedDistribution
                       .CANONICAL_POWERS_OF_10_P_0_2)

  # Growth factor 2 -> powers of 2.
  m._populate_value(
      pb, distribution.Distribution(distribution.GeometricBucketer(2)), 1234)
  self.assertEqual(pb.distribution.spec_type,
                   metrics_pb2.PrecomputedDistribution.CANONICAL_POWERS_OF_2)

  # Growth factor 10 -> powers of 10.
  m._populate_value(
      pb, distribution.Distribution(distribution.GeometricBucketer(10)), 1234)
  self.assertEqual(pb.distribution.spec_type,
                   metrics_pb2.PrecomputedDistribution.CANONICAL_POWERS_OF_10)
def test_populate_value(self):
  """FloatMetric stores its value in noncumulative_double_value."""
  pb = metrics_pb2.MetricsData()
  m = metrics.FloatMetric('test')
  m._populate_value(pb, 1.618, 1234)
  # assertEqual: assertEquals is a deprecated alias.
  self.assertEqual(pb.noncumulative_double_value, 1.618)
def test_populate_value(self):
  """GaugeMetric stores its value in the gauge field."""
  pb = metrics_pb2.MetricsData()
  m = metrics.GaugeMetric('test')
  m._populate_value(pb, 1, 1234)
  # assertEqual: assertEquals is a deprecated alias.
  self.assertEqual(pb.gauge, 1)
def test_populate_value(self):
  """CounterMetric stores its value in the counter field."""
  pb = metrics_pb2.MetricsData()
  m = metrics.CounterMetric('test')
  m._populate_value(pb, 1, 1234)
  # assertEqual: assertEquals is a deprecated alias.
  self.assertEqual(pb.counter, 1)
def test_populate_value(self):
  """BooleanMetric stores its value in boolean_value."""
  pb = metrics_pb2.MetricsData()
  m = metrics.BooleanMetric('test')
  m._populate_value(pb, True, 1234)
  # assertEqual: assertEquals is a deprecated alias.
  self.assertEqual(pb.boolean_value, True)
def test_populate_value(self):
  """StringMetric stores its value in string_value."""
  pb = metrics_pb2.MetricsData()
  m = metrics.StringMetric('test')
  m._populate_value(pb, 'foo', 1234)
  # assertEqual: assertEquals is a deprecated alias.
  self.assertEqual(pb.string_value, 'foo')
def _generate_proto():
  """Generate MetricsPayload for global_monitor.send().

  Walks every (target, metric, start_times, end_time, fields->value map)
  entry in the in-memory metrics store and packs them into MetricsPayload
  protos, yielding a payload whenever the current one reaches
  METRICS_DATA_LENGTH_LIMIT MetricsData entries.
  """
  proto = metrics_pb2.MetricsPayload()

  # Key: Target, value: MetricsCollection.
  collections = {}  # pylint: disable=redefined-outer-name

  # Key: (Target, metric name) tuple, value: MetricsDataSet.
  data_sets = {}

  count = 0
  for (target, metric, start_times, end_time,
       fields_values) in state.store.get_all():
    for fields, value in six.iteritems(fields_values):
      # By default, the start time of all data points for a single stream
      # should be set with the first time of a value change in the stream,
      # until metric.reset() is invoked.
      #
      # e.g.,
      # At 00:00.
      # {value: 1,
      #  fields: ('metric:result': 'success', 'metric:command': 'get_name'),
      #  start_timestamp=0, end_timestamp=0}
      #
      # At 00:01.
      # {value: 1,
      #  fields: ('metric:result': 'success', 'metric:command': 'get_name'),
      #  start_timestamp=0, end_timestamp=1}
      #
      # At 00:02.
      # {value: 2,
      #  fields: ('metric:result': 'success', 'metric:command': 'get_name'),
      #  start_timestamp=0, end_timestamp=2}
      #
      # This is important for cumulative metrics, because the monitoring
      # backend detects the restart of a monitoring target and inserts a
      # reset point to make Delta()/Rate() computation results accurate.
      #
      # If a given metric has its own start_time, which can be set via
      # metric.dangerously_set_start_time(), then report all the data points
      # with the metric-level start_time.
      #
      # Otherwise, report data points with the first value change time.
      start_time = metric.start_time or start_times.get(fields, end_time)
      if count >= METRICS_DATA_LENGTH_LIMIT:
        # Flush the full payload and reset the bookkeeping maps, which point
        # into the proto that was just yielded.
        yield proto
        proto = metrics_pb2.MetricsPayload()
        collections.clear()
        data_sets.clear()
        count = 0

      if target not in collections:
        collections[target] = proto.metrics_collection.add()
        if isinstance(target, tuple):
          # A tuple target carries raw root labels instead of a Target
          # object that knows how to populate the proto itself.
          _populate_root_labels(collections[target].root_labels, target)
        else:
          target.populate_target_pb(collections[target])
      collection = collections[target]

      key = (target, metric.name)
      new_data_set = None
      if key not in data_sets:
        new_data_set = metrics_pb2.MetricsDataSet()
        metric.populate_data_set(new_data_set)

      data = metrics_pb2.MetricsData()
      metric.populate_data(data, start_time, end_time, fields, value)

      # All required data protos have been successfully populated. Now we can
      # insert them in serialized proto and bookkeeping data structures.
      if new_data_set is not None:
        collection.metrics_data_set.add().CopyFrom(new_data_set)
        data_sets[key] = collection.metrics_data_set[-1]
      data_sets[key].data.add().CopyFrom(data)
      count += 1

  if count > 0:
    yield proto
def test_send(self):
  """NullMonitor.send accepts a metric without raising."""
  monitor = monitors.NullMonitor()
  payload = metrics_pb2.MetricsData(name='m1')
  monitor.send(payload)
def test_send(self):
  """The abstract base Monitor leaves send() unimplemented."""
  monitor = monitors.Monitor()
  payload = metrics_pb2.MetricsData(name='m1')
  with self.assertRaises(NotImplementedError):
    monitor.send(payload)