def _test_gauge_or_ms(self, metric_type, utcnow):
    """Shared scenario for gauge and timer (ms) style metrics.

    Sends statsd datagrams for *metric_type*, flushes the collector, and
    checks the mean aggregates recorded for the resulting metric at the
    day / hour / minute granularities.
    """
    name = "test_gauge_or_ms"
    key = name + "|" + metric_type
    sender = ("127.0.0.1", 12345)

    def send(value):
        # Build a statsd datagram "<name>:<value>|<type>" and feed it to
        # the server as if it arrived from `sender`.
        payload = ("%s:%s|%s" % (name, value, metric_type)).encode('ascii')
        self.server.datagram_received(payload, sender)

    utcnow.return_value = utils.datetime_utc(2015, 1, 7, 13, 58, 36)
    send(1)
    self.stats.flush()

    resource = self.stats.indexer.get_resource(
        'generic', self.conf.statsd.resource_id, with_metrics=True)
    metric = resource.get_metric(key)
    self.trigger_processing([metric])

    def fetch_means():
        # Pull the aggregated measures for `metric` back out of storage.
        raw = self.storage.get_aggregated_measures(
            {metric: self.aggregations})[metric]
        return get_measures_list(raw)

    self.assertEqual({
        "mean": [
            (datetime64(2015, 1, 7), numpy.timedelta64(1, 'D'), 1.0),
            (datetime64(2015, 1, 7, 13), numpy.timedelta64(1, 'h'), 1.0),
            (datetime64(2015, 1, 7, 13, 58), numpy.timedelta64(1, 'm'), 1.0),
        ],
    }, fetch_means())

    utcnow.return_value = utils.datetime_utc(2015, 1, 7, 13, 59, 37)
    # Expected to be ignored: only the value 2 shows up in the 13:59
    # aggregates below.
    send(45)
    send(2)
    self.stats.flush()
    self.trigger_processing([metric])

    self.assertEqual({
        "mean": [
            (datetime64(2015, 1, 7), numpy.timedelta64(1, 'D'), 1.5),
            (datetime64(2015, 1, 7, 13), numpy.timedelta64(1, 'h'), 1.5),
            (datetime64(2015, 1, 7, 13, 58), numpy.timedelta64(1, 'm'), 1.0),
            (datetime64(2015, 1, 7, 13, 59), numpy.timedelta64(1, 'm'), 2.0),
        ],
    }, fetch_means())
def test_counter(self, utcnow):
    """Check counter (|c) handling, including the @sample-rate suffix.

    Sends counter datagrams, flushes, and verifies the stored mean
    aggregates; a sampled value (2 at @0.2) is scaled up and summed with
    the unsampled 45 (the 13:59 minute ends up at 55.0).
    """
    name = "test_counter"
    key = name + "|c"
    sender = ("127.0.0.1", 12345)

    def send(sample):
        # `sample` is the raw "<value>|c[|@rate]" part of the datagram.
        payload = ("%s:%s" % (name, sample)).encode('ascii')
        self.server.datagram_received(payload, sender)

    utcnow.return_value = utils.datetime_utc(2015, 1, 7, 13, 58, 36)
    send("1|c")
    self.stats.flush()

    resource = self.stats.indexer.get_resource(
        'generic', self.conf.statsd.resource_id, with_metrics=True)
    metric = resource.get_metric(key)
    self.assertIsNotNone(metric)
    self.trigger_processing([metric])

    def fetch_means():
        raw = self.storage.get_aggregated_measures(
            {metric: self.aggregations})[metric]
        return get_measures_list(raw)

    self.assertEqual({
        "mean": [
            (datetime64(2015, 1, 7), numpy.timedelta64(1, 'D'), 1.0),
            (datetime64(2015, 1, 7, 13), numpy.timedelta64(1, 'h'), 1.0),
            (datetime64(2015, 1, 7, 13, 58), numpy.timedelta64(1, 'm'), 1.0),
        ],
    }, fetch_means())

    utcnow.return_value = utils.datetime_utc(2015, 1, 7, 13, 59, 37)
    send("45|c")
    send("2|c|@0.2")
    self.stats.flush()
    self.trigger_processing([metric])

    self.assertEqual({
        "mean": [
            (datetime64(2015, 1, 7), numpy.timedelta64(1, 'D'), 28),
            (datetime64(2015, 1, 7, 13), numpy.timedelta64(1, 'h'), 28),
            (datetime64(2015, 1, 7, 13, 58), numpy.timedelta64(1, 'm'), 1.0),
            (datetime64(2015, 1, 7, 13, 59), numpy.timedelta64(1, 'm'), 55.0),
        ],
    }, fetch_means())
def test_amqp1d(self, utcnow):
    """End-to-end check of the amqp1d collectd listener.

    Feeds two collectd-style JSON samples through on_message, flushes the
    processor, then verifies the indexed resource, the three metrics it
    gains, and the mean aggregates stored for each.
    """
    utcnow.return_value = utils.datetime_utc(2017, 1, 10, 13, 58, 36)

    samples = [
        {
            u'dstypes': [u'gauge'],
            u'plugin': u'memory',
            u'dsnames': [u'value'],
            u'interval': 10.0,
            u'host': u'www.gnocchi.test.com',
            u'values': [9],
            u'time': 1506712460.824,
            u'plugin_instance': u'',
            u'type_instance': u'free',
            u'type': u'memory',
        },
        {
            # Two datasources (rx/tx) in one sample -> two metrics.
            u'dstypes': [u'derive', u'derive'],
            u'plugin': u'interface',
            u'dsnames': [u'rx', u'tx'],
            u'interval': 10.0,
            u'host': u'www.gnocchi.test.com',
            u'values': [2, 5],
            u'time': 1506712460.824,
            u'plugin_instance': u'ens2f1',
            u'type_instance': u'',
            u'type': u'if_errors',
        },
    ]
    body = json.dumps(samples)
    self.server.on_message(mock.Mock(message=mock.Mock(body=body)))
    self.server.processor.flush()

    resources = self.index.list_resources(
        self.conf.amqp1d.resource_type,
        attribute_filter={"=": {"host": "www.gnocchi.test.com"}})
    self.assertEqual(1, len(resources))
    self.assertEqual("www.gnocchi.test.com", resources[0].host)

    metrics = self.index.list_metrics(
        attribute_filter={'=': {"resource_id": resources[0].id}})
    self.assertEqual(3, len(metrics))
    self.trigger_processing(metrics)

    granularity = numpy.timedelta64(1, 'm')
    stamp = datetime64(2017, 1, 10, 13, 58)
    expected = {
        "memory@memory-free": [(stamp, granularity, 9)],
        "interface-ens2f1@if_errors-rx": [(stamp, granularity, 2)],
        "interface-ens2f1@if_errors-tx": [(stamp, granularity, 5)],
    }
    for metric in metrics:
        aggregation = metric.archive_policy.get_aggregation(
            "mean", granularity)
        fetched = self.storage.get_aggregated_measures(
            {metric: [aggregation]})[metric]
        self.assertEqual(expected[metric.name],
                         get_measures_list(fetched)["mean"])