class TestSysLogReporter(TimedTestCase):
    """Tests for SysLogReporter: populates a registry with one of every
    metric type and verifies the whole snapshot is serialized as a single
    sorted-key JSON line passed to ``logging.Logger.info``."""

    def setUp(self):
        super(TestSysLogReporter, self).setUp()
        self.registry = MetricsRegistry(clock=self.clock)
        self.maxDiff = None
        # Pin the fake clock so the reported "timestamp" field is deterministic.
        self.clock.now = 0

    def tearDown(self):
        super(TestSysLogReporter, self).tearDown()
        self.clock.now = 0

    def test_report_now(self):
        # connect to a local rsyslog server
        r = SysLogReporter(registry=self.registry, reporting_interval=1, clock=self.clock)
        # Histogram over powers of two: known avg/percentiles/std_dev below.
        h1 = self.registry.histogram("hist")
        for i in range(10):
            h1.add(2 ** i)
        gcb = self.registry.gauge("gcb", lambda: 123)  # callback gauge
        gsimple = self.registry.gauge("gsimple").set_value(42)  # simple gauge
        t1 = self.registry.timer("t1")
        m1 = self.registry.meter("m1")
        m1.mark()
        with t1.time():
            c1 = self.registry.counter("c1")
            c2 = self.registry.counter("counter-2")
            c1.inc()
            c2.dec()
            c2.dec()
            # Advance the clock inside the timer context so t1 records 1s.
            self.clock.add(1)
        with mock.patch("pyformance.reporters.syslog_reporter.logging.Logger.info") as patch:
            r.report_now()
            # Exact JSON payload the reporter is expected to log.
            expected = '{"c1.count": 1, "counter-2.count": -2, "gcb.value": 123, "gsimple.value": 42, "hist.75_percentile": 160.0, "hist.95_percentile": 512, "hist.999_percentile": 512, "hist.99_percentile": 512, "hist.avg": 102.3, "hist.count": 10.0, "hist.max": 512, "hist.min": 1, "hist.std_dev": 164.94851048466944, "m1.15m_rate": 0, "m1.1m_rate": 0, "m1.5m_rate": 0, "m1.count": 1.0, "m1.mean_rate": 1.0, "t1.15m_rate": 0, "t1.1m_rate": 0, "t1.50_percentile": 1, "t1.5m_rate": 0, "t1.75_percentile": 1, "t1.95_percentile": 1, "t1.999_percentile": 1, "t1.99_percentile": 1, "t1.avg": 1.0, "t1.count": 1.0, "t1.max": 1, "t1.mean_rate": 1.0, "t1.min": 1, "t1.std_dev": 0.0, "t1.sum": 1.0, "timestamp": 1}'
            patch.assert_called_with(expected)
class TestOpenTSDBReporter(TimedTestCase):
    """Tests for OpenTSDBReporter (Python 2 ``urllib.urlopen`` variant):
    verifies metric collection with a prefix and that reporting issues an
    HTTP request."""

    def setUp(self):
        super(TestOpenTSDBReporter, self).setUp()
        self.registry = MetricsRegistry(clock=self.clock)
        self.maxDiff = None

    def tearDown(self):
        super(TestOpenTSDBReporter, self).tearDown()

    def test_report_now(self):
        r = OpenTSDBReporter(application_name="app",
                             write_key="key",
                             registry=self.registry,
                             reporting_interval=1,
                             clock=self.clock,
                             prefix="prefix.",
                             url="http://opentsdb.com/api/put")
        h1 = self.registry.histogram("hist")
        for i in range(10):
            h1.add(2 ** i)
        t1 = self.registry.timer("t1")
        m1 = self.registry.meter("m1")
        m1.mark()
        with t1.time():
            c1 = self.registry.counter("c1")
            c2 = self.registry.counter("counter-2")
            c1.inc()
            c2.dec()
            c2.dec()
            self.clock.add(1)
        output = r._collect_metrics(registry=self.registry)
        # 31 data points = every aggregate exported for the metrics above.
        self.assertEqual(len(output), 31)
        for data in output:
            # Every metric name must carry the configured prefix.
            assert data['metric'].startswith("prefix.")

    def test_send_request(self):
        r = OpenTSDBReporter(application_name="app",
                             write_key="key",
                             registry=self.registry,
                             reporting_interval=1,
                             clock=self.clock,
                             prefix="prefix.",
                             url="http://opentsdb.com/api/put")
        h1 = self.registry.histogram("hist")
        for i in range(10):
            h1.add(2 ** i)
        t1 = self.registry.timer("t1")
        m1 = self.registry.meter("m1")
        m1.mark()
        with t1.time():
            c1 = self.registry.counter("c1")
            c2 = self.registry.counter("counter-2")
            c1.inc()
            c2.dec()
            c2.dec()
            self.clock.add(1)
        # Patch urlopen so no real network traffic happens.
        with mock.patch("pyformance.reporters.opentsdb_reporter.urllib.urlopen") as patch:
            r.report_now()
            patch.assert_called()
class TestCarbonReporter(TimedTestCase):
    """Tests for CarbonReporter in both plain-text and pickle wire formats.

    The test case itself doubles as a fake socket (``connect``/``sendall``/
    ``close``) via ``socket_factory=lambda: self``, capturing output in a
    BytesIO buffer."""

    def setUp(self):
        super(TestCarbonReporter, self).setUp()
        self.output = BytesIO()
        self.registry = MetricsRegistry(clock=self.clock)
        self.maxDiff = None

    def connect(self, *args):
        # part of fake socket interface
        pass

    def sendall(self, data):
        # part of fake socket interface
        self.output.write(data)

    def close(self):
        # part of fake socket interface
        pass

    def tearDown(self):
        super(TestCarbonReporter, self).tearDown()

    def capture_test_metrics(self):
        # Shared fixture: one of every metric type, clock ending at t=2.
        self.clock.now = 1
        h1 = self.registry.histogram("hist")
        for i in range(10):
            h1.add(2 ** i)
        gcb = self.registry.gauge("gcb", lambda: 123)
        gsimple = self.registry.gauge("gsimple").set_value(42)
        t1 = self.registry.timer("t1")
        m1 = self.registry.meter("m1")
        m1.mark()
        with t1.time():
            c1 = self.registry.counter("c1")
            c2 = self.registry.counter("counter-2")
            c1.inc()
            c2.dec()
            c2.dec()
            # Advance inside the timer context so t1 records 1s.
            self.clock.add(1)

    def test_report_now_plain(self):
        r = CarbonReporter(
            registry=self.registry,
            reporting_interval=1,
            clock=self.clock,
            socket_factory=lambda: self,
        )
        self.capture_test_metrics()
        r.report_now()
        # Order of lines on the wire is unspecified; compare sorted.
        test_data = sorted(self.output.getvalue().decode().splitlines())
        expected_data = sorted(
            [
                "counter-2.count -2 2",
                "c1.count 1 2",
                "gsimple.value 42 2",
                "gcb.value 123 2",
                "t1.1m_rate 0 2",
                "t1.999_percentile 1 2",
                "t1.15m_rate 0 2",
                "t1.99_percentile 1 2",
                "t1.mean_rate 1.0 2",
                "t1.95_percentile 1 2",
                "t1.min 1 2",
                "t1.50_percentile 1 2",
                "t1.5m_rate 0 2",
                "t1.count 1.0 2",
                "t1.75_percentile 1 2",
                "t1.std_dev 0.0 2",
                "t1.max 1 2",
                "t1.sum 1.0 2",
                "t1.avg 1.0 2",
                "hist.count 10.0 2",
                "hist.999_percentile 512 2",
                "hist.99_percentile 512 2",
                "hist.min 1 2",
                "hist.95_percentile 512 2",
                "hist.75_percentile 160.0 2",
                # float repr differs between Python 3 and Python 2
                "hist.std_dev 164.94851048466944 2"
                if PY3
                else "hist.std_dev 164.948510485 2",
                "hist.max 512 2",
                "hist.avg 102.3 2",
                "m1.count 1.0 2",
                "m1.1m_rate 0 2",
                "m1.15m_rate 0 2",
                "m1.5m_rate 0 2",
                "m1.mean_rate 1.0 2",
            ]
        )
        self.assertEqual(test_data, expected_data)

    def test_report_now_pickle(self):
        r = CarbonReporter(
            registry=self.registry,
            reporting_interval=1,
            clock=self.clock,
            socket_factory=lambda: self,
            pickle_protocol=True,
        )
        self.capture_test_metrics()
        r.report_now()
        # Skip the 4-byte length header, then unpickle the payload.
        test_data = sorted(pickle.loads(self.output.getvalue()[4:]))
        expected_data = sorted(
            [
                ("counter-2.count", (2, -2.0)),
                ("c1.count", (2, 1)),
                ("gsimple.value", (2, 42.0)),
                ("gcb.value", (2, 123.0)),
                ("t1.1m_rate", (2, 0.0)),
                ("t1.999_percentile", (2, 1)),
                ("t1.15m_rate", (2, 0.0)),
                ("t1.99_percentile", (2, 1)),
                ("t1.mean_rate", (2, 1)),
                ("t1.95_percentile", (2, 1)),
                ("t1.min", (2, 1)),
                ("t1.50_percentile", (2, 1)),
                ("t1.5m_rate", (2, 0.0)),
                ("t1.count", (2, 1)),
                ("t1.75_percentile", (2, 1)),
                ("t1.std_dev", (2, 0.0)),
                ("t1.max", (2, 1)),
                ("t1.sum", (2, 1)),
                ("t1.avg", (2, 1)),
                ("hist.count", (2, 10.0)),
                ("hist.999_percentile", (2, 512.0)),
                ("hist.99_percentile", (2, 512.0)),
                ("hist.min", (2, 1)),
                ("hist.95_percentile", (2, 512.0)),
                ("hist.75_percentile", (2, 160.0)),
                ("hist.std_dev", (2, 164.94851048466944)),
                ("hist.max", (2, 512.0)),
                ("hist.avg", (2, 102.3)),
                ("m1.count", (2, 1)),
                ("m1.1m_rate", (2, 0.0)),
                ("m1.15m_rate", (2, 0.0)),
                ("m1.5m_rate", (2, 0.0)),
                ("m1.mean_rate", (2, 1)),
            ]
        )
        self.assertEqual(test_data, expected_data)
class TestCarbonReporter(TimedTestCase):
    """Tests for CarbonReporter (plain-text protocol, StringIO capture).

    The test case doubles as a fake socket via ``socket_factory=lambda: self``.
    NOTE(review): the expected ``hist.std_dev`` uses the Python 2 float repr
    (164.948510485) and the comparison is order-sensitive — this variant
    presumably targets Python 2 only.
    """

    def setUp(self):
        super(TestCarbonReporter, self).setUp()
        self.output = StringIO()
        self.registry = MetricsRegistry(clock=self.clock)
        self.maxDiff = None

    def connect(self, *args):
        # part of fake socket interface
        pass

    def sendall(self, data):
        # part of fake socket interface
        self.output.write(data)

    def close(self):
        # part of fake socket interface
        pass

    def tearDown(self):
        super(TestCarbonReporter, self).tearDown()

    def test_report_now(self):
        r = CarbonReporter(
            registry=self.registry, reporting_interval=1,
            clock=self.clock, socket_factory=lambda: self)
        h1 = self.registry.histogram("hist")
        for i in range(10):
            h1.add(2 ** i)
        gcb = self.registry.gauge("gcb", lambda: 123)
        gsimple = self.registry.gauge("gsimple").set_value(42)
        t1 = self.registry.timer("t1")
        m1 = self.registry.meter("m1")
        m1.mark()
        with t1.time():
            c1 = self.registry.counter("c1")
            c2 = self.registry.counter("counter-2")
            c1.inc()
            c2.dec()
            c2.dec()
            # Advance inside the timer context so t1 records 1s.
            self.clock.add(1)
        r.report_now()
        # Order-sensitive comparison against the exact wire lines.
        self.assertEqual(self.output.getvalue().splitlines(), [
            'counter-2.count -2 1',
            'gsimple.value 42 1',
            'gcb.value 123 1',
            't1.1m_rate 0 1',
            't1.999_percentile 1 1',
            't1.15m_rate 0 1',
            't1.99_percentile 1 1',
            't1.mean_rate 1.0 1',
            't1.95_percentile 1 1',
            't1.min 1 1',
            't1.5m_rate 0 1',
            't1.count 1.0 1',
            't1.75_percentile 1 1',
            't1.std_dev 0.0 1',
            't1.max 1 1',
            't1.avg 1.0 1',
            'hist.count 10.0 1',
            'hist.999_percentile 512 1',
            'hist.99_percentile 512 1',
            'hist.min 1 1',
            'hist.95_percentile 512 1',
            'hist.75_percentile 160.0 1',
            'hist.std_dev 164.948510485 1',
            'hist.max 512 1',
            'hist.avg 102.3 1',
            'm1.1m_rate 0 1',
            'm1.15m_rate 0 1',
            'm1.5m_rate 0 1',
            'm1.mean_rate 1.0 1',
            'c1.count 1 1'
        ])
class TestConsoleReporter(TimedTestCase):
    """Tests for ConsoleReporter: verifies the human-readable dump written
    to a stream, including the timestamp banner line.

    NOTE(review): the expected ``hist.std_dev`` uses the Python 2 float repr
    (164.948510485) — this variant presumably targets Python 2 only.
    """

    def setUp(self):
        super(TestConsoleReporter, self).setUp()
        self.output = StringIO()
        self.registry = MetricsRegistry(clock=self.clock)
        self.maxDiff = None
        # Pin the fake clock so the banner timestamp is deterministic.
        self.clock.now = 0

    def tearDown(self):
        super(TestConsoleReporter, self).tearDown()
        self.clock.now = 0

    def test_report_now(self):
        r = ConsoleReporter(
            registry=self.registry, reporting_interval=1,
            stream=self.output, clock=self.clock)
        h1 = self.registry.histogram("hist")
        for i in range(10):
            h1.add(2 ** i)
        gcb = self.registry.gauge("gcb", lambda: 123)
        gsimple = self.registry.gauge("gsimple").set_value(42)
        t1 = self.registry.timer("t1")
        m1 = self.registry.meter("m1")
        m1.mark()
        with t1.time():
            c1 = self.registry.counter("c1")
            c2 = self.registry.counter("counter-2")
            c1.inc()
            c2.dec()
            c2.dec()
            # Advance inside the timer context so t1 records 1s.
            self.clock.add(1)
        r.report_now()
        # Order-sensitive comparison against the exact console output.
        self.assertEqual(self.output.getvalue().splitlines(), [
            '== 1970-01-01 00:00:01 ===================================',
            'counter-2:',
            '               count = -2',
            'gsimple:',
            '               value = 42',
            'gcb:',
            '               value = 123',
            't1:',
            '             1m_rate = 0',
            '      999_percentile = 1',
            '            15m_rate = 0',
            '       99_percentile = 1',
            '           mean_rate = 1.0',
            '       95_percentile = 1',
            '                 min = 1',
            '             5m_rate = 0',
            '               count = 1.0',
            '       75_percentile = 1',
            '             std_dev = 0.0',
            '                 max = 1',
            '                 avg = 1.0',
            'hist:',
            '               count = 10.0',
            '      999_percentile = 512',
            '       99_percentile = 512',
            '                 min = 1',
            '       95_percentile = 512',
            '       75_percentile = 160.0',
            '             std_dev = 164.948510485',
            '                 max = 512',
            '                 avg = 102.3',
            'm1:',
            '             1m_rate = 0',
            '            15m_rate = 0',
            '             5m_rate = 0',
            '           mean_rate = 1.0',
            'c1:',
            '               count = 1',
            ''])
class TestOpenTSDBReporter(TimedTestCase):
    """Tests for OpenTSDBReporter (Python 3 ``urllib.request.urlopen``
    variant): verifies prefixed metric collection and that reporting issues
    an HTTP request."""

    def setUp(self):
        super(TestOpenTSDBReporter, self).setUp()
        self.registry = MetricsRegistry(clock=self.clock)
        self.maxDiff = None

    def tearDown(self):
        super(TestOpenTSDBReporter, self).tearDown()

    def test_report_now(self):
        r = OpenTSDBReporter(application_name="app",
                             write_key="key",
                             registry=self.registry,
                             reporting_interval=1,
                             clock=self.clock,
                             prefix="prefix.",
                             url="http://opentsdb.com/api/put")
        h1 = self.registry.histogram("hist")
        for i in range(10):
            h1.add(2**i)
        t1 = self.registry.timer("t1")
        m1 = self.registry.meter("m1")
        m1.mark()
        with t1.time():
            c1 = self.registry.counter("c1")
            c2 = self.registry.counter("counter-2")
            c1.inc()
            c2.dec()
            c2.dec()
            self.clock.add(1)
        output = r._collect_metrics(registry=self.registry)
        # 31 data points = every aggregate exported for the metrics above.
        self.assertEqual(len(output), 31)
        for data in output:
            # Every metric name must carry the configured prefix.
            assert data['metric'].startswith("prefix.")

    def test_send_request(self):
        r = OpenTSDBReporter(application_name="app",
                             write_key="key",
                             registry=self.registry,
                             reporting_interval=1,
                             clock=self.clock,
                             prefix="prefix.",
                             url="http://opentsdb.com/api/put")
        h1 = self.registry.histogram("hist")
        for i in range(10):
            h1.add(2**i)
        t1 = self.registry.timer("t1")
        m1 = self.registry.meter("m1")
        m1.mark()
        with t1.time():
            c1 = self.registry.counter("c1")
            c2 = self.registry.counter("counter-2")
            c1.inc()
            c2.dec()
            c2.dec()
            self.clock.add(1)
        # Patch urlopen so no real network traffic happens.
        with mock.patch(
            "pyformance.reporters.opentsdb_reporter.urllib.request.urlopen"
        ) as patch:
            r.report_now()
            patch.assert_called()
class TestConsoleReporter(TimedTestCase):
    """Tests for ConsoleReporter including event metrics: events are
    reported under their submission timestamp (a second banner block),
    while regular metrics use the report timestamp.  Output is compared
    order-insensitively by sorting both sides."""

    def setUp(self):
        super(TestConsoleReporter, self).setUp()
        self.output = StringIO()
        self.registry = MetricsRegistry(clock=self.clock)
        self.maxDiff = None
        # Pin the fake clock so the banner timestamps are deterministic.
        self.clock.now = 0

    def tearDown(self):
        super(TestConsoleReporter, self).tearDown()
        self.clock.now = 0

    def test_report_now(self):
        r = ConsoleReporter(
            registry=self.registry,
            reporting_interval=1,
            stream=self.output,
            clock=self.clock,
        )
        h1 = self.registry.histogram("hist")
        for i in range(10):
            h1.add(2**i)
        gcb = self.registry.gauge("gcb", lambda: 123)
        gsimple = self.registry.gauge("gsimple").set_value(42)
        t1 = self.registry.timer("t1")
        m1 = self.registry.meter("m1")
        m1.mark()
        # Event submitted at t=0, before the clock advances.
        e1 = self.registry.event("e1")
        e1.add({"field": 1})
        with t1.time():
            c1 = self.registry.counter("c1")
            c2 = self.registry.counter("counter-2")
            c1.inc()
            c2.dec()
            c2.dec()
            # Advance inside the timer context so t1 records 1s.
            self.clock.add(1)
        r.report_now()
        input_arr = self.output.getvalue().splitlines()
        input_arr.sort()
        expected = [
            '== 1970-01-01 00:00:01 ===================================',
            'c1:',
            '               count = 1',
            'counter-2:',
            '               count = -2',
            'hist:',
            '                 avg = 102.3',
            '               count = 10.0',
            '                 max = 512',
            '                 min = 1',
            '             std_dev = 164.94851048466944',
            '       75_percentile = 160.0',
            '       95_percentile = 512',
            '       99_percentile = 512',
            '      999_percentile = 512',
            'm1:',
            '               count = 1.0',
            '            15m_rate = 0',
            '             5m_rate = 0',
            '             1m_rate = 0',
            '           mean_rate = 1.0',
            't1:',
            '                 avg = 1.0',
            '                 sum = 1.0',
            '               count = 1.0',
            '                 max = 1',
            '                 min = 1',
            '             std_dev = 0.0',
            '            15m_rate = 0',
            '             5m_rate = 0',
            '             1m_rate = 0',
            '           mean_rate = 1.0',
            '       50_percentile = 1',
            '       75_percentile = 1',
            '       95_percentile = 1',
            '       99_percentile = 1',
            '      999_percentile = 1',
            'gcb:',
            '               value = 123',
            'gsimple:',
            '               value = 42',
            '',
            # Events appear under their own (submission-time) banner.
            '== 1970-01-01 00:00:00 ===================================',
            'e1:',
            '               field = 1',
            ''
        ]
        expected.sort()
        self.assertEqual(input_arr, expected)
class TestCarbonReporter(TimedTestCase):
    """Tests for CarbonReporter (plain-text protocol).

    The test case doubles as a fake socket (``connect``/``sendall``/
    ``close``) via ``socket_factory=lambda: self``, capturing output in a
    StringIO buffer; bytes are decoded on Python 3.
    """

    def setUp(self):
        super(TestCarbonReporter, self).setUp()
        self.output = StringIO()
        self.registry = MetricsRegistry(clock=self.clock)
        self.maxDiff = None

    def connect(self, *args):
        # part of fake socket interface
        pass

    def sendall(self, data):
        # part of fake socket interface
        if sys.version_info[0] > 2:
            self.output.write(data.decode())
        else:
            self.output.write(data)

    def close(self):
        # part of fake socket interface
        pass

    def tearDown(self):
        super(TestCarbonReporter, self).tearDown()

    def test_report_now(self):
        r = CarbonReporter(registry=self.registry, reporting_interval=1,
                           clock=self.clock, socket_factory=lambda: self)
        h1 = self.registry.histogram("hist")
        for i in range(10):
            h1.add(2**i)
        gcb = self.registry.gauge("gcb", lambda: 123)
        gsimple = self.registry.gauge("gsimple").set_value(42)
        t1 = self.registry.timer("t1")
        m1 = self.registry.meter("m1")
        m1.mark()
        with t1.time():
            c1 = self.registry.counter("c1")
            c2 = self.registry.counter("counter-2")
            c1.inc()
            c2.dec()
            c2.dec()
            # Advance inside the timer context so t1 records 1s.
            self.clock.add(1)
        r.report_now()
        # BUG FIX: the original compared `.splitlines().sort()` against
        # `[...].sort()` -- list.sort() returns None, so the assertion was
        # always comparing None == None and could never fail.  Use sorted()
        # so the contents are actually checked, order-insensitively.
        self.assertEqual(sorted(self.output.getvalue().splitlines()), sorted([
            'counter-2.count -2 1',
            'c1.count 1 1',
            'gsimple.value 42 1',
            'gcb.value 123 1',
            't1.1m_rate 0 1',
            't1.999_percentile 1 1',
            't1.15m_rate 0 1',
            't1.99_percentile 1 1',
            't1.mean_rate 1.0 1',
            't1.95_percentile 1 1',
            't1.min 1 1',
            't1.5m_rate 0 1',
            't1.count 1.0 1',
            't1.75_percentile 1 1',
            't1.std_dev 0.0 1',
            't1.max 1 1',
            't1.avg 1.0 1',
            'hist.count 10.0 1',
            'hist.999_percentile 512 1',
            'hist.99_percentile 512 1',
            'hist.min 1 1',
            'hist.95_percentile 512 1',
            'hist.75_percentile 160.0 1',
            # float repr differs between Python 3 and Python 2; the class
            # supports both (see sendall), so the expectation must too.
            'hist.std_dev 164.94851048466944 1'
            if sys.version_info[0] > 2 else 'hist.std_dev 164.948510485 1',
            'hist.max 512 1',
            'hist.avg 102.3 1',
            'm1.count 1.0 1',
            'm1.1m_rate 0 1',
            'm1.15m_rate 0 1',
            'm1.5m_rate 0 1',
            'm1.mean_rate 1.0 1',
        ]))
class TestSysLogReporter(TimedTestCase):
    """Tests for SysLogReporter with event metrics: regular metrics are
    logged as one JSON line keyed by the report timestamp, while events are
    logged separately under their submission timestamp."""

    def setUp(self):
        super(TestSysLogReporter, self).setUp()
        self.registry = MetricsRegistry(clock=self.clock)
        self.maxDiff = None
        # Pin the fake clock so both timestamps are deterministic.
        self.clock.now = 0

    def tearDown(self):
        super(TestSysLogReporter, self).tearDown()
        self.clock.now = 0

    def test_report_now(self):
        # connect to a local rsyslog server
        r = SysLogReporter(registry=self.registry, reporting_interval=1, clock=self.clock)
        h1 = self.registry.histogram("hist")
        for i in range(10):
            h1.add(2**i)
        gcb = self.registry.gauge("gcb", lambda: 123)
        gsimple = self.registry.gauge("gsimple").set_value(42)
        t1 = self.registry.timer("t1")
        m1 = self.registry.meter("m1")
        m1.mark()
        # Event submitted at t=0, before the clock advances.
        e1 = self.registry.event("e1")
        e1.add({"field": 1})
        with t1.time():
            c1 = self.registry.counter("c1")
            c2 = self.registry.counter("counter-2")
            c1.inc()
            c2.dec()
            c2.dec()
            # Advance inside the timer context so t1 records 1s.
            self.clock.add(1)
        with mock.patch(
            "pyformance.reporters.syslog_reporter.logging.Logger.info"
        ) as patch:
            r.report_now()
            # One call for the metric snapshot, one for the event.
            self.assertEqual(patch.call_count, 2)
            calls = patch.call_args_list
            expected = (
                '{"c1.count": 1, "counter-2.count": -2, '
                '"gcb.value": 123, "gsimple.value": 42, '
                '"hist.75_percentile": 160.0, "hist.95_percentile": 512, '
                '"hist.999_percentile": 512, "hist.99_percentile": 512, '
                '"hist.avg": 102.3, "hist.count": 10.0, "hist.max": 512, '
                '"hist.min": 1, "hist.std_dev": 164.94851048466944, '
                '"m1.15m_rate": 0, "m1.1m_rate": 0, "m1.5m_rate": 0, "m1.count": 1.0, '
                '"m1.mean_rate": 1.0, "t1.15m_rate": 0, "t1.1m_rate": 0, '
                '"t1.50_percentile": 1, "t1.5m_rate": 0, "t1.75_percentile": 1, '
                '"t1.95_percentile": 1, "t1.999_percentile": 1, "t1.99_percentile": 1, '
                '"t1.avg": 1.0, "t1.count": 1.0, "t1.max": 1, "t1.mean_rate": 1.0, '
                '"t1.min": 1, "t1.std_dev": 0.0, "t1.sum": 1.0, "timestamp": 1}'
            )
            # First call should be event free because events are reported based on submission
            # rather than report timestamp. And a single line contains a single timestamp.
            self.assertEqual(calls[0][0][0], expected)
            # Second call should only be events
            self.assertEqual(calls[1][0][0], '{"e1.field": 1, "timestamp": 0}')
class RegistryTestCase(TimedTestCase):
    """Tests for MetricsRegistry: counters with/without tags, metric and
    event dumping, and the ``time_calls`` decorator (qualified-name
    variant)."""

    def setUp(self):
        super(RegistryTestCase, self).setUp()
        self.registry = MetricsRegistry(TimedTestCase.clock)

    def tearDown(self):
        super(RegistryTestCase, self).tearDown()

    def test__add(self):
        self.registry.add("foo", Meter(TimedTestCase.clock))

    def test_updating_counter(self):
        # Both lookups must resolve to the same underlying counter.
        self.registry.counter("test_counter").inc()
        self.registry.counter("test_counter").inc()
        self.assertEqual(self.registry.counter("test_counter").get_count(), 2)

    def test_updating_counter_with_tags(self):
        self.registry.counter("test_counter", {"weather": "sunny"}).inc()
        self.registry.counter("test_counter", {"weather": "sunny"}).inc()
        self.assertEqual(
            self.registry.counter("test_counter", {
                "weather": "sunny"
            }).get_count(), 2)

    def test_updating_counters_with_same_key_different_tags(self):
        # Tag dict ordering must not matter: sunny/cloudy and cloudy/sunny
        # are the same metric, rainy is a distinct one.
        self.registry.counter("test_counter", {
            "weather": "sunny",
            "cloudy": False
        }).inc()
        self.registry.counter("test_counter", {
            "weather": "rainy",
            "cloudy": True
        }).inc()
        self.registry.counter("test_counter", {
            "cloudy": False,
            "weather": "sunny"
        }).inc()
        self.registry.counter("test_counter", {
            "cloudy": True,
            "weather": "rainy"
        }).inc()
        self.assertEqual(
            self.registry.counter("test_counter", {
                "weather": "sunny",
                "cloudy": False
            }).get_count(), 2)
        self.assertEqual(
            self.registry.counter("test_counter", {
                "weather": "rainy",
                "cloudy": True
            }).get_count(), 2)

    def test_get_metrics(self):
        self.registry.counter("test_counter").inc()
        self.assertEqual(self.registry.get_metrics("test_counter"),
                         {"count": 1})
        self.registry.gauge("test_gauge").set_value(10)
        self.assertEqual(self.registry.get_metrics("test_gauge"),
                         {"value": 10})

    def test_dump_metrics(self):
        # Default dump keys by plain metric name (tags dropped).
        self.registry.counter("test_counter", {"tag1": "val1"}).inc()
        self.assertEqual(self.registry.dump_metrics(),
                         {"test_counter": {
                             "count": 1
                         }})

    def test_dump_metrics_with_tags(self):
        # key_is_metric=True keys the dump by BaseMetric (name + tags).
        self.registry.counter("test_counter", {"tag1": "val1"}).inc()
        self.assertEqual(
            self.registry.dump_metrics(key_is_metric=True),
            {BaseMetric("test_counter", {"tag1": "val1"}): {
                "count": 1
            }})

    def test_dump_events(self):
        self.registry.event("test_event", {"tag1": "val1"}).add({"field": 1})
        self.assertEqual(
            self.registry.dump_metrics(key_is_metric=True), {
                BaseMetric("test_event", {"tag1": "val1"}): {
                    "events":
                    [EventPoint(time=self.clock.time(), values={"field": 1})]
                }
            })
        # Make sure the same event is never dumped twice
        self.assertEqual(self.registry.dump_metrics(key_is_metric=True),
                         {BaseMetric("test_event", {"tag1": "val1"}): {}})

    def test_time_calls_with_registry(self):
        registry = MetricsRegistry()

        @time_calls(registry=registry, tags={"tag1": "val1"})
        def timed_func():
            pass

        timed_func()
        # Metric name is derived from the function's __qualname__.
        metric_name = "RegistryTestCase.test_time_calls_with_registry.<locals>.timed_func_calls"
        stats = registry.get_metrics(key=metric_name, tags={"tag1": "val1"})
        print(registry.get_metrics(key=metric_name, tags={"tag1": "val1"}))
        self.assertEqual(stats["count"], 1)
        self.assertTrue(stats["mean_rate"])

    def test_time_calls(self):
        @time_calls
        def timed_func():
            pass

        timed_func()
        func_timer = timer(
            "RegistryTestCase.test_time_calls.<locals>.timed_func_calls")
        self.assertEqual(func_timer.get_count(), 1)
        self.assertTrue(func_timer.get_mean())

    def test_get_qualname(self):
        def foo():
            pass

        self.assertEqual(get_qualname(foo),
                         "RegistryTestCase.test_get_qualname.<locals>.foo")
class TestCarbonReporter(TimedTestCase):
    """Tests for CarbonReporter in plain-text and pickle wire formats
    (``2**i`` style variant).

    The test case doubles as a fake socket via
    ``socket_factory=lambda: self``, capturing output in a BytesIO buffer.
    """

    def setUp(self):
        super(TestCarbonReporter, self).setUp()
        self.output = BytesIO()
        self.registry = MetricsRegistry(clock=self.clock)
        self.maxDiff = None

    def connect(self, *args):
        # part of fake socket interface
        pass

    def sendall(self, data):
        # part of fake socket interface
        self.output.write(data)

    def close(self):
        # part of fake socket interface
        pass

    def tearDown(self):
        super(TestCarbonReporter, self).tearDown()

    def capture_test_metrics(self):
        # Shared fixture: one of every metric type, clock ending at t=2.
        self.clock.now = 1
        h1 = self.registry.histogram("hist")
        for i in range(10):
            h1.add(2**i)
        gcb = self.registry.gauge("gcb", lambda: 123)
        gsimple = self.registry.gauge("gsimple").set_value(42)
        t1 = self.registry.timer("t1")
        m1 = self.registry.meter("m1")
        m1.mark()
        with t1.time():
            c1 = self.registry.counter("c1")
            c2 = self.registry.counter("counter-2")
            c1.inc()
            c2.dec()
            c2.dec()
            # Advance inside the timer context so t1 records 1s.
            self.clock.add(1)

    def test_report_now_plain(self):
        r = CarbonReporter(
            registry=self.registry,
            reporting_interval=1,
            clock=self.clock,
            socket_factory=lambda: self,
        )
        self.capture_test_metrics()
        r.report_now()
        # Order of lines on the wire is unspecified; compare sorted.
        test_data = sorted(self.output.getvalue().decode().splitlines())
        expected_data = sorted([
            "counter-2.count -2 2",
            "c1.count 1 2",
            "gsimple.value 42 2",
            "gcb.value 123 2",
            "t1.1m_rate 0 2",
            "t1.999_percentile 1 2",
            "t1.15m_rate 0 2",
            "t1.99_percentile 1 2",
            "t1.mean_rate 1.0 2",
            "t1.95_percentile 1 2",
            "t1.min 1 2",
            "t1.50_percentile 1 2",
            "t1.5m_rate 0 2",
            "t1.count 1.0 2",
            "t1.75_percentile 1 2",
            "t1.std_dev 0.0 2",
            "t1.max 1 2",
            "t1.sum 1.0 2",
            "t1.avg 1.0 2",
            "hist.count 10.0 2",
            "hist.999_percentile 512 2",
            "hist.99_percentile 512 2",
            "hist.min 1 2",
            "hist.95_percentile 512 2",
            "hist.75_percentile 160.0 2",
            # float repr differs between Python 3 and Python 2
            "hist.std_dev 164.94851048466944 2"
            if PY3 else "hist.std_dev 164.948510485 2",
            "hist.max 512 2",
            "hist.avg 102.3 2",
            "m1.count 1.0 2",
            "m1.1m_rate 0 2",
            "m1.15m_rate 0 2",
            "m1.5m_rate 0 2",
            "m1.mean_rate 1.0 2",
        ])
        self.assertEqual(test_data, expected_data)

    def test_report_now_pickle(self):
        r = CarbonReporter(
            registry=self.registry,
            reporting_interval=1,
            clock=self.clock,
            socket_factory=lambda: self,
            pickle_protocol=True,
        )
        self.capture_test_metrics()
        r.report_now()
        # Skip the 4-byte length header, then unpickle the payload.
        test_data = sorted(pickle.loads(self.output.getvalue()[4:]))
        expected_data = sorted([
            ("counter-2.count", (2, -2.0)),
            ("c1.count", (2, 1)),
            ("gsimple.value", (2, 42.0)),
            ("gcb.value", (2, 123.0)),
            ("t1.1m_rate", (2, 0.0)),
            ("t1.999_percentile", (2, 1)),
            ("t1.15m_rate", (2, 0.0)),
            ("t1.99_percentile", (2, 1)),
            ("t1.mean_rate", (2, 1)),
            ("t1.95_percentile", (2, 1)),
            ("t1.min", (2, 1)),
            ("t1.50_percentile", (2, 1)),
            ("t1.5m_rate", (2, 0.0)),
            ("t1.count", (2, 1)),
            ("t1.75_percentile", (2, 1)),
            ("t1.std_dev", (2, 0.0)),
            ("t1.max", (2, 1)),
            ("t1.sum", (2, 1)),
            ("t1.avg", (2, 1)),
            ("hist.count", (2, 10.0)),
            ("hist.999_percentile", (2, 512.0)),
            ("hist.99_percentile", (2, 512.0)),
            ("hist.min", (2, 1)),
            ("hist.95_percentile", (2, 512.0)),
            ("hist.75_percentile", (2, 160.0)),
            ("hist.std_dev", (2, 164.94851048466944)),
            ("hist.max", (2, 512.0)),
            ("hist.avg", (2, 102.3)),
            ("m1.count", (2, 1)),
            ("m1.1m_rate", (2, 0.0)),
            ("m1.15m_rate", (2, 0.0)),
            ("m1.5m_rate", (2, 0.0)),
            ("m1.mean_rate", (2, 1)),
        ])
        self.assertEqual(test_data, expected_data)
class RegistryTestCase(TimedTestCase):
    """Tests for MetricsRegistry: counters with/without tags, metric
    dumping, and the ``time_calls`` decorator (plain-function-name
    variant)."""

    def setUp(self):
        super(RegistryTestCase, self).setUp()
        self.registry = MetricsRegistry(TimedTestCase.clock)

    def tearDown(self):
        super(RegistryTestCase, self).tearDown()

    def test__add(self):
        self.registry.add("foo", Meter(TimedTestCase.clock))

    def test_updating_counter(self):
        # Both lookups must resolve to the same underlying counter.
        self.registry.counter("test_counter").inc()
        self.registry.counter("test_counter").inc()
        self.assertEqual(self.registry.counter("test_counter").get_count(), 2)

    def test_updating_counter_with_tags(self):
        self.registry.counter("test_counter", {"weather": "sunny"}).inc()
        self.registry.counter("test_counter", {"weather": "sunny"}).inc()
        self.assertEqual(self.registry.counter("test_counter", {"weather": "sunny"}).get_count(), 2)

    def test_updating_counters_with_same_key_different_tags(self):
        # Tag dict ordering must not matter: sunny/cloudy and cloudy/sunny
        # are the same metric, rainy is a distinct one.
        self.registry.counter("test_counter", {"weather": "sunny", "cloudy": False}).inc()
        self.registry.counter("test_counter", {"weather": "rainy", "cloudy": True}).inc()
        self.registry.counter("test_counter", {"cloudy": False, "weather": "sunny"}).inc()
        self.registry.counter("test_counter", {"cloudy": True, "weather": "rainy"}).inc()
        self.assertEqual(self.registry.counter(
            "test_counter", {"weather": "sunny", "cloudy": False}
        ).get_count(), 2)
        self.assertEqual(self.registry.counter(
            "test_counter", {"weather": "rainy", "cloudy": True}
        ).get_count(), 2)

    def test_get_metrics(self):
        self.registry.counter("test_counter").inc()
        self.assertEqual(self.registry.get_metrics("test_counter"), {"count": 1})
        self.registry.gauge("test_gauge").set_value(10)
        self.assertEqual(self.registry.get_metrics("test_gauge"), {"value": 10})

    def test_dump_metrics(self):
        # Default dump keys by plain metric name (tags dropped).
        self.registry.counter("test_counter", {"tag1": "val1"}).inc()
        self.assertEqual(self.registry.dump_metrics(), {"test_counter": {"count": 1}})

    def test_dump_metrics_with_tags(self):
        # key_is_metric=True keys the dump by BaseMetric (name + tags).
        self.registry.counter("test_counter", {"tag1": "val1"}).inc()
        self.assertEqual(
            self.registry.dump_metrics(key_is_metric=True),
            {BaseMetric("test_counter", {"tag1": "val1"}): {"count": 1}}
        )

    def test_time_calls_with_registry(self):
        registry = MetricsRegistry()

        @time_calls(registry=registry, tags={"tag1": "val1"})
        def timed_func():
            pass

        timed_func()
        # Metric name is the bare function name plus "_calls".
        stats = registry.get_metrics(key="timed_func_calls", tags={"tag1": "val1"})
        print(registry.get_metrics(key="timed_func_calls", tags={"tag1": "val1"}))
        self.assertEqual(stats["count"], 1)
        self.assertTrue(stats["mean_rate"])

    def test_time_calls(self):
        @time_calls
        def timed_func():
            pass

        timed_func()
        func_timer = timer("timed_func_calls")
        self.assertEqual(func_timer.get_count(), 1)
        self.assertTrue(func_timer.get_mean())

    def test_get_qualname(self):
        def foo():
            pass

        self.assertEqual(get_qualname(foo), "foo")
class TestConsoleReporter(TimedTestCase):
    """Tests for ConsoleReporter: verifies the human-readable dump written
    to a stream, compared order-insensitively."""

    def setUp(self):
        super(TestConsoleReporter, self).setUp()
        self.output = StringIO()
        self.registry = MetricsRegistry(clock=self.clock)
        self.maxDiff = None
        # Pin the fake clock so the banner timestamp is deterministic.
        self.clock.now = 0

    def tearDown(self):
        super(TestConsoleReporter, self).tearDown()
        self.clock.now = 0

    def test_report_now(self):
        r = ConsoleReporter(
            registry=self.registry,
            reporting_interval=1,
            stream=self.output,
            clock=self.clock,
        )
        h1 = self.registry.histogram("hist")
        for i in range(10):
            h1.add(2**i)
        gcb = self.registry.gauge("gcb", lambda: 123)
        gsimple = self.registry.gauge("gsimple").set_value(42)
        t1 = self.registry.timer("t1")
        m1 = self.registry.meter("m1")
        m1.mark()
        with t1.time():
            c1 = self.registry.counter("c1")
            c2 = self.registry.counter("counter-2")
            c1.inc()
            c2.dec()
            c2.dec()
            # Advance inside the timer context so t1 records 1s.
            self.clock.add(1)
        r.report_now()
        # BUG FIX: the original called `.sort()` on both sides of
        # assertEqual -- list.sort() returns None, so the assertion was
        # always comparing None == None and could never fail.  Use sorted()
        # so the contents are actually checked, order-insensitively.
        self.assertEqual(
            sorted(self.output.getvalue().splitlines()),
            sorted([
                "== 1970-01-01 00:00:01 ===================================",
                "counter-2:",
                "               count = -2",
                "gsimple:",
                "               value = 42",
                "gcb:",
                "               value = 123",
                "t1:",
                "             1m_rate = 0",
                "      999_percentile = 1",
                "            15m_rate = 0",
                "       99_percentile = 1",
                "           mean_rate = 1.0",
                "       95_percentile = 1",
                "                 min = 1",
                "             5m_rate = 0",
                "               count = 1.0",
                "       75_percentile = 1",
                "             std_dev = 0.0",
                "                 max = 1",
                "                 avg = 1.0",
                "hist:",
                "               count = 10.0",
                "      999_percentile = 512",
                "       99_percentile = 512",
                "                 min = 1",
                "       95_percentile = 512",
                "       75_percentile = 160.0",
                "             std_dev = 164.94851048466944",
                "                 max = 512",
                "                 avg = 102.3",
                "m1:",
                "               count = 1.0",
                "             1m_rate = 0",
                "            15m_rate = 0",
                "             5m_rate = 0",
                "           mean_rate = 1.0",
                "c1:",
                "               count = 1",
                "",
            ]),
        )
class TestInfluxReporter(TimedTestCase):
    """Tests for InfluxReporter: line-protocol formatting of gauges,
    counters and events, tag escaping/merging with global tags, database
    auto-creation, and the ``_format_tag_value`` helper."""

    def setUp(self):
        super(TestInfluxReporter, self).setUp()
        self.registry = MetricsRegistry()

    def tearDown(self):
        super(TestInfluxReporter, self).tearDown()

    def test_not_called_on_blank(self):
        # An empty registry must not trigger any HTTP request.
        influx_reporter = InfluxReporter(registry=self.registry)
        with mock.patch("pyformance.reporters.influx.urlopen") as patch:
            influx_reporter.report_now()
            patch.assert_not_called()

    def test_create_database(self):
        # With autocreate_database=True, the (empty-registry) report still
        # issues exactly one request: the CREATE DATABASE call.
        r1 = InfluxReporter(registry=self.registry, autocreate_database=True)
        with mock.patch("pyformance.reporters.influx.urlopen") as patch:
            r1.report_now()
            if patch.call_count != 1:
                raise AssertionError(
                    "Expected 1 calls to 'urlopen'. Received: {}".format(
                        patch.call_count))

    def test_gauge_without_tags(self):
        self.registry.gauge("cpu").set_value(65)
        influx_reporter = InfluxReporter(registry=self.registry,
                                         clock=self.clock,
                                         autocreate_database=False)
        with mock.patch.object(influx_reporter, "_try_send") as send_mock:
            influx_reporter.report_now()
        expected_url = "http://127.0.0.1:8086/write?db=metrics&precision=s"
        expected_data = "cpu value=65 " + self.clock.time_string()
        send_mock.assert_called_once_with(expected_url, expected_data)

    def test_gauge_with_tags(self):
        # Spaces in tag values must be backslash-escaped in line protocol.
        tags = {"region": "us - west"}
        self.registry.gauge(key="cpu", tags=tags).set_value(65)
        influx_reporter = InfluxReporter(registry=self.registry,
                                         clock=self.clock,
                                         autocreate_database=False)
        with mock.patch.object(influx_reporter, "_try_send") as send_mock:
            influx_reporter.report_now()
        expected_url = "http://127.0.0.1:8086/write?db=metrics&precision=s"
        expected_data = "cpu,region=us\\ -\\ west value=65 " + \
            self.clock.time_string()
        send_mock.assert_called_once_with(expected_url, expected_data)

    def test_gauge_with_global_tags(self):
        # Metric-level tags override global tags with the same key.
        tags = {"region": "us-west-2"}
        self.registry.gauge(key="cpu", tags=tags).set_value(65)
        influx_reporter = InfluxReporter(registry=self.registry,
                                         clock=self.clock,
                                         autocreate_database=False,
                                         global_tags={
                                             "stage": "dev",
                                             "region": "override"
                                         })
        with mock.patch.object(influx_reporter, "_try_send") as send_mock:
            influx_reporter.report_now()
        expected_url = "http://127.0.0.1:8086/write?db=metrics&precision=s"
        expected_data = "cpu,stage=dev,region=us-west-2 value=65 " + \
            self.clock.time_string()
        send_mock.assert_called_once_with(expected_url, expected_data)

    def test_counter_with_tags(self):
        tags = {"host": "server1"}
        counter = self.registry.counter(key="cpu", tags=tags)
        for i in range(5):
            counter.inc(1)
        influx_reporter = InfluxReporter(registry=self.registry,
                                         clock=self.clock,
                                         autocreate_database=False)
        with mock.patch.object(influx_reporter, "_try_send") as send_mock:
            influx_reporter.report_now()
        expected_url = "http://127.0.0.1:8086/write?db=metrics&precision=s"
        expected_data = "cpu,host=server1 count=5 " + \
            self.clock.time_string()
        send_mock.assert_called_once_with(expected_url, expected_data)

    def test_events_with_tags(self):
        tags = {"host": "server1"}
        # Replace the registry clock so event timestamps are deterministic.
        self.registry._clock = self.clock
        event = self.registry.event(key="event", tags=tags)
        # MarkInt marks the value as an Influx integer field ("1i").
        event.add({"field": 1, "float": 0.12, "int": MarkInt(1)})
        influx_reporter = InfluxReporter(registry=self.registry,
                                         clock=self.clock,
                                         autocreate_database=False)
        with mock.patch.object(influx_reporter, "_try_send") as send_mock:
            influx_reporter.report_now()
        expected_url = "http://127.0.0.1:8086/write?db=metrics&precision=s"
        expected_data = "event,host=server1 field=1,float=0.12,int=1i " + \
            self.clock.time_string()
        send_mock.assert_called_once_with(expected_url, expected_data)

    def test_combined_events_with_counter(self):
        # A counter and an event under the same key+tags are sent together
        # in one request, one line each.
        tags = {"host": "server1"}
        self.registry._clock = self.clock
        event = self.registry.event(key="event", tags=tags)
        event.add({"field": 1})
        counter = self.registry.counter("event", tags=tags)
        counter.inc(5)
        influx_reporter = InfluxReporter(registry=self.registry,
                                         clock=self.clock,
                                         autocreate_database=False)
        with mock.patch.object(influx_reporter, "_try_send") as send_mock:
            influx_reporter.report_now()
        expected_url = "http://127.0.0.1:8086/write?db=metrics&precision=s"
        expected_data = [
            "event,host=server1 count=5 " + self.clock.time_string(),
            "event,host=server1 field=1 " + self.clock.time_string()
        ]
        send_mock.assert_called_once_with(expected_url,
                                          "\n".join(expected_data))

    def test_count_calls_with_tags(self):
        tags = {"host": "server1"}
        counter = self.registry.counter(key="cpu", tags=tags)
        for i in range(5):
            counter.inc(1)
        influx_reporter = InfluxReporter(registry=self.registry,
                                         clock=self.clock,
                                         autocreate_database=False)
        with mock.patch.object(influx_reporter, "_try_send") as send_mock:
            influx_reporter.report_now()
        expected_url = "http://127.0.0.1:8086/write?db=metrics&precision=s"
        expected_data = "cpu,host=server1 count=5 " + \
            self.clock.time_string()
        send_mock.assert_called_once_with(expected_url, expected_data)

    def test__format_tag_value(self):
        # Line-protocol escaping: space, comma and equals get a backslash.
        self.assertEqual(_format_tag_value("no_special_chars"),
                         "no_special_chars")
        self.assertEqual(_format_tag_value("has space"), "has\\ space")
        self.assertEqual(_format_tag_value("has,comma"), "has\\,comma")
        self.assertEqual(_format_tag_value("has=equals"), "has\\=equals")
def pausing_vm_decorator(
        original_vm_class: Type[VirtualMachineAPI],
        event_bus: EndpointAPI,
        metrics_registry: MetricsRegistry,
        loop: asyncio.AbstractEventLoop,
        urgent: bool = True) -> Type[VirtualMachineAPI]:
    """
    Decorate a py-evm VM so that it will pause when data is missing.

    Returns a subclass of ``original_vm_class`` whose state object catches the
    missing-trie-data exceptions raised during EVM execution, requests the data
    over ``event_bus``, waits for it to arrive, and then retries the operation.

    :param original_vm_class: the VM class to wrap
    :param event_bus: used to broadcast ``CollectMissing*`` requests
    :param metrics_registry: receives counters for each kind of missing data
    :param loop: event loop on which the request coroutines are scheduled
        (EVM execution runs on another thread — see ``run_coroutine_threadsafe``)
    :param urgent: True when importing at the chain tip (waits are logged
        loudly); False for preview/speculative executions (logging suppressed)
    """
    # Counters tracking how often each category of state data was missing:
    missing_storage_metrics_counter = metrics_registry.counter('trinity.sync/missing_storage')
    missing_account_metrics_counter = metrics_registry.counter('trinity.sync/missing_account')
    missing_bytecode_metrics_counter = metrics_registry.counter('trinity.sync/missing_bytecode')

    async def request_missing_storage(
            missing_node_hash: Hash32,
            storage_key: Hash32,
            storage_root_hash: Hash32,
            account_address: Address,
            block_number: BlockNumber) -> MissingStorageResult:
        # Ask whichever endpoint serves CollectMissingStorage to fetch the node;
        # fail fast if no server is subscribed.
        missing_storage_metrics_counter.inc()
        if event_bus.is_any_endpoint_subscribed_to(CollectMissingStorage):
            return await event_bus.request(CollectMissingStorage(
                missing_node_hash,
                storage_key,
                storage_root_hash,
                account_address,
                urgent,
                block_number,
            ))
        else:
            raise StateUnretrievable("No servers for CollectMissingStorage")

    async def request_missing_account(
            missing_node_hash: Hash32,
            address_hash: Hash32,
            state_root_hash: Hash32,
            block_number: BlockNumber) -> MissingAccountResult:
        # Same pattern as request_missing_storage, for account trie nodes.
        missing_account_metrics_counter.inc()
        if event_bus.is_any_endpoint_subscribed_to(CollectMissingAccount):
            return await event_bus.request(CollectMissingAccount(
                missing_node_hash,
                address_hash,
                state_root_hash,
                urgent,
                block_number,
            ))
        else:
            raise StateUnretrievable("No servers for CollectMissingAccount")

    async def request_missing_bytecode(
            bytecode_hash: Hash32,
            block_number: BlockNumber) -> MissingBytecodeResult:
        # Same pattern as request_missing_storage, for contract bytecode.
        missing_bytecode_metrics_counter.inc()
        if event_bus.is_any_endpoint_subscribed_to(CollectMissingBytecode):
            return await event_bus.request(CollectMissingBytecode(
                bytecode_hash,
                urgent,
                block_number,
            ))
        else:
            raise StateUnretrievable("No servers for CollectMissingBytecode")

    class PausingVMState(original_vm_class.get_state_class()):  # type: ignore
        """
        A custom version of VMState that pauses EVM execution when required data
        is missing.
        """
        # Per-block statistics about how long/often we paused for data.
        stats_counter: BeamStats
        # How long to wait for one data request before logging and retrying.
        node_retrieval_timeout = BLOCK_IMPORT_MISSING_STATE_TIMEOUT

        def __init__(self, *args: Any, **kwargs: Any) -> None:
            super().__init__(*args, **kwargs)
            self.stats_counter = BeamStats()

        def _pause_on_missing_data(
                self,
                vm_method: Callable[[Any], TVMFuncReturn],
                *args: Any,
                **kwargs: Any) -> TVMFuncReturn:
            """
            Catch exceptions about missing state data and pause while waiting for
            the event bus to reply with the needed data. Repeat if there is a
            request timeout.
            """
            while True:
                try:
                    return self._request_missing_data(vm_method, *args, **kwargs)
                except futures.TimeoutError:
                    # Count the full timeout as paused time, then retry forever.
                    self.stats_counter.data_pause_time += self.node_retrieval_timeout
                    if urgent:
                        log_func = self.logger.warning
                    else:
                        log_func = self.logger.debug
                    # NOTE: fixed typo in the log message ("requsting").
                    log_func(
                        "Timed out requesting state data for block #%d, retrying..."
                        " Stats so far: %s",
                        self.block_number,
                        self.stats_counter,
                    )

        def _request_missing_data(
                self,
                vm_method: Callable[[Any], TVMFuncReturn],
                *args: Any,
                **kwargs: Any) -> TVMFuncReturn:
            """
            Catch exceptions about missing state data and pause while waiting for
            the event bus to reply with the needed data.
            """
            while True:
                try:
                    return vm_method(*args, **kwargs)  # type: ignore
                except MissingAccountTrieNode as exc:
                    t = Timer()
                    # Schedule the async request on the event loop and block this
                    # (EVM) thread until the data arrives or the timeout fires.
                    account_future = asyncio.run_coroutine_threadsafe(
                        request_missing_account(
                            exc.missing_node_hash,
                            exc.address_hash,
                            exc.state_root_hash,
                            self.block_number,
                        ),
                        loop,
                    )
                    account_event = account_future.result(timeout=self.node_retrieval_timeout)
                    if urgent:
                        self.logger.debug(
                            "Paused for account nodes (%d) for %.3fs, %.3fs avg (starts on %s)",
                            account_event.num_nodes_collected,
                            t.elapsed,
                            t.elapsed / (account_event.num_nodes_collected or 1),
                            exc.missing_node_hash[:2].hex(),
                        )
                    # Collect the amount of paused time before checking if we should exit, so
                    # it shows up in logged statistics.
                    self.stats_counter.data_pause_time += t.elapsed
                    if not account_event.is_retry_acceptable:
                        raise StateUnretrievable("Server asked us to stop trying")
                    self.stats_counter.num_accounts += 1
                    self.stats_counter.num_account_nodes += account_event.num_nodes_collected
                except MissingBytecode as exc:
                    t = Timer()
                    bytecode_future = asyncio.run_coroutine_threadsafe(
                        request_missing_bytecode(
                            exc.missing_code_hash,
                            self.block_number,
                        ),
                        loop,
                    )
                    bytecode_event = bytecode_future.result(timeout=self.node_retrieval_timeout)
                    if urgent:
                        self.logger.debug(
                            "Paused for bytecode for %.3fs (%s)",
                            t.elapsed,
                            exc.missing_code_hash[:2].hex(),
                        )
                    # Record paused time before the retry-acceptable check (see above).
                    self.stats_counter.data_pause_time += t.elapsed
                    if not bytecode_event.is_retry_acceptable:
                        raise StateUnretrievable("Server asked us to stop trying")
                    self.stats_counter.num_bytecodes += 1
                except MissingStorageTrieNode as exc:
                    t = Timer()
                    storage_future = asyncio.run_coroutine_threadsafe(
                        request_missing_storage(
                            exc.missing_node_hash,
                            exc.requested_key,
                            exc.storage_root_hash,
                            exc.account_address,
                            self.block_number,
                        ),
                        loop,
                    )
                    storage_event = storage_future.result(timeout=self.node_retrieval_timeout)
                    if urgent:
                        self.logger.debug(
                            "Paused for storage nodes (%d) for %.3fs, %.3fs avg (starts on %s)",
                            storage_event.num_nodes_collected,
                            t.elapsed,
                            t.elapsed / (storage_event.num_nodes_collected or 1),
                            exc.missing_node_hash[:2].hex(),
                        )
                    # Record paused time before the retry-acceptable check (see above).
                    self.stats_counter.data_pause_time += t.elapsed
                    if not storage_event.is_retry_acceptable:
                        raise StateUnretrievable("Server asked us to stop trying")
                    self.stats_counter.num_storages += 1
                    self.stats_counter.num_storage_nodes += storage_event.num_nodes_collected

        # Every state accessor below simply delegates to the parent class,
        # wrapped so that missing trie data pauses instead of raising.

        def get_balance(self, account: bytes) -> int:
            return self._pause_on_missing_data(super().get_balance, account)

        def get_code(self, account: bytes) -> bytes:
            return self._pause_on_missing_data(super().get_code, account)

        def get_storage(self, *args: Any, **kwargs: Any) -> int:
            return self._pause_on_missing_data(super().get_storage, *args, **kwargs)

        def delete_storage(self, *args: Any, **kwargs: Any) -> None:
            return self._pause_on_missing_data(super().delete_storage, *args, **kwargs)

        def delete_account(self, *args: Any, **kwargs: Any) -> None:
            return self._pause_on_missing_data(super().delete_account, *args, **kwargs)

        def set_balance(self, *args: Any, **kwargs: Any) -> None:
            return self._pause_on_missing_data(super().set_balance, *args, **kwargs)

        def get_nonce(self, *args: Any, **kwargs: Any) -> int:
            return self._pause_on_missing_data(super().get_nonce, *args, **kwargs)

        def set_nonce(self, *args: Any, **kwargs: Any) -> None:
            return self._pause_on_missing_data(super().set_nonce, *args, **kwargs)

        def increment_nonce(self, *args: Any, **kwargs: Any) -> None:
            return self._pause_on_missing_data(super().increment_nonce, *args, **kwargs)

        def set_code(self, *args: Any, **kwargs: Any) -> None:
            return self._pause_on_missing_data(super().set_code, *args, **kwargs)

        def get_code_hash(self, *args: Any, **kwargs: Any) -> Hash32:
            return self._pause_on_missing_data(super().get_code_hash, *args, **kwargs)

        def delete_code(self, *args: Any, **kwargs: Any) -> None:
            return self._pause_on_missing_data(super().delete_code, *args, **kwargs)

        def has_code_or_nonce(self, *args: Any, **kwargs: Any) -> bool:
            return self._pause_on_missing_data(super().has_code_or_nonce, *args, **kwargs)

        def account_exists(self, *args: Any, **kwargs: Any) -> bool:
            return self._pause_on_missing_data(super().account_exists, *args, **kwargs)

        def touch_account(self, *args: Any, **kwargs: Any) -> None:
            return self._pause_on_missing_data(super().touch_account, *args, **kwargs)

        def account_is_empty(self, *args: Any, **kwargs: Any) -> bool:
            return self._pause_on_missing_data(super().account_is_empty, *args, **kwargs)

        def persist(self) -> Optional[Any]:
            return self._pause_on_missing_data(super().persist)

        def make_state_root(self) -> Optional[Any]:
            return self._pause_on_missing_data(super().make_state_root)

    class PausingVM(original_vm_class):  # type: ignore
        """A VM whose state class pauses on missing data instead of failing."""
        logger = get_logger(f'eth.vm.base.VM.{original_vm_class.__name__}')
        # monotonic timestamp of the last progress log, used for rate-limiting
        last_log_time = 0.0

        def import_block(self, block: BlockAPI) -> BlockAndMetaWitness:
            # Reset the missing-data counters so they reflect only this import.
            missing_account_metrics_counter.clear()
            missing_bytecode_metrics_counter.clear()
            missing_storage_metrics_counter.clear()
            return super().import_block(block)

        @classmethod
        def get_state_class(cls) -> Type[StateAPI]:
            return PausingVMState

        def get_beam_stats(self) -> BeamStats:
            return self.state.stats_counter

        def transaction_applied_hook(
                self,
                transaction_index: int,
                transactions: Sequence[SignedTransactionAPI],
                base_header: BlockHeaderAPI,
                partial_header: BlockHeaderAPI,
                computation: ComputationAPI,
                receipt: ReceiptAPI) -> None:
            """Log beam-sync progress after each applied transaction."""
            num_transactions = len(transactions)
            now = time.monotonic()
            if urgent:
                # The currently-importing block
                if transaction_index == num_transactions - 1:
                    logger = self.logger.info
                    log_header = "Beamed"
                elif now - self.last_log_time > MIN_GAS_LOG_WAIT:
                    logger = self.logger.info
                    log_header = "Beaming"
                else:
                    # Logged an update too recently, skipping...
                    return
            else:
                # Don't log anything for preview executions, for now
                return
            beam_stats = self.get_beam_stats()
            # Estimate remaining wait time by scaling observed pause time by the
            # fraction of block gas already executed.
            fraction_complete = partial_header.gas_used / base_header.gas_used
            if fraction_complete:
                total_est = beam_stats.data_pause_time / fraction_complete
                est_time = humanize_seconds(total_est - beam_stats.data_pause_time)
            else:
                est_time = "?"
            logger(
                "%s: #%d txn %d/%d, rtt: %.3fs, wait: %s, nodes: %d, gas: %s/%s (%.1f%%) ETA %s",
                log_header,
                base_header.block_number,
                transaction_index + 1,
                num_transactions,
                beam_stats.avg_rtt,
                humanize_seconds(beam_stats.data_pause_time),
                beam_stats.num_nodes,
                f"{partial_header.gas_used:,d}",
                f"{base_header.gas_used:,d}",
                100 * fraction_complete,
                est_time,
            )
            self.last_log_time = now

    return PausingVM
class TestNewRelicReporter(TimedTestCase):
    """End-to-end check of the New Relic plugin-API payload for every metric type."""

    def setUp(self):
        super(TestNewRelicReporter, self).setUp()
        # NewRelicSink shapes registry values into the plugin-API metric format.
        self.registry = MetricsRegistry(clock=self.clock, sink=NewRelicSink)
        self.maxDiff = None

    def tearDown(self):
        super(TestNewRelicReporter, self).tearDown()

    def test_report_now(self):
        r = NewRelicReporter(
            'license_key',
            registry=self.registry,
            reporting_interval=1,
            clock=self.clock,
            name='foo')
        # Histogram with values 1, 2, 4, ..., 512 (powers of two).
        h1 = self.registry.histogram("hist", 'a/b')
        for i in range(10):
            h1.add(2**i)
        t1 = self.registry.timer("t1")
        gauge = self.registry.gauge('g1', SimpleGauge(unit='g'))
        gauge_value = 10
        gauge.set_value(gauge_value)
        m = self.registry.meter('m1', 'u1')
        m.mark()
        # One timed section; the counters inside it take 1 clock second.
        with t1.time():
            m.mark()
            c1 = self.registry.counter("counter-1", 'c')
            c2 = self.registry.counter("counter-2", 'c')
            c1.inc()
            c2.dec()
            c2.dec()
            self.clock.add(1)
        m.mark()
        output = json.loads(r.collect_metrics(self.registry))
        # Full expected plugin payload: agent identification plus one component
        # holding every metric under the "Component/<name>/<stat>" namespace.
        expected = {
            "agent": {
                "host": socket.gethostname(),
                "pid": os.getpid(),
                "version": "0.3.3"
            },
            "components": [{
                "duration": 1,
                "guid": "com.github.pyformance",
                "metrics": {
                    "Component/hist/raw": {
                        "max": 512,
                        "total": 1023,
                        "min": 1,
                        "count": 10,
                        "sum_of_squares": 349525
                    },
                    "Component/t1/95_percentile": 1.,
                    "Component/hist/999_percentile": 512,
                    "Component/counter-2/raw": {
                        "total": -3,
                        "max": -1,
                        "count": 2,
                        "sum_of_squares": 5,
                        "min": -2
                    },
                    "Component/t1/mean_rate": {
                        "max": 1,
                        "count": 1,
                        "total": 1.,
                        "min": 1,
                        "sum_of_squares": 1.
                    },
                    "Component/t1/999_percentile": 1.,
                    "Component/m1/1m_rate": {
                        "min": 0,
                        "total": 0,
                        "max": 0,
                        "sum_of_squares": 0,
                        "count": 0
                    },
                    "Component/t1/15m_rate": {
                        "total": 0,
                        "min": 0,
                        "max": 0,
                        "sum_of_squares": 0,
                        "count": 0
                    },
                    "Component/hist/99_percentile": 512,
                    "Component/t1/raw": {
                        "min": 1.,
                        "count": 1,
                        "sum_of_squares": 1.,
                        "max": 1.,
                        "total": 1.
                    },
                    "Component/m1/mean_rate": {
                        "sum_of_squares": 3.,
                        "count": 3,
                        "total": 3.,
                        "min": 1,
                        "max": 1
                    },
                    "Component/hist/std_dev": 164.94851048466947,
                    "Component/counter-1/raw": {
                        "count": 1,
                        "sum_of_squares": 1,
                        "min": 1,
                        "max": 1,
                        "total": 1
                    },
                    "Component/t1/50_percentile": 1.,
                    "Component/t1/99_percentile": 1.,
                    "Component/hist/95_percentile": 512,
                    "Component/m1/15m_rate": {
                        "count": 0,
                        "sum_of_squares": 0,
                        "max": 0,
                        "min": 0,
                        "total": 0
                    },
                    "Component/hist/75_percentile": 160.,
                    "Component/t1/5m_rate": {
                        "count": 0,
                        "min": 0,
                        "total": 0,
                        "sum_of_squares": 0,
                        "max": 0
                    },
                    "Component/hist/mean_rate": {
                        "count": 103,
                        "max": 1,
                        "sum_of_squares": 102.09,
                        "min": 0.29999999999999716,
                        "total": 102.3
                    },
                    "Component/t1/count": 1.,
                    "Component/g1/gauge": 10,
                    "Component/t1/1m_rate": {
                        "count": 0,
                        "total": 0,
                        "min": 0,
                        "max": 0,
                        "sum_of_squares": 0
                    },
                    "Component/t1/75_percentile": 1.,
                    "Component/t1/std_dev": 0.,
                    "Component/m1/raw": {
                        "min": 1,
                        "sum_of_squares": 3,
                        "count": 3,
                        "total": 3,
                        "max": 1
                    },
                    "Component/m1/5m_rate": {
                        "count": 0,
                        "min": 0,
                        "total": 0,
                        "sum_of_squares": 0,
                        "max": 0
                    }
                },
                "name": "foo"
            }]
        }
        # Round-trip both sides through JSON so dict ordering and int/float
        # representation differences don't cause spurious mismatches.
        self.assertEqual(json.loads(json.dumps(expected)), json.loads(json.dumps(output)))