def test_basic(self):
    """Check time.convert across unit pairs, including case-insensitive
    and long-form unit names, sub-second units, and a year-scale value.
    """
    # assertEquals is a deprecated alias (removed in Python 3.12); use
    # assertEqual, consistent with the sibling test in this file.
    self.assertEqual(time.convert(60, 's', 's'), 60.0)
    self.assertEqual(time.convert(60, 's', 'm'), 1.0)
    self.assertEqual(time.convert(60000, 'ms', 'm'), 1.0)
    # Unit strings are accepted case-insensitively and by long name.
    self.assertEqual(time.convert(60000, 'MS', 'minutes'), 1.0)
    self.assertEqual(time.convert(3600 * 1000 * 1.4, 'ms', 'h'), 1.4)
    self.assertEqual(time.convert(86400 * 1000 * 2.5, 'ms', 'd'), 2.5)
    self.assertEqual(time.convert(86400 * 1000 * 365 * 0.7, 'ms', 'y'), 0.7)
    self.assertEqual(time.convert(1000, 'ms', 'us'), 1000000)
    self.assertEqual(time.convert(1000, 'ms', 'ns'), 1000000000)
    self.assertEqual(time.convert(1.5, 'y', 'ns'),
                     1.5 * 365 * 24 * 3600 * 1000 * 1000 * 1000)
def test_basic(self):
    """Verify time.convert over a representative matrix of unit pairs."""
    cases = (
        (60, 's', 's', 60.0),
        (60, 's', 'm', 1.0),
        (60000, 'ms', 'm', 1.0),
        # Units are accepted case-insensitively and by long name.
        (60000, 'MS', 'minutes', 1.0),
        (3600 * 1000 * 1.4, 'ms', 'h', 1.4),
        (86400 * 1000 * 2.5, 'ms', 'd', 2.5),
        (86400 * 1000 * 365 * 0.7, 'ms', 'y', 0.7),
        (1000, 'ms', 'us', 1000000),
        (1000, 'ms', 'ns', 1000000000),
        (1.5, 'y', 'ns', 1.5 * 365 * 24 * 3600 * 1000 * 1000 * 1000),
    )
    for value, src_unit, dst_unit, expected in cases:
        self.assertEqual(time.convert(value, src_unit, dst_unit), expected)
def collect(self):
    """Fetch every metric subtree listed in self.PATHS and publish the
    catalog, command-queue and JVM-memory values as gauges/counters.
    """
    rawmetrics = {}
    for subnode, path in self.PATHS.items():
        rawmetrics[subnode] = self.fetch_metrics(path)

    def to_seconds(metric):
        # Median (50th percentile) latency, normalized to seconds using
        # the unit the endpoint reports alongside it.
        return time_convertor.convert(
            metric['50thPercentile'], metric['LatencyUnit'], 'seconds')

    # Catalog / command-processing metrics.
    self.publish_gauge('num_resources', rawmetrics['num-resources']['Value'])
    self.publish_gauge('catalog_duplicate_pct',
                       rawmetrics['duplicate-pct']['Value'])
    self.publish_gauge('sec_command', to_seconds(rawmetrics['processing-time']))
    self.publish_gauge('resources_service_time',
                       to_seconds(rawmetrics['resources.service-time']))
    self.publish_gauge('enqueueing_service_time',
                       to_seconds(rawmetrics['commands.service-time']))
    self.publish_gauge('discarded', rawmetrics['discarded']['Count'])
    self.publish_gauge('processed', rawmetrics['processed']['Count'])
    self.publish_gauge('rejected', rawmetrics['fatal']['Count'])
    self.publish_gauge('DB_Compaction', to_seconds(rawmetrics['gc-time']))
    self.publish_gauge('resource_duplicate_pct',
                       rawmetrics['pct-resource-dupes']['Value'])
    self.publish_gauge('num_nodes', rawmetrics['num-nodes']['Value'])

    # Message-queue metrics (all read from the same 'queue' subtree).
    queue = rawmetrics['queue']
    self.publish_counter('queue.ProducerCount', queue['ProducerCount'])
    self.publish_counter('queue.DequeueCount', queue['DequeueCount'])
    self.publish_counter('queue.ConsumerCount', queue['ConsumerCount'])
    self.publish_gauge('queue.QueueSize', queue['QueueSize'])
    self.publish_counter('queue.ExpiredCount', queue['ExpiredCount'])
    self.publish_counter('queue.EnqueueCount', queue['EnqueueCount'])
    self.publish_counter('queue.InFlightCount', queue['InFlightCount'])
    self.publish_gauge('queue.CursorPercentUsage', queue['CursorPercentUsage'])
    self.publish_gauge('queue.MemoryUsagePortion', queue['MemoryUsagePortion'])

    # JVM memory usage.
    memory = rawmetrics['memory']
    self.publish_gauge('memory.NonHeapMemoryUsage.used',
                       memory['NonHeapMemoryUsage']['used'])
    self.publish_gauge('memory.NonHeapMemoryUsage.committed',
                       memory['NonHeapMemoryUsage']['committed'])
    self.publish_gauge('memory.HeapMemoryUsage.used',
                       memory['HeapMemoryUsage']['used'])
    self.publish_gauge('memory.HeapMemoryUsage.committed',
                       memory['HeapMemoryUsage']['committed'])
def collect(self):
    """Fetch the metric subtrees in self.PATHS plus per-node event counts
    and publish catalog, queue, JVM-memory and node-status values.
    """
    nodestats = self.count_events()
    rawmetrics = {}
    for subnode, path in self.PATHS.items():
        rawmetrics[subnode] = self.fetch_metrics(path)

    def to_seconds(metric):
        # Median (50th percentile) duration, normalized to seconds using
        # the unit the endpoint reports alongside it.
        return time_convertor.convert(
            metric['50thPercentile'], metric['DurationUnit'], 'seconds')

    # Catalog / command-processing metrics.
    self.publish_gauge('num_resources', rawmetrics['num-resources']['Value'])
    self.publish_gauge('avg_resources_per_node',
                       rawmetrics['avg-resources-per-node']['Value'])
    self.publish_gauge('catalog_duplicate_pct',
                       rawmetrics['duplicate-pct']['Value'])
    self.publish_gauge('resources_service_time',
                       to_seconds(rawmetrics['resources.service-time']))
    self.publish_gauge('enqueueing_service_time',
                       to_seconds(rawmetrics['commands.service-time']))
    self.publish_gauge('processed', rawmetrics['processed']['Count'])
    self.publish_gauge('DB_Compaction', to_seconds(rawmetrics['gc-time']))
    self.publish_gauge('resource_duplicate_pct',
                       rawmetrics['pct-resource-dupes']['Value'])
    self.publish_gauge('num_nodes', rawmetrics['num-nodes']['Value'])

    # Command-queue metrics (each one is its own metric subtree).
    self.publish_gauge('queue.AwaitingRetry',
                       rawmetrics['queue.AwaitingRetry']['Count'])
    self.publish_gauge('queue.CommandParseTime',
                       to_seconds(rawmetrics['queue.CommandParseTime']))
    self.publish_gauge('queue.Depth', rawmetrics['queue.Depth']['Count'])
    self.publish_counter('queue.Discarded',
                         rawmetrics['queue.Discarded']['Count'])
    self.publish_counter('queue.Fatal', rawmetrics['queue.Fatal']['Count'])
    self.publish_counter('queue.Invalidated',
                         rawmetrics['queue.Invalidated']['Count'])
    self.publish_gauge('queue.MessagePersistenceTime',
                       to_seconds(rawmetrics['queue.MessagePersistenceTime']))
    self.publish_counter('queue.Processed',
                         rawmetrics['queue.Processed']['Count'])
    self.publish_gauge('queue.ProcessingTime',
                       to_seconds(rawmetrics['queue.ProcessingTime']))
    self.publish_gauge('queue.QueueTime',
                       rawmetrics['queue.QueueTime']['50thPercentile'])
    self.publish_counter('queue.Retried', rawmetrics['queue.Retried']['Count'])
    self.publish_gauge('queue.RetryCounts',
                       rawmetrics['queue.RetryCounts']['50thPercentile'])
    self.publish_counter('queue.Seen', rawmetrics['queue.Seen']['Count'])
    self.publish_gauge('queue.Size',
                       rawmetrics['queue.Size']['50thPercentile'])

    # JVM memory usage.
    memory = rawmetrics['memory']
    self.publish_gauge('memory.NonHeapMemoryUsage.used',
                       memory['NonHeapMemoryUsage']['used'])
    self.publish_gauge('memory.NonHeapMemoryUsage.committed',
                       memory['NonHeapMemoryUsage']['committed'])
    self.publish_gauge('memory.HeapMemoryUsage.used',
                       memory['HeapMemoryUsage']['used'])
    self.publish_gauge('memory.HeapMemoryUsage.committed',
                       memory['HeapMemoryUsage']['committed'])

    # Node run-status tallies gathered by count_events().
    self.publish_gauge('nodes.status.failures', nodestats['failures'])
    self.publish_gauge('nodes.status.skips', nodestats['skips'])
    self.publish_gauge('nodes.status.successes', nodestats['successes'])
    self.publish_gauge('nodes.status.noops', nodestats['noops'])
def collect(self):
    """Fetch the metric subtrees in self.PATHS plus per-node status counts
    and publish catalog, queue, JVM-memory and node-status values.
    """
    nodestats = self.count_nodes()
    rawmetrics = {}
    for subnode, path in self.PATHS.items():
        rawmetrics[subnode] = self.fetch_metrics(path)

    def to_seconds(metric):
        # Median (50th percentile) duration, normalized to seconds using
        # the unit the endpoint reports alongside it.
        return time_convertor.convert(
            metric['50thPercentile'], metric['DurationUnit'], 'seconds')

    # Catalog / command-processing metrics.
    self.publish_gauge('num_resources', rawmetrics['num-resources']['Value'])
    self.publish_gauge('avg_resources_per_node',
                       rawmetrics['avg-resources-per-node']['Value'])
    self.publish_gauge('catalog_duplicate_pct',
                       rawmetrics['duplicate-pct']['Value'])
    self.publish_gauge('resources_service_time',
                       to_seconds(rawmetrics['resources.service-time']))
    self.publish_gauge('enqueueing_service_time',
                       to_seconds(rawmetrics['commands.service-time']))
    self.publish_gauge('processed', rawmetrics['processed']['Count'])
    self.publish_gauge('DB_Compaction', to_seconds(rawmetrics['gc-time']))
    self.publish_gauge('resource_duplicate_pct',
                       rawmetrics['pct-resource-dupes']['Value'])
    self.publish_gauge('num_nodes', rawmetrics['num-nodes']['Value'])

    # Command-queue metrics (each one is its own metric subtree).
    self.publish_gauge('queue.AwaitingRetry',
                       rawmetrics['queue.AwaitingRetry']['Count'])
    self.publish_gauge('queue.CommandParseTime',
                       to_seconds(rawmetrics['queue.CommandParseTime']))
    self.publish_gauge('queue.Depth', rawmetrics['queue.Depth']['Count'])
    self.publish_counter('queue.Discarded',
                         rawmetrics['queue.Discarded']['Count'])
    self.publish_counter('queue.Fatal', rawmetrics['queue.Fatal']['Count'])
    self.publish_counter('queue.Invalidated',
                         rawmetrics['queue.Invalidated']['Count'])
    self.publish_gauge('queue.MessagePersistenceTime',
                       to_seconds(rawmetrics['queue.MessagePersistenceTime']))
    self.publish_counter('queue.Processed',
                         rawmetrics['queue.Processed']['Count'])
    self.publish_gauge('queue.ProcessingTime',
                       to_seconds(rawmetrics['queue.ProcessingTime']))
    self.publish_gauge('queue.QueueTime',
                       rawmetrics['queue.QueueTime']['50thPercentile'])
    self.publish_counter('queue.Retried', rawmetrics['queue.Retried']['Count'])
    self.publish_gauge('queue.RetryCounts',
                       rawmetrics['queue.RetryCounts']['50thPercentile'])
    self.publish_counter('queue.Seen', rawmetrics['queue.Seen']['Count'])
    self.publish_gauge('queue.Size',
                       rawmetrics['queue.Size']['50thPercentile'])

    # JVM memory usage.
    memory = rawmetrics['memory']
    self.publish_gauge('memory.NonHeapMemoryUsage.used',
                       memory['NonHeapMemoryUsage']['used'])
    self.publish_gauge('memory.NonHeapMemoryUsage.committed',
                       memory['NonHeapMemoryUsage']['committed'])
    self.publish_gauge('memory.HeapMemoryUsage.used',
                       memory['HeapMemoryUsage']['used'])
    self.publish_gauge('memory.HeapMemoryUsage.committed',
                       memory['HeapMemoryUsage']['committed'])

    # Node run-status tallies gathered by count_nodes().  Note the
    # 'pending' gauge is fed from the 'noop' bucket.
    self.publish_gauge('nodes.status.unchanged', nodestats['unchanged'])
    self.publish_gauge('nodes.status.changed', nodestats['changed'])
    self.publish_gauge('nodes.status.failed', nodestats['failed'])
    self.publish_gauge('nodes.status.unreported', nodestats['unreported'])
    self.publish_gauge('nodes.status.pending', nodestats['noop'])
def test_basic(self):
    """Check time.convert for basic second/minute conversions, including
    case-insensitive and long-form unit names.
    """
    # assertEquals is a deprecated alias (removed in Python 3.12); use
    # assertEqual, consistent with the sibling test in this file.
    self.assertEqual(time.convert(60, 's', 's'), 60.0)
    self.assertEqual(time.convert(60, 's', 'm'), 1.0)
    self.assertEqual(time.convert(60000, 'ms', 'm'), 1.0)
    # Unit strings are accepted case-insensitively and by long name.
    self.assertEqual(time.convert(60000, 'MS', 'minutes'), 1.0)