def setUp(self):
  self.metrics = {
      "metric1": CountMetric(),
      "metric2": MultiCountMetric(),
      "metric3": MeanReducedMetric(),
      "metric4": MultiMeanReducedMetric()
  }
  self.metrics_helper = BaseMetricsHelper(self.metrics)
  self.metrics_collector = mock_generator.MockMetricsCollector()
def test_count_metric(self):
  metric = CountMetric()
  for _ in range(10):
    metric.incr()
  self.assertEqual(metric.get_value_and_reset(), 10)

  for _ in range(10):
    metric.incr(to_add=10)
  self.assertEqual(metric.get_value_and_reset(), 100)

  # get_value_and_reset() should have reset the counter back to zero
  self.assertEqual(metric.get_value_and_reset(), 0)
def test_gather_metrics(self):
  name = "metric"
  metric = CountMetric()
  metric.incr(to_add=10)
  self.metrics_collector.register_metric(name, metric, 60)
  self.assertIn(60,
                self.metrics_collector.time_bucket_in_sec_to_metrics_name)

  self.metrics_collector._gather_metrics(60)
  message = self.metrics_collector.out_metrics.poll()
  self.assertIsNotNone(message)
  self.assertIsInstance(message, metrics_pb2.MetricPublisherPublishMessage)
  self.assertEqual(message.metrics[0].name, name)
  self.assertEqual(message.metrics[0].value, str(10))

  # gathering the metric should also reset its value
  self.assertEqual(metric.get_value_and_reset(), 0)
class ComponentMetrics(BaseMetricsHelper):
  """Metrics to be collected for both Bolt and Spout"""
  FAIL_LATENCY = "__fail-latency"
  FAIL_COUNT = "__fail-count"
  EMIT_COUNT = "__emit-count"
  TUPLE_SERIALIZATION_TIME_NS = "__tuple-serialization-time-ns"
  OUT_QUEUE_FULL_COUNT = "__out-queue-full-count"

  component_metrics = {
      FAIL_LATENCY: MultiMeanReducedMetric(),
      FAIL_COUNT: MultiCountMetric(),
      EMIT_COUNT: MultiCountMetric(),
      TUPLE_SERIALIZATION_TIME_NS: MultiCountMetric(),
      OUT_QUEUE_FULL_COUNT: CountMetric()
  }

  def __init__(self, additional_metrics):
    metrics = self.component_metrics
    metrics.update(additional_metrics)
    super(ComponentMetrics, self).__init__(metrics)

  # pylint: disable=arguments-differ
  def register_metrics(self, context):
    """Registers metrics to context

    :param context: Topology Context
    """
    sys_config = system_config.get_sys_config()
    interval = float(sys_config[constants.HERON_METRICS_EXPORT_INTERVAL_SEC])
    collector = context.get_metrics_collector()
    super(ComponentMetrics, self).register_metrics(collector, interval)

  def update_out_queue_full_count(self):
    """Apply update to the out-queue full count"""
    self.update_count(self.OUT_QUEUE_FULL_COUNT)

  def update_emit_count(self, stream_id):
    """Apply update to emit count"""
    self.update_count(self.EMIT_COUNT, key=stream_id)

  def serialize_data_tuple(self, stream_id, latency_in_ns):
    """Apply update to serialization metrics"""
    self.update_count(self.TUPLE_SERIALIZATION_TIME_NS,
                      incr_by=latency_in_ns, key=stream_id)
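# Illustrative only: a minimal sketch (not part of the original module) of how a
# component-specific helper can extend ComponentMetrics by passing its extra metrics
# through additional_metrics, the same pattern SpoutMetrics follows below. The class
# and the "__example-custom-count" metric name are hypothetical.
class _ExampleComponentMetrics(ComponentMetrics):
  """Example subclass contributing one additional metric"""
  CUSTOM_COUNT = "__example-custom-count"

  def __init__(self):
    # The extra metric is merged into component_metrics by ComponentMetrics.__init__
    super(_ExampleComponentMetrics, self).__init__({self.CUSTOM_COUNT: CountMetric()})

  def update_custom_count(self):
    """Apply update to the hypothetical custom count"""
    self.update_count(self.CUSTOM_COUNT)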
def test_register_metric(self):
  name1 = "metric1"
  metric1 = CountMetric()
  self.metrics_collector.register_metric(name1, metric1, 60)
  self.assertEqual(self.metrics_collector.metrics_map[name1], metric1)
  self.assertIn(60, self.metrics_collector.registered_timers)

  name2 = "metric2"
  metric2 = MeanReducedMetric()
  self.metrics_collector.register_metric(name2, metric2, 60)
  self.assertEqual(self.metrics_collector.metrics_map[name2], metric2)
  self.assertEqual(
      self.metrics_collector.time_bucket_in_sec_to_metrics_name[60],
      [name1, name2])

  name3 = "metric3"
  metric3 = MultiMeanReducedMetric()
  self.metrics_collector.register_metric(name3, metric3, 30)
  self.assertEqual(self.metrics_collector.metrics_map[name3], metric3)
  self.assertEqual(self.metrics_collector.registered_timers, [60, 30])
class GatewayMetrics(BaseMetricsHelper):
  """Metrics helper class for Gateway metric"""
  RECEIVED_PKT_SIZE = '__gateway-received-packets-size'
  SENT_PKT_SIZE = '__gateway-sent-packets-size'
  RECEIVED_PKT_COUNT = '__gateway-received-packets-count'
  SENT_PKT_COUNT = '__gateway-sent-packets-count'
  SENT_METRICS_SIZE = '__gateway-sent-metrics-size'
  SENT_METRICS_PKT_COUNT = '__gateway-sent-metrics-packets-count'
  SENT_METRICS_COUNT = '__gateway-sent-metrics-count'
  SENT_EXCEPTION_COUNT = '__gateway-sent-exceptions-count'
  IN_STREAM_QUEUE_SIZE = '__gateway-in-stream-queue-size'
  OUT_STREAM_QUEUE_SIZE = '__gateway-out-stream-queue-size'
  IN_STREAM_QUEUE_EXPECTED_CAPACITY = '__gateway-in-stream-queue-expected-capacity'
  OUT_STREAM_QUEUE_EXPECTED_CAPACITY = '__gateway-out-stream-queue-expected-capacity'
  IN_QUEUE_FULL_COUNT = '__gateway-in-queue-full-count'

  metrics = {
      RECEIVED_PKT_SIZE: CountMetric(),
      SENT_PKT_SIZE: CountMetric(),
      RECEIVED_PKT_COUNT: CountMetric(),
      SENT_PKT_COUNT: CountMetric(),
      SENT_METRICS_SIZE: CountMetric(),
      SENT_METRICS_PKT_COUNT: CountMetric(),
      SENT_METRICS_COUNT: CountMetric(),
      SENT_EXCEPTION_COUNT: CountMetric(),
      IN_STREAM_QUEUE_SIZE: MeanReducedMetric(),
      OUT_STREAM_QUEUE_SIZE: MeanReducedMetric(),
      IN_STREAM_QUEUE_EXPECTED_CAPACITY: MeanReducedMetric(),
      OUT_STREAM_QUEUE_EXPECTED_CAPACITY: MeanReducedMetric()
  }

  def __init__(self, metrics_collector):
    sys_config = system_config.get_sys_config()
    super(GatewayMetrics, self).__init__(self.metrics)
    interval = float(sys_config[constants.HERON_METRICS_EXPORT_INTERVAL_SEC])
    self.register_metrics(metrics_collector, interval)

  def update_received_packet(self, received_pkt_size_bytes):
    """Update received packet metrics"""
    self.update_count(self.RECEIVED_PKT_COUNT)
    self.update_count(self.RECEIVED_PKT_SIZE, incr_by=received_pkt_size_bytes)

  def update_sent_packet(self, sent_pkt_size_bytes):
    """Update sent packet metrics"""
    self.update_count(self.SENT_PKT_COUNT)
    self.update_count(self.SENT_PKT_SIZE, incr_by=sent_pkt_size_bytes)

  def update_sent_metrics_size(self, size):
    """Update the total size of sent metrics"""
    self.update_count(self.SENT_METRICS_SIZE, size)

  def update_sent_metrics(self, metrics_count, exceptions_count):
    """Update the sent metrics and sent exceptions counts"""
    self.update_count(self.SENT_METRICS_PKT_COUNT)
    self.update_count(self.SENT_METRICS_COUNT, metrics_count)
    self.update_count(self.SENT_EXCEPTION_COUNT, exceptions_count)

  def update_in_out_stream_metrics(self, in_size, out_size,
                                   in_expect_size, out_expect_size):
    """Update the in/out stream queue size and expected capacity metrics"""
    self.update_reduced_metric(self.IN_STREAM_QUEUE_SIZE, in_size)
    self.update_reduced_metric(self.OUT_STREAM_QUEUE_SIZE, out_size)
    self.update_reduced_metric(self.IN_STREAM_QUEUE_EXPECTED_CAPACITY, in_expect_size)
    self.update_reduced_metric(self.OUT_STREAM_QUEUE_EXPECTED_CAPACITY, out_expect_size)
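# Illustrative only: a rough sketch of how GatewayMetrics might be exercised once a
# metrics collector exists (the MockMetricsCollector from the tests above is used here).
# It assumes the system config has already been loaded so the constructor can read
# HERON_METRICS_EXPORT_INTERVAL_SEC; the function name and the literal byte/queue
# sizes are made up for the example.
def _example_gateway_metrics_usage():
  collector = mock_generator.MockMetricsCollector()
  gateway_metrics = GatewayMetrics(collector)
  gateway_metrics.update_received_packet(received_pkt_size_bytes=128)
  gateway_metrics.update_sent_packet(sent_pkt_size_bytes=256)
  gateway_metrics.update_in_out_stream_metrics(in_size=10, out_size=5,
                                               in_expect_size=100, out_expect_size=100)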
class SpoutMetrics(ComponentMetrics):
  """Metrics helper class for Spout"""
  ACK_COUNT = "__ack-count"
  COMPLETE_LATENCY = "__complete-latency"
  TIMEOUT_COUNT = "__timeout-count"
  NEXT_TUPLE_LATENCY = "__next-tuple-latency"
  NEXT_TUPLE_COUNT = "__next-tuple-count"
  PENDING_ACKED_COUNT = "__pending-acked-count"

  spout_metrics = {
      ACK_COUNT: MultiCountMetric(),
      COMPLETE_LATENCY: MultiMeanReducedMetric(),
      TIMEOUT_COUNT: MultiCountMetric(),
      NEXT_TUPLE_LATENCY: MeanReducedMetric(),
      NEXT_TUPLE_COUNT: CountMetric(),
      PENDING_ACKED_COUNT: MeanReducedMetric()
  }

  to_multi_init = [ACK_COUNT, ComponentMetrics.FAIL_COUNT,
                   TIMEOUT_COUNT, ComponentMetrics.EMIT_COUNT]

  def __init__(self, pplan_helper):
    super(SpoutMetrics, self).__init__(self.spout_metrics)
    self._init_multi_count_metrics(pplan_helper)

  def _init_multi_count_metrics(self, pplan_helper):
    """Initializes the default values for a necessary set of MultiCountMetrics"""
    to_init = [self.metrics[i] for i in self.to_multi_init
               if i in self.metrics and isinstance(self.metrics[i], MultiCountMetric)]
    for out_stream in pplan_helper.get_my_spout().outputs:
      stream_id = out_stream.stream.id
      for metric in to_init:
        metric.add_key(stream_id)

  def next_tuple(self, latency_in_ns):
    """Apply updates to the next tuple metrics"""
    self.update_reduced_metric(self.NEXT_TUPLE_LATENCY, latency_in_ns)
    self.update_count(self.NEXT_TUPLE_COUNT)

  def acked_tuple(self, stream_id, complete_latency_ns):
    """Apply updates to the ack metrics"""
    self.update_count(self.ACK_COUNT, key=stream_id)
    self.update_reduced_metric(self.COMPLETE_LATENCY, complete_latency_ns, key=stream_id)

  def failed_tuple(self, stream_id, fail_latency_ns):
    """Apply updates to the fail metrics"""
    self.update_count(self.FAIL_COUNT, key=stream_id)
    self.update_reduced_metric(self.FAIL_LATENCY, fail_latency_ns, key=stream_id)

  def update_pending_tuples_count(self, count):
    """Apply updates to the pending tuples count"""
    self.update_reduced_metric(self.PENDING_ACKED_COUNT, count)

  def timeout_tuple(self, stream_id):
    """Apply updates to the timeout count"""
    self.update_count(self.TIMEOUT_COUNT, key=stream_id)
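# Illustrative only: a small sketch of the update calls a spout runner might make
# against an existing SpoutMetrics instance. The function name, stream id, and
# latency values are hypothetical; constructing SpoutMetrics itself requires a real
# pplan_helper, so the instance is taken as a parameter here.
def _example_spout_metrics_updates(spout_metrics, stream_id="default"):
  spout_metrics.next_tuple(latency_in_ns=5000)
  spout_metrics.update_emit_count(stream_id)
  spout_metrics.acked_tuple(stream_id, complete_latency_ns=20000)
  spout_metrics.failed_tuple(stream_id, fail_latency_ns=15000)
  spout_metrics.update_pending_tuples_count(3)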