Example #1
0
  def test_multi_mean_reduced_metric(self):
    """MultiMeanReducedMetric averages values per key and resets them to None."""
    metric = MultiMeanReducedMetric()
    keys = ["key1", "key2", "key3"]
    # Feed each key the series i * factor for i in 1..10; the mean of 1..10
    # is 5.5, so the per-key means are 5.5, 11 and 16.5 respectively.
    for value in range(1, 11):
      for factor, key in enumerate(keys, start=1):
        metric.update(key=key, value=value * factor)
    self.assertEqual(metric.get_value_and_reset(),
                     {"key1": 5.5, "key2": 11, "key3": 16.5})
    # After a reset, every registered key reports None until updated again.
    self.assertEqual(metric.get_value_and_reset(),
                     {key: None for key in keys})

    metric.add_key("key4")
    snapshot = metric.get_value_and_reset()
    self.assertIn("key4", snapshot)
    self.assertIsNone(snapshot["key4"])
Example #2
0
class SpoutMetrics(ComponentMetrics):
    """Metrics helper class for Spout.

    Tracks spout-specific metrics: per-stream ack/timeout counts, complete
    latency, next-tuple latency/count, and the pending-acked count.
    """
    ACK_COUNT = "__ack-count"
    COMPLETE_LATENCY = "__complete-latency"
    TIMEOUT_COUNT = "__timeout-count"
    NEXT_TUPLE_LATENCY = "__next-tuple-latency"
    NEXT_TUPLE_COUNT = "__next-tuple-count"
    PENDING_ACKED_COUNT = "__pending-acked-count"

    # Template mapping metric name -> metric object. Kept at class level for
    # backward compatibility, but __init__ now registers a FRESH copy of each
    # metric so that separate SpoutMetrics instances no longer share (and
    # clobber) the same mutable metric objects.
    spout_metrics = {
        ACK_COUNT: MultiCountMetric(),
        COMPLETE_LATENCY: MultiMeanReducedMetric(),
        TIMEOUT_COUNT: MultiCountMetric(),
        NEXT_TUPLE_LATENCY: MeanReducedMetric(),
        NEXT_TUPLE_COUNT: CountMetric(),
        PENDING_ACKED_COUNT: MeanReducedMetric()
    }

    # Multi-count metrics whose per-stream keys are pre-registered in
    # _init_multi_count_metrics().
    to_multi_init = [
        ACK_COUNT, ComponentMetrics.FAIL_COUNT, TIMEOUT_COUNT,
        ComponentMetrics.EMIT_COUNT
    ]

    def __init__(self, pplan_helper):
        # Instantiate a new metric object of each configured type so this
        # instance's counters are independent of any other instance's.
        # (All metric types used here take no constructor arguments.)
        fresh_metrics = {name: type(metric)()
                         for name, metric in self.spout_metrics.items()}
        super(SpoutMetrics, self).__init__(fresh_metrics)
        self._init_multi_count_metrics(pplan_helper)

    def _init_multi_count_metrics(self, pplan_helper):
        """Initializes the default values for a necessary set of MultiCountMetrics"""
        # Only pre-register keys on metrics that are actually multi-count;
        # to_multi_init may name metrics owned by the base class.
        to_init = [
            self.metrics[i] for i in self.to_multi_init if i in self.metrics
            and isinstance(self.metrics[i], MultiCountMetric)
        ]
        for out_stream in pplan_helper.get_my_spout().outputs:
            stream_id = out_stream.stream.id
            for metric in to_init:
                metric.add_key(stream_id)

    def next_tuple(self, latency_in_ns):
        """Apply updates to the next tuple metrics"""
        self.update_reduced_metric(self.NEXT_TUPLE_LATENCY, latency_in_ns)
        self.update_count(self.NEXT_TUPLE_COUNT)

    def acked_tuple(self, stream_id, complete_latency_ns):
        """Apply updates to the ack metrics"""
        self.update_count(self.ACK_COUNT, key=stream_id)
        self.update_reduced_metric(self.COMPLETE_LATENCY,
                                   complete_latency_ns,
                                   key=stream_id)

    def failed_tuple(self, stream_id, fail_latency_ns):
        """Apply updates to the fail metrics"""
        self.update_count(self.FAIL_COUNT, key=stream_id)
        self.update_reduced_metric(self.FAIL_LATENCY,
                                   fail_latency_ns,
                                   key=stream_id)

    def update_pending_tuples_count(self, count):
        """Apply updates to the pending tuples count"""
        self.update_reduced_metric(self.PENDING_ACKED_COUNT, count)

    def timeout_tuple(self, stream_id):
        """Apply updates to the timeout count"""
        self.update_count(self.TIMEOUT_COUNT, key=stream_id)
class BoltMetrics(ComponentMetrics):
  """Metrics helper class for Bolt.

  Tracks bolt-specific metrics: per-stream ack/execute counts, process and
  execute latencies, execute time, and tuple deserialization time. Input
  streams are keyed both by stream id and by "component/stream" id.
  """
  ACK_COUNT = "__ack-count"
  PROCESS_LATENCY = "__process-latency"
  EXEC_COUNT = "__execute-count"
  EXEC_LATENCY = "__execute-latency"
  EXEC_TIME_NS = "__execute-time-ns"
  TUPLE_DESERIALIZATION_TIME_NS = "__tuple-deserialization-time-ns"

  # Template mapping metric name -> metric object. Kept at class level for
  # backward compatibility, but __init__ now registers a FRESH copy of each
  # metric so that separate BoltMetrics instances no longer share (and
  # clobber) the same mutable metric objects.
  bolt_metrics = {ACK_COUNT: MultiCountMetric(),
                  PROCESS_LATENCY: MultiMeanReducedMetric(),
                  EXEC_COUNT: MultiCountMetric(),
                  EXEC_LATENCY: MultiMeanReducedMetric(),
                  EXEC_TIME_NS: MultiCountMetric(),
                  TUPLE_DESERIALIZATION_TIME_NS: MultiCountMetric()}

  # Multi-count metrics whose keys are pre-registered per input stream.
  inputs_init = [ACK_COUNT, ComponentMetrics.FAIL_COUNT,
                 EXEC_COUNT, EXEC_TIME_NS]
  # Multi-count metrics whose keys are pre-registered per output stream.
  outputs_init = [ComponentMetrics.EMIT_COUNT]

  def __init__(self, pplan_helper):
    # Instantiate a new metric object of each configured type so this
    # instance's counters are independent of any other instance's.
    # (All metric types used here take no constructor arguments.)
    fresh_metrics = {name: type(metric)()
                     for name, metric in self.bolt_metrics.items()}
    super(BoltMetrics, self).__init__(fresh_metrics)
    self._init_multi_count_metrics(pplan_helper)

  def _init_multi_count_metrics(self, pplan_helper):
    """Initializes the default values for a necessary set of MultiCountMetrics"""
    # inputs: register both the bare stream id and the global
    # "component/stream" id for each input stream.
    to_in_init = [self.metrics[i] for i in self.inputs_init
                  if i in self.metrics and isinstance(self.metrics[i], MultiCountMetric)]
    for in_stream in pplan_helper.get_my_bolt().inputs:
      stream_id = in_stream.stream.id
      global_stream_id = in_stream.stream.component_name + "/" + stream_id
      for metric in to_in_init:
        metric.add_key(stream_id)
        metric.add_key(global_stream_id)
    # outputs
    to_out_init = [self.metrics[i] for i in self.outputs_init
                   if i in self.metrics and isinstance(self.metrics[i], MultiCountMetric)]
    for out_stream in pplan_helper.get_my_bolt().outputs:
      stream_id = out_stream.stream.id
      for metric in to_out_init:
        metric.add_key(stream_id)

  def execute_tuple(self, stream_id, source_component, latency_in_ns):
    """Apply updates to the execute metrics"""
    self.update_count(self.EXEC_COUNT, key=stream_id)
    self.update_reduced_metric(self.EXEC_LATENCY, latency_in_ns, stream_id)
    self.update_count(self.EXEC_TIME_NS, incr_by=latency_in_ns, key=stream_id)
    # Record the same update against the "component/stream" key as well.
    global_stream_id = source_component + "/" + stream_id
    self.update_count(self.EXEC_COUNT, key=global_stream_id)
    self.update_reduced_metric(self.EXEC_LATENCY, latency_in_ns, global_stream_id)
    self.update_count(self.EXEC_TIME_NS, incr_by=latency_in_ns, key=global_stream_id)

  def deserialize_data_tuple(self, stream_id, source_component, latency_in_ns):
    """Apply updates to the deserialization metrics"""
    self.update_count(self.TUPLE_DESERIALIZATION_TIME_NS, incr_by=latency_in_ns, key=stream_id)
    global_stream_id = source_component + "/" + stream_id
    self.update_count(self.TUPLE_DESERIALIZATION_TIME_NS, incr_by=latency_in_ns,
                      key=global_stream_id)

  def acked_tuple(self, stream_id, source_component, latency_in_ns):
    """Apply updates to the ack metrics"""
    self.update_count(self.ACK_COUNT, key=stream_id)
    self.update_reduced_metric(self.PROCESS_LATENCY, latency_in_ns, stream_id)
    global_stream_id = source_component + '/' + stream_id
    self.update_count(self.ACK_COUNT, key=global_stream_id)
    self.update_reduced_metric(self.PROCESS_LATENCY, latency_in_ns, global_stream_id)

  def failed_tuple(self, stream_id, source_component, latency_in_ns):
    """Apply updates to the fail metrics"""
    self.update_count(self.FAIL_COUNT, key=stream_id)
    self.update_reduced_metric(self.FAIL_LATENCY, latency_in_ns, stream_id)
    global_stream_id = source_component + '/' + stream_id
    self.update_count(self.FAIL_COUNT, key=global_stream_id)
    self.update_reduced_metric(self.FAIL_LATENCY, latency_in_ns, global_stream_id)
Example #4
0
    def test_multi_mean_reduced_metric(self):
        """Verify per-key averaging and reset semantics of MultiMeanReducedMetric."""
        metric = MultiMeanReducedMetric()
        key_list = ["key1", "key2", "key3"]
        multipliers = (1, 2, 3)
        # Each key receives the series i * multiplier for i in 1..10, so the
        # means are mean(1..10) scaled by 1, 2 and 3: 5.5, 11 and 16.5.
        for i in range(1, 11):
            for key, mult in zip(key_list, multipliers):
                metric.update(key=key, value=i * mult)
        self.assertEqual(
            metric.get_value_and_reset(),
            {"key1": 5.5, "key2": 11, "key3": 16.5})
        # A second read-and-reset yields None for every registered key.
        self.assertEqual(
            metric.get_value_and_reset(),
            {k: None for k in key_list})

        metric.add_key("key4")
        current = metric.get_value_and_reset()
        self.assertIn("key4", current)
        self.assertIsNone(current["key4"])