class SpoutMetrics(ComponentMetrics):
  """Metrics helper class for Spout.

  Extends ComponentMetrics with spout-specific metrics: ack/timeout counts,
  complete latency, next_tuple() latency/count, and pending tuple counts.
  """
  ACK_COUNT = "__ack-count"
  COMPLETE_LATENCY = "__complete-latency"
  TIMEOUT_COUNT = "__timeout-count"
  NEXT_TUPLE_LATENCY = "__next-tuple-latency"
  NEXT_TUPLE_COUNT = "__next-tuple-count"
  PENDING_ACKED_COUNT = "__pending-acked-count"

  # NOTE(review): metric instances are class-level, hence shared by all
  # SpoutMetrics instances in this process — confirm one spout instance per
  # process is the intended usage.
  spout_metrics = {ACK_COUNT: MultiCountMetric(),
                   COMPLETE_LATENCY: MultiMeanReducedMetric(),
                   TIMEOUT_COUNT: MultiCountMetric(),
                   NEXT_TUPLE_LATENCY: MeanReducedMetric(),
                   NEXT_TUPLE_COUNT: CountMetric(),
                   PENDING_ACKED_COUNT: MeanReducedMetric()}

  # Multi-count metrics whose per-stream keys are pre-registered in
  # _init_multi_count_metrics() so they report explicit zeros instead of
  # missing entries.
  to_multi_init = [ACK_COUNT, ComponentMetrics.FAIL_COUNT,
                   TIMEOUT_COUNT, ComponentMetrics.EMIT_COUNT]

  def __init__(self, pplan_helper):
    """Initializes SpoutMetrics.

    :param pplan_helper: helper exposing the physical plan for this instance
      (must provide get_my_spout())
    """
    # Pass a copy so ComponentMetrics.__init__() cannot mutate the shared
    # class-level ``spout_metrics`` dict (it merges via dict.update()).
    # Also uses the zero-argument super(), consistent with the other classes
    # in this file (the original used Python-2-style super(SpoutMetrics, self)).
    super().__init__(dict(self.spout_metrics))
    self._init_multi_count_metrics(pplan_helper)

  def _init_multi_count_metrics(self, pplan_helper):
    """Pre-registers every output stream id on the MultiCountMetrics listed
    in ``to_multi_init`` so they emit zeros from the very first report."""
    to_init = [self.metrics[name] for name in self.to_multi_init
               if name in self.metrics
               and isinstance(self.metrics[name], MultiCountMetric)]
    for out_stream in pplan_helper.get_my_spout().outputs:
      stream_id = out_stream.stream.id
      for metric in to_init:
        metric.add_key(stream_id)

  def next_tuple(self, latency_in_ns):
    """Applies updates to the next-tuple latency and count metrics.

    :param latency_in_ns: time spent in next_tuple(), in nanoseconds
    """
    self.update_reduced_metric(self.NEXT_TUPLE_LATENCY, latency_in_ns)
    self.update_count(self.NEXT_TUPLE_COUNT)

  def acked_tuple(self, stream_id, complete_latency_ns):
    """Applies updates to the per-stream ack count and complete latency."""
    self.update_count(self.ACK_COUNT, key=stream_id)
    self.update_reduced_metric(self.COMPLETE_LATENCY, complete_latency_ns, key=stream_id)

  def failed_tuple(self, stream_id, fail_latency_ns):
    """Applies updates to the per-stream fail count and fail latency."""
    self.update_count(self.FAIL_COUNT, key=stream_id)
    self.update_reduced_metric(self.FAIL_LATENCY, fail_latency_ns, key=stream_id)

  def update_pending_tuples_count(self, count):
    """Applies updates to the pending (not-yet-acked) tuples count."""
    self.update_reduced_metric(self.PENDING_ACKED_COUNT, count)

  def timeout_tuple(self, stream_id):
    """Applies updates to the per-stream timeout count."""
    self.update_count(self.TIMEOUT_COUNT, key=stream_id)
 def setUp(self):
   """Builds a fresh metrics dict, helper, and mock collector for each test."""
   self.metrics = {
       "metric1": CountMetric(),
       "metric2": MultiCountMetric(),
       "metric3": MeanReducedMetric(),
       "metric4": MultiMeanReducedMetric(),
   }
   self.metrics_helper = BaseMetricsHelper(self.metrics)
   self.metrics_collector = mock_generator.MockMetricsCollector()
# --- Example #3 (snippet separator; the stray "0" was a scrape score artifact) ---
class ComponentMetrics(BaseMetricsHelper):
    """Metrics to be collected for both Bolt and Spout."""
    FAIL_LATENCY = "__fail-latency"
    FAIL_COUNT = "__fail-count"
    EMIT_COUNT = "__emit-count"
    TUPLE_SERIALIZATION_TIME_NS = "__tuple-serialization-time-ns"
    OUT_QUEUE_FULL_COUNT = "__out-queue-full-count"

    component_metrics = {
        FAIL_LATENCY: MultiMeanReducedMetric(),
        FAIL_COUNT: MultiCountMetric(),
        EMIT_COUNT: MultiCountMetric(),
        TUPLE_SERIALIZATION_TIME_NS: MultiCountMetric(),
        OUT_QUEUE_FULL_COUNT: CountMetric()
    }

    def __init__(self, additional_metrics):
        """Initializes with the common metrics plus subclass-specific ones.

        :param additional_metrics: dict of metric name -> metric instance
          contributed by the subclass (e.g. BoltMetrics, SpoutMetrics).
        """
        # BUG FIX: merge into a copy. The original did
        # ``metrics = self.component_metrics; metrics.update(...)``, which
        # mutated the class-level dict shared by every subclass instance,
        # leaking one subclass's metrics into the base map seen by others.
        metrics = dict(self.component_metrics)
        metrics.update(additional_metrics)
        super().__init__(metrics)

    # pylint: disable=arguments-differ
    def register_metrics(self, context):
        """Registers all metrics with the context's metrics collector.

        :param context: Topology Context
        """
        sys_config = system_config.get_sys_config()
        # Export interval is configured in seconds.
        interval = float(
            sys_config[constants.HERON_METRICS_EXPORT_INTERVAL_SEC])
        collector = context.get_metrics_collector()
        super().register_metrics(collector, interval)

    def update_out_queue_full_count(self):
        """Applies update to the out-queue full count."""
        self.update_count(self.OUT_QUEUE_FULL_COUNT)

    def update_emit_count(self, stream_id):
        """Applies update to the emit count of ``stream_id``."""
        self.update_count(self.EMIT_COUNT, key=stream_id)

    def serialize_data_tuple(self, stream_id, latency_in_ns):
        """Adds ``latency_in_ns`` to the serialization time of ``stream_id``."""
        self.update_count(self.TUPLE_SERIALIZATION_TIME_NS,
                          incr_by=latency_in_ns,
                          key=stream_id)
# --- Example #4 (snippet separator; the stray "0" was a scrape score artifact) ---
  def test_multi_count_metric(self):
    """Counts accumulate per key, reset on read, and new keys report 0."""
    metric = MultiCountMetric()
    keys = ["key1", "key2", "key3"]
    for key in keys:
      for _ in range(10):
        metric.incr(key=key)
    self.assertEqual(metric.get_value_and_reset(), {k: 10 for k in keys})
    self.assertEqual(metric.get_value_and_reset(), {k: 0 for k in keys})

    metric.add_key("key4")
    snapshot = metric.get_value_and_reset()
    self.assertIn("key4", snapshot)
    self.assertEqual(snapshot["key4"], 0)
# --- Example #5 (snippet separator; the stray "0" was a scrape score artifact) ---
#    http://www.apache.org/licenses/LICENSE-2.0
#
#  Unless required by applicable law or agreed to in writing,
#  software distributed under the License is distributed on an
#  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
#  KIND, either express or implied.  See the License for the
#  specific language governing permissions and limitations
#  under the License.
""" Singleton class which exposes a simple globally available counter for heron jobs.

It serves the same functionality as GlobalMetrics.java
"""
import threading
from heronpy.api.metrics import MultiCountMetric

# Process-wide counter container; incr()/safe_incr() below write into it.
metricsContainer = MultiCountMetric()
# Presumably flipped once the container has been registered with a metrics
# collector — the registration code is outside this view; TODO confirm.
registered = False
root_name = '__auto__'

# Guards metricsContainer for safe_incr(); plain incr() skips it.
lock = threading.Lock()


def incr(key, to_add=1):
    """Increment *key* in the global counter by *to_add* (no locking — see safe_incr)."""
    metricsContainer.incr(key, to_add)


def safe_incr(key, to_add=1):
    """Like incr(), but serialized through the module lock for thread safety."""
    with lock:
        incr(key, to_add)

# --- Example #6 (snippet separator; the stray "0" was a scrape score artifact) ---
class BoltMetrics(ComponentMetrics):
    """Metrics helper class for Bolt.

    Extends ComponentMetrics with bolt-specific metrics: ack/execute counts,
    process/execute latencies, execute time, and deserialization time.
    """
    ACK_COUNT = "__ack-count"
    PROCESS_LATENCY = "__process-latency"
    EXEC_COUNT = "__execute-count"
    EXEC_LATENCY = "__execute-latency"
    EXEC_TIME_NS = "__execute-time-ns"
    TUPLE_DESERIALIZATION_TIME_NS = "__tuple-deserialization-time-ns"

    bolt_metrics = {
        ACK_COUNT: MultiCountMetric(),
        PROCESS_LATENCY: MultiMeanReducedMetric(),
        EXEC_COUNT: MultiCountMetric(),
        EXEC_LATENCY: MultiMeanReducedMetric(),
        EXEC_TIME_NS: MultiCountMetric(),
        TUPLE_DESERIALIZATION_TIME_NS: MultiCountMetric()
    }

    # Multi-count metrics pre-keyed per input stream (and per
    # "component/stream" global id) in _init_multi_count_metrics().
    inputs_init = [
        ACK_COUNT, ComponentMetrics.FAIL_COUNT, EXEC_COUNT, EXEC_TIME_NS
    ]
    # Multi-count metrics pre-keyed per output stream.
    outputs_init = [ComponentMetrics.EMIT_COUNT]

    def __init__(self, pplan_helper):
        """Initializes BoltMetrics.

        :param pplan_helper: helper exposing the physical plan for this
          instance (must provide get_my_bolt())
        """
        # Pass a copy so ComponentMetrics.__init__() cannot mutate the shared
        # class-level ``bolt_metrics`` dict (it merges via dict.update()).
        super().__init__(dict(self.bolt_metrics))
        self._init_multi_count_metrics(pplan_helper)

    def _init_multi_count_metrics(self, pplan_helper):
        """Pre-registers stream keys on the MultiCountMetrics so they emit
        explicit zeros from the first report."""
        # inputs: keyed by both stream id and "component/stream" global id
        to_in_init = [
            self.metrics[i] for i in self.inputs_init if i in self.metrics
            and isinstance(self.metrics[i], MultiCountMetric)
        ]
        for in_stream in pplan_helper.get_my_bolt().inputs:
            stream_id = in_stream.stream.id
            global_stream_id = in_stream.stream.component_name + "/" + stream_id
            for metric in to_in_init:
                metric.add_key(stream_id)
                metric.add_key(global_stream_id)
        # outputs: keyed by stream id only
        to_out_init = [
            self.metrics[i] for i in self.outputs_init if i in self.metrics
            and isinstance(self.metrics[i], MultiCountMetric)
        ]
        for out_stream in pplan_helper.get_my_bolt().outputs:
            stream_id = out_stream.stream.id
            for metric in to_out_init:
                metric.add_key(stream_id)

    def execute_tuple(self, stream_id, source_component, latency_in_ns):
        """Applies updates to the execute count/latency/time metrics, keyed
        by both the stream id and the "component/stream" global id.

        :param latency_in_ns: execute latency in nanoseconds
        """
        self.update_count(self.EXEC_COUNT, key=stream_id)
        self.update_reduced_metric(self.EXEC_LATENCY, latency_in_ns, stream_id)
        self.update_count(self.EXEC_TIME_NS,
                          incr_by=latency_in_ns,
                          key=stream_id)

        global_stream_id = source_component + "/" + stream_id
        self.update_count(self.EXEC_COUNT, key=global_stream_id)
        self.update_reduced_metric(self.EXEC_LATENCY, latency_in_ns,
                                   global_stream_id)
        self.update_count(self.EXEC_TIME_NS,
                          incr_by=latency_in_ns,
                          key=global_stream_id)

    def deserialize_data_tuple(self, stream_id, source_component,
                               latency_in_ns):
        """Adds ``latency_in_ns`` to the deserialization time, keyed by both
        the stream id and the "component/stream" global id."""
        self.update_count(self.TUPLE_DESERIALIZATION_TIME_NS,
                          incr_by=latency_in_ns,
                          key=stream_id)
        global_stream_id = source_component + "/" + stream_id
        self.update_count(self.TUPLE_DESERIALIZATION_TIME_NS,
                          incr_by=latency_in_ns,
                          key=global_stream_id)

    def acked_tuple(self, stream_id, source_component, latency_in_ns):
        """Applies updates to the ack count and process latency, keyed by
        both the stream id and the "component/stream" global id."""
        self.update_count(self.ACK_COUNT, key=stream_id)
        self.update_reduced_metric(self.PROCESS_LATENCY, latency_in_ns,
                                   stream_id)
        global_stream_id = source_component + '/' + stream_id
        self.update_count(self.ACK_COUNT, key=global_stream_id)
        self.update_reduced_metric(self.PROCESS_LATENCY, latency_in_ns,
                                   global_stream_id)

    def failed_tuple(self, stream_id, source_component, latency_in_ns):
        """Applies updates to the fail count and fail latency, keyed by both
        the stream id and the "component/stream" global id."""
        self.update_count(self.FAIL_COUNT, key=stream_id)
        self.update_reduced_metric(self.FAIL_LATENCY, latency_in_ns, stream_id)
        global_stream_id = source_component + '/' + stream_id
        self.update_count(self.FAIL_COUNT, key=global_stream_id)
        self.update_reduced_metric(self.FAIL_LATENCY, latency_in_ns,
                                   global_stream_id)