Esempio n. 1
0
    def setUp(self):
        """Registers a fake stats collector for the duration of each test."""
        super(StatsStoreTest, self).setUp()

        self.process_id = "some_pid"
        self.stats_store = aff4.FACTORY.Create(
            None, stats_store.StatsStore, mode="w", token=self.token)

        # Metadata covering every metric type the tests exercise, both with
        # and without field annotations.
        metric_metadata = [
            stats_utils.CreateCounterMetadata("counter"),
            stats_utils.CreateCounterMetadata(
                "counter_with_fields", fields=[("source", str)]),
            stats_utils.CreateEventMetadata("events"),
            stats_utils.CreateEventMetadata(
                "events_with_fields", fields=[("source", str)]),
            stats_utils.CreateGaugeMetadata("int_gauge", int),
            stats_utils.CreateGaugeMetadata("str_gauge", str),
            stats_utils.CreateGaugeMetadata(
                "str_gauge_with_fields", str, fields=[("task", int)]),
        ]
        collector = default_stats_collector.DefaultStatsCollector(
            metric_metadata)
        stats_context = stats_test_utils.FakeStatsContext(collector)
        stats_context.start()
        self.addCleanup(stats_context.stop)
Esempio n. 2
0
def _CreateFakeStatsCollector():
  """Returns a stats-collector for use by tests in this file."""
  # One metric of each type, with and without field annotations.
  metadata = [
      stats_utils.CreateCounterMetadata("counter"),
      stats_utils.CreateCounterMetadata("counter_with_fields",
                                        fields=[("source", str)]),
      stats_utils.CreateEventMetadata("events"),
      stats_utils.CreateEventMetadata("events_with_fields",
                                      fields=[("source", str)]),
      stats_utils.CreateGaugeMetadata("int_gauge", int),
      stats_utils.CreateGaugeMetadata("str_gauge", str),
      stats_utils.CreateGaugeMetadata("str_gauge_with_fields",
                                      str,
                                      fields=[("task", int)]),
  ]
  return default_stats_collector.DefaultStatsCollector(metadata)
Esempio n. 3
0
  def testRaisesOnImproperFieldsUsage1(self):
    """Checks that passing fields to field-less metrics raises ValueError.

    All three metric types (counter, gauge, event) are registered without
    field definitions, so any GetMetricValue call that supplies fields
    must raise.
    """
    counter_name = "testRaisesOnImproperFieldsUsage1_counter"
    int_gauge_name = "testRaisesOnImproperFieldsUsage1_int_gauge"
    event_metric_name = "testRaisesOnImproperFieldsUsage1_event_metric"

    collector = self._CreateStatsCollector([
        stats_utils.CreateCounterMetadata(counter_name),
        stats_utils.CreateGaugeMetadata(int_gauge_name, int),
        stats_utils.CreateEventMetadata(event_metric_name)
    ])

    # Check for counters
    with self.assertRaises(ValueError):
      collector.GetMetricValue(counter_name, fields=["a"])

    # Check for gauges
    with self.assertRaises(ValueError):
      collector.GetMetricValue(int_gauge_name, fields=["a"])

    # Check for event metrics. Uses the same context-manager assertion
    # style as the checks above (the original mixed both assertRaises
    # forms within one method).
    with self.assertRaises(ValueError):
      collector.GetMetricValue(event_metric_name, fields=["a", "b"])
Esempio n. 4
0
    def testMultipleFuncs(self):
        """Tests if multiple decorators produce aggregate stats."""
        counter_name = "testMultipleFuncs_counter"
        event_metric_name = "testMultipleFuncs_event_metric"

        collector = self._CreateStatsCollector([
            stats_utils.CreateCounterMetadata(counter_name),
            stats_utils.CreateEventMetadata(event_metric_name, bins=[0, 1, 2])
        ])

        # Two functions sharing one counter: each call should bump it.
        @stats_utils.Counted(counter_name)
        def CountedA(n):
            self._Sleep(n)

        @stats_utils.Counted(counter_name)
        def CountedB(n):
            self._Sleep(n)

        # Two functions sharing one event metric: their timings accumulate
        # in the same distribution.
        @stats_utils.Timed(event_metric_name)
        def TimedA(n):
            self._Sleep(n)

        @stats_utils.Timed(event_metric_name)
        def TimedB(n):
            self._Sleep(n)

        with FakeStatsContext(collector):
            CountedA(0.1)
            CountedB(0.1)
            self.assertEqual(collector.GetMetricValue(counter_name), 2)

            TimedA(0.1)
            TimedB(1.1)
            distribution = collector.GetMetricValue(event_metric_name)
            self.assertEqual(distribution.bins_heights,
                             {-_INF: 0, 0: 1, 1: 1, 2: 0})
Esempio n. 5
0
    def testExceptionHandling(self):
        """Test decorators when exceptions are thrown."""
        counter_name = "testExceptionHandling_counter"
        event_metric_name = "testExceptionHandling_event_metric"

        collector = self._CreateStatsCollector([
            stats_utils.CreateCounterMetadata(counter_name),
            stats_utils.CreateEventMetadata(event_metric_name,
                                            bins=[0, 0.1, 0.2])
        ])

        @stats_utils.Timed(event_metric_name)
        @stats_utils.Counted(counter_name)
        def RaiseFunc(n):
            self._Sleep(n)
            # Raise a specific exception class so the assertion below cannot
            # be satisfied by an unrelated failure inside the decorators
            # (the original raised and asserted on a bare Exception, which
            # any error whatsoever would match).
            raise ValueError("deliberate test failure")

        with FakeStatsContext(collector):
            self.assertRaises(ValueError, RaiseFunc, 0.11)

        # Check if all vars get updated despite the exception: the timing
        # lands in the [0.1, 0.2) bin and the counter still counts the call.
        m = collector.GetMetricValue(event_metric_name)
        self.assertEqual(m.bins_heights, {-_INF: 0, 0: 0, 0.1: 1, 0.2: 0})

        self.assertEqual(collector.GetMetricValue(counter_name), 1)
Esempio n. 6
0
    def testGetAllMetricsMetadataWorksCorrectlyOnSimpleMetrics(self):
        """Checks metadata reported for a counter, a gauge and an event."""
        counter_name = "testGAMM_SimpleMetrics_counter"
        int_gauge_name = "testGAMM_SimpleMetrics_int_gauge"
        event_metric_name = "testGAMM_SimpleMetrics_event_metric"

        collector = self._CreateStatsCollector([
            stats_utils.CreateCounterMetadata(counter_name),
            stats_utils.CreateGaugeMetadata(
                int_gauge_name, int, fields=[("dimension", str)]),
            stats_utils.CreateEventMetadata(event_metric_name)
        ])

        all_metadata = collector.GetAllMetricsMetadata()

        # The plain counter carries no field definitions.
        counter_metadata = all_metadata[counter_name]
        self.assertEqual(counter_metadata.metric_type,
                         rdf_stats.MetricMetadata.MetricType.COUNTER)
        self.assertFalse(counter_metadata.fields_defs)

        # The gauge reports its single string-typed field definition.
        gauge_metadata = all_metadata[int_gauge_name]
        self.assertEqual(gauge_metadata.metric_type,
                         rdf_stats.MetricMetadata.MetricType.GAUGE)
        self.assertEqual(gauge_metadata.fields_defs, [
            rdf_stats.MetricFieldDefinition(
                field_name="dimension",
                field_type=rdf_stats.MetricFieldDefinition.FieldType.STR)
        ])

        # The event metric has no field definitions either.
        event_metadata = all_metadata[event_metric_name]
        self.assertEqual(event_metadata.metric_type,
                         rdf_stats.MetricMetadata.MetricType.EVENT)
        self.assertFalse(event_metadata.fields_defs)
Esempio n. 7
0
    def testMaps(self):
        """Test binned timings."""
        event_metric_name = "testMaps_event_metric"

        collector = self._CreateStatsCollector([
            stats_utils.CreateEventMetadata(event_metric_name,
                                            bins=[0, 0.1, 0.2])
        ])

        @stats_utils.Timed(event_metric_name)
        def TimedFunc(n):
            self._Sleep(n)

        def BinHeights():
            # Convenience accessor for the current histogram state.
            return collector.GetMetricValue(event_metric_name).bins_heights

        with FakeStatsContext(collector):
            # Before any calls all bins are empty.
            self.assertEqual(BinHeights(), {-_INF: 0, 0: 0, 0.1: 0, 0.2: 0})

            # Three fast calls all land in the first bin.
            for _ in range(3):
                TimedFunc(0.01)
            self.assertEqual(BinHeights(), {-_INF: 0, 0: 3, 0.1: 0, 0.2: 0})

            # One slower call lands in the second bin.
            TimedFunc(0.11)
            self.assertEqual(BinHeights(), {-_INF: 0, 0: 3, 0.1: 1, 0.2: 0})
Esempio n. 8
0
  def testCombiningDecorators(self):
    """Test combining decorators."""
    counter_name = "testCombiningDecorators_counter"
    event_metric_name = "testCombiningDecorators_event_metric"

    collector = self._CreateStatsCollector([
        stats_utils.CreateCounterMetadata(counter_name),
        stats_utils.CreateEventMetadata(
            event_metric_name, bins=[0.0, 0.1, 0.2])
    ])

    # One function carrying both a timing and a counting decorator.
    @stats_utils.Timed(event_metric_name)
    @stats_utils.Counted(counter_name)
    def OverdecoratedFunc(n):
      self._Sleep(n)

    with FakeStatsContext(collector):
      OverdecoratedFunc(0.02)

    # A single call must update both metrics: the timing falls into the
    # first bin and the counter is incremented exactly once.
    heights = collector.GetMetricValue(event_metric_name).bins_heights
    for bin_edge, expected_height in [(0.0, 1), (0.1, 0), (0.2, 0)]:
      self.assertEqual(heights[bin_edge], expected_height)

    self.assertEqual(collector.GetMetricValue(counter_name), 1)
Esempio n. 9
0
    def testSimpleEventMetric(self):
        """Checks sum/count/bin bookkeeping of a plain event metric."""
        event_metric_name = "testSimpleEventMetric_event_metric"

        collector = self._CreateStatsCollector([
            stats_utils.CreateEventMetadata(event_metric_name,
                                            bins=[0.0, 0.1, 0.2]),
        ])

        def AssertDistribution(expected_sum, expected_count,
                               expected_heights):
            # Reads the metric and checks all distribution invariants.
            data = collector.GetMetricValue(event_metric_name)
            self.assertAlmostEqual(expected_sum, data.sum)
            self.assertEqual(expected_count, data.count)
            self.assertEqual([-_INF, 0.0, 0.1, 0.2], list(data.bins))
            self.assertEqual(expected_heights, data.bins_heights)

        # A freshly created metric is all zeros.
        AssertDistribution(0, 0, {-_INF: 0, 0.0: 0, 0.1: 0, 0.2: 0})

        # 0.15 falls into the [0.1, 0.2) bin.
        collector.RecordEvent(event_metric_name, 0.15)
        AssertDistribution(0.15, 1, {-_INF: 0, 0.0: 0, 0.1: 1, 0.2: 0})

        # 0.5 overflows into the last bin.
        collector.RecordEvent(event_metric_name, 0.5)
        AssertDistribution(0.65, 2, {-_INF: 0, 0.0: 0, 0.1: 1, 0.2: 1})

        # Negative values go to the -inf underflow bin.
        collector.RecordEvent(event_metric_name, -0.1)
        AssertDistribution(0.55, 3, {-_INF: 1, 0.0: 0, 0.1: 1, 0.2: 1})
Esempio n. 10
0
  def testRaisesOnImproperFieldsUsage2(self):
    """Metrics defined with one field reject missing or extra field values."""
    counter_name = "testRaisesOnImproperFieldsUsage2_counter"
    int_gauge_name = "testRaisesOnImproperFieldsUsage2_int_gauge"
    event_metric_name = "testRaisesOnImproperFieldsUsage2_event_metric"

    collector = self._CreateStatsCollector([
        stats_utils.CreateCounterMetadata(
            counter_name, fields=[("dimension", str)]),
        stats_utils.CreateGaugeMetadata(
            int_gauge_name, int, fields=[("dimension", str)]),
        stats_utils.CreateEventMetadata(
            event_metric_name, fields=[("dimension", str)])
    ])

    # Every metric above expects exactly one field value, so both omitting
    # the fields entirely and passing two of them must raise.
    for metric_name in [counter_name, int_gauge_name, event_metric_name]:
      with self.assertRaises(ValueError):
        collector.GetMetricValue(metric_name)
      with self.assertRaises(ValueError):
        collector.GetMetricValue(metric_name, fields=["a", "b"])
Esempio n. 11
0
  def testGetMetricFieldsWorksCorrectly(self):
    """GetMetricFields returns the distinct field tuples recorded so far."""
    counter_name = "testGetMetricFieldsWorksCorrectly_counter"
    int_gauge_name = "testGetMetricFieldsWorksCorrectly_int_gauge"
    event_metric_name = "testGetMetricFieldsWorksCorrectly_event_metric"

    collector = self._CreateStatsCollector([
        stats_utils.CreateCounterMetadata(
            counter_name, fields=[("dimension1", str), ("dimension2", str)]),
        stats_utils.CreateGaugeMetadata(
            int_gauge_name, int, fields=[("dimension", str)]),
        stats_utils.CreateEventMetadata(
            event_metric_name, fields=[("dimension", str)]),
    ])

    collector.IncrementCounter(counter_name, fields=["b", "b"])
    collector.IncrementCounter(counter_name, fields=["a", "c"])

    collector.SetGaugeValue(int_gauge_name, 20, fields=["a"])
    collector.SetGaugeValue(int_gauge_name, 30, fields=["b"])

    collector.RecordEvent(event_metric_name, 0.1, fields=["a"])
    collector.RecordEvent(event_metric_name, 0.1, fields=["b"])

    def SortedFields(metric_name):
      # The order GetMetricFields returns is unspecified; sort on the first
      # field component for a deterministic comparison.
      return sorted(collector.GetMetricFields(metric_name),
                    key=lambda t: t[0])

    self.assertEqual([("a", "c"), ("b", "b")], SortedFields(counter_name))
    self.assertEqual([("a",), ("b",)], SortedFields(int_gauge_name))
    self.assertEqual([("a",), ("b",)], SortedFields(event_metric_name))
Esempio n. 12
0
  def testEventMetricWithFields(self):
    """Events recorded under different field values stay separate."""
    event_metric_name = "testEventMetricWithFields_event_metric"

    collector = self._CreateStatsCollector([
        stats_utils.CreateEventMetadata(
            event_metric_name,
            bins=[0.0, 0.1, 0.2],
            fields=[("dimension", str)])
    ])

    def GetData(field_value):
      # Reads the distribution recorded under a single field value.
      return collector.GetMetricValue(event_metric_name,
                                      fields=[field_value])

    # An untouched field value yields an empty distribution.
    data = GetData("dimension_value_1")
    self.assertAlmostEqual(0, data.sum)
    self.assertEqual(0, data.count)
    self.assertEqual([-_INF, 0.0, 0.1, 0.2], list(data.bins))
    self.assertEqual({-_INF: 0, 0.0: 0, 0.1: 0, 0.2: 0}, data.bins_heights)

    collector.RecordEvent(event_metric_name, 0.15, fields=["dimension_value_1"])
    collector.RecordEvent(event_metric_name, 0.25, fields=["dimension_value_2"])

    # Each field value only sees its own event.
    data = GetData("dimension_value_1")
    self.assertAlmostEqual(0.15, data.sum)
    self.assertEqual(1, data.count)
    self.assertEqual([-_INF, 0.0, 0.1, 0.2], list(data.bins))
    self.assertEqual({-_INF: 0, 0.0: 0, 0.1: 1, 0.2: 0}, data.bins_heights)

    data = GetData("dimension_value_2")
    self.assertAlmostEqual(0.25, data.sum)
    self.assertEqual(1, data.count)
    self.assertEqual([-_INF, 0.0, 0.1, 0.2], list(data.bins))
    self.assertEqual({-_INF: 0, 0.0: 0, 0.1: 0, 0.2: 1}, data.bins_heights)
Esempio n. 13
0
  def Run(self):
    """Generates golden regression data for ListStatsStoreMetricsMetadata."""
    # We have to include all server metadata in the test context since server
    # code that uses the metrics runs within the context.
    non_test_metadata = list(
        itervalues(stats_collector_instance.Get().GetAllMetricsMetadata()))
    # Extend the real metadata with one sample metric of each type so the
    # API output below contains predictable entries.
    test_metadata = non_test_metadata + [
        stats_utils.CreateCounterMetadata(
            _TEST_COUNTER, docstring="Sample counter metric."),
        stats_utils.CreateGaugeMetadata(
            _TEST_GAUGE_METRIC, str, docstring="Sample gauge metric."),
        stats_utils.CreateEventMetadata(
            _TEST_EVENT_METRIC, docstring="Sample event metric."),
    ]
    stats_collector = default_stats_collector.DefaultStatsCollector(
        test_metadata)
    with stats_test_utils.FakeStatsContext(stats_collector):
      # Persist a stats snapshot for a fake worker process so that the
      # metadata listing has a "WORKER" component to report on.
      with aff4.FACTORY.Create(
          None, aff4_stats_store.StatsStore, mode="w",
          token=self.token) as stats_store:
        stats_store.WriteStats(process_id="worker_1")

      # We use mixins to run the same tests against multiple APIs.
      # Result-filtering is only needed for HTTP API tests.
      if isinstance(self, api_regression_http.HttpApiRegressionTestMixinBase):
        api_post_process_fn = self._PostProcessApiResult
      else:
        api_post_process_fn = None

      self.Check(
          "ListStatsStoreMetricsMetadata",
          args=stats_plugin.ApiListStatsStoreMetricsMetadataArgs(
              component="WORKER"),
          api_post_process_fn=api_post_process_fn)
Esempio n. 14
0
def _CreateFakeStatsCollector():
  """Returns a stats-collector for use by tests in this file."""
  # Counters with zero, one and two field annotations, plus one event metric.
  metadata = [
      stats_utils.CreateCounterMetadata(_SINGLE_DIM_COUNTER),
      stats_utils.CreateCounterMetadata(_COUNTER_WITH_ONE_FIELD,
                                        fields=[("field1", str)]),
      stats_utils.CreateCounterMetadata(_COUNTER_WITH_TWO_FIELDS,
                                        fields=[("field1", str),
                                                ("field2", int)]),
      stats_utils.CreateEventMetadata(_EVENT_METRIC),
  ]
  return prometheus_stats_collector.PrometheusStatsCollector(metadata)
Esempio n. 15
0
  def testEventMetricGetsRendered(self):
    """A recorded event must show up in the rendered varz JSON."""
    metric_name = "api_method_latency"
    stats_collector = prometheus_stats_collector.PrometheusStatsCollector(
        [stats_utils.CreateEventMetadata(metric_name)])
    with stats_test_utils.FakeStatsContext(stats_collector):
      stats_collector_instance.Get().RecordEvent(metric_name, 15)

      varz_json = json.loads(stats_server.BuildVarzJsonString())
      metric_entry = varz_json[metric_name]
      # Event metrics are rendered as distributions.
      self.assertEqual(metric_entry["info"],
                       {"metric_type": "EVENT",
                        "value_type": "DISTRIBUTION"})
      self.assertCountEqual(iterkeys(metric_entry["value"]),
                            ["sum", "bins_heights", "counter"])
Esempio n. 16
0
def GetMetadata():
    """Returns a list of MetricMetadata for GRR server components."""
    return [
        # GRR user-management metrics.
        stats_utils.CreateEventMetadata("acl_check_time",
                                        fields=[("check_type", str)]),
        stats_utils.CreateCounterMetadata("approval_searches",
                                          fields=[("reason_presence", str),
                                                  ("source", str)]),

        # Cronjob metrics.
        stats_utils.CreateCounterMetadata("cron_internal_error"),
        stats_utils.CreateCounterMetadata("cron_job_failure",
                                          fields=[("cron_job_id", str)]),
        stats_utils.CreateCounterMetadata("cron_job_timeout",
                                          fields=[("cron_job_id", str)]),
        stats_utils.CreateEventMetadata("cron_job_latency",
                                        fields=[("cron_job_id", str)]),

        # Access-control metrics.
        stats_utils.CreateCounterMetadata("grr_expired_tokens"),

        # Datastore metrics.
        stats_utils.CreateCounterMetadata("grr_commit_failure"),
        stats_utils.CreateCounterMetadata("datastore_retries"),
        stats_utils.CreateGaugeMetadata(
            "datastore_size",
            int,
            docstring="Size of data store in bytes",
            units="BYTES"),
        stats_utils.CreateCounterMetadata("grr_task_retransmission_count"),
        stats_utils.CreateCounterMetadata("grr_task_ttl_expired_count"),
        stats_utils.CreateEventMetadata(
            "db_request_latency",
            fields=[("call", str)],
            bins=[0.05 * 1.2**x for x in range(30)]),  # 50ms to ~10 secs
        stats_utils.CreateCounterMetadata("db_request_errors",
                                          fields=[("call", str),
                                                  ("type", str)]),
        stats_utils.CreateEventMetadata(
            "blob_store_poll_hit_latency",
            bins=[0.05, 0.1, 0.2, 0.5, 1, 2, 5, 10, 20, 50]),
        stats_utils.CreateEventMetadata("blob_store_poll_hit_iteration",
                                        bins=[1, 2, 5, 10, 20, 50]),
        stats_utils.CreateEventMetadata(
            "dual_blob_store_write_latency",
            fields=[("backend", str), ("backend_class", str)],
            bins=[0.05, 0.1, 0.2, 0.5, 1, 2, 5, 10, 20, 50]),
        stats_utils.CreateCounterMetadata("dual_blob_store_success_count",
                                          fields=[("backend", str),
                                                  ("backend_class", str)]),
        stats_utils.CreateCounterMetadata("dual_blob_store_error_count",
                                          fields=[("backend", str),
                                                  ("backend_class", str)]),
        stats_utils.CreateCounterMetadata("dual_blob_store_discard_count",
                                          fields=[("backend", str),
                                                  ("backend_class", str)]),

        # Threadpool metrics.
        stats_utils.CreateGaugeMetadata("threadpool_outstanding_tasks",
                                        int,
                                        fields=[("pool_name", str)]),
        stats_utils.CreateGaugeMetadata("threadpool_threads",
                                        int,
                                        fields=[("pool_name", str)]),
        stats_utils.CreateGaugeMetadata("threadpool_cpu_use",
                                        float,
                                        fields=[("pool_name", str)]),
        stats_utils.CreateCounterMetadata("threadpool_task_exceptions",
                                          fields=[("pool_name", str)]),
        stats_utils.CreateEventMetadata("threadpool_working_time",
                                        fields=[("pool_name", str)]),
        stats_utils.CreateEventMetadata("threadpool_queueing_time",
                                        fields=[("pool_name", str)]),

        # Worker and flow-related metrics.
        stats_utils.CreateCounterMetadata("grr_flows_stuck"),
        stats_utils.CreateCounterMetadata("worker_bad_flow_objects",
                                          fields=[("type", str)]),
        stats_utils.CreateCounterMetadata("worker_session_errors",
                                          fields=[("type", str)]),
        stats_utils.CreateCounterMetadata(
            "worker_flow_lock_error",
            # Fixed typo in the docstring ("systemis" -> "system is").
            docstring=
            "Worker lock failures. We expect these to be high when the "
            "system is idle."),
        stats_utils.CreateEventMetadata("worker_flow_processing_time",
                                        fields=[("flow", str)]),
        stats_utils.CreateEventMetadata(
            "worker_time_to_retrieve_notifications"),
        stats_utils.CreateCounterMetadata("grr_flow_completed_count"),
        stats_utils.CreateCounterMetadata("grr_flow_errors"),
        stats_utils.CreateCounterMetadata("grr_flow_invalid_flow_count"),
        stats_utils.CreateCounterMetadata("grr_request_retransmission_count"),
        stats_utils.CreateCounterMetadata("grr_response_out_of_order"),
        stats_utils.CreateCounterMetadata("grr_unique_clients"),
        stats_utils.CreateCounterMetadata("grr_worker_states_run"),
        stats_utils.CreateCounterMetadata("grr_well_known_flow_requests"),
        stats_utils.CreateCounterMetadata("flow_starts",
                                          fields=[("flow", str)]),
        stats_utils.CreateCounterMetadata("flow_errors",
                                          fields=[("flow", str)]),
        stats_utils.CreateCounterMetadata("flow_completions",
                                          fields=[("flow", str)]),
        stats_utils.CreateCounterMetadata("well_known_flow_requests",
                                          fields=[("flow", str)]),
        stats_utils.CreateCounterMetadata("well_known_flow_errors",
                                          fields=[("flow", str)]),
        stats_utils.CreateEventMetadata("fleetspeak_last_ping_latency_millis"),

        # Hunt-related metrics.
        stats_utils.CreateCounterMetadata("hunt_output_plugin_verifications",
                                          fields=[("status", str)]),
        stats_utils.CreateCounterMetadata(
            "hunt_output_plugin_verification_errors"),
        stats_utils.CreateCounterMetadata("hunt_output_plugin_errors",
                                          fields=[("plugin", str)]),
        stats_utils.CreateCounterMetadata("hunt_results_ran_through_plugin",
                                          fields=[("plugin", str)]),
        stats_utils.CreateCounterMetadata("hunt_results_compacted"),
        stats_utils.CreateCounterMetadata(
            "hunt_results_compaction_locking_errors"),
        stats_utils.CreateCounterMetadata("hunt_results_added"),

        # GRR-API metrics.
        stats_utils.CreateEventMetadata("api_method_latency",
                                        fields=[("method_name", str),
                                                ("protocol", str),
                                                ("status", str)]),
        stats_utils.CreateEventMetadata("api_access_probe_latency",
                                        fields=[("method_name", str),
                                                ("protocol", str),
                                                ("status", str)]),

        # Client-related metrics.
        stats_utils.CreateCounterMetadata("grr_client_crashes"),
        stats_utils.CreateCounterMetadata("client_pings_by_label",
                                          fields=[("label", str)]),

        # Metrics specific to GRR frontends.
        stats_utils.CreateGaugeMetadata("frontend_active_count",
                                        int,
                                        fields=[("source", str)]),
        stats_utils.CreateGaugeMetadata("frontend_max_active_count", int),
        stats_utils.CreateCounterMetadata("frontend_http_requests",
                                          fields=[("action", str),
                                                  ("protocol", str)]),
        stats_utils.CreateCounterMetadata("frontend_in_bytes",
                                          fields=[("source", str)]),
        stats_utils.CreateCounterMetadata("frontend_out_bytes",
                                          fields=[("source", str)]),
        stats_utils.CreateCounterMetadata("frontend_request_count",
                                          fields=[("source", str)]),
        stats_utils.CreateCounterMetadata("frontend_inactive_request_count",
                                          fields=[("source", str)]),
        stats_utils.CreateEventMetadata("frontend_request_latency",
                                        fields=[("source", str)]),
        stats_utils.CreateEventMetadata("grr_frontendserver_handle_time"),
        stats_utils.CreateCounterMetadata("grr_frontendserver_handle_num"),
        stats_utils.CreateGaugeMetadata("grr_frontendserver_client_cache_size",
                                        int),
        stats_utils.CreateCounterMetadata("grr_messages_sent"),
        stats_utils.CreateCounterMetadata("grr_pub_key_cache",
                                          fields=[("type", str)]),
    ]
Esempio n. 17
0
  def Run(self):
    """Generates golden regression data for the GetStatsStoreMetric API."""
    # Include all real server metric metadata so server code running inside
    # the fake-stats context can still find its metrics.
    real_metric_metadata = list(
        itervalues(stats_collector_instance.Get().GetAllMetricsMetadata()))
    # Add one sample metric of each type whose values we fully control.
    test_metadata = real_metric_metadata + [
        stats_utils.CreateCounterMetadata(
            _TEST_COUNTER, docstring="Sample counter metric."),
        stats_utils.CreateGaugeMetadata(
            _TEST_GAUGE_METRIC, float, docstring="Sample gauge metric."),
        stats_utils.CreateEventMetadata(
            _TEST_EVENT_METRIC, docstring="Sample event metric."),
    ]
    stats_collector = default_stats_collector.DefaultStatsCollector(
        test_metadata)
    with stats_test_utils.FakeStatsContext(stats_collector):
      # Record ten data points at one-minute intervals of fake time and
      # snapshot the stats store after each, producing a deterministic
      # time series for the checks below.
      for i in range(10):
        with test_lib.FakeTime(42 + i * 60):
          stats_collector.IncrementCounter(_TEST_COUNTER)
          stats_collector.SetGaugeValue(_TEST_GAUGE_METRIC, i * 0.5)
          stats_collector.RecordEvent(_TEST_EVENT_METRIC, 0.42 + 0.5 * i)

          with aff4.FACTORY.Create(
              None, aff4_stats_store.StatsStore, mode="w",
              token=self.token) as stats_store:
            stats_store.WriteStats(process_id="worker_1")

      # Query window covering all ten recorded samples.
      range_start = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(42)
      range_end = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(3600)

      # Counter: raw values and rate-normalized ("1m") values.
      self.Check(
          "GetStatsStoreMetric",
          args=stats_plugin.ApiGetStatsStoreMetricArgs(
              component="WORKER",
              metric_name=_TEST_COUNTER,
              start=range_start,
              end=range_end))
      self.Check(
          "GetStatsStoreMetric",
          args=stats_plugin.ApiGetStatsStoreMetricArgs(
              component="WORKER",
              metric_name=_TEST_COUNTER,
              start=range_start,
              end=range_end,
              rate="1m"))

      # Gauge: raw values only.
      self.Check(
          "GetStatsStoreMetric",
          args=stats_plugin.ApiGetStatsStoreMetricArgs(
              component="WORKER",
              metric_name=_TEST_GAUGE_METRIC,
              start=range_start,
              end=range_end))

      # Event metric: default handling and explicit count-based handling.
      self.Check(
          "GetStatsStoreMetric",
          args=stats_plugin.ApiGetStatsStoreMetricArgs(
              component="WORKER",
              metric_name=_TEST_EVENT_METRIC,
              start=range_start,
              end=range_end))
      self.Check(
          "GetStatsStoreMetric",
          args=stats_plugin.ApiGetStatsStoreMetricArgs(
              component="WORKER",
              metric_name=_TEST_EVENT_METRIC,
              start=range_start,
              end=range_end,
              distribution_handling_mode="DH_COUNT"))