Code Example #1
    def setUp(self):
        super(StatsStoreTest, self).setUp()

        self.process_id = "some_pid"
        self.stats_store = aff4.FACTORY.Create(None,
                                               stats_store.StatsStore,
                                               mode="w",
                                               token=self.token)
        fake_stats_collector = default_stats_collector.DefaultStatsCollector([
            stats_utils.CreateCounterMetadata("counter"),
            stats_utils.CreateCounterMetadata("counter_with_fields",
                                              fields=[("source", str)]),
            stats_utils.CreateEventMetadata("events"),
            stats_utils.CreateEventMetadata("events_with_fields",
                                            fields=[("source", str)]),
            stats_utils.CreateGaugeMetadata("int_gauge", int),
            stats_utils.CreateGaugeMetadata("str_gauge", str),
            stats_utils.CreateGaugeMetadata("str_gauge_with_fields",
                                            str,
                                            fields=[("task", int)])
        ])
        fake_stats_context = stats_test_utils.FakeStatsContext(
            fake_stats_collector)
        fake_stats_context.start()
        self.addCleanup(fake_stats_context.stop)
Code Example #2
def GetMetadata():
    """Returns a list of MetricMetadata for the client's metrics."""
    return [
        stats_utils.CreateCounterMetadata("grr_client_received_bytes"),
        stats_utils.CreateCounterMetadata("grr_client_sent_bytes"),
        stats_utils.CreateGaugeMetadata("grr_client_cpu_usage", str),
        stats_utils.CreateGaugeMetadata("grr_client_io_usage", str)
    ]
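A minimal usage sketch for context (illustrative, not part of the original snippet; the DefaultStatsCollector wiring follows the other examples on this page):

# Hypothetical: build a collector from the metadata list and update a metric.
collector = default_stats_collector.DefaultStatsCollector(GetMetadata())
collector.IncrementCounter("grr_client_received_bytes", 1024)
print(collector.GetMetricValue("grr_client_received_bytes"))  # 1024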
Code Example #3
def _CreateFakeStatsCollector():
  """Returns a stats-collector for use by tests in this file."""
  return prometheus_stats_collector.PrometheusStatsCollector([
      stats_utils.CreateCounterMetadata(_SINGLE_DIM_COUNTER),
      stats_utils.CreateCounterMetadata(
          _COUNTER_WITH_ONE_FIELD, fields=[("field1", str)]),
      stats_utils.CreateCounterMetadata(
          _COUNTER_WITH_TWO_FIELDS, fields=[("field1", str), ("field2", int)]),
      stats_utils.CreateEventMetadata(_EVENT_METRIC),
  ])
Code Example #4
def _CreateFakeStatsCollector():
  """Returns a stats-collector for use by tests in this file."""
  return default_stats_collector.DefaultStatsCollector([
      stats_utils.CreateCounterMetadata("counter"),
      stats_utils.CreateCounterMetadata(
          "counter_with_fields", fields=[("source", str)]),
      stats_utils.CreateEventMetadata("events"),
      stats_utils.CreateEventMetadata(
          "events_with_fields", fields=[("source", str)]),
      stats_utils.CreateGaugeMetadata("int_gauge", int),
      stats_utils.CreateGaugeMetadata("str_gauge", str),
      stats_utils.CreateGaugeMetadata(
          "str_gauge_with_fields", str, fields=[("task", int)])
  ])
Code Example #5
  def testRaisesOnImproperFieldsUsage2(self):
    counter_name = "testRaisesOnImproperFieldsUsage2_counter"
    int_gauge_name = "testRaisesOnImproperFieldsUsage2_int_gauge"
    event_metric_name = "testRaisesOnImproperFieldsUsage2_event_metric"

    collector = self._CreateStatsCollector([
        stats_utils.CreateCounterMetadata(
            counter_name, fields=[("dimension", str)]),
        stats_utils.CreateGaugeMetadata(
            int_gauge_name, int, fields=[("dimension", str)]),
        stats_utils.CreateEventMetadata(
            event_metric_name, fields=[("dimension", str)])
    ])

    # Check for counters
    self.assertRaises(ValueError, collector.GetMetricValue, counter_name)
    self.assertRaises(
        ValueError, collector.GetMetricValue, counter_name, fields=["a", "b"])

    # Check for gauges
    self.assertRaises(ValueError, collector.GetMetricValue, int_gauge_name)
    self.assertRaises(
        ValueError, collector.GetMetricValue, int_gauge_name, fields=["a", "b"])

    # Check for event metrics
    self.assertRaises(ValueError, collector.GetMetricValue, event_metric_name)
    self.assertRaises(
        ValueError,
        collector.GetMetricValue,
        event_metric_name,
        fields=["a", "b"])
Code Example #6
File: stats_regression_test.py Project: ehossam/grr
  def Run(self):
    # We have to include all server metadata in the test context since server
    # code that uses the metrics runs within the context.
    non_test_metadata = list(
        itervalues(stats_collector_instance.Get().GetAllMetricsMetadata()))
    test_metadata = non_test_metadata + [
        stats_utils.CreateCounterMetadata(
            _TEST_COUNTER, docstring="Sample counter metric."),
        stats_utils.CreateGaugeMetadata(
            _TEST_GAUGE_METRIC, str, docstring="Sample gauge metric."),
        stats_utils.CreateEventMetadata(
            _TEST_EVENT_METRIC, docstring="Sample event metric."),
    ]
    stats_collector = default_stats_collector.DefaultStatsCollector(
        test_metadata)
    with stats_test_utils.FakeStatsContext(stats_collector):
      with aff4.FACTORY.Create(
          None, aff4_stats_store.StatsStore, mode="w",
          token=self.token) as stats_store:
        stats_store.WriteStats(process_id="worker_1")

      # We use mixins to run the same tests against multiple APIs.
      # Result-filtering is only needed for HTTP API tests.
      if isinstance(self, api_regression_http.HttpApiRegressionTestMixinBase):
        api_post_process_fn = self._PostProcessApiResult
      else:
        api_post_process_fn = None

      self.Check(
          "ListStatsStoreMetricsMetadata",
          args=stats_plugin.ApiListStatsStoreMetricsMetadataArgs(
              component="WORKER"),
          api_post_process_fn=api_post_process_fn)
Code Example #7
  def testGetMetricFieldsWorksCorrectly(self):
    counter_name = "testGetMetricFieldsWorksCorrectly_counter"
    int_gauge_name = "testGetMetricFieldsWorksCorrectly_int_gauge"
    event_metric_name = "testGetMetricFieldsWorksCorrectly_event_metric"

    collector = self._CreateStatsCollector([
        stats_utils.CreateCounterMetadata(
            counter_name, fields=[("dimension1", str), ("dimension2", str)]),
        stats_utils.CreateGaugeMetadata(
            int_gauge_name, int, fields=[("dimension", str)]),
        stats_utils.CreateEventMetadata(
            event_metric_name, fields=[("dimension", str)]),
    ])

    collector.IncrementCounter(counter_name, fields=["b", "b"])
    collector.IncrementCounter(counter_name, fields=["a", "c"])

    collector.SetGaugeValue(int_gauge_name, 20, fields=["a"])
    collector.SetGaugeValue(int_gauge_name, 30, fields=["b"])

    collector.RecordEvent(event_metric_name, 0.1, fields=["a"])
    collector.RecordEvent(event_metric_name, 0.1, fields=["b"])

    fields = sorted(collector.GetMetricFields(counter_name), key=lambda t: t[0])
    self.assertEqual([("a", "c"), ("b", "b")], fields)

    fields = sorted(
        collector.GetMetricFields(int_gauge_name), key=lambda t: t[0])
    self.assertEqual([("a",), ("b",)], fields)

    fields = sorted(
        collector.GetMetricFields(event_metric_name), key=lambda t: t[0])
    self.assertEqual([("a",), ("b",)], fields)
Code Example #8
  def testCombiningDecorators(self):
    """Test combining decorators."""
    counter_name = "testCombiningDecorators_counter"
    event_metric_name = "testCombiningDecorators_event_metric"

    collector = self._CreateStatsCollector([
        stats_utils.CreateCounterMetadata(counter_name),
        stats_utils.CreateEventMetadata(
            event_metric_name, bins=[0.0, 0.1, 0.2])
    ])

    @stats_utils.Timed(event_metric_name)
    @stats_utils.Counted(counter_name)
    def OverdecoratedFunc(n):
      self._Sleep(n)

    with FakeStatsContext(collector):
      OverdecoratedFunc(0.02)

    # Check if all vars get updated
    m = collector.GetMetricValue(event_metric_name)
    self.assertEqual(m.bins_heights[0.0], 1)
    self.assertEqual(m.bins_heights[0.1], 0)
    self.assertEqual(m.bins_heights[0.2], 0)

    self.assertEqual(collector.GetMetricValue(counter_name), 1)
Code Example #9
  def testCounterWithFields(self):
    counter_name = "testCounterWithFields_counter"

    collector = self._CreateStatsCollector([
        stats_utils.CreateCounterMetadata(
            counter_name, fields=[("dimension", str)])
    ])

    # Test that the default value for any combination of field values is 0.
    self.assertEqual(0, collector.GetMetricValue(counter_name, fields=["a"]))
    self.assertEqual(0, collector.GetMetricValue(counter_name, fields=["b"]))

    for _ in builtins.range(5):
      collector.IncrementCounter(counter_name, fields=["dimension_value_1"])
    self.assertEqual(
        5, collector.GetMetricValue(counter_name, fields=["dimension_value_1"]))

    collector.IncrementCounter(counter_name, 2, fields=["dimension_value_1"])
    self.assertEqual(
        7, collector.GetMetricValue(counter_name, fields=["dimension_value_1"]))

    collector.IncrementCounter(counter_name, 2, fields=["dimension_value_2"])
    self.assertEqual(
        2, collector.GetMetricValue(counter_name, fields=["dimension_value_2"]))
    # Check that previously set values with other fields are not affected.
    self.assertEqual(
        7, collector.GetMetricValue(counter_name, fields=["dimension_value_1"]))
Code Example #10
    def testGetAllMetricsMetadataWorksCorrectlyOnSimpleMetrics(self):
        counter_name = "testGAMM_SimpleMetrics_counter"
        int_gauge_name = "testGAMM_SimpleMetrics_int_gauge"
        event_metric_name = "testGAMM_SimpleMetrics_event_metric"

        collector = self._CreateStatsCollector([
            stats_utils.CreateCounterMetadata(counter_name),
            stats_utils.CreateGaugeMetadata(int_gauge_name,
                                            int,
                                            fields=[("dimension", str)]),
            stats_utils.CreateEventMetadata(event_metric_name)
        ])

        metrics = collector.GetAllMetricsMetadata()
        self.assertEqual(metrics[counter_name].metric_type,
                         rdf_stats.MetricMetadata.MetricType.COUNTER)
        self.assertFalse(metrics[counter_name].fields_defs)

        self.assertEqual(metrics[int_gauge_name].metric_type,
                         rdf_stats.MetricMetadata.MetricType.GAUGE)
        self.assertEqual(metrics[int_gauge_name].fields_defs, [
            rdf_stats.MetricFieldDefinition(
                field_name="dimension",
                field_type=rdf_stats.MetricFieldDefinition.FieldType.STR)
        ])

        self.assertEqual(metrics[event_metric_name].metric_type,
                         rdf_stats.MetricMetadata.MetricType.EVENT)
        self.assertFalse(metrics[event_metric_name].fields_defs)
Code Example #11
    def testExceptionHandling(self):
        """Test decorators when exceptions are thrown."""
        counter_name = "testExceptionHandling_counter"
        event_metric_name = "testExceptionHandling_event_metric"

        collector = self._CreateStatsCollector([
            stats_utils.CreateCounterMetadata(counter_name),
            stats_utils.CreateEventMetadata(event_metric_name,
                                            bins=[0, 0.1, 0.2])
        ])

        @stats_utils.Timed(event_metric_name)
        @stats_utils.Counted(counter_name)
        def RaiseFunc(n):
            self._Sleep(n)
            raise Exception()

        with FakeStatsContext(collector):
            self.assertRaises(Exception, RaiseFunc, 0.11)

        # Check if all vars get updated
        m = collector.GetMetricValue(event_metric_name)
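        # bins_heights maps each bin's lower edge to its count; -_INF is the
        # underflow bin, and the 0.11s call lands in the [0.1, 0.2) bin.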
        self.assertEqual(m.bins_heights, {-_INF: 0, 0: 0, 0.1: 1, 0.2: 0})

        self.assertEqual(collector.GetMetricValue(counter_name), 1)
Code Example #12
    def testMultipleFuncs(self):
        """Tests if multiple decorators produce aggregate stats."""
        counter_name = "testMultipleFuncs_counter"
        event_metric_name = "testMultipleFuncs_event_metric"

        collector = self._CreateStatsCollector([
            stats_utils.CreateCounterMetadata(counter_name),
            stats_utils.CreateEventMetadata(event_metric_name, bins=[0, 1, 2])
        ])

        @stats_utils.Counted(counter_name)
        def Func1(n):
            self._Sleep(n)

        @stats_utils.Counted(counter_name)
        def Func2(n):
            self._Sleep(n)

        @stats_utils.Timed(event_metric_name)
        def Func3(n):
            self._Sleep(n)

        @stats_utils.Timed(event_metric_name)
        def Func4(n):
            self._Sleep(n)

        with FakeStatsContext(collector):
            Func1(0.1)
            Func2(0.1)
            self.assertEqual(collector.GetMetricValue(counter_name), 2)

            Func3(0.1)
            Func4(1.1)
            m = collector.GetMetricValue(event_metric_name)
            self.assertEqual(m.bins_heights, {-_INF: 0, 0: 1, 1: 1, 2: 0})
Code Example #13
  def testRaisesOnImproperFieldsUsage1(self):
    counter_name = "testRaisesOnImproperFieldsUsage1_counter"
    int_gauge_name = "testRaisesOnImproperFieldsUsage1_int_gauge"
    event_metric_name = "testRaisesOnImproperFieldsUsage1_event_metric"

    collector = self._CreateStatsCollector([
        stats_utils.CreateCounterMetadata(counter_name),
        stats_utils.CreateGaugeMetadata(int_gauge_name, int),
        stats_utils.CreateEventMetadata(event_metric_name)
    ])

    # Check for counters
    with self.assertRaises(ValueError):
      collector.GetMetricValue(counter_name, fields=["a"])

    # Check for gauges
    with self.assertRaises(ValueError):
      collector.GetMetricValue(int_gauge_name, fields=["a"])

    # Check for event metrics
    self.assertRaises(
        ValueError,
        collector.GetMetricValue,
        event_metric_name,
        fields=["a", "b"])
Code Example #14
File: db_stats_test.py Project: thetraker/grr
  def _SetUpFakeStatsContext(self):
    """Registers stats metrics used by tests in this class."""
    # DB implementations might interact with real metrics (not defined in this
    # test), so we make sure that they get registered.
    real_metrics = list(
        stats_collector_instance.Get().GetAllMetricsMetadata().values())
    test_metrics = [
        stats_utils.CreateCounterMetadata(_SINGLE_DIM_COUNTER),
        stats_utils.CreateCounterMetadata(
            _MULTI_DIM_COUNTER,
            fields=[("str_field1", str), ("str_field2", str)]),
    ]
    fake_stats_context = stats_test_utils.FakeStatsContext(
        default_stats_collector.DefaultStatsCollector(
            real_metrics + test_metrics))
    fake_stats_context.start()
    self.addCleanup(fake_stats_context.stop)
Code Example #15
    def testDecrementingCounterRaises(self):
        counter_name = "testDecrementingCounterRaises_counter"

        collector = self._CreateStatsCollector(
            [stats_utils.CreateCounterMetadata(counter_name)])

        with self.assertRaises(ValueError):
            collector.IncrementCounter(counter_name, -1)
Code Example #16
  def testPurgeServerStats(self):
    if not data_store.RelationalDBReadEnabled():
      self.skipTest("Test is only for the relational DB. Skipping...")
    fake_stats_collector = default_stats_collector.DefaultStatsCollector([
        stats_utils.CreateCounterMetadata("fake_counter"),
    ])
    timestamp0 = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(1)
    timestamp1 = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(2)
    timestamp2 = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(3600)
    timestamp3 = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(4800)
    config_overrides = {
        "Database.useForReads.stats": True,
        "StatsStore.stats_ttl_hours": 1
    }
    with test_lib.ConfigOverrider(config_overrides), \
         stats_test_utils.FakeStatsContext(fake_stats_collector), \
         mock.patch.object(system, "_STATS_DELETION_BATCH_SIZE", 1):
      with test_lib.FakeTime(rdfvalue.RDFDatetime(timestamp0)):
        stats_store._WriteStats(process_id="fake_process_id")
      with test_lib.FakeTime(rdfvalue.RDFDatetime(timestamp1)):
        stats_collector_instance.Get().IncrementCounter("fake_counter")
        stats_store._WriteStats(process_id="fake_process_id")
        expected_results = {
            "fake_process_id": {
                "fake_counter": [(0, timestamp0), (1, timestamp1)]
            }
        }
        self.assertDictEqual(
            stats_store.ReadStats("f", "fake_counter"), expected_results)
      with test_lib.FakeTime(timestamp2):
        stats_store._WriteStats(process_id="fake_process_id")
        expected_results = {
            "fake_process_id": {
                "fake_counter": [(0, timestamp0), (1, timestamp1),
                                 (1, timestamp2)]
            }
        }
        self.assertDictEqual(
            stats_store.ReadStats("f", "fake_counter"), expected_results)
      with test_lib.FakeTime(timestamp3):
        cron = system.PurgeServerStatsCronJob(
            rdf_cronjobs.CronJobRun(), rdf_cronjobs.CronJob())
        cron.Run()
        # timestamp0 and timestamp1 are older than 1h, so they should get
        # deleted.
        expected_results = {
            "fake_process_id": {
                "fake_counter": [(1, timestamp2)]
            }
        }
        self.assertDictEqual(
            stats_store.ReadStats("f", "fake_counter"), expected_results)
        self.assertIn("Deleted 2 stats entries.", cron.run_state.log_message)
Code Example #17
def GetMetricMetadata():
    """Returns a list of MetricMetadata for communicator-related metrics."""
    return [
        stats_utils.CreateCounterMetadata("grr_client_unknown"),
        stats_utils.CreateCounterMetadata("grr_decoding_error"),
        stats_utils.CreateCounterMetadata("grr_decryption_error"),
        stats_utils.CreateCounterMetadata(
            "grr_legacy_client_decryption_error"),
        stats_utils.CreateCounterMetadata("grr_authenticated_messages"),
        stats_utils.CreateCounterMetadata("grr_unauthenticated_messages"),
        stats_utils.CreateCounterMetadata("grr_rsa_operations"),
        stats_utils.CreateCounterMetadata("grr_encrypted_cipher_cache",
                                          fields=[("type", str)]),
    ]
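A minimal sketch (illustrative; the field values here are hypothetical) of updating the fielded cache metric declared above:

# Hypothetical: count cipher-cache outcomes separately per field value.
collector = default_stats_collector.DefaultStatsCollector(GetMetricMetadata())
collector.IncrementCounter("grr_encrypted_cipher_cache", fields=["hits"])
collector.IncrementCounter("grr_encrypted_cipher_cache", fields=["misses"])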
Code Example #18
    def testSimpleCounter(self):
        counter_name = "testSimpleCounter_counter"

        collector = self._CreateStatsCollector(
            [stats_utils.CreateCounterMetadata(counter_name)])

        self.assertEqual(0, collector.GetMetricValue(counter_name))

        for _ in range(5):
            collector.IncrementCounter(counter_name)
        self.assertEqual(5, collector.GetMetricValue(counter_name))

        collector.IncrementCounter(counter_name, 2)
        self.assertEqual(7, collector.GetMetricValue(counter_name))
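A similarly minimal sketch for gauges (illustrative; SetGaugeValue appears in Example #7 above):

# Hypothetical gauge counterpart to the counter test above.
collector = default_stats_collector.DefaultStatsCollector(
    [stats_utils.CreateGaugeMetadata("example_gauge", int)])
collector.SetGaugeValue("example_gauge", 42)
print(collector.GetMetricValue("example_gauge"))  # 42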
Code Example #19
  def testPurgeServerStats(self):
    if not data_store.RelationalDBReadEnabled():
      self.skipTest("Test is only for the relational DB. Skipping...")
    fake_stats_collector = default_stats_collector.DefaultStatsCollector([
        stats_utils.CreateCounterMetadata("fake_counter"),
    ])
    timestamp1 = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(1)
    timestamp2 = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(3600)
    timestamp3 = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(4800)
    config_overrides = {
        "Database.useForReads.stats": True,
        "StatsStore.stats_ttl_hours": 1
    }
    with test_lib.ConfigOverrider(config_overrides):
      with stats_test_utils.FakeStatsContext(fake_stats_collector):
        with test_lib.FakeTime(rdfvalue.RDFDatetime(timestamp1)):
          stats_collector_instance.Get().IncrementCounter("fake_counter")
          stats_store._WriteStats(process_id="fake_process_id")
          expected_results = {
              "fake_process_id": {
                  "fake_counter": [(1, timestamp1)]
              }
          }
          self.assertDictEqual(
              stats_store.ReadStats("f", "fake_counter"), expected_results)
        with test_lib.FakeTime(timestamp2):
          stats_store._WriteStats(process_id="fake_process_id")
          expected_results = {
              "fake_process_id": {
                  "fake_counter": [(1, timestamp1), (1, timestamp2)]
              }
          }
          self.assertDictEqual(
              stats_store.ReadStats("f", "fake_counter"), expected_results)
        with test_lib.FakeTime(timestamp3):
          system.PurgeServerStatsCronJob(
              rdf_cronjobs.CronJobRun(), rdf_cronjobs.CronJob()).Run()
          # timestamp1 is older than 1h, so it should get deleted.
          expected_results = {
              "fake_process_id": {
                  "fake_counter": [(1, timestamp2)]
              }
          }
          self.assertDictEqual(
              stats_store.ReadStats("f", "fake_counter"), expected_results)
Code Example #20
    def testCountingDecorator(self):
        """Test _Function call counting."""
        counter_name = "testCountingDecorator_counter"

        collector = self._CreateStatsCollector(
            [stats_utils.CreateCounterMetadata(counter_name)])

        @stats_utils.Counted(counter_name)
        def CountedFunc():
            pass

        with FakeStatsContext(collector):
            for _ in range(10):
                CountedFunc()

        self.assertEqual(collector.GetMetricValue(counter_name), 10)
Code Example #21
File: stats_test_utils.py Project: lordlee0702/grr
    def testErrorsCountingDecorator(self):
        counter_name = "testCountingDecorator_errors_counter"

        collector = self._CreateStatsCollector(
            [stats_utils.CreateCounterMetadata(counter_name)])

        @stats_utils.ErrorsCounted(counter_name)
        def CountedFunc(should_raise):
            if should_raise:
                raise RuntimeError("foo")

        with FakeStatsContext(collector):
            for i in range(10):
                if i % 2 == 0:
                    with self.assertRaises(RuntimeError):
                        CountedFunc(True)
                else:
                    CountedFunc(False)

        # Non-failing calls shouldn't increment the counter.
        self.assertEqual(collector.GetMetricValue(counter_name), 5)
Code Example #22
File: stats_server_test.py Project: LubyRuffy/grr
    def testPrometheusIntegration(self):
        registry = prometheus_client.CollectorRegistry(auto_describe=True)

        metadatas = [stats_utils.CreateCounterMetadata("foobars")]
        collector = prometheus_stats_collector.PrometheusStatsCollector(
            metadatas, registry=registry)
        collector.IncrementCounter("foobars", 42)

        port = portpicker.pick_unused_port()

        with mock.patch.object(stats_server.StatsServerHandler, "registry",
                               registry):
            server = stats_server.StatsServer(port)
            server.Start()
            self.addCleanup(server.Stop)
            res = requests.get("http://localhost:{}/metrics".format(port))

        text_fd = io.StringIO(res.text)
        families = prometheus_parser.text_fd_to_metric_families(text_fd)
        families = {family.name: family for family in families}

        self.assertIn("foobars", families)
        self.assertEqual(families["foobars"].samples[0].value, 42)
Code Example #23
    def testPrometheusIntegration(self):
        registry = prometheus_client.CollectorRegistry(auto_describe=True)

        metadatas = [stats_utils.CreateCounterMetadata("foobars")]
        collector = prometheus_stats_collector.PrometheusStatsCollector(
            metadatas, registry=registry)
        collector.IncrementCounter("foobars", 42)

        handler = stats_server.StatsServerHandler(mock.MagicMock(),
                                                  mock.MagicMock(),
                                                  mock.MagicMock())
        handler.registry = registry
        handler.path = "/metrics"
        handler.headers = {}
        handler.wfile = io.BytesIO()

        handler.do_GET()
        handler.wfile.seek(0)

        families = prometheus_parser.text_fd_to_metric_families(handler.wfile)
        families = {family.name: family for family in families}

        self.assertIn("foobars", families)
        self.assertEqual(families["foobars"].samples[0].value, 42)
Code Example #24
File: system_test.py Project: cdstelly/grr
  def testPurgeServerStats(self):
    if not data_store.RelationalDBReadEnabled():
      self.skipTest("Test is only for the relational DB. Skipping...")
    fake_stats_collector = prometheus_stats_collector.PrometheusStatsCollector(
        [
            stats_utils.CreateCounterMetadata("fake_counter"),
        ])
    timestamp0 = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(1)
    timestamp1 = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(2)
    timestamp2 = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(3600)
    timestamp3 = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(4800)
    config_overrides = {
        "Database.useForReads.stats": True,
        "StatsStore.stats_ttl_hours": 1
    }
    zero_duration = rdfvalue.Duration(0)
    # Backslash continuation is explicitly allowed by Google's style guide for
    # nested context manager expressions spanning 3 or more lines.
    # pylint: disable=g-backslash-continuation
    with test_lib.ConfigOverrider(config_overrides), \
         stats_test_utils.FakeStatsContext(fake_stats_collector), \
         mock.patch.object(system, "_STATS_DELETION_BATCH_SIZE", 1), \
         mock.patch.object(system, "_stats_checkpoint_period", zero_duration):
      with test_lib.FakeTime(rdfvalue.RDFDatetime(timestamp0)):
        stats_store._WriteStats(process_id="fake_process_id")
      with test_lib.FakeTime(rdfvalue.RDFDatetime(timestamp1)):
        stats_collector_instance.Get().IncrementCounter("fake_counter")
        stats_store._WriteStats(process_id="fake_process_id")
        expected_results = {
            "fake_process_id": {
                "fake_counter": [(0, timestamp0), (1, timestamp1)]
            }
        }
        self.assertDictEqual(
            stats_store.ReadStats("f", "fake_counter"), expected_results)
      with test_lib.FakeTime(timestamp2):
        stats_store._WriteStats(process_id="fake_process_id")
        expected_results = {
            "fake_process_id": {
                "fake_counter": [(0, timestamp0), (1, timestamp1),
                                 (1, timestamp2)]
            }
        }
        self.assertDictEqual(
            stats_store.ReadStats("f", "fake_counter"), expected_results)
      with test_lib.FakeTime(timestamp3):
        cron_name = compatibility.GetName(system.PurgeServerStatsCronJob)
        cronjobs.ScheduleSystemCronJobs(names=[cron_name])
        job_data = data_store.REL_DB.ReadCronJobs([cron_name])[0]
        cron_run = rdf_cronjobs.CronJobRun(cron_job_id=cron_name)
        cron_run.GenerateRunId()
        cron_run.started_at = rdfvalue.RDFDatetime.Now()
        cron = system.PurgeServerStatsCronJob(cron_run, job_data)
        cron.Run()
        # timestamp0 and timestamp1 are older than 1h, so they should get
        # deleted.
        expected_results = {
            "fake_process_id": {
                "fake_counter": [(1, timestamp2)]
            }
        }
        self.assertDictEqual(
            stats_store.ReadStats("f", "fake_counter"), expected_results)
        self.assertEqual("Deleted 2 stats entries.\nDeleted 1 stats entries.",
                         cron.run_state.log_message)
Code Example #25
def GetMetadata():
    """Returns a list of MetricMetadata for GRR server components."""
    return [
        # GRR user-management metrics.
        stats_utils.CreateEventMetadata("acl_check_time",
                                        fields=[("check_type", str)]),
        stats_utils.CreateCounterMetadata("approval_searches",
                                          fields=[("reason_presence", str),
                                                  ("source", str)]),

        # Cronjob metrics.
        stats_utils.CreateCounterMetadata("cron_internal_error"),
        stats_utils.CreateCounterMetadata("cron_job_failure",
                                          fields=[("cron_job_id", str)]),
        stats_utils.CreateCounterMetadata("cron_job_timeout",
                                          fields=[("cron_job_id", str)]),
        stats_utils.CreateEventMetadata("cron_job_latency",
                                        fields=[("cron_job_id", str)]),

        # Access-control metrics.
        stats_utils.CreateCounterMetadata("grr_expired_tokens"),

        # Datastore metrics.
        stats_utils.CreateCounterMetadata("grr_commit_failure"),
        stats_utils.CreateCounterMetadata("datastore_retries"),
        stats_utils.CreateGaugeMetadata(
            "datastore_size",
            int,
            docstring="Size of data store in bytes",
            units="BYTES"),
        stats_utils.CreateCounterMetadata("grr_task_retransmission_count"),
        stats_utils.CreateCounterMetadata("grr_task_ttl_expired_count"),
        stats_utils.CreateEventMetadata(
            "db_request_latency",
            fields=[("call", str)],
            bins=[0.05 * 1.2**x for x in range(30)]),  # 50ms to ~10 secs
        stats_utils.CreateCounterMetadata("db_request_errors",
                                          fields=[("call", str),
                                                  ("type", str)]),
        stats_utils.CreateEventMetadata(
            "blob_store_poll_hit_latency",
            bins=[0.05, 0.1, 0.2, 0.5, 1, 2, 5, 10, 20, 50]),
        stats_utils.CreateEventMetadata("blob_store_poll_hit_iteration",
                                        bins=[1, 2, 5, 10, 20, 50]),
        stats_utils.CreateEventMetadata(
            "dual_blob_store_write_latency",
            fields=[("backend", str), ("backend_class", str)],
            bins=[0.05, 0.1, 0.2, 0.5, 1, 2, 5, 10, 20, 50]),
        stats_utils.CreateCounterMetadata("dual_blob_store_success_count",
                                          fields=[("backend", str),
                                                  ("backend_class", str)]),
        stats_utils.CreateCounterMetadata("dual_blob_store_error_count",
                                          fields=[("backend", str),
                                                  ("backend_class", str)]),
        stats_utils.CreateCounterMetadata("dual_blob_store_discard_count",
                                          fields=[("backend", str),
                                                  ("backend_class", str)]),

        # Threadpool metrics.
        stats_utils.CreateGaugeMetadata("threadpool_outstanding_tasks",
                                        int,
                                        fields=[("pool_name", str)]),
        stats_utils.CreateGaugeMetadata("threadpool_threads",
                                        int,
                                        fields=[("pool_name", str)]),
        stats_utils.CreateGaugeMetadata("threadpool_cpu_use",
                                        float,
                                        fields=[("pool_name", str)]),
        stats_utils.CreateCounterMetadata("threadpool_task_exceptions",
                                          fields=[("pool_name", str)]),
        stats_utils.CreateEventMetadata("threadpool_working_time",
                                        fields=[("pool_name", str)]),
        stats_utils.CreateEventMetadata("threadpool_queueing_time",
                                        fields=[("pool_name", str)]),

        # Worker and flow-related metrics.
        stats_utils.CreateCounterMetadata("grr_flows_stuck"),
        stats_utils.CreateCounterMetadata("worker_bad_flow_objects",
                                          fields=[("type", str)]),
        stats_utils.CreateCounterMetadata("worker_session_errors",
                                          fields=[("type", str)]),
        stats_utils.CreateCounterMetadata(
            "worker_flow_lock_error",
            docstring=
            "Worker lock failures. We expect these to be high when the "
            "systemis idle."),
        stats_utils.CreateEventMetadata("worker_flow_processing_time",
                                        fields=[("flow", str)]),
        stats_utils.CreateEventMetadata(
            "worker_time_to_retrieve_notifications"),
        stats_utils.CreateCounterMetadata("grr_flow_completed_count"),
        stats_utils.CreateCounterMetadata("grr_flow_errors"),
        stats_utils.CreateCounterMetadata("grr_flow_invalid_flow_count"),
        stats_utils.CreateCounterMetadata("grr_request_retransmission_count"),
        stats_utils.CreateCounterMetadata("grr_response_out_of_order"),
        stats_utils.CreateCounterMetadata("grr_unique_clients"),
        stats_utils.CreateCounterMetadata("grr_worker_states_run"),
        stats_utils.CreateCounterMetadata("grr_well_known_flow_requests"),
        stats_utils.CreateCounterMetadata("flow_starts",
                                          fields=[("flow", str)]),
        stats_utils.CreateCounterMetadata("flow_errors",
                                          fields=[("flow", str)]),
        stats_utils.CreateCounterMetadata("flow_completions",
                                          fields=[("flow", str)]),
        stats_utils.CreateCounterMetadata("well_known_flow_requests",
                                          fields=[("flow", str)]),
        stats_utils.CreateCounterMetadata("well_known_flow_errors",
                                          fields=[("flow", str)]),
        stats_utils.CreateEventMetadata("fleetspeak_last_ping_latency_millis"),

        # Hunt-related metrics.
        stats_utils.CreateCounterMetadata("hunt_output_plugin_verifications",
                                          fields=[("status", str)]),
        stats_utils.CreateCounterMetadata(
            "hunt_output_plugin_verification_errors"),
        stats_utils.CreateCounterMetadata("hunt_output_plugin_errors",
                                          fields=[("plugin", str)]),
        stats_utils.CreateCounterMetadata("hunt_results_ran_through_plugin",
                                          fields=[("plugin", str)]),
        stats_utils.CreateCounterMetadata("hunt_results_compacted"),
        stats_utils.CreateCounterMetadata(
            "hunt_results_compaction_locking_errors"),
        stats_utils.CreateCounterMetadata("hunt_results_added"),

        # GRR-API metrics.
        stats_utils.CreateEventMetadata("api_method_latency",
                                        fields=[("method_name", str),
                                                ("protocol", str),
                                                ("status", str)]),
        stats_utils.CreateEventMetadata("api_access_probe_latency",
                                        fields=[("method_name", str),
                                                ("protocol", str),
                                                ("status", str)]),

        # Client-related metrics.
        stats_utils.CreateCounterMetadata("grr_client_crashes"),
        stats_utils.CreateCounterMetadata("client_pings_by_label",
                                          fields=[("label", str)]),

        # Metrics specific to GRR frontends.
        stats_utils.CreateGaugeMetadata("frontend_active_count",
                                        int,
                                        fields=[("source", str)]),
        stats_utils.CreateGaugeMetadata("frontend_max_active_count", int),
        stats_utils.CreateCounterMetadata("frontend_http_requests",
                                          fields=[("action", str),
                                                  ("protocol", str)]),
        stats_utils.CreateCounterMetadata("frontend_in_bytes",
                                          fields=[("source", str)]),
        stats_utils.CreateCounterMetadata("frontend_out_bytes",
                                          fields=[("source", str)]),
        stats_utils.CreateCounterMetadata("frontend_request_count",
                                          fields=[("source", str)]),
        stats_utils.CreateCounterMetadata("frontend_inactive_request_count",
                                          fields=[("source", str)]),
        stats_utils.CreateEventMetadata("frontend_request_latency",
                                        fields=[("source", str)]),
        stats_utils.CreateEventMetadata("grr_frontendserver_handle_time"),
        stats_utils.CreateCounterMetadata("grr_frontendserver_handle_num"),
        stats_utils.CreateGaugeMetadata("grr_frontendserver_client_cache_size",
                                        int),
        stats_utils.CreateCounterMetadata("grr_messages_sent"),
        stats_utils.CreateCounterMetadata("grr_pub_key_cache",
                                          fields=[("type", str)]),
    ]
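A minimal sketch of exercising one of the event metrics declared above with the Timed decorator (illustrative; Timed and FakeStatsContext appear in Examples #8 and #12, and the function here is hypothetical):

# Hypothetical: record runtimes into worker_time_to_retrieve_notifications.
collector = default_stats_collector.DefaultStatsCollector(GetMetadata())

@stats_utils.Timed("worker_time_to_retrieve_notifications")
def FetchNotifications():
  pass  # Placeholder work; the decorator records elapsed time on return.

with stats_test_utils.FakeStatsContext(collector):
  FetchNotifications()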
Code Example #26
File: stats_regression_test.py Project: ehossam/grr
  def Run(self):
    real_metric_metadata = list(
        itervalues(stats_collector_instance.Get().GetAllMetricsMetadata()))
    test_metadata = real_metric_metadata + [
        stats_utils.CreateCounterMetadata(
            _TEST_COUNTER, docstring="Sample counter metric."),
        stats_utils.CreateGaugeMetadata(
            _TEST_GAUGE_METRIC, float, docstring="Sample gauge metric."),
        stats_utils.CreateEventMetadata(
            _TEST_EVENT_METRIC, docstring="Sample event metric."),
    ]
    stats_collector = default_stats_collector.DefaultStatsCollector(
        test_metadata)
    with stats_test_utils.FakeStatsContext(stats_collector):
      for i in range(10):
        with test_lib.FakeTime(42 + i * 60):
          stats_collector.IncrementCounter(_TEST_COUNTER)
          stats_collector.SetGaugeValue(_TEST_GAUGE_METRIC, i * 0.5)
          stats_collector.RecordEvent(_TEST_EVENT_METRIC, 0.42 + 0.5 * i)

          with aff4.FACTORY.Create(
              None, aff4_stats_store.StatsStore, mode="w",
              token=self.token) as stats_store:
            stats_store.WriteStats(process_id="worker_1")

      range_start = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(42)
      range_end = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(3600)

      self.Check(
          "GetStatsStoreMetric",
          args=stats_plugin.ApiGetStatsStoreMetricArgs(
              component="WORKER",
              metric_name=_TEST_COUNTER,
              start=range_start,
              end=range_end))
      self.Check(
          "GetStatsStoreMetric",
          args=stats_plugin.ApiGetStatsStoreMetricArgs(
              component="WORKER",
              metric_name=_TEST_COUNTER,
              start=range_start,
              end=range_end,
              rate="1m"))

      self.Check(
          "GetStatsStoreMetric",
          args=stats_plugin.ApiGetStatsStoreMetricArgs(
              component="WORKER",
              metric_name=_TEST_GAUGE_METRIC,
              start=range_start,
              end=range_end))

      self.Check(
          "GetStatsStoreMetric",
          args=stats_plugin.ApiGetStatsStoreMetricArgs(
              component="WORKER",
              metric_name=_TEST_EVENT_METRIC,
              start=range_start,
              end=range_end))
      self.Check(
          "GetStatsStoreMetric",
          args=stats_plugin.ApiGetStatsStoreMetricArgs(
              component="WORKER",
              metric_name=_TEST_EVENT_METRIC,
              start=range_start,
              end=range_end,
              distribution_handling_mode="DH_COUNT"))