Example #1
  def Run(self):
    # We have to include all server metadata in the test context since server
    # code that uses the metrics runs within the context.
    non_test_metadata = list(
        itervalues(stats_collector_instance.Get().GetAllMetricsMetadata()))
    test_metadata = non_test_metadata + [
        stats_utils.CreateCounterMetadata(
            _TEST_COUNTER, docstring="Sample counter metric."),
        stats_utils.CreateGaugeMetadata(
            _TEST_GAUGE_METRIC, str, docstring="Sample gauge metric."),
        stats_utils.CreateEventMetadata(
            _TEST_EVENT_METRIC, docstring="Sample event metric."),
    ]
    stats_collector = default_stats_collector.DefaultStatsCollector(
        test_metadata)
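    # FakeStatsContext installs stats_collector as the process-wide stats
    # collector for the duration of the with-block, so server code under
    # test reports its metrics to it.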
    with stats_test_utils.FakeStatsContext(stats_collector):
      with aff4.FACTORY.Create(
          None, aff4_stats_store.StatsStore, mode="w",
          token=self.token) as stats_store:
        stats_store.WriteStats(process_id="worker_1")

      # We use mixins to run the same tests against multiple APIs.
      # Result-filtering is only needed for HTTP API tests.
      if isinstance(self, api_regression_http.HttpApiRegressionTestMixinBase):
        api_post_process_fn = self._PostProcessApiResult
      else:
        api_post_process_fn = None

      self.Check(
          "ListStatsStoreMetricsMetadata",
          args=stats_plugin.ApiListStatsStoreMetricsMetadataArgs(
              component="WORKER"),
          api_post_process_fn=api_post_process_fn)
Example #2
    def setUp(self):
        super(StatsStoreTest, self).setUp()

        self.process_id = "some_pid"
        self.stats_store = aff4.FACTORY.Create(None,
                                               stats_store.StatsStore,
                                               mode="w",
                                               token=self.token)
        fake_stats_collector = default_stats_collector.DefaultStatsCollector([
            stats_utils.CreateCounterMetadata("counter"),
            stats_utils.CreateCounterMetadata("counter_with_fields",
                                              fields=[("source", str)]),
            stats_utils.CreateEventMetadata("events"),
            stats_utils.CreateEventMetadata("events_with_fields",
                                            fields=[("source", str)]),
            stats_utils.CreateGaugeMetadata("int_gauge", int),
            stats_utils.CreateGaugeMetadata("str_gauge", str),
            stats_utils.CreateGaugeMetadata("str_gauge_with_fields",
                                            str,
                                            fields=[("task", int)])
        ])
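        # setUp() cannot keep a with-block open across the test, so the fake
        # context is started manually and its stop() registered as a cleanup.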
        fake_stats_context = stats_test_utils.FakeStatsContext(
            fake_stats_collector)
        fake_stats_context.start()
        self.addCleanup(fake_stats_context.stop)
Example #3
 def _SetupFakeStatsContext(self):
   """Creates a stats context for running tests based on defined metrics."""
   metrics_metadata = list(
       itervalues(stats_collector_instance.Get().GetAllMetricsMetadata()))
   fake_stats_collector = prometheus_stats_collector.PrometheusStatsCollector(
       metrics_metadata)
   fake_stats_context = stats_test_utils.FakeStatsContext(fake_stats_collector)
   fake_stats_context.start()
   self.addCleanup(fake_stats_context.stop)
Example #4
 def setUp(self):
   super(StatsStoreDataQueryTest, self).setUp()
   self.process_id = "some_pid"
   self.stats_store = aff4.FACTORY.Create(
       None, stats_store.StatsStore, mode="w", token=self.token)
   fake_stats_context = stats_test_utils.FakeStatsContext(
       _CreateFakeStatsCollector())
   fake_stats_context.start()
   self.addCleanup(fake_stats_context.stop)
Example #5
 def testPurgeServerStats(self):
     if not data_store.RelationalDBReadEnabled():
         self.skipTest("Test is only for the relational DB. Skipping...")
     fake_stats_collector = default_stats_collector.DefaultStatsCollector([
         stats_utils.CreateCounterMetadata("fake_counter"),
     ])
     timestamp0 = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(1)
     timestamp1 = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(2)
     timestamp2 = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(3600)
     timestamp3 = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(4800)
     config_overrides = {
         "Database.useForReads.stats": True,
         "StatsStore.stats_ttl_hours": 1
     }
     with test_lib.ConfigOverrider(config_overrides), \
          stats_test_utils.FakeStatsContext(fake_stats_collector), \
          mock.patch.object(system, "_STATS_DELETION_BATCH_SIZE", 1):
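          # A batch size of 1 forces the purge cron below to delete expired
          # stats entries one at a time, exercising its batching logic.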
          with test_lib.FakeTime(timestamp0):
             stats_store._WriteStats(process_id="fake_process_id")
          with test_lib.FakeTime(timestamp1):
             stats_collector_instance.Get().IncrementCounter("fake_counter")
             stats_store._WriteStats(process_id="fake_process_id")
             expected_results = {
                 "fake_process_id": {
                     "fake_counter": [(0, timestamp0), (1, timestamp1)]
                 }
             }
             self.assertDictEqual(
                 stats_store.ReadStats("f", "fake_counter"),
                 expected_results)
         with test_lib.FakeTime(timestamp2):
             stats_store._WriteStats(process_id="fake_process_id")
             expected_results = {
                 "fake_process_id": {
                     "fake_counter": [(0, timestamp0), (1, timestamp1),
                                      (1, timestamp2)]
                 }
             }
             self.assertDictEqual(
                 stats_store.ReadStats("f", "fake_counter"),
                 expected_results)
         with test_lib.FakeTime(timestamp3):
             cron = system.PurgeServerStatsCronJob(
                 rdf_cronjobs.CronJobRun(), rdf_cronjobs.CronJob())
             cron.Run()
             # timestamp0 and timestamp1 are older than 1h, so they should get
             # deleted.
             expected_results = {
                 "fake_process_id": {
                     "fake_counter": [(1, timestamp2)]
                 }
             }
             self.assertDictEqual(
                 stats_store.ReadStats("f", "fake_counter"),
                 expected_results)
             self.assertIn("Deleted 2 stats entries.",
                           cron.run_state.log_message)
Example #6
  def setUp(self):
    super(StatsStoreTest, self).setUp()

    config_overrider = test_lib.ConfigOverrider({
        "Database.useForReads.stats": True,
    })
    config_overrider.Start()
    self.addCleanup(config_overrider.Stop)

    fake_stats_context = stats_test_utils.FakeStatsContext(
        _CreateFakeStatsCollector())
    fake_stats_context.start()
    self.addCleanup(fake_stats_context.stop)
Example #7
 def testPurgeServerStats(self):
     if not data_store.RelationalDBReadEnabled():
         self.skipTest("Test is only for the relational DB. Skipping...")
     fake_stats_collector = default_stats_collector.DefaultStatsCollector([
         stats_utils.CreateCounterMetadata("fake_counter"),
     ])
     timestamp1 = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(1)
     timestamp2 = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(3600)
     timestamp3 = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(4800)
     config_overrides = {
         "Database.useForReads.stats": True,
         "StatsStore.stats_ttl_hours": 1
     }
     with test_lib.ConfigOverrider(config_overrides):
         with stats_test_utils.FakeStatsContext(fake_stats_collector):
              with test_lib.FakeTime(timestamp1):
                 stats_collector_instance.Get().IncrementCounter(
                     "fake_counter")
                 stats_store._WriteStats(process_id="fake_process_id")
                 expected_results = {
                     "fake_process_id": {
                         "fake_counter": [(1, timestamp1)]
                     }
                 }
                 self.assertDictEqual(
                     stats_store.ReadStats("f", "fake_counter"),
                     expected_results)
             with test_lib.FakeTime(timestamp2):
                 stats_store._WriteStats(process_id="fake_process_id")
                 expected_results = {
                     "fake_process_id": {
                         "fake_counter": [(1, timestamp1), (1, timestamp2)]
                     }
                 }
                 self.assertDictEqual(
                     stats_store.ReadStats("f", "fake_counter"),
                     expected_results)
             with test_lib.FakeTime(timestamp3):
                 system.PurgeServerStatsCronJob(
                     rdf_cronjobs.CronJobRun(),
                     rdf_cronjobs.CronJob()).Run()
                 # timestamp1 is older than 1h, so it should get deleted.
                 expected_results = {
                     "fake_process_id": {
                         "fake_counter": [(1, timestamp2)]
                     }
                 }
                 self.assertDictEqual(
                     stats_store.ReadStats("f", "fake_counter"),
                     expected_results)
Example #8
  def testEventMetricGetsRendered(self):
    stats_collector = prometheus_stats_collector.PrometheusStatsCollector(
        [stats_utils.CreateEventMetadata("api_method_latency")])
    with stats_test_utils.FakeStatsContext(stats_collector):
      stats_collector_instance.Get().RecordEvent("api_method_latency", 15)

      varz_json = json.loads(stats_server.BuildVarzJsonString())
      self.assertEqual(varz_json["api_method_latency"]["info"], {
          "metric_type": "EVENT",
          "value_type": "DISTRIBUTION"
      })
      self.assertCountEqual(
          iterkeys(varz_json["api_method_latency"]["value"]),
          ["sum", "bins_heights", "counter"])
Example #9
    def testMakeIncreasingHandlesValuesResets(self):
        # Write test data.
        self.stats_store.WriteStats(
            process_id="pid1",
            timestamp=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(0))

        stats_collector_instance.Get().IncrementCounter(_SINGLE_DIM_COUNTER)
        self.stats_store.WriteStats(
            process_id="pid1",
            timestamp=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(30))

        stats_collector_instance.Get().IncrementCounter(_SINGLE_DIM_COUNTER)
        self.stats_store.WriteStats(
            process_id="pid1",
            timestamp=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(60))

        # Simulate a process restart by resetting the stats collector.
        with stats_test_utils.FakeStatsContext(_CreateFakeStatsCollector()):
            self.stats_store.WriteStats(
                process_id="pid1",
                timestamp=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(90))

            # We've reset the counter at the 60th second, so we get the
            # following time series:
            # 1970-01-01 00:00:00    0
            # 1970-01-01 00:00:30    1
            # 1970-01-01 00:01:00    2
            # 1970-01-01 00:01:30    0
            stats_data = self.stats_store.ReadStats(process_id="pid1")
            query = stats_store.StatsStoreDataQuery(stats_data)

            ts = query.In(_SINGLE_DIM_COUNTER).TakeValue().ts

            self.assertAlmostEqual(ts.data[0][0], 0)
            self.assertAlmostEqual(ts.data[1][0], 1)
            self.assertAlmostEqual(ts.data[2][0], 2)
            self.assertAlmostEqual(ts.data[3][0], 0)

            # MakeIncreasing detects the reset and adjusts the values that
            # follow the reset point so the series stays non-decreasing:
            # 1970-01-01 00:00:00    0
            # 1970-01-01 00:00:30    1
            # 1970-01-01 00:01:00    2
            # 1970-01-01 00:01:30    2
            ts = query.MakeIncreasing().ts

            self.assertAlmostEqual(ts.data[0][0], 0)
            self.assertAlmostEqual(ts.data[1][0], 1)
            self.assertAlmostEqual(ts.data[2][0], 2)
            self.assertAlmostEqual(ts.data[3][0], 2)
Example #10
 def _SetUpFakeStatsContext(self):
     """Registers stats metrics used by tests in this class."""
     # DB implementations might interact with real metrics (not defined in this
     # test), so we make sure that they get registered.
     real_metrics = list(
         stats_collector_instance.Get().GetAllMetricsMetadata().values())
     test_metrics = [
         stats_utils.CreateCounterMetadata(_SINGLE_DIM_COUNTER),
         stats_utils.CreateCounterMetadata(_MULTI_DIM_COUNTER,
                                           fields=[("str_field1", str),
                                                   ("str_field2", str)]),
     ]
     fake_stats_context = stats_test_utils.FakeStatsContext(
         default_stats_collector.DefaultStatsCollector(real_metrics +
                                                       test_metrics))
     fake_stats_context.start()
     self.addCleanup(fake_stats_context.stop)
Example #11
  def Run(self):
    real_metric_metadata = list(
        itervalues(stats_collector_instance.Get().GetAllMetricsMetadata()))
    test_metadata = real_metric_metadata + [
        stats_utils.CreateCounterMetadata(
            _TEST_COUNTER, docstring="Sample counter metric."),
        stats_utils.CreateGaugeMetadata(
            _TEST_GAUGE_METRIC, float, docstring="Sample gauge metric."),
        stats_utils.CreateEventMetadata(
            _TEST_EVENT_METRIC, docstring="Sample event metric."),
    ]
    stats_collector = default_stats_collector.DefaultStatsCollector(
        test_metadata)
    with stats_test_utils.FakeStatsContext(stats_collector):
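      # Write one sample per simulated minute so that the rate="1m" query
      # below has a meaningful time series to operate on.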
      for i in range(10):
        with test_lib.FakeTime(42 + i * 60):
          stats_collector.IncrementCounter(_TEST_COUNTER)
          stats_collector.SetGaugeValue(_TEST_GAUGE_METRIC, i * 0.5)
          stats_collector.RecordEvent(_TEST_EVENT_METRIC, 0.42 + 0.5 * i)

          with aff4.FACTORY.Create(
              None, aff4_stats_store.StatsStore, mode="w",
              token=self.token) as stats_store:
            stats_store.WriteStats(process_id="worker_1")

      range_start = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(42)
      range_end = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(3600)

      self.Check(
          "GetStatsStoreMetric",
          args=stats_plugin.ApiGetStatsStoreMetricArgs(
              component="WORKER",
              metric_name=_TEST_COUNTER,
              start=range_start,
              end=range_end))
      self.Check(
          "GetStatsStoreMetric",
          args=stats_plugin.ApiGetStatsStoreMetricArgs(
              component="WORKER",
              metric_name=_TEST_COUNTER,
              start=range_start,
              end=range_end,
              rate="1m"))

      self.Check(
          "GetStatsStoreMetric",
          args=stats_plugin.ApiGetStatsStoreMetricArgs(
              component="WORKER",
              metric_name=_TEST_GAUGE_METRIC,
              start=range_start,
              end=range_end))

      self.Check(
          "GetStatsStoreMetric",
          args=stats_plugin.ApiGetStatsStoreMetricArgs(
              component="WORKER",
              metric_name=_TEST_EVENT_METRIC,
              start=range_start,
              end=range_end))
      self.Check(
          "GetStatsStoreMetric",
          args=stats_plugin.ApiGetStatsStoreMetricArgs(
              component="WORKER",
              metric_name=_TEST_EVENT_METRIC,
              start=range_start,
              end=range_end,
              distribution_handling_mode="DH_COUNT"))
Example #12
 def testPurgeServerStats(self):
     if not data_store.RelationalDBReadEnabled():
         self.skipTest("Test is only for the relational DB. Skipping...")
     fake_stats_collector = prometheus_stats_collector.PrometheusStatsCollector(
         [
             stats_utils.CreateCounterMetadata("fake_counter"),
         ])
     timestamp0 = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(1)
     timestamp1 = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(2)
     timestamp2 = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(3600)
     timestamp3 = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(4800)
     config_overrides = {
         "Database.useForReads.stats": True,
         "StatsStore.stats_ttl_hours": 1
     }
     zero_duration = rdfvalue.Duration(0)
     # Backslash continuation is explicitly allowed by Google's style guide for
     # nested context manager expressions spanning 3 or more lines.
     # pylint: disable=g-backslash-continuation
     with test_lib.ConfigOverrider(config_overrides), \
          stats_test_utils.FakeStatsContext(fake_stats_collector), \
          mock.patch.object(system, "_STATS_DELETION_BATCH_SIZE", 1), \
          mock.patch.object(system, "_stats_checkpoint_period", zero_duration):
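          # Batch size 1 plus a zero checkpoint period makes the purge cron
          # delete entries one at a time and log progress at every checkpoint,
          # producing the two log lines asserted at the end of the test.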
          with test_lib.FakeTime(timestamp0):
             stats_store._WriteStats(process_id="fake_process_id")
          with test_lib.FakeTime(timestamp1):
             stats_collector_instance.Get().IncrementCounter("fake_counter")
             stats_store._WriteStats(process_id="fake_process_id")
             expected_results = {
                 "fake_process_id": {
                     "fake_counter": [(0, timestamp0), (1, timestamp1)]
                 }
             }
             self.assertDictEqual(
                 stats_store.ReadStats("f", "fake_counter"),
                 expected_results)
         with test_lib.FakeTime(timestamp2):
             stats_store._WriteStats(process_id="fake_process_id")
             expected_results = {
                 "fake_process_id": {
                     "fake_counter": [(0, timestamp0), (1, timestamp1),
                                      (1, timestamp2)]
                 }
             }
             self.assertDictEqual(
                 stats_store.ReadStats("f", "fake_counter"),
                 expected_results)
         with test_lib.FakeTime(timestamp3):
             cron_name = compatibility.GetName(
                 system.PurgeServerStatsCronJob)
             cronjobs.ScheduleSystemCronJobs(names=[cron_name])
             job_data = data_store.REL_DB.ReadCronJobs([cron_name])[0]
             cron_run = rdf_cronjobs.CronJobRun(cron_job_id=cron_name)
             cron_run.GenerateRunId()
             cron_run.started_at = rdfvalue.RDFDatetime.Now()
             cron = system.PurgeServerStatsCronJob(cron_run, job_data)
             cron.Run()
             # timestamp0 and timestamp1 are older than 1h, so they should get
             # deleted.
             expected_results = {
                 "fake_process_id": {
                     "fake_counter": [(1, timestamp2)]
                 }
             }
             self.assertDictEqual(
                 stats_store.ReadStats("f", "fake_counter"),
                 expected_results)
             self.assertEqual(
                 "Deleted 2 stats entries.\nDeleted 1 stats entries.",
                 cron.run_state.log_message)
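
All twelve examples share the same fixture pattern: build a stats collector from the metric metadata the test needs, wrap it in stats_test_utils.FakeStatsContext, and either enter it as a with-block or start() it and register stop() as a cleanup. Below is a minimal sketch of that pattern; the grr_response_core.stats import paths are assumptions, and the metric name "my_test_counter" and the test class are hypothetical.

import unittest

from grr_response_core.stats import default_stats_collector
from grr_response_core.stats import stats_collector_instance
from grr_response_core.stats import stats_test_utils
from grr_response_core.stats import stats_utils


def _MakeFakeStatsContext():
  """Builds a FakeStatsContext exposing one hypothetical counter metric."""
  collector = default_stats_collector.DefaultStatsCollector([
      stats_utils.CreateCounterMetadata("my_test_counter"),
  ])
  return stats_test_utils.FakeStatsContext(collector)


class FakeStatsPatternTest(unittest.TestCase):

  def setUp(self):
    super(FakeStatsPatternTest, self).setUp()
    # setUp-style fixture (Examples 2-4, 6, 10): start the context manually
    # and register its stop() as a cleanup, since setUp() cannot hold a
    # with-block open for the duration of the test.
    fake_stats_context = _MakeFakeStatsContext()
    fake_stats_context.start()
    self.addCleanup(fake_stats_context.stop)

  def testCounterVisibleThroughGlobalInstance(self):
    # While the fake context is active, the global collector is the fake
    # one, so code that calls stats_collector_instance.Get() reports here.
    stats_collector_instance.Get().IncrementCounter("my_test_counter")

The with-block style used in Examples 1, 5, 7-9, 11 and 12 is equivalent; it simply scopes the fake collector to the block instead of the whole test.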