Example #1
0
 def testDeleteStatsFromLegacyDB(self):
   """Checks that _DeleteStatsFromLegacyDB drops entries older than the TTL."""
   with test_lib.ConfigOverrider({"StatsStore.stats_ttl_hours": 1}):
     # Two samples written inside the first hour, then a deletion time that
     # puts the first sample past the 1h TTL.
     ts_first = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(1)
     ts_second = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(3600)
     ts_delete = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(4800)
     with test_lib.FakeTime(ts_first):
       stats_collector_instance.Get().IncrementCounter(_SINGLE_DIM_COUNTER)
       stats_store._WriteStats(process_id="fake_process_id")
       self.assertDictEqual(
           stats_store.ReadStats("f", _SINGLE_DIM_COUNTER),
           {"fake_process_id": {
               _SINGLE_DIM_COUNTER: [(1, ts_first)]
           }})
     with test_lib.FakeTime(ts_second):
       stats_store._WriteStats(process_id="fake_process_id")
       self.assertDictEqual(
           stats_store.ReadStats("f", _SINGLE_DIM_COUNTER),
           {"fake_process_id": {
               _SINGLE_DIM_COUNTER: [(1, ts_first), (1, ts_second)]
           }})
     with test_lib.FakeTime(ts_delete):
       stats_store._DeleteStatsFromLegacyDB("fake_process_id")
       # The first sample is now older than 1h and must have been purged.
       self.assertDictEqual(
           stats_store.ReadStats("f", _SINGLE_DIM_COUNTER),
           {"fake_process_id": {
               _SINGLE_DIM_COUNTER: [(1, ts_second)]
           }})
Example #2
0
 def testPurgeServerStats(self):
     """Checks that PurgeServerStatsCronJob deletes stats older than the TTL."""
     if not data_store.RelationalDBReadEnabled():
         self.skipTest("Test is only for the relational DB. Skipping...")
     fake_stats_collector = default_stats_collector.DefaultStatsCollector([
         stats_utils.CreateCounterMetadata("fake_counter"),
     ])
     # Two write times within the first hour, plus a purge time that puts the
     # first write past the 1h TTL.
     timestamp1 = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(1)
     timestamp2 = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(3600)
     timestamp3 = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(4800)
     config_overrides = {
         "Database.useForReads.stats": True,
         "StatsStore.stats_ttl_hours": 1
     }
     with test_lib.ConfigOverrider(config_overrides):
         with stats_test_utils.FakeStatsContext(fake_stats_collector):
             # timestamp1 is already an RDFDatetime, so it is passed to
             # FakeTime directly — consistent with the FakeTime calls below
             # (the previous redundant RDFDatetime() re-wrap was dropped).
             with test_lib.FakeTime(timestamp1):
                 stats_collector_instance.Get().IncrementCounter(
                     "fake_counter")
                 stats_store._WriteStats(process_id="fake_process_id")
                 expected_results = {
                     "fake_process_id": {
                         "fake_counter": [(1, timestamp1)]
                     }
                 }
                 self.assertDictEqual(
                     stats_store.ReadStats("f", "fake_counter"),
                     expected_results)
             with test_lib.FakeTime(timestamp2):
                 stats_store._WriteStats(process_id="fake_process_id")
                 expected_results = {
                     "fake_process_id": {
                         "fake_counter": [(1, timestamp1), (1, timestamp2)]
                     }
                 }
                 self.assertDictEqual(
                     stats_store.ReadStats("f", "fake_counter"),
                     expected_results)
             with test_lib.FakeTime(timestamp3):
                 system.PurgeServerStatsCronJob(
                     rdf_cronjobs.CronJobRun(),
                     rdf_cronjobs.CronJob()).Run()
                 # timestamp1 is older than 1h, so it should get deleted.
                 expected_results = {
                     "fake_process_id": {
                         "fake_counter": [(1, timestamp2)]
                     }
                 }
                 self.assertDictEqual(
                     stats_store.ReadStats("f", "fake_counter"),
                     expected_results)
Example #3
0
    def testReadStats(self):
        """Checks ReadStats output for 0-, 1- and 2-field counter metrics."""
        with test_lib.FakeTime(rdfvalue.RDFDatetime(1000)):
            stats_collector_instance.Get().IncrementCounter(
                _SINGLE_DIM_COUNTER)
            stats_collector_instance.Get().IncrementCounter(
                _COUNTER_WITH_ONE_FIELD, fields=["fieldval1"])
            stats_collector_instance.Get().IncrementCounter(
                _COUNTER_WITH_TWO_FIELDS, fields=["fieldval2", 3])
            stats_store._WriteStats(process_id="fake_process_id")

        with test_lib.FakeTime(rdfvalue.RDFDatetime(2000)):
            stats_collector_instance.Get().IncrementCounter(
                _COUNTER_WITH_TWO_FIELDS, fields=["fieldval2", 3])
            stats_collector_instance.Get().IncrementCounter(
                _COUNTER_WITH_TWO_FIELDS, fields=["fieldval2", 4])
            stats_store._WriteStats(process_id="fake_process_id")

        expected_single_dim_results = {
            "fake_process_id": {
                _SINGLE_DIM_COUNTER: [(1, 1000), (1, 2000)]
            }
        }
        expected_multi_dim1_results = {
            "fake_process_id": {
                _COUNTER_WITH_ONE_FIELD: {
                    "fieldval1": [(1, 1000), (1, 2000)]
                }
            }
        }
        # The ["fieldval2", 3] combination was incremented in both snapshots,
        # so its second sample carries the cumulative count of 2.
        expected_multi_dim2_results = {
            "fake_process_id": {
                _COUNTER_WITH_TWO_FIELDS: {
                    "fieldval2": {
                        3: [(1, 1000), (2, 2000)],
                        4: [(1, 2000)]
                    }
                }
            }
        }

        # NOTE(review): the varying process-id arguments ("f", "fake", full
        # id) presumably exercise prefix matching in ReadStats — confirm.
        self.assertDictEqual(stats_store.ReadStats("f", _SINGLE_DIM_COUNTER),
                             expected_single_dim_results)
        self.assertDictEqual(
            stats_store.ReadStats("fake", _COUNTER_WITH_ONE_FIELD),
            expected_multi_dim1_results)
        # Use assertDictEqual, consistent with the two checks above (and it
        # produces a readable diff on failure).
        self.assertDictEqual(
            stats_store.ReadStats("fake_process_id", _COUNTER_WITH_TWO_FIELDS),
            expected_multi_dim2_results)
Example #4
0
    def SetupSampleMetrics(token=None):
        """Writes sample frontend-handler counter values to the stats store.

        Args:
            token: Unused; kept for interface compatibility with callers.
        """
        now = rdfvalue.RDFDatetime.Now()
        # (value, timestamp) samples covering the last 50 minutes at
        # 5-minute intervals.
        handle_data = [(3, now - rdfvalue.Duration("50m")),
                       (0, now - rdfvalue.Duration("45m")),
                       (1, now - rdfvalue.Duration("40m")),
                       (0, now - rdfvalue.Duration("35m")),
                       (0, now - rdfvalue.Duration("30m")),
                       (1, now - rdfvalue.Duration("25m")),
                       (0, now - rdfvalue.Duration("20m")),
                       (0, now - rdfvalue.Duration("15m")),
                       (0, now - rdfvalue.Duration("10m")),
                       (5, now - rdfvalue.Duration("5m")),
                       (0, now)]  # pyformat: disable

        for value, timestamp in handle_data:
            # FakeTime expects seconds, hence the microseconds / 1e6.
            micros = timestamp.AsMicrosecondsSinceEpoch()
            with test_lib.FakeTime(micros / 1e6):
                stats_collector_instance.Get().IncrementCounter(
                    "grr_frontendserver_handle_num", value)
                stats_store._WriteStats(process_id="frontend")
Example #5
0
 def testPurgeServerStats(self):
     """Checks batched deletion of expired stats by the purge cron job."""
     if not data_store.RelationalDBReadEnabled():
         self.skipTest("Test is only for the relational DB. Skipping...")
     fake_stats_collector = default_stats_collector.DefaultStatsCollector([
         stats_utils.CreateCounterMetadata("fake_counter"),
     ])
     # Two entries within the first hour, one at the 1h mark, and a purge
     # time that puts the first two past the 1h TTL.
     timestamp0 = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(1)
     timestamp1 = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(2)
     timestamp2 = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(3600)
     timestamp3 = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(4800)
     config_overrides = {
         "Database.useForReads.stats": True,
         "StatsStore.stats_ttl_hours": 1
     }
     # Batch size 1 forces the cron job to delete entries one at a time.
     with test_lib.ConfigOverrider(config_overrides), \
          stats_test_utils.FakeStatsContext(fake_stats_collector), \
          mock.patch.object(system, "_STATS_DELETION_BATCH_SIZE", 1):
         # The timestamps are already RDFDatetime instances; they are passed
         # to FakeTime directly, consistent with the FakeTime calls below
         # (the previous redundant RDFDatetime() re-wraps were dropped).
         with test_lib.FakeTime(timestamp0):
             stats_store._WriteStats(process_id="fake_process_id")
         with test_lib.FakeTime(timestamp1):
             stats_collector_instance.Get().IncrementCounter("fake_counter")
             stats_store._WriteStats(process_id="fake_process_id")
             expected_results = {
                 "fake_process_id": {
                     "fake_counter": [(0, timestamp0), (1, timestamp1)]
                 }
             }
             self.assertDictEqual(
                 stats_store.ReadStats("f", "fake_counter"),
                 expected_results)
         with test_lib.FakeTime(timestamp2):
             stats_store._WriteStats(process_id="fake_process_id")
             expected_results = {
                 "fake_process_id": {
                     "fake_counter": [(0, timestamp0), (1, timestamp1),
                                      (1, timestamp2)]
                 }
             }
             self.assertDictEqual(
                 stats_store.ReadStats("f", "fake_counter"),
                 expected_results)
         with test_lib.FakeTime(timestamp3):
             cron = system.PurgeServerStatsCronJob(
                 rdf_cronjobs.CronJobRun(), rdf_cronjobs.CronJob())
             cron.Run()
             # timestamp0 and timestamp1 are older than 1h, so they should get
             # deleted.
             expected_results = {
                 "fake_process_id": {
                     "fake_counter": [(1, timestamp2)]
                 }
             }
             self.assertDictEqual(
                 stats_store.ReadStats("f", "fake_counter"),
                 expected_results)
             self.assertIn("Deleted 2 stats entries.",
                           cron.run_state.log_message)
Example #6
0
    def Run(self):
        """Records sample metrics, then runs golden checks on the stats API."""
        with test_lib.ConfigOverrider({"Database.useForReads.stats": True}):
            # Start from the real metric metadata and add three test-only
            # metrics on top of it.
            test_metadata = list(
                itervalues(
                    stats_collector_instance.Get().GetAllMetricsMetadata()))
            test_metadata.append(
                stats_utils.CreateCounterMetadata(
                    _TEST_COUNTER, docstring="Sample counter metric."))
            test_metadata.append(
                stats_utils.CreateGaugeMetadata(
                    _TEST_GAUGE_METRIC,
                    float,
                    docstring="Sample gauge metric."))
            test_metadata.append(
                stats_utils.CreateEventMetadata(
                    _TEST_EVENT_METRIC, docstring="Sample event metric."))
            stats_collector = default_stats_collector.DefaultStatsCollector(
                test_metadata)
            with stats_test_utils.FakeStatsContext(stats_collector):
                # Write one snapshot per minute, starting at t=42s.
                for i in range(10):
                    with test_lib.FakeTime(42 + i * 60):
                        stats_collector.IncrementCounter(_TEST_COUNTER)
                        stats_collector.SetGaugeValue(
                            _TEST_GAUGE_METRIC, i * 0.5)
                        stats_collector.RecordEvent(
                            _TEST_EVENT_METRIC, 0.42 + 0.5 * i)
                        stats_store._WriteStats(process_id="worker_1")

                range_start = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(42)
                range_end = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(3600)

                def _CheckMetric(metric_name, **extra_args):
                    # Golden-data check for one metric over the sample range.
                    self.Check(
                        "GetStatsStoreMetric",
                        args=stats_plugin.ApiGetStatsStoreMetricArgs(
                            component="WORKER",
                            metric_name=metric_name,
                            start=range_start,
                            end=range_end,
                            **extra_args))

                _CheckMetric(_TEST_COUNTER)
                _CheckMetric(_TEST_COUNTER, rate="1m")
                _CheckMetric(_TEST_GAUGE_METRIC)
                _CheckMetric(_TEST_EVENT_METRIC)
                _CheckMetric(
                    _TEST_EVENT_METRIC,
                    distribution_handling_mode="DH_COUNT")
Example #7
0
 def testPurgeServerStats(self):
     """Checks batched, checkpointed deletion of expired stats entries."""
     if not data_store.RelationalDBReadEnabled():
         self.skipTest("Test is only for the relational DB. Skipping...")
     fake_stats_collector = prometheus_stats_collector.PrometheusStatsCollector(
         [
             stats_utils.CreateCounterMetadata("fake_counter"),
         ])
     # Two entries within the first hour, one at the 1h mark, and a purge
     # time that puts the first two past the 1h TTL.
     timestamp0 = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(1)
     timestamp1 = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(2)
     timestamp2 = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(3600)
     timestamp3 = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(4800)
     config_overrides = {
         "Database.useForReads.stats": True,
         "StatsStore.stats_ttl_hours": 1
     }
     # A zero checkpoint period makes the cron job log after every batch.
     zero_duration = rdfvalue.Duration(0)
     # Backslash continuation is explicitly allowed by Google's style guide for
     # nested context manager expressions spanning 3 or more lines.
     # pylint: disable=g-backslash-continuation
     with test_lib.ConfigOverrider(config_overrides), \
          stats_test_utils.FakeStatsContext(fake_stats_collector), \
          mock.patch.object(system, "_STATS_DELETION_BATCH_SIZE", 1), \
          mock.patch.object(system, "_stats_checkpoint_period", zero_duration):
         # The timestamps are already RDFDatetime instances; they are passed
         # to FakeTime directly, consistent with the FakeTime calls below
         # (the previous redundant RDFDatetime() re-wraps were dropped).
         with test_lib.FakeTime(timestamp0):
             stats_store._WriteStats(process_id="fake_process_id")
         with test_lib.FakeTime(timestamp1):
             stats_collector_instance.Get().IncrementCounter("fake_counter")
             stats_store._WriteStats(process_id="fake_process_id")
             expected_results = {
                 "fake_process_id": {
                     "fake_counter": [(0, timestamp0), (1, timestamp1)]
                 }
             }
             self.assertDictEqual(
                 stats_store.ReadStats("f", "fake_counter"),
                 expected_results)
         with test_lib.FakeTime(timestamp2):
             stats_store._WriteStats(process_id="fake_process_id")
             expected_results = {
                 "fake_process_id": {
                     "fake_counter": [(0, timestamp0), (1, timestamp1),
                                      (1, timestamp2)]
                 }
             }
             self.assertDictEqual(
                 stats_store.ReadStats("f", "fake_counter"),
                 expected_results)
         with test_lib.FakeTime(timestamp3):
             # Schedule and run the purge job through the relational cron
             # machinery rather than constructing it from empty protos.
             cron_name = compatibility.GetName(
                 system.PurgeServerStatsCronJob)
             cronjobs.ScheduleSystemCronJobs(names=[cron_name])
             job_data = data_store.REL_DB.ReadCronJobs([cron_name])[0]
             cron_run = rdf_cronjobs.CronJobRun(cron_job_id=cron_name)
             cron_run.GenerateRunId()
             cron_run.started_at = rdfvalue.RDFDatetime.Now()
             cron = system.PurgeServerStatsCronJob(cron_run, job_data)
             cron.Run()
             # timestamp0 and timestamp1 are older than 1h, so they should get
             # deleted.
             expected_results = {
                 "fake_process_id": {
                     "fake_counter": [(1, timestamp2)]
                 }
             }
             self.assertDictEqual(
                 stats_store.ReadStats("f", "fake_counter"),
                 expected_results)
             self.assertEqual(
                 "Deleted 2 stats entries.\nDeleted 1 stats entries.",
                 cron.run_state.log_message)