Example #1
    def Run(self):
        # We have to include all server metadata in the test context since server
        # code that uses the metrics runs within the context.
        non_test_metadata = list(
            itervalues(stats_collector_instance.Get().GetAllMetricsMetadata()))
        test_metadata = non_test_metadata + [
            stats_utils.CreateCounterMetadata(
                _TEST_COUNTER, docstring="Sample counter metric."),
            stats_utils.CreateGaugeMetadata(
                _TEST_GAUGE_METRIC, float, docstring="Sample gauge metric."),
            stats_utils.CreateEventMetadata(_TEST_EVENT_METRIC,
                                            docstring="Sample event metric."),
        ]
        stats_collector = prometheus_stats_collector.PrometheusStatsCollector(
            test_metadata)
        with stats_test_utils.FakeStatsContext(stats_collector):
            # We use mixins to run the same tests against multiple APIs.
            # Result-filtering is only needed for HTTP API tests.
            if isinstance(self,
                          api_regression_http.HttpApiRegressionTestMixinBase):
                api_post_process_fn = self._PostProcessApiResult
            else:
                api_post_process_fn = None

            self.Check("ListStatsStoreMetricsMetadata",
                       args=stats_plugin.ApiListStatsStoreMetricsMetadataArgs(
                           component="WORKER"),
                       api_post_process_fn=api_post_process_fn)
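The _TEST_COUNTER, _TEST_GAUGE_METRIC, and _TEST_EVENT_METRIC names above are module-level constants that this snippet does not show. A minimal sketch of what those definitions could look like (the exact string values are hypothetical, inferred from the docstrings in the example):

_TEST_COUNTER = "sample_counter"  # hypothetical value
_TEST_GAUGE_METRIC = "sample_gauge_value"  # hypothetical value
_TEST_EVENT_METRIC = "sample_event"  # hypothetical value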
Example #2
def TestInit():
  """Only used in tests and will rerun all the hooks to create a clean state."""
  global INIT_RAN

  stats_collector = prometheus_stats_collector.PrometheusStatsCollector()
  stats_collector_instance.Set(stats_collector)

  # Tests use both the server template grr_server.yaml as a primary config file
  # (this file does not contain all required options, e.g. private keys), and
  # additional configuration in test_data/grr_test.yaml which contains typical
  # values for a complete installation.
  flags.FLAGS.config = package.ResourcePath("grr-response-core",
                                            "install_data/etc/grr-server.yaml")

  flags.FLAGS.secondary_configs.append(
      package.ResourcePath("grr-response-test",
                           "grr_response_test/test_data/grr_test.yaml"))

  # This config contains non-public settings that should be applied during
  # tests.
  extra_test_config = config.CONFIG["Test.additional_test_config"]
  if os.path.exists(extra_test_config):
    flags.FLAGS.secondary_configs.append(extra_test_config)

  # Prevent using the default writeback location since it may clash with local
  # setup.
  writeback_filepath = temp.TempFilePath(prefix="grr_writeback", suffix=".yaml")
  config.CONFIG.global_override["Config.writeback"] = writeback_filepath

  # Tests additionally add a test configuration file.
  config_lib.SetPlatformArchContext()
  config_lib.ParseConfigCommandLine()

  # We are running a test so let the config system know that.
  config.CONFIG.AddContext(contexts.TEST_CONTEXT,
                           "Context applied when we run tests.")

  if not INIT_RAN:
    server_logging.ServerLoggingStartupInit()
    server_logging.SetTestVerbosity()

  blob_store_test_lib.UseTestBlobStore()

  data_store.InitializeDataStore()

  artifact.LoadArtifactsOnce()
  checks.LoadChecksFromFilesystemOnce()
  client_approval_auth.InitializeClientApprovalAuthorizationManagerOnce()
  email_alerts.InitializeEmailAlerterOnce()
  http_api.InitializeHttpRequestHandlerOnce()
  ip_resolver.IPResolverInitOnce()
  stats_server.InitializeStatsServerOnce()
  webauth.InitializeWebAuthOnce()

  if not utils.TimeBasedCache.house_keeper_thread:
    utils.TimeBasedCache()
  utils.TimeBasedCache.house_keeper_thread.exit = True
  utils.TimeBasedCache.house_keeper_thread.join()

  INIT_RAN = True
Example #3
def Init():
    """Run all required startup routines and initialization hooks."""
    # Set up a temporary syslog handler so we have somewhere to log problems
    # with ConfigInit(), which needs to happen before we can set up our proper
    # logging configuration.
    syslog_logger = logging.getLogger("TempLogger")
    if os.path.exists("/dev/log"):
        handler = logging.handlers.SysLogHandler(address="/dev/log")
    else:
        handler = logging.handlers.SysLogHandler()
    syslog_logger.addHandler(handler)

    # The default behavior of server components is to raise errors when
    # encountering unknown config options.
    flags.FLAGS.disallow_missing_config_definitions = True

    try:
        config_lib.SetPlatformArchContext()
        config_lib.ParseConfigCommandLine(rename_invalid_writeback=False)
    except config_lib.Error:
        syslog_logger.exception("Died during config initialization")
        raise

    stats_collector = prometheus_stats_collector.PrometheusStatsCollector(
        registry=prometheus_client.REGISTRY)
    stats_collector_instance.Set(stats_collector)

    server_logging.ServerLoggingStartupInit()

    bs_registry_init.RegisterBlobStores()
    all_decoders.Register()
    all_parsers.Register()
    ec_registry_init.RegisterExportConverters()
    gui_api_registry_init.RegisterApiCallRouters()

    data_store.InitializeDataStore()

    if contexts.ADMIN_UI_CONTEXT in config.CONFIG.context:
        api_auth_manager.InitializeApiAuthManager()

    artifact.LoadArtifactsOnce()  # Requires aff4.AFF4Init.
    checks.LoadChecksFromFilesystemOnce()
    client_approval_auth.InitializeClientApprovalAuthorizationManagerOnce()
    cronjobs.InitializeCronWorkerOnce()
    email_alerts.InitializeEmailAlerterOnce()
    http_api.InitializeHttpRequestHandlerOnce()
    ip_resolver.IPResolverInitOnce()
    stats_server.InitializeStatsServerOnce()
    webauth.InitializeWebAuthOnce()

    # Exempt config updater from this check because it is the one responsible for
    # setting the variable.
    if not config.CONFIG.ContextApplied("ConfigUpdater Context"):
        if not config.CONFIG.Get("Server.initialized"):
            raise RuntimeError(
                "Config not initialized, run \"grr_config_updater"
                " initialize\". If the server is already configured,"
                " add \"Server.initialized: True\" to your config.")
Example #4
 def _SetupFakeStatsContext(self):
   """Creates a stats context for running tests based on defined metrics."""
   # Reset stats_collector_instance to None, then reinitialize it.
   patcher = mock.patch.object(stats_collector_instance, "_stats_singleton",
                               None)
   patcher.start()
   self.addCleanup(patcher.stop)
   stats_collector_instance.Set(
       prometheus_stats_collector.PrometheusStatsCollector())
Example #5
 def _SetupFakeStatsContext(self):
   """Creates a stats context for running tests based on defined metrics."""
   metrics_metadata = list(
       itervalues(stats_collector_instance.Get().GetAllMetricsMetadata()))
   fake_stats_collector = prometheus_stats_collector.PrometheusStatsCollector(
       metrics_metadata)
   fake_stats_context = stats_test_utils.FakeStatsContext(fake_stats_collector)
   fake_stats_context.start()
   self.addCleanup(fake_stats_context.stop)
Example #6
def _CreateFakeStatsCollector():
  """Returns a stats-collector for use by tests in this file."""
  return prometheus_stats_collector.PrometheusStatsCollector([
      stats_utils.CreateCounterMetadata(_SINGLE_DIM_COUNTER),
      stats_utils.CreateCounterMetadata(
          _COUNTER_WITH_ONE_FIELD, fields=[("field1", str)]),
      stats_utils.CreateCounterMetadata(
          _COUNTER_WITH_TWO_FIELDS, fields=[("field1", str), ("field2", int)]),
      stats_utils.CreateEventMetadata(_EVENT_METRIC),
  ])
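A minimal usage sketch for the helper above, assuming the FakeStatsContext pattern used elsewhere in this listing (see Examples #5 and #7):

with stats_test_utils.FakeStatsContext(_CreateFakeStatsCollector()):
  # Metrics registered with the fake collector can now be exercised
  # through the global accessor.
  stats_collector_instance.Get().IncrementCounter(_SINGLE_DIM_COUNTER)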
Example #7
  def testEventMetricGetsRendered(self):
    stats_collector = prometheus_stats_collector.PrometheusStatsCollector(
        [stats_utils.CreateEventMetadata("api_method_latency")])
    with stats_test_utils.FakeStatsContext(stats_collector):
      stats_collector_instance.Get().RecordEvent("api_method_latency", 15)

      varz_json = json.loads(stats_server.BuildVarzJsonString())
      self.assertEqual(varz_json["api_method_latency"]["info"], {
          "metric_type": "EVENT",
          "value_type": "DISTRIBUTION"
      })
      self.assertCountEqual(
          iterkeys(varz_json["api_method_latency"]["value"]),
          ["sum", "bins_heights", "counter"])
Example #8
def Init():
    """Run all required startup routines and initialization hooks."""
    global INIT_RAN
    if INIT_RAN:
        return

    # Set up a temporary syslog handler so we have somewhere to log problems
    # with ConfigInit(), which needs to happen before we can set up our proper
    # logging configuration.
    syslog_logger = logging.getLogger("TempLogger")
    if os.path.exists("/dev/log"):
        handler = logging.handlers.SysLogHandler(address="/dev/log")
    else:
        handler = logging.handlers.SysLogHandler()
    syslog_logger.addHandler(handler)

    try:
        config_lib.SetPlatformArchContext()
        config_lib.ParseConfigCommandLine()
    except config_lib.Error:
        syslog_logger.exception("Died during config initialization")
        raise

    metric_metadata = server_metrics.GetMetadata()
    metric_metadata.extend(communicator.GetMetricMetadata())

    stats_collector = prometheus_stats_collector.PrometheusStatsCollector(
        metric_metadata, registry=prometheus_client.REGISTRY)
    stats_collector_instance.Set(stats_collector)

    server_logging.ServerLoggingStartupInit()

    bs_registry_init.RegisterBlobStores()
    all_decoders.Register()
    all_parsers.Register()
    registry.Init()

    # Exempt config updater from this check because it is the one responsible for
    # setting the variable.
    if not config.CONFIG.ContextApplied("ConfigUpdater Context"):
        if not config.CONFIG.Get("Server.initialized"):
            raise RuntimeError(
                "Config not initialized, run \"grr_config_updater"
                " initialize\". If the server is already configured,"
                " add \"Server.initialized: True\" to your config.")

    INIT_RAN = True
Example #9
 def _SetUpFakeStatsContext(self):
   """Registers stats metrics used by tests in this class."""
   # DB implementations might interact with real metrics (not defined in this
   # test), so we make sure that they get registered.
   real_metrics = list(
       stats_collector_instance.Get().GetAllMetricsMetadata().values())
   test_metrics = [
       stats_utils.CreateCounterMetadata(_SINGLE_DIM_COUNTER),
       stats_utils.CreateCounterMetadata(
           _MULTI_DIM_COUNTER,
           fields=[("str_field1", str), ("str_field2", str)]),
   ]
   fake_stats_context = stats_test_utils.FakeStatsContext(
       prometheus_stats_collector.PrometheusStatsCollector(real_metrics +
                                                           test_metrics))
   fake_stats_context.start()
   self.addCleanup(fake_stats_context.stop)
Example #10
  def setUp(self):
    super(StatsStoreTest, self).setUp()

    self.process_id = "some_pid"
    self.stats_store = aff4.FACTORY.Create(
        None, stats_store.StatsStore, mode="w", token=self.token)
    fake_stats_collector = prometheus_stats_collector.PrometheusStatsCollector([
        stats_utils.CreateCounterMetadata("counter"),
        stats_utils.CreateCounterMetadata(
            "counter_with_fields", fields=[("source", str)]),
        stats_utils.CreateEventMetadata("events"),
        stats_utils.CreateEventMetadata(
            "events_with_fields", fields=[("source", str)]),
        stats_utils.CreateGaugeMetadata("int_gauge", int),
    ])
    fake_stats_context = stats_test_utils.FakeStatsContext(fake_stats_collector)
    fake_stats_context.start()
    self.addCleanup(fake_stats_context.stop)
Example #11
    def testPrometheusIntegration(self):
        registry = prometheus_client.CollectorRegistry(auto_describe=True)

        metadatas = [stats_utils.CreateCounterMetadata("foobars")]
        collector = prometheus_stats_collector.PrometheusStatsCollector(
            metadatas, registry=registry)
        collector.IncrementCounter("foobars", 42)

        port = portpicker.pick_unused_port()

        with mock.patch.object(stats_server.StatsServerHandler, "registry",
                               registry):
            server = stats_server.StatsServer(port)
            server.Start()
            self.addCleanup(server.Stop)
            res = requests.get("http://localhost:{}/metrics".format(port))

        text_fd = io.StringIO(res.text)
        families = prometheus_parser.text_fd_to_metric_families(text_fd)
        families = {family.name: family for family in families}

        self.assertIn("foobars", families)
        self.assertEqual(families["foobars"].samples[0].value, 42)
Example #12
    def testPrometheusIntegration(self):
        registry = prometheus_client.CollectorRegistry(auto_describe=True)

        metadatas = [stats_utils.CreateCounterMetadata("foobars")]
        collector = prometheus_stats_collector.PrometheusStatsCollector(
            metadatas, registry=registry)
        collector.IncrementCounter("foobars", 42)

        handler = stats_server.StatsServerHandler(mock.MagicMock(),
                                                  mock.MagicMock(),
                                                  mock.MagicMock())
        handler.registry = registry
        handler.path = "/metrics"
        handler.headers = {}
        handler.wfile = io.BytesIO()

        handler.do_GET()
        handler.wfile.seek(0)

        families = prometheus_parser.text_fd_to_metric_families(handler.wfile)
        families = {family.name: family for family in families}

        self.assertIn("foobars", families)
        self.assertEqual(families["foobars"].samples[0].value, 42)
Example #13
 def MakeCollector(metadatas):
   # `registry` is a free variable captured from the enclosing scope
   # (see the hypothetical context sketched below).
   return prometheus_stats_collector.PrometheusStatsCollector(
       metadatas, registry)
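A hypothetical enclosing context for the helper, modeled on the CollectorRegistry usage in Examples #11 and #12 (the registry free variable must be defined in the scope where MakeCollector lives):

registry = prometheus_client.CollectorRegistry(auto_describe=True)
collector = MakeCollector([stats_utils.CreateCounterMetadata("foobars")])
collector.IncrementCounter("foobars")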
Example #14
 def _CreateStatsCollector(self):
   return prometheus_stats_collector.PrometheusStatsCollector()
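Note the two construction styles that appear in this listing: test helpers call PrometheusStatsCollector() with no arguments or with just a metadata list, while the Init() examples pass registry=prometheus_client.REGISTRY so that metrics are published through the global Prometheus registry. Presumably the no-registry form gives the collector a private registry, which is what keeps test metrics isolated (an assumption based on these snippets, not on the collector's source). Side by side:

# Isolated collector, as used in tests (assumed to create its own registry).
test_collector = prometheus_stats_collector.PrometheusStatsCollector()
# Collector wired into the global registry, as in the Init() examples.
server_collector = prometheus_stats_collector.PrometheusStatsCollector(
    server_metrics.GetMetadata(), registry=prometheus_client.REGISTRY)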
Example #15
def Init():
    """Run all required startup routines and initialization hooks."""
    # Set up a temporary syslog handler so we have somewhere to log problems
    # with ConfigInit(), which needs to happen before we can set up our proper
    # logging configuration.
    syslog_logger = logging.getLogger("TempLogger")
    if os.path.exists("/dev/log"):
        handler = logging.handlers.SysLogHandler(address="/dev/log")
    else:
        handler = logging.handlers.SysLogHandler()
    syslog_logger.addHandler(handler)

    try:
        config_lib.SetPlatformArchContext()
        config_lib.ParseConfigCommandLine()
    except config_lib.Error:
        syslog_logger.exception("Died during config initialization")
        raise

    metric_metadata = server_metrics.GetMetadata()
    metric_metadata.extend(communicator.GetMetricMetadata())

    stats_collector = prometheus_stats_collector.PrometheusStatsCollector(
        metric_metadata, registry=prometheus_client.REGISTRY)
    stats_collector_instance.Set(stats_collector)

    server_logging.ServerLoggingStartupInit()

    bs_registry_init.RegisterBlobStores()
    all_decoders.Register()
    all_parsers.Register()

    data_store.InitializeDataStore()

    if data_store.AFF4Enabled():
        aff4.AFF4Init()  # Requires data_store.InitializeDataStore.
        aff4_grr.GRRAFF4Init()  # Requires aff4.AFF4Init.
        filestore.FileStoreInit()  # Requires aff4_grr.GRRAFF4Init.
        results.ResultQueueInit()  # Requires aff4.AFF4Init.
        sequential_collection.StartUpdaterOnce()

    if contexts.ADMIN_UI_CONTEXT in config.CONFIG.context:
        api_auth_manager.InitializeApiAuthManager()

    artifact.LoadArtifactsOnce()  # Requires aff4.AFF4Init.
    checks.LoadChecksFromFilesystemOnce()
    client_approval_auth.InitializeClientApprovalAuthorizationManagerOnce()
    cronjobs.InitializeCronWorkerOnce()  # Requires aff4.AFF4Init.
    email_alerts.InitializeEmailAlerterOnce()
    http_api.InitializeHttpRequestHandlerOnce()
    ip_resolver.IPResolverInitOnce()
    stats_server.InitializeStatsServerOnce()
    webauth.InitializeWebAuthOnce()

    # Exempt config updater from this check because it is the one responsible for
    # setting the variable.
    if not config.CONFIG.ContextApplied("ConfigUpdater Context"):
        if not config.CONFIG.Get("Server.initialized"):
            raise RuntimeError(
                "Config not initialized, run \"grr_config_updater"
                " initialize\". If the server is already configured,"
                " add \"Server.initialized: True\" to your config.")
Example #16
 def _CreateStatsCollector(self, metadata_list):
     return prometheus_stats_collector.PrometheusStatsCollector(
         metadata_list)
Example #17
 def testPurgeServerStats(self):
     if not data_store.RelationalDBReadEnabled():
         self.skipTest("Test is only for the relational DB. Skipping...")
     fake_stats_collector = prometheus_stats_collector.PrometheusStatsCollector(
         [
             stats_utils.CreateCounterMetadata("fake_counter"),
         ])
     timestamp0 = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(1)
     timestamp1 = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(2)
     timestamp2 = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(3600)
     timestamp3 = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(4800)
     config_overrides = {
         "Database.useForReads.stats": True,
         "StatsStore.stats_ttl_hours": 1
     }
     zero_duration = rdfvalue.Duration(0)
     # Backslash continuation is explicitly allowed by Google's style guide for
     # nested context manager expressions spanning 3 or more lines.
     # pylint: disable=g-backslash-continuation
     with test_lib.ConfigOverrider(config_overrides), \
          stats_test_utils.FakeStatsContext(fake_stats_collector), \
          mock.patch.object(system, "_STATS_DELETION_BATCH_SIZE", 1), \
          mock.patch.object(system, "_stats_checkpoint_period", zero_duration):
         with test_lib.FakeTime(rdfvalue.RDFDatetime(timestamp0)):
             stats_store._WriteStats(process_id="fake_process_id")
         with test_lib.FakeTime(rdfvalue.RDFDatetime(timestamp1)):
             stats_collector_instance.Get().IncrementCounter("fake_counter")
             stats_store._WriteStats(process_id="fake_process_id")
             expected_results = {
                 "fake_process_id": {
                     "fake_counter": [(0, timestamp0), (1, timestamp1)]
                 }
             }
             self.assertDictEqual(
                 stats_store.ReadStats("f", "fake_counter"),
                 expected_results)
         with test_lib.FakeTime(timestamp2):
             stats_store._WriteStats(process_id="fake_process_id")
             expected_results = {
                 "fake_process_id": {
                     "fake_counter": [(0, timestamp0), (1, timestamp1),
                                      (1, timestamp2)]
                 }
             }
             self.assertDictEqual(
                 stats_store.ReadStats("f", "fake_counter"),
                 expected_results)
         with test_lib.FakeTime(timestamp3):
             cron_name = compatibility.GetName(
                 system.PurgeServerStatsCronJob)
             cronjobs.ScheduleSystemCronJobs(names=[cron_name])
             job_data = data_store.REL_DB.ReadCronJobs([cron_name])[0]
             cron_run = rdf_cronjobs.CronJobRun(cron_job_id=cron_name)
             cron_run.GenerateRunId()
             cron_run.started_at = rdfvalue.RDFDatetime.Now()
             cron = system.PurgeServerStatsCronJob(cron_run, job_data)
             cron.Run()
             # timestamp0 and timestamp1 are older than 1h, so they should get
             # deleted.
             expected_results = {
                 "fake_process_id": {
                     "fake_counter": [(1, timestamp2)]
                 }
             }
             self.assertDictEqual(
                 stats_store.ReadStats("f", "fake_counter"),
                 expected_results)
             self.assertEqual(
                 "Deleted 2 stats entries.\nDeleted 1 stats entries.",
                 cron.run_state.log_message)
Example #18
def TestInit():
    """Only used in tests and will rerun all the hooks to create a clean state."""
    global INIT_RAN

    metric_metadata = server_metrics.GetMetadata()
    metric_metadata.extend(client_metrics.GetMetadata())
    metric_metadata.extend(communicator.GetMetricMetadata())
    stats_collector = prometheus_stats_collector.PrometheusStatsCollector(
        metric_metadata)
    stats_collector_instance.Set(stats_collector)

    # Tests use both the server template grr_server.yaml as a primary config file
    # (this file does not contain all required options, e.g. private keys), and
    # additional configuration in test_data/grr_test.yaml which contains typical
    # values for a complete installation.
    flags.FLAGS.config = package.ResourcePath(
        "grr-response-core", "install_data/etc/grr-server.yaml")

    flags.FLAGS.secondary_configs.append(
        package.ResourcePath("grr-response-test",
                             "grr_response_test/test_data/grr_test.yaml"))

    # This config contains non-public settings that should be applied during
    # tests.
    extra_test_config = config.CONFIG["Test.additional_test_config"]
    if os.path.exists(extra_test_config):
        flags.FLAGS.secondary_configs.append(extra_test_config)

    # Tests additionally add a test configuration file.
    config_lib.SetPlatformArchContext()
    config_lib.ParseConfigCommandLine()

    # We are running a test so let the config system know that.
    config.CONFIG.AddContext(contexts.TEST_CONTEXT,
                             "Context applied when we run tests.")

    test_ds = flags.FLAGS.test_data_store
    if test_ds is None:
        test_ds = compatibility.GetName(fake_data_store.FakeDataStore)

    config.CONFIG.Set("Datastore.implementation", test_ds)

    if not INIT_RAN:
        server_logging.ServerLoggingStartupInit()
        server_logging.SetTestVerbosity()

    blob_store_test_lib.UseTestBlobStore()

    data_store.InitializeDataStore()

    if data_store.AFF4Enabled():
        aff4.AFF4Init()  # Requires data_store.InitializeDataStore.
        aff4_grr.GRRAFF4Init()  # Requires aff4.AFF4Init.
        filestore.FileStoreInit()  # Requires aff4_grr.GRRAFF4Init.
        results.ResultQueueInit()  # Requires aff4.AFF4Init.
        sequential_collection.StartUpdaterOnce()

    artifact.LoadArtifactsOnce()  # Requires aff4.AFF4Init.
    checks.LoadChecksFromFilesystemOnce()
    client_approval_auth.InitializeClientApprovalAuthorizationManagerOnce()
    cronjobs.InitializeCronWorkerOnce()  # Requires aff4.AFF4Init.
    email_alerts.InitializeEmailAlerterOnce()
    http_api.InitializeHttpRequestHandlerOnce()
    ip_resolver.IPResolverInitOnce()
    stats_server.InitializeStatsServerOnce()
    webauth.InitializeWebAuthOnce()

    db = data_store.DB.SetupTestDB()
    if db:
        data_store.DB = db
    data_store.DB.Initialize()

    if not utils.TimeBasedCache.house_keeper_thread:
        utils.TimeBasedCache()
    utils.TimeBasedCache.house_keeper_thread.exit = True
    utils.TimeBasedCache.house_keeper_thread.join()

    INIT_RAN = True
Example #19
def TestInit():
    """Only used in tests and will rerun all the hooks to create a clean state."""
    global INIT_RAN

    metric_metadata = server_metrics.GetMetadata()
    metric_metadata.extend(client_metrics.GetMetadata())
    metric_metadata.extend(communicator.GetMetricMetadata())
    stats_collector = prometheus_stats_collector.PrometheusStatsCollector(
        metric_metadata)
    stats_collector_instance.Set(stats_collector)

    # Tests use both the server template grr_server.yaml as a primary config file
    # (this file does not contain all required options, e.g. private keys), and
    # additional configuration in test_data/grr_test.yaml which contains typical
    # values for a complete installation.
    flags.FLAGS.config = package.ResourcePath(
        "grr-response-core", "install_data/etc/grr-server.yaml")

    flags.FLAGS.secondary_configs.append(
        package.ResourcePath("grr-response-test",
                             "grr_response_test/test_data/grr_test.yaml"))

    # This config contains non-public settings that should be applied during
    # tests.
    extra_test_config = config.CONFIG["Test.additional_test_config"]
    if os.path.exists(extra_test_config):
        flags.FLAGS.secondary_configs.append(extra_test_config)

    # Tests additionally add a test configuration file.
    config_lib.SetPlatformArchContext()
    config_lib.ParseConfigCommandLine()

    # We are running a test so let the config system know that.
    config.CONFIG.AddContext(contexts.TEST_CONTEXT,
                             "Context applied when we run tests.")

    test_ds = flags.FLAGS.test_data_store
    if test_ds is None:
        test_ds = compatibility.GetName(fake_data_store.FakeDataStore)

    config.CONFIG.Set("Datastore.implementation", test_ds)

    if not INIT_RAN:
        server_logging.ServerLoggingStartupInit()
        server_logging.SetTestVerbosity()

    blob_store_test_lib.UseTestBlobStore()
    registry.TestInit()

    db = data_store.DB.SetupTestDB()
    if db:
        data_store.DB = db
    data_store.DB.Initialize()
    aff4.AFF4InitHook().Run()

    if not utils.TimeBasedCache.house_keeper_thread:
        utils.TimeBasedCache()
    utils.TimeBasedCache.house_keeper_thread.exit = True
    utils.TimeBasedCache.house_keeper_thread.join()

    INIT_RAN = True
Example #20
    def Run(self):
        with test_lib.ConfigOverrider({"Database.useForReads.stats": True}):
            real_metric_metadata = list(
                itervalues(
                    stats_collector_instance.Get().GetAllMetricsMetadata()))
            test_metadata = real_metric_metadata + [
                stats_utils.CreateCounterMetadata(
                    _TEST_COUNTER, docstring="Sample counter metric."),
                stats_utils.CreateGaugeMetadata(
                    _TEST_GAUGE_METRIC,
                    float,
                    docstring="Sample gauge metric."),
                stats_utils.CreateEventMetadata(
                    _TEST_EVENT_METRIC, docstring="Sample event metric."),
            ]
            stats_collector = prometheus_stats_collector.PrometheusStatsCollector(
                test_metadata)
            with stats_test_utils.FakeStatsContext(stats_collector):
                for i in range(10):
                    with test_lib.FakeTime(42 + i * 60):
                        stats_collector.IncrementCounter(_TEST_COUNTER)
                        stats_collector.SetGaugeValue(_TEST_GAUGE_METRIC,
                                                      i * 0.5)
                        stats_collector.RecordEvent(_TEST_EVENT_METRIC,
                                                    0.42 + 0.5 * i)
                        stats_store._WriteStats(process_id="worker_1")

                range_start = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(42)
                range_end = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(3600)

                self.Check("GetStatsStoreMetric",
                           args=stats_plugin.ApiGetStatsStoreMetricArgs(
                               component="WORKER",
                               metric_name=_TEST_COUNTER,
                               start=range_start,
                               end=range_end))
                self.Check("GetStatsStoreMetric",
                           args=stats_plugin.ApiGetStatsStoreMetricArgs(
                               component="WORKER",
                               metric_name=_TEST_COUNTER,
                               start=range_start,
                               end=range_end,
                               rate="1m"))

                self.Check("GetStatsStoreMetric",
                           args=stats_plugin.ApiGetStatsStoreMetricArgs(
                               component="WORKER",
                               metric_name=_TEST_GAUGE_METRIC,
                               start=range_start,
                               end=range_end))

                self.Check("GetStatsStoreMetric",
                           args=stats_plugin.ApiGetStatsStoreMetricArgs(
                               component="WORKER",
                               metric_name=_TEST_EVENT_METRIC,
                               start=range_start,
                               end=range_end))
                self.Check("GetStatsStoreMetric",
                           args=stats_plugin.ApiGetStatsStoreMetricArgs(
                               component="WORKER",
                               metric_name=_TEST_EVENT_METRIC,
                               start=range_start,
                               end=range_end,
                               distribution_handling_mode="DH_COUNT"))