Example #1
    def testInCallAcceptsRegularExpressions(self):
        # Initialize and write test data.
        stats.STATS.RegisterCounterMetric("counter")

        stats.STATS.IncrementCounter("counter")
        self.stats_store.WriteStats(
            process_id="pid1",
            timestamp=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(0))

        stats.STATS.IncrementCounter("counter")
        self.stats_store.WriteStats(
            process_id="pid1",
            timestamp=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(90))
        self.stats_store.WriteStats(
            process_id="pid2",
            timestamp=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(90))

        stats_data = self.stats_store.MultiReadStats(
            process_ids=["pid1", "pid2"])

        query = stats_store.StatsStoreDataQuery(stats_data)
        self.assertEqual(query.In("pid1").In("counter").SeriesCount(), 1)

        query = stats_store.StatsStoreDataQuery(stats_data)
        self.assertEqual(query.In("pid2").In("counter").SeriesCount(), 1)

        query = stats_store.StatsStoreDataQuery(stats_data)
        self.assertEqual(query.In("pid.*").In("counter").SeriesCount(), 2)
Example #2
    def testUsingInCallNarrowsQuerySpace(self):
        # Create sample data.
        stats.STATS.RegisterCounterMetric("counter")
        stats.STATS.RegisterCounterMetric("counter_with_fields",
                                          fields=[("source", str)])

        stats.STATS.IncrementCounter("counter")
        stats.STATS.IncrementCounter("counter_with_fields", fields=["http"])
        stats.STATS.IncrementCounter("counter_with_fields", fields=["rpc"])

        # Write to data store.
        self.stats_store.WriteStats(process_id=self.process_id, timestamp=42)

        # Read them back and apply queries with In() and InAll() calls.
        stats_data = self.stats_store.ReadStats(process_id=self.process_id)

        query = stats_store.StatsStoreDataQuery(stats_data)
        self.assertEqual(query.In("counter").SeriesCount(), 1)

        query = stats_store.StatsStoreDataQuery(stats_data)
        self.assertEqual(
            query.In("counter_with_fields").InAll().SeriesCount(), 2)

        query = stats_store.StatsStoreDataQuery(stats_data)
        self.assertEqual(
            query.In("counter_with_fields").In("http").SeriesCount(), 1)
Example #3
    def testMeanReturnsZeroIfQueryHasNoTimeSeries(self):
        # Read data back; nothing has been written for this process.
        stats_data = self.stats_store.ReadStats(process_id=self.process_id)

        # Get time series generated with TakeValue().
        query = stats_store.StatsStoreDataQuery(stats_data)
        self.assertEqual(query.In("counter").TakeValue().Mean(), 0)
Example #4
    def testInTimeRangeLimitsQueriesByTime(self):
        # Initialize and write test data.
        stats.STATS.RegisterCounterMetric("counter")

        stats.STATS.IncrementCounter("counter")
        self.stats_store.WriteStats(
            process_id=self.process_id,
            timestamp=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(42))

        stats.STATS.IncrementCounter("counter")
        self.stats_store.WriteStats(
            process_id=self.process_id,
            timestamp=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(100))

        stats.STATS.IncrementCounter("counter")
        self.stats_store.WriteStats(
            process_id=self.process_id,
            timestamp=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(140))

        # Read data back.
        stats_data = self.stats_store.ReadStats(process_id=self.process_id)

        # Check that InTimeRange works as expected.
        query = stats_store.StatsStoreDataQuery(stats_data)
        ts = query.In("counter").TakeValue().InTimeRange(
            rdfvalue.RDFDatetime.FromSecondsSinceEpoch(80),
            rdfvalue.RDFDatetime.FromSecondsSinceEpoch(120)).ts

        self.assertListEqual(ts.data, [[2, 100 * 1e6]])
Example #5
    def testInTimeRangeRaisesIfAppliedBeforeTakeMethod(self):
        stats_data = self.stats_store.ReadStats(process_id=self.process_id)
        query = stats_store.StatsStoreDataQuery(stats_data)
        with self.assertRaises(RuntimeError):
            query.In("counter").InTimeRange(
                rdfvalue.RDFDatetime.FromSecondsSinceEpoch(80),
                rdfvalue.RDFDatetime.FromSecondsSinceEpoch(120))
Example #6
    def testNormalize(self):
        # Initialize and write test data.
        stats.STATS.RegisterCounterMetric("counter")

        stats.STATS.IncrementCounter("counter")
        self.stats_store.WriteStats(
            process_id=self.process_id,
            timestamp=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(0))

        stats.STATS.IncrementCounter("counter")
        self.stats_store.WriteStats(
            process_id=self.process_id,
            timestamp=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(15))

        stats.STATS.IncrementCounter("counter")
        self.stats_store.WriteStats(
            process_id=self.process_id,
            timestamp=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(45))

        # Read data back.
        stats_data = self.stats_store.ReadStats(process_id=self.process_id)
        query = stats_store.StatsStoreDataQuery(stats_data)

        ts = query.In("counter").TakeValue().Normalize(
            rdfvalue.Duration("30s"), 0, rdfvalue.Duration("1m")).ts
        self.assertListEqual(ts.data, [[1.5, 0 * 1e6], [3.0, 30 * 1e6]])
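The asserted values follow from simple binning: Normalize() splits the requested range into fixed-width buckets and, in the default mode, replaces each bucket with the mean of the samples that fall into it. A minimal standalone sketch of that arithmetic (plain Python; normalize here is a stand-in, not the library function):

def normalize(points, bin_width, start, stop):
  # Average (value, timestamp) samples into fixed-width bins; bins with no
  # samples become None (see testNormalizeFillsGapsInTimeSeries below).
  result = []
  t = start
  while t < stop:
    bucket = [v for v, ts in points if t <= ts < t + bin_width]
    result.append((sum(bucket) / len(bucket) if bucket else None, t))
    t += bin_width
  return result

# Samples from the test: values 1, 2, 3 at 0s, 15s, 45s.
# Bin [0, 30): mean(1, 2) = 1.5; bin [30, 60): mean(3) = 3.0.
assert normalize([(1.0, 0), (2.0, 15), (3.0, 45)], 30, 0, 60) == [
    (1.5, 0), (3.0, 30)]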
Example #7
  def testRate(self):
    # Write test data.
    for i in range(5):
      for _ in range(i):
        stats_collector_instance.Get().IncrementCounter("counter")

      self.stats_store.WriteStats(
          process_id=self.process_id,
          timestamp=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(10 * i))

    # Read data back.
    stats_data = self.stats_store.ReadStats(process_id=self.process_id)

    # Get time series generated with TakeValue().
    query = stats_store.StatsStoreDataQuery(stats_data)
    ts = query.In("counter").TakeValue().Normalize(
        rdfvalue.Duration("10s"), 0, rdfvalue.Duration("50s")).Rate().ts

    # We expect the following normalized time series:
    # 1970-01-01 00:00:00    0
    # 1970-01-01 00:00:10    1
    # 1970-01-01 00:00:20    3
    # 1970-01-01 00:00:30    6
    # 1970-01-01 00:00:40    10
    #
    # Rate() yields the per-second increase over each 10-second interval,
    # stamped at the interval's start, so we expect:
    # 1970-01-01 00:00:00    0.1
    # 1970-01-01 00:00:10    0.2
    # 1970-01-01 00:00:20    0.3
    # 1970-01-01 00:00:30    0.4
    self.assertListEqual(ts.data,
                         [[0.1, 0], [0.2, 10 * 1e6],
                          [0.30000000000000004, 20 * 1e6], [0.4, 30 * 1e6]])
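The expected rates are finite differences of the normalized series divided by the interval length, stamped at each interval's start; the 0.30000000000000004 in the assertion is ordinary floating-point rounding inside that arithmetic. A sketch of the computation under that assumption (plain Python, not the library's Rate()):

def rate(points):
  # Per-second difference quotient between consecutive (value, t_seconds)
  # samples; the rate over [t1, t2) is stamped at t1.
  return [((v2 - v1) / float(t2 - t1), t1)
          for (v1, t1), (v2, t2) in zip(points, points[1:])]

# Normalized series from the test: 0, 1, 3, 6, 10 at 0s..40s.
series = [(0, 0), (1, 10), (3, 20), (6, 30), (10, 40)]
assert [round(v, 6) for v, _ in rate(series)] == [0.1, 0.2, 0.3, 0.4]
assert [t for _, t in rate(series)] == [0, 10, 20, 30]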
Example #8
    def testAggregateViaSumAggregatesMultipleTimeSeriesIntoOne(self):
        # Initialize and write test data.
        stats.STATS.RegisterCounterMetric("counter")

        stats.STATS.IncrementCounter("counter")
        self.stats_store.WriteStats(
            process_id="pid1",
            timestamp=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(0))

        stats.STATS.IncrementCounter("counter")
        self.stats_store.WriteStats(
            process_id="pid2",
            timestamp=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(0))

        stats.STATS.IncrementCounter("counter")
        self.stats_store.WriteStats(
            process_id="pid1",
            timestamp=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(90))
        self.stats_store.WriteStats(
            process_id="pid2",
            timestamp=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(90))

        stats_data = self.stats_store.MultiReadStats(
            process_ids=["pid1", "pid2"])
        query = stats_store.StatsStoreDataQuery(stats_data)

        ts = query.In("pid.*").In("counter").TakeValue().Normalize(
            rdfvalue.Duration("30s"),
            0,
            rdfvalue.Duration("2m"),
            mode=timeseries.NORMALIZE_MODE_COUNTER).AggregateViaSum().ts

        # We expect 2 time series in the query:
        # 1970-01-01 00:00:00    1
        # 1970-01-01 00:00:30    1
        # 1970-01-01 00:01:00    1
        # 1970-01-01 00:01:30    3
        #
        # and:
        # 1970-01-01 00:00:00    2
        # 1970-01-01 00:00:30    2
        # 1970-01-01 00:01:00    2
        # 1970-01-01 00:01:30    3
        #
        # Therefore we expect the sum to look like:
        # 1970-01-01 00:00:00    3
        # 1970-01-01 00:00:30    3
        # 1970-01-01 00:01:00    3
        # 1970-01-01 00:01:30    6
        self.assertAlmostEqual(ts.data[0][0], 3)
        self.assertAlmostEqual(ts.data[1][0], 3)
        self.assertAlmostEqual(ts.data[2][0], 3)
        self.assertAlmostEqual(ts.data[3][0], 6)
        self.assertListEqual([t for _, t in ts.data],
                             [0.0 * 1e6, 30.0 * 1e6, 60.0 * 1e6, 90.0 * 1e6])
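A sketch of just the aggregation step (plain Python; assumes, as the comment above lays out, that Normalize() has already aligned every series onto the same timestamps):

def aggregate_via_sum(series_list):
  # Pointwise sum of several time series that share the same timestamps.
  timestamps = [t for _, t in series_list[0]]
  return [(sum(s[i][0] for s in series_list), timestamps[i])
          for i in range(len(timestamps))]

pid1 = [(1, 0), (1, 30), (1, 60), (3, 90)]
pid2 = [(2, 0), (2, 30), (2, 60), (3, 90)]
assert aggregate_via_sum([pid1, pid2]) == [(3, 0), (3, 30), (3, 60), (6, 90)]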
Example #9
  def testSeriesCountReturnsNumberOfDataSeriesInCurrentQuery(self):
    # Write test data.
    stats_collector_instance.Get().IncrementCounter("counter")
    self.stats_store.WriteStats(
        process_id="pid1",
        timestamp=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(0))
    self.stats_store.WriteStats(
        process_id="pid2",
        timestamp=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(90))

    stats_data = self.stats_store.MultiReadStats(process_ids=["pid1", "pid2"])

    query = stats_store.StatsStoreDataQuery(stats_data)
    self.assertEqual(query.In("pid.*").SeriesCount(), 2)

    query = stats_store.StatsStoreDataQuery(stats_data)
    self.assertEqual(query.In("pid1").In("counter").SeriesCount(), 1)

    query = stats_store.StatsStoreDataQuery(stats_data)
    self.assertEqual(query.In("pid.*").In("counter").SeriesCount(), 2)
Example #10
  def testTakeDistributionSumRaisesIfPlainValueIsEncountered(self):
    # Write test data.
    stats_collector_instance.Get().IncrementCounter("counter")
    self.stats_store.WriteStats(
        process_id=self.process_id,
        timestamp=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(42))

    # Read data back.
    stats_data = self.stats_store.ReadStats(process_id=self.process_id)
    query = stats_store.StatsStoreDataQuery(stats_data)
    with self.assertRaises(ValueError):
      query.In("counter").TakeDistributionSum()
Example #11
  def testTakeValueRaisesIfDistributionIsEncountered(self):
    # Write test data.
    stats_collector_instance.Get().RecordEvent("events", 42)
    self.stats_store.WriteStats(
        process_id=self.process_id,
        timestamp=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(42))

    # Read data back.
    stats_data = self.stats_store.ReadStats(process_id=self.process_id)
    query = stats_store.StatsStoreDataQuery(stats_data)
    with self.assertRaises(ValueError):
      query.In("events").TakeValue()
Example #12
    def testTakeValueRaisesIfDistributionIsEncountered(self):
        # Initialize and write test data.
        stats.STATS.RegisterEventMetric("events")

        stats.STATS.RecordEvent("events", 42)
        self.stats_store.WriteStats(
            process_id=self.process_id,
            timestamp=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(42))

        # Read data back.
        stats_data = self.stats_store.ReadStats(process_id=self.process_id)
        query = stats_store.StatsStoreDataQuery(stats_data)
        with self.assertRaises(ValueError):
            query.In("events").TakeValue()
Example #13
  def testMeanReducesTimeSerieToSingleNumber(self):
    # Write test data.
    for i in range(5):
      stats_collector_instance.Get().IncrementCounter("counter")
      self.stats_store.WriteStats(
          process_id=self.process_id,
          timestamp=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(10 * i))

    # Read data back.
    stats_data = self.stats_store.ReadStats(process_id=self.process_id)

    # Get time series generated with TakeValue().
    query = stats_store.StatsStoreDataQuery(stats_data)
    self.assertAlmostEqual(query.In("counter").TakeValue().Mean(), 3)
Example #14
  def testMeanRaisesIfCalledOnMultipleTimeSeries(self):
    # Write test data.
    stats_collector_instance.Get().IncrementCounter("counter")
    self.stats_store.WriteStats(
        process_id="pid1",
        timestamp=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(0))
    self.stats_store.WriteStats(
        process_id="pid2",
        timestamp=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(90))

    stats_data = self.stats_store.MultiReadStats(process_ids=["pid1", "pid2"])
    query = stats_store.StatsStoreDataQuery(stats_data)
    with self.assertRaises(RuntimeError):
      query.In("pid.*").In("counter").TakeValue().Mean()
Example #15
    def testMakeIncreasingHandlesValuesResets(self):
        # Initialize and write test data.
        stats.STATS.RegisterCounterMetric("counter")

        self.stats_store.WriteStats(
            process_id="pid1",
            timestamp=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(0))

        stats.STATS.IncrementCounter("counter")
        self.stats_store.WriteStats(
            process_id="pid1",
            timestamp=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(30))

        stats.STATS.IncrementCounter("counter")
        self.stats_store.WriteStats(
            process_id="pid1",
            timestamp=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(60))

        # Re-registering the metric resets the counter, simulating a process
        # restart.
        stats.STATS.RegisterCounterMetric("counter")
        self.stats_store.WriteStats(
            process_id="pid1",
            timestamp=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(90))

        # The counter was reset after the 60-second write, so we get the
        # following time series:
        # 1970-01-01 00:00:00    0
        # 1970-01-01 00:00:30    1
        # 1970-01-01 00:01:00    2
        # 1970-01-01 00:01:30    0
        stats_data = self.stats_store.ReadStats(process_id="pid1")
        query = stats_store.StatsStoreDataQuery(stats_data)

        ts = query.In("counter").TakeValue().ts

        self.assertAlmostEqual(ts.data[0][0], 0)
        self.assertAlmostEqual(ts.data[1][0], 1)
        self.assertAlmostEqual(ts.data[2][0], 2)
        self.assertAlmostEqual(ts.data[3][0], 0)

        # MakeIncreasing() detects the reset and offsets the values that
        # follow the reset point:
        # 1970-01-01 00:00:00    0
        # 1970-01-01 00:00:30    1
        # 1970-01-01 00:01:00    2
        # 1970-01-01 00:01:30    2
        ts = query.MakeIncreasing().ts

        self.assertAlmostEqual(ts.data[0][0], 0)
        self.assertAlmostEqual(ts.data[1][0], 1)
        self.assertAlmostEqual(ts.data[2][0], 2)
        self.assertAlmostEqual(ts.data[3][0], 2)
Example #16
    def testTakeDistributionCountRaisesIfPlainValueIsEncountered(self):
        # Initialize and write test data.
        stats.STATS.RegisterCounterMetric("counter")

        stats.STATS.IncrementCounter("counter")
        self.stats_store.WriteStats(
            process_id=self.process_id,
            timestamp=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(42))

        # Read data back.
        stats_data = self.stats_store.ReadStats(process_id=self.process_id)
        query = stats_store.StatsStoreDataQuery(stats_data)
        with self.assertRaises(ValueError):
            query.In("counter").TakeDistributionCount()
Example #17
  def testMakeIncreasingHandlesValuesResets(self):
    # Write test data.
    self.stats_store.WriteStats(
        process_id="pid1",
        timestamp=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(0))

    stats_collector_instance.Get().IncrementCounter("counter")
    self.stats_store.WriteStats(
        process_id="pid1",
        timestamp=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(30))

    stats_collector_instance.Get().IncrementCounter("counter")
    self.stats_store.WriteStats(
        process_id="pid1",
        timestamp=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(60))

    # Simulate a process restart by resetting the stats collector.
    with stats_test_utils.FakeStatsContext(_CreateFakeStatsCollector()):
      self.stats_store.WriteStats(
          process_id="pid1",
          timestamp=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(90))

      # The counter was reset after the 60-second write, so we get the
      # following time series:
      # 1970-01-01 00:00:00    0
      # 1970-01-01 00:00:30    1
      # 1970-01-01 00:01:00    2
      # 1970-01-01 00:01:30    0
      stats_data = self.stats_store.ReadStats(process_id="pid1")
      query = stats_store.StatsStoreDataQuery(stats_data)

      ts = query.In("counter").TakeValue().ts

      self.assertAlmostEqual(ts.data[0][0], 0)
      self.assertAlmostEqual(ts.data[1][0], 1)
      self.assertAlmostEqual(ts.data[2][0], 2)
      self.assertAlmostEqual(ts.data[3][0], 0)

      # MakeIncreasing() detects the reset and offsets the values that follow
      # the reset point:
      # 1970-01-01 00:00:00    0
      # 1970-01-01 00:00:30    1
      # 1970-01-01 00:01:00    2
      # 1970-01-01 00:01:30    2
      ts = query.MakeIncreasing().ts

      self.assertAlmostEqual(ts.data[0][0], 0)
      self.assertAlmostEqual(ts.data[1][0], 1)
      self.assertAlmostEqual(ts.data[2][0], 2)
      self.assertAlmostEqual(ts.data[3][0], 2)
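The reset handling can be pictured as a running offset: whenever a value drops below its predecessor, the predecessor is folded into an offset that is added to every later sample. A minimal sketch of that idea (plain Python; the library's MakeIncreasing() may differ in detail):

def make_increasing(points):
  # Compensate for counter resets: a value smaller than its predecessor
  # marks a reset, and the predecessor is carried forward as an offset.
  offset, last, out = 0, None, []
  for value, ts in points:
    if last is not None and value < last:
      offset += last
    last = value
    out.append((value + offset, ts))
  return out

# Series from the test: 0, 1, 2, then a reset back to 0 at 90s.
assert make_increasing([(0, 0), (1, 30), (2, 60), (0, 90)]) == [
    (0, 0), (1, 30), (2, 60), (2, 90)]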
Example #18
  def testTakeDistributionSumUsesDistributionSumsToBuildTimeSeries(self):
    # Write test data.
    stats_collector_instance.Get().RecordEvent("events", 42)
    self.stats_store.WriteStats(
        process_id=self.process_id,
        timestamp=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(42))

    stats_collector_instance.Get().RecordEvent("events", 43)
    self.stats_store.WriteStats(
        process_id=self.process_id,
        timestamp=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(100))

    # Read data back.
    stats_data = self.stats_store.ReadStats(process_id=self.process_id)
    query = stats_store.StatsStoreDataQuery(stats_data)

    ts = query.In("events").TakeDistributionSum().ts
    self.assertListEqual(ts.data, [[42, 42 * 1e6], [85, 100 * 1e6]])
Example #19
  def testTakeValueUsesPlainValuesToBuildTimeSeries(self):
    # Write test data.
    stats_collector_instance.Get().IncrementCounter("counter")
    self.stats_store.WriteStats(
        process_id=self.process_id,
        timestamp=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(42))

    stats_collector_instance.Get().IncrementCounter("counter")
    self.stats_store.WriteStats(
        process_id=self.process_id,
        timestamp=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(100))

    # Read data back.
    stats_data = self.stats_store.ReadStats(process_id=self.process_id)

    # Get time series generated with TakeValue().
    query = stats_store.StatsStoreDataQuery(stats_data)
    ts = query.In("counter").TakeValue().ts
    self.assertListEqual(ts.data, [[1, 42 * 1e6], [2, 100 * 1e6]])
Example #20
  def testScaleAppliesScaleFunctionToSingleTimeSerie(self):
    # Write test data.
    stats_collector_instance.Get().IncrementCounter("counter")
    self.stats_store.WriteStats(
        process_id=self.process_id,
        timestamp=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(42))

    stats_collector_instance.Get().IncrementCounter("counter")
    self.stats_store.WriteStats(
        process_id=self.process_id,
        timestamp=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(100))

    # Read data back.
    stats_data = self.stats_store.ReadStats(process_id=self.process_id)

    # Get time series generated with TakeValue().
    query = stats_store.StatsStoreDataQuery(stats_data)
    ts = query.In("counter").TakeValue().Scale(3).ts

    self.assertListEqual(ts.data, [[3, 42 * 1e6], [6, 100 * 1e6]])
Example #21
    def testTakeDistributionCountUsesDistributionCountsToBuildTimeSeries(self):
        # Initialize and write test data.
        stats.STATS.RegisterEventMetric("events")

        stats.STATS.RecordEvent("events", 42)
        self.stats_store.WriteStats(
            process_id=self.process_id,
            timestamp=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(42))

        stats.STATS.RecordEvent("events", 43)
        self.stats_store.WriteStats(
            process_id=self.process_id,
            timestamp=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(100))

        # Read data back.
        stats_data = self.stats_store.ReadStats(process_id=self.process_id)
        query = stats_store.StatsStoreDataQuery(stats_data)

        ts = query.In("events").TakeDistributionCount().ts
        self.assertListEqual(ts.data, [[1, 42 * 1e6], [2, 100 * 1e6]])
Example #22
  def testNormalizeFillsGapsInTimeSeries(self):
    # Write test data.
    stats_collector_instance.Get().IncrementCounter("counter")
    self.stats_store.WriteStats(
        process_id=self.process_id,
        timestamp=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(0))

    stats_collector_instance.Get().IncrementCounter("counter")
    self.stats_store.WriteStats(
        process_id=self.process_id,
        timestamp=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(120))

    # Read data back.
    stats_data = self.stats_store.ReadStats(process_id=self.process_id)
    query = stats_store.StatsStoreDataQuery(stats_data)

    ts = query.In("counter").TakeValue().Normalize(
        rdfvalue.Duration("30s"), 0, rdfvalue.Duration("130s")).ts

    self.assertListEqual(ts.data, [[1.0, 0], [None, 30 * 1e6], [None, 60 * 1e6],
                                   [None, 90 * 1e6], [2.0, 120 * 1e6]])
Example #23
    def Handle(self, args, token):
        stats_store = aff4.FACTORY.Create(
            stats_store_lib.StatsStore.DATA_STORE_ROOT,
            aff4_type=stats_store_lib.StatsStore,
            mode="rw",
            token=token)

        process_ids = stats_store.ListUsedProcessIds()
        filtered_ids = [
            pid for pid in process_ids
            if pid.startswith(args.component.name.lower())
        ]

        start_time = args.start
        end_time = args.end

        if not end_time:
            end_time = rdfvalue.RDFDatetime.Now()

        if not start_time:
            start_time = end_time - rdfvalue.Duration("1h")

        # Run for a little extra time at the start. This improves the quality of the
        # first data points of counter metrics which don't appear in every interval.
        base_start_time = start_time
        # pylint: disable=g-no-augmented-assignment
        start_time = start_time - rdfvalue.Duration("10m")
        # pylint: enable=g-no-augmented-assignment

        if end_time <= start_time:
            raise ValueError("End time must be greater than start time.")

        result = ApiStatsStoreMetric(start=base_start_time,
                                     end=end_time,
                                     metric_name=args.metric_name)

        data = stats_store.MultiReadStats(process_ids=filtered_ids,
                                          metric_name=utils.SmartStr(
                                              args.metric_name),
                                          timestamp=(start_time, end_time))

        if not data:
            return result

        pid = next(iterkeys(data))
        metadata = stats_store.ReadMetadata(process_id=pid)
        metric_metadata = metadata.AsDict()[args.metric_name]

        query = stats_store_lib.StatsStoreDataQuery(data)
        query.In(args.component.name.lower() + ".*").In(args.metric_name)
        if metric_metadata.fields_defs:
            query.InAll()

        requested_duration = end_time - start_time
        if requested_duration >= rdfvalue.Duration("1d"):
            sampling_duration = rdfvalue.Duration("5m")
        elif requested_duration >= rdfvalue.Duration("6h"):
            sampling_duration = rdfvalue.Duration("1m")
        else:
            sampling_duration = rdfvalue.Duration("30s")

        if metric_metadata.metric_type == metric_metadata.MetricType.COUNTER:
            query.TakeValue().MakeIncreasing().Normalize(
                sampling_duration,
                start_time,
                end_time,
                mode=timeseries.NORMALIZE_MODE_COUNTER)
        elif metric_metadata.metric_type == metric_metadata.MetricType.EVENT:
            if args.distribution_handling_mode == "DH_SUM":
                query.TakeDistributionSum()
            elif args.distribution_handling_mode == "DH_COUNT":
                query.TakeDistributionCount()
            else:
                raise ValueError(
                    "Unexpected request.distribution_handling_mode "
                    "value: %s." % args.distribution_handling_mode)
            query.MakeIncreasing()
            query.Normalize(sampling_duration,
                            start_time,
                            end_time,
                            mode=timeseries.NORMALIZE_MODE_COUNTER)

        elif metric_metadata.metric_type == metric_metadata.MetricType.GAUGE:
            query.TakeValue().Normalize(sampling_duration, start_time,
                                        end_time)
        else:
            raise RuntimeError("Unsupported metric type.")

        if args.aggregation_mode == "AGG_SUM":
            query.AggregateViaSum()
        elif args.aggregation_mode == "AGG_MEAN":
            query.AggregateViaMean()
        elif args.aggregation_mode == "AGG_NONE":
            pass
        else:
            raise ValueError("Unexpected request.aggregation value: %s." %
                             args.aggregation)

        if (args.rate and metric_metadata.metric_type !=
                metric_metadata.MetricType.GAUGE):
            query.Rate()

        query.InTimeRange(base_start_time, end_time)

        for value, timestamp in query.ts.data:
            if value is not None:
                result.data_points.append(
                    ApiStatsStoreMetricDataPoint(timestamp=timestamp,
                                                 value=value))

        return result
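For a counter metric, the handler above composes the full query pipeline in a fixed order: select series, take raw values, undo counter resets, resample, optionally aggregate and differentiate, then clip off the 10-minute warm-up that was prepended to start_time. Condensed into one chain, with hypothetical placeholder names for the component regex and metric, it reads roughly as follows (a sketch of the composition above, not the handler's literal code):

query = stats_store_lib.StatsStoreDataQuery(data)
ts = (query
      .In("frontend.*")   # hypothetical component regex
      .In("counter")      # hypothetical counter metric name
      .TakeValue()
      .MakeIncreasing()
      .Normalize(sampling_duration, start_time, end_time,
                 mode=timeseries.NORMALIZE_MODE_COUNTER)
      .AggregateViaSum()  # only when args.aggregation_mode == "AGG_SUM"
      .Rate()             # only when args.rate is set
      .InTimeRange(base_start_time, end_time)
      .ts)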
Example #24
    def testNormalizeRaisesIfAppliedBeforeTakeMethod(self):
        stats_data = self.stats_store.ReadStats(process_id=self.process_id)
        query = stats_store.StatsStoreDataQuery(stats_data)
        with self.assertRaises(RuntimeError):
            query.In("counter").Normalize(15, 0, 60)