Code example #1
File: test_storage.py  Project: shushen/gnocchi
    def test_delete_old_measures(self):
        self.storage.add_measures(self.metric, [
            storage.Measure(datetime.datetime(2014, 1, 1, 12, 0, 1), 69),
            storage.Measure(datetime.datetime(2014, 1, 1, 12, 7, 31), 42),
            storage.Measure(datetime.datetime(2014, 1, 1, 12, 9, 31), 4),
            storage.Measure(datetime.datetime(2014, 1, 1, 12, 12, 45), 44),
        ])
        self.trigger_processing(self.storage, self.index)

        self.assertEqual([
            (utils.datetime_utc(2014, 1, 1), 86400.0, 39.75),
            (utils.datetime_utc(2014, 1, 1, 12), 3600.0, 39.75),
            (utils.datetime_utc(2014, 1, 1, 12), 300.0, 69.0),
            (utils.datetime_utc(2014, 1, 1, 12, 5), 300.0, 23.0),
            (utils.datetime_utc(2014, 1, 1, 12, 10), 300.0, 44.0),
        ], self.storage.get_measures(self.metric))

        # One year later…
        self.storage.add_measures(self.metric, [
            storage.Measure(datetime.datetime(2015, 1, 1, 12, 0, 1), 69),
        ])
        self.trigger_processing(self.storage, self.index)

        self.assertEqual([
            (utils.datetime_utc(2014, 1, 1), 86400.0, 39.75),
            (utils.datetime_utc(2015, 1, 1), 86400.0, 69),
            (utils.datetime_utc(2015, 1, 1, 12), 3600.0, 69),
            (utils.datetime_utc(2015, 1, 1, 12), 300.0, 69),
        ], self.storage.get_measures(self.metric))
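
The expected aggregates in the assertions above are plain means over the raw points; a small self-contained check (independent of gnocchi) of where 39.75 and 23.0 come from:

# Mean aggregation behind the expected values in the test above.
samples = [69, 42, 4, 44]
assert sum(samples) / float(len(samples)) == 39.75   # 1-day and 1-hour buckets
assert (42 + 4) / 2.0 == 23.0                        # 12:05-12:10 five-minute bucket (12:07:31 and 12:09:31)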
Code example #2
File: statsd.py  Project: amar266/gnocchi
 def treat_metric(self, metric_name, metric_type, value, sampling):
     metric_name += "|" + metric_type
     if metric_type == "ms":
         if sampling is not None:
             raise ValueError(
                 "Invalid sampling for ms: `%d`, should be none"
                 % sampling)
         self.times[metric_name] = storage.Measure(
             utils.dt_in_unix_ns(utils.utcnow()), value)
     elif metric_type == "g":
         if sampling is not None:
             raise ValueError(
                 "Invalid sampling for g: `%d`, should be none"
                 % sampling)
         self.gauges[metric_name] = storage.Measure(
             utils.dt_in_unix_ns(utils.utcnow()), value)
     elif metric_type == "c":
         sampling = 1 if sampling is None else sampling
         if metric_name in self.counters:
             current_value = self.counters[metric_name].value
         else:
             current_value = 0
         self.counters[metric_name] = storage.Measure(
             utils.dt_in_unix_ns(utils.utcnow()),
             current_value + (value * (1 / sampling)))
     # TODO(jd) Support "set" type
     # elif metric_type == "s":
     #     pass
     else:
         raise ValueError("Unknown metric type `%s'" % metric_type)
Code example #3
File: test_storage.py  Project: apolloliu/gnocchi-1
 def test_get_measure_unknown_aggregation(self):
     self.incoming.add_measures(self.metric, [
         storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 0, 1), 69),
         storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 7, 31), 42),
         storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 9, 31), 4),
         storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 12, 45), 44),
     ])
     self.assertRaises(storage.AggregationDoesNotExist,
                       self.storage.get_measures,
                       self.metric, aggregation='last')
Code example #4
File: test_cross_metric.py  Project: amar266/gnocchi
    def test_add_and_get_cross_metric_measures_with_holes(self):
        metric2, __ = self._create_metric()
        self.incoming.add_measures(self.metric, [
            storage.Measure(datetime64(2014, 1, 1, 12, 0, 1), 69),
            storage.Measure(datetime64(2014, 1, 1, 12, 7, 31), 42),
            storage.Measure(datetime64(2014, 1, 1, 12, 5, 31), 8),
            storage.Measure(datetime64(2014, 1, 1, 12, 9, 31), 4),
            storage.Measure(datetime64(2014, 1, 1, 12, 12, 45), 42),
        ])
        self.incoming.add_measures(metric2, [
            storage.Measure(datetime64(2014, 1, 1, 12, 0, 5), 9),
            storage.Measure(datetime64(2014, 1, 1, 12, 7, 31), 2),
            storage.Measure(datetime64(2014, 1, 1, 12, 9, 31), 6),
            storage.Measure(datetime64(2014, 1, 1, 12, 13, 10), 2),
        ])
        self.trigger_processing([str(self.metric.id), str(metric2.id)])

        values = cross_metric.get_cross_metric_measures(
            self.storage, [self.metric, metric2])
        self.assertEqual([
            (datetime64(2014, 1, 1, 0, 0, 0), numpy.timedelta64(1, 'D'), 18.875),
            (datetime64(2014, 1, 1, 12, 0, 0), numpy.timedelta64(1, 'h'), 18.875),
            (datetime64(2014, 1, 1, 12, 0, 0), numpy.timedelta64(5, 'm'), 39.0),
            (datetime64(2014, 1, 1, 12, 5, 0), numpy.timedelta64(5, 'm'), 11.0),
            (datetime64(2014, 1, 1, 12, 10, 0), numpy.timedelta64(5, 'm'), 22.0),
        ], values)
Code example #5
File: test_storage.py  Project: apolloliu/gnocchi-1
    def test_add_and_get_cross_metric_measures_with_holes(self):
        metric2, __ = self._create_metric()
        self.incoming.add_measures(self.metric, [
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 0, 1), 69),
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 7, 31), 42),
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 5, 31), 8),
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 9, 31), 4),
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 12, 45), 42),
        ])
        self.incoming.add_measures(metric2, [
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 0, 5), 9),
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 7, 31), 2),
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 9, 31), 6),
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 13, 10), 2),
        ])
        self.trigger_processing([str(self.metric.id), str(metric2.id)])

        values = self.storage.get_cross_metric_measures([self.metric, metric2])
        self.assertEqual([
            (utils.datetime_utc(2014, 1, 1, 0, 0, 0), 86400.0, 18.875),
            (utils.datetime_utc(2014, 1, 1, 12, 0, 0), 3600.0, 18.875),
            (utils.datetime_utc(2014, 1, 1, 12, 0, 0), 300.0, 39.0),
            (utils.datetime_utc(2014, 1, 1, 12, 5, 0), 300.0, 11.0),
            (utils.datetime_utc(2014, 1, 1, 12, 10, 0), 300.0, 22.0)
        ], values)
Code example #6
File: test_storage.py  Project: shushen/gnocchi
 def test_get_measure_unknown_aggregation(self):
     self.storage.add_measures(self.metric, [
         storage.Measure(datetime.datetime(2014, 1, 1, 12, 0, 1), 69),
         storage.Measure(datetime.datetime(2014, 1, 1, 12, 7, 31), 42),
         storage.Measure(datetime.datetime(2014, 1, 1, 12, 9, 31), 4),
         storage.Measure(datetime.datetime(2014, 1, 1, 12, 12, 45), 44),
     ])
     self.assertRaises(storage.AggregationDoesNotExist,
                       self.storage.get_measures,
                       self.metric,
                       aggregation='last')
Code example #7
File: test_storage.py  Project: kgiusti/gnocchi
    def test_delete_old_measures(self):
        self.incoming.add_measures(self.metric, [
            storage.Measure(datetime64(2014, 1, 1, 12, 0, 1), 69),
            storage.Measure(datetime64(2014, 1, 1, 12, 7, 31), 42),
            storage.Measure(datetime64(2014, 1, 1, 12, 9, 31), 4),
            storage.Measure(datetime64(2014, 1, 1, 12, 12, 45), 44),
        ])
        self.trigger_processing()

        self.assertEqual([
            (datetime64(2014, 1, 1), numpy.timedelta64(1, 'D'), 39.75),
            (datetime64(2014, 1, 1, 12), numpy.timedelta64(1, 'h'), 39.75),
            (datetime64(2014, 1, 1, 12), numpy.timedelta64(5, 'm'), 69.0),
            (datetime64(2014, 1, 1, 12, 5), numpy.timedelta64(5, 'm'), 23.0),
            (datetime64(2014, 1, 1, 12, 10), numpy.timedelta64(5, 'm'), 44.0),
        ], self.storage.get_measures(self.metric))

        # One year later…
        self.incoming.add_measures(self.metric, [
            storage.Measure(datetime64(2015, 1, 1, 12, 0, 1), 69),
        ])
        self.trigger_processing()

        self.assertEqual([
            (datetime64(2014, 1, 1), numpy.timedelta64(1, 'D'), 39.75),
            (datetime64(2015, 1, 1), numpy.timedelta64(1, 'D'), 69),
            (datetime64(2015, 1, 1, 12), numpy.timedelta64(1, 'h'), 69),
            (datetime64(2015, 1, 1, 12), numpy.timedelta64(5, 'm'), 69),
        ], self.storage.get_measures(self.metric))

        self.assertEqual(
            {
                carbonara.SplitKey(numpy.datetime64(1244160000, 's'),
                                   numpy.timedelta64(1, 'D')),
            },
            self.storage._list_split_keys_for_metric(
                self.metric, "mean", numpy.timedelta64(1, 'D')))
        self.assertEqual(
            {
                carbonara.SplitKey(numpy.datetime64(1412640000, 's'),
                                   numpy.timedelta64(1, 'h')),
            },
            self.storage._list_split_keys_for_metric(
                self.metric, "mean", numpy.timedelta64(1, 'h')))
        self.assertEqual(
            {
                carbonara.SplitKey(numpy.datetime64(1419120000, 's'),
                                   numpy.timedelta64(5, 'm')),
            },
            self.storage._list_split_keys_for_metric(
                self.metric, "mean", numpy.timedelta64(5, 'm')))
Code example #8
 def test_resize_policy(self):
     name = str(uuid.uuid4())
     ap = archive_policy.ArchivePolicy(name, 0, [(3, 5)])
     self.index.create_archive_policy(ap)
     m = storage.Metric(uuid.uuid4(), ap)
     self.index.create_metric(m.id, str(uuid.uuid4()), str(uuid.uuid4()),
                              name)
     self.storage.add_measures(m, [
         storage.Measure(datetime.datetime(2014, 1, 1, 12, 0, 0), 1),
         storage.Measure(datetime.datetime(2014, 1, 1, 12, 0, 5), 1),
         storage.Measure(datetime.datetime(2014, 1, 1, 12, 0, 10), 1),
     ])
     self.storage.process_background_tasks(self.index, sync=True)
     self.assertEqual([
         (utils.datetime_utc(2014, 1, 1, 12, 0, 0), 5.0, 1.0),
         (utils.datetime_utc(2014, 1, 1, 12, 0, 5), 5.0, 1.0),
         (utils.datetime_utc(2014, 1, 1, 12, 0, 10), 5.0, 1.0),
     ], self.storage.get_measures(m))
     # expand to more points
     self.index.update_archive_policy(
         name, [archive_policy.ArchivePolicyItem(granularity=5, points=6)])
     self.storage.add_measures(m, [
         storage.Measure(datetime.datetime(2014, 1, 1, 12, 0, 15), 1),
     ])
     self.storage.process_background_tasks(self.index, sync=True)
     self.assertEqual([
         (utils.datetime_utc(2014, 1, 1, 12, 0, 0), 5.0, 1.0),
         (utils.datetime_utc(2014, 1, 1, 12, 0, 5), 5.0, 1.0),
         (utils.datetime_utc(2014, 1, 1, 12, 0, 10), 5.0, 1.0),
         (utils.datetime_utc(2014, 1, 1, 12, 0, 15), 5.0, 1.0),
     ], self.storage.get_measures(m))
     # shrink timespan
     self.index.update_archive_policy(
         name, [archive_policy.ArchivePolicyItem(granularity=5, points=2)])
     # unchanged after update if no samples
     self.storage.process_background_tasks(self.index, sync=True)
     self.assertEqual([
         (utils.datetime_utc(2014, 1, 1, 12, 0, 0), 5.0, 1.0),
         (utils.datetime_utc(2014, 1, 1, 12, 0, 5), 5.0, 1.0),
         (utils.datetime_utc(2014, 1, 1, 12, 0, 10), 5.0, 1.0),
         (utils.datetime_utc(2014, 1, 1, 12, 0, 15), 5.0, 1.0),
     ], self.storage.get_measures(m))
     # drop points
     self.storage.add_measures(m, [
         storage.Measure(datetime.datetime(2014, 1, 1, 12, 0, 20), 1),
     ])
     self.storage.process_background_tasks(self.index, sync=True)
     self.assertEqual([
         (utils.datetime_utc(2014, 1, 1, 12, 0, 15), 5.0, 1.0),
         (utils.datetime_utc(2014, 1, 1, 12, 0, 20), 5.0, 1.0),
     ], self.storage.get_measures(m))
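
The retention behaviour exercised above follows directly from granularity × points; a quick arithmetic sketch (independent of gnocchi) of the timespan each policy variant keeps:

# Timespan covered by each archive policy variant used in the test above.
assert 5 * 3 == 15  # initial policy: 3 points at 5s granularity -> 15s of history
assert 5 * 6 == 30  # expanded policy: 6 points -> 30s of history
assert 5 * 2 == 10  # shrunk policy: 2 points -> only the last two aggregates survive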
Code example #9
File: test_incoming.py  Project: amar266/gnocchi
    def test_iter_on_sacks_to_process(self):
        if (self.incoming.iter_on_sacks_to_process ==
                incoming.IncomingDriver.iter_on_sacks_to_process):
            self.skipTest("Incoming driver does not implement "
                          "iter_on_sacks_to_process")

        found = threading.Event()

        sack_to_find = self.incoming.sack_for_metric(self.metric.id)

        def _iter_on_sacks_to_process():
            for sack in self.incoming.iter_on_sacks_to_process():
                self.assertIsInstance(sack, int)
                if sack == sack_to_find:
                    found.set()
                    break

        finder = threading.Thread(target=_iter_on_sacks_to_process)
        finder.daemon = True
        finder.start()

        # Try for 30s to get a notification about this sack
        for _ in range(30):
            if found.wait(timeout=1):
                break
            # NOTE(jd) Retry to send measures. It cannot be done only once as
            # there might be a race condition between the threads
            self.incoming.add_measures(self.metric, [
                storage.Measure(numpy.datetime64("2014-01-01 12:00:01"), 69),
            ])
        else:
            self.fail("Notification for metric not received")
Code example #10
File: test_storage.py  Project: shushen/gnocchi
 def test_delete_nonempty_metric(self):
     self.storage.add_measures(self.metric, [
         storage.Measure(datetime.datetime(2014, 1, 1, 12, 0, 1), 69),
     ])
     self.trigger_processing(self.storage, self.index)
     self.storage.delete_metric(self.metric)
     self.trigger_processing(self.storage, self.index)
Code example #11
 def test_delete_nonempty_metric(self):
     self.storage.add_measures(self.metric, [
         storage.Measure(datetime.datetime(2014, 1, 1, 12, 0, 1), 69),
     ])
     self.storage.process_background_tasks(self.index, sync=True)
     self.storage.delete_metric(self.metric)
     self.storage.process_background_tasks(self.index, sync=True)
Code example #12
File: test_storage.py  Project: apolloliu/gnocchi-1
    def test_add_measures_big(self):
        m, __ = self._create_metric('high')
        self.incoming.add_measures(m, [
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, i, j), 100)
            for i in six.moves.range(0, 60) for j in six.moves.range(0, 60)])
        self.trigger_processing([str(m.id)])

        self.assertEqual(3661, len(self.storage.get_measures(m)))
Code example #13
 def todo(metric):
     for _ in six.moves.range(conf.batch_of_measures):
         measures = [
             storage.Measure(utils.to_timestamp(datetime.datetime.now()),
                             random.random())
             for __ in six.moves.range(conf.measures_per_batch)
         ]
         s.add_measures(metric, measures)
Code example #14
 def test_delete_expunge_metric(self):
     self.incoming.add_measures(self.metric, [
         storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 0, 1), 69),
     ])
     self.trigger_processing()
     self.index.delete_metric(self.metric.id)
     self.storage.expunge_metrics(self.incoming, self.index, sync=True)
     self.assertRaises(indexer.NoSuchMetric, self.index.delete_metric,
                       self.metric.id)
Code example #15
    def test_add_measures_big(self):
        m, __ = self._create_metric('high')
        self.storage.add_measures(m, [
            storage.Measure(datetime.datetime(2014, 1, 1, 12, i, j), 100)
            for i in six.moves.range(0, 60) for j in six.moves.range(0, 60)
        ])
        self.storage.process_background_tasks(self.index, sync=True)

        self.assertEqual(3661, len(self.storage.get_measures(m)))
Code example #16
File: test_storage.py  Project: shushen/gnocchi
    def test_add_measures_big(self):
        m, __ = self._create_metric('high')
        self.storage.add_measures(m, [
            storage.Measure(datetime.datetime(2014, 1, 1, 12, i, j), 100)
            for i in six.moves.range(0, 60) for j in six.moves.range(0, 60)
        ])
        self.trigger_processing(self.storage, self.index)

        self.assertEqual(3661, len(self.storage.get_measures(m)))
Code example #17
File: test_storage.py  Project: shushen/gnocchi
 def test_delete_expunge_metric(self):
     self.storage.add_measures(self.metric, [
         storage.Measure(datetime.datetime(2014, 1, 1, 12, 0, 1), 69),
     ])
     self.trigger_processing(self.storage, self.index)
     self.index.delete_metric(self.metric.id)
     self.storage.expunge_metrics(self.index, sync=True)
     self.assertRaises(indexer.NoSuchMetric, self.index.delete_metric,
                       self.metric.id)
Code example #18
File: test_storage.py  Project: kgiusti/gnocchi
    def test_updated_measures(self):
        self.incoming.add_measures(self.metric, [
            storage.Measure(datetime64(2014, 1, 1, 12, 0, 1), 69),
            storage.Measure(datetime64(2014, 1, 1, 12, 7, 31), 42),
        ])
        self.trigger_processing()

        self.assertEqual([
            (datetime64(2014, 1, 1), numpy.timedelta64(1, 'D'), 55.5),
            (datetime64(2014, 1, 1, 12), numpy.timedelta64(1, 'h'), 55.5),
            (datetime64(2014, 1, 1, 12), numpy.timedelta64(5, 'm'), 69),
            (datetime64(2014, 1, 1, 12, 5), numpy.timedelta64(5, 'm'), 42.0),
        ], self.storage.get_measures(self.metric))

        self.incoming.add_measures(self.metric, [
            storage.Measure(datetime64(2014, 1, 1, 12, 9, 31), 4),
            storage.Measure(datetime64(2014, 1, 1, 12, 12, 45), 44),
        ])
        self.trigger_processing()

        self.assertEqual([
            (datetime64(2014, 1, 1), numpy.timedelta64(1, 'D'), 39.75),
            (datetime64(2014, 1, 1, 12), numpy.timedelta64(1, 'h'), 39.75),
            (datetime64(2014, 1, 1, 12), numpy.timedelta64(5, 'm'), 69.0),
            (datetime64(2014, 1, 1, 12, 5), numpy.timedelta64(5, 'm'), 23.0),
            (datetime64(2014, 1, 1, 12, 10), numpy.timedelta64(5, 'm'), 44.0),
        ], self.storage.get_measures(self.metric))

        self.assertEqual([
            (datetime64(2014, 1, 1), numpy.timedelta64(1, 'D'), 69),
            (datetime64(2014, 1, 1, 12), numpy.timedelta64(1, 'h'), 69.0),
            (datetime64(2014, 1, 1, 12), numpy.timedelta64(5, 'm'), 69.0),
            (datetime64(2014, 1, 1, 12, 5), numpy.timedelta64(5, 'm'), 42.0),
            (datetime64(2014, 1, 1, 12, 10), numpy.timedelta64(5, 'm'), 44.0),
        ], self.storage.get_measures(self.metric, aggregation='max'))

        self.assertEqual([
            (datetime64(2014, 1, 1), numpy.timedelta64(1, 'D'), 4),
            (datetime64(2014, 1, 1, 12), numpy.timedelta64(1, 'h'), 4),
            (datetime64(2014, 1, 1, 12), numpy.timedelta64(5, 'm'), 69.0),
            (datetime64(2014, 1, 1, 12, 5), numpy.timedelta64(5, 'm'), 4.0),
            (datetime64(2014, 1, 1, 12, 10), numpy.timedelta64(5, 'm'), 44.0),
        ], self.storage.get_measures(self.metric, aggregation='min'))
Code example #19
File: test_storage.py  Project: apolloliu/gnocchi-1
    def test_add_measures_update_subset(self):
        m, m_sql = self._create_metric('medium')
        measures = [
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 6, i, j, 0), 100)
            for i in six.moves.range(2) for j in six.moves.range(0, 60, 2)]
        self.incoming.add_measures(m, measures)
        self.trigger_processing([str(m.id)])

        # add measure to end, in same aggregate time as last point.
        new_point = utils.dt_to_unix_ns(2014, 1, 6, 1, 58, 1)
        self.incoming.add_measures(
            m, [storage.Measure(new_point, 100)])

        with mock.patch.object(self.incoming, 'add_measures') as c:
            self.trigger_processing([str(m.id)])
        for __, args, __ in c.mock_calls:
            self.assertEqual(
                list(args[3])[0][0], carbonara.round_timestamp(
                    new_point, args[1].granularity * 10e8))
Code example #20
 def test_measures_reporting(self):
     m2, __ = self._create_metric('medium')
     for i in six.moves.range(60):
         self.incoming.add_measures(self.metric, [
             storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 0, i), 69),
         ])
         self.incoming.add_measures(m2, [
             storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 0, i), 69),
         ])
     report = self.incoming.measures_report(True)
     self.assertIsInstance(report, dict)
     self.assertEqual(2, report['summary']['metrics'])
     self.assertEqual(120, report['summary']['measures'])
     self.assertIn('details', report)
     self.assertIsInstance(report['details'], dict)
     report = self.incoming.measures_report(False)
     self.assertIsInstance(report, dict)
     self.assertEqual(2, report['summary']['metrics'])
     self.assertEqual(120, report['summary']['measures'])
Code example #21
    def test_updated_measures(self):
        self.incoming.add_measures(self.metric, [
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 0, 1), 69),
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 7, 31), 42),
        ])
        self.trigger_processing()

        self.assertEqual([
            (utils.datetime_utc(2014, 1, 1), 86400.0, 55.5),
            (utils.datetime_utc(2014, 1, 1, 12), 3600.0, 55.5),
            (utils.datetime_utc(2014, 1, 1, 12), 300.0, 69),
            (utils.datetime_utc(2014, 1, 1, 12, 5), 300.0, 42.0),
        ], self.storage.get_measures(self.metric))

        self.incoming.add_measures(self.metric, [
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 9, 31), 4),
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 12, 45), 44),
        ])
        self.trigger_processing()

        self.assertEqual([
            (utils.datetime_utc(2014, 1, 1), 86400.0, 39.75),
            (utils.datetime_utc(2014, 1, 1, 12), 3600.0, 39.75),
            (utils.datetime_utc(2014, 1, 1, 12), 300.0, 69.0),
            (utils.datetime_utc(2014, 1, 1, 12, 5), 300.0, 23.0),
            (utils.datetime_utc(2014, 1, 1, 12, 10), 300.0, 44.0),
        ], self.storage.get_measures(self.metric))

        self.assertEqual([
            (utils.datetime_utc(2014, 1, 1), 86400.0, 69),
            (utils.datetime_utc(2014, 1, 1, 12), 3600.0, 69.0),
            (utils.datetime_utc(2014, 1, 1, 12), 300.0, 69.0),
            (utils.datetime_utc(2014, 1, 1, 12, 5), 300.0, 42.0),
            (utils.datetime_utc(2014, 1, 1, 12, 10), 300.0, 44.0),
        ], self.storage.get_measures(self.metric, aggregation='max'))

        self.assertEqual([
            (utils.datetime_utc(2014, 1, 1), 86400.0, 4),
            (utils.datetime_utc(2014, 1, 1, 12), 3600.0, 4),
            (utils.datetime_utc(2014, 1, 1, 12), 300.0, 69.0),
            (utils.datetime_utc(2014, 1, 1, 12, 5), 300.0, 4.0),
            (utils.datetime_utc(2014, 1, 1, 12, 10), 300.0, 44.0),
        ], self.storage.get_measures(self.metric, aggregation='min'))
Code example #22
File: test_storage.py  Project: kgiusti/gnocchi
    def test_search_value(self):
        metric2, __ = self._create_metric()
        self.incoming.add_measures(self.metric, [
            storage.Measure(datetime64(2014, 1, 1, 12, 0, 1), 69),
            storage.Measure(datetime64(2014, 1, 1, 12, 7, 31), 42),
            storage.Measure(datetime64(2014, 1, 1, 12, 5, 31), 8),
            storage.Measure(datetime64(2014, 1, 1, 12, 9, 31), 4),
            storage.Measure(datetime64(2014, 1, 1, 12, 12, 45), 42),
        ])

        self.incoming.add_measures(metric2, [
            storage.Measure(datetime64(2014, 1, 1, 12, 0, 5), 9),
            storage.Measure(datetime64(2014, 1, 1, 12, 7, 31), 2),
            storage.Measure(datetime64(2014, 1, 1, 12, 9, 31), 6),
            storage.Measure(datetime64(2014, 1, 1, 12, 13, 10), 2),
        ])
        self.trigger_processing([str(self.metric.id), str(metric2.id)])

        self.assertEqual(
            {
                metric2: [],
                self.metric: [
                    (datetime64(2014, 1, 1), numpy.timedelta64(1, 'D'), 33),
                    (datetime64(2014, 1, 1, 12), numpy.timedelta64(1, 'h'), 33),
                    (datetime64(2014, 1, 1, 12), numpy.timedelta64(5, 'm'), 69),
                    (datetime64(2014, 1, 1, 12, 10), numpy.timedelta64(5, 'm'), 42),
                ]
            }, self.storage.search_value([metric2, self.metric], {u"≥": 30}))

        self.assertEqual(
            {
                metric2: [],
                self.metric: []
            },
            self.storage.search_value(
                [metric2, self.metric],
                {u"∧": [{u"eq": 100}, {u"≠": 50}]}))
Code example #23
 def test_delete_nonempty_metric_unprocessed(self):
     self.incoming.add_measures(self.metric, [
         storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 0, 1), 69),
     ])
     self.index.delete_metric(self.metric.id)
     self.trigger_processing()
     __, __, details = self.incoming._build_report(True)
     self.assertIn(str(self.metric.id), details)
     self.storage.expunge_metrics(self.incoming, self.index, sync=True)
     __, __, details = self.incoming._build_report(True)
     self.assertNotIn(str(self.metric.id), details)
Code example #24
 def test_delete_nonempty_metric(self):
     self.incoming.add_measures(self.metric, [
         storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 0, 1), 69),
     ])
     self.trigger_processing()
     self.storage.delete_metric(self.incoming, self.metric, sync=True)
     self.trigger_processing()
     self.assertEqual([], self.storage.get_measures(self.metric))
     self.assertRaises(storage.MetricDoesNotExist,
                       self.storage._get_unaggregated_timeserie,
                       self.metric)
Code example #25
 def test_list_metric_with_measures_to_process(self):
     metrics = tests_utils.list_all_incoming_metrics(self.incoming)
     self.assertEqual(set(), metrics)
     self.incoming.add_measures(self.metric, [
         storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 0, 1), 69),
     ])
     metrics = tests_utils.list_all_incoming_metrics(self.incoming)
     self.assertEqual(set([str(self.metric.id)]), metrics)
     self.trigger_processing()
     metrics = tests_utils.list_all_incoming_metrics(self.incoming)
     self.assertEqual(set([]), metrics)
Code example #26
File: test_storage.py  Project: shushen/gnocchi
    def test_add_measures_update_subset(self):
        m, m_sql = self._create_metric('medium')
        measures = [
            storage.Measure(datetime.datetime(2014, 1, 6, i, j, 0), 100)
            for i in six.moves.range(2) for j in six.moves.range(0, 60, 2)
        ]
        self.storage.add_measures(m, measures)
        self.trigger_processing(self.storage, self.index)

        # add measure to end, in same aggregate time as last point.
        new_point = datetime.datetime(2014, 1, 6, 1, 58, 1)
        self.storage.add_measures(m, [storage.Measure(new_point, 100)])

        with mock.patch.object(self.storage, '_add_measures') as c:
            self.trigger_processing(self.storage, self.index)
        for __, args, __ in c.mock_calls:
            self.assertEqual(
                args[3].first,
                carbonara.TimeSerie.round_timestamp(
                    new_point, args[1].granularity * 10e8))
Code example #27
    def test_search_value(self):
        metric2, __ = self._create_metric()
        self.incoming.add_measures(self.metric, [
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 0, 1), 69),
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 7, 31), 42),
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 5, 31), 8),
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 9, 31), 4),
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 12, 45), 42),
        ])

        self.incoming.add_measures(metric2, [
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 0, 5), 9),
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 7, 31), 2),
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 9, 31), 6),
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 13, 10), 2),
        ])
        self.trigger_processing([str(self.metric.id), str(metric2.id)])

        self.assertEqual(
            {
                metric2: [],
                self.metric: [(utils.datetime_utc(2014, 1, 1), 86400, 33),
                              (utils.datetime_utc(2014, 1, 1, 12), 3600, 33),
                              (utils.datetime_utc(2014, 1, 1, 12), 300, 69),
                              (utils.datetime_utc(2014, 1, 1, 12, 10), 300, 42)
                              ]
            }, self.storage.search_value([metric2, self.metric], {u"≥": 30}))

        self.assertEqual(
            {
                metric2: [],
                self.metric: []
            },
            self.storage.search_value(
                [metric2, self.metric],
                {u"∧": [{u"eq": 100}, {u"≠": 50}]}))
Code example #28
    def test_search_value(self):
        metric2, __ = self._create_metric()
        self.storage.add_measures(self.metric, [
            storage.Measure(datetime.datetime(2014, 1, 1, 12, 0, 1), 69),
            storage.Measure(datetime.datetime(2014, 1, 1, 12, 7, 31), 42),
            storage.Measure(datetime.datetime(2014, 1, 1, 12, 5, 31), 8),
            storage.Measure(datetime.datetime(2014, 1, 1, 12, 9, 31), 4),
            storage.Measure(datetime.datetime(2014, 1, 1, 12, 12, 45), 42),
        ])

        self.storage.add_measures(metric2, [
            storage.Measure(datetime.datetime(2014, 1, 1, 12, 0, 5), 9),
            storage.Measure(datetime.datetime(2014, 1, 1, 12, 7, 31), 2),
            storage.Measure(datetime.datetime(2014, 1, 1, 12, 9, 31), 6),
            storage.Measure(datetime.datetime(2014, 1, 1, 12, 13, 10), 2),
        ])
        self.storage.process_background_tasks(self.index, sync=True)

        self.assertEqual(
            {
                metric2: [],
                self.metric: [(utils.datetime_utc(2014, 1, 1), 86400, 33),
                              (utils.datetime_utc(2014, 1, 1, 12), 3600, 33),
                              (utils.datetime_utc(2014, 1, 1, 12), 300, 69),
                              (utils.datetime_utc(2014, 1, 1, 12, 10), 300, 42)
                              ]
            }, self.storage.search_value([metric2, self.metric], {u"≥": 30}))

        self.assertEqual(
            {
                metric2: [],
                self.metric: []
            },
            self.storage.search_value(
                [metric2, self.metric],
                {u"∧": [{u"eq": 100}, {u"≠": 50}]}))
Code example #29
    def test_add_measures_big_update_subset(self):
        m, m_sql = self._create_metric('medium')
        measures = [
            storage.Measure(datetime.datetime(2014, 1, i, j, 0, 0), 100)
            for i in six.moves.range(1, 6) for j in six.moves.range(0, 24)
        ]
        measures.append(
            storage.Measure(datetime.datetime(2014, 1, 6, 0, 0, 0), 100))
        self.storage.add_measures(m, measures)
        self.storage.process_background_tasks(self.index, sync=True)

        self.storage.add_measures(
            m, [storage.Measure(datetime.datetime(2014, 1, 6, 1, 0, 0), 100)])

        with mock.patch.object(self.storage, '_store_metric_measures') as c:
            self.storage.process_background_tasks(self.index, sync=True)
        count = 0
        for call in c.mock_calls:
            if mock.call(m_sql, mock.ANY, 'mean', 3600.0, mock.ANY) == call:
                count += 1
        self.assertEqual(1, count)
Code example #30
    def test_rewrite_measures_corruption_bad_data(self):
        # Create an archive policy that spans on several splits. Each split
        # being 3600 points, let's go for 36k points so we have 10 splits.
        apname = str(uuid.uuid4())
        ap = archive_policy.ArchivePolicy(apname, 0, [(36000, 60)])
        self.index.create_archive_policy(ap)
        self.metric = storage.Metric(uuid.uuid4(), ap)
        self.index.create_metric(self.metric.id, str(uuid.uuid4()), apname)

        # First store some points scattered across different splits
        self.incoming.add_measures(self.metric, [
            storage.Measure(utils.dt_to_unix_ns(2016, 1, 1, 12, 0, 1), 69),
            storage.Measure(utils.dt_to_unix_ns(2016, 1, 2, 13, 7, 31), 42),
            storage.Measure(utils.dt_to_unix_ns(2016, 1, 4, 14, 9, 31), 4),
            storage.Measure(utils.dt_to_unix_ns(2016, 1, 6, 15, 12, 45), 44),
        ])
        self.trigger_processing()

        splits = {'1451520000.0', '1451736000.0', '1451952000.0'}
        self.assertEqual(
            splits,
            self.storage._list_split_keys_for_metric(self.metric, "mean",
                                                     60.0))

        if self.storage.WRITE_FULL:
            assertCompressedIfWriteFull = self.assertTrue
        else:
            assertCompressedIfWriteFull = self.assertFalse

        data = self.storage._get_measures(self.metric, '1451520000.0', "mean",
                                          60.0)
        self.assertTrue(carbonara.AggregatedTimeSerie.is_compressed(data))
        data = self.storage._get_measures(self.metric, '1451736000.0', "mean",
                                          60.0)
        self.assertTrue(carbonara.AggregatedTimeSerie.is_compressed(data))
        data = self.storage._get_measures(self.metric, '1451952000.0', "mean",
                                          60.0)
        assertCompressedIfWriteFull(
            carbonara.AggregatedTimeSerie.is_compressed(data))

        self.assertEqual([
            (utils.datetime_utc(2016, 1, 1, 12), 60.0, 69),
            (utils.datetime_utc(2016, 1, 2, 13, 7), 60.0, 42),
            (utils.datetime_utc(2016, 1, 4, 14, 9), 60.0, 4),
            (utils.datetime_utc(2016, 1, 6, 15, 12), 60.0, 44),
        ], self.storage.get_measures(self.metric, granularity=60.0))

        # Test what happens if we write garbage
        self.storage._store_metric_measures(self.metric, '1451952000.0',
                                            "mean", 60.0, b"oh really?")

        # Now store brand new points that should force a rewrite of one of the
        # splits (keep in mind the back window size is one hour here). We move
        # the BoundTimeSerie processing timeserie far away from its current
        # range.
        self.incoming.add_measures(self.metric, [
            storage.Measure(utils.dt_to_unix_ns(2016, 1, 10, 16, 18, 45), 45),
            storage.Measure(utils.dt_to_unix_ns(2016, 1, 10, 17, 12, 45), 46),
        ])
        self.trigger_processing()