Esempio n. 1
0
 def test_get_measure_unknown_aggregation(self):
     """Requesting an aggregation the metric does not provide must fail.

     Measures are queued but get_measures() is called with
     aggregation='last', which is expected to raise
     AggregationDoesNotExist rather than return anything.
     """
     self.incoming.add_measures(self.metric, [
         storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 0, 1), 69),
         storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 7, 31), 42),
         storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 9, 31), 4),
         storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 12, 45), 44),
     ])
     self.assertRaises(storage.AggregationDoesNotExist,
                       self.storage.get_measures,
                       self.metric, aggregation='last')
Esempio n. 2
0
    def test_add_and_get_cross_metric_measures_with_holes(self):
        """Cross-metric aggregation with metrics that miss some points.

        Two metrics receive measures whose timestamps do not line up
        one-to-one; the default cross-metric aggregation must still
        produce the expected values at every granularity.
        """
        metric2, __ = self._create_metric()
        self.incoming.add_measures(self.metric, [
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 0, 1), 69),
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 7, 31), 42),
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 5, 31), 8),
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 9, 31), 4),
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 12, 45), 42),
        ])
        self.incoming.add_measures(metric2, [
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 0, 5), 9),
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 7, 31), 2),
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 9, 31), 6),
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 13, 10), 2),
        ])
        self.trigger_processing([str(self.metric.id), str(metric2.id)])

        # Default aggregation, no time bounds: one row per (timestamp,
        # granularity) pair, coarsest granularity first.
        values = self.storage.get_cross_metric_measures([self.metric, metric2])
        self.assertEqual([
            (utils.datetime_utc(2014, 1, 1, 0, 0, 0), 86400.0, 18.875),
            (utils.datetime_utc(2014, 1, 1, 12, 0, 0), 3600.0, 18.875),
            (utils.datetime_utc(2014, 1, 1, 12, 0, 0), 300.0, 39.0),
            (utils.datetime_utc(2014, 1, 1, 12, 5, 0), 300.0, 11.0),
            (utils.datetime_utc(2014, 1, 1, 12, 10, 0), 300.0, 22.0)
        ], values)
Esempio n. 3
0
    def test_add_measures_big(self):
        """Ingest one measure per second for a full hour.

        Uses the 'high' archive policy and checks that, once processed,
        get_measures() returns 3661 aggregated points over all of the
        policy's granularities.
        """
        metric, __ = self._create_metric('high')
        one_hour_of_measures = [
            storage.Measure(
                utils.dt_to_unix_ns(2014, 1, 1, 12, minute, second), 100)
            for minute in six.moves.range(0, 60)
            for second in six.moves.range(0, 60)
        ]
        self.incoming.add_measures(metric, one_hour_of_measures)
        self.trigger_processing([str(metric.id)])

        aggregated = self.storage.get_measures(metric)
        self.assertEqual(3661, len(aggregated))
Esempio n. 4
0
 def test_delete_expunge_metric(self):
     """Expunging a deleted metric removes it from the indexer for good.

     After delete_metric() plus a synchronous expunge_metrics(), a
     second delete_metric() must raise NoSuchMetric.
     """
     self.incoming.add_measures(self.metric, [
         storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 0, 1), 69),
     ])
     self.trigger_processing()
     self.index.delete_metric(self.metric.id)
     self.storage.expunge_metrics(self.incoming, self.index, sync=True)
     self.assertRaises(indexer.NoSuchMetric, self.index.delete_metric,
                       self.metric.id)
Esempio n. 5
0
    def test_updated_measures(self):
        """Later batches update already-computed aggregates.

        A first batch is processed and checked, then a second batch is
        added: the mean for the periods covering both batches must be
        recomputed, and max/min aggregates must reflect all points.
        """
        self.incoming.add_measures(self.metric, [
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 0, 1), 69),
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 7, 31), 42),
        ])
        self.trigger_processing()

        # Aggregates after the first batch only (mean of 69 and 42 is 55.5).
        self.assertEqual([
            (utils.datetime_utc(2014, 1, 1), 86400.0, 55.5),
            (utils.datetime_utc(2014, 1, 1, 12), 3600.0, 55.5),
            (utils.datetime_utc(2014, 1, 1, 12), 300.0, 69),
            (utils.datetime_utc(2014, 1, 1, 12, 5), 300.0, 42.0),
        ], self.storage.get_measures(self.metric))

        # Second batch lands in already-aggregated periods plus a new one.
        self.incoming.add_measures(self.metric, [
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 9, 31), 4),
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 12, 45), 44),
        ])
        self.trigger_processing()

        # Day/hour means now cover all four points (mean 39.75); the
        # 12:05 bucket mean changes from 42.0 to 23.0.
        self.assertEqual([
            (utils.datetime_utc(2014, 1, 1), 86400.0, 39.75),
            (utils.datetime_utc(2014, 1, 1, 12), 3600.0, 39.75),
            (utils.datetime_utc(2014, 1, 1, 12), 300.0, 69.0),
            (utils.datetime_utc(2014, 1, 1, 12, 5), 300.0, 23.0),
            (utils.datetime_utc(2014, 1, 1, 12, 10), 300.0, 44.0),
        ], self.storage.get_measures(self.metric))

        # 'max' aggregation across the same periods.
        self.assertEqual([
            (utils.datetime_utc(2014, 1, 1), 86400.0, 69),
            (utils.datetime_utc(2014, 1, 1, 12), 3600.0, 69.0),
            (utils.datetime_utc(2014, 1, 1, 12), 300.0, 69.0),
            (utils.datetime_utc(2014, 1, 1, 12, 5), 300.0, 42.0),
            (utils.datetime_utc(2014, 1, 1, 12, 10), 300.0, 44.0),
        ], self.storage.get_measures(self.metric, aggregation='max'))

        # 'min' aggregation across the same periods.
        self.assertEqual([
            (utils.datetime_utc(2014, 1, 1), 86400.0, 4),
            (utils.datetime_utc(2014, 1, 1, 12), 3600.0, 4),
            (utils.datetime_utc(2014, 1, 1, 12), 300.0, 69.0),
            (utils.datetime_utc(2014, 1, 1, 12, 5), 300.0, 4.0),
            (utils.datetime_utc(2014, 1, 1, 12, 10), 300.0, 44.0),
        ], self.storage.get_measures(self.metric, aggregation='min'))
Esempio n. 6
0
 def test_measures_reporting(self):
     """measures_report() counts backlog metrics and measures.

     60 single-measure batches are queued for each of two metrics, so
     the summary must show 2 metrics and 120 pending measures.  The
     per-metric 'details' section is present only when requested via
     the boolean argument.
     """
     m2, __ = self._create_metric('medium')
     for i in six.moves.range(60):
         self.incoming.add_measures(self.metric, [
             storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 0, i), 69),
         ])
         self.incoming.add_measures(m2, [
             storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 0, i), 69),
         ])
     report = self.incoming.measures_report(True)
     self.assertIsInstance(report, dict)
     self.assertEqual(2, report['summary']['metrics'])
     self.assertEqual(120, report['summary']['measures'])
     self.assertIn('details', report)
     self.assertIsInstance(report['details'], dict)
     report = self.incoming.measures_report(False)
     self.assertIsInstance(report, dict)
     self.assertEqual(2, report['summary']['metrics'])
     self.assertEqual(120, report['summary']['measures'])
Esempio n. 7
0
    def test_add_measures_update_subset(self):
        """An update inside an existing period only re-sends that period.

        After an initial two-hour series is processed, one extra
        measure is added inside the last aggregate period.  Processing
        is run with incoming.add_measures mocked so that every measure
        re-queued during processing can be inspected: each re-sent
        point must start at the new point's timestamp rounded down to
        the target granularity.
        """
        m, m_sql = self._create_metric('medium')
        measures = [
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 6, i, j, 0), 100)
            for i in six.moves.range(2) for j in six.moves.range(0, 60, 2)]
        self.incoming.add_measures(m, measures)
        self.trigger_processing([str(m.id)])

        # add measure to end, in same aggregate time as last point.
        new_point = utils.dt_to_unix_ns(2014, 1, 6, 1, 58, 1)
        self.incoming.add_measures(
            m, [storage.Measure(new_point, 100)])

        with mock.patch.object(self.incoming, 'add_measures') as c:
            self.trigger_processing([str(m.id)])
        for __, args, __ in c.mock_calls:
            # args[1] is the archive-policy item (granularity in
            # seconds), args[3] the measures iterable; 1e9 converts the
            # granularity to nanoseconds to match new_point's unit.
            self.assertEqual(
                list(args[3])[0][0], carbonara.round_timestamp(
                    new_point, args[1].granularity * 1e9))
Esempio n. 8
0
 def test_delete_nonempty_metric_unprocessed(self):
     """Expunge cleans up measures of an already-deleted metric.

     The metric is removed from the indexer while it still has
     unprocessed measures: its id stays in the incoming report until
     expunge_metrics(sync=True) purges it.
     """
     self.incoming.add_measures(self.metric, [
         storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 0, 1), 69),
     ])
     self.index.delete_metric(self.metric.id)
     self.trigger_processing()
     __, __, details = self.incoming._build_report(True)
     self.assertIn(str(self.metric.id), details)
     self.storage.expunge_metrics(self.incoming, self.index, sync=True)
     __, __, details = self.incoming._build_report(True)
     self.assertNotIn(str(self.metric.id), details)
Esempio n. 9
0
 def test_delete_nonempty_metric(self):
     """delete_metric removes aggregates and the raw timeserie.

     After a synchronous delete, get_measures() returns an empty list
     and fetching the unaggregated timeserie raises
     MetricDoesNotExist.
     """
     self.incoming.add_measures(self.metric, [
         storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 0, 1), 69),
     ])
     self.trigger_processing()
     self.storage.delete_metric(self.incoming, self.metric, sync=True)
     self.trigger_processing()
     self.assertEqual([], self.storage.get_measures(self.metric))
     self.assertRaises(storage.MetricDoesNotExist,
                       self.storage._get_unaggregated_timeserie,
                       self.metric)
Esempio n. 10
0
 def test_list_metric_with_measures_to_process(self):
     """The incoming backlog listing tracks pending metric ids.

     The set is empty before any measure, contains the metric id while
     measures are queued, and is empty again after processing.
     """
     metrics = tests_utils.list_all_incoming_metrics(self.incoming)
     self.assertEqual(set(), metrics)
     self.incoming.add_measures(self.metric, [
         storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 0, 1), 69),
     ])
     metrics = tests_utils.list_all_incoming_metrics(self.incoming)
     self.assertEqual(set([str(self.metric.id)]), metrics)
     self.trigger_processing()
     metrics = tests_utils.list_all_incoming_metrics(self.incoming)
     self.assertEqual(set([]), metrics)
Esempio n. 11
0
    def test_search_value(self):
        """search_value filters aggregated points with value predicates.

        Exercises the Unicode operators of the search grammar:
        u"\u2265" (greater-or-equal), u"\u2227" (logical and) and
        u"\u2260" (not-equal), across two metrics at once.
        """
        metric2, __ = self._create_metric()
        self.incoming.add_measures(self.metric, [
            storage.Measure(utils.dt_to_unix_ns(
                2014,
                1,
                1,
                12,
                0,
                1,
            ), 69),
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 7, 31), 42),
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 5, 31), 8),
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 9, 31), 4),
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 12, 45), 42),
        ])

        self.incoming.add_measures(metric2, [
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 0, 5), 9),
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 7, 31), 2),
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 9, 31), 6),
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 13, 10), 2),
        ])
        self.trigger_processing([str(self.metric.id), str(metric2.id)])

        # Only self.metric has aggregated points >= 30; metric2 matches
        # nothing and must map to an empty list.
        self.assertEqual(
            {
                metric2: [],
                self.metric: [(utils.datetime_utc(2014, 1, 1), 86400, 33),
                              (utils.datetime_utc(2014, 1, 1, 12), 3600, 33),
                              (utils.datetime_utc(2014, 1, 1, 12), 300, 69),
                              (utils.datetime_utc(2014, 1, 1, 12, 10), 300, 42)
                              ]
            }, self.storage.search_value([metric2, self.metric], {u"≥": 30}))

        # No aggregated point equals 100, so the conjunction
        # (eq 100 AND != 50) matches nothing for either metric.
        self.assertEqual(
            {
                metric2: [],
                self.metric: []
            },
            self.storage.search_value([metric2, self.metric],
                                      {u"∧": [{
                                          u"eq": 100
                                      }, {
                                          u"≠": 50
                                      }]}))
Esempio n. 12
0
    def test_rewrite_measures_corruption_bad_data(self):
        """A split overwritten with garbage must not break reprocessing.

        Builds a multi-split archive, verifies the stored splits and
        their compression state, then replaces one split's payload with
        garbage bytes and pushes new measures: the subsequent
        processing run (which has to rewrite that split) must complete
        without raising.
        """
        # Create an archive policy that spans on several splits. Each split
        # being 3600 points, let's go for 36k points so we have 10 splits.
        apname = str(uuid.uuid4())
        ap = archive_policy.ArchivePolicy(apname, 0, [(36000, 60)])
        self.index.create_archive_policy(ap)
        self.metric = storage.Metric(uuid.uuid4(), ap)
        self.index.create_metric(self.metric.id, str(uuid.uuid4()), apname)

        # First store some points scattered across different splits
        self.incoming.add_measures(self.metric, [
            storage.Measure(utils.dt_to_unix_ns(2016, 1, 1, 12, 0, 1), 69),
            storage.Measure(utils.dt_to_unix_ns(2016, 1, 2, 13, 7, 31), 42),
            storage.Measure(utils.dt_to_unix_ns(2016, 1, 4, 14, 9, 31), 4),
            storage.Measure(utils.dt_to_unix_ns(2016, 1, 6, 15, 12, 45), 44),
        ])
        self.trigger_processing()

        # The four points span three split keys (epoch-second strings).
        splits = {'1451520000.0', '1451736000.0', '1451952000.0'}
        self.assertEqual(
            splits,
            self.storage._list_split_keys_for_metric(self.metric, "mean",
                                                     60.0))

        # Drivers that write splits in full always compress; others only
        # compress splits that are no longer mutable.
        if self.storage.WRITE_FULL:
            assertCompressedIfWriteFull = self.assertTrue
        else:
            assertCompressedIfWriteFull = self.assertFalse

        data = self.storage._get_measures(self.metric, '1451520000.0', "mean",
                                          60.0)
        self.assertTrue(carbonara.AggregatedTimeSerie.is_compressed(data))
        data = self.storage._get_measures(self.metric, '1451736000.0', "mean",
                                          60.0)
        self.assertTrue(carbonara.AggregatedTimeSerie.is_compressed(data))
        data = self.storage._get_measures(self.metric, '1451952000.0', "mean",
                                          60.0)
        assertCompressedIfWriteFull(
            carbonara.AggregatedTimeSerie.is_compressed(data))

        self.assertEqual([
            (utils.datetime_utc(2016, 1, 1, 12), 60.0, 69),
            (utils.datetime_utc(2016, 1, 2, 13, 7), 60.0, 42),
            (utils.datetime_utc(2016, 1, 4, 14, 9), 60.0, 4),
            (utils.datetime_utc(2016, 1, 6, 15, 12), 60.0, 44),
        ], self.storage.get_measures(self.metric, granularity=60.0))

        # Test what happens if we write garbage
        self.storage._store_metric_measures(self.metric, '1451952000.0',
                                            "mean", 60.0, b"oh really?")

        # Now store brand new points that should force a rewrite of one of the
        # split (keep in mind the back window size in one hour here). We move
        # the BoundTimeSerie processing timeserie far away from its current
        # range.
        self.incoming.add_measures(self.metric, [
            storage.Measure(utils.dt_to_unix_ns(2016, 1, 10, 16, 18, 45), 45),
            storage.Measure(utils.dt_to_unix_ns(2016, 1, 10, 17, 12, 45), 46),
        ])
        self.trigger_processing()
Esempio n. 13
0
    def test_delete_old_measures(self):
        """Aggregates that fall out of the retention window are dropped.

        After a point arriving one year later, the fine-grained 2014
        aggregates disappear and only the expected split key per
        granularity remains in storage.
        """
        self.incoming.add_measures(self.metric, [
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 0, 1), 69),
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 7, 31), 42),
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 9, 31), 4),
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 12, 45), 44),
        ])
        self.trigger_processing()

        self.assertEqual([
            (utils.datetime_utc(2014, 1, 1), 86400.0, 39.75),
            (utils.datetime_utc(2014, 1, 1, 12), 3600.0, 39.75),
            (utils.datetime_utc(2014, 1, 1, 12), 300.0, 69.0),
            (utils.datetime_utc(2014, 1, 1, 12, 5), 300.0, 23.0),
            (utils.datetime_utc(2014, 1, 1, 12, 10), 300.0, 44.0),
        ], self.storage.get_measures(self.metric))

        # One year later…
        self.incoming.add_measures(self.metric, [
            storage.Measure(utils.dt_to_unix_ns(2015, 1, 1, 12, 0, 1), 69),
        ])
        self.trigger_processing()

        # Only the daily 2014 aggregate survives; the 2014 hour/5-minute
        # points have been expired by the new, much later point.
        self.assertEqual([
            (utils.datetime_utc(2014, 1, 1), 86400.0, 39.75),
            (utils.datetime_utc(2015, 1, 1), 86400.0, 69),
            (utils.datetime_utc(2015, 1, 1, 12), 3600.0, 69),
            (utils.datetime_utc(2015, 1, 1, 12), 300.0, 69),
        ], self.storage.get_measures(self.metric))

        # A single split key remains per granularity after expiry.
        self.assertEqual({"1244160000.0"},
                         self.storage._list_split_keys_for_metric(
                             self.metric, "mean", 86400.0))
        self.assertEqual({"1412640000.0"},
                         self.storage._list_split_keys_for_metric(
                             self.metric, "mean", 3600.0))
        self.assertEqual({"1419120000.0"},
                         self.storage._list_split_keys_for_metric(
                             self.metric, "mean", 300.0))
Esempio n. 14
0
 def test_resize_policy(self):
     """Resizing an archive policy keeps or truncates aggregates.

     Starts with a 3-point / 5-second policy, grows it to 6 points
     (new point fits alongside the old ones), then shrinks it to 2
     points (only the latest two aggregates must survive).
     """
     policy_name = str(uuid.uuid4())
     policy = archive_policy.ArchivePolicy(policy_name, 0, [(3, 5)])
     self.index.create_archive_policy(policy)
     metric = self.index.create_metric(uuid.uuid4(), str(uuid.uuid4()),
                                       policy_name)
     metric = self.index.list_metrics(ids=[metric.id])[0]
     self.incoming.add_measures(metric, [
         storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 0, second), 1)
         for second in (0, 5, 10)
     ])
     self.trigger_processing([str(metric.id)])
     self.assertEqual([
         (utils.datetime_utc(2014, 1, 1, 12, 0, 0), 5.0, 1.0),
         (utils.datetime_utc(2014, 1, 1, 12, 0, 5), 5.0, 1.0),
         (utils.datetime_utc(2014, 1, 1, 12, 0, 10), 5.0, 1.0),
     ], self.storage.get_measures(metric))
     # Grow the policy to six points, then add one more measure.
     self.index.update_archive_policy(
         policy_name,
         [archive_policy.ArchivePolicyItem(granularity=5, points=6)])
     metric = self.index.list_metrics(ids=[metric.id])[0]
     self.incoming.add_measures(metric, [
         storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 0, 15), 1),
     ])
     self.trigger_processing([str(metric.id)])
     self.assertEqual([
         (utils.datetime_utc(2014, 1, 1, 12, 0, 0), 5.0, 1.0),
         (utils.datetime_utc(2014, 1, 1, 12, 0, 5), 5.0, 1.0),
         (utils.datetime_utc(2014, 1, 1, 12, 0, 10), 5.0, 1.0),
         (utils.datetime_utc(2014, 1, 1, 12, 0, 15), 5.0, 1.0),
     ], self.storage.get_measures(metric))
     # Shrink the timespan to two points: older aggregates must drop.
     self.index.update_archive_policy(
         policy_name,
         [archive_policy.ArchivePolicyItem(granularity=5, points=2)])
     metric = self.index.list_metrics(ids=[metric.id])[0]
     self.assertEqual([
         (utils.datetime_utc(2014, 1, 1, 12, 0, 10), 5.0, 1.0),
         (utils.datetime_utc(2014, 1, 1, 12, 0, 15), 5.0, 1.0),
     ], self.storage.get_measures(metric))
Esempio n. 15
0
    def test_corrupted_data(self):
        """Unreadable serialized timeseries must not abort processing.

        Both unserialize entry points are mocked to raise InvalidData
        while processing the second batch; processing must still
        succeed and aggregate the new point.  The expected values show
        the first point (12:00) is lost along with the corrupted data:
        the day-level mean is 1, not a mix of 69 and 1.
        """
        if not isinstance(self.storage, _carbonara.CarbonaraBasedStorage):
            self.skipTest("This driver is not based on Carbonara")

        self.incoming.add_measures(self.metric, [
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 0, 1), 69),
        ])
        self.trigger_processing()

        self.incoming.add_measures(self.metric, [
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 13, 0, 1), 1),
        ])

        with mock.patch('gnocchi.carbonara.AggregatedTimeSerie.unserialize',
                        side_effect=carbonara.InvalidData()):
            with mock.patch('gnocchi.carbonara.BoundTimeSerie.unserialize',
                            side_effect=carbonara.InvalidData()):
                self.trigger_processing()

        m = self.storage.get_measures(self.metric)
        self.assertIn((utils.datetime_utc(2014, 1, 1), 86400.0, 1), m)
        self.assertIn((utils.datetime_utc(2014, 1, 1, 13), 3600.0, 1), m)
        self.assertIn((utils.datetime_utc(2014, 1, 1, 13), 300.0, 1), m)
Esempio n. 16
0
    def test_add_measures_update_subset_split(self):
        """Updating one period rewrites only the affected split.

        After processing a two-hour series, one extra measure inside
        the last aggregate period is added; with _store_metric_measures
        mocked, the mean/60s combination must be written exactly once
        (only the split containing the updated period).
        """
        m, m_sql = self._create_metric('medium')
        measures = [
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 6, i, j, 0), 100)
            for i in six.moves.range(2) for j in six.moves.range(0, 60, 2)
        ]
        self.incoming.add_measures(m, measures)
        self.trigger_processing([str(m.id)])

        # add measure to end, in same aggregate time as last point.
        self.incoming.add_measures(
            m,
            [storage.Measure(utils.dt_to_unix_ns(2014, 1, 6, 1, 58, 1), 100)])

        with mock.patch.object(self.storage, '_store_metric_measures') as c:
            # should only resample last aggregate
            self.trigger_processing([str(m.id)])
        count = 0
        for call in c.mock_calls:
            # policy is 60 points and split is 48. should only update 2nd half
            args = call[1]
            if args[0] == m_sql and args[2] == 'mean' and args[3] == 60.0:
                count += 1
        self.assertEqual(1, count)
Esempio n. 17
0
 def test_get_cross_metric_measures_unknown_granularity(self):
     """Cross-metric query with an unavailable granularity must fail.

     Neither metric's archive policy provides a 12345.456s granularity,
     so GranularityDoesNotExist is expected.
     """
     metric2 = storage.Metric(uuid.uuid4(), self.archive_policies['low'])
     self.incoming.add_measures(self.metric, [
         storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 0, 1), 69),
         storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 7, 31), 42),
         storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 9, 31), 4),
         storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 12, 45), 44),
     ])
     self.incoming.add_measures(metric2, [
         storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 0, 1), 69),
         storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 7, 31), 42),
         storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 9, 31), 4),
         storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 12, 45), 44),
     ])
     self.assertRaises(storage.GranularityDoesNotExist,
                       self.storage.get_cross_metric_measures,
                       [self.metric, metric2],
                       granularity=12345.456)
Esempio n. 18
0
 def test_get_cross_metric_measures_unknown_aggregation(self):
     """Cross-metric query with an unavailable aggregation must fail.

     Requesting the 'last' aggregation is expected to raise
     AggregationDoesNotExist.
     """
     metric2 = storage.Metric(uuid.uuid4(), self.archive_policies['low'])
     self.incoming.add_measures(self.metric, [
         storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 0, 1), 69),
         storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 7, 31), 42),
         storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 9, 31), 4),
         storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 12, 45), 44),
     ])
     self.incoming.add_measures(metric2, [
         storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 0, 1), 69),
         storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 7, 31), 42),
         storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 9, 31), 4),
         storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 12, 45), 44),
     ])
     self.assertRaises(storage.AggregationDoesNotExist,
                       self.storage.get_cross_metric_measures,
                       [self.metric, metric2],
                       aggregation='last')
Esempio n. 19
0
    def test_add_and_get_cross_metric_measures_different_archives(self):
        """Metrics with incompatible archive policies can't be combined.

        metric2 uses the 'no_granularity_match' policy, so aggregating
        it together with self.metric must raise MetricUnaggregatable.
        """
        metric2 = storage.Metric(uuid.uuid4(),
                                 self.archive_policies['no_granularity_match'])
        self.incoming.add_measures(self.metric, [
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 0, 1), 69),
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 7, 31), 42),
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 9, 31), 4),
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 12, 45), 44),
        ])
        self.incoming.add_measures(metric2, [
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 0, 1), 69),
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 7, 31), 42),
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 9, 31), 4),
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 12, 45), 44),
        ])

        self.assertRaises(storage.MetricUnaggregatable,
                          self.storage.get_cross_metric_measures,
                          [self.metric, metric2])
Esempio n. 20
0
    def test_aborted_initial_processing(self):
        """A crash during first processing must be recoverable.

        _store_unaggregated_timeserie is made to raise on the first
        processing attempt; the retry must complete without logging any
        error and produce the expected aggregates.
        """
        self.incoming.add_measures(self.metric, [
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 0, 1), 5),
        ])
        with mock.patch.object(self.storage, '_store_unaggregated_timeserie',
                               side_effect=Exception):
            try:
                self.trigger_processing()
            except Exception:
                # The injected failure is expected; swallow it so the
                # retry below starts from the aborted state.
                pass

        with mock.patch('gnocchi.storage._carbonara.LOG') as LOG:
            self.trigger_processing()
            self.assertFalse(LOG.error.called)

        m = self.storage.get_measures(self.metric)
        self.assertIn((utils.datetime_utc(2014, 1, 1), 86400.0, 5.0), m)
        self.assertIn((utils.datetime_utc(2014, 1, 1, 12), 3600.0, 5.0), m)
        self.assertIn((utils.datetime_utc(2014, 1, 1, 12), 300.0, 5.0), m)
Esempio n. 21
0
    def test_add_and_get_cross_metric_measures(self):
        """Cross-metric aggregation with various query parameters.

        Verifies the default aggregation, the 'max' reaggregation, and
        the effect of from_timestamp / to_timestamp / granularity
        filters on the returned (timestamp, granularity, value) rows.
        """
        metric2, __ = self._create_metric()
        self.incoming.add_measures(self.metric, [
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 0, 1), 69),
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 7, 31), 42),
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 9, 31), 4),
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 12, 45), 44),
        ])
        self.incoming.add_measures(metric2, [
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 0, 5), 9),
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 7, 41), 2),
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 10, 31), 4),
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 13, 10), 4),
        ])
        self.trigger_processing([str(self.metric.id), str(metric2.id)])

        # Default aggregation, no filters.
        values = self.storage.get_cross_metric_measures([self.metric, metric2])
        self.assertEqual(
            [(utils.datetime_utc(2014, 1, 1, 0, 0, 0), 86400.0, 22.25),
             (utils.datetime_utc(2014, 1, 1, 12, 0, 0), 3600.0, 22.25),
             (utils.datetime_utc(2014, 1, 1, 12, 0, 0), 300.0, 39.0),
             (utils.datetime_utc(2014, 1, 1, 12, 5, 0), 300.0, 12.5),
             (utils.datetime_utc(2014, 1, 1, 12, 10, 0), 300.0, 24.0)], values)

        # Reaggregating with 'max' over the two metrics.
        values = self.storage.get_cross_metric_measures([self.metric, metric2],
                                                        reaggregation='max')
        self.assertEqual(
            [(utils.datetime_utc(2014, 1, 1, 0, 0, 0), 86400.0, 39.75),
             (utils.datetime_utc(2014, 1, 1, 12, 0, 0), 3600.0, 39.75),
             (utils.datetime_utc(2014, 1, 1, 12, 0, 0), 300.0, 69),
             (utils.datetime_utc(2014, 1, 1, 12, 5, 0), 300.0, 23),
             (utils.datetime_utc(2014, 1, 1, 12, 10, 0), 300.0, 44)], values)

        # from_timestamp keeps coarse periods containing the bound.
        values = self.storage.get_cross_metric_measures(
            [self.metric, metric2],
            from_timestamp=datetime.datetime(2014, 1, 1, 12, 10, 0))
        self.assertEqual([
            (utils.datetime_utc(2014, 1, 1), 86400.0, 22.25),
            (utils.datetime_utc(2014, 1, 1, 12), 3600.0, 22.25),
            (utils.datetime_utc(2014, 1, 1, 12, 10, 0), 300.0, 24.0),
        ], values)

        # to_timestamp cuts off later 5-minute periods.
        values = self.storage.get_cross_metric_measures(
            [self.metric, metric2],
            to_timestamp=datetime.datetime(2014, 1, 1, 12, 5, 0))

        self.assertEqual([
            (utils.datetime_utc(2014, 1, 1, 0, 0, 0), 86400.0, 22.25),
            (utils.datetime_utc(2014, 1, 1, 12, 0, 0), 3600.0, 22.25),
            (utils.datetime_utc(2014, 1, 1, 12, 0, 0), 300.0, 39.0),
        ], values)

        # Identical from/to bound still returns the enclosing periods.
        values = self.storage.get_cross_metric_measures(
            [self.metric, metric2],
            from_timestamp=datetime.datetime(2014, 1, 1, 12, 10, 10),
            to_timestamp=datetime.datetime(2014, 1, 1, 12, 10, 10))
        self.assertEqual([
            (utils.datetime_utc(2014, 1, 1), 86400.0, 22.25),
            (utils.datetime_utc(2014, 1, 1, 12), 3600.0, 22.25),
            (utils.datetime_utc(2014, 1, 1, 12, 10), 300.0, 24.0),
        ], values)

        # A one-second window at the start of the series.
        values = self.storage.get_cross_metric_measures(
            [self.metric, metric2],
            from_timestamp=datetime.datetime(2014, 1, 1, 12, 0, 0),
            to_timestamp=datetime.datetime(2014, 1, 1, 12, 0, 1))

        self.assertEqual([
            (utils.datetime_utc(2014, 1, 1), 86400.0, 22.25),
            (utils.datetime_utc(2014, 1, 1, 12, 0, 0), 3600.0, 22.25),
            (utils.datetime_utc(2014, 1, 1, 12, 0, 0), 300.0, 39.0),
        ], values)

        # Restricting to one granularity returns a single row.
        values = self.storage.get_cross_metric_measures(
            [self.metric, metric2],
            from_timestamp=datetime.datetime(2014, 1, 1, 12, 0, 0),
            to_timestamp=datetime.datetime(2014, 1, 1, 12, 0, 1),
            granularity=300.0)

        self.assertEqual([
            (utils.datetime_utc(2014, 1, 1, 12, 0, 0), 300.0, 39.0),
        ], values)
Esempio n. 22
0
    def test_add_and_get_measures(self):
        """get_measures with time-range and granularity filters.

        Processes four points, then queries them back with various
        combinations of from_timestamp, to_timestamp and granularity,
        checking the exact (timestamp, granularity, value) rows, and
        finally that an unknown granularity raises
        GranularityDoesNotExist.
        """
        self.incoming.add_measures(self.metric, [
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 0, 1), 69),
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 7, 31), 42),
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 9, 31), 4),
            storage.Measure(utils.dt_to_unix_ns(2014, 1, 1, 12, 12, 45), 44),
        ])
        self.trigger_processing()

        # No filters: all granularities, coarsest first.
        self.assertEqual([
            (utils.datetime_utc(2014, 1, 1), 86400.0, 39.75),
            (utils.datetime_utc(2014, 1, 1, 12), 3600.0, 39.75),
            (utils.datetime_utc(2014, 1, 1, 12), 300.0, 69.0),
            (utils.datetime_utc(2014, 1, 1, 12, 5), 300.0, 23.0),
            (utils.datetime_utc(2014, 1, 1, 12, 10), 300.0, 44.0),
        ], self.storage.get_measures(self.metric))

        # from_timestamp keeps the coarse periods that contain it.
        self.assertEqual([
            (utils.datetime_utc(2014, 1, 1), 86400.0, 39.75),
            (utils.datetime_utc(2014, 1, 1, 12), 3600.0, 39.75),
            (utils.datetime_utc(2014, 1, 1, 12, 10), 300.0, 44.0),
        ],
                         self.storage.get_measures(
                             self.metric,
                             from_timestamp=datetime.datetime(
                                 2014, 1, 1, 12, 10, 0)))

        # to_timestamp cuts off later 5-minute periods.
        self.assertEqual([
            (utils.datetime_utc(2014, 1, 1), 86400.0, 39.75),
            (utils.datetime_utc(2014, 1, 1, 12), 3600.0, 39.75),
            (utils.datetime_utc(2014, 1, 1, 12), 300.0, 69.0),
            (utils.datetime_utc(2014, 1, 1, 12, 5), 300.0, 23.0),
        ],
                         self.storage.get_measures(
                             self.metric,
                             to_timestamp=datetime.datetime(
                                 2014, 1, 1, 12, 6, 0)))

        # Identical from/to bound still returns the enclosing periods.
        self.assertEqual(
            [
                (utils.datetime_utc(2014, 1, 1), 86400.0, 39.75),
                (utils.datetime_utc(2014, 1, 1, 12), 3600.0, 39.75),
                (utils.datetime_utc(2014, 1, 1, 12, 10), 300.0, 44.0),
            ],
            self.storage.get_measures(
                self.metric,
                to_timestamp=datetime.datetime(2014, 1, 1, 12, 10, 10),
                from_timestamp=datetime.datetime(2014, 1, 1, 12, 10, 10)))

        # A two-second window at the start of the series.
        self.assertEqual(
            [
                (utils.datetime_utc(2014, 1, 1), 86400.0, 39.75),
                (utils.datetime_utc(2014, 1, 1, 12), 3600.0, 39.75),
                (utils.datetime_utc(2014, 1, 1, 12), 300.0, 69.0),
            ],
            self.storage.get_measures(
                self.metric,
                from_timestamp=datetime.datetime(2014, 1, 1, 12, 0, 0),
                to_timestamp=datetime.datetime(2014, 1, 1, 12, 0, 2)))

        # Timezone-aware from_timestamp (13:00+01:00 == 12:00 UTC).
        self.assertEqual(
            [
                (utils.datetime_utc(2014, 1, 1), 86400.0, 39.75),
                (utils.datetime_utc(2014, 1, 1, 12), 3600.0, 39.75),
                (utils.datetime_utc(2014, 1, 1, 12), 300.0, 69.0),
            ],
            self.storage.get_measures(
                self.metric,
                from_timestamp=iso8601.parse_date("2014-1-1 13:00:00+01:00"),
                to_timestamp=datetime.datetime(2014, 1, 1, 12, 0, 2)))

        # Window plus an explicit 3600s granularity.
        self.assertEqual(
            [
                (utils.datetime_utc(2014, 1, 1, 12), 3600.0, 39.75),
            ],
            self.storage.get_measures(
                self.metric,
                from_timestamp=datetime.datetime(2014, 1, 1, 12, 0, 0),
                to_timestamp=datetime.datetime(2014, 1, 1, 12, 0, 2),
                granularity=3600.0))

        # Window plus an explicit 300s granularity.
        self.assertEqual(
            [
                (utils.datetime_utc(2014, 1, 1, 12), 300.0, 69.0),
            ],
            self.storage.get_measures(
                self.metric,
                from_timestamp=datetime.datetime(2014, 1, 1, 12, 0, 0),
                to_timestamp=datetime.datetime(2014, 1, 1, 12, 0, 2),
                granularity=300.0))

        # A granularity absent from the archive policy must raise.
        self.assertRaises(storage.GranularityDoesNotExist,
                          self.storage.get_measures,
                          self.metric,
                          granularity=42)
Esempio n. 23
0
    def test_rewrite_measures_oldest_mutable_timestamp_eq_next_key(self):
        """See LP#1655422"""
        # Build an archive policy wide enough to span several storage
        # splits: each split holds 3600 points, so 36000 points at 60s
        # granularity gives us 10 splits.
        apname = str(uuid.uuid4())
        ap = archive_policy.ArchivePolicy(apname, 0, [(36000, 60)])
        self.index.create_archive_policy(ap)
        self.metric = storage.Metric(uuid.uuid4(), ap)
        self.index.create_metric(self.metric.id, str(uuid.uuid4()), apname)

        # Seed measures scattered across three different splits.
        self.incoming.add_measures(self.metric, [
            storage.Measure(utils.dt_to_unix_ns(2016, 1, 1, 12, 0, 1), 69),
            storage.Measure(utils.dt_to_unix_ns(2016, 1, 2, 13, 7, 31), 42),
            storage.Measure(utils.dt_to_unix_ns(2016, 1, 4, 14, 9, 31), 4),
            storage.Measure(utils.dt_to_unix_ns(2016, 1, 6, 15, 12, 45), 44),
        ])
        self.trigger_processing()

        expected_keys = {'1451520000.0', '1451736000.0', '1451952000.0'}
        self.assertEqual(
            expected_keys,
            self.storage._list_split_keys_for_metric(self.metric, "mean",
                                                     60.0))

        # The newest split is only compressed when the driver rewrites
        # full splits on every write.
        if self.storage.WRITE_FULL:
            expect_write_full_compressed = self.assertTrue
        else:
            expect_write_full_compressed = self.assertFalse

        for key in ('1451520000.0', '1451736000.0'):
            payload = self.storage._get_measures(self.metric, key, "mean",
                                                 60.0)
            self.assertTrue(
                carbonara.AggregatedTimeSerie.is_compressed(payload))
        payload = self.storage._get_measures(self.metric, '1451952000.0',
                                             "mean", 60.0)
        expect_write_full_compressed(
            carbonara.AggregatedTimeSerie.is_compressed(payload))

        self.assertEqual([
            (utils.datetime_utc(2016, 1, 1, 12), 60.0, 69),
            (utils.datetime_utc(2016, 1, 2, 13, 7), 60.0, 42),
            (utils.datetime_utc(2016, 1, 4, 14, 9), 60.0, 4),
            (utils.datetime_utc(2016, 1, 6, 15, 12), 60.0, 44),
        ], self.storage.get_measures(self.metric, granularity=60.0))

        # Push a brand-new point far beyond the current BoundTimeSerie
        # range to force a rewrite of an existing split (the back window
        # is one hour here).  Special case under test: the
        # oldest_mutable_timestamp lands exactly on the new split key,
        # 2016-01-10T00:00:00 == 1452384000.0.
        self.incoming.add_measures(self.metric, [
            storage.Measure(utils.dt_to_unix_ns(2016, 1, 10, 0, 12), 45),
        ])
        self.trigger_processing()

        self.assertEqual(
            {'1452384000.0', '1451736000.0', '1451520000.0', '1451952000.0'},
            self.storage._list_split_keys_for_metric(self.metric, "mean",
                                                     60.0))
        # '1451952000.0' is now compressed as well: it has been
        # rewritten by the processing above.
        for key in ('1451520000.0', '1451736000.0', '1451952000.0'):
            payload = self.storage._get_measures(self.metric, key, "mean",
                                                 60.0)
            self.assertTrue(
                carbonara.AggregatedTimeSerie.is_compressed(payload))
        payload = self.storage._get_measures(self.metric, '1452384000.0',
                                             "mean", 60.0)
        expect_write_full_compressed(
            carbonara.AggregatedTimeSerie.is_compressed(payload))

        self.assertEqual([
            (utils.datetime_utc(2016, 1, 1, 12), 60.0, 69),
            (utils.datetime_utc(2016, 1, 2, 13, 7), 60.0, 42),
            (utils.datetime_utc(2016, 1, 4, 14, 9), 60.0, 4),
            (utils.datetime_utc(2016, 1, 6, 15, 12), 60.0, 44),
            (utils.datetime_utc(2016, 1, 10, 0, 12), 60.0, 45),
        ], self.storage.get_measures(self.metric, granularity=60.0))