Example #1
def test_to_timestamps_epoch(self):
    self.assertEqual(
        utils.to_datetime("1425652440"),
        datetime.datetime(2015, 3, 6, 14, 34, tzinfo=iso8601.iso8601.UTC))
    self.assertEqual(
        utils.to_datetime("1425652440.4"),
        datetime.datetime(2015, 3, 6, 14, 34, 0, 400000,
                          tzinfo=iso8601.iso8601.UTC))
    self.assertEqual(
        utils.to_datetime(1425652440),
        datetime.datetime(2015, 3, 6, 14, 34, tzinfo=iso8601.iso8601.UTC))
    self.assertEqual(
        utils.to_datetime(utils.to_timestamp(1425652440.4)),
        datetime.datetime(2015, 3, 6, 14, 34, 0, 400000,
                          tzinfo=iso8601.iso8601.UTC))
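These assertions pin down the contract: epoch seconds given as a string, a float string, or an int all decode to the same timezone-aware UTC datetime. As a rough illustration only (not Gnocchi's actual implementation), a helper consistent with the test could look like this:

import datetime

import iso8601  # supplies the iso8601.iso8601.UTC tzinfo used above


def to_datetime(value):
    # Sketch: treat int/float/str input as epoch seconds and return a
    # timezone-aware UTC datetime, matching the assertions in the test.
    return datetime.datetime.fromtimestamp(float(value),
                                           tz=iso8601.iso8601.UTC)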
Example #2
    def _handle_binary_op(engine, table, op, nodes):
        try:
            field_name, value = list(nodes.items())[0]
        except Exception:
            raise indexer.QueryError()

        if field_name == "lifespan":
            attr = getattr(table, "ended_at") - getattr(table, "started_at")
            value = utils.to_timespan(value)
            if engine == "mysql":
                # NOTE(jd) Subtracting two timestamps in MySQL yields weird
                # results based on string comparison. It's useless and does
                # not work at all with seconds or anything. Just skip it.
                raise exceptions.NotImplementedError
        else:
            try:
                attr = getattr(table, field_name)
            except AttributeError:
                raise indexer.QueryAttributeError(table, field_name)

            if not hasattr(attr, "type"):
                # This is not a column
                raise indexer.QueryAttributeError(table, field_name)

            # Convert value to the right type
            if value is not None and isinstance(attr.type,
                                                base.PreciseTimestamp):
                value = utils.to_timestamp(value)

        return op(attr, value)
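For orientation, here is how such a handler would plausibly be invoked; the `operator.eq` dispatch and the `Resource` table are illustrative assumptions, not taken from the snippet:

import operator

# Hypothetical call: a JSON filter such as {"=": {"started_at": "..."}}
# would be dispatched with op=operator.eq and the inner dict as nodes.
clause = _handle_binary_op(
    engine="postgresql",
    table=Resource,  # an assumed SQLAlchemy-mapped class
    op=operator.eq,
    nodes={"started_at": "2015-03-06T14:34:00"},
)
# The returned clause is a SQLAlchemy expression, usable as e.g.
# session.query(Resource).filter(clause)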
Example #3
def todo(metric):
    for _ in six.moves.range(conf.batch_of_measures):
        measures = [
            storage.Measure(utils.to_timestamp(datetime.datetime.now()),
                            random.random())
            for __ in six.moves.range(conf.measures_per_batch)]
        s.add_measures(metric, measures)
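This reads like a write-benchmark helper: it closes over a config object `conf` and a storage driver `s`, and pushes `batch_of_measures` batches of `measures_per_batch` random measures into one metric. A hedged sketch of the surrounding setup (all names and values below are assumptions):

import collections

# Stand-ins for the free variables the snippet closes over.
Conf = collections.namedtuple('Conf', ['batch_of_measures',
                                       'measures_per_batch'])
conf = Conf(batch_of_measures=10, measures_per_batch=100)

# `s` would be a configured storage driver and `metric` an existing
# metric object; with those in place, todo(metric) writes 10 batches
# of 100 random measures.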
Example #4
def MeasureSchema(m):
    # NOTE(sileht): don't use voluptuous for performance reasons
    try:
        value = float(m['value'])
    except Exception:
        abort(400, "Invalid input for a value")

    try:
        timestamp = utils.to_timestamp(m['timestamp'])
    except Exception:
        abort(400, "Invalid input for a timestamp")

    return storage.Measure(timestamp, value)
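A quick usage sketch; the input dict mirrors one item of a decoded JSON POST body, and `abort` is assumed to be the web framework's HTTP-error helper:

# Hypothetical input item from a REST request body:
m = {"timestamp": "2015-03-06T14:34:00", "value": 42.0}
measure = MeasureSchema(m)
# -> storage.Measure(<UTC datetime>, 42.0); a malformed field would
#    trigger abort(400, ...) instead of raising to the caller.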
Example #5
    def to_measure(m):
        # NOTE(sileht): we validate the input during the iteration
        # so we don't add an extra loop just for that, and we avoid
        # voluptuous for performance reasons
        try:
            value = float(m['value'])
        except Exception:
            abort(400, "Invalid input for a value")

        try:
            timestamp = utils.to_timestamp(m['timestamp'])
        except Exception:
            abort(400, "Invalid input for a timestamp")

        return storage.Measure(timestamp, value)
Example #6
File: s3.py Project: luo-zn/gnocchi
def _list_split_keys_unbatched(self, metric, aggregations, version=3):
    bucket = self._bucket_name
    keys = {}
    for aggregation in aggregations:
        keys[aggregation] = set()
        response = {}
        while response.get('IsTruncated', True):
            if 'NextContinuationToken' in response:
                kwargs = {
                    'ContinuationToken': response['NextContinuationToken']
                }
            else:
                kwargs = {}
            response = self.s3.list_objects_v2(
                Bucket=bucket,
                Prefix=self._prefix(metric) + '%s_%s' % (
                    aggregation.method,
                    utils.timespan_total_seconds(
                        aggregation.granularity),
                ),
                **kwargs)
            # If the response is empty, check that the metric exists
            contents = response.get('Contents', ())
            if not contents and not self._metric_exists_p(metric, version):
                raise storage.MetricDoesNotExist(metric)
            for f in contents:
                try:
                    if self._version_check(f['Key'], version):
                        meta = f['Key'].split('_')
                        keys[aggregation].add(carbonara.SplitKey(
                            utils.to_timestamp(meta[2]),
                            sampling=aggregation.granularity))
                except (ValueError, IndexError):
                    # Might be "none", or any other file. Be resilient.
                    continue
    return keys
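The `while response.get('IsTruncated', True)` loop is the standard S3 ListObjectsV2 pagination idiom: seed with an empty response, then chase `NextContinuationToken` until S3 reports the listing is complete. Stripped of the Gnocchi-specific pieces, the same pattern looks like this (bucket and prefix are placeholders):

import boto3

s3 = boto3.client('s3')


def list_all_keys(bucket, prefix):
    """Collect every key under a prefix, following continuation tokens."""
    keys = []
    kwargs = {}
    while True:
        response = s3.list_objects_v2(Bucket=bucket, Prefix=prefix,
                                      **kwargs)
        keys.extend(obj['Key'] for obj in response.get('Contents', ()))
        if not response.get('IsTruncated'):
            break
        kwargs = {'ContinuationToken': response['NextContinuationToken']}
    return keys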
Example #7
def Timestamp(v):
    if v is None:
        return v
    return utils.to_timestamp(v)
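This looks like a schema coercion hook: `None` passes through untouched, anything else is normalized via `utils.to_timestamp`. A minimal usage sketch (the input forms are the same ones the other examples use):

Timestamp(None)                    # -> None, field left unset
Timestamp('2014-01-01 12:00:00')   # -> parsed, timezone-aware timestamp
Timestamp(1425652440)              # -> epoch seconds normalized the same way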
Example #8
    def test_add_and_get_cross_metric_measures(self):
        metric2, __ = self._create_metric()
        self.storage.add_measures(self.metric, [
            storage.Measure(datetime.datetime(2014, 1, 1, 12, 0, 1), 69),
            storage.Measure(datetime.datetime(2014, 1, 1, 12, 7, 31), 42),
            storage.Measure(datetime.datetime(2014, 1, 1, 12, 9, 31), 4),
            storage.Measure(datetime.datetime(2014, 1, 1, 12, 12, 45), 44),
        ])
        self.storage.add_measures(metric2, [
            storage.Measure(datetime.datetime(2014, 1, 1, 12, 0, 5), 9),
            storage.Measure(datetime.datetime(2014, 1, 1, 12, 7, 41), 2),
            storage.Measure(datetime.datetime(2014, 1, 1, 12, 10, 31), 4),
            storage.Measure(datetime.datetime(2014, 1, 1, 12, 13, 10), 4),
        ])
        self.storage.process_background_tasks(self.index, sync=True)

        values = self.storage.get_cross_metric_measures([self.metric, metric2])
        self.assertEqual([
            (utils.datetime_utc(2014, 1, 1, 0, 0, 0), 86400.0, 22.25),
            (utils.datetime_utc(2014, 1, 1, 12, 0, 0), 3600.0, 22.25),
            (utils.datetime_utc(2014, 1, 1, 12, 0, 0), 300.0, 39.0),
            (utils.datetime_utc(2014, 1, 1, 12, 5, 0), 300.0, 12.5),
            (utils.datetime_utc(2014, 1, 1, 12, 10, 0), 300.0, 24.0)
        ], values)

        values = self.storage.get_cross_metric_measures(
            [self.metric, metric2],
            from_timestamp=utils.to_timestamp('2014-01-01 12:10:00'))
        self.assertEqual([
            (utils.datetime_utc(2014, 1, 1), 86400.0, 22.25),
            (utils.datetime_utc(2014, 1, 1, 12), 3600.0, 22.25),
            (utils.datetime_utc(2014, 1, 1, 12, 10, 0), 300.0, 24.0),
        ], values)

        values = self.storage.get_cross_metric_measures(
            [self.metric, metric2],
            to_timestamp=utils.to_timestamp('2014-01-01 12:05:00'))

        self.assertEqual([
            (utils.datetime_utc(2014, 1, 1, 0, 0, 0), 86400.0, 22.25),
            (utils.datetime_utc(2014, 1, 1, 12, 0, 0), 3600.0, 22.25),
            (utils.datetime_utc(2014, 1, 1, 12, 0, 0), 300.0, 39.0),
        ], values)

        values = self.storage.get_cross_metric_measures(
            [self.metric, metric2],
            to_timestamp=utils.to_timestamp('2014-01-01 12:10:10'),
            from_timestamp=utils.to_timestamp('2014-01-01 12:10:10'))
        self.assertEqual([
            (utils.datetime_utc(2014, 1, 1), 86400.0, 22.25),
            (utils.datetime_utc(2014, 1, 1, 12), 3600.0, 22.25),
            (utils.datetime_utc(2014, 1, 1, 12, 10), 300.0, 24.0),
        ], values)

        values = self.storage.get_cross_metric_measures(
            [self.metric, metric2],
            from_timestamp=utils.to_timestamp('2014-01-01 12:00:00'),
            to_timestamp=utils.to_timestamp('2014-01-01 12:00:01'))

        self.assertEqual([
            (utils.datetime_utc(2014, 1, 1), 86400.0, 22.25),
            (utils.datetime_utc(2014, 1, 1, 12, 0, 0), 3600.0, 22.25),
            (utils.datetime_utc(2014, 1, 1, 12, 0, 0), 300.0, 39.0),
        ], values)

        values = self.storage.get_cross_metric_measures(
            [self.metric, metric2],
            from_timestamp=utils.to_timestamp('2014-01-01 12:00:00'),
            to_timestamp=utils.to_timestamp('2014-01-01 12:00:01'),
            granularity=300.0)

        self.assertEqual([
            (utils.datetime_utc(2014, 1, 1, 12, 0, 0), 300.0, 39.0),
        ], values)
Example #9
    def test_add_and_get_cross_metric_measures(self):
        metric2, __ = self._create_metric()
        self.storage.add_measures(self.metric, [
            storage.Measure(datetime.datetime(2014, 1, 1, 12, 0, 1), 69),
            storage.Measure(datetime.datetime(2014, 1, 1, 12, 7, 31), 42),
            storage.Measure(datetime.datetime(2014, 1, 1, 12, 9, 31), 4),
            storage.Measure(datetime.datetime(2014, 1, 1, 12, 12, 45), 44),
        ])
        self.storage.add_measures(metric2, [
            storage.Measure(datetime.datetime(2014, 1, 1, 12, 0, 5), 9),
            storage.Measure(datetime.datetime(2014, 1, 1, 12, 7, 41), 2),
            storage.Measure(datetime.datetime(2014, 1, 1, 12, 10, 31), 4),
            storage.Measure(datetime.datetime(2014, 1, 1, 12, 13, 10), 4),
        ])
        self.trigger_processing(self.storage, self.index)

        values = self.storage.get_cross_metric_measures([self.metric, metric2])
        self.assertEqual(
            [(utils.datetime_utc(2014, 1, 1, 0, 0, 0), 86400.0, 22.25),
             (utils.datetime_utc(2014, 1, 1, 12, 0, 0), 3600.0, 22.25),
             (utils.datetime_utc(2014, 1, 1, 12, 0, 0), 300.0, 39.0),
             (utils.datetime_utc(2014, 1, 1, 12, 5, 0), 300.0, 12.5),
             (utils.datetime_utc(2014, 1, 1, 12, 10, 0), 300.0, 24.0)], values)

        values = self.storage.get_cross_metric_measures(
            [self.metric, metric2],
            from_timestamp=utils.to_timestamp('2014-01-01 12:10:00'))
        self.assertEqual([
            (utils.datetime_utc(2014, 1, 1), 86400.0, 22.25),
            (utils.datetime_utc(2014, 1, 1, 12), 3600.0, 22.25),
            (utils.datetime_utc(2014, 1, 1, 12, 10, 0), 300.0, 24.0),
        ], values)

        values = self.storage.get_cross_metric_measures(
            [self.metric, metric2],
            to_timestamp=utils.to_timestamp('2014-01-01 12:05:00'))

        self.assertEqual([
            (utils.datetime_utc(2014, 1, 1, 0, 0, 0), 86400.0, 22.25),
            (utils.datetime_utc(2014, 1, 1, 12, 0, 0), 3600.0, 22.25),
            (utils.datetime_utc(2014, 1, 1, 12, 0, 0), 300.0, 39.0),
        ], values)

        values = self.storage.get_cross_metric_measures(
            [self.metric, metric2],
            to_timestamp=utils.to_timestamp('2014-01-01 12:10:10'),
            from_timestamp=utils.to_timestamp('2014-01-01 12:10:10'))
        self.assertEqual([
            (utils.datetime_utc(2014, 1, 1), 86400.0, 22.25),
            (utils.datetime_utc(2014, 1, 1, 12), 3600.0, 22.25),
            (utils.datetime_utc(2014, 1, 1, 12, 10), 300.0, 24.0),
        ], values)

        values = self.storage.get_cross_metric_measures(
            [self.metric, metric2],
            from_timestamp=utils.to_timestamp('2014-01-01 12:00:00'),
            to_timestamp=utils.to_timestamp('2014-01-01 12:00:01'))

        self.assertEqual([
            (utils.datetime_utc(2014, 1, 1), 86400.0, 22.25),
            (utils.datetime_utc(2014, 1, 1, 12, 0, 0), 3600.0, 22.25),
            (utils.datetime_utc(2014, 1, 1, 12, 0, 0), 300.0, 39.0),
        ], values)

        values = self.storage.get_cross_metric_measures(
            [self.metric, metric2],
            from_timestamp=utils.to_timestamp('2014-01-01 12:00:00'),
            to_timestamp=utils.to_timestamp('2014-01-01 12:00:01'),
            granularity=300.0)

        self.assertEqual([
            (utils.datetime_utc(2014, 1, 1, 12, 0, 0), 300.0, 39.0),
        ], values)
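Both versions of this test pin down the same output shape: `get_cross_metric_measures` returns `(timestamp, granularity, value)` triples, coarsest granularity first, and the `from_timestamp`/`to_timestamp` bounds select whole aggregation windows, which is why the 86400.0 and 3600.0 rows survive even a one-second range. A small helper a consumer might write under that assumption:

def at_granularity(values, granularity):
    # Filter (timestamp, granularity, value) triples down to one series,
    # e.g. at_granularity(values, 300.0) for the five-minute aggregates.
    return [(ts, g, v) for ts, g, v in values if g == granularity]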