# Example #1 (score: 0)
    def _get_measures_timeserie(self,
                                metric,
                                aggregation,
                                granularity,
                                from_timestamp=None,
                                to_timestamp=None):
        """Return the aggregated timeserie for a metric at one granularity.

        :param metric: The metric to read measures for.
        :param aggregation: The aggregation method to retrieve.
        :param granularity: The sampling granularity to retrieve.
        :param from_timestamp: Optional inclusive lower bound for the
                               split keys to read.
        :param to_timestamp: Optional inclusive upper bound for the
                             split keys to read.
        :raises storage.GranularityDoesNotExist: if the metric's archive
            policy has no definition for *granularity*.
        """
        # Find the archive-policy definition matching this granularity to
        # learn the maximum number of points the timeserie may hold.
        points = None
        for definition in metric.archive_policy.definition:
            if definition.granularity == granularity:
                points = definition.points
                break
        if points is None:
            raise storage.GranularityDoesNotExist(metric, granularity)

        try:
            all_keys = self._list_split_keys_for_metric(
                metric, aggregation, granularity)
        except storage.MetricDoesNotExist:
            # No data stored yet for this metric. The granularity is known
            # to exist in the archive policy (checked above), so return an
            # empty timeserie sized from that definition.
            # NOTE: the original code re-scanned the archive-policy
            # definitions here and had an unreachable raise; the first loop
            # already guarantees a match, so we reuse `points` directly.
            return carbonara.AggregatedTimeSerie(
                sampling=granularity,
                aggregation_method=aggregation,
                max_size=points)

        # Split keys compare as strings; convert the timestamp bounds to
        # the same key representation so range filtering works below.
        if from_timestamp:
            from_timestamp = str(
                carbonara.SplitKey.from_timestamp_and_sampling(
                    from_timestamp, granularity))

        if to_timestamp:
            to_timestamp = str(
                carbonara.SplitKey.from_timestamp_and_sampling(
                    to_timestamp, granularity))

        # Fetch and unserialize every split inside the requested bounds;
        # missing splits come back as None and are dropped.
        timeseries = list(
            filter(
                lambda x: x is not None,
                self._map_in_thread(
                    self._get_measures_and_unserialize,
                    ((metric, key, aggregation, granularity)
                     for key in sorted(all_keys)
                     if ((not from_timestamp or key >= from_timestamp) and (
                         not to_timestamp or key <= to_timestamp))))))

        return carbonara.AggregatedTimeSerie.from_timeseries(
            sampling=granularity,
            aggregation_method=aggregation,
            timeseries=timeseries,
            max_size=points)
    def _get_measures_timeserie(self,
                                metric,
                                aggregation,
                                granularity,
                                from_timestamp=None,
                                to_timestamp=None):
        """Return the aggregated timeserie for a metric at one granularity.

        Supports both the split-key storage layout and the legacy
        TimeSerieArchive layout (tried when no split keys are found).

        :param metric: The metric to read measures for.
        :param aggregation: The aggregation method to retrieve.
        :param granularity: The sampling granularity to retrieve.
        :param from_timestamp: Optional inclusive lower bound for the
                               split keys to read.
        :param to_timestamp: Optional inclusive upper bound for the
                             split keys to read.
        :raises storage.GranularityDoesNotExist: if the metric's archive
            policy has no definition for *granularity*.
        """

        # Find the number of points allowed for this granularity by the
        # metric's archive policy; no match means the granularity is invalid.
        for d in metric.archive_policy.definition:
            if d.granularity == granularity:
                points = d.points
                break
        else:
            raise storage.GranularityDoesNotExist(metric, granularity)

        all_keys = None
        try:
            all_keys = self._list_split_keys_for_metric(
                metric, aggregation, granularity)
        except storage.MetricDoesNotExist:
            # This can happen if it's an old metric with a TimeSerieArchive
            all_keys = None

        if not all_keys:
            # It does not mean we have no data: it can be an old metric with a
            # TimeSerieArchive.
            try:
                data = self._get_metric_archive(metric, aggregation)
            except (storage.MetricDoesNotExist,
                    storage.AggregationDoesNotExist):
                # It really does not exist: return an empty timeserie sized
                # from the archive-policy definition for this granularity.
                for d in metric.archive_policy.definition:
                    if d.granularity == granularity:
                        return carbonara.AggregatedTimeSerie(
                            sampling=granularity,
                            aggregation_method=aggregation,
                            max_size=d.points)
                raise storage.GranularityDoesNotExist(metric, granularity)
            else:
                archive = carbonara.TimeSerieArchive.unserialize(data)
                # It's an old metric with an TimeSerieArchive! Return the
                # embedded timeserie that matches the wanted granularity.
                for ts in archive.agg_timeseries:
                    if ts.sampling == granularity:
                        return ts
                raise storage.GranularityDoesNotExist(metric, granularity)

        # Convert the timestamp bounds to split keys so they can be compared
        # against the stored keys when filtering below.
        if from_timestamp:
            from_timestamp = carbonara.AggregatedTimeSerie.get_split_key(
                from_timestamp, granularity)

        if to_timestamp:
            to_timestamp = carbonara.AggregatedTimeSerie.get_split_key(
                to_timestamp, granularity)

        # Fetch and unserialize the splits inside the requested bounds in
        # parallel; missing splits come back as None and are filtered out.
        timeseries = filter(
            lambda x: x is not None,
            self._map_in_thread(
                self._get_measures_and_unserialize,
                ((metric, key, aggregation, granularity) for key in all_keys
                 if ((not from_timestamp or key >= from_timestamp) and (
                     not to_timestamp or key <= to_timestamp)))))

        return carbonara.AggregatedTimeSerie.from_timeseries(
            sampling=granularity,
            aggregation_method=aggregation,
            timeseries=timeseries,
            max_size=points)
# Example #3 (score: 0)
def get_cross_metric_measures(storage,
                              metrics,
                              from_timestamp=None,
                              to_timestamp=None,
                              aggregation='mean',
                              reaggregation=None,
                              granularity=None,
                              needed_overlap=100.0,
                              fill=None,
                              transform=None):
    """Get aggregated measures of multiple entities.

    :param storage: The storage driver.
    :param metrics: The metrics measured to aggregate.
    :param from_timestamp: The timestamp to get the measure from.
    :param to_timestamp: The timestamp to get the measure to.
    :param granularity: The granularity to retrieve.
    :param aggregation: The type of aggregation to retrieve.
    :param reaggregation: The type of aggregation to compute
                          on the retrieved measures.
    :param needed_overlap: Percentage of timestamps the series must share
                           to be aggregable.
    :param fill: The value to use to fill in missing data in series.
    :param transform: List of transformation to apply to the series
    :raises gnocchi_storage.AggregationDoesNotExist: if a metric's archive
        policy does not provide *aggregation*.
    :raises gnocchi_storage.GranularityDoesNotExist: if *granularity* is
        given but missing from a metric's archive policy.
    :raises MetricUnaggregatable: if the series cannot be aggregated
        together (no common granularity, or insufficient overlap).
    """
    # Validate every metric upfront: the aggregation method and (when
    # given) the granularity must exist in each archive policy.
    for metric in metrics:
        if aggregation not in metric.archive_policy.aggregation_methods:
            raise gnocchi_storage.AggregationDoesNotExist(metric, aggregation)
        if granularity is not None:
            for d in metric.archive_policy.definition:
                if d.granularity == granularity:
                    break
            else:
                raise gnocchi_storage.GranularityDoesNotExist(
                    metric, granularity)

    # Re-aggregation defaults to the retrieval aggregation.
    if reaggregation is None:
        reaggregation = aggregation

    if granularity is None:
        # No granularity requested: use every granularity shared by ALL
        # metrics. A granularity occurring len(metrics) times in the
        # combined definitions is present in each archive policy.
        granularities = (definition.granularity for metric in metrics
                         for definition in metric.archive_policy.definition)
        granularities_in_common = [
            g for g, occurrence in six.iteritems(
                collections.Counter(granularities))
            if occurrence == len(metrics)
        ]

        if not granularities_in_common:
            raise MetricUnaggregatable(metrics, 'No granularity match')
    else:
        granularities_in_common = [granularity]

    # Fetch one timeserie per (metric, granularity) pair in parallel.
    tss = storage._map_in_thread(
        storage._get_measures_timeserie,
        [(metric, aggregation, g, from_timestamp, to_timestamp)
         for metric in metrics for g in granularities_in_common])

    if transform is not None:
        tss = list(map(lambda ts: ts.transform(transform), tss))

    try:
        # Normalize timestamps to UTC-aware datetimes in the result.
        return [(timestamp.replace(tzinfo=iso8601.iso8601.UTC), r, v)
                for timestamp, r, v in
                aggregated(tss, reaggregation, from_timestamp, to_timestamp,
                           needed_overlap, fill)]
    except UnAggregableTimeseries as e:
        raise MetricUnaggregatable(metrics, e.reason)