Example #1
    def get_metric_list(self, request, metrics, begin, end, job, max_points,
                        num_points, **kwargs):
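        # reduce_fn is required whenever group_by is supplied.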
        errors = {}
        reduce_fn, group_by = map(request.GET.get, ('reduce_fn', 'group_by'))
        if not reduce_fn and group_by:
            errors[
                'reduce_fn'] = "This field is mandatory if 'group_by' is specified"
        if errors:
            return self.create_response(request,
                                        errors,
                                        response_class=HttpBadRequest)

        try:
            base_bundle = self.build_bundle(request=request)
            objs = self.obj_get_list(bundle=base_bundle,
                                     **self.remove_api_resource_names(kwargs))
        except Http404 as exc:
            raise custom_response(self, request, http.HttpNotFound,
                                  {'metrics': exc})
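        # Default to every metric name available across the matched objects.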
        metrics = metrics or set(
            itertools.chain.from_iterable(
                MetricStore(obj).names for obj in objs))

        result = dict((obj.id,
                       self._fetch(MetricStore(obj), metrics, begin, end, job,
                                   max_points, num_points)) for obj in objs)
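        # No reduce_fn: return each object's series individually.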
        if not reduce_fn:
            for obj_id, stats in result.items():
                result[obj_id] = self._format(stats)
            return self.create_response(request, result)
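        # reduce_fn without group_by: reduce across all objects into one series.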
        if not group_by:
            stats = self._reduce(metrics, result, reduce_fn)
            return self.create_response(request, self._format(stats))
        # Want to reduce into groups, one series per group
        groups = defaultdict(dict)
        for obj in objs:
            if hasattr(obj, 'content_type'):
                obj = obj.downcast()
            if hasattr(obj, group_by):
                group_val = getattr(obj, group_by)
                groups[getattr(group_val, 'id',
                               group_val)][obj.id] = result[obj.id]
        for key in groups:
            stats = self._reduce(metrics, groups[key], reduce_fn)
            groups[key] = self._format(stats)
        return self.create_response(request, groups)
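
When both reduce_fn and group_by are supplied, the final branch buckets each object's series under the value of its group_by attribute, keying on the related object's id when that attribute is itself a model instance and on the raw value otherwise. A minimal sketch of that fallback, using hypothetical Host/Target stand-ins in place of the real chroma models:

    from collections import defaultdict

    class Host(object):
        # Hypothetical related model carrying an id.
        def __init__(self, id):
            self.id = id

    class Target(object):
        # Hypothetical measured object with a host attribute to group by.
        def __init__(self, id, host):
            self.id = id
            self.host = host

    objs = [Target(1, Host(10)), Target(2, Host(10)), Target(3, Host(11))]
    result = {1: 'series-1', 2: 'series-2', 3: 'series-3'}

    groups = defaultdict(dict)
    for obj in objs:
        group_val = getattr(obj, 'host')
        # Key by the related object's id when it has one, otherwise by the value itself.
        groups[getattr(group_val, 'id', group_val)][obj.id] = result[obj.id]

    # groups == {10: {1: 'series-1', 2: 'series-2'}, 11: {3: 'series-3'}}
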
Example #2
    def get_metric_detail(self, request, metrics, begin, end, job, max_points,
                          num_points, **kwargs):
        obj = self.cached_obj_get(request=request,
                                  **self.remove_api_resource_names(kwargs))
        metrics = metrics or MetricStore(obj).names
        if isinstance(obj, StorageResourceRecord):
            # FIXME: there is a level of indirection here to go from a StorageResourceRecord to individual time series.
            # Although no longer necessary, time series are still stored in separate resources.
            stats = defaultdict(dict)
            # Merge each statistic's samples into one timestamp-keyed dict.
            for stat in StorageResourceStatistic.objects.filter(
                    storage_resource=obj, name__in=metrics):
                for dt, data in self._fetch(stat.metrics, metrics, begin, end,
                                            job, max_points,
                                            num_points).items():
                    stats[dt].update(data)
        else:
            stats = self._fetch(MetricStore(obj), metrics, begin, end, job,
                                max_points, num_points)
        if not job:
            # Zero-fill any requested metric missing from a sample.
            for data in stats.values():
                data.update(dict.fromkeys(set(metrics).difference(data), 0.0))
        return self.create_response(request, self._format(stats))
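
For a StorageResourceRecord the handler merges each statistic's samples into a single timestamp-keyed dict, then zero-fills any requested metric missing from a sample so every point carries the full metric set. A small sketch of those two steps on plain dicts (the timestamps and values below are invented):

    from collections import defaultdict

    metrics = ['read_bytes', 'write_bytes']

    # Two per-statistic fetches keyed by timestamp, as _fetch would return them.
    fetched = [
        {1000: {'read_bytes': 4096.0}, 1010: {'read_bytes': 8192.0}},
        {1000: {'write_bytes': 1024.0}},
    ]

    stats = defaultdict(dict)
    for series in fetched:
        for dt, data in series.items():
            stats[dt].update(data)

    # Zero-fill metrics with no sample at a given timestamp.
    for data in stats.values():
        data.update(dict.fromkeys(set(metrics).difference(data), 0.0))

    # stats == {1000: {'read_bytes': 4096.0, 'write_bytes': 1024.0},
    #           1010: {'read_bytes': 8192.0, 'write_bytes': 0.0}}
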
Example #3
    def metrics(self):
        from chroma_core.lib.metrics import MetricStore
        # Build the MetricStore once and cache it on the instance.
        if not hasattr(self, '_metrics'):
            self._metrics = MetricStore.new(self)
        return self._metrics
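
Example #3 is a lazy per-instance cache: the MetricStore is built on first access and reused afterwards. On Python 3.8+ the same pattern can be written with functools.cached_property; the sketch below uses a hypothetical ExpensiveStore in place of MetricStore.new:

    from functools import cached_property

    class ExpensiveStore(object):
        # Hypothetical stand-in for MetricStore; pretend construction is costly.
        def __init__(self, owner):
            self.owner = owner

    class MeasuredThing(object):
        @cached_property
        def metrics(self):
            # Computed on first access, then stored on the instance (like _metrics above).
            return ExpensiveStore(self)

    thing = MeasuredThing()
    assert thing.metrics is thing.metrics  # the second access returns the cached store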