Example 1
0
    def find_nodes(self, query):
        """Yield leaf and branch nodes matching a Graphite glob query.

        Matching metric names and directory names are memoized in the
        Django cache, keyed on a hash of the glob pattern only.

        Args:
          query: a Graphite FindQuery-like object; only query.pattern is
            consulted here.

        Yields:
          node.LeafNode for each matching metric, then node.BranchNode for
          each matching directory.
        """
        # TODO: we should probably consider query.startTime and query.endTime
        #  to filter out metrics that had no points in this interval.

        cache_key = "find:%s" % (hashing.compactHash(query.pattern))
        results = self.django_cache().get(cache_key)
        cache_hit = results is not None
        if not cache_hit:
            find_start = time.time()
            results = glob_utils.graphite_glob(self.accessor(), query.pattern)
            log.rendering(
                'find(%s) - %f secs' % (query.pattern, time.time() - find_start))
            # Cache immediately rather than after the yield loops: this is a
            # generator, so code placed after the yields only runs when the
            # caller exhausts the iterator, and a partial consumption would
            # silently skip the memoization. This also matches the other
            # find_nodes implementations, which write the cache before
            # yielding.
            self.django_cache().set(cache_key, results, self._cache_timeout)

        metric_names, directories = results

        for metric_name in metric_names:
            reader = Reader(
                self.accessor(), self.cache(), self.carbonlink(), metric_name)
            yield node.LeafNode(metric_name, reader)

        for directory in directories:
            yield node.BranchNode(directory)
Example 2
0
    def find_nodes(self, query):
        """Yield leaf and branch nodes matching a Graphite find query.

        The outcome is memoized in the Django cache as a (success, results)
        pair, so accessor failures are cached too and re-raised on later
        hits. query.startTime / query.endTime, when set, narrow the glob to
        metrics with points in that interval.
        """
        leaves_only = getattr(query, "leaves_only", False)
        cache_key = "find_nodes:%s" % self._hash(query)
        cached = self.django_cache().get(cache_key)
        cache_hit = cached is not None

        if cache_hit:
            success, results = cached
        else:
            find_start = time.time()
            # Convert epoch bounds to datetimes, preserving None as "open".
            start_time = (
                None if query.startTime is None
                else datetime.fromtimestamp(query.startTime)
            )
            end_time = (
                None if query.endTime is None
                else datetime.fromtimestamp(query.endTime)
            )
            try:
                results = glob_utils.graphite_glob(
                    self.accessor(),
                    query.pattern,
                    metrics=True,
                    directories=not leaves_only,
                    start_time=start_time,
                    end_time=end_time,
                )
            except bg_accessor.Error as e:
                # Remember the failure itself so repeated queries fail fast.
                success, results = False, e
            else:
                success = True

            log.rendering(
                "find(%s) - %f secs" % (query.pattern, time.time() - find_start)
            )
            self.django_cache().set(cache_key, (success, results), self._cache_timeout)

        if not success:
            raise results

        metric_names, directories = results

        for name in metric_names:
            yield node.LeafNode(
                name,
                Reader(self.accessor(), self.cache(), self.carbonlink(), name),
            )

        for directory in directories:
            yield node.BranchNode(directory)
Example 3
0
    def metrics_to_nodes(self, metrics, pattern):
        """Filter metrics against a glob pattern and convert them to nodes.

        Directories are not stored anywhere, so they are inferred from the
        metric names: every proper dotted prefix of a metric is a directory.

        Args:
          metrics: iterable of dotted metric names.
          pattern: Graphite glob pattern to filter both metrics and the
            inferred directories.

        Yields:
          node.BranchNode for each matching directory, then node.LeafNode
          (with no reader) for each matching metric.
        """
        # From a metric list, infer directories by accumulating dotted
        # prefixes incrementally. This replaces the earlier
        # '.'.join(components[:i]) per prefix, which re-joined from scratch
        # each time (quadratic in the number of components per metric).
        directories = set()
        for metric in metrics:
            prefix = None
            components = metric.split(".")
            # All components except the last form directory prefixes.
            for component in components[:-1]:
                prefix = component if prefix is None else prefix + "." + component
                directories.add(prefix)

        # Then filter everything according to the pattern.
        for directory in glob_utils.glob(directories, pattern):
            yield node.BranchNode(directory)

        for metric in glob_utils.glob(metrics, pattern):
            yield node.LeafNode(metric, None)
Example 4
0
    def find_nodes(self, query):
        """Yield leaf and branch nodes matching a Graphite find query.

        The outcome is memoized in the Django cache as a (success, results)
        pair, so accessor failures are cached too and re-raised on later
        hits.

        Args:
          query: a Graphite FindQuery-like object. query.pattern is the
            glob; an optional truthy query.leaves_only attribute suppresses
            directory (branch) results.

        Yields:
          node.LeafNode for each matching metric, then (unless leaves_only)
          node.BranchNode for each matching directory.

        Raises:
          bg_accessor.Error: if the underlying glob failed, possibly on a
            previous, cached attempt.
        """
        # TODO: we should probably consider query.startTime and query.endTime
        #  to filter out metrics that had no points in this interval.

        leaves_only = hasattr(query, 'leaves_only') and query.leaves_only
        # leaves_only must be part of the cache key: the same pattern with
        # and without directories produces different results, and hashing
        # the pattern alone lets the two query flavours serve each other's
        # stale entries.
        cache_key = "find_nodes:%s" % (hashing.compactHash(
            "%s:%s" % (query.pattern, bool(leaves_only))))
        cached = self.django_cache().get(cache_key)
        # Explicit None test: the cached value is always a 2-tuple, but a
        # plain truthiness check would conflate "miss" with a falsy entry.
        if cached is not None:
            cache_hit = True
            success, results = cached
        else:
            find_start = time.time()
            try:
                results = glob_utils.graphite_glob(self.accessor(),
                                                   query.pattern,
                                                   metrics=True,
                                                   directories=not leaves_only)
                success = True
            except bg_accessor.Error as e:
                # Cache the failure itself so repeated queries fail fast.
                success = False
                results = e

            log.rendering('find(%s) - %f secs' %
                          (query.pattern, time.time() - find_start))
            cache_hit = False

        if not cache_hit:
            self.django_cache().set(cache_key, (success, results),
                                    self._cache_timeout)

        if not success:
            raise results

        metric_names, directories = results

        for metric_name in metric_names:
            reader = Reader(self.accessor(), self.cache(), self.carbonlink(),
                            metric_name)
            yield node.LeafNode(metric_name, reader)

        for directory in directories:
            yield node.BranchNode(directory)