Example #1
def list_metrics(accessor, pattern, graphite=True):
    """Return the list of metrics corresponding to pattern.

    Exit with error message if None.

    Args:
        accessor: Accessor, a connected accessor
        pattern: string, e.g. my.metric.a or my.metric.**.a

    Optional Args:
        graphite: bool, use graphite globbing if True.

    Returns:
        iterable(Metric)
    """
    if not graphite:
        metrics_names = accessor.glob_metric_names(pattern)
    else:
        metrics_names, _ = graphite_glob(accessor,
                                         pattern,
                                         metrics=True,
                                         directories=False)

    for metric in metrics_names:
        if metric is None:
            continue
        yield accessor.get_metric(metric)
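
A minimal consumption sketch for the generator above, assuming accessor is an already-connected BigGraphite accessor and the glob pattern is only a placeholder; the run() commands in the later examples consume it the same way:

# Hypothetical usage; accessor must already be connected.
for metric in list_metrics(accessor, "my.metric.**"):
    if metric:  # guard against None results, as the run() examples below do
        print(metric.name)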
Example #2
    def test_graphite_glob(self):
        for name in self._metric_names:
            metric = bg_test_utils.make_metric_with_defaults(name)
            self.accessor.create_metric(metric)

        scenarii = [
            # Single character wildcard
            ("a?", ["aa"], []),
            # Component wildcard
            ("*", self._metrics_by_length[1], ["a", "x"]),
            ("*.*.c", ["a.b.c", "x.y.c"], []),
            ("a.*.*", ["a.a.a", "a.b.c", "a.b.d"], []),
            ("*.*.*", self._metrics_by_length[3], []),
            # Multi-character wildcard
            ("a*", ["a", "aa", "aaa"], ["a"]),
            # Choices
            ("*.{b,c,d,5}.?", ["a.b.c", "a.b.d"], []),
            # Globstar wildcard
            ("a.**", ["a.a.a", "a.b.c", "a.b.d"], ["a.a", "a.b"]),
        ]
        for (glob, metrics, directories) in scenarii:
            (found_metrics,
             found_directories) = bg_glob.graphite_glob(self.accessor, glob)
            found_metric_names = [
                found_metric.name for found_metric in found_metrics
            ]
            found_directory_names = [
                found_directory.name for found_directory in found_directories
            ]
            self.assertEqual((metrics, directories),
                             (found_metric_names, found_directory_names))
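
The scenarios above exercise Graphite's glob syntax: "?" matches one character inside a path component, "*" matches within a single component, "{b,c,d}" chooses among alternatives, and "**" also crosses component boundaries. As an illustration of those semantics only (the real glob_utils.graphite_glob resolves patterns against the metric store, not with a regex), a pattern could be translated roughly like this:

import re

def graphite_glob_to_regex(pattern):
    """Translate a Graphite-style glob into an anchored regular expression."""
    out = []
    i = 0
    while i < len(pattern):
        if pattern.startswith("**", i):
            out.append(".*")       # globstar: crosses component boundaries
            i += 2
        elif pattern[i] == "*":
            out.append("[^.]*")    # wildcard within a single component
            i += 1
        elif pattern[i] == "?":
            out.append("[^.]")     # single character within a component
            i += 1
        elif pattern[i] == "{":
            end = pattern.index("}", i)
            choices = pattern[i + 1:end].split(",")
            out.append("(" + "|".join(re.escape(c) for c in choices) + ")")
            i = end + 1
        else:
            out.append(re.escape(pattern[i]))
            i += 1
    return re.compile("^" + "".join(out) + "$")

# For instance, graphite_glob_to_regex("*.{b,c,d,5}.?").match("a.b.c") succeeds.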
Example #3
    def find_nodes(self, query):
        """Find nodes matching a query."""
        # TODO: we should probably consider query.startTime and query.endTime
        #  to filter out metrics that had no points in this interval.

        cache_key = "find:%s" % (hashing.compactHash(query.pattern))
        results = self.django_cache().get(cache_key)
        if results:
            cache_hit = True
        else:
            find_start = time.time()
            results = glob_utils.graphite_glob(self.accessor(), query.pattern)
            log.rendering(
                'find(%s) - %f secs' % (query.pattern, time.time() - find_start))
            cache_hit = False

        metric_names, directories = results

        for metric_name in metric_names:
            reader = Reader(
                self.accessor(), self.cache(), self.carbonlink(), metric_name)
            yield node.LeafNode(metric_name, reader)

        for directory in directories:
            yield node.BranchNode(directory)

        if not cache_hit:
            self.django_cache().set(cache_key, results, self._cache_timeout)
Example #4
    def test_graphite_glob(self):
        for name in self._metric_names:
            metric = bg_test_utils.make_metric(name)
            self.accessor.create_metric(metric)

        scenarii = [
            # Single character wildcard
            ("a?", ["aa"], []),
            # Component wildcard
            ("*", self._metrics_by_length[1], ["a", "x"]),
            ("*.*.c", ["a.b.c", "x.y.c"], []),
            ("a.*.*", ["a.a.a", "a.b.c", "a.b.d"], []),
            ("*.*.*", self._metrics_by_length[3], []),
            # Multi-character wildcard
            ("a*", ["a", "aa", "aaa"], ["a"]),
            # Choices
            ("*.{b,c,d,5}.?", ["a.b.c", "a.b.d"], []),
            # Globstar wildcard
            ("a.**", ["a.a.a", "a.b.c", "a.b.d"], ["a.a", "a.b"]),
        ]
        for (glob, metrics, directories) in scenarii:
            (found_metrics, found_directories) = bg_glob.graphite_glob(self.accessor, glob)
            found_metric_names = [found_metric.name for found_metric in found_metrics]
            found_directory_names = [found_directory.name for found_directory in found_directories]
            self.assertEqual((metrics, directories), (found_metric_names, found_directory_names))
Example #5
def list_metrics(accessor, pattern, graphite=True):
    """Return the list of metrics corresponding to pattern.

    Exit with error message if None.

    Args:
        accessor: Accessor, a connected accessor
        pattern: string, e.g. my.metric.a or my.metric.**.a

    Optional Args:
        graphite: bool, use graphite globbing if True.

    Returns:
        iterable(Metric)
    """
    if not graphite:
        metrics_names = accessor.glob_metric_names(pattern)
    else:
        metrics, _ = graphite_glob(
            accessor, pattern, metrics=True, directories=False
        )
        metrics_names = [metric.name for metric in metrics]

    for metric in metrics_names:
        if metric is None:
            continue
        yield accessor.get_metric(metric)
Example #6
    def find_nodes(self, query):
        """Find nodes matching a query."""
        leaves_only = hasattr(query, "leaves_only") and query.leaves_only
        cache_key = "find_nodes:%s" % self._hash(query)
        cached = self.django_cache().get(cache_key)
        if cached is not None:
            cache_hit = True
            success, results = cached
        else:
            find_start = time.time()
            try:
                if query.startTime is None:
                    start_time = None
                else:
                    start_time = datetime.fromtimestamp(query.startTime)

                if query.endTime is None:
                    end_time = None
                else:
                    end_time = datetime.fromtimestamp(query.endTime)

                results = glob_utils.graphite_glob(
                    self.accessor(),
                    query.pattern,
                    metrics=True,
                    directories=not leaves_only,
                    start_time=start_time,
                    end_time=end_time,
                )
                success = True
            except bg_accessor.Error as e:
                success = False
                results = e

            log.rendering(
                "find(%s) - %f secs" % (query.pattern, time.time() - find_start)
            )
            cache_hit = False

        if not cache_hit:
            self.django_cache().set(cache_key, (success, results), self._cache_timeout)

        if not success:
            raise results

        metric_names, directories = results

        for metric_name in metric_names:
            reader = Reader(
                self.accessor(), self.cache(), self.carbonlink(), metric_name
            )
            yield node.LeafNode(metric_name, reader)

        for directory in directories:
            yield node.BranchNode(directory)
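
Caching the (success, results) pair means backend errors are cached as well, so a failing glob is not retried on every request until the entry expires. A self-contained sketch of that pattern, with a plain dict standing in for the Django cache and all names chosen for illustration:

import time

_cache = {}          # cache_key -> (expiry_timestamp, (success, results))
CACHE_TIMEOUT = 60   # seconds; illustrative value

def cached_find(cache_key, compute):
    """Run compute(), caching successes and failures alike."""
    entry = _cache.get(cache_key)
    if entry is not None and entry[0] > time.time():
        success, results = entry[1]
    else:
        try:
            results = compute()
            success = True
        except Exception as exc:  # the real code catches bg_accessor.Error
            results = exc
            success = False
        _cache[cache_key] = (time.time() + CACHE_TIMEOUT, (success, results))
    if not success:
        raise results
    return results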
Example #7
    def _find_glob(self, query, leaves_only, start_time, end_time):
        if leaves_only:
            return glob_utils.graphite_glob_leaves(
                self.accessor(),
                query.pattern,
                start_time=start_time,
                end_time=end_time,
            )
        else:
            return glob_utils.graphite_glob(
                self.accessor(),
                query.pattern,
                metrics=True,
                directories=True,
                start_time=start_time,
                end_time=end_time,
            )
Example #8
    def run(self, accessor, opts):
        """List metrics and directories.

        See command.CommandBase.
        """
        accessor.connect()

        if not opts.graphite:
            directories_names = accessor.glob_directory_names(opts.glob)
        else:
            _, directories_names = graphite_glob(
                accessor, opts.glob, metrics=False, directories=True
            )
        for directory in directories_names:
            print("d %s" % directory)
        for metric in list_metrics(accessor, opts.glob, opts.graphite):
            if metric:
                print("m %s %s" % (metric.name, metric.metadata.as_string_dict()))
Example #9
    def find_nodes(self, query):
        """Find nodes matching a query."""
        # TODO: we should probably consider query.startTime and query.endTime
        #  to filter out metrics that had no points in this interval.

        leaves_only = hasattr(query, 'leaves_only') and query.leaves_only
        cache_key = "find_nodes:%s" % (hashing.compactHash(query.pattern))
        cached = self.django_cache().get(cache_key)
        if cached:
            cache_hit = True
            success, results = cached
        else:
            find_start = time.time()
            try:
                results = glob_utils.graphite_glob(self.accessor(),
                                                   query.pattern,
                                                   metrics=True,
                                                   directories=not leaves_only)
                success = True
            except bg_accessor.Error as e:
                success = False
                results = e

            log.rendering('find(%s) - %f secs' %
                          (query.pattern, time.time() - find_start))
            cache_hit = False

        if not cache_hit:
            self.django_cache().set(cache_key, (success, results),
                                    self._cache_timeout)

        if not success:
            raise results

        metric_names, directories = results

        for metric_name in metric_names:
            reader = Reader(self.accessor(), self.cache(), self.carbonlink(),
                            metric_name)
            yield node.LeafNode(metric_name, reader)

        for directory in directories:
            yield node.BranchNode(directory)
Example #10
    def run(self, accessor, opts):
        """List metrics and directories.

        See command.CommandBase.
        """
        accessor.connect()

        if not opts.graphite:
            directories_names = accessor.glob_directory_names(opts.glob)
        else:
            _, directories_names = graphite_glob(accessor,
                                                 opts.glob,
                                                 metrics=False,
                                                 directories=True)
        for directory in directories_names:
            print("d %s" % directory)
        for metric in list_metrics(accessor, opts.glob, opts.graphite):
            if metric:
                print("m %s %s" %
                      (metric.name, metric.metadata.as_string_dict()))