Code Example #1
File: graphite.py Project: nunb/biggraphite
    def find_nodes(self, query):
        """Find nodes matching a query."""
        # TODO: we should probably consider query.startTime and query.endTime
        #  to filter out metrics that had no points in this interval.

        cache_key = "find:%s" % (hashing.compactHash(query.pattern))
        results = self.django_cache().get(cache_key)
        if results:
            cache_hit = True
        else:
            find_start = time.time()
            results = glob_utils.graphite_glob(self.accessor(), query.pattern)
            log.rendering(
                'find(%s) - %f secs' % (query.pattern, time.time() - find_start))
            cache_hit = False

        metric_names, directories = results

        for metric_name in metric_names:
            reader = Reader(
                self.accessor(), self.cache(), self.carbonlink(), metric_name)
            yield node.LeafNode(metric_name, reader)

        for directory in directories:
            yield node.BranchNode(directory)

        if not cache_hit:
            self.django_cache().set(cache_key, results, self._cache_timeout)
Code Example #2
File: rcache.py Project: wayslog/graphite-web
def hashData(targets, start, end, prefix=False):
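    # Build a deterministic key: sorted targets plus the time range, hashed,
    # optionally namespaced with REDIS_LOCAL_PREFIX.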
    target_str = ",".join(sorted(targets))
    start_str = epoch_time_str(start)
    end_str = epoch_time_str(end)
    hash_key = target_str + "@" + start_str + ":" + end_str
    prefix = REDIS_LOCAL_PREFIX if prefix else ""
    return prefix + compactHash(hash_key)
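For reference, compactHash (from graphite-web's graphite/render/hashing.py) collapses an arbitrarily long key into a fixed-width MD5 hex digest, which keeps cache keys short and free of characters that backends such as memcached reject. A minimal sketch of the behavior, assuming the upstream definition:

import hashlib

def compactHash(string):
    # Fixed-width, cache-safe digest of an arbitrarily long key string.
    return hashlib.md5(string.encode('utf-8')).hexdigest()

print(compactHash('a.b.*@0:3600'))  # a 32-character hex string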
Code Example #3
  def __init__(self, store, query):
    self.store = store
    self.query = query

    if query.startTime:
      start = query.startTime - (query.startTime % settings.FIND_CACHE_DURATION)
    else:
      start = ""

    if query.endTime:
      end = query.endTime - (query.endTime % settings.FIND_CACHE_DURATION)
    else:
      end = ""

    self.cacheKey = "find:%s:%s:%s:%s" % (store.host, compactHash(query.pattern), start, end)
Code Example #4
    def __init__(self, store, query):
        self.store = store
        self.query = query

        # prevent divide by 0
        cacheTTL = settings.FIND_CACHE_DURATION or 1
        if query.startTime:
            start = query.startTime - (query.startTime % cacheTTL)
        else:
            start = ""

        if query.endTime:
            end = query.endTime - (query.endTime % cacheTTL)
        else:
            end = ""

        self.cacheKey = "find:%s:%s:%s:%s" % (
            store.host, compactHash(query.pattern), start, end)
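The "prevent divide by 0" guard above exists because Python's % operator raises ZeroDivisionError when FIND_CACHE_DURATION is 0. With a positive duration, the subtraction buckets timestamps so that all requests within one TTL window share a cache key; with the fallback of 1, bucketing degrades to a no-op instead of crashing. A small sketch with illustrative values:

cacheTTL = 0 or 1                  # FIND_CACHE_DURATION == 0 falls back to 1
assert 1700000123 % cacheTTL == 0  # no-op bucketing, but no ZeroDivisionError

cacheTTL = 300                     # e.g. a five-minute FIND_CACHE_DURATION
start = 1700000123 - (1700000123 % cacheTTL)
assert start == 1700000100         # any start in [1700000100, 1700000399] shares this key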
Code Example #5
  def __init__(self, store, query):
    self.store = store
    self.query = query
    self.connection = None
    self.failed = False

    if query.startTime:
      start = query.startTime - (query.startTime % settings.FIND_CACHE_DURATION)
    else:
      start = ""

    if query.endTime:
      end = query.endTime - (query.endTime % settings.FIND_CACHE_DURATION)
    else:
      end = ""

    self.cacheKey = "find:%s:%s:%s:%s" % (store.host, compactHash(query.pattern), start, end)
    self.cachedResult = None
Code Example #6
File: remote.py Project: wushenwu/graphite-web
    def __init__(self, store, query):
        self.store = store
        self.query = query

        if query.startTime:
            start = query.startTime - \
                (query.startTime % settings.FIND_CACHE_DURATION)
        else:
            start = ""

        if query.endTime:
            end = query.endTime - (query.endTime %
                                   settings.FIND_CACHE_DURATION)
        else:
            end = ""

        self.cacheKey = "find:%s:%s:%s:%s" % (
            store.host, compactHash(query.pattern), start, end)
Code Example #7
File: graphite.py Project: Willymontaz/biggraphite
    def find_nodes(self, query):
        """Find nodes matching a query."""
        # TODO: we should probably consider query.startTime and query.endTime
        #  to filter out metrics that had no points in this interval.

        leaves_only = hasattr(query, 'leaves_only') and query.leaves_only
        cache_key = "find_nodes:%s" % (hashing.compactHash(query.pattern))
        cached = self.django_cache().get(cache_key)
        if cached:
            cache_hit = True
            success, results = cached
        else:
            find_start = time.time()
            try:
                results = glob_utils.graphite_glob(self.accessor(),
                                                   query.pattern,
                                                   metrics=True,
                                                   directories=not leaves_only)
                success = True
            except bg_accessor.Error as e:
                success = False
                results = e

            log.rendering('find(%s) - %f secs' %
                          (query.pattern, time.time() - find_start))
            cache_hit = False

        if not cache_hit:
            self.django_cache().set(cache_key, (success, results),
                                    self._cache_timeout)

        if not success:
            raise results

        metric_names, directories = results

        for metric_name in metric_names:
            reader = Reader(self.accessor(), self.cache(), self.carbonlink(),
                            metric_name)
            yield node.LeafNode(metric_name, reader)

        for directory in directories:
            yield node.BranchNode(directory)
Code Example #8
File: graphite.py Project: nsyntych/biggraphite
    def _hash(self, obj):
        # Make sure we use all the members of the object to
        # build a unique key.
        return hashing.compactHash(str(sorted(vars(obj).items())))
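Examples #8 and #10 build the key from every attribute of the object: vars(obj) exposes the instance __dict__, and sorting its items makes the rendered string independent of attribute insertion order, so equal objects hash identically. A self-contained sketch (the Query class here is hypothetical):

import hashlib

class Query(object):
    def __init__(self, pattern, startTime, endTime):
        self.pattern = pattern
        self.startTime = startTime
        self.endTime = endTime

q = Query('foo.bar.*', 0, 3600)
# sorted() fixes the attribute order, so two equal queries produce one key
key = hashlib.md5(str(sorted(vars(q).items())).encode('utf-8')).hexdigest()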
Code Example #9
    def find_nodes(self, query, timer=None):
        timer.set_msg('host: {host}, query: {query}'.format(host=self.host,
                                                            query=query))

        log.debug("RemoteFinder.find_nodes(host=%s, query=%s) called" %
                  (self.host, query))

        # prevent divide by 0
        cacheTTL = settings.FIND_CACHE_DURATION or 1
        if query.startTime:
            start = query.startTime - (query.startTime % cacheTTL)
        else:
            start = ""

        if query.endTime:
            end = query.endTime - (query.endTime % cacheTTL)
        else:
            end = ""

        cacheKey = "find:%s:%s:%s:%s" % (self.host, compactHash(
            query.pattern), start, end)

        results = cache.get(cacheKey)
        if results is not None:
            log.debug(
                "RemoteFinder.find_nodes(host=%s, query=%s) using cached result"
                % (self.host, query))
        else:
            url = '/metrics/find/'

            query_params = [
                ('local', self.params.get('local', '1')),
                ('format', self.params.get('format', 'pickle')),
                ('query', query.pattern),
            ]
            if query.startTime:
                query_params.append(('from', int(query.startTime)))

            if query.endTime:
                query_params.append(('until', int(query.endTime)))

            result = self.request(url,
                                  fields=query_params,
                                  headers=query.headers,
                                  timeout=settings.REMOTE_FIND_TIMEOUT)

            try:
                if result.getheader('content-type') == 'application/x-msgpack':
                    results = msgpack.load(BufferedHTTPReader(
                        result, buffer_size=settings.REMOTE_BUFFER_SIZE),
                                           encoding='utf-8')
                else:
                    results = unpickle.load(
                        BufferedHTTPReader(
                            result, buffer_size=settings.REMOTE_BUFFER_SIZE))
            except Exception as err:
                self.fail()
                log.exception(
                    "RemoteFinder[%s] Error decoding find response from %s: %s"
                    % (self.host, result.url_full, err))
                raise Exception("Error decoding find response from %s: %s" %
                                (result.url_full, err))
            finally:
                result.release_conn()

            cache.set(cacheKey, results, settings.FIND_CACHE_DURATION)

        for node_info in results:
            # handle both 1.x and 0.9.x output
            path = node_info.get('path') or node_info.get('metric_path')
            is_leaf = node_info.get('is_leaf') or node_info.get('isLeaf')
            intervals = node_info.get('intervals') or []
            if not isinstance(intervals, IntervalSet):
                intervals = IntervalSet([
                    Interval(interval[0], interval[1])
                    for interval in intervals
                ])

            node_info = {
                'is_leaf': is_leaf,
                'path': path,
                'intervals': intervals,
            }

            if is_leaf:
                reader = RemoteReader(self, node_info)
                node = LeafNode(path, reader)
            else:
                node = BranchNode(path)

            node.local = False
            yield node
Code Example #10
File: graphite.py Project: iksaif/biggraphite
    def _hash(self, obj):
        # Make sure we use all the members of the object to
        # build a unique key.
        return hashing.compactHash(str(sorted(vars(obj).items())))
Code Example #11
File: remote_storage.py Project: Cue/graphite
 def __init__(self, store, query):
   self.store = store
   self.query = query
   self.connection = None
   self.cacheKey = compactHash('find:%s:%s' % (self.store.host, query))
   self.cachedResults = None
Code Example #12
File: remote.py Project: zshaikh76/graphite-web
    def find_nodes(self, query, timer=None):
        timer.set_msg('host: {host}, query: {query}'.format(host=self.host,
                                                            query=query))

        log.debug("RemoteFinder.find_nodes(host=%s, query=%s) called" %
                  (self.host, query))

        # prevent divide by 0
        cacheTTL = settings.FIND_CACHE_DURATION or 1
        if query.startTime:
            start = query.startTime - (query.startTime % cacheTTL)
        else:
            start = ""

        if query.endTime:
            end = query.endTime - (query.endTime % cacheTTL)
        else:
            end = ""

        cacheKey = "find:%s:%s:%s:%s" % (self.host, compactHash(
            query.pattern), start, end)

        results = cache.get(cacheKey)
        if results is not None:
            log.debug(
                "RemoteFinder.find_nodes(host=%s, query=%s) using cached result"
                % (self.host, query))
        else:
            url = '/metrics/find/'

            query_params = [
                ('local', self.params.get('local', '1')),
                ('format', self.params.get('format', 'pickle')),
                ('query', query.pattern),
            ]
            if query.startTime:
                query_params.append(('from', int(query.startTime)))

            if query.endTime:
                query_params.append(('until', int(query.endTime)))

            result = self.request(url,
                                  fields=query_params,
                                  headers=query.headers,
                                  timeout=settings.FIND_TIMEOUT)

            try:
                if result.getheader('content-type') == 'application/x-msgpack':
                    results = msgpack.load(BufferedHTTPReader(
                        result, buffer_size=settings.REMOTE_BUFFER_SIZE),
                                           encoding='utf-8')
                else:
                    results = unpickle.load(
                        BufferedHTTPReader(
                            result, buffer_size=settings.REMOTE_BUFFER_SIZE))
            except Exception as err:
                self.fail()
                log.exception(
                    "RemoteFinder[%s] Error decoding find response from %s: %s"
                    % (self.host, result.url_full, err))
                raise Exception("Error decoding find response from %s: %s" %
                                (result.url_full, err))
            finally:
                result.release_conn()

            cache.set(cacheKey, results, settings.FIND_CACHE_DURATION)

        # We don't return a generator here: this function may run as a job in
        # a thread pool, and returning a generator has the following risks:
        # 1. Generators are lazy; if the job never iterates the returned
        #    generator, the real (time-consuming network) execution is likely
        #    to be triggered in the calling thread, defeating the thread pool.
        # 2. Because execution is deferred, the job manager cannot catch job
        #    runtime exceptions as expected/designed.
        nodes = []
        for node_info in results:
            # handle both 1.x and 0.9.x output
            path = node_info.get('path') or node_info.get('metric_path')
            is_leaf = node_info.get('is_leaf') or node_info.get('isLeaf')
            intervals = node_info.get('intervals') or []
            if not isinstance(intervals, IntervalSet):
                intervals = IntervalSet([
                    Interval(interval[0], interval[1])
                    for interval in intervals
                ])

            node_info = {
                'is_leaf': is_leaf,
                'path': path,
                'intervals': intervals,
            }

            if is_leaf:
                reader = RemoteReader(self, node_info)
                node = LeafNode(path, reader)
            else:
                node = BranchNode(path)

            node.local = False
            nodes.append(node)

        return nodes
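The comment above is worth demonstrating. When a generator function is submitted to a thread pool, the worker thread merely constructs the generator object; the body, including any network work or exception, runs only when some thread iterates it. A minimal sketch of the pitfall (not graphite-web code):

from concurrent.futures import ThreadPoolExecutor

def find_nodes_lazy(query):
    raise RuntimeError('remote fetch failed')  # simulated runtime failure
    yield query  # the yield makes this function a generator

with ThreadPoolExecutor(max_workers=1) as pool:
    future = pool.submit(find_nodes_lazy, 'foo.*')
    gen = future.result()  # "succeeds": the worker only built the generator
    try:
        list(gen)  # the body actually runs here, in the calling thread
    except RuntimeError as err:
        print('error surfaced in the caller, not in the pool:', err)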
Code Example #13
File: remote.py Project: cbowman0/graphite-web
    def find_nodes(self, query, timer=None):
        timer.set_msg(
            'host: {host}, query: {query}'.format(
                host=self.host,
                query=query))

        log.debug("RemoteFinder.find_nodes(host=%s, query=%s) called" % (self.host, query))

        # prevent divide by 0
        cacheTTL = settings.FIND_CACHE_DURATION or 1
        if query.startTime:
            start = query.startTime - (query.startTime % cacheTTL)
        else:
            start = ""

        if query.endTime:
            end = query.endTime - (query.endTime % cacheTTL)
        else:
            end = ""

        cacheKey = "find:%s:%s:%s:%s" % (self.host, compactHash(query.pattern), start, end)

        results = cache.get(cacheKey)
        if results is not None:
            log.debug(
                "RemoteFinder.find_nodes(host=%s, query=%s) using cached result" %
                (self.host, query))
        else:
            url = '/metrics/find/'

            query_params = [
                ('local', self.params.get('local', '1')),
                ('format', self.params.get('format', 'pickle')),
                ('query', query.pattern),
            ]
            if query.startTime:
                query_params.append(('from', int(query.startTime)))

            if query.endTime:
                query_params.append(('until', int(query.endTime)))

            result = self.request(
                url,
                fields=query_params,
                headers=query.headers,
                timeout=settings.FIND_TIMEOUT)

            results = self.deserialize(result)

            cache.set(cacheKey, results, settings.FIND_CACHE_DURATION)

        # We don't return a generator here: this function may run as a job in
        # a thread pool, and returning a generator has the following risks:
        # 1. Generators are lazy; if the job never iterates the returned
        #    generator, the real (time-consuming network) execution is likely
        #    to be triggered in the calling thread, defeating the thread pool.
        # 2. Because execution is deferred, the job manager cannot catch job
        #    runtime exceptions as expected/designed.
        nodes = []
        for node_info in results:
            # handle both 1.x and 0.9.x output
            path = node_info.get('path') or node_info.get('metric_path')
            is_leaf = node_info.get('is_leaf') or node_info.get('isLeaf')
            intervals = node_info.get('intervals') or []
            if not isinstance(intervals, IntervalSet):
                intervals = IntervalSet(
                    [Interval(interval[0], interval[1]) for interval in intervals])

            node_info = {
                'is_leaf': is_leaf,
                'path': path,
                'intervals': intervals,
            }

            if is_leaf:
                reader = RemoteReader(self, node_info)
                node = LeafNode(path, reader)
            else:
                node = BranchNode(path)

            node.local = False
            nodes.append(node)

        return nodes
Code Example #14
File: remote.py Project: iksaif/graphite-web
    def find_nodes(self, query, timer=None):
        timer.set_msg(
            'host: {host}, query: {query}'.format(
                host=self.host,
                query=query))

        log.debug("RemoteFinder.find_nodes(host=%s, query=%s) called" % (self.host, query))

        # prevent divide by 0
        cacheTTL = settings.FIND_CACHE_DURATION or 1
        if query.startTime:
            start = query.startTime - (query.startTime % cacheTTL)
        else:
            start = ""

        if query.endTime:
            end = query.endTime - (query.endTime % cacheTTL)
        else:
            end = ""

        cacheKey = "find:%s:%s:%s:%s" % (self.host, compactHash(query.pattern), start, end)

        results = cache.get(cacheKey)
        if results is not None:
            log.debug(
                "RemoteFinder.find_nodes(host=%s, query=%s) using cached result" %
                (self.host, query))
        else:
            url = '/metrics/find/'

            query_params = [
                ('local', self.params.get('local', '1')),
                ('format', self.params.get('format', 'pickle')),
                ('query', query.pattern),
            ]
            if query.startTime:
                query_params.append(('from', int(query.startTime)))

            if query.endTime:
                query_params.append(('until', int(query.endTime)))

            result = self.request(
                url,
                fields=query_params,
                headers=query.headers,
                timeout=settings.FIND_TIMEOUT)

            try:
                if result.getheader('content-type') == 'application/x-msgpack':
                    results = msgpack.load(BufferedHTTPReader(
                        result, buffer_size=settings.REMOTE_BUFFER_SIZE), encoding='utf-8')
                else:
                    results = unpickle.load(BufferedHTTPReader(
                        result, buffer_size=settings.REMOTE_BUFFER_SIZE))
            except Exception as err:
                self.fail()
                log.exception(
                    "RemoteFinder[%s] Error decoding find response from %s: %s" %
                    (self.host, result.url_full, err))
                raise Exception("Error decoding find response from %s: %s" % (result.url_full, err))
            finally:
                result.release_conn()

            cache.set(cacheKey, results, settings.FIND_CACHE_DURATION)

        for node_info in results:
            # handle both 1.x and 0.9.x output
            path = node_info.get('path') or node_info.get('metric_path')
            is_leaf = node_info.get('is_leaf') or node_info.get('isLeaf')
            intervals = node_info.get('intervals') or []
            if not isinstance(intervals, IntervalSet):
                intervals = IntervalSet(
                    [Interval(interval[0], interval[1]) for interval in intervals])

            node_info = {
                'is_leaf': is_leaf,
                'path': path,
                'intervals': intervals,
            }

            if is_leaf:
                reader = RemoteReader(self, node_info)
                node = LeafNode(path, reader)
            else:
                node = BranchNode(path)

            node.local = False
            yield node