Example #1
    def _deserialize_stream(stream, content_type):
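        # Decode a remote response body according to its advertised content
        # type: msgpack for 'application/x-msgpack' (the encoding kwarg needs
        # msgpack-python < 1.0), otherwise graphite's restricted unpickler.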
        if content_type == 'application/x-msgpack':
            data = msgpack.load(stream, encoding='utf-8')
        else:
            data = unpickle.load(stream)

        return data
Example #2
    def context(self):
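        # Lazily load and memoize context data from a '.context.pickle' file
        # stored alongside this object's source file; default to an empty dict.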
        if self.cached_context_data is not None:
            return self.cached_context_data

        context_path = self.fs_path[:-len(self.extension)] + '.context.pickle'

        if exists(context_path):
            fh = open(context_path, 'rb')
            context_data = unpickle.load(fh)
            fh.close()
        else:
            context_data = {}

        self.cached_context_data = context_data
        return context_data
Example #3
    def context(self):
        if self.cached_context_data is not None:
            return self.cached_context_data

        context_path = self.fs_path[: -len(self.extension)] + ".context.pickle"

        if exists(context_path):
            fh = open(context_path, "rb")
            context_data = unpickle.load(fh)
            fh.close()
        else:
            context_data = {}

        self.cached_context_data = context_data
        return context_data
Example #4
def load_whitelist():
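    # Load the configured metric whitelist from its pickle file on disk,
    # using graphite's restricted unpickle module for safety.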
    with open(settings.WHITELIST_FILE, 'rb') as fh:
        whitelist = unpickle.load(fh)
    return whitelist
Example #5
    def find_nodes(self, query, timer=None):
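        # Resolve a metric-name query against this remote Graphite host.
        # Results are cached under a key built from the host, a compact hash
        # of the query pattern, and the time window quantized to the cache
        # TTL, then yielded lazily as LeafNode/BranchNode objects.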
        timer.set_msg('host: {host}, query: {query}'.format(host=self.host,
                                                            query=query))

        log.debug("RemoteFinder.find_nodes(host=%s, query=%s) called" %
                  (self.host, query))

        # prevent divide by 0
        cacheTTL = settings.FIND_CACHE_DURATION or 1
        if query.startTime:
            start = query.startTime - (query.startTime % cacheTTL)
        else:
            start = ""

        if query.endTime:
            end = query.endTime - (query.endTime % cacheTTL)
        else:
            end = ""

        cacheKey = "find:%s:%s:%s:%s" % (self.host, compactHash(
            query.pattern), start, end)

        results = cache.get(cacheKey)
        if results is not None:
            log.debug(
                "RemoteFinder.find_nodes(host=%s, query=%s) using cached result"
                % (self.host, query))
        else:
            url = '/metrics/find/'

            query_params = [
                ('local', self.params.get('local', '1')),
                ('format', self.params.get('format', 'pickle')),
                ('query', query.pattern),
            ]
            if query.startTime:
                query_params.append(('from', int(query.startTime)))

            if query.endTime:
                query_params.append(('until', int(query.endTime)))

            result = self.request(url,
                                  fields=query_params,
                                  headers=query.headers,
                                  timeout=settings.REMOTE_FIND_TIMEOUT)

            try:
                if result.getheader('content-type') == 'application/x-msgpack':
                    results = msgpack.load(BufferedHTTPReader(
                        result, buffer_size=settings.REMOTE_BUFFER_SIZE),
                                           encoding='utf-8')
                else:
                    results = unpickle.load(
                        BufferedHTTPReader(
                            result, buffer_size=settings.REMOTE_BUFFER_SIZE))
            except Exception as err:
                self.fail()
                log.exception(
                    "RemoteFinder[%s] Error decoding find response from %s: %s"
                    % (self.host, result.url_full, err))
                raise Exception("Error decoding find response from %s: %s" %
                                (result.url_full, err))
            finally:
                result.release_conn()

            cache.set(cacheKey, results, settings.FIND_CACHE_DURATION)

        for node_info in results:
            # handle both 1.x and 0.9.x output
            path = node_info.get('path') or node_info.get('metric_path')
            is_leaf = node_info.get('is_leaf') or node_info.get('isLeaf')
            intervals = node_info.get('intervals') or []
            if not isinstance(intervals, IntervalSet):
                intervals = IntervalSet([
                    Interval(interval[0], interval[1])
                    for interval in intervals
                ])

            node_info = {
                'is_leaf': is_leaf,
                'path': path,
                'intervals': intervals,
            }

            if is_leaf:
                reader = RemoteReader(self, node_info)
                node = LeafNode(path, reader)
            else:
                node = BranchNode(path)

            node.local = False
            yield node
Example #6
    def fetch_multi(self, startTime, endTime, now=None, requestContext=None):
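        # Fetch rendered data for every target in bulk_query from the remote
        # host in a single /render/ request, retrying failed requests up to
        # settings.MAX_FETCH_RETRIES before giving up.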
        if not self.bulk_query:
            return []

        query_params = [
            ('format', self.finder.params.get('format', 'pickle')),
            ('local', self.finder.params.get('local', '1')),
            ('noCache', '1'),
            ('from', int(startTime)),
            ('until', int(endTime))
        ]

        for target in self.bulk_query:
            query_params.append(('target', target))

        if now is not None:
            query_params.append(('now', int(now)))

        headers = requestContext.get('forwardHeaders') if requestContext else None

        retries = 1  # start counting at one to make log output and settings more readable
        while True:
            try:
                result = self.finder.request(
                    '/render/',
                    fields=query_params,
                    headers=headers,
                    timeout=settings.FETCH_TIMEOUT,
                )
                break
            except Exception:
                if retries >= settings.MAX_FETCH_RETRIES:
                    log.exception("Failed after %s attempts! Root cause:\n%s" %
                                  (settings.MAX_FETCH_RETRIES, format_exc()))
                    raise
                else:
                    log.exception("Got an exception when fetching data! Try: %i of %i. Root cause:\n%s" %
                                  (retries, settings.MAX_FETCH_RETRIES, format_exc()))
                    retries += 1

        try:
            if result.getheader('content-type') == 'application/x-msgpack':
                data = msgpack.load(BufferedHTTPReader(
                    result, buffer_size=settings.REMOTE_BUFFER_SIZE), encoding='utf-8')
            else:
                data = unpickle.load(BufferedHTTPReader(
                    result, buffer_size=settings.REMOTE_BUFFER_SIZE))
        except Exception as err:
            self.finder.fail()
            log.exception(
                "RemoteReader[%s] Error decoding render response from %s: %s" %
                (self.finder.host, result.url_full, err))
            raise Exception("Error decoding render response from %s: %s" % (result.url_full, err))
        finally:
            result.release_conn()

        try:
            return [
                {
                    'pathExpression': series.get('pathExpression', series['name']),
                    'name': series['name'],
                    'time_info': (series['start'], series['end'], series['step']),
                    'values': series['values'],
                }
                for series in data
            ]
        except Exception as err:
            self.finder.fail()
            log.exception(
                "RemoteReader[%s] Invalid render response from %s: %s" %
                (self.finder.host, result.url_full, repr(err)))
            raise Exception("Invalid render response from %s: %s" % (result.url_full, repr(err)))
Example #7
    def find_nodes(self, query, timer=None):
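        # Same remote find as in the previous examples, but returns an eagerly
        # built list; the comment before the loop below explains why a
        # generator is avoided when this runs as a thread-pool job.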
        timer.set_msg('host: {host}, query: {query}'.format(host=self.host,
                                                            query=query))

        log.debug("RemoteFinder.find_nodes(host=%s, query=%s) called" %
                  (self.host, query))

        # prevent divide by 0
        cacheTTL = settings.FIND_CACHE_DURATION or 1
        if query.startTime:
            start = query.startTime - (query.startTime % cacheTTL)
        else:
            start = ""

        if query.endTime:
            end = query.endTime - (query.endTime % cacheTTL)
        else:
            end = ""

        cacheKey = "find:%s:%s:%s:%s" % (self.host, compactHash(
            query.pattern), start, end)

        results = cache.get(cacheKey)
        if results is not None:
            log.debug(
                "RemoteFinder.find_nodes(host=%s, query=%s) using cached result"
                % (self.host, query))
        else:
            url = '/metrics/find/'

            query_params = [
                ('local', self.params.get('local', '1')),
                ('format', self.params.get('format', 'pickle')),
                ('query', query.pattern),
            ]
            if query.startTime:
                query_params.append(('from', int(query.startTime)))

            if query.endTime:
                query_params.append(('until', int(query.endTime)))

            result = self.request(url,
                                  fields=query_params,
                                  headers=query.headers,
                                  timeout=settings.FIND_TIMEOUT)

            try:
                if result.getheader('content-type') == 'application/x-msgpack':
                    results = msgpack.load(BufferedHTTPReader(
                        result, buffer_size=settings.REMOTE_BUFFER_SIZE),
                                           encoding='utf-8')
                else:
                    results = unpickle.load(
                        BufferedHTTPReader(
                            result, buffer_size=settings.REMOTE_BUFFER_SIZE))
            except Exception as err:
                self.fail()
                log.exception(
                    "RemoteFinder[%s] Error decoding find response from %s: %s"
                    % (self.host, result.url_full, err))
                raise Exception("Error decoding find response from %s: %s" %
                                (result.url_full, err))
            finally:
                result.release_conn()

            cache.set(cacheKey, results, settings.FIND_CACHE_DURATION)

        # We don't use a generator here: this function may be run as a job in a thread
        # pool, and returning a generator carries the following risks:
        # 1. Generators are lazy; if the job never iterates the returned generator, the
        #    time-consuming network operations are likely to be triggered in the calling
        #    thread instead, defeating the purpose of the thread pool.
        # 2. Because execution is delayed, the job manager cannot catch runtime
        #    exceptions from the job as expected/designed.
        nodes = []
        for node_info in results:
            # handle both 1.x and 0.9.x output
            path = node_info.get('path') or node_info.get('metric_path')
            is_leaf = node_info.get('is_leaf') or node_info.get('isLeaf')
            intervals = node_info.get('intervals') or []
            if not isinstance(intervals, IntervalSet):
                intervals = IntervalSet([
                    Interval(interval[0], interval[1])
                    for interval in intervals
                ])

            node_info = {
                'is_leaf': is_leaf,
                'path': path,
                'intervals': intervals,
            }

            if is_leaf:
                reader = RemoteReader(self, node_info)
                node = LeafNode(path, reader)
            else:
                node = BranchNode(path)

            node.local = False
            nodes.append(node)

        return nodes
Example #8
    def find_nodes(self, query, timer=None):
        timer.set_msg(
            'host: {host}, query: {query}'.format(
                host=self.host,
                query=query))

        log.debug("RemoteFinder.find_nodes(host=%s, query=%s) called" % (self.host, query))

        # prevent divide by 0
        cacheTTL = settings.FIND_CACHE_DURATION or 1
        if query.startTime:
            start = query.startTime - (query.startTime % cacheTTL)
        else:
            start = ""

        if query.endTime:
            end = query.endTime - (query.endTime % cacheTTL)
        else:
            end = ""

        cacheKey = "find:%s:%s:%s:%s" % (self.host, compactHash(query.pattern), start, end)

        results = cache.get(cacheKey)
        if results is not None:
            log.debug(
                "RemoteFinder.find_nodes(host=%s, query=%s) using cached result" %
                (self.host, query))
        else:
            url = '/metrics/find/'

            query_params = [
                ('local', self.params.get('local', '1')),
                ('format', self.params.get('format', 'pickle')),
                ('query', query.pattern),
            ]
            if query.startTime:
                query_params.append(('from', int(query.startTime)))

            if query.endTime:
                query_params.append(('until', int(query.endTime)))

            result = self.request(
                url,
                fields=query_params,
                headers=query.headers,
                timeout=settings.FIND_TIMEOUT)

            try:
                if result.getheader('content-type') == 'application/x-msgpack':
                    results = msgpack.load(BufferedHTTPReader(
                        result, buffer_size=settings.REMOTE_BUFFER_SIZE), encoding='utf-8')
                else:
                    results = unpickle.load(BufferedHTTPReader(
                        result, buffer_size=settings.REMOTE_BUFFER_SIZE))
            except Exception as err:
                self.fail()
                log.exception(
                    "RemoteFinder[%s] Error decoding find response from %s: %s" %
                    (self.host, result.url_full, err))
                raise Exception("Error decoding find response from %s: %s" % (result.url_full, err))
            finally:
                result.release_conn()

            cache.set(cacheKey, results, settings.FIND_CACHE_DURATION)

        for node_info in results:
            # handle both 1.x and 0.9.x output
            path = node_info.get('path') or node_info.get('metric_path')
            is_leaf = node_info.get('is_leaf') or node_info.get('isLeaf')
            intervals = node_info.get('intervals') or []
            if not isinstance(intervals, IntervalSet):
                intervals = IntervalSet(
                    [Interval(interval[0], interval[1]) for interval in intervals])

            node_info = {
                'is_leaf': is_leaf,
                'path': path,
                'intervals': intervals,
            }

            if is_leaf:
                reader = RemoteReader(self, node_info)
                node = LeafNode(path, reader)
            else:
                node = BranchNode(path)

            node.local = False
            yield node
Example #9
    def fetch_multi(self, startTime, endTime, now=None, requestContext=None):
        if not self.bulk_query:
            return []

        query_params = [('format', self.finder.params.get('format', 'pickle')),
                        ('local', self.finder.params.get('local', '1')),
                        ('noCache', '1'), ('from', int(startTime)),
                        ('until', int(endTime))]

        for target in self.bulk_query:
            query_params.append(('target', target))

        if now is not None:
            query_params.append(('now', int(now)))

        headers = requestContext.get(
            'forwardHeaders') if requestContext else None

        retries = 1  # start counting at one to make log output and settings more readable
        while True:
            try:
                result = self.finder.request(
                    '/render/',
                    fields=query_params,
                    headers=headers,
                    timeout=settings.REMOTE_FETCH_TIMEOUT,
                )
                break
            except Exception:
                if retries >= settings.MAX_FETCH_RETRIES:
                    log.exception("Failed after %s attempts! Root cause:\n%s" %
                                  (settings.MAX_FETCH_RETRIES, format_exc()))
                    raise
                else:
                    log.exception(
                        "Got an exception when fetching data! Try: %i of %i. Root cause:\n%s"
                        % (retries, settings.MAX_FETCH_RETRIES, format_exc()))
                    retries += 1

        try:
            if result.getheader('content-type') == 'application/x-msgpack':
                data = msgpack.load(BufferedHTTPReader(
                    result, buffer_size=settings.REMOTE_BUFFER_SIZE),
                                    encoding='utf-8')
            else:
                data = unpickle.load(
                    BufferedHTTPReader(
                        result, buffer_size=settings.REMOTE_BUFFER_SIZE))
        except Exception as err:
            self.finder.fail()
            log.exception(
                "RemoteReader[%s] Error decoding render response from %s: %s" %
                (self.finder.host, result.url_full, err))
            raise Exception("Error decoding render response from %s: %s" %
                            (result.url_full, err))
        finally:
            result.release_conn()

        try:
            return [{
                'pathExpression': series.get('pathExpression', series['name']),
                'name': series['name'],
                'time_info': (series['start'], series['end'], series['step']),
                'values': series['values'],
            } for series in data]
        except Exception as err:
            self.finder.fail()
            log.exception(
                "RemoteReader[%s] Invalid render response from %s: %s" %
                (self.finder.host, result.url_full, repr(err)))
            raise Exception("Invalid render response from %s: %s" %
                            (result.url_full, repr(err)))