def _deserialize_buffer(byte_buffer, content_type):
  if content_type == 'application/x-msgpack':
    data = msgpack.unpackb(byte_buffer, encoding='utf-8')
  else:
    data = unpickle.loads(byte_buffer)
  return data
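# Usage sketch (illustrative, not part of the original source); assumes the
# msgpack package is installed and that `unpickle` is graphite's restricted
# unpickler, which accepts plain lists/dicts just like pickle.loads.
import pickle
import msgpack

payload = [{'path': 'hosts.worker1.cpu', 'is_leaf': True}]
nodes = _deserialize_buffer(msgpack.packb(payload), 'application/x-msgpack')
# any other content type falls through to the pickle branch
nodes = _deserialize_buffer(pickle.dumps(payload), 'application/pickle')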
def get_results(self):
  if self.failed:
    return

  if self.cachedResult is not None:
    results = self.cachedResult
  else:
    if self.connection is None:
      self.send()

    try:
      response = self.connection.getresponse()
      assert response.status == 200, "received error response %s - %s" % (response.status, response.reason)
      result_data = response.read()
      results = unpickle.loads(result_data)
    except:
      log.exception("FindRequest.get_results(host=%s, query=%s) exception processing response" % (self.store.host, self.query))
      self.store.fail()
      return

    cache.set(self.cacheKey, results, settings.FIND_CACHE_DURATION)

  for node_info in results:
    if node_info.get('is_leaf'):
      reader = RemoteReader(self.store, node_info, bulk_query=self.query.pattern)
      node = LeafNode(node_info['path'], reader)
    else:
      node = BranchNode(node_info['path'])
    node.local = False
    yield node
def wait_for_results():
  connection_event.wait(1)
  connection = self.request_connections.get(url)
  if not connection:
    log.exception("RemoteReader.wait_for_results :: no connection found")
  if wait_lock.acquire(False):  # the FetchInProgress that gets waited on waits for the actual completion
    try:
      response = connection.getresponse()
      if response.status != 200:
        raise Exception("Error response %d %s from %s" % (response.status, response.reason, url))

      pickled_response = response.read()
      results = unpickle.loads(pickled_response)
      self.cache_lock.acquire()
      self.request_cache[url] = results
      self.cache_lock.release()
      completion_event.set()
      return results
    except:
      completion_event.set()
      self.store.fail()
      log.exception("Error requesting %s" % url)
      raise
  else:  # otherwise we just wait on the completion_event
    completion_event.wait(settings.REMOTE_FETCH_TIMEOUT)
    cached_results = self.request_cache.get(url)
    if cached_results is None:
      raise Exception("Passive remote fetch failed to find cached results")
    else:
      return cached_results
def read_response(self):  # called under self.lock
  try:
    self.has_done_response_read = True

    # safe if self.connection.timeout works as advertised
    try:  # Python 2.7+, use buffering of HTTP responses
      response = self.connection.getresponse(buffering=True)
    except TypeError:  # Python 2.6 and older
      response = self.connection.getresponse()

    if response.status != 200:
      raise Exception("Error response %d %s from http://%s%s" % (response.status, response.reason, self.store.host, self.urlpath))

    pickled_response = response.read()
    self.result = {
      series['name']: series
      for series in unpickle.loads(pickled_response)
    }
    return self.result
  except:
    self.store.fail()
    log.exception("Error requesting http://%s%s" % (self.store.host, self.urlpath))
    raise
  finally:
    self.done_cb()
def get_results(self):
  if self.cachedResults:
    return self.cachedResults

  if not self.connection:
    self.send()

  try:
    response = self.connection.getresponse()
    assert response.status == 200, "received error response %s - %s" % (response.status, response.reason)
    result_data = response.read()
    results = unpickle.loads(result_data)
  except:
    self.store.fail()
    if not self.suppressErrors:
      raise
    else:
      results = []

  resultNodes = [RemoteNode(self.store, node['metric_path'], node['isLeaf']) for node in results]
  cache.set(self.cacheKey, resultNodes, settings.REMOTE_FIND_CACHE_DURATION)
  self.cachedResults = resultNodes
  return resultNodes
def get_results(self):
  if self.cachedResults:
    return self.cachedResults

  if not self.connection:
    self.send()

  try:  # Python 2.7+, use buffering of HTTP responses
    response = self.connection.getresponse(buffering=True)
  except TypeError:  # Python 2.6 and older
    response = self.connection.getresponse()

  try:
    assert response.status == 200, "received error response %s - %s" % (response.status, response.reason)
    result_data = response.read()
    results = unpickle.loads(result_data)
  except:
    self.store.fail()
    if not self.suppressErrors:
      raise
    else:
      results = []

  resultNodes = [RemoteNode(self.store, node['metric_path'], node['isLeaf']) for node in results]
  cache.set(self.cacheKey, resultNodes, settings.REMOTE_FIND_CACHE_DURATION)
  self.cachedResults = resultNodes
  return resultNodes
def _fetch(self, url, query_string, query_params, headers):
  self.log_debug("RemoteReader:: Starting to execute _fetch %s?%s" % (url, query_string))
  try:
    self.log_debug("ReadResult:: Requesting %s?%s" % (url, query_string))
    result = http.request(
      'POST' if settings.REMOTE_STORE_USE_POST else 'GET',
      url,
      fields=query_params,
      headers=headers,
      timeout=settings.REMOTE_FETCH_TIMEOUT,
    )

    if result.status != 200:
      self.store.fail()
      self.log_error("ReadResult:: Error response %d from %s?%s" % (result.status, url, query_string))
      data = []
    else:
      data = unpickle.loads(result.data)
  except Exception as err:
    self.store.fail()
    self.log_error("ReadResult:: Error requesting %s?%s: %s" % (url, query_string, err))
    data = []

  self.log_debug("RemoteReader:: Completed _fetch %s?%s" % (url, query_string))
  return data
def _fetch(self, url, query_string, query_params, headers):
  url_full = "%s?%s" % (url, query_string)
  log.debug("RemoteReader:: Starting to execute _fetch %s" % url_full)
  try:
    log.debug("ReadResult:: Requesting %s" % url_full)
    result = http.request(
      'POST' if settings.REMOTE_STORE_USE_POST else 'GET',
      url,
      fields=query_params,
      headers=headers,
      timeout=settings.REMOTE_FETCH_TIMEOUT,
    )

    if result.status != 200:
      self.store.fail()
      self.log_error("ReadResult:: Error response %d from %s" % (result.status, url_full))
      data = []
    else:
      data = unpickle.loads(result.data)
  except Exception as err:
    self.store.fail()
    self.log_error("ReadResult:: Error requesting %s: %s" % (url_full, err))
    data = []

  log.debug("RemoteReader:: Completed _fetch %s" % url_full)
  return data
def request_series():
  if request_lock.acquire(False):  # the FetchInProgress that gets waited on waits for the actual completion
    try:
      log.info("RemoteReader.request_data :: requesting %s" % url)
      connection = HTTPConnectionWithTimeout(self.store.host)
      connection.timeout = settings.REMOTE_FETCH_TIMEOUT
      connection.request('GET', urlpath)
      response = connection.getresponse()
      if response.status != 200:
        raise Exception("Error response %d %s from %s" % (response.status, response.reason, url))

      pickled_response = response.read()
      results = unpickle.loads(pickled_response)
      self.cache_lock.acquire()
      self.request_cache[url] = results
      self.cache_lock.release()
      completion_event.set()
      return results
    except:
      completion_event.set()
      self.store.fail()
      log.exception("Error requesting %s" % url)
      raise
  else:  # otherwise we just wait on the completion_event
    completion_event.wait(settings.REMOTE_FETCH_TIMEOUT)
    cached_results = self.request_cache.get(url)
    if cached_results is None:
      raise Exception("Passive remote fetch failed to find cached results")
    else:
      return cached_results
def fetch(self, startTime, endTime):
  if not self.__isLeaf:
    return []

  query_params = [
    ('target', self.metric_path),
    ('format', 'pickle'),
    ('from', str(int(startTime))),
    ('until', str(int(endTime)))
  ]
  query_string = urlencode(query_params)

  connection = HTTPConnectionWithTimeout(self.store.host)
  connection.timeout = settings.REMOTE_STORE_FETCH_TIMEOUT
  connection.request('GET', '/render/?' + query_string)
  response = connection.getresponse()
  assert response.status == 200, "Failed to retrieve remote data: %d %s" % (response.status, response.reason)
  rawData = response.read()

  seriesList = unpickle.loads(rawData)
  assert len(seriesList) == 1, "Invalid result: seriesList=%s" % str(seriesList)
  series = seriesList[0]

  timeInfo = (series['start'], series['end'], series['step'])
  return (timeInfo, series['values'])
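# Stub-payload sketch (illustrative, not from the original source): fetch()
# expects the remote /render/ endpoint to return a pickled single-element list
# of series dicts, which a test stub could fabricate like this.
import pickle

rawData = pickle.dumps([{
  'name': 'hosts.worker1.cpu',  # hypothetical metric path
  'start': 1583000000,
  'end': 1583000180,
  'step': 60,
  'values': [0.1, 0.2, None],
}])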
def get_results(self):
  if self.failed:
    return

  if self.cachedResult is not None:
    results = self.cachedResult
  else:
    if self.connection is None:
      self.send()

    try:
      try:  # Python 2.7+, use buffering of HTTP responses
        response = self.connection.getresponse(buffering=True)
      except TypeError:  # Python 2.6 and older
        response = self.connection.getresponse()
      assert response.status == 200, "received error response %s - %s" % (response.status, response.reason)
      result_data = response.read()
      results = unpickle.loads(result_data)
    except:
      log.exception("FindRequest.get_results(host=%s, query=%s) exception processing response" % (self.store.host, self.query))
      self.store.fail()
      return

    cache.set(self.cacheKey, results, settings.FIND_CACHE_DURATION)

  for node_info in results:
    # handle both 1.x and 0.9.x output
    path = node_info.get('path') or node_info.get('metric_path')
    is_leaf = node_info.get('is_leaf') or node_info.get('isLeaf')
    intervals = node_info.get('intervals') or []
    if not isinstance(intervals, IntervalSet):
      intervals = IntervalSet([Interval(interval[0], interval[1]) for interval in intervals])

    node_info = {
      'is_leaf': is_leaf,
      'path': path,
      'intervals': intervals,
    }

    if is_leaf:
      reader = RemoteReader(self.store, node_info, bulk_query=self.query.pattern)
      node = LeafNode(path, reader)
    else:
      node = BranchNode(path)

    node.local = False
    yield node
def fetch_multi(self, startTime, endTime, now=None, requestContext=None):
  if not self.bulk_query:
    return []

  query_params = [
    ('format', 'pickle'),
    ('local', '1'),
    ('noCache', '1'),
    ('from', int(startTime)),
    ('until', int(endTime))
  ]

  for target in self.bulk_query:
    query_params.append(('target', target))

  if now is not None:
    query_params.append(('now', int(now)))

  headers = requestContext.get('forwardHeaders') if requestContext else None

  retries = 1  # start counting at one to make log output and settings more readable
  while True:
    try:
      result = self.finder.request(
        '/render/',
        fields=query_params,
        headers=headers,
        timeout=settings.REMOTE_FETCH_TIMEOUT,
      )
      break
    except Exception:
      if retries >= settings.MAX_FETCH_RETRIES:
        log.exception("Failed after %s attempts! Root cause:\n%s" % (settings.MAX_FETCH_RETRIES, format_exc()))
        raise
      else:
        log.exception("Got an exception when fetching data! Try: %i of %i. Root cause:\n%s" % (retries, settings.MAX_FETCH_RETRIES, format_exc()))
        retries += 1

  try:
    data = unpickle.loads(result.data)
  except Exception as err:
    self.finder.fail()
    log.exception("RemoteReader[%s] Error decoding render response from %s: %s" % (self.finder.host, result.url_full, err))
    raise Exception("Error decoding render response from %s: %s" % (result.url_full, err))

  return [
    {
      'pathExpression': series.get('pathExpression', series['name']),
      'name': series['name'],
      'time_info': (series['start'], series['end'], series['step']),
      'values': series['values'],
    }
    for series in data
  ]
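# Consumer sketch (illustrative; `reader`, `startTime` and `endTime` are
# assumed to exist): each dict returned by fetch_multi carries
# (start, end, step) in 'time_info', so per-point timestamps can be rebuilt
# from the step size.
for series in reader.fetch_multi(startTime, endTime):
  start, end, step = series['time_info']
  points = list(zip(range(start, end, step), series['values']))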
def renderLocalView(request):
  try:
    start = time()
    reqParams = StringIO(request.body)
    graphType = reqParams.readline().strip()
    optionsPickle = reqParams.read()
    reqParams.close()
    graphClass = GraphTypes[graphType]
    options = unpickle.loads(optionsPickle)
    image = doImageRender(graphClass, options)
    log.rendering("Delegated rendering request took %.6f seconds" % (time() - start))
    return buildResponse(image)
  except:
    log.exception("Exception in graphite.render.views.rawrender")
    return HttpResponseServerError()
def renderLocalView(request):
  try:
    start = time()
    reqParams = StringIO(request.raw_post_data)
    graphType = reqParams.readline().strip()
    optionsPickle = reqParams.read()
    reqParams.close()
    graphClass = GraphTypes[graphType]
    options = unpickle.loads(optionsPickle)
    image = doImageRender(graphClass, options)
    log.rendering("Delegated rendering request took %.6f seconds" % (time() - start))
    return buildResponse(image)
  except:
    log.exception("Exception in graphite.render.views.rawrender")
    return HttpResponseServerError()
def read_response(self):  # called under self.lock
  try:
    self.has_done_response_read = True
    response = self.connection.getresponse()  # safe if self.connection.timeout works as advertised
    if response.status != 200:
      raise Exception("Error response %d %s from http://%s%s" % (response.status, response.reason, self.store.host, self.urlpath))

    pickled_response = response.read()
    self.result = unpickle.loads(pickled_response)
    return self.result
  except:
    self.store.fail()
    log.exception("Error requesting http://%s%s" % (self.store.host, self.urlpath))
    raise
  finally:
    self.done_cb()
def renderLocalView(request):
  try:
    start = time()
    reqParams = BytesIO(request.body)
    graphType = reqParams.readline().strip()
    optionsPickle = reqParams.read()
    reqParams.close()
    graphClass = GraphTypes[graphType]
    options = unpickle.loads(optionsPickle)
    image = doImageRender(graphClass, options)
    log.rendering("Delegated rendering request took %.6f seconds" % (time() - start))
    response = buildResponse(image)
    add_never_cache_headers(response)
    return response
  except Exception:
    log.exception("Exception in graphite.render.views.rawrender")
    return HttpResponseServerError()
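# Client-side sketch (illustrative, not from the original source): the
# delegated-render body parsed by renderLocalView is the graph type on the
# first line, followed by the pickled options dict ('line' and 'pie' are the
# graph types graphite defines).
import pickle

body = b"line\n" + pickle.dumps({'width': 330, 'height': 250})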
def wait_for_results():
  if request_lock.acquire(False):  # we only send the request the first time we're called
    try:
      log.info("RemoteReader.request_data :: requesting %s" % url)
      self.connection = HTTPConnectionWithTimeout(self.store.host)
      self.connection.timeout = settings.REMOTE_FETCH_TIMEOUT
      self.connection.request('GET', urlpath)
    except:
      log.exception("Error requesting %s" % url)
      wait_lock.acquire(False)
      completion_event.set()
      self.store.fail()
      raise

  if wait_lock.acquire(False):  # the FetchInProgress that gets waited on waits for the actual completion
    try:
      response = self.connection.getresponse()
      if response.status != 200:
        raise Exception("Error response %d %s from %s" % (response.status, response.reason, url))

      pickled_response = response.read()
      results = unpickle.loads(pickled_response)
      self.cache_lock.acquire()
      self.request_cache[url] = results
      self.cache_lock.release()
      completion_event.set()
      return results
    except:
      completion_event.set()
      self.store.fail()
      log.exception("Error requesting %s" % url)
      raise
  else:  # otherwise we just wait on the completion_event
    completion_event.wait(settings.REMOTE_FETCH_TIMEOUT)
    cached_results = self.request_cache.get(url)
    if cached_results is None:
      raise Exception("Passive remote fetch failed to find cached results")
    else:
      return cached_results
def fetch(self, startTime, endTime, now=None, result_queue=None):
  if not self.__isLeaf:
    return []

  if self.__isBulk:
    targets = [('target', v) for v in self.metric_path]
  else:
    targets = [('target', self.metric_path)]

  query_params = [
    ('local', '1'),
    ('format', 'pickle'),
    ('from', str(int(startTime))),
    ('until', str(int(endTime)))
  ]
  query_params.extend(targets)
  if now is not None:
    query_params.append(('now', str(int(now))))
  query_string = urlencode(query_params)

  connection = HTTPConnectionWithTimeout(self.store.host)
  connection.timeout = settings.REMOTE_STORE_FETCH_TIMEOUT
  if settings.REMOTE_STORE_USE_POST:
    connection.request('POST', '/render/', query_string)
  else:
    connection.request('GET', '/render/?' + query_string)

  try:  # Python 2.7+, use buffering of HTTP responses
    response = connection.getresponse(buffering=True)
  except TypeError:  # Python 2.6 and older
    response = connection.getresponse()
  assert response.status == 200, "Failed to retrieve remote data: %d %s" % (response.status, response.reason)
  rawData = response.read()

  seriesList = unpickle.loads(rawData)

  if result_queue:
    result_queue.put((self.store.host, seriesList))
  else:
    return seriesList
def load_whitelist():
  buffer = open(settings.WHITELIST_FILE, "rb").read()
  whitelist = unpickle.loads(buffer)
  return whitelist
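# Counterpart sketch (illustrative, not from the original source):
# load_whitelist assumes WHITELIST_FILE contains a pickled collection of
# metric names; a hypothetical writer would produce it like this.
import pickle

def save_whitelist(whitelist):
  with open(settings.WHITELIST_FILE, 'wb') as f:
    f.write(pickle.dumps(whitelist, protocol=2))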
def find_nodes(self, query, timer=None):
  timer.set_msg('host: {host}, query: {query}'.format(host=self.host, query=query))

  log.debug("RemoteFinder.find_nodes(host=%s, query=%s) called" % (self.host, query))

  # prevent divide by 0
  cacheTTL = settings.FIND_CACHE_DURATION or 1
  if query.startTime:
    start = query.startTime - (query.startTime % cacheTTL)
  else:
    start = ""

  if query.endTime:
    end = query.endTime - (query.endTime % cacheTTL)
  else:
    end = ""

  cacheKey = "find:%s:%s:%s:%s" % (self.host, compactHash(query.pattern), start, end)

  results = cache.get(cacheKey)
  if results is not None:
    log.debug("RemoteFinder.find_nodes(host=%s, query=%s) using cached result" % (self.host, query))
  else:
    url = '/metrics/find/'

    query_params = [
      ('local', '1'),
      ('format', 'pickle'),
      ('query', query.pattern),
    ]
    if query.startTime:
      query_params.append(('from', int(query.startTime)))

    if query.endTime:
      query_params.append(('until', int(query.endTime)))

    result = self.request(
      url,
      fields=query_params,
      headers=query.headers,
      timeout=settings.REMOTE_FIND_TIMEOUT)

    try:
      results = unpickle.loads(result.data)
    except Exception as err:
      self.fail()
      log.exception("RemoteFinder[%s] Error decoding find response from %s: %s" % (self.host, result.url_full, err))
      raise Exception("Error decoding find response from %s: %s" % (result.url_full, err))

    cache.set(cacheKey, results, settings.FIND_CACHE_DURATION)

  for node_info in results:
    # handle both 1.x and 0.9.x output
    path = node_info.get('path') or node_info.get('metric_path')
    is_leaf = node_info.get('is_leaf') or node_info.get('isLeaf')
    intervals = node_info.get('intervals') or []
    if not isinstance(intervals, IntervalSet):
      intervals = IntervalSet([Interval(interval[0], interval[1]) for interval in intervals])

    node_info = {
      'is_leaf': is_leaf,
      'path': path,
      'intervals': intervals,
    }

    if is_leaf:
      reader = RemoteReader(self, node_info)
      node = LeafNode(path, reader)
    else:
      node = BranchNode(path)

    node.local = False
    yield node
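# Illustrative wire-format examples (not from the original source): the
# "handle both 1.x and 0.9.x output" branch above accepts either entry shape
# and normalizes both to the same node_info dict.
node_09x = {'metric_path': 'hosts.worker1.cpu', 'isLeaf': True}  # 0.9.x style
node_1x = {'path': 'hosts.worker1.cpu', 'is_leaf': True,         # 1.x style
           'intervals': [(1582797989, 1582801589)]}
for node_info in (node_09x, node_1x):
  path = node_info.get('path') or node_info.get('metric_path')
  is_leaf = node_info.get('is_leaf') or node_info.get('isLeaf')  # both yield the same node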
def test_find_view(self):
  ts = int(time.time())
  #create a minus 60 variable to test with, otherwise the build could fail the longer the test runs
  ts_minus_sixty_seconds = ts - 60
  self.create_whisper_hosts(ts)
  self.addCleanup(self.wipe_whisper_hosts)

  url = reverse('graphite.metrics.views.find_view')

  #
  # Missing query param
  #
  response = self.client.post(url, {})
  self.assertEqual(response.status_code, 400)
  self.assertEqual(response.content, "Missing required parameter 'query'")

  #
  # format=invalid_format
  #
  response = self.client.post(url, {'format': 'invalid_format', 'query': '*'})
  self.assertEqual(response.status_code, 400)
  self.assertEqual(response.content, "Invalid value for 'format' parameter")

  def test_find_view_basics(data):
    response = self.client.post(url, data)
    self.assertEqual(response.status_code, 200)
    self.assertTrue(response.has_header('Pragma'))
    self.assertTrue(response.has_header('Cache-Control'))
    return response.content

  #
  # Default values
  #
  request_default = {
    'query': '',
    'local': 0,
    'wildcards': 0,
    'from': -1,
    'until': -1,
    'jsonp': '',
    'automatic_variants': 0
  }

  #
  # format=treejson
  #
  request = copy.deepcopy(request_default)
  request['format'] = 'treejson'
  request['query'] = '*'
  content = test_find_view_basics(request)
  [data] = json.loads(content)
  self.assertEqual(data['text'], 'hosts')

  # No match
  request = copy.deepcopy(request_default)
  request['format'] = 'treejson'
  request['query'] = 'other'
  content = test_find_view_basics(request)
  self.assertEqual(content, '[]')

  request['query'] = '*'
  request['wildcards'] = 1
  content = test_find_view_basics(request)
  [data] = json.loads(content)
  self.assertEqual(data['text'], 'hosts')

  # Other formats than treejson shouldn't require DB calls
  with self.assertNumQueries(0):
    #
    # format=pickle
    #
    request = copy.deepcopy(request_default)
    request['format'] = 'pickle'
    request['query'] = '*'
    content = test_find_view_basics(request)
    data = unpickle.loads(content)
    self.assertEqual(len(data), 1)
    self.assertEqual(data[0]['path'], 'hosts')
    self.assertEqual(data[0]['is_leaf'], False)

    request['query'] = 'hosts.*.cpu'
    content = test_find_view_basics(request)
    data = unpickle.loads(content)
    self.assertEqual(len(data), 2)
    data = sorted(data, key=lambda item: item['path'])
    self.assertEqual(data[0]['path'], 'hosts.worker1.cpu')
    self.assertEqual(data[0]['is_leaf'], True)
    self.assertEqual(len(data[0]['intervals']), 1)
    #self.assertEqual(int(data[0]['intervals'][0].start), ts_minus_sixty_seconds)
    self.assertEqual(int(data[0]['intervals'][0].end), ts)
    self.assertEqual(data[1]['path'], 'hosts.worker2.cpu')
    self.assertEqual(data[1]['is_leaf'], True)
    self.assertEqual(len(data[1]['intervals']), 1)
    #self.assertEqual(int(data[1]['intervals'][0].start), ts_minus_sixty_seconds)
    self.assertEqual(int(data[1]['intervals'][0].end), ts)

    #
    # format=completer
    #
    request = copy.deepcopy(request_default)
    request['format'] = 'completer'
    request['query'] = '*'
    content = test_find_view_basics(request)
    data = json.loads(content)
    data['metrics'] = sorted(data['metrics'])
    self.assertEqual(data, {u'metrics': [{u'name': u'hosts', u'is_leaf': u'0', u'path': u'hosts.'}]})

    request['query'] = 'hosts'
    content = test_find_view_basics(request)
    data = json.loads(content)
    data['metrics'] = sorted(data['metrics'])
    self.assertEqual(data, {u'metrics': [{u'name': u'hosts', u'is_leaf': u'0', u'path': u'hosts.'}]})

    request['query'] = 'hosts.*.*'
    content = test_find_view_basics(request)
    data = json.loads(content)
    data['metrics'] = sorted(data['metrics'])
    self.assertEqual(data, {u'metrics': [
      {u'path': u'hosts.worker1.cpu', u'is_leaf': u'1', u'name': u'cpu'},
      {u'path': u'hosts.worker2.cpu', u'is_leaf': u'1', u'name': u'cpu'}]})

    request['query'] = 'hosts.'
    content = test_find_view_basics(request)
    data = json.loads(content)
    data['metrics'] = sorted(data['metrics'])
    self.assertEqual(data, {u'metrics': [
      {u'is_leaf': u'0', u'name': u'worker1', u'path': u'hosts.worker1.'},
      {u'is_leaf': u'0', u'name': u'worker2', u'path': u'hosts.worker2.'}]})

    # No match
    request['query'] = 'other'
    content = test_find_view_basics(request)
    data = json.loads(content)
    self.assertEqual(data['metrics'], [])

    # No match
    request['query'] = 'other'
    content = test_find_view_basics(request)
    data = json.loads(content)
    self.assertEqual(data['metrics'], [])

    # Test wildcards param
    request['wildcards'] = 1
    request['query'] = 'hosts.*.'
    content = test_find_view_basics(request)
    data = json.loads(content)
    data['metrics'] = sorted(data['metrics'])
    self.assertEqual(data, {u'metrics': [
      {u'name': u'*'},
      {u'is_leaf': u'1', u'path': u'hosts.worker1.cpu', u'name': u'cpu'},
      {u'is_leaf': u'1', u'path': u'hosts.worker2.cpu', u'name': u'cpu'}]})

    # Test from/until params
    request = copy.deepcopy(request_default)
    request['format'] = 'completer'
    request['query'] = 'hosts'
    request['from'] = int(time.time()) - 60
    request['until'] = int(time.time())
    content = test_find_view_basics(request)
    data = json.loads(content)
    data['metrics'] = sorted(data['metrics'])
    self.assertEqual(data, {u'metrics': [{u'name': u'hosts', u'is_leaf': u'0', u'path': u'hosts.'}]})

    # automatic_variants
    request = copy.deepcopy(request_default)
    request['format'] = 'completer'
    request['automatic_variants'] = 1
    request['query'] = 'hosts.'
    content = test_find_view_basics(request)
    data = json.loads(content)
    data['metrics'] = sorted(data['metrics'])
    self.assertEqual(data, {u'metrics': [
      {u'is_leaf': u'0', u'name': u'worker1', u'path': u'hosts.worker1.'},
      {u'is_leaf': u'0', u'name': u'worker2', u'path': u'hosts.worker2.'}]})

    request['automatic_variants'] = 1
    request['query'] = '{hosts,blah}.'
    content = test_find_view_basics(request)
    data = json.loads(content)
    data['metrics'] = sorted(data['metrics'])
    self.assertEqual(data, {u'metrics': [
      {u'path': u'hosts.worker1.', u'is_leaf': u'0', u'name': u'worker1'},
      {u'path': u'hosts.worker2.', u'is_leaf': u'0', u'name': u'worker2'}]})

    request['automatic_variants'] = 1
    request['query'] = 'hosts,blah.'
    content = test_find_view_basics(request)
    data = json.loads(content)
    data['metrics'] = sorted(data['metrics'])
    self.assertEqual(data, {u'metrics': [
      {u'name': u'worker1', u'path': u'hosts.worker1.', u'is_leaf': u'0'},
      {u'name': u'worker2', u'path': u'hosts.worker2.', u'is_leaf': u'0'}]})

    # format=completer+jsonp
    request = copy.deepcopy(request_default)
    request['format'] = 'completer'
    request['jsonp'] = 'asdf'
    request['query'] = '*'
    content = test_find_view_basics(request)
    data = json.loads(content.split("(")[1].strip(")"))
    self.assertEqual(data, {u'metrics': [{u'name': u'hosts', u'path': u'hosts.', u'is_leaf': u'0'}]})

    # No match
    request['query'] = 'other'
    content = test_find_view_basics(request)
    data = json.loads(content.split("(")[1].strip(")"))
    self.assertEqual(data['metrics'], [])

    #
    # format=nodelist
    #
    request = copy.deepcopy(request_default)
    request['format'] = 'nodelist'
    request['query'] = '*'
    content = test_find_view_basics(request)
    data = json.loads(content)
    self.assertEqual(data, {u'nodes': [u'hosts']})

    request = copy.deepcopy(request_default)
    request['format'] = 'nodelist'
    request['query'] = '*.*'
    content = test_find_view_basics(request)
    data = json.loads(content)
    self.assertEqual(data, {u'nodes': [u'worker1', u'worker2']})

    request = copy.deepcopy(request_default)
    request['format'] = 'nodelist'
    request['query'] = '*.*.*'
    content = test_find_view_basics(request)
    data = json.loads(content)
    self.assertEqual(data, {u'nodes': [u'cpu']})

    # override node position
    request = copy.deepcopy(request_default)
    request['format'] = 'nodelist'
    request['query'] = '*.*.*'
    request['position'] = '0'
    content = test_find_view_basics(request)
    data = json.loads(content)
    self.assertEqual(data, {u'nodes': [u'hosts']})

    # format=json
    request = copy.deepcopy(request_default)
    request['format'] = 'json'

    # branch
    request['query'] = '*'
    content = test_find_view_basics(request)
    data = json.loads(content)
    self.assertEqual(data, [{u'path': u'hosts', u'is_leaf': False}])

    # leaf
    request['query'] = 'hosts.*.cpu'
    content = test_find_view_basics(request)
    data = json.loads(content)
    self.assertEqual(len(data), 2)
    self.assertEqual(data[0]['path'], 'hosts.worker1.cpu')
    self.assertEqual(data[0]['is_leaf'], True)
    self.assertEqual(len(data[0]['intervals']), 1)
    #self.assertEqual(int(data[0]['intervals'][0]['start']), ts_minus_sixty_seconds)
    self.assertEqual(int(data[0]['intervals'][0]['end']), ts)
    self.assertEqual(data[1]['path'], 'hosts.worker2.cpu')
    self.assertEqual(data[1]['is_leaf'], True)
    self.assertEqual(len(data[1]['intervals']), 1)
    #self.assertEqual(int(data[1]['intervals'][0]['start']), ts_minus_sixty_seconds)
    self.assertEqual(int(data[1]['intervals'][0]['end']), ts)

    # No match
    request['query'] = 'other'
    content = test_find_view_basics(request)
    data = json.loads(content)
    self.assertEqual(data, [])

    # format=json+jsonp
    request = copy.deepcopy(request_default)
    request['format'] = 'json'
    request['jsonp'] = 'asdf'

    # branch
    request['query'] = '*'
    content = test_find_view_basics(request)
    data = json.loads(content.split("(")[1].strip(")"))
    self.assertEqual(data, [{u'path': u'hosts', u'is_leaf': False}])

    # leaf
    request['query'] = 'hosts.*.cpu'
    content = test_find_view_basics(request)
    data = json.loads(content.split("(")[1].strip(")"))
    self.assertEqual(len(data), 2)
    self.assertEqual(data[0]['path'], 'hosts.worker1.cpu')
    self.assertEqual(data[0]['is_leaf'], True)
    self.assertEqual(len(data[0]['intervals']), 1)
    #self.assertEqual(int(data[0]['intervals'][0]['start']), ts_minus_sixty_seconds)
    self.assertEqual(int(data[0]['intervals'][0]['end']), ts)
    self.assertEqual(data[1]['path'], 'hosts.worker2.cpu')
    self.assertEqual(data[1]['is_leaf'], True)
    self.assertEqual(len(data[1]['intervals']), 1)
    #self.assertEqual(int(data[1]['intervals'][0]['start']), ts_minus_sixty_seconds)
    self.assertEqual(int(data[1]['intervals'][0]['end']), ts)

    # No match
    request['query'] = 'other'
    content = test_find_view_basics(request)
    data = json.loads(content.split("(")[1].strip(")"))
    self.assertEqual(data, [])
def test_find_view(self):
  ts = int(time.time())
  # ts_minus_sixty_seconds = ts - 60  # usage always commented-out below?
  self.create_whisper_hosts(ts)
  self.addCleanup(self.wipe_whisper_hosts)

  url = reverse('metrics_find')

  #
  # Missing query param
  #
  response = self.client.post(url, {})
  self.assertEqual(response.status_code, 400)
  self.assertEqual(response.content, b"Invalid parameters (Missing required parameter 'query')")

  #
  # invalid from/until params
  #
  response = self.client.post(url, {
    'query': '*',
    'from': 'now-1h',
    'until': 'now-2h',
  })
  self.assertEqual(response.status_code, 400)
  # response contains timestamps such as:
  # Invalid parameters (Failed to instantiate find query:
  # Invalid interval start=1582801589 end=1582797989)
  self.assertRegex(
    response.content,
    b"^Invalid parameters \\(Failed to instantiate find query: Invalid interval start=[0-9]+ end=[0-9]+\\)$")

  #
  # Wrong type for param 'wildcards'
  #
  response = self.client.post(url, {
    'query': '*',
    'wildcards': '123a',
  })
  self.assertEqual(response.status_code, 400)
  # the output in Python 2/3 slightly varies because repr() shows unicode strings differently, that's why the "u?"
  self.assertRegex(
    response.content,
    b"^Invalid parameters \\(Invalid int value u?'123a' for param wildcards: invalid literal for int\\(\\) with base 10: u?'123a'\\)$")

  #
  # Invalid 'from' timestamp
  #
  response = self.client.post(url, {
    'query': '*',
    'from': 'now-1mmminute',  # "mmminute" is misspelled
  })
  self.assertEqual(response.status_code, 400)
  # the output in Python 2/3 slightly varies because repr() shows unicode strings differently, that's why the "u?"
  self.assertRegex(
    response.content,
    b"^Invalid parameters \\(Invalid value u?'now-1mmminute' for param from: Invalid offset unit u?'mmminute'\\)$")

  #
  # format=invalid_format
  #
  response = self.client.post(url, {'format': 'invalid_format', 'query': '*'})
  self.assertEqual(response.status_code, 400)
  self.assertEqual(response.content, b"Invalid value for 'format' parameter")

  def test_find_view_basics(data):
    response = self.client.post(url, data)
    self.assertEqual(response.status_code, 200)
    self.assertTrue(response.has_header('Pragma'))
    self.assertTrue(response.has_header('Cache-Control'))
    return response.content

  #
  # Default values
  #
  request_default = {
    'query': '',
    'local': 0,
    'wildcards': 0,
    'from': -1,
    'until': -1,
    'jsonp': '',
    'automatic_variants': 0
  }

  #
  # format=treejson
  #
  request = copy.deepcopy(request_default)
  request['format'] = 'treejson'
  request['query'] = '*'
  content = test_find_view_basics(request)
  [data] = json.loads(content)
  self.assertEqual(data['text'], 'hosts')

  # No match
  request = copy.deepcopy(request_default)
  request['format'] = 'treejson'
  request['query'] = 'other'
  content = test_find_view_basics(request)
  self.assertEqual(content, b'[]')

  request['query'] = '*'
  request['wildcards'] = 1
  content = test_find_view_basics(request)
  [data] = json.loads(content)
  self.assertEqual(data['text'], 'hosts')

  # Other formats than treejson shouldn't require DB calls
  with self.assertNumQueries(0):
    #
    # format=pickle
    #
    request = copy.deepcopy(request_default)
    request['format'] = 'pickle'
    request['query'] = '*'
    content = test_find_view_basics(request)
    data = unpickle.loads(content)
    self.assertEqual(len(data), 1)
    self.assertEqual(data[0]['path'], 'hosts')
    self.assertEqual(data[0]['is_leaf'], False)

    request['query'] = 'hosts.*.cpu'
    content = test_find_view_basics(request)
    data = unpickle.loads(content)
    self.assertEqual(len(data), 2)

    def _path_key(metrics_dict):
      return metrics_dict.get('path', '')

    data = sorted(data, key=_path_key)
    self.assertEqual(data[0]['path'], 'hosts.worker1.cpu')
    self.assertEqual(data[0]['is_leaf'], True)
    self.assertEqual(len(data[0]['intervals']), 1)
    #self.assertEqual(int(data[0]['intervals'][0].start), ts_minus_sixty_seconds)
    self.assertIn(int(data[0]['intervals'][0].end), [ts, ts - 1])
    self.assertEqual(data[1]['path'], 'hosts.worker2.cpu')
    self.assertEqual(data[1]['is_leaf'], True)
    self.assertEqual(len(data[1]['intervals']), 1)
    #self.assertEqual(int(data[1]['intervals'][0].start), ts_minus_sixty_seconds)
    self.assertIn(int(data[1]['intervals'][0].end), [ts, ts - 1])

    #
    # format=msgpack
    #
    request = copy.deepcopy(request_default)
    request['format'] = 'msgpack'
    request['query'] = '*'
    content = test_find_view_basics(request)
    data = msgpack.loads(content, encoding='utf-8')
    self.assertEqual(len(data), 1)
    self.assertEqual(data[0]['path'], 'hosts')
    self.assertEqual(data[0]['is_leaf'], False)

    request['query'] = 'hosts.*.cpu'
    content = test_find_view_basics(request)
    data = msgpack.loads(content, encoding='utf-8')
    self.assertEqual(len(data), 2)
    data = sorted(data, key=_path_key)
    self.assertEqual(data[0]['path'], 'hosts.worker1.cpu')
    self.assertEqual(data[0]['is_leaf'], True)
    self.assertEqual(len(data[0]['intervals']), 1)
    #self.assertEqual(int(data[0]['intervals'][0].start), ts_minus_sixty_seconds)
    self.assertEqual(int(data[0]['intervals'][0][1]), ts)
    self.assertEqual(data[1]['path'], 'hosts.worker2.cpu')
    self.assertEqual(data[1]['is_leaf'], True)
    self.assertEqual(len(data[1]['intervals']), 1)
    #self.assertEqual(int(data[1]['intervals'][0].start), ts_minus_sixty_seconds)
    self.assertEqual(int(data[1]['intervals'][0][1]), ts)

    #
    # format=completer
    #
    request = copy.deepcopy(request_default)
    request['format'] = 'completer'
    request['query'] = '*'
    content = test_find_view_basics(request)
    data = json.loads(content)
    data['metrics'] = sorted(data['metrics'], key=_path_key)
    self.assertEqual(data, {u'metrics': [{u'name': u'hosts', u'is_leaf': u'0', u'path': u'hosts.'}]})

    request['query'] = 'hosts'
    content = test_find_view_basics(request)
    data = json.loads(content)
    data['metrics'] = sorted(data['metrics'], key=_path_key)
    self.assertEqual(data, {u'metrics': [{u'name': u'hosts', u'is_leaf': u'0', u'path': u'hosts.'}]})

    request['query'] = 'hosts.*.*'
    content = test_find_view_basics(request)
    data = json.loads(content)
    data['metrics'] = sorted(data['metrics'], key=_path_key)
    self.assertEqual(data, {u'metrics': [
      {u'path': u'hosts.worker1.cpu', u'is_leaf': u'1', u'name': u'cpu'},
      {u'path': u'hosts.worker2.cpu', u'is_leaf': u'1', u'name': u'cpu'}]})

    request['query'] = 'hosts.'
    content = test_find_view_basics(request)
    data = json.loads(content)
    data['metrics'] = sorted(data['metrics'], key=_path_key)
    self.assertEqual(data, {u'metrics': [
      {u'is_leaf': u'0', u'name': u'worker1', u'path': u'hosts.worker1.'},
      {u'is_leaf': u'0', u'name': u'worker2', u'path': u'hosts.worker2.'}]})

    # No match
    request['query'] = 'other'
    content = test_find_view_basics(request)
    data = json.loads(content)
    self.assertEqual(data['metrics'], [])

    # No match
    request['query'] = 'other'
    content = test_find_view_basics(request)
    data = json.loads(content)
    self.assertEqual(data['metrics'], [])

    # Test wildcards param
    request['wildcards'] = 1
    request['query'] = 'hosts.*.'
    content = test_find_view_basics(request)
    data = json.loads(content)
    data['metrics'] = sorted(data['metrics'], key=_path_key)
    self.assertEqual(data, {u'metrics': [
      {u'name': u'*'},
      {u'is_leaf': u'1', u'path': u'hosts.worker1.cpu', u'name': u'cpu'},
      {u'is_leaf': u'1', u'path': u'hosts.worker2.cpu', u'name': u'cpu'}]})

    # Test from/until params
    request = copy.deepcopy(request_default)
    request['format'] = 'completer'
    request['query'] = 'hosts'
    request['from'] = int(time.time()) - 60
    request['until'] = int(time.time())
    content = test_find_view_basics(request)
    data = json.loads(content)
    data['metrics'] = sorted(data['metrics'], key=_path_key)
    self.assertEqual(data, {u'metrics': [{u'name': u'hosts', u'is_leaf': u'0', u'path': u'hosts.'}]})

    # Test from/until params
    request = copy.deepcopy(request_default)
    request['format'] = 'completer'
    request['query'] = 'hosts'
    request['from'] = 'now-1min'
    request['until'] = 'now'
    content = test_find_view_basics(request)
    data = json.loads(content)
    data['metrics'] = sorted(data['metrics'], key=_path_key)
    self.assertEqual(data, {u'metrics': [{u'name': u'hosts', u'is_leaf': u'0', u'path': u'hosts.'}]})

    # automatic_variants
    request = copy.deepcopy(request_default)
    request['format'] = 'completer'
    request['automatic_variants'] = 1
    request['query'] = 'hosts.'
    content = test_find_view_basics(request)
    data = json.loads(content)
    data['metrics'] = sorted(data['metrics'], key=_path_key)
    self.assertEqual(data, {u'metrics': [
      {u'is_leaf': u'0', u'name': u'worker1', u'path': u'hosts.worker1.'},
      {u'is_leaf': u'0', u'name': u'worker2', u'path': u'hosts.worker2.'}]})

    request['automatic_variants'] = 1
    request['query'] = '{hosts,blah}.'
    content = test_find_view_basics(request)
    data = json.loads(content)
    data['metrics'] = sorted(data['metrics'], key=_path_key)
    self.assertEqual(data, {u'metrics': [
      {u'path': u'hosts.worker1.', u'is_leaf': u'0', u'name': u'worker1'},
      {u'path': u'hosts.worker2.', u'is_leaf': u'0', u'name': u'worker2'}]})

    request['automatic_variants'] = 1
    request['query'] = 'hosts,blah.'
    content = test_find_view_basics(request)
    data = json.loads(content)
    data['metrics'] = sorted(data['metrics'], key=_path_key)
    self.assertEqual(data, {u'metrics': [
      {u'name': u'worker1', u'path': u'hosts.worker1.', u'is_leaf': u'0'},
      {u'name': u'worker2', u'path': u'hosts.worker2.', u'is_leaf': u'0'}]})

    # format=completer+jsonp
    request = copy.deepcopy(request_default)
    request['format'] = 'completer'
    request['jsonp'] = 'asdf'
    request['query'] = '*'
    content = test_find_view_basics(request)
    data = json.loads(content.split(b"(")[1].strip(b")"))
    self.assertEqual(data, {u'metrics': [{u'name': u'hosts', u'path': u'hosts.', u'is_leaf': u'0'}]})

    # No match
    request['query'] = 'other'
    content = test_find_view_basics(request)
    data = json.loads(content.split(b"(")[1].strip(b")"))
    self.assertEqual(data['metrics'], [])

    #
    # format=nodelist
    #
    request = copy.deepcopy(request_default)
    request['format'] = 'nodelist'
    request['query'] = '*'
    content = test_find_view_basics(request)
    data = json.loads(content)
    self.assertEqual(data, {u'nodes': [u'hosts']})

    request = copy.deepcopy(request_default)
    request['format'] = 'nodelist'
    request['query'] = '*.*'
    content = test_find_view_basics(request)
    data = json.loads(content)
    self.assertEqual(data, {u'nodes': [u'worker1', u'worker2']})

    request = copy.deepcopy(request_default)
    request['format'] = 'nodelist'
    request['query'] = '*.*.*'
    content = test_find_view_basics(request)
    data = json.loads(content)
    self.assertEqual(data, {u'nodes': [u'cpu']})

    # override node position
    request = copy.deepcopy(request_default)
    request['format'] = 'nodelist'
    request['query'] = '*.*.*'
    request['position'] = '0'
    content = test_find_view_basics(request)
    data = json.loads(content)
    self.assertEqual(data, {u'nodes': [u'hosts']})

    # format=json
    request = copy.deepcopy(request_default)
    request['format'] = 'json'

    # branch
    request['query'] = '*'
    content = test_find_view_basics(request)
    data = json.loads(content)
    self.assertEqual(data, [{u'path': u'hosts', u'is_leaf': False}])

    # leaf
    request['query'] = 'hosts.*.cpu'
    content = test_find_view_basics(request)
    data = json.loads(content)
    self.assertEqual(len(data), 2)
    self.assertEqual(data[0]['path'], 'hosts.worker1.cpu')
    self.assertEqual(data[0]['is_leaf'], True)
    self.assertEqual(len(data[0]['intervals']), 1)
    #self.assertEqual(int(data[0]['intervals'][0]['start']), ts_minus_sixty_seconds)
    self.assertIn(int(data[0]['intervals'][0]['end']), [ts, ts - 1])
    self.assertEqual(data[1]['path'], 'hosts.worker2.cpu')
    self.assertEqual(data[1]['is_leaf'], True)
    self.assertEqual(len(data[1]['intervals']), 1)
    #self.assertEqual(int(data[1]['intervals'][0]['start']), ts_minus_sixty_seconds)
    self.assertIn(int(data[1]['intervals'][0]['end']), [ts, ts - 1])

    # No match
    request['query'] = 'other'
    content = test_find_view_basics(request)
    data = json.loads(content)
    self.assertEqual(data, [])

    # format=json+jsonp
    request = copy.deepcopy(request_default)
    request['format'] = 'json'
    request['jsonp'] = 'asdf'

    # branch
    request['query'] = '*'
    content = test_find_view_basics(request)
    data = json.loads(content.split(b"(")[1].strip(b")"))
    self.assertEqual(data, [{u'path': u'hosts', u'is_leaf': False}])

    # leaf
    request['query'] = 'hosts.*.cpu'
    content = test_find_view_basics(request)
    data = json.loads(content.split(b"(")[1].strip(b")"))
    self.assertEqual(len(data), 2)
    self.assertEqual(data[0]['path'], 'hosts.worker1.cpu')
    self.assertEqual(data[0]['is_leaf'], True)
    self.assertEqual(len(data[0]['intervals']), 1)
    #self.assertEqual(int(data[0]['intervals'][0]['start']), ts_minus_sixty_seconds)
    self.assertIn(int(data[0]['intervals'][0]['end']), [ts, ts - 1])
    self.assertEqual(data[1]['path'], 'hosts.worker2.cpu')
    self.assertEqual(data[1]['is_leaf'], True)
    self.assertEqual(len(data[1]['intervals']), 1)
    #self.assertEqual(int(data[1]['intervals'][0]['start']), ts_minus_sixty_seconds)
    self.assertIn(int(data[1]['intervals'][0]['end']), [ts, ts - 1])

    # No match
    request['query'] = 'other'
    content = test_find_view_basics(request)
    data = json.loads(content.split(b"(")[1].strip(b")"))
    self.assertEqual(data, [])
def send(self, headers=None, msg_setter=None):
  log.debug("FindRequest.send(host=%s, query=%s) called" % (self.store.host, self.query))

  if headers is None:
    headers = {}

  results = cache.get(self.cacheKey)
  if results is not None:
    log.debug("FindRequest.send(host=%s, query=%s) using cached result" % (self.store.host, self.query))
  else:
    url = "%s://%s/metrics/find/" % ('https' if settings.INTRACLUSTER_HTTPS else 'http', self.store.host)

    query_params = [
      ('local', '1'),
      ('format', 'pickle'),
      ('query', self.query.pattern),
    ]
    if self.query.startTime:
      query_params.append(('from', self.query.startTime))

    if self.query.endTime:
      query_params.append(('until', self.query.endTime))

    try:
      result = http.request(
        'POST' if settings.REMOTE_STORE_USE_POST else 'GET',
        url,
        fields=query_params,
        headers=headers,
        timeout=settings.REMOTE_FIND_TIMEOUT)
    except BaseException:
      log.exception("FindRequest.send(host=%s, query=%s) exception during request" % (self.store.host, self.query))
      self.store.fail()
      return

    if result.status != 200:
      log.exception("FindRequest.send(host=%s, query=%s) error response %d from %s?%s" % (self.store.host, self.query, result.status, url, urlencode(query_params)))
      self.store.fail()
      return

    try:
      results = unpickle.loads(result.data)
    except BaseException:
      log.exception("FindRequest.send(host=%s, query=%s) exception processing response" % (self.store.host, self.query))
      self.store.fail()
      return

    cache.set(self.cacheKey, results, settings.FIND_CACHE_DURATION)

  msg_setter('host: {host}, query: {query}'.format(host=self.store.host, query=self.query))

  for node_info in results:
    # handle both 1.x and 0.9.x output
    path = node_info.get('path') or node_info.get('metric_path')
    is_leaf = node_info.get('is_leaf') or node_info.get('isLeaf')
    intervals = node_info.get('intervals') or []
    if not isinstance(intervals, IntervalSet):
      intervals = IntervalSet([Interval(interval[0], interval[1]) for interval in intervals])

    node_info = {
      'is_leaf': is_leaf,
      'path': path,
      'intervals': intervals,
    }

    if is_leaf:
      reader = RemoteReader(self.store, node_info, bulk_query=[self.query.pattern])
      node = LeafNode(path, reader)
    else:
      node = BranchNode(path)

    node.local = False
    yield node
def send(self, headers=None, msg_setter=None):
  log.info("FindRequest.send(host=%s, query=%s) called" % (self.store.host, self.query))

  if headers is None:
    headers = {}

  results = cache.get(self.cacheKey)
  if results is not None:
    log.info("FindRequest.send(host=%s, query=%s) using cached result" % (self.store.host, self.query))
  else:
    url = "%s://%s/metrics/find/" % ('https' if settings.INTRACLUSTER_HTTPS else 'http', self.store.host)

    query_params = [
      ('local', '1'),
      ('format', 'pickle'),
      ('query', self.query.pattern),
    ]
    if self.query.startTime:
      query_params.append(('from', self.query.startTime))

    if self.query.endTime:
      query_params.append(('until', self.query.endTime))

    try:
      result = http.request(
        'POST' if settings.REMOTE_STORE_USE_POST else 'GET',
        url,
        fields=query_params,
        headers=headers,
        timeout=settings.REMOTE_FIND_TIMEOUT)
    except:
      log.exception("FindRequest.send(host=%s, query=%s) exception during request" % (self.store.host, self.query))
      self.store.fail()
      return

    if result.status != 200:
      log.exception("FindRequest.send(host=%s, query=%s) error response %d from %s?%s" % (self.store.host, self.query, result.status, url, urlencode(query_params)))
      self.store.fail()
      return

    try:
      results = unpickle.loads(result.data)
    except:
      log.exception("FindRequest.send(host=%s, query=%s) exception processing response" % (self.store.host, self.query))
      self.store.fail()
      return

    cache.set(self.cacheKey, results, settings.FIND_CACHE_DURATION)

  msg_setter('host: {host}, query: {query}'.format(host=self.store.host, query=self.query))

  for node_info in results:
    # handle both 1.x and 0.9.x output
    path = node_info.get('path') or node_info.get('metric_path')
    is_leaf = node_info.get('is_leaf') or node_info.get('isLeaf')
    intervals = node_info.get('intervals') or []
    if not isinstance(intervals, IntervalSet):
      intervals = IntervalSet([Interval(interval[0], interval[1]) for interval in intervals])

    node_info = {
      'is_leaf': is_leaf,
      'path': path,
      'intervals': intervals,
    }

    if is_leaf:
      reader = RemoteReader(self.store, node_info, bulk_query=[self.query.pattern])
      node = LeafNode(path, reader)
    else:
      node = BranchNode(path)

    node.local = False
    yield node
def test_find_view(self):
    self.create_whisper_hosts()
    self.addCleanup(self.wipe_whisper_hosts)

    url = reverse('graphite.metrics.views.find_view')

    #
    # Missing query param
    #
    response = self.client.post(url, {})
    self.assertEqual(response.status_code, 400)
    self.assertEqual(response.content, "Missing required parameter 'query'")

    #
    # format=invalid_format
    #
    response = self.client.post(url, {'format': 'invalid_format', 'query': '*'})
    self.assertEqual(response.status_code, 400)
    self.assertEqual(response.content, "Invalid value for 'format' parameter")

    def test_find_view_basics(data):
        response = self.client.post(url, data)
        self.assertEqual(response.status_code, 200)
        self.assertTrue(response.has_header('Pragma'))
        self.assertTrue(response.has_header('Cache-Control'))
        return response.content

    #
    # Default values
    #
    request_default = {'query': '', 'local': 0, 'wildcards': 0, 'from': -1,
                       'until': -1, 'jsonp': '', 'automatic_variants': 0}

    #
    # format=treejson
    #
    request = copy.deepcopy(request_default)
    request['format'] = 'treejson'
    request['query'] = '*'
    content = test_find_view_basics(request)
    [data] = json.loads(content)
    self.assertEqual(data['text'], 'hosts')

    # No match
    request = copy.deepcopy(request_default)
    request['format'] = 'treejson'
    request['query'] = 'other'
    content = test_find_view_basics(request)
    self.assertEqual(content, '[]')

    request['query'] = '*'
    request['wildcards'] = 1
    content = test_find_view_basics(request)
    [data] = json.loads(content)
    self.assertEqual(data['text'], 'hosts')

    #
    # format=pickle
    #
    request = copy.deepcopy(request_default)
    request['format'] = 'pickle'
    request['query'] = '*'
    content = test_find_view_basics(request)
    [data] = unpickle.loads(content)
    self.assertEqual(data['path'], 'hosts')

    #
    # format=completer
    #
    request = copy.deepcopy(request_default)
    request['format'] = 'completer'
    request['query'] = '*'
    content = test_find_view_basics(request)
    data = json.loads(content)
    data['metrics'] = sorted(data['metrics'])
    self.assertEqual(data, {u'metrics': [{u'name': u'hosts', u'is_leaf': u'0', u'path': u'hosts.'}]})

    request['query'] = 'hosts'
    content = test_find_view_basics(request)
    data = json.loads(content)
    data['metrics'] = sorted(data['metrics'])
    self.assertEqual(data, {u'metrics': [{u'name': u'hosts', u'is_leaf': u'0', u'path': u'hosts.'}]})

    request['query'] = 'hosts.*.*'
    content = test_find_view_basics(request)
    data = json.loads(content)
    data['metrics'] = sorted(data['metrics'])
    self.assertEqual(data, {u'metrics': [{u'path': u'hosts.worker1.cpu', u'is_leaf': u'1', u'name': u'cpu'},
                                         {u'path': u'hosts.worker2.cpu', u'is_leaf': u'1', u'name': u'cpu'}]})

    request['query'] = 'hosts.'
    content = test_find_view_basics(request)
    data = json.loads(content)
    data['metrics'] = sorted(data['metrics'])
    self.assertEqual(data, {u'metrics': [{u'is_leaf': u'0', u'name': u'worker1', u'path': u'hosts.worker1.'},
                                         {u'is_leaf': u'0', u'name': u'worker2', u'path': u'hosts.worker2.'}]})

    # No match
    request['query'] = 'other'
    content = test_find_view_basics(request)
    data = json.loads(content)
    self.assertEqual(data['metrics'], [])

    # Test wildcards param
    request['wildcards'] = 1
    request['query'] = 'hosts.*.'
    content = test_find_view_basics(request)
    data = json.loads(content)
    data['metrics'] = sorted(data['metrics'])
    self.assertEqual(data, {u'metrics': [{u'name': u'*'},
                                         {u'is_leaf': u'1', u'path': u'hosts.worker1.cpu', u'name': u'cpu'},
                                         {u'is_leaf': u'1', u'path': u'hosts.worker2.cpu', u'name': u'cpu'}]})

    # Test from/until params
    request = copy.deepcopy(request_default)
    request['format'] = 'completer'
    request['query'] = 'hosts'
    request['from'] = int(time.time()) - 60
    request['until'] = int(time.time())
    content = test_find_view_basics(request)
    data = json.loads(content)
    data['metrics'] = sorted(data['metrics'])
    self.assertEqual(data, {u'metrics': [{u'name': u'hosts', u'is_leaf': u'0', u'path': u'hosts.'}]})

    # automatic_variants
    request = copy.deepcopy(request_default)
    request['format'] = 'completer'
    request['automatic_variants'] = 1
    request['query'] = 'hosts.'
    content = test_find_view_basics(request)
    data = json.loads(content)
    data['metrics'] = sorted(data['metrics'])
    self.assertEqual(data, {u'metrics': [{u'is_leaf': u'0', u'name': u'worker1', u'path': u'hosts.worker1.'},
                                         {u'is_leaf': u'0', u'name': u'worker2', u'path': u'hosts.worker2.'}]})

    request['automatic_variants'] = 1
    request['query'] = '{hosts,blah}.'
    content = test_find_view_basics(request)
    data = json.loads(content)
    data['metrics'] = sorted(data['metrics'])
    self.assertEqual(data, {u'metrics': [{u'path': u'hosts.worker1.', u'is_leaf': u'0', u'name': u'worker1'},
                                         {u'path': u'hosts.worker2.', u'is_leaf': u'0', u'name': u'worker2'}]})

    request['automatic_variants'] = 1
    request['query'] = 'hosts,blah.'
    content = test_find_view_basics(request)
    data = json.loads(content)
    data['metrics'] = sorted(data['metrics'])
    self.assertEqual(data, {u'metrics': [{u'name': u'worker1', u'path': u'hosts.worker1.', u'is_leaf': u'0'},
                                         {u'name': u'worker2', u'path': u'hosts.worker2.', u'is_leaf': u'0'}]})

    # format=completer+jsonp
    request = copy.deepcopy(request_default)
    request['format'] = 'completer'
    request['jsonp'] = 'asdf'
    request['query'] = '*'
    content = test_find_view_basics(request)
    data = json.loads(content.split("(")[1].strip(")"))
    self.assertEqual(data, {u'metrics': [{u'name': u'hosts', u'path': u'hosts.', u'is_leaf': u'0'}]})

    # No match
    request['query'] = 'other'
    content = test_find_view_basics(request)
    data = json.loads(content.split("(")[1].strip(")"))
    self.assertEqual(data['metrics'], [])
def recv_response(self, conn):
    # responses are length-prefixed: a 4-byte big-endian unsigned length,
    # followed by a pickled body of exactly that many bytes
    len_prefix = recv_exactly(conn, 4)
    body_size = struct.unpack("!L", len_prefix)[0]
    body = recv_exactly(conn, body_size)
    return unpickle.loads(body)
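# recv_response relies on a recv_exactly(conn, n) helper that is not shown
# here. socket.recv() may return fewer bytes than requested, so the helper
# must loop; a minimal sketch under that assumption:

def recv_exactly(conn, num_bytes):
    # hypothetical reconstruction: accumulate chunks until exactly
    # num_bytes have been read, failing if the peer closes early
    data = b''
    while len(data) < num_bytes:
        chunk = conn.recv(num_bytes - len(data))
        if not chunk:
            raise Exception("Connection closed before %d bytes were read" % num_bytes)
        data += chunk
    return data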