def find(self, pattern, startTime=None, endTime=None, local=False, headers=None, leaves_only=False):
    """Yield nodes matching *pattern*, enforcing configured result limits.

    Raises when the count of matched leaf nodes exceeds the failure
    threshold; logs a warning after iteration when it exceeds the
    warning threshold.  Branch nodes are dropped when *leaves_only* is set.
    """
    query = FindQuery(pattern, startTime, endTime, local=local,
                      headers=headers, leaves_only=leaves_only)
    warn_threshold = settings.METRICS_FIND_WARNING_THRESHOLD
    fail_threshold = settings.METRICS_FIND_FAILURE_THRESHOLD

    leaf_count = 0
    for node in self._find(query):
        if isinstance(node, LeafNode):
            leaf_count += 1
        elif leaves_only:
            # Caller asked for leaves only: silently drop branch nodes.
            continue
        if leaf_count > fail_threshold:
            raise Exception(
                ("Query %s yields too many results and failed "
                 "(failure threshold is %d)") % (pattern, fail_threshold))
        yield node

    if leaf_count > warn_threshold:
        log.warning(("Query %s yields large number of results up to %d "
                     "(warning threshold is %d)") % (pattern, leaf_count, warn_threshold))
def test_find_nodes_cached(self, http_request, cache_get):
    """A cache hit must serve find_nodes() with zero HTTP round trips."""
    finder = RemoteFinder('127.0.0.1')
    start = 1496262000
    end = 1496262060

    cache_get.return_value = [
        {'path': 'a.b.c', 'is_leaf': False},
        {'path': 'a.b.c.d', 'is_leaf': True},
    ]

    nodes = finder.find_nodes(FindQuery('a.b.c', start, end))

    # Cached result: no HTTP request, exactly one cache lookup.
    self.assertEqual(http_request.call_count, 0)
    self.assertEqual(cache_get.call_count, 1)
    self.assertEqual(cache_get.call_args[0], (
        'find:127.0.0.1:553f764f7b436175c0387e22b4a19213:1496262000:1496262000',
    ))

    self.assertEqual(len(nodes), 2)
    self.assertIsInstance(nodes[0], BranchNode)
    self.assertEqual(nodes[0].path, 'a.b.c')
    self.assertIsInstance(nodes[1], LeafNode)
    self.assertEqual(nodes[1].path, 'a.b.c.d')
def test_find_nodes(self, http_request):
    """find_nodes() must POST to /metrics/find/ and decode the pickled body."""
    finder = RemoteFinder('127.0.0.1')
    startTime = 1496262000
    endTime = 1496262060

    payload = [
        {'path': 'a.b.c', 'is_leaf': False},
        {'path': 'a.b.c.d', 'is_leaf': True},
    ]
    http_request.return_value = HTTPResponse(
        body=StringIO(pickle.dumps(payload)), status=200)

    # Time-bounded query: 'from'/'until' fields must be sent.
    result = finder.find_nodes(FindQuery('a.b.c', startTime, endTime))
    self.assertIsInstance(result, types.GeneratorType)
    nodes = list(result)

    self.assertEqual(http_request.call_args[0], (
        'POST',
        'http://127.0.0.1/metrics/find/',
    ))
    self.assertEqual(
        http_request.call_args[1], {
            'fields': [
                ('local', '1'),
                ('format', 'pickle'),
                ('query', 'a.b.c'),
                ('from', startTime),
                ('until', endTime),
            ],
            'headers': None,
            'timeout': 10,
        })
    self.assertEqual(len(nodes), 2)
    self.assertIsInstance(nodes[0], BranchNode)
    self.assertEqual(nodes[0].path, 'a.b.c')
    self.assertIsInstance(nodes[1], LeafNode)
    self.assertEqual(nodes[1].path, 'a.b.c.d')

    # Unbounded query: no 'from'/'until' fields at all.
    query = FindQuery('a.b.c', None, None)
    result = finder.find_nodes(query)
    self.assertIsInstance(result, types.GeneratorType)
    nodes = list(result)

    self.assertEqual(http_request.call_args[0], (
        'POST',
        'http://127.0.0.1/metrics/find/',
    ))
    self.assertEqual(
        http_request.call_args[1], {
            'fields': [
                ('local', '1'),
                ('format', 'pickle'),
                ('query', 'a.b.c'),
            ],
            'headers': None,
            'timeout': 10,
        })
    self.assertEqual(len(nodes), 2)
    self.assertIsInstance(nodes[0], BranchNode)
    self.assertEqual(nodes[0].path, 'a.b.c')
    self.assertIsInstance(nodes[1], LeafNode)
    self.assertEqual(nodes[1].path, 'a.b.c.d')

    # A body that is not valid pickle must raise lazily, on consumption.
    http_request.return_value = HTTPResponse(body='error', status=200)
    result = finder.find_nodes(query)
    with self.assertRaisesRegexp(
            Exception,
            'Error decoding find response from http://[^ ]+: .+'):
        list(result)
def find_nodes(self, query):
    """Yield LeafNode/BranchNode objects for metrics matching ``query.pattern``.

    Queries IRONdb for matching metric names, retrying transient network
    and decode errors up to ``self.max_retries`` times.  Each leaf is
    registered with a shared IRONdbMeasurementFetcher so measurement
    fetches can be batched; a trailing ``.**`` in the pattern triggers
    recursive expansion of branch nodes.
    """
    log.debug("IRONdbFinder.find_nodes, query: %s, max_retries: %d" % (query.pattern, self.max_retries))
    metrics_expand = False
    if query.pattern.endswith('.**'):
        # NOTE(review): this mutates the caller's query object in place —
        # confirm callers never reuse the query afterwards.
        query.pattern = query.pattern[:-1]
        metrics_expand = True

    names = {}
    name_headers = copy.deepcopy(self.headers)
    name_headers['Accept'] = 'application/x-flatbuffer-metric-find-result-list'
    for _ in range(self.max_retries):
        try:
            if self.zipkin_enabled == True:
                # A single random id serves as both trace id and span id.
                traceheader = binascii.hexlify(os.urandom(8))
                name_headers['X-B3-TraceId'] = traceheader
                name_headers['X-B3-SpanId'] = traceheader
                if self.zipkin_event_trace_level == 1:
                    name_headers['X-Mtev-Trace-Event'] = '1'
                elif self.zipkin_event_trace_level == 2:
                    name_headers['X-Mtev-Trace-Event'] = '2'
            r = requests.get(urls.names, params={'query': query.pattern},
                             headers=name_headers,
                             timeout=((self.connection_timeout / 1000.0), (self.timeout / 1000.0)))
            r.raise_for_status()
            content_type = r.headers['content-type']
            if content_type == 'application/json':
                names = r.json()
            elif content_type == 'application/x-flatbuffer-metric-find-result-list':
                names = irondb_flatbuf.metric_find_results(r.content)
            # Unknown content types fall through with names unchanged.
            break
        # FIX: ConnectTimeout is a subclass of requests' ConnectionError, so
        # it must be caught *before* the ConnectionError clause — in the
        # previous ordering this handler was unreachable dead code.
        except requests.exceptions.ConnectTimeout as ex:
            # on down nodes, try again on another node until "tries"
            log.exception("IRONdbFinder.find_nodes ConnectTimeout %s" % ex)
        except (socket.gaierror, requests.exceptions.ConnectionError) as ex:
            # on down nodes, try again on another node until "tries"
            log.exception("IRONdbFinder.find_nodes ConnectionError %s" % ex)
        except irondb_flatbuf.FlatBufferError as ex:
            # flatbuffer error, try again
            log.exception("IRONdbFinder.find_nodes FlatBufferError %s" % ex)
        except JSONDecodeError as ex:
            # json error, try again
            log.exception("IRONdbFinder.find_nodes JSONDecodeError %s" % ex)
        except requests.exceptions.ReadTimeout as ex:
            # on down nodes, try again on another node until "tries"
            log.exception("IRONdbFinder.find_nodes ReadTimeout %s" % ex)
        except requests.exceptions.HTTPError as ex:
            # http status code errors are failures, stop immediately
            log.exception("IRONdbFinder.find_nodes HTTPError %s %s" % (ex, r.content))
            break

    if settings.DEBUG:
        log.debug("IRONdbFinder.find_nodes, result: %s" % json.dumps(names))

    # for each set of self.batch_size leafnodes, execute an
    # IRONdbMeasurementFetcher so we can do these in batches.
    measurement_headers = copy.deepcopy(self.headers)
    measurement_headers['Accept'] = 'application/x-flatbuffer-metric-get-result-list'
    fetcher = IRONdbMeasurementFetcher(measurement_headers, self.timeout, self.connection_timeout,
                                       self.database_rollups, self.rollup_window, self.max_retries,
                                       self.zipkin_enabled, self.zipkin_event_trace_level)

    for name in names:
        if 'leaf' in name and 'leaf_data' in name:
            fetcher.add_leaf(name['name'], name['leaf_data'])
            reader = IRONdbReader(name['name'], fetcher)
            yield LeafNode(name['name'], reader)
        else:
            yield BranchNode(name['name'])
            if metrics_expand:
                # Recurse to expand this branch when the original pattern
                # ended in '.**'.
                query = FindQuery(name['name'] + '.**', None, None)
                for node in self.find_nodes(query):
                    yield node
def test_find_nodes(self, http_request):
    """find_nodes() must POST the find request and decode pickle and msgpack bodies."""
    finder = RemoteFinder('127.0.0.1')
    startTime = 1496262000
    endTime = 1496262060

    payload = [
        {'path': 'a.b.c', 'is_leaf': False},
        {'path': 'a.b.c.d', 'is_leaf': True},
    ]
    http_request.return_value = HTTPResponse(
        body=BytesIO(pickle.dumps(payload)),
        status=200,
        preload_content=False)

    nodes = finder.find_nodes(FindQuery('a.b.c', startTime, endTime))

    self.assertEqual(http_request.call_args[0], (
        'POST',
        'http://127.0.0.1/metrics/find/',
    ))
    self.assertEqual(http_request.call_args[1], {
        'fields': [
            ('local', '1'),
            ('format', 'pickle'),
            ('query', 'a.b.c'),
            ('from', startTime),
            ('until', endTime),
        ],
        'headers': None,
        'preload_content': False,
        'timeout': 10,
    })
    self.assertEqual(len(nodes), 2)
    self.assertIsInstance(nodes[0], BranchNode)
    self.assertEqual(nodes[0].path, 'a.b.c')
    self.assertIsInstance(nodes[1], LeafNode)
    self.assertEqual(nodes[1].path, 'a.b.c.d')

    # msgpack variant, selected via the ?format= argument in the host URL.
    finder = RemoteFinder('https://127.0.0.1?format=msgpack')
    http_request.return_value = HTTPResponse(
        body=BytesIO(msgpack.dumps(payload, use_bin_type=True)),
        status=200,
        preload_content=False,
        headers={'Content-Type': 'application/x-msgpack'}
    )

    query = FindQuery('a.b.c', None, None)
    nodes = finder.find_nodes(query)

    self.assertEqual(http_request.call_args[0], (
        'POST',
        'https://127.0.0.1/metrics/find/',
    ))
    self.assertEqual(http_request.call_args[1], {
        'fields': [
            ('local', '1'),
            ('format', 'msgpack'),
            ('query', 'a.b.c'),
        ],
        'headers': None,
        'preload_content': False,
        'timeout': 10,
    })
    self.assertEqual(len(nodes), 2)
    self.assertIsInstance(nodes[0], BranchNode)
    self.assertEqual(nodes[0].path, 'a.b.c')
    self.assertIsInstance(nodes[1], LeafNode)
    self.assertEqual(nodes[1].path, 'a.b.c.d')

    # A body that is neither pickle nor msgpack must raise a decode error.
    http_request.return_value = HTTPResponse(
        body=BytesIO(b'error'), status=200, preload_content=False)
    with self.assertRaisesRegexp(
            Exception,
            'Error decoding find response from https://[^ ]+: .+'):
        finder.find_nodes(query)