def make_non_enum_node(self, metric, enums, complete_len):
    if not self.complete(metric, complete_len):
        metric_parts = metric.split('.')
        yield BranchNode('.'.join(metric_parts[:complete_len]))
    else:
        if enums:
            yield BranchNode(metric)
        else:
            yield TenantBluefloodLeafNode(
                metric,
                TenantBluefloodReader(metric, self.tenant,
                                      self.bf_query_endpoint,
                                      self.enable_submetrics,
                                      self.submetric_aliases, None))
def find_nodes(self, query): logger.debug("find", query=query) pthhave = {} for h in urls.hosts: try: pths = msgpack.unpackb(requests.get( urls.paths_for_host(h), params={'query': query.pattern, 'format': 'msgpack'} ).content, encoding="utf8") except Exception as excp: logger.error("error in find {}".format(excp)) continue for path in pths: # skip any dupes if path['path'] in pthhave: continue pthhave[path['path']] = 1 if path.get('leaf', False): yield CadentLeafNode( path['path'], CadentReader(path['path'], h) ) else: yield BranchNode(path['path']) raise StopIteration
def find_nodes(self, query):
    seen_branches = set()
    leaf_regex = self.compile_regex(query, False)
    # query Elasticsearch for paths
    matches = self.search_series(leaf_regex, query)
    leafs = {}
    branches = {}
    for metric in matches:
        if metric.is_leaf():
            if metric.name in leafs:
                leafs[metric.name].append(metric)
            else:
                leafs[metric.name] = [metric]
        else:
            if metric.name in branches:
                branches[metric.name].append(metric)
            else:
                branches[metric.name] = [metric]
    for name, metrics in leafs.iteritems():
        yield KairosdbLeafNode(name, KairosdbReader(self.config, metrics))
    for branchName, metrics in branches.iteritems():
        name = branchName
        while '.' in name:
            name = name.rsplit('.', 1)[0]
            if name not in seen_branches:
                seen_branches.add(name)
                if leaf_regex.match(name) is not None:
                    yield BranchNode(name)
def find_nodes(self, query):
    # TODO: once we can query influx better for retention periods,
    # honor the start/end time in the FindQuery object
    with statsd.timer('service=graphite-api.action=yield_nodes.target_type=gauge.unit=ms.what=query_duration'):
        for (name, res) in self.get_leaves(query):
            yield InfluxLeafNode(name, InfluxdbReader(
                self.client, name, res, self.cache, self.public_org))
        for name in self.get_branches(query):
            yield BranchNode(name)
def find_nodes(self, query): """Find nodes for 'foo.*.{a,b}.latency' query expressions :type query: graphite_api.storage.FindQuery """ metrics, metrics_set = self._get_metrics_list() # Shortcut if there is no wildcard if not '{' in query.pattern and not '*' in query.pattern: path = query.pattern if path in metrics_set: yield MetronomeLeafNode(path, MetronomeReader(path, self)) return log.info("find_nodes: %s", query.pattern) matcher = Matcher(query.pattern) seen = set() for candidate in metrics: path, is_leaf_node = matcher.match(candidate) if not path: continue if path in seen: continue seen.add(path) #log.debug('match: %s %s', path, is_leaf_node) if is_leaf_node: yield MetronomeLeafNode(path, MetronomeReader(path, self)) else: yield BranchNode(path)
def find_nodes(self, query):
    if query.pattern == 'foo':
        yield BranchNode('foo')
    elif query.pattern == 'bar.*':
        for i in range(10):
            path = 'bar.{0}'.format(i)
            yield LeafNode(path, DummyReader(path))
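# Illustrative only: DummyReader is not shown in the snippet above. A
# minimal reader sketch for the graphite-api storage interface -- assuming
# the usual fetch()/get_intervals() protocol and graphite-api's
# Interval/IntervalSet helpers, with fabricated sample values -- might
# look like this:
import time

from graphite_api.intervals import Interval, IntervalSet


class DummyReader(object):
    def __init__(self, path):
        self.path = path

    def fetch(self, start_time, end_time):
        # Return ((start, end, step), values); here one fake datapoint
        # per 60-second step.
        step = 60
        values = [42.0 for _ in range(start_time, end_time, step)]
        return (start_time, end_time, step), values

    def get_intervals(self):
        # Advertise a single retention interval covering the last day.
        now = int(time.time())
        return IntervalSet([Interval(now - 86400, now)])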
def find_nodes(self, query): logger.debug("find_nodes() query %s", query.pattern) # TODO: once we can query influx better for retention periods, honor the start/end time in the FindQuery object with statsd.timer('service_is_graphite-api.action_is_yield_nodes.target_type_is_gauge.unit_is_ms.what_is_query_duration'): for (name, res) in self.get_leaves(query): yield InfluxLeafNode(name, InfluxdbReader( self.client, name, res, self.cache)) for name in self.get_branches(query): yield BranchNode(name)
def find_nodes(self, query):
    seen_branches = set()
    # query Elasticsearch for paths
    matches = self.search_series(query)
    for name, metrics in matches['leafs'].iteritems():
        yield RaintankLeafNode(name, RaintankReader(self.config, metrics))
    for branchName in matches['branches']:
        yield BranchNode(branchName)
def find_nodes(self, query):
    paths = requests.get(urls.paths,
                         params={'query': query.pattern}).json()
    for path in paths:
        if path['leaf']:
            yield CyaniteLeafNode(path['path'],
                                  CyaniteReader(path['path']))
        else:
            yield BranchNode(path['path'])
def find_nodes(self, query): """Query format: <prefix>.<tag_type> or <prefix>.<tag_type>.<tag> """ query = query.pattern.split('.') if len(query) < 2 or query[0] != self.prefix: return [] try: if len(query) == 2: return [BranchNode(tag) for tag in self.tags[query[1]]] if len(query) == 3: return [ BranchNode(item) for item in self.tags[query[1]][query[2]] ] except KeyError: return []
def find_nodes(self, query):
    request = requests.post('%s/search' % graphouse_url,
                            data={'query': query.pattern})
    request.raise_for_status()
    result = request.text.split('\n')
    for metric in result:
        if not metric:
            continue
        if metric.endswith('.'):
            yield BranchNode(metric[:-1])
        else:
            yield LeafNode(metric,
                           GraphouseReader(metric,
                                           graphouse_url=self.graphouse_url))
def find_nodes(self, query):
    request = requests.get('%s/search?%s' % (
        self.graphouse_url, parse.urlencode({'query': query.pattern})))
    request.raise_for_status()
    result = request.text.split('\n')
    for metric in result:
        if not metric:
            continue
        if metric.endswith('.'):
            yield BranchNode(metric[:-1])
        else:
            yield GraphouseLeafNode(metric,
                                    GraphouseReader(metric,
                                                    self.graphouse_url))
def _nodes_from_multi_ds_rrd(self, full_path, patterns, metric, dss):
    pattern = patterns[0]
    patterns = patterns[1:]
    for ds in dss:
        if not self._match(ds, pattern):
            continue
        new_metric = "%s.%s" % (metric, ds)
        if not patterns:
            yield BranchNode(new_metric)
        else:
            it = self._nodes_from_rra(full_path, patterns, new_metric, ds)
            for node in it:
                yield node
def find_nodes_without_submetrics(self, query):
    query_parts = query.pattern.split('.')
    query_depth = len(query_parts)
    for metric in self.find_metrics(query.pattern):
        metric_parts = metric.split('.')
        if not self.complete(metric, query_depth):
            yield BranchNode('.'.join(metric_parts[:query_depth]))
        else:
            yield TenantBluefloodLeafNode(
                metric,
                TenantBluefloodReader(metric, self.tenant,
                                      self.bf_query_endpoint,
                                      self.enable_submetrics,
                                      self.submetric_aliases))
def find_nodes(self, query): logger.debug("find_nodes", finder="newts", start=query.startTime, end=query.endTime, pattern=query.pattern) for resource, metric, is_leaf in self._search_nodes(query.pattern): # XXX ambigous, : is valid in graphite name dot_path = resource.replace(':', '.') if not is_leaf: yield BranchNode(dot_path) else: reader = NewtsReader(self.client, resource, metric, self.config['fetch.maxpoints']) yield LeafNode('{}.{}'.format(dot_path, metric), reader)
def find_nodes(self, query): """Find and return nodes matching query :param query: Query to search for :type query: :mod:`influxgraph.utils.Query` """ node_paths = self.index.query(query.pattern) for path, node in node_paths: if node.is_leaf(): # Set path on existing reader to avoid having to create # new objects for each path which is expensive # Reader is not used for queries when multi fetch is enabled # regardless self.reader.path = path yield InfluxDBLeafNode(path, self.reader) else: yield BranchNode(path)
def find_nodes(self, query):
    clean_pattern = query.pattern.replace('\\', '')
    pattern_parts = clean_pattern.split('.')
    for root_dir in self.dirs:
        for abs_path in self._find_paths(root_dir, pattern_parts):
            relative_path = abs_path[len(root_dir):].lstrip(os.sep)
            metric_path = fs_to_metric(relative_path)

            # Now we construct and yield an appropriate Node object
            if isdir(abs_path):
                yield BranchNode(metric_path)
            elif isfile(abs_path):
                if abs_path.endswith(KENSHIN_EXT):
                    reader = KenshinReader(abs_path, metric_path,
                                           self.carbonlink)
                    yield LeafNode(metric_path, reader)
def _nodes_from_rrd(self, full_path, patterns, metric):
    if not patterns:
        yield BranchNode(metric)
    else:
        dss = self._get_dss(full_path)
        if len(dss) == 1:
            it = self._nodes_from_single_ds_rrd(
                full_path, patterns, metric, dss)
        else:
            it = self._nodes_from_multi_ds_rrd(
                full_path, patterns, metric, dss)
        for node in it:
            yield node
def find_nodes(self, query):
    res = self.es.search(index=self.es_index, doc_type='_doc', body={
        'query': {
            'regexp': {
                'name': query.pattern.replace(
                    '.', '\\.').replace(
                    '*', '[-a-z0-9_;:]*')
            }
        }
    }, _source=['name', 'leaf'])
    for hit in res.get('hits', {}).get('hits'):
        metric = hit.get('_source')
        if metric['leaf']:
            yield LeafNode(metric['name'], Reader(metric['name']))
        else:
            yield BranchNode(metric['name'])
def _find_nodes(self, parent_dir, patterns, parent_metric):
    pattern = patterns[0]
    patterns = patterns[1:]
    for mtype, full_path, metric in self._yield_metrics(parent_dir):
        if not self._match(metric, pattern):
            continue
        if parent_metric:
            metric = "%s.%s" % (parent_metric, metric)
        if mtype == 'directory':
            if not patterns:
                yield BranchNode(metric)
            else:
                for node in self._find_nodes(full_path, patterns, metric):
                    yield node
        elif mtype == 'rrd_file':
            for node in self._nodes_from_rrd(full_path, patterns, metric):
                yield node
def find_nodes(self, query, reqkey):
    metricsearch = getattr(settings, 'METRICSEARCH', '127.0.0.1')
    queries = self.expand_braces(query.pattern)
    result = []
    for query in queries:
        request = requests.get(
            'http://%s:7000/search?%s' % (
                metricsearch, urllib.urlencode({'query': query})))
        request.raise_for_status()
        result += request.text.split('\n')
    for metric in result:
        if not metric:
            continue
        if metric.endswith('.'):
            yield BranchNode(metric[:-1])
        else:
            yield LeafNode(metric, ClickHouseReader(metric, reqkey))
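# Illustrative only: self.expand_braces is not shown above. A helper along
# these lines would turn one braced pattern into the list of plain
# patterns that the finder then queries one by one.
import re


def expand_braces(pattern):
    # Expand the first {a,b,...} group and recurse, so that
    # 'one.{two,three}.four' becomes ['one.two.four', 'one.three.four'].
    match = re.search(r'\{([^{}]*)\}', pattern)
    if not match:
        return [pattern]
    head, tail = pattern[:match.start()], pattern[match.end():]
    expanded = []
    for option in match.group(1).split(','):
        expanded.extend(expand_braces(head + option + tail))
    return expanded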
def find_nodes(self, query):
    try:
        query_depth = len(query.pattern.split('.'))
        #print 'DAS QUERY ' + str(query_depth) + ' ' + query.pattern
        client = Client(self.bf_query_endpoint, self.tenant)
        values = client.find_metrics(query.pattern)
        for obj in values:
            metric = obj['metric']
            parts = metric.split('.')
            metric_depth = len(parts)
            if metric_depth > query_depth:
                yield BranchNode('.'.join(parts[:query_depth]))
            else:
                yield LeafNode(
                    metric,
                    TenantBluefloodReader(metric, self.tenant,
                                          self.bf_query_endpoint))
    except Exception as e:
        print "Exception in Blueflood find_nodes: "
        print e
        raise e
def find_nodes_without_submetrics(self, query): """ This method is a generator which yields branch/leaf nodes that are available for a given query. """ nodes_dict = self.find_nodes_from_bf(query) for node in nodes_dict: for metric_name, is_leaf in node.items(): logger.debug( "Metric name(could be partial): [%s] is_leaf: [%s]", metric_name, is_leaf) if is_leaf: yield TenantBluefloodLeafNode( metric_name, TenantBluefloodReader(metric_name, self.tenant, self.bf_query_endpoint, self.enable_submetrics, self.submetric_aliases, None)) else: yield BranchNode(metric_name)
def find_nodes(self, query):
    # query.pattern is basically regex, though * should become [^\.]+
    # and . \. but list series doesn't support pattern matching/regex yet
    regex = query.pattern.replace('.', '\.').replace('*', '[^\.]+')
    self.logger.info("find_nodes query: %s -> %s" % (query.pattern, regex))
    regex = re.compile(regex)
    series = self.client.query("list series")
    for s in series:
        self.logger.info("matching %s" % s['name'])
    series = [
        s['name'] for s in series
        if regex.match(s['name']) is not None
    ]
    seen_branches = set()
    # for leaf "a.b.c" we should yield branches "a" and "a.b"
    for s in series:
        self.logger.info("leaf %s" % s)
        yield LeafNode(s, InfluxdbReader(self.client, s, self.logger))
        branch = s.rpartition('.')[0]
        while branch != '' and branch not in seen_branches:
            self.logger.info("branch %s" % branch)
            yield BranchNode(branch)
            seen_branches.add(branch)
            branch = branch.rpartition('.')[0]
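# Illustrative only: the substitution above turns a graphite glob into a
# regex in which '*' matches exactly one path segment. For example:
import re

pattern = 'servers.*.cpu'
regex = re.compile(pattern.replace('.', '\.').replace('*', '[^\.]+'))
assert regex.match('servers.web1.cpu') is not None
assert regex.match('servers.web1.disk') is None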
def find_nodes_with_submetrics(self, query):
    # By definition, when using submetrics, the names of all leaf nodes
    # must end in a submetric alias. BF doesn't know about the submetric
    # aliases, so the aliases (or globs corresponding to them) must be
    # massaged before querying BF.
    # There are two cases below:
    # 1. When you want a list of valid submetric aliases, i.e. a complete
    #    metric name followed by ".*"
    # 2. When a query contains a valid submetric alias, i.e. a complete
    #    metric name followed by a valid submetric alias like "._avg"
    # Every submetric leaf node is covered by one of the above two cases.
    # Everything else is a branch node.
    def submetric_is_enum_value(submetric_alias):
        return self.submetric_aliases[submetric_alias] == 'enum'

    query_parts = query.pattern.split('.')
    query_depth = len(query_parts)
    submetric_alias = query_parts[-1]
    complete_len = query_depth - 1
    # The pattern which all complete metrics will match
    complete_pattern = '.'.join(query_parts[:-1])

    # handle enums
    # enums are required to have an "enum" submetric alias so
    # this is the only read required, (no background read.)
    if (query_depth > 2) and (submetric_alias in self.submetric_aliases):
        if submetric_is_enum_value(submetric_alias):
            enum_name = '.'.join(query_parts[:-2])
            for metric, enums in \
                    self.find_metrics_with_enum_values(enum_name).items():
                for n in self.make_enum_nodes(metric, enums,
                                              complete_pattern):
                    yield n
            return

    if (query_depth > 1) and (submetric_alias == '*'):
        # In this if clause we are searching for complete metric names
        # followed by a ".*". If so, we are requesting a list of
        # submetric names, so return leaf nodes for each submetric
        # alias that is enabled.
        # First modify the pattern to get a superset that includes
        # already complete submetrics
        new_pattern = complete_pattern + '*'
        for (metric, enums) in self.find_metrics(new_pattern).items():
            metric_parts = metric.split('.')
            if self.complete(metric, complete_len) and \
                    fnmatch.fnmatchcase(metric, complete_pattern):
                for alias, _ in self.submetric_aliases.items():
                    yield TenantBluefloodLeafNode(
                        '.'.join(metric_parts + [alias]),
                        TenantBluefloodReader(metric, self.tenant,
                                              self.bf_query_endpoint,
                                              self.enable_submetrics,
                                              self.submetric_aliases,
                                              None))
            else:
                # Make sure the branch nodes match the original pattern
                if fnmatch.fnmatchcase(metric, query.pattern):
                    yield BranchNode('.'.join(metric_parts[:query_depth]))
    # if searching for a particular submetric alias, create a
    # leaf node for it
    elif (query_depth > 1) and (submetric_alias in self.submetric_aliases):
        for (metric, enums) in self.find_metrics(complete_pattern).items():
            if self.complete(metric, complete_len):
                yield TenantBluefloodLeafNode(
                    '.'.join([metric, submetric_alias]),
                    TenantBluefloodReader(metric, self.tenant,
                                          self.bf_query_endpoint,
                                          self.enable_submetrics,
                                          self.submetric_aliases, None))
    # everything else is a branch node
    else:
        for (metric, enums) in self.find_metrics(query.pattern).items():
            metric_parts = metric.split('.')
            if not self.complete(metric, complete_len):
                yield BranchNode('.'.join(metric_parts[:query_depth]))
def find_nodes_with_submetrics(self, query):
    # By definition, when using submetrics, the names of all leaf nodes
    # must end in a submetric alias. BF doesn't know about the submetric
    # aliases, so the aliases (or globs corresponding to them) must be
    # massaged before querying BF.
    # There are two cases below:
    # 1. When you want a list of valid submetric aliases, i.e. a complete
    #    metric name followed by ".*"
    # 2. When a query contains a valid submetric alias, i.e. a complete
    #    metric name followed by a valid submetric alias like "._avg"
    # Every submetric leaf node is covered by one of the above two cases.
    # Everything else is a branch node.
    query_parts = query.pattern.split('.')
    query_depth = len(query_parts)
    # The pattern which all complete metrics will match
    complete_pattern = '.'.join(query_parts[0:-1])
    if (query_depth > 1) and (query_parts[-1] == '*'):
        # In this if clause we are searching for complete metric names
        # followed by a ".*". If so, we are requesting a list of submetric
        # names, so return leaf nodes for each submetric alias that is
        # enabled.
        # First modify the pattern to get a superset that includes
        # already complete submetrics
        new_pattern = complete_pattern + '*'
        for metric in self.find_metrics(new_pattern):
            metric_parts = metric.split('.')
            if self.complete(metric, query_depth) and fnmatch.fnmatchcase(
                    metric, complete_pattern):
                for alias, _ in self.submetric_aliases.items():
                    yield TenantBluefloodLeafNode(
                        '.'.join(metric_parts + [alias]),
                        TenantBluefloodReader(metric, self.tenant,
                                              self.bf_query_endpoint,
                                              self.enable_submetrics,
                                              self.submetric_aliases))
            else:
                # Make sure the branch nodes match the original pattern
                if fnmatch.fnmatchcase(metric, query.pattern):
                    yield BranchNode('.'.join(metric_parts[:query_depth]))
    # if searching for a particular submetric alias, create a leaf node
    # for it
    elif (query_depth > 1) and (query_parts[-1] in self.submetric_aliases):
        for metric in self.find_metrics(complete_pattern):
            if self.complete(metric, query_depth):
                yield TenantBluefloodLeafNode(
                    '.'.join([metric, query_parts[-1]]),
                    TenantBluefloodReader(metric, self.tenant,
                                          self.bf_query_endpoint,
                                          self.enable_submetrics,
                                          self.submetric_aliases))
    # everything else is a branch node
    else:
        for metric in self.find_metrics(query.pattern):
            metric_parts = metric.split('.')
            if not self.complete(metric, query_depth):
                yield BranchNode('.'.join(metric_parts[:query_depth]))
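# Illustrative only: how the two cases in the submetric finders above
# split a query pattern, assuming '_avg' is an enabled submetric alias.
import fnmatch

query_parts = 'a.b.*._avg'.split('.')
complete_pattern = '.'.join(query_parts[0:-1])   # 'a.b.*'
assert query_parts[-1] == '_avg'                 # case 2 above
# Case 2 queries BF for metrics matching 'a.b.*'; every complete match,
# e.g. 'a.b.c', becomes the leaf 'a.b.c._avg'.
assert fnmatch.fnmatchcase('a.b.c', complete_pattern)
# Case 1 ('a.b.c.*') instead yields one leaf per enabled alias:
# 'a.b.c._avg', 'a.b.c._max', ... Everything else is a branch node.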
def find_nodes(self, query):
    pattern = query.pattern
    if query.pattern.startswith("litmus"):
        pattern = query.pattern.replace("litmus", "worldping", 1)
    params = {
        "query": pattern,
        "from": query.startTime,
        "until": query.endTime,
        "format": "completer",
    }
    headers = {
        'X-Org-Id': "%d" % g.org,
    }
    url = "%smetrics/find" % self.config['tank']['url']
    with statsd.timer("graphite-api.%s.find.query_duration" % hostname):
        try:
            resp = self.http_session.get(url, params=params,
                                         headers=headers)
        except Exception as e:
            logger.error("find_nodes", url=url, error=e.message)
            raise MetrictankException(
                code=503,
                description="metric-tank service unavailable")
    logger.debug('find_nodes', url=url, status_code=resp.status_code,
                 body=resp.text)
    if resp.status_code >= 500:
        logger.error("find_nodes", url=url,
                     status_code=resp.status_code, body=resp.text)
        if resp.status_code == 500:
            raise MetrictankException(500,
                                      "metric-tank internal server error")
        elif resp.status_code == 502:
            raise MetrictankException(502, "metric-tank bad gateway")
        elif resp.status_code == 503:
            raise MetrictankException(503,
                                      "metric-tank service unavailable")
        elif resp.status_code == 504:
            raise MetrictankException(504, "metric-tank gateway timeout")
        else:
            raise MetrictankException(resp.status_code, resp.text)
    if resp.status_code >= 400:
        raise MetrictankException(resp.status_code, resp.text)
    data = resp.json()
    if "metrics" not in data:
        raise Exception("invalid response from metrictank.")
    for metric in data["metrics"]:
        if metric["is_leaf"] == "1":
            path = metric["path"]
            if query.pattern != pattern:
                path = metric["path"].replace("worldping", "litmus", 1)
            yield RaintankLeafNode(path, RaintankReader(metric["path"]))
        else:
            path = metric["path"]
            if query.pattern != pattern:
                path = metric["path"].replace("worldping", "litmus", 1)
            yield BranchNode(path)
def find_nodes(self, query):
    with statsd.timer('service=graphite-api.action=yield_nodes.target_type=gauge.unit=ms.what=query_duration'):
        for (name, res) in self.get_leaves(query):
            yield InfluxLeafNode(name, InfluxdbReader(
                self.client, name, res, self.cache, self.cheat_times))
        for name in self.get_branches(query):
            yield BranchNode(name)