def _get_details_from_resource(self, resource):
    """Return all details/description values found in *resource*, reversed.

    Refreshes the resource, then runs an ObjectPath query over a copy of
    its entries and returns the matches in reverse discovery order.
    """
    resource.refresh()
    entries_snapshot = resource.entries.copy()
    query = Tree(entries_snapshot)
    matches = query.execute('$..*["details"]["description"]')
    return list(matches)[::-1]
def _get_details_from_resource(self):
    """Collect details/description values from the device's current state.

    Reads the current resource from the device, queries a copy of its
    'entries' mapping, and returns the matches in reverse order.
    """
    device_state = self.read_current_from_device()
    entries_snapshot = device_state['entries'].copy()
    matches = Tree(entries_snapshot).execute('$..*["details"]["description"]')
    return list(matches)[::-1]
def collect(self):
    """Yield one GaugeMetricFamily per configured metric.

    Fetches the JSON document once, evaluates each metric's ObjectPath
    query against it, and attaches the metric's own labels merged with
    the globally configured labels.
    """
    cfg = self._config
    document = requests.get(cfg['json_data_url'], timeout=10).json()
    doc_tree = Tree(document)
    shared_keys, shared_values = self.parser_label(cfg.get('global_labels', {}))
    for spec in cfg['metrics']:
        metric_name = "{}_{}".format(cfg['metric_name_prefix'], spec['name'])
        metric_description = spec.get('description', '')
        metric_path = spec['path']
        label_keys, label_values = self.parser_label(spec.get('labels', {}))
        label_keys.extend(shared_keys)
        label_values.extend(shared_values)
        logging.debug("label_keys: {}, label_values: {}".format(
            label_keys, label_values))
        value = doc_tree.execute(metric_path)
        logging.debug("metric_name: {}, value for '{}' : {}".format(
            metric_name, metric_path, value))
        gauge = GaugeMetricFamily(metric_name,
                                  metric_description,
                                  labels=label_keys)
        gauge.add_metric(tuple(str(v) for v in label_values), value)
        yield gauge
def collect(self):
    """Yield gauges for every endpoint / metric-group / metric combination.

    For each endpoint, every metric group with a matching ``kind`` is
    evaluated.  The JSON document for a group is fetched once and reused
    for all of its metrics — the original re-fetched the same URL for
    every metric even though the URL depends only on the group.
    """
    config = self._config
    for endpoint in config['json_data_urls']:
        base_url = endpoint['url']
        for metric_config in config['metrics']:
            if endpoint['kind'] != metric_config['kind']:
                continue
            # Fetch once per (endpoint, group): the URL is built from the
            # endpoint base and the group's jsonpath only.
            result = json.loads(
                urllib2.urlopen('{}{}'.format(base_url,
                                              metric_config['jsonpath']),
                                timeout=10).read())
            result_tree = Tree(result)
            for mymetric in metric_config['metric']:
                metric_name = "{}_{}".format(metric_config['prefix'],
                                             mymetric['name'])
                metric_description = mymetric.get('description', '')
                metric_path = mymetric['path']
                value = result_tree.execute(metric_path)
                logging.debug(
                    "metric_name: {}, value for '{}' : {}".format(
                        metric_name, metric_path, value))
                metric = GaugeMetricFamily(
                    metric_name,
                    metric_description,
                    labels=["environment", "app", "kind"])
                metric.add_metric([
                    endpoint['env'], endpoint['app'], endpoint['kind']
                ], value=str(value))
                yield metric
def collect(self):
    """Yield one constant-value info gauge per endpoint.

    Each endpoint's JSON document is queried for the configured tags,
    which become labels on a gauge with value 1.  Failures are logged
    and the endpoint is skipped (best-effort scraping).
    """
    config = self._config
    for endpoint in config['json_data_urls']:
        try:
            result = json.loads(
                urllib2.urlopen(endpoint['url'], timeout=10).read())
            result_tree = Tree(result)
            labels = ['instance_id']
            values = [endpoint['label']]
            for tag in config['tags']:
                tag_name = tag['name']
                tag_path = tag['path']
                value = result_tree.execute(tag_path)
                logging.info("tag_name: {}, value for '{}' : {}".format(
                    tag_name, tag_path, value))
                labels.append(tag_name)
                values.append(value)
            metric = GaugeMetricFamily(config['metric_name'],
                                       "spring boot info",
                                       labels=labels)
            metric.add_metric(values, 1)
            yield metric
        # Deliberately best-effort per endpoint, but never bare `except:`
        # — that would also swallow SystemExit/KeyboardInterrupt.
        except Exception:
            logging.info("Could not fetch details for %s", endpoint['label'])
def collect(self):
    """Yield one host-labelled gauge per configured metric.

    Lazily (re)builds the authenticated, cookie-aware HTTP client on
    first use and after any request failure, fetches the JSON document,
    and pairs each metric's value list with the hostnames found in the
    same document (assumed to align index-for-index — TODO confirm).
    """
    config = self._config
    global request
    global cookie_jar
    if self._initHttpClient:
        logging.info("Init http client")
        cookie_jar = cookielib.CookieJar()
        opener = urllib2.build_opener(
            urllib2.HTTPCookieProcessor(cookie_jar))
        urllib2.install_opener(opener)
        request = urllib2.Request(config['json_data_url'])
        base64string = base64.b64encode(
            '%s:%s' % (config['username'], config['password']))
        request.add_header("Authorization", "Basic %s" % base64string)
        self._initHttpClient = False
    try:
        response = urllib2.urlopen(request)
        result = json.loads(response.read())
        result_tree = Tree(result)
        # One hostname per cluster member.
        hostname_array = list(result_tree.execute('$..hostName'))
        for metric_config in config['metrics']:
            metric_name = '%s_%s' % (config['metric_name_prefix'],
                                     metric_config['name'])
            metric_description = metric_config.get('description', '')
            metric_path = metric_config['path']
            value = list(result_tree.execute(metric_path))
            logging.debug("metric_name: {}, value for '{}' : {}".format(
                metric_name, metric_path, value))
            metric = GaugeMetricFamily(metric_name,
                                       metric_description,
                                       labels=['hostname'])
            for ind in range(len(value)):
                metric.add_metric([hostname_array[ind]], value[ind])
            yield metric
    # BUG FIX: HTTPError is a subclass of URLError, so it must be caught
    # first; the original listed URLError first, making this unreachable.
    except urllib2.HTTPError as e:
        logging.error(e.code)
        self._initHttpClient = True
    except urllib2.URLError as e:
        logging.error(e.reason)
        self._initHttpClient = True
def json_xpath(param, json_path):
    """Evaluate one or more ObjectPath expressions against *param*.

    *json_path* may be a single expression string or a list of them;
    matches from every expression are flattened into one result list.
    """
    paths = [json_path] if isinstance(json_path, str) else json_path
    tree = Tree(param)
    collected = []
    for expression in paths:
        matches = tree.execute(expression)
        if matches:
            collected.extend(matches)
    return collected
def smartbox_setup(request, smartbox_ip):
    """Log into the smartbox UI, scrape its analytics JSON and collect
    the services whose state is active.

    Appends ``(service_name, [identifier, state])`` pairs to the
    module-level ``active_service`` list and returns it.
    """
    optionsforchrome = Options()
    optionsforchrome.add_argument('--no-sandbox')
    optionsforchrome.add_argument('--start-maximized')
    optionsforchrome.add_argument('--disable-extensions')
    optionsforchrome.add_argument('--disable-dev-shm-usage')
    optionsforchrome.add_argument('--ignore-certificate-errors')
    service = Service(ChromeDriverManager().install())
    driver = webdriver.Chrome(service=service, options=optionsforchrome)
    driver.get(smartbox_ip)
    driver.maximize_window()
    # SECURITY: credentials are hard-coded placeholders; move them to
    # configuration or a secrets store.
    driver.find_element(By.NAME, "username").click()
    driver.find_element(By.NAME, "username").send_keys("username")
    driver.find_element(By.NAME, "password").click()
    driver.find_element(By.NAME, "password").send_keys("password")
    driver.find_element(By.XPATH, "//input[@value=\'Login\']").click()
    time.sleep(5)  # crude post-login wait; WebDriverWait would be sturdier
    driver.get(smartbox_ip + "/getanalytics.php")
    result = json.loads(driver.find_element(By.TAG_NAME, 'body').text)
    result_tree = Tree(result)
    service_name_path = "$.webFullStreamInfo.service.serviceName"
    service_name = result_tree.execute(service_name_path)
    service_identifier_path = "$.webFullStreamInfo.service.serviceIdentifier"
    service_identifier = result_tree.execute(service_identifier_path)
    service_state_path = "$.webFullStreamInfo.service.state"
    service_state = result_tree.execute(service_state_path)
    service_name_list = list(service_name)
    service_identifier_list = list(service_identifier)
    service_state_list = list(service_state)
    # NOTE(review): services sharing a name collapse to one dict entry —
    # confirm names are unique upstream.
    d = {
        z[0]: list(z[1:])
        for z in zip(service_name_list, service_identifier_list,
                     service_state_list)
    }
    for key, value in d.items():
        if value[1] == 'Service is active':
            kv = key, value
            active_service.append(kv)
    driver.quit()
    return active_service
def my_response2(arg1, arg2):
    """Print both arguments and return a canned 200 response.

    Also exercises the third-party ``objectpath`` library bundled in
    the Lambda layer by querying the response's status code.
    """
    print(arg1, arg2)
    response = {
        "statusCode": 200,
        "body": json.dumps({"message": "bye world"}),
    }
    # Prove the layer-provided objectpath import works at runtime.
    print(Tree(response).execute('$.statusCode'))
    return response
def _get_details_from_resource(self):
    """Return details/description values from the device state, reversed.

    Raises:
        F5ModuleError: if the optional ``objectpath`` dependency is
            missing.  Checked *before* touching the device so we fail
            fast instead of reading state we cannot process (the
            original read the device first, then raised).
    """
    if not HAS_OBJPATH:
        raise F5ModuleError(
            "objectpath module required, install objectpath module to continue. "
        )
    resource = self.read_current_from_device()
    stats = resource['entries'].copy()
    tree = Tree(stats)
    details = list(tree.execute('$..*["details"]["description"]'))
    return details[::-1]
def arrayOfBookmarks(data):
    """Flatten the bookmark tree and keep only well-formed bookmark dicts.

    A node counts as a bookmark when it contains every key in
    ``keysToMatch``; each match is reduced to exactly those keys.
    """
    keysToMatch = ("name", "url", "date_added", "id", "meta_info")
    flattened = list(Tree(data).execute('$..*'))
    filteredBookmarks = []
    for node in flattened:
        if all(key in node for key in keysToMatch):
            filteredBookmarks.append({k: node[k] for k in keysToMatch})
    return filteredBookmarks
def H_objectpath(ctx):
    """Run the ObjectPath expression from the CLI args over the input
    document and stream every result to the output file.

    A non-falsy status from ``stdout_data`` aborts and is returned.
    """
    infile, outfile = get_src_dest_defaults(ctx)
    data = load_data( ctx.opts.flags.input_format, infile, ctx )
    assert data
    tree = Tree(data)
    assert tree.data
    result = tree.execute( ctx.opts.args.expr )
    if not isinstance(result, types.GeneratorType):
        return stdout_data( result, ctx, outf=outfile )
    for item in result:
        status = stdout_data( item, ctx, outf=outfile )
        if status:
            return status
def assess_dockstore_obj(dockstore_obj):
    ''' Given a dockstore object from the API, assess it for its fairness

    Each entry of the module-level ``metrics`` list either carries a fixed
    answer (no 'query') or an ObjectPath query whose matches are scored:
    by a ratio when one is computable, otherwise by matching the joined
    results against the metric's regex pattern.  Returns a dict keyed by
    metric description.
    '''
    root = Tree(dockstore_obj)
    print('Performing assessment...')
    answers = {}
    for metric in metrics:
        if metric.get('query') is None:
            # Static metric: no query to run, the answer is preconfigured.
            answers[metric['desc']] = {
                'metric': metric.get('metric', ''),
                'answer': metric.get('answer', 1.0),
                'comment': metric['desc'],
            }
            continue
        matches = root.execute(metric['query'])
        results = []
        ratio = None
        if matches is not None:  # was `!= None`
            matches = list(itertools.chain(matches))
            results = '; '.join([str(e).strip() for e in matches]).strip()
            try:
                ratio = get_ratio(
                    root,
                    metric['ratio'][0],
                    metric['ratio'][1],
                    metric['ratio'][2],
                )
            except Exception:
                # Metric may lack a 'ratio' key or the ratio may be
                # uncomputable; fall back to pattern scoring.  The
                # original bare `except:` also swallowed SystemExit and
                # KeyboardInterrupt.
                pass
        if ratio is None:  # was `== None`
            answers[metric['desc']] = {
                'metric': metric.get('metric', ''),
                'answer': 1.0 if len(results) > 0
                and metric['pattern'].match(results) else 0.0,
                'comment': str(results),
            }
        else:
            answers[metric['desc']] = {
                'metric': metric.get('metric', ''),
                'comment': metric['desc'],
            }
            answers[metric['desc']]['answer'] = ratio
    return answers
def call_api(self, keyword):
    """Search Twitter for *keyword* and extract the configured features.

    Returns a list with one feature-value list per tweet.

    BUG FIX: ``feature_value_list`` was created once *outside* the tweet
    loop, so every tweet appended to — and ``feature_data`` held repeated
    references to — the same ever-growing list.  It is now reset per
    tweet so each entry holds only that tweet's features.
    """
    api = tweepy.API(self.auth)
    s = api.search(q=keyword)
    feature_data = []
    for tweet in s:
        tree = Tree(tweet._json)
        feature_value_list = []  # fresh list for each tweet
        feature_list = FeatureList('twitter')
        for feature in feature_list:
            f_value = tree.execute(feature)
            if feature == '$.source':
                # Source comes back as an HTML anchor; strip to the href text.
                f_value = TextUtil.text_href(f_value)
            feature_value_list.append(f_value)
        feature_data.append(feature_value_list)
    return feature_data
def children(self):
    """Return every embedded child node of this node, excluding itself.

    Serializes this node to JSON, locates each sub-dict carrying both the
    type field and the object field, and rebuilds each as a node instance;
    entries whose object id equals this node's id are skipped.
    """
    tree = Tree(loads(str(self)))
    type_field = NodeBaseClass.serialize('type')
    object_field = NodeBaseClass.serialize('object')
    id_field = NodeBaseClass.serialize('id')
    query = '$..*[@.' + type_field + ' and @.' + object_field + ']'
    result = []
    for entry in list(tree.execute(query)):
        if entry[object_field][id_field] == self.id:
            continue  # skip the serialized form of this node itself
        node_type = entry[type_field] if type_field in entry.keys() else None
        result.append(
            type(self).from_str(dumps(entry[object_field]),
                                node_type=node_type))
    return result
def collect(self):
    """Yield one unlabelled gauge per configured metric.

    Fetches the JSON document once from the configured URL and runs
    each metric's ObjectPath query against it.
    """
    cfg = self._config
    payload = urllib.request.urlopen(cfg['json_data_url'], timeout=10).read()
    document = Tree(json.loads(payload))
    for spec in cfg['metrics']:
        name = "{}_{}".format(cfg['metric_name_prefix'], spec['name'])
        description = spec.get('description', '')
        path = spec['path']
        value = document.execute(path)
        logging.debug("metric_name: {}, value for '{}' : {}".format(
            name, path, value))
        yield GaugeMetricFamily(name, description, value=value)
def collect(self):
    """Yield gauges for every endpoint / data-url / metric combination.

    The config either holds a list under 'endpoints' or is itself a
    single endpoint.  Each endpoint provides one URL ('json_data_url')
    or several ('json_data_urls'); each document is queried for all of
    the endpoint's metrics.  URL entries carrying a 'label' produce a
    'tag'-labelled gauge, the rest an unlabelled one.
    """
    config = self._config
    endpoints = config['endpoints'] if 'endpoints' in config else [config]
    for ep in endpoints:
        if 'json_data_url' in ep:
            url_entries = [{"url": ep['json_data_url']}]
        else:
            url_entries = ep['json_data_urls']
        for entry in url_entries:
            payload = urlopen(entry['url'], timeout=10).read()
            document = Tree(json.loads(payload))
            for spec in ep['metrics']:
                name = "{}_{}".format(ep['metric_name_prefix'],
                                      spec['name'])
                description = spec.get('description', '')
                path = spec['path']
                value = document.execute(path)
                if "label" in entry:
                    logging.debug(
                        "metric_name: {}, tag: {}, value for '{}' : {}".
                        format(name, entry["label"], path, value))
                    gauge = GaugeMetricFamily(name,
                                              description,
                                              labels=['tag'])
                    gauge.add_metric(labels=[entry["label"]], value=value)
                else:
                    logging.debug(
                        "metric_name: {}, value for '{}' : {}".format(
                            name, path, value))
                    gauge = GaugeMetricFamily(name,
                                              description,
                                              value=value)
                yield gauge
def collect(self):
    """Yield one 'tag'-labelled gauge per endpoint/metric pair.

    Each endpoint's JSON document is fetched once and queried for every
    configured metric path; the endpoint's label becomes the tag value.
    """
    cfg = self._config
    for endpoint in cfg['json_data_urls']:
        payload = urllib2.urlopen(endpoint['url'], timeout=10).read()
        document = Tree(json.loads(payload))
        for spec in cfg['metrics']:
            name = "{}_{}".format(cfg['metric_name_prefix'], spec['name'])
            description = spec.get('description', '')
            path = spec['path']
            value = document.execute(path)
            logging.debug("metric_name: {}, value for '{}' : {}".format(
                name, path, value))
            gauge = GaugeMetricFamily(name, description, labels=["tag"])
            gauge.add_metric([endpoint['label']], value)
            yield gauge
def _flatten(self):
    """Flatten the node graph rooted at ``self.root_node``.

    Walks every reachable container and node, registers each
    CoreNodeClass instance in ``self._nodes`` keyed by id, replaces
    embedded node objects inside their parents' data with
    ``{"__type", "__id"}`` stubs, then scans each node's serialized
    data for stubs and records one relationship per stub in
    ``self._relationships``.
    """
    def to_id(d):
        # Replace a node object with a lightweight id stub; any other
        # value passes through unchanged.
        if issubclass(type(d), CoreNodeClass):
            return {
                "__type": type(d).__name__,
                "__id": d.id
            }
        return d

    def swap_with_id(d):
        # Recursively rewrite dicts/lists in place, stubbing embedded
        # nodes and registering the node objects themselves.
        if issubclass(type(d), dict) or type(d) == dict:
            for idx, val in d.items():
                d[idx] = to_id(val)
                swap_with_id(val)
        elif issubclass(type(d), list) or type(d) == list:
            for idx, val in enumerate(d):
                d[idx] = to_id(val)
                swap_with_id(val)
        elif issubclass(type(d), CoreNodeClass):
            self._nodes[d.id] = d
            d_data = d.get_data()
            if type(d_data) == tuple:
                # some ugliness to bypass issues with the immutable nature of tuples
                d.set_data(tuple(to_id(list(d_data))))
            else:
                d.set_data(to_id(d_data))
            swap_with_id(d_data)

    swap_with_id([self.root_node] + self.root_node.children())
    # Every node is now registered and stubbed; turn each stub found in
    # a node's data into a relationship object.
    for k, v in self._nodes.items():
        data = v.get_data()
        # Round-trip through JSON so ObjectPath sees a plain structure.
        tree = Tree(ujson.loads(ujson.dumps(data)))
        if hasattr(tree, "data"):
            for ref in list(
                    tree.execute('$..*[@.__type and @.__id]')):
                rel_type = EntityClassGenerator(
                    RelationshipBaseClass,
                    VersionAwareEntity).create(self._rel_type)
                self._relationships.append(
                    rel_type(node_1=self._nodes[v.id],
                             node_2=self._nodes[ref["__id"]],
                             rel_type=self._rel_type,
                             protection=Protection.PRESERVE))
def is_s3_access_key_set(ambari_url, user, password, s3_access_key_set):
    """Check every core-site config version for the expected S3 access key.

    Returns False when any core-site configuration lacks
    'fs.s3a.access.key' or holds a different value; True otherwise
    (including when no core-site configuration is found).
    """
    cluster_name = get_cluster_name(ambari_url, user, password)
    url = ambari_url + '/api/v1/clusters/{0}/configurations/service_config_versions'.format(
        cluster_name)
    response = requests.get(url,
                            auth=(user, password),
                            headers={"X-Requested-By": "ambari"})
    from objectpath import Tree
    tree = Tree(response.json())
    for config in tree.execute("$..configurations.*[@.type is 'core-site']"):
        properties = config['properties']
        if 'fs.s3a.access.key' not in properties:
            return False
        if s3_access_key_set != properties['fs.s3a.access.key']:
            return False
    return True
def H_update_at(ctx):
    """Update object at path, using data read from srcfile(s).

    Loads the destination document, optionally narrows to the object
    selected by the ObjectPath expression in ``ctx.opts.args.expr``
    (which must match exactly one object), deep-merges every source
    file into that object in place, then writes the whole document
    back to the destination.
    """
    if not ctx.opts.args.srcfiles:
        return
    updatefile = get_dest(ctx, 'r')
    # `data` stays bound to the whole document; `o` is the merge target
    # (initially the same object).
    data = o = load_data( ctx.opts.flags.output_format, updatefile, ctx )
    #if ctx.opts.args.pathexpr:
    #o = data_at_path(ctx, None, data)
    if ctx.opts.args.expr:
        q = Tree(data)
        assert q.data
        o = q.execute( ctx.opts.args.expr )
        if isinstance(o, types.GeneratorType):
            # The expression must select exactly one object.
            r = list(o)
            assert len(r) == 1, r
            o = r[0]
            #r = [ stdout_data( s, ctx, outf=sys.stdout) for s in o ]
            #print(r)
    for src in ctx.opts.args.srcfiles:
        # Pick the format from the file extension, falling back to the
        # CLI input-format flag.
        fmt = get_format_for_fileext(src) or ctx.opts.flags.input_format
        mdata = load_data( fmt, open_file( src, 'in', ctx=ctx ), ctx )
        # Merge in place: `o` aliases part of `data`, so `data` below
        # reflects the update.
        deep_update([o, mdata], ctx)
    updatefile = get_dest(ctx, 'w+')
    return stdout_data( data, ctx, outf=updatefile )
def collect(self):
    """Yield RDS enhanced-monitoring counters for each configured DB.

    Pulls the newest CloudWatch Logs event per log stream, emits an
    uptime counter plus one counter per engine-specific metric path.
    """
    config = self._config
    metrics = self._metrics
    for stream_name in config['db_resources']:
        response = self.client.get_log_events(logGroupName='RDSOSMetrics',
                                              limit=1,
                                              logStreamName=stream_name)
        message = json.loads(response['events'][0]['message'])
        document = Tree(message)
        instance_id = message['instanceID']
        engine = message['engine']
        # Parse uptime to a number and produce a metric
        logging.info(message['uptime'])
        uptime_seconds = self.uptime_to_num(message['uptime'])
        uptime_counter = CounterMetricFamily('rds_enhanced_uptime',
                                             'RDS uptime in seconds',
                                             labels=['db', 'engine'])
        uptime_counter.add_metric([instance_id, engine], uptime_seconds)
        yield uptime_counter
        logging.info(instance_id)
        for spec in metrics['metrics'][engine]:
            description = spec.get('description', '')
            path = spec['path']
            value = document.execute(path)
            logging.info("metric_name: {}, value for '{}' : {}".format(
                spec['name'], path, value))
            counter = CounterMetricFamily(spec['name'],
                                          description,
                                          labels=['db', 'engine'])
            counter.add_metric([instance_id, engine], value)
            yield counter
("bright", "dark gray", "light gray", ("bold", "standout")), ("buttn", "black", "dark cyan"), ("buttnf", "white", "dark blue", "bold"), ] def show_key_or_exit(key): if key in ("f8"): raise urwid.ExitMainLoop() frame.footer = urwid.AttrWrap(urwid.Text([u"Pressed: ", key.__repr__()]), "header") text_header = u"Ƹ̵̡❀ Termblr ❀̵̨̄Ʒ\n(up and down arrows to scroll, F8 to quit)" # [urwid.Text(post.format(**i), align='center') for i in tree.execute("$.posts")] listbox_content = [[urwid.Text(post.format(**i), align="center"), urwid.Divider()] for i in tree.execute("$.posts")] listbox_content = [i for s in listbox_content for i in s] header = urwid.AttrWrap(urwid.Text(text_header, align="center"), "header") listbox = urwid.ListBox(urwid.SimpleListWalker(listbox_content)) frame = urwid.Frame(urwid.AttrWrap(listbox, "body"), header=header) loop = urwid.MainLoop( widget=frame, palette=palette, screen=urwid.raw_display.Screen(), unhandled_input=show_key_or_exit, handle_mouse=False, ) loop.run()
def main():
    """Entry point for the ObjectPath CLI.

    Parses command line options, loads a JSON document from a URL or a
    file (falling back to an empty object), and either executes a single
    expression (-e) and prints the JSON result, or drops into an
    interactive read-eval-print shell.
    """
    parser = argparse.ArgumentParser(description='Command line options')
    parser.add_argument(
        '-u', '--url', dest='URL', help='URL containing JSON document.'
    )
    # parser.add_argument('-xml', dest='xml', help='[EXPERIMENTAL] Expect XML input.',action='store_true')
    parser.add_argument(
        '-d', '--debug', dest='debug', help='Debbuging on/off.', action='store_true'
    )
    parser.add_argument(
        '-p', '--profile', dest='profile', help='Profiling on/off.', action='store_true'
    )
    parser.add_argument(
        '-e', '--expr', dest='expr',
        help='Expression/query to execute on file, print on stdout and exit.'
    )
    parser.add_argument('file', metavar='FILE', nargs="?", help='File name')
    args = parser.parse_args()
    a = {}  # options dict handed to Tree (carries the debug flag)
    expr = args.expr
    if not expr:
        # Interactive mode: show the banner.
        print(
            bold("ObjectPath interactive shell") + "\n" + bold("ctrl+c")
            + " to exit, documentation at "
            + const("http://adriank.github.io/ObjectPath") + ".\n"
        )
    if args.debug:
        a["debug"] = True
    if args.profile:
        # guppy is optional; heap profiling is silently skipped without it.
        try:
            from guppy import hpy
        except:
            pass
    File = args.file
    # if args.xml:
    #     from utils.xmlextras import xml2tree
    src = False
    if args.URL:
        # Python 2/3 compatible urllib import.
        if sys.version_info[0] >= 3:
            from urllib.request import Request, build_opener  # pylint: disable=E0611
        else:
            from urllib2 import Request, build_opener
        request = Request(args.URL)
        opener = build_opener()
        request.add_header('User-Agent', 'ObjectPath/1.0 +http://objectpath.org/')
        src = opener.open(request)
    elif File:
        src = open(File, "r")
    if not src:
        if not expr:
            print(
                "JSON document source not specified. Working with an empty object {}."
            )
        tree = Tree({}, a)
    else:
        if not expr:
            sys.stdout.write(
                "Loading JSON document from " + str(args.URL or File) + "..."
            )
            sys.stdout.flush()
        # if args.xml:
        #     tree=Tree(json.loads(json.dumps(xml2tree(src))),a)
        # else:
        tree = Tree(json.load(src), a)
        if not expr:
            print(" " + bold("done") + ".")
    if expr:
        # One-shot mode: run the expression, print JSON, and return.
        if args.profile:
            import cProfile, pstats, StringIO
            pr = cProfile.Profile()
            pr.enable()
        try:
            ret = tree.execute(expr)
        except Exception as e:
            print(e.__class__.__name__ + ": " + str(e))
            exit(1)
        if type(ret) in ITER_TYPES:
            ret = list(ret)
        print(json.dumps(ret))
        if args.profile:
            pr.disable()
            s = StringIO.StringIO()
            sortby = 'cumulative'
            ps = pstats.Stats(pr, stream=s).sort_stats(sortby)
            ps.print_stats()
            print(s.getvalue())
        return
    # Interactive REPL; ctrl+c leaves the loop.
    try:
        while True:
            limitResult = 5  # default number of results displayed per query
            try:
                if sys.version >= '3':
                    r = input(">>> ")
                else:
                    r = raw_input(">>> ")
                # An "all" prefix lifts the result-count limit.
                if r.startswith("all"):
                    limitResult = -1
                    r = tree.execute(r[3:].strip())
                else:
                    r = tree.execute(r)
                # python 3 raises error here - unicode is not a proper type there
                try:
                    if type(r) is unicode:
                        r = r.encode("utf8")
                except NameError:
                    pass
                print(printJSON(r, length=limitResult))
                if args.profile:
                    h = hpy()
                    print(h.heap())
            except Exception as e:
                print(e)
    except KeyboardInterrupt:
        pass
    # new line at the end forces command prompt to apear at left
    print(bold("\nbye!"))
rest_host = str(args.app_host) rest_port = int(args.app_port) rest_user = str(args.user) rest_user_password = str(args.password) url = "http://" + str(rest_host) + ":" + str(rest_port) + str(endpoint) # params = {} try: rest_result, time_delta = get_url(url, rest_user, rest_user_password) except Exception as err: sys.exit('-6') for key in values_name: tree = Tree(rest_result) path = str(values_name[key]) path = path.format(param1) value = tree.execute(path) # pprint(rest_result) if value is None: sys.exit('-3') if isinstance(value, (unicode, int, float)): value = str(value) if not isinstance(value, str): value = list(value) if len(value) > 1: sys.exit('-2') elif len(value) < 1: print key sys.exit('-4')
def main():
    """Run a series of ObjectPath queries (and equivalent hand-rolled
    filters) against the GioMovies topic-map JSON, timing each one."""
    with open("files/GioMovies.json", "r") as file:
        data = json.load(file)
    giomovies = Tree(data)
    # 1. Which movie genres exist, without repetition?
    clock = time.time()
    q1 = giomovies.execute('$..topic[@.instanceOf.topicRef.href is "#Genero"].id')
    print("1: Gêneros: %s" % list(q1))
    print("em ", time.time() - clock, " segundos\n")
    # 2. Titles of movies produced in 2000, sorted alphabetically.
    clock = time.time()
    q2 = giomovies.execute("sort($..topic[@.id in array($..association['#id_2000' in @.member..href]"
                           ".member[@[0]]..href[replace(@, '#', '')])]..baseNameString)")
    print("2: Filmes 2000: %s" % list(q2))
    print("em ", time.time() - clock, " segundos\n")
    # 3. English titles of movies whose synopsis contains the word "especial".
    clock = time.time()
    q3 = giomovies.execute("$..topic[count(@..occurrence['#sinopse' in @..href].resourceData['especial' in @]) > "
                           "0]..occurrence['#ingles' in @..href].resourceData")
    print("3: Filmes 'Especial': %s" % list(q3))
    print("em ", time.time() - clock, " segundos\n")
    # 4. Websites of movies of type "thriller".
    clock = time.time()
    q4 = giomovies.execute("$..topic[@.id in array($..association['#thriller' in @.member..href]"
                           ".member[@[0]]..href[replace(@, '#', '')])]"
                           "..occurrence['#site' in @..href]..resourceRef.href")
    print("4: Filmes Thriller: %s" % list(q4))
    print("em ", time.time() - clock, " segundos\n")
    # 5. How many movies have more than 3 supporting-cast actors?
    clock = time.time()
    q5 = giomovies.execute("$..topic[count(@..occurrence['#elencoApoio' in @..href])>3]..baseNameString")
    print("5: 3+ Elenco Apoio: %s" % len(list(q5)))
    print("em ", time.time() - clock, " segundos\n")
    # 6. IDs of movies whose synopsis mentions a cast member's name.
    # Extract the synopsis text (first '#sinopse'-scoped occurrence).
    sino = lambda ls: \
        list(map(lambda x: x['resourceData'],
                 list(filter(lambda oc: ('scope' in oc and 'topicRef' in oc['scope'] and
                                         oc['scope']['topicRef']['href'] == '#sinopse'), ls))))[0]
    # True when the occurrence list carries a synopsis at all.
    bsino = lambda ls: type(ls) == list and len(list(filter(lambda oc: 'scope' in oc and 'topicRef' in oc['scope'] and
                                                            oc['scope']['topicRef']['href'] == '#sinopse', ls))) > 0
    # Supporting-cast names ('#elencoApoio'-scoped occurrences).
    apoio = lambda ls: list(map(lambda x: x['resourceData'], list(
        filter(lambda oc: ('scope' in oc and 'topicRef' in oc['scope'] and
                           oc['scope']['topicRef']['href'] == '#elencoApoio'), ls))))
    # First member href of an association that is not the movie itself.
    _elenco = lambda x, id: next(iter(list(map(lambda el: el['topicRef']['href'], list(
        filter(lambda el: el['topicRef']['href'].replace('#', '') != id, x))))), '')
    # Resolve a topic id to its base name string.
    _name = lambda x: list(filter(lambda top: top['id'] == x, data['topicMap']['topic']))[0]['baseName'][
        'baseNameString']
    # Main-cast names for the movie with the given id.
    elenco = lambda id: list(map(lambda el: _name(_elenco(el['member'], id).replace('#', '')),
                                 list(filter(lambda mb: mb['instanceOf']['topicRef']['href'] == '#filme-elenco' and
                                             any(memb['topicRef']['href'].replace('#', '') == id
                                                 for memb in mb['member']),
                                             data['topicMap']['association']))))
    q6 = []
    clock = time.time()
    for x in list(filter(lambda y: 'occurrence' in y and bsino(y['occurrence']), data['topicMap']['topic'])):
        if any(ator in sino(x['occurrence']) for ator in apoio(x['occurrence'])) or any(
                ator in sino(x['occurrence']) for ator in elenco(x['id'])):
            q6.append(x['id'])
    print("6: Sinopse com ator: %s" % q6)
    print("em ", time.time() - clock, " segundos\n")
    # 3 (redux): English titles for synopses containing the exact word
    # 'especial', computed with plain Python instead of ObjectPath.
    q3 = []
    benglish = lambda ls: type(ls) == list and len(list(filter(lambda oc: 'scope' in oc and 'topicRef' in oc['scope'] and
                                                               oc['scope']['topicRef']['href'] == '#ingles', ls))) > 0
    english = lambda ls: \
        list(map(lambda x: x['resourceData'],
                 list(filter(lambda oc: ('scope' in oc and 'topicRef' in oc['scope'] and
                                         oc['scope']['topicRef']['href'] == '#ingles'), ls))))[0]
    clock = time.time()
    for x in list(filter(lambda y: 'occurrence' in y and bsino(y['occurrence']), data['topicMap']['topic'])):
        if re.search(r'\b(especial)\b', sino(x['occurrence'])) and benglish(x['occurrence']):
            q3.append(english(x['occurrence']))
    print("3: Sinopse com 'especial': %s" % q3)
    print("em ", time.time() - clock, " segundos\n")
def collect(self):
    """Scrape a Dropwizard-style metrics JSON endpoint and yield
    Prometheus metric families.

    Two metric types are supported per config entry:
      * "gauge"  -- every float-valued attribute under the path becomes
        its own GaugeMetricFamily;
      * "meters" -- each attribute's .count is emitted as a
        SummaryMetricFamily, with suffix/label derived from the
        attribute name (exceptions, log levels, HTTP responses).
    """
    config = self._config
    info_request = urllib2.Request(config['json_data_url'])
    # Optional basic-auth and Host header overrides from config.
    if config['basic_auth_user'] != "":
        info_request.add_header(
            "Authorization",
            "Basic %s" % base64.standard_b64encode('%s:%s' %
                                                   (config['basic_auth_user'],
                                                    config['basic_auth_password'])))
    if config['host'] != "":
        info_request.add_header("Host", config['host'])
    result = json.loads(urllib2.urlopen(info_request).read())
    result_tree = Tree(result)
    for metric_config in config['metrics']:
        metric_path = metric_config['path']
        metric_type = metric_config['type']
        # The path query yields the attribute names available under it.
        values = result_tree.execute(metric_path)
        if metric_type == "gauge":
            for current_gauge_metric_attribute in tuple(values):
                # Fetch this attribute's .value; only floats are exported.
                current_gauge_metric_attribute_value = result_tree.execute(
                    metric_path + ".*['" + current_gauge_metric_attribute +
                    "'].value")
                if isinstance(current_gauge_metric_attribute_value, float):
                    # Normalise the attribute name into a metric suffix.
                    metric_name = "{}_{}".format(
                        config['metric_name_prefix'],
                        current_gauge_metric_attribute.replace(
                            '.', '_').replace('-', '_').lower())
                    metric = GaugeMetricFamily(
                        metric_name, '',
                        value=current_gauge_metric_attribute_value)
                    yield metric
        if metric_type == "meters":
            # First pass: collect every attribute's count.
            exception_count_map = {}
            for current_meters_metric_attribute in tuple(values):
                count_value = result_tree.execute(
                    metric_path + ".*['" + current_meters_metric_attribute +
                    "'].count")
                exception_count_map[
                    current_meters_metric_attribute] = count_value
            # Second pass: derive suffix, label name, and label value per
            # attribute family, then emit a summary for each.
            for key in exception_count_map.keys():
                suffix = key.replace('.', '_').replace('-', '_').lower()
                label = "type"
                formatted_key = key
                if ".exception" in key:
                    suffix = "exceptions"
                    label = "exceptionType"
                    formatted_key = key.replace(".exception", "")
                elif "metrics." in key:
                    suffix = "log"
                    label = "level"
                    formatted_key = key.replace("metrics.", "")
                elif "-responses" in key:
                    suffix = "response"
                    label = "status"
                    formatted_key = key.replace("-responses", "").replace(
                        "org.eclipse.jetty.webapp.WebAppContext.", "")
                metric_name = "{}_{}".format(config['metric_name_prefix'],
                                             suffix)
                metric = SummaryMetricFamily(metric_name, '', labels=[label])
                metric.add_metric([formatted_key],
                                  count_value=exception_count_map.get(key),
                                  sum_value=exception_count_map.get(key))
                yield metric
break else: resp = SmartAPI.actions.query_get.call(**kwargs) res = {} for api in itertools.chain.from_iterable( [get_all(q='openapi:3'), get_all(q='swagger:2')]): # Get it data = read_spec(api) # Parse it root = Tree(data) # Look for metrics answers = {} for metric in metrics: matches = root.execute(metric['query']) results = [] ratio = None if matches != None: matches = list(itertools.chain(matches)) results = '; '.join([e.strip() for e in matches]).strip() try: ratio = get_Ratio( root, metric['ratio'][0], metric['ratio'][1], metric['ratio'][2], ) except: pass