def tips_generator(user_data, tips=None):
    """Build the tip feed for a user.

    Combines the default tip pool (or an explicit *tips* list) with tips
    derived from the user's own data, filters and cleans them against the
    user's profile, enriches and sorts them, and returns a summary dict
    with ``items`` and ``total``.
    """
    candidates = tips_pool if tips is None else tips

    # Tips sourced from the user's own data are appended to the pool.
    extra = get_tips_from_user_data(user_data)
    if extra:
        candidates = candidates + extra

    # Only expose personal data to the filters when the user opted in.
    profile = Tree(user_data['data'] if user_data['optin'] else {})

    selected = [t for t in candidates if tip_filter(t, profile)]
    selected = [clean_tip(t) for t in selected]
    for t in selected:
        enrich_tip(t)
    selected.sort(key=lambda t: t['priority'], reverse=True)

    # if optin is on, only show personalised tips
    if user_data['optin']:
        selected = [t for t in selected if t['isPersonalized']]

    return {
        "items": selected,
        "total": len(selected),
    }
def collect(self):
    """Prometheus collector: fetch each configured JSON endpoint and emit
    one gauge per endpoint, labelled with the configured tag values.

    Endpoints that fail (network error, bad JSON, missing path) are
    logged and skipped so one broken endpoint does not abort the scrape.
    """
    config = self._config
    endpoints = config['json_data_urls']
    for endpoint in endpoints:
        try:
            result = json.loads(
                urllib2.urlopen(endpoint['url'], timeout=10).read())
            result_tree = Tree(result)
            labels = ['instance_id']
            values = [endpoint['label']]
            # Each configured tag contributes one extra label, resolved
            # via an ObjectPath query into the fetched document.
            for tag in config['tags']:
                tag_name = tag['name']
                tag_path = tag['path']
                value = result_tree.execute(tag_path)
                logging.info("tag_name: {}, value for '{}' : {}".format(
                    tag_name, tag_path, value))
                labels.append(tag_name)
                values.append(value)
            metric = GaugeMetricFamily(config['metric_name'],
                                       "spring boot info",
                                       labels=labels)
            # Constant sample value 1: this is an "info"-style metric.
            metric.add_metric(values, 1)
            yield metric
        except Exception:
            # Narrowed from a bare `except:` so SystemExit and
            # KeyboardInterrupt still propagate; scrape stays best-effort.
            logging.info("Could not fetch details for %s", endpoint['label'])
def _get_details_from_resource(self, resource):
    """Return the resource's detail descriptions, most recent first."""
    resource.refresh()
    snapshot = resource.entries.copy()
    matches = Tree(snapshot).execute('$..*["details"]["description"]')
    # The query yields oldest-first; reverse for newest-first ordering.
    return list(matches)[::-1]
def collect(self):
    """Prometheus collector: for each endpoint, emit the metrics whose
    configured ``kind`` matches the endpoint's kind.

    Fix: the JSON document URL depends only on the metric group
    (``metric_config['jsonpath']``), so it is now fetched once per group
    instead of being re-downloaded for every individual metric.
    """
    config = self._config
    endpoints = config['json_data_urls']
    for endpoint in endpoints:
        base_url = endpoint['url']
        for metric_config in config['metrics']:
            if endpoint['kind'] != metric_config['kind']:
                continue
            # One fetch per (endpoint, metric group) pair.
            result = json.loads(
                urllib2.urlopen('{}{}'.format(base_url,
                                              metric_config['jsonpath']),
                                timeout=10).read())
            result_tree = Tree(result)
            for mymetric in metric_config['metric']:
                metric_name = "{}_{}".format(metric_config['prefix'],
                                             mymetric['name'])
                metric_description = mymetric.get('description', '')
                metric_path = mymetric['path']
                value = result_tree.execute(metric_path)
                logging.debug(
                    "metric_name: {}, value for '{}' : {}".format(
                        metric_name, metric_path, value))
                metric = GaugeMetricFamily(
                    metric_name,
                    metric_description,
                    labels=["environment", "app", "kind"])
                metric.add_metric(
                    [endpoint['env'], endpoint['app'], endpoint['kind']],
                    value=str(value))
                yield metric
def collect(self):
    """Fetch the configured JSON document once per scrape and yield one
    gauge per configured metric, carrying the metric's own labels
    followed by the global labels."""
    result = requests.get(self._config['json_data_url'], timeout=10).json()
    result_tree = Tree(result)
    global_keys, global_values = self.parser_label(
        self._config.get('global_labels', {}))
    name_prefix = self._config['metric_name_prefix']
    for metric_config in self._config['metrics']:
        metric_name = "{}_{}".format(name_prefix, metric_config['name'])
        metric_description = metric_config.get('description', '')
        metric_path = metric_config['path']
        label_keys, label_values = self.parser_label(
            metric_config.get('labels', {}))
        label_keys.extend(global_keys)
        label_values.extend(global_values)
        logging.debug("label_keys: {}, label_values: {}".format(
            label_keys, label_values))
        value = result_tree.execute(metric_path)
        logging.debug("metric_name: {}, value for '{}' : {}".format(
            metric_name, metric_path, value))
        gauge = GaugeMetricFamily(metric_name,
                                  metric_description,
                                  labels=label_keys)
        # Label values must be strings for the client library.
        gauge.add_metric(tuple(str(v) for v in label_values), value)
        yield gauge
def _get_details_from_resource(self):
    """Return detail descriptions from the current device state,
    most recent first."""
    resource = self.read_current_from_device()
    snapshot = resource['entries'].copy()
    matches = Tree(snapshot).execute('$..*["details"]["description"]')
    # Reverse so the newest entry comes first.
    return list(matches)[::-1]
def json_xpath(param, json_path):
    """Evaluate one or more ObjectPath expressions against *param*.

    *json_path* may be a single expression string or a list of them; all
    truthy results are concatenated into one flat list.
    """
    paths = [json_path] if isinstance(json_path, str) else json_path
    tree = Tree(param)
    collected = []
    for expr in paths:
        hits = tree.execute(expr)
        if hits:
            collected.extend(hits)
    return collected
def my_response2(arg1, arg2):
    """Print both arguments and return a canned 200 response.

    Also exercises the third-party objectpath library bundled in the
    layer by querying the response it just built.
    """
    print(arg1, arg2)
    response = {
        "statusCode": 200,
        "body": json.dumps({"message": "bye world"}),
    }
    # Using a third party lib (objectpath) inside my layer functions
    tree = Tree(response)
    print(tree.execute('$.statusCode'))
    return response
def _get_details_from_resource(self):
    """Return detail descriptions for the current resource, newest first.

    Raises:
        F5ModuleError: when the optional ``objectpath`` dependency is
        not installed.
    """
    resource = self.read_current_from_device()
    stats = resource['entries'].copy()
    # Guard clause instead of if/else: bail out before querying.
    if not HAS_OBJPATH:
        raise F5ModuleError(
            "objectpath module required, install objectpath module to continue. "
        )
    tree = Tree(stats)
    descriptions = list(tree.execute('$..*["details"]["description"]'))
    return descriptions[::-1]
def arrayOfBookmarks(data):
    """Flatten a bookmarks tree and keep only bookmark-like nodes.

    A node qualifies when it carries every key in the expected set; each
    match is reduced to exactly those keys.
    """
    wanted = ("name", "url", "date_added", "id", "meta_info")
    flattened = list(Tree(data).execute('$..*'))  # flat the tree
    results = []
    for node in flattened:
        if all(key in node for key in wanted):
            # Keep only the expected keys from the matching node.
            results.append({key: node[key] for key in wanted})
    return results
def H_objectpath(ctx):
    """Load a document from the context's input stream, run the
    ObjectPath expression in ``ctx.opts.args.expr`` against it, and
    write the result(s) to the output stream.
    """
    infile, outfile = get_src_dest_defaults(ctx)
    data = load_data( ctx.opts.flags.input_format, infile, ctx )
    assert data
    q = Tree(data)
    assert q.data
    o = q.execute( ctx.opts.args.expr )
    if isinstance(o, types.GeneratorType):
        # A generator means multiple results: emit each one, stopping
        # early if stdout_data returns a truthy (error) status.
        for s in o:
            v = stdout_data( s, ctx, outf=outfile )
            if v:
                return v
    else:
        # Single result: emit it directly.
        return stdout_data( o, ctx, outf=outfile )
def __init__(self, request: Request):
    """
    Initialize attribute provider

    :param request: authorization request object
    """
    # One ObjectPath tree per access element, used for attribute lookups.
    self._subject_tree = Tree(request._subject)
    self._resource_tree = Tree(request._resource)
    self._action_tree = Tree(request._action)
    self._context_tree = Tree(request._context)
    # Cache of attribute location and value pairs per access element
    # used for quick attribute value retrieval.
    self._attribute_values_cache = {
        element: {} for element in ("subject", "resource", "action", "context")
    }
def assess_dockstore_obj(dockstore_obj):
    """Given a dockstore object from the API, assess it for its fairness.

    For each entry in the module-level ``metrics`` list, either record a
    fixed answer (no query configured), or evaluate the metric's
    ObjectPath query and score it — by ratio when a ratio spec is
    available, otherwise by pattern-matching the joined results.

    Returns a dict keyed by metric description.
    """
    root = Tree(dockstore_obj)
    print('Performing assessment...')
    answers = {}
    for metric in metrics:
        if metric.get('query') is None:
            # No query: the metric carries its own fixed answer.
            answers[metric['desc']] = {
                'metric': metric.get('metric', ''),
                'answer': metric.get('answer', 1.0),
                'comment': metric['desc'],
            }
        else:
            matches = root.execute(metric['query'])
            results = []
            ratio = None
            if matches is not None:  # was `!= None`
                matches = list(itertools.chain(matches))
                results = '; '.join([str(e).strip() for e in matches]).strip()
                try:
                    ratio = get_ratio(
                        root,
                        metric['ratio'][0],
                        metric['ratio'][1],
                        metric['ratio'][2],
                    )
                except Exception:
                    # Narrowed from a bare except; a missing/invalid
                    # 'ratio' spec simply falls back to pattern scoring.
                    pass
            if ratio is None:  # was `== None`
                # Pattern-based score: 1.0 when results exist and match.
                answers[metric['desc']] = {
                    'metric': metric.get('metric', ''),
                    'answer': 1.0 if len(results) > 0
                    and metric['pattern'].match(results) else 0.0,
                    'comment': str(results),
                }
            else:
                answers[metric['desc']] = {
                    'metric': metric.get('metric', ''),
                    'comment': metric['desc'],
                }
                answers[metric['desc']]['answer'] = ratio
    return answers
def children(self):
    """Return child nodes parsed from this node's serialized JSON form.

    Scans the whole document for dicts carrying both the serialized
    'type' and 'object' fields, rebuilds each as a node via ``from_str``,
    and excludes the entry that describes this node itself.
    """
    tree = Tree(loads(str(self)))
    # Field names as they appear in the serialized representation.
    type_field = NodeBaseClass.serialize('type')
    object_field = NodeBaseClass.serialize('object')
    id_field = NodeBaseClass.serialize('id')
    return [
        type(self).from_str(
            dumps(d[object_field]),
            # A missing type field falls back to node_type=None.
            node_type=d[type_field] if type_field in d.keys() else None)
        # Match every dict holding both the type and object markers...
        for d in list(
            tree.execute('$..*[@.' + type_field + ' and @.' +
                         object_field + ']'))
        # ...but skip the wrapper that refers to this node itself.
        if d[object_field][id_field] != self.id
    ]
def call_api(self, keyword):
    """Search Twitter for *keyword* and extract the configured features
    from each returned tweet.

    Returns a list containing one feature-value list per tweet.
    """
    api = tweepy.API(self.auth)
    s = api.search(q=keyword)
    feature_data = []
    for tweet in s:
        # Bug fix: the per-tweet list must be created inside the loop.
        # Previously a single shared list accumulated features across
        # ALL tweets and the same object was appended by reference,
        # so every entry in feature_data ended up identical.
        feature_value_list = []
        tree = Tree(tweet._json)
        feature_list = FeatureList('twitter')
        for feature in feature_list:
            f_value = tree.execute(feature)
            if feature == '$.source':
                # The source field is an HTML anchor; reduce to text.
                f_value = TextUtil.text_href(f_value)
            feature_value_list.append(f_value)
        feature_data.append(feature_value_list)
    return feature_data
def perform(kls, inputs):
    """Fetch the first target URL whose JSON-LD can be extracted and
    score the metrics in ``to_schema`` against it.

    Returns a dict of metric answers; every metric is empty (or zero)
    when no URL yields parseable JSON-LD.
    """
    urls = inputs['target:url']
    # Bug fix: initialise before the loop — previously `data` (and
    # `tree`) were unbound when `urls` was empty, raising NameError.
    data = None
    tree = None
    for url in urls:
        try:
            r = requests.get(url)
            base_url = get_base_url(r.text, r.url)
            data = extruct.extract(r.text,
                                   base_url=base_url,
                                   syntaxes=['json-ld'])['json-ld']
            tree = Tree(data)
            break
        except Exception:
            # Narrowed from a bare except; fall through to the next URL.
            data = None
    return dict(
        **{
            'metric:30': {
                'answer': 1.0 if data else 0.0,
                'comment': 'jsonld was found and properly parsed'
                if data else 'jsonld could not be parsed',
            },
        },
        **{
            key: {
                'answer': 1.0 if attr else 0.0,
                'comment': attr if attr else 'json-ld %s not found' %
                (' '.join(to_schema[key])),
            } if key.startswith('metric:') else attr
            for key, attr in zip(
                to_schema.keys(),
                map(bind(get_json_ld_attr, tree), to_schema.values()))
        } if data else {key: {}
                        for key in to_schema.keys()})
def get_value_bydictpath(source_dict, path_in_dict):
    """Look up a value in *source_dict* by path.

    Paths starting with '$' are treated as ObjectPath expressions; a
    single hit is unwrapped, several hits are returned as a list. Any
    other path is evaluated with jsonpath and only the first match is
    returned. On any error the path is reported and None is returned.
    """
    if source_dict is None:
        raise Exception("source is None Type")
    if not isinstance(source_dict, dict):
        return source_dict
    try:
        if path_in_dict.startswith('$'):
            hits = [item for item in Tree(source_dict).execute(path_in_dict)]
            # NOTE(review): an empty result raises IndexError here,
            # which falls into the error path below and returns None —
            # preserved from the original behaviour.
            return hits if len(hits) > 1 else hits[0]
        exp = parse(path_in_dict)
        return exp.find(source_dict)[0].value
    except Exception as e:
        print("path is " + path_in_dict + " it is not correct!", e)
        return None
def collect(self):
    """Fetch the configured JSON document and yield one unlabelled gauge
    per configured metric path."""
    config = self._config
    payload = urllib.request.urlopen(config['json_data_url'],
                                     timeout=10).read()
    result_tree = Tree(json.loads(payload))
    prefix = config['metric_name_prefix']
    for metric_config in config['metrics']:
        metric_name = "{}_{}".format(prefix, metric_config['name'])
        metric_description = metric_config.get('description', '')
        metric_path = metric_config['path']
        value = result_tree.execute(metric_path)
        logging.debug("metric_name: {}, value for '{}' : {}".format(
            metric_name, metric_path, value))
        yield GaugeMetricFamily(metric_name,
                                metric_description,
                                value=value)
def execute(self, expr):
    """Run an ObjectPath expression over the stored nodes and wrap the
    outcome in a new Parser.

    Lazy results (generator/chain/map) are materialised first, and a
    single-element result is unwrapped before wrapping.
    """
    outcome = Tree(self.nodes).execute(expr)
    if not isinstance(outcome, (generator, chain, map)):
        return Parser(None, outcome)
    materialised = list(outcome)
    if len(materialised) == 1:
        return Parser(None, materialised[0])
    return Parser(None, materialised)
def getTree():
    """Return the ObjectPath Tree for the GioMovies file.

    Used for all queries/searches.
    """
    # The `with` block already closes the file on exit; the explicit
    # file.close() the original carried inside it was redundant.
    with open('../files/GioMovies.json', 'r') as file:
        data = json.load(file)
    return Tree(data)
def collect(self):
    """Prometheus collector for an authenticated (HTTP Basic) JSON API.

    Lazily builds a cookie-aware urllib2 opener plus a pre-authorised
    request on first use (and again after any failure), then emits one
    gauge per configured metric with one sample per discovered hostname.
    """
    config = self._config
    # The opener/request pair is shared across scrapes via globals and
    # rebuilt whenever _initHttpClient is set (first run or after error).
    global request
    global cookie_jar
    if self._initHttpClient:
        logging.info("Init http client")
        cookie_jar = cookielib.CookieJar()
        opener = urllib2.build_opener(
            urllib2.HTTPCookieProcessor(cookie_jar))
        urllib2.install_opener(opener)
        request = urllib2.Request(config['json_data_url'])
        # HTTP Basic auth header, built from the configured credentials.
        base64string = base64.b64encode(
            '%s:%s' % (config['username'], config['password']))
        request.add_header("Authorization", "Basic %s" % base64string)
        self._initHttpClient = False
    try:
        response = urllib2.urlopen(request)
        result = json.loads(response.read())
        result_tree = Tree(result)
        # Fetching all the clusters
        hostname_array = list(result_tree.execute('$..hostName'))
        # for hostname in hostname_array:
        for metric_config in config['metrics']:
            # metric_name = '%s_%s{hostname="%s"}' % (config['metric_name_prefix'], metric_config['name'], hostname)
            metric_name = '%s_%s' % (config['metric_name_prefix'],
                                     metric_config['name'])
            metric_description = metric_config.get('description', '')
            metric_path = metric_config['path']
            value = list(result_tree.execute(metric_path))
            logging.debug("metric_name: {}, value for '{}' : {}".format(
                metric_name, metric_path, value))
            metric = GaugeMetricFamily(metric_name,
                                       metric_description,
                                       labels=['hostname'])
            # One sample per host; assumes the metric query yields values
            # in the same order as the hostName query — TODO confirm.
            for ind in range(len(value)):
                metric.add_metric([hostname_array[ind]], value[ind])
            yield metric
    except urllib2.URLError as e:
        # Any failure forces the HTTP client to be rebuilt next scrape.
        logging.error(e.reason)
        self._initHttpClient = True
    except urllib2.HTTPError as e:
        # NOTE(review): HTTPError subclasses URLError, so this branch is
        # unreachable as ordered — confirm intended handler order.
        logging.error(e.code)
        self._initHttpClient = True
def validate_path(path):
    """
    Validate given attribute path satisfies ObjectPath notation.
    Throws ValidationError for invalid path.
    """
    try:
        # Executing against an empty tree parses the expression without
        # needing real data.
        Tree({}).execute(path)
    except Exception as err:
        # Chain the original exception so the underlying parse error
        # stays visible in the traceback.
        raise ValidationError(*err.args) from err
def collect(self):
    """Prometheus collector supporting both a single-endpoint config and
    a multi-endpoint ``endpoints`` list, each with one or many data URLs.

    Yields one gauge per (data URL, metric) pair; a sample gains a 'tag'
    label when its data-URL entry defines a label.
    """
    config = self._config
    # Normalise: either a list under 'endpoints', or the config itself
    # acts as the single endpoint.
    if 'endpoints' in config:
        endpoint_list = config['endpoints']
    else:
        endpoint_list = [config]
    for endpoint_config in endpoint_list:
        # Likewise normalise the URL spec: 'json_data_url' (single bare
        # URL) vs 'json_data_urls' (list of {'url': ..., 'label': ...}).
        if 'json_data_url' in endpoint_config:
            single_data = {"url": endpoint_config['json_data_url']}
            data_url_list = [single_data]
        else:
            data_url_list = endpoint_config['json_data_urls']
        for data_url in data_url_list:
            result = json.loads(
                urlopen(data_url['url'], timeout=10).read())
            result_tree = Tree(result)
            for metric_config in endpoint_config['metrics']:
                metric_name = "{}_{}".format(
                    endpoint_config['metric_name_prefix'],
                    metric_config['name'])
                metric_description = metric_config.get('description', '')
                metric_path = metric_config['path']
                value = result_tree.execute(metric_path)
                if "label" in data_url:
                    # Labelled variant: attach the URL's tag to the sample.
                    logging.debug(
                        "metric_name: {}, tag: {}, value for '{}' : {}".
                        format(metric_name, data_url["label"], metric_path,
                               value))
                    metric = GaugeMetricFamily(metric_name,
                                               metric_description,
                                               labels=['tag'])
                    metric.add_metric(labels=[data_url["label"]],
                                      value=value)
                else:
                    # Unlabelled variant: plain gauge with a fixed value.
                    logging.debug(
                        "metric_name: {}, value for '{}' : {}".format(
                            metric_name, metric_path, value))
                    metric = GaugeMetricFamily(metric_name,
                                               metric_description,
                                               value=value)
                yield metric
def smartbox_setup(request, smartbox_ip):
    """Log in to a Smartbox web UI via Selenium/Chrome, scrape its
    analytics JSON, and collect the services reported as active.

    Appends (service_name, [service_identifier, state]) pairs to the
    module-level ``active_service`` list and returns it.
    NOTE(review): credentials and ``active_service`` are defined outside
    this function — confirm they are configured appropriately.
    """
    # Headless-friendly Chrome options for CI/container environments.
    optionsforchrome = Options()
    optionsforchrome.add_argument('--no-sandbox')
    optionsforchrome.add_argument('--start-maximized')
    optionsforchrome.add_argument('--disable-extensions')
    optionsforchrome.add_argument('--disable-dev-shm-usage')
    optionsforchrome.add_argument('--ignore-certificate-errors')
    service = Service(ChromeDriverManager().install())
    driver = webdriver.Chrome(service=service, options=optionsforchrome)
    SMARTBOX = smartbox_ip
    driver.get(SMARTBOX)
    driver.maximize_window()
    # Fill in and submit the login form.
    driver.find_element(By.NAME, "username").click()
    driver.find_element(By.NAME, "username").send_keys("username")
    driver.find_element(By.NAME, "password").click()
    driver.find_element(By.NAME, "password").send_keys("password")
    driver.find_element(By.XPATH, "//input[@value=\'Login\']").click()
    # Fixed wait for the login to complete before fetching analytics.
    time.sleep(5)
    fieldnames = ['Time']  # NOTE(review): unused here — confirm needed.
    driver.get(smartbox_ip + "/getanalytics.php")
    # The analytics endpoint returns JSON in the page body.
    result = json.loads(driver.find_element(By.TAG_NAME, 'body').text)
    result_tree = Tree(result)
    # Pull parallel lists of service names, identifiers and states.
    service_name_path = "$.webFullStreamInfo.service.serviceName"
    service_name = result_tree.execute(service_name_path)
    service_identifier_path = "$.webFullStreamInfo.service.serviceIdentifier"
    service_identifier = result_tree.execute(service_identifier_path)
    service_state_path = "$.webFullStreamInfo.service.state"
    service_state = result_tree.execute(service_state_path)
    service_name_list = list(service_name)
    service_identifier_list = list(service_identifier)
    service_state_list = list(service_state)
    # Map name -> [identifier, state] by zipping the parallel lists.
    d = {
        z[0]: list(z[1:])
        for z in zip(service_name_list, service_identifier_list,
                     service_state_list)
    }
    for key, value in d.items():
        if value[1] == 'Service is active':
            kv = key, value
            active_service.append(kv)
    driver.quit()
    return active_service
def collect(self):
    """Fetch each configured endpoint's JSON document and yield one
    tag-labelled gauge per configured metric."""
    config = self._config
    prefix = config['metric_name_prefix']
    for endpoint in config['json_data_urls']:
        payload = urllib2.urlopen(endpoint['url'], timeout=10).read()
        result_tree = Tree(json.loads(payload))
        for metric_config in config['metrics']:
            metric_name = "{}_{}".format(prefix, metric_config['name'])
            metric_description = metric_config.get('description', '')
            metric_path = metric_config['path']
            value = result_tree.execute(metric_path)
            logging.debug("metric_name: {}, value for '{}' : {}".format(
                metric_name, metric_path, value))
            gauge = GaugeMetricFamily(metric_name,
                                      metric_description,
                                      labels=["tag"])
            gauge.add_metric([endpoint['label']], value)
            yield gauge
def _flatten(self):
    """Replace embedded node objects in the graph with lightweight
    ``{__type, __id}`` reference stubs, register every node in
    ``self._nodes``, then record one relationship entity per stub found
    in each node's serialized data.
    """
    def to_id(d):
        # Nodes collapse to a reference dict; anything else passes
        # through unchanged.
        if issubclass(type(d), CoreNodeClass):
            return {
                "__type": type(d).__name__,
                "__id": d.id
            }
        return d

    def swap_with_id(d):
        # Recursively walk containers, swapping embedded nodes for
        # reference stubs; visiting a node registers it and flattens
        # its own data too.
        if issubclass(type(d), dict) or type(d) == dict:
            for idx, val in d.items():
                d[idx] = to_id(val)
                swap_with_id(val)
        elif issubclass(type(d), list) or type(d) == list:
            for idx, val in enumerate(d):
                d[idx] = to_id(val)
                swap_with_id(val)
        elif issubclass(type(d), CoreNodeClass):
            self._nodes[d.id] = d
            d_data = d.get_data()
            if type(d_data) == tuple:
                # some ugliness to bypass issues with the immutable nature of tuples
                # NOTE(review): to_id(list(...)) returns the list itself
                # (a list is never a CoreNodeClass), so this effectively
                # just rebuilds the tuple — confirm the intended mapping.
                d.set_data(tuple(to_id(list(d_data))))
            else:
                d.set_data(to_id(d_data))
            swap_with_id(d_data)

    swap_with_id([self.root_node] + self.root_node.children())
    # Second pass: locate every reference stub in each node's data and
    # materialise it as a relationship entity.
    for k, v in self._nodes.items():
        data = v.get_data()
        # Round-trip through JSON yields a plain structure ObjectPath
        # can query.
        tree = Tree(ujson.loads(ujson.dumps(data)))
        if hasattr(tree, "data"):
            for ref in list(
                    tree.execute('$..*[@.__type and @.__id]')):
                rel_type = EntityClassGenerator(RelationshipBaseClass,
                                                VersionAwareEntity).create(self._rel_type)
                self._relationships.append(rel_type(node_1=self._nodes[v.id],
                                                    node_2=self._nodes[ref["__id"]],
                                                    rel_type=self._rel_type,
                                                    protection=Protection.PRESERVE))
def H_update_at(ctx):
    """Update object at path, using data read from srcfile(s)"""
    if not ctx.opts.args.srcfiles:
        return
    updatefile = get_dest(ctx, 'r')
    # `o` is the update target; it starts as the whole document and may
    # be narrowed by the ObjectPath expression below.
    data = o = load_data( ctx.opts.flags.output_format, updatefile, ctx )
    #if ctx.opts.args.pathexpr:
    #o = data_at_path(ctx, None, data)
    if ctx.opts.args.expr:
        q = Tree(data)
        assert q.data
        o = q.execute( ctx.opts.args.expr )
        if isinstance(o, types.GeneratorType):
            # The expression must select exactly one node to update.
            r = list(o)
            assert len(r) == 1, r
            o = r[0]
            #r = [ stdout_data( s, ctx, outf=sys.stdout) for s in o ]
            #print(r)
    # Merge each source document into the selected node in place.
    for src in ctx.opts.args.srcfiles:
        fmt = get_format_for_fileext(src) or ctx.opts.flags.input_format
        mdata = load_data( fmt, open_file( src, 'in', ctx=ctx ), ctx )
        deep_update([o, mdata], ctx)
    # Reopen the destination for writing and emit the mutated document.
    updatefile = get_dest(ctx, 'w+')
    return stdout_data( data, ctx, outf=updatefile )
def collect(self):
    """Collect RDS Enhanced Monitoring metrics from CloudWatch Logs.

    For each configured DB resource, reads the latest event from its
    RDSOSMetrics log stream, emits an uptime counter, then one counter
    per engine-specific metric path configured in ``self._metrics``.
    """
    config = self._config
    metrics = self._metrics
    for db_config in config['db_resources']:
        # Fetch only the newest enhanced-monitoring event for this
        # instance's log stream.
        kwargs = {
            'logGroupName': 'RDSOSMetrics',
            'limit': 1,
            'logStreamName': db_config
        }
        response = self.client.get_log_events(**kwargs)
        message = json.loads(response['events'][0]['message'])
        result_tree = Tree(message)
        instance_id = message['instanceID']
        engine = message['engine']
        # Parse uptime to a number and produce a metric
        logging.info(message['uptime'])
        uptime = self.uptime_to_num(message['uptime'])
        c = CounterMetricFamily('rds_enhanced_uptime',
                                'RDS uptime in seconds',
                                labels=['db', 'engine'])
        c.add_metric([instance_id, engine], uptime)
        yield c
        logging.info(instance_id)
        # Engine-specific metrics, addressed into the event document by
        # ObjectPath.
        for metric_config in metrics['metrics'][engine]:
            metric_description = metric_config.get('description', '')
            metric_path = metric_config['path']
            value = result_tree.execute(metric_path)
            logging.info("metric_name: {}, value for '{}' : {}".format(
                metric_config['name'], metric_path, value))
            c = CounterMetricFamily(metric_config['name'],
                                    metric_description,
                                    labels=['db', 'engine'])
            c.add_metric([instance_id, engine], value)
            yield c
def is_s3_access_key_set(ambari_url, user, password, s3_access_key_set):
    """Check that every core-site service-config version in the cluster
    defines fs.s3a.access.key and that it equals *s3_access_key_set*.

    Returns False on the first missing or mismatching entry, True
    otherwise (including when no core-site entries exist).
    """
    cluster_name = get_cluster_name(ambari_url, user, password)
    url = (ambari_url +
           '/api/v1/clusters/{0}/configurations/service_config_versions'.
           format(cluster_name))
    response = requests.get(url,
                            auth=(user, password),
                            headers={"X-Requested-By": "ambari"})
    from objectpath import Tree
    tree = Tree(response.json())
    core_site_entries = tree.execute(
        "$..configurations.*[@.type is 'core-site']")
    for entry in core_site_entries:
        props = entry['properties']
        if 'fs.s3a.access.key' not in props:
            return False
        if s3_access_key_set != props['fs.s3a.access.key']:
            return False
    return True
def command():
    """Merge values from an input JSON file into a template and write the
    result (plus a pre-merge ``.orig`` copy) to the output path.

    Usage: prog <input.json> <template.json> <output>
    """
    if len(sys.argv) != 4:
        usage(1)
    fin_name = sys.argv[1]
    tmpl_name = sys.argv[2]
    out_name = sys.argv[3]

    # Context managers close every handle; the original leaked both
    # input files and reopened `out` without closing the first handle.
    with open(fin_name, "r") as fin:
        fin_json = json.load(fin)
    fin_objpath = Tree(fin_json)

    with open(tmpl_name, "r") as tmpl:
        tmpl_json = json.load(tmpl)

    # Keep an untouched copy of the template next to the real output.
    with open(out_name + '.orig', "w") as out:
        json.dump(tmpl_json, out, sort_keys=True, indent=4)

    walk(tmpl_json, fin_objpath)

    with open(out_name, "w") as out:
        json.dump(tmpl_json, out, sort_keys=True, indent=4)
def main():
    """Entry point for the ObjectPath CLI.

    With ``-e/--expr`` it evaluates a single expression against the
    document (from ``-u/--url`` or a file argument) and exits; otherwise
    it starts an interactive read-eval-print shell. ``-d`` enables tree
    debugging, ``-p`` profiling/heap inspection.
    """
    parser = argparse.ArgumentParser(description='Command line options')
    parser.add_argument(
        '-u', '--url', dest='URL', help='URL containing JSON document.'
    )
    # parser.add_argument('-xml', dest='xml', help='[EXPERIMENTAL] Expect XML input.',action='store_true')
    parser.add_argument(
        '-d',
        '--debug',
        dest='debug',
        help='Debbuging on/off.',
        action='store_true'
    )
    parser.add_argument(
        '-p',
        '--profile',
        dest='profile',
        help='Profiling on/off.',
        action='store_true'
    )
    parser.add_argument(
        '-e',
        '--expr',
        dest='expr',
        help='Expression/query to execute on file, print on stdout and exit.'
    )
    parser.add_argument('file', metavar='FILE', nargs="?", help='File name')
    args = parser.parse_args()
    # `a` collects Tree options (currently only the debug flag).
    a = {}
    expr = args.expr
    if not expr:
        # Interactive mode: print the shell banner.
        print(
            bold("ObjectPath interactive shell") + "\n" + bold("ctrl+c") +
            " to exit, documentation at " +
            const("http://adriank.github.io/ObjectPath") + ".\n"
        )
    if args.debug:
        a["debug"] = True
    if args.profile:
        # guppy is optional; profiling heap output is skipped if absent.
        try:
            from guppy import hpy
        except:
            pass
    File = args.file
    # if args.xml:
    #     from utils.xmlextras import xml2tree
    # Document source: URL takes precedence over a file argument.
    src = False
    if args.URL:
        if sys.version_info[0] >= 3:
            from urllib.request import Request, build_opener  # pylint: disable=E0611
        else:
            from urllib2 import Request, build_opener
        request = Request(args.URL)
        opener = build_opener()
        request.add_header('User-Agent', 'ObjectPath/1.0 +http://objectpath.org/')
        src = opener.open(request)
    elif File:
        src = open(File, "r")
    if not src:
        # No source: operate on an empty document.
        if not expr:
            print(
                "JSON document source not specified. Working with an empty object {}."
            )
        tree = Tree({}, a)
    else:
        if not expr:
            sys.stdout.write(
                "Loading JSON document from " + str(args.URL or File) + "..."
            )
            sys.stdout.flush()
        # if args.xml:
        #     tree=Tree(json.loads(json.dumps(xml2tree(src))),a)
        # else:
        tree = Tree(json.load(src), a)
        if not expr:
            print(" " + bold("done") + ".")
    if expr:
        # One-shot mode: evaluate, print JSON, optionally profile, exit.
        if args.profile:
            import cProfile, pstats, StringIO
            pr = cProfile.Profile()
            pr.enable()
        try:
            ret = tree.execute(expr)
        except Exception as e:
            print(e.__class__.__name__ + ": " + str(e))
            exit(1)
        if type(ret) in ITER_TYPES:
            # Materialise lazy results so they can be JSON-serialised.
            ret = list(ret)
        print(json.dumps(ret))
        if args.profile:
            pr.disable()
            s = StringIO.StringIO()
            sortby = 'cumulative'
            ps = pstats.Stats(pr, stream=s).sort_stats(sortby)
            ps.print_stats()
            print(s.getvalue())
        return
    # Interactive REPL: loop until ctrl+c.
    try:
        while True:
            # Results are truncated to 5 items unless prefixed with "all".
            limitResult = 5
            try:
                if sys.version >= '3':
                    r = input(">>> ")
                else:
                    r = raw_input(">>> ")
                if r.startswith("all"):
                    limitResult = -1
                    r = tree.execute(r[3:].strip())
                else:
                    r = tree.execute(r)
                # python 3 raises error here - unicode is not a proper type there
                try:
                    if type(r) is unicode:
                        r = r.encode("utf8")
                except NameError:
                    pass
                print(printJSON(r, length=limitResult))
                if args.profile:
                    h = hpy()
                    print(h.heap())
            except Exception as e:
                # Report the error and keep the shell alive.
                print(e)
    except KeyboardInterrupt:
        pass
    # new line at the end forces command prompt to apear at left
    print(bold("\nbye!"))
param1 = args.param1 endpoint = endpoints[conf]['path'] values_name = endpoints[conf]['items'] rest_host = str(args.app_host) rest_port = int(args.app_port) rest_user = str(args.user) rest_user_password = str(args.password) url = "http://" + str(rest_host) + ":" + str(rest_port) + str(endpoint) # params = {} try: rest_result, time_delta = get_url(url, rest_user, rest_user_password) except Exception as err: sys.exit('-6') for key in values_name: tree = Tree(rest_result) path = str(values_name[key]) path = path.format(param1) value = tree.execute(path) # pprint(rest_result) if value is None: sys.exit('-3') if isinstance(value, (unicode, int, float)): value = str(value) if not isinstance(value, str): value = list(value) if len(value) > 1: sys.exit('-2') elif len(value) < 1:
#!/usr/bin/env python # -*- coding: utf-8 -*- from pytumblr import TumblrRestClient from objectpath import Tree import urwid client = TumblrRestClient( consumer_key="iWpkjxQeBaFBpIvHTB0RbP7G5ozicNZ5FQtiMkkoKFkiJ4Cfjb", consumer_secret="JfmKt6EJWOPdNs155kBrVC7AC8YO3V9RVJt73PcuU85E7buGsq", oauth_token="ZoegVG4B9pTAFm3lI86bSueW8KVlr0PKOkxNsh4CqmwIMnLRcE", oauth_secret="AkuR2qM8QNvEm62dQ8JbldxYicQJMvpxdOhfBQPALgsBWLrc7G", ) tree = Tree(client.dashboard(type="text")) post = u"""\ -------- Username: {blog_name} Date: {date} Post type: {type} :: {title} :: {body} Tags: {tags} Notes: {note_count} URL: {post_url} """