def load_social_columns_info(discussion, locale=None):
    """Return the discussion's extra-CSV column definitions.

    Reads the ``extra_csv_data`` preference — a dict mapping a jsonpath
    string to a ``{locale: column_name}`` dict — and returns a list of
    ``(column_name, parsed_jsonpath)`` tuples sorted by column name.
    The name is chosen for *locale* (default: the discussion's main
    locale), falling back to any available translation.

    Returns ``[]`` on any error (deliberate best-effort behaviour).

    Fixes:
    - ``names.itervalues()`` is Python 2 only; ``iter(names.values())``
      works on Python 3 as well.
    - sort now keys on the name only: full-tuple comparison could fall
      through to comparing the (unorderable) parsed jsonpath objects on
      a name tie, raising and silently yielding ``[]``.
    """
    info = discussion.preferences['extra_csv_data']
    assert isinstance(info, dict)
    columns = []
    locale = locale or discussion.main_locale
    try:
        for path, names in info.items():
            path = parse(path)
            # Preferred locale first, otherwise any available translation.
            name = names.get(locale, None) or next(iter(names.values()))
            columns.append((name, path))
        columns.sort(key=lambda col: col[0])
    except Exception:
        return []
    return columns
def search(path: str, data: typing.Union[list, dict], default=_UNSET, smart_unique: bool=True) -> typing.Union[int, float, bool, str, list, dict, None]:
    """
    Look up *path* in *data* via jsonpath.

    When nothing matches: raise NotFoundError if no default was supplied,
    otherwise return *default*. A single match is unwrapped to its bare
    value when *smart_unique* is on; otherwise a list of values is returned.
    """
    matches = parse(path).find(data)
    if not matches:
        if default is _UNSET:
            raise NotFoundError("Can't find by path: {}".format(path))
        return default
    if smart_unique and len(matches) == 1:
        return matches[0].value
    return [found.value for found in matches]
def _read_metric_json(fd, json_path):
    """Load JSON from the open file *fd* and return every value matching *json_path*."""
    document = json.load(fd)
    expression = parse(json_path)
    return [match.value for match in expression.find(document)]
def get_cores(self) -> int:
    """Total vCPU count summed over every compute-flavor instance in the Terraform state."""
    vcpus_expr = parse(
        "resources[?type=openstack_compute_flavor_v2].instances[*].attributes.vcpus"
    )
    return sum(match.value for match in vcpus_expr.find(self.__tf_state))
def __get_guest_passwd(self):
    """Guest password recorded in the hieradata vars of the Terraform state."""
    expr = parse(
        "resources[?name=hieradata].instances[0].attributes.vars.guest_passwd"
    )
    matches = expr.find(self.__tf_state)
    return matches[0].value
def get_instance_count(instance_category):
    # Count the openstack_compute_instance_v2 instances whose resource name
    # equals *instance_category* in the Terraform state.
    # NOTE(review): there is no `self` parameter, yet `self.__tf_state` is
    # referenced — presumably this is a nested function (closure) defined
    # inside a method, so `self` is captured lexically; confirm at the
    # definition site.
    parser = parse(
        f'resources[?type="openstack_compute_instance_v2" & name="{instance_category}"].instances[*]'
    )
    return len(parser.find(self.__tf_state))
def __get_nb_users(self):
    """Number of user accounts recorded in the hieradata vars, as an int."""
    expr = parse(
        "resources[?name=hieradata].instances[0].attributes.vars.nb_users")
    first_match = expr.find(self.__tf_state)[0]
    return int(first_match.value)
def __get_domain(self):
    """Domain part of the hieradata domain_name, i.e. the full name with
    the leading '<cluster_name>.' prefix stripped off."""
    expr = parse(
        "resources[?name=hieradata].instances[0].attributes.vars.domain_name"
    )
    full_domain_name = expr.find(self.__tf_state)[0].value
    prefix_length = len(self.__get_cluster_name()) + 1  # cluster name + '.'
    return full_domain_name[prefix_length:]
def fetch_info(self):
    """Fetch this deployment's manifest and extract the configured info.

    Returns a dict mapping each key of ``self._info_fetcher`` to the list
    of values found in the manifest at that key's jsonpath.

    Fix: ``yaml.load`` without an explicit Loader is unsafe on untrusted
    input, deprecated since PyYAML 5.1 and a hard error in PyYAML >= 6;
    ``yaml.safe_load`` is the correct call for plain-data manifests.
    """
    deployment = self._env.deployment_by_name(self._name)
    manifest = yaml.safe_load(deployment.manifest)
    return {
        key: [match.value for match in parse(path).find(manifest)]
        for key, path in self._info_fetcher.items()
    }
def scrobble():
    """Poll the Icecast status page and scrobble the currently playing
    track of each configured stream to its Last.fm account.

    Relies on module-level globals: `streams`, `status` (status-page URL)
    and the LASTFM_* credential mappings — presumably defined elsewhere in
    this module; confirm before reuse.
    """
    # One authenticated Last.fm network handle per stream.
    network = {}
    for stream in streams:
        network[stream] = pylast.LastFMNetwork(
            api_key=LASTFM_API_KEY,
            api_secret=LASTFM_API_SECRET,
            username=LASTFM_USERNAME[stream],
            password_hash=pylast.md5(LASTFM_PASSWORD[stream]))
        logging.debug(network[stream].get_authenticated_user())
    with requests.Session() as s:
        # Fetch the stream metadata from the status page
        r = s.get(status)
        # Only proceed on a valid response
        if r.status_code == 200:
            json_data = r.json()
            # Loop over the configured streams
            for stream in streams:
                jsonpath_expr = parse(
                    f'$.icestats.source[?server_name =~ "{stream.capitalize()}"].title'
                )
                matches = jsonpath_expr.find(json_data)
                # Artist and title, parsed from an "Artist - Title" string
                artiest, nummer = None, None
                for match in matches:
                    m = re.search(r'(.*?) - (.*)', match.value)
                    if m:
                        artiest = m.group(1).lower()
                        nummer = m.group(2).lower()
                        logging.debug(
                            f'StuBru {stream.capitalize()} - Artiest: {artiest} - Nummer: {nummer}'
                        )
                        break
                # Skip this stream when no song title was found
                if artiest is None and nummer is None:
                    logging.debug(
                        f'StuBru {stream.capitalize()} - Geen songtitel gevonden'
                    )
                    continue
                # Scrobble the track to last.fm
                user = network[stream].get_authenticated_user()
                current = network[stream].get_track(artiest, nummer)
                try:
                    previous = user.get_recent_tracks(1, cacheable=False)[0].track
                except IndexError:
                    # Account has no scrobbles yet: scrobble unconditionally.
                    network[stream].scrobble(artiest, nummer, timestamp=time.time())
                    logging.info(
                        f'Nummer gescrobbled naar StuBru-{stream.capitalize()}: {artiest.capitalize()} - {nummer.capitalize()}'
                    )
                else:
                    # Scrobble only if it differs from the last scrobbled track
                    if current.get_correction() != previous.title:
                        network[stream].scrobble(artiest, nummer, timestamp=time.time())
                        logging.info(
                            f'Nummer gescrobbled naar StuBru-{stream.capitalize()}: {artiest.capitalize()} - {nummer.capitalize()}'
                        )
                    else:
                        logging.debug(f'Geen nieuw nummer...')
                # Update now-playing
                network[stream].update_now_playing(artiest, nummer)
def repr_json(self, json2):
    """Build a flat summary dict for one IFC-like JSON element.

    Extracts common scalar fields, openings, material layers and
    (optionally, when ``self.get_obj`` is set) the shape-representation
    reference. Extraction is strictly best-effort: any field that is
    missing or malformed is silently skipped, as in the original.

    Fix: the nine copy-pasted bare ``except:`` clauses also swallowed
    SystemExit/KeyboardInterrupt; narrowed to ``except Exception`` and
    deduplicated into a table-driven loop.
    """
    def first_value(path):
        # First jsonpath match in *json2*; raises IndexError when absent.
        return parse(path).find(json2)[0].value

    out = {}

    # key in the summary -> jsonpath in the source element
    simple_fields = {
        'name': "$.name",
        'globalId': "$.globalId",
        'contained_in_structure': "$.containedInStructure",
        'unconnected_height': "$.'Unconnected Height'",
        'load_bearing': "$.LoadBearing",
        'area': "$.Area",
        'volume': "$.Volume",
        'base_level_constraint': "$.'Base Constraint'",
        'top_level_constraint': "$.'Top Constraint'",
    }
    for key, path in simple_fields.items():
        try:
            out[key] = first_value(path)
        except Exception:
            pass  # best-effort: field absent or malformed

    # openings
    try:
        out['openings'] = [m.value for m in parse("$..hasOpenings.ref").find(json2)]
    except Exception:
        pass

    # materials
    try:
        layers = []
        for match in parse("$..materialLayers").find(json2):
            for layer in match.value:
                layers.append({layer['material']['name']: layer['layerThickness']})
        out['material'] = layers
    except Exception:
        pass

    # obj data
    try:
        if self.get_obj:
            reps = parse("$..representations").find(json2)
            ref = reps[0].value[0]['ref']
            out['shape_representation_ref_obj'] = ref
            query = ("$..data[?(@.type=='shapeRepresentation' "
                     "& @.globalId == '" + str(ref) + "')]")
            # Looked up but not stored, matching the original behaviour
            # (the `out['OBJ'] = a_obj` assignment was commented out).
            a_obj = parse(query).find(self.data)[0].value["items"][0]
            # out['OBJ'] = a_obj
        else:
            out['OBJ'] = "suppressed"
    except Exception:
        pass

    return out
def delete(self, json_path: str, key_or_index: Union[str, int]) -> dict:
    """Delete a value from the JSON document.

    Parameters:
        json_path: path to the JSON object/array holding the target value
            (use `$` for the document root).
        key_or_index: property name when `json_path` points to a JSON
            object, or an integer index when it points to a JSON array.

    Returns the updated content as a Python dictionary (also stored in
    `self.active_json`). When autosaving is enabled the result is written
    back to disk.

    Raises:
        IncorrectFunctionParameterTypeError: a parameter has a wrong type.
        JSONPathError: `json_path` is not valid for the current content.
        TypeError: `key_or_index` does not match the structure the path
            points to (int for arrays, str for objects).

    Example — deleting a pair from the root of a simple object::

        >>> op = JsonFileParser('del_pair.json')
        >>> # {"application_name": "PetHome", "version": "1.0", "display": "standalone"}
        >>> op.delete('$', 'display')
        >>> op.active_json
        {"application_name": "PetHome", "version": "1.0"}

    Tip: if you don't know an item's index, `get_item_index` from
    `robust_json.ext` can find it.

    For more information about this method, please visit:
    https://github.com/NickolaiBeloguzov/robust-json/blob/master/README.md#file-module-methods-and-properties
    """
    # TODO Add link to an appropriate README section from GitHub
    if type(json_path) != str:
        raise IncorrectFunctionParameterTypeError("json_path", "str", type(json_path).__name__)
    if type(key_or_index) not in [str, int]:
        raise IncorrectFunctionParameterTypeError(
            "key_or_index", "str or int", type(key_or_index).__name__)

    json_content = self.active_json

    if not self.__service.check_json_path(json_path, json_content):
        raise JSONPathError(f"Path `{json_path}` is not valid.")

    js_expr = jsonpath.parse(json_path)

    for item in js_expr.find(json_content):
        temp = item.value
        if type(temp) == list:
            if type(key_or_index) != int:
                raise TypeError(
                    f"Path `{json_path}` is pointing to a JSON array, therefore `key_or_index` parameter must have an `int` type; got `{type(key_or_index).__name__}` instead."
                )
        else:
            if type(key_or_index) != str:
                raise TypeError(
                    f"Path `{json_path}` is pointing to a JSON object, therefore `key_or_index` parameter must have a `str` type; got `{type(key_or_index).__name__}` instead."
                )
        # `del` works uniformly for dict keys and list indices.
        del temp[key_or_index]
        self.active_json = json_content

    # Bug fix: the array branch used to `return` from inside the loop,
    # which (a) skipped the autosave step below for array deletions and
    # (b) only processed the first jsonpath match while the object branch
    # processed all of them. Both branches now share this post-processing.
    if self.__is_autosaving:
        if "autosave_path" in self.__kwargs:
            if type(self.__kwargs["autosave_path"]) != str:
                raise IncorrectFunctionParameterTypeError(
                    "autosave_path",
                    "str",
                    type(self.__kwargs["autosave_path"]).__name__,
                )
            path = self.__kwargs["autosave_path"]
            # Only create the target file if it does not exist yet.
            create_file = not os.path.exists(path)
        else:
            path = self.__path
            create_file = False
        self.save_to_file(path=path, create_file=create_file)

    return json_content
def update_value(
    self,
    json_path: str,
    key_or_index: Union[str, int],
    new_value: Any,
    strict_mode: bool = False,
) -> dict:
    """Update a value in the JSON document.

    Parameters:
        json_path: path to the JSON object/array holding the target value
            (use `$` for the document root).
        key_or_index: property name when `json_path` points to a JSON
            object, or an integer index when it points to a JSON array.
        new_value: value that overwrites the old one.
        strict_mode: when True, the new value's type must match the old
            value's type exactly.

    Returns the updated content as a Python dictionary (also stored in
    `self.active_json`). When autosaving is enabled the result is written
    back to disk.

    Raises:
        IncorrectFunctionParameterTypeError: a parameter has a wrong type.
        JSONPathError: `json_path` is not valid for the current content.
        TypeError: `key_or_index` does not match the structure the path
            points to (int for arrays, str for objects).
        JSONStrictModeError: strict mode is on and the types of the old
            and new values differ.

    Example::

        >>> op = JsonFileParser('simple.json')
        >>> # {"app_name": "Test App", "version": "1.0.5"}
        >>> op.update_value('$', 'version', '1.1.0')
        >>> op.active_json
        {"app_name": "Test App", "version": "1.1.0"}

    Tip: if you don't know an item's index, `get_item_index` from
    `robust_json.ext` can find it.

    For more information about this method, please visit:
    https://github.com/NickolaiBeloguzov/robust-json/blob/master/README.md#file-module-methods-and-properties
    """
    # TODO Add link to an appropriate README section from GitHub
    if type(json_path) != str:
        raise IncorrectFunctionParameterTypeError("json_path", "str", type(json_path).__name__)
    if type(strict_mode) != bool:
        raise IncorrectFunctionParameterTypeError(
            "strict_mode", "bool", type(strict_mode).__name__)
    if type(key_or_index) not in [str, int]:
        raise IncorrectFunctionParameterTypeError(
            "key_or_index", "str or int", type(key_or_index).__name__)

    json_content = self.active_json

    if not self.__service.check_json_path(json_path, json_content):
        raise JSONPathError(f"Path `{json_path}` is not valid.")

    js_expr = jsonpath.parse(json_path)

    for item in js_expr.find(json_content):
        temp = item.value
        if type(temp) == list:
            if type(key_or_index) != int:
                raise TypeError(
                    f"Path `{json_path}` is pointing to a JSON array, therefore `key_or_index` parameter must have an `int` type; got `{type(key_or_index).__name__}` instead."
                )
        else:
            if type(key_or_index) != str:
                raise TypeError(
                    f"Path `{json_path}` is pointing to a JSON object, therefore `key_or_index` parameter must have a `str` type; got `{type(key_or_index).__name__}` instead."
                )
        # In strict mode the old value's type must be preserved. Note:
        # this lookup intentionally raises KeyError/IndexError when the
        # target does not exist, matching the original behaviour.
        if strict_mode and type(temp[key_or_index]) != type(new_value):
            raise JSONStrictModeError(
                f"If strict mode is enabled, the type of the new value must be identical to the type of the old one ({type(temp[key_or_index]).__name__}); got `{type(new_value).__name__}` instead."
            )
        # Uniform assignment works for both dicts and lists (the original
        # dict branch used temp.update({key: value}), which is equivalent).
        temp[key_or_index] = new_value
        self.active_json = json_content

    # Bug fix: the array branch used to `return` from inside the loop,
    # skipping the autosave step below and only processing the first
    # jsonpath match; both branches now share this post-processing.
    if self.__is_autosaving:
        if "autosave_path" in self.__kwargs:
            if type(self.__kwargs["autosave_path"]) != str:
                raise IncorrectFunctionParameterTypeError(
                    "autosave_path",
                    "str",
                    type(self.__kwargs["autosave_path"]).__name__,
                )
            path = self.__kwargs["autosave_path"]
            # Only create the target file if it does not exist yet.
            create_file = not os.path.exists(path)
        else:
            path = self.__path
            create_file = False
        self.save_to_file(path=path, create_file=create_file)

    return json_content
def append(self, json_path: str, append_value: Any, append_at_end: bool = False) -> dict:
    """Append a value to an existing JSON object or array.

    Parameters:
        json_path: path where the value is added (`$` for the root).
        append_value: the value to append.
        append_at_end: controls behaviour on JSON arrays. False (default)
            merges `append_value` into every dict element of the array
            (non-dict elements are skipped silently); True appends
            `append_value` as a new element at the end of the array.
            It has no influence on non-array structures.

    Returns the updated content as a Python dictionary (also stored in
    `self.active_json`). When autosaving is enabled the result is written
    back to disk.

    Raises:
        IncorrectFunctionParameterTypeError: a parameter has a wrong type.
        ValueError: `append_value` is empty ('', [] or {}).
        JSONPathError: `json_path` is not valid for the current content.
        TypeError: merging a non-dict value into array elements.

    Example — adding a pair to the root object::

        >>> op = JsonFileParser('test.json')   # {"key": "value"}
        >>> op.append('$', {'test': 'test'})
        >>> op.active_json
        {"key": "value", "test": "test"}

    To extend an array of plain values (e.g. strings), pass
    ``append_at_end=True``; otherwise nothing is appended because the
    function tries to merge into each element instead.

    For more information about this method please visit:
    https://github.com/NickolaiBeloguzov/robust-json/blob/master/README.md#file-module-methods-and-properties
    """
    if type(json_path) != str:
        raise IncorrectFunctionParameterTypeError("json_path", "str", type(json_path).__name__)
    if type(append_at_end) != bool:
        raise IncorrectFunctionParameterTypeError(
            "append_at_end", "bool", type(append_at_end).__name__)

    empty_obj = [[], {}, ""]
    if append_value in empty_obj:
        raise ValueError("Parameter `append_value` is empty.")

    json_content = self.active_json

    if not self.__service.check_json_path(json_path, json_content):
        raise JSONPathError(f"Path `{json_path}` is not valid.")

    js_expr = jsonpath.parse(json_path)

    for item in js_expr.find(json_content):
        temp = item.value
        if type(temp) == list:
            if append_at_end:
                temp.append(append_value)
            else:
                # Merge into every dict element; non-dict elements are
                # skipped silently (documented behaviour).
                for element in iter(temp):
                    if type(element) == dict:
                        if type(append_value) != dict:
                            raise TypeError(
                                f"To append to a JSON object, parameter `append_value` must be a dictionary; got `{type(append_value).__name__}` instead."
                            )
                        element.update(append_value)
        else:
            temp.update(append_value)
        self.active_json = json_content

    # Bug fix: both array branches used to `return` from inside the loop,
    # skipping the autosave step below and only processing the first
    # jsonpath match; all branches now share this post-processing.
    if self.__is_autosaving:
        if "autosave_path" in self.__kwargs:
            if type(self.__kwargs["autosave_path"]) != str:
                raise IncorrectFunctionParameterTypeError(
                    "autosave_path", "str",
                    type(self.__kwargs["autosave_path"]).__name__)
            path = self.__kwargs["autosave_path"]
            # Only create the target file if it does not exist yet.
            create_file = not os.path.exists(path)
        else:
            path = self.__path
            create_file = False
        self.save_to_file(path=path, create_file=create_file)

    return json_content
def get_os_floating_ips(self):
    """All floating IPs recorded by floatingip-associate resources in the Terraform state."""
    expr = parse(
        "resources[?type=openstack_compute_floatingip_associate_v2].instances[*].attributes.floating_ip"
    )
    floating_ips = []
    for match in expr.find(self.__tf_state):
        floating_ips.append(match.value)
    return floating_ips
def __get_cluster_name(self):
    """Cluster name recorded in the hieradata vars of the Terraform state."""
    matches = parse(
        "resources[?name=hieradata].instances[0].attributes.vars.cluster_name"
    ).find(self.__tf_state)
    return matches[0].value
def _render_manifest(self):
    """Apply every (jsonpath, value) render rule to the manifest and serialize it as JSON."""
    rendered = self._manifest
    for rule_path, rule_value in self._render_rules:
        rendered = parse(rule_path).update(rendered, rule_value)
    return json.dumps(rendered)
def __get_image(self):
    """Name attribute of the image resource recorded in the Terraform state."""
    matches = parse(
        "resources[?name=image].instances[0].attributes.name"
    ).find(self.__tf_state)
    return matches[0].value
def set_jsonpath(obj: JSONTYPES, path: str, value: Any) -> None:
    """Sets the value in each matching jsonpath key (mutates *obj* in place)."""
    parse(path).update(obj, value)
def get_instance_type(instance_category):
    # Flavor name of the first openstack_compute_instance_v2 instance whose
    # resource name equals *instance_category* in the Terraform state.
    # NOTE(review): there is no `self` parameter, yet `self.__tf_state` is
    # referenced — presumably this is a nested function (closure) defined
    # inside a method, so `self` is captured lexically; confirm at the
    # definition site.
    parser = parse(
        f'resources[?type="openstack_compute_instance_v2" & name="{instance_category}"].instances[0].attributes.flavor_name'
    )
    return parser.find(self.__tf_state)[0].value
def get_jsonpath(obj: JSONTYPES, path: str) -> List[JSONTYPES]:
    """Return json values matching jsonpaths."""
    matches = parse(path).find(obj)
    return [found.value for found in matches]
def get_external_storage_size(space_name):
    # Size (as int) of the first openstack_blockstorage_volume_v2 instance
    # whose resource name equals *space_name* in the Terraform state.
    # NOTE(review): there is no `self` parameter, yet `self.__tf_state` is
    # referenced — presumably this is a nested function (closure) defined
    # inside a method, so `self` is captured lexically; confirm at the
    # definition site.
    parser = parse(
        f'resources[?type="openstack_blockstorage_volume_v2" & name="{space_name}"].instances[0].attributes.size'
    )
    return int(parser.find(self.__tf_state)[0].value)
def json_filter(self):
    """Data filter for output: the parsed jsonpath expression when one is
    configured, otherwise False."""
    if not self._json_filter:
        return False
    return parse(self._json_filter)
def get_instance_count(self) -> int:
    """Number of compute-flavor instances recorded in the Terraform state."""
    matches = parse(
        "resources[?type=openstack_compute_flavor_v2].instances[*].attributes.id"
    ).find(self.__tf_state)
    return len(matches)
def dfs_question(parent_id: str, question: DatumInContext, graph_markers: Dict, parent_is_question: bool = False) -> Dict:
    """Depth-first walk over one question node, assigning graph markers.

    Marked questions get a fresh marker (first descendant of *parent_id*
    or next sibling of the markers already under it) and recurse into
    their subquestions via `dfs_questions`; unmarked questions pass the
    parent's marker straight through.
    """
    # What kind of question am I?
    marking = is_marking(question)
    qtypes = [
        "checkbox", "checkbox_flag", "daterange", "email", "file_upload",
        "integer", "mailing_address", "money", "objectives", "percentage",
        "phone_number", "radio", "ranges", "text", "text_medium",
        "text_multiline", "text_small"
    ]
    # Decide what `parent_is_question` should be for our children.
    if question.value["type"] in ("objective", "objectives", "repeatable", "repeatables"):
        sending_parent_is_question = False
    elif question.value["type"] == "fieldset":
        if is_marking(question):
            sending_parent_is_question = True
        else:
            sending_parent_is_question = parent_is_question
    elif question.value["type"] in qtypes:
        # Leaf-like question types always count as questions for children.
        sending_parent_is_question = True
    else:
        sending_parent_is_question = parent_is_question
    if marking:
        # We know we're marking and we know who the parent is. So if the
        # parent_id has no keys, we're the first.
        # NOTE(review): `graph_markers[parent_id][this_marker] = {}` assumes
        # `parent_id` is already a key of `graph_markers` (or that it is a
        # defaultdict); a plain dict without the key would raise KeyError —
        # confirm with the callers.
        if not graph_markers.get(parent_id):
            print("making first descendant")
            if check_for_unmarked(question):
                this_marker = make_first_descendant(parent_id, False, True)
            else:
                this_marker = make_first_descendant(parent_id, parent_is_question)
            print("received descendant", this_marker)
            graph_markers[parent_id][this_marker] = {}
        else:
            print("making sibling")
            this_marker = make_next_sibling(parent_id, graph_markers[parent_id])
            print("received sibling", this_marker)
            graph_markers[parent_id][this_marker] = {}
        # Sanity check: a pre-existing id on the question must agree with
        # the marker we just generated.
        if question.value.get("id"):
            if question.value.get("id") != this_marker:
                print(question.value["id"], this_marker)
                raise Exception
        subqs_expr = parse("questions[*]")
        subqs = subqs_expr.find(question.value)
        if not subqs:
            print("no subquestions, returning", graph_markers.keys())
            return graph_markers[parent_id]
        # Recurse with a fresh sub-tree under our new marker.
        graph_markers[parent_id][this_marker] = dfs_questions(
            this_marker, subqs, {}, parent_is_question=sending_parent_is_question)
        return graph_markers[parent_id]
    else:
        # Not marking: children attach directly under the parent's marker.
        this_marker = parent_id
        subqs_expr = parse("questions[*]")
        subqs = subqs_expr.find(question.value)
        if not subqs:
            return graph_markers[parent_id]
        return dfs_questions(
            this_marker, subqs, graph_markers,
            parent_is_question=sending_parent_is_question)[parent_id]
async def get_membership_items(secrets, client):
    """Search the Square catalog for items whose name starts with "f" and
    collect their (and their variations') ids, display names and the set
    of location ids they are present at.

    Returns a tuple ``(membership_item_names, locations)`` where
    ``membership_item_names`` maps catalog object id -> display name and
    ``locations`` is a set of location ids.

    NOTE(review): the `secrets` parameter is unused here — confirm whether
    it is required by the caller's signature convention.
    """
    membership_item_names = {}
    locations = set()
    result = client.catalog.search_catalog_objects(
        body={
            "include_related_objects": True,
            "object_types": ["ITEM"],
            "query": {
                "prefix_query": {
                    "attribute_name": "name",
                    "attribute_prefix": "f"
                }
            },
            "limit": 100
        })
    if result.is_success():
        json_result = result.body
        dats = parse("objects[*]").find(json_result)
        for dat in dats:
            # Top-level item: id, display name, locations.
            item_id = [f.value for f in Fields('id').find(dat.value)][0]
            item_name = [
                f.value for f in parse('item_data.name').find(dat.value)
            ][0]
            item_loc = [
                f.value
                for f in Fields('present_at_location_ids').find(dat.value)
            ]
            if item_loc:
                # Unwrap the single match to the actual id list.
                item_loc = item_loc[0]
            membership_item_names[item_id] = item_name
            locations.update(item_loc)
            # Variations of this item get "item - variation" composite names.
            vdats = parse('item_data.variations[*]').find(dat.value)
            for vdat in vdats:
                item_id = [f.value for f in Fields('id').find(vdat.value)][0]
                var_item_name = [
                    f.value
                    for f in parse('item_variation_data.name').find(vdat.value)
                ][0]
                item_loc = [
                    f.value
                    for f in Fields('present_at_location_ids').find(vdat.value)
                ]
                if item_loc:
                    item_loc = item_loc[0]
                composit_name = f"{item_name} - {var_item_name}"
                membership_item_names[item_id] = composit_name
                locations.update(item_loc)
    elif result.is_error():
        print(result.errors)
    logger.log_struct(
        {
            "membership_item_names": membership_item_names,
            "item locations": list(locations)
        },
        severity='DEBUG')
    return membership_item_names, locations
#!/usr/bin/env python3
"""Load ./test_result.json and print the first testcase found via jsonpath.

Fixes: the file handle was opened and never closed (now a `with` block),
and json.load replaces the readlines()/join round-trip.
"""
import os, sys
import json
from pprint import pprint

from jsonpath_ng import jsonpath
from jsonpath_ng.ext import parse

# `with` guarantees the handle is closed; json.load parses the stream directly.
with open('./test_result.json', 'r') as f_json:
    json_data = json.load(f_json)

jsonpath_expression = parse("$.reports.testsuite[*].testcase[*]")
matched_in_json = jsonpath_expression.find(json_data)

for test_result in matched_in_json:
    test_result_value = test_result.value
    classname = test_result_value['@classname']
    testname = test_result_value['@name']
    # Matches are views into json_data, so this mutates the loaded document.
    test_result_value['hello123'] = 'world123'
    pprint('{}.{}'.format(classname, testname))
    print(json_data)
    # Only the first testcase is processed (original behaviour).
    break
# print(matched_in_json)
def validate_json_paths(path_dict):
    """Parse every key of *path_dict* as a jsonpath expression (raising on
    invalid ones) and return the dict unchanged."""
    for candidate_path in path_dict:
        parse(candidate_path)
    return path_dict
def properties_from_json(json, mapping, discovery_pattern=None, discovery_path=None):
    """Extract properties from a provider json result.

    :param json: the representation of a provider result as a json object
    :type json: dict
    :param mapping: a mapping between :class:`~eodag.api.product.EOProduct`'s
                    metadata keys and the location of the values of these
                    properties in the json representation, expressed as a
                    `jsonpath <http://goessner.net/articles/JsonPath/>`_
    :param discovery_pattern: regex pattern for metadata key discovery,
                              e.g. "^[a-zA-Z]+$"
    :type discovery_pattern: str
    :param discovery_path: str representation of jsonpath
    :type discovery_path: str
    :return: the metadata of the :class:`~eodag.api.product.EOProduct`
    :rtype: dict
    """
    properties = {}
    # Values containing "{...}" placeholders, resolved after all plain
    # properties have been extracted.
    templates = {}
    # Jsonpaths already consumed by the explicit mapping; discovery below
    # skips these to avoid overwriting mapped properties.
    used_jsonpaths = []
    for metadata, value in mapping.items():
        # Treat the case when the value is from a queryable metadata
        if isinstance(value, list):
            conversion_or_none, path_or_text = value[1]
        else:
            conversion_or_none, path_or_text = value
        if isinstance(path_or_text, str):
            # A literal string: either a template (contains {placeholders})
            # or a constant property value.
            if re.search(r"({[^{}]+})+", path_or_text):
                templates[metadata] = path_or_text
            else:
                properties[metadata] = path_or_text
        else:
            # A pre-parsed jsonpath object: look it up in the json document.
            match = path_or_text.find(json)
            if len(match) == 1:
                extracted_value = match[0].value
                used_jsonpaths.append(match[0].path)
            else:
                # Zero or ambiguous (multiple) matches count as not available.
                extracted_value = NOT_AVAILABLE
            if extracted_value is None:
                properties[metadata] = None
            else:
                if conversion_or_none is None:
                    properties[metadata] = extracted_value
                else:
                    # reformat conversion_or_none as metadata#converter(args)
                    # or metadata#converter
                    if (len(conversion_or_none) > 1
                            and isinstance(conversion_or_none, list)
                            and conversion_or_none[1] is not None):
                        conversion_or_none = "%s(%s)" % (
                            conversion_or_none[0],
                            conversion_or_none[1],
                        )
                    elif isinstance(conversion_or_none, list):
                        conversion_or_none = conversion_or_none[0]
                    properties[metadata] = format_metadata(
                        "{%s%s%s}" % (metadata, SEP, conversion_or_none),
                        **{metadata: extracted_value})
                # properties as python objects when possible
                # (format_metadata returns only strings)
                try:
                    properties[metadata] = ast.literal_eval(properties[metadata])
                except Exception:
                    pass
    # Resolve templates now that plain properties are available.
    for metadata, template in templates.items():
        properties[metadata] = template.format(**properties)
    # adds missing discovered properties
    if discovery_pattern and discovery_path:
        discovered_properties = parse(discovery_path).find(json)
        for found_jsonpath in discovered_properties:
            found_key = found_jsonpath.path.fields[-1]
            # Keep a discovered key only if it matches the pattern and was
            # neither explicitly mapped nor already consumed above.
            if (re.compile(discovery_pattern).match(found_key)
                    and found_key not in properties.keys()
                    and found_jsonpath.path not in used_jsonpaths):
                properties[found_key] = found_jsonpath.value
    return properties