def read(self, filename, pointer=None, additional=None):
    """Load a schema document from *filename* (JSON or YAML, '-' = stdin).

    :param filename: path to a '*.json' / '*.yml' / '*.yaml' file, or '-'
        to read from stdin
    :param pointer: optional RFC 6901 JSON pointer selecting a sub-document
        to use as the root schema
    :param additional: optional iterable of JSON pointers whose targets are
        parsed as additional named schemas before the main parse
    :raises ValueError: if the file extension is not recognized
    """
    if filename == "-":
        f = sys.stdin
    else:
        f = open(filename, "r")
    try:
        if filename.endswith('.json'):
            input = json.load(f, object_pairs_hook=OrderedDict)
        elif filename.endswith(('.yml', '.yaml')):
            input = yaml.load(f)
        else:
            raise ValueError(
                "Unrecognized file extension, use '*.json' or '*.yml': %s"
                % filename)
    finally:
        # BUG FIX: close in a finally so the handle is not leaked when
        # parsing raises (the original only closed on full success).
        if f is not sys.stdin:
            f.close()
    if additional:
        for a in additional:
            # BUG FIX: resolve each additional pointer 'a'; the original
            # resolved the main 'pointer' on every iteration.
            a_input = jsonpointer.resolve_pointer(input, a)
            Schema.parse(a_input, name=a)
    name = 'root'
    if pointer:
        input = jsonpointer.resolve_pointer(input, pointer)
        name = pointer
    self.schema_raw = input
    self.schema = Schema.parse(input, name=name)
def _prune_log_message(self, msg):
    """Strip configured paths from a Splunk-ready message.

    If the "splunk_remove_paths" config item is not set or empty, return
    ``msg`` unaltered. Otherwise, for each RFC6901 JSON Pointer in
    "splunk_remove_paths" that points to an element present in ``msg``,
    remove that element.

    :param msg: Splunk-ready message
    :type msg: dict
    :return: msg dict with all ``splunk_remove_paths`` elements removed
    :rtype: dict
    """
    remove_paths = self.config.get('splunk_remove_paths', [])
    if not remove_paths:
        return msg
    ops = []
    for ptr in remove_paths:
        try:
            # Only emit a remove op for pointers that actually resolve;
            # unresolvable paths are silently skipped.
            resolve_pointer(msg, ptr)
        except JsonPointerException:
            continue
        ops.append({'op': 'remove', 'path': ptr})
    if not ops:
        return msg
    return JsonPatch(ops).apply(msg)
def _get_context(self, rmrs):
    """Get the context for each RMR.

    Private method, not to be overridden.

    Parameters
    ----------
    rmrs : UserBaselineProposedVals
        Object containing the user, baseline, and proposed RMRs

    Returns
    -------
    UserBaselineProposedVals
        Object containing the contexts for the user, baseline, and proposed
        RMRs; an RMR's context is set to None if the corresponding flag in
        self.rmrs_used is not set or if rmr_context does not resolve
    """
    # Prepend the leading '/' as needed. It is optional in rmr_context for
    # improved readability.
    if self.rmr_context == '' or self.rmr_context.startswith('/'):
        pointer = self.rmr_context
    else:
        pointer = '/' + self.rmr_context

    # BUG FIX: the original comment claimed resolve_pointer returns None on
    # a missing pointer, but without an explicit default it raises
    # JsonPointerException. Pass None so the documented behavior holds.
    return UserBaselineProposedVals(
        user=resolve_pointer(rmrs.user, pointer, None)
        if self.rmrs_used.user else None,
        baseline=resolve_pointer(rmrs.baseline, pointer, None)
        if self.rmrs_used.baseline else None,
        proposed=resolve_pointer(rmrs.proposed, pointer, None)
        if self.rmrs_used.proposed else None
    )
def _check_node(breadcrumb: str, value: dict):
    """Recursively walk a locale mapping and record a warning for every leaf
    string that is missing from a non-default locale.
    """
    for nested_key in value:
        base = f"{breadcrumb}/{nested_key}" if breadcrumb else nested_key
        child = value[nested_key]
        # Dict: descend with the extended breadcrumb.
        if isinstance(child, dict):
            _check_node(base, child)
            continue
        # Str: verify each other locale provides a translation.
        pointer = f"/{base}"
        for locale in _LOCALES_CACHE:
            if locale == _DEFAULT_LOCALE:
                continue
            try:
                resolve_pointer(_LOCALES_CACHE[locale], pointer)
            except JsonPointerException:
                warnings.append((
                    4,
                    f"Locale {locale} is missing string \"{base}\" (untranslated from {_DEFAULT_LOCALE})"
                ))
def check_folders(credentials, resources):
    """ Prints any non-terraformed folders which are siblings of terraformed folders """
    service = discovery.build(
        'cloudresourcemanager', 'v2', credentials=credentials)
    folder_states = resources['google_folder']
    # Distinct, non-empty parent ids of every terraformed folder.
    parent_ids = {
        pid for pid in (
            resolve_pointer(folder, '/primary/attributes/parent')
            for folder in folder_states)
        if pid
    }
    gcp_folders = {}
    for parent_id in parent_ids:
        gcp_folders.update(_get_gcp_folders_in_parent(service, parent_id))
    state_folders = {
        resolve_pointer(folder, '/primary/attributes/name')
        for folder in folder_states
    }
    # Folders GCP knows about but Terraform state does not.
    missing_folder_ids = set(gcp_folders) - state_folders
    if missing_folder_ids:
        print(f'\nTerraform is not controlling folders:')
        for missing_folder_id in missing_folder_ids:
            folder_data = gcp_folders[missing_folder_id]
            print(f'\t{folder_data["displayName"]} ({folder_data["name"]})')
def isScalablePmemFunctionalityEnabled(self, config):
    """ Determine if Scalable PMEM functionality is enabled

    :param config: configuration data
    :type config: configuration data
    :returns: whether or not Scalable PMEM functionality is enabled and
              messages explaining issues if found
    :type (bool, string, string)
    """
    # Map of reason identifiers to human-readable explanations.
    functionalityDisabledMessages = {
        "PowerSubsystemProblem": u"Problem with backup power",
        "StorageSubsystemProblem": u"Problem with backup storage devices",
        "MemorySubsystemProblem": u"Problem with memory"
    }
    if resolve_pointer(config, "/Attributes/FunctionalityEnabled", False):
        return (True, None, None)
    # Disabled: translate every truthy reason flag into its message.
    reasons = resolve_pointer(
        config, "/Attributes/FunctionalityDisabledReason", None)
    reasonMessages = []
    if reasons:
        for reasonId, value in reasons.items():
            if not value:
                continue
            message = functionalityDisabledMessages.get(reasonId)
            if message:
                reasonMessages.append(message)
    return (False,
            u"Scalable Persistent Memory is disabled due to problems",
            reasonMessages)
def validate_reference(reference, context, **kwargs):
    """Validate that *reference*'s fragment resolves inside *context*.

    :raises ValidationError: when the JSON pointer cannot be resolved.
    """
    parts = urlparse.urlparse(reference)
    try:
        jsonpointer.resolve_pointer(context, parts.fragment)
    except jsonpointer.JsonPointerException:
        raise ValidationError(
            MESSAGES['reference']['undefined'].format(reference))
def get_field_list_from_json_and_field_config_type_list(data, field_config):
    """Build a list of ``{"title", "value"}`` dicts from a JSON list field.

    ``field_config["key"]`` is a JSON pointer to a list inside ``data``.
    For each element, every pointer in ``field_config["fields"]`` is
    resolved; truthy values are emitted with a 1-based element index in
    the title.  Returns [] when the pointer is missing or does not point
    at a list.
    """
    try:
        list_values = jsonpointer.resolve_pointer(data, field_config["key"])
    except jsonpointer.JsonPointerException:
        return []
    if not isinstance(list_values, list):
        return []
    out = []
    # Idiom fix: enumerate(start=1) replaces the hand-rolled idx counter.
    for idx, list_value in enumerate(list_values, start=1):
        for item_field_config in field_config["fields"]:
            try:
                value = jsonpointer.resolve_pointer(
                    list_value, item_field_config["key"]
                )
            except jsonpointer.JsonPointerException:
                continue
            if value:
                out.append(
                    {
                        "title": field_config["title"]
                        + " "
                        + str(idx)
                        + ": "
                        + item_field_config["title"],
                        "value": value,
                    }
                )
    return out
async def test_fetch_collection(fantasy_client):
    """Fetching the books collection yields JSON:API data of type 'books'."""
    response = await fantasy_client.get('/api/books', headers=GET_HEADERS)
    assert response.status == 200
    data = await response.json(content_type=JSONAPI_CONTENT_TYPE)
    books = resolve_pointer(data, '/data')
    # Every element of /data must carry the 'books' resource type.
    for index, _ in enumerate(books):
        assert resolve_pointer(data, '/data/{}/type'.format(index)) == 'books'
def __init__(self, reference, context, **kwargs):
    """Resolve *reference* against *context* (loading an external source
    when the reference carries a path) and store both for lazy validation.
    """
    if self.validators_constructor is None:
        raise NotImplementedError(
            "Subclasses of LazyReferenceValidator must specify a "
            "`validators_constructor` function"
        )
    self._kwargs = kwargs
    parsed_ref = urlparse.urlparse(reference)
    self.reference_path = parsed_ref.path
    self.reference_fragment = parsed_ref.fragment
    if self.reference_path:
        from flex.core import load_source
        if self.reference_path.startswith('/'):
            # Absolute path: load the referenced document directly.
            context = load_source(self.reference_path)
        elif 'base_path' in kwargs:
            # Relative path: resolve against the supplied base path.
            context = load_source(
                os.path.join(kwargs['base_path'], self.reference_path))
    # TODO: something better than this which potentially raises a
    # JsonPointerException
    jsonpointer.resolve_pointer(context, self.reference_fragment)
    self.reference = reference
    self.context = context
def validate_deferred_references(schema, context, **kwargs):
    """Validate every deferred reference collected in *context*.

    Each reference must consist only of a path and/or fragment (scheme,
    netloc, params and query are unsupported).  When a path is present the
    referenced document is loaded; the fragment must then resolve as a JSON
    pointer.  Failures are accumulated per reference in an ErrorDict.

    :raises KeyError: if ``context`` has no 'deferred_references' entry.
    """
    try:
        deferred_references = context['deferred_references']
    except KeyError:
        # BUG FIX: was a bare `except:` which also swallowed unrelated
        # errors (even KeyboardInterrupt) and re-raised them as KeyError.
        raise KeyError("`deferred_references` not found in context")
    with ErrorDict() as errors:
        for reference in deferred_references:
            parts = urlparse.urlparse(reference)
            if any((parts.scheme, parts.netloc, parts.params, parts.query)):
                errors.add_error(
                    reference,
                    MESSAGES['reference']['unsupported'].format(reference),
                )
                continue
            # BUG FIX: use a per-reference local so loading one reference's
            # document no longer clobbers the schema used for subsequent
            # (path-less) references in the same loop.
            target = schema
            if parts.path:
                from flex.core import load_source
                if parts.path.startswith('/'):
                    target = load_source(parts.path)
                elif 'base_path' in kwargs:
                    target = load_source(
                        os.path.join(kwargs['base_path'], parts.path))
            try:
                jsonpointer.resolve_pointer(target, parts.fragment)
            except jsonpointer.JsonPointerException:
                errors.add_error(
                    reference,
                    MESSAGES['reference']['undefined'].format(reference),
                )
def setUp(self):
    """Load the hand-written and generated swagger documents plus example
    tag data, then pre-compute the routes exercised by the tests.
    """
    # Swagger
    self.timeout = 5
    self.pre_path = os.path.join(data_dir, 'swagger', "swagger-pre.json")
    self.static = os.path.join(
        os.path.dirname(static.__file__), "swagger.json")
    self.prefix = '/api/v1'
    self.headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.76 Safari/537.36'
    }
    with open(self.pre_path, 'r', encoding='utf8') as f:
        self.pre_data = json.load(f)
    with open(self.static, 'r', encoding='utf8') as f:
        self.generated_data = json.load(f)
    # example data
    self.all_affix_tags = [x['tag'] for x in affix_data]
    self.all_affopt_tags = [
        x['tag'] for x in AFFIX_OPTIONS['AFFIX_OPTIONS']]
    self.all_pronoun_tags = [x['tag'] for x in pronoun_data]
    self.all_verb_tags = [x['tag'] for x in verb_data]
    # define pointers
    self.servers = [
        s['url'] for s in resolve_pointer(self.generated_data, '/servers')]
    self.route_pointer = '/paths'
    self.routes = resolve_pointer(
        self.generated_data, self.route_pointer).keys()
    self.routes_with_args = [
        self.insert_example_arg(route) for route in self.routes]
async def test_meta_object(fantasy_client, fantasy_app):
    """The resource document exposes the application-level meta object."""
    response = await fantasy_client.get('/api/books/1', headers=GET_HEADERS)
    assert response.status == 200
    data = await response.json(content_type=JSONAPI_CONTENT_TYPE)
    expected_meta = fantasy_app[JSONAPI]['meta']
    assert resolve_pointer(data, '/meta') == expected_meta
    assert resolve_pointer(data, '/meta/fantasy/version') == '0.0.1'
def validate_reference_pointer(reference, context, **kwargs):
    """Ensure *reference* is a bare fragment that resolves in *context*.

    :raises ValidationError: when the reference carries any non-fragment
        component, or when the fragment does not resolve.
    """
    parts = urlparse.urlparse(reference)
    non_fragment = (
        parts.scheme, parts.netloc, parts.path, parts.params, parts.query)
    if any(non_fragment):
        raise ValidationError(
            MESSAGES["reference"]["unsupported"].format(reference))
    try:
        jsonpointer.resolve_pointer(context, parts.fragment)
    except jsonpointer.JsonPointerException:
        raise ValidationError(
            MESSAGES["reference"]["undefined"].format(reference))
def main():
    """Entrypoint for this example."""
    doc = ymlref.load(DOCUMENT)
    # Collect the level of each entry under 'my'.
    levels = []
    for index in range(len(doc['my'])):
        levels.append(resolve_pointer(doc, '/my/{}/level'.format(index)))
    print('Levels of my pokemon: {}'.format(levels))
    pikachu_type = resolve_pointer(doc, '/pokemon/pikachu/type')
    print('Pikachu is of type: ' + pikachu_type)
def apply_pointers(self, content, params):
    """Replace JSON-pointer targets in *content* with their parameter names.

    Valid params in Nulecule:

        param1:
          - /spec/containers/0/ports/0/hostPort
          - /spec/containers/0/ports/0/hostPort2

    Args:
        content (str): content of artifact file
        params (dict): list of params with pointers to replace in content

    Returns:
        str: content with replaced pointers

    Todo:
        In the future we need to change this to detect haml, yaml, etc as we
        add more providers.
        Blocked by: github.com/bkabrda/anymarkup-core/blob/master/anymarkup_core/__init__.py#L393
    """
    doc_tree = anymarkup.parse(content)
    if type(doc_tree) != dict:
        logger.debug(
            "Artifact file not json/haml, assuming it's $VARIABLE substitution"
        )
        return content
    if params is None:
        # Nothing to do here!
        return content
    for key, paths in params.items():
        if not paths:
            logger.warning("Could not find pointer for %s" % key)
            continue
        for path in paths:
            try:
                # Probe first so a missing path raises before we mutate.
                resolve_pointer(doc_tree, path)
                set_pointer(doc_tree, path, key)
                logger.debug("Replaced %s pointer with %s param" % (path, key))
            except JsonPointerException:
                logger.debug("Error replacing %s with %s" % (path, key))
                logger.debug("Artifact content: %s", doc_tree)
                raise NuleculeException(
                    "Error replacing pointer %s with %s." % (path, key))
    return anymarkup.serialize(doc_tree, format="json")
def use_jsonpointer(doc, pointer):
    """Resolve *pointer* inside *doc* and return the value formatted for Robot.

    ``doc`` may be a dict or the string repr of one.  The sentinel strings
    '0' and 'None' map to "0" and 'null'.  String results are wrapped in
    double quotes, None becomes 'null', and other values are stringified
    with single quotes converted to double quotes.
    """
    import ast  # local: only needed when doc arrives as a string

    logger.info('The doc\'s type is %s', type(doc))
    logger.info('The doc is %s', doc)
    logger.info('The pointer\'s type is %s', type(pointer))
    logger.info('The pointer is %s', pointer)
    # jsonpointer.resolve needs a dict and str
    if type(doc) == str and doc != '0':
        if doc == 'None':
            print('The data is null')
            return 'null'
        # SECURITY FIX: ast.literal_eval only parses Python literals,
        # unlike eval() which executed arbitrary code from the input string.
        doc = ast.literal_eval(doc)  # Change str to dict
    elif type(doc) == dict:
        pass
    else:
        print('The parameter is \'0\'')
        doc_code = "0"
        return doc_code
    print('The doc\'s type after change is', type(doc))
    # Resolve once and reuse (the original re-resolved up to three times).
    pointer_in_doc = jsonpointer.resolve_pointer(doc, pointer)
    print('The data\'s type after filtering is', type(pointer_in_doc))
    print('The data\'s type after filtering is', pointer_in_doc)
    # If the data returned from jsonpointer is a string, quote it for Robot.
    if type(pointer_in_doc) == str:
        print('Change Python\'s string to Robot\'s string')
        print('The pointer_in_doc is str and it\'s', pointer_in_doc)
        pointer_in_doc = '\"' + pointer_in_doc + '\"'
        print('The pointer_in_doc is str and it\'s', pointer_in_doc)
    # If the data returned from jsonpointer is null, return 'null'.
    elif pointer_in_doc == None:
        print('It\'s Right!!!')
        return 'null'
    else:
        pointer_in_doc = str(pointer_in_doc)
    print('The data\'s type after filtering and change is', type(pointer_in_doc))
    print('The data finally is', pointer_in_doc.replace('\'', '\"'))
    pointer_in_doc = pointer_in_doc.replace('\'', '\"')
    logger.info('The fianl doc\'s type is %s', type(pointer_in_doc))
    logger.info('The fianl doc is %s', pointer_in_doc)
    return pointer_in_doc
def get_json_value(self, json_string, json_pointer, stringify=True):
    """
    Get the target node of the JSON document `json_string` specified by
    `json_pointer`.

    `stringify` specifies whether JSON data should be transformed to string
    before assertion.

    Example:
    | ${result}=       | Get Json Value | {"foo": {"bar": [1,2,3]}} | /foo/bar |
    | Should Be Equal  | ${result}      | [1, 2, 3]                 |          |
    """
    document = load_json(json_string)
    resolved = jsonpointer.resolve_pointer(document, json_pointer)
    if stringify:
        return json.dumps(resolved, ensure_ascii=False)
    return resolved
def validate_reference_pointer(reference, context, **kwargs):
    """Reject references with any non-fragment component, then verify the
    fragment resolves in *context* as a JSON pointer.

    :raises ValidationError: for unsupported or unresolvable references.
    """
    parts = urlparse.urlparse(reference)
    has_extras = any(
        (parts.scheme, parts.netloc, parts.path, parts.params, parts.query))
    if has_extras:
        raise ValidationError(
            MESSAGES['reference']['unsupported'].format(reference),
        )
    try:
        jsonpointer.resolve_pointer(context, parts.fragment)
    except jsonpointer.JsonPointerException:
        raise ValidationError(
            MESSAGES['reference']['undefined'].format(reference),
        )
def test_pointers_exist_in_generated(self):
    ''' Check pointers are valid in generated data. '''
    # A bogus pointer must raise, proving lookups are actually checked.
    with self.assertRaises(JsonPointerException):
        resolve_pointer(self.generated_data, '/foo/bar')
    resolved = [
        resolve_pointer(self.generated_data, pointer)
        for pointer in self.pointers
    ]
    # Idiom fix: isinstance replaces `type(...) == list`.
    self.assertTrue(all(isinstance(data, list) for data in resolved))
def __init__(self, reference, context):
    """Store *reference* and *context*, eagerly resolving the fragment so
    broken pointers fail at construction time.
    """
    if self.validators_constructor is None:
        raise NotImplementedError(
            "Subclasses of LazyReferenceValidator must specify a "
            "`validators_constructor` function")
    fragment = urlparse.urlparse(reference).fragment
    self.reference_fragment = fragment
    # TODO: something better than this which potentially raises a
    # JsonPointerException
    jsonpointer.resolve_pointer(context, fragment)
    self.reference = reference
    self.context = context
def main(args, config):
    """Extract synonyms for the domain vocabulary and save them with numpy.

    Similarity threshold and limit are read from *config* via JSON pointers,
    with defaults of 0.95 and 10 respectively.
    """
    word_file = args.input[0]
    text_file = args.input[1]
    model_file = args.input[2]
    output_file = args.output[0]
    threshold = resolve_pointer(
        config, "/Hensyugoi/SynonymExtraction/SimilarityThreshold", 0.95)
    limit = resolve_pointer(
        config, "/Hensyugoi/SynonymExtraction/SimilarityLimit", 10)
    result = synonymous(
        word_file,
        text_file,
        model_file,
        syn_threshold=threshold,
        syn_limit=limit)
    np.save(output_file, result, allow_pickle=True)
def test_filter(self):
    # Exercises the extended "[@key=value,...]" filter syntax in pointers.
    doc = {
        "complex": [
            {"idi": 9, "ids": "9"},
            {"idi": 10, "ids": "10"}
        ],
    }
    # Whole list and plain numeric index still behave normally.
    self.assertEqual(resolve_pointer(doc, "/complex"), [
        {"idi": 9, "ids": "9"},
        {"idi": 10, "ids": "10"}
    ])
    self.assertEqual(resolve_pointer(doc, "/complex/1"),
                     {"idi": 10, "ids": "10"})
    # Filter on a single attribute.
    self.assertEqual(resolve_pointer(doc, "/complex/[@idi=10]"),
                     {"idi": 10, "ids": "10"})
    # Filter on multiple attributes (int and quoted string).
    self.assertEqual(resolve_pointer(doc, "/complex/[@idi=10,@ids='10']"),
                     {"idi": 10, "ids": "10"})
    # malformed filter (brackets missing around the predicate)
    self.assertRaises(JsonPointerException, resolve_pointer, doc,
                      "/complex/@idi=10,@ids='10'")
    # not found
    # NOTE(review): the leading ']' in this pointer looks like a typo, but
    # it still makes resolution fail as the test expects — confirm intent.
    self.assertRaises(JsonPointerException, resolve_pointer, doc,
                      "]/complex/[@idi=10,@ids='20']")
def test_extensions(self):
    """Wildcard '*' segments fan out across list elements."""
    doc = {"foo": [{"name": "bar"}, {"name": "baz"}]}
    everything = resolve_pointer(doc, "/foo/*")
    self.assertEqual(everything, [{"name": "bar"}, {"name": "baz"}])
    names = resolve_pointer(doc, "/foo/*/name")
    self.assertEqual(names, ["bar", "baz"])
    # Elements lacking the requested key are simply skipped.
    doc = {"foo": [{"foo": "bar"}, {"bar": "baz"}]}
    self.assertEqual(resolve_pointer(doc, "/foo/*/bar"), ["baz"])
def __init__(self, reference, context):
    """Parse *reference*, fail fast if its fragment does not resolve in
    *context*, then store both for later validation.
    """
    if self.validators_constructor is None:
        raise NotImplementedError(
            "Subclasses of LazyReferenceValidator must specify a "
            "`validators_constructor` function"
        )
    parsed = urlparse.urlparse(reference)
    self.reference_fragment = parsed.fragment
    # TODO: something better than this which potentially raises a
    # JsonPointerException
    jsonpointer.resolve_pointer(context, self.reference_fragment)
    self.reference = reference
    self.context = context
def validate_reference(reference, context, **kwargs):
    """Resolve *reference*, loading its path component when present, and
    raise ValidationError if the fragment cannot be found.
    """
    parts = urlparse.urlparse(reference)
    if parts.path:
        from flex.core import load_source
        if parts.path.startswith('/'):
            # Absolute path: load that document as the new context.
            context = load_source(parts.path)
        elif 'base_path' in kwargs:
            # Relative path: resolve it against the caller's base path.
            context = load_source(
                os.path.join(kwargs['base_path'], parts.path))
    try:
        jsonpointer.resolve_pointer(context, parts.fragment)
    except jsonpointer.JsonPointerException:
        raise ValidationError(
            MESSAGES['reference']['undefined'].format(reference))
def print_diff(dbdict, data, removes=True): """ Print a (hopefully) human readable list of changes. """ # TODO: needs work, especially on multiline properties, # empty properties (should probably never be allowed but still) # and probably more corner cases. Also the output format could # use some tweaking. try: from collections import defaultdict import jsonpatch from jsonpointer import resolve_pointer, JsonPointerException ops = defaultdict(int) diff = jsonpatch.make_patch(dbdict, data) for d in diff: try: ptr = " > ".join(decode_pointer(d["path"])) if d["op"] == "replace": print(yellow("REPLACE:")) print(yellow(ptr)) db_value = resolve_pointer(dbdict, d["path"]) print(red(dump_value(db_value))) print(green(dump_value(d["value"]))) ops["replace"] += 1 if d["op"] == "add": print(green("ADD:")) print(green(ptr)) if d["value"]: print(green(dump_value(d["value"]))) ops["add"] += 1 if removes and d["op"] == "remove": print(red("REMOVE:")) print(red(ptr)) value = resolve_pointer(dbdict, d["path"]) if value: print(red(dump_value(value))) ops["remove"] += 1 except JsonPointerException as e: print(" - Error parsing diff - report this!: %s" % e) # # The following output is a bit misleading, removing for now # print "Total: %d operations (%d replace, %d add, %d remove)" % ( # sum(ops.values()), ops["replace"], ops["add"], ops["remove"]) return diff except ImportError: print(("'jsonpatch' module not available - " "no diff printouts for you! (Try -d instead.)"), file=sys.stderr)
def compute_route_distances(stops_by_name, routes_by_name):
    """Compute, per route, the greatest haversine distance between its
    terminal stops.

    :param stops_by_name: stop name -> stop JSON (with /attributes/latitude
        and /attributes/longitude)
    :param routes_by_name: route long name -> route JSON (with
        /attributes/direction_destinations)
    :return: dict mapping long name -> (distance_km, stop_A, stop_B);
        (0, None, None) when an endpoint cannot be located.
    """
    route_distances_and_endpoints = {}
    for long_name in routes_by_name:
        # BUG FIX: seed every route's entry up front.  The original only
        # initialized it in the "endpoint missing" branch, so the
        # max-comparison below raised KeyError for every resolvable route.
        route_distances_and_endpoints[long_name] = (0, None, None)
        dest_A, dest_B = resolve_pointer(
            routes_by_name[long_name], "/attributes/direction_destinations")
        endpoints_A = _expand_destination(dest_A, stops_by_name)
        endpoints_B = _expand_destination(dest_B, stops_by_name)
        if len(endpoints_A) == 0 or len(endpoints_B) == 0:
            print("Couldn't find {} or {} on the {} route".format(
                dest_A, dest_B, long_name))
            continue
        # Keep the farthest-apart endpoint pair for this route.
        for stop_A in endpoints_A:
            for stop_B in endpoints_B:
                latA = resolve_pointer(
                    stops_by_name[stop_A], "/attributes/latitude")
                lngA = resolve_pointer(
                    stops_by_name[stop_A], "/attributes/longitude")
                latB = resolve_pointer(
                    stops_by_name[stop_B], "/attributes/latitude")
                lngB = resolve_pointer(
                    stops_by_name[stop_B], "/attributes/longitude")
                distance_km = haversine_distance(lngA, latA, lngB, latB)
                if distance_km > route_distances_and_endpoints[long_name][0]:
                    route_distances_and_endpoints[long_name] = (
                        distance_km, stop_A, stop_B)
    return route_distances_and_endpoints


def _expand_destination(dest, stops_by_name):
    """Split compound destinations such as 'Ashmont/Braintree' (two separate
    endpoints of the Red line) into the stop names known to *stops_by_name*;
    otherwise return [dest] unchanged.
    """
    if dest not in stops_by_name and "/" in dest:
        return [name for name in dest.split("/") if name in stops_by_name]
    return [dest]
def apply_pointers(self, content, params):
    """Apply every JSON pointer in *params* to the parsed artifact,
    replacing each target with its parameter name.

    Valid params in Nulecule:

        param1:
          - /spec/containers/0/ports/0/hostPort
          - /spec/containers/0/ports/0/hostPort2

    Args:
        content (str): content of artifact file
        params (dict): list of params with pointers to replace in content

    Returns:
        str: content with replaced pointers

    Todo:
        In the future we need to change this to detect haml, yaml, etc as we
        add more providers.
        Blocked by: github.com/bkabrda/anymarkup-core/blob/master/anymarkup_core/__init__.py#L393
    """
    parsed = anymarkup.parse(content)
    if type(parsed) != dict:
        logger.debug("Artifact file not json/haml, assuming it's $VARIABLE substitution")
        return content
    if params is None:
        # Nothing to do here!
        return content
    for param_name, pointer_list in params.items():
        if not pointer_list:
            logger.warning("Could not find pointer for %s" % param_name)
            continue
        for ptr in pointer_list:
            try:
                # Probe so a missing path raises before any mutation.
                resolve_pointer(parsed, ptr)
                set_pointer(parsed, ptr, param_name)
                logger.debug("Replaced %s pointer with %s param" % (ptr, param_name))
            except JsonPointerException:
                logger.debug("Error replacing %s with %s" % (ptr, param_name))
                logger.debug("Artifact content: %s", parsed)
                raise NuleculeException("Error replacing pointer %s with %s." % (ptr, param_name))
    return anymarkup.serialize(parsed, format="json")
def validate_deferred_references(schema, context, **kwargs):
    """Check every deferred reference against *schema*, accumulating errors.

    References with any component other than a fragment are unsupported;
    fragments must resolve as JSON pointers inside *schema*.

    :raises KeyError: if ``context`` lacks a 'deferred_references' entry.
    """
    try:
        deferred_references = context["deferred_references"]
    except KeyError:
        # BUG FIX: was a bare `except:` that swallowed unrelated errors
        # (including KeyboardInterrupt) and re-raised them as KeyError.
        raise KeyError("`deferred_references` not found in context")
    with ErrorDict() as errors:
        for reference in deferred_references:
            parts = urlparse.urlparse(reference)
            if any((parts.scheme, parts.netloc, parts.path, parts.params,
                    parts.query)):
                errors.add_error(
                    reference,
                    MESSAGES["reference"]["unsupported"].format(reference))
                continue
            try:
                jsonpointer.resolve_pointer(schema, parts.fragment)
            except jsonpointer.JsonPointerException:
                errors.add_error(
                    reference,
                    MESSAGES["reference"]["undefined"].format(reference))
def __init__(self, errors):
    '''Initialise tree from *errors* list.

    Builds a nested dict keyed by path segment; each leaf holds the error
    message, and a node that is both a leaf and a branch stores its own
    message under '__self__'.
    '''
    tree = {}
    # Process deepest paths first so leaf messages are placed before any
    # shallower error could overwrite an existing subtree.
    for error in sorted(
        errors, key=lambda item: len(list(item.path)), reverse=True
    ):
        branch = tree
        path = list(error.path)
        path.insert(0, '__root__')

        if error.validator == 'required':
            # Required is set one level above so have to retrieve final
            # path segment.
            schema_path = '/' + '/'.join(map(str, error.schema_path))
            segment = jsonpointer.resolve_pointer(
                error.schema, schema_path
            )
            path.append(segment)

        # Create/descend intermediate branches for all but the last segment.
        for segment in path[:-1]:
            branch = branch.setdefault(segment, {})

        if path[-1] in branch and isinstance(branch[path[-1]], Mapping):
            # A subtree already exists here; attach the message to the node
            # itself rather than clobbering its children.
            branch[path[-1]]['__self__'] = error.message
        else:
            branch[path[-1]] = error.message

    self._tree = tree.get('__root__', {})
def get(self, request, pk, pointer, model_class=None, field_name=None):
    """Serve an URI-stored (in a JSON field) image as a file.

    :param request:
    :param pk: the pk of the instance
    :param pointer: the json pointer of the image, e.g. "people/image" or
        "people/0/image"
    :param model_class: the model that holds the JSON field (falls back to
        ``self.model_class``)
    :param field_name: the JSON-field name (falls back to
        ``self.field_name``)
    :return: HttpResponse with the image
    :raises Http404: when the pointer or data URI is invalid
    """
    model = model_class or self.model_class
    obj = get_object_or_404(model, pk=pk)
    data = getattr(obj, field_name or self.field_name)
    try:
        resolved = resolve_pointer(data, "/{}".format(pointer))
        uri = DataURI(resolved)
        return HttpResponse(uri.data, content_type=uri.mimetype)
    except (InvalidDataURI, JsonPointerException):
        raise Http404("Image not found")
def test_eol(self):
    """'-' resolves to the end-of-list marker and cannot be traversed."""
    doc = {"foo": ["bar", "baz"]}
    end_marker = resolve_pointer(doc, "/foo/-")
    self.assertTrue(isinstance(end_marker, EndOfList))
    # Indexing past the end-of-list marker must fail.
    self.assertRaises(JsonPointerException, resolve_pointer, doc, "/foo/-/1")
def condition(self):
    """Return condition value.

    Resolves the 'condition' pointer from ``self.validator_value`` against
    ``self.instance``; yields True when the pointer does not resolve.
    """
    return jsonpointer.resolve_pointer(
        # TODO get default value from schema
        self.instance, self.validator_value['condition'], True)
def resolve_string(string, document, max_recursion=100):
    """
    Resolve a string value using the given document as context.

    :param (str|unicode) string: The string to resolve pointers inside of.
    :param (dict|list) document: A loaded json document.
    :param (int) max_recursion: Prevents cyclic references. Increase if needed.
    :raises (CycleDetected): If a cycle is detected while resolving the string.
    :return: (str|unicode) The resolved string.
    """
    recursions = 0
    # Repeat until a full pass produces no further references (substituted
    # values may themselves contain references).
    while True:
        if recursions > max_recursion:
            # NOTE(review): docstring says CycleDetected but this raises
            # MaxRecursionLimit — confirm which name is intended.
            raise MaxRecursionLimit(max_recursion)
        recursions += 1
        references = get_references(string)
        if not references:
            break
        # Substitutions change the string length, so track a running offset
        # to keep the remaining references' recorded indices valid.
        offset = 0
        for reference in references:
            pointer = reference['pointer']
            start, stop = reference['indices']
            value = jsonpointer.resolve_pointer(document, pointer)
            # `basestring` implies this module targets Python 2.
            if not isinstance(value, basestring):
                raise ValueError('Variables must be strings.')
            string = string[:start + offset] + value + string[stop + offset:]
            offset += len(value) - (stop - start)
    return string
def test_get_tender_versioned(self):
    """Create a tender, then replay each stored revision through the
    /historical endpoint, verifying hash/version headers and patched
    fields against the revision data."""
    response = self.app.get('/tenders')
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(len(response.json['data']), 0)
    response = self.app.post_json('/tenders', {'data': test_tender_data})
    self.assertEqual(response.status, '201 Created')
    tender = response.json['data']
    self.tender_id = tender['id']
    self._update_doc()
    doc = test_data_with_revisions.copy()
    revisions = doc.pop('revisions')
    # Skip revision 0: the initial document has no predecessor to diff.
    for i, rev in list(enumerate(revisions))[1:]:
        path = '/tenders/{}/historical'.format(self.tender_id)
        response = self.app.get(path, headers={VERSION: str(i)})
        tender = response.json['data']
        headers = response.headers
        # Hash/version headers must describe this revision and its parent.
        self.assertEqual(headers[HASH], parse_hash(rev['rev']))
        self.assertEqual(headers[VERSION], str(i))
        self.assertEqual(headers[PHASH],
                         parse_hash(revisions[i - 1].get('rev', '')))
        for ch in [r for r in rev['changes']]:
            # A 'remove' op should leave the pointer unresolvable, which
            # the default value 'missing' detects below.
            val = ch['value'] if ch['op'] != 'remove' else 'missing'
            # NOTE(review): this `all(...)`-over-filtered-generator skips
            # the assertion only when ch['path'] is a substring of
            # 'next_check' or 'shouldStartAfter' — confirm this filter is
            # what was intended.
            if not all(p for p in ['next_check', 'shouldStartAfter']
                       if ch['path'] in p):
                self.assertEqual(
                    resolve_pointer(tender, ch['path'], 'missing'), val)
        if rev['author'] != 'chronograph':
            if any('bids' in c['path'] for c in rev['changes']):
                self.assertNotEqual(tender['dateModified'], rev['date'])
def dereference_reference(reference, context):
    """Return the value that *reference*'s fragment points to in *context*.

    :raises ValueError: if the reference has any component besides a
        fragment (scheme, netloc, path, params, or query).
    """
    parts = urlparse.urlparse(reference)
    unsupported_parts = (
        parts.scheme, parts.netloc, parts.path, parts.params, parts.query)
    if any(unsupported_parts):
        raise ValueError(
            MESSAGES['reference']['unsupported'].format(reference),
        )
    return jsonpointer.resolve_pointer(context, parts.fragment)
def get_schema_from_uri(uri):
    """Get and parse a JSON schema from the given URI.

    Internal schemas are loaded from the file system. Caching may apply for
    internal and external schemas. JSON pointers given as fragment of the
    URI are supported.
    """
    # split the fragment
    uri_parsed = urlparse.urlparse(uri)

    # 1. internal resource?
    base = url_for('jsonschemas.schema', path='', _external=True)
    base_parsed = urlparse.urlparse(base)
    is_internal = (
        base_parsed.scheme == uri_parsed.scheme
        and base_parsed.netloc == uri_parsed.netloc
        and uri_parsed.path.startswith(base_parsed.path)
    )
    if is_internal:
        internal_path = uri_parsed.path.split(base_parsed.path, 1)[1]
        data = get_schema_data(internal_path)
        return resolve_pointer(data, uri_parsed.fragment)

    # 2. external resource
    # FIXME support whitelisting of secure location
    raise InsecureSchemaLocation(
        'Requested schema located on insecure location: ' + uri
    )
def run(self):
    """Render a JSON Schema (from an argument file or inline content) as
    documentation nodes, honouring the include/collapse/pointer options."""
    include = self.options.get('include')
    if include:
        self.include = include.split(',')
    collapse = self.options.get('collapse')
    if collapse:
        self.collapse = collapse.split(',')
    env = self.state.document.settings.env
    try:
        if self.arguments and self.content:
            raise self.warning('both argument and content. it is invalid')
        if self.arguments:
            # Argument form: load the schema from a file relative to the
            # current document and register it as a build dependency.
            dirname = os.path.dirname(env.doc2path(env.docname, base=None))
            relpath = os.path.join(dirname, self.arguments[0])
            abspath = os.path.join(env.srcdir, relpath)
            if not os.access(abspath, os.R_OK):
                raise self.warning('JSON Schema file not readable: %s' %
                                   self.arguments[0])
            env.note_dependency(relpath)
            schema = JSONSchema.loadfromfile(abspath)
        else:
            # NOTE(review): inline content is joined and passed to
            # loadfromfile, whose name suggests it expects a path —
            # confirm it also accepts raw schema text.
            schema = JSONSchema.loadfromfile(''.join(self.content))
    except ValueError as exc:
        raise self.error('Failed to parse JSON Schema: %s' % exc)
    if self.options.get('pointer'):
        # Narrow the schema to the sub-tree addressed by :pointer:.
        schema = JSONSchema.instantiate(
            None,
            resolve_pointer(schema.attributes, self.options.get('pointer')))
    return self.make_nodes(schema)
def run(self):
    """Load a JSON file, narrow it with :jsonpointer:, apply the
    exclude/include_only options and emit the result as a literal JSON
    block tagged with expand/collapse CSS classes."""
    with open(self.arguments[0]) as fp:
        json_obj = json.load(fp, object_pairs_hook=OrderedDict)
    filename = str(self.arguments[0]).split("/")[-1].replace(".json", "")
    try:
        title = self.options['title']
    except KeyError as e:
        # Fall back to the bare file name when no title option is given.
        title = filename
    pointed = resolve_pointer(json_obj, self.options['jsonpointer'])
    # Remove the items mentioned in exclude
    if (self.options.get('exclude')):
        for item in self.options['exclude'].split(","):
            try:
                del pointed[item.strip()]
            except KeyError as e:
                pass
    if (self.options.get('include_only')):
        # NOTE(review): include_only is used as a raw option string, so the
        # `in` test below is a substring match rather than exact list
        # membership — confirm this is intended.
        for node in list(pointed):
            if not (node in self.options.get('include_only')):
                del pointed[node]
    code = json.dumps(pointed, indent='    ')
    # Ideally we would add the below to a data-expand element, but I can't see how to do this, so using classes for now...
    class_list = self.options.get('class', [])
    class_list.append('file-' + title)
    expand = str(self.options.get("expand", "")).split(",")
    class_list = class_list + [
        'expandjson expand-{0}'.format(s.strip()) for s in expand
    ]
    literal = nodes.literal_block(code, code, classes=class_list)
    literal['language'] = 'json'
    return [literal]
def append_obj_revision(request, obj, patch, date):
    """Append a revision for *obj*, synchronising the 'date' field of any
    nested object whose status is replaced by *patch*."""
    # Ops that flip a status: {"op": "replace", "path": ".../status"}.
    status_changes = [
        p for p in patch
        if all([p["path"].endswith("/status"), p["op"] == "replace"])
    ]
    changed_obj = obj
    for change in status_changes:
        # Resolve the parent object whose status changed.
        changed_obj = resolve_pointer(
            obj, change["path"].replace("/status", ""))
        if changed_obj and hasattr(changed_obj, "date") and hasattr(
                changed_obj, "revisions"):
            date_path = change["path"].replace("/status", "/date")
            if changed_obj.date and not any(
                    [p for p in patch if date_path == p["path"]]):
                # Record the previous date alongside the status change
                # (unless the patch already touches that date path).
                patch.append({
                    "op": "replace",
                    "path": date_path,
                    "value": changed_obj.date.isoformat()
                })
            elif not changed_obj.date:
                patch.append({"op": "remove", "path": date_path})
            changed_obj.date = date
        else:
            # Target lacks date/revisions bookkeeping; revert to obj itself.
            changed_obj = obj
    return append_revision(request, changed_obj, patch)
def evaluate_clause(self, clause, target):
    """Evaluate a single filter *clause* against *target*.

    ``clause['field']`` is a JSON pointer into *target*; the resolved value
    is compared with ``clause['value']`` using the operator named by
    ``clause['operator']`` (looked up via ``self.operators``).

    :return: False when the field does not resolve, otherwise the boolean
        result of the comparison.
    """
    field_value = resolve_pointer(target, clause['field'], None)
    if field_value is None:
        return False
    # IDIOM FIX: renamed `reversed` -> `reversed_order`; the original name
    # shadowed the `reversed` builtin (the sibling extended implementation
    # in this file already uses `reversed_order`).
    reversed_order = False
    # Determining operator order
    # Normal order: field_value, clause['value'] (i.e. condition created > 2000.01.01)
    # Here clause['value'] = '2001.01.01'. The field_value is target['created']
    # So the natural order is: ge(field_value, clause['value']
    # But!
    # Reversed operator order for contains (b in a)
    if clause['operator'] == 'one_of' or clause['operator'] == 'matches':
        reversed_order = True
        # But not in every case. (i.e. tags matches 'b')
        # Here field_value is a list, because an annotation can have many tags
        # And clause['value'] is 'b'
        if type(field_value) is list:
            reversed_order = False
    op = getattr(operator, self.operators[clause['operator']])
    if reversed_order:
        return op(clause['value'], field_value)
    return op(field_value, clause['value'])
def find(self, path, default=None):
    """ Retrieves a single value using JSON-Pointer syntax """
    value = resolve_pointer(self.__d, path, default)
    # Wrap nested mappings so callers keep the TDict convenience API.
    return TDict(value) if isinstance(value, dict) else value
def _transform_string(self, val, doc):
    """Resolve pointer *val* inside *doc*; a missing path yields None,
    any other pointer failure propagates.
    """
    try:
        return resolve_pointer(doc, val)
    except JsonPointerException as exc:
        # jsonpointer has no dedicated "not found" exception type, so the
        # message text is the only way to tell missing paths apart.
        message = exc.args[0]
        if 'not found in' in message or 'is not a valid list index' in message:
            return None
        raise exc
def evaluate_clause(self, clause, target):
    """Evaluate one filter clause against *target*.

    ``clause['field']`` may be a single JSON Pointer or a list of them;
    a list means the clause is true if it matches on any field.  Honors
    the optional 'case_sensitive' flag (default True) and swaps the
    operator arguments for containment operators where needed.

    :return: boolean result; False when no field resolves
    """
    # A list of fields means "true if any field matches": recurse with a
    # single-field copy of the clause per entry.
    if isinstance(clause['field'], list):
        for single_field in clause['field']:
            sub_clause = copy.deepcopy(clause)
            sub_clause['field'] = single_field
            if self.evaluate_clause(sub_clause, target):
                return True
        return False

    field_value = resolve_pointer(target, clause['field'], None)
    if field_value is None:
        return False

    def lowered(value):
        # Lower-case a string, or each string of a list, for
        # case-insensitive comparison.
        if type(value) is list:
            return [item.lower() for item in value]
        return value.lower()

    # pylint: disable=maybe-no-member
    if clause.get('case_sensitive', True):
        clause_val = clause['value']
        field_val = field_value
    else:
        clause_val = lowered(clause['value'])
        field_val = lowered(field_value)
    # pylint: enable=maybe-no-member

    # Determining operator order.
    # Normal order: field_value, clause['value']
    # (i.e. condition created > 2000.01.01), so ge(field_value, clause value).
    # But containment operators (one_of / matches) read "b in a", so the
    # arguments are swapped -- unless the field itself is the list (e.g.
    # "tags matches 'b'", where an annotation can have many tags), in which
    # case the natural order is right after all.
    swap_args = False
    if type(clause_val) is list or type(field_val) is list:
        if clause['operator'] in ['one_of', 'matches']:
            swap_args = type(field_value) is not list

    op = getattr(operator, self.operators[clause['operator']])
    if swap_args:
        return op(clause_val, field_val)
    return op(field_val, clause_val)
def get_json_value(self, json_string, json_pointer):
    """
    Get the target node of the JSON document `json_string` specified by `json_pointer`.

    Example:
    | ${result}=       | Get Json Value | {"foo": {"bar": [1,2,3]}} | /foo/bar |
    | Should Be Equal  | ${result}      | [1, 2, 3]                 |          |
    """
    import json
    # Fix: resolve_pointer walks a *parsed* document, but this keyword is
    # documented (see the example above) as taking raw JSON text.  Parse
    # strings first; accept an already-parsed document for backward
    # compatibility with callers that passed one.
    document = json.loads(json_string) if isinstance(json_string, str) else json_string
    return jsonpointer.resolve_pointer(document, json_pointer)
def assert_json_pointer_value(context, pointer, value):
    """
    assertion on data value.

    :param context: behave context
    :param pointer: json pointer https://tools.ietf.org/html/rfc6901
    :param value: value of the data in string format
    :raises: AssertionError
    """
    actual = resolve_pointer(context.response_object, pointer)
    assert value == actual, 'expected {} got {}'.format(value, actual)
def test_datarep_getitem(mock_service):
    """Indexing a DataRep yields a fragment bound to the matching pointer."""
    root = datarep.DataRep.from_schema(
        service=mock_service,
        uri=ANY_URI,
        jsonschema=ANY_DATA_SCHEMA,
        data=ANY_DATA,
    )

    fragment = root['a'][2]

    # The fragment's schema, data and root must all line up with the
    # pointer derived from the indexing above.
    assert fragment.jsonschema == root.jsonschema.by_pointer(ANY_FRAGMENT_PTR)
    assert fragment._data is datarep.DataRep.FRAGMENT
    assert fragment.data == resolve_pointer(root.data, ANY_FRAGMENT_PTR)
    assert fragment.root == root
def test_datarep_data_getter_first_access_fragment(any_datarep_fragment):
    """First ``.data`` access on a fragment pulls the root, not the fragment."""
    root = any_datarep_fragment.root

    def fake_root_pull():
        # Simulate a successful pull populating the root's data.
        root._data = ANY_DATA

    root.pull = mock.Mock(side_effect=fake_root_pull)
    any_datarep_fragment.pull = mock.Mock()

    result = any_datarep_fragment.data

    assert not any_datarep_fragment.pull.called
    root.pull.assert_called_once_with()
    assert result == resolve_pointer(ANY_DATA, ANY_FRAGMENT_PTR)
def __get__(self, obj, objtype=None):
    """Descriptor read: resolve this property's pointer in *obj*'s data.

    :raises CannotLookupData: when the pointer cannot be resolved and the
        property is not configured as graceful
    :return: the deserialized value
    """
    data = self.ensure_valid_data(obj)
    try:
        raw = jsonpointer.resolve_pointer(
            data, self.pointer, default=JSONBProperty.UNDEFINED)
    except jsonpointer.JsonPointerException:
        # TODO: update jsonpointer to give more specific errors; this could
        # be something else besides "member not found".
        if not self.is_graceful():
            raise CannotLookupData("Could not find {} on data {}".format(self.pointer, obj))
        raw = self.graceful
    return self.converter.deserialize(raw)
def assert_json_pointer_type(context, pointer, type_name):
    """
    assertion on data type.

    :param context: behave context
    :param pointer: json pointer https://tools.ietf.org/html/rfc6901
    :param type_name: name of the type
    :raises: AssertionError
    """
    resolved = resolve_pointer(context.response_object, pointer)
    msg = 'expected {} got {}'.format(type_name, resolved)
    # NOTE(review): eval() of a feature-file-supplied name is a code
    # execution hazard if feature files are untrusted; consider a
    # whitelist mapping of allowed type names instead.
    assert isinstance(resolved, eval(type_name)), msg
def prepare_output(document, schema, export_type, slashed_json_pointer):
    """Render the value at *slashed_json_pointer* in *document* for export.

    Strings become unicode, lists become a comma-joined unicode string,
    nested dicts are delegated to ``prepare_file_output``, anything else is
    coerced to unicode.  (Python 2 module: uses basestring/unicode.)
    """
    value = resolve_pointer(document, slashed_json_pointer, default='')
    if isinstance(value, basestring):
        return unicode(value)
    if isinstance(value, list):
        return ", ".join([unicode(item) for item in value])
    if isinstance(value, dict):
        # Nested documents are exported via a linked file keyed by doc id.
        doc_id = document.get("id", "")
        return prepare_file_output(value, schema, export_type, doc_id)
    return unicode(value)
def get_ref(self, jpointer):
    """Resolve a JSON Reference of the form ``<document>#<pointer>``.

    With no document part, the pointer is resolved against
    ``self.root_doc``.  A document part that is an http(s) URL is fetched
    over HTTP; any other non-empty document part is treated as a local
    file path.

    :param jpointer: reference string, e.g. "defs.json#/definitions/foo"
    :return: the value the pointer resolves to
    """
    import json  # parse fetched / loaded documents

    path_segment = jpointer.split("#")
    ref_doc = self.root_doc  # assuming it's referencing root document
    if len(path_segment) > 1:
        doc_path = path_segment.pop(0)
        if doc_path.startswith(("http://", "https://")):
            # Fix: the original handed the raw requests.Response object to
            # resolve_pointer; the body must be parsed first.
            ref_doc = requests.get(doc_path).json()
        elif doc_path != "":
            # Path is not empty and not a URL, treat as file path.
            # Fixes: the original opened an undefined name `root_path`,
            # never closed the file, and passed raw text (not a parsed
            # document) to resolve_pointer.  Assumes the file contains
            # JSON -- TODO confirm against callers.
            with open(doc_path, "r") as f:
                ref_doc = json.load(f)
    return resolve_pointer(ref_doc, path_segment[0])
def update_data(self, data, spec):
    """Push values from *data* into the document's elements.

    For each (json_pointer -> field spec) entry in *spec*, resolves the
    pointer in *data*, formats it with the spec's 'format' callable, and
    writes it to the element selected by 'selector' -- either as the
    attribute named by 'attr' or as the element's text.

    :param data: document the JSON Pointers are resolved against
    :param spec: mapping of JSON Pointer -> {'format', 'selector', 'attr'?}
    """
    # Fix: the original loop rebound the `spec` argument itself
    # (`for jp, spec in spec.items()`), shadowing the dict being iterated.
    for jp, field_spec in spec.items():
        raw = jsonpointer.resolve_pointer(data, jp)
        value = field_spec['format'](raw)
        element = self.find_element(field_spec['selector'])
        if element is None:
            # Selector matched nothing; skip silently as before.
            continue
        if 'attr' in field_spec:
            element.set(field_spec['attr'], value)
        else:
            element.text = value
def test_datarep_getitem_negative_stepped_slice(mock_service):
    """A negative-step slice yields fragments in reversed pointer order."""
    root = datarep.DataRep.from_schema(
        service=mock_service,
        uri=ANY_URI,
        jsonschema=ANY_DATA_SCHEMA,
        data=ANY_DATA,
    )

    fragments = root['a'][-1:-4:-2]
    assert len(fragments) == 2

    expected_pointers = ('/a/2', '/a/0')
    for fragment, pointer in zip(fragments, expected_pointers):
        assert fragment.jsonschema == root.jsonschema.by_pointer(pointer)
        assert fragment._data == datarep.DataRep.FRAGMENT
        assert fragment.data == resolve_pointer(root.data, pointer)
        assert fragment.root == root
def test_alttypes(self):
    """With JsonPointer.alttypes on, pointers traverse arbitrary objects
    via __getitem__/__setitem__, and set_pointer honors `inplace`."""
    JsonPointer.alttypes = True

    class Node(object):
        """Binary-tree node exposing 'left'/'right' as item access."""

        def __init__(self, name, parent=None):
            self.name = name
            self.parent = parent
            self.left = None
            self.right = None

        def set_left(self, node):
            node.parent = self
            self.left = node

        def set_right(self, node):
            node.parent = self
            self.right = node

        def __getitem__(self, key):
            if key == 'left':
                return self.left
            if key == 'right':
                return self.right
            raise KeyError("Only left and right supported")

        def __setitem__(self, key, val):
            if key == 'left':
                return self.set_left(val)
            if key == 'right':
                return self.set_right(val)
            raise KeyError("Only left and right supported: %s" % key)

    # Build:        root
    #             a      b
    #           aa ab  ba bb
    root = Node('root')
    root.set_left(Node('a'))
    root.left.set_left(Node('aa'))
    root.left.set_right(Node('ab'))
    root.set_right(Node('b'))
    root.right.set_left(Node('ba'))
    root.right.set_right(Node('bb'))

    for pointer, expected in (('/left', 'a'), ('/left/right', 'ab'),
                              ('/right', 'b'), ('/right/left', 'ba')):
        self.assertEqual(resolve_pointer(root, pointer).name, expected)

    # Out-of-place set returns a modified copy; the original is untouched.
    newroot = set_pointer(root, '/left/right', Node('AB'), inplace=False)
    self.assertEqual(resolve_pointer(root, '/left/right').name, 'ab')
    self.assertEqual(resolve_pointer(newroot, '/left/right').name, 'AB')

    # In-place set mutates the original tree.
    set_pointer(root, '/left/right', Node('AB'))
    self.assertEqual(resolve_pointer(root, '/left/right').name, 'AB')
def get_version_diff(from_data, to_data):
    """Calculate the diff (a mangled JSON patch) between from_data and to_data.

    Produces a list of JSON-patch-like operation dicts, augmented for human
    display: 'replace'/'remove' operations carry a 'previous_value', empty
    replacements are converted to 'add' or dropped, standing_in /
    party_memberships paths get extra explanation, and the leading '/' is
    stripped from every returned path.

    :param from_data: earlier version of the document
    :param to_data: later version of the document
    :return: list of operation dicts
    """
    basic_patch = jsonpatch.make_patch(from_data, to_data)
    result = []
    # Sort for a stable, readable ordering of operations.
    for operation in sorted(basic_patch, key=lambda o: (o['op'], o['path'])):
        op = operation['op']
        ignore = False
        # We deal with standing_in and party_memberships slightly
        # differently so they can be presented in human-readable form,
        # so match those cases first:
        m = re.search(
            r'(standing_in|party_memberships)(?:/([^/]+))?(?:/(\w+))?',
            operation['path'],
        )
        if op in ('replace', 'remove'):
            # Remember what the operation overwrote/removed, for display.
            operation['previous_value'] = \
                jsonpointer.resolve_pointer(
                    from_data,
                    operation['path'],
                    default=None
                )
        attribute, election, leaf = m.groups() if m else (None, None, None)
        if attribute:
            explain_standing_in_and_party_memberships(operation, attribute,
                                                      election, leaf)
        if op in ('replace', 'remove'):
            if op == 'replace' and not operation['previous_value']:
                if operation['value']:
                    # Replacing "nothing" with data reads better as an add.
                    operation['op'] = 'add'
                else:
                    # Ignore replacing no data with no data:
                    ignore = True
        elif op == 'add':
            # It's important that we don't skip the case where a
            # standing_in value is being set to None, because that's
            # saying 'we *know* they're not standing then'
            if (not operation['value']) and (attribute != 'standing_in'):
                ignore = True
        # Strip the leading '/' for display purposes.
        operation['path'] = re.sub(r'^/', '', operation['path'])
        if not ignore:
            result.append(operation)
        # The operations generated by jsonpatch are incremental, so we
        # need to apply each before going on to parse the next:
        operation['path'] = '/' + operation['path']
        from_data = jsonpatch.apply_patch(from_data, [operation])
    # Re-strip: applying the patch above required the leading '/' back.
    for operation in result:
        operation['path'] = operation['path'].lstrip('/')
    return result
def getConfigValue(self, conf_key):
    """Look up *conf_key* in the loaded config.

    The key is first treated as an RFC 6901 JSON Pointer; when that fails,
    it is retried as a legacy dot-separated path.  Returns None when the
    legacy path does not match either.
    """
    self._ensureConfig()
    # jsonpointer, pip install jsonpointer, BSD 3 Clause
    import jsonpointer
    try:
        return jsonpointer.resolve_pointer(self.config, conf_key)
    except jsonpointer.JsonPointerException:
        # fall back to legacy dot-separated pointers
        node = self.config
        for segment in conf_key.split('.'):
            if segment not in node:
                return None
            node = node[segment]
        return node
def _complete_href_links(self, parent_collection, current):
    """Resolve self hyperlinks (JSONPath and JSON Pointers).

    Walks *current* recursively.  Any HyperLink (or dict with an "href")
    whose href is a "#..." JSON Pointer or a "$..." JSONPath is resolved
    against *parent_collection* and rewritten to the resolved resource's
    "selfRef"; resolutions are memoized in ``self._cache``.

    :param parent_collection: document hrefs are resolved against
    :param current: node being processed (HyperLink/dict/list/other)
    :return: 0 on success, or the negative result of a failed
        ``set_self_ref`` call
    """
    if isinstance(current, HyperLink) or \
            (isinstance(current, dict) and "href" in current):
        # Only string hrefs can be resolved (Python 2: unicode or str).
        if isinstance(current["href"], (unicode, str)):
            resource = None
            if current["href"] in self._cache:
                # Already resolved this href earlier in the walk.
                resource = self._cache[current["href"]]
            elif current["href"].startswith("#"):
                # "#/a/b" style: JSON Pointer into the parent collection.
                resource = jsonpointer.resolve_pointer(parent_collection,
                                                       current["href"][1:])
                if not resource:
                    resource = "Unresolved"
            elif current["href"].startswith("$"):
                # "$..." style: JSONPath query; result_type="PATH" returns a
                # path expression string.
                path = jsonpath(parent_collection, current["href"],
                                result_type="PATH")
                if path:
                    # NOTE(review): eval() of the returned path expression is
                    # a code-execution hazard if hrefs are untrusted —
                    # confirm hrefs are internally generated.
                    resource = eval("parent_collection%s" %
                                    path[0].lstrip("$"))
                else:
                    resource = "Unresolved"
            # Cache even failed lookups ("Unresolved") to avoid re-resolving.
            self._cache[current["href"]] = resource
            if resource and resource != "Unresolved":
                if "selfRef" not in resource:
                    # Resolved resource has no selfRef yet; create one.
                    ret = self.set_self_ref(resource)
                    if ret < 0:
                        return ret
                current["href"] = resource["selfRef"]
        return 0
    elif isinstance(current, list):
        keys = range(len(current))
    elif isinstance(current, dict):
        keys = current.keys()
    else:
        # Scalar leaf: nothing to resolve.
        return 0
    # Recurse into every child container; assign selfRefs to any resource
    # objects encountered along the way.
    for key in keys:
        value = current[key]
        if isinstance(value, (NetworkResource, Topology)) and \
                "selfRef" not in value:
            ret = self.set_self_ref(value)
            if ret < 0:
                return ret
        if isinstance(value, list) or isinstance(value, dict):
            ret = self._complete_href_links(parent_collection, value)
            if ret < 0:
                return ret
    return 0