def jsonpatch_operation_validator(operation_dict):
    """
    Checks that the supplied value is a valid JSON patch operation dictionary.

    A valid operation is a dict with an ``op`` naming one of the six RFC 6902
    operations and a parseable ``path`` pointer, plus a parseable ``from``
    pointer for move/copy and a JSON-serializable ``value`` for
    add/replace/test.

    :raises tk.Invalid: if the value is not a valid operation.
    :returns: the validated operation dict, unchanged.
    """
    # isinstance instead of "type(...) is dict" so dict subclasses validate too
    valid = isinstance(operation_dict, dict) and \
        ({'op', 'path'} <= set(operation_dict.keys())) and \
        (operation_dict['op'] in ('add', 'remove', 'replace', 'move', 'copy', 'test'))
    if valid:
        try:
            jsonpointer.JsonPointer(operation_dict['path'])
        except Exception:
            # bare "except:" would also swallow KeyboardInterrupt/SystemExit
            valid = False
    if valid and (operation_dict['op'] in ('move', 'copy')):
        valid = 'from' in operation_dict
        if valid:
            try:
                jsonpointer.JsonPointer(operation_dict['from'])
            except Exception:
                valid = False
    if valid and (operation_dict['op'] in ('add', 'replace', 'test')):
        valid = 'value' in operation_dict
        if valid:
            try:
                json.dumps(operation_dict['value'])
            except Exception:
                valid = False
    if not valid:
        raise tk.Invalid(_("Invalid JSON patch operation"))
    return operation_dict
def dereference_resource_descriptor(descriptor, base_path, base_descriptor=None):
    """Dereference resource descriptor (IN-PLACE FOR NOW).
    """
    DEREFERENCEABLE = ['schema', 'dialect']
    if base_descriptor is None:
        base_descriptor = descriptor
    for attr in DEREFERENCEABLE:
        uri = descriptor.get(attr)
        # Only string values are URIs that need dereferencing
        if not isinstance(uri, six.string_types):
            continue
        if uri.startswith('#'):
            # Fragment -> resolve as a JSON Pointer into the base descriptor
            try:
                descriptor[attr] = jsonpointer.JsonPointer(uri[1:]).resolve(base_descriptor)
            except Exception as error:
                message = 'Not resolved Pointer URI "%s" for resource.%s' % (uri, attr)
                six.raise_from(
                    exceptions.DataPackageException(message), error
                )
        elif uri.startswith('http'):
            # Remote -> fetch and parse JSON over HTTP
            try:
                response = requests.get(uri)
                response.raise_for_status()
                descriptor[attr] = response.json()
            except Exception as error:
                message = 'Not resolved Remote URI "%s" for resource.%s' % (uri, attr)
                six.raise_from(
                    exceptions.DataPackageException(message), error
                )
        else:
            # Local -> read a JSON file relative to base_path (path-safety first)
            if not is_safe_path(uri):
                raise exceptions.DataPackageException(
                    'Not safe path in Local URI "%s" '
                    'for resource.%s' % (uri, attr))
            if not base_path:
                raise exceptions.DataPackageException(
                    'Local URI "%s" requires base path '
                    'for resource.%s' % (uri, attr))
            fullpath = os.path.join(base_path, uri)
            try:
                with io.open(fullpath, encoding='utf-8') as file:
                    descriptor[attr] = json.load(file)
            except Exception as error:
                message = 'Not resolved Local URI "%s" for resource.%s' % (uri, attr)
                six.raise_from(
                    exceptions.DataPackageException(message), error
                )
    return descriptor
def fillscope(cluster, workflow, nodes_to_connect, scope='', subcluster=True):
    """Recursively populate a graphviz cluster with the steps of a workflow scope.

    :param cluster: parent pydotplus Cluster (or graph) to draw into.
    :param workflow: workflow object providing ``stepsbystage`` and ``dag``.
    :param nodes_to_connect: list mutated in place; every node id drawn is
        appended so the caller can add edges afterwards.
    :param scope: JSON pointer into ``workflow.stepsbystage`` naming the scope.
    :param subcluster: when True, each scope/stage gets its own visual subcluster;
        when False everything is drawn flat into ``cluster``.
    """
    # scopecluster = stagecluster = pydotplus.graphviz.Cluster(graph_name =
    # '_'.join(stagescopeprts),
    if subcluster:
        # one blue solid box per scope, labelled with its pointer parts
        scopecluster = pydotplus.graphviz.Cluster(
            graph_name=scope.replace('/', ''),
            label=''.join(['[{}]'.format(p) for p in scope.split('/')[1:]]),
            style='solid',
            color='blue')
        cluster.add_subgraph(scopecluster)
    else:
        scopecluster = cluster
    scopeptr = jsonpointer.JsonPointer(scope)
    scoped = scopeptr.resolve(workflow.stepsbystage)
    for stage, elements in scoped.items():
        stagescopeprts = scopeptr.parts + [stage]
        if subcluster:
            # grey dashed box per stage inside the scope box
            stagecluster = pydotplus.graphviz.Cluster(
                graph_name='_'.join(stagescopeprts),
                label=stage,
                labeljust='l',
                color='grey',
                style='dashed')
            scopecluster.add_subgraph(stagecluster)
        else:
            stagecluster = scopecluster
        for i, element in enumerate(elements):
            if '_nodeid' in element:
                # element is a concrete DAG node: draw it
                nodes_to_connect.append(element['_nodeid'])
                element = element['_nodeid']
                # init nodes are drawn at scope level, not stage level
                targetcl = stagecluster if stage != 'init' else scopecluster
                shape = 'diamond' if stage in ['init', 'output'] else 'box'
                label = '' if stage in ['init', 'output'] else '{}[{}]'.format(
                    stage, i)
                # init/output markers are drawn as small fixed-size diamonds
                additional = {
                    'fixedsize': True,
                    'height': 0.2,
                    'width': 0.2
                } if stage in ['init', 'output'] else {}
                targetcl.add_node(
                    pydotplus.graphviz.Node(element,
                                            label=label,
                                            color='blue',
                                            shape=shape,
                                            **additional))
                nodeobj = workflow.dag.getNode(element)
                # only attach result details for nodes that have finished
                result = nodeobj.result if nodeobj.ready() else None
                if result:
                    add_result(targetcl, element, result)
            elif type(element) == dict:
                # recurse...
                # element is a nested sub-scope: descend with an extended pointer
                fillscope(stagecluster,
                          workflow,
                          nodes_to_connect,
                          jsonpointer.JsonPointer.from_parts(
                              scopeptr.parts + [stage, i]).path,
                          subcluster=subcluster)
def rule_steps_indices(workflow):
    """Build lookup tables between workflow rules, their steps and subscopes.

    :param workflow: workflow object with ``rules``, ``applied_rules`` and a
        ``stepsbystage`` nested structure addressable by JSON pointers.
    :returns: tuple ``(rule_to_steps_index, steps_to_rule_index,
        rule_to_subscopes_index)``.
    """
    rule_to_steps_index = {}
    steps_to_rule_index = {}
    rule_to_subscopes_index = {}
    for rule in workflow.rules + workflow.applied_rules:
        path = '/'.join([rule.offset, rule.rule.name])
        p = jsonpointer.JsonPointer(path)
        # Resolve the pointer once (the original resolved it twice in two
        # separate try/except blocks); an unresolvable pointer simply means
        # the rule has produced no steps or subscopes yet.
        try:
            scoped = p.resolve(workflow.stepsbystage)
        except jsonpointer.JsonPointerException:
            scoped = []
        steps_of_rule = [x['_nodeid'] for x in scoped if '_nodeid' in x]
        subscopes_of_rule = [x['_offset'] for x in scoped if '_offset' in x]
        rule_to_steps_index[rule.identifier] = steps_of_rule
        rule_to_subscopes_index[rule.identifier] = subscopes_of_rule
        for step in steps_of_rule:
            steps_to_rule_index[step] = rule.identifier
    return rule_to_steps_index, steps_to_rule_index, rule_to_subscopes_index
def _patch(self):
    """
    Patch the json Resource Specification file to correct any issues
    that would prevent code generation.

    This uses jsonpatch with the patches coming in via python files
    (which allows for comments). Each ``scripts/patches/*.py`` module
    must expose a ``patches`` list of RFC 6902 operations.
    """
    import importlib.util
    patch_dir = "scripts/patches"
    # sorted() makes the patch application order deterministic; raw
    # os.listdir order is filesystem-dependent and would make spec
    # generation irreproducible across machines
    for patch_file in sorted(os.listdir(patch_dir)):
        if not patch_file.endswith(".py") or patch_file == "__init__.py":
            continue
        # renamed from "path" so it no longer collides with the patch-op
        # path used in the error handler below
        module_path = os.path.join(patch_dir, patch_file)
        import_spec = importlib.util.spec_from_file_location(
            patch_file[:-3], module_path)
        patch = importlib.util.module_from_spec(import_spec)
        import_spec.loader.exec_module(patch)
        for p in patch.patches:
            try:
                self.spec = jsonpatch.apply_patch(self.spec, [p], in_place=True)
            except jsonpointer.JsonPointerException:
                print(f"jsonpatch error: {p}", file=sys.stderr)
                raise
            except jsonpatch.JsonPatchConflict:
                # on conflict, dump the closest resolvable parent for debugging
                print(f"jsonpatch error: {p}", file=sys.stderr)
                path = p["path"]
                obj = jsonpointer.JsonPointer(path).to_last(self.spec)
                print(f"path: {path}", file=sys.stderr)
                print(f"obj: {obj}", file=sys.stderr)
                raise
def echo(topic, key, format):
    """Echo the config for the given topic and keys.

    :param topic: config topic name to read.
    :param key: iterable of keys; entries starting with "/" are treated as
        JSON pointers, anything else as a top-level key.
    :param format: "list" for flat '"pointer" = value' lines, "json" for
        indented JSON output.
    """
    try:
        cfg = json.loads(a0.Cfg(topic).read().payload)
    except Exception:
        # no config published yet (or unreadable payload): behave as empty
        cfg = {}
    if key:
        queried_cfg = {}
        for k in key:
            if k.startswith("/"):
                # rebuild the nested structure down to the pointed-at leaf
                ptr = jsonpointer.JsonPointer(k)
                cfg_level = queried_cfg
                for part in ptr.parts[:-1]:
                    if part not in cfg_level:
                        cfg_level[part] = {}
                    cfg_level = cfg_level[part]
                ptr.set(queried_cfg, ptr.get(cfg))
            else:
                queried_cfg[k] = cfg.get(k)
        cfg = queried_cfg
    if format == "list":

        def walk(prefix, node):
            # loop variable renamed so it no longer shadows the "key"
            # parameter; isinstance instead of "type(val) == dict" so dict
            # subclasses also recurse
            for child, val in node.items():
                name = f"{prefix}/{child}"
                if isinstance(val, dict):
                    walk(name, val)
                else:
                    print(f'"{name}" = {json.dumps(val)}')

        walk("", cfg)
    elif format == "json":
        print(json.dumps(cfg, indent=2))
def __getitem__(self, key: str) -> jsonpointer.JsonPointer:
    """Return a (cached) JSON Pointer parsed from the given path.

    :param key: The path desired.
    :return: The JsonPointer object from parsing that path.
    """
    # EAFP: hit the cache first, parse and store only on a miss
    try:
        return self.cache[key]
    except KeyError:
        pointer = self.cache[key] = jsonpointer.JsonPointer(key)
        return pointer
def apply(self, obj):
    """Apply an RFC 6902 "copy": duplicate the value at ``from`` to ``path``."""
    source = jsonpointer.JsonPointer(self.operation['from'])
    parent, token = source.to_last(obj)
    # deep-copy so later edits to the source don't alias the copied value
    duplicate = copy.deepcopy(parent[token])
    add = AddOperation({
        'op': 'add',
        'path': self.location,
        'value': duplicate,
    })
    add.apply(obj)
def json_pointer_validator(key, data, errors, context):
    """
    Checks that the value is a valid JSON pointer.

    Empty/missing values pass; invalid pointers are reported via _abort.
    """
    value = data.get(key)
    if value:
        try:
            jsonpointer.JsonPointer(value)
        except jsonpointer.JsonPointerException as e:
            # "except X, e" is Python-2-only syntax and e.message is a
            # Python-2-only attribute; "as e" + str-formatting works on
            # both Python 2.6+ and Python 3
            _abort(errors, key, _("Invalid JSON pointer: %s") % e)
def leafs(self):
    """Yield ``(JsonPointer, value)`` pairs for every leaf of this document.

    A bare scalar root yields a single pair with the empty pointer.
    """
    if isinstance(self.typed(), (list, dict)):
        # jq's leaf_paths produces one parts-list per leaf
        leaf_paths = self.jq("leaf_paths", multiple_output=True).typed()
        for parts in leaf_paths:
            ptr = jsonpointer.JsonPointer.from_parts(parts)
            yield ptr, ptr.get(self.typed())
    else:
        yield jsonpointer.JsonPointer(""), self.typed()
def pointer(x):
    """Normalize *x* into a canonical JSON Pointer path string.

    Accepts a pointer string ("/a/b"), a single part ("a"), or a tuple of
    parts.

    :raises TypeError: for unsupported input types.
    """
    import jsonpointer
    if isinstance(x, str):
        if x.startswith("/"):
            return jsonpointer.JsonPointer(x).path
        # bare part: treat as a one-element tuple of parts
        x = (x, )
    if isinstance(x, tuple):
        return jsonpointer.JsonPointer.from_parts(x).path
    # was: raise BaseException("dunno") -- BaseException escapes ordinary
    # "except Exception" handlers; TypeError is the conventional type here
    raise TypeError("cannot convert %r to a JSON pointer" % (x,))
def test_refs():
    """Round-trip: a ref produced by asrefs() resolves back to the same leaf."""
    import jq
    refs = TypedLeafs(nested_data, datamodel).asrefs()
    assert refs['list_of_things'][0].path == '/list_of_things/0'
    import jsonpointer
    ptr = jsonpointer.JsonPointer('/list_of_things/0')
    leafs = TypedLeafs(nested_data, datamodel)
    assert leafs.resolve_ref(ptr).json() == leafs['list_of_things'][0].json()
def readfromresult(self, pointerpath, trackinputs=None, failsilently=False):
    """Read a value out of this step's result via a JSON pointer.

    :param pointerpath: JSON pointer string into the result document.
    :param trackinputs: optional list; when given, an outputReference for
        this read is appended to it (provenance tracking).
    :param failsilently: when True, return None instead of raising if no
        result is available yet.
    :raises RuntimeError: if the step has no result and failsilently is False.
    """
    if not self.has_result():
        if failsilently:
            return None
        # was: RuntimeError("attempt") -- the message said nothing about
        # what actually failed
        raise RuntimeError(
            "attempt to read from result of step %s which has no result"
            % self.identifier)
    pointer = jsonpointer.JsonPointer(pointerpath)
    if trackinputs is not None:
        trackinputs.append(outputReference(self.identifier, pointer))
    v = self.result.resolve_ref(pointer)
    return v
def leaf_iterator(jsonable):
    """Yield ``(JsonPointer, value)`` for every leaf of *jsonable*.

    A bare scalar yields a single pair with the empty pointer.
    """
    if isinstance(jsonable, (list, dict)):
        # jq's leaf_paths emits one parts-list per leaf of the document
        paths = jq.jq("leaf_paths").transform(jsonable, multiple_output=True)
        for parts in paths:
            ptr = jsonpointer.JsonPointer.from_parts(parts)
            yield ptr, ptr.get(jsonable)
    else:
        yield jsonpointer.JsonPointer(""), jsonable
def expand_refs(obj, root):
    """Recursively replace ``{"$ref": "#/..."}`` nodes with the value they
    point to in *root*.

    Only single-key dicts whose key is "$ref" are treated as references;
    lists and other dicts are expanded element-by-element, and any other
    value is returned unchanged.
    """
    if isinstance(obj, list):
        return [expand_refs(x, root) for x in obj]
    elif isinstance(obj, dict):
        if len(obj) == 1 and "$ref" in obj:
            ptr_str = obj["$ref"]
            # only document-local refs ("#/...") are supported
            assert ptr_str.startswith("#")
            ref = jsonpointer.JsonPointer(ptr_str[1:])
            return expand_refs(ref.get(root), root)
        # dict comprehension instead of dict([...]) -- same result, clearer
        return {k: expand_refs(v, root) for k, v in obj.items()}
    return obj
def update_file_key(self, key):
    """Update file key if condition is valid."""
    if self.condition:
        # condition holds: write the new key at validator_value['file_key']
        jsonpointer.set_pointer(self.instance, self.validator_value['file_key'], key)
    else:
        # condition failed: remove the file_key entry from the instance
        data, part = jsonpointer.JsonPointer(
            self.validator_value['file_key']).to_last(self.instance)
        if data:
            if part is not None:
                del data[part]
            else:
                # NOTE(review): "del data" only unbinds the local name and
                # does not modify self.instance -- this branch looks like a
                # no-op bug for a root-level pointer; confirm intent.
                del data
def test_create_with_pointer(self):
    """JsonPatch should accept JsonPointer objects in path/from fields."""
    operations = [
        {'op': 'add', 'path': jsonpointer.JsonPointer('/foo'), 'value': 'bar'},
        {'op': 'add', 'path': jsonpointer.JsonPointer('/baz'), 'value': [1, 2, 3]},
        {'op': 'remove', 'path': jsonpointer.JsonPointer('/baz/1')},
        {'op': 'test', 'path': jsonpointer.JsonPointer('/baz'), 'value': [1, 3]},
        {'op': 'replace', 'path': jsonpointer.JsonPointer('/baz/0'), 'value': 42},
        {'op': 'remove', 'path': jsonpointer.JsonPointer('/baz/1')},
        {'op': 'move', 'from': jsonpointer.JsonPointer('/foo'),
         'path': jsonpointer.JsonPointer('/bar')},
    ]
    patch = jsonpatch.JsonPatch(operations)
    outcome = patch.apply({})
    self.assertEqual(outcome, {'bar': 'bar', 'baz': [42]})
def apply(self, obj):
    """Apply a "move": relocate the value at ``path`` to ``to`` (in-place)."""
    parent, token = self.pointer.to_last(obj)
    value = parent[token]
    destination = jsonpointer.JsonPointer(self.operation['to'])
    # moving a value under itself would corrupt the document
    if self.pointer.contains(destination):
        raise JsonPatchException(
            'Cannot move values into its own children')
    # remove from the source first, then re-add at the destination
    RemoveOperation({'op': 'remove', 'path': self.location}).apply(obj)
    AddOperation({
        'op': 'add',
        'path': self.operation['to'],
        'value': value,
    }).apply(obj)
def is_relative_json_pointer(instance):
    # Definition taken from:
    # https://tools.ietf.org/html/draft-handrews-relative-json-pointer-01#section-3
    """Validate *instance* as a Relative JSON Pointer.

    Non-strings vacuously pass (format checkers only constrain strings).
    A valid value is a non-negative integer without leading zeros,
    followed by either "#" or an ordinary JSON pointer.
    """
    if not isinstance(instance, str_types):
        return True
    non_negative_integer, rest = [], ""
    for i, character in enumerate(instance):
        if character.isdigit():
            # digits with a leading "0" are not allowed by the spec
            if i > 0 and int(instance[i - 1]) == 0:
                return False
            non_negative_integer.append(character)
            continue
        if not non_negative_integer:
            return False
        rest = instance[i:]
        break
    # bool(...) so the checker returns a boolean, not a JsonPointer object
    return (rest == "#") or bool(jsonpointer.JsonPointer(rest))
def _dereference_descriptor(self, descriptor):
    """Dereference resource ``schema``/``dialect`` URIs in-place.

    Each string value is resolved depending on its form: "#..." as a JSON
    Pointer into the descriptor, "http..." as a remote JSON document, and
    anything else as a local JSON file under ``self.base_path``.

    :raises DataPackageException: when a URI cannot be resolved safely.
    """
    PROPERTIES = ['schema', 'dialect']
    for property in PROPERTIES:
        for resource in descriptor.get('resources', []):
            value = resource.get(property)
            # URI -> No
            if not isinstance(value, six.string_types):
                continue
            # URI -> Pointer
            if value.startswith('#'):
                try:
                    pointer = jsonpointer.JsonPointer(value[1:])
                    resource[property] = pointer.resolve(descriptor)
                except Exception as exception:
                    # chain the original error instead of discarding it
                    six.raise_from(
                        DataPackageException(
                            'Not resolved Pointer URI "%s" '
                            'for resource.%s' % (value, property)),
                        exception)
            # URI -> Remote
            elif value.startswith('http'):
                try:
                    response = requests.get(value)
                    response.raise_for_status()
                    resource[property] = response.json()
                except Exception as exception:
                    six.raise_from(
                        DataPackageException(
                            'Not resolved Remote URI "%s" '
                            'for resource.%s' % (value, property)),
                        exception)
            # URI -> Local
            else:
                if not helpers.is_safe_path(value):
                    raise DataPackageException(
                        'Not safe path in Local URI "%s" '
                        'for resource.%s' % (value, property))
                fullpath = os.path.join(self.base_path, value)
                try:
                    with io.open(fullpath, encoding='utf-8') as file:
                        resource[property] = json.load(file)
                except Exception as exception:
                    six.raise_from(
                        DataPackageException(
                            'Not resolved Local URI "%s" '
                            'for resource.%s' % (value, property)),
                        exception)
def extract_data(self, representation, obj):
    """Extract (a selection of) obj.extra_data for the given representation.

    Returns {} unless the representation requests data inclusion; returns
    everything when no select list is given, otherwise only the selected
    JSON-pointer paths.
    """
    data = obj.extra_data or {}
    if INCLUDE_DATA not in representation:
        return {}
    if representation.select is None:
        # include everything
        return data
    # include selected data
    ret = {}
    for sel in representation.select:
        # tolerate selectors missing the leading "/" a JSON pointer requires
        if not sel.startswith('/'):
            sel = '/' + sel
        ptr = jsonpointer.JsonPointer(sel)
        selected_data = ptr.resolve(data)
        if selected_data:
            # keyed by the final pointer segment -- NOTE(review): selectors
            # sharing a last segment overwrite each other, and falsy values
            # (0, "", [], False) are silently dropped; confirm intended.
            ret[ptr.parts[-1]] = selected_data
    return ret
async def storage_extract(
        app: T.Mapping[str, T.Any],
        ptr: str,
        distinct: bool = False) -> T.Generator[str, None, None]:
    # language=rst
    """Generator to extract values from the stored documents, optionally distinct.

    Used to, for example, get a list of all tags or ids in the system. Or to
    get all documents stored in the system. If distinct=True then the generator
    will cache all values in a set, which may become prohibitively large.

    :param app: the `~datacatalog.application.Application`
    :param ptr: JSON pointer to the element.
    :param distinct: Return only distinct values.
    :raises: ValueError if filter syntax is invalid.

    """
    # If the pointer is '/' we should return all documents
    if ptr == '/':
        async with app['pool'].acquire() as con:
            async with con.transaction():
                # use a cursor so we can stream
                async for row in con.cursor(_Q_RETRIEVE_ALL_DOCS):
                    yield json.loads(row['doc'])
        return
    # Otherwise, return the values
    try:
        p = jsonpointer.JsonPointer(ptr)
    except jsonpointer.JsonPointerException:
        # surface pointer syntax problems as the documented ValueError
        raise ValueError('Cannot parse pointer')
    ptr_parts = p.parts
    # cache of already-yielded values; only consulted when distinct=True
    cache = set()
    async with app['pool'].acquire() as con:
        async with con.transaction():
            # use a cursor so we can stream
            async for row in con.cursor(_Q_RETRIEVE_ALL_DOCS):
                doc = json.loads(row['doc'])
                # _extract_values walks each parsed doc along the pointer parts
                for elm in _extract_values(doc, ptr_parts):
                    if not distinct or elm not in cache:
                        yield elm
                        if distinct:
                            cache.add(elm)
def scope_done(scope, flowview):
    """
    walks recursively all scopes starting at some initial scope to determine
    if all steps and stages under this scope have been executed / applied.
    Will indicate that it's safe to reference any result of the workflow
    within that scope.
    """
    log.debug("checking scope %s on view with offset %s", scope, flowview.offset)
    result = True
    bookkeeper = jsonpointer.JsonPointer(scope).resolve(flowview.bookkeeper)
    # single pass: the original accidentally nested two identical loops over
    # the same items, re-checking every child len(bookkeeper) times with no
    # effect on the final and-accumulated result
    for k, v in bookkeeper.items():
        if k == "_meta":
            result = result and checkmeta(flowview, v)
        else:
            childscope = scope + "/{}".format(k)
            result = result and scope_done(childscope, flowview)
    return result
def apply(self, obj):
    """Apply a "move": relocate ``from`` to ``path``; returns the patched doc."""
    source = jsonpointer.JsonPointer(self.operation['from'])
    parent, token = source.to_last(obj)
    value = parent[token]
    # moving a value under itself would corrupt the document
    if self.pointer.contains(source):
        raise JsonPatchException(
            'Cannot move values into its own children')
    removed = RemoveOperation({
        'op': 'remove',
        'path': self.operation['from'],
    }).apply(obj)
    return AddOperation({
        'op': 'add',
        'path': self.location,
        'value': value,
    }).apply(removed)
def to_expr(ptr: str, value: T.Any) -> str:
    """Create a filterexpression from a json pointer and value."""
    try:
        parsed = jsonpointer.JsonPointer(ptr)
    except jsonpointer.JsonPointerException:
        raise ValueError('Cannot parse pointer')
    remaining = collections.deque(parsed.parts)
    literal = json.dumps(value)

    def render_nested():
        # dispatch on the next segment: objects descend through "properties",
        # lists through "items"
        head = remaining.popleft()
        if head == 'properties':
            return render_object()
        elif head == 'items':
            return render_array()
        raise ValueError('Child must be either list, '
                         'object or end of pointer, not: ' + head)

    def render_object() -> str:
        if len(remaining) == 0:
            raise ValueError(
                'Properties must be followed by property name')
        prop = json.dumps(remaining.popleft())
        # either end-of-pointer primitive...
        if len(remaining) == 0:
            return '{' + prop + ': ' + literal + '}'
        # or a complex type
        return '{' + prop + ': ' + render_nested() + '}'

    def render_array() -> str:
        # either end-of-pointer primitive...
        if len(remaining) == 0:
            return '[' + literal + ']'
        # or a complex type
        return '[' + render_nested() + ']'

    # base case: the pointer addresses the document root, so the expression
    # is solely the JSON-encoded primitive
    if len(remaining) == 0:
        return literal
    # anything else must be a complex type (object or list)
    return render_nested()
def is_relative_json_pointer(instance: object) -> bool:
    # Definition taken from:
    # https://tools.ietf.org/html/draft-handrews-relative-json-pointer-01#section-3
    """Check whether *instance* is a valid Relative JSON Pointer string.

    Non-strings vacuously pass; a valid value is a non-negative integer
    (without leading zeros) followed by "#" or an ordinary JSON pointer.
    """
    if not isinstance(instance, str):
        return True
    digits: list = []
    suffix = ""
    for index, char in enumerate(instance):
        if char.isdigit():
            # a leading "0" may not be followed by further digits
            if index > 0 and int(instance[index - 1]) == 0:
                return False
            digits.append(char)
            continue
        # the value must begin with at least one digit
        if not digits:
            return False
        suffix = instance[index:]
        break
    return (suffix == "#") or bool(jsonpointer.JsonPointer(suffix))
def translate_create(path, **kwargs):
    """Translate a JSON-pointer *path* into a program that creates every
    missing intermediate object along the pointer.

    For each prefix of the pointer, emit an ``(if (not (list-contains ...)))``
    guard that sets the prefix to an empty object when it is absent, wrapped
    in a single ``(begin ...)``.
    """
    pointer = jp.JsonPointer(path)

    def generate_create_for_prefix(subpointer, part, pointer):
        # guard: only create the child object when `part` is not already a
        # key of the parent object found at `subpointer`
        return t.list(
            sym('if'),
            t.list(
                sym('not'),
                t.list(
                    sym('list-contains'),
                    t.list(
                        sym('object-keys'),
                        t.list(
                            sym('get'),
                            subpointer.path,
                        ),
                    ),
                    part,
                ),
            ),
            t.list(
                sym('set'),
                pointer.path,
                t.list(sym('object')),
            ),
        )

    def generate_prefixes():
        # walk the pointer root-to-leaf, yielding one guarded create per level
        parts = []
        for part in pointer.parts:
            subpointer = jp.JsonPointer.from_parts(parts)
            parts.append(part)
            part_pointer = jp.JsonPointer.from_parts(parts)
            yield generate_create_for_prefix(subpointer, part, part_pointer)

    # was: *(p for p in generate_prefixes()) -- the extra generator
    # expression wrapped the generator without changing anything (C4xx)
    return t.list(sym('begin'), *generate_prefixes())
def is_json_pointer(instance):
    """Format check for JSON pointers; non-strings vacuously pass."""
    if isinstance(instance, str_types):
        # truthy JsonPointer on success; parsing errors propagate to the caller
        return jsonpointer.JsonPointer(instance)
    return True
def parse_schema(self, resp):
    """Function to get and replace schema $ref with data

    :param resp: response data containing ref items.
    :type resp: str.

    Walks every "$ref" occurrence in the response, loads the referenced
    schema document if needed, and splices the referenced data into a copy
    of the response, which is written back via resp.json().
    """
    #pylint: disable=maybe-no-member
    # find every "$ref" key anywhere in the response body
    jsonpath_expr = jsonpath_rw.parse('$.."$ref"')
    matches = jsonpath_expr.find(resp.dict)
    respcopy = resp.dict
    typeregex = '([#,@].*?\.)'
    if matches:
        for match in matches:
            fullpath = str(match.full_path)
            # "$ref" values look like "file.json#/fragment": split them
            jsonfile = match.value.split('#')[0]
            jsonpath = match.value.split('#')[1]
            listmatch = None
            found = None

            if 'redfish.dmtf.org' in jsonfile:
                # DMTF-hosted refs are redirected to the local Resource.json;
                # odata refs get their last segment prefixed with "odata"
                if 'odata' in jsonfile:
                    jsonpath = jsonpath.replace(jsonpath.split('/')[-1], \
                                    'odata' + jsonpath.split('/')[-1])
                jsonfile = 'Resource.json'

            # convert the jsonpath_rw dotted path into a JSON-pointer-style
            # path, protecting dotted type prefixes (e.g. "#Type.") from the
            # dot->slash replacement
            found = re.search(typeregex, fullpath)
            if found:
                repitem = fullpath[found.regs[0][0]:found.regs[0][1]]
                schemapath = '/' + fullpath.replace(repitem, '~').\
                    replace('.', '/').replace('~', repitem)
            else:
                schemapath = '/' + fullpath.replace('.', '/')

            if '.json' in jsonfile:
                itempath = schemapath
                # build the URL of the referenced schema file next to the
                # current request path (redfish paths keep a trailing slash)
                if self.is_redfish:
                    if resp.request.path[-1] == '/':
                        newpath = '/'.join(resp.request.path.split('/')\
                            [:-2]) + '/' + jsonfile + '/'
                    else:
                        newpath = '/'.join(resp.request.path.split('/')\
                            [:-1]) + '/' + jsonfile + '/'
                else:
                    newpath = '/'.join(resp.request.path.split('/')[:-1]) \
                        + '/' + jsonfile
                if 'href.json' in newpath:
                    continue

                # fetch the referenced schema document once per URL
                if not newpath.lower() in self._visited_urls:
                    self.load(newpath, skipcrawl=True, includelogs=False, \
                        skipinit=True, loadtype='ref')

                instance = list()
                #deprecated type "string" for Type.json
                if 'string' in self.types:
                    for item in self.itertype('string'):
                        instance.append(item)
                if 'object' in self.types:
                    for item in self.itertype('object'):
                        instance.append(item)

                for item in instance:
                    if jsonfile in item.resp._rest_request._path:
                        if 'anyOf' in fullpath:
                            break
                        dictcopy = item.resp.dict
                        # list indices like "[0]" in the path need special
                        # handling: split the path around the index
                        listmatch = re.search('[[][0-9]+[]]', itempath)
                        if listmatch:
                            start = listmatch.regs[0][0]
                            end = listmatch.regs[0][1]
                            newitempath = [
                                itempath[:start], itempath[end:]
                            ]
                            start = jsonpointer.JsonPointer(newitempath[0])
                            end = jsonpointer.JsonPointer(newitempath[1])
                            del start.parts[-1], end.parts[-1]
                            vals = start.resolve(respcopy)
                            count = 0
                            for val in vals:
                                # NOTE(review): bare "except:" used as
                                # control flow to advance to the next list
                                # element when the pointer doesn't resolve
                                try:
                                    if '$ref' in six.iterkeys(
                                            end.resolve(val)):
                                        end.resolve(val).pop('$ref')
                                        end.resolve(val).update(dictcopy)
                                        replace_pointer = jsonpointer.\
                                            JsonPointer(end.path + jsonpath)
                                        data = replace_pointer.resolve(val)
                                        set_pointer(val, end.path, data)
                                        start.resolve(respcopy)[count].\
                                            update(val)
                                        break
                                except:
                                    count += 1
                        else:
                            # no list index: replace the $ref dict in place
                            itempath = jsonpointer.JsonPointer(itempath)
                            del itempath.parts[-1]
                            if '$ref' in six.iterkeys(
                                    itempath.resolve(respcopy)):
                                itempath.resolve(respcopy).pop('$ref')
                                itempath.resolve(respcopy).update(dictcopy)
                                break
            if jsonpath:
                if 'anyOf' in fullpath:
                    continue
                if not jsonfile:
                    # document-local ref: copy the fragment over the $ref node
                    replacepath = jsonpointer.JsonPointer(jsonpath)
                    schemapath = schemapath.replace('/$ref', '')
                    if re.search('\[\d]', schemapath):
                        schemapath = schemapath.translate(None, '[]')
                    schemapath = jsonpointer.JsonPointer(schemapath)
                    data = replacepath.resolve(respcopy)
                    if '$ref' in schemapath.resolve(respcopy):
                        schemapath.resolve(respcopy).pop('$ref')
                        schemapath.resolve(respcopy).update(data)
                else:
                    if not listmatch:
                        # external ref already merged above: now hoist the
                        # fragment's content up to the $ref location
                        schemapath = schemapath.replace('/$ref', '')
                        replacepath = schemapath + jsonpath
                        replace_pointer = jsonpointer.\
                            JsonPointer(replacepath)
                        data = replace_pointer.resolve(respcopy)
                        set_pointer(respcopy, schemapath, data)
        resp.json(respcopy)
    else:
        resp.json(respcopy)
def parse_schema(self, resp):
    """Function to get and replace schema $ref with data

    :param resp: response data containing ref items.
    :type resp: str.

    Walks every "$ref" occurrence in the response, loads the referenced
    schema document if needed, and splices the referenced data into a copy
    of the response, which is written back via resp.json().
    """
    # find every "$ref" key anywhere in the response body
    jsonpath_expr = jsonpath_rw.parse(u'$.."$ref"')
    matches = jsonpath_expr.find(resp.dict)
    respcopy = resp.dict
    if matches:
        for match in matches:
            fullpath = str(match.full_path)
            # "$ref" values look like "file.json#/fragment": split them
            jsonfile = match.value.split('#')[0]
            jsonpath = match.value.split('#')[1]

            # convert the jsonpath_rw dotted path into a JSON-pointer-style
            # path, protecting "@odata." prefixes from the dot->slash swap
            if '@odata' in fullpath:
                schemapath = '/' + fullpath.replace('@odata.', '~').\
                    replace('.', '/').replace('~', '@odata.')
            else:
                schemapath = '/' + fullpath.replace('.', '/')

            if '.json' in jsonfile:
                itempath = schemapath
                # build the URL of the referenced schema file next to the
                # current request path (redfish paths keep a trailing slash)
                if self.is_redfish:
                    if resp.request.path[-1] == '/':
                        newpath = '/'.join(resp.request.path.split('/')\
                            [:-2]) + '/' + jsonfile + '/'
                    else:
                        newpath = '/'.join(resp.request.path.split('/')\
                            [:-1]) + '/' + jsonfile + '/'
                else:
                    newpath = '/'.join(resp.request.path.split('/')[:-1]) \
                        + '/' + jsonfile
                if 'href.json' in newpath:
                    continue
                # fetch the referenced schema document once per URL
                if not newpath.lower() in self._visited_urls:
                    self.load(newpath, skipcrawl=True, includelogs=False, \
                        skipinit=True, loadtype='ref')

                instance = list()
                if u'st' in self.types:
                    for x in self.types[u'st'][u'Instances']:
                        instance.append(x)
                if u'ob' in self.types:
                    for x in self.types[u'ob'][u'Instances']:
                        instance.append(x)

                for item in instance:
                    if jsonfile in item.resp._rest_request._path:
                        if 'anyOf' in fullpath:
                            break
                        dictcopy = item.resp.dict
                        # list indices like "[0]" in the path need special
                        # handling: split the path around the index.
                        # NOTE(review): when '.json' is not in jsonfile this
                        # loop never runs, leaving "listmatch" unbound for the
                        # "if not listmatch" test below -- confirm reachable.
                        listmatch = re.search('[[][0-9]+[]]', itempath)
                        if listmatch:
                            start = listmatch.regs[0][0]
                            end = listmatch.regs[0][1]
                            newitempath = [itempath[:start],\
                                itempath[end:]]
                            start = jsonpointer.JsonPointer(newitempath[0])
                            end = jsonpointer.JsonPointer(newitempath[1])
                            del start.parts[-1], end.parts[-1]
                            vals = start.resolve(respcopy)
                            count = 0
                            for val in vals:
                                # NOTE(review): bare "except:" used as
                                # control flow to advance to the next list
                                # element when the pointer doesn't resolve
                                try:
                                    if '$ref' in end.resolve(
                                            val).iterkeys():
                                        end.resolve(val).pop('$ref')
                                        end.resolve(val).update(dictcopy)
                                        replace_pointer = jsonpointer.\
                                            JsonPointer(end.path + jsonpath)
                                        data = replace_pointer.resolve(val)
                                        set_pointer(val, end.path, data)
                                        start.resolve(respcopy)[count].\
                                            update(val)
                                        break
                                except:
                                    count += 1
                        else:
                            # no list index: replace the $ref dict in place
                            itempath = jsonpointer.JsonPointer(itempath)
                            del itempath.parts[-1]
                            if '$ref' in itempath.resolve(respcopy).\
                                    iterkeys():
                                itempath.resolve(respcopy).pop('$ref')
                                itempath.resolve(respcopy).update(dictcopy)
            if jsonpath:
                if 'anyOf' in fullpath:
                    continue
                if not jsonfile:
                    # document-local ref: copy the fragment over the $ref node
                    replacepath = jsonpointer.JsonPointer(jsonpath)
                    schemapath = schemapath.replace('/$ref', '')
                    schemapath = jsonpointer.JsonPointer(schemapath)
                    data = replacepath.resolve(respcopy)
                    if '$ref' in schemapath.resolve(respcopy):
                        schemapath.resolve(respcopy).pop('$ref')
                        schemapath.resolve(respcopy).update(data)
                else:
                    if not listmatch:
                        # external ref already merged above: now hoist the
                        # fragment's content up to the $ref location
                        schemapath = schemapath.replace('/$ref', '')
                        replacepath = schemapath + jsonpath
                        replace_pointer = jsonpointer.\
                            JsonPointer(replacepath)
                        data = replace_pointer.resolve(respcopy)
                        set_pointer(respcopy, schemapath, data)
        resp.json(respcopy)
    else:
        resp.json(respcopy)