class Expander:
    """Recursively inline every ``$ref`` found in a document.

    Resolution failures are collected into ``self.errors`` instead of
    aborting the whole expansion.
    """

    def __init__(self, resolver):
        self.resolver = resolver
        self.accessor = StackedAccessor(resolver)
        self.accessing = Accessor()
        self.ref_walking = DictWalker(["$ref"])
        self.errors = []

    def expand(self, doc=None, resolver=None, ctx=None):
        doc = doc or self.resolver.doc
        resolver = resolver or self.resolver

        if "$ref" in doc:
            # The document itself is a reference: resolve it, expand the
            # target, then unwind the accessor's resolver stack.
            target = self.accessor.access(doc["$ref"])
            expanded = self.expand(target, resolver=self.accessor.resolver, ctx=ctx)
            self.accessor.pop_stack()
            return expanded

        # Otherwise, walk the tree and expand each nested reference in place.
        for keypath, subdoc in self.ref_walking.iterate(doc):
            try:
                expanded = self.expand(subdoc, resolver=resolver, ctx=ctx)
                container = self.accessing.access(doc, keypath[:-1])
                if not hasattr(container, "parents"):
                    # not a ChainMap yet: layer a fresh dict over the original
                    container = ChainMap(make_dict(), container)
                container.update(expanded)
                self.accessing.assign(doc, keypath[:-1], container)
            except Exception as e:
                # best-effort: record the failure and keep expanding
                self.errors.append(ReferenceError(e, path=keypath[:], data=subdoc))
        return doc
class Emitter:
    """Emit a single bundled document from a map of scanned items.

    Rewrites cross-file references (``<file.yaml>#/<ref>``) into local
    references (``#/<ref>``) as a side effect on the scanned data.
    """

    def __init__(self, accessor, item_map):
        self.raw_accessor = Accessor()
        self.accessor = accessor
        self.item_map = item_map

    @reify
    def ref_walking(self):
        return DictWalker([is_ref])

    def get_item_by_globalref(self, globalref):
        return self.accessor.cache[globalref]

    def get_item_by_localref(self, localref):
        return self.item_map[localref]

    def emit(self, resolver, doc, *, conflicted):  # side effect
        bundled = make_dict()

        # localize refs in the root document, then merge it in
        for _, node in self.ref_walking.iterate(doc):
            self.replace_ref(resolver, node)
        bundled = deepmerge(bundled, doc)

        for localname, item in self.item_map.items():
            if localname == "":
                continue
            data = item.data
            # replace: <file.yaml>#/<ref> -> #/<ref>
            for _, node in self.ref_walking.iterate(data):
                if not node["$ref"].startswith("#/"):
                    self.replace_ref(item.resolver, node)
                if node["$ref"] in conflicted:
                    self.replace_ref(item.resolver, node)
            self.raw_accessor.assign(bundled, localname.split("/"), data)

        # adhoc paths support
        will_removes = set()
        paths = bundled.get("paths") or {}
        for pathname, definition in list(paths.items()):
            if "$ref" in definition and definition["$ref"].startswith("#/"):
                # inline the referenced path item and schedule its removal
                related_path = tuple(definition["$ref"][2:].split("/"))
                paths[pathname] = self.raw_accessor.access(bundled, related_path).copy()
                will_removes.add(related_path)
        for related_path in will_removes:
            self.raw_accessor.maybe_remove(bundled, related_path)
        return bundled

    def replace_ref(self, resolver, node):
        """Rewrite ``node['$ref']`` to point at the item's local name."""
        filename, _, pointer = resolver.resolve_pathset(node["$ref"])
        related = self.get_item_by_globalref((filename, pointer))
        new_ref = "#/{}".format(related.localref)
        if node["$ref"] != new_ref:
            logger.debug(
                "fix ref: %r -> %r (where=%r)", node["$ref"], new_ref, resolver.filename
            )
            node["$ref"] = new_ref
class Scaner:
    """Scan a document tree, resolving ``$ref`` nodes across files.

    A ``seen`` map (keyed by ``id``) prevents re-scanning shared subtrees.
    Parse and resolution failures accumulate in ``self.errors``.
    """

    def __init__(self, resolver, *, store: Store):
        self.resolver = resolver
        self.store = store
        self.accessor = StackedAccessor(resolver)
        self.accessing = Accessor()
        self.ref_walking = DictWalker([is_ref])
        self.errors = []

    def scan(self, doc=None, resolver=None):
        # falsy-but-not-None input (e.g. "" or {}) is returned untouched
        if not doc and doc is not None:
            return doc
        resolver = resolver or self.resolver
        try:
            doc = doc or resolver.doc
        except MarkedYAMLError as e:
            if e.problem_mark is not None:
                self.errors.append(ParseError(e, store=self.store))
            if doc is None:
                doc = {}
        scanned, _ = self._scan(doc, resolver=resolver, seen={})
        return scanned

    def _scan(self, doc, *, resolver, seen: dict):
        if "$ref" in doc:
            # the node itself is a reference: follow it, then pop the stack
            target = self.accessor.access(doc["$ref"])
            resolved, _ = self._scan(target, resolver=self.accessor.resolver, seen=seen)
            return resolved, self.accessor.pop_stack()

        for keypath, subdoc in self.ref_walking.iterate(doc):
            try:
                uid = id(subdoc)
                if uid in seen:
                    continue
                seen[uid] = subdoc
                scanned, sub_resolver = self._scan(subdoc, resolver=resolver, seen=seen)
                if resolver.filename != sub_resolver.filename:
                    # reference crossed a file boundary: merge the resolved
                    # content over the original via a ChainMap layer
                    container = self.accessing.access(doc, keypath[:-1])
                    if not hasattr(container, "parents"):
                        container = ChainMap(make_dict(), container)
                    container.update(scanned)
                    self.accessing.assign(doc, keypath[:-1], container)
            except (KeyError, FileNotFoundError) as e:
                self.errors.append(
                    ReferenceError(e, store=self.store, path=keypath[:], data=subdoc)
                )
            except MarkedYAMLError as e:
                if e.problem_mark is not None:
                    self.errors.append(
                        ParseError(e, store=self.store, path=keypath[:], data=subdoc)
                    )
        return doc, resolver
class Scaner:
    """Scan a document tree, resolving ``$ref`` nodes in place.

    NOTE(review): unlike the ``seen``-tracking variant elsewhere in this
    codebase, this version has no cycle guard — presumably refs are acyclic
    here; confirm against callers.
    """

    def __init__(self, resolver, *, store: Store):
        self.resolver = resolver
        self.store = store
        self.accessor = StackedAccessor(resolver)
        self.accessing = Accessor()
        self.ref_walking = DictWalker(["$ref"])
        self.errors = []

    def scan(self, doc=None, resolver=None, ctx=None):
        resolver = resolver or self.resolver
        try:
            doc = doc or resolver.doc
        except MarkedYAMLError as e:
            if e.problem_mark is not None:
                self.errors.append(ParseError(e, store=self.store))
            if doc is None:
                doc = {}

        if "$ref" in doc:
            # the node itself is a reference: follow it, then pop the stack
            target = self.accessor.access(doc["$ref"])
            resolved = self.scan(target, resolver=self.accessor.resolver, ctx=ctx)
            self.accessor.pop_stack()
            return resolved

        for keypath, subdoc in self.ref_walking.iterate(doc):
            try:
                scanned = self.scan(subdoc, resolver=resolver, ctx=ctx)
                container = self.accessing.access(doc, keypath[:-1])
                if not hasattr(container, "parents"):
                    # layer the resolved content over the original node
                    container = ChainMap(make_dict(), container)
                container.update(scanned)
                self.accessing.assign(doc, keypath[:-1], container)
            except (KeyError, FileNotFoundError) as e:
                self.errors.append(
                    ReferenceError(e, store=self.store, path=keypath[:], data=subdoc)
                )
            except MarkedYAMLError as e:
                if e.problem_mark is not None:
                    self.errors.append(
                        ParseError(e, store=self.store, path=keypath[:], data=subdoc)
                    )
        return doc
def run(pkgname: str) -> None:
    """Dump the docstrings of every class/function in *pkgname*'s modules.

    Module names are discovered via the external ``pyinspect list`` command;
    the collected docstrings are written out with ``loading.dumpfile``.
    Import failures are reported to stderr and the module is skipped.
    """
    p = subprocess.run(
        ["pyinspect", "list", pkgname], text=True, stdout=subprocess.PIPE
    )
    a = Accessor()
    r = a.make_dict()
    for module_name in p.stdout.split():
        try:
            m = importlib.import_module(module_name)
        except Exception as e:
            print(f"\x1b[32m!! {e!r} \x1b[0m", file=sys.stderr)
            # BUGFIX: without this `continue`, a failed import fell through
            # and re-processed the previously imported module (or raised
            # NameError on the first iteration).
            continue
        for name, val in m.__dict__.items():
            # classes and functions are handled identically
            if inspect.isclass(val) or inspect.isfunction(val):
                a.assign(r, [module_name, name], inspect.getdoc(val))
    loading.dumpfile(r)
def transform(
    d,
    *,
    default_content_type="application/json",
    is_specific_header=is_specific_header,
    get_value=get_value,
    with_response_type=True,
    with_request_type=True,
    with_cookies=True,
    include_all=False,
):
    """Transform a HAR-like {path: {method: [entries]}} mapping into an
    OpenAPI-shaped dict (``{"paths": {...}}``).

    FIXES: the per-operation dict previously rebound the parameter name
    ``d`` (shadowing); the ``response_bodies_dict`` annotation claimed
    ``dict`` values although a ``defaultdict(list)`` holds lists.
    """
    r = make_dict()
    a = Accessor()
    for path, methods in d.items():
        for method, entries in methods.items():
            operation = {"description": ""}
            seen_parameters = defaultdict(set)
            request_bodies: t.List[dict] = []
            # keyed by (status, content_type); values are JSON bodies
            response_bodies_dict: t.Dict[t.Tuple[int, str], t.List[dict]] = defaultdict(
                list
            )
            for e in entries:
                # request
                # params :: path,query,header,cookie
                parameters = []
                for param_type, k, enabled in [
                    ("query", "queryString", True),
                    ("header", "headers", True),
                    ("cookie", "cookies", with_cookies),
                ]:
                    if not enabled:
                        continue
                    seen = seen_parameters[k]
                    for h in e["request"][k]:
                        if h["name"] in seen:
                            continue
                        seen.add(h["name"])
                        if include_all or is_specific_header(h["name"], h["value"]):
                            parameters.append({
                                "name": h["name"],
                                "in": param_type,
                                "example": get_value(h["name"], h["value"]),  # masking?
                            })
                if parameters:
                    operation["parameters"] = parameters
                if e["request"].get("postData"):
                    post_data = e["request"]["postData"]
                    content_type = post_data["mimeType"].split(";", 1)[0]
                    if content_type.endswith("/json") and with_request_type:
                        request_bodies.append(
                            loading.loads(post_data["text"], format="json"))

                # response
                status = e["response"]["status"]
                if status == 304:
                    status = 200  # not modified -> ok
                content_type = e["response"]["content"].get("mimeType")
                if content_type is None:
                    # fall back to the content-type header, then the default
                    for h in e["response"]["headers"]:
                        if h["name"].lower() == "content-type":
                            content_type = h["value"]
                            break
                    else:
                        content_type = default_content_type
                # "application/json; charset=utf-8" -> "application/json"
                content_type = content_type.split(";", 1)[0]

                schema = {}
                if content_type.startswith("text/"):
                    a.assign(schema, ["type"], "string")
                elif content_type.endswith("/json") and with_response_type:
                    response_bodies_dict[(status, content_type)].append(
                        loading.loads(e["response"]["content"]["text"], format="json"))
                a.assign(
                    operation,
                    ["responses", status],
                    {
                        "description": e["response"]["statusText"],
                        "content": {
                            content_type: {
                                "schema": schema
                            }
                        },
                    },
                )
            if request_bodies:
                # the detector accumulates; only the final info is emitted
                detector = schemalib.Detector()
                info = None
                for body in request_bodies:
                    info = detector.detect(body, name="")
                a.assign(operation, ["requestBody"], schemalib.makeschema_from_info(info))
            if response_bodies_dict:
                for (status, content_type), bodies in response_bodies_dict.items():
                    detector = schemalib.Detector()
                    info = None
                    for body in bodies:
                        info = detector.detect(body, name="")
                    a.assign(
                        operation,
                        [
                            "responses", status, "content", content_type, "schema"
                        ],
                        schemalib.makeschema_from_info(info),
                    )
            a.assign(r, ["paths", path, method.lower()], operation)
    return r
import pathlib
import importlib.util

from dictknife import loading
from dictknife import Accessor
from dictknife import DictWalker

# Locate botocore's bundled service definition for SQS.
spec = importlib.util.find_spec("botocore")
location = pathlib.Path(spec.origin)
if location.name == "__init__.py":
    location = location.parent
d = loading.loadfile(location / "data/sqs/2012-11-05/service-2.json")

# Copy each operation plus the shapes it references into a slimmer dict.
dst = {}
a = Accessor(make_dict=dict)
for name, sd in d["operations"].items():
    a.assign(dst, ["operations", name], sd)
    input_shape = sd["input"]["shape"]
    a.assign(dst, ["shapes", input_shape], a.access(d, ["shapes", input_shape]))
    if "output" in sd:
        output_shape = sd["output"]["shape"]
        a.assign(dst, ["shapes", output_shape], a.access(d, ["shapes", output_shape]))

# slim-up: strip the verbose documentation strings
for keypath, sd in DictWalker(["documentation"]).walk(dst):
    sd.pop("documentation")

loading.dumpfile(dst, format="json")
def _callFUT(self, d, k, v):
    """Call the function under test: ``Accessor.assign`` with plain dicts."""
    from dictknife import Accessor

    accessor = Accessor(make_dict=dict)
    return accessor.assign(d, k, v)
class Loader:
    """Load a document, resolving ``$ref`` nodes across files.

    Uses a ``seen`` map (keyed by ``id``) to skip already-visited subtrees;
    parse/resolution failures are collected into ``self.errors`` with the
    resolver-stack filenames as history.
    """

    def __init__(self, resolver, *, store: _yaml.NodeStore):
        self.resolver = resolver
        self.accessor = StackedAccessor(resolver)
        self.accessing = Accessor()
        self.ref_walking = DictWalker([is_ref])
        self.errors = []
        self.store = store

    @property
    def filename(self) -> str:
        return self.resolver.filename

    def load(self, doc=None, resolver=None):
        # falsy-but-not-None input (e.g. "" or {}) is returned untouched
        if not doc and doc is not None:
            return doc
        resolver = resolver or self.resolver
        try:
            doc = doc or resolver.doc
        except _yaml.MarkedYAMLError as e:
            if e.problem_mark is not None:
                self.errors.append(ParseError(e, history=[resolver.filename]))
            if doc is None:
                doc = {}
        loaded, _ = self._load(doc, resolver=resolver, seen={})
        return loaded

    def _load(self, doc, *, resolver, seen: dict):
        if "$ref" in doc:
            # the node itself is a reference: follow it, then pop the stack
            target = self.accessor.access(doc["$ref"])
            resolved, _ = self._load(target, resolver=self.accessor.resolver, seen=seen)
            return resolved, self.accessor.pop_stack()

        for keypath, subdoc in self.ref_walking.iterate(doc):
            try:
                uid = id(subdoc)
                if uid in seen:
                    continue
                seen[uid] = subdoc
                loaded, sub_resolver = self._load(subdoc, resolver=resolver, seen=seen)
                if resolver.filename != sub_resolver.filename:
                    # reference crossed a file boundary: merge the loaded
                    # content over the original via a ChainMap layer
                    container = self.accessing.access(doc, keypath[:-1])
                    if not hasattr(container, "parents"):
                        container = ChainMap(make_dict(), container)
                    container.update(loaded)
                    self.accessing.assign(doc, keypath[:-1], container)
            except FileNotFoundError as e:
                self.errors.append(
                    ResolutionError(
                        e,
                        path=keypath[:],
                        data=subdoc,
                        # the failing file never made it onto the stack
                        history=[r.filename for r in self.accessor.stack[:-1]],
                    ))
            except KeyError as e:
                self.errors.append(
                    ResolutionError(
                        e,
                        path=keypath[:],
                        data=subdoc,
                        history=[r.filename for r in self.accessor.stack],
                    ))
            except _yaml.MarkedYAMLError as e:
                if e.problem_mark is not None:
                    self.errors.append(
                        ParseError(
                            e,
                            path=keypath[:],
                            data=subdoc,
                            history=[r.filename for r in self.accessor.stack],
                        ))
        return doc, resolver
from collections import OrderedDict

from dictknife import Accessor
from dictknife.pp import pp, indent

# Demo of Accessor's assign / access / exists / maybe_remove operations.
a = Accessor(OrderedDict)
d = OrderedDict()

# assign
a.assign(d, ["a", "b", "c"], "v")
with indent(2, "assign:\n"):
    print(d)
    pp(d)
    print()

# access
with indent(2, '\naccess: ["a", "b", "c"]\n'):
    print(["a", "b", "c"], a.access(d, ["a", "b", "c"]))
    # print(["a", "b", "x"], a.access(d, ["a", "b", "x"]))  # error

# exists
with indent(2, "\nexists:\n"):
    import copy  # NOQA

    d2 = copy.deepcopy(d)
    print(["a", "b", "c"], a.exists(d2, ["a", "b", "c"]))
    print(["a", "b", "x"], a.exists(d2, ["a", "b", "x"]))

# maybe_remove
with indent(2, "\nmaybe_remove:\n"):
    import copy  # NOQA

    d2 = copy.deepcopy(d)
import subprocess
import importlib
import inspect

from dictknife import Accessor
from dictknife import loading

# Discover marshmallow's modules via the external `pyinspect list` command,
# then collect each class/function docstring keyed by [module, name].
p = subprocess.run(
    ["pyinspect", "list", "marshmallow"], text=True, stdout=subprocess.PIPE
)
a = Accessor()
r = a.make_dict()
for module_name in p.stdout.split():
    m = importlib.import_module(module_name)
    for name, val in m.__dict__.items():
        # classes and functions are recorded identically
        if inspect.isclass(val) or inspect.isfunction(val):
            a.assign(r, [module_name, name], inspect.getdoc(val))
loading.dumpfile(r)
def transform(
    d,
    *,
    default_content_type="application/json",
    is_specific_header=is_specific_header,
    get_value=get_value,
    with_response_type=True,
    with_request_type=True,
    with_cookies=True,
    include_all=False,
):
    """Transform a HAR-like {path: {method: [entries]}} mapping into an
    OpenAPI-shaped dict (``{"paths": {...}}``).

    FIXES: the per-operation dict previously rebound the parameter name
    ``d`` (shadowing); the ``response_bodies_dict`` annotation claimed
    ``dict`` values although a ``defaultdict(list)`` holds lists.
    """
    r = make_dict()
    a = Accessor()
    for path, methods in d.items():
        for method, entries in methods.items():
            operation = {"description": ""}
            seen_parameters = defaultdict(set)
            request_bodies: t.List[dict] = []
            # keyed by (status, content_type); values are JSON bodies
            response_bodies_dict: t.Dict[t.Tuple[int, str], t.List[dict]] = defaultdict(
                list
            )
            for e in entries:
                # request
                # params :: path,query,header,cookie
                parameters = []
                for param_type, k, enabled in [
                    ("query", "queryString", True),
                    ("header", "headers", True),
                    ("cookie", "cookies", with_cookies),
                ]:
                    if not enabled:
                        continue
                    seen = seen_parameters[k]
                    for h in e["request"][k]:
                        if h["name"] in seen:
                            continue
                        seen.add(h["name"])
                        if include_all or is_specific_header(h["name"], h["value"]):
                            parameters.append(
                                {
                                    "name": h["name"],
                                    "in": param_type,
                                    "example": get_value(
                                        h["name"], h["value"]
                                    ),  # masking?
                                }
                            )
                if parameters:
                    operation["parameters"] = parameters
                if e["request"].get("postData"):
                    post_data = e["request"]["postData"]
                    content_type = post_data["mimeType"].split(";", 1)[0]
                    if content_type.endswith("/json") and with_request_type:
                        request_bodies.append(
                            loading.loads(post_data["text"], format="json")
                        )

                # response
                status = e["response"]["status"]
                if status == 304:
                    status = 200  # not modified -> ok
                content_type = e["response"]["content"].get("mimeType")
                if content_type is None:
                    # fall back to the content-type header, then the default
                    for h in e["response"]["headers"]:
                        if h["name"].lower() == "content-type":
                            content_type = h["value"]
                            break
                    else:
                        content_type = default_content_type
                # "application/json; charset=utf-8" -> "application/json"
                content_type = content_type.split(";", 1)[0]

                schema = {}
                if content_type.startswith("text/"):
                    a.assign(schema, ["type"], "string")
                elif content_type.endswith("/json") and with_response_type:
                    response_bodies_dict[(status, content_type)].append(
                        loading.loads(e["response"]["content"]["text"], format="json")
                    )
                a.assign(
                    operation,
                    ["responses", status],
                    {
                        "description": e["response"]["statusText"],
                        "content": {content_type: {"schema": schema}},
                    },
                )
            if request_bodies:
                # the detector accumulates; only the final info is emitted
                detector = schemalib.Detector()
                info = None
                for body in request_bodies:
                    info = detector.detect(body, name="")
                a.assign(operation, ["requestBody"], schemalib.makeschema_from_info(info))
            if response_bodies_dict:
                for (status, content_type), bodies in response_bodies_dict.items():
                    detector = schemalib.Detector()
                    info = None
                    for body in bodies:
                        info = detector.detect(body, name="")
                    a.assign(
                        operation,
                        ["responses", status, "content", content_type, "schema"],
                        schemalib.makeschema_from_info(info),
                    )
            a.assign(r, ["paths", path, method.lower()], operation)
    return r