def run(*, path: str, disable_docstring: bool) -> None:
    d = loading.loadfile(path)
    m = Module()
    a = Accessor(d, m, disable_docstring=disable_docstring)

    m.import_("typing", as_="t")
    m.sep()
    m.stmt("AnyService = t.Any # TODO")
    m.stmt("AnyResource = t.Any # TODO")
    m.sep()

    for rname, resource in a.resources.items():
        with m.class_(titleize(rname), ""):
            with m.method("__init__", "resource: AnyResource"):
                m.stmt("self.internal = resource")

            m.stmt("# methods")
            for mname, method, params in a.iterate_methods(resource):
                with m.method(mname, params):
                    a.emit_docstring(method["description"])
                    m.stmt(f"""# {method["httpMethod"]}: {method["flatPath"]}""")
                    m.stmt(f"""# id: {method["id"]}""")
                    m.stmt(f"return self.internal.{mname}({params})")

            m.stmt("# nested resources")
            for srname, subresource in a.iterate_nested_resources(resource):
                with m.method(srname):
                    m.stmt(f"return self.internal.{srname}({params})")

            # m.stmt("# nested resources")
            # for mname, subresource in resource.get("resources", {}).items():
            #     params = LParams()
            #     for is_positional, (pname, parameter) in itertools.zip_longest(
            #         subresource.get("parameterOrder", []),
            #         subresource.get("parameters", {}).items(),
            #     ):
            #         if is_positional:
            #             params.append(pname)  # TODO: type
            #         else:
            #             params[pname] = None  # TODO: type
            #     with m.method(mname, params):
            #         docstring(subresource["description"])
            #         m.stmt(f"""# id: {subresource["id"]}""")
            #         m.stmt(f"return self.{mname}({params})")

    with m.class_("Service"):
        with m.method("__init__", "service: AnyService"):
            m.stmt("self.internal = service")

        for rname in a.resources.keys():
            with m.method(rname, return_type=titleize(rname)):
                m.stmt(f"return {titleize(rname)}(self.internal.{rname}())")

    with m.def_("build", "*args", "**kwargs", return_type="Service"):
        m.stmt("# TODO: use the signature of googleapiclient.discovery.build")
        m.submodule().from_("googleapiclient.discovery", "build")
        m.stmt(
            f"return Service(build({a.name!r}, {a.version!r}, *args, **kwargs))"
        )
    print(m)

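# A minimal invocation sketch (the path below is hypothetical): `run` loads a Google API
# discovery document (presumably via dictknife's `loading` module) and prints typed wrapper
# classes for each resource, plus a `Service` facade and a `build` helper.
# run(path="./bigquery.v2.json", disable_docstring=False)
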
def main(d: AnyDict, *, verbose: bool = False) -> None:
    logging.basicConfig(level=logging.INFO)  # debug

    m = Module()
    import_area: Module = m.submodule()
    import_area.stmt("from __future__ import annotations")

    ctx = Context(import_area=import_area, verbose=verbose)
    scan(ctx, d=d)

    emitter = Emitter(m=m)
    print(emitter.emit(ctx))

    cc = ctx.cache_counter
    logger.info("cache hits=%s, most common=%s", sum(cc.values()), cc.most_common(3))

def transform(node, *, m=None, is_whole=None):
    is_whole = is_whole or m is None
    if m is None:
        m = Module()
        m.g = m.submodule()

    if is_whole:
        m.g.from_("prestring.python", "Module")
        m.g.stmt("m = Module()  # noqa")

    t = Transformer(node, m=m)
    t.visit(node)

    if len(m.g.imported_set) > 0:
        m.g.stmt("m.sep()")
        m.g.sep()

    if is_whole:
        m.stmt("print(m)")
    return m

class _ViewContext(object):
    def __init__(self, parent):
        self.parent = parent
        self.m = Module(import_unique=True)
        self.im = self.m.submodule()
        self.m.sep()
        # todo: scan module file

    def add_view(self, pattern, sym, route, method, d, docstring=None):
        name = sym.rsplit(".", 1)[-1]
        m = self.m
        self.from_("pyramid.view", "view_config")

        view_setting = self.parent.build_view_setting(pattern, route, method, here=self)
        m.stmt(LazyFormat("@view_config({})", LazyKeywordsRepr(view_setting)))
        with m.def_(name, "context", "request"):
            m.stmt('"""')
            if "summary" in d:
                m.stmt(d["summary"])
            if docstring:
                m.stmt("")
                for line in docstring.split("\n"):
                    m.stmt(line)
            m.stmt('"""')
            m.return_("{}")

    def from_(self, module, name):
        logger.debug("  import: module=%s, name=%s", module, name)
        self.im.from_(module, name)

    def import_(self, module):
        logger.debug("  import: module=%s", module)
        self.im.import_(module)

def main():
    from openapi_stream import main

    m = Module(import_unique=True)
    m.header_area = m.submodule()
    m.import_area = m.submodule()
    m.sep()

    g = Generator(m)
    toplevels: t.List[Event] = []
    stream: t.Iterable[Event] = main(create_visitor=ToplevelVisitor)

    def consume_stream(stream: t.Iterable[Event], *, is_delayed=False) -> t.List[Event]:
        delayed_stream: t.List[Event] = []
        for ev in stream:
            if ev.uid in g.name_manager:
                continue

            if names.roles.has_expanded in ev.roles:
                g.case_definitions_manager.add_definitions(
                    ev.get_annotated(names.annotations.expanded)["definitions"]
                )

            if names.roles.has_name in ev.roles:
                g.generate_class(ev)
                continue

            if not is_delayed:
                if names.roles.toplevel_properties in ev.roles:
                    toplevels.append(ev)  # xxx
                delayed_stream.append(ev)
                continue

            # xxx:
            if (
                ev.name in (names.types.object, names.types.array)
                or names.roles.combine_type in ev.roles
                or names.roles.child_of_xxx_of in ev.roles
                or names.roles.field_of_something in ev.roles
            ):
                uid_and_clsname_pairs = sorted(
                    g.name_manager.iterate_clssname_and_prefix_pairs(),
                    key=lambda pair: len(pair[0]),
                    reverse=True,
                )
                uid = ev.uid
                for parent_uid, (parent_clsname, prefix) in uid_and_clsname_pairs:
                    if uid.startswith(parent_uid):
                        classdef_sm = g._end_of_class_definition_conts[parent_uid]
                        fieldname = uid.replace(parent_uid, "").lstrip("/")
                        clsname = f"_{g.helper.classname(ev, name=fieldname)}"

                        classdef_sm.stmt(
                            f"# anonymous definition for {fieldname!r} (TODO: nodename)"
                        )
                        g.generate_class(
                            ev,
                            clsname=clsname,
                            m=classdef_sm,
                            prefix=f"{prefix}{parent_clsname}.",
                        )

                        # ok: properties
                        # ok: oneOf, anyOf, allOf
                        # todo: additionalProperties, patternProperties
                        # assert "/" not in fieldname
                        name = fieldname
                        g._gen_visitor_property(ev, name=name, uid=uid, m=classdef_sm)
                        break
            else:
                raise RuntimeError(f"unexpected type: {ev.name}")
        return delayed_stream

    delayed_stream = consume_stream(stream, is_delayed=False)

    for ev in toplevels:
        if ev.uid.endswith("#/"):
            g.generate_class(ev, clsname="Toplevel")

            import os.path

            m.header_area.stmt(
                f"# generated from {os.path.relpath(ev.root_file, start=os.getcwd())}"
            )
            m.import_area.from_("dictknife.langhelpers", "reify")
            m.import_area.from_("openapi_stream", "runtime")
            m.import_area.from_("openapi_stream.context", "Context")

    delayed_stream = sorted(delayed_stream, key=lambda ev: len(ev.uid))
    delayed_stream = consume_stream(delayed_stream, is_delayed=True)

    if g.case_definitions_manager.has_definitions:
        data = {"definitions": g.case_definitions_manager.definitions}
        m.import_area.from_("openapi_stream", "runtime")
        g.emitter.emit_data(m, "_case = runtime.Case({})", data, nofmt=True)

    print(m)

def f(
    name: str,
    val: int,
    default: int = 0,
    *,
    nickname: t.Optional[str] = None,
    debug: bool = False,
    **metadata: t.Optional[t.Any],
) -> None:
    pass


m = Module()
m.toplevel = m.submodule(import_unique=True)
m.sep()

spec = fnspec(f)
with m.class_("F"):
    for name, typ, kind in spec.parameters:
        if typ.__module__ != "builtins":
            m.toplevel.import_(typ.__module__)

        info = typeinfo(typ)
        rhs = spec.type_str_of(info.normalized)
        if info.is_optional:
            rhs = LazyFormat("typing.Optional[{}]", rhs)
        if kind == "var_kw":
            rhs = LazyFormat("typing.Dict[str, {}]", rhs)

def emit(filename: str, *, use_fullname: bool = False) -> None:
    from dictknife import loading
    from prestring.python import Module
    from detector import detect, Object, TypeInfo, ZERO, generate_annotations

    name_map = {}

    m = Module()
    m.toplevel = m.submodule()
    m.sep()

    def _pytype(info: TypeInfo, *, m=m, aliases: t.Dict[str, str] = {"typing": "t"}):
        if info is ZERO:
            module = aliases.get(t.__name__) or t.__name__
            return f"{module}.Any"

        if hasattr(info, "base"):
            module = aliases.get(info.base.__module__) or info.base.__module__
            m.toplevel.import_(info.base.__module__, aliases.get(info.base.__module__))
            if info.base is t.Optional:
                return f"{module}.Optional[{_pytype(info.item)}]"
            elif info.base is t.List:
                return f"{module}.List[{_pytype(info.item)}]"
        elif hasattr(info, "type"):
            module = aliases.get(info.type.__module__) or info.type.__module__
            prefix = module + "."
            if module == "builtins":
                prefix = ""
            else:
                m.toplevel.import_(info.type.__module__, aliases.get(info.type.__module__))
            return prefix + info.type.__name__

        try:
            return name_map[id(info)]
        except KeyError:
            # FIXME: bug
            import sys

            print(f"something wrong: {info}", file=sys.stderr)
            return "UNKNOWN"

    d = loading.loadfile(filename)
    result = detect(d)
    annotations = generate_annotations(
        result, use_fullname=use_fullname, toplevel_name="toplevel"
    )

    for info in result.history:
        if isinstance(info, Object):
            metadata = annotations["/".join(info.path)]
            name = metadata.get("after", metadata["before"])["name"]
            name_map[id(info)] = name

            m.stmt(f"# from: {'/'.join(info.path)}")
            with m.class_(name):
                for name, sub_info in info.props.items():
                    m.stmt("{}: {}", name, _pytype(sub_info))
    print(m)
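
# Example call (the filename is hypothetical): `detect` infers the structure of the loaded
# JSON/YAML document and `emit` prints one annotated class per detected object.
# emit("./config.json", use_fullname=True)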