def parse_args(self, *args, **kwargs):
    self.history.append({"name": "parse_args", "args": args, "kwargs": kwargs})

    import inspect
    import re
    import sys

    from prestring.python import Module, LazyArgumentsAndKeywords

    def _make_call_stmt(history, default=""):
        name = history["name"]
        if name == "__init__":
            name = default
        kwargs = {k: repr(v) for k, v in history["kwargs"].items()}
        args = [repr(v) for v in history["args"]]
        return f"{name}({LazyArgumentsAndKeywords(args, kwargs)})"

    m = Module()
    m.sep()
    with m.def_("main"):
        m.import_("argparse")
        m.stmt(f"parser = argparse.ArgumentParser{_make_call_stmt(self.history[0])}")
        m.stmt("parser.print_usage = parser.print_help")
        for x in self.history[1:-1]:
            m.stmt(f"parser.{_make_call_stmt(x)}")
        m.stmt(f"args = parser.{_make_call_stmt(self.history[-1])}")
        m.stmt(f"{self.fn.__name__}(**vars(args))")
    with m.if_("__name__ == '__main__'"):
        m.stmt("main()")

    with open(inspect.getsourcefile(self.fn)) as rf:
        source = rf.read()
    rx = re.compile(r"(?:@([\S]+\.)?as_command.*|^.*import ascommand.*)\n", re.MULTILINE)
    exposed = rx.sub("", source)
    print(exposed)
    print(m)
    sys.exit(0)
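# For illustration only (the decorated function, its arguments, and the exact
# add_argument calls are assumptions; the real output is driven by the recorded
# self.history): for something like `@as_command def hello(name, verbose=False)`,
# printing `m` is expected to yield code shaped roughly like:
#
#     def main():
#         import argparse
#         parser = argparse.ArgumentParser(prog='hello')
#         parser.print_usage = parser.print_help
#         parser.add_argument('name')
#         parser.add_argument('--verbose', action='store_true')
#         args = parser.parse_args()
#         hello(**vars(args))
#
#
#     if __name__ == '__main__':
#         main()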
def run(*, path: str, disable_docstring) -> None:
    d = loading.loadfile(path)
    m = Module()
    a = Accessor(d, m, disable_docstring=disable_docstring)

    m.import_("typing", as_="t")
    m.sep()
    m.stmt("AnyService = t.Any # TODO")
    m.stmt("AnyResource = t.Any # TODO")
    m.sep()

    for rname, resource in a.resources.items():
        with m.class_(titleize(rname), ""):
            with m.method("__init__", "resource: AnyResource"):
                m.stmt("self.internal = resource")

            m.stmt("# methods")
            for mname, method, params in a.iterate_methods(resource):
                with m.method(mname, params):
                    a.emit_docstring(method["description"])
                    m.stmt(f"""# {method["httpMethod"]}: {method["flatPath"]}""")
                    m.stmt(f"""# id: {method["id"]}""")
                    m.stmt(f"return self.internal.{mname}({params})")

            m.stmt("# nested resources")
            for srname, subresource in a.iterate_nested_resources(resource):
                with m.method(srname):
                    # NOTE: `params` here is still the value bound by the last
                    # iteration of the methods loop above.
                    m.stmt(f"return self.internal.{srname}({params})")

            # m.stmt("# nested resources")
            # for mname, subresource in resource.get("resources", {}).items():
            #     params = LParams()
            #     for is_positional, (pname, parameter) in itertools.zip_longest(
            #         subresource.get("parameterOrder", []),
            #         subresource.get("parameters", {}).items(),
            #     ):
            #         if is_positional:
            #             params.append(pname)  # TODO type:
            #         else:
            #             params[pname] = None  # TODO type:
            #     with m.method(mname, params):
            #         docstring(subresource["description"])
            #         m.stmt(f"""# id: {subresource["id"]}""")
            #         m.stmt(f"return self.{mname}({params})")

    with m.class_("Service"):
        with m.method("__init__", "service: AnyService"):
            m.stmt("self.internal = service")

        for rname in a.resources.keys():
            with m.method(rname, return_type=titleize(rname)):
                m.stmt(f"return {titleize(rname)}(self.internal.{rname}())")

    with m.def_("build", "*args", "**kwargs", return_type="Service"):
        m.stmt("# TODO: use the signature of googleapiclient.discovery.build")
        m.submodule().from_("googleapiclient.discovery", "build")
        m.stmt(
            f"return Service(build({a.name!r}, {a.version!r}, *args, **kwargs))"
        )

    print(m)
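# For illustration only (the resource and method names below are hypothetical;
# the real output depends on the loaded discovery document): for a document with
# a single `projects` resource exposing a `list(projectId)` method, the generated
# module is shaped roughly like:
#
#     import typing as t
#
#     AnyService = t.Any # TODO
#     AnyResource = t.Any # TODO
#
#
#     class Projects:
#         def __init__(self, resource: AnyResource):
#             self.internal = resource
#
#         # methods
#         def list(self, projectId):
#             # GET: v1/projects
#             # id: example.projects.list
#             return self.internal.list(projectId)
#
#         # nested resources
#
#
#     class Service:
#         def __init__(self, service: AnyService):
#             self.internal = service
#
#         def projects(self) -> Projects:
#             return Projects(self.internal.projects())
#
#
#     def build(*args, **kwargs) -> Service:
#         # TODO: use the signature of googleapiclient.discovery.build
#         from googleapiclient.discovery import build
#         return Service(build('example', 'v1', *args, **kwargs))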
class _ViewContext(object):
    def __init__(self, parent):
        self.parent = parent
        self.m = Module(import_unique=True)
        self.im = self.m.submodule()
        self.m.sep()
        # todo: scan module file

    def add_view(self, pattern, sym, route, method, d, docstring=None):
        name = sym.rsplit(".", 1)[-1]
        m = self.m
        self.from_("pyramid.view", "view_config")

        view_setting = self.parent.build_view_setting(pattern, route, method, here=self)
        m.stmt(LazyFormat("@view_config({})", LazyKeywordsRepr(view_setting)))
        with m.def_(name, "context", "request"):
            m.stmt('"""')
            if "summary" in d:
                m.stmt(d["summary"])
            if docstring:
                m.stmt("")
                for line in docstring.split("\n"):
                    m.stmt(line)
            m.stmt('"""')
            m.return_("{}")

    def from_(self, module, name):
        logger.debug(" import: module=%s, name=%s", module, name)
        self.im.from_(module, name)

    def import_(self, module):
        logger.debug(" import: module=%s", module)
        self.im.import_(module)
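# For illustration only (the route, the view symbol, and the keys produced by
# parent.build_view_setting are hypothetical): a call such as
# add_view("/users/{id}", "myapp.views.get_user", "user", "GET", {"summary": "get a user"})
# is expected to emit something like:
#
#     @view_config(route_name='user', request_method='GET')
#     def get_user(context, request):
#         """
#         get a user
#         """
#         return {}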
def main():
    from openapi_stream import main

    m = Module(import_unique=True)
    m.header_area = m.submodule()
    m.import_area = m.submodule()
    m.sep()

    g = Generator(m)

    toplevels: t.List[Event] = []
    stream: t.Iterable[Event] = main(create_visitor=ToplevelVisitor)

    def consume_stream(stream: t.Iterable[Event], *, is_delayed=False) -> t.List[Event]:
        delayed_stream: t.List[Event] = []
        for ev in stream:
            if ev.uid in g.name_manager:
                continue

            if names.roles.has_expanded in ev.roles:
                g.case_definitions_manager.add_definitions(
                    ev.get_annotated(names.annotations.expanded)["definitions"]
                )

            if names.roles.has_name in ev.roles:
                g.generate_class(ev)
                continue

            if not is_delayed:
                if names.roles.toplevel_properties in ev.roles:
                    toplevels.append(ev)  # xxx
                delayed_stream.append(ev)
                continue

            # xxx:
            if (
                ev.name in (names.types.object, names.types.array)
                or names.roles.combine_type in ev.roles
                or names.roles.child_of_xxx_of in ev.roles
                or names.roles.field_of_something in ev.roles
            ):
                uid_and_clsname_pairs = sorted(
                    g.name_manager.iterate_clssname_and_prefix_pairs(),
                    key=lambda pair: len(pair[0]),
                    reverse=True,
                )
                uid = ev.uid
                for parent_uid, (parent_clsname, prefix) in uid_and_clsname_pairs:
                    if uid.startswith(parent_uid):
                        classdef_sm = g._end_of_class_definition_conts[parent_uid]
                        fieldname = uid.replace(parent_uid, "").lstrip("/")
                        clsname = f"_{g.helper.classname(ev, name=fieldname)}"
                        classdef_sm.stmt(
                            f"# anonymous definition for {fieldname!r} (TODO: nodename)"
                        )
                        g.generate_class(
                            ev,
                            clsname=clsname,
                            m=classdef_sm,
                            prefix=f"{prefix}{parent_clsname}.",
                        )
                        # ok: properties
                        # ok: oneOf, anyof, allof
                        # todo: additionalProperties, patternProperties
                        # assert "/" not in fieldname
                        name = fieldname
                        g._gen_visitor_property(ev, name=name, uid=uid, m=classdef_sm)
                        break
            else:
                raise RuntimeError(f"unexpected type: {ev.name}")
        return delayed_stream

    delayed_stream = consume_stream(stream, is_delayed=False)

    for ev in toplevels:
        if ev.uid.endswith("#/"):
            g.generate_class(ev, clsname="Toplevel")

            import os.path

            m.header_area.stmt(
                f"# generated from {os.path.relpath(ev.root_file, start=os.getcwd())}"
            )

    m.import_area.from_("dictknife.langhelpers", "reify")
    m.import_area.from_("openapi_stream", "runtime")
    m.import_area.from_("openapi_stream.context", "Context")

    delayed_stream = sorted(delayed_stream, key=lambda ev: len(ev.uid))
    delayed_stream = consume_stream(delayed_stream, is_delayed=True)

    if g.case_definitions_manager.has_definitions:
        data = {"definitions": g.case_definitions_manager.definitions}
        m.import_area.from_("openapi_stream", "runtime")
        g.emitter.emit_data(m, "_case = runtime.Case({})", data, nofmt=True)

    print(m)
import inspect

from yaml.constructor import Constructor
from prestring.python import Module

m = Module()
m.from_("yaml.constructor", "Constructor")
m.sep()

with m.class_("WrappedConstructor", "Constructor"):
    with m.def_("wrap", "self", "path", "name", "node", "r"):
        with m.if_("r is None"):
            m.stmt("return r")
        m.stmt('# print("@", id(r), repr(r))')
        m.stmt("mem[id(r)] = node")
        m.stmt("return r")

    seen = set()
    for cls in Constructor.mro():
        for name, attr in cls.__dict__.items():
            if name in seen:
                continue
            seen.add(name)
            if name.startswith("construct_") and callable(attr):
                sigs = inspect.signature(attr)
                m.stmt("def {}{}:", name, sigs)
                with m.scope():
                    args = []
                    for v in sigs.parameters.values():
                        if v.name == "self":
                            continue
                        if v.default is inspect._empty:
                            args.append(str(v))
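# A hedged sketch of how the truncated loop above might be finished (the else
# branch, the emitted return statement, and the trailing print are assumptions,
# not the original code): forward defaulted parameters by keyword and wrap the
# result of the inherited Constructor method.
#
#                         else:
#                             args.append(f"{v.name}={v.name}")
#                     m.stmt(
#                         "return self.wrap(None, {!r}, node, Constructor.{}({}))",
#                         name,
#                         name,
#                         ", ".join(["self"] + args),
#                     )
# print(m)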
from prestring.python import Module

m = Module()  # noqa
m.sep()

with m.def_('hello', 'name', '*', 'message: str = "hello world"'):
    m.docstring('greeting message')
    m.stmt('print(f"{name}: {message}")')

print(m)
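# Printing the module emits the generated function, roughly (the exact docstring
# layout may differ slightly between prestring versions):
#
#     def hello(name, *, message: str = "hello world"):
#         """
#         greeting message
#         """
#         print(f"{name}: {message}")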
def emit(filename: str, *, use_fullname: bool = False) -> None:
    import typing as t

    from dictknife import loading
    from prestring.python import Module
    from detector import detect, Object, TypeInfo, ZERO, generate_annotations

    name_map = {}

    m = Module()
    m.toplevel = m.submodule()
    m.sep()

    def _pytype(info: TypeInfo, *, m=m, aliases: t.Dict[str, str] = {"typing": "t"}):
        if info is ZERO:
            module = aliases.get(t.__name__) or t.__name__
            return f"{module}.Any"

        if hasattr(info, "base"):
            module = aliases.get(info.base.__module__) or info.base.__module__
            m.toplevel.import_(info.base.__module__, aliases.get(info.base.__module__))
            if info.base is t.Optional:
                return f"{module}.Optional[{_pytype(info.item)}]"
            elif info.base is t.List:
                return f"{module}.List[{_pytype(info.item)}]"
        elif hasattr(info, "type"):
            module = aliases.get(info.type.__module__) or info.type.__module__
            prefix = module + "."
            if module == "builtins":
                prefix = ""
            else:
                m.toplevel.import_(info.type.__module__, aliases.get(info.type.__module__))
            return prefix + info.type.__name__

        try:
            return name_map[id(info)]
        except KeyError:
            # FIXME: bug
            import sys

            print(f"something wrong: {info}", file=sys.stderr)
            return "UNKNOWN"

    d = loading.loadfile(filename)
    result = detect(d)
    annotations = generate_annotations(
        result, use_fullname=use_fullname, toplevel_name="toplevel"
    )

    for info in result.history:
        if isinstance(info, Object):
            metadata = annotations["/".join(info.path)]
            name = metadata.get("after", metadata["before"])["name"]
            name_map[id(info)] = name

            m.stmt(f"# from: {'/'.join(info.path)}")
            with m.class_(name):
                for name, sub_info in info.props.items():
                    m.stmt("{}: {}", name, _pytype(sub_info))

    print(m)
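# For illustration only (the input data, the inferred class name, and the path
# comment are hypothetical; they depend on `detector`'s type inference and
# naming): for a file containing {"person": {"name": "foo", "age": 20}}, the
# emitted code would look roughly like:
#
#     # from: person
#     class Person:
#         name: str
#         age: int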