def emit(ctx: Context):
    from prestring.python import Module

    m = Module()
    for name, cls in ctx.types.items():
        with m.class_(name):  # TODO: omit class inheritance
            for field_name, field_type in t.get_type_hints(cls).items():
                # TODO: to pytype
                m.stmt(f"{field_name}: {field_type.__name__}")
    return m
def gen() -> Module:
    m = Module()
    with m.def_("foo", "x", "y", return_type="int"):
        m.stmt("pass")
    with m.def_("bar", "x", return_type="int"):
        m.stmt("pass")
    return m
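# gen() returns a Module that renders roughly as follows (exact blank-line
# handling is up to prestring's formatting):
#
# def foo(x, y) -> int:
#     pass
#
#
# def bar(x) -> int:
#     pass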
def gen(target: ModuleType, *, m=None):
    m = m or Module()
    m.import_("typing as t")
    m.from_("pydantic", "BaseModel")
    m.from_("fastapi", "APIRouter", "Depends")
    m.from_("monogusa.web", "runtime")
    m.import_(target.__name__)

    m.stmt("router = APIRouter()")
    m.sep()

    # TODO: collect functions and use them.
    spec = fnspec.fnspec(commands.hello)
    co = fnspec.spec_to_schema_code(spec, name=pascalcase(spec.name))
    co.emit(m=m)

    m.stmt('@router.post("/{}", response_model=runtime.CommandOutput)', spec.name)
    with m.def_(
        spec.name,
        f"input: {co.name}",
        "writer: commands.Writer = Depends(commands.writer)",  # deps
        return_type="t.Dict[str, t.Any]",
    ):
        with m.with_("runtime.handle() as s"):
            m.stmt("{}.{}(writer, **input.dict())", target.__name__, spec.name)
            m.stmt("return s.dict()")
    return m
def main(d: AnyDict, *, verbose: bool = False) -> None:
    logging.basicConfig(level=logging.INFO)  # debug

    m = Module()
    import_area: Module = m.submodule()
    import_area.stmt("from __future__ import annotations")

    ctx = Context(import_area=import_area, verbose=verbose)
    scan(ctx, d=d)

    emitter = Emitter(m=m)
    print(emitter.emit(ctx))
    cc = ctx.cache_counter
    logger.info("cache hits=%s, most common=%s", sum(cc.values()), cc.most_common(3))
def argspec_to_schema(
    spec: inspect.FullArgSpec, classname: str, *, m: t.Optional[Module] = None
) -> Module:
    if m is None:
        m = Module()
        m.from_("pydantic").import_("BaseModel")

    classified = _classify_args(spec)

    with m.class_(classname, "BaseModel"):
        fields = [(name, v) for name, v in spec.annotations.items() if name != "return"]
        if len(fields) == 0:
            m.stmt("pass")

        for name, typ in fields:
            kind = classified[name]
            if kind == "args":
                continue
            elif kind == "args_defaults":
                continue
            elif kind == "kw":
                m.stmt("{}: {}", name, _resolve_type(typ))
            elif kind == "kw_defaults":
                m.stmt("{}: {} = {}", name, _resolve_type(typ), spec.kwonlydefaults[name])
            else:
                raise ValueError(f"invalid kind. name={name}, kind={kind}")
    return m
def emit(result: Result, *, m: t.Optional[Module] = None) -> Module:
    m = m or Module()
    m.toplevel = m.submodule()
    g = m.toplevel.import_("graphql", as_="g")
    m.sep()

    mapping = {
        str: g.GraphQLString,
        int: g.GraphQLInt,
        bool: g.GraphQLBoolean,
    }

    name_map = get_name_map(result, name="Person")

    for info in result.history:
        if not isinstance(info, Object):
            continue

        name = name_map["/".join(info.path)]
        m.stmt("{} = {}(", name, g.GraphQLObjectType)
        with m.scope():
            m.stmt("{!r},", name)
            m.stmt("lambda: {")
            with m.scope():
                for fieldname, field in info.props.items():
                    m.stmt(
                        "{!r}: {},",
                        fieldname,
                        g.GraphQLField(to_graphql_type(field, mapping=mapping, g=g)),
                    )
            m.stmt("}")
        m.stmt(")")
    return m
def argspec_to_schema(
    spec: FuncSpec, *, name: str, m: t.Optional[Module] = None
) -> Module:
    if m is None:
        m = Module()
        m.from_("pydantic").import_("BaseModel")

    with m.class_(name, "BaseModel"):
        if len(spec.arguments) == 0:
            m.stmt("pass")

        for name, typ in spec.arguments:
            kind = spec.kind_of(name)
            if kind == "args":
                continue
            elif kind == "args_defaults":
                continue
            elif kind == "kw":
                m.stmt("{}: {}", name, spec.type_str_of(typ))
            elif kind == "kw_defaults":
                m.stmt(
                    "{}: {} = {}",
                    name,
                    spec.type_str_of(typ),
                    spec.kwonlydefaults[name],
                )
            else:
                raise ValueError(f"invalid kind. name={name}, kind={kind}")
    return m
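# A rough sketch of the intent, assuming spec.type_str_of renders plain builtin
# names: for a function like `def hello(*, name: str, age: int = 0)` the emitted
# schema would look like
#
# class Hello(BaseModel):
#     name: str
#     age: int = 0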
def main():
    import inspect

    from prestring.python import Module
    from prestring.python.transform import transform_string

    m = Module()
    source = inspect.getsource(hello)
    m = transform_string(source, m=m)
    print(m)
def __init__(self, parent):
    self.scanned = set()
    self.parent = parent

    m = self.m = Module()
    with m.def_("includeme_swagger_router", "config"):
        self.fm = m.submodule()
        self.scanm = m.submodule()
    with m.def_("includeme", "config"):
        m.stmt("config.include(includeme_swagger_router)")
class _ViewContext(object):
    def __init__(self, parent):
        self.parent = parent
        self.m = Module(import_unique=True)
        self.im = self.m.submodule()
        self.m.sep()
        # todo: scan module file

    def add_view(self, pattern, sym, route, method, d, docstring=None):
        name = sym.rsplit(".", 1)[-1]
        m = self.m
        self.from_("pyramid.view", "view_config")
        view_setting = self.parent.build_view_setting(pattern, route, method, here=self)
        m.stmt(LazyFormat("@view_config({})", LazyKeywordsRepr(view_setting)))
        with m.def_(name, "context", "request"):
            m.stmt('"""')
            if "summary" in d:
                m.stmt(d["summary"])
            if docstring:
                m.stmt("")
                for line in docstring.split("\n"):
                    m.stmt(line)
            m.stmt('"""')
            m.return_("{}")

    def from_(self, module, name):
        logger.debug("  import: module=%s, name=%s", module, name)
        self.im.from_(module, name)

    def import_(self, module):
        logger.debug("  import: module=%s", module)
        self.im.import_(module)
def __init_subclass__(cls):
    hints = t.get_type_hints(cls)
    m = Module()

    name = cls.__name__
    fields = [
        V(name=k, type_=v.__name__ if hasattr(v, "__name__") else v)
        for k, v in hints.items()
    ]
    _gen_init(m, name=name, fields=fields)
    _gen_repr(m, name=name, fields=fields)

    code = str(m)
    d = {}
    exec(code, d)
    cls.__init__ = d["__init__"]
    cls.__repr__ = d["__repr__"]
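# Hypothetical usage sketch, assuming this __init_subclass__ hook lives on a
# base class (called `AutoInit` here) and that _gen_init/_gen_repr emit the
# usual field-by-field __init__/__repr__:
#
# class Point(AutoInit):
#     x: int
#     y: int
#
# p = Point(x=1, y=2)
# print(p)  # e.g. Point(x=1, y=2)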
def transform(node, *, m=None, is_whole=None):
    is_whole = is_whole or m is None
    if m is None:
        m = Module()
        m.g = m.submodule()

    if is_whole:
        m.g.from_("prestring.python", "Module")
        m.g.stmt("m = Module()  # noqa")

    t = Transformer(node, m=m)
    t.visit(node)

    if len(m.g.imported_set) > 0:
        m.g.stmt("m.sep()")
    m.g.sep()

    if is_whole:
        m.stmt("print(m)")
    return m
def gen_class_code(name: str, fields: t.List[V]) -> str:
    m = Module()
    with m.class_(name):
        _gen_init(m, name=name, fields=fields)
        _gen_repr(m, name=name, fields=fields)
    return str(m)
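# Hypothetical usage, assuming V is a simple (name, type_) record consumed by
# _gen_init/_gen_repr:
#
# print(gen_class_code("Point", [V(name="x", type_="int"), V(name="y", type_="int")]))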
from prestring.python import Module

m = Module()
with m.def_("main"):
    m.stmt("print('hello world')")

with m.if_("__name__ == '__main__'"):
    m.stmt("main()")

print(m)
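# print(m) emits roughly (exact blank-line handling is up to prestring):
#
# def main():
#     print('hello world')
#
#
# if __name__ == '__main__':
#     main()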
from prestring.python import Module

m = Module()
with m.def_("foo"):
    m.return_("'foo'")

print(m)
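# print(m) emits roughly:
#
# def foo():
#     return 'foo'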
from prestring.python import Module
from prestring.python._codeobject import CodeobjectModule

m = Module()
co = CodeobjectModule(m)

re = co.import_("re")
sys = co.import_("sys")
m.sep()

pattern = co.let(
    "pattern",
    re.compile(
        r"^(?P<label>DEBUG|INFO|WARNING|ERROR|CRITICAL):\s*(?P<message>\S+)",
        re.IGNORECASE,
    ),
)

with m.for_("line", sys.stdin):
    matched = co.let("matched", pattern.search(co.symbol("line")))
    with m.if_(f"{matched} is not None"):
        m.stmt("print(matched.groupdict())")
print(m)
from prestring.python import Module

m = Module()
m.import_("math")
m.sep()

with m.def_('rmse', 'xs', 'ys'):
    m.stmt('acc = 0')
    m.stmt('assert len(xs) == len(ys)')
    with m.for_('x, y', 'zip(xs, ys)'):
        m.stmt('acc += (x - y) ** 2')
    m.return_('math.sqrt(acc / len(xs))')

# m.stmt('xs = [92, 95, 110, 114, 100, 98, 93]')
# m.stmt('ys = [95, 93, 100, 114, 105, 100, 96]')
print(m)
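# print(m) emits roughly:
#
# import math
#
#
# def rmse(xs, ys):
#     acc = 0
#     assert len(xs) == len(ys)
#     for x, y in zip(xs, ys):
#         acc += (x - y) ** 2
#     return math.sqrt(acc / len(xs))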
__mod__
__rmod__
__pos__
__neg__
__call__
__getitem__
__lt__
__le__
__gt__
__ge__
__int__
__float__
__complex__
__pow__
__rpow__
__sub__
__rsub__
"""

from prestring.python import Module

m = Module()
with m.scope():
    for x in xs.strip().split("\n"):
        if not x.strip():
            continue
        with m.def_(x, "self", "*args", "**kwargs"):
            m.stmt(f'return self.__getattr__({x!r})(*args, **kwargs)')
print(m)
spec = importlib.util.find_spec("botocore")
path = pathlib.Path(spec.origin)
if path.name == "__init__.py":
    path = path.parent

d = loading.loadfile(path / ("data/sqs/2012-11-05/service-2.json"))

"""
operations:
  <name>:
    name: <>
    input: {"shape": <>}
    output: {"shape": <>, "resultWrapper": <>}
    errors: {}
    documentation
"""

m = Module()
m.from_("__future__").import_("annotations")
m.sep()

m.stmt("# operations")
with m.class_("SQS"):
    for name, sd in d["operations"].items():
        with m.def_(
            name,
            f"input: {sd['input']['shape']}",
            return_type=sd["output"]["shape"] if "output" in sd else None,
        ):
            m.stmt("...")

m.stmt("# shapes")
with m.class_("SQS"):
    for name, sd in d["shapes"].items():
        with m.class_(name):
def emit(self, *, m: t.Optional[Module] = None) -> Module:
    m = m or Module()
    return self._emit(m, name=self.name)
def Person(m: Module, name: str) -> Module:
    # todo: import
    with m.class_(name, "BaseModel"):
        m.stmt("name: str")
        m.stmt("age: int = 0")
    return m
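# Calling Person(Module(), "Person") and printing the module gives roughly:
#
# class Person(BaseModel):
#     name: str
#     age: int = 0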
def parse_args(self, *args, **kwargs):
    self.history.append({"name": "parse_args", "args": args, "kwargs": kwargs})

    from prestring.python import Module, LazyArgumentsAndKeywords

    def _make_args(history, default=""):
        name = history["name"]
        if name == "__init__":
            name = default
        kwargs = {k: repr(v) for k, v in history["kwargs"].items()}
        args = [repr(v) for v in history["args"]]
        return f"{name}({LazyArgumentsAndKeywords(args, kwargs)})"

    m = Module()
    with m.def_("Main"):
        m.import_("argparse")
        m.stmt(f"parser = argparse.ArgumentParser{_make_args(self.history[0])}")
        for x in self.history[1:-1]:
            m.stmt(f"parser.{_make_args(x)}")
        m.stmt(f"args = parser.{_make_args(self.history[-1])}")
        m.stmt("main(**vars(args))")
    with m.if_("__name__ == '__main__'"):
        m.stmt("Main()")

    print(m)
    sys.exit(0)
def __init__(self, m=None, im=None):
    self.m = m or Module(import_unique=True)
    self.im = im or self.m.submodule()
import inspect

from yaml.constructor import Constructor
from prestring.python import Module

m = Module()
m.from_("yaml.constructor", "Constructor")
m.sep()

with m.class_("WrappedConstructor", "Constructor"):
    with m.def_("wrap", "self", "path", "name", "node", "r"):
        with m.if_("r is None"):
            m.stmt("return r")
        m.stmt('# print("@", id(r), repr(r))')
        m.stmt("mem[id(r)] = node")
        m.stmt("return r")

    seen = set()
    for cls in Constructor.mro():
        for name, attr in cls.__dict__.items():
            if name in seen:
                continue
            seen.add(name)

            if name.startswith("construct_") and callable(attr):
                sigs = inspect.signature(attr)
                m.stmt("def {}{}:", name, sigs)
                with m.scope():
                    args = []
                    for v in sigs.parameters.values():
                        if v.name == "self":
                            continue
                        if v.default is inspect._empty:
                            args.append(str(v))
    pass


def g(
    name: str,
    val: int,
    default: int = 0,
    *,
    nickname: t.Optional[str] = None,
    debug: bool = False,
    **metadata: t.Optional[t.Any],
) -> None:
    pass


m = Module()
m.toplevel = m.submodule(import_unique=True)
m.sep()

# todo: alias
for fn in [f, g]:
    spec = fnspec(fn)
    with m.class_(pascalcase(spec.name)):
        for name, typ, kind in spec.parameters:
            if typ.__module__ != "builtins":
                m.toplevel.import_(typ.__module__)

            info = typeinfo(typ)
            rhs = spec.type_str_of(info.normalized)
            if info.is_optional:
                rhs = LazyFormat("typing.Optional[{}]", rhs)
def main():
    from openapi_stream import main

    m = Module(import_unique=True)
    m.header_area = m.submodule()
    m.import_area = m.submodule()
    m.sep()

    g = Generator(m)
    toplevels: t.List[Event] = []
    stream: t.Iterable[Event] = main(create_visitor=ToplevelVisitor)

    def consume_stream(stream: t.Iterable[Event], *, is_delayed=False) -> t.List[Event]:
        delayed_stream: t.List[Event] = []
        for ev in stream:
            if ev.uid in g.name_manager:
                continue

            if names.roles.has_expanded in ev.roles:
                g.case_definitions_manager.add_definitions(
                    ev.get_annotated(names.annotations.expanded)["definitions"]
                )

            if names.roles.has_name in ev.roles:
                g.generate_class(ev)
                continue

            if not is_delayed:
                if names.roles.toplevel_properties in ev.roles:
                    toplevels.append(ev)  # xxx
                delayed_stream.append(ev)
                continue

            # xxx:
            if (
                ev.name in (names.types.object, names.types.array)
                or names.roles.combine_type in ev.roles
                or names.roles.child_of_xxx_of in ev.roles
                or names.roles.field_of_something in ev.roles
            ):
                uid_and_clsname_pairs = sorted(
                    g.name_manager.iterate_clssname_and_prefix_pairs(),
                    key=lambda pair: len(pair[0]),
                    reverse=True,
                )
                uid = ev.uid
                for parent_uid, (parent_clsname, prefix) in uid_and_clsname_pairs:
                    if uid.startswith(parent_uid):
                        classdef_sm = g._end_of_class_definition_conts[parent_uid]
                        fieldname = uid.replace(parent_uid, "").lstrip("/")
                        clsname = f"_{g.helper.classname(ev, name=fieldname)}"
                        classdef_sm.stmt(
                            f"# anonymous definition for {fieldname!r} (TODO: nodename)"
                        )
                        g.generate_class(
                            ev,
                            clsname=clsname,
                            m=classdef_sm,
                            prefix=f"{prefix}{parent_clsname}.",
                        )

                        # ok: properties
                        # ok: oneOf, anyof, allof
                        # todo: additionalProperties, patternProperties
                        # assert "/" not in fieldname
                        name = fieldname
                        g._gen_visitor_property(ev, name=name, uid=uid, m=classdef_sm)
                        break
            else:
                raise RuntimeError(f"unexpected type: {ev.name}")
        return delayed_stream

    delayed_stream = consume_stream(stream, is_delayed=False)

    for ev in toplevels:
        if ev.uid.endswith("#/"):
            g.generate_class(ev, clsname="Toplevel")

            import os.path

            m.header_area.stmt(
                f"# generated from {os.path.relpath(ev.root_file, start=os.getcwd())}"
            )

    m.import_area.from_("dictknife.langhelpers", "reify")
    m.import_area.from_("openapi_stream", "runtime")
    m.import_area.from_("openapi_stream.context", "Context")

    delayed_stream = sorted(delayed_stream, key=lambda ev: len(ev.uid))
    delayed_stream = consume_stream(delayed_stream, is_delayed=True)

    if g.case_definitions_manager.has_definitions:
        data = {"definitions": g.case_definitions_manager.definitions}
        m.import_area.from_("openapi_stream", "runtime")
        g.emitter.emit_data(m, "_case = runtime.Case({})", data, nofmt=True)

    print(m)
from prestring.python import Module, LazyFormat


def f(
    name: str,
    val: int,
    default: int = 0,
    *,
    nickname: t.Optional[str] = None,
    debug: bool = False,
    **metadata: t.Optional[t.Any],
) -> None:
    pass


m = Module()
m.toplevel = m.submodule(import_unique=True)
m.sep()

spec = fnspec(f)
with m.class_("F"):
    for name, typ, kind in spec.parameters:
        if typ.__module__ != "builtins":
            m.toplevel.import_(typ.__module__)

        info = typeinfo(typ)
        rhs = spec.type_str_of(info.normalized)
        if info.is_optional:
            rhs = LazyFormat("typing.Optional[{}]", rhs)
        if kind == "var_kw":
from prestring.python import Module

m = Module()  # noqa
m.sep()

with m.def_('hello', 'name', '*', 'message: str = "hello world"'):
    m.docstring('greeting message')
    m.stmt('print(f"{name}: {message}")')

print(m)
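# print(m) emits roughly (docstring rendering depends on prestring's defaults):
#
# def hello(name, *, message: str = "hello world"):
#     """
#     greeting message
#     """
#     print(f"{name}: {message}")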
def run(*, path: str, disable_docstring) -> None:
    d = loading.loadfile(path)
    m = Module()
    a = Accessor(d, m, disable_docstring=disable_docstring)

    m.import_("typing", as_="t")
    m.sep()
    m.stmt("AnyService = t.Any # TODO")
    m.stmt("AnyResource = t.Any # TODO")
    m.sep()

    for rname, resource in a.resources.items():
        with m.class_(titleize(rname), ""):
            with m.method("__init__", "resource: AnyResource"):
                m.stmt("self.internal = resource")

            m.stmt("# methods")
            for mname, method, params in a.iterate_methods(resource):
                with m.method(mname, params):
                    a.emit_docstring(method["description"])
                    m.stmt(f"""# {method["httpMethod"]}: {method["flatPath"]}""")
                    m.stmt(f"""# id: {method["id"]}""")
                    m.stmt(f"return self.internal.{mname}({params})")

            m.stmt("# nested resources")
            for srname, subresource in a.iterate_nested_resources(resource):
                with m.method(srname):
                    m.stmt(f"return self.internal.{srname}({params})")

    # m.stmt("# nested resources")
    # for mname, subresource in resource.get("resources", {}).items():
    #     params = LParams()
    #     for is_positional, (pname, parameter) in itertools.zip_longest(subresource.get("parameterOrder", []), subresource.get("parameters", {}).items()):
    #         if is_positional:
    #             params.append(pname)  # TODO type:
    #         else:
    #             params[pname] = None  # TODO type:
    #     with m.method(mname, params):
    #         docstring(subresource["description"])
    #         m.stmt(f"""# id: {subresource["id"]}""")
    #         m.stmt(f"return self.{mname}({params})")

    with m.class_("Service"):
        with m.method("__init__", "service: AnyService"):
            m.stmt("self.internal = service")

        for rname in a.resources.keys():
            with m.method(rname, return_type=titleize(rname)):
                m.stmt(f"return {titleize(rname)}(self.internal.{rname}())")

    with m.def_("build", "*args", "**kwargs", return_type="Service"):
        m.stmt("# TODO: use the signature of googleapiclient.discovery.build")
        m.submodule().from_("googleapiclient.discovery", "build")
        m.stmt(
            f"return Service(build({a.name!r}, {a.version!r}, *args, **kwargs))"
        )
    print(m)
from prestring.python import Module

m = Module()
m.import_("re")
m.import_("sys")
m.sep()

m.stmt(
    "pattern = re.compile({!r}, re.IGNORECASE)",
    r"^(?P<label>DEBUG|INFO|WARNING|ERROR|CRITICAL):\s*(?P<message>\S+)",
)
with m.for_("line", "sys.stdin"):
    m.stmt("m = pattern.search(line)")
    with m.if_("m is not None"):
        m.stmt("print(m.groupdict())")
print(m)
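# print(m) emits roughly (the pattern is interpolated via {!r}, hence the
# doubled backslashes in the output):
#
# import re
# import sys
#
#
# pattern = re.compile('^(?P<label>DEBUG|INFO|WARNING|ERROR|CRITICAL):\\s*(?P<message>\\S+)', re.IGNORECASE)
# for line in sys.stdin:
#     m = pattern.search(line)
#     if m is not None:
#         print(m.groupdict())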
from prestring.python import Module
from prestring.utils import LParams

m = Module()
with m.class_("A0"):
    params0 = LParams()
    with m.method("f0", params0):
        m.stmt("pass")

with m.class_("A1"):
    params1 = LParams()
    with m.method("f1", params1):
        m.stmt("pass")
    params1.append_tail("*args")

print(m)
# class A0:
#     def f0(self):  <- this (not f0(self,))
#         pass
# class A1:
#     def f1(self, *args):
#         pass
import typing as t

from metashape.runtime import get_walker
from prestring.python import Module
from prestring.utils import LazyFormat, LazyArgumentsAndKeywords
from typestr import typestr


class Hello:
    name: str
    age: int
    nickname: t.Optional[str] = None


m = Module()
w = get_walker([Hello])
for cls in w.walk():
    name = w.resolver.resolve_typename(cls)
    args = []
    for fieldname, info, metadata in w.for_type(cls).walk():
        # todo: default
        if "default" in metadata:
            args.append(
                LazyFormat(
                    "{}: {} = {}", fieldname, typestr(info.raw), metadata["default"]
                )
            )
        else:
            args.append(LazyFormat("{}: {}", fieldname, typestr(info.raw)))
    with m.def_(name, LazyArgumentsAndKeywords(args)):
        m.stmt("...")
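# For the Hello class above, printing m would give roughly the following (the
# exact rendering of Optional and the default depends on typestr's defaults):
#
# def Hello(name: str, age: int, nickname: typing.Optional[str] = None):
#     ...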
def parse_args(self, *args, **kwargs):
    self.history.append({"name": "parse_args", "args": args, "kwargs": kwargs})

    from prestring.python import Module, LazyArgumentsAndKeywords

    def _make_call_stmt(history, default=""):
        name = history["name"]
        if name == "__init__":
            name = default
        kwargs = {k: repr(v) for k, v in history["kwargs"].items()}
        args = [repr(v) for v in history["args"]]
        return f"{name}({LazyArgumentsAndKeywords(args, kwargs)})"

    m = Module()
    m.sep()
    with m.def_("main"):
        m.import_("argparse")
        m.stmt(f"parser = argparse.ArgumentParser{_make_call_stmt(self.history[0])}")
        m.stmt("parser.print_usage = parser.print_help")
        for x in self.history[1:-1]:
            m.stmt(f"parser.{_make_call_stmt(x)}")
        m.stmt(f"args = parser.{_make_call_stmt(self.history[-1])}")
        m.stmt(f"{self.fn.__name__}(**vars(args))")
    with m.if_("__name__ == '__main__'"):
        m.stmt("main()")

    with open(inspect.getsourcefile(self.fn)) as rf:
        source = rf.read()

    rx = re.compile(r"(?:@([\S]+\.)?as_command.*|^.*import ascommand.*)\n", re.MULTILINE)
    exposed = rx.sub("", "".join(source))

    print(exposed)
    print(m)
    sys.exit(0)
import typing as t

from prestring.utils import LazyFormat
from monogusa.web.codegen._fnspec import fnspec
from metashape.analyze.typeinfo import typeinfo
from prestring.python import Module


def f(name: str, *vals: int) -> None:
    pass


m = Module()
m.toplevel = m.submodule(import_unique=True)
m.sep()

spec = fnspec(f)
with m.class_("F"):
    for name, typ, kind in spec.parameters:
        if typ.__module__ != "builtins":
            m.toplevel.import_(typ.__module__)

        info = typeinfo(typ)
        type_str = spec.type_str_of(info.normalized)
        if info.is_optional:
            type_str = LazyFormat("typing.Optional[{}]", type_str)
        elif kind == "var_kw":
            type_str = LazyFormat("typing.Dict[str, {}]", type_str)
        elif kind == "var_args":
            type_str = LazyFormat("typing.List[{}]", type_str)
        m.stmt("{}: {}", name, type_str)
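# For f above, m ends up containing roughly:
#
# class F:
#     name: str
#     vals: typing.List[int]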