def additionals(self):
    """Collect marked items from every module listed in ``additional_path_list``.

    Each path is imported directly first; on ImportError it is retried as a
    module under the bundled ``kamidana.additionals`` package.  The marked
    items of all modules are deep-merged into a single dict.
    """
    merged = {}
    for path in self.additional_path_list:
        try:
            module = import_module(path, cwd=True)
        except ImportError:
            # not a local path/module -- fall back to the bundled package
            module = import_module("kamidana.additionals.{}".format(path), cwd=True)
        merged = deepmerge(merged, collect_marked_items(module))
    return merged
def run(
    filepath: str,
    *,
    pkg: str = "model",
    rootdir: str = "",
    here: t.Optional[str] = None,
    action: t.Literal["generate", "scan"] = "generate",
) -> None:
    """Generate (or scan) a Go struct set from the classes defined in *filepath*."""
    target = import_module(filepath)
    here = here or target.__file__
    settings: SettingsDict = {"rootdir": rootdir, "here": here}

    app = create_app(settings)
    app.include("egoist.directives.define_struct_set")
    define_struct_set = app.define_struct_set("egoist.generators.structkit:walk")

    # only classes are of interest; everything else is ignored by the scan
    def _not_class(x):
        return not inspect.isclass(x)

    classes = list(scan_module(target, is_ignored=_not_class).values())

    def models():
        from egoist.generators.structkit import runtime, structkit

        with runtime.generate(structkit, classes=classes) as g:
            g.package(pkg)

    define_struct_set(models)
    app.run([action])
def main():
    """Import the module named on the command line and pass leftover args to Driver."""
    from magicalimport import import_module

    parser = argparse.ArgumentParser(add_help=False)
    parser.add_argument("file")
    known, remaining = parser.parse_known_args()
    module = import_module(known.file)
    Driver().run(remaining, module=module)
def run(name: str):
    """Print a Markdown report (enum listings + field tables) for the models in module *name*."""
    m = import_module(name)
    w = get_walker(m, aggressive=True, recursive=False)

    def _to_str_typeinfo(info: typeinfo.TypeInfo) -> str:
        """
        >>> _to_str_typeinfo(str)
        'str'
        >>> _to_str_typeinfo(Literal["x", "y", "z"])
        ' `x, y, z` '
        >>> Color = Literal["Red", "Green", "Blue"]
        >>> _to_str_typeinfo(Color)
        '[Color](#Color)'
        """
        if info.underlying != info.type_:  # enum
            # named type -> render as an intra-document markdown link
            name = w.resolver.resolve_typename(info.type_)
            if name != "_GenericAlias":
                return f"[{name}](#{name})"
            # anonymous Literal -> render its member values inline
            args = [str(x) for x in typing.get_args(info.type_)]
            return f" `{', '.join(args)}` "
        name = info.underlying.__name__
        if not info.user_defined_type:
            return name
        return f"[{name}](#{name})"

    for cls in w.walk(kinds=["object", "enum"]):
        print("")
        print(f"## {cls.__name__}")
        kind = guess_mark(cls)  # todo: use resolve
        if kind == "enum":
            # enums are rendered as a fenced code block listing the variants
            print("")
            print("```")
            args = typing.get_args(cls)
            print(f" {args[0]}")
            for x in args[1:]:
                print(f" | {x}")
            print("```")
            print("")
        else:
            # objects are rendered as a markdown table of their fields
            doc = w.resolver.metadata.resolve_doc(cls, verbose=True)
            if doc is not None:
                print("")
                print(doc)
            print("")
            print("| name | type | description |")
            print("| :--- | :--- | :--- |")
            for name, info, metadata in w.walk_fields(cls):
                print(
                    f"| {name} | {typeinfo.to_string(info, to_str=_to_str_typeinfo)} | {metadata.get('description') or ''}|"
                )
def import_(s, context, as_=None):
    """Zenmai ``$import`` action: load module *s* and bind it in *context*.

    Example::

        $import: "zenmai.actions.suffix"
        as: s

        $s.suffix:
          name: foo
    """
    module = import_module(s, here=context.filename)
    if not as_:
        # default binding name: the top-level package name
        as_ = s.split(".")[0]
    context.assign(as_, module)
def run(
    filename: str,
    *,
    aggressive: bool = False,
    is_member: t.Optional[t.Callable[[t.Type[T]], bool]] = None,
    emit: t.Optional[EmitFunc] = None,
) -> None:
    """Walk the models in *filename* and emit them (default: metashape raw output)."""
    module = import_module(filename, cwd=True)
    walker = get_walker(module, aggressive=aggressive)
    if not emit:
        # xxx:
        emit = import_symbol("metashape.outputs.raw:emit")
    logger.debug("collect members: %d", len(walker))
    emit(walker, output=sys.stdout)
def run(
    filename: str,
    *,
    aggressive: bool = False,
    is_member: t.Optional[t.Callable[[t.Type[T]], bool]] = None,
) -> None:
    """Translate the module at *filename* through FakeResolver/Accessor and print it."""
    from magicalimport import import_module  # type:ignore

    module = import_module(filename)
    if aggressive:
        # aggressive mode: anything with a __name__ counts as a member
        def is_member(x):  # noqa
            return hasattr(x, "__name__")

    resolver = FakeResolver(is_member=is_member)
    accessor = Accessor(
        resolver=resolver,
        repository=resolver.resolve_repository(module.__dict__),
    )
    print(translate(accessor))
def from_(s, import_, context):
    """Zenmai ``$from`` action: bind named members of module *s* into *context*.

    Example::

        $from: "zenmai.actions.suffix"
        import: suffix

        $suffix:
          name: foo
    """
    module = import_module(s, here=context.filename)
    # a single name is accepted as well as a list/tuple of names
    names = import_ if isinstance(import_, (list, tuple)) else [import_]
    for name in names:
        context.assign(name, getattr(module, name))
def run(filename: str) -> None:
    """Pretty-print type info for every public name defined in *filename* itself."""
    import sys
    from pprint import pprint
    from magicalimport import import_module

    module = import_module(filename)
    builtin_names = set(sys.modules["builtins"].__dict__.keys())
    for name, value in module.__dict__.items():
        # skip builtins, private names, and anything re-exported from elsewhere
        if name in builtin_names or name.startswith("_"):
            continue
        if getattr(value, "__module__", None) != module.__name__:
            continue
        pprint(resolve_type_info(value))
def get_schema(filepath: str, *, codepath: str) -> t.Union[str, dict, None]:
    """Look up a JSON schema for *filepath* from the module at *codepath*.

    The module may expose ``schema`` (a value) or ``get_schema`` (a callable
    taking the file path).  A plain-string schema that is not a URL is
    resolved relative to *codepath*; anything else is returned as-is.
    """
    module = import_module(codepath, cwd=True)
    # t.Optional[get_schema_fn_type, t.Union[t.Optional[str]]]
    for attr in ("schema", "get_schema"):
        candidate = getattr(module, attr, None)
        if candidate is None:
            continue
        schema = candidate(filepath) if callable(candidate) else candidate
        if schema is None:
            continue
        # NOTE(review): prefixes are asymmetric ("https://" vs "http:") —
        # presumably intentional, but worth confirming.
        if isinstance(schema, str) and not schema.startswith(("https://", "http:")):
            base = os.path.dirname(codepath)
            return os.path.normpath(os.path.join(base, schema))
        return schema
    return None
def run(
    filepath: str,
    *,
    rootdir: str = "cmd/",
    here: t.Optional[str] = None,
    action: t.Literal["generate", "scan"] = "generate",
) -> None:
    """Register every function found in *filepath* as a CLI command and run the app."""
    target = import_module(filepath)
    settings: SettingsDict = {"rootdir": rootdir, "here": here or target.__file__}

    app = create_app(settings)
    app.include("egoist.directives.define_cli")
    define_cli = app.define_cli("egoist.generators.clikit:walk")

    for fn in scan_module(target).values():
        define_cli(fn)
    app.run([action])
def main():
    """CLI entry point: evaluate a zenmai document and print the result."""
    import argparse
    from colorama import init

    init()  # enable ANSI color handling (needed on Windows)
    parser = argparse.ArgumentParser()
    parser.add_argument("-m", "--module", default="zenmai.actions")
    parser.add_argument("--driver", default="zenmai.driver:Driver")
    parser.add_argument("--debug", action="store_true")
    parser.add_argument("--logging", default="INFO", choices=list(logging._nameToLevel.keys()))
    parser.add_argument("-f", "--format", default=None, choices=loading.get_formats())
    parser.add_argument("--data", action="append")
    parser.add_argument("--select", default=None)
    parser.add_argument("file", default=None)
    loading.setup()
    args = parser.parse_args()

    # actions module + extra data files (resolved through jsonref)
    module = import_module(args.module)
    data = [loadfile_with_jsonref(path) for path in args.data or []]

    def wrap(d):
        # optionally narrow the output via the --select JSON pointer fragment
        if args.select is None:
            return d
        return access_by_json_pointer(d, args.select.split("#")[-1])

    driver_cls = import_symbol(args.driver, ns="zenmai.driver")
    driver = driver_cls(module, args.file, format=args.format, data=data)

    # todo: option
    logging.basicConfig(
        format="%(levelname)5s:%(name)30s:%(message)s", level=logging._nameToLevel[args.logging]
    )
    try:
        # no file argument -> read the document from stdin
        if args.file is None:
            driver.run(sys.stdin, sys.stdout, wrap=wrap)
        else:
            with open(args.file) as rf:
                driver.run(rf, sys.stdout, wrap=wrap)
    except Exception as e:
        if args.debug:
            raise
        # compact, colored one-line error report for normal (non-debug) use
        print("{errcolor}{e.__class__.__name__}:{reset} {e}".format(e=e, errcolor=colorama.Fore.YELLOW, reset=colorama.Style.RESET_ALL))
def run(pkg: str) -> None:
    """Print ``name summary`` for each function/class defined directly in *pkg*."""
    module = import_module(pkg)
    for name, member in module.__dict__.items():
        if not (inspect.isfunction(member) or inspect.isclass(member)):
            continue
        # only names defined in this module, not re-exports
        if member.__module__ == module.__name__:
            print(f"{name} {get_summary(inspect.getdoc(member) or '')}")
from egoist import runtime
from magicalimport import import_module

# sibling module loaded by path, relative to this file
internal = import_module("./internal.py", here=__file__)


def use(*, grumby: bool) -> None:
    """Generate a CLI command whose components are wired via egoist's go DI graph."""
    from egoist.generate.clikit import clikit
    from egoist.generate.go import di
    from egoist.internal.graph import Builder

    args = runtime.get_args()
    args.grumby.help = "grumby? what is this?"

    with runtime.generate(clikit) as m:
        # build the dependency graph from the constructors in internal.py
        b = Builder()
        b.add_node(**di.parse(internal.NewEvent))
        b.add_node(**di.parse(internal.NewGreeter))
        b.add_node(**di.parse(internal.NewMessage))
        g = b.build()

        # inject, seeding the graph with primitive values from local scope
        event = di.inject(m, g, variables=di.primitives(g, locals()))
        m.stmt(event.Start())


if __name__ == "__main__":
    runtime.main(name=__name__, here=__file__, root="")
def main():
    """CLI entry point for the jq-like stream-transform tool.

    Reads documents from files or stdin, runs the user-supplied code snippet
    over them (optionally slurped into one list), and dumps the results.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("code", nargs="?", default="get()")
    parser.add_argument("file", nargs="*", type=argparse.FileType("r"))
    parser.add_argument("-i", "--input-format", choices=["json", "yaml", "ltsv", "raw"], default="json")
    parser.add_argument("-o", "--output-format", choices=["json", "yaml", "ltsv", "raw"], default="json")
    parser.add_argument("-c", "--compact-output", action="store_true")
    parser.add_argument("-s", "--slurp", action="store_true")
    parser.add_argument("-S", "--sort-keys", action="store_true")
    parser.add_argument("--ascii-output", action="store_true")
    parser.add_argument("-r", "--raw-output", action="store_true")
    parser.add_argument(
        "--relative-path",
        action="store_true",
        help=
        "when h.dumpfile(), iff accessing opend filename, treating as relative path",
    )
    parser.add_argument("--here", default=None, help="cwd for h.dumpfile()")
    parser.add_argument("--buffered", action="store_true", dest="buffered")
    parser.add_argument("-u", "--unbuffered", action="store_false", dest="buffered")
    parser.set_defaults(buffered=True)
    parser.add_argument("--squash", action="count", default=0)
    parser.add_argument("--show-code", action="store_true")
    parser.add_argument("--show-none", dest="ignore_none", action="store_false")
    parser.add_argument("-a", "--additionals")
    args = parser.parse_args()

    fnname = "_transform"
    # if the "code" argument is actually an existing file, treat it as input
    # and fall back to the default expression
    if not args.file and os.path.exists(args.code):
        args.file.append(open(args.code))
        args.code = "get()"
    pycode = create_pycode(fnname, args.code)
    fp = sys.stdout
    if args.show_code:
        # debugging aid: show the generated code and exit
        _describe_pycode(pycode, fp=fp, indent="")
        sys.exit(0)
    if args.file:
        files = args.file[:]
    else:
        files = [sys.stdin]

    # optional user-supplied helpers module
    additionals = None
    if args.additionals is not None:
        additionals = magicalimport.import_module(args.additionals, cwd=True)

    dump_extra_kwargs = dict(
        indent=None if args.compact_output else 2,
        sort_keys=args.sort_keys,
        ensure_ascii=args.ascii_output,
    )
    # xxx: chdir if here is not None
    ctx = create_context(here=args.here, extra_kwargs=dump_extra_kwargs)
    with gentle_error_reporting(pycode, fp):
        transform_fn = exec_pycode(fnname, pycode)

    def _load(streams, *, relative=args.relative_path):
        # yield decoded documents from each stream; with --relative-path,
        # chdir next to the stream's file so relative paths resolve
        for stream in streams:
            if relative:
                filepath = loading.get_filepath_from_stream(stream)
                if filepath:
                    ctx.chdir(os.path.dirname(filepath))
            m = ctx.get_module(stream, format=args.input_format)
            for d in m.load(stream, buffered=args.buffered):
                yield d

    def _dump(d, *, i=0, fp=fp, raw=False):
        # write one result document, separated from the previous one
        m = ctx.get_module(fp, format=args.output_format)
        if i > 0 and m.SEPARATOR:
            fp.write(m.SEPARATOR)
        m.dump(
            d,
            fp=fp,
            ignore_none=args.ignore_none,
            squash_level=args.squash,
            raw=raw or args.raw_output,
            extra_kwargs=ctx.dump_extra_kwargs,
        )
        if not args.buffered:
            fp.flush()

    if args.slurp:
        # slurp: collect every document into one list, transform once
        d = list(_load(files))
        with gentle_error_reporting(pycode, fp):
            r = transform(ctx, transform_fn, d, additionals=additionals)
        _dump(r, i=0)
    else:
        # streaming: transform and dump each document as it arrives
        with gentle_error_reporting(pycode, fp):
            for i, d in enumerate(_load(files)):
                r = transform(ctx, transform_fn, d, additionals=additionals)
                _dump(r, i=i)
    fp.flush()
# TODO: このファイルを消し去りたい from magicalimport import import_module from monogusa.cli.runtime import create_parser commands = import_module("./app/commands.py", here=__file__) di = import_module("./app/di.py", here=__file__) parser = create_parser(commands) args = parser.parse_args() print(args) params = vars(args).copy() fn = params.pop("subcommand") print(fn, di.resolve_args(fn)) fn(*di.resolve_args(fn), **params)
from types import ModuleType from magicalimport import import_module from prestring.python import Module from prestring.naming import pascalcase import commands fnspec = import_module("../fnspec.py", here=__file__) def gen(target: ModuleType, *, m=None): m = m or Module() m.import_("typing as t") m.from_("pydantic", "BaseModel") m.from_("fastapi", "APIRouter", "Depends") m.from_("monogusa.web", "runtime") m.import_(target.__name__) m.stmt("router = APIRouter()") m.sep() # TODO: collect functions and use them. spec = fnspec.fnspec(commands.hello) co = fnspec.spec_to_schema_code(spec, name=pascalcase(spec.name)) co.emit(m=m) m.stmt('@router.post ( "/{}", response_model=runtime.CommandOutput )', spec.name) with m.def_( spec.name, f"input: {co.name}",
from magicalimport import import_module
from graphql import build_schema, graphql_sync

# sibling module (an AST-based expression evaluator) loaded by file path
astutil = import_module("01value.py")


class Root:
    """GraphQL root resolver backed by an in-memory dict."""

    def __init__(self, d):
        # d: mapping with a "tasks" list — assumed shape, TODO confirm
        self.d = d

    def tasks(self, info, *, where):
        # keep each task whose `where` expression evaluates truthy when
        # run against the task dict via astutil's StrictVisitor
        r = []
        for task in self.d["tasks"]:
            t = astutil.ast.parse(where)
            v = astutil.StrictVisitor(env=task)
            v.visit(t)
            # the visitor leaves its result on top of an internal stack
            if v.stack[-1][0]:
                r.append(task)
        return r


schema = build_schema("""
type Task {
  name: String!
  completed: Boolean!
}

type Query {
  tasks(where: String): [Task]!
}
""")
from __future__ import annotations
from magicalimport import import_module

# sibling module defining the typed config shapes, loaded relative to this file
shapes = import_module("./shapes.py", here=__file__)

# mkdocs-material-style site configuration expressed with the typed shapes
toplevel = shapes.Toplevel(
    site_name="Material for MkDocs",
    site_description="A Material Design theme for MkDocs",
    site_author="Martin Donath",
    site_url="https://squidfunk.github.io/mkdocs-material/",
    # repository
    repo_name="squidfunk/mkdocs-material",
    repo_url="https://github.com/squidfunk/mkdocs-material",
    # Copyright
    copyright="Copyright © 2016 - 2017 Martin Donath",
    theme=shapes.Theme(
        name="material",
        language="en",
        palette=shapes.Palette(primary="indigo", accent="indigo"),
        font=shapes.Font(text="Roboto", code="Roboto Mono"),
    ),
    extra=shapes.Extra(
        manifest="manifest.webmanifest",
        social=[
            shapes.Social(type_="github", link="https://github.com/squidfunk"),
            shapes.Social(type_="twitter", link="https://twitter.com/squidfunk"),
            shapes.Social(type_="linkedin", link="https://linkedin.com/in/squidfunk"),
        ],
    ),
)
# this module is generated by monogusa.web.codegen import typing as t from pydantic import BaseModel from fastapi import APIRouter, Depends, FastAPI from monogusa.web import runtime import magicalimport router = APIRouter() n12web_commands = magicalimport.import_module("commands.py", here="12web") class HelloInput(BaseModel): """ auto generated class from 12web.commands:hello """ name: str @router.post("/hello", response_model=runtime.CommandOutput) def hello(input: HelloInput) -> t.Dict[str, t.Any]: """ hello world """ with runtime.handle() as s: # TODO: support positional arguments? (DI) n12web_commands.hello(**input.dict()) return s.dict()
from magicalimport import import_module
from metashape.runtime import get_walker

# load the models module by relative path and walk the graph rooted at Order
models = import_module("./models.py", here=__file__)
w = get_walker(models.Order, aggressive=True, recursive=True)
for cls in w.walk():
    print(cls.__name__)

from metashape.outputs.openapi import emit
import sys

# second pass with a fresh walker: emit an OpenAPI document for the same graph
w = get_walker(models.Order, aggressive=True, recursive=True)
emit(w, output=sys.stdout)
if hasattr(cls, "__loader__"): return None # is typed user_defined_type class or callable? if not hasattr(cls, "__name__"): return None if id(cls) in _builtins: return None if not callable(cls): return None if inspect.isclass(cls): return "object" return None w = runtime.get_walker( mod, recursive=True, aggressive=True, _guess_kind=_guess_kind ) m = Module() o = sys.stderr if bool(os.environ.get("DEBUG", "")) else sys.stdout ctx = Context(w, is_minimum=bool(os.environ.get("MINIMUM", ""))) print(emit(ctx, m=m), file=o) if __name__ == "__main__": from magicalimport import import_module m = import_module(sys.argv[1]) main(m)
def load(self, module_path):
    """Import *module_path*: as ``module:symbol`` when it contains ``:``, else as a module."""
    if ":" not in module_path:
        return magicalimport.import_module(module_path, cwd=True)
    return magicalimport.import_symbol(module_path, cwd=True)
import typing as t
from magicalimport import import_module
from fastapi import APIRouter, FastAPI
from pydantic import BaseModel
from monogusa.web import runtime

# CLI command implementations, loaded relative to this file
cli = import_module("./cli.py", here=__file__)

router = APIRouter()


class HelloInput(BaseModel):
    name: str


@router.post("/hello", response_model=runtime.CommandOutput)
def hello(input: HelloInput) -> t.Dict[str, t.Any]:
    """Expose cli.hello as a POST endpoint, capturing its output via runtime.handle."""
    with runtime.handle() as s:
        cli.hello(**input.dict())  # TODO: support positional arguments?
        return s.dict()


class ByeInput(BaseModel):
    name: str


# fix: route was "/hello", colliding with the hello endpoint above
@router.post("/bye", response_model=runtime.CommandOutput)
def bye(input: ByeInput) -> t.Dict[str, t.Any]:
    """Expose cli.bye as a POST endpoint, capturing its output via runtime.handle."""
    with runtime.handle() as s:
        cli.bye(**input.dict())  # TODO: support positional arguments?
        return s.dict()
import os
from magicalimport import import_module

# resolve module "xxx" relative to the current working directory and show it
module = import_module("xxx", here=os.getcwd())
print(module)
from magicalimport import import_module
from fastapi import FastAPI

# router definitions live in a sibling file, loaded by relative path
routers = import_module("./routers.py", here=__file__)

app = FastAPI()
app.include_router(routers.router)

if __name__ == "__main__":
    # run through monogusa's web CLI when executed directly
    import monogusa.web.cli as webcli

    webcli.run(app)
from magicalimport import import_module

# load the module named "xxx" and display its repr
module = import_module("xxx")
print(module)