Exemple #1
0
def run(
    *, src: str, savedir: str, dry_run: bool = False, sort_keys: bool = False
) -> None:
    """Migrate the document at *src*, writing results under *savedir*.

    Main file and every sub-file are migrated; sub-file callbacks collected
    during the first pass are replayed in a second pass.
    """
    root_resolver = get_resolver(src)
    # NOTE: sort_keys=True can raise TypeError (str vs int comparison),
    # hence it is threaded through both dump_options and the transform.
    migration = Migration(
        root_resolver,
        dump_options={"sort_keys": sort_keys},
        transform=partial(transform, sort_keys=sort_keys),
    )
    with migration.migrate(dry_run=dry_run, keep=True, savedir=savedir) as u:
        scope = Scope(
            {"consumes": ["application/json"], "produces": ["application/json"]}
        )
        callbacks = []
        logger.debug("migrate mainfile file=%s", u.name)
        migrate_for_mainfile(u, scope=scope)
        for sub_resolver in u.resolvers:
            child = u.new_child(sub_resolver)
            logger.debug("migrate subfile file=%s", child.name)
            migrate_for_subfile(child, scope=scope, callbacks=callbacks)

        # second pass: replay every collected callback against each sub-file
        logger.debug("callbacks callbacks=%s", len(callbacks))
        for sub_resolver in u.resolvers:
            child = u.new_child(sub_resolver)
            for cb in callbacks:
                cb(child)
Exemple #2
0
def get_loader(filename: str) -> Loader:
    """Build a Loader whose YAML nodes are cached in a shared NodeStore."""
    node_store = _yaml.NodeStore()
    factory = _yaml.YAMLLoaderFactory(_yaml.YAMLLoader, store=node_store)
    adapter = _DictknifeLoaderAdapter(factory)
    resolver = jsonknife.get_resolver(filename, loader=adapter)
    return Loader(resolver, store=node_store)
Exemple #3
0
def run(*, filename):
    """Dump *filename* after splicing every "$include" reference in place."""

    def inline_includes(d, resolver, w=DictWalker(["$include"])):
        # Each mapping carrying "$include" has the key popped and the
        # referenced sub-document merged into it.
        for _, sd in w.walk(d):
            subresolver, jsref = resolver.resolve(sd.pop("$include"))
            sd.update(subresolver.access_by_json_pointer(jsref))

    resolver = get_resolver(filename, onload=inline_includes)
    loading.dumpfile(resolver.doc)
Exemple #4
0
    def provide():
        # Visit the resolved document, pushing emitted items onto the queue;
        # a trailing None sentinel signals completion even if the visit fails.
        resolver = get_resolver(src)
        ctx = Context(resolver, emit=q.put)
        visitor = create_visitor()
        try:
            visitor.visit(ctx, resolver.doc)
        finally:
            q.put(None)
Exemple #5
0
def run(*, src: str) -> None:
    """Dry-run a migration, patching two known definitions via u.update."""
    logging.basicConfig(level=logging.DEBUG)

    resolver = get_resolver(src)
    # key -> (json ref to update, replacement value)
    patches = {
        "definitions/person": (
            "#/definitions/person/properties/value",
            {"type": "integer"},
        ),
        "definitions/name": (
            "#/definitions/name/description",
            "name of something",
        ),
    }
    with Migration(resolver).migrate(dry_run=True) as u:
        for k, item in u.iterate_items():
            if k in patches:
                ref, value = patches[k]
                u.update(item.resolver, ref, value)
def run(*, src: str) -> None:
    """Run the migration for real (keep=True), patching two known definitions."""
    logging.basicConfig(level=logging.DEBUG)

    resolver = get_resolver(src)
    with Migration(resolver).migrate(keep=True) as u:
        for k, item in u.iterate_items():
            if k == "definitions/person":
                u.update(
                    item.resolver,
                    "#/definitions/person/properties/value",
                    {"type": "integer"},
                )
            elif k == "definitions/name":
                u.update(
                    item.resolver,
                    "#/definitions/name/description",
                    "name of something",
                )
Exemple #7
0
def run(*, src: str, savedir: str, log: str, dry_run: bool = False) -> None:
    """Migrate *src* into *savedir* at the logging level named by *log*."""
    logging.basicConfig(level=getattr(logging, log))

    resolver = get_resolver(src)
    # NOTE: sort_keys=True can raise TypeError (str vs int comparison), so
    # it stays off here and ordering is delegated to `transform`.
    migration = Migration(
        resolver, dump_options={"sort_keys": False}, transform=transform
    )
    with migration.migrate(dry_run=dry_run, keep=True, savedir=savedir) as u:
        scope = Scope()
        migrate_for_mainfile(u, scope=scope)
        migrate_for_subfile(u, scope=scope)
def run(*, src: str) -> None:
    """Dry-run a migration, iterating the item_map directly."""
    logging.basicConfig(level=logging.DEBUG)

    resolver = get_resolver(src)
    with Migration(resolver).migrate(dry_run=True) as m:
        # snapshot the items: updates may mutate the underlying map
        for k, item in list(m.item_map.items()):
            if k == "definitions/person":
                m.update(
                    item.resolver,
                    "#/definitions/person/properties/value",
                    {"type": "integer"},
                )
            if k == "definitions/name":
                m.update(
                    item.resolver,
                    "#/definitions/name/description",
                    "name of something",
                )
def run(*, src: str) -> None:
    """Dry-run a migration and show the resulting diff after patching two items."""
    resolver = get_resolver(src)

    with Migration(resolver).migrate_dryrun_and_diff() as m:
        item_map = m.item_map
        # snapshot the items: the loop mutates item.data as it goes
        for k, item in list(item_map.items()):
            if k == "definitions/person":
                # layer a fresh dict over "properties" so the original mapping
                # underneath stays untouched
                item.data["properties"] = ChainMap(make_dict(), item.data["properties"])
                item.data["properties"]["value"] = {"type": "integer"}
            if k == "definitions/name":
                # rebind item.data to an overlay, re-register it in the
                # resolver, then write through the new top layer — this exact
                # order matters (the resolver must see the overlay object)
                item.data = ChainMap(make_dict(), item.data)
                item.resolver.assign_by_json_pointer(item.localref, item.data)
                item.data["description"] = "name of something"
Exemple #10
0
def run(*, src: str, savedir: str, log: str, dry_run: bool = False) -> None:
    """Migrate *src* into *savedir*, logging at the level named by *log*."""
    logging.basicConfig(level=getattr(logging, log))

    resolver = get_resolver(src)
    # NOTE: sort_keys=True can raise TypeError (str vs int comparison),
    # so dumping keeps sort_keys off and relies on `transform` instead.
    with Migration(
        resolver, dump_options={"sort_keys": False}, transform=transform
    ).migrate(dry_run=dry_run, keep=True, savedir=savedir) as u:
        scope = Scope()
        migrate_for_mainfile(u, scope=scope)
        migrate_for_subfile(u, scope=scope)
Exemple #11
0
def run(*, src: str) -> None:
    """Dry-run a migration and show the resulting diff after patching two items."""
    resolver = get_resolver(src)

    with Migration(resolver).migrate_dryrun_and_diff() as m:
        item_map = m.item_map
        # snapshot the items: the loop mutates item.data as it goes
        for k, item in list(item_map.items()):
            if k == "definitions/person":
                # overlay a fresh dict so the original "properties" mapping
                # underneath stays untouched
                item.data["properties"] = ChainMap(make_dict(),
                                                   item.data["properties"])
                item.data["properties"]["value"] = {"type": "integer"}
            if k == "definitions/name":
                # rebind to an overlay, re-register it in the resolver, then
                # write through the top layer — this exact order matters
                item.data = ChainMap(make_dict(), item.data)
                item.resolver.assign_by_json_pointer(item.localref, item.data)
                item.data["description"] = "name of something"
Exemple #12
0
def run(*, src: str, savedir: str, log: str, dry_run: bool = False) -> None:
    """Migrate *src* with key-sorted dump output, patching two definitions."""
    logging.basicConfig(level=getattr(logging, log))

    resolver = get_resolver(src)
    # NOTE: here sort_keys=True is used for the dumped output
    migration = Migration(resolver, dump_options={"sort_keys": True})
    with migration.migrate(dry_run=dry_run, keep=True, savedir=savedir) as u:
        for k, item in u.iterate_items():
            if k == "definitions/person":
                u.update(
                    "#/definitions/person/properties/value",
                    {"type": "integer"},
                    resolver=item.resolver,
                )
            if k == "definitions/name":
                u.update(
                    "#/definitions/name/description",
                    "name of something",
                    resolver=item.resolver,
                )
Exemple #13
0
def main():
    """Scan the file given as argv[1] for reference errors and report them."""
    filename = sys.argv[1]
    factory = LoaderFactory(Loader)

    resolver = get_resolver(filename, loader=_Adapter(factory))
    scaner = Scaner(resolver, store=factory.store)
    scaner.scan()

    print("----------------------------------------")
    subprocess.run(["cat", "-n", filename])

    if scaner.errors:
        print("?", len(scaner.errors))
        for err in scaner.errors:  # type: ReferenceError
            print(err.describe())
Exemple #14
0
def main():
    """Scan the file given as argv[1] and describe each reference error found."""
    filename = sys.argv[1]
    factory = YAMLLoaderFactory(YAMLLoader)

    resolver = get_resolver(filename, loader=_Adapter(factory))
    scanner = DataScanner(resolver)
    scanner.scan()

    print("----------------------------------------")
    subprocess.run(["cat", "-n", filename])

    if scanner.errors:
        # the describer maps errors back to source positions via the node store
        describer = Describer(factory.store)
        print("?", len(scanner.errors))
        for err in scanner.errors:
            print(describer.describe(err))
Exemple #15
0
def main():
    """Report reference errors for argv[1], then show the numbered source."""
    filename = sys.argv[1]
    factory = LoaderFactory(Loader)

    resolver = get_resolver(filename, loader=_Adapter(factory))
    scaner = Scaner(resolver, store=factory.store)
    scaner.scan()

    if scaner.errors:
        # intended report shape:
        #   <status>, <filename>, <start>, <end>, <msg>
        # where status is one of ERROR | WARNING | NOTE
        print("?", len(scaner.errors))
        for err in scaner.errors:  # type: ReferenceError
            print(err.describe())

    print("----------------------------------------")
    subprocess.run(["cat", "-n", filename])
Exemple #16
0
def main():
    """Expand $refs in the argv[1] YAML file, mapping errors back to source marks."""
    filename = sys.argv[1]
    yaml_loader_factory = LoaderFactory(Loader)

    resolver = get_resolver(filename, loader=_Adapter(yaml_loader_factory))
    doc = resolver.doc
    expander = Expander(resolver)

    try:
        doc = expander.expand()
        loading.dumpfile(doc)  # with $ref
    except MarkedYAMLError as e:
        # print the include chain leading to the failing file, indenting one
        # level per hop, then the problem/context marks
        padding = ""
        mark = e.context_mark or e.problem_mark
        filename = mark.name
        for r in resolver.path_list(filename):
            padding += "  "
            print(padding, os.path.relpath(r.filename))

        padding += "  "
        print(padding, "problem", e.problem, "@", e.problem_mark)
        print(padding, "context", e.context, "@", e.context_mark)
        print("")

    # node_cache maps id(<loaded object>) -> YAML node, which carries the
    # original source positions (start_mark / end_mark)
    node_cache = yaml_loader_factory.store.node_cache
    if expander.errors:
        print("?", len(expander.errors))
        for path, e, sd in expander.errors:
            # walk the mapping node's key/value node pairs looking for the
            # key that matches the failing path segment
            for knode, vnode in node_cache[id(sd)].value:
                print("!", repr(e), knode.value, path[-1])
                if knode.value == path[-1]:
                    print(
                        "!!",
                        knode.start_mark,
                        knode.end_mark,
                        "x",
                        vnode.start_mark,
                        vnode.end_mark,
                    )
    print("----------------------------------------")
    subprocess.run(["cat", "-n", filename])
Exemple #17
0
def main():
    """Expand $refs in argv[1]; print the YAML source span of each reference error."""
    filename = sys.argv[1]
    yaml_loader_factory = LoaderFactory(Loader)

    resolver = get_resolver(filename, loader=_Adapter(yaml_loader_factory))
    doc = resolver.doc
    expander = Expander(resolver)

    try:
        doc = expander.expand()
        loading.dumpfile(doc)  # with $ref
    except MarkedYAMLError as e:
        # print the include chain leading to the failing file, indenting one
        # level per hop, then the problem/context marks
        padding = ""
        mark = e.context_mark or e.problem_mark
        filename = mark.name
        for r in resolver.path_list(filename):
            padding += "  "
            print(padding, os.path.relpath(r.filename))

        padding += "  "
        print(padding, "problem", e.problem, "@", e.problem_mark)
        print(padding, "context", e.context, "@", e.context_mark)
        print("")

    # the store caches YAML nodes so each error can be located in the source
    store = yaml_loader_factory.store
    if expander.errors:
        print("?", len(expander.errors))
        for err in expander.errors:  # type: ReferenceError
            map_node = err.lookup_node(store)
            knode, vnode = err.lookup_kvpair(map_node)
            print(
                "!!",
                knode.start_mark,
                knode.end_mark,
                "x",
                vnode.start_mark,
                vnode.end_mark,
            )

    print("----------------------------------------")
    subprocess.run(["cat", "-n", filename])
Exemple #18
0
def main():
    """Expand all $refs in the argv[1] file and dump; pretty-print YAML errors."""
    filename = sys.argv[1]
    resolver = get_resolver(filename)
    try:
        doc = Expander(resolver).expand()
        loading.dumpfile(doc)
    except MarkedYAMLError as e:
        # show the include chain down to the failing file, one indent per hop
        mark = e.context_mark or e.problem_mark
        filename = mark.name
        padding = ""
        for r in resolver.path_list(filename):
            padding += "  "
            print(padding, os.path.relpath(r.filename))

        padding += "  "
        print(padding, "problem", e.problem, "@", e.problem_mark)
        print(padding, "context", e.context, "@", e.context_mark)
        print("")
    print("----------------------------------------")
    subprocess.run(["cat", "-n", filename])
def run(*, src: str) -> None:
    """Patch two definitions behind ChainMap overlays and print per-file diffs."""
    resolver = get_resolver(src)
    accessor = CachedItemAccessor(resolver)
    item_map = make_dict()
    scanner = Scanner(accessor, item_map, strict=True)
    scanner.scan(resolver.doc)

    resolvers = set()
    for k, item in list(item_map.items()):
        if k == "definitions/person":
            # overlay a fresh dict so the original "properties" stays intact
            item.data["properties"] = ChainMap(make_dict(), item.data["properties"])
            item.data["properties"]["value"] = {"type": "integer"}
        if k == "definitions/name":
            # rebind to an overlay, re-register it in the resolver, then write
            # through the top layer — this exact order matters
            item.data = ChainMap(make_dict(), item.data)
            item.resolver.assign_by_json_pointer(item.localref, item.data)
            item.data["description"] = "name of something"
        resolvers.add(item.resolver)

    # print a diff of before/after for every file that was touched
    for r in resolvers:
        print(os.path.relpath(r.name, start=os.getcwd()))
        for line in diff(before_data(r.doc), after_data(r.doc)):
            print(line)
Exemple #20
0
def run(*, src: str) -> None:
    """Patch two definitions behind ChainMap overlays and print per-file diffs."""
    resolver = get_resolver(src)
    accessor = CachedItemAccessor(resolver)
    item_map = make_dict()
    scanner = Scanner(accessor, item_map, strict=True)
    scanner.scan(resolver.doc)

    resolvers = set()
    for k, item in list(item_map.items()):
        if k == "definitions/person":
            # overlay a fresh dict so the original "properties" stays intact
            item.data["properties"] = ChainMap(make_dict(),
                                               item.data["properties"])
            item.data["properties"]["value"] = {"type": "integer"}
        if k == "definitions/name":
            # rebind to an overlay, re-register it in the resolver, then write
            # through the top layer — this exact order matters
            item.data = ChainMap(make_dict(), item.data)
            item.resolver.assign_by_json_pointer(item.localref, item.data)
            item.data["description"] = "name of something"
        resolvers.add(item.resolver)

    # print a diff of before/after for every file that was touched
    for r in resolvers:
        print(os.path.relpath(r.name, start=os.getcwd()))
        for line in diff(before_data(r.doc), after_data(r.doc)):
            print(line)
Exemple #21
0
def main():
    """Expand the argv[1] file, then report the YAML source span of one schema node."""
    filename = sys.argv[1]
    yaml_loader_factory = LoaderFactory(Loader)

    resolver = get_resolver(filename, loader=_Adapter(yaml_loader_factory))
    doc = resolver.doc

    try:
        expander = Expander(resolver)
        doc = expander.expand()
        loading.dumpfile(doc)  # with $ref
    except MarkedYAMLError as e:
        # print the include chain leading to the failing file, indenting one
        # level per hop, then the problem/context marks
        padding = ""
        mark = e.context_mark or e.problem_mark
        filename = mark.name
        for r in resolver.path_list(filename):
            padding += "  "
            print(padding, os.path.relpath(r.filename))

        padding += "  "
        print(padding, "problem", e.problem, "@", e.problem_mark)
        print(padding, "context", e.context, "@", e.context_mark)
        print("")

    print("----------------------------------------")
    subprocess.run(["cat", "-n", filename])

    # node_cache maps id(<loaded object>) -> YAML node with source marks;
    # lookup therefore needs the *original* object, not an overlay
    node_cache = yaml_loader_factory.store.node_cache

    sd = doc["components"]["schemas"]["a"]
    if hasattr(sd, "maps"):
        sd = sd.maps[-1]  # find original (in ChainMap)
    node = node_cache[id(sd)]

    print("@", node.start_mark)
    print("@", node.end_mark)
Exemple #22
0
def run(src: str) -> None:
    """Walk every schema "properties" path in *src* and print "path type" lines."""
    resolver = get_resolver(src)

    def gen(*, path):
        def on_schema(d, *, path):
            # depth-first over "properties"; `path` is a single shared list
            # mutated with append/pop, so the yield order depends on the
            # exact placement of those calls
            for k, prop in d.get("properties", {}).items():
                if not hasattr(prop, "get"):
                    # print("hmm")
                    continue
                path.append(k)
                if "$ref" in prop:
                    # follow the reference; keep only the last path segment
                    k = prop["$ref"].rsplit("/", 1)[-1]
                    prop = resolver.access_by_json_pointer(prop["$ref"])

                # type: str?, object?
                # NOTE(review): a cyclic $ref would recurse forever here —
                # confirm inputs are acyclic
                yield from on_schema(prop, path=path)
                path.pop()
            yield ("/".join(path), d)

        yield from on_schema(resolver.doc, path=path)

    for name, node in gen(path=["Top"]):
        print(name, node.get("type"))
    print("*ok*")
Exemple #23
0
def select(
    *,
    src: str,
    dst: str,
    refs,
    unwrap,
    wrap,
    input_format: str,
    output_format: str,
    format: str,
):
    """Extract (and expand) parts of *src* selected by *refs*, dumping to *dst*.

    Each ref may carry an assignment target as "<ref>@<pointer>"; without one,
    the extracted part is deep-merged into the accumulated result. With no
    refs at all, the whole document is expanded.
    """
    from dictknife.jsonknife import Expander
    from dictknife.jsonknife.accessor import assign_by_json_pointer
    from dictknife.jsonknife import get_resolver

    # NOTE(review): input_format is computed here but never used below —
    # it looks like it was meant to reach the loader; confirm against callers
    input_format = input_format or format
    resolver = get_resolver(src)
    expander = Expander(resolver)
    # `unwrap` acts as a single-ref shorthand when no refs were given
    if unwrap and not refs:
        refs = []
        refs.append(unwrap)

    if not refs:
        d = expander.expand()
    else:
        d = make_dict()
        for ref in refs:
            ref_wrap = wrap
            if "@" in ref:
                ref, ref_wrap = ref.split("@", 1)
            extracted = expander.expand_subpart(expander.access(ref))
            if ref_wrap:
                assign_by_json_pointer(d, ref_wrap, extracted)
            else:
                d = deepmerge(d, extracted)
    loading.dumpfile(d, dst, format=output_format or format)
Exemple #24
0
def get_selector(filename: str, *, r=None) -> Selector:
    """Create a wrapping Selector over a resolver for *filename*."""
    resolver = get_resolver(filename)
    return Selector(resolver, wrap=True, r=r)
Exemple #25
0
from dictknife import loading
from dictknife.jsonknife import get_resolver
from dictknife.jsonknife import Bundler
from dictknife import DictWalker
import logging
logging.basicConfig(level=logging.DEBUG)


def build_subset(resolver, ref):
    """Build a doc containing only the subtree at *ref* plus its local $ref targets."""
    subset = {}
    ob = resolver.access_by_json_pointer(ref)
    resolver.assign_by_json_pointer(ref, ob, doc=subset)
    for path, sd in DictWalker(["$ref"]).walk(ob):
        # xxx: only document-local references ("#/...") are copied into the subset
        if sd["$ref"].startswith("#/"):
            resolver.assign(path[:-1], sd, doc=subset)
    return subset


# bundle just /definitions/person out of the source document and dump it
filename = "./src/person.json"
r = get_resolver(filename)
subset = build_subset(r, "/definitions/person")
b = Bundler(r)
d = b.bundle(subset)
loading.dumpfile(d)
Exemple #26
0
from dictknife import loading
from dictknife.jsonknife import get_resolver
from dictknife.jsonknife import Bundler

# Replace the resolver's document with one holding only the "person"
# definition, then bundle and dump the result.
filename = "./src/main.json"
r = get_resolver(filename)
person = r.access_by_json_pointer("/definitions/person")
r.doc = {"definitions": {"person": person}}
b = Bundler(r)
d = b.bundle()
loading.dumpfile(d)