Example #1
def migrate_for_subfile(u, *, scope):
    for resolver in u.resolvers:
        uu = u.new_child(resolver)
        if uu.has("paths", resolver=resolver):
            from dictknife import DictWalker, Or

            method_walker = DictWalker(
                [Or(["get", "post", "put", "delete", "patch", "head"])]
            )
            schema_walker = DictWalker(["schema"])

            for path, sd in method_walker.walk(uu.resolver.doc["paths"]):
                # parameters
                # responses
                frame = {}
                if "produces" in sd[path[-1]]:
                    ref = path_to_json_pointer(["paths", *path, "produces"])
                    frame["produces"] = uu.pop(ref)

                with scope.scope(frame or None):
                    if "responses" in sd[path[-1]]:
                        for spath, ssd in schema_walker.walk(sd[path[-1]]["responses"]):
                            fullpath = ["paths", *path, "responses", *spath]
                            ref = path_to_json_pointer(fullpath)
                            schema = uu.pop(ref)
                            content = uu.make_dict()
                            for produce in scope[["produces"]]:
                                content[produce] = {"schema": schema}
                            ref = path_to_json_pointer([*fullpath[:-1], "content"])
                            uu.update(ref, content)
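Note: the example above moves each Swagger 2.0 response `schema` under an OpenAPI 3 `content` map keyed by the operation's `produces` media types. A minimal data-only sketch of that reshaping (hypothetical document, plain dicts instead of the resolver/scope machinery):

schema = {"$ref": "#/components/schemas/Pet"}  # hypothetical schema node
produces = ["application/json"]                # taken from the surrounding scope

# Swagger 2.0 shape: {"responses": {"200": {"schema": ...}}}
# OpenAPI 3 shape:   {"responses": {"200": {"content": {<media type>: {"schema": ...}}}}}
content = {produce: {"schema": schema} for produce in produces}
response = {"description": "OK", "content": content}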
Example #2
def __init__(self, resolver, *, store: _yaml.NodeStore):
    self.resolver = resolver
    self.accessor = StackedAccessor(resolver)
    self.accessing = Accessor()
    self.ref_walking = DictWalker([is_ref])
    self.errors = []
    self.store = store
Example #3
def run(filename: str) -> None:
    d = loading.loadfile(filename)
    w = DictWalker(["vars", "exec"])
    for path, w in w.walk(d):
        # path: services/<service_name>/vars/exec
        service_name = path[-3]
        exec_path = w["exec"]
        if exec_path == "./enduser_server":
            continue
        if exec_path == "./tuner_server":
            continue

        if _normalize(_normalize2(exec_path)).startswith(
                _normalize(_normalize2(service_name))
        ):
            continue
        loading.dumpfile(
            {
                "must_include_part": _normalize(_normalize2(service_name)),
                "ref": f"#/{'/'.join(path[:-2])}",
                "exec_path": exec_path,
            },
            format="json",
        )
        print("")
Example #4
    def __init__(self, resolver, *, store: Store):
        self.resolver = resolver
        self.store = store

        self.accessor = StackedAccessor(resolver)
        self.accessing = Accessor()
        self.ref_walking = DictWalker(["$ref"])
        self.errors = []
Example #5
class Expander:
    def __init__(self, resolver):
        self.resolver = resolver
        self.accessor = StackedAccessor(resolver)
        self.accessing = Accessor()
        self.ref_walking = DictWalker(["$ref"])
        self.errors = []

    def expand(self, doc=None, resolver=None, ctx=None):
        doc = doc or self.resolver.doc
        resolver = resolver or self.resolver

        if "$ref" in doc:
            original = self.accessor.access(doc["$ref"])
            new_doc = self.expand(original,
                                  resolver=self.accessor.resolver,
                                  ctx=ctx)
            self.accessor.pop_stack()
            return new_doc
        else:
            for path, sd in self.ref_walking.iterate(doc):
                try:
                    new_sd = self.expand(sd, resolver=resolver, ctx=ctx)
                    container = self.accessing.access(doc, path[:-1])
                    if not hasattr(container, "parents"):
                        container = ChainMap(make_dict(), container)
                        container.update(new_sd)
                    self.accessing.assign(doc, path[:-1], container)
                except Exception as e:
                    self.errors.append(ReferenceError(e, path=path[:],
                                                      data=sd))
            return doc
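Note: the `hasattr(container, "parents")` check above distinguishes plain dicts from `collections.ChainMap` overlays; wrapping the container in a ChainMap lets the expanded content shadow the node without mutating the original (possibly shared) dict. A minimal standalone sketch of that idea, using only the standard library:

from collections import ChainMap

original = {"$ref": "#/definitions/Pet"}   # hypothetical node that may be shared elsewhere
overlay = ChainMap({}, original)           # a ChainMap has a .parents attribute
overlay.update({"type": "object"})         # writes land in the first (empty) map

assert original == {"$ref": "#/definitions/Pet"}  # the shared node stays untouched
assert overlay["type"] == "object"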
Example #6
def run(filename: str, output: t.Optional[str] = None):
    d = loading.loadfile(filename)
    for path, sd in DictWalker(["allOf"]).walk(d):
        parent = d
        for name in path[:-2]:
            parent = parent[name]
        assert parent[path[-2]] == sd
        parent[path[-2]] = sd.pop("allOf")[0]
    loading.dumpfile(d, output)
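Note: a data-only sketch (hypothetical schema) of what the `allOf` rewrite above produces; the dict holding `allOf` is replaced by the first element of the list, so any remaining elements are dropped:

before = {"definitions": {"Pet": {"allOf": [{"type": "object"}, {"required": ["name"]}]}}}
# after run(): the value under "Pet" becomes allOf[0]
after = {"definitions": {"Pet": {"type": "object"}}}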
Example #7
def build_subset(resolver, ref):
    subset = {}
    ob = resolver.access_by_json_pointer(ref)
    resolver.assign_by_json_pointer(ref, ob, doc=subset)
    for path, sd in DictWalker(["$ref"]).walk(ob):
        # xxx:
        if sd["$ref"].startswith("#/"):
            resolver.assign(path[:-1], sd, doc=subset)
    return subset
Example #8
class Scaner:
    def __init__(self, resolver, *, store: Store):
        self.resolver = resolver
        self.store = store

        self.accessor = StackedAccessor(resolver)
        self.accessing = Accessor()
        self.ref_walking = DictWalker([is_ref])
        self.errors = []

    def scan(self, doc=None, resolver=None):
        if not doc and doc is not None:
            return doc
        resolver = resolver or self.resolver
        try:
            doc = doc or resolver.doc
        except MarkedYAMLError as e:
            if e.problem_mark is not None:
                self.errors.append(ParseError(e, store=self.store))
            if doc is None:
                doc = {}
        doc, _ = self._scan(doc, resolver=resolver, seen={})
        return doc

    def _scan(self, doc, *, resolver, seen: dict):
        if "$ref" in doc:
            original = self.accessor.access(doc["$ref"])
            new_doc, _ = self._scan(
                original, resolver=self.accessor.resolver, seen=seen
            )
            return new_doc, self.accessor.pop_stack()
        else:
            for path, sd in self.ref_walking.iterate(doc):
                try:
                    uid = id(sd)
                    if uid in seen:
                        continue

                    seen[uid] = sd
                    new_sd, sresolver = self._scan(sd, resolver=resolver, seen=seen)
                    if resolver.filename != sresolver.filename:
                        container = self.accessing.access(doc, path[:-1])
                        if not hasattr(container, "parents"):
                            container = ChainMap(make_dict(), container)
                            container.update(new_sd)
                        self.accessing.assign(doc, path[:-1], container)
                except (KeyError, FileNotFoundError) as e:
                    self.errors.append(
                        ReferenceError(e, store=self.store, path=path[:], data=sd)
                    )
                except MarkedYAMLError as e:
                    if e.problem_mark is not None:
                        self.errors.append(
                            ParseError(e, store=self.store, path=path[:], data=sd)
                        )
            return doc, resolver
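Note: the `seen` dict above is keyed by `id()`, so a node reached through more than one `$ref` chain (shared or cyclic structures) is scanned only once. A minimal illustration of that guard with hypothetical data:

shared = {"$ref": "other.yaml#/Thing"}   # one node reachable via two keys
doc = {"a": shared, "b": shared}

seen = {}
visited = []
for node in (doc["a"], doc["b"]):
    if id(node) in seen:
        continue
    seen[id(node)] = node
    visited.append(node)

assert len(visited) == 1  # the second occurrence is skipped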
Example #9
def transform(self, rawtarget, depth):
    d = self.oas2transformer.transform(rawtarget, depth)
    for _, sd in DictWalker(["$ref"]).walk(d):
        sd["$ref"] = sd["$ref"].replace("#/definitions/", "#/components/schemas/")
    if "components" not in d:
        d["components"] = {}
    if "schemas" not in d["components"]:
        d["components"]["schemas"] = {}
    d["components"]["schemas"] = d.pop("definitions", {})
    return d
Example #10
def main(*, src: str) -> None:
    precompile_ref_walker = DictWalker(["$precompile-ref"])

    def onload(d, resolver):
        for path, sd in precompile_ref_walker.walk(d):
            subresolver, query = resolver.resolve(sd.pop("$precompile-ref"))
            value = subresolver.access(subresolver.doc, query)
            jsref = path_to_json_pointer(path[:-1])
            resolver.assign(d, jsref, value)

    d = bundle(src, onload=onload)
    loading.dumpfile(d)
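Note: the `onload` hook above inlines every `$precompile-ref` before bundling: the dict holding the key is replaced by the value the reference points at. A data-only sketch (file name and contents are hypothetical):

# document being loaded
before = {"definitions": {"color": {"$precompile-ref": "./colors.yaml#/black"}}}
# assuming ./colors.yaml contains {"black": {"type": "string", "enum": ["black"]}},
# the resolver assigns the resolved value at the parent path after onload():
after = {"definitions": {"color": {"type": "string", "enum": ["black"]}}}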
Example #11
def migrate_for_subfile(u, *, scope):
    for resolver in u.resolvers:
        uu = u.new_child(resolver)
        if uu.has("paths", resolver=resolver):
            from dictknife import DictWalker, Or

            method_walker = DictWalker(
                [Or(["get", "post", "put", "delete", "patch", "head"])])
            schema_walker = DictWalker(["schema"])

            for path, sd in method_walker.walk(uu.resolver.doc["paths"]):
                # parameters
                # responses
                frame = {}
                if "produces" in sd[path[-1]]:
                    ref = path_to_json_pointer(["paths", *path, "produces"])
                    frame["produces"] = uu.pop(ref)

                with scope.scope(frame or None):
                    if "responses" in sd[path[-1]]:
                        for spath, ssd in schema_walker.walk(
                                sd[path[-1]]["responses"]):
                            fullpath = ["paths", *path, "responses", *spath]
                            ref = path_to_json_pointer(fullpath)
                            schema = uu.pop(ref)
                            content = uu.make_dict()
                            for produce in scope[["produces"]]:
                                content[produce] = {"schema": schema}
                            ref = path_to_json_pointer(
                                [*fullpath[:-1], "content"])
                            uu.update(ref, content)
Example #12
def main(*, src: str) -> None:
    precompile_ref_walker = DictWalker(["$precompile-ref"])
    accessor = Accessor()

    def onload(d, subresolver):
        for path, sd in precompile_ref_walker.walk(d):
            sdoc, query = subresolver.resolve(sd.pop("$precompile-ref"))
            sresolved = access_by_json_pointer(sdoc.doc, query)
            accessor.assign(d, path[:-1], sresolved)

    resolver = get_resolver_from_filename(src, onload=onload)
    d = Bundler(resolver).bundle()
    loading.dumpfile(d)
Example #13
class Scaner:
    def __init__(self, resolver, *, store: Store):
        self.resolver = resolver
        self.store = store

        self.accessor = StackedAccessor(resolver)
        self.accessing = Accessor()
        self.ref_walking = DictWalker(["$ref"])
        self.errors = []

    def scan(self, doc=None, resolver=None, ctx=None):
        resolver = resolver or self.resolver
        try:
            doc = doc or resolver.doc
        except MarkedYAMLError as e:
            if e.problem_mark is not None:
                self.errors.append(ParseError(e, store=self.store))
            if doc is None:
                doc = {}

        if "$ref" in doc:
            original = self.accessor.access(doc["$ref"])
            new_doc = self.scan(original,
                                resolver=self.accessor.resolver,
                                ctx=ctx)
            self.accessor.pop_stack()
            return new_doc
        else:
            for path, sd in self.ref_walking.iterate(doc):
                try:
                    new_sd = self.scan(sd, resolver=resolver, ctx=ctx)
                    container = self.accessing.access(doc, path[:-1])
                    if not hasattr(container, "parents"):
                        container = ChainMap(make_dict(), container)
                        container.update(new_sd)
                    self.accessing.assign(doc, path[:-1], container)
                except (KeyError, FileNotFoundError) as e:
                    self.errors.append(
                        ReferenceError(e,
                                       store=self.store,
                                       path=path[:],
                                       data=sd))
                except MarkedYAMLError as e:
                    if e.problem_mark is not None:
                        self.errors.append(
                            ParseError(e,
                                       store=self.store,
                                       path=path[:],
                                       data=sd))
            return doc
Example #14
def migrate_refs(uu, data, *, scope, walker=DictWalker(["$ref"])):
    for path, d in walker.walk(data):
        if "/definitions/" in d["$ref"]:
            uu.update_by_path(
                path, d["$ref"].replace("/definitions/",
                                        "/components/schemas/", 1))
        if "/parameters/" in d["$ref"]:
            uu.update_by_path(
                path, d["$ref"].replace("/parameters/",
                                        "/components/parameters/", 1))

        if "/responses/" in d["$ref"]:
            uu.update_by_path(
                path, d["$ref"].replace("/responses/",
                                        "/components/responses/", 1))
Example #15
def merge(*, extracted, jsonschema):
    schemas = {}
    paths = extracted["paths"]

    for name, s in jsonschema["definitions"].items():
        if "$ref" in s:
            # str.replace returns a new string, so assign the result back
            s["$ref"] = s["$ref"].replace("#/definitions", "#/components/schemas")
        schemas[name] = s

    # remove needless title property
    for path, sd in DictWalker(["title"]).walk(schemas):
        if path[-2] == sd["title"]:
            sd.pop("title")

    return {"components": {"schemas": schemas}, "paths": paths}
Example #16
def json2swagger(
    *,
    files,
    dst: str,
    output_format: str,
    name: str,
    detector,
    emitter,
    annotate,
    emit,
    with_minimap: bool,
    without_example: bool
):
    from prestring import Module
    from dictknife import DictWalker

    if annotate is not None:
        annotate = loading.loadfile(annotate)
    else:
        annotate = {}

    ns = "dictknife.swaggerknife.json2swagger"
    detector = import_symbol(detector, ns=ns)()
    emitter = import_symbol(emitter, ns=ns)(annotate)

    info = None
    for src in files:
        data = loading.loadfile(src)
        info = detector.detect(data, name, info=info)

    if emit == "info":
        loading.dumpfile(info, filename=dst)
    else:
        m = Module(indent="  ")
        m.stmt(name)
        emitter.emit(info, m)
        if with_minimap:
            print("# minimap ###")
            print("# *", end="")
            print("\n# ".join(str(m).split("\n")))

        if without_example:
            for _, d in DictWalker(["example"]).walk(emitter.doc):
                d.pop("example")
        loading.dumpfile(emitter.doc, filename=dst, format=output_format)
Example #17
from prestring.python import Module
from dictknife import DictWalker
from dictknife import loading

w = DictWalker(["lines"])
d = loading.loadfile(format="json")

r = []
for _, d in w.walk(d):
    if d["language"] == "python" or d["language"] == "py":
        r.append(d["lines"])

m = Module()
m.from_("nbreversible", "code")
for lines in r:
    with m.with_("code()"):
        for line in lines:
            if line.startswith("%"):
                m.stmt("#{}", line)
            else:
                m.stmt(line.rstrip())
    m.sep()
print(m)
Example #18
def onload(d, resolver, w=DictWalker(["$include"])):
    for _, sd in w.walk(d):
        subresolver, jsref = resolver.resolve(sd.pop("$include"))
        sd.update(subresolver.access_by_json_pointer(jsref))
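Note: `$include` above is an in-place merge: the key is popped and the referenced mapping is merged into the surrounding dict, keeping its sibling keys (colliding keys are overwritten by the included values). A data-only sketch with hypothetical file contents:

before = {"person": {"description": "a person",
                     "$include": "./common.yaml#/definitions/Name"}}
# assuming ./common.yaml#/definitions/Name is
# {"type": "object", "properties": {"name": {"type": "string"}}}:
after = {"person": {"description": "a person",
                    "type": "object",
                    "properties": {"name": {"type": "string"}}}}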
Example #19
def __init__(self, resolver):
    self.resolver = resolver
    self.accessor = StackedAccessor(resolver)
    self.accessing = Accessor()
    self.ref_walking = DictWalker(["$ref"])
Example #20
def ref_walking(self):
    return DictWalker([is_ref])
Example #21
import pathlib
import importlib.util
from dictknife import loading
from dictknife import Accessor
from dictknife import DictWalker

spec = importlib.util.find_spec("botocore")
path = pathlib.Path(spec.origin)
if path.name == "__init__.py":
    path = path.parent
d = loading.loadfile(path / ("data/sqs/2012-11-05/service-2.json"))

dst = {}
a = Accessor(make_dict=dict)

for name, sd in d["operations"].items():
    path = ["operations", name]
    a.assign(dst, path, sd)
    ssd = a.access(d, ["shapes", sd["input"]["shape"]])
    a.assign(dst, ["shapes", sd["input"]["shape"]], ssd)

    if "output" in sd:
        ssd = a.access(d, ["shapes", sd["output"]["shape"]])
        a.assign(dst, ["shapes", sd["output"]["shape"]], ssd)

# slim-up
for path, sd in DictWalker(["documentation"]).walk(dst):
    sd.pop("documentation")
loading.dumpfile(dst, format="json")
Example #22
def ref_walker(self):
    return DictWalker([And(["$ref", _is_string])])
Example #23
def ref_walking(self):
    return DictWalker(["$ref"])
Example #24
def migrate_for_subfile(uu,
                        *,
                        scope,
                        callbacks,
                        ref_wawlker=DictWalker(["$ref"])):
    migrate_refs(uu, uu.resolver.doc, scope=scope)
    if uu.has("definitions"):
        uu.update_by_path(["components", "schemas"],
                          uu.pop_by_path(["definitions"]))
    if uu.has("parameters"):
        request_bodies = migrate_parameters(uu,
                                            uu.resolver.doc,
                                            path=["parameters"],
                                            scope=scope)
        uu.update_by_path(["components", "parameters"],
                          uu.pop_by_path(["parameters"]))
        # for in:body
        if request_bodies:
            names = list(request_bodies.keys())

            # todo: optimization?
            def fixref(uu, *, names=names):
                will_be_remove_paths_if_empty = set()
                for path, sd in ref_wawlker.walk(uu.resolver.doc):
                    ref = sd["$ref"]
                    if is_empty(ref):
                        continue
                    if "#/components/parameters" not in ref:
                        continue
                    if not any(name in ref for name in names):
                        continue
                    uu.pop_by_path(path[:-1])
                    new_value = {
                        "$ref":
                        ref.replace("#/components/parameters",
                                    "#/components/requestBodies")
                    }
                    if path[0] == "paths":
                        new_path = itertools.takewhile(
                            lambda x: x != "parameters", path)
                        uu.update_by_path([*new_path, "requestBody"],
                                          new_value)
                    elif path[0] == "components":
                        # #/components/parameters/<name>
                        uu.update_by_path(
                            ["components", "requestBodies", path[2]],
                            new_value)
                    else:
                        raise RuntimeError("unexpected path: {}".format(path))

                    will_be_remove_paths_if_empty.add(tuple(path[:-2]))

                for path in will_be_remove_paths_if_empty:
                    if is_empty_collection(uu.resolver.access(path)):
                        uu.pop_by_path(path)

            callbacks.append(fixref)

            for name, body in request_bodies.items():
                uu.update_by_path(["components", "requestBodies", name], body)

    if uu.has("responses"):
        uu.update_by_path(["components", "responses"],
                          uu.pop_by_path(["responses"]))
    if uu.has("securityDefinitions"):
        uu.update_by_path(["components", "securitySchemes"],
                          uu.pop_by_path(["securityDefinitions"]))

    # todo: requestBodies
    frame = make_dict()
    frame.update(
        migrate_parameters(uu,
                           uu.resolver.doc,
                           path=["parameters"],
                           scope=scope))

    with scope.scope(frame or None):
        if uu.has("paths"):
            migrate_paths(uu, uu.resolver.doc, scope=scope)
Example #25
def migrate_paths(uu, data, *, scope, schema_walker=DictWalker(["schema"])):
    for url_path, path_item in data["paths"].items():
        # xxx: vendor extensions?
        if url_path.startswith("x-"):
            continue

        # todo: parse pathItem object
        # https://github.com/OAI/OpenAPI-Specification/blob/master/versions/2.0.md#path-item-object
        operation_methods = [
            "get", "put", "post", "delete", "options", "head", "patch"
        ]
        frame = make_dict()
        frame.update(
            migrate_parameters(uu,
                               path_item,
                               path=["paths", url_path, "parameters"],
                               scope=scope))
        with scope.scope(frame or None):
            for method_name in operation_methods:
                operation = path_item.get(method_name)
                if operation is None:
                    continue

                # todo: parse Operation object
                # https://github.com/OAI/OpenAPI-Specification/blob/master/versions/2.0.md#operation-object

                frame = make_dict()

                # parameters
                frame.update(
                    migrate_parameters(
                        uu,
                        operation,
                        path=["paths", url_path, method_name, "parameters"],
                        scope=scope,
                    ))

                # produces
                if "produces" in operation:
                    frame["produces"] = uu.pop_by_path(
                        ["paths", url_path, method_name, "produces"])
                # consumes
                if "consumes" in operation:
                    frame["consumes"] = uu.pop_by_path(
                        ["paths", url_path, method_name, "consumes"])

                # responses
                with scope.scope(frame or None):
                    # requestBody
                    request_body = scope.get(["requestBody"])
                    if request_body is not None:
                        uu.update_by_path(
                            ["paths", url_path, method_name, "requestBody"],
                            request_body,
                        )

                    if "responses" in operation:
                        for spath, sd in schema_walker.walk(
                                operation["responses"]):
                            fullpath = [
                                "paths",
                                url_path,
                                method_name,
                                "responses",
                                *spath,
                            ]
                            schema = uu.pop_by_path(fullpath)
                            content = uu.make_dict()
                            for produce in scope[["produces"]]:
                                content[produce] = {"schema": schema}
                            uu.update_by_path([*fullpath[:-1], "content"],
                                              content)
Example #26
from dictknife import DictWalker
from dictknife import loading

# from: https://github.com/BigstickCarpet/json-schema-ref-parser
d = loading.loads(
    """
{
  "definitions": {
    "person": {
      "$ref": "schemas/people/Bruce-Wayne.json"
    },
    "place": {
      "$ref": "schemas/places.yaml#/definitions/Gotham-City"
    },
    "thing": {
      "$ref": "http://wayne-enterprises.com/things/batmobile"
    },
    "color": {
      "$ref": "#/definitions/thing/properties/colors/black-as-the-night"
    }
  }
}
""",
    format="json"
)

walker = DictWalker(["$ref"])
refs = [("/".join(path[:]), sd["$ref"]) for path, sd in walker.walk(d)]

for path, ref in refs:
    print(path, ref)
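Note: `walk` yields the dict holding the matched key together with the full key path, with the matched key itself as the last element (the assert in Example #6 relies on this), so the loop above is expected to print something like:

definitions/person/$ref schemas/people/Bruce-Wayne.json
definitions/place/$ref schemas/places.yaml#/definitions/Gotham-City
definitions/thing/$ref http://wayne-enterprises.com/things/batmobile
definitions/color/$ref #/definitions/thing/properties/colors/black-as-the-night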
Example #27
def xxx_of_walker(self):
    return DictWalker([And([Or(_combine_types), _is_array])])
Example #28
def description_walker(self):
    return DictWalker(["description"])
Example #29
def onload(d, resolver, w=DictWalker(["$xref"])):
    for path, sd in w.walk(d):
        subresolver, jsonref = resolver.resolve(sd.pop("$xref"))
        value = subresolver.access_by_json_pointer(jsonref)
        resolver.assign(path[:-1], value)
Example #30
class Loader:
    def __init__(self, resolver, *, store: _yaml.NodeStore):
        self.resolver = resolver
        self.accessor = StackedAccessor(resolver)
        self.accessing = Accessor()
        self.ref_walking = DictWalker([is_ref])
        self.errors = []
        self.store = store

    @property
    def filename(self) -> str:
        return self.resolver.filename

    def load(self, doc=None, resolver=None):
        if not doc and doc is not None:
            return doc
        resolver = resolver or self.resolver
        try:
            doc = doc or resolver.doc
        except _yaml.MarkedYAMLError as e:
            if e.problem_mark is not None:
                self.errors.append(ParseError(e, history=[resolver.filename]))
            if doc is None:
                doc = {}
        doc, _ = self._load(doc, resolver=resolver, seen={})
        return doc

    def _load(self, doc, *, resolver, seen: dict):
        if "$ref" in doc:
            original = self.accessor.access(doc["$ref"])
            new_doc, _ = self._load(original,
                                    resolver=self.accessor.resolver,
                                    seen=seen)
            return new_doc, self.accessor.pop_stack()
        else:
            for path, sd in self.ref_walking.iterate(doc):
                try:
                    uid = id(sd)
                    if uid in seen:
                        continue

                    seen[uid] = sd
                    new_sd, sresolver = self._load(sd,
                                                   resolver=resolver,
                                                   seen=seen)
                    if resolver.filename != sresolver.filename:
                        container = self.accessing.access(doc, path[:-1])
                        if not hasattr(container, "parents"):
                            container = ChainMap(make_dict(), container)
                            container.update(new_sd)
                        self.accessing.assign(doc, path[:-1], container)
                except FileNotFoundError as e:
                    self.errors.append(
                        ResolutionError(
                            e,
                            path=path[:],
                            data=sd,
                            history=[
                                r.filename for r in self.accessor.stack[:-1]
                            ],
                        ))
                except KeyError as e:
                    self.errors.append(
                        ResolutionError(
                            e,
                            path=path[:],
                            data=sd,
                            history=[r.filename for r in self.accessor.stack],
                        ))
                except _yaml.MarkedYAMLError as e:
                    if e.problem_mark is not None:
                        self.errors.append(
                            ParseError(
                                e,
                                path=path[:],
                                data=sd,
                                history=[
                                    r.filename for r in self.accessor.stack
                                ],
                            ))
            return doc, resolver
Example #31
def describe_dict(d, *, life=None, showzero_callable=None):
    sigmap = {}
    walker = DictWalker([Or(["$len", "$members"])])

    def _describe_type(v):
        if hasattr(v, "keys"):
            return dict.__name__
        return type(v).__name__

    def _show_on_lifezero(d, *, path):
        if hasattr(d, "keys"):
            rep = make_dict()
            for k, v in d.items():
                if hasattr(v, "keys"):
                    rep[k] = f"{_describe_type(v)}@{len(v)}"
                elif isinstance(v, (list, tuple)):
                    rep[k] = f"{_describe_type(v)}@{len(v)}"
                else:
                    rep[k] = f"{_describe_type(v)}"
            sig = json.dumps(rep, sort_keys=True, default=str)
            if sig in sigmap:
                return sigmap[sig]
            sigmap[sig] = path_to_json_pointer(path)
            return rep

        elif isinstance(d, (list, tuple)):
            seen = {}
            for x in d:
                rep = _show(x, life=0, path=path)
                sig = json.dumps(rep, sort_keys=True, default=str)
                if sig in seen:
                    continue
                seen[sig] = rep
            return {"$len": len(d), "$cases": list(seen.values())}
        else:
            return _describe_type(d)

    def _show(d,
              *,
              path,
              life,
              showzero_callable=showzero_callable or _show_on_lifezero):
        if life == 0:  # -1 is full expand
            return showzero_callable(d, path=path)

        if hasattr(d, "keys"):
            rep = make_dict()
            for k, v in d.items():
                path.append(k)
                if hasattr(v, "__len__") and len(v) == 0:
                    rep[k] = f"{_describe_type(v)}@{len(v)}"
                elif hasattr(v, "keys"):
                    rep[k] = _show(v, life=life - 1, path=path)
                elif isinstance(v, (list, tuple)):
                    rep[k] = _show(v, life=life - 1, path=path)
                else:
                    rep[k] = f"{_describe_type(v)}"
                path.pop()
            sig = json.dumps(rep, sort_keys=True, default=str)
            if sig in sigmap:
                return sigmap[sig]
            sigmap[sig] = path_to_json_pointer(path)
            return rep
        elif isinstance(d, (list, tuple)):
            seen = {}
            members = []
            path.append("[]")

            csigmap = {}
            for x in d:
                rep = _show(x, life=life - 1, path=path)
                sig = json.dumps(rep, sort_keys=True, default=str)

                if sig in seen:
                    members.append(csigmap[sig])
                    continue
                rep = copy.deepcopy(rep)
                for ks, sd in walker.walk(rep):
                    sd.pop(ks[-1])
                seen[sig] = rep
                csigmap[sig] = f"{path_to_json_pointer(path)}/$cases/{len(csigmap)}"
                members.append(csigmap[sig])
            rep = {"$len": len(d)}
            if seen:
                rep["$cases"] = list(seen.values())
            if members:
                rep["$members"] = members
            path.pop()
            sig = json.dumps(rep, sort_keys=True, default=str)
            if sig in sigmap:
                return sigmap[sig]
            sigmap[sig] = path_to_json_pointer(path)
            return rep
        else:
            return _describe_type(d)

    if life is None:
        life = int(os.environ.get("LIFE") or "0")
    rep = _show(d, life=life, path=["#"])
    return rep