def transform(*, src: str, dst: str, config: str, config_file: str, code: str, functions: str, input_format: str, output_format: str, format: str, sort_keys: str):
    """transform dict"""
    from magicalimport import import_symbol
    from dictknife import deepmerge

    # Pick the transforming callable from one of three sources:
    # an inline python expression, a list of function names, or identity.
    if code is not None:
        # NOTE(review): eval() on user-supplied text -- acceptable only for a
        # trusted CLI invocation, never for untrusted input.
        transform = eval(code)
    elif functions:
        def transform(value):
            current = value
            for name in functions:
                # bare names resolve inside dictknife.transform
                qualified = name if "." in name else "dictknife.transform:{}".format(name)
                current = import_symbol(qualified)(current)
            return current
    else:
        def transform(x):
            return x

    input_format = input_format or format
    kwargs = loading.loads(config, format=input_format)
    if config_file:
        # config from a file is merged over the inline config string
        with open(config_file) as rf:
            kwargs = deepmerge(kwargs, loading.load(rf, format=input_format))

    data = loading.loadfile(src, input_format)
    result = transform(data, **kwargs)
    loading.dumpfile(
        result,
        dst,
        format=output_format or format,
        sort_keys=sort_keys,
    )
def dump(self, commands, outdir):
    """Render each (template, command, data) triple and write the result under *outdir*."""
    base = outdir or "."
    for template, command, data in commands:
        rendered = _render_with_newline(template, data)
        destination = os.path.join(base, command["dst"])
        logger.info("rendering %s (template=%s)", destination, template.name)
        fmt = command.get("format") or self.format or "raw"
        if fmt == "raw":
            payload = rendered
        else:
            # round-trip through the loader so the output is serialized in `fmt`
            payload = loading.loads(rendered, format=fmt)
        loading.dumpfile(payload, destination, format=fmt)
def test_load(self):
    """$load with keyword arguments: the loaded document reads them via $get."""
    from tempfile import TemporaryDirectory
    from pathlib import Path

    with TemporaryDirectory() as d:
        d = Path(d)
        # a.yaml: $let binds mydata, then $load-s b.yaml passing data={$get: mydata}
        # NOTE(review): indentation of these YAML literals reconstructed -- confirm
        # against the repository original.
        a = textwrap.dedent(
            """
        b:
          $let:
            mydata: {value: 10}
          body:
            $load: ./b.yaml
            data: {$get: mydata}
        """
        )
        loading.dumpfile(loading.loads(a), str(d.joinpath("./a.yaml")))
        # b.yaml: expects a `data` argument and echoes it back
        b = textwrap.dedent(
            """
        # need: data
        name: b
        data: {$get: data}
        """
        )
        loading.dumpfile(loading.loads(b), str(d.joinpath("./b.yaml")))

        # minimal "module" exposing only the actions this test needs
        class m:
            from zenmai.actions import get  # NOQA
            from zenmai.actions import load  # NOQA

        d = self._callFUT(a, m, filename=str(d.joinpath("./a.yaml")))
        actual = loading.dumps(d)
        expected = textwrap.dedent(
            """
        b:
          name: b
          data:
            value: 10
        """
        )
        self.assertDiff(actual.strip(), expected.strip())
def cat(
    *,
    files,
    dst,
    format,
    input_format,
    output_format,
    sort_keys,
    encoding=None,
    errors=None,
    size=None,
    slurp=False,
    extra=None,
    merge_method="addtoset",
):
    """Concatenate/merge *files* into a single document and dump it to *dst*.

    Dict-like inputs are deep-merged; list-like inputs are merged as lists
    (a single list input is passed through unchanged).
    """
    from dictknife import deepmerge

    input_format = input_format or format
    d = make_dict()
    # ExitStack keeps every opened file alive until all merging is done
    with contextlib.ExitStack() as s:
        for f in files:
            logger.debug("merge: %s", f)
            opener = loading.get_opener(filename=f, format=input_format, default=_open)
            rf = s.enter_context(opener(f, encoding=encoding, errors=errors))
            if slurp:
                # one document per line (json-lines style); lazily parsed
                sd = (loading.loads(line, format=input_format) for line in rf)
            else:
                sd = loading.load(rf, format=input_format, errors=errors)
            if size is not None:
                # keep only the first `size` items of a sequence input
                sd = itertools.islice(sd, size)
            if hasattr(sd, "keys"):
                # mapping: deep-merge into the accumulator
                d = deepmerge(d, sd, method=merge_method)
            elif len(files) == 1:
                # a single non-mapping input passes through as-is
                d = sd
            else:
                # multiple non-mapping inputs: coerce accumulator to a list first
                if not isinstance(d, (list, tuple)):
                    d = [d] if d else []
                d = deepmerge(d, sd, method=merge_method)
    loading.dumpfile(d, dst, format=output_format or format, sort_keys=sort_keys, extra=extra)
def transform(
        d,
        *,
        default_content_type="application/json",
        is_specific_header=is_specific_header,
        get_value=get_value,
        with_response_type=True,
        with_request_type=True,
        with_cookies=True,
        include_all=False,
):
    """Convert a HAR-like mapping (path -> method -> entries) into an OpenAPI-style dict."""
    r = make_dict()
    a = Accessor()
    for path, methods in d.items():
        for method, entries in methods.items():
            # NOTE(review): `d` is rebound here, shadowing the input argument.
            # This works only because the outer .items() iterators were created
            # before the rebind -- fragile, but behavior-preserving.
            d = {"description": ""}
            seen_parameters = defaultdict(set)
            request_bodies: t.List[dict] = []
            # values are *lists* of parsed JSON bodies (defaultdict(list)),
            # keyed by (status, content_type)
            response_bodies_dict: t.Dict[t.Tuple[int, str], t.List[dict]] = defaultdict(list)
            for e in entries:
                # request
                # params :: path,query,header,cookie
                parameters = []
                for param_type, k, enabled in [
                        ("query", "queryString", True),
                        ("header", "headers", True),
                        ("cookie", "cookies", with_cookies),
                ]:
                    if not enabled:
                        continue
                    seen = seen_parameters[k]
                    for h in e["request"][k]:
                        # de-duplicate parameters across entries by name
                        if h["name"] in seen:
                            continue
                        seen.add(h["name"])
                        if include_all or is_specific_header(
                                h["name"], h["value"]):
                            parameters.append({
                                "name": h["name"],
                                "in": param_type,
                                "example": get_value(h["name"], h["value"]),  # masking?
                            })
                if parameters:
                    d["parameters"] = parameters
                if e["request"].get("postData"):
                    post_data = e["request"]["postData"]
                    # "application/json; charset=utf-8" -> "application/json"
                    content_type = post_data["mimeType"].split(";", 1)[0]
                    if content_type.endswith("/json") and with_request_type:
                        request_bodies.append(
                            loading.loads(post_data["text"], format="json"))

                # response
                status = e["response"]["status"]
                if status == 304:
                    status = 200  # not modified -> ok
                content_type = e["response"]["content"].get("mimeType")
                if content_type is None:
                    # fall back to the Content-Type response header, then the default
                    for h in e["response"]["headers"]:
                        if h["name"].lower() == "content-type":
                            content_type = h["value"]
                            break
                    else:
                        content_type = default_content_type
                # "application/json; charset=utf-8" -> "application/json"
                content_type = content_type.split(";", 1)[0]
                schema = {}
                if content_type.startswith("text/"):
                    a.assign(schema, ["type"], "string")
                elif content_type.endswith("/json") and with_response_type:
                    # collect bodies; the schema is detected after the entry loop
                    response_bodies_dict[(status, content_type)].append(
                        loading.loads(e["response"]["content"]["text"],
                                      format="json"))
                a.assign(
                    d,
                    ["responses", status],
                    {
                        "description": e["response"]["statusText"],
                        "content": {
                            content_type: {
                                "schema": schema
                            }
                        },
                    },
                )
            if request_bodies:
                # feed every body into one Detector; the last `info` accumulates all
                detector = schemalib.Detector()
                info = None
                for body in request_bodies:
                    info = detector.detect(body, name="")
                a.assign(d, ["requestBody"], schemalib.makeschema_from_info(info))
            if response_bodies_dict:
                for (status, content_type), bodies in response_bodies_dict.items():
                    detector = schemalib.Detector()
                    info = None
                    for body in bodies:
                        info = detector.detect(body, name="")
                    a.assign(
                        d,
                        [
                            "responses", status, "content", content_type, "schema"
                        ],
                        schemalib.makeschema_from_info(info),
                    )
            a.assign(r, ["paths", path, method.lower()], d)
    return r
def _callFUT(self, source, m, filename=None):
    """Parse *source* and compile it against module *m* with zenmai."""
    from zenmai import compile
    document = loading.loads(source)
    return compile(document, m, filename=filename)
def test_jinja2_raw_format(self):
    """jinja2 template actions with format=raw keep rendered text as plain strings."""
    from tempfile import TemporaryDirectory
    from pathlib import Path

    # minimal "module" exposing only the actions this test needs
    class m:
        from zenmai.actions import jinja2_templatefile, jinja2_template  # NOQA
        from zenmai.actions import load  # NOQA

    with TemporaryDirectory() as d:
        d = Path(d)
        # main.yaml: bind a template-file action, then expand it three times
        # NOTE(review): indentation of these literals reconstructed -- confirm
        # against the repository original.
        main = textwrap.dedent(
            """
        $let:
          readme-template:
            $jinja2_templatefile: ./readme.jinja2
            format: raw
        body:
          ./one.md:
            $readme-template:
              name: one
          ./two.md:
            $readme-template:
              name: two
          ./three.md:
            $readme-template:
              name: three
        """
        )
        loading.dumpfile(loading.loads(main), str(d.joinpath("./main.yaml")))
        template = textwrap.dedent(
            """\
        # {{name}}
        this is {{name}}.
        """
        )
        with open(str(d.joinpath("./readme.jinja2")), "w") as wf:
            wf.write(template)
        d = self._callFUT(main, m, filename=str(d.joinpath("./main.yaml")))
        actual = loading.dumps(d)
        expected = textwrap.dedent(
            """
        ./one.md: |-
          # one
          this is one.
        ./two.md: |-
          # two
          this is two.
        ./three.md: |-
          # three
          this is three.
        """
        )
        self.assertDiff(actual.strip(), expected.strip())

    # inline template variant: $jinja2_template with a literal block scalar
    source = textwrap.dedent(
        """
    $let:
      item-template:
        $jinja2_template: |
          items:
          {% for i in nums %}
          - {{prefix|default("no")}}.{{i}}
          {% endfor %}
    body:
      listing:
        $item-template:
          nums: [1,2,3]
    """
    )
    d = self._callFUT(source, m)
    actual = loading.dumps(d)
    expected = textwrap.dedent(
        """
    listing:
      items:
      - no.1
      - no.2
      - no.3
    """
    )
    self.assertDiff(actual.strip(), expected.strip())
def _callFUT(self, source, m):
    """Parse *source* and compile it against module *m* with zenmai."""
    from zenmai import compile
    parsed = loading.loads(source)
    return compile(parsed, m)
def test_load(self):
    """$load resolves relative paths against the loading file, recursively."""
    from tempfile import TemporaryDirectory
    from pathlib import Path

    with TemporaryDirectory() as d:
        d = Path(d)
        # main.yaml pulls in two sub-documents from sibling directories
        # NOTE(review): indentation of these YAML literals reconstructed -- confirm
        # against the repository original.
        main = textwrap.dedent(
            """
        definitions:
          one:
            $load: "./one/one.yaml"
          two:
            $load: "./two/two.yaml"
        """
        )
        loading.dumpfile(loading.loads(main), str(d.joinpath("./main.yaml")))
        # one/one.yaml itself $load-s value.yaml (relative to one/)
        one = textwrap.dedent(
            """
        type: object
        properties:
          value:
            $load: "./value.yaml"
        """
        )
        loading.dumpfile(loading.loads(one), str(d.joinpath("./one/one.yaml")))
        value = textwrap.dedent(
            """
        description: value
        type: integer
        """
        )
        loading.dumpfile(loading.loads(value), str(d.joinpath("./one/value.yaml")))
        # two/two.yaml reaches across directories with a "../" path
        two = textwrap.dedent(
            """
        type: object
        properties:
          value:
            $load: "../one/value.yaml"
        """
        )
        loading.dumpfile(loading.loads(two), str(d.joinpath("./two/two.yaml")))

        class m:
            from zenmai.actions import load  # NOQA

        d = self._callFUT(main, m, filename=str(d.joinpath("./main.yaml")))
        actual = loading.dumps(d)
        expected = textwrap.dedent(
            """
        definitions:
          one:
            type: object
            properties:
              value:
                description: value
                type: integer
          two:
            type: object
            properties:
              value:
                description: value
                type: integer
        """
        )
        self.assertDiff(actual.strip(), expected.strip())
# Demo: access_by_json_pointer raises KeyError for a missing path.
# The second print below intentionally fails; the expected traceback is kept
# as a comment for reference.
from dictknife import loading
from dictknife.jsonknife import access_by_json_pointer

s = """
arguments:
  x: foo
"""
d = loading.loads(s)
print(access_by_json_pointer(d, "#/arguments/x"))
print(access_by_json_pointer(d, "#/arguments/y"))
# Traceback (most recent call last):
#   File "VENV/lib/python3.7/site-packages/dictknife/jsonknife/accessor.py", line 24, in access_by_json_pointer
#     return accessor.access(doc, path)
#   File "VENV/lib/python3.7/site-packages/dictknife/accessing.py", line 15, in access
#     d = d[name]
# KeyError: 'y'
#
# During handling of the above exception, another exception occurred:
#
# Traceback (most recent call last):
#   File "00jsonref.py", line 11, in <module>
#     print(access_by_json_pointer(d, "#/arguments/y"))
#   File "VENV/lib/python3.7/site-packages/dictknife/jsonknife/accessor.py", line 26, in access_by_json_pointer
#     raise KeyError(query)
# KeyError: '#/arguments/y'
def dump(self, d, dst):
    """Write *d* to *dst* in self.format; non-raw input text is parsed first."""
    fmt = self.format
    payload = d if fmt == "raw" else loading.loads(d, format=fmt)
    return loading.dumpfile(payload, dst, format=fmt)
# Demo: walk a document with DictWalker and list every "$ref" with its path.
from dictknife import loading
from dictknife import DictWalker

# from: https://github.com/BigstickCarpet/json-schema-ref-parser
d = loading.loads(
    """
{
  "definitions": {
    "person": {
      "$ref": "schemas/people/Bruce-Wayne.json"
    },
    "place": {
      "$ref": "schemas/places.yaml#/definitions/Gotham-City"
    },
    "thing": {
      "$ref": "http://wayne-enterprises.com/things/batmobile"
    },
    "color": {
      "$ref": "#/definitions/thing/properties/colors/black-as-the-night"
    }
  }
}
""",
    format="json"
)
walker = DictWalker(["$ref"])
# each hit yields (path-to-containing-dict, containing-dict); join path for display
refs = [("/".join(path[:]), sd["$ref"]) for path, sd in walker.walk(d)]
for path, ref in refs:
    print(path, ref)
def transform(
    d,
    *,
    default_content_type="application/json",
    is_specific_header=is_specific_header,
    get_value=get_value,
    with_response_type=True,
    with_request_type=True,
    with_cookies=True,
    include_all=False,
):
    """Convert a HAR-like mapping (path -> method -> entries) into an OpenAPI-style dict."""
    r = make_dict()
    a = Accessor()
    for path, methods in d.items():
        for method, entries in methods.items():
            # NOTE(review): `d` is rebound here, shadowing the input argument.
            # This works only because the outer .items() iterators were created
            # before the rebind -- fragile, but behavior-preserving.
            d = {"description": ""}
            seen_parameters = defaultdict(set)
            request_bodies: t.List[dict] = []
            # values are *lists* of parsed JSON bodies (defaultdict(list)),
            # keyed by (status, content_type)
            response_bodies_dict: t.Dict[t.Tuple[int, str], t.List[dict]] = defaultdict(list)
            for e in entries:
                # request
                # params :: path,query,header,cookie
                parameters = []
                for param_type, k, enabled in [
                    ("query", "queryString", True),
                    ("header", "headers", True),
                    ("cookie", "cookies", with_cookies),
                ]:
                    if not enabled:
                        continue
                    seen = seen_parameters[k]
                    for h in e["request"][k]:
                        # de-duplicate parameters across entries by name
                        if h["name"] in seen:
                            continue
                        seen.add(h["name"])
                        if include_all or is_specific_header(h["name"], h["value"]):
                            parameters.append(
                                {
                                    "name": h["name"],
                                    "in": param_type,
                                    "example": get_value(
                                        h["name"], h["value"]
                                    ),  # masking?
                                }
                            )
                if parameters:
                    d["parameters"] = parameters
                if e["request"].get("postData"):
                    post_data = e["request"]["postData"]
                    # "application/json; charset=utf-8" -> "application/json"
                    content_type = post_data["mimeType"].split(";", 1)[0]
                    if content_type.endswith("/json") and with_request_type:
                        request_bodies.append(
                            loading.loads(post_data["text"], format="json")
                        )

                # response
                status = e["response"]["status"]
                if status == 304:
                    status = 200  # not modified -> ok
                content_type = e["response"]["content"].get("mimeType")
                if content_type is None:
                    # fall back to the Content-Type response header, then the default
                    for h in e["response"]["headers"]:
                        if h["name"].lower() == "content-type":
                            content_type = h["value"]
                            break
                    else:
                        content_type = default_content_type
                # "application/json; charset=utf-8" -> "application/json"
                content_type = content_type.split(";", 1)[0]
                schema = {}
                if content_type.startswith("text/"):
                    a.assign(schema, ["type"], "string")
                elif content_type.endswith("/json") and with_response_type:
                    # collect bodies; the schema is detected after the entry loop
                    response_bodies_dict[(status, content_type)].append(
                        loading.loads(e["response"]["content"]["text"], format="json")
                    )
                a.assign(
                    d,
                    ["responses", status],
                    {
                        "description": e["response"]["statusText"],
                        "content": {content_type: {"schema": schema}},
                    },
                )
            if request_bodies:
                # feed every body into one Detector; the last `info` accumulates all
                detector = schemalib.Detector()
                info = None
                for body in request_bodies:
                    info = detector.detect(body, name="")
                a.assign(d, ["requestBody"], schemalib.makeschema_from_info(info))
            if response_bodies_dict:
                for (status, content_type), bodies in response_bodies_dict.items():
                    detector = schemalib.Detector()
                    info = None
                    for body in bodies:
                        info = detector.detect(body, name="")
                    a.assign(
                        d,
                        ["responses", status, "content", content_type, "schema"],
                        schemalib.makeschema_from_info(info),
                    )
            a.assign(r, ["paths", path, method.lower()], d)
    return r
# Demo: load a YAML document and access a sub-tree by JSON pointer.
from dictknife import loading
from dictknife.pp import indent

loading.setup()

# NOTE(review): indentation of this YAML literal reconstructed -- confirm
# against the repository original.
yaml = """
definitions:
  person:
    type: object
    properties:
      name:
        type: string
      age:
        type: integer
"""
d = loading.loads(yaml, format="yaml")
with indent(2, "load data\n"):
    print(loading.dumps(d, format="yaml"))

with indent(2, "access by json pointer\n"):
    from dictknife.jsonknife import access_by_json_pointer

    q = "/definitions/person/properties"
    v = access_by_json_pointer(d, q)
    print("access : {}".format(q))
    print(loading.dumps(v, format="yaml"))

    # this is also ok(but this is json reference).
    q = "#/definitions/person/properties"
    v = access_by_json_pointer(d, q)
    print("access : {}".format(q))