async def test_ignore_policy(dc_cb: OnNewDirContent, file_extra_factory, ignore_symlinks: bool):
    """Scan a seeded directory tree and check ignore-policy / symlink handling.

    Seeds files under `scan_dir`, optionally symlinks `b -> a`, scans with
    `process_dir`, and asserts the resulting tree, its JSON round-trip, and
    (for the Cake factory with symlinks ignored) the exact serialized JSON.
    """
    # Seed the fixture tree; return values are unused, only the on-disk files matter.
    seed_file(scan_dir / "a" / "b", 1, 5)
    seed_file(scan_dir / "x" / "f" / "b", 1, 5)
    seed_file(scan_dir / "c" / "b", 2, 7)
    seed_file(scan_dir / "c" / "b", 1, 5)
    try:
        (scan_dir / "b").symlink_to(scan_dir / "a")
    except (OSError, NotImplementedError):
        # Best-effort: symlinks may be unavailable (e.g. Windows without
        # privileges). Was a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit — narrowed to what symlink_to raises.
        pass
    rules = DEFAULT_IGNORE_POLICY.apply(scan_dir)
    rules.ignore_symlinks = ignore_symlinks
    entry = await process_dir(scan_dir, rules, dc_cb, file_extra_factory)
    assert entry.name() == "scanning"
    extras = entry.xtra.extras
    first_level = ["a", "c", "x"]
    if not ignore_symlinks:
        first_level.insert(1, "b")
        # same symlinked directories
        assert (extras[0].xtra.extras[0].xtra.extras[0].xtra
                == extras[1].xtra.extras[0].xtra.extras[0].xtra)
        assert json_encode(to_json(extras[0].xtra.extras[0])) == json_encode(
            to_json(extras[1].xtra.extras[0]))
    assert [e.name() for e in extras] == first_level
    json = json_encode(to_json(entry))
    fe = FileExtra.from_json(json_decode(json))
    # JSON round-trip must preserve child names and order.
    assert [x.name() for x in fe.xtra] == [x.name() for x in entry.xtra.extras]
    for i in range(len(entry.xtra)):
        assert fe.xtra[i].name() == entry.xtra[i].name()
    if file_extra_factory == Cake.from_file and ignore_symlinks:
        # Strip volatile mtimes so the literal comparison below is stable.
        json = re.sub(r'"mod": "[^"]+",', "", json)
        print(json)
        assert (
            json
            == '{ "name": "scanning", "size": 22, "type": "TREE", "xtra": '
            '[{ "name": "a", "size": 5, "type": "TREE", "xtra": '
            '[{ "name": "b", "size": 5, "type": "TREE", "xtra": '
            '[{ "name": "1_5.dat", "size": 5, "type": "FILE", '
            '"xtra": "xzbg412Ar6X0AOPfFUg3nc3wSoSQ96jak07pdh7jrOH"}]}]}, '
            '{ "name": "c", "size": 12, "type": "TREE", "xtra": '
            '[{ "name": "b", "size": 12, "type": "TREE", "xtra": '
            '[{ "name": "1_5.dat", "size": 5, "type": "FILE", '
            '"xtra": "xzbg412Ar6X0AOPfFUg3nc3wSoSQ96jak07pdh7jrOH"}, '
            '{ "name": "2_7.dat", "size": 7, "type": "FILE", '
            '"xtra": "6L1wce6TqSrO61DDPCNGnMabCQxko3hyEvEd9ZCXFg0"}]}]}, '
            '{ "name": "x", "size": 5, "type": "TREE", "xtra": '
            '[{ "name": "f", "size": 5, "type": "TREE", "xtra": '
            '[{ "name": "b", "size": 5, "type": "TREE", "xtra": '
            '[{ "name": "1_5.dat", "size": 5, "type": "FILE", '
            '"xtra": "xzbg412Ar6X0AOPfFUg3nc3wSoSQ96jak07pdh7jrOH"}]}]}]}]}'
        )
def test_json():
    """HashLogic built from the plugin module serializes to the expected JSON
    and survives a to_json round-trip."""
    hash_logic = logic.HashLogic.from_module(plugin)
    serialized = str(hash_logic)
    expected = "".join([
        '{"methods": [',
        '{"in_mold": [',
        '"n:Required[hashkernel.ake:Cake]", ',
        '"i:Required[int]"], ',
        '"out_mold": [',
        '"_:Required[hashkernel.ake:Cake]"], ',
        '"ref": "hashkernel.tests.logic_test_module:fn"}, ',
        '{"in_mold": [], ',
        '"out_mold": [',
        '"name:Required[str]", ',
        '"id:Required[int]", ',
        '"x:Required[hashkernel.ake:Cake]"], ',
        '"ref": "hashkernel.tests.logic_test_module:fn2"}, ',
        '{"in_mold": [',
        '"n:Required[hashkernel.ake:Cake]", ',
        '"i:Required[int]=5"], ',
        '"out_mold": [',
        '"_:Required[hashkernel.ake:Cake]"], ',
        '"ref": "hashkernel.tests.logic_test_module:fn3"}], ',
        '"name": "hashkernel.tests.logic_test_module"}',
    ])
    assert serialized == expected
    # Rebuilding from the JSON form must yield the identical serialization.
    rebuilt = logic.HashLogic(to_json(hash_logic))
    assert str(rebuilt) == expected
def do_check(w):
    """Assert that wrapper `w` unwraps to `s` (free variable from the enclosing
    scope), serializes to the expected JsonWrap JSON, and round-trips."""
    unwrapped = w.unwrap()
    assert str(unwrapped) == s
    expected_json = (
        '{"classRef": "hashkernel.tests.smattr_tests:Abc", '
        '"json": {"name": "n", "val": 555}}'
    )
    assert str(w) == expected_json
    round_tripped = JsonWrap(to_json(w)).unwrap()
    assert str(round_tripped) == s
def __to_json__(self):
    """Serialize this entry as a JSON-ready dict (name, type, size, mod, xtra)."""
    file_info = self.file
    return dict(
        name=self.name(),
        type=file_info.type.name,
        size=file_info.size,
        mod=file_info.mod.isoformat(),
        xtra=to_json(self.xtra),
    )
async def main():
    """CLI entry point: scan the directory given as argv[1] with the default
    ignore policy and dump the resulting tree as JSON to stderr."""
    root = Path(sys.argv[1]).absolute()
    entry = await process_dir(
        root,
        DEFAULT_IGNORE_POLICY.apply(root),
        content_cb=print_dc,
        file_extra_factory=Cake.from_file,
    )
    print(json_encode(to_json(entry)), file=sys.stderr)
def test_Bundle():
    """Exercise HashRack: empty-rack cake, file-hash equivalence, add/remove
    entries, byte/JSON round-trips, and hashing semantics.

    Fixes: removed a duplicated `assert empty_rack_cake == u2` line, and
    wrapped the temp-file hashing in try/finally so the file is unlinked
    even if `Cake.from_file` raises.
    """
    b1 = HashRack()
    assert b1.content() == "[[], []]"
    empty_rack_cake = b1.cake()
    # Write the empty rack to a real file so Cake.from_file can hash it.
    with tempfile.NamedTemporaryFile("w", delete=False) as w:
        w.write(b1.content())
    b2 = HashRack().parse(b1.content())
    try:
        u_f = Cake.from_file(w.name)
    finally:
        os.unlink(w.name)
    u2 = b2.cake()
    # File hash, parsed-copy hash, and original hash must all agree.
    assert u_f == u2
    assert empty_rack_cake == u2
    b1["a"] = empty_rack_cake
    udk_bundle_str = f'[["a"], ["{empty_rack_cake}"]]'
    assert str(b1) == udk_bundle_str
    u1 = b1.cake()
    assert u1 != u2
    # Re-parse b1's serialized bytes into b2; they must now match.
    b2.parse(utf8_reader(BytesIO(bytes(b1))))
    assert str(b2) == udk_bundle_str
    assert b2.size() == 56
    u2 = b2.cake()
    assert u1 == u2
    # Deleting the only entry returns b2 to the empty-rack cake.
    del b2["a"]
    u2 = b2.cake()
    assert empty_rack_cake == u2
    assert b1["a"] == empty_rack_cake
    assert b1.get_cakes() == [empty_rack_cake]
    assert [k for k in b1] == ["a"]
    assert [k for k in b2] == []
    assert b1.get_name_by_cake(empty_rack_cake) == "a"
    # JSON round-trips preserve equality.
    assert HashRack(to_json(b1)) == b1
    assert HashRack.ensure_it(to_json(b1)) == b1
    assert len(b1) == 1
    assert str(b1) == udk_bundle_str
    assert hash(b1) == hash(udk_bundle_str)
def convert(self, v: Any, direction: Conversion, flator: Optional[Flator] = None) -> Any:
    """Convert `v` to or from its JSON form for this attribute's class.

    If a `flator` applies to `self.cls`, it is used to inflate (str -> object)
    or deflate (object -> str); otherwise falls back to `_from_json`/`to_json`.
    Any failure is re-raised with the class and value attached for context.
    """
    try:
        if (direction.needs_flator() and flator is not None
                and flator.is_applied(self.cls)):
            if direction.produces_object():
                if isinstance(v, str):
                    return flator.inflate(v, self.cls)
            else:
                if isinstance(v, self.cls):
                    return flator.deflate(v)
        if direction.produces_object():
            return self._from_json(v)
        else:
            return to_json(v)
    except Exception:
        # Was a bare `except:` — that also intercepted KeyboardInterrupt and
        # SystemExit. reraise_with_msg re-raises with added context.
        reraise_with_msg(f"{self.cls} {v}")
def test_ables():
    """Check Stringable/Jsonable protocols: bytes form, equality, and the
    to_dict/to_json/to_tuple conversions (to_dict unsupported on stringables)."""
    stringable = StringableIterable("x")
    assert bytes(stringable) == b"x"
    jsonable_z5 = JsonableExample("z", 5)
    assert bytes(jsonable_z5) == b'{"i": 5, "s": "z"}'
    jsonable_z3 = JsonableExample("z", 3)
    jsonable_z5_dup = JsonableExample("z", 5)
    # Equality is value-based: same fields equal, different fields not.
    assert jsonable_z5 == jsonable_z5_dup
    assert jsonable_z5 != jsonable_z3
    assert not (jsonable_z5 == jsonable_z3)
    assert kernel.to_dict(jsonable_z5) == kernel.to_json(jsonable_z5)
    assert kernel.to_tuple(jsonable_z5) == ("z", 5)
    assert kernel.to_tuple(stringable) == ("x",)
    # to_dict is not defined for plain stringables.
    raised = False
    try:
        kernel.to_dict(stringable)
    except NotImplementedError:
        raised = True
    assert raised
def __to_json__(self):
    """Serialize each contained extra via to_json, preserving order."""
    return list(map(to_json, self.extras))