def test_pack_unpack(tmp_path: Path, type):
    """Pack a few files, unpack the archive twice, and compare both results.

    The second ``unpack`` call must be served from the cache file that the
    first call produced, yet still return the identical mapping.
    """
    archive = str(tmp_path / f"out.{type}")

    # --- fixture files fed into pack() ---
    (tmp_path / "foo.pth").touch()
    with (tmp_path / "bar.yaml").open("w") as f:
        # I dared to stack "/" to test
        yaml.safe_dump({"a": str(tmp_path / "//foo.pth")}, f)
    (tmp_path / "a").touch()
    (tmp_path / "b").mkdir(parents=True, exist_ok=True)
    (tmp_path / "b" / "a").touch()

    pack(
        files={"abc": str(tmp_path / "foo.pth")},
        yaml_files={"def": str(tmp_path / "bar.yaml")},
        option=[tmp_path / "a", tmp_path / "b" / "a"],
        outpath=archive,
    )

    # tmp_path is absolute, so ``tmp_path / tmp_path`` collapses to tmp_path
    # itself (pathlib drops the left operand when the right one is absolute).
    expected = {
        "abc": str(tmp_path / tmp_path / "foo.pth"),
        "def": str(tmp_path / tmp_path / "bar.yaml"),
    }

    first = unpack(archive, str(tmp_path))
    # Retry unpack. If cache file exists, generate dict from it
    second = unpack(archive, str(tmp_path))

    assert first == expected
    assert second == expected
def test_pack_unpack_recursive(tmp_path: Path, type):
    """A directory given via ``option`` must be packed recursively."""
    nested = tmp_path / "a" / "b"
    nested.mkdir(parents=True)
    (nested / "foo.pth").touch()

    archive = str(tmp_path / f"out.{type}")
    pack(files={}, yaml_files={}, option=[nested], outpath=archive)
    unpack(archive, str(tmp_path))

    # ``nested`` is absolute, so ``tmp_path / nested`` collapses to ``nested``;
    # the file must have been restored in place after unpacking.
    assert (tmp_path / nested / "foo.pth").exists()
def test_pack_unpack(tmp_path: Path):
    """Round-trip pack/unpack into a ``packed/`` layout.

    NOTE(review): this redefines ``test_pack_unpack`` from above with a
    different signature — if both live in one module, this one shadows the
    first; they likely belong to different versions of the suite.
    """
    archive = str(tmp_path / "out.tgz")

    # --- fixture files fed into pack() ---
    (tmp_path / "foo.pth").touch()
    with (tmp_path / "bar.yaml").open("w") as f:
        # I dared to stack "/" to test
        yaml.safe_dump({"a": str(tmp_path / "//foo.pth")}, f)
    (tmp_path / "a").touch()
    (tmp_path / "b").mkdir(parents=True, exist_ok=True)
    (tmp_path / "b" / "a").touch()

    pack(
        files={"abc.pth": str(tmp_path / "foo.pth")},
        yaml_files={"def.yaml": str(tmp_path / "bar.yaml")},
        option=[tmp_path / "a", tmp_path / "b" / "a"],
        outpath=archive,
    )

    result = unpack(archive, str(tmp_path))
    packed = tmp_path / "packed"
    # Keys come back without their extensions; clashing "option" basenames
    # are disambiguated with a ".1" suffix.
    assert result == {
        "abc": str(packed / "abc.pth"),
        "def": str(packed / "def.yaml"),
        "option": [
            str(packed / "option" / "a"),
            str(packed / "option" / "a.1"),
        ],
        "meta": str(packed / "meta.yaml"),
    }
def unpack_local_file(self, name: str = None
                      ) -> Dict[str, Union[str, List[str]]]:
    """Expand a local model archive into the cache directory.

    The archive is symlinked into ``<cachedir>/<hash-of-path>/`` and
    unpacked there; a previously written ``meta.yaml`` short-circuits the
    extraction on later calls.

    NOTE(review): the annotation says ``str`` but the default is ``None``;
    passing ``None`` raises TypeError from ``Path(None)``, not the
    FileNotFoundError below — confirm callers always pass a path.
    """
    if not Path(name).exists():
        raise FileNotFoundError(f"No such file or directory: {name}")
    # The cache key is derived from the absolute path, so moving the file
    # changes its identity — warn the caller about that.
    warnings.warn("Expanding a local model to the cachedir. "
                  "If you'll move the file to another path, "
                  "it's treated as a different model.")
    # Rebind ``name`` to an absolute Path; from here on it is a Path,
    # not a str (str_to_hash receives the Path — presumably it stringifies;
    # TODO confirm).
    name = Path(name).absolute()
    outdir = self.cachedir / str_to_hash(name)
    filename = outdir / name.name
    outdir.mkdir(parents=True, exist_ok=True)
    if not filename.exists():
        # ``exists()`` follows symlinks, so a *broken* symlink lands here:
        # remove it before recreating the link to the real archive.
        if filename.is_symlink():
            filename.unlink()
        filename.symlink_to(name)

    # Skip unpacking if the cache exists
    meta_yaml = outdir / "meta.yaml"
    if meta_yaml.exists():
        info = get_dict_from_cache(meta_yaml)
        # ``None`` means the cache file was unusable — fall through and
        # re-extract instead of returning it.
        if info is not None:
            return info
    # Extract files from archived file
    return unpack(filename, outdir)
def download_and_unpack(self, name: str = None, version: int = -1,
                        **kwargs: str) -> Dict[str, Union[str, List[str]]]:
    """Resolve *name* to a URL, fetch the archive, and unpack it.

    A local, existing path is delegated to ``unpack_local_file``; an
    already-populated cache directory (detected via its ``meta.yaml``)
    short-circuits both the download and the extraction.
    """
    url = self.get_url(name=name, version=version, **kwargs)

    # A resolvable local path needs no download at all.
    if not is_url(url) and Path(url).exists():
        return self.unpack_local_file(url)

    # Hashing the URL gives each archive a unique cache directory.
    cache_dir = self.cachedir / str_to_hash(url)

    # Cache hit: reuse the previously generated mapping.
    cached_meta = cache_dir / "meta.yaml"
    if cached_meta.exists():
        cached = get_dict_from_cache(cached_meta)
        if cached is not None:
            return cached

    # Cache miss: download to a unique path, then extract into the cache.
    archive = self.download(url)
    return unpack(archive, cache_dir)
def test_unpack_no_meta_yaml(tmp_path: Path):
    """unpack() must raise RuntimeError on an archive lacking meta.yaml."""
    empty_archive = tmp_path / "a.tgz"
    # Create a valid but completely empty gzip'd tarball.
    tarfile.open(empty_archive, "w:gz").close()
    with pytest.raises(RuntimeError):
        unpack(str(empty_archive), "out")