def test_subdirectory_fragment():
    """The #subdirectory URL fragment must contribute to the cache key, so
    two links differing only in subdirectory get distinct cache paths."""
    cache = WheelCache("/tmp/.foo/", None)
    base = "git+https://g.c/o/r#subdirectory="
    path_d1 = cache.get_path_for_link(Link(base + "d1"))
    path_d2 = cache.get_path_for_link(Link(base + "d2"))
    assert path_d1 != path_d2
def _get_cache_dir(
    req: InstallRequirement,
    wheel_cache: WheelCache,
) -> str:
    """Return the directory where the wheel built for *req* should be stored.

    A persistent cache directory is used when the cache is configured and the
    requirement is eligible for caching; otherwise an ephemeral (temporary)
    directory is returned.
    """
    cache_available = bool(wheel_cache.cache_dir)
    assert req.link
    if cache_available and _should_cache(req):
        # Requirement qualifies for the persistent wheel cache.
        return wheel_cache.get_path_for_link(req.link)
    # Fall back to a throwaway location for this run only.
    return wheel_cache.get_ephem_path_for_link(req.link)
def test_wheel_name_filter(tmpdir):
    """
    Cache lookups must filter on wheel name when wheels for different
    projects end up in the same cache directory.
    """
    cache = WheelCache(tmpdir, FormatControl())
    link = Link("https://g.c/package.tar.gz")
    wheel_dir = cache.get_path_for_link(link)
    ensure_dir(wheel_dir)
    with open(os.path.join(wheel_dir, "package-1.0-py3-none-any.whl"), "w"):
        pass
    tags = [("py3", "none", "any")]
    # "package" matches the cached wheel's name -> cache hit (not the link).
    assert cache.get(link, "package", tags) is not link
    # "package2" does not match -> the original link is returned unchanged.
    assert cache.get(link, "package2", tags) is link
def test_get_cache_entry(tmpdir):
    """get_cache_entry distinguishes persistent, ephemeral and absent entries."""
    cache = WheelCache(tmpdir, FormatControl())

    def _populate(name, path_for_link):
        # Create a cached wheel for *name* under the directory chosen by
        # *path_for_link* and return the corresponding link.
        link = Link(f"https://g.c/o/r/{name}")
        entry_dir = path_for_link(link)
        ensure_dir(entry_dir)
        with open(os.path.join(entry_dir, f"{name}-1.0.0-py3-none-any.whl"), "w"):
            pass
        return link

    persi_link = _populate("persi", cache.get_path_for_link)
    ephem_link = _populate("ephem", cache.get_ephem_path_for_link)
    other_link = Link("https://g.c/o/r/other")
    tags = [Tag("py3", "none", "any")]
    assert cache.get_cache_entry(persi_link, "persi", tags).persistent
    assert not cache.get_cache_entry(ephem_link, "ephem", tags).persistent
    assert cache.get_cache_entry(other_link, "other", tags) is None
def test_get_path_for_link_legacy(tmpdir):
    """
    A cache entry created under the legacy path-hashing scheme must still be
    picked up alongside entries under the current scheme.
    """
    cache = WheelCache(tmpdir, FormatControl())
    link = Link("https://g.c/o/r")
    current_dir = cache.get_path_for_link(link)
    legacy_dir = cache.get_path_for_link_legacy(link)
    # The two hashing schemes must map the same link to different paths.
    assert current_dir != legacy_dir
    for directory, wheel_name in (
        (current_dir, "test-pyz-none-any.whl"),
        (legacy_dir, "test-pyx-none-any.whl"),
    ):
        ensure_dir(directory)
        with open(os.path.join(directory, wheel_name), "w"):
            pass
    # Candidates from both directories are visible.
    assert set(cache._get_candidates(link, "test")) == {
        "test-pyx-none-any.whl",
        "test-pyz-none-any.whl",
    }
def test_download_info_archive_legacy_cache(
    self, tmp_path: Path, shared_data: TestData
) -> None:
    """download_info hash stays unset for an archive resolved from a legacy
    cache entry (one without an origin.json next to the cached wheel)."""
    url = shared_data.packages.joinpath("simple-1.0.tar.gz").as_uri()
    finder = make_test_finder()
    wheel_cache = WheelCache(str(tmp_path / "cache"), FormatControl())
    entry_dir = wheel_cache.get_path_for_link(Link(url))
    Path(entry_dir).mkdir(parents=True)
    # Drop a pre-built wheel into the cache slot for this link.
    wheel.make_wheel(name="simple", version="1.0").save_to_dir(entry_dir)
    with self._basic_resolver(finder, wheel_cache=wheel_cache) as resolver:
        ireq = get_processed_req_from_line(f"simple @ {url}")
        result = resolver.resolve([ireq], True)
        assert len(result.all_requirements) == 1
        resolved = result.all_requirements[0]
        assert resolved.original_link_is_in_wheel_cache
        assert resolved.download_info
        assert resolved.download_info.url == url
        assert isinstance(resolved.download_info.info, ArchiveInfo)
        # Legacy entries carry no origin metadata, so no hash is recorded.
        assert not resolved.download_info.info.hash
def test_download_info_archive_cache_with_origin(
    self, tmp_path: Path, shared_data: TestData
) -> None:
    """Test download_info hash is set for a web archive with cache entry
    that has origin.json.

    Fix: the local variable was named ``hash``, shadowing the ``hash``
    builtin; renamed to ``expected_hash``.
    """
    url = shared_data.packages.joinpath("simple-1.0.tar.gz").as_uri()
    expected_hash = (
        "sha256=ad977496000576e1b6c41f6449a9897087ce9da6db4f15b603fe8372af4bf3c6"
    )
    finder = make_test_finder()
    wheel_cache = WheelCache(str(tmp_path / "cache"), FormatControl())
    cache_entry_dir = wheel_cache.get_path_for_link(Link(url))
    Path(cache_entry_dir).mkdir(parents=True)
    # origin.json records where the cached wheel was built from, including
    # the archive hash; the resolver should surface it in download_info.
    Path(cache_entry_dir).joinpath("origin.json").write_text(
        DirectUrl(url, ArchiveInfo(hash=expected_hash)).to_json()
    )
    wheel.make_wheel(name="simple", version="1.0").save_to_dir(cache_entry_dir)
    with self._basic_resolver(finder, wheel_cache=wheel_cache) as resolver:
        ireq = get_processed_req_from_line(f"simple @ {url}")
        reqset = resolver.resolve([ireq], True)
        assert len(reqset.all_requirements) == 1
        req = reqset.all_requirements[0]
        assert req.original_link_is_in_wheel_cache
        assert req.download_info
        assert req.download_info.url == url
        assert isinstance(req.download_info.info, ArchiveInfo)
        assert req.download_info.info.hash == expected_hash