def test_unsupported_hashes(self, data):
    """VCS and dir links should raise errors when --require-hashes is on.

    In addition, complaints about the type of requirement (VCS or
    dir) should trump the presence or absence of a hash.
    """
    reqset = RequirementSet()
    reqset.add_requirement(
        get_processed_req_from_line(
            'git+git://github.com/pypa/pip-test-package --hash=sha256:123',
            lineno=1,
        ))
    dir_path = data.packages.joinpath('FSPkg')
    reqset.add_requirement(
        get_processed_req_from_line(
            'file://%s' % (dir_path, ),
            lineno=2,
        ))
    finder = make_test_finder(find_links=[data.find_links])

    sep = os.path.sep
    if sep == '\\':
        sep = '\\\\'  # This needs to be escaped for the regex

    with self._basic_resolver(finder, require_hashes=True) as resolver:
        assert_raises_regexp(
            HashErrors,
            r"Can't verify hashes for these requirements because we don't "
            r"have a way to hash version control repositories:\n"
            r" git\+git://github\.com/pypa/pip-test-package \(from -r "
            r"file \(line 1\)\)\n"
            r"Can't verify hashes for these file:// requirements because "
            r"they point to directories:\n"
            r" file://.*{sep}data{sep}packages{sep}FSPkg "
            r"\(from -r file \(line 2\)\)".format(sep=sep),
            resolver.resolve,
            # Consistency fix: sibling resolver tests in this file call
            # resolver.resolve(reqset.all_requirements, True); passing the
            # bare RequirementSet used an outdated resolve() signature.
            reqset.all_requirements,
            True,
        )
def test_no_reuse_existing_build_dir(self, data: TestData) -> None:
    """Test prepare_files raise exception with previous build dir"""
    build_dir = os.path.join(self.tempdir, "build", "simple")
    os.makedirs(build_dir)
    # A leftover setup.py marks this directory as a stale previous build.
    with open(os.path.join(build_dir, "setup.py"), "w"):
        pass

    reqset = RequirementSet()
    req = install_req_from_line("simple")
    req.user_supplied = True
    reqset.add_requirement(req)

    finder = make_test_finder(find_links=[data.find_links])

    expected = r"pip can't proceed with [\s\S]*{req}[\s\S]*{build_dir_esc}".format(
        build_dir_esc=build_dir.replace("\\", "\\\\"), req=req
    )
    with self._basic_resolver(finder) as resolver:
        with pytest.raises(PreviousBuildDirError, match=expected):
            resolver.resolve(reqset.all_requirements, True)
def test_hash_mismatch(self, data):
    """A hash mismatch should raise an error."""
    url = path_to_url((data.packages / 'simple-1.0.tar.gz').resolve())
    # Pin the file to a deliberately wrong sha256.
    requirement = get_processed_req_from_line(
        '{} --hash=sha256:badbad'.format(url),
        lineno=1,
    )
    reqset = RequirementSet()
    reqset.add_requirement(requirement)
    finder = make_test_finder(find_links=[data.find_links])
    with self._basic_resolver(finder, require_hashes=True) as resolver:
        assert_raises_regexp(
            HashErrors,
            r'THESE PACKAGES DO NOT MATCH THE HASHES.*\n'
            r' file:///.*/data/packages/simple-1\.0\.tar\.gz .*:\n'
            r' Expected sha256 badbad\n'
            r' Got 393043e672415891885c9a2a0929b1af95fb'
            r'866d6ca016b42d2e6ce53619b653$',
            resolver.resolve,
            reqset.all_requirements,
            True,
        )
def make_test_resolver(
    monkeypatch: pytest.MonkeyPatch,
    mock_candidates: List[InstallationCandidate],
) -> Resolver:
    """Build a Resolver whose finder always yields *mock_candidates*."""
    finder = make_test_finder()
    # Every candidate lookup, regardless of project name, returns the
    # canned candidate list.
    monkeypatch.setattr(
        finder, "find_all_candidates", lambda project_name: mock_candidates
    )
    return Resolver(
        finder=finder,
        preparer=mock.Mock(),  # Not used.
        make_install_req=install_req_from_line,
        wheel_cache=None,
        use_user_site=False,
        force_reinstall=False,
        ignore_dependencies=False,
        ignore_installed=False,
        ignore_requires_python=False,
        upgrade_strategy="to-satisfy-only",
    )
def test_unhashed_deps_on_require_hashes(self, data):
    """Make sure unhashed, unpinned, or otherwise unrepeatable
    dependencies get complained about when --require-hashes is on."""
    finder = make_test_finder(find_links=[data.find_links])
    # TopoRequires2 is itself hashed and pinned, but it requires
    # TopoRequires, which is not.
    requirement = get_processed_req_from_line(
        'TopoRequires2==0.0.1 '  # requires TopoRequires
        '--hash=sha256:eaf9a01242c9f2f42cf2bd82a6a848cd'
        'e3591d14f7896bdbefcf48543720c970',
        lineno=1)
    reqset = RequirementSet()
    reqset.add_requirement(requirement)

    with self._basic_resolver(finder, require_hashes=True) as resolver:
        assert_raises_regexp(
            HashErrors,
            r'In --require-hashes mode, all requirements must have their '
            r'versions pinned.*\n'
            r' TopoRequires from .*$',
            resolver.resolve,
            reqset.all_requirements,
            True,
        )
def test_no_reuse_existing_build_dir(self, data):
    """Test prepare_files raise exception with previous build dir"""
    build_dir = os.path.join(self.tempdir, 'build', 'simple')
    os.makedirs(build_dir)
    # An existing setup.py marks the directory as a stale previous build.
    with open(os.path.join(build_dir, "setup.py"), 'w'):
        pass

    req = install_req_from_line('simple')
    req.user_supplied = True
    reqset = RequirementSet()
    reqset.add_requirement(req)

    finder = make_test_finder(find_links=[data.find_links])

    pattern = (
        r"pip can't proceed with [\s\S]*{req}[\s\S]*{build_dir_esc}"
        .format(build_dir_esc=build_dir.replace('\\', '\\\\'), req=req)
    )
    with self._basic_resolver(finder) as resolver:
        assert_raises_regexp(
            PreviousBuildDirError,
            pattern,
            resolver.resolve,
            reqset.all_requirements,
            True,
        )
def test_missing_hash_with_require_hashes(self, data):
    """Setting --require-hashes explicitly should raise errors if hashes
    are missing.
    """
    # A pinned requirement with no --hash option at all.
    requirement = get_processed_req_from_line(
        'simple==1.0', lineno=1
    )
    reqset = RequirementSet()
    reqset.add_requirement(requirement)

    finder = make_test_finder(find_links=[data.find_links])

    with self._basic_resolver(finder, require_hashes=True) as resolver:
        assert_raises_regexp(
            HashErrors,
            r'Hashes are required in --require-hashes mode, but they are '
            r'missing .*\n'
            r' simple==1.0 --hash=sha256:393043e672415891885c9a2a0929b1'
            r'af95fb866d6ca016b42d2e6ce53619b653$',
            resolver.resolve,
            reqset.all_requirements,
            True,
        )
def test_unhashed_deps_on_require_hashes(self, data: TestData) -> None:
    """Make sure unhashed, unpinned, or otherwise unrepeatable
    dependencies get complained about when --require-hashes is on."""
    finder = make_test_finder(find_links=[data.find_links])
    # TopoRequires2 is hashed and pinned, but its dependency
    # TopoRequires is not.
    requirement = get_processed_req_from_line(
        "TopoRequires2==0.0.1 "  # requires TopoRequires
        "--hash=sha256:eaf9a01242c9f2f42cf2bd82a6a848cd"
        "e3591d14f7896bdbefcf48543720c970",
        lineno=1,
    )
    reqset = RequirementSet()
    reqset.add_requirement(requirement)

    expected = (
        r"In --require-hashes mode, all requirements must have their "
        r"versions pinned.*\n"
        r" TopoRequires from .*$"
    )
    with self._basic_resolver(finder, require_hashes=True) as resolver:
        with pytest.raises(HashErrors, match=expected):
            resolver.resolve(reqset.all_requirements, True)
def test_finder_only_installs_data_require(data):
    """
    Test whether the PackageFinder understand data-python-requires

    This can optionally be exposed by a simple-repository to tell which
    distribution are compatible with which version of Python by adding a
    data-python-require to the anchor links.

    See pep 503 for more information.
    """
    # using a local index (that has pre & dev releases)
    finder = make_test_finder(index_urls=[data.index_url("datarequire")])
    links = finder.find_all_candidates("fakepackage")

    # These two releases have no python-requires restriction.
    expected = {'1.0.0', '9.9.9'}
    # The remaining candidates are gated on the interpreter version.
    if (2, 7) < sys.version_info < (3, ):
        expected.add('2.7.0')
    elif sys.version_info > (3, 3):
        expected.add('3.3.0')

    assert {str(v.version) for v in links} == expected
def test_hash_mismatch(self, data: TestData) -> None:
    """A hash mismatch should raise an error."""
    url = data.packages.joinpath("simple-1.0.tar.gz").resolve().as_uri()
    # Pin the archive to a deliberately wrong sha256.
    requirement = get_processed_req_from_line(
        f"{url} --hash=sha256:badbad",
        lineno=1,
    )
    reqset = RequirementSet()
    reqset.add_unnamed_requirement(requirement)

    finder = make_test_finder(find_links=[data.find_links])

    expected = (
        r"THESE PACKAGES DO NOT MATCH THE HASHES.*\n"
        r" file:///.*/data/packages/simple-1\.0\.tar\.gz .*:\n"
        r" Expected sha256 badbad\n"
        r" Got 393043e672415891885c9a2a0929b1af95fb"
        r"866d6ca016b42d2e6ce53619b653$"
    )
    with self._basic_resolver(finder, require_hashes=True) as resolver:
        with pytest.raises(HashErrors, match=expected):
            resolver.resolve(reqset.all_requirements, True)
def test_link_sorting_wheels_with_build_tags(self):
    """Verify build tags affect sorting."""
    # Listed best-first: the build-tagged 2.0 wheel outranks the plain one.
    links = [
        InstallationCandidate("simplewheel", version, Link(filename))
        for version, filename in [
            ("2.0", "simplewheel-2.0-1-py2.py3-none-any.whl"),
            ("2.0", "simplewheel-2.0-py2.py3-none-any.whl"),
            ("1.0", "simplewheel-1.0-py2.py3-none-any.whl"),
        ]
    ]
    finder = make_test_finder()
    sort_key = finder.candidate_evaluator._sort_key
    # Sorting must reproduce the best-first order from either direction.
    results = sorted(links, key=sort_key, reverse=True)
    results2 = sorted(reversed(links), key=sort_key, reverse=True)

    assert links == results == results2, results2
def test_add_trusted_host__logging(self, caplog):
    """
    Test logging when add_trusted_host() is called.
    """
    trusted_hosts = ['host1']
    session = PipSession(insecure_hosts=trusted_hosts)
    finder = make_test_finder(
        session=session,
        trusted_hosts=trusted_hosts,
    )
    with caplog.at_level(logging.INFO):
        # Test adding an existing host.
        finder.add_trusted_host('host1', source='somewhere')
        finder.add_trusted_host('host2')
        # Test calling add_trusted_host() on the same host twice.
        finder.add_trusted_host('host2')

    expected = [
        ('INFO', "adding trusted host: 'host1' (from somewhere)"),
        ('INFO', "adding trusted host: 'host2'"),
        ('INFO', "adding trusted host: 'host2'"),
    ]
    actual = [(record.levelname, record.message) for record in caplog.records]
    assert actual == expected
def test_download_info_archive_cache_with_origin(
        self, tmp_path: Path, shared_data: TestData) -> None:
    """Test download_info hash is set for a web archive with cache entry
    that has origin.json."""
    url = shared_data.packages.joinpath("simple-1.0.tar.gz").as_uri()
    # Renamed from `hash`: the original shadowed the builtin hash().
    expected_hash = (
        "sha256=ad977496000576e1b6c41f6449a9897087ce9da6db4f15b603fe8372af4bf3c6"
    )
    finder = make_test_finder()
    wheel_cache = WheelCache(str(tmp_path / "cache"), FormatControl())
    # Pre-populate the wheel cache entry for this link, including an
    # origin.json recording the archive's direct URL and hash.
    cache_entry_dir = wheel_cache.get_path_for_link(Link(url))
    Path(cache_entry_dir).mkdir(parents=True)
    Path(cache_entry_dir).joinpath("origin.json").write_text(
        DirectUrl(url, ArchiveInfo(hash=expected_hash)).to_json())
    wheel.make_wheel(name="simple", version="1.0").save_to_dir(cache_entry_dir)
    with self._basic_resolver(finder, wheel_cache=wheel_cache) as resolver:
        ireq = get_processed_req_from_line(f"simple @ {url}")
        reqset = resolver.resolve([ireq], True)
        assert len(reqset.all_requirements) == 1
        req = reqset.all_requirements[0]
        # The resolved requirement must be served from the cache and carry
        # the download_info recorded in origin.json.
        assert req.original_link_is_in_wheel_cache
        assert req.download_info
        assert req.download_info.url == url
        assert isinstance(req.download_info.info, ArchiveInfo)
        assert req.download_info.info.hash == expected_hash
def test_finder_only_installs_stable_releases(data):
    """
    Test PackageFinder only accepts stable versioned releases by default.
    """

    req = install_req_from_line("bar", None)

    # using a local index (that has pre & dev releases)
    finder = make_test_finder(index_urls=[data.index_url("pre")])
    found = finder.find_requirement(req, False)
    assert found.link.url.endswith("bar-1.0.tar.gz"), found.link.url

    # using find-links: the stable release wins regardless of link order
    links = ["https://foo/bar-1.0.tar.gz", "https://foo/bar-2.0b1.tar.gz"]
    for ordering in (links, list(reversed(links))):
        finder = make_no_network_finder(ordering)
        found = finder.find_requirement(req, False)
        assert found.link.url == "https://foo/bar-1.0.tar.gz"
def finder(session: PipSession) -> PackageFinder:
    """Fixture: a PackageFinder bound to the provided session."""
    return make_test_finder(session=session)
def test_incorrect_case_file_index(data):
    """Test PackageFinder detects latest using wrong case"""
    # Requirement uses lowercase; the index lists the project as "Dinner".
    requirement = install_req_from_line('dinner', None)
    finder = make_test_finder(index_urls=[data.find_links3])
    result = finder.find_requirement(requirement, False)
    assert result.link.url.endswith("Dinner-2.0.tar.gz")
def test_finder_detects_latest_find_links(data):
    """Test PackageFinder detects latest using find-links"""
    requirement = install_req_from_line('simple', None)
    finder = make_test_finder(find_links=[data.find_links])
    result = finder.find_requirement(requirement, False)
    # 3.0 is the newest of the simple-{1,2,3}.0 archives in find-links.
    assert result.link.url.endswith("simple-3.0.tar.gz")
def test_find_all_candidates_index(data):
    # The local "simple" index exposes exactly one release.
    finder = make_test_finder(index_urls=[data.index_url('simple')])
    candidates = finder.find_all_candidates('simple')
    found_versions = [str(candidate.version) for candidate in candidates]
    assert found_versions == ['1.0']
def test_find_all_candidates_find_links(data):
    # find-links yields every release, newest first.
    finder = make_test_finder(find_links=[data.find_links])
    candidates = finder.find_all_candidates('simple')
    found_versions = [str(candidate.version) for candidate in candidates]
    assert found_versions == ['3.0', '2.0', '1.0']
def test_find_all_candidates_nothing():
    """Find nothing without anything"""
    # No index URLs and no find-links: there is nowhere to find candidates.
    finder = make_test_finder()
    candidates = finder.find_all_candidates('pip')
    assert not candidates
def test_find_all_candidates_find_links(data: TestData) -> None:
    # find-links yields every release, newest first.
    finder = make_test_finder(find_links=[data.find_links])
    candidates = finder.find_all_candidates("simple")
    found_versions = [str(candidate.version) for candidate in candidates]
    assert found_versions == ["3.0", "2.0", "1.0"]
def test_find_all_candidates_index(data: TestData) -> None:
    # The local "simple" index exposes exactly one release.
    finder = make_test_finder(index_urls=[data.index_url("simple")])
    candidates = finder.find_all_candidates("simple")
    found_versions = [str(candidate.version) for candidate in candidates]
    assert found_versions == ["1.0"]
def test_build_env_allow_empty_requirements_install() -> None:
    """Installing an empty requirement list must succeed for both prefixes."""
    finder = make_test_finder()
    env = BuildEnvironment()
    for prefix in ("normal", "overlay"):
        env.install_requirements(
            finder, [], prefix, "Installing build dependencies"
        )
def finder(session):
    """Fixture: a PackageFinder bound to the provided session."""
    return make_test_finder(session=session)
def test_secure_origin(location, trusted, expected):
    # Validate the origin against the trusted-host list and check what
    # the finder logged through our mock.
    finder = make_test_finder(trusted_hosts=trusted)
    mock_logger = MockLogger()
    finder._validate_secure_origin(mock_logger, location)
    assert mock_logger.called == expected