def _transfer_to_fridge(resource, url, refill_behavior):
    """Download any kind of package into the fridge cache directory.

    :param resource: resource kind: "pypi" (resolved via PackageFinder) or a
        direct "https"/"http"/"ftp" URL.
    :param url: package name (for "pypi") or download URL.
    :param refill_behavior: one of RB_ABORT / RB_IGNORE / RB_OVERWRITE /
        RB_UPDATE; controls what happens when the frozen file already exists
        (see refill_fridge).
    """
    targetpath = os.path.join(_fridgedir(), _frozenname(resource, url))
    # Pre-download check: abort or skip early so we do not fetch needlessly.
    if os.path.exists(targetpath):
        if refill_behavior == RB_ABORT:
            abort("could not refill fridge with %s %s , because target already exists and if_exists_abort = True" % (resource, url))
        elif refill_behavior == RB_IGNORE:
            warn("did not refresh fridge with %s %s , because target already exists and reload_existing = False" % (resource, url))
            return
    if resource == "pypi":
        # Resolve the package name to a concrete download URL on PyPI.
        req = InstallRequirement.from_line(url)
        pf = PackageFinder([], ['http://pypi.python.org/simple'])
        url = pf.find_requirement(req, False).url
    elif resource in ['https', 'http', 'ftp']:
        pass  # url is already a direct download location
    filename, headers = urllib.urlretrieve(url)
    if os.path.exists(targetpath):
        if refill_behavior == RB_OVERWRITE:
            warn("%s %s already exist and will get overwritten because reload_behavior = %s" % (resource, url, RB_OVERWRITE))
        elif refill_behavior == RB_UPDATE:
            # Only replace the frozen file when the download actually differs.
            # BUG FIX: use context managers so both file handles are closed.
            with open(filename, "rb") as new_f:
                new_bytes = new_f.read()
            with open(targetpath, "rb") as old_f:
                old_bytes = old_f.read()
            if new_bytes == old_bytes:
                print ("files %s %s identical, skipped" % (filename, targetpath))
                return
            warn("%s %s already exist and will get overwritten because its different to original and reload_behavior = %s" % (resource, url, RB_UPDATE))
    # BUG FIX: the move must run after the overwrite/update warnings; the
    # previous early return made RB_OVERWRITE/RB_UPDATE warn that the file
    # "will get overwritten" without ever replacing it.
    shutil.move(filename, targetpath)
def test_sort_locations_file_find_link():
    """A file:// find-links directory should be expanded via listdir."""
    pf = PackageFinder([find_links], [])
    file_results, url_results = pf._sort_locations([find_links])
    assert file_results and not url_results, \
        "files and not urls should have been found at find-links url: %s" % find_links
def test_no_mpkg():
    """The finder skips zipfiles whose names contain "macosx10"."""
    requirement = InstallRequirement.from_line("pkgwithmpkg")
    result = PackageFinder([find_links], []).find_requirement(requirement, False)
    assert result.url.endswith("pkgwithmpkg-1.0.tar.gz"), result
def fetch_generate_page(dir, package): finder = PackageFinder([], []) # these args are not really used req = InstallRequirement(package, None) page_versions = [] out_html = [] for p in finder._get_pages([Link("%s/%s" % (PYPI_URL, package))], req): page_versions.extend(finder._package_versions(p.links, req.name.lower())) if not page_versions: # nothing found - maybe no such package return '' seen_archive_names = set() for _parsed_version, link, version in page_versions: archive_name, real_url, fragment = parse_link_url(link.url) archive_name = alt_filename(archive_name, seen_archive_names) with open("%s/%s.url" % (dir, archive_name), "w") as f: f.write(real_url) if fragment: fragment = "#%s" % fragment out_html.append('<a href="%s%s">%s</a>' % (archive_name, fragment, version)) return "<html><head><title>Links for %s</title></head><body><h1>Links for %s</h1>%s</body></html>" % ( package, package, '\n'.join(out_html))
def pip_package_versions(index, package):
    """Return PipPackage entries for *package* on *index*, sorted by version.

    Ties on version are broken by archive-type preference:
    .tar.gz before .zip before .tar.bz2; anything else sorts last.
    """
    # BUG FIX: (':all:') is just the string ':all:' (missing trailing comma);
    # FormatControl membership tests then do substring matching.  Pass a real
    # one-element tuple instead.
    format_control = FormatControl(no_binary=(':all:',), only_binary=())
    session = PipSession()
    finder = PackageFinder([], [index], format_control=format_control,
                           session=session, allow_external=[package],
                           allow_unverified=[package])
    # Preference ranking for archive extensions when versions tie.
    ext_rank = {'.tar.gz': 1, '.zip': 2, '.tar.bz2': 3}
    return sorted(
        (PipPackage(str(pv.version), pv.location)
         for pv in finder._find_all_versions(package)),  # pylint: disable=protected-access
        key=lambda pp: (pp.version, ext_rank.get(pp.link.ext, 10000)))
def test_no_partial_name_match():
    """Only a full project-name match is accepted, never a mere prefix."""
    requirement = InstallRequirement.from_line("gmpy")
    result = PackageFinder([find_links], []).find_requirement(requirement, False)
    assert result.url.endswith("gmpy-1.15.tar.gz"), result
def test_finder_priority_page_over_deplink():
    """Page links are preferred over equivalent dependency links."""
    requirement = InstallRequirement.from_line('gmpy==1.15', None)
    pf = PackageFinder([], ["http://pypi.python.org/simple"])
    pf.add_dependency_links(['http://c.pypi.python.org/simple/gmpy/'])
    result = pf.find_requirement(requirement, False)
    assert result.url.startswith("http://pypi")
def test_sort_locations_file_not_find_link(data):
    """
    Test that a file:// url dir that's not a find-link, doesn't get a
    listdir run
    """
    finder = PackageFinder([], [])
    # BUG FIX: _sort_locations expects a list of locations (see the sibling
    # tests); passing the bare string would iterate it character by character.
    files, urls = finder._sort_locations([data.index_url("empty_with_pkg")])
    assert urls and not files, "urls, but not files should have been found"
def latest_version(req, session, include_prereleases=False):
    """Returns a Version instance with the latest version for the package.

    :param req: Instance of pip.req.req_install.InstallRequirement.
    :param session: Instance of pip.download.PipSession.
    :param include_prereleases: Include prereleased beta versions.
    """
    if not req:  # pragma: nocover
        return None
    finder = PackageFinder(session=session, find_links=[],
                           index_urls=[PyPI.simple_url])
    candidates = finder.find_all_candidates(req.name)
    if not include_prereleases:
        candidates = [c for c in candidates if not c.version.is_prerelease]
    if not candidates:
        return None
    # Rank candidates exactly the way pip itself would.
    best = max(candidates, key=finder._candidate_sort_key)
    return best.version
def test_finder_only_installs_data_require(data):
    """
    Test whether the PackageFinder understand data-python-requires

    This can optionally be exposed by a simple-repository to tell which
    distribution are compatible with which version of Python by adding a
    data-python-require to the anchor links. See pep 503 for more
    informations.
    """
    # using a local index (that has pre & dev releases)
    pf = PackageFinder([], [data.index_url("datarequire")],
                       session=PipSession())
    candidates = pf.find_all_candidates("fakepackage")
    expected = ['1.0.0', '9.9.9']
    if sys.version_info < (2, 7):
        expected.append('2.6.0')
    elif (2, 7) < sys.version_info < (3,):
        expected.append('2.7.0')
    elif sys.version_info > (3, 3):
        expected.append('3.3.0')
    assert set(str(c.version) for c in candidates) == set(expected)
def test_link_sorting(self):
    """_sort_versions must produce the same order regardless of input order."""
    candidates = [
        InstallationCandidate("simple", "2.0", Link(Inf)),
        InstallationCandidate("simple", "2.0", Link('simple-2.0.tar.gz')),
        InstallationCandidate("simple", "1.0",
                              Link('simple-1.0-pyT-none-TEST.whl')),
        InstallationCandidate("simple", '1.0',
                              Link('simple-1.0-pyT-TEST-any.whl')),
        InstallationCandidate("simple", '1.0',
                              Link('simple-1.0-pyT-none-any.whl')),
        InstallationCandidate("simple", '1.0', Link('simple-1.0.tar.gz')),
    ]
    pf = PackageFinder([], [], session=PipSession())
    sorted_once = pf._sort_versions(candidates)
    sorted_from_reversed = pf._sort_versions(reversed(candidates))
    # The list above is already in sorted order; both passes reproduce it.
    assert candidates == sorted_once == sorted_from_reversed, sorted_from_reversed
def test_get_index_urls_locations():
    """The canonical (lower-cased, dashed) name is applied to every index."""
    pf = PackageFinder(
        [], ['file://index1/', 'file://index2'], session=PipSession())
    name = InstallRequirement.from_line('Complex_Name').name
    assert pf._get_index_urls_locations(name) == [
        'file://index1/complex-name/',
        'file://index2/complex-name/',
    ]
def test_finder_no_raises_error(self, monkeypatch):
    """
    Test the PackageFinder doesn't raises an error when use_wheel is False,
    and wheel is supported
    """
    monkeypatch.setattr('pkg_resources.DistInfoDistribution', True)
    # Neither construction path should raise: use_wheel=False up front...
    finder = PackageFinder([], [], use_wheel=False)
    # ...nor disabling it after a default construction.
    finder = PackageFinder([], [])
    finder.use_wheel = False
def test_tilde(data):
    """Finder can accept a path with ~ in it and will normalize it."""
    session = PipSession()
    # Pretend the expanded directory exists so the finder accepts the path.
    with patch('pip.index.os.path.exists', return_value=True):
        pf = PackageFinder(['~/python-pkgs'], [], session=session)
        requirement = InstallRequirement.from_line("gmpy")
        with pytest.raises(DistributionNotFound):
            pf.find_requirement(requirement, False)
def test_finder_no_raises_error(self, mock_get_distribution):
    """
    Test the PackageFinder doesn't raises an error when use_wheel is False,
    and wheel is supported
    """
    # Simulate a setuptools new enough to support wheels.
    mock_get_distribution.return_value = pkg_resources.Distribution(
        project_name='setuptools', version='0.9')
    finder = PackageFinder([], [], use_wheel=False)
    finder = PackageFinder([], [])
    finder.use_wheel = False
def test_find_wheel_supported(self):
    """A supported wheel is located."""
    requirement = InstallRequirement.from_line("simple.dist")
    pf = PackageFinder([find_links], [], use_wheel=True)
    result = pf.find_requirement(requirement, True)
    assert result.url.endswith("simple.dist-0.1-py2.py3-none-any.whl"), result
def test_duplicates_sort_ok():
    """One of a set of duplicates spread over several locations is found."""
    requirement = InstallRequirement.from_line("duplicate")
    pf = PackageFinder([find_links, find_links2], [])
    result = pf.find_requirement(requirement, False)
    assert result.url.endswith("duplicate-1.0.tar.gz"), result
def test_sort_locations_non_existing_path():
    """A non-existing path is silently ignored."""
    pf = PackageFinder([], [], session=PipSession())
    missing = os.path.join('this', 'doesnt', 'exist')
    file_results, url_results = pf._sort_locations([missing])
    assert not url_results and not file_results, "nothing should have been found"
def test_sort_locations_file_not_find_link():
    """A file:// url dir that is not a find-link must not get a listdir run."""
    index_url = path_to_url(
        os.path.join(tests_data, 'indexes', 'empty_with_pkg'))
    file_results, url_results = PackageFinder([], [])._sort_locations([index_url])
    assert url_results and not file_results, "urls, but not files should have been found"
def test_sort_locations_file_find_link():
    """A file:// find-link dir gets expanded via listdir."""
    find_links_url = path_to_url(os.path.join(here, 'packages'))
    find_links = [find_links_url]
    file_results, url_results = PackageFinder(find_links, [])._sort_locations(find_links)
    assert file_results and not url_results, \
        "files and not urls should have been found at find-links url: %s" % find_links_url
def test_wheel_over_sdist_priority(self):
    """
    Test wheels have priority over sdists.
    `test_link_sorting` also covers this at lower level
    """
    requirement = InstallRequirement.from_line("priority")
    pf = PackageFinder([find_links], [], use_wheel=True)
    result = pf.find_requirement(requirement, True)
    assert result.url.endswith("priority-1.0-py2.py3-none-any.whl"), result
def test_finder_priority_file_over_page(data):
    """File links win over equivalent page links."""
    requirement = InstallRequirement.from_line('gmpy==1.15', None)
    pf = PackageFinder(
        [data.find_links],
        ["http://pypi.python.org/simple"],
    )
    result = pf.find_requirement(requirement, False)
    assert result.url.startswith("file://")
def test_find_wheel_supported(self, data, monkeypatch):
    """A wheel matching the supported tags is found."""
    # Force a deterministic tag set regardless of the running interpreter.
    monkeypatch.setattr(pip.pep425tags, "supported_tags",
                        [('py2', 'none', 'any')])
    requirement = InstallRequirement.from_line("simple.dist")
    pf = PackageFinder([data.find_links], [], use_wheel=True)
    result = pf.find_requirement(requirement, True)
    assert result.url.endswith("simple.dist-0.1-py2.py3-none-any.whl"), result
def test_finder_ignores_external_links(data):
    """External links are ignored, hashed or not."""
    requirement = InstallRequirement.from_line("bar", None)
    # using a local index
    pf = PackageFinder([], [data.index_url("externals")])
    result = pf.find_requirement(requirement, False)
    assert result.filename == "bar-1.0.tar.gz"
def test_sort_locations_file_expand_dir(data):
    """A file:// dir gets a listdir run when expand_dir is requested."""
    pf = PackageFinder([data.find_links], [], session=PipSession())
    file_results, url_results = pf._sort_locations(
        [data.find_links], expand_dir=True)
    assert file_results and not url_results, (
        "files and not urls should have been found at find-links url: %s"
        % data.find_links
    )
def test_finder_ignores_external_links():
    """External links are ignored, with or without hashes."""
    requirement = InstallRequirement.from_line("bar", None)
    # using a local index
    index_url = path_to_url(os.path.join(tests_data, "indexes", "externals"))
    result = PackageFinder([], [index_url]).find_requirement(requirement, False)
    assert result.filename == "bar-1.0.tar.gz"
def test_finder_finds_external_links_with_hashes_all(data):
    """With allow_all_external, hashed external links are found."""
    requirement = InstallRequirement.from_line("bar", None)
    # using a local index
    pf = PackageFinder([], [data.index_url("externals")],
                       allow_all_external=True)
    result = pf.find_requirement(requirement, False)
    assert result.filename == "bar-2.0.tar.gz"
def test_link_sorting_raises_when_wheel_unsupported(self):
    """_sort_versions rejects candidates whose wheel tags are unsupported."""
    candidates = [
        InstallationCandidate(
            "simple", '1.0',
            Link('simple-1.0-py2.py3-none-TEST.whl'),
        ),
    ]
    pf = PackageFinder([], [], session=PipSession())
    with pytest.raises(InstallationError):
        pf._sort_versions(candidates)
def test_finder_installs_dev_releases(data):
    """Dev releases are picked up when the requirement asks for them."""
    requirement = InstallRequirement.from_line("bar", None, prereleases=True)
    # using a local index (that has dev releases)
    pf = PackageFinder([], [data.index_url("dev")])
    result = pf.find_requirement(requirement, False)
    assert result.url.endswith("bar-2.0.dev1.tar.gz"), result.url
def test_link_sorting_raises_when_wheel_unsupported(self):
    """_sort_versions rejects link tuples whose wheel tags are unsupported."""
    candidates = [
        (
            parse_version('1.0'),
            Link('simple-1.0-py2.py3-none-TEST.whl'),
            '1.0',
        ),
    ]
    pf = PackageFinder([], [], use_wheel=True)
    with pytest.raises(InstallationError):
        pf._sort_versions(candidates)
def test_finder_priority_file_over_page(data):
    """File links sort ahead of equivalent page links."""
    requirement = InstallRequirement.from_line('gmpy==1.15', None)
    pf = PackageFinder(
        [data.find_links],
        ["http://pypi.python.org/simple"],
        session=PipSession(),
    )
    candidates = pf.find_all_candidates(requirement.name)
    # Exactly one file InstallationCandidate first, then only https ones.
    assert candidates[0].location.scheme == 'file'
    assert all(candidate.location.scheme == 'https'
               for candidate in candidates[1:]), candidates
    result = pf.find_requirement(requirement, False)
    assert result.url.startswith("file://")
def test_finder_installs_dev_releases(data):
    """Dev releases are found when allow_all_prereleases is enabled."""
    requirement = InstallRequirement.from_line("bar", None)
    # using a local index (that has dev releases)
    pf = PackageFinder(
        [], [data.index_url("dev")],
        allow_all_prereleases=True,
        session=PipSession(),
    )
    result = pf.find_requirement(requirement, False)
    assert result.url.endswith("bar-2.0.dev1.tar.gz"), result.url
def test_finder_finds_external_links_with_hashes_all(data):
    """allow_all_external lets hashed external links be found."""
    requirement = InstallRequirement.from_line("bar", None)
    # using a local index
    pf = PackageFinder(
        [], [data.index_url("externals")],
        allow_all_external=True,
        session=PipSession(),
    )
    result = pf.find_requirement(requirement, False)
    assert result.filename == "bar-2.0.tar.gz"
def test_finder_finds_external_links_without_hashes_per_project(data):
    """Unhashed external links are found when the project is allow-listed."""
    requirement = InstallRequirement.from_line("bar==3.0", None)
    # using a local index
    pf = PackageFinder(
        [], [data.index_url("externals")],
        allow_external=["bar"],
        allow_unverified=["bar"],
        session=PipSession(),
    )
    result = pf.find_requirement(requirement, False)
    assert result.filename == "bar-3.0.tar.gz"
def test_mirror_url_formats():
    """Every accepted mirror spelling normalizes to <scheme>host/simple/."""
    formats = [
        'some_mirror',
        'some_mirror/',
        'some_mirror/simple',
        'some_mirror/simple/',
    ]
    for scheme in ['http://', 'https://', 'file://', '']:
        # A missing scheme defaults to http://.
        expected = (scheme or 'http://') + 'some_mirror/simple/'
        scheme_formats = ['%s%s' % (scheme, fmt) for fmt in formats]
        pf = PackageFinder([], [])
        for url in pf._get_mirror_urls(mirrors=scheme_formats,
                                       main_mirror_url=None):
            assert url == expected, str([url, expected])
def test_finder_finds_external_links_without_hashes_scraped_per_project_all_insecure(
        data):
    """Externally scraped links are found for an allow-listed project."""
    requirement = InstallRequirement.from_line("bar", None)
    # using a local index
    pf = PackageFinder(
        [], [data.index_url("externals")],
        allow_external=["bar"],
        allow_unverified=["bar"],
    )
    result = pf.find_requirement(requirement, False)
    assert result.filename == "bar-4.0.tar.gz"
def test_unpinned_hash_checking(self, data):
    """Make sure prepare_files() raises an error when a requirement is not
    version-pinned in hash-checking mode.
    """
    reqset = self.basic_reqset()
    # Test that there must be exactly 1 specifier:
    reqset.add_requirement(
        list(process_line('simple --hash=sha256:a90427ae31f5d1d0d7ec06ee97'
                          'd9fcf2d0fc9a786985250c1c83fd68df5911dd',
                          'file',
                          1))[0])
    # Test that the operator must be ==:
    reqset.add_requirement(list(process_line(
        'simple2>1.0 --hash=sha256:3ad45e1e9aa48b4462af0'
        '123f6a7e44a9115db1ef945d4d92c123dfe21815a06',
        'file',
        2))[0])
    finder = PackageFinder([data.find_links], [], session=PipSession())
    # The error message should name every offending requirement with its
    # originating file and line.
    assert_raises_regexp(
        HashErrors,
        # Make sure all failing requirements are listed:
        r'versions pinned with ==. These do not:\n'
        r' simple .* \(from -r file \(line 1\)\)\n'
        r' simple2>1.0 .* \(from -r file \(line 2\)\)',
        reqset.prepare_files,
        finder)
def _build_package_finder(self, options, session, platform=None,
                          python_versions=None, abi=None,
                          implementation=None):
    """
    Create a package finder appropriate to this requirement command.

    :param options: parsed command-line options supplying index URLs,
        find-links, trusted hosts, format control, etc.
    :param session: the network session the finder should use.
    :param platform/python_versions/abi/implementation: optional overrides
        for the target environment; passed straight through to PackageFinder.
    """
    index_urls = [options.index_url] + options.extra_index_urls
    if options.no_index:
        # --no-index: drop every configured index URL.
        logger.debug('Ignoring indexes: %s', ','.join(index_urls))
        index_urls = []

    return PackageFinder(
        find_links=options.find_links,
        format_control=options.format_control,
        index_urls=index_urls,
        trusted_hosts=options.trusted_hosts,
        allow_all_prereleases=options.pre,
        process_dependency_links=options.process_dependency_links,
        session=session,
        platform=platform,
        versions=python_versions,
        abi=abi,
        implementation=implementation,
    )
def test_unsupported_hashes(self, data):
    """VCS and dir links should raise errors when --require-hashes is on.

    In addition, complaints about the type of requirement (VCS or dir)
    should trump the presence or absence of a hash.
    """
    reqset = RequirementSet(require_hashes=True)
    # A VCS requirement: cannot be hashed even though a hash is supplied.
    reqset.add_requirement(
        list(
            process_line(
                'git+git://github.com/pypa/pip-test-package --hash=sha256:123',
                'file',
                1))[0])
    # A local directory requirement: also unhashable.
    dir_path = data.packages.join('FSPkg')
    reqset.add_requirement(
        list(process_line('file://%s' % (dir_path, ),
                          'file',
                          2))[0])
    finder = PackageFinder([data.find_links], [], session=PipSession())
    resolver = self._basic_resolver(finder)
    sep = os.path.sep
    if sep == '\\':
        sep = '\\\\'  # This needs to be escaped for the regex
    # Both complaints must appear, each naming its source file and line.
    assert_raises_regexp(
        HashErrors,
        r"Can't verify hashes for these requirements because we don't "
        r"have a way to hash version control repositories:\n"
        r" git\+git://github\.com/pypa/pip-test-package \(from -r file "
        r"\(line 1\)\)\n"
        r"Can't verify hashes for these file:// requirements because they "
        r"point to directories:\n"
        r" file://.*{sep}data{sep}packages{sep}FSPkg "
        r"\(from -r file \(line 2\)\)".format(sep=sep),
        resolver.resolve,
        reqset)
def test_finder_finds_external_links_without_hashes_all_all_insecure(data):
    """allow_all_external plus unverified finds unhashed external links."""
    requirement = InstallRequirement.from_line("bar==3.0", None)
    # using a local index
    pf = PackageFinder(
        [], [data.index_url("externals")],
        allow_all_external=True,
        allow_unverified=["bar"],
    )
    result = pf.find_requirement(requirement, False)
    assert result.filename == "bar-3.0.tar.gz"
def get_requirements_and_latest(filename, force=False):
    """Parse a requirements file and get latest version for each requirement.

    Yields a tuple of (original line, InstallRequirement instance,
    spec_versions, latest_version).

    :param filename: Path to a requirements.txt file.
    :param force: Force getting latest version even for packages without
        a version specified.
    """
    session = PipSession()
    finder = PackageFinder(
        session=session, find_links=[], index_urls=[PyPI.simple_url])

    _, content = get_file_content(filename, session=session)
    for line_number, line, orig_line in yield_lines(content):
        # Strip comments and surrounding whitespace before parsing.
        line = req_file.COMMENT_RE.sub('', line)
        line = line.strip()
        req = parse_requirement_line(line, filename, line_number, session,
                                     finder)
        if req is None or req.name is None or req_file.SCHEME_RE.match(req.name):
            # Unparseable / nameless / URL-style lines pass through untouched.
            yield (orig_line, None, None, None)
            continue
        spec_ver = current_version(req)
        if spec_ver or force:
            # NOTE(review): latest_version is called with `finder` as the
            # third argument here — confirm its signature matches; verify
            # against the definition used by this module.
            latest_ver = latest_version(req, session, finder)
            yield (orig_line, req, spec_ver, latest_ver)
def __init__(self):
    # Network session and a finder bound to the default index.
    self.session = PipSession()
    self.finder = PackageFinder(find_links=[],
                                index_urls=[self.DEFAULT_INDEX_URL],
                                session=self.session)

    # Caches
    # stores project_name => InstallationCandidate mappings for all
    # versions reported by PyPI, so we only have to ask once for each
    # project
    self._available_versions_cache = {}

    # Setup file paths
    self.freshen_build_caches()
    self._download_dir = os.path.join(CACHE_DIR, 'pkgs')
    self._wheel_download_dir = os.path.join(CACHE_DIR, 'wheels')
def __init__(self):
    # Plain requests session (not pip's PipSession).
    self.session = requests.Session()
    # Finder with no find-links and no index URLs; dependency-link
    # processing is explicitly disabled.
    self.finder = PackageFinder(
        find_links=[],
        index_urls=[],
        session=self.session,
        process_dependency_links=False,
    )
def test_finder_finds_external_links_without_hashes_scraped_all(data):
    """allow_all_external plus unverified finds externally scraped links."""
    requirement = InstallRequirement.from_line("bar", None)
    # using a local index
    pf = PackageFinder(
        [], [data.index_url("externals")],
        allow_all_external=True,
        allow_unverified=["bar"],
        session=PipSession(),
    )
    result = pf.find_requirement(requirement, False)
    assert result.filename == "bar-4.0.tar.gz"
def test_skip_invalid_wheel_link(self, caplog, data):
    """Invalid wheel filenames are skipped (and the skip is logged)."""
    requirement = InstallRequirement.from_line("invalid")
    # data.find_links contains "invalid.whl", which is an invalid wheel
    pf = PackageFinder(
        [data.find_links], [],
        use_wheel=True,
        session=PipSession(),
    )
    with pytest.raises(DistributionNotFound):
        pf.find_requirement(requirement, True)
    assert ("invalid.whl because the wheel filename is invalid"
            in caplog.text())
def test_skip_invalid_wheel_link(self, data):
    """
    Test if PackageFinder skips invalid wheel filenames
    """
    log = []
    # Capture DEBUG output so we can check the skip message below.
    logger.add_consumers((logger.DEBUG, log.append))
    req = InstallRequirement.from_line("invalid")
    # data.find_links contains "invalid.whl", which is an invalid wheel
    finder = PackageFinder(
        [data.find_links], [],
        use_wheel=True,
        session=PipSession(),
    )
    with pytest.raises(DistributionNotFound):
        finder.find_requirement(req, True)
    # BUG FIX: the membership expression was evaluated and discarded; without
    # `assert` this check could never fail.
    assert "invalid.whl because the wheel filename is invalid" in "".join(log)
def _build_package_finder(self, options, index_urls):
    """
    Create a package finder appropriate to this list command.
    """
    return PackageFinder(
        find_links=options.find_links,
        index_urls=index_urls,
        use_mirrors=options.use_mirrors,
        mirrors=options.mirrors,
    )
class test_link_package_versions(object):
    """Tests for PackageFinder._link_package_versions name matching."""

    # patch this for travis which has distribute in its base env for now
    @patch(
        'pip.wheel.pkg_resources.get_distribution',
        lambda x: Distribution(project_name='setuptools', version='0.9')
    )
    def setup(self):
        # Fixture state shared by the tests below.
        self.version = '1.0'
        self.parsed_version = parse_version(self.version)
        self.search_name = 'pytest'
        self.finder = PackageFinder(
            [],
            [],
            use_wheel=True,
            session=PipSession(),
        )

    def test_link_package_versions_match_wheel(self):
        """Test that 'pytest' archives match for 'pytest'"""

        # TODO: Uncomment these, when #1217 is fixed
        # link = Link('http:/yo/pytest-1.0.tar.gz')
        # result = self.finder._link_package_versions(link, self.search_name)
        # assert result == [(self.parsed_version, link, self.version)], result

        link = Link('http:/yo/pytest-1.0-py2.py3-none-any.whl')
        result = self.finder._link_package_versions(link, self.search_name)
        assert result == [(self.parsed_version, link, self.version)], result

    def test_link_package_versions_substring_fails(self):
        """Test that 'pytest<something> archives won't match for 'pytest'"""

        # TODO: Uncomment these, when #1217 is fixed
        # link = Link('http:/yo/pytest-xdist-1.0.tar.gz')
        # result = self.finder._link_package_versions(link, self.search_name)
        # assert result == [], result

        # link = Link('http:/yo/pytest2-1.0.tar.gz')
        # result = self.finder._link_package_versions(link, self.search_name)
        # assert result == [], result

        link = Link('http:/yo/pytest_xdist-1.0-py2.py3-none-any.whl')
        result = self.finder._link_package_versions(link, self.search_name)
        assert result == [], result
def __init__(self):
    # Resolve the configured index URL once and build a finder bound to it.
    self.pip_index_url = PipPackagePathFinder._get_pip_index_url()
    package_finder = PackageFinder(find_links=[],
                                   index_urls=[self.pip_index_url],
                                   use_mirrors=False,
                                   mirrors=[])
    # self.finder becomes a callable with the PackageFinder pre-bound.
    self.finder = partial(_requirement_finder, finder=package_finder)
def test_req_file_parse_use_wheel():
    """
    Test parsing --use-wheel from a req file
    """
    reqfile = os.path.join(tests_data, 'reqfiles', 'supported_options.txt')
    pf = PackageFinder([], [])
    # Fully consume the generator so the option is applied to the finder.
    for _ in parse_requirements(reqfile, pf):
        pass
    assert pf.use_wheel
def _build_package_finder(self, options, index_urls):
    """
    Create a package finder appropriate to this install command.
    This method is meant to be overridden by subclasses, not
    called directly.
    """
    return PackageFinder(
        find_links=options.find_links,
        index_urls=index_urls,
        use_mirrors=options.use_mirrors,
        mirrors=options.mirrors,
    )
def load(self, reqfile):
    """Parse *reqfile* and append every parsed requirement to self.requirements."""
    if not os.path.exists(reqfile):
        raise ValueError('The given requirements file doesn\'t exist')
    finder = PackageFinder([], [], session=requests)
    for parsed in parse_requirements(reqfile, finder=finder, session=requests):
        if not parsed.req:
            continue
        if not getattr(parsed.req, 'name', None):
            # Older requirement objects expose project_name instead of name.
            parsed.req.name = parsed.req.project_name
        self.requirements.append(parsed.req)
def test_req_file_parse_no_use_wheel(data):
    """
    Test parsing --no-use-wheel from a req file
    """
    pf = PackageFinder([], [], session=PipSession())
    # Fully consume the generator so the option takes effect on the finder.
    for _ in parse_requirements(data.reqfiles.join("supported_options.txt"),
                                pf, session=PipSession()):
        pass
    assert not pf.use_wheel
def test_finder_detects_latest_already_satisfied_pypi_links():
    """An already-installed latest version is detected via pypi links."""
    requirement = InstallRequirement.from_line('initools', None)
    # the latest initools on pypi is 0.3.1
    latest_version = "0.3.1"
    requirement.satisfied_by = Mock(
        location="/path",
        parsed_version=parse_version(latest_version),
        version=latest_version,
    )
    pf = PackageFinder(
        [],
        ["http://pypi.python.org/simple"],
        session=PipSession(),
    )
    with pytest.raises(BestVersionAlreadyInstalled):
        pf.find_requirement(requirement, True)
def test_find_wheel_supported(self, data, monkeypatch):
    """A wheel matching the (patched) supported tag set is found."""
    # Pin the supported tags so the test is interpreter-independent.
    monkeypatch.setattr(
        pip.pep425tags,
        "supported_tags",
        [('py2', 'none', 'any')],
    )
    requirement = InstallRequirement.from_line("simple.dist")
    pf = PackageFinder(
        [data.find_links], [],
        session=PipSession(),
    )
    result = pf.find_requirement(requirement, True)
    assert result.url.endswith("simple.dist-0.1-py2.py3-none-any.whl"), result
def test_finder_raises_error(self, monkeypatch):
    """
    Test the PackageFinder raises an error when wheel is not supported
    """
    # Remove the marker attribute that signals wheel support.
    monkeypatch.delattr('pkg_resources.DistInfoDistribution')
    # on initialization
    assert_raises_regexp(InstallationError, 'wheel support',
                         PackageFinder, [], [], use_wheel=True)
    # when setting property later
    finder = PackageFinder([], [], use_wheel=False)
    assert_raises_regexp(InstallationError, 'wheel support',
                         self.set_use_wheel_true, finder)
def test_not_find_wheel_not_supported(self, data, monkeypatch):
    """An unsupported wheel must not be found."""
    # Restrict the supported tags so the available wheel cannot match.
    monkeypatch.setattr(
        pip.pep425tags,
        "supported_tags",
        [('py1', 'none', 'any')],
    )
    requirement = InstallRequirement.from_line("simple.dist")
    pf = PackageFinder(
        [data.find_links], [],
        session=PipSession(),
    )
    with pytest.raises(DistributionNotFound):
        pf.find_requirement(requirement, True)
def run(self, package_name):
    """Print every available version of *package_name*, one per line."""
    # PackageFinder requires session which requires options
    options, args = self.parse_args([])
    session = self._build_session(options=options)
    finder = PackageFinder(
        find_links=[],
        index_urls=['https://pypi.python.org/simple/'],
        session=session,
    )
    candidates = finder.find_all_candidates(package_name)
    # set() to remove repeated versions - ie. matplotlib
    unique_versions = sorted(set(c.version for c in candidates))
    print('\n'.join(str(v) for v in unique_versions))
def wheel(self, package, sender_data): source = sender_data[1].pop('path') # If the file has the wheel extention, we bail. We don't have to do # anything :) if re.findall('whl$', source): raise NotForMe target = os.path.dirname(source) # The package finder is what PIP uses to find packages given their # names. This finder won't use internet at all, only the folder we know # that our file is. finder = PackageFinder(find_links=[target], index_urls=[]) # Another requirement to use PIP API, we have to build a requirement # set. build_dir = tempfile.mkdtemp() requirement_set = RequirementSet( build_dir=build_dir, src_dir=None, download_dir=None, download_cache=None, ignore_dependencies=True, ignore_installed=True, ) requirement_set.add_requirement(InstallRequirement.from_line(package)) # Here we go, we're finally converting the package from a regular # format to a wheel. Notice that the wheel dir is another tmp # directory. See comments below. wheel_dir = tempfile.mkdtemp() builder = WheelBuilder( requirement_set, finder, wheel_dir=wheel_dir, build_options=[], global_options=[], ) builder.build() # Since I just can't retrieve the brand new file name through the API, # the wheel dir is a tmp directory so the *only* file over there *is* # the one that we want. wheel_file = os.listdir(wheel_dir)[0] path = self.index.from_file(os.path.join(wheel_dir, wheel_file)) # Cleaning up the mess. Here I kill the two temp folders I created to # 1) build the package into a wheel, 2) output the wheel file # separately shutil.rmtree(build_dir) shutil.rmtree(wheel_dir) # Finally, we just say where in the storage the file is return {'path': os.path.join(os.path.dirname(source), wheel_file)}
def test_no_egg_on_require_hashes(self, data):
    """Make sure --egg is illegal with --require-hashes.

    --egg would cause dependencies to always be installed, since it cedes
    control directly to setuptools.
    """
    reqset = self.basic_reqset(require_hashes=True, as_egg=True)
    pf = PackageFinder([data.find_links], [], session=PipSession())
    with pytest.raises(InstallationError):
        reqset.prepare_files(pf)