def test_make_link_evaluator(
    self, allow_yanked, ignore_requires_python, only_binary, expected_formats,
):
    """make_link_evaluator() must propagate the finder's configuration."""
    # A distinctive TargetPython so we can assert object identity later.
    target_python = TargetPython(py_version_info=(3, 7))
    format_control = FormatControl(set(), only_binary)
    collector = LinkCollector(
        session=PipSession(),
        search_scope=SearchScope([], []),
    )
    finder = PackageFinder(
        link_collector=collector,
        target_python=target_python,
        allow_yanked=allow_yanked,
        format_control=format_control,
        ignore_requires_python=ignore_requires_python,
    )

    # Use a project_name whose canonicalized form differs from the input.
    evaluator = finder.make_link_evaluator('Twine')

    assert evaluator.project_name == 'Twine'
    assert evaluator._canonical_name == 'twine'
    assert evaluator._allow_yanked == allow_yanked
    assert evaluator._ignore_requires_python == ignore_requires_python
    assert evaluator._formats == expected_formats

    # The finder must hand over the very same TargetPython object...
    actual_target_python = evaluator._target_python
    assert actual_target_python is target_python
    # ...with its attributes untouched.
    assert actual_target_python._given_py_version_info == (3, 7)
    assert actual_target_python.py_version_info == (3, 7, 0)
def _build_session(self, options, retries=None, timeout=None):
    # type: (Values, Optional[int], Optional[int]) -> PipSession
    """Build a PipSession configured from parsed command-line options.

    Explicit ``retries``/``timeout`` arguments win over the values
    carried in ``options``.
    """
    assert not options.cache_dir or os.path.isabs(options.cache_dir)

    cache_path = (
        os.path.join(options.cache_dir, "http") if options.cache_dir else None
    )
    session = PipSession(
        cache=cache_path,
        retries=options.retries if retries is None else retries,
        trusted_hosts=options.trusted_hosts,
        index_urls=self._get_index_urls(options),
    )

    # User-supplied CA bundle.
    if options.cert:
        session.verify = options.cert

    # SSL client certificate.
    if options.client_cert:
        session.cert = options.client_cert

    # Timeout: the explicit argument beats the option value.
    if options.timeout or timeout:
        session.timeout = options.timeout if timeout is None else timeout

    # A single proxy serves both schemes.
    if options.proxy:
        session.proxies = {
            "http": options.proxy,
            "https": options.proxy,
        }

    # Decide whether we may prompt the user for authentication.
    session.auth.prompting = not options.no_input

    return session
def get_required_dallinger_version(experiment_tmp_path: str) -> str:
    """Examine the requirements.txt in an experiment tmp directory and
    return the dallinger version required by the experiment.

    Returns "" when no dallinger requirement can be found.
    """
    requirements_path = str(Path(experiment_tmp_path) / "requirements.txt")
    parsed = parse_requirements(requirements_path, session=PipSession())
    matches = []
    for entry in parsed:
        spec = entry.requirement
        # The "file:dallinger-..." form appears when dallinger is installed
        # in editable mode.
        if spec.startswith(("dallinger==", "file:dallinger-")):
            matches.append(spec)

    if not matches:
        print("Could not determine Dallinger version. Using latest")
        return ""

    first = matches[0]
    # pip-compile should have created a single spec in the form "dallinger==7.2.0"
    if "==" in first:
        return first.split("==")[1]
    # Or we might have a requirement like `file:dallinger-7.2.0-py3-none-any.whl`
    return parse_wheel_filename(first[len("file:"):]).version
def test_create__format_control(self) -> None:
    """
    PackageFinder.create() must keep the exact FormatControl object passed
    in through the selection preferences, attributes intact.
    """
    collector = LinkCollector(
        session=PipSession(),
        search_scope=SearchScope([], []),
    )
    format_control = FormatControl(set(), {":all:"})
    prefs = SelectionPreferences(
        allow_yanked=True,
        format_control=format_control,
    )
    finder = PackageFinder.create(
        link_collector=collector,
        selection_prefs=prefs,
        use_deprecated_html5lib=False,
    )

    actual = finder.format_control
    # Identity, not just equality.
    assert actual is format_control
    # And the only_binary setting survived unchanged.
    assert actual.only_binary == {":all:"}
def parse_install_requirements(requirements_lock: str, extra_pip_args: List[str]) -> List[Tuple[InstallRequirement, str]]:
    """Parse a lock file into (InstallRequirement, source line) pairs.

    Non-requirement lines (pip options) are split and appended to
    ``extra_pip_args`` as a side effect.
    """
    session = PipSession()
    # Mirrors pip._internal.req.req_file.parse_requirements
    # (https://github.com/pypa/pip/blob/21.0.1/src/pip/_internal/req/req_file.py#L127)
    # so the (preprocessed) source line from the requirements_lock file is
    # kept alongside each requirement, to pass to sub repos as the requirement.
    parser = RequirementsFileParser(session, get_line_parser(finder=None))

    result: List[Tuple[InstallRequirement, str]] = []
    _, content = get_file_content(requirements_lock, session)
    parsed_lines = parser.parse(requirements_lock, constraint=False)
    for parsed_line, (_, line) in zip(parsed_lines, preprocess(content)):
        if parsed_line.is_requirement:
            req = constructors.install_req_from_line(parsed_line.requirement)
            result.append((req, line))
        else:
            # An options line: forward its tokens as extra pip arguments.
            extra_pip_args.extend(shlex.split(line))
    return result
def pip_download_link(resconfig, url: str, destdir: str):
    """Download and unpack the distribution at *url* into *destdir*.

    Uses pip internals (PipSession/unpack_url) configured from the
    resource config's repository settings (index_url, username, password).

    :param resconfig: resource configuration dict; reads
        resconfig['source']['repository'].
    :param url: direct URL of the distribution file to fetch.
    :param destdir: directory the unpacked file is copied into.
    """
    # pip writes progress to stdout; keep stdout clean for the caller by
    # redirecting everything to stderr.
    with redirect_stdout(sys.stderr):
        netloc = urlsplit(resconfig['source']['repository']['index_url'])[1]
        # Trusted-hosts entries take a bare hostname, without the port.
        hostname = netloc.split(':')[0]
        with PipSession(retries=RETRIES, trusted_hosts=[
            hostname,
        ]) as session:
            session.timeout = TIMEOUT
            # Non-interactive: never prompt for credentials.
            session.auth.prompting = False
            # Credentials are keyed by netloc (host:port) in pip's auth map.
            session.auth.passwords[netloc] = (
                resconfig['source']['repository'].get('username', None),
                resconfig['source']['repository'].get('password', None))
            # pip internals hardcode global tempdir manager.
            # need to copy to destdir before tempdir gets blown away.
            with global_tempdir_manager():
                file = unpack_url(
                    Link(url),
                    destdir,
                    Downloader(session, "pretty"),
                )
                shutil.copy(file.path, destdir)
def _parse_requirements(file_path):
    """Return the requirement strings found in *file_path*.

    Handles the import-location shuffles across pip's history; the
    attribute that carries the requirement text also changed name.
    (parse_requirements is a deprecated internal API, cf.
    https://stackoverflow.com/a/59971236)
    """
    pip_ver = pkg_resources.get_distribution('pip').version
    pip_version = list(map(int, pip_ver.split('.')[:2]))

    PipSession = None  # pip < 6 has no session concept
    if pip_version >= [20, 0]:
        from pip._internal.req import parse_requirements
        from pip._internal.network.session import PipSession
        req = 'requirement'
    elif pip_version >= [10, 0]:
        from pip._internal.req import parse_requirements
        # BUGFIX: pip 10-19 moved everything under pip._internal; the old
        # top-level pip.download module no longer exists in that range.
        from pip._internal.download import PipSession
        req = 'req'
    elif pip_version >= [6, 0]:
        from pip.req import parse_requirements
        from pip.download import PipSession
        req = 'req'
    else:
        from pip.req import parse_requirements
        req = 'req'

    if PipSession is None:
        # BUGFIX: the original unconditionally called PipSession() and so
        # raised NameError on pip < 6, where it was never imported.
        raw = parse_requirements(file_path)
    else:
        raw = parse_requirements(file_path, session=PipSession())
    return [str(getattr(i, req)) for i in raw]
def test_expand_missing_env_variables(self, tmpdir, finder):
    """Unset env variables in a requirement URL must be left verbatim."""
    req_url = (
        'https://${NON_EXISTENT_VARIABLE}:$WRONG_FORMAT@'
        '%WINDOWS_FORMAT%github.com/user/repo/archive/master.zip'
    )

    with open(tmpdir.joinpath('req1.txt'), 'w') as fp:
        fp.write(req_url)

    # BUGFIX: construct the session *before* stubbing os.getenv —
    # PipSession reads the environment during construction, so building it
    # inside the patch exercised the stub instead of the real environment.
    session = PipSession()

    with patch('pip._internal.req.req_file.os.getenv') as getenv:
        getenv.return_value = ''
        reqs = list(parse_reqfile(
            tmpdir.joinpath('req1.txt'),
            finder=finder,
            session=session,
        ))

    assert len(reqs) == 1, \
        'parsing requirement file with env variable failed'
    assert reqs[0].link.url == req_url, \
        'ignoring invalid env variable in req file failed'
def test_create__candidate_prefs(
    self,
    allow_all_prereleases,
    prefer_binary,
):
    """
    PackageFinder.create() must translate the selection preferences into
    the finder's _candidate_prefs.
    """
    collector = LinkCollector(
        session=PipSession(),
        search_scope=SearchScope([], []),
    )
    prefs = SelectionPreferences(
        allow_yanked=True,
        allow_all_prereleases=allow_all_prereleases,
        prefer_binary=prefer_binary,
    )
    finder = PackageFinder.create(
        link_collector=collector,
        selection_prefs=prefs,
    )

    candidate_prefs = finder._candidate_prefs
    assert candidate_prefs.allow_all_prereleases == allow_all_prereleases
    assert candidate_prefs.prefer_binary == prefer_binary
def _basic_resolver(
    self, finder: PackageFinder, require_hashes: bool = False
) -> Iterator[Resolver]:
    """Yield a Resolver wired to *finder* with test-friendly defaults.

    Written as a generator so the requirement tracker's context stays
    open for as long as the caller uses the yielded resolver.

    :param finder: PackageFinder the resolver draws candidates from.
    :param require_hashes: forwarded to the RequirementPreparer.
    """
    # Factory the resolver uses to turn requirement strings into
    # InstallRequirement objects.
    make_install_req = partial(
        install_req_from_req_string,
        isolated=False,
        use_pep517=None,
    )
    session = PipSession()

    with get_requirement_tracker() as tracker:
        # Build dirs live under the test's tempdir so they are cleaned up
        # with it.
        preparer = RequirementPreparer(
            build_dir=os.path.join(self.tempdir, "build"),
            src_dir=os.path.join(self.tempdir, "src"),
            download_dir=None,
            build_isolation=True,
            req_tracker=tracker,
            session=session,
            progress_bar="on",
            finder=finder,
            require_hashes=require_hashes,
            use_user_site=False,
            lazy_wheel=False,
            verbosity=0,
            in_tree_build=False,
        )
        yield Resolver(
            preparer=preparer,
            make_install_req=make_install_req,
            finder=finder,
            wheel_cache=None,
            use_user_site=False,
            upgrade_strategy="to-satisfy-only",
            ignore_dependencies=False,
            ignore_installed=False,
            ignore_requires_python=False,
            force_reinstall=False,
        )
def get_file_content(url: str, session: PipSession) -> Tuple[str, str]:
    """Gets the content of a file; it may be a filename, file: URL, or
    http: URL.  Returns (location, content).  Content is unicode.
    Respects # -*- coding: declarations on the retrieved files.

    :param url: File path or url.
    :param session: PipSession instance.
    """
    scheme = get_url_scheme(url)

    # URL schemes go through the session; pip has special support for
    # file:// URLs (LocalFSAdapter).
    if scheme in ("http", "https", "file"):
        resp = session.get(url)
        raise_for_status(resp)
        return resp.url, resp.text

    # Otherwise treat it as a bare filesystem path.
    try:
        with open(url, "rb") as f:
            raw = f.read()
    except OSError as exc:
        raise InstallationError(f"Could not open requirements file: {exc}")
    return url, auto_decode(raw)
def test_make_link_collector(
    find_links,
    no_index,
    suppress_no_index,
    expected,
):
    """
    :param expected: the expected (find_links, index_urls) values.
    """
    expected_find_links, expected_index_urls = expected
    session = PipSession()
    options = pretend.stub(
        find_links=find_links,
        index_url='default_url',
        extra_index_urls=['url1', 'url2'],
        no_index=no_index,
    )
    collector = make_link_collector(
        session,
        options=options,
        suppress_no_index=suppress_no_index,
    )

    # The collector must keep the session object itself.
    assert collector.session is session

    scope = collector.search_scope
    assert scope.find_links == expected_find_links
    assert scope.index_urls == expected_index_urls
def test_make_candidate_evaluator(
    self,
    allow_all_prereleases: bool,
    prefer_binary: bool,
) -> None:
    """Finder preferences, hashes and tags must reach the evaluator."""
    target_python = TargetPython()
    target_python._valid_tags = [Tag("py36", "none", "any")]
    candidate_prefs = CandidatePreferences(
        prefer_binary=prefer_binary,
        allow_all_prereleases=allow_all_prereleases,
    )
    collector = LinkCollector(
        session=PipSession(),
        search_scope=SearchScope([], []),
    )
    finder = PackageFinder(
        link_collector=collector,
        target_python=target_python,
        allow_yanked=True,
        candidate_prefs=candidate_prefs,
        use_deprecated_html5lib=False,
    )
    specifier = SpecifierSet()

    # Passing hashes checks that _hashes gets populated.
    hashes = Hashes({"sha256": [64 * "a"]})
    evaluator = finder.make_candidate_evaluator(
        "my-project",
        specifier=specifier,
        hashes=hashes,
    )

    assert evaluator._allow_all_prereleases == allow_all_prereleases
    assert evaluator._hashes == hashes
    assert evaluator._prefer_binary == prefer_binary
    assert evaluator._project_name == "my-project"
    assert evaluator._specifier is specifier
    assert evaluator._supported_tags == [Tag("py36", "none", "any")]
def test_unpack_url_with_urllib_response_without_content_type(
        data: TestData) -> None:
    """
    It should download and unpack files even if no Content-Type header exists
    """
    import tempfile

    _real_session = PipSession()

    def _fake_session_get(*args: Any, **kwargs: Any) -> Dict[str, str]:
        # Forward to a real session, then strip the Content-Type header to
        # simulate a server that omits it.
        resp = _real_session.get(*args, **kwargs)
        del resp.headers["Content-Type"]
        return resp

    session = Mock()
    session.get = _fake_session_get
    download = Downloader(session, progress_bar="on")

    uri = data.packages.joinpath("simple-1.0.tar.gz").as_uri()
    link = Link(uri)
    # IMPROVED: TemporaryDirectory replaces the mkdtemp/try/finally/rmtree
    # dance and guarantees cleanup even when the assertion fails.
    with tempfile.TemporaryDirectory() as temp_dir:
        unpack_url(
            link,
            temp_dir,
            download=download,
            download_dir=None,
            verbosity=0,
        )
        assert set(os.listdir(temp_dir)) == {
            "PKG-INFO",
            "setup.cfg",
            "setup.py",
            "simple",
            "simple.egg-info",
        }
def process_line(
    line,
    filename,
    line_number,
    finder=None,
    options=None,
    session=None,
    constraint=False,
):
    """Write *line* into *filename* at *line_number*, then parse the file.

    NOTE(review): this is a closure — it reads ``tmpdir`` and
    ``monkeypatch`` from the enclosing scope (pytest fixtures), so it is
    not usable as a standalone function.

    :param line: requirement-file line to test.
    :param filename: name of the file to create under ``tmpdir``.
    :param line_number: 1-based line the content should land on.
    :param session: defaults to a fresh PipSession when not given.
    :returns: list of parsed requirements.
    """
    if session is None:
        session = PipSession()

    # Pad with blank lines so the content sits at the requested line number
    # (parse errors report real line numbers).
    prefix = '\n' * (line_number - 1)
    path = tmpdir.joinpath(filename)
    path.parent.mkdir(exist_ok=True)
    path.write_text(prefix + line)
    # Parse relative to tmpdir so bare filenames resolve there.
    monkeypatch.chdir(str(tmpdir))
    return list(parse_requirements(
        filename,
        finder=finder,
        options=options,
        session=session,
        constraint=constraint,
    ))
def test_expand_missing_env_variables(self, tmpdir: Path, finder: PackageFinder) -> None:
    """Unset env variables in a requirement URL must be left verbatim."""
    req_url = (
        "https://${NON_EXISTENT_VARIABLE}:$WRONG_FORMAT@"
        "%WINDOWS_FORMAT%github.com/user/repo/archive/master.zip"
    )
    req_path = tmpdir.joinpath("req1.txt")
    with open(req_path, "w") as fp:
        fp.write(req_url)

    # Build the session before stubbing os.getenv: PipSession reads the
    # environment during construction.
    session = PipSession()

    with mock.patch("pip._internal.req.req_file.os.getenv") as getenv:
        getenv.return_value = ""
        reqs = list(
            parse_reqfile(req_path, finder=finder, session=session)
        )

    assert len(reqs) == 1, "parsing requirement file with env variable failed"
    assert reqs[0].link is not None
    assert (
        reqs[0].link.url == req_url
    ), "ignoring invalid env variable in req file failed"
def test_make_candidate_evaluator(
    self,
    allow_all_prereleases,
    prefer_binary,
):
    """Finder preferences, hashes and tags must reach the evaluator."""
    target_python = TargetPython()
    target_python._valid_tags = [('py36', 'none', 'any')]
    candidate_prefs = CandidatePreferences(
        prefer_binary=prefer_binary,
        allow_all_prereleases=allow_all_prereleases,
    )
    collector = LinkCollector(
        session=PipSession(),
        search_scope=SearchScope([], []),
    )
    finder = PackageFinder(
        link_collector=collector,
        target_python=target_python,
        allow_yanked=True,
        candidate_prefs=candidate_prefs,
    )
    specifier = SpecifierSet()

    # Passing hashes checks that _hashes gets populated.
    hashes = Hashes({'sha256': [64 * 'a']})
    evaluator = finder.make_candidate_evaluator(
        'my-project',
        specifier=specifier,
        hashes=hashes,
    )

    assert evaluator._allow_all_prereleases == allow_all_prereleases
    assert evaluator._hashes == hashes
    assert evaluator._prefer_binary == prefer_binary
    assert evaluator._project_name == 'my-project'
    assert evaluator._specifier is specifier
    assert evaluator._supported_tags == [('py36', 'none', 'any')]
def merge_requirements(files):
    """Merge several requirements files into one requirements text.

    Named requirements have their specifiers and extras unioned across
    files; URL-only requirements are emitted verbatim at the end.

    :param files: iterable of requirements-file paths.
    :returns: newline-joined requirements text.
    """
    requirements = defaultdict(lambda: Requirement())
    links = set()
    for filename in files:
        f_requirements = parse_requirements(filename, session=PipSession())
        for parsed_requirement in f_requirements:
            requirement = install_req_from_parsed_requirement(
                parsed_requirement)
            if not hasattr(requirement.req, 'name'):
                # URL-only entry (no parseable name): record the link.
                # BUGFIX: was `break`, which silently dropped every
                # requirement after the first URL-only one in a file.
                links.add(requirement.link.url)
                continue
            name = requirement.req.name
            specifiers = requirement.req.specifier
            extras = requirement.req.extras
            requirements[name].extras |= set(extras)
            requirements[name].specifiers |= set(specifiers)
            if requirement.link:
                requirements[name].links |= {requirement.link.url}
            requirements[name].editable |= requirement.editable

    result = []
    for key, value in requirements.items():
        if value.links:
            # A pinned link wins over name + specifiers.
            result.append("%s" % value.links.pop())
        elif not value.extras:
            result.append("%s %s" % (key, ",".join(map(str, value.specifiers))))
        else:
            result.append("%s [%s] %s" % (key, ",".join(map(
                str, value.extras)), ",".join(map(str, value.specifiers))))
    for link in links:
        result.append(link)
    return "\n".join(result)
def _http_get_download(session: PipSession, link: Link) -> Response:
    """GET the link's URL (fragment stripped) as a streaming response.

    Raises via raise_for_status() on HTTP error codes.
    """
    # Drop any #fragment (e.g. a hash) before requesting.
    target_url, _, _ = link.url.partition("#")
    resp = session.get(target_url, headers=HEADERS, stream=True)
    raise_for_status(resp)
    return resp
#!/usr/bin/env python from setuptools import setup from pip._internal.network.session import PipSession from pip._internal.req import parse_requirements reqs = parse_requirements('requirements.txt', session=PipSession()) reqs = [str(req.requirement) for req in reqs] setup( name='diskmap', version='0.1.1', description='Scattered light mapping of protoplanetary disks', long_description=open('README.rst').read(), long_description_content_type='text/x-rst', author='Tomas Stolker', author_email='*****@*****.**', url='https://github.com/tomasstolker/diskmap', packages=['diskmap'], package_dir={'diskmap': 'diskmap'}, include_package_data=True, install_requires=reqs, license='MIT', zip_safe=False, keywords='diskmap', classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Science/Research', 'Topic :: Scientific/Engineering :: Astronomy', 'License :: OSI Approved :: MIT License',
def test_http_cache_is_not_enabled(self, tmpdir: Path) -> None:
    """Even with a cache dir configured, plain http:// stays uncached."""
    cache_dir = os.fspath(tmpdir.joinpath("test-cache"))
    session = PipSession(cache=cache_dir)

    http_adapter = session.adapters["http://"]
    assert not hasattr(http_adapter, "cache")
def test_cache_defaults_off(self) -> None:
    """With no cache dir, neither scheme's adapter is cache-backed."""
    session = PipSession()

    for scheme in ("http://", "https://"):
        assert not hasattr(session.adapters[scheme], "cache")
def test_user_agent_user_data(monkeypatch: pytest.MonkeyPatch) -> None:
    """PIP_USER_AGENT_USER_DATA must be embedded in the User-Agent header."""
    monkeypatch.setenv("PIP_USER_AGENT_USER_DATA", "some_string")
    user_agent = PipSession().headers["User-Agent"]
    assert "some_string" in user_agent
def get_user_agent() -> str:
    """Return the User-Agent header value a fresh PipSession would send."""
    session = PipSession()
    return session.headers["User-Agent"]
from setuptools import setup, find_packages try: # pip >=20 from pip._internal.network.session import PipSession from pip._internal.req import parse_requirements install_requires = parse_requirements('requirements.txt', session=PipSession()) dependencies = [str(package.req) for package in install_requires] except ImportError: try: # 10.0.0 <= pip <= 19.3.1 from pip._internal.download import PipSession from pip._internal.req import parse_requirements except ImportError: # pip <= 9.0.3 from pip.download import PipSession from pip.req import parse_requirements install_requires = parse_requirements('requirements.txt',session=PipSession()) dependencies = [str(package.req) for package in install_requires] setup( name='hydra_python_core', version='0.1', packages=find_packages(), license='MIT', description='Core functions for Hydrus', long_description=open('README.md').read(), long_description_content_type="text/markdown",
# for pip <= 9.0.3 from pip.req import parse_requirements from pip.download import PipSession from os import path from setuptools import find_packages, setup here = path.abspath(path.dirname(__file__)) links = [] requires = [] try: # new versions of pip requires a session requirements = parse_requirements( path.join(here, 'requirements.txt'), session=PipSession()) except Exception as exc: requirements = parse_requirements(path.join(here, 'requirements.txt')) for item in requirements: # we want to handle package names and also repo urls if getattr(item, 'url', None): # older pip has url links.append(str(item.url)) if getattr(item, 'link', None): # newer pip has link links.append(str(item.link)) try: if item.req: requires.append(str(item.req)) except AttributeError: if item.requirement:
#!/usr/bin/env python """Setup script for Hydrus.""" from setuptools import setup, find_packages try: # pip >=20 from pip._internal.network.session import PipSession from pip._internal.req import parse_requirements install_requires = parse_requirements("requirements.txt", session=PipSession()) dependencies = [str(package.requirement) for package in install_requires] except ImportError: try: # 10.0.0 <= pip <= 19.3.1 from pip._internal.download import PipSession from pip._internal.req import parse_requirements except ImportError: # pip <= 9.0.3 from pip.download import PipSession from pip.req import parse_requirements install_requires = parse_requirements("requirements.txt", session=PipSession()) dependencies = [str(package.req) for package in install_requires] for package_index in range(len(dependencies)): if dependencies[package_index].startswith("git+"): dependencies[package_index] = dependencies[package_index].split("=")[1]
# pip <= 9.0.3 from pip.download import PipSession from pip.req import parse_requirements from distutils.core import setup from setuptools import find_packages import os import sys current_directory = os.path.dirname(os.path.abspath(__file__)) sys.path.append(current_directory) # Parse requirements.txt to get the list of dependencies inst_req = parse_requirements("requirements.txt", session=PipSession()) REQUIREMENTS = [ str(r.req) if hasattr(r, 'req') else r.requirement if not r.is_editable else '' for r in inst_req ] setup( name="GeoNode", version=__import__("geonode").get_version(), description="Application for serving and sharing geospatial data", long_description=open("README.md").read(), classifiers=["Development Status :: 5 - Production/Stable"], python_requires=">=3.6", keywords="", author="GeoNode Developers",
def session() -> PipSession:
    """Fixture: a fresh PipSession for each test."""
    return PipSession()
def session() -> PipSession:
    """Provide a new PipSession instance."""
    new_session = PipSession()
    return new_session