def requires_dists(
    dist,  # type: DistributionLike
    include_1_1_requires=True,  # type: bool
):
    # type: (...) -> Iterator[Requirement]
    """Examines dist for and returns any declared requirements.

    Looks for `Requires-Dist` metadata and, optionally, the older `Requires` metadata if
    `include_1_1_requires`.

    See:
    + https://www.python.org/dev/peps/pep-0345/#requires-dist-multiple-use
    + https://www.python.org/dev/peps/pep-0314/#requires-multiple-use

    :param dist: A distribution to check for requirement metadata.
    :return: All requirements found.
    """
    metadata = _parse_pkg_info(dist)
    if metadata is None:
        return

    # Always read the modern field; only fall back to the legacy PEP-314 field on request.
    fields = ["Requires-Dist"]
    if include_1_1_requires:
        fields.append("Requires")

    for field in fields:
        for raw_requirement in metadata.get_all(field, ()):
            yield Requirement.parse(raw_requirement)
def test_info_verbose(pex, pex_tools_env):
    # type: (str, Dict[str, str]) -> None
    """`pex repository info -v` emits one accurate JSON record per contained distribution."""
    stdout = subprocess.check_output(args=[pex, "repository", "info", "-v"], env=pex_tools_env)

    infos = {}
    for json_line in stdout.decode("utf-8").splitlines():
        record = json.loads(json_line)
        dist = DistributionHelper.distribution_from_path(record["location"])
        assert isinstance(dist, Distribution)
        name = record["project_name"]
        assert dist.project_name == name
        assert dist.version == record["version"]
        infos[name] = record

    assert set(infos) == {"certifi", "chardet", "idna", "requests", "urllib3"}

    requests_info = infos["requests"]
    assert "2.25.1" == requests_info["version"]
    assert SpecifierSet("!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7") == SpecifierSet(
        requests_info["requires_python"]
    )

    expected_requires = {
        Requirement.parse(req)
        for req in (
            'PySocks!=1.5.7,>=1.5.6; extra == "socks"',
            "certifi>=2017.4.17",
            "chardet<5,>=3.0.2",
            'cryptography>=1.3.4; extra == "security"',
            "idna<3,>=2.5",
            'pyOpenSSL>=0.14; extra == "security"',
            "urllib3<1.27,>=1.21.1",
            'win-inet-pton; (sys_platform == "win32" and python_version == "2.7") and extra == "socks"',
        )
    }
    assert expected_requires == {
        Requirement.parse(req) for req in requests_info["requires_dists"]
    }
def test_resolve_from_pex_intransitive(
    pex_repository,  # type: str
    py27,  # type: PythonInterpreter
    py36,  # type: PythonInterpreter
    foreign_platform,  # type: Platform
    manylinux,  # type: Optional[str]
):
    # type: (...) -> None
    """An intransitive resolve yields exactly the named project once per target."""
    resolved = resolve_from_pex(
        pex=pex_repository,
        requirements=["requests"],
        transitive=False,
        interpreters=[py27, py36],
        platforms=[foreign_platform],
        manylinux=manylinux,
    )
    assert len(resolved) == 3, "Expected one resolved distribution per distribution target."

    unique_locations = {rd.distribution.location for rd in resolved}
    assert len(unique_locations) == 1, (
        "Expected one underlying resolved universal distribution usable on Linux and macOs by "
        "both Python 2.7 and Python 3.6."
    )

    for rd in resolved:
        assert Requirement.parse("requests==2.25.1") == rd.distribution.as_requirement()
        assert Requirement.parse("requests") == rd.direct_requirement
def test_empty_iteration():
    """An Iterator over a crawl that finds nothing yields no packages."""
    crawler = mock.create_autospec(Crawler, spec_set=True)
    crawler.crawl.return_value = []

    it = Iterator(crawler=crawler)
    assert [] == list(it.iter(Requirement.parse('foo')))

    # Exactly one crawl, over the default PyPI fetcher's urls, without link following.
    assert 1 == len(crawler.crawl.mock_calls)
    _, call_args, call_kwargs = crawler.crawl.mock_calls[0]
    assert list(call_args[0]) == list(PyPIFetcher().urls(Requirement.parse('foo')))
    assert call_kwargs == {'follow_links': False}
def parse_requirement(cls, requirement, default_interpreter='CPython'):
    """Coerce `requirement` into a Requirement.

    Strings are parsed as-is first; failing that, they are retried with
    `default_interpreter` prepended (e.g. '>=2.7' -> 'CPython>=2.7').

    :raises ValueError: If the input is neither a Requirement nor a parseable string.
    """
    if isinstance(requirement, Requirement):
        return requirement
    if isinstance(requirement, string):
        try:
            return Requirement.parse(requirement)
        except ValueError:
            try:
                return Requirement.parse('%s%s' % (default_interpreter, requirement))
            except ValueError:
                raise ValueError('Unknown requirement string: %s' % requirement)
    raise ValueError('Unknown requirement type: %r' % (requirement,))
def test_resolve_overlapping_requirements_discriminated_by_markers_issues_1196(py27):
    # type: (PythonInterpreter) -> None
    """Marker-disjoint requirements for one project resolve to a single distribution."""
    resolved = resolve_multi(
        requirements=[
            "setuptools<45; python_full_version == '2.7.*'",
            "setuptools; python_version > '2.7'",
        ],
        interpreters=[py27],
    )
    assert len(resolved) == 1

    only = resolved[0]
    assert (
        Requirement.parse("setuptools<45; python_full_version == '2.7.*'")
        == only.direct_requirement
    )
    assert Requirement.parse("setuptools==44.1.1") == only.distribution.as_requirement()
def to_requirement(self, dist):
    """Convert `dist` to a requirement carrying any applicable environment markers.

    Markers come from two sources: the distribution's own `Requires-Python` metadata
    (translated to `python_version` markers) and any markers recorded for this
    requirement's key in `self._markers_by_requirement_key`.
    """
    req = dist.as_requirement()

    markers = OrderedSet()
    # Here we map any wheel python requirement to the equivalent environment marker:
    # See:
    # + https://www.python.org/dev/peps/pep-0345/#requires-python
    # + https://www.python.org/dev/peps/pep-0508/#environment-markers
    python_requires = dist_metadata.requires_python(dist)
    if python_requires:
        # Sorting the marker strings keeps the resulting marker set deterministic.
        markers.update(
            Marker(python_version)
            for python_version in sorted(
                'python_version {operator} {version!r}'.format(
                    operator=specifier.operator, version=specifier.version
                )
                for specifier in python_requires
            )
        )

    markers.update(self._markers_by_requirement_key.get(req.key, ()))

    if not markers:
        return req

    if len(markers) == 1:
        marker = next(iter(markers))
        req.marker = marker
        return req

    # We may have resolved with multiple paths to the dependency represented by dist and at least
    # two of those paths had (different) conditional requirements for dist based on environment
    # marker predicates. In that case, since the pip resolve succeeded, the implication is that the
    # environment markers are compatible; i.e.: their intersection selects the target interpreter.
    # Here we make that intersection explicit.
    # See: https://www.python.org/dev/peps/pep-0508/#grammar
    marker = ' and '.join('({})'.format(marker) for marker in markers)
    return Requirement.parse('{}; {}'.format(req, marker))
def resolve(self):
    # type: () -> Iterable[Distribution]
    """Resolve (and memoize) the distributions for this PEX's requirements."""
    if self._resolved_dists is None:
        requirements = [Requirement.parse(req) for req in self._pex_info.requirements]
        self._resolved_dists = self.resolve_dists(requirements)
    return self._resolved_dists
def test_line_types(flag_separator):
    """Plain and flagged requirement lines both parse as ResolvableRequirements."""
    content = dedent("""
simple_requirement
specific_requirement==2
--allow-external%sspecific_requirement
""" % flag_separator)
    reqs = requirements_from_lines(content.splitlines())
    assert len(reqs) == 2

    # simple_requirement
    simple = reqs[0]
    assert isinstance(simple, ResolvableRequirement)
    assert simple.requirement == Requirement.parse('simple_requirement')
    assert not simple.options._allow_external

    # specific_requirement
    specific = reqs[1]
    assert isinstance(specific, ResolvableRequirement)
    assert specific.requirement == Requirement.parse('specific_requirement==2')
    assert specific.options._allow_external
def _markers_by_requirement(stdout):
    """Map requirement keys to the set of environment markers seen for each.

    `stdout` is expected to be a UTF-8 JSON list of requirement strings; requirements
    without markers are skipped.
    """
    requirement_strings = json.loads(stdout.decode('utf-8'))
    markers_by_key = defaultdict(OrderedSet)
    for requirement_string in requirement_strings:
        parsed = Requirement.parse(requirement_string)
        if parsed.marker:
            markers_by_key[parsed.key].add(parsed.marker)
    return markers_by_key
def test_requires_dists():
    # type: () -> None
    """requires_dists finds the same requirements via a wheel path or a Distribution."""
    with example_distribution("aws_cfn_bootstrap-1.4-py2-none-any.whl") as (wheel_path, dist):
        expected = [
            Requirement.parse(req)
            for req in ("python-daemon>=1.5.2,<2.0", "pystache>=0.4.0", "setuptools")
        ]
        assert list(requires_dists(wheel_path)) == expected
        assert list(requires_dists(dist)) == expected
def _try_parse_pip_local_project_marker(path):
    # type: (str) -> Tuple[str, Optional[Marker]]
    """Split a pip local-project path into its bare path and any trailing marker.

    If the basename parses as a requirement, the marker is stripped from the path;
    otherwise the path is returned unchanged with no marker.
    """
    candidate = os.path.basename(path)
    try:
        parsed = Requirement.parse(candidate)
    except (RequirementParseError, ValueError):
        return path, None
    return os.path.join(os.path.dirname(path), parsed.name), parsed.marker
def _activate(self):
    # type: () -> WorkingSet
    """Activate this PEX's distributions on sys.path and return the working set.

    Handles exploding non zip-safe zipped PEXes, resolves this PEX's requirements
    against its internal cache and splices each activated distribution into sys.path
    per the configured inherit_path policy.
    """
    pex_file = os.path.realpath(self._pex)

    self._update_candidate_distributions(self._load_internal_cache(pex_file, self._pex_info))

    is_zipped_pex = os.path.isfile(pex_file)
    if not self._pex_info.zip_safe and is_zipped_pex:
        explode_dir = self._force_local(pex_file=pex_file, pex_info=self._pex_info)
        # Force subsequent imports to come from the exploded .pex directory rather than the .pex file.
        TRACER.log("Adding exploded non zip-safe pex to the head of sys.path: %s" % explode_dir)
        # Drop any sys.path entry that is the zipped pex itself before prepending the explosion.
        sys.path[:] = [path for path in sys.path if pex_file != os.path.realpath(path)]
        sys.path.insert(0, explode_dir)
        self._update_module_paths(pex_file=pex_file)
    elif not any(pex_file == os.path.realpath(path) for path in sys.path):
        TRACER.log(
            "Adding pex %s to the head of sys.path: %s"
            % ("file" if is_zipped_pex else "dir", pex_file)
        )
        sys.path.insert(0, pex_file)

    all_reqs = [Requirement.parse(req) for req in self._pex_info.requirements]
    working_set = WorkingSet([])
    resolved = self._resolve(working_set, all_reqs)

    for dist in resolved:
        with TRACER.timed("Activating %s" % dist, V=2):
            working_set.add(dist)

            if self._inherit_path == InheritPath.FALLBACK:
                # Prepend location to sys.path.
                #
                # This ensures that bundled versions of libraries will be used before system-installed
                # versions, in case something is installed in both, helping to favor hermeticity in
                # the case of non-hermetic PEX files (i.e. those with inherit_path=True).
                #
                # If the path is not already in sys.path, site.addsitedir will append (not prepend)
                # the path to sys.path. But if the path is already in sys.path, site.addsitedir will
                # leave sys.path unmodified, but will do everything else it would do. This is not part
                # of its advertised contract (which is very vague), but has been verified to be the
                # case by inspecting its source for both cpython 2.7 and cpython 3.7.
                sys.path.insert(0, dist.location)
            else:
                sys.path.append(dist.location)

            with TRACER.timed("Adding sitedir", V=2):
                site.addsitedir(dist.location)
    return working_set
def _try_parse_pip_local_formats(
    path,  # type: str
    basepath=None,  # type: Optional[str]
):
    # type: (...) -> Optional[ProjectNameExtrasAndMarker]
    """Try to interpret `path` as a Pip-style local path with optional requirement suffixes.

    Splits the basename into a directory/archive name plus any trailing extras, version
    specifier or environment marker. Returns the absolute stripped path with the parsed
    extras/marker, or None when the stripped path does not exist on disk or the suffix
    fails to parse as requirement syntax.
    """
    project_requirement = os.path.basename(path)
    # Requirements strings can optionally include:
    REQUIREMENT_PARTS_START = (
        # + Trailing extras denoted by `[...]`.
        #   See: https://www.python.org/dev/peps/pep-0508/#extras
        r"\[",
        # + A version specifier denoted by a leading `!=`, `==`, `===`, `>=`, `<=` or `~=`.
        #   See: https://www.python.org/dev/peps/pep-0508/#grammar
        r"!=><~",
        # + Environment markers denoted by `;...`
        #   See: https://www.python.org/dev/peps/pep-0508/#environment-markers
        r";",
    )
    # N.B.: The basename of the current directory (.) is '' and we allow this.
    match = re.match(
        r"""
        ^
        (?P<directory_name>[^{REQUIREMENT_PARTS_START}]*)?
        (?P<requirement_parts>.*)?
        $
        """.format(
            REQUIREMENT_PARTS_START="".join(REQUIREMENT_PARTS_START)
        ),
        project_requirement,
        re.VERBOSE,
    )
    if not match:
        return None

    directory_name, requirement_parts = match.groups()
    stripped_path = os.path.join(os.path.dirname(path), directory_name)
    abs_stripped_path = (
        os.path.join(basepath, stripped_path) if basepath else os.path.abspath(stripped_path)
    )
    if not os.path.exists(abs_stripped_path):
        return None  # Maybe a local archive or project path.

    requirement_parts = match.group("requirement_parts")
    if not requirement_parts:
        return ProjectNameExtrasAndMarker.create(abs_stripped_path)

    # Graft the suffix onto a placeholder project name so it can be parsed as a requirement.
    project_requirement = "fake_project{}".format(requirement_parts)
    try:
        req = Requirement.parse(project_requirement)
        return ProjectNameExtrasAndMarker.create(
            abs_stripped_path, extras=req.extras, marker=req.marker
        )
    except (RequirementParseError, ValueError):
        return None
def test_requires_dists():
    # type: () -> None
    """requires_dists surfaces the wheel's declared Requires-Dist entries, in order."""
    with example_distribution("aws_cfn_bootstrap-1.4-py2-none-any.whl") as dist:
        expected = [
            Requirement.parse(req)
            for req in (
                "python-daemon>=1.5.2,<2.0",
                "pystache>=0.4.0",
                "setuptools",
            )
        ]
        assert list(requires_dists(dist)) == expected
def _try_parse_fragment_project_name_and_marker(fragment):
    # type: (str) -> Optional[ProjectNameExtrasAndMarker]
    """Extract project name, extras and marker from a URL fragment's `egg=` parameter.

    Returns None when the fragment carries no `egg=` parameter. When the egg value is
    not full requirement syntax, it is treated as a bare project name.
    """
    egg_value = None
    for param in fragment.split("&"):
        if param.startswith("egg="):
            _, egg_value = param.split("=", 1)
            break
    if egg_value is None:
        return None
    try:
        parsed = Requirement.parse(egg_value)
    except (RequirementParseError, ValueError):
        return ProjectNameExtrasAndMarker.create(egg_value)
    return ProjectNameExtrasAndMarker.create(
        parsed.name, extras=parsed.extras, marker=parsed.marker
    )
def assert_iteration(all_versions, *expected_versions, **iterator_kwargs):
    """Assert that iterating a mocked crawl of `all_versions` yields `expected_versions`."""
    def package_url(version):
        return 'https://pypi.org/packages/source/p/pex/pex-%s.tar.gz' % version

    crawler = mock.create_autospec(Crawler, spec_set=True)
    crawler.crawl.return_value = [package_url(v) for v in all_versions]

    it = Iterator(crawler=crawler, follow_links=True, **iterator_kwargs)
    expected = [SourcePackage(package_url(v)) for v in expected_versions]
    assert expected == list(it.iter(Requirement.parse('pex')))

    assert 1 == len(crawler.crawl.mock_calls)
    _, _, crawl_kwargs = crawler.crawl.mock_calls[0]
    assert crawl_kwargs == {'follow_links': True}
def _try_parse_fragment_project_name_and_marker(fragment):
    # type: (str) -> Tuple[Optional[str], Optional[Marker]]
    """Extract (project_name, marker) from a URL fragment's `egg=` parameter.

    Returns (None, None) when no `egg=` parameter is present; when the egg value is
    not full requirement syntax it is returned verbatim with no marker.
    """
    egg_value = None
    for param in fragment.split("&"):
        if param.startswith("egg="):
            _, egg_value = param.split("=", 1)
            break
    if egg_value is None:
        return None, None
    try:
        parsed = Requirement.parse(egg_value)
    except (RequirementParseError, ValueError):
        return egg_value, None
    return parsed.name, parsed.marker
def requires_dists(dist):
    # type: (DistributionLike) -> Iterator[Requirement]
    """Examines dist for and returns any declared requirements.

    Looks for `Requires-Dist` metadata.

    The older `Requires` metadata is intentionally ignored, although we do log a warning if it
    is found to draw attention to this ~work-around and the associated issue in case any new
    data comes in.

    See:
    + https://www.python.org/dev/peps/pep-0345/#requires-dist-multiple-use
    + https://www.python.org/dev/peps/pep-0314/#requires-multiple-use

    :param dist: A distribution to check for requirement metadata.
    :return: All requirements found.
    """
    pkg_info = _parse_pkg_info(dist)
    if pkg_info is None:
        return

    for requires_dist in pkg_info.get_all("Requires-Dist", ()):
        yield Requirement.parse(requires_dist)

    # Legacy PEP-314 `Requires` fields are not yielded, only warned about.
    legacy_requires = pkg_info.get_all("Requires", [])  # type: List[str]
    if legacy_requires:
        name_and_version = project_name_and_version(dist)
        project_name = name_and_version.project_name if name_and_version else dist
        pex_warnings.warn(
            dedent(
                """\
                Ignoring {count} `Requires` {field} in {dist} metadata:
                {requires}

                You may have issues using the '{project_name}' distribution as a result.
                More information on this workaround can be found here:
                  https://github.com/pantsbuild/pex/issues/1201#issuecomment-791715585
                """
            ).format(
                dist=dist,
                project_name=project_name,
                count=len(legacy_requires),
                field=pluralize(legacy_requires, "field"),
                requires=os.linesep.join(
                    "{index}.) Requires: {req}".format(index=index, req=req)
                    for index, req in enumerate(legacy_requires, start=1)
                ),
            )
        )
def test_source_packages():
    """SourcePackage parses names/versions from archive filenames and checks satisfaction."""
    for ext in ('.tar.gz', '.tar', '.tgz', '.zip', '.tar.bz2'):
        pkg = SourcePackage('a_p_r-3.1.3' + ext)
        assert pkg._name == 'a_p_r'
        assert pkg.name == 'a-p-r'
        assert pkg.raw_version == '3.1.3'
        assert pkg.version == parse_version(pkg.raw_version)

        satisfied = ('a_p_r', 'a_p_r>2', 'a_p_r>3', 'a_p_r>=3.1.3', 'a_p_r==3.1.3', 'a_p_r>3,<3.5')
        for req in satisfied:
            assert pkg.satisfies(req)
            assert pkg.satisfies(Requirement.parse(req))

        unsatisfied = ('foo', 'a_p_r==4.0.0', 'a_p_r>4.0.0', 'a_p_r>3.0.0,<3.0.3', 'a==3.1.3')
        for req in unsatisfied:
            assert not pkg.satisfies(req)

    pkg = SourcePackage('python-dateutil-1.5.tar.gz')
    assert pkg.name == 'python-dateutil'
    assert pkg.raw_version == '1.5'
def parse_requirement_from_project_name_and_specifier(
    project_name,  # type: str
    extras=None,  # type: Optional[Iterable[str]]
    specifier=None,  # type: Optional[SpecifierSet]
    marker=None,  # type: Optional[Marker]
):
    # type: (...) -> Requirement
    """Assemble a PEP-508 requirement string from its parts and parse it."""
    extras_clause = "[{extras}]".format(extras=", ".join(extras)) if extras else ""
    requirement_string = "{project_name}{extras}{specifier}".format(
        project_name=project_name,
        extras=extras_clause,
        specifier=specifier or SpecifierSet(),
    )
    if marker:
        requirement_string += ";" + str(marker)
    return Requirement.parse(requirement_string)
def assert_iteration(all_versions, *expected_versions, **iterator_kwargs):
    """Check that iterating a mocked crawl over `all_versions` yields exactly
    `expected_versions`, with links followed."""
    base = 'https://pypi.org/packages/source/p/pex/pex-%s.tar.gz'

    def package_url(version):
        return base % version

    crawler = mock.create_autospec(Crawler, spec_set=True)
    crawler.crawl.return_value = [package_url(v) for v in all_versions]

    it = Iterator(crawler=crawler, follow_links=True, **iterator_kwargs)
    actual = list(it.iter(Requirement.parse('pex')))
    assert actual == [SourcePackage(package_url(v)) for v in expected_versions]

    assert len(crawler.crawl.mock_calls) == 1
    _, _, crawl_kwargs = crawler.crawl.mock_calls[0]
    assert crawl_kwargs == {'follow_links': True}
def _activate(self):
    """Activate this PEX's distributions on sys.path and return the working set.

    Explodes non zip-safe zipped PEXes, resolves this PEX's requirements against its
    internal cache, splices each distribution into sys.path per the inherit_path policy
    and finally declares namespace packages once sys.path is complete.
    """
    self.update_candidate_distributions(self.load_internal_cache(self._pex, self._pex_info))

    if not self._pex_info.zip_safe and os.path.isfile(self._pex):
        explode_dir = self.force_local(pex_file=self._pex, pex_info=self._pex_info)
        self.update_module_paths(pex_file=self._pex, explode_dir=explode_dir)

    all_reqs = [Requirement.parse(req) for req in self._pex_info.requirements]
    working_set = WorkingSet([])
    resolved = self._resolve(working_set, all_reqs)

    for dist in resolved:
        with TRACER.timed('Activating %s' % dist, V=2):
            working_set.add(dist)

            if self._inherit_path == "fallback":
                # Prepend location to sys.path.
                #
                # This ensures that bundled versions of libraries will be used before system-installed
                # versions, in case something is installed in both, helping to favor hermeticity in
                # the case of non-hermetic PEX files (i.e. those with inherit_path=True).
                #
                # If the path is not already in sys.path, site.addsitedir will append (not prepend)
                # the path to sys.path. But if the path is already in sys.path, site.addsitedir will
                # leave sys.path unmodified, but will do everything else it would do. This is not part
                # of its advertised contract (which is very vague), but has been verified to be the
                # case by inspecting its source for both cpython 2.7 and cpython 3.7.
                sys.path.insert(0, dist.location)
            else:
                sys.path.append(dist.location)

            with TRACER.timed('Adding sitedir', V=2):
                site.addsitedir(dist.location)

    # Delay calling 'self._declare_namespace_packages' until 'sys.path' contains all of the
    # resolved dists.
    for dist in resolved:
        self._declare_namespace_packages(dist)

    return working_set
def to_requirement(self, dist):
    """Convert `dist` to a requirement, attaching any environment markers recorded for
    its key during the resolve."""
    req = dist.as_requirement()
    markers = self._markers_by_requirement_key.get(req.key)
    if not markers:
        return req

    if len(markers) == 1:
        req.marker = next(iter(markers))
        return req

    # Here we have a resolve with multiple paths to the dependency represented by dist. At least
    # two of those paths had (different) conditional requirements for dist based on environment
    # marker predicates. Since the pip resolve succeeded, the implication is that the environment
    # markers are compatible; i.e.: their intersection selects the target interpreter. Here we
    # make that intersection explicit.
    # See: https://www.python.org/dev/peps/pep-0496/#micro-language
    combined = ' and '.join('({})'.format(marker) for marker in markers)
    return Requirement.parse('{}; {}'.format(req, combined))
def matches_requirement(req, wheels):
    """List of wheels matching a requirement.

    :param req: The requirement to satisfy
    :param wheels: List of wheels to search.
    """
    try:
        from pex.third_party.pkg_resources import Distribution, Requirement
    except ImportError:
        raise RuntimeError("Cannot use requirements without pkg_resources")

    requirement = Requirement.parse(req)
    matched = []
    for wheel_file in wheels:
        parsed = wheel_file.parsed_filename
        candidate = Distribution(project_name=parsed.group("name"), version=parsed.group("ver"))
        # Requirement.__contains__ checks both project name and version specifier.
        if candidate in requirement:
            matched.append(wheel_file)
    return matched
def _activate(self):
    """Activate this PEX's distributions on sys.path and return the working set.

    Explodes non zip-safe zipped PEXes, resolves this PEX's requirements against its
    internal cache and splices each distribution into sys.path per the inherit_path
    policy before declaring namespace packages.
    """
    self.update_candidate_distributions(self.load_internal_cache(self._pex, self._pex_info))

    if not self._pex_info.zip_safe and os.path.isfile(self._pex):
        explode_dir = self.force_local(pex_file=self._pex, pex_info=self._pex_info)
        self.update_module_paths(pex_file=self._pex, explode_dir=explode_dir)

    all_reqs = [Requirement.parse(req) for req in self._pex_info.requirements]
    working_set = WorkingSet([])
    resolved = self._resolve(working_set, all_reqs)

    for dist in resolved:
        with TRACER.timed('Activating %s' % dist, V=2):
            working_set.add(dist)

            if self._inherit_path == "fallback":
                # Prepend location to sys.path.
                #
                # This ensures that bundled versions of libraries will be used before system-installed
                # versions, in case something is installed in both, helping to favor hermeticity in
                # the case of non-hermetic PEX files (i.e. those with inherit_path=True).
                #
                # If the path is not already in sys.path, site.addsitedir will append (not prepend)
                # the path to sys.path. But if the path is already in sys.path, site.addsitedir will
                # leave sys.path unmodified, but will do everything else it would do. This is not part
                # of its advertised contract (which is very vague), but has been verified to be the
                # case by inspecting its source for both cpython 2.7 and cpython 3.7.
                sys.path.insert(0, dist.location)
            else:
                sys.path.append(dist.location)

            with TRACER.timed('Adding sitedir', V=2):
                site.addsitedir(dist.location)

    self._declare_namespace_packages(resolved)

    return working_set
def test_resolve_from_pex_ignore_errors(
    pex_repository,  # type: str
    py27,  # type: PythonInterpreter
):
    # type: (...) -> None
    # See test_resolve_from_pex_constraints above for the failure this would otherwise cause.
    resolved = resolve_from_pex(
        pex=pex_repository,
        requirements=["requests"],
        constraint_files=[create_constraints_file("urllib3==1.26.2")],
        interpreters=[py27],
        ignore_errors=True,
    )
    requirements_by_key = {
        rd.distribution.key: rd.distribution.as_requirement() for rd in resolved
    }
    assert len(requirements_by_key) > 1, "We should resolve at least requests and urllib3"
    assert "requests" in requirements_by_key
    assert Requirement.parse("urllib3==1.26.1") == requirements_by_key["urllib3"]
def _parse_requirement_line(
    line,  # type: LogicalLine
    basepath=None,  # type: Optional[str]
):
    # type: (...) -> ReqInfo
    """Parse a single logical requirement line into a ReqInfo.

    Tries, in order: Pip-proprietary URL requirements, Pip-proprietary local archive /
    project paths, and finally standard PEP-440/PEP-508 requirement strings.

    :raises ParseError: If the line cannot be parsed as any supported requirement form.
    """
    basepath = basepath or os.getcwd()

    editable, processed_text = _strip_requirement_options(line)

    # Handle urls (Pip proprietary).
    parsed_url = urlparse.urlparse(processed_text)
    if _is_recognized_pip_url_scheme(parsed_url.scheme):
        project_name, marker = _try_parse_fragment_project_name_and_marker(parsed_url.fragment)
        if not project_name:
            project_name = _try_parse_project_name_from_path(parsed_url.path)
        url = parsed_url._replace(fragment="").geturl()
        return ReqInfo.create(
            line, project_name=project_name, url=url, marker=marker, editable=editable
        )

    # Handle local archives and project directories (Pip proprietary). A bare directory only
    # counts if it looks like a project root (contains setup.py or pyproject.toml).
    maybe_abs_path, marker = _try_parse_pip_local_formats(processed_text, basepath=basepath)
    if maybe_abs_path is not None and any(
        os.path.isfile(os.path.join(maybe_abs_path, *p))
        for p in ((), ("setup.py", ), ("pyproject.toml", ))
    ):
        archive_or_project_path = os.path.realpath(maybe_abs_path)
        is_local_project = os.path.isdir(archive_or_project_path)
        project_name = (
            None
            if is_local_project
            else _try_parse_project_name_from_path(archive_or_project_path)
        )
        return ReqInfo.create(
            line,
            project_name=project_name,
            url=archive_or_project_path,
            marker=marker,
            editable=editable,
            is_local_project=is_local_project,
        )

    # Handle PEP-440. See: https://www.python.org/dev/peps/pep-0440.
    #
    # The `pkg_resources.Requirement.parse` method does all of this for us (via
    # `packaging.requirements.Requirement`) except for the handling of PEP-440 direct url
    # references; so we strip those urls out first.
    requirement, direct_reference_url = _split_direct_references(processed_text)
    try:
        req = Requirement.parse(requirement)
        return ReqInfo.create(
            line,
            project_name=req.name,
            url=direct_reference_url or req.url,
            marker=req.marker,
            editable=editable,
        )
    except RequirementParseError as e:
        raise ParseError(
            line, "Problem parsing {!r} as a requirement: {}".format(processed_text, e)
        )
def maybe_requirement(req):
    """Coerce `req` into a Requirement, accepting duck-typed requirements and strings.

    :raises ValueError: If `req` is none of the above.
    """
    if isinstance(req, Requirement) or quacks_like_req(req):
        return req
    if isinstance(req, compatibility_string):
        return Requirement.parse(req)
    raise ValueError('Unknown requirement %r' % (req,))
def _parse_requirement(req):
    """Normalize `req` by round-tripping its string form through Requirement.parse."""
    requirement_string = str(req)
    return Requirement.parse(requirement_string)
def _parse_requirement(req):
    # type: (Union[str, Requirement]) -> Requirement
    """Return `req` as a freshly parsed Requirement, stringifying Requirement inputs first."""
    if isinstance(req, Requirement):
        return Requirement.parse(str(req))
    return Requirement.parse(req)
def _resolve(self, working_set, reqs):
    """Resolve `reqs` against this PEX's contained distributions into `working_set`.

    Requirements whose markers de-select the target interpreter are skipped. Each
    remaining key is resolved independently (bare, without version specifiers) so that
    interpreter-incompatible entries can be elided. Unless ignore_errors is set, any
    unresolved requirements abort execution with a detailed report.
    """
    environment = self._target_interpreter_env.copy()
    # NOTE(review): "extra" is set to a *list* of all extras in play; marker evaluation
    # against it appears intentional here — confirm against packaging's marker semantics.
    environment["extra"] = list(set(itertools.chain(*(req.extras for req in reqs))))

    reqs_by_key = OrderedDict()
    for req in reqs:
        if req.marker and not req.marker.evaluate(environment=environment):
            TRACER.log(
                "Skipping activation of `%s` due to environment marker de-selection" % req
            )
            continue
        reqs_by_key.setdefault(req.key, []).append(req)

    unresolved_reqs = OrderedDict()
    resolveds = OrderedSet()

    # Resolve them one at a time so that we can figure out which ones we need to elide should
    # there be an interpreter incompatibility.
    for key, reqs in reqs_by_key.items():
        with TRACER.timed("Resolving {} from {}".format(key, reqs), V=2):
            # N.B.: We resolve the bare requirement with no version specifiers since the resolve process
            # used to build this pex already did so. There may be multiple distributions satisfying any
            # particular key (e.g.: a Python 2 specific version and a Python 3 specific version for a
            # multi-python PEX) and we want the working set to pick the most appropriate one.
            req = Requirement.parse(key)
            try:
                resolveds.update(working_set.resolve([req], env=self))
            except DistributionNotFound as e:
                TRACER.log("Failed to resolve a requirement: %s" % e)
                requirers = unresolved_reqs.setdefault(e.req, OrderedSet())
                if e.requirers:
                    for requirer in e.requirers:
                        requirers.update(reqs_by_key[requirer])

    if unresolved_reqs:
        TRACER.log("Unresolved requirements:")
        for req in unresolved_reqs:
            TRACER.log("  - %s" % req)

        TRACER.log("Distributions contained within this pex:")
        distributions_by_key = defaultdict(list)
        if not self._pex_info.distributions:
            TRACER.log("  None")
        else:
            for dist_name, dist_digest in self._pex_info.distributions.items():
                TRACER.log("  - %s" % dist_name)
                distribution = DistributionHelper.distribution_from_path(
                    path=os.path.join(self._pex_info.install_cache, dist_digest, dist_name)
                )
                distributions_by_key[distribution.as_requirement().key].append(distribution)

        if not self._pex_info.ignore_errors:
            # Build a numbered report of each unresolved requirement, its requirers and the
            # candidate distributions this pex does contain, then die.
            items = []
            for index, (requirement, requirers) in enumerate(unresolved_reqs.items()):
                rendered_requirers = ""
                if requirers:
                    rendered_requirers = ("\n    Required by:" "\n      {requirers}").format(
                        requirers="\n      ".join(map(str, requirers))
                    )
                items.append(
                    "{index: 2d}: {requirement}"
                    "{rendered_requirers}"
                    "\n    But this pex only contains:"
                    "\n      {distributions}".format(
                        index=index + 1,
                        requirement=requirement,
                        rendered_requirers=rendered_requirers,
                        distributions="\n      ".join(
                            os.path.basename(d.location)
                            for d in distributions_by_key[requirement.key]
                        ),
                    )
                )

            die(
                "Failed to execute PEX file. Needed {platform} compatible dependencies for:\n{items}".format(
                    platform=self._interpreter.platform, items="\n".join(items)
                )
            )

    return resolveds
def parse_requirement_arg(spec):
    """Parse `spec` as a requirement, converting parse failures to DistutilsError."""
    try:
        return Requirement.parse(spec)
    except ValueError:
        raise DistutilsError(
            "Not a URL, existing file, or requirement spec: %r" % (spec,)
        )