def find_compatible_interpreters(pex_python_path=None, compatibility_constraints=None):
    """Find all compatible interpreters on the system within the supplied constraints and use
    PEX_PYTHON_PATH if it is set. If not, fall back to interpreters on $PATH.
    """
    if pex_python_path:
        interpreters = []
        for binary in pex_python_path.split(os.pathsep):
            try:
                interpreters.append(PythonInterpreter.from_binary(binary))
            except Executor.ExecutionError:
                print("Python interpreter %s in PEX_PYTHON_PATH failed to load properly." % binary,
                      file=sys.stderr)
        if not interpreters:
            die('PEX_PYTHON_PATH was defined, but no valid interpreters could be identified. '
                'Exiting.')
    else:
        # We may have been invoked with a specific interpreter not on the $PATH, make sure our
        # sys.executable is included as a candidate in this case.
        interpreters = OrderedSet([PythonInterpreter.get()])

        # Add all qualifying interpreters found in $PATH.
        interpreters.update(PythonInterpreter.all())

    return list(
        matched_interpreters(interpreters, compatibility_constraints)
        if compatibility_constraints
        else interpreters
    )
def _iter_pex_python(pex_python):
    def try_create(try_path):
        try:
            return PythonInterpreter.from_binary(try_path)
        except Executor.ExecutionError:
            return None

    interpreter = try_create(pex_python)
    if interpreter:
        # If the target interpreter specified in PEX_PYTHON is an existing absolute path - use it.
        yield interpreter
    else:
        # Otherwise scan the PATH for matches:
        try_paths = OrderedSet(
            os.path.realpath(os.path.join(directory, pex_python))
            for directory in os.getenv('PATH', '').split(os.pathsep))

        # Prefer the current interpreter if present in the `path`.
        current_interpreter = PythonInterpreter.get()
        if current_interpreter.binary in try_paths:
            try_paths.remove(current_interpreter.binary)
            yield current_interpreter

        for try_path in try_paths:
            interpreter = try_create(try_path)
            if interpreter:
                yield interpreter
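# A minimal standalone sketch of the PATH-scanning fallback above, using only the
# stdlib: given a bare interpreter name such as "python3.8", collect each candidate
# absolute path on $PATH and prefer the currently running interpreter when it
# matches. The helper name is illustrative, not part of pex.
import os
import sys


def iter_candidate_binaries(name):
    seen = []
    for directory in os.getenv('PATH', '').split(os.pathsep):
        candidate = os.path.realpath(os.path.join(directory, name))
        if candidate not in seen:
            seen.append(candidate)
    current = os.path.realpath(sys.executable)
    if current in seen:
        # Prefer the running interpreter, mirroring _iter_pex_python.
        seen.remove(current)
        seen.insert(0, current)
    return seen


if __name__ == '__main__':
    for candidate_path in iter_candidate_binaries('python3'):
        print(candidate_path)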
def _iter_interpreters():
    seen = set()

    paths = None
    current_interpreter = PythonInterpreter.get()
    if path:
        paths = OrderedSet(os.path.realpath(p) for p in path.split(os.pathsep))

        # Prefer the current interpreter if present on the `path`. Only remove the candidate
        # paths actually present, since removing a missing entry would raise a KeyError.
        candidate_paths = frozenset(
            (current_interpreter.binary, os.path.dirname(current_interpreter.binary)))
        candidate_paths_in_path = candidate_paths.intersection(paths)
        if candidate_paths_in_path:
            for p in candidate_paths_in_path:
                paths.remove(p)
            seen.add(current_interpreter)
            yield current_interpreter
    else:
        # We may have been invoked with a specific interpreter, make sure our sys.executable is
        # included as a candidate in this case.
        seen.add(current_interpreter)
        yield current_interpreter

    for interp in PythonInterpreter.iter(paths=paths):
        if interp not in seen:
            seen.add(interp)
            yield interp
def test_backwards_incompatible_pex_info():
    # type: () -> None
    def make_pex_info(requirements):
        # type: (List[Text]) -> PexInfo
        return PexInfo(info={"requirements": requirements})

    # forwards compatibility
    pi = make_pex_info(["hello"])
    assert pi.requirements == OrderedSet(["hello"])

    pi = make_pex_info(["hello==0.1", "world==0.2"])
    assert pi.requirements == OrderedSet(["hello==0.1", "world==0.2"])

    # malformed
    with pytest.raises(ValueError):
        make_pex_info("hello")  # type: ignore[arg-type]

    with pytest.raises(ValueError):
        make_pex_info([("hello", False)])  # type: ignore[list-item]

    # backwards compatibility
    pi = make_pex_info(
        [
            ["hello==0.1", False, None],  # type: ignore[list-item]
            ["world==0.2", False, None],  # type: ignore[list-item]
        ]
    )
    assert pi.requirements == OrderedSet(["hello==0.1", "world==0.2"])
def record_unresolved(dist_not_found):
    # type: (_DistributionNotFound) -> None
    TRACER.log("Failed to resolve a requirement: {}".format(dist_not_found.requirement))
    requirers = unresolved_reqs.get(dist_not_found.requirement)
    if requirers is None:
        requirers = OrderedSet()
        unresolved_reqs[dist_not_found.requirement] = requirers
    if dist_not_found.required_by:
        requirers.add(dist_not_found.required_by)
def test_output_multiple_targets_one_source_overlapping(self):
    #   target1                  target2
    #  source="some/file.py"    source="some/file.py"
    #      /                       /
    #    dep                     dep
    dep_address = FileDepsTest.make_build_target_address("dep/target")
    dep_target = self.mock_hydrated_target(dep_address, {"dep/file.py": ""}, ())

    target_address1 = FileDepsTest.make_build_target_address("some/target")
    hydrated_target1 = self.mock_hydrated_target(target_address1, {"some/file.py": ""}, (dep_target,))

    target_address2 = FileDepsTest.make_build_target_address("some/target")
    hydrated_target2 = self.mock_hydrated_target(target_address2, {"some/file.py": ""}, (dep_target,))

    transitive_targets = TransitiveHydratedTargets(
        (hydrated_target1, hydrated_target2),
        OrderedSet([hydrated_target1, hydrated_target2, dep_target]))

    self.filedeps_rule_test(
        transitive_targets,
        dedent('''\
            some/target/BUILD
            some/file.py
            dep/target/BUILD
            dep/file.py
            '''))
def test_output_one_target_one_source_with_dep(self):
    dep_address = FileDepsTest.make_build_target_address("dep/target")
    dep_target = self.mock_hydrated_target(dep_address, {"dep/file.py": ""}, ())

    target_address = FileDepsTest.make_build_target_address("some/target")
    hydrated_target = self.mock_hydrated_target(
        target_address, {"some/file.py": ""}, (dep_target,)
    )

    transitive_targets = TransitiveHydratedTargets(
        (hydrated_target,), OrderedSet([hydrated_target, dep_target])
    )

    self.filedeps_rule_test(
        transitive_targets,
        dedent(
            '''\
            some/target/BUILD
            some/file.py
            dep/target/BUILD
            dep/file.py
            ''')
    )
def __init__(self, info=None):
    """Construct a new PexInfo.

    This should not be used directly.
    """
    if info is not None and not isinstance(info, dict):
        raise ValueError('PexInfo can only be seeded with a dict, got: '
                         '%s of type %s' % (info, type(info)))
    self._pex_info = info or {}
    if 'inherit_path' in self._pex_info:
        self.inherit_path = self._pex_info['inherit_path']
    self._distributions = self._pex_info.get('distributions', {})
    # cast as set because pex info from json must store interpreter_constraints as a list
    self._interpreter_constraints = set(self._pex_info.get('interpreter_constraints', set()))
    requirements = self._pex_info.get('requirements', [])
    if not isinstance(requirements, (list, tuple)):
        raise ValueError('Expected requirements to be a list, got %s' % type(requirements))
    self._requirements = OrderedSet(self._parse_requirement_tuple(req) for req in requirements)
def filter(cls, pythons):
    """Given a map of python interpreters in the format provided by PythonInterpreter.find(),
    filter out duplicate versions and versions we would prefer not to use.

    Returns a list of the preferred interpreters.
    """
    good = []

    MAJOR, MINOR, SUBMINOR = range(3)

    def version_filter(version):
        return (version[MAJOR] == 2 and version[MINOR] >= 7 or
                version[MAJOR] == 3 and version[MINOR] >= 4)

    all_versions = OrderedSet(interpreter.identity.version for interpreter in pythons)
    good_versions = filter(version_filter, all_versions)

    for version in good_versions:
        # For each candidate, use the latest version we find on the filesystem.
        candidates = defaultdict(list)
        for interp in pythons:
            if interp.identity.version == version:
                candidates[interp.identity.interpreter].append(interp)
        for interp_class in candidates:
            candidates[interp_class].sort(
                key=lambda interp: os.path.getmtime(interp.binary), reverse=True)
            good.append(candidates[interp_class].pop(0))

    return good
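# A quick standalone check of the version gate above: CPython 2.7+ and 3.4+ pass,
# anything older is filtered out (indices stand in for the MAJOR/MINOR constants).
def version_filter(version):
    MAJOR, MINOR = 0, 1
    return (version[MAJOR] == 2 and version[MINOR] >= 7 or
            version[MAJOR] == 3 and version[MINOR] >= 4)


assert version_filter((2, 7, 18))
assert version_filter((3, 4, 0))
assert not version_filter((2, 6, 9))
assert not version_filter((3, 3, 7))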
def _iter_interpreters():
    # type: () -> Iterator[InterpreterOrError]
    seen = set()

    normalized_paths = (
        OrderedSet(PythonInterpreter.canonicalize_path(p) for p in path.split(os.pathsep))
        if path
        else None
    )

    # Prefer the current interpreter, if valid.
    current_interpreter = preferred_interpreter or PythonInterpreter.get()
    if not _valid_path or _valid_path(current_interpreter.binary):
        if normalized_paths:
            candidate_paths = frozenset(
                (current_interpreter.binary, os.path.dirname(current_interpreter.binary))
            )
            candidate_paths_in_path = candidate_paths.intersection(normalized_paths)
            if candidate_paths_in_path:
                # In case the full path of the current interpreter binary was in the
                # `normalized_paths` we're searching, remove it to prevent identifying it again
                # just to then skip it as `seen`.
                normalized_paths.discard(current_interpreter.binary)
                seen.add(current_interpreter)
                yield current_interpreter
        else:
            seen.add(current_interpreter)
            yield current_interpreter

    for interp in PythonInterpreter.iter_candidates(paths=normalized_paths, path_filter=_valid_path):
        if interp not in seen:
            seen.add(interp)
            yield interp
def test_copy():
    # type: () -> None
    default_info = PexInfo.default()
    default_info_copy = default_info.copy()
    assert default_info is not default_info_copy
    assert default_info.dump() == default_info_copy.dump()

    info = PexInfo.default()
    info.unzip = True
    info.code_hash = "foo"
    info.inherit_path = InheritPath.FALLBACK
    info.add_requirement("bar==1")
    info.add_requirement("baz==2")
    info.add_distribution("bar.whl", "bar-sha")
    info.add_distribution("baz.whl", "baz-sha")
    info.add_interpreter_constraint(">=2.7.18")
    info.add_interpreter_constraint("CPython==2.7.9")

    info_copy = info.copy()
    assert info_copy.unzip is True
    assert "foo" == info_copy.code_hash
    assert InheritPath.FALLBACK == info_copy.inherit_path
    assert OrderedSet(["bar==1", "baz==2"]) == info_copy.requirements
    assert {"bar.whl": "bar-sha", "baz.whl": "baz-sha"} == info_copy.distributions
    assert {">=2.7.18", "CPython==2.7.9"} == set(info_copy.interpreter_constraints)
    assert info.dump() == info_copy.dump()
def _iter_interpreters():
    # type: () -> Iterator[InterpreterOrError]
    seen = set()

    normalized_paths = (
        OrderedSet(os.path.realpath(p) for p in path.split(os.pathsep)) if path else None
    )

    # Prefer the current interpreter, if valid.
    current_interpreter = PythonInterpreter.get()
    if not _valid_path or _valid_path(current_interpreter.binary):
        if normalized_paths:
            candidate_paths = frozenset(
                (current_interpreter.binary, os.path.dirname(current_interpreter.binary))
            )
            candidate_paths_in_path = candidate_paths.intersection(normalized_paths)
            if candidate_paths_in_path:
                for p in candidate_paths_in_path:
                    normalized_paths.remove(p)
                seen.add(current_interpreter)
                yield current_interpreter
        else:
            seen.add(current_interpreter)
            yield current_interpreter

    for interp in PythonInterpreter.iter_candidates(paths=normalized_paths, path_filter=_valid_path):
        if interp not in seen:
            seen.add(interp)
            yield interp
def run(self):
    if self._installed is not None:
        return self._installed

    with TRACER.timed('Installing %s' % self._install_tmp, V=2):
        env = self._interpreter.sanitized_environment()
        mixins = OrderedSet(['setuptools'] + self.mixins)
        env['PYTHONPATH'] = os.pathsep.join(third_party.expose(mixins))
        env['__PEX_UNVENDORED__'] = '1'

        command = [self._interpreter.binary, '-s', '-'] + self.setup_command()
        try:
            Executor.execute(command,
                             env=env,
                             cwd=self._source_dir,
                             stdin_payload=self.setup_py_wrapper.encode('ascii'))
            self._installed = True
        except Executor.NonZeroExit as e:
            self._installed = False
            name = os.path.basename(self._source_dir)
            print('**** Failed to install %s (caused by: %r\n):' % (name, e), file=sys.stderr)
            print('stdout:\n%s\nstderr:\n%s\n' % (e.stdout, e.stderr), file=sys.stderr)
            return self._installed

    return self._installed
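# A hedged stdlib approximation of the Executor.execute call above (pex's actual
# Executor API differs): run a command in a controlled environment with a payload
# piped to stdin, capturing output and raising on a non-zero exit.
import subprocess


def execute(command, env, cwd, stdin_payload):
    process = subprocess.Popen(
        command, env=env, cwd=cwd,
        stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, stderr = process.communicate(stdin_payload)
    if process.returncode != 0:
        raise RuntimeError(
            '%r failed with exit code %d: %s' % (command, process.returncode, stderr))
    return stdout, stderr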
def __init__(self, info=None):
    # type: (Optional[Mapping[str, Any]]) -> None
    """Construct a new PexInfo.

    This should not be used directly.
    """
    if info is not None and not isinstance(info, dict):
        raise ValueError(
            "PexInfo can only be seeded with a dict, got: %s of type %s" % (info, type(info))
        )
    self._pex_info = dict(info) if info else {}  # type: Dict[str, Any]
    self._distributions = self._pex_info.get("distributions", {})
    # cast as set because pex info from json must store interpreter_constraints as a list
    self._interpreter_constraints = set(self._pex_info.get("interpreter_constraints", set()))
    requirements = self._pex_info.get("requirements", [])
    if not isinstance(requirements, (list, tuple)):
        raise ValueError("Expected requirements to be a list, got %s" % type(requirements))
    self._requirements = OrderedSet(self._parse_requirement_tuple(req) for req in requirements)
def minimum_sys_path(cls, site_libs, inherit_path):
    scrub_paths = OrderedSet()
    site_distributions = OrderedSet()
    user_site_distributions = OrderedSet()

    def all_distribution_paths(path):
        locations = set(dist.location for dist in find_distributions(path))
        return set([path]) | locations | set(os.path.realpath(path) for path in locations)

    for path_element in sys.path:
        if cls._tainted_path(path_element, site_libs):
            TRACER.log('Tainted path element: %s' % path_element)
            site_distributions.update(all_distribution_paths(path_element))
        else:
            TRACER.log('Not a tainted path element: %s' % path_element, V=2)

    user_site_distributions.update(all_distribution_paths(USER_SITE))

    if inherit_path == 'false':
        scrub_paths = site_distributions | user_site_distributions
        for path in user_site_distributions:
            TRACER.log('Scrubbing from user site: %s' % path)
        for path in site_distributions:
            TRACER.log('Scrubbing from site-packages: %s' % path)

    scrubbed_sys_path = list(OrderedSet(sys.path) - scrub_paths)
    # Materialize the filter: on Python 3 a lazy filter object would be consumed by the
    # membership tests below.
    scrub_from_importer_cache = list(filter(
        lambda key: any(key.startswith(path) for path in scrub_paths),
        sys.path_importer_cache.keys()))
    scrubbed_importer_cache = dict(
        (key, value) for (key, value) in sys.path_importer_cache.items()
        if key not in scrub_from_importer_cache)

    for importer_cache_entry in scrub_from_importer_cache:
        TRACER.log('Scrubbing from path_importer_cache: %s' % importer_cache_entry, V=2)

    return scrubbed_sys_path, scrubbed_importer_cache
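# The scrub step above reduces to an order-preserving set difference; a standalone
# sketch with plain lists standing in for OrderedSet:
def scrub(sys_path, scrub_paths):
    seen = set()
    result = []
    for entry in sys_path:
        if entry in scrub_paths or entry in seen:
            continue
        seen.add(entry)
        result.append(entry)
    return result


assert scrub(['/app', '/usr/lib/python3.8/site-packages', '/app'],
             {'/usr/lib/python3.8/site-packages'}) == ['/app']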
def test_backwards_incompatible_pex_info():
    # forwards compatibility
    pi = make_pex_info(["hello"])
    assert pi.requirements == OrderedSet(["hello"])

    pi = make_pex_info(["hello==0.1", "world==0.2"])
    assert pi.requirements == OrderedSet(["hello==0.1", "world==0.2"])

    # malformed
    with pytest.raises(ValueError):
        make_pex_info("hello")

    with pytest.raises(ValueError):
        make_pex_info([("hello", False)])

    # backwards compatibility
    pi = make_pex_info([
        ["hello==0.1", False, None],
        ["world==0.2", False, None],
    ])
    assert pi.requirements == OrderedSet(["hello==0.1", "world==0.2"])
def test_backwards_incompatible_pex_info():
    # forwards compatibility
    pi = make_pex_info(['hello'])
    assert pi.requirements == OrderedSet(['hello'])

    pi = make_pex_info(['hello==0.1', 'world==0.2'])
    assert pi.requirements == OrderedSet(['hello==0.1', 'world==0.2'])

    # malformed
    with pytest.raises(ValueError):
        make_pex_info('hello')

    with pytest.raises(ValueError):
        make_pex_info([('hello', False)])

    # backwards compatibility
    pi = make_pex_info([
        ['hello==0.1', False, None],
        ['world==0.2', False, None],
    ])
    assert pi.requirements == OrderedSet(['hello==0.1', 'world==0.2'])
def to_requirement(self, dist):
    req = dist.as_requirement()

    markers = OrderedSet()

    # Here we map any wheel python requirement to the equivalent environment marker:
    # See:
    # + https://www.python.org/dev/peps/pep-0345/#requires-python
    # + https://www.python.org/dev/peps/pep-0508/#environment-markers
    python_requires = dist_metadata.requires_python(dist)
    if python_requires:
        markers.update(
            Marker(python_version)
            for python_version in sorted(
                'python_version {operator} {version!r}'.format(
                    operator=specifier.operator, version=specifier.version)
                for specifier in python_requires))

    markers.update(self._markers_by_requirement_key.get(req.key, ()))

    if not markers:
        return req

    if len(markers) == 1:
        marker = next(iter(markers))
        req.marker = marker
        return req

    # We may have resolved with multiple paths to the dependency represented by dist and at least
    # two of those paths had (different) conditional requirements for dist based on environment
    # marker predicates. In that case, since the pip resolve succeeded, the implication is that
    # the environment markers are compatible; i.e.: their intersection selects the target
    # interpreter. Here we make that intersection explicit.
    # See: https://www.python.org/dev/peps/pep-0508/#grammar
    marker = ' and '.join('({})'.format(marker) for marker in markers)
    return Requirement.parse('{}; {}'.format(req, marker))
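# A standalone illustration of the marker-intersection rule above, using the
# packaging library (an assumption here; pex vendors its own copies of these
# APIs). Two conditional requirement paths combine with `and` into one marker.
from packaging.markers import Marker

markers = ['python_version >= "2.7"', 'python_version < "4.0"']
combined = Marker(' and '.join('({})'.format(m) for m in markers))
print(combined)             # (python_version >= "2.7") and (python_version < "4.0")
print(combined.evaluate())  # True on any interpreter in that range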
def set_script(self, script):
    """Set the entry point of this PEX environment based upon a distribution script.

    :param script: The script name as defined either by a console script or ordinary
      script within the setup.py of one of the distributions added to the PEX.
    :raises: :class:`PEXBuilder.InvalidExecutableSpecification` if the script is not found in any
      distribution added to the PEX.
    """
    distributions = OrderedSet(self._distributions.values())
    if self._pex_info.pex_path:
        for pex in self._pex_info.pex_path.split(":"):
            if os.path.exists(pex):
                distributions.update(PEX(pex, interpreter=self._interpreter).resolve())

    # Check if 'script' is a console_script.
    dist, entry_point = get_entry_point_from_console_script(script, distributions)
    if entry_point:
        self.set_entry_point(entry_point)
        TRACER.log("Set entrypoint to console_script {!r} in {!r}".format(entry_point, dist))
        return

    # Check if 'script' is an ordinary script.
    dist_script = get_script_from_distributions(script, distributions)
    if dist_script:
        if self._pex_info.entry_point:
            raise self.InvalidExecutableSpecification(
                "Cannot set both entry point and script of PEX!")
        self._pex_info.script = script
        TRACER.log("Set entrypoint to script {!r} in {!r}".format(script, dist_script.dist))
        return

    raise self.InvalidExecutableSpecification(
        "Could not find script {!r} in any distribution {} within PEX!".format(
            script, ", ".join(str(d) for d in distributions)))
def _get_supported(version=None, platform=None, impl=None, abi=None, force_manylinux=False):
    versions = _gen_all_compatible_versions(version) if version is not None else None
    all_supported = get_supported(
        versions=versions,
        platform=platform,
        impl=impl,
        abi=abi
    )

    def iter_all_supported():
        for supported in all_supported:
            yield supported
            python_tag, abi_tag, platform_tag = supported
            if platform_tag.startswith('linux') and force_manylinux:
                yield python_tag, abi_tag, platform_tag.replace('linux', 'manylinux1')

    return list(OrderedSet(iter_all_supported()))
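# The linux -> manylinux1 tag expansion above, in isolation: each plain linux
# platform tag is kept and additionally emitted as a manylinux1 tag, with
# first-seen order preserved (the role OrderedSet plays in the function).
def expand_manylinux(tags, force_manylinux=True):
    out = []
    for python_tag, abi_tag, platform_tag in tags:
        candidates = [(python_tag, abi_tag, platform_tag)]
        if force_manylinux and platform_tag.startswith('linux'):
            candidates.append((python_tag, abi_tag, platform_tag.replace('linux', 'manylinux1')))
        for tag in candidates:
            if tag not in out:
                out.append(tag)
    return out


print(expand_manylinux([('cp38', 'cp38', 'linux_x86_64')]))
# [('cp38', 'cp38', 'linux_x86_64'), ('cp38', 'cp38', 'manylinux1_x86_64')]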
def test_outputs_multiple_targets_one_source_with_dep(self):
    #   target1                  target2
    #  source="some/file.py"    source="other/file.py"
    #      /                       /
    #    dep1                    dep2
    dep_address1 = FileDepsTest.make_build_target_address("dep1/target")
    dep_target1 = self.mock_hydrated_target(dep_address1, {"dep1/file.py": ""}, ())

    target_address1 = FileDepsTest.make_build_target_address("some/target")
    hydrated_target1 = self.mock_hydrated_target(target_address1, {"some/file.py": ""}, (dep_target1,))

    dep_address2 = FileDepsTest.make_build_target_address("dep2/target")
    dep_target2 = self.mock_hydrated_target(dep_address2, {"dep2/file.py": ""}, ())

    target_address2 = FileDepsTest.make_build_target_address("other/target")
    hydrated_target2 = self.mock_hydrated_target(target_address2, {"other/file.py": ""}, (dep_target2,))

    transitive_targets = TransitiveHydratedTargets(
        (hydrated_target1, hydrated_target2),
        OrderedSet([hydrated_target1, hydrated_target2, dep_target1, dep_target2]))

    self.filedeps_rule_test(
        transitive_targets,
        dedent('''\
            some/target/BUILD
            some/file.py
            other/target/BUILD
            other/file.py
            dep1/target/BUILD
            dep1/file.py
            dep2/target/BUILD
            dep2/file.py
            '''))
def setup_interpreter(distributions, interpreter=None):
    """Return an interpreter configured with vendored distributions as extras.

    Any distributions that are present in the vendored set will be added to the interpreter as
    extras.

    :param distributions: The names of distributions to setup the interpreter with.
    :type distributions: list of str
    :param interpreter: An optional interpreter to configure. If ``None``, the current
                        interpreter is used.
    :type interpreter: :class:`pex.interpreter.PythonInterpreter`
    :return: A bare interpreter configured with vendored extras.
    :rtype: :class:`pex.interpreter.PythonInterpreter`
    """
    from pex.interpreter import PythonInterpreter

    interpreter = interpreter or PythonInterpreter.get()
    for dist in _vendored_dists(OrderedSet(distributions)):
        interpreter = interpreter.with_extra(dist.key, dist.version, dist.location)
    return interpreter
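# A hedged usage sketch: the import location is an assumption (this helper appears
# to live alongside pex's vendoring machinery), and the distribution names are
# illustrative only.
from pex.third_party import setup_interpreter  # assumed import location

interp = setup_interpreter(['setuptools', 'wheel'])
print(interp.binary)  # the configured interpreter's binary path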
def file_deps(console, filedeps_options, transitive_hydrated_targets):
    uniq_set = OrderedSet()
    for hydrated_target in transitive_hydrated_targets.closure:
        if hydrated_target.address.rel_path:
            uniq_set.add(hydrated_target.address.rel_path)
        if hasattr(hydrated_target.adaptor, "sources"):
            uniq_set.update(hydrated_target.adaptor.sources.snapshot.files)

    with Filedeps.line_oriented(filedeps_options, console) as (print_stdout, print_stderr):
        for f_path in uniq_set:
            print_stdout(f_path)

    return Filedeps(exit_code=0)
def find_compatible_interpreters(path=None, compatibility_constraints=None):
    """Find all compatible interpreters on the system within the supplied constraints and use
    path if it is set. If not, fall back to interpreters on $PATH.
    """
    interpreters = OrderedSet()
    paths = None
    if path:
        paths = path.split(os.pathsep)
    else:
        # We may have been invoked with a specific interpreter, make sure our sys.executable is
        # included as a candidate in this case.
        interpreters.add(PythonInterpreter.get())
    interpreters.update(PythonInterpreter.all(paths=paths))
    return _filter_compatible_interpreters(
        interpreters, compatibility_constraints=compatibility_constraints)
def test_output_multiple_targets_one_source(self):
    target_address1 = FileDepsTest.make_build_target_address("some/target")
    hydrated_target1 = self.mock_hydrated_target(target_address1, {"some/file.py": ""}, ())

    target_address2 = FileDepsTest.make_build_target_address("other/target")
    hydrated_target2 = self.mock_hydrated_target(target_address2, {"other/file.py": ""}, ())

    transitive_targets = TransitiveHydratedTargets(
        (hydrated_target1, hydrated_target2),
        OrderedSet([hydrated_target1, hydrated_target2])
    )

    self.filedeps_rule_test(
        transitive_targets,
        dedent(
            '''\
            some/target/BUILD
            some/file.py
            other/target/BUILD
            other/file.py
            ''')
    )
def file_deps(console, transitive_hydrated_targets):
    """List all source and BUILD files a target transitively depends on.

    Files are listed with relative paths and any BUILD files implied in the transitive closure of
    targets are also included.
    """
    uniq_set = OrderedSet()
    for hydrated_target in transitive_hydrated_targets.closure:
        if hydrated_target.address.rel_path:
            uniq_set.add(hydrated_target.address.rel_path)
        if hasattr(hydrated_target.adaptor, "sources"):
            uniq_set.update(f.path for f in hydrated_target.adaptor.sources.snapshot.files)

    for f_path in uniq_set:
        console.print_stdout(f_path)
def _identify_interpreters(
    cls,
    filter,  # type: PathFilter
    error_handler=None,  # type: Optional[ErrorHandler]
    paths=None,  # type: Optional[Iterable[str]]
):
    # type: (...) -> Union[Iterator[PythonInterpreter], Iterator[InterpreterOrJobError]]
    def iter_candidates():
        # type: () -> Iterator[str]
        for path in cls._paths(paths=paths):
            for fn in cls._expand_path(path):
                if filter(fn):
                    binary = cls._resolve_pyenv_shim(fn)
                    if binary:
                        yield binary

    results = execute_parallel(
        inputs=OrderedSet(iter_candidates()),
        spawn_func=cls._spawn_from_binary,
        error_handler=error_handler,
    )
    return cast("Union[Iterator[PythonInterpreter], Iterator[InterpreterOrJobError]]", results)
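# The fan-out above, approximated with the stdlib (pex's execute_parallel is
# job/subprocess based; this thread-pool stand-in only shows the shape): map a
# spawn function over de-duplicated candidates, routing failures to an optional
# error handler instead of aborting the whole scan.
from concurrent.futures import ThreadPoolExecutor


def execute_parallel_approx(inputs, spawn_func, error_handler=None):
    def guarded(item):
        try:
            return spawn_func(item)
        except Exception as e:
            return error_handler(item, e) if error_handler else None

    with ThreadPoolExecutor() as pool:
        for result in pool.map(guarded, inputs):
            if result is not None:
                yield result


print(list(execute_parallel_approx(['1', '2', 'x'], int, lambda item, e: -1)))
# [1, 2, -1]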
def minimum_sys_path(cls, site_libs, inherit_path):
    scrub_paths = OrderedSet()
    site_distributions = OrderedSet()
    user_site_distributions = OrderedSet()

    def all_distribution_paths(path):
        locations = set(dist.location for dist in find_distributions(path))
        return set([path]) | locations | set(os.path.realpath(path) for path in locations)

    for path_element in sys.path:
        if cls._tainted_path(path_element, site_libs):
            TRACER.log('Tainted path element: %s' % path_element)
            site_distributions.update(all_distribution_paths(path_element))
        else:
            TRACER.log('Not a tainted path element: %s' % path_element, V=2)

    user_site_distributions.update(all_distribution_paths(USER_SITE))

    if inherit_path == 'false':
        scrub_paths = site_distributions | user_site_distributions
        for path in user_site_distributions:
            TRACER.log('Scrubbing from user site: %s' % path)
        for path in site_distributions:
            TRACER.log('Scrubbing from site-packages: %s' % path)

    scrubbed_sys_path = list(OrderedSet(sys.path) - scrub_paths)
    # Materialize the filter: on Python 3 a lazy filter object would be consumed by the
    # membership tests below.
    scrub_from_importer_cache = list(filter(
        lambda key: any(key.startswith(path) for path in scrub_paths),
        sys.path_importer_cache.keys()))
    scrubbed_importer_cache = dict(
        (key, value) for (key, value) in sys.path_importer_cache.items()
        if key not in scrub_from_importer_cache)

    for importer_cache_entry in scrub_from_importer_cache:
        TRACER.log('Scrubbing from path_importer_cache: %s' % importer_cache_entry, V=2)

    return scrubbed_sys_path, scrubbed_importer_cache
def minimum_sys_path(cls, site_libs, inherit_path):
    scrub_paths = OrderedSet()
    site_distributions = OrderedSet()
    user_site_distributions = OrderedSet()

    def all_distribution_paths(path):
        locations = set(dist.location for dist in find_distributions(path))
        return set([path]) | locations | set(os.path.realpath(path) for path in locations)

    for path_element in sys.path:
        if cls._tainted_path(path_element, site_libs):
            TRACER.log('Tainted path element: %s' % path_element)
            site_distributions.update(all_distribution_paths(path_element))
        else:
            TRACER.log('Not a tainted path element: %s' % path_element, V=2)

    user_site_distributions.update(all_distribution_paths(USER_SITE))

    if inherit_path == 'false':
        scrub_paths = site_distributions | user_site_distributions
        for path in user_site_distributions:
            TRACER.log('Scrubbing from user site: %s' % path)
        for path in site_distributions:
            TRACER.log('Scrubbing from site-packages: %s' % path)

    scrubbed_sys_path = list(OrderedSet(sys.path) - scrub_paths)

    pythonpath = cls.unstash_pythonpath()
    if pythonpath is not None:
        original_pythonpath = pythonpath.split(os.pathsep)
        user_pythonpath = list(OrderedSet(original_pythonpath) - set(sys.path))
        if original_pythonpath == user_pythonpath:
            TRACER.log('Unstashed PYTHONPATH of %s' % pythonpath, V=2)
        else:
            TRACER.log(
                'Extracted user PYTHONPATH of %s from unstashed PYTHONPATH of %s'
                % (os.pathsep.join(user_pythonpath), pythonpath), V=2)

        if inherit_path == 'false':
            for path in user_pythonpath:
                TRACER.log('Scrubbing user PYTHONPATH element: %s' % path)
        elif inherit_path == 'prefer':
            TRACER.log('Prepending user PYTHONPATH: %s' % os.pathsep.join(user_pythonpath))
            scrubbed_sys_path = user_pythonpath + scrubbed_sys_path
        elif inherit_path == 'fallback':
            TRACER.log('Appending user PYTHONPATH: %s' % os.pathsep.join(user_pythonpath))
            scrubbed_sys_path = scrubbed_sys_path + user_pythonpath

    # Materialize the filter: on Python 3 a lazy filter object would be consumed by the
    # membership tests below.
    scrub_from_importer_cache = list(filter(
        lambda key: any(key.startswith(path) for path in scrub_paths),
        sys.path_importer_cache.keys()))
    scrubbed_importer_cache = dict(
        (key, value) for (key, value) in sys.path_importer_cache.items()
        if key not in scrub_from_importer_cache)

    for importer_cache_entry in scrub_from_importer_cache:
        TRACER.log('Scrubbing from path_importer_cache: %s' % importer_cache_entry, V=2)

    return scrubbed_sys_path, scrubbed_importer_cache
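# The inherit_path handling above in miniature: 'prefer' puts the user PYTHONPATH
# entries in front of the scrubbed sys.path, 'fallback' appends them, and 'false'
# drops them entirely.
def apply_inherit_path(scrubbed, user_pythonpath, inherit_path):
    if inherit_path == 'prefer':
        return user_pythonpath + scrubbed
    if inherit_path == 'fallback':
        return scrubbed + user_pythonpath
    return scrubbed  # 'false': user PYTHONPATH is scrubbed


assert apply_inherit_path(['a'], ['u'], 'prefer') == ['u', 'a']
assert apply_inherit_path(['a'], ['u'], 'fallback') == ['a', 'u']
assert apply_inherit_path(['a'], ['u'], 'false') == ['a']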
class PexInfo(object):
    """PEX metadata.

    # Build metadata:
    build_properties: BuildProperties  # (key-value information about the build system)
    code_hash: str                     # sha1 hash of all names/code in the archive
    distributions: {dist_name: str}    # map from distribution name (i.e. path in
                                       # the internal cache) to its cache key (sha1)
    requirements: list                 # list of requirements for this environment

    # Environment options
    pex_root: string                   # root of all pex-related files eg: ~/.pex
    entry_point: string                # entry point into this pex
    script: string                     # script to execute in this pex environment
                                       # at most one of script/entry_point can be specified
    zip_safe: bool, default True       # is this pex zip safe?
    unzip: bool, default False         # should this pex be unzipped and re-executed from there?
    inherit_path: false/fallback/prefer  # should this pex inherit site-packages + user
                                         # site-packages + PYTHONPATH?
    ignore_errors: bool, default False   # should we ignore inability to resolve dependencies?
    always_write_cache: bool, default False  # should we always write the internal cache to disk
                                             # first? this is useful if you have very large
                                             # dependencies that do not fit in RAM constrained
                                             # environments

    .. versionchanged:: 0.8
        Removed the ``repositories`` and ``indices`` information, as they were never implemented.
    """

    PATH = 'PEX-INFO'
    INSTALL_CACHE = 'installed_wheels'

    @classmethod
    def make_build_properties(cls, interpreter=None):
        from .interpreter import PythonInterpreter
        from .platforms import Platform

        pi = interpreter or PythonInterpreter.get()
        plat = Platform.current()
        platform_name = plat.platform
        return {
            'pex_version': pex_version,
            'class': pi.identity.interpreter,
            'version': pi.identity.version,
            'platform': platform_name,
        }

    @classmethod
    def default(cls, interpreter=None):
        pex_info = {
            'requirements': [],
            'distributions': {},
            'build_properties': cls.make_build_properties(interpreter),
        }
        return cls(info=pex_info)

    @classmethod
    def from_pex(cls, pex):
        if os.path.isfile(pex):
            with open_zip(pex) as zf:
                pex_info = zf.read(cls.PATH)
        else:
            with open(os.path.join(pex, cls.PATH)) as fp:
                pex_info = fp.read()
        return cls.from_json(pex_info)

    @classmethod
    def from_json(cls, content):
        if isinstance(content, bytes):
            content = content.decode('utf-8')
        return cls(info=json.loads(content))

    @classmethod
    def from_env(cls, env=ENV):
        supplied_env = env.strip_defaults()
        zip_safe = None if supplied_env.PEX_FORCE_LOCAL is None else not supplied_env.PEX_FORCE_LOCAL
        unzip = None if supplied_env.PEX_UNZIP is None else supplied_env.PEX_UNZIP
        pex_info = {
            'pex_root': supplied_env.PEX_ROOT,
            'entry_point': supplied_env.PEX_MODULE,
            'script': supplied_env.PEX_SCRIPT,
            'zip_safe': zip_safe,
            'unzip': unzip,
            'inherit_path': supplied_env.PEX_INHERIT_PATH,
            'ignore_errors': supplied_env.PEX_IGNORE_ERRORS,
            'always_write_cache': supplied_env.PEX_ALWAYS_CACHE,
        }
        # Filter out empty entries not explicitly set in the environment.
        return cls(info=dict((k, v) for (k, v) in pex_info.items() if v is not None))

    @classmethod
    def _parse_requirement_tuple(cls, requirement_tuple):
        if isinstance(requirement_tuple, (tuple, list)):
            if len(requirement_tuple) != 3:
                raise ValueError('Malformed PEX requirement: %r' % (requirement_tuple,))
            # pre 0.8.x requirement type:
            pex_warnings.warn(
                'Attempting to use deprecated PEX feature. Please upgrade past PEX 0.8.x.')
            return requirement_tuple[0]
        elif isinstance(requirement_tuple, compatibility_string):
            return requirement_tuple
        raise ValueError('Malformed PEX requirement: %r' % (requirement_tuple,))

    def __init__(self, info=None):
        """Construct a new PexInfo.

        This should not be used directly.
        """
        if info is not None and not isinstance(info, dict):
            raise ValueError('PexInfo can only be seeded with a dict, got: '
                             '%s of type %s' % (info, type(info)))
        self._pex_info = info or {}
        if 'inherit_path' in self._pex_info:
            self.inherit_path = self._pex_info['inherit_path']
        self._distributions = self._pex_info.get('distributions', {})
        # cast as set because pex info from json must store interpreter_constraints as a list
        self._interpreter_constraints = set(self._pex_info.get('interpreter_constraints', set()))
        requirements = self._pex_info.get('requirements', [])
        if not isinstance(requirements, (list, tuple)):
            raise ValueError('Expected requirements to be a list, got %s' % type(requirements))
        self._requirements = OrderedSet(
            self._parse_requirement_tuple(req) for req in requirements)

    def _get_safe(self, key):
        if key not in self._pex_info:
            return None
        value = self._pex_info[key]
        return value.encode('utf-8') if PY2 else value

    @property
    def build_properties(self):
        """Information about the system on which this PEX was generated.

        :returns: A dictionary containing metadata about the environment used to build this PEX.
        """
        return self._pex_info.get('build_properties', {})

    @build_properties.setter
    def build_properties(self, value):
        if not isinstance(value, dict):
            raise TypeError('build_properties must be a dictionary!')
        self._pex_info['build_properties'] = self.make_build_properties()
        self._pex_info['build_properties'].update(value)

    @property
    def zip_safe(self):
        """Whether or not this PEX should be treated as zip-safe.

        If set to false and the PEX is zipped, the contents of the PEX will be unpacked into a
        directory within the PEX_ROOT prior to execution. This allows code and frameworks
        depending upon __file__ existing on disk to operate normally.

        By default zip_safe is True. May be overridden at runtime by the $PEX_FORCE_LOCAL
        environment variable.
        """
        return self._pex_info.get('zip_safe', True)

    @zip_safe.setter
    def zip_safe(self, value):
        self._pex_info['zip_safe'] = bool(value)

    @property
    def unzip(self):
        """Whether or not PEX should be unzipped before it's executed.

        Unzipping a PEX is an operation that can be cached on the 1st run of a given PEX file
        which can result in lower startup latency in subsequent runs.
        """
        return self._pex_info.get('unzip', False)

    @unzip.setter
    def unzip(self, value):
        self._pex_info['unzip'] = bool(value)

    @property
    def strip_pex_env(self):
        """Whether or not this PEX should strip `PEX_*` env vars before executing its entrypoint.

        You might want to set this to `False` if this PEX executes other PEXes or the Pex CLI
        itself and you want the executed PEX to be controlled via PEX environment variables.
        """
        return self._pex_info.get('strip_pex_env', True)

    @strip_pex_env.setter
    def strip_pex_env(self, value):
        self._pex_info['strip_pex_env'] = bool(value)

    @property
    def pex_path(self):
        """A colon separated list of other pex files to merge into the runtime environment.

        This pex info property is used to persist the PEX_PATH environment variable into the pex
        info metadata for reuse within a built pex.
        """
        return self._pex_info.get('pex_path')

    @pex_path.setter
    def pex_path(self, value):
        self._pex_info['pex_path'] = value

    @property
    def inherit_path(self):
        """Whether or not this PEX should be allowed to inherit system dependencies.

        By default, PEX environments are scrubbed of all system distributions prior to execution.
        This means that PEX files cannot rely upon preexisting system libraries.

        By default inherit_path is false. This may be overridden at runtime by the
        $PEX_INHERIT_PATH environment variable.
        """
        return self._pex_info.get('inherit_path', 'false')

    @inherit_path.setter
    def inherit_path(self, value):
        if value is False:
            value = 'false'
        elif value is True:
            value = 'prefer'
        self._pex_info['inherit_path'] = value

    @property
    def interpreter_constraints(self):
        """A list of constraints that determine the interpreter compatibility for this pex, using
        the Requirement-style format, e.g. ``'CPython>=3'`` or just ``'>=2.7,<3'`` for
        requirements agnostic to interpreter class.

        This property will be used at exec time when bootstrapping a pex to search
        PEX_PYTHON_PATH for a list of compatible interpreters.
        """
        return list(self._interpreter_constraints)

    def add_interpreter_constraint(self, value):
        self._interpreter_constraints.add(str(value))

    @property
    def ignore_errors(self):
        return self._pex_info.get('ignore_errors', False)

    @ignore_errors.setter
    def ignore_errors(self, value):
        self._pex_info['ignore_errors'] = bool(value)

    @property
    def emit_warnings(self):
        return self._pex_info.get('emit_warnings', True)

    @emit_warnings.setter
    def emit_warnings(self, value):
        self._pex_info['emit_warnings'] = bool(value)

    @property
    def code_hash(self):
        return self._pex_info.get('code_hash')

    @code_hash.setter
    def code_hash(self, value):
        self._pex_info['code_hash'] = value

    @property
    def entry_point(self):
        return self._get_safe('entry_point')

    @entry_point.setter
    def entry_point(self, value):
        self._pex_info['entry_point'] = value

    @property
    def script(self):
        return self._get_safe('script')

    @script.setter
    def script(self, value):
        self._pex_info['script'] = value

    def add_requirement(self, requirement):
        self._requirements.add(str(requirement))

    @property
    def requirements(self):
        return self._requirements

    def add_distribution(self, location, sha):
        self._distributions[location] = sha

    @property
    def distributions(self):
        return self._distributions

    @property
    def always_write_cache(self):
        return self._pex_info.get('always_write_cache', False)

    @always_write_cache.setter
    def always_write_cache(self, value):
        self._pex_info['always_write_cache'] = bool(value)

    @property
    def pex_root(self):
        pex_root = os.path.expanduser(self._pex_info.get('pex_root', os.path.join('~', '.pex')))
        if not can_write_dir(pex_root):
            tmp_root = safe_mkdtemp()
            pex_warnings.warn(
                'PEX_ROOT is configured as {pex_root} but that path is un-writeable, '
                'falling back to a temporary PEX_ROOT of {tmp_root} which will hurt '
                'performance.'.format(pex_root=pex_root, tmp_root=tmp_root))
            pex_root = self._pex_info['pex_root'] = tmp_root
        return pex_root

    @pex_root.setter
    def pex_root(self, value):
        if value is None:
            self._pex_info.pop('pex_root', None)
        else:
            self._pex_info['pex_root'] = value

    @property
    def internal_cache(self):
        return '.deps'

    @property
    def install_cache(self):
        return os.path.join(self.pex_root, self.INSTALL_CACHE)

    @property
    def zip_unsafe_cache(self):
        return os.path.join(self.pex_root, 'code')

    def update(self, other):
        if not isinstance(other, PexInfo):
            raise TypeError('Cannot merge a %r with PexInfo' % type(other))
        self._pex_info.update(other._pex_info)
        self._distributions.update(other.distributions)
        self._interpreter_constraints.update(other.interpreter_constraints)
        self._requirements.update(other.requirements)

    def dump(self, **kwargs):
        pex_info_copy = self._pex_info.copy()
        pex_info_copy['requirements'] = sorted(self._requirements)
        pex_info_copy['interpreter_constraints'] = sorted(self._interpreter_constraints)
        pex_info_copy['distributions'] = self._distributions.copy()
        return json.dumps(pex_info_copy, **kwargs)

    def copy(self):
        return self.from_json(self.dump())

    @staticmethod
    def _merge_split(*paths):
        filtered_paths = filter(None, paths)
        return [p for p in ':'.join(filtered_paths).split(':') if p]

    def merge_pex_path(self, pex_path):
        """Merges a new PEX_PATH definition into the existing one (if any).

        :param str pex_path: The PEX_PATH to merge.
        """
        if not pex_path:
            return
        self.pex_path = ':'.join(self._merge_split(self.pex_path, pex_path))

    def __repr__(self):
        return '{}({!r})'.format(type(self).__name__, self._pex_info)
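# _merge_split in isolation: colon-joined PEX_PATH fragments are merged, with None
# and empty segments dropped.
def merge_split(*paths):
    filtered_paths = [p for p in paths if p]
    return [p for p in ':'.join(filtered_paths).split(':') if p]


assert merge_split('a.pex:b.pex', None, 'c.pex') == ['a.pex', 'b.pex', 'c.pex']
assert merge_split(None, '') == []
assert merge_split('a.pex::b.pex') == ['a.pex', 'b.pex']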
def build_pex(reqs, options, cache=None):
    interpreters = None  # Default to the current interpreter.

    pex_python_path = options.python_path  # If None, this will result in using $PATH.
    # TODO(#1075): stop looking at PEX_PYTHON_PATH and solely consult the `--python-path` flag.
    if pex_python_path is None and (options.rc_file or not ENV.PEX_IGNORE_RCFILES):
        rc_variables = Variables(rc=options.rc_file)
        pex_python_path = rc_variables.PEX_PYTHON_PATH

    # NB: options.python and interpreter constraints cannot be used together.
    if options.python:
        with TRACER.timed("Resolving interpreters", V=2):

            def to_python_interpreter(full_path_or_basename):
                if os.path.isfile(full_path_or_basename):
                    return PythonInterpreter.from_binary(full_path_or_basename)
                else:
                    interp = PythonInterpreter.from_env(full_path_or_basename)
                    if interp is None:
                        die("Failed to find interpreter: %s" % full_path_or_basename)
                    return interp

            interpreters = [to_python_interpreter(interp) for interp in options.python]
    elif options.interpreter_constraint:
        with TRACER.timed("Resolving interpreters", V=2):
            constraints = options.interpreter_constraint
            validate_constraints(constraints)
            try:
                interpreters = list(
                    iter_compatible_interpreters(
                        path=pex_python_path, interpreter_constraints=constraints
                    )
                )
            except UnsatisfiableInterpreterConstraintsError as e:
                die(
                    e.create_message("Could not find a compatible interpreter."),
                    CANNOT_SETUP_INTERPRETER,
                )

    platforms = OrderedSet(options.platforms)
    interpreters = interpreters or []
    if options.platforms and options.resolve_local_platforms:
        with TRACER.timed(
            "Searching for local interpreters matching {}".format(", ".join(map(str, platforms)))
        ):
            candidate_interpreters = OrderedSet(
                iter_compatible_interpreters(path=pex_python_path))
            candidate_interpreters.add(PythonInterpreter.get())
            for candidate_interpreter in candidate_interpreters:
                resolved_platforms = candidate_interpreter.supported_platforms.intersection(
                    platforms)
                if resolved_platforms:
                    for resolved_platform in resolved_platforms:
                        TRACER.log(
                            "Resolved {} for platform {}".format(
                                candidate_interpreter, resolved_platform))
                        platforms.remove(resolved_platform)
                    interpreters.append(candidate_interpreter)
        if platforms:
            TRACER.log(
                "Could not resolve a local interpreter for {}, will resolve only binary "
                "distributions for {}.".format(
                    ", ".join(map(str, platforms)),
                    "this platform" if len(platforms) == 1 else "these platforms",
                )
            )

    interpreter = (
        PythonInterpreter.latest_release_of_min_compatible_version(interpreters)
        if interpreters
        else None
    )

    try:
        with open(options.preamble_file) as preamble_fd:
            preamble = preamble_fd.read()
    except TypeError:
        # options.preamble_file is None
        preamble = None

    pex_builder = PEXBuilder(
        path=safe_mkdtemp(),
        interpreter=interpreter,
        preamble=preamble,
        copy_mode=CopyMode.SYMLINK,
        include_tools=options.include_tools or options.venv,
    )

    if options.resources_directory:
        pex_warnings.warn(
            "The `-R/--resources-directory` option is deprecated. Resources should be added via "
            "`-D/--sources-directory` instead."
        )

    for directory in OrderedSet(options.sources_directory + options.resources_directory):
        src_dir = os.path.normpath(directory)
        for root, _, files in os.walk(src_dir):
            for f in files:
                src_file_path = os.path.join(root, f)
                dst_path = os.path.relpath(src_file_path, src_dir)
                pex_builder.add_source(src_file_path, dst_path)

    pex_info = pex_builder.info
    pex_info.zip_safe = options.zip_safe
    pex_info.unzip = options.unzip
    pex_info.venv = bool(options.venv)
    pex_info.venv_bin_path = options.venv
    pex_info.venv_copies = options.venv_copies
    pex_info.pex_path = options.pex_path
    pex_info.always_write_cache = options.always_write_cache
    pex_info.ignore_errors = options.ignore_errors
    pex_info.emit_warnings = options.emit_warnings
    pex_info.inherit_path = InheritPath.for_value(options.inherit_path)
    pex_info.pex_root = options.runtime_pex_root
    pex_info.strip_pex_env = options.strip_pex_env

    if options.interpreter_constraint:
        for ic in options.interpreter_constraint:
            pex_builder.add_interpreter_constraint(ic)

    indexes = compute_indexes(options)

    for requirements_pex in options.requirements_pexes:
        pex_builder.add_from_requirements_pex(requirements_pex)

    with TRACER.timed("Resolving distributions ({})".format(reqs + options.requirement_files)):
        if options.cache_ttl:
            pex_warnings.warn("The --cache-ttl option is deprecated and no longer has any effect.")
        if options.headers:
            pex_warnings.warn("The --header option is deprecated and no longer has any effect.")

        network_configuration = NetworkConfiguration(
            retries=options.retries,
            timeout=options.timeout,
            proxy=options.proxy,
            cert=options.cert,
            client_cert=options.client_cert,
        )

        try:
            if options.pex_repository:
                with TRACER.timed(
                    "Resolving requirements from PEX {}.".format(options.pex_repository)
                ):
                    resolveds = resolve_from_pex(
                        pex=options.pex_repository,
                        requirements=reqs,
                        requirement_files=options.requirement_files,
                        constraint_files=options.constraint_files,
                        network_configuration=network_configuration,
                        transitive=options.transitive,
                        interpreters=interpreters,
                        platforms=list(platforms),
                        manylinux=options.manylinux,
                        ignore_errors=options.ignore_errors,
                    )
            else:
                with TRACER.timed("Resolving requirements."):
                    resolveds = resolve_multi(
                        requirements=reqs,
                        requirement_files=options.requirement_files,
                        constraint_files=options.constraint_files,
                        allow_prereleases=options.allow_prereleases,
                        transitive=options.transitive,
                        interpreters=interpreters,
                        platforms=list(platforms),
                        indexes=indexes,
                        find_links=options.find_links,
                        resolver_version=ResolverVersion.for_value(options.resolver_version),
                        network_configuration=network_configuration,
                        cache=cache,
                        build=options.build,
                        use_wheel=options.use_wheel,
                        compile=options.compile,
                        manylinux=options.manylinux,
                        max_parallel_jobs=options.max_parallel_jobs,
                        ignore_errors=options.ignore_errors,
                    )

            for resolved_dist in resolveds:
                pex_builder.add_distribution(resolved_dist.distribution)
                if resolved_dist.direct_requirement:
                    pex_builder.add_requirement(resolved_dist.direct_requirement)
        except Unsatisfiable as e:
            die(str(e))

    if options.entry_point and options.script:
        die("Must specify at most one entry point or script.", INVALID_OPTIONS)

    if options.entry_point:
        pex_builder.set_entry_point(options.entry_point)
    elif options.script:
        pex_builder.set_script(options.script)

    if options.python_shebang:
        pex_builder.set_shebang(options.python_shebang)

    return pex_builder
class PexInfo(object):
    """PEX metadata.

    # Build metadata:
    build_properties: BuildProperties  # (key-value information about the build system)
    code_hash: str                     # sha1 hash of all names/code in the archive
    distributions: {dist_name: str}    # map from distribution name (i.e. path in
                                       # the internal cache) to its cache key (sha1)
    requirements: list                 # list of requirements for this environment

    # Environment options
    pex_root: string                   # root of all pex-related files eg: ~/.pex
    entry_point: string                # entry point into this pex
    script: string                     # script to execute in this pex environment
                                       # at most one of script/entry_point can be specified
    zip_safe: bool, default True       # is this pex zip safe?
    inherit_path: false/fallback/prefer  # should this pex inherit site-packages + PYTHONPATH?
    ignore_errors: bool, default False   # should we ignore inability to resolve dependencies?
    always_write_cache: bool, default False  # should we always write the internal cache to disk
                                             # first? this is useful if you have very large
                                             # dependencies that do not fit in RAM constrained
                                             # environments

    .. versionchanged:: 0.8
        Removed the ``repositories`` and ``indices`` information, as they were never implemented.
    """

    PATH = 'PEX-INFO'
    INTERNAL_CACHE = '.deps'

    @classmethod
    def make_build_properties(cls, interpreter=None):
        from .interpreter import PythonInterpreter
        from .platforms import Platform

        pi = interpreter or PythonInterpreter.get()
        plat = Platform.current()
        platform_name = plat.platform
        return {
            'pex_version': pex_version,
            'class': pi.identity.interpreter,
            'version': pi.identity.version,
            'platform': platform_name,
        }

    @classmethod
    def default(cls, interpreter=None):
        pex_info = {
            'requirements': [],
            'distributions': {},
            'build_properties': cls.make_build_properties(interpreter),
        }
        return cls(info=pex_info)

    @classmethod
    def from_pex(cls, pex):
        if os.path.isfile(pex):
            with open_zip(pex) as zf:
                pex_info = zf.read(cls.PATH)
        else:
            with open(os.path.join(pex, cls.PATH)) as fp:
                pex_info = fp.read()
        return cls.from_json(pex_info)

    @classmethod
    def from_json(cls, content):
        if isinstance(content, bytes):
            content = content.decode('utf-8')
        return cls(info=json.loads(content))

    @classmethod
    def from_env(cls, env=ENV):
        supplied_env = env.strip_defaults()
        zip_safe = None if supplied_env.PEX_FORCE_LOCAL is None else not supplied_env.PEX_FORCE_LOCAL
        pex_info = {
            'pex_root': supplied_env.PEX_ROOT,
            'entry_point': supplied_env.PEX_MODULE,
            'script': supplied_env.PEX_SCRIPT,
            'zip_safe': zip_safe,
            'inherit_path': supplied_env.PEX_INHERIT_PATH,
            'ignore_errors': supplied_env.PEX_IGNORE_ERRORS,
            'always_write_cache': supplied_env.PEX_ALWAYS_CACHE,
        }
        # Filter out empty entries not explicitly set in the environment.
        return cls(info=dict((k, v) for (k, v) in pex_info.items() if v is not None))

    @classmethod
    def _parse_requirement_tuple(cls, requirement_tuple):
        if isinstance(requirement_tuple, (tuple, list)):
            if len(requirement_tuple) != 3:
                raise ValueError('Malformed PEX requirement: %r' % (requirement_tuple,))
            # pre 0.8.x requirement type:
            pex_warnings.warn(
                'Attempting to use deprecated PEX feature. Please upgrade past PEX 0.8.x.')
            return requirement_tuple[0]
        elif isinstance(requirement_tuple, compatibility_string):
            return requirement_tuple
        raise ValueError('Malformed PEX requirement: %r' % (requirement_tuple,))

    def __init__(self, info=None):
        """Construct a new PexInfo.

        This should not be used directly.
        """
        if info is not None and not isinstance(info, dict):
            raise ValueError('PexInfo can only be seeded with a dict, got: '
                             '%s of type %s' % (info, type(info)))
        self._pex_info = info or {}
        if 'inherit_path' in self._pex_info:
            self.inherit_path = self._pex_info['inherit_path']
        self._distributions = self._pex_info.get('distributions', {})
        # cast as set because pex info from json must store interpreter_constraints as a list
        self._interpreter_constraints = set(self._pex_info.get('interpreter_constraints', set()))
        requirements = self._pex_info.get('requirements', [])
        if not isinstance(requirements, (list, tuple)):
            raise ValueError('Expected requirements to be a list, got %s' % type(requirements))
        self._requirements = OrderedSet(
            self._parse_requirement_tuple(req) for req in requirements)

    def _get_safe(self, key):
        if key not in self._pex_info:
            return None
        value = self._pex_info[key]
        return value.encode('utf-8') if PY2 else value

    @property
    def build_properties(self):
        """Information about the system on which this PEX was generated.

        :returns: A dictionary containing metadata about the environment used to build this PEX.
        """
        return self._pex_info.get('build_properties', {})

    @build_properties.setter
    def build_properties(self, value):
        if not isinstance(value, dict):
            raise TypeError('build_properties must be a dictionary!')
        self._pex_info['build_properties'] = self.make_build_properties()
        self._pex_info['build_properties'].update(value)

    @property
    def zip_safe(self):
        """Whether or not this PEX should be treated as zip-safe.

        If set to false and the PEX is zipped, the contents of the PEX will be unpacked into a
        directory within the PEX_ROOT prior to execution. This allows code and frameworks
        depending upon __file__ existing on disk to operate normally.

        By default zip_safe is True. May be overridden at runtime by the $PEX_FORCE_LOCAL
        environment variable.
        """
        return self._pex_info.get('zip_safe', True)

    @zip_safe.setter
    def zip_safe(self, value):
        self._pex_info['zip_safe'] = bool(value)

    @property
    def pex_path(self):
        """A colon separated list of other pex files to merge into the runtime environment.

        This pex info property is used to persist the PEX_PATH environment variable into the pex
        info metadata for reuse within a built pex.
        """
        return self._pex_info.get('pex_path')

    @pex_path.setter
    def pex_path(self, value):
        self._pex_info['pex_path'] = value

    @property
    def inherit_path(self):
        """Whether or not this PEX should be allowed to inherit system dependencies.

        By default, PEX environments are scrubbed of all system distributions prior to execution.
        This means that PEX files cannot rely upon preexisting system libraries.

        By default inherit_path is false. This may be overridden at runtime by the
        $PEX_INHERIT_PATH environment variable.
        """
        return self._pex_info.get('inherit_path', 'false')

    @inherit_path.setter
    def inherit_path(self, value):
        if value is False:
            value = 'false'
        elif value is True:
            value = 'prefer'
        self._pex_info['inherit_path'] = value

    @property
    def interpreter_constraints(self):
        """A list of constraints that determine the interpreter compatibility for this pex, using
        the Requirement-style format, e.g. ``'CPython>=3'`` or just ``'>=2.7,<3'`` for
        requirements agnostic to interpreter class.

        This property will be used at exec time when bootstrapping a pex to search
        PEX_PYTHON_PATH for a list of compatible interpreters.
        """
        return list(self._interpreter_constraints)

    def add_interpreter_constraint(self, value):
        self._interpreter_constraints.add(str(value))

    @property
    def ignore_errors(self):
        return self._pex_info.get('ignore_errors', False)

    @ignore_errors.setter
    def ignore_errors(self, value):
        self._pex_info['ignore_errors'] = bool(value)

    @property
    def emit_warnings(self):
        return self._pex_info.get('emit_warnings', True)

    @emit_warnings.setter
    def emit_warnings(self, value):
        self._pex_info['emit_warnings'] = bool(value)

    @property
    def code_hash(self):
        return self._pex_info.get('code_hash')

    @code_hash.setter
    def code_hash(self, value):
        self._pex_info['code_hash'] = value

    @property
    def entry_point(self):
        return self._get_safe('entry_point')

    @entry_point.setter
    def entry_point(self, value):
        self._pex_info['entry_point'] = value

    @property
    def script(self):
        return self._get_safe('script')

    @script.setter
    def script(self, value):
        self._pex_info['script'] = value

    def add_requirement(self, requirement):
        self._requirements.add(str(requirement))

    @property
    def requirements(self):
        return self._requirements

    def add_distribution(self, location, sha):
        self._distributions[location] = sha

    @property
    def distributions(self):
        return self._distributions

    @property
    def always_write_cache(self):
        return self._pex_info.get('always_write_cache', False)

    @always_write_cache.setter
    def always_write_cache(self, value):
        self._pex_info['always_write_cache'] = bool(value)

    @property
    def pex_root(self):
        return os.path.expanduser(self._pex_info.get('pex_root', os.path.join('~', '.pex')))

    @pex_root.setter
    def pex_root(self, value):
        self._pex_info['pex_root'] = value

    @property
    def internal_cache(self):
        return self.INTERNAL_CACHE

    @property
    def install_cache(self):
        return os.path.join(self.pex_root, 'install')

    @property
    def zip_unsafe_cache(self):
        return os.path.join(self.pex_root, 'code')

    def update(self, other):
        if not isinstance(other, PexInfo):
            raise TypeError('Cannot merge a %r with PexInfo' % type(other))
        self._pex_info.update(other._pex_info)
        self._distributions.update(other.distributions)
        self._interpreter_constraints.update(other.interpreter_constraints)
        self._requirements.update(other.requirements)

    def dump(self, **kwargs):
        pex_info_copy = self._pex_info.copy()
        pex_info_copy['requirements'] = sorted(self._requirements)
        pex_info_copy['interpreter_constraints'] = sorted(self._interpreter_constraints)
        pex_info_copy['distributions'] = self._distributions.copy()
        return json.dumps(pex_info_copy, **kwargs)

    def copy(self):
        return self.from_json(self.dump())

    def merge_pex_path(self, pex_path):
        """Merges a new PEX_PATH definition into the existing one (if any).

        :param string pex_path: The PEX_PATH to merge.
        """
        if not pex_path:
            return
        self.pex_path = ':'.join(merge_split(self.pex_path, pex_path))

    def __repr__(self):
        return '{}({!r})'.format(type(self).__name__, self._pex_info)