def _upload_zip(zip_file: str, package_path: str, resolved_fs=None, force_upload: bool = False):
    """Upload ``zip_file`` to ``package_path`` on ``resolved_fs``.

    For pex archives, the upload is skipped when a pex with an identical
    ``code_hash`` is already present at the destination (unless
    ``force_upload`` is set). Any stale metadata file next to the destination
    is removed after a successful upload.

    :param zip_file: local path of the zip/pex archive to upload
    :param package_path: destination path on the resolved filesystem
    :param resolved_fs: filesystem abstraction supporting exists/get/put/mkdir/rm
                        — required despite the ``None`` default; callers must pass it
    :param force_upload: when True, upload even if the remote code hash matches
    """
    packer = packaging.detect_packer_from_file(zip_file)
    if packer == packaging.PEX_PACKER and resolved_fs.exists(package_path):
        with tempfile.TemporaryDirectory() as tempdir:
            local_copy_path = os.path.join(tempdir, os.path.basename(package_path))
            resolved_fs.get(package_path, local_copy_path)
            info_from_storage = PexInfo.from_pex(local_copy_path)
            # Fixed typo: was `into_to_upload`.
            info_to_upload = PexInfo.from_pex(zip_file)
            if not force_upload and info_from_storage.code_hash == info_to_upload.code_hash:
                _logger.info(f"skip upload of current {zip_file}"
                             f" as it is already uploaded on {package_path}")
                return

    _logger.info(f"upload current {zip_file} to {package_path}")
    # Renamed from `dir` to avoid shadowing the builtin.
    dest_dir = os.path.dirname(package_path)
    if not resolved_fs.exists(dest_dir):
        resolved_fs.mkdir(dest_dir)
    resolved_fs.put(zip_file, package_path)

    # Remove previous metadata
    archive_meta_data = _get_archive_metadata_path(package_path)
    if resolved_fs.exists(archive_meta_data):
        resolved_fs.rm(archive_meta_data)
def test_pants_binary_interpreter_selection_with_pexrc(self):
    """A pexrc PEX_PYTHON_PATH steers py2/py3 binary targets onto the right interpreters."""
    py27_path, py3_path = python_interpreter_path(PY_27), python_interpreter_path(PY_3)
    with setup_pexrc_with_pex_python_path([py27_path, py3_path]):
        with temporary_dir() as interpreters_cache:
            pants_ini_config = {'python-setup': {'interpreter_cache_dir': interpreters_cache}}
            project = os.path.join(self.testproject, 'python_3_selection_testing')
            # Build the py2 target first, then the py3 target, asserting each succeeds.
            for target_name in ('main_py2', 'main_py3'):
                pants_run = self.run_pants(
                    command=['binary', '{}:{}'.format(project, target_name)],
                    config=pants_ini_config
                )
                self.assert_success(pants_run)
            # Ensure proper interpreter constraints were passed to built pexes.
            dist_dir = os.path.join(os.getcwd(), 'dist')
            py2_pex = os.path.join(dist_dir, 'main_py2.pex')
            py3_pex = os.path.join(dist_dir, 'main_py3.pex')
            self.assertIn('CPython>2.7.6,<3', PexInfo.from_pex(py2_pex).interpreter_constraints)
            self.assertIn('CPython>3', PexInfo.from_pex(py3_pex).interpreter_constraints)
            # Cleanup created pexes.
            os.remove(py2_pex)
            os.remove(py3_pex)
def _activate(self):
    # type: () -> WorkingSet
    """Build the pkg_resources WorkingSet for this PEX plus every pex merged
    in via ``pex_path``, activate them all, and return the populated set."""
    working_set = WorkingSet([])

    # set up the local .pex environment
    pex_info = self.pex_info()
    self._envs.append(PEXEnvironment(self._pex, pex_info, interpreter=self._interpreter))
    # N.B. by this point, `pex_info.pex_path` will contain a single pex path
    # merged from pex_path in `PEX-INFO` and `PEX_PATH` set in the environment.
    # `PEX_PATH` entries written into `PEX-INFO` take precedence over those set
    # in the environment.
    if pex_info.pex_path:
        # set up other environments as specified in pex_path
        for pex_path in filter(None, pex_info.pex_path.split(os.pathsep)):
            pex_info = PexInfo.from_pex(pex_path)
            pex_info.update(self._pex_info_overrides)
            self._envs.append(PEXEnvironment(pex_path, pex_info, interpreter=self._interpreter))

    # activate all of them
    for env in self._envs:
        for dist in env.activate():
            working_set.add(dist)

    # Ensure that pkg_resources is not imported until at least every pex environment
    # (i.e. PEX_PATH) has been merged into the environment
    PEXEnvironment.declare_namespace_packages(working_set)
    self.patch_pkg_resources(working_set)
    return working_set
def create(
        cls,
        path,  # type: str
        interpreter=None,  # type: Optional[PythonInterpreter]
):
    # type: (...) -> Pip
    """Creates a pip tool with PEX isolation at path.

    :param path: The path to assemble the pip tool at.
    :param interpreter: The interpreter to run Pip with. The current interpreter by default.
    :return: The path of a PEX that can be used to execute Pip in isolation.
    """
    pip_interpreter = interpreter or PythonInterpreter.get()
    # Key the pip pex location on the isolated pex code hash so a new pex
    # version gets a fresh build.
    pip_pex_path = os.path.join(path, isolated().pex_hash)
    with atomic_directory(pip_pex_path, exclusive=True) as chroot:
        # Only build if another process hasn't already finalized this directory.
        if not chroot.is_finalized:
            # NOTE(review): deferred import — presumably avoids an import cycle; confirm.
            from pex.pex_builder import PEXBuilder

            isolated_pip_builder = PEXBuilder(path=chroot.work_dir)
            isolated_pip_builder.info.venv = True
            for dist_location in third_party.expose(["pip", "setuptools", "wheel"]):
                isolated_pip_builder.add_dist_location(dist=dist_location)
            isolated_pip_builder.set_script("pip")
            isolated_pip_builder.freeze()
    # Constrain the pip pex to the exact interpreter it will run with.
    pex_info = PexInfo.from_pex(pip_pex_path)
    pex_info.add_interpreter_constraint(str(pip_interpreter.identity.requirement))
    return cls(
        ensure_venv(PEX(pip_pex_path, interpreter=pip_interpreter, pex_info=pex_info)))
def _activate(self):
    # type: () -> Iterable[Distribution]
    """Activate this PEX's environment plus every pex on ``pex_path`` and
    return the full list of activated distributions."""
    # set up the local .pex environment
    pex_info = self.pex_info()
    target = DistributionTarget.for_interpreter(self._interpreter)
    self._envs.append(PEXEnvironment(self._pex, pex_info, target=target))
    # N.B. by this point, `pex_info.pex_path` will contain a single pex path
    # merged from pex_path in `PEX-INFO` and `PEX_PATH` set in the environment.
    # `PEX_PATH` entries written into `PEX-INFO` take precedence over those set
    # in the environment.
    if pex_info.pex_path:
        # set up other environments as specified in pex_path
        for pex_path in filter(None, pex_info.pex_path.split(os.pathsep)):
            pex_info = PexInfo.from_pex(pex_path)
            pex_info.update(self._pex_info_overrides)
            self._envs.append(PEXEnvironment(pex_path, pex_info, target=target))

    # activate all of them
    activated_dists = []  # type: List[Distribution]
    for env in self._envs:
        activated_dists.extend(env.activate())

    # Ensure that pkg_resources is not imported until at least every pex environment
    # (i.e. PEX_PATH) has been merged into the environment
    PEXEnvironment._declare_namespace_packages(activated_dists)
    return activated_dists
def _activate(self):
    """Lazily build and memoize the WorkingSet of activated distributions for
    this PEX and any pexes merged in via PEX_PATH; return the cached set."""
    if not self._working_set:
        working_set = WorkingSet([])

        # set up the local .pex environment
        pex_info = self._pex_info.copy()
        pex_info.update(self._pex_info_overrides)
        pex_info.merge_pex_path(self._vars.PEX_PATH)
        self._envs.append(
            PEXEnvironment(self._pex, pex_info, interpreter=self._interpreter))
        # N.B. by this point, `pex_info.pex_path` will contain a single pex path
        # merged from pex_path in `PEX-INFO` and `PEX_PATH` set in the environment.
        # `PEX_PATH` entries written into `PEX-INFO` take precedence over those set
        # in the environment.
        if pex_info.pex_path:
            # set up other environments as specified in pex_path
            for pex_path in filter(None, pex_info.pex_path.split(os.pathsep)):
                pex_info = PexInfo.from_pex(pex_path)
                pex_info.update(self._pex_info_overrides)
                self._envs.append(
                    PEXEnvironment(pex_path, pex_info, interpreter=self._interpreter))

        # activate all of them
        for env in self._envs:
            for dist in env.activate():
                working_set.add(dist)

        self._working_set = working_set

    return self._working_set
def cached_chroot(self, interpreter, pex_info, targets, platforms, extra_requirements=None, executable_file_content=None): """Returns a cached PythonChroot created with the specified args. The returned chroot will be cached for future use. TODO: Garbage-collect old chroots, so they don't pile up? TODO: Ideally chroots would just be products produced by some other task. But that's a bit too complicated to implement right now, as we'd need a way to request chroots for a variety of sets of targets. """ # This PexInfo contains any customizations specified by the caller. # The process of building a pex modifies it further. pex_info = pex_info or PexInfo.default() path = self._chroot_path(interpreter, pex_info, targets, platforms, extra_requirements, executable_file_content) if not os.path.exists(path): path_tmp = path + '.tmp' self._build_chroot(path_tmp, interpreter, pex_info, targets, platforms, extra_requirements, executable_file_content) shutil.move(path_tmp, path) # We must read the PexInfo that was frozen into the pex, so we get the modifications # created when that pex was built. pex_info = PexInfo.from_pex(path) # Now create a PythonChroot wrapper without dumping it. builder = PEXBuilder(path=path, interpreter=interpreter, pex_info=pex_info, copy=True) return self.create_chroot(interpreter=interpreter, builder=builder, targets=targets, platforms=platforms, extra_requirements=extra_requirements)
def _activate(self):
    """Lazily build and memoize the WorkingSet for this PEX and any pexes
    merged in via PEX_PATH; return the cached set on subsequent calls."""
    if not self._working_set:
        working_set = WorkingSet([])

        # set up the local .pex environment
        pex_info = self._pex_info.copy()
        pex_info.update(self._pex_info_overrides)
        pex_info.merge_pex_path(self._vars.PEX_PATH)
        self._envs.append(PEXEnvironment(self._pex, pex_info, interpreter=self._interpreter))
        # N.B. by this point, `pex_info.pex_path` will contain a single pex path
        # merged from pex_path in `PEX-INFO` and `PEX_PATH` set in the environment.
        # `PEX_PATH` entries written into `PEX-INFO` take precedence over those set
        # in the environment.
        if pex_info.pex_path:
            # set up other environments as specified in pex_path
            for pex_path in filter(None, pex_info.pex_path.split(os.pathsep)):
                pex_info = PexInfo.from_pex(pex_path)
                pex_info.update(self._pex_info_overrides)
                self._envs.append(PEXEnvironment(pex_path, pex_info, interpreter=self._interpreter))

        # activate all of them
        for env in self._envs:
            for dist in env.activate():
                working_set.add(dist)

        self._working_set = working_set

    return self._working_set
def test_target_constraints_with_no_sources(self):
    """Interpreter constraints apply even to a target that owns no sources."""
    with temporary_dir() as interpreters_cache:
        pants_ini_config = {
            'python-setup': {
                'interpreter_cache_dir': interpreters_cache,
                'interpreter_constraints': ['CPython>3'],
            }
        }
        target = '{}:test_bin'.format(
            os.path.join(self.testproject, 'test_target_with_no_sources'))
        # Run task.
        run_result = self.run_pants(command=['run', target], config=pants_ini_config)
        self.assert_success(run_result)
        self.assertIn('python3', run_result.stdout_data)
        # Binary task.
        binary_result = self.run_pants(command=['binary', target], config=pants_ini_config)
        self.assert_success(binary_result)
        # Ensure proper interpreter constraints were passed to built pexes.
        py2_pex = os.path.join(os.getcwd(), 'dist', 'test_bin.pex')
        self.assertIn('CPython>3', PexInfo.from_pex(py2_pex).interpreter_constraints)
        # Cleanup.
        os.remove(py2_pex)
def cached_chroot(self, interpreter, pex_info, targets, platforms=None, extra_requirements=None, executable_file_content=None): """Returns a cached PythonChroot created with the specified args. The returned chroot will be cached for future use. :rtype: pants.backend.python.python_chroot.PythonChroot TODO: Garbage-collect old chroots, so they don't pile up? TODO: Ideally chroots would just be products produced by some other task. But that's a bit too complicated to implement right now, as we'd need a way to request chroots for a variety of sets of targets. """ # This PexInfo contains any customizations specified by the caller. # The process of building a pex modifies it further. pex_info = pex_info or PexInfo.default() path = self._chroot_path(interpreter, pex_info, targets, platforms, extra_requirements, executable_file_content) if not os.path.exists(path): path_tmp = path + '.tmp' self._build_chroot(path_tmp, interpreter, pex_info, targets, platforms, extra_requirements, executable_file_content) shutil.move(path_tmp, path) # We must read the PexInfo that was frozen into the pex, so we get the modifications # created when that pex was built. pex_info = PexInfo.from_pex(path) # Now create a PythonChroot wrapper without dumping it. builder = PEXBuilder(path=path, interpreter=interpreter, pex_info=pex_info, copy=True) return self.create_chroot(interpreter=interpreter, builder=builder, targets=targets, platforms=platforms, extra_requirements=extra_requirements)
def __init__(
        self,
        pex,  # type: str
        pex_info=None,  # type: Optional[PexInfo]
        interpreter=None,  # type: Optional[PythonInterpreter]
):
    # type: (...) -> None
    """Create an environment for `pex`, defaulting metadata from its PEX-INFO
    and the interpreter to the current one."""
    self._pex = pex
    self._pex_info = pex_info or PexInfo.from_pex(pex)
    self._internal_cache = os.path.join(self._pex, self._pex_info.internal_cache)
    self._activated = False
    self._working_set = None
    self._interpreter = interpreter or PythonInterpreter.get()
    self._inherit_path = self._pex_info.inherit_path
    self._supported_tags = frozenset(self._interpreter.identity.supported_tags)
    self._target_interpreter_env = self._interpreter.identity.env_markers

    # For the bug this works around, see: https://bitbucket.org/pypy/pypy/issues/1686
    # NB: This must be installed early before the underlying pex is loaded in any way.
    if self._interpreter.identity.python_tag.startswith("pp") and zipfile.is_zipfile(self._pex):
        self._install_pypy_zipimporter_workaround(self._pex)

    # inherit_path == FALSE means the pex is hermetic: start from an empty search path.
    super(PEXEnvironment, self).__init__(
        search_path=[] if self._pex_info.inherit_path == InheritPath.FALSE else sys.path,
        platform=self._interpreter.identity.platform_tag,
    )
    TRACER.log(
        "E: tags for %r x %r -> %s" % (self.platform, self._interpreter, self._supported_tags),
        V=9,
    )
def add_from_requirements_pex(self, pex):
    """Add requirements from an existing pex.

    :param pex: The path to an existing .pex file or unzipped pex directory.
    :raises InvalidDistribution: if a distribution's recomputed hash does not
        match the hash recorded in the source pex's PEX-INFO.
    """
    self._ensure_unfrozen("Adding from pex")
    pex_info = PexInfo.from_pex(pex)

    def add(location, dname, expected_dhash):
        # Re-hash the distribution dir and verify it matches the recorded hash
        # before registering it in our own PEX-INFO.
        dhash = self._add_dist_dir(location, dname)
        if dhash != expected_dhash:
            raise self.InvalidDistribution(
                "Distribution {} at {} had hash {}, expected {}".format(
                    dname, location, dhash, expected_dhash))
        self._pex_info.add_distribution(dname, dhash)

    if os.path.isfile(pex):
        # Zipped pex: extract each distribution into the install cache first.
        with open_zip(pex) as zf:
            for dist_name, dist_hash in pex_info.distributions.items():
                internal_dist_path = "/".join([pex_info.internal_cache, dist_name])
                cached_location = os.path.join(pex_info.install_cache, dist_hash, dist_name)
                CacheHelper.cache_distribution(zf, internal_dist_path, cached_location)
                add(cached_location, dist_name, dist_hash)
    else:
        # Unzipped pex directory: distributions can be added straight from disk.
        for dist_name, dist_hash in pex_info.distributions.items():
            add(os.path.join(pex, pex_info.internal_cache, dist_name), dist_name, dist_hash)
    for req in pex_info.requirements:
        self._pex_info.add_requirement(req)
def activate_pex():
    """Bootstrap and activate the PEX named by the UWSGI_PEX environment variable.

    Exits the process with status 1 if UWSGI_PEX is unset. Otherwise prepends
    the pex and its .bootstrap dir to sys.path, registers pex finders,
    activates the pex environment, and returns
    ``(entry_point, pex_info, env, working_set)``.
    """
    entry_point = os.environ.get('UWSGI_PEX')
    if not entry_point:
        sys.stderr.write('couldnt determine pex from UWSGI_PEX environment variable, bailing!\n')
        sys.exit(1)

    sys.stderr.write('entry_point=%s\n' % entry_point)

    sys.path[0] = os.path.abspath(sys.path[0])
    sys.path.insert(0, entry_point)
    sys.path.insert(0, os.path.abspath(os.path.join(entry_point, '.bootstrap')))

    # Deferred imports: pex modules are only importable after the sys.path
    # manipulation above.
    from pex import pex_bootstrapper
    from pex.environment import PEXEnvironment
    from pex.finders import register_finders
    from pex.pex_info import PexInfo

    pex_bootstrapper.monkeypatch_build_zipmanifest()
    register_finders()

    pex_info = PexInfo.from_pex(entry_point)
    # Fixed: `print str(pex_info)` was Python-2-only statement syntax; the
    # call form parses (and behaves the same) on both Python 2 and 3.
    print(str(pex_info))
    env = PEXEnvironment(entry_point, pex_info)
    working_set = env.activate()

    sys.stderr.write('sys.path=%s\n\n' % sys.path)

    return entry_point, pex_info, env, working_set
def __init__(
        self,
        pex,  # type: str
        pex_info=None,  # type: Optional[PexInfo]
        target=None,  # type: Optional[DistributionTarget]
):
    # type: (...) -> None
    """Create an environment for `pex` against `target` (the current
    interpreter by default), pre-computing tag ranks used to pick the most
    specific matching distribution."""
    self._pex = os.path.realpath(pex)
    self._pex_info = pex_info or PexInfo.from_pex(pex)

    self._available_ranked_dists_by_key = defaultdict(
        list)  # type: DefaultDict[str, List[_RankedDistribution]]
    self._activated_dists = None  # type: Optional[Iterable[Distribution]]

    self._target = target or DistributionTarget.current()
    self._interpreter_version = self._target.get_python_version_str()

    # The supported_tags come ordered most specific (platform specific) to least specific
    # (universal). We want to rank most specific highest; so we need to reverse iteration order
    # here.
    self._supported_tags_to_rank = {
        tag: rank for rank, tag in enumerate(reversed(self._target.get_supported_tags()))
    }
    self._platform, _ = self._target.get_platform()

    # For the bug this works around, see: https://bitbucket.org/pypy/pypy/issues/1686
    # NB: This must be installed early before the underlying pex is loaded in any way.
    if self._platform.impl == "pp" and zipfile.is_zipfile(self._pex):
        self._install_pypy_zipimporter_workaround(self._pex)
def add_pex_path_items(pex_path):
    """Record the distributions of each pex on `pex_path` into the enclosing
    `venv_contents["pex_path"]` mapping (a closure variable), keyed by pex path.

    No-op when `pex_path` is empty or None.
    """
    if not pex_path:
        return
    from pex.pex_info import PexInfo

    pex_path_contents = venv_contents["pex_path"]
    # NOTE(review): splits on ":" rather than os.pathsep — POSIX-only; confirm intended.
    for pex in pex_path.split(":"):
        pex_path_contents[pex] = PexInfo.from_pex(pex).distributions
def __init__(self, pex=sys.argv[0], interpreter=None, env=ENV, verify_entry_point=False):
    """Wrap `pex`, loading its PEX-INFO metadata and any environment-supplied
    overrides; optionally verify the entry point eagerly."""
    self._pex = pex
    self._vars = env
    self._interpreter = interpreter or PythonInterpreter.get()
    self._pex_info = PexInfo.from_pex(self._pex)
    self._pex_info_overrides = PexInfo.from_env(env=env)
    self._envs = []
    self._working_set = None
    if verify_entry_point:
        self._do_entry_point_verification()
def __init__(self, interpreter, pex):
    """Wrap `pex` for coverage runs, pre-freezing it with its own path merged
    into pex_path as a workaround for a coverage.cmdline sys.path bug."""
    # Here we hack around `coverage.cmdline` nuking the 0th element of `sys.path` (our root pex)
    # by ensuring, the root pex is on the sys.path twice.
    # See: https://github.com/nedbat/coveragepy/issues/715
    pex_path = pex.path()
    pex_info = PexInfo.from_pex(pex_path)
    pex_info.merge_pex_path(pex_path)  # We're now on the sys.path twice.
    PEXBuilder(pex_path, interpreter=interpreter, pex_info=pex_info).freeze()
    self._pex = PEX(pex=pex_path, interpreter=interpreter)
    self._interpreter = interpreter
def test_pants_binary_interpreter_selection_with_pexrc(self):
    """pexrc PEX_PYTHON_PATH should steer binary targets onto py2/py3 interpreters."""
    py27_path = python_interpreter_path(PY_27)
    py3_path = python_interpreter_path(PY_3)
    with setup_pexrc_with_pex_python_path([py27_path, py3_path]):
        with temporary_dir() as interpreters_cache:
            pants_ini_config = {
                "python-setup": {
                    "interpreter_cache_dir": interpreters_cache
                }
            }
            project = os.path.join(self.testproject, "python_3_selection_testing")
            # Build the py2 target, then the py3 target, asserting each succeeds.
            for target_name in ("main_py2", "main_py3"):
                pants_run = self.run_pants(
                    command=["binary", "{}:{}".format(project, target_name)],
                    config=pants_ini_config,
                )
                self.assert_success(pants_run)
            # Ensure proper interpreter constraints were passed to built pexes.
            py2_pex = os.path.join(os.getcwd(), "dist", "main_py2.pex")
            py3_pex = os.path.join(os.getcwd(), "dist", "main_py3.pex")
            self.assertIn("CPython>2.7.6,<3", PexInfo.from_pex(py2_pex).interpreter_constraints)
            self.assertIn("CPython>3", PexInfo.from_pex(py3_pex).interpreter_constraints)
            # Cleanup created pexes.
            os.remove(py2_pex)
            os.remove(py3_pex)
def main(
        pex=None,  # type: Optional[PEX]
        pex_prog_path=None,  # type: Optional[str]
):
    # type: (...) -> int
    """CLI entry point for PEX_TOOLS subcommands.

    When `pex` is supplied, operates on that PEX directly; otherwise a
    positional PATH argument selects the PEX file and a compatible interpreter
    is resolved from its constraints. Returns the subcommand's exit code.
    """
    logging.basicConfig(format="%(levelname)s: %(message)s", level=logging.INFO)
    with TRACER.timed("Executing PEX_TOOLS"):
        pex_prog_path = simplify_pex_path(pex_prog_path or pex.path()) if pex else None
        # The prog string shown in --help differs depending on whether we were
        # launched from inside a pex or as a plain module.
        prog = ("PEX_TOOLS=1 {pex_path}".format(pex_path=pex_prog_path)
                if pex else
                "{python} {module}".format(
                    python=sys.executable, module=".".join(__name__.split(".")[:-1])))
        parser = ArgumentParser(
            prog=prog,
            description="Tools for working with {}.".format(pex_prog_path if pex else "PEX files"),
        )
        if pex is None:
            parser.add_argument("pex", nargs=1, metavar="PATH",
                                help="The path of the PEX file to operate on.")
        parser.set_defaults(func=functools.partial(show_help, parser))
        subparsers = parser.add_subparsers(
            description="{} can be operated on using any of the following subcommands.".format(
                "The PEX file {}".format(pex_prog_path) if pex else "A PEX file"),
        )
        for command in commands.all_commands():
            name = command.__class__.__name__.lower()
            # N.B.: We want to trigger the default argparse description if the doc string is empty.
            description = command.__doc__ or None
            help_text = description.splitlines()[0] if description else None
            command_parser = subparsers.add_parser(name, help=help_text, description=description)
            command.add_arguments(command_parser)
            command_parser.set_defaults(func=command.run)
        options = parser.parse_args()
        if pex is None:
            # Resolve an interpreter compatible with the target PEX's constraints
            # (PEX-INFO merged with any env overrides).
            pex_info = PexInfo.from_pex(options.pex[0])
            pex_info.update(PexInfo.from_env())
            interpreter = pex_bootstrapper.find_compatible_interpreter(
                interpreter_constraints=pex_info.interpreter_constraints)
            pex = PEX(options.pex[0], interpreter=interpreter)
        func = cast("CommandFunc", options.func)
        result = func(pex, options)
        result.maybe_display()
        return result.exit_code
def test_resolve_from_pex(
        pex_repository,  # type: str
        py27,  # type: PythonInterpreter
        py36,  # type: PythonInterpreter
        foreign_platform,  # type: Platform
        manylinux,  # type: Optional[str]
):
    # type: (...) -> None
    """Resolving from a pex repository with the same requirements and targets
    should yield exactly the distributions baked into that pex."""
    pex_info = PexInfo.from_pex(pex_repository)
    direct_requirements = pex_info.requirements
    assert 1 == len(direct_requirements)

    resolved_distributions = resolve_from_pex(
        pex=pex_repository,
        requirements=direct_requirements,
        interpreters=[py27, py36],
        platforms=[foreign_platform],
        manylinux=manylinux,
    )

    # Group resolved distribution locations by project key for per-project assertions.
    distribution_locations_by_key = defaultdict(set)  # type: DefaultDict[str, Set[str]]
    for resolved_distribution in resolved_distributions:
        distribution_locations_by_key[resolved_distribution.distribution.key].add(
            resolved_distribution.distribution.location)

    assert {
        os.path.basename(location)
        for locations in distribution_locations_by_key.values()
        for location in locations
    } == set(pex_info.distributions.keys()), (
        "Expected to resolve the same full set of distributions from the pex repository as make "
        "it up when using the same requirements.")

    assert "requests" in distribution_locations_by_key
    assert 1 == len(distribution_locations_by_key["requests"])

    assert "pysocks" in distribution_locations_by_key
    assert 2 == len(distribution_locations_by_key["pysocks"]), (
        "PySocks has a non-platform-specific Python 2.7 distribution and a non-platform-specific "
        "Python 3 distribution; so we expect to resolve two distributions - one covering "
        "Python 2.7 and one covering local Python 3.6 and our cp36 foreign platform.")

    assert "cryptography" in distribution_locations_by_key
    assert 3 == len(distribution_locations_by_key["cryptography"]), (
        "The cryptography requirement of the security extra is platform specific; so we expect a "
        "unique distribution to be resolved for each of the three distribution targets")
def cached_chroot(self, interpreter, pex_info, targets, platforms, extra_requirements=None, executable_file_content=None): """Returns a cached PythonChroot created with the specified args. The returned chroot will be cached for future use. TODO: Garbage-collect old chroots, so they don't pile up? TODO: Ideally chroots would just be products produced by some other task. But that's a bit too complicated to implement right now, as we'd need a way to request chroots for a variety of sets of targets. """ # This PexInfo contains any customizations specified by the caller. # The process of building a pex modifies it further. pex_info = pex_info or PexInfo.default() path = self._chroot_path(PythonSetup.global_instance(), interpreter, pex_info, targets, platforms, extra_requirements, executable_file_content) if not os.path.exists(path): path_tmp = path + '.tmp' self._build_chroot(path_tmp, interpreter, pex_info, targets, platforms, extra_requirements, executable_file_content) shutil.move(path_tmp, path) # We must read the PexInfo that was frozen into the pex, so we get the modifications # created when that pex was built. pex_info = PexInfo.from_pex(path) # Now create a PythonChroot wrapper without dumping it. builder = PEXBuilder(path=path, interpreter=interpreter, pex_info=pex_info) chroot = PythonChroot(context=self.context, python_setup=PythonSetup.global_instance(), python_repos=PythonRepos.global_instance(), interpreter=interpreter, builder=builder, targets=targets, platforms=platforms, extra_requirements=extra_requirements) # TODO: Doesn't really need to be a contextmanager, but it's convenient to make it so # while transitioning calls to temporary_chroot to calls to cached_chroot. # We can revisit after that transition is complete. yield chroot
def __init__(
        self,
        pex=sys.argv[0],  # type: str
        interpreter=None,  # type: Optional[PythonInterpreter]
        env=ENV,  # type: Variables
        verify_entry_point=False,  # type: bool
):
    # type: (...) -> None
    """Wrap `pex`, loading its PEX-INFO metadata and environment-supplied
    overrides; optionally verify the entry point eagerly."""
    self._pex = pex
    self._interpreter = interpreter or PythonInterpreter.get()
    self._pex_info = PexInfo.from_pex(self._pex)
    self._pex_info_overrides = PexInfo.from_env(env=env)
    self._vars = env
    self._envs = []  # type: List[PEXEnvironment]
    self._activated_dists = None  # type: Optional[Iterable[Distribution]]
    if verify_entry_point:
        self._do_entry_point_verification()
def test_target_constraints_with_no_sources(self):
    """Interpreter constraints apply even to a target that owns no sources."""
    with temporary_dir() as interpreters_cache:
        pants_config = {
            "python-setup": {
                "interpreter_cache_dir": interpreters_cache,
                "interpreter_constraints": ["CPython>3"],
            },
            "source": {
                "root_patterns": ["src/python"]
            },
        }
        target = "{}:test_bin".format(
            os.path.join(self.testproject, "test_target_with_no_sources"))
        # Run task.
        run_result = self.run_pants(command=["run", target], config=pants_config)
        self.assert_success(run_result)
        self.assertIn("python3", run_result.stdout_data)
        # Binary task.
        binary_result = self.run_pants(command=["binary", target], config=pants_config)
        self.assert_success(binary_result)
        # Ensure proper interpreter constraints were passed to built pexes.
        py2_pex = os.path.join(os.getcwd(), "dist", "test_bin.pex")
        self.assertIn("CPython>3", PexInfo.from_pex(py2_pex).interpreter_constraints)
        # Cleanup.
        os.remove(py2_pex)
def cached_chroot(self, interpreter, pex_info, targets, platforms, extra_requirements=None, executable_file_content=None): """Returns a cached PythonChroot created with the specified args. The returned chroot will be cached for future use. TODO: Garbage-collect old chroots, so they don't pile up? TODO: Ideally chroots would just be products produced by some other task. But that's a bit too complicated to implement right now, as we'd need a way to request chroots for a variety of sets of targets. """ # This PexInfo contains any customizations specified by the caller. # The process of building a pex modifies it further. pex_info = pex_info or PexInfo.default() path = self._chroot_path(PythonSetup.global_instance(), interpreter, pex_info, targets, platforms, extra_requirements, executable_file_content) if not os.path.exists(path): path_tmp = path + '.tmp' self._build_chroot(path_tmp, interpreter, pex_info, targets, platforms, extra_requirements, executable_file_content) shutil.move(path_tmp, path) # We must read the PexInfo that was frozen into the pex, so we get the modifications # created when that pex was built. pex_info = PexInfo.from_pex(path) # Now create a PythonChroot wrapper without dumping it. builder = PEXBuilder(path=path, interpreter=interpreter, pex_info=pex_info) chroot = PythonChroot( context=self.context, python_setup=PythonSetup.global_instance(), python_repos=PythonRepos.global_instance(), interpreter=interpreter, builder=builder, targets=targets, platforms=platforms, extra_requirements=extra_requirements) # TODO: Doesn't really need to be a contextmanager, but it's convenient to make it so # while transitioning calls to temporary_chroot to calls to cached_chroot. # We can revisit after that transition is complete. yield chroot
def _loaded_envs(self):
    # type: () -> Iterable[PEXEnvironment]
    """Lazily construct and memoize the PEXEnvironments for this pex and every
    pex on its merged ``pex_path``; return the cached tuple thereafter."""
    if self._envs is None:
        # set up the local .pex environment
        pex_info = self.pex_info()
        target = DistributionTarget.for_interpreter(self._interpreter)
        envs = [PEXEnvironment(self._pex, pex_info, target=target)]
        # N.B. by this point, `pex_info.pex_path` will contain a single pex path
        # merged from pex_path in `PEX-INFO` and `PEX_PATH` set in the environment.
        # `PEX_PATH` entries written into `PEX-INFO` take precedence over those set
        # in the environment.
        if pex_info.pex_path:
            # set up other environments as specified in pex_path
            for pex_path in filter(None, pex_info.pex_path.split(os.pathsep)):
                pex_info = PexInfo.from_pex(pex_path)
                pex_info.update(self._pex_info_overrides)
                envs.append(PEXEnvironment(pex_path, pex_info, target=target))
        self._envs = tuple(envs)
    return self._envs
def assert_pex_attribute(self, pex, attr, value):
    """Assert that `pex` exists on disk and that its PexInfo attribute `attr`
    equals `value`."""
    self.assertTrue(os.path.exists(pex))
    info = PexInfo.from_pex(pex)
    actual = getattr(info, attr)
    self.assertEqual(actual, value)
def __str__(self) -> str:
    """Render the pex path followed by its pinned requirements, one per line."""
    location = self._requirements_pex.path()
    info = PexInfo.from_pex(location)
    formatted_reqs = "\n ".join(str(req) for req in info.requirements)
    return f"{type(self).__name__} at {location} with requirements:\n {formatted_reqs} "
def test_issues_789_demo():
    # type: () -> None
    """End-to-end demo for issue #789: build a standard pex, then a
    "dehydrated" .ptex that re-hydrates itself on first run from a wheel house,
    and verify it runs, fails on bad flags, and breaks once its cache is gone."""
    tmpdir = safe_mkdtemp()
    pex_project_dir = (subprocess.check_output(
        ["git", "rev-parse", "--show-toplevel"]).decode("utf-8").strip())

    # 1. Imagine we've pre-resolved the requirements needed in our wheel house.
    requirements = [
        "ansicolors",
        "isort",
        "setuptools",  # N.B.: isort doesn't declare its setuptools dependency.
    ]

    wheelhouse = os.path.join(tmpdir, "wheelhouse")
    get_pip().spawn_download_distributions(download_dir=wheelhouse,
                                           requirements=requirements).wait()

    # 2. Also imagine this configuration is passed to a tool (PEX or a wrapper as in this test
    # example) via the CLI or other configuration data sources. For example, Pants has a `PythonSetup`
    # that combines with BUILD target data to get you this sort of configuration info outside pex.
    resolver_settings = dict(
        indexes=[],  # Turn off pypi.
        find_links=[wheelhouse],  # Use our wheel house.
        build=False,  # Use only pre-built wheels.
    )  # type: Dict[str, Any]

    # 3. That same configuration was used to build a standard pex:
    resolver_args = []
    if len(resolver_settings["find_links"]) == 0:
        resolver_args.append("--no-index")
    else:
        for index in resolver_settings["indexes"]:
            resolver_args.extend(["--index", index])
        for repo in resolver_settings["find_links"]:
            resolver_args.extend(["--find-links", repo])
    resolver_args.append("--build" if resolver_settings["build"] else "--no-build")

    project_code_dir = os.path.join(tmpdir, "project_code_dir")
    with safe_open(os.path.join(project_code_dir, "colorized_isort.py"), "w") as fp:
        fp.write(
            dedent("""\
                import colors
                import os
                import subprocess
                import sys


                def run():
                    env = os.environ.copy()
                    env.update(PEX_MODULE='isort')
                    isort_process = subprocess.Popen(
                        sys.argv,
                        env=env,
                        stdout = subprocess.PIPE,
                        stderr = subprocess.PIPE
                    )
                    stdout, stderr = isort_process.communicate()
                    print(colors.green(stdout.decode('utf-8')))
                    print(colors.red(stderr.decode('utf-8')))
                    sys.exit(isort_process.returncode)
                """))

    colorized_isort_pex = os.path.join(tmpdir, "colorized_isort.pex")
    args = [
        "--sources-directory",
        project_code_dir,
        "--entry-point",
        "colorized_isort:run",
        "--output-file",
        colorized_isort_pex,
    ]
    result = run_pex_command(args + resolver_args + requirements)
    result.assert_success()

    # 4. Now the tool builds a "dehydrated" PEX using the standard pex + resolve settings as the
    # template.
    ptex_cache = os.path.join(tmpdir, ".ptex")

    colorized_isort_pex_info = PexInfo.from_pex(colorized_isort_pex)
    colorized_isort_pex_info.pex_root = ptex_cache

    # Force the standard pex to extract its code. An external tool like Pants would already know the
    # orignal source code file paths, but we need to discover here.
    colorized_isort_pex_code_dir = os.path.join(
        colorized_isort_pex_info.zip_unsafe_cache, colorized_isort_pex_info.code_hash)
    env = os.environ.copy()
    env.update(PEX_ROOT=ptex_cache, PEX_INTERPRETER="1", PEX_FORCE_LOCAL="1")
    subprocess.check_call([colorized_isort_pex, "-c", ""], env=env)

    colorized_isort_ptex_code_dir = os.path.join(tmpdir, "colorized_isort_ptex_code_dir")
    safe_mkdir(colorized_isort_ptex_code_dir)

    code = []
    for root, dirs, files in os.walk(colorized_isort_pex_code_dir):
        rel_root = os.path.relpath(root, colorized_isort_pex_code_dir)
        for f in files:
            # Don't ship compiled python from the code extract above, the target interpreter will not
            # match ours in general.
            if f.endswith(".pyc"):
                continue
            rel_path = os.path.normpath(os.path.join(rel_root, f))
            # The root __main__.py is special for any zipapp including pex, let it write its own
            # __main__.py bootstrap. Similarly. PEX-INFO is special to pex and we want the PEX-INFO for
            # The ptex pex, not the pex being ptexed.
            if rel_path in ("__main__.py", PexInfo.PATH):
                continue
            os.symlink(os.path.join(root, f),
                       os.path.join(colorized_isort_ptex_code_dir, rel_path))
            code.append(rel_path)

    ptex_code_dir = os.path.join(tmpdir, "ptex_code_dir")

    ptex_info = dict(code=code, resolver_settings=resolver_settings)
    with safe_open(os.path.join(ptex_code_dir, "PTEX-INFO"), "w") as fp:
        json.dump(ptex_info, fp)

    with safe_open(os.path.join(ptex_code_dir, "IPEX-INFO"), "w") as fp:
        fp.write(colorized_isort_pex_info.dump())

    # The ptex bootstrap script: on first run it hydrates the sibling .ipex from
    # the PTEX-INFO/IPEX-INFO metadata, then execs it.
    with safe_open(os.path.join(ptex_code_dir, "ptex.py"), "w") as fp:
        fp.write(
            dedent("""\
                import json
                import os
                import sys

                from pex import resolver
                from pex.common import open_zip
                from pex.pex_builder import PEXBuilder
                from pex.pex_info import PexInfo
                from pex.util import CacheHelper
                from pex.variables import ENV

                self = sys.argv[0]
                ipex_file = '{}.ipex'.format(os.path.splitext(self)[0])

                if not os.path.isfile(ipex_file):
                    print('Hydrating {} to {}'.format(self, ipex_file))

                    ptex_pex_info = PexInfo.from_pex(self)
                    code_root = os.path.join(ptex_pex_info.zip_unsafe_cache, ptex_pex_info.code_hash)
                    with open_zip(self) as zf:
                        # Populate the pex with the pinned requirements and distribution names & hashes.
                        ipex_info = PexInfo.from_json(zf.read('IPEX-INFO'))
                        ipex_builder = PEXBuilder(pex_info=ipex_info)

                        # Populate the pex with the needed code.
                        ptex_info = json.loads(zf.read('PTEX-INFO').decode('utf-8'))
                        for path in ptex_info['code']:
                            ipex_builder.add_source(os.path.join(code_root, path), path)

                    # Perform a fully pinned intransitive resolve to hydrate the install cache (not the
                    # pex!).
                    resolver_settings = ptex_info['resolver_settings']
                    resolved_distributions = resolver.resolve(
                        requirements=[str(req) for req in ipex_info.requirements],
                        cache=ipex_info.pex_root,
                        transitive=False,
                        **resolver_settings
                    )

                    ipex_builder.build(ipex_file)

                os.execv(ipex_file, [ipex_file] + sys.argv[1:])
                """))

    colorized_isort_ptex = os.path.join(tmpdir, "colorized_isort.ptex")

    result = run_pex_command([
        "--not-zip-safe",
        "--always-write-cache",
        "--pex-root",
        ptex_cache,
        pex_project_dir,  # type: ignore[list-item]  # This is unicode in Py2, whereas everthing else is bytes. That's fine.
        "--sources-directory",
        ptex_code_dir,
        "--sources-directory",
        colorized_isort_ptex_code_dir,
        "--entry-point",
        "ptex",
        "--output-file",
        colorized_isort_ptex,
    ])
    result.assert_success()

    subprocess.check_call([colorized_isort_ptex, "--version"])
    with pytest.raises(CalledProcessError):
        subprocess.check_call([colorized_isort_ptex, "--not-a-flag"])

    safe_rmtree(ptex_cache)
    # The dehydrated pex now fails since it lost its hydration from the cache.
    with pytest.raises(CalledProcessError):
        subprocess.check_call([colorized_isort_ptex, "--version"])
def _bootstrap(entry_point):
    # type: (str) -> PexInfo
    """Load PEX-INFO metadata for `entry_point`, fold in environment
    overrides, and configure pex warnings accordingly."""
    pex_info = PexInfo.from_pex(entry_point)  # type: PexInfo
    env_overrides = PexInfo.from_env()
    pex_info.update(env_overrides)
    pex_warnings.configure_warnings(pex_info, ENV)
    return pex_info