def test_access_zipped_assets_integration():
    """Integration test: a program running from a PEX can extract zipped package
    assets via DistributionHelper.access_zipped_assets and read them back."""
    exe_source = dedent('''
        import os
        from _pex.util import DistributionHelper
        temp_dir = DistributionHelper.access_zipped_assets('my_package', 'submodule')
        with open(os.path.join(temp_dir, 'mod.py'), 'r') as fp:
            for line in fp:
                print(line)
    ''')
    with nested(temporary_dir(), temporary_dir()) as (build_dir, out_dir):
        builder = PEXBuilder(path=build_dir)

        exe_path = os.path.join(build_dir, 'exe.py')
        with open(exe_path, 'w') as f:
            f.write(exe_source)
        builder.set_executable(exe_path)

        pkg_dir = os.path.join(build_dir, 'my_package', 'submodule')
        safe_mkdir(pkg_dir)
        mod_path = os.path.join(pkg_dir, 'mod.py')
        with open(mod_path, 'w') as f:
            f.write('accessed')
        builder.add_source(mod_path, 'my_package/submodule/mod.py')

        pex = os.path.join(out_dir, 'app.pex')
        builder.build(pex)

        output, returncode = run_simple_pex(pex)
        try:
            output = output.decode('UTF-8')
        except ValueError:
            pass
        assert output == 'accessed\n'
        assert returncode == 0
def assert_access_zipped_assets(distribution_helper_import):
    # type: (str) -> bytes
    """Build and run a PEX whose entry point extracts zipped assets with
    DistributionHelper.access_zipped_assets, asserting the asset round-trips.

    Returns the subprocess's stderr bytes for the caller to inspect.
    """
    exe_source = dedent("""
        import os
        {distribution_helper_import}
        temp_dir = DistributionHelper.access_zipped_assets('my_package', 'submodule')
        with open(os.path.join(temp_dir, 'mod.py'), 'r') as fp:
            for line in fp:
                print(line)
    """.format(distribution_helper_import=distribution_helper_import))
    with temporary_dir() as td1, temporary_dir() as td2:
        builder = PEXBuilder(path=td1)

        exe_path = os.path.join(td1, "exe.py")
        with open(exe_path, "w") as f:
            f.write(exe_source)
        builder.set_executable(exe_path)

        submodule_dir = os.path.join(td1, "my_package", "submodule")
        safe_mkdir(submodule_dir)
        mod_path = os.path.join(submodule_dir, "mod.py")
        with open(mod_path, "w") as f:
            f.write("accessed")
        builder.add_source(mod_path, "my_package/submodule/mod.py")
        # Explicit (empty) package markers so my_package.submodule is importable.
        builder.add_source(None, "my_package/__init__.py")
        builder.add_source(None, "my_package/submodule/__init__.py")

        pex_path = os.path.join(td2, "app.pex")
        builder.build(pex_path)

        process = PEX(pex_path, interpreter=builder.interpreter).run(
            blocking=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE
        )
        stdout, stderr = process.communicate()
        assert process.returncode == 0
        assert b"accessed\n" == stdout
        return cast(bytes, stderr)
def test_dir_hash():
    # type: () -> None
    """CacheHelper.dir_hash must track directory/file names and contents while
    ignoring bytecode artifacts (.pyc, .pyc.<suffix>) and __pycache__ trees."""
    with temporary_dir() as tmp_dir:
        safe_mkdir(os.path.join(tmp_dir, "a", "b"))
        with safe_open(os.path.join(tmp_dir, "c", "d", "e.py"), "w") as fp:
            fp.write("contents1")
        with safe_open(os.path.join(tmp_dir, "f.py"), "w") as fp:
            fp.write("contents2")
        baseline = CacheHelper.dir_hash(tmp_dir)

        # Renaming a directory changes the hash; renaming it back restores it.
        os.rename(os.path.join(tmp_dir, "c"), os.path.join(tmp_dir, "c-renamed"))
        assert baseline != CacheHelper.dir_hash(tmp_dir)
        os.rename(os.path.join(tmp_dir, "c-renamed"), os.path.join(tmp_dir, "c"))
        assert baseline == CacheHelper.dir_hash(tmp_dir)

        # Bytecode artifacts are invisible to the hash wherever they appear.
        touch(os.path.join(tmp_dir, "c", "d", "e.pyc"))
        assert baseline == CacheHelper.dir_hash(tmp_dir)
        touch(os.path.join(tmp_dir, "c", "d", "e.pyc.123456789"))
        assert baseline == CacheHelper.dir_hash(tmp_dir)

        pycache_dir = os.path.join(tmp_dir, "__pycache__")
        safe_mkdir(pycache_dir)
        touch(os.path.join(pycache_dir, "f.pyc"))
        assert baseline == CacheHelper.dir_hash(tmp_dir)
        touch(os.path.join(pycache_dir, "f.pyc.123456789"))
        assert baseline == CacheHelper.dir_hash(tmp_dir)

        touch(os.path.join(pycache_dir, "f.py"))
        assert baseline == CacheHelper.dir_hash(
            tmp_dir
        ), "All content under __pycache__ directories should be ignored."
def create(
    cls,
    venv_dir,  # type: str
    interpreter=None,  # type: Optional[PythonInterpreter]
    force=False,  # type: bool
):
    # type: (...) -> Virtualenv
    """Create a virtual environment at `venv_dir` and return a Virtualenv for it.

    :param venv_dir: Directory to create the venv in (created if missing).
    :param interpreter: Interpreter to back the venv; defaults to the current one.
    :param force: If True, wipe any pre-existing content at `venv_dir` first.
    """
    venv_dir = os.path.abspath(venv_dir)
    safe_mkdir(venv_dir, clean=force)

    interpreter = interpreter or PythonInterpreter.get()
    if interpreter.is_venv:
        # Never nest venvs: resolve back to the real base interpreter.
        base_interpreter = interpreter.resolve_base_interpreter()
        TRACER.log(
            "Ignoring enclosing venv {} and using its base interpreter {} to create venv at {}"
            " instead.".format(interpreter.prefix, base_interpreter.binary, venv_dir),
            V=3,
        )
        interpreter = base_interpreter

    # Fixed idiom: `x != y` instead of `not x == y`.
    if interpreter.version[0] >= 3 and interpreter.identity.interpreter != "PyPy":
        # N.B.: PyPy3 comes equipped with a venv module but it does not seem to work.
        interpreter.execute(args=["-m", "venv", "--without-pip", venv_dir])
    else:
        # Python 2 (and PyPy3): fall back to the bundled virtualenv script.
        virtualenv_py = resource_string(__name__, "virtualenv_16.7.10_py")
        with named_temporary_file(mode="wb") as fp:
            fp.write(virtualenv_py)
            # Close (but keep) the file so the interpreter subprocess sees flushed content.
            fp.close()
            interpreter.execute(
                args=[fp.name, "--no-pip", "--no-setuptools", "--no-wheel", venv_dir],
            )
    return cls(venv_dir)
def build(self, filename, bytecode_compile=True, deterministic_timestamp=False):
    """Package the PEX into a zipfile.

    :param filename: The filename where the PEX should be stored.
    :param bytecode_compile: If True, precompile .py files into .pyc files.
    :param deterministic_timestamp: If True, will use our hardcoded time for zipfile timestamps.

    If the PEXBuilder is not yet frozen, it will be frozen by ``build``.  This renders the
    PEXBuilder immutable.
    """
    if not self._frozen:
        self.freeze(bytecode_compile=bytecode_compile)
    try:
        os.unlink(filename + '~')
        # Fixed: Logger.warn is a deprecated alias of Logger.warning (and the newer
        # variant of this method already uses warning).
        self._logger.warning('Previous binary unexpectedly exists, cleaning: %s' % (filename + '~'))
    except OSError:
        # The expectation is that the file does not exist, so continue
        pass
    if os.path.dirname(filename):
        safe_mkdir(os.path.dirname(filename))
    # Build into `filename~` first, then atomically rename over `filename` so a
    # failed build never leaves a truncated PEX at the destination.
    with open(filename + '~', 'ab') as pexfile:
        assert os.path.getsize(pexfile.name) == 0
        pexfile.write(to_bytes('%s\n' % self._shebang))
    self._chroot.zip(filename + '~', mode='a', deterministic_timestamp=deterministic_timestamp)
    if os.path.exists(filename):
        os.unlink(filename)
    os.rename(filename + '~', filename)
    chmod_plus_x(filename)
def assert_access_zipped_assets(distribution_helper_import):
    """Build and run a PEX that extracts zipped assets via DistributionHelper,
    asserting the asset content round-trips; returns the process's stderr."""
    exe_source = dedent("""
        import os
        {distribution_helper_import}
        temp_dir = DistributionHelper.access_zipped_assets('my_package', 'submodule')
        with open(os.path.join(temp_dir, 'mod.py'), 'r') as fp:
            for line in fp:
                print(line)
    """.format(distribution_helper_import=distribution_helper_import))
    with nested(temporary_dir(), temporary_dir()) as (build_dir, out_dir):
        builder = PEXBuilder(path=build_dir)

        exe_path = os.path.join(build_dir, 'exe.py')
        with open(exe_path, 'w') as f:
            f.write(exe_source)
        builder.set_executable(exe_path)

        pkg_dir = os.path.join(build_dir, 'my_package', 'submodule')
        safe_mkdir(pkg_dir)
        mod_path = os.path.join(pkg_dir, 'mod.py')
        with open(mod_path, 'w') as f:
            f.write('accessed')
        builder.add_source(mod_path, 'my_package/submodule/mod.py')
        # Explicit (empty) package markers so my_package.submodule is importable.
        builder.add_source(None, 'my_package/__init__.py')
        builder.add_source(None, 'my_package/submodule/__init__.py')

        pex_path = os.path.join(out_dir, 'app.pex')
        builder.build(pex_path)

        process = PEX(pex_path, interpreter=builder.interpreter).run(
            blocking=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        stdout, stderr = process.communicate()
        assert process.returncode == 0
        assert b'accessed\n' == stdout
        return stderr
def resolve_interpreter(cache, fetchers, interpreter, requirement):
    """Resolve an interpreter with a specific requirement.

    Given a :class:`PythonInterpreter` and a requirement, return an interpreter with the
    capability of resolving that requirement or ``None`` if it's not possible to install a
    suitable requirement.
    """
    requirement = maybe_requirement(requirement)

    # Short circuit: nothing to do if the interpreter already satisfies the requirement.
    if interpreter.satisfies([requirement]):
        return interpreter

    def installer_provider(sdist):
        return EggInstaller(
            Archiver.unpack(sdist),
            strict=(requirement.key != 'setuptools'),
            interpreter=interpreter,
        )

    interpreter_cache_dir = os.path.join(cache, str(interpreter.identity))
    safe_mkdir(interpreter_cache_dir)
    egg = _resolve_and_link_interpreter(
        requirement,
        fetchers,
        os.path.join(interpreter_cache_dir, requirement.key),
        installer_provider,
    )
    if egg:
        return interpreter.with_extra(egg.name, egg.raw_version, egg.path)
def resolve_interpreter(cache, fetchers, interpreter, requirement):
    """Resolve an interpreter with a specific requirement.

    Given a :class:`PythonInterpreter` and a requirement, return an interpreter with the
    capability of resolving that requirement or ``None`` if it's not possible to install a
    suitable requirement.
    """
    requirement = maybe_requirement(requirement)

    if interpreter.satisfies([requirement]):
        # Short circuit: the interpreter can already resolve this requirement.
        return interpreter

    def installer_provider(sdist):
        unpacked = Archiver.unpack(sdist)
        return EggInstaller(unpacked,
                            strict=requirement.key != 'setuptools',
                            interpreter=interpreter)

    target_dir = os.path.join(cache, str(interpreter.identity))
    safe_mkdir(target_dir)
    egg = _resolve_and_link_interpreter(requirement,
                                        fetchers,
                                        os.path.join(target_dir, requirement.key),
                                        installer_provider)
    if egg:
        return interpreter.with_extra(egg.name, egg.raw_version, egg.path)
    return None
def build(self, filename, bytecode_compile=True, deterministic_timestamp=False):
    """Package the PEX into a zipfile.

    :param filename: The filename where the PEX should be stored.
    :param bytecode_compile: If True, precompile .py files into .pyc files.
    :param deterministic_timestamp: If True, will use our hardcoded time for zipfile timestamps.

    If the PEXBuilder is not yet frozen, it will be frozen by ``build``.  This renders the
    PEXBuilder immutable.
    """
    if not self._frozen:
        self.freeze(bytecode_compile=bytecode_compile)

    # Build into a `~`-suffixed scratch file, then rename over the destination so a
    # failed build never leaves a truncated PEX behind.
    scratch = filename + '~'
    try:
        os.unlink(scratch)
        self._logger.warning('Previous binary unexpectedly exists, cleaning: %s' % scratch)
    except OSError:
        # The expectation is that the file does not exist, so continue
        pass

    parent_dir = os.path.dirname(filename)
    if parent_dir:
        safe_mkdir(parent_dir)

    with open(scratch, 'ab') as pexfile:
        assert os.path.getsize(pexfile.name) == 0
        pexfile.write(to_bytes('%s\n' % self._shebang))
    self._chroot.zip(scratch, mode='a', deterministic_timestamp=deterministic_timestamp)

    if os.path.exists(filename):
        os.unlink(filename)
    os.rename(scratch, filename)
    chmod_plus_x(filename)
def test_executor_execute_dir():
    """Executing a directory path must raise Executor.ExecutionError naming that path."""
    with temporary_dir() as temp_dir:
        test_dir = os.path.realpath(os.path.join(temp_dir, 'tmp'))
        safe_mkdir(test_dir)
        assert os.path.isdir(test_dir)
        with pytest.raises(Executor.ExecutionError) as e:
            Executor.execute(test_dir)
        # Fixed: assert against the raised exception (`e.value`) rather than the
        # pytest ExceptionInfo wrapper, whose str() is not guaranteed to include
        # the exception message.
        assert test_dir in str(e.value)
def test_pex_executable():
    # type: () -> None
    """A script shipped via package_data with exec permissions must stay executable
    when invoked from within a built PEX."""
    with temporary_dir() as temp_dir:
        pex_dir = os.path.join(temp_dir, "pex_dir")
        safe_mkdir(pex_dir)

        # Entry point: locate the packaged shell script and run it, echoing its output.
        with open(os.path.join(pex_dir, "exe.py"), "w") as fp:
            fp.write(
                textwrap.dedent(
                    """
                    import subprocess
                    import os
                    import sys
                    import my_package
                    path = os.path.join(os.path.dirname(my_package.__file__), 'bin/start.sh')
                    sys.stdout.write(subprocess.check_output([path]).decode('utf-8'))
                    """
                )
            )

        project_content = {
            "setup.py": textwrap.dedent(
                """
                from setuptools import setup
                setup(
                    name='my_project',
                    version='0.0.0.0',
                    zip_safe=True,
                    packages=['my_package'],
                    package_data={'my_package': ['bin/*']},
                    install_requires=[],
                )
                """
            ),
            "my_package/__init__.py": 0,
            "my_package/bin/start.sh": (
                "#!/usr/bin/env bash\n" "echo 'hello world from start.sh!'"
            ),
            "my_package/my_module.py": 'def do_something():\n print("hello world!")\n',
        }  # type: Dict[str, Union[str, int]]

        builder = PEXBuilder(path=pex_dir)
        with temporary_content(project_content, perms=0o755) as project_dir:
            bdist = WheelBuilder(project_dir).bdist()
            builder.add_dist_location(bdist)
            builder.set_executable(os.path.join(pex_dir, "exe.py"))
            builder.freeze()

            app_pex = os.path.join(os.path.join(temp_dir, "out_pex_dir"), "app.pex")
            builder.build(app_pex)
            std_out, rc = run_simple_pex(
                app_pex, env={"PEX_ROOT": os.path.join(temp_dir, ".pex")}
            )
            assert rc == 0
            assert std_out.decode("utf-8") == "hello world from start.sh!\n"
def write_simple_pex(td, exe_contents=None, dists=None, sources=None, coverage=False, interpreter=None, pex_info=None): """Write a pex file that optionally contains an executable entry point. :param str td: temporary directory path :param str exe_contents: entry point python file :param dists: distributions to include, typically sdists or bdists :type: list of :class:`pex.third_party.pkg_resources.Distribution` :param sources: sources to include, as a list of pairs (env_filename, contents) :type sources: list of (str, str) :param bool coverage: include coverage header :param interpreter: a custom interpreter to use to build the pex :type interpreter: :class:`pex.interpreter.PythonInterpreter` :param pex_info: a custom PexInfo to use to build the pex. :type pex_info: :class:`pex.pex_info.PexInfo` """ dists = dists or [] sources = sources or [] safe_mkdir(td) pb = PEXBuilder( path=td, preamble=COVERAGE_PREAMBLE if coverage else None, interpreter=interpreter, pex_info=pex_info, ) for dist in dists: pb.add_dist_location( dist.location if isinstance(dist, Distribution) else dist) for env_filename, contents in sources: src_path = os.path.join(td, env_filename) safe_mkdir(os.path.dirname(src_path)) with open(src_path, "w") as fp: fp.write(contents) pb.add_source(src_path, env_filename) if exe_contents: with open(os.path.join(td, "exe.py"), "w") as fp: fp.write(exe_contents) pb.set_executable(os.path.join(td, "exe.py")) pb.freeze() return pb
def walk_zipped_assets(static_module_name, static_path, asset_path, temp_dir):
    """Recursively extract zipped assets under `asset_path` into `temp_dir`.

    Directory assets are recreated via safe_mkdir and recursed into; file assets are
    written out byte-for-byte via resource_string.
    """
    for asset in resource_listdir(static_module_name, asset_path):
        if not asset:
            # Fixed: `resource_listdir` can return a '' entry for the directory itself
            # when it is present on the filesystem or as an explicit zip entry. Only
            # files and subdirectories matter here, so skip such entries rather than
            # treating the directory as a file asset.
            continue
        asset_target = os.path.normpath(
            os.path.join(os.path.relpath(asset_path, static_path), asset))
        if resource_isdir(static_module_name, os.path.join(asset_path, asset)):
            safe_mkdir(os.path.join(temp_dir, asset_target))
            walk_zipped_assets(static_module_name, static_path,
                               os.path.join(asset_path, asset), temp_dir)
        else:
            with open(os.path.join(temp_dir, asset_target), 'wb') as fp:
                path = os.path.join(static_path, asset_target)
                file_data = resource_string(static_module_name, path)
                fp.write(file_data)
def write_simple_pex(
    td,  # type: str
    exe_contents=None,  # type: Optional[str]
    dists=None,  # type: Optional[Iterable[Distribution]]
    sources=None,  # type: Optional[Iterable[Tuple[str, str]]]
    coverage=False,  # type: bool
    interpreter=None,  # type: Optional[PythonInterpreter]
    pex_info=None,  # type: Optional[PexInfo]
):
    # type: (...) -> PEXBuilder
    """Write a pex file that optionally contains an executable entry point.

    :param td: temporary directory path
    :param exe_contents: entry point python file
    :param dists: distributions to include, typically sdists or bdists
    :param sources: sources to include, as a list of pairs (env_filename, contents)
    :param coverage: include coverage header
    :param interpreter: a custom interpreter to use to build the pex
    :param pex_info: a custom PexInfo to use to build the pex.
    """
    safe_mkdir(td)
    builder = PEXBuilder(
        path=td,
        preamble=COVERAGE_PREAMBLE if coverage else None,
        interpreter=interpreter,
        pex_info=pex_info,
    )

    for dist in dists or []:
        # Accept either a Distribution object or a raw location string.
        if isinstance(dist, Distribution):
            builder.add_dist_location(dist.location)
        else:
            builder.add_dist_location(dist)

    for env_filename, contents in sources or []:
        src_path = os.path.join(td, env_filename)
        safe_mkdir(os.path.dirname(src_path))
        with open(src_path, "w") as fp:
            fp.write(contents)
        builder.add_source(src_path, env_filename)

    if exe_contents:
        exe_path = os.path.join(td, "exe.py")
        with open(exe_path, "w") as fp:
            fp.write(exe_contents)
        builder.set_executable(exe_path)

    builder.freeze()
    return builder
def _output_for_open(self, options):
    # type: (Namespace) -> Iterator[Tuple[IO, str]]
    """Yield an (open file object, path) pair to receive this command's output.

    When output is destined for stdout, a named temporary file under the PEX_ROOT
    tmp dir is yielded instead (with delete=False) so the caller has a real path.
    """
    if not self.is_stdout(options):
        with self.output(options, binary=True) as out:
            yield out, out.name
        return

    tmpdir = os.path.join(ENV.PEX_ROOT, "tmp")
    safe_mkdir(tmpdir)
    with tempfile.NamedTemporaryFile(
        prefix="{}.".format(__name__),
        suffix=".deps.{}".format(options.format),
        dir=tmpdir,
        delete=False,
    ) as tmp_out:
        yield tmp_out, tmp_out.name
def temporary_content(content_map, interp=None, seed=31337, perms=0o644):
    """Write content to disk where content is map from string => (int, string).

    If target is int, write int random bytes.  Otherwise write contents of string.
    """
    random.seed(seed)
    interp = interp or {}
    with temporary_dir() as td:
        for rel_path, size_or_content in content_map.items():
            full_path = os.path.join(td, rel_path)
            safe_mkdir(os.path.dirname(full_path))
            # Integers request random payloads; strings are %-interpolated content.
            if isinstance(size_or_content, int):
                payload = random_bytes(size_or_content)
            else:
                payload = (size_or_content % interp).encode('utf-8')
            with open(full_path, 'wb') as fp:
                fp.write(payload)
            os.chmod(full_path, perms)
        yield td
def temporary_content(content_map, interp=None, seed=31337, perms=0o644):
    # type: (Mapping[str, Union[int, str]], Optional[Dict[str, Any]], int, int) -> Iterator[str]
    """Write content to disk where content is map from string => (int, string).

    If target is int, write int random bytes. Otherwise write contents of string.
    """
    random.seed(seed)
    interpolation = interp or {}
    with temporary_dir() as td:
        for rel_path, size_or_content in content_map.items():
            target = os.path.join(td, rel_path)
            safe_mkdir(os.path.dirname(target))
            # Integers request random payloads; strings are %-interpolated content.
            payload = (
                random_bytes(size_or_content)
                if isinstance(size_or_content, int)
                else (size_or_content % interpolation).encode("utf-8")
            )
            with open(target, "wb") as fp:
                fp.write(payload)
            os.chmod(target, perms)
        yield td
def write_simple_pex(td, exe_contents, dists=None, sources=None, coverage=False, interpreter=None):
    """Write a pex file that contains an executable entry point

    :param td: temporary directory path
    :param exe_contents: entry point python file
    :type exe_contents: string
    :param dists: distributions to include, typically sdists or bdists
    :param sources: sources to include, as a list of pairs (env_filename, contents)
    :param coverage: include coverage header
    :param interpreter: a custom interpreter to use to build the pex
    """
    safe_mkdir(td)

    exe_path = os.path.join(td, 'exe.py')
    with open(exe_path, 'w') as fp:
        fp.write(exe_contents)

    builder = PEXBuilder(path=td,
                         preamble=COVERAGE_PREAMBLE if coverage else None,
                         interpreter=interpreter)
    for dist in dists or []:
        builder.add_dist_location(dist.location)
    for env_filename, contents in sources or []:
        src_path = os.path.join(td, env_filename)
        safe_mkdir(os.path.dirname(src_path))
        with open(src_path, 'w') as fp:
            fp.write(contents)
        builder.add_source(src_path, env_filename)

    builder.set_executable(exe_path)
    builder.freeze()
    return builder
def _copytree(
    src,  # type: str
    dst,  # type: str
    exclude=(),  # type: Tuple[str, ...]
):
    # type: (...) -> Iterator[Tuple[str, str]]
    """Copy the tree rooted at `src` into `dst`, yielding each (src_entry, dst_entry) pair.

    Regular files are hard-linked when possible; on the first cross-device error
    (EXDEV) linking is abandoned for the rest of the walk and files are copied
    instead. Top-level entries named in `exclude` are skipped, and pre-existing
    destination entries (EEXIST) are tolerated.
    """
    safe_mkdir(dst)
    link = True
    for root, dirs, files in os.walk(src, topdown=True, followlinks=False):
        if src == root:
            # Prune excluded names only at the top level of the tree.
            dirs[:] = [d for d in dirs if d not in exclude]
            files[:] = [f for f in files if f not in exclude]
        for d in dirs:
            try:
                os.mkdir(os.path.join(dst, os.path.relpath(os.path.join(root, d), src)))
            except OSError as e:
                if e.errno != errno.EEXIST:
                    raise e
        for f in files:
            src_entry = os.path.join(root, f)
            dst_entry = os.path.join(dst, os.path.relpath(src_entry, src))
            # Yield before materializing so callers can observe/act on each pair.
            yield src_entry, dst_entry
            try:
                # We only try to link regular files since linking a symlink on Linux can produce
                # another symlink, which leaves open the possibility the src_entry target could
                # later go missing leaving the dst_entry dangling.
                if link and not os.path.islink(src_entry):
                    try:
                        os.link(src_entry, dst_entry)
                        continue
                    except OSError as e:
                        if e.errno != errno.EXDEV:
                            raise e
                        # src and dst live on different devices; stop attempting links.
                        link = False
                shutil.copy(src_entry, dst_entry)
            except OSError as e:
                if e.errno != errno.EEXIST:
                    raise e
def walk_zipped_assets(static_module_name, static_path, asset_path, temp_dir):
    """Recursively mirror zipped assets rooted at `asset_path` into `temp_dir`."""
    for asset in resource_listdir(static_module_name, asset_path):
        if not asset:
            # The `resource_listdir` function returns a '' asset for the directory entry
            # itself if it is either present on the filesystem or present as an explicit
            # zip entry. Since we only care about files and subdirectories at this point,
            # skip these assets.
            continue
        relative_dir = os.path.relpath(asset_path, static_path)
        asset_target = os.path.normpath(os.path.join(relative_dir, asset))
        asset_source = os.path.join(asset_path, asset)
        if resource_isdir(static_module_name, asset_source):
            safe_mkdir(os.path.join(temp_dir, asset_target))
            walk_zipped_assets(static_module_name, static_path, asset_source, temp_dir)
        else:
            with open(os.path.join(temp_dir, asset_target), "wb") as out:
                source_path = os.path.join(static_path, asset_target)
                out.write(resource_string(static_module_name, source_path))
def _copytree(
    src,  # type: str
    dst,  # type: str
    exclude=(),  # type: Tuple[str, ...]
):
    # type: (...) -> Iterator[Tuple[str, str]]
    """Copy the tree rooted at `src` into `dst`, yielding each (src_entry, dst_entry) pair.

    Regular files are hard-linked when possible; on the first cross-device error
    (EXDEV) linking is abandoned and files are copied instead. Top-level entries
    named in `exclude` are skipped; pre-existing destination entries are tolerated.
    """
    safe_mkdir(dst)
    link = True
    for root, dirs, files in os.walk(src, topdown=True, followlinks=False):
        if src == root:
            # Prune excluded names only at the top level of the tree.
            dirs[:] = [d for d in dirs if d not in exclude]
            files[:] = [f for f in files if f not in exclude]
        for d in dirs:
            try:
                os.mkdir(os.path.join(dst, os.path.relpath(os.path.join(root, d), src)))
            except OSError as e:
                if e.errno != errno.EEXIST:
                    raise e
        for f in files:
            src_entry = os.path.join(root, f)
            dst_entry = os.path.join(dst, os.path.relpath(src_entry, src))
            yield src_entry, dst_entry
            try:
                # Fixed: only attempt to hard-link regular files. Hard-linking a symlink
                # on Linux can produce another symlink, leaving dst_entry dangling if the
                # symlink's target later goes missing; such entries are copied instead.
                if link and not os.path.islink(src_entry):
                    try:
                        os.link(src_entry, dst_entry)
                        continue
                    except OSError as e:
                        if e.errno != errno.EXDEV:
                            raise e
                        link = False
                shutil.copy(src_entry, dst_entry)
            except OSError as e:
                if e.errno != errno.EEXIST:
                    raise e
def force_local(cls, pex_file, pex_info):
    """Explode a zipped PEX's user code into the zip_unsafe cache and return the exploded dir.

    Returns `pex_file` unchanged if the PEX-INFO carries no code hash, since without a
    hash there is no stable cache key to explode under.
    """
    if pex_info.code_hash is None:
        # Do not support force_local if code_hash is not set. (It should always be set.)
        return pex_file
    explode_dir = os.path.join(pex_info.zip_unsafe_cache, pex_info.code_hash)
    TRACER.log('PEX is not zip safe, exploding to %s' % explode_dir)
    if not os.path.exists(explode_dir):
        # Extract into a uniquely-named sibling first so a failed or concurrent
        # extraction never exposes a partially-populated explode_dir.
        explode_tmp = explode_dir + '.' + uuid.uuid4().hex
        with TRACER.timed('Unzipping %s' % pex_file):
            try:
                safe_mkdir(explode_tmp)
                with open_zip(pex_file) as pex_zip:
                    # Skip the pex bootstrap and the internal dependency cache; only
                    # user code needs to live in the exploded directory.
                    pex_files = (x for x in pex_zip.namelist()
                                 if not x.startswith(pex_builder.BOOTSTRAP_DIR) and
                                 not x.startswith(PexInfo.INTERNAL_CACHE))
                    pex_zip.extractall(explode_tmp, pex_files)
            except:  # noqa: T803
                safe_rmtree(explode_tmp)
                raise
        TRACER.log('Renaming %s to %s' % (explode_tmp, explode_dir))
        # Racing extractors are resolved here: only one rename wins the final dir.
        rename_if_empty(explode_tmp, explode_dir)
    return explode_dir
def resolve_interpreter(cache, fetchers, interpreter, requirement):
    """Return an interpreter able to resolve `requirement`, or None if that is impossible.

    The given interpreter is returned as-is when it already satisfies the requirement;
    otherwise an egg is resolved into a per-identity cache dir and attached as an extra.
    """
    requirement = maybe_requirement(requirement)

    if interpreter.satisfies([requirement]):
        # Short circuit: already satisfied.
        return interpreter

    def installer_provider(sdist):
        return EggInstaller(
            Archiver.unpack(sdist),
            strict=(requirement.key != 'setuptools'),
            interpreter=interpreter,
        )

    cache_dir = os.path.join(cache, str(interpreter.identity))
    safe_mkdir(cache_dir)
    egg = _resolve_and_link_interpreter(
        requirement,
        fetchers,
        os.path.join(cache_dir, requirement.key),
        installer_provider,
    )
    if egg:
        return interpreter.with_extra(egg.name, egg.raw_version, egg.path)
def test_pyenv_shims(self, tmpdir):
    # type: (Any) -> None
    """Exercise PythonInterpreter resolution through pyenv shims across the
    global / local / shell precedence levels, including shims that point at
    uninstalled versions."""
    py35, _, run_pyenv = ensure_python_distribution(PY35)
    py36 = ensure_python_interpreter(PY36)

    pyenv_root = str(run_pyenv(["root"]).strip())
    pyenv_shims = os.path.join(pyenv_root, "shims")

    def pyenv_global(*versions):
        # type: (*str) -> None
        run_pyenv(["global"] + list(versions))

    def pyenv_local(*versions):
        # type: (*str) -> None
        run_pyenv(["local"] + list(versions))

    @contextmanager
    def pyenv_shell(*versions):
        # type: (*str) -> Iterator[None]
        # Shell-level selection is communicated via the PYENV_VERSION env var.
        with environment_as(PYENV_VERSION=":".join(versions)):
            yield

    pex_root = os.path.join(str(tmpdir), "pex_root")
    cwd = safe_mkdir(os.path.join(str(tmpdir), "home", "jake", "project"))
    with ENV.patch(PEX_ROOT=pex_root) as pex_env, environment_as(
        PYENV_ROOT=pyenv_root, PEX_PYTHON_PATH=pyenv_shims, **pex_env
    ), pyenv_shell(), pushd(cwd):
        pyenv = Pyenv.find()
        assert pyenv is not None
        assert pyenv_root == pyenv.root

        def interpreter_for_shim(shim_name):
            # type: (str) -> PythonInterpreter
            binary = os.path.join(pyenv_shims, shim_name)
            return PythonInterpreter.from_binary(binary, pyenv=pyenv)

        def assert_shim(
            shim_name,  # type: str
            expected_binary_path,  # type: str
        ):
            # type: (...) -> None
            python = interpreter_for_shim(shim_name)
            assert expected_binary_path == python.binary

        def assert_shim_inactive(shim_name):
            # type: (str) -> None
            # A shim that resolves to no active version fails identification.
            with pytest.raises(PythonInterpreter.IdentificationError):
                interpreter_for_shim(shim_name)

        pyenv_global(PY35, PY36)
        assert_shim("python", py35)
        assert_shim("python3", py35)
        assert_shim("python3.5", py35)
        assert_shim("python3.6", py36)

        # Reversing the global order flips which version backs the generic shims.
        pyenv_global(PY36, PY35)
        assert_shim("python", py36)
        assert_shim("python3", py36)
        assert_shim("python3.6", py36)
        assert_shim("python3.5", py35)

        # A local (.python-version) pin overrides the global selection.
        pyenv_local(PY35)
        assert_shim("python", py35)
        assert_shim("python3", py35)
        assert_shim("python3.5", py35)
        assert_shim_inactive("python3.6")

        # Shell-level selection overrides the local pin.
        with pyenv_shell(PY36):
            assert_shim("python", py36)
            assert_shim("python3", py36)
            assert_shim("python3.6", py36)
            assert_shim_inactive("python3.5")

        with pyenv_shell(PY35, PY36):
            assert_shim("python", py35)
            assert_shim("python3", py35)
            assert_shim("python3.5", py35)
            assert_shim("python3.6", py36)

            # The shim pointer is now invalid since python3.5 was uninstalled and so
            # should be re-read and found invalid.
            py35_version_dir = os.path.dirname(os.path.dirname(py35))
            py35_deleted = "{}.uninstalled".format(py35_version_dir)
            os.rename(py35_version_dir, py35_deleted)
            try:
                assert_shim_inactive("python")
                assert_shim_inactive("python3")
                assert_shim_inactive("python3.5")
            finally:
                os.rename(py35_deleted, py35_version_dir)

            assert_shim("python", py35)
def __init__(self, cache, cache_ttl, *args, **kw):
    """Create a resolver that caches resolved distributions on disk.

    :param cache: Directory to cache distributions in; created here if missing.
    :param cache_ttl: Cache time-to-live (presumably seconds; semantics applied by the
        resolution machinery that reads it — TODO confirm).

    Remaining args are forwarded to the base resolver unchanged.
    """
    # Name-mangled attributes keep this state private to CachingResolver.
    self.__cache = cache
    self.__cache_ttl = cache_ttl
    # Ensure the cache directory exists before any resolution is attempted.
    safe_mkdir(self.__cache)
    super(CachingResolver, self).__init__(*args, **kw)
def _extract_sdist(
    pex,  # type: PEX
    dest_dir,  # type: str
):
    # type: (...) -> None
    """Repackage the code of `pex` as a source distribution written to `dest_dir`.

    The PEX's user code is exploded into a scratch chroot, a setuptools project
    (setup.cfg / setup.py / MANIFEST.in) is synthesized around it, and
    `setup.py sdist` is run to produce the final sdist.
    """
    chroot = safe_mkdtemp()
    src = os.path.join(chroot, "src")
    safe_mkdir(src)
    # Neither the zipapp entry point nor PEX metadata belong in the sdist payload.
    excludes = ["__main__.py", "PEX-INFO"]
    if zipfile.is_zipfile(pex.path()):
        PEXEnvironment(pex.path()).explode_code(src, exclude=excludes)
    else:
        shutil.copytree(pex.path(), src, ignore=lambda _dir, _names: excludes)

    pex_info = pex.pex_info()

    name, _ = os.path.splitext(os.path.basename(pex.path()))
    # A local-version suffix ties the sdist version to the exact PEX code contents.
    version = "0.0.0+{}".format(pex_info.code_hash)
    zip_safe = pex_info.zip_safe
    py_modules = [os.path.splitext(f)[0] for f in os.listdir(src) if f.endswith(".py")]
    packages = [
        os.path.relpath(os.path.join(root, d), src).replace(os.sep, ".")
        for root, dirs, _ in os.walk(src)
        for d in dirs
    ]
    install_requires = [str(req) for req in pex_info.requirements]

    # `python_requires` can only faithfully express a single interpreter constraint.
    python_requires = None
    if len(pex_info.interpreter_constraints) == 1:
        python_requires = str(
            PythonIdentity.parse_requirement(pex_info.interpreter_constraints[0]).specifier
        )
    elif pex_info.interpreter_constraints:
        pex_warnings.warn(
            "Omitting `python_requires` for {name} sdist since {pex} has multiple "
            "interpreter constraints:\n{interpreter_constraints}".format(
                name=name,
                pex=os.path.normpath(pex.path()),
                interpreter_constraints="\n".join(
                    "{index}.) {constraint}".format(index=index, constraint=constraint)
                    for index, constraint in enumerate(
                        pex_info.interpreter_constraints, start=1
                    )
                ),
            )
        )

    entry_points = []
    if pex_info.entry_point and ":" in pex_info.entry_point:
        # Only module:function entry points map onto console_scripts.
        entry_points = [(name, pex_info.entry_point)]

    with open(os.path.join(chroot, "setup.cfg"), "w") as fp:
        fp.write(
            dedent(
                """\
                [metadata]
                name = {name}
                version = {version}

                [options]
                zip_safe = {zip_safe}
                {py_modules}
                {packages}
                package_dir =
                    =src
                include_package_data = True

                {python_requires}
                {install_requires}

                [options.entry_points]
                {entry_points}
                """
            ).format(
                name=name,
                version=version,
                zip_safe=zip_safe,
                py_modules=(
                    "py_modules =\n  {}".format("\n  ".join(py_modules)) if py_modules else ""
                ),
                packages=(
                    "packages = \n  {}".format("\n  ".join(packages)) if packages else ""
                ),
                install_requires=(
                    "install_requires =\n  {}".format("\n  ".join(install_requires))
                    if install_requires
                    else ""
                ),
                python_requires=(
                    "python_requires = {}".format(python_requires) if python_requires else ""
                ),
                entry_points=(
                    "console_scripts =\n  {}".format(
                        "\n  ".join(
                            "{} = {}".format(name, entry_point)
                            for name, entry_point in entry_points
                        )
                    )
                    if entry_points
                    else ""
                ),
            )
        )

    with open(os.path.join(chroot, "MANIFEST.in"), "w") as fp:
        fp.write("recursive-include src *")

    with open(os.path.join(chroot, "setup.py"), "w") as fp:
        fp.write("import setuptools; setuptools.setup()")

    spawn_python_job(
        args=["setup.py", "sdist", "--dist-dir", dest_dir],
        interpreter=pex.interpreter,
        expose=["setuptools"],
        cwd=chroot,
    ).wait()
def ensure_venv(pex):
    # type: (PEX) -> str
    """Ensure a venv exists for `pex`, returning the path of the venv's `pex` script.

    Creation happens at most once per venv_dir via atomic_directory; a short
    symlink under <pex_root>/venvs/short is created so venv script shebangs stay
    under common kernel length limits.
    """
    pex_info = pex.pex_info()
    venv_dir = pex_info.venv_dir
    if venv_dir is None:
        raise AssertionError(
            "Expected PEX-INFO for {} to have the components of a venv directory"
            .format(pex.path()))
    with atomic_directory(venv_dir, exclusive=True) as venv:
        if venv:
            # Imports deferred to avoid circular imports / paying the cost when the
            # venv already exists.
            from .tools.commands.venv import populate_venv_with_pex
            from .tools.commands.virtualenv import Virtualenv

            virtualenv = Virtualenv.create(
                venv_dir=venv,
                interpreter=pex.interpreter,
                copies=pex_info.venv_copies,
            )

            pex_path = os.path.abspath(pex.path())

            short_venv_dir = os.path.join(pex_info.pex_root, "venvs", "short")
            safe_mkdir(short_venv_dir)

            # A sha1 hash is 160 bits -> 20 bytes -> 40 hex characters. We start with 8 characters
            # (32 bits) of entropy since that is short and _very_ unlikely to collide with another
            # PEX venv on this machine. If we still collide after using the whole sha1 (for a total
            # of 33 collisions), then the universe is broken and we raise. It's the least we can do.
            venv_hash = hashlib.sha1(venv_dir.encode("utf-8")).hexdigest()
            collisions = []
            for chars in range(8, len(venv_hash) + 1):
                entropy = venv_hash[:chars]
                short_venv_path = os.path.join(short_venv_dir, entropy)
                try:
                    os.symlink(venv_dir, short_venv_path)
                    break
                except OSError as e:
                    if e.errno != errno.EEXIST:
                        raise e
                    collisions.append(short_venv_path)
                    if entropy == venv_hash:
                        raise RuntimeError(
                            "The venv for {pex} at {venv} has hash collisions with {count} other "
                            "{venvs}!\n{collisions}".format(
                                pex=pex_path,
                                venv=venv_dir,
                                count=len(collisions),
                                venvs=pluralize(collisions, "venv"),
                                collisions="\n".join(
                                    "{index}.) {venv_path}".format(
                                        index=index, venv_path=os.path.realpath(path))
                                    for index, path in enumerate(collisions, start=1)),
                            ))

            shenbang = populate_venv_with_pex(
                virtualenv,
                pex,
                bin_path=pex_info.venv_bin_path,
                # Point venv script shebangs through the short symlink created above.
                python=os.path.join(
                    short_venv_path, "bin", os.path.basename(pex.interpreter.binary)),
                collisions_ok=True,
            )

            # There are popular Linux distributions with shebang length limits (BINPRM_BUF_SIZE
            # in /usr/include/linux/binfmts.h) set at 128 characters, so we warn in the _very_
            # unlikely case that our shortened shebang is longer than this.
            if len(shenbang) > 128:
                pex_warnings.warn(
                    "The venv for {pex} at {venv} has script shebangs of {shebang!r} with {count} "
                    "characters. On some systems this may be too long and cause problems running "
                    "the venv scripts. You may be able adjust PEX_ROOT from {pex_root} to a "
                    "shorter path as a work-around.".format(
                        pex=pex_path,
                        venv=venv_dir,
                        shebang=shenbang,
                        count=len(shenbang),
                        pex_root=pex_info.pex_root,
                    ))

    return os.path.join(venv_dir, "pex")
def test_issues_789_demo():
    # type: () -> None
    """End-to-end demo for issue #789: build a "dehydrated" PEX (ptex) that re-hydrates itself.

    Builds a normal pex for a colorized isort wrapper from a local wheel house, then re-packages
    its code (without its dependencies) into a `.ptex` whose entry point performs a fully pinned,
    intransitive resolve at first run to hydrate a real `.ipex` and exec it.
    """
    tmpdir = safe_mkdtemp()
    pex_project_dir = (
        subprocess.check_output(["git", "rev-parse", "--show-toplevel"]).decode("utf-8").strip()
    )

    # 1. Imagine we've pre-resolved the requirements needed in our wheel house.
    requirements = [
        "ansicolors",
        "isort",
        "setuptools",  # N.B.: isort doesn't declare its setuptools dependency.
    ]

    wheelhouse = os.path.join(tmpdir, "wheelhouse")
    get_pip().spawn_download_distributions(
        download_dir=wheelhouse, requirements=requirements
    ).wait()

    # 2. Also imagine this configuration is passed to a tool (PEX or a wrapper as in this test
    # example) via the CLI or other configuration data sources. For example, Pants has a
    # `PythonSetup` that combines with BUILD target data to get you this sort of configuration
    # info outside pex.
    resolver_settings = dict(
        indexes=[],  # Turn off pypi.
        find_links=[wheelhouse],  # Use our wheel house.
        build=False,  # Use only pre-built wheels.
    )  # type: Dict[str, Any]

    # 3. That same configuration was used to build a standard pex:
    resolver_args = []
    if len(resolver_settings["find_links"]) == 0:
        resolver_args.append("--no-index")
    else:
        for index in resolver_settings["indexes"]:
            resolver_args.extend(["--index", index])

    for repo in resolver_settings["find_links"]:
        resolver_args.extend(["--find-links", repo])

    resolver_args.append("--build" if resolver_settings["build"] else "--no-build")

    # The pex's code is a small wrapper that re-execs argv through isort with colorized output.
    project_code_dir = os.path.join(tmpdir, "project_code_dir")
    with safe_open(os.path.join(project_code_dir, "colorized_isort.py"), "w") as fp:
        fp.write(
            dedent(
                """\
                import colors
                import os
                import subprocess
                import sys


                def run():
                    env = os.environ.copy()
                    env.update(PEX_MODULE='isort')
                    isort_process = subprocess.Popen(
                        sys.argv,
                        env=env,
                        stdout = subprocess.PIPE,
                        stderr = subprocess.PIPE
                    )
                    stdout, stderr = isort_process.communicate()
                    print(colors.green(stdout.decode('utf-8')))
                    print(colors.red(stderr.decode('utf-8')))
                    sys.exit(isort_process.returncode)
                """
            )
        )

    colorized_isort_pex = os.path.join(tmpdir, "colorized_isort.pex")
    args = [
        "--sources-directory",
        project_code_dir,
        "--entry-point",
        "colorized_isort:run",
        "--output-file",
        colorized_isort_pex,
    ]
    result = run_pex_command(args + resolver_args + requirements)
    result.assert_success()

    # 4. Now the tool builds a "dehydrated" PEX using the standard pex + resolve settings as the
    # template.
    ptex_cache = os.path.join(tmpdir, ".ptex")

    colorized_isort_pex_info = PexInfo.from_pex(colorized_isort_pex)
    colorized_isort_pex_info.pex_root = ptex_cache

    # Force the standard pex to extract its code. An external tool like Pants would already know
    # the original source code file paths, but we need to discover here.
    colorized_isort_pex_code_dir = os.path.join(
        colorized_isort_pex_info.zip_unsafe_cache, colorized_isort_pex_info.code_hash
    )
    env = os.environ.copy()
    env.update(PEX_ROOT=ptex_cache, PEX_INTERPRETER="1", PEX_FORCE_LOCAL="1")
    subprocess.check_call([colorized_isort_pex, "-c", ""], env=env)

    colorized_isort_ptex_code_dir = os.path.join(tmpdir, "colorized_isort_ptex_code_dir")
    safe_mkdir(colorized_isort_ptex_code_dir)

    # Mirror the extracted code tree into the ptex code dir via symlinks, recording relative
    # paths so the ptex bootstrap can re-add exactly these sources when hydrating.
    code = []
    for root, dirs, files in os.walk(colorized_isort_pex_code_dir):
        rel_root = os.path.relpath(root, colorized_isort_pex_code_dir)
        for f in files:
            # Don't ship compiled python from the code extract above, the target interpreter will
            # not match ours in general.
            if f.endswith(".pyc"):
                continue
            rel_path = os.path.normpath(os.path.join(rel_root, f))
            # The root __main__.py is special for any zipapp including pex, let it write its own
            # __main__.py bootstrap. Similarly, PEX-INFO is special to pex and we want the
            # PEX-INFO for the ptex pex, not the pex being ptexed.
            if rel_path in ("__main__.py", PexInfo.PATH):
                continue
            os.symlink(
                os.path.join(root, f),
                os.path.join(colorized_isort_ptex_code_dir, rel_path),
            )
            code.append(rel_path)

    ptex_code_dir = os.path.join(tmpdir, "ptex_code_dir")

    # PTEX-INFO carries the code manifest + resolver settings; IPEX-INFO carries the original
    # pex's PEX-INFO. Both are read back out of the ptex zip by the bootstrap below.
    ptex_info = dict(code=code, resolver_settings=resolver_settings)
    with safe_open(os.path.join(ptex_code_dir, "PTEX-INFO"), "w") as fp:
        json.dump(ptex_info, fp)

    with safe_open(os.path.join(ptex_code_dir, "IPEX-INFO"), "w") as fp:
        fp.write(colorized_isort_pex_info.dump())

    # The ptex entry point: on first run, rebuild (hydrate) the full ipex from the manifest and
    # a pinned intransitive resolve, then exec it (and on every run thereafter).
    with safe_open(os.path.join(ptex_code_dir, "ptex.py"), "w") as fp:
        fp.write(
            dedent(
                """\
                import json
                import os
                import sys

                from pex import resolver
                from pex.common import open_zip
                from pex.pex_builder import PEXBuilder
                from pex.pex_info import PexInfo
                from pex.util import CacheHelper
                from pex.variables import ENV

                self = sys.argv[0]
                ipex_file = '{}.ipex'.format(os.path.splitext(self)[0])

                if not os.path.isfile(ipex_file):
                    print('Hydrating {} to {}'.format(self, ipex_file))

                    ptex_pex_info = PexInfo.from_pex(self)
                    code_root = os.path.join(ptex_pex_info.zip_unsafe_cache, ptex_pex_info.code_hash)
                    with open_zip(self) as zf:
                        # Populate the pex with the pinned requirements and distribution names & hashes.
                        ipex_info = PexInfo.from_json(zf.read('IPEX-INFO'))
                        ipex_builder = PEXBuilder(pex_info=ipex_info)

                        # Populate the pex with the needed code.
                        ptex_info = json.loads(zf.read('PTEX-INFO').decode('utf-8'))
                        for path in ptex_info['code']:
                            ipex_builder.add_source(os.path.join(code_root, path), path)

                    # Perform a fully pinned intransitive resolve to hydrate the install cache (not the
                    # pex!).
                    resolver_settings = ptex_info['resolver_settings']
                    resolved_distributions = resolver.resolve(
                        requirements=[str(req) for req in ipex_info.requirements],
                        cache=ipex_info.pex_root,
                        transitive=False,
                        **resolver_settings
                    )

                    ipex_builder.build(ipex_file)

                os.execv(ipex_file, [ipex_file] + sys.argv[1:])
                """
            )
        )

    colorized_isort_ptex = os.path.join(tmpdir, "colorized_isort.ptex")

    result = run_pex_command(
        [
            "--not-zip-safe",
            "--always-write-cache",
            "--pex-root",
            ptex_cache,
            pex_project_dir,  # type: ignore[list-item]  # This is unicode in Py2, whereas everything else is bytes. That's fine.
            "--sources-directory",
            ptex_code_dir,
            "--sources-directory",
            colorized_isort_ptex_code_dir,
            "--entry-point",
            "ptex",
            "--output-file",
            colorized_isort_ptex,
        ]
    )
    result.assert_success()

    # Hydration succeeds and the hydrated tool behaves like isort (a bad flag fails).
    subprocess.check_call([colorized_isort_ptex, "--version"])
    with pytest.raises(CalledProcessError):
        subprocess.check_call([colorized_isort_ptex, "--not-a-flag"])

    safe_rmtree(ptex_cache)

    # The dehydrated pex now fails since it lost its hydration from the cache.
    with pytest.raises(CalledProcessError):
        subprocess.check_call([colorized_isort_ptex, "--version"])
    def _extract(
        self,
        pex,  # type: PEX
        options,  # type: Namespace
    ):
        # type: (...) -> Result
        """Extract the PEX's distributions as wheels into a find-links directory.

        Each distribution is re-packed via `python -m wheel pack` (jobs run in parallel). When
        `options.serve` is set, the resulting directory is then served over HTTP as a find-links
        repo; otherwise a `--dest-dir` must be supplied.

        Returns `Ok` on success, `Error` on missing dest dir or wheel-build failures, or (when
        serving) a `Result` carrying the server's exit code.
        """
        if not options.serve and not options.dest_dir:
            return Error("Specify a --find-links directory to extract wheels to.")

        # With no explicit dest dir we extract to a temp dir (only useful with --serve).
        dest_dir = (
            os.path.abspath(os.path.expanduser(options.dest_dir))
            if options.dest_dir
            else safe_mkdtemp()
        )
        safe_mkdir(dest_dir)

        if options.sources:
            self._extract_sdist(pex, dest_dir)

        def spawn_extract(distribution):
            # type: (Distribution) -> SpawnedJob[Text]
            # Re-pack the installed distribution back into a wheel file in dest_dir using the
            # `wheel` tool exposed to the PEX's own interpreter.
            job = spawn_python_job(
                args=["-m", "wheel", "pack", "--dest-dir", dest_dir, distribution.location],
                interpreter=pex.interpreter,
                expose=["wheel"],
                stdout=subprocess.PIPE,
            )
            return SpawnedJob.stdout(
                job, result_func=lambda out: "{}: {}".format(distribution, out.decode())
            )

        with self._distributions_output(pex, options) as (distributions, output):
            # Retain() yields (distribution, error) tuples for failed jobs instead of raising,
            # so we can report all failures rather than stopping at the first.
            errors = []
            for result in execute_parallel(distributions, spawn_extract, error_handler=Retain()):
                if isinstance(result, tuple):
                    distribution, error = result
                    errors.append(distribution)
                    output.write(
                        "Failed to build a wheel for {distribution}: {error}\n".format(
                            distribution=distribution, error=error
                        )
                    )
                else:
                    output.write(result)
            if errors:
                return Error(
                    "Failed to build wheels for {count} {distributions}.".format(
                        count=len(errors), distributions=pluralize(errors, "distribution")
                    )
                )

            if not options.serve:
                return Ok()

            # Serve the extracted wheels as a find-links repo over HTTP.
            repo = FindLinksRepo.serve(
                interpreter=pex.interpreter, port=options.port, directory=dest_dir
            )
            output.write(
                "Serving find-links repo of {pex} via {find_links} at http://localhost:{port}\n".format(
                    pex=os.path.normpath(pex.path()), find_links=dest_dir, port=repo.port
                )
            )
            # Record pid:port before blocking so external tooling can find/stop the server.
            if options.pid_file:
                with safe_open(options.pid_file, "w") as fp:
                    fp.write("{}:{}".format(repo.pid, repo.port))
            try:
                # Block until the server process exits; surface its exit code.
                return Result(exit_code=repo.join(), message=" ".join(repo.cmd))
            except KeyboardInterrupt:
                repo.kill()
                return Ok("Shut down server for find links repo at {}.".format(dest_dir))