def test_pex_builder():
    """End-to-end: a PEX built from a bdist runs its exe, zipped or exploded."""
    # Case 1: the dist is consumed directly as a zipfile.
    with nested(temporary_dir(), make_bdist('p1', zipped=True)) as (td, p1):
        write_pex(td, exe_main, dists=[p1])
        success_txt = os.path.join(td, 'success.txt')
        PEX(td).run(args=[success_txt])
        assert os.path.exists(success_txt)
        with open(success_txt) as fp:
            assert fp.read() == 'success'

    # Case 2: the same dist, but extracted to a directory first.
    with nested(temporary_dir(), temporary_dir(), make_bdist('p1', zipped=True)) as (
            td1, td2, p1):
        target_egg_dir = os.path.join(td2, os.path.basename(p1.location))
        safe_mkdir(target_egg_dir)
        with closing(zipfile.ZipFile(p1.location, 'r')) as zf:
            zf.extractall(target_egg_dir)
        p1 = DistributionHelper.distribution_from_path(target_egg_dir)

        write_pex(td1, exe_main, dists=[p1])
        success_txt = os.path.join(td1, 'success.txt')
        PEX(td1).run(args=[success_txt])
        assert os.path.exists(success_txt)
        with open(success_txt) as fp:
            assert fp.read() == 'success'
def test_pex_verify_entry_point_module_should_pass():
    # type: () -> None
    """Constructing a PEX with verify_entry_point succeeds for a valid module."""
    with _add_test_hello_to_pex("test") as pex_builder:
        # `test` names a real module, so construction must not raise.
        PEX(
            pex_builder.path(),
            interpreter=pex_builder.interpreter,
            verify_entry_point=True,
        )
def execute(self):
    """Build and run a PEX for the single root PythonBinary target.

    No-ops for non-Python binaries so that other run tasks (e.g. jvm_run) can
    claim the target.

    :raises TaskError: if the spawned PEX exits non-zero.
    """
    binary = self.require_single_root_target()
    if isinstance(binary, PythonBinary):
        # We can't throw if binary isn't a PythonBinary, because perhaps we were called on a
        # jvm_binary, in which case we have to no-op and let jvm_run do its thing.
        # TODO(benjy): Some more elegant way to coordinate how tasks claim targets.
        interpreter = self.select_interpreter_for_targets(self.context.targets())
        with self.temporary_pex_builder(interpreter=interpreter,
                                        pex_info=binary.pexinfo) as builder:
            chroot = PythonChroot(targets=[binary],
                                  builder=builder,
                                  platforms=binary.platforms,
                                  interpreter=interpreter)
            chroot.dump()
            builder.freeze()
            pex = PEX(builder.path(), interpreter=interpreter)
            # Release the global lock before handing control to the subprocess.
            self.context.release_lock()
            with self.context.new_workunit(name='run', labels=[WorkUnit.RUN]):
                args = self.get_options().args + self.get_passthru_args()
                po = pex.run(blocking=False, args=args)
                try:
                    result = po.wait()
                    if result != 0:
                        # BUG FIX: this message used {}-style placeholders with the `%`
                        # operator, which never interpolated args/code (no %-specifiers
                        # in the string). Use str.format instead.
                        raise TaskError(
                            'python {args} ... exited non-zero ({code})'.format(
                                args=args, code=result),
                            exit_code=result)
                except KeyboardInterrupt:
                    po.send_signal(signal.SIGINT)
                    raise
def verify(pb):
    # type: (PEXBuilder) -> None
    """Run the built PEX and assert it wrote 'success' to success.txt."""
    marker = os.path.join(pb.path(), "success.txt")
    PEX(pb.path(), interpreter=pb.interpreter).run(args=[marker])
    assert os.path.exists(marker)
    with open(marker) as fp:
        assert fp.read() == "success"
def execute(self):
    """Build a PEX for the single root PythonBinary target and run it.

    Returns the subprocess exit code, or None if the target is not a
    PythonBinary.
    """
    binary = self.require_single_root_target()
    if not isinstance(binary, PythonBinary):
        # We can't throw: perhaps we were called on a jvm_binary, in which case we have
        # to no-op and let jvm_run do its thing.
        # TODO(benjy): Some more elegant way to coordinate how tasks claim targets.
        return

    interpreter = self.select_interpreter_for_targets(self.context.targets())
    with self.temporary_pex_builder(interpreter=interpreter,
                                    pex_info=binary.pexinfo) as builder:
        chroot = PythonChroot(targets=[binary],
                              builder=builder,
                              platforms=binary.platforms,
                              interpreter=interpreter,
                              conn_timeout=self.conn_timeout)
        chroot.dump()
        builder.freeze()

        pex = PEX(builder.path(), interpreter=interpreter)
        # Release the global lock before handing control to the subprocess.
        self.context.lock.release()
        with self.context.new_workunit(name='run', labels=[WorkUnit.RUN]):
            process = pex.run(blocking=False)
            try:
                return process.wait()
            except KeyboardInterrupt:
                process.send_signal(signal.SIGINT)
                raise
def test_pex_run_conflicting_custom_setuptools_useable():
    # Here we use an older setuptools to build the pex which has a newer setuptools
    # requirement. These setuptools dists have different pkg_resources APIs:
    # $ diff \
    #   <(zipinfo -1 setuptools-20.3.1-py2.py3-none-any.whl | grep pkg_resources/ | sort) \
    #   <(zipinfo -1 setuptools-40.4.3-py2.py3-none-any.whl | grep pkg_resources/ | sort)
    # 2a3,4
    # > pkg_resources/py31compat.py
    # > pkg_resources/_vendor/appdirs.py
    with temporary_dir() as resolve_cache:
        build_time_dists = [
            resolved.distribution
            for resolved in resolve(['setuptools==20.3.1'], cache=resolve_cache)
        ]
        interpreter = PythonInterpreter.from_binary(
            sys.executable,
            path_extras=[dist.location for dist in build_time_dists],
            include_site_extras=False)

        run_time_dists = [
            resolved.distribution
            for resolved in resolve(['setuptools==40.4.3'], cache=resolve_cache)
        ]
        with temporary_dir() as temp_dir:
            pex = write_simple_pex(
                temp_dir,
                'from pkg_resources import appdirs, py31compat',
                dists=run_time_dists,
                interpreter=interpreter
            )
            rc = PEX(pex.path()).run()
            assert rc == 0
def assert_access_zipped_assets(distribution_helper_import):
    # type: (str) -> bytes
    """Build and run a PEX whose exe reads a zipped asset; return its stderr.

    The generated executable extracts my_package/submodule via
    DistributionHelper.access_zipped_assets and prints the extracted module's
    contents.
    """
    test_executable = dedent(
        """
        import os
        {distribution_helper_import}
        temp_dir = DistributionHelper.access_zipped_assets('my_package', 'submodule')
        with open(os.path.join(temp_dir, 'mod.py'), 'r') as fp:
            for line in fp:
                print(line)
        """.format(distribution_helper_import=distribution_helper_import)
    )
    with temporary_dir() as td1, temporary_dir() as td2:
        pb = PEXBuilder(path=td1)
        with open(os.path.join(td1, "exe.py"), "w") as fp:
            fp.write(test_executable)
            pb.set_executable(fp.name)

        submodule = os.path.join(td1, "my_package", "submodule")
        safe_mkdir(submodule)
        mod_path = os.path.join(submodule, "mod.py")
        with open(mod_path, "w") as fp:
            fp.write("accessed")
            pb.add_source(fp.name, "my_package/submodule/mod.py")
        pb.add_source(None, "my_package/__init__.py")
        pb.add_source(None, "my_package/submodule/__init__.py")

        app_pex = os.path.join(td2, "app.pex")
        pb.build(app_pex)
        process = PEX(app_pex, interpreter=pb.interpreter).run(
            blocking=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        stdout, stderr = process.communicate()
        assert process.returncode == 0
        assert b"accessed\n" == stdout
        return cast(bytes, stderr)
def resolve_requirement_strings(self, interpreter, requirement_strings):
    """Resolve a list of pip-style requirement strings.

    The result is cached on disk keyed by interpreter identity and a
    fingerprint of the (sorted) requirement strings.

    :returns: a PEX containing the resolved requirements.
    """
    requirement_strings = sorted(requirement_strings)
    if not requirement_strings:
        req_strings_id = "no_requirements"
    elif len(requirement_strings) == 1:
        req_strings_id = requirement_strings[0]
    else:
        req_strings_id = hash_all(requirement_strings)

    path = os.path.realpath(
        os.path.join(self.workdir, str(interpreter.identity), req_strings_id))
    if not os.path.isdir(path):
        reqs = [PythonRequirement(req_str) for req_str in requirement_strings]
        with safe_concurrent_creation(path) as safe_path:
            wrapper = PexBuilderWrapper.Factory.create(
                builder=PEXBuilder(path=safe_path, interpreter=interpreter, copy=True),
                log=self.context.log,
            )
            wrapper.add_resolved_requirements(reqs)
            wrapper.freeze()
    return PEX(path, interpreter=interpreter)
def create(
    cls,
    path,  # type: str
    interpreter=None,  # type: Optional[PythonInterpreter]
):
    # type: (...) -> Pip
    """Creates a pip tool with PEX isolation at path.

    :param path: The path to assemble the pip tool at.
    :param interpreter: The interpreter to run Pip with. The current interpreter by default.
    :return: The path of a PEX that can be used to execute Pip in isolation.
    """
    pip_interpreter = interpreter or PythonInterpreter.get()
    pip_pex_path = os.path.join(path, isolated().pex_hash)
    # atomic_directory guards concurrent creation: only the process that wins the
    # exclusive lock builds the PEX; everyone else sees it already finalized.
    with atomic_directory(pip_pex_path, exclusive=True) as chroot:
        if not chroot.is_finalized:
            from pex.pex_builder import PEXBuilder

            isolated_pip_builder = PEXBuilder(path=chroot.work_dir)
            # NOTE(review): venv mode appears intended so pip's console script
            # resolves normally inside the isolated PEX -- confirm against
            # PEXBuilder/PexInfo docs.
            isolated_pip_builder.info.venv = True
            for dist_location in third_party.expose(["pip", "setuptools", "wheel"]):
                isolated_pip_builder.add_dist_location(dist=dist_location)
            isolated_pip_builder.set_script("pip")
            isolated_pip_builder.freeze()
    # Pin the pip PEX to the interpreter it was created for.
    pex_info = PexInfo.from_pex(pip_pex_path)
    pex_info.add_interpreter_constraint(str(pip_interpreter.identity.requirement))
    return cls(
        ensure_venv(PEX(pip_pex_path, interpreter=pip_interpreter, pex_info=pex_info)))
def test_activate_interpreter_different_from_current():
    """Activating a PEX under the other major Python version must not die."""
    with temporary_dir() as pex_root:
        # Pick an interpreter from the opposite major version to the test runner.
        interp_version = '3.6.3' if PY2 else '2.7.10'
        custom_interpreter = get_interpreter(
            python_interpreter=ensure_python_interpreter(interp_version),
            interpreter_cache_dir=os.path.join(pex_root, 'interpreters'),
            repos=None,  # Default to PyPI.
            use_wheel=True)
        pex_info = PexInfo.default(custom_interpreter)
        pex_info.pex_root = pex_root
        with temporary_dir() as pex_chroot:
            pex_builder = PEXBuilder(path=pex_chroot,
                                     interpreter=custom_interpreter,
                                     pex_info=pex_info)
            with make_bdist(installer_impl=WheelInstaller,
                            interpreter=custom_interpreter) as bdist:
                pex_builder.add_distribution(bdist)
                pex_builder.set_entry_point('sys:exit')
                pex_builder.freeze()

                pex = PEX(pex_builder.path(), interpreter=custom_interpreter)
                try:
                    pex._activate()
                except SystemExit as e:
                    pytest.fail('PEX activation of %s failed with %s' % (pex, e))
def test_pex_run_custom_pex_useable():
    """A pex containing an old pex dist reports the old version at runtime."""
    old_pex_version = '0.7.0'
    resolved_dists = resolve(['pex=={}'.format(old_pex_version), 'setuptools==40.6.3'])
    dists = [resolved_dist.distribution for resolved_dist in resolved_dists]
    with temporary_dir() as temp_dir:
        from pex.version import __version__
        pex = write_simple_pex(
            temp_dir,
            exe_contents=textwrap.dedent("""
                import sys

                try:
                    # The 0.7.0 release embedded the version directly in setup.py so it should
                    # only be available via distribution metadata.
                    from pex.version import __version__
                    sys.exit(1)
                except ImportError:
                    import pkg_resources
                    dist = pkg_resources.working_set.find(pkg_resources.Requirement.parse('pex'))
                    print(dist.version)
            """),
            dists=dists,
        )
        process = PEX(pex.path()).run(blocking=False, stdout=subprocess.PIPE)
        stdout, _ = process.communicate()
        assert process.returncode == 0
        assert old_pex_version == stdout.strip().decode('utf-8')
        assert old_pex_version != __version__
def test_osx_platform_intel_issue_523():
    """A pex built for a known-bad Apple interpreter still isolates and runs."""

    def bad_interpreter():
        return PythonInterpreter.from_binary(_KNOWN_BAD_APPLE_INTERPRETER)

    with temporary_dir() as cache:
        # We need to run the bad interpreter with a modern, non-Apple-Extras setuptools in
        # order to successfully install psutil; yield_pex_builder sets up the bad interpreter
        # with our vendored setuptools and wheel extras.
        with nested(yield_pex_builder(installer_impl=WheelInstaller,
                                      interpreter=bad_interpreter()),
                    temporary_filename()) as (pb, pex_file):
            for resolved_dist in resolver.resolve(['psutil==5.4.3'],
                                                  cache=cache,
                                                  precedence=(SourcePackage, WheelPackage),
                                                  interpreter=pb.interpreter):
                pb.add_dist_location(resolved_dist.distribution.location)
            pb.build(pex_file)

            # NB: We want PEX to find the bare bad interpreter at runtime.
            pex = PEX(pex_file, interpreter=bad_interpreter())

            def run(args, **env):
                pex_env = os.environ.copy()
                pex_env['PEX_VERBOSE'] = '1'
                pex_env.update(**env)
                process = pex.run(args=args,
                                  env=pex_env,
                                  blocking=False,
                                  stdout=subprocess.PIPE,
                                  stderr=subprocess.PIPE)
                stdout, stderr = process.communicate()
                return process.returncode, stdout, stderr

            returncode, _, stderr = run(['-c', 'import psutil'])
            assert 0 == returncode, (
                'Process failed with exit code {} and stderr:\n{}'.format(returncode, stderr))

            returncode, stdout, stderr = run(['-c', 'import pkg_resources'])
            assert 0 != returncode, (
                'Isolated pex process succeeded but should not have found pkg-resources:\n'
                'STDOUT:\n'
                '{}\n'
                'STDERR:\n'
                '{}'
                # BUG FIX: was .format(stdout, stdout, stderr) -- stdout was passed twice
                # and stderr was never rendered in the failure message.
                .format(stdout, stderr)
            )

            returncode, stdout, stderr = run(
                ['-c', 'import pkg_resources; print(pkg_resources.get_supported_platform())'],
                # Let the bad interpreter site-packages setuptools leak in.
                PEX_INHERIT_PATH='1'
            )
            assert 0 == returncode, (
                'Process failed with exit code {} and stderr:\n{}'.format(returncode, stderr))

            # Verify this worked along side the previously problematic
            # pkg_resources-reported platform.
            release, _, _ = platform.mac_ver()
            major_minor = '.'.join(release.split('.')[:2])
            assert to_bytes('macosx-{}-intel'.format(major_minor)) == stdout.strip()
def main():
    """CLI entry point: build a PEX and either save it to disk or run it.

    Returns the exit code to hand to sys.exit.
    """
    parser = configure_clp()
    options, args = parser.parse_args()

    with TraceLogger.env_override(PEX_VERBOSE=options.verbosity):
        pex_builder = build_pex(args, options)

        # Save-to-file mode: write via a temp name then rename for atomicity.
        if options.pex_name is not None:
            log('Saving PEX file to %s' % options.pex_name, v=options.verbosity)
            tmp_name = options.pex_name + '~'
            safe_delete(tmp_name)
            pex_builder.build(tmp_name)
            os.rename(tmp_name, options.pex_name)
            return 0

        # Run mode.
        if options.platform != Platform.current():
            log('WARNING: attempting to run PEX with differing platform!')
        pex_builder.freeze()
        log('Running PEX file at %s with args %s' % (pex_builder.path(), args),
            v=options.verbosity)
        pex = PEX(pex_builder.path(), interpreter=pex_builder.interpreter)
        return pex.run(args=list(args))
def resolve_requirements(self, req_libs):
    """Requirements resolution for PEX files.

    :param req_libs: A list of :class:`PythonRequirementLibrary` targets to resolve.
    :returns: a PEX containing target requirements and any specified python dist targets.
    """
    with self.invalidated(req_libs) as invalidation_check:
        # Resolve even an empty set of requirements so downstream tasks need not
        # special-case it.
        if invalidation_check.all_vts:
            target_set_id = VersionedTargetSet.from_versioned_targets(
                invalidation_check.all_vts).cache_key.hash
        else:
            target_set_id = 'no_targets'

        interpreter = self.context.products.get_data(PythonInterpreter)
        path = os.path.realpath(
            os.path.join(self.workdir, str(interpreter.identity), target_set_id))

        # Check directory existence rather than invalid_vts to cover the empty case.
        if not os.path.isdir(path):
            with safe_concurrent_creation(path) as safe_path:
                builder = PEXBuilder(path=safe_path, interpreter=interpreter, copy=True)
                dump_requirement_libs(builder, interpreter, req_libs, self.context.log)
                builder.freeze()
        return PEX(path, interpreter=interpreter)
def test_pex_paths():
    # PEX_PATH lets pex2 import sources that live only in pex1.
    with named_temporary_file() as fake_stdout:
        with temporary_dir() as temp_dir:
            pex1_path = os.path.join(temp_dir, "pex1")
            write_simple_pex(
                pex1_path,
                sources=[
                    ("foo_pkg/__init__.py", ""),
                    ("foo_pkg/foo_module.py", 'def foo_func():\n return "42"'),
                ],
            )

            pex2_path = os.path.join(temp_dir, "pex2")
            pex2 = write_simple_pex(
                pex2_path,
                "import sys; from bar_pkg.bar_module import bar_func; "
                "sys.stdout.write(bar_func()); sys.exit(0)",
                sources=[
                    ("bar_pkg/__init__.py", ""),
                    (
                        "bar_pkg/bar_module.py",
                        "from foo_pkg.foo_module import foo_func\ndef bar_func():\n return foo_func()",
                    ),
                ],
            )

            rc = PEX(pex2.path()).run(stdin=None, stdout=fake_stdout,
                                      env={"PEX_PATH": pex1_path})
            assert rc == 0
            fake_stdout.seek(0)
            assert fake_stdout.read() == b"42"
def execute_codegen(self, target, results_dir):
    """Build a cython extension in-place for `target`, moving the artifact to results_dir.

    :raises TaskError: if the target does not own exactly one setup.py, or the
        build_ext subprocess exits non-zero.
    """
    self.context.log.info("Processing target {}".format(target))
    requirements_pex = self.context.products.get_data(ResolveRequirements.REQUIREMENTS_PEX)
    interpreter = self.context.products.get_data(PythonInterpreter)
    pex_info = PexInfo.default(interpreter)
    pex_info.pex_path = requirements_pex.path()
    with temporary_dir() as source_pex_chroot:
        sources_pex_builder = PEXBuilder(
            path=source_pex_chroot,
            interpreter=interpreter,
            copy=True,
            pex_info=pex_info
        )
        pex_build_util.dump_sources(sources_pex_builder, target, self.context.log)
        sources_pex_builder.freeze()
        codegen_pex = PEX(sources_pex_builder.path(), interpreter)

        setup_py_paths = [
            source for source in target.sources_relative_to_source_root()
            if os.path.basename(source) == 'setup.py'
        ]
        if len(setup_py_paths) != 1:
            # BUG FIX: the message previously interpolated setup_py_paths where the
            # target belongs ('Expected target [paths] ...').
            raise TaskError(
                'Expected target {} to own exactly one setup.py, found {}'.format(
                    target, len(setup_py_paths)
                )
            )
        setup_py_path = setup_py_paths[0]

        result_code = codegen_pex.run(
            with_chroot=True,
            blocking=True,
            args=(setup_py_path, 'build_ext', '--inplace', '--verbose'),
            # Passing PATH helps cython find the correct c++ compiler
            env={'libraries': results_dir, 'PATH': os.getenv('PATH')}
        )
        if result_code != 0:
            raise TaskError(
                'creating cython library failed',
                exit_code=result_code,
                failed_targets=[target]
            )

        library_source_path = os.path.join(
            sources_pex_builder.path(), os.path.dirname(setup_py_path), target.output
        )
        library_output = os.path.join(results_dir, target.output)
        safe_mkdir_for(library_output)
        shutil.move(library_source_path, library_output)
        self.context.log.info(
            'created library {}'.format(os.path.relpath(library_output, get_buildroot()))
        )
def nsutil_pex(self):
    """Return a PEX (cached per interpreter version) whose executable echoes back
    each argv path that declares a namespace package.
    """
    interpreter = self.context.products.get_data(PythonInterpreter)
    chroot = os.path.join(self.workdir, 'nsutil', interpreter.version_string)
    if not os.path.exists(chroot):
        pex_info = PexInfo.default(interpreter=interpreter)
        with safe_concurrent_creation(chroot) as scratch:
            builder = PEXBuilder(path=scratch,
                                 interpreter=interpreter,
                                 pex_info=pex_info,
                                 copy=True)
            with temporary_file(binary_mode=False) as fp:
                # Embed the source of declares_namespace_package directly in the
                # generated main so the PEX has no dependency on this module.
                declares_namespace_package_code = inspect.getsource(
                    declares_namespace_package)
                fp.write(
                    textwrap.dedent("""
                        import sys

                        {declares_namespace_package_code}

                        if __name__ == '__main__':
                            for path in sys.argv[1:]:
                                if declares_namespace_package(path):
                                    print(path)
                    """).strip().format(declares_namespace_package_code=
                                        declares_namespace_package_code))
                # Close first so the content is flushed to disk before the builder
                # copies the file.
                fp.close()
                builder.set_executable(filename=fp.name, env_filename='main.py')
                builder.freeze()
    return PEX(pex=chroot, interpreter=interpreter)
def execute(self):
    """Gather sources for all python targets into a cached PYTHON_SOURCES pex product."""
    targets = self.context.targets(lambda tgt: isinstance(tgt, (PythonTarget, Resources)))
    with self.invalidated(targets) as invalidation_check:
        # Even with no relevant targets we build an (empty) sources pex so
        # downstream tasks need not special-case the empty set.
        if invalidation_check.all_vts:
            target_set_id = VersionedTargetSet.from_versioned_targets(
                invalidation_check.all_vts).cache_key.hash
        else:
            target_set_id = 'no_targets'

        path = os.path.join(self.workdir, target_set_id)
        path_tmp = path + '.tmp'
        shutil.rmtree(path_tmp, ignore_errors=True)

        interpreter = self.context.products.get_data(PythonInterpreter)
        if not os.path.isdir(path):
            # Build into a temp dir then move into place.
            self._build_pex(interpreter, path_tmp, invalidation_check.all_vts)
            shutil.move(path_tmp, path)

        pex = PEX(os.path.realpath(path), interpreter=interpreter)
        self.context.products.get_data(self.PYTHON_SOURCES, lambda: pex)
def _spawn_pip_isolated(self, args, cache=None, interpreter=None):
    """Spawn pip with isolation flags appended; return a non-blocking Job."""
    pip_args = ['--disable-pip-version-check', '--isolated', '--exists-action', 'i']

    # The max pip verbosity is -vvv and for pex it's -vvvvvvvvv; so we scale down by a
    # factor of 3.
    pex_verbosity = ENV.PEX_VERBOSE
    pip_verbosity = pex_verbosity // 3
    pip_args.append('-{}'.format('v' * pip_verbosity) if pip_verbosity > 0 else '-q')

    if cache:
        pip_args.extend(['--cache-dir', cache])
    else:
        pip_args.append('--no-cache-dir')

    command = pip_args + args
    with ENV.strip().patch(PEX_ROOT=ENV.PEX_ROOT,
                           PEX_VERBOSE=str(pex_verbosity)) as env:
        from pex.pex import PEX
        pip = PEX(pex=self._pip_pex_path, interpreter=interpreter)
        return Job(command=pip.cmdline(command),
                   process=pip.run(args=command, env=env, blocking=False))
def resolve_requirements(self, interpreter, req_libs):
    """Requirements resolution for PEX files.

    :param interpreter: Resolve against this :class:`PythonInterpreter`.
    :param req_libs: A list of :class:`PythonRequirementLibrary` targets to resolve.
    :returns: a PEX containing target requirements and any specified python dist targets.
    """
    with self.invalidated(req_libs) as invalidation_check:
        # Resolve even an empty set of requirements so downstream tasks need not
        # special-case it.
        if invalidation_check.all_vts:
            target_set_id = VersionedTargetSet.from_versioned_targets(
                invalidation_check.all_vts).cache_key.hash
        else:
            target_set_id = 'no_targets'

        # We need to ensure that we are resolving for only the current platform if we are
        # including local python dist targets that have native extensions.
        tgts = self.context.targets()
        maybe_platforms = ['current'] if build_for_current_platform_only_check(tgts) else None

        path = os.path.realpath(
            os.path.join(self.workdir, str(interpreter.identity), target_set_id))
        # Check directory existence rather than invalid_vts to cover the empty case.
        if not os.path.isdir(path):
            with safe_concurrent_creation(path) as safe_path:
                builder = PEXBuilder(path=safe_path, interpreter=interpreter, copy=True)
                dump_requirement_libs(builder,
                                      interpreter,
                                      req_libs,
                                      self.context.log,
                                      platforms=maybe_platforms)
                builder.freeze()
        return PEX(path, interpreter=interpreter)
def test_pex_run_conflicting_custom_setuptools_useable():
    # Here we use our vendored, newer setuptools to build the pex which has an older
    # setuptools requirement. These setuptools dists have different pkg_resources APIs:
    # $ diff \
    #   <(zipinfo -1 setuptools-20.3.1-py2.py3-none-any.whl | grep pkg_resources/ | sort) \
    #   <(zipinfo -1 setuptools-40.6.2-py2.py3-none-any.whl | grep pkg_resources/ | sort)
    # 2a3,4
    # > pkg_resources/py31compat.py
    # > pkg_resources/_vendor/appdirs.py
    resolved_dists = resolve(['setuptools==20.3.1'])
    dists = [resolved_dist.distribution for resolved_dist in resolved_dists]
    with temporary_dir() as temp_dir:
        pex = write_simple_pex(
            temp_dir,
            exe_contents=textwrap.dedent("""
                import sys
                import pkg_resources

                try:
                    from pkg_resources import appdirs
                    sys.exit(1)
                except ImportError:
                    pass

                try:
                    from pkg_resources import py31compat
                    sys.exit(2)
                except ImportError:
                    pass
            """),
            dists=dists,
        )
        rc = PEX(pex.path()).run(env={'PEX_VERBOSE': '9'})
        assert rc == 0
def assert_access_zipped_assets(distribution_helper_import):
    """Build and run a PEX whose exe reads a zipped asset; return its stderr bytes."""
    test_executable = dedent("""
        import os
        {distribution_helper_import}
        temp_dir = DistributionHelper.access_zipped_assets('my_package', 'submodule')
        with open(os.path.join(temp_dir, 'mod.py'), 'r') as fp:
            for line in fp:
                print(line)
    """.format(distribution_helper_import=distribution_helper_import))
    with nested(temporary_dir(), temporary_dir()) as (td1, td2):
        pb = PEXBuilder(path=td1)
        with open(os.path.join(td1, 'exe.py'), 'w') as fp:
            fp.write(test_executable)
            pb.set_executable(fp.name)

        submodule = os.path.join(td1, 'my_package', 'submodule')
        safe_mkdir(submodule)
        mod_path = os.path.join(submodule, 'mod.py')
        with open(mod_path, 'w') as fp:
            fp.write('accessed')
            pb.add_source(fp.name, 'my_package/submodule/mod.py')
        pb.add_source(None, 'my_package/__init__.py')
        pb.add_source(None, 'my_package/submodule/__init__.py')

        app_pex = os.path.join(td2, 'app.pex')
        pb.build(app_pex)
        process = PEX(app_pex, interpreter=pb.interpreter).run(
            blocking=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        stdout, stderr = process.communicate()
        assert process.returncode == 0
        assert b'accessed\n' == stdout
        return stderr
def test_pex_verify_entry_point_module_should_fail():
    """Construction with verify_entry_point raises for a nonexistent module."""
    with _add_test_hello_to_pex('invalid.module') as pex_builder:
        # Expect InvalidEntryPoint due to invalid entry point module
        with pytest.raises(PEX.InvalidEntryPoint):
            PEX(pex_builder.path(),
                interpreter=pex_builder.interpreter,
                verify_entry_point=True)
def execute(self):
    """Resolve all PythonRequirementLibrary targets into a cached REQUIREMENTS_PEX product."""
    req_libs = self.context.targets(lambda tgt: isinstance(tgt, PythonRequirementLibrary))
    fs = PythonRequirementFingerprintStrategy(task=self)
    with self.invalidated(req_libs, fingerprint_strategy=fs) as invalidation_check:
        # Resolve even an empty set of requirements so downstream tasks need not
        # special-case it.
        if invalidation_check.all_vts:
            target_set_id = VersionedTargetSet.from_versioned_targets(
                invalidation_check.all_vts).cache_key.hash
        else:
            target_set_id = 'no_targets'

        interpreter = self.context.products.get_data(PythonInterpreter)
        path = os.path.join(self.workdir, str(interpreter.identity), target_set_id)
        path_tmp = path + '.tmp'
        shutil.rmtree(path_tmp, ignore_errors=True)

        if not os.path.isdir(path):
            # Build into a temp dir then move into place.
            self._build_pex(interpreter, path_tmp, req_libs)
            shutil.move(path_tmp, path)

        pex = PEX(os.path.realpath(path), interpreter=interpreter)
        self.context.products.get_data(self.REQUIREMENTS_PEX, lambda: pex)
def test_pex_paths():
    # Tests that PEX_PATH allows importing sources from the referenced pex.
    with named_temporary_file() as fake_stdout:
        with temporary_dir() as temp_dir:
            pex1_path = os.path.join(temp_dir, 'pex1')
            write_simple_pex(
                pex1_path,
                sources=[('foo_pkg/__init__.py', ''),
                         ('foo_pkg/foo_module.py', 'def foo_func():\n return "42"')])

            pex2_path = os.path.join(temp_dir, 'pex2')
            pex2 = write_simple_pex(
                pex2_path,
                'import sys; from bar_pkg.bar_module import bar_func; '
                'sys.stdout.write(bar_func()); sys.exit(0)',
                sources=[('bar_pkg/__init__.py', ''),
                         ('bar_pkg/bar_module.py',
                          'from foo_pkg.foo_module import foo_func\ndef bar_func():\n return foo_func()')])

            rc = PEX(pex2.path()).run(stdin=None,
                                      stdout=fake_stdout,
                                      env={'PEX_PATH': pex1_path})
            assert rc == 0
            fake_stdout.seek(0)
            assert fake_stdout.read() == b'42'
def _source_pex_for_versioned_target_closure(self, interpreter, vt):
    """Return a cached source PEX for vt's target closure, building it on cache miss."""
    source_pex_path = os.path.realpath(os.path.join(self.workdir, vt.cache_key.hash))
    if not os.path.isdir(source_pex_path):
        with safe_concurrent_creation(source_pex_path) as safe_path:
            self._build_source_pex(interpreter, safe_path, vt.target.closure())
    return PEX(source_pex_path, interpreter=interpreter)
def test_pex_verify_entry_point_method_should_fail():
    # type: () -> None
    """Construction with verify_entry_point raises for a nonexistent method."""
    with _add_test_hello_to_pex("test:invalid_entry_point") as pex_builder:
        # Expect InvalidEntryPoint due to invalid entry point method
        with pytest.raises(PEX.InvalidEntryPoint):
            PEX(pex_builder.path(),
                interpreter=pex_builder.interpreter,
                verify_entry_point=True)
def create_pex(self, pex_info=None):
    """Returns a wrapped pex that "merges" other pexes produced in previous tasks via PEX_PATH.

    This method always creates a PEX to run locally on the current platform and selected
    interpreter: to create a pex that is distributable to other environments, use the
    pex_build_util Subsystem.

    The returned pex will have the pexes from the ResolveRequirements and GatherSources tasks
    mixed into it via PEX_PATH. Any 3rdparty requirements declared with
    self.extra_requirements() will also be resolved for the global interpreter, and added to
    the returned pex via PEX_PATH.

    :param pex_info: An optional PexInfo instance to provide to self.merged_pex().
    :type pex_info: :class:`pex.pex_info.PexInfo`, or None
    :rtype: :class:`pex.pex.PEX`
    """
    relevant_targets = self.context.targets(
        lambda tgt: isinstance(tgt, (PythonDistribution, PythonRequirementLibrary,
                                     PythonTarget, Files)))
    with self.invalidated(relevant_targets) as invalidation_check:
        # If there are no relevant targets, we still go through the motions of resolving
        # an empty set of requirements, to prevent downstream tasks from having to check
        # for this special case.
        if invalidation_check.all_vts:
            target_set_id = VersionedTargetSet.from_versioned_targets(
                invalidation_check.all_vts).cache_key.hash
        else:
            target_set_id = 'no_targets'
        interpreter = self.context.products.get_data(PythonInterpreter)
        path = os.path.realpath(
            os.path.join(self.workdir, str(interpreter.identity), target_set_id))
        # Note that we check for the existence of the directory, instead of for invalid_vts,
        # to cover the empty case.
        if not os.path.isdir(path):
            pexes = [
                self.context.products.get_data(ResolveRequirements.REQUIREMENTS_PEX),
                self.context.products.get_data(GatherSources.PYTHON_SOURCES)
            ]
            if self.extra_requirements():
                extra_requirements_pex = self.resolve_requirement_strings(
                    interpreter, self.extra_requirements())
                # Add the extra requirements first, so they take precedence over any
                # colliding version in the target set's dependency closure.
                pexes = [extra_requirements_pex] + pexes
            # NB: See docstring. We always use the previous selected interpreter.
            constraints = {str(interpreter.identity.requirement)}
            with self.merged_pex(path, pex_info, interpreter, pexes, constraints) as builder:
                for extra_file in self.extra_files():
                    extra_file.add_to(builder)
                builder.freeze(bytecode_compile=False)
        return PEX(path, interpreter)
def test_osx_platform_intel_issue_523():
    """A pex built with a bootstrapped setuptools/wheel runs under the bad Apple
    interpreter and reports the expected macosx-*-intel platform."""

    def bad_interpreter(include_site_extras=True):
        return PythonInterpreter.from_binary(_KNOWN_BAD_APPLE_INTERPRETER,
                                             include_site_extras=include_site_extras)

    interpreter = bad_interpreter(include_site_extras=False)
    with temporary_dir() as cache:
        # We need to run the bad interpreter with a modern, non-Apple-Extras setuptools in
        # order to successfully install psutil.
        for requirement in (SETUPTOOLS_REQUIREMENT, WHEEL_REQUIREMENT):
            for resolved_dist in resolver.resolve(
                    [requirement],
                    cache=cache,
                    # We can't use wheels since we're bootstrapping them.
                    precedence=(SourcePackage, EggPackage),
                    interpreter=interpreter):
                dist = resolved_dist.distribution
                # Fold each bootstrapped dist into the interpreter's extras.
                interpreter = interpreter.with_extra(dist.key, dist.version, dist.location)

        with nested(
                yield_pex_builder(installer_impl=WheelInstaller, interpreter=interpreter),
                temporary_filename()) as (pb, pex_file):
            for resolved_dist in resolver.resolve(['psutil==5.4.3'],
                                                  cache=cache,
                                                  precedence=(SourcePackage, WheelPackage),
                                                  interpreter=interpreter):
                pb.add_dist_location(resolved_dist.distribution.location)
            pb.build(pex_file)

            # NB: We want PEX to find the bare bad interpreter at runtime.
            pex = PEX(pex_file, interpreter=bad_interpreter())
            args = [
                '-c',
                'import pkg_resources; print(pkg_resources.get_supported_platform())'
            ]
            env = os.environ.copy()
            env['PEX_VERBOSE'] = '1'
            process = pex.run(args=args,
                              env=env,
                              blocking=False,
                              stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE)
            stdout, stderr = process.communicate()
            assert 0 == process.returncode, (
                'Process failed with exit code {} and stderr:\n{}'.format(
                    process.returncode, stderr))

            # Verify this all worked under the previously problematic
            # pkg_resources-reported platform.
            release, _, _ = platform.mac_ver()
            major_minor = '.'.join(release.split('.')[:2])
            assert to_bytes(
                'macosx-{}-intel'.format(major_minor)) == stdout.strip()
def run_simple_pex(pex, args=(), interpreter=None, stdin=None, **kwargs):
    """Execute `pex` with `args`, feeding `stdin`; return (output, returncode).

    stderr is folded into stdout, and carriage returns are stripped so callers
    can compare output across platforms.
    """
    runner = PEX(pex, interpreter=interpreter)
    proc = runner.run(args=args,
                      blocking=False,
                      stdin=subprocess.PIPE,
                      stdout=subprocess.PIPE,
                      stderr=subprocess.STDOUT,
                      **kwargs)
    output, _ = proc.communicate(input=stdin)
    return output.replace(b'\r', b''), proc.returncode