def assert_access_zipped_assets(distribution_helper_import):
  test_executable = dedent("""
      import os
      {distribution_helper_import}
      temp_dir = DistributionHelper.access_zipped_assets('my_package', 'submodule')
      with open(os.path.join(temp_dir, 'mod.py'), 'r') as fp:
        for line in fp:
          print(line)
      """.format(distribution_helper_import=distribution_helper_import))

  with nested(temporary_dir(), temporary_dir()) as (td1, td2):
    pb = PEXBuilder(path=td1)
    with open(os.path.join(td1, 'exe.py'), 'w') as fp:
      fp.write(test_executable)
      pb.set_executable(fp.name)

    submodule = os.path.join(td1, 'my_package', 'submodule')
    safe_mkdir(submodule)
    mod_path = os.path.join(submodule, 'mod.py')
    with open(mod_path, 'w') as fp:
      fp.write('accessed')
      pb.add_source(fp.name, 'my_package/submodule/mod.py')
    pb.add_source(None, 'my_package/__init__.py')
    pb.add_source(None, 'my_package/submodule/__init__.py')

    pex = os.path.join(td2, 'app.pex')
    pb.build(pex)

    process = PEX(pex, interpreter=pb.interpreter).run(blocking=False,
                                                       stdout=subprocess.PIPE,
                                                       stderr=subprocess.PIPE)
    stdout, stderr = process.communicate()
    assert process.returncode == 0
    assert b'accessed\n' == stdout
    return stderr

def execute(self):
  binary = self.require_single_root_target()
  if isinstance(binary, PythonBinary):
    # We can't throw if binary isn't a PythonBinary, because perhaps we were called on a
    # jvm_binary, in which case we have to no-op and let jvm_run do its thing.
    # TODO(benjy): Some more elegant way to coordinate how tasks claim targets.
    interpreter = self.select_interpreter_for_targets(self.context.targets())

    with self.temporary_pex_builder(interpreter=interpreter, pex_info=binary.pexinfo) as builder:
      chroot = PythonChroot(
        targets=[binary],
        builder=builder,
        platforms=binary.platforms,
        interpreter=interpreter,
        conn_timeout=self.conn_timeout)

      chroot.dump()
      builder.freeze()
      pex = PEX(builder.path(), interpreter=interpreter)
      self.context.release_lock()
      with self.context.new_workunit(name='run', labels=[WorkUnit.RUN]):
        args = self.get_options().args + self.get_passthru_args()
        po = pex.run(blocking=False, args=args)
        try:
          result = po.wait()
          if result != 0:
            # Note: use str.format here; the original mixed {}-style placeholders with the
            # %-operator, which raises at runtime.
            raise TaskError('python {args} ... exited non-zero ({code})'.format(args=args,
                                                                                code=result),
                            exit_code=result)
        except KeyboardInterrupt:
          po.send_signal(signal.SIGINT)
          raise

def execute(self, **pex_run_kwargs):
  (accept_predicate, reject_predicate) = Target.lang_discriminator('python')
  targets = self.require_homogeneous_targets(accept_predicate, reject_predicate)
  if targets:
    # We can't throw if the target isn't a python target, because perhaps we were called on a
    # JVM target, in which case we have to no-op and let scala repl do its thing.
    # TODO(benjy): Some more elegant way to coordinate how tasks claim targets.
    interpreter = self.select_interpreter_for_targets(targets)

    extra_requirements = []
    if self.get_options().ipython:
      entry_point = self.get_options().ipython_entry_point
      for req in self.get_options().ipython_requirements:
        extra_requirements.append(PythonRequirement(req))
    else:
      entry_point = 'code:interact'

    with self.temporary_chroot(interpreter=interpreter, targets=targets,
                               extra_requirements=extra_requirements,
                               pre_freeze=lambda ch: ch.builder.set_entry_point(entry_point)) as chroot:
      pex = PEX(chroot.builder.path(), interpreter=interpreter)
      self.context.release_lock()
      with stty_utils.preserve_stty_settings():
        with self.context.new_workunit(name='run', labels=[WorkUnit.RUN]):
          po = pex.run(blocking=False, **pex_run_kwargs)
          try:
            return po.wait()
          except KeyboardInterrupt:
            pass

def main():
  parser, resolver_options_builder = configure_clp()

  # split arguments early because optparse is dumb
  args = sys.argv[1:]
  try:
    separator = args.index('--')
    args, cmdline = args[:separator], args[separator + 1:]
  except ValueError:
    args, cmdline = args, []

  options, reqs = parser.parse_args(args=args)

  with ENV.patch(PEX_VERBOSE=str(options.verbosity)):
    with TRACER.timed('Building pex'):
      pex_builder = build_pex(reqs, options, resolver_options_builder)

    if options.pex_name is not None:
      log('Saving PEX file to %s' % options.pex_name, v=options.verbosity)
      tmp_name = options.pex_name + '~'
      safe_delete(tmp_name)
      pex_builder.build(tmp_name)
      os.rename(tmp_name, options.pex_name)
      return 0

    if options.platform != Platform.current():
      log('WARNING: attempting to run PEX with differing platform!')

    pex_builder.freeze()

    log('Running PEX file at %s with args %s' % (pex_builder.path(), cmdline),
        v=options.verbosity)
    pex = PEX(pex_builder.path(), interpreter=pex_builder.interpreter)
    sys.exit(pex.run(args=list(cmdline)))

def execute(self):
  binary = self.require_single_root_target()
  if isinstance(binary, PythonBinary):
    # We can't throw if binary isn't a PythonBinary, because perhaps we were called on a
    # jvm_binary, in which case we have to no-op and let jvm_run do its thing.
    # TODO(benjy): Some more elegant way to coordinate how tasks claim targets.
    interpreter = self.select_interpreter_for_targets(self.context.targets())

    with self.temporary_pex_builder(interpreter=interpreter, pex_info=binary.pexinfo) as builder:
      chroot = PythonChroot(
        targets=[binary],
        builder=builder,
        platforms=binary.platforms,
        interpreter=interpreter,
        conn_timeout=self.conn_timeout)

      chroot.dump()
      builder.freeze()
      pex = PEX(builder.path(), interpreter=interpreter)
      self.context.lock.release()
      with self.context.new_workunit(name='run', labels=[WorkUnit.RUN]):
        po = pex.run(blocking=False)
        try:
          return po.wait()
        except KeyboardInterrupt:
          po.send_signal(signal.SIGINT)
          raise

def main():
  parser = configure_clp()
  options, args = parser.parse_args()

  verbosity = 5 if options.verbosity else -1
  with Tracer.env_override(PEX_VERBOSE=verbosity, PEX_HTTP=verbosity):
    pex_builder = build_pex(args, options)

    if options.pex_name is not None:
      log('Saving PEX file to %s' % options.pex_name, v=options.verbosity)
      tmp_name = options.pex_name + '~'
      safe_delete(tmp_name)
      pex_builder.build(tmp_name)
      os.rename(tmp_name, options.pex_name)
      return 0

    if options.platform != Platform.current():
      log('WARNING: attempting to run PEX with differing platform!')

    pex_builder.freeze()

    log('Running PEX file at %s with args %s' % (pex_builder.path(), args),
        v=options.verbosity)
    pex = PEX(pex_builder.path(), interpreter=pex_builder.interpreter)
    return pex.run(args=list(args))

def execute(self):
  binary = self.require_single_root_target()
  if isinstance(binary, PythonBinary):
    # We can't throw if binary isn't a PythonBinary, because perhaps we were called on a
    # jvm_binary, in which case we have to no-op and let jvm_run do its thing.
    # TODO(benjy): Some more elegant way to coordinate how tasks claim targets.
    interpreter = self.select_interpreter_for_targets(self.context.targets())

    with self.temporary_chroot(interpreter=interpreter, pex_info=binary.pexinfo,
                               targets=[binary], platforms=binary.platforms) as chroot:
      pex = PEX(chroot.builder.path(), interpreter=interpreter)
      self.context.release_lock()
      with self.context.new_workunit(name='run', labels=[WorkUnit.RUN]):
        args = []
        for arg in self.get_options().args:
          args.extend(safe_shlex_split(arg))
        args += self.get_passthru_args()
        po = pex.run(blocking=False, args=args)
        try:
          result = po.wait()
          if result != 0:
            msg = '{interpreter} {entry_point} {args} ... exited non-zero ({code})'.format(
              interpreter=interpreter.binary,
              entry_point=binary.entry_point,
              args=' '.join(args),
              code=result)
            raise TaskError(msg, exit_code=result)
        except KeyboardInterrupt:
          po.send_signal(signal.SIGINT)
          raise

def execute(self):
  if self.old_options.pex and self.old_options.ipython:
    self.error('Cannot specify both --pex and --ipython!')

  if self.old_options.entry_point and self.old_options.ipython:
    self.error('Cannot specify both --entry_point and --ipython!')

  if self.old_options.verbose:
    print('Build operating on targets: %s' % ' '.join(str(target) for target in self.targets))

  builder = PEXBuilder(tempfile.mkdtemp(), interpreter=self.interpreter,
                       pex_info=self.binary.pexinfo if self.binary else None)

  if self.old_options.entry_point:
    builder.set_entry_point(self.old_options.entry_point)

  if self.old_options.ipython:
    if not self.config.has_section('python-ipython'):
      self.error('No python-ipython sections defined in your pants.ini!')

    builder.info.entry_point = self.config.get('python-ipython', 'entry_point')
    if builder.info.entry_point is None:
      self.error('Must specify entry_point for IPython in the python-ipython section '
                 'of your pants.ini!')

    requirements = self.config.getlist('python-ipython', 'requirements', default=[])

    for requirement in requirements:
      self.extra_requirements.append(PythonRequirement(requirement))

  executor = PythonChroot(
    targets=self.targets,
    extra_requirements=self.extra_requirements,
    builder=builder,
    platforms=self.binary.platforms if self.binary else None,
    interpreter=self.interpreter,
    conn_timeout=self.old_options.conn_timeout)

  executor.dump()

  if self.old_options.pex:
    pex_name = self.binary.name if self.binary else Target.maybe_readable_identify(self.targets)
    pex_path = os.path.join(self.root_dir, 'dist', '%s.pex' % pex_name)
    builder.build(pex_path)
    print('Wrote %s' % pex_path)
    return 0
  else:
    builder.freeze()
    pex = PEX(builder.path(), interpreter=self.interpreter)
    po = pex.run(args=list(self.args), blocking=False)
    try:
      return po.wait()
    except KeyboardInterrupt:
      po.send_signal(signal.SIGINT)
      raise

def run_simple_pex(pex, args=(), interpreter=None, stdin=None, **kwargs):
  p = PEX(pex, interpreter=interpreter)
  process = p.run(args=args,
                  blocking=False,
                  stdin=subprocess.PIPE,
                  stdout=subprocess.PIPE,
                  stderr=subprocess.STDOUT,
                  **kwargs)
  stdout, _ = process.communicate(input=stdin)
  print(stdout.decode('utf-8') if PY3 else stdout)
  return stdout.replace(b'\r', b''), process.returncode

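# A minimal sketch (not part of the original sources above) of how run_simple_pex might be
# driven from a test: build a trivial pex with PEXBuilder and assert on the captured output.
# The import locations (pex.pex_builder, pex.testing) and the throwaway exe.py script are
# assumptions made for illustration only.
import os

from pex.pex_builder import PEXBuilder
from pex.testing import temporary_dir


def test_run_simple_pex_hello_world():
  with temporary_dir() as td1, temporary_dir() as td2:
    # Write a one-line executable and package it into hello.pex (built outside the chroot dir).
    exe = os.path.join(td1, 'exe.py')
    with open(exe, 'w') as fp:
      fp.write('print("hello world")')
    pb = PEXBuilder(path=td1)
    pb.set_executable(exe)
    pex_path = os.path.join(td2, 'hello.pex')
    pb.build(pex_path)

    # run_simple_pex returns (stdout with carriage returns stripped, returncode).
    stdout, returncode = run_simple_pex(pex_path)
    assert returncode == 0
    assert b'hello world' in stdout
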
def _run_mypy(self, py3_interpreter, mypy_args, **kwargs):
  pex_info = PexInfo.default()
  pex_info.entry_point = 'mypy'
  mypy_version = self.get_options().mypy_version

  mypy_requirement_pex = self.resolve_requirement_strings(
    py3_interpreter, ['mypy=={}'.format(mypy_version)])

  path = os.path.realpath(os.path.join(self.workdir, str(py3_interpreter.identity), mypy_version))
  if not os.path.isdir(path):
    self.merge_pexes(path, pex_info, py3_interpreter, [mypy_requirement_pex])
  pex = PEX(path, py3_interpreter)
  return pex.run(mypy_args, **kwargs)

def main(args=None):
  args = args[:] if args else sys.argv[1:]
  args = [transform_legacy_arg(arg) for arg in args]
  parser, resolver_options_builder = configure_clp()

  try:
    separator = args.index('--')
    args, cmdline = args[:separator], args[separator + 1:]
  except ValueError:
    args, cmdline = args, []

  options, reqs = parser.parse_args(args=args)

  if options.python and options.interpreter_constraint:
    die('The "--python" and "--interpreter-constraint" options cannot be used together.')

  if options.pex_root:
    ENV.set('PEX_ROOT', options.pex_root)
  else:
    options.pex_root = ENV.PEX_ROOT  # If option not specified fallback to env variable.

  # Don't alter cache if it is disabled.
  if options.cache_dir:
    options.cache_dir = make_relative_to_root(options.cache_dir)

  with ENV.patch(PEX_VERBOSE=str(options.verbosity)):
    with TRACER.timed('Building pex'):
      pex_builder = build_pex(reqs, options, resolver_options_builder)

    pex_builder.freeze(bytecode_compile=options.compile)
    pex = PEX(pex_builder.path(),
              interpreter=pex_builder.interpreter,
              verify_entry_point=options.validate_ep)

    if options.pex_name is not None:
      log('Saving PEX file to %s' % options.pex_name, V=options.verbosity)
      tmp_name = options.pex_name + '~'
      safe_delete(tmp_name)
      pex_builder.build(
        tmp_name,
        bytecode_compile=options.compile,
        deterministic_timestamp=not options.use_system_time
      )
      os.rename(tmp_name, options.pex_name)
    else:
      if not _compatible_with_current_platform(options.platforms):
        log('WARNING: attempting to run PEX with incompatible platforms!')

      log('Running PEX file at %s with args %s' % (pex_builder.path(), cmdline),
          V=options.verbosity)
      sys.exit(pex.run(args=list(cmdline)))

class PythonToolInstance(object):
  def __init__(self, pex_path, interpreter):
    self._pex = PEX(pex_path, interpreter=interpreter)
    self._interpreter = interpreter

  @property
  def pex(self):
    return self._pex

  @property
  def interpreter(self):
    return self._interpreter

  def _pretty_cmdline(self, args):
    return safe_shlex_join(self._pex.cmdline(args))

  def output(self, args, stdin_payload=None, binary_mode=False, **kwargs):
    process = self._pex.run(args,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE,
                            with_chroot=False,
                            blocking=False,
                            **kwargs)
    if stdin_payload is not None:
      stdin_payload = ensure_binary(stdin_payload)
    (stdout, stderr) = process.communicate(input=stdin_payload)
    if not binary_mode:
      stdout = stdout.decode('utf-8')
      stderr = stderr.decode('utf-8')
    return (stdout, stderr, process.returncode, self._pretty_cmdline(args))

  @contextmanager
  def run_with(self, workunit_factory, args, **kwargs):
    cmdline = self._pretty_cmdline(args)
    with workunit_factory(cmd=cmdline) as workunit:
      exit_code = self._pex.run(args,
                                stdout=workunit.output('stdout'),
                                stderr=workunit.output('stderr'),
                                with_chroot=False,
                                blocking=True,
                                **kwargs)
      yield cmdline, exit_code, workunit

  def run(self, *args, **kwargs):
    with self.run_with(*args, **kwargs) as (cmdline, exit_code, _):
      return cmdline, exit_code

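# A minimal sketch (not part of the original sources above) of using a PythonToolInstance-style
# wrapper outside of a pants workunit: capture a tool's stdout/stderr via output(). The pex path
# '/tmp/isort.pex' and the use of pex.interpreter.PythonInterpreter.get() to pick an interpreter
# are assumptions for illustration; in pants this class is normally constructed by the
# tool-preparation tasks, not by hand.
from pex.interpreter import PythonInterpreter

tool = PythonToolInstance('/tmp/isort.pex', PythonInterpreter.get())
stdout, stderr, returncode, cmdline = tool.output(['--version'])
print('{} exited {}: {}'.format(cmdline, returncode, stdout.strip()))
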
def test_site_libs():
  with nested(mock.patch.object(PEX, '_get_site_packages'), temporary_dir()) as (
      mock_site_packages, tempdir):
    site_packages = os.path.join(tempdir, 'site-packages')
    os.mkdir(site_packages)
    mock_site_packages.return_value = set([site_packages])
    site_libs = PEX.site_libs()
    assert site_packages in site_libs

def execute(self):
    binary = self.require_single_root_target()
    if isinstance(binary, PythonBinary):
        # We can't throw if binary isn't a PythonBinary, because perhaps we were called on a
        # jvm_binary, in which case we have to no-op and let jvm_run do its thing.
        # TODO(benjy): Use MutexTask to coordinate this.
        interpreter = self.context.products.get_data(PythonInterpreter)

        with temporary_dir() as tmpdir:
            # Create a wrapper pex to "merge" the other pexes into via PEX_PATH.
            builder = PEXBuilder(tmpdir, interpreter, pex_info=binary.pexinfo)
            builder.freeze()

            pexes = [
                self.context.products.get_data(ResolveRequirements.REQUIREMENTS_PEX),
                self.context.products.get_data(GatherSources.PYTHON_SOURCES),
            ]

            # TODO: Expose the path as a property in pex, instead of relying on
            # fishing it out of the cmdline.
            pex_path = os.pathsep.join([pex.cmdline()[1] for pex in pexes])

            pex = PEX(tmpdir, interpreter)

            self.context.release_lock()
            with self.context.new_workunit(name="run", labels=[WorkUnitLabel.RUN]):
                args = []
                for arg in self.get_options().args:
                    args.extend(safe_shlex_split(arg))
                args += self.get_passthru_args()
                po = pex.run(blocking=False, args=args, env={"PEX_PATH": pex_path})
                try:
                    result = po.wait()
                    if result != 0:
                        msg = "{interpreter} {entry_point} {args} ... exited non-zero ({code})".format(
                            interpreter=interpreter.binary,
                            entry_point=binary.entry_point,
                            args=" ".join(args),
                            code=result,
                        )
                        raise TaskError(msg, exit_code=result)
                except KeyboardInterrupt:
                    po.send_signal(signal.SIGINT)
                    raise

def __init__(self, interpreter, pex):
  # Here we hack around `coverage.cmdline` nuking the 0th element of `sys.path` (our root pex)
  # by ensuring the root pex is on the sys.path twice.
  # See: https://github.com/nedbat/coveragepy/issues/715
  pex_path = pex.path()
  pex_info = PexInfo.from_pex(pex_path)
  pex_info.merge_pex_path(pex_path)  # We're now on the sys.path twice.
  PEXBuilder(pex_path, interpreter=interpreter, pex_info=pex_info).freeze()
  self._pex = PEX(pex=pex_path, interpreter=interpreter)
  self._interpreter = interpreter

def test_site_libs_excludes_prefix():
  """Windows returns sys.prefix as part of getsitepackages(). Make sure to exclude it."""
  with nested(mock.patch.object(PEX, '_get_site_packages'), temporary_dir()) as (
      mock_site_packages, tempdir):
    site_packages = os.path.join(tempdir, 'site-packages')
    os.mkdir(site_packages)
    mock_site_packages.return_value = set([site_packages, sys.prefix])
    site_libs = PEX.site_libs()
    assert site_packages in site_libs
    assert sys.prefix not in site_libs

def main(args=None):
  args = args or sys.argv[1:]
  parser, resolver_options_builder = configure_clp()

  try:
    separator = args.index('--')
    args, cmdline = args[:separator], args[separator + 1:]
  except ValueError:
    args, cmdline = args, []

  options, reqs = parser.parse_args(args=args)

  if options.pex_root:
    ENV.set('PEX_ROOT', options.pex_root)
  else:
    options.pex_root = ENV.PEX_ROOT  # If option not specified fallback to env variable.

  # Don't alter cache if it is disabled.
  if options.cache_dir:
    options.cache_dir = make_relative_to_root(options.cache_dir)
  options.interpreter_cache_dir = make_relative_to_root(options.interpreter_cache_dir)

  with ENV.patch(PEX_VERBOSE=str(options.verbosity)):
    with TRACER.timed('Building pex'):
      pex_builder = build_pex(reqs, options, resolver_options_builder)

    if options.pex_name is not None:
      log('Saving PEX file to %s' % options.pex_name, v=options.verbosity)
      tmp_name = options.pex_name + '~'
      safe_delete(tmp_name)
      pex_builder.build(tmp_name)
      os.rename(tmp_name, options.pex_name)
      return 0

    if options.platform != Platform.current():
      log('WARNING: attempting to run PEX with differing platform!')

    pex_builder.freeze()

    log('Running PEX file at %s with args %s' % (pex_builder.path(), cmdline),
        v=options.verbosity)
    pex = PEX(pex_builder.path(), interpreter=pex_builder.interpreter)
    sys.exit(pex.run(args=list(cmdline)))

def test_site_libs_symlink():
  with nested(mock.patch.object(PEX, '_get_site_packages'), temporary_dir()) as (
      mock_site_packages, tempdir):
    site_packages = os.path.join(tempdir, 'site-packages')
    os.mkdir(site_packages)
    site_packages_link = os.path.join(tempdir, 'site-packages-link')
    os.symlink(site_packages, site_packages_link)
    mock_site_packages.return_value = set([site_packages_link])

    site_libs = PEX.site_libs()
    assert os.path.realpath(site_packages) in site_libs
    assert site_packages_link in site_libs

def execute(self):
  (accept_predicate, reject_predicate) = Target.lang_discriminator('python')
  targets = self.require_homogeneous_targets(accept_predicate, reject_predicate)
  if targets:
    # We can't throw if the target isn't a python target, because perhaps we were called on a
    # JVM target, in which case we have to no-op and let scala repl do its thing.
    # TODO(benjy): Some more elegant way to coordinate how tasks claim targets.
    interpreter = self.select_interpreter_for_targets(targets)

    extra_requirements = []
    if self.get_options().ipython:
      entry_point = self.context.config.get('python-ipython', 'entry_point',
                                            default='IPython:start_ipython')
      ipython_requirements = self.context.config.getlist('python-ipython', 'requirements',
                                                         default=['ipython==1.0.0'])
      for req in ipython_requirements:
        extra_requirements.append(PythonRequirement(req))
    else:
      entry_point = 'code:interact'

    with self.temporary_pex_builder(interpreter=interpreter) as builder:
      builder.set_entry_point(entry_point)
      chroot = PythonChroot(
        targets=targets,
        extra_requirements=extra_requirements,
        builder=builder,
        interpreter=interpreter,
        conn_timeout=self.conn_timeout)

      chroot.dump()
      builder.freeze()
      pex = PEX(builder.path(), interpreter=interpreter)
      self.context.lock.release()
      with stty_utils.preserve_stty_settings():
        with self.context.new_workunit(name='run', labels=[WorkUnit.RUN]):
          po = pex.run(blocking=False)
          try:
            return po.wait()
          except KeyboardInterrupt:
            pass

def test_minimum_sys_modules():
  # builtins stay
  builtin_module = ModuleType('my_builtin')
  modules = {'my_builtin': builtin_module}
  new_modules = PEX.minimum_sys_modules([], modules)
  assert new_modules == modules
  new_modules = PEX.minimum_sys_modules(['bad_path'], modules)
  assert new_modules == modules

  # tainted evict
  tainted_module = ModuleType('tainted_module')
  tainted_module.__path__ = ['bad_path']
  modules = {'tainted_module': tainted_module}
  new_modules = PEX.minimum_sys_modules([], modules)
  assert new_modules == modules
  new_modules = PEX.minimum_sys_modules(['bad_path'], modules)
  assert new_modules == {}
  assert tainted_module.__path__ == []

  # tainted cleaned
  tainted_module = ModuleType('tainted_module')
  tainted_module.__path__ = ['bad_path', 'good_path']
  modules = {'tainted_module': tainted_module}
  new_modules = PEX.minimum_sys_modules([], modules)
  assert new_modules == modules
  new_modules = PEX.minimum_sys_modules(['bad_path'], modules)
  assert new_modules == modules
  assert tainted_module.__path__ == ['good_path']

def _run_python_tests(self, targets, stdout, stderr):
  coverage_rc = None
  coverage_enabled = 'PANTS_PY_COVERAGE' in os.environ

  try:
    builder = PEXBuilder(interpreter=self.interpreter)
    builder.info.entry_point = 'pytest'
    chroot = PythonChroot(
      targets=targets,
      extra_requirements=self._TESTING_TARGETS,
      builder=builder,
      platforms=('current',),
      interpreter=self.interpreter,
      conn_timeout=self._conn_timeout)
    builder = chroot.dump()
    builder.freeze()

    test_args = []
    test_args.extend(PythonTestBuilder.generate_junit_args(targets))
    test_args.extend(self.args)
    if coverage_enabled:
      coverage_rc, args = self.cov_setup(targets)
      test_args.extend(args)

    sources = list(itertools.chain(*[t.sources_relative_to_buildroot() for t in targets]))
    pex = PEX(builder.path(), interpreter=self.interpreter)
    rc = pex.run(args=test_args + sources, blocking=True, setsid=True,
                 stdout=stdout, stderr=stderr)
    # TODO(wickman): If coverage is enabled, write an intermediate .html that points to
    # each of the coverage reports generated and webbrowser.open to that page.
    rv = PythonTestResult.rc(rc)
  except Exception:
    import traceback
    print('Failed to run test!', file=stderr)
    traceback.print_exc()
    rv = PythonTestResult.exception()
  finally:
    if coverage_rc:
      os.unlink(coverage_rc)

  return rv

def test_pex_builder_preamble():
  with temporary_dir() as td:
    target = os.path.join(td, 'foo.pex')
    should_create = os.path.join(td, 'foo.1')

    tempfile_preamble = "\n".join([
      "import sys",
      "open('{0}', 'w').close()".format(should_create),
      "sys.exit(3)"
    ])

    pb = PEXBuilder(preamble=tempfile_preamble)
    pb.build(target)

    assert not os.path.exists(should_create)

    pex = PEX(target, interpreter=pb.interpreter)
    process = pex.run(blocking=False)
    process.wait()

    assert process.returncode == 3
    assert os.path.exists(should_create)

class PytestBinary(object):
  """A `py.test` PEX binary with an embedded default (empty) `pytest.ini` config file."""

  _COVERAGE_PLUGIN_MODULE_NAME = '__{}__'.format(__name__.replace('.', '_'))

  def __init__(self, interpreter, pex):
    # Here we hack around `coverage.cmdline` nuking the 0th element of `sys.path` (our root pex)
    # by ensuring the root pex is on the sys.path twice.
    # See: https://github.com/nedbat/coveragepy/issues/715
    pex_path = pex.path()
    pex_info = PexInfo.from_pex(pex_path)
    pex_info.merge_pex_path(pex_path)  # We're now on the sys.path twice.
    PEXBuilder(pex_path, interpreter=interpreter, pex_info=pex_info).freeze()
    self._pex = PEX(pex=pex_path, interpreter=interpreter)
    self._interpreter = interpreter

  @property
  def pex(self):
    """Return the loose-source py.test binary PEX.

    :rtype: :class:`pex.pex.PEX`
    """
    return self._pex

  @property
  def interpreter(self):
    """Return the interpreter used to build this PEX.

    :rtype: :class:`pex.interpreter.PythonInterpreter`
    """
    return self._interpreter

  @property
  def config_path(self):
    """Return the absolute path of the `pytest.ini` config file in this py.test binary.

    :rtype: str
    """
    return os.path.join(self._pex.path(), 'pytest.ini')

  @classmethod
  def coverage_plugin_module(cls):
    """Return the name of the coverage plugin module embedded in this py.test binary.

    :rtype: str
    """
    return cls._COVERAGE_PLUGIN_MODULE_NAME

def test_minimum_sys_modules():
  # builtins stay
  builtin_module = ModuleType('my_builtin')
  modules = {'my_builtin': builtin_module}
  new_modules = PEX.minimum_sys_modules([], modules)
  assert new_modules == modules
  new_modules = PEX.minimum_sys_modules(['bad_path'], modules)
  assert new_modules == modules

  # tainted evict
  tainted_module = ModuleType('tainted_module')
  tainted_module.__path__ = ['bad_path']
  modules = {'tainted_module': tainted_module}
  new_modules = PEX.minimum_sys_modules([], modules)
  assert new_modules == modules
  new_modules = PEX.minimum_sys_modules(['bad_path'], modules)
  assert new_modules == {}
  assert tainted_module.__path__ == []

  # tainted cleaned
  tainted_module = ModuleType('tainted_module')
  tainted_module.__path__ = ['bad_path', 'good_path']
  modules = {'tainted_module': tainted_module}
  new_modules = PEX.minimum_sys_modules([], modules)
  assert new_modules == modules
  new_modules = PEX.minimum_sys_modules(['bad_path'], modules)
  assert new_modules == modules
  assert tainted_module.__path__ == ['good_path']

  # If __path__ is not a list the module is removed; typically this implies
  # it's a namespace package (https://www.python.org/dev/peps/pep-0420/) where
  # __path__ is a _NamespacePath.
  try:
    from importlib._bootstrap_external import _NamespacePath
    bad_path = _NamespacePath("hello", "world", None)
  except ImportError:
    bad_path = {"hello": "world"}

  class FakeModule(object):
    pass

  tainted_module = FakeModule()
  tainted_module.__path__ = bad_path  # Not a list as expected
  modules = {'tainted_module': tainted_module}
  new_modules = PEX.minimum_sys_modules(['bad_path'], modules)
  assert new_modules == {}

def _compile_target(self, vt):
  """'Compiles' a python target.

  'Compiling' means forming an isolated chroot of its sources and transitive deps and then
  attempting to import each of the target's sources in the case of a python library or else the
  entry point in the case of a python binary.

  For a library with sources lib/core.py and lib/util.py a "compiler" main file would look like:

    if __name__ == '__main__':
      import lib.core
      import lib.util

  For a binary with entry point lib.bin:main the "compiler" main file would look like:

    if __name__ == '__main__':
      from lib.bin import main

  In either case the main file is executed within the target chroot to reveal missing BUILD
  dependencies.
  """
  target = vt.target
  with self.context.new_workunit(name=target.address.spec):
    modules = self._get_modules(target)
    if not modules:
      # Nothing to eval, so a trivial compile success.
      return 0

    interpreter = self._get_interpreter_for_target_closure(target)
    reqs_pex = self._resolve_requirements_for_versioned_target_closure(interpreter, vt)
    srcs_pex = self._source_pex_for_versioned_target_closure(interpreter, vt)

    # Create the executable pex.
    exec_pex_parent = os.path.join(self.workdir, 'executable_pex')
    executable_file_content = self._get_executable_file_content(exec_pex_parent, modules)

    hasher = hashlib.sha1()
    hasher.update(reqs_pex.path().encode('utf-8'))
    hasher.update(srcs_pex.path().encode('utf-8'))
    hasher.update(executable_file_content.encode('utf-8'))
    exec_file_hash = hasher.hexdigest()

    exec_pex_path = os.path.realpath(os.path.join(exec_pex_parent, exec_file_hash))
    if not os.path.isdir(exec_pex_path):
      with safe_concurrent_creation(exec_pex_path) as safe_path:
        # Write the entry point.
        safe_mkdir(safe_path)
        with open(os.path.join(safe_path, '{}.py'.format(self._EXEC_NAME)), 'w') as outfile:
          outfile.write(executable_file_content)
        pex_info = (target.pexinfo if isinstance(target, PythonBinary) else None) or PexInfo()
        # Override any user-specified entry point, under the assumption that the
        # executable_file_content does what the user intends (including, probably, calling that
        # underlying entry point).
        pex_info.entry_point = self._EXEC_NAME
        pex_info.pex_path = ':'.join(pex.path() for pex in (reqs_pex, srcs_pex) if pex)
        builder = PEXBuilder(safe_path, interpreter, pex_info=pex_info)
        builder.freeze()

    pex = PEX(exec_pex_path, interpreter)

    with self.context.new_workunit(name='eval',
                                   labels=[WorkUnitLabel.COMPILER, WorkUnitLabel.RUN,
                                           WorkUnitLabel.TOOL],
                                   cmd=' '.join(pex.cmdline())) as workunit:
      returncode = pex.run(stdout=workunit.output('stdout'), stderr=workunit.output('stderr'))
      workunit.set_outcome(WorkUnit.SUCCESS if returncode == 0 else WorkUnit.FAILURE)
      if returncode != 0:
        self.context.log.error('Failed to eval {}'.format(target.address.spec))
      return returncode

def checker_pex(self, interpreter):
  # TODO(John Sirois): Formalize in pants.base?
  pants_dev_mode = os.environ.get('PANTS_DEV', '0') != '0'

  if pants_dev_mode:
    checker_id = self.checker_target.transitive_invalidation_hash()
  else:
    checker_id = hash_all([self._CHECKER_REQ])

  pex_path = os.path.join(self.workdir, 'checker', checker_id, str(interpreter.identity))

  if not os.path.exists(pex_path):
    with self.context.new_workunit(name='build-checker'):
      with safe_concurrent_creation(pex_path) as chroot:
        pex_builder = PexBuilderWrapper.Factory.create(
          builder=PEXBuilder(path=chroot, interpreter=interpreter),
          log=self.context.log)

        # Constraining is required to guard against the case where the user
        # has a pexrc file set.
        pex_builder.add_interpreter_constraint(str(interpreter.identity.requirement))

        if pants_dev_mode:
          pex_builder.add_sources_from(self.checker_target)
          req_libs = [tgt for tgt in self.checker_target.closure()
                      if isinstance(tgt, PythonRequirementLibrary)]
          pex_builder.add_requirement_libs_from(req_libs=req_libs)
        else:
          try:
            # The checker is already on sys.path, eg: embedded in pants.pex.
            platform = Platform.current()
            platform_name = platform.platform
            env = Environment(search_path=sys.path,
                              platform=platform_name,
                              python=interpreter.version_string)
            working_set = WorkingSet(entries=sys.path)

            for dist in working_set.resolve([Requirement.parse(self._CHECKER_REQ)], env=env):
              pex_builder.add_direct_requirements(dist.requires())
              # NB: We add the dist location instead of the dist itself to make sure it's a
              # distribution style pex knows how to package.
              pex_builder.add_dist_location(dist.location)
            pex_builder.add_direct_requirements([self._CHECKER_REQ])
          except (DistributionNotFound, PEXBuilder.InvalidDistribution):
            # We need to resolve the checker from a local or remote distribution repo.
            pex_builder.add_resolved_requirements([PythonRequirement(self._CHECKER_REQ)])

        pex_builder.set_entry_point(self._CHECKER_ENTRYPOINT)
        pex_builder.freeze()

  return PEX(pex_path, interpreter=interpreter)

def _source_pex_for_versioned_target_closure(self, interpreter, vt):
  source_pex_path = os.path.realpath(os.path.join(self.workdir, vt.cache_key.hash))
  if not os.path.isdir(source_pex_path):
    with safe_concurrent_creation(source_pex_path) as safe_path:
      self._build_source_pex(interpreter, safe_path, vt.target.closure())
  return PEX(source_pex_path, interpreter=interpreter)

def _compile_target(self, vt):
  """'Compiles' a python target.

  'Compiling' means forming an isolated chroot of its sources and transitive deps and then
  attempting to import each of the target's sources in the case of a python library or else the
  entry point in the case of a python binary.

  For a library with sources lib/core.py and lib/util.py a "compiler" main file would look like:

    if __name__ == '__main__':
      import lib.core
      import lib.util

  For a binary with entry point lib.bin:main the "compiler" main file would look like:

    if __name__ == '__main__':
      from lib.bin import main

  In either case the main file is executed within the target chroot to reveal missing BUILD
  dependencies.
  """
  target = vt.target
  with self.context.new_workunit(name=target.address.spec):
    modules = self._get_modules(target)
    if not modules:
      # Nothing to eval, so a trivial compile success.
      return 0

    interpreter = self._get_interpreter_for_target_closure(target)
    reqs_pex = self._resolve_requirements_for_versioned_target_closure(interpreter, vt)
    srcs_pex = self._source_pex_for_versioned_target_closure(interpreter, vt)

    # Create the executable pex.
    exec_pex_parent = os.path.join(self.workdir, 'executable_pex')
    executable_file_content = self._get_executable_file_content(exec_pex_parent, modules)
    hasher = hashlib.sha1()
    hasher.update(executable_file_content)
    exec_file_hash = hasher.hexdigest()
    exec_pex_path = os.path.realpath(os.path.join(exec_pex_parent, exec_file_hash))
    if not os.path.isdir(exec_pex_path):
      with safe_concurrent_creation(exec_pex_path) as safe_path:
        # Write the entry point.
        safe_mkdir(safe_path)
        with open(os.path.join(safe_path, '{}.py'.format(self._EXEC_NAME)), 'w') as outfile:
          outfile.write(executable_file_content)
        pex_info = (target.pexinfo if isinstance(target, PythonBinary) else None) or PexInfo()
        # Override any user-specified entry point, under the assumption that the
        # executable_file_content does what the user intends (including, probably, calling that
        # underlying entry point).
        pex_info.entry_point = self._EXEC_NAME
        builder = PEXBuilder(safe_path, interpreter, pex_info=pex_info)
        builder.freeze()

    exec_pex = PEX(exec_pex_path, interpreter)
    extra_pex_paths = [pex.path() for pex in filter(None, [reqs_pex, srcs_pex])]
    pex = WrappedPEX(exec_pex, extra_pex_paths, interpreter)

    with self.context.new_workunit(name='eval',
                                   labels=[WorkUnitLabel.COMPILER, WorkUnitLabel.RUN,
                                           WorkUnitLabel.TOOL],
                                   cmd=' '.join(exec_pex.cmdline())) as workunit:
      returncode = pex.run(stdout=workunit.output('stdout'), stderr=workunit.output('stderr'))
      workunit.set_outcome(WorkUnit.SUCCESS if returncode == 0 else WorkUnit.FAILURE)
      if returncode != 0:
        self.context.log.error('Failed to eval {}'.format(target.address.spec))
      return returncode

def __init__(self, pex_path, interpreter):
  self._pex = PEX(pex_path, interpreter=interpreter)
  self._interpreter = interpreter

def test_osx_platform_intel_issue_523():
    # type: () -> None

    def bad_interpreter():
        # type: () -> PythonInterpreter
        return PythonInterpreter.from_binary(_KNOWN_BAD_APPLE_INTERPRETER)

    with temporary_dir() as cache:
        # We need to run the bad interpreter with a modern, non-Apple-Extras setuptools in order to
        # successfully install psutil; yield_pex_builder sets up the bad interpreter with our
        # vendored setuptools and wheel extras.
        with nested(yield_pex_builder(interpreter=bad_interpreter()), temporary_filename()) as (
            pb,
            pex_file,
        ):
            for resolved_dist in resolver.resolve(
                ["psutil==5.4.3"], cache=cache, interpreter=pb.interpreter
            ):
                pb.add_dist_location(resolved_dist.distribution.location)
            pb.build(pex_file)

            # NB: We want PEX to find the bare bad interpreter at runtime.
            pex = PEX(pex_file, interpreter=bad_interpreter())

            def run(args, **env):
                # type: (Iterable[str], **str) -> Tuple[int, str, str]
                pex_env = os.environ.copy()
                pex_env["PEX_VERBOSE"] = "1"
                pex_env.update(**env)
                process = pex.run(
                    args=args,
                    env=pex_env,
                    blocking=False,
                    stdout=subprocess.PIPE,
                    stderr=subprocess.PIPE,
                )
                stdout, stderr = process.communicate()
                return process.returncode, stdout.decode("utf-8"), stderr.decode("utf-8")

            returncode, _, stderr = run(["-c", "import psutil"])
            assert 0 == returncode, "Process failed with exit code {} and stderr:\n{}".format(
                returncode, stderr
            )

            returncode, stdout, stderr = run(["-c", "import pkg_resources"])
            assert 0 != returncode, (
                "Isolated pex process succeeded but should not have found pkg-resources:\n"
                "STDOUT:\n"
                "{}\n"
                "STDERR:\n"
                "{}".format(stdout, stderr)
            )

            returncode, stdout, stderr = run(
                ["-c", "import pkg_resources; print(pkg_resources.get_supported_platform())"],
                # Let the bad interpreter site-packages setuptools leak in.
                PEX_INHERIT_PATH=InheritPath.for_value(True).value,
            )
            assert 0 == returncode, "Process failed with exit code {} and stderr:\n{}".format(
                returncode, stderr
            )

            # Verify this worked alongside the previously problematic pkg_resources-reported
            # platform.
            release, _, _ = platform.mac_ver()
            major_minor = ".".join(release.split(".")[:2])
            assert "macosx-{}-intel".format(major_minor) == stdout.strip()

def _compile_target(self, target):
  # "Compiles" a target by forming an isolated chroot of its sources and transitive deps and then
  # attempting to import each of the target's sources in the case of a python library or else the
  # entry point in the case of a python binary.
  #
  # For a library with sources lib/core.py and lib/util.py a "compiler" main file would look like:
  #
  #   if __name__ == '__main__':
  #     import lib.core
  #     import lib.util
  #
  # For a binary with entry point lib.bin:main the "compiler" main file would look like:
  #
  #   if __name__ == '__main__':
  #     from lib.bin import main
  #
  # In either case the main file is executed within the target chroot to reveal missing BUILD
  # dependencies.

  with self.context.new_workunit(name=target.address.spec):
    modules = []
    if isinstance(target, PythonBinary):
      source = 'entry_point {}'.format(target.entry_point)
      components = target.entry_point.rsplit(':', 1)
      module = components[0]
      if len(components) == 2:
        function = components[1]
        data = TemplateData(source=source,
                            import_statement='from {} import {}'.format(module, function))
      else:
        data = TemplateData(source=source, import_statement='import {}'.format(module))
      modules.append(data)
    else:
      for path in target.sources_relative_to_source_root():
        if path.endswith('.py'):
          if os.path.basename(path) == '__init__.py':
            module_path = os.path.dirname(path)
          else:
            module_path, _ = os.path.splitext(path)
          source = 'file {}'.format(os.path.join(target.target_base, path))
          module = module_path.replace(os.path.sep, '.')
          data = TemplateData(source=source, import_statement='import {}'.format(module))
          modules.append(data)

    if not modules:
      # Nothing to eval, so a trivial compile success.
      return 0

    interpreter = self.select_interpreter_for_targets([target])

    if isinstance(target, PythonBinary):
      pexinfo, platforms = target.pexinfo, target.platforms
    else:
      pexinfo, platforms = None, None

    with temporary_file() as imports_file:
      def pre_freeze(chroot):
        generator = Generator(pkgutil.get_data(__name__, self._EVAL_TEMPLATE_PATH),
                              chroot=chroot.path(),
                              modules=modules)
        generator.write(imports_file)
        imports_file.close()
        chroot.builder.set_executable(imports_file.name, '__pants_python_eval__.py')

      with self.temporary_chroot(interpreter=interpreter, pex_info=pexinfo, targets=[target],
                                 platforms=platforms, pre_freeze=pre_freeze) as chroot:
        pex = PEX(chroot.builder.path(), interpreter=interpreter)
        with self.context.new_workunit(name='eval',
                                       labels=[WorkUnit.COMPILER, WorkUnit.RUN, WorkUnit.TOOL],
                                       cmd=' '.join(pex.cmdline())) as workunit:
          returncode = pex.run(stdout=workunit.output('stdout'),
                               stderr=workunit.output('stderr'))
          workunit.set_outcome(WorkUnit.SUCCESS if returncode == 0 else WorkUnit.FAILURE)
          if returncode != 0:
            self.context.log.error('Failed to eval {}'.format(target.address.spec))
          return returncode