def _upload_zip(zip_file: str, package_path: str, resolved_fs=None, force_upload: bool = False):
    """Upload a local archive to remote storage, skipping uploads of identical pexes.

    :param zip_file: local path of the archive to upload.
    :param package_path: destination path on the resolved filesystem.
    :param resolved_fs: filesystem abstraction supporting exists/get/put/mkdir/rm.
    :param force_upload: when True, upload even if the remote pex has the same code hash.
    """
    packer = packaging.detect_packer_from_file(zip_file)
    if packer == packaging.PEX_PACKER and resolved_fs.exists(package_path):
        with tempfile.TemporaryDirectory() as tempdir:
            local_copy_path = os.path.join(tempdir, os.path.basename(package_path))
            resolved_fs.get(package_path, local_copy_path)
            info_from_storage = PexInfo.from_pex(local_copy_path)
            # FIX: was misspelled `into_to_upload`.
            info_to_upload = PexInfo.from_pex(zip_file)
            # Identical code hashes mean the remote pex already matches this one.
            if not force_upload and info_from_storage.code_hash == info_to_upload.code_hash:
                _logger.info(f"skip upload of current {zip_file}"
                             f" as it is already uploaded on {package_path}")
                return

    _logger.info(f"upload current {zip_file} to {package_path}")

    # FIX: was named `dir`, shadowing the builtin.
    target_dir = os.path.dirname(package_path)
    if not resolved_fs.exists(target_dir):
        resolved_fs.mkdir(target_dir)
    resolved_fs.put(zip_file, package_path)

    # Remove previous metadata
    archive_meta_data = _get_archive_metadata_path(package_path)
    if resolved_fs.exists(archive_meta_data):
        resolved_fs.rm(archive_meta_data)
def test_pants_binary_interpreter_selection_with_pexrc(self):
    # Integration test: with a pexrc PEX_PYTHON_PATH listing both a Python 2.7 and a
    # Python 3 interpreter, `binary` must build each target with its own constraints.
    py27_path, py3_path = python_interpreter_path(PY_27), python_interpreter_path(PY_3)
    with setup_pexrc_with_pex_python_path([py27_path, py3_path]):
        with temporary_dir() as interpreters_cache:
            pants_ini_config = {'python-setup': {'interpreter_cache_dir': interpreters_cache}}
            pants_run_27 = self.run_pants(
                command=['binary', '{}:main_py2'.format(os.path.join(self.testproject, 'python_3_selection_testing'))],
                config=pants_ini_config
            )
            self.assert_success(pants_run_27)
            pants_run_3 = self.run_pants(
                command=['binary', '{}:main_py3'.format(os.path.join(self.testproject, 'python_3_selection_testing'))],
                config=pants_ini_config
            )
            self.assert_success(pants_run_3)

            # Ensure proper interpreter constraints were passed to built pexes.
            py2_pex = os.path.join(os.getcwd(), 'dist', 'main_py2.pex')
            py3_pex = os.path.join(os.getcwd(), 'dist', 'main_py3.pex')
            py2_info = PexInfo.from_pex(py2_pex)
            py3_info = PexInfo.from_pex(py3_pex)
            self.assertIn('CPython>2.7.6,<3', py2_info.interpreter_constraints)
            self.assertIn('CPython>3', py3_info.interpreter_constraints)

            # Cleanup created pexes.
            os.remove(py2_pex)
            os.remove(py3_pex)
def cached_chroot(self, interpreter, pex_info, targets, platforms, extra_requirements=None, executable_file_content=None): """Returns a cached PythonChroot created with the specified args. The returned chroot will be cached for future use. TODO: Garbage-collect old chroots, so they don't pile up? TODO: Ideally chroots would just be products produced by some other task. But that's a bit too complicated to implement right now, as we'd need a way to request chroots for a variety of sets of targets. """ # This PexInfo contains any customizations specified by the caller. # The process of building a pex modifies it further. pex_info = pex_info or PexInfo.default() path = self._chroot_path(interpreter, pex_info, targets, platforms, extra_requirements, executable_file_content) if not os.path.exists(path): path_tmp = path + '.tmp' self._build_chroot(path_tmp, interpreter, pex_info, targets, platforms, extra_requirements, executable_file_content) shutil.move(path_tmp, path) # We must read the PexInfo that was frozen into the pex, so we get the modifications # created when that pex was built. pex_info = PexInfo.from_pex(path) # Now create a PythonChroot wrapper without dumping it. builder = PEXBuilder(path=path, interpreter=interpreter, pex_info=pex_info, copy=True) return self.create_chroot(interpreter=interpreter, builder=builder, targets=targets, platforms=platforms, extra_requirements=extra_requirements)
def test_pex_root_set_none():
    # type: () -> None
    # Assigning None to pex_root must fall back to the default root (~/.pex).
    pex_info = PexInfo.default()
    pex_info.pex_root = None

    expected_root = os.path.expanduser("~/.pex")
    assert pex_info.pex_root == PexInfo.default().pex_root
    assert pex_info.pex_root == expected_root
def cached_chroot(self, interpreter, pex_info, targets, platforms=None, extra_requirements=None, executable_file_content=None): """Returns a cached PythonChroot created with the specified args. The returned chroot will be cached for future use. :rtype: pants.backend.python.python_chroot.PythonChroot TODO: Garbage-collect old chroots, so they don't pile up? TODO: Ideally chroots would just be products produced by some other task. But that's a bit too complicated to implement right now, as we'd need a way to request chroots for a variety of sets of targets. """ # This PexInfo contains any customizations specified by the caller. # The process of building a pex modifies it further. pex_info = pex_info or PexInfo.default() path = self._chroot_path(interpreter, pex_info, targets, platforms, extra_requirements, executable_file_content) if not os.path.exists(path): path_tmp = path + '.tmp' self._build_chroot(path_tmp, interpreter, pex_info, targets, platforms, extra_requirements, executable_file_content) shutil.move(path_tmp, path) # We must read the PexInfo that was frozen into the pex, so we get the modifications # created when that pex was built. pex_info = PexInfo.from_pex(path) # Now create a PythonChroot wrapper without dumping it. builder = PEXBuilder(path=path, interpreter=interpreter, pex_info=pex_info, copy=True) return self.create_chroot(interpreter=interpreter, builder=builder, targets=targets, platforms=platforms, extra_requirements=extra_requirements)
def test_copy():
    # type: () -> None
    # Copying the default PexInfo must yield a distinct object with equal contents.
    pristine = PexInfo.default()
    pristine_clone = pristine.copy()
    assert pristine_clone is not pristine
    assert pristine_clone.dump() == pristine.dump()

    # Populate one of every field category and verify each value survives the copy.
    source = PexInfo.default()
    source.unzip = True
    source.code_hash = "foo"
    source.inherit_path = InheritPath.FALLBACK
    for requirement in ("bar==1", "baz==2"):
        source.add_requirement(requirement)
    source.add_distribution("bar.whl", "bar-sha")
    source.add_distribution("baz.whl", "baz-sha")
    source.add_interpreter_constraint(">=2.7.18")
    source.add_interpreter_constraint("CPython==2.7.9")

    clone = source.copy()
    assert clone.unzip is True
    assert clone.code_hash == "foo"
    assert clone.inherit_path == InheritPath.FALLBACK
    assert clone.requirements == OrderedSet(["bar==1", "baz==2"])
    assert clone.distributions == {"bar.whl": "bar-sha", "baz.whl": "baz-sha"}
    assert set(clone.interpreter_constraints) == {">=2.7.18", "CPython==2.7.9"}
    assert clone.dump() == source.dump()
def test_from_env():
    # type: () -> None
    # Verify PEX_* environment variables round-trip into the equivalent PexInfo
    # fields (e.g. PEX_FORCE_LOCAL=true -> zip_safe=False).
    with temporary_dir() as td:
        pex_root = os.path.realpath(os.path.join(td, "pex_root"))
        environ = dict(
            PEX_ROOT=pex_root,
            PEX_MODULE="entry:point",
            PEX_SCRIPT="script.sh",
            PEX_FORCE_LOCAL="true",
            PEX_UNZIP="true",
            PEX_INHERIT_PATH="prefer",
            PEX_IGNORE_ERRORS="true",
            PEX_ALWAYS_CACHE="true",
        )

        info = dict(
            pex_root=pex_root,
            entry_point="entry:point",
            script="script.sh",
            zip_safe=False,
            unzip=True,
            # NOTE(review): PEX_INHERIT_PATH="prefer" is expected to compare equal
            # to inherit_path=True here — confirm against PexInfo's serialization.
            inherit_path=True,
            ignore_errors=True,
            always_write_cache=True,
        )
        assert_same_info(PexInfo(info=info), PexInfo.from_env(env=Variables(environ=environ)))
def __init__(self, pex=sys.argv[0], interpreter=None, env=ENV, verify_entry_point=False):
    """Wrap the pex file at `pex` for execution.

    :param pex: Path to the pex file; defaults to the currently running script.
    :param interpreter: Interpreter to run with; defaults to the current interpreter.
    :param env: Variables object from which PEX_* overrides are sourced.
    :param verify_entry_point: If True, eagerly validate the configured entry point.
    """
    self._pex = pex
    self._interpreter = interpreter or PythonInterpreter.get()
    # Metadata frozen into the pex, plus any PEX_* overrides from the environment.
    self._pex_info = PexInfo.from_pex(self._pex)
    self._pex_info_overrides = PexInfo.from_env(env=env)
    self._vars = env
    self._envs = []
    self._working_set = None
    if verify_entry_point:
        self._do_entry_point_verification()
def main(
    pex=None,  # type: Optional[PEX]
    pex_prog_path=None,  # type: Optional[str]
):
    # type: (...) -> int
    """Entry point for PEX_TOOLS subcommands.

    When `pex` is None, the pex to operate on is taken as a positional CLI
    argument and a compatible interpreter is resolved from its constraints.
    Returns the exit code of the selected subcommand.
    """
    logging.basicConfig(format="%(levelname)s: %(message)s", level=logging.INFO)
    with TRACER.timed("Executing PEX_TOOLS"):
        pex_prog_path = simplify_pex_path(pex_prog_path or pex.path()) if pex else None
        # The displayed program name differs depending on whether we were invoked
        # from inside a pex (PEX_TOOLS=1 <pex>) or as a plain module.
        prog = (
            "PEX_TOOLS=1 {pex_path}".format(pex_path=pex_prog_path)
            if pex
            else "{python} {module}".format(
                python=sys.executable, module=".".join(__name__.split(".")[:-1])
            )
        )
        parser = ArgumentParser(
            prog=prog,
            description="Tools for working with {}.".format(pex_prog_path if pex else "PEX files"),
        )
        if pex is None:
            parser.add_argument(
                "pex", nargs=1, metavar="PATH", help="The path of the PEX file to operate on."
            )
        # Default action when no subcommand is given: show help.
        parser.set_defaults(func=functools.partial(show_help, parser))
        subparsers = parser.add_subparsers(
            description="{} can be operated on using any of the following subcommands.".format(
                "The PEX file {}".format(pex_prog_path) if pex else "A PEX file"
            ),
        )
        for command in commands.all_commands():
            name = command.__class__.__name__.lower()
            # N.B.: We want to trigger the default argparse description if the doc string is empty.
            description = command.__doc__ or None
            help_text = description.splitlines()[0] if description else None
            command_parser = subparsers.add_parser(name, help=help_text, description=description)
            command.add_arguments(command_parser)
            command_parser.set_defaults(func=command.run)

        options = parser.parse_args()
        if pex is None:
            # Resolve an interpreter compatible with the pex's recorded constraints,
            # with PEX_* environment overrides applied.
            pex_info = PexInfo.from_pex(options.pex[0])
            pex_info.update(PexInfo.from_env())
            interpreter = pex_bootstrapper.find_compatible_interpreter(
                interpreter_constraints=pex_info.interpreter_constraints
            )
            pex = PEX(options.pex[0], interpreter=interpreter)

        func = cast("CommandFunc", options.func)
        result = func(pex, options)
        result.maybe_display()
        return result.exit_code
def cached_chroot(self, interpreter, pex_info, targets, platforms, extra_requirements=None, executable_file_content=None): """Returns a cached PythonChroot created with the specified args. The returned chroot will be cached for future use. TODO: Garbage-collect old chroots, so they don't pile up? TODO: Ideally chroots would just be products produced by some other task. But that's a bit too complicated to implement right now, as we'd need a way to request chroots for a variety of sets of targets. """ # This PexInfo contains any customizations specified by the caller. # The process of building a pex modifies it further. pex_info = pex_info or PexInfo.default() path = self._chroot_path(PythonSetup.global_instance(), interpreter, pex_info, targets, platforms, extra_requirements, executable_file_content) if not os.path.exists(path): path_tmp = path + '.tmp' self._build_chroot(path_tmp, interpreter, pex_info, targets, platforms, extra_requirements, executable_file_content) shutil.move(path_tmp, path) # We must read the PexInfo that was frozen into the pex, so we get the modifications # created when that pex was built. pex_info = PexInfo.from_pex(path) # Now create a PythonChroot wrapper without dumping it. builder = PEXBuilder(path=path, interpreter=interpreter, pex_info=pex_info) chroot = PythonChroot(context=self.context, python_setup=PythonSetup.global_instance(), python_repos=PythonRepos.global_instance(), interpreter=interpreter, builder=builder, targets=targets, platforms=platforms, extra_requirements=extra_requirements) # TODO: Doesn't really need to be a contextmanager, but it's convenient to make it so # while transitioning calls to temporary_chroot to calls to cached_chroot. # We can revisit after that transition is complete. yield chroot
def nsutil_pex(self):
    """Build (once) and return a helper PEX that prints, for each path argument,
    the paths that declare namespace packages.

    The chroot is keyed by interpreter version so each interpreter gets its own build.
    """
    interpreter = self.context.products.get_data(PythonInterpreter)
    chroot = os.path.join(self.workdir, 'nsutil', interpreter.version_string)
    if not os.path.exists(chroot):
        pex_info = PexInfo.default(interpreter=interpreter)
        # safe_concurrent_creation gives us a scratch dir that is only promoted to
        # `chroot` on success, so concurrent builders don't clobber each other.
        with safe_concurrent_creation(chroot) as scratch:
            builder = PEXBuilder(path=scratch, interpreter=interpreter,
                                 pex_info=pex_info, copy=True)
            with temporary_file(binary_mode=False) as fp:
                # Inline the source of `declares_namespace_package` into the helper
                # script so the pex is self-contained.
                declares_namespace_package_code = inspect.getsource(declares_namespace_package)
                fp.write(textwrap.dedent("""
                    import sys

                    {declares_namespace_package_code}

                    if __name__ == '__main__':
                        for path in sys.argv[1:]:
                            if declares_namespace_package(path):
                                print(path)
                """).strip().format(
                    declares_namespace_package_code=declares_namespace_package_code))
                fp.close()
                builder.set_executable(filename=fp.name, env_filename='main.py')
                builder.freeze()
    return PEX(pex=chroot, interpreter=interpreter)
def _activate(self):
    """Lazily build and memoize the WorkingSet of distributions from this pex and
    every pex on the merged PEX_PATH."""
    if not self._working_set:
        working_set = WorkingSet([])

        # set up the local .pex environment
        pex_info = self._pex_info.copy()
        pex_info.update(self._pex_info_overrides)
        pex_info.merge_pex_path(self._vars.PEX_PATH)
        self._envs.append(PEXEnvironment(self._pex, pex_info, interpreter=self._interpreter))
        # N.B. by this point, `pex_info.pex_path` will contain a single pex path
        # merged from pex_path in `PEX-INFO` and `PEX_PATH` set in the environment.
        # `PEX_PATH` entries written into `PEX-INFO` take precedence over those set
        # in the environment.
        if pex_info.pex_path:
            # set up other environments as specified in pex_path
            for pex_path in filter(None, pex_info.pex_path.split(os.pathsep)):
                pex_info = PexInfo.from_pex(pex_path)
                pex_info.update(self._pex_info_overrides)
                self._envs.append(PEXEnvironment(pex_path, pex_info,
                                                 interpreter=self._interpreter))

        # activate all of them
        for env in self._envs:
            for dist in env.activate():
                working_set.add(dist)

        self._working_set = working_set

    return self._working_set
def activate_pex():
    """Bootstrap and activate the pex named by the UWSGI_PEX environment variable.

    Returns an (entry_point, pex_info, env, working_set) tuple.
    NOTE(review): this function uses Python 2 print-statement syntax, so it only
    runs under Python 2.
    """
    entry_point = os.environ.get('UWSGI_PEX')
    if not entry_point:
        sys.stderr.write('couldnt determine pex from UWSGI_PEX environment variable, bailing!\n')
        sys.exit(1)

    sys.stderr.write('entry_point=%s\n' % entry_point)

    # Absolutize the leading sys.path entry, then put the pex and its .bootstrap
    # directory at the front so the pex's own code wins import resolution.
    sys.path[0] = os.path.abspath(sys.path[0])
    sys.path.insert(0, entry_point)
    sys.path.insert(0, os.path.abspath(os.path.join(entry_point, '.bootstrap')))

    # Imports are deferred until after sys.path is set up, since `pex` is
    # provided by the .bootstrap directory just added.
    from pex import pex_bootstrapper
    from pex.environment import PEXEnvironment
    from pex.finders import register_finders
    from pex.pex_info import PexInfo

    pex_bootstrapper.monkeypatch_build_zipmanifest()
    register_finders()

    pex_info = PexInfo.from_pex(entry_point)
    print str(pex_info)
    env = PEXEnvironment(entry_point, pex_info)
    working_set = env.activate()
    sys.stderr.write('sys.path=%s\n\n' % sys.path)
    return entry_point, pex_info, env, working_set
def _activate(self):
    # type: () -> WorkingSet
    """Activate this pex and every pex on its pex_path into a single WorkingSet."""
    working_set = WorkingSet([])

    # set up the local .pex environment
    pex_info = self.pex_info()
    self._envs.append(PEXEnvironment(self._pex, pex_info, interpreter=self._interpreter))
    # N.B. by this point, `pex_info.pex_path` will contain a single pex path
    # merged from pex_path in `PEX-INFO` and `PEX_PATH` set in the environment.
    # `PEX_PATH` entries written into `PEX-INFO` take precedence over those set
    # in the environment.
    if pex_info.pex_path:
        # set up other environments as specified in pex_path
        for pex_path in filter(None, pex_info.pex_path.split(os.pathsep)):
            pex_info = PexInfo.from_pex(pex_path)
            pex_info.update(self._pex_info_overrides)
            self._envs.append(PEXEnvironment(pex_path, pex_info, interpreter=self._interpreter))

    # activate all of them
    for env in self._envs:
        for dist in env.activate():
            working_set.add(dist)

    # Ensure that pkg_resources is not imported until at least every pex environment
    # (i.e. PEX_PATH) has been merged into the environment
    PEXEnvironment.declare_namespace_packages(working_set)
    self.patch_pkg_resources(working_set)
    return working_set
def execute_codegen(self, target, results_dir):
    """Build a target's cython extension in place and copy the built library into results_dir.

    :param target: The codegen target owning exactly one setup.py.
    :param results_dir: Directory to receive the built library (target.output).
    :raises TaskError: if the target does not own exactly one setup.py, or the build fails.
    """
    self.context.log.info("Processing target {}".format(target))
    requirements_pex = self.context.products.get_data(ResolveRequirements.REQUIREMENTS_PEX)
    interpreter = self.context.products.get_data(PythonInterpreter)
    pex_info = PexInfo.default(interpreter)
    # Chain the pre-resolved requirements pex so its dists are importable during the build.
    pex_info.pex_path = requirements_pex.path()

    with temporary_dir() as source_pex_chroot:
        sources_pex_builder = PEXBuilder(
            path=source_pex_chroot,
            interpreter=interpreter,
            copy=True,
            pex_info=pex_info
        )
        pex_build_util.dump_sources(sources_pex_builder, target, self.context.log)
        sources_pex_builder.freeze()
        codegen_pex = PEX(sources_pex_builder.path(), interpreter)

        # The target must own exactly one setup.py to drive the build.
        setup_py_paths = [
            source for source in target.sources_relative_to_source_root()
            if os.path.basename(source) == 'setup.py'
        ]
        if len(setup_py_paths) != 1:
            # BUG FIX: the original passed `setup_py_paths` where the target belongs,
            # rendering the error message nonsensical.
            raise TaskError(
                'Expected target {} to own exactly one setup.py, found {}'.format(
                    target, len(setup_py_paths)
                )
            )
        setup_py_path = setup_py_paths[0]

        result_code = codegen_pex.run(
            with_chroot=True,
            blocking=True,
            args=(setup_py_path, 'build_ext', '--inplace', '--verbose'),
            # Passing PATH helps cython find the correct c++ compiler
            env={'libraries': results_dir, 'PATH': os.getenv('PATH')}
        )
        if result_code != 0:
            raise TaskError(
                'creating cython library failed',
                exit_code=result_code,
                failed_targets=[target]
            )

        # The in-place build leaves the library next to setup.py inside the chroot;
        # move it out to the durable results dir.
        library_source_path = os.path.join(
            sources_pex_builder.path(), os.path.dirname(setup_py_path), target.output
        )
        library_output = os.path.join(results_dir, target.output)
        safe_mkdir_for(library_output)
        shutil.move(library_source_path, library_output)
        self.context.log.info(
            'created library {}'.format(os.path.relpath(library_output, get_buildroot()))
        )
def create(
    cls,
    path,  # type: str
    interpreter=None,  # type: Optional[PythonInterpreter]
):
    # type: (...) -> Pip
    """Creates a pip tool with PEX isolation at path.

    :param path: The path to assemble the pip tool at.
    :param interpreter: The interpreter to run Pip with. The current interpreter by default.
    :return: The path of a PEX that can be used to execute Pip in isolation.
    """
    pip_interpreter = interpreter or PythonInterpreter.get()
    # Key the pip pex dir by the isolated pex hash so new pex versions rebuild it.
    pip_pex_path = os.path.join(path, isolated().pex_hash)
    with atomic_directory(pip_pex_path, exclusive=True) as chroot:
        # Only the first (winning) process builds; others see a finalized chroot.
        if not chroot.is_finalized:
            # Local import to defer the dependency until a build is actually needed.
            from pex.pex_builder import PEXBuilder

            isolated_pip_builder = PEXBuilder(path=chroot.work_dir)
            isolated_pip_builder.info.venv = True
            for dist_location in third_party.expose(["pip", "setuptools", "wheel"]):
                isolated_pip_builder.add_dist_location(dist=dist_location)
            isolated_pip_builder.set_script("pip")
            isolated_pip_builder.freeze()
    # Read back outside the atomic_directory block so the committed pex is visible.
    pex_info = PexInfo.from_pex(pip_pex_path)
    pex_info.add_interpreter_constraint(str(pip_interpreter.identity.requirement))
    return cls(
        ensure_venv(PEX(pip_pex_path, interpreter=pip_interpreter, pex_info=pex_info)))
def test_can_add_handles_invalid_wheel_filename(python_35_interpreter):
    # type: (PythonInterpreter) -> None
    # A wheel whose filename does not parse must be rejected (not raise).
    environment = PEXEnvironment(
        pex="",
        pex_info=PexInfo.default(python_35_interpreter),
        interpreter=python_35_interpreter,
    )
    bad_wheel = Distribution("pep427-invalid.whl")
    assert environment.can_add(bad_wheel) is False
def _create_binary(self, binary_tgt, results_dir): """Create a .pex file for the specified binary target.""" # Note that we rebuild a chroot from scratch, instead of using the REQUIREMENTS_PEX # and PYTHON_SOURCES products, because those products are already-built pexes, and there's # no easy way to merge them into a single pex file (for example, they each have a __main__.py, # metadata, and so on, which the merging code would have to handle specially). interpreter = self.context.products.get_data(PythonInterpreter) with temporary_dir() as tmpdir: # Create the pex_info for the binary. run_info_dict = self.context.run_tracker.run_info.get_as_dict() build_properties = PexInfo.make_build_properties() build_properties.update(run_info_dict) pex_info = binary_tgt.pexinfo.copy() pex_info.build_properties = build_properties builder = PEXBuilder(path=tmpdir, interpreter=interpreter, pex_info=pex_info, copy=True) # Find which targets provide sources and which specify requirements. source_tgts = [] req_tgts = [] for tgt in binary_tgt.closure(exclude_scopes=Scopes.COMPILE): if has_python_sources(tgt) or has_resources(tgt): source_tgts.append(tgt) elif has_python_requirements(tgt): req_tgts.append(tgt) # Dump everything into the builder's chroot. for tgt in source_tgts: dump_sources(builder, tgt, self.context.log) dump_requirements(builder, interpreter, req_tgts, self.context.log, binary_tgt.platforms) # Build the .pex file. pex_path = os.path.join(results_dir, '{}.pex'.format(binary_tgt.name)) builder.build(pex_path) return pex_path
def add_from_requirements_pex(self, pex):
    """Add requirements from an existing pex.

    :param pex: The path to an existing .pex file or unzipped pex directory.
    """
    self._ensure_unfrozen("Adding from pex")
    pex_info = PexInfo.from_pex(pex)

    def add(location, dname, expected_dhash):
        # Re-hash the distribution and verify it matches the source pex's recorded
        # hash before registering it in this builder's PexInfo.
        dhash = self._add_dist_dir(location, dname)
        if dhash != expected_dhash:
            raise self.InvalidDistribution(
                "Distribution {} at {} had hash {}, expected {}".format(
                    dname, location, dhash, expected_dhash))
        self._pex_info.add_distribution(dname, dhash)

    if os.path.isfile(pex):
        # Zipped pex: extract each distribution through the install cache first.
        with open_zip(pex) as zf:
            for dist_name, dist_hash in pex_info.distributions.items():
                internal_dist_path = "/".join([pex_info.internal_cache, dist_name])
                cached_location = os.path.join(pex_info.install_cache, dist_hash, dist_name)
                CacheHelper.cache_distribution(zf, internal_dist_path, cached_location)
                add(cached_location, dist_name, dist_hash)
    else:
        # Unzipped pex directory: distributions can be added straight from disk.
        for dist_name, dist_hash in pex_info.distributions.items():
            add(os.path.join(pex, pex_info.internal_cache, dist_name), dist_name, dist_hash)

    for req in pex_info.requirements:
        self._pex_info.add_requirement(req)
def __init__(self, path=None, interpreter=None, chroot=None, pex_info=None, preamble=None,
             copy=False):
    """Initialize a pex builder.

    :keyword path: The path to write the PEX as it is built.  If ``None`` is specified,
      a temporary directory will be created.
    :keyword interpreter: The interpreter to use to build this PEX environment.  If ``None``
      is specified, the current interpreter is used.
    :keyword chroot: If specified, preexisting :class:`Chroot` to use for building the PEX.
    :keyword pex_info: A preexisting PexInfo to use to build the PEX.
    :keyword preamble: If supplied, execute this code prior to bootstrapping this PEX
      environment.
    :type preamble: str
    :keyword copy: If False, attempt to create the pex environment via hard-linking,
      falling back to copying across devices. If True, always copy.

    .. versionchanged:: 0.8
        The temporary directory created when ``path`` is not specified is now garbage
        collected on interpreter exit.
    """
    self._interpreter = interpreter or PythonInterpreter.get()
    self._chroot = chroot or Chroot(path or safe_mkdtemp())
    self._pex_info = pex_info or PexInfo.default(self._interpreter)
    self._preamble = preamble or ''
    self._copy = copy
    # Default shebang comes from the building interpreter's identity.
    self._shebang = self._interpreter.identity.hashbang()
    self._logger = logging.getLogger(__name__)
    self._frozen = False
    self._distributions = set()
def __init__(
    self,
    pex,  # type: str
    pex_info=None,  # type: Optional[PexInfo]
    interpreter=None,  # type: Optional[PythonInterpreter]
):
    # type: (...) -> None
    """Create an environment rooted at `pex`.

    :param pex: Path to the pex file or directory.
    :param pex_info: Metadata for the pex; read from the pex itself when omitted.
    :param interpreter: Interpreter to resolve against; the current one by default.
    """
    self._pex = pex
    self._pex_info = pex_info or PexInfo.from_pex(pex)
    self._internal_cache = os.path.join(self._pex, self._pex_info.internal_cache)
    self._activated = False
    self._working_set = None
    self._interpreter = interpreter or PythonInterpreter.get()
    self._inherit_path = self._pex_info.inherit_path
    self._supported_tags = frozenset(self._interpreter.identity.supported_tags)
    self._target_interpreter_env = self._interpreter.identity.env_markers

    # For the bug this works around, see: https://bitbucket.org/pypy/pypy/issues/1686
    # NB: This must be installed early before the underlying pex is loaded in any way.
    if self._interpreter.identity.python_tag.startswith("pp") and zipfile.is_zipfile(self._pex):
        self._install_pypy_zipimporter_workaround(self._pex)

    super(PEXEnvironment, self).__init__(
        # inherit_path == FALSE means the pex ignores the ambient sys.path entirely.
        search_path=[] if self._pex_info.inherit_path == InheritPath.FALSE else sys.path,
        platform=self._interpreter.identity.platform_tag,
    )
    TRACER.log(
        "E: tags for %r x %r -> %s" % (self.platform, self._interpreter, self._supported_tags),
        V=9,
    )
def execute(self, **pex_run_kwargs):
    """Run a Python REPL (plain or IPython) over the requested python targets."""
    (accept_predicate, reject_predicate) = Target.lang_discriminator('python')
    targets = self.require_homogeneous_targets(accept_predicate, reject_predicate)
    if targets:
        # We can't throw if the target isn't a python target, because perhaps we were called on a
        # JVM target, in which case we have to no-op and let scala repl do its thing.
        # TODO(benjy): Some more elegant way to coordinate how tasks claim targets.
        interpreter = self.select_interpreter_for_targets(targets)

        extra_requirements = []
        if self.get_options().ipython:
            entry_point = self.get_options().ipython_entry_point
            for req in self.get_options().ipython_requirements:
                extra_requirements.append(PythonRequirement(req))
        else:
            entry_point = 'code:interact'

        pex_info = PexInfo.default()
        pex_info.entry_point = entry_point
        with self.cached_chroot(interpreter=interpreter,
                                pex_info=pex_info,
                                targets=targets,
                                platforms=None,
                                extra_requirements=extra_requirements) as chroot:
            pex = chroot.pex()
            # Release the lock so other pants invocations can proceed while the REPL runs.
            self.context.release_lock()
            with stty_utils.preserve_stty_settings():
                with self.context.new_workunit(name='run', labels=[WorkUnitLabel.RUN]):
                    po = pex.run(blocking=False, **pex_run_kwargs)
                    try:
                        return po.wait()
                    except KeyboardInterrupt:
                        # Ctrl-C in the REPL should not surface as a pants error.
                        pass
def __init__(
    self,
    pex,  # type: str
    pex_info=None,  # type: Optional[PexInfo]
    target=None,  # type: Optional[DistributionTarget]
):
    # type: (...) -> None
    """Create an environment for `pex`, resolved against `target`.

    :param pex: Path to the pex file or directory.
    :param pex_info: Metadata for the pex; read from the pex itself when omitted.
    :param target: Distribution target to resolve for; the current one by default.
    """
    self._pex = os.path.realpath(pex)
    self._pex_info = pex_info or PexInfo.from_pex(pex)

    self._available_ranked_dists_by_key = defaultdict(
        list)  # type: DefaultDict[str, List[_RankedDistribution]]
    self._activated_dists = None  # type: Optional[Iterable[Distribution]]

    self._target = target or DistributionTarget.current()
    self._interpreter_version = self._target.get_python_version_str()

    # The supported_tags come ordered most specific (platform specific) to least specific
    # (universal). We want to rank most specific highest; so we need to reverse iteration order
    # here.
    self._supported_tags_to_rank = {
        tag: rank
        for rank, tag in enumerate(reversed(self._target.get_supported_tags()))
    }
    self._platform, _ = self._target.get_platform()

    # For the bug this works around, see: https://bitbucket.org/pypy/pypy/issues/1686
    # NB: This must be installed early before the underlying pex is loaded in any way.
    if self._platform.impl == "pp" and zipfile.is_zipfile(self._pex):
        self._install_pypy_zipimporter_workaround(self._pex)
def test_target_constraints_with_no_sources(self):
    """A target with no sources must still honor the global CPython>3 constraint."""
    with temporary_dir() as interpreters_cache:
        pants_ini_config = {
            'python-setup': {
                'interpreter_cache_dir': interpreters_cache,
                'interpreter_constraints': ['CPython>3'],
            }
        }
        # Run task.
        pants_run = self.run_pants(
            command=['run', '{}:test_bin'.format(os.path.join(self.testproject, 'test_target_with_no_sources'))],
            config=pants_ini_config
        )
        self.assert_success(pants_run)
        self.assertIn('python3', pants_run.stdout_data)

        # Binary task.
        pants_run = self.run_pants(
            command=['binary', '{}:test_bin'.format(os.path.join(self.testproject, 'test_target_with_no_sources'))],
            config=pants_ini_config
        )
        self.assert_success(pants_run)

        # Ensure proper interpreter constraints were passed to the built pex.
        # FIX: these locals were misleadingly named `py2_pex`/`py2_info` even though
        # the enforced constraint is CPython>3.
        binary_pex = os.path.join(os.getcwd(), 'dist', 'test_bin.pex')
        binary_info = PexInfo.from_pex(binary_pex)
        self.assertIn('CPython>3', binary_info.interpreter_constraints)

        # Cleanup.
        os.remove(binary_pex)
def execute(self, **pex_run_kwargs):
    """Run a Python REPL (plain or IPython) over the requested python targets."""
    (accept_predicate, reject_predicate) = Target.lang_discriminator('python')
    targets = self.require_homogeneous_targets(accept_predicate, reject_predicate)
    if targets:
        # We can't throw if the target isn't a python target, because perhaps we were called on a
        # JVM target, in which case we have to no-op and let scala repl do its thing.
        # TODO(benjy): Some more elegant way to coordinate how tasks claim targets.
        interpreter = self.select_interpreter_for_targets(targets)

        extra_requirements = []
        if self.get_options().ipython:
            entry_point = self.get_options().ipython_entry_point
            for req in self.get_options().ipython_requirements:
                extra_requirements.append(PythonRequirement(req))
        else:
            entry_point = 'code:interact'

        pex_info = PexInfo.default()
        pex_info.entry_point = entry_point
        with self.temporary_chroot(interpreter=interpreter,
                                   pex_info=pex_info,
                                   targets=targets,
                                   platforms=None,
                                   extra_requirements=extra_requirements) as chroot:
            pex = chroot.pex()
            # Release the lock so other pants invocations can proceed while the REPL runs.
            self.context.release_lock()
            with stty_utils.preserve_stty_settings():
                with self.context.new_workunit(name='run', labels=[WorkUnit.RUN]):
                    po = pex.run(blocking=False, **pex_run_kwargs)
                    try:
                        return po.wait()
                    except KeyboardInterrupt:
                        # Ctrl-C in the REPL should not surface as a pants error.
                        pass
def _create_binary(self, binary_tgt, results_dir):
    """Create a .pex file for the specified binary target."""
    # Note that we rebuild a chroot from scratch, instead of using the REQUIREMENTS_PEX
    # and PYTHON_SOURCES products, because those products are already-built pexes, and there's
    # no easy way to merge them into a single pex file (for example, they each have a __main__.py,
    # metadata, and so on, which the merging code would have to handle specially).
    interpreter = self.context.products.get_data(PythonInterpreter)
    with temporary_dir() as tmpdir:
        # Create the pex_info for the binary.
        run_info_dict = self.context.run_tracker.run_info.get_as_dict()
        build_properties = PexInfo.make_build_properties()
        build_properties.update(run_info_dict)
        pex_info = binary_tgt.pexinfo.copy()
        pex_info.build_properties = build_properties

        builder = PEXBuilder(path=tmpdir, interpreter=interpreter, pex_info=pex_info, copy=True)

        if binary_tgt.shebang:
            self.context.log.info(
                'Found Python binary target {} with customized shebang, using it: {}'
                .format(binary_tgt.name, binary_tgt.shebang))
            builder.set_shebang(binary_tgt.shebang)
        else:
            self.context.log.debug(
                'No customized shebang found for {}'.format(binary_tgt.name))

        # Find which targets provide sources and which specify requirements.
        source_tgts = []
        req_tgts = []
        for tgt in binary_tgt.closure(exclude_scopes=Scopes.COMPILE):
            if has_python_sources(tgt) or has_resources(tgt):
                source_tgts.append(tgt)
            elif has_python_requirements(tgt):
                req_tgts.append(tgt)
            # Add target's interpreter compatibility constraints to pex info.
            if is_python_target(tgt):
                for constraint in tgt.compatibility:
                    builder.add_interpreter_constraint(constraint)

        # Dump everything into the builder's chroot.
        for tgt in source_tgts:
            dump_sources(builder, tgt, self.context.log)

        # We need to ensure that we are resolving for only the current platform if we are
        # including local python dist targets that have native extensions.
        build_for_current_platform_only_check(self.context.targets())
        dump_requirement_libs(builder, interpreter, req_tgts, self.context.log,
                              platforms=binary_tgt.platforms)

        # Build the .pex file.
        pex_path = os.path.join(results_dir, '{}.pex'.format(binary_tgt.name))
        builder.build(pex_path)
        return pex_path
def _activate(self):
    # type: () -> Iterable[Distribution]
    """Activate this pex and every pex on its pex_path; returns the activated dists."""
    # set up the local .pex environment
    pex_info = self.pex_info()
    target = DistributionTarget.for_interpreter(self._interpreter)
    self._envs.append(PEXEnvironment(self._pex, pex_info, target=target))
    # N.B. by this point, `pex_info.pex_path` will contain a single pex path
    # merged from pex_path in `PEX-INFO` and `PEX_PATH` set in the environment.
    # `PEX_PATH` entries written into `PEX-INFO` take precedence over those set
    # in the environment.
    if pex_info.pex_path:
        # set up other environments as specified in pex_path
        for pex_path in filter(None, pex_info.pex_path.split(os.pathsep)):
            pex_info = PexInfo.from_pex(pex_path)
            pex_info.update(self._pex_info_overrides)
            self._envs.append(PEXEnvironment(pex_path, pex_info, target=target))

    # activate all of them
    activated_dists = []  # type: List[Distribution]
    for env in self._envs:
        activated_dists.extend(env.activate())

    # Ensure that pkg_resources is not imported until at least every pex environment
    # (i.e. PEX_PATH) has been merged into the environment
    PEXEnvironment._declare_namespace_packages(activated_dists)
    return activated_dists
def _activate(self):
    """Lazily build and memoize the WorkingSet of distributions from this pex and
    every pex on the merged PEX_PATH."""
    if not self._working_set:
        working_set = WorkingSet([])

        # set up the local .pex environment
        pex_info = self._pex_info.copy()
        pex_info.update(self._pex_info_overrides)
        pex_info.merge_pex_path(self._vars.PEX_PATH)
        self._envs.append(
            PEXEnvironment(self._pex, pex_info, interpreter=self._interpreter))
        # N.B. by this point, `pex_info.pex_path` will contain a single pex path
        # merged from pex_path in `PEX-INFO` and `PEX_PATH` set in the environment.
        # `PEX_PATH` entries written into `PEX-INFO` take precedence over those set
        # in the environment.
        if pex_info.pex_path:
            # set up other environments as specified in pex_path
            for pex_path in filter(None, pex_info.pex_path.split(os.pathsep)):
                pex_info = PexInfo.from_pex(pex_path)
                pex_info.update(self._pex_info_overrides)
                self._envs.append(
                    PEXEnvironment(pex_path, pex_info, interpreter=self._interpreter))

        # activate all of them
        for env in self._envs:
            for dist in env.activate():
                working_set.add(dist)

        self._working_set = working_set

    return self._working_set
def execute(self):
    """Build a pytest runner pex and publish it as a PytestBinary product.

    No-op when the build graph contains no python test targets.
    """
    python_tests_present = self.context.targets(lambda t: isinstance(t, PythonTests))
    if not python_tests_present:
        return
    info = PexInfo.default()
    info.entry_point = 'pytest'
    runner_pex = self.create_pex(info)
    self.context.products.register_data(self.PytestBinary, self.PytestBinary(runner_pex))
def test_activate_interpreter_different_from_current():
    """Activation must succeed for a pex built against an interpreter other
    than the one running the test (py2 test uses a py3 interpreter and
    vice versa)."""
    with temporary_dir() as pex_root:
        # Pick the "other" major version relative to the running interpreter.
        interp_version = '3.6.3' if PY2 else '2.7.10'
        custom_interpreter = get_interpreter(
            python_interpreter=ensure_python_interpreter(interp_version),
            interpreter_cache_dir=os.path.join(pex_root, 'interpreters'),
            repos=None,  # Default to PyPI.
            use_wheel=True)
        pex_info = PexInfo.default(custom_interpreter)
        pex_info.pex_root = pex_root
        with temporary_dir() as pex_chroot:
            pex_builder = PEXBuilder(path=pex_chroot,
                                     interpreter=custom_interpreter,
                                     pex_info=pex_info)
            with make_bdist(installer_impl=WheelInstaller,
                            interpreter=custom_interpreter) as bdist:
                pex_builder.add_distribution(bdist)
                pex_builder.set_entry_point('sys:exit')
                pex_builder.freeze()

                pex = PEX(pex_builder.path(), interpreter=custom_interpreter)
                try:
                    # _activate() must not exit; SystemExit here means activation
                    # itself (not the 'sys:exit' entry point) failed.
                    pex._activate()
                except SystemExit as e:
                    pytest.fail('PEX activation of %s failed with %s' % (pex, e))
def nsutil_pex(self):
    """Return (building and caching on first use) a helper pex whose main
    prints, one per line, each argv path that declares a namespace package.

    The chroot is keyed by interpreter version so different interpreters get
    separate builds.
    """
    interpreter = self.context.products.get_data(PythonInterpreter)
    chroot = os.path.join(self.workdir, 'nsutil', interpreter.version_string)
    if not os.path.exists(chroot):
        pex_info = PexInfo.default(interpreter=interpreter)
        with safe_concurrent_creation(chroot) as scratch:
            builder = PEXBuilder(path=scratch, interpreter=interpreter,
                                 pex_info=pex_info, copy=True)
            with temporary_file(binary_mode=False) as fp:
                # Embed the source of `declares_namespace_package` directly into
                # the generated main module so the pex is self-contained.
                declares_namespace_package_code = inspect.getsource(declares_namespace_package)
                fp.write(textwrap.dedent("""
                    import sys

                    {declares_namespace_package_code}

                    if __name__ == '__main__':
                        for path in sys.argv[1:]:
                            if declares_namespace_package(path):
                                print(path)
                """).strip().format(declares_namespace_package_code=declares_namespace_package_code))
                fp.close()
                builder.set_executable(filename=fp.name, env_filename='main.py')
                builder.freeze()
    return PEX(pex=chroot, interpreter=interpreter)
def _test_runner(self, targets, workunit):
    """Context manager yielding (pytest pex, extra CLI args) for `targets`."""
    interp = self.select_interpreter_for_targets(targets)
    info = PexInfo.default()
    info.entry_point = 'pytest'
    # We hard-code the requirements here because they can't be upgraded without
    # major changes to this code; the PyTest subsystem carries the versions for
    # the new PytestRun task. This one is about to be deprecated anyway.
    pinned_specs = (
        'pytest>=2.6,<2.7',
        'pytest-timeout<1.0.0',
        'pytest-cov>=1.8,<1.9',
        'unittest2>=0.6.0,<=1.9.0',
    )
    testing_reqs = [PythonRequirement(spec) for spec in pinned_specs]
    chroot = self.cached_chroot(interpreter=interp,
                                pex_info=info,
                                targets=targets,
                                platforms=('current',),
                                extra_requirements=testing_reqs)
    runner = chroot.pex()
    with self._maybe_shard() as shard_args, \
         self._maybe_emit_junit_xml(targets) as junit_args, \
         self._maybe_emit_coverage_data(targets, chroot.path(), runner, workunit) as coverage_args:
        yield runner, shard_args + junit_args + coverage_args
def _test_runner(self, targets, workunit):
    """Yield a pytest pex and the argument list to run it with for `targets`."""
    interpreter = self.select_interpreter_for_targets(targets)
    pex_info = PexInfo.default()
    pex_info.entry_point = 'pytest'
    # We hard-code the requirements here because they can't be upgraded without
    # major changes to this code; the PyTest subsystem now contains the versions
    # for the new PytestRun task. This one is about to be deprecated anyway.
    requirement_specs = [
        'pytest>=2.6,<2.7',
        'pytest-timeout<1.0.0',
        'pytest-cov>=1.8,<1.9',
        'unittest2>=0.6.0,<=1.9.0',
    ]
    extra = [PythonRequirement(spec) for spec in requirement_specs]
    chroot = self.cached_chroot(interpreter=interpreter,
                                pex_info=pex_info,
                                targets=targets,
                                platforms=('current',),
                                extra_requirements=extra)
    runner = chroot.pex()
    with self._maybe_shard() as shard_args:
        with self._maybe_emit_junit_xml(targets) as junit_args:
            with self._maybe_emit_coverage_data(targets, chroot.path(), runner,
                                                workunit) as coverage_args:
                yield runner, shard_args + junit_args + coverage_args
def setup_repl_session(self, targets):
    """Build the pex used to back a REPL session (IPython when enabled)."""
    opts = self.get_options()
    entry_point = opts.ipython_entry_point if opts.ipython else 'code:interact'
    info = PexInfo.default()
    info.entry_point = entry_point
    return self.create_pex(info)
def _test_runner(self, targets, sources_map):
    """Yield a pytest pex plus the conftest/coverage args to invoke it with."""
    info = PexInfo.default()
    info.entry_point = 'pytest'
    runner = self.create_pex(info)
    with self._conftest(sources_map) as conftest, \
         self._maybe_emit_coverage_data(targets, runner) as coverage_args:
        yield runner, [conftest] + coverage_args
def __init__(
    self,
    pex=sys.argv[0],  # type: str
    interpreter=None,  # type: Optional[PythonInterpreter]
    env=ENV,  # type: Variables
    verify_entry_point=False,  # type: bool
):
    # type: (...) -> None
    """Prepare a PEX wrapper for `pex`.

    Reads the pex's own PEX-INFO and the PEX_* environment overrides up front;
    activation state (`_envs`, `_activated_dists`) is created lazily elsewhere.
    """
    self._pex = pex
    self._vars = env
    self._interpreter = interpreter or PythonInterpreter.get()
    # Metadata baked into the pex file and overrides taken from the environment.
    self._pex_info = PexInfo.from_pex(self._pex)
    self._pex_info_overrides = PexInfo.from_env(env=env)
    self._envs = []  # type: List[PEXEnvironment]
    self._activated_dists = None  # type: Optional[Iterable[Distribution]]
    if verify_entry_point:
        self._do_entry_point_verification()
def temporary_chroot(self, interpreter, pex_info, targets, platforms,
                     extra_requirements=None, executable_file_content=None):
    """Context manager yielding a freshly built, throwaway PythonChroot.

    :param interpreter: Interpreter to build the chroot against.
    :param pex_info: Optional PexInfo customizations; defaults to PexInfo.default().
    :param targets: Targets to build into the chroot.
    :param platforms: Platforms to resolve for.
    :param extra_requirements: Optional extra python requirements.
    :param executable_file_content: Optional content for the executable file.
    """
    path = tempfile.mkdtemp()  # Not a contextmanager: chroot.delete() will clean this up anyway.
    pex_info = pex_info or PexInfo.default()
    chroot = self._build_chroot(path, interpreter, pex_info, targets, platforms,
                                extra_requirements, executable_file_content)
    try:
        yield chroot
    finally:
        # BUGFIX: previously delete() was unconditionally after the yield, so an
        # exception raised in the caller's with-block skipped cleanup and leaked
        # the mkdtemp directory. try/finally guarantees the chroot is removed.
        chroot.delete()
def execute(self):
    """Build a pytest pex pinned to the selected interpreter and register the
    PytestBinary product; no-op when there are no python test targets."""
    relevant = self.context.targets(lambda t: isinstance(t, PythonTests))
    if not relevant:
        return
    info = PexInfo.default()
    info.entry_point = 'pytest'
    runner = self.create_pex(info, pin_selected_interpreter=True)
    selected_interpreter = self.context.products.get_data(PythonInterpreter)
    self.context.products.register_data(self.PytestBinary,
                                        self.PytestBinary(selected_interpreter, runner))
def __init__(self, interpreter, pex):
    """Wrap `pex` so it survives coverage's sys.path manipulation.

    Here we hack around `coverage.cmdline` nuking the 0th element of `sys.path`
    (our root pex) by ensuring the root pex is on the sys.path twice.
    See: https://github.com/nedbat/coveragepy/issues/715
    """
    pex_path = pex.path()
    pex_info = PexInfo.from_pex(pex_path)
    # Appending the pex's own path to its pex_path puts it on sys.path twice.
    pex_info.merge_pex_path(pex_path)  # We're now on the sys.path twice.
    # Re-freeze the pex in place so the doubled path is persisted in PEX-INFO.
    PEXBuilder(pex_path, interpreter=interpreter, pex_info=pex_info).freeze()
    self._pex = PEX(pex=pex_path, interpreter=interpreter)
    self._interpreter = interpreter
def _create_binary(self, binary_tgt, results_dir): """Create a .pex file for the specified binary target.""" # Note that we rebuild a chroot from scratch, instead of using the REQUIREMENTS_PEX # and PYTHON_SOURCES products, because those products are already-built pexes, and there's # no easy way to merge them into a single pex file (for example, they each have a __main__.py, # metadata, and so on, which the merging code would have to handle specially). interpreter = self.context.products.get_data(PythonInterpreter) with temporary_dir() as tmpdir: # Create the pex_info for the binary. run_info_dict = self.context.run_tracker.run_info.get_as_dict() build_properties = PexInfo.make_build_properties() build_properties.update(run_info_dict) pex_info = binary_tgt.pexinfo.copy() pex_info.build_properties = build_properties pex_builder = PexBuilderWrapper.Factory.create( builder=PEXBuilder(path=tmpdir, interpreter=interpreter, pex_info=pex_info, copy=True), log=self.context.log) if binary_tgt.shebang: self.context.log.info('Found Python binary target {} with customized shebang, using it: {}' .format(binary_tgt.name, binary_tgt.shebang)) pex_builder.set_shebang(binary_tgt.shebang) else: self.context.log.debug('No customized shebang found for {}'.format(binary_tgt.name)) # Find which targets provide sources and which specify requirements. source_tgts = [] req_tgts = [] constraint_tgts = [] for tgt in binary_tgt.closure(exclude_scopes=Scopes.COMPILE): if has_python_sources(tgt) or has_resources(tgt): source_tgts.append(tgt) elif has_python_requirements(tgt): req_tgts.append(tgt) if is_python_target(tgt): constraint_tgts.append(tgt) # Add interpreter compatibility constraints to pex info. This will first check the targets for any # constraints, and if they do not have any will resort to the global constraints. pex_builder.add_interpreter_constraints_from(constraint_tgts) # Dump everything into the builder's chroot. 
for tgt in source_tgts: pex_builder.add_sources_from(tgt) # We need to ensure that we are resolving for only the current platform if we are # including local python dist targets that have native extensions. self._python_native_code_settings.check_build_for_current_platform_only(self.context.targets()) pex_builder.add_requirement_libs_from(req_tgts, platforms=binary_tgt.platforms) # Build the .pex file. pex_path = os.path.join(results_dir, '{}.pex'.format(binary_tgt.name)) pex_builder.build(pex_path) return pex_path
def _test_runner(self, targets, workunit):
    """Yield a pytest pex and the shard/junit/coverage args to run it with."""
    info = PexInfo.default()
    info.entry_point = 'pytest'
    runner = self.create_pex(info)
    with self._maybe_shard() as shard_args, \
         self._maybe_emit_junit_xml(targets) as junit_args, \
         self._maybe_emit_coverage_data(targets, runner, workunit) as coverage_args:
        yield runner, shard_args + junit_args + coverage_args
def cached_chroot(self, interpreter, pex_info, targets, platforms, extra_requirements=None, executable_file_content=None): """Returns a cached PythonChroot created with the specified args. The returned chroot will be cached for future use. TODO: Garbage-collect old chroots, so they don't pile up? TODO: Ideally chroots would just be products produced by some other task. But that's a bit too complicated to implement right now, as we'd need a way to request chroots for a variety of sets of targets. """ # This PexInfo contains any customizations specified by the caller. # The process of building a pex modifies it further. pex_info = pex_info or PexInfo.default() path = self._chroot_path(PythonSetup.global_instance(), interpreter, pex_info, targets, platforms, extra_requirements, executable_file_content) if not os.path.exists(path): path_tmp = path + '.tmp' self._build_chroot(path_tmp, interpreter, pex_info, targets, platforms, extra_requirements, executable_file_content) shutil.move(path_tmp, path) # We must read the PexInfo that was frozen into the pex, so we get the modifications # created when that pex was built. pex_info = PexInfo.from_pex(path) # Now create a PythonChroot wrapper without dumping it. builder = PEXBuilder(path=path, interpreter=interpreter, pex_info=pex_info) chroot = PythonChroot( context=self.context, python_setup=PythonSetup.global_instance(), python_repos=PythonRepos.global_instance(), interpreter=interpreter, builder=builder, targets=targets, platforms=platforms, extra_requirements=extra_requirements) # TODO: Doesn't really need to be a contextmanager, but it's convenient to make it so # while transitioning calls to temporary_chroot to calls to cached_chroot. # We can revisit after that transition is complete. yield chroot
def pexinfo(self):
    """Assemble a PexInfo reflecting this target's repos, indices and flags."""
    info = PexInfo.default()
    for repository in self._repositories:
        info.add_repository(repository)
    for index_url in self._indices:
        info.add_index(index_url)
    info.entry_point = self.entry_point
    info.zip_safe = self._zip_safe
    info.always_write_cache = self._always_write_cache
    info.inherit_path = self._inherit_path
    info.ignore_errors = self._ignore_errors
    return info
def _run_mypy(self, py3_interpreter, mypy_args, **kwargs):
    """Run mypy (at the configured version) under `py3_interpreter`.

    The merged mypy pex is cached under the workdir, keyed by interpreter
    identity and mypy version.
    """
    info = PexInfo.default()
    info.entry_point = 'mypy'
    version = self.get_options().mypy_version
    requirement_pex = self.resolve_requirement_strings(
        py3_interpreter, ['mypy=={}'.format(version)])
    merged_path = os.path.realpath(
        os.path.join(self.workdir, str(py3_interpreter.identity), version))
    if not os.path.isdir(merged_path):
        self.merge_pexes(merged_path, info, py3_interpreter, [requirement_pex])
    wrapped = WrappedPEX(PEX(merged_path, py3_interpreter), py3_interpreter)
    return wrapped.run(mypy_args, **kwargs)
def create_binary(self, binary):
    """Package `binary` into <distdir>/<name>.pex via a temporary chroot."""
    interpreter = self.select_interpreter_for_targets(binary.closure())
    props = PexInfo.make_build_properties()
    props.update(self.context.run_tracker.run_info.get_as_dict())
    info = binary.pexinfo.copy()
    info.build_properties = props
    with self.temporary_chroot(interpreter=interpreter,
                               pex_info=info,
                               targets=[binary],
                               platforms=binary.platforms) as chroot:
        out_path = os.path.join(self._distdir, '{}.pex'.format(binary.name))
        chroot.package_pex(out_path)
def bootstrap_conan(self):
    """Build (once, in the pants cache dir) and return a conan binary pex."""
    info = PexInfo.default()
    info.entry_point = 'conans.conan'
    pex_path = os.path.join(get_pants_cachedir(), 'conan_support', 'conan_binary')
    interpreter = PythonInterpreter.get()
    if not os.path.exists(pex_path):
        with safe_concurrent_creation(pex_path) as staging_path:
            builder = PEXBuilder(staging_path, interpreter, pex_info=info)
            requirements = [PythonRequirement(r)
                            for r in self.get_options().conan_requirements]
            dump_requirements(builder, interpreter, requirements, logger)
            builder.freeze()
    return self.ConanBinary(pex=PEX(pex_path, interpreter))
def bootstrap(self, interpreter, pex_file_path, extra_reqs=None):
    """Build (if not already cached) and return the tool PEX at `pex_file_path`."""
    # Caching is done just by checking if the file at the specified path is
    # already executable; an executable file means a previous build completed.
    if is_executable(pex_file_path):
        return PEX(pex_file_path, interpreter)
    info = PexInfo.default(interpreter=interpreter)
    if self.entry_point is not None:
        info.entry_point = self.entry_point
    with safe_concurrent_creation(pex_file_path) as staging_path:
        requirements = list(self.base_requirements) + list(extra_reqs or [])
        wrapper = PexBuilderWrapper.Factory.create(
            builder=PEXBuilder(interpreter=interpreter, pex_info=info))
        wrapper.add_resolved_requirements(requirements, platforms=['current'])
        wrapper.build(staging_path)
    return PEX(pex_file_path, interpreter)
def _test_runner(self, targets, workunit):
    """Yield a pytest pex and the shard/junit/coverage args for `targets`."""
    interp = self.select_interpreter_for_targets(targets)
    info = PexInfo.default()
    info.entry_point = "pytest"
    chroot = self.cached_chroot(
        interpreter=interp,
        pex_info=info,
        targets=targets,
        platforms=("current",),
        extra_requirements=self._TESTING_TARGETS,
    )
    runner = chroot.pex()
    with self._maybe_shard() as shard_args, \
         self._maybe_emit_junit_xml(targets) as junit_args, \
         self._maybe_emit_coverage_data(targets, chroot.path(), runner, workunit) as coverage_args:
        yield runner, shard_args + junit_args + coverage_args
def test_from_env():
    """PexInfo.from_env must map PEX_* environment variables onto the
    corresponding PexInfo fields (string flags becoming booleans)."""
    environ = {
        'PEX_ROOT': '/pex_root',
        'PEX_MODULE': 'entry:point',
        'PEX_SCRIPT': 'script.sh',
        'PEX_FORCE_LOCAL': 'true',
        'PEX_INHERIT_PATH': 'true',
        'PEX_IGNORE_ERRORS': 'true',
        'PEX_ALWAYS_CACHE': 'true',
    }
    expected = {
        'pex_root': '/pex_root',
        'entry_point': 'entry:point',
        'script': 'script.sh',
        'zip_safe': False,
        'inherit_path': True,
        'ignore_errors': True,
        'always_write_cache': True,
    }
    assert_same_info(PexInfo(info=expected),
                     PexInfo.from_env(env=Variables(environ=environ)))
def setup_repl_session(self, targets):
    """Build and return the pex backing a REPL session over `targets`.

    Uses IPython (with its configured entry point and extra requirements) when
    the ipython option is enabled, else the stdlib `code:interact` REPL.
    """
    interpreter = self.select_interpreter_for_targets(targets)
    extra_requirements = []
    opts = self.get_options()
    if opts.ipython:
        entry_point = opts.ipython_entry_point
        extra_requirements.extend(PythonRequirement(req)
                                  for req in opts.ipython_requirements)
    else:
        entry_point = 'code:interact'
    info = PexInfo.default()
    info.entry_point = entry_point
    chroot = self.cached_chroot(interpreter=interpreter,
                                pex_info=info,
                                targets=targets,
                                platforms=None,
                                extra_requirements=extra_requirements)
    return chroot.pex()
def _test_runner(self, targets, workunit):
    """Yield a pytest pex (built with the PyTest subsystem's requirements) and
    the shard/junit/coverage args to run it with."""
    interp = self.select_interpreter_for_targets(targets)
    info = PexInfo.default()
    info.entry_point = 'pytest'
    reqs = [PythonRequirement(s)
            for s in PyTest.global_instance().get_requirement_strings()]
    chroot = self.cached_chroot(interpreter=interp,
                                pex_info=info,
                                targets=targets,
                                platforms=('current',),
                                extra_requirements=reqs)
    runner = chroot.pex()
    with self._maybe_shard() as shard_args, \
         self._maybe_emit_junit_xml(targets) as junit_args, \
         self._maybe_emit_coverage_data(targets, chroot.path(), runner, workunit) as coverage_args:
        yield runner, shard_args + junit_args + coverage_args
def generate_targets_map(self, targets, classpath_products=None):
    """Generates a dictionary containing all pertinent information about the target graph.

    The return dictionary is suitable for serialization by json.dumps.
    :param targets: The list of targets to generate the map for.
    :param classpath_products: Optional classpath_products. If not provided when the
      --libraries option is `True`, this task will perform its own jar resolution.
    """
    targets_map = {}
    resource_target_map = {}
    python_interpreter_targets_mapping = defaultdict(list)

    if self.get_options().libraries:
        # NB(gmalmquist): This supports mocking the classpath_products in tests.
        if classpath_products is None:
            classpath_products = self.resolve_jars(targets)
    else:
        classpath_products = None

    def process_target(current_target):
        """
        :type current_target:pants.build_graph.target.Target
        """
        def get_target_type(target):
            # Classify a target as test / test-resource / resource / source.
            if target.is_test:
                return ExportTask.SourceRootTypes.TEST
            else:
                if (isinstance(target, Resources) and
                        target in resource_target_map and
                        resource_target_map[target].is_test):
                    return ExportTask.SourceRootTypes.TEST_RESOURCE
                elif isinstance(target, Resources):
                    return ExportTask.SourceRootTypes.RESOURCE
                else:
                    return ExportTask.SourceRootTypes.SOURCE

        info = {
            'targets': [],
            'libraries': [],
            'roots': [],
            'target_type': get_target_type(current_target),
            'is_code_gen': current_target.is_codegen,
            'pants_target_type': self._get_pants_target_alias(type(current_target))
        }

        if not current_target.is_synthetic:
            info['globs'] = current_target.globs_relative_to_buildroot()
            if self.get_options().sources:
                info['sources'] = list(current_target.sources_relative_to_buildroot())

        if isinstance(current_target, PythonRequirementLibrary):
            reqs = current_target.payload.get_field_value('requirements', set())
            """:type : set[pants.backend.python.python_requirement.PythonRequirement]"""
            info['requirements'] = [req.key for req in reqs]

        if isinstance(current_target, PythonTarget):
            interpreter_for_target = self.select_interpreter_for_targets([current_target])
            if interpreter_for_target is None:
                raise TaskError('Unable to find suitable interpreter for {}'
                                .format(current_target.address))
            python_interpreter_targets_mapping[interpreter_for_target].append(current_target)
            info['python_interpreter'] = str(interpreter_for_target.identity)

        def iter_transitive_jars(jar_lib):
            """
            :type jar_lib: :class:`pants.backend.jvm.targets.jar_library.JarLibrary`
            :rtype: :class:`collections.Iterator` of
                    :class:`pants.backend.jvm.jar_dependency_utils.M2Coordinate`
            """
            if classpath_products:
                jar_products = classpath_products.get_artifact_classpath_entries_for_targets(
                    (jar_lib,))
                for _, jar_entry in jar_products:
                    coordinate = jar_entry.coordinate
                    # We drop classifier and type_ since those fields are represented in the global
                    # libraries dict and here we just want the key into that dict (see `_jar_id`).
                    yield M2Coordinate(org=coordinate.org, name=coordinate.name, rev=coordinate.rev)

        target_libraries = OrderedSet()
        if isinstance(current_target, JarLibrary):
            target_libraries = OrderedSet(iter_transitive_jars(current_target))
        for dep in current_target.dependencies:
            info['targets'].append(dep.address.spec)
            if isinstance(dep, JarLibrary):
                for jar in dep.jar_dependencies:
                    target_libraries.add(M2Coordinate(jar.org, jar.name, jar.rev))
                # Add all the jars pulled in by this jar_library
                target_libraries.update(iter_transitive_jars(dep))
            if isinstance(dep, Resources):
                resource_target_map[dep] = current_target

        if isinstance(current_target, ScalaLibrary):
            for dep in current_target.java_sources:
                info['targets'].append(dep.address.spec)
                process_target(dep)

        if isinstance(current_target, JvmTarget):
            info['excludes'] = [self._exclude_id(exclude)
                                for exclude in current_target.excludes]
            info['platform'] = current_target.platform.name

        # NB: Python 2 tuple-parameter lambda; under Python 2 `map` returns a list.
        info['roots'] = map(lambda (source_root, package_prefix): {
            'source_root': source_root,
            'package_prefix': package_prefix
        }, self._source_roots_for_target(current_target))

        if classpath_products:
            info['libraries'] = [self._jar_id(lib) for lib in target_libraries]
        targets_map[current_target.address.spec] = info

    for target in targets:
        process_target(target)

    jvm_platforms_map = {
        'default_platform': JvmPlatform.global_instance().default_platform.name,
        'platforms': {
            str(platform_name): {
                'target_level': str(platform.target_level),
                'source_level': str(platform.source_level),
                'args': platform.args,
            } for platform_name, platform in
            JvmPlatform.global_instance().platforms_by_name.items()
        }
    }

    graph_info = {
        'version': self.DEFAULT_EXPORT_VERSION,
        'targets': targets_map,
        'jvm_platforms': jvm_platforms_map,
    }

    jvm_distributions = DistributionLocator.global_instance().all_jdk_paths()
    if jvm_distributions:
        graph_info['jvm_distributions'] = jvm_distributions

    if classpath_products:
        graph_info['libraries'] = self._resolve_jars_info(targets, classpath_products)

    if python_interpreter_targets_mapping:
        interpreters = self.interpreter_cache.select_interpreter(
            python_interpreter_targets_mapping.keys())
        default_interpreter = interpreters[0]

        interpreters_info = {}
        for interpreter, targets in six.iteritems(python_interpreter_targets_mapping):
            chroot = self.cached_chroot(
                interpreter=interpreter,
                pex_info=PexInfo.default(),
                targets=targets
            )
            interpreters_info[str(interpreter.identity)] = {
                'binary': interpreter.binary,
                'chroot': chroot.path()
            }

        graph_info['python_setup'] = {
            'default_interpreter': str(default_interpreter.identity),
            'interpreters': interpreters_info
        }

    return graph_info
def assert_pex_attribute(self, pex, attr, value):
    """Assert that `pex` exists on disk and its PEX-INFO field `attr` equals `value`."""
    self.assertTrue(os.path.exists(pex))
    actual = getattr(PexInfo.from_pex(pex), attr)
    self.assertEqual(actual, value)