Example #1
  def _test_runner(self, targets, workunit):
    interpreter = self.select_interpreter_for_targets(targets)
    pex_info = PexInfo.default()
    pex_info.entry_point = 'pytest'

    # We hard-code the requirements here because they can't be upgraded without
    # major changes to this code, and the PyTest subsystem now contains the versions
    # for the new PytestRun task.  This one is about to be deprecated anyway.
    testing_reqs = [PythonRequirement(s) for s in [
      'pytest>=2.6,<2.7',
      'pytest-timeout<1.0.0',
      'pytest-cov>=1.8,<1.9',
      'unittest2>=0.6.0,<=1.9.0',
    ]]

    chroot = self.cached_chroot(interpreter=interpreter,
                                pex_info=pex_info,
                                targets=targets,
                                platforms=('current',),
                                extra_requirements=testing_reqs)
    pex = chroot.pex()
    with self._maybe_shard() as shard_args:
      with self._maybe_emit_junit_xml(targets) as junit_args:
        with self._maybe_emit_coverage_data(targets,
                                            chroot.path(),
                                            pex,
                                            workunit) as coverage_args:
          yield pex, shard_args + junit_args + coverage_args
Example #2
File: python_task.py  Project: scode/pants
  def cached_chroot(self, interpreter, pex_info, targets, platforms,
                    extra_requirements=None, executable_file_content=None):
    """Returns a cached PythonChroot created with the specified args.

    The returned chroot will be cached for future use.

    TODO: Garbage-collect old chroots, so they don't pile up?
    TODO: Ideally chroots would just be products produced by some other task. But that's
          a bit too complicated to implement right now, as we'd need a way to request
          chroots for a variety of sets of targets.
    """
    # This PexInfo contains any customizations specified by the caller.
    # The process of building a pex modifies it further.
    pex_info = pex_info or PexInfo.default()

    path = self._chroot_path(interpreter, pex_info, targets, platforms, extra_requirements,
                             executable_file_content)
    if not os.path.exists(path):
      path_tmp = path + '.tmp'
      self._build_chroot(path_tmp, interpreter, pex_info, targets, platforms,
                         extra_requirements, executable_file_content)
      shutil.move(path_tmp, path)

    # We must read the PexInfo that was frozen into the pex, so we get the modifications
    # created when that pex was built.
    pex_info = PexInfo.from_pex(path)
    # Now create a PythonChroot wrapper without dumping it.
    builder = PEXBuilder(path=path, interpreter=interpreter, pex_info=pex_info, copy=True)
    return self.create_chroot(interpreter=interpreter,
                              builder=builder,
                              targets=targets,
                              platforms=platforms,
                              extra_requirements=extra_requirements)
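The comment in the example above notes that the PexInfo frozen into a built pex must be read back to pick up build-time modifications. A minimal sketch of that round trip, assuming the usual pex import paths and that freeze() writes the PEX-INFO file into the chroot as the example implies:

from pex.pex_builder import PEXBuilder
from pex.pex_info import PexInfo

# Customize a default PexInfo, freeze it into a chroot, then read it back.
pex_info = PexInfo.default()
pex_info.entry_point = 'pytest'

builder = PEXBuilder(pex_info=pex_info)  # builds into a temporary chroot
builder.freeze()

# PexInfo.from_pex() reads the PEX-INFO written at build time, including any
# modifications made while the pex was built.
frozen_info = PexInfo.from_pex(builder.path())
print(frozen_info.entry_point)  # expected: 'pytest'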
Example #3
  def nsutil_pex(self):
    interpreter = self.context.products.get_data(PythonInterpreter)
    chroot = os.path.join(self.workdir, 'nsutil', interpreter.version_string)
    if not os.path.exists(chroot):
      pex_info = PexInfo.default(interpreter=interpreter)
      with safe_concurrent_creation(chroot) as scratch:
        builder = PEXBuilder(path=scratch, interpreter=interpreter, pex_info=pex_info, copy=True)
        with temporary_file(binary_mode=False) as fp:
          declares_namespace_package_code = inspect.getsource(declares_namespace_package)
          fp.write(textwrap.dedent("""
            import sys


            {declares_namespace_package_code}


            if __name__ == '__main__':
              for path in sys.argv[1:]:
                if declares_namespace_package(path):
                  print(path)
          """).strip().format(declares_namespace_package_code=declares_namespace_package_code))
          fp.close()
          builder.set_executable(filename=fp.name, env_filename='main.py')
          builder.freeze()
    return PEX(pex=chroot, interpreter=interpreter)
Example #4
File: python_repl.py  Project: pcurry/pants
  def execute(self, **pex_run_kwargs):
    (accept_predicate, reject_predicate) = Target.lang_discriminator('python')
    targets = self.require_homogeneous_targets(accept_predicate, reject_predicate)
    if targets:
      # We can't throw if the target isn't a python target, because perhaps we were called on a
      # JVM target, in which case we have to no-op and let scala repl do its thing.
      # TODO(benjy): Some more elegant way to coordinate how tasks claim targets.
      interpreter = self.select_interpreter_for_targets(targets)

      extra_requirements = []
      if self.get_options().ipython:
        entry_point = self.get_options().ipython_entry_point
        for req in self.get_options().ipython_requirements:
          extra_requirements.append(PythonRequirement(req))
      else:
        entry_point = 'code:interact'

      pex_info = PexInfo.default()
      pex_info.entry_point = entry_point
      with self.temporary_chroot(interpreter=interpreter,
                                 pex_info=pex_info,
                                 targets=targets,
                                 platforms=None,
                                 extra_requirements=extra_requirements) as chroot:
        pex = chroot.pex()
        self.context.release_lock()
        with stty_utils.preserve_stty_settings():
          with self.context.new_workunit(name='run', labels=[WorkUnit.RUN]):
            po = pex.run(blocking=False, **pex_run_kwargs)
            try:
              return po.wait()
            except KeyboardInterrupt:
              pass
Example #5
File: pex_builder.py  Project: jsirois/pex
  def __init__(self, path=None, interpreter=None, chroot=None, pex_info=None, preamble=None,
               copy=False):
    """Initialize a pex builder.

    :keyword path: The path to write the PEX as it is built.  If ``None`` is specified,
      a temporary directory will be created.
    :keyword interpreter: The interpreter to use to build this PEX environment.  If ``None``
      is specified, the current interpreter is used.
    :keyword chroot: If specified, preexisting :class:`Chroot` to use for building the PEX.
    :keyword pex_info: A preexisting PexInfo to use to build the PEX.
    :keyword preamble: If supplied, execute this code prior to bootstrapping this PEX
      environment.
    :type preamble: str
    :keyword copy: If False, attempt to create the pex environment via hard-linking, falling
                   back to copying across devices. If True, always copy.

    .. versionchanged:: 0.8
      The temporary directory created when ``path`` is not specified is now garbage collected on
      interpreter exit.
    """
    self._interpreter = interpreter or PythonInterpreter.get()
    self._chroot = chroot or Chroot(path or safe_mkdtemp())
    self._pex_info = pex_info or PexInfo.default(self._interpreter)
    self._preamble = preamble or ''
    self._copy = copy

    self._shebang = self._interpreter.identity.hashbang()
    self._logger = logging.getLogger(__name__)
    self._frozen = False
    self._distributions = set()
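The keyword arguments documented above all have defaults; a minimal end-to-end sketch that builds and runs a trivial pex, using only calls that appear in the other examples on this page (import paths and the output path are assumptions):

from pex.interpreter import PythonInterpreter
from pex.pex import PEX
from pex.pex_builder import PEXBuilder
from pex.pex_info import PexInfo

interpreter = PythonInterpreter.get()
pex_info = PexInfo.default(interpreter)

builder = PEXBuilder(interpreter=interpreter, pex_info=pex_info)  # temporary chroot
builder.set_entry_point('code:interact')  # stdlib REPL, the same entry point the repl examples use
builder.build('/tmp/repl.pex')  # hypothetical output path

# Run the built pex; blocking=False returns a process handle as in example #4.
po = PEX('/tmp/repl.pex', interpreter=interpreter).run(blocking=False)
po.wait()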
Example #6
File: pytest_run.py  Project: wonlay/pants
  def _test_runner(self, targets, sources_map):
    pex_info = PexInfo.default()
    pex_info.entry_point = 'pytest'
    pex = self.create_pex(pex_info)

    with self._conftest(sources_map) as conftest:
      with self._maybe_emit_coverage_data(targets, pex) as coverage_args:
        yield pex, [conftest] + coverage_args
Example #7
    def _test_runner(self, targets, sources_map):
        pex_info = PexInfo.default()
        pex_info.entry_point = 'pytest'
        pex = self.create_pex(pex_info)

        with self._conftest(sources_map) as conftest:
            with self._maybe_emit_coverage_data(targets, pex) as coverage_args:
                yield pex, [conftest] + coverage_args
Example #8
File: python_task.py  Project: pcurry/pants
 def temporary_chroot(self, interpreter, pex_info, targets, platforms,
                      extra_requirements=None, executable_file_content=None):
   path = tempfile.mkdtemp()  # Not a contextmanager: chroot.delete() will clean this up anyway.
   pex_info = pex_info or PexInfo.default()
   chroot = self._build_chroot(path, interpreter, pex_info, targets, platforms,
                               extra_requirements, executable_file_content)
   yield chroot
   chroot.delete()
Example #9
 def setup_repl_session(self, targets):
   if self.get_options().ipython:
     entry_point = self.get_options().ipython_entry_point
   else:
     entry_point = 'code:interact'
   pex_info = PexInfo.default()
   pex_info.entry_point = entry_point
   return self.create_pex(pex_info)
Example #10
 def pexinfo(self):
     info = PexInfo.default()
     info.zip_safe = self.payload.zip_safe
     info.always_write_cache = self.payload.always_write_cache
     info.inherit_path = self.payload.inherit_path
     info.entry_point = self.entry_point
     info.ignore_errors = self.payload.ignore_errors
     info.emit_warnings = self.payload.emit_warnings
     return info
Example #11
    def _test_runner(self, targets, workunit):
        pex_info = PexInfo.default()
        pex_info.entry_point = 'pytest'
        pex = self.create_pex(pex_info)

        with self._maybe_shard() as shard_args:
            with self._maybe_emit_coverage_data(targets, pex,
                                                workunit) as coverage_args:
                yield pex, shard_args + coverage_args
Example #12
def test_can_add_handles_optional_build_tag_in_wheel(
    python_35_interpreter, wheel_distribution, wheel_is_linux
):
    # type: (PythonInterpreter, str, bool) -> None
    pex_environment = PEXEnvironment(
        pex="", pex_info=PexInfo.default(python_35_interpreter), interpreter=python_35_interpreter
    )
    native_wheel = IS_LINUX and wheel_is_linux
    assert pex_environment.can_add(wheel_distribution) is native_wheel
Example #13
 def execute(self):
     if not self.context.targets(lambda t: isinstance(t, PythonTests)):
         return
     pex_info = PexInfo.default()
     pex_info.entry_point = 'pytest'
     pytest_binary = self.create_pex(pex_info)
     interpreter = self.context.products.get_data(PythonInterpreter)
     self.context.products.register_data(
         self.PytestBinary, self.PytestBinary(interpreter, pytest_binary))
Example #14
 def execute(self):
   if not self.context.targets(lambda t: isinstance(t, PythonTests)):
     return
   pex_info = PexInfo.default()
   pex_info.entry_point = 'pytest'
   pytest_binary = self.create_pex(pex_info, pin_selected_interpreter=True)
   interpreter = self.context.products.get_data(PythonInterpreter)
   self.context.products.register_data(self.PytestBinary,
                                       self.PytestBinary(interpreter, pytest_binary))
Example #15
def test_can_add_handles_optional_build_tag_in_wheel(python_35_interpreter,
                                                     wheel_filename,
                                                     wheel_is_linux):
    pex_environment = PEXEnvironment(
        pex="",
        pex_info=PexInfo.default(python_35_interpreter),
        interpreter=python_35_interpreter)
    native_wheel = IS_LINUX and wheel_is_linux
    assert pex_environment.can_add(
        Distribution(wheel_filename)) is native_wheel
Example #16
File: pytest_run.py  Project: simeonf/pants
  def _test_runner(self, targets, workunit):
    pex_info = PexInfo.default()
    pex_info.entry_point = 'pytest'
    pex = self.create_pex(pex_info)

    with self._maybe_shard() as shard_args:
      with self._maybe_emit_junit_xml(targets) as junit_args:
        with self._maybe_emit_coverage_data(targets,
                                            pex,
                                            workunit) as coverage_args:
          yield pex, shard_args + junit_args + coverage_args
Example #17
 def pexinfo(self):
     info = PexInfo.default()
     for repo in self.repositories:
         info.add_repository(repo)
     for index in self.indices:
         info.add_index(index)
     info.zip_safe = self.payload.zip_safe
     info.always_write_cache = self.payload.always_write_cache
     info.inherit_path = self.payload.inherit_path
     info.entry_point = self.entry_point
     info.ignore_errors = self.payload.ignore_errors
     return info
Example #18
 def _run_mypy(self, py3_interpreter, mypy_args, **kwargs):
     pex_info = PexInfo.default()
     pex_info.entry_point = 'mypy'
     chroot = self.cached_chroot(interpreter=py3_interpreter,
                                 pex_info=pex_info,
                                 targets=[],
                                 extra_requirements=[
                                     PythonRequirement('mypy=={}'.format(
                                         self.get_options().mypy_version))
                                 ])
     pex = chroot.pex()
     return pex.run(mypy_args, **kwargs)
Example #19
 def pexinfo(self):
   info = PexInfo.default()
   for repo in self._repositories:
     info.add_repository(repo)
   for index in self._indices:
     info.add_index(index)
   info.zip_safe = self._zip_safe
   info.always_write_cache = self._always_write_cache
   info.inherit_path = self._inherit_path
   info.entry_point = self.entry_point
   info.ignore_errors = self._ignore_errors
   return info
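Examples #17 and #19 copy target payload fields onto a fresh PexInfo; the same attributes can be set directly on PexInfo.default(). A small illustrative sketch (the attribute names are the ones used above; the entry point, values, and URLs are placeholders):

from pex.pex_info import PexInfo

info = PexInfo.default()
info.zip_safe = False
info.always_write_cache = True
info.inherit_path = 'fallback'  # 'false', 'fallback', or 'prefer' (see example #25)
info.entry_point = 'my_app.cli:main'  # hypothetical module:function entry point
info.ignore_errors = False
info.emit_warnings = False
# Extra resolution sources, mirroring add_repository/add_index above.
info.add_repository('https://repo.example.com/packages')
info.add_index('https://pypi.org/simple')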
Example #20
File: mypy_task.py  Project: benjyw/pants
  def _run_mypy(self, py3_interpreter, mypy_args, **kwargs):
    pex_info = PexInfo.default()
    pex_info.entry_point = 'mypy'
    mypy_version = self.get_options().mypy_version

    mypy_requirement_pex = self.resolve_requirement_strings(
      py3_interpreter, ['mypy=={}'.format(mypy_version)])

    path = os.path.realpath(os.path.join(self.workdir, str(py3_interpreter.identity), mypy_version))
    if not os.path.isdir(path):
      self.merge_pexes(path, pex_info, py3_interpreter, [mypy_requirement_pex])
    pex = WrappedPEX(PEX(path, py3_interpreter), py3_interpreter)
    return pex.run(mypy_args, **kwargs)
Example #21
File: mypy_task.py  Project: lgirault/pants
  def _run_mypy(self, py3_interpreter, mypy_args, **kwargs):
    pex_info = PexInfo.default()
    pex_info.entry_point = 'mypy'
    mypy_version = self.get_options().mypy_version

    mypy_requirement_pex = self.resolve_requirement_strings(
      py3_interpreter, ['mypy=={}'.format(mypy_version)])

    path = os.path.realpath(os.path.join(self.workdir, str(py3_interpreter.identity), mypy_version))
    if not os.path.isdir(path):
      self.merge_pexes(path, pex_info, py3_interpreter, [mypy_requirement_pex])
    pex = PEX(path, py3_interpreter)
    return pex.run(mypy_args, **kwargs)
Example #22
    def _get_mypy_pex(self, py3_interpreter: PythonInterpreter,
                      *extra_pexes: PEX) -> PEX:
        mypy_version = self._mypy_subsystem.options.version
        extras_hash = hash_utils.hash_all(
            hash_utils.hash_dir(Path(extra_pex.path()))
            for extra_pex in extra_pexes)

        path = Path(self.workdir, str(py3_interpreter.identity),
                    f"{mypy_version}-{extras_hash}")
        pex_dir = str(path)
        if not path.is_dir():
            mypy_requirement_pex = self.resolve_requirement_strings(
                py3_interpreter, [mypy_version])
            pex_info = PexInfo.default()
            pex_info.entry_point = "pants_mypy_launcher"
            with self.merged_pex(
                    path=pex_dir,
                    pex_info=pex_info,
                    interpreter=py3_interpreter,
                    pexes=[mypy_requirement_pex, *extra_pexes],
            ) as builder:
                with temporary_file(binary_mode=False) as exe_fp:
                    # MyPy searches for types for a package in packages containing a `py.typed` marker file
                    # or else in a sibling `<package>-stubs` package as per PEP-0561. Going further than that
                    # PEP, MyPy restricts its search to `site-packages`. Since PEX deliberately isolates
                    # itself from `site-packages` as part of its raison d'etre, we monkey-patch
                    # `site.getsitepackages` to look inside the scrubbed PEX sys.path before handing off to
                    # `mypy`.
                    #
                    # See:
                    #   https://mypy.readthedocs.io/en/stable/installed_packages.html#installed-packages
                    #   https://www.python.org/dev/peps/pep-0561/#stub-only-packages
                    exe_fp.write(
                        dedent("""
                            import runpy
                            import site
                            import sys


                            site.getsitepackages = lambda: sys.path[:]


                            runpy.run_module('mypy', run_name='__main__')
                            """))
                    exe_fp.flush()
                    builder.set_executable(
                        filename=exe_fp.name,
                        env_filename=f"{pex_info.entry_point}.py")
                builder.freeze(bytecode_compile=False)

        return PEX(pex_dir, py3_interpreter)
Example #23
File: conan.py  Project: foursquare/pants
 def bootstrap_conan(self):
   pex_info = PexInfo.default()
   pex_info.entry_point = 'conans.conan'
   conan_bootstrap_dir = os.path.join(get_pants_cachedir(), 'conan_support')
   conan_pex_path = os.path.join(conan_bootstrap_dir, 'conan_binary')
   interpreter = PythonInterpreter.get()
   if not os.path.exists(conan_pex_path):
     with safe_concurrent_creation(conan_pex_path) as safe_path:
       builder = PEXBuilder(safe_path, interpreter, pex_info=pex_info)
       reqs = [PythonRequirement(req) for req in self.get_options().conan_requirements]
       dump_requirements(builder, interpreter, reqs, logger)
       builder.freeze()
   conan_binary = PEX(conan_pex_path, interpreter)
   return self.ConanBinary(pex=conan_binary)
Example #24
    def cached_chroot(self,
                      interpreter,
                      pex_info,
                      targets,
                      platforms,
                      extra_requirements=None,
                      executable_file_content=None):
        """Returns a cached PythonChroot created with the specified args.

    The returned chroot will be cached for future use.

    TODO: Garbage-collect old chroots, so they don't pile up?
    TODO: Ideally chroots would just be products produced by some other task. But that's
          a bit too complicated to implement right now, as we'd need a way to request
          chroots for a variety of sets of targets.
    """
        # This PexInfo contains any customizations specified by the caller.
        # The process of building a pex modifies it further.
        pex_info = pex_info or PexInfo.default()

        path = self._chroot_path(PythonSetup.global_instance(), interpreter,
                                 pex_info, targets, platforms,
                                 extra_requirements, executable_file_content)
        if not os.path.exists(path):
            path_tmp = path + '.tmp'
            self._build_chroot(path_tmp, interpreter, pex_info, targets,
                               platforms, extra_requirements,
                               executable_file_content)
            shutil.move(path_tmp, path)

        # We must read the PexInfo that was frozen into the pex, so we get the modifications
        # created when that pex was built.
        pex_info = PexInfo.from_pex(path)
        # Now create a PythonChroot wrapper without dumping it.
        builder = PEXBuilder(path=path,
                             interpreter=interpreter,
                             pex_info=pex_info)
        chroot = PythonChroot(context=self.context,
                              python_setup=PythonSetup.global_instance(),
                              python_repos=PythonRepos.global_instance(),
                              interpreter=interpreter,
                              builder=builder,
                              targets=targets,
                              platforms=platforms,
                              extra_requirements=extra_requirements)
        # TODO: Doesn't really need to be a contextmanager, but it's convenient to make it so
        # while transitioning calls to temporary_chroot to calls to cached_chroot.
        # We can revisit after that transition is complete.
        yield chroot
Example #25
def pack_in_pex(requirements: List[str],
                output: str,
                ignored_packages: Collection[str] = [],
                pex_inherit_path: str = "prefer",
                editable_requirements: Dict[str, str] = {}
                ) -> str:
    """
    Pack current environment using a pex.

    :param requirements: list of requirement strings (e.g. ['tensorflow==1.15.0'])
    :param output: location of the pex
    :param ignored_packages: packages to be excluded from the pex
    :param pex_inherit_path: see https://github.com/pantsbuild/pex/blob/master/pex/bin/pex.py#L264,
                             possible values ['false', 'fallback', 'prefer']
    :return: destination of the archive (the ``output`` path)
    """

    interpreter = PythonInterpreter.get()
    pex_info = PexInfo.default(interpreter)
    pex_info.inherit_path = pex_inherit_path
    pex_builder = PEXBuilder(
        copy=True,
        interpreter=interpreter,
        pex_info=pex_info)

    for current_package in editable_requirements.values():
        _logger.debug("Add current path as source", current_package)
        _walk_and_do(pex_builder.add_source, current_package)

    try:
        resolveds = resolve_multi(
            requirements=requirements,
            indexes=[CRITEO_PYPI_URL] if _is_criteo() else None)

        for resolved in resolveds:
            if resolved.distribution.key in ignored_packages:
                _logger.debug(f"Ignore requirement {resolved.distribution}")
                continue
            else:
                _logger.debug(f"Add requirement {resolved.distribution}")
            pex_builder.add_distribution(resolved.distribution)
            pex_builder.add_requirement(resolved.requirement)
    except (Unsatisfiable, Untranslatable):
        _logger.exception('Cannot create pex')
        raise

    pex_builder.build(output)

    return output
Example #26
 def temporary_chroot(self,
                      interpreter,
                      pex_info,
                      targets,
                      platforms,
                      extra_requirements=None,
                      executable_file_content=None):
     path = tempfile.mkdtemp(
     )  # Not a contextmanager: chroot.delete() will clean this up anyway.
     pex_info = pex_info or PexInfo.default()
     chroot = self._build_chroot(path, interpreter, pex_info, targets,
                                 platforms, extra_requirements,
                                 executable_file_content)
     yield chroot
     chroot.delete()
Example #27
  def bootstrap(self, interpreter, pex_file_path, extra_reqs=None):
    # Caching is done just by checking if the file at the specified path is already executable.
    if not is_executable(pex_file_path):
      pex_info = PexInfo.default(interpreter=interpreter)
      if self.entry_point is not None:
        pex_info.entry_point = self.entry_point

      with safe_concurrent_creation(pex_file_path) as safe_path:
        all_reqs = list(self.base_requirements) + list(extra_reqs or [])
        pex_builder = PexBuilderWrapper.Factory.create(
          builder=PEXBuilder(interpreter=interpreter, pex_info=pex_info))
        pex_builder.add_resolved_requirements(all_reqs, platforms=['current'])
        pex_builder.build(safe_path)

    return PEX(pex_file_path, interpreter)
Example #28
File: test_pex_info.py  Project: tdyas/pex
def test_pex_root_set_unwriteable():
    with temporary_dir() as td:
        pex_root = os.path.realpath(os.path.join(td, "pex_root"))
        os.mkdir(pex_root, 0o444)

        pex_info = PexInfo.default()
        pex_info.pex_root = pex_root

        with warnings.catch_warnings(record=True) as log:
            assert pex_root != pex_info.pex_root

        assert 1 == len(log)
        message = log[0].message
        assert isinstance(message, PEXWarning)
        assert pex_root in str(message)
        assert pex_info.pex_root in str(message)
Example #29
    def _test_runner(self, targets, workunit):
        interpreter = self.select_interpreter_for_targets(targets)
        pex_info = PexInfo.default()
        pex_info.entry_point = 'pytest'

        chroot = self.cached_chroot(interpreter=interpreter,
                                    pex_info=pex_info,
                                    targets=targets,
                                    platforms=('current', ),
                                    extra_requirements=self._TESTING_TARGETS)
        pex = chroot.pex()
        with self._maybe_shard() as shard_args:
            with self._maybe_emit_junit_xml(targets) as junit_args:
                with self._maybe_emit_coverage_data(targets, chroot.path(),
                                                    pex,
                                                    workunit) as coverage_args:
                    yield pex, shard_args + junit_args + coverage_args
Example #30
    def _test_runner(self, targets, workunit):
        interpreter = self.select_interpreter_for_targets(targets)
        pex_info = PexInfo.default()
        pex_info.entry_point = "pytest"

        chroot = self.cached_chroot(
            interpreter=interpreter,
            pex_info=pex_info,
            targets=targets,
            platforms=("current",),
            extra_requirements=self._TESTING_TARGETS,
        )
        pex = chroot.pex()
        with self._maybe_shard() as shard_args:
            with self._maybe_emit_junit_xml(targets) as junit_args:
                with self._maybe_emit_coverage_data(targets, chroot.path(), pex, workunit) as coverage_args:
                    yield pex, shard_args + junit_args + coverage_args
Example #31
    def bootstrap(self, interpreter, pex_file_path, extra_reqs=None):
        # Caching is done just by checking if the file at the specified path is already executable.
        if not is_executable(pex_file_path):
            pex_info = PexInfo.default(interpreter=interpreter)
            if self.entry_point is not None:
                pex_info.entry_point = self.entry_point

            with safe_concurrent_creation(pex_file_path) as safe_path:
                all_reqs = list(self.base_requirements) + list(extra_reqs
                                                               or [])
                pex_builder = PexBuilderWrapper.Factory.create(
                    builder=PEXBuilder(interpreter=interpreter,
                                       pex_info=pex_info))
                pex_builder.add_resolved_requirements(all_reqs,
                                                      platforms=['current'])
                pex_builder.build(safe_path)

        return PEX(pex_file_path, interpreter)
Example #32
File: conan.py  Project: ryokugyu/pants
 def bootstrap_conan(self):
     pex_info = PexInfo.default()
     pex_info.entry_point = 'conans.conan'
     conan_bootstrap_dir = os.path.join(get_pants_cachedir(),
                                        'conan_support')
     conan_pex_path = os.path.join(conan_bootstrap_dir, 'conan_binary')
     interpreter = PythonInterpreter.get()
     if not os.path.exists(conan_pex_path):
         with safe_concurrent_creation(conan_pex_path) as safe_path:
             builder = PEXBuilder(safe_path, interpreter, pex_info=pex_info)
             reqs = [
                 PythonRequirement(req)
                 for req in self.get_options().conan_requirements
             ]
             dump_requirements(builder, interpreter, reqs, logger)
             builder.freeze()
     conan_binary = PEX(conan_pex_path, interpreter)
     return self.ConanBinary(pex=conan_binary)
Example #33
    def cached_chroot(self,
                      interpreter,
                      pex_info,
                      targets,
                      platforms=None,
                      extra_requirements=None,
                      executable_file_content=None):
        """Returns a cached PythonChroot created with the specified args.

    The returned chroot will be cached for future use.

    :rtype: pants.backend.python.python_chroot.PythonChroot

    TODO: Garbage-collect old chroots, so they don't pile up?
    TODO: Ideally chroots would just be products produced by some other task. But that's
          a bit too complicated to implement right now, as we'd need a way to request
          chroots for a variety of sets of targets.
    """
        # This PexInfo contains any customizations specified by the caller.
        # The process of building a pex modifies it further.
        pex_info = pex_info or PexInfo.default()

        path = self._chroot_path(interpreter, pex_info, targets, platforms,
                                 extra_requirements, executable_file_content)
        if not os.path.exists(path):
            path_tmp = path + '.tmp'
            self._build_chroot(path_tmp, interpreter, pex_info, targets,
                               platforms, extra_requirements,
                               executable_file_content)
            shutil.move(path_tmp, path)

        # We must read the PexInfo that was frozen into the pex, so we get the modifications
        # created when that pex was built.
        pex_info = PexInfo.from_pex(path)
        # Now create a PythonChroot wrapper without dumping it.
        builder = PEXBuilder(path=path,
                             interpreter=interpreter,
                             pex_info=pex_info,
                             copy=True)
        return self.create_chroot(interpreter=interpreter,
                                  builder=builder,
                                  targets=targets,
                                  platforms=platforms,
                                  extra_requirements=extra_requirements)
Example #34
    def setup_repl_session(self, targets):
        interpreter = self.select_interpreter_for_targets(targets)

        extra_requirements = []
        if self.get_options().ipython:
            entry_point = self.get_options().ipython_entry_point
            for req in self.get_options().ipython_requirements:
                extra_requirements.append(PythonRequirement(req))
        else:
            entry_point = 'code:interact'

        pex_info = PexInfo.default()
        pex_info.entry_point = entry_point
        chroot = self.cached_chroot(interpreter=interpreter,
                                    pex_info=pex_info,
                                    targets=targets,
                                    platforms=None,
                                    extra_requirements=extra_requirements)
        return chroot.pex()
Example #35
  def setup_repl_session(self, targets):
    interpreter = self.select_interpreter_for_targets(targets)

    extra_requirements = []
    if self.get_options().ipython:
      entry_point = self.get_options().ipython_entry_point
      for req in self.get_options().ipython_requirements:
        extra_requirements.append(PythonRequirement(req))
    else:
      entry_point = 'code:interact'

    pex_info = PexInfo.default()
    pex_info.entry_point = entry_point
    chroot = self.cached_chroot(interpreter=interpreter,
                                pex_info=pex_info,
                                targets=targets,
                                platforms=None,
                                extra_requirements=extra_requirements)
    return chroot.pex()
Example #36
File: test_pex.py  Project: tdyas/pex
def test_activate_interpreter_different_from_current():
  with temporary_dir() as pex_root:
    interp_version = PY36 if PY2 else PY27
    custom_interpreter = PythonInterpreter.from_binary(ensure_python_interpreter(interp_version))
    pex_info = PexInfo.default(custom_interpreter)
    pex_info.pex_root = pex_root
    with temporary_dir() as pex_chroot:
      pex_builder = PEXBuilder(path=pex_chroot,
                               interpreter=custom_interpreter,
                               pex_info=pex_info)
      with make_bdist(interpreter=custom_interpreter) as bdist:
        pex_builder.add_distribution(bdist)
        pex_builder.set_entry_point('sys:exit')
        pex_builder.freeze()

        pex = PEX(pex_builder.path(), interpreter=custom_interpreter)
        try:
          pex._activate()
        except SystemExit as e:
          pytest.fail('PEX activation of %s failed with %s' % (pex, e))
Example #37
File: python_task.py  Project: pcurry/pants
  def cached_chroot(self, interpreter, pex_info, targets, platforms,
                    extra_requirements=None, executable_file_content=None):
    """Returns a cached PythonChroot created with the specified args.

    The returned chroot will be cached for future use.

    TODO: Garbage-collect old chroots, so they don't pile up?
    TODO: Ideally chroots would just be products produced by some other task. But that's
          a bit too complicated to implement right now, as we'd need a way to request
          chroots for a variety of sets of targets.
    """
    # This PexInfo contains any customizations specified by the caller.
    # The process of building a pex modifies it further.
    pex_info = pex_info or PexInfo.default()

    path = self._chroot_path(PythonSetup.global_instance(), interpreter, pex_info, targets,
                             platforms, extra_requirements, executable_file_content)
    if not os.path.exists(path):
      path_tmp = path + '.tmp'
      self._build_chroot(path_tmp, interpreter, pex_info, targets, platforms,
                         extra_requirements, executable_file_content)
      shutil.move(path_tmp, path)

    # We must read the PexInfo that was frozen into the pex, so we get the modifications
    # created when that pex was built.
    pex_info = PexInfo.from_pex(path)
    # Now create a PythonChroot wrapper without dumping it.
    builder = PEXBuilder(path=path, interpreter=interpreter, pex_info=pex_info)
    chroot = PythonChroot(
      context=self.context,
      python_setup=PythonSetup.global_instance(),
      python_repos=PythonRepos.global_instance(),
      interpreter=interpreter,
      builder=builder,
      targets=targets,
      platforms=platforms,
      extra_requirements=extra_requirements)
    # TODO: Doesn't really need to be a contextmanager, but it's convenient to make it so
    # while transitioning calls to temporary_chroot to calls to cached_chroot.
    # We can revisit after that transition is complete.
    yield chroot
Example #38
    def __init__(
        self,
        path=None,
        interpreter=None,
        chroot=None,
        pex_info=None,
        preamble=None,
        copy=False,
        include_tools=False,
    ):
        """Initialize a pex builder.

        :keyword path: The path to write the PEX as it is built.  If ``None`` is specified,
          a temporary directory will be created.
        :keyword interpreter: The interpreter to use to build this PEX environment.  If ``None``
          is specified, the current interpreter is used.
        :keyword chroot: If specified, preexisting :class:`Chroot` to use for building the PEX.
        :keyword pex_info: A preexisting PexInfo to use to build the PEX.
        :keyword preamble: If supplied, execute this code prior to bootstrapping this PEX
          environment.
        :type preamble: str
        :keyword copy: If False, attempt to create the pex environment via hard-linking, falling
                       back to copying across devices. If True, always copy.
        :keyword include_tools: If True, include runtime tools which can be executed by exporting
                                `PEX_TOOLS=1`.

        .. versionchanged:: 0.8
          The temporary directory created when ``path`` is not specified is now garbage collected on
          interpreter exit.
        """
        self._interpreter = interpreter or PythonInterpreter.get()
        self._chroot = chroot or Chroot(path or safe_mkdtemp())
        self._pex_info = pex_info or PexInfo.default(self._interpreter)
        self._preamble = preamble or ""
        self._copy = copy
        self._include_tools = include_tools

        self._shebang = self._interpreter.identity.hashbang()
        self._logger = logging.getLogger(__name__)
        self._frozen = False
        self._distributions = set()
Example #39
  def _test_runner(self, targets, workunit):
    interpreter = self.select_interpreter_for_targets(targets)
    pex_info = PexInfo.default()
    pex_info.entry_point = 'pytest'

    testing_reqs = [PythonRequirement(s)
                    for s in PyTest.global_instance().get_requirement_strings()]

    chroot = self.cached_chroot(interpreter=interpreter,
                                pex_info=pex_info,
                                targets=targets,
                                platforms=('current',),
                                extra_requirements=testing_reqs)
    pex = chroot.pex()
    with self._maybe_shard() as shard_args:
      with self._maybe_emit_junit_xml(targets) as junit_args:
        with self._maybe_emit_coverage_data(targets,
                                            chroot.path(),
                                            pex,
                                            workunit) as coverage_args:
          yield pex, shard_args + junit_args + coverage_args
Example #40
    def __init__(
            self,
            path=None,  # type: Optional[str]
            interpreter=None,  # type: Optional[PythonInterpreter]
            chroot=None,  # type: Optional[Chroot]
            pex_info=None,  # type: Optional[PexInfo]
            preamble=None,  # type: Optional[str]
            copy_mode=CopyMode.LINK,  # type: CopyMode.Value
            include_tools=False,  # type: bool
    ):
        # type: (...) -> None
        """Initialize a pex builder.

        :keyword path: The path to write the PEX as it is built.  If ``None`` is specified,
          a temporary directory will be created.
        :keyword interpreter: The interpreter to use to build this PEX environment.  If ``None``
          is specified, the current interpreter is used.
        :keyword chroot: If specified, preexisting :class:`Chroot` to use for building the PEX.
        :keyword pex_info: A preexisting PexInfo to use to build the PEX.
        :keyword preamble: If supplied, execute this code prior to bootstrapping this PEX
          environment.
        :keyword copy_mode: Create the pex environment using the given copy mode.
        :keyword include_tools: If True, include runtime tools which can be executed by exporting
                                `PEX_TOOLS=1`.

        .. versionchanged:: 0.8
          The temporary directory created when ``path`` is not specified is now garbage collected on
          interpreter exit.
        """
        self._interpreter = interpreter or PythonInterpreter.get()
        self._chroot = chroot or Chroot(path or safe_mkdtemp())
        self._pex_info = pex_info or PexInfo.default(self._interpreter)
        self._preamble = preamble or ""
        self._copy_mode = copy_mode
        self._include_tools = include_tools

        self._shebang = self._interpreter.identity.hashbang()
        self._logger = logging.getLogger(__name__)
        self._frozen = False
        self._distributions = {}  # type: Dict[str, Distribution]
Example #41
    def _test_runner(self, targets, workunit):
        interpreter = self.select_interpreter_for_targets(targets)
        pex_info = PexInfo.default()
        pex_info.entry_point = 'pytest'

        testing_reqs = [
            PythonRequirement(s)
            for s in PyTest.global_instance().get_requirement_strings()
        ]

        chroot = self.cached_chroot(interpreter=interpreter,
                                    pex_info=pex_info,
                                    targets=targets,
                                    platforms=('current', ),
                                    extra_requirements=testing_reqs)
        pex = chroot.pex()
        with self._maybe_shard() as shard_args:
            with self._maybe_emit_junit_xml(targets) as junit_args:
                with self._maybe_emit_coverage_data(targets, chroot.path(),
                                                    pex,
                                                    workunit) as coverage_args:
                    yield pex, shard_args + junit_args + coverage_args
Example #42
    def execute(self, **pex_run_kwargs):
        (accept_predicate,
         reject_predicate) = Target.lang_discriminator('python')
        targets = self.require_homogeneous_targets(accept_predicate,
                                                   reject_predicate)
        if targets:
            # We can't throw if the target isn't a python target, because perhaps we were called on a
            # JVM target, in which case we have to no-op and let scala repl do its thing.
            # TODO(benjy): Some more elegant way to coordinate how tasks claim targets.
            interpreter = self.select_interpreter_for_targets(targets)

            extra_requirements = []
            if self.get_options().ipython:
                entry_point = self.get_options().ipython_entry_point
                for req in self.get_options().ipython_requirements:
                    extra_requirements.append(PythonRequirement(req))
            else:
                entry_point = 'code:interact'

            pex_info = PexInfo.default()
            pex_info.entry_point = entry_point
            with self.cached_chroot(
                    interpreter=interpreter,
                    pex_info=pex_info,
                    targets=targets,
                    platforms=None,
                    extra_requirements=extra_requirements) as chroot:
                pex = chroot.pex()
                self.context.release_lock()
                with stty_utils.preserve_stty_settings():
                    with self.context.new_workunit(name='run',
                                                   labels=[WorkUnit.RUN]):
                        po = pex.run(blocking=False, **pex_run_kwargs)
                        try:
                            return po.wait()
                        except KeyboardInterrupt:
                            pass
Example #43
 def pex_info(inherit_path):
     pex_info = PexInfo.default()
     pex_info.inherit_path = inherit_path
     return pex_info
Example #44
  def generate_targets_map(self, targets, classpath_products=None):
    """Generates a dictionary containing all pertinent information about the target graph.

    The return dictionary is suitable for serialization by json.dumps.
    :param targets: The list of targets to generate the map for.
    :param classpath_products: Optional classpath_products. If not provided when the --libraries
      option is `True`, this task will perform its own jar resolution.
    """
    targets_map = {}
    resource_target_map = {}
    python_interpreter_targets_mapping = defaultdict(list)

    if self.get_options().libraries:
      # NB(gmalmquist): This supports mocking the classpath_products in tests.
      if classpath_products is None:
        classpath_products = self.resolve_jars(targets)
    else:
      classpath_products = None

    def process_target(current_target):
      """
      :type current_target:pants.build_graph.target.Target
      """
      def get_target_type(target):
        if target.is_test:
          return ExportTask.SourceRootTypes.TEST
        else:
          if (isinstance(target, Resources) and
              target in resource_target_map and
              resource_target_map[target].is_test):
            return ExportTask.SourceRootTypes.TEST_RESOURCE
          elif isinstance(target, Resources):
            return ExportTask.SourceRootTypes.RESOURCE
          else:
            return ExportTask.SourceRootTypes.SOURCE

      info = {
        'targets': [],
        'libraries': [],
        'roots': [],
        'id': current_target.id,
        'target_type': get_target_type(current_target),
        # NB: is_code_gen should be removed when export format advances to 1.1.0 or higher
        'is_code_gen': current_target.is_codegen,
        'is_synthetic': current_target.is_synthetic,
        'pants_target_type': self._get_pants_target_alias(type(current_target))
      }

      if not current_target.is_synthetic:
        info['globs'] = current_target.globs_relative_to_buildroot()
        if self.get_options().sources:
          info['sources'] = list(current_target.sources_relative_to_buildroot())

      if isinstance(current_target, PythonRequirementLibrary):
        reqs = current_target.payload.get_field_value('requirements', set())
        """:type : set[pants.backend.python.python_requirement.PythonRequirement]"""
        info['requirements'] = [req.key for req in reqs]

      if isinstance(current_target, PythonTarget):
        interpreter_for_target = self.select_interpreter_for_targets([current_target])
        if interpreter_for_target is None:
          raise TaskError('Unable to find suitable interpreter for {}'
                          .format(current_target.address))
        python_interpreter_targets_mapping[interpreter_for_target].append(current_target)
        info['python_interpreter'] = str(interpreter_for_target.identity)

      def iter_transitive_jars(jar_lib):
        """
        :type jar_lib: :class:`pants.backend.jvm.targets.jar_library.JarLibrary`
        :rtype: :class:`collections.Iterator` of
                :class:`pants.backend.jvm.jar_dependency_utils.M2Coordinate`
        """
        if classpath_products:
          jar_products = classpath_products.get_artifact_classpath_entries_for_targets((jar_lib,))
          for _, jar_entry in jar_products:
            coordinate = jar_entry.coordinate
            # We drop classifier and type_ since those fields are represented in the global
            # libraries dict and here we just want the key into that dict (see `_jar_id`).
            yield M2Coordinate(org=coordinate.org, name=coordinate.name, rev=coordinate.rev)

      target_libraries = OrderedSet()
      if isinstance(current_target, JarLibrary):
        target_libraries = OrderedSet(iter_transitive_jars(current_target))
      for dep in current_target.dependencies:
        info['targets'].append(dep.address.spec)
        if isinstance(dep, JarLibrary):
          for jar in dep.jar_dependencies:
            target_libraries.add(M2Coordinate(jar.org, jar.name, jar.rev))
          # Add all the jars pulled in by this jar_library
          target_libraries.update(iter_transitive_jars(dep))
        if isinstance(dep, Resources):
          resource_target_map[dep] = current_target

      if isinstance(current_target, ScalaLibrary):
        for dep in current_target.java_sources:
          info['targets'].append(dep.address.spec)
          process_target(dep)

      if isinstance(current_target, JvmTarget):
        info['excludes'] = [self._exclude_id(exclude) for exclude in current_target.excludes]
        info['platform'] = current_target.platform.name

      info['roots'] = map(lambda (source_root, package_prefix): {
        'source_root': source_root,
        'package_prefix': package_prefix
      }, self._source_roots_for_target(current_target))

      if classpath_products:
        info['libraries'] = [self._jar_id(lib) for lib in target_libraries]
      targets_map[current_target.address.spec] = info

    for target in targets:
      process_target(target)

    jvm_platforms_map = {
      'default_platform' : JvmPlatform.global_instance().default_platform.name,
      'platforms': {
        str(platform_name): {
          'target_level' : str(platform.target_level),
          'source_level' : str(platform.source_level),
          'args' : platform.args,
        } for platform_name, platform in JvmPlatform.global_instance().platforms_by_name.items() }
    }

    graph_info = {
      'version': self.DEFAULT_EXPORT_VERSION,
      'targets': targets_map,
      'jvm_platforms': jvm_platforms_map,
    }
    jvm_distributions = DistributionLocator.global_instance().all_jdk_paths()
    if jvm_distributions:
      graph_info['jvm_distributions'] = jvm_distributions

    if classpath_products:
      graph_info['libraries'] = self._resolve_jars_info(targets, classpath_products)

    if python_interpreter_targets_mapping:
      interpreters = self.interpreter_cache.select_interpreter(
        python_interpreter_targets_mapping.keys())
      default_interpreter = interpreters[0]

      interpreters_info = {}
      for interpreter, targets in six.iteritems(python_interpreter_targets_mapping):
        chroot = self.cached_chroot(
          interpreter=interpreter,
          pex_info=PexInfo.default(),
          targets=targets
        )
        interpreters_info[str(interpreter.identity)] = {
          'binary': interpreter.binary,
          'chroot': chroot.path()
        }

      graph_info['python_setup'] = {
        'default_interpreter': str(default_interpreter.identity),
        'interpreters': interpreters_info
      }

    return graph_info
Example #45
 def execute(self):
     pex_info = PexInfo.default()
     pex_info.entry_point = 'pytest'
     pytest_binary = self.create_pex(pex_info)
     self.context.products.register_data(self.PYTEST_BINARY, pytest_binary)
Example #46
def pex_info_no_emit_warnings():
  pex_info = PexInfo.default()
  pex_info.emit_warnings = False
  return pex_info
Example #47
def test_emit_warnings_emit_env_off():
    assert_no_warnings(PexInfo.default(), PEX_EMIT_WARNINGS='0')
Example #48
def test_emit_warnings_default_on():
    assert_warnings(PexInfo.default())
Example #49
def pex_info_no_emit_warnings():
    pex_info = PexInfo.default()
    pex_info.emit_warnings = False
    return pex_info
Example #50
def test_build_properties():
  assert pex_version == PexInfo.default().build_properties['pex_version']
Example #51
def test_emit_warnings_default_on():
  assert_warnings(PexInfo.default())
Example #52
def test_emit_warnings_emit_env_off():
  assert_no_warnings(PexInfo.default(), PEX_EMIT_WARNINGS='0')
Example #53
File: pytest_prep.py  Project: benjyw/pants
 def execute(self):
   pex_info = PexInfo.default()
   pex_info.entry_point = 'pytest'
   pytest_binary = self.create_pex(pex_info)
   self.context.products.register_data(self.PYTEST_BINARY, pytest_binary)
Example #54
File: export.py  Project: Gabriel439/pants
    def console_output(self, targets):
        targets_map = {}
        resource_target_map = {}
        ivy_info = None
        if self.get_options().libraries:
            ivy_jar_products = self.context.products.get_data("ivy_jar_products") or {}
            # This product is a list for historical reasons (exclusives groups) but in practice should
            # have either 0 or 1 entries.
            ivy_info_list = ivy_jar_products.get("default")
            if ivy_info_list:
                assert len(ivy_info_list) == 1, (
                    "The values in ivy_jar_products should always be length 1,"
                    " since we no longer have exclusives groups."
                )
                ivy_info = ivy_info_list[0]

        ivy_jar_memo = {}
        python_interpreter_targets_mapping = defaultdict(list)

        def process_target(current_target):
            """
      :type current_target:pants.base.target.Target
      """

            def get_target_type(target):
                if target.is_test:
                    return Export.SourceRootTypes.TEST
                else:
                    if (
                        isinstance(target, Resources)
                        and target in resource_target_map
                        and resource_target_map[target].is_test
                    ):
                        return Export.SourceRootTypes.TEST_RESOURCE
                    elif isinstance(target, Resources):
                        return Export.SourceRootTypes.RESOURCE
                    else:
                        return Export.SourceRootTypes.SOURCE

            def get_transitive_jars(jar_lib):
                """
        :type jar_lib: pants.backend.jvm.targets.jar_library.JarLibrary
        :rtype: twitter.common.collections.orderedset.OrderedSet
        """
                if not ivy_info or not self.get_options().libraries:
                    return OrderedSet()
                transitive_jars = OrderedSet()
                for jar in jar_lib.jar_dependencies:
                    transitive_jars.update(ivy_info.get_jars_for_ivy_module(jar, memo=ivy_jar_memo))
                return transitive_jars

            info = {
                "targets": [],
                "libraries": [],
                "roots": [],
                "target_type": get_target_type(current_target),
                "is_code_gen": current_target.is_codegen,
                "pants_target_type": self._get_pants_target_alias(type(current_target)),
            }

            if not current_target.is_synthetic:
                info["globs"] = current_target.globs_relative_to_buildroot()
                if self.get_options().sources:
                    info["sources"] = list(current_target.sources_relative_to_buildroot())

            if isinstance(current_target, PythonRequirementLibrary):
                reqs = current_target.payload.get_field_value("requirements", set())
                """:type : set[pants.backend.python.python_requirement.PythonRequirement]"""
                info["requirements"] = [req.key for req in reqs]

            if isinstance(current_target, PythonTarget):
                interpreter_for_target = self.select_interpreter_for_targets([current_target])
                if interpreter_for_target is None:
                    raise TaskError("Unable to find suitable interpreter for {}".format(current_target.address))
                python_interpreter_targets_mapping[interpreter_for_target].append(current_target)
                info["python_interpreter"] = str(interpreter_for_target.identity)

            target_libraries = OrderedSet()
            if isinstance(current_target, JarLibrary):
                target_libraries = get_transitive_jars(current_target)
            for dep in current_target.dependencies:
                info["targets"].append(dep.address.spec)
                if isinstance(dep, JarLibrary):
                    for jar in dep.jar_dependencies:
                        target_libraries.add(IvyModuleRef(jar.org, jar.name, jar.rev))
                    # Add all the jars pulled in by this jar_library
                    target_libraries.update(get_transitive_jars(dep))
                if isinstance(dep, Resources):
                    resource_target_map[dep] = current_target

            if isinstance(current_target, ScalaLibrary):
                for dep in current_target.java_sources:
                    info["targets"].append(dep.address.spec)
                    process_target(dep)

            if isinstance(current_target, JvmTarget):
                info["excludes"] = [self._exclude_id(exclude) for exclude in current_target.excludes]
                info["platform"] = current_target.platform.name

            info["roots"] = map(
                lambda (source_root, package_prefix): {"source_root": source_root, "package_prefix": package_prefix},
                self._source_roots_for_target(current_target),
            )

            if self.get_options().libraries:
                info["libraries"] = [self._jar_id(lib) for lib in target_libraries]
            targets_map[current_target.address.spec] = info

        for target in targets:
            process_target(target)

        jvm_platforms_map = {
            "default_platform": JvmPlatform.global_instance().default_platform.name,
            "platforms": {
                str(platform_name): {
                    "target_level": str(platform.target_level),
                    "source_level": str(platform.source_level),
                    "args": platform.args,
                }
                for platform_name, platform in JvmPlatform.global_instance().platforms_by_name.items()
            },
        }

        graph_info = {
            "version": self.DEFAULT_EXPORT_VERSION,
            "targets": targets_map,
            "jvm_platforms": jvm_platforms_map,
        }
        jvm_distributions = DistributionLocator.global_instance().all_jdk_paths()
        if jvm_distributions:
            graph_info["jvm_distributions"] = jvm_distributions

        if self.get_options().libraries:
            graph_info["libraries"] = self._resolve_jars_info()

        if python_interpreter_targets_mapping:
            default_interpreter = self.interpreter_cache.select_interpreter(python_interpreter_targets_mapping.keys())[
                0
            ]

            interpreters_info = {}
            for interpreter, targets in python_interpreter_targets_mapping.items():
                chroot = self.cached_chroot(interpreter=interpreter, pex_info=PexInfo.default(), targets=targets)
                interpreters_info[str(interpreter.identity)] = {"binary": interpreter.binary, "chroot": chroot.path()}

            graph_info["python_setup"] = {
                "default_interpreter": str(default_interpreter.identity),
                "interpreters": interpreters_info,
            }

        if self.format:
            return json.dumps(graph_info, indent=4, separators=(",", ": ")).splitlines()
        else:
            return [json.dumps(graph_info)]
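
The fragment above finishes assembling `graph_info` and, depending on `self.format`, either pretty-prints it across several lines or emits it as one compact JSON line. The short sketch below shows how a consumer might reassemble and parse that output; the `export_lines` value is a hypothetical stand-in for whatever the task actually printed, and only the standard-library `json` module is assumed.

import json

# Hypothetical stand-in for the lines returned by console_output above
# (a single compact line here; pretty-printed output would span many lines).
export_lines = ['{"version": "1.0.0", "targets": {}, "jvm_platforms": {}}']

# Re-join and parse the lines back into the graph_info structure.
graph_info = json.loads("\n".join(export_lines))

# The top-level keys mirror what the task placed into graph_info.
print(sorted(graph_info))
for spec, info in graph_info["targets"].items():
    print(spec, info["target_type"], info["pants_target_type"])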
Example #55
0
File: export.py Project: jduan/pants
  def generate_targets_map(self, targets, classpath_products=None):
    """Generates a dictionary containing all pertinent information about the target graph.

    The return dictionary is suitable for serialization by json.dumps.
    :param targets: The list of targets to generate the map for.
    :param classpath_products: Optional classpath_products. If not provided when the --libraries
      option is `True`, this task will perform its own jar resolution.
    """
    targets_map = {}
    resource_target_map = {}
    python_interpreter_targets_mapping = defaultdict(list)

    if self.get_options().libraries:
      # NB(gmalmquist): This supports mocking the classpath_products in tests.
      if classpath_products is None:
        classpath_products = self.resolve_jars(targets)
    else:
      classpath_products = None

    def process_target(current_target):
      """
      :type current_target:pants.build_graph.target.Target
      """
      def get_target_type(target):
        if target.is_test:
          return ExportTask.SourceRootTypes.TEST
        else:
          if (isinstance(target, Resources) and
              target in resource_target_map and
              resource_target_map[target].is_test):
            return ExportTask.SourceRootTypes.TEST_RESOURCE
          elif isinstance(target, Resources):
            return ExportTask.SourceRootTypes.RESOURCE
          else:
            return ExportTask.SourceRootTypes.SOURCE

      info = {
        'targets': [],
        'libraries': [],
        'roots': [],
        'target_type': get_target_type(current_target),
        'is_code_gen': current_target.is_codegen,
        'pants_target_type': self._get_pants_target_alias(type(current_target))
      }

      if not current_target.is_synthetic:
        info['globs'] = current_target.globs_relative_to_buildroot()
        if self.get_options().sources:
          info['sources'] = list(current_target.sources_relative_to_buildroot())

      if isinstance(current_target, PythonRequirementLibrary):
        reqs = current_target.payload.get_field_value('requirements', set())
        """:type : set[pants.backend.python.python_requirement.PythonRequirement]"""
        info['requirements'] = [req.key for req in reqs]

      if isinstance(current_target, PythonTarget):
        interpreter_for_target = self.select_interpreter_for_targets([current_target])
        if interpreter_for_target is None:
          raise TaskError('Unable to find suitable interpreter for {}'
                          .format(current_target.address))
        python_interpreter_targets_mapping[interpreter_for_target].append(current_target)
        info['python_interpreter'] = str(interpreter_for_target.identity)

      def iter_transitive_jars(jar_lib):
        """
        :type jar_lib: :class:`pants.backend.jvm.targets.jar_library.JarLibrary`
        :rtype: :class:`collections.Iterator` of
                :class:`pants.backend.jvm.jar_dependency_utils.M2Coordinate`
        """
        if classpath_products:
          jar_products = classpath_products.get_artifact_classpath_entries_for_targets((jar_lib,))
          for _, jar_entry in jar_products:
            coordinate = jar_entry.coordinate
            # We drop classifier and type_ since those fields are represented in the global
            # libraries dict and here we just want the key into that dict (see `_jar_id`).
            yield M2Coordinate(org=coordinate.org, name=coordinate.name, rev=coordinate.rev)

      target_libraries = OrderedSet()
      if isinstance(current_target, JarLibrary):
        target_libraries = OrderedSet(iter_transitive_jars(current_target))
      for dep in current_target.dependencies:
        info['targets'].append(dep.address.spec)
        if isinstance(dep, JarLibrary):
          for jar in dep.jar_dependencies:
            target_libraries.add(M2Coordinate(jar.org, jar.name, jar.rev))
          # Add all the jars pulled in by this jar_library
          target_libraries.update(iter_transitive_jars(dep))
        if isinstance(dep, Resources):
          resource_target_map[dep] = current_target

      if isinstance(current_target, ScalaLibrary):
        for dep in current_target.java_sources:
          info['targets'].append(dep.address.spec)
          process_target(dep)

      if isinstance(current_target, JvmTarget):
        info['excludes'] = [self._exclude_id(exclude) for exclude in current_target.excludes]
        info['platform'] = current_target.platform.name

      info['roots'] = map(lambda (source_root, package_prefix): {
        'source_root': source_root,
        'package_prefix': package_prefix
      }, self._source_roots_for_target(current_target))

      if classpath_products:
        info['libraries'] = [self._jar_id(lib) for lib in target_libraries]
      targets_map[current_target.address.spec] = info

    for target in targets:
      process_target(target)

    jvm_platforms_map = {
      'default_platform' : JvmPlatform.global_instance().default_platform.name,
      'platforms': {
        str(platform_name): {
          'target_level' : str(platform.target_level),
          'source_level' : str(platform.source_level),
          'args' : platform.args,
        } for platform_name, platform in JvmPlatform.global_instance().platforms_by_name.items() }
    }

    graph_info = {
      'version': self.DEFAULT_EXPORT_VERSION,
      'targets': targets_map,
      'jvm_platforms': jvm_platforms_map,
    }
    jvm_distributions = DistributionLocator.global_instance().all_jdk_paths()
    if jvm_distributions:
      graph_info['jvm_distributions'] = jvm_distributions

    if classpath_products:
      graph_info['libraries'] = self._resolve_jars_info(targets, classpath_products)

    if python_interpreter_targets_mapping:
      interpreters = self.interpreter_cache.select_interpreter(
        python_interpreter_targets_mapping.keys())
      default_interpreter = interpreters[0]

      interpreters_info = {}
      for interpreter, targets in six.iteritems(python_interpreter_targets_mapping):
        chroot = self.cached_chroot(
          interpreter=interpreter,
          pex_info=PexInfo.default(),
          targets=targets
        )
        interpreters_info[str(interpreter.identity)] = {
          'binary': interpreter.binary,
          'chroot': chroot.path()
        }

      graph_info['python_setup'] = {
        'default_interpreter': str(default_interpreter.identity),
        'interpreters': interpreters_info
      }

    return graph_info
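
Per the docstring, the dictionary returned by `generate_targets_map` is suitable for serialization with `json.dumps`. The helper below is a minimal sketch of consuming it; `export_task` and `targets` are hypothetical placeholders for a real ExportTask instance and its resolved target roots, not part of the original code.

import json

def dump_targets_map(export_task, targets):
  """Serialize and summarize the dict returned by generate_targets_map.

  `export_task` and `targets` are hypothetical placeholders; in a real run the
  task drives this itself (e.g. from its console_output method).
  """
  targets_map_info = export_task.generate_targets_map(targets)
  # The structure is JSON-serializable by design.
  serialized = json.dumps(targets_map_info, indent=2, sort_keys=True)
  # Each exported target is keyed by its address spec.
  for spec, info in targets_map_info['targets'].items():
    print(spec, info['target_type'])
  return serialized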
Example #56
0
    def console_output(self, targets):
        targets_map = {}
        resource_target_map = {}
        classpath_products = (
            self.context.products.get_data("compile_classpath") if self.get_options().libraries else None
        )

        python_interpreter_targets_mapping = defaultdict(list)

        def process_target(current_target):
            """
            :type current_target: pants.build_graph.target.Target
            """

            def get_target_type(target):
                if target.is_test:
                    return Export.SourceRootTypes.TEST
                else:
                    if (
                        isinstance(target, Resources)
                        and target in resource_target_map
                        and resource_target_map[target].is_test
                    ):
                        return Export.SourceRootTypes.TEST_RESOURCE
                    elif isinstance(target, Resources):
                        return Export.SourceRootTypes.RESOURCE
                    else:
                        return Export.SourceRootTypes.SOURCE

            info = {
                "targets": [],
                "libraries": [],
                "roots": [],
                "target_type": get_target_type(current_target),
                "is_code_gen": current_target.is_codegen,
                "pants_target_type": self._get_pants_target_alias(type(current_target)),
            }

            if not current_target.is_synthetic:
                info["globs"] = current_target.globs_relative_to_buildroot()
                if self.get_options().sources:
                    info["sources"] = list(current_target.sources_relative_to_buildroot())

            if isinstance(current_target, PythonRequirementLibrary):
                reqs = current_target.payload.get_field_value("requirements", set())
                """:type : set[pants.backend.python.python_requirement.PythonRequirement]"""
                info["requirements"] = [req.key for req in reqs]

            if isinstance(current_target, PythonTarget):
                interpreter_for_target = self.select_interpreter_for_targets([current_target])
                if interpreter_for_target is None:
                    raise TaskError("Unable to find suitable interpreter for {}".format(current_target.address))
                python_interpreter_targets_mapping[interpreter_for_target].append(current_target)
                info["python_interpreter"] = str(interpreter_for_target.identity)

            def iter_transitive_jars(jar_lib):
                """
                :type jar_lib: :class:`pants.backend.jvm.targets.jar_library.JarLibrary`
                :rtype: :class:`collections.Iterator` of
                        :class:`pants.backend.jvm.jar_dependency_utils.M2Coordinate`
                """
                if classpath_products:
                    jar_products = classpath_products.get_artifact_classpath_entries_for_targets((jar_lib,))
                    for _, jar_entry in jar_products:
                        coordinate = jar_entry.coordinate
                        # We drop classifier and type_ since those fields are represented in the global
                        # libraries dict and here we just want the key into that dict (see `_jar_id`).
                        yield M2Coordinate(org=coordinate.org, name=coordinate.name, rev=coordinate.rev)

            target_libraries = OrderedSet()
            if isinstance(current_target, JarLibrary):
                target_libraries = OrderedSet(iter_transitive_jars(current_target))
            for dep in current_target.dependencies:
                info["targets"].append(dep.address.spec)
                if isinstance(dep, JarLibrary):
                    for jar in dep.jar_dependencies:
                        target_libraries.add(M2Coordinate(jar.org, jar.name, jar.rev))
                    # Add all the jars pulled in by this jar_library
                    target_libraries.update(iter_transitive_jars(dep))
                if isinstance(dep, Resources):
                    resource_target_map[dep] = current_target

            if isinstance(current_target, ScalaLibrary):
                for dep in current_target.java_sources:
                    info["targets"].append(dep.address.spec)
                    process_target(dep)

            if isinstance(current_target, JvmTarget):
                info["excludes"] = [self._exclude_id(exclude) for exclude in current_target.excludes]
                info["platform"] = current_target.platform.name

            info["roots"] = map(
                lambda (source_root, package_prefix): {"source_root": source_root, "package_prefix": package_prefix},
                self._source_roots_for_target(current_target),
            )

            if classpath_products:
                info["libraries"] = [self._jar_id(lib) for lib in target_libraries]
            targets_map[current_target.address.spec] = info

        for target in targets:
            process_target(target)

        jvm_platforms_map = {
            "default_platform": JvmPlatform.global_instance().default_platform.name,
            "platforms": {
                str(platform_name): {
                    "target_level": str(platform.target_level),
                    "source_level": str(platform.source_level),
                    "args": platform.args,
                }
                for platform_name, platform in JvmPlatform.global_instance().platforms_by_name.items()
            },
        }

        graph_info = {
            "version": self.DEFAULT_EXPORT_VERSION,
            "targets": targets_map,
            "jvm_platforms": jvm_platforms_map,
        }
        jvm_distributions = DistributionLocator.global_instance().all_jdk_paths()
        if jvm_distributions:
            graph_info["jvm_distributions"] = jvm_distributions

        if classpath_products:
            graph_info["libraries"] = self._resolve_jars_info(targets, classpath_products)

        if python_interpreter_targets_mapping:
            interpreters = self.interpreter_cache.select_interpreter(python_interpreter_targets_mapping.keys())
            default_interpreter = interpreters[0]

            interpreters_info = {}
            for interpreter, targets in six.iteritems(python_interpreter_targets_mapping):
                chroot = self.cached_chroot(interpreter=interpreter, pex_info=PexInfo.default(), targets=targets)
                interpreters_info[str(interpreter.identity)] = {"binary": interpreter.binary, "chroot": chroot.path()}

            graph_info["python_setup"] = {
                "default_interpreter": str(default_interpreter.identity),
                "interpreters": interpreters_info,
            }

        if self.format:
            return json.dumps(graph_info, indent=4, separators=(",", ": ")).splitlines()
        else:
            return [json.dumps(graph_info)]
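
End-to-end, the export data is normally obtained by running the export goal rather than by calling `console_output` directly. The sketch below assumes a Pants v1 workspace in which `./pants export <spec>` writes only the JSON document to stdout (logging on stderr); the target spec and the `run_export` helper are illustrative, not part of the original code.

import json
import subprocess

def run_export(target_spec):
    """Run the v1 export goal for target_spec and parse its JSON output.

    Assumes ./pants export prints just the JSON document on stdout; any
    build logging is expected to go to stderr.
    """
    output = subprocess.check_output(["./pants", "export", target_spec])
    graph_info = json.loads(output.decode("utf-8"))
    python_setup = graph_info.get("python_setup")
    if python_setup:
        print("default interpreter:", python_setup["default_interpreter"])
    return graph_info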