def resolve_requirement_strings(self, interpreter, requirement_strings):
        """Build (or reuse from the workdir cache) a PEX for pip-style requirement strings.

        :param interpreter: The PythonInterpreter to resolve against.
        :param requirement_strings: An iterable of pip-style requirement strings.
        :returns: A PEX containing the resolved requirements.
        """
        sorted_reqs = sorted(requirement_strings)
        # Derive a stable directory id; the empty and singleton cases stay
        # human-readable in the workdir, everything else is hashed.
        if not sorted_reqs:
            fingerprint = 'no_requirements'
        elif len(sorted_reqs) == 1:
            fingerprint = sorted_reqs[0]
        else:
            fingerprint = hash_all(sorted_reqs)

        pex_dir = os.path.realpath(
            os.path.join(self.workdir, str(interpreter.identity), fingerprint))
        if not os.path.isdir(pex_dir):
            # Build under a scratch path that is atomically renamed into place.
            with safe_concurrent_creation(pex_dir) as staging_path:
                wrapper = PexBuilderWrapper(
                    PEXBuilder(path=staging_path,
                               interpreter=interpreter,
                               copy=True),
                    PythonRepos.global_instance(),
                    PythonSetup.global_instance(),
                    self.context.log)
                wrapper.add_resolved_requirements(
                    [PythonRequirement(req) for req in sorted_reqs])
                wrapper.freeze()
        return PEX(pex_dir, interpreter=interpreter)
Exemple #2
0
    def _resolve_requirements(self, target_roots, options=None):
        """Run the ResolveRequirements task over `target_roots` and return the resulting PEX.

        :param target_roots: Target roots to resolve requirements for.
        :param options: Optional options dict; the interpreter cache dir is forced to a
          temporary directory so the test never touches a shared cache.
        :returns: The REQUIREMENTS_PEX product produced by the task.
        """
        with temporary_dir() as cache_dir:
            options = options or {}
            options.setdefault(PythonSetup.options_scope,
                               {})['interpreter_cache_dir'] = cache_dir
            context = self.context(target_roots=target_roots,
                                   options=options,
                                   for_subsystems=[PythonSetup, PythonRepos])

            # We must get an interpreter via the cache, instead of using PythonInterpreter.get() directly,
            # to ensure that the interpreter has setuptools and wheel support.
            interpreter = PythonInterpreter.get()
            interpreter_cache = PythonInterpreterCache(
                PythonSetup.global_instance(),
                PythonRepos.global_instance(),
                logger=context.log.debug)
            interpreters = interpreter_cache.setup(
                paths=[os.path.dirname(interpreter.binary)],
                filters=[str(interpreter.identity.requirement)])
            # Seed the interpreter product so the task under test uses the cached interpreter.
            context.products.get_data(PythonInterpreter,
                                      lambda: interpreters[0])

            task = self.create_task(context)
            task.execute()

            return context.products.get_data(
                ResolveRequirements.REQUIREMENTS_PEX)
Exemple #3
0
 def create(cls, builder, log=None):
     """Construct a PexBuilderWrapper wired to the global python subsystems.

     :param builder: The underlying PEXBuilder to wrap.
     :param log: Optional logger; defaults to this module's logger.
     """
     effective_log = log or logging.getLogger(__name__)
     return PexBuilderWrapper(
         builder=builder,
         python_repos_subsystem=PythonRepos.global_instance(),
         python_setup_subsystem=PythonSetup.global_instance(),
         log=effective_log)
Exemple #4
0
 def _build_source_pex(self, interpreter, path, targets):
     """Assemble a source-only PEX at `path` from the python sources of `targets`."""
     wrapper = PexBuilderWrapper(
         PEXBuilder(path=path, interpreter=interpreter, copy=True),
         PythonRepos.global_instance(),
         PythonSetup.global_instance(),
         self.context.log)
     # Only targets that actually own python sources contribute files.
     source_targets = [tgt for tgt in targets if has_python_sources(tgt)]
     for source_target in source_targets:
         wrapper.add_sources_from(source_target)
     wrapper.freeze()
Exemple #5
0
    def checker_pex(self, interpreter):
        """Return a PEX of the style checker for `interpreter`, building and caching it on first use.

        In pants dev mode (PANTS_DEV set) the checker is built from its in-repo sources;
        otherwise it is taken from the already-loaded distribution on sys.path, falling
        back to resolving the pinned checker requirement from a repo.
        """
        # TODO(John Sirois): Formalize in pants.base?
        pants_dev_mode = os.environ.get('PANTS_DEV')

        if pants_dev_mode:
            checker_id = self.checker_target.transitive_invalidation_hash()
        else:
            checker_id = hash_all([self._CHECKER_REQ])

        # The cache key covers both the checker contents and the interpreter identity.
        pex_path = os.path.join(self.workdir, 'checker', checker_id,
                                str(interpreter.identity))

        if not os.path.exists(pex_path):
            with self.context.new_workunit(name='build-checker'):
                with safe_concurrent_creation(pex_path) as chroot:
                    pex_builder = PexBuilderWrapper(
                        PEXBuilder(path=chroot, interpreter=interpreter),
                        PythonRepos.global_instance(),
                        PythonSetup.global_instance(), self.context.log)

                    # Constraining is required to guard against the case where the user
                    # has a pexrc file set.
                    pex_builder.add_interpreter_constraint(
                        str(interpreter.identity.requirement))

                    if pants_dev_mode:
                        pex_builder.add_sources_from(self.checker_target)
                        req_libs = [
                            tgt for tgt in self.checker_target.closure()
                            if isinstance(tgt, PythonRequirementLibrary)
                        ]

                        pex_builder.add_requirement_libs_from(
                            req_libs=req_libs)
                    else:
                        try:
                            # The checker is already on sys.path, eg: embedded in pants.pex.
                            working_set = WorkingSet(entries=sys.path)
                            for dist in working_set.resolve(
                                [Requirement.parse(self._CHECKER_REQ)]):
                                pex_builder.add_direct_requirements(
                                    dist.requires())
                                pex_builder.add_distribution(dist)
                            pex_builder.add_direct_requirements(
                                [self._CHECKER_REQ])
                        except DistributionNotFound:
                            # We need to resolve the checker from a local or remote distribution repo.
                            pex_builder.add_resolved_requirements(
                                [PythonRequirement(self._CHECKER_REQ)])

                    pex_builder.set_entry_point(self._CHECKER_ENTRYPOINT)
                    pex_builder.freeze()

        return PEX(pex_path, interpreter=interpreter)
Exemple #6
0
 def build_isort_pex(cls, context, interpreter, pex_path, requirements_lib):
     """Build an isort PEX at `pex_path` from `requirements_lib` with `isort` as its console script."""
     with safe_concurrent_creation(pex_path) as chroot:
         wrapper = PexBuilderWrapper(
             PEXBuilder(path=chroot, interpreter=interpreter),
             PythonRepos.global_instance(),
             PythonSetup.global_instance(),
             context.log)
         wrapper.add_requirement_libs_from(req_libs=[requirements_lib])
         wrapper.set_script('isort')
         wrapper.freeze()
Exemple #7
0
 def _build_tool_pex(self, context, interpreter, pex_path, requirements_lib):
     """Build a tool PEX at `pex_path`, entry-pointed at the tool subsystem's entry point."""
     with safe_concurrent_creation(pex_path) as chroot:
         wrapper = PexBuilderWrapper(
             PEXBuilder(path=chroot, interpreter=interpreter),
             PythonRepos.global_instance(),
             PythonSetup.global_instance(),
             context.log)
         wrapper.add_requirement_libs_from(req_libs=[requirements_lib])
         entry_point = self._tool_subsystem().get_entry_point()
         wrapper.set_entry_point(entry_point)
         wrapper.freeze()
 def _create_interpreter_path_file(self, interpreter_path_file, targets):
   """Write the selected interpreter's binary path and extras to `interpreter_path_file`.

   The first line is the interpreter binary path; each extra follows as a
   tab-separated name/version/location line.
   """
   interpreter_cache = PythonInterpreterCache(PythonSetup.global_instance(),
                                              PythonRepos.global_instance(),
                                              logger=self.context.log.debug)
   interpreter = interpreter_cache.select_interpreter_for_targets(targets)
   safe_mkdir_for(interpreter_path_file)
   with open(interpreter_path_file, 'w') as outfile:
     # Fix: the file is opened in text mode and bytes literals have no `.format`
     # on python 3 -- write native strings rather than bytes literals.
     outfile.write('{}\n'.format(interpreter.binary))
     for dist, location in interpreter.extras.items():
       dist_name, dist_version = dist
       outfile.write('{}\t{}\t{}\n'.format(dist_name, dist_version, location))
Exemple #9
0
    def create(cls, builder, log=None):
      """Construct a PexBuilderWrapper pinned to the configured setuptools version.

      :param builder: The underlying PEXBuilder to wrap.
      :param log: Optional logger; defaults to this module's logger.
      """
      setuptools_version = cls.global_instance().get_options().setuptools_version
      setuptools_req = PythonRequirement(
          'setuptools=={}'.format(setuptools_version))
      effective_log = log or logging.getLogger(__name__)
      return PexBuilderWrapper(builder=builder,
                               python_repos_subsystem=PythonRepos.global_instance(),
                               python_setup_subsystem=PythonSetup.global_instance(),
                               setuptools_requirement=setuptools_req,
                               log=effective_log)
Exemple #10
0
    def create(cls, builder, log=None):
      """Construct a PexBuilderWrapper pinned to the configured setuptools version.

      :param builder: The underlying PEXBuilder to wrap.
      :param log: Optional logger; defaults to this module's logger.
      """
      setuptools_version = cls.global_instance().get_options().setuptools_version
      setuptools_req = PythonRequirement(f'setuptools=={setuptools_version}')
      effective_log = log or logging.getLogger(__name__)
      return PexBuilderWrapper(builder=builder,
                               python_repos_subsystem=PythonRepos.global_instance(),
                               python_setup_subsystem=PythonSetup.global_instance(),
                               setuptools_requirement=setuptools_req,
                               log=effective_log)
  def test_setup_using_eggs(self):
    """Interpreter cache setup should expose setuptools/wheel extras resolved from an egg-only repo."""
    def link_egg(repo_root, requirement):
      # Pin the requirement to the locally-available dist version when one exists,
      # so the egg we link matches what the interpreter can already load.
      existing_dist_location = self._interpreter.get_location(requirement)
      if existing_dist_location is not None:
        existing_dist = Package.from_href(existing_dist_location)
        requirement = '{}=={}'.format(existing_dist.name, existing_dist.raw_version)

      distributions = resolve([requirement],
                              interpreter=self._interpreter,
                              precedence=(EggPackage, SourcePackage))
      self.assertEqual(1, len(distributions))
      dist_location = distributions[0].location

      self.assertRegexpMatches(dist_location, r'\.egg$')
      os.symlink(dist_location, os.path.join(repo_root, os.path.basename(dist_location)))

      return Package.from_href(dist_location).raw_version

    with temporary_dir() as root:
      egg_dir = os.path.join(root, 'eggs')
      os.makedirs(egg_dir)
      setuptools_version = link_egg(egg_dir, 'setuptools')
      wheel_version = link_egg(egg_dir, 'wheel')

      interpreter_requirement = self._interpreter.identity.requirement

      # Point the python subsystems at the local egg-only repo (no indexes).
      self.context(for_subsystems=[PythonSetup, PythonRepos], options={
        PythonSetup.options_scope: {
          'interpreter_cache_dir': None,
          'pants_workdir': os.path.join(root, 'workdir'),
          'constraints': [interpreter_requirement],
          'setuptools_version': setuptools_version,
          'wheel_version': wheel_version,
        },
        PythonRepos.options_scope: {
          'indexes': [],
          'repos': [egg_dir],
        }
      })
      cache = PythonInterpreterCache(PythonSetup.global_instance(), PythonRepos.global_instance())

      interpereters = cache.setup(paths=[os.path.dirname(self._interpreter.binary)],
                                  filters=[str(interpreter_requirement)])
      self.assertGreater(len(interpereters), 0)

      def assert_egg_extra(interpreter, name, version):
        # Each extra should resolve to an egg package at the recorded location.
        location = interpreter.get_location('{}=={}'.format(name, version))
        self.assertIsNotNone(location)
        self.assertIsInstance(Package.from_href(location), EggPackage)

      for interpreter in interpereters:
        assert_egg_extra(interpreter, 'setuptools', setuptools_version)
        assert_egg_extra(interpreter, 'wheel', wheel_version)
Exemple #12
0
 def _create_interpreter_path_file(self, interpreter_path_file, targets):
     """Write the selected interpreter's binary path and extras to `interpreter_path_file`.

     The first line is the interpreter binary path; each extra follows as a
     tab-separated name/version/location line.
     """
     interpreter_cache = PythonInterpreterCache(
         PythonSetup.global_instance(),
         PythonRepos.global_instance(),
         logger=self.context.log.debug)
     interpreter = interpreter_cache.select_interpreter_for_targets(targets)
     safe_mkdir_for(interpreter_path_file)
     with open(interpreter_path_file, 'w') as outfile:
         # Fix: the file is opened in text mode and bytes literals have no
         # `.format` on python 3 -- write native strings rather than bytes.
         outfile.write('{}\n'.format(interpreter.binary))
         for dist, location in interpreter.extras.items():
             dist_name, dist_version = dist
             outfile.write('{}\t{}\t{}\n'.format(dist_name, dist_version,
                                                 location))
  def test_namespace_effective(self):
    """Thrift-generated namespace packages from separate source roots should import side by side."""
    self.create_file('src/thrift/com/foo/one.thrift', contents=dedent("""
    namespace py foo.bar

    struct One {}
    """))
    one = self.make_target(spec='src/thrift/com/foo:one',
                           target_type=PythonThriftLibrary,
                           sources=['one.thrift'])
    apache_thrift_gen, synthetic_target_one = self.generate_single_thrift_target(one)

    self.create_file('src/thrift2/com/foo/two.thrift', contents=dedent("""
    namespace py foo.baz

    struct Two {}
    """))
    two = self.make_target(spec='src/thrift2/com/foo:two',
                           target_type=PythonThriftLibrary,
                           sources=['two.thrift'])
    _, synthetic_target_two = self.generate_single_thrift_target(two)

    # Confirm separate PYTHONPATH entries, which we need to test namespace packages.
    self.assertNotEqual(synthetic_target_one.target_base, synthetic_target_two.target_base)

    targets = (synthetic_target_one, synthetic_target_two)
    self.context(for_subsystems=[PythonInterpreterCache, PythonRepos])
    interpreter_cache = PythonInterpreterCache.global_instance()
    python_repos = PythonRepos.global_instance()
    interpreter = interpreter_cache.select_interpreter_for_targets(targets)

    # We need setuptools to import namespace packages under python 2 (via pkg_resources), so we
    # prime the PYTHONPATH with a known good version of setuptools.
    # TODO(John Sirois): We really should be emitting setuptools in a
    # `synthetic_target_extra_dependencies` override in `ApacheThriftPyGen`:
    #   https://github.com/pantsbuild/pants/issues/5975
    pythonpath = [os.path.join(get_buildroot(), t.target_base) for t in targets]
    for resolved_dist in resolve(['thrift=={}'.format(self.get_thrift_version(apache_thrift_gen)),
                                  'setuptools==40.6.3'],
                                 interpreter=interpreter,
                                 context=python_repos.get_network_context(),
                                 fetchers=python_repos.get_fetchers()):
      pythonpath.append(resolved_dist.distribution.location)

    # The import only succeeds if both namespace halves are importable together.
    process = subprocess.Popen([interpreter.binary,
                                '-c',
                                'from foo.bar.ttypes import One; from foo.baz.ttypes import Two'],
                               env={'PYTHONPATH': os.pathsep.join(pythonpath)},
                               stderr=subprocess.PIPE)
    _, stderr = process.communicate()
    self.assertEqual(0, process.returncode, stderr)
  def test_namespace_effective(self):
    """Thrift-generated namespace packages from separate source roots should import side by side."""
    self.create_file('src/thrift/com/foo/one.thrift', contents=dedent("""
    namespace py foo.bar

    struct One {}
    """))
    one = self.make_target(spec='src/thrift/com/foo:one',
                           target_type=PythonThriftLibrary,
                           sources=['one.thrift'])
    apache_thrift_gen, synthetic_target_one = self.generate_single_thrift_target(one)

    self.create_file('src/thrift2/com/foo/two.thrift', contents=dedent("""
    namespace py foo.baz

    struct Two {}
    """))
    two = self.make_target(spec='src/thrift2/com/foo:two',
                           target_type=PythonThriftLibrary,
                           sources=['two.thrift'])
    _, synthetic_target_two = self.generate_single_thrift_target(two)

    # Confirm separate PYTHONPATH entries, which we need to test namespace packages.
    self.assertNotEqual(synthetic_target_one.target_base, synthetic_target_two.target_base)

    targets = (synthetic_target_one, synthetic_target_two)
    self.context(for_subsystems=[PythonInterpreterCache, PythonRepos])
    interpreter_cache = PythonInterpreterCache.global_instance()
    python_repos = PythonRepos.global_instance()
    interpreter = interpreter_cache.select_interpreter_for_targets(targets)

    # We need setuptools to import namespace packages (via pkg_resources), so we prime the
    # PYTHONPATH with interpreter extras, which Pants always populates with setuptools and wheel.
    # TODO(John Sirois): We really should be emitting setuptools in a
    # `synthetic_target_extra_dependencies` override in `ApacheThriftPyGen`:
    #   https://github.com/pantsbuild/pants/issues/5975
    pythonpath = list(interpreter.extras.values())
    pythonpath.extend(os.path.join(get_buildroot(), t.target_base) for t in targets)
    for resolved_dist in resolve(['thrift=={}'.format(self.get_thrift_version(apache_thrift_gen))],
                                 interpreter=interpreter,
                                 context=python_repos.get_network_context(),
                                 fetchers=python_repos.get_fetchers()):
      pythonpath.append(resolved_dist.distribution.location)

    # The import only succeeds if both namespace halves are importable together.
    process = subprocess.Popen([interpreter.binary,
                                '-c',
                                'from foo.bar.ttypes import One; from foo.baz.ttypes import Two'],
                               env={'PYTHONPATH': os.pathsep.join(pythonpath)},
                               stderr=subprocess.PIPE)
    _, stderr = process.communicate()
    self.assertEqual(0, process.returncode, stderr)
Exemple #15
0
def dump_requirement_libs(builder, interpreter, req_libs, log, platforms=None):
    """Multi-platform dependency resolution for PEX files (deprecated shim).

    :param builder: Dump the requirements into this builder.
    :param interpreter: The :class:`PythonInterpreter` to resolve requirements for.
    :param req_libs: A list of :class:`PythonRequirementLibrary` targets to resolve.
    :param log: Use this logger.
    :param platforms: A list of :class:`Platform`s to resolve requirements for.
                      Defaults to the platforms specified by PythonSetup.
    """
    deprecated(
        '1.11.0.dev0',
        'This function has been moved onto the PexBuilderWrapper class.')
    # Delegate to the wrapper, which now owns requirement-library resolution.
    wrapper = PexBuilderWrapper(builder,
                                PythonRepos.global_instance(),
                                PythonSetup.global_instance(),
                                log)
    wrapper.add_requirement_libs_from(req_libs, platforms)
Exemple #16
0
 def _resolve_requirements_for_versioned_target_closure(
         self, interpreter, vt):
     """Build (or reuse from the workdir cache) a requirements PEX for `vt`'s target closure.

     The PEX directory is keyed on the interpreter identity and the versioned
     target's cache key, so repeat invocations are cheap.
     """
     pex_dir = os.path.realpath(
         os.path.join(self.workdir, str(interpreter.identity),
                      vt.cache_key.hash))
     if not os.path.isdir(pex_dir):
         requirement_libs = [
             dep for dep in vt.target.closure()
             if has_python_requirements(dep)
         ]
         with safe_concurrent_creation(pex_dir) as staging_path:
             wrapper = PexBuilderWrapper(
                 PEXBuilder(staging_path, interpreter=interpreter, copy=True),
                 PythonRepos.global_instance(),
                 PythonSetup.global_instance(),
                 self.context.log)
             wrapper.add_requirement_libs_from(requirement_libs)
             wrapper.freeze()
     return PEX(pex_dir, interpreter=interpreter)
Exemple #17
0
  def _gather_sources(self, target_roots):
    """Run the GatherSources task over `target_roots` and return its PYTHON_SOURCES product."""
    context = self.context(target_roots=target_roots, for_subsystems=[PythonSetup, PythonRepos])

    # We must get an interpreter via the cache, instead of using PythonInterpreter.get() directly,
    # to ensure that the interpreter has setuptools and wheel support.
    interpreter = PythonInterpreter.get()
    interpreter_cache = PythonInterpreterCache(PythonSetup.global_instance(),
                                               PythonRepos.global_instance(),
                                               logger=context.log.debug)
    interpreters = interpreter_cache.setup(paths=[os.path.dirname(interpreter.binary)],
                                           filters=[str(interpreter.identity.requirement)])
    # Seed the interpreter product so the task under test uses the cached interpreter.
    context.products.get_data(PythonInterpreter, lambda: interpreters[0])

    task = self.create_task(context)
    task.execute()

    return context.products.get_data(GatherSources.PYTHON_SOURCES)
  def _gather_sources(self, target_roots):
    """Run the GatherSources task over `target_roots` and return its PYTHON_SOURCES product."""
    context = self.context(target_roots=target_roots, for_subsystems=[PythonSetup, PythonRepos])

    # We must get an interpreter via the cache, instead of using PythonInterpreter.get() directly,
    # to ensure that the interpreter has setuptools and wheel support.
    interpreter = PythonInterpreter.get()
    interpreter_cache = PythonInterpreterCache(PythonSetup.global_instance(),
                                               PythonRepos.global_instance(),
                                               logger=context.log.debug)
    interpreters = interpreter_cache.setup(paths=[os.path.dirname(interpreter.binary)],
                                           filters=[str(interpreter.identity.requirement)])
    # Seed the interpreter product so the task under test uses the cached interpreter.
    context.products.get_data(PythonInterpreter, lambda: interpreters[0])

    task = self.create_task(context)
    task.execute()

    return context.products.get_data(GatherSources.PYTHON_SOURCES)
Exemple #19
0
 def execute(self):
     """Run Checkstyle on all found non-synthetic source files.

     :returns: The number of style failures found (0 when nothing to check).
     :raises TaskError: If failures were found and the `fail` option is set.
     """
     python_tgts = self.context.targets(
         lambda tgt: isinstance(tgt, (PythonTarget)))
     if not python_tgts:
         return 0
     interpreter_cache = PythonInterpreterCache(
         PythonSetup.global_instance(),
         PythonRepos.global_instance(),
         logger=self.context.log.debug)
     with self.invalidated(self.get_targets(
             self._is_checked)) as invalidation_check:
         failure_count = 0
         # Group invalid targets by their interpreter-compatibility filters so each
         # group can be checked under an interpreter it is compatible with.
         tgts_by_compatibility, _ = interpreter_cache.partition_targets_by_compatibility(
             [vt.target for vt in invalidation_check.invalid_vts])
         for filters, targets in tgts_by_compatibility.items():
             if self.get_options(
             ).interpreter_constraints_whitelist is None and not self._constraints_are_whitelisted(
                     filters):
                 deprecated_conditional(
                     lambda: self.get_options(
                     ).interpreter_constraints_whitelist is None,
                     '1.14.0.dev2',
                     "Python linting is currently restricted to targets that match the global "
                     "interpreter constraints: {}. Pants detected unacceptable filters: {}. "
                     "Use the `--interpreter-constraints-whitelist` lint option to whitelist "
                     "compatibiltiy constraints.".format(
                         PythonSetup.global_instance().
                         interpreter_constraints, filters))
             else:
                 sources = self.calculate_sources([tgt for tgt in targets])
                 if sources:
                     allowed_interpreters = set(
                         interpreter_cache.setup(filters=filters))
                     if not allowed_interpreters:
                         raise TaskError(
                             'No valid interpreters found for targets: {}\n(filters: {})'
                             .format(targets, filters))
                     # min() picks the lowest acceptable interpreter deterministically.
                     interpreter = min(allowed_interpreters)
                     failure_count += self.checkstyle(interpreter, sources)
         if failure_count > 0 and self.get_options().fail:
             raise TaskError(
                 '{} Python Style issues found. You may try `./pants fmt <targets>`'
                 .format(failure_count))
         return failure_count
Exemple #20
0
  def bootstrap(self, interpreter, pex_file_path, extra_reqs=None):
    """Build, cache and return an executable PEX for this tool.

    :param interpreter: The PythonInterpreter the PEX will run under.
    :param pex_file_path: The path at which the built PEX is cached.
    :param extra_reqs: Optional extra requirements resolved alongside the base requirements.
    :returns: A PEX handle on the (possibly pre-existing) file.
    """
    # Caching is done just by checking if the file at the specified path is already executable.
    if not is_executable(pex_file_path):
      pex_info = PexInfo.default(interpreter=interpreter)
      if self.entry_point is not None:
        pex_info.entry_point = self.entry_point

      with safe_concurrent_creation(pex_file_path) as safe_path:
        all_reqs = list(self.base_requirements) + list(extra_reqs or [])
        pex_builder = PexBuilderWrapper(
          PEXBuilder(interpreter=interpreter, pex_info=pex_info),
          PythonRepos.global_instance(),
          PythonSetup.global_instance(),
          logger)
        # Resolve for the current platform only since this PEX runs locally.
        pex_builder.add_resolved_requirements(all_reqs, platforms=['current'])
        pex_builder.build(safe_path)

    return PEX(pex_file_path, interpreter)
Exemple #21
0
  def _options(self):
    """Hand-roll bootstrap of the subsystem options needed to resolve plugins.

    :returns: Options with all flag values dropped, registered for every known scope.
    """
    # NB: The PluginResolver runs very early in the pants startup sequence before the standard
    # Subsystem facility is wired up.  As a result PluginResolver is not itself a Subsystem with
    # PythonRepos as a dependency.  Instead it does the minimum possible work to hand-roll
    # bootstrapping of the Subsystems it needs.
    known_scope_infos = PythonRepos.known_scope_infos()
    options = self._options_bootstrapper.get_full_options(known_scope_infos)

    # Ignore command line flags since we'd blow up on any we don't understand (most of them).
    # If someone wants to bootstrap plugins in a one-off custom way they'll need to use env vars
    # or a --pants-config-files pointing to a custom pants.ini snippet.
    defaulted_only_options = options.drop_flag_values()

    # Register global options plus each optionable reachable from the known scopes so that
    # option lookups on those scopes succeed.
    GlobalOptionsRegistrar.register_options_on_scope(defaulted_only_options)
    distinct_optionable_classes = sorted({si.optionable_cls for si in known_scope_infos},
                                         key=lambda o: o.options_scope)
    for optionable_cls in distinct_optionable_classes:
      optionable_cls.register_options_on_scope(defaulted_only_options)
    return defaulted_only_options
    def resolve_requirements(self, interpreter, req_libs):
        """Requirements resolution for PEX files.

        :param interpreter: Resolve against this :class:`PythonInterpreter`.
        :param req_libs: A list of :class:`PythonRequirementLibrary` targets to resolve.
        :returns: a PEX containing target requirements and any specified python dist targets.
        """
        with self.invalidated(req_libs) as invalidation_check:
            # If there are no relevant targets, we still go through the motions of resolving
            # an empty set of requirements, to prevent downstream tasks from having to check
            # for this special case.
            if invalidation_check.all_vts:
                target_set_id = VersionedTargetSet.from_versioned_targets(
                    invalidation_check.all_vts).cache_key.hash
            else:
                target_set_id = 'no_targets'

            # We need to ensure that we are resolving for only the current platform if we are
            # including local python dist targets that have native extensions.
            tgts = self.context.targets()
            if self._python_native_code_settings.check_build_for_current_platform_only(
                    tgts):
                maybe_platforms = ['current']
            else:
                maybe_platforms = None

            # Cache the PEX under the interpreter identity and target-set fingerprint.
            path = os.path.realpath(
                os.path.join(self.workdir, str(interpreter.identity),
                             target_set_id))
            # Note that we check for the existence of the directory, instead of for invalid_vts,
            # to cover the empty case.
            if not os.path.isdir(path):
                with safe_concurrent_creation(path) as safe_path:
                    pex_builder = PexBuilderWrapper(
                        PEXBuilder(path=safe_path,
                                   interpreter=interpreter,
                                   copy=True), PythonRepos.global_instance(),
                        PythonSetup.global_instance(), self.context.log)
                    pex_builder.add_requirement_libs_from(
                        req_libs, platforms=maybe_platforms)
                    pex_builder.freeze()
        return PEX(path, interpreter=interpreter)
Exemple #23
0
def resolve_multi(interpreter, requirements, platforms, find_links):
    """Multi-platform dependency resolution for PEX files.

    Returns a list of distributions that must be included in order to satisfy a set of
    requirements.  That may involve distributions for multiple platforms.

    :param interpreter: The :class:`PythonInterpreter` to resolve for.
    :param requirements: A list of :class:`PythonRequirement` objects to resolve.
    :param platforms: A list of :class:`Platform`s to resolve for.
    :param find_links: Additional paths to search for source packages during resolution.
    :return: Map of platform name -> list of :class:`pkg_resources.Distribution` instances needed
             to satisfy the requirements on that platform.
    """
    python_setup = PythonSetup.global_instance()
    python_repos = PythonRepos.global_instance()
    platforms = platforms or python_setup.platforms
    find_links = find_links or []
    distributions = {}
    fetchers = python_repos.get_fetchers()
    # Local find-links paths act as additional fetcher roots.
    fetchers.extend(Fetcher([path]) for path in find_links)

    for platform in platforms:
        # The resolver cache is keyed by interpreter identity so resolves for
        # different interpreters never collide.
        requirements_cache_dir = os.path.join(python_setup.resolver_cache_dir,
                                              str(interpreter.identity))
        distributions[platform] = resolve(
            requirements=[req.requirement for req in requirements],
            interpreter=interpreter,
            fetchers=fetchers,
            # 'current' is expanded to the concrete local platform.
            platform=get_local_platform()
            if platform == 'current' else platform,
            context=python_repos.get_network_context(),
            cache=requirements_cache_dir,
            cache_ttl=python_setup.resolver_cache_ttl,
            allow_prereleases=python_setup.resolver_allow_prereleases,
            pkg_blacklist=python_setup.resolver_blacklist,
            use_manylinux=python_setup.use_manylinux)

    return distributions
Exemple #24
0
    def _options(self):
        """Hand-roll bootstrap of the subsystem options needed to resolve plugins.

        :returns: Options with all flag values dropped, registered for every known scope.
        """
        # NB: The PluginResolver runs very early in the pants startup sequence before the standard
        # Subsystem facility is wired up.  As a result PluginResolver is not itself a Subsystem with
        # PythonRepos as a dependency.  Instead it does the minimum possible work to hand-roll
        # bootstrapping of the Subsystems it needs.
        known_scope_infos = PythonRepos.known_scope_infos()
        options = self._options_bootstrapper.get_full_options(
            known_scope_infos)

        # Ignore command line flags since we'd blow up on any we don't understand (most of them).
        # If someone wants to bootstrap plugins in a one-off custom way they'll need to use env vars
        # or a --pants-config-files pointing to a custom pants.ini snippet.
        defaulted_only_options = options.drop_flag_values()

        # Register global options plus each optionable reachable from the known scopes so that
        # option lookups on those scopes succeed.
        GlobalOptionsRegistrar.register_options_on_scope(
            defaulted_only_options)
        distinct_optionable_classes = sorted(
            {si.optionable_cls
             for si in known_scope_infos},
            key=lambda o: o.options_scope)
        for optionable_cls in distinct_optionable_classes:
            optionable_cls.register_options_on_scope(defaulted_only_options)
        return defaulted_only_options
Exemple #25
0
def resolve_multi(interpreter, requirements, platforms, find_links):
  """Multi-platform dependency resolution for PEX files.

  Returns a list of distributions that must be included in order to satisfy a set of requirements.
  That may involve distributions for multiple platforms.

  :param interpreter: The :class:`PythonInterpreter` to resolve for.
  :param requirements: A list of :class:`PythonRequirement` objects to resolve.
  :param platforms: A list of :class:`Platform`s to resolve for.
  :param find_links: Additional paths to search for source packages during resolution.
  :return: Map of platform name -> list of :class:`pkg_resources.Distribution` instances needed
           to satisfy the requirements on that platform.
  """
  python_setup = PythonSetup.global_instance()
  python_repos = PythonRepos.global_instance()
  platforms = platforms or python_setup.platforms
  find_links = find_links or []
  distributions = {}
  fetchers = python_repos.get_fetchers()
  # Local find-links paths act as additional fetcher roots.
  fetchers.extend(Fetcher([path]) for path in find_links)

  for platform in platforms:
    # The resolver cache is keyed by interpreter identity so resolves for
    # different interpreters never collide.
    requirements_cache_dir = os.path.join(python_setup.resolver_cache_dir,
                                          str(interpreter.identity))
    distributions[platform] = resolve(
      requirements=[req.requirement for req in requirements],
      interpreter=interpreter,
      fetchers=fetchers,
      # Platform aliases (eg 'current') are expanded to concrete platform tags.
      platform=expand_and_maybe_adjust_platform(interpreter=interpreter, platform=platform),
      context=python_repos.get_network_context(),
      cache=requirements_cache_dir,
      cache_ttl=python_setup.resolver_cache_ttl,
      allow_prereleases=python_setup.resolver_allow_prereleases,
      pkg_blacklist=python_setup.resolver_blacklist,
      use_manylinux=python_setup.use_manylinux)

  return distributions
Exemple #26
0
def resolve_multi(interpreter, requirements, platforms, find_links):
    """Deprecated multi-platform dependency resolution for PEX files.

    Delegates to :class:`PexBuilderWrapper`'s ``_resolve_multi``; see that
    method for the platform-name -> distributions mapping it produces.

    :param interpreter: The :class:`PythonInterpreter` to resolve for.
    :param requirements: A list of :class:`PythonRequirement` objects to resolve.
    :param platforms: A list of :class:`Platform`s to resolve for.
    :param find_links: Additional paths to search for source packages during resolution.
    :return: Map of platform name -> list of :class:`pkg_resources.Distribution` instances needed
             to satisfy the requirements on that platform.
    """
    deprecated(
        '1.11.0.dev0',
        'This function has been moved onto the PexBuilderWrapper class.')
    wrapper = PexBuilderWrapper(
        builder=None,
        python_repos_subsystem=PythonRepos.global_instance(),
        python_setup_subsystem=PythonSetup.global_instance(),
        log=None)
    return wrapper._resolve_multi(interpreter, requirements, platforms,
                                  find_links)
    def test_setup_using_eggs(self):
        """End-to-end check that the interpreter cache can satisfy setuptools
        and wheel extras from a local egg-only repo.

        NOTE(review): relies on ``self._interpreter`` being provided by the
        enclosing test class, which is not visible in this chunk — confirm.
        """
        def link_egg(repo_root, requirement):
            # If the interpreter already has a distribution for this
            # requirement, pin to that exact version so the resolve below is
            # deterministic rather than fetching whatever is newest.
            existing_dist_location = self._interpreter.get_location(
                requirement)
            if existing_dist_location is not None:
                existing_dist = Package.from_href(existing_dist_location)
                requirement = '{}=={}'.format(existing_dist.name,
                                              existing_dist.raw_version)

            # Resolve preferring egg packages over source packages.
            distributions = resolve([requirement],
                                    interpreter=self._interpreter,
                                    precedence=(EggPackage, SourcePackage))
            self.assertEqual(1, len(distributions))
            dist_location = distributions[0].location

            self.assertRegexpMatches(dist_location, r'\.egg$')
            # Symlink the resolved egg into the directory that will serve as
            # the only configured repo, so setup below must use it.
            os.symlink(
                dist_location,
                os.path.join(repo_root, os.path.basename(dist_location)))

            return Package.from_href(dist_location).raw_version

        with temporary_dir() as root:
            egg_dir = os.path.join(root, 'eggs')
            os.makedirs(egg_dir)
            setuptools_version = link_egg(egg_dir, 'setuptools')
            wheel_version = link_egg(egg_dir, 'wheel')

            interpreter_requirement = self._interpreter.identity.requirement

            # Configure the subsystems with no indexes and only the local egg
            # dir as a repo, pinning setuptools/wheel to the linked versions.
            self.context(for_subsystems=[PythonSetup, PythonRepos],
                         options={
                             PythonSetup.options_scope: {
                                 'interpreter_cache_dir': None,
                                 'pants_workdir':
                                 os.path.join(root, 'workdir'),
                                 'constraints': [interpreter_requirement],
                                 'setuptools_version': setuptools_version,
                                 'wheel_version': wheel_version,
                             },
                             PythonRepos.options_scope: {
                                 'indexes': [],
                                 'repos': [egg_dir],
                             }
                         })
            cache = PythonInterpreterCache(PythonSetup.global_instance(),
                                           PythonRepos.global_instance())

            interpereters = cache.setup(
                paths=[os.path.dirname(self._interpreter.binary)],
                filters=[str(interpreter_requirement)])
            self.assertGreater(len(interpereters), 0)

            def assert_egg_extra(interpreter, name, version):
                # The extra must be present and must have come from an egg,
                # since eggs are the only packages the repo dir contains.
                location = interpreter.get_location('{}=={}'.format(
                    name, version))
                self.assertIsNotNone(location)
                self.assertIsInstance(Package.from_href(location), EggPackage)

            for interpreter in interpereters:
                assert_egg_extra(interpreter, 'setuptools', setuptools_version)
                assert_egg_extra(interpreter, 'wheel', wheel_version)
Exemple #28
0
 def _python_repos(self):
     """Return the global :class:`PythonRepos` subsystem instance."""
     python_repos = PythonRepos.global_instance()
     return python_repos
Exemple #29
0
 def _interpreter_cache(self):
   """Build a :class:`PythonInterpreterCache` wired to the global subsystems.

   Cache chatter is routed through the context's debug-level logger.
   """
   python_setup = PythonSetup.global_instance()
   python_repos = PythonRepos.global_instance()
   return PythonInterpreterCache(python_setup,
                                 python_repos,
                                 logger=self.context.log.debug)
Exemple #30
0
    def _create_binary(self, binary_tgt, results_dir):
        """Create a .pex file for the specified binary target.

        :param binary_tgt: The python_binary target to package.
        :param results_dir: Directory into which the .pex file is written.
        :return: The path of the built .pex file.
        """
        # Note that we rebuild a chroot from scratch, instead of using the REQUIREMENTS_PEX
        # and PYTHON_SOURCES products, because those products are already-built pexes, and there's
        # no easy way to merge them into a single pex file (for example, they each have a __main__.py,
        # metadata, and so on, which the merging code would have to handle specially).
        interpreter = self.context.products.get_data(PythonInterpreter)
        with temporary_dir() as tmpdir:
            # Create the pex_info for the binary.
            # Stamp run-tracker info into the pex's build properties metadata.
            run_info_dict = self.context.run_tracker.run_info.get_as_dict()
            build_properties = PexInfo.make_build_properties()
            build_properties.update(run_info_dict)
            pex_info = binary_tgt.pexinfo.copy()
            pex_info.build_properties = build_properties

            # The wrapper layers requirement resolution and source dumping
            # on top of the raw PEXBuilder.
            pex_builder = PexBuilderWrapper(
                PEXBuilder(path=tmpdir,
                           interpreter=interpreter,
                           pex_info=pex_info,
                           copy=True), PythonRepos.global_instance(),
                PythonSetup.global_instance(), self.context.log)

            # A target-specified shebang overrides the builder default.
            if binary_tgt.shebang:
                self.context.log.info(
                    'Found Python binary target {} with customized shebang, using it: {}'
                    .format(binary_tgt.name, binary_tgt.shebang))
                pex_builder.set_shebang(binary_tgt.shebang)
            else:
                self.context.log.debug(
                    'No customized shebang found for {}'.format(
                        binary_tgt.name))

            # Find which targets provide sources and which specify requirements.
            # NOTE(review): a target can land in both constraint_tgts and one of
            # the other lists — the last check is deliberately `if`, not `elif`.
            source_tgts = []
            req_tgts = []
            constraint_tgts = []
            for tgt in binary_tgt.closure(exclude_scopes=Scopes.COMPILE):
                if has_python_sources(tgt) or has_resources(tgt):
                    source_tgts.append(tgt)
                elif has_python_requirements(tgt):
                    req_tgts.append(tgt)
                if is_python_target(tgt):
                    constraint_tgts.append(tgt)

            # Add target's interpreter compatibility constraints to pex info.
            pex_builder.add_interpreter_constraints_from(constraint_tgts)

            # Dump everything into the builder's chroot.
            for tgt in source_tgts:
                pex_builder.add_sources_from(tgt)

            # We need to ensure that we are resolving for only the current platform if we are
            # including local python dist targets that have native extensions.
            self._python_native_code_settings.check_build_for_current_platform_only(
                self.context.targets())
            pex_builder.add_requirement_libs_from(
                req_tgts, platforms=binary_tgt.platforms)

            # Build the .pex file.
            pex_path = os.path.join(results_dir,
                                    '{}.pex'.format(binary_tgt.name))
            pex_builder.build(pex_path)
            return pex_path
Exemple #31
0
 def _interpreter_cache(self):
     """Construct an interpreter cache from the global subsystem instances,
     logging through the context's debug logger."""
     setup_subsystem = PythonSetup.global_instance()
     repos_subsystem = PythonRepos.global_instance()
     return PythonInterpreterCache(setup_subsystem,
                                   repos_subsystem,
                                   logger=self.context.log.debug)