Exemplo n.º 1
0
    def _resolve_multi(self, interpreter, requirements, find_links):
        """Multi-platform dependency resolution for PEX files.

        Returns a map of platform name -> list of distributions needed to satisfy
        the requirements on that platform.  That may involve distributions for
        multiple platforms.

        :param interpreter: The :class:`PythonInterpreter` to resolve for.
        :param requirements: A list of :class:`PythonRequirement` objects to resolve.
        :param find_links: Additional paths to search for source packages during resolution.
        :return: Map of platform name -> list of :class:`pkg_resources.Distribution` instances
                 needed to satisfy the requirements on that platform.
        """
        python_setup = PythonSetup.global_instance()
        python_repos = PythonRepos.global_instance()
        distributions = {}
        fetchers = python_repos.get_fetchers()
        # Also search any caller-supplied local paths for packages.
        fetchers.extend(Fetcher([path]) for path in find_links)

        # The cache dir depends only on the interpreter, not on the platform,
        # so compute it once outside the loop.
        requirements_cache_dir = os.path.join(
            python_setup.resolver_cache_dir, str(interpreter.identity))

        for platform in python_setup.platforms:
            distributions[platform] = resolve(
                requirements=[req.requirement for req in requirements],
                interpreter=interpreter,
                fetchers=fetchers,
                # 'current' is a pseudo-platform meaning the local platform;
                # the resolver expects None in that case.
                platform=None if platform == 'current' else platform,
                context=python_repos.get_network_context(),
                cache=requirements_cache_dir,
                cache_ttl=python_setup.resolver_cache_ttl)

        return distributions
Exemplo n.º 2
0
  def temporary_chroot(self, interpreter=None, pex_info=None, targets=None,
                       extra_requirements=None, platforms=None, pre_freeze=None):
    """Yields a temporary PythonChroot created with the specified args.

    pre_freeze is an optional function run on the chroot just before freezing its builder,
    to allow for any extra modification.

    The chroot is deleted when the context exits, even if the caller's block raises.
    """
    path = tempfile.mkdtemp()
    builder = PEXBuilder(path=path, interpreter=interpreter, pex_info=pex_info)
    with self.context.new_workunit('chroot'):
      chroot = PythonChroot(
        context=self.context,
        python_setup=PythonSetup.global_instance(),
        python_repos=PythonRepos.global_instance(),
        targets=targets,
        extra_requirements=extra_requirements,
        builder=builder,
        platforms=platforms,
        interpreter=interpreter)
      chroot.dump()
      if pre_freeze:
        pre_freeze(chroot)
      builder.freeze()
    try:
      yield chroot
    finally:
      # Clean up unconditionally so temporary directories don't leak when the
      # caller's with-body raises.
      chroot.delete()
Exemplo n.º 3
0
    def execute(self):
        """Select a Python interpreter for all Python targets in the context.

        Computes a fingerprint-based id over the Python targets, then either
        selects an interpreter and records the selection to a file under the
        workdir, or re-hydrates a previously recorded selection.  The chosen
        interpreter is published as the PythonInterpreter data product.
        """
        interpreter = None
        python_tgts = self.context.targets(
            lambda tgt: isinstance(tgt, PythonTarget))
        fs = PythonInterpreterFingerprintStrategy(task=self)
        with self.invalidated(python_tgts,
                              fingerprint_strategy=fs) as invalidation_check:
            # If there are no relevant targets, we still go through the motions of selecting
            # an interpreter, to prevent downstream tasks from having to check for this special case.
            if invalidation_check.all_vts:
                target_set_id = VersionedTargetSet.from_versioned_targets(
                    invalidation_check.all_vts).cache_key.hash
            else:
                target_set_id = 'no_targets'
            # One selection file per unique target set; its existence doubles as the cache-hit check.
            interpreter_path_file = os.path.join(self.workdir, target_set_id,
                                                 'interpreter.path')
            if not os.path.exists(interpreter_path_file):
                interpreter_cache = PythonInterpreterCache(
                    PythonSetup.global_instance(),
                    PythonRepos.global_instance(),
                    logger=self.context.log.debug)

                # We filter the interpreter cache itself (and not just the interpreters we pull from it)
                # because setting up some python versions (e.g., 3<=python<3.3) crashes, and this gives us
                # an escape hatch.
                filters = self.get_options().constraints or [b'']

                # Cache setup's requirement fetching can hang if run concurrently by another pants proc.
                self.context.acquire_lock()
                try:
                    interpreter_cache.setup(filters=filters)
                finally:
                    self.context.release_lock()

                interpreter = interpreter_cache.select_interpreter_for_targets(
                    python_tgts)
                safe_mkdir_for(interpreter_path_file)
                # Record the selection as tab-separated lines: first the binary and identity,
                # then one line per extra distribution (name, version, location).
                # NOTE(review): b''.format only exists on Python 2 (where bytes is str) —
                # confirm this file still targets a Python 2 runtime.
                with open(interpreter_path_file, 'w') as outfile:
                    outfile.write(b'{}\t{}\n'.format(
                        interpreter.binary, str(interpreter.identity)))
                    for dist, location in interpreter.extras.items():
                        dist_name, dist_version = dist
                        outfile.write(b'{}\t{}\t{}\n'.format(
                            dist_name, dist_version, location))

        if not interpreter:
            # Cache hit: re-hydrate the interpreter from the recorded selection.
            with open(interpreter_path_file, 'r') as infile:
                lines = infile.readlines()
                binary, identity = lines[0].strip().split('\t')
                extras = {}
                for line in lines[1:]:
                    dist_name, dist_version, location = line.strip().split(
                        '\t')
                    extras[(dist_name, dist_version)] = location

            interpreter = PythonInterpreter(binary,
                                            PythonIdentity.from_path(identity),
                                            extras)

        self.context.products.get_data(PythonInterpreter, lambda: interpreter)
Exemplo n.º 4
0
    def _resolve_multi(self, interpreter, requirements, find_links):
        """Multi-platform dependency resolution for PEX files.

        Returns a map of platform name -> list of distributions needed to satisfy
        the requirements on that platform.  That may involve distributions for
        multiple platforms.

        :param interpreter: The :class:`PythonInterpreter` to resolve for.
        :param requirements: A list of :class:`PythonRequirement` objects to resolve.
        :param find_links: Additional paths to search for source packages during resolution.
        :return: Map of platform name -> list of :class:`pkg_resources.Distribution` instances
                 needed to satisfy the requirements on that platform.
        """
        python_setup = PythonSetup.global_instance()
        python_repos = PythonRepos.global_instance()
        distributions = {}
        fetchers = python_repos.get_fetchers()
        # Also search any caller-supplied local paths for packages.
        fetchers.extend(Fetcher([path]) for path in find_links)

        # The cache dir depends only on the interpreter, not on the platform,
        # so compute it once outside the loop.
        requirements_cache_dir = os.path.join(python_setup.resolver_cache_dir, str(interpreter.identity))

        for platform in python_setup.platforms:
            distributions[platform] = resolve(
                requirements=[req.requirement for req in requirements],
                interpreter=interpreter,
                fetchers=fetchers,
                # "current" is a pseudo-platform meaning the local platform;
                # the resolver expects None in that case.
                platform=None if platform == "current" else platform,
                context=python_repos.get_network_context(),
                cache=requirements_cache_dir,
                cache_ttl=python_setup.resolver_cache_ttl,
            )

        return distributions
Exemplo n.º 5
0
 def _build_chroot(self,
                   path,
                   interpreter,
                   pex_info,
                   targets,
                   platforms,
                   extra_requirements=None,
                   executable_file_content=None):
     """Create, dump, and freeze a PythonChroot with the specified args."""
     builder = PEXBuilder(path=path, interpreter=interpreter, pex_info=pex_info)
     with self.context.new_workunit('chroot'):
         chroot = PythonChroot(
             context=self.context,
             python_setup=PythonSetup.global_instance(),
             python_repos=PythonRepos.global_instance(),
             interpreter=interpreter,
             builder=builder,
             targets=targets,
             platforms=platforms,
             extra_requirements=extra_requirements)
         chroot.dump()
         if executable_file_content is not None:
             exe_path = os.path.join(path, '{}.py'.format(self.CHROOT_EXECUTABLE_NAME))
             with open(exe_path, 'w') as fh:
                 fh.write(executable_file_content)
             # Override any user-specified entry point, under the assumption that the
             # executable_file_content does what the user intends (including, probably,
             # calling that underlying entry point).
             pex_info.entry_point = self.CHROOT_EXECUTABLE_NAME
         builder.freeze()
     return chroot
Exemplo n.º 6
0
 def _test_runner(self, targets, workunit):
   """Yields a pytest PEX plus extra pytest CLI args for the given targets."""
   interpreter = self.select_interpreter_for_targets(targets)
   builder = PEXBuilder(interpreter=interpreter)
   builder.info.entry_point = 'pytest'
   chroot = PythonChroot(
     context=self.context,
     python_setup=PythonSetup.global_instance(),
     python_repos=PythonRepos.global_instance(),
     targets=targets,
     extra_requirements=self._TESTING_TARGETS,
     builder=builder,
     platforms=('current',),
     interpreter=interpreter)
   try:
     builder = chroot.dump()
     builder.freeze()
     pex = PEX(builder.path(), interpreter=interpreter)
     # Collect sharding, junit-xml, and coverage arguments in one with-statement.
     with self._maybe_shard() as shard_args, \
          self._maybe_emit_junit_xml(targets) as junit_args, \
          self._maybe_emit_coverage_data(targets, builder.path(), pex,
                                         workunit) as coverage_args:
       yield pex, shard_args + junit_args + coverage_args
   finally:
     # Always clean up the chroot, even if a context manager above raised.
     chroot.delete()
Exemplo n.º 7
0
 def create_chroot(self, interpreter, builder, targets, platforms, extra_requirements):
   """Build a PythonChroot wired up with the global python subsystem instances."""
   chroot_kwargs = dict(
     python_setup=PythonSetup.global_instance(),
     python_repos=PythonRepos.global_instance(),
     ivy_bootstrapper=self.ivy_bootstrapper,
     thrift_binary_factory=self.thrift_binary_factory,
     interpreter=interpreter,
     builder=builder,
     targets=targets,
     platforms=platforms,
     extra_requirements=extra_requirements)
   return PythonChroot(**chroot_kwargs)
Exemplo n.º 8
0
 def create_chroot(self, interpreter, builder, targets, platforms,
                   extra_requirements):
     """Construct a PythonChroot using the global python subsystem instances."""
     python_setup = PythonSetup.global_instance()
     python_repos = PythonRepos.global_instance()
     return PythonChroot(python_setup=python_setup,
                         python_repos=python_repos,
                         ivy_bootstrapper=self.ivy_bootstrapper,
                         thrift_binary_factory=self.thrift_binary_factory,
                         interpreter=interpreter,
                         builder=builder,
                         targets=targets,
                         platforms=platforms,
                         extra_requirements=extra_requirements)
Exemplo n.º 9
0
  def test_setup_using_eggs(self):
    """Verify interpreter cache setup can consume egg distributions.

    Links setuptools and wheel eggs into a local repo, points PythonRepos at it,
    and checks that interpreters set up from the cache expose those eggs as extras.
    """
    def link_egg(repo_root, requirement):
      # Pin the requirement to the already-installed version, if any, so the
      # resolve below cannot pick a different release than the one we link.
      existing_dist_location = self._interpreter.get_location(requirement)
      if existing_dist_location is not None:
        existing_dist = Package.from_href(existing_dist_location)
        requirement = '{}=={}'.format(existing_dist.name, existing_dist.raw_version)

      distributions = resolve([requirement],
                              interpreter=self._interpreter,
                              precedence=(EggPackage, SourcePackage))
      self.assertEqual(1, len(distributions))
      dist_location = distributions[0].location

      self.assertRegexpMatches(dist_location, r'\.egg$')
      os.symlink(dist_location, os.path.join(repo_root, os.path.basename(dist_location)))

      return Package.from_href(dist_location).raw_version

    with temporary_dir() as root:
      egg_dir = os.path.join(root, 'eggs')
      os.makedirs(egg_dir)
      setuptools_version = link_egg(egg_dir, 'setuptools')
      wheel_version = link_egg(egg_dir, 'wheel')

      interpreter_requirement = self._interpreter.identity.requirement

      self.context(for_subsystems=[PythonSetup, PythonRepos], options={
        PythonSetup.options_scope: {
          'interpreter_cache_dir': None,
          'pants_workdir': os.path.join(root, 'workdir'),
          'interpreter_requirement': interpreter_requirement,
          'setuptools_version': setuptools_version,
          'wheel_version': wheel_version,
        },
        PythonRepos.options_scope: {
          'indexes': [],
          'repos': [egg_dir],
        }
      })
      cache = PythonInterpreterCache(PythonSetup.global_instance(), PythonRepos.global_instance())

      # Fixed misspelled local: 'interpereters' -> 'interpreters'.
      interpreters = cache.setup(paths=[os.path.dirname(self._interpreter.binary)],
                                 filters=[str(interpreter_requirement)])
      self.assertGreater(len(interpreters), 0)

      def assert_egg_extra(interpreter, name, version):
        # Each interpreter should expose the linked egg as an extra at an egg location.
        location = interpreter.get_location('{}=={}'.format(name, version))
        self.assertIsNotNone(location)
        self.assertIsInstance(Package.from_href(location), EggPackage)

      for interpreter in interpreters:
        assert_egg_extra(interpreter, 'setuptools', setuptools_version)
        assert_egg_extra(interpreter, 'wheel', wheel_version)
Exemplo n.º 10
0
  def execute(self):
    """Select a Python interpreter for all Python targets in the context.

    Computes a fingerprint-based id over the Python targets, then either
    selects an interpreter and records the selection to a file under the
    workdir, or re-hydrates a previously recorded selection.  The chosen
    interpreter is published as the PythonInterpreter data product.
    """
    interpreter = None
    python_tgts = self.context.targets(lambda tgt: isinstance(tgt, PythonTarget))
    fs = PythonInterpreterFingerprintStrategy(task=self)
    with self.invalidated(python_tgts, fingerprint_strategy=fs) as invalidation_check:
      # If there are no relevant targets, we still go through the motions of selecting
      # an interpreter, to prevent downstream tasks from having to check for this special case.
      if invalidation_check.all_vts:
        target_set_id = VersionedTargetSet.from_versioned_targets(
            invalidation_check.all_vts).cache_key.hash
      else:
        target_set_id = 'no_targets'
      # One selection file per unique target set; its existence doubles as the cache-hit check.
      interpreter_path_file = os.path.join(self.workdir, target_set_id, 'interpreter.path')
      if not os.path.exists(interpreter_path_file):
        interpreter_cache = PythonInterpreterCache(PythonSetup.global_instance(),
                                                   PythonRepos.global_instance(),
                                                   logger=self.context.log.debug)

        # We filter the interpreter cache itself (and not just the interpreters we pull from it)
        # because setting up some python versions (e.g., 3<=python<3.3) crashes, and this gives us
        # an escape hatch.
        filters = self.get_options().constraints or [b'']

        # Cache setup's requirement fetching can hang if run concurrently by another pants proc.
        self.context.acquire_lock()
        try:
          interpreter_cache.setup(filters=filters)
        finally:
          self.context.release_lock()

        interpreter = interpreter_cache.select_interpreter_for_targets(python_tgts)
        safe_mkdir_for(interpreter_path_file)
        # Record the selection as tab-separated lines: first the binary and identity,
        # then one line per extra distribution (name, version, location).
        # NOTE(review): b''.format only exists on Python 2 (where bytes is str) —
        # confirm this file still targets a Python 2 runtime.
        with open(interpreter_path_file, 'w') as outfile:
          outfile.write(b'{}\t{}\n'.format(interpreter.binary, str(interpreter.identity)))
          for dist, location in interpreter.extras.items():
            dist_name, dist_version = dist
            outfile.write(b'{}\t{}\t{}\n'.format(dist_name, dist_version, location))

    if not interpreter:
      # Cache hit: re-hydrate the interpreter from the recorded selection.
      with open(interpreter_path_file, 'r') as infile:
        lines = infile.readlines()
        binary, identity = lines[0].strip().split('\t')
        extras = {}
        for line in lines[1:]:
          dist_name, dist_version, location = line.strip().split('\t')
          extras[(dist_name, dist_version)] = location

      interpreter = PythonInterpreter(binary, PythonIdentity.from_path(identity), extras)

    self.context.products.get_data(PythonInterpreter, lambda: interpreter)
Exemplo n.º 11
0
    def cached_chroot(self,
                      interpreter,
                      pex_info,
                      targets,
                      platforms,
                      extra_requirements=None,
                      executable_file_content=None):
        """Returns a cached PythonChroot created with the specified args.

        The returned chroot will be cached for future use.

        TODO: Garbage-collect old chroots, so they don't pile up?
        TODO: Ideally chroots would just be products produced by some other task. But that's
              a bit too complicated to implement right now, as we'd need a way to request
              chroots for a variety of sets of targets.
        """
        # This PexInfo contains any customizations specified by the caller.
        # The process of building a pex modifies it further.
        pex_info = pex_info or PexInfo.default()

        chroot_dir = self._chroot_path(PythonSetup.global_instance(), interpreter,
                                       pex_info, targets, platforms,
                                       extra_requirements, executable_file_content)
        if not os.path.exists(chroot_dir):
            # Build into a '.tmp' staging dir, then move into place.
            staging_dir = chroot_dir + '.tmp'
            self._build_chroot(staging_dir, interpreter, pex_info, targets,
                               platforms, extra_requirements,
                               executable_file_content)
            shutil.move(staging_dir, chroot_dir)

        # We must read the PexInfo that was frozen into the pex, so we get the modifications
        # created when that pex was built.
        pex_info = PexInfo.from_pex(chroot_dir)
        # Now create a PythonChroot wrapper without dumping it.
        builder = PEXBuilder(path=chroot_dir,
                             interpreter=interpreter,
                             pex_info=pex_info)
        chroot = PythonChroot(context=self.context,
                              python_setup=PythonSetup.global_instance(),
                              python_repos=PythonRepos.global_instance(),
                              interpreter=interpreter,
                              builder=builder,
                              targets=targets,
                              platforms=platforms,
                              extra_requirements=extra_requirements)
        # TODO: Doesn't really need to be a contextmanager, but it's convenient to make it so
        # while transitioning calls to temporary_chroot to calls to cached_chroot.
        # We can revisit after that transition is complete.
        yield chroot
Exemplo n.º 12
0
  def interpreter_cache(self):
    """Return the lazily-initialized, fully-set-up PythonInterpreterCache."""
    if self._interpreter_cache is None:
      cache = PythonInterpreterCache(PythonSetup.global_instance(),
                                     PythonRepos.global_instance(),
                                     logger=self.context.log.debug)

      # Cache setup's requirement fetching can hang if run concurrently by another pants proc.
      self.context.acquire_lock()
      try:
        # We pass in filters=compatibilities because setting up some python versions
        # (e.g., 3<=python<3.3) crashes, and this gives us an escape hatch.
        cache.setup(filters=self._compatibilities)
      finally:
        self.context.release_lock()
      # Only memoize after setup() succeeds, so a failed setup isn't cached as
      # a half-initialized PythonInterpreterCache on later calls.
      self._interpreter_cache = cache
    return self._interpreter_cache
Exemplo n.º 13
0
  def interpreter_cache(self):
    """Return the lazily-initialized, fully-set-up PythonInterpreterCache."""
    if self._interpreter_cache is None:
      cache = PythonInterpreterCache(PythonSetup.global_instance(),
                                     PythonRepos.global_instance(),
                                     logger=self.context.log.debug)

      # Cache setup's requirement fetching can hang if run concurrently by another pants proc.
      self.context.acquire_lock()
      try:
        # We pass in filters=compatibilities because setting up some python versions
        # (e.g., 3<=python<3.3) crashes, and this gives us an escape hatch.
        cache.setup(filters=self._compatibilities)
      finally:
        self.context.release_lock()
      # Only memoize after setup() succeeds, so a failed setup isn't cached as
      # a half-initialized PythonInterpreterCache on later calls.
      self._interpreter_cache = cache
    return self._interpreter_cache
Exemplo n.º 14
0
  def _gather_sources(self, target_roots):
    """Run the GatherSources task over target_roots and return its product."""
    context = self.context(target_roots=target_roots, for_subsystems=[PythonSetup, PythonRepos])

    # We must get an interpreter via the cache, instead of using PythonInterpreter.get() directly,
    # to ensure that the interpreter has setuptools and wheel support.
    base_interpreter = PythonInterpreter.get()
    cache = PythonInterpreterCache(PythonSetup.global_instance(),
                                   PythonRepos.global_instance(),
                                   logger=context.log.debug)
    cached_interpreters = cache.setup(
        paths=[os.path.dirname(base_interpreter.binary)],
        filters=[str(base_interpreter.identity.requirement)])
    context.products.get_data(PythonInterpreter, lambda: cached_interpreters[0])

    self.create_task(context).execute()

    return context.products.get_data(GatherSources.PYTHON_SOURCES)
Exemplo n.º 15
0
  def _gather_sources(self, target_roots):
    """Execute GatherSources for target_roots and return the PYTHON_SOURCES product."""
    context = self.context(target_roots=target_roots, for_subsystems=[PythonSetup, PythonRepos])

    # We must get an interpreter via the cache, instead of using PythonInterpreter.get() directly,
    # to ensure that the interpreter has setuptools and wheel support.
    raw_interpreter = PythonInterpreter.get()
    search_paths = [os.path.dirname(raw_interpreter.binary)]
    requirement_filters = [str(raw_interpreter.identity.requirement)]
    interpreter_cache = PythonInterpreterCache(PythonSetup.global_instance(),
                                               PythonRepos.global_instance(),
                                               logger=context.log.debug)
    interpreters = interpreter_cache.setup(paths=search_paths, filters=requirement_filters)
    context.products.get_data(PythonInterpreter, lambda: interpreters[0])

    task = self.create_task(context)
    task.execute()

    return context.products.get_data(GatherSources.PYTHON_SOURCES)
Exemplo n.º 16
0
  def _resolve_requirements(self, target_roots, options=None):
    """Run ResolveRequirements over target_roots and return the requirements PEX product."""
    context = self.context(target_roots=target_roots, options=options)

    # We must get an interpreter via the cache, instead of using PythonInterpreter.get() directly,
    # to ensure that the interpreter has setuptools and wheel support.
    base_interpreter = PythonInterpreter.get()
    cache = PythonInterpreterCache(PythonSetup.global_instance(),
                                   PythonRepos.global_instance(),
                                   logger=context.log.debug)
    cached_interpreters = cache.setup(
        paths=[os.path.dirname(base_interpreter.binary)],
        filters=[str(base_interpreter.identity.requirement)])
    context.products.get_data(PythonInterpreter, lambda: cached_interpreters[0])

    self.create_task(context).execute()

    return context.products.get_data(ResolveRequirements.REQUIREMENTS_PEX)
Exemplo n.º 17
0
    def _resolve_requirements(self, target_roots, options=None):
        """Execute ResolveRequirements for target_roots and return the REQUIREMENTS_PEX product."""
        context = self.context(target_roots=target_roots, options=options)

        # We must get an interpreter via the cache, instead of using PythonInterpreter.get() directly,
        # to ensure that the interpreter has setuptools and wheel support.
        raw_interpreter = PythonInterpreter.get()
        search_paths = [os.path.dirname(raw_interpreter.binary)]
        requirement_filters = [str(raw_interpreter.identity.requirement)]
        interpreter_cache = PythonInterpreterCache(
            PythonSetup.global_instance(), PythonRepos.global_instance(), logger=context.log.debug
        )
        interpreters = interpreter_cache.setup(paths=search_paths, filters=requirement_filters)
        context.products.get_data(PythonInterpreter, lambda: interpreters[0])

        task = self.create_task(context)
        task.execute()

        return context.products.get_data(ResolveRequirements.REQUIREMENTS_PEX)
Exemplo n.º 18
0
    def dumped_chroot(self, targets):
        """Yields (pex_builder, python_chroot) for targets, dumped into a temporary dir."""
        # TODO(benjy): We shouldn't need to mention DistributionLocator here, as IvySubsystem
        # declares it as a dependency. However if we don't then test_antlr() below fails on
        # uninitialized options for that subsystem.  Hopefully my pending (as of 9/2016) change
        # to clean up how we initialize and create instances of subsystems in tests will make
        # this problem go away.
        subsystems = [PythonRepos, PythonSetup, IvySubsystem, DistributionLocator,
                      ThriftBinary.Factory, BinaryUtil.Factory]
        self.context(for_subsystems=subsystems)
        repos = PythonRepos.global_instance()
        bootstrapper = Bootstrapper(ivy_subsystem=IvySubsystem.global_instance())
        thrift_factory = ThriftBinary.Factory.global_instance().create

        cache = PythonInterpreterCache(self.python_setup, repos)
        cache.setup()
        matched = list(cache.matched_interpreters([self.python_setup.interpreter_requirement]))
        self.assertGreater(len(matched), 0)
        chosen_interpreter = matched[0]

        with temporary_dir() as chroot_dir:
            pex_builder = PEXBuilder(path=chroot_dir, interpreter=chosen_interpreter)

            python_chroot = PythonChroot(
                python_setup=self.python_setup,
                python_repos=repos,
                ivy_bootstrapper=bootstrapper,
                thrift_binary_factory=thrift_factory,
                interpreter=chosen_interpreter,
                builder=pex_builder,
                targets=targets,
                platforms=["current"],
            )
            try:
                python_chroot.dump()
                yield pex_builder, python_chroot
            finally:
                python_chroot.delete()
Exemplo n.º 19
0
    def dumped_chroot(self, targets):
        """Yields (pex_builder, python_chroot) for targets, dumped into a temporary dir."""
        # TODO(benjy): We shouldn't need to mention DistributionLocator here, as IvySubsystem
        # declares it as a dependency. However if we don't then test_antlr() below fails on
        # uninitialized options for that subsystem.  Hopefully my pending (as of 9/2016) change
        # to clean up how we initialize and create instances of subsystems in tests will make
        # this problem go away.
        self.context(for_subsystems=[
            PythonRepos, PythonSetup, IvySubsystem, DistributionLocator,
            ThriftBinary.Factory, BinaryUtil.Factory
        ])
        repos = PythonRepos.global_instance()
        bootstrapper = Bootstrapper(ivy_subsystem=IvySubsystem.global_instance())
        thrift_factory = ThriftBinary.Factory.global_instance().create

        cache = PythonInterpreterCache(self.python_setup, repos)
        cache.setup()
        matched = list(cache.matched_interpreters(
            [self.python_setup.interpreter_requirement]))
        self.assertGreater(len(matched), 0)
        chosen_interpreter = matched[0]

        with temporary_dir() as chroot_dir:
            pex_builder = PEXBuilder(path=chroot_dir, interpreter=chosen_interpreter)

            python_chroot = PythonChroot(
                python_setup=self.python_setup,
                python_repos=repos,
                ivy_bootstrapper=bootstrapper,
                thrift_binary_factory=thrift_factory,
                interpreter=chosen_interpreter,
                builder=pex_builder,
                targets=targets,
                platforms=['current'])
            try:
                python_chroot.dump()
                yield pex_builder, python_chroot
            finally:
                python_chroot.delete()
Exemplo n.º 20
0
  def cached_chroot(self, interpreter, pex_info, targets, platforms,
                    extra_requirements=None, executable_file_content=None):
    """Returns a cached PythonChroot created with the specified args.

    The returned chroot will be cached for future use.

    TODO: Garbage-collect old chroots, so they don't pile up?
    TODO: Ideally chroots would just be products produced by some other task. But that's
          a bit too complicated to implement right now, as we'd need a way to request
          chroots for a variety of sets of targets.
    """
    # This PexInfo contains any customizations specified by the caller.
    # The process of building a pex modifies it further.
    pex_info = pex_info or PexInfo.default()

    chroot_dir = self._chroot_path(PythonSetup.global_instance(), interpreter, pex_info,
                                   targets, platforms, extra_requirements,
                                   executable_file_content)
    if not os.path.exists(chroot_dir):
      # Build into a '.tmp' staging dir, then move into place.
      staging_dir = chroot_dir + '.tmp'
      self._build_chroot(staging_dir, interpreter, pex_info, targets, platforms,
                         extra_requirements, executable_file_content)
      shutil.move(staging_dir, chroot_dir)

    # We must read the PexInfo that was frozen into the pex, so we get the modifications
    # created when that pex was built.
    pex_info = PexInfo.from_pex(chroot_dir)
    # Now create a PythonChroot wrapper without dumping it.
    builder = PEXBuilder(path=chroot_dir, interpreter=interpreter, pex_info=pex_info)
    chroot = PythonChroot(
      context=self.context,
      python_setup=PythonSetup.global_instance(),
      python_repos=PythonRepos.global_instance(),
      interpreter=interpreter,
      builder=builder,
      targets=targets,
      platforms=platforms,
      extra_requirements=extra_requirements)
    # TODO: Doesn't really need to be a contextmanager, but it's convenient to make it so
    # while transitioning calls to temporary_chroot to calls to cached_chroot.
    # We can revisit after that transition is complete.
    yield chroot
Exemplo n.º 21
0
 def _build_chroot(self, path, interpreter, pex_info, targets, platforms,
                    extra_requirements=None, executable_file_content=None):
   """Create, dump, and freeze a PythonChroot with the specified args."""
   builder = PEXBuilder(path=path, interpreter=interpreter, pex_info=pex_info)
   with self.context.new_workunit('chroot'):
     chroot = PythonChroot(
       context=self.context,
       python_setup=PythonSetup.global_instance(),
       python_repos=PythonRepos.global_instance(),
       interpreter=interpreter,
       builder=builder,
       targets=targets,
       platforms=platforms,
       extra_requirements=extra_requirements)
     chroot.dump()
     if executable_file_content is not None:
       exe_path = os.path.join(path, '{}.py'.format(self.CHROOT_EXECUTABLE_NAME))
       with open(exe_path, 'w') as fh:
         fh.write(executable_file_content)
       # Override any user-specified entry point, under the assumption that the
       # executable_file_content does what the user intends (including, probably, calling that
       # underlying entry point).
       pex_info.entry_point = self.CHROOT_EXECUTABLE_NAME
     builder.freeze()
   return chroot