コード例 #1
0
  def dumped_chroot(self, targets):
    """Context-manager generator: builds a PEX chroot for `targets` and yields it.

    Yields a (pex_builder, python_chroot) pair; the chroot is deleted on exit
    even if the caller's body raises.
    NOTE(review): presumably wrapped with @contextmanager outside this view --
    confirm at the definition site.
    """
    # TODO(benjy): We shouldn't need to mention DistributionLocator here, as IvySubsystem
    # declares it as a dependency. However if we don't then test_antlr() below fails on
    # uninitialized options for that subsystem.  Hopefully my pending (as of 9/2016) change
    # to clean up how we initialize and create instances of subsystems in tests will make
    # this problem go away.
    self.context(for_subsystems=[PythonRepos, PythonSetup, IvySubsystem,
                                 DistributionLocator, ThriftBinary.Factory, BinaryUtil.Factory])
    python_repos = PythonRepos.global_instance()
    ivy_bootstrapper = Bootstrapper(ivy_subsystem=IvySubsystem.global_instance())
    thrift_binary_factory = ThriftBinary.Factory.global_instance().create

    interpreter_cache = PythonInterpreterCache(self.python_setup, python_repos)
    interpreter_cache.setup()
    # Pick the first interpreter matching the configured constraints.
    interpreters = list(interpreter_cache.matched_interpreters(
      self.python_setup.interpreter_constraints))
    self.assertGreater(len(interpreters), 0)
    interpreter = interpreters[0]

    with temporary_dir() as chroot:
      pex_builder = PEXBuilder(path=chroot, interpreter=interpreter)

      python_chroot = PythonChroot(python_setup=self.python_setup,
                                   python_repos=python_repos,
                                   ivy_bootstrapper=ivy_bootstrapper,
                                   thrift_binary_factory=thrift_binary_factory,
                                   interpreter=interpreter,
                                   builder=pex_builder,
                                   targets=targets,
                                   platforms=['current'])
      try:
        python_chroot.dump()
        yield pex_builder, python_chroot
      finally:
        # Always clean up the dumped chroot.
        python_chroot.delete()
コード例 #2
0
ファイル: test_python_chroot.py プロジェクト: Gointer/pants
  def dumped_chroot(self, targets):
    """Context-manager generator: dumps a PythonChroot for `targets` and yields it.

    Yields a (pex_builder, python_chroot) pair; the chroot is deleted on exit
    even if the caller's body raises.
    """
    python_repos = create_subsystem(PythonRepos)

    with subsystem_instance(IvySubsystem) as ivy_subsystem:
      ivy_bootstrapper = Bootstrapper(ivy_subsystem=ivy_subsystem)

      with subsystem_instance(ThriftBinary.Factory) as thrift_binary_factory:
        interpreter_cache = PythonInterpreterCache(self.python_setup, python_repos)
        interpreter_cache.setup()
        # Pick the first interpreter satisfying the configured requirement.
        interpreters = list(interpreter_cache.matched_interpreters([
          self.python_setup.interpreter_requirement]))
        self.assertGreater(len(interpreters), 0)
        interpreter = interpreters[0]

        with temporary_dir() as chroot:
          pex_builder = PEXBuilder(path=chroot, interpreter=interpreter)

          python_chroot = PythonChroot(python_setup=self.python_setup,
                                       python_repos=python_repos,
                                       ivy_bootstrapper=ivy_bootstrapper,
                                       thrift_binary_factory=thrift_binary_factory.create,
                                       interpreter=interpreter,
                                       builder=pex_builder,
                                       targets=targets,
                                       platforms=['current'])
          try:
            python_chroot.dump()
            yield pex_builder, python_chroot
          finally:
            # Always clean up the dumped chroot.
            python_chroot.delete()
コード例 #3
0
    def dumped_chroot(self, targets):
        """Context-manager generator: dumps a PythonChroot for `targets` and yields it.

        Yields a (pex_builder, python_chroot) pair; the chroot is deleted on
        exit even if the caller's body raises.
        """
        python_repos = create_subsystem(PythonRepos)

        with subsystem_instance(IvySubsystem) as ivy_subsystem:
            ivy_bootstrapper = Bootstrapper(ivy_subsystem=ivy_subsystem)

            with subsystem_instance(
                    ThriftBinary.Factory) as thrift_binary_factory:
                interpreter_cache = PythonInterpreterCache(
                    self.python_setup, python_repos)
                interpreter_cache.setup()
                # Pick the first interpreter satisfying the configured requirement.
                interpreters = list(
                    interpreter_cache.matches(
                        [self.python_setup.interpreter_requirement]))
                self.assertGreater(len(interpreters), 0)
                interpreter = interpreters[0]

                with temporary_dir() as chroot:
                    pex_builder = PEXBuilder(path=chroot,
                                             interpreter=interpreter)

                    python_chroot = PythonChroot(
                        python_setup=self.python_setup,
                        python_repos=python_repos,
                        ivy_bootstrapper=ivy_bootstrapper,
                        thrift_binary_factory=thrift_binary_factory.create,
                        interpreter=interpreter,
                        builder=pex_builder,
                        targets=targets,
                        platforms=['current'])
                    try:
                        python_chroot.dump()
                        yield pex_builder, python_chroot
                    finally:
                        # Always clean up the dumped chroot.
                        python_chroot.delete()
コード例 #4
0
    def execute(self):
        """Select a Python interpreter for the target roots and publish it as a product.

        The selection is cached on disk keyed by a hash of the invalidated
        target set, so repeated runs with unchanged targets reuse the stored
        interpreter path instead of re-scanning.
        """
        interpreter = None
        python_tgts = self.context.targets(
            lambda tgt: isinstance(tgt, PythonTarget))
        fs = PythonInterpreterFingerprintStrategy(task=self)
        with self.invalidated(python_tgts,
                              fingerprint_strategy=fs) as invalidation_check:
            # If there are no relevant targets, we still go through the motions of selecting
            # an interpreter, to prevent downstream tasks from having to check for this special case.
            if invalidation_check.all_vts:
                target_set_id = VersionedTargetSet.from_versioned_targets(
                    invalidation_check.all_vts).cache_key.hash
            else:
                target_set_id = 'no_targets'
            interpreter_path_file = os.path.join(self.workdir, target_set_id,
                                                 'interpreter.path')
            if not os.path.exists(interpreter_path_file):
                interpreter_cache = PythonInterpreterCache(
                    PythonSetup.global_instance(),
                    PythonRepos.global_instance(),
                    logger=self.context.log.debug)

                # We filter the interpreter cache itself (and not just the interpreters we pull from it)
                # because setting up some python versions (e.g., 3<=python<3.3) crashes, and this gives us
                # an escape hatch.
                # BUG FIX: use text literals -- the path file is opened in text
                # mode below, and bytes literals have no .format() on Python 3.
                filters = self.get_options().constraints or ['']

                # Cache setup's requirement fetching can hang if run concurrently by another pants proc.
                self.context.acquire_lock()
                try:
                    interpreter_cache.setup(filters=filters)
                finally:
                    self.context.release_lock()

                interpreter = interpreter_cache.select_interpreter_for_targets(
                    python_tgts)
                safe_mkdir_for(interpreter_path_file)
                with open(interpreter_path_file, 'w') as outfile:
                    # Line 1: binary path and identity; subsequent lines: one
                    # (name, version, location) triple per cached extra dist.
                    outfile.write('{}\t{}\n'.format(
                        interpreter.binary, str(interpreter.identity)))
                    for dist, location in interpreter.extras.items():
                        dist_name, dist_version = dist
                        outfile.write('{}\t{}\t{}\n'.format(
                            dist_name, dist_version, location))

        if not interpreter:
            # Cache hit: reconstruct the interpreter from the stored path file.
            with open(interpreter_path_file, 'r') as infile:
                lines = infile.readlines()
                binary, identity = lines[0].strip().split('\t')
                extras = {}
                for line in lines[1:]:
                    dist_name, dist_version, location = line.strip().split(
                        '\t')
                    extras[(dist_name, dist_version)] = location

            interpreter = PythonInterpreter(binary,
                                            PythonIdentity.from_path(identity),
                                            extras)

        self.context.products.get_data(PythonInterpreter, lambda: interpreter)
コード例 #5
0
ファイル: python_task.py プロジェクト: aoen/pants
class PythonTask(Task):
  @classmethod
  def setup_parser(cls, option_group, args, mkflag):
    """Register the --timeout flag controlling http connection waits."""
    timeout_flag = mkflag('timeout')
    option_group.add_option(timeout_flag,
                            dest='python_conn_timeout',
                            default=0,
                            type='int',
                            help='Number of seconds to wait for http connections.')

  def __init__(self, context, workdir):
    super(PythonTask, self).__init__(context, workdir)
    opts = self.context.options
    self.conn_timeout = (opts.python_conn_timeout or
                         self.context.config.getdefault('connection_timeout'))
    compatibilities = opts.interpreter or [b'']

    self.interpreter_cache = PythonInterpreterCache(self.context.config,
                                                    logger=self.context.log.debug)
    # Filtering by compatibilities here is an escape hatch: setting up some
    # python versions (e.g., 3<=python<3.3) crashes.
    self.interpreter_cache.setup(filters=compatibilities)

    # Bind a default interpreter for subclasses that don't need anything special.
    self._interpreter = self.select_interpreter(compatibilities)

  @property
  def interpreter(self):
    """The default interpreter selected at construction time (the usual case)."""
    return self._interpreter

  def select_interpreter(self, compatibilities):
    """Pick exactly one interpreter matching `compatibilities`, else raise TaskError."""
    candidates = list(self.interpreter_cache.matches(compatibilities))
    selected = self.interpreter_cache.select_interpreter(candidates)
    if len(selected) != 1:
      raise TaskError('Unable to detect suitable interpreter.')
    chosen = selected[0]
    self.context.log.debug('Selected %s' % chosen)
    return chosen
コード例 #6
0
ファイル: select_interpreter.py プロジェクト: lahosken/pants
 def _interpreter_cache(self):
   """Build and set up a PythonInterpreterCache, holding the global lock during setup."""
   cache = PythonInterpreterCache(PythonSetup.global_instance(),
                                  PythonRepos.global_instance(),
                                  logger=self.context.log.debug)
   # Cache setup's requirement fetching can hang if run concurrently by another pants proc.
   self.context.acquire_lock()
   try:
     cache.setup()
   finally:
     self.context.release_lock()
   return cache
コード例 #7
0
 def _interpreter_cache(self):
     """Return a ready-to-use PythonInterpreterCache.

     Setup runs under the global pants lock because its requirement fetching
     can hang if another pants process runs it concurrently.
     """
     cache = PythonInterpreterCache(
         PythonSetup.global_instance(),
         PythonRepos.global_instance(),
         logger=self.context.log.debug)
     self.context.acquire_lock()
     try:
         cache.setup()
     finally:
         self.context.release_lock()
     return cache
コード例 #8
0
class SetupPythonEnvironment(Task):
  """
    Establishes the python interpreter(s) for downstream Python tasks e.g. Resolve, Run, PytestRun.

    Populates the product namespace (for typename = 'python'):
      'interpreters': ordered list of PythonInterpreter objects
  """
  @classmethod
  def setup_parser(cls, option_group, args, mkflag):
    option_group.add_option(mkflag("force"), dest="python_setup_force",
                            action="store_true", default=False,
                            help="Force clean and install.")
    option_group.add_option(mkflag("path"), dest="python_setup_paths",
                            action="append", default=[],
                            help="Add a path to search for interpreters, by default PATH.")
    option_group.add_option(mkflag("interpreter"), dest="python_interpreter",
                            default=[], action='append',
                            help="Constrain what Python interpreters to use.  Uses Requirement "
                                 "format from pkg_resources, e.g. 'CPython>=2.6,<3' or 'PyPy'. "
                                 "By default, no constraints are used.  Multiple constraints may "
                                 "be added.  They will be ORed together.")
    option_group.add_option(mkflag("multi"), dest="python_multi",
                            default=False, action='store_true',
                            help="Allow multiple interpreters to be bound to an upstream chroot.")

  def __init__(self, context, workdir):
    # Declare that this task produces the 'python' product namespace.
    context.products.require('python')
    self._cache = PythonInterpreterCache(context.config, logger=context.log.debug)
    super(SetupPythonEnvironment, self).__init__(context, workdir)

  def execute(self):
    ifilters = self.context.options.python_interpreter
    # Fall back to a single empty filter when no constraints were given.
    self._cache.setup(force=self.context.options.python_setup_force,
        paths=self.context.options.python_setup_paths,
        filters=ifilters or [b''])
    all_interpreters = set(self._cache.interpreters)
    for target in self.context.targets(is_python_root):
      self.context.log.info('Setting up interpreters for %s' % target)
      closure = target.closure()
      self.context.log.debug('  - Target closure: %d targets' % len(closure))
      # One set of compatible interpreters per target in the closure...
      target_compatibilities = [
          set(self._cache.matches(getattr(closure_target, 'compatibility', [''])))
          for closure_target in closure]
      # ...intersected down to the interpreters every target accepts.
      # NOTE(review): relies on the builtin `reduce` (Python 2); Python 3 needs
      # functools.reduce -- confirm against the file's imports.
      target_compatibilities = reduce(set.intersection, target_compatibilities, all_interpreters)
      self.context.log.debug('  - Target minimum compatibility: %s' % (
        ' '.join(interp.version_string for interp in target_compatibilities)))
      interpreters = self._cache.select_interpreter(target_compatibilities,
          allow_multiple=self.context.options.python_multi)
      self.context.log.debug('  - Selected: %s' % interpreters)
      if not interpreters:
        raise TaskError('No compatible interpreters for %s' % target)
      target.interpreters = interpreters
コード例 #9
0
  def execute(self):
    """Select a Python interpreter for the target roots and publish it as a product.

    The selection is cached on disk keyed by a hash of the invalidated target
    set, so repeated runs with unchanged targets reuse the stored path.
    """
    interpreter = None
    python_tgts = self.context.targets(lambda tgt: isinstance(tgt, PythonTarget))
    fs = PythonInterpreterFingerprintStrategy(task=self)
    with self.invalidated(python_tgts, fingerprint_strategy=fs) as invalidation_check:
      # If there are no relevant targets, we still go through the motions of selecting
      # an interpreter, to prevent downstream tasks from having to check for this special case.
      if invalidation_check.all_vts:
        target_set_id = VersionedTargetSet.from_versioned_targets(
            invalidation_check.all_vts).cache_key.hash
      else:
        target_set_id = 'no_targets'
      interpreter_path_file = os.path.join(self.workdir, target_set_id, 'interpreter.path')
      if not os.path.exists(interpreter_path_file):
        interpreter_cache = PythonInterpreterCache(PythonSetup.global_instance(),
                                                   PythonRepos.global_instance(),
                                                   logger=self.context.log.debug)

        # We filter the interpreter cache itself (and not just the interpreters we pull from it)
        # because setting up some python versions (e.g., 3<=python<3.3) crashes, and this gives us
        # an escape hatch.
        # BUG FIX: use text literals -- the path file is opened in text mode
        # below, and bytes literals have no .format() on Python 3.
        filters = self.get_options().constraints or ['']

        # Cache setup's requirement fetching can hang if run concurrently by another pants proc.
        self.context.acquire_lock()
        try:
          interpreter_cache.setup(filters=filters)
        finally:
          self.context.release_lock()

        interpreter = interpreter_cache.select_interpreter_for_targets(python_tgts)
        safe_mkdir_for(interpreter_path_file)
        with open(interpreter_path_file, 'w') as outfile:
          # Line 1: binary path and identity; then one (name, version, location)
          # triple per cached extra dist.
          outfile.write('{}\t{}\n'.format(interpreter.binary, str(interpreter.identity)))
          for dist, location in interpreter.extras.items():
            dist_name, dist_version = dist
            outfile.write('{}\t{}\t{}\n'.format(dist_name, dist_version, location))

    if not interpreter:
      # Cache hit: reconstruct the interpreter from the stored path file.
      with open(interpreter_path_file, 'r') as infile:
        lines = infile.readlines()
        binary, identity = lines[0].strip().split('\t')
        extras = {}
        for line in lines[1:]:
          dist_name, dist_version, location = line.strip().split('\t')
          extras[(dist_name, dist_version)] = location

      interpreter = PythonInterpreter(binary, PythonIdentity.from_path(identity), extras)

    self.context.products.get_data(PythonInterpreter, lambda: interpreter)
コード例 #10
0
    def _resolve_requirements(self, target_roots, options=None):
        """Run the ResolveRequirements task over `target_roots` and return its pex product."""
        with temporary_dir() as cache_dir:
            if not options:
                options = {}
            # Redirect the interpreter cache into the temporary dir for isolation.
            setup_scope = options.setdefault(PythonSetup.options_scope, {})
            setup_scope['interpreter_cache_dir'] = cache_dir
            context = self.context(target_roots=target_roots,
                                   options=options,
                                   for_subsystems=[PythonSetup, PythonRepos])

            # We must get an interpreter via the cache, instead of using PythonInterpreter.get() directly,
            # to ensure that the interpreter has setuptools and wheel support.
            current = PythonInterpreter.get()
            cache = PythonInterpreterCache(PythonSetup.global_instance(),
                                           PythonRepos.global_instance(),
                                           logger=context.log.debug)
            cached = cache.setup(
                paths=[os.path.dirname(current.binary)],
                filters=[str(current.identity.requirement)])
            context.products.get_data(PythonInterpreter, lambda: cached[0])

            task = self.create_task(context)
            task.execute()

            return context.products.get_data(
                ResolveRequirements.REQUIREMENTS_PEX)
コード例 #11
0
    def test_setup_using_eggs(self):
        """End-to-end check that the interpreter cache can bootstrap from .egg repos."""
        def link_egg(repo_root, requirement):
            # Resolve `requirement` as an egg, symlink it into repo_root, and
            # return the resolved version string.
            existing_dist_location = self._interpreter.get_location(
                requirement)
            if existing_dist_location is not None:
                # Pin to the already-installed version so resolution is deterministic.
                existing_dist = Package.from_href(existing_dist_location)
                requirement = '{}=={}'.format(existing_dist.name,
                                              existing_dist.raw_version)

            distributions = resolve([requirement],
                                    interpreter=self._interpreter,
                                    precedence=(EggPackage, SourcePackage))
            self.assertEqual(1, len(distributions))
            dist_location = distributions[0].location

            self.assertRegexpMatches(dist_location, r'\.egg$')
            os.symlink(
                dist_location,
                os.path.join(repo_root, os.path.basename(dist_location)))

            return Package.from_href(dist_location).raw_version

        with temporary_dir() as root:
            egg_dir = os.path.join(root, 'eggs')
            os.makedirs(egg_dir)
            setuptools_version = link_egg(egg_dir, 'setuptools')
            wheel_version = link_egg(egg_dir, 'wheel')

            interpreter_requirement = self._interpreter.identity.requirement

            # Point the subsystems at the local egg repo only (no indexes), so
            # setup must succeed using the eggs linked above.
            python_setup, python_repos = self.create_python_subsystems(
                setup_options={
                    'interpreter_cache_dir': None,
                    'pants_workdir': os.path.join(root, 'workdir'),
                    'constraints': [interpreter_requirement],
                    'setuptools_version': setuptools_version,
                    'wheel_version': wheel_version,
                },
                repos_options={
                    'indexes': [],
                    'repos': [egg_dir],
                })
            cache = PythonInterpreterCache(python_setup=python_setup,
                                           python_repos=python_repos)

            # (sic) "interpereters" is a pre-existing typo'd local name.
            interpereters = cache.setup(
                paths=[os.path.dirname(self._interpreter.binary)],
                filters=[str(interpreter_requirement)])
            self.assertGreater(len(interpereters), 0)

            def assert_egg_extra(interpreter, name, version):
                # The cached interpreter should expose the egg as an extra.
                location = interpreter.get_location('{}=={}'.format(
                    name, version))
                self.assertIsNotNone(location)
                self.assertIsInstance(Package.from_href(location), EggPackage)

            for interpreter in interpereters:
                assert_egg_extra(interpreter, 'setuptools', setuptools_version)
                assert_egg_extra(interpreter, 'wheel', wheel_version)
コード例 #12
0
ファイル: test_test_builder.py プロジェクト: huanding/pants
    def _cache_current_interpreter(self):
        """Ensure the currently-running interpreter is cached and return its cached entry."""
        cache = PythonInterpreterCache(self.config())

        # We only need to cache the current interpreter, avoid caching for every interpreter on the
        # PATH.
        current = PythonInterpreter.get()
        matches = cache.setup(filters=[current.identity.requirement])
        for candidate in matches:
            if candidate == current:
                return candidate
        raise RuntimeError('Could not find suitable interpreter to run tests.')
コード例 #13
0
ファイル: test_python_chroot.py プロジェクト: lgvital/pants
    def dumped_chroot(self, targets):
        """Context-manager generator: builds a PEX chroot for `targets` and yields it.

        Yields a (pex_builder, python_chroot) pair; the chroot is deleted on
        exit even if the caller's body raises.
        """
        # TODO(benjy): We shouldn't need to mention DistributionLocator here, as IvySubsystem
        # declares it as a dependency. However if we don't then test_antlr() below fails on
        # uninitialized options for that subsystem.  Hopefully my pending (as of 9/2016) change
        # to clean up how we initialize and create instances of subsystems in tests will make
        # this problem go away.
        self.context(for_subsystems=[
            PythonRepos, PythonSetup, IvySubsystem, DistributionLocator,
            ThriftBinary.Factory, BinaryUtil.Factory
        ])
        python_repos = PythonRepos.global_instance()
        ivy_bootstrapper = Bootstrapper(
            ivy_subsystem=IvySubsystem.global_instance())
        thrift_binary_factory = ThriftBinary.Factory.global_instance().create

        interpreter_cache = PythonInterpreterCache(self.python_setup,
                                                   python_repos)
        interpreter_cache.setup()
        # Pick the first interpreter matching the configured constraints.
        interpreters = list(
            interpreter_cache.matched_interpreters(
                self.python_setup.interpreter_constraints))
        self.assertGreater(len(interpreters), 0)
        interpreter = interpreters[0]

        with temporary_dir() as chroot:
            pex_builder = PEXBuilder(path=chroot, interpreter=interpreter)

            python_chroot = PythonChroot(
                python_setup=self.python_setup,
                python_repos=python_repos,
                ivy_bootstrapper=ivy_bootstrapper,
                thrift_binary_factory=thrift_binary_factory,
                interpreter=interpreter,
                builder=pex_builder,
                targets=targets,
                platforms=['current'])
            try:
                python_chroot.dump()
                yield pex_builder, python_chroot
            finally:
                # Always clean up the dumped chroot.
                python_chroot.delete()
コード例 #14
0
  def test_setup_using_eggs(self):
    """End-to-end check that the interpreter cache can bootstrap from .egg repos."""
    def link_egg(repo_root, requirement):
      # Resolve `requirement` as an egg, symlink it into repo_root, and
      # return the resolved version string.
      existing_dist_location = self._interpreter.get_location(requirement)
      if existing_dist_location is not None:
        # Pin to the already-installed version so resolution is deterministic.
        existing_dist = Package.from_href(existing_dist_location)
        requirement = '{}=={}'.format(existing_dist.name, existing_dist.raw_version)

      distributions = resolve([requirement],
                              interpreter=self._interpreter,
                              precedence=(EggPackage, SourcePackage))
      self.assertEqual(1, len(distributions))
      dist_location = distributions[0].location

      self.assertRegexpMatches(dist_location, r'\.egg$')
      os.symlink(dist_location, os.path.join(repo_root, os.path.basename(dist_location)))

      return Package.from_href(dist_location).raw_version

    with temporary_dir() as root:
      egg_dir = os.path.join(root, 'eggs')
      os.makedirs(egg_dir)
      setuptools_version = link_egg(egg_dir, 'setuptools')
      wheel_version = link_egg(egg_dir, 'wheel')

      interpreter_requirement = self._interpreter.identity.requirement

      # Point the subsystems at the local egg repo only (no indexes), so setup
      # must succeed using the eggs linked above.
      self.context(for_subsystems=[PythonSetup, PythonRepos], options={
        PythonSetup.options_scope: {
          'interpreter_cache_dir': None,
          'pants_workdir': os.path.join(root, 'workdir'),
          'constraints': [interpreter_requirement],
          'setuptools_version': setuptools_version,
          'wheel_version': wheel_version,
        },
        PythonRepos.options_scope: {
          'indexes': [],
          'repos': [egg_dir],
        }
      })
      cache = PythonInterpreterCache(PythonSetup.global_instance(), PythonRepos.global_instance())

      # (sic) "interpereters" is a pre-existing typo'd local name.
      interpereters = cache.setup(paths=[os.path.dirname(self._interpreter.binary)],
                                  filters=[str(interpreter_requirement)])
      self.assertGreater(len(interpereters), 0)

      def assert_egg_extra(interpreter, name, version):
        # The cached interpreter should expose the egg as an extra.
        location = interpreter.get_location('{}=={}'.format(name, version))
        self.assertIsNotNone(location)
        self.assertIsInstance(Package.from_href(location), EggPackage)

      for interpreter in interpereters:
        assert_egg_extra(interpreter, 'setuptools', setuptools_version)
        assert_egg_extra(interpreter, 'wheel', wheel_version)
コード例 #15
0
ファイル: python_task.py プロジェクト: dbieber/pants
class PythonTask(Task):
    """Base task for Python goals: builds an interpreter cache and selects a default interpreter."""

    @classmethod
    def setup_parser(cls, option_group, args, mkflag):
        # Expose a --timeout flag that bounds HTTP connection waits.
        option_group.add_option(
            mkflag('timeout'),
            dest='python_conn_timeout',
            type='int',
            default=0,
            help='Number of seconds to wait for http connections.')

    def __init__(self, context, workdir):
        super(PythonTask, self).__init__(context, workdir)
        # A zero/unset flag falls back to the config-wide connection timeout.
        timeout_flag = self.context.options.python_conn_timeout
        self.conn_timeout = timeout_flag or self.context.config.getdefault('connection_timeout')
        constraints = self.context.options.interpreter or [b'']

        self.interpreter_cache = PythonInterpreterCache(
            self.context.config, logger=self.context.log.debug)
        # Filtering by the requested constraints acts as an escape hatch: setting up some
        # python versions (e.g., 3<=python<3.3) crashes.
        self.interpreter_cache.setup(filters=constraints)

        # Pick the default interpreter for subclasses that don't need anything special.
        self._interpreter = self.select_interpreter(constraints)

    @property
    def interpreter(self):
        """The default interpreter selected at construction (the usual case for subclasses)."""
        return self._interpreter

    def select_interpreter(self, compatibilities):
        """Select exactly one interpreter matching ``compatibilities``; raise TaskError otherwise."""
        candidates = list(self.interpreter_cache.matches(compatibilities))
        chosen = self.interpreter_cache.select_interpreter(candidates)
        if len(chosen) != 1:
            raise TaskError('Unable to detect suitable interpreter.')
        selected = chosen[0]
        self.context.log.debug('Selected %s' % selected)
        return selected
コード例 #16
0
  def _cache_current_interpreter(self):
    """Seed the interpreter cache with the interpreter running this process and return it."""
    interpreter_cache = PythonInterpreterCache(self.config())

    # Restrict setup to the running interpreter; caching every interpreter on the PATH
    # would be wasted work.
    current = PythonInterpreter.get()
    wanted = (current.binary, current.identity)
    for candidate in interpreter_cache.setup(filters=[current.identity.requirement]):
      # TODO(John Sirois): Revert to directly comparing interpreters when
      # https://github.com/pantsbuild/pex/pull/31 is in, released and consumed by pants.
      if (candidate.binary, candidate.identity) == wanted:
        return candidate
    raise RuntimeError('Could not find suitable interpreter to run tests.')
コード例 #17
0
  def _gather_sources(self, target_roots):
    """Run the GatherSources task over ``target_roots`` and return its sources product."""
    task_context = self.context(target_roots=target_roots, for_subsystems=[PythonSetup, PythonRepos])

    # Resolve the current interpreter through the cache rather than using
    # PythonInterpreter.get() directly, so it is guaranteed to carry setuptools
    # and wheel support.
    current = PythonInterpreter.get()
    cache = PythonInterpreterCache(PythonSetup.global_instance(),
                                   PythonRepos.global_instance(),
                                   logger=task_context.log.debug)
    cached = cache.setup(paths=[os.path.dirname(current.binary)],
                         filters=[str(current.identity.requirement)])
    task_context.products.get_data(PythonInterpreter, lambda: cached[0])

    self.create_task(task_context).execute()

    return task_context.products.get_data(GatherSources.PYTHON_SOURCES)
コード例 #18
0
  def _gather_sources(self, target_roots):
    """Execute the GatherSources task for ``target_roots`` and return the gathered sources."""
    ctx = self.context(target_roots=target_roots, for_subsystems=[PythonSetup, PythonRepos])

    # The interpreter must come via the cache (not PythonInterpreter.get() alone) so that
    # setuptools and wheel support are guaranteed.
    py = PythonInterpreter.get()
    interpreter_cache = PythonInterpreterCache(PythonSetup.global_instance(),
                                               PythonRepos.global_instance(),
                                               logger=ctx.log.debug)
    matching = interpreter_cache.setup(paths=[os.path.dirname(py.binary)],
                                       filters=[str(py.identity.requirement)])
    ctx.products.get_data(PythonInterpreter, lambda: matching[0])

    gather_task = self.create_task(ctx)
    gather_task.execute()

    return ctx.products.get_data(GatherSources.PYTHON_SOURCES)
コード例 #19
0
  def test_cache_setup_with_no_filters_uses_repo_default_excluded(self, MockSetup):
    """With no repo-wide requirement and no filters, setup() yields the injected interpreter."""
    current = PythonInterpreter.get()

    fake_setup = MockSetup.return_value
    # No repo-wide interpreter requirement is configured.
    type(fake_setup).interpreter_requirement = mock.PropertyMock(return_value=None)

    with temporary_dir() as tmp:
      fake_setup.scratch_dir.return_value = tmp

      interpreter_cache = PythonInterpreterCache(mock.MagicMock())

      def inject(_):
        interpreter_cache._interpreters.add(current)

      interpreter_cache._setup_cached = mock.Mock(side_effect=inject)

      self.assertEqual(interpreter_cache.setup(), [current])
コード例 #20
0
ファイル: checkstyle.py プロジェクト: StephanErb/pants
 def execute(self):
     """Run Checkstyle on all found non-synthetic source files.

     Returns the number of style failures found; raises TaskError when failures
     occur and the --fail option is set.
     """
     python_tgts = self.context.targets(
         lambda tgt: isinstance(tgt, (PythonTarget)))
     if not python_tgts:
         return 0
     interpreter_cache = PythonInterpreterCache(
         PythonSetup.global_instance(),
         PythonRepos.global_instance(),
         logger=self.context.log.debug)
     with self.invalidated(self.get_targets(
             self._is_checked)) as invalidation_check:
         failure_count = 0
         # Group invalid targets by their interpreter-compatibility filters so each
         # group is checked with an interpreter it can actually run under.
         tgts_by_compatibility, _ = interpreter_cache.partition_targets_by_compatibility(
             [vt.target for vt in invalidation_check.invalid_vts])
         for filters, targets in tgts_by_compatibility.items():
             # Targets whose constraints are neither globally acceptable nor whitelisted
             # are skipped with a deprecation warning instead of being linted.
             # NOTE(review): "compatibiltiy" typo in the deprecation message below is a
             # runtime string — fix it in a behavior-change pass, not here.
             if self.get_options(
             ).interpreter_constraints_whitelist is None and not self._constraints_are_whitelisted(
                     filters):
                 deprecated_conditional(
                     lambda: self.get_options(
                     ).interpreter_constraints_whitelist is None,
                     '1.14.0.dev2',
                     "Python linting is currently restricted to targets that match the global "
                     "interpreter constraints: {}. Pants detected unacceptable filters: {}. "
                     "Use the `--interpreter-constraints-whitelist` lint option to whitelist "
                     "compatibiltiy constraints.".format(
                         PythonSetup.global_instance().
                         interpreter_constraints, filters))
             else:
                 sources = self.calculate_sources([tgt for tgt in targets])
                 if sources:
                     allowed_interpreters = set(
                         interpreter_cache.setup(filters=filters))
                     if not allowed_interpreters:
                         raise TaskError(
                             'No valid interpreters found for targets: {}\n(filters: {})'
                             .format(targets, filters))
                     # min() picks a deterministic interpreter from the allowed set.
                     interpreter = min(allowed_interpreters)
                     failure_count += self.checkstyle(interpreter, sources)
         if failure_count > 0 and self.get_options().fail:
             raise TaskError(
                 '{} Python Style issues found. You may try `./pants fmt <targets>`'
                 .format(failure_count))
         return failure_count
コード例 #21
0
    def _resolve_requirements(self, target_roots, options=None):
        """Run the ResolveRequirements task over ``target_roots`` and return the resulting pex product."""
        task_context = self.context(target_roots=target_roots, options=options)

        # Obtain the interpreter via the cache instead of PythonInterpreter.get() directly,
        # so that it is guaranteed to have setuptools and wheel support.
        current = PythonInterpreter.get()
        cache = PythonInterpreterCache(
            PythonSetup.global_instance(), PythonRepos.global_instance(), logger=task_context.log.debug
        )
        cached = cache.setup(
            paths=[os.path.dirname(current.binary)], filters=[str(current.identity.requirement)]
        )
        task_context.products.get_data(PythonInterpreter, lambda: cached[0])

        self.create_task(task_context).execute()

        return task_context.products.get_data(ResolveRequirements.REQUIREMENTS_PEX)
コード例 #22
0
  def _do_test(self, interpreter_requirement, filters, expected):
    """Drive cache.setup() against a mocked PythonSetup and compare the result to ``expected``."""
    fake_setup = mock.MagicMock().return_value

    # Install a repo-wide requirement property that excludes our one interpreter.
    fake_requirement = mock.PropertyMock(return_value=interpreter_requirement)
    type(fake_setup).interpreter_requirement = fake_requirement

    with temporary_dir() as scratch:
      fake_setup.scratch_dir = scratch
      interpreter_cache = PythonInterpreterCache(fake_setup, mock.MagicMock())

      def register(_):
        interpreter_cache._interpreters.add(self._interpreter)

      interpreter_cache._setup_cached = mock.Mock(side_effect=register)
      interpreter_cache._setup_paths = mock.Mock()

      self.assertEqual(interpreter_cache.setup(filters=filters), expected)
コード例 #23
0
    def _do_test(self, interpreter_requirement, filters, expected):
        """Drive cache.setup() against a mocked PythonSetup and compare the result to ``expected``."""
        fake_setup = mock.MagicMock().return_value

        # Install a repo-wide requirement property; the value may exclude our interpreter.
        fake_requirement = mock.PropertyMock(return_value=interpreter_requirement)
        type(fake_setup).interpreter_requirement = fake_requirement

        with temporary_dir() as cache_dir:
            fake_setup.interpreter_cache_dir = cache_dir
            interpreter_cache = PythonInterpreterCache(fake_setup, mock.MagicMock())

            def register(_):
                interpreter_cache._interpreters.add(self._interpreter)

            interpreter_cache._setup_cached = mock.Mock(side_effect=register)
            interpreter_cache._setup_paths = mock.Mock()

            self.assertEqual(interpreter_cache.setup(filters=filters), expected)
コード例 #24
0
    def test_cache_setup_with_no_filters_uses_repo_default_excluded(
            self, MockSetup):
        """With no repo-wide requirement and no filters, setup() yields the injected interpreter."""
        current = PythonInterpreter.get()

        fake_setup = MockSetup.return_value
        # No repo-wide interpreter requirement is configured.
        type(fake_setup).interpreter_requirement = mock.PropertyMock(
            return_value=None)

        with temporary_dir() as tmp:
            fake_setup.scratch_dir.return_value = tmp

            interpreter_cache = PythonInterpreterCache(mock.MagicMock())

            def inject(_):
                interpreter_cache._interpreters.add(current)

            interpreter_cache._setup_cached = mock.Mock(side_effect=inject)

            self.assertEqual(interpreter_cache.setup(), [current])
コード例 #25
0
  def test_cache_setup_with_filter_overrides_repo_default(self, MockSetup):
    """An explicit filter matching the interpreter wins over an excluding repo default."""
    current = PythonInterpreter.get()

    fake_setup = MockSetup.return_value
    # Repo-wide requirement deliberately excludes our one interpreter.
    type(fake_setup).interpreter_requirement = mock.PropertyMock(
        return_value=self._make_bad_requirement(current.identity.requirement))

    with temporary_dir() as tmp:
      fake_setup.scratch_dir.return_value = tmp

      interpreter_cache = PythonInterpreterCache(mock.MagicMock())

      def inject(_):
        interpreter_cache._interpreters.add(current)

      interpreter_cache._setup_cached = mock.Mock(side_effect=inject)

      matching_filters = (str(current.identity.requirement),)
      self.assertEqual(interpreter_cache.setup(filters=matching_filters), [current])
コード例 #26
0
  def test_cache_setup_with_no_filters_uses_repo_default_excluded(self, MockSetup):
    """Without filters, an excluding repo-wide requirement yields no interpreters."""
    # The interpreter we'll inject into the cache.
    current = PythonInterpreter.get()

    fake_setup = MockSetup.return_value
    # Repo-wide requirement deliberately excludes our one interpreter.
    type(fake_setup).interpreter_requirement = mock.PropertyMock(
        return_value=self._make_bad_requirement(current.identity.requirement))

    with temporary_dir() as tmp:
      fake_setup.scratch_dir.return_value = tmp

      interpreter_cache = PythonInterpreterCache(mock.MagicMock())

      def inject(_):
        interpreter_cache._interpreters.add(current)

      interpreter_cache._setup_cached = mock.Mock(side_effect=inject)
      interpreter_cache._setup_paths = mock.Mock()

      self.assertEqual(len(interpreter_cache.setup()), 0)
コード例 #27
0
    def test_cache_setup_with_filter_overrides_repo_default(self, MockSetup):
        """An explicit filter matching the interpreter wins over an excluding repo default."""
        current = PythonInterpreter.get()

        fake_setup = MockSetup.return_value
        # Repo-wide requirement deliberately excludes our one interpreter.
        type(fake_setup).interpreter_requirement = mock.PropertyMock(
            return_value=self._make_bad_requirement(
                current.identity.requirement))

        with temporary_dir() as tmp:
            fake_setup.scratch_dir.return_value = tmp

            interpreter_cache = PythonInterpreterCache(mock.MagicMock())

            def inject(_):
                interpreter_cache._interpreters.add(current)

            interpreter_cache._setup_cached = mock.Mock(side_effect=inject)

            matching_filters = (str(current.identity.requirement),)
            self.assertEqual(interpreter_cache.setup(filters=matching_filters),
                             [current])
コード例 #28
0
    def test_cache_setup_with_no_filters_uses_repo_default_excluded(
            self, MockSetup):
        """Without filters, an excluding repo-wide requirement yields no interpreters."""
        # The interpreter we'll inject into the cache.
        current = PythonInterpreter.get()

        fake_setup = MockSetup.return_value
        # Repo-wide requirement deliberately excludes our one interpreter.
        type(fake_setup).interpreter_requirement = mock.PropertyMock(
            return_value=self._make_bad_requirement(
                current.identity.requirement))

        with temporary_dir() as tmp:
            fake_setup.scratch_dir.return_value = tmp

            interpreter_cache = PythonInterpreterCache(mock.MagicMock())

            def inject(_):
                interpreter_cache._interpreters.add(current)

            interpreter_cache._setup_cached = mock.Mock(side_effect=inject)
            interpreter_cache._setup_paths = mock.Mock()

            self.assertEqual(len(interpreter_cache.setup()), 0)
コード例 #29
0
class SetupPythonEnvironment(Task):
    """Establishes the python interpreter(s) for downstream Python tasks e.g. Resolve, Run, PytestRun.

    Populates the product namespace (for typename = 'python'):
      'interpreters': ordered list of PythonInterpreter objects
    """
    @classmethod
    def setup_parser(cls, option_group, args, mkflag):
        # Register --force / --path / --interpreter / --multi options for interpreter setup.
        option_group.add_option(mkflag("force"),
                                dest="python_setup_force",
                                action="store_true",
                                default=False,
                                help="Force clean and install.")
        option_group.add_option(
            mkflag("path"),
            dest="python_setup_paths",
            action="append",
            default=[],
            help="Add a path to search for interpreters, by default PATH.")
        option_group.add_option(
            mkflag("interpreter"),
            dest="python_interpreter",
            default=[],
            action='append',
            help="Constrain what Python interpreters to use.  Uses Requirement "
            "format from pkg_resources, e.g. 'CPython>=2.6,<3' or 'PyPy'. "
            "By default, no constraints are used.  Multiple constraints may "
            "be added.  They will be ORed together.")
        option_group.add_option(
            mkflag("multi"),
            dest="python_multi",
            default=False,
            action='store_true',
            help=
            "Allow multiple interpreters to be bound to an upstream chroot.")

    def __init__(self, context, workdir):
        # Declare the 'python' product before Task initialization so downstream tasks see it.
        context.products.require('python')
        self._cache = PythonInterpreterCache(context.config,
                                             logger=context.log.debug)
        super(SetupPythonEnvironment, self).__init__(context, workdir)

    def execute(self):
        """Set up interpreters and bind a compatible selection onto each python root target.

        Raises TaskError when a target's transitive closure admits no interpreter.
        """
        ifilters = self.context.options.python_interpreter
        # [b''] means "no constraint" when the user supplied no --interpreter filters.
        self._cache.setup(force=self.context.options.python_setup_force,
                          paths=self.context.options.python_setup_paths,
                          filters=ifilters or [b''])
        all_interpreters = set(self._cache.interpreters)
        for target in self.context.targets(is_python_root):
            self.context.log.info('Setting up interpreters for %s' % target)
            closure = target.closure()
            self.context.log.debug('  - Target closure: %d targets' %
                                   len(closure))
            # One interpreter set per closure member, from its declared compatibility.
            target_compatibilities = [
                set(
                    self._cache.matches(
                        getattr(closure_target, 'compatibility', [''])))
                for closure_target in closure
            ]
            # The target's usable interpreters are the intersection across its whole closure.
            target_compatibilities = reduce(set.intersection,
                                            target_compatibilities,
                                            all_interpreters)
            self.context.log.debug(
                '  - Target minimum compatibility: %s' %
                (' '.join(interp.version_string
                          for interp in target_compatibilities)))
            interpreters = self._cache.select_interpreter(
                target_compatibilities,
                allow_multiple=self.context.options.python_multi)
            self.context.log.debug('  - Selected: %s' % interpreters)
            if not interpreters:
                raise TaskError('No compatible interpreters for %s' % target)
            target.interpreters = interpreters
コード例 #30
0
ファイル: build.py プロジェクト: cheecheeo/pants
class Build(Command):
  """Builds a specified target."""

  __command__ = 'build'

  def setup_parser(self, parser, args):
    """Register command-line options for the build command."""
    parser.set_usage("\n"
                     "  %prog build (options) [spec] (build args)\n"
                     "  %prog build (options) [spec]... -- (build args)")
    parser.add_option("-t", "--timeout", dest="conn_timeout", type="int",
                      default=Config.load().getdefault('connection_timeout'),
                      help="Number of seconds to wait for http connections.")
    parser.add_option('-i', '--interpreter', dest='interpreters', default=[], action='append',
                      help="Constrain what Python interpreters to use.  Uses Requirement "
                           "format from pkg_resources, e.g. 'CPython>=2.6,<3' or 'PyPy'. "
                           "By default, no constraints are used.  Multiple constraints may "
                           "be added.  They will be ORed together.")
    parser.add_option('-v', '--verbose', dest='verbose', default=False, action='store_true',
                      help='Show verbose output.')
    parser.add_option('-f', '--fast', dest='fast', default=False, action='store_true',
                      help='Run tests in a single chroot.')
    parser.disable_interspersed_args()
    parser.epilog = ('Builds the specified Python target(s). Use ./pants goal for JVM and other '
                     'targets.')

  def __init__(self, *args, **kwargs):
    """Select an interpreter, parse specs and compute the transitive python target set.

    Exits via self.error() when no spec is given, a BUILD target cannot be parsed,
    a target does not exist, or no suitable interpreter is found.
    """
    super(Build, self).__init__(*args, **kwargs)

    if not self.args:
      self.error("A spec argument is required")

    self.config = Config.load()

    interpreters = self.options.interpreters or [b'']
    self.interpreter_cache = PythonInterpreterCache(self.config, logger=self.debug)
    self.interpreter_cache.setup(filters=interpreters)
    interpreters = self.interpreter_cache.select_interpreter(
        list(self.interpreter_cache.matches(interpreters)))
    if len(interpreters) != 1:
      self.error('Unable to detect suitable interpreter.')
    else:
      self.debug('Selected %s' % interpreters[0])
    self.interpreter = interpreters[0]

    # Everything after a literal '--' separator is passed through to the underlying build.
    try:
      specs_end = self.args.index('--')
      # index() succeeding guarantees specs_end < len(self.args), so the old
      # `if len(self.args) > specs_end` guard was always true; a plain slice suffices.
      self.build_args = self.args[specs_end + 1:]
    except ValueError:
      # No '--' given: the first arg is the spec, the rest are build args.
      specs_end = 1
      self.build_args = self.args[1:] if len(self.args) > 1 else []

    self.targets = OrderedSet()
    spec_parser = CmdLineSpecParser(self.root_dir, self.build_file_parser)
    self.top_level_addresses = set()

    specs = self.args[0:specs_end]
    addresses = spec_parser.parse_addresses(specs)

    for address in addresses:
      self.top_level_addresses.add(address)
      try:
        self.build_file_parser.inject_address_closure_into_build_graph(address, self.build_graph)
        target = self.build_graph.get_target(address)
      # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt are not swallowed.
      except Exception:
        self.error("Problem parsing BUILD target %s: %s" % (address, traceback.format_exc()))

      if not target:
        self.error("Target %s does not exist" % address)

      # Build transitively: include every dependency of each requested target.
      transitive_targets = self.build_graph.transitive_subgraph_of_addresses([target.address])
      for transitive_target in transitive_targets:
        self.targets.add(transitive_target)

    self.targets = [target for target in self.targets if target.is_python]

  def debug(self, message):
    """Print ``message`` to stderr, but only when --verbose was given."""
    if self.options.verbose:
      print(message, file=sys.stderr)

  def execute(self):
    """Validate that all targets are python and run the build; returns an exit status."""
    print("Build operating on top level addresses: %s" % self.top_level_addresses)

    python_targets = OrderedSet()
    for target in self.targets:
      if target.is_python:
        python_targets.add(target)
      else:
        self.error("Cannot build target %s" % target)

    if python_targets:
      status = self._python_build(python_targets)
    else:
      status = -1

    return status

  def _python_build(self, targets):
    """Delegate the build to PythonBuilder and return its exit status."""
    try:
      executor = PythonBuilder(self.run_tracker)
      return executor.build(
        targets,
        self.build_args,
        interpreter=self.interpreter,
        conn_timeout=self.options.conn_timeout,
        fast_tests=self.options.fast)
    # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt are not swallowed.
    except Exception:
      self.error("Problem executing PythonBuilder for targets %s: %s" % (targets,
                                                                         traceback.format_exc()))
コード例 #31
0
ファイル: py.py プロジェクト: dbieber/pants
class Py(Command):
    """Python chroot manipulation."""

    __command__ = 'py'

    def setup_parser(self, parser, args):
        """Register command-line options for the py command."""
        parser.set_usage('\n' '  %prog py (options) [spec] args\n')
        parser.disable_interspersed_args()
        parser.add_option(
            '-t',
            '--timeout',
            dest='conn_timeout',
            type='int',
            default=Config.load().getdefault('connection_timeout'),
            help='Number of seconds to wait for http connections.')
        parser.add_option(
            '--pex',
            dest='pex',
            default=False,
            action='store_true',
            help=
            'Dump a .pex of this chroot instead of attempting to execute it.')
        parser.add_option(
            '--ipython',
            dest='ipython',
            default=False,
            action='store_true',
            help='Run the target environment in an IPython interpreter.')
        parser.add_option(
            '-r',
            '--req',
            dest='extra_requirements',
            default=[],
            action='append',
            help='Additional Python requirements to add to this chroot.')
        parser.add_option(
            '-i',
            '--interpreter',
            dest='interpreters',
            default=[],
            action='append',
            help="Constrain what Python interpreters to use.  Uses Requirement "
            "format from pkg_resources, e.g. 'CPython>=2.6,<3' or 'PyPy'. "
            "By default, no constraints are used.  Multiple constraints may "
            "be added.  They will be ORed together.")
        parser.add_option('-e',
                          '--entry_point',
                          dest='entry_point',
                          default=None,
                          help='The entry point for the generated PEX.')
        parser.add_option('-v',
                          '--verbose',
                          dest='verbose',
                          default=False,
                          action='store_true',
                          help='Show verbose output.')
        parser.epilog = """Interact with the chroot of the specified target."""

    def __init__(self, run_tracker, root_dir, parser, argv, build_file_parser,
                 build_graph):
        """Select an interpreter and consume leading target specs from the command args.

        Args remaining after the first non-target (or after a '--') are left in
        self.args as build args. Exits via self.error() when no valid targets are
        found, more than one binary target is given, or no interpreter matches.
        """
        Command.__init__(self, run_tracker, root_dir, parser, argv,
                         build_file_parser, build_graph)

        self.binary = None
        self.targets = []
        self.extra_requirements = []
        self.config = Config.load()

        # [b''] means "no constraint" when no --interpreter filters were supplied.
        interpreters = self.options.interpreters or [b'']
        self.interpreter_cache = PythonInterpreterCache(self.config,
                                                        logger=self.debug)
        self.interpreter_cache.setup(filters=interpreters)
        interpreters = self.interpreter_cache.select_interpreter(
            list(self.interpreter_cache.matches(interpreters)))
        if len(interpreters) != 1:
            self.error('Unable to detect suitable interpreter.')
        self.interpreter = interpreters[0]

        for req in self.options.extra_requirements:
            self.extra_requirements.append(
                PythonRequirement(req, use_2to3=True))

        # We parse each arg in the context of the cli usage:
        #   ./pants command (options) [spec] (build args)
        #   ./pants command (options) [spec]... -- (build args)
        # Our command token and our options are parsed out so we see args of the form:
        #   [spec] (build args)
        #   [spec]... -- (build args)
        for k in range(len(self.args)):
            arg = self.args.pop(0)
            if arg == '--':
                break

            def not_a_target(debug_msg):
                self.debug('Not a target, assuming option: %s.' % debug_msg)
                # We failed to parse the arg as a target or else it was in valid address format but did not
                # correspond to a real target.  Assume this is the 1st of the build args and terminate
                # processing args for target addresses.
                self.args.insert(0, arg)

            try:
                # NOTE(review): this unconditional print looks like leftover debugging
                # output — confirm with the author before removing.
                print(root_dir, arg)
                self.build_file_parser.inject_spec_closure_into_build_graph(
                    arg, self.build_graph)
                spec_path, target_name = parse_spec(arg)
                build_file = BuildFile(root_dir, spec_path)
                address = BuildFileAddress(build_file, target_name)
                target = self.build_graph.get_target(address)
                if target is None:
                    not_a_target(debug_msg='Unrecognized target')
                    break
            except Exception as e:
                not_a_target(debug_msg=e)
                break

            # At most one binary target may be processed per invocation.
            if isinstance(target, PythonBinary):
                if self.binary:
                    self.error(
                        'Can only process 1 binary target. Found %s and %s.' %
                        (self.binary, target))
                else:
                    self.binary = target
            self.targets.append(target)

        if not self.targets:
            self.error('No valid targets specified!')

    def debug(self, message):
        """Print ``message`` to stderr, but only when --verbose was given."""
        if self.options.verbose:
            print(message, file=sys.stderr)

    def execute(self):
        """Dump the chroot and either write a .pex (with --pex) or run it.

        Returns 0 on a successful --pex dump, otherwise the exit code of the
        executed PEX.
        """
        if self.options.pex and self.options.ipython:
            self.error('Cannot specify both --pex and --ipython!')

        if self.options.entry_point and self.options.ipython:
            self.error('Cannot specify both --entry_point and --ipython!')

        if self.options.verbose:
            print('Build operating on targets: %s' %
                  ' '.join(str(target) for target in self.targets))

        builder = PEXBuilder(
            tempfile.mkdtemp(),
            interpreter=self.interpreter,
            pex_info=self.binary.pexinfo if self.binary else None)

        if self.options.entry_point:
            builder.set_entry_point(self.options.entry_point)

        # IPython mode pulls its entry point and requirements from pants.ini.
        if self.options.ipython:
            if not self.config.has_section('python-ipython'):
                self.error(
                    'No python-ipython sections defined in your pants.ini!')

            builder.info.entry_point = self.config.get('python-ipython',
                                                       'entry_point')
            if builder.info.entry_point is None:
                self.error(
                    'Must specify entry_point for IPython in the python-ipython section '
                    'of your pants.ini!')

            requirements = self.config.getlist('python-ipython',
                                               'requirements',
                                               default=[])

            for requirement in requirements:
                self.extra_requirements.append(PythonRequirement(requirement))

        executor = PythonChroot(
            targets=self.targets,
            extra_requirements=self.extra_requirements,
            builder=builder,
            platforms=self.binary.platforms if self.binary else None,
            interpreter=self.interpreter,
            conn_timeout=self.options.conn_timeout)

        executor.dump()

        if self.options.pex:
            pex_name = self.binary.name if self.binary else Target.maybe_readable_identify(
                self.targets)
            pex_path = os.path.join(self.root_dir, 'dist', '%s.pex' % pex_name)
            builder.build(pex_path)
            print('Wrote %s' % pex_path)
            return 0
        else:
            builder.freeze()
            pex = PEX(builder.path(), interpreter=self.interpreter)
            # Run non-blocking so we can forward Ctrl-C to the child as SIGINT.
            po = pex.run(args=list(self.args), blocking=False)
            try:
                return po.wait()
            except KeyboardInterrupt:
                po.send_signal(signal.SIGINT)
                raise
コード例 #32
0
ファイル: py.py プロジェクト: rgbenson/pants
class Py(Command):
  """Python chroot manipulation.

  Resolves the specified target specs, builds a PEX chroot for them, and then
  either executes the chroot directly, runs it under IPython, or dumps it to a
  .pex file under dist/.
  """

  __command__ = 'py'

  def setup_parser(self, parser, args):
    """Register the command-line options understood by the py command."""
    parser.set_usage('\n'
                     '  %prog py (options) [spec] args\n')
    parser.disable_interspersed_args()
    parser.add_option('-t', '--timeout', dest='conn_timeout', type='int',
                      default=Config.from_cache().getdefault('connection_timeout'),
                      help='Number of seconds to wait for http connections.')
    parser.add_option('--pex', dest='pex', default=False, action='store_true',
                      help='Dump a .pex of this chroot instead of attempting to execute it.')
    parser.add_option('--ipython', dest='ipython', default=False, action='store_true',
                      help='Run the target environment in an IPython interpreter.')
    parser.add_option('-r', '--req', dest='extra_requirements', default=[], action='append',
                      help='Additional Python requirements to add to this chroot.')
    parser.add_option('-i', '--interpreter', dest='interpreters', default=[], action='append',
                      help="Constrain what Python interpreters to use.  Uses Requirement "
                           "format from pkg_resources, e.g. 'CPython>=2.6,<3' or 'PyPy'. "
                           "By default, no constraints are used.  Multiple constraints may "
                           "be added.  They will be ORed together.")
    parser.add_option('-e', '--entry_point', dest='entry_point', default=None,
                      help='The entry point for the generated PEX.')
    parser.add_option('-v', '--verbose', dest='verbose', default=False, action='store_true',
                      help='Show verbose output.')
    parser.epilog = """Interact with the chroot of the specified target."""

  def __init__(self, *args, **kwargs):
    super(Py, self).__init__(*args, **kwargs)

    self.binary = None
    self.targets = []
    self.extra_requirements = []
    self.config = Config.from_cache()

    # Select exactly one interpreter satisfying any -i constraints.  The b''
    # entry means "no constraint" to the cache's matcher.
    interpreters = self.old_options.interpreters or [b'']
    self.interpreter_cache = PythonInterpreterCache(self.config, logger=self.debug)
    self.interpreter_cache.setup(filters=interpreters)
    interpreters = self.interpreter_cache.select_interpreter(
        list(self.interpreter_cache.matches(interpreters)))
    if len(interpreters) != 1:
      self.error('Unable to detect suitable interpreter.')
    self.interpreter = interpreters[0]

    for req in self.old_options.extra_requirements:
      self.extra_requirements.append(PythonRequirement(req, use_2to3=True))

    # We parse each arg in the context of the cli usage:
    #   ./pants command (options) [spec] (build args)
    #   ./pants command (options) [spec]... -- (build args)
    # Our command token and our options are parsed out so we see args of the form:
    #   [spec] (build args)
    #   [spec]... -- (build args)
    for k in range(len(self.args)):
      arg = self.args.pop(0)
      if arg == '--':
        break

      def not_a_target(debug_msg):
        self.debug('Not a target, assuming option: %s.' % debug_msg)
        # We failed to parse the arg as a target or else it was in valid address format but did not
        # correspond to a real target.  Assume this is the 1st of the build args and terminate
        # processing args for target addresses.
        self.args.insert(0, arg)

      try:
        # Route this trace through debug() so it only appears under --verbose,
        # instead of unconditionally printing to stderr for every spec parsed.
        self.debug('Parsing spec %s against root %s' % (arg, self.root_dir))
        self.build_graph.inject_spec_closure(arg)
        spec_path, target_name = parse_spec(arg)
        build_file = BuildFile.from_cache(self.root_dir, spec_path)
        address = BuildFileAddress(build_file, target_name)
        target = self.build_graph.get_target(address)
        if target is None:
          not_a_target(debug_msg='Unrecognized target')
          break
      except Exception as e:
        not_a_target(debug_msg=e)
        break

      # At most one PythonBinary target may be named; it supplies pex_info and
      # platforms for the chroot.
      if isinstance(target, PythonBinary):
        if self.binary:
          self.error('Can only process 1 binary target. Found %s and %s.' % (self.binary, target))
        else:
          self.binary = target
      self.targets.append(target)

    if not self.targets:
      self.error('No valid targets specified!')

  def debug(self, message):
    """Print *message* to stderr, but only when --verbose was given."""
    if self.old_options.verbose:
      print(message, file=sys.stderr)

  def execute(self):
    """Build the chroot for the parsed targets and run or dump it.

    Returns the subprocess exit code when executing, or 0 after writing a .pex.
    """
    if self.old_options.pex and self.old_options.ipython:
      self.error('Cannot specify both --pex and --ipython!')

    if self.old_options.entry_point and self.old_options.ipython:
      self.error('Cannot specify both --entry_point and --ipython!')

    if self.old_options.verbose:
      print('Build operating on targets: %s' % ' '.join(str(target) for target in self.targets))


    builder = PEXBuilder(tempfile.mkdtemp(), interpreter=self.interpreter,
                         pex_info=self.binary.pexinfo if self.binary else None)

    if self.old_options.entry_point:
      builder.set_entry_point(self.old_options.entry_point)

    if self.old_options.ipython:
      # IPython mode pulls its entry point and requirements from pants.ini.
      if not self.config.has_section('python-ipython'):
        self.error('No python-ipython sections defined in your pants.ini!')

      builder.info.entry_point = self.config.get('python-ipython', 'entry_point')
      if builder.info.entry_point is None:
        self.error('Must specify entry_point for IPython in the python-ipython section '
                   'of your pants.ini!')

      requirements = self.config.getlist('python-ipython', 'requirements', default=[])

      for requirement in requirements:
        self.extra_requirements.append(PythonRequirement(requirement))

    executor = PythonChroot(
        targets=self.targets,
        extra_requirements=self.extra_requirements,
        builder=builder,
        platforms=self.binary.platforms if self.binary else None,
        interpreter=self.interpreter,
        conn_timeout=self.old_options.conn_timeout)

    executor.dump()

    if self.old_options.pex:
      pex_name = self.binary.name if self.binary else Target.maybe_readable_identify(self.targets)
      pex_path = os.path.join(self.root_dir, 'dist', '%s.pex' % pex_name)
      builder.build(pex_path)
      print('Wrote %s' % pex_path)
      return 0
    else:
      builder.freeze()
      pex = PEX(builder.path(), interpreter=self.interpreter)
      po = pex.run(args=list(self.args), blocking=False)
      try:
        return po.wait()
      except KeyboardInterrupt:
        # Forward Ctrl-C to the child pex so it can clean up, then re-raise.
        po.send_signal(signal.SIGINT)
        raise
コード例 #33
0
class PythonTask(Task):
    """Base task for python backend tasks that need an interpreter and chroots."""

    # If needed, we set this as the executable entry point of any chroots we create.
    CHROOT_EXECUTABLE_NAME = '__pants_executable__'

    @classmethod
    def global_subsystems(cls):
        return super(PythonTask, cls).global_subsystems() + (
            IvySubsystem, PythonSetup, PythonRepos)

    @classmethod
    def task_subsystems(cls):
        return super(PythonTask,
                     cls).task_subsystems() + (ThriftBinary.Factory, )

    def __init__(self, *args, **kwargs):
        super(PythonTask, self).__init__(*args, **kwargs)
        # Interpreter constraint strings; b'' means "no constraint" to the cache.
        self._compatibilities = self.get_options().interpreter or [b'']
        self._interpreter_cache = None
        self._interpreter = None

    @property
    def interpreter_cache(self):
        """Lazily-constructed PythonInterpreterCache, set up under the pants lock."""
        if self._interpreter_cache is None:
            self._interpreter_cache = PythonInterpreterCache(
                PythonSetup.global_instance(),
                PythonRepos.global_instance(),
                logger=self.context.log.debug)

            # Cache setup's requirement fetching can hang if run concurrently by another pants proc.
            self.context.acquire_lock()
            try:
                # We pass in filters=compatibilities because setting up some python versions
                # (e.g., 3<=python<3.3) crashes, and this gives us an escape hatch.
                self._interpreter_cache.setup(filters=self._compatibilities)
            finally:
                self.context.release_lock()
        return self._interpreter_cache

    @property
    def interpreter(self):
        """Subclasses can use this if they're fine with the default interpreter (the usual case)."""
        if self._interpreter is None:
            self._interpreter = self.select_interpreter(self._compatibilities)
        return self._interpreter

    def select_interpreter_for_targets(self, targets):
        """Pick an interpreter compatible with all the specified targets."""
        allowed_interpreters = OrderedSet(self.interpreter_cache.interpreters)
        targets_with_compatibilities = []  # Used only for error messages.

        # Constrain allowed_interpreters based on each target's compatibility requirements.
        for target in targets:
            if target.is_python and hasattr(
                    target, 'compatibility') and target.compatibility:
                targets_with_compatibilities.append(target)
                compatible_with_target = list(
                    self.interpreter_cache.matches(target.compatibility))
                allowed_interpreters &= compatible_with_target

        if not allowed_interpreters:
            # Create a helpful error message.
            unique_compatibilities = set(
                tuple(t.compatibility) for t in targets_with_compatibilities)
            unique_compatibilities_strs = [
                ','.join(x) for x in unique_compatibilities if x
            ]
            targets_with_compatibilities_strs = [
                str(t) for t in targets_with_compatibilities
            ]
            raise TaskError(
                'Unable to detect a suitable interpreter for compatibilities: {} '
                '(Conflicting targets: {})'.format(
                    ' && '.join(unique_compatibilities_strs),
                    ', '.join(targets_with_compatibilities_strs)))

        # Return the lowest compatible interpreter.
        return self.interpreter_cache.select_interpreter(
            allowed_interpreters)[0]

    def select_interpreter(self, filters):
        """Subclasses can use this to be more specific about interpreter selection."""
        interpreters = self.interpreter_cache.select_interpreter(
            list(self.interpreter_cache.matches(filters)))
        if len(interpreters) != 1:
            raise TaskError('Unable to detect a suitable interpreter.')
        interpreter = interpreters[0]
        self.context.log.debug('Selected {}'.format(interpreter))
        return interpreter

    @property
    def chroot_cache_dir(self):
        return PythonSetup.global_instance().chroot_cache_dir

    @property
    def ivy_bootstrapper(self):
        return Bootstrapper(ivy_subsystem=IvySubsystem.global_instance())

    @property
    def thrift_binary_factory(self):
        return ThriftBinary.Factory.scoped_instance(self).create

    def create_chroot(self, interpreter, builder, targets, platforms,
                      extra_requirements):
        """Construct (but do not dump) a PythonChroot wrapping *builder*."""
        return PythonChroot(python_setup=PythonSetup.global_instance(),
                            python_repos=PythonRepos.global_instance(),
                            ivy_bootstrapper=self.ivy_bootstrapper,
                            thrift_binary_factory=self.thrift_binary_factory,
                            interpreter=interpreter,
                            builder=builder,
                            targets=targets,
                            platforms=platforms,
                            extra_requirements=extra_requirements)

    @contextmanager
    def cached_chroot(self,
                      interpreter,
                      pex_info,
                      targets,
                      platforms,
                      extra_requirements=None,
                      executable_file_content=None):
        """Returns a cached PythonChroot created with the specified args.

    The returned chroot will be cached for future use.

    TODO: Garbage-collect old chroots, so they don't pile up?
    TODO: Ideally chroots would just be products produced by some other task. But that's
          a bit too complicated to implement right now, as we'd need a way to request
          chroots for a variety of sets of targets.
    """
        # This PexInfo contains any customizations specified by the caller.
        # The process of building a pex modifies it further.
        pex_info = pex_info or PexInfo.default()

        path = self._chroot_path(interpreter, pex_info, targets, platforms,
                                 extra_requirements, executable_file_content)
        if not os.path.exists(path):
            # Build under a .tmp name and move into place so a crash mid-build
            # can't leave a half-populated chroot at the cached path.
            path_tmp = path + '.tmp'
            self._build_chroot(path_tmp, interpreter, pex_info, targets,
                               platforms, extra_requirements,
                               executable_file_content)
            shutil.move(path_tmp, path)

        # We must read the PexInfo that was frozen into the pex, so we get the modifications
        # created when that pex was built.
        pex_info = PexInfo.from_pex(path)
        # Now create a PythonChroot wrapper without dumping it.
        builder = PEXBuilder(path=path,
                             interpreter=interpreter,
                             pex_info=pex_info,
                             copy=True)
        chroot = self.create_chroot(interpreter=interpreter,
                                    builder=builder,
                                    targets=targets,
                                    platforms=platforms,
                                    extra_requirements=extra_requirements)
        # TODO: Doesn't really need to be a contextmanager, but it's convenient to make it so
        # while transitioning calls to temporary_chroot to calls to cached_chroot.
        # We can revisit after that transition is complete.
        yield chroot

    @contextmanager
    def temporary_chroot(self,
                         interpreter,
                         pex_info,
                         targets,
                         platforms,
                         extra_requirements=None,
                         executable_file_content=None):
        """Yields a freshly-built PythonChroot, deleting it on exit."""
        path = tempfile.mkdtemp(
        )  # Not a contextmanager: chroot.delete() will clean this up anyway.
        pex_info = pex_info or PexInfo.default()
        chroot = self._build_chroot(path, interpreter, pex_info, targets,
                                    platforms, extra_requirements,
                                    executable_file_content)
        # Guard the yield so the chroot is deleted even if the caller's
        # with-block raises; previously an exception leaked the temp dir.
        try:
            yield chroot
        finally:
            chroot.delete()

    def _build_chroot(self,
                      path,
                      interpreter,
                      pex_info,
                      targets,
                      platforms,
                      extra_requirements=None,
                      executable_file_content=None):
        """Create a PythonChroot with the specified args."""
        builder = PEXBuilder(path=path,
                             interpreter=interpreter,
                             pex_info=pex_info,
                             copy=True)
        with self.context.new_workunit('chroot'):
            chroot = self.create_chroot(interpreter=interpreter,
                                        builder=builder,
                                        targets=targets,
                                        platforms=platforms,
                                        extra_requirements=extra_requirements)
            chroot.dump()
            if executable_file_content is not None:
                with open(
                        os.path.join(
                            path, '{}.py'.format(self.CHROOT_EXECUTABLE_NAME)),
                        'w') as outfile:
                    outfile.write(executable_file_content)
                # Override any user-specified entry point, under the assumption that the
                # executable_file_content does what the user intends (including, probably, calling that
                # underlying entry point).
                pex_info.entry_point = self.CHROOT_EXECUTABLE_NAME
            builder.freeze()
        return chroot

    def _chroot_path(self, interpreter, pex_info, targets, platforms,
                     extra_requirements, executable_file_content):
        """Pick a unique, well-known directory name for the chroot with the specified parameters.

    TODO: How many of these do we expect to have? Currently they are all under a single
    directory, and some filesystems (E.g., HFS+) don't handle directories with thousands of
    entries well. GC'ing old chroots may be enough of a solution, assuming this is even a problem.
    """
        fingerprint_components = [str(interpreter.identity)]

        if pex_info:
            # TODO(John Sirois): When https://rbcommons.com/s/twitter/r/2517/ lands, leverage the dump
            # **kwargs to sort keys or else find some other better way to get a stable fingerprint of
            # PexInfo.
            fingerprint_components.append(
                json.dumps(json.loads(pex_info.dump()), sort_keys=True))

        fingerprint_components.extend(
            sorted(t.transitive_invalidation_hash() for t in set(targets)))

        if platforms:
            fingerprint_components.extend(sorted(set(platforms)))

        if extra_requirements:
            # TODO(John Sirois): The extras should be uniqified before fingerprinting, but
            # PythonRequirement arguably does not have a proper __eq__.  For now we lean on the cache_key
            # of unique PythonRequirement being unique - which is probably good enough (the cache key is
            # narrower than the full scope of PythonRequirement attributes at present, thus the hedge).
            fingerprint_components.extend(
                sorted(set(r.cache_key() for r in extra_requirements)))

        if executable_file_content is not None:
            fingerprint_components.append(executable_file_content)

        fingerprint = hash_utils.hash_all(fingerprint_components)
        return os.path.join(self.chroot_cache_dir, fingerprint)
コード例 #34
0
ファイル: python_task.py プロジェクト: mccoysg/pants
class PythonTask(Task):
    """Base task providing interpreter selection and temporary PEX building."""

    def __init__(self, *args, **kwargs):
        super(PythonTask, self).__init__(*args, **kwargs)
        # Interpreter constraint strings; b'' means "no constraint" to the cache.
        self._compatibilities = self.get_options().interpreter or [b'']
        self._interpreter_cache = None
        self._interpreter = None

    @property
    def interpreter_cache(self):
        """Lazily-constructed PythonInterpreterCache, set up under the pants lock."""
        if self._interpreter_cache is None:
            self._interpreter_cache = PythonInterpreterCache(
                self.context.config, logger=self.context.log.debug)

            # Cache setup's requirement fetching can hang if run concurrently by another pants proc.
            self.context.acquire_lock()
            try:
                # We pass in filters=compatibilities because setting up some python versions
                # (e.g., 3<=python<3.3) crashes, and this gives us an escape hatch.
                self._interpreter_cache.setup(filters=self._compatibilities)
            finally:
                self.context.release_lock()
        return self._interpreter_cache

    @property
    def interpreter(self):
        """Subclasses can use this if they're fine with the default interpreter (the usual case)."""
        if self._interpreter is None:
            self._interpreter = self.select_interpreter(self._compatibilities)
        return self._interpreter

    def select_interpreter_for_targets(self, targets):
        """Pick an interpreter compatible with all the specified targets."""
        allowed_interpreters = OrderedSet(self.interpreter_cache.interpreters)
        targets_with_compatibilities = []  # Used only for error messages.

        # Constrain allowed_interpreters based on each target's compatibility requirements.
        for target in targets:
            if target.is_python and hasattr(
                    target, 'compatibility') and target.compatibility:
                targets_with_compatibilities.append(target)
                compatible_with_target = list(
                    self.interpreter_cache.matches(target.compatibility))
                allowed_interpreters &= compatible_with_target

        if not allowed_interpreters:
            # Create a helpful error message.
            unique_compatibilities = set(
                tuple(t.compatibility) for t in targets_with_compatibilities)
            unique_compatibilities_strs = [
                ','.join(x) for x in unique_compatibilities if x
            ]
            targets_with_compatibilities_strs = [
                str(t) for t in targets_with_compatibilities
            ]
            raise TaskError(
                'Unable to detect a suitable interpreter for compatibilities: %s '
                '(Conflicting targets: %s)' %
                (' && '.join(unique_compatibilities_strs),
                 ', '.join(targets_with_compatibilities_strs)))

        # Return the lowest compatible interpreter.
        return self.interpreter_cache.select_interpreter(
            allowed_interpreters)[0]

    def select_interpreter(self, filters):
        """Subclasses can use this to be more specific about interpreter selection."""
        interpreters = self.interpreter_cache.select_interpreter(
            list(self.interpreter_cache.matches(filters)))
        if len(interpreters) != 1:
            raise TaskError('Unable to detect a suitable interpreter.')
        interpreter = interpreters[0]
        self.context.log.debug('Selected %s' % interpreter)
        return interpreter

    @contextmanager
    def temporary_pex_builder(self,
                              interpreter=None,
                              pex_info=None,
                              parent_dir=None):
        """Yields a PEXBuilder and cleans up its chroot when it goes out of context."""
        path = tempfile.mkdtemp(dir=parent_dir)
        builder = PEXBuilder(path=path,
                             interpreter=interpreter,
                             pex_info=pex_info)
        # Guard the yield so the chroot is deleted even if the caller's
        # with-block raises; previously an exception leaked the temp dir.
        try:
            yield builder
        finally:
            builder.chroot().delete()
コード例 #35
0
ファイル: python_task.py プロジェクト: ericzundel/pants
class PythonTask(Task):
  """Base task for python backend tasks that build and cache PEX chroots."""

  # If needed, we set this as the executable entry point of any chroots we create.
  CHROOT_EXECUTABLE_NAME = '__pants_executable__'

  @classmethod
  def implementation_version(cls):
    return super(PythonTask, cls).implementation_version() + [('PythonTask', 1)]

  @classmethod
  def subsystem_dependencies(cls):
    return (super(PythonTask, cls).subsystem_dependencies() +
            (IvySubsystem, PythonSetup, PythonRepos, ThriftBinary.Factory.scoped(cls)))

  def __init__(self, *args, **kwargs):
    super(PythonTask, self).__init__(*args, **kwargs)
    # Interpreter constraint strings; b'' means "no constraint" to the cache.
    self._compatibilities = self.get_options().interpreter or [b'']
    self._interpreter_cache = None
    self._interpreter = None

  @property
  def interpreter_cache(self):
    """Lazily-constructed PythonInterpreterCache, set up under the pants lock."""
    if self._interpreter_cache is None:
      self._interpreter_cache = PythonInterpreterCache(PythonSetup.global_instance(),
                                                       PythonRepos.global_instance(),
                                                       logger=self.context.log.debug)

      # Cache setup's requirement fetching can hang if run concurrently by another pants proc.
      self.context.acquire_lock()
      try:
        # We pass in filters=compatibilities because setting up some python versions
        # (e.g., 3<=python<3.3) crashes, and this gives us an escape hatch.
        self._interpreter_cache.setup(filters=self._compatibilities)
      finally:
        self.context.release_lock()
    return self._interpreter_cache

  def select_interpreter_for_targets(self, targets):
    """Pick an interpreter compatible with all the specified targets."""
    return self.interpreter_cache.select_interpreter_for_targets(targets)

  @property
  def chroot_cache_dir(self):
    return PythonSetup.global_instance().chroot_cache_dir

  @property
  def ivy_bootstrapper(self):
    return Bootstrapper(ivy_subsystem=IvySubsystem.global_instance())

  @property
  def thrift_binary_factory(self):
    return ThriftBinary.Factory.scoped_instance(self).create

  def create_chroot(self, interpreter, builder, targets, platforms, extra_requirements):
    """Construct (but do not dump) a PythonChroot wrapping *builder*."""
    return PythonChroot(python_setup=PythonSetup.global_instance(),
                        python_repos=PythonRepos.global_instance(),
                        ivy_bootstrapper=self.ivy_bootstrapper,
                        thrift_binary_factory=self.thrift_binary_factory,
                        interpreter=interpreter,
                        builder=builder,
                        targets=targets,
                        platforms=platforms,
                        extra_requirements=extra_requirements,
                        log=self.context.log)

  def cached_chroot(self, interpreter, pex_info, targets, platforms=None,
                    extra_requirements=None, executable_file_content=None):
    """Returns a cached PythonChroot created with the specified args.

    The returned chroot will be cached for future use.

    :rtype: pants.backend.python.python_chroot.PythonChroot

    TODO: Garbage-collect old chroots, so they don't pile up?
    TODO: Ideally chroots would just be products produced by some other task. But that's
          a bit too complicated to implement right now, as we'd need a way to request
          chroots for a variety of sets of targets.
    """
    # This PexInfo contains any customizations specified by the caller.
    # The process of building a pex modifies it further.
    pex_info = pex_info or PexInfo.default()

    path = self._chroot_path(interpreter, pex_info, targets, platforms, extra_requirements,
                             executable_file_content)
    if not os.path.exists(path):
      # Build under a .tmp name and move into place so a crash mid-build can't
      # leave a half-populated chroot at the cached path.
      path_tmp = path + '.tmp'
      self._build_chroot(path_tmp, interpreter, pex_info, targets, platforms,
                         extra_requirements, executable_file_content)
      shutil.move(path_tmp, path)

    # We must read the PexInfo that was frozen into the pex, so we get the modifications
    # created when that pex was built.
    pex_info = PexInfo.from_pex(path)
    # Now create a PythonChroot wrapper without dumping it.
    builder = PEXBuilder(path=path, interpreter=interpreter, pex_info=pex_info, copy=True)
    return self.create_chroot(interpreter=interpreter,
                              builder=builder,
                              targets=targets,
                              platforms=platforms,
                              extra_requirements=extra_requirements)

  @contextmanager
  def temporary_chroot(self, interpreter, pex_info, targets, platforms,
                       extra_requirements=None, executable_file_content=None):
    """Yields a freshly-built PythonChroot, deleting it on exit."""
    path = tempfile.mkdtemp()  # Not a contextmanager: chroot.delete() will clean this up anyway.
    pex_info = pex_info or PexInfo.default()
    chroot = self._build_chroot(path, interpreter, pex_info, targets, platforms,
                                extra_requirements, executable_file_content)
    # Guard the yield so the chroot is deleted even if the caller's with-block
    # raises; previously an exception leaked the temp dir.
    try:
      yield chroot
    finally:
      chroot.delete()

  def _build_chroot(self, path, interpreter, pex_info, targets, platforms,
                     extra_requirements=None, executable_file_content=None):
    """Create a PythonChroot with the specified args."""
    builder = PEXBuilder(path=path, interpreter=interpreter, pex_info=pex_info, copy=True)
    with self.context.new_workunit('chroot'):
      chroot = self.create_chroot(
        interpreter=interpreter,
        builder=builder,
        targets=targets,
        platforms=platforms,
        extra_requirements=extra_requirements)
      chroot.dump()
      if executable_file_content is not None:
        with open(os.path.join(path, '{}.py'.format(self.CHROOT_EXECUTABLE_NAME)), 'w') as outfile:
          outfile.write(executable_file_content)
        # Override any user-specified entry point, under the assumption that the
        # executable_file_content does what the user intends (including, probably, calling that
        # underlying entry point).
        pex_info.entry_point = self.CHROOT_EXECUTABLE_NAME
      builder.freeze()
    return chroot

  def _chroot_path(self, interpreter, pex_info, targets, platforms, extra_requirements,
                   executable_file_content):
    """Pick a unique, well-known directory name for the chroot with the specified parameters.

    TODO: How many of these do we expect to have? Currently they are all under a single
    directory, and some filesystems (E.g., HFS+) don't handle directories with thousands of
    entries well. GC'ing old chroots may be enough of a solution, assuming this is even a problem.
    """
    fingerprint_components = [str(interpreter.identity)]

    if pex_info:
      # TODO(John Sirois): When https://rbcommons.com/s/twitter/r/2517/ lands, leverage the dump
      # **kwargs to sort keys or else find some other better way to get a stable fingerprint of
      # PexInfo.
      fingerprint_components.append(json.dumps(json.loads(pex_info.dump()), sort_keys=True))

    fingerprint_components.extend(sorted(t.transitive_invalidation_hash() for t in set(targets)))

    if platforms:
      fingerprint_components.extend(sorted(set(platforms)))

    if extra_requirements:
      # TODO(John Sirois): The extras should be uniqified before fingerprinting, but
      # PythonRequirement arguably does not have a proper __eq__.  For now we lean on the cache_key
      # of unique PythonRequirement being unique - which is probably good enough (the cache key is
      # narrower than the full scope of PythonRequirement attributes at present, thus the hedge).
      fingerprint_components.extend(sorted(set(r.cache_key() for r in extra_requirements)))

    if executable_file_content is not None:
      fingerprint_components.append(executable_file_content)

    fingerprint = hash_utils.hash_all(fingerprint_components)
    return os.path.join(self.chroot_cache_dir, fingerprint)
コード例 #36
0
ファイル: python_task.py プロジェクト: lenucksi/pants
class PythonTask(Task):
    """Base class for tasks that build Python chroots (PEX build directories).

    Provides interpreter selection plus cached and temporary chroot creation.
    """

    # If needed, we set this as the executable entry point of any chroots we create.
    CHROOT_EXECUTABLE_NAME = '__pants_executable__'

    @classmethod
    def implementation_version(cls):
        return super(PythonTask,
                     cls).implementation_version() + [('PythonTask', 1)]

    @classmethod
    def subsystem_dependencies(cls):
        return (super(PythonTask, cls).subsystem_dependencies() +
                (IvySubsystem, PythonSetup, PythonRepos,
                 ThriftBinary.Factory.scoped(cls)))

    def __init__(self, *args, **kwargs):
        super(PythonTask, self).__init__(*args, **kwargs)
        # Both are built lazily; see the interpreter_cache property.
        self._interpreter_cache = None
        self._interpreter = None

    @property
    def interpreter_cache(self):
        """Lazily set up and return the interpreter cache, under the pants process lock."""
        if self._interpreter_cache is None:
            self._interpreter_cache = PythonInterpreterCache(
                PythonSetup.global_instance(),
                PythonRepos.global_instance(),
                logger=self.context.log.debug)

            # Cache setup's requirement fetching can hang if run concurrently by another pants proc.
            self.context.acquire_lock()
            try:
                self._interpreter_cache.setup()
            finally:
                self.context.release_lock()
        return self._interpreter_cache

    def select_interpreter_for_targets(self, targets):
        """Pick an interpreter compatible with all the specified targets."""
        return self.interpreter_cache.select_interpreter_for_targets(targets)

    @property
    def chroot_cache_dir(self):
        return PythonSetup.global_instance().chroot_cache_dir

    @property
    def ivy_bootstrapper(self):
        return Bootstrapper(ivy_subsystem=IvySubsystem.global_instance())

    @property
    def thrift_binary_factory(self):
        return ThriftBinary.Factory.scoped_instance(self).create

    def create_chroot(self, interpreter, builder, targets, platforms,
                      extra_requirements):
        """Construct a PythonChroot wrapper around `builder` (does not dump it)."""
        return PythonChroot(python_setup=PythonSetup.global_instance(),
                            python_repos=PythonRepos.global_instance(),
                            ivy_bootstrapper=self.ivy_bootstrapper,
                            thrift_binary_factory=self.thrift_binary_factory,
                            interpreter=interpreter,
                            builder=builder,
                            targets=targets,
                            platforms=platforms,
                            extra_requirements=extra_requirements,
                            log=self.context.log)

    def cached_chroot(self,
                      interpreter,
                      pex_info,
                      targets,
                      platforms=None,
                      extra_requirements=None,
                      executable_file_content=None):
        """Returns a cached PythonChroot created with the specified args.

    The returned chroot will be cached for future use.

    :rtype: pants.backend.python.python_chroot.PythonChroot

    TODO: Garbage-collect old chroots, so they don't pile up?
    TODO: Ideally chroots would just be products produced by some other task. But that's
          a bit too complicated to implement right now, as we'd need a way to request
          chroots for a variety of sets of targets.
    """
        # This PexInfo contains any customizations specified by the caller.
        # The process of building a pex modifies it further.
        pex_info = pex_info or PexInfo.default()

        path = self._chroot_path(interpreter, pex_info, targets, platforms,
                                 extra_requirements, executable_file_content)
        if not os.path.exists(path):
            # Build into a temp dir and move it into place so a crash can't leave a
            # half-built chroot at the cached path.
            path_tmp = path + '.tmp'
            self._build_chroot(path_tmp, interpreter, pex_info, targets,
                               platforms, extra_requirements,
                               executable_file_content)
            shutil.move(path_tmp, path)

        # We must read the PexInfo that was frozen into the pex, so we get the modifications
        # created when that pex was built.
        pex_info = PexInfo.from_pex(path)

        # Now create a PythonChroot wrapper without dumping it.
        # NOTE: the builder created by _configure_shebang was previously discarded,
        # leaving `builder` undefined here (NameError); capture the returned builder.
        builder = self._configure_shebang(targets, path, interpreter, pex_info)

        return self.create_chroot(interpreter=interpreter,
                                  builder=builder,
                                  targets=targets,
                                  platforms=platforms,
                                  extra_requirements=extra_requirements)

    @contextmanager
    def temporary_chroot(self,
                         interpreter,
                         pex_info,
                         targets,
                         platforms,
                         extra_requirements=None,
                         executable_file_content=None):
        """Yields a freshly-built PythonChroot, deleting it when the context exits."""
        path = tempfile.mkdtemp(
        )  # Not a contextmanager: chroot.delete() will clean this up anyway.
        pex_info = pex_info or PexInfo.default()
        chroot = self._build_chroot(path, interpreter, pex_info, targets,
                                    platforms, extra_requirements,
                                    executable_file_content)
        try:
            yield chroot
        finally:
            # Delete even if the caller's block raises, so the temp dir never leaks.
            chroot.delete()

    def _build_chroot(self,
                      path,
                      interpreter,
                      pex_info,
                      targets,
                      platforms,
                      extra_requirements=None,
                      executable_file_content=None):
        """Create, dump and freeze a PythonChroot with the specified args."""
        # Capture the PEXBuilder so we can hand it to the chroot and freeze it below
        # (previously it was discarded inside _configure_shebang, leaving `builder`
        # undefined in this method).
        builder = self._configure_shebang(targets, path, interpreter, pex_info)

        with self.context.new_workunit('chroot'):
            chroot = self.create_chroot(interpreter=interpreter,
                                        builder=builder,
                                        targets=targets,
                                        platforms=platforms,
                                        extra_requirements=extra_requirements)
            chroot.dump()
            if executable_file_content is not None:
                executable_path = os.path.join(
                    path, '{}.py'.format(self.CHROOT_EXECUTABLE_NAME))
                with open(executable_path, 'w') as outfile:
                    outfile.write(executable_file_content)
                # Override any user-specified entry point, under the assumption that the
                # executable_file_content does what the user intends (including, probably,
                # calling that underlying entry point).
                pex_info.entry_point = self.CHROOT_EXECUTABLE_NAME
            builder.freeze()
        return chroot

    def _chroot_path(self, interpreter, pex_info, targets, platforms,
                     extra_requirements, executable_file_content):
        """Pick a unique, well-known directory name for the chroot with the specified parameters.

    TODO: How many of these do we expect to have? Currently they are all under a single
    directory, and some filesystems (E.g., HFS+) don't handle directories with thousands of
    entries well. GC'ing old chroots may be enough of a solution, assuming this is even a problem.
    """
        fingerprint_components = [str(interpreter.identity)]

        if pex_info:
            # TODO(John Sirois): When https://rbcommons.com/s/twitter/r/2517/ lands, leverage the dump
            # **kwargs to sort keys or else find some other better way to get a stable fingerprint of
            # PexInfo.
            fingerprint_components.append(
                json.dumps(json.loads(pex_info.dump()), sort_keys=True))

        # De-dupe targets and sort the hashes so input ordering doesn't change the fingerprint.
        fingerprint_components.extend(
            sorted(t.transitive_invalidation_hash() for t in set(targets)))

        if platforms:
            fingerprint_components.extend(sorted(set(platforms)))

        if extra_requirements:
            # TODO(John Sirois): The extras should be uniqified before fingerprinting, but
            # PythonRequirement arguably does not have a proper __eq__.  For now we lean on the cache_key
            # of unique PythonRequirement being unique - which is probably good enough (the cache key is
            # narrower than the full scope of PythonRequirement attributes at present, thus the hedge).
            fingerprint_components.extend(
                sorted(set(r.cache_key() for r in extra_requirements)))

        if executable_file_content is not None:
            fingerprint_components.append(executable_file_content)

        fingerprint = hash_utils.hash_all(fingerprint_components)
        return os.path.join(self.chroot_cache_dir, fingerprint)

    def _configure_shebang(self, targets, path, interpreter, pex_info):
        """Create a PEXBuilder rooted at `path`, configure its shebang, and return it.

        The shebang is customized only when exactly one PythonBinary target with an
        explicit shebang is present; otherwise the builder default is kept.
        """
        builder = PEXBuilder(path=path,
                             interpreter=interpreter,
                             pex_info=pex_info,
                             copy=True)

        binary_targets = [
            target for target in targets if isinstance(target, PythonBinary)
        ]
        if len(binary_targets) == 1 and binary_targets[0].shebang:
            builder.set_shebang(binary_targets[0].shebang)
        return builder
コード例 #37
0
ファイル: python_task.py プロジェクト: digideskio/pants
class PythonTask(Task):
  """Base class for Python tasks: interpreter selection and temporary PEX building."""

  @classmethod
  def register_options(cls, register):
    super(PythonTask, cls).register_options(register)
    register('--timeout', type=int, default=0,
             help='Number of seconds to wait for http connections.')

  def __init__(self, *args, **kwargs):
    super(PythonTask, self).__init__(*args, **kwargs)
    # Fall back to the config-wide default when no explicit timeout option was given.
    self.conn_timeout = (self.get_options().timeout or
                         self.context.config.getdefault('connection_timeout'))

    self._compatibilities = self.get_options().interpreter or [b'']
    # Both are built lazily; see the interpreter_cache / interpreter properties.
    self._interpreter_cache = None
    self._interpreter = None

  @property
  def interpreter_cache(self):
    """Lazily set up and return the interpreter cache, under the pants process lock."""
    if self._interpreter_cache is None:
      self._interpreter_cache = PythonInterpreterCache(self.context.config,
                                                       logger=self.context.log.debug)

      # Cache setup's requirement fetching can hang if run concurrently by another pants proc.
      self.context.acquire_lock()
      try:
        # We pass in filters=compatibilities because setting up some python versions
        # (e.g., 3<=python<3.3) crashes, and this gives us an escape hatch.
        self._interpreter_cache.setup(filters=self._compatibilities)
      finally:
        self.context.release_lock()
    return self._interpreter_cache

  @property
  def interpreter(self):
    """Subclasses can use this if they're fine with the default interpreter (the usual case)."""
    if self._interpreter is None:
      self._interpreter = self.select_interpreter(self._compatibilities)
    return self._interpreter

  def select_interpreter_for_targets(self, targets):
    """Pick an interpreter compatible with all the specified targets.

    :raises TaskError: if no single interpreter satisfies every target's constraints.
    """
    allowed_interpreters = OrderedSet(self.interpreter_cache.interpreters)
    targets_with_compatibilities = []  # Used only for error messages.

    # Constrain allowed_interpreters based on each target's compatibility requirements.
    for target in targets:
      if target.is_python and hasattr(target, 'compatibility') and target.compatibility:
        targets_with_compatibilities.append(target)
        compatible_with_target = list(self.interpreter_cache.matches(target.compatibility))
        allowed_interpreters &= compatible_with_target

    if not allowed_interpreters:
      # Create a helpful error message.
      unique_compatibilities = set(tuple(t.compatibility) for t in targets_with_compatibilities)
      unique_compatibilities_strs = [','.join(x) for x in unique_compatibilities if x]
      targets_with_compatibilities_strs = [str(t) for t in targets_with_compatibilities]
      raise TaskError('Unable to detect a suitable interpreter for compatibilities: %s '
                      '(Conflicting targets: %s)' % (' && '.join(unique_compatibilities_strs),
                                                     ', '.join(targets_with_compatibilities_strs)))

    # Return the lowest compatible interpreter.
    return self.interpreter_cache.select_interpreter(allowed_interpreters)[0]

  def select_interpreter(self, filters):
    """Subclasses can use this to be more specific about interpreter selection."""
    interpreters = self.interpreter_cache.select_interpreter(
      list(self.interpreter_cache.matches(filters)))
    if len(interpreters) != 1:
      raise TaskError('Unable to detect a suitable interpreter.')
    interpreter = interpreters[0]
    self.context.log.debug('Selected %s' % interpreter)
    return interpreter

  @contextmanager
  def temporary_pex_builder(self, interpreter=None, pex_info=None, parent_dir=None):
    """Yields a PEXBuilder and cleans up its chroot when it goes out of context."""
    path = tempfile.mkdtemp(dir=parent_dir)
    builder = PEXBuilder(path=path, interpreter=interpreter, pex_info=pex_info)
    try:
      yield builder
    finally:
      # Clean up even when the caller's block raises; previously the delete was
      # skipped on exception, leaking the temporary chroot directory.
      builder.chroot().delete()
コード例 #38
0
ファイル: python_task.py プロジェクト: sheltowt/pants
class PythonTask(Task):
  """Base class for Python tasks: interpreter selection and temporary PEX building."""

  @classmethod
  def setup_parser(cls, option_group, args, mkflag):
    option_group.add_option(mkflag('timeout'), dest='python_conn_timeout', type='int',
                            default=0, help='Number of seconds to wait for http connections.')

  def __init__(self, *args, **kwargs):
    super(PythonTask, self).__init__(*args, **kwargs)
    # Fall back to the config-wide default when no explicit timeout option was given.
    self.conn_timeout = (self.context.options.python_conn_timeout or
                         self.context.config.getdefault('connection_timeout'))
    compatibilities = self.context.options.interpreter or [b'']

    self.interpreter_cache = PythonInterpreterCache(self.context.config,
                                                    logger=self.context.log.debug)
    # We pass in filters=compatibilities because setting up some python versions
    # (e.g., 3<=python<3.3) crashes, and this gives us an escape hatch.
    self.interpreter_cache.setup(filters=compatibilities)

    # Select a default interpreter to use.
    self._interpreter = self.select_interpreter(compatibilities)

  @property
  def interpreter(self):
    """Subclasses can use this if they're fine with the default interpreter (the usual case)."""
    return self._interpreter

  def select_interpreter_for_targets(self, targets):
    """Pick an interpreter compatible with all the specified targets.

    :raises TaskError: if no single interpreter satisfies every target's constraints.
    """
    allowed_interpreters = OrderedSet(self.interpreter_cache.interpreters)
    targets_with_compatibilities = []  # Used only for error messages.

    # Constrain allowed_interpreters based on each target's compatibility requirements.
    for target in targets:
      if target.is_python and hasattr(target, 'compatibility') and target.compatibility:
        targets_with_compatibilities.append(target)
        compatible_with_target = list(self.interpreter_cache.matches(target.compatibility))
        allowed_interpreters &= compatible_with_target

    if not allowed_interpreters:
      # Create a helpful error message.
      unique_compatibilities = set(tuple(t.compatibility) for t in targets_with_compatibilities)
      unique_compatibilities_strs = [','.join(x) for x in unique_compatibilities if x]
      targets_with_compatibilities_strs = [str(t) for t in targets_with_compatibilities]
      raise TaskError('Unable to detect a suitable interpreter for compatibilities: %s '
                      '(Conflicting targets: %s)' % (' && '.join(unique_compatibilities_strs),
                                                     ', '.join(targets_with_compatibilities_strs)))

    # Return the lowest compatible interpreter.
    return self.interpreter_cache.select_interpreter(allowed_interpreters)[0]

  def select_interpreter(self, filters):
    """Subclasses can use this to be more specific about interpreter selection."""
    interpreters = self.interpreter_cache.select_interpreter(
      list(self.interpreter_cache.matches(filters)))
    if len(interpreters) != 1:
      raise TaskError('Unable to detect a suitable interpreter.')
    interpreter = interpreters[0]
    self.context.log.debug('Selected %s' % interpreter)
    return interpreter

  @contextmanager
  def temporary_pex_builder(self, interpreter=None, pex_info=None, parent_dir=None):
    """Yields a PEXBuilder and cleans up its chroot when it goes out of context."""
    path = tempfile.mkdtemp(dir=parent_dir)
    builder = PEXBuilder(path=path, interpreter=interpreter, pex_info=pex_info)
    try:
      yield builder
    finally:
      # Clean up even when the caller's block raises; previously the delete was
      # skipped on exception, leaking the temporary chroot directory.
      builder.chroot().delete()
コード例 #39
0
ファイル: python_task.py プロジェクト: pcurry/pants
class PythonTask(Task):
  """Base class for tasks that build Python chroots (PEX build directories)."""

  # If needed, we set this as the executable entry point of any chroots we create.
  CHROOT_EXECUTABLE_NAME = '__pants_executable__'

  @classmethod
  def global_subsystems(cls):
    return super(PythonTask, cls).global_subsystems() + (PythonSetup, PythonRepos)

  def __init__(self, *args, **kwargs):
    super(PythonTask, self).__init__(*args, **kwargs)
    self._compatibilities = self.get_options().interpreter or [b'']
    # Both are built lazily; see the interpreter_cache / interpreter properties.
    self._interpreter_cache = None
    self._interpreter = None

  @property
  def interpreter_cache(self):
    """Lazily set up and return the interpreter cache, under the pants process lock."""
    if self._interpreter_cache is None:
      self._interpreter_cache = PythonInterpreterCache(PythonSetup.global_instance(),
                                                       PythonRepos.global_instance(),
                                                       logger=self.context.log.debug)

      # Cache setup's requirement fetching can hang if run concurrently by another pants proc.
      self.context.acquire_lock()
      try:
        # We pass in filters=compatibilities because setting up some python versions
        # (e.g., 3<=python<3.3) crashes, and this gives us an escape hatch.
        self._interpreter_cache.setup(filters=self._compatibilities)
      finally:
        self.context.release_lock()
    return self._interpreter_cache

  @property
  def interpreter(self):
    """Subclasses can use this if they're fine with the default interpreter (the usual case)."""
    if self._interpreter is None:
      self._interpreter = self.select_interpreter(self._compatibilities)
    return self._interpreter

  def select_interpreter_for_targets(self, targets):
    """Pick an interpreter compatible with all the specified targets.

    :raises TaskError: if no single interpreter satisfies every target's constraints.
    """
    allowed_interpreters = OrderedSet(self.interpreter_cache.interpreters)
    targets_with_compatibilities = []  # Used only for error messages.

    # Constrain allowed_interpreters based on each target's compatibility requirements.
    for target in targets:
      if target.is_python and hasattr(target, 'compatibility') and target.compatibility:
        targets_with_compatibilities.append(target)
        compatible_with_target = list(self.interpreter_cache.matches(target.compatibility))
        allowed_interpreters &= compatible_with_target

    if not allowed_interpreters:
      # Create a helpful error message.
      unique_compatibilities = set(tuple(t.compatibility) for t in targets_with_compatibilities)
      unique_compatibilities_strs = [','.join(x) for x in unique_compatibilities if x]
      targets_with_compatibilities_strs = [str(t) for t in targets_with_compatibilities]
      raise TaskError('Unable to detect a suitable interpreter for compatibilities: {} '
                      '(Conflicting targets: {})'.format(' && '.join(unique_compatibilities_strs),
                                                         ', '.join(targets_with_compatibilities_strs)))

    # Return the lowest compatible interpreter.
    return self.interpreter_cache.select_interpreter(allowed_interpreters)[0]

  def select_interpreter(self, filters):
    """Subclasses can use this to be more specific about interpreter selection."""
    interpreters = self.interpreter_cache.select_interpreter(
      list(self.interpreter_cache.matches(filters)))
    if len(interpreters) != 1:
      raise TaskError('Unable to detect a suitable interpreter.')
    interpreter = interpreters[0]
    self.context.log.debug('Selected {}'.format(interpreter))
    return interpreter

  @property
  def chroot_cache_dir(self):
    return PythonSetup.global_instance().chroot_cache_dir

  @contextmanager
  def cached_chroot(self, interpreter, pex_info, targets, platforms,
                    extra_requirements=None, executable_file_content=None):
    """Returns a cached PythonChroot created with the specified args.

    The returned chroot will be cached for future use.

    TODO: Garbage-collect old chroots, so they don't pile up?
    TODO: Ideally chroots would just be products produced by some other task. But that's
          a bit too complicated to implement right now, as we'd need a way to request
          chroots for a variety of sets of targets.
    """
    # This PexInfo contains any customizations specified by the caller.
    # The process of building a pex modifies it further.
    pex_info = pex_info or PexInfo.default()

    path = self._chroot_path(PythonSetup.global_instance(), interpreter, pex_info, targets,
                             platforms, extra_requirements, executable_file_content)
    if not os.path.exists(path):
      # Build into a temp dir and move it into place so a crash can't leave a
      # half-built chroot at the cached path.
      path_tmp = path + '.tmp'
      self._build_chroot(path_tmp, interpreter, pex_info, targets, platforms,
                         extra_requirements, executable_file_content)
      shutil.move(path_tmp, path)

    # We must read the PexInfo that was frozen into the pex, so we get the modifications
    # created when that pex was built.
    pex_info = PexInfo.from_pex(path)
    # Now create a PythonChroot wrapper without dumping it.
    builder = PEXBuilder(path=path, interpreter=interpreter, pex_info=pex_info)
    chroot = PythonChroot(
      context=self.context,
      python_setup=PythonSetup.global_instance(),
      python_repos=PythonRepos.global_instance(),
      interpreter=interpreter,
      builder=builder,
      targets=targets,
      platforms=platforms,
      extra_requirements=extra_requirements)
    # TODO: Doesn't really need to be a contextmanager, but it's convenient to make it so
    # while transitioning calls to temporary_chroot to calls to cached_chroot.
    # We can revisit after that transition is complete.
    yield chroot

  @contextmanager
  def temporary_chroot(self, interpreter, pex_info, targets, platforms,
                       extra_requirements=None, executable_file_content=None):
    """Yields a freshly-built PythonChroot, deleting it when the context exits."""
    path = tempfile.mkdtemp()  # Not a contextmanager: chroot.delete() will clean this up anyway.
    pex_info = pex_info or PexInfo.default()
    chroot = self._build_chroot(path, interpreter, pex_info, targets, platforms,
                                extra_requirements, executable_file_content)
    try:
      yield chroot
    finally:
      # Delete even if the caller's block raises; previously the delete was skipped
      # on exception, leaking the temporary chroot directory.
      chroot.delete()

  def _build_chroot(self, path, interpreter, pex_info, targets, platforms,
                     extra_requirements=None, executable_file_content=None):
    """Create, dump and freeze a PythonChroot with the specified args."""
    builder = PEXBuilder(path=path, interpreter=interpreter, pex_info=pex_info)
    with self.context.new_workunit('chroot'):
      chroot = PythonChroot(
        context=self.context,
        python_setup=PythonSetup.global_instance(),
        python_repos=PythonRepos.global_instance(),
        interpreter=interpreter,
        builder=builder,
        targets=targets,
        platforms=platforms,
        extra_requirements=extra_requirements)
      chroot.dump()
      if executable_file_content is not None:
        with open(os.path.join(path, '{}.py'.format(self.CHROOT_EXECUTABLE_NAME)), 'w') as outfile:
          outfile.write(executable_file_content)
        # Override any user-specified entry point, under the assumption that the
        # executable_file_content does what the user intends (including, probably, calling that
        # underlying entry point).
        pex_info.entry_point = self.CHROOT_EXECUTABLE_NAME
      builder.freeze()
    return chroot

  def _chroot_path(self, python_setup, interpreter, pex_info, targets, platforms,
                   extra_requirements, executable_file_content):
    """Pick a unique, well-known directory name for the chroot with the specified parameters.

    TODO: How many of these do we expect to have? Currently they are all under a single
    directory, and some filesystems (E.g., HFS+) don't handle directories with thousands of
    entries well. GC'ing old chroots may be enough of a solution, assuming this is even a problem.
    """
    fingerprint_components = [str(interpreter.identity)]
    if pex_info:
      fingerprint_components.append(pex_info.dump())
    fingerprint_components.extend(filter(None, [t.payload.fingerprint() for t in targets]))
    if platforms:
      fingerprint_components.extend(platforms)
    if extra_requirements:
      fingerprint_components.extend([r.cache_key() for r in extra_requirements])
    if executable_file_content is not None:
      fingerprint_components.append(executable_file_content)

    fingerprint = hash_utils.hash_all(fingerprint_components)
    return os.path.join(python_setup.chroot_cache_dir, fingerprint)
コード例 #40
0
ファイル: python_task.py プロジェクト: WamBamBoozle/pants
class PythonTask(Task):
  """Base class for Python tasks: interpreter selection and temporary chroot building."""

  def __init__(self, *args, **kwargs):
    super(PythonTask, self).__init__(*args, **kwargs)
    self._compatibilities = self.get_options().interpreter or [b'']
    # Both are built lazily; see the interpreter_cache / interpreter properties.
    self._interpreter_cache = None
    self._interpreter = None

  @property
  def interpreter_cache(self):
    """Lazily set up and return the interpreter cache, under the pants process lock."""
    if self._interpreter_cache is None:
      self._interpreter_cache = PythonInterpreterCache(PythonSetup(self.context.config),
                                                       PythonRepos(self.context.config),
                                                       logger=self.context.log.debug)

      # Cache setup's requirement fetching can hang if run concurrently by another pants proc.
      self.context.acquire_lock()
      try:
        # We pass in filters=compatibilities because setting up some python versions
        # (e.g., 3<=python<3.3) crashes, and this gives us an escape hatch.
        self._interpreter_cache.setup(filters=self._compatibilities)
      finally:
        self.context.release_lock()
    return self._interpreter_cache

  @property
  def interpreter(self):
    """Subclasses can use this if they're fine with the default interpreter (the usual case)."""
    if self._interpreter is None:
      self._interpreter = self.select_interpreter(self._compatibilities)
    return self._interpreter

  def select_interpreter_for_targets(self, targets):
    """Pick an interpreter compatible with all the specified targets.

    :raises TaskError: if no single interpreter satisfies every target's constraints.
    """
    allowed_interpreters = OrderedSet(self.interpreter_cache.interpreters)
    targets_with_compatibilities = []  # Used only for error messages.

    # Constrain allowed_interpreters based on each target's compatibility requirements.
    for target in targets:
      if target.is_python and hasattr(target, 'compatibility') and target.compatibility:
        targets_with_compatibilities.append(target)
        compatible_with_target = list(self.interpreter_cache.matches(target.compatibility))
        allowed_interpreters &= compatible_with_target

    if not allowed_interpreters:
      # Create a helpful error message.
      unique_compatibilities = set(tuple(t.compatibility) for t in targets_with_compatibilities)
      unique_compatibilities_strs = [','.join(x) for x in unique_compatibilities if x]
      targets_with_compatibilities_strs = [str(t) for t in targets_with_compatibilities]
      raise TaskError('Unable to detect a suitable interpreter for compatibilities: {} '
                      '(Conflicting targets: {})'.format(' && '.join(unique_compatibilities_strs),
                                                         ', '.join(targets_with_compatibilities_strs)))

    # Return the lowest compatible interpreter.
    return self.interpreter_cache.select_interpreter(allowed_interpreters)[0]

  def select_interpreter(self, filters):
    """Subclasses can use this to be more specific about interpreter selection."""
    interpreters = self.interpreter_cache.select_interpreter(
      list(self.interpreter_cache.matches(filters)))
    if len(interpreters) != 1:
      raise TaskError('Unable to detect a suitable interpreter.')
    interpreter = interpreters[0]
    self.context.log.debug('Selected {}'.format(interpreter))
    return interpreter

  @contextmanager
  def temporary_chroot(self, interpreter=None, pex_info=None, targets=None,
                       extra_requirements=None, platforms=None, pre_freeze=None):
    """Yields a temporary PythonChroot created with the specified args.

    pre_freeze is an optional function run on the chroot just before freezing its builder,
    to allow for any extra modification.
    """
    path = tempfile.mkdtemp()
    builder = PEXBuilder(path=path, interpreter=interpreter, pex_info=pex_info)
    with self.context.new_workunit('chroot'):
      chroot = PythonChroot(
        context=self.context,
        targets=targets,
        extra_requirements=extra_requirements,
        builder=builder,
        platforms=platforms,
        interpreter=interpreter)
      chroot.dump()
      if pre_freeze:
        pre_freeze(chroot)
      builder.freeze()
    try:
      yield chroot
    finally:
      # Delete even if the caller's block raises; previously the delete was skipped
      # on exception, leaking the temporary chroot directory.
      chroot.delete()
コード例 #41
0
ファイル: build.py プロジェクト: dbieber/pants
class Build(Command):
    """Builds a specified target.

    Legacy ``./pants build`` entry point for Python targets only; JVM and
    other targets are handled by ``./pants goal``.
    """

    __command__ = 'build'  # Name under which this command is registered.

    def setup_parser(self, parser, args):
        """Register the command-line options understood by ``build``."""
        parser.set_usage("\n"
                         "  %prog build (options) [spec] (build args)\n"
                         "  %prog build (options) [spec]... -- (build args)")
        parser.add_option(
            "-t",
            "--timeout",
            dest="conn_timeout",
            type="int",
            default=Config.load().getdefault('connection_timeout'),
            help="Number of seconds to wait for http connections.")
        parser.add_option(
            '-i',
            '--interpreter',
            dest='interpreters',
            default=[],
            action='append',
            help="Constrain what Python interpreters to use.  Uses Requirement "
            "format from pkg_resources, e.g. 'CPython>=2.6,<3' or 'PyPy'. "
            "By default, no constraints are used.  Multiple constraints may "
            "be added.  They will be ORed together.")
        parser.add_option('-v',
                          '--verbose',
                          dest='verbose',
                          default=False,
                          action='store_true',
                          help='Show verbose output.')
        parser.add_option('-f',
                          '--fast',
                          dest='fast',
                          default=False,
                          action='store_true',
                          help='Run tests in a single chroot.')
        # Everything after the first positional arg belongs to the build, not
        # to this parser.
        parser.disable_interspersed_args()
        parser.epilog = (
            'Builds the specified Python target(s). Use ./pants goal for JVM and other '
            'targets.')

    def __init__(self, *args, **kwargs):
        """Parse specs/options, select an interpreter, and resolve targets.

        Calls ``self.error`` (which reports and aborts) on any bad spec,
        unparsable BUILD target, or missing target.
        """
        super(Build, self).__init__(*args, **kwargs)

        if not self.args:
            self.error("A spec argument is required")

        self.config = Config.load()

        # NOTE(review): b'' appears to be a py2-era "match anything" filter
        # sentinel -- confirm it still behaves as intended under py3.
        interpreters = self.options.interpreters or [b'']
        self.interpreter_cache = PythonInterpreterCache(self.config,
                                                        logger=self.debug)
        self.interpreter_cache.setup(filters=interpreters)
        interpreters = self.interpreter_cache.select_interpreter(
            list(self.interpreter_cache.matches(interpreters)))
        if len(interpreters) != 1:
            self.error('Unable to detect suitable interpreter.')
        else:
            self.debug('Selected %s' % interpreters[0])
        self.interpreter = interpreters[0]

        # Everything before '--' is target specs; everything after it is
        # passed through verbatim to the underlying build.  With no '--', the
        # first arg is the (single) spec and the rest are build args.
        try:
            specs_end = self.args.index('--')
            if len(self.args) > specs_end:
                self.build_args = self.args[specs_end + 1:len(self.args) + 1]
            else:
                self.build_args = []
        except ValueError:
            specs_end = 1
            self.build_args = self.args[1:] if len(self.args) > 1 else []

        self.targets = OrderedSet()
        spec_parser = SpecParser(self.root_dir, self.build_file_parser)
        self.top_level_addresses = set()

        for spec in self.args[0:specs_end]:
            try:
                addresses = spec_parser.parse_addresses(spec)
            except Exception:
                # Was a bare 'except:'; narrowed so SystemExit and
                # KeyboardInterrupt propagate instead of being reported as
                # spec-parse failures.
                self.error("Problem parsing spec %s: %s" %
                           (spec, traceback.format_exc()))

            for address in addresses:
                self.top_level_addresses.add(address)
                try:
                    self.build_file_parser.inject_address_closure_into_build_graph(
                        address, self.build_graph)
                    target = self.build_graph.get_target(address)
                except Exception:
                    # Was a bare 'except:'; same narrowing as above.
                    self.error("Problem parsing BUILD target %s: %s" %
                               (address, traceback.format_exc()))

                if not target:
                    self.error("Target %s does not exist" % address)

                transitive_targets = self.build_graph.transitive_subgraph_of_addresses(
                    [target.address])
                for transitive_target in transitive_targets:
                    self.targets.add(transitive_target)

        # Only Python targets are buildable by this command.
        self.targets = [target for target in self.targets if target.is_python]

    def debug(self, message):
        """Print *message* to stderr, but only when --verbose was given."""
        if self.options.verbose:
            print(message, file=sys.stderr)

    def execute(self):
        """Run the build and return its exit status (-1 if nothing to build)."""
        print("Build operating on top level addresses: %s" %
              self.top_level_addresses)

        python_targets = OrderedSet()
        for target in self.targets:
            if target.is_python:
                python_targets.add(target)
            else:
                # NOTE(review): __init__ already filtered self.targets down to
                # is_python, so this branch looks unreachable -- kept as a
                # defensive guard.
                self.error("Cannot build target %s" % target)

        if python_targets:
            status = self._python_build(python_targets)
        else:
            status = -1

        return status

    def _python_build(self, targets):
        """Delegate the actual build of *targets* to PythonBuilder."""
        try:
            executor = PythonBuilder(self.run_tracker)
            return executor.build(targets,
                                  self.build_args,
                                  interpreter=self.interpreter,
                                  conn_timeout=self.options.conn_timeout,
                                  fast_tests=self.options.fast)
        except Exception:
            # Was a bare 'except:'; narrowed so Ctrl-C is not swallowed into
            # an error report.
            self.error("Problem executing PythonBuilder for targets %s: %s" %
                       (targets, traceback.format_exc()))