Code example #1
    def execute(self):
        interpreter = None
        python_tgts = self.context.targets(
            lambda tgt: isinstance(tgt, PythonTarget))
        fs = PythonInterpreterFingerprintStrategy(task=self)
        with self.invalidated(python_tgts,
                              fingerprint_strategy=fs) as invalidation_check:
            # If there are no relevant targets, we still go through the motions of selecting
            # an interpreter, to prevent downstream tasks from having to check for this special case.
            if invalidation_check.all_vts:
                target_set_id = VersionedTargetSet.from_versioned_targets(
                    invalidation_check.all_vts).cache_key.hash
            else:
                target_set_id = 'no_targets'
            interpreter_path_file = os.path.join(self.workdir, target_set_id,
                                                 'interpreter.path')
            if not os.path.exists(interpreter_path_file):
                interpreter_cache = PythonInterpreterCache(
                    PythonSetup.global_instance(),
                    PythonRepos.global_instance(),
                    logger=self.context.log.debug)

                # We filter the interpreter cache itself (and not just the interpreters we pull from it)
                # because setting up some python versions (e.g., 3<=python<3.3) crashes, and this gives us
                # an escape hatch.
                filters = self.get_options().constraints or [b'']

                # Cache setup's requirement fetching can hang if run concurrently by another pants proc.
                self.context.acquire_lock()
                try:
                    interpreter_cache.setup(filters=filters)
                finally:
                    self.context.release_lock()

                interpreter = interpreter_cache.select_interpreter_for_targets(
                    python_tgts)
                safe_mkdir_for(interpreter_path_file)
                with open(interpreter_path_file, 'w') as outfile:
                    outfile.write(b'{}\t{}\n'.format(
                        interpreter.binary, str(interpreter.identity)))
                    for dist, location in interpreter.extras.items():
                        dist_name, dist_version = dist
                        outfile.write(b'{}\t{}\t{}\n'.format(
                            dist_name, dist_version, location))

        if not interpreter:
            with open(interpreter_path_file, 'r') as infile:
                lines = infile.readlines()
                binary, identity = lines[0].strip().split('\t')
                extras = {}
                for line in lines[1:]:
                    dist_name, dist_version, location = line.strip().split(
                        '\t')
                    extras[(dist_name, dist_version)] = location

            interpreter = PythonInterpreter(binary,
                                            PythonIdentity.from_path(identity),
                                            extras)

        self.context.products.get_data(PythonInterpreter, lambda: interpreter)
Code example #2
    def dumped_chroot(self, targets):
        python_repos = create_subsystem(PythonRepos)

        with subsystem_instance(IvySubsystem) as ivy_subsystem:
            ivy_bootstrapper = Bootstrapper(ivy_subsystem=ivy_subsystem)

            with subsystem_instance(
                    ThriftBinary.Factory) as thrift_binary_factory:
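                # Build the interpreter cache from the test's PythonSetup/PythonRepos and
                # pick the first interpreter that matches the repo-wide requirement.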
                interpreter_cache = PythonInterpreterCache(
                    self.python_setup, python_repos)
                interpreter_cache.setup()
                interpreters = list(
                    interpreter_cache.matches(
                        [self.python_setup.interpreter_requirement]))
                self.assertGreater(len(interpreters), 0)
                interpreter = interpreters[0]

                with temporary_dir() as chroot:
                    pex_builder = PEXBuilder(path=chroot,
                                             interpreter=interpreter)

                    python_chroot = PythonChroot(
                        python_setup=self.python_setup,
                        python_repos=python_repos,
                        ivy_bootstrapper=ivy_bootstrapper,
                        thrift_binary_factory=thrift_binary_factory.create,
                        interpreter=interpreter,
                        builder=pex_builder,
                        targets=targets,
                        platforms=['current'])
                    try:
                        python_chroot.dump()
                        yield pex_builder, python_chroot
                    finally:
                        python_chroot.delete()
Code example #3
    def _resolve_requirements(self, target_roots, options=None):
        with temporary_dir() as cache_dir:
            options = options or {}
            options.setdefault(PythonSetup.options_scope,
                               {})['interpreter_cache_dir'] = cache_dir
            context = self.context(target_roots=target_roots,
                                   options=options,
                                   for_subsystems=[PythonSetup, PythonRepos])

            # We must get an interpreter via the cache, instead of using PythonInterpreter.get() directly,
            # to ensure that the interpreter has setuptools and wheel support.
            interpreter = PythonInterpreter.get()
            interpreter_cache = PythonInterpreterCache(
                PythonSetup.global_instance(),
                PythonRepos.global_instance(),
                logger=context.log.debug)
            interpreters = interpreter_cache.setup(
                paths=[os.path.dirname(interpreter.binary)],
                filters=[str(interpreter.identity.requirement)])
            context.products.get_data(PythonInterpreter,
                                      lambda: interpreters[0])

            task = self.create_task(context)
            task.execute()

            return context.products.get_data(
                ResolveRequirements.REQUIREMENTS_PEX)
Code example #4
File: build.py Project: dbieber/pants
    def __init__(self, *args, **kwargs):
        super(Build, self).__init__(*args, **kwargs)

        if not self.args:
            self.error("A spec argument is required")

        self.config = Config.load()

        interpreters = self.options.interpreters or [b'']
        self.interpreter_cache = PythonInterpreterCache(self.config,
                                                        logger=self.debug)
        self.interpreter_cache.setup(filters=interpreters)
        interpreters = self.interpreter_cache.select_interpreter(
            list(self.interpreter_cache.matches(interpreters)))
        if len(interpreters) != 1:
            self.error('Unable to detect suitable interpreter.')
        else:
            self.debug('Selected %s' % interpreters[0])
        self.interpreter = interpreters[0]

        try:
            specs_end = self.args.index('--')
            if len(self.args) > specs_end:
                self.build_args = self.args[specs_end + 1:len(self.args) + 1]
            else:
                self.build_args = []
        except ValueError:
            specs_end = 1
            self.build_args = self.args[1:] if len(self.args) > 1 else []

        self.targets = OrderedSet()
        spec_parser = SpecParser(self.root_dir, self.build_file_parser)
        self.top_level_addresses = set()

        for spec in self.args[0:specs_end]:
            try:
                addresses = spec_parser.parse_addresses(spec)
            except:
                self.error("Problem parsing spec %s: %s" %
                           (spec, traceback.format_exc()))

            for address in addresses:
                self.top_level_addresses.add(address)
                try:
                    self.build_file_parser.inject_address_closure_into_build_graph(
                        address, self.build_graph)
                    target = self.build_graph.get_target(address)
                except:
                    self.error("Problem parsing BUILD target %s: %s" %
                               (address, traceback.format_exc()))

                if not target:
                    self.error("Target %s does not exist" % address)

                transitive_targets = self.build_graph.transitive_subgraph_of_addresses(
                    [target.address])
                for transitive_target in transitive_targets:
                    self.targets.add(transitive_target)

        self.targets = [target for target in self.targets if target.is_python]
Code example #5
  def dumped_chroot(self, targets):
    # TODO(benjy): We shouldn't need to mention DistributionLocator here, as IvySubsystem
    # declares it as a dependency. However if we don't then test_antlr() below fails on
    # uninitialized options for that subsystem.  Hopefully my pending (as of 9/2016) change
    # to clean up how we initialize and create instances of subsystems in tests will make
    # this problem go away.
    self.context(for_subsystems=[PythonRepos, PythonSetup, IvySubsystem,
                                 DistributionLocator, ThriftBinary.Factory, BinaryUtil.Factory])
    python_repos = PythonRepos.global_instance()
    ivy_bootstrapper = Bootstrapper(ivy_subsystem=IvySubsystem.global_instance())
    thrift_binary_factory = ThriftBinary.Factory.global_instance().create

    interpreter_cache = PythonInterpreterCache(self.python_setup, python_repos)
    interpreter = interpreter_cache.select_interpreter_for_targets(targets)
    self.assertIsNotNone(interpreter)

    with temporary_dir() as chroot:
      pex_builder = PEXBuilder(path=chroot, interpreter=interpreter)

      python_chroot = PythonChroot(python_setup=self.python_setup,
                                   python_repos=python_repos,
                                   ivy_bootstrapper=ivy_bootstrapper,
                                   thrift_binary_factory=thrift_binary_factory,
                                   interpreter=interpreter,
                                   builder=pex_builder,
                                   targets=targets,
                                   platforms=['current'])
      try:
        python_chroot.dump()
        yield pex_builder, python_chroot
      finally:
        python_chroot.delete()
Code example #6
    def test_setup_using_eggs(self):
        def link_egg(repo_root, requirement):
            existing_dist_location = self._interpreter.get_location(
                requirement)
            if existing_dist_location is not None:
                existing_dist = Package.from_href(existing_dist_location)
                requirement = '{}=={}'.format(existing_dist.name,
                                              existing_dist.raw_version)

            distributions = resolve([requirement],
                                    interpreter=self._interpreter,
                                    precedence=(EggPackage, SourcePackage))
            self.assertEqual(1, len(distributions))
            dist_location = distributions[0].location

            self.assertRegexpMatches(dist_location, r'\.egg$')
            os.symlink(
                dist_location,
                os.path.join(repo_root, os.path.basename(dist_location)))

            return Package.from_href(dist_location).raw_version

        with temporary_dir() as root:
            egg_dir = os.path.join(root, 'eggs')
            os.makedirs(egg_dir)
            setuptools_version = link_egg(egg_dir, 'setuptools')
            wheel_version = link_egg(egg_dir, 'wheel')

            interpreter_requirement = self._interpreter.identity.requirement

            python_setup, python_repos = self.create_python_subsystems(
                setup_options={
                    'interpreter_cache_dir': None,
                    'pants_workdir': os.path.join(root, 'workdir'),
                    'constraints': [interpreter_requirement],
                    'setuptools_version': setuptools_version,
                    'wheel_version': wheel_version,
                },
                repos_options={
                    'indexes': [],
                    'repos': [egg_dir],
                })
            cache = PythonInterpreterCache(python_setup=python_setup,
                                           python_repos=python_repos)

            interpreters = cache.setup(
                paths=[os.path.dirname(self._interpreter.binary)],
                filters=[str(interpreter_requirement)])
            self.assertGreater(len(interpreters), 0)

            def assert_egg_extra(interpreter, name, version):
                location = interpreter.get_location('{}=={}'.format(
                    name, version))
                self.assertIsNotNone(location)
                self.assertIsInstance(Package.from_href(location), EggPackage)

            for interpreter in interpreters:
                assert_egg_extra(interpreter, 'setuptools', setuptools_version)
                assert_egg_extra(interpreter, 'wheel', wheel_version)
Code example #7
 def _setup_cache_at(self, path, constraints=None):
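     # Helper: build a PythonInterpreterCache rooted at `path`, pinning interpreter
     # constraints only when they are supplied.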
     setup_options = {'interpreter_cache_dir': path}
     if constraints:
         setup_options.update(interpreter_constraints=constraints)
     python_setup, python_repos = self.create_python_subsystems(
         setup_options=setup_options)
     return PythonInterpreterCache(python_setup=python_setup,
                                   python_repos=python_repos)
Code example #8
    def test_namespace_effective(self):
        self.create_file('src/thrift/com/foo/one.thrift',
                         contents=dedent("""
    namespace py foo.bar

    struct One {}
    """))
        one = self.make_target(spec='src/thrift/com/foo:one',
                               target_type=PythonThriftLibrary,
                               sources=['one.thrift'])
        apache_thrift_gen, synthetic_target_one = self.generate_single_thrift_target(
            one)

        self.create_file('src/thrift2/com/foo/two.thrift',
                         contents=dedent("""
    namespace py foo.baz

    struct Two {}
    """))
        two = self.make_target(spec='src/thrift2/com/foo:two',
                               target_type=PythonThriftLibrary,
                               sources=['two.thrift'])
        _, synthetic_target_two = self.generate_single_thrift_target(two)

        # Confirm separate PYTHONPATH entries, which we need to test namespace packages.
        self.assertNotEqual(synthetic_target_one.target_base,
                            synthetic_target_two.target_base)

        targets = (synthetic_target_one, synthetic_target_two)

        python_repos = global_subsystem_instance(PythonRepos)
        python_setup = global_subsystem_instance(PythonSetup)
        interpreter_cache = PythonInterpreterCache(python_setup, python_repos)
        interpreter = interpreter_cache.select_interpreter_for_targets(targets)

        pythonpath = [
            os.path.join(get_buildroot(), t.target_base) for t in targets
        ]
        for dist in resolve(
            ['thrift=={}'.format(self.get_thrift_version(apache_thrift_gen))],
                interpreter=interpreter,
                context=python_repos.get_network_context(),
                fetchers=python_repos.get_fetchers()):
            pythonpath.append(dist.location)

        process = subprocess.Popen([
            interpreter.binary, '-c',
            'from foo.bar.ttypes import One; from foo.baz.ttypes import Two'
        ],
                                   env={
                                       'PYTHONPATH':
                                       os.pathsep.join(pythonpath)
                                   },
                                   stderr=subprocess.PIPE)
        _, stderr = process.communicate()
        self.assertEqual(0, process.returncode, stderr)
Code example #9
File: test_test_builder.py Project: huanding/pants
    def _cache_current_interpreter(self):
        cache = PythonInterpreterCache(self.config())

        # We only need to cache the current interpreter, avoid caching for every interpreter on the
        # PATH.
        current_interpreter = PythonInterpreter.get()
        for cached_interpreter in cache.setup(
                filters=[current_interpreter.identity.requirement]):
            if cached_interpreter == current_interpreter:
                return cached_interpreter
        raise RuntimeError('Could not find suitable interpreter to run tests.')
Code example #10
 def _interpreter_cache(self):
     interpreter_cache = PythonInterpreterCache(
         PythonSetup.global_instance(),
         PythonRepos.global_instance(),
         logger=self.context.log.debug)
     # Cache setup's requirement fetching can hang if run concurrently by another pants proc.
     self.context.acquire_lock()
     try:
         interpreter_cache.setup()
     finally:
         self.context.release_lock()
     return interpreter_cache
Code example #11
  def _cache_current_interpreter(self):
    cache = PythonInterpreterCache(self.config())

    # We only need to cache the current interpreter, avoid caching for every interpreter on the
    # PATH.
    current_interpreter = PythonInterpreter.get()
    current_id = (current_interpreter.binary, current_interpreter.identity)
    for cached_interpreter in cache.setup(filters=[current_interpreter.identity.requirement]):
      # TODO(John Sirois): Revert to directly comparing interpreters when
      # https://github.com/pantsbuild/pex/pull/31 is in, released and consumed by pants.
      if (cached_interpreter.binary, cached_interpreter.identity) == current_id:
        return cached_interpreter
    raise RuntimeError('Could not find suitable interpreter to run tests.')
Code example #12
 def _create_interpreter_path_file(self, interpreter_path_file, targets):
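     # Select an interpreter for the targets, then record its binary path followed by one
     # tab-separated (name, version, location) line per extra distribution it carries.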
     interpreter_cache = PythonInterpreterCache(
         PythonSetup.global_instance(),
         PythonRepos.global_instance(),
         logger=self.context.log.debug)
     interpreter = interpreter_cache.select_interpreter_for_targets(targets)
     safe_mkdir_for(interpreter_path_file)
     with open(interpreter_path_file, 'w') as outfile:
         outfile.write(b'{}\n'.format(interpreter.binary))
         for dist, location in interpreter.extras.items():
             dist_name, dist_version = dist
             outfile.write(b'{}\t{}\t{}\n'.format(dist_name, dist_version,
                                                  location))
Code example #13
  def test_namespace_effective(self):
    self.create_file('src/thrift/com/foo/one.thrift', contents=dedent("""
    namespace py foo.bar

    struct One {}
    """))
    one = self.make_target(spec='src/thrift/com/foo:one',
                           target_type=PythonThriftLibrary,
                           sources=['one.thrift'])
    apache_thrift_gen, synthetic_target_one = self.generate_single_thrift_target(one)

    self.create_file('src/thrift2/com/foo/two.thrift', contents=dedent("""
    namespace py foo.baz

    struct Two {}
    """))
    two = self.make_target(spec='src/thrift2/com/foo:two',
                           target_type=PythonThriftLibrary,
                           sources=['two.thrift'])
    _, synthetic_target_two = self.generate_single_thrift_target(two)

    # Confirm separate PYTHONPATH entries, which we need to test namespace packages.
    self.assertNotEqual(synthetic_target_one.target_base, synthetic_target_two.target_base)

    targets = (synthetic_target_one, synthetic_target_two)

    python_repos = global_subsystem_instance(PythonRepos)
    python_setup = global_subsystem_instance(PythonSetup)
    interpreter_cache = PythonInterpreterCache(python_setup, python_repos)
    interpreter = interpreter_cache.select_interpreter_for_targets(targets)

    # We need setuptools to import namespace packages (via pkg_resources), so we prime the
    # PYTHONPATH with interpreter extras, which Pants always populates with setuptools and wheel.
    # TODO(John Sirois): We really should be emitting setuptools in a
    # `synthetic_target_extra_dependencies` override in `ApacheThriftPyGen`:
    #   https://github.com/pantsbuild/pants/issues/5975
    pythonpath = interpreter.extras.values()
    pythonpath.extend(os.path.join(get_buildroot(), t.target_base) for t in targets)
    for dist in resolve(['thrift=={}'.format(self.get_thrift_version(apache_thrift_gen))],
                        interpreter=interpreter,
                        context=python_repos.get_network_context(),
                        fetchers=python_repos.get_fetchers()):
      pythonpath.append(dist.location)

    process = subprocess.Popen([interpreter.binary,
                                '-c',
                                'from foo.bar.ttypes import One; from foo.baz.ttypes import Two'],
                               env={'PYTHONPATH': os.pathsep.join(pythonpath)},
                               stderr=subprocess.PIPE)
    _, stderr = process.communicate()
    self.assertEqual(0, process.returncode, stderr)
Code example #14
File: python_task.py Project: sheltowt/pants
  def __init__(self, *args, **kwargs):
    super(PythonTask, self).__init__(*args, **kwargs)
    self.conn_timeout = (self.context.options.python_conn_timeout or
                         self.context.config.getdefault('connection_timeout'))
    compatibilities = self.context.options.interpreter or [b'']

    self.interpreter_cache = PythonInterpreterCache(self.context.config,
                                                    logger=self.context.log.debug)
    # We pass in filters=compatibilities because setting up some python versions
    # (e.g., 3<=python<3.3) crashes, and this gives us an escape hatch.
    self.interpreter_cache.setup(filters=compatibilities)

    # Select a default interpreter to use.
    self._interpreter = self.select_interpreter(compatibilities)
Code example #15
File: python_task.py Project: mccoysg/pants
    def interpreter_cache(self):
        if self._interpreter_cache is None:
            self._interpreter_cache = PythonInterpreterCache(
                self.context.config, logger=self.context.log.debug)

            # Cache setup's requirement fetching can hang if run concurrently by another pants proc.
            self.context.acquire_lock()
            try:
                # We pass in filters=compatibilities because setting up some python versions
                # (e.g., 3<=python<3.3) crashes, and this gives us an escape hatch.
                self._interpreter_cache.setup(filters=self._compatibilities)
            finally:
                self.context.release_lock()
        return self._interpreter_cache
Code example #16
    def _setup_test(self, constraints=None):
        mock_setup = mock.MagicMock().return_value
        type(mock_setup).interpreter_constraints = mock.PropertyMock(
            return_value=constraints)

        with temporary_dir() as path:
            mock_setup.interpreter_cache_dir = path
            cache = PythonInterpreterCache(mock_setup, mock.MagicMock())

            def set_interpreters(_):
                cache._interpreters.add(self._interpreter)

            cache._setup_cached = mock.Mock(side_effect=set_interpreters)
            cache._setup_paths = mock.Mock()
            yield cache, path
Code example #17
  def _setup_test(self, interpreter_requirement=None):
    mock_setup = mock.MagicMock().return_value

    # Explicitly set a repo-wide requirement that excludes our one interpreter.
    type(mock_setup).interpreter_requirement = mock.PropertyMock(
      return_value=interpreter_requirement)

    with temporary_dir() as path:
      mock_setup.interpreter_cache_dir = path
      cache = PythonInterpreterCache(mock_setup, mock.MagicMock())

      def set_interpreters(_):
        cache._interpreters.add(self._interpreter)

      cache._setup_cached = mock.Mock(side_effect=set_interpreters)
      cache._setup_paths = mock.Mock()
      yield cache, path
Code example #18
  def _gather_sources(self, target_roots):
    context = self.context(target_roots=target_roots, for_subsystems=[PythonSetup, PythonRepos])

    # We must get an interpreter via the cache, instead of using PythonInterpreter.get() directly,
    # to ensure that the interpreter has setuptools and wheel support.
    interpreter = PythonInterpreter.get()
    interpreter_cache = PythonInterpreterCache(PythonSetup.global_instance(),
                                               PythonRepos.global_instance(),
                                               logger=context.log.debug)
    interpreters = interpreter_cache.setup(paths=[os.path.dirname(interpreter.binary)],
                                           filters=[str(interpreter.identity.requirement)])
    context.products.get_data(PythonInterpreter, lambda: interpreters[0])

    task = self.create_task(context)
    task.execute()

    return context.products.get_data(GatherSources.PYTHON_SOURCES)
Code example #19
File: checkstyle.py Project: StephanErb/pants
 def execute(self):
     """Run Checkstyle on all found non-synthetic source files."""
     python_tgts = self.context.targets(
         lambda tgt: isinstance(tgt, (PythonTarget)))
     if not python_tgts:
         return 0
     interpreter_cache = PythonInterpreterCache(
         PythonSetup.global_instance(),
         PythonRepos.global_instance(),
         logger=self.context.log.debug)
     with self.invalidated(self.get_targets(
             self._is_checked)) as invalidation_check:
         failure_count = 0
         tgts_by_compatibility, _ = interpreter_cache.partition_targets_by_compatibility(
             [vt.target for vt in invalidation_check.invalid_vts])
         for filters, targets in tgts_by_compatibility.items():
              if (self.get_options().interpreter_constraints_whitelist is None
                      and not self._constraints_are_whitelisted(filters)):
                  deprecated_conditional(
                      lambda: self.get_options().interpreter_constraints_whitelist is None,
                      '1.14.0.dev2',
                      "Python linting is currently restricted to targets that match the global "
                      "interpreter constraints: {}. Pants detected unacceptable filters: {}. "
                      "Use the `--interpreter-constraints-whitelist` lint option to whitelist "
                      "compatibility constraints.".format(
                          PythonSetup.global_instance().interpreter_constraints, filters))
             else:
                 sources = self.calculate_sources([tgt for tgt in targets])
                 if sources:
                     allowed_interpreters = set(
                         interpreter_cache.setup(filters=filters))
                     if not allowed_interpreters:
                         raise TaskError(
                             'No valid interpreters found for targets: {}\n(filters: {})'
                             .format(targets, filters))
                     interpreter = min(allowed_interpreters)
                     failure_count += self.checkstyle(interpreter, sources)
         if failure_count > 0 and self.get_options().fail:
             raise TaskError(
                 '{} Python Style issues found. You may try `./pants fmt <targets>`'
                 .format(failure_count))
         return failure_count
Code example #20
    def test_cache_setup_with_no_filters_uses_repo_default_excluded(
            self, MockSetup):
        interpreter = PythonInterpreter.get()

        mock_setup = MockSetup.return_value
        type(mock_setup).interpreter_requirement = mock.PropertyMock(
            return_value=None)

        with temporary_dir() as path:
            mock_setup.scratch_dir.return_value = path

            cache = PythonInterpreterCache(mock.MagicMock())

            def set_interpreters(_):
                cache._interpreters.add(interpreter)

            cache._setup_cached = mock.Mock(side_effect=set_interpreters)

            self.assertEqual(cache.setup(), [interpreter])
Code example #21
    def _setup_test(self,
                    constraints=None,
                    mock_setup_paths_interpreters=None):
        mock_setup = mock.MagicMock().return_value
        type(mock_setup).interpreter_constraints = mock.PropertyMock(
            return_value=constraints)

        with temporary_dir() as path:
            mock_setup.interpreter_cache_dir = path
            cache = PythonInterpreterCache(mock_setup, mock.MagicMock())
            cache._setup_cached = mock.Mock(return_value=[self._interpreter])
            if mock_setup_paths_interpreters:
                cache._setup_paths = mock.Mock(return_value=[
                    PythonInterpreter.from_binary(
                        mock_setup_paths_interpreters[0]),
                    PythonInterpreter.from_binary(
                        mock_setup_paths_interpreters[1])
                ])
            else:
                cache._setup_paths = mock.Mock(return_value=[])
            yield cache, path
Code example #22
    def test_cache_setup_with_filter_overrides_repo_default(self, MockSetup):
        interpreter = PythonInterpreter.get()

        mock_setup = MockSetup.return_value
        # Explicitly set a repo-wide requirement that excludes our one interpreter
        type(mock_setup).interpreter_requirement = mock.PropertyMock(
            return_value=self._make_bad_requirement(
                interpreter.identity.requirement))

        with temporary_dir() as path:
            mock_setup.scratch_dir.return_value = path

            cache = PythonInterpreterCache(mock.MagicMock())

            def set_interpreters(_):
                cache._interpreters.add(interpreter)

            cache._setup_cached = mock.Mock(side_effect=set_interpreters)

            self.assertEqual(
                cache.setup(filters=(str(interpreter.identity.requirement), )),
                [interpreter])
Code example #23
    def test_cache_setup_with_no_filters_uses_repo_default_excluded(
            self, MockSetup):
        # This is the interpreter we'll inject into the cache
        interpreter = PythonInterpreter.get()

        mock_setup = MockSetup.return_value
        # Explicitly set a repo-wide requirement that excludes our one interpreter
        type(mock_setup).interpreter_requirement = mock.PropertyMock(
            return_value=self._make_bad_requirement(
                interpreter.identity.requirement))

        with temporary_dir() as path:
            mock_setup.scratch_dir.return_value = path

            cache = PythonInterpreterCache(mock.MagicMock())

            def set_interpreters(_):
                cache._interpreters.add(interpreter)

            cache._setup_cached = mock.Mock(side_effect=set_interpreters)
            cache._setup_paths = mock.Mock()

            self.assertEqual(len(cache.setup()), 0)
Code example #24
 def __init__(self, context, workdir):
     context.products.require('python')
     self._cache = PythonInterpreterCache(context.config,
                                          logger=context.log.debug)
     super(SetupPythonEnvironment, self).__init__(context, workdir)
Code example #25
 def __init__(self, *args, **kwargs):
     super(PythonTask, self).__init__(*args, **kwargs)
     self._interpreter_cache = PythonInterpreterCache(
         PythonSetup.global_instance(),
         PythonRepos.global_instance(),
         logger=self.context.log.debug)
Code example #26
File: py.py Project: dbieber/pants
    def __init__(self, run_tracker, root_dir, parser, argv, build_file_parser,
                 build_graph):
        Command.__init__(self, run_tracker, root_dir, parser, argv,
                         build_file_parser, build_graph)

        self.binary = None
        self.targets = []
        self.extra_requirements = []
        self.config = Config.load()

        interpreters = self.options.interpreters or [b'']
        self.interpreter_cache = PythonInterpreterCache(self.config,
                                                        logger=self.debug)
        self.interpreter_cache.setup(filters=interpreters)
        interpreters = self.interpreter_cache.select_interpreter(
            list(self.interpreter_cache.matches(interpreters)))
        if len(interpreters) != 1:
            self.error('Unable to detect suitable interpreter.')
        self.interpreter = interpreters[0]

        for req in self.options.extra_requirements:
            self.extra_requirements.append(
                PythonRequirement(req, use_2to3=True))

        # We parse each arg in the context of the cli usage:
        #   ./pants command (options) [spec] (build args)
        #   ./pants command (options) [spec]... -- (build args)
        # Our command token and our options are parsed out so we see args of the form:
        #   [spec] (build args)
        #   [spec]... -- (build args)
        for k in range(len(self.args)):
            arg = self.args.pop(0)
            if arg == '--':
                break

            def not_a_target(debug_msg):
                self.debug('Not a target, assuming option: %s.' % debug_msg)
                # We failed to parse the arg as a target or else it was in valid address format but did not
                # correspond to a real target.  Assume this is the 1st of the build args and terminate
                # processing args for target addresses.
                self.args.insert(0, arg)

            try:
                print(root_dir, arg)
                self.build_file_parser.inject_spec_closure_into_build_graph(
                    arg, self.build_graph)
                spec_path, target_name = parse_spec(arg)
                build_file = BuildFile(root_dir, spec_path)
                address = BuildFileAddress(build_file, target_name)
                target = self.build_graph.get_target(address)
                if target is None:
                    not_a_target(debug_msg='Unrecognized target')
                    break
            except Exception as e:
                not_a_target(debug_msg=e)
                break

            if isinstance(target, PythonBinary):
                if self.binary:
                    self.error(
                        'Can only process 1 binary target. Found %s and %s.' %
                        (self.binary, target))
                else:
                    self.binary = target
            self.targets.append(target)

        if not self.targets:
            self.error('No valid targets specified!')
Code example #27
File: mypy_task.py Project: omerzach/pants
 def _interpreter_cache(self):
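     # Build an interpreter cache from the global PythonSetup and PythonRepos subsystems,
     # logging through the task context.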
     return PythonInterpreterCache(PythonSetup.global_instance(),
                                   PythonRepos.global_instance(),
                                   logger=self.context.log.debug)