Example #1
    def test_resolve_conflicts_with_prior(self):
        """
        A ContextualVersionConflict should be raised when a requirement
        conflicts with a prior requirement for a different package.
        """
        # Create installation where Foo depends on Baz 1.0 and Bar depends on
        # Baz 2.0.
        ws = WorkingSet([])
        md = Metadata(('depends.txt', "Baz==1.0"))
        Foo = Distribution.from_filename("/foo_dir/Foo-1.0.egg", metadata=md)
        ws.add(Foo)
        md = Metadata(('depends.txt', "Baz==2.0"))
        Bar = Distribution.from_filename("/foo_dir/Bar-1.0.egg", metadata=md)
        ws.add(Bar)
        Baz = Distribution.from_filename("/foo_dir/Baz-1.0.egg")
        ws.add(Baz)
        Baz = Distribution.from_filename("/foo_dir/Baz-2.0.egg")
        ws.add(Baz)

        with pytest.raises(VersionConflict) as vc:
            ws.resolve(parse_requirements("Foo\nBar\n"))

        msg = "Baz 1.0 is installed but Baz==2.0 is required by "
        msg += repr(set(['Bar']))
        assert vc.value.report() == msg
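
A minimal sketch (not taken from the example above) of how calling code might surface the same report() text; it assumes only the public pkg_resources API, and resolve_or_exit plus the requirement string are illustrative names.

    from pkg_resources import WorkingSet, parse_requirements, VersionConflict

    def resolve_or_exit(requirement_lines):
        ws = WorkingSet()  # entries default to sys.path
        try:
            return ws.resolve(parse_requirements(requirement_lines))
        except VersionConflict as exc:
            # ContextualVersionConflict subclasses VersionConflict; report()
            # appends the "required by ..." suffix when the context is known.
            raise SystemExit('dependency conflict: %s' % exc.report())
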
Example #2
    def test_resolve_conflicts_with_prior(self):
        """
        A ContextualVersionConflict should be raised when a requirement
        conflicts with a prior requirement for a different package.
        """
        # Create installation where Foo depends on Baz 1.0 and Bar depends on
        # Baz 2.0.
        ws = WorkingSet([])
        md = Metadata(('depends.txt', "Baz==1.0"))
        Foo = Distribution.from_filename("/foo_dir/Foo-1.0.egg", metadata=md)
        ws.add(Foo)
        md = Metadata(('depends.txt', "Baz==2.0"))
        Bar = Distribution.from_filename("/foo_dir/Bar-1.0.egg", metadata=md)
        ws.add(Bar)
        Baz = Distribution.from_filename("/foo_dir/Baz-1.0.egg")
        ws.add(Baz)
        Baz = Distribution.from_filename("/foo_dir/Baz-2.0.egg")
        ws.add(Baz)

        with pytest.raises(VersionConflict) as vc:
            ws.resolve(parse_requirements("Foo\nBar\n"))

        msg = "Baz 1.0 is installed but Baz==2.0 is required by "
        msg += repr(set(['Bar']))
        assert vc.value.report() == msg
Example #3
 def test_marker_evaluation_with_extras_normalized(self):
     """Extras are also evaluated as markers at resolution time."""
     ad = pkg_resources.Environment([])
     ws = WorkingSet([])
     Foo = Distribution.from_filename(
         "/foo_dir/Foo-1.2.dist-info",
         metadata=Metadata(("METADATA", "Provides-Extra: baz-lightyear\n"
                            "Requires-Dist: quux; extra=='baz-lightyear'")))
     ad.add(Foo)
     assert list(ws.resolve(parse_requirements("Foo"), ad)) == [Foo]
     quux = Distribution.from_filename("/foo_dir/quux-1.0.dist-info")
     ad.add(quux)
     res = list(ws.resolve(parse_requirements("Foo[baz-lightyear]"), ad))
     assert res == [Foo, quux]
Example #4
 def test_marker_evaluation_with_extras_normalized(self):
     """Extras are also evaluated as markers at resolution time."""
     ad = pkg_resources.Environment([])
     ws = WorkingSet([])
     Foo = Distribution.from_filename(
         "/foo_dir/Foo-1.2.dist-info",
         metadata=Metadata(("METADATA", "Provides-Extra: baz-lightyear\n"
                            "Requires-Dist: quux; extra=='baz-lightyear'"))
     )
     ad.add(Foo)
     assert list(ws.resolve(parse_requirements("Foo"), ad)) == [Foo]
     quux = Distribution.from_filename("/foo_dir/quux-1.0.dist-info")
     ad.add(quux)
     res = list(ws.resolve(parse_requirements("Foo[baz-lightyear]"), ad))
     assert res == [Foo, quux]
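
Both variants above drive the "extra == ..." marker path through resolve(). For reference, a small sketch (assuming a reasonably recent setuptools; the names are illustrative) of how the extras requested in a requirement string look after parsing:

    from pkg_resources import Requirement

    # Extras named in brackets are normalized and carried on the Requirement;
    # resolve() matches them against "extra == '...'" markers in the metadata.
    req = Requirement.parse("Foo[baz-lightyear]")
    assert req.project_name == "Foo"
    assert req.extras == ("baz-lightyear",)
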
Example #5
  def _activate(self):
    self.update_candidate_distributions(self.load_internal_cache(self._pex, self._pex_info))

    if not self._pex_info.zip_safe and os.path.isfile(self._pex):
      self.update_module_paths(self.force_local(self._pex, self._pex_info))

    all_reqs = [Requirement.parse(req) for req in self._pex_info.requirements]

    working_set = WorkingSet([])

    with TRACER.timed('Resolving %s' %
        ' '.join(map(str, all_reqs)) if all_reqs else 'empty dependency list', V=2):
      try:
        resolved = working_set.resolve(all_reqs, env=self)
      except DistributionNotFound as e:
        TRACER.log('Failed to resolve a requirement: %s' % e)
        TRACER.log('Distributions contained within this pex:')
        if not self._pex_info.distributions:
          TRACER.log('  None')
        else:
          for dist in self._pex_info.distributions:
            TRACER.log('  - %s' % dist)
        die('Failed to execute PEX file, missing compatible dependency for %s' % e)

    for dist in resolved:
      with TRACER.timed('Activating %s' % dist):
        working_set.add(dist)

        if os.path.isdir(dist.location):
          with TRACER.timed('Adding sitedir'):
            site.addsitedir(dist.location)

        dist.activate()

    return working_set
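
The _activate method above follows a resolve-then-activate pattern. A stripped-down sketch of just that pattern, using only public pkg_resources calls and none of the PEX-specific caching or tracing; activate_from and candidate_dir are illustrative names.

    import os
    import site
    from pkg_resources import Environment, Requirement, WorkingSet

    def activate_from(candidate_dir, requirement_strings):
        env = Environment([candidate_dir])   # candidate distributions on disk
        working_set = WorkingSet([])         # nothing active yet
        reqs = [Requirement.parse(r) for r in requirement_strings]
        for dist in working_set.resolve(reqs, env=env):
            working_set.add(dist)            # visible to later resolution
            if os.path.isdir(dist.location):
                site.addsitedir(dist.location)   # honor .pth files, as above
            dist.activate()                  # put dist.location on sys.path
        return working_set
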
Example #6
 def test_environment_marker_evaluation_positive(self):
     ad = pkg_resources.Environment([])
     ws = WorkingSet([])
     Foo = Distribution.from_filename("/foo_dir/Foo-1.2.dist-info")
     ad.add(Foo)
     res = ws.resolve(parse_requirements("Foo;python_version>='2'"), ad)
     assert list(res) == [Foo]
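
The marker in "Foo;python_version>='2'" is evaluated against the running interpreter at resolution time. A small sketch of inspecting that marker directly, assuming a setuptools recent enough that parsed requirements expose a packaging Marker object:

    from pkg_resources import Requirement

    req = Requirement.parse("Foo;python_version>='2'")
    assert req.marker is not None
    assert req.marker.evaluate()   # True on any supported interpreter
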
Example #7
    def _activate(self):
        self.update_candidate_distributions(
            self.load_internal_cache(self._pex, self._pex_info))

        if not self._pex_info.zip_safe and os.path.isfile(self._pex):
            self.update_module_paths(
                self.force_local(self._pex, self._pex_info))

        # TODO(wickman)  Implement dynamic fetchers if pex_info requirements specify dynamic=True
        # or a non-empty repository.
        all_reqs = [
            Requirement.parse(req) for req, _, _ in self._pex_info.requirements
        ]

        working_set = WorkingSet([])

        with TRACER.timed('Resolving %s' % ' '.join(map(str, all_reqs))
                          if all_reqs else 'empty dependency list'):
            try:
                resolved = working_set.resolve(all_reqs, env=self)
            except DistributionNotFound as e:
                TRACER.log('Failed to resolve a requirement: %s' % e)
                TRACER.log('Current working set:')
                for dist in working_set:
                    TRACER.log('  - %s' % dist)
                raise

        for dist in resolved:
            with TRACER.timed('Activated %s' % dist):
                dist.activate()

        self._working_set = working_set
        self._activated = True
Example #8
  def _activate(self):
    self.update_candidate_distributions(self.load_internal_cache(self._pex, self._pex_info))

    if not self._pex_info.zip_safe and os.path.isfile(self._pex):
      self.update_module_paths(self.force_local(self._pex, self._pex_info))

    # TODO(wickman)  Implement dynamic fetchers if pex_info requirements specify dynamic=True
    # or a non-empty repository.
    all_reqs = [Requirement.parse(req) for req, _, _ in self._pex_info.requirements]

    working_set = WorkingSet([])

    with TRACER.timed('Resolving %s' %
        ' '.join(map(str, all_reqs)) if all_reqs else 'empty dependency list'):
      try:
        resolved = working_set.resolve(all_reqs, env=self)
      except DistributionNotFound as e:
        TRACER.log('Failed to resolve a requirement: %s' % e)
        TRACER.log('Current working set:')
        for dist in working_set:
          TRACER.log('  - %s' % dist)
        raise

    for dist in resolved:
      with TRACER.timed('Activating %s' % dist):
        working_set.add(dist)

        if os.path.isdir(dist.location):
          with TRACER.timed('Adding sitedir'):
            site.addsitedir(dist.location)

        dist.activate()

    return working_set
Example #9
 def test_environment_marker_evaluation_positive(self):
     ad = pkg_resources.Environment([])
     ws = WorkingSet([])
     Foo = Distribution.from_filename("/foo_dir/Foo-1.2.dist-info")
     ad.add(Foo)
     res = ws.resolve(parse_requirements("Foo;python_version>='2'"), ad)
     assert list(res) == [Foo]
Example #10
 def test_marker_evaluation_with_extras_loop(self):
     ad = pkg_resources.Environment([])
     ws = WorkingSet([])
     # Metadata needs to be native strings due to cStringIO behaviour in
     # 2.6, so use str().
     a = Distribution.from_filename(
         "/foo_dir/a-0.2.dist-info",
         metadata=Metadata(("METADATA", str("Requires-Dist: c[a]")))
     )
     b = Distribution.from_filename(
         "/foo_dir/b-0.3.dist-info",
         metadata=Metadata(("METADATA", str("Requires-Dist: c[b]")))
     )
     c = Distribution.from_filename(
         "/foo_dir/c-1.0.dist-info",
         metadata=Metadata(("METADATA", str("Provides-Extra: a\n"
                            "Requires-Dist: b;extra=='a'\n"
                            "Provides-Extra: b\n"
                            "Requires-Dist: foo;extra=='b'")))
     )
     foo = Distribution.from_filename("/foo_dir/foo-0.1.dist-info")
     for dist in (a, b, c, foo):
         ad.add(dist)
     res = list(ws.resolve(parse_requirements("a"), ad))
     assert res == [a, c, b, foo]
Example #11
 def test_marker_evaluation_with_extras_loop(self):
     ad = pkg_resources.Environment([])
     ws = WorkingSet([])
     # Metadata needs to be native strings due to cStringIO behaviour in
     # 2.6, so use str().
     a = Distribution.from_filename("/foo_dir/a-0.2.dist-info",
                                    metadata=Metadata(
                                        ("METADATA",
                                         str("Requires-Dist: c[a]"))))
     b = Distribution.from_filename("/foo_dir/b-0.3.dist-info",
                                    metadata=Metadata(
                                        ("METADATA",
                                         str("Requires-Dist: c[b]"))))
     c = Distribution.from_filename(
         "/foo_dir/c-1.0.dist-info",
         metadata=Metadata(("METADATA",
                            str("Provides-Extra: a\n"
                                "Requires-Dist: b;extra=='a'\n"
                                "Provides-Extra: b\n"
                                "Requires-Dist: foo;extra=='b'"))))
     foo = Distribution.from_filename("/foo_dir/foo-0.1.dist-info")
     for dist in (a, b, c, foo):
         ad.add(dist)
     res = list(ws.resolve(parse_requirements("a"), ad))
     assert res == [a, c, b, foo]
Example #12
    def tests_make_dummy_dist_working_set(self):
        """
        Dummy distributions should work with pkg_resources.WorkingSet
        """

        # This also shows how WorkingSet might work.
        # A WorkingSet is basically a way to get to a collection of
        # distributions via the list of specified paths.  By default it
        # will go for sys.path, but for testing purposes we can control
        # this by creating our own instance on a temporary directory.

        parentpkg = make_dummy_dist(self, (  # noqa: F841
            ('requires.txt', '\n'.join([
            ])),
        ), 'parentpkg', '0.8')

        childpkg = make_dummy_dist(self, (  # noqa: F841
            ('requires.txt', '\n'.join([
                'parentpkg>=0.8',
            ])),
        ), 'childpkg', '0.1')

        grandchildpkg = make_dummy_dist(self, (
            ('requires.txt', '\n'.join([
                'childpkg>=0.1',
                'parentpkg>=0.8',
            ])),
        ), 'grandchildpkg', '0.8')

        working_set = WorkingSet([self._calmjs_testing_tmpdir])
        distributions = working_set.resolve(grandchildpkg.requires())
        self.assertEqual(len(distributions), 2)
        self.assertEqual(distributions[0].requires(), [])
        self.assertEqual(distributions[1].requires(), [
            Requirement.parse('parentpkg>=0.8')])

        # overwrite should work
        make_dummy_dist(self, (
            ('requires.txt', '\n'.join([
                'parentpkg>=0.7',
            ])),
        ), 'childpkg', '0.1')
        # but the data have to be recreated
        working_set = WorkingSet([self._calmjs_testing_tmpdir])
        distributions = working_set.resolve(grandchildpkg.requires())
        self.assertEqual(distributions[1].requires(), [
            Requirement.parse('parentpkg>=0.7')])
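
As the comment in the test above notes, a WorkingSet is essentially a view over the distributions found on an explicit list of paths. A minimal sketch of that idea in isolation; dist_dir is an illustrative directory expected to contain .egg-info/.dist-info metadata.

    from pkg_resources import Requirement, WorkingSet

    dist_dir = "/tmp/example-dists"        # illustrative path
    working_set = WorkingSet([dist_dir])   # scan dist_dir instead of sys.path
    for dist in working_set:
        print(dist.project_name, dist.version, dist.location)

    # find() answers "is this requirement already satisfied by the set?"
    installed = working_set.find(Requirement.parse("childpkg>=0.1"))
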
Example #13
 def test_marker_evaluation_with_extras(self):
     """Extras are also evaluated as markers at resolution time."""
     ad = pkg_resources.Environment([])
     ws = WorkingSet([])
     # Metadata needs to be native strings due to cStringIO behaviour in
     # 2.6, so use str().
     Foo = Distribution.from_filename(
         "/foo_dir/Foo-1.2.dist-info",
         metadata=Metadata(("METADATA", str("Provides-Extra: baz\n"
                            "Requires-Dist: quux; extra=='baz'")))
     )
     ad.add(Foo)
     assert list(ws.resolve(parse_requirements("Foo"), ad)) == [Foo]
     quux = Distribution.from_filename("/foo_dir/quux-1.0.dist-info")
     ad.add(quux)
     res = list(ws.resolve(parse_requirements("Foo[baz]"), ad))
     assert res == [Foo, quux]
Example #14
 def test_marker_evaluation_with_extras(self):
     """Extras are also evaluated as markers at resolution time."""
     ad = pkg_resources.Environment([])
     ws = WorkingSet([])
     # Metadata needs to be native strings due to cStringIO behaviour in
     # 2.6, so use str().
     Foo = Distribution.from_filename(
         "/foo_dir/Foo-1.2.dist-info",
         metadata=Metadata(("METADATA",
                            str("Provides-Extra: baz\n"
                                "Requires-Dist: quux; extra=='baz'"))))
     ad.add(Foo)
     assert list(ws.resolve(parse_requirements("Foo"), ad)) == [Foo]
     quux = Distribution.from_filename("/foo_dir/quux-1.0.dist-info")
     ad.add(quux)
     res = list(ws.resolve(parse_requirements("Foo[baz]"), ad))
     assert res == [Foo, quux]
Example #15
def resolve(requirements,
            cache=None,
            crawler=None,
            fetchers=None,
            obtainer=None,
            interpreter=None,
            platform=None):
    """Resolve a list of requirements into distributions.

     :param requirements: A list of strings or :class:`pkg_resources.Requirement` objects to be
                          resolved.
     :param cache: The filesystem path to cache distributions or None for no caching.
     :param crawler: The :class:`Crawler` object to use to crawl for artifacts.  If None specified,
                     a default crawler will be constructed.
     :param fetchers: A list of :class:`Fetcher` objects for generating links.  If None specified,
                      default to fetching from PyPI.
     :param obtainer: An :class:`Obtainer` object for converting from links to
                      :class:`pkg_resources.Distribution` objects.  If None specified, a default
                      will be provided that accepts eggs or building from source.
     :param interpreter: A :class:`PythonInterpreter` object to resolve against.  If None specified,
                         use the current interpreter.
     :param platform: The string representing the platform to be resolved, such as `'linux-x86_64'`
                      or `'macosx-10.7-intel'`.  If None specified, the current platform is used.
  """
    requirements = maybe_requirement_list(requirements)

    # Construct defaults
    crawler = crawler or Crawler()
    fetchers = fetchers or [PyPIFetcher()]
    interpreter = interpreter or PythonInterpreter.get()
    platform = platform or Platform.current()

    # wire up translators / obtainer
    shared_options = dict(install_cache=cache, platform=platform)
    egg_translator = EggTranslator(python=interpreter.python, **shared_options)
    cache_obtainer = Obtainer(crawler, [Fetcher([cache])],
                              egg_translator) if cache else None
    source_translator = SourceTranslator(interpreter=interpreter,
                                         **shared_options)
    translator = ChainedTranslator(egg_translator, source_translator)
    obtainer = Obtainer(crawler, fetchers, translator)

    # make installer
    def installer(req):
        if cache_obtainer and requirement_is_exact(req):
            dist = cache_obtainer.obtain(req)
            if dist:
                return dist
        return obtainer.obtain(req)

    # resolve
    working_set = WorkingSet(entries=[])
    env = ResolverEnvironment(search_path=[],
                              platform=platform,
                              python=interpreter.python)
    return working_set.resolve(requirements, env=env, installer=installer)
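
The installer callback above is only invoked when a requirement cannot be satisfied from the working set or the supplied environment; returning None makes resolve() raise DistributionNotFound. A bare-bones sketch of that contract, with the fetch-and-build step stubbed out (the package name is illustrative):

    from pkg_resources import DistributionNotFound, WorkingSet, parse_requirements

    def installer(req):
        # A real installer (like the Obtainer-backed one above) would fetch,
        # build, and return a pkg_resources.Distribution here.
        return None

    working_set = WorkingSet(entries=[])
    try:
        working_set.resolve(parse_requirements("some-missing-package"),
                            installer=installer)
    except DistributionNotFound as exc:
        print('unresolved:', exc)
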
Example #16
def resolve(requirements,
            cache=None,
            crawler=None,
            fetchers=None,
            obtainer=None,
            interpreter=None,
            platform=None):
  """Resolve a list of requirements into distributions.

     :param requirements: A list of strings or :class:`pkg_resources.Requirement` objects to be
                          resolved.
     :param cache: The filesystem path to cache distributions or None for no caching.
     :param crawler: The :class:`Crawler` object to use to crawl for artifacts.  If None specified,
                     a default crawler will be constructed.
     :param fetchers: A list of :class:`Fetcher` objects for generating links.  If None specified,
                      default to fetching from PyPI.
     :param obtainer: An :class:`Obtainer` object for converting from links to
                      :class:`pkg_resources.Distribution` objects.  If None specified, a default
                      will be provided that accepts eggs or building from source.
     :param interpreter: A :class:`PythonInterpreter` object to resolve against.  If None specified,
                         use the current interpreter.
     :param platform: The string representing the platform to be resolved, such as `'linux-x86_64'`
                      or `'macosx-10.7-intel'`.  If None specified, the current platform is used.
  """
  requirements = maybe_requirement_list(requirements)

  # Construct defaults
  crawler = crawler or Crawler()
  fetchers = fetchers or [PyPIFetcher()]
  interpreter = interpreter or PythonInterpreter.get()
  platform = platform or Platform.current()

  # wire up translators / obtainer
  if cache:
    shared_options = dict(install_cache=cache, platform=platform, interpreter=interpreter)
    translator = EggTranslator(**shared_options)
    cache_obtainer = Obtainer(crawler, [Fetcher([cache])], translator)
  else:
    cache_obtainer = None

  if not obtainer:
    translator = Translator.default(install_cache=cache, platform=platform, interpreter=interpreter)
    obtainer = Obtainer(crawler, fetchers, translator)

  # make installer
  def installer(req):
    if cache_obtainer and requirement_is_exact(req):
      dist = cache_obtainer.obtain(req)
      if dist:
        return dist
    return obtainer.obtain(req)

  # resolve
  working_set = WorkingSet(entries=[])
  env = ResolverEnvironment(interpreter, search_path=[], platform=platform)
  return working_set.resolve(requirements, env=env, installer=installer)
Example #17
    def checker_pex(self, interpreter):
        # TODO(John Sirois): Formalize in pants.base?
        pants_dev_mode = os.environ.get('PANTS_DEV')

        if pants_dev_mode:
            checker_id = self.checker_target.transitive_invalidation_hash()
        else:
            checker_id = hash_all([self._CHECKER_REQ])

        pex_path = os.path.join(self.workdir, 'checker', checker_id,
                                str(interpreter.identity))

        if not os.path.exists(pex_path):
            with self.context.new_workunit(name='build-checker'):
                with safe_concurrent_creation(pex_path) as chroot:
                    pex_builder = PexBuilderWrapper(
                        PEXBuilder(path=chroot, interpreter=interpreter),
                        PythonRepos.global_instance(),
                        PythonSetup.global_instance(), self.context.log)

                    # Constraining is required to guard against the case where the user
                    # has a pexrc file set.
                    pex_builder.add_interpreter_constraint(
                        str(interpreter.identity.requirement))

                    if pants_dev_mode:
                        pex_builder.add_sources_from(self.checker_target)
                        req_libs = [
                            tgt for tgt in self.checker_target.closure()
                            if isinstance(tgt, PythonRequirementLibrary)
                        ]

                        pex_builder.add_requirement_libs_from(
                            req_libs=req_libs)
                    else:
                        try:
                            # The checker is already on sys.path, eg: embedded in pants.pex.
                            working_set = WorkingSet(entries=sys.path)
                            for dist in working_set.resolve(
                                [Requirement.parse(self._CHECKER_REQ)]):
                                pex_builder.add_direct_requirements(
                                    dist.requires())
                                pex_builder.add_distribution(dist)
                            pex_builder.add_direct_requirements(
                                [self._CHECKER_REQ])
                        except DistributionNotFound:
                            # We need to resolve the checker from a local or remote distribution repo.
                            pex_builder.add_resolved_requirements(
                                [PythonRequirement(self._CHECKER_REQ)])

                    pex_builder.set_entry_point(self._CHECKER_ENTRYPOINT)
                    pex_builder.freeze()

        return PEX(pex_path, interpreter=interpreter)
Example #18
    def testResolve(self):
        ad = pkg_resources.Environment([])
        ws = WorkingSet([])
        # Resolving no requirements -> nothing to install
        self.assertEqual(list(ws.resolve([],ad)), [])
        # Request something not in the collection -> DistributionNotFound
        self.assertRaises(
            pkg_resources.DistributionNotFound, ws.resolve, parse_requirements("Foo"), ad
        )
        Foo = Distribution.from_filename(
            "/foo_dir/Foo-1.2.egg",
            metadata=Metadata(('depends.txt', "[bar]\nBaz>=2.0"))
        )
        ad.add(Foo)
        ad.add(Distribution.from_filename("Foo-0.9.egg"))

        # Request thing(s) that are available -> list to activate
        for i in range(3):
            targets = list(ws.resolve(parse_requirements("Foo"), ad))
            self.assertEqual(targets, [Foo])
            list(map(ws.add,targets))
        self.assertRaises(VersionConflict, ws.resolve,
            parse_requirements("Foo==0.9"), ad)
        ws = WorkingSet([]) # reset

        # Request an extra that causes an unresolved dependency for "Baz"
        self.assertRaises(
            pkg_resources.DistributionNotFound, ws.resolve,parse_requirements("Foo[bar]"), ad
        )
        Baz = Distribution.from_filename(
            "/foo_dir/Baz-2.1.egg", metadata=Metadata(('depends.txt', "Foo"))
        )
        ad.add(Baz)

        # Activation list now includes resolved dependency
        self.assertEqual(
            list(ws.resolve(parse_requirements("Foo[bar]"), ad)), [Foo,Baz]
        )
        # Requests for conflicting versions produce VersionConflict
        self.assertRaises(VersionConflict,
            ws.resolve, parse_requirements("Foo==1.2\nFoo!=1.2"), ad)
Example #19
    def testResolve(self):
        ad = pkg_resources.Environment([])
        ws = WorkingSet([])
        # Resolving no requirements -> nothing to install
        self.assertEqual(list(ws.resolve([],ad)), [])
        # Request something not in the collection -> DistributionNotFound
        self.assertRaises(
            pkg_resources.DistributionNotFound, ws.resolve, parse_requirements("Foo"), ad
        )
        Foo = Distribution.from_filename(
            "/foo_dir/Foo-1.2.egg",
            metadata=Metadata(('depends.txt', "[bar]\nBaz>=2.0"))
        )
        ad.add(Foo)
        ad.add(Distribution.from_filename("Foo-0.9.egg"))

        # Request thing(s) that are available -> list to activate
        for i in range(3):
            targets = list(ws.resolve(parse_requirements("Foo"), ad))
            self.assertEqual(targets, [Foo])
            list(map(ws.add,targets))
        self.assertRaises(VersionConflict, ws.resolve,
            parse_requirements("Foo==0.9"), ad)
        ws = WorkingSet([]) # reset

        # Request an extra that causes an unresolved dependency for "Baz"
        self.assertRaises(
            pkg_resources.DistributionNotFound, ws.resolve,parse_requirements("Foo[bar]"), ad
        )
        Baz = Distribution.from_filename(
            "/foo_dir/Baz-2.1.egg", metadata=Metadata(('depends.txt', "Foo"))
        )
        ad.add(Baz)

        # Activation list now includes resolved dependency
        self.assertEqual(
            list(ws.resolve(parse_requirements("Foo[bar]"), ad)), [Foo,Baz]
        )
        # Requests for conflicting versions produce VersionConflict
        self.assertRaises(VersionConflict,
            ws.resolve, parse_requirements("Foo==1.2\nFoo!=1.2"), ad)
Example #20
  def checker_pex(self, interpreter):
    # TODO(John Sirois): Formalize in pants.base?
    pants_dev_mode = os.environ.get('PANTS_DEV')

    if pants_dev_mode:
      checker_id = self.checker_target.transitive_invalidation_hash()
    else:
      checker_id = hash_all([self._CHECKER_REQ])

    pex_path = os.path.join(self.workdir, 'checker', checker_id, str(interpreter.identity))

    if not os.path.exists(pex_path):
      with self.context.new_workunit(name='build-checker'):
        with safe_concurrent_creation(pex_path) as chroot:
          pex_builder = PexBuilderWrapper.Factory.create(
            builder=PEXBuilder(path=chroot, interpreter=interpreter),
            log=self.context.log)

          # Constraining is required to guard against the case where the user
          # has a pexrc file set.
          pex_builder.add_interpreter_constraint(str(interpreter.identity.requirement))

          if pants_dev_mode:
            pex_builder.add_sources_from(self.checker_target)
            req_libs = [tgt for tgt in self.checker_target.closure()
                        if isinstance(tgt, PythonRequirementLibrary)]

            pex_builder.add_requirement_libs_from(req_libs=req_libs)
          else:
            try:
              # The checker is already on sys.path, eg: embedded in pants.pex.
              platform = Platform.current()
              platform_name = platform.platform
              env = Environment(search_path=sys.path,
                                platform=platform_name,
                                python=interpreter.version_string)
              working_set = WorkingSet(entries=sys.path)
              for dist in working_set.resolve([Requirement.parse(self._CHECKER_REQ)], env=env):
                pex_builder.add_direct_requirements(dist.requires())
                # NB: We add the dist location instead of the dist itself to make sure it's a
                # distribution style that pex knows how to package.
                pex_builder.add_dist_location(dist.location)
              pex_builder.add_direct_requirements([self._CHECKER_REQ])
            except (DistributionNotFound, PEXBuilder.InvalidDistribution):
              # We need to resolve the checker from a local or remote distribution repo.
              pex_builder.add_resolved_requirements(
                [PythonRequirement(self._CHECKER_REQ)])

          pex_builder.set_entry_point(self._CHECKER_ENTRYPOINT)
          pex_builder.freeze()

    return PEX(pex_path, interpreter=interpreter)
Example #21
 def test_marker_evaluation_with_multiple_extras(self):
     ad = pkg_resources.Environment([])
     ws = WorkingSet([])
     Foo = Distribution.from_filename(
         "/foo_dir/Foo-1.2.dist-info",
         metadata=Metadata(("METADATA", "Provides-Extra: baz\n"
                            "Requires-Dist: quux; extra=='baz'\n"
                            "Provides-Extra: bar\n"
                            "Requires-Dist: fred; extra=='bar'\n")))
     ad.add(Foo)
     quux = Distribution.from_filename("/foo_dir/quux-1.0.dist-info")
     ad.add(quux)
     fred = Distribution.from_filename("/foo_dir/fred-0.1.dist-info")
     ad.add(fred)
     res = list(ws.resolve(parse_requirements("Foo[baz,bar]"), ad))
     assert sorted(res) == [fred, quux, Foo]
Example #22
 def test_marker_evaluation_with_multiple_extras(self):
     ad = pkg_resources.Environment([])
     ws = WorkingSet([])
     Foo = Distribution.from_filename(
         "/foo_dir/Foo-1.2.dist-info",
         metadata=Metadata(("METADATA", "Provides-Extra: baz\n"
                            "Requires-Dist: quux; extra=='baz'\n"
                            "Provides-Extra: bar\n"
                            "Requires-Dist: fred; extra=='bar'\n"))
     )
     ad.add(Foo)
     quux = Distribution.from_filename("/foo_dir/quux-1.0.dist-info")
     ad.add(quux)
     fred = Distribution.from_filename("/foo_dir/fred-0.1.dist-info")
     ad.add(fred)
     res = list(ws.resolve(parse_requirements("Foo[baz,bar]"), ad))
     assert sorted(res) == [fred, quux, Foo]
Example #23
 def get_distributions(self, requirements):
     ws = WorkingSet()
     remote_fetches = []

     def installer(req):
         dist = find_dist(req)
         if dist is None:
             return None
         self.install_dist(dist)
         return dist

     def find_dist(req):
         # Prefer a distribution from the local index that satisfies the requirement.
         self._pi_local.find_packages(req)
         for dist in sorted(self._pi_local[req.key], key=lambda d: -d.precedence):
             if dist in req:
                 return dist
         # Fall back to the remote index, recording which requirements needed a fetch.
         remote_fetches.append(req)
         self._pi_remote.find_packages(req)
         for dist in sorted(self._pi_remote[req.key], key=lambda d: -d.precedence):
             if dist in req:
                 return dist

     result = ws.resolve(requirements, installer=installer)
     log.debug('Remote fetches:')
     for req in remote_fetches:
         log.debug('%s', req)
     return result
Example #24
    def tests_make_dummy_dist_working_set(self):
        """
        Dummy distributions should work with pkg_resources.WorkingSet
        """

        # This also shows how WorkingSet might work.
        # A WorkingSet is basically a way to get to a collection of
        # distributions via the list of specified paths.  By default it
        # will go for sys.path, but for testing purposes we can control
        # this by creating our own instance on a temporary directory.

        parentpkg = make_dummy_dist(
            self,
            (  # noqa: F841
                ('requires.txt', '\n'.join([])), ),
            'parentpkg',
            '0.8')

        childpkg = make_dummy_dist(
            self,
            (  # noqa: F841
                ('requires.txt', '\n'.join([
                    'parentpkg>=0.8',
                ])), ),
            'childpkg',
            '0.1')

        grandchildpkg = make_dummy_dist(self, (('requires.txt', '\n'.join([
            'childpkg>=0.1',
            'parentpkg>=0.8',
        ])), ), 'grandchildpkg', '0.8')

        working_set = WorkingSet([self._calmjs_testing_tmpdir])
        distributions = working_set.resolve(grandchildpkg.requires())
        self.assertEqual(len(distributions), 2)
        self.assertEqual(distributions[0].requires(), [])
        self.assertEqual(distributions[1].requires(),
                         [Requirement.parse('parentpkg>=0.8')])
Example #25
    def _activate(self):
        self.update_candidate_distributions(
            self.load_internal_cache(self._pex, self._pex_info))

        if not self._pex_info.zip_safe and os.path.isfile(self._pex):
            self.update_module_paths(
                self.force_local(self._pex, self._pex_info))

        all_reqs = [
            Requirement.parse(req) for req in self._pex_info.requirements
        ]

        working_set = WorkingSet([])

        with TRACER.timed('Resolving %s' % ' '.join(map(str, all_reqs))
                          if all_reqs else 'empty dependency list',
                          V=2):
            try:
                resolved = working_set.resolve(all_reqs, env=self)
            except DistributionNotFound as e:
                TRACER.log('Failed to resolve a requirement: %s' % e)
                TRACER.log('Current working set:')
                for dist in working_set:
                    TRACER.log('  - %s' % dist)
                raise

        for dist in resolved:
            with TRACER.timed('Activating %s' % dist):
                working_set.add(dist)

                if os.path.isdir(dist.location):
                    with TRACER.timed('Adding sitedir'):
                        site.addsitedir(dist.location)

                dist.activate()

        return working_set
Example #26
def resolve_multi(config,
                  requirements,
                  interpreter=None,
                  platforms=None,
                  conn_timeout=None,
                  ttl=3600):
  """Multi-platform dependency resolution for PEX files.

     Given a pants configuration and a set of requirements, return a list of distributions
     that must be included in order to satisfy them.  That may involve distributions for
     multiple platforms.

     :param config: Pants :class:`Config` object.
     :param requirements: A list of :class:`PythonRequirement` objects to resolve.
     :param interpreter: :class:`PythonInterpreter` for which requirements should be resolved.
                         If None specified, defaults to current interpreter.
     :param platforms: Optional list of platforms against which requirements will be resolved. If
                         None specified, the defaults from `config` will be used.
     :param conn_timeout: Optional connection timeout for any remote fetching.
     :param ttl: Time in seconds before we consider re-resolving an open-ended requirement, e.g.
                 "flask>=0.2" if a matching distribution is available on disk.  Defaults
                 to 3600.
  """
  now = time.time()
  distributions = {}

  interpreter = interpreter or PythonInterpreter.get()
  if not isinstance(interpreter, PythonInterpreter):
    raise TypeError('Expected interpreter to be a PythonInterpreter, got %s' % type(interpreter))

  install_cache = PythonSetup(config).scratch_dir('install_cache', default_name='eggs')
  platforms = get_platforms(platforms or config.getlist('python-setup', 'platforms', ['current']))
  crawler = crawler_from_config(config, conn_timeout=conn_timeout)
  fetchers = fetchers_from_config(config)

  for platform in platforms:
    env = PantsEnvironment(interpreter, platform=platform)
    working_set = WorkingSet(entries=[])

    shared_options = dict(install_cache=install_cache, platform=platform)
    egg_translator = EggTranslator(interpreter=interpreter, **shared_options)
    egg_obtainer = Obtainer(crawler, [Fetcher([install_cache])], egg_translator)

    def installer(req):
      # Attempt to obtain the egg from the local cache.  If it's an exact match, we can use it.
      # If it's not an exact match, then if it's been resolved sufficiently recently, we still
      # use it.
      dist = egg_obtainer.obtain(req)
      if dist and (requirement_is_exact(req) or now - os.path.getmtime(dist.location) < ttl):
        return dist

      # Failed, so follow through to "remote" resolution
      source_translator = SourceTranslator(
           interpreter=interpreter,
           use_2to3=getattr(req, 'use_2to3', False),
           **shared_options)
      translator = ChainedTranslator(egg_translator, source_translator)
      obtainer = Obtainer(
          crawler,
          [Fetcher([req.repository])] if getattr(req, 'repository', None) else fetchers,
          translator)
      dist = obtainer.obtain(req)
      if dist:
        try:
          touch(dist.location)
        except OSError:
          pass
      return dist

    distributions[platform] = working_set.resolve(requirements, env=env, installer=installer)

  return distributions
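
resolve_multi above reuses a cached artifact when the requirement pins an exact version or when the cached file is younger than ttl. A sketch of what an exactness check in the spirit of requirement_is_exact might look like; this is an assumption about its behavior, not that function's actual source.

    from pkg_resources import Requirement

    def requirement_is_exact_sketch(req):   # hypothetical stand-in
        # req.specs is a list of (operator, version) pairs on a pkg_resources
        # Requirement; exactly one '=='/'===' pin counts as exact.
        specs = list(req.specs)
        return len(specs) == 1 and specs[0][0] in ('==', '===')

    assert requirement_is_exact_sketch(Requirement.parse("flask==0.12"))
    assert not requirement_is_exact_sketch(Requirement.parse("flask>=0.2"))
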
Example #27
def resolve_multi(config,
                  requirements,
                  interpreter=None,
                  platforms=None,
                  conn_timeout=None,
                  ttl=3600):
  """Multi-platform dependency resolution for PEX files.

     Given a pants configuration and a set of requirements, return a list of distributions
     that must be included in order to satisfy them.  That may involve distributions for
     multiple platforms.

     :param config: Pants :class:`Config` object.
     :param requirements: A list of :class:`PythonRequirement` objects to resolve.
     :param interpreter: :class:`PythonInterpreter` for which requirements should be resolved.
                         If None specified, defaults to current interpreter.
     :param platforms: Optional list of platforms against which requirements will be resolved. If
                         None specified, the defaults from `config` will be used.
     :param conn_timeout: Optional connection timeout for any remote fetching.
     :param ttl: Time in seconds before we consider re-resolving an open-ended requirement, e.g.
                 "flask>=0.2" if a matching distribution is available on disk.  Defaults
                 to 3600.
  """
  now = time.time()
  distributions = {}

  interpreter = interpreter or PythonInterpreter.get()
  if not isinstance(interpreter, PythonInterpreter):
    raise TypeError('Expected interpreter to be a PythonInterpreter, got %s' % type(interpreter))

  install_cache = PythonSetup(config).scratch_dir('install_cache', default_name='eggs')
  platforms = get_platforms(platforms or config.getlist('python-setup', 'platforms', ['current']))
  crawler = crawler_from_config(config, conn_timeout=conn_timeout)
  fetchers = fetchers_from_config(config)

  for platform in platforms:
    env = PantsEnvironment(search_path=[], platform=platform, python=interpreter.python)
    working_set = WorkingSet(entries=[])

    shared_options = dict(install_cache=install_cache, platform=platform)
    egg_translator = EggTranslator(python=interpreter.python, **shared_options)
    egg_obtainer = Obtainer(crawler, [Fetcher([install_cache])], egg_translator)

    def installer(req):
      # Attempt to obtain the egg from the local cache.  If it's an exact match, we can use it.
      # If it's not an exact match, then if it's been resolved sufficiently recently, we still
      # use it.
      dist = egg_obtainer.obtain(req)
      if dist and (requirement_is_exact(req) or now - os.path.getmtime(dist.location) < ttl):
        return dist

      # Failed, so follow through to "remote" resolution
      source_translator = SourceTranslator(
           interpreter=interpreter,
           use_2to3=getattr(req, 'use_2to3', False),
           **shared_options)
      translator = ChainedTranslator(egg_translator, source_translator)
      obtainer = Obtainer(
          crawler,
          [Fetcher([req.repository])] if getattr(req, 'repository', None) else fetchers,
          translator)
      dist = obtainer.obtain(req)
      if dist:
        try:
          touch(dist.location)
        except OSError:
          pass
      return dist

    distributions[platform] = working_set.resolve(requirements, env=env, installer=installer)

  return distributions
Example #28
class Resolver(Environment):
  """
    Resolve a series of requirements.

    Simplest use-case (cache-less)
      >>> from twitter.common.python.resolver import Resolver
      >>> from twitter.common.python.fetcher import Fetcher
      >>> pypi = Fetcher.pypi()
      >>> resolver = Resolver(fetcher = pypi)
      Calling environment super => 0.045ms
      >>> resolver.resolve('mako')
      Fetching mako => 6691.651ms
      Building mako => 557.141ms
      Fetching MarkupSafe>=0.9.2 => 3314.960ms
      Building MarkupSafe>=0.9.2 => 542.930ms
      Resolving mako => 11110.769ms
      [Mako 0.6.2 (/private/var/folders/Uh/UhXpeRIeFfGF7HoogOKC+++++TI/-Tmp-/tmplyR5kH/lib/python2.6/site-packages),
       MarkupSafe 0.15 (/private/var/folders/Uh/UhXpeRIeFfGF7HoogOKC+++++TI/-Tmp-/tmptUWECl/lib/python2.6/site-packages)]

    With an install cache:
      >>> resolver = Resolver(fetcher = pypi,
      ...                     caches = [os.path.expanduser('~/.pex/install')],
      ...                     install_cache = os.path.expanduser('~/.pex/install'))

    First invocation:
      >>> resolver.resolve('mako')
      Activating cache /Users/wickman/.pex/install-new => 6.091ms
      ...
      Resolving mako => 3693.405ms
      [Mako 0.6.2 (/Users/wickman/.pex/install-new/Mako-0.6.2-py2.6.egg),
       MarkupSafe 0.15 (/Users/wickman/.pex/install-new/MarkupSafe-0.15-py2.6-macosx-10.4-x86_64.egg)]

    Second invocation (distilled and memoized in the cache):
      >>> resolver.resolve('mako')
      Resolving mako => 1.813ms
      [Mako 0.6.2 (/Users/wickman/.pex/install-new/Mako-0.6.2-py2.6.egg),
       MarkupSafe 0.15 (/Users/wickman/.pex/install-new/MarkupSafe-0.15-py2.6-macosx-10.4-x86_64.egg)]
  """

  class Subcache(object):
    def __init__(self, path, env):
      self._activated = False
      self._path = path
      self._env = env

    @property
    def activated(self):
      return self._activated

    def activate(self):
      if not self._activated:
        with self._env.timed('Activating cache %s' % self._path):
          for dist in find_distributions(self._path):
            if self._env.can_add(dist):
              self._env.add(dist)
        self._activated = True

  @classmethod
  @contextlib.contextmanager
  def timed(cls, prefix):
    start_time = time.time()
    yield
    cls._log('%s => %.3fms' % (prefix, 1000.0 * (time.time() - start_time)))

  @classmethod
  def _log(cls, msg, *args, **kw):
    print(msg, *args, **kw)

  def __init__(self,
               caches=(),
               install_cache=None,
               fetcher=None,
               fetcher_provider=None,
               platform=Platform.current(),
               python=sys.version[:3]):
    assert (fetcher is not None) + (fetcher_provider is not None) == 1, (
      "At most one of fetcher or fetcher_provider should be supplied")
    self._subcaches = [Resolver.Subcache(cache, self) for cache in caches]
    self._fetcher = fetcher
    self._fetcher_provider = fetcher_provider
    self._install_cache = install_cache
    self._ws = WorkingSet([])
    with self.timed('Calling environment super'):
      super(Resolver, self).__init__(search_path=[], platform=platform, python=python)

  @property
  def fetcher(self):
    if not self._fetcher:
      self._fetcher = self._fetcher_provider()
    return self._fetcher

  def resolve(self, requirements, ignore_errors=False):
    if isinstance(requirements, (list, tuple, GeneratorType)):
      reqs = list(RequirementWrapper.get(req) for req in requirements)
    else:
      reqs = [RequirementWrapper.get(requirements)]
    resolved = OrderedSet()
    for req in reqs:
      with self.timed('Resolved %s' % req):
        try:
          distributions = self._ws.resolve([req], env=self)
        except DistributionNotFound as e:
          self._log('Failed to resolve %s' % req)
          if not ignore_errors:
            raise
          continue
        resolved.update(distributions)
    return list(resolved)

  def can_add(self, dist):
    def version_compatible():
      return any([self.python is None, dist.py_version is None, dist.py_version == self.python])
    def platform_compatible():
      return Platform.compatible(dist.platform, self.platform)
    return version_compatible() and platform_compatible()

  def best_match(self, req, *ignore_args, **ignore_kwargs):
    while True:
      resolved_req = super(Resolver, self).best_match(req, self._ws)
      if resolved_req:
        return resolved_req
      if all(subcache.activated for subcache in self._subcaches):
        print('Failed to resolve %s, your installation may not work properly.' % req, file=sys.stderr)
        break
      else:
        for subcache in self._subcaches:
          if not subcache.activated:
            subcache.activate()
            break

  def obtain(self, req, *ignore_args, **ignore_kwargs):
    if not all(subcache.activated for subcache in self._subcaches):
      # Only fetch once all subcaches have been exhausted.
      return None
    with self.timed('Fetching %s' % req):
      fetched_req = self.fetcher.fetch(req)
    if not fetched_req:
      print('Failed to fetch %s' % req)
      return None
    installer = Installer(fetched_req)
    with self.timed('Building %s' % req):
      try:
        dist = installer.distribution()
      except Installer.InstallFailure as e:
        print('Failed to install %s' % req, file=sys.stderr)
        return None
    if self._install_cache:
      with self.timed('Distilling %s' % req):
        distilled = Distiller(dist).distill(into=self._install_cache)
      with self.timed('Constructing distribution %s' % req):
        metadata = EggMetadata(EggZipImporter(distilled))
        dist = Distribution.from_filename(distilled, metadata)
    self.add(dist)
    return dist
Example #29
 def test_environment_marker_evaluation_negative(self):
     """Environment markers are evaluated at resolution time."""
     ad = pkg_resources.Environment([])
     ws = WorkingSet([])
     res = ws.resolve(parse_requirements("Foo;python_version<'2'"), ad)
     assert list(res) == []
Example #30
    def testResolve(self):
        ad = pkg_resources.Environment([])
        ws = WorkingSet([])
        # Resolving no requirements -> nothing to install
        assert list(ws.resolve([], ad)) == []
        # Request something not in the collection -> DistributionNotFound
        with pytest.raises(pkg_resources.DistributionNotFound):
            ws.resolve(parse_requirements("Foo"), ad)

        Foo = Distribution.from_filename(
            "/foo_dir/Foo-1.2.egg",
            metadata=Metadata(('depends.txt', "[bar]\nBaz>=2.0"))
        )
        ad.add(Foo)
        ad.add(Distribution.from_filename("Foo-0.9.egg"))

        # Request thing(s) that are available -> list to activate
        for i in range(3):
            targets = list(ws.resolve(parse_requirements("Foo"), ad))
            assert targets == [Foo]
            list(map(ws.add, targets))
        with pytest.raises(VersionConflict):
            ws.resolve(parse_requirements("Foo==0.9"), ad)
        ws = WorkingSet([])  # reset

        # Request an extra that causes an unresolved dependency for "Baz"
        with pytest.raises(pkg_resources.DistributionNotFound):
            ws.resolve(parse_requirements("Foo[bar]"), ad)
        Baz = Distribution.from_filename(
            "/foo_dir/Baz-2.1.egg", metadata=Metadata(('depends.txt', "Foo"))
        )
        ad.add(Baz)

        # Activation list now includes resolved dependency
        assert list(ws.resolve(parse_requirements("Foo[bar]"), ad)) == [Foo, Baz]
        # Requests for conflicting versions produce VersionConflict
        with pytest.raises(VersionConflict) as vc:
            ws.resolve(parse_requirements("Foo==1.2\nFoo!=1.2"), ad)

        msg = 'Foo 0.9 is installed but Foo==1.2 is required'
        assert vc.value.report() == msg
Example #31
class PEXEnvironment(Environment):
  class Subcache(object):
    def __init__(self, path, env):
      self._activated = False
      self._path = path
      self._env = env

    @property
    def activated(self):
      return self._activated

    def activate(self):
      if not self._activated:
        with TRACER.timed('Activating cache %s' % self._path):
          for dist in find_distributions(self._path):
            if self._env.can_add(dist):
              self._env.add(dist)
        self._activated = True

  @staticmethod
  def _really_zipsafe(dist):
    try:
      pez_info = dist.resource_listdir('/PEZ-INFO')
    except OSError:
      pez_info = []
    if 'zip-safe' in pez_info:
      return True
    egg_metadata = dist.metadata_listdir('/')
    return 'zip-safe' in egg_metadata and 'native_libs.txt' not in egg_metadata

  def __init__(self, pex, pex_info, platform=Platform.current(), python=Platform.python()):
    subcaches = sum([
      [os.path.join(pex, pex_info.internal_cache)],
      [cache for cache in pex_info.egg_caches],
      [pex_info.install_cache if pex_info.install_cache else []]],
      [])
    self._pex_info = pex_info
    self._activated = False
    self._subcaches = [self.Subcache(cache, self) for cache in subcaches]
    self._ws = WorkingSet([])
    with TRACER.timed('Calling environment super'):
      super(PEXEnvironment, self).__init__(search_path=[], platform=platform, python=python)

  def resolve(self, requirements, ignore_errors=False):
    reqs = maybe_requirement_list(requirements)
    resolved = OrderedSet()
    for req in reqs:
      with TRACER.timed('Resolved %s' % req):
        try:
          distributions = self._ws.resolve([req], env=self)
        except DistributionNotFound as e:
          TRACER.log('Failed to resolve %s' % req)
          if not ignore_errors:
            raise
          continue
        resolved.update(distributions)
    return list(resolved)

  def can_add(self, dist):
    return Platform.distribution_compatible(dist, self.python, self.platform)

  def best_match(self, req, *ignore_args, **ignore_kwargs):
    while True:
      resolved_req = super(PEXEnvironment, self).best_match(req, self._ws)
      if resolved_req:
        return resolved_req
      for subcache in self._subcaches:
        if not subcache.activated:
          subcache.activate()
          break
      else:
        # TODO(wickman)  Add per-requirement optional/ignore_errors flag.
        print('Failed to resolve %s, your installation may not work properly.' % req,
            file=sys.stderr)
        break

  def activate(self):
    if self._activated:
      return
    if self._pex_info.inherit_path:
      self._ws = WorkingSet(sys.path)

    # TODO(wickman)  Implement dynamic fetchers if pex_info requirements specify dynamic=True
    # or a non-empty repository.
    all_reqs = [Requirement.parse(req) for req, _, _ in self._pex_info.requirements]

    for req in all_reqs:
      with TRACER.timed('Resolved %s' % str(req)):
        try:
          resolved = self._ws.resolve([req], env=self)
        except DistributionNotFound as e:
          TRACER.log('Failed to resolve %s: %s' % (req, e))
          if not self._pex_info.ignore_errors:
            raise
          continue
      for dist in resolved:
        with TRACER.timed('  Activated %s' % dist):
          if os.environ.get('PEX_FORCE_LOCAL', not self._really_zipsafe(dist)):
            with TRACER.timed('    Locally caching'):
              new_dist = DistributionHelper.maybe_locally_cache(dist, self._pex_info.install_cache)
              new_dist.activate()
          else:
            self._ws.add(dist)
            dist.activate()

    self._activated = True
Example #32
class PEXEnvironment(Environment):
    class Subcache(object):
        def __init__(self, path, env):
            self._activated = False
            self._path = path
            self._env = env

        @property
        def activated(self):
            return self._activated

        def activate(self):
            if not self._activated:
                with TRACER.timed('Activating cache %s' % self._path):
                    for dist in find_distributions(self._path):
                        if self._env.can_add(dist):
                            self._env.add(dist)
                self._activated = True

    @staticmethod
    def _really_zipsafe(dist):
        try:
            pez_info = dist.resource_listdir('/PEZ-INFO')
        except OSError:
            pez_info = []
        if 'zip-safe' in pez_info:
            return True
        egg_metadata = dist.metadata_listdir('/')
        return 'zip-safe' in egg_metadata and 'native_libs.txt' not in egg_metadata

    def __init__(self,
                 pex,
                 pex_info,
                 platform=Platform.current(),
                 python=Platform.python()):
        subcaches = sum(
            [[os.path.join(pex, pex_info.internal_cache)],
             [cache for cache in pex_info.egg_caches],
             [pex_info.install_cache if pex_info.install_cache else []]], [])
        self._pex_info = pex_info
        self._activated = False
        self._subcaches = [self.Subcache(cache, self) for cache in subcaches]
        self._ws = WorkingSet([])
        with TRACER.timed('Calling environment super'):
            super(PEXEnvironment, self).__init__(search_path=[],
                                                 platform=platform,
                                                 python=python)

    def resolve(self, requirements, ignore_errors=False):
        reqs = maybe_requirement_list(requirements)
        resolved = OrderedSet()
        for req in reqs:
            with TRACER.timed('Resolved %s' % req):
                try:
                    distributions = self._ws.resolve([req], env=self)
                except DistributionNotFound as e:
                    TRACER.log('Failed to resolve %s' % req)
                    if not ignore_errors:
                        raise
                    continue
                resolved.update(distributions)
        return list(resolved)

    def can_add(self, dist):
        return Platform.distribution_compatible(dist, self.python,
                                                self.platform)

    def best_match(self, req, *ignore_args, **ignore_kwargs):
        while True:
            resolved_req = super(PEXEnvironment,
                                 self).best_match(req, self._ws)
            if resolved_req:
                return resolved_req
            for subcache in self._subcaches:
                if not subcache.activated:
                    subcache.activate()
                    break
            else:
                # TODO(wickman)  Add per-requirement optional/ignore_errors flag.
                print(
                    'Failed to resolve %s, your installation may not work properly.'
                    % req,
                    file=sys.stderr)
                break

    def activate(self):
        if self._activated:
            return
        if self._pex_info.inherit_path:
            self._ws = WorkingSet(sys.path)

        # TODO(wickman)  Implement dynamic fetchers if pex_info requirements specify dynamic=True
        # or a non-empty repository.
        all_reqs = [
            Requirement.parse(req) for req, _, _ in self._pex_info.requirements
        ]

        for req in all_reqs:
            with TRACER.timed('Resolved %s' % str(req)):
                try:
                    resolved = self._ws.resolve([req], env=self)
                except DistributionNotFound as e:
                    TRACER.log('Failed to resolve %s: %s' % (req, e))
                    if not self._pex_info.ignore_errors:
                        raise
                    continue
            for dist in resolved:
                with TRACER.timed('  Activated %s' % dist):
                    if os.environ.get('PEX_FORCE_LOCAL',
                                      not self._really_zipsafe(dist)):
                        with TRACER.timed('    Locally caching'):
                            new_dist = DistributionHelper.maybe_locally_cache(
                                dist, self._pex_info.install_cache)
                            new_dist.activate()
                    else:
                        self._ws.add(dist)
                        dist.activate()

        self._activated = True
Example #33
    def testResolve(self):
        ad = pkg_resources.Environment([])
        ws = WorkingSet([])
        # Resolving no requirements -> nothing to install
        assert list(ws.resolve([], ad)) == []
        # Request something not in the collection -> DistributionNotFound
        with pytest.raises(pkg_resources.DistributionNotFound):
            ws.resolve(parse_requirements("Foo"), ad)

        Foo = Distribution.from_filename("/foo_dir/Foo-1.2.egg",
                                         metadata=Metadata(
                                             ('depends.txt',
                                              "[bar]\nBaz>=2.0")))
        ad.add(Foo)
        ad.add(Distribution.from_filename("Foo-0.9.egg"))

        # Request thing(s) that are available -> list to activate
        for i in range(3):
            targets = list(ws.resolve(parse_requirements("Foo"), ad))
            assert targets == [Foo]
            list(map(ws.add, targets))
        with pytest.raises(VersionConflict):
            ws.resolve(parse_requirements("Foo==0.9"), ad)
        ws = WorkingSet([])  # reset

        # Request an extra that causes an unresolved dependency for "Baz"
        with pytest.raises(pkg_resources.DistributionNotFound):
            ws.resolve(parse_requirements("Foo[bar]"), ad)
        Baz = Distribution.from_filename("/foo_dir/Baz-2.1.egg",
                                         metadata=Metadata(
                                             ('depends.txt', "Foo")))
        ad.add(Baz)

        # Activation list now includes resolved dependency
        assert (list(ws.resolve(parse_requirements("Foo[bar]"),
                                ad)) == [Foo, Baz])
        # Requests for conflicting versions produce VersionConflict
        with pytest.raises(VersionConflict) as vc:
            ws.resolve(parse_requirements("Foo==1.2\nFoo!=1.2"), ad)

        msg = 'Foo 0.9 is installed but Foo==1.2 is required'
        assert vc.value.report() == msg
Example #34
 def test_environment_marker_evaluation_negative(self):
     """Environment markers are evaluated at resolution time."""
     ad = pkg_resources.Environment([])
     ws = WorkingSet([])
     res = ws.resolve(parse_requirements("Foo;python_version<'2'"), ad)
     assert list(res) == []
Example #35
class PEXEnvironment(Resolver):
  @classmethod
  def _log(cls, msg, *args, **kw):
    PEX.debug(msg)

  def __init__(self, pex, pex_info):
    self._pex_info = pex_info
    subcaches = sum([
      [os.path.join(pex, pex_info.internal_cache)],
      [cache for cache in pex_info.egg_caches],
      [pex_info.install_cache if pex_info.install_cache else []]],
      [])
    self._activated = False
    super(PEXEnvironment, self).__init__(
      caches=subcaches,
      install_cache=pex_info.install_cache,
      fetcher_provider=PEXEnvironment.get_fetcher_provider(pex_info))

  @classmethod
  def get_fetcher_provider(cls, pex_info):
    def fetcher_provider():
      from twitter.common.python.fetcher import Fetcher
      cls._log('Initializing fetcher:')
      cls._log('  repositories: %s' % ' '.join(pex_info.repositories))
      cls._log('       indices: %s' % ' '.join(pex_info.indices))
      cls._log('     with pypi: %s' % pex_info.allow_pypi)
      return Fetcher(
        repositories = pex_info.repositories,
        indices = pex_info.indices,
        external = pex_info.allow_pypi,
        download_cache = pex_info.download_cache
      )
    return fetcher_provider

  @staticmethod
  def _really_zipsafe(dist):
    try:
      pez_info = dist.resource_listdir('/PEZ-INFO')
    except OSError:
      pez_info = []
    if 'zip-safe' in pez_info:
      return True
    egg_metadata = dist.metadata_listdir('/')
    return 'zip-safe' in egg_metadata and 'native_libs.txt' not in egg_metadata

  def activate(self):
    from pkg_resources import Requirement, WorkingSet, DistributionNotFound

    if self._activated:
      return
    if self._pex_info.inherit_path:
      self._ws = WorkingSet(sys.path)

    # TODO(wickman)  Implement dynamic fetchers if pex_info requirements specify dynamic=True
    # or a non-empty repository.
    all_reqs = [Requirement.parse(req) for req, _, _ in self._pex_info.requirements]

    for req in all_reqs:
      with PEX.timed('Resolved %s' % str(req)):
        try:
          resolved = self._ws.resolve([req], env=self)
        except DistributionNotFound as e:
          self._log('Failed to resolve %s: %s' % (req, e))
          if not self._pex_info.ignore_errors:
            raise
          continue
      for dist in resolved:
        with PEX.timed('  Activated %s' % dist):
          if self._really_zipsafe(dist):
            self._ws.add(dist)
            dist.activate()
          else:
            with PEX.timed('    Locally caching %s' % dist):
              new_dist = DistributionHelper.locally_cache(dist, self._pex_info.install_cache)
              new_dist.activate()

    self._activated = True