Example #1
File: pex.py Project: pfmoore/pex
  def _activate(self):
    if not self._working_set:
      working_set = WorkingSet([])

      # set up the local .pex environment
      pex_info = self._pex_info.copy()
      pex_info.update(self._pex_info_overrides)
      pex_info.merge_pex_path(self._vars.PEX_PATH)
      self._envs.append(PEXEnvironment(self._pex, pex_info))
      # N.B. by this point, `pex_info.pex_path` will contain a single pex path
      # merged from pex_path in `PEX-INFO` and `PEX_PATH` set in the environment.
      # `PEX_PATH` entries written into `PEX-INFO` take precedence over those set
      # in the environment.
      if pex_info.pex_path:
        # set up other environments as specified in pex_path
        for pex_path in filter(None, pex_info.pex_path.split(os.pathsep)):
          pex_info = PexInfo.from_pex(pex_path)
          pex_info.update(self._pex_info_overrides)
          self._envs.append(PEXEnvironment(pex_path, pex_info))

      # activate all of them
      for env in self._envs:
        for dist in env.activate():
          working_set.add(dist)

      self._working_set = working_set

    return self._working_set
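
Note: each pex _activate() variant in these examples follows the same accumulation pattern. Below is a minimal standalone sketch (hypothetical paths, not from any project above) of the pkg_resources.WorkingSet behavior they all rely on:

# Minimal sketch of the accumulation pattern (paths are hypothetical).
from pkg_resources import Distribution, WorkingSet

ws = WorkingSet([])  # start empty instead of scanning sys.path
ws.add(Distribution.from_filename('/plugins/Foo-1.0.egg'))
ws.add(Distribution.from_filename('/plugins/Bar-2.0.egg'))

print(ws.by_key['foo'].version)  # '1.0'

# add() is first-wins per project key unless replace=True is passed.
ws.add(Distribution.from_filename('/other/Foo-9.9.egg'))
print(ws.by_key['foo'].version)  # still '1.0'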
Example #2
  def _activate(self):
    self.update_candidate_distributions(self.load_internal_cache(self._pex, self._pex_info))

    if not self._pex_info.zip_safe and os.path.isfile(self._pex):
      self.update_module_paths(self.force_local(self._pex, self._pex_info))

    # TODO(wickman)  Implement dynamic fetchers if pex_info requirements specify dynamic=True
    # or a non-empty repository.
    all_reqs = [Requirement.parse(req) for req, _, _ in self._pex_info.requirements]

    working_set = WorkingSet([])

    with TRACER.timed('Resolving %s' % (
        ' '.join(map(str, all_reqs)) if all_reqs else 'empty dependency list'), V=2):
      try:
        resolved = working_set.resolve(all_reqs, env=self)
      except DistributionNotFound as e:
        TRACER.log('Failed to resolve a requirement: %s' % e)
        TRACER.log('Current working set:')
        for dist in working_set:
          TRACER.log('  - %s' % dist)
        raise

    for dist in resolved:
      with TRACER.timed('Activating %s' % dist):
        working_set.add(dist)

        if os.path.isdir(dist.location):
          with TRACER.timed('Adding sitedir'):
            site.addsitedir(dist.location)

        dist.activate()

    return working_set
Example #3
def test_remove_from_ws__removes_all_entries():
    ws = WorkingSet([])
    dist1 = create_dist("a", "1.0", location="a10")
    dist2 = create_dist("a", "2.0", location="a20")

    assert dist1 not in ws
    assert dist2 not in ws

    ws.add(dist1)
    assert dist1 in ws
    assert dist1.location in ws.entries
    assert dist2 not in ws
    assert dist2.location not in ws.entries

    ws.add_entry(dist2.location)
    assert dist1 in ws
    assert dist1.location in ws.entries
    assert dist2 not in ws
    assert dist2.location in ws.entries

    dependency.remove_from_ws(ws, dist2)

    assert dist1 not in ws
    assert dist2 not in ws

    assert len([d for d in ws]) == 0
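
The dependency.remove_from_ws helper exercised above is project code whose definition is not shown here. Since pkg_resources offers no public removal API, a helper consistent with these assertions has to scrub the WorkingSet internals directly; the sketch below is a hypothetical reconstruction and may differ from the real implementation.

def remove_from_ws(ws, dist):
    # Hypothetical: drop whatever distribution is registered under this key.
    ws.by_key.pop(dist.key, None)
    # Remove the key from each entry's key list, pruning entries left empty.
    for entry in list(ws.entries):
        keys = ws.entry_keys.get(entry, [])
        if dist.key in keys:
            keys.remove(dist.key)
        if not keys:
            ws.entry_keys.pop(entry, None)
            ws.entries.remove(entry)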
Example #4
 def _working_set(self):
     trac_version = self.trac_version()
     metadata = InMemoryMetadataStub(name='trac', version=trac_version)
     trac_distribution = Distribution('/invalid/path', metadata, project_name='trac', version=trac_version)
     working_set = WorkingSet(entries=())
     working_set.add(trac_distribution)
     return working_set
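
InMemoryMetadataStub above is a test double whose definition is not shown. A minimal in-memory provider along those lines (an assumption, not the project's actual stub) only needs the IMetadataProvider methods that Distribution commonly calls:

import pkg_resources

class InMemoryMetadata:
    # Hypothetical stand-in: serves metadata files from a plain dict.
    def __init__(self, files):
        self._files = dict(files)  # e.g. {'PKG-INFO': 'Name: trac\n...'}

    def has_metadata(self, name):
        return name in self._files

    def get_metadata(self, name):
        return self._files[name]

    def get_metadata_lines(self, name):
        return pkg_resources.yield_lines(self.get_metadata(name))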
Example #5
  def _activate(self):
    self.update_candidate_distributions(self.load_internal_cache(self._pex, self._pex_info))

    if not self._pex_info.zip_safe and os.path.isfile(self._pex):
      self.update_module_paths(self.force_local(self._pex, self._pex_info))

    # TODO(wickman)  Implement dynamic fetchers if pex_info requirements specify dynamic=True
    # or a non-empty repository.
    all_reqs = [Requirement.parse(req) for req, _, _ in self._pex_info.requirements]

    working_set = WorkingSet([])

    with TRACER.timed('Resolving %s' % (
        ' '.join(map(str, all_reqs)) if all_reqs else 'empty dependency list')):
      try:
        resolved = working_set.resolve(all_reqs, env=self)
      except DistributionNotFound as e:
        TRACER.log('Failed to resolve a requirement: %s' % e)
        TRACER.log('Current working set:')
        for dist in working_set:
          TRACER.log('  - %s' % dist)
        raise

    for dist in resolved:
      with TRACER.timed('Activating %s' % dist):
        working_set.add(dist)

        if os.path.isdir(dist.location):
          with TRACER.timed('Adding sitedir'):
            site.addsitedir(dist.location)

        dist.activate()

    return working_set
Example #6
    def _activate(self):
        if not self._working_set:
            working_set = WorkingSet([])

            # set up the local .pex environment
            pex_info = self._pex_info.copy()
            pex_info.update(self._pex_info_overrides)
            pex_info.merge_pex_path(self._vars.PEX_PATH)
            self._envs.append(
                PEXEnvironment(self._pex,
                               pex_info,
                               interpreter=self._interpreter))
            # N.B. by this point, `pex_info.pex_path` will contain a single pex path
            # merged from pex_path in `PEX-INFO` and `PEX_PATH` set in the environment.
            # `PEX_PATH` entries written into `PEX-INFO` take precedence over those set
            # in the environment.
            if pex_info.pex_path:
                # set up other environments as specified in pex_path
                for pex_path in filter(None,
                                       pex_info.pex_path.split(os.pathsep)):
                    pex_info = PexInfo.from_pex(pex_path)
                    pex_info.update(self._pex_info_overrides)
                    self._envs.append(
                        PEXEnvironment(pex_path,
                                       pex_info,
                                       interpreter=self._interpreter))

            # activate all of them
            for env in self._envs:
                for dist in env.activate():
                    working_set.add(dist)

            self._working_set = working_set

        return self._working_set
Example #7
File: prybar.py Project: h4l/prybar
    def _create_context_manager(group: str,
                                entrypoint: pkg_resources.EntryPoint,
                                working_set: pkg_resources.WorkingSet,
                                scope: str):
        name = entrypoint.name
        # We need a Distribution to register our dynamic entrypoints within.
        # We have to always instantiate it to find our key, as key can be
        # different from the project_name
        dist = pkg_resources.Distribution(location=__file__,
                                          project_name=scope)

        # Prevent creating entrypoints in distributions not created by us,
        # otherwise we could remove the distributions when cleaning up.
        if (dist.key in working_set.by_key
                and working_set.by_key[dist.key].location != __file__):
            raise ValueError(f'scope {format_scope(scope, dist)} already '
                             f'exists in working set at location '
                             f'{working_set.by_key[dist.key].location}')

        if dist.key not in working_set.by_key:
            working_set.add(dist)
        # Reference the actual registered dist if we didn't just register it
        dist = working_set.by_key[dist.key]

        # Ensure the group exists in our distribution
        group_entries = dist.get_entry_map().setdefault(group, {})

        # Create an entry for the specified entrypoint
        if name in group_entries:
            raise ValueError(f'{name!r} is already registered under {group!r} '
                             f'in scope {format_scope(scope, dist)}')

        assert entrypoint.dist is None
        entrypoint.dist = dist
        group_entries[name] = entrypoint

        # Wait for something to happen with the entrypoint...
        try:
            yield
        finally:
            # Tidy up
            del group_entries[name]
            # If we re-use this entrypoint (by re-entering the context) the
            # dist may well have changed (because it gets deleted from the
            # working set) so we shouldn't remember it.
            assert entrypoint.dist is dist
            entrypoint.dist = None
            if len(group_entries) == 0:
                del dist.get_entry_map()[group]

            if len(dist.get_entry_map()) == 0:
                del working_set.by_key[dist.key]
                working_set.entry_keys[__file__].remove(dist.key)

                if not working_set.entry_keys[__file__]:
                    del working_set.entry_keys[__file__]
                    working_set.entries.remove(__file__)
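
The context manager above boils down to grafting an EntryPoint into a Distribution's entry map within a working set, then undoing it on exit. The same registration in miniature (group and entry-point names are illustrative):

import pkg_resources

ws = pkg_resources.WorkingSet([])
dist = pkg_resources.Distribution(location=__file__, project_name='demo-scope')
ws.add(dist)

# Register a dynamic entry point that targets an existing callable.
ep = pkg_resources.EntryPoint.parse('greet = os.path:join')
ep.dist = dist
dist.get_entry_map().setdefault('demo.group', {})['greet'] = ep

print([e.name for e in ws.iter_entry_points('demo.group')])  # ['greet']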
Example #8
    def test_node_modules_registry_flattening(self):
        lib = make_dummy_dist(self, (
            ('requires.txt', '\n'.join([])),
            ('bower.json',
             json.dumps({
                 'dependencies': {
                     'jquery': '~1.8.3',
                     'underscore': '1.8.3',
                 },
             })),
            ('extras_calmjs.json',
             json.dumps({
                 'bower_components': {
                     'jquery': 'jquery/dist/jquery.js',
                     'underscore': 'underscore/underscore-min.js',
                 },
                 'something_else': {
                     'parent': 'lib'
                 },
             })),
        ), 'lib', '1.0.0')

        app = make_dummy_dist(self, (
            ('requires.txt', '\n'.join([
                'lib>=1.0.0',
            ])),
            ('bower.json',
             json.dumps({
                 'dependencies': {
                     'jquery': '~3.0.0',
                 },
             })),
            ('extras_calmjs.json',
             json.dumps({
                 'bower_components': {
                     'jquery': 'jquery/dist/jquery.min.js',
                 },
                 'something_else': {
                     'child': 'named'
                 },
             })),
        ), 'app', '2.0')

        working_set = WorkingSet()
        working_set.add(lib, self._calmjs_testing_tmpdir)
        working_set.add(app, self._calmjs_testing_tmpdir)

        results = dist.flatten_extras_calmjs(['app'], working_set=working_set)
        self.assertEqual(
            results['bower_components'], {
                'jquery': 'jquery/dist/jquery.min.js',
                'underscore': 'underscore/underscore-min.js',
            })
        # child takes precedence as this was not specified to be merged
        self.assertEqual(results['something_else'], {'child': 'named'})
Example #9
def test_remove_from_ws__removes_distribution():
    ws = WorkingSet([])
    dist = create_dist("a", "1.0")

    assert dist not in ws

    ws.add(dist)
    assert dist in ws

    dependency.remove_from_ws(ws, dist)
    assert dist not in ws
Example #10
def _add_working_set_mocks(mocks, virtualenv_dists):
    ws = WorkingSet(entries=[])
    [ws.add(d) for d in _find_distributions('setuptools', 'zc.buildout')]
    [ws.add(d) for d in virtualenv_dists]

    default_ws = WorkingSet(entries=ws.entries)
    [default_ws.add(d) for d in virtualenv_dists]

    _add_mock(mocks, _pkgr_ws,
              lambda: Mock(side_effect=lambda entries: ws if entries
                           else WorkingSet([])))
    _add_mock(mocks, _pkgr_default_ws, lambda: default_ws)
Example #11
    def test_find_conflicting(self):
        ws = WorkingSet([])
        Foo = Distribution.from_filename("/foo_dir/Foo-1.2.egg")
        ws.add(Foo)

        # create a requirement that conflicts with Foo 1.2
        req = next(parse_requirements("Foo<1.2"))

        with pytest.raises(VersionConflict) as vc:
            ws.find(req)

        msg = 'Foo 1.2 is installed but Foo<1.2 is required'
        assert vc.value.report() == msg
Example #13
    def test_yarn_install_package_json_no_overwrite_interactive(self):
        """
        Most of the package_json testing is done in the next test
        class, which is specific to ``yarn init``.
        """

        # Testing the implied init call
        stub_mod_call(self, cli)
        stub_stdouts(self)
        stub_stdin(self, 'n\n')
        stub_check_interactive(self, True)
        tmpdir = mkdtemp(self)
        os.chdir(tmpdir)

        # All the pre-made setup.
        app = make_dummy_dist(self, (
            ('requires.txt', '\n'.join([])),
            ('package.json',
             json.dumps({
                 'dependencies': {
                     'jquery': '~1.11.0'
                 },
             })),
        ), 'foo', '1.9.0')
        working_set = WorkingSet()
        working_set.add(app, self._calmjs_testing_tmpdir)
        stub_item_attr_value(self, dist, 'default_working_set', working_set)

        # We are going to have a fake package.json
        with open(join(tmpdir, 'package.json'), 'w') as fd:
            json.dump({}, fd)

        # Capture the logging explicitly, as the conditions that
        # determine how the errors are output differ between test
        # harnesses.  Verify that later.
        with pretty_logging(stream=StringIO()) as stderr:
            # This is faked.
            yarn.yarn_install('foo', callback=prompt_overwrite_json)

        self.assertIn(
            "Overwrite '%s'? (Yes/No) [No] " % join(tmpdir, 'package.json'),
            sys.stdout.getvalue())
        # Check for the error message.  Normally this is printed to
        # stderr via the distutils custom logger and our handler bridge
        # for it, which is tested elsewhere.
        self.assertIn("not continuing with 'yarn install'", stderr.getvalue())

        with open(join(tmpdir, 'package.json')) as fd:
            result = fd.read()
        # This should remain unchanged as no to overwrite is default.
        self.assertEqual(result, '{}')
Example #14
    def test_resolve_conflicts_with_prior(self):
        """
        A ContextualVersionConflict should be raised when a requirement
        conflicts with a prior requirement for a different package.
        """
        # Create installation where Foo depends on Baz 1.0 and Bar depends on
        # Baz 2.0.
        ws = WorkingSet([])
        md = Metadata(('depends.txt', "Baz==1.0"))
        Foo = Distribution.from_filename("/foo_dir/Foo-1.0.egg", metadata=md)
        ws.add(Foo)
        md = Metadata(('depends.txt', "Baz==2.0"))
        Bar = Distribution.from_filename("/foo_dir/Bar-1.0.egg", metadata=md)
        ws.add(Bar)
        Baz = Distribution.from_filename("/foo_dir/Baz-1.0.egg")
        ws.add(Baz)
        Baz = Distribution.from_filename("/foo_dir/Baz-2.0.egg")
        ws.add(Baz)

        with pytest.raises(VersionConflict) as vc:
            ws.resolve(parse_requirements("Foo\nBar\n"))

        msg = "Baz 1.0 is installed but Baz==2.0 is required by "
        msg += repr(set(['Bar']))
        assert vc.value.report() == msg
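
For reference, the required_by detail asserted above comes from ContextualVersionConflict, the VersionConflict subclass that WorkingSet.resolve() raises when the offending requirement was introduced by a dependent. A conflict on a top-level requirement yields the plain form, as in this standalone sketch (egg path hypothetical):

from pkg_resources import (Distribution, VersionConflict, WorkingSet,
                           parse_requirements)

ws = WorkingSet([])
ws.add(Distribution.from_filename('/dir/Baz-1.0.egg'))

try:
    ws.resolve(parse_requirements('Baz==2.0'))
except VersionConflict as e:
    print(e.report())  # 'Baz 1.0 is installed but Baz==2.0 is required'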
Example #16
    def test_yarn_install_package_json_no_overwrite_interactive(self):
        """
        Most of the package_json testing is done in the next test
        class, which is specific to ``yarn init``.
        """

        # Testing the implied init call
        stub_mod_call(self, cli)
        stub_stdouts(self)
        stub_stdin(self, 'n\n')
        stub_check_interactive(self, True)
        tmpdir = mkdtemp(self)
        os.chdir(tmpdir)

        # All the pre-made setup.
        app = make_dummy_dist(self, (
            ('requires.txt', '\n'.join([])),
            ('package.json', json.dumps({
                'dependencies': {'jquery': '~1.11.0'},
            })),
        ), 'foo', '1.9.0')
        working_set = WorkingSet()
        working_set.add(app, self._calmjs_testing_tmpdir)
        stub_item_attr_value(self, dist, 'default_working_set', working_set)

        # We are going to have a fake package.json
        with open(join(tmpdir, 'package.json'), 'w') as fd:
            json.dump({}, fd)

        # Capture the logging explicitly, as the conditions that
        # determine how the errors are output differ between test
        # harnesses.  Verify that later.
        with pretty_logging(stream=StringIO()) as stderr:
            # This is faked.
            yarn.yarn_install('foo', callback=prompt_overwrite_json)

        self.assertIn(
            "Overwrite '%s'? (Yes/No) [No] " % join(tmpdir, 'package.json'),
            sys.stdout.getvalue())
        # Check for the error message.  Normally this is printed to
        # stderr via the distutils custom logger and our handler bridge
        # for it, which is tested elsewhere.
        self.assertIn("not continuing with 'yarn install'", stderr.getvalue())

        with open(join(tmpdir, 'package.json')) as fd:
            result = fd.read()
        # This should remain unchanged as no to overwrite is default.
        self.assertEqual(result, '{}')
Example #17
    def test_yarn_install_package_json_overwrite_interactive(self):
        # Testing the implied init call
        stub_mod_call(self, cli)
        stub_stdin(self, 'y\n')
        stub_stdouts(self)
        tmpdir = mkdtemp(self)
        os.chdir(tmpdir)

        # All the pre-made setup.
        app = make_dummy_dist(self, (
            ('requires.txt', '\n'.join([])),
            ('package.json',
             json.dumps({
                 'dependencies': {
                     'jquery': '~1.11.0'
                 },
             })),
        ), 'foo', '1.9.0')
        working_set = WorkingSet()
        working_set.add(app, self._calmjs_testing_tmpdir)
        stub_item_attr_value(self, dist, 'default_working_set', working_set)

        # We are going to have a fake package.json
        with open(join(tmpdir, 'package.json'), 'w') as fd:
            json.dump({}, fd)

        # This is faked.
        yarn.yarn_install('foo', overwrite=True)

        with open(join(tmpdir, 'package.json')) as fd:
            config = json.load(fd)

        # Overwritten
        self.assertEqual(
            config, {
                'dependencies': {
                    'jquery': '~1.11.0'
                },
                'devDependencies': {},
                'name': 'foo',
            })

        # No log level set.
        self.assertEqual(sys.stdout.getvalue(), '')
        self.assertEqual(sys.stderr.getvalue(), '')
Example #18
def main(args, options):
    from pkg_resources import WorkingSet, Requirement, find_distributions

    if not options.site_dir:
        app.error('Must supply --site')

    distributions = list(find_distributions(options.site_dir))
    working_set = WorkingSet()
    for dist in distributions:
        working_set.add(dist)

    for arg in args:
        arg_req = Requirement.parse(arg)
        found_dist = working_set.find(arg_req)
        if not found_dist:
            print('Could not find %s!' % arg_req)
            continue  # skip distilling a distribution we could not find
        out_zip = Distiller(found_dist).distill()
        print('Dumped %s => %s' % (arg_req, out_zip))
Example #19
def main(args, options):
  from pkg_resources import WorkingSet, Requirement, find_distributions

  if not options.site_dir:
    app.error('Must supply --site')

  distributions = list(find_distributions(options.site_dir))
  working_set = WorkingSet()
  for dist in distributions:
    working_set.add(dist)

  for arg in args:
    arg_req = Requirement.parse(arg)
    found_dist = working_set.find(arg_req)
    if not found_dist:
      print('Could not find %s!' % arg_req)
      continue  # skip distilling a distribution we could not find
    out_zip = Distiller(found_dist).distill()
    print('Dumped %s => %s' % (arg_req, out_zip))
Example #20
    def testCollection(self):
        # empty path should produce no distributions
        ad = pkg_resources.Environment([], platform=None, python=None)
        self.assertEqual(list(ad), [])
        self.assertEqual(ad['FooPkg'],[])
        ad.add(dist_from_fn("FooPkg-1.3_1.egg"))
        ad.add(dist_from_fn("FooPkg-1.4-py2.4-win32.egg"))
        ad.add(dist_from_fn("FooPkg-1.2-py2.4.egg"))

        # Name is in there now
        self.assertTrue(ad['FooPkg'])
        # But only 1 package
        self.assertEqual(list(ad), ['foopkg'])

        # Distributions sort by version
        self.assertEqual(
            [dist.version for dist in ad['FooPkg']], ['1.4','1.3-1','1.2']
        )
        # Removing a distribution leaves sequence alone
        ad.remove(ad['FooPkg'][1])
        self.assertEqual(
            [dist.version for dist in ad['FooPkg']], ['1.4','1.2']
        )
        # And inserting adds them in order
        ad.add(dist_from_fn("FooPkg-1.9.egg"))
        self.assertEqual(
            [dist.version for dist in ad['FooPkg']], ['1.9','1.4','1.2']
        )

        ws = WorkingSet([])
        foo12 = dist_from_fn("FooPkg-1.2-py2.4.egg")
        foo14 = dist_from_fn("FooPkg-1.4-py2.4-win32.egg")
        req, = parse_requirements("FooPkg>=1.3")

        # Nominal case: no distros on path, should yield all applicable
        self.assertEqual(ad.best_match(req,ws).version, '1.9')
        # If a matching distro is already installed, should return only that
        ws.add(foo14)
        self.assertEqual(ad.best_match(req,ws).version, '1.4')

        # If the first matching distro is unsuitable, it's a version conflict
        ws = WorkingSet([])
        ws.add(foo12)
        ws.add(foo14)
        self.assertRaises(VersionConflict, ad.best_match, req, ws)

        # If more than one match on the path, the first one takes precedence
        ws = WorkingSet([])
        ws.add(foo14)
        ws.add(foo12)
        ws.add(foo14)
        self.assertEqual(ad.best_match(req,ws).version, '1.4')
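
Distilled, the best_match() contract this test keeps re-checking: the working set answers first, and the Environment's candidates are consulted only when the working set has no match. Egg file names below are illustrative:

import pkg_resources
from pkg_resources import Distribution, WorkingSet, parse_requirements

ad = pkg_resources.Environment([], platform=None, python=None)
ad.add(Distribution.from_filename('FooPkg-1.9.egg'))

ws = WorkingSet([])
req, = parse_requirements('FooPkg>=1.3')
print(ad.best_match(req, ws).version)  # '1.9' -- from the environment

ws.add(Distribution.from_filename('FooPkg-1.4.egg'))
print(ad.best_match(req, ws).version)  # '1.4' -- the installed dist wins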
Example #22
    def setUp(self):
        remember_cwd(self)

        app = make_dummy_dist(self, (
            ('requires.txt', '\n'.join([])),
            ('package.json', json.dumps({
                'dependencies': {'jquery': '~1.11.0'},
            })),
        ), 'foo', '1.9.0')

        working_set = WorkingSet()
        working_set.add(app, self._calmjs_testing_tmpdir)

        # Stub out the flatten_egginfo_json calls with one that uses our
        # custom working_set here.
        stub_item_attr_value(self, dist, 'default_working_set', working_set)
        # Quiet stdout from distutils logs
        stub_stdouts(self)
        # Force auto-detected interactive mode to True, because this is
        # typically executed within an interactive context.
        stub_check_interactive(self, True)
Example #23
    def setUp(self):
        remember_cwd(self)

        app = make_dummy_dist(self, (
            ('requires.txt', '\n'.join([])),
            ('package.json', json.dumps({
                'dependencies': {'jquery': '~1.11.0'},
            })),
        ), 'foo', '1.9.0')

        working_set = WorkingSet()
        working_set.add(app, self._calmjs_testing_tmpdir)

        # Stub out the flatten_egginfo_json calls with one that uses our
        # custom working_set here.
        stub_item_attr_value(self, dist, 'default_working_set', working_set)
        # Quiet stdout from distutils logs
        stub_stdouts(self)
        # Force auto-detected interactive mode to True, because this is
        # typically executed within an interactive context.
        stub_mod_check_interactive(self, [cli], True)
Example #24
    def setUp(self):
        # save working directory
        remember_cwd(self)

        # All the pre-made setup.
        stub_mod_call(self, cli)
        app = make_dummy_dist(self, (
            ('requires.txt', '\n'.join([])),
            ('package.json', json.dumps({
                'dependencies': {'jquery': '~1.11.0'},
            })),
        ), 'foo', '1.9.0')
        underscore = make_dummy_dist(self, (
            ('requires.txt', '\n'.join([])),
            ('package.json', json.dumps({
                'dependencies': {'underscore': '~1.8.0'},
            })),
        ), 'underscore', '1.8.0')
        named = make_dummy_dist(self, (
            ('requires.txt', '\n'.join([])),
            ('package.json', json.dumps({
                'dependencies': {'jquery': '~3.0.0'},
                'name': 'named-js',
            })),
        ), 'named', '2.0.0')
        working_set = WorkingSet()
        working_set.add(app, self._calmjs_testing_tmpdir)
        working_set.add(underscore, self._calmjs_testing_tmpdir)
        working_set.add(named, self._calmjs_testing_tmpdir)
        stub_item_attr_value(self, dist, 'default_working_set', working_set)
        stub_check_interactive(self, True)
Example #25
    def setUp(self):
        # save working directory
        remember_cwd(self)

        # All the pre-made setup.
        stub_mod_call(self, cli)
        app = make_dummy_dist(self, (
            ('requires.txt', '\n'.join([])),
            ('package.json', json.dumps({
                'dependencies': {'jquery': '~1.11.0'},
            })),
        ), 'foo', '1.9.0')
        underscore = make_dummy_dist(self, (
            ('requires.txt', '\n'.join([])),
            ('package.json', json.dumps({
                'dependencies': {'underscore': '~1.8.0'},
            })),
        ), 'underscore', '1.8.0')
        named = make_dummy_dist(self, (
            ('requires.txt', '\n'.join([])),
            ('package.json', json.dumps({
                'dependencies': {'jquery': '~3.0.0'},
                'name': 'named-js',
            })),
        ), 'named', '2.0.0')
        working_set = WorkingSet()
        working_set.add(app, self._calmjs_testing_tmpdir)
        working_set.add(underscore, self._calmjs_testing_tmpdir)
        working_set.add(named, self._calmjs_testing_tmpdir)
        stub_item_attr_value(self, dist, 'default_working_set', working_set)
        stub_mod_check_interactive(self, [cli], True)
        # also save this
        self.inst_interactive = npm.npm.cli_driver.interactive
Example #26
    def testCollection(self):
        # empty path should produce no distributions
        ad = pkg_resources.Environment([], platform=None, python=None)
        assert list(ad) == []
        assert ad["FooPkg"] == []
        ad.add(dist_from_fn("FooPkg-1.3_1.egg"))
        ad.add(dist_from_fn("FooPkg-1.4-py2.4-win32.egg"))
        ad.add(dist_from_fn("FooPkg-1.2-py2.4.egg"))

        # Name is in there now
        assert ad["FooPkg"]
        # But only 1 package
        assert list(ad) == ["foopkg"]

        # Distributions sort by version
        assert [dist.version for dist in ad["FooPkg"]] == ["1.4", "1.3-1", "1.2"]

        # Removing a distribution leaves sequence alone
        ad.remove(ad["FooPkg"][1])
        assert [dist.version for dist in ad["FooPkg"]] == ["1.4", "1.2"]

        # And inserting adds them in order
        ad.add(dist_from_fn("FooPkg-1.9.egg"))
        assert [dist.version for dist in ad["FooPkg"]] == ["1.9", "1.4", "1.2"]

        ws = WorkingSet([])
        foo12 = dist_from_fn("FooPkg-1.2-py2.4.egg")
        foo14 = dist_from_fn("FooPkg-1.4-py2.4-win32.egg")
        req, = parse_requirements("FooPkg>=1.3")

        # Nominal case: no distros on path, should yield all applicable
        assert ad.best_match(req, ws).version == "1.9"
        # If a matching distro is already installed, should return only that
        ws.add(foo14)
        assert ad.best_match(req, ws).version == "1.4"

        # If the first matching distro is unsuitable, it's a version conflict
        ws = WorkingSet([])
        ws.add(foo12)
        ws.add(foo14)
        with pytest.raises(VersionConflict):
            ad.best_match(req, ws)

        # If more than one match on the path, the first one takes precedence
        ws = WorkingSet([])
        ws.add(foo14)
        ws.add(foo12)
        ws.add(foo14)
        assert ad.best_match(req, ws).version == "1.4"
Example #27
    def test_yarn_install_package_json_overwrite_interactive(self):
        # Testing the implied init call
        stub_mod_call(self, cli)
        stub_stdin(self, 'y\n')
        stub_stdouts(self)
        tmpdir = mkdtemp(self)
        os.chdir(tmpdir)

        # All the pre-made setup.
        app = make_dummy_dist(self, (
            ('requires.txt', '\n'.join([])),
            ('package.json', json.dumps({
                'dependencies': {'jquery': '~1.11.0'},
            })),
        ), 'foo', '1.9.0')
        working_set = WorkingSet()
        working_set.add(app, self._calmjs_testing_tmpdir)
        stub_item_attr_value(self, dist, 'default_working_set', working_set)

        # We are going to have a fake package.json
        with open(join(tmpdir, 'package.json'), 'w') as fd:
            json.dump({}, fd)

        # This is faked.
        yarn.yarn_install('foo', overwrite=True)

        with open(join(tmpdir, 'package.json')) as fd:
            config = json.load(fd)

        # Overwritten
        self.assertEqual(config, {
            'dependencies': {'jquery': '~1.11.0'},
            'devDependencies': {},
            'name': 'foo',
        })

        # No log level set.
        self.assertEqual(sys.stdout.getvalue(), '')
        self.assertEqual(sys.stderr.getvalue(), '')
Example #28
def list_plugins():
    working_set = WorkingSet()
    # Make sure user site packages are added
    # to the set so user plugins are listed.
    user_site_packages = site.USER_SITE
    if not running_under_virtualenv() and \
       user_site_packages not in working_set.entries:
        working_set.entry_keys.setdefault(user_site_packages, [])
        working_set.entries.append(user_site_packages)
        for dist in find_distributions(user_site_packages, only=True):
            working_set.add(dist, user_site_packages, replace=True)
    plugins = defaultdict(list)
    for dist in working_set.by_key.values():
        if dist.key == 'plover':
            continue
        for entrypoint_type in dist.get_entry_map().keys():
            if entrypoint_type.startswith('plover.'):
                break
        else:
            continue
        if isinstance(dist, DistInfoDistribution):
            metadata_entry = 'METADATA'
        else:
            # Assume it's an egg distribution...
            metadata_entry = 'PKG-INFO'
        if not dist.has_metadata(metadata_entry):
            log.warning('ignoring distribution (missing metadata): %s', dist)
            continue
        metadata = Metadata()
        metadata.parse(dist.get_metadata(metadata_entry))
        plugin_metadata = PluginMetadata.from_dict({
            attr: getattr(metadata, attr)
            for attr in PluginMetadata._fields
        })
        plugins[dist.key].append(plugin_metadata)
    return {
        name: list(sorted(versions))
        for name, versions in plugins.items()
    }
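
The graft at the top of list_plugins(), isolated: teaching an already-seeded WorkingSet about one extra directory so distributions found there become discoverable (directory path hypothetical):

from pkg_resources import WorkingSet, find_distributions

ws = WorkingSet()                  # seeded from sys.path by default
extra_dir = '/opt/plover/plugins'  # hypothetical plugin directory
if extra_dir not in ws.entries:
    ws.entry_keys.setdefault(extra_dir, [])
    ws.entries.append(extra_dir)
    for dist in find_distributions(extra_dir, only=True):
        ws.add(dist, extra_dir, replace=True)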
Example #29
  def _activate(self):
    self.update_candidate_distributions(self.load_internal_cache(self._pex, self._pex_info))

    if not self._pex_info.zip_safe and os.path.isfile(self._pex):
      self.update_module_paths(self.force_local(self._pex, self._pex_info))

    all_reqs = [Requirement.parse(req) for req in self._pex_info.requirements]

    working_set = WorkingSet([])
    resolved = self._resolve(working_set, all_reqs)

    for dist in resolved:
      with TRACER.timed('Activating %s' % dist, V=2):
        working_set.add(dist)

        if os.path.isdir(dist.location):
          with TRACER.timed('Adding sitedir', V=2):
            site.addsitedir(dist.location)

        dist.activate()

    return working_set
Example #30
    def _activate(self):
        self.update_candidate_distributions(
            self.load_internal_cache(self._pex, self._pex_info))

        if not self._pex_info.zip_safe and os.path.isfile(self._pex):
            self.update_module_paths(
                self.force_local(self._pex, self._pex_info))

        all_reqs = [
            Requirement.parse(req) for req in self._pex_info.requirements
        ]

        working_set = WorkingSet([])
        resolved = self._resolve(working_set, all_reqs)

        for dist in resolved:
            with TRACER.timed('Activating %s' % dist, V=2):
                working_set.add(dist)

                if os.path.isdir(dist.location):
                    with TRACER.timed('Adding sitedir', V=2):
                        if dist.location not in sys.path and self._inherit_path == "fallback":
                            # Prepend location to sys.path.
                            # This ensures that bundled versions of libraries will be used before system-installed
                            # versions, in case something is installed in both, helping to favor hermeticity in
                            # the case of non-hermetic PEX files (i.e. those with inherit_path=True).
                            #
                            # If the path is not already in sys.path, site.addsitedir will append (not prepend)
                            # the path to sys.path. But if the path is already in sys.path, site.addsitedir will
                            # leave sys.path unmodified, but will do everything else it would do. This is not part
                            # of its advertised contract (which is very vague), but has been verified to be the
                            # case by inspecting its source for both cpython 2.7 and cpython 3.7.
                            sys.path.insert(0, dist.location)
                        site.addsitedir(dist.location)

                dist.activate()

        return working_set
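
Restating the long comment above in isolation: site.addsitedir() appends a directory to sys.path when it is not already present (and leaves sys.path untouched when it is), so code that needs bundled distributions to shadow system-installed ones must prepend first. A standalone sketch with a hypothetical directory:

import site
import sys

bundled = '/opt/app/bundled-libs'  # hypothetical
if bundled not in sys.path:
    sys.path.insert(0, bundled)    # bundled copies now shadow system ones
site.addsitedir(bundled)           # still runs, to process any .pth files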
Example #31
  def _activate(self):
    if not self._working_set:
      working_set = WorkingSet([])

      # set up the local .pex environment
      pex_info = self._pex_info.copy()
      pex_info.update(self._pex_info_overrides)
      self._envs.append(PEXEnvironment(self._pex, pex_info))

      # set up other environments as specified in PEX_PATH
      for pex_path in filter(None, self._vars.PEX_PATH.split(os.pathsep)):
        pex_info = PexInfo.from_pex(pex_path)
        pex_info.update(self._pex_info_overrides)
        self._envs.append(PEXEnvironment(pex_path, pex_info))

      # activate all of them
      for env in self._envs:
        for dist in env.activate():
          working_set.add(dist)

      self._working_set = working_set

    return self._working_set
Example #33
class PEXEnvironment(Environment):
    class Subcache(object):
        def __init__(self, path, env):
            self._activated = False
            self._path = path
            self._env = env

        @property
        def activated(self):
            return self._activated

        def activate(self):
            if not self._activated:
                with TRACER.timed('Activating cache %s' % self._path):
                    for dist in find_distributions(self._path):
                        if self._env.can_add(dist):
                            self._env.add(dist)
                self._activated = True

    @staticmethod
    def _really_zipsafe(dist):
        try:
            pez_info = dist.resource_listdir('/PEZ-INFO')
        except OSError:
            pez_info = []
        if 'zip-safe' in pez_info:
            return True
        egg_metadata = dist.metadata_listdir('/')
        return 'zip-safe' in egg_metadata and 'native_libs.txt' not in egg_metadata

    def __init__(self,
                 pex,
                 pex_info,
                 platform=Platform.current(),
                 python=Platform.python()):
        subcaches = sum(
            [[os.path.join(pex, pex_info.internal_cache)],
             [cache for cache in pex_info.egg_caches],
             [pex_info.install_cache if pex_info.install_cache else []]], [])
        self._pex_info = pex_info
        self._activated = False
        self._subcaches = [self.Subcache(cache, self) for cache in subcaches]
        self._ws = WorkingSet([])
        with TRACER.timed('Calling environment super'):
            super(PEXEnvironment, self).__init__(search_path=[],
                                                 platform=platform,
                                                 python=python)

    def resolve(self, requirements, ignore_errors=False):
        reqs = maybe_requirement_list(requirements)
        resolved = OrderedSet()
        for req in reqs:
            with TRACER.timed('Resolved %s' % req):
                try:
                    distributions = self._ws.resolve([req], env=self)
                except DistributionNotFound as e:
                    TRACER.log('Failed to resolve %s' % req)
                    if not ignore_errors:
                        raise
                    continue
                resolved.update(distributions)
        return list(resolved)

    def can_add(self, dist):
        return Platform.distribution_compatible(dist, self.python,
                                                self.platform)

    def best_match(self, req, *ignore_args, **ignore_kwargs):
        while True:
            resolved_req = super(PEXEnvironment,
                                 self).best_match(req, self._ws)
            if resolved_req:
                return resolved_req
            for subcache in self._subcaches:
                if not subcache.activated:
                    subcache.activate()
                    break
            else:
                # TODO(wickman)  Add per-requirement optional/ignore_errors flag.
                print(
                    'Failed to resolve %s, your installation may not work properly.'
                    % req,
                    file=sys.stderr)
                break

    def activate(self):
        if self._activated:
            return
        if self._pex_info.inherit_path:
            self._ws = WorkingSet(sys.path)

        # TODO(wickman)  Implement dynamic fetchers if pex_info requirements specify dynamic=True
        # or a non-empty repository.
        all_reqs = [
            Requirement.parse(req) for req, _, _ in self._pex_info.requirements
        ]

        for req in all_reqs:
            with TRACER.timed('Resolved %s' % str(req)):
                try:
                    resolved = self._ws.resolve([req], env=self)
                except DistributionNotFound as e:
                    TRACER.log('Failed to resolve %s: %s' % (req, e))
                    if not self._pex_info.ignore_errors:
                        raise
                    continue
            for dist in resolved:
                with TRACER.timed('  Activated %s' % dist):
                    if os.environ.get('PEX_FORCE_LOCAL',
                                      not self._really_zipsafe(dist)):
                        with TRACER.timed('    Locally caching'):
                            new_dist = DistributionHelper.maybe_locally_cache(
                                dist, self._pex_info.install_cache)
                            new_dist.activate()
                    else:
                        self._ws.add(dist)
                        dist.activate()

        self._activated = True
Example #34
def plugin_resolution(
    rule_runner: RuleRunner,
    *,
    interpreter: PythonInterpreter | None = None,
    chroot: str | None = None,
    plugins: Sequence[Plugin] = (),
    sdist: bool = True,
    working_set_entries: Sequence[Distribution] = (),
    use_pypi: bool = False,
):
    @contextmanager
    def provide_chroot(existing):
        if existing:
            yield existing, False
        else:
            with temporary_dir() as new_chroot:
                yield new_chroot, True

    # Default to resolving with whatever we're currently running with.
    interpreter_constraints = (InterpreterConstraints(
        [f"=={interpreter.identity.version_str}"]) if interpreter else None)
    artifact_interpreter_constraints = interpreter_constraints or InterpreterConstraints(
        [f"=={'.'.join(map(str, sys.version_info[:3]))}"])

    with provide_chroot(chroot) as (root_dir, create_artifacts):
        env: Dict[str, str] = {}
        repo_dir = None
        if plugins:
            repo_dir = os.path.join(root_dir, "repo")
            env.update(
                PANTS_PYTHON_REPOS_REPOS=f"['file://{repo_dir}']",
                PANTS_PYTHON_RESOLVER_CACHE_TTL="1",
            )
            if not use_pypi:
                env.update(PANTS_PYTHON_REPOS_INDEXES="[]")
            plugin_list = []
            for plugin in plugins:
                version = plugin.version
                plugin_list.append(
                    f"{plugin.name}=={version}" if version else plugin.name)
                if create_artifacts:
                    setup_py_args = [
                        "sdist" if sdist else "bdist_wheel", "--dist-dir",
                        "dist/"
                    ]
                    _run_setup_py(
                        rule_runner,
                        plugin.name,
                        artifact_interpreter_constraints,
                        version,
                        plugin.install_requires,
                        setup_py_args,
                        repo_dir,
                    )
            env["PANTS_PLUGINS"] = f"[{','.join(map(repr, plugin_list))}]"

        configpath = os.path.join(root_dir, "pants.toml")
        if create_artifacts:
            touch(configpath)
        args = [f"--pants-config-files=['{configpath}']"]

        options_bootstrapper = OptionsBootstrapper.create(env=env,
                                                          args=args,
                                                          allow_pantsrc=False)
        complete_env = CompleteEnvironment({
            **{
                k: os.environ[k]
                for k in ["PATH", "HOME", "PYENV_ROOT"] if k in os.environ
            },
            **env
        })
        bootstrap_scheduler = create_bootstrap_scheduler(options_bootstrapper)
        cache_dir = options_bootstrapper.bootstrap_options.for_global_scope(
        ).named_caches_dir

        input_working_set = WorkingSet(entries=[])
        for dist in working_set_entries:
            input_working_set.add(dist)
        plugin_resolver = PluginResolver(bootstrap_scheduler,
                                         interpreter_constraints,
                                         input_working_set)
        working_set = plugin_resolver.resolve(
            options_bootstrapper,
            complete_env,
        )
        for dist in working_set:
            assert (Path(os.path.realpath(cache_dir))
                    in Path(os.path.realpath(dist.location)).parents)

        yield working_set, root_dir, repo_dir
Example #35
class LoaderTest(unittest.TestCase):
  def setUp(self):
    self.build_configuration = BuildConfiguration()
    self.working_set = WorkingSet()
    for entry in working_set.entries:
      self.working_set.add_entry(entry)

  def tearDown(self):
    Goal.clear()

  @contextmanager
  def create_register(self, build_file_aliases=None, register_goals=None, global_subsystems=None, module_name='register'):

    package_name = b'__test_package_{0}'.format(uuid.uuid4().hex)
    self.assertFalse(package_name in sys.modules)

    package_module = types.ModuleType(package_name)
    sys.modules[package_name] = package_module
    try:
      register_module_fqn = b'{0}.{1}'.format(package_name, module_name)
      register_module = types.ModuleType(register_module_fqn)
      setattr(package_module, module_name, register_module)
      sys.modules[register_module_fqn] = register_module

      def register_entrypoint(function_name, function):
        if function:
          setattr(register_module, function_name, function)

      register_entrypoint('build_file_aliases', build_file_aliases)
      register_entrypoint('global_subsystems', global_subsystems)
      register_entrypoint('register_goals', register_goals)

      yield package_name
    finally:
      del sys.modules[package_name]

  def assert_empty_aliases(self):
    registered_aliases = self.build_configuration.registered_aliases()
    self.assertEqual(0, len(registered_aliases.targets))
    self.assertEqual(0, len(registered_aliases.objects))
    self.assertEqual(0, len(registered_aliases.context_aware_object_factories))
    self.assertEqual(self.build_configuration.subsystem_types(), set())

  def test_load_valid_empty(self):
    with self.create_register() as backend_package:
      load_backend(self.build_configuration, backend_package)
      self.assert_empty_aliases()

  def test_load_valid_partial_aliases(self):
    aliases = BuildFileAliases.create(targets={'bob': DummyTarget},
                                      objects={'obj1': DummyObject1,
                                               'obj2': DummyObject2})
    with self.create_register(build_file_aliases=lambda: aliases) as backend_package:
      load_backend(self.build_configuration, backend_package)
      registered_aliases = self.build_configuration.registered_aliases()
      self.assertEqual(DummyTarget, registered_aliases.targets['bob'])
      self.assertEqual(DummyObject1, registered_aliases.objects['obj1'])
      self.assertEqual(DummyObject2, registered_aliases.objects['obj2'])
      self.assertEqual(self.build_configuration.subsystem_types(),
                       set([DummySubsystem1, DummySubsystem2]))

  def test_load_valid_partial_goals(self):
    def register_goals():
      Goal.by_name('jack').install(TaskRegistrar('jill', DummyTask))

    with self.create_register(register_goals=register_goals) as backend_package:
      Goal.clear()
      self.assertEqual(0, len(Goal.all()))

      load_backend(self.build_configuration, backend_package)
      self.assert_empty_aliases()
      self.assertEqual(1, len(Goal.all()))

      task_names = Goal.by_name('jack').ordered_task_names()
      self.assertEqual(1, len(task_names))

      task_name = task_names[0]
      self.assertEqual('jill', task_name)

  def test_load_invalid_entrypoint(self):
    def build_file_aliases(bad_arg):
      return BuildFileAliases.create()

    with self.create_register(build_file_aliases=build_file_aliases) as backend_package:
      with self.assertRaises(BuildConfigurationError):
        load_backend(self.build_configuration, backend_package)

  def test_load_invalid_module(self):
    with self.create_register(module_name='register2') as backend_package:
      with self.assertRaises(BuildConfigurationError):
        load_backend(self.build_configuration, backend_package)

  def test_load_missing_plugin(self):
    with self.assertRaises(PluginNotFound):
      self.load_plugins(['Foobar'])


  def get_mock_plugin(self, name, version, reg=None, alias=None, after=None):
    """Make a fake Distribution (optionally with entry points)

    Note the entry points do not actually point to code in the returned distribution --
    the distribution does not even have a location and does not contain any code, just metadata.

    A module is synthesized on the fly and installed into sys.modules under a random name.
    If optional entry point callables are provided, those are added as methods to the module and
    their name (foo/bar/baz in fake module) is added as the requested entry point to the mocked
    metadata added to the returned dist.

    :param str name: project_name for distribution (see pkg_resources)
    :param str version: version for distribution (see pkg_resources)
    :param callable reg: Optional callable for goal registration entry point
    :param callable alias: Optional callable for build_file_aliases entry point
    :param callable after: Optional callable for load_after list entry point
    """

    plugin_pkg = b'demoplugin{0}'.format(uuid.uuid4().hex)
    pkg = types.ModuleType(plugin_pkg)
    sys.modules[plugin_pkg] = pkg
    module_name = b'{0}.{1}'.format(plugin_pkg, 'demo')
    plugin = types.ModuleType(module_name)
    setattr(pkg, 'demo', plugin)
    sys.modules[module_name] = plugin

    metadata = {}
    entry_lines = []

    if reg is not None:
      setattr(plugin, 'foo', reg)
      entry_lines.append('register_goals = {}:foo\n'.format(module_name))

    if alias is not None:
      setattr(plugin, 'bar', alias)
      entry_lines.append('build_file_aliases = {}:bar\n'.format(module_name))

    if after is not None:
      setattr(plugin, 'baz', after)
      entry_lines.append('load_after = {}:baz\n'.format(module_name))

    if entry_lines:
      entry_data = '[pantsbuild.plugin]\n{}\n'.format('\n'.join(entry_lines))
      metadata = {'entry_points.txt': entry_data}

    return Distribution(project_name=name, version=version, metadata=MockMetadata(metadata))

  def load_plugins(self, plugins):
    load_plugins(self.build_configuration, plugins, load_from=self.working_set)

  def test_plugin_load_and_order(self):
    d1 = self.get_mock_plugin('demo1', '0.0.1', after=lambda: ['demo2'])
    d2 = self.get_mock_plugin('demo2', '0.0.3')
    self.working_set.add(d1)

    # Attempting to load 'demo1' then 'demo2' should fail as 'demo1' requires 'after'=['demo2'].
    with self.assertRaises(PluginLoadOrderError):
      self.load_plugins(['demo1', 'demo2'])

    # Attempting to load 'demo2' first should fail as it is not (yet) installed.
    with self.assertRaises(PluginNotFound):
      self.load_plugins(['demo2', 'demo1'])

    # Installing demo2 and then loading in correct order should work though.
    self.working_set.add(d2)
    self.load_plugins(['demo2>=0.0.2', 'demo1'])

    # But asking for a bad (not installed) version fails.
    with self.assertRaises(VersionConflict):
      self.load_plugins(['demo2>=0.0.5'])

  def test_plugin_installs_goal(self):
    def reg_goal():
      Goal.by_name('plugindemo').install(TaskRegistrar('foo', DummyTask))
    self.working_set.add(self.get_mock_plugin('regdemo', '0.0.1', reg=reg_goal))

    # Start without the custom goal.
    self.assertEqual(0, len(Goal.by_name('plugindemo').ordered_task_names()))

    # Load plugin which registers custom goal.
    self.load_plugins(['regdemo'])

    # Now the custom goal exists.
    self.assertEqual(1, len(Goal.by_name('plugindemo').ordered_task_names()))
    self.assertEqual('foo', Goal.by_name('plugindemo').ordered_task_names()[0])

  def test_plugin_installs_alias(self):
    def reg_alias():
      return BuildFileAliases.create(targets={'pluginalias': DummyTarget},
                                     objects={'FROMPLUGIN1': DummyObject1,
                                              'FROMPLUGIN2': DummyObject2})
    self.working_set.add(self.get_mock_plugin('aliasdemo', '0.0.1', alias=reg_alias))

    # Start with no aliases.
    self.assert_empty_aliases()

    # Now load the plugin which defines aliases.
    self.load_plugins(['aliasdemo'])

    # Aliases now exist.
    registered_aliases = self.build_configuration.registered_aliases()
    self.assertEqual(DummyTarget, registered_aliases.targets['pluginalias'])
    self.assertEqual(DummyObject1, registered_aliases.objects['FROMPLUGIN1'])
    self.assertEqual(DummyObject2, registered_aliases.objects['FROMPLUGIN2'])
    self.assertEqual(self.build_configuration.subsystem_types(),
                     {DummySubsystem1, DummySubsystem2})

  def test_subsystems(self):
    def global_subsystems():
      return {DummySubsystem1, DummySubsystem2}
    with self.create_register(global_subsystems=global_subsystems) as backend_package:
      load_backend(self.build_configuration, backend_package)
      self.assertEqual(self.build_configuration.subsystem_types(),
                       {DummySubsystem1, DummySubsystem2})
Example #36
class LoaderTest(unittest.TestCase):

  def setUp(self):
    self.build_configuration = BuildConfiguration()
    self.working_set = WorkingSet()
    for entry in working_set.entries:
      self.working_set.add_entry(entry)

  def tearDown(self):
    Goal.clear()

  @contextmanager
  def create_register(self, build_file_aliases=None, register_goals=None, global_subsystems=None,
                      module_name='register'):

    package_name = b'__test_package_{0}'.format(uuid.uuid4().hex)
    self.assertFalse(package_name in sys.modules)

    package_module = types.ModuleType(package_name)
    sys.modules[package_name] = package_module
    try:
      register_module_fqn = b'{0}.{1}'.format(package_name, module_name)
      register_module = types.ModuleType(register_module_fqn)
      setattr(package_module, module_name, register_module)
      sys.modules[register_module_fqn] = register_module

      def register_entrypoint(function_name, function):
        if function:
          setattr(register_module, function_name, function)

      register_entrypoint('build_file_aliases', build_file_aliases)
      register_entrypoint('global_subsystems', global_subsystems)
      register_entrypoint('register_goals', register_goals)

      yield package_name
    finally:
      del sys.modules[package_name]

  def assert_empty_aliases(self):
    registered_aliases = self.build_configuration.registered_aliases()
    self.assertEqual(0, len(registered_aliases.target_types))
    self.assertEqual(0, len(registered_aliases.target_macro_factories))
    self.assertEqual(0, len(registered_aliases.objects))
    self.assertEqual(0, len(registered_aliases.context_aware_object_factories))
    self.assertEqual(self.build_configuration.subsystems(), set())

  def test_load_valid_empty(self):
    with self.create_register() as backend_package:
      load_backend(self.build_configuration, backend_package)
      self.assert_empty_aliases()

  def test_load_valid_partial_aliases(self):
    aliases = BuildFileAliases(targets={'bob': DummyTarget},
                               objects={'obj1': DummyObject1,
                                        'obj2': DummyObject2})
    with self.create_register(build_file_aliases=lambda: aliases) as backend_package:
      load_backend(self.build_configuration, backend_package)
      registered_aliases = self.build_configuration.registered_aliases()
      self.assertEqual(DummyTarget, registered_aliases.target_types['bob'])
      self.assertEqual(DummyObject1, registered_aliases.objects['obj1'])
      self.assertEqual(DummyObject2, registered_aliases.objects['obj2'])
      self.assertEqual(self.build_configuration.subsystems(), {DummySubsystem1, DummySubsystem2})

  def test_load_valid_partial_goals(self):
    def register_goals():
      Goal.by_name('jack').install(TaskRegistrar('jill', DummyTask))

    with self.create_register(register_goals=register_goals) as backend_package:
      Goal.clear()
      self.assertEqual(0, len(Goal.all()))

      load_backend(self.build_configuration, backend_package)
      self.assert_empty_aliases()
      self.assertEqual(1, len(Goal.all()))

      task_names = Goal.by_name('jack').ordered_task_names()
      self.assertEqual(1, len(task_names))

      task_name = task_names[0]
      self.assertEqual('jill', task_name)

  def test_load_invalid_entrypoint(self):
    def build_file_aliases(bad_arg):
      return BuildFileAliases()

    with self.create_register(build_file_aliases=build_file_aliases) as backend_package:
      with self.assertRaises(BuildConfigurationError):
        load_backend(self.build_configuration, backend_package)

  def test_load_invalid_module(self):
    with self.create_register(module_name='register2') as backend_package:
      with self.assertRaises(BuildConfigurationError):
        load_backend(self.build_configuration, backend_package)

  def test_load_missing_plugin(self):
    with self.assertRaises(PluginNotFound):
      self.load_plugins(['Foobar'])

  def get_mock_plugin(self, name, version, reg=None, alias=None, after=None):
    """Make a fake Distribution (optionally with entry points)

    Note the entry points do not actually point to code in the returned distribution --
    the distribution does not even have a location and does not contain any code, just metadata.

    A module is synthesized on the fly and installed into sys.modules under a random name.
    If optional entry point callables are provided, they are added as attributes of the
    module, and entry points referencing them by name (foo/bar/baz in the fake module) are
    added to the mocked metadata of the returned dist.

    :param string name: project_name for distribution (see pkg_resources)
    :param string version: version for distribution (see pkg_resources)
    :param callable reg: Optional callable for goal registration entry point
    :param callable alias: Optional callable for build_file_aliases entry point
    :param callable after: Optional callable for load_after list entry point
    """

    plugin_pkg = b'demoplugin{0}'.format(uuid.uuid4().hex)
    pkg = types.ModuleType(plugin_pkg)
    sys.modules[plugin_pkg] = pkg
    module_name = b'{0}.{1}'.format(plugin_pkg, 'demo')
    plugin = types.ModuleType(module_name)
    setattr(pkg, 'demo', plugin)
    sys.modules[module_name] = plugin

    metadata = {}
    entry_lines = []

    if reg is not None:
      setattr(plugin, 'foo', reg)
      entry_lines.append('register_goals = {}:foo\n'.format(module_name))

    if alias is not None:
      setattr(plugin, 'bar', alias)
      entry_lines.append('build_file_aliases = {}:bar\n'.format(module_name))

    if after is not None:
      setattr(plugin, 'baz', after)
      entry_lines.append('load_after = {}:baz\n'.format(module_name))

    if entry_lines:
      entry_data = '[pantsbuild.plugin]\n{}\n'.format('\n'.join(entry_lines))
      metadata = {'entry_points.txt': entry_data}

    return Distribution(project_name=name, version=version, metadata=MockMetadata(metadata))

  def load_plugins(self, plugins):
    load_plugins(self.build_configuration, plugins, self.working_set)

  def test_plugin_load_and_order(self):
    d1 = self.get_mock_plugin('demo1', '0.0.1', after=lambda: ['demo2'])
    d2 = self.get_mock_plugin('demo2', '0.0.3')
    self.working_set.add(d1)

    # Attempting to load 'demo1' then 'demo2' should fail as 'demo1' requires 'after'=['demo2'].
    with self.assertRaises(PluginLoadOrderError):
      self.load_plugins(['demo1', 'demo2'])

    # Attempting to load 'demo2' first should fail as it is not (yet) installed.
    with self.assertRaises(PluginNotFound):
      self.load_plugins(['demo2', 'demo1'])

    # Installing demo2 and then loading in the correct order should work, though.
    self.working_set.add(d2)
    self.load_plugins(['demo2>=0.0.2', 'demo1'])

    # But asking for a bad (not installed) version fails.
    with self.assertRaises(VersionConflict):
      self.load_plugins(['demo2>=0.0.5'])

  def test_plugin_installs_goal(self):
    def reg_goal():
      Goal.by_name('plugindemo').install(TaskRegistrar('foo', DummyTask))
    self.working_set.add(self.get_mock_plugin('regdemo', '0.0.1', reg=reg_goal))

    # Start without the custom goal.
    self.assertEqual(0, len(Goal.by_name('plugindemo').ordered_task_names()))

    # Load plugin which registers custom goal.
    self.load_plugins(['regdemo'])

    # Now the custom goal exists.
    self.assertEqual(1, len(Goal.by_name('plugindemo').ordered_task_names()))
    self.assertEqual('foo', Goal.by_name('plugindemo').ordered_task_names()[0])

  def test_plugin_installs_alias(self):
    def reg_alias():
      return BuildFileAliases(targets={'pluginalias': DummyTarget},
                              objects={'FROMPLUGIN1': DummyObject1,
                                       'FROMPLUGIN2': DummyObject2})
    self.working_set.add(self.get_mock_plugin('aliasdemo', '0.0.1', alias=reg_alias))

    # Start with no aliases.
    self.assert_empty_aliases()

    # Now load the plugin which defines aliases.
    self.load_plugins(['aliasdemo'])

    # Aliases now exist.
    registered_aliases = self.build_configuration.registered_aliases()
    self.assertEqual(DummyTarget, registered_aliases.target_types['pluginalias'])
    self.assertEqual(DummyObject1, registered_aliases.objects['FROMPLUGIN1'])
    self.assertEqual(DummyObject2, registered_aliases.objects['FROMPLUGIN2'])
    self.assertEqual(self.build_configuration.subsystems(), {DummySubsystem1, DummySubsystem2})

  def test_subsystems(self):
    def global_subsystems():
      return {DummySubsystem1, DummySubsystem2}
    with self.create_register(global_subsystems=global_subsystems) as backend_package:
      load_backend(self.build_configuration, backend_package)
      self.assertEqual(self.build_configuration.subsystems(),
                       {DummySubsystem1, DummySubsystem2})
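
The tests above build Distributions around a MockMetadata helper that is referenced but never shown. It only needs to implement the small slice of the pkg_resources metadata-provider interface that Distribution.get_entry_map exercises. A minimal sketch, assuming the dict maps metadata file names to file contents (the tests' real definition may differ):

from pkg_resources import yield_lines

class MockMetadata(object):
  """Dict-backed stand-in for a pkg_resources metadata provider."""

  def __init__(self, metadata):
    self.metadata = metadata

  def has_metadata(self, name):
    return name in self.metadata

  def get_metadata(self, name):
    return self.metadata[name]

  def get_metadata_lines(self, name):
    return yield_lines(self.get_metadata(name))

With a provider like this, dist.get_entry_map('pantsbuild.plugin') parses the entry_points.txt payload assembled by get_mock_plugin, which is how a plugin loader can discover the register_goals, build_file_aliases and load_after callables.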
Example #37
class PEXEnvironment(Resolver):
  @classmethod
  def _log(cls, msg, *args, **kw):
    PEX.debug(msg)

  def __init__(self, pex, pex_info):
    self._pex_info = pex_info
    subcaches = sum([
      [os.path.join(pex, pex_info.internal_cache)],
      [cache for cache in pex_info.egg_caches],
      [pex_info.install_cache] if pex_info.install_cache else []],  # no stray [] entry when unset
      [])
    self._activated = False
    super(PEXEnvironment, self).__init__(
      caches=subcaches,
      install_cache=pex_info.install_cache,
      fetcher_provider=PEXEnvironment.get_fetcher_provider(pex_info))

  @classmethod
  def get_fetcher_provider(cls, pex_info):
    def fetcher_provider():
      from twitter.common.python.fetcher import Fetcher
      cls._log('Initializing fetcher:')
      cls._log('  repositories: %s' % ' '.join(pex_info.repositories))
      cls._log('       indices: %s' % ' '.join(pex_info.indices))
      cls._log('     with pypi: %s' % pex_info.allow_pypi)
      return Fetcher(
        repositories = pex_info.repositories,
        indices = pex_info.indices,
        external = pex_info.allow_pypi,
        download_cache = pex_info.download_cache
      )
    return fetcher_provider

  @staticmethod
  def _really_zipsafe(dist):
    try:
      pez_info = dist.resource_listdir('/PEZ-INFO')
    except OSError:
      pez_info = []
    if 'zip-safe' in pez_info:
      return True
    egg_metadata = dist.metadata_listdir('/')
    return 'zip-safe' in egg_metadata and 'native_libs.txt' not in egg_metadata

  def activate(self):
    from pkg_resources import Requirement, WorkingSet, DistributionNotFound

    if self._activated:
      return
    if self._pex_info.inherit_path:
      self._ws = WorkingSet(sys.path)

    # TODO(wickman)  Implement dynamic fetchers if pex_info requirements specify dynamic=True
    # or a non-empty repository.
    all_reqs = [Requirement.parse(req) for req, _, _ in self._pex_info.requirements]

    for req in all_reqs:
      with PEX.timed('Resolved %s' % str(req)):
        try:
          resolved = self._ws.resolve([req], env=self)
        except DistributionNotFound as e:
          self._log('Failed to resolve %s: %s' % (req, e))
          if not self._pex_info.ignore_errors:
            raise
          continue
      for dist in resolved:
        with PEX.timed('  Activated %s' % dist):
          if self._really_zipsafe(dist):
            self._ws.add(dist)
            dist.activate()
          else:
            with PEX.timed('    Locally caching %s' % dist):
              new_dist = DistributionHelper.locally_cache(dist, self._pex_info.install_cache)
              new_dist.activate()

    self._activated = True
Example #38
class LoaderTest(unittest.TestCase):
    def setUp(self):
        self.bc_builder = BuildConfiguration.Builder()
        self.working_set = WorkingSet()
        for entry in working_set.entries:
            self.working_set.add_entry(entry)

    @contextmanager
    def create_register(
        self,
        build_file_aliases=None,
        register_goals=None,
        global_subsystems=None,
        rules=None,
        target_types=None,
        module_name="register",
    ):

        package_name = f"__test_package_{uuid.uuid4().hex}"
        self.assertFalse(package_name in sys.modules)

        package_module = types.ModuleType(package_name)
        sys.modules[package_name] = package_module
        try:
            register_module_fqn = f"{package_name}.{module_name}"
            register_module = types.ModuleType(register_module_fqn)
            setattr(package_module, module_name, register_module)
            sys.modules[register_module_fqn] = register_module

            def register_entrypoint(function_name, function):
                if function:
                    setattr(register_module, function_name, function)

            register_entrypoint("build_file_aliases", build_file_aliases)
            register_entrypoint("global_subsystems", global_subsystems)
            register_entrypoint("register_goals", register_goals)
            register_entrypoint("rules", rules)
            register_entrypoint("target_types", target_types)

            yield package_name
        finally:
            del sys.modules[package_name]

    def assert_empty(self):
        build_configuration = self.bc_builder.create()
        registered_aliases = build_configuration.registered_aliases
        self.assertEqual(0, len(registered_aliases.objects))
        self.assertEqual(
            0, len(registered_aliases.context_aware_object_factories))
        self.assertEqual(build_configuration.optionables, FrozenOrderedSet())
        self.assertEqual(0, len(build_configuration.rules))
        self.assertEqual(0, len(build_configuration.target_types))

    def test_load_valid_empty(self):
        with self.create_register() as backend_package:
            load_backend(self.bc_builder, backend_package)
            self.assert_empty()

    def test_load_valid_partial_aliases(self):
        aliases = BuildFileAliases(objects={
            "obj1": DummyObject1,
            "obj2": DummyObject2
        })
        with self.create_register(
                build_file_aliases=lambda: aliases) as backend_package:
            load_backend(self.bc_builder, backend_package)
            build_configuration = self.bc_builder.create()
            registered_aliases = build_configuration.registered_aliases
            self.assertEqual(DummyObject1, registered_aliases.objects["obj1"])
            self.assertEqual(DummyObject2, registered_aliases.objects["obj2"])
            self.assertEqual(build_configuration.optionables,
                             FrozenOrderedSet([DummySubsystem]))

    def test_load_invalid_entrypoint(self):
        def build_file_aliases(bad_arg):
            return BuildFileAliases()

        with self.create_register(
                build_file_aliases=build_file_aliases) as backend_package:
            with self.assertRaises(BuildConfigurationError):
                load_backend(self.bc_builder, backend_package)

    def test_load_invalid_module(self):
        with self.create_register(module_name="register2") as backend_package:
            with self.assertRaises(BuildConfigurationError):
                load_backend(self.bc_builder, backend_package)

    def test_load_missing_plugin(self):
        with self.assertRaises(PluginNotFound):
            self.load_plugins(["Foobar"])

    @staticmethod
    def get_mock_plugin(name,
                        version,
                        reg=None,
                        alias=None,
                        after=None,
                        rules=None,
                        target_types=None):
        """Make a fake Distribution (optionally with entry points)

        Note the entry points do not actually point to code in the returned distribution --
        the distribution does not even have a location and does not contain any code, just metadata.

        A module is synthesized on the fly and installed into sys.modules under a random name.
        If optional entry point callables are provided, they are added as attributes of the
        module, and entry points referencing them by name (foo/bar/baz in the fake module) are
        added to the mocked metadata of the returned dist.

        :param string name: project_name for distribution (see pkg_resources)
        :param string version: version for distribution (see pkg_resources)
        :param callable reg: Optional callable for goal registration entry point
        :param callable alias: Optional callable for build_file_aliases entry point
        :param callable after: Optional callable for load_after list entry point
        :param callable rules: Optional callable for rules entry point
        :param callable target_types: Optional callable for target_types entry point
        """

        plugin_pkg = f"demoplugin{uuid.uuid4().hex}"
        pkg = types.ModuleType(plugin_pkg)
        sys.modules[plugin_pkg] = pkg
        module_name = f"{plugin_pkg}.demo"
        plugin = types.ModuleType(module_name)
        setattr(pkg, "demo", plugin)
        sys.modules[module_name] = plugin

        metadata = {}
        entry_lines = []

        if reg is not None:
            setattr(plugin, "foo", reg)
            entry_lines.append(f"register_goals = {module_name}:foo\n")

        if alias is not None:
            setattr(plugin, "bar", alias)
            entry_lines.append(f"build_file_aliases = {module_name}:bar\n")

        if after is not None:
            setattr(plugin, "baz", after)
            entry_lines.append(f"load_after = {module_name}:baz\n")

        if rules is not None:
            setattr(plugin, "qux", rules)
            entry_lines.append(f"rules = {module_name}:qux\n")

        if target_types is not None:
            setattr(plugin, "tofu", target_types)
            entry_lines.append(f"target_types = {module_name}:tofu\n")

        if entry_lines:
            entry_data = "[pantsbuild.plugin]\n{}\n".format(
                "\n".join(entry_lines))
            metadata = {"entry_points.txt": entry_data}

        return Distribution(project_name=name,
                            version=version,
                            metadata=MockMetadata(metadata))

    def load_plugins(self, plugins):
        load_plugins(self.bc_builder, plugins, self.working_set)

    def test_plugin_load_and_order(self):
        d1 = self.get_mock_plugin("demo1", "0.0.1", after=lambda: ["demo2"])
        d2 = self.get_mock_plugin("demo2", "0.0.3")
        self.working_set.add(d1)

        # Attempting to load 'demo1' then 'demo2' should fail as 'demo1' requires 'after'=['demo2'].
        with self.assertRaises(PluginLoadOrderError):
            self.load_plugins(["demo1", "demo2"])

        # Attempting to load 'demo2' first should fail as it is not (yet) installed.
        with self.assertRaises(PluginNotFound):
            self.load_plugins(["demo2", "demo1"])

        # Installing demo2 and then loading in the correct order should work, though.
        self.working_set.add(d2)
        self.load_plugins(["demo2>=0.0.2", "demo1"])

        # But asking for a bad (not installed) version fails.
        with self.assertRaises(VersionConflict):
            self.load_plugins(["demo2>=0.0.5"])

    def test_plugin_installs_alias(self):
        def reg_alias():
            return BuildFileAliases(objects={
                "FROMPLUGIN1": DummyObject1,
                "FROMPLUGIN2": DummyObject2
            })

        self.working_set.add(
            self.get_mock_plugin("aliasdemo", "0.0.1", alias=reg_alias))

        # Start with no aliases.
        self.assert_empty()

        # Now load the plugin which defines aliases.
        self.load_plugins(["aliasdemo"])

        # Aliases now exist.
        build_configuration = self.bc_builder.create()
        registered_aliases = build_configuration.registered_aliases
        self.assertEqual(DummyObject1,
                         registered_aliases.objects["FROMPLUGIN1"])
        self.assertEqual(DummyObject2,
                         registered_aliases.objects["FROMPLUGIN2"])
        self.assertEqual(build_configuration.optionables,
                         FrozenOrderedSet([DummySubsystem]))

    def test_rules(self):
        def backend_rules():
            return [example_rule]

        with self.create_register(rules=backend_rules) as backend_package:
            load_backend(self.bc_builder, backend_package)
            self.assertEqual(self.bc_builder.create().rules,
                             FrozenOrderedSet([example_rule.rule]))

        def plugin_rules():
            return [example_plugin_rule]

        self.working_set.add(
            self.get_mock_plugin("this-plugin-rules",
                                 "0.0.1",
                                 rules=plugin_rules))
        self.load_plugins(["this-plugin-rules"])
        self.assertEqual(
            self.bc_builder.create().rules,
            FrozenOrderedSet([example_rule.rule, example_plugin_rule.rule]),
        )

    def test_target_types(self):
        def target_types():
            return [DummyTarget, DummyTarget2]

        with self.create_register(
                target_types=target_types) as backend_package:
            load_backend(self.bc_builder, backend_package)
            assert self.bc_builder.create().target_types == FrozenOrderedSet(
                [DummyTarget, DummyTarget2])

        class PluginTarget(Target):
            alias = "plugin_tgt"
            core_fields = ()

        def plugin_targets():
            return [PluginTarget]

        self.working_set.add(
            self.get_mock_plugin("new-targets",
                                 "0.0.1",
                                 target_types=plugin_targets))
        self.load_plugins(["new-targets"])
        assert self.bc_builder.create().target_types == FrozenOrderedSet(
            [DummyTarget, DummyTarget2, PluginTarget])

    def test_backend_plugin_ordering(self):
        def reg_alias():
            return BuildFileAliases(objects={"override-alias": DummyObject2})

        self.working_set.add(
            self.get_mock_plugin("pluginalias", "0.0.1", alias=reg_alias))
        plugins = ["pluginalias==0.0.1"]
        aliases = BuildFileAliases(objects={"override-alias": DummyObject1})
        with self.create_register(
                build_file_aliases=lambda: aliases) as backend_module:
            backends = [backend_module]
            build_configuration = load_backends_and_plugins(
                plugins,
                self.working_set,
                backends,
                bc_builder=self.bc_builder)
        # The backend loads first, then the plugins, so the alias registered by
        # the plugin overrides the alias registered by the backend.
        registered_aliases = build_configuration.registered_aliases
        self.assertEqual(DummyObject2,
                         registered_aliases.objects["override-alias"])
Example #39
class PEXEnvironment(Environment):
  class Subcache(object):
    def __init__(self, path, env):
      self._activated = False
      self._path = path
      self._env = env

    @property
    def activated(self):
      return self._activated

    def activate(self):
      if not self._activated:
        with TRACER.timed('Activating cache %s' % self._path):
          for dist in find_distributions(self._path):
            if self._env.can_add(dist):
              self._env.add(dist)
        self._activated = True

  @staticmethod
  def _really_zipsafe(dist):
    try:
      pez_info = dist.resource_listdir('/PEZ-INFO')
    except OSError:
      pez_info = []
    if 'zip-safe' in pez_info:
      return True
    egg_metadata = dist.metadata_listdir('/')
    return 'zip-safe' in egg_metadata and 'native_libs.txt' not in egg_metadata

  def __init__(self, pex, pex_info, platform=Platform.current(), python=Platform.python()):
    subcaches = sum([
      [os.path.join(pex, pex_info.internal_cache)],
      [cache for cache in pex_info.egg_caches],
      [pex_info.install_cache] if pex_info.install_cache else []],  # no stray [] entry when unset
      [])
    self._pex_info = pex_info
    self._activated = False
    self._subcaches = [self.Subcache(cache, self) for cache in subcaches]
    self._ws = WorkingSet([])
    with TRACER.timed('Calling environment super'):
      super(PEXEnvironment, self).__init__(search_path=[], platform=platform, python=python)

  def resolve(self, requirements, ignore_errors=False):
    reqs = maybe_requirement_list(requirements)
    resolved = OrderedSet()
    for req in reqs:
      with TRACER.timed('Resolved %s' % req):
        try:
          distributions = self._ws.resolve([req], env=self)
        except DistributionNotFound as e:
          TRACER.log('Failed to resolve %s: %s' % (req, e))
          if not ignore_errors:
            raise
          continue
        resolved.update(distributions)
    return list(resolved)

  def can_add(self, dist):
    return Platform.distribution_compatible(dist, self.python, self.platform)

  def best_match(self, req, *ignore_args, **ignore_kwargs):
    while True:
      resolved_req = super(PEXEnvironment, self).best_match(req, self._ws)
      if resolved_req:
        return resolved_req
      for subcache in self._subcaches:
        if not subcache.activated:
          subcache.activate()
          break
      else:
        # TODO(wickman)  Add per-requirement optional/ignore_errors flag.
        print('Failed to resolve %s, your installation may not work properly.' % req,
            file=sys.stderr)
        break

  def activate(self):
    if self._activated:
      return
    if self._pex_info.inherit_path:
      self._ws = WorkingSet(sys.path)

    # TODO(wickman)  Implement dynamic fetchers if pex_info requirements specify dynamic=True
    # or a non-empty repository.
    all_reqs = [Requirement.parse(req) for req, _, _ in self._pex_info.requirements]

    for req in all_reqs:
      with TRACER.timed('Resolved %s' % str(req)):
        try:
          resolved = self._ws.resolve([req], env=self)
        except DistributionNotFound as e:
          TRACER.log('Failed to resolve %s: %s' % (req, e))
          if not self._pex_info.ignore_errors:
            raise
          continue
      for dist in resolved:
        with TRACER.timed('  Activated %s' % dist):
          if os.environ.get('PEX_FORCE_LOCAL', not self._really_zipsafe(dist)):
            with TRACER.timed('    Locally caching'):
              new_dist = DistributionHelper.maybe_locally_cache(dist, self._pex_info.install_cache)
              new_dist.activate()
          else:
            self._ws.add(dist)
            dist.activate()

    self._activated = True
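
Stripped of the laziness, the Subcache machinery above combines two pkg_resources primitives: find_distributions to walk a cache directory, and Environment.add gated by Environment.can_add. best_match then only pays the cost of scanning the next cache once resolution has failed against everything scanned so far. A minimal standalone sketch, with a hypothetical cache path:

from pkg_resources import Environment, find_distributions

def activate_cache(env, path):
  # What Subcache.activate boils down to: register every compatible
  # distribution found under one cache directory with the Environment.
  for dist in find_distributions(path):
    if env.can_add(dist):  # python/platform compatibility check
      env.add(dist)

env = Environment(search_path=[])
activate_cache(env, '/path/to/pex/.deps')  # hypothetical cache directory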