def _add_working_set_mocks(mocks, virtualenv_dists):
    ws = WorkingSet(entries=[])
    for d in _find_distributions('setuptools', 'zc.buildout'):
        ws.add(d)
    for d in virtualenv_dists:
        ws.add(d)
    default_ws = WorkingSet(entries=ws.entries)
    for d in virtualenv_dists:
        default_ws.add(d)
    _add_mock(mocks, _pkgr_ws, lambda: Mock(
        side_effect=lambda entries: ws if entries else WorkingSet([])))
    _add_mock(mocks, _pkgr_default_ws, lambda: default_ws)
def testCollection(self):
    # empty path should produce no distributions
    ad = pkg_resources.Environment([], platform=None, python=None)
    self.assertEqual(list(ad), [])
    self.assertEqual(ad['FooPkg'], [])
    ad.add(dist_from_fn("FooPkg-1.3_1.egg"))
    ad.add(dist_from_fn("FooPkg-1.4-py2.4-win32.egg"))
    ad.add(dist_from_fn("FooPkg-1.2-py2.4.egg"))

    # Name is in there now
    self.assertTrue(ad['FooPkg'])
    # But only 1 package
    self.assertEqual(list(ad), ['foopkg'])

    # Distributions sort by version
    self.assertEqual(
        [dist.version for dist in ad['FooPkg']],
        ['1.4', '1.3-1', '1.2'],
    )

    # Removing a distribution leaves sequence alone
    ad.remove(ad['FooPkg'][1])
    self.assertEqual(
        [dist.version for dist in ad['FooPkg']],
        ['1.4', '1.2'],
    )

    # And inserting adds them in order
    ad.add(dist_from_fn("FooPkg-1.9.egg"))
    self.assertEqual(
        [dist.version for dist in ad['FooPkg']],
        ['1.9', '1.4', '1.2'],
    )

    ws = WorkingSet([])
    foo12 = dist_from_fn("FooPkg-1.2-py2.4.egg")
    foo14 = dist_from_fn("FooPkg-1.4-py2.4-win32.egg")
    req, = parse_requirements("FooPkg>=1.3")

    # Nominal case: no distros on path, should yield all applicable
    self.assertEqual(ad.best_match(req, ws).version, '1.9')
    # If a matching distro is already installed, should return only that
    ws.add(foo14)
    self.assertEqual(ad.best_match(req, ws).version, '1.4')

    # If the first matching distro is unsuitable, it's a version conflict
    ws = WorkingSet([])
    ws.add(foo12)
    ws.add(foo14)
    self.assertRaises(VersionConflict, ad.best_match, req, ws)

    # If more than one match on the path, the first one takes precedence
    ws = WorkingSet([])
    ws.add(foo14)
    ws.add(foo12)
    ws.add(foo14)
    self.assertEqual(ad.best_match(req, ws).version, '1.4')
def setUp(self):
    # save working directory
    remember_cwd(self)

    # All the pre-made setup.
    stub_mod_call(self, cli)
    app = make_dummy_dist(self, (
        ('requires.txt', '\n'.join([])),
        ('package.json', json.dumps({
            'dependencies': {'jquery': '~1.11.0'},
        })),
    ), 'foo', '1.9.0')
    underscore = make_dummy_dist(self, (
        ('requires.txt', '\n'.join([])),
        ('package.json', json.dumps({
            'dependencies': {'underscore': '~1.8.0'},
        })),
    ), 'underscore', '1.8.0')
    named = make_dummy_dist(self, (
        ('requires.txt', '\n'.join([])),
        ('package.json', json.dumps({
            'dependencies': {'jquery': '~3.0.0'},
            'name': 'named-js',
        })),
    ), 'named', '2.0.0')

    working_set = WorkingSet()
    working_set.add(app, self._calmjs_testing_tmpdir)
    working_set.add(underscore, self._calmjs_testing_tmpdir)
    working_set.add(named, self._calmjs_testing_tmpdir)

    stub_item_attr_value(self, dist, 'default_working_set', working_set)
    stub_mod_check_interactive(self, [cli], True)
    # also save this
    self.inst_interactive = npm.npm.cli_driver.interactive
def _initialize_entry_point_group(entrypoint_group):
    global _WS
    installed = _installed_versions()
    if _WS is None:
        _initialize_master_working_set()
        _WS = WorkingSet()
    cache = {}
    result = {}
    for ep in _WS.iter_entry_points(entrypoint_group):
        egg_name = ep.dist.egg_name()
        conflicts = cache.get(egg_name, None)
        if conflicts is None:
            conflicts = _conflicts(ep.dist.requires(), installed)
            cache[egg_name] = conflicts
        if len(conflicts) != 0:
            LOG.error('{} not loadable: {}'.format(
                ep.name, ', '.join(conflicts)))
        result[ep.name] = MMEntryPoint(ep=ep, name=ep.name,
                                       conflicts=conflicts,
                                       loadable=(len(conflicts) == 0))
    _ENTRYPOINT_GROUPS[entrypoint_group] = result
def plugin_resolution(chroot=None, plugins=None):
    @contextmanager
    def provide_chroot(existing):
        if existing:
            yield existing, False
        else:
            with temporary_dir() as new_chroot:
                yield new_chroot, True

    with provide_chroot(chroot) as (root_dir, create_artifacts):
        env = {'PANTS_BOOTSTRAPDIR': root_dir}
        repo_dir = None
        if plugins:
            repo_dir = os.path.join(root_dir, 'repo')
            env.update(PANTS_PYTHON_REPOS_REPOS='[{!r}]'.format(repo_dir),
                       PANTS_PYTHON_REPOS_INDEXES='[]',
                       PANTS_PYTHON_SETUP_RESOLVER_CACHE_TTL='1')
            plugin_list = []
            for plugin in plugins:
                version = None
                if isinstance(plugin, tuple):
                    plugin, version = plugin
                plugin_list.append(
                    '{}=={}'.format(plugin, version) if version else plugin)
                if create_artifacts:
                    create_plugin(repo_dir, plugin, version)
            env['PANTS_PLUGINS'] = '[{}]'.format(','.join(map(repr, plugin_list)))

        configpath = os.path.join(root_dir, 'pants.ini')
        if create_artifacts:
            touch(configpath)
        options_bootstrapper = OptionsBootstrapper(
            env=env, configpath=configpath, args=[])
        plugin_resolver = PluginResolver(options_bootstrapper)
        cache_dir = plugin_resolver.plugin_cache_dir
        yield plugin_resolver.resolve(WorkingSet(entries=[])), root_dir, repo_dir, cache_dir
def test_environment_marker_evaluation_positive(self):
    ad = pkg_resources.Environment([])
    ws = WorkingSet([])
    Foo = Distribution.from_filename("/foo_dir/Foo-1.2.dist-info")
    ad.add(Foo)
    res = ws.resolve(parse_requirements("Foo;python_version>='2'"), ad)
    assert list(res) == [Foo]
def test_iter_builders_side_effect_build_issue(self):
    mod = ModuleType('calmjs_testing_dummy')
    mod.complete = generic_builder
    self.addCleanup(sys.modules.pop, 'calmjs_testing_dummy')
    sys.modules['calmjs_testing_dummy'] = mod

    working_dir = utils.mkdtemp(self)
    utils.make_dummy_dist(self, (('entry_points.txt', '\n'.join([
        '[calmjs.artifacts]',
        'artifact.js = calmjs_testing_dummy:complete',
    ])),), 'app', '1.0', working_dir=working_dir)
    mock_ws = WorkingSet([working_dir])
    registry = ArtifactRegistry('calmjs.artifacts', _working_set=mock_ws)
    registry.update_artifact_metadata('app', {})

    root = join(working_dir, 'app-1.0.egg-info', 'calmjs_artifacts')
    # clog the build directory so the build cannot happen: create a plain
    # file at the path where the build directory should be
    with open(root, 'w'):
        pass

    ep, toolchain, spec = next(registry.iter_builders_for('app'))
    check = []
    spec.advise('after_prepare', check.append, True)
    with pretty_logging(stream=mocks.StringIO()) as stream:
        with self.assertRaises(ToolchainAbort):
            toolchain(spec)
    self.assertIn(
        "an advice in group 'before_prepare' triggered an abort",
        stream.getvalue())
    # should have stopped at before_prepare
    self.assertFalse(check)
def test_update_artifact_metadata(self):
    # inject dummy module and add cleanup
    mod = ModuleType('calmjs_testing_dummy')
    mod.complete = generic_builder
    self.addCleanup(sys.modules.pop, 'calmjs_testing_dummy')
    sys.modules['calmjs_testing_dummy'] = mod

    working_dir = utils.mkdtemp(self)
    utils.make_dummy_dist(self, (
        ('requires.txt', '\n'.join([
            'calmjs',
        ])),
        ('entry_points.txt', '\n'.join([
            '[calmjs.artifacts]',
            'artifact.js = calmjs_testing_dummy:complete',
        ])),
    ), 'app', '1.0', working_dir=working_dir)
    # mock a version of calmjs within that environment too
    utils.make_dummy_dist(self, (('entry_points.txt', ''),),
                          'calmjs', '1.0', working_dir=working_dir)

    mock_ws = WorkingSet([working_dir])
    registry = ArtifactRegistry('calmjs.artifacts', _working_set=mock_ws)
    registry.update_artifact_metadata('app', {})
    self.assertTrue(exists(registry.metadata.get('app')))

    with pretty_logging(stream=mocks.StringIO()) as s:
        registry.update_artifact_metadata('calmjs', {})
    # the asserted text matches the logged message verbatim
    self.assertIn(
        "package 'calmjs' has not declare any artifacts", s.getvalue())
def setup_virtualenv(self, name='venv'):
    """Performs the initial configuration and package environment setup"""
    try:
        WorkingSet().require('virtualenv')
    except DistributionNotFound:
        pip.main(['install', 'virtualenv'])
    self.activate_virtualenv(named=name)
def main():
    # Dynamically configure the Django settings with the minimum necessary
    # to get Django running tests
    INSTALLED_APPS = ['multigtfs']
    TEST_RUNNER = 'django.test.simple.DjangoTestSuiteRunner'

    # If django-nose is installed, use it
    # You can do things like ./run_tests.py --with-coverage
    try:
        from pkg_resources import WorkingSet, DistributionNotFound
        working_set = WorkingSet()
        working_set.require('django_nose')
    except ImportError:
        print 'setuptools not installed. Weird.'
    except DistributionNotFound:
        print "django-nose not installed. You'd like it."
    else:
        INSTALLED_APPS.append('django_nose')
        TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'

    settings.configure(
        INSTALLED_APPS=INSTALLED_APPS,
        DATABASE_ENGINE='django.contrib.gis.db.backends.spatialite',
        DATABASES={
            'default': {
                'ENGINE': 'django.contrib.gis.db.backends.spatialite',
            }
        },
        DEBUG=True,
        TEMPLATE_DEBUG=True,
        TEST_RUNNER=TEST_RUNNER)

    from django.core import management
    failures = management.call_command('test')  # Will pull sys.argv itself
    sys.exit(failures)
def get_installed_packages(paths=None):
    """Find packages in default or given lib paths"""
    # WorkingSet(paths) covers the packages installed under the given paths;
    # the module-level working_set covers the default sys.path
    ws = WorkingSet(paths) if paths else working_set
    return ["{0}=={1}".format(p.project_name, p.version) for p in ws]
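# Illustrative usage of get_installed_packages above; the pins shown are
# hypothetical and depend entirely on the environment the call runs in.
#
#   get_installed_packages()
#   # -> ['pip==20.0.2', 'setuptools==46.1.3', ...]   (default sys.path)
#   get_installed_packages(['/opt/app/lib'])
#   # -> pins only for distributions found under /opt/app/lib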
def activate(self):
    from pkg_resources import Requirement, WorkingSet, DistributionNotFound

    if self._activated:
        return
    if self._pex_info.inherit_path:
        self._ws = WorkingSet(sys.path)

    # TODO(wickman) Implement dynamic fetchers if pex_info requirements
    # specify dynamic=True or a non-empty repository.
    all_reqs = [Requirement.parse(req)
                for req, _, _ in self._pex_info.requirements]

    for req in all_reqs:
        with PEX.timed('Resolved %s' % str(req)):
            try:
                resolved = self._ws.resolve([req], env=self)
            except DistributionNotFound as e:
                self._log('Failed to resolve %s: %s' % (req, e))
                if not self._pex_info.ignore_errors:
                    raise
                continue
        for dist in resolved:
            with PEX.timed('  Activated %s' % dist):
                if self._really_zipsafe(dist):
                    self._ws.add(dist)
                    dist.activate()
                else:
                    with PEX.timed('    Locally caching %s' % dist):
                        new_dist = DistributionHelper.locally_cache(
                            dist, self._pex_info.install_cache)
                        new_dist.activate()

    self._activated = True
def make_zapps(dirname, lib):
    ws = WorkingSet([dirname])
    for ep in ws.iter_entry_points(group='console_scripts'):
        main = ep.module_name + ':' + '.'.join(ep.attrs)
        output = ep.name + '.pyz'
        with ZipFile(output, 'w') as z:
            z.writestr('__main__.py', mainfile(ep, lib))
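# A minimal sketch of the mainfile helper used above; its real definition is
# not part of this snippet, so this is an assumption rather than the actual
# implementation. It renders a __main__.py stub that puts the bundled lib
# directory on sys.path and invokes the console_scripts callable (assuming a
# single, non-dotted attribute such as "main = mypkg.cli:main").
def mainfile(ep, lib):
    return (
        "import sys\n"
        "sys.path.insert(0, {lib!r})\n"
        "from {module} import {attr}\n"
        "sys.exit({attr}())\n"
    ).format(lib=lib, module=ep.module_name, attr=ep.attrs[0])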
def plugin_resolution(rule_runner: RuleRunner, *, interpreter=None,
                      chroot=None, plugins=None, sdist=True):
    @contextmanager
    def provide_chroot(existing):
        if existing:
            yield existing, False
        else:
            with temporary_dir() as new_chroot:
                yield new_chroot, True

    with provide_chroot(chroot) as (root_dir, create_artifacts):
        env: Dict[str, str] = {}
        repo_dir = None
        if plugins:
            repo_dir = os.path.join(root_dir, "repo")
            env.update(
                PANTS_PYTHON_REPOS_REPOS=f"[{repo_dir!r}]",
                PANTS_PYTHON_REPOS_INDEXES="[]",
                PANTS_PYTHON_SETUP_RESOLVER_CACHE_TTL="1",
            )
            plugin_list = []
            for plugin in plugins:
                version = None
                if isinstance(plugin, tuple):
                    plugin, version = plugin
                plugin_list.append(f"{plugin}=={version}" if version else plugin)
                if create_artifacts:
                    setup_py_args = [
                        "sdist" if sdist else "bdist_wheel", "--dist-dir", "dist/"
                    ]
                    _run_setup_py(rule_runner, plugin, version,
                                  setup_py_args, repo_dir)
            env["PANTS_PLUGINS"] = f"[{','.join(map(repr, plugin_list))}]"
            env["PANTS_PLUGIN_CACHE_DIR"] = os.path.join(
                root_dir, "plugin-cache")

        configpath = os.path.join(root_dir, "pants.toml")
        if create_artifacts:
            touch(configpath)
        args = [f"--pants-config-files=['{configpath}']"]

        options_bootstrapper = OptionsBootstrapper.create(
            env=env, args=args, allow_pantsrc=False)
        plugin_resolver = PluginResolver(
            options_bootstrapper, interpreter=interpreter)
        cache_dir = plugin_resolver.plugin_cache_dir

        working_set = plugin_resolver.resolve(WorkingSet(entries=[]))
        for dist in working_set:
            assert (Path(os.path.realpath(cache_dir))
                    in Path(os.path.realpath(dist.location)).parents)

        yield working_set, root_dir, repo_dir, cache_dir
def sys_install_packages(installed_packages, requirements):
    packages = []
    with open(requirements, "rt") as f:
        for line in f:
            # requirements lines are comma-delimited; the package name comes first
            package = line.strip().split(',')
            packages.append(package[0])

    for i in packages:
        if i in installed_packages:
            log.info("The %s package is already installed" % (i))
            continue

        working_set = WorkingSet()
        try:
            dep = working_set.require('paramiko>=1.0')
        except DistributionNotFound:
            pass

        whoami = os.getlogin()
        if whoami == 'root':
            installPackage = install([i])
            log.info("New installation of %s completed successfully" % (installPackage))
        else:
            try:
                installPackage = subprocess.check_call(
                    ["pip", "install", "--user", i])
                log.info("New installation of %s completed successfully" % (installPackage))
            except Exception:
                try:
                    installPackage = subprocess.check_call(
                        ["pip3", "install", "--user", i])
                    log.info("New installation of %s completed successfully" % (installPackage))
                except Exception:
                    e = sys.exc_info()
                    log.error("an error occurred while installing the %s package: %s" % (i, e))
def test_denormalized_package_names(self):
    working_dir = utils.mkdtemp(self)
    utils.make_dummy_dist(self, (('entry_points.txt', '\n'.join([
        '[calmjs.artifacts]',
        'full.js = calmjs_testbuild:full',
    ])),), 'de_normal_name', '1.0', working_dir=working_dir)
    mock_ws = WorkingSet([working_dir])
    # stub the default working set in calmjs.dist for the resolver to work.
    utils.stub_item_attr_value(self, dist, 'default_working_set', mock_ws)
    # still specify the working set.
    registry = ArtifactRegistry('calmjs.artifacts', _working_set=mock_ws)
    self.assertEqual(
        1, len(list(registry.iter_records_for('de_normal_name'))))
    # also test internal consistency
    self.assertIn('de_normal_name', registry.compat_builders['full'])
    self.assertIn('de_normal_name', registry.packages)

    default = registry.get_artifact_filename('de_normal_name', 'full.js')
    normal = registry.get_artifact_filename(
        safe_name('de_normal_name'), 'full.js')
    self.assertEqual(default, normal)
def test_normcase_registration(self):
    # create an empty working set for a clean-slate test.
    cwd = utils.mkdtemp(self)
    mock_ws = WorkingSet([])
    dist_ = Distribution(cwd, project_name='pkg', version='1.0')
    dist_.egg_info = cwd  # just lazy
    registry = ArtifactRegistry('calmjs.artifacts', _working_set=mock_ws)
    # case sensitivity test; have to patch the normcase at the artifact
    # module with the nt version
    from ntpath import normcase as nt_normcase
    utils.stub_item_attr_value(self, artifact, 'normcase', nt_normcase)
    # using differently cased names for the case sensitivity test.
    c1 = EntryPoint.parse('case.js = dummy_builder:builder1')
    c1.dist = dist_
    c2 = EntryPoint.parse('Case.js = dummy_builder:builder2')
    c2.dist = dist_
    # the path the conflicting entry point resolves to
    ct = join(cwd, 'calmjs_artifacts', 'Case.js')
    with pretty_logging(stream=mocks.StringIO()) as stream:
        registry.register_entry_point(c1)
        registry.register_entry_point(c2)

    log = stream.getvalue()
    self.assertIn(
        "entry point 'Case.js = dummy_builder:builder2' from package "
        "'pkg 1.0' resolves to the path '%s' which was already "
        "registered to entry point 'case.js = dummy_builder:builder1'; "
        "conflicting entry point registration will be ignored." % ct,
        log)
    self.assertIn(
        "the file mapping error is caused by this platform's case-"
        "insensitive filename", log)
def test_remove_from_ws__removes_all_entries():
    ws = WorkingSet([])
    dist1 = create_dist("a", "1.0", location="a10")
    dist2 = create_dist("a", "2.0", location="a20")
    assert dist1 not in ws
    assert dist2 not in ws

    ws.add(dist1)
    assert dist1 in ws
    assert dist1.location in ws.entries
    assert dist2 not in ws
    assert dist2.location not in ws.entries

    ws.add_entry(dist2.location)
    assert dist1 in ws
    assert dist1.location in ws.entries
    assert dist2 not in ws
    assert dist2.location in ws.entries

    dependency.remove_from_ws(ws, dist2)
    assert dist1 not in ws
    assert dist2 not in ws
    assert len(list(ws)) == 0
def test_iter_builders_side_effect(self):
    # inject dummy module and add cleanup
    mod = ModuleType('calmjs_testing_dummy')
    mod.complete = generic_builder
    self.addCleanup(sys.modules.pop, 'calmjs_testing_dummy')
    sys.modules['calmjs_testing_dummy'] = mod

    working_dir = utils.mkdtemp(self)
    utils.make_dummy_dist(self, (('entry_points.txt', '\n'.join([
        '[calmjs.artifacts]',
        'artifact.js = calmjs_testing_dummy:complete',
    ])),), 'app', '1.0', working_dir=working_dir)
    mock_ws = WorkingSet([working_dir])
    registry = ArtifactRegistry('calmjs.artifacts', _working_set=mock_ws)
    registry.update_artifact_metadata('app', {})

    root = join(working_dir, 'app-1.0.egg-info', 'calmjs_artifacts')
    self.assertFalse(exists(root))
    ep, toolchain, spec = next(registry.iter_builders_for('app'))
    self.assertFalse(exists(root))
    # the directory is only created after the toolchain is executed
    toolchain(spec)
    self.assertTrue(exists(root))
def test_marker_evaluation_with_extras_loop(self):
    ad = pkg_resources.Environment([])
    ws = WorkingSet([])
    # Metadata needs to be native strings due to cStringIO behaviour in
    # Python 2.6, so use str().
    a = Distribution.from_filename(
        "/foo_dir/a-0.2.dist-info",
        metadata=Metadata(("METADATA", str("Requires-Dist: c[a]"))))
    b = Distribution.from_filename(
        "/foo_dir/b-0.3.dist-info",
        metadata=Metadata(("METADATA", str("Requires-Dist: c[b]"))))
    c = Distribution.from_filename(
        "/foo_dir/c-1.0.dist-info",
        metadata=Metadata(("METADATA", str("Provides-Extra: a\n"
                                           "Requires-Dist: b;extra=='a'\n"
                                           "Provides-Extra: b\n"
                                           "Requires-Dist: foo;extra=='b'"))))
    foo = Distribution.from_filename("/foo_dir/foo-0.1.dist-info")
    for dist in (a, b, c, foo):
        ad.add(dist)
    res = list(ws.resolve(parse_requirements("a"), ad))
    assert res == [a, c, b, foo]
def test_iter_builders_verify_export_target(self):
    mod = ModuleType('calmjs_testing_dummy')
    mod.complete = generic_builder
    self.addCleanup(sys.modules.pop, 'calmjs_testing_dummy')
    sys.modules['calmjs_testing_dummy'] = mod

    working_dir = utils.mkdtemp(self)
    utils.make_dummy_dist(self, (('entry_points.txt', '\n'.join([
        '[calmjs.artifacts]',
        'artifact.js = calmjs_testing_dummy:complete',
        'invalid.js = calmjs_testing_dummy:complete',
    ])),), 'app', '1.0', working_dir=working_dir)
    mock_ws = WorkingSet([working_dir])

    class FakeArtifactRegistry(ArtifactRegistry):
        def verify_export_target(self, export_target):
            return 'invalid.js' not in export_target

    registry = FakeArtifactRegistry(
        'calmjs.artifacts', _working_set=mock_ws)

    # the invalid.js should be filtered out
    with pretty_logging(stream=mocks.StringIO()) as stream:
        self.assertEqual(1, len(list(registry.iter_builders_for('app'))))
    self.assertIn("invalid.js' has been rejected", stream.getvalue())
def test_LazyModule():
    # create an entry point for taurus.core.util.test.dummy
    w = WorkingSet()
    d = w.find(Requirement.parse('taurus'))
    ep = EntryPoint.parse("dummy_mod = taurus.core.util.test.dummy", dist=d)
    modname = ep.name

    # lazy-load the entry point's module as taurus.dummy_mod
    LazyModule.import_ep(modname, "taurus", ep)

    # check that lazy-loading did not import the entry point modules
    assert modname not in sys.modules
    assert ep.module_name not in sys.modules

    # import the module and check that it is a LazyModule
    import taurus.dummy_mod as lzm
    assert isinstance(lzm, LazyModule)

    # same again
    import taurus.dummy_mod as lzm
    assert isinstance(lzm, LazyModule)

    # now access a member of the lazy module
    assert lzm.foo == 1

    # ...and check that any subsequent import will return a "real" module,
    # not a lazy one
    import taurus.dummy_mod as lzm
    assert not isinstance(lzm, LazyModule)
    assert isinstance(lzm, ModuleType)
def test_resolve_conflicts_with_prior(self):
    """
    A ContextualVersionConflict should be raised when a requirement
    conflicts with a prior requirement for a different package.
    """
    # Create installation where Foo depends on Baz 1.0 and Bar depends on
    # Baz 2.0.
    ws = WorkingSet([])
    md = Metadata(('depends.txt', "Baz==1.0"))
    Foo = Distribution.from_filename("/foo_dir/Foo-1.0.egg", metadata=md)
    ws.add(Foo)
    md = Metadata(('depends.txt', "Baz==2.0"))
    Bar = Distribution.from_filename("/foo_dir/Bar-1.0.egg", metadata=md)
    ws.add(Bar)
    Baz = Distribution.from_filename("/foo_dir/Baz-1.0.egg")
    ws.add(Baz)
    Baz = Distribution.from_filename("/foo_dir/Baz-2.0.egg")
    ws.add(Baz)

    with pytest.raises(VersionConflict) as vc:
        ws.resolve(parse_requirements("Foo\nBar\n"))

    msg = "Baz 1.0 is installed but Baz==2.0 is required by "
    msg += repr(set(['Bar']))
    assert vc.value.report() == msg
def test_config():
    '''Create a Django configuration for running tests'''
    config = base_config()

    # If django-nose is installed, use it
    # You can do things like ./run_tests.py --with-coverage
    try:
        from pkg_resources import WorkingSet, DistributionNotFound
        working_set = WorkingSet()
        working_set.require('django_nose')
    except ImportError:
        print('setuptools not installed. Weird.')
    except DistributionNotFound:
        print("django-nose not installed. You'd like it.")
    else:
        config['INSTALLED_APPS'].append('django_nose')
        config['TEST_RUNNER'] = 'django_nose.NoseTestSuiteRunner'

    # Optionally update the configuration
    try:
        import t_overrides
    except ImportError:
        pass
    else:
        config = t_overrides.update(config)

    return config
def _activate(self):
    self.update_candidate_distributions(
        self.load_internal_cache(self._pex, self._pex_info))

    if not self._pex_info.zip_safe and os.path.isfile(self._pex):
        self.update_module_paths(
            self.force_local(self._pex, self._pex_info))

    # TODO(wickman) Implement dynamic fetchers if pex_info requirements
    # specify dynamic=True or a non-empty repository.
    all_reqs = [
        Requirement.parse(req) for req, _, _ in self._pex_info.requirements
    ]

    working_set = WorkingSet([])

    with TRACER.timed('Resolving %s' % (
            ' '.join(map(str, all_reqs)) if all_reqs else 'empty dependency list')):
        try:
            resolved = working_set.resolve(all_reqs, env=self)
        except DistributionNotFound as e:
            # the unmet requirement is carried in the exception itself
            TRACER.log('Failed to resolve a requirement: %s' % e)
            TRACER.log('Current working set:')
            for dist in working_set:
                TRACER.log('  - %s' % dist)
            raise

    for dist in resolved:
        with TRACER.timed('Activated %s' % dist):
            dist.activate()

    self._working_set = working_set
    self._activated = True
def _activate(self):
    if not self._working_set:
        working_set = WorkingSet([])

        # set up the local .pex environment
        pex_info = self._pex_info.copy()
        pex_info.update(self._pex_info_overrides)
        pex_info.merge_pex_path(self._vars.PEX_PATH)
        self._envs.append(
            PEXEnvironment(self._pex, pex_info, interpreter=self._interpreter))
        # N.B. by this point, `pex_info.pex_path` will contain a single pex path
        # merged from pex_path in `PEX-INFO` and `PEX_PATH` set in the environment.
        # `PEX_PATH` entries written into `PEX-INFO` take precedence over those set
        # in the environment.
        if pex_info.pex_path:
            # set up other environments as specified in pex_path
            for pex_path in filter(None, pex_info.pex_path.split(os.pathsep)):
                pex_info = PexInfo.from_pex(pex_path)
                pex_info.update(self._pex_info_overrides)
                self._envs.append(
                    PEXEnvironment(pex_path, pex_info,
                                   interpreter=self._interpreter))

        # activate all of them
        for env in self._envs:
            for dist in env.activate():
                working_set.add(dist)

        self._working_set = working_set

    return self._working_set
def get_all_entry_points():
    """
    Get all entry points related to `colcon` and any of its extensions.

    :returns: mapping of group names to dictionaries mapping entry point
      names to (distribution, :class:`pkg_resources.EntryPoint`) tuples
    :rtype: dict
    """
    global EXTENSION_POINT_GROUP_NAME
    colcon_extension_points = get_entry_points(EXTENSION_POINT_GROUP_NAME)

    entry_points = defaultdict(dict)
    working_set = WorkingSet()
    for dist in sorted(working_set):
        entry_map = dist.get_entry_map()
        for group_name in entry_map.keys():
            # skip groups which are not registered as extension points
            if group_name not in colcon_extension_points:
                continue

            group = entry_map[group_name]
            for entry_point_name, entry_point in group.items():
                entry_point.group_name = group_name
                if entry_point_name in entry_points[group_name]:
                    previous = entry_points[group_name][entry_point_name]
                    logger.error(
                        "Entry point '{group_name}.{entry_point_name}' is "
                        "declared multiple times, '{entry_point}' "
                        "overwriting '{previous}'".format_map(locals()))
                entry_points[group_name][entry_point_name] = \
                    (dist, entry_point)
    return entry_points
def test_conflict_registration(self):
    # create an empty working set for a clean-slate test.
    cwd = utils.mkdtemp(self)
    mock_ws = WorkingSet([])
    registry = ArtifactRegistry('calmjs.artifacts', _working_set=mock_ws)
    # the path both entry points will resolve to
    st = join(cwd, 'calmjs_artifacts', 'Simple.js')
    dist_ = Distribution(cwd, project_name='pkg', version='1.0')
    dist_.egg_info = cwd  # just lazy
    s1 = EntryPoint.parse('Simple.js = dummy_builder:builder1')
    s1.dist = dist_
    s2 = EntryPoint.parse('Simple.js = dummy_builder:builder2')
    s2.dist = dist_

    with pretty_logging(stream=mocks.StringIO()) as stream:
        registry.register_entry_point(s1)
        # normal registry usage shouldn't be able to do this.
        registry.register_entry_point(s2)

    log = stream.getvalue()
    self.assertIn(
        "entry point 'Simple.js = dummy_builder:builder2' from package "
        "'pkg 1.0' resolves to the path '%s' which was already "
        "registered to entry point 'Simple.js = dummy_builder:builder1'; "
        "conflicting entry point registration will be ignored." % st,
        log)
def build_docs_and_install(name, version, findlinks):  # pragma no cover
    tdir = tempfile.mkdtemp()
    startdir = os.getcwd()
    os.chdir(tdir)
    try:
        tarpath = download_github_tar('OpenMDAO-Plugins', name, version)

        # extract the repo tar file
        tar = tarfile.open(tarpath)
        tar.extractall()
        tar.close()

        files = os.listdir('.')
        files.remove(os.path.basename(tarpath))
        if len(files) != 1:
            raise RuntimeError(
                "after untarring, found multiple directories: %s" % files)

        # build sphinx docs
        os.chdir(files[0])  # should be in distrib directory now
        check_call(['plugin', 'build_docs', files[0]])

        # create an sdist so we can query metadata for distrib dependencies
        check_call([sys.executable, 'setup.py', 'sdist', '-d', '.'])

        if sys.platform.startswith('win'):
            tars = fnmatch.filter(os.listdir('.'), "*.zip")
        else:
            tars = fnmatch.filter(os.listdir('.'), "*.tar.gz")
        if len(tars) != 1:
            raise RuntimeError("should have found a single archive file,"
                               " but found %s instead" % tars)

        check_call(['easy_install', '-NZ', tars[0]])

        # now install any dependencies
        metadict = get_metadata(tars[0])
        reqs = metadict.get('requires', [])
        done = set()
        while reqs:
            r = reqs.pop()
            if r not in done:
                done.add(r)
                ws = WorkingSet()
                req = Requirement.parse(r)
                dist = ws.find(req)
                if dist is None:
                    check_call(['easy_install', '-NZ', '-f', findlinks, r])
                    dist = ws.find(req)
                if dist is None:
                    raise RuntimeError("Couldn't find distribution '%s'" % r)
                dist.activate()
                dct = get_metadata(dist.egg_name().split('-')[0])
                for new_r in dct.get('requires', []):
                    reqs.append(new_r)
    finally:
        os.chdir(startdir)
        shutil.rmtree(tdir, ignore_errors=True)
def find_required(venv, file_):
    pkgdir = os.path.join(os.path.abspath(venv), "lib/python2.7/site-packages")
    working_set = WorkingSet([pkgdir])

    # We need a version of nose & pylint, preferably our version, but if
    # someone insists on adding it to requirements.txt, we should
    # accommodate them.
    nose_fulfilled = False
    pylint_fulfilled = False

    with open(file_, 'r') as fp:
        required = [Requirement.parse(req) for req in fp
                    if not req.startswith("#")]

    requested = []
    for requirement in required:
        if requirement.project_name == 'nose':
            nose_fulfilled = True
        if requirement.project_name == 'pylint':
            pylint_fulfilled = True
        if not version_in_working_set(requirement, working_set):
            requested.append(requirement)

    if not nose_fulfilled:
        requirement = Requirement.parse('nose==1.2.1')
        if not version_in_working_set(requirement, working_set):
            requested.append(requirement)
    if not pylint_fulfilled:
        requirement = Requirement.parse('pylint==0.26.0')
        if not version_in_working_set(requirement, working_set):
            requested.append(requirement)

    return requested
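# A minimal sketch of the version_in_working_set helper called above; its
# definition is not part of this snippet, so this is an assumption about its
# behaviour: report whether the working set already satisfies the exact
# requirement.
def version_in_working_set(requirement, working_set):
    from pkg_resources import VersionConflict
    try:
        # WorkingSet.find returns a matching distribution or None, and
        # raises VersionConflict when a different version is present.
        return working_set.find(requirement) is not None
    except VersionConflict:
        return False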