def _initialize_entry_point_group(entrypoint_group):
    """Scan an entry-point group and record an MMEntryPoint per entry.

    Populates ``_ENTRYPOINT_GROUPS[entrypoint_group]`` with a mapping of
    entry-point name -> MMEntryPoint, flagging entries whose distribution
    requirements conflict with the currently installed versions.
    """
    global _WS

    installed = _installed_versions()
    if _WS is None:
        # Lazily build the working sets on first use.
        _initialize_master_working_set()
        _WS = WorkingSet()

    # Conflicts are a property of the distribution, not the individual
    # entry point, so compute them once per egg and reuse.
    per_egg_conflicts = {}
    entries = {}
    for entry in _WS.iter_entry_points(entrypoint_group):
        egg = entry.dist.egg_name()
        if egg in per_egg_conflicts:
            conflicts = per_egg_conflicts[egg]
        else:
            conflicts = _conflicts(entry.dist.requires(), installed)
            per_egg_conflicts[egg] = conflicts
        if conflicts:
            LOG.error('{} not loadable: {}'.format(
                entry.name, ', '.join(conflicts)))
        entries[entry.name] = MMEntryPoint(
            ep=entry,
            name=entry.name,
            conflicts=conflicts,
            loadable=not conflicts,
        )
    _ENTRYPOINT_GROUPS[entrypoint_group] = entries
def load_entry_points(
    group: str,
    type_constraint: Optional[type] = None,
    working_set: pkg_resources.WorkingSet = pkg_resources.working_set,
) -> Dict[str, callable]:
    """Load every entry point registered under *group*.

    Args:
        group: The entry-point group name to scan.
        type_constraint: Optional class; every loaded entry point must be
            a subclass of it, otherwise ``TypeError`` is raised.
        working_set: The ``pkg_resources`` working set to scan; defaults
            to the global working set.

    Returns:
        Mapping of entry-point name to the loaded object.

    Raises:
        TypeError: If a loaded entry point violates *type_constraint*.
        Exception: Re-raised from ``entry_point.load()`` on failure.
    """
    entry_points = {}
    log.info(f'Loading entry points for "{group}"')
    for entry_point in working_set.iter_entry_points(group):
        log.debug(f'Loading entry point "{entry_point.name}" from "{group}"')
        try:
            loaded = entry_point.load()
        except Exception as e:
            msg = (f'Failed to load Entry point "{entry_point.name}" from '
                   f'"{group}": {e}')
            log.error(msg)
            # Bare raise preserves the original traceback untouched.
            raise
        if type_constraint and not issubclass(loaded, type_constraint):
            # BUG FIX: a stray trailing comma previously turned this
            # message into a one-element tuple, so the TypeError carried
            # a tuple instead of the formatted string.
            msg = (f'Entry Point "{entry_point.name}" from "{group}" does not '
                   f'match type constraint for "{type_constraint.__module__}.'
                   f'{type_constraint.__name__}".')
            log.error(msg)
            raise TypeError(msg)
        log.debug(f'Successfully loaded "{entry_point.name}" from "{group}"')
        entry_points[entry_point.name] = loaded
    log.debug(f'Finished loading {len(entry_points)} from "{group}"')
    return entry_points
def _initialize_entry_point_group(entrypoint_group):
    """Scan an entry-point group and record an MMEntryPoint per entry.

    Builds the installed-distribution map from the global working set,
    then stores a name -> MMEntryPoint mapping for the group in
    ``_ENTRYPOINT_GROUPS``, flagging entries with requirement conflicts.
    """
    global _WS

    installed = {dist.project_name: dist for dist in working_set}
    if _WS is None:
        _WS = WorkingSet()

    # Conflict lists are per-distribution; cache by egg name so each
    # distribution's requirements are checked only once.
    conflict_cache = {}
    group_entries = {}
    for entry in _WS.iter_entry_points(entrypoint_group):
        egg = entry.dist.egg_name()
        conflicts = conflict_cache.get(egg)
        if conflicts is None:
            conflicts = _conflicts(entry.dist.requires(), installed)
            conflict_cache[egg] = conflicts
        if conflicts:
            LOG.error('{} not loadable: {}'.format(
                entry.name, ', '.join(conflicts)))
        group_entries[entry.name] = MMEntryPoint(
            ep=entry,
            name=entry.name,
            conflicts=conflicts,
            loadable=not conflicts,
        )
    _ENTRYPOINT_GROUPS[entrypoint_group] = group_entries
def make_zapps(dirname, lib):
    """Build a ``<name>.pyz`` zipapp for each console_scripts entry point
    found under *dirname*.

    Args:
        dirname: Directory scanned (as a working-set entry) for installed
            distributions.
        lib: Library location forwarded to ``mainfile()``.
    """
    ws = WorkingSet([dirname])
    for ep in ws.iter_entry_points(group='console_scripts'):
        output = ep.name + '.pyz'
        with ZipFile(output, 'w') as z:
            # mainfile() renders the __main__ stub from the entry point
            # itself; the previously computed "module:attr" string local
            # was never used and has been removed.
            z.writestr('__main__.py', mainfile(ep, lib))
def make_zipapp(main, output, req):
    """Pip-install *req* into a temporary directory and package the
    result as a zipapp archive.

    Args:
        main: ``"module:callable"`` entry for the archive's ``__main__``;
            when ``None`` it is derived from the first console_scripts
            entry point belonging to ``req[0]``.
        output: Output ``.pyz`` path; when ``None`` (and *main* was
            ``None``) it is derived from the entry point's name.
        req: Sequence of pip requirement strings; ``req[0]`` names the
            distribution whose entry point supplies *main*/*output*.

    Raises:
        subprocess.CalledProcessError: If the pip install fails.
        ValueError: If no matching console_scripts entry point is found.
    """
    with tempfile.TemporaryDirectory() as target:
        # check_call: fail loudly if pip cannot install, rather than
        # silently packaging an empty or partial tree.
        subprocess.check_call([
            sys.executable, '-m', 'pip', 'install',
            '--target', target,
        ] + list(req))
        if main is None:
            ws = WorkingSet([target])
            r = Requirement.parse(req[0])
            for ep in ws.iter_entry_points(group='console_scripts'):
                # Requirement.__contains__ accepts a Distribution, so
                # this keeps only entry points shipped by req[0] itself.
                if ep.dist in r:
                    main = ep.module_name + ':' + ('.'.join(ep.attrs))
                    if output is None:
                        output = ep.name + '.pyz'
                    break
            if main is None:
                # Previously this fell through with main/output still
                # None and failed obscurely inside zipapp.
                raise ValueError(
                    'no console_scripts entry point found for %r' % req[0])
        zipapp.create_archive(target, output, main=main)
def generate_integration_working_set(working_dir,
                                     registry_id='calmjs.module.simulated',
                                     pkgman_filename='package.json',
                                     extras_calmjs_key='fake_modules',
                                     extra_working_sets=sys.path):
    """
    Generate a comprehensive integration testing environment for test
    cases in other packages that integrates with calmjs.

    Arguments:

    working_dir
        The working directory to write all the distribution information
        and dummy test scripts to.
    registry_id
        The registry id to be used for the dummy module registry.
        Default is 'calmjs.module.simulated'
    pkgman_filename
        The package manager's expected filename.  Defaults to the npm
        default of 'package.json'.
    extras_calmjs_key
        The extras keys for the extras_calmjs definition.  Defaults to
        fake_modules.

    Returns the mock working set.
    """
    # NOTE(review): extra_working_sets defaults to sys.path — a shared
    # mutable default; callers mutating it would affect later calls.
    # TODO confirm intended.
    from calmjs.loaderplugin import MODULE_LOADER_SUFFIX
    from calmjs.dist import EXTRAS_CALMJS_JSON

    def make_entry_points(registry_id, *raw):
        # Render an entry_points.txt section: "[group]" header followed
        # by one "name = target" line per raw argument.
        return '\n'.join(['[%s]' % registry_id] + list(raw))

    # Dummy dist 'calmjs.simulated': registers the simulated module
    # registries (main, .tests and the module-loader variant) plus the
    # extras key under 'calmjs.extras_keys'.
    make_dummy_dist(None, (
        ('entry_points.txt', '\n'.join([
            make_entry_points(
                'calmjs.registry',
                registry_id + ' = calmjs.module:ModuleRegistry',
                registry_id + '.tests = calmjs.module:ModuleRegistry',
                registry_id + MODULE_LOADER_SUFFIX +
                ' = calmjs.loaderplugin:ModuleLoaderRegistry',
            ),
            make_entry_points(
                'calmjs.extras_keys',
                '%s = enabled' % extras_calmjs_key,
            ),
        ])),
        ('calmjs_module_registry.txt', registry_id),
    ), 'calmjs.simulated', '420', working_dir=working_dir)

    # Dummy dist 'security': no requirements; exists only to appear in
    # dependency chains below.
    make_dummy_dist(None, (('requires.txt', '\n'.join([])), ),
                    'security', '9999', working_dir=working_dir)

    # Dummy dist 'framework': depends on security and calmjs.simulated,
    # declares npm deps and two extras entries.
    make_dummy_dist(None, (
        ('requires.txt', '\n'.join([
            'security',
            'calmjs.simulated',
        ])),
        (pkgman_filename, json.dumps({
            'dependencies': {
                'left-pad': '~1.1.1',
            },
            'devDependencies': {
                'sinon': '~1.15.0',
            },
        })),
        ('entry_points.txt', make_entry_points(
            registry_id,
            'framework = framework',
        )),
        (EXTRAS_CALMJS_JSON, json.dumps({
            extras_calmjs_key: {
                'jquery': 'jquery/dist/jquery.min.js',
                'underscore': 'underscore/underscore-min.js',
            },
        })),
    ), 'framework', '2.4', working_dir=working_dir)

    # Dummy dist 'widget': depends on framework; also registers a
    # module-loader entry point ('css = css[css]').
    make_dummy_dist(None, (
        ('requires.txt', '\n'.join([
            'framework>=2.1',
        ])),
        (pkgman_filename, json.dumps({
            'dependencies': {
                'jquery': '~2.0.0',
                'underscore': '~1.7.0',
            },
        })),
        (EXTRAS_CALMJS_JSON, json.dumps({
            extras_calmjs_key: {
                'jquery': 'jquery/dist/jquery.min.js',
            },
        })),
        ('entry_points.txt', '\n'.join([
            make_entry_points(
                registry_id,
                'widget = widget',
            ),
            make_entry_points(
                registry_id + MODULE_LOADER_SUFFIX,
                'css = css[css]',
            )
        ])),
    ), 'widget', '1.1', working_dir=working_dir)

    # Dummy dist 'forms': depends on framework and widget.
    make_dummy_dist(None, (
        ('requires.txt', '\n'.join([
            'framework>=2.2',
            'widget>=1.0',
        ])),
        (pkgman_filename, json.dumps({
            'dependencies': {
                'backbone': '~1.3.0',
                'jquery-ui': '~1.12.0',
            },
        })),
        ('entry_points.txt', make_entry_points(
            registry_id,
            'forms = forms',
        )),
    ), 'forms', '1.6', working_dir=working_dir)

    # Dummy dist 'service': includes a deliberately bad extras dir key
    # ('_bad_dir_') to exercise unsupported-path handling.
    make_dummy_dist(None, (
        ('requires.txt', '\n'.join([
            'framework>=2.1',
        ])),
        (pkgman_filename, json.dumps({
            'dependencies': {
                'underscore': '~1.8.0',
            },
            'devDependencies': {
                'sinon': '~1.17.0',
            },
        })),
        (EXTRAS_CALMJS_JSON, json.dumps({
            extras_calmjs_key: {
                'underscore': 'underscore/underscore.js',
            },
            '_bad_dir_': {
                'unsupported': 'unsupported',
            },
        })),
        ('entry_points.txt', make_entry_points(
            registry_id,
            'service = service',
            'service.rpc = service.rpc',
        )),
    ), 'service', '1.1', working_dir=working_dir)

    # Dummy dist 'site': top-level consumer of framework/widget/forms;
    # registers no module entry points of its own.
    make_dummy_dist(None, (
        ('requires.txt', '\n'.join([
            'framework>=2.1',
            'widget>=1.1',
            'forms>=1.6',
        ])),
        (pkgman_filename, json.dumps({
            'name': 'site',
            'dependencies': {
                'underscore': '~1.8.0',
                'jquery': '~3.0.0',
            },
        })),
        (EXTRAS_CALMJS_JSON, json.dumps({
            extras_calmjs_key: {
                'jquery': 'jquery/dist/jquery.js',
                'underscore': 'underscore/underscore.js',
            },
        })),
    ), 'site', '2.0', working_dir=working_dir)

    # The mocked working set, built from the freshly written dists plus
    # the extra entries (defaults to sys.path).
    mock_working_set = WorkingSet([working_dir] + extra_working_sets)

    # (path segments, JS source) pairs for the dummy module files; the
    # source is dedent()ed + lstrip()ed before being written out.
    contents = (
        (('framework', 'lib.js'), '''
            exports.Core = 'framework.lib.Core';
        '''),
        (('widget', 'core.js'), '''
            var framework_lib = require('framework/lib');
            var Core = framework_lib.Core;
            exports.Core = Core + '/' + 'widget.core.Core';
        '''),
        (('widget', 'richedit.js'), '''
            var core = require('widget/core');
            exports.RichEditWidget = 'widget.richedit.RichEditWidget';
        '''),
        (('widget', 'datepicker.js'), '''
            var _ = require('underscore');
            var core = require('widget/core');
            exports.DatePickerWidget = 'widget.datepicker.DatePickerWidget';
        '''),
        (('forms', 'ui.js'), '''
            var $ = require('jquery');
            var richedit = require('widget/richedit');
            var datepicker = require('widget/datepicker');
            exports.RichForm = [
                'forms.ui.RichForm',
                richedit.RichEditWidget,
                datepicker.DatePickerWidget,
            ];
        '''),
        (('service', 'endpoint.js'), '''
            var framework_lib = require('framework/lib');
            var Core = framework_lib.Core;
            exports.Endpoint = 'service.endpoint.Endpoint';
        '''),
        (('service', 'rpc', 'lib.js'), '''
            var framework_lib = require('framework/lib');
            var Core = framework_lib.Core;
            exports.Library = 'service.rpc.lib.Library';
        '''),
    )

    extras_sources = [
        'jquery/dist/jquery.js',
        'jquery/dist/jquery.min.js',
        'underscore/underscore.js',
        'underscore/underscore-min.js',
    ]

    # Generate the extras, too
    for source in extras_sources:
        fn = source.split('/')
        target = join(working_dir, extras_calmjs_key, *fn)
        base = dirname(target)
        if not isdir(base):
            makedirs(base)
        with open(target, 'w') as fd:
            # return a module that returns the name of the file.
            fd.write("define([], function () { return '%s'; });" % source)

    # Accumulators for the mocked registry data below.
    # NOTE(review): records and package_module_map are populated but not
    # returned or otherwise used in this view of the function — verify
    # against the full file / callers.
    records = {}
    package_module_map = {}

    # I kind of want to do something like
    # registry = ModuleRegistry(registry_id, _working_set=mock_working_set)
    # However, this requires actually stubbing out a bunch of other
    # stuff and I really don't want to muck about with imports for a
    # setup... so we are going to mock the registry like so:
    for ep in mock_working_set.iter_entry_points(registry_id):
        package_module_map[ep.dist.project_name] = package_module_map.get(
            ep.dist.project_name, [])
        package_module_map[ep.dist.project_name].append(ep.module_name)

    # Write the dummy JS modules to disk and index them by
    # 'pkg.subpkg' -> {'pkg/mod': absolute path}.
    for fn, content in contents:
        target = join(working_dir, *fn)
        # strip the trailing '.js' to form the module name
        modname = '/'.join(fn)[:-3]
        record_key = '.'.join(fn[:-1])
        records[record_key] = records.get(record_key, {})
        records[record_key][modname] = target
        base = dirname(target)
        if not isdir(base):
            makedirs(base)
        with open(target, 'w') as fd:
            fd.write(textwrap.dedent(content).lstrip())

    # An empty file inside a directory that the 'service' dist's bad
    # extras key points at, for unsupported-target tests.
    makedirs(join(working_dir, '_bad_dir_'))
    with open(join(working_dir, '_bad_dir_', 'unsupported'), 'w') as fd:
        pass

    return mock_working_set
class PasterFactoryTests(TestCase):
    """Tests for the Paster factory and filter functions."""

    def setUp(self):
        super(PasterFactoryTests, self).setUp()
        self.global_config = {"__file__": "/path/to/paster.ini"}
        self.repo_dirs = []
        self.repo_names = ("server_new.export", "server_old.export")
        self.entry_points = {"main": make_app,
                             "gzip": make_gzip_filter,
                             "limitinput": make_limit_input_filter}
        # Probe for paste.deploy BEFORE creating any repo checkouts: if
        # setUp raises (including SkipTest), tearDown never runs, so any
        # repos imported first would leak on skip.
        try:
            # Test import to see if paste.deploy is available
            from paste.deploy.converters import asbool  # noqa: F401
            from pkg_resources import WorkingSet
            self.working_set = WorkingSet()
            self.working_set.add_entry(_BASE_PKG_DIR)
        except ImportError:
            raise SkipTest("paste.deploy not available")
        for rname in self.repo_names:
            self.repo_dirs.append(import_repo_to_dir(rname))

    def tearDown(self):
        super(PasterFactoryTests, self).tearDown()
        for rdir in self.repo_dirs:
            shutil.rmtree(rdir)
        # Drop the handler a test run may have attached to the root
        # logger so later tests start clean.
        root = getLogger()
        if root.handlers:
            root.removeHandler(root.handlers[0])

    def test_cwd(self):
        # make_app with no repo config serves the current directory.
        cwd = os.getcwd()
        os.chdir(self.repo_dirs[0])
        try:
            app = make_app(self.global_config)
        finally:
            # Always restore the CWD, even if make_app raises.
            os.chdir(cwd)
        self.assertIn("/", app.backend.repos)

    def test_badrepo(self):
        # A path that is not a repo is rejected.
        self.assertRaises(IndexError, make_app, self.global_config, foo="/")

    def test_repo(self):
        rname = self.repo_names[0]
        local_config = {rname: self.repo_dirs[0]}
        app = make_app(self.global_config, **local_config)
        self.assertIn("/%s" % rname, app.backend.repos)

    def _get_repo_parents(self):
        """Return the parent directory of each imported repo."""
        repo_parents = []
        for rdir in self.repo_dirs:
            repo_parents.append(os.path.split(rdir)[0])
        return repo_parents

    def test_append_git(self):
        app = make_app(self.global_config, append_git=True,
                       serve_dirs=self._get_repo_parents())
        for rname in self.repo_names:
            self.assertIn("/%s.git" % rname, app.backend.repos)

    def test_serve_dirs(self):
        app = make_app(self.global_config,
                       serve_dirs=self._get_repo_parents())
        for rname in self.repo_names:
            self.assertIn("/%s" % rname, app.backend.repos)

    def _test_wrap(self, factory, wrapper):
        """Assert that factory(...) wraps an app in the given filter."""
        app = make_app(self.global_config,
                       serve_dirs=self._get_repo_parents())
        wrapped_app = factory(self.global_config)(app)
        self.assertIsInstance(wrapped_app, wrapper)

    def test_make_gzip_filter(self):
        self._test_wrap(make_gzip_filter, GunzipFilter)

    def test_make_limit_input_filter(self):
        self._test_wrap(make_limit_input_filter, LimitedInputFilter)

    def test_entry_points(self):
        # The installed entry points must resolve to exactly the factory
        # callables defined in this package.
        test_points = {}
        for group in ("paste.app_factory", "paste.filter_factory"):
            for ep in self.working_set.iter_entry_points(group):
                test_points[ep.name] = ep.load()
        for ep_name, ep in self.entry_points.items():
            self.assertIs(test_points[ep_name], ep)