def main(pkgList):
    working_set = WorkingSet()
    for pkgName, pkgVersion in pkgList.iteritems():
        try:
            depends = working_set.require(pkgName)
        except DistributionNotFound:
            import os
            import urllib2
            print "\n -- Library " + pkgName + " needs to be installed --\n"
            # Prompt for user decision if a package requires installation
            allow = raw_input(" May I install the above library? ([y]/n): ")
            if allow.upper() == "N" or allow.upper() == "NO":
                sys.exit("\n -- Please install package " + pkgName + " manually. Aborting. --\n")
            else:
                try:
                    response = urllib2.urlopen('http://www.google.com', timeout=20)
                    os.system("easy_install-2.7 --user " + pkgName + "==" + pkgVersion)
                    # Important: After installation via easy_install, I must
                    # restart the script for certain new modules (i.e. those
                    # installed via egg, like dendropy) to be properly loaded
                    os.execv(__file__, sys.argv)
                    # TFLs, while nicer, fail because easy_install must be
                    # run as superuser:
                    # from setuptools.command.easy_install import main as install
                    # install([pkg])
                except urllib2.URLError as err:
                    sys.exit("\n -- Please connect to the internet. Aborting. --\n")
        # Eventually, import the module
        exec("import " + pkgName)
        ## Alternative: modules = map(__import__, pkgList)
    ## Completion notice
    print "\n -- P2C2M: Installation of Python libraries complete --\n"
def activate(self):
    if self._activated:
        return
    if self._pex_info.inherit_path:
        self._ws = WorkingSet(sys.path)

    # TODO(wickman) Implement dynamic fetchers if pex_info requirements specify dynamic=True
    # or a non-empty repository.
    all_reqs = [Requirement.parse(req) for req, _, _ in self._pex_info.requirements]

    for req in all_reqs:
        with TRACER.timed('Resolved %s' % str(req)):
            try:
                resolved = self._ws.resolve([req], env=self)
            except DistributionNotFound as e:
                TRACER.log('Failed to resolve %s: %s' % (req, e))
                if not self._pex_info.ignore_errors:
                    raise
                continue
        for dist in resolved:
            with TRACER.timed('  Activated %s' % dist):
                if os.environ.get('PEX_FORCE_LOCAL', not self._really_zipsafe(dist)):
                    with TRACER.timed('    Locally caching'):
                        new_dist = DistributionHelper.maybe_locally_cache(
                            dist, self._pex_info.install_cache)
                        new_dist.activate()
                else:
                    self._ws.add(dist)
                    dist.activate()

    self._activated = True
def sys_install_packages(installed_packages, requirements):
    packages = []
    with open(requirements, "rt") as f:
        for line in f:
            l = line.strip()
            package = l.split(',')
            package = package[0]
            packages.append(package)
    for i in packages:
        if i in installed_packages:
            log.info("The %s package is already installed" % (i))
            continue
        working_set = WorkingSet()
        try:
            dep = working_set.require('paramiko>=1.0')
        except DistributionNotFound:
            pass
        whoami = os.getlogin()
        if whoami == 'root':
            installPackage = install([i])
            log.info("New installation of %s completed successfully" % (installPackage))
        if whoami != 'root':
            try:
                installPackage = subprocess.check_call(["pip", "install", "--user", i])
                log.info("New installation of %s completed successfully" % (installPackage))
            except Exception:
                try:
                    installPackage = subprocess.check_call(["pip3", "install", "--user", i])
                    log.info("New installation of %s completed successfully" % (installPackage))
                except Exception:
                    e = sys.exc_info()
                    log.error("The above error occurred while installing the %s package" % (e,))
def _initialize_entry_point_group(entrypoint_group):
    global _WS

    installed = {d.project_name: d for d in working_set}

    if _WS is None:
        _WS = WorkingSet()

    cache = {}
    result = {}
    for ep in _WS.iter_entry_points(entrypoint_group):
        egg_name = ep.dist.egg_name()

        conflicts = cache.get(egg_name, None)
        if conflicts is None:
            conflicts = _conflicts(ep.dist.requires(), installed)
            cache[egg_name] = conflicts

        if len(conflicts) != 0:
            LOG.error('{} not loadable: {}'.format(
                ep.name, ', '.join(conflicts)))

        result[ep.name] = MMEntryPoint(
            ep=ep,
            name=ep.name,
            conflicts=conflicts,
            loadable=(len(conflicts) == 0)
        )

    _ENTRYPOINT_GROUPS[entrypoint_group] = result
def test_environment_marker_evaluation_positive(self):
    ad = pkg_resources.Environment([])
    ws = WorkingSet([])
    Foo = Distribution.from_filename("/foo_dir/Foo-1.2.dist-info")
    ad.add(Foo)
    res = ws.resolve(parse_requirements("Foo;python_version>='2'"), ad)
    assert list(res) == [Foo]
def test_marker_evaluation_with_extras_loop(self):
    ad = pkg_resources.Environment([])
    ws = WorkingSet([])
    # Metadata needs to be native strings due to cStringIO behaviour in
    # 2.6, so use str().
    a = Distribution.from_filename(
        "/foo_dir/a-0.2.dist-info",
        metadata=Metadata(("METADATA", str("Requires-Dist: c[a]")))
    )
    b = Distribution.from_filename(
        "/foo_dir/b-0.3.dist-info",
        metadata=Metadata(("METADATA", str("Requires-Dist: c[b]")))
    )
    c = Distribution.from_filename(
        "/foo_dir/c-1.0.dist-info",
        metadata=Metadata(("METADATA", str(
            "Provides-Extra: a\n"
            "Requires-Dist: b;extra=='a'\n"
            "Provides-Extra: b\n"
            "Requires-Dist: foo;extra=='b'")))
    )
    foo = Distribution.from_filename("/foo_dir/foo-0.1.dist-info")
    for dist in (a, b, c, foo):
        ad.add(dist)
    res = list(ws.resolve(parse_requirements("a"), ad))
    assert res == [a, c, b, foo]
def _activate(self):
    if not self._working_set:
        working_set = WorkingSet([])

        # set up the local .pex environment
        pex_info = self._pex_info.copy()
        pex_info.update(self._pex_info_overrides)
        pex_info.merge_pex_path(self._vars.PEX_PATH)
        self._envs.append(
            PEXEnvironment(self._pex, pex_info, interpreter=self._interpreter))
        # N.B. by this point, `pex_info.pex_path` will contain a single pex path
        # merged from pex_path in `PEX-INFO` and `PEX_PATH` set in the environment.
        # `PEX_PATH` entries written into `PEX-INFO` take precedence over those set
        # in the environment.
        if pex_info.pex_path:
            # set up other environments as specified in pex_path
            for pex_path in filter(None, pex_info.pex_path.split(os.pathsep)):
                pex_info = PexInfo.from_pex(pex_path)
                pex_info.update(self._pex_info_overrides)
                self._envs.append(
                    PEXEnvironment(pex_path, pex_info, interpreter=self._interpreter))

        # activate all of them
        for env in self._envs:
            for dist in env.activate():
                working_set.add(dist)

        self._working_set = working_set

    return self._working_set
def _activate(self):
    self.update_candidate_distributions(
        self.load_internal_cache(self._pex, self._pex_info))

    if not self._pex_info.zip_safe and os.path.isfile(self._pex):
        self.update_module_paths(
            self.force_local(self._pex, self._pex_info))

    # TODO(wickman) Implement dynamic fetchers if pex_info requirements specify dynamic=True
    # or a non-empty repository.
    all_reqs = [
        Requirement.parse(req) for req, _, _ in self._pex_info.requirements
    ]

    working_set = WorkingSet([])

    # for req in all_reqs:
    with TRACER.timed('Resolving %s' % (
            ' '.join(map(str, all_reqs)) if all_reqs else 'empty dependency list')):
        try:
            resolved = working_set.resolve(all_reqs, env=self)
        except DistributionNotFound as e:
            TRACER.log('Failed to resolve a requirement: %s' % e)
            TRACER.log('Current working set:')
            for dist in working_set:
                TRACER.log('  - %s' % dist)
            raise

    for dist in resolved:
        with TRACER.timed('Activated %s' % dist):
            working_set.add(dist)
            dist.activate()

    return working_set
def main():
    # Dynamically configure the Django settings with the minimum necessary to
    # get Django running tests
    INSTALLED_APPS = ['multigtfs']
    TEST_RUNNER = 'django.test.simple.DjangoTestSuiteRunner'

    # If django-nose is installed, use it
    # You can do things like ./run_tests.py --with-coverage
    try:
        from pkg_resources import WorkingSet, DistributionNotFound
        working_set = WorkingSet()
        working_set.require('django_nose')
    except ImportError:
        print 'setuptools not installed. Weird.'
    except DistributionNotFound:
        print "django-nose not installed. You'd like it."
    else:
        INSTALLED_APPS.append('django_nose')
        TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'

    settings.configure(
        INSTALLED_APPS=INSTALLED_APPS,
        # Django replaces this, but it still wants it. *shrugs*
        DATABASE_ENGINE='django.db.backends.sqlite3',
        DATABASES={
            'default': {
                'ENGINE': 'django.db.backends.sqlite3',
            }
        },
        DEBUG=True,
        TEMPLATE_DEBUG=True,
        TEST_RUNNER=TEST_RUNNER
    )

    from django.core import management
    failures = management.call_command('test')  # Will pull sys.argv itself
    sys.exit(failures)
def _activate(self):
    if not self._working_set:
        working_set = WorkingSet([])

        # set up the local .pex environment
        pex_info = self._pex_info.copy()
        pex_info.update(self._pex_info_overrides)
        pex_info.merge_pex_path(self._vars.PEX_PATH)
        self._envs.append(PEXEnvironment(self._pex, pex_info))
        # N.B. by this point, `pex_info.pex_path` will contain a single pex path
        # merged from pex_path in `PEX-INFO` and `PEX_PATH` set in the environment.
        # `PEX_PATH` entries written into `PEX-INFO` take precedence over those set
        # in the environment.
        if pex_info.pex_path:
            # set up other environments as specified in pex_path
            for pex_path in filter(None, pex_info.pex_path.split(os.pathsep)):
                pex_info = PexInfo.from_pex(pex_path)
                pex_info.update(self._pex_info_overrides)
                self._envs.append(PEXEnvironment(pex_path, pex_info))

        # activate all of them
        for env in self._envs:
            for dist in env.activate():
                working_set.add(dist)

        self._working_set = working_set

    return self._working_set
def _activate(self):
    self.update_candidate_distributions(self.load_internal_cache(self._pex, self._pex_info))

    if not self._pex_info.zip_safe and os.path.isfile(self._pex):
        self.update_module_paths(self.force_local(self._pex, self._pex_info))

    # TODO(wickman) Implement dynamic fetchers if pex_info requirements specify dynamic=True
    # or a non-empty repository.
    all_reqs = [Requirement.parse(req) for req, _, _ in self._pex_info.requirements]

    working_set = WorkingSet([])

    with TRACER.timed('Resolving %s' % (
            ' '.join(map(str, all_reqs)) if all_reqs else 'empty dependency list')):
        try:
            resolved = working_set.resolve(all_reqs, env=self)
        except DistributionNotFound as e:
            TRACER.log('Failed to resolve a requirement: %s' % e)
            TRACER.log('Current working set:')
            for dist in working_set:
                TRACER.log('  - %s' % dist)
            raise

    for dist in resolved:
        with TRACER.timed('Activating %s' % dist):
            working_set.add(dist)

            if os.path.isdir(dist.location):
                with TRACER.timed('Adding sitedir'):
                    site.addsitedir(dist.location)

            dist.activate()

    return working_set
def test_config():
    '''Create a Django configuration for running tests'''
    config = base_config()

    # If django-nose is installed, use it
    # You can do things like ./run_tests.py --with-coverage
    try:
        from pkg_resources import WorkingSet, DistributionNotFound
        working_set = WorkingSet()
        working_set.require('django_nose')
    except ImportError:
        print('setuptools not installed. Weird.')
    except DistributionNotFound:
        print("django-nose not installed. You'd like it.")
    else:
        config['INSTALLED_APPS'].append('django_nose')
        config['TEST_RUNNER'] = 'django_nose.NoseTestSuiteRunner'

    # Optionally update configuration
    try:
        import t_overrides
    except ImportError:
        pass
    else:
        config = t_overrides.update(config)

    return config
def _working_set(self):
    trac_version = self.trac_version()
    metadata = InMemoryMetadataStub(name='trac', version=trac_version)
    trac_distribution = Distribution('/invalid/path', metadata,
                                     project_name='trac', version=trac_version)
    working_set = WorkingSet(entries=())
    working_set.add(trac_distribution)
    return working_set
def test_marker_evaluation_with_extras_loop(self):
    ad = pkg_resources.Environment([])
    ws = WorkingSet([])
    # Metadata needs to be native strings due to cStringIO behaviour in
    # 2.6, so use str().
    a = Distribution.from_filename(
        "/foo_dir/a-0.2.dist-info",
        metadata=Metadata(("METADATA", str("Requires-Dist: c[a]"))))
    b = Distribution.from_filename(
        "/foo_dir/b-0.3.dist-info",
        metadata=Metadata(("METADATA", str("Requires-Dist: c[b]"))))
    c = Distribution.from_filename(
        "/foo_dir/c-1.0.dist-info",
        metadata=Metadata(("METADATA", str(
            "Provides-Extra: a\n"
            "Requires-Dist: b;extra=='a'\n"
            "Provides-Extra: b\n"
            "Requires-Dist: foo;extra=='b'"))))
    foo = Distribution.from_filename("/foo_dir/foo-0.1.dist-info")
    for dist in (a, b, c, foo):
        ad.add(dist)
    res = list(ws.resolve(parse_requirements("a"), ad))
    assert res == [a, c, b, foo]
def build_docs_and_install(name, version, findlinks):  # pragma no cover
    tdir = tempfile.mkdtemp()
    startdir = os.getcwd()
    os.chdir(tdir)
    try:
        tarpath = download_github_tar('OpenMDAO-Plugins', name, version)

        # extract the repo tar file
        tar = tarfile.open(tarpath)
        tar.extractall()
        tar.close()

        files = os.listdir('.')
        files.remove(os.path.basename(tarpath))
        if len(files) != 1:
            raise RuntimeError("after untarring, found multiple directories: %s"
                               % files)

        # build sphinx docs
        os.chdir(files[0])  # should be in distrib directory now
        check_call(['plugin', 'build_docs', files[0]])

        # create an sdist so we can query metadata for distrib dependencies
        check_call([sys.executable, 'setup.py', 'sdist', '-d', '.'])

        if sys.platform.startswith('win'):
            tars = fnmatch.filter(os.listdir('.'), "*.zip")
        else:
            tars = fnmatch.filter(os.listdir('.'), "*.tar.gz")
        if len(tars) != 1:
            raise RuntimeError("should have found a single archive file,"
                               " but found %s instead" % tars)

        check_call(['easy_install', '-NZ', tars[0]])

        # now install any dependencies
        metadict = get_metadata(tars[0])
        reqs = metadict.get('requires', [])
        done = set()
        while reqs:
            r = reqs.pop()
            if r not in done:
                done.add(r)
                ws = WorkingSet()
                req = Requirement.parse(r)
                dist = ws.find(req)
                if dist is None:
                    check_call(['easy_install', '-NZ', '-f', findlinks, r])
                    dist = ws.find(req)
                    if dist is None:
                        raise RuntimeError("Couldn't find distribution '%s'" % r)
                    dist.activate()
                dct = get_metadata(dist.egg_name().split('-')[0])
                for new_r in dct.get('requires', []):
                    reqs.append(new_r)
    finally:
        os.chdir(startdir)
        shutil.rmtree(tdir, ignore_errors=True)
def _create_context_manager(group: str, entrypoint: pkg_resources.EntryPoint,
                            working_set: pkg_resources.WorkingSet, scope: str):
    name = entrypoint.name

    # We need a Distribution to register our dynamic entrypoints within.
    # We have to always instantiate it to find our key, as key can be
    # different from the project_name
    dist = pkg_resources.Distribution(location=__file__, project_name=scope)

    # Prevent creating entrypoints in distributions not created by us,
    # otherwise we could remove the distributions when cleaning up.
    if (dist.key in working_set.by_key
            and working_set.by_key[dist.key].location != __file__):
        raise ValueError(f'scope {format_scope(scope, dist)} already '
                         f'exists in working set at location '
                         f'{working_set.by_key[dist.key].location}')

    if dist.key not in working_set.by_key:
        working_set.add(dist)
    # Reference the actual registered dist if we didn't just register it
    dist = working_set.by_key[dist.key]

    # Ensure the group exists in our distribution
    group_entries = dist.get_entry_map().setdefault(group, {})

    # Create an entry for the specified entrypoint
    if name in group_entries:
        raise ValueError(f'{name!r} is already registered under {group!r} '
                         f'in scope {format_scope(scope, dist)}')

    assert entrypoint.dist is None
    entrypoint.dist = dist
    group_entries[name] = entrypoint

    # Wait for something to happen with the entrypoint...
    try:
        yield
    finally:
        # Tidy up
        del group_entries[name]
        # If we re-use this entrypoint (by re-entering the context) the
        # dist may well have changed (because it gets deleted from the
        # working set) so we shouldn't remember it.
        assert entrypoint.dist is dist
        entrypoint.dist = None
        if len(group_entries) == 0:
            del dist.get_entry_map()[group]
        if len(dist.get_entry_map()) == 0:
            del working_set.by_key[dist.key]
            working_set.entry_keys[__file__].remove(dist.key)
            if not working_set.entry_keys[__file__]:
                del working_set.entry_keys[__file__]
                working_set.entries.remove(__file__)
def resolve(requirements, cache=None, crawler=None, fetchers=None,
            obtainer=None, interpreter=None, platform=None):
    """Resolve a list of requirements into distributions.

    :param requirements: A list of strings or :class:`pkg_resources.Requirement`
        objects to be resolved.
    :param cache: The filesystem path to cache distributions or None for no caching.
    :param crawler: The :class:`Crawler` object to use to crawl for artifacts.
        If None specified, a default crawler will be constructed.
    :param fetchers: A list of :class:`Fetcher` objects for generating links.
        If None specified, default to fetching from PyPI.
    :param obtainer: An :class:`Obtainer` object for converting from links to
        :class:`pkg_resources.Distribution` objects.  If None specified, a
        default will be provided that accepts eggs or building from source.
    :param interpreter: A :class:`PythonInterpreter` object to resolve against.
        If None specified, use the current interpreter.
    :param platform: The string representing the platform to be resolved, such
        as `'linux-x86_64'` or `'macosx-10.7-intel'`.  If None specified, the
        current platform is used.
    """
    requirements = maybe_requirement_list(requirements)

    # Construct defaults
    crawler = crawler or Crawler()
    fetchers = fetchers or [PyPIFetcher()]
    interpreter = interpreter or PythonInterpreter.get()
    platform = platform or Platform.current()

    # wire up translators / obtainer
    if cache:
        shared_options = dict(install_cache=cache, platform=platform,
                              interpreter=interpreter)
        translator = EggTranslator(**shared_options)
        cache_obtainer = Obtainer(crawler, [Fetcher([cache])], translator)
    else:
        cache_obtainer = None

    if not obtainer:
        translator = Translator.default(install_cache=cache, platform=platform,
                                        interpreter=interpreter)
        obtainer = Obtainer(crawler, fetchers, translator)

    # make installer
    def installer(req):
        if cache_obtainer and requirement_is_exact(req):
            dist = cache_obtainer.obtain(req)
            if dist:
                return dist
        return obtainer.obtain(req)

    # resolve
    working_set = WorkingSet(entries=[])
    env = ResolverEnvironment(interpreter, search_path=[], platform=platform)
    return working_set.resolve(requirements, env=env, installer=installer)
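# A hedged usage sketch for resolve() above.  Everything besides the
# requirement strings is defaulted, so a Crawler, a PyPI fetcher, and the
# current interpreter/platform are constructed internally; the cache path
# below is an example value, not a required location.
#
#   dists = resolve(['psutil>=2.0', 'requests'], cache='/tmp/.dist-cache')
#   for dist in dists:
#       dist.activate()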
def resolve(requirements, cache=None, crawler=None, fetchers=None,
            obtainer=None, interpreter=None, platform=None):
    """Resolve a list of requirements into distributions.

    :param requirements: A list of strings or :class:`pkg_resources.Requirement`
        objects to be resolved.
    :param cache: The filesystem path to cache distributions or None for no caching.
    :param crawler: The :class:`Crawler` object to use to crawl for artifacts.
        If None specified, a default crawler will be constructed.
    :param fetchers: A list of :class:`Fetcher` objects for generating links.
        If None specified, default to fetching from PyPI.
    :param obtainer: An :class:`Obtainer` object for converting from links to
        :class:`pkg_resources.Distribution` objects.  If None specified, a
        default will be provided that accepts eggs or building from source.
    :param interpreter: A :class:`PythonInterpreter` object to resolve against.
        If None specified, use the current interpreter.
    :param platform: The string representing the platform to be resolved, such
        as `'linux-x86_64'` or `'macosx-10.7-intel'`.  If None specified, the
        current platform is used.
    """
    requirements = maybe_requirement_list(requirements)

    # Construct defaults
    crawler = crawler or Crawler()
    fetchers = fetchers or [PyPIFetcher()]
    interpreter = interpreter or PythonInterpreter.get()
    platform = platform or Platform.current()

    # wire up translators / obtainer
    shared_options = dict(install_cache=cache, platform=platform)
    egg_translator = EggTranslator(python=interpreter.python, **shared_options)
    cache_obtainer = Obtainer(crawler, [Fetcher([cache])], egg_translator) if cache else None
    source_translator = SourceTranslator(interpreter=interpreter, **shared_options)
    translator = ChainedTranslator(egg_translator, source_translator)
    obtainer = Obtainer(crawler, fetchers, translator)

    # make installer
    def installer(req):
        if cache_obtainer and requirement_is_exact(req):
            dist = cache_obtainer.obtain(req)
            if dist:
                return dist
        return obtainer.obtain(req)

    # resolve
    working_set = WorkingSet(entries=[])
    env = ResolverEnvironment(search_path=[], platform=platform,
                              python=interpreter.python)
    return working_set.resolve(requirements, env=env, installer=installer)
def build_docs_and_install(name, version, findlinks):  # pragma no cover
    tdir = tempfile.mkdtemp()
    startdir = os.getcwd()
    os.chdir(tdir)
    try:
        tarpath = download_github_tar('OpenMDAO-Plugins', name, version)

        # extract the repo tar file
        tar = tarfile.open(tarpath)
        tar.extractall()
        tar.close()

        files = os.listdir('.')
        files.remove(os.path.basename(tarpath))
        if len(files) != 1:
            raise RuntimeError("after untarring, found multiple directories: %s"
                               % files)

        os.chdir(files[0])  # should be in distrib directory now

        cfg = SafeConfigParser(dict_type=OrderedDict)
        cfg.readfp(open('setup.cfg', 'r'), 'setup.cfg')

        if cfg.has_option('metadata', 'requires-dist'):
            reqs = cfg.get('metadata', 'requires-dist').strip()
            reqs = reqs.replace(',', ' ')
            reqs = [n.strip() for n in reqs.split()]
        else:
            # couldn't find requires-dist in setup.cfg, so
            # create an sdist so we can query metadata for distrib dependencies
            tarname = _bld_sdist_and_install(deps=False)

            # now find any dependencies
            metadict = get_metadata(tarname)
            reqs = metadict.get('requires', [])

        # install dependencies (some may be needed by sphinx)
        ws = WorkingSet()
        for r in reqs:
            print "Installing dependency '%s'" % r
            req = Requirement.parse(r)
            dist = ws.find(req)
            if dist is None:
                try:
                    check_call(['easy_install', '-Z', '-f', findlinks, r])
                except Exception:
                    traceback.print_exc()

        # build sphinx docs
        check_call(['plugin', 'build_docs', files[0]])

        # make a new sdist with docs in it and install it
        tarname = _bld_sdist_and_install()
    finally:
        os.chdir(startdir)
        shutil.rmtree(tdir, ignore_errors=True)
def test_node_modules_registry_flattening(self):
    lib = make_dummy_dist(self, (
        ('requires.txt', '\n'.join([])),
        ('bower.json', json.dumps({
            'dependencies': {
                'jquery': '~1.8.3',
                'underscore': '1.8.3',
            },
        })),
        ('extras_calmjs.json', json.dumps({
            'bower_components': {
                'jquery': 'jquery/dist/jquery.js',
                'underscore': 'underscore/underscore-min.js',
            },
            'something_else': {'parent': 'lib'},
        })),
    ), 'lib', '1.0.0')

    app = make_dummy_dist(self, (
        ('requires.txt', '\n'.join([
            'lib>=1.0.0',
        ])),
        ('bower.json', json.dumps({
            'dependencies': {
                'jquery': '~3.0.0',
            },
        })),
        ('extras_calmjs.json', json.dumps({
            'bower_components': {
                'jquery': 'jquery/dist/jquery.min.js',
            },
            'something_else': {'child': 'named'},
        })),
    ), 'app', '2.0')

    working_set = WorkingSet()
    working_set.add(lib, self._calmjs_testing_tmpdir)
    working_set.add(app, self._calmjs_testing_tmpdir)

    results = dist.flatten_extras_calmjs(['app'], working_set=working_set)
    self.assertEqual(results['bower_components'], {
        'jquery': 'jquery/dist/jquery.min.js',
        'underscore': 'underscore/underscore-min.js',
    })
    # child takes precedence as this was not specified to be merged
    self.assertEqual(results['something_else'], {'child': 'named'})
def test_remove_from_ws__removes_distribution():
    ws = WorkingSet([])
    dist = create_dist("a", "1.0")
    assert dist not in ws
    ws.add(dist)
    assert dist in ws
    dependency.remove_from_ws(ws, dist)
    assert dist not in ws
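# The create_dist() helper above is a test fixture; a plausible minimal
# stand-in (hypothetical, not the suite's actual implementation) can be
# built directly on pkg_resources.Distribution:
from pkg_resources import Distribution

def create_dist(name, version):
    # A Distribution needs only a project_name and version to take part
    # in WorkingSet membership checks and additions.
    return Distribution(project_name=name, version=version)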
def checker_pex(self, interpreter):
    # TODO(John Sirois): Formalize in pants.base?
    pants_dev_mode = os.environ.get('PANTS_DEV')

    if pants_dev_mode:
        checker_id = self.checker_target.transitive_invalidation_hash()
    else:
        checker_id = hash_all([self._CHECKER_REQ])

    pex_path = os.path.join(self.workdir, 'checker', checker_id,
                            str(interpreter.identity))

    if not os.path.exists(pex_path):
        with self.context.new_workunit(name='build-checker'):
            with safe_concurrent_creation(pex_path) as chroot:
                pex_builder = PexBuilderWrapper(
                    PEXBuilder(path=chroot, interpreter=interpreter),
                    PythonRepos.global_instance(),
                    PythonSetup.global_instance(),
                    self.context.log)

                # Constraining is required to guard against the case where the user
                # has a pexrc file set.
                pex_builder.add_interpreter_constraint(
                    str(interpreter.identity.requirement))

                if pants_dev_mode:
                    pex_builder.add_sources_from(self.checker_target)
                    req_libs = [
                        tgt for tgt in self.checker_target.closure()
                        if isinstance(tgt, PythonRequirementLibrary)
                    ]
                    pex_builder.add_requirement_libs_from(req_libs=req_libs)
                else:
                    try:
                        # The checker is already on sys.path, eg: embedded in pants.pex.
                        working_set = WorkingSet(entries=sys.path)
                        for dist in working_set.resolve(
                                [Requirement.parse(self._CHECKER_REQ)]):
                            pex_builder.add_direct_requirements(dist.requires())
                            pex_builder.add_distribution(dist)
                        pex_builder.add_direct_requirements([self._CHECKER_REQ])
                    except DistributionNotFound:
                        # We need to resolve the checker from a local or remote distribution repo.
                        pex_builder.add_resolved_requirements(
                            [PythonRequirement(self._CHECKER_REQ)])

                pex_builder.set_entry_point(self._CHECKER_ENTRYPOINT)
                pex_builder.freeze()

    return PEX(pex_path, interpreter=interpreter)
def package_installation(logger):
    try:
        reqs = subprocess.check_output([sys.executable, '-m', 'pip', 'freeze'])
    except Exception:
        try:
            reqs = subprocess.check_output(
                [sys.executable, '-m', 'pip3', 'freeze'])
        except Exception:
            print("Please ensure that pip or pip3 is installed on your laptop "
                  "and redo the setup")

    installed_packages = [r.decode().split('==')[0] for r in reqs.split()]
    config = conf_reader.get_config()
    requirements = config.get("requirements", None)
    print('packages execution started...')
    packages = []
    try:
        with open(requirements, "rt") as f:
            for line in f:
                l = line.strip()
                package = l.split(',')
                package = package[0]
                packages.append(package)
        for i in packages:
            if i not in installed_packages:
                working_set = WorkingSet()
                try:
                    dep = working_set.require('paramiko>=1.0')
                except DistributionNotFound:
                    pass
                whoami = os.getlogin()
                if whoami == 'root':
                    install([i])
                if whoami != 'root':
                    try:
                        subprocess.check_call(["pip", "install", "--user", i])
                    except Exception:
                        try:
                            subprocess.check_call(
                                ["pip3", "install", "--user", i])
                        except Exception:
                            print("Check whether this user has admin "
                                  "privileges for installing packages")
    except Exception as e:
        logger.exception('ERROR:: Some issue in reading the config... '
                         'check the config_reader.py script in the bin folder.')
        raise e
def __init__(self, pex, pex_info, platform=Platform.current(), python=Platform.python()):
    # Guard the install_cache so that an absent cache contributes no entry
    # rather than an empty list element.
    subcaches = sum([
        [os.path.join(pex, pex_info.internal_cache)],
        [cache for cache in pex_info.egg_caches],
        [pex_info.install_cache] if pex_info.install_cache else []],
        [])
    self._pex_info = pex_info
    self._activated = False
    self._subcaches = [self.Subcache(cache, self) for cache in subcaches]
    self._ws = WorkingSet([])
    with TRACER.timed('Calling environment super'):
        super(PEXEnvironment, self).__init__(
            search_path=[], platform=platform, python=python)
def _add_working_set_mocks(mocks, virtualenv_dists):
    ws = WorkingSet(entries=[])
    [ws.add(d) for d in _find_distributions('setuptools', 'zc.buildout')]
    [ws.add(d) for d in virtualenv_dists]
    default_ws = WorkingSet(entries=ws.entries)
    [default_ws.add(d) for d in virtualenv_dists]
    _add_mock(mocks, _pkgr_ws,
              lambda: Mock(side_effect=lambda entries: ws if entries else WorkingSet([])))
    _add_mock(mocks, _pkgr_default_ws, lambda: default_ws)
def checker_pex(self, interpreter):
    # TODO(John Sirois): Formalize in pants.base?
    pants_dev_mode = os.environ.get('PANTS_DEV')

    if pants_dev_mode:
        checker_id = self.checker_target.transitive_invalidation_hash()
    else:
        checker_id = hash_all([self._CHECKER_REQ])

    pex_path = os.path.join(self.workdir, 'checker', checker_id,
                            str(interpreter.identity))

    if not os.path.exists(pex_path):
        with self.context.new_workunit(name='build-checker'):
            with safe_concurrent_creation(pex_path) as chroot:
                pex_builder = PexBuilderWrapper.Factory.create(
                    builder=PEXBuilder(path=chroot, interpreter=interpreter),
                    log=self.context.log)

                # Constraining is required to guard against the case where the user
                # has a pexrc file set.
                pex_builder.add_interpreter_constraint(
                    str(interpreter.identity.requirement))

                if pants_dev_mode:
                    pex_builder.add_sources_from(self.checker_target)
                    req_libs = [tgt for tgt in self.checker_target.closure()
                                if isinstance(tgt, PythonRequirementLibrary)]
                    pex_builder.add_requirement_libs_from(req_libs=req_libs)
                else:
                    try:
                        # The checker is already on sys.path, eg: embedded in pants.pex.
                        platform = Platform.current()
                        platform_name = platform.platform
                        env = Environment(search_path=sys.path,
                                          platform=platform_name,
                                          python=interpreter.version_string)
                        working_set = WorkingSet(entries=sys.path)
                        for dist in working_set.resolve(
                                [Requirement.parse(self._CHECKER_REQ)], env=env):
                            pex_builder.add_direct_requirements(dist.requires())
                            # NB: We add the dist location instead of the dist itself to make
                            # sure its a distribution style pex knows how to package.
                            pex_builder.add_dist_location(dist.location)
                        pex_builder.add_direct_requirements([self._CHECKER_REQ])
                    except (DistributionNotFound, PEXBuilder.InvalidDistribution):
                        # We need to resolve the checker from a local or remote distribution repo.
                        pex_builder.add_resolved_requirements(
                            [PythonRequirement(self._CHECKER_REQ)])

                pex_builder.set_entry_point(self._CHECKER_ENTRYPOINT)
                pex_builder.freeze()

    return PEX(pex_path, interpreter=interpreter)
def parse_requirement(pkgstring, comparator='=='):
    ins = InstallRequirement.from_line(pkgstring)
    pkg_name, specs = ins.name, str(ins.specifier)
    if specs:
        return pkg_name, specs

    req = Requirement.parse(pkg_name)
    working_set = WorkingSet()
    dist = working_set.find(req)
    if dist:
        specs = "%s%s" % (comparator, dist.version)
    return req.project_name, specs
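# A minimal usage sketch for parse_requirement() above (assuming pip's
# InstallRequirement is importable as in the snippet; the version numbers
# are illustrative):
#
#   parse_requirement('requests==2.28.0')  -> ('requests', '==2.28.0')
#   parse_requirement('requests')          -> ('requests', '==<installed version>')
#
# The unpinned case falls through to WorkingSet.find(), so the version is
# pinned to whatever is currently installed on sys.path, if anything.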
def test_find_conflicting(self):
    ws = WorkingSet([])
    Foo = Distribution.from_filename("/foo_dir/Foo-1.2.egg")
    ws.add(Foo)

    # create a requirement that conflicts with Foo 1.2
    req = next(parse_requirements("Foo<1.2"))

    with pytest.raises(VersionConflict) as vc:
        ws.find(req)

    msg = 'Foo 1.2 is installed but Foo<1.2 is required'
    assert vc.value.report() == msg
def test_yarn_install_package_json_no_overwrite_interactive(self):
    """
    Most of these package_json testing will be done in the next test
    class specific for ``yarn init``.
    """
    # Testing the implied init call
    stub_mod_call(self, cli)
    stub_stdouts(self)
    stub_stdin(self, 'n\n')
    stub_check_interactive(self, True)
    tmpdir = mkdtemp(self)
    os.chdir(tmpdir)

    # All the pre-made setup.
    app = make_dummy_dist(self, (
        ('requires.txt', '\n'.join([])),
        ('package.json', json.dumps({
            'dependencies': {
                'jquery': '~1.11.0'
            },
        })),
    ), 'foo', '1.9.0')
    working_set = WorkingSet()
    working_set.add(app, self._calmjs_testing_tmpdir)
    stub_item_attr_value(self, dist, 'default_working_set', working_set)

    # We are going to have a fake package.json
    with open(join(tmpdir, 'package.json'), 'w') as fd:
        json.dump({}, fd)

    # capture the logging explicitly as the conditions which
    # determines how the errors are outputted differs from different
    # test harnesses.  Verify that later.
    with pretty_logging(stream=StringIO()) as stderr:
        # This is faked.
        yarn.yarn_install('foo', callback=prompt_overwrite_json)

    self.assertIn(
        "Overwrite '%s'? (Yes/No) [No] " % join(tmpdir, 'package.json'),
        sys.stdout.getvalue())
    # Ensure the error message.  Normally this is printed through
    # stderr via distutils custom logger and our handler bridge for
    # that which is tested elsewhere.
    self.assertIn("not continuing with 'yarn install'", stderr.getvalue())

    with open(join(tmpdir, 'package.json')) as fd:
        result = fd.read()
    # This should remain unchanged as no to overwrite is default.
    self.assertEqual(result, '{}')
def main(*paths):
    # Dynamically configure the Django settings with the minimum necessary to
    # get Django running tests
    config = {
        'INSTALLED_APPS': ['multigtfs'],
        'TEST_RUNNER': 'django.test.simple.DjangoTestSuiteRunner',
        'DATABASE_ENGINE': 'django.contrib.gis.db.backends.spatialite',
        'DATABASES': {
            'default': {
                'ENGINE': 'django.contrib.gis.db.backends.spatialite',
            }
        },
        'DEBUG': True,
        'TEMPLATE_DEBUG': True
    }

    try:
        import south
    except ImportError:
        pass
    else:
        assert south  # flake8 be quiet
        config['INSTALLED_APPS'].insert(0, 'south')

    # If django-nose is installed, use it
    # You can do things like ./run_tests.py --with-coverage
    try:
        from pkg_resources import WorkingSet, DistributionNotFound
        working_set = WorkingSet()
        working_set.require('django_nose')
    except ImportError:
        print('setuptools not installed. Weird.')
    except DistributionNotFound:
        print("django-nose not installed. You'd like it.")
    else:
        config['INSTALLED_APPS'].append('django_nose')
        config['TEST_RUNNER'] = 'django_nose.NoseTestSuiteRunner'

    # Optionally update configuration
    try:
        import t_overrides
    except ImportError:
        pass
    else:
        config = t_overrides.update(config)

    settings.configure(**config)

    from django.core import management
    failures = management.call_command('test', *paths)
    sys.exit(failures)
def setUp(self):
    # save working directory
    remember_cwd(self)

    # All the pre-made setup.
    stub_mod_call(self, cli)
    app = make_dummy_dist(self, (
        ('requires.txt', '\n'.join([])),
        ('package.json', json.dumps({
            'dependencies': {'jquery': '~1.11.0'},
        })),
    ), 'foo', '1.9.0')
    underscore = make_dummy_dist(self, (
        ('requires.txt', '\n'.join([])),
        ('package.json', json.dumps({
            'dependencies': {'underscore': '~1.8.0'},
        })),
    ), 'underscore', '1.8.0')
    named = make_dummy_dist(self, (
        ('requires.txt', '\n'.join([])),
        ('package.json', json.dumps({
            'dependencies': {'jquery': '~3.0.0'},
            'name': 'named-js',
        })),
    ), 'named', '2.0.0')
    working_set = WorkingSet()
    working_set.add(app, self._calmjs_testing_tmpdir)
    working_set.add(underscore, self._calmjs_testing_tmpdir)
    working_set.add(named, self._calmjs_testing_tmpdir)
    stub_item_attr_value(self, dist, 'default_working_set', working_set)
    stub_mod_check_interactive(self, [cli], True)
    # also save this
    self.inst_interactive = npm.npm.cli_driver.interactive
def checkApps(apps=[]):
    working_set = WorkingSet()
    for app in apps:
        manifest = getAppManifest(app)
        if manifest:
            for dependency in manifest["externalDependencies"]:
                try:
                    dep = working_set.require(dependency)
                except DistributionNotFound:
                    from setuptools.command.easy_install import main as install
                    try:
                        install([dependency])
                    except Exception as e:
                        print e
def test_marker_evaluation_with_extras_normlized(self):
    """Extras are also evaluated as markers at resolution time."""
    ad = pkg_resources.Environment([])
    ws = WorkingSet([])
    Foo = Distribution.from_filename(
        "/foo_dir/Foo-1.2.dist-info",
        metadata=Metadata(("METADATA",
                           "Provides-Extra: baz-lightyear\n"
                           "Requires-Dist: quux; extra=='baz-lightyear'")))
    ad.add(Foo)
    assert list(ws.resolve(parse_requirements("Foo"), ad)) == [Foo]
    quux = Distribution.from_filename("/foo_dir/quux-1.0.dist-info")
    ad.add(quux)
    res = list(ws.resolve(parse_requirements("Foo[baz-lightyear]"), ad))
    assert res == [Foo, quux]
def check(request):
    package_name = settings.HEARTBEAT.get('package_name')
    if not package_name:
        raise ImproperlyConfigured(
            'Missing package_name key from heartbeat configuration')

    sys_path_distros = WorkingSet()
    package_req = Requirement.parse(package_name)

    distro = sys_path_distros.find(package_req)
    if not distro:
        return dict(error='no distribution found for {}'.format(package_name))

    return dict(name=distro.project_name, version=distro.version)
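# The same lookup pattern outside Django, as a minimal runnable sketch;
# the 'setuptools' name below is only an example package to query.
def installed_version(package_name):
    from pkg_resources import Requirement, WorkingSet
    # WorkingSet() with no arguments scans sys.path, matching the view above.
    distro = WorkingSet().find(Requirement.parse(package_name))
    return distro.version if distro else None

# installed_version('setuptools') -> e.g. '44.0.0', or None if absent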
def tests_instantiate_integration_registries(self):
    """
    Ensure that the integration registries, specifically the root
    registry, be instantiated (or re-instantiated) in a way that
    satisfies expectations of integration test creators.
    """
    make_dummy_dist(self, (
        ('entry_points.txt', '\n'.join([
            '[calmjs.registry]',
            'dummy.module = calmjs.module:ModuleRegistry',
            'other.module = calmjs.module:ModuleRegistry',
        ])),
    ), 'somepkg', '1.0')
    working_set = WorkingSet([self._calmjs_testing_tmpdir])
    registry = utils.instantiate_integration_registries(
        working_set, None,
        'dummy.module',
    )
    dummy_module = registry.get('dummy.module')
    other_module = registry.get('other.module')
    self.assertEqual('dummy.module', dummy_module.registry_name)
    self.assertIsNone(registry.get('dummy.module.tests'))

    make_dummy_dist(self, (
        ('entry_points.txt', '\n'.join([
            '[calmjs.registry]',
            'dummy.module.tests = calmjs.module:ModuleRegistry',
        ])),
    ), 'somepkg.testing', '1.0')
    # re-add the tmpdir to reinitialize the working set with the
    # newly added entry points
    working_set.add_entry(self._calmjs_testing_tmpdir)
    reinstantiated_registry = utils.instantiate_integration_registries(
        working_set, registry,
        'dummy.module',
        'dummy.module.tests',
    )
    # ensure that it is the same instance, as this could be used to
    # reinstantiate the registry with the additional entries.
    self.assertIs(registry, reinstantiated_registry)
    # the inner registries should be renewed.
    self.assertIsNot(dummy_module, registry.get('dummy.module'))
    # the not reinstantiated version is not renewed
    self.assertIs(other_module, registry.get('other.module'))
    # the newly added entry points should resolve now.
    self.assertIsNotNone(registry.get('dummy.module.tests'))
def test_marker_evaluation_with_extras_normlized(self):
    """Extras are also evaluated as markers at resolution time."""
    ad = pkg_resources.Environment([])
    ws = WorkingSet([])
    Foo = Distribution.from_filename(
        "/foo_dir/Foo-1.2.dist-info",
        metadata=Metadata(("METADATA",
                           "Provides-Extra: baz-lightyear\n"
                           "Requires-Dist: quux; extra=='baz-lightyear'"))
    )
    ad.add(Foo)
    assert list(ws.resolve(parse_requirements("Foo"), ad)) == [Foo]
    quux = Distribution.from_filename("/foo_dir/quux-1.0.dist-info")
    ad.add(quux)
    res = list(ws.resolve(parse_requirements("Foo[baz-lightyear]"), ad))
    assert res == [Foo, quux]
def attempt_pkg_install(pkg):
    msg("This tool requires the {0} package to be installed. Attempting installation..."
        .format(pkg))
    from pkg_resources import WorkingSet, DistributionNotFound
    working_set = WorkingSet()
    try:
        dep = working_set.require(pkg)
    except DistributionNotFound:
        try:
            from setuptools.command.easy_install import main as install
            install([pkg])
        except Exception:
            msg("This tool was unable to find or install a required dependency: {0}"
                .format(pkg))
            exit()
def test_yarn_install_package_json_no_overwrite_interactive(self):
    """
    Most of these package_json testing will be done in the next test
    class specific for ``yarn init``.
    """
    # Testing the implied init call
    stub_mod_call(self, cli)
    stub_stdouts(self)
    stub_stdin(self, 'n\n')
    stub_check_interactive(self, True)
    tmpdir = mkdtemp(self)
    os.chdir(tmpdir)

    # All the pre-made setup.
    app = make_dummy_dist(self, (
        ('requires.txt', '\n'.join([])),
        ('package.json', json.dumps({
            'dependencies': {'jquery': '~1.11.0'},
        })),
    ), 'foo', '1.9.0')
    working_set = WorkingSet()
    working_set.add(app, self._calmjs_testing_tmpdir)
    stub_item_attr_value(self, dist, 'default_working_set', working_set)

    # We are going to have a fake package.json
    with open(join(tmpdir, 'package.json'), 'w') as fd:
        json.dump({}, fd)

    # capture the logging explicitly as the conditions which
    # determines how the errors are outputted differs from different
    # test harnesses.  Verify that later.
    with pretty_logging(stream=StringIO()) as stderr:
        # This is faked.
        yarn.yarn_install('foo', callback=prompt_overwrite_json)

    self.assertIn(
        "Overwrite '%s'? (Yes/No) [No] " % join(tmpdir, 'package.json'),
        sys.stdout.getvalue())
    # Ensure the error message.  Normally this is printed through
    # stderr via distutils custom logger and our handler bridge for
    # that which is tested elsewhere.
    self.assertIn("not continuing with 'yarn install'", stderr.getvalue())

    with open(join(tmpdir, 'package.json')) as fd:
        result = fd.read()
    # This should remain unchanged as no to overwrite is default.
    self.assertEqual(result, '{}')
def test_iter_builders_side_effect_build_issue(self):
    mod = ModuleType('calmjs_testing_dummy')
    mod.complete = generic_builder
    self.addCleanup(sys.modules.pop, 'calmjs_testing_dummy')
    sys.modules['calmjs_testing_dummy'] = mod

    working_dir = utils.mkdtemp(self)
    utils.make_dummy_dist(self, (
        ('entry_points.txt', '\n'.join([
            '[calmjs.artifacts]',
            'artifact.js = calmjs_testing_dummy:complete',
        ])),
    ), 'app', '1.0', working_dir=working_dir)
    mock_ws = WorkingSet([working_dir])
    registry = ArtifactRegistry('calmjs.artifacts', _working_set=mock_ws)
    registry.update_artifact_metadata('app', {})

    root = join(working_dir, 'app-1.0.egg-info', 'calmjs_artifacts')
    # clog the build directory so build cannot happen
    with open(join(root), 'w'):
        pass

    ep, toolchain, spec = next(registry.iter_builders_for('app'))
    check = []
    spec.advise('after_prepare', check.append, True)
    with pretty_logging(stream=mocks.StringIO()) as stream:
        with self.assertRaises(ToolchainAbort):
            toolchain(spec)
    self.assertIn(
        "an advice in group 'before_prepare' triggered an abort",
        stream.getvalue())
    # should have stopped at before_prepare
    self.assertFalse(check)
def initialize(self):
    super(SetupEggSubRecipe, self).initialize()

    if self.recipe.options.get_as_bool('split-working-set', False):
        self.working_set = WorkingSet([])
    else:
        self.working_set = self.recipe.working_set

    self.index = get_index(self.index_url, self.find_links_urls)
def test_iter_builders_verify_export_target(self):
    mod = ModuleType('calmjs_testing_dummy')
    mod.complete = generic_builder
    self.addCleanup(sys.modules.pop, 'calmjs_testing_dummy')
    sys.modules['calmjs_testing_dummy'] = mod

    working_dir = utils.mkdtemp(self)
    utils.make_dummy_dist(self, (
        ('entry_points.txt', '\n'.join([
            '[calmjs.artifacts]',
            'artifact.js = calmjs_testing_dummy:complete',
            'invalid.js = calmjs_testing_dummy:complete',
        ])),
    ), 'app', '1.0', working_dir=working_dir)
    mock_ws = WorkingSet([working_dir])

    class FakeArtifactRegistry(ArtifactRegistry):
        def verify_export_target(self, export_target):
            return 'invalid.js' not in export_target

    registry = FakeArtifactRegistry('calmjs.artifacts', _working_set=mock_ws)

    # the invalid.js should be filtered out
    with pretty_logging(stream=mocks.StringIO()) as stream:
        self.assertEqual(1, len(list(registry.iter_builders_for('app'))))
    self.assertIn("invalid.js' has been rejected", stream.getvalue())
def load_entry_points(
    group: str,
    type_constraint: Optional[type] = None,
    working_set: pkg_resources.WorkingSet = pkg_resources.working_set,
) -> Dict[str, callable]:
    entry_points = {}
    log.info(f'Loading entry points for "{group}"')
    for entry_point in working_set.iter_entry_points(group):
        log.debug(f'Loading entry point "{entry_point.name}" from "{group}"')
        try:
            loaded = entry_point.load()
        except Exception as e:
            msg = (f'Failed to load entry point "{entry_point.name}" from '
                   f'"{group}": {e}')
            log.error(msg)
            raise e
        if type_constraint and not issubclass(loaded, type_constraint):
            msg = (f'Entry point "{entry_point.name}" from "{group}" does not '
                   f'match type constraint for "{type_constraint.__module__}.'
                   f'{type_constraint.__name__}".')
            log.error(msg)
            raise TypeError(msg)
        log.debug(f'Successfully loaded "{entry_point.name}" from "{group}"')
        entry_points[entry_point.name] = loaded
    log.debug(f'Finished loading {len(entry_points)} entry points from "{group}"')
    return entry_points
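# A hedged usage sketch for load_entry_points() above; 'console_scripts' is
# a well-known entry point group chosen purely for illustration, and the
# module-level `log` is assumed configured as in the snippet.
#
#   scripts = load_entry_points('console_scripts')
#   print(sorted(scripts))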
def test_iter_builders_side_effect(self):
    # inject dummy module and add cleanup
    mod = ModuleType('calmjs_testing_dummy')
    mod.complete = generic_builder
    self.addCleanup(sys.modules.pop, 'calmjs_testing_dummy')
    sys.modules['calmjs_testing_dummy'] = mod

    working_dir = utils.mkdtemp(self)
    utils.make_dummy_dist(self, (
        ('entry_points.txt', '\n'.join([
            '[calmjs.artifacts]',
            'artifact.js = calmjs_testing_dummy:complete',
        ])),
    ), 'app', '1.0', working_dir=working_dir)
    mock_ws = WorkingSet([working_dir])
    registry = ArtifactRegistry('calmjs.artifacts', _working_set=mock_ws)
    registry.update_artifact_metadata('app', {})

    root = join(working_dir, 'app-1.0.egg-info', 'calmjs_artifacts')
    self.assertFalse(exists(root))
    ep, toolchain, spec = next(registry.iter_builders_for('app'))
    self.assertFalse(exists(root))
    # directory only created after the toolchain is executed
    toolchain(spec)
    self.assertTrue(exists(root))
def get_all_entry_points():
    """
    Get all entry points related to `colcon` and any of its extensions.

    :returns: mapping of entry point names to
      :class:`pkg_resources.EntryPoint` instances
    :rtype: dict
    """
    global EXTENSION_POINT_GROUP_NAME
    colcon_extension_points = get_entry_points(EXTENSION_POINT_GROUP_NAME)

    entry_points = defaultdict(dict)
    working_set = WorkingSet()
    for dist in sorted(working_set):
        entry_map = dist.get_entry_map()
        for group_name in entry_map.keys():
            # skip groups which are not registered as extension points
            if group_name not in colcon_extension_points:
                continue

            group = entry_map[group_name]
            for entry_point_name, entry_point in group.items():
                entry_point.group_name = group_name
                if entry_point_name in entry_points[group_name]:
                    previous = entry_points[group_name][entry_point_name]
                    logger.error(
                        "Entry point '{group_name}.{entry_point_name}' is "
                        "declared multiple times, '{entry_point}' "
                        "overwriting '{previous}'".format_map(locals()))
                entry_points[group_name][entry_point_name] = \
                    (dist, entry_point)
    return entry_points
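# A minimal runnable sketch of the dist/entry-map walk used above, without
# the colcon-specific extension point filtering: collect every entry point
# group declared by the distributions on sys.path.
from pkg_resources import WorkingSet

def all_entry_point_groups():
    groups = set()
    for dist in WorkingSet():
        # get_entry_map() returns {group_name: {entry_point_name: EntryPoint}}
        groups.update(dist.get_entry_map().keys())
    return sorted(groups)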
def plugin_resolution(chroot=None, plugins=None):
    @contextmanager
    def provide_chroot(existing):
        if existing:
            yield existing, False
        else:
            with temporary_dir() as new_chroot:
                yield new_chroot, True

    with provide_chroot(chroot) as (root_dir, create_artifacts):
        env = {'PANTS_BOOTSTRAPDIR': root_dir}
        repo_dir = None
        if plugins:
            repo_dir = os.path.join(root_dir, 'repo')
            env.update(PANTS_PYTHON_REPOS_REPOS='[{!r}]'.format(repo_dir),
                       PANTS_PYTHON_REPOS_INDEXES='[]',
                       PANTS_PYTHON_SETUP_RESOLVER_CACHE_TTL='1')
            plugin_list = []
            for plugin in plugins:
                version = None
                if isinstance(plugin, tuple):
                    plugin, version = plugin
                plugin_list.append('{}=={}'.format(plugin, version) if version else plugin)
                if create_artifacts:
                    create_plugin(repo_dir, plugin, version)
            env['PANTS_PLUGINS'] = '[{}]'.format(','.join(map(repr, plugin_list)))

        configpath = os.path.join(root_dir, 'pants.ini')
        if create_artifacts:
            touch(configpath)
        options_bootstrapper = OptionsBootstrapper(env=env, configpath=configpath, args=[])
        plugin_resolver = PluginResolver(options_bootstrapper)
        cache_dir = plugin_resolver.plugin_cache_dir
        yield plugin_resolver.resolve(WorkingSet(entries=[])), root_dir, repo_dir, cache_dir
def test_marker_evaluation_with_multiple_extras(self):
    ad = pkg_resources.Environment([])
    ws = WorkingSet([])
    Foo = Distribution.from_filename(
        "/foo_dir/Foo-1.2.dist-info",
        metadata=Metadata(("METADATA",
                           "Provides-Extra: baz\n"
                           "Requires-Dist: quux; extra=='baz'\n"
                           "Provides-Extra: bar\n"
                           "Requires-Dist: fred; extra=='bar'\n")))
    ad.add(Foo)
    quux = Distribution.from_filename("/foo_dir/quux-1.0.dist-info")
    ad.add(quux)
    fred = Distribution.from_filename("/foo_dir/fred-0.1.dist-info")
    ad.add(fred)
    res = list(ws.resolve(parse_requirements("Foo[baz,bar]"), ad))
    assert sorted(res) == [fred, quux, Foo]
def tests_make_dummy_dist_working_set(self):
    """
    Dummy distributions should work with pkg_resources.WorkingSet
    """

    # This also shows how WorkingSet might work.
    # A WorkingSet is basically a way to get to a collection of
    # distributions via the list of specified paths.  By default it
    # will go for sys.path, but for testing purposes we can control
    # this by creating our own instance on a temporary directory.

    parentpkg = make_dummy_dist(self, (  # noqa: F841
        ('requires.txt', '\n'.join([])),
    ), 'parentpkg', '0.8')
    childpkg = make_dummy_dist(self, (  # noqa: F841
        ('requires.txt', '\n'.join([
            'parentpkg>=0.8',
        ])),
    ), 'childpkg', '0.1')
    grandchildpkg = make_dummy_dist(self, (
        ('requires.txt', '\n'.join([
            'childpkg>=0.1',
            'parentpkg>=0.8',
        ])),
    ), 'grandchildpkg', '0.8')

    working_set = WorkingSet([self._calmjs_testing_tmpdir])
    distributions = working_set.resolve(grandchildpkg.requires())
    self.assertEqual(len(distributions), 2)
    self.assertEqual(distributions[0].requires(), [])
    self.assertEqual(distributions[1].requires(), [
        Requirement.parse('parentpkg>=0.8')])

    # overwrite should work
    make_dummy_dist(self, (
        ('requires.txt', '\n'.join([
            'parentpkg>=0.7',
        ])),
    ), 'childpkg', '0.1')
    # but the data have to be recreated
    working_set = WorkingSet([self._calmjs_testing_tmpdir])
    distributions = working_set.resolve(grandchildpkg.requires())
    self.assertEqual(distributions[1].requires(), [
        Requirement.parse('parentpkg>=0.7')])
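# A distilled, runnable sketch of the docstring's point above: a WorkingSet
# built from an explicit list of entries consults only those paths, never
# sys.path.
import tempfile
from pkg_resources import WorkingSet

tmpdir = tempfile.mkdtemp()
ws = WorkingSet([tmpdir])  # only scans tmpdir for distributions
assert list(ws) == []      # nothing is installed there yet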
def test_marker_evaluation_with_multiple_extras(self):
    ad = pkg_resources.Environment([])
    ws = WorkingSet([])
    Foo = Distribution.from_filename(
        "/foo_dir/Foo-1.2.dist-info",
        metadata=Metadata(("METADATA",
                           "Provides-Extra: baz\n"
                           "Requires-Dist: quux; extra=='baz'\n"
                           "Provides-Extra: bar\n"
                           "Requires-Dist: fred; extra=='bar'\n"))
    )
    ad.add(Foo)
    quux = Distribution.from_filename("/foo_dir/quux-1.0.dist-info")
    ad.add(quux)
    fred = Distribution.from_filename("/foo_dir/fred-0.1.dist-info")
    ad.add(fred)
    res = list(ws.resolve(parse_requirements("Foo[baz,bar]"), ad))
    assert sorted(res) == [fred, quux, Foo]
def test_marker_evaluation_with_extras(self):
    """Extras are also evaluated as markers at resolution time."""
    ad = pkg_resources.Environment([])
    ws = WorkingSet([])
    # Metadata needs to be native strings due to cStringIO behaviour in
    # 2.6, so use str().
    Foo = Distribution.from_filename(
        "/foo_dir/Foo-1.2.dist-info",
        metadata=Metadata(("METADATA", str(
            "Provides-Extra: baz\n"
            "Requires-Dist: quux; extra=='baz'")))
    )
    ad.add(Foo)
    assert list(ws.resolve(parse_requirements("Foo"), ad)) == [Foo]
    quux = Distribution.from_filename("/foo_dir/quux-1.0.dist-info")
    ad.add(quux)
    res = list(ws.resolve(parse_requirements("Foo[baz]"), ad))
    assert res == [Foo, quux]
def setUp(self):
    # save working directory
    remember_cwd(self)

    # All the pre-made setup.
    stub_mod_call(self, cli)
    app = make_dummy_dist(self, (
        ('requires.txt', '\n'.join([])),
        ('package.json', json.dumps({
            'dependencies': {'jquery': '~1.11.0'},
        })),
    ), 'foo', '1.9.0')
    underscore = make_dummy_dist(self, (
        ('requires.txt', '\n'.join([])),
        ('package.json', json.dumps({
            'dependencies': {'underscore': '~1.8.0'},
        })),
    ), 'underscore', '1.8.0')
    named = make_dummy_dist(self, (
        ('requires.txt', '\n'.join([])),
        ('package.json', json.dumps({
            'dependencies': {'jquery': '~3.0.0'},
            'name': 'named-js',
        })),
    ), 'named', '2.0.0')
    working_set = WorkingSet()
    working_set.add(app, self._calmjs_testing_tmpdir)
    working_set.add(underscore, self._calmjs_testing_tmpdir)
    working_set.add(named, self._calmjs_testing_tmpdir)
    stub_item_attr_value(self, dist, 'default_working_set', working_set)
    stub_check_interactive(self, True)
def main(args, options):
    from pkg_resources import WorkingSet, Requirement, find_distributions

    if not options.site_dir:
        app.error('Must supply --site')

    distributions = list(find_distributions(options.site_dir))
    working_set = WorkingSet()
    for dist in distributions:
        working_set.add(dist)

    for arg in args:
        arg_req = Requirement.parse(arg)
        found_dist = working_set.find(arg_req)
        if not found_dist:
            print('Could not find %s!' % arg_req)
            continue
        out_zip = Distiller(found_dist).distill()
        print('Dumped %s => %s' % (arg_req, out_zip))
def go(self):
    working_set = WorkingSet()
    for pkgName in self.pkgList:
        try:
            depends = working_set.require(pkgName)
        except DistributionNotFound:
            from setuptools.command.easy_install import main as install
            import urllib2
            print("\n  Library '" + pkgName + "' needs to be installed.")
            # Prompt for user decision if a package requires installation
            allow = raw_input(" May I install the above library? ([y]/n): ")
            allow = allow.upper()
            if allow == "N" or allow == "NO":
                sys.exit("\n ERROR: Please install package '" + pkgName + "' manually.\n")
            else:
                try:
                    response = urllib2.urlopen("http://www.python.org/", timeout=10)
                except urllib2.URLError as err:
                    sys.exit("\n ERROR: No internet connection available.\n")
                try:
                    # Make certain to use the user flag
                    install(["--user", pkgName])
                    # ALTERNATIVE: os.system("easy_install-2.7 --user " + pkgName + "==" + pkgVersion)
                    print("\n Library '" + pkgName + "' installed successfully.")
                    # ALTERNATIVE: print("\n Library '" + pkgName + " v." + pkgVersion + "' installed successfully.")
                except Exception:
                    import os
                    if sys.platform == "linux" or sys.platform == "linux2":
                        if isExe("pip2.7"):
                            os.system("pip2.7 install " + pkgName + " --user")
                        else:
                            print("\n ERROR: Python setuptools inaccessible.\n")
                    elif sys.platform == "darwin":
                        if isExe("pip2.7"):
                            os.system("pip2.7 install " + pkgName + " --user")
                        else:
                            print("\n ERROR: Python setuptools inaccessible.\n")
                    elif sys.platform == "win32":
                        if isExe("pip2.7.exe"):
                            os.system("pip2.7.exe install " + pkgName)
                        else:
                            print("\n ERROR: Python setuptools inaccessible.\n")
            try:
                exec("import " + pkgName)
            except ImportError:
                # After installation via easy_install, Python must be restarted
                # for certain new modules to be properly loaded
                sys.exit("\n Please restart this script.\n")
def testResolve(self):
    ad = pkg_resources.Environment([])
    ws = WorkingSet([])

    # Resolving no requirements -> nothing to install
    self.assertEqual(list(ws.resolve([], ad)), [])

    # Request something not in the collection -> DistributionNotFound
    self.assertRaises(
        pkg_resources.DistributionNotFound,
        ws.resolve, parse_requirements("Foo"), ad
    )

    Foo = Distribution.from_filename(
        "/foo_dir/Foo-1.2.egg",
        metadata=Metadata(('depends.txt', "[bar]\nBaz>=2.0"))
    )
    ad.add(Foo)
    ad.add(Distribution.from_filename("Foo-0.9.egg"))

    # Request thing(s) that are available -> list to activate
    for i in range(3):
        targets = list(ws.resolve(parse_requirements("Foo"), ad))
        self.assertEqual(targets, [Foo])
        list(map(ws.add, targets))
    self.assertRaises(VersionConflict, ws.resolve,
                      parse_requirements("Foo==0.9"), ad)
    ws = WorkingSet([])  # reset

    # Request an extra that causes an unresolved dependency for "Baz"
    self.assertRaises(
        pkg_resources.DistributionNotFound,
        ws.resolve, parse_requirements("Foo[bar]"), ad
    )
    Baz = Distribution.from_filename(
        "/foo_dir/Baz-2.1.egg",
        metadata=Metadata(('depends.txt', "Foo"))
    )
    ad.add(Baz)

    # Activation list now includes resolved dependency
    self.assertEqual(
        list(ws.resolve(parse_requirements("Foo[bar]"), ad)),
        [Foo, Baz]
    )

    # Requests for conflicting versions produce VersionConflict
    self.assertRaises(
        VersionConflict,
        ws.resolve, parse_requirements("Foo==1.2\nFoo!=1.2"), ad
    )
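# A hedged, runnable companion to testResolve() above: resolving against the
# live sys.path environment instead of a synthetic one.  'setuptools' is used
# because it is present wherever pkg_resources itself is importable.
from pkg_resources import Requirement, WorkingSet

ws = WorkingSet()  # defaults to sys.path entries
dists = ws.resolve([Requirement.parse('setuptools')])
print([str(d) for d in dists])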