def minimum_path(cls):
    """Return as a tuple the emulated sys.path and sys.path_importer_cache of
    a bare python installation, a la python -S.

    :returns: ``(scrubbed_sys_path, scrubbed_importer_cache)`` with all
        site-packages and user-site distribution locations removed.
    """
    from site import USER_SITE
    from twitter.common.collections import OrderedSet
    from pkg_resources import find_distributions
    from distutils.sysconfig import get_python_lib

    # Both the pure-python and platform-specific site-packages roots.
    site_libs = set([get_python_lib(plat_specific=False),
                     get_python_lib(plat_specific=True)])
    site_distributions = OrderedSet()
    for path_element in sys.path:
        if any(path_element.startswith(site_lib) for site_lib in site_libs):
            TRACER.log('Inspecting path element: %s' % path_element)
            site_distributions.update(dist.location for dist in find_distributions(path_element))
    user_site_distributions = OrderedSet(dist.location for dist in find_distributions(USER_SITE))
    for path in site_distributions:
        TRACER.log('Scrubbing from site-packages: %s' % path)
    for path in user_site_distributions:
        TRACER.log('Scrubbing from user site: %s' % path)
    scrub_paths = site_distributions | user_site_distributions
    scrubbed_sys_path = list(OrderedSet(sys.path) - scrub_paths)
    # BUG FIX: ``filter`` returns a one-shot iterator on Python 3, so repeated
    # ``key not in scrub_from_importer_cache`` tests would exhaust it after the
    # first membership probe and scrub almost nothing.  Materialize to a set.
    scrub_from_importer_cache = set(
        key for key in sys.path_importer_cache
        if any(key.startswith(path) for path in scrub_paths))
    scrubbed_importer_cache = dict((key, value) for (key, value) in sys.path_importer_cache.items()
                                   if key not in scrub_from_importer_cache)
    return scrubbed_sys_path, scrubbed_importer_cache
def minimum_path(cls):
    """Return as a tuple the emulated sys.path and sys.path_importer_cache of
    a bare python installation, a la python -S.

    :returns: ``(scrubbed_sys_path, scrubbed_importer_cache)`` with all
        site-library, site-extras and user-site distribution locations removed.
    """
    site_libs = set(cls._site_libs())
    for site_lib in site_libs:
        TRACER.log('Found site-library: %s' % site_lib)
    for extras_path in cls._extras_paths():
        TRACER.log('Found site extra: %s' % extras_path)
        site_libs.add(extras_path)
    # Normalize so startswith() prefix matching below is reliable.
    site_libs = set(os.path.normpath(path) for path in site_libs)

    site_distributions = OrderedSet()
    for path_element in sys.path:
        if any(path_element.startswith(site_lib) for site_lib in site_libs):
            TRACER.log('Inspecting path element: %s' % path_element, V=2)
            site_distributions.update(dist.location for dist in find_distributions(path_element))
    user_site_distributions = OrderedSet(dist.location for dist in find_distributions(USER_SITE))
    for path in site_distributions:
        TRACER.log('Scrubbing from site-packages: %s' % path)
    for path in user_site_distributions:
        TRACER.log('Scrubbing from user site: %s' % path)
    scrub_paths = site_distributions | user_site_distributions
    scrubbed_sys_path = list(OrderedSet(sys.path) - scrub_paths)
    # BUG FIX: ``filter`` returns a one-shot iterator on Python 3, so repeated
    # membership tests against it go stale after the first probe.  Materialize
    # the scrub keys into a set so every importer-cache key is checked.
    scrub_from_importer_cache = set(
        key for key in sys.path_importer_cache
        if any(key.startswith(path) for path in scrub_paths))
    scrubbed_importer_cache = dict((key, value) for (key, value) in sys.path_importer_cache.items()
                                   if key not in scrub_from_importer_cache)
    return scrubbed_sys_path, scrubbed_importer_cache
def install_requirement(req, path=None, extra_site_dirs=None,
                        index='http://pypi.python.org/simple',
                        repositories=None,
                        interpreter=PythonInterpreter.get()):
    """
    Install the requirement "req" to path "path" with extra_site_dirs put onto
    the PYTHONPATH. Returns the set of newly added Distributions
    (of type pkg_resource.Distribution.)

    "req" can either be a pkg_resources.Requirement object (e.g. created by
    pkg_resources.Requirement.parse("MySQL-python==1.2.2")) or an installable
    package (e.g. a tar.gz source distribution, a source or binary .egg)

    "path" is the into which we install the requirements. if path is None,
    we'll create one for you.
    """
    # TODO(wickman) Consider importing the easy_install Command class directly and
    # manipulating it with initialize/finalize options + run.

    # FIX: the defaults were mutable ([] / [...]) and shared across calls,
    # so any caller mutation leaked into subsequent invocations.
    if extra_site_dirs is None:
        extra_site_dirs = []
    if repositories is None:
        repositories = ['http://pypi.python.org/simple']
    # NOTE(review): ``interpreter=PythonInterpreter.get()`` is still evaluated
    # once at import time; kept as-is since callers may rely on that snapshot.

    if not isinstance(req, pkg_resources.Requirement):
        if not os.path.exists(req):
            try:
                req = pkg_resources.Requirement.parse(req)
            except Exception:
                # Narrowed from a bare ``except:`` which also swallowed
                # KeyboardInterrupt/SystemExit.
                raise TypeError(
                    "req should either be an installable file, a pkg_resources.Requirement "
                    "or a valid requirement string. got %s" % req)

    if path is None:
        path = tempfile.mkdtemp()
    if not os.path.exists(path):
        safe_mkdir(path)

    easy_install_args = [
        '--install-dir=%s' % path,
        '--site-dirs=%s' % ','.join([path] + extra_site_dirs),
        '--always-copy',
        '--multi-version',
        '--exclude-scripts',
        '-i', index]
    for repo in reversed(repositories):
        easy_install_args.extend(['-f', repo])
    easy_install_args.append(str(req))

    # Snapshot before/after so we can report only the newly added dists.
    distributions_backup = set(pkg_resources.find_distributions(path))
    rc = ReqBuilder.run_easy_install([path] + extra_site_dirs + sys.path,
                                     easy_install_args, interpreter)
    distributions = set(pkg_resources.find_distributions(path))
    new_distributions = distributions - distributions_backup
    return new_distributions if rc else set()
def test_zipped_egg(self, project_dir, target_dir):
    """Install the project as a zipped egg and verify it is discoverable."""
    # install this distro as an unpacked egg:
    install_cmd = [
        sys.executable,
        '-c', 'from setuptools.command.easy_install import main; main()',
        '-mNx',
        '-d', target_dir,
        '--zip-ok',
        project_dir,
    ]
    subprocess.check_call(install_cmd)
    # The zipped egg must be found by the default (zip-aware) scan...
    found = pkg_resources.find_distributions(target_dir)
    assert ['my-test-package'] == [dist.project_name for dist in found]
    # ...but not when only= restricts the scan to the directory itself.
    strict = pkg_resources.find_distributions(target_dir, only=True)
    assert list(strict) == []
def install(name, progress_callback=None):
    """Download, unpack and install the add-on *name* from its sdist release.

    Side effects: installs via setup.py (``--user`` outside a virtualenv),
    refreshes pkg_resources/sys.path state and re-runs addon registration
    callbacks.  ``progress_callback(value, 0)`` is invoked during download
    when provided.
    """
    # Look the add-on up in the (read-only) addons shelf.
    with closing(open_addons(flag="r")) as addons:
        addon = addons[name.lower()]
    # Only source distributions are installable here.
    source_urls = [url for url in addon.release_urls
                   if url.packagetype == "sdist"]
    # NOTE(review): raises IndexError if the add-on publishes no sdist.
    release_url = source_urls[0]
    try:
        tmpdir = tempfile.mkdtemp()
        stream = urllib2.urlopen(release_url.url, timeout=120)
        package_path = os.path.join(tmpdir, release_url.filename)
        # Adapt the two-arg progress callback to the single-value one
        # expected by copyfileobj.
        progress_cb = (lambda value: progress_callback(value, 0)) \
            if progress_callback else None
        with open(package_path, "wb") as package_file:
            Orange.utils.copyfileobj(
                stream, package_file, progress=progress_cb)
        extract_archive(package_path, tmpdir)
        setup_py = os.path.join(tmpdir, name + '-' + addon.available_version,
                                'setup.py')
        if not os.path.isfile(setup_py):
            raise Exception("Unable to install add-on - it is not properly "
                            "packed.")
        switches = []
        if not hasattr(sys, "real_prefix"):
            # we're not in a virtualenv
            switches.append('--user')
        run_setup(setup_py, ['install'] + switches)
    finally:
        # Always clean up the download/extract scratch directory.
        shutil.rmtree(tmpdir, ignore_errors=True)
    # Force site/pkg_resources to notice the freshly installed package.
    for p in list(sys.path):
        site.addsitedir(p)
    reload(pkg_resources)
    for p in list(sys.path):
        pkg_resources.find_distributions(p)
    from orngRegistry import load_new_addons
    load_new_addons()
    load_installed_addons()
    # Let interested parties (e.g. UI) refresh their add-on lists.
    for func in addon_refresh_callback:
        func()
def working_set(buildout):
    """Creates and returns a new working set based on user prefixes
    and existing packages already installed.

    Resolution order: develop eggs first, then egg directories (newest
    first), then user site-packages, and finally the system path.

    :param buildout: buildout configuration mapping (needs
        'develop-eggs-directory').
    :raises RuntimeError: when an egg-link points at a path with no
        matching distribution.
    """
    working_set = pkg_resources.WorkingSet([])

    # add development directory first
    dev_dir = buildout['develop-eggs-directory']
    for path in fnmatch.filter(os.listdir(dev_dir), '*.egg-link'):
        full_path = os.path.join(dev_dir, path)
        # FIX: close the egg-link file deterministically instead of leaking
        # the handle until GC; first line of the file is the project path.
        with open(full_path, 'rt') as egg_link:
            python_path = egg_link.read().split('\n')[0]
        wants = os.path.splitext(path)[0]
        distro = [k for k in pkg_resources.find_distributions(python_path)
                  if k.project_name == wants]
        if not distro:
            raise RuntimeError("Could not find a distribution for `%s' under `%s'"
                               " - check egg-link at `%s'" % (wants, python_path, full_path))
        working_set.add(distro[0])

    # add all egg directories, newest first
    for path in order_egg_dirs(buildout):
        working_set.add_entry(path)

    # adds the user paths
    for path in find_site_packages(get_prefixes(buildout)):
        if has_distribution(path) and path not in working_set.entries:
            working_set.add_entry(path)

    # finally, adds the system path
    for path in site.sys.path:
        if has_distribution(path) and path not in working_set.entries:
            working_set.add_entry(path)

    return working_set
def getDistEggs(self):
    """Scan ``self.dist_dir`` and return a dict mapping
    ``(project_name, version)`` to the corresponding egg Distribution.
    """
    eggs = []
    # FIX: loop variable renamed from ``file`` -- it shadowed the builtin.
    for entry in os.listdir(self.dist_dir):
        eggs += pkg_resources.find_distributions(os.path.join(self.dist_dir, entry))
    return dict(((egg.project_name, egg.version), egg) for egg in eggs)
def eggs_info(directory):
    """Return a ``{project_name: version}`` dict of the eggs a buildout's
    bin/ scripts put on sys.path, or None when there is no bin/ directory.

    Works by executing the sys.path-mangling preamble of each known script
    and inspecting what it added to sys.path.  Python 2 only (``exec``
    statement).
    """
    files_of_interest = ["python", "zopectl", "django", "test", "paster"]
    possible_egg_dirs = set()
    # Snapshot sys.path so we can undo the exec'd scripts' mutations.
    before = copy.copy(sys.path)
    bin_dir = os.path.join(directory, "bin")
    if not os.path.exists(bin_dir):
        return
    for file_ in os.listdir(bin_dir):
        if file_ not in files_of_interest:
            continue
        new_contents = []
        for line in open(os.path.join(directory, "bin", file_)):
            # Skipping imports that may be unavailable in the current path.
            if line.strip() != "import sys":
                # When we see these lines we have past the sys.path:
                if "import " in line or "os.chdir" in line or "__import__" in line or "_interactive = True" in line:
                    break
            new_contents.append(line)
        # This is very evil, but cool! Because the __name__ != main the
        # remainder of the script is not executed.
        exec "".join(new_contents)
        possible_egg_dirs.update(sys.path)
    # reset sys.path
    sys.path = before
    eggs = {}
    for dir_ in possible_egg_dirs:
        # only=True: look at the directory itself, not its subdirectories.
        info = list(pkg_resources.find_distributions(dir_, only=True))
        if len(info) == 0:
            continue
        info = info[0]
        eggs[info.project_name] = info.version
    return eggs
def locate_and_install(self, suite, installed=None):
    """Ensure this requirement is installed into the suite's directory.

    Returns an already-matching installation from *installed* when one
    exists, otherwise pip-installs into a temp dir, moves the result into
    a per-requirement target directory, and returns the Distribution
    found there (or None).

    :raises PundleException: when pip or the file moves fail.
    """
    if self.egg:
        # Egg requirements are keyed by the literal requirement line,
        # base64-encoded so it is filesystem-safe.
        key = b64encode(self.line.encode('utf-8')).decode()
        target_dir = op.join(suite.parser.directory,
                             '{}+{}'.format(self.egg, key))
        target_req = self.line
        ready = [installation for installation in (installed or [])
                 if getattr(installation, 'line', None) == self.line]
    else:
        # Regular requirements are pinned to the located version.
        loc_dist = self.locate(suite)
        ready = [installation for installation in (installed or [])
                 if installation.version == loc_dist.version]
        target_dir = op.join(suite.parser.directory,
                             '{}-{}'.format(loc_dist.key, loc_dist.version))
        target_req = '%s==%s' % (loc_dist.name, loc_dist.version)
    if ready:
        return ready[0]
    try:
        makedirs(target_dir)
    except OSError:
        # Target already exists -- fine, we will move files into it.
        pass
    tmp_dir = tempfile.mkdtemp()
    try:
        # Install into a scratch dir first, then move atomically-ish into
        # the target so a failed install doesn't leave a half-filled dir.
        pip.main([
            'install',
            '--no-deps',
            '-t', tmp_dir,
            target_req
        ])
        for item in os.listdir(tmp_dir):
            shutil.move(op.join(tmp_dir, item), op.join(target_dir, item))
    except Exception as exc:
        raise PundleException('%s was not installed due error %s' % (self.egg or loc_dist.name, exc))
    finally:
        shutil.rmtree(tmp_dir, ignore_errors=True)
    # First (and normally only) distribution found in the target dir.
    return next(iter(pkg_resources.find_distributions(target_dir, True)), None)
def _directory_import(self):
    """Locate the DIST_NAME distribution under ``self.path``.

    Adds the path to a private working set, generating egg-info via
    ``setup.py egg_info`` when necessary, and returns the matching
    ``pkg_resources.Distribution`` (or None when nothing is found).
    """
    root = os.path.abspath(self.path)

    # Probe with a private, empty WorkingSet instead of the global
    # pkg_resources.working_set: older setuptools versions raise a
    # VersionConflict there when an upgrade is being installed.
    probe = pkg_resources.WorkingSet([])
    probe.add_entry(root)
    found = probe.by_key.get(DIST_NAME)
    if found is not None:
        return found

    # No egg-info/dist-info yet -- generate it if a setup.py is present.
    setup_py = os.path.join(root, 'setup.py')
    if not os.path.isfile(setup_py):
        return None

    # Use subprocess instead of setuptools' run_setup to avoid
    # segmentation faults; see:
    # https://github.com/cython/cython/issues/2104
    sp.check_output([sys.executable, 'setup.py', 'egg_info'], cwd=root)

    for candidate in pkg_resources.find_distributions(root, True):
        # There should be only one...
        return candidate
    return None
def get_pkginfo(dist):
    """Get a dictionary containing package information for a package

    `dist` can be either a Distribution instance or, as a shortcut,
    directly the module instance, if one can safely infer a Distribution
    instance from it.

    Always returns a dictionary but it will be empty if no Distribution
    instance can be created for the given module.

    NOTE(review): as visible here the function never executes ``return
    info`` -- the final loop falls off the end, returning None on a
    metadata read error; confirm whether a trailing ``return info`` was
    lost from this excerpt.
    """
    import types
    if isinstance(dist, types.ModuleType):
        # Map the module back to the distribution that provides it.
        module = dist
        module_path = get_module_path(module)
        for dist in find_distributions(module_path, only=True):
            if os.path.isfile(module_path) or \
                    dist.key == module.__name__.lower():
                break
        else:
            # No matching distribution: per the contract, empty dict.
            return {}
    import email
    attrs = ('author', 'author-email', 'license', 'home-page', 'summary',
             'description', 'version')
    info = {}

    def normalize(attr):
        # PKG-INFO header names use dashes; expose snake_case keys.
        return attr.lower().replace('-', '_')

    try:
        # PKG-INFO is RFC-822 style, so the email parser handles it.
        pkginfo = email.message_from_string(dist.get_metadata('PKG-INFO'))
        for attr in [key for key in attrs if key in pkginfo]:
            info[normalize(attr)] = pkginfo[attr]
    except IOError, e:
        # On failure, fill every field with the error message.
        err = 'Failed to read PKG-INFO file for %s: %s' % (dist, e)
        for attr in attrs:
            info[normalize(attr)] = err
def activate(self):
    """Scan the cache path once, registering every addable distribution
    with the environment; subsequent calls are no-ops."""
    if self._activated:
        return
    with TRACER.timed('Activating cache %s' % self._path):
        for dist in find_distributions(self._path):
            if self._env.can_add(dist):
                self._env.add(dist)
    self._activated = True
def cache_distribution(cls, zf, source, target_dir):
    """Possibly cache an egg from within a zipfile into target_cache.

    Given a zipfile handle and a filename corresponding to an egg distribution within
    that zip, maybe write to the target cache and return a Distribution."""
    dependency_basename = os.path.basename(source)
    if not os.path.exists(target_dir):
        # Extract into a unique temp dir, then rename into place so that
        # concurrent extractions of the same egg cannot interleave.
        target_dir_tmp = target_dir + '.' + uuid.uuid4().hex
        for name in zf.namelist():
            if name.startswith(source) and not name.endswith('/'):
                # strip off prefix + '/'
                target_name = os.path.join(dependency_basename, name[len(source) + 1:])
                with contextlib.closing(zf.open(name)) as zi:
                    with safe_open(os.path.join(target_dir_tmp, target_name), 'wb') as fp:
                        shutil.copyfileobj(zi, fp)
        try:
            os.rename(target_dir_tmp, target_dir)
        except OSError as e:
            # ENOTEMPTY: another process won the rename race -- its copy is
            # equivalent, so just discard ours.
            if e.errno == errno.ENOTEMPTY:
                safe_rmtree(target_dir_tmp)
            else:
                raise
    # The cache layout guarantees exactly one distribution per target dir.
    distributions = list(find_distributions(target_dir))
    assert len(distributions) == 1, 'Failed to cache distribution %s' % source
    return distributions[0]
def _register_surf():
    """Add the installed 'surf' distribution to the global working set."""
    import surf
    # Two levels up from surf/__init__.py is the install prefix.
    install_root = os.path.split(os.path.split(surf.__file__)[0])[0]
    surf_dist = next(
        (dist for dist in pkg_resources.find_distributions(install_root)
         if dist.key == 'surf'),
        None)
    if surf_dist is not None:
        pkg_resources.working_set.add(surf_dist)
def read_package_name_from_pkg_resources(path):
    """Return the project names of the distributions found at *path*, or
    None when nothing is found or the lookup fails (the failure is logged).

    FIX: ``find_distributions`` returns a generator -- it is never None and
    has no ``.split()`` -- so the original always raised AttributeError,
    which was silently swallowed into the log.
    """
    try:
        dists = list(pkg_resources.find_distributions(path))
        if dists:
            return [dist.project_name for dist in dists]
    except Exception:
        LOG.exception("Unable to execute 'pkg_resources.find_distributions(%s)'" % path)
def __init__(self, verbose=True):
    """Initialize the manager and discover every entry-point group
    advertised by distributions reachable from site/user/sys.path."""
    self.plugin_types = ('wralea', 'plugin', 'adapters', 'interfaces')
    self.groups = set()
    self.managers = {}
    self._services = {}
    self._interfaces = {}
    self._lowername = {}

    # Gather every location that may hold python modules.
    search_paths = site.getsitepackages()
    user_site = site.getusersitepackages()
    if isinstance(user_site, basestring):
        search_paths.append(user_site)
    elif isinstance(user_site, (tuple, list)):
        search_paths += list(user_site)
    search_paths += sys.path

    # Scan each unique path and record the entry-point groups it exposes.
    for search_path in set(search_paths):
        for distrib in pkg_resources.find_distributions(search_path):
            for group in distrib.get_entry_map():
                self.groups.add(group)

    self.groups = list(self.groups)
    self.tags = self._clean_lst(self.groups)
    self._load_interfaces()
def run(self):
    """Discover and run the test suite, optionally under coverage.

    Uses ``self.args.where`` as the test root, falling back to the package
    directory inferred from the distribution found in '.'.
    """
    where = self.args.where
    if where is None:
        # NOTE(review): the ``True`` default means that when no distribution
        # exists in '.', ``dist.project_name`` below raises AttributeError
        # on a bool.  Presumably a fail-fast choice -- confirm intent.
        dist = next(pkg_resources.find_distributions('.'), True)
        # Dotted project names map to nested package directories.
        where = os.path.join('.', *dist.project_name.split('.'))
    print('Running tests from {}'.format(where))
    with_coverage = self.args.with_coverage
    if with_coverage:
        # Imported lazily so coverage is only required when requested.
        import coverage
        cov = coverage.coverage(branch=True, source=[where])
        cov.start()
    loader = unittest.TestLoader()
    if self.args.tests:
        # Run only the explicitly named tests.
        suite = loader.loadTestsFromNames(self.args.tests)
    else:
        suite = loader.discover(where)
    runner = unittest.TextTestRunner()
    runner.run(suite)
    if with_coverage:
        cov.stop()
        cov.report()
def upload_file(self, stream, filename=None, data=None, load=True):
    """Write *data* to *filename* under ``self.local_dir`` and, when *load*
    is true, (re)load the corresponding module or egg.

    Returns ``{'status': 'OK', 'nbytes': ...}`` on success or a dict with
    ``'status': 'error'`` and the pickled exception on load failure.
    """
    out_filename = os.path.join(self.local_dir, filename)
    if isinstance(data, unicode):
        data = data.encode()
    with open(out_filename, 'wb') as f:
        f.write(data)
        f.flush()
    if load:
        try:
            name, ext = os.path.splitext(filename)
            if ext in ('.py', '.pyc'):
                logger.info("Reload module %s from .py file", name)
                # Strip any version suffix (e.g. 'pkg-1.0' -> 'pkg').
                name = name.split('-')[0]
                reload(import_module(name))
            if ext == '.egg':
                sys.path.append(out_filename)
                # FIX: materialize the generator -- ``if not pkgs:`` on a
                # generator is always False, so the empty-egg warning below
                # could never fire.
                pkgs = list(pkg_resources.find_distributions(out_filename))
                for pkg in pkgs:
                    logger.info("Load module %s from egg", pkg.project_name)
                    reload(import_module(pkg.project_name))
                if not pkgs:
                    logger.warning("Found no packages in egg file")
        except Exception as e:
            logger.exception(e)
            return {'status': 'error', 'exception': dumps(e)}
    return {'status': 'OK', 'nbytes': len(data)}
def locate_and_install(self, suite, installed=None):
    """Ensure the located version of this requirement is installed into the
    suite's directory and return its Distribution (or None).

    Reuses an entry from *installed* whose version already matches.

    :raises PundleException: when the pip subprocess exits non-zero.
    """
    loc_dist = self.locate()
    ready = [installation for installation in (installed or [])
             if installation.version == loc_dist.version]
    if ready:
        return ready[0]
    target_dir = op.join(suite.parser.directory,
                         '{}-{}'.format(loc_dist.key, loc_dist.version))
    try:
        makedirs(target_dir)
    except OSError:
        # Directory already exists; we will move files into it below.
        pass
    tmp_dir = tempfile.mkdtemp()
    try:
        # Install into a scratch dir, diverting console scripts into a
        # throwaway '.scripts' dir, then move the payload into the target.
        res = subprocess.call([sys.executable,
                               '-m', 'pip', 'install',
                               '--no-deps',
                               '--install-option=%s' % (
                                   '--install-scripts=%s' % op.join(tmp_dir, '.scripts')),
                               '-t', tmp_dir,
                               '%s==%s' % (loc_dist.name, loc_dist.version)
                               ])
        for item in os.listdir(tmp_dir):
            shutil.move(op.join(tmp_dir, item), op.join(target_dir, item))
    finally:
        shutil.rmtree(tmp_dir, ignore_errors=True)
    if res != 0:
        raise PundleException('%s was not installed due error' % loc_dist.name)
    # First (and normally only) distribution found in the target dir.
    return next(iter(pkg_resources.find_distributions(target_dir, True)), None)
def _find_projects_unfiltered_iter(searchpaths=None, dists_relpaths=('.',),
                                   distinction='dist', update_cmd=None,
                                   logger=None, loglevel=_logging.DEBUG):
    """Yield Project objects discovered under *searchpaths*.

    With ``distinction='path'`` each existing path yields at most one
    project; with ``'dist'`` every distribution found under each
    ``dists_relpaths`` entry yields a project.  Paths that do not exist
    are skipped; projects whose dist cannot be found are ignored.

    :raises ValueError: for an unknown *distinction* value.
    """
    for searchpath in searchpaths:
        if not _os.path.exists(searchpath):
            continue
        if update_cmd:
            # Refresh the on-disk metadata before inspecting it.
            _misc.update_metadata_at(searchpath, cmd=update_cmd,
                                     logger=logger, loglevel=loglevel)
        if distinction == 'path':
            try:
                yield Project.from_path(searchpath,
                                        dists_relpaths=dists_relpaths)
            except _exc.DistNotFound:
                pass
        elif distinction == 'dist':
            for relpath in dists_relpaths:
                scan_dir = _os.path.join(searchpath, relpath)
                for dist in _pkgr.find_distributions(scan_dir, only=True):
                    try:
                        yield Project.from_dist(dist)
                    except _exc.DistNotFound:
                        pass
        else:
            raise ValueError('invalid project distinction criterion {!r}'
                             .format(distinction))
def parse_requirements(self):
    """Build a ``{key: CustomReq}`` mapping, taken from the requirements
    file when one is configured, otherwise from the package's setup.py
    dependencies."""
    if not self.requirements_file:
        # No requirements file: fall back to the installed package metadata.
        pkg = next(pkg_resources.find_distributions(self.package), None)
        return dict((req.key, CustomReq(str(req), 'setup.py'))
                    for req in pkg.requires())
    lines = parse_file(self.requirements_file)
    reqs = (CustomReq(line, 'requirements file') for line in lines)
    return dict((req.key, req) for req in reqs)
def _directory_import(path): """ Import astropy_helpers from the given path, which will be added to sys.path. Must return True if the import succeeded, and False otherwise. """ # Return True on success, False on failure but download is allowed, and # otherwise raise SystemExit path = os.path.abspath(path) pkg_resources.working_set.add_entry(path) dist = pkg_resources.working_set.by_key.get(DIST_NAME) if dist is None: # We didn't find an egg-info/dist-info in the given path, but if a # setup.py exists we can generate it setup_py = os.path.join(path, 'setup.py') if os.path.isfile(setup_py): with _silence(): run_setup(os.path.join(path, 'setup.py'), ['egg_info']) for dist in pkg_resources.find_distributions(path, True): # There should be only one... pkg_resources.working_set.add(dist, path, False) break return dist
def _directory_import(path): """ Import astropy_helpers from the given path, which will be added to sys.path. Must return True if the import succeeded, and False otherwise. """ # Return True on success, False on failure but download is allowed, and # otherwise raise SystemExit path = os.path.abspath(path) # Use an empty WorkingSet rather than the man pkg_resources.working_set, # since on older versions of setuptools this will invoke a VersionConflict # when trying to install an upgrade ws = pkg_resources.WorkingSet([]) ws.add_entry(path) dist = ws.by_key.get(DIST_NAME) if dist is None: # We didn't find an egg-info/dist-info in the given path, but if a # setup.py exists we can generate it setup_py = os.path.join(path, 'setup.py') if os.path.isfile(setup_py): with _silence(): run_setup(os.path.join(path, 'setup.py'), ['egg_info']) for dist in pkg_resources.find_distributions(path, True): # There should be only one... return dist return dist
def find(self, requirement, platform=pkg_resources.get_platform(), py_version=None):
    """Query the location of a distribution that fulfills a requirement.

    Returns a tuple of:
      location = the location of the distribution (or None if none found.)
      repo = the repo in which it was found (or None if local or not found.)

    Note: the *platform* default is captured once at definition time.
    """
    if py_version is None:
        py_version = '%s.%s' % (sys.version_info[0], sys.version_info[1])
    env = pkg_resources.Environment()
    if isinstance(requirement, str):
        requirement = pkg_resources.Requirement.parse(requirement)

    # The local cache wins when it already satisfies the requirement.
    for dist in pkg_resources.find_distributions(self._cache):
        if dist in requirement and env.can_add(dist):
            return (dist.location, None)

    # Otherwise fan out to the remote package indexes.
    for repo in self._pis:
        repo.find_packages(requirement)
        for package in repo[requirement.project_name]:
            if not pkg_resources.compatible_platforms(package.platform, platform):
                continue
            if package.py_version is not None and package.py_version != py_version:
                continue
            if package not in requirement:
                continue
            return (package.location, repo)
    return (None, None)
def setupClass(cls):
    """Install the fixture package once and register its distribution on
    the global working set."""
    cls.tmppath, cls.pythonpath = install_package()
    import pkg_resources
    found = list(pkg_resources.find_distributions(cls.pythonpath))
    pkg_resources.working_set.add(found[0])
def find_wheels_on_path(importer, path_item, only=False):
    """Yield a Distribution for every .whl file directly inside
    *path_item*; yields nothing when *only* is set or the path is not a
    readable directory. (*importer* is unused, kept for the finder API.)"""
    if only:
        return
    if not os.path.isdir(path_item) or not os.access(path_item, os.R_OK):
        return
    for entry in os.listdir(path_item):
        if not entry.lower().endswith('.whl'):
            continue
        wheel_path = os.path.join(path_item, entry)
        for dist in pkg_resources.find_distributions(wheel_path):
            yield dist
def test_conditional_dependencies(self):
    """Base requires exclude the extra; the 'baz' extra adds quux."""
    parse = pkg_resources.Requirement.parse
    expected = [parse('splort==4'), parse('quux>=1.1')]
    for dist in pkg_resources.find_distributions(self.tmpdir):
        assert dist.requires() == expected[:1]
        assert dist.requires(extras=('baz',)) == expected
        assert dist.extras == ['baz']
def distribution_from_path(cls, path, name=None):
    """Return a distribution from a path.

    If name is provided, find the distribution.  If none is found matching
    the name, return None.  If name is not provided and there is
    unambiguously a single distribution, return that distribution
    otherwise None.
    """
    # Monkeypatch pkg_resources finders should it not already be so.
    register_finders()
    if name is not None:
        for dist in find_distributions(path):
            if dist.project_name == name:
                return dist
        return None
    candidates = list(find_distributions(path))
    if len(candidates) == 1:
        return candidates[0]
    return None
def scan_egg_link(self, path, entry):
    """Register the source distribution referenced by an .egg-link file.

    The egg-link is expected to contain exactly two non-empty lines
    (egg path, setup path); any other shape is ignored.

    FIX: read the file through a context manager -- the original left the
    handle open until garbage collection.
    """
    with open(os.path.join(path, entry)) as egg_link:
        lines = [_f for _f in map(str.strip, egg_link) if _f]
    if len(lines) == 2:
        for dist in find_distributions(os.path.join(path, lines[0])):
            dist.location = os.path.join(path, *lines)
            dist.precedence = SOURCE_DIST
            self.add(dist)
def test_conditional_dependencies(self):
    """Base requires exclude the extra; the 'baz' extra adds quux."""
    requires = [pkg_resources.Requirement.parse('splort==4'),
                pkg_resources.Requirement.parse('quux>=1.1')]
    for d in pkg_resources.find_distributions(self.tmpdir):
        # FIX: assertEquals is a deprecated alias (removed in Python 3.12);
        # use assertEqual.
        self.assertEqual(d.requires(), requires[:1])
        self.assertEqual(d.requires(extras=('baz',)), requires)
        self.assertEqual(d.extras, ['baz'])
def get_distributions(self):
    # type: () -> Generator[pkg_resources.Distribution, None, None]
    """Yield every distribution installed on this environment's library
    paths.

    :return: an iterator over the distributions found on the library path
    :rtype: iterator
    """
    pkg_resources = self.safe_import("pkg_resources")
    for libdir in self.base_paths["libdirs"].split(os.pathsep):
        for dist in pkg_resources.find_distributions(libdir):
            yield dist
def minimum_path(cls):
    """ Return as a tuple the emulated sys.path and sys.path_importer_cache of
    a bare python installation, a la python -S. """
    from site import USER_SITE
    from twitter.common.collections import OrderedSet
    from pkg_resources import find_distributions
    from distutils.sysconfig import get_python_lib

    site_libs = set([
        get_python_lib(plat_specific=False),
        get_python_lib(plat_specific=True)
    ])
    site_distributions = OrderedSet()
    for path_element in sys.path:
        if any(path_element.startswith(site_lib) for site_lib in site_libs):
            TRACER.log('Inspecting path element: %s' % path_element)
            site_distributions.update(
                dist.location for dist in find_distributions(path_element))
    user_site_distributions = OrderedSet(
        dist.location for dist in find_distributions(USER_SITE))
    for path in site_distributions:
        TRACER.log('Scrubbing from site-packages: %s' % path)
    for path in user_site_distributions:
        TRACER.log('Scrubbing from user site: %s' % path)
    scrub_paths = site_distributions | user_site_distributions
    scrubbed_sys_path = list(OrderedSet(sys.path) - scrub_paths)
    # BUG FIX: ``filter`` returns a one-shot iterator on Python 3; repeated
    # ``in`` tests against it are wrong after the first probe.  Use a set.
    scrub_from_importer_cache = set(
        key for key in sys.path_importer_cache
        if any(key.startswith(path) for path in scrub_paths))
    scrubbed_importer_cache = dict(
        (key, value) for (key, value) in sys.path_importer_cache.items()
        if key not in scrub_from_importer_cache)
    return scrubbed_sys_path, scrubbed_importer_cache
def parse_directory(self):
    """Group the distributions found in versioned ('name-version')
    subdirectories of ``self.directory`` by distribution key.

    Returns ``{}`` when the directory does not exist.
    """
    if not op.exists(self.directory):
        return {}
    grouped = defaultdict(list)
    for item in os.listdir(self.directory):
        if '-' not in item:
            continue  # not a 'name-version' entry
        dist = next(
            iter(pkg_resources.find_distributions(
                op.join(self.directory, item), True)),
            None)
        if dist is not None:
            grouped[dist.key].append(dist)
    return grouped
def scan_egg_link(self, path, entry):
    """Register the source distribution an .egg-link file points at;
    files not containing exactly two non-empty lines are ignored."""
    egg_link_path = os.path.join(path, entry)
    with open(egg_link_path) as fh:
        lines = [stripped for stripped in (raw.strip() for raw in fh)
                 if stripped]
    if len(lines) != 2:
        # format is not recognized; punt
        return
    egg_path, _setup_path = lines
    for dist in find_distributions(os.path.join(path, egg_path)):
        dist.location = os.path.join(path, *lines)
        dist.precedence = SOURCE_DIST
        self.add(dist)
def get_sources(path):
    """Return a dictionary mapping Python module source paths to the
    distributions that contain them.
    """
    sources = {}
    for dist in find_distributions(path, only=True):
        try:
            prefixes = [name + '/' for name in
                        dist.get_metadata('top_level.txt').splitlines()]
            for src in dist.get_metadata('SOURCES.txt').splitlines():
                if any(src.startswith(prefix) for prefix in prefixes):
                    sources[src] = dist
        except (KeyError, IOError):
            pass  # Metadata not found
    return sources
def get_installer(module):
    """
    Try to find which package manager installed a module.

    :param module: Module to check
    :return: Package manager or None
    """
    file_name = get_module_file_attribute(module)
    # Truncate the module's file path at its 'site-packages' root.
    site_dir = file_name[:file_name.index('site-packages') + len('site-packages')]
    # This is necessary for situations where the project name and module name don't match, i.e.
    # Project name: pyenchant Module name: enchant
    pkgs = pkg_resources.find_distributions(site_dir)
    package = None
    for pkg in pkgs:
        # NOTE(review): substring match on the key -- assumes the module
        # name appears inside the project key; confirm for edge cases.
        if module.lower() in pkg.key:
            package = pkg
            break
    # NOTE(review): if no package matched, ``package`` is None here --
    # copy_metadata(None) presumably fails; confirm against callers.
    metadata_dir, dest_dir = copy_metadata(package)[0]
    # Check for an INSTALLER file in the metadata_dir and return the first line
    # which should be the program that installed the module.
    installer_file = os.path.join(metadata_dir, 'INSTALLER')
    if os.path.isdir(metadata_dir) and os.path.exists(installer_file):
        with open_file(installer_file, 'r') as installer_file_object:
            lines = installer_file_object.readlines()
            if lines[0] != '':
                installer = lines[0].rstrip('\r\n')
                logger.debug(
                    'Found installer: \'{0}\' for module: \'{1}\' from package: \'{2}\''
                    .format(installer, module, package))
                return installer
    if is_darwin:
        # No INSTALLER metadata: fall back to macOS package managers.
        try:
            output = exec_command_stdout('port', 'provides', file_name)
            if 'is provided by' in output:
                logger.debug(
                    'Found installer: \'macports\' for module: \'{0}\' from package: \'{1}\''
                    .format(module, package))
                return 'macports'
        except OSError:
            # 'port' binary not available; try homebrew next.
            pass
        # Homebrew installs resolve (via symlink) into the Cellar.
        real_path = os.path.realpath(file_name)
        if 'Cellar' in real_path:
            logger.debug(
                'Found installer: \'homebrew\' for module: \'{0}\' from package: \'{1}\''
                .format(module, package))
            return 'homebrew'
    return None
def get_metadata():
    """Read this package's PKG-INFO metadata plus the (de)serialized
    train/predict argument schemas, and return them as a dict.

    :raises HTTPBadRequest: when the distribution lookup fails with an
        unexpected error.
    """
    module = __name__.split('.', 1)
    try:
        pkg = pkg_resources.get_distribution(module[0])
    except pkg_resources.RequirementParseError:
        # if called from CLI, try to get pkg from the path
        # NOTE(review): if this fallback finds zero or multiple distros,
        # ``pkg`` stays unbound and the PKG-INFO loop below raises
        # NameError -- confirm whether that path can occur.
        distros = list(
            pkg_resources.find_distributions(cfg.BASE_DIR, only=True))
        if len(distros) == 1:
            pkg = distros[0]
    except Exception as e:
        raise HTTPBadRequest(reason=e)

    # deserialize key-word arguments
    train_args = _fields_to_dict(get_train_args())
    # make 'type' JSON serializable
    for key, val in train_args.items():
        train_args[key]['type'] = str(val['type'])

    predict_args = _fields_to_dict(get_predict_args())
    # make 'type' JSON serializable
    for key, val in predict_args.items():
        predict_args[key]['type'] = str(val['type'])

    meta = {
        'name': None,
        'version': None,
        'summary': None,
        'home-page': None,
        'author': None,
        'author-email': None,
        'license': None,
        'help-train': train_args,
        'help-predict': predict_args
    }
    # Fill the metadata fields from matching 'Key: value' PKG-INFO lines.
    for line in pkg.get_metadata_lines("PKG-INFO"):
        line_low = line.lower()  # to avoid inconsistency due to letter cases
        for par in meta:
            if line_low.startswith(par.lower() + ":"):
                _, value = line.split(": ", 1)
                meta[par] = value
    return meta
def test_setup_requires_with_python_requires(self, monkeypatch, tmpdir):
    ''' Check `python_requires` is honored. '''
    # Force pip offline/verbose so only the local index below is used.
    monkeypatch.setenv(str('PIP_RETRIES'), str('0'))
    monkeypatch.setenv(str('PIP_TIMEOUT'), str('0'))
    monkeypatch.setenv(str('PIP_NO_INDEX'), str('1'))
    monkeypatch.setenv(str('PIP_VERBOSE'), str('1'))
    # dep 1.0: compatible with the running interpreter.
    dep_1_0_sdist = 'dep-1.0.tar.gz'
    dep_1_0_url = path_to_url(str(tmpdir / dep_1_0_sdist))
    dep_1_0_python_requires = '>=2.7'
    make_python_requires_sdist(
        str(tmpdir / dep_1_0_sdist), 'dep', '1.0', dep_1_0_python_requires)
    # dep 2.0: explicitly excludes the running interpreter version.
    dep_2_0_sdist = 'dep-2.0.tar.gz'
    dep_2_0_url = path_to_url(str(tmpdir / dep_2_0_sdist))
    dep_2_0_python_requires = '!=' + '.'.join(
        map(str, sys.version_info[:2])) + '.*'
    make_python_requires_sdist(
        str(tmpdir / dep_2_0_sdist), 'dep', '2.0', dep_2_0_python_requires)
    # A minimal PEP 503-style index advertising both sdists with their
    # data-requires-python markers.
    index = tmpdir / 'index.html'
    index.write_text(DALS(
        '''
        <!DOCTYPE html>
        <html><head><title>Links for dep</title></head>
        <body>
        <h1>Links for dep</h1>
        <a href="{dep_1_0_url}" data-requires-python="{dep_1_0_python_requires}">{dep_1_0_sdist}</a><br/>
        <a href="{dep_2_0_url}" data-requires-python="{dep_2_0_python_requires}">{dep_2_0_sdist}</a><br/>
        </body>
        </html>
        ''').format(  # noqa
        dep_1_0_url=dep_1_0_url,
        dep_1_0_sdist=dep_1_0_sdist,
        dep_1_0_python_requires=dep_1_0_python_requires,
        dep_2_0_url=dep_2_0_url,
        dep_2_0_sdist=dep_2_0_sdist,
        dep_2_0_python_requires=dep_2_0_python_requires,
    ), 'utf-8')
    index_url = path_to_url(str(index))
    with contexts.save_pkg_resources_state():
        test_pkg = create_setup_requires_package(
            str(tmpdir),
            'python-xlib', '0.19',  # Ignored (overridden by setup_attrs).
            setup_attrs=dict(
                setup_requires='dep', dependency_links=[index_url]))
        test_setup_py = os.path.join(test_pkg, 'setup.py')
        run_setup(test_setup_py, [str('--version')])
    # Only the interpreter-compatible dep 1.0 must have been fetched.
    eggs = list(map(str, pkg_resources.find_distributions(
        os.path.join(test_pkg, '.eggs'))))
    assert eggs == ['dep 1.0']
def _add_egg(self, filename, working_set=None):
    """ Create and add a distribution from the specified '.egg'. """
    if working_set is None:
        working_set = pkg_resources.working_set

    # The eggs must be in our egg directory!
    egg_path = join(dirname(__file__), "eggs", filename)

    # Adding the egg's distributions to the working set makes any Python
    # modules in the egg available for importing.
    for distribution in pkg_resources.find_distributions(egg_path):
        working_set.add(distribution)
def _find_distros(self, path):
    """Return the distributions found at *path*, installing pkg_resources
    first when it is missing from the current environment.

    :raises RuntimeError: when pkg_resources still cannot be imported.
    """
    # Install pkg_resources on demand when no loader can find it.
    if not bool(pkgutil.find_loader('pkg_resources')):
        self._installer(['-q', 'pkg_resources'])
        # Make the freshly-populated library folder importable so the
        # new pkg_resources can be loaded.
        site.addsitedir(self.lib_folder)
    # TODO: not sure on how this behaves
    try:
        import pkg_resources as pkg_r
    except ImportError:
        raise RuntimeError(
            'Could not load the pkg_resources package. Please check that the module is correctly installed in one of '
            + str(sys.path))
    return pkg_r.find_distributions(path)
def put(self, eggfile, project, version):
    """Store *eggfile* under the project/version egg path and install the
    egg's declared dependencies.

    The egg's ``install_requires`` entries (as recorded by setup.py) are
    installed with pip. An unreadable/corrupt egg is silently tolerated.
    """
    import subprocess
    import sys
    eggpath = self._eggpath(project, version)
    eggdir = path.dirname(eggpath)
    if not path.exists(eggdir):
        makedirs(eggdir)
    with open(eggpath, 'wb') as f:
        copyfileobj(eggfile, f)
    try:
        d = next(pkg_resources.find_distributions(eggpath))
        for r in d.requires():  # install_requires of setup.py
            # Invoke pip via the current interpreter as an argument list
            # instead of os.system() with a concatenated shell string:
            # avoids shell injection and surfaces install failures.
            subprocess.check_call(
                [sys.executable, '-m', 'pip', 'install', str(r)])
    except StopIteration:
        # raise ValueError("Unknown or corrupt egg") # tests can't pass
        pass
def packages(self):
    """Return a dict describing the distributions in this environment.

    Scans the environment's site-packages directory and maps each
    distribution key to a ``{'name': key, 'version': version}`` record.
    (The previous docstring claimed a list was returned; the method has
    always returned a dict keyed by distribution key.)
    """
    site_pckg_path = self._get_site_pckg_path(self.path)
    return {
        dist.key: {'name': dist.key, 'version': dist.version}
        for dist in pkg_resources.find_distributions(site_pckg_path)
    }
def get_distribution(package: str) -> pkg_resources.Distribution:
    """Return the installed distribution for *package* under its lib dir.

    Searches every ``lib/pythonX.Y/site-packages`` directory of the
    package's install prefix and returns the first distribution whose
    project name matches *package*.

    Raises:
        Exception: if no matching distribution is found under ``lib``.
    """
    lib = package_dir(package) / 'lib'
    package_dists = []
    for py_version in lib.iterdir():  # Iterate over 'python3.4' etc.
        if not py_version.is_dir():
            continue
        site_packages = py_version / 'site-packages'
        # Filter directly instead of materialising an intermediate list.
        package_dists += [
            d for d in pkg_resources.find_distributions(str(site_packages))
            if d.project_name == package
        ]
    if not package_dists:
        raise Exception('Distribution not found in {}'.format(lib))
    return package_dists[0]
def get_package_versions():
    """Return a sorted list of (project_name, version) pairs on sys.path.

    Duplicate project names keep the first version encountered (earlier
    sys.path entries win). Returns an empty list when setuptools is not
    available.
    """
    try:
        import pkg_resources
    except ImportError:
        logging.info("Setuptools not installed. Unable to determine version.")
        return []
    seen = {}
    for entry in sys.path:
        for dist in pkg_resources.find_distributions(entry):
            # has_version() guards against metadata without a version field.
            if dist.has_version() and dist.project_name not in seen:
                seen[dist.project_name] = dist.version
    return sorted(seen.items())
def test_odoo_addon1_post_version_override_env(self):
    """Test post version strategy override via environment variable."""
    addon1_dir = os.path.join(DATA_DIR, "setup_reusable_addons", "addon1")
    # Run egg_info with the post-version strategy disabled through the
    # override environment variable.
    subprocess.check_call(
        [sys.executable, "setup.py", "egg_info"],
        cwd=addon1_dir,
        env=dict(os.environ, SETUPTOOLS_ODOO_POST_VERSION_STRATEGY_OVERRIDE="none"),
    )
    egg_info_dir = os.path.join(addon1_dir, "odoo8_addon_addon1.egg-info")
    assert os.path.isdir(egg_info_dir)
    try:
        dist = next(pkg_resources.find_distributions(addon1_dir))
        self.assertEqual(dist.key, "odoo8-addon-addon1")
        # With strategy "none", the version is the bare manifest version —
        # no post-version suffix appended.
        self.assertEqual(dist.version, "8.0.1.0.0")
    finally:
        # Always remove the generated metadata so reruns start clean.
        shutil.rmtree(egg_info_dir)
def iter_groups():
    """Yield every distinct entry-point group discoverable on this system.

    Scans the site-packages directories, the user site directory and
    sys.path, collecting the entry-point group names of every
    distribution found.
    """
    search_paths = site.getsitepackages()
    usersite = site.getusersitepackages()
    # getusersitepackages() may return one path or a sequence of paths.
    if isinstance(usersite, basestring):
        search_paths.append(usersite)
    elif isinstance(usersite, (tuple, list)):
        search_paths.extend(usersite)
    search_paths += sys.path
    seen_groups = set()
    # scan all entry_point and list different groups
    for entry in set(search_paths):
        for dist in pkg_resources.find_distributions(entry):
            seen_groups.update(dist.get_entry_map())
    for group in seen_groups:
        yield group
def test_custom_project(self):
    """egg_info on the custom project yields the expected requirements."""
    project_dir = os.path.join(DATA_DIR, "setup_custom_project")
    subprocess.check_call([sys.executable, "setup.py", "egg_info"], cwd=project_dir)
    egg_info_dir = os.path.join(project_dir, "test_custom_project.egg-info")
    assert os.path.isdir(egg_info_dir)
    dist = next(pkg_resources.find_distributions(project_dir))
    self.assertEqual(
        dist.requires(),
        [
            pkg_resources.Requirement.parse(r)
            for r in ["pyflakes", "odoo>=8.0a,<9.0a", "python-dateutil"]
        ],
    )
    # The zip-safety marker must not have been written for this project.
    self.assertFalse(dist.has_metadata("not-zip-safe"))
    shutil.rmtree(egg_info_dir)
def test_odoo_addon2(self):
    """egg_info on addon2 yields the expected key, requires and version."""
    addon2_dir = os.path.join(DATA_DIR, 'setup_reusable_addons', 'addon2')
    subprocess.check_call([sys.executable, 'setup.py', 'egg_info'],
                          cwd=addon2_dir)
    egg_info_dir = os.path.join(addon2_dir, 'odoo8_addon_addon2.egg-info')
    assert os.path.isdir(egg_info_dir)
    try:
        dist = next(pkg_resources.find_distributions(addon2_dir))
        # assertEqual instead of the deprecated assertEquals alias, matching
        # the sibling tests in this file.
        self.assertEqual(dist.key, 'odoo8-addon-addon2')
        self.assertEqual(dist.requires(), [
            pkg_resources.Requirement.parse(r)
            for r in ['odoo8-addon-addon1',
                      'odoo>=8.0a,<9.0a',
                      'python-dateutil']
        ])
        self.assertTrue(dist.has_metadata('not-zip-safe'))
        self.assertEqual(dist.version, "8.0.1.0.1")
    finally:
        # Always remove the generated metadata so reruns start clean.
        shutil.rmtree(egg_info_dir)
def get_pkginfo(dist):
    """Get a dictionary containing package information for a package

    `dist` can be either a Distribution instance or, as a shortcut,
    directly the module instance, if one can safely infer a Distribution
    instance from it.

    Always returns a dictionary but it will be empty if no Distribution
    instance can be created for the given module.
    """
    import types
    if isinstance(dist, types.ModuleType):
        try:
            from pkg_resources import find_distributions
            module = dist
            module_path = get_module_path(module)
            # Find the distribution the module belongs to; single-file
            # modules match any distribution at that path.
            for dist in find_distributions(module_path, only=True):
                if os.path.isfile(module_path) or \
                        dist.key == module.__name__.lower():
                    break
            else:
                return {}
        except ImportError:
            return {}
    import email
    # Explicitly import the submodule: `email.Errors` was the Python-2-only
    # capitalized alias and `email.errors` is not guaranteed to be bound as
    # an attribute by `import email` alone.
    import email.errors
    attrs = ('author', 'author-email', 'license', 'home-page', 'summary',
             'description', 'version')
    info = {}

    def normalize(attr):
        # PKG-INFO header names use dashes; expose pythonic underscore keys.
        return attr.lower().replace('-', '_')
    try:
        pkginfo = email.message_from_string(dist.get_metadata('PKG-INFO'))
        for attr in [key for key in attrs if key in pkginfo]:
            info[normalize(attr)] = pkginfo[attr]
    except IOError as e:
        # Metadata unreadable: report the error under every expected key.
        err = 'Failed to read PKG-INFO file for %s: %s' % (dist, e)
        for attr in attrs:
            info[normalize(attr)] = err
    except email.errors.MessageError as e:
        err = 'Failed to parse PKG-INFO file for %s: %s' % (dist, e)
        for attr in attrs:
            info[normalize(attr)] = err
    return info
def convert_distlib_to_setuptools(installed_dist):
    """Get the setuptools equivalent of a distlib installed dist.

    Args:
        installed_dist (`distlib.database.InstalledDistribution`:
            Distribution to convert.

    Returns:
        `pkg_resources.DistInfoDistribution`: Equivalent setuptools dist
        object, or None when no distribution with a matching key exists.
    """
    dist_dir = os.path.dirname(installed_dist.path)
    matches = (
        d for d in pkg_resources.find_distributions(dist_dir)
        if d.key == installed_dist.key
    )
    return next(matches, None)
def put(self, eggfile, project, version):
    """Store *eggfile* under the project/version egg path and pip-install
    the egg's declared dependencies (its ``install_requires``).

    An unreadable/corrupt egg is silently tolerated.
    """
    eggpath = self._eggpath(project, version)
    eggdir = path.dirname(eggpath)
    if not path.exists(eggdir):
        makedirs(eggdir)
    with open(eggpath, 'wb') as f:
        copyfileobj(eggfile, f)
    try:
        d = next(pkg_resources.find_distributions(eggpath))
        for r in d.requires():  # install_requires of setup.py
            # str(r) instead of the dunder call r.__str__().
            subprocess.check_call(
                [sys.executable, '-m', 'pip', 'install', str(r)])
    except StopIteration:
        # raise ValueError("Unknown or corrupt egg") # tests can't pass
        pass
def get_dist_from_egg_info(package_path):
    """Build egg-info for the package at *package_path* and return its
    pkg_resources distribution, or None when none can be produced.

    The generated ``*.egg-info`` directories are removed afterwards on a
    best-effort basis.
    """
    with change_dir(package_path):
        logger.info("Building egg info for package at %s", package_path)
        build_egg_info()
        try:
            # next() builtin instead of the Python-2-only .next() method.
            dist = next(pkg_resources.find_distributions(package_path))
            dist.requires()
            return dist
        except StopIteration:
            logger.warning("Unable to get distribution information from package at %s."
                           "Requirements analysis might find false positives",
                           package_path)
            return None
        finally:
            # Best-effort cleanup of the generated metadata; failures here
            # must not mask the function's result.
            for egg_path in glob.glob("*.egg-info"):
                try:
                    shutil.rmtree(egg_path)
                except Exception:
                    pass
def main(args, options):
    """Distill each requirement in *args* against the distributions found
    in the --site directory, printing the output zip for every match."""
    from pkg_resources import WorkingSet, Requirement, find_distributions
    if not options.site_dir:
        app.error('Must supply --site')
    distributions = list(find_distributions(options.site_dir))
    working_set = WorkingSet()
    for dist in distributions:
        working_set.add(dist)
    for arg in args:
        arg_req = Requirement.parse(arg)
        found_dist = working_set.find(arg_req)
        if not found_dist:
            print('Could not find %s!' % arg_req)
            # Bug fix: skip unmatched requirements — previously execution
            # fell through and passed None to Distiller below.
            continue
        out_zip = Distiller(found_dist).distill()
        print('Dumped %s => %s' % (arg_req, out_zip))
def _read_egg_requirements(self, eggf):
    """Copy the egg file-like *eggf* to a temp file and return its
    requirements (install_requires) as a list of strings.

    Raises:
        ValueError: if the temp file is not a readable egg.
    """
    # Initialise before the try so the finally clause cannot hit an
    # unbound name when mkstemp() itself fails.
    eggpath = None
    try:
        prefix = '%s-%s-' % (self.project_name, 0)
        fd, eggpath = tempfile.mkstemp(prefix=prefix, suffix='.egg')
        logger.debug('tmp egg file saved to %s' % eggpath)
        # Context manager closes the handle even if the copy fails.
        with os.fdopen(fd, 'wb') as lf:
            eggf.seek(0)
            shutil.copyfileobj(eggf, lf)
        try:
            # next() builtin instead of the Python-2-only .next() method.
            d = next(pkg_resources.find_distributions(eggpath))
        except StopIteration:
            raise ValueError("Unknown or corrupt egg")
        requirements = [str(x) for x in d.requires()]
        return requirements
    finally:
        if eggpath:
            os.remove(eggpath)
def check_package_exists(package, lib_dir):
    """Check if a package is installed globally or in lib_dir.

    Returns True when the requirement is met.
    Returns False when the package is not installed or doesn't meet req."""
    try:
        req = pkg_resources.Requirement.parse(package)
    except ValueError:
        # This is a zip file
        req = pkg_resources.Requirement.parse(urlparse(package).fragment)

    def _satisfied(dists):
        # True when any distribution in *dists* fulfils the requirement.
        return any(dist in req for dist in dists)

    # Check packages from lib dir
    if lib_dir is not None and _satisfied(pkg_resources.find_distributions(lib_dir)):
        return True
    # Check packages from global + virtual environment
    return _satisfied(pkg_resources.working_set)
def load_egg_from_file(self, fullName):
    """Load a plugin from the .egg at *fullName* and register it.

    Returns the registered extension on success; returns None when
    pkg_resources is unavailable or no entry point could be loaded.
    """
    self.__logger.debug('load_egg_from_file. START, file=%s' % (fullName,))
    try:
        import pkg_resources
    except ImportError:
        # Narrowed from a bare except: only a missing module is expected.
        self.__logger.critical('load_egg_from_file. No support to load .egg plugins. Install package pkg_resources.')
        return
    pkg_resources.working_set.add_entry(fullName)
    dist_generator = pkg_resources.find_distributions(fullName)
    for dist in dist_generator:
        try:
            extension_class = dist.load_entry_point("pysqlin.plugins", "plugin")
            # create an instance of the class
            extension = extension_class(self.api)
            return self.__register(extension)
        except Exception as e:
            # 'except Exception, e' is Python-2-only syntax; 'as e' works
            # on Python 2.6+ and 3.
            self.__logger.critical('load_egg_from_file. Exception, msg=%s' % (e,))
def _get_distribution_for_node(self, node): """ Get the distribution a module belongs to. Bug: This currently only handles packages in eggs. """ # TODO: Modulegraph could flag a module as residing in a zip file # TODO add support for single modules in eggs (e.g. mock-1.0.1) # TODO add support for egg-info: # TODO add support for wheels (dist-info) # # TODO add support for unpacked eggs and for new .whl packages. # Wheels: # .../site-packages/pip/ # It seams this has to be a package # .../site-packages/pip-6.1.1.dist-info # Unzipped Eggs: # .../site-packages/mock.py # this may be a single module, too! # .../site-packages/mock-1.0.1-py2.7.egg-info # Unzipped Eggs (I asume: multiple-versions externaly managed): # .../site-packages/pyPdf-1.13-py2.6.egg/pyPdf/ # .../site-packages/pyPdf-1.13-py2.6.egg/EGG_INFO # Zipped Egg: # .../site-packages/zipped.egg/zipped_egg/ # .../site-packages/zipped.egg/EGG_INFO modpath = node.filename if not modpath: # e.g. namespace-package return [] # TODO: add other ways to get a distribution path distpath = get_path_to_egg(modpath) if not distpath or distpath in self.__seen_distribution_paths: # no egg or already handled return [] self.__seen_distribution_paths.add(distpath) dists = list(pkg_resources.find_distributions(distpath)) assert len(dists) == 1 dist = dists[0] dist._pyinstaller_info = { 'zipped': zipfile.is_zipfile(dist.location), 'egg': True, # TODO when supporting other types 'zip-safe': dist.has_metadata('zip-safe'), } return dists
def import_file(path):
    """
    Loads modules for a file (.py, .pyc, .zip, .egg)

    Returns the list of (re)loaded module objects; an empty list when
    nothing importable was found.
    """
    directory, filename = os.path.split(path)
    name, ext = os.path.splitext(filename)
    names_to_import = []
    # Directory temporarily prepended to sys.path for .py/.pyc imports.
    tmp_python_path = None
    if ext in ('.py', '.pyc'):
        if directory not in sys.path:
            tmp_python_path = directory
        names_to_import.append(name)
        # Ensures that no pyc file will be reused
        cache_file = cache_from_source(path)
        if os.path.exists(cache_file):
            os.remove(cache_file)
    if ext in ('.egg', '.zip'):
        if path not in sys.path:
            sys.path.insert(0, path)
        if ext == '.egg':
            import pkg_resources
            # An egg may hold several distributions; import each project.
            pkgs = pkg_resources.find_distributions(path)
            for pkg in pkgs:
                names_to_import.append(pkg.project_name)
        elif ext == '.zip':
            names_to_import.append(name)
            with zipfile.ZipFile(path) as zf:
                zf.extractall(directory)
    loaded = []
    if not names_to_import:
        logger.warning("Found nothing to import from %s", filename)
    else:
        # Drop stale finder caches so the fresh files are seen.
        invalidate_caches()
        if tmp_python_path is not None:
            sys.path.insert(0, tmp_python_path)
        try:
            for name in names_to_import:
                logger.info("Reload module %s from %s file", name, ext)
                # reload() after import_module picks up changed sources for
                # modules that were already imported.
                loaded.append(reload(import_module(name)))
        finally:
            # Restore sys.path even when an import fails.
            if tmp_python_path is not None:
                sys.path.remove(tmp_python_path)
    return loaded
def locate_and_install(self, suite, installed=None, prereleases=False):
    """Resolve this requirement, reuse a matching installation from
    *installed* when possible, otherwise pip-install it into a dedicated
    directory and return the resulting distribution (or None).
    """
    if self.egg:
        # Egg/VCS requirement: key the target directory on the raw
        # requirement line (base64 keeps it filesystem-safe).
        key = b64encode(self.line.encode('utf-8')).decode()
        target_dir = op.join(suite.parser.directory, '{}+{}'.format(self.egg, key))
        target_req = self.line
        ready = [
            installation
            for installation in (installed or [])
            if getattr(installation, 'line', None) == self.line
        ]
    else:
        loc_dist = self.locate(suite, prereleases=prereleases)
        ready = [
            installation
            for installation in (installed or [])
            if installation.version == loc_dist.version
        ]
        target_dir = op.join(
            suite.parser.directory,
            '{}-{}'.format(loc_dist.key, loc_dist.version))
        # DEL? target_req = '%s==%s' % (loc_dist.name, loc_dist.version)
        # If we use custom index, then we want not to configure PIP with it
        # and just give it URL
        target_req = loc_dist.download_url
    if ready:
        # Already installed at the wanted version/line: nothing to do.
        return ready[0]
    try:
        makedirs(target_dir)
    except OSError:
        # Target directory already exists.
        pass
    # Install into a temp dir first, then move into place, so a failed
    # install never leaves a half-populated target directory.
    tmp_dir = tempfile.mkdtemp()
    print('Use temp dir', tmp_dir)
    try:
        print('pip install --no-deps -t %s %s' % (tmp_dir, target_req))
        pip_exec(['install', '--no-deps', '-t', tmp_dir, '-v', target_req])
        for item in os.listdir(tmp_dir):
            shutil.move(op.join(tmp_dir, item), op.join(target_dir, item))
    except Exception as exc:
        raise PundleException('%s was not installed due error %s' % (self.egg or loc_dist.name, exc))
    finally:
        shutil.rmtree(tmp_dir, ignore_errors=True)
    # Return the freshly installed distribution, or None when none found.
    return next(iter(pkg_resources.find_distributions(target_dir, True)), None)
def iter_plugininfo():
    """Yield (distribution, metadata-dict) pairs for the plugins loaded so far.

    PKG-INFO metadata is parsed with the stdlib email parser on Python 3
    and with rfc822 on Python 2; bookkeeping keys are filtered out of the
    returned dict.
    """
    is_py3 = sys.version_info > (3, 0)
    if is_py3:
        from email.parser import Parser
        from email.policy import default
    else:
        from rfc822 import Message  # pylint: disable=import-error
        from cStringIO import StringIO  # pylint: disable=import-error
    filtered_keys = ['metadata-version', 'home-page', 'platform']
    for plugin_dir in PLUGIN_DIRECTORIES:
        for dist in find_distributions(plugin_dir):
            raw_metadata = dist.get_metadata('PKG-INFO')
            if is_py3:
                msg = Parser(policy=default).parsestr(raw_metadata)
            else:
                msg = Message(StringIO(raw_metadata))
            distinfo = dict(
                item for item in list(msg.items())
                if item[0] not in filtered_keys
            )
            yield dist, distinfo