def find_packages_latests_versions(self, options):
    """Yield ``(installed_dist, latest_remote_version)`` for each installed
    distribution that has a newer version on the configured indexes.

    NOTE(review): the name ("latests") looks like a typo, but renaming it
    would break callers — confirm before changing.
    """
    index_urls = [options.index_url] + options.extra_index_urls
    if options.no_index:
        logger.info('Ignoring indexes: %s', ','.join(index_urls))
        index_urls = []

    # --use-mirrors / --mirrors are deprecated; --mirrors entries are still
    # honoured by appending them to the index URL list.
    if options.use_mirrors:
        warnings.warn(
            "--use-mirrors has been deprecated and will be removed in the "
            "future. Explicit uses of --index-url and/or --extra-index-url"
            " is suggested.",
            RemovedInPip7Warning,
        )

    if options.mirrors:
        warnings.warn(
            "--mirrors has been deprecated and will be removed in the "
            "future. Explicit uses of --index-url and/or --extra-index-url"
            " is suggested.",
            RemovedInPip7Warning,
        )
        index_urls += options.mirrors

    # Collect dependency_links.txt entries from every installed dist so the
    # finder can also consider those locations.
    dependency_links = []
    for dist in get_installed_distributions(local_only=options.local,
                                            user_only=options.user):
        if dist.has_metadata('dependency_links.txt'):
            dependency_links.extend(
                dist.get_metadata_lines('dependency_links.txt'),
            )

    with self._build_session(options) as session:
        finder = self._build_package_finder(options, index_urls, session)
        finder.add_dependency_links(dependency_links)

        installed_packages = get_installed_distributions(
            local_only=options.local,
            user_only=options.user,
            include_editables=False,
        )
        for dist in installed_packages:
            req = InstallRequirement.from_line(
                dist.key, None, isolated=options.isolated_mode,
            )
            try:
                link = finder.find_requirement(req, True)

                # If link is None, means installed version is most
                # up-to-date
                if link is None:
                    continue
            except DistributionNotFound:
                continue
            else:
                remote_version = finder._link_package_versions(
                    link, req.name
                ).version
                yield dist, remote_version
def find_packages_latest_versions(self, options):
    """Yield ``(installed_dist, latest_remote_version, typ)`` where ``typ``
    is 'wheel', 'sdist', or 'unknown' for each dist with a remote version.
    """
    index_urls = [options.index_url] + options.extra_index_urls
    if options.no_index:
        logger.info('Ignoring indexes: %s', ','.join(index_urls))
        index_urls = []

    # Collect dependency_links.txt entries from every installed dist.
    dependency_links = []
    for dist in get_installed_distributions(local_only=options.local,
                                            user_only=options.user):
        if dist.has_metadata('dependency_links.txt'):
            dependency_links.extend(
                dist.get_metadata_lines('dependency_links.txt'),
            )

    with self._build_session(options) as session:
        finder = self._build_package_finder(options, index_urls, session)
        finder.add_dependency_links(dependency_links)

        installed_packages = get_installed_distributions(
            local_only=options.local,
            user_only=options.user,
            include_editables=False,
        )
        # Empty FormatControl: no binary/source restrictions per package.
        format_control = FormatControl(set(), set())
        wheel_cache = WheelCache(options.cache_dir, format_control)
        for dist in installed_packages:
            req = InstallRequirement.from_line(
                dist.key, None, isolated=options.isolated_mode,
                wheel_cache=wheel_cache
            )
            typ = 'unknown'
            try:
                link = finder.find_requirement(req, True)

                # If link is None, means installed version is most
                # up-to-date
                if link is None:
                    continue
            except DistributionNotFound:
                continue
            else:
                canonical_name = pkg_resources.safe_name(req.name).lower()
                formats = fmt_ctl_formats(format_control, canonical_name)
                search = Search(
                    req.name,
                    canonical_name,
                    formats)
                remote_version = finder._link_package_versions(
                    link, search).version
                if link.is_wheel:
                    typ = 'wheel'
                else:
                    typ = 'sdist'
            yield dist, remote_version, typ
def find_extra_reqs(options):
    """Return requirement names that are declared explicitly but never
    imported by the scanned code."""
    # 1. find files used by imports in the code (as best we can without
    # executing)
    used_modules = common.find_imported_modules(options)

    # 2. find which packages provide which files
    installed_files = {}
    all_pkgs = (pkg.project_name for pkg in get_installed_distributions())
    for package in search_packages_info(all_pkgs):
        pkg_name = package["name"]
        pkg_location = package["location"]
        log.debug("installed package: %s (at %s)", pkg_name, pkg_location)
        for rel_path in package.get("files", []):
            abs_path = os.path.realpath(os.path.join(pkg_location, rel_path))
            installed_files[abs_path] = pkg_name
            pkg_dir = common.is_package_file(abs_path)
            if pkg_dir:
                # we've seen a package file so add the bare package directory
                # to the installed list as well as we might want to look up
                # a package by its directory path later
                installed_files[pkg_dir] = pkg_name

    # 3. match imported modules against those packages
    used = collections.defaultdict(list)
    for modname, info in used_modules.items():
        provider = installed_files.get(info.filename)
        if provider is not None:
            log.debug("used module: %s (from package %s)", modname, provider)
            used[canonicalize_name(provider)].append(info)
        else:
            # probably standard library if it's not in the files list
            log.debug("used module: %s (from file %s, assuming stdlib or local)",
                      modname, info.filename)

    # 4. compare with requirements.txt
    explicit = common.find_required_modules(options)
    return [name for name in explicit if name not in used]
def run_editables(self, options):
    """List only the editable installs visible under the given scope."""
    editable_dists = get_installed_distributions(
        local_only=options.local,
        user_only=options.user,
        editables_only=True,
    )
    self.output_package_listing(editable_dists)
def find_packages_latest_versions(self):
    """Return a list of ``pip install <name> --upgrade`` commands, one for
    every installed distribution whose best index candidate is newer than
    the installed version.

    Returns:
        list[str]: upgrade commands (empty if everything is up to date).
    """
    results = []
    index_urls = [self.index_url] + self.extra_index_urls
    if self.no_index:
        # BUGFIX: logger-style '%s' args were being passed to print(),
        # which printed the literal placeholder instead of the URLs.
        print('Ignoring indexes: %s' % ','.join(index_urls))
        index_urls = []

    # Collect dependency_links.txt entries from every installed dist.
    dependency_links = []
    for dist in get_installed_distributions(
            local_only=self.local,
            user_only=self.user,
            editables_only=self.editable):
        if dist.has_metadata('dependency_links.txt'):
            dependency_links.extend(
                dist.get_metadata_lines('dependency_links.txt'),
            )

    with PipSession() as session:
        finder = self._build_package_finder(self.options, index_urls,
                                            session)
        finder.add_dependency_links(dependency_links)

        installed_packages = get_installed_distributions(
            local_only=self.local,
            user_only=self.user,
            editables_only=self.editable,
        )
        for dist in installed_packages:
            typ = 'unknown'
            all_candidates = finder.find_all_candidates(dist.key)
            # BUGFIX: 'options' was a free (undefined) name here and raised
            # NameError at runtime; the pre-release flag lives on
            # self.options (used above for _build_package_finder).
            if not self.options.pre:
                # Remove prereleases
                all_candidates = [candidate for candidate in all_candidates
                                  if not candidate.version.is_prerelease]
            if not all_candidates:
                continue
            best_candidate = max(all_candidates,
                                 key=finder._candidate_sort_key)
            latest_version = best_candidate.version
            # typ is computed but not returned — kept for parity with the
            # original; presumably informational. TODO confirm it is unused.
            if best_candidate.location.is_wheel:
                typ = 'wheel'
            else:
                typ = 'sdist'
            if latest_version > dist.parsed_version:
                results.append(
                    'pip install {} --upgrade'.format(dist.project_name))
    return results
def test_include_globals(self, mock_dist_is_editable,
                         mock_dist_is_local, mock_dist_in_usersite):
    """With local_only=False all four fixture dists are returned."""
    for mock, fake in (
            (mock_dist_is_editable, self.dist_is_editable),
            (mock_dist_is_local, self.dist_is_local),
            (mock_dist_in_usersite, self.dist_in_usersite)):
        mock.side_effect = fake
    dists = get_installed_distributions(local_only=False)
    assert len(dists) == 4
def test_gte_py27_excludes(self, mock_dist_is_editable,
                           mock_dist_is_local, mock_dist_in_usersite):
    """With default arguments every fixture dist is filtered out."""
    for mock, fake in (
            (mock_dist_is_editable, self.dist_is_editable),
            (mock_dist_is_local, self.dist_is_local),
            (mock_dist_in_usersite, self.dist_in_usersite)):
        mock.side_effect = fake
    dists = get_installed_distributions()
    assert len(dists) == 0
def test_editables_only(self, mock_dist_is_editable,
                        mock_dist_is_local, mock_dist_in_usersite):
    """editables_only=True returns exactly the 'editable' fixture dist."""
    for mock, fake in (
            (mock_dist_is_editable, self.dist_is_editable),
            (mock_dist_is_local, self.dist_is_local),
            (mock_dist_in_usersite, self.dist_in_usersite)):
        mock.side_effect = fake
    dists = get_installed_distributions(editables_only=True)
    assert len(dists) == 1, dists
    assert dists[0].test_name == "editable"
def test_exclude_editables(self, mock_dist_is_editable,
                           mock_dist_is_local, mock_dist_in_usersite):
    """include_editables=False keeps only the 'normal' fixture dist."""
    for mock, fake in (
            (mock_dist_is_editable, self.dist_is_editable),
            (mock_dist_is_local, self.dist_is_local),
            (mock_dist_in_usersite, self.dist_in_usersite)):
        mock.side_effect = fake
    dists = get_installed_distributions(include_editables=False)
    assert len(dists) == 1
    assert dists[0].test_name == "normal"
def test_py26_excludes(self, mock_dist_is_editable,
                       mock_dist_is_local, mock_dist_in_usersite):
    """Default filtering leaves exactly the 'argparse' fixture dist."""
    for mock, fake in (
            (mock_dist_is_editable, self.dist_is_editable),
            (mock_dist_is_local, self.dist_is_local),
            (mock_dist_in_usersite, self.dist_in_usersite)):
        mock.side_effect = fake
    dists = get_installed_distributions()
    assert len(dists) == 1
    assert dists[0].key == 'argparse'
def test_freeze_excludes(self, mock_dist_is_editable,
                         mock_dist_is_local, mock_dist_in_usersite):
    """The freeze skip-list removes setuptools/pip/distribute entirely."""
    for mock, fake in (
            (mock_dist_is_editable, self.dist_is_editable),
            (mock_dist_is_local, self.dist_is_local),
            (mock_dist_in_usersite, self.dist_in_usersite)):
        mock.side_effect = fake
    dists = get_installed_distributions(
        skip=('setuptools', 'pip', 'distribute'))
    assert len(dists) == 0
def find_packages_latest_versions(self, options):
    """Yield ``(installed_dist, newest_remote_version, dist_type)`` where
    ``dist_type`` is 'wheel' or 'sdist', for every dist with candidates."""
    index_urls = [options.index_url] + options.extra_index_urls
    if options.no_index:
        logger.info('Ignoring indexes: %s', ','.join(index_urls))
        index_urls = []

    # Gather dependency_links.txt entries from everything installed so the
    # finder can consider those locations too.
    dependency_links = []
    for dist in get_installed_distributions(
            local_only=options.local,
            user_only=options.user,
            editables_only=options.editable):
        if dist.has_metadata('dependency_links.txt'):
            dependency_links.extend(
                dist.get_metadata_lines('dependency_links.txt'),
            )

    with self._build_session(options) as session:
        finder = self._build_package_finder(options, index_urls, session)
        finder.add_dependency_links(dependency_links)

        for dist in get_installed_distributions(
                local_only=options.local,
                user_only=options.user,
                editables_only=options.editable):
            candidates = finder.find_all_candidates(dist.key)
            if not options.pre:
                # Pre-releases are hidden unless --pre was requested.
                candidates = [cand for cand in candidates
                              if not cand.version.is_prerelease]
            if not candidates:
                continue
            best = max(candidates, key=finder._candidate_sort_key)
            dist_type = 'wheel' if best.location.is_wheel else 'sdist'
            yield dist, best.version, dist_type
def run(self, options, args):
    """Entry point for the list command: warn about deprecated flags,
    validate option combinations, gather installed distributions, apply
    outdated/uptodate/not-required filters, and print the listing.
    """
    # The three --allow-* flags are accepted but no longer do anything;
    # warn so users can drop them.
    if options.allow_external:
        warnings.warn(
            "--allow-external has been deprecated and will be removed in "
            "the future. Due to changes in the repository protocol, it no "
            "longer has any effect.",
            RemovedInPip10Warning,
        )

    if options.allow_all_external:
        warnings.warn(
            "--allow-all-external has been deprecated and will be removed "
            "in the future. Due to changes in the repository protocol, it "
            "no longer has any effect.",
            RemovedInPip10Warning,
        )

    if options.allow_unverified:
        warnings.warn(
            "--allow-unverified has been deprecated and will be removed "
            "in the future. Due to changes in the repository protocol, it "
            "no longer has any effect.",
            RemovedInPip10Warning,
        )

    # No explicit --format: warn that the default will change to columns.
    if options.list_format is None:
        warnings.warn(
            "The default format will switch to columns in the future. "
            "You can use --format=(legacy|columns) (or define a "
            "format=(legacy|columns) in your pip.conf under the [list] "
            "section) to disable this warning.",
            RemovedInPip10Warning,
        )

    # The two filters are mutually exclusive.
    if options.outdated and options.uptodate:
        raise CommandError(
            "Options --outdated and --uptodate cannot be combined.")

    packages = get_installed_distributions(
        local_only=options.local,
        user_only=options.user,
        editables_only=options.editable,
        include_editables=options.include_editable,
    )

    if options.outdated:
        packages = self.get_outdated(packages, options)
    elif options.uptodate:
        packages = self.get_uptodate(packages, options)

    if options.not_required:
        packages = self.get_not_required(packages, options)

    self.output_package_listing(packages, options)
def freeze():
    """Show arguments to require() to recreate what has been installed."""
    frozen = {}
    for dist in get_installed_distributions():
        requirement = pip.FrozenRequirement.from_dist(dist, [],
                                                      find_tags=False)
        frozen[requirement.name] = requirement
    # Emit one line per requirement, case-insensitively sorted by name.
    ordered = sorted(frozen.values(), key=lambda req: req.name.lower())
    return [str(req).rstrip() for req in ordered]
def find_packages_latest_versions(cls, options):
    """Yield latest versions as ``(dist, remote_version, dist_type)``."""
    if options.get('no_index'):
        index_urls = []
    else:
        index_urls = ([options.get('index_url')] +
                      options.get('extra_index_urls'))

    # Extra locations advertised via dependency_links.txt metadata.
    dependency_links = []
    for dist in get_installed_distributions(
            local_only=options.get('local'),
            user_only=options.get('user'),
            editables_only=options.get('editable')):
        if dist.has_metadata('dependency_links.txt'):
            dependency_links.extend(
                dist.get_metadata_lines('dependency_links.txt'),
            )

    with cls._build_session(options) as session:
        finder = cls._build_package_finder(options, index_urls, session)
        finder.add_dependency_links(dependency_links)

        cls.installed_distributions = get_installed_distributions(
            local_only=options.get('local'),
            user_only=options.get('user'),
            editables_only=options.get('editable'),
        )
        for dist in cls.installed_distributions:
            candidates = finder.find_all_candidates(dist.key)
            if not options.get('pre'):
                # Remove prereleases
                candidates = [cand for cand in candidates
                              if not cand.version.is_prerelease]
            if not candidates:
                continue
            # pylint: disable=protected-access
            best = max(candidates, key=finder._candidate_sort_key)
            if best.location.is_wheel:
                dist_type = 'wheel'
            else:
                dist_type = 'sdist'
            yield dist, best.version, dist_type
def get_installed_packages(self):
    """Collect name/version/latest/summary/home-page dicts for every
    installed distribution, using pip's outdated-version lookup."""
    collected = []
    lister = ListCommand()
    options, args = lister.parse_args(["--outdated"])
    for dist in get_installed_distributions():
        pkg_name = str(dist).split(" ")[0]
        if pkg_name == "team":
            # skipped by name — presumably the project itself; TODO confirm
            continue
        for info in lister.iter_packages_latest_infos([dist], options):
            newest = str(info.latest_version)
            for meta in search_packages_info([pkg_name]):
                collected.append({
                    "name": meta["name"],
                    "version": meta["version"],
                    "latest": newest,
                    "summary": meta["summary"],
                    "home-page": meta["home-page"],
                })
    return collected
def check_requirements():
    """Scan every installed distribution and return a pair of dicts,
    ``(missing, incompatible)``, each keyed by 'name==version'."""
    installed = get_installed_distributions(skip=())
    missing = {}
    incompatible = {}
    for dist in installed:
        label = '%s==%s' % (dist.project_name, dist.version)
        absent = list(get_missing_reqs(dist, installed))
        if absent:
            missing[label] = absent
        conflicting = list(get_incompatible_reqs(dist, installed))
        if conflicting:
            incompatible[label] = conflicting
    return (missing, incompatible)
def find_missing_reqs(options):
    """Return ``[(package_name, [import_info, ...]), ...]`` for packages
    whose modules are imported by the scanned code but which are absent
    from requirements.txt.
    """
    # 1. find files used by imports in the code (as best we can without
    # executing)
    used_modules = common.find_imported_modules(options)

    # 2. find which packages provide which files
    installed_files = {}
    all_pkgs = (pkg.project_name for pkg in get_installed_distributions())
    for package in search_packages_info(all_pkgs):
        log.debug('installed package: %s (at %s)', package['name'],
                  package['location'])
        # NOTE: 'file' shadows the builtin; kept as-is for this doc pass.
        for file in package['files'] or []:
            path = os.path.realpath(
                os.path.join(package['location'], file))
            installed_files[path] = package['name']
            package_path = common.is_package_file(path)
            if package_path:
                # we've seen a package file so add the bare package directory
                # to the installed list as well as we might want to look up
                # a package by its directory path later
                installed_files[package_path] = package['name']

    # 3. match imported modules against those packages
    used = collections.defaultdict(list)
    for modname, info in used_modules.items():
        # probably standard library if it's not in the files list
        if info.filename in installed_files:
            used_name = normalize_name(installed_files[info.filename])
            log.debug('used module: %s (from package %s)',
                      modname, installed_files[info.filename])
            used[used_name].append(info)
        else:
            log.debug(
                'used module: %s (from file %s, assuming stdlib or local)',
                modname, info.filename)

    # 4. compare with requirements.txt
    explicit = set()
    for requirement in parse_requirements('requirements.txt',
                                          session=PipSession()):
        log.debug('found requirement: %s', requirement.name)
        explicit.add(normalize_name(requirement.name))

    return [(name, used[name]) for name in used if name not in explicit]
def run(self, options, args):
    """Report missing/incompatible requirements; return 1 if any exist."""
    dists = get_installed_distributions(local_only=False, skip=())
    missing_map, conflict_map = check_requirements(dists)

    for dist in dists:
        for req in missing_map.get(dist.key, []):
            logger.info(
                "%s %s requires %s, which is not installed.",
                dist.project_name, dist.version, req.project_name)
        for req, actual in conflict_map.get(dist.key, []):
            logger.info(
                "%s %s has requirement %s, but you have %s %s.",
                dist.project_name, dist.version, req,
                actual.project_name, actual.version)

    if missing_map or conflict_map:
        return 1
    logger.info("No broken requirements found.")
def main():
    """Print 'project: license' for every installed distribution, falling
    back to a no-information message when no License header is found."""
    parser = argparse.ArgumentParser(
        description="Read all installed packages from sys.path and "
                    "list licenses.")
    args = parser.parse_args()

    meta_files_to_check = ['PKG-INFO', 'METADATA']
    for dist in get_installed_distributions():
        license_found = False
        for meta_name in meta_files_to_check:
            if not dist.has_metadata(meta_name):
                continue
            for line in dist.get_metadata_lines(meta_name):
                if 'License: ' not in line:
                    continue
                # split on the first ': ' so license text may contain colons
                _, value = line.split(': ', 1)
                sys.stdout.write("{project_name}: {license}\n".format(
                    project_name=dist.project_name,
                    license=value))
                license_found = True
        if not license_found:
            sys.stdout.write(
                "{project_name}: Found no license information.\n".format(
                    project_name=dist.project_name))
def implicit(session):
    """
    Returns a list of Requirement instances for all the library
    dependencies of a given session. These are matched using the contents
    of "top_level.txt" metadata for all package names in the session.
    """
    package_names = [_get_package_name(g) for g in session.values()]
    package_names = set(filter(None, package_names))
    reqs = {}
    for d in get_installed_distributions():
        # NOTE(review): _get_metadata is a private pkg_resources API.
        for top_level in d._get_metadata("top_level.txt"):
            if top_level in package_names:
                # Sanity check: if a distribution is already in our
                # requirements, make sure we only keep the latest version.
                if d.project_name in reqs:
                    # NOTE(review): max() on version *strings* compares
                    # lexicographically ('9.0' > '10.0') — consider a
                    # parsed-version key. TODO confirm intent.
                    reqs[d.project_name] = max(reqs[d.project_name],
                                               d.version)
                else:
                    reqs[d.project_name] = d.version
    # Pin each collected distribution as 'name==version'.
    return [Requirement.parse('%s==%s' % r) for r in reqs.items()]
def test_licenses(**options):
    """
    Checks for licenses minus those that have been identified to be
    ignored.

    Scans PKG-INFO/METADATA of every installed distribution; prints a
    severity-prefixed line for each problem and asserts that none remain.
    """
    meta_files_to_check = ['PKG-INFO', 'METADATA']
    failed = False

    known_ignores = [
        # --------------------------------------------------------------
        # Pip packages added
        'pip',           # MIT
        'setuptools',    # ?
        # --------------------------------------------------------------
        # Required install packages
        'noise',         # MIT
        # --------------------------------------------------------------
        # Virtualenv packages added
        'wheel',         # MIT
        # --------------------------------------------------------------
        # Test packages added
        'apipkg',        # MIT
        'coverage',      # Apache 2.0
        'detox',         # MIT
        'eventlet',      # MIT
        'execnet',       # MIT
        'flake8',        # MIT
        'greenlet',      # MIT
        'mock',          # BSD
        'mccabe',        # Expat
        'pep8',          # Expat
        'pluggy',        # MIT
        'py',            # MIT
        'pyflakes',      # MIT
        'pytest',        # MIT
        'pytest-cache',  # MIT
        'pytest-cov',    # MIT
        'pytest-flake8',  # BSD
        # NOTE(review): missing comma below — 'pytest-xdist' and 'tox'
        # concatenate into 'pytest-xdisttox'; kept byte-for-byte here,
        # but this means neither name is actually ignored. TODO confirm.
        'pytest-xdist'   # MIT
        'tox',           # MIT
        'virtualenv',    # MIT
        # TravisCI automatically installs nose,
        # which is licensed under the LGPL
        'nose',          # LGPL
        # --------------------------------------------------------------
        # Known licenses that do not register with this test and can
        # be ignored safely
        'alabaster',     # BSD - From Sphinx
        'pbr',           # Apache
        # --------------------------------------------------------------
        # Unknown - TODO: Make sure these are not used within the
        # project
        'ptyprocess',    # ISC
        'gnureadline',   # GPL 2 - TODO: Alternatives?
    ]

    accepted_licenses = [
        'BSD', 'MIT', 'ZPL', 'Zope', 'Zope Public License',
        'Apache', 'Apache 2.0',
        'PSF', 'Python', 'Python Software Foundation',
        'DSF', 'Django', 'Django Software Foundation',
        'ISC', 'ISCL', 'Internet Software Consortium',
    ]

    for installed_distribution in get_installed_distributions():
        found_license = None
        found_valid = None
        skip = False
        severity = ' ? '
        license = 'Found no license information'
        project_name = 'unknown'
        message = '{severity} {project_name}: {license}'
        for metafile in meta_files_to_check:
            if not installed_distribution.has_metadata(metafile):
                continue
            for line in installed_distribution.get_metadata_lines(metafile):
                if 'License: ' in line:
                    found_license = True
                    (k, license) = line.split(': ', 1)
                    project_name = installed_distribution.project_name
                    if project_name in known_ignores:
                        skip = True
                    file = sys.stdout
                    if license.startswith('Copyright'):
                        severity = '   '
                        found_valid = True
                    elif not any(lic in license
                                 for lic in accepted_licenses):
                        severity = '!!!'
                        file = sys.stderr
                        found_valid = False
                    elif 'unknown' in license.lower():
                        found_valid = False
                    else:
                        severity = '   '
                        found_valid = True
                    break
            if found_license:
                break
        if skip:
            continue
        # Only problems are reported; clean dists stay silent.
        if not found_license or not found_valid:
            file = sys.stderr
            msg = message.format(
                severity=severity,
                project_name=project_name,
                license=license
            )
            print(msg, file=file)
        # NOTE(review): found_license is only ever None or True, so the
        # 'is False' test below can never fire. TODO confirm intent.
        if found_license is False:
            failed = True
        if project_name not in known_ignores and found_valid is False:
            failed = True

    assert not failed, "Some licences were not approved or not found"
def run_listing(self, options):
    """Print the unfiltered package listing for the given scope."""
    dists = get_installed_distributions(
        local_only=options.local,
        user_only=options.user,
    )
    self.output_package_listing(dists)
def freeze(
        requirement=None,
        find_links=None,
        local_only=None,
        user_only=None,
        skip_regex=None,
        default_vcs=None,
        isolated=False,
        wheel_cache=None):
    """Yield the lines of `pip freeze` output: '-f' lines for find-links,
    then (when a requirement file is given) its lines annotated against
    what is installed, then any remaining installed requirements.
    """
    find_links = find_links or []
    skip_match = None

    if skip_regex:
        skip_match = re.compile(skip_regex)

    # Collect dependency_links.txt entries and any '#egg=' find-links so
    # FrozenRequirement can resolve editable/URL installs.
    dependency_links = []

    for dist in pkg_resources.working_set:
        if dist.has_metadata('dependency_links.txt'):
            dependency_links.extend(
                dist.get_metadata_lines('dependency_links.txt')
            )
    for link in find_links:
        if '#egg=' in link:
            dependency_links.append(link)
    for link in find_links:
        yield '-f %s' % link
    installations = {}
    for dist in get_installed_distributions(local_only=local_only,
                                            skip=freeze_excludes,
                                            user_only=user_only):
        req = pip.FrozenRequirement.from_dist(
            dist,
            dependency_links
        )
        installations[req.name] = req

    if requirement:
        with open(requirement) as req_file:
            for line in req_file:
                # Pass through comments, blanks, skip-regex matches and
                # option lines verbatim.
                if (not line.strip() or
                        line.strip().startswith('#') or
                        (skip_match and skip_match.search(line)) or
                        line.startswith((
                            '-r', '--requirement',
                            '-Z', '--always-unzip',
                            '-f', '--find-links',
                            '-i', '--index-url',
                            '--extra-index-url'))):
                    yield line.rstrip()
                    continue

                if line.startswith('-e') or line.startswith('--editable'):
                    if line.startswith('-e'):
                        line = line[2:].strip()
                    else:
                        line = line[len('--editable'):].strip().lstrip('=')
                    line_req = InstallRequirement.from_editable(
                        line,
                        default_vcs=default_vcs,
                        isolated=isolated,
                        wheel_cache=wheel_cache,
                    )
                else:
                    line_req = InstallRequirement.from_line(
                        line,
                        isolated=isolated,
                        wheel_cache=wheel_cache,
                    )

                if not line_req.name:
                    logger.info(
                        "Skipping line because it's not clear what it "
                        "would install: %s",
                        line.strip(),
                    )
                    logger.info(
                        "  (add #egg=PackageName to the URL to avoid"
                        " this warning)"
                    )
                elif line_req.name not in installations:
                    logger.warning(
                        "Requirement file contains %s, but that package is"
                        " not installed",
                        line.strip(),
                    )
                else:
                    # Emit the installed pin and drop it from the
                    # remainder set.
                    yield str(installations[line_req.name]).rstrip()
                    del installations[line_req.name]

        # Header only appears when annotating a requirement file.
        yield (
            '## The following requirements were added by '
            'pip freeze:'
        )
    for installation in sorted(
            installations.values(), key=lambda x: x.name.lower()):
        yield str(installation).rstrip()
def collect_installed(self):
    """Wrap every installed distribution with the configured builder."""
    wrapped = []
    for dist in get_installed_distributions():
        wrapped.append(self.builder.from_distribution(dist))
    return wrapped
def freeze(
        requirement=None,
        find_links=None,
        local_only=None,
        user_only=None,
        skip_regex=None,
        default_vcs=None,
        isolated=False,
        wheel_cache=None,
        skip=()):
    """Yield `pip freeze` output lines; ``skip`` filters only the trailing
    '## ... added by pip freeze' section by canonical name.
    """
    find_links = find_links or []
    skip_match = None

    if skip_regex:
        skip_match = re.compile(skip_regex).search

    dependency_links = []

    for dist in pkg_resources.working_set:
        if dist.has_metadata('dependency_links.txt'):
            dependency_links.extend(
                dist.get_metadata_lines('dependency_links.txt')
            )
    for link in find_links:
        if '#egg=' in link:
            dependency_links.append(link)
    for link in find_links:
        yield '-f %s' % link
    installations = {}
    # Note: skip=() here on purpose — user 'skip' applies only below.
    for dist in get_installed_distributions(local_only=local_only,
                                            skip=(),
                                            user_only=user_only):
        req = pip.FrozenRequirement.from_dist(
            dist,
            dependency_links
        )
        installations[req.name] = req

    if requirement:
        with open(requirement) as req_file:
            for line in req_file:
                # Pass through comments, blanks, skip-regex matches and
                # option lines verbatim.
                if (not line.strip() or
                        line.strip().startswith('#') or
                        (skip_match and skip_match(line)) or
                        line.startswith((
                            '-r', '--requirement',
                            '-Z', '--always-unzip',
                            '-f', '--find-links',
                            '-i', '--index-url',
                            '--pre',
                            '--trusted-host',
                            '--process-dependency-links',
                            '--extra-index-url'))):
                    yield line.rstrip()
                    continue

                if line.startswith('-e') or line.startswith('--editable'):
                    if line.startswith('-e'):
                        line = line[2:].strip()
                    else:
                        line = line[len('--editable'):].strip().lstrip('=')
                    line_req = InstallRequirement.from_editable(
                        line,
                        default_vcs=default_vcs,
                        isolated=isolated,
                        wheel_cache=wheel_cache,
                    )
                else:
                    line_req = InstallRequirement.from_line(
                        line,
                        isolated=isolated,
                        wheel_cache=wheel_cache,
                    )

                if not line_req.name:
                    logger.info(
                        "Skipping line because it's not clear what it "
                        "would install: %s",
                        line.strip(),
                    )
                    logger.info(
                        "  (add #egg=PackageName to the URL to avoid"
                        " this warning)"
                    )
                elif line_req.name not in installations:
                    logger.warning(
                        "Requirement file contains %s, but that package is"
                        " not installed",
                        line.strip(),
                    )
                else:
                    yield str(installations[line_req.name]).rstrip()
                    del installations[line_req.name]

        # Header only appears when annotating a requirement file.
        yield(
            '## The following requirements were added by '
            'pip freeze:'
        )
    for installation in sorted(
            installations.values(), key=lambda x: x.name.lower()):
        if canonicalize_name(installation.name) not in skip:
            yield str(installation).rstrip()
def freeze(
        requirement=None,
        find_links=None,
        local_only=None,
        user_only=None,
        skip_regex=None,
        default_vcs=None,
        isolated=False,
        wheel_cache=None,
        skip=()):
    """Yield `pip freeze` output lines. ``requirement`` is a list of
    requirement-file paths; option lines are de-duplicated across files.
    ``skip`` filters only the trailing pip-freeze-added section.
    """
    find_links = find_links or []
    skip_match = None

    if skip_regex:
        skip_match = re.compile(skip_regex).search

    dependency_links = []

    for dist in pkg_resources.working_set:
        if dist.has_metadata('dependency_links.txt'):
            dependency_links.extend(
                dist.get_metadata_lines('dependency_links.txt')
            )
    for link in find_links:
        if '#egg=' in link:
            dependency_links.append(link)
    for link in find_links:
        yield '-f %s' % link
    installations = {}
    for dist in get_installed_distributions(local_only=local_only,
                                            skip=(),
                                            user_only=user_only):
        try:
            req = pip.FrozenRequirement.from_dist(
                dist,
                dependency_links
            )
        except RequirementParseError:
            # Unparseable metadata: warn and keep freezing the rest.
            logger.warning(
                "Could not parse requirement: %s",
                dist.project_name
            )
            continue
        installations[req.name] = req

    if requirement:
        # the options that don't get turned into an InstallRequirement
        # should only be emitted once, even if the same option is in multiple
        # requirements files, so we need to keep track of what has been emitted
        # so that we don't emit it again if it's seen again
        emitted_options = set()
        for req_file_path in requirement:
            with open(req_file_path) as req_file:
                for line in req_file:
                    if (not line.strip() or
                            line.strip().startswith('#') or
                            (skip_match and skip_match(line)) or
                            line.startswith((
                                '-r', '--requirement',
                                '-Z', '--always-unzip',
                                '-f', '--find-links',
                                '-i', '--index-url',
                                '--pre',
                                '--trusted-host',
                                '--process-dependency-links',
                                '--extra-index-url'))):
                        line = line.rstrip()
                        if line not in emitted_options:
                            emitted_options.add(line)
                            yield line
                        continue

                    if line.startswith('-e') or line.startswith('--editable'):
                        if line.startswith('-e'):
                            line = line[2:].strip()
                        else:
                            line = line[len('--editable'):].strip().lstrip('=')
                        line_req = InstallRequirement.from_editable(
                            line,
                            default_vcs=default_vcs,
                            isolated=isolated,
                            wheel_cache=wheel_cache,
                        )
                    else:
                        line_req = InstallRequirement.from_line(
                            COMMENT_RE.sub('', line).strip(),
                            isolated=isolated,
                            wheel_cache=wheel_cache,
                        )

                    if not line_req.name:
                        logger.info(
                            "Skipping line in requirement file [%s] because "
                            "it's not clear what it would install: %s",
                            req_file_path, line.strip(),
                        )
                        logger.info(
                            "  (add #egg=PackageName to the URL to avoid"
                            " this warning)"
                        )
                    elif line_req.name not in installations:
                        logger.warning(
                            "Requirement file [%s] contains %s, but that "
                            "package is not installed",
                            req_file_path, COMMENT_RE.sub('', line).strip(),
                        )
                    else:
                        yield str(installations[line_req.name]).rstrip()
                        del installations[line_req.name]

        # Header only appears when annotating requirement files.
        yield(
            '## The following requirements were added by '
            'pip freeze:'
        )
    for installation in sorted(
            installations.values(), key=lambda x: x.name.lower()):
        if canonicalize_name(installation.name) not in skip:
            yield str(installation).rstrip()
def autocomplete():
    """Command and option completion for the main option parser (and
    options) and its subcommands (and options).

    Enable by sourcing one of the completion shell scripts (bash or zsh).
    """
    # Don't complete if user hasn't sourced bash_completion file.
    if 'PIP_AUTO_COMPLETE' not in os.environ:
        return
    # COMP_WORDS/COMP_CWORD come from the shell completion machinery.
    cwords = os.environ['COMP_WORDS'].split()[1:]
    cword = int(os.environ['COMP_CWORD'])
    try:
        current = cwords[cword - 1]
    except IndexError:
        current = ''

    subcommands = [cmd for cmd, summary in get_summaries()]
    options = []
    # subcommand
    try:
        subcommand_name = [w for w in cwords if w in subcommands][0]
    except IndexError:
        subcommand_name = None

    parser = create_main_parser()
    # subcommand options
    if subcommand_name:
        # special case: 'help' subcommand has no options
        if subcommand_name == 'help':
            sys.exit(1)
        # special case: list locally installed dists for uninstall command
        if subcommand_name == 'uninstall' and not current.startswith('-'):
            installed = []
            lc = current.lower()
            for dist in get_installed_distributions(local_only=True):
                if dist.key.startswith(lc) and dist.key not in cwords[1:]:
                    installed.append(dist.key)
            # if there are no dists installed, fall back to option completion
            if installed:
                for dist in installed:
                    print(dist)
                sys.exit(1)

        subcommand = commands_dict[subcommand_name]()
        options += [(opt.get_opt_string(), opt.nargs)
                    for opt in subcommand.parser.option_list_all
                    if opt.help != optparse.SUPPRESS_HELP]

        # filter out previously specified options from available options
        prev_opts = [x.split('=')[0] for x in cwords[1:cword - 1]]
        options = [(x, v) for (x, v) in options if x not in prev_opts]
        # filter options by current input
        options = [(k, v) for k, v in options if k.startswith(current)]
        for option in options:
            opt_label = option[0]
            # append '=' to options which require args
            if option[1]:
                opt_label += '='
            print(opt_label)
    else:
        # show main parser options only when necessary
        if current.startswith('-') or current.startswith('--'):
            opts = [i.option_list for i in parser.option_groups]
            opts.append(parser.option_list)
            # flatten the per-group option lists into one iterable
            opts = (o for it in opts for o in it)

            subcommands += [
                i.get_opt_string() for i in opts
                if i.help != optparse.SUPPRESS_HELP
            ]

        print(' '.join([x for x in subcommands if x.startswith(current)]))
    sys.exit(1)
def freeze(
    requirement=None,
    find_links=None,
    local_only=None,
    user_only=None,
    skip_regex=None,
    default_vcs=None,
    isolated=False,
    wheel_cache=None,
    skip=(),
):
    """Yield the lines of a ``pip freeze``-style requirements listing.

    :param requirement: path to an existing requirements file; when given,
        its lines are echoed in order, with installed-package lines replaced
        by their frozen form.
    :param find_links: extra ``--find-links`` URLs, each yielded as a
        ``-f`` line and, if it carries ``#egg=``, also used as a
        dependency link.
    :param local_only: passed to ``get_installed_distributions``.
    :param user_only: passed to ``get_installed_distributions``.
    :param skip_regex: regex; requirement-file lines matching it are echoed
        unchanged rather than frozen.
    :param default_vcs: fallback VCS for editable requirement lines.
    :param isolated: passed through to ``InstallRequirement``.
    :param wheel_cache: passed through to ``InstallRequirement``.
    :param skip: iterable of canonicalized names excluded from the
        trailing "added by pip freeze" section.
    """
    find_links = find_links or []
    skip_match = None

    if skip_regex:
        # Pre-bind the compiled pattern's search method for reuse below.
        skip_match = re.compile(skip_regex).search

    dependency_links = []

    # Collect dependency links from every active distribution's metadata.
    for dist in pkg_resources.working_set:
        if dist.has_metadata("dependency_links.txt"):
            dependency_links.extend(dist.get_metadata_lines("dependency_links.txt"))
    for link in find_links:
        if "#egg=" in link:
            dependency_links.append(link)
    for link in find_links:
        yield "-f %s" % link
    # Map requirement name -> frozen requirement for every installed dist.
    installations = {}
    for dist in get_installed_distributions(local_only=local_only,
                                            skip=(),
                                            user_only=user_only):
        try:
            req = pip.FrozenRequirement.from_dist(dist, dependency_links)
        except RequirementParseError:
            # Best-effort: an unparseable installed dist is reported and
            # left out of the listing rather than aborting the freeze.
            logger.warning("Could not parse requirement: %s", dist.project_name)
            continue
        installations[req.name] = req

    if requirement:
        with open(requirement) as req_file:
            for line in req_file:
                # Pass through blanks, comments, skip_regex matches, and
                # option lines that don't name a package.
                if (
                    not line.strip()
                    or line.strip().startswith("#")
                    or (skip_match and skip_match(line))
                    or line.startswith(
                        (
                            "-r",
                            "--requirement",
                            "-Z",
                            "--always-unzip",
                            "-f",
                            "--find-links",
                            "-i",
                            "--index-url",
                            "--pre",
                            "--trusted-host",
                            "--process-dependency-links",
                            "--extra-index-url",
                        )
                    )
                ):
                    yield line.rstrip()
                    continue

                if line.startswith("-e") or line.startswith("--editable"):
                    # Strip the editable flag (and an optional '=') to get
                    # the bare requirement spec.
                    if line.startswith("-e"):
                        line = line[2:].strip()
                    else:
                        line = line[len("--editable") :].strip().lstrip("=")
                    line_req = InstallRequirement.from_editable(
                        line,
                        default_vcs=default_vcs,
                        isolated=isolated,
                        wheel_cache=wheel_cache,
                    )
                else:
                    line_req = InstallRequirement.from_line(
                        line,
                        isolated=isolated,
                        wheel_cache=wheel_cache,
                    )

                if not line_req.name:
                    logger.info("Skipping line because it's not clear what it "
                                "would install: %s", line.strip())
                    logger.info(" (add #egg=PackageName to the URL to avoid"
                                " this warning)")
                elif line_req.name not in installations:
                    logger.warning("Requirement file contains %s, but that package is"
                                   " not installed", line.strip())
                else:
                    # Emit the frozen form and mark it as already listed so
                    # it doesn't reappear in the trailing section.
                    yield str(installations[line_req.name]).rstrip()
                    del installations[line_req.name]

    # Anything still in `installations` was not covered by the requirements
    # file; list it in a clearly-labeled trailing section.
    yield ("## The following requirements were added by "
           "pip freeze:")
    for installation in sorted(installations.values(),
                               key=lambda x: x.name.lower()):
        if canonicalize_name(installation.name) not in skip:
            yield str(installation).rstrip()
def list_all():
    """Return every distribution installed in the local environment.

    Restricts the listing to local installs (``local_only=True``) without
    limiting it to the user site (``user_only=False``).
    """
    dists = get_installed_distributions(local_only=True, user_only=False)
    return dists
def evaluate(self):
    """Return the project names of all installed distributions."""
    # Lazy import: pulling in pip at module load time is costly, so it is
    # deferred until the first evaluation.
    from pip.utils import get_installed_distributions

    names = []
    for dist in get_installed_distributions():
        names.append(dist.project_name)
    return names
"future. Explicit uses of --index-url and/or --extra-index-url" " is suggested.", RemovedInPip7Warning, ) if options.mirrors: warnings.warn( "--mirrors has been deprecated and will be removed in the " "future. Explicit uses of --index-url and/or --extra-index-url" " is suggested.", RemovedInPip7Warning, ) index_urls += options.mirrors dependency_links = [] for dist in get_installed_distributions(local_only=options.local, user_only=options.user): if dist.has_metadata('dependency_links.txt'): dependency_links.extend( dist.get_metadata_lines('dependency_links.txt'), ) with self._build_session(options) as session: finder = self._build_package_finder(options, index_urls, session) finder.add_dependency_links(dependency_links) installed_packages = get_installed_distributions( local_only=options.local, user_only=options.user, include_editables=False, ) for dist in installed_packages:
def autocomplete():
    """Command and option completion for the main option parser (and options)
    and its subcommands (and options).

    Enable by sourcing one of the completion shell scripts (bash, zsh or
    fish).

    Prints candidate completions to stdout and exits; returns silently when
    completion mode is not active.
    """
    # Don't complete if user hasn't sourced bash_completion file.
    if 'PIP_AUTO_COMPLETE' not in os.environ:
        return
    # COMP_WORDS / COMP_CWORD are set by the sourced completion script:
    # the words typed so far (minus the program name) and the index of the
    # word currently being completed.
    cwords = os.environ['COMP_WORDS'].split()[1:]
    cword = int(os.environ['COMP_CWORD'])
    try:
        current = cwords[cword - 1]
    except IndexError:
        # Cursor is past the last typed word: complete from scratch.
        current = ''
    subcommands = [cmd for cmd, summary in get_summaries()]
    options = []
    # subcommand: first typed word that matches a known subcommand, if any
    try:
        subcommand_name = [w for w in cwords if w in subcommands][0]
    except IndexError:
        subcommand_name = None

    parser = create_main_parser()
    # subcommand options
    if subcommand_name:
        # special case: 'help' subcommand has no options
        if subcommand_name == 'help':
            sys.exit(1)
        # special case: list locally installed dists for uninstall command
        if subcommand_name == 'uninstall' and not current.startswith('-'):
            installed = []
            lc = current.lower()
            for dist in get_installed_distributions(local_only=True):
                # offer only dists matching the typed prefix and not
                # already present on the command line
                if dist.key.startswith(lc) and dist.key not in cwords[1:]:
                    installed.append(dist.key)
            # if there are no dists installed, fall back to option completion
            if installed:
                for dist in installed:
                    print(dist)
                sys.exit(1)

        subcommand = commands_dict[subcommand_name]()
        # (option string, nargs) pairs; nargs > 0 means the option takes a value
        options += [(opt.get_opt_string(), opt.nargs)
                    for opt in subcommand.parser.option_list_all
                    if opt.help != optparse.SUPPRESS_HELP]

        # filter out previously specified options from available options
        prev_opts = [x.split('=')[0] for x in cwords[1:cword - 1]]
        options = [(x, v) for (x, v) in options if x not in prev_opts]
        # filter options by current input
        options = [(k, v) for k, v in options if k.startswith(current)]
        for option in options:
            opt_label = option[0]
            # append '=' to options which require args
            if option[1]:
                opt_label += '='
            print(opt_label)
    else:
        # show main parser options only when necessary
        if current.startswith('-') or current.startswith('--'):
            # flatten option lists of all option groups plus the top-level list
            opts = [i.option_list for i in parser.option_groups]
            opts.append(parser.option_list)
            opts = (o for it in opts for o in it)

            subcommands += [i.get_opt_string() for i in opts
                            if i.help != optparse.SUPPRESS_HELP]
        print(' '.join([x for x in subcommands if x.startswith(current)]))
    sys.exit(1)
def file_pip_module_info(abs_path):
    """Return the pip distribution that installed the file at *abs_path*.

    Package name can be found at `module_info.project_name`.

    :param abs_path: absolute path of the file to look up.
    :returns: the matching distribution object, or None when no installed
        distribution claims the file.
    """
    global editable_dists
    # Adapted from Github user nbeaver's pip_file_lookup repo (MIT license)
    # Found through: https://stackoverflow.com/questions/33483818
    import csv
    # TODO: why is this import so slow?
    try:
        from pip.utils import get_installed_distributions
    except ImportError:
        # pip >= 10 moved the helper. ImportError also covers
        # ModuleNotFoundError (its subclass), so this works on every
        # Python version and catches attribute-level import failures too.
        from pip._internal.utils.misc import get_installed_distributions
    if editable_dists is None:
        # Cached module-wide: computed once on first call.
        editable_dists = get_installed_distributions(editables_only=True)

    for dist in get_installed_distributions():
        # TODO: consider a single importlib.resources-based test
        # (importlib.resources.contents / importlib_resources backport)
        # instead of the metadata-format special cases below.

        # RECORDs should be part of .dist-info metadatas
        if dist.has_metadata('RECORD'):
            lines = dist.get_metadata_lines('RECORD')
            # RECORD is CSV ("path,hash,size"); csv.reader handles quoted
            # paths that contain commas, which a naive split(',') breaks on.
            paths = [row[0] for row in csv.reader(lines)]
            paths_absolute = [normpath(join(dist.location, p)) for p in paths]
        # Otherwise use pip's log for .egg-info's
        elif dist.has_metadata('installed-files.txt'):
            paths = dist.get_metadata_lines('installed-files.txt')
            # installed-files.txt paths are relative to the egg-info dir.
            paths_absolute = [normpath(join(dist.egg_info, p)) for p in paths]
        # This seems to work for at least some editable installed things.
        # (but has problems w/ non-editable stuff)
        elif dist in editable_dists and abs_path.startswith(dist.location):
            rel_path = abs_path[len(dist.location) + 1:]
            if dist.has_resource(rel_path):
                return dist
            warnings.warn(
                ('expected pip package {} to have resource {}, '
                 'but it did not').format(dist.project_name, rel_path))
            # Fix: previously control fell through to the membership test
            # below with `paths_absolute` unbound (NameError on a first-
            # iteration hit) or stale from an earlier distribution.
            continue
        else:
            continue
        if abs_path in paths_absolute:
            return dist
    return None