def wheel_dist_info_dir(source, name):
    # type: (ZipFile, str) -> str
    """Returns the name of the contained .dist-info directory.

    Raises AssertionError or UnsupportedWheel if not found, >1 found, or
    it doesn't match the provided name.
    """
    # Zip file path separators must be /
    subdirs = list(set(p.split("/")[0] for p in source.namelist()))

    info_dirs = [s for s in subdirs if s.endswith('.dist-info')]

    if not info_dirs:
        raise UnsupportedWheel(".dist-info directory not found")

    if len(info_dirs) > 1:
        raise UnsupportedWheel(
            "multiple .dist-info directories found: {}".format(
                ", ".join(info_dirs)))

    info_dir = info_dirs[0]

    info_dir_name = canonicalize_name(info_dir)
    canonical_name = canonicalize_name(name)
    if not info_dir_name.startswith(canonical_name):
        raise UnsupportedWheel(
            ".dist-info directory {!r} does not start with {!r}".format(
                info_dir, canonical_name))

    # Zip file paths can be unicode or str depending on the zip entry flags,
    # so normalize it.
    return ensure_str(info_dir)
def wheel_dist_info_dir(source: ZipFile, name: str) -> str:
    """Returns the name of the contained .dist-info directory.

    Raises AssertionError or UnsupportedWheel if not found, >1 found, or
    it doesn't match the provided name.
    """
    # Zip file path separators must be /
    subdirs = {p.split("/", 1)[0] for p in source.namelist()}

    info_dirs = [s for s in subdirs if s.endswith(".dist-info")]

    if not info_dirs:
        raise UnsupportedWheel(".dist-info directory not found")

    if len(info_dirs) > 1:
        raise UnsupportedWheel(
            "multiple .dist-info directories found: {}".format(
                ", ".join(info_dirs)))

    info_dir = info_dirs[0]

    info_dir_name = canonicalize_name(info_dir)
    canonical_name = canonicalize_name(name)
    if not info_dir_name.startswith(canonical_name):
        raise UnsupportedWheel(
            ".dist-info directory {!r} does not start with {!r}".format(
                info_dir, canonical_name))

    return info_dir
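# Hedged usage sketch for wheel_dist_info_dir above: build a minimal
# wheel-like zip in memory and locate its .dist-info directory. The stub
# UnsupportedWheel exception and the canonicalize_name import stand in for
# pip's internals and are assumptions, not pip's actual module wiring.
import io
import zipfile

from packaging.utils import canonicalize_name  # same helper pip vendors


class UnsupportedWheel(Exception):  # minimal stand-in for pip's exception
    pass


buf = io.BytesIO()
with zipfile.ZipFile(buf, "w") as zf:
    zf.writestr("demo-1.0.dist-info/METADATA", "Metadata-Version: 2.1\n")
    zf.writestr("demo/__init__.py", "")

with zipfile.ZipFile(buf) as source:
    # Name matching is canonicalized, so "Demo" still matches "demo-1.0".
    print(wheel_dist_info_dir(source, "Demo"))  # -> "demo-1.0.dist-info"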
def _make_requirement_from_install_req(
    self, ireq: InstallRequirement, requested_extras: Iterable[str]
) -> Optional[Requirement]:
    if not ireq.match_markers(requested_extras):
        logger.info(
            "Ignoring %s: markers '%s' don't match your environment",
            ireq.name,
            ireq.markers,
        )
        return None
    if not ireq.link:
        return SpecifierRequirement(ireq)
    self._fail_if_link_is_unsupported_wheel(ireq.link)
    cand = self._make_candidate_from_link(
        ireq.link,
        extras=frozenset(ireq.extras),
        template=ireq,
        name=canonicalize_name(ireq.name) if ireq.name else None,
        version=None,
    )
    if cand is None:
        # There's no way we can satisfy a URL requirement if the underlying
        # candidate fails to build. An unnamed URL must be user-supplied, so
        # we fail eagerly. If the URL is named, an unsatisfiable requirement
        # can make the resolver do the right thing, either backtrack (and
        # maybe find some other requirement that's buildable) or raise a
        # ResolutionImpossible eventually.
        if not ireq.name:
            raise self._build_failures[ireq.link]
        return UnsatisfiableRequirement(canonicalize_name(ireq.name))
    return self.make_requirement_from_candidate(cand)
def run(self, options, args):
    with self._build_session(options) as session:
        reqs_to_uninstall = {}
        for name in args:
            req = InstallRequirement.from_line(
                name, isolated=options.isolated_mode,
            )
            if req.name:
                reqs_to_uninstall[canonicalize_name(req.name)] = req
        for filename in options.requirements:
            for req in parse_requirements(
                    filename, options=options, session=session):
                if req.name:
                    reqs_to_uninstall[canonicalize_name(req.name)] = req
        if not reqs_to_uninstall:
            raise InstallationError(
                'You must give at least one requirement to %(name)s (see '
                '"pip help %(name)s")' % dict(name=self.name)
            )
        for req in reqs_to_uninstall.values():
            uninstall_pathset = req.uninstall(
                auto_confirm=options.yes, verbose=self.verbosity > 0,
            )
            if uninstall_pathset:
                uninstall_pathset.commit()
def run(self, options, args):
    with self._build_session(options) as session:
        reqs_to_uninstall = {}
        for name in args:
            req = InstallRequirement.from_line(
                name, isolated=options.isolated_mode,
            )
            if req.name:
                reqs_to_uninstall[canonicalize_name(req.name)] = req
        for filename in options.requirements:
            for req in parse_requirements(filename,
                                          options=options,
                                          session=session):
                if req.name:
                    reqs_to_uninstall[canonicalize_name(req.name)] = req
        if not reqs_to_uninstall:
            raise InstallationError(
                'You must give at least one requirement to %(name)s (see '
                '"pip help %(name)s")' % dict(name=self.name))
        protect_pip_from_modification_on_windows(
            modifying_pip="pip" in reqs_to_uninstall)
        for req in reqs_to_uninstall.values():
            uninstall_pathset = req.uninstall(
                auto_confirm=options.yes, verbose=self.verbosity > 0,
            )
            if uninstall_pathset:
                uninstall_pathset.commit()
def _verify_one(req, wheel_path):
    # type: (InstallRequirement, str) -> None
    canonical_name = canonicalize_name(req.name or "")
    w = Wheel(os.path.basename(wheel_path))
    if canonicalize_name(w.name) != canonical_name:
        raise InvalidWheelFilename(
            "Wheel has unexpected file name: expected {!r}, "
            "got {!r}".format(canonical_name, w.name),
        )
    dist = get_wheel_distribution(wheel_path, canonical_name)
    dist_verstr = str(dist.version)
    if canonicalize_version(dist_verstr) != canonicalize_version(w.version):
        raise InvalidWheelFilename(
            "Wheel has unexpected version: expected {!r}, "
            "got {!r}".format(dist_verstr, w.version),
        )
    metadata_version_value = dist.metadata_version
    if metadata_version_value is None:
        raise UnsupportedWheel("Missing Metadata-Version")
    try:
        metadata_version = Version(metadata_version_value)
    except InvalidVersion:
        msg = f"Invalid Metadata-Version: {metadata_version_value}"
        raise UnsupportedWheel(msg)
    if (metadata_version >= Version("1.2")
            and not isinstance(dist.version, Version)):
        raise UnsupportedWheel(
            "Metadata 1.2 mandates PEP 440 version, "
            "but {!r} is not".format(dist_verstr)
        )
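# Hedged illustration of the version comparison above: canonicalize_version
# (from packaging, which pip vendors) normalizes equivalent spellings, e.g.
# stripping trailing zeros, so "21.0.0" and "21.0" compare equal. The
# version strings here are illustrative values.
from packaging.utils import canonicalize_version

print(canonicalize_version("21.0.0") == canonicalize_version("21.0"))  # True
print(canonicalize_version("21.0") == canonicalize_version("21.1"))    # False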
def get_requiring_packages(package_name):
    canonical_name = canonicalize_name(package_name)
    return [
        pkg.project_name for pkg in pkg_resources.working_set
        if canonical_name in
        [canonicalize_name(required.name) for required in pkg.requires()]
    ]
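# Hedged usage sketch: list installed distributions that declare a
# dependency on "requests". Assumes setuptools' pkg_resources is importable
# and that get_requiring_packages above is in scope; "requests" is just an
# example query.
import pkg_resources
from packaging.utils import canonicalize_name  # stand-in for pip's vendored copy

print(get_requiring_packages("requests"))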
def run_egg_info(self):
    assert self.source_dir
    if self.name:
        logger.debug(
            'Running setup.py (path:%s) egg_info for package %s',
            self.setup_py, self.name,
        )
    else:
        logger.debug(
            'Running setup.py (path:%s) egg_info for package from %s',
            self.setup_py, self.link,
        )
    with indent_log():
        script = SETUPTOOLS_SHIM % self.setup_py
        base_cmd = [os.environ.get('PIP_PYTHON_PATH', sys.executable),
                    '-c', script]
        if self.isolated:
            base_cmd += ["--no-user-cfg"]
        egg_info_cmd = base_cmd + ['egg_info']
        # We can't put the .egg-info files at the root, because then the
        # source code will be mistaken for an installed egg, causing
        # problems
        if self.editable:
            egg_base_option = []
        else:
            egg_info_dir = os.path.join(self.setup_py_dir, 'pip-egg-info')
            ensure_dir(egg_info_dir)
            egg_base_option = ['--egg-base', 'pip-egg-info']
        with self.build_env:
            call_subprocess(
                egg_info_cmd + egg_base_option,
                cwd=self.setup_py_dir,
                show_stdout=False,
                command_desc='python setup.py egg_info')

    if not self.req:
        if isinstance(parse_version(self.pkg_info()["Version"]), Version):
            op = "=="
        else:
            op = "==="
        self.req = Requirement(
            "".join([
                self.pkg_info()["Name"],
                op,
                self.pkg_info()["Version"],
            ])
        )
        self._correct_build_location()
    else:
        metadata_name = canonicalize_name(self.pkg_info()["Name"])
        if canonicalize_name(self.req.name) != metadata_name:
            logger.warning(
                'Running setup.py (path:%s) egg_info for package %s '
                'produced metadata for project name %s. Fix your '
                '#egg=%s fragments.',
                self.setup_py, self.name, metadata_name, self.name
            )
            self.req = Requirement(metadata_name)
def clobber(source, dest, is_base, fixer=None, filter=None):
    ensure_dir(dest)  # common for the 'include' path

    for dir, subdirs, files in os.walk(source):
        basedir = dir[len(source):].lstrip(os.path.sep)
        destdir = os.path.join(dest, basedir)
        if is_base and basedir.split(os.path.sep, 1)[0].endswith('.data'):
            continue
        for s in subdirs:
            destsubdir = os.path.join(dest, basedir, s)
            if is_base and basedir == '' and destsubdir.endswith('.data'):
                data_dirs.append(s)
                continue
            elif (is_base and
                    s.endswith('.dist-info') and
                    canonicalize_name(s).startswith(
                        canonicalize_name(req.name))):
                assert not info_dir, ('Multiple .dist-info directories: ' +
                                      destsubdir + ', ' +
                                      ', '.join(info_dir))
                info_dir.append(destsubdir)
        for f in files:
            # Skip unwanted files
            if filter and filter(f):
                continue
            srcfile = os.path.join(dir, f)
            destfile = os.path.join(dest, basedir, f)
            # directory creation is lazy and after the file filtering above
            # to ensure we don't install empty dirs; empty dirs can't be
            # uninstalled.
            ensure_dir(destdir)

            # We use copyfile (not move, copy, or copy2) to be extra sure
            # that we are not moving directories over (copyfile fails for
            # directories) as well as to ensure that we are not copying
            # over any metadata because we want more control over what
            # metadata we actually copy over.
            shutil.copyfile(srcfile, destfile)

            # Copy over the metadata for the file, currently this only
            # includes the atime and mtime.
            st = os.stat(srcfile)
            if hasattr(os, "utime"):
                os.utime(destfile, (st.st_atime, st.st_mtime))

            # If our file is executable, then make our destination file
            # executable.
            if os.access(srcfile, os.X_OK):
                st = os.stat(srcfile)
                permissions = (
                    st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
                )
                os.chmod(destfile, permissions)

            changed = False
            if fixer:
                changed = fixer(destfile)
            record_installed(srcfile, destfile, changed)
def warn_on_mismatching_name(self) -> None:
    metadata_name = canonicalize_name(self.metadata["Name"])
    if canonicalize_name(self.req.name) == metadata_name:
        # Everything is fine.
        return

    # If we're here, there's a mismatch. Log a warning about it.
    logger.warning(
        'Generating metadata for package %s '
        'produced metadata for project name %s. Fix your '
        '#egg=%s fragments.',
        self.name, metadata_name, self.name)
    self.req = Requirement(metadata_name)
def _fetch_metadata_using_lazy_wheel(
    self,
    link: Link,
) -> Optional[BaseDistribution]:
    """Fetch metadata using lazy wheel, if possible."""
    if not self.use_lazy_wheel:
        return None
    if self.require_hashes:
        logger.debug("Lazy wheel is not used as hash checking is required")
        return None
    if link.is_file or not link.is_wheel:
        logger.debug(
            "Lazy wheel is not used as %r does not point to a remote wheel",
            link,
        )
        return None

    wheel = Wheel(link.filename)
    name = canonicalize_name(wheel.name)
    logger.info(
        "Obtaining dependency information from %s %s",
        name,
        wheel.version,
    )
    url = link.url.split("#", 1)[0]
    try:
        return dist_from_wheel_url(name, url, self._session)
    except HTTPRangeRequestUnsupported:
        logger.debug("%s does not support range requests", url)
        return None
def __init__(self, name, req, editable, comments=()):
    # type: (str, Union[str, Requirement], bool, Iterable[str]) -> None
    self.name = name
    self.canonical_name = canonicalize_name(name)
    self.req = req
    self.editable = editable
    self.comments = comments
def __str__(self):
    # type: () -> str
    requirements = sorted(
        (req for req in self.requirements.values() if not req.comes_from),
        key=lambda req: canonicalize_name(req.name),
    )
    return ' '.join(str(req.req) for req in requirements)
def collect_root_requirements(
    self, root_ireqs: List[InstallRequirement]
) -> CollectedRootRequirements:
    collected = CollectedRootRequirements([], {}, {})
    for i, ireq in enumerate(root_ireqs):
        if ireq.constraint:
            # Ensure we only accept valid constraints
            problem = check_invalid_constraint_type(ireq)
            if problem:
                raise InstallationError(problem)
            if not ireq.match_markers():
                continue
            assert ireq.name, "Constraint must be named"
            name = canonicalize_name(ireq.name)
            if name in collected.constraints:
                collected.constraints[name] &= ireq
            else:
                collected.constraints[name] = Constraint.from_ireq(ireq)
        else:
            req = self._make_requirement_from_install_req(
                ireq,
                requested_extras=(),
            )
            if req is None:
                continue
            if ireq.user_supplied and req.name not in collected.user_requested:
                collected.user_requested[req.name] = i
            collected.requirements.append(req)
    return collected
def _get_requiring_packages(
        current_dist: BaseDistribution) -> Iterator[str]:
    return (
        dist.metadata["Name"] or "UNKNOWN"
        for dist in installed.values()
        if current_dist.canonical_name
        in {canonicalize_name(d.name) for d in dist.iter_dependencies()}
    )
def make_install_req_from_dist(
    dist: Distribution, template: InstallRequirement
) -> InstallRequirement:
    project_name = canonicalize_name(dist.project_name)
    if template.req:
        line = str(template.req)
    elif template.link:
        line = f"{project_name} @ {template.link.url}"
    else:
        line = f"{project_name}=={dist.parsed_version}"
    ireq = install_req_from_line(
        line,
        user_supplied=template.user_supplied,
        comes_from=template.comes_from,
        use_pep517=template.use_pep517,
        isolated=template.isolated,
        constraint=template.constraint,
        options=dict(
            install_options=template.install_options,
            global_options=template.global_options,
            hashes=template.hash_options,
        ),
    )
    ireq.satisfied_by = dist
    return ireq
def check_binary_allowed(req):
    # type: (InstallRequirement) -> bool
    if req.use_pep517:
        return True
    canonical_name = canonicalize_name(req.name)
    allowed_formats = format_control.get_allowed_formats(canonical_name)
    return "binary" in allowed_formats
def get_requirement(self, name: str) -> InstallRequirement:
    project_name = canonicalize_name(name)

    if project_name in self.requirements:
        return self.requirements[project_name]

    raise KeyError(f"No project with the name {name!r}")
def has_requirement(self, name: str) -> bool:
    project_name = canonicalize_name(name)

    return (
        project_name in self.requirements
        and not self.requirements[project_name].constraint
    )
def _find_name_version_sep(fragment, canonical_name):
    # type: (str, str) -> int
    """Find the separator's index based on the package's canonical name.

    :param fragment: A <package>+<version> filename "fragment" (stem) or
        egg fragment.
    :param canonical_name: The package's canonical name.

    This function is needed since the canonicalized name does not necessarily
    have the same length as the egg info's name part. An example::

    >>> fragment = 'foo__bar-1.0'
    >>> canonical_name = 'foo-bar'
    >>> _find_name_version_sep(fragment, canonical_name)
    8
    """
    # Project name and version must be separated by one single dash. Find all
    # occurrences of dashes; if the string in front of it matches the canonical
    # name, this is the one separating the name and version parts.
    for i, c in enumerate(fragment):
        if c != "-":
            continue
        if canonicalize_name(fragment[:i]) == canonical_name:
            return i
    raise ValueError("{} does not match {}".format(fragment, canonical_name))
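# Hedged usage sketch: split an egg-info fragment into name and version
# parts using _find_name_version_sep above. The extract_version helper is
# an illustration, not pip's API; only canonicalize_name (from packaging,
# which pip vendors) is assumed.
from packaging.utils import canonicalize_name


def extract_version(fragment, canonical_name):
    # Everything after the separating dash is the version part.
    sep = _find_name_version_sep(fragment, canonical_name)
    return fragment[sep + 1:]


print(extract_version("foo__bar-1.0", "foo-bar"))  # -> "1.0"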
def get_requirement(self, name):
    # type: (str) -> InstallRequirement
    project_name = canonicalize_name(name)

    if project_name in self.requirements:
        return self.requirements[project_name]

    raise KeyError("No project with the name {!r}".format(name))
def get_metadata_distribution(self) -> BaseDistribution:
    """Loads the metadata from the wheel file into memory and returns a
    Distribution that uses it, not relying on the wheel file or
    requirement.
    """
    assert self.req.local_file_path, "Set as part of preparation during download"
    assert self.req.name, "Wheels are never unnamed"
    wheel = FilesystemWheel(self.req.local_file_path)
    return get_wheel_distribution(wheel, canonicalize_name(self.req.name))
def run(self, options: Values, args: List[str]) -> int:
    session = self.get_default_session(options)

    reqs_to_uninstall = {}
    for name in args:
        req = install_req_from_line(
            name, isolated=options.isolated_mode,
        )
        if req.name:
            reqs_to_uninstall[canonicalize_name(req.name)] = req
        else:
            logger.warning(
                "Invalid requirement: %r ignored -"
                " the uninstall command expects named"
                " requirements.",
                name,
            )
    for filename in options.requirements:
        for parsed_req in parse_requirements(
                filename, options=options, session=session):
            req = install_req_from_parsed_requirement(
                parsed_req, isolated=options.isolated_mode)
            if req.name:
                reqs_to_uninstall[canonicalize_name(req.name)] = req
    if not reqs_to_uninstall:
        raise InstallationError(
            f"You must give at least one requirement to {self.name} (see "
            f'"pip help {self.name}")')

    protect_pip_from_modification_on_windows(
        modifying_pip="pip" in reqs_to_uninstall)

    for req in reqs_to_uninstall.values():
        uninstall_pathset = req.uninstall(
            auto_confirm=options.yes,
            verbose=self.verbosity > 0,
        )
        if uninstall_pathset:
            uninstall_pathset.commit()

    warn_if_run_as_root()
    return SUCCESS
def get(
    self,
    link,  # type: Link
    package_name,  # type: Optional[str]
    supported_tags,  # type: List[Tag]
):
    # type: (...) -> Link
    candidates = []

    if not package_name:
        return link

    canonical_package_name = canonicalize_name(package_name)
    for wheel_name, wheel_dir in self._get_candidates(
        link, canonical_package_name
    ):
        try:
            wheel = Wheel(wheel_name)
        except InvalidWheelFilename:
            continue
        if canonicalize_name(wheel.name) != canonical_package_name:
            logger.debug(
                "Ignoring cached wheel %s for %s as it "
                "does not match the expected distribution name %s.",
                wheel_name, link, package_name,
            )
            continue
        if not wheel.supported(supported_tags):
            # Built for a different python/arch/etc
            continue
        candidates.append(
            (
                wheel.support_index_min(supported_tags),
                wheel_name,
                wheel_dir,
            )
        )

    if not candidates:
        return link

    _, wheel_name, wheel_dir = min(candidates)
    return Link(path_to_url(os.path.join(wheel_dir, wheel_name)))
def _simulate_installation_of(to_install, state):
    # type: (List[InstallRequirement], PackageSet) -> None
    """Computes the version of packages after installing to_install."""
    # Modify it as installing requirement_set would (assuming no errors)
    for inst_req in to_install:
        dist = make_abstract_dist(inst_req).dist(finder=None)
        name = canonicalize_name(dist.key)
        state[name] = PackageDetails(dist.version, dist.requires())
def mkurl_pypi_url(url):
    loc = posixpath.join(
        url,
        urllib_parse.quote(canonicalize_name(project_name)))
    # For maximum compatibility with easy_install, ensure the path
    # ends in a trailing slash. Although this isn't in the spec
    # (and PyPI can handle it without the slash) some other index
    # implementations might break if they relied on easy_install's
    # behavior.
    if not loc.endswith('/'):
        loc = loc + '/'
    return loc
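# Hedged sketch of the same URL construction using only the standard
# library: canonicalize the project name, quote it, and append a trailing
# slash. The index URL and project name here are illustrative values.
import posixpath
from urllib.parse import quote

from packaging.utils import canonicalize_name

index_url = "https://pypi.org/simple"
loc = posixpath.join(index_url, quote(canonicalize_name("My.Project")))
if not loc.endswith('/'):
    loc += '/'
print(loc)  # -> https://pypi.org/simple/my-project/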
def should_use_ephemeral_cache(
    req,  # type: InstallRequirement
    format_control,  # type: FormatControl
    autobuilding,  # type: bool
    cache_available  # type: bool
):
    # type: (...) -> Optional[bool]
    """
    Return whether to build an InstallRequirement object using the
    ephemeral cache.

    :param cache_available: whether a cache directory is available for the
        autobuilding=True case.

    :return: True or False to build the requirement with ephem_cache=True
        or False, respectively; or None not to build the requirement.
    """
    if req.constraint:
        return None
    if req.is_wheel:
        if not autobuilding:
            logger.info(
                'Skipping %s, due to already being wheel.', req.name,
            )
        return None
    if not autobuilding:
        return False

    if req.editable or not req.source_dir:
        return None

    if req.link and not req.link.is_artifact:
        # VCS checkout. Build wheel just for this run.
        return True

    if "binary" not in format_control.get_allowed_formats(
            canonicalize_name(req.name)):
        logger.info(
            "Skipping bdist_wheel for %s, due to binaries "
            "being disabled for it.", req.name,
        )
        return None

    link = req.link
    base, ext = link.splitext()
    if cache_available and _contains_egg_info(base):
        return False

    # Otherwise, build the wheel just for this run using the ephemeral
    # cache since we are either in the case of e.g. a local directory, or
    # no cache directory is available to use.
    return True
def _search_distribution(self, name: str) -> Optional[BaseDistribution]:
    """Find a distribution matching the ``name`` in the environment.

    This searches from *all* distributions available in the environment, to
    match the behavior of ``pkg_resources.get_distribution()``.
    """
    canonical_name = canonicalize_name(name)
    for dist in self.iter_distributions():
        if dist.canonical_name == canonical_name:
            return dist
    return None
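# Hedged sketch of the same canonical-name matching with the standard
# library's importlib.metadata instead of pip's internal environment model.
# find_distribution is an illustrative helper, not a pip API.
from importlib.metadata import distributions

from packaging.utils import canonicalize_name


def find_distribution(name):
    canonical = canonicalize_name(name)
    for dist in distributions():
        if canonicalize_name(dist.metadata["Name"]) == canonical:
            return dist
    return None


print(find_distribution("PIP"))  # matches case-insensitively, or None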
def _req_set_item_sorter(
    item: Tuple[str, InstallRequirement],
    weights: Dict[Optional[str], int],
) -> Tuple[int, str]:
    """Key function used to sort install requirements for installation.

    Based on the "weight" mapping calculated in ``get_installation_order()``.
    The canonical package name is returned as the second member as a tie-
    breaker to ensure the result is predictable, which is useful in tests.
    """
    name = canonicalize_name(item[0])
    return weights[name], name
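# Hedged sketch of how a key function like _req_set_item_sorter is
# typically applied: bind the weight mapping with functools.partial and
# sort the (name, requirement) items. The weights and items below are
# made-up stand-ins; in pip the weights come from get_installation_order().
from functools import partial

from packaging.utils import canonicalize_name


def _demo_sorter(item, weights):
    # Sort by weight first, canonical name second (the tie-breaker).
    name = canonicalize_name(item[0])
    return weights[name], name


weights = {"setuptools": 0, "wheel": 0, "requests": 1}
items = [("Requests", object()), ("Wheel", object()), ("setuptools", object())]
ordered = sorted(items, key=partial(_demo_sorter, weights=weights))
print([name for name, _ in ordered])  # -> ['setuptools', 'Wheel', 'Requests']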
def make_link_evaluator(self, project_name: str) -> LinkEvaluator:
    canonical_name = canonicalize_name(project_name)
    formats = self.format_control.get_allowed_formats(canonical_name)

    return LinkEvaluator(
        project_name=project_name,
        canonical_name=canonical_name,
        formats=formats,
        target_python=self._target_python,
        allow_yanked=self._allow_yanked,
        ignore_requires_python=self._ignore_requires_python,
        ignore_compatibility=self._ignore_compatibility)
def __repr__(self) -> str:
    requirements = sorted(
        self.requirements.values(),
        key=lambda req: canonicalize_name(req.name or ""),
    )
    format_string = '<{classname} object; {count} requirement(s): {reqs}>'
    return format_string.format(
        classname=self.__class__.__name__,
        count=len(requirements),
        reqs=', '.join(str(req.req) for req in requirements),
    )
def fmt_ctl_handle_mutual_exclude(value, target, other):
    new = value.split(',')
    while ':all:' in new:
        other.clear()
        target.clear()
        target.add(':all:')
        del new[:new.index(':all:') + 1]
        if ':none:' not in new:
            # Without a none, we want to discard everything as :all: covers it
            return
    for name in new:
        if name == ':none:':
            target.clear()
            continue
        name = canonicalize_name(name)
        other.discard(name)
        target.add(name)
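# Hedged demo of the mutual-exclusion semantics above, using plain sets in
# place of pip's FormatControl(no_binary, only_binary) pair; the values are
# illustrative. ':all:' wipes both sets; a following ':none:' plus a name
# re-adds a canonicalized exception.
no_binary, only_binary = set(), {"flask"}
fmt_ctl_handle_mutual_exclude(':all:', no_binary, only_binary)
print(no_binary, only_binary)  # -> {':all:'} set()

no_binary, only_binary = set(), set()
fmt_ctl_handle_mutual_exclude(':all:,:none:,Flask', no_binary, only_binary)
print(no_binary)  # -> {'flask'}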
def _get_candidates(self, link, package_name):
    can_not_cache = (
        not self.cache_dir or
        not package_name or
        not link
    )
    if can_not_cache:
        return []

    canonical_name = canonicalize_name(package_name)
    formats = index.fmt_ctl_formats(
        self.format_control, canonical_name
    )
    if not self.allowed_formats.intersection(formats):
        return []

    root = self.get_path_for_link(link)
    try:
        return os.listdir(root)
    except OSError as err:
        if err.errno in {errno.ENOENT, errno.ENOTDIR}:
            return []
        raise
def freeze(
        requirement=None,
        find_links=None, local_only=None, user_only=None, skip_regex=None,
        isolated=False,
        wheel_cache=None,
        exclude_editable=False,
        skip=()):
    find_links = find_links or []
    skip_match = None

    if skip_regex:
        skip_match = re.compile(skip_regex).search

    dependency_links = []

    for dist in pkg_resources.working_set:
        if dist.has_metadata('dependency_links.txt'):
            dependency_links.extend(
                dist.get_metadata_lines('dependency_links.txt')
            )
    for link in find_links:
        if '#egg=' in link:
            dependency_links.append(link)
    for link in find_links:
        yield '-f %s' % link
    installations = {}
    for dist in get_installed_distributions(local_only=local_only,
                                            skip=(),
                                            user_only=user_only):
        try:
            req = FrozenRequirement.from_dist(
                dist,
                dependency_links
            )
        except RequirementParseError:
            logger.warning(
                "Could not parse requirement: %s",
                dist.project_name
            )
            continue
        if exclude_editable and req.editable:
            continue
        installations[req.name] = req

    if requirement:
        # the options that don't get turned into an InstallRequirement
        # should only be emitted once, even if the same option is in multiple
        # requirements files, so we need to keep track of what has been
        # emitted so that we don't emit it again if it's seen again
        emitted_options = set()
        # keep track of which files a requirement is in so that we can
        # give an accurate warning if a requirement appears multiple times.
        req_files = collections.defaultdict(list)
        for req_file_path in requirement:
            with open(req_file_path) as req_file:
                for line in req_file:
                    if (not line.strip() or
                            line.strip().startswith('#') or
                            (skip_match and skip_match(line)) or
                            line.startswith((
                                '-r', '--requirement',
                                '-Z', '--always-unzip',
                                '-f', '--find-links',
                                '-i', '--index-url',
                                '--pre',
                                '--trusted-host',
                                '--process-dependency-links',
                                '--extra-index-url'))):
                        line = line.rstrip()
                        if line not in emitted_options:
                            emitted_options.add(line)
                            yield line
                        continue

                    if line.startswith('-e') or line.startswith('--editable'):
                        if line.startswith('-e'):
                            line = line[2:].strip()
                        else:
                            line = line[len('--editable'):].strip().lstrip('=')
                        line_req = InstallRequirement.from_editable(
                            line,
                            isolated=isolated,
                            wheel_cache=wheel_cache,
                        )
                    else:
                        line_req = InstallRequirement.from_line(
                            COMMENT_RE.sub('', line).strip(),
                            isolated=isolated,
                            wheel_cache=wheel_cache,
                        )

                    if not line_req.name:
                        logger.info(
                            "Skipping line in requirement file [%s] because "
                            "it's not clear what it would install: %s",
                            req_file_path, line.strip(),
                        )
                        logger.info(
                            "  (add #egg=PackageName to the URL to avoid"
                            " this warning)"
                        )
                    elif line_req.name not in installations:
                        # either it's not installed, or it is installed
                        # but has been processed already
                        if not req_files[line_req.name]:
                            logger.warning(
                                "Requirement file [%s] contains %s, but that "
                                "package is not installed",
                                req_file_path,
                                COMMENT_RE.sub('', line).strip(),
                            )
                        else:
                            req_files[line_req.name].append(req_file_path)
                    else:
                        yield str(installations[line_req.name]).rstrip()
                        del installations[line_req.name]
                        req_files[line_req.name].append(req_file_path)

        # Warn about requirements that were included multiple times (in a
        # single requirements file or in different requirements files).
        for name, files in six.iteritems(req_files):
            if len(files) > 1:
                logger.warning("Requirement %s included multiple times [%s]",
                               name, ', '.join(sorted(set(files))))

        yield (
            '## The following requirements were added by '
            'pip freeze:'
        )
    for installation in sorted(
            installations.values(), key=lambda x: x.name.lower()):
        if canonicalize_name(installation.name) not in skip:
            yield str(installation).rstrip()
def _link_package_versions(self, link, search, ignore_compatibility=True):
    """Return an InstallationCandidate or None"""
    version = None
    if link.egg_fragment:
        egg_info = link.egg_fragment
        ext = link.ext
    else:
        egg_info, ext = link.splitext()
        if not ext:
            self._log_skipped_link(link, 'not a file')
            return
        # Always ignore unsupported extensions even when we ignore
        # compatibility
        if ext not in SUPPORTED_EXTENSIONS:
            self._log_skipped_link(
                link, 'unsupported archive format: %s' % ext,
            )
            return
        if ("binary" not in search.formats and ext == wheel_ext and
                not ignore_compatibility):
            self._log_skipped_link(
                link, 'No binaries permitted for %s' % search.supplied,
            )
            return
        if ("macosx10" in link.path and ext == '.zip' and
                not ignore_compatibility):
            self._log_skipped_link(link, 'macosx10 one')
            return
        if ext == wheel_ext:
            try:
                wheel = Wheel(link.filename)
            except InvalidWheelFilename:
                self._log_skipped_link(link, 'invalid wheel filename')
                return
            if canonicalize_name(wheel.name) != search.canonical:
                self._log_skipped_link(
                    link, 'wrong project name (not %s)' % search.supplied)
                return

            if not wheel.supported(self.valid_tags) \
                    and not ignore_compatibility:
                self._log_skipped_link(
                    link, 'it is not compatible with this Python')
                return

            version = wheel.version

    # This should be up by the search.ok_binary check, but see issue 2700.
    if "source" not in search.formats and ext != wheel_ext:
        self._log_skipped_link(
            link, 'No sources permitted for %s' % search.supplied,
        )
        return

    if not version:
        version = egg_info_matches(egg_info, search.supplied, link)
    if version is None:
        self._log_skipped_link(
            link, 'wrong project name (not %s)' % search.supplied)
        return

    match = self._py_version_re.search(version)
    if match:
        version = version[:match.start()]
        py_version = match.group(1)
        if py_version != sys.version[:3]:
            self._log_skipped_link(
                link, 'Python version is incorrect')
            return
    try:
        support_this_python = check_requires_python(link.requires_python)
    except specifiers.InvalidSpecifier:
        logger.debug("Package %s has an invalid Requires-Python entry: %s",
                     link.filename, link.requires_python)
        support_this_python = True

    if not support_this_python and not ignore_compatibility:
        logger.debug("The package %s is incompatible with the python "
                     "version in use. Acceptable python versions are: %s",
                     link, link.requires_python)
        return

    logger.debug('Found link %s, version: %s', link, version)

    return InstallationCandidate(search.supplied, version, link,
                                 link.requires_python)
def find_all_candidates(self, project_name):
    """Find all available InstallationCandidate for project_name

    This checks index_urls, find_links and dependency_links.
    All versions found are returned as an InstallationCandidate list.

    See _link_package_versions for details on which files are accepted
    """
    index_locations = self._get_index_urls_locations(project_name)
    index_file_loc, index_url_loc = self._sort_locations(index_locations)
    fl_file_loc, fl_url_loc = self._sort_locations(
        self.find_links, expand_dir=True,
    )
    dep_file_loc, dep_url_loc = self._sort_locations(self.dependency_links)

    file_locations = (Link(url) for url in itertools.chain(
        index_file_loc, fl_file_loc, dep_file_loc,
    ))

    # We trust every url that the user has given us whether it was given
    # via --index-url or --find-links
    # We explicitly do not trust links that came from dependency_links
    # We want to filter out any thing which does not have a secure origin.
    url_locations = [
        link for link in itertools.chain(
            (Link(url) for url in index_url_loc),
            (Link(url) for url in fl_url_loc),
            (Link(url) for url in dep_url_loc),
        )
        if self._validate_secure_origin(logger, link)
    ]

    logger.debug('%d location(s) to search for versions of %s:',
                 len(url_locations), project_name)

    for location in url_locations:
        logger.debug('* %s', location)

    canonical_name = canonicalize_name(project_name)
    formats = fmt_ctl_formats(self.format_control, canonical_name)
    search = Search(project_name, canonical_name, formats)
    find_links_versions = self._package_versions(
        # We trust every directly linked archive in find_links
        (Link(url, '-f') for url in self.find_links),
        search
    )

    page_versions = []
    for page in self._get_pages(url_locations, project_name):
        logger.debug('Analyzing links from page %s', page.url)
        with indent_log():
            page_versions.extend(
                self._package_versions(page.links, search)
            )

    dependency_versions = self._package_versions(
        (Link(url) for url in self.dependency_links), search
    )
    if dependency_versions:
        logger.debug(
            'dependency_links found: %s',
            ', '.join([
                version.location.url for version in dependency_versions
            ])
        )

    file_versions = self._package_versions(file_locations, search)
    if file_versions:
        file_versions.sort(reverse=True)
        logger.debug(
            'Local files found: %s',
            ', '.join([
                url_to_path(candidate.location.url)
                for candidate in file_versions
            ])
        )

    # This is an intentional priority ordering
    return (
        file_versions + find_links_versions + page_versions +
        dependency_versions
    )
def build(self, requirements, session, autobuilding=False):
    """Build wheels.

    :param unpack: If True, replace the sdist we built from with the
        newly built wheel, in preparation for installation.
    :return: True if all the wheels built correctly.
    """
    from pipenv.patched.notpip._internal import index

    building_is_possible = self._wheel_dir or (
        autobuilding and self.wheel_cache.cache_dir
    )
    assert building_is_possible

    buildset = []
    for req in requirements:
        if req.constraint:
            continue
        if req.is_wheel:
            if not autobuilding:
                logger.info(
                    'Skipping %s, due to already being wheel.', req.name,
                )
        elif autobuilding and req.editable:
            pass
        elif autobuilding and not req.source_dir:
            pass
        elif autobuilding and req.link and not req.link.is_artifact:
            # VCS checkout. Build wheel just for this run.
            buildset.append((req, True))
        else:
            ephem_cache = False
            if autobuilding:
                link = req.link
                base, ext = link.splitext()
                if index.egg_info_matches(base, None, link) is None:
                    # E.g. local directory. Build wheel just for this run.
                    ephem_cache = True
                if "binary" not in index.fmt_ctl_formats(
                        self.finder.format_control,
                        canonicalize_name(req.name)):
                    logger.info(
                        "Skipping bdist_wheel for %s, due to binaries "
                        "being disabled for it.", req.name,
                    )
                    continue
            buildset.append((req, ephem_cache))

    if not buildset:
        return True

    # Build the wheels.
    logger.info(
        'Building wheels for collected packages: %s',
        ', '.join([req.name for (req, _) in buildset]),
    )
    _cache = self.wheel_cache  # shorter name
    with indent_log():
        build_success, build_failure = [], []
        for req, ephem in buildset:
            python_tag = None
            if autobuilding:
                python_tag = pep425tags.implementation_tag
                if ephem:
                    output_dir = _cache.get_ephem_path_for_link(req.link)
                else:
                    output_dir = _cache.get_path_for_link(req.link)
                try:
                    ensure_dir(output_dir)
                except OSError as e:
                    logger.warning("Building wheel for %s failed: %s",
                                   req.name, e)
                    build_failure.append(req)
                    continue
            else:
                output_dir = self._wheel_dir
            wheel_file = self._build_one(
                req, output_dir,
                python_tag=python_tag,
            )
            if wheel_file:
                build_success.append(req)
                if autobuilding:
                    # XXX: This is mildly duplicative with prepare_files,
                    # but not close enough to pull out to a single common
                    # method.
                    # The code below assumes temporary source dirs -
                    # prevent it doing bad things.
                    if req.source_dir and not os.path.exists(os.path.join(
                            req.source_dir, PIP_DELETE_MARKER_FILENAME)):
                        raise AssertionError(
                            "bad source dir - missing marker")
                    # Delete the source we built the wheel from
                    req.remove_temporary_source()
                    # set the build directory again - name is known from
                    # the work prepare_files did.
                    req.source_dir = req.build_location(
                        self.preparer.build_dir
                    )
                    # Update the link for this.
                    req.link = index.Link(path_to_url(wheel_file))
                    assert req.link.is_wheel
                    # extract the wheel into the dir
                    unpack_url(
                        req.link, req.source_dir, None, False,
                        session=session,
                    )
            else:
                build_failure.append(req)

    # notify success/failure
    if build_success:
        logger.info(
            'Successfully built %s',
            ' '.join([req.name for req in build_success]),
        )
    if build_failure:
        logger.info(
            'Failed to build %s',
            ' '.join([req.name for req in build_failure]),
        )
    # Return True if all builds were successful
    return len(build_failure) == 0
def search_packages_info(query):
    """
    Gather details from installed distributions. Print distribution name,
    version, location, and installed files. Installed files requires a
    pip generated 'installed-files.txt' in the distributions '.egg-info'
    directory.
    """
    installed = {}
    for p in pkg_resources.working_set:
        installed[canonicalize_name(p.project_name)] = p

    query_names = [canonicalize_name(name) for name in query]

    for dist in [installed[pkg] for pkg in query_names if pkg in installed]:
        package = {
            'name': dist.project_name,
            'version': dist.version,
            'location': dist.location,
            'requires': [dep.project_name for dep in dist.requires()],
        }
        file_list = None
        metadata = None
        if isinstance(dist, pkg_resources.DistInfoDistribution):
            # RECORDs should be part of .dist-info metadatas
            if dist.has_metadata('RECORD'):
                lines = dist.get_metadata_lines('RECORD')
                paths = [l.split(',')[0] for l in lines]
                paths = [os.path.join(dist.location, p) for p in paths]
                file_list = [os.path.relpath(p, dist.location) for p in paths]

            if dist.has_metadata('METADATA'):
                metadata = dist.get_metadata('METADATA')
        else:
            # Otherwise use pip's log for .egg-info's
            if dist.has_metadata('installed-files.txt'):
                paths = dist.get_metadata_lines('installed-files.txt')
                paths = [os.path.join(dist.egg_info, p) for p in paths]
                file_list = [os.path.relpath(p, dist.location) for p in paths]

            if dist.has_metadata('PKG-INFO'):
                metadata = dist.get_metadata('PKG-INFO')

        if dist.has_metadata('entry_points.txt'):
            entry_points = dist.get_metadata_lines('entry_points.txt')
            package['entry_points'] = entry_points

        if dist.has_metadata('INSTALLER'):
            for line in dist.get_metadata_lines('INSTALLER'):
                if line.strip():
                    package['installer'] = line.strip()
                    break

        # @todo: Should pkg_resources.Distribution have a
        # `get_pkg_info` method?
        feed_parser = FeedParser()
        feed_parser.feed(metadata)
        pkg_info_dict = feed_parser.close()
        for key in ('metadata-version', 'summary',
                    'home-page', 'author', 'author-email', 'license'):
            package[key] = pkg_info_dict.get(key)

        # It looks like FeedParser cannot deal with repeated headers
        classifiers = []
        for line in metadata.splitlines():
            if line.startswith('Classifier: '):
                classifiers.append(line[len('Classifier: '):])
        package['classifiers'] = classifiers

        if file_list:
            package['files'] = sorted(file_list)
        yield package
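# Hedged usage sketch: print name, version, and dependencies for an
# installed package via search_packages_info above. Assumes pkg_resources
# is available in the running environment; "pip" is just an example query.
for package in search_packages_info(['pip']):
    print(package['name'], package['version'])
    print('requires:', ', '.join(package['requires']))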