def install_given_reqs(
    to_install,  # type: List[InstallRequirement]
    install_options,  # type: List[str]
    global_options=(),  # type: Sequence[str]
    *args,
    **kwargs
):
    # type: (...) -> List[InstallRequirement]
    """
    Install everything in the given list.

    (to be called after having downloaded and unpacked the packages)
    """
    if to_install:
        logger.info(
            'Installing collected packages: %s',
            ', '.join([req.name for req in to_install]),
        )

    with indent_log():
        for requirement in to_install:
            if requirement.conflicts_with:
                logger.info(
                    'Found existing installation: %s',
                    requirement.conflicts_with,
                )
                with indent_log():
                    uninstalled_pathset = requirement.uninstall(
                        auto_confirm=True
                    )
            try:
                requirement.install(
                    install_options,
                    global_options,
                    *args,
                    **kwargs
                )
            except Exception:
                should_rollback = (
                    requirement.conflicts_with and
                    not requirement.install_succeeded
                )
                # if install did not succeed, rollback previous uninstall
                if should_rollback:
                    uninstalled_pathset.rollback()
                raise
            else:
                should_commit = (
                    requirement.conflicts_with and
                    requirement.install_succeeded
                )
                if should_commit:
                    uninstalled_pathset.commit()
            requirement.remove_temporary_source()

    return to_install
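# Every snippet in this collection leans on pip's `indent_log` context
# manager to indent nested log output. For orientation, a minimal sketch of
# how such a helper can be built; pip's real implementation lives in
# pip._internal.utils.logging and looks roughly like this (the indentation
# value is consumed by a custom logging Formatter, not shown here):
import threading
from contextlib import contextmanager

_log_state = threading.local()


def get_indentation():
    # Default to 0 for threads that never entered indent_log().
    return getattr(_log_state, 'indentation', 0)


@contextmanager
def indent_log(num=2):
    """Indent all log records emitted inside the block by `num` spaces."""
    _log_state.indentation = get_indentation() + num
    try:
        yield
    finally:
        _log_state.indentation -= num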
def install(self, install_options, global_options=(), *args, **kwargs):
    """
    Install everything in this set (after having downloaded and unpacked
    the packages)
    """
    to_install = self._to_install()

    if to_install:
        logger.info(
            'Installing collected packages: %s',
            ', '.join([req.name for req in to_install]),
        )

    with indent_log():
        for requirement in to_install:
            if requirement.conflicts_with:
                logger.info(
                    'Found existing installation: %s',
                    requirement.conflicts_with,
                )
                with indent_log():
                    uninstalled_pathset = requirement.uninstall(
                        auto_confirm=True
                    )
            try:
                requirement.install(
                    install_options,
                    global_options,
                    *args,
                    **kwargs
                )
            except Exception:
                # Narrowed from a bare `except:`, which would also swallow
                # KeyboardInterrupt and SystemExit; the rollback-and-reraise
                # is only meant for ordinary errors.
                should_rollback = (
                    requirement.conflicts_with and
                    not requirement.install_succeeded
                )
                # if install did not succeed, rollback previous uninstall
                if should_rollback:
                    uninstalled_pathset.rollback()
                raise
            else:
                should_commit = (
                    requirement.conflicts_with and
                    requirement.install_succeeded
                )
                if should_commit:
                    uninstalled_pathset.commit()
            requirement.remove_temporary_source()

    return to_install
def remove(self, auto_confirm=False, verbose=False):
    """Remove paths in ``self.paths`` with confirmation (unless
    ``auto_confirm`` is True)."""

    if not self.paths:
        logger.info(
            "Can't uninstall '%s'. No files were found to uninstall.",
            self.dist.project_name,
        )
        return

    dist_name_version = (
        self.dist.project_name + "-" + self.dist.version
    )
    logger.info('Uninstalling %s:', dist_name_version)

    with indent_log():
        if auto_confirm or self._allowed_to_proceed(verbose):
            for path in sorted(compact(compress_for_rename(self.paths))):
                new_path = self._stash(path)
                logger.debug('Removing file or directory %s', path)
                self._moved_paths.append((path, new_path))
                renames(path, new_path)
            for pth in self.pth.values():
                pth.remove()

            logger.info('Successfully uninstalled %s', dist_name_version)
def prepare_editable_requirement(self, req, require_hashes, use_user_site,
                                 finder):
    """Prepare an editable requirement
    """
    assert req.editable, "cannot prepare a non-editable req as editable"

    logger.info('Obtaining %s', req)

    with indent_log():
        if require_hashes:
            raise InstallationError(
                'The editable requirement %s cannot be installed when '
                'requiring hashes, because there is no single file to '
                'hash.' % req
            )
        req.ensure_has_source_dir(self.src_dir)
        req.update_editable(not self._download_should_save)

        abstract_dist = make_abstract_dist(req)
        abstract_dist.prep_for_dist(finder, self.build_isolation)

        if self._download_should_save:
            req.archive(self.download_dir)
        req.check_if_exists(use_user_site)

    return abstract_dist
def cleanup_files(self):
    # type: () -> None
    """Clean up files, remove builds."""
    logger.debug('Cleaning up...')
    with indent_log():
        for req in self.reqs_to_cleanup:
            req.remove_temporary_source()
def install_editable(self, install_options, global_options=(), prefix=None):
    logger.info('Running setup.py develop for %s', self.name)

    if self.isolated:
        global_options = list(global_options) + ["--no-user-cfg"]

    if prefix:
        prefix_param = ['--prefix={0}'.format(prefix)]
        install_options = list(install_options) + prefix_param

    with indent_log():
        # FIXME: should we do --install-headers here too?
        call_subprocess(
            [
                sys.executable,
                '-c',
                SETUPTOOLS_SHIM % self.setup_py
            ] +
            list(global_options) +
            ['develop', '--no-deps'] +
            list(install_options),
            cwd=self.setup_py_dir,
            show_stdout=False)

    self.install_succeeded = True
def install_editable(
    self,
    install_options,  # type: List[str]
    global_options=(),  # type: Sequence[str]
    prefix=None  # type: Optional[str]
):
    # type: (...) -> None
    logger.info('Running setup.py develop for %s', self.name)

    if self.isolated:
        global_options = list(global_options) + ["--no-user-cfg"]

    if prefix:
        prefix_param = ['--prefix={}'.format(prefix)]
        install_options = list(install_options) + prefix_param

    with indent_log():
        # FIXME: should we do --install-headers here too?
        with self.build_env:
            call_subprocess(
                [
                    sys.executable,
                    '-c',
                    SETUPTOOLS_SHIM % self.setup_py
                ] +
                list(global_options) +
                ['develop', '--no-deps'] +
                list(install_options),
                cwd=self.setup_py_dir,
            )

    self.install_succeeded = True
def remove(self, auto_confirm=False, verbose=False):
    """Remove paths in ``self.paths`` with confirmation (unless
    ``auto_confirm`` is True)."""

    if not self.paths:
        logger.info(
            "Can't uninstall '%s'. No files were found to uninstall.",
            self.dist.project_name,
        )
        return

    dist_name_version = (
        self.dist.project_name + "-" + self.dist.version
    )
    logger.info('Uninstalling %s:', dist_name_version)

    with indent_log():
        if auto_confirm or self._allowed_to_proceed(verbose):
            for path in sorted(compact(compress_for_rename(self.paths))):
                new_path = self._stash(path)
                logger.debug('Removing file or directory %s', path)
                self._moved_paths.append((path, new_path))
                if os.path.isdir(path) and os.path.isdir(new_path):
                    # If we're moving a directory, we need to
                    # remove the destination first or else it will be
                    # moved to inside the existing directory.
                    # We just created new_path ourselves, so it will be
                    # removable.
                    os.rmdir(new_path)
                renames(path, new_path)
            for pth in self.pth.values():
                pth.remove()

            logger.info('Successfully uninstalled %s', dist_name_version)
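# Why the `os.rmdir(new_path)` above matters: if the stash target already
# exists as a directory, a move lands *inside* it instead of replacing it.
# A standalone illustration (the directory names here are hypothetical):
import os
import shutil

os.makedirs('stash/pkg', exist_ok=True)  # placeholder, as _stash() might create
os.makedirs('site/pkg', exist_ok=True)   # the directory being uninstalled
os.rmdir('stash/pkg')                    # drop the placeholder first...
shutil.move('site/pkg', 'stash/pkg')     # ...so the move replaces, not nests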
def print_results(hits, name_column_width=None, terminal_width=None):
    if not hits:
        return
    if name_column_width is None:
        name_column_width = max([
            len(hit['name']) + len(highest_version(hit.get('versions', ['-'])))
            for hit in hits
        ]) + 4

    installed_packages = [p.project_name for p in pkg_resources.working_set]
    for hit in hits:
        name = hit['name']
        summary = hit['summary'] or ''
        latest = highest_version(hit.get('versions', ['-']))
        if terminal_width is not None:
            target_width = terminal_width - name_column_width - 5
            if target_width > 10:
                # wrap and indent summary to fit terminal
                summary = textwrap.wrap(summary, target_width)
                summary = ('\n' + ' ' * (name_column_width + 3)).join(summary)

        line = '%-*s - %s' % (name_column_width,
                              '%s (%s)' % (name, latest), summary)
        try:
            logger.info(line)
            if name in installed_packages:
                dist = pkg_resources.get_distribution(name)
                with indent_log():
                    if dist.version == latest:
                        logger.info('INSTALLED: %s (latest)', dist.version)
                    else:
                        logger.info('INSTALLED: %s', dist.version)
                        logger.info('LATEST: %s', latest)
        except UnicodeEncodeError:
            pass
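# The '%-*s' conversion above left-justifies the name column to a width
# computed from the longest hit. A small illustration with invented data:
hits = [{'name': 'pip', 'versions': ['20.0']},
        {'name': 'pipenv', 'versions': ['2020.5.28']}]
width = max(len(h['name']) + len(h['versions'][-1]) for h in hits) + 4
for h in hits:
    print('%-*s - %s' % (width,
                         '%s (%s)' % (h['name'], h['versions'][-1]),
                         'summary here'))
# pip (20.0)           - summary here
# pipenv (2020.5.28)   - summary here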
def _copy_dist_from_dir(link_path, location):
    """Copy distribution files in `link_path` to `location`.

    Invoked when user requests to install a local directory. E.g.:

        pip install .
        pip install ~/dev/git-repos/python-prompt-toolkit

    """

    # Note: This used to copy everything with `shutil.copytree`, which is
    # VERY SLOW if you have a lot of data in the directory. It now builds
    # an sdist and unpacks that into `location` instead.
    # See https://github.com/pypa/pip/issues/2195

    if os.path.isdir(location):
        rmtree(location)

    # build an sdist
    setup_py = 'setup.py'
    sdist_args = [sys.executable]
    sdist_args.append('-c')
    sdist_args.append(SETUPTOOLS_SHIM % setup_py)
    sdist_args.append('sdist')
    sdist_args += ['--dist-dir', location]
    logger.info('Running setup.py sdist for %s', link_path)

    with indent_log():
        call_subprocess(sdist_args, cwd=link_path, show_stdout=False)

    # unpack sdist into `location`
    sdist = os.path.join(location, os.listdir(location)[0])
    logger.info('Unpacking sdist %s into %s', sdist, location)
    unpack_file(sdist, location, content_type=None, link=None)
def _display(msg, paths):
    if not paths:
        return

    logger.info(msg)
    with indent_log():
        for path in sorted(compact(paths)):
            logger.info(path)
def run_egg_info(self):
    assert self.source_dir
    if self.name:
        logger.debug(
            'Running setup.py (path:%s) egg_info for package %s',
            self.setup_py, self.name,
        )
    else:
        logger.debug(
            'Running setup.py (path:%s) egg_info for package from %s',
            self.setup_py, self.link,
        )

    with indent_log():
        script = SETUPTOOLS_SHIM % self.setup_py
        base_cmd = [sys.executable, '-c', script]
        if self.isolated:
            base_cmd += ["--no-user-cfg"]
        egg_info_cmd = base_cmd + ['egg_info']
        # We can't put the .egg-info files at the root, because then the
        # source code will be mistaken for an installed egg, causing
        # problems
        if self.editable:
            egg_base_option = []
        else:
            egg_info_dir = os.path.join(self.setup_py_dir, 'pip-egg-info')
            ensure_dir(egg_info_dir)
            egg_base_option = ['--egg-base', 'pip-egg-info']
        with self.build_env:
            call_subprocess(
                egg_info_cmd + egg_base_option,
                cwd=self.setup_py_dir,
                show_stdout=False,
                command_desc='python setup.py egg_info')

    if not self.req:
        if isinstance(parse_version(self.pkg_info()["Version"]), Version):
            op = "=="
        else:
            op = "==="
        self.req = Requirement(
            "".join([
                self.pkg_info()["Name"],
                op,
                self.pkg_info()["Version"],
            ])
        )
        self._correct_build_location()
    else:
        metadata_name = canonicalize_name(self.pkg_info()["Name"])
        if canonicalize_name(self.req.name) != metadata_name:
            logger.warning(
                'Running setup.py (path:%s) egg_info for package %s '
                'produced metadata for project name %s. Fix your '
                '#egg=%s fragments.',
                self.setup_py, self.name, metadata_name, self.name
            )
            self.req = Requirement(metadata_name)
def _display(msg, paths):
    # type: (str, Iterable[str]) -> None
    if not paths:
        return

    logger.info(msg)
    with indent_log():
        for path in sorted(compact(paths)):
            logger.info(path)
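# A hedged usage sketch for `_display` (paths invented; pip's `compact`
# drops paths that are children of other paths in the set, so only
# top-level entries are printed). Assumes logging is configured at INFO:
_display('Would remove:', [
    '/srv/venv/lib/python3.8/site-packages/example/',
    '/srv/venv/lib/python3.8/site-packages/example-1.0.dist-info/RECORD',
])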
def export(self, location):
    """Export the svn repository at the url to the destination location"""
    url, rev_options = self.get_url_rev_options(self.url)

    logger.info('Exporting svn repository %s to %s', url, location)
    with indent_log():
        if os.path.exists(location):
            # Subversion doesn't like to check out over an existing
            # directory --force fixes this, but was only added in svn 1.5
            rmtree(location)
        cmd_args = ['export'] + rev_options.to_args() + [url, location]
        self.run_command(cmd_args, show_stdout=False)
def export(self, location):
    """Export the svn repository at the url to the destination location"""
    url, rev = self.get_url_rev()
    rev_options = get_rev_options(url, rev)
    url = self.remove_auth_from_url(url)
    logger.info('Exporting svn repository %s to %s', url, location)
    with indent_log():
        if os.path.exists(location):
            # Subversion doesn't like to check out over an existing
            # directory --force fixes this, but was only added in svn 1.5
            rmtree(location)
        self.run_command(
            ['export'] + rev_options + [url, location],
            show_stdout=False)
def prepare_metadata(self):
    # type: () -> None
    """Ensure that project metadata is available.

    Under PEP 517, call the backend hook to prepare the metadata.
    Under legacy processing, call setup.py egg-info.
    """
    assert self.source_dir

    with indent_log():
        if self.use_pep517:
            self.prepare_pep517_metadata()
        else:
            self.run_egg_info()

    if not self.req:
        if isinstance(parse_version(self.metadata["Version"]), Version):
            op = "=="
        else:
            op = "==="
        self.req = Requirement(
            "".join([
                self.metadata["Name"],
                op,
                self.metadata["Version"],
            ])
        )
        self._correct_build_location()
    else:
        metadata_name = canonicalize_name(self.metadata["Name"])
        if canonicalize_name(self.req.name) != metadata_name:
            logger.warning(
                'Generating metadata for package %s '
                'produced metadata for project name %s. Fix your '
                '#egg=%s fragments.',
                self.name, metadata_name, self.name
            )
            self.req = Requirement(metadata_name)
def prepare_installed_requirement(self, req, require_hashes, skip_reason):
    """Prepare an already-installed requirement
    """
    assert req.satisfied_by, "req should have been satisfied but isn't"
    assert skip_reason is not None, (
        "did not get skip reason skipped but req.satisfied_by "
        "is set to %r" % (req.satisfied_by,)
    )
    logger.info(
        'Requirement %s: %s (%s)',
        skip_reason, req, req.satisfied_by.version
    )
    with indent_log():
        if require_hashes:
            logger.debug(
                'Since it is already installed, we are trusting this '
                'package without checking its hash. To ensure a '
                'completely repeatable environment, install into an '
                'empty virtualenv.'
            )
        abstract_dist = Installed(req)

    return abstract_dist
def _resolve_one(
    self,
    requirement_set,  # type: RequirementSet
    req_to_install,  # type: InstallRequirement
):
    # type: (...) -> List[InstallRequirement]
    """Prepare a single requirements file.

    :return: A list of additional InstallRequirements to also install.
    """
    # Tell user what we are doing for this requirement:
    # obtain (editable), skipping, processing (local url), collecting
    # (remote url or package name)
    if req_to_install.constraint or req_to_install.prepared:
        return []

    req_to_install.prepared = True

    # Parse and return dependencies
    dist = self._get_dist_for(req_to_install)
    # This will raise UnsupportedPythonVersion if the given Python
    # version isn't compatible with the distribution's Requires-Python.
    _check_dist_requires_python(
        dist, version_info=self._py_version_info,
        ignore_requires_python=self.ignore_requires_python,
    )

    more_reqs = []  # type: List[InstallRequirement]

    def add_req(subreq, extras_requested):
        # type: (Distribution, Iterable[str]) -> None
        sub_install_req = self._make_install_req(
            str(subreq),
            req_to_install,
        )
        parent_req_name = req_to_install.name
        to_scan_again, add_to_parent = requirement_set.add_requirement(
            sub_install_req,
            parent_req_name=parent_req_name,
            extras_requested=extras_requested,
        )
        if parent_req_name and add_to_parent:
            self._discovered_dependencies[parent_req_name].append(
                add_to_parent
            )
        more_reqs.extend(to_scan_again)

    with indent_log():
        # We add req_to_install before its dependencies, so that we
        # can refer to it when adding dependencies.
        if not requirement_set.has_requirement(req_to_install.name):
            # 'unnamed' requirements will get added here
            # 'unnamed' requirements can only come from being directly
            # provided by the user.
            assert req_to_install.user_supplied
            requirement_set.add_requirement(
                req_to_install, parent_req_name=None,
            )

        if not self.ignore_dependencies:
            if req_to_install.extras:
                logger.debug(
                    "Installing extra requirements: %r",
                    ",".join(req_to_install.extras),
                )
            missing_requested = sorted(
                set(req_to_install.extras) - set(dist.extras)
            )
            for missing in missing_requested:
                logger.warning(
                    "%s does not provide the extra '%s'",
                    dist, missing
                )

            available_requested = sorted(
                set(dist.extras) & set(req_to_install.extras)
            )
            for subreq in dist.requires(available_requested):
                add_req(subreq, extras_requested=available_requested)

    return more_reqs
def build(
    self,
    requirements,  # type: Iterable[InstallRequirement]
    session,  # type: PipSession
    autobuilding=False  # type: bool
):
    # type: (...) -> List[InstallRequirement]
    """Build wheels.

    :param autobuilding: If True, replace the sdists we built from with
        the newly built wheels, in preparation for installation.
    :return: The list of InstallRequirements that failed to build.
    """
    buildset = []
    format_control = self.finder.format_control
    # Whether a cache directory is available for autobuilding=True.
    cache_available = bool(self._wheel_dir or self.wheel_cache.cache_dir)

    for req in requirements:
        ephem_cache = should_use_ephemeral_cache(
            req, format_control=format_control, autobuilding=autobuilding,
            cache_available=cache_available,
        )
        if ephem_cache is None:
            continue

        buildset.append((req, ephem_cache))

    if not buildset:
        return []

    # Is any wheel build not using the ephemeral cache?
    if any(not ephem_cache for _, ephem_cache in buildset):
        have_directory_for_build = self._wheel_dir or (
            autobuilding and self.wheel_cache.cache_dir
        )
        assert have_directory_for_build

    # TODO by @pradyunsg
    # Should break up this method into 2 separate methods.

    # Build the wheels.
    logger.info(
        'Building wheels for collected packages: %s',
        ', '.join([req.name for (req, _) in buildset]),
    )
    _cache = self.wheel_cache  # shorter name
    with indent_log():
        build_success, build_failure = [], []
        for req, ephem in buildset:
            python_tag = None
            if autobuilding:
                python_tag = pep425tags.implementation_tag
                if ephem:
                    output_dir = _cache.get_ephem_path_for_link(req.link)
                else:
                    output_dir = _cache.get_path_for_link(req.link)
                try:
                    ensure_dir(output_dir)
                except OSError as e:
                    logger.warning("Building wheel for %s failed: %s",
                                   req.name, e)
                    build_failure.append(req)
                    continue
            else:
                output_dir = self._wheel_dir
            wheel_file = self._build_one(
                req, output_dir, python_tag=python_tag,
            )
            if wheel_file:
                build_success.append(req)
                if autobuilding:
                    # XXX: This is mildly duplicative with prepare_files,
                    # but not close enough to pull out to a single common
                    # method.
                    # The code below assumes temporary source dirs -
                    # prevent it doing bad things.
                    if req.source_dir and not os.path.exists(os.path.join(
                            req.source_dir, PIP_DELETE_MARKER_FILENAME)):
                        raise AssertionError(
                            "bad source dir - missing marker")
                    # Delete the source we built the wheel from
                    req.remove_temporary_source()
                    # set the build directory again - name is known from
                    # the work prepare_files did.
                    req.source_dir = req.build_location(
                        self.preparer.build_dir
                    )
                    # Update the link for this.
                    req.link = Link(path_to_url(wheel_file))
                    assert req.link.is_wheel
                    # extract the wheel into the dir
                    unpack_url(
                        req.link, req.source_dir, None, False,
                        session=session,
                    )
            else:
                build_failure.append(req)

    # notify success/failure
    if build_success:
        logger.info(
            'Successfully built %s',
            ' '.join([req.name for req in build_success]),
        )
    if build_failure:
        logger.info(
            'Failed to build %s',
            ' '.join([req.name for req in build_failure]),
        )
    # Return a list of requirements that failed to build
    return build_failure
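# `should_use_ephemeral_cache` (called above) collapses the per-requirement
# decision tree that the older build() variants below spell out inline. Its
# contract is tri-state: None means "do not build this req", True means
# "build into the per-run (ephemeral) cache", False means "build into the
# persistent cache / wheel dir". A hedged reconstruction from that inline
# logic; `_contains_egg_info` is assumed to test for a name-version pattern
# in the file name:
def should_use_ephemeral_cache(req, format_control, autobuilding,
                               cache_available):
    if req.constraint:
        return None
    if req.is_wheel:
        if not autobuilding:
            logger.info('Skipping %s, due to already being wheel.', req.name)
        return None
    if not autobuilding:
        return False
    if req.editable or not req.source_dir:
        return None
    if req.link and not req.link.is_artifact:
        # VCS checkout. Build wheel just for this run.
        return True
    if "binary" not in format_control.get_allowed_formats(
            canonicalize_name(req.name)):
        logger.info("Skipping bdist_wheel for %s, due to binaries "
                    "being disabled for it.", req.name)
        return None
    base, ext = req.link.splitext()
    if cache_available and _contains_egg_info(base):
        # A properly named archive: safe to cache the wheel persistently.
        return False
    # E.g. a local directory: build the wheel just for this run.
    return True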
def show_sys_implementation():
    # type: () -> None
    logger.info('sys.implementation:')
    implementation_name = sys.implementation.name
    with indent_log():
        show_value('name', implementation_name)
def build(self, requirements, session, autobuilding=False):
    """Build wheels.

    :param autobuilding: If True, replace the sdists we built from with
        the newly built wheels, in preparation for installation.
    :return: True if all the wheels built correctly.
    """
    from pip._internal import index

    building_is_possible = self._wheel_dir or (
        autobuilding and self.wheel_cache.cache_dir
    )
    assert building_is_possible

    buildset = []
    for req in requirements:
        if req.constraint:
            continue
        if req.is_wheel:
            if not autobuilding:
                logger.info(
                    'Skipping %s, due to already being wheel.', req.name,
                )
        elif autobuilding and req.editable:
            pass
        elif autobuilding and not req.source_dir:
            pass
        elif autobuilding and req.link and not req.link.is_artifact:
            # VCS checkout. Build wheel just for this run.
            buildset.append((req, True))
        else:
            ephem_cache = False
            if autobuilding:
                link = req.link
                base, ext = link.splitext()
                if index.egg_info_matches(base, None, link) is None:
                    # E.g. local directory. Build wheel just for this run.
                    ephem_cache = True
                if "binary" not in index.fmt_ctl_formats(
                        self.finder.format_control,
                        canonicalize_name(req.name)):
                    logger.info(
                        "Skipping bdist_wheel for %s, due to binaries "
                        "being disabled for it.", req.name,
                    )
                    continue
            buildset.append((req, ephem_cache))

    if not buildset:
        return True

    # Build the wheels.
    logger.info(
        'Building wheels for collected packages: %s',
        ', '.join([req.name for (req, _) in buildset]),
    )
    _cache = self.wheel_cache  # shorter name
    with indent_log():
        build_success, build_failure = [], []
        for req, ephem in buildset:
            python_tag = None
            if autobuilding:
                python_tag = pep425tags.implementation_tag
                if ephem:
                    output_dir = _cache.get_ephem_path_for_link(req.link)
                else:
                    output_dir = _cache.get_path_for_link(req.link)
                try:
                    ensure_dir(output_dir)
                except OSError as e:
                    logger.warning("Building wheel for %s failed: %s",
                                   req.name, e)
                    build_failure.append(req)
                    continue
            else:
                output_dir = self._wheel_dir
            wheel_file = self._build_one(
                req, output_dir, python_tag=python_tag,
            )
            if wheel_file:
                build_success.append(req)
                if autobuilding:
                    # XXX: This is mildly duplicative with prepare_files,
                    # but not close enough to pull out to a single common
                    # method.
                    # The code below assumes temporary source dirs -
                    # prevent it doing bad things.
                    if req.source_dir and not os.path.exists(os.path.join(
                            req.source_dir, PIP_DELETE_MARKER_FILENAME)):
                        raise AssertionError(
                            "bad source dir - missing marker")
                    # Delete the source we built the wheel from
                    req.remove_temporary_source()
                    # set the build directory again - name is known from
                    # the work prepare_files did.
                    req.source_dir = req.build_location(
                        self.preparer.build_dir
                    )
                    # Update the link for this.
                    req.link = index.Link(path_to_url(wheel_file))
                    assert req.link.is_wheel
                    # extract the wheel into the dir
                    unpack_url(
                        req.link, req.source_dir, None, False,
                        session=session,
                    )
            else:
                build_failure.append(req)

    # notify success/failure
    if build_success:
        logger.info(
            'Successfully built %s',
            ' '.join([req.name for req in build_success]),
        )
    if build_failure:
        logger.info(
            'Failed to build %s',
            ' '.join([req.name for req in build_failure]),
        )
    # Return True if all builds were successful
    return len(build_failure) == 0
def install(
    self,
    install_options,  # type: List[str]
    global_options=None,  # type: Optional[Sequence[str]]
    root=None,  # type: Optional[str]
    home=None,  # type: Optional[str]
    prefix=None,  # type: Optional[str]
    warn_script_location=True,  # type: bool
    use_user_site=False,  # type: bool
    pycompile=True  # type: bool
):
    # type: (...) -> None
    scheme = get_scheme(
        self.name,
        user=use_user_site,
        home=home,
        root=root,
        isolated=self.isolated,
        prefix=prefix,
    )

    global_options = global_options if global_options is not None else []
    if self.editable:
        self.install_editable(
            install_options,
            global_options,
            prefix=prefix,
            home=home,
            use_user_site=use_user_site,
        )
        return

    if self.is_wheel:
        version = wheel.wheel_version(self.source_dir)
        wheel.check_compatibility(version, self.name)

        self.move_wheel_files(
            self.source_dir,
            scheme=scheme,
            warn_script_location=warn_script_location,
            pycompile=pycompile,
        )
        self.install_succeeded = True
        return

    # Extend the list of global and install options passed on to
    # the setup.py call with the ones from the requirements file.
    # Options specified in requirements file override those
    # specified on the command line, since the last option given
    # to setup.py is the one that is used.
    global_options = list(global_options) + \
        self.options.get('global_options', [])
    install_options = list(install_options) + \
        self.options.get('install_options', [])

    header_dir = scheme.headers

    with TempDirectory(kind="record") as temp_dir:
        record_filename = os.path.join(temp_dir.path, 'install-record.txt')
        install_args = make_setuptools_install_args(
            self.setup_py_path,
            global_options=global_options,
            install_options=install_options,
            record_filename=record_filename,
            root=root,
            prefix=prefix,
            header_dir=header_dir,
            home=home,
            use_user_site=use_user_site,
            no_user_config=self.isolated,
            pycompile=pycompile,
        )

        runner = runner_with_spinner_message(
            "Running setup.py install for {}".format(self.name)
        )
        with indent_log(), self.build_env:
            runner(
                cmd=install_args,
                cwd=self.unpacked_source_directory,
            )

        if not os.path.exists(record_filename):
            logger.debug('Record file %s not found', record_filename)
            return
        self.install_succeeded = True

        def prepend_root(path):
            # type: (str) -> str
            if root is None or not os.path.isabs(path):
                return path
            else:
                return change_root(root, path)

        with open(record_filename) as f:
            for line in f:
                directory = os.path.dirname(line)
                if directory.endswith('.egg-info'):
                    egg_info_dir = prepend_root(directory)
                    break
            else:
                deprecated(
                    reason=(
                        "{} did not indicate that it installed an "
                        ".egg-info directory. Only setup.py projects "
                        "generating .egg-info directories are supported."
                    ).format(self),
                    replacement=(
                        "for maintainers: updating the setup.py of {0}. "
                        "For users: contact the maintainers of {0} to let "
                        "them know to update their setup.py.".format(
                            self.name)
                    ),
                    gone_in="20.2",
                    issue=6998,
                )
                # FIXME: put the record somewhere
                return

        new_lines = []
        with open(record_filename) as f:
            for line in f:
                filename = line.strip()
                if os.path.isdir(filename):
                    filename += os.path.sep
                new_lines.append(
                    os.path.relpath(prepend_root(filename), egg_info_dir)
                )
        new_lines.sort()
        ensure_dir(egg_info_dir)
        inst_files_path = os.path.join(egg_info_dir, 'installed-files.txt')
        with open(inst_files_path, 'w') as f:
            f.write('\n'.join(new_lines) + '\n')
def _resolve_one(self, requirement_set, req_to_install):
    """Prepare a single requirements file.

    :return: A list of additional InstallRequirements to also install.
    """
    # Tell user what we are doing for this requirement:
    # obtain (editable), skipping, processing (local url), collecting
    # (remote url or package name)
    if req_to_install.constraint or req_to_install.prepared:
        return []

    req_to_install.prepared = True

    # register tmp src for cleanup in case something goes wrong
    requirement_set.reqs_to_cleanup.append(req_to_install)

    abstract_dist = self._get_abstract_dist_for(req_to_install)

    # Parse and return dependencies
    dist = abstract_dist.dist(self.finder)
    try:
        check_dist_requires_python(dist)
    except UnsupportedPythonVersion as err:
        if self.ignore_requires_python:
            logger.warning(err.args[0])
        else:
            raise

    more_reqs = []

    def add_req(subreq, extras_requested):
        sub_install_req = InstallRequirement.from_req(
            str(subreq),
            req_to_install,
            isolated=self.isolated,
            wheel_cache=self.wheel_cache,
        )
        parent_req_name = req_to_install.name
        to_scan_again, add_to_parent = requirement_set.add_requirement(
            sub_install_req,
            parent_req_name=parent_req_name,
            extras_requested=extras_requested,
        )
        if parent_req_name and add_to_parent:
            self._discovered_dependencies[parent_req_name].append(
                add_to_parent
            )
        more_reqs.extend(to_scan_again)

    with indent_log():
        # We add req_to_install before its dependencies, so that we
        # can refer to it when adding dependencies.
        if not requirement_set.has_requirement(req_to_install.name):
            # 'unnamed' requirements will get added here
            req_to_install.is_direct = True
            requirement_set.add_requirement(
                req_to_install, parent_req_name=None,
            )

        if not self.ignore_dependencies:
            if req_to_install.extras:
                logger.debug(
                    "Installing extra requirements: %r",
                    ','.join(req_to_install.extras),
                )
            missing_requested = sorted(
                set(req_to_install.extras) - set(dist.extras)
            )
            for missing in missing_requested:
                logger.warning(
                    '%s does not provide the extra \'%s\'',
                    dist, missing
                )

            available_requested = sorted(
                set(dist.extras) & set(req_to_install.extras)
            )
            for subreq in dist.requires(available_requested):
                add_req(subreq, extras_requested=available_requested)

        if not req_to_install.editable and not req_to_install.satisfied_by:
            # XXX: --no-install leads this to report 'Successfully
            # downloaded' for only non-editable reqs, even though we took
            # action on them.
            requirement_set.successfully_downloaded.append(req_to_install)

    return more_reqs
def install(
    self,
    install_options,  # type: List[str]
    global_options=None,  # type: Optional[Sequence[str]]
    root=None,  # type: Optional[str]
    home=None,  # type: Optional[str]
    prefix=None,  # type: Optional[str]
    warn_script_location=True,  # type: bool
    use_user_site=False,  # type: bool
    pycompile=True  # type: bool
):
    # type: (...) -> None
    global_options = global_options if global_options is not None else []
    if self.editable:
        self.install_editable(
            install_options, global_options, prefix=prefix,
        )
        return

    if self.is_wheel:
        version = wheel.wheel_version(self.source_dir)
        wheel.check_compatibility(version, self.name)

        self.move_wheel_files(
            self.source_dir,
            root=root,
            prefix=prefix,
            home=home,
            warn_script_location=warn_script_location,
            use_user_site=use_user_site,
            pycompile=pycompile,
        )
        self.install_succeeded = True
        return

    # Extend the list of global and install options passed on to
    # the setup.py call with the ones from the requirements file.
    # Options specified in requirements file override those
    # specified on the command line, since the last option given
    # to setup.py is the one that is used.
    global_options = list(global_options) + \
        self.options.get('global_options', [])
    install_options = list(install_options) + \
        self.options.get('install_options', [])

    header_dir = None  # type: Optional[str]
    if running_under_virtualenv():
        py_ver_str = 'python' + sysconfig.get_python_version()
        header_dir = os.path.join(
            sys.prefix, 'include', 'site', py_ver_str, self.name
        )

    with TempDirectory(kind="record") as temp_dir:
        record_filename = os.path.join(temp_dir.path, 'install-record.txt')
        install_args = make_setuptools_install_args(
            self.setup_py_path,
            global_options=global_options,
            install_options=install_options,
            record_filename=record_filename,
            root=root,
            prefix=prefix,
            header_dir=header_dir,
            no_user_config=self.isolated,
            pycompile=pycompile,
        )

        runner = runner_with_spinner_message(
            "Running setup.py install for {}".format(self.name)
        )
        with indent_log(), self.build_env:
            runner(
                cmd=install_args,
                cwd=self.unpacked_source_directory,
            )

        if not os.path.exists(record_filename):
            logger.debug('Record file %s not found', record_filename)
            return
        self.install_succeeded = True

        def prepend_root(path):
            # type: (str) -> str
            if root is None or not os.path.isabs(path):
                return path
            else:
                return change_root(root, path)

        with open(record_filename) as f:
            for line in f:
                directory = os.path.dirname(line)
                if directory.endswith('.egg-info'):
                    egg_info_dir = prepend_root(directory)
                    break
            else:
                logger.warning(
                    'Could not find .egg-info directory in install record'
                    ' for %s',
                    self,
                )
                # FIXME: put the record somewhere
                return

        new_lines = []
        with open(record_filename) as f:
            for line in f:
                filename = line.strip()
                if os.path.isdir(filename):
                    filename += os.path.sep
                new_lines.append(
                    os.path.relpath(prepend_root(filename), egg_info_dir)
                )
        new_lines.sort()
        ensure_dir(egg_info_dir)
        inst_files_path = os.path.join(egg_info_dir, 'installed-files.txt')
        with open(inst_files_path, 'w') as f:
            f.write('\n'.join(new_lines) + '\n')
def find_all_candidates(self, project_name):
    # type: (str) -> List[InstallationCandidate]
    """Find all available InstallationCandidate for project_name

    This checks index_urls and find_links.
    All versions found are returned as an InstallationCandidate list.

    See LinkEvaluator.evaluate_link() for details on which files
    are accepted.
    """
    search_scope = self.search_scope
    index_locations = search_scope.get_index_urls_locations(project_name)
    index_file_loc, index_url_loc = group_locations(index_locations)
    fl_file_loc, fl_url_loc = group_locations(
        self.find_links, expand_dir=True,
    )

    file_locations = (Link(url) for url in itertools.chain(
        index_file_loc, fl_file_loc,
    ))

    # We trust every url that the user has given us whether it was given
    # via --index-url or --find-links.
    # We want to filter out any thing which does not have a secure origin.
    url_locations = [
        link for link in itertools.chain(
            (Link(url) for url in index_url_loc),
            (Link(url) for url in fl_url_loc),
        )
        if self.session.is_secure_origin(link)
    ]

    logger.debug('%d location(s) to search for versions of %s:',
                 len(url_locations), project_name)

    for location in url_locations:
        logger.debug('* %s', location)

    link_evaluator = self.make_link_evaluator(project_name)
    find_links_versions = self._package_versions(
        link_evaluator,
        # We trust every directly linked archive in find_links
        (Link(url, '-f') for url in self.find_links),
    )

    page_versions = []
    for page in self._get_pages(url_locations, project_name):
        logger.debug('Analyzing links from page %s', page.url)
        with indent_log():
            page_versions.extend(
                self._package_versions(link_evaluator, page.iter_links())
            )

    file_versions = self._package_versions(link_evaluator, file_locations)
    if file_versions:
        file_versions.sort(reverse=True)
        logger.debug(
            'Local files found: %s',
            ', '.join([
                url_to_path(candidate.link.url)
                for candidate in file_versions
            ])
        )

    # This is an intentional priority ordering
    return file_versions + find_links_versions + page_versions
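# `group_locations` (used above) partitions candidate locations into local
# files and remote URLs; with expand_dir=True a local directory is expanded
# into the files it contains. A minimal sketch under those assumptions (the
# real helper also special-cases index pages and nonexistent paths):
import os

def group_locations(locations, expand_dir=False):
    files, urls = [], []
    for url in locations:
        if url.startswith('file:'):
            path = url_to_path(url)
        elif os.path.exists(url):
            path = url
        else:
            urls.append(url)
            continue
        if os.path.isdir(path) and expand_dir:
            files.extend(
                path_to_url(os.path.join(path, entry))
                for entry in os.listdir(path)
            )
        else:
            files.append(path_to_url(path))
    return files, urls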
def thread_function() -> None:
    with indent_log():
        results.append(f.format(record))
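# The thread_function above looks like a test helper exercising indent_log
# off the main thread: because the indentation lives in threading.local()
# state (see the sketch near the top), each thread indents independently.
# A hedged usage example:
import threading

messages = []


def worker():
    with indent_log():
        messages.append(
            'indented inside %s' % threading.current_thread().name)


threads = [threading.Thread(target=worker) for _ in range(2)]
for t in threads:
    t.start()
for t in threads:
    t.join()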
def print_config_file_values(self, variant: Kind) -> None:
    """Print key-value pairs from the file of a variant"""
    for name, value in self.configuration.\
            get_values_in_config(variant).items():
        with indent_log():
            write_output("%s: %s", name, value)
def install(
    install_options,  # type: List[str]
    global_options,  # type: Sequence[str]
    root,  # type: Optional[str]
    home,  # type: Optional[str]
    prefix,  # type: Optional[str]
    use_user_site,  # type: bool
    pycompile,  # type: bool
    scheme,  # type: Scheme
    setup_py_path,  # type: str
    isolated,  # type: bool
    req_name,  # type: str
    build_env,  # type: BuildEnvironment
    unpacked_source_directory,  # type: str
    req_description,  # type: str
):
    # type: (...) -> bool
    header_dir = scheme.headers

    with TempDirectory(kind="record") as temp_dir:
        try:
            record_filename = os.path.join(temp_dir.path,
                                           'install-record.txt')
            install_args = make_setuptools_install_args(
                setup_py_path,
                global_options=global_options,
                install_options=install_options,
                record_filename=record_filename,
                root=root,
                prefix=prefix,
                header_dir=header_dir,
                home=home,
                use_user_site=use_user_site,
                no_user_config=isolated,
                pycompile=pycompile,
            )

            runner = runner_with_spinner_message(
                "Running setup.py install for {}".format(req_name)
            )
            with indent_log(), build_env:
                runner(
                    cmd=install_args,
                    cwd=unpacked_source_directory,
                )

            if not os.path.exists(record_filename):
                logger.debug('Record file %s not found', record_filename)
                # Signal to the caller that we didn't install the new package
                return False
        except Exception:
            # Signal to the caller that we didn't install the new package
            raise LegacyInstallFailure

        # At this point, we have successfully installed the requirement.

        # We intentionally do not use any encoding to read the file because
        # setuptools writes the file using distutils.file_util.write_file,
        # which does not specify an encoding.
        with open(record_filename) as f:
            record_lines = f.read().splitlines()

    def prepend_root(path):
        # type: (str) -> str
        if root is None or not os.path.isabs(path):
            return path
        else:
            return change_root(root, path)

    for line in record_lines:
        directory = os.path.dirname(line)
        if directory.endswith('.egg-info'):
            egg_info_dir = prepend_root(directory)
            break
    else:
        deprecated(
            reason=(
                "{} did not indicate that it installed an "
                ".egg-info directory. Only setup.py projects "
                "generating .egg-info directories are supported."
            ).format(req_description),
            replacement=(
                "for maintainers: updating the setup.py of {0}. "
                "For users: contact the maintainers of {0} to let "
                "them know to update their setup.py.".format(
                    req_name
                )
            ),
            gone_in="20.2",
            issue=6998,
        )
        # FIXME: put the record somewhere
        return True

    new_lines = []
    for line in record_lines:
        filename = line.strip()
        if os.path.isdir(filename):
            filename += os.path.sep
        new_lines.append(
            os.path.relpath(prepend_root(filename), egg_info_dir)
        )
    new_lines.sort()
    ensure_dir(egg_info_dir)
    inst_files_path = os.path.join(egg_info_dir, 'installed-files.txt')
    with open(inst_files_path, 'w') as f:
        f.write('\n'.join(new_lines) + '\n')

    return True
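# `prepend_root` above re-bases the absolute paths recorded by setuptools
# under --root. distutils' change_root simply grafts the path onto the new
# root; on POSIX:
from distutils.util import change_root

assert change_root('/tmp/stage', '/usr/lib/python3/foo.py') == \
    '/tmp/stage/usr/lib/python3/foo.py'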
def install(
    self,
    install_options,  # type: List[str]
    global_options=None,  # type: Optional[Sequence[str]]
    root=None,  # type: Optional[str]
    home=None,  # type: Optional[str]
    prefix=None,  # type: Optional[str]
    warn_script_location=True,  # type: bool
    use_user_site=False,  # type: bool
    pycompile=True  # type: bool
):
    # type: (...) -> None
    global_options = global_options if global_options is not None else []
    if self.editable:
        self.install_editable(
            install_options, global_options, prefix=prefix,
        )
        return

    if self.is_wheel:
        version = wheel.wheel_version(self.source_dir)
        wheel.check_compatibility(version, self.name)

        self.move_wheel_files(
            self.source_dir,
            root=root,
            prefix=prefix,
            home=home,
            warn_script_location=warn_script_location,
            use_user_site=use_user_site,
            pycompile=pycompile,
        )
        self.install_succeeded = True
        return

    # Extend the list of global and install options passed on to
    # the setup.py call with the ones from the requirements file.
    # Options specified in requirements file override those
    # specified on the command line, since the last option given
    # to setup.py is the one that is used.
    global_options = list(global_options) + \
        self.options.get('global_options', [])
    install_options = list(install_options) + \
        self.options.get('install_options', [])

    if self.isolated:
        # https://github.com/python/mypy/issues/1174
        global_options = global_options + ["--no-user-cfg"]  # type: ignore

    with TempDirectory(kind="record") as temp_dir:
        record_filename = os.path.join(temp_dir.path, 'install-record.txt')
        install_args = self.get_install_args(
            global_options, record_filename, root, prefix, pycompile,
        )
        msg = 'Running setup.py install for %s' % (self.name,)
        with open_spinner(msg) as spinner:
            with indent_log():
                with self.build_env:
                    call_subprocess(
                        install_args + install_options,
                        cwd=self.setup_py_dir,
                        spinner=spinner,
                    )

        if not os.path.exists(record_filename):
            logger.debug('Record file %s not found', record_filename)
            return
        self.install_succeeded = True

        def prepend_root(path):
            if root is None or not os.path.isabs(path):
                return path
            else:
                return change_root(root, path)

        with open(record_filename) as f:
            for line in f:
                directory = os.path.dirname(line)
                if directory.endswith('.egg-info'):
                    egg_info_dir = prepend_root(directory)
                    break
            else:
                logger.warning(
                    'Could not find .egg-info directory in install record'
                    ' for %s',
                    self,
                )
                # FIXME: put the record somewhere
                # FIXME: should this be an error?
                return

        new_lines = []
        with open(record_filename) as f:
            for line in f:
                filename = line.strip()
                if os.path.isdir(filename):
                    filename += os.path.sep
                new_lines.append(
                    os.path.relpath(prepend_root(filename), egg_info_dir)
                )
        new_lines.sort()
        ensure_dir(egg_info_dir)
        inst_files_path = os.path.join(egg_info_dir, 'installed-files.txt')
        with open(inst_files_path, 'w') as f:
            f.write('\n'.join(new_lines) + '\n')
def prepare_linked_requirement(
    self,
    req,  # type: InstallRequirement
):
    # type: (...) -> AbstractDistribution
    """Prepare a requirement that would be obtained from req.link
    """
    assert req.link
    link = req.link

    # TODO: Breakup into smaller functions
    if link.scheme == 'file':
        path = link.file_path
        logger.info('Processing %s', display_path(path))
    else:
        logger.info('Collecting %s', req.req or req)

    with indent_log():
        # @@ if filesystem packages are not marked
        # editable in a req, a non deterministic error
        # occurs when the script attempts to unpack the
        # build directory

        # Since source_dir is only set for editable requirements.
        assert req.source_dir is None
        req.ensure_has_source_dir(self.build_dir)
        # If a checkout exists, it's unwise to keep going. version
        # inconsistencies are logged later, but do not fail the
        # installation.
        # FIXME: this won't upgrade when there's an existing
        # package unpacked in `req.source_dir`
        if os.path.exists(os.path.join(req.source_dir, 'setup.py')):
            raise PreviousBuildDirError(
                "pip can't proceed with requirements '{}' due to a"
                " pre-existing build directory ({}). This is "
                "likely due to a previous installation that failed"
                ". pip is being responsible and not assuming it "
                "can delete this. Please delete it and try again."
                .format(req, req.source_dir)
            )

        # Now that we have the real link, we can tell what kind of
        # requirements we have and raise some more informative errors
        # than otherwise. (For example, we can raise VcsHashUnsupported
        # for a VCS URL rather than HashMissing.)
        if self.require_hashes:
            # We could check these first 2 conditions inside
            # unpack_url and save repetition of conditions, but then
            # we would report less-useful error messages for
            # unhashable requirements, complaining that there's no
            # hash provided.
            if link.is_vcs:
                raise VcsHashUnsupported()
            elif link.is_existing_dir():
                raise DirectoryUrlHashUnsupported()
            if not req.original_link and not req.is_pinned:
                # Unpinned packages are asking for trouble when a new
                # version is uploaded. This isn't a security check, but
                # it saves users a surprising hash mismatch in the
                # future.
                #
                # file:/// URLs aren't pinnable, so don't complain
                # about them not being pinned.
                raise HashUnpinned()

        hashes = req.hashes(trust_internet=not self.require_hashes)
        if self.require_hashes and not hashes:
            # Known-good hashes are missing for this requirement, so
            # shim it with a facade object that will provoke hash
            # computation and then raise a HashMissing exception
            # showing the user what the hash should be.
            hashes = MissingHashes()

        download_dir = self.download_dir
        if link.is_wheel and self.wheel_download_dir:
            # when doing 'pip wheel` we download wheels to a
            # dedicated dir.
            download_dir = self.wheel_download_dir

        try:
            local_path = unpack_url(
                link, req.source_dir, self.downloader, download_dir,
                hashes=hashes,
            )
        except requests.HTTPError as exc:
            logger.critical(
                'Could not install requirement %s because of error %s',
                req, exc,
            )
            raise InstallationError(
                'Could not install requirement {} because of HTTP '
                'error {} for URL {}'.format(req, exc, link)
            )

        # For use in later processing, preserve the file path on the
        # requirement.
        if local_path:
            req.local_file_path = local_path

        if link.is_wheel:
            if download_dir:
                # When downloading, we only unpack wheels to get
                # metadata.
                autodelete_unpacked = True
            else:
                # When installing a wheel, we use the unpacked
                # wheel.
                autodelete_unpacked = False
        else:
            # We always delete unpacked sdists after pip runs.
            autodelete_unpacked = True
        if autodelete_unpacked:
            write_delete_marker_file(req.source_dir)

        abstract_dist = _get_prepared_distribution(
            req, self.req_tracker, self.finder, self.build_isolation,
        )

        if download_dir:
            if link.is_existing_dir():
                logger.info('Link is a directory, ignoring download_dir')
            elif local_path and not os.path.exists(
                os.path.join(download_dir, link.filename)
            ):
                _copy_file(local_path, download_dir, link)

        if self._download_should_save:
            # Make a .zip of the source_dir we already created.
            if link.is_vcs:
                req.archive(self.download_dir)

    return abstract_dist
def build(
    self,
    requirements,  # type: Iterable[InstallRequirement]
    should_unpack=False,  # type: bool
):
    # type: (...) -> List[InstallRequirement]
    """Build wheels.

    :param should_unpack: If True, after building the wheel, unpack it
        and replace the sdist with the unpacked version in preparation
        for installation.
    :return: The list of InstallRequirements that failed to build.
    """
    # pip install uses should_unpack=True.
    # pip install never provides a _wheel_dir.
    # pip wheel uses should_unpack=False.
    # pip wheel always provides a _wheel_dir (via the preparer).
    assert (
        (should_unpack and not self._wheel_dir) or
        (not should_unpack and self._wheel_dir)
    )

    buildset = []
    cache_available = bool(self.wheel_cache.cache_dir)

    for req in requirements:
        ephem_cache = should_use_ephemeral_cache(
            req,
            should_unpack=should_unpack,
            cache_available=cache_available,
            check_binary_allowed=self.check_binary_allowed,
        )
        if ephem_cache is None:
            continue

        # Determine where the wheel should go.
        if should_unpack:
            if ephem_cache:
                output_dir = self.wheel_cache.get_ephem_path_for_link(
                    req.link
                )
            else:
                output_dir = self.wheel_cache.get_path_for_link(req.link)
        else:
            output_dir = self._wheel_dir

        buildset.append((req, output_dir))

    if not buildset:
        return []

    # TODO by @pradyunsg
    # Should break up this method into 2 separate methods.

    # Build the wheels.
    logger.info(
        "Building wheels for collected packages: %s",
        ", ".join([req.name for (req, _) in buildset]),
    )

    python_tag = None
    if should_unpack:
        python_tag = pep425tags.implementation_tag

    with indent_log():
        build_success, build_failure = [], []
        for req, output_dir in buildset:
            try:
                ensure_dir(output_dir)
            except OSError as e:
                logger.warning(
                    "Building wheel for %s failed: %s",
                    req.name, e,
                )
                build_failure.append(req)
                continue

            wheel_file = self._build_one(
                req, output_dir, python_tag=python_tag,
            )
            if wheel_file:
                build_success.append(req)
                if should_unpack:
                    # XXX: This is mildly duplicative with prepare_files,
                    # but not close enough to pull out to a single common
                    # method.
                    # The code below assumes temporary source dirs -
                    # prevent it doing bad things.
                    if (
                        req.source_dir and
                        not has_delete_marker_file(req.source_dir)
                    ):
                        raise AssertionError(
                            "bad source dir - missing marker")
                    # Delete the source we built the wheel from
                    req.remove_temporary_source()
                    # set the build directory again - name is known from
                    # the work prepare_files did.
                    req.source_dir = req.ensure_build_location(
                        self.preparer.build_dir
                    )
                    # Update the link for this.
                    req.link = Link(path_to_url(wheel_file))
                    assert req.link.is_wheel
                    # extract the wheel into the dir
                    unpack_file(req.link.file_path, req.source_dir)
            else:
                build_failure.append(req)

    # notify success/failure
    if build_success:
        logger.info(
            "Successfully built %s",
            " ".join([req.name for req in build_success]),
        )
    if build_failure:
        logger.info(
            "Failed to build %s",
            " ".join([req.name for req in build_failure]),
        )
    # Return a list of requirements that failed to build
    return build_failure
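# `has_delete_marker_file` (used above) replaces the inline marker check in
# the older build() variants; reconstructed from that check:
import os

def has_delete_marker_file(directory):
    return os.path.exists(os.path.join(directory, PIP_DELETE_MARKER_FILENAME))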
def install(
    install_options,  # type: List[str]
    global_options,  # type: Sequence[str]
    root,  # type: Optional[str]
    home,  # type: Optional[str]
    prefix,  # type: Optional[str]
    use_user_site,  # type: bool
    pycompile,  # type: bool
    scheme,  # type: Scheme
    setup_py_path,  # type: str
    isolated,  # type: bool
    req_name,  # type: str
    build_env,  # type: BuildEnvironment
    unpacked_source_directory,  # type: str
    req_description,  # type: str
):
    # type: (...) -> bool
    header_dir = scheme.headers

    with TempDirectory(kind="record") as temp_dir:
        try:
            record_filename = os.path.join(temp_dir.path,
                                           'install-record.txt')
            install_args = make_setuptools_install_args(
                setup_py_path,
                global_options=global_options,
                install_options=install_options,
                record_filename=record_filename,
                root=root,
                prefix=prefix,
                header_dir=header_dir,
                home=home,
                use_user_site=use_user_site,
                no_user_config=isolated,
                pycompile=pycompile,
            )

            runner = runner_with_spinner_message(
                f"Running setup.py install for {req_name}")
            with indent_log(), build_env:
                runner(
                    cmd=install_args,
                    cwd=unpacked_source_directory,
                )

            if not os.path.exists(record_filename):
                logger.debug('Record file %s not found', record_filename)
                # Signal to the caller that we didn't install the new package
                return False
        except Exception:
            # Signal to the caller that we didn't install the new package
            raise LegacyInstallFailure

        # At this point, we have successfully installed the requirement.

        # We intentionally do not use any encoding to read the file because
        # setuptools writes the file using distutils.file_util.write_file,
        # which does not specify an encoding.
        with open(record_filename) as f:
            record_lines = f.read().splitlines()

    write_installed_files_from_setuptools_record(
        record_lines, root, req_description)
    return True
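# `write_installed_files_from_setuptools_record` is the tail of the older
# variant above, factored out: locate the .egg-info directory named in the
# setuptools record, then write installed-files.txt relative to it. A hedged
# sketch reconstructed from that inline logic (the real helper may report a
# missing .egg-info directory differently):
import os
from distutils.util import change_root

def write_installed_files_from_setuptools_record(
        record_lines, root, req_description):
    def prepend_root(path):
        if root is None or not os.path.isabs(path):
            return path
        return change_root(root, path)

    for line in record_lines:
        directory = os.path.dirname(line)
        if directory.endswith('.egg-info'):
            egg_info_dir = prepend_root(directory)
            break
    else:
        logger.warning(
            'Could not find .egg-info directory in install record for %s',
            req_description,
        )
        return

    new_lines = []
    for line in record_lines:
        filename = line.strip()
        if os.path.isdir(filename):
            filename += os.path.sep
        new_lines.append(
            os.path.relpath(prepend_root(filename), egg_info_dir))
    new_lines.sort()
    ensure_dir(egg_info_dir)
    inst_files_path = os.path.join(egg_info_dir, 'installed-files.txt')
    with open(inst_files_path, 'w') as f:
        f.write('\n'.join(new_lines) + '\n')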
def prepare_linked_requirement(self, req, session, finder,
                               upgrade_allowed, require_hashes):
    """Prepare a requirement that would be obtained from req.link
    """
    # TODO: Break up into smaller functions
    if req.link and req.link.scheme == 'file':
        path = url_to_path(req.link.url)
        logger.info('Processing %s', display_path(path))
    else:
        logger.info('Collecting %s', req)

    with indent_log():
        # @@ if filesystem packages are not marked
        # editable in a req, a non deterministic error
        # occurs when the script attempts to unpack the
        # build directory
        req.ensure_has_source_dir(self.build_dir)
        # If a checkout exists, it's unwise to keep going.  version
        # inconsistencies are logged later, but do not fail the
        # installation.
        # FIXME: this won't upgrade when there's an existing
        # package unpacked in `req.source_dir`
        if os.path.exists(os.path.join(req.source_dir, 'setup.py')):
            raise PreviousBuildDirError(
                "pip can't proceed with requirements '%s' due to a"
                " pre-existing build directory (%s). This is "
                "likely due to a previous installation that failed"
                ". pip is being responsible and not assuming it "
                "can delete this. Please delete it and try again."
                % (req, req.source_dir)
            )
        req.populate_link(finder, upgrade_allowed, require_hashes)

        # We can't hit this spot and have populate_link return None.
        # req.satisfied_by is None here (because we're
        # guarded) and upgrade has no impact except when satisfied_by
        # is not None.
        # Then inside find_requirement existing_applicable -> False
        # If no new versions are found, DistributionNotFound is raised,
        # otherwise a result is guaranteed.
        assert req.link
        link = req.link

        # Now that we have the real link, we can tell what kind of
        # requirements we have and raise some more informative errors
        # than otherwise. (For example, we can raise VcsHashUnsupported
        # for a VCS URL rather than HashMissing.)
        if require_hashes:
            # We could check these first 2 conditions inside
            # unpack_url and save repetition of conditions, but then
            # we would report less-useful error messages for
            # unhashable requirements, complaining that there's no
            # hash provided.
            if is_vcs_url(link):
                raise VcsHashUnsupported()
            elif is_file_url(link) and is_dir_url(link):
                raise DirectoryUrlHashUnsupported()
            if not req.original_link and not req.is_pinned:
                # Unpinned packages are asking for trouble when a new
                # version is uploaded. This isn't a security check, but
                # it saves users a surprising hash mismatch in the
                # future.
                #
                # file:/// URLs aren't pinnable, so don't complain
                # about them not being pinned.
                raise HashUnpinned()

        hashes = req.hashes(trust_internet=not require_hashes)
        if require_hashes and not hashes:
            # Known-good hashes are missing for this requirement, so
            # shim it with a facade object that will provoke hash
            # computation and then raise a HashMissing exception
            # showing the user what the hash should be.
            hashes = MissingHashes()

        try:
            download_dir = self.download_dir
            # We always delete unpacked sdists after pip runs.
            autodelete_unpacked = True
            if req.link.is_wheel and self.wheel_download_dir:
                # when doing 'pip wheel' we download wheels to a
                # dedicated dir.
                download_dir = self.wheel_download_dir
            if req.link.is_wheel:
                if download_dir:
                    # When downloading, we only unpack wheels to get
                    # metadata.
                    autodelete_unpacked = True
                else:
                    # When installing a wheel, we use the unpacked
                    # wheel.
                    autodelete_unpacked = False
            unpack_url(
                req.link, req.source_dir,
                download_dir, autodelete_unpacked,
                session=session, hashes=hashes,
                progress_bar=self.progress_bar
            )
        except requests.HTTPError as exc:
            logger.critical(
                'Could not install requirement %s because of error %s',
                req,
                exc,
            )
            raise InstallationError(
                'Could not install requirement %s because of HTTP '
                'error %s for URL %s' %
                (req, exc, req.link)
            )
        abstract_dist = make_abstract_dist(req)
        abstract_dist.prep_for_dist(finder, self.build_isolation)
        if self._download_should_save:
            # Make a .zip of the source_dir we already created.
            if req.link.scheme in vcs.all_schemes:
                req.archive(self.download_dir)
    return abstract_dist
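The require-hashes branch above distils to a small policy check. A sketch, reusing the same helpers and exception types from this module; `check_hash_policy` is a hypothetical name, not a pip function:

def check_hash_policy(req, link, require_hashes):
    # Hypothetical distillation of the checks above; `req` and
    # `link` are the same objects used in prepare_linked_requirement.
    if require_hashes:
        if is_vcs_url(link):
            raise VcsHashUnsupported()
        if is_file_url(link) and is_dir_url(link):
            raise DirectoryUrlHashUnsupported()
        if not req.original_link and not req.is_pinned:
            raise HashUnpinned()
    hashes = req.hashes(trust_internet=not require_hashes)
    if require_hashes and not hashes:
        # Provoke hash computation so HashMissing can tell the user
        # which hash to pin.
        hashes = MissingHashes()
    return hashes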
def install(
    install_req,  # type: InstallRequirement
    install_options,  # type: List[str]
    global_options,  # type: Sequence[str]
    root,  # type: Optional[str]
    home,  # type: Optional[str]
    prefix,  # type: Optional[str]
    use_user_site,  # type: bool
    pycompile,  # type: bool
    scheme,  # type: Scheme
):
    # type: (...) -> None
    # Extend the list of global and install options passed on to
    # the setup.py call with the ones from the requirements file.
    # Options specified in requirements file override those
    # specified on the command line, since the last option given
    # to setup.py is the one that is used.
    global_options = list(global_options) + \
        install_req.options.get('global_options', [])
    install_options = list(install_options) + \
        install_req.options.get('install_options', [])

    header_dir = scheme.headers

    with TempDirectory(kind="record") as temp_dir:
        record_filename = os.path.join(temp_dir.path, 'install-record.txt')
        install_args = make_setuptools_install_args(
            install_req.setup_py_path,
            global_options=global_options,
            install_options=install_options,
            record_filename=record_filename,
            root=root,
            prefix=prefix,
            header_dir=header_dir,
            home=home,
            use_user_site=use_user_site,
            no_user_config=install_req.isolated,
            pycompile=pycompile,
        )

        runner = runner_with_spinner_message(
            "Running setup.py install for {}".format(install_req.name)
        )
        with indent_log(), install_req.build_env:
            runner(
                cmd=install_args,
                cwd=install_req.unpacked_source_directory,
            )

        if not os.path.exists(record_filename):
            logger.debug('Record file %s not found', record_filename)
            return
        install_req.install_succeeded = True

        # We intentionally do not use any encoding to read the file because
        # setuptools writes the file using distutils.file_util.write_file,
        # which does not specify an encoding.
        with open(record_filename) as f:
            record_lines = f.read().splitlines()

    def prepend_root(path):
        # type: (str) -> str
        if root is None or not os.path.isabs(path):
            return path
        else:
            return change_root(root, path)

    for line in record_lines:
        directory = os.path.dirname(line)
        if directory.endswith('.egg-info'):
            egg_info_dir = prepend_root(directory)
            break
    else:
        deprecated(
            reason=(
                "{} did not indicate that it installed an "
                ".egg-info directory. Only setup.py projects "
                "generating .egg-info directories are supported."
            ).format(install_req),
            replacement=(
                "for maintainers: updating the setup.py of {0}. "
                "For users: contact the maintainers of {0} to let "
                "them know to update their setup.py.".format(install_req.name)
            ),
            gone_in="20.2",
            issue=6998,
        )
        # FIXME: put the record somewhere
        return

    new_lines = []
    for line in record_lines:
        filename = line.strip()
        if os.path.isdir(filename):
            filename += os.path.sep
        new_lines.append(os.path.relpath(prepend_root(filename), egg_info_dir))
    new_lines.sort()
    ensure_dir(egg_info_dir)
    inst_files_path = os.path.join(egg_info_dir, 'installed-files.txt')
    with open(inst_files_path, 'w') as f:
        f.write('\n'.join(new_lines) + '\n')
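For reference, `distutils.util.change_root` grafts an absolute path onto a new root, which is why `prepend_root` above leaves relative paths, and the no-root case, untouched. A quick illustration with made-up POSIX paths:

from distutils.util import change_root

# Made-up paths, POSIX semantics: the leading '/' is stripped and
# the remainder is re-rooted under the new prefix.
assert change_root('/tmp/stage', '/usr/lib/python3/os.py') == \
    '/tmp/stage/usr/lib/python3/os.py'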
def cleanup_files(self):
    """Clean up files, remove builds."""
    logger.debug('Cleaning up...')
    with indent_log():
        for req in self.reqs_to_cleanup:
            req.remove_temporary_source()
def _resolve_one(self, requirement_set, req_to_install):
    """Prepare a single requirement.

    :return: A list of additional InstallRequirements to also install.
    """
    # Tell user what we are doing for this requirement:
    # obtain (editable), skipping, processing (local url), collecting
    # (remote url or package name)
    if req_to_install.constraint or req_to_install.prepared:
        return []

    req_to_install.prepared = True

    # register tmp src for cleanup in case something goes wrong
    requirement_set.reqs_to_cleanup.append(req_to_install)

    abstract_dist = self._get_abstract_dist_for(req_to_install)

    # Parse and return dependencies
    dist = abstract_dist.dist(self.finder)
    try:
        check_dist_requires_python(dist)
    except UnsupportedPythonVersion as err:
        if self.ignore_requires_python:
            logger.warning(err.args[0])
        else:
            raise

    more_reqs = []

    def add_req(subreq, extras_requested):
        sub_install_req = InstallRequirement.from_req(
            str(subreq),
            req_to_install,
            isolated=self.isolated,
            wheel_cache=self.wheel_cache,
        )
        parent_req_name = req_to_install.name
        to_scan_again, add_to_parent = requirement_set.add_requirement(
            sub_install_req,
            parent_req_name=parent_req_name,
            extras_requested=extras_requested,
        )
        if parent_req_name and add_to_parent:
            self._discovered_dependencies[parent_req_name].append(
                add_to_parent
            )
        more_reqs.extend(to_scan_again)

    with indent_log():
        # We add req_to_install before its dependencies, so that we
        # can refer to it when adding dependencies.
        if not requirement_set.has_requirement(req_to_install.name):
            # 'unnamed' requirements will get added here
            req_to_install.is_direct = True
            requirement_set.add_requirement(
                req_to_install, parent_req_name=None,
            )

        if not self.ignore_dependencies:
            if req_to_install.extras:
                logger.debug(
                    "Installing extra requirements: %r",
                    ','.join(req_to_install.extras),
                )
            missing_requested = sorted(
                set(req_to_install.extras) - set(dist.extras)
            )
            for missing in missing_requested:
                logger.warning(
                    '%s does not provide the extra \'%s\'',
                    dist, missing
                )

            available_requested = sorted(
                set(dist.extras) & set(req_to_install.extras)
            )
            for subreq in dist.requires(available_requested):
                add_req(subreq, extras_requested=available_requested)

        if not req_to_install.editable and not req_to_install.satisfied_by:
            # XXX: --no-install leads this to report 'Successfully
            # downloaded' for only non-editable reqs, even though we took
            # action on them.
            requirement_set.successfully_downloaded.append(req_to_install)

    return more_reqs
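Each `_resolve_one` call is one breadth-first step: it returns the sub-requirements it discovered, and the resolver feeds those back into a work queue. A simplified sketch of that outer loop (hypothetical; the real loop lives in the resolver's `resolve` method):

def resolve_all(resolver, requirement_set, root_reqs):
    # Hypothetical driver: keep resolving until no call to
    # _resolve_one discovers new requirements.
    queue = list(root_reqs)
    while queue:
        req = queue.pop(0)
        queue.extend(resolver._resolve_one(requirement_set, req))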
def install(self, install_options, global_options=None, root=None,
            home=None, prefix=None, warn_script_location=True,
            use_user_site=False, pycompile=True):
    global_options = global_options if global_options is not None else []
    if self.editable:
        self.install_editable(
            install_options, global_options, prefix=prefix,
        )
        return
    if self.is_wheel:
        version = wheel.wheel_version(self.source_dir)
        wheel.check_compatibility(version, self.name)

        self.move_wheel_files(
            self.source_dir, root=root, prefix=prefix, home=home,
            warn_script_location=warn_script_location,
            use_user_site=use_user_site, pycompile=pycompile,
        )
        self.install_succeeded = True
        return

    # Extend the list of global and install options passed on to
    # the setup.py call with the ones from the requirements file.
    # Options specified in requirements file override those
    # specified on the command line, since the last option given
    # to setup.py is the one that is used.
    global_options = list(global_options) + \
        self.options.get('global_options', [])
    install_options = list(install_options) + \
        self.options.get('install_options', [])

    if self.isolated:
        global_options = global_options + ["--no-user-cfg"]

    with TempDirectory(kind="record") as temp_dir:
        record_filename = os.path.join(temp_dir.path, 'install-record.txt')
        install_args = self.get_install_args(
            global_options, record_filename, root, prefix, pycompile,
        )
        msg = 'Running setup.py install for %s' % (self.name,)
        with open_spinner(msg) as spinner:
            with indent_log():
                with self.build_env:
                    call_subprocess(
                        install_args + install_options,
                        cwd=self.setup_py_dir,
                        show_stdout=False,
                        spinner=spinner,
                    )

        if not os.path.exists(record_filename):
            logger.debug('Record file %s not found', record_filename)
            return
        self.install_succeeded = True

        def prepend_root(path):
            if root is None or not os.path.isabs(path):
                return path
            else:
                return change_root(root, path)

        with open(record_filename) as f:
            for line in f:
                directory = os.path.dirname(line)
                if directory.endswith('.egg-info'):
                    egg_info_dir = prepend_root(directory)
                    break
            else:
                logger.warning(
                    'Could not find .egg-info directory in install record'
                    ' for %s',
                    self,
                )
                # FIXME: put the record somewhere
                # FIXME: should this be an error?
                return
        new_lines = []
        with open(record_filename) as f:
            for line in f:
                filename = line.strip()
                if os.path.isdir(filename):
                    filename += os.path.sep
                new_lines.append(
                    os.path.relpath(prepend_root(filename), egg_info_dir))
        new_lines.sort()
        ensure_dir(egg_info_dir)
        inst_files_path = os.path.join(egg_info_dir, 'installed-files.txt')
        with open(inst_files_path, 'w') as f:
            f.write('\n'.join(new_lines) + '\n')
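The option merging above depends on setup.py's last-option-wins behaviour: per-requirement options from a requirements file are appended after the command-line ones, so they take precedence. For example, with made-up option strings:

cli_install_options = ['--prefix=/opt/a']
requirements_file_options = {'install_options': ['--prefix=/opt/b']}

merged = list(cli_install_options) + \
    requirements_file_options.get('install_options', [])
# setup.py sees --prefix=/opt/a, then --prefix=/opt/b; the later wins.
assert merged == ['--prefix=/opt/a', '--prefix=/opt/b']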
def find_all_candidates(self, project_name):
    # type: (str) -> List[Optional[InstallationCandidate]]
    """Find all available InstallationCandidate for project_name

    This checks index_urls and find_links.
    All versions found are returned as an InstallationCandidate list.

    See _link_package_versions for details on which files are accepted
    """
    index_locations = self._get_index_urls_locations(project_name)
    index_file_loc, index_url_loc = self._sort_locations(index_locations)
    fl_file_loc, fl_url_loc = self._sort_locations(
        self.find_links, expand_dir=True,
    )

    file_locations = (Link(url) for url in itertools.chain(
        index_file_loc, fl_file_loc,
    ))

    # We trust every url that the user has given us whether it was given
    # via --index-url or --find-links.
    # We want to filter out anything which does not have a secure origin.
    url_locations = [
        link for link in itertools.chain(
            (Link(url) for url in index_url_loc),
            (Link(url) for url in fl_url_loc),
        )
        if self._validate_secure_origin(logger, link)
    ]

    logger.debug('%d location(s) to search for versions of %s:',
                 len(url_locations), project_name)

    for location in url_locations:
        logger.debug('* %s', location)

    canonical_name = canonicalize_name(project_name)
    formats = self.format_control.get_allowed_formats(canonical_name)
    search = Search(project_name, canonical_name, formats)
    find_links_versions = self._package_versions(
        # We trust every directly linked archive in find_links
        (Link(url, '-f') for url in self.find_links),
        search
    )

    page_versions = []
    for page in self._get_pages(url_locations, project_name):
        logger.debug('Analyzing links from page %s', page.url)
        with indent_log():
            page_versions.extend(
                self._package_versions(page.iter_links(), search)
            )

    file_versions = self._package_versions(file_locations, search)
    if file_versions:
        file_versions.sort(reverse=True)
        logger.debug(
            'Local files found: %s',
            ', '.join([
                url_to_path(candidate.location.url)
                for candidate in file_versions
            ])
        )

    # This is an intentional priority ordering
    return file_versions + find_links_versions + page_versions
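The closing concatenation is a deliberate priority order: candidates built from local files precede find-links and index-page candidates in the returned list. A toy illustration, using tuples as stand-ins for InstallationCandidate:

# (name, version, url) tuples as stand-ins for InstallationCandidate.
file_versions = [('pkg', '1.0', 'file:///srv/wheels/pkg-1.0.whl')]
find_links_versions = []
page_versions = [('pkg', '1.0', 'https://example.org/pkg-1.0.whl')]

candidates = file_versions + find_links_versions + page_versions
# The local file for an equivalent version is seen first.
assert candidates[0][2].startswith('file://')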
def _resolve_one(
    self,
    requirement_set,  # type: RequirementSet
    req_to_install  # type: InstallRequirement
):
    # type: (...) -> List[InstallRequirement]
    """Prepare a single requirement.

    :return: A list of additional InstallRequirements to also install.
    """
    # Tell user what we are doing for this requirement:
    # obtain (editable), skipping, processing (local url), collecting
    # (remote url or package name)
    if req_to_install.constraint or req_to_install.prepared:
        return []

    req_to_install.prepared = True

    # register tmp src for cleanup in case something goes wrong
    requirement_set.reqs_to_cleanup.append(req_to_install)

    abstract_dist = self._get_abstract_dist_for(req_to_install)

    # Parse and return dependencies
    dist = abstract_dist.get_pkg_resources_distribution()
    # This will raise UnsupportedPythonVersion if the given Python
    # version isn't compatible with the distribution's Requires-Python.
    _check_dist_requires_python(
        dist, version_info=self._py_version_info,
        ignore_requires_python=self.ignore_requires_python,
    )

    more_reqs = []  # type: List[InstallRequirement]

    def add_req(subreq, extras_requested):
        sub_install_req = self._make_install_req(
            str(subreq),
            req_to_install,
        )
        parent_req_name = req_to_install.name
        to_scan_again, add_to_parent = requirement_set.add_requirement(
            sub_install_req,
            parent_req_name=parent_req_name,
            extras_requested=extras_requested,
        )
        if parent_req_name and add_to_parent:
            self._discovered_dependencies[parent_req_name].append(
                add_to_parent)
        more_reqs.extend(to_scan_again)

    with indent_log():
        # We add req_to_install before its dependencies, so that we
        # can refer to it when adding dependencies.
        if not requirement_set.has_requirement(req_to_install.name):
            # 'unnamed' requirements will get added here
            req_to_install.is_direct = True
            requirement_set.add_requirement(
                req_to_install, parent_req_name=None,
            )

        if not self.ignore_dependencies:
            if req_to_install.extras:
                logger.debug(
                    "Installing extra requirements: %r",
                    ','.join(req_to_install.extras),
                )
            missing_requested = sorted(
                set(req_to_install.extras) - set(dist.extras))
            for missing in missing_requested:
                logger.warning('%s does not provide the extra \'%s\'',
                               dist, missing)

            available_requested = sorted(
                set(dist.extras) & set(req_to_install.extras))
            for subreq in dist.requires(available_requested):
                add_req(subreq, extras_requested=available_requested)

        if not req_to_install.editable and not req_to_install.satisfied_by:
            # XXX: --no-install leads this to report 'Successfully
            # downloaded' for only non-editable reqs, even though we took
            # action on them.
            requirement_set.successfully_downloaded.append(req_to_install)

    return more_reqs
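The extras handling in both versions of `_resolve_one` reduces to plain set algebra: warn about requested-but-missing extras, then resolve dependencies only for the extras the distribution actually defines. A standalone sketch with invented extra names:

def split_extras(requested, available):
    # Mirrors missing_requested / available_requested above.
    missing = sorted(set(requested) - set(available))
    usable = sorted(set(available) & set(requested))
    return usable, missing

usable, missing = split_extras({'ssl', 'gui'}, {'ssl', 'tests'})
assert usable == ['ssl'] and missing == ['gui']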
def find_all_candidates(self, project_name):
    """Find all available InstallationCandidate for project_name

    This checks index_urls, find_links and dependency_links.
    All versions found are returned as an InstallationCandidate list.

    See _link_package_versions for details on which files are accepted
    """
    index_locations = self._get_index_urls_locations(project_name)
    index_file_loc, index_url_loc = self._sort_locations(index_locations)
    fl_file_loc, fl_url_loc = self._sort_locations(
        self.find_links, expand_dir=True,
    )
    dep_file_loc, dep_url_loc = self._sort_locations(self.dependency_links)

    file_locations = (Link(url) for url in itertools.chain(
        index_file_loc, fl_file_loc, dep_file_loc,
    ))

    # We trust every url that the user has given us whether it was given
    # via --index-url or --find-links.
    # We explicitly do not trust links that came from dependency_links.
    # We want to filter out anything which does not have a secure origin.
    url_locations = [
        link for link in itertools.chain(
            (Link(url) for url in index_url_loc),
            (Link(url) for url in fl_url_loc),
            (Link(url) for url in dep_url_loc),
        )
        if self._validate_secure_origin(logger, link)
    ]

    logger.debug('%d location(s) to search for versions of %s:',
                 len(url_locations), project_name)

    for location in url_locations:
        logger.debug('* %s', location)

    canonical_name = canonicalize_name(project_name)
    formats = fmt_ctl_formats(self.format_control, canonical_name)
    search = Search(project_name, canonical_name, formats)
    find_links_versions = self._package_versions(
        # We trust every directly linked archive in find_links
        (Link(url, '-f') for url in self.find_links),
        search
    )

    page_versions = []
    for page in self._get_pages(url_locations, project_name):
        logger.debug('Analyzing links from page %s', page.url)
        with indent_log():
            page_versions.extend(
                self._package_versions(page.links, search)
            )

    dependency_versions = self._package_versions(
        (Link(url) for url in self.dependency_links), search
    )
    if dependency_versions:
        logger.debug(
            'dependency_links found: %s',
            ', '.join([
                version.location.url for version in dependency_versions
            ])
        )

    file_versions = self._package_versions(file_locations, search)
    if file_versions:
        file_versions.sort(reverse=True)
        logger.debug(
            'Local files found: %s',
            ', '.join([
                url_to_path(candidate.location.url)
                for candidate in file_versions
            ])
        )

    # This is an intentional priority ordering
    return (
        file_versions + find_links_versions + page_versions +
        dependency_versions
    )
            download_dir = self.wheel_download_dir

        if link.is_wheel:
            if download_dir:
                # When downloading, we only unpack wheels to get
                # metadata.
                autodelete_unpacked = True
            else:
                # When installing a wheel, we use the unpacked
                # wheel.
                autodelete_unpacked = False
        else:
            # We always delete unpacked sdists after pip runs.
            autodelete_unpacked = True

        with indent_log():
            # source_dir is only set for editable requirements.
            assert req.source_dir is None
            req.ensure_has_source_dir(self.build_dir, autodelete_unpacked)
            # If a checkout exists, it's unwise to keep going.  version
            # inconsistencies are logged later, but do not fail the
            # installation.
            # FIXME: this won't upgrade when there's an existing
            # package unpacked in `req.source_dir`
            if os.path.exists(os.path.join(req.source_dir, 'setup.py')):
                raise PreviousBuildDirError(
                    "pip can't proceed with requirements '{}' due to a"
                    " pre-existing build directory ({}). This is "
                    "likely due to a previous installation that failed"
                    ". pip is being responsible and not assuming it "
                    "can delete this. Please delete it and try again."
                    .format(req, req.source_dir)
                )