def run(self, options, args):
    """Resolve, wheel-build, and install the requirements given on the CLI.

    Implements the `pip install` entry point for a PEP 517-aware pip
    (uses RequirementTracker and builds PEP 517 requirements eagerly).

    :param options: parsed optparse values for the install command
    :param args: positional requirement specifiers from the command line
    :return: the populated RequirementSet (or ERROR on EnvironmentError)
    :raises CommandError: for incompatible option combinations or a bad
        --target path
    :raises InstallationError: for --user inside a no-global virtualenv,
        or when a PEP 517 requirement fails to build
    """
    cmdoptions.check_install_build_global(options)

    upgrade_strategy = "to-satisfy-only"
    if options.upgrade:
        upgrade_strategy = options.upgrade_strategy

    if options.build_dir:
        options.build_dir = os.path.abspath(options.build_dir)

    cmdoptions.check_dist_restriction(options, check_target=True)

    if options.python_version:
        python_versions = [options.python_version]
    else:
        python_versions = None

    options.src_dir = os.path.abspath(options.src_dir)
    install_options = options.install_options or []
    if options.use_user_site:
        if options.prefix_path:
            raise CommandError(
                "Can not combine '--user' and '--prefix' as they imply "
                "different installation locations")
        if virtualenv_no_global():
            raise InstallationError(
                "Can not perform a '--user' install. User site-packages "
                "are not visible in this virtualenv.")
        install_options.append('--user')
        install_options.append('--prefix=')

    target_temp_dir = TempDirectory(kind="target")
    if options.target_dir:
        options.ignore_installed = True
        options.target_dir = os.path.abspath(options.target_dir)
        if (os.path.exists(options.target_dir) and
                not os.path.isdir(options.target_dir)):
            raise CommandError(
                "Target path exists but is not a directory, will not "
                "continue.")

        # Create a target directory for using with the target option
        target_temp_dir.create()
        install_options.append('--home=' + target_temp_dir.path)

    global_options = options.global_options or []

    with self._build_session(options) as session:
        finder = self._build_package_finder(
            options=options,
            session=session,
            platform=options.platform,
            python_versions=python_versions,
            abi=options.abi,
            implementation=options.implementation,
        )
        # Only auto-delete the build dir when pip created it itself.
        build_delete = (not (options.no_clean or options.build_dir))
        wheel_cache = WheelCache(options.cache_dir, options.format_control)

        if options.cache_dir and not check_path_owner(options.cache_dir):
            logger.warning(
                "The directory '%s' or its parent directory is not owned "
                "by the current user and caching wheels has been "
                "disabled. check the permissions and owner of that "
                "directory. If executing pip with sudo, you may want "
                "sudo's -H flag.",
                options.cache_dir,
            )
            options.cache_dir = None

        with RequirementTracker() as req_tracker, TempDirectory(
            options.build_dir, delete=build_delete, kind="install"
        ) as directory:
            requirement_set = RequirementSet(
                require_hashes=options.require_hashes,
                check_supported_wheels=not options.target_dir,
            )

            try:
                self.populate_requirement_set(
                    requirement_set, args, options, finder, session,
                    self.name, wheel_cache
                )
                preparer = RequirementPreparer(
                    build_dir=directory.path,
                    src_dir=options.src_dir,
                    download_dir=None,
                    wheel_download_dir=None,
                    progress_bar=options.progress_bar,
                    build_isolation=options.build_isolation,
                    req_tracker=req_tracker,
                )

                resolver = Resolver(
                    preparer=preparer,
                    finder=finder,
                    session=session,
                    wheel_cache=wheel_cache,
                    use_user_site=options.use_user_site,
                    upgrade_strategy=upgrade_strategy,
                    force_reinstall=options.force_reinstall,
                    ignore_dependencies=options.ignore_dependencies,
                    ignore_requires_python=options.ignore_requires_python,
                    ignore_installed=options.ignore_installed,
                    isolated=options.isolated_mode,
                    use_pep517=options.use_pep517
                )
                resolver.resolve(requirement_set)

                protect_pip_from_modification_on_windows(
                    modifying_pip=requirement_set.has_requirement("pip"))

                # Consider legacy and PEP517-using requirements separately
                legacy_requirements = []
                pep517_requirements = []
                for req in requirement_set.requirements.values():
                    if req.use_pep517:
                        pep517_requirements.append(req)
                    else:
                        legacy_requirements.append(req)

                # We don't build wheels for legacy requirements if we
                # don't have wheel installed or we don't have a cache dir
                try:
                    import wheel  # noqa: F401
                    build_legacy = bool(options.cache_dir)
                except ImportError:
                    build_legacy = False

                wb = WheelBuilder(
                    finder, preparer, wheel_cache,
                    build_options=[], global_options=[],
                )

                # Always build PEP 517 requirements
                build_failures = wb.build(
                    pep517_requirements,
                    session=session, autobuilding=True)

                if build_legacy:
                    # We don't care about failures building legacy
                    # requirements, as we'll fall through to a direct
                    # install for those.
                    wb.build(
                        legacy_requirements,
                        session=session, autobuilding=True)

                # If we're using PEP 517, we cannot do a direct install
                # so we fail here.
                if build_failures:
                    raise InstallationError(
                        "Could not build wheels for {} which use"
                        " PEP 517 and cannot be installed directly".format(
                            ", ".join(r.name for r in build_failures)))

                to_install = resolver.get_installation_order(
                    requirement_set)

                # Consistency Checking of the package set we're installing.
                should_warn_about_conflicts = (
                    not options.ignore_dependencies and
                    options.warn_about_conflicts)
                if should_warn_about_conflicts:
                    self._warn_about_conflicts(to_install)

                # Don't warn about script install locations if
                # --target has been specified
                warn_script_location = options.warn_script_location
                if options.target_dir:
                    warn_script_location = False

                installed = install_given_reqs(
                    to_install,
                    install_options,
                    global_options,
                    root=options.root_path,
                    home=target_temp_dir.path,
                    prefix=options.prefix_path,
                    pycompile=options.compile,
                    warn_script_location=warn_script_location,
                    use_user_site=options.use_user_site,
                )

                lib_locations = get_lib_location_guesses(
                    user=options.use_user_site,
                    home=target_temp_dir.path,
                    root=options.root_path,
                    prefix=options.prefix_path,
                    isolated=options.isolated_mode,
                )
                working_set = pkg_resources.WorkingSet(lib_locations)

                # Build the "Successfully installed foo-1.0 bar-2.0" summary.
                reqs = sorted(installed, key=operator.attrgetter('name'))
                items = []
                for req in reqs:
                    item = req.name
                    try:
                        installed_version = get_installed_version(
                            req.name, working_set=working_set)
                        if installed_version:
                            item += '-' + installed_version
                    except Exception:
                        # Best-effort: a missing version never aborts install.
                        pass
                    items.append(item)
                installed = ' '.join(items)
                if installed:
                    logger.info('Successfully installed %s', installed)
            except EnvironmentError as error:
                show_traceback = (self.verbosity >= 1)

                message = create_env_error_message(
                    error, show_traceback, options.use_user_site,
                )
                logger.error(message, exc_info=show_traceback)

                return ERROR
            except PreviousBuildDirError:
                # Never delete a pre-existing build dir the user may inspect.
                options.no_clean = True
                raise
            finally:
                # Clean up
                if not options.no_clean:
                    requirement_set.cleanup_files()
                    wheel_cache.cleanup()

    if options.target_dir:
        # Move what was staged in the temp "home" into --target.
        self._handle_target_dir(
            options.target_dir, target_temp_dir, options.upgrade
        )
    return requirement_set
def run(self, options, args):
    """Resolve and install requirements (older pip internal API variant).

    This variant installs via ``requirement_set.install(...)`` and passes
    target/pycompile settings to ``RequirementSet`` directly — it targets a
    pre-PEP-517 pip internal API (TODO confirm against the vendored pip
    version).

    :param options: parsed optparse values for the install command
    :param args: positional requirement specifiers
    :return: the populated RequirementSet (or ERROR on EnvironmentError)
    """
    cmdoptions.check_install_build_global(options)

    upgrade_strategy = "to-satisfy-only"
    if options.upgrade:
        upgrade_strategy = options.upgrade_strategy

    if options.build_dir:
        options.build_dir = os.path.abspath(options.build_dir)

    options.src_dir = os.path.abspath(options.src_dir)
    install_options = options.install_options or []
    if options.use_user_site:
        if options.prefix_path:
            raise CommandError(
                "Can not combine '--user' and '--prefix' as they imply "
                "different installation locations"
            )
        if virtualenv_no_global():
            raise InstallationError(
                "Can not perform a '--user' install. User site-packages "
                "are not visible in this virtualenv."
            )
        install_options.append('--user')
        install_options.append('--prefix=')

    target_temp_dir = TempDirectory(kind="target")
    if options.target_dir:
        options.ignore_installed = True
        options.target_dir = os.path.abspath(options.target_dir)
        if (os.path.exists(options.target_dir) and
                not os.path.isdir(options.target_dir)):
            raise CommandError(
                "Target path exists but is not a directory, will not "
                "continue."
            )

        # Create a target directory for using with the target option
        target_temp_dir.create()
        install_options.append('--home=' + target_temp_dir.path)

    global_options = options.global_options or []

    with self._build_session(options) as session:
        finder = self._build_package_finder(options, session)
        # Only auto-delete the build dir when pip created it itself.
        build_delete = (not (options.no_clean or options.build_dir))
        wheel_cache = WheelCache(options.cache_dir, options.format_control)

        if options.cache_dir and not check_path_owner(options.cache_dir):
            logger.warning(
                "The directory '%s' or its parent directory is not owned "
                "by the current user and caching wheels has been "
                "disabled. check the permissions and owner of that "
                "directory. If executing pip with sudo, you may want "
                "sudo's -H flag.",
                options.cache_dir,
            )
            options.cache_dir = None

        with TempDirectory(
            options.build_dir, delete=build_delete, kind="install"
        ) as directory:
            requirement_set = RequirementSet(
                target_dir=target_temp_dir.path,
                pycompile=options.compile,
                require_hashes=options.require_hashes,
                use_user_site=options.use_user_site,
            )

            self.populate_requirement_set(
                requirement_set, args, options, finder, session,
                self.name, wheel_cache
            )
            preparer = RequirementPreparer(
                build_dir=directory.path,
                src_dir=options.src_dir,
                download_dir=None,
                wheel_download_dir=None,
                progress_bar=options.progress_bar,
            )

            try:
                resolver = Resolver(
                    preparer=preparer,
                    finder=finder,
                    session=session,
                    wheel_cache=wheel_cache,
                    use_user_site=options.use_user_site,
                    upgrade_strategy=upgrade_strategy,
                    force_reinstall=options.force_reinstall,
                    ignore_dependencies=options.ignore_dependencies,
                    ignore_requires_python=options.ignore_requires_python,
                    ignore_installed=options.ignore_installed,
                    isolated=options.isolated_mode,
                )
                resolver.resolve(requirement_set)

                # on -d don't do complex things like building
                # wheels, and don't try to build wheels when wheel is
                # not installed.
                if wheel and options.cache_dir:
                    # build wheels before install.
                    wb = WheelBuilder(
                        requirement_set,
                        finder,
                        preparer,
                        wheel_cache,
                        build_options=[],
                        global_options=[],
                    )
                    # Ignore the result: a failed wheel will be
                    # installed from the sdist/vcs whatever.
                    wb.build(session=session, autobuilding=True)

                installed = requirement_set.install(
                    install_options,
                    global_options,
                    root=options.root_path,
                    prefix=options.prefix_path,
                    warn_script_location=options.warn_script_location,
                )

                possible_lib_locations = get_lib_location_guesses(
                    user=options.use_user_site,
                    home=target_temp_dir.path,
                    root=options.root_path,
                    prefix=options.prefix_path,
                    isolated=options.isolated_mode,
                )
                # Build the "Successfully installed foo-1.0 ..." summary.
                reqs = sorted(installed, key=operator.attrgetter('name'))
                items = []
                for req in reqs:
                    item = req.name
                    try:
                        installed_version = get_installed_version(
                            req.name, possible_lib_locations
                        )
                        if installed_version:
                            item += '-' + installed_version
                    except Exception:
                        # Best-effort: a missing version never aborts install.
                        pass
                    items.append(item)
                installed = ' '.join(items)
                if installed:
                    logger.info('Successfully installed %s', installed)
            except EnvironmentError as e:
                # Suggest --user (or a permissions check) on EPERM only.
                message_parts = []

                user_option_part = "Consider using the `--user` option"
                permissions_part = "Check the permissions"

                if e.errno == errno.EPERM:
                    if not options.use_user_site:
                        message_parts.extend([
                            user_option_part, " or ",
                            permissions_part.lower(),
                        ])
                    else:
                        message_parts.append(permissions_part)
                message_parts.append("\n")

                logger.error(
                    "".join(message_parts), exc_info=(options.verbose > 1)
                )
                return ERROR
            except PreviousBuildDirError:
                # Never delete a pre-existing build dir the user may inspect.
                options.no_clean = True
                raise
            finally:
                # Clean up
                if not options.no_clean:
                    requirement_set.cleanup_files()

    if options.target_dir:
        # Move what was staged in the temp "home" into --target.
        self._handle_target_dir(
            options.target_dir, target_temp_dir, options.upgrade
        )
    return requirement_set
def run(self, options, args):
    """Resolve requirements and download their archives to --dest.

    Implements `pip download` with an inline check that platform/ABI/
    implementation restrictions are only used together with
    ``--only-binary=:all:``.

    :param options: parsed optparse values for the download command
    :param args: positional requirement specifiers
    :return: the populated RequirementSet
    :raises CommandError: when dist restrictions are set without
        ``--only-binary=:all:``
    """
    options.ignore_installed = True
    # editable doesn't really make sense for `pip download`, but the bowels
    # of the RequirementSet code require that property.
    options.editables = []

    if options.python_version:
        python_versions = [options.python_version]
    else:
        python_versions = None

    # Cross-platform downloads only make sense for wheels: refuse any
    # dist restriction unless the user forced binary-only resolution.
    dist_restriction_set = any([
        options.python_version,
        options.platform,
        options.abi,
        options.implementation,
    ])
    binary_only = FormatControl(set(), {':all:'})
    if dist_restriction_set and options.format_control != binary_only:
        raise CommandError(
            "--only-binary=:all: must be set and --no-binary must not "
            "be set (or must be set to :none:) when restricting platform "
            "and interpreter constraints using --python-version, "
            "--platform, --abi, or --implementation.")

    options.src_dir = os.path.abspath(options.src_dir)
    options.download_dir = normalize_path(options.download_dir)

    ensure_dir(options.download_dir)

    with self._build_session(options) as session:
        finder = self._build_package_finder(
            options=options,
            session=session,
            platform=options.platform,
            python_versions=python_versions,
            abi=options.abi,
            implementation=options.implementation,
        )
        # Only auto-delete the build dir when pip created it itself.
        build_delete = (not (options.no_clean or options.build_dir))
        if options.cache_dir and not check_path_owner(options.cache_dir):
            logger.warning(
                "The directory '%s' or its parent directory is not owned "
                "by the current user and caching wheels has been "
                "disabled. check the permissions and owner of that "
                "directory. If executing pip with sudo, you may want "
                "sudo's -H flag.",
                options.cache_dir,
            )
            options.cache_dir = None

        with TempDirectory(
            options.build_dir, delete=build_delete, kind="download"
        ) as directory:

            requirement_set = RequirementSet(
                require_hashes=options.require_hashes,
            )
            self.populate_requirement_set(
                requirement_set, args, options, finder, session,
                self.name, None
            )

            preparer = RequirementPreparer(
                build_dir=directory.path,
                src_dir=options.src_dir,
                download_dir=options.download_dir,
                wheel_download_dir=None,
                progress_bar=options.progress_bar,
            )

            # Download-only resolution: no user-site, no upgrades,
            # ignore what is already installed.
            resolver = Resolver(
                preparer=preparer,
                finder=finder,
                session=session,
                wheel_cache=None,
                use_user_site=False,
                upgrade_strategy="to-satisfy-only",
                force_reinstall=False,
                ignore_dependencies=options.ignore_dependencies,
                ignore_requires_python=False,
                ignore_installed=True,
                isolated=options.isolated_mode,
            )
            resolver.resolve(requirement_set)

            downloaded = ' '.join([
                req.name for req in requirement_set.successfully_downloaded
            ])
            if downloaded:
                logger.info('Successfully downloaded %s', downloaded)

            # Clean up
            if not options.no_clean:
                requirement_set.cleanup_files()

    return requirement_set
class PyPIRepository(BaseRepository):
    DEFAULT_INDEX_URL = PyPI.simple_url

    """
    The PyPIRepository will use the provided Finder instance to lookup
    packages.  Typically, it looks up packages on PyPI (the default implicit
    config), but any other PyPI mirror can be used if index_urls is
    changed/configured on the Finder.
    """

    def __init__(self, pip_options, session):
        # pip_options: parsed pip optparse values (index URLs, find-links,
        # pre-release flag, format control, ...).
        # session: a requests-compatible session used for all network I/O.
        self.session = session
        self.pip_options = pip_options
        self.wheel_cache = WheelCache(CACHE_DIR, pip_options.format_control)

        index_urls = [pip_options.index_url] + pip_options.extra_index_urls
        if pip_options.no_index:
            index_urls = []

        self.finder = PackageFinder(
            find_links=pip_options.find_links,
            index_urls=index_urls,
            trusted_hosts=pip_options.trusted_hosts,
            allow_all_prereleases=pip_options.pre,
            process_dependency_links=pip_options.process_dependency_links,
            session=self.session,
        )

        # Caches
        # stores project_name => InstallationCandidate mappings for all
        # versions reported by PyPI, so we only have to ask once for each
        # project
        self._available_candidates_cache = {}

        # stores InstallRequirement => list(InstallRequirement) mappings
        # of all secondary dependencies for the given requirement, so we
        # only have to go to disk once for each requirement
        self._dependencies_cache = {}

        # Setup file paths
        self.freshen_build_caches()
        self._download_dir = fs_str(os.path.join(CACHE_DIR, 'pkgs'))
        self._wheel_download_dir = fs_str(os.path.join(CACHE_DIR, 'wheels'))

    def freshen_build_caches(self):
        """
        Start with fresh build/source caches.  Will remove any old build
        caches from disk automatically.
        """
        self._build_dir = TemporaryDirectory(fs_str('build'))
        self._source_dir = TemporaryDirectory(fs_str('source'))

    @property
    def build_dir(self):
        # Path of the (temporary) build directory.
        return self._build_dir.name

    @property
    def source_dir(self):
        # Path of the (temporary) source checkout directory.
        return self._source_dir.name

    def clear_caches(self):
        """Remove the on-disk download and wheel caches (best-effort)."""
        rmtree(self._download_dir, ignore_errors=True)
        rmtree(self._wheel_download_dir, ignore_errors=True)

    def find_all_candidates(self, req_name):
        """Return all index candidates for *req_name*, memoized per project."""
        if req_name not in self._available_candidates_cache:
            candidates = self.finder.find_all_candidates(req_name)
            self._available_candidates_cache[req_name] = candidates
        return self._available_candidates_cache[req_name]

    def find_best_match(self, ireq, prereleases=None):
        """
        Returns a Version object that indicates the best match for the given
        InstallRequirement according to the external repository.
        """
        if ireq.editable:
            return ireq  # return itself as the best match

        all_candidates = self.find_all_candidates(ireq.name)
        candidates_by_version = lookup_table(
            all_candidates, key=lambda c: c.version, unique=True)
        matching_versions = ireq.specifier.filter(
            (candidate.version for candidate in all_candidates),
            prereleases=prereleases)

        # Reuses pip's internal candidate sort key to sort
        matching_candidates = [candidates_by_version[ver]
                               for ver in matching_versions]
        if not matching_candidates:
            raise NoCandidateFound(ireq, all_candidates, self.finder)
        best_candidate = max(matching_candidates,
                             key=self.finder._candidate_sort_key)

        # Turn the candidate into a pinned InstallRequirement
        return make_install_requirement(
            best_candidate.project, best_candidate.version, ireq.extras,
            constraint=ireq.constraint
        )

    def get_dependencies(self, ireq):
        """
        Given a pinned or an editable InstallRequirement, returns a set of
        dependencies (also InstallRequirements, but not necessarily pinned).
        They indicate the secondary dependencies for the given requirement.
        """
        if not (ireq.editable or is_pinned_requirement(ireq)):
            raise TypeError(
                'Expected pinned or editable InstallRequirement, got {}'.format(ireq))

        if ireq not in self._dependencies_cache:
            if ireq.editable and (ireq.source_dir and os.path.exists(ireq.source_dir)):
                # No download_dir for locally available editable requirements.
                # If a download_dir is passed, pip will unnecessarely
                # archive the entire source directory
                download_dir = None
            elif ireq.link and not ireq.link.is_artifact:
                # No download_dir for VCS sources.  This also works around pip
                # using git-checkout-index, which gets rid of the .git dir.
                download_dir = None
            else:
                download_dir = self._download_dir
                if not os.path.isdir(download_dir):
                    os.makedirs(download_dir)
            if not os.path.isdir(self._wheel_download_dir):
                os.makedirs(self._wheel_download_dir)

            # Probe the installed pip's API: the legacy RequirementSet
            # constructor raises TypeError under pip >= 10, which routes
            # us to the new resolver path below.
            try:
                # Pip < 9 and below
                reqset = RequirementSet(
                    self.build_dir,
                    self.source_dir,
                    download_dir=download_dir,
                    wheel_download_dir=self._wheel_download_dir,
                    session=self.session,
                    wheel_cache=self.wheel_cache,
                )
                self._dependencies_cache[ireq] = reqset._prepare_file(
                    self.finder,
                    ireq
                )
            except TypeError:
                # Pip >= 10 (new resolver!)
                preparer = RequirementPreparer(
                    build_dir=self.build_dir,
                    src_dir=self.source_dir,
                    download_dir=download_dir,
                    wheel_download_dir=self._wheel_download_dir,
                    progress_bar='off',
                    build_isolation=False
                )
                reqset = RequirementSet()
                ireq.is_direct = True
                reqset.add_requirement(ireq)
                self.resolver = PipResolver(
                    preparer=preparer,
                    finder=self.finder,
                    session=self.session,
                    upgrade_strategy="to-satisfy-only",
                    force_reinstall=False,
                    ignore_dependencies=False,
                    ignore_requires_python=False,
                    ignore_installed=True,
                    isolated=False,
                    wheel_cache=self.wheel_cache,
                    use_user_site=False,
                )
                self.resolver.resolve(reqset)
                self._dependencies_cache[ireq] = reqset.requirements.values()
            reqset.cleanup_files()
        return set(self._dependencies_cache[ireq])

    def get_hashes(self, ireq):
        """
        Given an InstallRequirement, return a set of hashes that represent
        all of the files for a given requirement.  Editable requirements
        return an empty set.  Unpinned requirements raise a TypeError.
        """
        if ireq.editable:
            return set()

        if not is_pinned_requirement(ireq):
            raise TypeError(
                "Expected pinned requirement, got {}".format(ireq))

        # We need to get all of the candidates that match our current version
        # pin, these will represent all of the files that could possibly
        # satisfy this constraint.
        all_candidates = self.find_all_candidates(ireq.name)
        candidates_by_version = lookup_table(
            all_candidates, key=lambda c: c.version)
        matching_versions = list(
            ireq.specifier.filter((candidate.version for candidate in all_candidates)))
        matching_candidates = candidates_by_version[matching_versions[0]]

        return {
            self._get_file_hash(candidate.location)
            for candidate in matching_candidates
        }

    def _get_file_hash(self, location):
        """Download *location* (local or remote) and hash it in chunks.

        Returns a "<algorithm>:<hexdigest>" string using FAVORITE_HASH.
        """
        h = hashlib.new(FAVORITE_HASH)
        with open_local_or_remote_file(location, self.session) as fp:
            for chunk in iter(lambda: fp.read(8096), b""):
                h.update(chunk)
        return ":".join([FAVORITE_HASH, h.hexdigest()])

    @contextmanager
    def allow_all_wheels(self):
        """
        Monkey patches pip.Wheel to allow wheels from all platforms and
        Python versions.

        This also saves the candidate cache and set a new one, or else the
        results from the previous non-patched calls will interfere.
        """
        def _wheel_supported(self, tags=None):
            # Ignore current platform. Support everything.
            return True

        def _wheel_support_index_min(self, tags=None):
            # All wheels are equal priority for sorting.
            return 0

        original_wheel_supported = Wheel.supported
        original_support_index_min = Wheel.support_index_min
        original_cache = self._available_candidates_cache

        Wheel.supported = _wheel_supported
        Wheel.support_index_min = _wheel_support_index_min
        self._available_candidates_cache = {}

        try:
            yield
        finally:
            # Always restore the patched class attributes and the cache,
            # even if the caller's body raised.
            Wheel.supported = original_wheel_supported
            Wheel.support_index_min = original_support_index_min
            self._available_candidates_cache = original_cache
def run(self, options, args):
    """Resolve requirements and build wheels for them into --wheel-dir.

    Implements `pip wheel` for a PEP 517-aware pip (uses
    RequirementTracker and a WheelBuilder that reports failures).

    :param options: parsed optparse values for the wheel command
    :param args: positional requirement specifiers
    :raises CommandError: when one or more wheels fail to build
    """
    cmdoptions.check_install_build_global(options)

    index_urls = [options.index_url] + options.extra_index_urls
    if options.no_index:
        logger.debug('Ignoring indexes: %s', ','.join(index_urls))
        index_urls = []

    if options.build_dir:
        options.build_dir = os.path.abspath(options.build_dir)

    options.src_dir = os.path.abspath(options.src_dir)

    with self._build_session(options) as session:
        finder = self._build_package_finder(options, session)
        # Only auto-delete the build dir when pip created it itself.
        build_delete = (not (options.no_clean or options.build_dir))
        wheel_cache = WheelCache(options.cache_dir, options.format_control)

        with RequirementTracker() as req_tracker, TempDirectory(
            options.build_dir, delete=build_delete, kind="wheel"
        ) as directory:

            requirement_set = RequirementSet(
                require_hashes=options.require_hashes,
            )

            try:
                self.populate_requirement_set(
                    requirement_set, args, options, finder, session,
                    self.name, wheel_cache
                )
                preparer = RequirementPreparer(
                    build_dir=directory.path,
                    src_dir=options.src_dir,
                    download_dir=None,
                    wheel_download_dir=options.wheel_dir,
                    progress_bar=options.progress_bar,
                    build_isolation=options.build_isolation,
                    req_tracker=req_tracker,
                )

                # Wheel building never touches user-site or upgrades.
                resolver = Resolver(
                    preparer=preparer,
                    finder=finder,
                    session=session,
                    wheel_cache=wheel_cache,
                    use_user_site=False,
                    upgrade_strategy="to-satisfy-only",
                    force_reinstall=False,
                    ignore_dependencies=options.ignore_dependencies,
                    ignore_requires_python=options.ignore_requires_python,
                    ignore_installed=True,
                    isolated=options.isolated_mode,
                    use_pep517=options.use_pep517
                )
                resolver.resolve(requirement_set)

                # build wheels
                wb = WheelBuilder(
                    finder, preparer, wheel_cache,
                    build_options=options.build_options or [],
                    global_options=options.global_options or [],
                    no_clean=options.no_clean,
                )
                build_failures = wb.build(
                    requirement_set.requirements.values(), session=session,
                )
                if len(build_failures) != 0:
                    raise CommandError(
                        "Failed to build one or more wheels")
            except PreviousBuildDirError:
                # Never delete a pre-existing build dir the user may inspect.
                options.no_clean = True
                raise
            finally:
                if not options.no_clean:
                    requirement_set.cleanup_files()
                    wheel_cache.cleanup()
def run(self, options, args):
    """Resolve requirements and build wheels for them into --wheel-dir.

    Older `pip wheel` variant: verifies the `wheel` package is available
    first, and uses a WheelBuilder that is constructed with the
    requirement set and returns a boolean success flag.

    :param options: parsed optparse values for the wheel command
    :param args: positional requirement specifiers
    :raises CommandError: when one or more wheels fail to build
    """
    self.check_required_packages()
    cmdoptions.check_install_build_global(options)

    index_urls = [options.index_url] + options.extra_index_urls
    if options.no_index:
        logger.debug('Ignoring indexes: %s', ','.join(index_urls))
        index_urls = []

    if options.build_dir:
        options.build_dir = os.path.abspath(options.build_dir)

    options.src_dir = os.path.abspath(options.src_dir)

    with self._build_session(options) as session:
        finder = self._build_package_finder(options, session)
        # Only auto-delete the build dir when pip created it itself.
        build_delete = (not (options.no_clean or options.build_dir))
        wheel_cache = WheelCache(options.cache_dir, options.format_control)

        with TempDirectory(
            options.build_dir, delete=build_delete, kind="wheel"
        ) as directory:
            requirement_set = RequirementSet(
                require_hashes=options.require_hashes,
            )

            self.populate_requirement_set(
                requirement_set, args, options, finder, session,
                self.name, wheel_cache
            )

            preparer = RequirementPreparer(
                build_dir=directory.path,
                src_dir=options.src_dir,
                download_dir=None,
                wheel_download_dir=options.wheel_dir,
                progress_bar=options.progress_bar,
            )

            # Wheel building never touches user-site or upgrades.
            resolver = Resolver(
                preparer=preparer,
                finder=finder,
                session=session,
                wheel_cache=wheel_cache,
                use_user_site=False,
                upgrade_strategy="to-satisfy-only",
                force_reinstall=False,
                ignore_dependencies=options.ignore_dependencies,
                ignore_requires_python=options.ignore_requires_python,
                ignore_installed=True,
                isolated=options.isolated_mode,
            )
            resolver.resolve(requirement_set)

            try:
                # build wheels
                wb = WheelBuilder(
                    requirement_set,
                    finder,
                    preparer,
                    wheel_cache,
                    build_options=options.build_options or [],
                    global_options=options.global_options or [],
                    no_clean=options.no_clean,
                )
                wheels_built_successfully = wb.build(session=session)
                if not wheels_built_successfully:
                    raise CommandError(
                        "Failed to build one or more wheels"
                    )
            except PreviousBuildDirError:
                # Never delete a pre-existing build dir the user may inspect.
                options.no_clean = True
                raise
            finally:
                if not options.no_clean:
                    requirement_set.cleanup_files()
def run(self, options, args):
    """Resolve, opportunistically wheel-build, and install requirements.

    `pip install` variant that installs via ``install_given_reqs`` and
    probes installed versions from ``get_lib_location_guesses`` directly
    (no pkg_resources WorkingSet) — appears to target a pip-10-era
    internal API (TODO confirm against the vendored pip version).

    :param options: parsed optparse values for the install command
    :param args: positional requirement specifiers
    :return: the populated RequirementSet (or ERROR on EnvironmentError)
    """
    cmdoptions.check_install_build_global(options)

    upgrade_strategy = "to-satisfy-only"
    if options.upgrade:
        upgrade_strategy = options.upgrade_strategy

    if options.build_dir:
        options.build_dir = os.path.abspath(options.build_dir)

    options.src_dir = os.path.abspath(options.src_dir)
    install_options = options.install_options or []
    if options.use_user_site:
        if options.prefix_path:
            raise CommandError(
                "Can not combine '--user' and '--prefix' as they imply "
                "different installation locations")
        if virtualenv_no_global():
            raise InstallationError(
                "Can not perform a '--user' install. User site-packages "
                "are not visible in this virtualenv.")
        install_options.append('--user')
        install_options.append('--prefix=')

    target_temp_dir = TempDirectory(kind="target")
    if options.target_dir:
        options.ignore_installed = True
        options.target_dir = os.path.abspath(options.target_dir)
        if (os.path.exists(options.target_dir) and
                not os.path.isdir(options.target_dir)):
            raise CommandError(
                "Target path exists but is not a directory, will not "
                "continue.")

        # Create a target directory for using with the target option
        target_temp_dir.create()
        install_options.append('--home=' + target_temp_dir.path)

    global_options = options.global_options or []

    with self._build_session(options) as session:
        finder = self._build_package_finder(options, session)
        # Only auto-delete the build dir when pip created it itself.
        build_delete = (not (options.no_clean or options.build_dir))
        wheel_cache = WheelCache(options.cache_dir, options.format_control)

        if options.cache_dir and not check_path_owner(options.cache_dir):
            logger.warning(
                "The directory '%s' or its parent directory is not owned "
                "by the current user and caching wheels has been "
                "disabled. check the permissions and owner of that "
                "directory. If executing pip with sudo, you may want "
                "sudo's -H flag.",
                options.cache_dir,
            )
            options.cache_dir = None

        with TempDirectory(
            options.build_dir, delete=build_delete, kind="install"
        ) as directory:
            requirement_set = RequirementSet(
                require_hashes=options.require_hashes,
            )

            try:
                self.populate_requirement_set(
                    requirement_set, args, options, finder, session,
                    self.name, wheel_cache
                )
                preparer = RequirementPreparer(
                    build_dir=directory.path,
                    src_dir=options.src_dir,
                    download_dir=None,
                    wheel_download_dir=None,
                    progress_bar=options.progress_bar,
                    build_isolation=options.build_isolation,
                )

                resolver = Resolver(
                    preparer=preparer,
                    finder=finder,
                    session=session,
                    wheel_cache=wheel_cache,
                    use_user_site=options.use_user_site,
                    upgrade_strategy=upgrade_strategy,
                    force_reinstall=options.force_reinstall,
                    ignore_dependencies=options.ignore_dependencies,
                    ignore_requires_python=options.ignore_requires_python,
                    ignore_installed=options.ignore_installed,
                    isolated=options.isolated_mode,
                )
                resolver.resolve(requirement_set)

                # If caching is disabled or wheel is not installed don't
                # try to build wheels.
                if wheel and options.cache_dir:
                    # build wheels before install.
                    wb = WheelBuilder(
                        finder, preparer, wheel_cache,
                        build_options=[], global_options=[],
                    )
                    # Ignore the result: a failed wheel will be
                    # installed from the sdist/vcs whatever.
                    wb.build(
                        requirement_set.requirements.values(),
                        session=session, autobuilding=True)

                to_install = resolver.get_installation_order(
                    requirement_set)

                # Consistency Checking of the package set we're installing.
                should_warn_about_conflicts = (
                    not options.ignore_dependencies and
                    options.warn_about_conflicts)
                if should_warn_about_conflicts:
                    self._warn_about_conflicts(to_install)

                # Don't warn about script install locations if
                # --target has been specified
                warn_script_location = options.warn_script_location
                if options.target_dir:
                    warn_script_location = False

                installed = install_given_reqs(
                    to_install,
                    install_options,
                    global_options,
                    root=options.root_path,
                    home=target_temp_dir.path,
                    prefix=options.prefix_path,
                    pycompile=options.compile,
                    warn_script_location=warn_script_location,
                    use_user_site=options.use_user_site,
                )

                possible_lib_locations = get_lib_location_guesses(
                    user=options.use_user_site,
                    home=target_temp_dir.path,
                    root=options.root_path,
                    prefix=options.prefix_path,
                    isolated=options.isolated_mode,
                )
                # Build the "Successfully installed foo-1.0 ..." summary.
                reqs = sorted(installed, key=operator.attrgetter('name'))
                items = []
                for req in reqs:
                    item = req.name
                    try:
                        installed_version = get_installed_version(
                            req.name, possible_lib_locations)
                        if installed_version:
                            item += '-' + installed_version
                    except Exception:
                        # Best-effort: a missing version never aborts install.
                        pass
                    items.append(item)
                installed = ' '.join(items)
                if installed:
                    logger.info('Successfully installed %s', installed)
            except EnvironmentError as e:
                # Suggest --user (or a permissions check) on EPERM only.
                message_parts = []

                user_option_part = "Consider using the `--user` option"
                permissions_part = "Check the permissions"

                if e.errno == errno.EPERM:
                    if not options.use_user_site:
                        message_parts.extend([
                            user_option_part, " or ",
                            permissions_part.lower(),
                        ])
                    else:
                        message_parts.append(permissions_part)
                message_parts.append("\n")

                logger.error("".join(message_parts),
                             exc_info=(self.verbosity > 1))
                return ERROR
            except PreviousBuildDirError:
                # Never delete a pre-existing build dir the user may inspect.
                options.no_clean = True
                raise
            finally:
                # Clean up
                if not options.no_clean:
                    requirement_set.cleanup_files()
                    wheel_cache.cleanup()

    if options.target_dir:
        # Move what was staged in the temp "home" into --target.
        self._handle_target_dir(options.target_dir, target_temp_dir,
                                options.upgrade)
    return requirement_set
def run(self, options, args):
    """Resolve, opportunistically wheel-build, and install requirements.

    `pip install` variant that guards pip self-modification on Windows
    and reports installed versions via a pkg_resources WorkingSet built
    from ``get_lib_location_guesses``.

    :param options: parsed optparse values for the install command
    :param args: positional requirement specifiers
    :return: the populated RequirementSet (or ERROR on EnvironmentError)
    """
    cmdoptions.check_install_build_global(options)

    upgrade_strategy = "to-satisfy-only"
    if options.upgrade:
        upgrade_strategy = options.upgrade_strategy

    if options.build_dir:
        options.build_dir = os.path.abspath(options.build_dir)

    options.src_dir = os.path.abspath(options.src_dir)
    install_options = options.install_options or []
    if options.use_user_site:
        if options.prefix_path:
            raise CommandError(
                "Can not combine '--user' and '--prefix' as they imply "
                "different installation locations"
            )
        if virtualenv_no_global():
            raise InstallationError(
                "Can not perform a '--user' install. User site-packages "
                "are not visible in this virtualenv."
            )
        install_options.append('--user')
        install_options.append('--prefix=')

    target_temp_dir = TempDirectory(kind="target")
    if options.target_dir:
        options.ignore_installed = True
        options.target_dir = os.path.abspath(options.target_dir)
        if (os.path.exists(options.target_dir) and
                not os.path.isdir(options.target_dir)):
            raise CommandError(
                "Target path exists but is not a directory, will not "
                "continue."
            )

        # Create a target directory for using with the target option
        target_temp_dir.create()
        install_options.append('--home=' + target_temp_dir.path)

    global_options = options.global_options or []

    with self._build_session(options) as session:
        finder = self._build_package_finder(options, session)
        # Only auto-delete the build dir when pip created it itself.
        build_delete = (not (options.no_clean or options.build_dir))
        wheel_cache = WheelCache(options.cache_dir, options.format_control)

        if options.cache_dir and not check_path_owner(options.cache_dir):
            logger.warning(
                "The directory '%s' or its parent directory is not owned "
                "by the current user and caching wheels has been "
                "disabled. check the permissions and owner of that "
                "directory. If executing pip with sudo, you may want "
                "sudo's -H flag.",
                options.cache_dir,
            )
            options.cache_dir = None

        with TempDirectory(
            options.build_dir, delete=build_delete, kind="install"
        ) as directory:
            requirement_set = RequirementSet(
                require_hashes=options.require_hashes,
            )

            try:
                self.populate_requirement_set(
                    requirement_set, args, options, finder, session,
                    self.name, wheel_cache
                )
                preparer = RequirementPreparer(
                    build_dir=directory.path,
                    src_dir=options.src_dir,
                    download_dir=None,
                    wheel_download_dir=None,
                    progress_bar=options.progress_bar,
                    build_isolation=options.build_isolation,
                )

                resolver = Resolver(
                    preparer=preparer,
                    finder=finder,
                    session=session,
                    wheel_cache=wheel_cache,
                    use_user_site=options.use_user_site,
                    upgrade_strategy=upgrade_strategy,
                    force_reinstall=options.force_reinstall,
                    ignore_dependencies=options.ignore_dependencies,
                    ignore_requires_python=options.ignore_requires_python,
                    ignore_installed=options.ignore_installed,
                    isolated=options.isolated_mode,
                )
                resolver.resolve(requirement_set)

                protect_pip_from_modification_on_windows(
                    modifying_pip=requirement_set.has_requirement("pip")
                )

                # If caching is disabled or wheel is not installed don't
                # try to build wheels.
                if wheel and options.cache_dir:
                    # build wheels before install.
                    wb = WheelBuilder(
                        finder, preparer, wheel_cache,
                        build_options=[], global_options=[],
                    )
                    # Ignore the result: a failed wheel will be
                    # installed from the sdist/vcs whatever.
                    wb.build(
                        requirement_set.requirements.values(),
                        session=session, autobuilding=True
                    )

                to_install = resolver.get_installation_order(
                    requirement_set
                )

                # Consistency Checking of the package set we're installing.
                should_warn_about_conflicts = (
                    not options.ignore_dependencies and
                    options.warn_about_conflicts
                )
                if should_warn_about_conflicts:
                    self._warn_about_conflicts(to_install)

                # Don't warn about script install locations if
                # --target has been specified
                warn_script_location = options.warn_script_location
                if options.target_dir:
                    warn_script_location = False

                installed = install_given_reqs(
                    to_install,
                    install_options,
                    global_options,
                    root=options.root_path,
                    home=target_temp_dir.path,
                    prefix=options.prefix_path,
                    pycompile=options.compile,
                    warn_script_location=warn_script_location,
                    use_user_site=options.use_user_site,
                )

                lib_locations = get_lib_location_guesses(
                    user=options.use_user_site,
                    home=target_temp_dir.path,
                    root=options.root_path,
                    prefix=options.prefix_path,
                    isolated=options.isolated_mode,
                )
                working_set = pkg_resources.WorkingSet(lib_locations)

                # Build the "Successfully installed foo-1.0 ..." summary.
                reqs = sorted(installed, key=operator.attrgetter('name'))
                items = []
                for req in reqs:
                    item = req.name
                    try:
                        installed_version = get_installed_version(
                            req.name, working_set=working_set
                        )
                        if installed_version:
                            item += '-' + installed_version
                    except Exception:
                        # Best-effort: a missing version never aborts install.
                        pass
                    items.append(item)
                installed = ' '.join(items)
                if installed:
                    logger.info('Successfully installed %s', installed)
            except EnvironmentError as error:
                show_traceback = (self.verbosity >= 1)

                message = create_env_error_message(
                    error, show_traceback, options.use_user_site,
                )
                logger.error(message, exc_info=show_traceback)

                return ERROR
            except PreviousBuildDirError:
                # Never delete a pre-existing build dir the user may inspect.
                options.no_clean = True
                raise
            finally:
                # Clean up
                if not options.no_clean:
                    requirement_set.cleanup_files()
                    wheel_cache.cleanup()

    if options.target_dir:
        # Move what was staged in the temp "home" into --target.
        self._handle_target_dir(
            options.target_dir, target_temp_dir, options.upgrade
        )
    return requirement_set
def run(self, options, args):
    """Run ``pip download``: resolve *args* and download the distributions.

    Nothing is installed; files end up in ``options.download_dir``.
    Returns the resolved RequirementSet.
    """
    # A download must always fetch, even if the requirement is already
    # installed in the current environment.
    options.ignore_installed = True
    # editable doesn't really make sense for `pip download`, but the bowels
    # of the RequirementSet code require that property.
    options.editables = []

    if options.python_version:
        python_versions = [options.python_version]
    else:
        python_versions = None

    cmdoptions.check_dist_restriction(options)

    options.src_dir = os.path.abspath(options.src_dir)
    options.download_dir = normalize_path(options.download_dir)

    ensure_dir(options.download_dir)

    with self._build_session(options) as session:
        # Finder restricted to the requested platform/ABI/implementation.
        finder = self._build_package_finder(
            options=options,
            session=session,
            platform=options.platform,
            python_versions=python_versions,
            abi=options.abi,
            implementation=options.implementation,
        )
        # Keep the build dir only when the user supplied one explicitly
        # or asked for --no-clean.
        build_delete = not (options.no_clean or options.build_dir)
        if options.cache_dir and not check_path_owner(options.cache_dir):
            logger.warning(
                "The directory '%s' or its parent directory is not owned "
                "by the current user and caching wheels has been "
                "disabled. check the permissions and owner of that "
                "directory. If executing pip with sudo, you may want "
                "sudo's -H flag.",
                options.cache_dir,
            )
            options.cache_dir = None

        with RequirementTracker() as req_tracker, TempDirectory(
                options.build_dir, delete=build_delete,
                kind="download") as directory:

            requirement_set = RequirementSet(
                require_hashes=options.require_hashes)
            # wheel_cache is None: downloads are never served from the
            # local wheel cache.
            self.populate_requirement_set(requirement_set,
                                          args,
                                          options,
                                          finder,
                                          session,
                                          self.name,
                                          None)

            preparer = RequirementPreparer(
                build_dir=directory.path,
                src_dir=options.src_dir,
                download_dir=options.download_dir,
                wheel_download_dir=None,
                progress_bar=options.progress_bar,
                build_isolation=options.build_isolation,
                req_tracker=req_tracker,
            )

            # Fixed resolver settings: a plain download never upgrades,
            # force-reinstalls, or consults the installed environment.
            resolver = Resolver(
                preparer=preparer,
                finder=finder,
                session=session,
                wheel_cache=None,
                use_user_site=False,
                upgrade_strategy="to-satisfy-only",
                force_reinstall=False,
                ignore_dependencies=options.ignore_dependencies,
                ignore_requires_python=False,
                ignore_installed=True,
                isolated=options.isolated_mode,
            )
            resolver.resolve(requirement_set)

            downloaded = " ".join([
                req.name for req in requirement_set.successfully_downloaded
            ])
            if downloaded:
                logger.info("Successfully downloaded %s", downloaded)

            # Clean up
            if not options.no_clean:
                requirement_set.cleanup_files()

        return requirement_set
def super_run(self, options, args):
    """Copy of relevant parts from InstallCommand's run()

    Re-implements pip's resolution phase while staying compatible with
    several pip releases: every pip-internal constructor or method whose
    signature changed across versions is wrapped in try/except
    (TypeError/AttributeError) with a fallback call for the other API.
    The exception-driven fallback order is load-bearing — do not reorder.
    """
    upgrade_strategy = "eager"
    if options.upgrade:
        upgrade_strategy = options.upgrade_strategy
    with self._build_session(options) as session:
        finder = self._build_package_finder(options, session)
        wheel_cache = WheelCache(options.cache_dir, options.format_control)
        try:
            requirement_set = RequirementSet(
                require_hashes=options.require_hashes,
            )
            req_tracker_path = False
        except TypeError:
            # got an unexpected keyword argument 'require_hashes'
            requirement_set = RequirementSet()
            req_tracker_path = True  # pip 20
        try:
            # On pip 20 the RequirementTracker takes an explicit root
            # directory; earlier versions take no arguments.
            with TempDirectory(
                    options.build_dir, delete=True,
                    kind="install") as directory, RequirementTracker(
                        *([directory.path] if req_tracker_path else []
                          )) as req_tracker:
                try:
                    self.populate_requirement_set(requirement_set, args,
                                                  options, finder, session,
                                                  self.name, wheel_cache)
                except TypeError:
                    # older signature: no command-name argument
                    self.populate_requirement_set(requirement_set, args,
                                                  options, finder, session,
                                                  wheel_cache)
                except AttributeError:
                    # populate_requirement_set is gone; newer pip exposes
                    # get_requirements() instead
                    requirement_set = self.get_requirements(
                        args, options, finder, session, wheel_cache)
                try:
                    preparer = RequirementPreparer(
                        build_dir=directory.path,
                        src_dir=options.src_dir,
                        download_dir=None,
                        wheel_download_dir=None,
                        progress_bar=options.progress_bar,
                        build_isolation=options.build_isolation,
                        req_tracker=req_tracker,
                    )
                except TypeError:
                    # newer pip: preparer needs an explicit Downloader and
                    # extra keyword arguments
                    from pip._internal.network.download import Downloader
                    downloader = Downloader(
                        session, progress_bar=options.progress_bar)
                    preparer = RequirementPreparer(
                        build_dir=directory.path,
                        download_dir=None,
                        src_dir=options.src_dir,
                        wheel_download_dir=None,
                        build_isolation=options.build_isolation,
                        req_tracker=req_tracker,
                        downloader=downloader,
                        finder=finder,
                        require_hashes=options.require_hashes,
                        use_user_site=options.use_user_site,
                    )
                try:
                    # oldest Resolver signature: takes session/wheel_cache
                    # and the ignore_* flags directly
                    resolver = Resolver(
                        preparer=preparer,
                        finder=finder,
                        session=session,
                        wheel_cache=wheel_cache,
                        use_user_site=options.use_user_site,
                        upgrade_strategy=upgrade_strategy,
                        force_reinstall=options.force_reinstall,
                        ignore_dependencies=options.ignore_dependencies,
                        ignore_requires_python=options.
                        ignore_requires_python,
                        ignore_installed=options.ignore_installed,
                        isolated=options.isolated_mode,
                    )
                except TypeError:
                    # newer pip: Resolver wants a make_install_req factory
                    from pip._internal.req.constructors import (
                        install_req_from_req_string,
                    )
                    make_install_req = partial(
                        install_req_from_req_string,
                        isolated=options.isolated_mode,
                        wheel_cache=wheel_cache,
                        use_pep517=options.use_pep517,
                    )
                    try:
                        resolver = Resolver(
                            preparer=preparer,
                            session=session,
                            finder=finder,
                            make_install_req=make_install_req,
                            use_user_site=options.use_user_site,
                            ignore_dependencies=options.
                            ignore_dependencies,
                            ignore_installed=options.ignore_installed,
                            ignore_requires_python=options.
                            ignore_requires_python,
                            force_reinstall=options.force_reinstall,
                            upgrade_strategy=upgrade_strategy,
                        )
                    except TypeError:
                        try:
                            # same as above but without the session kwarg
                            resolver = Resolver(
                                preparer=preparer,
                                finder=finder,
                                make_install_req=make_install_req,
                                use_user_site=options.use_user_site,
                                ignore_dependencies=options.
                                ignore_dependencies,
                                ignore_installed=options.ignore_installed,
                                ignore_requires_python=options.
                                ignore_requires_python,
                                force_reinstall=options.force_reinstall,
                                upgrade_strategy=upgrade_strategy,
                            )
                        except TypeError:
                            # newest variant: wheel_cache moved onto the
                            # Resolver, and the factory no longer accepts it
                            make_install_req = partial(
                                install_req_from_req_string,
                                isolated=options.isolated_mode,
                                use_pep517=options.use_pep517,
                            )
                            resolver = Resolver(
                                preparer=preparer,
                                finder=finder,
                                make_install_req=make_install_req,
                                use_user_site=options.use_user_site,
                                ignore_dependencies=options.
                                ignore_dependencies,
                                ignore_installed=options.ignore_installed,
                                ignore_requires_python=options.
                                ignore_requires_python,
                                force_reinstall=options.force_reinstall,
                                upgrade_strategy=upgrade_strategy,
                                wheel_cache=wheel_cache,
                            )
                try:
                    resolver.resolve(requirement_set)
                except TypeError:
                    # pip 20: resolve() returns the RequirementSet and
                    # takes a check_supported_wheels flag
                    requirement_set = resolver.resolve(
                        requirement_set, check_supported_wheels=True)
                finder.format_control.no_binary = set()  # allow binaries
                self.process_requirements(options, requirement_set, finder,
                                          resolver)
        finally:
            try:
                requirement_set.cleanup_files()
                wheel_cache.cleanup()
            except AttributeError:
                # cleanup methods were removed upstream, see
                # https://github.com/pypa/pip/commit/5cca8f10b304a5a7f3a96dfd66937615324cf826
                pass
    return requirement_set
def run(self, options, args):
    """Run ``pip download`` (older pip variant with an inline
    dist-restriction consistency check).

    Resolves *args* and downloads the matching distributions into
    ``options.download_dir``; returns the resolved RequirementSet.
    """
    # A download must always fetch, regardless of what is installed.
    options.ignore_installed = True
    # editable doesn't really make sense for `pip download`, but the bowels
    # of the RequirementSet code require that property.
    options.editables = []

    if options.python_version:
        python_versions = [options.python_version]
    else:
        python_versions = None

    # When the user restricts the target platform/interpreter, sdists
    # cannot be trusted (their deps vary by environment), so either
    # dependencies must be ignored or only wheels may be considered.
    dist_restriction_set = any([
        options.python_version,
        options.platform,
        options.abi,
        options.implementation,
    ])
    binary_only = FormatControl(set(), {':all:'})
    no_sdist_dependencies = (
        options.format_control != binary_only and
        not options.ignore_dependencies
    )
    if dist_restriction_set and no_sdist_dependencies:
        raise CommandError(
            "When restricting platform and interpreter constraints using "
            "--python-version, --platform, --abi, or --implementation, "
            "either --no-deps must be set, or --only-binary=:all: must be "
            "set and --no-binary must not be set (or must be set to "
            ":none:)."
        )

    options.src_dir = os.path.abspath(options.src_dir)
    options.download_dir = normalize_path(options.download_dir)

    ensure_dir(options.download_dir)

    with self._build_session(options) as session:
        finder = self._build_package_finder(
            options=options,
            session=session,
            platform=options.platform,
            python_versions=python_versions,
            abi=options.abi,
            implementation=options.implementation,
        )
        # Keep the build dir only for --no-clean or an explicit build dir.
        build_delete = (not (options.no_clean or options.build_dir))
        if options.cache_dir and not check_path_owner(options.cache_dir):
            logger.warning(
                "The directory '%s' or its parent directory is not owned "
                "by the current user and caching wheels has been "
                "disabled. check the permissions and owner of that "
                "directory. If executing pip with sudo, you may want "
                "sudo's -H flag.",
                options.cache_dir,
            )
            options.cache_dir = None

        with RequirementTracker() as req_tracker, TempDirectory(
            options.build_dir, delete=build_delete, kind="download"
        ) as directory:

            requirement_set = RequirementSet(
                require_hashes=options.require_hashes,
            )
            # wheel_cache is None: downloads bypass the wheel cache.
            self.populate_requirement_set(
                requirement_set,
                args,
                options,
                finder,
                session,
                self.name,
                None
            )

            preparer = RequirementPreparer(
                build_dir=directory.path,
                src_dir=options.src_dir,
                download_dir=options.download_dir,
                wheel_download_dir=None,
                progress_bar=options.progress_bar,
                build_isolation=options.build_isolation,
                req_tracker=req_tracker,
            )

            # Fixed settings: a download never upgrades or consults the
            # installed environment.
            resolver = Resolver(
                preparer=preparer,
                finder=finder,
                session=session,
                wheel_cache=None,
                use_user_site=False,
                upgrade_strategy="to-satisfy-only",
                force_reinstall=False,
                ignore_dependencies=options.ignore_dependencies,
                ignore_requires_python=False,
                ignore_installed=True,
                isolated=options.isolated_mode,
            )
            resolver.resolve(requirement_set)

            downloaded = ' '.join([
                req.name for req in requirement_set.successfully_downloaded
            ])
            if downloaded:
                logger.info('Successfully downloaded %s', downloaded)

            # Clean up
            if not options.no_clean:
                requirement_set.cleanup_files()

        return requirement_set
def run(self, options, args):
    """Run ``pip install``: resolve *args*, build wheels where possible,
    and install the result.

    Returns the resolved RequirementSet on success, or ``ERROR`` when an
    EnvironmentError occurs during installation (e.g. insufficient
    permissions).  Raises CommandError/InstallationError for invalid
    option combinations or PEP 517 build failures.
    """
    cmdoptions.check_install_build_global(options)

    upgrade_strategy = "to-satisfy-only"
    if options.upgrade:
        upgrade_strategy = options.upgrade_strategy

    if options.build_dir:
        options.build_dir = os.path.abspath(options.build_dir)

    cmdoptions.check_dist_restriction(options, check_target=True)

    if options.python_version:
        python_versions = [options.python_version]
    else:
        python_versions = None

    options.src_dir = os.path.abspath(options.src_dir)
    install_options = options.install_options or []
    if options.use_user_site:
        if options.prefix_path:
            raise CommandError(
                "Can not combine '--user' and '--prefix' as they imply "
                "different installation locations"
            )
        if virtualenv_no_global():
            raise InstallationError(
                "Can not perform a '--user' install. User site-packages "
                "are not visible in this virtualenv."
            )
        install_options.append('--user')
        install_options.append('--prefix=')

    target_temp_dir = TempDirectory(kind="target")
    if options.target_dir:
        # --target implies a scratch install that is moved into place
        # afterwards, so the currently-installed state is irrelevant.
        options.ignore_installed = True
        options.target_dir = os.path.abspath(options.target_dir)
        if (os.path.exists(options.target_dir) and
                not os.path.isdir(options.target_dir)):
            raise CommandError(
                "Target path exists but is not a directory, will not "
                "continue."
            )

        # Create a target directory for using with the target option
        target_temp_dir.create()
        install_options.append('--home=' + target_temp_dir.path)

    global_options = options.global_options or []

    with self._build_session(options) as session:
        finder = self._build_package_finder(
            options=options,
            session=session,
            platform=options.platform,
            python_versions=python_versions,
            abi=options.abi,
            implementation=options.implementation,
        )
        build_delete = (not (options.no_clean or options.build_dir))
        wheel_cache = WheelCache(options.cache_dir, options.format_control)

        if options.cache_dir and not check_path_owner(options.cache_dir):
            logger.warning(
                "The directory '%s' or its parent directory is not owned "
                "by the current user and caching wheels has been "
                "disabled. check the permissions and owner of that "
                "directory. If executing pip with sudo, you may want "
                "sudo's -H flag.",
                options.cache_dir,
            )
            options.cache_dir = None

        with RequirementTracker() as req_tracker, TempDirectory(
            options.build_dir, delete=build_delete, kind="install"
        ) as directory:
            requirement_set = RequirementSet(
                require_hashes=options.require_hashes,
                check_supported_wheels=not options.target_dir,
            )

            try:
                self.populate_requirement_set(
                    requirement_set, args, options, finder, session,
                    self.name, wheel_cache
                )
                preparer = RequirementPreparer(
                    build_dir=directory.path,
                    src_dir=options.src_dir,
                    download_dir=None,
                    wheel_download_dir=None,
                    progress_bar=options.progress_bar,
                    build_isolation=options.build_isolation,
                    req_tracker=req_tracker,
                )

                resolver = Resolver(
                    preparer=preparer,
                    finder=finder,
                    session=session,
                    wheel_cache=wheel_cache,
                    use_user_site=options.use_user_site,
                    upgrade_strategy=upgrade_strategy,
                    force_reinstall=options.force_reinstall,
                    ignore_dependencies=options.ignore_dependencies,
                    ignore_requires_python=options.ignore_requires_python,
                    ignore_installed=options.ignore_installed,
                    isolated=options.isolated_mode,
                    use_pep517=options.use_pep517
                )
                resolver.resolve(requirement_set)

                protect_pip_from_modification_on_windows(
                    modifying_pip=requirement_set.has_requirement("pip")
                )

                # Consider legacy and PEP517-using requirements separately
                legacy_requirements = []
                pep517_requirements = []
                for req in requirement_set.requirements.values():
                    if req.use_pep517:
                        pep517_requirements.append(req)
                    else:
                        legacy_requirements.append(req)

                # We don't build wheels for legacy requirements if we
                # don't have wheel installed or we don't have a cache dir
                try:
                    import wheel  # noqa: F401
                    build_legacy = bool(options.cache_dir)
                except ImportError:
                    build_legacy = False

                wb = WheelBuilder(
                    finder, preparer, wheel_cache,
                    build_options=[], global_options=[],
                )

                # Always build PEP 517 requirements
                build_failures = wb.build(
                    pep517_requirements,
                    session=session, autobuilding=True
                )

                if build_legacy:
                    # We don't care about failures building legacy
                    # requirements, as we'll fall through to a direct
                    # install for those.
                    wb.build(
                        legacy_requirements,
                        session=session, autobuilding=True
                    )

                # If we're using PEP 517, we cannot do a direct install
                # so we fail here.
                if build_failures:
                    # Fix: previously this was written as
                    # "..." + "...".format(...), so .format() applied only
                    # to the second literal (which has no placeholder) and
                    # the raised message contained a literal "{}" instead
                    # of the failing package names.  Implicit string
                    # concatenation happens before .format() here.
                    raise InstallationError(
                        "Could not build wheels for {} which use"
                        " PEP 517 and cannot be installed directly".format(
                            ", ".join(r.name for r in build_failures)))

                to_install = resolver.get_installation_order(
                    requirement_set
                )

                # Consistency Checking of the package set we're installing.
                should_warn_about_conflicts = (
                    not options.ignore_dependencies and
                    options.warn_about_conflicts
                )
                if should_warn_about_conflicts:
                    self._warn_about_conflicts(to_install)

                # Don't warn about script install locations if
                # --target has been specified
                warn_script_location = options.warn_script_location
                if options.target_dir:
                    warn_script_location = False

                installed = install_given_reqs(
                    to_install,
                    install_options,
                    global_options,
                    root=options.root_path,
                    home=target_temp_dir.path,
                    prefix=options.prefix_path,
                    pycompile=options.compile,
                    warn_script_location=warn_script_location,
                    use_user_site=options.use_user_site,
                )

                lib_locations = get_lib_location_guesses(
                    user=options.use_user_site,
                    home=target_temp_dir.path,
                    root=options.root_path,
                    prefix=options.prefix_path,
                    isolated=options.isolated_mode,
                )
                working_set = pkg_resources.WorkingSet(lib_locations)

                # Build the "Successfully installed a-1 b-2" summary; a
                # failure to look up an installed version only drops the
                # version suffix, never the install itself.
                reqs = sorted(installed, key=operator.attrgetter('name'))
                items = []
                for req in reqs:
                    item = req.name
                    try:
                        installed_version = get_installed_version(
                            req.name, working_set=working_set
                        )
                        if installed_version:
                            item += '-' + installed_version
                    except Exception:
                        pass
                    items.append(item)
                installed = ' '.join(items)
                if installed:
                    logger.info('Successfully installed %s', installed)
            except EnvironmentError as error:
                show_traceback = (self.verbosity >= 1)

                message = create_env_error_message(
                    error, show_traceback, options.use_user_site,
                )
                logger.error(message, exc_info=show_traceback)

                return ERROR
            except PreviousBuildDirError:
                options.no_clean = True
                raise
            finally:
                # Clean up
                if not options.no_clean:
                    requirement_set.cleanup_files()
                    wheel_cache.cleanup()

        if options.target_dir:
            self._handle_target_dir(
                options.target_dir, target_temp_dir, options.upgrade
            )

        return requirement_set
def run(self, options, args):
    """Run ``pip install`` (pip 10-era variant, distro-patched).

    Resolves *args*, optionally pre-builds wheels, installs the result,
    and returns the resolved RequirementSet (or ``ERROR`` on an
    EnvironmentError).  Carries the downstream ``strip_file_prefix``
    option and a root-privileges warning.
    """
    cmdoptions.check_install_build_global(options)

    def is_venv():
        # True in both virtualenv (real_prefix) and venv (base_prefix
        # differing from prefix).
        return hasattr(sys, 'real_prefix') or \
            (hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix)

    # Check whether we have root privileges and aren't in venv/virtualenv
    # NOTE(review): os.getuid() is POSIX-only — this branch would raise
    # AttributeError on Windows; presumably this build never runs there.
    if os.getuid() == 0 and not is_venv():
        logger.warning(
            "WARNING: Running pip install with root privileges is "
            "generally not a good idea. Try `%s install --user` instead."
            % path.basename(sys.argv[0])
        )

    upgrade_strategy = "to-satisfy-only"
    if options.upgrade:
        upgrade_strategy = options.upgrade_strategy

    if options.build_dir:
        options.build_dir = os.path.abspath(options.build_dir)

    options.src_dir = os.path.abspath(options.src_dir)
    install_options = options.install_options or []
    if options.use_user_site:
        if options.prefix_path:
            raise CommandError(
                "Can not combine '--user' and '--prefix' as they imply "
                "different installation locations")
        if virtualenv_no_global():
            raise InstallationError(
                "Can not perform a '--user' install. User site-packages "
                "are not visible in this virtualenv.")
        install_options.append('--user')
        install_options.append('--prefix=')

    target_temp_dir = TempDirectory(kind="target")
    if options.target_dir:
        # --target installs into a scratch dir that is relocated later.
        options.ignore_installed = True
        options.target_dir = os.path.abspath(options.target_dir)
        if (os.path.exists(options.target_dir) and
                not os.path.isdir(options.target_dir)):
            raise CommandError(
                "Target path exists but is not a directory, will not "
                "continue.")

        # Create a target directory for using with the target option
        target_temp_dir.create()
        install_options.append('--home=' + target_temp_dir.path)

    global_options = options.global_options or []

    with self._build_session(options) as session:
        finder = self._build_package_finder(options, session)
        build_delete = (not (options.no_clean or options.build_dir))
        wheel_cache = WheelCache(options.cache_dir, options.format_control)

        if options.cache_dir and not check_path_owner(options.cache_dir):
            logger.warning(
                "The directory '%s' or its parent directory is not owned "
                "by the current user and caching wheels has been "
                "disabled. check the permissions and owner of that "
                "directory. If executing pip with sudo, you may want "
                "sudo's -H flag.",
                options.cache_dir,
            )
            options.cache_dir = None

        with RequirementTracker() as req_tracker, TempDirectory(
                options.build_dir, delete=build_delete,
                kind="install") as directory:
            requirement_set = RequirementSet(
                require_hashes=options.require_hashes,
            )

            try:
                self.populate_requirement_set(requirement_set, args,
                                              options, finder, session,
                                              self.name, wheel_cache)
                preparer = RequirementPreparer(
                    build_dir=directory.path,
                    src_dir=options.src_dir,
                    download_dir=None,
                    wheel_download_dir=None,
                    progress_bar=options.progress_bar,
                    build_isolation=options.build_isolation,
                    req_tracker=req_tracker,
                )

                resolver = Resolver(
                    preparer=preparer,
                    finder=finder,
                    session=session,
                    wheel_cache=wheel_cache,
                    use_user_site=options.use_user_site,
                    upgrade_strategy=upgrade_strategy,
                    force_reinstall=options.force_reinstall,
                    ignore_dependencies=options.ignore_dependencies,
                    ignore_requires_python=options.ignore_requires_python,
                    ignore_installed=options.ignore_installed,
                    isolated=options.isolated_mode,
                )
                resolver.resolve(requirement_set)

                protect_pip_from_modification_on_windows(
                    modifying_pip=requirement_set.has_requirement("pip"))

                # If caching is disabled or wheel is not installed don't
                # try to build wheels.
                # NOTE(review): `wheel` is presumably the wheel module
                # imported (or set to None) at module level — confirm the
                # top-of-file import.
                if wheel and options.cache_dir:
                    # build wheels before install.
                    wb = WheelBuilder(
                        finder, preparer, wheel_cache,
                        build_options=[], global_options=[],
                    )
                    # Ignore the result: a failed wheel will be
                    # installed from the sdist/vcs whatever.
                    wb.build(requirement_set.requirements.values(),
                             session=session, autobuilding=True)

                to_install = resolver.get_installation_order(
                    requirement_set)

                # Consistency Checking of the package set we're installing.
                should_warn_about_conflicts = (
                    not options.ignore_dependencies and
                    options.warn_about_conflicts)
                if should_warn_about_conflicts:
                    self._warn_about_conflicts(to_install)

                # Don't warn about script install locations if
                # --target has been specified
                warn_script_location = options.warn_script_location
                if options.target_dir:
                    warn_script_location = False

                installed = install_given_reqs(
                    to_install,
                    install_options,
                    global_options,
                    root=options.root_path,
                    home=target_temp_dir.path,
                    prefix=options.prefix_path,
                    pycompile=options.compile,
                    warn_script_location=warn_script_location,
                    use_user_site=options.use_user_site,
                    strip_file_prefix=options.strip_file_prefix,
                )

                lib_locations = get_lib_location_guesses(
                    user=options.use_user_site,
                    home=target_temp_dir.path,
                    root=options.root_path,
                    prefix=options.prefix_path,
                    isolated=options.isolated_mode,
                )
                working_set = pkg_resources.WorkingSet(lib_locations)

                # Build the success summary; version lookup failures only
                # drop the "-<version>" suffix.
                reqs = sorted(installed, key=operator.attrgetter('name'))
                items = []
                for req in reqs:
                    item = req.name
                    try:
                        installed_version = get_installed_version(
                            req.name, working_set=working_set)
                        if installed_version:
                            item += '-' + installed_version
                    except Exception:
                        pass
                    items.append(item)
                installed = ' '.join(items)
                if installed:
                    logger.info('Successfully installed %s', installed)
            except EnvironmentError as error:
                show_traceback = (self.verbosity >= 1)

                message = create_env_error_message(
                    error, show_traceback, options.use_user_site,
                )
                logger.error(message, exc_info=show_traceback)

                return ERROR
            except PreviousBuildDirError:
                options.no_clean = True
                raise
            finally:
                # Clean up
                if not options.no_clean:
                    requirement_set.cleanup_files()
                    wheel_cache.cleanup()

        if options.target_dir:
            self._handle_target_dir(options.target_dir, target_temp_dir,
                                    options.upgrade)

        return requirement_set
class PyPIRepository(BaseRepository):
    DEFAULT_INDEX_URL = PyPI.simple_url

    """
    The PyPIRepository will use the provided Finder instance to lookup
    packages.  Typically, it looks up packages on PyPI (the default implicit
    config), but any other PyPI mirror can be used if index_urls is
    changed/configured on the Finder.
    """
    # NOTE: the string above is a bare expression statement (it follows the
    # class attribute), not the class docstring — kept as-is.

    def __init__(self, pip_options, session):
        self.session = session
        self.pip_options = pip_options
        self.wheel_cache = WheelCache(CACHE_DIR, pip_options.format_control)

        index_urls = [pip_options.index_url] + pip_options.extra_index_urls
        if pip_options.no_index:
            index_urls = []

        self.finder = PackageFinder(
            find_links=pip_options.find_links,
            index_urls=index_urls,
            trusted_hosts=pip_options.trusted_hosts,
            allow_all_prereleases=pip_options.pre,
            process_dependency_links=pip_options.process_dependency_links,
            session=self.session,
        )

        # Caches
        # stores project_name => InstallationCandidate mappings for all
        # versions reported by PyPI, so we only have to ask once for each
        # project
        self._available_candidates_cache = {}

        # stores InstallRequirement => list(InstallRequirement) mappings
        # of all secondary dependencies for the given requirement, so we
        # only have to go to disk once for each requirement
        self._dependencies_cache = {}

        # Setup file paths
        self.freshen_build_caches()
        self._download_dir = fs_str(os.path.join(CACHE_DIR, 'pkgs'))
        self._wheel_download_dir = fs_str(os.path.join(CACHE_DIR, 'wheels'))

    def freshen_build_caches(self):
        """
        Start with fresh build/source caches.  Will remove any old build
        caches from disk automatically.
        """
        self._build_dir = TemporaryDirectory(fs_str('build'))
        self._source_dir = TemporaryDirectory(fs_str('source'))

    @property
    def build_dir(self):
        # Path of the current (temporary) build directory.
        return self._build_dir.name

    @property
    def source_dir(self):
        # Path of the current (temporary) source directory.
        return self._source_dir.name

    def clear_caches(self):
        # Remove the persistent download/wheel caches from disk.
        rmtree(self._download_dir, ignore_errors=True)
        rmtree(self._wheel_download_dir, ignore_errors=True)

    def find_all_candidates(self, req_name):
        # Memoized: ask the finder only once per project name.
        if req_name not in self._available_candidates_cache:
            candidates = self.finder.find_all_candidates(req_name)
            self._available_candidates_cache[req_name] = candidates
        return self._available_candidates_cache[req_name]

    def find_best_match(self, ireq, prereleases=None):
        """
        Returns a Version object that indicates the best match for the given
        InstallRequirement according to the external repository.
        """
        if ireq.editable:
            return ireq  # return itself as the best match

        all_candidates = self.find_all_candidates(ireq.name)
        candidates_by_version = lookup_table(all_candidates,
                                             key=lambda c: c.version,
                                             unique=True)
        matching_versions = ireq.specifier.filter(
            (candidate.version for candidate in all_candidates),
            prereleases=prereleases)

        # Reuses pip's internal candidate sort key to sort
        matching_candidates = [
            candidates_by_version[ver] for ver in matching_versions
        ]
        if not matching_candidates:
            raise NoCandidateFound(ireq, all_candidates, self.finder)
        best_candidate = max(matching_candidates,
                             key=self.finder._candidate_sort_key)

        # Turn the candidate into a pinned InstallRequirement
        return make_install_requirement(best_candidate.project,
                                        best_candidate.version,
                                        ireq.extras,
                                        constraint=ireq.constraint)

    def get_dependencies(self, ireq):
        """
        Given a pinned or an editable InstallRequirement, returns a set of
        dependencies (also InstallRequirements, but not necessarily pinned).
        They indicate the secondary dependencies for the given requirement.
        """
        if not (ireq.editable or is_pinned_requirement(ireq)):
            raise TypeError(
                'Expected pinned or editable InstallRequirement, got {}'.
                format(ireq))

        if ireq not in self._dependencies_cache:
            if ireq.editable and (ireq.source_dir and
                                  os.path.exists(ireq.source_dir)):
                # No download_dir for locally available editable requirements.
                # If a download_dir is passed, pip will unnecessarily
                # archive the entire source directory
                download_dir = None
            elif ireq.link and not ireq.link.is_artifact:
                # No download_dir for VCS sources. This also works around pip
                # using git-checkout-index, which gets rid of the .git dir.
                download_dir = None
            else:
                download_dir = self._download_dir
                if not os.path.isdir(download_dir):
                    os.makedirs(download_dir)
            if not os.path.isdir(self._wheel_download_dir):
                os.makedirs(self._wheel_download_dir)

            try:
                # Pip < 9 and below
                reqset = RequirementSet(
                    self.build_dir,
                    self.source_dir,
                    download_dir=download_dir,
                    wheel_download_dir=self._wheel_download_dir,
                    session=self.session,
                    wheel_cache=self.wheel_cache,
                )
                self._dependencies_cache[ireq] = reqset._prepare_file(
                    self.finder,
                    ireq)
            except TypeError:
                # Pip >= 10 (new resolver!)
                preparer = RequirementPreparer(
                    build_dir=self.build_dir,
                    src_dir=self.source_dir,
                    download_dir=download_dir,
                    wheel_download_dir=self._wheel_download_dir,
                    progress_bar='off',
                    build_isolation=False
                )
                reqset = RequirementSet()
                ireq.is_direct = True
                reqset.add_requirement(ireq)
                self.resolver = PipResolver(
                    preparer=preparer,
                    finder=self.finder,
                    session=self.session,
                    upgrade_strategy="to-satisfy-only",
                    force_reinstall=False,
                    ignore_dependencies=False,
                    ignore_requires_python=False,
                    ignore_installed=True,
                    isolated=False,
                    wheel_cache=self.wheel_cache,
                    use_user_site=False,
                )
                self.resolver.resolve(reqset)
                self._dependencies_cache[ireq] = reqset.requirements.values()
            reqset.cleanup_files()
        return set(self._dependencies_cache[ireq])

    def get_hashes(self, ireq):
        """
        Given an InstallRequirement, return a set of hashes that represent
        all of the files for a given requirement.  Editable requirements
        return an empty set.  Unpinned requirements raise a TypeError.
        """
        if ireq.editable:
            return set()

        if not is_pinned_requirement(ireq):
            raise TypeError(
                "Expected pinned requirement, got {}".format(ireq))

        # We need to get all of the candidates that match our current version
        # pin, these will represent all of the files that could possibly
        # satisfy this constraint.
        all_candidates = self.find_all_candidates(ireq.name)
        candidates_by_version = lookup_table(all_candidates,
                                             key=lambda c: c.version)
        matching_versions = list(
            ireq.specifier.filter(
                (candidate.version for candidate in all_candidates)))
        # NOTE(review): assumes at least one candidate matches the pin;
        # an unsatisfiable pin would raise IndexError here.
        matching_candidates = candidates_by_version[matching_versions[0]]

        return {
            self._get_file_hash(candidate.location)
            for candidate in matching_candidates
        }

    def _get_file_hash(self, location):
        # Stream the file and hash it in chunks; returns "<algo>:<hexdigest>".
        h = hashlib.new(FAVORITE_HASH)
        with open_local_or_remote_file(location, self.session) as fp:
            for chunk in iter(lambda: fp.read(8096), b""):
                h.update(chunk)
        return ":".join([FAVORITE_HASH, h.hexdigest()])

    @contextmanager
    def allow_all_wheels(self):
        """
        Monkey patches pip.Wheel to allow wheels from all platforms and
        Python versions.

        This also saves the candidate cache and set a new one, or else the
        results from the previous non-patched calls will interfere.
        """
        def _wheel_supported(self, tags=None):
            # Ignore current platform. Support everything.
            return True

        def _wheel_support_index_min(self, tags=None):
            # All wheels are equal priority for sorting.
            return 0

        original_wheel_supported = Wheel.supported
        original_support_index_min = Wheel.support_index_min
        original_cache = self._available_candidates_cache

        Wheel.supported = _wheel_supported
        Wheel.support_index_min = _wheel_support_index_min
        self._available_candidates_cache = {}

        try:
            yield
        finally:
            # Always restore the patched class methods and the cache, even
            # if the body raised.
            Wheel.supported = original_wheel_supported
            Wheel.support_index_min = original_support_index_min
            self._available_candidates_cache = original_cache