def _prepare_file(self,
                  finder,
                  req_to_install,
                  require_hashes=False,
                  ignore_dependencies=False):
    """Prepare a single requirements file.

    :return: A list of additional InstallRequirements to also install.
    """
    # Tell user what we are doing for this requirement:
    # obtain (editable), skipping, processing (local url), collecting
    # (remote url or package name)
    if req_to_install.constraint or req_to_install.prepared:
        return []

    req_to_install.prepared = True

    # ###################### #
    # # print log messages # #
    # ###################### #
    if req_to_install.editable:
        logger.info('Obtaining %s', req_to_install)
    else:
        # satisfied_by is only evaluated by calling _check_skip_installed,
        # so it must be None here.
        assert req_to_install.satisfied_by is None
        if not self.ignore_installed:
            skip_reason = self._check_skip_installed(
                req_to_install, finder)

        if req_to_install.satisfied_by:
            assert skip_reason is not None, (
                '_check_skip_installed returned None but '
                'req_to_install.satisfied_by is set to %r'
                % (req_to_install.satisfied_by,))
            logger.info(
                'Requirement %s: %s (%s)', skip_reason,
                req_to_install,
                req_to_install.satisfied_by.version)
        else:
            if (req_to_install.link and
                    req_to_install.link.scheme == 'file'):
                path = url_to_path(req_to_install.link.url)
                logger.info('Processing %s', display_path(path))
            else:
                logger.info('Collecting %s', req_to_install)

    with indent_log():
        # ################################ #
        # # vcs update or unpack archive # #
        # ################################ #
        if req_to_install.editable:
            if require_hashes:
                raise InstallationError(
                    'The editable requirement %s cannot be installed when '
                    'requiring hashes, because there is no single file to '
                    'hash.' % req_to_install)
            req_to_install.ensure_has_source_dir(self.src_dir)
            req_to_install.update_editable(not self.is_download)
            abstract_dist = make_abstract_dist(req_to_install)
            abstract_dist.prep_for_dist()
            if self.is_download:
                req_to_install.archive(self.download_dir)
            req_to_install.check_if_exists()

            in_toto_verify_wrapper(req_to_install.source_dir,
                                   toto_verify=self.toto_verify,
                                   toto_default=self.toto_default)

        elif req_to_install.satisfied_by:
            if require_hashes:
                logger.debug(
                    'Since it is already installed, we are trusting this '
                    'package without checking its hash. To ensure a '
                    'completely repeatable environment, install into an '
                    'empty virtualenv.')
            abstract_dist = Installed(req_to_install)
        else:
            # @@ if filesystem packages are not marked
            # editable in a req, a non deterministic error
            # occurs when the script attempts to unpack the
            # build directory
            req_to_install.ensure_has_source_dir(self.build_dir)
            # If a checkout exists, it's unwise to keep going. version
            # inconsistencies are logged later, but do not fail the
            # installation.
            # FIXME: this won't upgrade when there's an existing
            # package unpacked in `req_to_install.source_dir`
            if os.path.exists(
                    os.path.join(req_to_install.source_dir, 'setup.py')):
                raise PreviousBuildDirError(
                    "pip can't proceed with requirements '%s' due to a"
                    " pre-existing build directory (%s). This is "
                    "likely due to a previous installation that failed"
                    ". pip is being responsible and not assuming it "
                    "can delete this. Please delete it and try again."
                    % (req_to_install, req_to_install.source_dir)
                )
            req_to_install.populate_link(
                finder,
                self._is_upgrade_allowed(req_to_install),
                require_hashes
            )
            # We can't hit this spot and have populate_link return None.
            # req_to_install.satisfied_by is None here (because we're
            # guarded) and upgrade has no impact except when satisfied_by
            # is not None.
            # Then inside find_requirement existing_applicable -> False
            # If no new versions are found, DistributionNotFound is raised,
            # otherwise a result is guaranteed.
            assert req_to_install.link
            link = req_to_install.link

            # Now that we have the real link, we can tell what kind of
            # requirements we have and raise some more informative errors
            # than otherwise. (For example, we can raise VcsHashUnsupported
            # for a VCS URL rather than HashMissing.)
            if require_hashes:
                # We could check these first 2 conditions inside
                # unpack_url and save repetition of conditions, but then
                # we would report less-useful error messages for
                # unhashable requirements, complaining that there's no
                # hash provided.
                if is_vcs_url(link):
                    raise VcsHashUnsupported()
                elif is_file_url(link) and is_dir_url(link):
                    raise DirectoryUrlHashUnsupported()
                if (not req_to_install.original_link and
                        not req_to_install.is_pinned):
                    # Unpinned packages are asking for trouble when a new
                    # version is uploaded. This isn't a security check, but
                    # it saves users a surprising hash mismatch in the
                    # future.
                    #
                    # file:/// URLs aren't pinnable, so don't complain
                    # about them not being pinned.
                    raise HashUnpinned()
            hashes = req_to_install.hashes(
                trust_internet=not require_hashes)
            if require_hashes and not hashes:
                # Known-good hashes are missing for this requirement, so
                # shim it with a facade object that will provoke hash
                # computation and then raise a HashMissing exception
                # showing the user what the hash should be.
                hashes = MissingHashes()

            try:
                download_dir = self.download_dir
                # We always delete unpacked sdists after pip ran.
                autodelete_unpacked = True
                if req_to_install.link.is_wheel \
                        and self.wheel_download_dir:
                    # when doing 'pip wheel' we download wheels to a
                    # dedicated dir.
                    download_dir = self.wheel_download_dir
                if req_to_install.link.is_wheel:
                    if download_dir:
                        # When downloading, we only unpack wheels to get
                        # metadata.
                        autodelete_unpacked = True
                    else:
                        # When installing a wheel, we use the unpacked
                        # wheel.
                        autodelete_unpacked = False

                unpack_url(
                    req_to_install.link, req_to_install.source_dir,
                    download_dir, autodelete_unpacked,
                    session=self.session, hashes=hashes,
                    toto_verify=self.toto_verify,
                    toto_default=self.toto_default)
            except requests.HTTPError as exc:
                logger.critical(
                    'Could not install requirement %s because '
                    'of error %s',
                    req_to_install,
                    exc,
                )
                raise InstallationError(
                    'Could not install requirement %s because '
                    'of HTTP error %s for URL %s' %
                    (req_to_install, exc, req_to_install.link)
                )
            abstract_dist = make_abstract_dist(req_to_install)
            abstract_dist.prep_for_dist()
            if self.is_download:
                # Make a .zip of the source_dir we already created.
                if req_to_install.link.scheme in vcs.all_schemes:
                    req_to_install.archive(self.download_dir)
            # req_to_install.req is only avail after unpack for URL
            # pkgs repeat check_if_exists to uninstall-on-upgrade
            # (#14)
            if not self.ignore_installed:
                req_to_install.check_if_exists()
            if req_to_install.satisfied_by:
                if self.upgrade or self.ignore_installed:
                    # don't uninstall conflict if user install and
                    # conflict is not user install
                    if not (self.use_user_site and not
                            dist_in_usersite(
                                req_to_install.satisfied_by)):
                        req_to_install.conflicts_with = \
                            req_to_install.satisfied_by
                    req_to_install.satisfied_by = None
                else:
                    logger.info(
                        'Requirement already satisfied (use '
                        '--upgrade to upgrade): %s',
                        req_to_install,
                    )

        # ###################### #
        # # parse dependencies # #
        # ###################### #
        dist = abstract_dist.dist(finder)
        try:
            check_dist_requires_python(dist)
        except UnsupportedPythonVersion as e:
            if self.ignore_requires_python:
                logger.warning(e.args[0])
            else:
                req_to_install.remove_temporary_source()
                raise
        more_reqs = []

        def add_req(subreq, extras_requested):
            sub_install_req = InstallRequirement(
                str(subreq),
                req_to_install,
                isolated=self.isolated,
                wheel_cache=self._wheel_cache,
            )
            more_reqs.extend(self.add_requirement(
                sub_install_req, req_to_install.name,
                extras_requested=extras_requested))

        # We add req_to_install before its dependencies, so that we
        # can refer to it when adding dependencies.
        if not self.has_requirement(req_to_install.name):
            # 'unnamed' requirements will get added here
            self.add_requirement(req_to_install, None)

        if not ignore_dependencies:
            if req_to_install.extras:
                logger.debug(
                    "Installing extra requirements: %r",
                    ','.join(req_to_install.extras),
                )
            missing_requested = sorted(
                set(req_to_install.extras) - set(dist.extras)
            )
            for missing in missing_requested:
                logger.warning(
                    '%s does not provide the extra \'%s\'',
                    dist, missing
                )
            available_requested = sorted(
                set(dist.extras) & set(req_to_install.extras)
            )
            for subreq in dist.requires(available_requested):
                add_req(subreq, extras_requested=available_requested)

        # cleanup tmp src
        self.reqs_to_cleanup.append(req_to_install)

        if not req_to_install.editable and not req_to_install.satisfied_by:
            # XXX: --no-install leads this to report 'Successfully
            # downloaded' for only non-editable reqs, even though we took
            # action on them.
            self.successfully_downloaded.append(req_to_install)

    return more_reqs

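# --- Illustration (not part of pip): how the list returned by
# _prepare_file() is typically consumed. This is a hedged sketch based on
# the work-queue pattern visible in prepare_files() below; the names
# `requirement_set` and `root_reqs` are hypothetical placeholders for a
# RequirementSet-like object and its top-level requirements.
def _walk_requirements_example(requirement_set, finder, root_reqs):
    # Breadth-first traversal: preparing one requirement may return
    # additional InstallRequirements (its dependencies), which are queued
    # and prepared in turn until no new requirements appear.
    queue = list(root_reqs)
    while queue:
        req = queue.pop(0)
        queue.extend(requirement_set._prepare_file(finder, req))
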
def prepare_files(self, finder):
    """
    Prepare process. Create temp directories, download and/or unpack files.
    """
    from pip.index import Link

    unnamed = list(self.unnamed_requirements)
    reqs = list(self.requirements.values())
    while reqs or unnamed:
        if unnamed:
            req_to_install = unnamed.pop(0)
        else:
            req_to_install = reqs.pop(0)
        install = True
        best_installed = False
        not_found = None

        # ############################################# #
        # # Search for archive to fulfill requirement # #
        # ############################################# #

        if not self.ignore_installed and not req_to_install.editable:
            req_to_install.check_if_exists()
            if req_to_install.satisfied_by:
                if self.upgrade:
                    if not self.force_reinstall and not req_to_install.url:
                        try:
                            url = finder.find_requirement(
                                req_to_install, self.upgrade)
                        except BestVersionAlreadyInstalled:
                            best_installed = True
                            install = False
                        except DistributionNotFound as exc:
                            not_found = exc
                        else:
                            # Avoid the need to call find_requirement again
                            req_to_install.url = url.url

                    if not best_installed:
                        # don't uninstall conflict if user install and
                        # conflict is not user install
                        if not (self.use_user_site and not
                                dist_in_usersite(
                                    req_to_install.satisfied_by
                                )):
                            req_to_install.conflicts_with = \
                                req_to_install.satisfied_by
                        req_to_install.satisfied_by = None
                else:
                    install = False
            if req_to_install.satisfied_by:
                if best_installed:
                    logger.info(
                        'Requirement already up-to-date: %s',
                        req_to_install,
                    )
                else:
                    logger.info(
                        'Requirement already satisfied (use --upgrade to '
                        'upgrade): %s',
                        req_to_install,
                    )

        if req_to_install.editable:
            logger.info('Obtaining %s', req_to_install)
        elif install:
            if (req_to_install.url and
                    req_to_install.url.lower().startswith('file:')):
                path = url_to_path(req_to_install.url)
                logger.info('Processing %s', display_path(path))
            else:
                logger.info('Collecting %s', req_to_install)

        with indent_log():
            # ################################ #
            # # vcs update or unpack archive # #
            # ################################ #
            is_wheel = False
            if req_to_install.editable:
                if req_to_install.source_dir is None:
                    location = req_to_install.build_location(self.src_dir)
                    req_to_install.source_dir = location
                else:
                    location = req_to_install.source_dir
                if not os.path.exists(self.build_dir):
                    _make_build_dir(self.build_dir)
                req_to_install.update_editable(not self.is_download)
                if self.is_download:
                    req_to_install.run_egg_info()
                    req_to_install.archive(self.download_dir)
                else:
                    req_to_install.run_egg_info()
            elif install:
                # @@ if filesystem packages are not marked
                # editable in a req, a non deterministic error
                # occurs when the script attempts to unpack the
                # build directory

                # NB: This call can result in the creation of a temporary
                # build directory
                location = req_to_install.build_location(
                    self.build_dir,
                )
                unpack = True
                url = None

                # If a checkout exists, it's unwise to keep going. version
                # inconsistencies are logged later, but do not fail the
                # installation.
                if os.path.exists(os.path.join(location, 'setup.py')):
                    raise PreviousBuildDirError(
                        "pip can't proceed with requirements '%s' due to a"
                        " pre-existing build directory (%s). This is "
                        "likely due to a previous installation that failed"
                        ". pip is being responsible and not assuming it "
                        "can delete this. Please delete it and try again."
                        % (req_to_install, location)
                    )
                else:
                    # FIXME: this won't upgrade when there's an existing
                    # package unpacked in `location`
                    if req_to_install.url is None:
                        if not_found:
                            raise not_found
                        url = finder.find_requirement(
                            req_to_install,
                            upgrade=self.upgrade,
                        )
                    else:
                        # FIXME: should req_to_install.url already be a
                        # link?
                        url = Link(req_to_install.url)
                        assert url
                    if url:
                        try:
                            if (
                                url.filename.endswith(wheel_ext) and
                                self.wheel_download_dir
                            ):
                                # when doing 'pip wheel'
                                download_dir = self.wheel_download_dir
                                do_download = True
                            else:
                                download_dir = self.download_dir
                                do_download = self.is_download
                            unpack_url(
                                url, location, download_dir,
                                do_download, session=self.session,
                            )
                        except requests.HTTPError as exc:
                            logger.critical(
                                'Could not install requirement %s because '
                                'of error %s',
                                req_to_install,
                                exc,
                            )
                            raise InstallationError(
                                'Could not install requirement %s because '
                                'of HTTP error %s for URL %s' %
                                (req_to_install, exc, url)
                            )
                    else:
                        unpack = False
                if unpack:
                    is_wheel = url and url.filename.endswith(wheel_ext)
                    if self.is_download:
                        req_to_install.source_dir = location
                        if not is_wheel:
                            # FIXME: https://github.com/pypa/pip/issues/1112
                            req_to_install.run_egg_info()
                        if url and url.scheme in vcs.all_schemes:
                            req_to_install.archive(self.download_dir)
                    elif is_wheel:
                        req_to_install.source_dir = location
                        req_to_install.url = url.url
                    else:
                        req_to_install.source_dir = location
                        req_to_install.run_egg_info()
                        req_to_install.assert_source_matches_version()

            # req_to_install.req is only avail after unpack for URL
            # pkgs repeat check_if_exists to uninstall-on-upgrade
            # (#14)
            if not self.ignore_installed:
                req_to_install.check_if_exists()
            if req_to_install.satisfied_by:
                if self.upgrade or self.ignore_installed:
                    # don't uninstall conflict if user install and
                    # conflict is not user install
                    if not (self.use_user_site and not
                            dist_in_usersite(
                                req_to_install.satisfied_by)):
                        req_to_install.conflicts_with = \
                            req_to_install.satisfied_by
                    req_to_install.satisfied_by = None
                else:
                    logger.info(
                        'Requirement already satisfied (use '
                        '--upgrade to upgrade): %s',
                        req_to_install,
                    )
                    install = False

            # ###################### #
            # # parse dependencies # #
            # ###################### #
            if req_to_install.extras:
                logger.debug(
                    "Installing extra requirements: %r",
                    ','.join(req_to_install.extras),
                )
            if is_wheel:
                dist = list(
                    pkg_resources.find_distributions(location)
                )[0]
            else:  # sdists
                if req_to_install.satisfied_by:
                    dist = req_to_install.satisfied_by
                else:
                    dist = req_to_install.get_dist()
                # FIXME: shouldn't be globally added:
                if dist.has_metadata('dependency_links.txt'):
                    finder.add_dependency_links(
                        dist.get_metadata_lines('dependency_links.txt')
                    )

            if not self.ignore_dependencies:
                for subreq in dist.requires(
                        req_to_install.extras):
                    if self.has_requirement(
                            subreq.project_name):
                        # FIXME: check for conflict
                        continue
                    subreq = InstallRequirement(
                        str(subreq),
                        req_to_install,
                        isolated=self.isolated,
                    )
                    reqs.append(subreq)
                    self.add_requirement(subreq)

            if not self.has_requirement(req_to_install.name):
                # 'unnamed' requirements will get added here
                self.add_requirement(req_to_install)

            # cleanup tmp src
            if (self.is_download or
                    req_to_install._temp_build_dir is not None):
                self.reqs_to_cleanup.append(req_to_install)

            if install:
                self.successfully_downloaded.append(req_to_install)

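# --- Illustration (not part of this excerpt): prepare_files() above (and
# the older variant further below) call _make_build_dir(), which is not
# reproduced here. A minimal sketch of what such a helper needs to do,
# assuming pip.locations' write_delete_marker_file() is available; treat
# the body as an approximation rather than pip's definitive implementation.
def _make_build_dir_sketch(build_dir):
    from pip.locations import write_delete_marker_file

    # Create the shared build directory and drop a marker file so pip can
    # recognise the directory as one it owns and may safely delete later.
    os.makedirs(build_dir)
    write_delete_marker_file(build_dir)
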
def _prepare_linked_requirement(self, req, resolver):
    """Prepare a requirement that would be obtained from req.link
    """
    # TODO: Breakup into smaller functions
    if req.link and req.link.scheme == 'file':
        path = url_to_path(req.link.url)
        logger.info('Processing %s', display_path(path))
    else:
        logger.info('Collecting %s', req)

    with indent_log():
        # @@ if filesystem packages are not marked
        # editable in a req, a non deterministic error
        # occurs when the script attempts to unpack the
        # build directory
        req.ensure_has_source_dir(self.build_dir)
        # If a checkout exists, it's unwise to keep going. version
        # inconsistencies are logged later, but do not fail the
        # installation.
        # FIXME: this won't upgrade when there's an existing
        # package unpacked in `req.source_dir`
        if os.path.exists(os.path.join(req.source_dir, 'setup.py')):
            raise PreviousBuildDirError(
                "pip can't proceed with requirements '%s' due to a"
                " pre-existing build directory (%s). This is "
                "likely due to a previous installation that failed"
                ". pip is being responsible and not assuming it "
                "can delete this. Please delete it and try again."
                % (req, req.source_dir))

        req.populate_link(resolver.finder,
                          resolver._is_upgrade_allowed(req),
                          resolver.require_hashes)
        # We can't hit this spot and have populate_link return None.
        # req.satisfied_by is None here (because we're
        # guarded) and upgrade has no impact except when satisfied_by
        # is not None.
        # Then inside find_requirement existing_applicable -> False
        # If no new versions are found, DistributionNotFound is raised,
        # otherwise a result is guaranteed.
        assert req.link
        link = req.link

        # Now that we have the real link, we can tell what kind of
        # requirements we have and raise some more informative errors
        # than otherwise. (For example, we can raise VcsHashUnsupported
        # for a VCS URL rather than HashMissing.)
        if resolver.require_hashes:
            # We could check these first 2 conditions inside
            # unpack_url and save repetition of conditions, but then
            # we would report less-useful error messages for
            # unhashable requirements, complaining that there's no
            # hash provided.
            if is_vcs_url(link):
                raise VcsHashUnsupported()
            elif is_file_url(link) and is_dir_url(link):
                raise DirectoryUrlHashUnsupported()
            if not req.original_link and not req.is_pinned:
                # Unpinned packages are asking for trouble when a new
                # version is uploaded. This isn't a security check, but
                # it saves users a surprising hash mismatch in the
                # future.
                #
                # file:/// URLs aren't pinnable, so don't complain
                # about them not being pinned.
                raise HashUnpinned()

        hashes = req.hashes(trust_internet=not resolver.require_hashes)
        if resolver.require_hashes and not hashes:
            # Known-good hashes are missing for this requirement, so
            # shim it with a facade object that will provoke hash
            # computation and then raise a HashMissing exception
            # showing the user what the hash should be.
            hashes = MissingHashes()

        try:
            download_dir = self.download_dir
            # We always delete unpacked sdists after pip ran.
            autodelete_unpacked = True
            if req.link.is_wheel and self.wheel_download_dir:
                # when doing 'pip wheel' we download wheels to a
                # dedicated dir.
                download_dir = self.wheel_download_dir
            if req.link.is_wheel:
                if download_dir:
                    # When downloading, we only unpack wheels to get
                    # metadata.
                    autodelete_unpacked = True
                else:
                    # When installing a wheel, we use the unpacked
                    # wheel.
                    autodelete_unpacked = False

            unpack_url(req.link, req.source_dir,
                       download_dir, autodelete_unpacked,
                       session=resolver.session, hashes=hashes,
                       progress_bar=self.progress_bar)
        except requests.HTTPError as exc:
            logger.critical(
                'Could not install requirement %s because of error %s',
                req,
                exc,
            )
            raise InstallationError(
                'Could not install requirement %s because of HTTP '
                'error %s for URL %s' %
                (req, exc, req.link))
        abstract_dist = make_abstract_dist(req)
        abstract_dist.prep_for_dist()
        if self._download_should_save:
            # Make a .zip of the source_dir we already created.
            if req.link.scheme in vcs.all_schemes:
                req.archive(self.download_dir)
        # req.req is only avail after unpack for URL
        # pkgs repeat check_if_exists to uninstall-on-upgrade
        # (#14)
        if not resolver.ignore_installed:
            req.check_if_exists()
        if req.satisfied_by:
            should_modify = (resolver.upgrade_strategy != "to-satisfy-only"
                             or resolver.ignore_installed)
            if should_modify:
                resolver._set_req_to_reinstall(req)
            else:
                logger.info(
                    'Requirement already satisfied (use '
                    '--upgrade to upgrade): %s', req,
                )
    return abstract_dist

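# --- Illustration (not part of this excerpt): _prepare_linked_requirement()
# above delegates to resolver._set_req_to_reinstall(), which is not shown.
# A plausible sketch of that helper, inferred from the inline equivalent in
# prepare_requirement() further below; treat it as an approximation, not
# the resolver's definitive implementation. It assumes dist_in_usersite is
# in scope, as it is for the surrounding code.
def _set_req_to_reinstall_sketch(resolver, req):
    # Don't uninstall the conflicting dist on a --user install when the
    # conflict does not live in the user site.
    if not (resolver.use_user_site and
            not dist_in_usersite(req.satisfied_by)):
        req.conflicts_with = req.satisfied_by
    # Clearing satisfied_by makes the requirement look uninstalled, so it
    # will be prepared and reinstalled (upgrading over the old dist).
    req.satisfied_by = None
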
def _prepare_file(self, finder, req_to_install):
    """Prepare a single requirements file.

    :return: A list of additional InstallRequirements to also install.
    """
    # Tell user what we are doing for this requirement:
    # obtain (editable), skipping, processing (local url), collecting
    # (remote url or package name)
    if req_to_install.constraint or req_to_install.prepared:
        return []

    req_to_install.prepared = True

    if req_to_install.editable:
        logger.info('Obtaining %s', req_to_install)
    else:
        # satisfied_by is only evaluated by calling _check_skip_installed,
        # so it must be None here.
        assert req_to_install.satisfied_by is None
        if not self.ignore_installed:
            skip_reason = self._check_skip_installed(
                req_to_install, finder)

        if req_to_install.satisfied_by:
            assert skip_reason is not None, (
                '_check_skip_installed returned None but '
                'req_to_install.satisfied_by is set to %r'
                % (req_to_install.satisfied_by,))
            logger.info('Requirement already %s: %s', skip_reason,
                        req_to_install)
        else:
            if (req_to_install.link and
                    req_to_install.link.scheme == 'file'):
                path = url_to_path(req_to_install.link.url)
                logger.info('Processing %s', display_path(path))
            else:
                logger.info('Collecting %s', req_to_install)

    with indent_log():
        # ################################ #
        # # vcs update or unpack archive # #
        # ################################ #
        if req_to_install.editable:
            req_to_install.ensure_has_source_dir(self.src_dir)
            req_to_install.update_editable(not self.is_download)
            abstract_dist = make_abstract_dist(req_to_install)
            abstract_dist.prep_for_dist()
            if self.is_download:
                req_to_install.archive(self.download_dir)
        elif req_to_install.satisfied_by:
            abstract_dist = Installed(req_to_install)
        else:
            # @@ if filesystem packages are not marked
            # editable in a req, a non deterministic error
            # occurs when the script attempts to unpack the
            # build directory
            req_to_install.ensure_has_source_dir(self.build_dir)
            # If a checkout exists, it's unwise to keep going. version
            # inconsistencies are logged later, but do not fail the
            # installation.
            # FIXME: this won't upgrade when there's an existing
            # package unpacked in `req_to_install.source_dir`
            if os.path.exists(
                    os.path.join(req_to_install.source_dir, 'setup.py')):
                raise PreviousBuildDirError(
                    "pip can't proceed with requirements '%s' due to a"
                    " pre-existing build directory (%s). This is "
                    "likely due to a previous installation that failed"
                    ". pip is being responsible and not assuming it "
                    "can delete this. Please delete it and try again."
                    % (req_to_install, req_to_install.source_dir))

            req_to_install.populate_link(finder, self.upgrade)
            # We can't hit this spot and have populate_link return None.
            # req_to_install.satisfied_by is None here (because we're
            # guarded) and upgrade has no impact except when satisfied_by
            # is not None.
            # Then inside find_requirement existing_applicable -> False
            # If no new versions are found, DistributionNotFound is raised,
            # otherwise a result is guaranteed.
            assert req_to_install.link

            try:
                download_dir = self.download_dir
                # We always delete unpacked sdists after pip ran.
                autodelete_unpacked = True
                if req_to_install.link.is_wheel \
                        and self.wheel_download_dir:
                    # when doing 'pip wheel' we download wheels to a
                    # dedicated dir.
                    download_dir = self.wheel_download_dir
                if req_to_install.link.is_wheel:
                    if download_dir:
                        # When downloading, we only unpack wheels to get
                        # metadata.
                        autodelete_unpacked = True
                    else:
                        # When installing a wheel, we use the unpacked
                        # wheel.
                        autodelete_unpacked = False

                unpack_url(
                    req_to_install.link, req_to_install.source_dir,
                    download_dir, autodelete_unpacked,
                    session=self.session)
            except requests.HTTPError as exc:
                logger.critical(
                    'Could not install requirement %s because '
                    'of error %s',
                    req_to_install,
                    exc,
                )
                raise InstallationError(
                    'Could not install requirement %s because '
                    'of HTTP error %s for URL %s' %
                    (req_to_install, exc, req_to_install.link))
            abstract_dist = make_abstract_dist(req_to_install)
            abstract_dist.prep_for_dist()
            if self.is_download:
                # Make a .zip of the source_dir we already created.
                if req_to_install.link.scheme in vcs.all_schemes:
                    req_to_install.archive(self.download_dir)
            # req_to_install.req is only avail after unpack for URL
            # pkgs repeat check_if_exists to uninstall-on-upgrade
            # (#14)
            if not self.ignore_installed:
                req_to_install.check_if_exists()
            if req_to_install.satisfied_by:
                if self.upgrade or self.ignore_installed:
                    # don't uninstall conflict if user install and
                    # conflict is not user install
                    if not (self.use_user_site and not
                            dist_in_usersite(
                                req_to_install.satisfied_by)):
                        req_to_install.conflicts_with = \
                            req_to_install.satisfied_by
                    req_to_install.satisfied_by = None
                else:
                    logger.info(
                        'Requirement already satisfied (use '
                        '--upgrade to upgrade): %s',
                        req_to_install,
                    )

        # ###################### #
        # # parse dependencies # #
        # ###################### #
        dist = abstract_dist.dist(finder)
        more_reqs = []

        def add_req(subreq):
            sub_install_req = InstallRequirement(
                str(subreq),
                req_to_install,
                isolated=self.isolated,
                wheel_cache=self._wheel_cache,
            )
            more_reqs.extend(
                self.add_requirement(sub_install_req, req_to_install.name))

        # We add req_to_install before its dependencies, so that we
        # can refer to it when adding dependencies.
        if not self.has_requirement(req_to_install.name):
            # 'unnamed' requirements will get added here
            self.add_requirement(req_to_install, None)

        if not self.ignore_dependencies:
            if req_to_install.extras:
                logger.debug(
                    "Installing extra requirements: %r",
                    ','.join(req_to_install.extras),
                )
            missing_requested = sorted(
                set(req_to_install.extras) - set(dist.extras))
            for missing in missing_requested:
                logger.warning('%s does not provide the extra \'%s\'',
                               dist, missing)
            available_requested = sorted(
                set(dist.extras) & set(req_to_install.extras))
            for subreq in dist.requires(available_requested):
                add_req(subreq)

        # cleanup tmp src
        self.reqs_to_cleanup.append(req_to_install)

        if not req_to_install.editable and not req_to_install.satisfied_by:
            # XXX: --no-install leads this to report 'Successfully
            # downloaded' for only non-editable reqs, even though we took
            # action on them.
            self.successfully_downloaded.append(req_to_install)

    return more_reqs

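# --- Illustration (not part of this excerpt): the abstract_dist objects
# returned by make_abstract_dist() and Installed() expose a small common
# interface. A hedged sketch of that interface, inferred only from the
# calls made in this file (prep_for_dist() and dist(finder)); pip's real
# classes carry more logic than shown here.
class _AbstractDistSketch(object):
    def __init__(self, req):
        self.req = req

    def prep_for_dist(self):
        # e.g. run egg_info for an sdist; a wheel or an already-installed
        # dist would have nothing to do here.
        raise NotImplementedError

    def dist(self, finder):
        # Return a pkg_resources-style distribution whose requires() and
        # extras are consumed by the dependency-parsing sections above.
        raise NotImplementedError
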
def prepare_files(self, finder, force_root_egg_info=False, bundle=False):
    """
    Prepare process. Create temp directories, download and/or unpack files.
    """
    unnamed = list(self.unnamed_requirements)
    reqs = list(self.requirements.values())
    while reqs or unnamed:
        if unnamed:
            req_to_install = unnamed.pop(0)
        else:
            req_to_install = reqs.pop(0)
        install = True
        best_installed = False
        not_found = None

        # ############################################# #
        # # Search for archive to fulfill requirement # #
        # ############################################# #

        if not self.ignore_installed and not req_to_install.editable:
            req_to_install.check_if_exists()
            if req_to_install.satisfied_by:
                if self.upgrade:
                    if not self.force_reinstall and not req_to_install.url:
                        try:
                            url = finder.find_requirement(
                                req_to_install, self.upgrade)
                        except BestVersionAlreadyInstalled:
                            best_installed = True
                            install = False
                        except DistributionNotFound as exc:
                            not_found = exc
                        else:
                            # Avoid the need to call find_requirement again
                            req_to_install.url = url.url

                    if not best_installed:
                        # don't uninstall conflict if user install and
                        # conflict is not user install
                        if not (self.use_user_site and not
                                dist_in_usersite(
                                    req_to_install.satisfied_by
                                )):
                            req_to_install.conflicts_with = \
                                req_to_install.satisfied_by
                        req_to_install.satisfied_by = None
                else:
                    install = False
            if req_to_install.satisfied_by:
                if best_installed:
                    logger.notify('Requirement already up-to-date: %s'
                                  % req_to_install)
                else:
                    logger.notify('Requirement already satisfied '
                                  '(use --upgrade to upgrade): %s'
                                  % req_to_install)

        if req_to_install.editable:
            logger.notify('Obtaining %s' % req_to_install)
        elif install:
            if (req_to_install.url and
                    req_to_install.url.lower().startswith('file:')):
                logger.notify(
                    'Unpacking %s'
                    % display_path(url_to_path(req_to_install.url))
                )
            else:
                logger.notify('Downloading/unpacking %s' % req_to_install)
        logger.indent += 2

        # ################################ #
        # # vcs update or unpack archive # #
        # ################################ #

        try:
            is_bundle = False
            is_wheel = False
            if req_to_install.editable:
                if req_to_install.source_dir is None:
                    location = req_to_install.build_location(self.src_dir)
                    req_to_install.source_dir = location
                else:
                    location = req_to_install.source_dir
                if not os.path.exists(self.build_dir):
                    _make_build_dir(self.build_dir)
                req_to_install.update_editable(not self.is_download)
                if self.is_download:
                    req_to_install.run_egg_info()
                    req_to_install.archive(self.download_dir)
                else:
                    req_to_install.run_egg_info()
            elif install:
                # @@ if filesystem packages are not marked
                # editable in a req, a non deterministic error
                # occurs when the script attempts to unpack the
                # build directory

                # NB: This call can result in the creation of a temporary
                # build directory
                location = req_to_install.build_location(
                    self.build_dir,
                    not self.is_download,
                )
                unpack = True
                url = None

                # In the case where the req comes from a bundle, we should
                # assume a build dir exists and move on
                if req_to_install.from_bundle:
                    pass
                # If a checkout exists, it's unwise to keep going. version
                # inconsistencies are logged later, but do not fail the
                # installation.
                elif os.path.exists(os.path.join(location, 'setup.py')):
                    raise PreviousBuildDirError(
                        "pip can't proceed with requirements '%s' due to a"
                        " pre-existing build directory (%s). This is likely"
                        " due to a previous installation that failed. pip "
                        "is being responsible and not assuming it can "
                        "delete this. Please delete it and try again."
                        % (req_to_install, location)
                    )
                else:
                    # FIXME: this won't upgrade when there's an existing
                    # package unpacked in `location`
                    if req_to_install.url is None:
                        if not_found:
                            raise not_found
                        url = finder.find_requirement(
                            req_to_install,
                            upgrade=self.upgrade,
                        )
                    else:
                        # FIXME: should req_to_install.url already be a
                        # link?
                        url = Link(req_to_install.url)
                        assert url
                    if url:
                        try:
                            if (
                                url.filename.endswith(wheel_ext) and
                                self.wheel_download_dir
                            ):
                                # when doing 'pip wheel'
                                download_dir = self.wheel_download_dir
                                do_download = True
                            else:
                                download_dir = self.download_dir
                                do_download = self.is_download
                            self.unpack_url(
                                url, location, download_dir,
                                do_download,
                            )
                        except HTTPError as exc:
                            logger.fatal(
                                'Could not install requirement %s because '
                                'of error %s' % (req_to_install, exc)
                            )
                            raise InstallationError(
                                'Could not install requirement %s because '
                                'of HTTP error %s for URL %s'
                                % (req_to_install, exc, url)
                            )
                    else:
                        unpack = False
                if unpack:
                    is_bundle = req_to_install.is_bundle
                    is_wheel = url and url.filename.endswith(wheel_ext)
                    if is_bundle:
                        req_to_install.move_bundle_files(
                            self.build_dir,
                            self.src_dir,
                        )
                        for subreq in req_to_install.bundle_requirements():
                            reqs.append(subreq)
                            self.add_requirement(subreq)
                    elif self.is_download:
                        req_to_install.source_dir = location
                        if not is_wheel:
                            # FIXME: https://github.com/pypa/pip/issues/1112
                            req_to_install.run_egg_info()
                        if url and url.scheme in vcs.all_schemes:
                            req_to_install.archive(self.download_dir)
                    elif is_wheel:
                        req_to_install.source_dir = location
                        req_to_install.url = url.url
                    else:
                        req_to_install.source_dir = location
                        req_to_install.run_egg_info()
                        if force_root_egg_info:
                            # We need to run this to make sure that the
                            # .egg-info/ directory is created for packing
                            # in the bundle
                            req_to_install.run_egg_info(
                                force_root_egg_info=True,
                            )
                        req_to_install.assert_source_matches_version()
                        # @@ sketchy way of identifying packages not
                        # grabbed from an index
                        if bundle and req_to_install.url:
                            self.copy_to_build_dir(req_to_install)
                            install = False

            # req_to_install.req is only avail after unpack for URL
            # pkgs repeat check_if_exists to uninstall-on-upgrade
            # (#14)
            if not self.ignore_installed:
                req_to_install.check_if_exists()
            if req_to_install.satisfied_by:
                if self.upgrade or self.ignore_installed:
                    # don't uninstall conflict if user install and
                    # conflict is not user install
                    if not (self.use_user_site and not
                            dist_in_usersite(
                                req_to_install.satisfied_by)):
                        req_to_install.conflicts_with = \
                            req_to_install.satisfied_by
                    req_to_install.satisfied_by = None
                else:
                    logger.notify(
                        'Requirement already satisfied (use '
                        '--upgrade to upgrade): %s' % req_to_install
                    )
                    install = False

            # ###################### #
            # # parse dependencies # #
            # ###################### #

            if is_wheel:
                dist = list(
                    pkg_resources.find_distributions(location)
                )[0]
                if not req_to_install.req:
                    req_to_install.req = dist.as_requirement()
                    self.add_requirement(req_to_install)
                if not self.ignore_dependencies:
                    for subreq in dist.requires(
                            req_to_install.extras):
                        if self.has_requirement(
                                subreq.project_name):
                            continue
                        subreq = InstallRequirement(str(subreq),
                                                    req_to_install)
                        reqs.append(subreq)
                        self.add_requirement(subreq)

            # sdists
            elif not is_bundle:
                if req_to_install.extras:
                    logger.notify(
                        "Installing extra requirements: %r"
                        % ','.join(req_to_install.extras)
                    )
                if not self.ignore_dependencies:
                    for req in req_to_install.requirements(
                            req_to_install.extras):
                        try:
                            name = pkg_resources.Requirement.parse(
                                req
                            ).project_name
                        except ValueError as exc:
                            # FIXME: proper warning
                            logger.error(
                                'Invalid requirement: %r (%s) in '
                                'requirement %s'
                                % (req, exc, req_to_install)
                            )
                            continue
                        if self.has_requirement(name):
                            # FIXME: check for conflict
                            continue
                        subreq = InstallRequirement(req, req_to_install)
                        reqs.append(subreq)
                        self.add_requirement(subreq)

                if not self.has_requirement(req_to_install.name):
                    # 'unnamed' requirements will get added here
                    self.add_requirement(req_to_install)

            # cleanup tmp src
            if not is_bundle:
                if (
                    self.is_download or
                    req_to_install._temp_build_dir is not None
                ):
                    self.reqs_to_cleanup.append(req_to_install)

            if install:
                self.successfully_downloaded.append(req_to_install)
                if (bundle and (
                        req_to_install.url and
                        req_to_install.url.startswith('file:///')
                )):
                    self.copy_to_build_dir(req_to_install)
        finally:
            logger.indent -= 2

def prepare_requirement(self, req_to_install, resolver, requirement_set):
    # ###################### #
    # # print log messages # #
    # ###################### #
    if req_to_install.editable:
        logger.info('Obtaining %s', req_to_install)
    else:
        # satisfied_by is only evaluated by calling _check_skip_installed,
        # so it must be None here.
        assert req_to_install.satisfied_by is None
        if not resolver.ignore_installed:
            skip_reason = resolver._check_skip_installed(req_to_install)

        if req_to_install.satisfied_by:
            assert skip_reason is not None, (
                '_check_skip_installed returned None but '
                'req_to_install.satisfied_by is set to %r'
                % (req_to_install.satisfied_by,))
            logger.info(
                'Requirement %s: %s (%s)', skip_reason,
                req_to_install,
                req_to_install.satisfied_by.version)
        else:
            if (req_to_install.link and
                    req_to_install.link.scheme == 'file'):
                path = url_to_path(req_to_install.link.url)
                logger.info('Processing %s', display_path(path))
            else:
                logger.info('Collecting %s', req_to_install)

    assert resolver.require_hashes is not None, \
        "This should have been set in resolve()"

    with indent_log():
        # ################################ #
        # # vcs update or unpack archive # #
        # ################################ #
        if req_to_install.editable:
            if resolver.require_hashes:
                raise InstallationError(
                    'The editable requirement %s cannot be installed when '
                    'requiring hashes, because there is no single file to '
                    'hash.' % req_to_install)
            req_to_install.ensure_has_source_dir(requirement_set.src_dir)
            req_to_install.update_editable(not requirement_set.is_download)
            abstract_dist = make_abstract_dist(req_to_install)
            abstract_dist.prep_for_dist()
            if requirement_set.is_download:
                req_to_install.archive(requirement_set.download_dir)
            req_to_install.check_if_exists()
        elif req_to_install.satisfied_by:
            if resolver.require_hashes:
                logger.debug(
                    'Since it is already installed, we are trusting this '
                    'package without checking its hash. To ensure a '
                    'completely repeatable environment, install into an '
                    'empty virtualenv.')
            abstract_dist = Installed(req_to_install)
        else:
            # @@ if filesystem packages are not marked
            # editable in a req, a non deterministic error
            # occurs when the script attempts to unpack the
            # build directory
            req_to_install.ensure_has_source_dir(requirement_set.build_dir)
            # If a checkout exists, it's unwise to keep going. version
            # inconsistencies are logged later, but do not fail the
            # installation.
            # FIXME: this won't upgrade when there's an existing
            # package unpacked in `req_to_install.source_dir`
            if os.path.exists(
                    os.path.join(req_to_install.source_dir, 'setup.py')):
                raise PreviousBuildDirError(
                    "pip can't proceed with requirements '%s' due to a"
                    " pre-existing build directory (%s). This is "
                    "likely due to a previous installation that failed"
                    ". pip is being responsible and not assuming it "
                    "can delete this. Please delete it and try again."
                    % (req_to_install, req_to_install.source_dir)
                )
            req_to_install.populate_link(
                resolver.finder,
                resolver._is_upgrade_allowed(req_to_install),
                resolver.require_hashes
            )
            # We can't hit this spot and have populate_link return None.
            # req_to_install.satisfied_by is None here (because we're
            # guarded) and upgrade has no impact except when satisfied_by
            # is not None.
            # Then inside find_requirement existing_applicable -> False
            # If no new versions are found, DistributionNotFound is raised,
            # otherwise a result is guaranteed.
            assert req_to_install.link
            link = req_to_install.link

            # Now that we have the real link, we can tell what kind of
            # requirements we have and raise some more informative errors
            # than otherwise. (For example, we can raise VcsHashUnsupported
            # for a VCS URL rather than HashMissing.)
            if resolver.require_hashes:
                # We could check these first 2 conditions inside
                # unpack_url and save repetition of conditions, but then
                # we would report less-useful error messages for
                # unhashable requirements, complaining that there's no
                # hash provided.
                if is_vcs_url(link):
                    raise VcsHashUnsupported()
                elif is_file_url(link) and is_dir_url(link):
                    raise DirectoryUrlHashUnsupported()
                if (not req_to_install.original_link and
                        not req_to_install.is_pinned):
                    # Unpinned packages are asking for trouble when a new
                    # version is uploaded. This isn't a security check, but
                    # it saves users a surprising hash mismatch in the
                    # future.
                    #
                    # file:/// URLs aren't pinnable, so don't complain
                    # about them not being pinned.
                    raise HashUnpinned()
            hashes = req_to_install.hashes(
                trust_internet=not resolver.require_hashes)
            if resolver.require_hashes and not hashes:
                # Known-good hashes are missing for this requirement, so
                # shim it with a facade object that will provoke hash
                # computation and then raise a HashMissing exception
                # showing the user what the hash should be.
                hashes = MissingHashes()

            try:
                download_dir = requirement_set.download_dir
                # We always delete unpacked sdists after pip ran.
                autodelete_unpacked = True
                if req_to_install.link.is_wheel \
                        and requirement_set.wheel_download_dir:
                    # when doing 'pip wheel' we download wheels to a
                    # dedicated dir.
                    download_dir = requirement_set.wheel_download_dir
                if req_to_install.link.is_wheel:
                    if download_dir:
                        # When downloading, we only unpack wheels to get
                        # metadata.
                        autodelete_unpacked = True
                    else:
                        # When installing a wheel, we use the unpacked
                        # wheel.
                        autodelete_unpacked = False

                unpack_url(
                    req_to_install.link, req_to_install.source_dir,
                    download_dir, autodelete_unpacked,
                    session=resolver.session, hashes=hashes,
                    progress_bar=requirement_set.progress_bar)
            except requests.HTTPError as exc:
                logger.critical(
                    'Could not install requirement %s because '
                    'of error %s',
                    req_to_install,
                    exc,
                )
                raise InstallationError(
                    'Could not install requirement %s because '
                    'of HTTP error %s for URL %s' %
                    (req_to_install, exc, req_to_install.link)
                )
            abstract_dist = make_abstract_dist(req_to_install)
            abstract_dist.prep_for_dist()
            if requirement_set.is_download:
                # Make a .zip of the source_dir we already created.
                if req_to_install.link.scheme in vcs.all_schemes:
                    req_to_install.archive(requirement_set.download_dir)
            # req_to_install.req is only avail after unpack for URL
            # pkgs repeat check_if_exists to uninstall-on-upgrade
            # (#14)
            if not resolver.ignore_installed:
                req_to_install.check_if_exists()
            if req_to_install.satisfied_by:
                should_modify = (
                    resolver.upgrade_strategy != "to-satisfy-only" or
                    resolver.ignore_installed
                )
                if should_modify:
                    # don't uninstall conflict if user install and
                    # conflict is not user install
                    if not (resolver.use_user_site and not
                            dist_in_usersite(req_to_install.satisfied_by)):
                        req_to_install.conflicts_with = \
                            req_to_install.satisfied_by
                    req_to_install.satisfied_by = None
                else:
                    logger.info(
                        'Requirement already satisfied (use '
                        '--upgrade to upgrade): %s',
                        req_to_install,
                    )
    return abstract_dist