def _get_linked_req_hashes(self, req: InstallRequirement) -> Hashes:
    # By the time this is called, the requirement's link should have
    # been checked so we can tell what kind of requirement req is
    # and raise some more informative errors than otherwise.
    # (For example, we can raise VcsHashUnsupported for a VCS URL
    # rather than HashMissing.)
    if not self.require_hashes:
        return req.hashes(trust_internet=True)

    # We could check these first 2 conditions inside unpack_url
    # and save repetition of conditions, but then we would
    # report less-useful error messages for unhashable
    # requirements, complaining that there's no hash provided.
    if req.link.is_vcs:
        raise VcsHashUnsupported()
    if req.link.is_existing_dir():
        raise DirectoryUrlHashUnsupported()

    # Unpinned packages are asking for trouble when a new version
    # is uploaded.  This isn't a security check, but it saves users
    # a surprising hash mismatch in the future.
    # file:/// URLs aren't pinnable, so don't complain about them
    # not being pinned.
    if req.original_link is None and not req.is_pinned:
        raise HashUnpinned()

    # If known-good hashes are missing for this requirement,
    # shim it with a facade object that will provoke hash
    # computation and then raise a HashMissing exception
    # showing the user what the hash should be.
    return req.hashes(trust_internet=False) or MissingHashes()
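
# To make the "facade object" trick above concrete, here is a minimal
# sketch of the idea, assuming a simplified Hashes-like interface.  The
# names below (SketchMissingHashes, SketchHashMissing, check_against_path)
# are hypothetical stand-ins, not pip's actual MissingHashes/HashMissing:
# the facade accepts the file, computes its digest anyway, and then fails
# with a message showing what the hash should have been.
import hashlib


class SketchHashMissing(Exception):
    """Raised after hashing a file for which no expected hash was given."""

    def __init__(self, gotten_hash):
        super().__init__(
            'no expected hash was provided; the computed sha256 is '
            + gotten_hash
        )


class SketchMissingHashes:
    """Facade that looks like an allowed-hash set but can never match."""

    def check_against_path(self, path):
        # Provoke the hash computation first ...
        digest = hashlib.sha256()
        with open(path, 'rb') as f:
            for block in iter(lambda: f.read(8192), b''):
                digest.update(block)
        # ... then raise, surfacing the digest the user should pin.
        raise SketchHashMissing(digest.hexdigest())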
def prepare_linked_requirement(self, req, session, finder,
                               upgrade_allowed, require_hashes):
    """Prepare a requirement that would be obtained from req.link
    """
    # TODO: Break up into smaller functions
    if req.link and req.link.scheme == 'file':
        path = url_to_path(req.link.url)
        logger.info('Processing %s', display_path(path))
    else:
        logger.info('Collecting %s', req)

    with indent_log():
        # @@ if filesystem packages are not marked
        # editable in a req, a non-deterministic error
        # occurs when the script attempts to unpack the
        # build directory
        req.ensure_has_source_dir(self.build_dir)
        # If a checkout exists, it's unwise to keep going.  Version
        # inconsistencies are logged later, but do not fail the
        # installation.
        # FIXME: this won't upgrade when there's an existing
        # package unpacked in `req.source_dir`
        if os.path.exists(os.path.join(req.source_dir, 'setup.py')):
            rmtree(req.source_dir)

        req.populate_link(finder, upgrade_allowed, require_hashes)

        # We can't hit this spot and have populate_link return None.
        # req.satisfied_by is None here (because we're
        # guarded) and upgrade has no impact except when satisfied_by
        # is not None.
        # Then inside find_requirement existing_applicable -> False
        # If no new versions are found, DistributionNotFound is raised,
        # otherwise a result is guaranteed.
        assert req.link
        link = req.link

        # Now that we have the real link, we can tell what kind of
        # requirements we have and raise some more informative errors
        # than otherwise. (For example, we can raise VcsHashUnsupported
        # for a VCS URL rather than HashMissing.)
        if require_hashes:
            # We could check these first 2 conditions inside
            # unpack_url and save repetition of conditions, but then
            # we would report less-useful error messages for
            # unhashable requirements, complaining that there's no
            # hash provided.
            if is_vcs_url(link):
                raise VcsHashUnsupported()
            elif is_file_url(link) and is_dir_url(link):
                raise DirectoryUrlHashUnsupported()
            if not req.original_link and not req.is_pinned:
                # Unpinned packages are asking for trouble when a new
                # version is uploaded.  This isn't a security check, but
                # it saves users a surprising hash mismatch in the
                # future.
                #
                # file:/// URLs aren't pinnable, so don't complain
                # about them not being pinned.
                raise HashUnpinned()

        hashes = req.hashes(trust_internet=not require_hashes)
        if require_hashes and not hashes:
            # Known-good hashes are missing for this requirement, so
            # shim it with a facade object that will provoke hash
            # computation and then raise a HashMissing exception
            # showing the user what the hash should be.
            hashes = MissingHashes()

        try:
            download_dir = self.download_dir
            # We always delete unpacked sdists after pip runs.
            autodelete_unpacked = True
            if req.link.is_wheel and self.wheel_download_dir:
                # When doing 'pip wheel' we download wheels to a
                # dedicated dir.
                download_dir = self.wheel_download_dir
            if req.link.is_wheel:
                if download_dir:
                    # When downloading, we only unpack wheels to get
                    # metadata.
                    autodelete_unpacked = True
                else:
                    # When installing a wheel, we use the unpacked
                    # wheel.
                    autodelete_unpacked = False
            unpack_url(
                req.link, req.source_dir,
                download_dir, autodelete_unpacked,
                session=session, hashes=hashes,
                progress_bar=self.progress_bar,
            )
        except requests.HTTPError as exc:
            logger.critical(
                'Could not install requirement %s because of error %s',
                req, exc,
            )
            raise InstallationError(
                'Could not install requirement %s because of HTTP '
                'error %s for URL %s' % (req, exc, req.link)
            )

        abstract_dist = make_abstract_dist(req)
        with self.req_tracker.track(req):
            abstract_dist.prep_for_dist(finder, self.build_isolation)
        if self._download_should_save:
            # Make a .zip of the source_dir we already created.
            if req.link.scheme in vcs.all_schemes:
                req.archive(self.download_dir)
    return abstract_dist
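
# For context, the `hashes` object built above is what unpack_url uses
# to verify the downloaded archive.  The helper below is a rough,
# self-contained sketch of that check under simplified assumptions (a
# plain dict of allowed digests rather than pip's Hashes class);
# sketch_file_matches_hashes is a hypothetical name, not a pip API.
import hashlib


def sketch_file_matches_hashes(path, allowed):
    """Return True if the file's digest matches any allowed hex digest.

    `allowed` maps an algorithm name to acceptable hex digests, e.g.
    {'sha256': ['97a702...', 'e3b0c4...']}.
    """
    for algorithm, digests in allowed.items():
        hasher = hashlib.new(algorithm)
        with open(path, 'rb') as f:
            for block in iter(lambda: f.read(8192), b''):
                hasher.update(block)
        if hasher.hexdigest() in digests:
            return True
    return False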
def prepare_linked_requirement(
    self,
    req,  # type: InstallRequirement
):
    # type: (...) -> AbstractDistribution
    """Prepare a requirement that would be obtained from req.link
    """
    assert req.link
    link = req.link

    # TODO: Break up into smaller functions
    if link.scheme == 'file':
        path = link.file_path
        logger.info('Processing %s', display_path(path))
    else:
        logger.info('Collecting %s', req.req or req)

    with indent_log():
        # @@ if filesystem packages are not marked
        # editable in a req, a non-deterministic error
        # occurs when the script attempts to unpack the
        # build directory
        # source_dir is only set for editable requirements, so it
        # must not be set yet.
        assert req.source_dir is None
        req.ensure_has_source_dir(self.build_dir)
        # If a checkout exists, it's unwise to keep going.  Version
        # inconsistencies are logged later, but do not fail the
        # installation.
        # FIXME: this won't upgrade when there's an existing
        # package unpacked in `req.source_dir`
        if os.path.exists(os.path.join(req.source_dir, 'setup.py')):
            rmtree(req.source_dir)

        # Now that we have the real link, we can tell what kind of
        # requirements we have and raise some more informative errors
        # than otherwise. (For example, we can raise VcsHashUnsupported
        # for a VCS URL rather than HashMissing.)
        if self.require_hashes:
            # We could check these first 2 conditions inside
            # unpack_url and save repetition of conditions, but then
            # we would report less-useful error messages for
            # unhashable requirements, complaining that there's no
            # hash provided.
            if link.is_vcs:
                raise VcsHashUnsupported()
            elif link.is_existing_dir():
                raise DirectoryUrlHashUnsupported()
            if not req.original_link and not req.is_pinned:
                # Unpinned packages are asking for trouble when a new
                # version is uploaded.  This isn't a security check, but
                # it saves users a surprising hash mismatch in the
                # future.
                #
                # file:/// URLs aren't pinnable, so don't complain
                # about them not being pinned.
                raise HashUnpinned()

        hashes = req.hashes(trust_internet=not self.require_hashes)
        if self.require_hashes and not hashes:
            # Known-good hashes are missing for this requirement, so
            # shim it with a facade object that will provoke hash
            # computation and then raise a HashMissing exception
            # showing the user what the hash should be.
            hashes = MissingHashes()

        download_dir = self.download_dir
        if link.is_wheel and self.wheel_download_dir:
            # When doing 'pip wheel' we download wheels to a
            # dedicated dir.
            download_dir = self.wheel_download_dir

        try:
            local_path = unpack_url(
                link, req.source_dir, self.downloader, download_dir,
                hashes=hashes,
            )
        except requests.HTTPError as exc:
            logger.critical(
                'Could not install requirement %s because of error %s',
                req, exc,
            )
            raise InstallationError(
                'Could not install requirement {} because of HTTP '
                'error {} for URL {}'.format(req, exc, link)
            )

        # For use in later processing, preserve the file path on the
        # requirement.
        if local_path:
            req.local_file_path = local_path

        if link.is_wheel:
            if download_dir:
                # When downloading, we only unpack wheels to get
                # metadata.
                autodelete_unpacked = True
            else:
                # When installing a wheel, we use the unpacked
                # wheel.
                autodelete_unpacked = False
        else:
            # We always delete unpacked sdists after pip runs.
            autodelete_unpacked = True
        if autodelete_unpacked:
            write_delete_marker_file(req.source_dir)

        abstract_dist = _get_prepared_distribution(
            req, self.req_tracker, self.finder, self.build_isolation,
        )

        if download_dir:
            if link.is_existing_dir():
                logger.info('Link is a directory, ignoring download_dir')
            elif local_path and not os.path.exists(
                os.path.join(download_dir, link.filename)
            ):
                _copy_file(local_path, download_dir, link)

        if self._download_should_save:
            # Make a .zip of the source_dir we already created.
            if link.is_vcs:
                req.archive(self.download_dir)
    return abstract_dist
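
# The wheel/sdist cleanup branches above can be read as a small pure
# function.  This restatement is only illustrative (the name is
# hypothetical, and pip keeps the logic inline as shown above):
# downloaded wheels are unpacked just to read metadata and can be
# deleted, installed wheels are used from the unpacked tree, and
# unpacked sdists are always deleted after pip runs.
def sketch_should_autodelete_unpacked(is_wheel, download_dir):
    if is_wheel:
        # Unpacked only for metadata when downloading; kept when
        # installing straight from the unpacked wheel.
        return download_dir is not None
    # Unpacked sdists are always cleaned up.
    return True


assert sketch_should_autodelete_unpacked(True, '/tmp/wheels')
assert not sketch_should_autodelete_unpacked(True, None)
assert sketch_should_autodelete_unpacked(False, None)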