def add_req(subreq, extras_requested):
    sub_install_req = InstallRequirement(
        str(subreq),
        req_to_install,
        isolated=self.isolated,
        wheel_cache=self._wheel_cache,
    )
    more_reqs.extend(self.add_requirement(
        sub_install_req, req_to_install.name,
        extras_requested=extras_requested))
def gen(ireq):
    if self.DEFAULT_INDEX_URL in self.finder.index_urls:
        url = 'https://pypi.org/pypi/{0}/json'.format(ireq.req.name)
        r = self.session.get(url)

        # TODO: Latest isn't always latest.
        latest = list(r.json()['releases'].keys())[-1]
        if str(ireq.req.specifier) == '=={0}'.format(latest):
            for requires in r.json().get('info', {}).get('requires_dist', {}):
                i = InstallRequirement.from_line(requires)

                if 'extra' not in repr(i.markers):
                    yield i
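# For context only: a rough, standalone sketch of the PyPI JSON payload that
# gen() above relies on. The package name is an arbitrary example, not part of
# the code above; 'requires_dist' can be null, and the keys of 'releases' are
# not guaranteed to be in version order (hence the "Latest isn't always
# latest" TODO).
import requests

resp = requests.get('https://pypi.org/pypi/requests/json')
data = resp.json()
print(list(data['releases'])[-1])         # last key, not necessarily the newest release
print(data['info'].get('requires_dist'))  # e.g. ['chardet (<3.1.0,>=3.0.2)', ...] or None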
def get_legacy_dependencies(self, ireq):
    """
    Given a pinned or an editable InstallRequirement, returns a set of
    dependencies (also InstallRequirements, but not necessarily pinned).
    They indicate the secondary dependencies for the given requirement.
    """
    if not (ireq.editable or is_pinned_requirement(ireq)):
        raise TypeError(
            'Expected pinned or editable InstallRequirement, got {}'.format(ireq))

    # Collect setup_requires info from local eggs.
    setup_requires = {}
    if ireq.editable:
        try:
            dist = ireq.get_dist()
            if dist.has_metadata('requires.txt'):
                setup_requires = self.finder.get_extras_links(
                    dist.get_metadata_lines('requires.txt')
                )
        except TypeError:
            pass

    if ireq not in self._dependencies_cache:
        if ireq.link and not ireq.link.is_artifact:
            # No download_dir for VCS sources. This also works around pip
            # using git-checkout-index, which gets rid of the .git dir.
            download_dir = None
        else:
            download_dir = self._download_dir
            if not os.path.isdir(download_dir):
                os.makedirs(download_dir)
        if not os.path.isdir(self._wheel_download_dir):
            os.makedirs(self._wheel_download_dir)

        reqset = RequirementSet(
            self.build_dir,
            self.source_dir,
            download_dir=download_dir,
            wheel_download_dir=self._wheel_download_dir,
            session=self.session,
            ignore_installed=True,
            ignore_compatibility=False
        )
        result = reqset._prepare_file(self.finder, ireq,
                                      ignore_requires_python=True)

        # Convert setup_requires dict into a somewhat usable form.
        if setup_requires:
            for section in setup_requires:
                python_version = section
                not_python = not (section.startswith('[') and ':' in section)

                for value in setup_requires[section]:
                    # This is a marker.
                    if value.startswith('[') and ':' in value:
                        python_version = value[1:-1]
                        not_python = False
                    # Strip out other extras.
                    if value.startswith('[') and ':' not in value:
                        not_python = True

                    if ':' not in value:
                        try:
                            if not not_python:
                                result = result + [InstallRequirement.from_line(
                                    "{0}{1}".format(value, python_version).replace(':', ';'))]
                        # Anything could go wrong here; can't be too careful.
                        except Exception:
                            pass

        if reqset.requires_python:
            marker = 'python_version=="{0}"'.format(
                reqset.requires_python.replace(' ', ''))
            new_req = InstallRequirement.from_line(
                '{0}; {1}'.format(str(ireq.req), marker))
            result = [new_req]

        self._dependencies_cache[ireq] = result
    return set(self._dependencies_cache[ireq])
def get_legacy_dependencies(self, ireq):
    """
    Given a pinned or an editable InstallRequirement, returns a set of
    dependencies (also InstallRequirements, but not necessarily pinned).
    They indicate the secondary dependencies for the given requirement.
    """
    if not (ireq.editable or is_pinned_requirement(ireq)):
        raise TypeError(
            'Expected pinned or editable InstallRequirement, got {}'.format(ireq))

    setup_requires = {}
    # if ireq.editable:
    #     dist = ireq.get_dist()
    #     if dist.has_metadata('requires.txt'):
    #         setup_requires = self.finder.get_extras_links(
    #             dist.get_metadata_lines('requires.txt')
    #         )

    if ireq not in self._dependencies_cache:
        if ireq.link and not ireq.link.is_artifact:
            # No download_dir for VCS sources. This also works around pip
            # using git-checkout-index, which gets rid of the .git dir.
            download_dir = None
        else:
            download_dir = self._download_dir
            if not os.path.isdir(download_dir):
                os.makedirs(download_dir)
        if not os.path.isdir(self._wheel_download_dir):
            os.makedirs(self._wheel_download_dir)

        reqset = RequirementSet(
            self.build_dir,
            self.source_dir,
            download_dir=download_dir,
            wheel_download_dir=self._wheel_download_dir,
            session=self.session,
            ignore_installed=True,
            ignore_compatibility=True)
        result = reqset._prepare_file(self.finder, ireq,
                                      ignore_requires_python=True)

        # if setup_requires:
        #     for section in setup_requires:
        #         python_version = section
        #         for value in setup_requires[section]:
        #             if ':' in value:
        #                 python_version = value[1:-1]
        #             else:
        #                 result = result + [InstallRequirement.from_line(
        #                     "{0}{1}".format(value, python_version).replace(':', ';'))]

        if reqset.requires_python:
            marker = 'python_version=="{0}"'.format(
                reqset.requires_python.replace(' ', ''))
            new_req = InstallRequirement.from_line(
                '{0}; {1}'.format(str(ireq.req), marker))
            result = [new_req]

        self._dependencies_cache[ireq] = result
    return set(self._dependencies_cache[ireq])
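# A minimal usage sketch for get_legacy_dependencies(), assuming `repository`
# is an already-constructed object exposing the method above (e.g. a
# pip-tools-style PyPIRepository). The construction of that object and the
# pinned requirement below are illustrative assumptions, not part of the code
# above; the pip.req import path matches the pip 9.x internals this code
# targets.
from pip.req import InstallRequirement

ireq = InstallRequirement.from_line('requests==2.18.4')  # must be pinned (or editable)
for dep in repository.get_legacy_dependencies(ireq):
    print(dep)  # secondary dependencies, not necessarily pinned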
def _prepare_file(self,
                  finder,
                  req_to_install,
                  require_hashes=False,
                  ignore_dependencies=False,
                  ignore_requires_python=False):
    """Prepare a single requirements file.

    :return: A list of additional InstallRequirements to also install.
    """
    # Tell user what we are doing for this requirement:
    # obtain (editable), skipping, processing (local url), collecting
    # (remote url or package name)

    if ignore_requires_python:
        self.ignore_compatibility = True

    if req_to_install.constraint or req_to_install.prepared:
        return []

    req_to_install.prepared = True

    # ###################### #
    # # print log messages # #
    # ###################### #
    if req_to_install.editable:
        logger.info('Obtaining %s', req_to_install)
    else:
        # satisfied_by is only evaluated by calling _check_skip_installed,
        # so it must be None here.
        assert req_to_install.satisfied_by is None
        if not self.ignore_installed:
            skip_reason = self._check_skip_installed(
                req_to_install, finder)

        if req_to_install.satisfied_by:
            assert skip_reason is not None, (
                '_check_skip_installed returned None but '
                'req_to_install.satisfied_by is set to %r'
                % (req_to_install.satisfied_by,))
            logger.info(
                'Requirement %s: %s', skip_reason,
                req_to_install)
        else:
            if (req_to_install.link and
                    req_to_install.link.scheme == 'file'):
                path = url_to_path(req_to_install.link.url)
                logger.info('Processing %s', display_path(path))
            else:
                logger.info('Collecting %s', req_to_install)

    with indent_log():
        # ################################ #
        # # vcs update or unpack archive # #
        # ################################ #
        if req_to_install.editable:
            if require_hashes:
                raise InstallationError(
                    'The editable requirement %s cannot be installed when '
                    'requiring hashes, because there is no single file to '
                    'hash.' % req_to_install)
            req_to_install.ensure_has_source_dir(self.src_dir)
            req_to_install.update_editable(not self.is_download)
            abstract_dist = make_abstract_dist(req_to_install)
            abstract_dist.prep_for_dist()
            if self.is_download:
                req_to_install.archive(self.download_dir)
            req_to_install.check_if_exists()
        elif req_to_install.satisfied_by:
            if require_hashes:
                logger.debug(
                    'Since it is already installed, we are trusting this '
                    'package without checking its hash. To ensure a '
                    'completely repeatable environment, install into an '
                    'empty virtualenv.')
            abstract_dist = Installed(req_to_install)
        else:
            # @@ if filesystem packages are not marked
            # editable in a req, a non deterministic error
            # occurs when the script attempts to unpack the
            # build directory
            req_to_install.ensure_has_source_dir(self.build_dir)
            # If a checkout exists, it's unwise to keep going.  version
            # inconsistencies are logged later, but do not fail the
            # installation.
            # FIXME: this won't upgrade when there's an existing
            # package unpacked in `req_to_install.source_dir`
            if os.path.exists(
                    os.path.join(req_to_install.source_dir, 'setup.py')):
                raise PreviousBuildDirError(
                    "pip can't proceed with requirements '%s' due to a"
                    " pre-existing build directory (%s). This is "
                    "likely due to a previous installation that failed"
                    ". pip is being responsible and not assuming it "
                    "can delete this. Please delete it and try again."
                    % (req_to_install, req_to_install.source_dir)
                )
            req_to_install.populate_link(
                finder,
                self._is_upgrade_allowed(req_to_install),
                require_hashes
            )
            # We can't hit this spot and have populate_link return None.
            # req_to_install.satisfied_by is None here (because we're
            # guarded) and upgrade has no impact except when satisfied_by
            # is not None.
            # Then inside find_requirement existing_applicable -> False
            # If no new versions are found, DistributionNotFound is
            # raised, otherwise a result is guaranteed.
            assert req_to_install.link
            link = req_to_install.link

            # Now that we have the real link, we can tell what kind of
            # requirements we have and raise some more informative errors
            # than otherwise. (For example, we can raise VcsHashUnsupported
            # for a VCS URL rather than HashMissing.)
            if require_hashes:
                # We could check these first 2 conditions inside
                # unpack_url and save repetition of conditions, but then
                # we would report less-useful error messages for
                # unhashable requirements, complaining that there's no
                # hash provided.
                if is_vcs_url(link):
                    raise VcsHashUnsupported()
                elif is_file_url(link) and is_dir_url(link):
                    raise DirectoryUrlHashUnsupported()
                if (not req_to_install.original_link and
                        not req_to_install.is_pinned):
                    # Unpinned packages are asking for trouble when a new
                    # version is uploaded. This isn't a security check, but
                    # it saves users a surprising hash mismatch in the
                    # future.
                    #
                    # file:/// URLs aren't pinnable, so don't complain
                    # about them not being pinned.
                    raise HashUnpinned()
            hashes = req_to_install.hashes(
                trust_internet=not require_hashes)
            if require_hashes and not hashes:
                # Known-good hashes are missing for this requirement, so
                # shim it with a facade object that will provoke hash
                # computation and then raise a HashMissing exception
                # showing the user what the hash should be.
                hashes = MissingHashes()

            try:
                download_dir = self.download_dir
                # We always delete unpacked sdists after pip ran.
                autodelete_unpacked = True
                if req_to_install.link.is_wheel \
                        and self.wheel_download_dir:
                    # when doing 'pip wheel' we download wheels to a
                    # dedicated dir.
                    download_dir = self.wheel_download_dir
                if req_to_install.link.is_wheel:
                    if download_dir:
                        # When downloading, we only unpack wheels to get
                        # metadata.
                        autodelete_unpacked = True
                    else:
                        # When installing a wheel, we use the unpacked
                        # wheel.
                        autodelete_unpacked = False
                unpack_url(
                    req_to_install.link, req_to_install.source_dir,
                    download_dir, autodelete_unpacked,
                    session=self.session, hashes=hashes)
            except requests.HTTPError as exc:
                logger.critical(
                    'Could not install requirement %s because '
                    'of error %s',
                    req_to_install,
                    exc,
                )
                raise InstallationError(
                    'Could not install requirement %s because '
                    'of HTTP error %s for URL %s' %
                    (req_to_install, exc, req_to_install.link)
                )
            abstract_dist = make_abstract_dist(req_to_install)
            abstract_dist.prep_for_dist()
            if self.is_download:
                # Make a .zip of the source_dir we already created.
                if req_to_install.link.scheme in vcs.all_schemes:
                    req_to_install.archive(self.download_dir)
            # req_to_install.req is only avail after unpack for URL
            # pkgs repeat check_if_exists to uninstall-on-upgrade
            # (#14)
            if not self.ignore_installed:
                req_to_install.check_if_exists()
            if req_to_install.satisfied_by:
                if self.upgrade or self.ignore_installed:
                    # don't uninstall conflict if user install and
                    # conflict is not user install
                    if not (self.use_user_site and not
                            dist_in_usersite(
                                req_to_install.satisfied_by)):
                        req_to_install.conflicts_with = \
                            req_to_install.satisfied_by
                    req_to_install.satisfied_by = None
                else:
                    logger.info(
                        'Requirement already satisfied (use '
                        '--upgrade to upgrade): %s',
                        req_to_install,
                    )

        # ###################### #
        # # parse dependencies # #
        # ###################### #
        dist = abstract_dist.dist(finder)
        more_reqs = []

        def add_req(subreq, extras_requested):
            sub_install_req = InstallRequirement(
                str(subreq),
                req_to_install,
                isolated=self.isolated,
                wheel_cache=self._wheel_cache,
            )
            more_reqs.extend(self.add_requirement(
                sub_install_req, req_to_install.name,
                extras_requested=extras_requested))

        try:
            check_dist_requires_python(dist)
        except (UnsupportedPythonVersion, TypeError) as e:
            if self.ignore_compatibility:
                logger.warning(e.args[0])
            else:
                req_to_install.remove_temporary_source()
                raise

        # A huge hack, by Kenneth Reitz.
        try:
            self.requires_python = check_dist_requires_python(dist, absorb=False)
        except TypeError:
            self.requires_python = None

        # We add req_to_install before its dependencies, so that we
        # can refer to it when adding dependencies.
        if not self.has_requirement(req_to_install.name):
            available_requested = sorted(
                set(dist.extras) & set(req_to_install.extras)
            )
            # 'unnamed' requirements will get added here
            self.add_requirement(req_to_install, None,
                                 extras_requested=available_requested)
            # self.add_requirement(req_to_install)

        if not ignore_dependencies:
            if req_to_install.extras:
                logger.debug(
                    "Installing extra requirements: %r",
                    ','.join(req_to_install.extras),
                )
            missing_requested = sorted(
                set(req_to_install.extras) - set(dist.extras)
            )
            for missing in missing_requested:
                logger.warning(
                    "%s does not provide the extra '%s'",
                    dist, missing
                )
            available_requested = sorted(
                set(dist.extras) & set(req_to_install.extras)
            )
            for subreq in dist.requires(available_requested):
                add_req(subreq, extras_requested=available_requested)

            # Hack for deep-resolving extras.
            for available in available_requested:
                if hasattr(dist, '_DistInfoDistribution__dep_map'):
                    for req in dist._DistInfoDistribution__dep_map[available]:
                        req = InstallRequirement(
                            str(req),
                            req_to_install,
                            isolated=self.isolated,
                            wheel_cache=self._wheel_cache,
                        )
                        more_reqs.append(req)

        # cleanup tmp src
        self.reqs_to_cleanup.append(req_to_install)

        if not req_to_install.editable and not req_to_install.satisfied_by:
            # XXX: --no-install leads this to report 'Successfully
            # downloaded' for only non-editable reqs, even though we took
            # action on them.
            self.successfully_downloaded.append(req_to_install)

    return more_reqs
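# Illustration of the "deep-resolving extras" hack near the end of
# _prepare_file() above: pkg_resources' DistInfoDistribution caches a private
# dependency map keyed by extra name (None for the base requirements),
# reachable only through its name-mangled attribute once requires() has been
# called. This sketch just inspects that map for an installed package; the
# package name is an arbitrary example and the attribute is a private,
# version-dependent detail rather than a supported API.
import pkg_resources

dist = pkg_resources.get_distribution('requests')
dist.requires()  # force the dependency map to be computed and cached
dep_map = getattr(dist, '_DistInfoDistribution__dep_map', {})
for extra, reqs in dep_map.items():
    print(extra, [str(r) for r in reqs])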