def __init__(self, find_links, index_urls, use_wheel=True,
             allow_external=[], allow_unverified=[],
             allow_all_external=False, allow_all_unverified=False,
             allow_all_prereleases=False, process_dependency_links=False,
             session=None):
    self.find_links = find_links
    self.index_urls = index_urls
    self.dependency_links = []
    self.cache = PageCache()

    # These are boring links that have already been logged somehow:
    self.logged_links = set()

    self.use_wheel = use_wheel

    # Do we allow (safe and verifiable) externally hosted files?
    self.allow_external = set(normalize_name(n) for n in allow_external)

    # Which names are allowed to install insecure and unverifiable files?
    self.allow_unverified = set(
        normalize_name(n) for n in allow_unverified
    )

    # Anything that is allowed unverified is also allowed external
    self.allow_external |= self.allow_unverified

    # Do we allow all (safe and verifiable) externally hosted files?
    self.allow_all_external = allow_all_external

    # Do we allow all insecure and unverifiable files?
    self.allow_all_unverified = allow_all_unverified

    # Stores if we ignored any external links so that we can instruct
    # end users how to install them if no distributions are available
    self.need_warn_external = False

    # Stores if we ignored any unsafe links so that we can instruct
    # end users how to install them if no distributions are available
    self.need_warn_unverified = False

    # Do we want to allow _all_ pre-releases?
    self.allow_all_prereleases = allow_all_prereleases

    # Do we process dependency links?
    self.process_dependency_links = process_dependency_links
    self._have_warned_dependency_links = False

    # The Session we'll use to make requests
    self.session = session or PipSession()
def _get_queued_page(self, req, pending_queue, done, seen):
    while True:
        try:
            location = pending_queue.get(False)
        except QueueEmpty:
            return
        if location in seen:
            continue
        seen.add(location)
        page = self._get_page(location, req)
        if page is None:
            continue
        done.append(page)
        for link in page.rel_links():
            normalized = normalize_name(req.name).lower()

            if (normalized not in self.allow_external
                    and not self.allow_all_external):
                self.need_warn_external = True
                logger.debug("Not searching %s for files because external "
                             "urls are disallowed." % link)
                continue

            if (link.trusted is not None
                    and not link.trusted
                    and normalized not in self.allow_insecure
                    and not self.allow_all_insecure):  # TODO: Remove after release
                logger.debug("Not searching %s for urls, it is an "
                             "untrusted link and cannot produce safe or "
                             "verifiable files." % link)
                self.need_warn_insecure = True
                continue

            pending_queue.put(link)
def __init__(self, find_links, index_urls,
             use_mirrors=False, mirrors=None, main_mirror_url=None,
             use_wheel=False, allow_external=[], allow_insecure=[],
             allow_all_external=False, allow_all_insecure=False,
             allow_all_prereleases=False):
    self.find_links = find_links
    self.index_urls = index_urls
    self.dependency_links = []
    self.cache = PageCache()

    # These are boring links that have already been logged somehow:
    self.logged_links = set()

    if use_mirrors:
        self.mirror_urls = self._get_mirror_urls(mirrors, main_mirror_url)
        logger.info('Using PyPI mirrors: %s' % ', '.join(self.mirror_urls))
    else:
        self.mirror_urls = []

    self.use_wheel = use_wheel

    # Do we allow (safe and verifiable) externally hosted files?
    self.allow_external = set(normalize_name(n) for n in allow_external)

    # Which names are allowed to install insecure and unverifiable files?
    self.allow_insecure = set(normalize_name(n) for n in allow_insecure)

    # Do we allow all (safe and verifiable) externally hosted files?
    self.allow_all_external = allow_all_external

    # Do we allow unsafe and unverifiable files?
    self.allow_all_insecure = allow_all_insecure

    # Stores if we ignored any external links so that we can instruct
    # end users how to install them if no distributions are available
    self.need_warn_external = False

    # Stores if we ignored any unsafe links so that we can instruct
    # end users how to install them if no distributions are available
    self.need_warn_insecure = False

    # Do we want to allow _all_ pre-releases?
    self.allow_all_prereleases = allow_all_prereleases
def __init__(self, find_links, index_urls, use_wheel=True,
             allow_external=[], allow_unverified=[],
             allow_all_external=False, allow_all_prereleases=False,
             session=None):
    if session is None:
        raise TypeError(
            "PackageFinder() missing 1 required keyword argument: "
            "'session'"
        )

    self.find_links = find_links
    self.index_urls = index_urls

    # These are boring links that have already been logged somehow:
    self.logged_links = set()

    self.use_wheel = use_wheel

    # Do we allow (safe and verifiable) externally hosted files?
    self.allow_external = set(normalize_name(n) for n in allow_external)

    # Which names are allowed to install insecure and unverifiable files?
    self.allow_unverified = set(
        normalize_name(n) for n in allow_unverified
    )

    # Anything that is allowed unverified is also allowed external
    self.allow_external |= self.allow_unverified

    # Do we allow all (safe and verifiable) externally hosted files?
    self.allow_all_external = allow_all_external

    # Stores if we ignored any external links so that we can instruct
    # end users how to install them if no distributions are available
    self.need_warn_external = False

    # Stores if we ignored any unsafe links so that we can instruct
    # end users how to install them if no distributions are available
    self.need_warn_unverified = False

    # Do we want to allow _all_ pre-releases?
    self.allow_all_prereleases = allow_all_prereleases

    # The Session we'll use to make requests
    self.session = session
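
# Usage sketch for the session-requiring constructor above. This is an
# illustration, not pip's documented API: the import paths are assumptions
# matching pip 1.5-era internals (PackageFinder in pip.index, PipSession in
# pip.download), and the index URL is only an example.
from pip.download import PipSession
from pip.index import PackageFinder

finder = PackageFinder(
    find_links=[],
    index_urls=["https://pypi.python.org/simple/"],
    session=PipSession(),  # required here; session=None raises TypeError
)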
def _find_url_name(self, index_url, url_name, req):
    """Finds the true URL name of a package, when the given name isn't
    quite correct.

    This is usually used to implement case-insensitivity.
    """
    if not index_url.url.endswith('/'):
        # Vaguely part of the PyPI API... weird but true.
        # FIXME: bad to modify this?
        index_url.url += '/'
    page = self._get_page(index_url, req)
    if page is None:
        logger.fatal('Cannot fetch index base URL %s' % index_url)
        return
    norm_name = normalize_name(req.url_name)
    for link in page.links:
        base = posixpath.basename(link.path.rstrip('/'))
        if norm_name == normalize_name(base):
            logger.notify(
                'Real name of requirement %s is %s' % (url_name, base)
            )
            return base
    return None
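
# To illustrate the case-insensitive match above: normalize_name (assumed
# here to come from pip.util) reduces names to a canonical form so that
# differently spelled project names compare equal. A hypothetical stand-in
# with the behavior assumed in this file:
def _normalize_name_sketch(name):
    # Lowercase and fold '_' to '-', e.g. 'Django_REST' -> 'django-rest'.
    return name.replace('_', '-').lower()

assert _normalize_name_sketch('Django_REST') == _normalize_name_sketch('django-rest')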
def _get_pages(self, locations, req):
    """
    Yields (page, page_url) from the given locations, skipping
    locations that have errors, and adding download/homepage links
    """
    all_locations = list(locations)
    seen = set()

    while all_locations:
        location = all_locations.pop(0)
        if location in seen:
            continue
        seen.add(location)

        page = self._get_page(location, req)
        if page is None:
            continue

        yield page

        for link in page.rel_links():
            normalized = normalize_name(req.name).lower()

            if (normalized not in self.allow_external
                    and not self.allow_all_external):
                self.need_warn_external = True
                logger.debug("Not searching %s for files because external "
                             "urls are disallowed." % link)
                continue

            if (link.trusted is not None
                    and not link.trusted
                    and normalized not in self.allow_unverified):
                logger.debug(
                    "Not searching %s for urls, it is an "
                    "untrusted link and cannot produce safe or "
                    "verifiable files." % link
                )
                self.need_warn_unverified = True
                continue

            all_locations.append(link)
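
# The trust gate above uses three-valued logic: link.trusted may be True,
# False, or None (unknown), and only a definite False is filtered out. A
# hypothetical stand-in for that predicate, for illustration only:
def _should_skip_untrusted(trusted, normalized_name, allow_unverified):
    # Skip only links known to be untrusted whose project is not allow-listed.
    return (trusted is not None
            and not trusted
            and normalized_name not in allow_unverified)

assert _should_skip_untrusted(False, "example", set())            # untrusted
assert not _should_skip_untrusted(None, "example", set())         # unknown passes
assert not _should_skip_untrusted(False, "example", {"example"})  # allow-listed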
def _link_package_versions(self, link, search_name):
    """
    Return an iterable of triples (pkg_resources_version_key,
    link, python_version) that can be extracted from the given
    link.

    Meant to be overridden by subclasses, not called by clients.
    """
    platform = get_platform()

    version = None
    if link.egg_fragment:
        egg_info = link.egg_fragment
    else:
        egg_info, ext = link.splitext()
        if not ext:
            if link not in self.logged_links:
                logger.debug('Skipping link %s; not a file' % link)
                self.logged_links.add(link)
            return []
        if egg_info.endswith('.tar'):
            # Special double-extension case:
            egg_info = egg_info[:-4]
            ext = '.tar' + ext
        if ext not in self._known_extensions():
            if link not in self.logged_links:
                logger.debug(
                    'Skipping link %s; unknown archive format: %s'
                    % (link, ext)
                )
                self.logged_links.add(link)
            return []
        if "macosx10" in link.path and ext == '.zip':
            if link not in self.logged_links:
                logger.debug('Skipping link %s; macosx10 one' % link)
                self.logged_links.add(link)
            return []
        if ext == wheel_ext:
            try:
                wheel = Wheel(link.filename)
            except InvalidWheelFilename:
                logger.debug(
                    'Skipping %s because the wheel filename is invalid'
                    % link
                )
                return []
            if wheel.name.lower() != search_name.lower():
                logger.debug(
                    'Skipping link %s; wrong project name (not %s)'
                    % (link, search_name)
                )
                return []
            if not wheel.supported():
                logger.debug(
                    'Skipping %s because it is not compatible with this '
                    'Python' % link
                )
                return []
            # This is a dirty hack to prevent installing Binary Wheels from
            # PyPI unless it is a Windows or Mac Binary Wheel. This is
            # paired with a change to PyPI disabling uploads for the
            # same. Once we have a mechanism for enabling support for
            # binary wheels on linux that deals with the inherent problems
            # of binary distribution this can be removed.
            comes_from = getattr(link, "comes_from", None)
            if (
                    (not platform.startswith('win')
                     and not platform.startswith('macosx'))
                    and comes_from is not None
                    and urlparse.urlparse(
                        comes_from.url
                    ).netloc.endswith("pypi.python.org")):
                if not wheel.supported(tags=supported_tags_noarch):
                    logger.debug(
                        "Skipping %s because it is a pypi-hosted binary "
                        "Wheel on an unsupported platform" % link
                    )
                    return []
            version = wheel.version

    if not version:
        version = self._egg_info_matches(egg_info, search_name, link)
    if version is None:
        logger.debug(
            'Skipping link %s; wrong project name (not %s)'
            % (link, search_name)
        )
        return []

    if (link.internal is not None
            and not link.internal
            and normalize_name(search_name).lower() not in self.allow_external
            and not self.allow_all_external):
        # We have a link that we are sure is external, so we should skip
        # it unless we are allowing externals
        logger.debug("Skipping %s because it is externally hosted." % link)
        self.need_warn_external = True
        return []

    if (link.verifiable is not None
            and not link.verifiable
            and normalize_name(search_name).lower() not in self.allow_unverified
            and not self.allow_all_unverified):
        # We have a link whose integrity we are sure we cannot verify,
        # so we should skip it unless we are allowing unsafe installs
        # for this requirement.
        logger.debug("Skipping %s because it is an insecure and "
                     "unverifiable file." % link)
        self.need_warn_unverified = True
        return []

    match = self._py_version_re.search(version)
    if match:
        version = version[:match.start()]
        py_version = match.group(1)
        if py_version != sys.version[:3]:
            logger.debug(
                'Skipping %s because Python version is incorrect' % link
            )
            return []
    logger.debug('Found link %s, version: %s' % (link, version))
    return [(pkg_resources.parse_version(version), link, version)]
def _link_package_versions(self, link, search_name):
    """
    Return an iterable of triples (pkg_resources_version_key,
    link, python_version) that can be extracted from the given
    link.

    Meant to be overridden by subclasses, not called by clients.
    """
    platform = get_platform()

    version = None
    if link.egg_fragment:
        egg_info = link.egg_fragment
    else:
        egg_info, ext = link.splitext()
        if not ext:
            if link not in self.logged_links:
                logger.debug('Skipping link %s; not a file' % link)
                self.logged_links.add(link)
            return []
        if egg_info.endswith('.tar'):
            # Special double-extension case:
            egg_info = egg_info[:-4]
            ext = '.tar' + ext
        if ext not in self._known_extensions():
            if link not in self.logged_links:
                logger.debug(
                    'Skipping link %s; unknown archive format: %s'
                    % (link, ext)
                )
                self.logged_links.add(link)
            return []
        if "macosx10" in link.path and ext == '.zip':
            if link not in self.logged_links:
                logger.debug('Skipping link %s; macosx10 one' % link)
                self.logged_links.add(link)
            return []
        if ext == wheel_ext:
            try:
                wheel = Wheel(link.filename)
            except InvalidWheelFilename:
                logger.debug(
                    'Skipping %s because the wheel filename is invalid'
                    % link
                )
                return []
            if wheel.name.lower() != search_name.lower():
                logger.debug(
                    'Skipping link %s; wrong project name (not %s)'
                    % (link, search_name)
                )
                return []
            if not wheel.supported():
                logger.debug(
                    'Skipping %s because it is not compatible with this '
                    'Python' % link
                )
                return []
            # This is a dirty hack to prevent installing Binary Wheels from
            # PyPI unless it is a Windows or Mac Binary Wheel. This is
            # paired with a change to PyPI disabling uploads for the
            # same. Once we have a mechanism for enabling support for
            # binary wheels on linux that deals with the inherent problems
            # of binary distribution this can be removed.
            comes_from = getattr(link, "comes_from", None)
            if (
                    (not platform.startswith('win')
                     and not platform.startswith('macosx'))
                    and comes_from is not None
                    and urlparse.urlparse(
                        comes_from.url
                    ).netloc.endswith("pypi.python.org")):
                if not wheel.supported(tags=supported_tags_noarch):
                    logger.debug(
                        "Skipping %s because it is a pypi-hosted binary "
                        "Wheel on an unsupported platform" % link
                    )
                    return []
            version = wheel.version

    if not version:
        version = self._egg_info_matches(egg_info, search_name, link)
    if version is None:
        logger.debug(
            'Skipping link %s; wrong project name (not %s)'
            % (link, search_name)
        )
        return []

    if (link.internal is not None
            and not link.internal
            and normalize_name(search_name).lower() not in self.allow_external
            and not self.allow_all_external):
        # We have a link that we are sure is external, so we should skip
        # it unless we are allowing externals
        logger.debug("Skipping %s because it is externally hosted." % link)
        self.need_warn_external = True
        return []

    if (link.verifiable is not None
            and not link.verifiable
            and normalize_name(search_name).lower()
            not in self.allow_unverified):
        # We have a link whose integrity we are sure we cannot verify,
        # so we should skip it unless we are allowing unsafe installs
        # for this requirement.
        logger.debug("Skipping %s because it is an insecure and "
                     "unverifiable file." % link)
        self.need_warn_unverified = True
        return []

    match = self._py_version_re.search(version)
    if match:
        version = version[:match.start()]
        py_version = match.group(1)
        if py_version != sys.version[:3]:
            logger.debug(
                'Skipping %s because Python version is incorrect' % link
            )
            return []
    logger.debug('Found link %s, version: %s' % (link, version))
    return [(
        pkg_resources.parse_version(version),
        link,
        version,
    )]
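
# A sketch of the wheel-filename gate used above (an illustration, not
# pip's documented API; the pip.wheel import path is an assumption matching
# this era of the codebase). Wheel() parses PEP 427-style filenames, which
# is how the code above recovers a project name and version from a link:
from pip.wheel import Wheel

w = Wheel("pip-1.5-py2.py3-none-any.whl")  # illustrative filename
print(w.name)                              # 'pip'
print(w.version)                           # '1.5'
print(w.supported())                       # True for a pure-Python wheel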
def parse_requirements(filename, finder=None, comes_from=None, options=None,
                       session=None):
    if session is None:
        session = PipSession()

    skip_match = None
    skip_regex = options.skip_requirements_regex if options else None
    if skip_regex:
        skip_match = re.compile(skip_regex)
    reqs_file_dir = os.path.dirname(os.path.abspath(filename))
    filename, content = get_file_content(
        filename,
        comes_from=comes_from,
        session=session,
    )
    for line_number, line in enumerate(content.splitlines(), 1):
        line = line.strip()

        # Remove comments from file
        line = re.sub(r"(^|\s)#.*$", "", line)

        if not line or line.startswith('#'):
            continue
        if skip_match and skip_match.search(line):
            continue
        if line.startswith('-r') or line.startswith('--requirement'):
            if line.startswith('-r'):
                req_url = line[2:].strip()
            else:
                req_url = line[len('--requirement'):].strip().strip('=')
            if _scheme_re.search(filename):
                # Relative to a URL
                req_url = urlparse.urljoin(filename, req_url)
            elif not _scheme_re.search(req_url):
                req_url = os.path.join(os.path.dirname(filename), req_url)
            for item in parse_requirements(
                    req_url, finder,
                    comes_from=filename,
                    options=options,
                    session=session):
                yield item
        elif line.startswith('-Z') or line.startswith('--always-unzip'):
            # No longer used, but previously these were used in
            # requirement files, so we'll ignore.
            pass
        elif line.startswith('-f') or line.startswith('--find-links'):
            if line.startswith('-f'):
                line = line[2:].strip()
            else:
                line = line[len('--find-links'):].strip().lstrip('=')
            # FIXME: it would be nice to keep track of the source of
            # the find_links:
            # support a find-links local path relative to a requirements file
            relative_to_reqs_file = os.path.join(reqs_file_dir, line)
            if os.path.exists(relative_to_reqs_file):
                line = relative_to_reqs_file
            if finder:
                finder.find_links.append(line)
        elif line.startswith('-i') or line.startswith('--index-url'):
            if line.startswith('-i'):
                line = line[2:].strip()
            else:
                line = line[len('--index-url'):].strip().lstrip('=')
            if finder:
                finder.index_urls = [line]
        elif line.startswith('--extra-index-url'):
            line = line[len('--extra-index-url'):].strip().lstrip('=')
            if finder:
                finder.index_urls.append(line)
        elif line.startswith('--use-wheel'):
            if finder:
                finder.use_wheel = True
        elif line.startswith('--no-index'):
            if finder:
                finder.index_urls = []
        elif line.startswith("--allow-external"):
            line = line[len("--allow-external"):].strip().lstrip("=")
            if finder:
                finder.allow_external |= set([normalize_name(line).lower()])
        elif line.startswith("--allow-all-external"):
            if finder:
                finder.allow_all_external = True
        # Remove in 1.7
        elif line.startswith("--no-allow-external"):
            pass
        # Remove in 1.7
        elif line.startswith("--no-allow-insecure"):
            pass
        # Remove after 1.7
        elif line.startswith("--allow-insecure"):
            line = line[len("--allow-insecure"):].strip().lstrip("=")
            if finder:
                finder.allow_unverified |= set([normalize_name(line).lower()])
        elif line.startswith("--allow-unverified"):
            line = line[len("--allow-unverified"):].strip().lstrip("=")
            if finder:
                finder.allow_unverified |= set([normalize_name(line).lower()])
        else:
            comes_from = '-r %s (line %s)' % (filename, line_number)
            if line.startswith('-e') or line.startswith('--editable'):
                if line.startswith('-e'):
                    line = line[2:].strip()
                else:
                    line = line[len('--editable'):].strip().lstrip('=')
                req = InstallRequirement.from_editable(
                    line,
                    comes_from=comes_from,
                    default_vcs=options.default_vcs if options else None
                )
            else:
                req = InstallRequirement.from_line(
                    line,
                    comes_from,
                    prereleases=getattr(options, "pre", None)
                )
            yield req
def parse_requirements(filename, finder=None, comes_from=None, options=None,
                       session=None):
    if session is None:
        raise TypeError(
            "parse_requirements() missing 1 required keyword argument: "
            "'session'"
        )

    skip_match = None
    skip_regex = options.skip_requirements_regex if options else None
    if skip_regex:
        skip_match = re.compile(skip_regex)
    reqs_file_dir = os.path.dirname(os.path.abspath(filename))
    filename, content = get_file_content(
        filename,
        comes_from=comes_from,
        session=session,
    )
    for line_number, line in enumerate(content.splitlines(), 1):
        line = line.strip()

        # Remove comments from file
        line = re.sub(r"(^|\s)#.*$", "", line)

        if not line or line.startswith('#'):
            continue
        if skip_match and skip_match.search(line):
            continue
        if line.startswith('-r') or line.startswith('--requirement'):
            if line.startswith('-r'):
                req_url = line[2:].strip()
            else:
                req_url = line[len('--requirement'):].strip().strip('=')
            if _scheme_re.search(filename):
                # Relative to a URL
                req_url = urlparse.urljoin(filename, req_url)
            elif not _scheme_re.search(req_url):
                req_url = os.path.join(os.path.dirname(filename), req_url)
            for item in parse_requirements(
                    req_url, finder,
                    comes_from=filename,
                    options=options,
                    session=session):
                yield item
        elif line.startswith('-Z') or line.startswith('--always-unzip'):
            # No longer used, but previously these were used in
            # requirement files, so we'll ignore.
            pass
        elif line.startswith('-f') or line.startswith('--find-links'):
            if line.startswith('-f'):
                line = line[2:].strip()
            else:
                line = line[len('--find-links'):].strip().lstrip('=')
            # FIXME: it would be nice to keep track of the source of
            # the find_links:
            # support a find-links local path relative to a requirements file
            relative_to_reqs_file = os.path.join(reqs_file_dir, line)
            if os.path.exists(relative_to_reqs_file):
                line = relative_to_reqs_file
            if finder:
                finder.find_links.append(line)
        elif line.startswith('-i') or line.startswith('--index-url'):
            if line.startswith('-i'):
                line = line[2:].strip()
            else:
                line = line[len('--index-url'):].strip().lstrip('=')
            if finder:
                finder.index_urls = [line]
        elif line.startswith('--extra-index-url'):
            line = line[len('--extra-index-url'):].strip().lstrip('=')
            if finder:
                finder.index_urls.append(line)
        elif line.startswith('--use-wheel'):
            if finder:
                finder.use_wheel = True
        elif line.startswith('--no-index'):
            if finder:
                finder.index_urls = []
        elif line.startswith("--allow-external"):
            line = line[len("--allow-external"):].strip().lstrip("=")
            if finder:
                finder.allow_external |= set([normalize_name(line).lower()])
        elif line.startswith("--allow-all-external"):
            if finder:
                finder.allow_all_external = True
        # Remove in 1.7
        elif line.startswith("--no-allow-external"):
            pass
        # Remove in 1.7
        elif line.startswith("--no-allow-insecure"):
            pass
        # Remove after 1.7
        elif line.startswith("--allow-insecure"):
            line = line[len("--allow-insecure"):].strip().lstrip("=")
            if finder:
                finder.allow_unverified |= set([normalize_name(line).lower()])
        elif line.startswith("--allow-unverified"):
            line = line[len("--allow-unverified"):].strip().lstrip("=")
            if finder:
                finder.allow_unverified |= set([normalize_name(line).lower()])
        else:
            comes_from = '-r %s (line %s)' % (filename, line_number)
            if line.startswith('-e') or line.startswith('--editable'):
                if line.startswith('-e'):
                    line = line[2:].strip()
                else:
                    line = line[len('--editable'):].strip().lstrip('=')
                req = InstallRequirement.from_editable(
                    line,
                    comes_from=comes_from,
                    default_vcs=options.default_vcs if options else None
                )
            else:
                req = InstallRequirement.from_line(
                    line,
                    comes_from,
                    prereleases=getattr(options, "pre", None)
                )
            yield req
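
# Example call for the session-requiring variant above (a sketch: assumes
# PipSession is importable from pip.download, as in pip 1.5-era internals,
# and that a requirements.txt exists in the working directory):
from pip.download import PipSession

for req in parse_requirements("requirements.txt", session=PipSession()):
    print(req)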