def __init__(self, find_links, index_urls, use_wheel=True,
             allow_external=(), allow_unverified=(),
             allow_all_external=False, allow_all_prereleases=False,
             trusted_hosts=None, process_dependency_links=False,
             session=None):
    if session is None:
        raise TypeError(
            "PackageFinder() missing 1 required keyword argument: "
            "'session'")

    self.find_links = find_links
    self.index_urls = index_urls
    self.dependency_links = []

    # These are boring links that have already been logged somehow:
    self.logged_links = set()

    self.use_wheel = use_wheel

    # Do we allow (safe and verifiable) externally hosted files?
    self.allow_external = set(normalize_name(n) for n in allow_external)

    # Which names are allowed to install insecure and unverifiable files?
    self.allow_unverified = set(
        normalize_name(n) for n in allow_unverified)

    # Anything that is allowed unverified is also allowed external
    self.allow_external |= self.allow_unverified

    # Do we allow all (safe and verifiable) externally hosted files?
    self.allow_all_external = allow_all_external

    # Domains that we won't emit warnings for when not using HTTPS
    self.secure_origins = [
        ("*", host, "*")
        for host in (trusted_hosts if trusted_hosts else [])
    ]

    # Stores if we ignored any external links so that we can instruct
    # end users how to install them if no distributions are available
    self.need_warn_external = False

    # Stores if we ignored any unsafe links so that we can instruct
    # end users how to install them if no distributions are available
    self.need_warn_unverified = False

    # Do we want to allow _all_ pre-releases?
    self.allow_all_prereleases = allow_all_prereleases

    # Do we process dependency links?
    self.process_dependency_links = process_dependency_links

    # The Session we'll use to make requests
    self.session = session
def _find_url_name(self, index_url, url_name):
    """
    Finds the true URL name of a package, when the given name isn't
    quite correct.
    This is usually used to implement case-insensitivity.
    """
    if not index_url.url.endswith('/'):
        # Vaguely part of the PyPI API... weird but true.
        # FIXME: bad to modify this?
        index_url.url += '/'
    page = self._get_page(index_url)
    if page is None:
        logger.critical('Cannot fetch index base URL %s', index_url)
        return
    norm_name = normalize_name(url_name)
    for link in page.links:
        base = posixpath.basename(link.path.rstrip('/'))
        if norm_name == normalize_name(base):
            logger.debug(
                'Real name of requirement %s is %s', url_name, base,
            )
            return base
    return None
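# The case-insensitive comparison above relies on pip's normalize_name
# helper. A rough, hypothetical stand-in is sketched below -- an assumption
# about its behavior (lower-case, treat "_" and "-" alike), not pip's exact
# implementation.
def normalize_name(name):
    # assumed behavior: canonicalize separators, then lower-case
    return name.replace('_', '-').lower()


# e.g. both spellings of the same project compare equal:
assert normalize_name('Django_Debug_Toolbar') == normalize_name('django-debug-toolbar')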
def __init__(self, find_links, index_urls, use_wheel=True,
             allow_external=(), allow_unverified=(),
             allow_all_external=False, allow_all_unverified=False,
             allow_all_prereleases=False, trusted_hosts=None,
             process_dependency_links=False, session=None):
    if session is None:
        raise TypeError(
            "PackageFinder() missing 1 required keyword argument: "
            "'session'"
        )

    self.find_links = find_links
    self.index_urls = index_urls
    self.dependency_links = []

    # These are boring links that have already been logged somehow:
    self.logged_links = set()

    self.use_wheel = use_wheel

    # Do we allow (safe and verifiable) externally hosted files?
    self.allow_external = set(normalize_name(n) for n in allow_external)

    # Which names are allowed to install insecure and unverifiable files?
    self.allow_unverified = set(
        normalize_name(n) for n in allow_unverified
    )

    # Anything that is allowed unverified is also allowed external
    self.allow_external |= self.allow_unverified

    # Do we allow all (safe and verifiable) externally hosted files?
    self.allow_all_external = allow_all_external

    # Do we allow all insecure and unverifiable files?
    self.allow_all_unverified = allow_all_unverified

    # Domains that we won't emit warnings for when not using HTTPS
    self.secure_origins = [
        ("*", host, "*")
        for host in (trusted_hosts if trusted_hosts else [])
    ]

    # Stores if we ignored any external links so that we can instruct
    # end users how to install them if no distributions are available
    self.need_warn_external = False

    # Stores if we ignored any unsafe links so that we can instruct
    # end users how to install them if no distributions are available
    self.need_warn_unverified = False

    # Do we want to allow _all_ pre-releases?
    self.allow_all_prereleases = allow_all_prereleases

    # Do we process dependency links?
    self.process_dependency_links = process_dependency_links

    # The Session we'll use to make requests
    self.session = session
def find_missing_reqs(options):
    # 1. find files used by imports in the code (as best we can without
    #    executing)
    used_modules = common.find_imported_modules(options)

    # 2. find which packages provide which files
    installed_files = {}
    all_pkgs = (pkg.project_name for pkg in get_installed_distributions())
    for package in search_packages_info(all_pkgs):
        log.debug('installed package: %s (at %s)', package['name'],
                  package['location'])
        for file in package['files'] or []:
            path = os.path.realpath(
                os.path.join(package['location'], file))
            installed_files[path] = package['name']
            package_path = common.is_package_file(path)
            if package_path:
                # we've seen a package file so add the bare package directory
                # to the installed list as well as we might want to look up
                # a package by its directory path later
                installed_files[package_path] = package['name']

    # 3. match imported modules against those packages
    used = collections.defaultdict(list)
    for modname, info in used_modules.items():
        # probably standard library if it's not in the files list
        if info.filename in installed_files:
            used_name = normalize_name(installed_files[info.filename])
            log.debug('used module: %s (from package %s)', modname,
                      installed_files[info.filename])
            used[used_name].append(info)
        else:
            log.debug(
                'used module: %s (from file %s, assuming stdlib or local)',
                modname, info.filename)

    # 4. compare with requirements.txt
    explicit = set()
    for requirement in parse_requirements('requirements.txt',
                                          session=PipSession()):
        log.debug('found requirement: %s', requirement.name)
        explicit.add(normalize_name(requirement.name))

    return [(name, used[name]) for name in used if name not in explicit]
def find_required_modules(options):
    explicit = set()
    for requirement in parse_requirements('requirements.txt',
                                          session=PipSession()):
        if options.ignore_reqs(requirement):
            log.debug('ignoring requirement: %s', requirement.name)
        else:
            log.debug('found requirement: %s', requirement.name)
            explicit.add(normalize_name(requirement.name))
    return explicit
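# Minimal usage sketch for the requirements-scanning pattern above, assuming
# the pre-10.x pip layout where PipSession lives in pip.download and
# parse_requirements in pip.req (both later moved into pip._internal).
# requirement_names is a hypothetical helper, not part of either function above.
from pip.download import PipSession
from pip.req import parse_requirements


def requirement_names(path='requirements.txt'):
    names = set()
    for req in parse_requirements(path, session=PipSession()):
        if req.name:  # URL/editable lines may not have a name yet
            names.add(req.name.lower())
    return names


if __name__ == '__main__':
    print(sorted(requirement_names()))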
def _find_url_name(self, index_url, url_name):
    """
    Finds the true URL name of a package, when the given name isn't
    quite correct.
    This is usually used to implement case-insensitivity.
    """
    if not index_url.url.endswith("/"):
        # Vaguely part of the PyPI API... weird but true.
        # FIXME: bad to modify this?
        index_url.url += "/"
    page = self._get_page(index_url)
    if page is None:
        logger.critical("Cannot fetch index base URL %s", index_url)
        return
    norm_name = normalize_name(url_name)
    for link in page.links:
        base = posixpath.basename(link.path.rstrip("/"))
        if norm_name == normalize_name(base):
            logger.debug("Real name of requirement %s is %s", url_name, base)
            return base
    return None
def _get_pages(self, locations, req):
    """
    Yields (page, page_url) from the given locations, skipping
    locations that have errors, and adding download/homepage links
    """
    all_locations = list(locations)
    seen = set()

    while all_locations:
        location = all_locations.pop(0)
        if location in seen:
            continue
        seen.add(location)

        page = self._get_page(location, req)
        if page is None:
            continue

        yield page

        for link in page.rel_links():
            normalized = normalize_name(req.name).lower()

            if (normalized not in self.allow_external and
                    not self.allow_all_external):
                self.need_warn_external = True
                logger.debug(
                    "Not searching %s for files because external "
                    "urls are disallowed.",
                    link,
                )
                continue

            if (link.trusted is not None and
                    not link.trusted and
                    normalized not in self.allow_unverified and
                    not self.allow_all_unverified):
                logger.debug(
                    "Not searching %s for urls, it is an "
                    "untrusted link and cannot produce safe or "
                    "verifiable files.",
                    link,
                )
                self.need_warn_unverified = True
                continue

            all_locations.append(link)
def _get_pages(self, locations, project_name):
    """
    Yields (page, page_url) from the given locations, skipping
    locations that have errors, and adding download/homepage links
    """
    all_locations = list(locations)
    seen = set()
    normalized = normalize_name(project_name)

    while all_locations:
        location = all_locations.pop(0)
        if location in seen:
            continue
        seen.add(location)

        page = self._get_page(location)
        if page is None:
            continue

        yield page

        for link in page.rel_links():
            if (normalized not in self.allow_external and
                    not self.allow_all_external):
                self.need_warn_external = True
                logger.debug(
                    "Not searching %s for files because external "
                    "urls are disallowed.",
                    link,
                )
                continue

            if (link.trusted is not None and
                    not link.trusted and
                    normalized not in self.allow_unverified and
                    not self.allow_all_unverified):
                logger.debug(
                    "Not searching %s for urls, it is an "
                    "untrusted link and cannot produce safe or "
                    "verifiable files.",
                    link,
                )
                self.need_warn_unverified = True
                continue

            all_locations.append(link)
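# Stripped of the trust checks, the traversal in _get_pages above is a plain
# queue-plus-seen-set crawl. A self-contained sketch, with a toy get_links
# callback standing in for page.rel_links():
def crawl(start_locations, get_links):
    queue = list(start_locations)
    seen = set()
    while queue:
        loc = queue.pop(0)
        if loc in seen:
            continue           # each location is processed at most once
        seen.add(loc)
        yield loc
        queue.extend(get_links(loc))  # newly discovered links go to the back


# toy link graph in place of real index pages
graph = {'index': ['a', 'b'], 'a': ['b'], 'b': []}
assert list(crawl(['index'], graph.get)) == ['index', 'a', 'b']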
def __init__(self, find_links, index_urls, allow_external=(),
             allow_unverified=(), allow_all_external=False,
             allow_all_prereleases=False, trusted_hosts=None,
             process_dependency_links=False, session=None,
             format_control=None):
    """Create a PackageFinder.

    :param format_control: A FormatControl object or None. Used to control
        the selection of source packages / binary packages when consulting
        the index and links.
    """
    if session is None:
        raise TypeError(
            "PackageFinder() missing 1 required keyword argument: "
            "'session'")

    # Build find_links. If an argument starts with ~, it may be
    # a local file relative to a home directory. So try normalizing
    # it and if it exists, use the normalized version.
    # This is deliberately conservative - it might be fine just to
    # blindly normalize anything starting with a ~...
    self.find_links = []
    for link in find_links:
        if link.startswith('~'):
            new_link = normalize_path(link)
            if os.path.exists(new_link):
                link = new_link
        self.find_links.append(link)

    self.index_urls = index_urls
    self.dependency_links = []

    # These are boring links that have already been logged somehow:
    self.logged_links = set()

    self.format_control = format_control or FormatControl(set(), set())

    # Do we allow (safe and verifiable) externally hosted files?
    self.allow_external = set(normalize_name(n) for n in allow_external)

    # Which names are allowed to install insecure and unverifiable files?
    self.allow_unverified = set(
        normalize_name(n) for n in allow_unverified)

    # Anything that is allowed unverified is also allowed external
    self.allow_external |= self.allow_unverified

    # Do we allow all (safe and verifiable) externally hosted files?
    self.allow_all_external = allow_all_external

    # Domains that we won't emit warnings for when not using HTTPS
    self.secure_origins = [
        ("*", host, "*")
        for host in (trusted_hosts if trusted_hosts else [])
    ]

    # Stores if we ignored any external links so that we can instruct
    # end users how to install them if no distributions are available
    self.need_warn_external = False

    # Stores if we ignored any unsafe links so that we can instruct
    # end users how to install them if no distributions are available
    self.need_warn_unverified = False

    # Do we want to allow _all_ pre-releases?
    self.allow_all_prereleases = allow_all_prereleases

    # Do we process dependency links?
    self.process_dependency_links = process_dependency_links

    # The Session we'll use to make requests
    self.session = session

    # If we don't have TLS enabled, then WARN if anyplace we're looking
    # relies on TLS.
    if not HAS_TLS:
        for link in itertools.chain(self.index_urls, self.find_links):
            parsed = urllib_parse.urlparse(link)
            if parsed.scheme == "https":
                logger.warning(
                    "pip is configured with locations that require "
                    "TLS/SSL, however the ssl module in Python is not "
                    "available.")
                break
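# A sketch of how a caller might construct the finder above, assuming the
# pip 6/7-era module layout (pip.index.PackageFinder, pip.download.PipSession);
# the index URL is illustrative.
from pip.download import PipSession
from pip.index import PackageFinder

session = PipSession()
finder = PackageFinder(
    find_links=[],                                   # no local directories
    index_urls=['https://pypi.python.org/simple/'],  # default index of that era
    session=session,                                 # required; omitting it raises TypeError
)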
def _link_package_versions(self, link, search):
    """Return an InstallationCandidate or None"""
    platform = get_platform()

    version = None
    if link.egg_fragment:
        egg_info = link.egg_fragment
        ext = link.ext
    else:
        egg_info, ext = link.splitext()
        if not ext:
            self._log_skipped_link(link, 'not a file')
            return
        if ext not in SUPPORTED_EXTENSIONS:
            self._log_skipped_link(
                link, 'unsupported archive format: %s' % ext)
            return
        if "binary" not in search.formats and ext == wheel_ext:
            self._log_skipped_link(
                link, 'No binaries permitted for %s' % search.supplied)
            return
        if "macosx10" in link.path and ext == '.zip':
            self._log_skipped_link(link, 'macosx10 one')
            return
        if ext == wheel_ext:
            try:
                wheel = Wheel(link.filename)
            except InvalidWheelFilename:
                self._log_skipped_link(link, 'invalid wheel filename')
                return
            if (pkg_resources.safe_name(wheel.name).lower() !=
                    search.canonical):
                self._log_skipped_link(
                    link, 'wrong project name (not %s)' % search.supplied)
                return
            if not wheel.supported():
                self._log_skipped_link(
                    link, 'it is not compatible with this Python')
                return
            # This is a dirty hack to prevent installing Binary Wheels from
            # PyPI unless it is a Windows or Mac Binary Wheel. This is
            # paired with a change to PyPI disabling uploads for the
            # same. Once we have a mechanism for enabling support for
            # binary wheels on linux that deals with the inherent problems
            # of binary distribution this can be removed.
            comes_from = getattr(link, "comes_from", None)
            if (
                    (
                        not platform.startswith('win') and
                        not platform.startswith('macosx') and
                        not platform == 'cli'
                    ) and
                    comes_from is not None and
                    urllib_parse.urlparse(
                        comes_from.url
                    ).netloc.endswith(PyPI.netloc)):
                if not wheel.supported(tags=supported_tags_noarch):
                    self._log_skipped_link(
                        link,
                        "it is a pypi-hosted binary "
                        "Wheel on an unsupported platform",
                    )
                    return
            version = wheel.version

    # This should be up by the search.ok_binary check, but see issue 2700.
    if "source" not in search.formats and ext != wheel_ext:
        self._log_skipped_link(
            link, 'No sources permitted for %s' % search.supplied)
        return

    if not version:
        version = egg_info_matches(egg_info, search.supplied, link)
    if version is None:
        self._log_skipped_link(
            link, 'wrong project name (not %s)' % search.supplied)
        return

    if (link.internal is not None and
            not link.internal and
            not normalize_name(search.supplied).lower()
            in self.allow_external and
            not self.allow_all_external):
        # We have a link that we are sure is external, so we should skip
        # it unless we are allowing externals
        self._log_skipped_link(link, 'it is externally hosted')
        self.need_warn_external = True
        return

    if (link.verifiable is not None and
            not link.verifiable and
            not (normalize_name(search.supplied).lower()
                 in self.allow_unverified)):
        # We have a link that we are sure we cannot verify its integrity,
        # so we should skip it unless we are allowing unsafe installs
        # for this requirement.
        self._log_skipped_link(
            link, 'it is an insecure and unverifiable file')
        self.need_warn_unverified = True
        return

    match = self._py_version_re.search(version)
    if match:
        version = version[:match.start()]
        py_version = match.group(1)
        if py_version != sys.version[:3]:
            self._log_skipped_link(
                link, 'Python version is incorrect')
            return
    logger.debug('Found link %s, version: %s', link, version)

    return InstallationCandidate(search.supplied, version, link)
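# The trailing "-pyX.Y" handling at the end of _link_package_versions,
# isolated as a sketch; the regex is an assumed equivalent of the class's
# _py_version_re, not copied from pip.
import re

_py_version_re = re.compile(r'-py([123]\.?[\d.]*)$')


def split_py_version(version):
    match = _py_version_re.search(version)
    if not match:
        return version, None
    # strip the suffix and report the Python version it names
    return version[:match.start()], match.group(1)


assert split_py_version('1.4.2-py2.7') == ('1.4.2', '2.7')
assert split_py_version('1.4.2') == ('1.4.2', None)
# the caller compares the suffix against sys.version[:3] and skips on mismatch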
def __init__(self, find_links, index_urls, allow_external=(),
             allow_unverified=(), allow_all_external=False,
             allow_all_prereleases=False, trusted_hosts=None,
             process_dependency_links=False, session=None,
             format_control=None):
    """Create a PackageFinder.

    :param format_control: A FormatControl object or None. Used to control
        the selection of source packages / binary packages when consulting
        the index and links.
    """
    if session is None:
        raise TypeError(
            "PackageFinder() missing 1 required keyword argument: "
            "'session'"
        )

    # Build find_links. If an argument starts with ~, it may be
    # a local file relative to a home directory. So try normalizing
    # it and if it exists, use the normalized version.
    # This is deliberately conservative - it might be fine just to
    # blindly normalize anything starting with a ~...
    self.find_links = []
    for link in find_links:
        if link.startswith('~'):
            new_link = normalize_path(link)
            if os.path.exists(new_link):
                link = new_link
        self.find_links.append(link)

    self.index_urls = index_urls
    self.dependency_links = []

    # These are boring links that have already been logged somehow:
    self.logged_links = set()

    self.format_control = format_control or FormatControl(set(), set())

    # Do we allow (safe and verifiable) externally hosted files?
    self.allow_external = set(normalize_name(n) for n in allow_external)

    # Which names are allowed to install insecure and unverifiable files?
    self.allow_unverified = set(
        normalize_name(n) for n in allow_unverified
    )

    # Anything that is allowed unverified is also allowed external
    self.allow_external |= self.allow_unverified

    # Do we allow all (safe and verifiable) externally hosted files?
    self.allow_all_external = allow_all_external

    # Domains that we won't emit warnings for when not using HTTPS
    self.secure_origins = [
        ("*", host, "*")
        for host in (trusted_hosts if trusted_hosts else [])
    ]

    # Stores if we ignored any external links so that we can instruct
    # end users how to install them if no distributions are available
    self.need_warn_external = False

    # Stores if we ignored any unsafe links so that we can instruct
    # end users how to install them if no distributions are available
    self.need_warn_unverified = False

    # Do we want to allow _all_ pre-releases?
    self.allow_all_prereleases = allow_all_prereleases

    # Do we process dependency links?
    self.process_dependency_links = process_dependency_links

    # The Session we'll use to make requests
    self.session = session

    # If we don't have TLS enabled, then WARN if anyplace we're looking
    # relies on TLS.
    if not HAS_TLS:
        for link in itertools.chain(self.index_urls, self.find_links):
            parsed = urllib_parse.urlparse(link)
            if parsed.scheme == "https":
                logger.warning(
                    "pip is configured with locations that require "
                    "TLS/SSL, however the ssl module in Python is not "
                    "available."
                )
                break
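# The HAS_TLS flag checked at the end above is assumed to come from a probe
# like the following: TLS support reduces to whether the standard-library
# ssl module is importable in this Python build. A sketch, not pip's exact code.
try:
    import ssl  # noqa: F401
    HAS_TLS = True
except ImportError:
    HAS_TLS = False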
def __init__(self, find_links, index_urls, use_wheel=True,
             allow_external=(), allow_unverified=(),
             allow_all_external=False, allow_all_prereleases=False,
             trusted_hosts=None, process_dependency_links=False,
             session=None):
    if session is None:
        raise TypeError(
            "PackageFinder() missing 1 required keyword argument: "
            "'session'")

    # Build find_links. If an argument starts with ~, it may be
    # a local file relative to a home directory. So try normalizing
    # it and if it exists, use the normalized version.
    # This is deliberately conservative - it might be fine just to
    # blindly normalize anything starting with a ~...
    self.find_links = []
    for link in find_links:
        if link.startswith('~'):
            new_link = normalize_path(link)
            if os.path.exists(new_link):
                link = new_link
        self.find_links.append(link)

    self.index_urls = index_urls
    self.dependency_links = []

    # These are boring links that have already been logged somehow:
    self.logged_links = set()

    self.use_wheel = use_wheel

    # Do we allow (safe and verifiable) externally hosted files?
    self.allow_external = set(normalize_name(n) for n in allow_external)

    # Which names are allowed to install insecure and unverifiable files?
    self.allow_unverified = set(
        normalize_name(n) for n in allow_unverified)

    # Anything that is allowed unverified is also allowed external
    self.allow_external |= self.allow_unverified

    # Do we allow all (safe and verifiable) externally hosted files?
    self.allow_all_external = allow_all_external

    # Domains that we won't emit warnings for when not using HTTPS
    self.secure_origins = [
        ("*", host, "*")
        for host in (trusted_hosts if trusted_hosts else [])
    ]

    # Stores if we ignored any external links so that we can instruct
    # end users how to install them if no distributions are available
    self.need_warn_external = False

    # Stores if we ignored any unsafe links so that we can instruct
    # end users how to install them if no distributions are available
    self.need_warn_unverified = False

    # Do we want to allow _all_ pre-releases?
    self.allow_all_prereleases = allow_all_prereleases

    # Do we process dependency links?
    self.process_dependency_links = process_dependency_links

    # The Session we'll use to make requests
    self.session = session
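# The "~" expansion above leans on pip's normalize_path helper. A rough
# stand-in under the assumption that it is essentially expanduser plus path
# canonicalization -- an assumption, not the exact implementation.
import os.path


def normalize_path(path):
    return os.path.normcase(os.path.abspath(os.path.expanduser(path)))


print(normalize_path('~/wheels'))  # e.g. /home/user/wheels on Linux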
def parse_requirements(filename, finder=None, comes_from=None, options=None,
                       session=None):
    if session is None:
        raise TypeError(
            "parse_requirements() missing 1 required keyword argument: "
            "'session'"
        )

    skip_match = None
    skip_regex = options.skip_requirements_regex if options else None
    if skip_regex:
        skip_match = re.compile(skip_regex)
    reqs_file_dir = os.path.dirname(os.path.abspath(filename))
    filename, content = get_file_content(
        filename,
        comes_from=comes_from,
        session=session,
    )
    for line_number, line in enumerate(content.splitlines(), 1):
        line = line.strip()

        # Remove comments from file and all spaces before it
        line = re.sub(r"(^|\s)+#.*$", "", line)

        if not line:
            continue
        if skip_match and skip_match.search(line):
            continue
        if line.startswith(('-r', '--requirement')):
            req_url = _remove_prefixes(line, '-r', '--requirement')
            if _scheme_re.search(filename):
                # Relative to a URL
                req_url = urllib_parse.urljoin(filename, req_url)
            elif not _scheme_re.search(req_url):
                req_url = os.path.join(os.path.dirname(filename), req_url)
            for item in parse_requirements(
                    req_url, finder,
                    comes_from=filename,
                    options=options,
                    session=session):
                yield item
        elif line.startswith(('-Z', '--always-unzip')):
            # No longer used, but previously these were used in
            # requirement files, so we'll ignore.
            pass
        elif line.startswith(('-f', '--find-links')):
            find_links = _remove_prefixes(line, '-f', '--find-links')
            # FIXME: it would be nice to keep track of the source of
            # the find_links:
            # support a find-links local path relative to a requirements file
            relative_to_reqs_file = os.path.join(reqs_file_dir, find_links)
            if os.path.exists(relative_to_reqs_file):
                find_links = relative_to_reqs_file
            if finder:
                finder.find_links.append(find_links)
        elif line.startswith(('-i', '--index-url')):
            index_url = _remove_prefixes(line, '-i', '--index-url')
            if finder:
                finder.index_urls = [index_url]
        elif line.startswith('--extra-index-url'):
            line = _remove_prefix(line, '--extra-index-url')
            if finder:
                finder.index_urls.append(line)
        elif line.startswith('--use-wheel'):
            # Default in 1.5
            pass
        elif line.startswith('--no-use-wheel'):
            if finder:
                finder.use_wheel = False
        elif line.startswith('--no-index'):
            if finder:
                finder.index_urls = []
        elif line.startswith("--allow-external"):
            line = _remove_prefix(line, '--allow-external')
            if finder:
                finder.allow_external |= set([normalize_name(line).lower()])
        elif line.startswith("--allow-all-external"):
            if finder:
                finder.allow_all_external = True
        # Remove in 7.0
        elif line.startswith("--no-allow-external"):
            pass
        # Remove in 7.0
        elif line.startswith("--no-allow-insecure"):
            pass
        # Remove after 7.0
        elif line.startswith("--allow-insecure"):
            line = _remove_prefix(line, '--allow-insecure')
            if finder:
                finder.allow_unverified |= set([normalize_name(line).lower()])
        elif line.startswith("--allow-unverified"):
            line = _remove_prefix(line, '--allow-unverified')
            if finder:
                finder.allow_unverified |= set([normalize_name(line).lower()])
        else:
            comes_from = '-r %s (line %s)' % (filename, line_number)
            if line.startswith(('-e', '--editable')):
                editable = _remove_prefixes(line, '-e', '--editable')
                req = InstallRequirement.from_editable(
                    editable,
                    comes_from=comes_from,
                    default_vcs=options.default_vcs if options else None,
                    isolated=options.isolated_mode if options else False,
                )
            else:
                req = InstallRequirement.from_line(
                    line,
                    comes_from,
                    isolated=options.isolated_mode if options else False,
                )
            yield req
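# A sketch of the parser's two effects above -- yielding requirements and
# mutating the finder -- assuming the pip 6/7-era imports; the file contents
# and URL are illustrative.
import os
import tempfile

from pip.download import PipSession
from pip.index import PackageFinder
from pip.req import parse_requirements

reqs = b"--extra-index-url https://example.com/simple\nrequests==2.5.0\n"
with tempfile.NamedTemporaryFile(suffix='.txt', delete=False) as f:
    f.write(reqs)

session = PipSession()
finder = PackageFinder(find_links=[], index_urls=[], session=session)
for req in parse_requirements(f.name, finder=finder, session=session):
    print(req.name)        # -> requests
print(finder.index_urls)   # the --extra-index-url line was appended
os.unlink(f.name)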
def process_line(line, filename, line_number, finder=None, comes_from=None,
                 options=None, session=None, wheel_cache=None,
                 constraint=False):
    """Process a single requirements line; this can result in
    creating/yielding requirements, or updating the finder.

    For lines that contain requirements, the only options that have an
    effect are from SUPPORTED_OPTIONS_REQ, and they are scoped to the
    requirement. Other options from SUPPORTED_OPTIONS may be present,
    but are ignored.

    For lines that do not contain requirements, the only options that have
    an effect are from SUPPORTED_OPTIONS. Options from SUPPORTED_OPTIONS_REQ
    may be present, but are ignored. These lines may contain multiple
    options (although our docs imply only one is supported), and all are
    parsed and affect the finder.

    :param constraint: If True, parsing a constraints file.
    """
    parser = build_parser()
    defaults = parser.get_default_values()
    defaults.index_url = None
    if finder:
        # `finder.format_control` will be updated during parsing
        defaults.format_control = finder.format_control
    args_str, options_str = break_args_options(line)
    opts, _ = parser.parse_args(shlex.split(options_str), defaults)

    # preserve for the nested code path
    line_comes_from = '%s %s (line %s)' % (
        '-c' if constraint else '-r', filename, line_number)

    # yield a line requirement
    if args_str:
        isolated = options.isolated_mode if options else False
        if options:
            cmdoptions.check_install_build_global(options, opts)
        # get the options that apply to requirements
        req_options = {}
        for dest in SUPPORTED_OPTIONS_REQ_DEST:
            if dest in opts.__dict__ and opts.__dict__[dest]:
                req_options[dest] = opts.__dict__[dest]
        yield InstallRequirement.from_line(
            args_str, line_comes_from, constraint=constraint,
            isolated=isolated, options=req_options, wheel_cache=wheel_cache
        )

    # yield an editable requirement
    elif opts.editables:
        isolated = options.isolated_mode if options else False
        default_vcs = options.default_vcs if options else None
        yield InstallRequirement.from_editable(
            opts.editables[0], comes_from=line_comes_from,
            constraint=constraint, default_vcs=default_vcs,
            isolated=isolated, wheel_cache=wheel_cache
        )

    # parse a nested requirements file
    elif opts.requirements or opts.constraints:
        if opts.requirements:
            req_path = opts.requirements[0]
            nested_constraint = False
        else:
            req_path = opts.constraints[0]
            nested_constraint = True
        # original file is over http
        if SCHEME_RE.search(filename):
            # do a url join so relative paths work
            req_path = urllib_parse.urljoin(filename, req_path)
        # original file and nested file are paths
        elif not SCHEME_RE.search(req_path):
            # do a join so relative paths work
            req_dir = os.path.dirname(filename)
            req_path = os.path.join(os.path.dirname(filename), req_path)
        # TODO: Why not use `comes_from='-r {} (line {})'` here as well?
        parser = parse_requirements(
            req_path, finder, comes_from, options, session,
            constraint=nested_constraint, wheel_cache=wheel_cache
        )
        for req in parser:
            yield req

    # set finder options
    elif finder:
        if opts.index_url:
            finder.index_urls = [opts.index_url]
        if opts.use_wheel is False:
            finder.use_wheel = False
            pip.index.fmt_ctl_no_use_wheel(finder.format_control)
        if opts.no_index is True:
            finder.index_urls = []
        if opts.allow_all_external:
            finder.allow_all_external = opts.allow_all_external
        if opts.extra_index_urls:
            finder.index_urls.extend(opts.extra_index_urls)
        if opts.allow_external:
            finder.allow_external |= set(
                [normalize_name(v).lower() for v in opts.allow_external])
        if opts.allow_unverified:
            # Remove after 7.0
            finder.allow_unverified |= set(
                [normalize_name(v).lower() for v in opts.allow_unverified])
        if opts.find_links:
            # FIXME: it would be nice to keep track of the source
            # of the find_links: support a find-links local path
            # relative to a requirements file.
            value = opts.find_links[0]
            req_dir = os.path.dirname(os.path.abspath(filename))
            relative_to_reqs_file = os.path.join(req_dir, value)
            if os.path.exists(relative_to_reqs_file):
                value = relative_to_reqs_file
            finder.find_links.append(value)
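# The nested-include path resolution in the middle of process_line above,
# isolated as a self-contained sketch; SCHEME_RE here is an assumed
# equivalent of the module-level regex.
import os
import re
try:
    from urllib.parse import urljoin   # Python 3
except ImportError:
    from urlparse import urljoin       # Python 2

SCHEME_RE = re.compile(r'^(http|https|file):', re.I)


def resolve_nested(filename, req_path):
    if SCHEME_RE.search(filename):
        # original file is over http: url join so relative paths work
        return urljoin(filename, req_path)
    if not SCHEME_RE.search(req_path):
        # both are local paths: join relative to the including file
        return os.path.join(os.path.dirname(filename), req_path)
    return req_path


assert (resolve_nested('https://x.example/reqs/base.txt', 'extra.txt') ==
        'https://x.example/reqs/extra.txt')
assert resolve_nested('/srv/reqs/base.txt', 'extra.txt') == '/srv/reqs/extra.txt'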
def _link_package_versions(self, link, search):
    """Return an InstallationCandidate or None"""
    platform = get_platform()

    version = None
    if link.egg_fragment:
        egg_info = link.egg_fragment
        ext = link.ext
    else:
        egg_info, ext = link.splitext()
        if not ext:
            self._log_skipped_link(link, 'not a file')
            return
        if ext not in SUPPORTED_EXTENSIONS:
            self._log_skipped_link(link,
                                   'unsupported archive format: %s' % ext)
            return
        if "binary" not in search.formats and ext == wheel_ext:
            self._log_skipped_link(
                link, 'No binaries permitted for %s' % search.supplied)
            return
        if "macosx10" in link.path and ext == '.zip':
            self._log_skipped_link(link, 'macosx10 one')
            return
        if ext == wheel_ext:
            try:
                wheel = Wheel(link.filename)
            except InvalidWheelFilename:
                self._log_skipped_link(link, 'invalid wheel filename')
                return
            if (pkg_resources.safe_name(wheel.name).lower() !=
                    search.canonical):
                self._log_skipped_link(
                    link, 'wrong project name (not %s)' % search.supplied)
                return
            if not wheel.supported():
                self._log_skipped_link(
                    link, 'it is not compatible with this Python')
                return
            # This is a dirty hack to prevent installing Binary Wheels from
            # PyPI unless it is a Windows or Mac Binary Wheel. This is
            # paired with a change to PyPI disabling uploads for the
            # same. Once we have a mechanism for enabling support for
            # binary wheels on linux that deals with the inherent problems
            # of binary distribution this can be removed.
            comes_from = getattr(link, "comes_from", None)
            if ((not platform.startswith('win') and
                 not platform.startswith('macosx') and
                 not platform == 'cli') and
                    comes_from is not None and
                    urllib_parse.urlparse(
                        comes_from.url).netloc.endswith(PyPI.netloc)):
                if not wheel.supported(tags=supported_tags_noarch):
                    self._log_skipped_link(
                        link,
                        "it is a pypi-hosted binary "
                        "Wheel on an unsupported platform",
                    )
                    return
            version = wheel.version

    # This should be up by the search.ok_binary check, but see issue 2700.
    if "source" not in search.formats and ext != wheel_ext:
        self._log_skipped_link(
            link, 'No sources permitted for %s' % search.supplied)
        return

    if not version:
        version = egg_info_matches(egg_info, search.supplied, link)
    if version is None:
        self._log_skipped_link(
            link, 'wrong project name (not %s)' % search.supplied)
        return

    if (link.internal is not None and
            not link.internal and
            not normalize_name(search.supplied).lower()
            in self.allow_external and
            not self.allow_all_external):
        # We have a link that we are sure is external, so we should skip
        # it unless we are allowing externals
        self._log_skipped_link(link, 'it is externally hosted')
        self.need_warn_external = True
        return

    if (link.verifiable is not None and
            not link.verifiable and
            not (normalize_name(search.supplied).lower()
                 in self.allow_unverified)):
        # We have a link that we are sure we cannot verify its integrity,
        # so we should skip it unless we are allowing unsafe installs
        # for this requirement.
        self._log_skipped_link(link,
                               'it is an insecure and unverifiable file')
        self.need_warn_unverified = True
        return

    match = self._py_version_re.search(version)
    if match:
        version = version[:match.start()]
        py_version = match.group(1)
        if py_version != sys.version[:3]:
            self._log_skipped_link(link, 'Python version is incorrect')
            return
    logger.debug('Found link %s, version: %s', link, version)

    return InstallationCandidate(search.supplied, version, link)
def _link_package_versions(self, link, search_name):
    """
    Return an iterable of triples (pkg_resources_version_key,
    link, python_version) that can be extracted from the given
    link.

    Meant to be overridden by subclasses, not called by clients.
    """
    platform = get_platform()

    version = None
    if link.egg_fragment:
        egg_info = link.egg_fragment
    else:
        egg_info, ext = link.splitext()
        if not ext:
            if link not in self.logged_links:
                logger.debug('Skipping link %s; not a file', link)
                self.logged_links.add(link)
            return
        if egg_info.endswith('.tar'):
            # Special double-extension case:
            egg_info = egg_info[:-4]
            ext = '.tar' + ext
        if ext not in self._known_extensions():
            if link not in self.logged_links:
                logger.debug(
                    'Skipping link %s; unknown archive format: %s',
                    link,
                    ext,
                )
                self.logged_links.add(link)
            return
        if "macosx10" in link.path and ext == '.zip':
            if link not in self.logged_links:
                logger.debug('Skipping link %s; macosx10 one', link)
                self.logged_links.add(link)
            return
        if ext == wheel_ext:
            try:
                wheel = Wheel(link.filename)
            except InvalidWheelFilename:
                logger.debug(
                    'Skipping %s because the wheel filename is invalid',
                    link
                )
                return
            if (pkg_resources.safe_name(wheel.name).lower() !=
                    pkg_resources.safe_name(search_name).lower()):
                logger.debug(
                    'Skipping link %s; wrong project name (not %s)',
                    link,
                    search_name,
                )
                return
            if not wheel.supported():
                logger.debug(
                    'Skipping %s because it is not compatible with this '
                    'Python',
                    link,
                )
                return
            # This is a dirty hack to prevent installing Binary Wheels from
            # PyPI unless it is a Windows or Mac Binary Wheel. This is
            # paired with a change to PyPI disabling uploads for the
            # same. Once we have a mechanism for enabling support for
            # binary wheels on linux that deals with the inherent problems
            # of binary distribution this can be removed.
            comes_from = getattr(link, "comes_from", None)
            if (
                    (
                        not platform.startswith('win') and
                        not platform.startswith('macosx') and
                        not platform == 'cli'
                    ) and
                    comes_from is not None and
                    urllib_parse.urlparse(
                        comes_from.url
                    ).netloc.endswith(PyPI.netloc)):
                if not wheel.supported(tags=supported_tags_noarch):
                    logger.debug(
                        "Skipping %s because it is a pypi-hosted binary "
                        "Wheel on an unsupported platform",
                        link,
                    )
                    return
            version = wheel.version

    if not version:
        version = self._egg_info_matches(egg_info, search_name, link)
    if version is None:
        logger.debug(
            'Skipping link %s; wrong project name (not %s)',
            link,
            search_name,
        )
        return

    if (link.internal is not None and
            not link.internal and
            not normalize_name(search_name).lower()
            in self.allow_external and
            not self.allow_all_external):
        # We have a link that we are sure is external, so we should skip
        # it unless we are allowing externals
        logger.debug("Skipping %s because it is externally hosted.", link)
        self.need_warn_external = True
        return

    if (link.verifiable is not None and
            not link.verifiable and
            not (normalize_name(search_name).lower()
                 in self.allow_unverified)):
        # We have a link that we are sure we cannot verify its integrity,
        # so we should skip it unless we are allowing unsafe installs
        # for this requirement.
        logger.debug(
            "Skipping %s because it is an insecure and unverifiable file.",
            link,
        )
        self.need_warn_unverified = True
        return

    match = self._py_version_re.search(version)
    if match:
        version = version[:match.start()]
        py_version = match.group(1)
        if py_version != sys.version[:3]:
            logger.debug(
                'Skipping %s because Python version is incorrect', link
            )
            return
    logger.debug('Found link %s, version: %s', link, version)

    return InstallationCandidate(search_name, version, link)
def _link_package_versions(self, link, search_name):
    """
    Return an iterable of triples (pkg_resources_version_key,
    link, python_version) that can be extracted from the given
    link.

    Meant to be overridden by subclasses, not called by clients.
    """
    platform = get_platform()

    version = None
    if link.egg_fragment:
        egg_info = link.egg_fragment
    else:
        egg_info, ext = link.splitext()
        if not ext:
            if link not in self.logged_links:
                logger.debug('Skipping link %s; not a file', link)
                self.logged_links.add(link)
            return
        if egg_info.endswith('.tar'):
            # Special double-extension case:
            egg_info = egg_info[:-4]
            ext = '.tar' + ext
        if ext not in self._known_extensions():
            if link not in self.logged_links:
                logger.debug(
                    'Skipping link %s; unknown archive format: %s',
                    link,
                    ext,
                )
                self.logged_links.add(link)
            return
        if "macosx10" in link.path and ext == '.zip':
            if link not in self.logged_links:
                logger.debug('Skipping link %s; macosx10 one', link)
                self.logged_links.add(link)
            return
        if ext == wheel_ext:
            try:
                wheel = Wheel(link.filename)
            except InvalidWheelFilename:
                logger.debug(
                    'Skipping %s because the wheel filename is invalid',
                    link)
                return
            if (pkg_resources.safe_name(wheel.name).lower() !=
                    pkg_resources.safe_name(search_name).lower()):
                logger.debug(
                    'Skipping link %s; wrong project name (not %s)',
                    link,
                    search_name,
                )
                return
            if not wheel.supported():
                logger.debug(
                    'Skipping %s because it is not compatible with this '
                    'Python',
                    link,
                )
                return
            # This is a dirty hack to prevent installing Binary Wheels from
            # PyPI unless it is a Windows or Mac Binary Wheel. This is
            # paired with a change to PyPI disabling uploads for the
            # same. Once we have a mechanism for enabling support for
            # binary wheels on linux that deals with the inherent problems
            # of binary distribution this can be removed.
            comes_from = getattr(link, "comes_from", None)
            if ((not platform.startswith('win') and
                 not platform.startswith('macosx') and
                 not platform == 'cli') and
                    comes_from is not None and
                    urllib_parse.urlparse(
                        comes_from.url).netloc.endswith(PyPI.netloc)):
                if not wheel.supported(tags=supported_tags_noarch):
                    logger.debug(
                        "Skipping %s because it is a pypi-hosted binary "
                        "Wheel on an unsupported platform",
                        link,
                    )
                    return
            version = wheel.version

    if not version:
        version = self._egg_info_matches(egg_info, search_name, link)
    if version is None:
        logger.debug(
            'Skipping link %s; wrong project name (not %s)',
            link,
            search_name,
        )
        return

    if (link.internal is not None and
            not link.internal and
            not normalize_name(search_name).lower()
            in self.allow_external and
            not self.allow_all_external):
        # We have a link that we are sure is external, so we should skip
        # it unless we are allowing externals
        logger.debug("Skipping %s because it is externally hosted.", link)
        self.need_warn_external = True
        return

    if (link.verifiable is not None and
            not link.verifiable and
            not (normalize_name(search_name).lower()
                 in self.allow_unverified)):
        # We have a link that we are sure we cannot verify its integrity,
        # so we should skip it unless we are allowing unsafe installs
        # for this requirement.
        logger.debug(
            "Skipping %s because it is an insecure and unverifiable file.",
            link,
        )
        self.need_warn_unverified = True
        return

    match = self._py_version_re.search(version)
    if match:
        version = version[:match.start()]
        py_version = match.group(1)
        if py_version != sys.version[:3]:
            logger.debug('Skipping %s because Python version is incorrect',
                         link)
            return
    logger.debug('Found link %s, version: %s', link, version)

    return InstallationCandidate(search_name, version, link)
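# The special ".tar" double-extension case above, in isolation: plain
# splitext would report ".gz" as the whole extension of a ".tar.gz" archive.
# A self-contained sketch of the same split.
import posixpath


def split_archive_ext(filename):
    egg_info, ext = posixpath.splitext(filename)
    if egg_info.endswith('.tar'):
        # Special double-extension case: keep ".tar.gz" together
        egg_info = egg_info[:-4]
        ext = '.tar' + ext
    return egg_info, ext


assert split_archive_ext('pip-1.5.6.tar.gz') == ('pip-1.5.6', '.tar.gz')
assert split_archive_ext('pip-1.5.6.zip') == ('pip-1.5.6', '.zip')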
def parse_requirements(filename, finder=None, comes_from=None, options=None,
                       session=None):
    if session is None:
        raise TypeError(
            "parse_requirements() missing 1 required keyword argument: "
            "'session'")

    skip_match = None
    skip_regex = options.skip_requirements_regex if options else None
    if skip_regex:
        skip_match = re.compile(skip_regex)
    reqs_file_dir = os.path.dirname(os.path.abspath(filename))
    filename, content = get_file_content(
        filename,
        comes_from=comes_from,
        session=session,
    )
    for line_number, line in enumerate(content.splitlines(), 1):
        line = line.strip()

        # Remove comments from file
        line = re.sub(r"(^|\s)#.*$", "", line)

        if not line or line.startswith('#'):
            continue
        if skip_match and skip_match.search(line):
            continue
        if line.startswith('-r') or line.startswith('--requirement'):
            if line.startswith('-r'):
                req_url = line[2:].strip()
            else:
                req_url = line[len('--requirement'):].strip().strip('=')
            if _scheme_re.search(filename):
                # Relative to a URL
                req_url = urllib_parse.urljoin(filename, req_url)
            elif not _scheme_re.search(req_url):
                req_url = os.path.join(os.path.dirname(filename), req_url)
            for item in parse_requirements(req_url, finder,
                                           comes_from=filename,
                                           options=options,
                                           session=session):
                yield item
        elif line.startswith('-Z') or line.startswith('--always-unzip'):
            # No longer used, but previously these were used in
            # requirement files, so we'll ignore.
            pass
        elif line.startswith('-f') or line.startswith('--find-links'):
            if line.startswith('-f'):
                line = line[2:].strip()
            else:
                line = line[len('--find-links'):].strip().lstrip('=')
            # FIXME: it would be nice to keep track of the source of
            # the find_links:
            # support a find-links local path relative to a requirements file
            relative_to_reqs_file = os.path.join(reqs_file_dir, line)
            if os.path.exists(relative_to_reqs_file):
                line = relative_to_reqs_file
            if finder:
                finder.find_links.append(line)
        elif line.startswith('-i') or line.startswith('--index-url'):
            if line.startswith('-i'):
                line = line[2:].strip()
            else:
                line = line[len('--index-url'):].strip().lstrip('=')
            if finder:
                finder.index_urls = [line]
        elif line.startswith('--extra-index-url'):
            line = line[len('--extra-index-url'):].strip().lstrip('=')
            if finder:
                finder.index_urls.append(line)
        elif line.startswith('--use-wheel'):
            # Default in 1.5
            pass
        elif line.startswith('--no-use-wheel'):
            if finder:
                finder.use_wheel = False
        elif line.startswith('--no-index'):
            if finder:
                finder.index_urls = []
        elif line.startswith("--allow-external"):
            line = line[len("--allow-external"):].strip().lstrip("=")
            if finder:
                finder.allow_external |= set([normalize_name(line).lower()])
        elif line.startswith("--allow-all-external"):
            if finder:
                finder.allow_all_external = True
        # Remove in 7.0
        elif line.startswith("--no-allow-external"):
            pass
        # Remove in 7.0
        elif line.startswith("--no-allow-insecure"):
            pass
        # Remove after 7.0
        elif line.startswith("--allow-insecure"):
            line = line[len("--allow-insecure"):].strip().lstrip("=")
            if finder:
                finder.allow_unverified |= set([normalize_name(line).lower()])
        elif line.startswith("--allow-unverified"):
            line = line[len("--allow-unverified"):].strip().lstrip("=")
            if finder:
                finder.allow_unverified |= set([normalize_name(line).lower()])
        else:
            comes_from = '-r %s (line %s)' % (filename, line_number)
            if line.startswith('-e') or line.startswith('--editable'):
                if line.startswith('-e'):
                    line = line[2:].strip()
                else:
                    line = line[len('--editable'):].strip().lstrip('=')
                req = InstallRequirement.from_editable(
                    line,
                    comes_from=comes_from,
                    default_vcs=options.default_vcs if options else None,
                    isolated=options.isolated_mode if options else False,
                )
            else:
                req = InstallRequirement.from_line(
                    line,
                    comes_from,
                    isolated=options.isolated_mode if options else False,
                )
            yield req
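# The comment-stripping rule used by the loop above, isolated: a "#" at the
# start of a line, or preceded by whitespace, begins a comment; an embedded
# "#" inside a token survives.
import re


def strip_comment(line):
    # same pattern as in the parser above
    return re.sub(r"(^|\s)#.*$", "", line)


assert strip_comment("requests==2.5.0 # pinned") == "requests==2.5.0"
assert strip_comment("# a whole-line comment") == ""
assert strip_comment("wat#er==1.0") == "wat#er==1.0"  # embedded "#" survives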
def __init__(
    self,
    find_links,
    index_urls,
    use_wheel=True,
    allow_external=(),
    allow_unverified=(),
    allow_all_external=False,
    allow_all_prereleases=False,
    trusted_hosts=None,
    process_dependency_links=False,
    session=None,
):
    if session is None:
        raise TypeError("PackageFinder() missing 1 required keyword argument: "
                        "'session'")

    # Build find_links. If an argument starts with ~, it may be
    # a local file relative to a home directory. So try normalizing
    # it and if it exists, use the normalized version.
    # This is deliberately conservative - it might be fine just to
    # blindly normalize anything starting with a ~...
    self.find_links = []
    for link in find_links:
        if link.startswith("~"):
            new_link = normalize_path(link)
            if os.path.exists(new_link):
                link = new_link
        self.find_links.append(link)

    self.index_urls = index_urls
    self.dependency_links = []

    # These are boring links that have already been logged somehow:
    self.logged_links = set()

    self.use_wheel = use_wheel

    # Do we allow (safe and verifiable) externally hosted files?
    self.allow_external = set(normalize_name(n) for n in allow_external)

    # Which names are allowed to install insecure and unverifiable files?
    self.allow_unverified = set(normalize_name(n) for n in allow_unverified)

    # Anything that is allowed unverified is also allowed external
    self.allow_external |= self.allow_unverified

    # Do we allow all (safe and verifiable) externally hosted files?
    self.allow_all_external = allow_all_external

    # Domains that we won't emit warnings for when not using HTTPS
    self.secure_origins = [
        ("*", host, "*") for host in (trusted_hosts if trusted_hosts else [])
    ]

    # Stores if we ignored any external links so that we can instruct
    # end users how to install them if no distributions are available
    self.need_warn_external = False

    # Stores if we ignored any unsafe links so that we can instruct
    # end users how to install them if no distributions are available
    self.need_warn_unverified = False

    # Do we want to allow _all_ pre-releases?
    self.allow_all_prereleases = allow_all_prereleases

    # Do we process dependency links?
    self.process_dependency_links = process_dependency_links

    # The Session we'll use to make requests
    self.session = session
def process_line(line, filename, line_number, finder=None, comes_from=None,
                 options=None, session=None, wheel_cache=None,
                 constraint=False):
    """Process a single requirements line; this can result in
    creating/yielding requirements, or updating the finder.

    For lines that contain requirements, the only options that have an
    effect are from SUPPORTED_OPTIONS_REQ, and they are scoped to the
    requirement. Other options from SUPPORTED_OPTIONS may be present,
    but are ignored.

    For lines that do not contain requirements, the only options that have
    an effect are from SUPPORTED_OPTIONS. Options from SUPPORTED_OPTIONS_REQ
    may be present, but are ignored. These lines may contain multiple
    options (although our docs imply only one is supported), and all are
    parsed and affect the finder.

    :param constraint: If True, parsing a constraints file.
    """
    parser = build_parser()
    defaults = parser.get_default_values()
    defaults.index_url = None
    if finder:
        # `finder.format_control` will be updated during parsing
        defaults.format_control = finder.format_control
    args_str, options_str = break_args_options(line)
    opts, _ = parser.parse_args(shlex.split(options_str), defaults)

    # preserve for the nested code path
    line_comes_from = '%s %s (line %s)' % ('-c' if constraint else '-r',
                                           filename, line_number)

    # yield a line requirement
    if args_str:
        isolated = options.isolated_mode if options else False
        if options:
            cmdoptions.check_install_build_global(options, opts)
        # get the options that apply to requirements
        req_options = {}
        for dest in SUPPORTED_OPTIONS_REQ_DEST:
            if dest in opts.__dict__ and opts.__dict__[dest]:
                req_options[dest] = opts.__dict__[dest]
        yield InstallRequirement.from_line(args_str, line_comes_from,
                                           constraint=constraint,
                                           isolated=isolated,
                                           options=req_options,
                                           wheel_cache=wheel_cache)

    # yield an editable requirement
    elif opts.editables:
        isolated = options.isolated_mode if options else False
        default_vcs = options.default_vcs if options else None
        yield InstallRequirement.from_editable(opts.editables[0],
                                               comes_from=line_comes_from,
                                               constraint=constraint,
                                               default_vcs=default_vcs,
                                               isolated=isolated,
                                               wheel_cache=wheel_cache)

    # parse a nested requirements file
    elif opts.requirements or opts.constraints:
        if opts.requirements:
            req_path = opts.requirements[0]
            nested_constraint = False
        else:
            req_path = opts.constraints[0]
            nested_constraint = True
        # original file is over http
        if SCHEME_RE.search(filename):
            # do a url join so relative paths work
            req_path = urllib_parse.urljoin(filename, req_path)
        # original file and nested file are paths
        elif not SCHEME_RE.search(req_path):
            # do a join so relative paths work
            req_dir = os.path.dirname(filename)
            req_path = os.path.join(os.path.dirname(filename), req_path)
        # TODO: Why not use `comes_from='-r {} (line {})'` here as well?
        parser = parse_requirements(req_path, finder, comes_from, options,
                                    session, constraint=nested_constraint,
                                    wheel_cache=wheel_cache)
        for req in parser:
            yield req

    # set finder options
    elif finder:
        if opts.index_url:
            finder.index_urls = [opts.index_url]
        if opts.use_wheel is False:
            finder.use_wheel = False
            pip.index.fmt_ctl_no_use_wheel(finder.format_control)
        if opts.no_index is True:
            finder.index_urls = []
        if opts.allow_all_external:
            finder.allow_all_external = opts.allow_all_external
        if opts.extra_index_urls:
            finder.index_urls.extend(opts.extra_index_urls)
        if opts.allow_external:
            finder.allow_external |= set(
                [normalize_name(v).lower() for v in opts.allow_external])
        if opts.allow_unverified:
            # Remove after 7.0
            finder.allow_unverified |= set(
                [normalize_name(v).lower() for v in opts.allow_unverified])
        if opts.find_links:
            # FIXME: it would be nice to keep track of the source
            # of the find_links: support a find-links local path
            # relative to a requirements file.
            value = opts.find_links[0]
            req_dir = os.path.dirname(os.path.abspath(filename))
            relative_to_reqs_file = os.path.join(req_dir, value)
            if os.path.exists(relative_to_reqs_file):
                value = relative_to_reqs_file
            finder.find_links.append(value)
            # Default in 1.5
            pass
        elif line.startswith('--no-use-wheel'):
            if finder:
                finder.use_wheel = False
        elif line.startswith('--no-index'):
            if finder:
                finder.index_urls = []
        elif line.startswith("--allow-external"):
            line = _remove_prefix(line, '--allow-external')
            if finder:
                finder.allow_external |= set([normalize_name(line).lower()])
        elif line.startswith("--allow-all-external"):
            if finder:
                finder.allow_all_external = True
        # Remove in 7.0
        elif line.startswith("--no-allow-external"):
            pass
        # Remove in 7.0
        elif line.startswith("--no-allow-insecure"):
            pass
        # Remove after 7.0
        elif line.startswith("--allow-insecure"):
            line = _remove_prefix(line, '--allow-insecure')
            if finder:
                finder.allow_unverified |= set([normalize_name(line).lower()])