def get_requirements_and_latest(filename, force=False):
    """Parse a requirements file and look up the latest version of each entry.

    Yields tuples of ``(original line, InstallRequirement instance,
    spec_versions, latest_version)``. Lines that do not parse to a named,
    non-URL requirement are yielded as ``(original line, None, None, None)``.
    Parsed requirements that pin no version are skipped entirely unless
    *force* is set.

    :param filename: Path to a requirements.txt file.
    :param force: Also look up the latest version for packages that do not
        pin a version.
    """
    session = PipSession()
    pkg_finder = PackageFinder(session=session, find_links=[],
                               index_urls=[PyPI.simple_url])
    _, content = get_file_content(filename, session=session)
    for lineno, text, raw in yield_lines(content):
        cleaned = req_file.COMMENT_RE.sub("", text).strip()
        parsed = parse_requirement(cleaned, filename, lineno, session,
                                   pkg_finder)
        unusable = (parsed is None or parsed.name is None
                    or req_file.SCHEME_RE.match(parsed.name))
        if unusable:
            # Not a plain named requirement (blank, option, or URL scheme).
            yield (raw, None, None, None)
            continue
        pinned = current_version(parsed)
        if pinned or force:
            yield (raw, parsed, pinned,
                   latest_version(parsed, session, pkg_finder))
def parse_requirements(filename, finder=None, comes_from=None, options=None,
                       session=None, constraint=False, wheel_cache=None):
    """Yield an InstallRequirement for every entry in a requirements file.

    :param filename: Path or url of requirements file.
    :param finder: Instance of pip.index.PackageFinder.
    :param comes_from: Origin description of requirements.
    :param options: cli options.
    :param session: Instance of pip.download.PipSession; required.
    :param constraint: If true, parsing a constraint file rather than
        requirements file.
    :param wheel_cache: Instance of pip.wheel.WheelCache
    :raises TypeError: when *session* is omitted (raised on first
        iteration, since this is a generator).
    """
    if session is None:
        raise TypeError(
            "parse_requirements() missing 1 required keyword argument: "
            "'session'"
        )
    _, content = get_file_content(
        filename, comes_from=comes_from, session=session
    )
    for lineno, text in preprocess(content, options):
        for parsed_req in process_line(text, filename, lineno, finder,
                                       comes_from, options, session,
                                       wheel_cache, constraint=constraint):
            yield parsed_req
def get_requirements_and_latest(filename, force=False):
    """Parse a requirements file and find each requirement's latest version.

    Yields tuples of ``(original line, InstallRequirement instance,
    spec_versions, latest_version)``; unparseable, nameless, or URL-style
    lines come back as ``(original line, None, None, None)``.

    :param filename: Path to a requirements.txt file.
    :param force: Force getting latest version even for packages without
        a version specified.
    """
    session = PipSession()
    finder = PackageFinder(session=session, find_links=[],
                           index_urls=[PyPI.simple_url])
    _, content = get_file_content(filename, session=session)
    for lineno, cleaned, original in yield_lines(content):
        cleaned = req_file.COMMENT_RE.sub('', cleaned).strip()
        requirement = parse_requirement_line(cleaned, filename, lineno,
                                             session, finder)
        skip = (requirement is None or requirement.name is None
                or req_file.SCHEME_RE.match(requirement.name))
        if skip:
            yield (original, None, None, None)
            continue
        pinned = current_version(requirement)
        if not (pinned or force):
            # Unpinned and not forced: line is dropped from the output.
            continue
        yield (original, requirement, pinned,
               latest_version(requirement, session, finder))
def get_requirements_and_latest(filename):
    """Parse a requirements file and get latest version for each requirement.

    Yields a tuple of (original line, InstallRequirement instance,
    spec_version, latest_version). Lines that are blank after comment
    stripping, that produce no requirement, or whose requirement pins no
    version are yielded as (original line, None, None, None), so callers
    always receive exactly one tuple per input line.

    :param filename: Path to a requirements.txt file.
    """
    session = PipSession()
    url, content = get_file_content(filename, session=session)
    for orig_line, line_number, line in yield_lines(content):
        line = req_file.COMMENT_RE.sub('', line)
        line = line.strip()
        # Guard-clause style replaces the original deeply nested if/else
        # pyramid, which repeated the placeholder yield in three branches.
        if not line:
            yield (orig_line, None, None, None)
            continue
        reqs = list(req_file.process_line(line, filename, line_number,
                                          session=session))
        if not reqs:
            yield (orig_line, None, None, None)
            continue
        req = reqs[0]
        spec_ver = None
        try:
            if req and req.req:
                # First specifier pair, e.g. ('==', '1.2'); IndexError
                # means the requirement has no version specifier.
                spec_ver = Version(req.req.specs[0][1])
        except IndexError:
            pass
        if spec_ver:
            latest_ver = latest_version(req, session)
            yield (orig_line, req, spec_ver, latest_ver)
        else:
            yield (orig_line, None, None, None)
def parse_requirements(filename, finder=None, comes_from=None, options=None,
                       session=None, wheel_cache=None):
    """Yield an InstallRequirement for every entry in a requirements file.

    :param filename: Path or url of requirements file.
    :param finder: Instance of pip.index.PackageFinder.
    :param comes_from: Origin description of requirements.
    :param options: Global options.
    :param session: Instance of pip.download.PipSession (required).
    :param wheel_cache: Instance of pip.wheel.WheelCache
    :raises TypeError: when *session* is omitted (on first iteration,
        since this is a generator).
    """
    if session is None:
        raise TypeError(
            "parse_requirements() missing 1 required keyword argument: "
            "'session'"
        )
    _, content = get_file_content(
        filename, comes_from=comes_from, session=session
    )
    # Pre-processing pipeline: drop comments, join continuation lines,
    # then apply any skip regex from the options.
    cleaned = skip_regex(join_lines(ignore_comments(content.splitlines())),
                         options)
    for line_number, line in enumerate(cleaned, 1):
        for req in process_line(line, filename, line_number, finder,
                                comes_from, options, session, wheel_cache):
            yield req
def parse_requirements(filename, finder=None, comes_from=None, options=None,
                       session=None, wheel_cache=None):
    """Parse a requirements file, yielding one InstallRequirement per entry.

    :param filename: Path or url of requirements file.
    :param finder: Instance of pip.index.PackageFinder.
    :param comes_from: Origin description of requirements.
    :param options: Global options.
    :param session: Instance of pip.download.PipSession; required.
    :param wheel_cache: Instance of pip.wheel.WheelCache
    """
    if session is None:
        raise TypeError(
            "parse_requirements() missing 1 required keyword argument: "
            "'session'")
    _, content = get_file_content(filename, comes_from=comes_from,
                                  session=session)
    # Normalise the raw text step by step before per-line processing.
    stage = content.splitlines()
    stage = ignore_comments(stage)
    stage = join_lines(stage)
    stage = skip_regex(stage, options)
    for line_number, line in enumerate(stage, start=1):
        for req in process_line(line, filename, line_number, finder,
                                comes_from, options, session, wheel_cache):
            yield req
def download_pkfile():
    """Download the package file named in ``cf`` if it is not cached locally.

    Creates ``cf['home']`` (with intermediate directories) when missing,
    then fetches ``cf['url']`` and writes its body to ``cf['pkfile']``.
    Does nothing when the file already exists.
    """
    if not os.path.isdir(cf['home']):
        # os.makedirs creates intermediate directories like `mkdir -p`
        # without spawning a subprocess (and works on Windows too).
        os.makedirs(cf['home'])
    if not os.path.exists(cf['pkfile']):
        # Lazy %-style args: formatted only if INFO logging is enabled.
        logger.info("Downloading package file %s", cf['url'])
        url, content = get_file_content(cf['url'])
        with open(cf['pkfile'], 'w') as f:
            f.write(content)
def parse_requirements(filename, finder=None, comes_from=None, options=None,
                       session=None):
    """Begin parsing a requirements file.

    NOTE(review): this variant never yields or returns requirements — the
    loop below only strips each line and discards the result, so the
    function returns ``None``. It looks like a truncated copy of a fuller
    parser; confirm before relying on it.

    :param filename: Path or url of requirements file.
    :param finder: Unused in this fragment.
    :param comes_from: Origin description of requirements.
    :param options: Options object; only ``skip_requirements_regex`` is read.
    :param session: Required download session; ``TypeError`` when omitted.
    """
    if session is None:
        raise TypeError(
            "parse_requirements() missing 1 required keyword argument: "
            "'session'"
        )
    skip_match = None
    # Optional regex used to skip matching requirement lines entirely.
    skip_regex = options.skip_requirements_regex if options else None
    if skip_regex:
        skip_match = re.compile(skip_regex)
    # Directory of the requirements file; computed but unused in this
    # fragment (presumably for resolving relative paths).
    reqs_file_dir = os.path.dirname(os.path.abspath(filename))
    filename, content = get_file_content(
        filename,
        comes_from=comes_from,
        session=session,
    )
    for line_number, line in enumerate(content.splitlines(), 1):
        line = line.strip()
def parse_requirements(filename, finder=None, comes_from=None, options=None,
                       session=None, constraint=False, wheel_cache=None):
    """Parse a requirements file and yield InstallRequirement instances.

    :param filename: Path or url of requirements file.
    :param finder: Instance of pip.index.PackageFinder.
    :param comes_from: Origin description of requirements.
    :param options: Global options.
    :param session: Instance of pip.download.PipSession (required).
    :param constraint: If true, parsing a constraint file rather than
        requirements file.
    :param wheel_cache: Instance of pip.wheel.WheelCache
    """
    if session is None:
        raise TypeError(
            "parse_requirements() missing 1 required keyword argument: "
            "'session'"
        )
    _, content = get_file_content(
        filename, comes_from=comes_from, session=session
    )
    lines = content.splitlines()
    lines = ignore_comments(lines)
    lines = join_lines(lines)
    lines = skip_regex(lines, options)
    lines = expand_env_variables(lines)
    kwargs = {}
    # The constraint keyword was only added in pip 7.1+.
    # BUG FIX: the old check compared version components as *strings*
    # (tuple(...) > ('7', '0')), so pip >= 10 sorted before '7'
    # lexicographically and silently lost constraint support. Compare the
    # leading numeric digits of each component as ints instead.
    numeric = []
    for part in pip.__version__.split('.')[:2]:
        digits = ''
        for ch in part:
            if not ch.isdigit():
                break
            digits += ch
        numeric.append(int(digits or 0))
    if tuple(numeric) > (7, 0):
        kwargs['constraint'] = constraint
    for line_number, line in enumerate(lines, 1):
        req_iter = process_line(line, filename, line_number, finder,
                                comes_from, options, session, wheel_cache,
                                **kwargs)
        for req in req_iter:
            yield req
def get_requirements_and_latest(filename, force=False):
    """Parse a requirements file and get latest version for each requirement.

    Yields a tuple of (original line, InstallRequirement instance,
    spec_version, latest_version); entries without a usable current
    version come back as (original line, None, None, None).

    :param filename: Path to a requirements.txt file.
    :param force: Force getting latest version even for packages without
        a version specified.
    """
    session = PipSession()
    url, content = get_file_content(filename, session=session)
    for raw, lineno, text in yield_lines(content):
        cleaned = req_file.COMMENT_RE.sub('', text).strip()
        requirement = parse_requirement(cleaned, filename, lineno, session)
        pinned = current_version(requirement, force=force)
        if not pinned:
            yield (raw, None, None, None)
        else:
            # latest_version is only consulted for usable requirements.
            yield (raw, requirement, pinned,
                   latest_version(requirement, session))
def parse_requirements(filename, finder=None, comes_from=None, options=None,
                       session=None):
    """Parse a requirements file and yield InstallRequirement instances.

    Option lines (``--find-links``, ``--index-url``, ``--allow-*`` ...)
    mutate *finder* in place when one is given; ``-r``/``--requirement``
    lines recurse into the referenced file; any remaining line becomes an
    InstallRequirement and is yielded.

    :param filename: Path or url of requirements file.
    :param finder: PackageFinder updated in place by option lines; may be
        ``None``.
    :param comes_from: Origin description of requirements.
    :param options: Options object; ``skip_requirements_regex``,
        ``default_vcs`` and ``isolated_mode`` are read when present.
    :param session: Required download session; ``TypeError`` is raised on
        first iteration (this is a generator) when omitted.
    """
    if session is None:
        raise TypeError(
            "parse_requirements() missing 1 required keyword argument: "
            "'session'"
        )
    skip_match = None
    skip_regex = options.skip_requirements_regex if options else None
    if skip_regex:
        skip_match = re.compile(skip_regex)
    # Directory of the requirements file, for resolving relative paths.
    reqs_file_dir = os.path.dirname(os.path.abspath(filename))
    filename, content = get_file_content(
        filename,
        comes_from=comes_from,
        session=session,
    )
    for line_number, line in enumerate(content.splitlines(), 1):
        line = line.strip()
        # Remove comments from file and all spaces before it
        line = re.sub(r"(^|\s)+#.*$", "", line)
        if not line:
            continue
        if skip_match and skip_match.search(line):
            continue
        # NOTE: the branch order below matters — prefix matching via
        # startswith means longer options must not share a prefix with an
        # earlier branch.
        if line.startswith(('-r', '--requirement')):
            req_url = _remove_prefixes(line, '-r', '--requirement')
            if _scheme_re.search(filename):
                # Relative to a URL
                req_url = urllib_parse.urljoin(filename, req_url)
            elif not _scheme_re.search(req_url):
                req_url = os.path.join(os.path.dirname(filename), req_url)
            # Recurse into the referenced requirements file.
            for item in parse_requirements(
                    req_url, finder,
                    comes_from=filename,
                    options=options,
                    session=session):
                yield item
        elif line.startswith(('-Z', '--always-unzip')):
            # No longer used, but previously these were used in
            # requirement files, so we'll ignore.
            pass
        elif line.startswith(('-f', '--find-links')):
            find_links = _remove_prefixes(line, '-f', '--find-links')
            # FIXME: it would be nice to keep track of the source of
            # the find_links:
            # support a find-links local path relative to a requirements file
            relative_to_reqs_file = os.path.join(reqs_file_dir, find_links)
            if os.path.exists(relative_to_reqs_file):
                find_links = relative_to_reqs_file
            if finder:
                finder.find_links.append(find_links)
        elif line.startswith(('-i', '--index-url')):
            index_url = _remove_prefixes(line, '-i', '--index-url')
            if finder:
                finder.index_urls = [index_url]
        elif line.startswith('--extra-index-url'):
            line = _remove_prefix(line, '--extra-index-url')
            if finder:
                finder.index_urls.append(line)
        elif line.startswith('--use-wheel'):
            # Default in 1.5
            pass
        elif line.startswith('--no-use-wheel'):
            if finder:
                finder.use_wheel = False
        elif line.startswith('--no-index'):
            if finder:
                finder.index_urls = []
        elif line.startswith("--allow-external"):
            line = _remove_prefix(line, '--allow-external')
            if finder:
                finder.allow_external |= set([normalize_name(line).lower()])
        elif line.startswith("--allow-all-external"):
            if finder:
                finder.allow_all_external = True
        # Remove in 7.0
        elif line.startswith("--no-allow-external"):
            pass
        # Remove in 7.0
        elif line.startswith("--no-allow-insecure"):
            pass
        # Remove after 7.0
        elif line.startswith("--allow-insecure"):
            line = _remove_prefix(line, '--allow-insecure')
            if finder:
                finder.allow_unverified |= set([normalize_name(line).lower()])
        elif line.startswith("--allow-unverified"):
            line = _remove_prefix(line, '--allow-unverified')
            if finder:
                finder.allow_unverified |= set([normalize_name(line).lower()])
        else:
            # Not an option line: build and yield an InstallRequirement.
            comes_from = '-r %s (line %s)' % (filename, line_number)
            if line.startswith(('-e', '--editable')):
                editable = _remove_prefixes(line, '-e', '--editable')
                req = InstallRequirement.from_editable(
                    editable,
                    comes_from=comes_from,
                    default_vcs=options.default_vcs if options else None,
                    isolated=options.isolated_mode if options else False,
                )
            else:
                req = InstallRequirement.from_line(
                    line,
                    comes_from,
                    isolated=options.isolated_mode if options else False,
                )
            yield req
def parse_requirements(filename, finder=None, comes_from=None, options=None,
                       session=None):
    """Parse a requirements file and yield InstallRequirement instances.

    Option lines mutate *finder* in place when one is given;
    ``-r``/``--requirement`` lines recurse into the referenced file; any
    remaining line becomes an InstallRequirement and is yielded.

    :param filename: Path or url of requirements file.
    :param finder: PackageFinder updated in place by option lines; may be
        ``None``.
    :param comes_from: Origin description of requirements.
    :param options: Options object; ``skip_requirements_regex``,
        ``default_vcs`` and ``isolated_mode`` are read when present.
    :param session: Required download session; ``TypeError`` is raised on
        first iteration (this is a generator) when omitted.
    """
    if session is None:
        raise TypeError(
            "parse_requirements() missing 1 required keyword argument: "
            "'session'")
    skip_match = None
    skip_regex = options.skip_requirements_regex if options else None
    if skip_regex:
        skip_match = re.compile(skip_regex)
    # Directory of the requirements file, for resolving relative paths.
    reqs_file_dir = os.path.dirname(os.path.abspath(filename))
    filename, content = get_file_content(
        filename,
        comes_from=comes_from,
        session=session,
    )
    for line_number, line in enumerate(content.splitlines(), 1):
        line = line.strip()
        # Remove comments from file
        line = re.sub(r"(^|\s)#.*$", "", line)
        if not line or line.startswith('#'):
            continue
        if skip_match and skip_match.search(line):
            continue
        # Option values are recovered by slicing off the known flag prefix
        # and stripping the optional '=' separator.
        if line.startswith('-r') or line.startswith('--requirement'):
            if line.startswith('-r'):
                req_url = line[2:].strip()
            else:
                req_url = line[len('--requirement'):].strip().strip('=')
            if _scheme_re.search(filename):
                # Relative to a URL
                req_url = urllib_parse.urljoin(filename, req_url)
            elif not _scheme_re.search(req_url):
                req_url = os.path.join(os.path.dirname(filename), req_url)
            # Recurse into the referenced requirements file.
            for item in parse_requirements(req_url, finder,
                                           comes_from=filename,
                                           options=options,
                                           session=session):
                yield item
        elif line.startswith('-Z') or line.startswith('--always-unzip'):
            # No longer used, but previously these were used in
            # requirement files, so we'll ignore.
            pass
        elif line.startswith('-f') or line.startswith('--find-links'):
            if line.startswith('-f'):
                line = line[2:].strip()
            else:
                line = line[len('--find-links'):].strip().lstrip('=')
            # FIXME: it would be nice to keep track of the source of
            # the find_links:
            # support a find-links local path relative to a requirements file
            relative_to_reqs_file = os.path.join(reqs_file_dir, line)
            if os.path.exists(relative_to_reqs_file):
                line = relative_to_reqs_file
            if finder:
                finder.find_links.append(line)
        elif line.startswith('-i') or line.startswith('--index-url'):
            if line.startswith('-i'):
                line = line[2:].strip()
            else:
                line = line[len('--index-url'):].strip().lstrip('=')
            if finder:
                finder.index_urls = [line]
        elif line.startswith('--extra-index-url'):
            line = line[len('--extra-index-url'):].strip().lstrip('=')
            if finder:
                finder.index_urls.append(line)
        elif line.startswith('--use-wheel'):
            # Default in 1.5
            pass
        elif line.startswith('--no-use-wheel'):
            if finder:
                finder.use_wheel = False
        elif line.startswith('--no-index'):
            if finder:
                finder.index_urls = []
        elif line.startswith("--allow-external"):
            line = line[len("--allow-external"):].strip().lstrip("=")
            if finder:
                finder.allow_external |= set([normalize_name(line).lower()])
        elif line.startswith("--allow-all-external"):
            if finder:
                finder.allow_all_external = True
        # Remove in 7.0
        elif line.startswith("--no-allow-external"):
            pass
        # Remove in 7.0
        elif line.startswith("--no-allow-insecure"):
            pass
        # Remove after 7.0
        elif line.startswith("--allow-insecure"):
            line = line[len("--allow-insecure"):].strip().lstrip("=")
            if finder:
                finder.allow_unverified |= set([normalize_name(line).lower()])
        elif line.startswith("--allow-unverified"):
            line = line[len("--allow-unverified"):].strip().lstrip("=")
            if finder:
                finder.allow_unverified |= set([normalize_name(line).lower()])
        else:
            # Not an option line: build and yield an InstallRequirement.
            comes_from = '-r %s (line %s)' % (filename, line_number)
            if line.startswith('-e') or line.startswith('--editable'):
                if line.startswith('-e'):
                    line = line[2:].strip()
                else:
                    line = line[len('--editable'):].strip().lstrip('=')
                req = InstallRequirement.from_editable(
                    line,
                    comes_from=comes_from,
                    default_vcs=options.default_vcs if options else None,
                    isolated=options.isolated_mode if options else False,
                )
            else:
                req = InstallRequirement.from_line(
                    line,
                    comes_from,
                    isolated=options.isolated_mode if options else False,
                )
            yield req
def parse_requirements(filename, finder=None, comes_from=None, options=None,
                       session=None):
    """Parse a requirements file and yield InstallRequirement instances.

    Option lines mutate *finder* in place; ``-r``/``--requirement`` lines
    recurse into the referenced file; any remaining line becomes an
    InstallRequirement and is yielded.

    :param filename: Path or url of requirements file.
    :param finder: PackageFinder updated in place by option lines.
        NOTE(review): several branches below (``--use-wheel``,
        ``--no-index``, ``--allow-*``) dereference *finder* without a
        ``None`` guard, so passing ``finder=None`` with such option lines
        present would raise AttributeError — confirm callers always pass
        a finder.
    :param comes_from: Origin description of requirements.
    :param options: Options object; ``skip_requirements_regex``,
        ``default_vcs`` and ``pre`` are read when present.
    :param session: Download session; a fresh PipSession is created when
        omitted.
    """
    if session is None:
        session = PipSession()
    skip_match = None
    skip_regex = options.skip_requirements_regex if options else None
    if skip_regex:
        skip_match = re.compile(skip_regex)
    # Directory of the requirements file, for resolving relative paths.
    reqs_file_dir = os.path.dirname(os.path.abspath(filename))
    filename, content = get_file_content(
        filename,
        comes_from=comes_from,
        session=session,
    )
    for line_number, line in enumerate(content.splitlines()):
        # Requirements-file line numbers are 1-based.
        line_number += 1
        line = line.strip()
        # Remove comments from file
        line = re.sub(r"(^|\s)#.*$", "", line)
        if not line or line.startswith('#'):
            continue
        if skip_match and skip_match.search(line):
            continue
        if line.startswith('-r') or line.startswith('--requirement'):
            if line.startswith('-r'):
                req_url = line[2:].strip()
            else:
                req_url = line[len('--requirement'):].strip().strip('=')
            if _scheme_re.search(filename):
                # Relative to a URL
                req_url = urlparse.urljoin(filename, req_url)
            elif not _scheme_re.search(req_url):
                req_url = os.path.join(os.path.dirname(filename), req_url)
            # Recurse into the referenced requirements file.
            for item in parse_requirements(
                    req_url, finder,
                    comes_from=filename,
                    options=options,
                    session=session):
                yield item
        elif line.startswith('-Z') or line.startswith('--always-unzip'):
            # No longer used, but previously these were used in
            # requirement files, so we'll ignore.
            pass
        elif line.startswith('-f') or line.startswith('--find-links'):
            if line.startswith('-f'):
                line = line[2:].strip()
            else:
                line = line[len('--find-links'):].strip().lstrip('=')
            ## FIXME: it would be nice to keep track of the source of
            ## the find_links:
            # support a find-links local path relative to a requirements file
            relative_to_reqs_file = os.path.join(reqs_file_dir, line)
            if os.path.exists(relative_to_reqs_file):
                line = relative_to_reqs_file
            if finder:
                finder.find_links.append(line)
        elif line.startswith('-i') or line.startswith('--index-url'):
            if line.startswith('-i'):
                line = line[2:].strip()
            else:
                line = line[len('--index-url'):].strip().lstrip('=')
            if finder:
                finder.index_urls = [line]
        elif line.startswith('--extra-index-url'):
            line = line[len('--extra-index-url'):].strip().lstrip('=')
            if finder:
                finder.index_urls.append(line)
        elif line.startswith('--use-wheel'):
            # NOTE(review): no `if finder` guard from here down.
            finder.use_wheel = True
        elif line.startswith('--no-index'):
            finder.index_urls = []
        elif line.startswith("--allow-external"):
            line = line[len("--allow-external"):].strip().lstrip("=")
            finder.allow_external |= set([normalize_name(line).lower()])
        elif line.startswith("--allow-all-external"):
            finder.allow_all_external = True
        # Remove in 1.7
        elif line.startswith("--no-allow-external"):
            pass
        # Remove in 1.7
        elif line.startswith("--no-allow-insecure"):
            pass
        # Remove after 1.7
        elif line.startswith("--allow-insecure"):
            line = line[len("--allow-insecure"):].strip().lstrip("=")
            finder.allow_unverified |= set([normalize_name(line).lower()])
        elif line.startswith("--allow-unverified"):
            line = line[len("--allow-unverified"):].strip().lstrip("=")
            finder.allow_unverified |= set([normalize_name(line).lower()])
        else:
            # Not an option line: build and yield an InstallRequirement.
            comes_from = '-r %s (line %s)' % (filename, line_number)
            if line.startswith('-e') or line.startswith('--editable'):
                if line.startswith('-e'):
                    line = line[2:].strip()
                else:
                    line = line[len('--editable'):].strip().lstrip('=')
                req = InstallRequirement.from_editable(
                    line, comes_from=comes_from,
                    default_vcs=options.default_vcs if options else None
                )
            else:
                req = InstallRequirement.from_line(
                    line, comes_from,
                    prereleases=getattr(options, "pre", None)
                )
            yield req