def test_unsupported_hashes(self, data):
    """VCS and dir links should raise errors when --require-hashes is on.

    In addition, complaints about the type of requirement (VCS or dir)
    should trump the presence or absence of a hash.
    """
    reqset = self.basic_reqset(require_hashes=True)
    reqset.add_requirement(
        list(process_line(
            "git+git://github.com/pypa/pip-test-package --hash=sha256:123",
            "file", 1))[0])
    dir_path = data.packages.join("FSPkg")
    reqset.add_requirement(
        list(process_line("file://%s" % (dir_path,), "file", 2))[0])
    finder = PackageFinder([data.find_links], [], session=PipSession())
    sep = os.path.sep
    if sep == "\\":
        sep = "\\\\"  # This needs to be escaped for the regex
    assert_raises_regexp(
        HashErrors,
        r"Can't verify hashes for these requirements because we don't "
        r"have a way to hash version control repositories:\n"
        r" git\+git://github\.com/pypa/pip-test-package \(from -r file "
        r"\(line 1\)\)\n"
        r"Can't verify hashes for these file:// requirements because they "
        r"point to directories:\n"
        r" file://.*{sep}data{sep}packages{sep}FSPkg "
        # BUG FIX: this last segment was a non-raw string, so "\(" was an
        # invalid escape sequence (SyntaxWarning on modern Python). The raw
        # prefix yields the byte-identical regex without the warning.
        r"\(from -r file \(line 2\)\)".format(sep=sep),
        reqset.prepare_files,
        finder,
    )
def test_unpinned_hash_checking(self, data):
    """Make sure prepare_files() raises an error when a requirement is not
    version-pinned in hash-checking mode.
    """
    reqset = RequirementSet()
    # Test that there must be exactly 1 specifier:
    no_specifier = list(process_line(
        'simple --hash=sha256:a90427ae31f5d1d0d7ec06ee97'
        'd9fcf2d0fc9a786985250c1c83fd68df5911dd', 'file', 1))
    reqset.add_requirement(no_specifier[0])
    # Test that the operator must be ==:
    wrong_operator = list(process_line(
        'simple2>1.0 --hash=sha256:3ad45e1e9aa48b4462af0'
        '123f6a7e44a9115db1ef945d4d92c123dfe21815a06', 'file', 2))
    reqset.add_requirement(wrong_operator[0])
    finder = PackageFinder([data.find_links], [], session=PipSession())
    resolver = self._basic_resolver(finder)
    assert_raises_regexp(
        HashErrors,
        # Make sure all failing requirements are listed:
        r'versions pinned with ==. These do not:\n'
        r' simple .* \(from -r file \(line 1\)\)\n'
        r' simple2>1.0 .* \(from -r file \(line 2\)\)',
        resolver.resolve, reqset)
def test_unsupported_hashes(self, data):
    """VCS and dir links should raise errors when --require-hashes is on.

    In addition, complaints about the type of requirement (VCS or dir)
    should trump the presence or absence of a hash.
    """
    reqset = self.basic_reqset(require_hashes=True)
    reqset.add_requirement(
        list(
            process_line(
                'git+git://github.com/pypa/pip-test-package --hash=sha256:123',
                'file', 1))[0])
    dir_path = data.packages.join('FSPkg')
    reqset.add_requirement(
        list(process_line('file://%s' % (dir_path, ), 'file', 2))[0])
    finder = PackageFinder([data.find_links], [], session=PipSession())
    sep = os.path.sep
    if sep == '\\':
        sep = '\\\\'  # This needs to be escaped for the regex
    assert_raises_regexp(
        HashErrors,
        r"Can't verify hashes for these requirements because we don't "
        r"have a way to hash version control repositories:\n"
        r" git\+git://github\.com/pypa/pip-test-package \(from -r file "
        r"\(line 1\)\)\n"
        r"Can't verify hashes for these file:// requirements because they "
        r"point to directories:\n"
        r" file://.*{sep}data{sep}packages{sep}FSPkg "
        # BUG FIX: this segment lacked the raw-string prefix, making "\("
        # an invalid escape sequence (SyntaxWarning on modern Python).
        # The raw prefix produces the identical regex text.
        r"\(from -r file \(line 2\)\)".format(sep=sep),
        reqset.prepare_files, finder)
def test_missing_hash_checking(self, data):
    """Make sure prepare_files() raises an error when a requirement has no
    hash in implicit hash-checking mode.
    """
    reqset = self.basic_reqset()
    # No flags here. This tests that detection of later flags nonetheless
    # requires earlier packages to have hashes:
    unhashed = list(process_line('blessings==1.0', 'file', 1))
    reqset.add_requirement(unhashed[0])
    # This flag activates --require-hashes mode:
    hashed = list(process_line(
        'tracefront==0.1 --hash=sha256:somehash', 'file', 2))
    reqset.add_requirement(hashed[0])
    # This hash should be accepted because it came from the reqs file, not
    # from the internet:
    url_req = list(process_line(
        'https://pypi.python.org/packages/source/m/more-'
        'itertools/more-itertools-1.0.tar.gz#md5=b21850c'
        '3cfa7efbb70fd662ab5413bdd', 'file', 3))
    reqset.add_requirement(url_req[0])
    finder = PackageFinder([], ['https://pypi.python.org/simple'],
                           session=PipSession())
    assert_raises_regexp(
        HashErrors,
        r'Hashes are required in --require-hashes mode, but they are '
        r'missing .*\n'
        r' blessings==1.0 --hash=sha256:[0-9a-f]+\n'
        r'THESE PACKAGES DO NOT MATCH THE HASHES.*\n'
        r' tracefront==0.1 .*:\n'
        r' Expected sha256 somehash\n'
        r' Got [0-9a-f]+$',
        reqset.prepare_files, finder)
def test_unpinned_hash_checking(self, data):
    """Make sure prepare_files() raises an error when a requirement is not
    version-pinned in hash-checking mode.
    """
    reqset = self.basic_reqset()
    unpinned_lines = [
        # Test that there must be exactly 1 specifier:
        'simple --hash=sha256:a90427ae31f5d1d0d7ec06ee97'
        'd9fcf2d0fc9a786985250c1c83fd68df5911dd',
        # Test that the operator must be ==:
        'simple2>1.0 --hash=sha256:3ad45e1e9aa48b4462af0'
        '123f6a7e44a9115db1ef945d4d92c123dfe21815a06',
    ]
    for line_number, req_line in enumerate(unpinned_lines, 1):
        reqset.add_requirement(
            list(process_line(req_line, 'file', line_number))[0])
    finder = PackageFinder([data.find_links], [], session=PipSession())
    assert_raises_regexp(
        HashErrors,
        # Make sure all failing requirements are listed:
        r'versions pinned with ==. These do not:\n'
        r' simple .* \(from -r file \(line 1\)\)\n'
        r' simple2>1.0 .* \(from -r file \(line 2\)\)',
        reqset.prepare_files, finder)
def test_hashed_deps_on_require_hashes(self, data):
    """Make sure hashed dependencies get installed when --require-hashes
    is on.

    (We actually just check that no "not all dependencies are hashed!"
    error gets raised while preparing; there is no reason to expect
    installation to then fail, as the code paths are the same as ever.)
    """
    reqset = self.basic_reqset()
    hashed_lines = [
        "TopoRequires2==0.0.1 "  # requires TopoRequires
        "--hash=sha256:eaf9a01242c9f2f42cf2bd82a6a848cd"
        "e3591d14f7896bdbefcf48543720c970",
        "TopoRequires==0.0.1 "
        "--hash=sha256:d6dd1e22e60df512fdcf3640ced3039b3b02a56ab2cee81ebcb"
        "3d0a6d4e8bfa6",
    ]
    for line_number, req_line in enumerate(hashed_lines, 1):
        reqset.add_requirement(
            next(process_line(req_line, "file", line_number)))
def test_missing_hash_checking(self, data):
    """Make sure prepare_files() raises an error when a requirement has no
    hash in implicit hash-checking mode.
    """
    reqset = self.basic_reqset()
    requirement_lines = [
        # No flags here. This tests that detection of later flags
        # nonetheless requires earlier packages to have hashes:
        'blessings==1.0',
        # This flag activates --require-hashes mode:
        'tracefront==0.1 --hash=sha256:somehash',
        # This hash should be accepted because it came from the reqs
        # file, not from the internet:
        'https://pypi.python.org/packages/source/m/more-'
        'itertools/more-itertools-1.0.tar.gz#md5=b21850c'
        '3cfa7efbb70fd662ab5413bdd',
    ]
    for line_number, req_line in enumerate(requirement_lines, 1):
        reqset.add_requirement(
            list(process_line(req_line, 'file', line_number))[0])
    finder = PackageFinder([], ['https://pypi.python.org/simple'],
                           session=PipSession())
    assert_raises_regexp(
        HashErrors,
        r'Hashes are required in --require-hashes mode, but they are '
        r'missing .*\n'
        r' blessings==1.0 --hash=sha256:[0-9a-f]+\n'
        r'THESE PACKAGES DO NOT MATCH THE HASHES.*\n'
        r' tracefront==0.1 .*:\n'
        r' Expected sha256 somehash\n'
        r' Got [0-9a-f]+$',
        reqset.prepare_files, finder)
def test_set_finder_allow_external(self, finder):
    # Consume the generator so the option actually gets applied.
    option_line = "--allow-external=SomeProject"
    list(process_line(option_line, "file", 1, finder=finder))
    # The project name should be stored normalized to lowercase.
    assert finder.allow_external == {'someproject'}
def get_requirements_and_latest(filename):
    """Parse a requirements file and get latest version for each requirement.

    Yields a tuple of (original line, InstallRequirement instance,
    spec_version, latest_version).

    :param filename: Path to a requirements.txt file.
    """
    session = PipSession()
    _, content = get_file_content(filename, session=session)
    for orig_line, line_number, line in yield_lines(content):
        # Strip inline comments and surrounding whitespace before parsing.
        stripped = req_file.COMMENT_RE.sub('', line).strip()
        if not stripped:
            # Blank/comment-only line: nothing to resolve.
            yield (orig_line, None, None, None)
            continue
        parsed = list(req_file.process_line(stripped, filename, line_number,
                                            session=session))
        if not parsed:
            # Option lines (e.g. --index-url) produce no requirements.
            yield (orig_line, None, None, None)
            continue
        req = parsed[0]
        spec_ver = None
        try:
            if req and req.req:
                spec_ver = Version(req.req.specs[0][1])
        except IndexError:
            # No specifier present on the requirement.
            pass
        if spec_ver:
            yield (orig_line, req, spec_ver, latest_version(req, session))
        else:
            yield (orig_line, None, None, None)
def test_set_default_vcs(self, options):
    url = 'https://url#egg=SomeProject'
    options.default_vcs = 'git'
    # An editable URL without a VCS prefix should pick up the default VCS.
    parsed = list(process_line('-e %s' % url, 'filename', 1,
                               options=options))
    assert parsed[0].link.url == 'git+' + url
def populate_requirement_set(self, requirement_set, args, options, finder,
                             session, name, wheel_cache):
    # add all of the standard reqs first.
    InstallCommand.populate_requirement_set(requirement_set, args, options,
                                            finder, session, name,
                                            wheel_cache)

    # add our constraints.
    if hasattr(self, "constraint_dict"):
        for pkg_name, spec in self.constraint_dict.items():
            constraint_line = pkg_name + (spec if spec else "")
            for req in process_line(constraint_line, "", 0, finder=finder,
                                    options=options, session=session,
                                    wheel_cache=wheel_cache,
                                    constraint=True):
                try:
                    # Merge with an already-collected requirement when one
                    # exists; otherwise register the constraint fresh.
                    existing_req = requirement_set.get_requirement(
                        pkg_name)
                    existing_req.req.specifier &= req.specifier
                except KeyError:
                    requirement_set.add_requirement(req)

    for unnamed in requirement_set.unnamed_requirements:
        if not unnamed.editable:
            continue
        unnamed.run_egg_info()
        egg_name = unnamed.pkg_info()["name"]
        if egg_name in requirement_set.requirements:
            # Drop the named duplicate; the editable version wins.
            del requirement_set.requirements._dict[egg_name]
            requirement_set.requirements._keys.remove(egg_name)
def test_set_finder_allow_unsafe(self, finder):
    option_line = "--allow-unverified=SomeProject"
    # Consume the generator so the option actually gets applied.
    list(process_line(option_line, "file", 1, finder=finder))
    # The project name should be stored normalized to lowercase.
    assert finder.allow_unverified == {'someproject'}
def test_yield_editable_requirement(self):
    url = 'git+https://url#egg=SomeProject'
    filename = 'filename'
    expected = InstallRequirement.from_editable(
        url, comes_from='-r %s (line %s)' % (filename, 1))
    actual = list(process_line('-e %s' % url, filename, 1))[0]
    assert repr(actual) == repr(expected)
def test_hash_options(self):
    """Test the --hash option: mostly its value storage.

    Make sure it reads and preserve multiple hashes.
    """
    line = ('SomeProject --hash=sha256:2cf24dba5fb0a30e26e83b2ac5b9e29e1b1'
            '61e5c1fa7425e73043362938b9824 '
            '--hash=sha384:59e1748777448c69de6b800d7a33bbfb9ff1b463e44354c'
            '3553bcdb9c666fa90125a3c79f90397bdf5f6a13de828684f '
            '--hash=sha256:486ea46224d1bb4fb680f34f7c9ad96a8f24ec88be73ea8'
            'e5a6c65260e9cb8a7')
    req = list(process_line(line, 'filename', 1))[0]
    expected_hashes = {
        'sha256': [
            '2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e730433'
            '62938b9824',
            '486ea46224d1bb4fb680f34f7c9ad96a8f24ec88be73ea8e5a6c65'
            '260e9cb8a7',
        ],
        'sha384': [
            '59e1748777448c69de6b800d7a33bbfb9ff1b463e44354c3553bcd'
            'b9c666fa90125a3c79f90397bdf5f6a13de828684f',
        ],
    }
    assert req.options == {'hashes': expected_hashes}
def test_yield_line_requirement_with_spaces_in_specifier(self):
    line = 'SomeProject >= 2'
    filename = 'filename'
    expected = InstallRequirement.from_line(
        line, comes_from='-r %s (line %s)' % (filename, 1))
    actual = list(process_line(line, filename, 1))[0]
    assert repr(actual) == repr(expected)
    # The whitespace around the operator should be normalized away.
    assert str(expected.req.specifier) == '>=2'
def test_yield_line_requirement_with_spaces_in_specifier(self):
    line = 'SomeProject >= 2'
    filename = 'filename'
    comes_from = '-r %s (line %s)' % (filename, 1)
    expected = InstallRequirement.from_line(line, comes_from=comes_from)
    parsed = list(process_line(line, filename, 1))
    assert repr(parsed[0]) == repr(expected)
    # The whitespace around the operator should be normalized away.
    assert expected.req.specs == [('>=', '2')]
def test_set_finder_process_dependency_links(self, finder):
    option_line = "--process-dependency-links"
    # Consume the generator so the flag actually gets applied.
    list(process_line(option_line, "file", 1, finder=finder))
    assert finder.process_dependency_links
def test_options_on_a_requirement_line(self):
    line = ('SomeProject --install-option=yo1 --install-option yo2 '
            '--global-option="yo3" --global-option "yo4"')
    req = list(process_line(line, 'filename', 1))[0]
    # Both "--opt=value" and "--opt value" spellings should be collected,
    # and quoted values should be unquoted.
    assert req.options == {
        'global_options': ['yo3', 'yo4'],
        'install_options': ['yo1', 'yo2'],
    }
def test_yield_line_constraint(self):
    line = 'SomeProject'
    filename = 'filename'
    expected = InstallRequirement.from_line(
        line, comes_from='-c %s (line %s)' % (filename, 1),
        constraint=True)
    found = list(process_line(line, filename, 1, constraint=True))[0]
    assert repr(found) == repr(expected)
    assert found.constraint is True
def test_yield_editable_constraint(self):
    url = 'git+https://url#egg=SomeProject'
    filename = 'filename'
    expected = InstallRequirement.from_editable(
        url, comes_from='-c %s (line %s)' % (filename, 1),
        constraint=True)
    found = list(process_line('-e %s' % url, filename, 1,
                              constraint=True))[0]
    assert repr(found) == repr(expected)
    assert found.constraint is True
def test_missing_hash_checking(self, data):
    """Make sure prepare_files() raises an error when a requirement has no
    hash in implicit hash-checking mode.
    """
    reqset = self.basic_reqset()
    requirement_lines = [
        # No flags here. This tests that detection of later flags
        # nonetheless requires earlier packages to have hashes:
        "blessings==1.0",
        # This flag activates --require-hashes mode:
        "tracefront==0.1 --hash=sha256:somehash",
        # This hash should be accepted because it came from the reqs
        # file, not from the internet:
        "https://pypi.python.org/packages/source/m/more-"
        "itertools/more-itertools-1.0.tar.gz#md5=b21850c"
        "3cfa7efbb70fd662ab5413bdd",
        # The error text should list this as a URL and not `peep==3.1.1`:
        "https://pypi.python.org/packages/source/p/peep/"
        "peep-3.1.1.tar.gz",
    ]
    for line_number, req_line in enumerate(requirement_lines, 1):
        reqset.add_requirement(
            list(process_line(req_line, "file", line_number))[0])
    finder = PackageFinder([], ["https://pypi.python.org/simple"],
                           session=PipSession())
    assert_raises_regexp(
        HashErrors,
        r"Hashes are required in --require-hashes mode, but they are "
        r"missing .*\n"
        r" https://pypi\.python\.org/packages/source/p/peep/peep"
        r"-3\.1\.1\.tar\.gz --hash=sha256:[0-9a-f]+\n"
        r" blessings==1.0 --hash=sha256:[0-9a-f]+\n"
        r"THESE PACKAGES DO NOT MATCH THE HASHES.*\n"
        r" tracefront==0.1 .*:\n"
        r" Expected sha256 somehash\n"
        r" Got [0-9a-f]+$",
        reqset.prepare_files,
        finder,
    )
def test_hashed_deps_on_require_hashes(self, data):
    """Make sure hashed dependencies get installed when --require-hashes
    is on.

    (We actually just check that no "not all dependencies are hashed!"
    error gets raised while preparing; there is no reason to expect
    installation to then fail, as the code paths are the same as ever.)
    """
    reqset = self.basic_reqset()
    parent_req = next(process_line(
        'TopoRequires2==0.0.1 '  # requires TopoRequires
        '--hash=sha256:eaf9a01242c9f2f42cf2bd82a6a848cd'
        'e3591d14f7896bdbefcf48543720c970', 'file', 1))
    reqset.add_requirement(parent_req)
    dep_req = next(process_line(
        'TopoRequires==0.0.1 '
        '--hash=sha256:d6dd1e22e60df512fdcf3640ced3039b3b02a56ab2cee81ebcb'
        '3d0a6d4e8bfa6', 'file', 2))
    reqset.add_requirement(dep_req)
def test_nested_constraints_file(self, monkeypatch):
    expected_req = InstallRequirement.from_line('SomeProject')
    import pip.req.req_file

    # Fake out recursive parsing of the nested file; just hand back the
    # prepared requirement with whatever constraint flag we were given.
    def fake_parse_requirements(req_url, finder, comes_from, options,
                                session, wheel_cache, constraint):
        return [(expected_req, constraint)]

    monkeypatch.setattr(pip.req.req_file, 'parse_requirements',
                        stub(call=fake_parse_requirements).call)
    result = list(process_line('-c another_file', 'filename', 1))
    # A "-c" line must propagate constraint=True into the nested parse.
    assert result == [(expected_req, True)]
def test_missing_hash_with_require_hashes(self, data):
    """Setting --require-hashes explicitly should raise errors if hashes
    are missing.
    """
    reqset = self.basic_reqset(require_hashes=True)
    parsed = list(process_line('simple==1.0', 'file', 1))
    reqset.add_requirement(parsed[0])
    finder = PackageFinder([data.find_links], [], session=PipSession())
    assert_raises_regexp(
        HashErrors,
        r'Hashes are required in --require-hashes mode, but they are '
        r'missing .*\n'
        r' simple==1.0 --hash=sha256:393043e672415891885c9a2a0929b1af95'
        r'fb866d6ca016b42d2e6ce53619b653$',
        reqset.prepare_files, finder)
def test_hash_mismatch(self, data):
    """A hash mismatch should raise an error."""
    file_url = path_to_url((data.packages / 'simple-1.0.tar.gz').abspath)
    reqset = self.basic_reqset(require_hashes=True)
    parsed = list(process_line('%s --hash=sha256:badbad' % file_url,
                               'file', 1))
    reqset.add_requirement(parsed[0])
    finder = PackageFinder([data.find_links], [], session=PipSession())
    assert_raises_regexp(
        HashErrors,
        r'THESE PACKAGES DO NOT MATCH THE HASHES.*\n'
        r' file:///.*/data/packages/simple-1\.0\.tar\.gz .*:\n'
        r' Expected sha256 badbad\n'
        r' Got 393043e672415891885c9a2a0929b1af95fb866d'
        r'6ca016b42d2e6ce53619b653$',
        reqset.prepare_files, finder)
def test_relative_local_nested_req_files(self, finder, monkeypatch):
    """
    Test a relative nested req file path is joined with the req file dir
    """
    req_file = os.path.normpath('/path/req_file.txt')

    mock_parse = Mock()
    mock_parse.side_effect = lambda *args, **kwargs: iter([])
    monkeypatch.setattr(pip.req.req_file, 'parse_requirements', mock_parse)
    list(process_line("-r reqs.txt", req_file, 1, finder=finder))
    first_call = mock_parse.mock_calls[0]
    # The nested path should resolve relative to the parent file's dir.
    assert first_call[1][0] == os.path.normpath('/path/reqs.txt')
def test_absolute_local_nested_req_files(self, finder, monkeypatch):
    """
    Test an absolute nested req file path
    """
    req_file = '/path/req_file.txt'

    mock_parse = Mock()
    mock_parse.side_effect = lambda *args, **kwargs: iter([])
    monkeypatch.setattr(pip.req.req_file, 'parse_requirements', mock_parse)
    list(process_line("-r /other/reqs.txt", req_file, 1, finder=finder))
    first_call = mock_parse.mock_calls[0]
    # An absolute nested path must be used as-is, not joined.
    assert first_call[1][0] == '/other/reqs.txt'
def parse_requirement(line, filename, line_number, session, finder):
    """Parse a requirement line and return an InstallRequirement instance.

    :param line: One line from a requirements.txt file.
    :param filename: Path to a requirements.txt file.
    :param line_number: The integer line number of the current line.
    :param session: Instance of pip.download.PipSession.
    :param finder: Instance of pip.download.PackageFinder.
    """
    if not line:
        return None
    parsed = list(req_file.process_line(line, filename, line_number,
                                        session=session, finder=finder))
    if not parsed:
        # Option-only lines yield no requirements.
        return None
    return parsed[0]
def test_absolute_http_nested_req_file_in_local(self, finder, monkeypatch):
    """
    Test a nested req file url in a local req file
    """
    req_file = '/path/req_file.txt'

    mock_parse = Mock()
    mock_parse.side_effect = lambda *args, **kwargs: iter([])
    monkeypatch.setattr(pip.req.req_file, 'parse_requirements', mock_parse)
    list(process_line("-r http://me.com/me/reqs.txt", req_file, 1,
                      finder=finder))
    first_call = mock_parse.mock_calls[0]
    # A URL nested inside a local file must be passed through untouched.
    assert first_call[1][0] == 'http://me.com/me/reqs.txt'
def test_missing_hash_with_require_hashes(self, data):
    """Setting --require-hashes explicitly should raise errors if hashes
    are missing.
    """
    reqset = self.basic_reqset(require_hashes=True)
    unhashed_req = list(process_line('simple==1.0', 'file', 1))[0]
    reqset.add_requirement(unhashed_req)
    finder = PackageFinder([data.find_links], [], session=PipSession())
    expected_error = (
        r'Hashes are required in --require-hashes mode, but they are '
        r'missing .*\n'
        r' simple==1.0 --hash=sha256:393043e672415891885c9a2a0929b1af95'
        r'fb866d6ca016b42d2e6ce53619b653$')
    assert_raises_regexp(HashErrors, expected_error,
                         reqset.prepare_files, finder)
def test_hash_mismatch(self, data):
    """A hash mismatch should raise an error."""
    file_url = path_to_url((data.packages / "simple-1.0.tar.gz").abspath)
    reqset = self.basic_reqset(require_hashes=True)
    bad_hash_req = list(process_line("%s --hash=sha256:badbad" % file_url,
                                     "file", 1))[0]
    reqset.add_requirement(bad_hash_req)
    finder = PackageFinder([data.find_links], [], session=PipSession())
    expected_error = (
        r"THESE PACKAGES DO NOT MATCH THE HASHES.*\n"
        r" file:///.*/data/packages/simple-1\.0\.tar\.gz .*:\n"
        r" Expected sha256 badbad\n"
        r" Got 393043e672415891885c9a2a0929b1af95fb866d"
        r"6ca016b42d2e6ce53619b653$")
    assert_raises_regexp(
        HashErrors,
        expected_error,
        reqset.prepare_files,
        finder,
    )
def parse_requirement_line(line, filename, line_number, session, finder):
    """Parse a requirement line and return an InstallRequirement instance.

    :param line: One line from a requirements.txt file.
    :param filename: Path to a requirements.txt file.
    :param line_number: The integer line number of the current line.
    :param session: Instance of pip.download.PipSession.
    :param finder: Instance of pip.download.PackageFinder.
    """
    if not line:
        return None
    reqs = list(req_file.process_line(
        line, filename, line_number, session=session, finder=finder))
    # Option-only lines yield no requirements; report those as None.
    return reqs[0] if reqs else None
def test_unhashed_deps_on_require_hashes(self, data):
    """Make sure unhashed, unpinned, or otherwise unrepeatable
    dependencies get complained about when --require-hashes is on."""
    reqset = self.basic_reqset()
    finder = PackageFinder([data.find_links], [], session=PipSession())
    req_line = ('TopoRequires2==0.0.1 '  # requires TopoRequires
                '--hash=sha256:eaf9a01242c9f2f42cf2bd82a6a848cd'
                'e3591d14f7896bdbefcf48543720c970')
    reqset.add_requirement(next(process_line(req_line, 'file', 1)))

    assert_raises_regexp(
        HashErrors,
        r'In --require-hashes mode, all requirements must have their '
        r'versions pinned.*\n'
        r' TopoRequires from .*$',
        reqset.prepare_files, finder)
def test_relative_local_find_links(self, finder, monkeypatch):
    """
    Test a relative find_links path is joined with the req file directory
    """
    req_file = '/path/req_file.txt'
    nested_link = '/path/rel_path'
    # Capture the real implementation BEFORE it gets monkeypatched so the
    # stub can delegate to it.
    exists_ = os.path.exists

    def exists(path):
        if path == nested_link:
            return True
        # BUG FIX: the original fell through without returning, so the stub
        # answered None (falsy) for every other path instead of the real
        # filesystem result. Delegate and return the real answer.
        return exists_(path)

    monkeypatch.setattr(os.path, 'exists', exists)
    list(process_line("--find-links=rel_path", req_file, 1, finder=finder))
    assert finder.find_links == [nested_link]
def test_hash_mismatch(self, data):
    """A hash mismatch should raise an error."""
    file_url = path_to_url(
        (data.packages / 'simple-1.0.tar.gz').abspath)
    reqset = RequirementSet(require_hashes=True)
    bad_hash_req = list(process_line('%s --hash=sha256:badbad' % file_url,
                                     'file', 1))[0]
    reqset.add_requirement(bad_hash_req)
    finder = PackageFinder([data.find_links], [], session=PipSession())
    resolver = self._basic_resolver(finder)
    expected_error = (
        r'THESE PACKAGES DO NOT MATCH THE HASHES.*\n'
        r' file:///.*/data/packages/simple-1\.0\.tar\.gz .*:\n'
        r' Expected sha256 badbad\n'
        r' Got 393043e672415891885c9a2a0929b1af95fb866d'
        r'6ca016b42d2e6ce53619b653$')
    assert_raises_regexp(HashErrors, expected_error,
                         resolver.resolve, reqset)
def test_relative_local_find_links(self, finder, monkeypatch):
    """
    Test a relative find_links path is joined with the req file directory
    """
    # Make sure the test also passes on windows
    req_file = os.path.normcase(
        os.path.abspath(os.path.normpath('/path/req_file.txt')))
    nested_link = os.path.normcase(
        os.path.abspath(os.path.normpath('/path/rel_path')))
    # Capture the real implementation BEFORE it gets monkeypatched so the
    # stub can delegate to it.
    exists_ = os.path.exists

    def exists(path):
        if path == nested_link:
            return True
        # BUG FIX: the original fell through without returning, so the stub
        # answered None (falsy) for every other path instead of the real
        # filesystem result. Delegate and return the real answer.
        return exists_(path)

    monkeypatch.setattr(os.path, 'exists', exists)
    list(process_line("--find-links=rel_path", req_file, 1, finder=finder))
    assert finder.find_links == [nested_link]
def test_relative_local_find_links(self, finder, monkeypatch):
    """
    Test a relative find_links path is joined with the req file directory
    """
    # Make sure the test also passes on windows
    req_file = os.path.normcase(os.path.abspath(
        os.path.normpath('/path/req_file.txt')))
    nested_link = os.path.normcase(os.path.abspath(
        os.path.normpath('/path/rel_path')))
    # Capture the real implementation BEFORE it gets monkeypatched so the
    # stub can delegate to it.
    exists_ = os.path.exists

    def exists(path):
        if path == nested_link:
            return True
        # BUG FIX: the original fell through without returning, so the stub
        # answered None (falsy) for every other path instead of the real
        # filesystem result. Delegate and return the real answer.
        return exists_(path)

    monkeypatch.setattr(os.path, 'exists', exists)
    list(process_line("--find-links=rel_path", req_file, 1, finder=finder))
    assert finder.find_links == [nested_link]
def test_hash_options(self):
    """Test the --hash option: mostly its value storage.

    Make sure it reads and preserve multiple hashes.
    """
    requirement_line = (
        'SomeProject --hash=sha256:2cf24dba5fb0a30e26e83b2ac5b9e29e1b1'
        '61e5c1fa7425e73043362938b9824 '
        '--hash=sha384:59e1748777448c69de6b800d7a33bbfb9ff1b463e44354c'
        '3553bcdb9c666fa90125a3c79f90397bdf5f6a13de828684f '
        '--hash=sha256:486ea46224d1bb4fb680f34f7c9ad96a8f24ec88be73ea8'
        'e5a6c65260e9cb8a7')
    req = list(process_line(requirement_line, 'filename', 1))[0]
    # Hashes are grouped by algorithm, preserving the order given.
    assert req.options == {'hashes': {
        'sha256': ['2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e730433'
                   '62938b9824',
                   '486ea46224d1bb4fb680f34f7c9ad96a8f24ec88be73ea8e5a6c65'
                   '260e9cb8a7'],
        'sha384': ['59e1748777448c69de6b800d7a33bbfb9ff1b463e44354c3553bcd'
                   'b9c666fa90125a3c79f90397bdf5f6a13de828684f']}}
def populate_requirement_set(self, requirement_set, args, options, finder,
                             session, name, wheel_cache):
    # add all of the standard reqs first.
    InstallCommand.populate_requirement_set(requirement_set, args, options,
                                            finder, session, name,
                                            wheel_cache)
    packages_config = getattr(self, "packages_config", {})

    # add our constraints.
    if hasattr(self, "constraint_dict"):
        for pkg_name, spec in self.constraint_dict.items():
            constraint_line = pkg_name + (spec if spec else "")
            for req in process_line(constraint_line, "", 0, finder=finder,
                                    options=options, session=session,
                                    wheel_cache=wheel_cache,
                                    constraint=True):
                if packages_config and req.name in packages_config:
                    # Wrap the requirement's install method so we can
                    # apply custom install options if provided
                    req.install = functools.partial(
                        types.MethodType(requirement_install, req),
                        packages_config.get(req.name))
                try:
                    # Merge with an already-collected requirement when one
                    # exists; otherwise register the constraint fresh.
                    existing_req = requirement_set.get_requirement(
                        pkg_name)
                    existing_req.req.specifier &= req.specifier
                except KeyError:
                    requirement_set.add_requirement(req)

    for unnamed in requirement_set.unnamed_requirements:
        if not unnamed.editable:
            continue
        unnamed.run_egg_info()
        egg_name = unnamed.pkg_info()["name"]
        if egg_name in requirement_set.requirements:
            # Drop the named duplicate; the editable version wins.
            del requirement_set.requirements._dict[egg_name]
            requirement_set.requirements._keys.remove(egg_name)
def __read_lines(self):
    with open(self.path) as f:
        raw_lines = f.read().splitlines()
    # Number the lines first, then fold continuations and drop comments.
    numbered = ignore_comments(join_lines(enumerate(raw_lines, start=1)))
    for line_number, line in numbered:
        if line.startswith('-e '):
            ui.warn('requirements file flags are not supported yet:',
                    line)
            continue
        for req in process_line(line, None, 0, None, None, None, None,
                                None):
            yield req, line_number
def parse_requirements(filename, finder=None, comes_from=None, options=None,
                       session=None, constraint=False, wheel_cache=None):
    """Parse a requirements file and yield InstallRequirement instances.

    :param filename: Path or url of requirements file.
    :param finder: Instance of pip.index.PackageFinder.
    :param comes_from: Origin description of requirements.
    :param options: Global options.
    :param session: Instance of pip.download.PipSession.
    :param constraint: If true, parsing a constraint file rather than
        requirements file.
    :param wheel_cache: Instance of pip.wheel.WheelCache
    :raises TypeError: If no session is supplied.
    """
    if session is None:
        raise TypeError(
            "parse_requirements() missing 1 required keyword argument: "
            "'session'"
        )

    _, content = get_file_content(
        filename, comes_from=comes_from, session=session
    )

    lines = content.splitlines()
    lines = ignore_comments(lines)
    lines = join_lines(lines)
    lines = skip_regex(lines, options)
    lines = expand_env_variables(lines)

    def _numeric_prefix(part):
        # Leading digits of one version component ("10" -> 10, "1b2" -> 1);
        # 0 when the component has no leading digits (e.g. "dev").
        digits = ''
        for char in part:
            if not char.isdigit():
                break
            digits += char
        return int(digits) if digits else 0

    kwargs = {}
    # The constraint keyword was only added in pip 7.1+.
    # BUG FIX: the original compared version components as *strings*
    # (tuple(version.split('.')[:2]) > ('7', '0')), which is wrong for
    # pip >= 10 because '10' < '7' lexicographically. Compare integers.
    pip_version = tuple(
        _numeric_prefix(part) for part in pip.__version__.split('.')[:2]
    )
    if pip_version > (7, 0):
        kwargs['constraint'] = constraint

    for line_number, line in enumerate(lines, 1):
        req_iter = process_line(line, filename, line_number, finder,
                                comes_from, options, session, wheel_cache,
                                **kwargs)
        for req in req_iter:
            yield req
def populate_requirement_set(self, requirement_set, args, options, finder,
                             session, name, wheel_cache):
    # add all of the standard reqs first.
    InstallCommand.populate_requirement_set(
        requirement_set, args, options, finder, session, name, wheel_cache)
    packages_config = getattr(self, "packages_config", {})

    # add our constraints.
    if hasattr(self, "constraint_dict"):
        for package_name, specifier in self.constraint_dict.items():
            requirement_line = package_name
            if specifier:
                requirement_line += specifier
            constraint_reqs = process_line(
                requirement_line, "", 0, finder=finder, options=options,
                session=session, wheel_cache=wheel_cache, constraint=True)
            for req in constraint_reqs:
                if packages_config and req.name in packages_config:
                    # Wrap the requirement's install method so we can
                    # apply custom install options if provided
                    req.install = functools.partial(
                        types.MethodType(requirement_install, req),
                        packages_config.get(req.name))
                try:
                    # Fold the constraint into any requirement we already
                    # collected; otherwise add it as a new one.
                    existing_req = requirement_set.get_requirement(
                        package_name)
                    existing_req.req.specifier &= req.specifier
                except KeyError:
                    requirement_set.add_requirement(req)

    for r in requirement_set.unnamed_requirements:
        if not r.editable:
            continue
        r.run_egg_info()
        resolved_name = r.pkg_info()["name"]
        if resolved_name in requirement_set.requirements:
            # Remove the named duplicate; the editable version wins.
            del requirement_set.requirements._dict[resolved_name]
            requirement_set.requirements._keys.remove(resolved_name)
def test_variant5(self, finder):
    # Quoted index URL value; quotes should be stripped by the parser.
    index_option = "--index-url='url'"
    list(process_line(index_option, "file", 1, finder=finder))
    assert finder.index_urls == ['url']
def test_variant1(self, finder):
    # Short-form index option with a space-separated value.
    index_option = "-i url"
    list(process_line(index_option, "file", 1, finder=finder))
    assert finder.index_urls == ['url']
def test_set_finder_process_dependency_links(self, finder):
    # Consume the generator so the flag actually gets applied.
    list(process_line("--process-dependency-links", "file", 1,
                      finder=finder))
    assert finder.process_dependency_links
def test_set_finder_allow_all_prereleases(self, finder):
    pre_flag = "--pre"
    # Consume the generator so the flag actually gets applied.
    list(process_line(pre_flag, "file", 1, finder=finder))
    assert finder.allow_all_prereleases