def test_unpinned_hash_checking(self, data):
    """Ensure resolution fails in hash-checking mode when a requirement
    is not pinned to an exact version.
    """
    requirement_set = RequirementSet()
    # A hash is present but no version specifier at all:
    unpinned = list(process_line(
        'simple --hash=sha256:a90427ae31f5d1d0d7ec06ee97'
        'd9fcf2d0fc9a786985250c1c83fd68df5911dd', 'file', 1))[0]
    requirement_set.add_requirement(unpinned)
    # A specifier is present but uses > rather than the required ==:
    not_pinned = list(process_line(
        'simple2>1.0 --hash=sha256:3ad45e1e9aa48b4462af0'
        '123f6a7e44a9115db1ef945d4d92c123dfe21815a06', 'file', 2))[0]
    requirement_set.add_requirement(not_pinned)
    finder = PackageFinder([data.find_links], [], session=PipSession())
    resolver = self._basic_resolver(finder)
    # Both offending requirements should appear in the error message:
    assert_raises_regexp(
        HashErrors,
        r'versions pinned with ==. These do not:\n'
        r' simple .* \(from -r file \(line 1\)\)\n'
        r' simple2>1.0 .* \(from -r file \(line 2\)\)',
        resolver.resolve,
        requirement_set)
def run(self, options, args):
    """Run the parent uninstall command, then record the uninstalled
    requirements in the configured save file (only when
    --remove-uninstalled was given).
    """
    super(UninstallCommand, self).run(options, args)
    if not options.remove_uninstalled:
        return
    with self._build_session(options) as session:
        req_set = RequirementSet(
            build_dir=None,
            src_dir=None,
            download_dir=None,
            isolated=options.isolated_mode,
            session=session,
        )
        # Requirements named directly on the command line...
        for arg in args:
            req_set.add_requirement(
                InstallRequirement.from_line(arg,
                                             isolated=options.isolated_mode))
        # ...and those pulled in from -r files.
        for req_file in options.requirements:
            for parsed in parse_requirements(req_file, options=options,
                                             session=session):
                req_set.add_requirement(parsed)
        if not req_set.has_requirements:
            return
        save_path = os.path.abspath(
            os.path.join(os.getcwd(), options.save_file))
        parsed_reqs = self._parse_requirements(
            req_set.requirements.values())
        self._update_requirements_file(save_path,
                                       remove_requirements=parsed_reqs,
                                       session=session)
def pip_install(name, tmpdir):
    """Download distribution *name* into *tmpdir* via pip's machinery and
    return the path of the single downloaded file.
    """
    dirs = {}
    for sub in ('build', 'src', 'download'):
        dirs[sub] = os.path.join(tmpdir, sub)
        os.mkdir(dirs[sub])
    finder = PackageFinder(
        find_links=[],
        index_urls=['https://pypi.python.org/simple/'],
        use_mirrors=False,
        allow_all_external=True,
        allow_all_insecure=True,
    )
    req_set = RequirementSet(
        build_dir=dirs['build'],
        src_dir=dirs['src'],
        download_dir=dirs['download'],
        ignore_installed=True,
        ignore_dependencies=True,
    )
    req_set.add_requirement(InstallRequirement.from_line(name, None))
    req_set.prepare_files(finder)
    # Exactly one file is expected in the download directory.
    downloaded = os.listdir(dirs['download'])[0]
    return os.path.join(dirs['download'], downloaded)
def get_requirements(req_specs, requirement_files=None):
    """
    Build a pip RequirementSet from pip-like input arguments.

    Parameters
    ----------
    req_specs : sequence
        Requirement specifier strings, possibly with versions.
    requirement_files : None or sequence, optional
        Filenames or URLs of requirements files.

    Returns
    -------
    requirement_set : RequirementSet instance
        Pip requirements set.
    """
    session = PipSession()
    req_set = RequirementSet(
        build_dir=None,
        src_dir=None,
        download_dir=None,
        session=session,
    )
    for spec in req_specs:
        req_set.add_requirement(InstallRequirement.from_line(spec))
    for req_file in (requirement_files or []):
        for parsed in parse_requirements(req_file, session=session):
            req_set.add_requirement(parsed)
    return req_set
def _install_requirement(self, requirement):
    # Install a single requirement through pip's internal machinery.
    # Returns (True, None) early when the requirement is already satisfied.
    if not self.local_options:
        self.local_options = []
    if self._check_requirement(requirement):
        return True, None
    try:
        # Parse requirement
        requirement = InstallRequirement.from_line(str(requirement), None)
        # Build the requirement set. We're doing this one at a time so we can actually detect
        # which ones fail.
        requirement_set = RequirementSet(build_dir=build_prefix,
                                         src_dir=src_prefix,
                                         download_dir=None)
        requirement_set.add_requirement(requirement)
        # Download and build requirement
        try:
            requirement_set.prepare_files(self.finder,
                                          force_root_egg_info=False,
                                          bundle=False)
        # NOTE: Python 2 except syntax ("except X, err").
        except PreviousBuildDirError, err:
            # Remove previous build directories if they're detected.. shouldn't be an issue
            # now that we've removed upstream dependencies from requirements.txt.
            location = requirement.build_location(build_prefix, True)
            shutil.rmtree(location)
            requirement_set.prepare_files(self.finder,
                                          force_root_egg_info=False,
                                          bundle=False)
        # Finally, install the requirement.
        requirement_set.install(self.local_options, [])
        # NOTE(review): the handler for the outer `try` above is not visible
        # in this view -- the block appears truncated; confirm against the
        # full source before relying on its error behavior.
def getDependencies(name, requirementSet=None, finder=None):
    """Resolve and return the dependencies of a python project.

    @param name: name of python project
    @param requirementSet: RequirementSet to reuse (a fresh one is created
        when omitted)
    @param finder: PackageFinder to reuse (a default one is created when
        omitted)
    """
    if requirementSet is None:
        requirementSet = RequirementSet(
            build_dir=os.path.abspath(build_prefix),
            src_dir=os.path.abspath(src_prefix),
            download_dir=None,
            download_cache=None,
            upgrade=False,
            ignore_installed=True,
            ignore_dependencies=False)
    if finder is None:
        finder = PackageFinder(
            find_links=[],
            index_urls=['http://pypi.python.org/simple'])
    # Have pip download the project together with everything it depends on.
    root_req = InstallRequirement.from_line(name, None)
    requirementSet.add_requirement(root_req)
    requirementSet.install_files(finder)
    # Walk the dependency relationships between the downloaded projects.
    dependencies = []
    traceDependencys(root_req, requirementSet, dependencies)
    return dependencies
def run(self, options, args):
    """Build wheels for all requested requirements."""
    self.check_required_packages()
    cmdoptions.check_install_build_global(options)
    index_urls = [options.index_url] + options.extra_index_urls
    if options.no_index:
        logger.debug('Ignoring indexes: %s', ','.join(index_urls))
        index_urls = []
    if options.build_dir:
        options.build_dir = os.path.abspath(options.build_dir)
    options.src_dir = os.path.abspath(options.src_dir)
    with self._build_session(options) as session:
        finder = self._build_package_finder(options, session)
        # Auto-delete only temporary build dirs, and only when --no-clean
        # was not requested.
        delete_build = not (options.no_clean or options.build_dir)
        wheel_cache = WheelCache(options.cache_dir, options.format_control)
        with BuildDirectory(options.build_dir,
                            delete=delete_build) as build_dir:
            req_set = RequirementSet(
                build_dir=build_dir,
                src_dir=options.src_dir,
                download_dir=None,
                ignore_dependencies=options.ignore_dependencies,
                ignore_installed=True,
                ignore_requires_python=options.ignore_requires_python,
                isolated=options.isolated_mode,
                session=session,
                wheel_cache=wheel_cache,
                wheel_download_dir=options.wheel_dir,
                require_hashes=options.require_hashes,
                progress_bar=options.progress_bar,
            )
            self.populate_requirement_set(
                req_set, args, options, finder, session, self.name,
                wheel_cache
            )
            try:
                builder = WheelBuilder(
                    req_set,
                    finder,
                    build_options=options.build_options or [],
                    global_options=options.global_options or [],
                    no_clean=options.no_clean,
                )
                if not builder.build():
                    raise CommandError(
                        "Failed to build one or more wheels"
                    )
            except PreviousBuildDirError:
                options.no_clean = True
                raise
            finally:
                if not options.no_clean:
                    req_set.cleanup_files()
def _get_local_deps(self, req):
    """Run *req* through pip's internal _prepare_file hook using a
    throwaway RequirementSet, and return the result.
    """
    scratch_set = RequirementSet(
        build_dir=None,
        src_dir=None,
        download_dir=None,
        session=self.session,
    )
    return scratch_set._prepare_file(self.finder, req)
def test_exclusive_environment_markers():
    """Make sure RequirementSet accepts several excluding env markers"""
    marker_eq = InstallRequirement.from_line(
        "Django>=1.6.10,<1.7 ; python_version == '2.6'")
    marker_ne = InstallRequirement.from_line(
        "Django>=1.6.10,<1.8 ; python_version != '2.6'")
    requirements = RequirementSet("", "", "", session=PipSession())
    for req in (marker_eq, marker_ne):
        requirements.add_requirement(req)
    assert requirements.has_requirement("Django")
def __init__(self, egg_directory, install_options=None, global_options=None):
    """Set up an installer rooted at *egg_directory*.

    :param egg_directory: target directory; ``~`` is expanded and the
        path is made absolute.
    :param install_options: extra install options.  NOTE(review): the
        original accepted but never read this parameter -- confirm intent.
    :param global_options: extra global options (likewise unread here).
    """
    # Fix: default to None instead of a shared mutable [] (the classic
    # mutable-default-argument pitfall).
    if install_options is None:
        install_options = []
    if global_options is None:
        global_options = []
    self.egg_directory = egg_directory = os.path.abspath(
        os.path.expanduser(egg_directory))
    # Fix: rebind instead of += so a class-level default list is not
    # mutated in place by every instance.
    self.install_options = self.install_options + \
        ["--home=%s" % egg_directory]
    sys.path += [os.path.join(egg_directory, "lib", "python")]
    self.requirement_set = RequirementSet(build_dir=build_prefix,
                                          src_dir=src_prefix,
                                          download_dir=None,
                                          upgrade=True)
def wheel(self, package, sender_data):
    """Convert an uploaded source distribution into a wheel and return
    the storage path of the resulting file.
    """
    source = sender_data[1].pop('path')
    # Already a wheel? Nothing to do.
    if re.findall('whl$', source):
        raise NotForMe
    # Restrict the package finder to the folder holding our file; no
    # network access is involved.
    finder = PackageFinder(find_links=[os.path.dirname(source)],
                           index_urls=[])
    # The pip API wants a requirement set, built in a scratch directory.
    scratch_build = tempfile.mkdtemp()
    req_set = RequirementSet(
        build_dir=scratch_build,
        src_dir=None,
        download_dir=None,
        download_cache=None,
        ignore_dependencies=True,
        ignore_installed=True,
    )
    req_set.add_requirement(InstallRequirement.from_line(package))
    # Build the wheel into its own temp dir so the output is the only
    # file in there (the API gives us no other way to learn the name).
    scratch_wheel = tempfile.mkdtemp()
    WheelBuilder(
        req_set,
        finder,
        wheel_dir=scratch_wheel,
        build_options=[],
        global_options=[],
    ).build()
    wheel_file = os.listdir(scratch_wheel)[0]
    path = self.index.from_file(os.path.join(scratch_wheel, wheel_file))
    # Drop both scratch directories now that the wheel is stored.
    shutil.rmtree(scratch_build)
    shutil.rmtree(scratch_wheel)
    # Report where in the storage the file ended up.
    return {'path': os.path.join(os.path.dirname(source), wheel_file)}
def test_unsupported_wheel_link_requirement_raises(self):
    """A remote wheel whose tags are unsupported must be rejected."""
    req_set = RequirementSet()
    wheel_req = InstallRequirement.from_line(
        'https://whatever.com/peppercorn-0.4-py2.py3-bogus-any.whl',
    )
    assert wheel_req.link is not None
    assert wheel_req.link.is_wheel
    assert wheel_req.link.scheme == "https"
    with pytest.raises(InstallationError):
        req_set.add_requirement(wheel_req)
def test_unsupported_wheel_local_file_requirement_raises(self, data):
    """A local wheel file with unsupported tags must be rejected."""
    req_set = RequirementSet()
    wheel_req = InstallRequirement.from_line(
        data.packages.join('simple.dist-0.1-py1-none-invalid.whl'),
    )
    assert wheel_req.link is not None
    assert wheel_req.link.is_wheel
    assert wheel_req.link.scheme == "file"
    with pytest.raises(InstallationError):
        req_set.add_requirement(wheel_req)
def wheel(self, package, sender_data):
    """Build a wheel from a non-wheel upload and report where it lives."""
    source = sender_data[1].pop('path')
    # Wheel files need no conversion; bail out.
    if re.findall('whl$', source):
        raise NotForMe
    target = os.path.dirname(source)
    # A finder limited to the local folder -- pip will not hit the network.
    finder = PackageFinder(find_links=[target], index_urls=[])
    # pip's API requires a requirement set; give it a throwaway build dir.
    build_dir = tempfile.mkdtemp()
    requirement_set = RequirementSet(
        build_dir=build_dir,
        src_dir=None,
        download_dir=None,
        download_cache=None,
        ignore_dependencies=True,
        ignore_installed=True,
    )
    requirement = InstallRequirement.from_line(package)
    requirement_set.add_requirement(requirement)
    # The wheel is emitted into a fresh temp dir, so afterwards the single
    # file in that dir is exactly the wheel we built.
    wheel_dir = tempfile.mkdtemp()
    builder = WheelBuilder(
        requirement_set,
        finder,
        wheel_dir=wheel_dir,
        build_options=[],
        global_options=[],
    )
    builder.build()
    wheel_file = os.listdir(wheel_dir)[0]
    path = self.index.from_file(os.path.join(wheel_dir, wheel_file))
    # Remove both temporary directories.
    shutil.rmtree(build_dir)
    shutil.rmtree(wheel_dir)
    return {'path': os.path.join(target, wheel_file)}
def test_exclusive_environment_markers():
    """Make sure RequirementSet accepts several excluding env markers"""
    install_reqs = [
        InstallRequirement.from_line(
            "Django>=1.6.10,<1.7 ; python_version == '2.6'"),
        InstallRequirement.from_line(
            "Django>=1.6.10,<1.8 ; python_version != '2.6'"),
    ]
    req_set = RequirementSet('', '', '', session=PipSession())
    for install_req in install_reqs:
        req_set.add_requirement(install_req)
    assert req_set.has_requirement('Django')
def __init__(self, egg_directory):
    """Prepare a requirement set targeting *egg_directory*.

    Fix: the build directory used to be created with ``with
    BuildDirectory() as build_prefix:``, which deleted the directory as
    soon as __init__ returned -- before the stored RequirementSet could
    ever build anything in it.  The context is now entered manually and
    kept alive on the instance.
    """
    self.egg_directory = egg_directory = os.path.abspath(
        os.path.expanduser(egg_directory))
    sys.path += [
        os.path.join(egg_directory, "lib", "python" + PYTHON_VERSION,
                     "site-packages")
    ]
    # Keep the context manager on the instance so the temporary build
    # directory survives for this object's lifetime.
    self._build_directory = BuildDirectory()
    build_prefix = self._build_directory.__enter__()
    self.requirement_set = RequirementSet(build_dir=build_prefix,
                                          src_dir=src_prefix,
                                          download_dir=None,
                                          upgrade=True,
                                          session=PipSession())
def run(self, options, args):
    """Collect all requested requirements and uninstall them."""
    with self._build_session(options) as session:
        format_control = pip.index.FormatControl(set(), set())
        wheel_cache = WheelCache(options.cache_dir, format_control)
        req_set = RequirementSet(
            build_dir=None,
            src_dir=None,
            download_dir=None,
            isolated=options.isolated_mode,
            session=session,
            wheel_cache=wheel_cache,
        )
        # Requirements given directly on the command line...
        for spec in args:
            req_set.add_requirement(
                InstallRequirement.from_line(
                    spec, isolated=options.isolated_mode,
                    wheel_cache=wheel_cache))
        # ...and those coming from -r files.
        for req_file in options.requirements:
            for parsed in parse_requirements(req_file, options=options,
                                             session=session,
                                             wheel_cache=wheel_cache):
                req_set.add_requirement(parsed)
        if not req_set.has_requirements:
            raise InstallationError(
                'You must give at least one requirement to %(name)s (see '
                '"pip help %(name)s")' % dict(name=self.name))
        req_set.uninstall(auto_confirm=options.yes)
def install(name): """ Try to install the package with 'name' into folder 'libs/python27'. """ print "Installation directory:" print python27_dir() requirement_set = RequirementSet(build_dir=build_prefix, src_dir=src_prefix, download_dir=None) requirement_set.add_requirement(InstallRequirement.from_line(name, None)) install_options = ["--prefix=%s" % python27_dir()] global_options = [] finder = PackageFinder(find_links=[], index_urls=["http://pypi.python.org/simple/"]) try: requirement_set.prepare_files(finder, force_root_egg_info=False, bundle=False) requirement_set.install(install_options, global_options) print "\nSuccessfully installed\n==================================" for package in requirement_set.successfully_installed: print package.name print "\nDone.\n" except DistributionNotFound: print "No package found with name: %s" % name except Exception as e: print "Error:", e
def test_missing_hash_with_require_hashes(self, data):
    """Setting --require-hashes explicitly should raise errors if hashes
    are missing.
    """
    req_set = RequirementSet(require_hashes=True)
    hashless_req = list(process_line('simple==1.0', 'file', 1))[0]
    req_set.add_requirement(hashless_req)
    finder = PackageFinder([data.find_links], [], session=PipSession())
    resolver = self._basic_resolver(finder)
    assert_raises_regexp(
        HashErrors,
        r'Hashes are required in --require-hashes mode, but they are '
        r'missing .*\n'
        r' simple==1.0 --hash=sha256:393043e672415891885c9a2a0929b1af95'
        r'fb866d6ca016b42d2e6ce53619b653$',
        resolver.resolve,
        req_set)
class PrintRequirementsTestCase(TestCase):
    # Tests for pip_compile.print_requirements().  Each test method only
    # populates self.requirement_set and records the expected rendering in
    # self.expected; the actual printing and assertion happen in tearDown,
    # which runs after every test method.

    def setUp(self):
        # session='dummy': printing never touches the network, so any
        # placeholder session value will do here.
        self.requirement_set = RequirementSet(None, None, None, session='dummy')

    def tearDown(self):
        # Render the accumulated requirement set and compare with the
        # expectation the test body recorded.
        output = StringIO()
        pip_compile.print_requirements(self.requirement_set, output=output)
        assert output.getvalue() == self.expected

    def test_no_requirements(self):
        # An empty set prints nothing at all.
        self.expected = ''

    def test_one_requirement_no_version(self):
        self.requirement_set.add_requirement(
            InstallRequirement('pkg', None))
        self.expected = 'pkg\n'

    def test_one_requirement_with_version(self):
        self.requirement_set.add_requirement(
            InstallRequirement('pkg==1.0.1', None))
        self.expected = 'pkg==1.0.1\n'

    def test_one_requirement_with_constraint(self):
        # Per the expected output, only the constraint entry is printed;
        # the later non-constraint duplicate of 'pkg' is not.
        self.requirement_set.add_requirement(
            InstallRequirement('pkg==1.0.1', None, constraint=True))
        self.requirement_set.add_requirement(
            InstallRequirement('pkg==1.0.2-ignored', None))
        self.expected = 'pkg==1.0.1\n'
def test_hash_mismatch(self, data):
    """A hash mismatch should raise an error."""
    archive_path = (data.packages / 'simple-1.0.tar.gz').abspath
    file_url = path_to_url(archive_path)
    req_set = RequirementSet(require_hashes=True)
    req_set.add_requirement(list(process_line(
        '%s --hash=sha256:badbad' % file_url, 'file', 1))[0])
    finder = PackageFinder([data.find_links], [], session=PipSession())
    resolver = self._basic_resolver(finder)
    assert_raises_regexp(
        HashErrors,
        r'THESE PACKAGES DO NOT MATCH THE HASHES.*\n'
        r' file:///.*/data/packages/simple-1\.0\.tar\.gz .*:\n'
        r' Expected sha256 badbad\n'
        r' Got 393043e672415891885c9a2a0929b1af95fb866d'
        r'6ca016b42d2e6ce53619b653$',
        resolver.resolve,
        req_set)
def run(self, options, args):
    """Download the requested requirements into a local cache directory
    and (optionally) upload them to a private repository.

    Returns the populated RequirementSet, or 1 when no repository was
    configured.
    """
    if not options.req_repository:
        logger.notify('You need to specify a repository. This utility '
                      'does not upload to PyPI')
        return 1
    options.build_dir = os.path.abspath(options.req_cache_dir)
    options.src_dir = os.path.abspath(options.req_cache_dir)
    options.no_install = True
    options.ignore_installed = True
    index_urls = [options.index_url] + options.extra_index_urls
    if options.no_index:
        logger.notify('Ignoring indexes: %s' % ','.join(index_urls))
        index_urls = []
    finder = self._build_package_finder(options, index_urls)
    requirement_set = RequirementSet(
        build_dir=options.build_dir,
        src_dir=options.src_dir,
        download_dir=options.download_dir,
        download_cache=options.download_cache,
        upgrade=options.upgrade,
        ignore_installed=options.ignore_installed,
        ignore_dependencies=options.ignore_dependencies)
    # Fix: use a context manager so the mirrored requirements file is
    # closed even when the header write raises (the original only closed
    # it in a finally around the parsing loop).
    with open(os.path.abspath(options.req_req_requirements), 'w') as req_req:
        req_req.writelines(['--index-url=%s' % self.repository_url(options),
                            '\n'])
        for filename in options.requirements:
            for req in parse_requirements(filename, finder=finder,
                                          options=options):
                logger.info('req ' + str(req.req))
                req_req.writelines([str(req.req), '\n'])
                requirement_set.add_requirement(req)
    if not options.no_download:
        requirement_set.prepare_files(finder, force_root_egg_info=False,
                                      bundle=False)
    else:
        requirement_set.locate_files()
    if not os.path.exists(os.path.abspath(options.req_cache_dir)):
        os.mkdir(os.path.abspath(options.req_cache_dir))
    if not options.req_no_upload:
        self.upload_to_repository(options)
    if options.req_clean_cache:
        requirement_set.cleanup_files(bundle=False)
    return requirement_set
def __init__(self, egg_directory):
    """Initialise the installer for *egg_directory*.

    Fix: the build directory was previously created with ``with
    BuildDirectory() as build_prefix:``, so it was removed the moment
    __init__ finished, leaving self.requirement_set pointing at a deleted
    path.  The context is now entered manually and held on the instance.
    """
    self.egg_directory = egg_directory = os.path.abspath(
        os.path.expanduser(egg_directory))
    sys.path += [os.path.join(egg_directory, "lib",
                              "python" + PYTHON_VERSION, "site-packages")]
    self._build_dir_ctx = BuildDirectory()
    build_prefix = self._build_dir_ctx.__enter__()
    self.requirement_set = RequirementSet(
        build_dir=build_prefix,
        src_dir=src_prefix,
        download_dir=None,
        upgrade=True,
        session=PipSession()
    )
def basic_reqset(self):
    """Return a RequirementSet rooted inside this test's temp directory."""
    tempdir = self.tempdir
    return RequirementSet(
        build_dir=os.path.join(tempdir, 'build'),
        src_dir=os.path.join(tempdir, 'src'),
        download_dir=None,
        download_cache=os.path.join(tempdir, 'download_cache'),
    )
def get_requirement_set(self, finder, line):
    """Build a RequirementSet for a single requirements-file *line*.

    The line is written to a temporary file and run through pip's
    requirements-file parser so per-line options are honored.

    Fix: the parsed requirement used to be discarded and re-created with
    ``InstallRequirement.from_line(line, None)``, which dropped any
    options the parser had extracted from the line.
    """
    requirement_set = RequirementSet(
        build_dir=build_prefix,
        src_dir=src_prefix,
        download_dir=None,
        upgrade=self.upgrade,
    )
    with tempfile.NamedTemporaryFile() as single_req_file:
        single_req_file.write(line)
        single_req_file.flush()
        for requirement in parse_requirements(single_req_file.name,
                                              finder=finder):
            requirement_set.add_requirement(requirement)
    return requirement_set
def basic_reqset(self):
    """Create a fresh RequirementSet under the test's temp directory."""
    return RequirementSet(
        src_dir=os.path.join(self.tempdir, 'src'),
        build_dir=os.path.join(self.tempdir, 'build'),
        download_dir=None,
        session=PipSession(),
    )
def run(self, options, args):
    """Assemble a RequirementSet from CLI args, editables and -r files,
    prepare the files, and return the populated set.
    """
    if not options.build_dir:
        options.build_dir = build_prefix
    if not options.src_dir:
        options.src_dir = src_prefix
    if options.download_dir:
        # Download-only mode: nothing gets installed.
        options.no_install = True
        options.ignore_installed = True
    else:
        options.build_dir = os.path.abspath(options.build_dir)
        options.src_dir = os.path.abspath(options.src_dir)
    index_urls = [options.index_url] + options.extra_index_urls
    if options.no_index:
        logger.notify('Ignoring indexes: %s' % ','.join(index_urls))
        index_urls = []
    finder = PackageFinder(find_links=options.find_links,
                           index_urls=index_urls)
    req_set = RequirementSet(
        build_dir=options.build_dir,
        src_dir=options.src_dir,
        download_dir=options.download_dir,
        download_cache=options.download_cache,
        upgrade=options.upgrade,
        ignore_installed=options.ignore_installed,
        ignore_dependencies=False,
    )
    for spec in args:
        req_set.add_requirement(InstallRequirement.from_line(spec, None))
    for editable in options.editables:
        req_set.add_requirement(InstallRequirement.from_editable(
            editable, default_vcs=options.default_vcs))
    for req_file in options.requirements:
        for parsed in parse_requirements(req_file, finder=finder,
                                         options=options):
            req_set.add_requirement(parsed)
    req_set.prepare_files(
        finder,
        force_root_egg_info=self.bundle,
        bundle=self.bundle,
    )
    return req_set
def test_environment_marker_extras(self, data):
    """
    Test that the environment marker extras are used with non-wheel
    installs.
    """
    req_set = RequirementSet()
    editable_req = InstallRequirement.from_editable(
        data.packages.join("LocalEnvironMarker"))
    req_set.add_requirement(editable_req)
    finder = PackageFinder([data.find_links], [], session=PipSession())
    resolver = self._basic_resolver(finder)
    resolver.resolve(req_set)
    # This is hacky but does test both case in py2 and py3
    expect_simple = sys.version_info[:2] in ((2, 7), (3, 4))
    if expect_simple:
        assert req_set.has_requirement('simple')
    else:
        assert not req_set.has_requirement('simple')
def run(self, options, args):
    """Gather requirements from args, editables and -r files, install
    their files, and return both the raw requirement list and the set.
    """
    if not options.build_dir:
        options.build_dir = build_prefix
    if not options.src_dir:
        options.src_dir = src_prefix
    if options.download_dir:
        options.no_install = True
        options.ignore_installed = True
    else:
        options.build_dir = os.path.abspath(options.build_dir)
        options.src_dir = os.path.abspath(options.src_dir)
    index_urls = [options.index_url] + options.extra_index_urls
    if options.no_index:
        logger.notify('Ignoring indexes: %s' % ','.join(index_urls))
        index_urls = []
    finder = PackageFinder(
        find_links=options.find_links,
        index_urls=index_urls)
    requirementSet = RequirementSet(
        build_dir=options.build_dir,
        src_dir=options.src_dir,
        download_dir=options.download_dir,
        download_cache=options.download_cache,
        upgrade=options.upgrade,
        ignore_installed=options.ignore_installed,
        ignore_dependencies=False)
    requirements = []
    requirements.extend(
        InstallRequirement.from_line(spec, None) for spec in args)
    requirements.extend(
        InstallRequirement.from_editable(ed,
                                         default_vcs=options.default_vcs)
        for ed in options.editables)
    for req_file in options.requirements:
        requirements.extend(
            parse_requirements(req_file, finder=finder, options=options))
    # add all requirements into requirements set
    for req in requirements:
        requirementSet.add_requirement(req)
    requirementSet.install_files(finder,
                                 force_root_egg_info=self.bundle,
                                 bundle=self.bundle)
    return requirements, requirementSet
def test_missing_hash_with_require_hashes(self, data):
    """Setting --require-hashes explicitly should raise errors if hashes
    are missing.
    """
    req_set = RequirementSet(require_hashes=True)
    req_set.add_requirement(
        next(iter(process_line('simple==1.0', 'file', 1))))
    finder = PackageFinder([data.find_links], [], session=PipSession())
    resolver = self._basic_resolver(finder)
    expected_error = (
        r'Hashes are required in --require-hashes mode, but they are '
        r'missing .*\n'
        r' simple==1.0 --hash=sha256:393043e672415891885c9a2a0929b1af95'
        r'fb866d6ca016b42d2e6ce53619b653$')
    assert_raises_regexp(HashErrors, expected_error,
                         resolver.resolve, req_set)
def run(self, options, args):
    """Collect requirements from args and -r files, then uninstall them."""
    req_set = RequirementSet(build_dir=None, src_dir=None,
                             download_dir=None)
    for spec in args:
        req_set.add_requirement(InstallRequirement.from_line(spec))
    for req_file in options.requirements:
        for parsed in parse_requirements(req_file, options=options):
            req_set.add_requirement(parsed)
    req_set.uninstall(auto_confirm=options.yes)
def run(self, options, args):
    """Resolve every requested requirement and uninstall it."""
    with self._build_session(options) as session:
        wheel_cache = WheelCache(options.cache_dir,
                                 pip.index.FormatControl(set(), set()))
        requirement_set = RequirementSet(
            build_dir=None,
            src_dir=None,
            download_dir=None,
            isolated=options.isolated_mode,
            session=session,
            wheel_cache=wheel_cache,
        )
        # Command-line specifiers first, then -r files.
        cli_reqs = (
            InstallRequirement.from_line(name,
                                         isolated=options.isolated_mode,
                                         wheel_cache=wheel_cache)
            for name in args)
        for req in cli_reqs:
            requirement_set.add_requirement(req)
        for filename in options.requirements:
            for req in parse_requirements(filename, options=options,
                                          session=session,
                                          wheel_cache=wheel_cache):
                requirement_set.add_requirement(req)
        if not requirement_set.has_requirements:
            raise InstallationError(
                'You must give at least one requirement to %(name)s (see '
                '"pip help %(name)s")' % dict(name=self.name)
            )
        requirement_set.uninstall(auto_confirm=options.yes)
def __init__(self, egg_directory, install_options=None, global_options=None):
    """Initialise an egg installer targeting *egg_directory*.

    ``install_options`` / ``global_options`` now default to None rather
    than a shared mutable ``[]`` (the mutable-default-argument pitfall).
    NOTE(review): neither parameter is read in this constructor -- confirm
    whether they are consumed elsewhere.
    """
    install_options = [] if install_options is None else install_options
    global_options = [] if global_options is None else global_options
    self.egg_directory = egg_directory = os.path.abspath(
        os.path.expanduser(egg_directory))
    # Rebinding (rather than +=) keeps any class-level default list from
    # being mutated in place by each new instance.
    self.install_options = self.install_options + \
        ["--home=%s" % egg_directory]
    sys.path += [os.path.join(egg_directory, "lib", "python")]
    self.requirement_set = RequirementSet(
        build_dir=build_prefix,
        src_dir=src_prefix,
        download_dir=None,
        upgrade=True)
def test_no_reuse_existing_build_dir(self, data):
    """Test prepare_files raise exception with previous build dir"""
    build_dir = os.path.join(self.tempdir, 'build', 'simple')
    os.makedirs(build_dir)
    # Fix: use a context manager so the handle is not leaked (the
    # original called open() without ever closing the file).
    with open(os.path.join(build_dir, "setup.py"), 'w'):
        pass
    reqset = RequirementSet()
    req = InstallRequirement.from_line('simple')
    reqset.add_requirement(req)
    finder = PackageFinder([data.find_links], [], session=PipSession())
    resolver = self._basic_resolver(finder)
    assert_raises_regexp(
        PreviousBuildDirError,
        r"pip can't proceed with [\s\S]*%s[\s\S]*%s" %
        (req, build_dir.replace('\\', '\\\\')),
        resolver.resolve,
        reqset,
    )
def test_unhashed_deps_on_require_hashes(self, data):
    """Make sure unhashed, unpinned, or otherwise unrepeatable
    dependencies get complained about when --require-hashes is on."""
    req_set = RequirementSet()
    finder = PackageFinder([data.find_links], [], session=PipSession())
    resolver = self._basic_resolver(finder)
    hashed_parent = next(process_line(
        'TopoRequires2==0.0.1 '  # requires TopoRequires
        '--hash=sha256:eaf9a01242c9f2f42cf2bd82a6a848cd'
        'e3591d14f7896bdbefcf48543720c970',
        'file', 1))
    req_set.add_requirement(hashed_parent)
    assert_raises_regexp(
        HashErrors,
        r'In --require-hashes mode, all requirements must have their '
        r'versions pinned.*\n'
        r' TopoRequires from .*$',
        resolver.resolve,
        req_set)
def test_unhashed_deps_on_require_hashes(self, data):
    """Make sure unhashed, unpinned, or otherwise unrepeatable
    dependencies get complained about when --require-hashes is on."""
    finder = PackageFinder([data.find_links], [], session=PipSession())
    resolver = self._basic_resolver(finder)
    req_set = RequirementSet()
    # The parent is pinned and hashed, but its dependency is not.
    line = ('TopoRequires2==0.0.1 '  # requires TopoRequires
            '--hash=sha256:eaf9a01242c9f2f42cf2bd82a6a848cd'
            'e3591d14f7896bdbefcf48543720c970')
    req_set.add_requirement(next(process_line(line, 'file', 1)))
    assert_raises_regexp(
        HashErrors,
        r'In --require-hashes mode, all requirements must have their '
        r'versions pinned.*\n'
        r' TopoRequires from .*$',
        resolver.resolve,
        req_set)
def test_missing_hash_checking(self, data):
    """Make sure prepare_files() raises an error when a requirement has
    no hash in implicit hash-checking mode.
    """
    reqset = RequirementSet()

    def add(line, lineno):
        # All requirements enter through the requirements-file parser.
        reqset.add_requirement(list(process_line(line, 'file', lineno))[0])

    # No flags here. This tests that detection of later flags nonetheless
    # requires earlier packages to have hashes:
    add('blessings==1.0', 1)
    # This flag activates --require-hashes mode:
    add('tracefront==0.1 --hash=sha256:somehash', 2)
    # This hash should be accepted because it came from the reqs file,
    # not from the internet:
    add('https://pypi.python.org/packages/source/m/more-'
        'itertools/more-itertools-1.0.tar.gz#md5=b21850c'
        '3cfa7efbb70fd662ab5413bdd', 3)
    # The error text should list this as a URL and not `peep==3.1.1`:
    add('https://pypi.python.org/packages/source/p/peep/'
        'peep-3.1.1.tar.gz', 4)
    finder = PackageFinder([], ['https://pypi.python.org/simple'],
                           session=PipSession())
    resolver = self._basic_resolver(finder)
    assert_raises_regexp(
        HashErrors,
        r'Hashes are required in --require-hashes mode, but they are '
        r'missing .*\n'
        r' https://pypi\.python\.org/packages/source/p/peep/peep'
        r'-3\.1\.1\.tar\.gz --hash=sha256:[0-9a-f]+\n'
        r' blessings==1.0 --hash=sha256:[0-9a-f]+\n'
        r'THESE PACKAGES DO NOT MATCH THE HASHES.*\n'
        r' tracefront==0.1 .*:\n'
        r' Expected sha256 somehash\n'
        r' Got [0-9a-f]+$',
        resolver.resolve,
        reqset)
def test_hash_mismatch(self, data):
    """A hash mismatch should raise an error."""
    archive = (data.packages / 'simple-1.0.tar.gz').abspath
    requirement_line = '%s --hash=sha256:badbad' % path_to_url(archive)
    req_set = RequirementSet(require_hashes=True)
    req_set.add_requirement(
        list(process_line(requirement_line, 'file', 1))[0])
    finder = PackageFinder([data.find_links], [], session=PipSession())
    resolver = self._basic_resolver(finder)
    assert_raises_regexp(
        HashErrors,
        r'THESE PACKAGES DO NOT MATCH THE HASHES.*\n'
        r' file:///.*/data/packages/simple-1\.0\.tar\.gz .*:\n'
        r' Expected sha256 badbad\n'
        r' Got 393043e672415891885c9a2a0929b1af95fb866d'
        r'6ca016b42d2e6ce53619b653$',
        resolver.resolve,
        req_set)
def test_hashed_deps_on_require_hashes(self, data):
    """Make sure hashed dependencies get installed when --require-hashes
    is on.

    (We actually just check that no "not all dependencies are hashed!"
    error gets raised while preparing; there is no reason to expect
    installation to then fail, as the code paths are the same as ever.)
    """
    reqset = RequirementSet()
    # Parent package, pinned and hashed; it depends on TopoRequires.
    reqset.add_requirement(next(process_line(
        'TopoRequires2==0.0.1 '  # requires TopoRequires
        '--hash=sha256:eaf9a01242c9f2f42cf2bd82a6a848cd'
        'e3591d14f7896bdbefcf48543720c970',
        'file', 1)))
    # The dependency itself, also pinned and hashed.
    reqset.add_requirement(next(process_line(
        'TopoRequires==0.0.1 '
        '--hash=sha256:d6dd1e22e60df512fdcf3640ced3039b3b02a56ab2cee81ebcb'
        '3d0a6d4e8bfa6',
        'file', 2)))
    # NOTE(review): no resolve/prepare call is visible in this view, even
    # though the docstring describes one -- the block may be truncated;
    # confirm against the full test source.
def req_set_from_file(cls, filename, download_dir, deplinks=None):
    """Yield a RequirementSet populated from *filename*, then the finder.

    Generator protocol: first yields the populated requirement set, then
    the PackageFinder used to locate the distributions.

    Fix: the original accumulated ``req.req`` values into a ``names``
    list that was never read; the dead accumulation has been removed.
    """
    src_dir = path(src_prefix).abspath()
    finder = cls.package_finder(deplinks)
    requirement_set = RequirementSet(
        build_dir=cls.build_dir,
        src_dir=src_dir,
        download_dir=download_dir,
        download_cache=None,
        upgrade=False,
        ignore_installed=True,
        ignore_dependencies=False)
    options = cls.options()
    for req in cls.parse_requirements(filename, finder=finder,
                                      options=options):
        requirement_set.add_requirement(req)
    yield requirement_set
    yield finder
def test_unpinned_hash_checking(self, data):
    """Make sure prepare_files() raises an error when a requirement is
    not version-pinned in hash-checking mode.
    """
    req_set = RequirementSet()
    offending_lines = [
        # There must be exactly 1 specifier -- this one has none:
        ('simple --hash=sha256:a90427ae31f5d1d0d7ec06ee97'
         'd9fcf2d0fc9a786985250c1c83fd68df5911dd', 1),
        # The operator must be == -- this one uses >:
        ('simple2>1.0 --hash=sha256:3ad45e1e9aa48b4462af0'
         '123f6a7e44a9115db1ef945d4d92c123dfe21815a06', 2),
    ]
    for line, lineno in offending_lines:
        req_set.add_requirement(
            list(process_line(line, 'file', lineno))[0])
    finder = PackageFinder([data.find_links], [], session=PipSession())
    resolver = self._basic_resolver(finder)
    assert_raises_regexp(
        HashErrors,
        # Make sure all failing requirements are listed:
        r'versions pinned with ==. These do not:\n'
        r' simple .* \(from -r file \(line 1\)\)\n'
        r' simple2>1.0 .* \(from -r file \(line 2\)\)',
        resolver.resolve,
        req_set)
def test_unsupported_hashes(self, data):
    """VCS and dir links should raise errors when --require-hashes is on.

    In addition, complaints about the type of requirement (VCS or dir)
    should trump the presence or absence of a hash.
    """
    req_set = RequirementSet(require_hashes=True)
    # A VCS link, even with a hash attached:
    req_set.add_requirement(list(process_line(
        'git+git://github.com/pypa/pip-test-package --hash=sha256:123',
        'file', 1))[0])
    # A plain local directory:
    dir_path = data.packages.join('FSPkg')
    req_set.add_requirement(list(process_line(
        'file://%s' % (dir_path,), 'file', 2))[0])
    finder = PackageFinder([data.find_links], [], session=PipSession())
    resolver = self._basic_resolver(finder)
    sep = os.path.sep
    if sep == '\\':
        sep = '\\\\'  # This needs to be escaped for the regex
    assert_raises_regexp(
        HashErrors,
        r"Can't verify hashes for these requirements because we don't "
        r"have a way to hash version control repositories:\n"
        r" git\+git://github\.com/pypa/pip-test-package \(from -r file "
        r"\(line 1\)\)\n"
        r"Can't verify hashes for these file:// requirements because they "
        r"point to directories:\n"
        r" file://.*{sep}data{sep}packages{sep}FSPkg "
        r"\(from -r file \(line 2\)\)".format(sep=sep),
        resolver.resolve,
        req_set)
def _make_requirement_set(session, tempdir, download_dir, wheel_download_dir):
    """Construct a RequirementSet rooted at *tempdir*.

    The current directory is added as an editable requirement, then any
    requirements listed in ./requirements.txt are added on top.

    :param session: a PipSession used for network access.
    :param tempdir: scratch directory; build/, src/ and .cache/ live here.
    :param download_dir: where sdists are downloaded.
    :param wheel_download_dir: where wheels are downloaded.
    :returns: the populated RequirementSet.
    """
    format_control = FormatControl(set(), set())
    wheel_cache = WheelCache(os.path.join(tempdir, ".cache"), format_control)
    requirement_set = RequirementSet(
        build_dir=os.path.join(tempdir, 'build'),
        src_dir=os.path.join(tempdir, 'src'),
        ignore_installed=True,
        download_dir=download_dir,
        wheel_download_dir=wheel_download_dir,
        wheel_cache=wheel_cache,
        session=session,
    )
    try:
        requirement_set.add_requirement(
            EditInstallRequirement.from_editable("."),
        )
    except Exception as e:
        # Abort with the error message as the exit status, not a traceback.
        sys.exit(e)
    if os.path.exists("requirements.txt"):
        with open("requirements.txt") as f:
            for line in f:
                # BUG FIX: skip blank lines and comment lines, which
                # InstallRequirement.from_line cannot parse; also strip the
                # trailing newline before handing the spec over.
                line = line.strip()
                if not line or line.startswith('#'):
                    continue
                requirement_set.add_requirement(
                    InstallRequirement.from_line(line),
                )
    return requirement_set
class Pip(object):
    """A class to puppet PIP to install new eggs."""

    # Per-instance RequirementSet; assigned in __init__.
    requirement_set = None  # the requirement set

    # the package finder -- created once at class-definition time and shared
    # by every instance.
    finder = PackageFinder(find_links=[],
                           index_urls=["http://pypi.python.org/simple/"],
                           session=PipSession())

    def __init__(self, egg_directory):
        # Expand "~" and make the egg directory absolute, remembering it on
        # the instance.
        self.egg_directory = egg_directory = os.path.abspath(
            os.path.expanduser(egg_directory))
        # Make eggs installed into the directory importable immediately.
        sys.path += [os.path.join(egg_directory, "lib",
                                  "python" + PYTHON_VERSION, "site-packages")]
        # NOTE(review): build_prefix comes from BuildDirectory() and may be
        # cleaned up when this `with` block exits, yet the RequirementSet
        # created here outlives the block -- confirm the build dir survives
        # long enough for prepare_files()/install() in install_egg().
        # NOTE(review): src_prefix is not defined in this class; presumably
        # it is imported at module level (e.g. from pip.locations) -- verify.
        with BuildDirectory() as build_prefix:
            self.requirement_set = RequirementSet(
                build_dir=build_prefix,
                src_dir=src_prefix,
                download_dir=None,
                upgrade=True,
                session=PipSession()
            )

    def delete_all_eggs(self):
        """ delete all the eggs in the directory specified """
        path_to_delete = os.path.join(self.egg_directory, "lib", "python")
        if os.path.exists(path_to_delete):
            shutil.rmtree(path_to_delete)

    def install_egg(self, egg_name):
        """ Install an egg into the egg directory """
        if not os.path.exists(self.egg_directory):
            os.makedirs(self.egg_directory)
        self.requirement_set.add_requirement(
            InstallRequirement.from_line(egg_name, None))
        try:
            self.requirement_set.prepare_files(self.finder)
            self.requirement_set.install(
                ["--prefix=" + self.egg_directory], [])
        except DistributionNotFound:
            # NOTE(review): reaches into the private _keys attribute of the
            # requirements container to roll back the failed requirement --
            # fragile across pip versions; verify no public API exists.
            self.requirement_set.requirements._keys.remove(egg_name)
            raise PipException()
def run(self, options, args):
    """Uninstall the requirements named on the command line or listed in
    the given requirement files; error out when none were supplied.
    """
    # Uninstalling needs no build, source, or download directories.
    req_set = RequirementSet(build_dir=None, src_dir=None, download_dir=None)
    for name in args:
        req_set.add_requirement(InstallRequirement.from_line(name))
    for req_file in options.requirements:
        parsed_reqs = parse_requirements(req_file, options=options)
        for parsed in parsed_reqs:
            req_set.add_requirement(parsed)
    if not req_set.has_requirements:
        message = ('You must give at least one requirement '
                   'to %(name)s (see "pip help %(name)s")'
                   % dict(name=self.name))
        raise InstallationError(message)
    req_set.uninstall(auto_confirm=options.yes)
class Pip(object):
    """A class to puppet PIP to install new eggs."""

    requirement_set = None  # the requirement set
    # the package finder -- created once and shared by all instances
    finder = PackageFinder(find_links=[],
                           index_urls=["http://pypi.python.org/simple/"])
    install_options = []  # default install options with pip
    global_options = []  # default global options with pip

    def __init__(self, egg_directory, install_options=None,
                 global_options=None):
        """Prepare a RequirementSet targeting *egg_directory*.

        :param egg_directory: directory eggs are installed into ("~" is
            expanded and the path made absolute).
        :param install_options: extra pip install options; a ``--home``
            option is always appended. Defaults to the class-level list.
        :param global_options: extra pip global options. Defaults to the
            class-level list.
        """
        # BUG FIX: the old signature used mutable default arguments and then
        # grew the *class-level* install_options list in place with ``+=``,
        # so every instantiation leaked a "--home=..." entry into all other
        # instances, and caller-supplied options were silently ignored.
        # Build fresh per-instance lists instead.
        self.egg_directory = egg_directory = os.path.abspath(
            os.path.expanduser(egg_directory))
        base_install = (Pip.install_options if install_options is None
                        else install_options)
        base_global = (Pip.global_options if global_options is None
                       else global_options)
        self.install_options = list(base_install)
        self.install_options += ["--home=%s" % egg_directory]
        self.global_options = list(base_global)
        sys.path += [os.path.join(egg_directory, "lib", "python")]
        # NOTE(review): build_prefix and src_prefix are not defined in this
        # class; presumably imported at module level (pip.locations) --
        # verify.
        self.requirement_set = RequirementSet(
            build_dir=build_prefix,
            src_dir=src_prefix,
            download_dir=None,
            upgrade=True)

    def install_egg(self, egg_name):
        """Install *egg_name* into the egg directory.

        :raises PipException: when the distribution cannot be found; the
            failed requirement is rolled back out of the set first.
        """
        if not os.path.exists(self.egg_directory):
            os.makedirs(self.egg_directory)
        self.requirement_set.add_requirement(
            InstallRequirement.from_line(egg_name, None))
        try:
            self.requirement_set.prepare_files(self.finder,
                                               force_root_egg_info=False,
                                               bundle=False)
            self.requirement_set.install(self.install_options,
                                         self.global_options)
        except DistributionNotFound:
            # Remove the failed requirement so a later attempt can retry it.
            # (Touches pip's private _keys list; no public API for this.)
            self.requirement_set.requirements._keys.remove(egg_name)
            raise PipException()
def super_run(self, options, args):
    """Copy of relevant parts from InstallCommand's run()"""
    index_urls = self.config.get_indexes()
    # Temporary --target directory, registered with self.cleanup so it is
    # removed later.
    temp_target_dir = (self.cleanup << temp_dir('pip2nix-temp-target'))
    with self._build_session(options) as session:
        finder = self._build_package_finder(options, index_urls, session)
        wheel_cache = WheelCache(options.cache_dir, options.format_control)
        # delete=True: the build directory is removed when this block exits,
        # so all work on the RequirementSet happens inside it.
        with BuildDirectory(options.build_dir, delete=True) as build_dir:
            requirement_set = RequirementSet(
                build_dir=build_dir,
                src_dir=options.src_dir,
                download_dir=options.download_dir,
                upgrade=options.upgrade,
                as_egg=options.as_egg,
                ignore_installed=options.ignore_installed,
                ignore_dependencies=options.ignore_dependencies,
                force_reinstall=options.force_reinstall,
                use_user_site=options.use_user_site,
                target_dir=temp_target_dir,
                session=session,
                pycompile=options.compile,
                isolated=options.isolated_mode,
                wheel_cache=wheel_cache,
            )
            # Fill the set from CLI args and any -r/-c files.
            self.populate_requirement_set(
                requirement_set, args, options, finder, session,
                self.name, wheel_cache
            )
            # Download/unpack everything, then hand off for processing
            # before build artifacts are cleaned up.
            requirement_set.prepare_files(finder)
            self.process_requirements(options, requirement_set, finder)
            requirement_set.cleanup_files()
    return requirement_set
def run(self, options, args):
    """Uninstall every requirement given as an argument or listed in the
    supplied requirement files.
    """
    # No build, source, or download directories are needed to uninstall.
    req_set = RequirementSet(build_dir=None,
                             src_dir=None,
                             download_dir=None)
    for requirement in (InstallRequirement.from_line(name) for name in args):
        req_set.add_requirement(requirement)
    for req_file in options.requirements:
        for parsed in parse_requirements(req_file, options=options):
            req_set.add_requirement(parsed)
    req_set.uninstall(auto_confirm=options.yes)
def test_missing_hash_checking(self, data):
    """Resolution must fail when a requirement lacks a hash in implicit
    hash-checking mode.
    """
    reqset = RequirementSet()
    req_lines = [
        # No flags here. This tests that detection of later flags
        # nonetheless requires earlier packages to have hashes:
        ('blessings==1.0', 1),
        # This flag activates --require-hashes mode:
        ('tracefront==0.1 --hash=sha256:somehash', 2),
        # This hash should be accepted because it came from the reqs file,
        # not from the internet:
        ('https://pypi.python.org/packages/source/m/more-'
         'itertools/more-itertools-1.0.tar.gz#md5=b21850c'
         '3cfa7efbb70fd662ab5413bdd', 3),
        # The error text should list this as a URL and not `peep==3.1.1`:
        ('https://pypi.python.org/packages/source/p/peep/'
         'peep-3.1.1.tar.gz', 4),
    ]
    for text, lineno in req_lines:
        reqset.add_requirement(list(process_line(text, 'file', lineno))[0])
    finder = PackageFinder([], ['https://pypi.python.org/simple'],
                           session=PipSession())
    resolver = self._basic_resolver(finder)
    assert_raises_regexp(
        HashErrors,
        r'Hashes are required in --require-hashes mode, but they are '
        r'missing .*\n'
        r' https://pypi\.python\.org/packages/source/p/peep/peep'
        r'-3\.1\.1\.tar\.gz --hash=sha256:[0-9a-f]+\n'
        r' blessings==1.0 --hash=sha256:[0-9a-f]+\n'
        r'THESE PACKAGES DO NOT MATCH THE HASHES.*\n'
        r' tracefront==0.1 .*:\n'
        r' Expected sha256 somehash\n'
        r' Got [0-9a-f]+$',
        resolver.resolve,
        reqset)
def run(self, options, args):
    # Downloading never consults what is already installed.
    options.ignore_installed = True
    options.src_dir = os.path.abspath(options.src_dir)
    options.download_dir = normalize_path(options.download_dir)
    ensure_dir(options.download_dir)
    with self._build_session(options) as session:
        finder = self._build_package_finder(options, session)
        # Only auto-delete the build dir when the user neither named one
        # nor asked to keep it with --no-clean.
        build_delete = (not (options.no_clean or options.build_dir))
        # Wheel caching is disabled rather than risk writing to a directory
        # owned by another user (e.g. when run under sudo).
        if options.cache_dir and not check_path_owner(options.cache_dir):
            logger.warning(
                "The directory '%s' or its parent directory is not owned "
                "by the current user and caching wheels has been "
                "disabled. check the permissions and owner of that "
                "directory. If executing pip with sudo, you may want "
                "sudo's -H flag.",
                options.cache_dir,
            )
            options.cache_dir = None
        with BuildDirectory(options.build_dir,
                            delete=build_delete) as build_dir:
            requirement_set = RequirementSet(
                build_dir=build_dir,
                src_dir=options.src_dir,
                download_dir=options.download_dir,
                ignore_installed=True,
                ignore_dependencies=options.ignore_dependencies,
                session=session,
                isolated=options.isolated_mode,
                require_hashes=options.require_hashes)
            # Fill the set from CLI args and any -r/-c files.
            self.populate_requirement_set(requirement_set, args, options,
                                          finder, session, self.name, None)
            if not requirement_set.has_requirements:
                return
            # Performs the actual downloads.
            requirement_set.prepare_files(finder)
            downloaded = ' '.join([
                req.name for req in requirement_set.successfully_downloaded
            ])
            if downloaded:
                logger.info('Successfully downloaded %s', downloaded)
            # Clean up
            if not options.no_clean:
                requirement_set.cleanup_files()
    return requirement_set
def test_missing_hash_with_require_hashes_in_reqs_file(self, data, tmpdir):
    """A --require-hashes flag inside a requirements file must propagate
    to the RequirementSet.
    """
    req_set = RequirementSet(require_hashes=False)
    session = PipSession()
    finder = PackageFinder([data.find_links], [], session=session)
    cmd = InstallCommand()
    with requirements_file('--require-hashes', tmpdir) as reqs_path:
        options, args = cmd.parse_args(['-r', reqs_path])
        cmd.populate_requirement_set(req_set, args, options, finder,
                                     session, cmd.name, wheel_cache=None)
    assert req_set.require_hashes