def test_unpinned_hash_checking(self, data):
    """Make sure prepare_files() raises an error when a requirement is
    not version-pinned in hash-checking mode.
    """
    reqset = RequirementSet()
    # Test that there must be exactly 1 specifier:
    reqset.add_requirement(
        list(
            process_line(
                'simple --hash=sha256:a90427ae31f5d1d0d7ec06ee97'
                'd9fcf2d0fc9a786985250c1c83fd68df5911dd',
                'file',
                1))[0])
    # Test that the operator must be ==:
    reqset.add_requirement(
        list(
            process_line(
                'simple2>1.0 --hash=sha256:3ad45e1e9aa48b4462af0'
                '123f6a7e44a9115db1ef945d4d92c123dfe21815a06',
                'file',
                2))[0])
    finder = PackageFinder([data.find_links], [], session=PipSession())
    resolver = self._basic_resolver(finder)
    # Resolving must fail with one combined HashErrors naming both lines.
    assert_raises_regexp(
        HashErrors,
        # Make sure all failing requirements are listed:
        r'versions pinned with ==. These do not:\n'
        r' simple .* \(from -r file \(line 1\)\)\n'
        r' simple2>1.0 .* \(from -r file \(line 2\)\)',
        resolver.resolve,
        reqset)
def run(self, options, args):
    """Collect requirements from CLI args and -r files, then uninstall
    them; error out when nothing was requested.
    """
    with self._build_session(options) as session:
        wheel_cache = WheelCache(
            options.cache_dir, pip.index.FormatControl(set(), set()))
        req_set = RequirementSet(
            build_dir=None,
            src_dir=None,
            download_dir=None,
            isolated=options.isolated_mode,
            session=session,
            wheel_cache=wheel_cache,
        )
        # Requirements given directly on the command line.
        for spec in args:
            req_set.add_requirement(InstallRequirement.from_line(
                spec, isolated=options.isolated_mode,
                wheel_cache=wheel_cache))
        # Requirements pulled from -r/--requirement files.
        for req_file in options.requirements:
            parsed = parse_requirements(req_file, options=options,
                                        session=session,
                                        wheel_cache=wheel_cache)
            for req in parsed:
                req_set.add_requirement(req)
        if not req_set.has_requirements:
            raise InstallationError(
                'You must give at least one requirement to %(name)s (see '
                '"pip help %(name)s")' % dict(name=self.name))
        req_set.uninstall(auto_confirm=options.yes)
def _install_requirement(self, requirement):
    """Download, build and install a single requirement, retrying once
    after clearing a stale build directory.

    Returns (True, None) early when the requirement is already satisfied.
    NOTE(review): this snippet appears truncated -- the outer ``try:`` has
    no visible except/finally clause.  It also uses the Python-2-only
    ``except X, err`` syntax, and ``build_prefix``/``src_prefix`` are
    presumably module-level pip locations -- confirm against the full file.
    """
    if not self.local_options:
        self.local_options = []
    if self._check_requirement(requirement):
        return True, None
    try:
        # Parse requirement
        requirement = InstallRequirement.from_line(str(requirement), None)
        # Build the requirement set. We're doing this one at a time so we can actually detect
        # which ones fail.
        requirement_set = RequirementSet(build_dir=build_prefix,
                                         src_dir=src_prefix,
                                         download_dir=None)
        requirement_set.add_requirement(requirement)
        # Download and build requirement
        try:
            requirement_set.prepare_files(self.finder,
                                          force_root_egg_info=False,
                                          bundle=False)
        except PreviousBuildDirError, err:
            # Remove previous build directories if they're detected.. shouldn't be an issue
            # now that we've removed upstream dependencies from requirements.txt.
            location = requirement.build_location(build_prefix, True)
            shutil.rmtree(location)
            requirement_set.prepare_files(self.finder,
                                          force_root_egg_info=False,
                                          bundle=False)
        # Finally, install the requirement.
        requirement_set.install(self.local_options, [])
def getDependencies(name, requirementSet=None, finder=None):
    """Get dependencies of a python project

    @param name: name of python project
    @param requirementSet: RequirementSet to reuse; a fresh one is created
        when omitted
    @param finder: PackageFinder; defaults to one pointed at PyPI
    @return: list of dependencies collected by traceDependencys()
    """
    if requirementSet is None:
        requirementSet = RequirementSet(
            build_dir=os.path.abspath(build_prefix),
            src_dir=os.path.abspath(src_prefix),
            download_dir=None,
            download_cache=None,
            upgrade=False,
            ignore_installed=True,
            ignore_dependencies=False)
    if finder is None:
        finder = PackageFinder(
            find_links=[],
            index_urls=['http://pypi.python.org/simple'])
    # lead pip download all dependencies
    req = InstallRequirement.from_line(name, None)
    requirementSet.add_requirement(req)
    requirementSet.install_files(finder)
    # trace the dependencies relationships between projects
    dependencies = []
    traceDependencys(req, requirementSet, dependencies)
    return dependencies
class Pip(object):
    """ A class to puppet PIP to install new eggs """

    requirement_set = None  # the requirement set

    # the package finder
    finder = PackageFinder(find_links=[],
                           index_urls=["http://pypi.python.org/simple/"],
                           session=PipSession())

    def __init__(self, egg_directory):
        # Normalise the target directory and make its site-packages
        # importable from this process.
        self.egg_directory = egg_directory = os.path.abspath(
            os.path.expanduser(egg_directory))
        sys.path += [os.path.join(egg_directory, "lib",
                                  "python" + PYTHON_VERSION,
                                  "site-packages")]
        # NOTE(review): BuildDirectory is used as a context manager, so
        # build_prefix is presumably cleaned up as soon as __init__
        # returns, while the RequirementSet created here keeps pointing at
        # it -- confirm install_egg() still works after that cleanup.
        with BuildDirectory() as build_prefix:
            self.requirement_set = RequirementSet(
                build_dir=build_prefix,
                src_dir=src_prefix,
                download_dir=None,
                upgrade=True,
                session=PipSession()
            )

    def delete_all_eggs(self):
        """ delete all the eggs in the directory specified """
        path_to_delete = os.path.join(self.egg_directory, "lib", "python")
        if os.path.exists(path_to_delete):
            shutil.rmtree(path_to_delete)

    def install_egg(self, egg_name):
        """ Install an egg into the egg directory """
        if not os.path.exists(self.egg_directory):
            os.makedirs(self.egg_directory)
        self.requirement_set.add_requirement(
            InstallRequirement.from_line(egg_name, None))
        try:
            self.requirement_set.prepare_files(self.finder)
            self.requirement_set.install(
                ["--prefix=" + self.egg_directory], [])
        except DistributionNotFound:
            # NOTE(review): reaches into the private ``_keys`` attribute of
            # the requirements container to forget the failed requirement.
            self.requirement_set.requirements._keys.remove(egg_name)
            raise PipException()
def _make_requirement_set(session, tempdir, download_dir, wheel_download_dir):
    """Construct a RequirementSet holding the current directory (as an
    editable install) plus everything listed in ./requirements.txt.

    :param session: pip network session
    :param tempdir: scratch directory for build/src/cache subdirs
    :param download_dir: where sdists are downloaded
    :param wheel_download_dir: where wheels are downloaded
    :return: the populated RequirementSet
    """
    format_control = FormatControl(set(), set())
    wheel_cache = WheelCache(os.path.join(tempdir, ".cache"), format_control)
    requirement_set = RequirementSet(
        build_dir=os.path.join(tempdir, 'build'),
        src_dir=os.path.join(tempdir, 'src'),
        ignore_installed=True,
        download_dir=download_dir,
        wheel_download_dir=wheel_download_dir,
        wheel_cache=wheel_cache,
        session=session,
    )
    try:
        requirement_set.add_requirement(
            EditInstallRequirement.from_editable("."),
        )
    except Exception as e:
        # Broad catch is deliberate: any failure here aborts the run with
        # the exception as the exit message.
        sys.exit(e)
    if os.path.exists("requirements.txt"):
        with open("requirements.txt") as f:
            for line in f:
                # Fix: strip the trailing newline and skip blank lines and
                # comments -- from_line() would otherwise choke on them.
                line = line.strip()
                if not line or line.startswith('#'):
                    continue
                requirement_set.add_requirement(
                    InstallRequirement.from_line(line),
                )
    return requirement_set
def test_unsupported_hashes(self, data):
    """VCS and dir links should raise errors when --require-hashes is on.

    In addition, complaints about the type of requirement (VCS or dir)
    should trump the presence or absence of a hash.
    """
    reqset = RequirementSet(require_hashes=True)
    # A VCS requirement: hashes are impossible to verify for these.
    reqset.add_requirement(
        list(
            process_line(
                'git+git://github.com/pypa/pip-test-package --hash=sha256:123',
                'file',
                1))[0])
    # A local directory requirement: likewise unhashable.
    dir_path = data.packages.join('FSPkg')
    reqset.add_requirement(
        list(process_line('file://%s' % (dir_path, ), 'file', 2))[0])
    finder = PackageFinder([data.find_links], [], session=PipSession())
    resolver = self._basic_resolver(finder)
    sep = os.path.sep
    if sep == '\\':
        sep = '\\\\'  # This needs to be escaped for the regex
    assert_raises_regexp(
        HashErrors,
        r"Can't verify hashes for these requirements because we don't "
        r"have a way to hash version control repositories:\n"
        r" git\+git://github\.com/pypa/pip-test-package \(from -r file "
        r"\(line 1\)\)\n"
        r"Can't verify hashes for these file:// requirements because they "
        r"point to directories:\n"
        r" file://.*{sep}data{sep}packages{sep}FSPkg "
        r"\(from -r file \(line 2\)\)".format(sep=sep),
        resolver.resolve,
        reqset)
def install(name):
    """ Try to install the package with 'name' into folder 'libs/python27'. """
    # NOTE: Python 2 module (print statements).
    print "Installation directory:"
    print python27_dir()
    requirement_set = RequirementSet(build_dir=build_prefix,
                                     src_dir=src_prefix,
                                     download_dir=None)
    requirement_set.add_requirement(InstallRequirement.from_line(name, None))
    # Everything is installed under the private prefix, not site-wide.
    install_options = ["--prefix=%s" % python27_dir()]
    global_options = []
    finder = PackageFinder(find_links=[],
                           index_urls=["http://pypi.python.org/simple/"])
    try:
        requirement_set.prepare_files(finder,
                                      force_root_egg_info=False,
                                      bundle=False)
        requirement_set.install(install_options, global_options)
        print "\nSuccessfully installed\n=================================="
        for package in requirement_set.successfully_installed:
            print package.name
        print "\nDone.\n"
    except DistributionNotFound:
        print "No package found with name: %s" % name
    except Exception as e:
        # Catch-all so a failed install reports instead of crashing.
        print "Error:", e
def pip_install(name, tmpdir):
    """Download (but do not install) package *name* under *tmpdir* and
    return the path of the single downloaded file.
    """
    build_dir = os.path.join(tmpdir, 'build')
    src_dir = os.path.join(tmpdir, 'src')
    download_dir = os.path.join(tmpdir, 'download')
    os.mkdir(build_dir)
    os.mkdir(src_dir)
    os.mkdir(download_dir)
    # NOTE(review): use_mirrors / allow_all_external / allow_all_insecure
    # are options from older pip releases -- confirm the pinned pip version.
    finder = PackageFinder(
        find_links=[],
        index_urls=['https://pypi.python.org/simple/'],
        use_mirrors=False,
        allow_all_external=True,
        allow_all_insecure=True,
    )
    # ignore_dependencies: only *name* itself is fetched.
    requirement_set = RequirementSet(
        build_dir=build_dir,
        src_dir=src_dir,
        download_dir=download_dir,
        ignore_installed=True,
        ignore_dependencies=True
    )
    requirement_set.add_requirement(InstallRequirement.from_line(name, None))
    requirement_set.prepare_files(finder)
    # should be exactly one
    filename = os.listdir(download_dir)[0]
    path = os.path.join(download_dir, filename)
    return path
def get_requirements(req_specs, requirement_files=None):
    """Build a pip RequirementSet from pip-like input arguments.

    Parameters
    ----------
    req_specs : sequence
        sequence of requirement specifiers, maybe with versions
    requirement_files : None or sequence, optional
        sequence of filenames or URLs with requirements

    Returns
    -------
    requirement_set : RequirementSet instance
        Pip requirements set
    """
    session = PipSession()
    requirement_set = RequirementSet(
        build_dir=None,
        src_dir=None,
        download_dir=None,
        session=session,
    )
    # Specifiers given directly.
    for spec in req_specs:
        requirement_set.add_requirement(InstallRequirement.from_line(spec))
    # Specifiers pulled out of requirement files, if any.
    for req_file in (requirement_files if requirement_files is not None
                     else []):
        for parsed in parse_requirements(req_file, session=session):
            requirement_set.add_requirement(parsed)
    return requirement_set
class Pip(object):
    """ A class to puppet PIP to install new eggs """

    requirement_set = None  # the requirement set

    # the package finder
    finder = PackageFinder(find_links=[],
                          index_urls=["http://pypi.python.org/simple/"])
    install_options = []  # class-level default install options with pip
    global_options = []  # class-level default global options with pip

    def __init__(self, egg_directory, install_options=None,
                 global_options=None):
        """Prepare a RequirementSet targeting *egg_directory*.

        Fixes two bugs in the original:
        * mutable ``[]`` default arguments were shared across calls;
        * ``self.install_options += ...`` appended to the *class-level*
          list, so every instance (and the class) accumulated --home
          options from all previous instances.
        The constructor arguments were also silently ignored before; they
        are now honoured.
        """
        self.install_options = list(install_options) if install_options else []
        self.global_options = list(global_options) if global_options else []
        self.egg_directory = egg_directory = os.path.abspath(
            os.path.expanduser(egg_directory))
        # Per-instance option: install under the egg directory.
        self.install_options += ["--home=%s" % egg_directory]
        sys.path += [os.path.join(egg_directory, "lib", "python")]
        self.requirement_set = RequirementSet(
            build_dir=build_prefix,
            src_dir=src_prefix,
            download_dir=None,
            upgrade=True)

    def install_egg(self, egg_name):
        """ Install an egg into the egg directory """
        if not os.path.exists(self.egg_directory):
            os.makedirs(self.egg_directory)
        self.requirement_set.add_requirement(
            InstallRequirement.from_line(egg_name, None))
        try:
            self.requirement_set.prepare_files(self.finder,
                                               force_root_egg_info=False,
                                               bundle=False)
            self.requirement_set.install(self.install_options,
                                         self.global_options)
        except DistributionNotFound:
            # Forget the failed requirement so a retry is possible.
            self.requirement_set.requirements._keys.remove(egg_name)
            raise PipException()
def run(self, options, args):
    """Run the normal uninstall, then prune the removed requirements from
    the saved requirements file when --remove-uninstalled is set.
    """
    super(UninstallCommand, self).run(options, args)
    if not options.remove_uninstalled:
        return
    with self._build_session(options) as session:
        req_set = RequirementSet(
            build_dir=None,
            src_dir=None,
            download_dir=None,
            isolated=options.isolated_mode,
            session=session,
        )
        # Collect what was asked for on the command line...
        for spec in args:
            req_set.add_requirement(InstallRequirement.from_line(
                spec, isolated=options.isolated_mode))
        # ...and in any -r files.
        for req_file in options.requirements:
            for parsed in parse_requirements(
                    req_file, options=options, session=session):
                req_set.add_requirement(parsed)
        if not req_set.has_requirements:
            return
        requirements_file = os.path.abspath(
            os.path.join(os.getcwd(), options.save_file))
        removed = self._parse_requirements(req_set.requirements.values())
        self._update_requirements_file(requirements_file,
                                       remove_requirements=removed,
                                       session=session)
def run(self, options, args):
    """Uninstall the requirements given on the command line and in any
    -r/--requirement files; raise InstallationError when none were given.
    """
    with self._build_session(options) as session:
        format_control = pip.index.FormatControl(set(), set())
        wheel_cache = WheelCache(options.cache_dir, format_control)
        requirement_set = RequirementSet(
            build_dir=None,
            src_dir=None,
            download_dir=None,
            isolated=options.isolated_mode,
            session=session,
            wheel_cache=wheel_cache,
        )
        # Requirements given directly on the command line.
        for name in args:
            requirement_set.add_requirement(
                InstallRequirement.from_line(
                    name, isolated=options.isolated_mode,
                    wheel_cache=wheel_cache
                )
            )
        # Requirements from requirement files.
        for filename in options.requirements:
            for req in parse_requirements(
                    filename,
                    options=options,
                    session=session,
                    wheel_cache=wheel_cache):
                requirement_set.add_requirement(req)
        if not requirement_set.has_requirements:
            raise InstallationError(
                'You must give at least one requirement to %(name)s (see '
                '"pip help %(name)s")' % dict(name=self.name)
            )
        requirement_set.uninstall(auto_confirm=options.yes)
def test_unpinned_hash_checking(self, data):
    """Make sure prepare_files() raises an error when a requirement is
    not version-pinned in hash-checking mode.
    """
    reqset = RequirementSet()
    # Test that there must be exactly 1 specifier:
    reqset.add_requirement(
        list(process_line('simple --hash=sha256:a90427ae31f5d1d0d7ec06ee97'
                          'd9fcf2d0fc9a786985250c1c83fd68df5911dd',
                          'file', 1))[0])
    # Test that the operator must be ==:
    reqset.add_requirement(list(process_line(
        'simple2>1.0 --hash=sha256:3ad45e1e9aa48b4462af0'
        '123f6a7e44a9115db1ef945d4d92c123dfe21815a06',
        'file', 2))[0])
    finder = PackageFinder([data.find_links], [], session=PipSession())
    resolver = self._basic_resolver(finder)
    # Resolution must fail with one combined HashErrors naming both lines.
    assert_raises_regexp(
        HashErrors,
        # Make sure all failing requirements are listed:
        r'versions pinned with ==. These do not:\n'
        r' simple .* \(from -r file \(line 1\)\)\n'
        r' simple2>1.0 .* \(from -r file \(line 2\)\)',
        resolver.resolve,
        reqset)
def test_exclusive_environment_markers():
    """RequirementSet must accept two requirements for the same project
    whose environment markers are mutually exclusive.
    """
    only_26 = InstallRequirement.from_line(
        "Django>=1.6.10,<1.7 ; python_version == '2.6'")
    not_26 = InstallRequirement.from_line(
        "Django>=1.6.10,<1.8 ; python_version != '2.6'")
    requirements = RequirementSet("", "", "", session=PipSession())
    # Adding the second must not raise a duplicate-requirement error.
    for requirement in (only_26, not_26):
        requirements.add_requirement(requirement)
    assert requirements.has_requirement("Django")
def wheel(self, package, sender_data): source = sender_data[1].pop('path') # If the file has the wheel extention, we bail. We don't have to do # anything :) if re.findall('whl$', source): raise NotForMe target = os.path.dirname(source) # The package finder is what PIP uses to find packages given their # names. This finder won't use internet at all, only the folder we know # that our file is. finder = PackageFinder(find_links=[target], index_urls=[]) # Another requirement to use PIP API, we have to build a requirement # set. build_dir = tempfile.mkdtemp() requirement_set = RequirementSet( build_dir=build_dir, src_dir=None, download_dir=None, download_cache=None, ignore_dependencies=True, ignore_installed=True, ) requirement_set.add_requirement( InstallRequirement.from_line(package)) # Here we go, we're finally converting the package from a regular # format to a wheel. Notice that the wheel dir is another tmp # directory. See comments below. wheel_dir = tempfile.mkdtemp() builder = WheelBuilder( requirement_set, finder, wheel_dir=wheel_dir, build_options=[], global_options=[], ) builder.build() # Since I just can't retrieve the brand new file name through the API, # the wheel dir is a tmp directory so the *only* file over there *is* # the one that we want. wheel_file = os.listdir(wheel_dir)[0] path = self.index.from_file(os.path.join(wheel_dir, wheel_file)) # Cleaning up the mess. Here I kill the two temp folders I created to # 1) build the package into a wheel, 2) output the wheel file # separately shutil.rmtree(build_dir) shutil.rmtree(wheel_dir) # Finally, we just say where in the storage the file is return {'path': os.path.join(os.path.dirname(source), wheel_file)}
def run(self, options, args):
    """Gather requirements from CLI args and -r files, then uninstall."""
    req_set = RequirementSet(build_dir=None, src_dir=None,
                             download_dir=None)
    for spec in args:
        req_set.add_requirement(InstallRequirement.from_line(spec))
    for req_file in options.requirements:
        for parsed in parse_requirements(req_file, options=options):
            req_set.add_requirement(parsed)
    req_set.uninstall(auto_confirm=options.yes)
def test_unsupported_wheel_link_requirement_raises(self):
    """A remote wheel link with an unsupported platform tag must be
    rejected by add_requirement()."""
    requirement = InstallRequirement.from_line(
        'https://whatever.com/peppercorn-0.4-py2.py3-bogus-any.whl',
    )
    # Sanity-check the parsed link before exercising the rejection.
    assert requirement.link is not None
    assert requirement.link.is_wheel
    assert requirement.link.scheme == "https"
    req_set = RequirementSet()
    with pytest.raises(InstallationError):
        req_set.add_requirement(requirement)
def test_unsupported_wheel_local_file_requirement_raises(self, data):
    """A local wheel file with an unsupported tag must be rejected by
    add_requirement()."""
    requirement = InstallRequirement.from_line(
        data.packages.join('simple.dist-0.1-py1-none-invalid.whl'),
    )
    # Sanity-check the parsed link before exercising the rejection.
    assert requirement.link is not None
    assert requirement.link.is_wheel
    assert requirement.link.scheme == "file"
    req_set = RequirementSet()
    with pytest.raises(InstallationError):
        req_set.add_requirement(requirement)
def wheel(self, package, sender_data): source = sender_data[1].pop('path') # If the file has the wheel extention, we bail. We don't have to do # anything :) if re.findall('whl$', source): raise NotForMe target = os.path.dirname(source) # The package finder is what PIP uses to find packages given their # names. This finder won't use internet at all, only the folder we know # that our file is. finder = PackageFinder(find_links=[target], index_urls=[]) # Another requirement to use PIP API, we have to build a requirement # set. build_dir = tempfile.mkdtemp() requirement_set = RequirementSet( build_dir=build_dir, src_dir=None, download_dir=None, download_cache=None, ignore_dependencies=True, ignore_installed=True, ) requirement_set.add_requirement(InstallRequirement.from_line(package)) # Here we go, we're finally converting the package from a regular # format to a wheel. Notice that the wheel dir is another tmp # directory. See comments below. wheel_dir = tempfile.mkdtemp() builder = WheelBuilder( requirement_set, finder, wheel_dir=wheel_dir, build_options=[], global_options=[], ) builder.build() # Since I just can't retrieve the brand new file name through the API, # the wheel dir is a tmp directory so the *only* file over there *is* # the one that we want. wheel_file = os.listdir(wheel_dir)[0] path = self.index.from_file(os.path.join(wheel_dir, wheel_file)) # Cleaning up the mess. Here I kill the two temp folders I created to # 1) build the package into a wheel, 2) output the wheel file # separately shutil.rmtree(build_dir) shutil.rmtree(wheel_dir) # Finally, we just say where in the storage the file is return {'path': os.path.join(os.path.dirname(source), wheel_file)}
def test_exclusive_environment_markers():
    """Make sure RequirementSet accepts several excluding env markers"""
    # Two Django pins whose markers can never both apply at once.
    eq26 = InstallRequirement.from_line(
        "Django>=1.6.10,<1.7 ; python_version == '2.6'")
    ne26 = InstallRequirement.from_line(
        "Django>=1.6.10,<1.8 ; python_version != '2.6'")
    req_set = RequirementSet('', '', '', session=PipSession())
    req_set.add_requirement(eq26)
    # Adding the second must not raise a double-requirement error.
    req_set.add_requirement(ne26)
    assert req_set.has_requirement('Django')
def run(self, options, args):
    """Uninstall every requirement given on the command line or listed in
    -r/--requirement files."""
    requirement_set = RequirementSet(
        build_dir=None, src_dir=None, download_dir=None)
    # Requirements given directly on the command line.
    for name in args:
        requirement_set.add_requirement(
            InstallRequirement.from_line(name))
    # Requirements pulled from requirement files.
    for filename in options.requirements:
        for req in parse_requirements(filename, options=options):
            requirement_set.add_requirement(req)
    requirement_set.uninstall(auto_confirm=options.yes)
def run(self, options, args):
    """Download all requirements into the cache dir, record them (pinned
    to the private repository's index URL) in the req_req_requirements
    file, and optionally upload the cache to the repository.

    Returns 1 when no repository was configured, otherwise the populated
    RequirementSet.
    """
    if not options.req_repository:
        logger.notify('You need to specify a repository. This utility '
                      'does not upload to PyPI')
        return 1
    # Download-only mode: everything lands in the cache directory.
    options.build_dir = os.path.abspath(options.req_cache_dir)
    options.src_dir = os.path.abspath(options.req_cache_dir)
    options.no_install = True
    options.ignore_installed = True
    install_options = options.install_options or []
    global_options = options.global_options or []
    index_urls = [options.index_url] + options.extra_index_urls
    if options.no_index:
        logger.notify('Ignoring indexes: %s' % ','.join(index_urls))
        index_urls = []
    finder = self._build_package_finder(options, index_urls)
    requirement_set = RequirementSet(
        build_dir=options.build_dir,
        src_dir=options.src_dir,
        download_dir=options.download_dir,
        download_cache=options.download_cache,
        upgrade=options.upgrade,
        ignore_installed=options.ignore_installed,
        ignore_dependencies=options.ignore_dependencies)
    # Fix: use a context manager so the output file is closed even when
    # the initial writelines() raises -- the original opened the file
    # before entering its try/finally, leaking the handle on early errors.
    with open(os.path.abspath(options.req_req_requirements), 'w') as req_req:
        req_req.writelines(['--index-url=%s' % self.repository_url(options),
                            '\n'])
        for filename in options.requirements:
            for req in parse_requirements(filename, finder=finder,
                                          options=options):
                logger.info('req ' + str(req.req))
                req_req.writelines([str(req.req), '\n'])
                requirement_set.add_requirement(req)
    if not options.no_download:
        requirement_set.prepare_files(finder, force_root_egg_info=False,
                                      bundle=False)
    else:
        requirement_set.locate_files()
    if not os.path.exists(os.path.abspath(options.req_cache_dir)):
        os.mkdir(os.path.abspath(options.req_cache_dir))
    if not options.req_no_upload:
        self.upload_to_repository(options)
    if options.req_clean_cache:
        requirement_set.cleanup_files(bundle=False)
    return requirement_set
def install(args, lock_filename="requirements.txt"):
    """Install each requirement from the project requirement file one at a
    time and write *lock_filename* mapping every requested requirement to
    the pinned packages it pulled in.
    """
    deps = OrderedDict()
    filename = get_requirement_file()
    for line in get_requirements(filename):
        line = line.strip()
        deps[line] = []
        # A fresh RequirementSet per line so failures can be attributed to
        # a single requested requirement.
        requirement_set = RequirementSet(
            build_dir=build_prefix,
            src_dir=src_prefix,
            download_dir=None)
        requirement = InstallRequirement.from_line(line, None)
        requirement_set.add_requirement(requirement)
        install_options = []
        global_options = []
        # TODO: specify index_urls from optional requirements.yml
        finder = PackageFinder(
            find_links=[],
            index_urls=["http://pypi.python.org/simple/"]
        )
        requirement_set.prepare_files(finder,
                                      force_root_egg_info=False,
                                      bundle=False)
        requirement_set.install(install_options, global_options)
        # Record everything resolved and everything newly installed.
        for package in requirement_set.requirements.values():
            deps[line].append("%s==%s" % (package.name,
                                          package.installed_version))
        for package in requirement_set.successfully_installed:
            deps[line].append("%s==%s" % (package.name,
                                          package.installed_version))
        deps[line] = set(deps[line])
    package_set = set([])
    with open(lock_filename, "w") as output:
        output.write("# this file generated from '%s' by pundler:\n"
                     % (filename,))
        for requested_package in deps:
            output.write("# requirement '%s' depends on:\n"
                         % (requested_package,))
            for dependency in deps[requested_package]:
                logger.info("dependency %s" % dependency)
                # Each package is written uncommented only once; later
                # duplicates are emitted commented-out.
                if dependency not in package_set:
                    dependency = dependency.lower()
                    package_set.add(dependency)
                    output.write("%s\n" % (dependency,))
                else:
                    output.write("#%s\n" % (dependency,))
            output.write("\n")
def run(self, options, args):
    """Uninstall requirements named on the command line or listed in
    -r files; raise InstallationError when none were supplied."""
    requirement_set = RequirementSet(build_dir=None,
                                     src_dir=None,
                                     download_dir=None)
    for name in args:
        requirement_set.add_requirement(InstallRequirement.from_line(name))
    for filename in options.requirements:
        for req in parse_requirements(filename, options=options):
            requirement_set.add_requirement(req)
    if not requirement_set.has_requirements:
        raise InstallationError('You must give at least one requirement '
                                'to %(name)s (see "pip help %(name)s")'
                                % dict(name=self.name))
    requirement_set.uninstall(auto_confirm=options.yes)
def test_missing_hash_with_require_hashes(self, data):
    """Setting --require-hashes explicitly should raise errors if hashes
    are missing.
    """
    reqset = RequirementSet(require_hashes=True)
    # process_line() yields the InstallRequirements parsed from one
    # requirements-file line; take the single one produced.
    reqset.add_requirement(list(process_line('simple==1.0', 'file', 1))[0])
    finder = PackageFinder([data.find_links], [], session=PipSession())
    resolver = self._basic_resolver(finder)
    # The error message should suggest the hash pip computed for the
    # candidate it found.
    assert_raises_regexp(
        HashErrors,
        r'Hashes are required in --require-hashes mode, but they are '
        r'missing .*\n'
        r' simple==1.0 --hash=sha256:393043e672415891885c9a2a0929b1af95'
        r'fb866d6ca016b42d2e6ce53619b653$',
        resolver.resolve,
        reqset)
class PrintRequirementsTestCase(TestCase):
    """Tests for pip_compile.print_requirements().

    NOTE(review): each test method only *sets up* state and the shared
    comparison runs in tearDown() -- an unusual pattern; a mismatch is
    reported as an error in tearDown rather than an ordinary test failure.
    """

    def setUp(self):
        # The session is never used for network access here, hence 'dummy'.
        self.requirement_set = RequirementSet(None, None, None,
                                              session='dummy')

    def tearDown(self):
        # Render the requirement set built by the test method and compare
        # it against the expectation the method recorded.
        output = StringIO()
        pip_compile.print_requirements(self.requirement_set, output=output)
        assert output.getvalue() == self.expected

    def test_no_requirements(self):
        self.expected = ''

    def test_one_requirement_no_version(self):
        self.requirement_set.add_requirement(
            InstallRequirement('pkg', None))
        self.expected = 'pkg\n'

    def test_one_requirement_with_version(self):
        self.requirement_set.add_requirement(
            InstallRequirement('pkg==1.0.1', None))
        self.expected = 'pkg==1.0.1\n'

    def test_one_requirement_with_constraint(self):
        # The constraint wins; the later conflicting pin is ignored.
        self.requirement_set.add_requirement(
            InstallRequirement('pkg==1.0.1', None, constraint=True))
        self.requirement_set.add_requirement(
            InstallRequirement('pkg==1.0.2-ignored', None))
        self.expected = 'pkg==1.0.1\n'
def run(self, options, args):
    """Uninstall everything named on the command line or listed in -r
    files; fail loudly when nothing was requested."""
    req_set = RequirementSet(build_dir=None, src_dir=None,
                             download_dir=None)
    for spec in args:
        req_set.add_requirement(InstallRequirement.from_line(spec))
    for req_file in options.requirements:
        for parsed in parse_requirements(req_file, options=options):
            req_set.add_requirement(parsed)
    if not req_set.has_requirements:
        raise InstallationError('You must give at least one requirement '
                                'to %(name)s (see "pip help %(name)s")'
                                % dict(name=self.name))
    req_set.uninstall(auto_confirm=options.yes)
def test_hash_mismatch(self, data):
    """A hash mismatch should raise an error."""
    file_url = path_to_url((data.packages / 'simple-1.0.tar.gz').abspath)
    reqset = RequirementSet(require_hashes=True)
    # Pin the local archive to a deliberately wrong sha256.
    reqset.add_requirement(
        list(process_line('%s --hash=sha256:badbad' % file_url,
                          'file', 1))[0])
    finder = PackageFinder([data.find_links], [], session=PipSession())
    resolver = self._basic_resolver(finder)
    # The error must report both the expected and the actual digest.
    assert_raises_regexp(
        HashErrors,
        r'THESE PACKAGES DO NOT MATCH THE HASHES.*\n'
        r' file:///.*/data/packages/simple-1\.0\.tar\.gz .*:\n'
        r' Expected sha256 badbad\n'
        r' Got 393043e672415891885c9a2a0929b1af95fb866d'
        r'6ca016b42d2e6ce53619b653$',
        resolver.resolve,
        reqset)
def install_package(package, version=None):
    """Install *package* via pip's API, pinned to *version* when given."""
    spec = '{}=={}'.format(package, version) if version else package
    req_set = RequirementSet(build_dir=build_prefix,
                             src_dir=src_prefix,
                             download_dir=None)
    req_set.add_requirement(InstallRequirement.from_line(spec, None))
    finder = PackageFinder(find_links=[],
                           index_urls=["http://pypi.python.org/simple/"])
    req_set.prepare_files(finder, force_root_egg_info=False, bundle=False)
    req_set.install([], [])
def get_requirement_set(self, finder, line):
    """Build a RequirementSet containing the single requirement in *line*.

    The line is round-tripped through a temporary file so pip's
    requirements-file parser handles any per-line options.
    """
    requirement_set = RequirementSet(
        build_dir=build_prefix,
        src_dir=src_prefix,
        download_dir=None,
        upgrade=self.upgrade,
    )
    # Fix: open the temp file in text mode -- NamedTemporaryFile defaults
    # to 'w+b', and writing a str to a binary file fails on Python 3.
    with tempfile.NamedTemporaryFile(mode='w+') as single_req_file:
        single_req_file.write(line)
        single_req_file.flush()
        for requirement in parse_requirements(single_req_file.name,
                                              finder=finder):
            # NOTE(review): the parsed requirement is discarded and the
            # raw line re-parsed -- presumably to drop per-line options;
            # confirm this is intentional.
            requirement = InstallRequirement.from_line(line, None)
            requirement_set.add_requirement(requirement)
    return requirement_set
def run(self, options, args):
    """Resolve and fetch all requested requirements.

    Returns a (requirements, requirementSet) pair: the flat list of parsed
    requirements and the populated RequirementSet.
    """
    # Fall back to pip's default build/source prefixes.
    if not options.build_dir:
        options.build_dir = build_prefix
    if not options.src_dir:
        options.src_dir = src_prefix
    if options.download_dir:
        # Download-only mode: fetch but never install.
        options.no_install = True
        options.ignore_installed = True
    else:
        options.build_dir = os.path.abspath(options.build_dir)
        options.src_dir = os.path.abspath(options.src_dir)
    index_urls = [options.index_url] + options.extra_index_urls
    if options.no_index:
        logger.notify('Ignoring indexes: %s' % ','.join(index_urls))
        index_urls = []
    finder = PackageFinder(find_links=options.find_links,
                           index_urls=index_urls)
    requirementSet = RequirementSet(
        build_dir=options.build_dir,
        src_dir=options.src_dir,
        download_dir=options.download_dir,
        download_cache=options.download_cache,
        upgrade=options.upgrade,
        ignore_installed=options.ignore_installed,
        ignore_dependencies=False)
    requirements = []
    # Requirements from positional args, -e/--editable and -r files.
    for name in args:
        requirements.append(InstallRequirement.from_line(name, None))
    for name in options.editables:
        requirements.append(
            InstallRequirement.from_editable(
                name, default_vcs=options.default_vcs))
    for filename in options.requirements:
        for req in parse_requirements(filename, finder=finder,
                                      options=options):
            requirements.append(req)
    # add all requirements into requirements set
    for req in requirements:
        requirementSet.add_requirement(req)
    requirementSet.install_files(finder,
                                 force_root_egg_info=self.bundle,
                                 bundle=self.bundle)
    return requirements, requirementSet
def test_environment_marker_extras(self, data):
    """
    Test that the environment marker extras are used with
    non-wheel installs.
    """
    reqset = RequirementSet()
    req = InstallRequirement.from_editable(
        data.packages.join("LocalEnvironMarker"))
    reqset.add_requirement(req)
    finder = PackageFinder([data.find_links], [], session=PipSession())
    resolver = self._basic_resolver(finder)
    resolver.resolve(reqset)
    # This is hacky but does test both case in py2 and py3
    # NOTE(review): presumably the fixture's extra marker only matches on
    # Python 2.7 / 3.4, pulling in 'simple' there -- confirm against the
    # LocalEnvironMarker fixture.
    if sys.version_info[:2] in ((2, 7), (3, 4)):
        assert reqset.has_requirement('simple')
    else:
        assert not reqset.has_requirement('simple')
def run(self, options, args):
    """Resolve and fetch all requested requirements.

    Returns a (requirements, requirementSet) pair: the flat list of parsed
    requirements and the populated RequirementSet.
    """
    # Fall back to pip's default build/source prefixes.
    if not options.build_dir:
        options.build_dir = build_prefix
    if not options.src_dir:
        options.src_dir = src_prefix
    if options.download_dir:
        # Download-only mode: fetch but never install.
        options.no_install = True
        options.ignore_installed = True
    else:
        options.build_dir = os.path.abspath(options.build_dir)
        options.src_dir = os.path.abspath(options.src_dir)
    index_urls = [options.index_url] + options.extra_index_urls
    if options.no_index:
        logger.notify('Ignoring indexes: %s' % ','.join(index_urls))
        index_urls = []
    finder = PackageFinder(
        find_links=options.find_links,
        index_urls=index_urls)
    requirementSet = RequirementSet(
        build_dir=options.build_dir,
        src_dir=options.src_dir,
        download_dir=options.download_dir,
        download_cache=options.download_cache,
        upgrade=options.upgrade,
        ignore_installed=options.ignore_installed,
        ignore_dependencies=False)
    requirements = []
    # Requirements from positional args, -e/--editable and -r files.
    for name in args:
        requirements.append(
            InstallRequirement.from_line(name, None))
    for name in options.editables:
        requirements.append(
            InstallRequirement.from_editable(name,
                                             default_vcs=options.default_vcs))
    for filename in options.requirements:
        for req in parse_requirements(filename, finder=finder,
                                      options=options):
            requirements.append(req)
    # add all requirements into requirements set
    for req in requirements:
        requirementSet.add_requirement(req)
    requirementSet.install_files(finder,
                                 force_root_egg_info=self.bundle,
                                 bundle=self.bundle)
    return requirements, requirementSet
def test_missing_hash_with_require_hashes(self, data):
    """Setting --require-hashes explicitly should raise errors if hashes
    are missing.
    """
    reqset = RequirementSet(require_hashes=True)
    # process_line() yields the InstallRequirements parsed from one
    # requirements-file line; take the single one produced.
    reqset.add_requirement(
        list(process_line('simple==1.0', 'file', 1))[0])
    finder = PackageFinder([data.find_links], [], session=PipSession())
    resolver = self._basic_resolver(finder)
    # The error message should suggest the hash pip computed for the
    # candidate it found.
    assert_raises_regexp(
        HashErrors,
        r'Hashes are required in --require-hashes mode, but they are '
        r'missing .*\n'
        r' simple==1.0 --hash=sha256:393043e672415891885c9a2a0929b1af95'
        r'fb866d6ca016b42d2e6ce53619b653$',
        resolver.resolve,
        reqset)
def test_no_reuse_existing_build_dir(self, data):
    """Test prepare_files raise exception with previous build dir"""
    # Simulate a leftover build directory containing a setup.py.
    build_dir = os.path.join(self.tempdir, 'build', 'simple')
    os.makedirs(build_dir)
    open(os.path.join(build_dir, "setup.py"), 'w')
    reqset = RequirementSet()
    req = InstallRequirement.from_line('simple')
    reqset.add_requirement(req)
    finder = PackageFinder([data.find_links], [], session=PipSession())
    resolver = self._basic_resolver(finder)
    # The error message must mention both the requirement and the stale
    # build directory (backslashes escaped for the regex on Windows).
    assert_raises_regexp(
        PreviousBuildDirError,
        r"pip can't proceed with [\s\S]*%s[\s\S]*%s" %
        (req, build_dir.replace('\\', '\\\\')),
        resolver.resolve,
        reqset,
    )
def test_unhashed_deps_on_require_hashes(self, data):
    """Make sure unhashed, unpinned, or otherwise unrepeatable
    dependencies get complained about when --require-hashes is on."""
    reqset = RequirementSet()
    finder = PackageFinder([data.find_links], [], session=PipSession())
    resolver = self._basic_resolver(finder)
    # The hashed top-level requirement drags in an unhashed dependency.
    reqset.add_requirement(next(process_line(
        'TopoRequires2==0.0.1 '  # requires TopoRequires
        '--hash=sha256:eaf9a01242c9f2f42cf2bd82a6a848cd'
        'e3591d14f7896bdbefcf48543720c970',
        'file', 1)))
    assert_raises_regexp(
        HashErrors,
        r'In --require-hashes mode, all requirements must have their '
        r'versions pinned.*\n'
        r' TopoRequires from .*$',
        resolver.resolve,
        reqset)
def test_unhashed_deps_on_require_hashes(self, data):
    """Make sure unhashed, unpinned, or otherwise unrepeatable
    dependencies get complained about when --require-hashes is on."""
    reqset = RequirementSet()
    finder = PackageFinder([data.find_links], [], session=PipSession())
    resolver = self._basic_resolver(finder)
    # The hashed top-level requirement drags in an unhashed dependency.
    reqset.add_requirement(
        next(
            process_line(
                'TopoRequires2==0.0.1 '  # requires TopoRequires
                '--hash=sha256:eaf9a01242c9f2f42cf2bd82a6a848cd'
                'e3591d14f7896bdbefcf48543720c970',
                'file',
                1)))
    assert_raises_regexp(
        HashErrors,
        r'In --require-hashes mode, all requirements must have their '
        r'versions pinned.*\n'
        r' TopoRequires from .*$',
        resolver.resolve,
        reqset)
class Pip(object):
    """Puppet pip programmatically to install eggs into a private
    directory.
    """

    # The requirement set, populated in __init__.
    requirement_set = None
    # One shared finder for all instances, pointed at the main PyPI index.
    finder = PackageFinder(find_links=[],
                           index_urls=["http://pypi.python.org/simple/"],
                           session=PipSession())

    def __init__(self, egg_directory):
        egg_directory = os.path.abspath(os.path.expanduser(egg_directory))
        self.egg_directory = egg_directory
        # Make eggs installed under the directory importable.
        site_packages = os.path.join(
            egg_directory, "lib", "python" + PYTHON_VERSION,
            "site-packages")
        sys.path += [site_packages]
        # NOTE(review): BuildDirectory is a context manager, so the build
        # dir it yields is presumably torn down as soon as __init__
        # returns, while requirement_set keeps pointing at it — confirm
        # BuildDirectory's cleanup semantics.
        with BuildDirectory() as build_prefix:
            self.requirement_set = RequirementSet(build_dir=build_prefix,
                                                  src_dir=src_prefix,
                                                  download_dir=None,
                                                  upgrade=True,
                                                  session=PipSession())

    def delete_all_eggs(self):
        """Delete every egg under the configured egg directory."""
        target = os.path.join(self.egg_directory, "lib", "python")
        if os.path.exists(target):
            shutil.rmtree(target)

    def install_egg(self, egg_name):
        """Install a single egg into the egg directory."""
        if not os.path.exists(self.egg_directory):
            os.makedirs(self.egg_directory)
        self.requirement_set.add_requirement(
            InstallRequirement.from_line(egg_name, None))
        try:
            self.requirement_set.prepare_files(self.finder)
            self.requirement_set.install(
                ['--prefix=' + self.egg_directory], [])
        except DistributionNotFound:
            # Drop the failed requirement so a retry can re-add it.
            self.requirement_set.requirements._keys.remove(egg_name)
            raise PipException()
class Pip(object):
    """Puppet pip programmatically to install eggs into a private
    directory.

    Fixes over the original:
    - ``__init__`` used ``self.install_options += [...]``, which extends
      the shared class-level list in place: every new instance appended
      another ``--home=...`` flag visible to all instances. Options are
      now stored per instance.
    - The ``install_options``/``global_options`` constructor arguments
      were accepted but ignored; they are now honoured, and their
      mutable ``[]`` defaults replaced by the ``None`` sentinel (callers
      that passed nothing see identical behavior).
    """

    requirement_set = None  # the requirement set
    # the package finder
    finder = PackageFinder(find_links=[],
                           index_urls=["http://pypi.python.org/simple/"])
    # Kept for backward compatibility with code reading the class
    # attributes; no longer mutated by __init__.
    install_options = []  # the install options with pip
    global_options = []  # the global options with pip

    def __init__(self, egg_directory, install_options=None,
                 global_options=None):
        self.egg_directory = egg_directory = os.path.abspath(
            os.path.expanduser(egg_directory))
        # Per-instance copies: never mutate the class-level lists.
        self.install_options = list(install_options or [])
        self.install_options += ["--home=%s" % egg_directory]
        self.global_options = list(global_options or [])
        sys.path += [os.path.join(egg_directory, "lib", "python")]
        self.requirement_set = RequirementSet(build_dir=build_prefix,
                                              src_dir=src_prefix,
                                              download_dir=None,
                                              upgrade=True)

    def delete_all_eggs(self):
        """Delete all the eggs in the directory specified."""
        path_to_delete = os.path.join(self.egg_directory, "lib", "python")
        if os.path.exists(path_to_delete):
            shutil.rmtree(path_to_delete)

    def install_egg(self, egg_name):
        """Install an egg into the egg directory."""
        if not os.path.exists(self.egg_directory):
            os.makedirs(self.egg_directory)
        self.requirement_set.add_requirement(
            InstallRequirement.from_line(egg_name, None))
        try:
            self.requirement_set.prepare_files(self.finder,
                                               force_root_egg_info=False,
                                               bundle=False)
            self.requirement_set.install(self.install_options,
                                         self.global_options)
        except DistributionNotFound:
            # Drop the failed requirement so a retry can re-add it.
            self.requirement_set.requirements._keys.remove(egg_name)
            raise PipException()
def test_hashed_deps_on_require_hashes(self, data):
    """Fully hashed dependency chains must not trip the "not all
    dependencies are hashed" check when --require-hashes is on.

    (Only requirement collection is exercised here; installation shares
    its code paths with the normal flow, so no failure is expected
    there.)
    """
    reqset = RequirementSet()
    hashed_lines = [
        ('TopoRequires2==0.0.1 '  # requires TopoRequires
         '--hash=sha256:eaf9a01242c9f2f42cf2bd82a6a848cd'
         'e3591d14f7896bdbefcf48543720c970', 1),
        ('TopoRequires==0.0.1 '
         '--hash=sha256:d6dd1e22e60df512fdcf3640ced3039b3b02a56ab2cee81ebcb'
         '3d0a6d4e8bfa6', 2),
    ]
    for line, lineno in hashed_lines:
        reqset.add_requirement(next(process_line(line, 'file', lineno)))
def test_hash_mismatch(self, data):
    """A wrong hash on a local archive must raise a HashErrors that
    names the file, the expected digest, and the actual digest.
    """
    archive_url = path_to_url(
        (data.packages / 'simple-1.0.tar.gz').abspath)
    requirement_set = RequirementSet(require_hashes=True)
    bad_line = '%s --hash=sha256:badbad' % archive_url
    requirement_set.add_requirement(
        list(process_line(bad_line, 'file', 1))[0])
    package_finder = PackageFinder(
        [data.find_links], [], session=PipSession())
    resolver = self._basic_resolver(package_finder)
    assert_raises_regexp(
        HashErrors,
        r'THESE PACKAGES DO NOT MATCH THE HASHES.*\n'
        r' file:///.*/data/packages/simple-1\.0\.tar\.gz .*:\n'
        r' Expected sha256 badbad\n'
        r' Got 393043e672415891885c9a2a0929b1af95fb866d'
        r'6ca016b42d2e6ce53619b653$',
        resolver.resolve,
        requirement_set)
def req_set_from_file(cls, filename, download_dir, deplinks=None):
    """Yield a RequirementSet populated from *filename*, then the
    PackageFinder that was used to build it.
    """
    source_dir = path(src_prefix).abspath()
    finder = cls.package_finder(deplinks)
    requirement_set = RequirementSet(
        build_dir=cls.build_dir,
        src_dir=source_dir,
        download_dir=download_dir,
        download_cache=None,
        upgrade=False,
        ignore_installed=True,
        ignore_dependencies=False)
    options = cls.options()
    names = []
    for requirement in cls.parse_requirements(
            filename, finder=finder, options=options):
        requirement_set.add_requirement(requirement)
        names.append(requirement.req)
    yield requirement_set
    yield finder
def pip_install(package, test_module=None):
    """Install *package* with pip unless it is already importable.

    Parameters:
        package: distribution name handed to pip.
        test_module: module name probed for importability; defaults to
            *package* itself.

    Bootstraps pip from get-pip.py when pip is absent.

    Fix: the bootstrap previously ran ``check_call(['python', f])``,
    which may invoke a different interpreter than the one running this
    script (e.g. a system Python instead of the active virtualenv), so
    pip could be installed into the wrong environment. It now uses
    ``sys.executable`` to target the running interpreter.
    """
    import sys

    if not test_module:
        test_module = package
    # Already importable: nothing to do.
    if module_exists(test_module):
        return
    # If pip doesn't exist install it first.
    if not module_exists('pip'):
        with download('https://bootstrap.pypa.io/get-pip.py') as f:
            check_call([sys.executable, f])
    # Install the package.
    print('Installing', package)
    from pip.index import PackageFinder
    from pip.req import InstallRequirement, RequirementSet
    from pip.locations import build_prefix, src_prefix
    requirement_set = RequirementSet(
        build_dir=build_prefix, src_dir=src_prefix, download_dir=None)
    requirement_set.add_requirement(
        InstallRequirement.from_line(package, None))
    finder = PackageFinder(
        find_links=[], index_urls=['http://pypi.python.org/simple/'])
    requirement_set.prepare_files(finder, force_root_egg_info=False,
                                  bundle=False)
    requirement_set.install([], [])
def main(): requirement_set = RequirementSet( build_dir=build_prefix, src_dir=src_prefix, download_dir=None) requirement_set.add_requirement(InstallRequirement.from_line('git+ssh://[email protected]/arc90/readability-selectors@5-bootstrap#egg=rdbselectors', None)) install_options = [] global_options = [] finder = PackageFinder(find_links=[], index_urls=['http://pypi.python.org/simple/']) requirement_set.prepare_files(finder, force_root_egg_info=False, bundle=False) requirement_set.install(install_options, global_options) print '\n' print 'Installed' print '==================================' names = [package.name for package in requirement_set.successfully_installed] print names print '\n' pass
def test_hashed_deps_on_require_hashes(self, data):
    """Hashed dependencies must sail through when --require-hashes is
    active.

    (Only the preparation step matters here; installation shares its
    code paths with the normal flow, so no failure is expected there.)
    """
    reqset = RequirementSet()
    parent = next(process_line(
        'TopoRequires2==0.0.1 '  # requires TopoRequires
        '--hash=sha256:eaf9a01242c9f2f42cf2bd82a6a848cd'
        'e3591d14f7896bdbefcf48543720c970',
        'file', 1))
    reqset.add_requirement(parent)
    dependency = next(process_line(
        'TopoRequires==0.0.1 '
        '--hash=sha256:d6dd1e22e60df512fdcf3640ced3039b3b02a56ab2cee81ebcb'
        '3d0a6d4e8bfa6',
        'file', 2))
    reqset.add_requirement(dependency)
def run(self, options, args):
    """Run the (early-pip) install command: resolve directory defaults,
    collect requirements from positional args, editables, and -r files,
    download/unpack them, and install unless in download-only or bundle
    mode. Returns the populated RequirementSet.
    """
    # Fall back to pip's static build/src locations when not overridden.
    if not options.build_dir:
        options.build_dir = build_prefix
    if not options.src_dir:
        options.src_dir = src_prefix
    if options.download_dir:
        # A download dir implies download-only mode: skip installation
        # and re-fetch even packages that are already installed.
        options.no_install = True
        options.ignore_installed = True
    else:
        options.build_dir = os.path.abspath(options.build_dir)
        options.src_dir = os.path.abspath(options.src_dir)
    install_options = options.install_options or []
    index_urls = [options.index_url] + options.extra_index_urls
    if options.no_index:
        logger.notify('Ignoring indexes: %s' % ','.join(index_urls))
        index_urls = []
    finder = PackageFinder(find_links=options.find_links,
                           index_urls=index_urls)
    requirement_set = RequirementSet(
        build_dir=options.build_dir,
        src_dir=options.src_dir,
        download_dir=options.download_dir,
        download_cache=options.download_cache,
        upgrade=options.upgrade,
        ignore_installed=options.ignore_installed,
        ignore_dependencies=options.ignore_dependencies)
    # Requirements arrive three ways: positional names, -e editables,
    # and -r requirement files.
    for name in args:
        requirement_set.add_requirement(
            InstallRequirement.from_line(name, None))
    for name in options.editables:
        requirement_set.add_requirement(
            InstallRequirement.from_editable(
                name, default_vcs=options.default_vcs))
    for filename in options.requirements:
        for req in parse_requirements(filename, finder=finder,
                                      options=options):
            requirement_set.add_requirement(req)
    # Download and unpack everything (bundle mode forces root egg-info).
    requirement_set.install_files(finder,
                                  force_root_egg_info=self.bundle,
                                  bundle=self.bundle)
    if not options.no_install and not self.bundle:
        requirement_set.install(install_options)
        installed = ' '.join(
            [req.name for req in requirement_set.successfully_installed])
        if installed:
            logger.notify('Successfully installed %s' % installed)
    elif not self.bundle:
        # Download-only mode: just report what was fetched.
        downloaded = ' '.join(
            [req.name
             for req in requirement_set.successfully_downloaded])
        if downloaded:
            logger.notify('Successfully downloaded %s' % downloaded)
    return requirement_set
def test_missing_hash_checking(self, data):
    """Implicit hash-checking mode: once any requirement in the file
    carries a hash, prepare_files() must reject the ones that lack one.
    """
    reqset = RequirementSet()

    def first_req(line, lineno):
        # Parse one requirements-file line into an InstallRequirement.
        return list(process_line(line, 'file', lineno))[0]

    # No flags here. This tests that detection of later flags nonetheless
    # requires earlier packages to have hashes:
    reqset.add_requirement(first_req('blessings==1.0', 1))
    # This flag activates --require-hashes mode:
    reqset.add_requirement(
        first_req('tracefront==0.1 --hash=sha256:somehash', 2))
    # This hash should be accepted because it came from the reqs file,
    # not from the internet:
    reqset.add_requirement(first_req(
        'https://pypi.python.org/packages/source/m/more-'
        'itertools/more-itertools-1.0.tar.gz#md5=b21850c'
        '3cfa7efbb70fd662ab5413bdd', 3))
    # The error text should list this as a URL and not `peep==3.1.1`:
    reqset.add_requirement(first_req(
        'https://pypi.python.org/packages/source/p/peep/'
        'peep-3.1.1.tar.gz', 4))
    finder = PackageFinder([], ['https://pypi.python.org/simple'],
                           session=PipSession())
    resolver = self._basic_resolver(finder)
    assert_raises_regexp(
        HashErrors,
        r'Hashes are required in --require-hashes mode, but they are '
        r'missing .*\n'
        r' https://pypi\.python\.org/packages/source/p/peep/peep'
        r'-3\.1\.1\.tar\.gz --hash=sha256:[0-9a-f]+\n'
        r' blessings==1.0 --hash=sha256:[0-9a-f]+\n'
        r'THESE PACKAGES DO NOT MATCH THE HASHES.*\n'
        r' tracefront==0.1 .*:\n'
        r' Expected sha256 somehash\n'
        r' Got [0-9a-f]+$',
        resolver.resolve,
        reqset)
def run(self, options, args):
    """Run the wheel command (pip 1.5 era): verify build prerequisites,
    gather requirements, and build wheels into the wheelhouse.
    """
    # confirm requirements
    try:
        import wheel.bdist_wheel
    except ImportError:
        # Suggest the Debian package matching the running Python major
        # version.
        if sys.version_info < (3,):
            debian_package = 'python-wheel'
        else:
            debian_package = 'python3-wheel'
        raise CommandError("'pip wheel' requires the 'wheel' package. To fix this, run: sudo apt-get install %s" % debian_package)
    try:
        import pkg_resources
    except ImportError:
        raise CommandError(
            "'pip wheel' requires setuptools >= 0.8 for dist-info support."
            " To fix this, run: pip install --upgrade setuptools"
        )
    else:
        # dist-info support arrived with setuptools 0.8.
        if not hasattr(pkg_resources, 'DistInfoDistribution'):
            raise CommandError(
                "'pip wheel' requires setuptools >= 0.8 for dist-info "
                "support. To fix this, run: pip install --upgrade "
                "setuptools"
            )
    index_urls = [options.index_url] + options.extra_index_urls
    if options.no_index:
        logger.notify('Ignoring indexes: %s' % ','.join(index_urls))
        index_urls = []
    if options.use_mirrors:
        logger.deprecated(
            "1.7",
            "--use-mirrors has been deprecated and will be removed"
            " in the future. Explicit uses of --index-url and/or "
            "--extra-index-url is suggested.")
    if options.mirrors:
        logger.deprecated(
            "1.7",
            "--mirrors has been deprecated and will be removed in "
            " the future. Explicit uses of --index-url and/or "
            "--extra-index-url is suggested.")
        index_urls += options.mirrors
    if options.build_dir:
        options.build_dir = os.path.abspath(options.build_dir)
    session = self._build_session(options)
    finder = PackageFinder(find_links=options.find_links,
                           index_urls=index_urls,
                           use_wheel=options.use_wheel,
                           allow_external=options.allow_external,
                           allow_unverified=options.allow_unverified,
                           allow_all_external=options.allow_all_external,
                           allow_all_prereleases=options.pre,
                           process_dependency_links=
                           options.process_dependency_links,
                           session=session,
                           )
    # Reuse a user-supplied build dir; otherwise build in a temporary
    # directory that is deleted afterwards (unless --no-clean).
    build_delete = (not (options.no_clean or options.build_dir))
    with BuildDirectory(options.build_dir,
                        delete=build_delete) as build_dir:
        requirement_set = RequirementSet(
            build_dir=build_dir,
            src_dir=None,
            download_dir=None,
            download_cache=options.download_cache,
            ignore_dependencies=options.ignore_dependencies,
            ignore_installed=True,
            session=session,
            wheel_download_dir=options.wheel_dir
        )
        # make the wheelhouse
        if not os.path.exists(options.wheel_dir):
            os.makedirs(options.wheel_dir)
        # parse args and/or requirements files
        for name in args:
            requirement_set.add_requirement(
                InstallRequirement.from_line(name, None))
        for filename in options.requirements:
            for req in parse_requirements(
                    filename, finder=finder, options=options,
                    session=session):
                if req.editable:
                    # Editables cannot be built into wheels; skip them.
                    logger.notify("ignoring %s" % req.url)
                    continue
                requirement_set.add_requirement(req)
        # fail if no requirements
        if not requirement_set.has_requirements:
            opts = {'name': self.name}
            msg = ('You must give at least one requirement '
                   'to %(name)s (see "pip help %(name)s")' % opts)
            logger.error(msg)
            return
        try:
            # build wheels
            wb = WheelBuilder(
                requirement_set,
                finder,
                options.wheel_dir,
                build_options=options.build_options or [],
                global_options=options.global_options or []
            )
            wb.build()
        except PreviousBuildDirError:
            # Keep the offending build dir around for inspection.
            options.no_clean = True
            raise
        finally:
            if not options.no_clean:
                requirement_set.cleanup_files()
def run(self, options, args):
    """Run the install command (pip 6.x era): handle deprecated flags,
    build the finder and RequirementSet inside a session, prepare and
    install the requirements, then handle --target relocation. Returns
    the RequirementSet.
    """
    if (options.no_install or options.no_download):
        warnings.warn(
            "--no-install and --no-download are deprecated. "
            "See https://github.com/pypa/pip/issues/906.",
            RemovedInPip7Warning,
        )
    # If we have --no-install or --no-download and no --build we use the
    # legacy static build dir
    if (options.build_dir is None
            and (options.no_install or options.no_download)):
        options.build_dir = build_prefix
    if options.download_dir:
        # Download-only mode: skip installation, re-fetch even installed
        # packages.
        options.no_install = True
        options.ignore_installed = True
    if options.build_dir:
        options.build_dir = os.path.abspath(options.build_dir)
    options.src_dir = os.path.abspath(options.src_dir)
    install_options = options.install_options or []
    if options.use_user_site:
        if virtualenv_no_global():
            raise InstallationError(
                "Can not perform a '--user' install. User site-packages "
                "are not visible in this virtualenv."
            )
        install_options.append('--user')
    temp_target_dir = None
    if options.target_dir:
        # --target installs into a temp dir first, then moves the result
        # into place at the end.
        options.ignore_installed = True
        temp_target_dir = tempfile.mkdtemp()
        options.target_dir = os.path.abspath(options.target_dir)
        if (os.path.exists(options.target_dir)
                and not os.path.isdir(options.target_dir)):
            raise CommandError(
                "Target path exists but is not a directory, will not "
                "continue."
            )
        install_options.append('--home=' + temp_target_dir)
    global_options = options.global_options or []
    index_urls = [options.index_url] + options.extra_index_urls
    if options.no_index:
        logger.info('Ignoring indexes: %s', ','.join(index_urls))
        index_urls = []
    if options.use_mirrors:
        warnings.warn(
            "--use-mirrors has been deprecated and will be removed in the "
            "future. Explicit uses of --index-url and/or --extra-index-url"
            " is suggested.",
            RemovedInPip7Warning,
        )
    if options.mirrors:
        warnings.warn(
            "--mirrors has been deprecated and will be removed in the "
            "future. Explicit uses of --index-url and/or --extra-index-url"
            " is suggested.",
            RemovedInPip7Warning,
        )
        index_urls += options.mirrors
    if options.download_cache:
        warnings.warn(
            "--download-cache has been deprecated and will be removed in "
            "the future. Pip now automatically uses and configures its "
            "cache.",
            RemovedInPip8Warning,
        )
    with self._build_session(options) as session:
        finder = self._build_package_finder(options, index_urls, session)
        # Temporary build dir is deleted on exit unless the user supplied
        # one or asked for --no-clean.
        build_delete = (not (options.no_clean or options.build_dir))
        with BuildDirectory(options.build_dir,
                            delete=build_delete) as build_dir:
            requirement_set = RequirementSet(
                build_dir=build_dir,
                src_dir=options.src_dir,
                download_dir=options.download_dir,
                upgrade=options.upgrade,
                as_egg=options.as_egg,
                ignore_installed=options.ignore_installed,
                ignore_dependencies=options.ignore_dependencies,
                force_reinstall=options.force_reinstall,
                use_user_site=options.use_user_site,
                target_dir=temp_target_dir,
                session=session,
                pycompile=options.compile,
                isolated=options.isolated_mode,
            )
            # Requirements arrive three ways: positional names, -e
            # editables, and -r requirement files.
            for name in args:
                requirement_set.add_requirement(
                    InstallRequirement.from_line(
                        name, None, isolated=options.isolated_mode,
                    )
                )
            for name in options.editables:
                requirement_set.add_requirement(
                    InstallRequirement.from_editable(
                        name,
                        default_vcs=options.default_vcs,
                        isolated=options.isolated_mode,
                    )
                )
            for filename in options.requirements:
                for req in parse_requirements(
                        filename,
                        finder=finder, options=options, session=session):
                    requirement_set.add_requirement(req)
            if not requirement_set.has_requirements:
                opts = {'name': self.name}
                if options.find_links:
                    msg = ('You must give at least one requirement to '
                           '%(name)s (maybe you meant "pip %(name)s '
                           '%(links)s"?)' %
                           dict(opts, links=' '.join(options.find_links)))
                else:
                    msg = ('You must give at least one requirement '
                           'to %(name)s (see "pip help %(name)s")' % opts)
                logger.warning(msg)
                return
            try:
                if not options.no_download:
                    requirement_set.prepare_files(finder)
                else:
                    # This is the only call site of locate_files. Nuke
                    # with fire.
                    requirement_set.locate_files()
                if not options.no_install:
                    requirement_set.install(
                        install_options,
                        global_options,
                        root=options.root_path,
                    )
                    reqs = sorted(
                        requirement_set.successfully_installed,
                        key=operator.attrgetter('name'))
                    items = []
                    for req in reqs:
                        item = req.name
                        try:
                            # Append the installed version when known;
                            # best effort only.
                            if hasattr(req, 'installed_version'):
                                if req.installed_version:
                                    item += '-' + req.installed_version
                        except Exception:
                            pass
                        items.append(item)
                    installed = ' '.join(items)
                    if installed:
                        logger.info('Successfully installed %s',
                                    installed)
                else:
                    downloaded = ' '.join([
                        req.name
                        for req in requirement_set.successfully_downloaded
                    ])
                    if downloaded:
                        logger.info(
                            'Successfully downloaded %s', downloaded
                        )
            except PreviousBuildDirError:
                # Keep the offending build dir around for inspection.
                options.no_clean = True
                raise
            finally:
                # Clean up
                if ((not options.no_clean)
                        and ((not options.no_install)
                             or options.download_dir)):
                    requirement_set.cleanup_files()
        if options.target_dir:
            # Move the temp-dir install into the real --target location.
            if not os.path.exists(options.target_dir):
                os.makedirs(options.target_dir)
            lib_dir = distutils_scheme('', home=temp_target_dir)['purelib']
            for item in os.listdir(lib_dir):
                target_item_dir = os.path.join(options.target_dir, item)
                if os.path.exists(target_item_dir):
                    if not options.upgrade:
                        logger.warning(
                            'Target directory %s already exists. Specify '
                            '--upgrade to force replacement.',
                            target_item_dir
                        )
                        continue
                    if os.path.islink(target_item_dir):
                        logger.warning(
                            'Target directory %s already exists and is '
                            'a link. Pip will not automatically replace '
                            'links, please remove if replacement is '
                            'desired.',
                            target_item_dir
                        )
                        continue
                    if os.path.isdir(target_item_dir):
                        shutil.rmtree(target_item_dir)
                    else:
                        os.remove(target_item_dir)
                shutil.move(
                    os.path.join(lib_dir, item),
                    target_item_dir
                )
            shutil.rmtree(temp_target_dir)
        return requirement_set
def run(self, options, args):
    """Run the install command (pip 1.x era): resolve directory
    defaults, collect requirements, prepare (download/unpack), and
    install or bundle them. Returns the RequirementSet.
    """
    # Fall back to pip's static build/src locations when not overridden.
    if not options.build_dir:
        options.build_dir = build_prefix
    if not options.src_dir:
        options.src_dir = src_prefix
    if options.download_dir:
        # Download-only mode: skip install, re-fetch even installed
        # packages.
        options.no_install = True
        options.ignore_installed = True
    options.build_dir = os.path.abspath(options.build_dir)
    options.src_dir = os.path.abspath(options.src_dir)
    install_options = options.install_options or []
    if options.use_user_site:
        install_options.append('--user')
    global_options = options.global_options or []
    index_urls = [options.index_url] + options.extra_index_urls
    if options.no_index:
        logger.notify('Ignoring indexes: %s' % ','.join(index_urls))
        index_urls = []
    finder = self._build_package_finder(options, index_urls)
    requirement_set = RequirementSet(
        build_dir=options.build_dir,
        src_dir=options.src_dir,
        download_dir=options.download_dir,
        download_cache=options.download_cache,
        upgrade=options.upgrade,
        ignore_installed=options.ignore_installed,
        ignore_dependencies=options.ignore_dependencies)
    # Requirements arrive three ways: positional names, -e editables,
    # and -r requirement files.
    for name in args:
        requirement_set.add_requirement(
            InstallRequirement.from_line(name, None))
    for name in options.editables:
        requirement_set.add_requirement(
            InstallRequirement.from_editable(
                name, default_vcs=options.default_vcs))
    for filename in options.requirements:
        for req in parse_requirements(filename, finder=finder,
                                      options=options):
            requirement_set.add_requirement(req)
    if not requirement_set.has_requirements:
        if options.find_links:
            raise InstallationError(
                'You must give at least one '
                'requirement to %s (maybe you meant "pip install %s"?)'
                % (self.name, " ".join(options.find_links)))
        raise InstallationError('You must give at least one requirement '
                                'to %(name)s (see "pip help %(name)s")'
                                % dict(name=self.name))
    # --user needs Python >= 2.6 and (for editables) distribute rather
    # than plain setuptools.
    if (options.use_user_site and sys.version_info < (2, 6)):
        raise InstallationError(
            '--user is only supported in Python version 2.6 and newer')
    import setuptools
    if (options.use_user_site
            and requirement_set.has_editables
            and not getattr(setuptools, '_distribute', False)):
        raise InstallationError(
            '--user --editable not supported with setuptools, use distribute'
        )
    if not options.no_download:
        requirement_set.prepare_files(finder,
                                      force_root_egg_info=self.bundle,
                                      bundle=self.bundle)
    else:
        requirement_set.locate_files()
    if not options.no_install and not self.bundle:
        requirement_set.install(install_options, global_options)
        installed = ' '.join(
            [req.name for req in requirement_set.successfully_installed])
        if installed:
            logger.notify('Successfully installed %s' % installed)
    elif not self.bundle:
        downloaded = ' '.join(
            [req.name
             for req in requirement_set.successfully_downloaded])
        if downloaded:
            logger.notify('Successfully downloaded %s' % downloaded)
    elif self.bundle:
        requirement_set.create_bundle(self.bundle_filename)
        logger.notify('Created bundle in %s' % self.bundle_filename)
    # Clean up
    if not options.no_install:
        requirement_set.cleanup_files(bundle=self.bundle)
    return requirement_set
def run(self, options, args):
    """Run the wheel command (pip 6.x era): check build prerequisites,
    collect requirements, and build wheels into options.wheel_dir.

    Fix: the "no requirements given" logger.error call supplied one
    argument for two %s placeholders, so the logging module reported a
    string-formatting error instead of emitting the intended message;
    self.name is now passed for both placeholders.
    """
    # confirm requirements
    try:
        import wheel.bdist_wheel
        # Hack to make flake8 not complain about an unused import
        wheel.bdist_wheel
    except ImportError:
        raise CommandError(
            "'pip wheel' requires the 'wheel' package. To fix this, run: "
            "pip install wheel")
    try:
        import pkg_resources
    except ImportError:
        raise CommandError(
            "'pip wheel' requires setuptools >= 0.8 for dist-info support."
            " To fix this, run: pip install --upgrade setuptools")
    else:
        # dist-info support arrived with setuptools 0.8.
        if not hasattr(pkg_resources, 'DistInfoDistribution'):
            raise CommandError(
                "'pip wheel' requires setuptools >= 0.8 for dist-info "
                "support. To fix this, run: pip install --upgrade "
                "setuptools")
    index_urls = [options.index_url] + options.extra_index_urls
    if options.no_index:
        logger.info('Ignoring indexes: %s', ','.join(index_urls))
        index_urls = []
    if options.use_mirrors:
        warnings.warn(
            "--use-mirrors has been deprecated and will be removed in the "
            "future. Explicit uses of --index-url and/or --extra-index-url"
            " is suggested.",
            RemovedInPip7Warning,
        )
    if options.mirrors:
        warnings.warn(
            "--mirrors has been deprecated and will be removed in the "
            "future. Explicit uses of --index-url and/or --extra-index-url"
            " is suggested.",
            RemovedInPip7Warning,
        )
        index_urls += options.mirrors
    if options.download_cache:
        warnings.warn(
            "--download-cache has been deprecated and will be removed in "
            "the future. Pip now automatically uses and configures its "
            "cache.",
            RemovedInPip8Warning,
        )
    if options.build_dir:
        options.build_dir = os.path.abspath(options.build_dir)
    with self._build_session(options) as session:
        finder = PackageFinder(
            find_links=options.find_links,
            index_urls=index_urls,
            use_wheel=options.use_wheel,
            allow_external=options.allow_external,
            allow_unverified=options.allow_unverified,
            allow_all_external=options.allow_all_external,
            allow_all_prereleases=options.pre,
            trusted_hosts=options.trusted_hosts,
            process_dependency_links=options.process_dependency_links,
            session=session,
        )
        # Temporary build dir is deleted on exit unless the user supplied
        # one or asked for --no-clean.
        build_delete = (not (options.no_clean or options.build_dir))
        with BuildDirectory(options.build_dir,
                            delete=build_delete) as build_dir:
            requirement_set = RequirementSet(
                build_dir=build_dir,
                src_dir=options.src_dir,
                download_dir=None,
                ignore_dependencies=options.ignore_dependencies,
                ignore_installed=True,
                isolated=options.isolated_mode,
                session=session,
                wheel_download_dir=options.wheel_dir)
            # make the wheelhouse
            if not os.path.exists(options.wheel_dir):
                os.makedirs(options.wheel_dir)
            # parse args and/or requirements files
            for name in args:
                requirement_set.add_requirement(
                    InstallRequirement.from_line(
                        name, None, isolated=options.isolated_mode,
                    ))
            for name in options.editables:
                requirement_set.add_requirement(
                    InstallRequirement.from_editable(
                        name,
                        default_vcs=options.default_vcs,
                        isolated=options.isolated_mode,
                    ))
            for filename in options.requirements:
                for req in parse_requirements(filename,
                                              finder=finder,
                                              options=options,
                                              session=session):
                    requirement_set.add_requirement(req)
            # fail if no requirements
            if not requirement_set.has_requirements:
                # FIX: both %s placeholders need an argument; the
                # original passed self.name only once.
                logger.error(
                    "You must give at least one requirement to %s "
                    "(see \"pip help %s\")", self.name, self.name,
                )
                return
            try:
                # build wheels
                wb = WheelBuilder(
                    requirement_set,
                    finder,
                    options.wheel_dir,
                    build_options=options.build_options or [],
                    global_options=options.global_options or [],
                )
                if not wb.build():
                    raise CommandError(
                        "Failed to build one or more wheels")
            except PreviousBuildDirError:
                # Keep the offending build dir around for inspection.
                options.no_clean = True
                raise
            finally:
                if not options.no_clean:
                    requirement_set.cleanup_files()