def test_resolve_prereleases_multiple_set():
    """Overlapping prerelease requirements resolve to the pinned rc with or without --pre."""
    sdists = [
        make_sdist(name='dep', version=version)
        for version in ('2.0.0', '3.0.0rc3', '3.0.0rc4', '3.0.0rc5')
    ]
    with temporary_dir() as td:
        for sdist in sdists:
            safe_copy(sdist, os.path.join(td, os.path.basename(sdist)))
        fetchers = [Fetcher([td])]

        def assert_resolve(expected_version, **resolve_kwargs):
            # Two overlapping requirements: an open rc range plus an exact rc pin.
            dists = resolve(
                ['dep>=3.0.0rc1', 'dep==3.0.0rc4'],
                fetchers=fetchers,
                **resolve_kwargs)
            assert len(dists) == 1
            assert dists[0].version == expected_version

        # This should resolve with explicit prerelease being set or implicitly.
        assert_resolve('3.0.0rc4', allow_prereleases=True)
        assert_resolve('3.0.0rc4')
def _resolve_multi(self, interpreter, requirements, find_links):
    """Multi-platform dependency resolution for PEX files.

    Returns a list of distributions that must be included in order to satisfy a set of
    requirements. That may involve distributions for multiple platforms.

    :param interpreter: The :class:`PythonInterpreter` to resolve for.
    :param requirements: A list of :class:`PythonRequirement` objects to resolve.
    :param find_links: Additional paths to search for source packages during resolution.
    :return: Map of platform name -> list of :class:`pkg_resources.Distribution` instances
             needed to satisfy the requirements on that platform.
    """
    python_setup = PythonSetup.global_instance()
    python_repos = PythonRepos.global_instance()
    distributions = {}
    fetchers = python_repos.get_fetchers()
    fetchers.extend(Fetcher([path]) for path in find_links)
    # The cache dir depends only on the interpreter, not the platform; compute it once
    # instead of on every loop iteration.
    requirements_cache_dir = os.path.join(
        python_setup.resolver_cache_dir, str(interpreter.identity))
    for platform in python_setup.platforms:
        distributions[platform] = resolve(
            requirements=[req.requirement for req in requirements],
            interpreter=interpreter,
            fetchers=fetchers,
            # 'current' is the sentinel meaning "resolve for the local platform".
            platform=None if platform == 'current' else platform,
            context=python_repos.get_network_context(),
            cache=requirements_cache_dir,
            cache_ttl=python_setup.resolver_cache_ttl)
    return distributions
def test_cached_dependency_pinned_unpinned_resolution_multi_run():
    # This exercises the issue described here: https://github.com/pantsbuild/pex/issues/178
    sdists = [make_sdist(name='project', version=v) for v in ('1.0.0', '1.1.0')]
    with temporary_dir() as td:
        for sdist in sdists:
            safe_copy(sdist, os.path.join(td, os.path.basename(sdist)))
        fetchers = [Fetcher([td])]
        with temporary_dir() as cd:

            def assert_resolve(reqs, expected_version, ttl):
                dists = resolve(reqs, fetchers=fetchers, cache=cd, cache_ttl=ttl)
                assert len(dists) == 1
                assert dists[0].version == expected_version

            # First run, pinning 1.0.0 in the cache.
            assert_resolve(['project', 'project==1.0.0'], '1.0.0', 1000)

            # This simulates separate invocations of pex but allows us to keep the same
            # tmp cache dir.
            Crawler.reset_cache()

            # Second run, the unbounded 'project' req will find the 1.0.0 in the cache.
            # But should also return SourcePackages found in td.
            assert_resolve(['project', 'project==1.1.0'], '1.1.0', 1000)

            # Third run: if exact resolvable and inexact resolvable coexist and cache_ttl
            # is expired, the exact resolvable should pull from pypi as well since the
            # inexact one will, and the resulting resolvable_set.merge() would fail.
            Crawler.reset_cache()
            time.sleep(1)
            assert_resolve(['project', 'project==1.1.0'], '1.1.0', 1)
def test_resolver_blacklist():
    # Pick an environment marker that matches the *other* interpreter so the blacklist
    # entry applies to the running one.
    if PY2:
        blacklist = {'project2': '<3'}
        required_project = "project2;python_version>'3'"
    else:
        blacklist = {'project2': '>3'}
        required_project = "project2;python_version<'3'"

    project1 = make_sdist(name='project1', version='1.0.0', install_reqs=[required_project])
    project2 = make_sdist(name='project2', version='1.1.0')

    with temporary_dir() as td:
        for sdist in (project1, project2):
            safe_copy(sdist, os.path.join(td, os.path.basename(sdist)))
        fetchers = [Fetcher([td])]

        # Without a blacklist, both projects resolve.
        assert len(resolve(['project1'], fetchers=fetchers)) == 2
        # With project2 blacklisted for this interpreter, only project1 resolves.
        assert len(resolve(['project1'], fetchers=fetchers, pkg_blacklist=blacklist)) == 1
def _resolve_multi(self, requirements, find_links):
    """Multi-platform dependency resolution for PEX files.

    Given a pants configuration and a set of requirements, return a list of distributions
    that must be included in order to satisfy them. That may involve distributions for
    multiple platforms.

    :param requirements: A list of :class:`PythonRequirement` objects to resolve.
    :param find_links: Additional paths to search for source packages during resolution.
    """
    distributions = dict()
    platforms = self.get_platforms(self._platforms or self._python_setup.platforms)
    fetchers = self._python_repos.get_fetchers()
    fetchers.extend(Fetcher([path]) for path in find_links)
    context = self._python_repos.get_network_context()
    # The cache dir depends only on the interpreter, not the platform; compute it once
    # instead of on every loop iteration.
    requirements_cache_dir = os.path.join(
        self._python_setup.resolver_cache_dir, str(self._interpreter.identity))
    for platform in platforms:
        distributions[platform] = resolve(
            requirements=[req.requirement for req in requirements],
            interpreter=self._interpreter,
            fetchers=fetchers,
            platform=platform,
            context=context,
            cache=requirements_cache_dir,
            cache_ttl=self._python_setup.resolver_cache_ttl)
    return distributions
def process_find_links(option, option_str, option_value, parser, builder):
    """optparse callback: register a find-links repo on both the parser values and builder."""
    existing = getattr(parser.values, option.dest, [])
    fetcher = Fetcher([option_value])
    # De-duplicate: only add the fetcher if an equal one is not already present.
    if fetcher not in existing:
        existing.append(fetcher)
    setattr(parser.values, option.dest, existing)
    builder.add_repository(option_value)
def test_resolve_prereleases_cached():
    sdists = [make_sdist(name='dep', version=v) for v in ('2.0.0', '3.0.0rc3')]
    with temporary_dir() as td:
        for sdist in sdists:
            safe_copy(sdist, os.path.join(td, os.path.basename(sdist)))
        fetchers = [Fetcher([td])]
        with temporary_dir() as cd:

            def assert_resolve(dep, expected_version, **resolve_kwargs):
                dists = list(resolve_multi([dep], cache=cd, cache_ttl=1000, **resolve_kwargs))
                assert len(dists) == 1
                assert dists[0].version == expected_version

            Crawler.reset_cache()

            # First do a run to load it into the cache.
            assert_resolve('dep>=1,<4', '3.0.0rc3', allow_prereleases=True, fetchers=fetchers)

            # This simulates running from another pex command. The Crawler cache actually
            # caches an empty cache so this fails in the same "process".
            Crawler.reset_cache()

            # Now assert that we can get it from the cache by removing the source.
            assert_resolve('dep>=1,<4', '3.0.0rc3', allow_prereleases=True, fetchers=[])

            # It should also be able to resolve without allow_prereleases, if explicitly
            # requested.
            Crawler.reset_cache()
            assert_resolve('dep>=1.rc1,<4', '3.0.0rc3', fetchers=[])
def test_resolve_prereleases_and_no_version():
    prerelease_dep = make_sdist(name='dep', version='3.0.0rc3')
    with temporary_dir() as td:
        safe_copy(prerelease_dep, os.path.join(td, os.path.basename(prerelease_dep)))
        fetchers = [Fetcher([td])]

        def assert_resolve(deps, expected_version, **resolve_kwargs):
            dists = list(resolve_multi(deps, fetchers=fetchers, **resolve_kwargs))
            assert len(dists) == 1
            assert dists[0].version == expected_version

        # When allow_prereleases is specified, the requirement (from two dependencies)
        # for a specific pre-release version and no version specified, accepts the
        # pre-release version correctly.
        assert_resolve(['dep==3.0.0rc3', 'dep'], '3.0.0rc3', allow_prereleases=True)

        # Without allow_prereleases set, the pre-release version is rejected.
        # This used to be an issue when a command-line use did not pass the `--pre`
        # option correctly into the API call for resolve_multi() from build_pex() in
        # pex.py.
        with pytest.raises(Unsatisfiable):
            assert_resolve(['dep==3.0.0rc3', 'dep'], '3.0.0rc3')
def test_simple_local_resolve():
    """A single local sdist resolves via a directory Fetcher."""
    project_sdist = make_sdist(name='project')
    with temporary_dir() as td:
        safe_copy(project_sdist, os.path.join(td, os.path.basename(project_sdist)))
        resolved = resolve(['project'], fetchers=[Fetcher([td])])
        assert len(resolved) == 1
def fetchers_from_config(config):
    """Build the fetcher list from the 'python-repos' config section: repos then indices."""
    repo_urls = config.getlist('python-repos', 'repos', [])
    index_urls = config.getlist('python-repos', 'indices', [])
    return ([Fetcher([url]) for url in repo_urls] +
            [PyPIFetcher(url) for url in index_urls])
def test_diamond_local_resolve_cached():
    # This exercises the issue described here: https://github.com/pantsbuild/pex/issues/120
    sdists = [
        make_sdist(name='project1', install_reqs=['project2<1.0.0']),
        make_sdist(name='project2'),
    ]
    with temporary_dir() as dd:
        for sdist in sdists:
            safe_copy(sdist, os.path.join(dd, os.path.basename(sdist)))
        fetchers = [Fetcher([dd])]
        with temporary_dir() as cd:
            dists = resolve(['project1', 'project2'],
                            fetchers=fetchers,
                            cache=cd,
                            cache_ttl=1000)
            assert len(dists) == 2
def package_iterator(self, resolvable, existing=None):
    """Yield cached packages compatible with `resolvable` first, then the parent's packages."""
    cache_iterator = Iterator(fetchers=[Fetcher([self.__cache])],
                              allow_prereleases=self._allow_prereleases)
    cached = self.filter_packages_by_supported_tags(resolvable.compatible(cache_iterator))
    # Only apply TTL filtering when a TTL is configured and something was found.
    if cached and self.__cache_ttl:
        cached = self.filter_packages_by_ttl(cached, self.__cache_ttl)
    return itertools.chain(
        cached,
        super(CachingResolver, self).package_iterator(resolvable, existing=existing))
def iter(self, requirement):
    """Yield packages for `requirement`, preferring its dedicated repository if one is set."""
    repository = getattr(requirement, 'repository', None)
    if repository:
        obtainer = CachingObtainer(
            install_cache=self.install_cache,
            ttl=self.ttl,
            crawler=self._crawler,
            fetchers=[Fetcher([repository])],
            translators=self._translator)
        for package in obtainer.iter(requirement):
            yield package
    else:
        for package in super(PantsObtainer, self).iter(requirement):
            yield package
def test_resolve_extra_sdist():
    project1_sdist = make_sdist(
        name='project1', version='1.0.0', extras_require={'foo': ['project2']})
    project2_sdist = make_sdist(name='project2', version='2.0.0')
    with temporary_dir() as td:
        for sdist in (project1_sdist, project2_sdist):
            safe_copy(sdist, os.path.join(td, os.path.basename(sdist)))

        resolved_dists = do_resolve_multi(['project1[foo]'], fetchers=[Fetcher([td])])
        # The extra should pull in project2 with the matching marker.
        expected = {_parse_requirement(req)
                    for req in ('project1[foo]', 'project2; extra=="foo"')}
        actual = {_parse_requirement(rd.requirement) for rd in resolved_dists}
        assert expected == actual
def resolve_multi(config, requirements, interpreter=None, platforms=None, ttl=3600, find_links=None):
    """Multi-platform dependency resolution for PEX files.

    Given a pants configuration and a set of requirements, return a list of distributions
    that must be included in order to satisfy them. That may involve distributions for
    multiple platforms.

    :param config: Pants :class:`Config` object.
    :param requirements: A list of :class:`PythonRequirement` objects to resolve.
    :param interpreter: :class:`PythonInterpreter` for which requirements should be
                        resolved. If None specified, defaults to current interpreter.
    :param platforms: Optional list of platforms against requirements will be resolved.
                      If None specified, the defaults from `config` will be used.
    :param ttl: Time in seconds before we consider re-resolving an open-ended
                requirement, e.g. "flask>=0.2" if a matching distribution is available on
                disk. Defaults to 3600.
    :param find_links: Additional paths to search for source packages during resolution.
    """
    interpreter = interpreter or PythonInterpreter.get()
    if not isinstance(interpreter, PythonInterpreter):
        raise TypeError(
            'Expected interpreter to be a PythonInterpreter, got %s' % type(interpreter))

    cache = PythonSetup(config).scratch_dir('install_cache', default_name='eggs')
    platforms = get_platforms(
        platforms or config.getlist('python-setup', 'platforms', ['current']))
    fetchers = fetchers_from_config(config)
    if find_links:
        fetchers.extend(Fetcher([path]) for path in find_links)
    context = context_from_config(config)

    distributions = {}
    for platform in platforms:
        distributions[platform] = resolve(requirements=requirements,
                                          interpreter=interpreter,
                                          fetchers=fetchers,
                                          platform=platform,
                                          context=context,
                                          cache=cache,
                                          cache_ttl=ttl)
    return distributions
def test_ambiguous_transitive_resolvable():
    # If an unbounded or larger bounded resolvable is resolved first, and a transitive
    # resolvable is resolved later in another round, Error(Ambiguous resolvable) can be
    # raised because foo pulls in foo-2.0.0 and bar->foo==1.0.0 pulls in foo-1.0.0.
    sdists = [
        make_sdist(name='foo', version='1.0.0'),
        make_sdist(name='foo', version='2.0.0'),
        make_sdist(name='bar', version='1.0.0', install_reqs=['foo==1.0.0']),
    ]
    with temporary_dir() as td:
        for sdist in sdists:
            safe_copy(sdist, os.path.join(td, os.path.basename(sdist)))
        fetchers = [Fetcher([td])]
        with temporary_dir() as cd:
            dists = resolve(['foo', 'bar'], fetchers=fetchers, cache=cd, cache_ttl=1000)
            assert len(dists) == 2
            assert dists[0].version == '1.0.0'
def test_intransitive():
    foo1_0 = make_sdist(name='foo', version='1.0.0')
    # The nonexistent req ensures that we are actually not acting transitively (as that
    # would fail).
    bar1_0 = make_sdist(name='bar', version='1.0.0', install_reqs=['nonexistent==1.0.0'])
    with temporary_dir() as td:
        for sdist in (foo1_0, bar1_0):
            safe_copy(sdist, os.path.join(td, os.path.basename(sdist)))
        with temporary_dir() as cd:
            resolved_dists = do_resolve_multi(
                ['foo', 'bar'],
                fetchers=[Fetcher([td])],
                cache=cd,
                cache_ttl=1000,
                transitive=False)
            assert len(resolved_dists) == 2
def test_resolve_prereleases():
    sdists = [make_sdist(name='dep', version=v) for v in ('2.0.0', '3.0.0rc3')]
    with temporary_dir() as td:
        for sdist in sdists:
            safe_copy(sdist, os.path.join(td, os.path.basename(sdist)))
        fetchers = [Fetcher([td])]

        def assert_resolve(expected_version, **resolve_kwargs):
            dists = resolve(['dep>=1,<4'], fetchers=fetchers, **resolve_kwargs)
            assert len(dists) == 1
            assert dists[0].version == expected_version

        # Prereleases are excluded by default and when explicitly disabled.
        assert_resolve('2.0.0')
        assert_resolve('2.0.0', allow_prereleases=False)
        # Only allow_prereleases=True admits the rc.
        assert_resolve('3.0.0rc3', allow_prereleases=True)
def _resolve_multi(self, interpreter, requirements, platforms, find_links):
    """Multi-platform dependency resolution for PEX files.

    Returns a list of distributions that must be included in order to satisfy a set of
    requirements. That may involve distributions for multiple platforms.

    :param interpreter: The :class:`PythonInterpreter` to resolve for.
    :param requirements: A list of :class:`PythonRequirement` objects to resolve.
    :param platforms: A list of :class:`Platform`s to resolve for.
    :param find_links: Additional paths to search for source packages during resolution.
    :return: Map of platform name -> list of :class:`pkg_resources.Distribution` instances
             needed to satisfy the requirements on that platform.
    """
    python_setup = self._python_setup_subsystem
    python_repos = self._python_repos_subsystem
    platforms = platforms or python_setup.platforms
    find_links = find_links or []
    distributions = {}
    fetchers = python_repos.get_fetchers()
    fetchers.extend(Fetcher([path]) for path in find_links)
    # The cache dir depends only on the interpreter, not the platform; compute it once
    # instead of on every loop iteration.
    requirements_cache_dir = os.path.join(
        python_setup.resolver_cache_dir, str(interpreter.identity))
    for platform in platforms:
        resolved_dists = resolve(
            requirements=[req.requirement for req in requirements],
            interpreter=interpreter,
            fetchers=fetchers,
            platform=platform,
            context=python_repos.get_network_context(),
            cache=requirements_cache_dir,
            cache_ttl=python_setup.resolver_cache_ttl,
            allow_prereleases=python_setup.resolver_allow_prereleases,
            use_manylinux=python_setup.use_manylinux)
        distributions[platform] = [
            resolved_dist.distribution for resolved_dist in resolved_dists
        ]
    return distributions
def add_repository(self, repo):
    """Register `repo` as a Fetcher if not already present; returns self for chaining."""
    candidate = Fetcher([repo])
    if candidate not in self._fetchers:
        self._fetchers.append(candidate)
    return self
def test_clp_prereleases_resolver():
    prerelease_dep = make_sdist(name='dep', version='1.2.3b1')
    with temporary_dir() as td:
        safe_copy(prerelease_dep, os.path.join(td, os.path.basename(prerelease_dep)))
        fetcher = Fetcher([td])

        # When no specific options are specified, allow_prereleases is None
        parser, resolver_options_builder = configure_clp()
        assert resolver_options_builder._allow_prereleases is None

        # When we specify `--pre`, allow_prereleases is True
        options, reqs = parser.parse_args(args=['--pre', 'dep==1.2.3b1', 'dep'])
        assert resolver_options_builder._allow_prereleases

        # We need to use our own fetcher instead of PyPI
        resolver_options_builder._fetchers.insert(0, fetcher)

        #####
        # The resolver created during processing of command line options (configure_clp)
        # is not actually passed into the API call (resolve_multi) from build_pex().
        # Instead, resolve_multi() calls resolve() where a new ResolverOptionsBuilder
        # instance is created. The only way to supply our own fetcher to that new
        # instance is to patch it here in the test so that it can fetch our test package
        # (dep-1.2.3b1). Hence, this class below and the change in the `pex.resolver`
        # module where the patched object resides.
        #
        import pex.resolver

        class BuilderWithFetcher(ResolverOptionsBuilder):
            def __init__(self,
                         fetchers=None,
                         allow_all_external=False,
                         allow_external=None,
                         allow_unverified=None,
                         allow_prereleases=None,
                         use_manylinux=None,
                         precedence=None,
                         context=None):
                # BUG FIX: the original passed a literal `use_manylinux=None` to the
                # superclass, silently discarding the `use_manylinux` argument; forward
                # the parameter instead.
                super(BuilderWithFetcher, self).__init__(
                    fetchers=fetchers,
                    allow_all_external=allow_all_external,
                    allow_external=allow_external,
                    allow_unverified=allow_unverified,
                    allow_prereleases=allow_prereleases,
                    use_manylinux=use_manylinux,
                    precedence=precedence,
                    context=context)
                # Prepend our directory fetcher so the test package is discoverable.
                self._fetchers.insert(0, fetcher)
        # end stub
        #####

        # Without a corresponding fix in pex.py, this test failed for a dependency
        # requirement of dep==1.2.3b1 from one package and just dep (any version
        # accepted) from another package. The failure was an exit from build_pex() with
        # the message:
        #
        # Could not satisfy all requirements for dep==1.2.3b1:
        # dep==1.2.3b1, dep
        #
        # With a correct behavior the assert line is reached and pex_builder object
        # created.
        with mock.patch.object(pex.resolver, 'ResolverOptionsBuilder', BuilderWithFetcher):
            pex_builder = build_pex(reqs, options, resolver_options_builder)
            assert pex_builder is not None
def build_pex(args, options):
    """Build a PEXBuilder from parsed command-line options.

    Resolves all requirements — including sdists built from --source-dirs — and adds the
    resulting distributions to a freshly-created PEXBuilder.
    """
    interpreter = interpreter_from_options(options)
    pex_builder = PEXBuilder(
        path=safe_mkdtemp(),
        interpreter=interpreter,
    )

    pex_info = pex_builder.info
    pex_info.zip_safe = options.zip_safe
    pex_info.always_write_cache = options.always_write_cache
    pex_info.ignore_errors = options.ignore_errors
    pex_info.inherit_path = options.inherit_path

    installer = WheelInstaller if options.use_wheel else EggInstaller
    # NOTE: the original called interpreter_from_options(options) a second time here;
    # the result was identical to the one computed above, so the redundant call was
    # removed.

    fetchers = [Fetcher(options.repos)]
    if options.pypi:
        fetchers.append(PyPIFetcher())
    if options.indices:
        fetchers.extend(PyPIFetcher(index) for index in options.indices)

    translator = translator_from_options(options)

    if options.use_wheel:
        precedence = (WheelPackage, EggPackage, SourcePackage)
    else:
        precedence = (EggPackage, SourcePackage)

    requirements = options.requirements[:]

    if options.source_dirs:
        temporary_package_root = safe_mkdtemp()
        for source_dir in options.source_dirs:
            try:
                sdist = Packager(source_dir).sdist()
            except installer.Error:
                die('Failed to run installer for %s' % source_dir, CANNOT_DISTILL)

            # record the requirement information
            sdist_pkg = Package.from_href(sdist)
            requirements.append('%s==%s' % (sdist_pkg.name, sdist_pkg.raw_version))

            # copy the source distribution
            shutil.copyfile(
                sdist, os.path.join(temporary_package_root, os.path.basename(sdist)))

        # Tell pex where to find the packages
        fetchers.append(Fetcher([temporary_package_root]))

    with TRACER.timed('Resolving distributions'):
        resolveds = requirement_resolver(
            requirements,
            fetchers=fetchers,
            translator=translator,
            interpreter=interpreter,
            platform=options.platform,
            precedence=precedence,
            cache=options.cache_dir,
            cache_ttl=options.cache_ttl)

    for pkg in resolveds:
        log(' %s' % pkg, v=options.verbosity)
        pex_builder.add_distribution(pkg)
        pex_builder.add_requirement(pkg.as_requirement())

    if options.entry_point is not None:
        log('Setting entry point to %s' % options.entry_point, v=options.verbosity)
        pex_builder.info.entry_point = options.entry_point
    else:
        log('Creating environment PEX.', v=options.verbosity)

    return pex_builder
def get_fetchers(self):
    """Build fetchers: one path Fetcher per repo followed by one PyPIFetcher per index."""
    repo_fetchers = [Fetcher([url]) for url in self.repos]
    index_fetchers = [PyPIFetcher(url) for url in self.indexes]
    return repo_fetchers + index_fetchers
def build_pex(args, options):
    """Build a PEXBuilder from parsed command-line options.

    Resolves the requested requirements, adds the resulting distributions to a fresh
    PEXBuilder, and installs bdists built from any --source-dirs.
    """
    interpreter = interpreter_from_options(options)
    pex_builder = PEXBuilder(
        path=safe_mkdtemp(),
        interpreter=interpreter,
    )

    pex_info = pex_builder.info
    pex_info.zip_safe = options.zip_safe
    pex_info.always_write_cache = options.always_write_cache
    pex_info.ignore_errors = options.ignore_errors
    pex_info.inherit_path = options.inherit_path

    installer = WheelInstaller if options.use_wheel else EggInstaller
    # NOTE: the original re-computed interpreter_from_options(options) here; the result
    # was identical to the one computed above, so the redundant call was removed.

    fetchers = [Fetcher(options.repos)]
    if options.pypi:
        fetchers.append(PyPIFetcher())
    if options.indices:
        fetchers.extend(PyPIFetcher(index) for index in options.indices)

    translator = translator_from_options(options)

    if options.use_wheel:
        precedence = (WheelPackage, EggPackage, SourcePackage)
    else:
        precedence = (EggPackage, SourcePackage)

    with TRACER.timed('Resolving distributions'):
        resolveds = requirement_resolver(
            options.requirements,
            fetchers=fetchers,
            translator=translator,
            interpreter=interpreter,
            platform=options.platform,
            precedence=precedence,
            cache=options.cache_dir,
            cache_ttl=options.cache_ttl)

    for pkg in resolveds:
        log(' %s' % pkg, v=options.verbosity)
        pex_builder.add_distribution(pkg)
        pex_builder.add_requirement(pkg.as_requirement())

    for source_dir in options.source_dirs:
        try:
            bdist = installer(source_dir).bdist()
        except installer.Error:
            die('Failed to run installer for %s' % source_dir, CANNOT_DISTILL)
        pex_builder.add_dist_location(bdist)

    if options.entry_point is not None:
        log('Setting entry point to %s' % options.entry_point, v=options.verbosity)
        pex_builder.info.entry_point = options.entry_point
    else:
        log('Creating environment PEX.', v=options.verbosity)

    return pex_builder