def test_resolve_cache():
    # type: () -> None
    """Uncached resolves are isolated (distinct locations); cached resolves are identical."""
    project_wheel = build_wheel(name="project")
    with temporary_dir() as td, temporary_dir() as cache:
        safe_copy(project_wheel, os.path.join(td, os.path.basename(project_wheel)))

        # Without a cache, each resolve should be isolated, but otherwise identical.
        resolved_dists1 = local_resolve_multi(["project"], find_links=[td])
        resolved_dists2 = local_resolve_multi(["project"], find_links=[td])
        # Same requirement resolved, but materialized into different locations.
        assert resolved_dists1 != resolved_dists2
        assert len(resolved_dists1) == 1
        assert len(resolved_dists2) == 1
        assert resolved_dists1[0].direct_requirement == resolved_dists2[0].direct_requirement
        assert resolved_dists1[0].distribution.location != resolved_dists2[0].distribution.location

        # With a cache, each resolve should be identical.
        resolved_dists3 = local_resolve_multi(["project"], find_links=[td], cache=cache)
        resolved_dists4 = local_resolve_multi(["project"], find_links=[td], cache=cache)
        # Cached results differ from the uncached ones (different locations) but
        # are equal to each other since both runs hit the same cache entry.
        assert resolved_dists1 != resolved_dists3
        assert resolved_dists2 != resolved_dists3
        assert resolved_dists3 == resolved_dists4
def test_resolve_prereleases_multiple_set():
    """An exact pre-release pin intersected with a `>=rc` bound resolves to the pin."""
    dep_sdists = [
        make_sdist(name='dep', version=version)
        for version in ('2.0.0', '3.0.0rc3', '3.0.0rc4', '3.0.0rc5')
    ]
    with temporary_dir() as td:
        for sdist in dep_sdists:
            safe_copy(sdist, os.path.join(td, os.path.basename(sdist)))
        fetchers = [Fetcher([td])]

        def assert_resolve(expected_version, **resolve_kwargs):
            # Both requirements constrain `dep`; the resolver must satisfy their intersection.
            dists = resolve(
                ['dep>=3.0.0rc1', 'dep==3.0.0rc4'], fetchers=fetchers, **resolve_kwargs
            )
            assert 1 == len(dists)
            assert expected_version == dists[0].version

        # This should resolve with explicit prerelease being set or implicitly.
        assert_resolve('3.0.0rc4', allow_prereleases=True)
        assert_resolve('3.0.0rc4')
def test_resolve_prereleases_cached():
    """A pre-release resolved once via fetchers can later be served purely from the cache."""
    stable_dep = make_sdist(name='dep', version='2.0.0')
    prerelease_dep = make_sdist(name='dep', version='3.0.0rc3')
    with temporary_dir() as td:
        for sdist in (stable_dep, prerelease_dep):
            safe_copy(sdist, os.path.join(td, os.path.basename(sdist)))
        fetchers = [Fetcher([td])]
        with temporary_dir() as cd:

            def assert_resolve(dep, expected_version, **resolve_kwargs):
                # The generous cache_ttl keeps the just-written entries fresh for each run.
                dists = list(
                    resolve_multi([dep], cache=cd, cache_ttl=1000, **resolve_kwargs)
                )
                assert 1 == len(dists)
                dist = dists[0]
                assert expected_version == dist.version

            Crawler.reset_cache()

            # First do a run to load it into the cache.
            assert_resolve('dep>=1,<4', '3.0.0rc3', allow_prereleases=True, fetchers=fetchers)

            # This simulates running from another pex command. The Crawler cache actually caches an
            # empty cache so this fails in the same "process".
            Crawler.reset_cache()

            # Now assert that we can get it from the cache by removing the source.
            assert_resolve('dep>=1,<4', '3.0.0rc3', allow_prereleases=True, fetchers=[])

            # It should also be able to resolve without allow_prereleases, if explicitly requested.
            Crawler.reset_cache()
            assert_resolve('dep>=1.rc1,<4', '3.0.0rc3', fetchers=[])
def test_resolver_blacklist():
    """A blacklisted project is excluded from the resolve result."""
    # Pair a dependency marker that targets the *other* interpreter major
    # version with a blacklist spec covering this project, so the blacklist
    # (not the marker) is what determines the second resolve's outcome.
    if PY2:
        blacklist = {'project2': '<3'}
        required_project = "project2;python_version>'3'"
    else:
        blacklist = {'project2': '>3'}
        required_project = "project2;python_version<'3'"

    project1 = make_sdist(name='project1', version='1.0.0', install_reqs=[required_project])
    project2 = make_sdist(name='project2', version='1.1.0')
    with temporary_dir() as td:
        for sdist in (project1, project2):
            safe_copy(sdist, os.path.join(td, os.path.basename(sdist)))
        fetchers = [Fetcher([td])]

        # Without the blacklist, both projects resolve.
        assert len(resolve(['project1'], fetchers=fetchers)) == 2
        # With it, project2 is dropped.
        assert len(resolve(['project1'], fetchers=fetchers, pkg_blacklist=blacklist)) == 1
def test_cached_dependency_pinned_unpinned_resolution_multi_run():
    """Mixed pinned/unpinned requirements across cached runs resolve correctly."""
    # This exercises the issue described here: https://github.com/pantsbuild/pex/issues/178
    project1_0_0 = make_sdist(name='project', version='1.0.0')
    project1_1_0 = make_sdist(name='project', version='1.1.0')
    with temporary_dir() as td:
        for sdist in (project1_0_0, project1_1_0):
            safe_copy(sdist, os.path.join(td, os.path.basename(sdist)))
        fetchers = [Fetcher([td])]
        with temporary_dir() as cd:
            # First run, pinning 1.0.0 in the cache
            dists = resolve(['project', 'project==1.0.0'],
                            fetchers=fetchers,
                            cache=cd,
                            cache_ttl=1000)
            assert len(dists) == 1
            assert dists[0].version == '1.0.0'
            # This simulates separate invocations of pex but allows us to keep the same tmp cache dir
            Crawler.reset_cache()
            # Second, run, the unbounded 'project' req will find the 1.0.0 in the cache. But should also
            # return SourcePackages found in td
            dists = resolve(['project', 'project==1.1.0'],
                            fetchers=fetchers,
                            cache=cd,
                            cache_ttl=1000)
            assert len(dists) == 1
            assert dists[0].version == '1.1.0'
            # Third run, if exact resolvable and inexact resolvable, and cache_ttl is expired, exact
            # resolvable should pull from pypi as well since inexact will and the resulting
            # resolvable_set.merge() would fail.
            Crawler.reset_cache()
            # Sleep past the 1-second cache_ttl used below so the cache is considered stale.
            time.sleep(1)
            dists = resolve(['project', 'project==1.1.0'],
                            fetchers=fetchers,
                            cache=cd,
                            cache_ttl=1)
            assert len(dists) == 1
            assert dists[0].version == '1.1.0'
def test_resolve_prereleases_and_no_version():
    """A pre-release pin plus an unversioned req resolves only when pre-releases are allowed."""
    prerelease_dep = make_sdist(name='dep', version='3.0.0rc3')
    with temporary_dir() as td:
        safe_copy(prerelease_dep, os.path.join(td, os.path.basename(prerelease_dep)))
        fetchers = [Fetcher([td])]

        def assert_resolve(deps, expected_version, **resolve_kwargs):
            dists = list(resolve_multi(deps, fetchers=fetchers, **resolve_kwargs))
            assert 1 == len(dists)
            assert expected_version == dists[0].version

        # With allow_prereleases, the requirement (from two dependencies) for a
        # specific pre-release version plus an unversioned requirement accepts
        # the pre-release version correctly.
        assert_resolve(['dep==3.0.0rc3', 'dep'], '3.0.0rc3', allow_prereleases=True)

        # Without allow_prereleases set, the pre-release version is rejected.
        # This used to be an issue when a command-line use did not pass the `--pre`
        # option correctly into the API call for resolve_multi() from build_pex() in pex.py.
        with pytest.raises(Unsatisfiable):
            assert_resolve(['dep==3.0.0rc3', 'dep'], '3.0.0rc3')
def test_simple_local_resolve():
    """The most basic resolve: one local sdist yields exactly one distribution."""
    project_sdist = make_sdist(name='project')
    with temporary_dir() as td:
        safe_copy(project_sdist, os.path.join(td, os.path.basename(project_sdist)))
        resolved = resolve(['project'], fetchers=[Fetcher([td])])
        assert len(resolved) == 1
def test_simple_local_resolve():
    """The most basic resolve: one local wheel served via find-links."""
    project_wheel = build_wheel(name='project')
    with temporary_dir() as td:
        wheel_path = os.path.join(td, os.path.basename(project_wheel))
        safe_copy(project_wheel, wheel_path)
        assert len(local_resolve_multi(['project'], find_links=[td])) == 1
def test_simple_local_resolve():
    """The most basic resolve_multi: one local sdist yields exactly one distribution."""
    project_sdist = make_sdist(name='project')
    with temporary_dir() as td:
        safe_copy(project_sdist, os.path.join(td, os.path.basename(project_sdist)))
        resolved = list(resolve_multi(['project'], fetchers=[Fetcher([td])]))
        assert len(resolved) == 1
def test_clp_prereleases_resolver():
    # type: () -> None
    """End-to-end CLI check: `--no-pre` fails on a prerelease-only dep; `--pre` resolves it."""
    with nested(
        built_wheel(name="prerelease-dep", version="1.2.3b1"),
        built_wheel(name="transitive-dep", install_reqs=["prerelease-dep"]),
        built_wheel(name="dep", install_reqs=["prerelease-dep>=1.2", "transitive-dep"]),
        temporary_dir(),
        temporary_dir(),
    ) as (prerelease_dep, transitive_dep, dep, dist_dir, cache_dir):
        for dist in (prerelease_dep, transitive_dep, dep):
            safe_copy(dist, os.path.join(dist_dir, os.path.basename(dist)))

        parser = configure_clp()

        options, reqs = parser.parse_args(args=[
            "--no-index",
            "--find-links",
            dist_dir,
            "--cache-dir",
            cache_dir,  # Avoid dangling {pex_root}.
            "--no-pre",
            "dep",
        ])
        assert not options.allow_prereleases

        # Should fail to resolve the prerelease-only dep when prereleases are disallowed.
        # NOTE: pytest.raises no longer accepts `message=` (deprecated in pytest 4.0,
        # removed in 5.0, and it never asserted on the exception text anyway), so a
        # bare raises() is the correct form here.
        with pytest.raises(SystemExit):
            build_pex(reqs, options)

        # When we specify `--pre`, allow_prereleases is True.
        options, reqs = parser.parse_args(args=[
            "--no-index",
            "--find-links",
            dist_dir,
            "--cache-dir",
            cache_dir,  # Avoid dangling {pex_root}.
            "--pre",
            "dep",
        ])
        assert options.allow_prereleases

        # Without a corresponding fix in pex.py, this test failed for a dependency requirement of
        # dep==1.2.3b1 from one package and just dep (any version accepted) from another package.
        # The failure was an exit from build_pex() with the message:
        #
        #   Could not satisfy all requirements for dep==1.2.3b1:
        #       dep==1.2.3b1, dep
        #
        # With a correct behavior the assert line is reached and pex_builder object created.
        pex_builder = build_pex(reqs, options)
        assert pex_builder is not None
        assert len(pex_builder.info.distributions) == 3, "Should have resolved deps"
def test_clp_prereleases_resolver():
    """Check that `--pre` actually reaches resolve() by patching the resolver options builder."""
    prerelease_dep = make_sdist(name='dep', version='1.2.3b1')
    with temporary_dir() as td:
        safe_copy(prerelease_dep, os.path.join(td, os.path.basename(prerelease_dep)))
        fetcher = Fetcher([td])

        # When no specific options are specified, allow_prereleases is None.
        parser, resolver_options_builder = configure_clp()
        assert resolver_options_builder._allow_prereleases is None

        # When we specify `--pre`, allow_prereleases is True.
        options, reqs = parser.parse_args(args=['--pre', 'dep==1.2.3b1', 'dep'])
        assert resolver_options_builder._allow_prereleases

        # We need to use our own fetcher instead of PyPI.
        resolver_options_builder._fetchers.insert(0, fetcher)

        #####
        # The resolver created during processing of command line options (configure_clp)
        # is not actually passed into the API call (resolve_multi) from build_pex().
        # Instead, resolve_multi() calls resolve() where a new ResolverOptionsBuilder instance
        # is created. The only way to supply our own fetcher to that new instance is to patch it
        # here in the test so that it can fetch our test package (dep-1.2.3b1). Hence, this class
        # below and the change in the `pex.resolver` module where the patched object resides.
        #
        import pex.resolver

        class BuilderWithFetcher(ResolverOptionsBuilder):
            # Mirrors ResolverOptionsBuilder's constructor, injecting the local
            # fetcher at the front so our test sdist shadows PyPI.
            def __init__(self,
                         fetchers=None,
                         allow_all_external=False,
                         allow_external=None,
                         allow_unverified=None,
                         allow_prereleases=None,
                         precedence=None,
                         context=None
                         ):
                super(BuilderWithFetcher, self).__init__(fetchers=fetchers,
                                                         allow_all_external=allow_all_external,
                                                         allow_external=allow_external,
                                                         allow_unverified=allow_unverified,
                                                         allow_prereleases=allow_prereleases,
                                                         precedence=precedence,
                                                         context=context)
                self._fetchers.insert(0, fetcher)
        # end stub
        #####

        # Without a corresponding fix in pex.py, this test failed for a dependency requirement of
        # dep==1.2.3b1 from one package and just dep (any version accepted) from another package.
        # The failure was an exit from build_pex() with the message:
        #
        # Could not satisfy all requirements for dep==1.2.3b1:
        #     dep==1.2.3b1, dep
        #
        # With a correct behavior the assert line is reached and pex_builder object created.
        with mock.patch.object(pex.resolver, 'ResolverOptionsBuilder', BuilderWithFetcher):
            pex_builder = build_pex(reqs, options, resolver_options_builder)
            assert pex_builder is not None
def test_diamond_local_resolve_cached():
    """Regression test for https://github.com/pantsbuild/pex/issues/120 (diamond deps + cache)."""
    sdists = [
        make_sdist(name='project1', install_reqs=['project2<1.0.0']),
        make_sdist(name='project2'),
    ]
    with temporary_dir() as dd:
        for sdist in sdists:
            safe_copy(sdist, os.path.join(dd, os.path.basename(sdist)))
        with temporary_dir() as cd:
            resolved = resolve(
                ['project1', 'project2'],
                fetchers=[Fetcher([dd])],
                cache=cd,
                cache_ttl=1000,
            )
            assert len(resolved) == 2
def test_resolve_extra_setup_py():
    # type: () -> None
    """A local setup.py project requested with an extra pulls in the extra's dependency."""
    with make_source_dir(
        name="project1", version="1.0.0", extras_require={"foo": ["project2"]}
    ) as project1_dir:
        project2_wheel = build_wheel(name="project2", version="2.0.0")
        with temporary_dir() as td:
            safe_copy(project2_wheel, os.path.join(td, os.path.basename(project2_wheel)))

            resolved_dists = local_resolve_multi(
                ["{}[foo]".format(project1_dir)], find_links=[td]
            )
            expected = {
                _parse_requirement(req) for req in ("project1==1.0.0", "project2==2.0.0")
            }
            actual = {_parse_requirement(rd.requirement) for rd in resolved_dists}
            assert expected == actual
def test_intransitive():
    # type: () -> None
    """transitive=False must not follow (broken) transitive requirements."""
    wheels = [
        build_wheel(name="foo", version="1.0.0"),
        # The nonexistent req ensures that we are actually not acting
        # transitively (as that would fail).
        build_wheel(name="bar", version="1.0.0", install_reqs=["nonexistent==1.0.0"]),
    ]
    with temporary_dir() as td, temporary_dir() as cd:
        for wheel in wheels:
            safe_copy(wheel, os.path.join(td, os.path.basename(wheel)))
        resolved = local_resolve_multi(
            ["foo", "bar"], find_links=[td], cache=cd, transitive=False
        )
        assert len(resolved) == 2
def test_resolve_extra_sdist():
    """Resolving `project1[foo]` from sdists yields project1 plus the extra's dependency."""
    sdists = [
        make_sdist(name='project1', version='1.0.0', extras_require={'foo': ['project2']}),
        make_sdist(name='project2', version='2.0.0'),
    ]
    with temporary_dir() as td:
        for sdist in sdists:
            safe_copy(sdist, os.path.join(td, os.path.basename(sdist)))
        resolved_dists = do_resolve_multi(['project1[foo]'], fetchers=[Fetcher([td])])
        expected = {
            _parse_requirement(req) for req in ('project1[foo]', 'project2; extra=="foo"')
        }
        assert expected == {_parse_requirement(rd.requirement) for rd in resolved_dists}
def test_resolve_extra_wheel():
    # type: () -> None
    """Resolving `project1[foo]` from wheels yields project1 plus the extra's dependency."""
    wheels = [
        build_wheel(name="project1", version="1.0.0", extras_require={"foo": ["project2"]}),
        build_wheel(name="project2", version="2.0.0"),
    ]
    with temporary_dir() as td:
        for wheel in wheels:
            safe_copy(wheel, os.path.join(td, os.path.basename(wheel)))

        resolved_dists = local_resolve_multi(["project1[foo]"], find_links=[td])
        expected = {
            _parse_requirement(req) for req in ("project1==1.0.0", "project2==2.0.0")
        }
        assert expected == {_parse_requirement(rd.requirement) for rd in resolved_dists}
def test_diamond_local_resolve_cached():
    """Regression test for https://github.com/pantsbuild/pex/issues/120 (diamond deps + cache)."""
    wheels = [
        build_wheel(name='project1', install_reqs=['project2<1.0.0']),
        build_wheel(name='project2'),
    ]
    with temporary_dir() as dd, temporary_dir() as cd:
        for wheel in wheels:
            safe_copy(wheel, os.path.join(dd, os.path.basename(wheel)))
        resolved = local_resolve_multi(['project1', 'project2'], find_links=[dd], cache=cd)
        assert len(resolved) == 2
def translate(self, package, into=None):
    """From a binary package, translate to a local binary distribution.

    :param package: The package to translate; must be local and an instance of
      this translator's ``self._package_type`` to be handled.
    :param into: Optional directory to copy the distribution into; a fresh
      temporary directory is used when omitted.
    :returns: The translated distribution, or ``None`` when the package is of
      the wrong type or incompatible with ``self._supported_tags``.
    :raises ValueError: If the package is remote (not locally available).
    """
    if not package.local:
        raise ValueError('BinaryTranslator cannot translate remote packages.')
    # Wrong package type is a soft failure: let another translator handle it.
    if not isinstance(package, self._package_type):
        return None
    if not package.compatible(self._supported_tags):
        TRACER.log('Target package %s is not compatible with %s' %
                   (package, self._supported_tags))
        return None
    into = into or safe_mkdtemp()
    target_path = os.path.join(into, package.filename)
    safe_copy(package.local_path, target_path)
    return DistributionHelper.distribution_from_path(target_path)
def test_intransitive():
    """transitive=False must not follow (broken) transitive requirements."""
    foo = build_wheel(name='foo', version='1.0.0')
    # The nonexistent req ensures that we are actually not acting transitively
    # (as that would fail).
    bar = build_wheel(name='bar', version='1.0.0', install_reqs=['nonexistent==1.0.0'])
    with temporary_dir() as td, temporary_dir() as cd:
        for wheel in (foo, bar):
            safe_copy(wheel, os.path.join(td, os.path.basename(wheel)))
        resolved = local_resolve_multi(
            ['foo', 'bar'], find_links=[td], cache=cd, transitive=False
        )
        assert len(resolved) == 2
def test_ambiguous_transitive_resolvable():
    """A later transitive pin must not conflict with an earlier unbounded resolve."""
    # If an unbounded or larger bounded resolvable is resolved first, and a
    # transitive resolvable is resolved later in another round, an
    # Error(Ambiguous resolvable) could be raised: `foo` pulls in foo-2.0.0
    # while bar->foo==1.0.0 pulls in foo-1.0.0.
    sdists = [
        make_sdist(name='foo', version='1.0.0'),
        make_sdist(name='foo', version='2.0.0'),
        make_sdist(name='bar', version='1.0.0', install_reqs=['foo==1.0.0']),
    ]
    with temporary_dir() as td, temporary_dir() as cd:
        for sdist in sdists:
            safe_copy(sdist, os.path.join(td, os.path.basename(sdist)))
        dists = resolve(['foo', 'bar'], fetchers=[Fetcher([td])], cache=cd, cache_ttl=1000)
        assert len(dists) == 2
        assert dists[0].version == '1.0.0'
def test_intransitive():
    """transitive=False must not follow (broken) transitive requirements."""
    sdists = [
        make_sdist(name='foo', version='1.0.0'),
        # The nonexistent req ensures that we are actually not acting
        # transitively (as that would fail).
        make_sdist(name='bar', version='1.0.0', install_reqs=['nonexistent==1.0.0']),
    ]
    with temporary_dir() as td, temporary_dir() as cd:
        for sdist in sdists:
            safe_copy(sdist, os.path.join(td, os.path.basename(sdist)))
        resolved = do_resolve_multi(
            ['foo', 'bar'],
            fetchers=[Fetcher([td])],
            cache=cd,
            cache_ttl=1000,
            transitive=False,
        )
        assert len(resolved) == 2
def translate(self, package, into=None):
    """From a SourcePackage, translate to a binary distribution.

    :param package: The package to translate; must be a local ``SourcePackage``.
    :param into: Optional directory to copy the built distribution into; a
      fresh temporary directory is used when omitted.
    :returns: The built distribution, or ``None`` on any build/compatibility
      failure (failures are logged via ``TRACER``, not raised).
    :raises ValueError: If the package is remote (not locally available).
    """
    if not isinstance(package, SourcePackage):
        return None
    if not package.local:
        raise ValueError('SourceTranslator cannot translate remote packages.')
    installer = None
    version = self._interpreter.version
    unpack_path = Archiver.unpack(package.local_path)
    into = into or safe_mkdtemp()
    try:
        # Optionally run 2to3 when targeting a Python 3 interpreter.
        if self._use_2to3 and version >= (3, ):
            with TRACER.timed('Translating 2->3 %s' % package.name):
                self.run_2to3(unpack_path)
        installer = self._installer_impl(unpack_path, interpreter=self._interpreter)
        with TRACER.timed('Packaging %s' % package.name):
            try:
                dist_path = installer.bdist()
            except self._installer_impl.InstallFailure as e:
                TRACER.log('Failed to install package at %s: %s' % (unpack_path, e))
                return None
        target_path = os.path.join(into, os.path.basename(dist_path))
        safe_copy(dist_path, target_path)
        target_package = Package.from_href(target_path)
        if not target_package:
            TRACER.log('Target path %s does not look like a Package.' % target_path)
            return None
        if not target_package.compatible(self._supported_tags):
            TRACER.log('Target package %s is not compatible with %s' %
                       (target_package, self._supported_tags))
            return None
        return DistributionHelper.distribution_from_path(target_path)
    except Exception as e:
        # Best-effort translation: any unexpected error is logged (with the
        # full traceback) and the method falls through to return None.
        TRACER.log('Failed to translate %s' % package)
        TRACER.log(traceback.format_exc())
    finally:
        # Always clean up the installer scratch space and unpacked sources,
        # whether the build succeeded or not.
        if installer:
            installer.cleanup()
        if unpack_path:
            safe_rmtree(unpack_path)
def test_resolve_prereleases():
    # type: () -> None
    """Pre-release wheels are excluded unless allow_prereleases is set."""
    wheels = [
        build_wheel(name="dep", version="2.0.0"),
        build_wheel(name="dep", version="3.0.0rc3"),
    ]
    with temporary_dir() as td:
        for wheel in wheels:
            safe_copy(wheel, os.path.join(td, os.path.basename(wheel)))

        def assert_resolve(expected_version, **resolve_kwargs):
            resolved_dists = local_resolve_multi(["dep>=1,<4"], find_links=[td], **resolve_kwargs)
            assert 1 == len(resolved_dists)
            assert expected_version == resolved_dists[0].distribution.version

        # Default and explicit False both pick the stable release.
        assert_resolve("2.0.0")
        assert_resolve("2.0.0", allow_prereleases=False)
        # Opting in surfaces the newer release candidate.
        assert_resolve("3.0.0rc3", allow_prereleases=True)
def test_resolve_extra_setup_py():
    """A local setup.py project requested with an extra pulls in the extra's dependency."""
    with make_source_dir(
        name='project1', version='1.0.0', extras_require={'foo': ['project2']}
    ) as project1_dir:
        project2_wheel = build_wheel(name='project2', version='2.0.0')
        with temporary_dir() as td:
            safe_copy(project2_wheel, os.path.join(td, os.path.basename(project2_wheel)))
            resolved_dists = local_resolve_multi(
                ['{}[foo]'.format(project1_dir)], find_links=[td]
            )
            expected = {
                _parse_requirement(req) for req in ('project1==1.0.0', 'project2==2.0.0')
            }
            assert expected == {_parse_requirement(rd.requirement) for rd in resolved_dists}
def test_resolve_prereleases():
    """Pre-release sdists are excluded unless allow_prereleases is set."""
    sdists = [
        make_sdist(name='dep', version='2.0.0'),
        make_sdist(name='dep', version='3.0.0rc3'),
    ]
    with temporary_dir() as td:
        for sdist in sdists:
            safe_copy(sdist, os.path.join(td, os.path.basename(sdist)))
        fetchers = [Fetcher([td])]

        def assert_resolve(expected_version, **resolve_kwargs):
            dists = resolve(['dep>=1,<4'], fetchers=fetchers, **resolve_kwargs)
            assert 1 == len(dists)
            assert expected_version == dists[0].version

        # Default and explicit False both pick the stable release; opting in
        # surfaces the release candidate.
        assert_resolve('2.0.0')
        assert_resolve('2.0.0', allow_prereleases=False)
        assert_resolve('3.0.0rc3', allow_prereleases=True)
def test_resolve_extra_wheel():
    """Resolving `project1[foo]` from wheels yields project1 plus the extra's dependency."""
    wheels = [
        build_wheel(name='project1', version='1.0.0', extras_require={'foo': ['project2']}),
        build_wheel(name='project2', version='2.0.0'),
    ]
    with temporary_dir() as td:
        for wheel in wheels:
            safe_copy(wheel, os.path.join(td, os.path.basename(wheel)))
        resolved_dists = local_resolve_multi(['project1[foo]'], find_links=[td])
        expected = {
            _parse_requirement(req) for req in ('project1==1.0.0', 'project2==2.0.0')
        }
        assert expected == {_parse_requirement(rd.requirement) for rd in resolved_dists}
def test_cached_dependency_pinned_unpinned_resolution_multi_run():
    """Mixed pinned/unpinned requirements across cached resolve_multi runs resolve correctly."""
    # This exercises the issue described here: https://github.com/pantsbuild/pex/issues/178
    project1_0_0 = make_sdist(name='project', version='1.0.0')
    project1_1_0 = make_sdist(name='project', version='1.1.0')
    with temporary_dir() as td:
        for sdist in (project1_0_0, project1_1_0):
            safe_copy(sdist, os.path.join(td, os.path.basename(sdist)))
        fetchers = [Fetcher([td])]
        with temporary_dir() as cd:
            # First run, pinning 1.0.0 in the cache
            dists = list(
                resolve_multi(['project', 'project==1.0.0'],
                              fetchers=fetchers,
                              cache=cd,
                              cache_ttl=1000)
            )
            assert len(dists) == 1
            assert dists[0].version == '1.0.0'
            # This simulates separate invocations of pex but allows us to keep the same tmp cache dir
            Crawler.reset_cache()
            # Second, run, the unbounded 'project' req will find the 1.0.0 in the cache. But should also
            # return SourcePackages found in td
            dists = list(
                resolve_multi(['project', 'project==1.1.0'],
                              fetchers=fetchers,
                              cache=cd,
                              cache_ttl=1000)
            )
            assert len(dists) == 1
            assert dists[0].version == '1.1.0'
            # Third run, if exact resolvable and inexact resolvable, and cache_ttl is expired, exact
            # resolvable should pull from pypi as well since inexact will and the resulting
            # resolvable_set.merge() would fail.
            Crawler.reset_cache()
            # Sleep past the 1-second cache_ttl used below so the cache is considered stale.
            time.sleep(1)
            dists = list(
                resolve_multi(['project', 'project==1.1.0'],
                              fetchers=fetchers,
                              cache=cd,
                              cache_ttl=1)
            )
            assert len(dists) == 1
            assert dists[0].version == '1.1.0'
def test_resolve_prereleases():
    """Pre-release wheels are excluded unless allow_prereleases is set."""
    wheels = [
        build_wheel(name='dep', version='2.0.0'),
        build_wheel(name='dep', version='3.0.0rc3'),
    ]
    with temporary_dir() as td:
        for wheel in wheels:
            safe_copy(wheel, os.path.join(td, os.path.basename(wheel)))

        def assert_resolve(expected_version, **resolve_kwargs):
            resolved_dists = local_resolve_multi(['dep>=1,<4'], find_links=[td], **resolve_kwargs)
            assert 1 == len(resolved_dists)
            assert expected_version == resolved_dists[0].distribution.version

        # Default and explicit False both pick the stable release; opting in
        # surfaces the release candidate.
        assert_resolve('2.0.0')
        assert_resolve('2.0.0', allow_prereleases=False)
        assert_resolve('3.0.0rc3', allow_prereleases=True)
def test_cached_dependency_pinned_unpinned_resolution_multi_run():
    # type: () -> None
    """Regression test for https://github.com/pantsbuild/pex/issues/178."""
    wheels = [
        build_wheel(name="project", version="1.0.0"),
        build_wheel(name="project", version="1.1.0"),
    ]
    with temporary_dir() as td:
        for wheel in wheels:
            safe_copy(wheel, os.path.join(td, os.path.basename(wheel)))
        with temporary_dir() as cd:
            # First run pins 1.0.0 into the cache.
            resolved = local_resolve_multi(["project==1.0.0"], find_links=[td], cache=cd)
            assert len(resolved) == 1
            assert resolved[0].distribution.version == "1.0.0"
            # A later unbounded resolve must still pick up the newer 1.1.0 from
            # the find-links dir instead of being satisfied by the cached 1.0.0.
            resolved = local_resolve_multi(["project"], find_links=[td], cache=cd)
            assert len(resolved) == 1
            assert resolved[0].distribution.version == "1.1.0"
def test_ambiguous_transitive_resolvable():
    """A later transitive pin must not conflict with an earlier unbounded resolve."""
    # If an unbounded or larger bounded resolvable is resolved first, and a
    # transitive resolvable is resolved later in another round, an
    # Error(Ambiguous resolvable) could be raised: `foo` pulls in foo-2.0.0
    # while bar->foo==1.0.0 pulls in foo-1.0.0.
    sdists = [
        make_sdist(name='foo', version='1.0.0'),
        make_sdist(name='foo', version='2.0.0'),
        make_sdist(name='bar', version='1.0.0', install_reqs=['foo==1.0.0']),
    ]
    with temporary_dir() as td, temporary_dir() as cd:
        for sdist in sdists:
            safe_copy(sdist, os.path.join(td, os.path.basename(sdist)))
        dists = list(
            resolve_multi(['foo', 'bar'], fetchers=[Fetcher([td])], cache=cd, cache_ttl=1000)
        )
        assert len(dists) == 2
        assert dists[0].version == '1.0.0'
def test_resolve_prereleases():
    """Pre-release sdists are excluded from resolve_multi unless allow_prereleases is set."""
    sdists = [
        make_sdist(name='dep', version='2.0.0'),
        make_sdist(name='dep', version='3.0.0rc3'),
    ]
    with temporary_dir() as td:
        for sdist in sdists:
            safe_copy(sdist, os.path.join(td, os.path.basename(sdist)))
        fetchers = [Fetcher([td])]

        def assert_resolve(expected_version, **resolve_kwargs):
            dists = list(resolve_multi(['dep>=1,<4'], fetchers=fetchers, **resolve_kwargs))
            assert 1 == len(dists)
            assert expected_version == dists[0].version

        # Default and explicit False both pick the stable release; opting in
        # surfaces the release candidate.
        assert_resolve('2.0.0')
        assert_resolve('2.0.0', allow_prereleases=False)
        assert_resolve('3.0.0rc3', allow_prereleases=True)
def isolated():
    """Returns a chroot for third_party isolated from the ``sys.path``.

    PEX will typically be installed in site-packages flat alongside many other
    distributions; as such, adding the location of the pex distribution to the
    ``sys.path`` will typically expose many other distributions. An isolated
    chroot can be used as a ``sys.path`` entry to effect only the exposure of
    pex.

    :return: The path of the chroot.
    :rtype: str
    """
    global _ISOLATED
    if _ISOLATED is None:
        # Deferred imports; NOTE(review): presumably to avoid import cycles /
        # bootstrap-order issues at module load time — confirm.
        from pex import vendor
        from pex.common import atomic_directory, safe_copy
        from pex.util import CacheHelper
        from pex.variables import ENV

        pex_path = os.path.join(vendor.VendorSpec.ROOT, 'pex')
        with _tracer().timed('Isolating pex'):
            # Key the chroot by a hash of the pex source tree so a changed
            # checkout gets a fresh isolation directory.
            isolated_dir = os.path.join(ENV.PEX_ROOT, 'isolated', CacheHelper.dir_hash(pex_path))
            with atomic_directory(isolated_dir) as chroot:
                # NOTE(review): `chroot` appears to be truthy only when this
                # process is the one populating the directory (others see it
                # already built) — confirm against the atomic_directory contract.
                if chroot:
                    with _tracer().timed('Extracting pex to {}'.format(isolated_dir)):
                        pex_path = os.path.join(vendor.VendorSpec.ROOT, 'pex')
                        for root, dirs, files in os.walk(pex_path):
                            relroot = os.path.relpath(root, pex_path)
                            for d in dirs:
                                os.makedirs(os.path.join(chroot, 'pex', relroot, d))
                            for f in files:
                                # Skip compiled bytecode; only sources are copied.
                                if not f.endswith('.pyc'):
                                    safe_copy(os.path.join(root, f),
                                              os.path.join(chroot, 'pex', relroot, f))
        _ISOLATED = isolated_dir
    return _ISOLATED
def test_resolve_prereleases_multiple_set():
    """An exact pre-release pin intersected with a `>=rc` bound resolves to the pin."""
    dep_sdists = [
        make_sdist(name='dep', version=version)
        for version in ('2.0.0', '3.0.0rc3', '3.0.0rc4', '3.0.0rc5')
    ]
    with temporary_dir() as td:
        for sdist in dep_sdists:
            safe_copy(sdist, os.path.join(td, os.path.basename(sdist)))
        fetchers = [Fetcher([td])]

        def assert_resolve(expected_version, **resolve_kwargs):
            # Both requirements constrain `dep`; the resolver must satisfy their intersection.
            dists = list(
                resolve_multi(['dep>=3.0.0rc1', 'dep==3.0.0rc4'], fetchers=fetchers, **resolve_kwargs)
            )
            assert 1 == len(dists)
            assert expected_version == dists[0].version

        # This should resolve with explicit prerelease being set or implicitly.
        assert_resolve('3.0.0rc4', allow_prereleases=True)
        assert_resolve('3.0.0rc4')
def test_clp_prereleases_resolver():
    """Check that `--pre` actually reaches the resolver by patching the options builder."""
    prerelease_dep = make_sdist(name='dep', version='1.2.3b1')
    with temporary_dir() as td:
        safe_copy(prerelease_dep, os.path.join(td, os.path.basename(prerelease_dep)))
        fetcher = Fetcher([td])

        # When no specific options are specified, allow_prereleases is None.
        parser, resolver_options_builder = configure_clp()
        assert resolver_options_builder._allow_prereleases is None

        # When we specify `--pre`, allow_prereleases is True.
        options, reqs = parser.parse_args(args=['--pre', 'dep==1.2.3b1', 'dep'])
        assert resolver_options_builder._allow_prereleases

        # We need to use our own fetcher instead of PyPI.
        resolver_options_builder._fetchers.insert(0, fetcher)

        #####
        # The resolver created during processing of command line options (configure_clp)
        # is not actually passed into the API call (resolve_multi) from build_pex().
        # Instead, resolve_multi() calls resolve() where a new ResolverOptionsBuilder instance
        # is created. The only way to supply our own fetcher to that new instance is to patch it
        # here in the test so that it can fetch our test package (dep-1.2.3b1). Hence, this class
        # below and the change in the `pex.resolver` module where the patched object resides.
        #
        import pex.resolver

        class BuilderWithFetcher(ResolverOptionsBuilder):
            # Mirrors ResolverOptionsBuilder's constructor, injecting the local
            # fetcher at the front so our test sdist shadows PyPI.
            # NOTE(review): super() is passed `use_manylinux=None`, ignoring the
            # `use_manylinux` parameter — looks unintended; confirm.
            def __init__(self,
                         fetchers=None,
                         allow_all_external=False,
                         allow_external=None,
                         allow_unverified=None,
                         allow_prereleases=None,
                         use_manylinux=None,
                         precedence=None,
                         context=None):
                super(BuilderWithFetcher, self).__init__(fetchers=fetchers,
                                                         allow_all_external=allow_all_external,
                                                         allow_external=allow_external,
                                                         allow_unverified=allow_unverified,
                                                         allow_prereleases=allow_prereleases,
                                                         use_manylinux=None,
                                                         precedence=precedence,
                                                         context=context)
                self._fetchers.insert(0, fetcher)
        # end stub
        #####

        # Without a corresponding fix in pex.py, this test failed for a dependency requirement of
        # dep==1.2.3b1 from one package and just dep (any version accepted) from another package.
        # The failure was an exit from build_pex() with the message:
        #
        # Could not satisfy all requirements for dep==1.2.3b1:
        #     dep==1.2.3b1, dep
        #
        # With a correct behavior the assert line is reached and pex_builder object created.
        with mock.patch.object(pex.resolver, 'ResolverOptionsBuilder', BuilderWithFetcher):
            pex_builder = build_pex(reqs, options, resolver_options_builder)
            assert pex_builder is not None