def test_pex_builder_copy_or_link():
    # type: () -> None
    """Verify PEXBuilder's `copy` flag: copy=True copies sources into the chroot while
    copy=False hard-links them (same file on disk as the original source)."""
    with nested(temporary_dir(), temporary_dir(), temporary_dir()) as (td1, td2, td3):
        src = os.path.join(td1, "exe.py")
        with open(src, "w") as fp:
            fp.write(exe_main)

        def build_and_check(path, copy):
            # type: (str, bool) -> None
            # Build a PEX chroot at `path`, clone it, then check in both trees whether
            # the added source shares an inode with the original (i.e. was hard-linked).
            pb = PEXBuilder(path=path, copy=copy)
            pb.add_source(src, "exe.py")

            path_clone = os.path.join(path, "__clone")
            pb.clone(into=path_clone)

            for root in path, path_clone:
                s1 = os.stat(src)
                s2 = os.stat(os.path.join(root, "exe.py"))
                # An identical (inode, device) pair means both paths are the same file on disk.
                is_link = (s1[stat.ST_INO], s1[stat.ST_DEV]) == (s2[stat.ST_INO], s2[stat.ST_DEV])
                if copy:
                    assert not is_link
                else:
                    assert is_link

        build_and_check(td2, False)
        build_and_check(td3, True)
def test_pex_builder_from_requirements_pex():
    """A new PEX seeded via add_from_requirements_pex can use the distributions of an
    existing PEX, whether given as an unpacked pex directory or a zipped .pex file."""

    def build_from_req_pex(path, req_pex):
        # Seed a fresh PEXBuilder from an existing pex's requirements, then add an
        # entry point (exe_main) that writes 'success' to the file named by argv[1].
        pb = PEXBuilder(path=path)
        pb.add_from_requirements_pex(req_pex)
        with open(os.path.join(path, 'exe.py'), 'w') as fp:
            fp.write(exe_main)
        pb.set_executable(os.path.join(path, 'exe.py'))
        pb.freeze()
        return pb

    def verify(pb):
        # Running the pex should create success.txt containing 'success'.
        success_txt = os.path.join(pb.path(), 'success.txt')
        PEX(pb.path(), interpreter=pb.interpreter).run(args=[success_txt])
        assert os.path.exists(success_txt)
        with open(success_txt) as fp:
            assert fp.read() == 'success'

    # Build from pex dir.
    with temporary_dir() as td2:
        with nested(temporary_dir(), make_bdist('p1')) as (td1, p1):
            pb1 = write_pex(td1, dists=[p1])
            pb2 = build_from_req_pex(td2, pb1.path())
        verify(pb2)

    # Build from .pex file.
    with temporary_dir() as td4:
        with nested(temporary_dir(), make_bdist('p1')) as (td3, p1):
            pb3 = write_pex(td3, dists=[p1])
            target = os.path.join(td3, 'foo.pex')
            pb3.build(target)
            pb4 = build_from_req_pex(td4, target)
        verify(pb4)
def assert_chroot_perms(copyfn):
    """Assert that `copyfn(chroot, src, dst)` preserves file permission bits, both when
    copying into a Chroot and after round-tripping the chroot through a zip archive."""
    with temporary_dir() as src:
        one = os.path.join(src, "one")
        touch(one)

        two = os.path.join(src, "two")
        touch(two)
        chmod_plus_x(two)  # `two` differs from `one` only by the executable bit.

        with temporary_dir() as dst:
            chroot = Chroot(dst)
            copyfn(chroot, one, "one")
            copyfn(chroot, two, "two")
            assert extract_perms(one) == extract_perms(os.path.join(chroot.path(), "one"))
            assert extract_perms(two) == extract_perms(os.path.join(chroot.path(), "two"))

            zip_path = os.path.join(src, "chroot.zip")
            chroot.zip(zip_path)
            with temporary_dir() as extract_dir:
                # PermPreservingZipFile should restore the permission bits on extraction.
                with contextlib.closing(PermPreservingZipFile(zip_path)) as zf:
                    zf.extractall(extract_dir)

                assert extract_perms(one) == extract_perms(os.path.join(extract_dir, "one"))
                assert extract_perms(two) == extract_perms(os.path.join(extract_dir, "two"))
def assert_access_zipped_assets(distribution_helper_import):
    """Build and run a PEX whose entry point reads a packaged asset via
    DistributionHelper.access_zipped_assets; return the pex process's stderr bytes.

    `distribution_helper_import` is the import statement that brings DistributionHelper
    into scope inside the generated entry point.
    """
    test_executable = dedent("""
        import os
        {distribution_helper_import}
        temp_dir = DistributionHelper.access_zipped_assets('my_package', 'submodule')
        with open(os.path.join(temp_dir, 'mod.py'), 'r') as fp:
            for line in fp:
                print(line)
        """.format(distribution_helper_import=distribution_helper_import))
    with nested(temporary_dir(), temporary_dir()) as (td1, td2):
        pb = PEXBuilder(path=td1)
        with open(os.path.join(td1, 'exe.py'), 'w') as fp:
            fp.write(test_executable)
            pb.set_executable(fp.name)

        submodule = os.path.join(td1, 'my_package', 'submodule')
        safe_mkdir(submodule)
        mod_path = os.path.join(submodule, 'mod.py')
        with open(mod_path, 'w') as fp:
            fp.write('accessed')
            pb.add_source(fp.name, 'my_package/submodule/mod.py')
        # Empty __init__.py files make my_package and its submodule importable.
        pb.add_source(None, 'my_package/__init__.py')
        pb.add_source(None, 'my_package/submodule/__init__.py')

        pex = os.path.join(td2, 'app.pex')
        pb.build(pex)

        process = PEX(pex, interpreter=pb.interpreter).run(
            blocking=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        stdout, stderr = process.communicate()
        assert process.returncode == 0
        # mod.py contains 'accessed' (no newline); print() appends the '\n'.
        assert b'accessed\n' == stdout
        return stderr
def test_pex_run_strip_env():
    """PEX_* variables must be stripped from the child pex's environment while the
    parent process's environment is left untouched."""
    with temporary_dir() as pex_root:
        pex_env = dict(PEX_MODULE='does_not_exist_in_sub_pex', PEX_ROOT=pex_root)
        with environment_as(**pex_env), temporary_dir() as pex_chroot:
            pex_builder = PEXBuilder(path=pex_chroot)
            with tempfile.NamedTemporaryFile(mode="w") as fp:
                # The entry point dumps every PEX_* variable it sees as JSON.
                fp.write(
                    dedent("""
                    import json
                    import os
                    print(json.dumps({k: v for k, v in os.environ.items() if k.startswith("PEX_")}))
                    """))
                fp.flush()
                pex_builder.set_executable(fp.name, 'print_pex_env.py')
            pex_builder.freeze()

            stdout, returncode = run_simple_pex(pex_chroot)
            assert 0 == returncode
            # The sub-process should observe no PEX_* variables at all.
            assert {} == json.loads(stdout.decode('utf-8')), (
                'Expected the entrypoint environment to be stripped of PEX_ environment variables.'
            )
            assert pex_env == {
                k: v for k, v in os.environ.items() if k.startswith("PEX_")
            }, ('Expected the parent environment to be left un-stripped.')
def assert_access_zipped_assets(distribution_helper_import):
    # type: (str) -> bytes
    """Build and run a PEX whose entry point reads a packaged asset via
    DistributionHelper.access_zipped_assets.

    :param distribution_helper_import: the import statement that brings
        DistributionHelper into scope inside the generated entry point.
    :return: the pex process's stderr bytes, for the caller to inspect.
    """
    test_executable = dedent(
        """
        import os
        {distribution_helper_import}
        temp_dir = DistributionHelper.access_zipped_assets('my_package', 'submodule')
        with open(os.path.join(temp_dir, 'mod.py'), 'r') as fp:
            for line in fp:
                print(line)
        """.format(distribution_helper_import=distribution_helper_import)
    )
    with temporary_dir() as td1, temporary_dir() as td2:
        pb = PEXBuilder(path=td1)
        with open(os.path.join(td1, "exe.py"), "w") as fp:
            fp.write(test_executable)
            pb.set_executable(fp.name)

        submodule = os.path.join(td1, "my_package", "submodule")
        safe_mkdir(submodule)
        mod_path = os.path.join(submodule, "mod.py")
        with open(mod_path, "w") as fp:
            fp.write("accessed")
            pb.add_source(fp.name, "my_package/submodule/mod.py")
        # Empty __init__.py files make my_package and its submodule importable.
        pb.add_source(None, "my_package/__init__.py")
        pb.add_source(None, "my_package/submodule/__init__.py")

        pex = os.path.join(td2, "app.pex")
        pb.build(pex)

        process = PEX(pex, interpreter=pb.interpreter).run(
            blocking=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE
        )
        stdout, stderr = process.communicate()
        assert process.returncode == 0
        # mod.py contains 'accessed' (no newline); print() appends the '\n'.
        assert b"accessed\n" == stdout
        return cast(bytes, stderr)
def test_resolve_cache():
    # type: () -> None
    """Without a cache, repeated resolves land in distinct locations; with a shared
    cache, repeated resolves are fully identical."""
    project_wheel = build_wheel(name="project")

    with temporary_dir() as td, temporary_dir() as cache:
        safe_copy(project_wheel, os.path.join(td, os.path.basename(project_wheel)))

        # Without a cache, each resolve should be isolated, but otherwise identical.
        resolved_dists1 = local_resolve_multi(["project"], find_links=[td])
        resolved_dists2 = local_resolve_multi(["project"], find_links=[td])
        assert resolved_dists1 != resolved_dists2
        assert len(resolved_dists1) == 1
        assert len(resolved_dists2) == 1
        # Same requirement satisfied...
        assert resolved_dists1[0].direct_requirement == resolved_dists2[0].direct_requirement
        # ...but installed at different on-disk locations.
        assert resolved_dists1[0].distribution.location != resolved_dists2[0].distribution.location

        # With a cache, each resolve should be identical.
        resolved_dists3 = local_resolve_multi(["project"], find_links=[td], cache=cache)
        resolved_dists4 = local_resolve_multi(["project"], find_links=[td], cache=cache)
        assert resolved_dists1 != resolved_dists3
        assert resolved_dists2 != resolved_dists3
        assert resolved_dists3 == resolved_dists4
def test_pex_builder_compilation():
    # type: () -> None
    """freeze(bytecode_compile=True) should produce .pyc files for added sources, the
    entry point, __main__, and the pex bootstrap; without it, no .pyc files appear."""
    with temporary_dir() as td1, temporary_dir() as td2, temporary_dir() as td3:
        src = os.path.join(td1, "src.py")
        with open(src, "w") as fp:
            fp.write(exe_main)

        exe = os.path.join(td1, "exe.py")
        with open(exe, "w") as fp:
            fp.write(exe_main)

        def build_and_check(path, precompile):
            # type: (str, bool) -> None
            pb = PEXBuilder(path=path)
            pb.add_source(src, "lib/src.py")
            pb.set_executable(exe, "exe.py")
            pb.freeze(bytecode_compile=precompile)
            for pyc_file in ("exe.pyc", "lib/src.pyc", "__main__.pyc"):
                pyc_exists = os.path.exists(os.path.join(path, pyc_file))
                if precompile:
                    assert pyc_exists
                else:
                    assert not pyc_exists
            # The bootstrap code shipped inside the pex must follow the same rule.
            bootstrap_dir = os.path.join(path, pb.info.bootstrap)
            bootstrap_pycs = []  # type: List[str]
            for _, _, files in os.walk(bootstrap_dir):
                bootstrap_pycs.extend(f for f in files if f.endswith(".pyc"))
            if precompile:
                assert len(bootstrap_pycs) > 0
            else:
                assert 0 == len(bootstrap_pycs)

        build_and_check(td2, False)
        build_and_check(td3, True)
def test_pex_builder_from_requirements_pex():
    # type: () -> None
    """A new PEX seeded via add_from_requirements_pex can use the distributions of an
    existing PEX, whether given as an unpacked pex directory or a zipped .pex file."""

    def build_from_req_pex(path, req_pex):
        # type: (str, str) -> PEXBuilder
        # Seed a fresh PEXBuilder from an existing pex's requirements, then add an
        # entry point (exe_main) that writes 'success' to the file named by argv[1].
        pb = PEXBuilder(path=path)
        pb.add_from_requirements_pex(req_pex)
        with open(os.path.join(path, "exe.py"), "w") as fp:
            fp.write(exe_main)
        pb.set_executable(os.path.join(path, "exe.py"))
        pb.freeze()
        return pb

    def verify(pb):
        # type: (PEXBuilder) -> None
        # Running the pex should create success.txt containing 'success'.
        success_txt = os.path.join(pb.path(), "success.txt")
        PEX(pb.path(), interpreter=pb.interpreter).run(args=[success_txt])
        assert os.path.exists(success_txt)
        with open(success_txt) as fp:
            assert fp.read() == "success"

    # Build from pex dir.
    with temporary_dir() as td2:
        with temporary_dir() as td1, make_bdist("p1") as p1:
            pb1 = write_pex(td1, dists=[p1])
            pb2 = build_from_req_pex(td2, pb1.path())
        verify(pb2)

    # Build from .pex file.
    with temporary_dir() as td4:
        with temporary_dir() as td3, make_bdist("p1") as p1:
            pb3 = write_pex(td3, dists=[p1])
            target = os.path.join(td3, "foo.pex")
            pb3.build(target)
            pb4 = build_from_req_pex(td4, target)
        verify(pb4)
def test_pex_builder_compilation():
    """freeze(bytecode_compile=True) should produce .pyc files for added sources, the
    entry point, __main__, and the pex bootstrap; without it, no .pyc files appear."""
    with nested(temporary_dir(), temporary_dir(), temporary_dir()) as (td1, td2, td3):
        src = os.path.join(td1, 'src.py')
        with open(src, 'w') as fp:
            fp.write(exe_main)

        exe = os.path.join(td1, 'exe.py')
        with open(exe, 'w') as fp:
            fp.write(exe_main)

        def build_and_check(path, precompile):
            pb = PEXBuilder(path=path)
            pb.add_source(src, 'lib/src.py')
            pb.set_executable(exe, 'exe.py')
            pb.freeze(bytecode_compile=precompile)
            for pyc_file in ('exe.pyc', 'lib/src.pyc', '__main__.pyc'):
                pyc_exists = os.path.exists(os.path.join(path, pyc_file))
                if precompile:
                    assert pyc_exists
                else:
                    assert not pyc_exists
            # The bootstrap code shipped inside the pex must follow the same rule.
            bootstrap_dir = os.path.join(path, BOOTSTRAP_DIR)
            bootstrap_pycs = []
            for _, _, files in os.walk(bootstrap_dir):
                bootstrap_pycs.extend(f for f in files if f.endswith('.pyc'))
            if precompile:
                assert len(bootstrap_pycs) > 0
            else:
                assert 0 == len(bootstrap_pycs)

        build_and_check(td2, False)
        build_and_check(td3, True)
def test_clp_prereleases_resolver():
    # type: () -> None
    """`--no-pre` must fail to resolve a dep whose only satisfying transitive version is
    a pre-release, while `--pre` must resolve all three distributions."""
    with nested(
        built_wheel(name="prerelease-dep", version="1.2.3b1"),
        built_wheel(name="transitive-dep", install_reqs=["prerelease-dep"]),
        built_wheel(name="dep", install_reqs=["prerelease-dep>=1.2", "transitive-dep"]),
        temporary_dir(),
        temporary_dir(),
    ) as (prerelease_dep, transitive_dep, dep, dist_dir, cache_dir):
        for dist in (prerelease_dep, transitive_dep, dep):
            safe_copy(dist, os.path.join(dist_dir, os.path.basename(dist)))

        parser = configure_clp()

        options, reqs = parser.parse_args(args=[
            "--no-index",
            "--find-links",
            dist_dir,
            "--cache-dir",
            cache_dir,  # Avoid dangling {pex_root}.
            "--no-pre",
            "dep",
        ])
        assert not options.allow_prereleases

        # FIX: the `message` kwarg to pytest.raises was deprecated in pytest 4.0 and
        # removed in 5.0; rely on pytest's default "DID NOT RAISE" failure instead.
        # Should have failed to resolve the prerelease dep.
        with pytest.raises(SystemExit):
            build_pex(reqs, options)

        # When we specify `--pre`, allow_prereleases is True
        options, reqs = parser.parse_args(args=[
            "--no-index",
            "--find-links",
            dist_dir,
            "--cache-dir",
            cache_dir,  # Avoid dangling {pex_root}.
            "--pre",
            "dep",
        ])
        assert options.allow_prereleases

        # Without a corresponding fix in pex.py, this test failed for a dependency requirement of
        # dep==1.2.3b1 from one package and just dep (any version accepted) from another package.
        # The failure was an exit from build_pex() with the message:
        #
        # Could not satisfy all requirements for dep==1.2.3b1:
        #     dep==1.2.3b1, dep
        #
        # With a correct behavior the assert line is reached and pex_builder object created.
        pex_builder = build_pex(reqs, options)
        assert pex_builder is not None
        assert len(pex_builder.info.distributions) == 3, "Should have resolved deps"
def test_intransitive():
    # type: () -> None
    """With transitive=False only the named projects are resolved; their declared
    dependencies are not followed."""
    foo1_0 = build_wheel(name="foo", version="1.0.0")
    # The nonexistent req ensures that we are actually not acting transitively (as that would fail).
    bar1_0 = build_wheel(name="bar", version="1.0.0", install_reqs=["nonexistent==1.0.0"])
    with temporary_dir() as td:
        for wheel in (foo1_0, bar1_0):
            safe_copy(wheel, os.path.join(td, os.path.basename(wheel)))
        with temporary_dir() as cd:
            resolved_dists = local_resolve_multi(
                ["foo", "bar"], find_links=[td], cache=cd, transitive=False
            )
            # Exactly foo and bar — no transitive deps were pulled in.
            assert len(resolved_dists) == 2
def test_diamond_local_resolve_cached():
    """A diamond-shaped dependency graph resolves from a local find-links repo with a cache.

    This exercises the issue described here: https://github.com/pantsbuild/pex/issues/120
    """
    wheels = [
        build_wheel(name='project1', install_reqs=['project2<1.0.0']),
        build_wheel(name='project2'),
    ]
    with temporary_dir() as find_links_dir:
        for wheel in wheels:
            safe_copy(wheel, os.path.join(find_links_dir, os.path.basename(wheel)))
        with temporary_dir() as cache_dir:
            resolved_dists = local_resolve_multi(
                ['project1', 'project2'], find_links=[find_links_dir], cache=cache_dir)
            assert len(resolved_dists) == 2
def run_simple_pex_test(
    body,  # type: str
    args=(),  # type: Iterable[str]
    env=None,  # type: Optional[Mapping[str, str]]
    dists=None,  # type: Optional[Iterable[Distribution]]
    coverage=False,  # type: bool
    interpreter=None,  # type: Optional[PythonInterpreter]
):
    # type: (...) -> Tuple[bytes, int]
    """Write a simple PEX from `body`, build it to a .pex file, and run it.

    Returns the (stdout, returncode) pair produced by `run_simple_pex`.
    """
    with temporary_dir() as td1, temporary_dir() as td2:
        pb = write_simple_pex(td1, body, dists=dists, coverage=coverage, interpreter=interpreter)
        pex = os.path.join(td2, "app.pex")
        pb.build(pex)
        return run_simple_pex(pex, args=args, env=env, interpreter=interpreter)
def run_simple_pex_test(body, args=(), env=None, dists=None, coverage=False, interpreter=None):
    """Build a simple PEX from `body` into a .pex file and execute it.

    Returns the (stdout, returncode) pair produced by `run_simple_pex`.
    """
    with temporary_dir() as build_dir, temporary_dir() as output_dir:
        builder = write_simple_pex(
            build_dir, body, dists=dists, coverage=coverage, interpreter=interpreter)
        pex_file = os.path.join(output_dir, "app.pex")
        builder.build(pex_file)
        return run_simple_pex(pex_file, args=args, env=env, interpreter=interpreter)
def test_intransitive(): foo1_0 = build_wheel(name='foo', version='1.0.0') # The nonexistent req ensures that we are actually not acting transitively (as that would fail). bar1_0 = build_wheel(name='bar', version='1.0.0', install_reqs=['nonexistent==1.0.0']) with temporary_dir() as td: for wheel in (foo1_0, bar1_0): safe_copy(wheel, os.path.join(td, os.path.basename(wheel))) with temporary_dir() as cd: resolved_dists = local_resolve_multi(['foo', 'bar'], find_links=[td], cache=cd, transitive=False) assert len(resolved_dists) == 2
def test_pex_builder_copy_or_link():
    # type: () -> None
    """Verify CopyMode semantics: COPY copies sources into the chroot, while LINK and
    SYMLINK leave the chroot entry resolving to the same file as the original source."""
    with temporary_dir() as td:
        src = os.path.join(td, "exe.py")
        with safe_open(src, "w") as fp:
            fp.write(exe_main)

        def build_and_check(copy_mode):
            # type: (CopyMode.Value) -> None
            pb = PEXBuilder(copy_mode=copy_mode)
            path = pb.path()
            pb.add_source(src, "exe.py")

            path_clone = os.path.join(path, "__clone")
            pb.clone(into=path_clone)

            for root in path, path_clone:
                s1 = os.stat(src)
                s2 = os.stat(os.path.join(root, "exe.py"))
                # An identical (inode, device) pair means both paths resolve to the same
                # file on disk (os.stat follows symlinks).
                is_link = (s1[stat.ST_INO], s1[stat.ST_DEV]) == (s2[stat.ST_INO], s2[stat.ST_DEV])
                if copy_mode == CopyMode.COPY:
                    assert not is_link
                else:
                    # Since os.stat follows symlinks; so in CopyMode.SYMLINK, this just proves
                    # the symlink points to the original file. Going further and checking path
                    # and path_clone for the presence of a symlink (an os.islink test) is
                    # trickier since a Linux hardlink of a symlink produces a symlink whereas a
                    # macOS hardlink of a symlink produces a hardlink.
                    assert is_link

        build_and_check(CopyMode.LINK)
        build_and_check(CopyMode.COPY)
        build_and_check(CopyMode.SYMLINK)
def installed_wheel(wheel_path):
    # type: (str) -> Iterator[Distribution]
    """Install the wheel at `wheel_path` into a temporary directory and yield the
    resulting Distribution; the install directory is removed when the generator exits."""
    with temporary_dir() as target_dir:
        install_job = get_pip().spawn_install_wheel(wheel=wheel_path, install_dir=target_dir)
        install_job.wait()
        dist = DistributionHelper.distribution_from_path(target_dir)
        assert dist is not None, "Could not load a distribution from {}.".format(target_dir)
        yield dist
def test_dir_hash():
    # type: () -> None
    """CacheHelper.dir_hash reflects file paths/contents but ignores bytecode artifacts
    (.pyc files, timestamped .pyc variants, and everything under __pycache__)."""
    with temporary_dir() as tmp_dir:
        safe_mkdir(os.path.join(tmp_dir, "a", "b"))
        with safe_open(os.path.join(tmp_dir, "c", "d", "e.py"), "w") as fp:
            fp.write("contents1")
        with safe_open(os.path.join(tmp_dir, "f.py"), "w") as fp:
            fp.write("contents2")
        hash1 = CacheHelper.dir_hash(tmp_dir)

        # Renaming a directory changes the hash; renaming it back restores it.
        os.rename(os.path.join(tmp_dir, "c"), os.path.join(tmp_dir, "c-renamed"))
        assert hash1 != CacheHelper.dir_hash(tmp_dir)
        os.rename(os.path.join(tmp_dir, "c-renamed"), os.path.join(tmp_dir, "c"))
        assert hash1 == CacheHelper.dir_hash(tmp_dir)

        # Plain and timestamped .pyc files do not affect the hash.
        touch(os.path.join(tmp_dir, "c", "d", "e.pyc"))
        assert hash1 == CacheHelper.dir_hash(tmp_dir)
        touch(os.path.join(tmp_dir, "c", "d", "e.pyc.123456789"))
        assert hash1 == CacheHelper.dir_hash(tmp_dir)

        # Nothing under __pycache__ affects the hash — not even .py files.
        pycache_dir = os.path.join(tmp_dir, "__pycache__")
        safe_mkdir(pycache_dir)
        touch(os.path.join(pycache_dir, "f.pyc"))
        assert hash1 == CacheHelper.dir_hash(tmp_dir)
        touch(os.path.join(pycache_dir, "f.pyc.123456789"))
        assert hash1 == CacheHelper.dir_hash(tmp_dir)
        touch(os.path.join(pycache_dir, "f.py"))
        assert hash1 == CacheHelper.dir_hash(
            tmp_dir
        ), "All content under __pycache__ directories should be ignored."
def test_pex_run_custom_pex_useable():
    """A pex embedding an old `pex` distribution must expose that old version at runtime
    rather than the version of pex doing the building/running."""
    old_pex_version = '0.7.0'
    resolved_dists = resolve(
        ['pex=={}'.format(old_pex_version), 'setuptools==40.6.3'])
    dists = [resolved_dist.distribution for resolved_dist in resolved_dists]
    with temporary_dir() as temp_dir:
        from pex.version import __version__
        pex = write_simple_pex(
            temp_dir,
            exe_contents=textwrap.dedent("""
                import sys

                try:
                    # The 0.7.0 release embedded the version directly in setup.py so it should only be
                    # available via distribution metadata.
                    from pex.version import __version__
                    sys.exit(1)
                except ImportError:
                    import pkg_resources
                    dist = pkg_resources.working_set.find(pkg_resources.Requirement.parse('pex'))
                    print(dist.version)
                """),
            dists=dists,
        )
        process = PEX(pex.path()).run(blocking=False, stdout=subprocess.PIPE)
        stdout, _ = process.communicate()
        assert process.returncode == 0
        # The embedded pex's metadata version is reported, and it differs from ours.
        assert old_pex_version == stdout.strip().decode('utf-8')
        assert old_pex_version != __version__
def test_pex_run_conflicting_custom_setuptools_useable():
    """A pex embedding an older setuptools must use that setuptools' pkg_resources at
    runtime — modules present only in the newer vendored setuptools must be absent."""
    # Here we use our vendored, newer setuptools to build the pex which has an older setuptools
    # requirement. These setuptools dists have different pkg_resources APIs:
    # $ diff \
    #   <(zipinfo -1 setuptools-20.3.1-py2.py3-none-any.whl | grep pkg_resources/ | sort) \
    #   <(zipinfo -1 setuptools-40.6.2-py2.py3-none-any.whl | grep pkg_resources/ | sort)
    # 2a3,4
    # > pkg_resources/py31compat.py
    # > pkg_resources/_vendor/appdirs.py
    resolved_dists = resolve(['setuptools==20.3.1'])
    dists = [resolved_dist.distribution for resolved_dist in resolved_dists]
    with temporary_dir() as temp_dir:
        pex = write_simple_pex(
            temp_dir,
            exe_contents=textwrap.dedent("""
                import sys
                import pkg_resources

                try:
                    from pkg_resources import appdirs
                    sys.exit(1)
                except ImportError:
                    pass

                try:
                    from pkg_resources import py31compat
                    sys.exit(2)
                except ImportError:
                    pass
                """),
            dists=dists,
        )
        rc = PEX(pex.path()).run(env={'PEX_VERBOSE': '9'})
        assert rc == 0
def test_from_env():
    # type: () -> None
    """PexInfo.from_env maps PEX_* environment variables onto PexInfo fields."""
    with temporary_dir() as td:
        pex_root = os.path.realpath(os.path.join(td, "pex_root"))
        environ = dict(
            PEX_ROOT=pex_root,
            PEX_MODULE="entry:point",
            PEX_SCRIPT="script.sh",
            PEX_FORCE_LOCAL="true",
            PEX_UNZIP="true",
            PEX_INHERIT_PATH="prefer",
            PEX_IGNORE_ERRORS="true",
            PEX_ALWAYS_CACHE="true",
        )

        info = dict(
            pex_root=pex_root,
            entry_point="entry:point",
            script="script.sh",
            zip_safe=False,  # PEX_FORCE_LOCAL maps to the negation of zip_safe.
            unzip=True,
            # NOTE(review): PEX_INHERIT_PATH="prefer" is expected to compare equal to
            # inherit_path=True here — presumably assert_same_info/PexInfo normalizes
            # the value; confirm against the PexInfo implementation.
            inherit_path=True,
            ignore_errors=True,
            always_write_cache=True,
        )

        assert_same_info(PexInfo(info=info), PexInfo.from_env(env=Variables(environ=environ)))
def test_pex_paths():
    # Tests that PEX_PATH allows importing sources from the referenced pex.
    with named_temporary_file() as fake_stdout:
        with temporary_dir() as temp_dir:
            # pex1 provides foo_pkg.foo_module.foo_func; pex2's entry point reaches it
            # through bar_pkg, which only resolves if PEX_PATH exposes pex1.
            pex1_path = os.path.join(temp_dir, 'pex1')
            write_simple_pex(pex1_path,
                             sources=[('foo_pkg/__init__.py', ''),
                                      ('foo_pkg/foo_module.py', 'def foo_func():\n return "42"')])

            pex2_path = os.path.join(temp_dir, 'pex2')
            pex2 = write_simple_pex(
                pex2_path,
                'import sys; from bar_pkg.bar_module import bar_func; '
                'sys.stdout.write(bar_func()); sys.exit(0)',
                sources=[('bar_pkg/__init__.py', ''),
                         ('bar_pkg/bar_module.py',
                          'from foo_pkg.foo_module import foo_func\ndef bar_func():\n return foo_func()'
                          )])

            rc = PEX(pex2.path()).run(stdin=None, stdout=fake_stdout, env={'PEX_PATH': pex1_path})
            assert rc == 0

            fake_stdout.seek(0)
            assert fake_stdout.read() == b'42'
def test_iter_pth_paths(mock_exists):
    """iter_pth_paths parses .pth files: plain paths are resolved relative to the .pth's
    directory, `import` lines are executed (a failing import stops processing), duplicate
    paths collapse, and syntactically broken content yields nothing."""
    # Ensure path checking always returns True for dummy paths.
    mock_exists.return_value = True

    with temporary_dir() as tmpdir:
        in_tmp = lambda f: os.path.join(tmpdir, f)

        PTH_TEST_MAPPING = {
            # A mapping of .pth file content -> expected paths.
            '/System/Library/Frameworks/Python.framework/Versions/2.7/Extras/lib/python\n': [
                '/System/Library/Frameworks/Python.framework/Versions/2.7/Extras/lib/python'
            ],
            'relative_path\nrelative_path2\n\nrelative_path3': [
                in_tmp('relative_path'),
                in_tmp('relative_path2'),
                in_tmp('relative_path3')
            ],
            'duplicate_path\nduplicate_path': [in_tmp('duplicate_path')],
            'randompath\nimport nosuchmodule\n': [in_tmp('randompath')],
            'import sys\nfoo\n/bar/baz': [in_tmp('foo'), '/bar/baz'],
            'import nosuchmodule\nfoo': [],
            'import nosuchmodule\n': [],
            'import bad)syntax\n': [],
        }

        for i, pth_content in enumerate(PTH_TEST_MAPPING):
            pth_tmp_path = os.path.abspath(os.path.join(tmpdir, 'test%s.pth' % i))
            with open(pth_tmp_path, 'wb') as f:
                f.write(to_bytes(pth_content))
            assert sorted(PTH_TEST_MAPPING[pth_content]) == sorted(
                list(iter_pth_paths(pth_tmp_path)))
def yield_pex_builder(zip_safe=True, interpreter=None):
    """Yield a PEXBuilder pre-loaded with a freshly built 'p1' bdist.

    The builder's chroot lives in a temporary directory that is cleaned up when the
    generator exits.
    """
    with nested(
        temporary_dir(), make_bdist('p1', zip_safe=zip_safe, interpreter=interpreter)
    ) as (chroot, bdist):
        builder = PEXBuilder(path=chroot, interpreter=interpreter)
        builder.add_dist_location(bdist.location)
        yield builder
def test_osx_platform_intel_issue_523():
    """Regression test for https://github.com/pantsbuild/pex/issues/523: a pex built
    against a known-bad Apple system interpreter must stay isolated from that
    interpreter's site-packages setuptools unless PEX_INHERIT_PATH lets it leak in."""

    def bad_interpreter():
        return PythonInterpreter.from_binary(_KNOWN_BAD_APPLE_INTERPRETER)

    with temporary_dir() as cache:
        # We need to run the bad interpreter with a modern, non-Apple-Extras setuptools in order to
        # successfully install psutil; yield_pex_builder sets up the bad interpreter with our vendored
        # setuptools and wheel extras.
        with nested(yield_pex_builder(interpreter=bad_interpreter()),
                    temporary_filename()) as (pb, pex_file):
            for resolved_dist in resolver.resolve(['psutil==5.4.3'],
                                                  cache=cache,
                                                  interpreter=pb.interpreter):
                pb.add_dist_location(resolved_dist.distribution.location)
            pb.build(pex_file)

            # NB: We want PEX to find the bare bad interpreter at runtime.
            pex = PEX(pex_file, interpreter=bad_interpreter())

            def run(args, **env):
                # Run the pex verbosely with `env` overlaid on the ambient environment.
                pex_env = os.environ.copy()
                pex_env['PEX_VERBOSE'] = '1'
                pex_env.update(**env)
                process = pex.run(args=args,
                                  env=pex_env,
                                  blocking=False,
                                  stdout=subprocess.PIPE,
                                  stderr=subprocess.PIPE)
                stdout, stderr = process.communicate()
                return process.returncode, stdout, stderr

            returncode, _, stderr = run(['-c', 'import psutil'])
            assert 0 == returncode, (
                'Process failed with exit code {} and stderr:\n{}'.format(returncode, stderr))

            returncode, stdout, stderr = run(['-c', 'import pkg_resources'])
            # FIX: the format string has exactly two placeholders; the original passed
            # `.format(stdout, stdout, stderr)`, printing stdout twice and never
            # displaying stderr.
            assert 0 != returncode, (
                'Isolated pex process succeeded but should not have found pkg-resources:\n'
                'STDOUT:\n'
                '{}\n'
                'STDERR:\n'
                '{}'.format(stdout, stderr))

            returncode, stdout, stderr = run(
                [
                    '-c',
                    'import pkg_resources; print(pkg_resources.get_supported_platform())'
                ],
                # Let the bad interpreter site-packages setuptools leak in.
                PEX_INHERIT_PATH='1')
            assert 0 == returncode, (
                'Process failed with exit code {} and stderr:\n{}'.format(returncode, stderr))

            # Verify this worked along side the previously problematic pkg_resources-reported platform.
            release, _, _ = platform.mac_ver()
            major_minor = '.'.join(release.split('.')[:2])
            assert to_bytes('macosx-{}-intel'.format(major_minor)) == stdout.strip()
def test_iter_pth_paths(mock_exists):
    # type: (Any) -> None
    """iter_pth_paths parses .pth files: plain paths are resolved relative to the .pth's
    directory, `import` lines are executed (a failing import stops processing), duplicate
    paths collapse, and syntactically broken content yields nothing."""
    # Ensure path checking always returns True for dummy paths.
    mock_exists.return_value = True

    with temporary_dir() as tmpdir:
        in_tmp = lambda f: os.path.join(tmpdir, f)

        PTH_TEST_MAPPING = {
            # A mapping of .pth file content -> expected paths.
            "/System/Library/Frameworks/Python.framework/Versions/2.7/Extras/lib/python\n": [
                "/System/Library/Frameworks/Python.framework/Versions/2.7/Extras/lib/python"
            ],
            "relative_path\nrelative_path2\n\nrelative_path3": [
                in_tmp("relative_path"),
                in_tmp("relative_path2"),
                in_tmp("relative_path3"),
            ],
            "duplicate_path\nduplicate_path": [in_tmp("duplicate_path")],
            "randompath\nimport nosuchmodule\n": [in_tmp("randompath")],
            "import sys\nfoo\n/bar/baz": [in_tmp("foo"), "/bar/baz"],
            "import nosuchmodule\nfoo": [],
            "import nosuchmodule\n": [],
            "import bad)syntax\n": [],
        }  # type: Dict[str, List[str]]

        for i, pth_content in enumerate(PTH_TEST_MAPPING):
            pth_tmp_path = os.path.abspath(os.path.join(tmpdir, "test%s.pth" % i))
            with open(pth_tmp_path, "wb") as f:
                f.write(to_bytes(pth_content))
            assert sorted(PTH_TEST_MAPPING[pth_content]) == sorted(
                list(iter_pth_paths(pth_tmp_path))
            )
def test_empty_resolve():
    """Resolving an empty requirement list yields no distributions, with or without a cache."""
    assert local_resolve_multi([]) == []

    with temporary_dir() as cache_dir:
        assert local_resolve_multi([], cache=cache_dir) == []
def test_pyenv_shims(self):
    # type: () -> None
    """Pex's interpreter cache must track pyenv shim retargeting: a cached
    shim-to-binary association stays valid while the binary exists, and is re-resolved
    once the binary disappears."""
    py35, _, run_pyenv = ensure_python_distribution(PY35)
    py36 = ensure_python_interpreter(PY36)

    pyenv_root = str(run_pyenv(["root"]).strip())
    pyenv_shims = os.path.join(pyenv_root, "shims")

    def pyenv_global(*versions):
        run_pyenv(["global"] + list(versions))

    def assert_shim(shim_name, expected_binary_path):
        # Resolving the shim should yield the expected concrete interpreter binary.
        python = PythonInterpreter.from_binary(os.path.join(pyenv_shims, shim_name))
        assert expected_binary_path == python.binary

    with temporary_dir() as pex_root:
        with ENV.patch(PEX_ROOT=pex_root) as pex_env:
            with environment_as(PYENV_ROOT=pyenv_root, **pex_env):
                pyenv_global(PY35, PY36)
                assert_shim("python3", py35)

                pyenv_global(PY36, PY35)
                # The python3 shim is now pointing at python3.6 but the Pex cache has a valid
                # entry for the old python3.5 association (the interpreter still exists.)
                assert_shim("python3", py35)

                # The shim pointer is now invalid since python3.5 was uninstalled and so should
                # be re-read.
                py35_deleted = "{}.uninstalled".format(py35)
                os.rename(py35, py35_deleted)
                try:
                    assert_shim("python3", py36)
                finally:
                    # Restore the interpreter so later tests see an intact pyenv install.
                    os.rename(py35_deleted, py35)
def test_pex_paths():
    # Tests that PEX_PATH allows importing sources from the referenced pex.
    with named_temporary_file() as fake_stdout:
        with temporary_dir() as temp_dir:
            # pex1 provides foo_pkg.foo_module.foo_func; pex2's entry point reaches it
            # through bar_pkg, which only resolves if PEX_PATH exposes pex1.
            pex1_path = os.path.join(temp_dir, "pex1")
            write_simple_pex(
                pex1_path,
                sources=[
                    ("foo_pkg/__init__.py", ""),
                    ("foo_pkg/foo_module.py", 'def foo_func():\n return "42"'),
                ],
            )

            pex2_path = os.path.join(temp_dir, "pex2")
            pex2 = write_simple_pex(
                pex2_path,
                "import sys; from bar_pkg.bar_module import bar_func; "
                "sys.stdout.write(bar_func()); sys.exit(0)",
                sources=[
                    ("bar_pkg/__init__.py", ""),
                    (
                        "bar_pkg/bar_module.py",
                        "from foo_pkg.foo_module import foo_func\ndef bar_func():\n return foo_func()",
                    ),
                ],
            )

            rc = PEX(pex2.path()).run(stdin=None, stdout=fake_stdout, env={"PEX_PATH": pex1_path})
            assert rc == 0

            fake_stdout.seek(0)
            assert fake_stdout.read() == b"42"