def test_dir_hash():
    # type: () -> None
    with temporary_dir() as tmp_dir:
        safe_mkdir(os.path.join(tmp_dir, "a", "b"))
        with safe_open(os.path.join(tmp_dir, "c", "d", "e.py"), "w") as fp:
            fp.write("contents1")
        with safe_open(os.path.join(tmp_dir, "f.py"), "w") as fp:
            fp.write("contents2")
        hash1 = CacheHelper.dir_hash(tmp_dir)

        os.rename(os.path.join(tmp_dir, "c"), os.path.join(tmp_dir, "c-renamed"))
        assert hash1 != CacheHelper.dir_hash(tmp_dir)

        os.rename(os.path.join(tmp_dir, "c-renamed"), os.path.join(tmp_dir, "c"))
        assert hash1 == CacheHelper.dir_hash(tmp_dir)

        touch(os.path.join(tmp_dir, "c", "d", "e.pyc"))
        assert hash1 == CacheHelper.dir_hash(tmp_dir)
        touch(os.path.join(tmp_dir, "c", "d", "e.pyc.123456789"))
        assert hash1 == CacheHelper.dir_hash(tmp_dir)

        pycache_dir = os.path.join(tmp_dir, "__pycache__")
        safe_mkdir(pycache_dir)
        touch(os.path.join(pycache_dir, "f.pyc"))
        assert hash1 == CacheHelper.dir_hash(tmp_dir)
        touch(os.path.join(pycache_dir, "f.pyc.123456789"))
        assert hash1 == CacheHelper.dir_hash(tmp_dir)

        touch(os.path.join(pycache_dir, "f.py"))
        assert hash1 == CacheHelper.dir_hash(
            tmp_dir
        ), "All content under __pycache__ directories should be ignored."
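# For illustration only: a minimal sketch of how a directory hash with the properties
# asserted above could be computed. This is NOT pex's actual `CacheHelper.dir_hash`
# implementation; the filtering rules (skip *.pyc files, *.pyc.<suffix> temp files, and
# everything under __pycache__) are inferred from the test assertions.
import hashlib
import os


def dir_hash_sketch(directory):
    # type: (str) -> str
    digest = hashlib.sha1()
    for root, dirs, files in os.walk(directory):
        # Prune __pycache__ subtrees and walk in a stable order.
        dirs[:] = sorted(d for d in dirs if d != "__pycache__")
        for f in sorted(files):
            if ".pyc" in f:  # Covers both foo.pyc and temporary foo.pyc.123456789 files.
                continue
            rel_path = os.path.relpath(os.path.join(root, f), directory)
            digest.update(rel_path.encode("utf-8"))  # Names matter: renames change the hash.
            with open(os.path.join(root, f), "rb") as fp:
                digest.update(fp.read())  # Contents matter too.
    return digest.hexdigest()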
def pythonpath_isolation_test():
    # type: () -> Iterator[PythonpathIsolationTest]
    with temporary_dir() as temp_dir:
        pythonpath = os.path.join(temp_dir, "one")
        with safe_open(os.path.join(pythonpath, "foo.py"), "w") as fp:
            fp.write("BAR = 42")
        with safe_open(os.path.join(pythonpath, "bar.py"), "w") as fp:
            fp.write("FOO = 137")

        dist_content = {
            "setup.py": textwrap.dedent(
                """
                from setuptools import setup

                setup(
                    name='foo',
                    version='0.0.0',
                    zip_safe=True,
                    packages=['foo'],
                    install_requires=[],
                )
                """
            ),
            "foo/__init__.py": "BAR = 137",
        }

        with temporary_content(dist_content) as project_dir:
            installer = WheelBuilder(project_dir)
            foo_bdist = installer.bdist()

            exe_contents = textwrap.dedent(
                """
                import sys

                try:
                    import bar
                except ImportError:
                    import collections
                    bar = collections.namedtuple('bar', ['FOO'])(None)

                import foo

                sys.stdout.write("foo.BAR={} bar.FOO={}".format(foo.BAR, bar.FOO))
                """
            )

            yield PythonpathIsolationTest(
                pythonpath=pythonpath, dists=[foo_bdist], exe=exe_contents
            )
def build(self, filename, bytecode_compile=True, deterministic_timestamp=False):
    """Package the PEX into a zipfile.

    :param filename: The filename where the PEX should be stored.
    :param bytecode_compile: If True, precompile .py files into .pyc files.
    :param deterministic_timestamp: If True, will use our hardcoded time for zipfile timestamps.

    If the PEXBuilder is not yet frozen, it will be frozen by ``build``. This renders the
    PEXBuilder immutable.
    """
    if not self._frozen:
        self.freeze(bytecode_compile=bytecode_compile)
    tmp_zip = filename + "~"
    try:
        os.unlink(tmp_zip)
        self._logger.warning(
            "Previous binary unexpectedly exists, cleaning: {}".format(tmp_zip)
        )
    except OSError:
        # The expectation is that the file does not exist, so continue.
        pass
    with safe_open(tmp_zip, "ab") as pexfile:
        assert os.path.getsize(pexfile.name) == 0
        pexfile.write(to_bytes("{}\n".format(self._shebang)))
    with TRACER.timed("Zipping PEX file."):
        self._chroot.zip(tmp_zip, mode="a", deterministic_timestamp=deterministic_timestamp)
    if os.path.exists(filename):
        os.unlink(filename)
    os.rename(tmp_zip, filename)
    chmod_plus_x(filename)
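# Typical usage of build(), sketched from the PEXBuilder calls exercised by the tests in
# this section (add_source / set_entry_point); the file and output names are made up.
# build() freezes an unfrozen builder first, writes the shebang line followed by the
# zipped chroot to a "<filename>~" temp file, then renames it into place and marks it
# executable.
#
#     pb = PEXBuilder()
#     pb.add_source("src/main.py", "main.py")
#     pb.set_entry_point("main")
#     pb.build("app.pex", deterministic_timestamp=True)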
def test_pex_builder_copy_or_link():
    # type: () -> None
    with temporary_dir() as td:
        src = os.path.join(td, "exe.py")
        with safe_open(src, "w") as fp:
            fp.write(exe_main)

        def build_and_check(copy_mode):
            # type: (CopyMode.Value) -> None
            pb = PEXBuilder(copy_mode=copy_mode)
            path = pb.path()
            pb.add_source(src, "exe.py")

            path_clone = os.path.join(path, "__clone")
            pb.clone(into=path_clone)

            for root in path, path_clone:
                s1 = os.stat(src)
                s2 = os.stat(os.path.join(root, "exe.py"))
                is_link = (s1[stat.ST_INO], s1[stat.ST_DEV]) == (s2[stat.ST_INO], s2[stat.ST_DEV])
                if copy_mode == CopyMode.COPY:
                    assert not is_link
                else:
                    # Since os.stat follows symlinks, in CopyMode.SYMLINK this just proves
                    # the symlink points to the original file. Going further and checking
                    # path and path_clone for the presence of a symlink (an os.islink test)
                    # is trickier since a Linux hardlink of a symlink produces a symlink
                    # whereas a macOS hardlink of a symlink produces a hardlink.
                    assert is_link

        build_and_check(CopyMode.LINK)
        build_and_check(CopyMode.COPY)
        build_and_check(CopyMode.SYMLINK)
def pythonpath_isolation_test():
    with temporary_dir() as temp_dir:
        pythonpath = os.path.join(temp_dir, 'one')
        with safe_open(os.path.join(pythonpath, 'foo.py'), 'w') as fp:
            fp.write('BAR = 42')
        with safe_open(os.path.join(pythonpath, 'bar.py'), 'w') as fp:
            fp.write('FOO = 137')

        dist_content = {
            'setup.py': textwrap.dedent("""
                from setuptools import setup

                setup(
                    name='foo',
                    version='0.0.0',
                    zip_safe=True,
                    packages=['foo'],
                    install_requires=[],
                )
            """),
            'foo/__init__.py': 'BAR = 137',
        }

        with temporary_content(dist_content) as project_dir:
            installer = WheelInstaller(project_dir)
            foo_bdist = DistributionHelper.distribution_from_path(installer.bdist())

            exe_contents = textwrap.dedent("""
                import sys

                try:
                    import bar
                except ImportError:
                    import collections
                    bar = collections.namedtuple('bar', ['FOO'])(None)

                import foo

                sys.stdout.write("foo.BAR={} bar.FOO={}".format(foo.BAR, bar.FOO))
            """)

            yield PythonpathIsolationTest(
                pythonpath=pythonpath, dists=[foo_bdist], exe=exe_contents)
def test_safe_open_abs(temporary_working_dir):
    # type: (str) -> None
    abs_path = os.path.join(temporary_working_dir, "path")
    with safe_open(abs_path, "w") as fp:
        fp.write("contents")

    with open(abs_path) as fp:
        assert "contents" == fp.read()
def test_venv_multiprocessing_issues_1236(
    tmpdir,  # type: Any
    start_method,  # type: Optional[str]
):
    # type: (...) -> None
    src = os.path.join(str(tmpdir), "src")
    with safe_open(os.path.join(src, "foo.py"), "w") as fp:
        fp.write(
            dedent(
                """\
                def bar():
                    print('hello')
                """
            )
        )
    with safe_open(os.path.join(src, "main.py"), "w") as fp:
        fp.write(
            dedent(
                """\
                import multiprocessing
                from foo import bar

                if __name__ == '__main__':
                    if {start_method!r}:
                        multiprocessing.set_start_method({start_method!r})
                    p = multiprocessing.Process(target=bar)
                    p.start()
                """.format(start_method=start_method)
            )
        )

    pex_file = os.path.join(str(tmpdir), "mp.pex")
    result = run_pex_command(args=["-D", src, "-m", "main", "-o", pex_file, "--include-tools"])
    result.assert_success()

    # Confirm multiprocessing works via normal PEX file execution.
    output = subprocess.check_output(args=[pex_file])
    assert "hello" == output.decode("utf-8").strip()

    # Confirm multiprocessing works via the `pex` venv script.
    venv = os.path.join(str(tmpdir), "venv")
    subprocess.check_call(args=[pex_file, "venv", venv], env=make_env(PEX_TOOLS=True))
    output = subprocess.check_output(args=[os.path.join(venv, "pex")])
    assert "hello" == output.decode("utf-8").strip()
def test_safe_open_relative(temporary_working_dir):
    # type: (str) -> None
    rel_path = "rel_path"
    with safe_open(rel_path, "w") as fp:
        fp.write("contents")

    abs_path = os.path.join(temporary_working_dir, rel_path)
    with open(abs_path) as fp:
        assert "contents" == fp.read()
def pex():
    # type: () -> Iterator[str]
    with temporary_dir() as tmpdir:
        pex_path = os.path.join(tmpdir, "example.pex")

        src = os.path.join(tmpdir, "src")
        with safe_open(os.path.join(src, "data", "url.txt"), "w") as fp:
            fp.write("https://example.com")
        with safe_open(os.path.join(src, "main.py"), "w") as fp:
            fp.write(
                dedent(
                    """\
                    from __future__ import print_function

                    import os
                    import sys

                    import requests


                    def do():
                        with open(os.path.join(os.path.dirname(__file__), "data", "url.txt")) as fp:
                            url = fp.read().strip()
                        print("Fetching from {} ...".format(url))
                        print(requests.get(url).text, file=sys.stderr)
                    """
                )
            )

        result = run_pex_command(
            args=[
                "-D",
                src,
                "requests==2.25.1",
                "-e",
                "main:do",
                "--interpreter-constraint",
                "CPython>=2.7,<4",
                "-o",
                pex_path,
                "--include-tools",
            ],
        )
        result.assert_success()
        yield os.path.realpath(pex_path)
def test_venv_entrypoint_function_exit_code_issue_1241(tmpdir):
    # type: (Any) -> None
    pex_file = os.path.join(str(tmpdir), "ep-function.pex")
    src = os.path.join(str(tmpdir), "src")
    with safe_open(os.path.join(src, "module.py"), "w") as fp:
        fp.write(
            dedent(
                """\
                import sys


                def target():
                    args = sys.argv[1:]
                    if args:
                        exit = args[0]
                        try:
                            return int(exit)
                        except ValueError:
                            return exit
                """
            )
        )
    result = run_pex_command(
        args=["-D", src, "-e", "module:target", "--include-tools", "-o", pex_file]
    )
    result.assert_success()

    venv = os.path.join(str(tmpdir), "ep-function.venv")
    subprocess.check_call(args=[pex_file, "venv", venv], env=make_env(PEX_TOOLS=1))
    venv_pex = os.path.join(venv, "pex")

    assert 0 == subprocess.Popen(args=[venv_pex]).wait()

    def assert_venv_process(
        args,  # type: List[str]
        expected_returncode,  # type: int
        expected_stdout="",  # type: str
        expected_stderr="",  # type: str
    ):
        # type: (...) -> None
        process = subprocess.Popen(
            args=[venv_pex] + args, stdout=subprocess.PIPE, stderr=subprocess.PIPE
        )
        stdout, stderr = process.communicate()
        assert expected_returncode == process.returncode
        assert expected_stdout == stdout.decode("utf-8")
        assert expected_stderr == stderr.decode("utf-8")

    assert_venv_process(args=["bob"], expected_returncode=1, expected_stderr="bob\n")
    assert_venv_process(args=["42"], expected_returncode=42)
@classmethod
@contextmanager
def output(
    cls,
    options,  # type: Namespace
    binary=False,  # type: bool
):
    # type: (...) -> Iterator[IO]
    if cls.is_stdout(options):
        stdout = getattr(sys.stdout, "buffer", sys.stdout) if binary else sys.stdout
        yield stdout
    else:
        with safe_open(options.output, mode="wb" if binary else "w") as out:
            yield out
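# A hedged usage sketch for output() above: it yields exactly one IO object, selected by
# is_stdout(options), so callers use it as a context manager. `MyCommand` and the argparse
# Namespace contents are hypothetical names for illustration.
#
#     with MyCommand.output(options, binary=True) as out:
#         out.write(b"payload")  # Goes to options.output, or stdout's buffer when selected.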
def test_pex_builder_add_source_relpath_issues_1192(
    tmp_chroot,  # type: str
    copy_mode,  # type: CopyMode.Value
):
    # type: (...) -> None
    pb = PEXBuilder(copy_mode=copy_mode)
    with safe_open("src/main.py", "w") as fp:
        fp.write("import sys; sys.exit(42)")
    pb.add_source("src/main.py", "main.py")
    pb.set_entry_point("main")
    pb.build("test.pex")

    process = Executor.open_process(cmd=[os.path.abspath("test.pex")])
    process.wait()
    assert 42 == process.returncode
def test_parse_requirements_failure_bad_requirement(chroot):
    # type: (str) -> None
    other_requirement_file = os.path.realpath(os.path.join(chroot, "other-requirements.txt"))
    with safe_open(other_requirement_file, "w") as fp:
        fp.write(
            dedent(
                """\
                GoodRequirement
                # A comment.

                AnotherRequirement
                # Another comment.

                BadRequirement\\
                [extra, another]; \\
                bad_marker == "2.7" \\
                 --global-option=foo # End of line comment.
                """
            )
        )

    req_iter = parse_requirements(Source.from_text("-r other-requirements.txt"))

    parsed_requirement = next(req_iter)
    assert isinstance(parsed_requirement, PyPIRequirement)
    assert "GoodRequirement" == parsed_requirement.requirement.project_name

    parsed_requirement = next(req_iter)
    assert isinstance(parsed_requirement, PyPIRequirement)
    assert "AnotherRequirement" == parsed_requirement.requirement.project_name

    with pytest.raises(ParseError) as exc_info:
        next(req_iter)
    assert exc_info.value.logical_line == LogicalLine(
        raw_text=(
            "BadRequirement\\\n"
            "[extra, another]; \\\n"
            'bad_marker == "2.7" \\\n'
            " --global-option=foo # End of line comment.\n"
        ),
        processed_text='BadRequirement[extra, another]; bad_marker == "2.7" --global-option=foo',
        source=other_requirement_file,
        start_line=7,
        end_line=10,
    )
def build(self, filename, bytecode_compile=True, deterministic_timestamp=False):
    """Package the PEX into a zipfile.

    :param filename: The filename where the PEX should be stored.
    :param bytecode_compile: If True, precompile .py files into .pyc files.
    :param deterministic_timestamp: If True, will use our hardcoded time for zipfile timestamps.

    If the PEXBuilder is not yet frozen, it will be frozen by ``build``. This renders the
    PEXBuilder immutable.
    """
    if not self._frozen:
        self.freeze(bytecode_compile=bytecode_compile)
    tmp_zip = filename + "~"
    try:
        os.unlink(tmp_zip)
        self._logger.warning(
            "Previous binary unexpectedly exists, cleaning: {}".format(tmp_zip)
        )
    except OSError:
        # The expectation is that the file does not exist, so continue.
        pass
    with safe_open(tmp_zip, "ab") as pexfile:
        assert os.path.getsize(pexfile.name) == 0
        pexfile.write(to_bytes("{}\n".format(self._shebang)))
    with TRACER.timed("Zipping PEX file."):
        self._chroot.zip(
            tmp_zip,
            mode="a",
            deterministic_timestamp=deterministic_timestamp,
            # When configured with a `copy_mode` of `CopyMode.SYMLINK`, we symlink
            # distributions as pointers to installed wheel directories in
            # ~/.pex/installed_wheels/... Since those installed wheels reside in a shared
            # cache, they can be in use by other processes, and so their code may be in the
            # process of being bytecode-compiled as we attempt to zip up our chroot.
            # Bytecode compilation produces ephemeral temporary pyc files that we should
            # avoid copying since they are useless and inherently racy.
            exclude_file=is_pyc_temporary_file,
        )
    if os.path.exists(filename):
        os.unlink(filename)
    os.rename(tmp_zip, filename)
    chmod_plus_x(filename)
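# `is_pyc_temporary_file` is referenced above but not defined in this section. A minimal
# sketch of such a predicate, assuming the `<name>.pyc.<digits>` temporary naming
# convention that test_dir_hash exercises with "e.pyc.123456789"; this is an illustration,
# not the actual pex implementation.
import re

_PYC_TMP_FILE_RE = re.compile(r"\.pyc\.[0-9]+$")


def is_pyc_temporary_file_sketch(file_path):
    # type: (str) -> bool
    # Matches ephemeral files written during concurrent bytecode compilation and later
    # renamed to their final *.pyc names.
    return _PYC_TMP_FILE_RE.search(file_path) is not None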
def test_relocatable_venv(tmpdir):
    # type: (Any) -> None
    pex_file = os.path.join(str(tmpdir), "relocatable.pex")
    src = os.path.join(str(tmpdir), "src")
    with safe_open(os.path.join(src, "main.py"), "w") as fp:
        fp.write(
            dedent(
                """\
                import sys

                from colors import blue

                print(blue(sys.executable))
                """
            )
        )
    result = run_pex_command(
        args=["-D", src, "ansicolors==1.1.8", "-m", "main", "--include-tools", "-o", pex_file]
    )
    result.assert_success()

    venv = os.path.join(str(tmpdir), "relocatable.venv")
    subprocess.check_call(args=[pex_file, "venv", venv], env=make_env(PEX_TOOLS=1))
    subprocess.check_call(args=[os.path.join(venv, "pex")])

    relocated_relpath = "relocated.venv"
    relocated_venv = os.path.join(str(tmpdir), relocated_relpath)

    # Since the venv pex script contains a shebang with an absolute path to the venv python
    # interpreter, a move of the venv makes the script un-runnable directly.
    shutil.move(venv, relocated_venv)
    with pytest.raises(OSError) as exec_info:
        subprocess.check_call(args=[os.path.join(relocated_venv, "pex")])
    assert errno.ENOENT == exec_info.value.errno

    # But we should be able to run the script using the moved venv's interpreter.
    subprocess.check_call(
        args=[
            os.path.join(relocated_relpath, "bin", "python"),
            os.path.join(relocated_relpath, "pex"),
        ],
        cwd=str(tmpdir),
    )
def test_venv_symlinked_source_issues_1239(tmpdir):
    # type: (Any) -> None
    src = os.path.join(str(tmpdir), "src")
    main = os.path.join(src, "main.py")
    with safe_open(main, "w") as fp:
        fp.write("import sys; sys.exit(42)")

    pex_builder = PEXBuilder(copy_mode=CopyMode.SYMLINK)
    pex_builder.set_executable(main)
    pex_file = os.path.join(str(tmpdir), "a.pex")
    pex_builder.build(pex_file, bytecode_compile=False)
    assert 42 == subprocess.Popen(args=[pex_file]).wait()

    venv = os.path.join(str(tmpdir), "a.venv")
    subprocess.check_call(
        args=[sys.executable, "-m", "pex.tools", pex_builder.path(), "venv", venv]
    )

    venv_pex = os.path.join(venv, "pex")
    shutil.rmtree(src)
    assert 42 == subprocess.Popen(args=[venv_pex]).wait()
def fetch(self, link, into=None):
    """Fetch the binary content associated with the link and write to a file.

    :param link: The :class:`Link` to fetch.
    :keyword into: If specified, write into the directory ``into``. If ``None``, creates a new
        temporary directory that persists for the duration of the interpreter.
    """
    target = os.path.join(into or safe_mkdtemp(), link.filename)

    if os.path.exists(target):
        # Assume that if the local file already exists, it is safe to use.
        return target

    with TRACER.timed('Fetching %s' % link.url, V=2):
        target_tmp = '%s.%s' % (target, uuid.uuid4())
        with contextlib.closing(self.open(link)) as in_fp:
            with safe_open(target_tmp, 'wb') as out_fp:
                shutil.copyfileobj(in_fp, out_fp)

    os.rename(target_tmp, target)
    return target
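# fetch() above relies on the write-to-temp-then-rename idiom: stream the download into a
# uuid-suffixed sibling file, then os.rename() it into place. On POSIX, a rename within one
# filesystem is atomic, so concurrent readers of `target` see either nothing or complete
# content, never a partial download. The same idiom in isolation, standard library only
# (`atomic_fetch` is a made-up name for this sketch):
import contextlib
import os
import shutil
import uuid

try:
    from urllib.request import urlopen  # Python 3
except ImportError:
    from urllib2 import urlopen  # type: ignore  # Python 2


def atomic_fetch(url, target):
    # type: (str, str) -> str
    target_tmp = "%s.%s" % (target, uuid.uuid4())
    with contextlib.closing(urlopen(url)) as in_fp:
        with open(target_tmp, "wb") as out_fp:
            shutil.copyfileobj(in_fp, out_fp)
    os.rename(target_tmp, target)  # Publish atomically.
    return target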
@classmethod
def cache_distribution(cls, zf, source, target_dir):
    """Possibly cache an egg from within a zipfile into target_cache.

    Given a zipfile handle and a filename corresponding to an egg distribution within
    that zip, maybe write to the target cache and return a Distribution.
    """
    dependency_basename = os.path.basename(source)
    if not os.path.exists(target_dir):
        target_dir_tmp = target_dir + '.' + uuid.uuid4().hex
        for name in zf.namelist():
            if name.startswith(source) and not name.endswith('/'):
                # Strip off prefix + '/'.
                target_name = os.path.join(dependency_basename, name[len(source) + 1:])
                with contextlib.closing(zf.open(name)) as zi:
                    with safe_open(os.path.join(target_dir_tmp, target_name), 'wb') as fp:
                        shutil.copyfileobj(zi, fp)

        rename_if_empty(target_dir_tmp, target_dir)

    dist = DistributionHelper.distribution_from_path(target_dir)
    assert dist is not None, 'Failed to cache distribution %s' % source
    return dist
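# `rename_if_empty` is not defined in this section. Judging by its name and use above, it
# publishes the freshly extracted temp directory as `target_dir` while tolerating a
# concurrent process winning the race to create it. A hedged sketch of that idea only,
# not pex's actual implementation:
import errno
import os
import shutil


def rename_if_empty_sketch(src, dest):
    # type: (str, str) -> None
    try:
        os.rename(src, dest)
    except OSError as e:
        # EEXIST/ENOTEMPTY means another process already populated dest; discard our copy
        # and use the winner's.
        if e.errno not in (errno.EEXIST, errno.ENOTEMPTY):
            raise
        shutil.rmtree(src)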
def write_source(path, valid=True):
    with safe_open(path, 'wb') as fp:
        fp.write(to_bytes('basename = %r\n' % os.path.basename(path)))
        if not valid:
            fp.write(to_bytes('invalid!\n'))
def test_issues_789_demo():
    # type: () -> None
    tmpdir = safe_mkdtemp()
    pex_project_dir = (
        subprocess.check_output(["git", "rev-parse", "--show-toplevel"]).decode("utf-8").strip()
    )

    # 1. Imagine we've pre-resolved the requirements needed in our wheel house.
    requirements = [
        "ansicolors",
        "isort",
        "setuptools",  # N.B.: isort doesn't declare its setuptools dependency.
    ]

    wheelhouse = os.path.join(tmpdir, "wheelhouse")
    get_pip().spawn_download_distributions(
        download_dir=wheelhouse, requirements=requirements
    ).wait()

    # 2. Also imagine this configuration is passed to a tool (PEX or a wrapper as in this test
    # example) via the CLI or other configuration data sources. For example, Pants has a
    # `PythonSetup` that combines with BUILD target data to get you this sort of configuration
    # info outside pex.
    resolver_settings = dict(
        indexes=[],  # Turn off pypi.
        find_links=[wheelhouse],  # Use our wheel house.
        build=False,  # Use only pre-built wheels.
    )  # type: Dict[str, Any]

    # 3. That same configuration was used to build a standard pex:
    resolver_args = []
    if len(resolver_settings["indexes"]) == 0:
        resolver_args.append("--no-index")
    else:
        for index in resolver_settings["indexes"]:
            resolver_args.extend(["--index", index])
    for repo in resolver_settings["find_links"]:
        resolver_args.extend(["--find-links", repo])
    resolver_args.append("--build" if resolver_settings["build"] else "--no-build")

    project_code_dir = os.path.join(tmpdir, "project_code_dir")
    with safe_open(os.path.join(project_code_dir, "colorized_isort.py"), "w") as fp:
        fp.write(
            dedent(
                """\
                import colors
                import os
                import subprocess
                import sys


                def run():
                    env = os.environ.copy()
                    env.update(PEX_MODULE='isort')
                    isort_process = subprocess.Popen(
                        sys.argv,
                        env=env,
                        stdout=subprocess.PIPE,
                        stderr=subprocess.PIPE,
                    )
                    stdout, stderr = isort_process.communicate()
                    print(colors.green(stdout.decode('utf-8')))
                    print(colors.red(stderr.decode('utf-8')))
                    sys.exit(isort_process.returncode)
                """
            )
        )

    colorized_isort_pex = os.path.join(tmpdir, "colorized_isort.pex")
    args = [
        "--sources-directory",
        project_code_dir,
        "--entry-point",
        "colorized_isort:run",
        "--output-file",
        colorized_isort_pex,
    ]
    result = run_pex_command(args + resolver_args + requirements)
    result.assert_success()

    # 4. Now the tool builds a "dehydrated" PEX using the standard pex + resolve settings as the
    # template.
    ptex_cache = os.path.join(tmpdir, ".ptex")

    colorized_isort_pex_info = PexInfo.from_pex(colorized_isort_pex)
    colorized_isort_pex_info.pex_root = ptex_cache

    # Force the standard pex to extract its code. An external tool like Pants would already know
    # the original source code file paths, but we need to discover them here.
    colorized_isort_pex_code_dir = os.path.join(
        colorized_isort_pex_info.zip_unsafe_cache, colorized_isort_pex_info.code_hash
    )
    env = os.environ.copy()
    env.update(PEX_ROOT=ptex_cache, PEX_INTERPRETER="1", PEX_FORCE_LOCAL="1")
    subprocess.check_call([colorized_isort_pex, "-c", ""], env=env)

    colorized_isort_ptex_code_dir = os.path.join(tmpdir, "colorized_isort_ptex_code_dir")
    safe_mkdir(colorized_isort_ptex_code_dir)

    code = []
    for root, dirs, files in os.walk(colorized_isort_pex_code_dir):
        rel_root = os.path.relpath(root, colorized_isort_pex_code_dir)
        for f in files:
            # Don't ship compiled python from the code extract above; the target interpreter
            # will not match ours in general.
            if f.endswith(".pyc"):
                continue
            rel_path = os.path.normpath(os.path.join(rel_root, f))
            # The root __main__.py is special for any zipapp including pex, let it write its
            # own __main__.py bootstrap. Similarly, PEX-INFO is special to pex and we want the
            # PEX-INFO for the ptex pex, not the pex being ptexed.
            if rel_path in ("__main__.py", PexInfo.PATH):
                continue
            os.symlink(
                os.path.join(root, f), os.path.join(colorized_isort_ptex_code_dir, rel_path)
            )
            code.append(rel_path)

    ptex_code_dir = os.path.join(tmpdir, "ptex_code_dir")

    ptex_info = dict(code=code, resolver_settings=resolver_settings)
    with safe_open(os.path.join(ptex_code_dir, "PTEX-INFO"), "w") as fp:
        json.dump(ptex_info, fp)

    with safe_open(os.path.join(ptex_code_dir, "IPEX-INFO"), "w") as fp:
        fp.write(colorized_isort_pex_info.dump())

    with safe_open(os.path.join(ptex_code_dir, "ptex.py"), "w") as fp:
        fp.write(
            dedent(
                """\
                import json
                import os
                import sys

                from pex import resolver
                from pex.common import open_zip
                from pex.pex_builder import PEXBuilder
                from pex.pex_info import PexInfo
                from pex.util import CacheHelper
                from pex.variables import ENV

                self = sys.argv[0]
                ipex_file = '{}.ipex'.format(os.path.splitext(self)[0])

                if not os.path.isfile(ipex_file):
                    print('Hydrating {} to {}'.format(self, ipex_file))

                    ptex_pex_info = PexInfo.from_pex(self)
                    code_root = os.path.join(ptex_pex_info.zip_unsafe_cache, ptex_pex_info.code_hash)
                    with open_zip(self) as zf:
                        # Populate the pex with the pinned requirements and distribution names & hashes.
                        ipex_info = PexInfo.from_json(zf.read('IPEX-INFO'))
                        ipex_builder = PEXBuilder(pex_info=ipex_info)

                        # Populate the pex with the needed code.
                        ptex_info = json.loads(zf.read('PTEX-INFO').decode('utf-8'))
                        for path in ptex_info['code']:
                            ipex_builder.add_source(os.path.join(code_root, path), path)

                    # Perform a fully pinned intransitive resolve to hydrate the install cache (not
                    # the pex!).
                    resolver_settings = ptex_info['resolver_settings']
                    resolved_distributions = resolver.resolve(
                        requirements=[str(req) for req in ipex_info.requirements],
                        cache=ipex_info.pex_root,
                        transitive=False,
                        **resolver_settings
                    )

                    ipex_builder.build(ipex_file)

                os.execv(ipex_file, [ipex_file] + sys.argv[1:])
                """
            )
        )

    colorized_isort_ptex = os.path.join(tmpdir, "colorized_isort.ptex")

    result = run_pex_command(
        [
            "--not-zip-safe",
            "--always-write-cache",
            "--pex-root",
            ptex_cache,
            # This is unicode in Py2, whereas everything else is bytes. That's fine.
            pex_project_dir,  # type: ignore[list-item]
            "--sources-directory",
            ptex_code_dir,
            "--sources-directory",
            colorized_isort_ptex_code_dir,
            "--entry-point",
            "ptex",
            "--output-file",
            colorized_isort_ptex,
        ]
    )
    result.assert_success()

    subprocess.check_call([colorized_isort_ptex, "--version"])
    with pytest.raises(CalledProcessError):
        subprocess.check_call([colorized_isort_ptex, "--not-a-flag"])

    safe_rmtree(ptex_cache)

    # The dehydrated pex now fails since it lost its hydration from the cache.
    with pytest.raises(CalledProcessError):
        subprocess.check_call([colorized_isort_ptex, "--version"])
def test_compile(tmpdir):
    # type: (Any) -> None
    def collect_files(
        root_dir,  # type: str
        extension,  # type: str
    ):
        # type: (...) -> Set[str]
        return {
            os.path.relpath(os.path.join(root, f), root_dir)
            for root, _, files in os.walk(root_dir, followlinks=False)
            for f in files
            if f.endswith(extension)
        }

    pex_file = os.path.join(str(tmpdir), "compile.pex")
    src = os.path.join(str(tmpdir), "src")
    with safe_open(os.path.join(src, "main.py"), "w") as fp:
        fp.write(
            dedent(
                """\
                from colors import yellow

                print(yellow("Slartibartfast"))
                """
            )
        )
    result = run_pex_command(
        args=["-D", src, "ansicolors==1.0.2", "-m", "main", "--include-tools", "-o", pex_file]
    )
    result.assert_success()

    venv = os.path.join(str(tmpdir), "venv")
    subprocess.check_call(args=[pex_file, "venv", venv], env=make_env(PEX_TOOLS=1))

    # N.B.: The right way to discover the site-packages dir is via site.getsitepackages().
    # Unfortunately we use an old version of virtualenv to create PyPy and CPython 2.7 venvs
    # and it does not add a getsitepackages function to site.py; so we cheat.
    if IS_PYPY:
        site_packages = "site-packages"
    else:
        site_packages = os.path.join(
            "lib",
            "python{}.{}".format(sys.version_info[0], sys.version_info[1]),
            "site-packages",
        )

    # Ensure we have at least the basic direct dependency python files we expect.
    venv_py_files = collect_files(venv, ".py")
    assert os.path.join(site_packages, "main.py") in venv_py_files
    assert os.path.join(site_packages, "colors.py") in venv_py_files
    assert "__main__.py" in venv_py_files

    compile_venv = os.path.join(str(tmpdir), "compile.venv")
    subprocess.check_call(
        args=[pex_file, "venv", "--compile", compile_venv], env=make_env(PEX_TOOLS=1)
    )
    # Ensure all original py files have a compiled counterpart.
    for py_file in venv_py_files:
        if PY2:
            assert os.path.exists(os.path.join(compile_venv, py_file + "c"))
        else:
            name, _ = os.path.splitext(os.path.basename(py_file))
            assert os.path.exists(
                os.path.join(
                    compile_venv,
                    os.path.dirname(py_file),
                    "__pycache__",
                    "{name}.{cache_tag}.pyc".format(
                        name=name, cache_tag=sys.implementation.cache_tag
                    ),
                )
            )

    compile_venv_pyc_files = collect_files(compile_venv, ".pyc")
    subprocess.check_call(args=[os.path.join(compile_venv, "pex")])
    assert compile_venv_pyc_files == collect_files(
        compile_venv, ".pyc"
    ), "Expected no new compiled python files."
def _extract(
    self,
    pex,  # type: PEX
    options,  # type: Namespace
):
    # type: (...) -> Result
    if not options.serve and not options.dest_dir:
        return Error("Specify a --find-links directory to extract wheels to.")

    dest_dir = (
        os.path.abspath(os.path.expanduser(options.dest_dir))
        if options.dest_dir
        else safe_mkdtemp()
    )
    safe_mkdir(dest_dir)

    if options.sources:
        self._extract_sdist(pex, dest_dir)

    def spawn_extract(distribution):
        # type: (Distribution) -> SpawnedJob[Text]
        job = spawn_python_job(
            args=["-m", "wheel", "pack", "--dest-dir", dest_dir, distribution.location],
            interpreter=pex.interpreter,
            expose=["wheel"],
            stdout=subprocess.PIPE,
        )
        return SpawnedJob.stdout(
            job, result_func=lambda out: "{}: {}".format(distribution, out.decode())
        )

    with self._distributions_output(pex, options) as (distributions, output):
        errors = []
        for result in execute_parallel(distributions, spawn_extract, error_handler=Retain()):
            if isinstance(result, tuple):
                distribution, error = result
                errors.append(distribution)
                output.write(
                    "Failed to build a wheel for {distribution}: {error}\n".format(
                        distribution=distribution, error=error
                    )
                )
            else:
                output.write(result)
        if errors:
            return Error(
                "Failed to build wheels for {count} {distributions}.".format(
                    count=len(errors), distributions=pluralize(errors, "distribution")
                )
            )

        if not options.serve:
            return Ok()

        repo = FindLinksRepo.serve(
            interpreter=pex.interpreter, port=options.port, directory=dest_dir
        )
        output.write(
            "Serving find-links repo of {pex} via {find_links} at http://localhost:{port}\n".format(
                pex=os.path.normpath(pex.path()), find_links=dest_dir, port=repo.port
            )
        )
        if options.pid_file:
            with safe_open(options.pid_file, "w") as fp:
                fp.write("{}:{}".format(repo.pid, repo.port))
        try:
            return Result(exit_code=repo.join(), message=" ".join(repo.cmd))
        except KeyboardInterrupt:
            repo.kill()
            return Ok("Shut down server for find links repo at {}.".format(dest_dir))
def write_source(path, valid=True):
    # type: (str, bool) -> None
    with safe_open(path, "wb") as fp:
        fp.write(to_bytes("basename = %r\n" % os.path.basename(path)))
        if not valid:
            fp.write(to_bytes("invalid!\n"))
def test_parse_requirements_stress(chroot):
    # type: (str) -> None
    with safe_open(os.path.join(chroot, "other-requirements.txt"), "w") as fp:
        fp.write(
            # This includes both example snippets taken directly from
            # https://pip.pypa.io/en/stable/reference/pip_install/#requirements-file-format
            # not already covered by
            # https://pip.pypa.io/en/stable/reference/pip_install/#example-requirements-file.
            dedent(
                """\
                SomeProject
                SomeProject == 1.3
                SomeProject >=1.2,<2.0
                SomeProject[foo, bar]
                SomeProject~=1.4.2

                SomeProject ==5.4 ; python_version < '2.7'
                SomeProject; sys_platform == 'win32'

                SomeProject @ https://example.com/somewhere/over/here
                SomeProject @ file:somewhere/over/here

                FooProject >= 1.2 --global-option="--no-user-cfg" \\
                    --install-option="--prefix='/usr/local'" \\
                    --install-option="--no-compile"

                git+https://git.example.com/MyProject.git@da39a3ee5e6b4b0d3255bfef95601890afd80709#egg=MyProject
                git+ssh://git.example.com/MyProject#egg=MyProject
                git+file:///home/user/projects/MyProject#egg=MyProject&subdirectory=pkg_dir

                # N.B. This is not from the Pip docs unlike the examples above. We just want to
                # chain in one more set of stress tests.
                -r extra/stress.txt
                """
            )
        )
    touch("somewhere/over/here/pyproject.toml")

    with safe_open(os.path.join(chroot, "extra", "stress.txt"), "w") as fp:
        fp.write(
            # These are tests of edge cases not included anywhere in the examples found in
            # https://pip.pypa.io/en/stable/reference/pip_install/#requirements-file-format.
            dedent(
                """\
                -c file:subdir/more-requirements.txt

                a/local/project[foo]; python_full_version == "2.7.8"
                ./another/local/project;python_version == "2.7.*"
                ./another/local/project
                ./

                # Local projects with basenames that are invalid Python project names (trailing _):
                tmp/tmpW8tdb_
                tmp/tmpW8tdb_[foo]
                tmp/tmpW8tdb_[foo];python_version == "3.9"

                hg+http://hg.example.com/MyProject@da39a3ee5e6b#egg=AnotherProject[extra,more];python_version=="3.9.*"&subdirectory=foo/bar

                ftp://a/${PROJECT_NAME}-1.0.tar.gz
                http://a/${PROJECT_NAME}-1.0.zip
                https://a/numpy-1.9.2-cp34-none-win32.whl

                Django@ git+https://github.com/django/django.git
                Django@git+https://github.com/django/django.git@stable/2.1.x
                Django@ git+https://github.com/django/django.git@fd209f62f1d83233cc634443cfac5ee4328d98b8
                Django @ file:projects/django-2.3.zip; python_version >= "3.10"
                """
            )
        )
    touch("extra/pyproject.toml")
    touch("extra/a/local/project/pyproject.toml")
    touch("extra/another/local/project/setup.py")
    touch("extra/tmp/tmpW8tdb_/setup.py")
    touch("extra/projects/django-2.3.zip")

    with safe_open(os.path.join(chroot, "subdir", "more-requirements.txt"), "w") as fp:
        fp.write(
            # This checks requirements (`ReqInfo`s) are wrapped up into `Constraint`s.
            dedent(
                """\
                AnotherProject
                """
            )
        )

    req_iter = parse_requirements(
        Source.from_text(
            # N.B.: Taken verbatim from:
            # https://pip.pypa.io/en/stable/reference/pip_install/#example-requirements-file
            dedent(
                """\
                #
                ####### example-requirements.txt #######
                #
                ###### Requirements without Version Specifiers ######
                nose
                nose-cov
                beautifulsoup4
                #
                ###### Requirements with Version Specifiers ######
                #   See https://www.python.org/dev/peps/pep-0440/#version-specifiers
                docopt == 0.6.1             # Version Matching. Must be version 0.6.1
                keyring >= 4.1.1            # Minimum version 4.1.1
                coverage != 3.5             # Version Exclusion. Anything except version 3.5
                Mopidy-Dirble ~= 1.1        # Compatible release. Same as >= 1.1, == 1.*
                #
                ###### Refer to other requirements files ######
                -r other-requirements.txt
                #
                #
                ###### A particular file ######
                ./downloads/numpy-1.9.2-cp34-none-win32.whl
                http://wxpython.org/Phoenix/snapshot-builds/wxPython_Phoenix-3.0.3.dev1820+49a8884-cp34-none-win_amd64.whl
                #
                ###### Additional Requirements without Version Specifiers ######
                #   Same as 1st section, just here to show that you can put things in any order.
                rejected
                green
                #
                """
            ),
        )
    )

    # Ensure local non-distribution files matching distribution names are not erroneously probed
    # as distributions to find name and version metadata.
    touch("nose")

    touch("downloads/numpy-1.9.2-cp34-none-win32.whl")
    with environment_as(PROJECT_NAME="Project"):
        results = normalize_results(req_iter)

    assert [
        req(project_name="nose"),
        req(project_name="nose-cov"),
        req(project_name="beautifulsoup4"),
        req(project_name="docopt", specifier="==0.6.1"),
        req(project_name="keyring", specifier=">=4.1.1"),
        req(project_name="coverage", specifier="!=3.5"),
        req(project_name="Mopidy-Dirble", specifier="~=1.1"),
        req(project_name="SomeProject"),
        req(project_name="SomeProject", specifier="==1.3"),
        req(project_name="SomeProject", specifier=">=1.2,<2.0"),
        req(project_name="SomeProject", extras=["foo", "bar"]),
        req(project_name="SomeProject", specifier="~=1.4.2"),
        req(project_name="SomeProject", specifier="==5.4", marker="python_version < '2.7'"),
        req(project_name="SomeProject", marker="sys_platform == 'win32'"),
        url_req(project_name="SomeProject", url="https://example.com/somewhere/over/here"),
        local_req(path=os.path.realpath("somewhere/over/here")),
        req(project_name="FooProject", specifier=">=1.2"),
        url_req(
            project_name="MyProject",
            url="git+https://git.example.com/MyProject.git@da39a3ee5e6b4b0d3255bfef95601890afd80709",
        ),
        url_req(project_name="MyProject", url="git+ssh://git.example.com/MyProject"),
        url_req(project_name="MyProject", url="git+file:/home/user/projects/MyProject"),
        Constraint(DUMMY_LINE, Requirement.parse("AnotherProject")),
        local_req(
            path=os.path.realpath("extra/a/local/project"),
            extras=["foo"],
            marker="python_full_version == '2.7.8'",
        ),
        local_req(
            path=os.path.realpath("extra/another/local/project"),
            marker="python_version == '2.7.*'",
        ),
        local_req(path=os.path.realpath("extra/another/local/project")),
        local_req(path=os.path.realpath("extra")),
        local_req(path=os.path.realpath("extra/tmp/tmpW8tdb_")),
        local_req(path=os.path.realpath("extra/tmp/tmpW8tdb_"), extras=["foo"]),
        local_req(
            path=os.path.realpath("extra/tmp/tmpW8tdb_"),
            extras=["foo"],
            marker="python_version == '3.9'",
        ),
        url_req(
            project_name="AnotherProject",
            url="hg+http://hg.example.com/MyProject@da39a3ee5e6b",
            extras=["more", "extra"],
            marker="python_version == '3.9.*'",
        ),
        url_req(project_name="Project", url="ftp://a/Project-1.0.tar.gz", specifier="==1.0"),
        url_req(project_name="Project", url="http://a/Project-1.0.zip", specifier="==1.0"),
        url_req(
            project_name="numpy",
            url="https://a/numpy-1.9.2-cp34-none-win32.whl",
            specifier="==1.9.2",
        ),
        url_req(project_name="Django", url="git+https://github.com/django/django.git"),
        url_req(
            project_name="Django", url="git+https://github.com/django/django.git@stable/2.1.x"
        ),
        url_req(
            project_name="Django",
            url="git+https://github.com/django/django.git@fd209f62f1d83233cc634443cfac5ee4328d98b8",
        ),
        url_req(
            project_name="Django",
            url=os.path.realpath("extra/projects/django-2.3.zip"),
            specifier="==2.3",
            marker="python_version>='3.10'",
        ),
        url_req(
            project_name="numpy",
            url=os.path.realpath("./downloads/numpy-1.9.2-cp34-none-win32.whl"),
            specifier="==1.9.2",
        ),
        url_req(
            project_name="wxPython_Phoenix",
            url="http://wxpython.org/Phoenix/snapshot-builds/wxPython_Phoenix-3.0.3.dev1820+49a8884-cp34-none-win_amd64.whl",
            specifier="==3.0.3.dev1820+49a8884",
        ),
        req(project_name="rejected"),
        req(project_name="green"),
    ] == results
def supported_tags(self, manylinux=None):
    # type: (Optional[str]) -> Tuple[tags.Tag, ...]

    # We use a 2 level cache, probing memory first and then a json file on disk in order to
    # avoid calculating tags when possible since it's an O(500ms) operation that involves
    # spawning Pip.

    # Read level 1.
    memory_cache_key = (self, manylinux)
    supported_tags = self._SUPPORTED_TAGS_BY_PLATFORM.get(memory_cache_key)
    if supported_tags is not None:
        return supported_tags

    # Read level 2.
    components = list(attr.astuple(self))
    if manylinux:
        components.append(manylinux)
    disk_cache_key = os.path.join(ENV.PEX_ROOT, "platforms", self.SEP.join(components))
    with atomic_directory(target_dir=disk_cache_key, exclusive=False) as cache_dir:
        if not cache_dir.is_finalized:
            # Missed both caches - spawn calculation.
            plat_info = attr.asdict(self)
            plat_info.update(
                supported_tags=[
                    (tag.interpreter, tag.abi, tag.platform)
                    for tag in self._calculate_tags(manylinux=manylinux)
                ],
            )
            # Write level 2.
            with safe_open(os.path.join(cache_dir.work_dir, self.PLAT_INFO_FILE), "w") as fp:
                json.dump(plat_info, fp)

    with open(os.path.join(disk_cache_key, self.PLAT_INFO_FILE)) as fp:
        try:
            data = json.load(fp)
        except ValueError as e:
            TRACER.log(
                "Regenerating the platform info file at {} since it did not contain parsable "
                "JSON data: {}".format(fp.name, e)
            )
            safe_rmtree(disk_cache_key)
            return self.supported_tags(manylinux=manylinux)

    if not isinstance(data, dict):
        TRACER.log(
            "Regenerating the platform info file at {} since it did not contain a "
            "configuration object. Found: {!r}".format(fp.name, data)
        )
        safe_rmtree(disk_cache_key)
        return self.supported_tags(manylinux=manylinux)

    sup_tags = data.get("supported_tags")
    if not isinstance(sup_tags, list):
        TRACER.log(
            "Regenerating the platform info file at {} since it was missing a valid "
            "`supported_tags` list. Found: {!r}".format(fp.name, sup_tags)
        )
        safe_rmtree(disk_cache_key)
        return self.supported_tags(manylinux=manylinux)

    count = len(sup_tags)

    def parse_tag(
        index,  # type: int
        tag,  # type: List[Any]
    ):
        # type: (...) -> tags.Tag
        if len(tag) != 3 or not all(
            isinstance(component, compatibility.string) for component in tag
        ):
            raise ValueError(
                "Serialized platform tags should be lists of three strings. Tag {index} of "
                "{count} was: {tag!r}.".format(index=index, count=count, tag=tag)
            )
        interpreter, abi, platform = tag
        return tags.Tag(interpreter=interpreter, abi=abi, platform=platform)

    try:
        supported_tags = tuple(parse_tag(index, tag) for index, tag in enumerate(sup_tags))
        # Write level 1.
        self._SUPPORTED_TAGS_BY_PLATFORM[memory_cache_key] = supported_tags
        return supported_tags
    except ValueError as e:
        TRACER.log(
            "Regenerating the platform info file at {} since it did not contain parsable "
            "tag data: {}".format(fp.name, e)
        )
        safe_rmtree(disk_cache_key)
        return self.supported_tags(manylinux=manylinux)
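# The two-level (memory, then JSON-on-disk) memoization pattern used by supported_tags(),
# shown in miniature. Unlike the production code, this sketch omits the atomic_directory
# coordination between concurrent writers; the self-healing step (discard the corrupt
# entry and recompute) mirrors the TRACER.log + safe_rmtree + retry branches above. The
# function and cache names here are made up for illustration.
import json
import os

_MEMORY_CACHE = {}


def two_level_cached(key, compute, cache_dir):
    # type: (str, Callable[[], Any], str) -> Any
    if key in _MEMORY_CACHE:  # Level 1: in-process memory.
        return _MEMORY_CACHE[key]
    path = os.path.join(cache_dir, key)
    if os.path.exists(path):  # Level 2: JSON on disk.
        try:
            with open(path) as fp:
                value = json.load(fp)
        except ValueError:
            os.unlink(path)  # Corrupt cache entry: self-heal by recomputing.
            return two_level_cached(key, compute, cache_dir)
    else:
        value = compute()
        with open(path, "w") as fp:
            json.dump(value, fp)
    _MEMORY_CACHE[key] = value
    return value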