def test_can_add_handles_invalid_wheel_filename(python_35_interpreter):
    # type: (PythonInterpreter) -> None
    """A distribution whose filename is not a valid PEP 427 wheel name must be rejected."""
    default_pex_info = PexInfo.default(python_35_interpreter)
    environment = PEXEnvironment(
        pex="",
        pex_info=default_pex_info,
        interpreter=python_35_interpreter,
    )
    invalid_wheel = Distribution("pep427-invalid.whl")
    assert environment.can_add(invalid_wheel) is False
def _activate(self):
    # type: () -> WorkingSet
    """Activate this PEX plus every PEX on its pex_path into one WorkingSet.

    Builds a PEXEnvironment per PEX, activates each in order, merges the
    activated distributions into a single pkg_resources WorkingSet, and
    patches that WorkingSet into pkg_resources.
    """
    working_set = WorkingSet([])

    # set up the local .pex environment
    pex_info = self.pex_info()
    self._envs.append(PEXEnvironment(self._pex, pex_info, interpreter=self._interpreter))
    # N.B. by this point, `pex_info.pex_path` will contain a single pex path
    # merged from pex_path in `PEX-INFO` and `PEX_PATH` set in the environment.
    # `PEX_PATH` entries written into `PEX-INFO` take precedence over those set
    # in the environment.
    if pex_info.pex_path:
        # set up other environments as specified in pex_path
        for pex_path in filter(None, pex_info.pex_path.split(os.pathsep)):
            pex_info = PexInfo.from_pex(pex_path)
            pex_info.update(self._pex_info_overrides)
            self._envs.append(PEXEnvironment(pex_path, pex_info, interpreter=self._interpreter))

    # activate all of them
    for env in self._envs:
        for dist in env.activate():
            working_set.add(dist)

    # Ensure that pkg_resources is not imported until at least every pex environment
    # (i.e. PEX_PATH) has been merged into the environment
    PEXEnvironment.declare_namespace_packages(working_set)
    self.patch_pkg_resources(working_set)
    return working_set
def test_force_local():
    # type: () -> None
    """_force_local explodes a zipped PEX into the zip_unsafe cache, idempotently."""
    with nested(yield_pex_builder(), temporary_dir(), temporary_filename()) as (
        pb,
        pex_root,
        pex_file,
    ):
        pb.info.pex_root = pex_root
        pb.build(pex_file)

        code_cache = PEXEnvironment._force_local(pex_file, pb.info)

        assert os.path.exists(pb.info.zip_unsafe_cache)
        listing = set(os.listdir(pb.info.zip_unsafe_cache))

        # The code_cache should be a write-locked directory.
        assert len(listing) == 2
        listing.remove(os.path.basename(code_cache))
        lockfile = listing.pop()
        assert os.path.isfile(os.path.join(pb.info.zip_unsafe_cache, lockfile))

        assert set(os.listdir(code_cache)) == {PexInfo.PATH, "__main__.py", "__main__.pyc"}

        # idempotence
        assert PEXEnvironment._force_local(pex_file, pb.info) == code_cache
def _activate(self):
    # type: () -> Iterable[Distribution]
    """Activate this PEX plus every PEX on its pex_path.

    Returns the accumulated list of activated distributions from all
    constituent PEX environments, in activation order.
    """
    # set up the local .pex environment
    pex_info = self.pex_info()
    target = DistributionTarget.for_interpreter(self._interpreter)
    self._envs.append(PEXEnvironment(self._pex, pex_info, target=target))
    # N.B. by this point, `pex_info.pex_path` will contain a single pex path
    # merged from pex_path in `PEX-INFO` and `PEX_PATH` set in the environment.
    # `PEX_PATH` entries written into `PEX-INFO` take precedence over those set
    # in the environment.
    if pex_info.pex_path:
        # set up other environments as specified in pex_path
        for pex_path in filter(None, pex_info.pex_path.split(os.pathsep)):
            pex_info = PexInfo.from_pex(pex_path)
            pex_info.update(self._pex_info_overrides)
            self._envs.append(PEXEnvironment(pex_path, pex_info, target=target))

    # activate all of them
    activated_dists = []  # type: List[Distribution]
    for env in self._envs:
        activated_dists.extend(env.activate())

    # Ensure that pkg_resources is not imported until at least every pex environment
    # (i.e. PEX_PATH) has been merged into the environment
    PEXEnvironment._declare_namespace_packages(activated_dists)
    return activated_dists
def _activate(self):
    # type: () -> WorkingSet
    """Lazily build and memoize the WorkingSet for this PEX and its pex_path.

    The merged WorkingSet is cached on self._working_set so repeated calls
    are cheap and activation happens at most once.
    """
    if not self._working_set:
        working_set = WorkingSet([])

        # set up the local .pex environment
        pex_info = self._pex_info.copy()
        pex_info.update(self._pex_info_overrides)
        pex_info.merge_pex_path(self._vars.PEX_PATH)
        self._envs.append(PEXEnvironment(self._pex, pex_info, interpreter=self._interpreter))
        # N.B. by this point, `pex_info.pex_path` will contain a single pex path
        # merged from pex_path in `PEX-INFO` and `PEX_PATH` set in the environment.
        # `PEX_PATH` entries written into `PEX-INFO` take precedence over those set
        # in the environment.
        if pex_info.pex_path:
            # set up other environments as specified in pex_path
            for pex_path in filter(None, pex_info.pex_path.split(os.pathsep)):
                pex_info = PexInfo.from_pex(pex_path)
                pex_info.update(self._pex_info_overrides)
                self._envs.append(PEXEnvironment(pex_path, pex_info, interpreter=self._interpreter))

        # activate all of them
        for env in self._envs:
            for dist in env.activate():
                working_set.add(dist)

        self._working_set = working_set

    return self._working_set
def activate_pex():
    """Activate the PEX file named by the UWSGI_PEX environment variable.

    Exits the process with status 1 if UWSGI_PEX is unset.

    Returns:
        A (entry_point, pex_info, env, working_set) tuple for the activated PEX.
    """
    entry_point = os.environ.get('UWSGI_PEX')
    if not entry_point:
        sys.stderr.write("could not determine pex from UWSGI_PEX environment variable, bailing!\n")
        sys.exit(1)

    sys.stderr.write('entry_point=%s\n' % entry_point)

    # Make the script dir absolute, then put the PEX and its .bootstrap dir at
    # the front of sys.path so the bundled `pex` package is importable below.
    sys.path[0] = os.path.abspath(sys.path[0])
    sys.path.insert(0, entry_point)
    sys.path.insert(0, os.path.abspath(os.path.join(entry_point, '.bootstrap')))

    # Deferred imports: these only resolve after the sys.path surgery above.
    from pex import pex_bootstrapper
    from pex.environment import PEXEnvironment
    from pex.finders import register_finders
    from pex.pex_info import PexInfo

    pex_bootstrapper.monkeypatch_build_zipmanifest()
    register_finders()

    pex_info = PexInfo.from_pex(entry_point)
    # Fixed: was a Python-2-only `print` statement that sent this one debug
    # line to stdout; all other diagnostics in this function go to stderr.
    sys.stderr.write('pex_info=%s\n' % pex_info)

    env = PEXEnvironment(entry_point, pex_info)
    working_set = env.activate()

    sys.stderr.write('sys.path=%s\n\n' % sys.path)
    return entry_point, pex_info, env, working_set
def assert_dist_cache(zip_safe):
    # type: (bool) -> None
    """Assert a built PEX's dists land in the install cache, idempotently.

    Builds a PEX with the given zip-safety, writes its internal cache from the
    zip, and verifies a second write resolves to the same cached location.
    """
    with nested(yield_pex_builder(zip_safe=zip_safe), temporary_dir(), temporary_filename()) as (
        pb,
        pex_root,
        pex_file,
    ):
        pb.info.pex_root = pex_root
        pb.build(pex_file)

        with open_zip(pex_file) as zf:
            dists = PEXEnvironment._write_zipped_internal_cache(zf=zf, pex_info=pb.info)
            assert len(dists) == 1
            original_location = normalize(dists[0].location)
            assert original_location.startswith(normalize(pb.info.install_cache))

        # Call a second time to validate idempotence of caching.
        dists = PEXEnvironment._write_zipped_internal_cache(zf=None, pex_info=pb.info)
        assert len(dists) == 1
        assert normalize(dists[0].location) == original_location
def test_can_add_handles_optional_build_tag_in_wheel(
    python_35_interpreter, wheel_distribution, wheel_is_linux
):
    # type: (PythonInterpreter, str, bool) -> None
    """Wheels carrying an optional build tag are addable only when platform-native.

    `wheel_distribution` / `wheel_is_linux` are parametrized fixtures; the wheel
    should only be accepted when it is a Linux wheel on a Linux host.
    """
    pex_environment = PEXEnvironment(
        pex="", pex_info=PexInfo.default(python_35_interpreter), interpreter=python_35_interpreter
    )
    native_wheel = IS_LINUX and wheel_is_linux
    assert pex_environment.can_add(wheel_distribution) is native_wheel
def test_can_add_handles_optional_build_tag_in_wheel(python_35_interpreter, wheel_filename, wheel_is_linux):
    """A wheel with an optional build tag is addable only when it matches the host platform."""
    environment = PEXEnvironment(
        pex="",
        pex_info=PexInfo.default(python_35_interpreter),
        interpreter=python_35_interpreter,
    )
    expect_addable = IS_LINUX and wheel_is_linux
    candidate = Distribution(wheel_filename)
    assert environment.can_add(candidate) is expect_addable
def _activate(self):
    # type: () -> Iterable[Distribution]
    """Activate every loaded PEX environment and collect their distributions."""
    dists = []  # type: List[Distribution]
    for environment in self._loaded_envs:
        for dist in environment.activate():
            dists.append(dist)

    # Ensure that pkg_resources is not imported until at least every pex environment
    # (i.e. PEX_PATH) has been merged into the environment
    PEXEnvironment._declare_namespace_packages(dists)
    return dists
def test_force_local():
    """force_local explodes a zipped PEX into a single zip_unsafe cache dir, idempotently."""
    with nested(yield_pex_builder(), temporary_dir(), temporary_file()) as (pb, pex_root, pex_file):
        pb.info.pex_root = pex_root
        pb.build(pex_file.name)
        code_cache = PEXEnvironment.force_local(pex_file.name, pb.info)
        assert os.path.exists(pb.info.zip_unsafe_cache)
        assert len(os.listdir(pb.info.zip_unsafe_cache)) == 1
        assert [os.path.basename(code_cache)] == os.listdir(pb.info.zip_unsafe_cache)
        assert set(os.listdir(code_cache)) == set([PexInfo.PATH, '__main__.py'])

        # idempotence
        assert PEXEnvironment.force_local(pex_file.name, pb.info) == code_cache
def test_force_local():
    """force_local explodes a zipped PEX (including compiled __main__.pyc) into one cache dir."""
    with nested(yield_pex_builder(), temporary_dir(), temporary_filename()) as (pb, pex_root, pex_file):
        pb.info.pex_root = pex_root
        pb.build(pex_file)
        code_cache = PEXEnvironment.force_local(pex_file, pb.info)
        assert os.path.exists(pb.info.zip_unsafe_cache)
        assert len(os.listdir(pb.info.zip_unsafe_cache)) == 1
        assert [os.path.basename(code_cache)] == os.listdir(pb.info.zip_unsafe_cache)
        assert set(os.listdir(code_cache)) == set([PexInfo.PATH, "__main__.py", "__main__.pyc"])

        # idempotence
        assert PEXEnvironment.force_local(pex_file, pb.info) == code_cache
def test_write_zipped_internal_cache():
    """write_zipped_internal_cache splits dists into (existing, new, zip_safe) buckets.

    zip-safe dists stay inside the zip unless always_write_cache is set;
    non-zip-safe dists are always extracted to the install cache, idempotently.
    """
    # zip_safe pex will not be written to install cache unless always_write_cache
    with nested(yield_pex_builder(zip_safe=True), temporary_dir(), temporary_file()) as (pb,
                                                                                         pex_root,
                                                                                         pex_file):
        pb.info.pex_root = pex_root
        pb.build(pex_file.name)
        existing, new, zip_safe = PEXEnvironment.write_zipped_internal_cache(
            pex_file.name, pb.info)
        assert len(zip_safe) == 1
        assert normalize(zip_safe[0].location).startswith(
            normalize(os.path.join(pex_file.name, pb.info.internal_cache))), (
                'loc: %s, cache: %s' % (
                    normalize(zip_safe[0].location),
                    normalize(os.path.join(pex_file.name, pb.info.internal_cache))))

        pb.info.always_write_cache = True
        existing, new, zip_safe = PEXEnvironment.write_zipped_internal_cache(
            pex_file.name, pb.info)
        assert len(new) == 1
        assert normalize(new[0].location).startswith(normalize(pb.info.install_cache))

        # Check that we can read from the cache
        existing, new, zip_safe = PEXEnvironment.write_zipped_internal_cache(
            pex_file.name, pb.info)
        assert len(existing) == 1
        assert normalize(existing[0].location).startswith(normalize(pb.info.install_cache))

    # non-zip_safe pex will be written to install cache
    with nested(yield_pex_builder(zip_safe=False), temporary_dir(), temporary_file()) as (pb,
                                                                                          pex_root,
                                                                                          pex_file):
        pb.info.pex_root = pex_root
        pb.build(pex_file.name)
        existing, new, zip_safe = PEXEnvironment.write_zipped_internal_cache(
            pex_file.name, pb.info)
        assert len(new) == 1
        assert normalize(new[0].location).startswith(normalize(pb.info.install_cache))
        original_location = normalize(new[0].location)

        # do the second time to validate idempotence of caching
        existing, new, zip_safe = PEXEnvironment.write_zipped_internal_cache(
            pex_file.name, pb.info)
        assert len(existing) == 1
        assert normalize(existing[0].location) == original_location
def test_load_internal_cache_unzipped():
    # zip_safe pex will not be written to install cache unless always_write_cache
    with nested(yield_pex_builder(zip_safe=True), temporary_dir()) as (pb, pex_root):
        pb.info.pex_root = pex_root
        pb.freeze()

        cached_dists = list(PEXEnvironment.load_internal_cache(pb.path(), pb.info))
        assert len(cached_dists) == 1

        expected_prefix = normalize(os.path.join(pb.path(), pb.info.internal_cache))
        assert normalize(cached_dists[0].location).startswith(expected_prefix)
def test_load_internal_cache_unzipped():
    """An unzipped (frozen, on-disk) PEX serves its dist straight from the internal cache."""
    # zip_safe pex will not be written to install cache unless always_write_cache
    with nested(yield_pex_builder(zip_safe=True), temporary_dir()) as (pb, pex_root):
        pb.info.pex_root = pex_root
        pb.freeze()
        dists = list(PEXEnvironment.load_internal_cache(pb.path(), pb.info))
        assert len(dists) == 1
        assert normalize(dists[0].location).startswith(
            normalize(os.path.join(pb.path(), pb.info.internal_cache)))
def test_write_zipped_internal_cache():
    """write_zipped_internal_cache buckets dists as (existing, new, zip_safe).

    zip-safe dists remain referenced inside the zip unless always_write_cache
    forces extraction; non-zip-safe dists are always extracted, idempotently.
    """
    # zip_safe pex will not be written to install cache unless always_write_cache
    with nested(yield_pex_builder(zip_safe=True), temporary_dir(), temporary_file()) as (
            pb, pex_root, pex_file):
        pb.info.pex_root = pex_root
        pb.build(pex_file.name)
        existing, new, zip_safe = PEXEnvironment.write_zipped_internal_cache(pex_file.name, pb.info)
        assert len(zip_safe) == 1
        assert normalize(zip_safe[0].location).startswith(
            normalize(os.path.join(pex_file.name, pb.info.internal_cache))), (
                'loc: %s, cache: %s' % (
                    normalize(zip_safe[0].location),
                    normalize(os.path.join(pex_file.name, pb.info.internal_cache))))

        pb.info.always_write_cache = True
        existing, new, zip_safe = PEXEnvironment.write_zipped_internal_cache(pex_file.name, pb.info)
        assert len(new) == 1
        assert normalize(new[0].location).startswith(normalize(pb.info.install_cache))

        # Check that we can read from the cache
        existing, new, zip_safe = PEXEnvironment.write_zipped_internal_cache(pex_file.name, pb.info)
        assert len(existing) == 1
        assert normalize(existing[0].location).startswith(normalize(pb.info.install_cache))

    # non-zip_safe pex will be written to install cache
    with nested(yield_pex_builder(zip_safe=False), temporary_dir(), temporary_file()) as (
            pb, pex_root, pex_file):
        pb.info.pex_root = pex_root
        pb.build(pex_file.name)
        existing, new, zip_safe = PEXEnvironment.write_zipped_internal_cache(pex_file.name, pb.info)
        assert len(new) == 1
        assert normalize(new[0].location).startswith(normalize(pb.info.install_cache))
        original_location = normalize(new[0].location)

        # do the second time to validate idempotence of caching
        existing, new, zip_safe = PEXEnvironment.write_zipped_internal_cache(pex_file.name, pb.info)
        assert len(existing) == 1
        assert normalize(existing[0].location) == original_location
def test_load_internal_cache_unzipped():
    # Unzipped pexes should use distributions from the pex internal cache.
    with nested(yield_pex_builder(zip_safe=True), temporary_dir()) as (pb, pex_root):
        pb.info.pex_root = pex_root
        pb.freeze()

        loaded = list(PEXEnvironment._load_internal_cache(pb.path(), pb.info))
        assert len(loaded) == 1

        cache_prefix = normalize(os.path.join(pb.path(), pb.info.internal_cache))
        assert normalize(loaded[0].location).startswith(cache_prefix)
def _loaded_envs(self):
    # type: () -> Iterable[PEXEnvironment]
    """Lazily construct and memoize the PEXEnvironments for this PEX and its pex_path.

    The first call builds one environment for the local PEX plus one per
    pex_path entry and caches them as an immutable tuple on self._envs.
    """
    if self._envs is None:
        # set up the local .pex environment
        pex_info = self.pex_info()
        target = DistributionTarget.for_interpreter(self._interpreter)
        envs = [PEXEnvironment(self._pex, pex_info, target=target)]
        # N.B. by this point, `pex_info.pex_path` will contain a single pex path
        # merged from pex_path in `PEX-INFO` and `PEX_PATH` set in the environment.
        # `PEX_PATH` entries written into `PEX-INFO` take precedence over those set
        # in the environment.
        if pex_info.pex_path:
            # set up other environments as specified in pex_path
            for pex_path in filter(None, pex_info.pex_path.split(os.pathsep)):
                pex_info = PexInfo.from_pex(pex_path)
                pex_info.update(self._pex_info_overrides)
                envs.append(PEXEnvironment(pex_path, pex_info, target=target))
        self._envs = tuple(envs)
    return self._envs
def test_write_zipped_internal_cache():
    """zip-safe dists are served from the zip unless always_write_cache forces extraction;
    non-zip-safe dists always extract to the install cache, idempotently."""
    # zip_safe pex will not be written to install cache unless always_write_cache
    with nested(yield_pex_builder(zip_safe=True), temporary_dir(), temporary_file()) as (
            pb, pex_root, pex_file):
        pb.info.pex_root = pex_root
        pb.build(pex_file.name)

        dists = PEXEnvironment.write_zipped_internal_cache(pex_file.name, pb.info)
        assert len(dists) == 1
        assert normalize(dists[0].location).startswith(
            normalize(os.path.join(pex_file.name, pb.info.internal_cache))), (
                'loc: %s, cache: %s' % (
                    normalize(dists[0].location),
                    normalize(os.path.join(pex_file.name, pb.info.internal_cache))))

        pb.info.always_write_cache = True
        dists = PEXEnvironment.write_zipped_internal_cache(pex_file.name, pb.info)
        assert len(dists) == 1
        assert normalize(dists[0].location).startswith(normalize(pb.info.install_cache))

    # non-zip_safe pex will always be written to the install cache
    with nested(yield_pex_builder(zip_safe=False), temporary_dir(), temporary_file()) as (
            pb, pex_root, pex_file):
        pb.info.pex_root = pex_root
        pb.build(pex_file.name)

        dists = PEXEnvironment.write_zipped_internal_cache(pex_file.name, pb.info)
        assert len(dists) == 1
        assert normalize(dists[0].location).startswith(normalize(pb.info.install_cache))
        original_location = normalize(dists[0].location)

        # do the second time to validate idempotence of caching
        dists = PEXEnvironment.write_zipped_internal_cache(pex_file.name, pb.info)
        assert len(dists) == 1
        assert normalize(dists[0].location) == original_location
def populate_venv_with_pex(
    venv,  # type: Virtualenv
    pex,  # type: PEX
    bin_path=BinPath.FALSE,  # type: BinPath.Value
    python=None,  # type: Optional[str]
    collisions_ok=True,  # type: bool
):
    # type: (...) -> str
    """Populate `venv` with the contents of `pex` and install venv entry points.

    Copies the PEX's user code and activated distributions into the venv's
    site-packages (tracking provenance to detect file collisions), writes a
    `__main__.py` launcher (symlinked as `pex`) that emulates PEX runtime
    semantics, and re-writes console scripts to use the venv python.

    Raises CollisionError when multiple sources provide the same destination
    file and `collisions_ok` is False; otherwise collisions only warn.

    Returns the shebang line used for the generated launcher.

    NOTE(review): original line breaks inside the generated template were lost
    in this file's formatting; the template text below is reconstructed —
    confirm against upstream before relying on exact layout.
    """
    venv_python = python or venv.interpreter.binary
    venv_bin_dir = os.path.dirname(python) if python else venv.bin_dir
    venv_dir = os.path.dirname(venv_bin_dir) if python else venv.venv_dir

    # 1. Populate the venv with the PEX contents.
    provenance = defaultdict(list)

    def record_provenance(src_to_dst):
        # type: (Iterable[Tuple[str, str]]) -> None
        # Track every (source, destination) copy so collisions can be reported.
        for src, dst in src_to_dst:
            provenance[dst].append(src)

    pex_info = pex.pex_info()
    if zipfile.is_zipfile(pex.path()):
        record_provenance(
            PEXEnvironment(pex.path()).explode_code(venv.site_packages_dir, exclude=("__main__.py",))
        )
    else:
        record_provenance(
            _copytree(
                src=pex.path(),
                dst=venv.site_packages_dir,
                exclude=(pex_info.internal_cache, pex_builder.BOOTSTRAP_DIR, "__main__.py"),
            )
        )

    for dist in pex.activate():
        record_provenance(
            _copytree(src=dist.location, dst=venv.site_packages_dir, exclude=("bin",))
        )
        dist_bin_dir = os.path.join(dist.location, "bin")
        if os.path.isdir(dist_bin_dir):
            record_provenance(_copytree(dist_bin_dir, venv.bin_dir))

    # Any destination written by more than one source is a collision.
    collisions = {dst: srcs for dst, srcs in provenance.items() if len(srcs) > 1}
    if collisions:
        message_lines = [
            "Encountered {collision} building venv at {venv_dir} from {pex}:".format(
                collision=pluralize(collisions, "collision"), venv_dir=venv_dir, pex=pex.path()
            )
        ]
        for index, (dst, srcs) in enumerate(collisions.items(), start=1):
            message_lines.append(
                "{index}. {dst} was provided by:\n\t{srcs}".format(
                    index=index, dst=dst, srcs="\n\t".join(srcs)
                )
            )
        message = "\n".join(message_lines)
        if not collisions_ok:
            raise CollisionError(message)
        pex_warnings.warn(message)

    # 2. Add a __main__ to the root of the venv for running the venv dir like a loose PEX dir
    # and a main.py for running as a script.
    shebang = "#!{} -sE".format(venv_python)
    main_contents = dedent(
        """\
        {shebang}
        if __name__ == "__main__":
            import os
            import sys

            venv_dir = os.path.abspath(os.path.dirname(__file__))
            venv_bin_dir = os.path.join(venv_dir, "bin")
            shebang_python = {shebang_python!r}
            python = os.path.join(venv_bin_dir, os.path.basename(shebang_python))
            if sys.executable not in (python, shebang_python):
                sys.stderr.write("Re-execing from {{}}\\n".format(sys.executable))
                os.execv(python, [python, "-sE"] + sys.argv)

            os.environ["VIRTUAL_ENV"] = venv_dir
            sys.path.extend(os.environ.get("PEX_EXTRA_SYS_PATH", "").split(os.pathsep))

            bin_path = os.environ.get("PEX_VENV_BIN_PATH", {bin_path!r})
            if bin_path != "false":
                PATH = os.environ.get("PATH", "").split(os.pathsep)
                if bin_path == "prepend":
                    PATH.insert(0, venv_bin_dir)
                elif bin_path == "append":
                    PATH.append(venv_bin_dir)
                else:
                    sys.stderr.write(
                        "PEX_VENV_BIN_PATH must be one of 'false', 'prepend' or 'append', given: "
                        "{{!r}}\\n".format(bin_path)
                    )
                    sys.exit(1)
                os.environ["PATH"] = os.pathsep.join(PATH)

            PEX_EXEC_OVERRIDE_KEYS = ("PEX_INTERPRETER", "PEX_SCRIPT", "PEX_MODULE")
            pex_overrides = {{
                key: os.environ.pop(key) for key in PEX_EXEC_OVERRIDE_KEYS if key in os.environ
            }}
            if len(pex_overrides) > 1:
                sys.stderr.write(
                    "Can only specify one of {{overrides}}; found: {{found}}\\n".format(
                        overrides=", ".join(PEX_EXEC_OVERRIDE_KEYS),
                        found=" ".join("{{}}={{}}".format(k, v) for k, v in pex_overrides.items())
                    )
                )
                sys.exit(1)

            pex_script = pex_overrides.get("PEX_SCRIPT")
            if pex_script:
                script_path = os.path.join(venv_bin_dir, pex_script)
                os.execv(script_path, [script_path] + sys.argv[1:])

            pex_interpreter = pex_overrides.get("PEX_INTERPRETER", "").lower() in ("1", "true")
            PEX_INTERPRETER_ENTRYPOINT = "code:interact"
            entry_point = (
                PEX_INTERPRETER_ENTRYPOINT
                if pex_interpreter
                else pex_overrides.get("PEX_MODULE", {entry_point!r} or PEX_INTERPRETER_ENTRYPOINT)
            )
            if entry_point == PEX_INTERPRETER_ENTRYPOINT and len(sys.argv) > 1:
                args = sys.argv[1:]
                arg = args[0]
                if arg == "-m":
                    if len(args) < 2:
                        sys.stderr.write("Argument expected for the -m option\\n")
                        sys.exit(2)
                    entry_point = module = args[1]
                    sys.argv = args[1:]
                    # Fall through to entry_point handling below.
                else:
                    filename = arg
                    sys.argv = args
                    if arg == "-c":
                        if len(args) < 2:
                            sys.stderr.write("Argument expected for the -c option\\n")
                            sys.exit(2)
                        filename = "-c <cmd>"
                        content = args[1]
                        sys.argv = ["-c"] + args[2:]
                    elif arg == "-":
                        content = sys.stdin.read()
                    else:
                        with open(arg) as fp:
                            content = fp.read()

                    ast = compile(content, filename, "exec", flags=0, dont_inherit=1)
                    globals_map = globals().copy()
                    globals_map["__name__"] = "__main__"
                    globals_map["__file__"] = filename
                    locals_map = globals_map
                    {exec_ast}
                    sys.exit(0)

            module_name, _, function = entry_point.partition(":")
            if not function:
                import runpy
                runpy.run_module(module_name, run_name="__main__")
            else:
                import importlib
                module = importlib.import_module(module_name)
                # N.B.: Functions may be hung off top-level objects in the module namespace,
                # e.g.: Class.method; so we drill down through any attributes to the final function
                # object.
                namespace, func = module, None
                for attr in function.split("."):
                    func = namespace = getattr(namespace, attr)
                sys.exit(func())
        """.format(
            shebang=shebang,
            shebang_python=venv_python,
            bin_path=bin_path,
            entry_point=pex_info.entry_point,
            exec_ast=(
                "exec ast in globals_map, locals_map"
                if venv.interpreter.version[0] == 2
                else "exec(ast, globals_map, locals_map)"
            ),
        )
    )
    with open(venv.join_path("__main__.py"), "w") as fp:
        fp.write(main_contents)
    chmod_plus_x(fp.name)
    os.symlink(os.path.basename(fp.name), venv.join_path("pex"))

    # 3. Re-write any (console) scripts to use the venv Python.
    for script in venv.rewrite_scripts(python=venv_python, python_args="-sE"):
        TRACER.log("Re-writing {}".format(script))

    return shebang
def cpython_35_environment(python_35_interpreter):
    """Build an empty PEXEnvironment targeting the given CPython 3.5 interpreter."""
    default_pex_info = PexInfo.default(python_35_interpreter)
    distribution_target = DistributionTarget.for_interpreter(python_35_interpreter)
    return PEXEnvironment(pex="", pex_info=default_pex_info, target=distribution_target)
def _extract_sdist(
    pex,  # type: PEX
    dest_dir,  # type: str
):
    # type: (...) -> None
    """Repackage the code of `pex` as a source distribution written to `dest_dir`.

    Explodes (or copies) the PEX's user code into a temporary chroot, then
    synthesizes setup.cfg / setup.py / MANIFEST.in from its PEX-INFO metadata
    and runs `setup.py sdist` with the PEX's own interpreter.

    NOTE(review): line breaks inside the setup.cfg template below were lost in
    this file's formatting and have been reconstructed — confirm against
    upstream before relying on exact layout.
    """
    chroot = safe_mkdtemp()
    src = os.path.join(chroot, "src")
    safe_mkdir(src)
    excludes = ["__main__.py", "PEX-INFO"]
    if zipfile.is_zipfile(pex.path()):
        PEXEnvironment(pex.path()).explode_code(src, exclude=excludes)
    else:
        shutil.copytree(pex.path(), src, ignore=lambda _dir, _names: excludes)

    pex_info = pex.pex_info()

    name, _ = os.path.splitext(os.path.basename(pex.path()))
    # A local version label derived from the code hash keeps builds distinguishable.
    version = "0.0.0+{}".format(pex_info.code_hash)
    zip_safe = pex_info.zip_safe
    py_modules = [os.path.splitext(f)[0] for f in os.listdir(src) if f.endswith(".py")]
    packages = [
        os.path.relpath(os.path.join(root, d), src).replace(os.sep, ".")
        for root, dirs, _ in os.walk(src)
        for d in dirs
    ]
    install_requires = [str(req) for req in pex_info.requirements]

    # python_requires can only be derived from a single interpreter constraint.
    python_requires = None
    if len(pex_info.interpreter_constraints) == 1:
        python_requires = str(
            PythonIdentity.parse_requirement(pex_info.interpreter_constraints[0]).specifier
        )
    elif pex_info.interpreter_constraints:
        pex_warnings.warn(
            "Omitting `python_requires` for {name} sdist since {pex} has multiple "
            "interpreter constraints:\n{interpreter_constraints}".format(
                name=name,
                pex=os.path.normpath(pex.path()),
                interpreter_constraints="\n".join(
                    "{index}.) {constraint}".format(index=index, constraint=constraint)
                    for index, constraint in enumerate(
                        pex_info.interpreter_constraints, start=1
                    )
                ),
            )
        )

    entry_points = []
    if pex_info.entry_point and ":" in pex_info.entry_point:
        entry_points = [(name, pex_info.entry_point)]

    with open(os.path.join(chroot, "setup.cfg"), "w") as fp:
        fp.write(
            dedent(
                """\
                [metadata]
                name = {name}
                version = {version}

                [options]
                zip_safe = {zip_safe}
                {py_modules}
                {packages}
                package_dir =
                    =src
                include_package_data = True

                {python_requires}
                {install_requires}

                [options.entry_points]
                {entry_points}
                """
            ).format(
                name=name,
                version=version,
                zip_safe=zip_safe,
                py_modules=(
                    "py_modules =\n  {}".format("\n  ".join(py_modules)) if py_modules else ""
                ),
                packages=(
                    "packages = \n  {}".format("\n  ".join(packages)) if packages else ""
                ),
                install_requires=(
                    "install_requires =\n  {}".format("\n  ".join(install_requires))
                    if install_requires
                    else ""
                ),
                python_requires=(
                    "python_requires = {}".format(python_requires) if python_requires else ""
                ),
                entry_points=(
                    "console_scripts =\n  {}".format(
                        "\n  ".join(
                            "{} = {}".format(name, entry_point)
                            for name, entry_point in entry_points
                        )
                    )
                    if entry_points
                    else ""
                ),
            )
        )

    with open(os.path.join(chroot, "MANIFEST.in"), "w") as fp:
        fp.write("recursive-include src *")

    with open(os.path.join(chroot, "setup.py"), "w") as fp:
        fp.write("import setuptools; setuptools.setup()")

    spawn_python_job(
        args=["setup.py", "sdist", "--dist-dir", dest_dir],
        interpreter=pex.interpreter,
        expose=["setuptools"],
        cwd=chroot,
    ).wait()