def _parse_sdist_package_info(sdist_path):
    # type: (str) -> Optional[Message]
    """Extract and parse the PKG-INFO metadata file from an sdist archive.

    :param sdist_path: Path to a source distribution archive (zip or tarball).
    :return: The parsed PKG-INFO message, or None when the path is not a recognized sdist
        archive or the expected metadata file is missing.
    """
    sdist_filename = _strip_sdist_path(sdist_path)
    if sdist_filename is None:
        return None

    pkg_info_path = os.path.join(sdist_filename, Distribution.PKG_INFO)

    if zipfile.is_zipfile(sdist_path):
        # N.B.: `zf` (not `zip`) to avoid shadowing the `zip` builtin.
        with open_zip(sdist_path) as zf:
            try:
                return _parse_message(zf.read(pkg_info_path).decode("utf-8"))
            except KeyError as e:
                pex_warnings.warn(
                    "Source distribution {} did not have the expected metadata file {}: {}"
                    .format(sdist_path, pkg_info_path, e))
                return None

    if tarfile.is_tarfile(sdist_path):
        with tarfile.open(sdist_path) as tf:
            try:
                pkg_info = tf.extractfile(pkg_info_path)
                if pkg_info is None:
                    # N.B.: `extractfile` returns None for directories and special files.
                    return None
                with closing(pkg_info) as fp:
                    return _parse_message(fp.read().decode("utf-8"))
            except KeyError as e:
                pex_warnings.warn(
                    "Source distribution {} did not have the expected metadata file {}: {}"
                    .format(sdist_path, pkg_info_path, e))
                return None

    return None
def _declare_namespace_packages(dist):
    """Declare any namespace packages `dist` advertises via its namespace_packages.txt metadata."""
    if not dist.has_metadata('namespace_packages.txt'):
        # No namespace package metadata; nothing to declare.
        return

    # Some distributions (notably `twitter.common.*`) in the wild declare setuptools-specific
    # `namespace_packages` metadata but do not properly declare a dependency on setuptools,
    # which they need at runtime for the canonical:
    #   `__import__('pkg_resources').declare_namespace(__name__)`
    #
    # We therefore prefer a `pkg_resources` provided by the distributions they depend on and
    # fall back to our vendored copy only when none is present. This is safe since our shaded
    # version is only introduced when no standard version exists and is torn down when the
    # bootstrap hands off to user code.
    pkg_resources, vendored = _import_pkg_resources()
    if vendored:
        pex_warnings.warn(
            'The `pkg_resources` package was loaded from a pex vendored version when '
            'declaring namespace packages defined by {dist}. The {dist} distribution '
            'should fix its `install_requires` to include `setuptools`'.format(dist=dist))

    loaded_modules = sys.modules
    for pkg in dist.get_metadata_lines('namespace_packages.txt'):
        if pkg in loaded_modules:
            pkg_resources.declare_namespace(pkg)
def exercise_warnings(pex_info, **env):
    # type: (PexInfo, **str) -> List[warnings.WarningMessage]
    """Configure pex warnings from `pex_info` under `env`, emit one test warning, return captures."""
    with warnings.catch_warnings(record=True) as captured:
        pex_warnings.configure_warnings(pex_info, env=Variables(environ=env))
        pex_warnings.warn("test")

    # `record=True` guarantees a list; make that explicit for the type checker.
    assert captured is not None
    return captured
def _ensure_writeable_pex_root(self, pex_root):
    """Return `pex_root` when writeable; otherwise fall back to a temp dir, recording it in env."""
    if can_write_dir(pex_root):
        return pex_root

    tmp_root = os.path.realpath(safe_mkdtemp())
    pex_warnings.warn(
        "PEX_ROOT is configured as {pex_root} but that path is un-writeable, "
        "falling back to a temporary PEX_ROOT of {tmp_root} which will hurt "
        "performance.".format(pex_root=pex_root, tmp_root=tmp_root))
    # Record the fallback so subsequent lookups see the same root.
    self._environ["PEX_ROOT"] = tmp_root
    return tmp_root
def find_module(self, fullname, path=None):
    """PEP 302 finder hook: return the first importable's loader able to load `fullname`."""
    for importable in self._importables:
        candidate = importable.loader_for(fullname)
        if candidate is None:
            continue
        # Remember every loader we hand out so it can be torn down later.
        self._loaders.append(candidate)
        if self._warning:
            from pex import pex_warnings
            pex_warnings.warn('Found loader for `import {}`:\n\t{}'.format(fullname, self._warning))
        return candidate
    return None
def _parse_requirement_tuple(cls, requirement_tuple): if isinstance(requirement_tuple, (tuple, list)): if len(requirement_tuple) != 3: raise ValueError('Malformed PEX requirement: %r' % (requirement_tuple,)) # pre 0.8.x requirement type: pex_warnings.warn('Attempting to use deprecated PEX feature. Please upgrade past PEX 0.8.x.') return requirement_tuple[0] elif isinstance(requirement_tuple, compatibility_string): return requirement_tuple raise ValueError('Malformed PEX requirement: %r' % (requirement_tuple,))
def _parse_requirement_tuple(cls, requirement_tuple): if isinstance(requirement_tuple, (tuple, list)): if len(requirement_tuple) != 3: raise ValueError('Malformed PEX requirement: %r' % (requirement_tuple,)) # pre 0.8.x requirement type: pex_warnings.warn('Attempting to use deprecated PEX feature. Please upgrade past PEX 0.8.x.') return requirement_tuple[0] elif isinstance(requirement_tuple, compatibility_string): return requirement_tuple raise ValueError('Malformed PEX requirement: %r' % (requirement_tuple,))
def _declare_namespace_packages(cls, resolved_dists):
    # type: (Iterable[Distribution]) -> None
    """Declare all namespace packages advertised by the resolved distributions.

    Warns when the declaration had to fall back to pex's vendored `pkg_resources`.
    """
    namespace_packages_by_dist = OrderedDict()
    for dist in resolved_dists:
        namespace_packages = cls._get_namespace_packages(dist)
        # NB: Dists can explicitly declare empty namespace packages lists to indicate they have none.
        # We only care about dists with one or more namespace packages though; thus, the guard.
        if namespace_packages:
            namespace_packages_by_dist[dist] = namespace_packages

    if not namespace_packages_by_dist:
        return  # Nothing to do here.

    # When declaring namespace packages, we need to do so with the `setuptools` distribution that
    # will be active in the pex environment at runtime and, as such, care must be taken.
    #
    # Properly behaved distributions will declare a dependency on `setuptools`, in which case we
    # use that (non-vendored) distribution. A side-effect of importing `pkg_resources` from that
    # distribution is that a global `pkg_resources.working_set` will be populated. For various
    # `pkg_resources` distribution discovery functions to work, that global
    # `pkg_resources.working_set` must be built with the `sys.path` fully settled. Since all dists
    # in the dependency set (`resolved_dists`) have already been resolved and added to the
    # `sys.path` we're safe to proceed here.
    #
    # Other distributions (notably `twitter.common.*`) in the wild declare `setuptools`-specific
    # `namespace_packages` but do not properly declare a dependency on `setuptools` which they must
    # use to:
    # 1. Declare `namespace_packages` metadata which we just verified they have with the check
    #    above.
    # 2. Declare namespace packages at runtime via the canonical:
    #    `__import__('pkg_resources').declare_namespace(__name__)`
    #
    # For such distributions we fall back to our vendored version of `setuptools`. This is safe,
    # since we'll only introduce our shaded version when no other standard version is present and
    # even then tear it all down when we hand off from the bootstrap to user code.
    pkg_resources, vendored = _import_pkg_resources()
    if vendored:
        dists = "\n".join(
            "\n{index}. {dist} namespace packages:\n {ns_packages}".format(
                index=index + 1,
                dist=dist.as_requirement(),
                ns_packages="\n ".join(ns_packages),
            )
            for index, (dist, ns_packages) in enumerate(namespace_packages_by_dist.items())
        )
        pex_warnings.warn(
            "The `pkg_resources` package was loaded from a pex vendored version when "
            "declaring namespace packages defined by:\n{dists}\n\nThese distributions "
            "should fix their `install_requires` to include `setuptools`".format(dists=dists))

    for pkg in itertools.chain(*namespace_packages_by_dist.values()):
        # Only declare namespaces for packages that have actually been imported.
        if pkg in sys.modules:
            pkg_resources.declare_namespace(pkg)
def pex_root(self):
    # type: () -> str
    """The PEX root to use, falling back to a temp dir when the configured one is un-writeable."""
    root = os.path.expanduser(self.raw_pex_root)
    if can_write_dir(root):
        return root

    tmp_root = safe_mkdtemp()
    pex_warnings.warn(
        "PEX_ROOT is configured as {pex_root} but that path is un-writeable, "
        "falling back to a temporary PEX_ROOT of {tmp_root} which will hurt "
        "performance.".format(pex_root=root, tmp_root=tmp_root))
    # Persist the fallback so subsequent reads agree.
    self._pex_info["pex_root"] = tmp_root
    return tmp_root
def pex_root(self):
    """Return a writeable PEX root, preferring the configured value and defaulting to ~/.pex."""
    configured = self._pex_info.get('pex_root', os.path.join('~', '.pex'))
    pex_root = os.path.expanduser(configured)
    if not can_write_dir(pex_root):
        tmp_root = safe_mkdtemp()
        pex_warnings.warn(
            'PEX_ROOT is configured as {pex_root} but that path is un-writeable, '
            'falling back to a temporary PEX_ROOT of {tmp_root} which will hurt '
            'performance.'.format(pex_root=pex_root, tmp_root=tmp_root))
        # Persist the fallback so subsequent reads agree.
        self._pex_info['pex_root'] = tmp_root
        pex_root = tmp_root
    return pex_root
def find_module(self, fullname, path=None):
    """PEP 302 finder hook: return a loader for `fullname` if any importable provides one."""
    candidates = (importable.loader_for(fullname) for importable in self._importables)
    for loader in candidates:
        if loader is None:
            continue
        # Track handed-out loaders so they can be torn down later.
        self._loaders.append(loader)
        if self._warning:
            from pex import pex_warnings
            pex_warnings.warn(
                'Found loader for `import {}`:\n\t{}'.format(fullname, self._warning))
        return loader
    return None
def __init__(self, pypi_base=PYPI_BASE, use_mirrors=False):
    """Create a fetcher rooted at `pypi_base`, normalized to a trailing-slash URL with a scheme.

    `use_mirrors` is deprecated and only triggers a warning.
    """
    if use_mirrors:
        pex_warnings.warn('use_mirrors is now deprecated.')

    normalized = pypi_base if pypi_base.endswith('/') else pypi_base + '/'
    # Default to http when no scheme was supplied.
    if urlparse.urlparse(normalized).scheme:
        self._pypi_base = normalized
    else:
        self._pypi_base = 'http://' + normalized
def add_resource(self, filename, env_filename):
    """Add a resource to the PEX environment.

    Deprecated: resources should be added via `add_source` instead.

    :param filename: The source filename to add to the PEX; None to create an empty file at
      `env_filename`.
    :param env_filename: The destination filename in the PEX. This path must be a relative path.
    """
    # Warn first so callers learn of the deprecation even if the builder is frozen below.
    pex_warnings.warn(
        "The `add_resource` method is deprecated. Resources should be added via the "
        "`add_source` method instead.")
    self._ensure_unfrozen("Adding a resource")
    self._copy_or_link(filename, env_filename, "resource")
def requires_dists(dist):
    # type: (DistributionLike) -> Iterator[Requirement]
    """Examines dist for and returns any declared requirements.

    Looks for `Requires-Dist` metadata.

    The older `Requires` metadata is intentionally ignored, although we do log a warning if it
    is found to draw attention to this ~work-around and the associated issue in case any new
    data comes in.

    See:
    + https://www.python.org/dev/peps/pep-0345/#requires-dist-multiple-use
    + https://www.python.org/dev/peps/pep-0314/#requires-multiple-use

    :param dist: A distribution to check for requirement metadata.
    :return: All requirements found.
    """
    pkg_info = _parse_pkg_info(dist)
    if pkg_info is None:
        # No parseable metadata; yield nothing.
        return

    for requires_dist in pkg_info.get_all("Requires-Dist", ()):
        yield Requirement.parse(requires_dist)

    legacy_requires = pkg_info.get_all("Requires", [])  # type: List[str]
    if legacy_requires:
        name_and_version = project_name_and_version(dist)
        project_name = name_and_version.project_name if name_and_version else dist
        pex_warnings.warn(
            dedent(
                """\
                Ignoring {count} `Requires` {field} in {dist} metadata:
                {requires}

                You may have issues using the '{project_name}' distribution as a result.
                More information on this workaround can be found here:
                  https://github.com/pantsbuild/pex/issues/1201#issuecomment-791715585
                """
            ).format(
                dist=dist,
                project_name=project_name,
                count=len(legacy_requires),
                field=pluralize(legacy_requires, "field"),
                requires=os.linesep.join(
                    "{index}.) Requires: {req}".format(index=index, req=req)
                    for index, req in enumerate(legacy_requires, start=1)
                ),
            )
        )
def check_glibc_version(version_str, required_major, minimum_minor):
    """True when `version_str` names a glibc of major == required_major, minor >= minimum_minor.

    We use a regexp instead of str.split because we want to discard any random junk that might
    come after the minor version -- this might happen in patched/forked versions of glibc
    (e.g. Linaro's version of glibc uses version strings like "2.20-2014.11"). See gh-3588.
    """
    match = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
    if match is None:
        # NB: Modified from `warnings.warn(..., RuntimeError)`
        pex_warnings.warn("Expected glibc version with 2 components major.minor,"
                          " got: %s" % version_str)
        return False
    major = int(match.group("major"))
    minor = int(match.group("minor"))
    return major == required_major and minor >= minimum_minor
def check_glibc_version(version_str, required_major, minimum_minor):
    """Check a glibc version string against a required major.minor floor.

    A regexp is used rather than str.split so that random junk trailing the minor version
    (e.g. Linaro's patched "2.20-2014.11" scheme, see gh-3588) is discarded.
    """
    parsed = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
    if not parsed:
        # NB: Modified from `warnings.warn(..., RuntimeError)`
        pex_warnings.warn(
            "Expected glibc version with 2 components major.minor,"
            " got: %s" % version_str)
        return False
    if int(parsed.group("major")) != required_major:
        return False
    return int(parsed.group("minor")) >= minimum_minor
def PEX_ROOT(self):
    """Directory

    The directory location for PEX to cache any dependencies and code.
    PEX must write not-zip-safe eggs and all wheels to disk in order to activate them.
    Default: ~/.pex
    """
    configured = self._get_path('PEX_ROOT', default=os.path.expanduser('~/.pex'))
    if configured is None:
        # PEX_ROOT is not set and we're running in stripped_defaults mode.
        return None
    if can_write_dir(configured):
        return configured

    tmp_root = os.path.realpath(safe_mkdtemp())
    pex_warnings.warn('PEX_ROOT is configured as {pex_root} but that path is un-writeable, '
                      'falling back to a temporary PEX_ROOT of {tmp_root} which will hurt '
                      'performance.'.format(pex_root=configured, tmp_root=tmp_root))
    # Persist the fallback so subsequent reads agree.
    self._environ['PEX_ROOT'] = tmp_root
    return tmp_root
def _declare_namespace_packages(resolved_dists):
    """Declare all namespace packages advertised by the given resolved distributions."""
    namespace_package_dists = [dist for dist in resolved_dists
                               if dist.has_metadata('namespace_packages.txt')]
    if not namespace_package_dists:
        return  # Nothing to do here.

    # When declaring namespace packages, we need to do so with the `setuptools` distribution that
    # will be active in the pex environment at runtime and, as such, care must be taken.
    #
    # Properly behaved distributions will declare a dependency on `setuptools`, in which case we
    # use that (non-vendored) distribution. A side-effect of importing `pkg_resources` from that
    # distribution is that a global `pkg_resources.working_set` will be populated. For various
    # `pkg_resources` distribution discovery functions to work, that global
    # `pkg_resources.working_set` must be built with the `sys.path` fully settled. Since all dists
    # in the dependency set (`resolved_dists`) have already been resolved and added to the
    # `sys.path` we're safe to proceed here.
    #
    # Other distributions (notably `twitter.common.*`) in the wild declare `setuptools`-specific
    # `namespace_packages` but do not properly declare a dependency on `setuptools` which they must
    # use to:
    # 1. Declare `namespace_packages` metadata which we just verified they have with the check
    #    above.
    # 2. Declare namespace packages at runtime via the canonical:
    #    `__import__('pkg_resources').declare_namespace(__name__)`
    #
    # For such distributions we fall back to our vendored version of `setuptools`. This is safe,
    # since we'll only introduce our shaded version when no other standard version is present and
    # even then tear it all down when we hand off from the bootstrap to user code.
    pkg_resources, vendored = _import_pkg_resources()
    if vendored:
        pex_warnings.warn('The `pkg_resources` package was loaded from a pex vendored version when '
                          'declaring namespace packages defined by {dists}. These distributions '
                          'should fix their `install_requires` to include `setuptools`'
                          .format(dists=namespace_package_dists))

    for dist in namespace_package_dists:
        for pkg in dist.get_metadata_lines('namespace_packages.txt'):
            # Only declare namespaces for packages that have actually been imported.
            if pkg in sys.modules:
                pkg_resources.declare_namespace(pkg)
def ensure_venv(pex):
    # type: (PEX) -> str
    """Ensure a venv exists for `pex`, creating it atomically on first use.

    :return: The path of the `pex` script at the root of the venv.
    """
    pex_info = pex.pex_info()
    venv_dir = pex_info.venv_dir
    if venv_dir is None:
        raise AssertionError(
            "Expected PEX-INFO for {} to have the components of a venv directory"
            .format(pex.path()))
    # `venv` is truthy only for the process that wins the right to create the directory.
    with atomic_directory(venv_dir, exclusive=True) as venv:
        if venv:
            from .tools.commands.venv import populate_venv_with_pex
            from .tools.commands.virtualenv import Virtualenv

            virtualenv = Virtualenv.create(
                venv_dir=venv,
                interpreter=pex.interpreter,
                copies=pex_info.venv_copies,
            )

            pex_path = os.path.abspath(pex.path())

            short_venv_dir = os.path.join(pex_info.pex_root, "venvs", "short")
            safe_mkdir(short_venv_dir)

            # A sha1 hash is 160 bits -> 20 bytes -> 40 hex characters. We start with 8 characters
            # (32 bits) of entropy since that is short and _very_ unlikely to collide with another
            # PEX venv on this machine. If we still collide after using the whole sha1 (for a total
            # of 33 collisions), then the universe is broken and we raise. It's the least we can do.
            venv_hash = hashlib.sha1(venv_dir.encode("utf-8")).hexdigest()
            collisions = []
            for chars in range(8, len(venv_hash) + 1):
                entropy = venv_hash[:chars]
                short_venv_path = os.path.join(short_venv_dir, entropy)
                try:
                    os.symlink(venv_dir, short_venv_path)
                    break
                except OSError as e:
                    if e.errno != errno.EEXIST:
                        raise e
                    collisions.append(short_venv_path)
                    if entropy == venv_hash:
                        raise RuntimeError(
                            "The venv for {pex} at {venv} has hash collisions with {count} other "
                            "{venvs}!\n{collisions}".format(
                                pex=pex_path,
                                venv=venv_dir,
                                count=len(collisions),
                                venvs=pluralize(collisions, "venv"),
                                collisions="\n".join(
                                    "{index}.) {venv_path}".format(
                                        index=index, venv_path=os.path.realpath(path))
                                    for index, path in enumerate(collisions, start=1)),
                            ))

            # NOTE(review): the local is spelled `shenbang` (sic) in the original; preserved here.
            shenbang = populate_venv_with_pex(
                virtualenv,
                pex,
                bin_path=pex_info.venv_bin_path,
                python=os.path.join(short_venv_path, "bin",
                                    os.path.basename(pex.interpreter.binary)),
                collisions_ok=True,
            )

            # There are popular Linux distributions with shebang length limits (BINPRM_BUF_SIZE
            # in /usr/include/linux/binfmts.h) set at 128 characters, so we warn in the _very_
            # unlikely case that our shortened shebang is longer than this.
            if len(shenbang) > 128:
                pex_warnings.warn(
                    "The venv for {pex} at {venv} has script shebangs of {shebang!r} with {count} "
                    "characters. On some systems this may be too long and cause problems running "
                    "the venv scripts. You may be able adjust PEX_ROOT from {pex_root} to a "
                    "shorter path as a work-around.".format(
                        pex=pex_path,
                        venv=venv_dir,
                        shebang=shenbang,
                        count=len(shenbang),
                        pex_root=pex_info.pex_root,
                    ))

    return os.path.join(venv_dir, "pex")
def exercise_warnings(pex_info, **env):
    """Configure pex warnings for `pex_info` under `env` and emit a single test warning.

    :return: The list of warnings captured while configuring and emitting the test warning.
    """
    with warnings.catch_warnings(record=True) as events:
        pex_warnings.configure_warnings(pex_info, env=Variables(environ=env))
        pex_warnings.warn('test')
    # `record=True` guarantees a list; assert so callers (and type checkers) can rely on it,
    # consistent with the sibling variant of this helper.
    assert events is not None
    return events
def populate_venv_with_pex(
    venv,  # type: Virtualenv
    pex,  # type: PEX
    bin_path=BinPath.FALSE,  # type: BinPath.Value
    python=None,  # type: Optional[str]
    collisions_ok=True,  # type: bool
):
    # type: (...) -> str
    """Populate `venv` with the contents of `pex` and install entry-point scripts.

    :return: The shebang line written into the venv's `__main__.py` / `pex` scripts.
    """
    venv_python = python or venv.interpreter.binary
    venv_bin_dir = os.path.dirname(python) if python else venv.bin_dir
    venv_dir = os.path.dirname(venv_bin_dir) if python else venv.venv_dir

    # 1. Populate the venv with the PEX contents.
    provenance = defaultdict(list)

    def record_provenance(src_to_dst):
        # type: (Iterable[Tuple[str, str]]) -> None
        # Track every (src -> dst) copy so destination collisions can be reported.
        for src, dst in src_to_dst:
            provenance[dst].append(src)

    pex_info = pex.pex_info()

    if zipfile.is_zipfile(pex.path()):
        record_provenance(
            PEXEnvironment(pex.path()).explode_code(venv.site_packages_dir,
                                                    exclude=("__main__.py",)))
    else:
        record_provenance(
            _copytree(
                src=pex.path(),
                dst=venv.site_packages_dir,
                exclude=(pex_info.internal_cache, pex_builder.BOOTSTRAP_DIR, "__main__.py"),
            ))

    for dist in pex.activate():
        record_provenance(
            _copytree(src=dist.location, dst=venv.site_packages_dir, exclude=("bin",)))
        dist_bin_dir = os.path.join(dist.location, "bin")
        if os.path.isdir(dist_bin_dir):
            record_provenance(_copytree(dist_bin_dir, venv.bin_dir))

    # Any destination written by more than one source is a collision.
    collisions = {dst: srcs for dst, srcs in provenance.items() if len(srcs) > 1}
    if collisions:
        message_lines = [
            "Encountered {collision} building venv at {venv_dir} from {pex}:".format(
                collision=pluralize(collisions, "collision"), venv_dir=venv_dir, pex=pex.path())
        ]
        for index, (dst, srcs) in enumerate(collisions.items(), start=1):
            message_lines.append(
                "{index}. {dst} was provided by:\n\t{srcs}".format(
                    index=index, dst=dst, srcs="\n\t".join(srcs)))
        message = "\n".join(message_lines)
        if not collisions_ok:
            raise CollisionError(message)
        pex_warnings.warn(message)

    # 2. Add a __main__ to the root of the venv for running the venv dir like a loose PEX dir
    # and a main.py for running as a script.
    shebang = "#!{} -sE".format(venv_python)
    main_contents = dedent(
        """\
        {shebang}
        if __name__ == "__main__":
            import os
            import sys

            venv_dir = os.path.abspath(os.path.dirname(__file__))
            venv_bin_dir = os.path.join(venv_dir, "bin")
            shebang_python = {shebang_python!r}
            python = os.path.join(venv_bin_dir, os.path.basename(shebang_python))
            if sys.executable not in (python, shebang_python):
                sys.stderr.write("Re-execing from {{}}\\n".format(sys.executable))
                os.execv(python, [python, "-sE"] + sys.argv)

            os.environ["VIRTUAL_ENV"] = venv_dir
            sys.path.extend(os.environ.get("PEX_EXTRA_SYS_PATH", "").split(os.pathsep))

            bin_path = os.environ.get("PEX_VENV_BIN_PATH", {bin_path!r})
            if bin_path != "false":
                PATH = os.environ.get("PATH", "").split(os.pathsep)
                if bin_path == "prepend":
                    PATH.insert(0, venv_bin_dir)
                elif bin_path == "append":
                    PATH.append(venv_bin_dir)
                else:
                    sys.stderr.write(
                        "PEX_VENV_BIN_PATH must be one of 'false', 'prepend' or 'append', given: "
                        "{{!r}}\\n".format(bin_path)
                    )
                    sys.exit(1)
                os.environ["PATH"] = os.pathsep.join(PATH)

            PEX_EXEC_OVERRIDE_KEYS = ("PEX_INTERPRETER", "PEX_SCRIPT", "PEX_MODULE")
            pex_overrides = {{
                key: os.environ.pop(key) for key in PEX_EXEC_OVERRIDE_KEYS if key in os.environ
            }}
            if len(pex_overrides) > 1:
                sys.stderr.write(
                    "Can only specify one of {{overrides}}; found: {{found}}\\n".format(
                        overrides=", ".join(PEX_EXEC_OVERRIDE_KEYS),
                        found=" ".join("{{}}={{}}".format(k, v) for k, v in pex_overrides.items())
                    )
                )
                sys.exit(1)

            pex_script = pex_overrides.get("PEX_SCRIPT")
            if pex_script:
                script_path = os.path.join(venv_bin_dir, pex_script)
                os.execv(script_path, [script_path] + sys.argv[1:])

            pex_interpreter = pex_overrides.get("PEX_INTERPRETER", "").lower() in ("1", "true")
            PEX_INTERPRETER_ENTRYPOINT = "code:interact"
            entry_point = (
                PEX_INTERPRETER_ENTRYPOINT
                if pex_interpreter
                else pex_overrides.get("PEX_MODULE", {entry_point!r} or PEX_INTERPRETER_ENTRYPOINT)
            )
            if entry_point == PEX_INTERPRETER_ENTRYPOINT and len(sys.argv) > 1:
                args = sys.argv[1:]
                arg = args[0]
                if arg == "-m":
                    if len(args) < 2:
                        sys.stderr.write("Argument expected for the -m option\\n")
                        sys.exit(2)
                    entry_point = module = args[1]
                    sys.argv = args[1:]
                    # Fall through to entry_point handling below.
                else:
                    filename = arg
                    sys.argv = args
                    if arg == "-c":
                        if len(args) < 2:
                            sys.stderr.write("Argument expected for the -c option\\n")
                            sys.exit(2)
                        filename = "-c <cmd>"
                        content = args[1]
                        sys.argv = ["-c"] + args[2:]
                    elif arg == "-":
                        content = sys.stdin.read()
                    else:
                        with open(arg) as fp:
                            content = fp.read()

                    ast = compile(content, filename, "exec", flags=0, dont_inherit=1)
                    globals_map = globals().copy()
                    globals_map["__name__"] = "__main__"
                    globals_map["__file__"] = filename
                    locals_map = globals_map
                    {exec_ast}
                    sys.exit(0)

            module_name, _, function = entry_point.partition(":")
            if not function:
                import runpy
                runpy.run_module(module_name, run_name="__main__")
            else:
                import importlib
                module = importlib.import_module(module_name)
                # N.B.: Functions may be hung off top-level objects in the module namespace,
                # e.g.: Class.method; so we drill down through any attributes to the final function
                # object.
                namespace, func = module, None
                for attr in function.split("."):
                    func = namespace = getattr(namespace, attr)
                sys.exit(func())
        """.format(
            shebang=shebang,
            shebang_python=venv_python,
            bin_path=bin_path,
            entry_point=pex_info.entry_point,
            exec_ast=(
                "exec ast in globals_map, locals_map"
                if venv.interpreter.version[0] == 2
                else "exec(ast, globals_map, locals_map)"
            ),
        ))
    with open(venv.join_path("__main__.py"), "w") as fp:
        fp.write(main_contents)
    chmod_plus_x(fp.name)
    # `pex` is a relative symlink to `__main__.py` at the venv root.
    os.symlink(os.path.basename(fp.name), venv.join_path("pex"))

    # 3. Re-write any (console) scripts to use the venv Python.
    for script in venv.rewrite_scripts(python=venv_python, python_args="-sE"):
        TRACER.log("Re-writing {}".format(script))

    return shebang
def build_pex(reqs, options, cache=None):
    """Build a PEXBuilder from parsed requirements and CLI options.

    :param reqs: Requirement strings to resolve into the PEX.
    :param options: Parsed CLI options controlling interpreters, platforms and resolution.
    :param cache: Optional resolver cache directory.
    :return: The populated (unfrozen) PEXBuilder; the caller freezes/builds it.
    """
    interpreters = None  # Default to the current interpreter.

    pex_python_path = options.python_path  # If None, this will result in using $PATH.
    # TODO(#1075): stop looking at PEX_PYTHON_PATH and solely consult the `--python-path` flag.
    if pex_python_path is None and (options.rc_file or not ENV.PEX_IGNORE_RCFILES):
        rc_variables = Variables(rc=options.rc_file)
        pex_python_path = rc_variables.PEX_PYTHON_PATH

    # NB: options.python and interpreter constraints cannot be used together.
    if options.python:
        with TRACER.timed("Resolving interpreters", V=2):

            def to_python_interpreter(full_path_or_basename):
                # Accept either a concrete interpreter path or a basename found on $PATH.
                if os.path.isfile(full_path_or_basename):
                    return PythonInterpreter.from_binary(full_path_or_basename)
                else:
                    interp = PythonInterpreter.from_env(full_path_or_basename)
                    if interp is None:
                        die("Failed to find interpreter: %s" % full_path_or_basename)
                    return interp

            interpreters = [to_python_interpreter(interp) for interp in options.python]
    elif options.interpreter_constraint:
        with TRACER.timed("Resolving interpreters", V=2):
            constraints = options.interpreter_constraint
            validate_constraints(constraints)
            try:
                interpreters = list(
                    iter_compatible_interpreters(
                        path=pex_python_path, interpreter_constraints=constraints))
            except UnsatisfiableInterpreterConstraintsError as e:
                die(
                    e.create_message("Could not find a compatible interpreter."),
                    CANNOT_SETUP_INTERPRETER,
                )

    platforms = OrderedSet(options.platforms)
    interpreters = interpreters or []
    if options.platforms and options.resolve_local_platforms:
        with TRACER.timed(
            "Searching for local interpreters matching {}".format(
                ", ".join(map(str, platforms)))):
            candidate_interpreters = OrderedSet(
                iter_compatible_interpreters(path=pex_python_path))
            candidate_interpreters.add(PythonInterpreter.get())
            for candidate_interpreter in candidate_interpreters:
                resolved_platforms = candidate_interpreter.supported_platforms.intersection(
                    platforms)
                if resolved_platforms:
                    for resolved_platform in resolved_platforms:
                        TRACER.log("Resolved {} for platform {}".format(
                            candidate_interpreter, resolved_platform))
                        platforms.remove(resolved_platform)
                    interpreters.append(candidate_interpreter)
        if platforms:
            # Any platforms not claimed by a local interpreter fall back to binary-only resolution.
            TRACER.log(
                "Could not resolve a local interpreter for {}, will resolve only binary distributions "
                "for {}.".format(
                    ", ".join(map(str, platforms)),
                    "this platform" if len(platforms) == 1 else "these platforms",
                ))

    interpreter = (PythonInterpreter.latest_release_of_min_compatible_version(interpreters)
                   if interpreters else None)

    try:
        with open(options.preamble_file) as preamble_fd:
            preamble = preamble_fd.read()
    except TypeError:
        # options.preamble_file is None
        preamble = None

    pex_builder = PEXBuilder(
        path=safe_mkdtemp(),
        interpreter=interpreter,
        preamble=preamble,
        copy_mode=CopyMode.SYMLINK,
        include_tools=options.include_tools or options.venv,
    )

    if options.resources_directory:
        pex_warnings.warn(
            "The `-R/--resources-directory` option is deprecated. Resources should be added via "
            "`-D/--sources-directory` instead.")

    for directory in OrderedSet(options.sources_directory + options.resources_directory):
        src_dir = os.path.normpath(directory)
        for root, _, files in os.walk(src_dir):
            for f in files:
                src_file_path = os.path.join(root, f)
                dst_path = os.path.relpath(src_file_path, src_dir)
                pex_builder.add_source(src_file_path, dst_path)

    pex_info = pex_builder.info
    pex_info.zip_safe = options.zip_safe
    pex_info.unzip = options.unzip
    pex_info.venv = bool(options.venv)
    pex_info.venv_bin_path = options.venv
    pex_info.venv_copies = options.venv_copies
    pex_info.pex_path = options.pex_path
    pex_info.always_write_cache = options.always_write_cache
    pex_info.ignore_errors = options.ignore_errors
    pex_info.emit_warnings = options.emit_warnings
    pex_info.inherit_path = InheritPath.for_value(options.inherit_path)
    pex_info.pex_root = options.runtime_pex_root
    pex_info.strip_pex_env = options.strip_pex_env

    if options.interpreter_constraint:
        for ic in options.interpreter_constraint:
            pex_builder.add_interpreter_constraint(ic)

    indexes = compute_indexes(options)

    for requirements_pex in options.requirements_pexes:
        pex_builder.add_from_requirements_pex(requirements_pex)

    with TRACER.timed(
        "Resolving distributions ({})".format(reqs + options.requirement_files)):
        if options.cache_ttl:
            pex_warnings.warn(
                "The --cache-ttl option is deprecated and no longer has any effect.")
        if options.headers:
            pex_warnings.warn(
                "The --header option is deprecated and no longer has any effect.")

        network_configuration = NetworkConfiguration(
            retries=options.retries,
            timeout=options.timeout,
            proxy=options.proxy,
            cert=options.cert,
            client_cert=options.client_cert,
        )

        try:
            if options.pex_repository:
                with TRACER.timed("Resolving requirements from PEX {}.".format(
                        options.pex_repository)):
                    resolveds = resolve_from_pex(
                        pex=options.pex_repository,
                        requirements=reqs,
                        requirement_files=options.requirement_files,
                        constraint_files=options.constraint_files,
                        network_configuration=network_configuration,
                        transitive=options.transitive,
                        interpreters=interpreters,
                        platforms=list(platforms),
                        manylinux=options.manylinux,
                        ignore_errors=options.ignore_errors,
                    )
            else:
                with TRACER.timed("Resolving requirements."):
                    resolveds = resolve_multi(
                        requirements=reqs,
                        requirement_files=options.requirement_files,
                        constraint_files=options.constraint_files,
                        allow_prereleases=options.allow_prereleases,
                        transitive=options.transitive,
                        interpreters=interpreters,
                        platforms=list(platforms),
                        indexes=indexes,
                        find_links=options.find_links,
                        resolver_version=ResolverVersion.for_value(
                            options.resolver_version),
                        network_configuration=network_configuration,
                        cache=cache,
                        build=options.build,
                        use_wheel=options.use_wheel,
                        compile=options.compile,
                        manylinux=options.manylinux,
                        max_parallel_jobs=options.max_parallel_jobs,
                        ignore_errors=options.ignore_errors,
                    )

            for resolved_dist in resolveds:
                pex_builder.add_distribution(resolved_dist.distribution)
                if resolved_dist.direct_requirement:
                    pex_builder.add_requirement(resolved_dist.direct_requirement)
        except Unsatisfiable as e:
            die(str(e))

    if options.entry_point and options.script:
        die("Must specify at most one entry point or script.", INVALID_OPTIONS)

    if options.entry_point:
        pex_builder.set_entry_point(options.entry_point)
    elif options.script:
        pex_builder.set_script(options.script)

    if options.python_shebang:
        pex_builder.set_shebang(options.python_shebang)

    return pex_builder
def warn_ignore_pex_root(set_via):
    """Warn that the pex root configured via `set_via` is ignored because --disable-cache is set."""
    message = (
        "The pex root has been set via {via} but --disable-cache is also set. "
        "Ignoring {via} and disabling caches.".format(via=set_via)
    )
    pex_warnings.warn(message)
def main(args=None):
    """CLI entry point: parse args, build the PEX, then save, seed or run it."""
    args = args[:] if args else sys.argv[1:]
    args = [transform_legacy_arg(arg) for arg in args]
    parser = configure_clp()

    # Everything after a bare `--` is the command line for the built PEX, not for us.
    try:
        separator = args.index("--")
        args, cmdline = args[:separator], args[separator + 1:]
    except ValueError:
        args, cmdline = args, []

    options = parser.parse_args(args=args)

    # Ensure the TMPDIR is an absolute path (So subprocesses that change CWD can find it) and
    # that it exists.
    tmpdir = os.path.realpath(options.tmpdir)
    if not os.path.exists(tmpdir):
        die("The specified --tmpdir does not exist: {}".format(tmpdir))
    if not os.path.isdir(tmpdir):
        die("The specified --tmpdir is not a directory: {}".format(tmpdir))
    tempfile.tempdir = os.environ["TMPDIR"] = tmpdir

    if options.cache_dir:
        pex_warnings.warn(
            "The --cache-dir option is deprecated, use --pex-root instead.")
        if options.pex_root and options.cache_dir != options.pex_root:
            die("Both --cache-dir and --pex-root were passed with conflicting values. "
                "Just set --pex-root.")

    if options.disable_cache:

        def warn_ignore_pex_root(set_via):
            pex_warnings.warn(
                "The pex root has been set via {via} but --disable-cache is also set. "
                "Ignoring {via} and disabling caches.".format(via=set_via))

        if options.cache_dir:
            warn_ignore_pex_root("--cache-dir")
        elif options.pex_root:
            warn_ignore_pex_root("--pex-root")
        elif os.environ.get("PEX_ROOT"):
            warn_ignore_pex_root("PEX_ROOT")

        # Caches disabled: use a throw-away root.
        pex_root = safe_mkdtemp()
    else:
        pex_root = options.cache_dir or options.pex_root or ENV.PEX_ROOT

    if options.python and options.interpreter_constraint:
        die('The "--python" and "--interpreter-constraint" options cannot be used together.')

    if options.pex_repository and (options.indexes or options.find_links):
        die('The "--pex-repository" option cannot be used together with the "--index" or '
            '"--find-links" options.')

    with ENV.patch(PEX_VERBOSE=str(options.verbosity), PEX_ROOT=pex_root,
                   TMPDIR=tmpdir) as patched_env:
        with TRACER.timed("Building pex"):
            pex_builder = build_pex(options.requirements, options, cache=ENV.PEX_ROOT)

        pex_builder.freeze(bytecode_compile=options.compile)
        interpreter = pex_builder.interpreter
        pex = PEX(pex_builder.path(), interpreter=interpreter,
                  verify_entry_point=options.validate_ep)

        if options.pex_name is not None:
            # Saving the PEX to disk; optionally seed the runtime caches for fast first run.
            log("Saving PEX file to %s" % options.pex_name, V=options.verbosity)
            pex_builder.build(
                options.pex_name,
                bytecode_compile=options.compile,
                deterministic_timestamp=not options.use_system_time,
            )
            if options.seed != Seed.NONE:
                seed_info = seed_cache(options, pex, verbose=options.seed == Seed.VERBOSE)
                print(seed_info)
        else:
            # No output file requested: run the freshly built PEX in place.
            if not _compatible_with_current_platform(interpreter, options.platforms):
                log("WARNING: attempting to run PEX with incompatible platforms!", V=1)
                log(
                    "Running on platform {} but built for {}".format(
                        interpreter.platform, ", ".join(map(str, options.platforms))),
                    V=1,
                )

            log(
                "Running PEX file at %s with args %s" % (pex_builder.path(), cmdline),
                V=options.verbosity,
            )
            sys.exit(pex.run(args=list(cmdline), env=patched_env))
def _extract_sdist(
    pex,  # type: PEX
    dest_dir,  # type: str
):
    # type: (...) -> None
    """Repackage the user code in `pex` as a source distribution written to `dest_dir`.

    The PEX's code is exploded into a scratch chroot under ``src/``, a synthetic
    ``setup.cfg`` / ``setup.py`` / ``MANIFEST.in`` trio is generated from the PEX's
    metadata, and ``setup.py sdist`` is run (with setuptools exposed) to produce the
    sdist in `dest_dir`.
    """
    # Stage everything in a throw-away chroot with the code rooted at src/.
    chroot = safe_mkdtemp()
    src = os.path.join(chroot, "src")
    safe_mkdir(src)

    # PEX runtime scaffolding must not leak into the sdist.
    excludes = ["__main__.py", "PEX-INFO"]
    if zipfile.is_zipfile(pex.path()):
        PEXEnvironment(pex.path()).explode_code(src, exclude=excludes)
    else:
        shutil.copytree(pex.path(), src, ignore=lambda _dir, _names: excludes)

    pex_info = pex.pex_info()

    # Derive project metadata from the PEX itself. The version is synthetic but
    # deterministic: a local version tag built from the PEX's code hash.
    name, _ = os.path.splitext(os.path.basename(pex.path()))
    version = "0.0.0+{}".format(pex_info.code_hash)
    zip_safe = pex_info.zip_safe
    py_modules = [os.path.splitext(f)[0] for f in os.listdir(src) if f.endswith(".py")]
    # Every directory under src/ is advertised as a package via its dotted path.
    packages = [
        os.path.relpath(os.path.join(root, d), src).replace(os.sep, ".")
        for root, dirs, _ in os.walk(src)
        for d in dirs
    ]
    install_requires = [str(req) for req in pex_info.requirements]

    # `python_requires` can only express a single specifier set, so it is emitted only
    # when the PEX carries exactly one interpreter constraint; multiple constraints
    # are reported via a warning and omitted from the sdist metadata.
    python_requires = None
    if len(pex_info.interpreter_constraints) == 1:
        python_requires = str(
            PythonIdentity.parse_requirement(pex_info.interpreter_constraints[0]).specifier
        )
    elif pex_info.interpreter_constraints:
        pex_warnings.warn(
            "Omitting `python_requires` for {name} sdist since {pex} has multiple "
            "interpreter constraints:\n{interpreter_constraints}".format(
                name=name,
                pex=os.path.normpath(pex.path()),
                interpreter_constraints="\n".join(
                    "{index}.) {constraint}".format(index=index, constraint=constraint)
                    for index, constraint in enumerate(
                        pex_info.interpreter_constraints, start=1
                    )
                ),
            )
        )

    # Only an entry point of the `module:function` form can be mapped to a console
    # script; bare-module entry points are skipped.
    entry_points = []
    if pex_info.entry_point and ":" in pex_info.entry_point:
        entry_points = [(name, pex_info.entry_point)]

    # NOTE(review): the whitespace inside the generated setup.cfg values below may have
    # been collapsed in this view of the file — confirm the join separators and template
    # layout against a generated sdist.
    with open(os.path.join(chroot, "setup.cfg"), "w") as fp:
        fp.write(
            dedent(
                """\
                [metadata]
                name = {name}
                version = {version}

                [options]
                zip_safe = {zip_safe}
                {py_modules}
                {packages}
                package_dir =
                    =src
                include_package_data = True

                {python_requires}
                {install_requires}

                [options.entry_points]
                {entry_points}
                """
            ).format(
                name=name,
                version=version,
                zip_safe=zip_safe,
                py_modules=(
                    "py_modules =\n {}".format("\n ".join(py_modules)) if py_modules else ""
                ),
                packages=(
                    "packages = \n {}".format("\n ".join(packages)) if packages else ""
                ),
                install_requires=(
                    "install_requires =\n {}".format("\n ".join(install_requires))
                    if install_requires
                    else ""
                ),
                python_requires=(
                    "python_requires = {}".format(python_requires) if python_requires else ""
                ),
                entry_points=(
                    "console_scripts =\n {}".format(
                        "\n ".join(
                            "{} = {}".format(name, entry_point)
                            for name, entry_point in entry_points
                        )
                    )
                    if entry_points
                    else ""
                ),
            )
        )

    # Include all staged files in the sdist, and use a minimal declarative setup.py
    # driver that defers entirely to setup.cfg.
    with open(os.path.join(chroot, "MANIFEST.in"), "w") as fp:
        fp.write("recursive-include src *")

    with open(os.path.join(chroot, "setup.py"), "w") as fp:
        fp.write("import setuptools; setuptools.setup()")

    # Build the sdist with the PEX's own interpreter, exposing setuptools to it.
    spawn_python_job(
        args=["setup.py", "sdist", "--dist-dir", dest_dir],
        interpreter=pex.interpreter,
        expose=["setuptools"],
        cwd=chroot,
    ).wait()
def exercise_warnings(pex_info, **env):
    """Drive pex warning configuration and capture what it emits.

    Configures pex warnings from `pex_info` and the given environment variables,
    emits one test warning, and returns the list of warning events recorded while
    the capture was active (empty if warnings were suppressed).
    """
    with warnings.catch_warnings(record=True) as captured:
        pex_warnings.configure_warnings(pex_info, env=Variables(environ=env))
        pex_warnings.warn('test')
    return captured
def main(args=None):
    """CLI entry point (legacy variant): parse arguments, build a PEX, save or run it.

    Everything before a literal ``--`` separator is parsed as pex options; everything
    after it is forwarded verbatim as the command line of the built PEX. When
    ``--pex-name`` is given the PEX is written to disk atomically (tmp file + rename);
    otherwise the PEX is executed in-place and this function does not return — it exits
    via ``sys.exit`` with the PEX's exit code.

    :param args: Argument list to parse; defaults to ``sys.argv[1:]``.
    """
    # Copy defensively so a caller-supplied list is never mutated, then rewrite any
    # legacy option spellings to their current forms.
    args = args[:] if args else sys.argv[1:]
    args = [transform_legacy_arg(arg) for arg in args]
    parser = configure_clp()

    try:
        # Split "pex options -- app args": everything after `--` becomes the cmdline
        # passed to the PEX if it is run below.
        separator = args.index('--')
        args, cmdline = args[:separator], args[separator + 1:]
    except ValueError:
        # No `--` present: all arguments are pex options and the run cmdline is empty.
        args, cmdline = args, []

    # NB: unlike the newer entry point, this parser returns positional requirements
    # separately from the options object.
    options, reqs = parser.parse_args(args=args)

    if options.cache_dir:
        # --cache-dir is the deprecated spelling of --pex-root; both may be passed,
        # but only with the same value.
        pex_warnings.warn('The --cache-dir option is deprecated, use --pex-root instead.')
        if options.pex_root and options.cache_dir != options.pex_root:
            die('Both --cache-dir and --pex-root were passed with conflicting values. '
                'Just set --pex-root.')

    if options.disable_cache:
        # Caching was explicitly disabled: warn if a pex root was also configured (by
        # any mechanism), then use a throw-away temp dir as the pex root instead.
        def warn_ignore_pex_root(set_via):
            pex_warnings.warn('The pex root has been set via {via} but --disable-cache is also set. '
                              'Ignoring {via} and disabling caches.'.format(via=set_via))

        if options.cache_dir:
            warn_ignore_pex_root('--cache-dir')
        elif options.pex_root:
            warn_ignore_pex_root('--pex-root')
        elif os.environ.get('PEX_ROOT'):
            warn_ignore_pex_root('PEX_ROOT')

        pex_root = safe_mkdtemp()
    else:
        # Precedence: deprecated --cache-dir, then --pex-root, then the environment.
        pex_root = options.cache_dir or options.pex_root or ENV.PEX_ROOT

    if options.python and options.interpreter_constraint:
        die('The "--python" and "--interpreter-constraint" options cannot be used together.')

    # Run the build (and any in-place execution) under a patched pex environment so
    # subprocesses observe the resolved verbosity and pex root.
    with ENV.patch(PEX_VERBOSE=str(options.verbosity), PEX_ROOT=pex_root) as patched_env:
        with TRACER.timed('Building pex'):
            pex_builder = build_pex(reqs, options, cache=ENV.PEX_ROOT)

        pex_builder.freeze(bytecode_compile=options.compile)
        pex = PEX(pex_builder.path(),
                  interpreter=pex_builder.interpreter,
                  verify_entry_point=options.validate_ep)

        if options.pex_name is not None:
            # Output mode: build to a `~`-suffixed temp file first, then rename into
            # place so a partially-written PEX is never observed at the final path.
            log('Saving PEX file to %s' % options.pex_name, V=options.verbosity)
            tmp_name = options.pex_name + '~'
            safe_delete(tmp_name)
            pex_builder.build(
                tmp_name,
                bytecode_compile=options.compile,
                deterministic_timestamp=not options.use_system_time
            )
            os.rename(tmp_name, options.pex_name)
        else:
            # Run mode: warn (but proceed) when the built platforms don't match the
            # current platform.
            if not _compatible_with_current_platform(options.platforms):
                log('WARNING: attempting to run PEX with incompatible platforms!', V=1)
                log('Running on platform {} but built for {}'
                    .format(Platform.current(), ', '.join(map(str, options.platforms))), V=1)

            log('Running PEX file at %s with args %s' % (pex_builder.path(), cmdline),
                V=options.verbosity)
            # Does not return: exits with the PEX process's exit code.
            sys.exit(pex.run(args=list(cmdline), env=patched_env))