Пример #1
0
class PexInfo(object):
    """PEX metadata.

    # Build metadata:
    build_properties: BuildProperties  # (key-value information about the build system)
    code_hash: str                     # sha1 hash of all names/code in the archive
    distributions: {dist_name: str}    # map from distribution name (i.e. path in
                                       # the internal cache) to its cache key (sha1)
    pex_hash: str                      # sha1 hash of all names/code and distributions in the pex
    requirements: list                 # list of requirements for this environment

    # Environment options
    pex_root: string                    # root of all pex-related files eg: ~/.pex
    entry_point: string                 # entry point into this pex
    script: string                      # script to execute in this pex environment
                                        # at most one of script/entry_point can be specified
    zip_safe: bool, default True        # is this pex zip safe?
    unzip: bool, default False          # should this pex be unzipped and re-executed from there?
    inherit_path: false/fallback/prefer # should this pex inherit site-packages + user site-packages
                                        # + PYTHONPATH?
    ignore_errors: True, default False  # should we ignore inability to resolve dependencies?
    always_write_cache: False           # should we always write the internal cache to disk first?
                                        # this is useful if you have very large dependencies that
                                        # do not fit in RAM constrained environments

    .. versionchanged:: 0.8
      Removed the ``repositories`` and ``indices`` information, as they were never
      implemented.
    """

    # Name of the metadata file inside a PEX (zip or directory layout).
    PATH = "PEX-INFO"
    # Subdirectory of pex_root where installed wheel chroots are cached.
    INSTALL_CACHE = "installed_wheels"

    @classmethod
    def make_build_properties(cls, interpreter=None):
        """Return build-system metadata for ``interpreter`` (or the current one)."""
        # This lazy import is currently needed for performance reasons. At PEX runtime PexInfo is
        # read in the bootstrap to see if the PEX should run in `--unzip` mode. If so, it must
        # re-exec itself to run against its unzipped contents. Since `make_build_properties` is only
        # used at PEX buildtime and the transitive imports of PythonInterpreter are large and slow,
        # we avoid this import cost for runtime-only use.
        #
        # See: https://github.com/pantsbuild/pex/issues/1054
        from pex.interpreter import PythonInterpreter

        pi = interpreter or PythonInterpreter.get()
        plat = pi.platform
        platform_name = plat.platform
        return {
            "pex_version": pex_version,
            "class": pi.identity.interpreter,
            "version": pi.identity.version,
            "platform": platform_name,
        }

    @classmethod
    def default(cls, interpreter=None):
        # type: (Optional[PythonInterpreter]) -> PexInfo
        """Create a PexInfo with empty requirements/distributions and default build metadata."""
        pex_info = {
            "requirements": [],
            "distributions": {},
            "build_properties": cls.make_build_properties(interpreter),
        }
        return cls(info=pex_info)

    @classmethod
    def from_pex(cls, pex):
        # type: (str) -> PexInfo
        """Load the PexInfo for ``pex``, which may be a zip, a venv script or a directory."""
        if zipfile.is_zipfile(pex):  # Zip App
            with open_zip(pex) as zf:
                pex_info = zf.read(cls.PATH)
        elif os.path.isfile(pex):  # Venv
            # A venv pex script sits next to its PEX-INFO file.
            with open(os.path.join(os.path.dirname(pex), cls.PATH)) as fp:
                pex_info = fp.read()
        else:  # Directory (Unzip mode or PEXBuilder.freeze)
            with open(os.path.join(pex, cls.PATH)) as fp:
                pex_info = fp.read()
        return cls.from_json(pex_info)

    @classmethod
    def from_json(cls, content):
        # type: (Union[bytes, Text]) -> PexInfo
        """Deserialize a PexInfo from its JSON representation."""
        if isinstance(content, bytes):
            content = content.decode("utf-8")
        return cls(info=json.loads(content))

    @classmethod
    def from_env(cls, env=ENV):
        # type: (Variables) -> PexInfo
        """Create a PexInfo reflecting only the PEX_* overrides explicitly set in ``env``."""
        # PEX_FORCE_LOCAL is the inverse of zip_safe.
        pex_force_local = Variables.PEX_FORCE_LOCAL.strip_default(env)
        zip_safe = None if pex_force_local is None else not pex_force_local

        pex_inherit_path = Variables.PEX_INHERIT_PATH.strip_default(env)
        inherit_path = None if pex_inherit_path is None else pex_inherit_path.value

        pex_info = {
            "pex_root": Variables.PEX_ROOT.strip_default(env),
            "entry_point": env.PEX_MODULE,
            "script": env.PEX_SCRIPT,
            "zip_safe": zip_safe,
            "unzip": Variables.PEX_UNZIP.strip_default(env),
            "venv": Variables.PEX_VENV.strip_default(env),
            "inherit_path": inherit_path,
            "ignore_errors": Variables.PEX_IGNORE_ERRORS.strip_default(env),
            "always_write_cache":
            Variables.PEX_ALWAYS_CACHE.strip_default(env),
        }
        # Filter out empty entries not explicitly set in the environment.
        return cls(info=dict(
            (k, v) for (k, v) in pex_info.items() if v is not None))

    @classmethod
    def _parse_requirement_tuple(cls, requirement_tuple):
        """Normalize a requirement entry: accept a pre-0.8.x 3-tuple or a plain string."""
        if isinstance(requirement_tuple, (tuple, list)):
            if len(requirement_tuple) != 3:
                raise ValueError("Malformed PEX requirement: %r" %
                                 (requirement_tuple, ))
            # pre 0.8.x requirement type:
            pex_warnings.warn(
                "Attempting to use deprecated PEX feature.  Please upgrade past PEX 0.8.x."
            )
            return requirement_tuple[0]
        elif isinstance(requirement_tuple, compatibility_string):
            return requirement_tuple
        raise ValueError("Malformed PEX requirement: %r" %
                         (requirement_tuple, ))

    def __init__(self, info=None):
        # type: (Optional[Mapping[str, Any]]) -> None
        """Construct a new PexInfo.

        This should not be used directly.

        :param info: Optional seed mapping of PEX-INFO keys; copied defensively so later
            mutation via this object's setters cannot alter the caller's dict.
        :raises ValueError: If ``info`` is not a dict or its requirements are malformed.
        """

        if info is not None and not isinstance(info, dict):
            raise ValueError("PexInfo can only be seeded with a dict, got: "
                             "%s of type %s" % (info, type(info)))
        self._pex_info = dict(info) if info else {}  # type: Dict[str, Any]
        self._distributions = self._pex_info.get("distributions", {})
        # cast as set because pex info from json must store interpreter_constraints as a list
        self._interpreter_constraints = set(
            self._pex_info.get("interpreter_constraints", set()))
        requirements = self._pex_info.get("requirements", [])
        if not isinstance(requirements, (list, tuple)):
            raise ValueError("Expected requirements to be a list, got %s" %
                             type(requirements))
        self._requirements = OrderedSet(
            self._parse_requirement_tuple(req) for req in requirements)

    def _get_safe(self, key):
        """Return the raw value for ``key`` or None; utf-8 encoded to bytes under Python 2."""
        if key not in self._pex_info:
            return None
        value = self._pex_info[key]
        return value.encode("utf-8") if PY2 else value

    @property
    def build_properties(self):
        """Information about the system on which this PEX was generated.

        :returns: A dictionary containing metadata about the environment used to build this PEX.
        """
        return self._pex_info.get("build_properties", {})

    @build_properties.setter
    def build_properties(self, value):
        if not isinstance(value, dict):
            raise TypeError("build_properties must be a dictionary!")
        # Start from fresh current-build defaults, then layer the caller's overrides on top.
        self._pex_info["build_properties"] = self.make_build_properties()
        self._pex_info["build_properties"].update(value)

    @property
    def zip_safe(self):
        """Whether or not this PEX should be treated as zip-safe.

        If set to false and the PEX is zipped, the contents of the PEX will be unpacked into a
        directory within the PEX_ROOT prior to execution.  This allows code and frameworks depending
        upon __file__ existing on disk to operate normally.

        By default zip_safe is True.  May be overridden at runtime by the $PEX_FORCE_LOCAL environment
        variable.
        """
        return self._pex_info.get("zip_safe", True)

    @zip_safe.setter
    def zip_safe(self, value):
        self._pex_info["zip_safe"] = bool(value)

    @property
    def unzip(self):
        """Whether or not PEX should be unzipped before it's executed.

        Unzipping a PEX is a operation that can be cached on the 1st run of a given PEX file which
        can result in lower startup latency in subsequent runs.
        """
        return self._pex_info.get("unzip", False)

    @unzip.setter
    def unzip(self, value):
        self._pex_info["unzip"] = bool(value)

    @property
    def unzip_dir(self):
        # type: () -> Optional[str]
        """The cache directory this PEX unzips to, or None when not in unzip mode."""
        if not self.unzip:
            return None
        if self.pex_hash is None:
            raise ValueError(
                "The unzip_dir was requested but no pex_hash was set.")
        return variables.unzip_dir(self.pex_root, self.pex_hash)

    @property
    def venv(self):
        # type: () -> bool
        """Whether or not PEX should be converted to a venv before it's executed.

        Creating a venv from a PEX is a operation that can be cached on the 1st run of a given PEX
        file which results in lower startup latency in subsequent runs.
        """
        return self._pex_info.get("venv", False)

    @venv.setter
    def venv(self, value):
        # type: (bool) -> None
        self._pex_info["venv"] = bool(value)

    @property
    def venv_bin_path(self):
        # type: () -> BinPath.Value
        """When run as a venv, whether or not to include `bin/` scripts on the PATH."""
        return BinPath.for_value(
            self._pex_info.get("venv_bin_path", BinPath.FALSE.value))

    @venv_bin_path.setter
    def venv_bin_path(self, value):
        # type: (BinPath.Value) -> None
        self._pex_info["venv_bin_path"] = str(value)

    @property
    def venv_copies(self):
        # type: () -> bool
        """Whether a venv created from this PEX should copy (vs symlink) the interpreter."""
        return self._pex_info.get("venv_copies", False)

    @venv_copies.setter
    def venv_copies(self, value):
        # type: (bool) -> None
        self._pex_info["venv_copies"] = value

    @property
    def venv_dir(self):
        # type: () -> Optional[str]
        """The cache directory for this PEX's venv, or None when not in venv mode."""
        if not self.venv:
            return None
        if self.pex_hash is None:
            raise ValueError(
                "The venv_dir was requested but no pex_hash was set.")
        return variables.venv_dir(
            pex_root=self.pex_root,
            pex_hash=self.pex_hash,
            interpreter_constraints=self.interpreter_constraints,
            strip_pex_env=self.strip_pex_env,
            pex_path=self.pex_path,
        )

    @property
    def strip_pex_env(self):
        """Whether or not this PEX should strip `PEX_*` env vars before executing its entrypoint.

        You might want to set this to `False` if this PEX executes other PEXes or the Pex CLI itself
        and you want the executed PEX to be controlled via PEX environment variables.
        """
        return self._pex_info.get("strip_pex_env", True)

    @strip_pex_env.setter
    def strip_pex_env(self, value):
        self._pex_info["strip_pex_env"] = bool(value)

    @property
    def pex_path(self):
        # type: () -> Optional[str]
        """A colon separated list of other pex files to merge into the runtime environment.

        This pex info property is used to persist the PEX_PATH environment variable into the pex
        info metadata for reuse within a built pex.
        """
        return cast("Optional[str]", self._pex_info.get("pex_path"))

    @pex_path.setter
    def pex_path(self, value):
        # type: (str) -> None
        self._pex_info["pex_path"] = value

    @property
    def inherit_path(self):
        # type: () -> InheritPath.Value
        """Whether or not this PEX should be allowed to inherit system dependencies.

        By default, PEX environments are scrubbed of all system distributions prior to execution.
        This means that PEX files cannot rely upon preexisting system libraries.

        By default inherit_path is false. This may be overridden at runtime by the $PEX_INHERIT_PATH
        environment variable.
        """
        inherit_path = self._pex_info.get("inherit_path")
        return InheritPath.for_value(
            inherit_path) if inherit_path else InheritPath.FALSE

    @inherit_path.setter
    def inherit_path(self, value):
        # type: (InheritPath.Value) -> None
        # Stored as the plain string value for JSON round-tripping.
        self._pex_info["inherit_path"] = value.value

    @property
    def interpreter_constraints(self):
        """A list of constraints that determine the interpreter compatibility for this pex, using
        the Requirement-style format, e.g. ``'CPython>=3', or just '>=2.7,<3'`` for requirements
        agnostic to interpreter class.

        This property will be used at exec time when bootstrapping a pex to search PEX_PYTHON_PATH
        for a list of compatible interpreters.
        """
        return list(self._interpreter_constraints)

    def add_interpreter_constraint(self, value):
        """Add a single interpreter constraint string to this PEX's metadata."""
        self._interpreter_constraints.add(str(value))

    @property
    def ignore_errors(self):
        return self._pex_info.get("ignore_errors", False)

    @ignore_errors.setter
    def ignore_errors(self, value):
        self._pex_info["ignore_errors"] = bool(value)

    @property
    def emit_warnings(self):
        return self._pex_info.get("emit_warnings", True)

    @emit_warnings.setter
    def emit_warnings(self, value):
        self._pex_info["emit_warnings"] = bool(value)

    @property
    def code_hash(self):
        # type: () -> Optional[str]
        return self._pex_info.get("code_hash")

    @code_hash.setter
    def code_hash(self, value):
        # type: (str) -> None
        self._pex_info["code_hash"] = value

    @property
    def pex_hash(self):
        # type: () -> Optional[str]
        return self._pex_info.get("pex_hash")

    @pex_hash.setter
    def pex_hash(self, value):
        # type: (str) -> None
        self._pex_info["pex_hash"] = value

    @property
    def entry_point(self):
        return self._get_safe("entry_point")

    @entry_point.setter
    def entry_point(self, value):
        self._pex_info["entry_point"] = value

    @property
    def script(self):
        return self._get_safe("script")

    @script.setter
    def script(self, value):
        self._pex_info["script"] = value

    def add_requirement(self, requirement):
        """Add a requirement (stringified) to this PEX's requirement set."""
        self._requirements.add(str(requirement))

    @property
    def requirements(self):
        return self._requirements

    def add_distribution(self, location, sha):
        """Record a distribution at ``location`` with cache key ``sha``."""
        self._distributions[location] = sha

    @property
    def distributions(self):
        return self._distributions

    @property
    def always_write_cache(self):
        return self._pex_info.get("always_write_cache", False)

    @always_write_cache.setter
    def always_write_cache(self, value):
        self._pex_info["always_write_cache"] = bool(value)

    @property
    def raw_pex_root(self):
        # type: () -> str
        """The configured pex_root, un-expanded (may still contain ``~``)."""
        return cast(str,
                    self._pex_info.get("pex_root", os.path.join("~", ".pex")))

    @property
    def pex_root(self):
        # type: () -> str
        """The expanded pex_root, falling back to a temp dir when un-writeable."""
        pex_root = os.path.expanduser(self.raw_pex_root)
        if not can_write_dir(pex_root):
            tmp_root = safe_mkdtemp()
            pex_warnings.warn(
                "PEX_ROOT is configured as {pex_root} but that path is un-writeable, "
                "falling back to a temporary PEX_ROOT of {tmp_root} which will hurt "
                "performance.".format(pex_root=pex_root, tmp_root=tmp_root))
            # Persist the fallback so subsequent reads agree with the warning.
            pex_root = self._pex_info["pex_root"] = tmp_root
        return pex_root

    @pex_root.setter
    def pex_root(self, value):
        # type: (Optional[str]) -> None
        if value is None:
            # Clearing reverts to the default (~/.pex) on next read.
            self._pex_info.pop("pex_root", None)
        else:
            self._pex_info["pex_root"] = value

    @property
    def bootstrap(self):
        return ".bootstrap"

    @property
    def internal_cache(self):
        return ".deps"

    @property
    def install_cache(self):
        return os.path.join(self.pex_root, self.INSTALL_CACHE)

    @property
    def zip_unsafe_cache(self):
        # type: () -> str
        return os.path.join(self.pex_root, "code")

    def update(self, other):
        # type: (PexInfo) -> None
        """Merge ``other`` into this PexInfo.

        Scalar keys from ``other`` overwrite ours; distributions, interpreter constraints
        and requirements are merged.
        """
        if not isinstance(other, PexInfo):
            raise TypeError("Cannot merge a %r with PexInfo" % type(other))
        self._pex_info.update(other._pex_info)
        self._distributions.update(other.distributions)
        self._interpreter_constraints.update(other.interpreter_constraints)
        self._requirements.update(other.requirements)

    def as_json_dict(self):
        # type: () -> Dict[str, Any]
        """Return a JSON-serializable dict snapshot of this PexInfo."""
        data = self._pex_info.copy()
        data["inherit_path"] = self.inherit_path.value
        data["requirements"] = list(self._requirements)
        data["interpreter_constraints"] = list(self._interpreter_constraints)
        data["distributions"] = self._distributions.copy()
        return data

    def dump(self):
        # type: (...) -> str
        """Serialize this PexInfo to a canonical (sorted-keys, sorted-lists) JSON string."""
        data = self.as_json_dict()
        data["requirements"].sort()
        data["interpreter_constraints"].sort()
        return json.dumps(data, sort_keys=True)

    def copy(self):
        # type: () -> PexInfo
        """Return an independent copy of this PexInfo."""
        return PexInfo(self.as_json_dict())

    @staticmethod
    def _merge_split(*paths):
        """Split colon-delimited path strings into a flat list, dropping empty entries."""
        filtered_paths = filter(None, paths)
        return [p for p in ":".join(filtered_paths).split(":") if p]

    def merge_pex_path(self, pex_path):
        """Merges a new PEX_PATH definition into the existing one (if any).

        :param str pex_path: The PEX_PATH to merge.
        """
        if not pex_path:
            return
        self.pex_path = ":".join(self._merge_split(self.pex_path, pex_path))

    def __repr__(self):
        return "{}({!r})".format(type(self).__name__, self._pex_info)
Пример #2
0
    def install_distributions(self,
                              ignore_errors=False,
                              workspace=None,
                              max_parallel_jobs=None):
        """Build pending sdists/local projects and install all wheels into chroots.

        :param ignore_errors: If False, run ``self._check_install`` over the results.
        :param workspace: Directory used as the cache root when ``self._cache`` is unset.
        :param max_parallel_jobs: Cap on parallel build/install jobs passed to
            ``execute_parallel``.
        :returns: The installed distributions.

        NOTE(review): returns a plain ``[]`` when there is nothing to do but an
        ``OrderedSet`` otherwise — callers should treat the result as an iterable only.
        """
        if not any((self._build_requests, self._install_requests)):
            # Nothing to build or install.
            return []

        # Fall back to a throwaway temp dir when no durable cache is configured.
        cache = self._cache or workspace or safe_mkdtemp()

        built_wheels_dir = os.path.join(cache, 'built_wheels')
        spawn_wheel_build = functools.partial(self._spawn_wheel_build,
                                              built_wheels_dir)

        installed_wheels_dir = os.path.join(cache, PexInfo.INSTALL_CACHE)
        spawn_install = functools.partial(self._spawn_install,
                                          installed_wheels_dir)

        # Copy so extending to_install below does not mutate self._install_requests.
        to_install = self._install_requests[:]
        to_calculate_requirements_for = []

        # 1. Build local projects and sdists.
        if self._build_requests:
            with TRACER.timed('Building distributions for:'
                              '\n  {}'.format('\n  '.join(
                                  map(str, self._build_requests)))):

                # Requests whose wheels are already cached become install requests directly.
                build_requests, install_requests = self._categorize_build_requests(
                    build_requests=self._build_requests,
                    dist_root=built_wheels_dir)
                to_install.extend(install_requests)

                for build_result in execute_parallel(
                        inputs=build_requests,
                        spawn_func=spawn_wheel_build,
                        raise_type=Untranslateable,
                        max_jobs=max_parallel_jobs):
                    to_install.extend(build_result.finalize_build())

        # 2. Install wheels in individual chroots.

        # Dedup by wheel name; e.g.: only install universal wheels once even though they'll get
        # downloaded / built for each interpreter or platform.
        install_requests_by_wheel_file = OrderedDict()
        for install_request in to_install:
            install_requests = install_requests_by_wheel_file.setdefault(
                install_request.wheel_file, [])
            install_requests.append(install_request)

        # One representative request per distinct wheel file actually gets installed.
        representative_install_requests = [
            requests[0]
            for requests in install_requests_by_wheel_file.values()
        ]

        def add_requirements_requests(install_result):
            # Fan a single install result back out to every request that shares its wheel.
            install_requests = install_requests_by_wheel_file[
                install_result.request.wheel_file]
            to_calculate_requirements_for.extend(
                install_result.finalize_install(install_requests))

        with TRACER.timed('Installing:'
                          '\n  {}'.format('\n  '.join(
                              map(str, representative_install_requests)))):

            # Already-installed wheels come back as results; the rest are installed in parallel.
            install_requests, install_results = self._categorize_install_requests(
                install_requests=representative_install_requests,
                installed_wheels_dir=installed_wheels_dir)
            for install_result in install_results:
                add_requirements_requests(install_result)

            for install_result in execute_parallel(inputs=install_requests,
                                                   spawn_func=spawn_install,
                                                   raise_type=Untranslateable,
                                                   max_jobs=max_parallel_jobs):
                add_requirements_requests(install_result)

        # 3. Calculate the final installed requirements.
        with TRACER.timed('Calculating installed requirements for:'
                          '\n  {}'.format('\n  '.join(
                              map(str, to_calculate_requirements_for)))):
            distribution_requirements = DistributionRequirements.merged(
                execute_parallel(inputs=to_calculate_requirements_for,
                                 spawn_func=DistributionRequirements.Request.
                                 spawn_calculation,
                                 raise_type=Untranslateable,
                                 max_jobs=max_parallel_jobs))

        installed_distributions = OrderedSet()
        for requirements_request in to_calculate_requirements_for:
            for distribution in requirements_request.distributions:
                installed_distributions.add(
                    InstalledDistribution(
                        target=requirements_request.target,
                        requirement=distribution_requirements.to_requirement(
                            distribution),
                        distribution=distribution))

        if not ignore_errors:
            self._check_install(installed_distributions)
        return installed_distributions
Пример #3
0
class PexInfo(object):
    """PEX metadata.

  # Build metadata:
  build_properties: BuildProperties  # (key-value information about the build system)
  code_hash: str                     # sha1 hash of all names/code in the archive
  distributions: {dist_name: str}    # map from distribution name (i.e. path in
                                     # the internal cache) to its cache key (sha1)
  requirements: list                 # list of requirements for this environment

  # Environment options
  pex_root: string                    # root of all pex-related files eg: ~/.pex
  entry_point: string                 # entry point into this pex
  script: string                      # script to execute in this pex environment
                                      # at most one of script/entry_point can be specified
  zip_safe: True, default False       # is this pex zip safe?
  inherit_path: false/fallback/prefer # should this pex inherit site-packages + PYTHONPATH?
  ignore_errors: True, default False  # should we ignore inability to resolve dependencies?
  always_write_cache: False           # should we always write the internal cache to disk first?
                                      # this is useful if you have very large dependencies that
                                      # do not fit in RAM constrained environments

  .. versionchanged:: 0.8
    Removed the ``repositories`` and ``indices`` information, as they were never
    implemented.
  """

    PATH = 'PEX-INFO'
    INTERNAL_CACHE = '.deps'

    @classmethod
    def make_build_properties(cls, interpreter=None):
        """Return build-system metadata for ``interpreter`` (or the current one)."""
        from .interpreter import PythonInterpreter
        from .platforms import Platform

        pi = interpreter or PythonInterpreter.get()
        plat = Platform.current()
        platform_name = plat.platform
        return {
            'pex_version': pex_version,
            'class': pi.identity.interpreter,
            'version': pi.identity.version,
            'platform': platform_name,
        }

    @classmethod
    def default(cls, interpreter=None):
        """Create a PexInfo with empty requirements/distributions and default build metadata."""
        pex_info = {
            'requirements': [],
            'distributions': {},
            'build_properties': cls.make_build_properties(interpreter),
        }
        return cls(info=pex_info)

    @classmethod
    def from_pex(cls, pex):
        """Load the PexInfo for ``pex``, which may be a zip file or a directory."""
        if os.path.isfile(pex):
            with open_zip(pex) as zf:
                pex_info = zf.read(cls.PATH)
        else:
            with open(os.path.join(pex, cls.PATH)) as fp:
                pex_info = fp.read()
        return cls.from_json(pex_info)

    @classmethod
    def from_json(cls, content):
        """Deserialize a PexInfo from its JSON representation."""
        if isinstance(content, bytes):
            content = content.decode('utf-8')
        return cls(info=json.loads(content))

    @classmethod
    def from_env(cls, env=ENV):
        """Create a PexInfo reflecting only the PEX_* overrides explicitly set in ``env``."""
        supplied_env = env.strip_defaults()
        # PEX_FORCE_LOCAL is the inverse of zip_safe.
        zip_safe = None if supplied_env.PEX_FORCE_LOCAL is None else not supplied_env.PEX_FORCE_LOCAL
        pex_info = {
            'pex_root': supplied_env.PEX_ROOT,
            'entry_point': supplied_env.PEX_MODULE,
            'script': supplied_env.PEX_SCRIPT,
            'zip_safe': zip_safe,
            'inherit_path': supplied_env.PEX_INHERIT_PATH,
            'ignore_errors': supplied_env.PEX_IGNORE_ERRORS,
            'always_write_cache': supplied_env.PEX_ALWAYS_CACHE,
        }
        # Filter out empty entries not explicitly set in the environment.
        return cls(info=dict(
            (k, v) for (k, v) in pex_info.items() if v is not None))

    @classmethod
    def _parse_requirement_tuple(cls, requirement_tuple):
        """Normalize a requirement entry: accept a pre-0.8.x 3-tuple or a plain string."""
        if isinstance(requirement_tuple, (tuple, list)):
            if len(requirement_tuple) != 3:
                raise ValueError('Malformed PEX requirement: %r' %
                                 (requirement_tuple, ))
            # pre 0.8.x requirement type:
            pex_warnings.warn(
                'Attempting to use deprecated PEX feature.  Please upgrade past PEX 0.8.x.'
            )
            return requirement_tuple[0]
        elif isinstance(requirement_tuple, compatibility_string):
            return requirement_tuple
        raise ValueError('Malformed PEX requirement: %r' %
                         (requirement_tuple, ))

    def __init__(self, info=None):
        """Construct a new PexInfo.  This should not be used directly.

        :param info: Optional seed dict of PEX-INFO keys.
        :raises ValueError: If ``info`` is not a dict or its requirements are malformed.
        """

        if info is not None and not isinstance(info, dict):
            raise ValueError('PexInfo can only be seeded with a dict, got: '
                             '%s of type %s' % (info, type(info)))
        # Copy defensively: storing the caller's dict by reference would let this object's
        # property setters mutate it behind the caller's back.
        self._pex_info = dict(info) if info else {}
        if 'inherit_path' in self._pex_info:
            # Round-trip through the setter to normalize legacy boolean values.
            self.inherit_path = self._pex_info['inherit_path']
        self._distributions = self._pex_info.get('distributions', {})
        # cast as set because pex info from json must store interpreter_constraints as a list
        self._interpreter_constraints = set(
            self._pex_info.get('interpreter_constraints', set()))
        requirements = self._pex_info.get('requirements', [])
        if not isinstance(requirements, (list, tuple)):
            raise ValueError('Expected requirements to be a list, got %s' %
                             type(requirements))
        self._requirements = OrderedSet(
            self._parse_requirement_tuple(req) for req in requirements)

    def _get_safe(self, key):
        """Return the raw value for ``key`` or None; utf-8 encoded to bytes under Python 2."""
        if key not in self._pex_info:
            return None
        value = self._pex_info[key]
        return value.encode('utf-8') if PY2 else value

    @property
    def build_properties(self):
        """Information about the system on which this PEX was generated.

    :returns: A dictionary containing metadata about the environment used to build this PEX.
    """
        return self._pex_info.get('build_properties', {})

    @build_properties.setter
    def build_properties(self, value):
        if not isinstance(value, dict):
            raise TypeError('build_properties must be a dictionary!')
        # Start from fresh current-build defaults, then layer the caller's overrides on top.
        self._pex_info['build_properties'] = self.make_build_properties()
        self._pex_info['build_properties'].update(value)

    @property
    def zip_safe(self):
        """Whether or not this PEX should be treated as zip-safe.

    If set to false and the PEX is zipped, the contents of the PEX will be unpacked into a
    directory within the PEX_ROOT prior to execution.  This allows code and frameworks depending
    upon __file__ existing on disk to operate normally.

    By default zip_safe is True.  May be overridden at runtime by the $PEX_FORCE_LOCAL environment
    variable.
    """
        return self._pex_info.get('zip_safe', True)

    @zip_safe.setter
    def zip_safe(self, value):
        self._pex_info['zip_safe'] = bool(value)

    @property
    def pex_path(self):
        """A colon separated list of other pex files to merge into the runtime environment.

    This pex info property is used to persist the PEX_PATH environment variable into the pex info
    metadata for reuse within a built pex.
    """
        return self._pex_info.get('pex_path')

    @pex_path.setter
    def pex_path(self, value):
        self._pex_info['pex_path'] = value

    @property
    def inherit_path(self):
        """Whether or not this PEX should be allowed to inherit system dependencies.

    By default, PEX environments are scrubbed of all system distributions prior to execution.
    This means that PEX files cannot rely upon preexisting system libraries.

    By default inherit_path is false.  This may be overridden at runtime by the $PEX_INHERIT_PATH
    environment variable.
    """
        return self._pex_info.get('inherit_path', 'false')

    @inherit_path.setter
    def inherit_path(self, value):
        # Normalize legacy boolean values to their string equivalents.
        if value is False:
            value = 'false'
        elif value is True:
            value = 'prefer'
        self._pex_info['inherit_path'] = value

    @property
    def interpreter_constraints(self):
        """A list of constraints that determine the interpreter compatibility for this
    pex, using the Requirement-style format, e.g. ``'CPython>=3', or just '>=2.7,<3'``
    for requirements agnostic to interpreter class.

    This property will be used at exec time when bootstrapping a pex to search PEX_PYTHON_PATH
    for a list of compatible interpreters.
    """
        return list(self._interpreter_constraints)

    def add_interpreter_constraint(self, value):
        """Add a single interpreter constraint string to this PEX's metadata."""
        self._interpreter_constraints.add(str(value))

    @property
    def ignore_errors(self):
        return self._pex_info.get('ignore_errors', False)

    @ignore_errors.setter
    def ignore_errors(self, value):
        self._pex_info['ignore_errors'] = bool(value)

    @property
    def emit_warnings(self):
        return self._pex_info.get('emit_warnings', True)

    @emit_warnings.setter
    def emit_warnings(self, value):
        self._pex_info['emit_warnings'] = bool(value)

    @property
    def code_hash(self):
        return self._pex_info.get('code_hash')

    @code_hash.setter
    def code_hash(self, value):
        self._pex_info['code_hash'] = value

    @property
    def entry_point(self):
        return self._get_safe('entry_point')

    @entry_point.setter
    def entry_point(self, value):
        self._pex_info['entry_point'] = value

    @property
    def script(self):
        return self._get_safe('script')

    @script.setter
    def script(self, value):
        self._pex_info['script'] = value

    def add_requirement(self, requirement):
        """Add a requirement (stringified) to this PEX's requirement set."""
        self._requirements.add(str(requirement))

    @property
    def requirements(self):
        return self._requirements

    def add_distribution(self, location, sha):
        """Record a distribution at ``location`` with cache key ``sha``."""
        self._distributions[location] = sha

    @property
    def distributions(self):
        return self._distributions

    @property
    def always_write_cache(self):
        return self._pex_info.get('always_write_cache', False)

    @always_write_cache.setter
    def always_write_cache(self, value):
        self._pex_info['always_write_cache'] = bool(value)

    @property
    def pex_root(self):
        return os.path.expanduser(
            self._pex_info.get('pex_root', os.path.join('~', '.pex')))

    @pex_root.setter
    def pex_root(self, value):
        self._pex_info['pex_root'] = value

    @property
    def internal_cache(self):
        return self.INTERNAL_CACHE

    @property
    def install_cache(self):
        return os.path.join(self.pex_root, 'install')

    @property
    def zip_unsafe_cache(self):
        return os.path.join(self.pex_root, 'code')

    def update(self, other):
        """Merge ``other`` into this PexInfo.

    Scalar keys from ``other`` overwrite ours; distributions, interpreter constraints
    and requirements are merged.
    """
        if not isinstance(other, PexInfo):
            raise TypeError('Cannot merge a %r with PexInfo' % type(other))
        self._pex_info.update(other._pex_info)
        self._distributions.update(other.distributions)
        self._interpreter_constraints.update(other.interpreter_constraints)
        self._requirements.update(other.requirements)

    def dump(self, **kwargs):
        """Serialize this PexInfo to a JSON string; ``kwargs`` pass through to json.dumps."""
        pex_info_copy = self._pex_info.copy()
        pex_info_copy['requirements'] = list(self._requirements)
        pex_info_copy['interpreter_constraints'] = list(
            self._interpreter_constraints)
        pex_info_copy['distributions'] = self._distributions.copy()
        return json.dumps(pex_info_copy, **kwargs)

    def copy(self):
        """Return an independent copy of this PexInfo via a JSON round-trip."""
        return self.from_json(self.dump())

    def merge_pex_path(self, pex_path):
        """Merges a new PEX_PATH definition into the existing one (if any).
    :param string pex_path: The PEX_PATH to merge.
    """
        if not pex_path:
            return
        self.pex_path = ':'.join(merge_split(self.pex_path, pex_path))

    def __repr__(self):
        return '{}({!r})'.format(type(self).__name__, self._pex_info)
Пример #4
0
def build_pex(reqs, options, cache=None):
    """Construct a PEXBuilder configured from parsed command-line options.

    :param reqs: Requirement strings to resolve into the PEX.
    :param options: Parsed command-line options namespace.
    :param cache: Optional resolver cache directory; None means no shared cache.
    :returns: A configured PEXBuilder; the caller is responsible for building it.
    """
    interpreters = None  # Default to the current interpreter.

    pex_python_path = None  # Defaults to $PATH
    if options.rc_file or not ENV.PEX_IGNORE_RCFILES:
        rc_variables = Variables(rc=options.rc_file)
        pex_python_path = rc_variables.PEX_PYTHON_PATH

    # NB: options.python and interpreter constraints cannot be used together.
    if options.python:
        with TRACER.timed("Resolving interpreters", V=2):

            def to_python_interpreter(full_path_or_basename):
                # Accept either a path to a binary or a basename found on PATH.
                if os.path.isfile(full_path_or_basename):
                    return PythonInterpreter.from_binary(full_path_or_basename)
                else:
                    interpreter = PythonInterpreter.from_env(full_path_or_basename)
                    if interpreter is None:
                        die("Failed to find interpreter: %s" % full_path_or_basename)
                    return interpreter

            interpreters = [to_python_interpreter(interp) for interp in options.python]
    elif options.interpreter_constraint:
        with TRACER.timed("Resolving interpreters", V=2):
            constraints = options.interpreter_constraint
            validate_constraints(constraints)
            try:
                interpreters = list(iter_compatible_interpreters(pex_python_path, constraints))
            except UnsatisfiableInterpreterConstraintsError as e:
                die(
                    e.create_message("Could not find a compatible interpreter."),
                    CANNOT_SETUP_INTERPRETER,
                )

    platforms = OrderedSet(options.platforms)
    interpreters = interpreters or []
    if options.platforms and options.resolve_local_platforms:
        # Try to satisfy requested foreign platforms with local interpreters so
        # source distributions can be built for them; platforms left over will
        # be resolved with binary (wheel) distributions only.
        with TRACER.timed(
            "Searching for local interpreters matching {}".format(", ".join(map(str, platforms)))
        ):
            candidate_interpreters = OrderedSet(iter_compatible_interpreters(pex_python_path))
            candidate_interpreters.add(PythonInterpreter.get())
            for candidate_interpreter in candidate_interpreters:
                resolved_platforms = candidate_interpreter.supported_platforms.intersection(
                    platforms
                )
                if resolved_platforms:
                    for resolved_platform in resolved_platforms:
                        TRACER.log(
                            "Resolved {} for platform {}".format(
                                candidate_interpreter, resolved_platform
                            )
                        )
                        platforms.remove(resolved_platform)
                    interpreters.append(candidate_interpreter)
        if platforms:
            TRACER.log(
                "Could not resolve a local interpreter for {}, will resolve only binary distributions "
                "for {}.".format(
                    ", ".join(map(str, platforms)),
                    "this platform" if len(platforms) == 1 else "these platforms",
                )
            )

    # NOTE(review): min() selects a canonical build interpreter — presumably the
    # lowest by PythonInterpreter's ordering; confirm against its comparison
    # semantics.
    interpreter = min(interpreters) if interpreters else None
    if options.use_first_matching_interpreter and interpreters:
        if len(interpreters) > 1:
            unused_interpreters = set(interpreters) - {interpreter}
            TRACER.log(
                "Multiple interpreters resolved, but only using {} because "
                "`--use-first-matching-interpreter` was used. These interpreters were matched but "
                "will not be used: {}".format(
                    interpreter.binary,
                    ", ".join(interpreter.binary for interpreter in sorted(unused_interpreters)),
                )
            )
        interpreters = [interpreter]

    try:
        with open(options.preamble_file) as preamble_fd:
            preamble = preamble_fd.read()
    except TypeError:
        # options.preamble_file is None
        preamble = None

    pex_builder = PEXBuilder(path=safe_mkdtemp(), interpreter=interpreter, preamble=preamble)

    # Apply `fn` to every file under src_dir, preserving relative paths.
    def walk_and_do(fn, src_dir):
        src_dir = os.path.normpath(src_dir)
        for root, dirs, files in os.walk(src_dir):
            for f in files:
                src_file_path = os.path.join(root, f)
                dst_path = os.path.relpath(src_file_path, src_dir)
                fn(src_file_path, dst_path)

    for directory in options.sources_directory:
        walk_and_do(pex_builder.add_source, directory)

    for directory in options.resources_directory:
        walk_and_do(pex_builder.add_resource, directory)

    # Copy runtime behavior flags from the CLI options into the PEX metadata.
    pex_info = pex_builder.info
    pex_info.zip_safe = options.zip_safe
    pex_info.unzip = options.unzip
    pex_info.pex_path = options.pex_path
    pex_info.always_write_cache = options.always_write_cache
    pex_info.ignore_errors = options.ignore_errors
    pex_info.emit_warnings = options.emit_warnings
    pex_info.inherit_path = options.inherit_path
    pex_info.pex_root = options.runtime_pex_root
    pex_info.strip_pex_env = options.strip_pex_env

    # If we're only building the PEX for the first of many interpreters due to
    # `--use-first-matching-interpreter` selection, we do not want to enable those same interpreter
    # constraints at runtime, where they could lead to a different interpreter being selected
    # leading to a failure to execute the PEX. Instead we rely on the shebang set by that single
    # interpreter to pick out a similar interpreter at runtime (for a CPython interpreter, the
    # shebang will be `#!/usr/bin/env pythonX.Y` which should generally be enough to select a
    # matching interpreter. To be clear though, there are many corners this will not work for
    # including mismatching abi (python2.7m vs python2.7mu) when the PEX contains platform specific
    # wheels, etc.
    if options.interpreter_constraint and not options.use_first_matching_interpreter:
        for ic in options.interpreter_constraint:
            pex_builder.add_interpreter_constraint(ic)

    # NB: `None` means use the default (pypi) index, `[]` means use no indexes.
    indexes = None
    if options.indexes != [_PYPI] and options.indexes is not None:
        indexes = [str(index) for index in options.indexes]

    for requirements_pex in options.requirements_pexes:
        pex_builder.add_from_requirements_pex(requirements_pex)

    with TRACER.timed("Resolving distributions ({})".format(reqs + options.requirement_files)):
        network_configuration = NetworkConfiguration.create(
            cache_ttl=options.cache_ttl,
            retries=options.retries,
            timeout=options.timeout,
            headers=options.headers,
            proxy=options.proxy,
            cert=options.cert,
            client_cert=options.client_cert,
        )

        try:
            resolveds = resolve_multi(
                requirements=reqs,
                requirement_files=options.requirement_files,
                constraint_files=options.constraint_files,
                allow_prereleases=options.allow_prereleases,
                transitive=options.transitive,
                interpreters=interpreters,
                platforms=list(platforms),
                indexes=indexes,
                find_links=options.find_links,
                network_configuration=network_configuration,
                cache=cache,
                build=options.build,
                use_wheel=options.use_wheel,
                compile=options.compile,
                manylinux=options.manylinux,
                max_parallel_jobs=options.max_parallel_jobs,
                ignore_errors=options.ignore_errors,
            )

            for resolved_dist in resolveds:
                log(
                    "  %s -> %s" % (resolved_dist.requirement, resolved_dist.distribution),
                    V=options.verbosity,
                )
                pex_builder.add_distribution(resolved_dist.distribution)
                pex_builder.add_requirement(resolved_dist.requirement)
        except Unsatisfiable as e:
            die(e)

    if options.entry_point and options.script:
        die("Must specify at most one entry point or script.", INVALID_OPTIONS)

    if options.entry_point:
        pex_builder.set_entry_point(options.entry_point)
    elif options.script:
        pex_builder.set_script(options.script)

    if options.python_shebang:
        pex_builder.set_shebang(options.python_shebang)

    return pex_builder
Example #5
0
    def resolve_distributions(self, ignore_errors=False):
        """Resolve all requirements into individually-installed distribution chroots.

        :param bool ignore_errors: If True, skip the final transitive-resolve check.
        :returns: An OrderedSet of ResolvedDistribution covering every resolved
            requirement, or ``[]`` when there is nothing to resolve.
        """
        # This method has four stages:
        # 1. Resolve sdists and wheels.
        # 2. Build local projects and sdists.
        # 3. Install wheels in individual chroots.
        # 4. Calculate the final resolved requirements.
        #
        # You'd think we might be able to just pip install all the requirements, but pexes can be
        # multi-platform / multi-interpreter, in which case only a subset of distributions resolved into
        # the PEX should be activated for the runtime interpreter. Sometimes there are platform specific
        # wheels and sometimes python version specific dists (backports being the common case). As such,
        # we need to be able to add each resolved distribution to the `sys.path` individually
        # (`PEXEnvironment` handles this selective activation at runtime). Since pip install only
        # accepts a single location to install all resolved dists, that won't work.
        #
        # This means we need to separately resolve all distributions, then install each in their own
        # chroot. To do this we use `pip download` for the resolve and download of all needed
        # distributions and then `pip install` to install each distribution in its own chroot.
        #
        # As a complicating factor, the runtime activation scheme relies on PEP 425 tags; i.e.: wheel
        # names. Some requirements are only available or applicable in source form - either via sdist,
        # VCS URL or local projects. As such we need to insert a `pip wheel` step to generate wheels for
        # all requirements resolved in source form via `pip download` / inspection of requirements to
        # discover those that are local directories (local setup.py or pyproject.toml python projects).
        #
        # Finally, we must calculate the pinned requirement corresponding to each distribution we
        # resolved along with any environment markers that control which runtime environments the
        # requirement should be activated in.

        if not self._requirements and not self._requirement_files:
            # Nothing to resolve.
            return []

        workspace = safe_mkdtemp()
        cache = self._cache or workspace

        resolved_dists_dir = os.path.join(workspace, 'resolved_dists')
        spawn_resolve = functools.partial(self._spawn_resolve,
                                          resolved_dists_dir)
        to_resolve = self._targets

        built_wheels_dir = os.path.join(cache, 'built_wheels')
        spawn_wheel_build = functools.partial(self._spawn_wheel_build,
                                              built_wheels_dir)
        to_build = list(self._iter_local_projects())

        installed_wheels_dir = os.path.join(cache, PexInfo.INSTALL_CACHE)
        spawn_install = functools.partial(self._spawn_install,
                                          installed_wheels_dir)
        to_install = []

        to_calculate_requirements_for = []

        # 1. Resolve sdists and wheels.
        # BUG FIX: the format string previously lacked a '{}' placeholder, so
        # the list of resolve targets was silently dropped from the message.
        with TRACER.timed('Resolving for:\n  {}'.format('\n  '.join(
                map(str, to_resolve)))):
            for resolve_result in self._run_parallel(inputs=to_resolve,
                                                     spawn_func=spawn_resolve,
                                                     raise_type=Unsatisfiable):
                to_build.extend(resolve_result.build_requests())
                to_install.extend(resolve_result.install_requests())

        if not any((to_build, to_install)):
            # Nothing to build or install.
            return []

        # 2. Build local projects and sdists.
        if to_build:
            with TRACER.timed('Building distributions for:\n  {}'.format(
                    '\n  '.join(map(str, to_build)))):

                build_requests, install_requests = self._categorize_build_requests(
                    build_requests=to_build, dist_root=built_wheels_dir)
                to_install.extend(install_requests)

                for build_result in self._run_parallel(
                        inputs=build_requests,
                        spawn_func=spawn_wheel_build,
                        raise_type=Untranslateable):
                    to_install.extend(build_result.finalize_build())

        # 3. Install wheels in individual chroots.

        # Dedup by wheel name; e.g.: only install universal wheels once even though they'll get
        # downloaded / built for each interpreter or platform.
        install_requests_by_wheel_file = OrderedDict()
        for install_request in to_install:
            install_requests = install_requests_by_wheel_file.setdefault(
                install_request.wheel_file, [])
            install_requests.append(install_request)

        representative_install_requests = [
            requests[0]
            for requests in install_requests_by_wheel_file.values()
        ]

        def add_requirements_requests(install_result):
            # Fan a single install result back out to every request that shares
            # its wheel file.
            install_requests = install_requests_by_wheel_file[
                install_result.request.wheel_file]
            to_calculate_requirements_for.extend(
                install_result.finalize_install(install_requests))

        with TRACER.timed('Installing:\n  {}'.format('\n  '.join(
                map(str, representative_install_requests)))):

            install_requests, install_results = self._categorize_install_requests(
                install_requests=representative_install_requests,
                installed_wheels_dir=installed_wheels_dir)
            for install_result in install_results:
                add_requirements_requests(install_result)

            for install_result in self._run_parallel(
                    inputs=install_requests,
                    spawn_func=spawn_install,
                    raise_type=Untranslateable):
                add_requirements_requests(install_result)

        # 4. Calculate the final resolved requirements.
        with TRACER.timed(
                'Calculating resolved requirements for:\n  {}'.format(
                    '\n  '.join(map(str, to_calculate_requirements_for)))):
            distribution_requirements = DistributionRequirements.merged(
                self._run_parallel(inputs=to_calculate_requirements_for,
                                   spawn_func=DistributionRequirements.Request.
                                   spawn_calculation,
                                   raise_type=Untranslateable))

        resolved_distributions = OrderedSet()
        for requirements_request in to_calculate_requirements_for:
            for distribution in requirements_request.distributions:
                resolved_distributions.add(
                    ResolvedDistribution(
                        target=requirements_request.target,
                        requirement=distribution_requirements.to_requirement(
                            distribution),
                        distribution=distribution))

        if not ignore_errors and self._transitive:
            self._check_resolve(resolved_distributions)
        return resolved_distributions
Example #6
0
def build_pex(reqs, options, cache=None):
    """Construct a PEXBuilder configured from parsed command-line options.

    :param reqs: Requirement strings to resolve into the PEX.
    :param options: Parsed command-line options namespace.
    :param cache: Optional resolve cache directory; None means no shared cache.
    :returns: A configured PEXBuilder; the caller is responsible for building it.
    """
    interpreters = None  # Default to the current interpreter.

    pex_python_path = options.python_path  # If None, this will result in using $PATH.
    # TODO(#1075): stop looking at PEX_PYTHON_PATH and solely consult the `--python-path` flag.
    if pex_python_path is None and (options.rc_file
                                    or not ENV.PEX_IGNORE_RCFILES):
        rc_variables = Variables(rc=options.rc_file)
        pex_python_path = rc_variables.PEX_PYTHON_PATH

    # NB: options.python and interpreter constraints cannot be used together.
    if options.python:
        with TRACER.timed("Resolving interpreters", V=2):

            def to_python_interpreter(full_path_or_basename):
                # Accept either a path to a binary or a basename found on PATH.
                if os.path.isfile(full_path_or_basename):
                    return PythonInterpreter.from_binary(full_path_or_basename)
                else:
                    interp = PythonInterpreter.from_env(full_path_or_basename)
                    if interp is None:
                        die("Failed to find interpreter: %s" %
                            full_path_or_basename)
                    return interp

            interpreters = [
                to_python_interpreter(interp) for interp in options.python
            ]
    elif options.interpreter_constraint:
        with TRACER.timed("Resolving interpreters", V=2):
            constraints = options.interpreter_constraint
            validate_constraints(constraints)
            try:
                interpreters = list(
                    iter_compatible_interpreters(
                        path=pex_python_path,
                        interpreter_constraints=constraints))
            except UnsatisfiableInterpreterConstraintsError as e:
                die(
                    e.create_message(
                        "Could not find a compatible interpreter."),
                    CANNOT_SETUP_INTERPRETER,
                )

    platforms = OrderedSet(options.platforms)
    interpreters = interpreters or []
    if options.platforms and options.resolve_local_platforms:
        # Try to satisfy requested foreign platforms with local interpreters;
        # platforms left over will be resolved with binary distributions only.
        with TRACER.timed(
                "Searching for local interpreters matching {}".format(
                    ", ".join(map(str, platforms)))):
            candidate_interpreters = OrderedSet(
                iter_compatible_interpreters(path=pex_python_path))
            candidate_interpreters.add(PythonInterpreter.get())
            for candidate_interpreter in candidate_interpreters:
                resolved_platforms = candidate_interpreter.supported_platforms.intersection(
                    platforms)
                if resolved_platforms:
                    for resolved_platform in resolved_platforms:
                        TRACER.log("Resolved {} for platform {}".format(
                            candidate_interpreter, resolved_platform))
                        platforms.remove(resolved_platform)
                    interpreters.append(candidate_interpreter)
        if platforms:
            TRACER.log(
                "Could not resolve a local interpreter for {}, will resolve only binary distributions "
                "for {}.".format(
                    ", ".join(map(str, platforms)),
                    "this platform"
                    if len(platforms) == 1 else "these platforms",
                ))

    interpreter = (PythonInterpreter.latest_release_of_min_compatible_version(
        interpreters) if interpreters else None)

    try:
        with open(options.preamble_file) as preamble_fd:
            preamble = preamble_fd.read()
    except TypeError:
        # options.preamble_file is None
        preamble = None

    pex_builder = PEXBuilder(
        path=safe_mkdtemp(),
        interpreter=interpreter,
        preamble=preamble,
        copy_mode=CopyMode.SYMLINK,
        include_tools=options.include_tools or options.venv,
    )

    if options.resources_directory:
        pex_warnings.warn(
            "The `-R/--resources-directory` option is deprecated. Resources should be added via "
            "`-D/--sources-directory` instead.")

    # Resources are folded into sources here (see the deprecation warning above).
    for directory in OrderedSet(options.sources_directory +
                                options.resources_directory):
        src_dir = os.path.normpath(directory)
        for root, _, files in os.walk(src_dir):
            for f in files:
                src_file_path = os.path.join(root, f)
                dst_path = os.path.relpath(src_file_path, src_dir)
                pex_builder.add_source(src_file_path, dst_path)

    # Copy runtime behavior flags from the CLI options into the PEX metadata.
    pex_info = pex_builder.info
    pex_info.zip_safe = options.zip_safe
    pex_info.unzip = options.unzip
    pex_info.venv = bool(options.venv)
    pex_info.venv_bin_path = options.venv
    pex_info.venv_copies = options.venv_copies
    pex_info.pex_path = options.pex_path
    pex_info.always_write_cache = options.always_write_cache
    pex_info.ignore_errors = options.ignore_errors
    pex_info.emit_warnings = options.emit_warnings
    pex_info.inherit_path = InheritPath.for_value(options.inherit_path)
    pex_info.pex_root = options.runtime_pex_root
    pex_info.strip_pex_env = options.strip_pex_env

    if options.interpreter_constraint:
        for ic in options.interpreter_constraint:
            pex_builder.add_interpreter_constraint(ic)

    indexes = compute_indexes(options)

    for requirements_pex in options.requirements_pexes:
        pex_builder.add_from_requirements_pex(requirements_pex)

    with TRACER.timed(
            "Resolving distributions ({})".format(reqs +
                                                  options.requirement_files)):
        if options.cache_ttl:
            pex_warnings.warn(
                "The --cache-ttl option is deprecated and no longer has any effect."
            )
        if options.headers:
            pex_warnings.warn(
                "The --header option is deprecated and no longer has any effect."
            )

        network_configuration = NetworkConfiguration(
            retries=options.retries,
            timeout=options.timeout,
            proxy=options.proxy,
            cert=options.cert,
            client_cert=options.client_cert,
        )

        try:
            if options.pex_repository:
                # Resolve from a pre-built PEX acting as a repository instead
                # of hitting indexes / find-links.
                with TRACER.timed("Resolving requirements from PEX {}.".format(
                        options.pex_repository)):
                    resolveds = resolve_from_pex(
                        pex=options.pex_repository,
                        requirements=reqs,
                        requirement_files=options.requirement_files,
                        constraint_files=options.constraint_files,
                        network_configuration=network_configuration,
                        transitive=options.transitive,
                        interpreters=interpreters,
                        platforms=list(platforms),
                        manylinux=options.manylinux,
                        ignore_errors=options.ignore_errors,
                    )
            else:
                with TRACER.timed("Resolving requirements."):
                    resolveds = resolve_multi(
                        requirements=reqs,
                        requirement_files=options.requirement_files,
                        constraint_files=options.constraint_files,
                        allow_prereleases=options.allow_prereleases,
                        transitive=options.transitive,
                        interpreters=interpreters,
                        platforms=list(platforms),
                        indexes=indexes,
                        find_links=options.find_links,
                        resolver_version=ResolverVersion.for_value(
                            options.resolver_version),
                        network_configuration=network_configuration,
                        cache=cache,
                        build=options.build,
                        use_wheel=options.use_wheel,
                        compile=options.compile,
                        manylinux=options.manylinux,
                        max_parallel_jobs=options.max_parallel_jobs,
                        ignore_errors=options.ignore_errors,
                    )

            for resolved_dist in resolveds:
                pex_builder.add_distribution(resolved_dist.distribution)
                if resolved_dist.direct_requirement:
                    pex_builder.add_requirement(
                        resolved_dist.direct_requirement)
        except Unsatisfiable as e:
            die(str(e))

    if options.entry_point and options.script:
        die("Must specify at most one entry point or script.", INVALID_OPTIONS)

    if options.entry_point:
        pex_builder.set_entry_point(options.entry_point)
    elif options.script:
        pex_builder.set_script(options.script)

    if options.python_shebang:
        pex_builder.set_shebang(options.python_shebang)

    return pex_builder
Example #7
0
class PexInfo(object):
  """PEX metadata.

  # Build metadata:
  build_properties: BuildProperties  # (key-value information about the build system)
  code_hash: str                     # sha1 hash of all names/code in the archive
  distributions: {dist_name: str}    # map from distribution name (i.e. path in
                                     # the internal cache) to its cache key (sha1)
  requirements: list                 # list of requirements for this environment

  # Environment options
  pex_root: string                    # root of all pex-related files eg: ~/.pex
  entry_point: string                 # entry point into this pex
  script: string                      # script to execute in this pex environment
                                      # at most one of script/entry_point can be specified
  zip_safe: True, default False       # is this pex zip safe?
  inherit_path: false/fallback/prefer # should this pex inherit site-packages + PYTHONPATH?
  ignore_errors: True, default False  # should we ignore inability to resolve dependencies?
  always_write_cache: False           # should we always write the internal cache to disk first?
                                      # this is useful if you have very large dependencies that
                                      # do not fit in RAM constrained environments

  .. versionchanged:: 0.8
    Removed the ``repositories`` and ``indices`` information, as they were never
    implemented.
  """

  PATH = 'PEX-INFO'
  INTERNAL_CACHE = '.deps'

  @classmethod
  def make_build_properties(cls, interpreter=None):
    """Return metadata describing the interpreter and platform building this PEX."""
    # Lazy imports keep PexInfo cheap to load at PEX runtime.
    from .interpreter import PythonInterpreter
    from .platforms import Platform

    target = interpreter or PythonInterpreter.get()
    return {
      'pex_version': pex_version,
      'class': target.identity.interpreter,
      'version': target.identity.version,
      'platform': Platform.current().platform,
    }

  @classmethod
  def default(cls, interpreter=None):
    """Create a fresh PexInfo with empty requirements and distributions."""
    return cls(info={
      'requirements': [],
      'distributions': {},
      'build_properties': cls.make_build_properties(interpreter),
    })

  @classmethod
  def from_pex(cls, pex):
    """Load PexInfo from a PEX, given either as a zip file or a directory."""
    if os.path.isfile(pex):
      # Zipped PEX: read the metadata entry straight out of the archive.
      with open_zip(pex) as zf:
        raw = zf.read(cls.PATH)
    else:
      # Loose (directory) PEX: the metadata lives as a plain file.
      with open(os.path.join(pex, cls.PATH)) as fp:
        raw = fp.read()
    return cls.from_json(raw)

  @classmethod
  def from_json(cls, content):
    """Deserialize a PexInfo from JSON given as str or UTF-8 bytes."""
    text = content.decode('utf-8') if isinstance(content, bytes) else content
    return cls(info=json.loads(text))

  @classmethod
  def from_env(cls, env=ENV):
    """Build a PexInfo overlay from explicitly-set PEX_* environment variables."""
    supplied = env.strip_defaults()
    # PEX_FORCE_LOCAL inverts zip_safe; leave it unset if the variable is absent.
    if supplied.PEX_FORCE_LOCAL is None:
      zip_safe = None
    else:
      zip_safe = not supplied.PEX_FORCE_LOCAL
    candidates = {
      'pex_root': supplied.PEX_ROOT,
      'entry_point': supplied.PEX_MODULE,
      'script': supplied.PEX_SCRIPT,
      'zip_safe': zip_safe,
      'inherit_path': supplied.PEX_INHERIT_PATH,
      'ignore_errors': supplied.PEX_IGNORE_ERRORS,
      'always_write_cache': supplied.PEX_ALWAYS_CACHE,
    }
    # Filter out entries not explicitly set in the environment.
    return cls(info=dict((k, v) for (k, v) in candidates.items() if v is not None))

  @classmethod
  def _parse_requirement_tuple(cls, requirement_tuple):
    """Normalize a stored requirement entry to a plain requirement string.

    :raises ValueError: For entries that are neither strings nor 3-tuples.
    """
    if isinstance(requirement_tuple, compatibility_string):
      return requirement_tuple
    if isinstance(requirement_tuple, (tuple, list)):
      if len(requirement_tuple) != 3:
        raise ValueError('Malformed PEX requirement: %r' % (requirement_tuple,))
      # pre 0.8.x requirement type:
      pex_warnings.warn('Attempting to use deprecated PEX feature.  Please upgrade past PEX 0.8.x.')
      return requirement_tuple[0]
    raise ValueError('Malformed PEX requirement: %r' % (requirement_tuple,))

  def __init__(self, info=None):
    """Construct a new PexInfo. This should not be used directly.

    :param dict info: Optional seed metadata dict (e.g. parsed from PEX-INFO).
    :raises ValueError: If ``info`` is not a dict, or requirements are malformed.
    """

    if info is not None and not isinstance(info, dict):
      raise ValueError('PexInfo can only be seeded with a dict, got: '
                       '%s of type %s' % (info, type(info)))
    self._pex_info = info or {}
    # Route through the inherit_path property setter so booleans are normalized
    # to their string forms ('false' / 'prefer').
    if 'inherit_path' in self._pex_info:
      self.inherit_path = self._pex_info['inherit_path']
    self._distributions = self._pex_info.get('distributions', {})
    # cast as set because pex info from json must store interpreter_constraints as a list
    self._interpreter_constraints = set(self._pex_info.get('interpreter_constraints', set()))
    requirements = self._pex_info.get('requirements', [])
    if not isinstance(requirements, (list, tuple)):
      raise ValueError('Expected requirements to be a list, got %s' % type(requirements))
    self._requirements = OrderedSet(self._parse_requirement_tuple(req) for req in requirements)

  def _get_safe(self, key):
    """Fetch *key* from the metadata, returning None when absent.

    Under Python 2 the value is encoded to UTF-8 bytes.
    """
    try:
      value = self._pex_info[key]
    except KeyError:
      return None
    return value.encode('utf-8') if PY2 else value

  @property
  def build_properties(self):
    """Information about the system on which this PEX was generated.

    :returns: A dictionary containing metadata about the environment used to build this PEX.
    """
    return self._pex_info.get('build_properties', {})

  @build_properties.setter
  def build_properties(self, value):
    if not isinstance(value, dict):
      raise TypeError('build_properties must be a dictionary!')
    # Start from freshly-computed properties, then overlay the supplied ones.
    merged = self.make_build_properties()
    merged.update(value)
    self._pex_info['build_properties'] = merged

  @property
  def zip_safe(self):
    """Whether or not this PEX should be treated as zip-safe.

    If set to false and the PEX is zipped, the contents of the PEX will be unpacked into a
    directory within the PEX_ROOT prior to execution.  This allows code and frameworks depending
    upon __file__ existing on disk to operate normally.

    By default zip_safe is True.  May be overridden at runtime by the $PEX_FORCE_LOCAL environment
    variable.
    """
    return self._pex_info.get('zip_safe', True)

  @zip_safe.setter
  def zip_safe(self, value):
    # Coerce any truthy value to a plain bool for JSON serialization.
    self._pex_info['zip_safe'] = bool(value)

  @property
  def pex_path(self):
    """A colon separated list of other pex files to merge into the runtime environment.

    This pex info property is used to persist the PEX_PATH environment variable into the pex info
    metadata for reuse within a built pex.

    :returns: The stored PEX_PATH string, or None when unset.
    """
    return self._pex_info.get('pex_path')

  @pex_path.setter
  def pex_path(self, value):
    self._pex_info['pex_path'] = value

  @property
  def inherit_path(self):
    """Whether or not this PEX should be allowed to inherit system dependencies.

    By default, PEX environments are scrubbed of all system distributions prior to execution.
    This means that PEX files cannot rely upon preexisting system libraries.

    By default inherit_path is false.  This may be overridden at runtime by the $PEX_INHERIT_PATH
    environment variable.
    """
    return self._pex_info.get('inherit_path', 'false')

  @inherit_path.setter
  def inherit_path(self, value):
    # Normalize booleans to their string forms; identity checks (`is`) are
    # deliberate so that e.g. integer 0/1 values pass through untouched.
    if value is True:
      value = 'prefer'
    elif value is False:
      value = 'false'
    self._pex_info['inherit_path'] = value

  @property
  def interpreter_constraints(self):
    """A list of constraints that determine the interpreter compatibility for this
    pex, using the Requirement-style format, e.g. ``'CPython>=3', or just '>=2.7,<3'``
    for requirements agnostic to interpreter class.

    This property will be used at exec time when bootstrapping a pex to search PEX_PYTHON_PATH
    for a list of compatible interpreters.
    """
    return list(self._interpreter_constraints)

  def add_interpreter_constraint(self, value):
    """Add a single interpreter constraint (stringified) to the set."""
    self._interpreter_constraints.add(str(value))

  @property
  def ignore_errors(self):
    return self._pex_info.get('ignore_errors', False)

  @ignore_errors.setter
  def ignore_errors(self, value):
    self._pex_info['ignore_errors'] = bool(value)

  @property
  def emit_warnings(self):
    return self._pex_info.get('emit_warnings', True)

  @emit_warnings.setter
  def emit_warnings(self, value):
    self._pex_info['emit_warnings'] = bool(value)

  @property
  def code_hash(self):
    return self._pex_info.get('code_hash')

  @code_hash.setter
  def code_hash(self, value):
    self._pex_info['code_hash'] = value

  @property
  def entry_point(self):
    """The entry point into this pex, if any.

    Lookup is delegated to ``_get_safe`` (defined elsewhere in this class) —
    NOTE(review): presumably that also accounts for environment overrides; confirm
    against its definition.  At most one of script/entry_point may be specified.
    """
    return self._get_safe('entry_point')

  @entry_point.setter
  def entry_point(self, value):
    """Persist the entry point for this pex into the metadata."""
    self._pex_info['entry_point'] = value

  @property
  def script(self):
    """The script to execute in this pex environment, if any.

    Lookup is delegated to ``_get_safe`` (defined elsewhere in this class).  At most one
    of script/entry_point may be specified.
    """
    return self._get_safe('script')

  @script.setter
  def script(self, value):
    """Persist the script name for this pex into the metadata."""
    self._pex_info['script'] = value

  def add_requirement(self, requirement):
    self._requirements.add(str(requirement))

  @property
  def requirements(self):
    return self._requirements

  def add_distribution(self, location, sha):
    self._distributions[location] = sha

  @property
  def distributions(self):
    return self._distributions

  @property
  def always_write_cache(self):
    return self._pex_info.get('always_write_cache', False)

  @always_write_cache.setter
  def always_write_cache(self, value):
    self._pex_info['always_write_cache'] = bool(value)

  @property
  def pex_root(self):
    return os.path.expanduser(self._pex_info.get('pex_root', os.path.join('~', '.pex')))

  @pex_root.setter
  def pex_root(self, value):
    self._pex_info['pex_root'] = value

  @property
  def internal_cache(self):
    """Location of the dependency cache inside the pex file.

    NOTE(review): ``INTERNAL_CACHE`` is a class-level constant declared outside this
    view — confirm it exists alongside ``PATH``/``INSTALL_CACHE``.
    """
    return self.INTERNAL_CACHE

  @property
  def install_cache(self):
    return os.path.join(self.pex_root, 'install')

  @property
  def zip_unsafe_cache(self):
    return os.path.join(self.pex_root, 'code')

  def update(self, other):
    if not isinstance(other, PexInfo):
      raise TypeError('Cannot merge a %r with PexInfo' % type(other))
    self._pex_info.update(other._pex_info)
    self._distributions.update(other.distributions)
    self._interpreter_constraints.update(other.interpreter_constraints)
    self._requirements.update(other.requirements)

  def dump(self, **kwargs):
    pex_info_copy = self._pex_info.copy()
    pex_info_copy['requirements'] = sorted(self._requirements)
    pex_info_copy['interpreter_constraints'] = sorted(self._interpreter_constraints)
    pex_info_copy['distributions'] = self._distributions.copy()
    return json.dumps(pex_info_copy, **kwargs)

  def copy(self):
    """Return an independent copy of this PexInfo via a JSON round trip."""
    serialized = self.dump()
    return self.from_json(serialized)

  def merge_pex_path(self, pex_path):
    """Merges a new PEX_PATH definition into the existing one (if any).

    An empty/None ``pex_path`` is a no-op.

    :param string pex_path: The PEX_PATH to merge.
    """
    if pex_path:
      merged_entries = merge_split(self.pex_path, pex_path)
      self.pex_path = ':'.join(merged_entries)

  def __repr__(self):
    return '{}({!r})'.format(type(self).__name__, self._pex_info)