Example #1
  def test_hash_all(self):
    # Record the expected digest interactions with mox, then replay them so the
    # hash_all call below is verified against the recorded script.
    self.digest.update('jake')
    self.digest.update('jones')
    self.digest.hexdigest().AndReturn('42')
    self.mox.ReplayAll()

    self.assertEqual('42', hash_all(['jake', 'jones'], digest=self.digest))
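
The tests above and in Examples #20-#24 pin down hash_all's observable contract: each string is fed to a digest in list order and the hex digest is returned, with Example #22 suggesting a sha1 default. A minimal sketch of that contract, inferred from the tests rather than copied from the pants implementation:

import hashlib

def hash_all(strs, digest=None):
    # Feed each string to the digest in order; the tests show the result equals
    # hashing the plain concatenation (e.g. b'jakejones').
    digest = digest or hashlib.sha1()  # sha1 default inferred from Example #22
    for s in strs:
        digest.update(s if isinstance(s, bytes) else s.encode('utf-8'))
    return digest.hexdigest()
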
Example #2
  def _chroot_path(self, interpreter, pex_info, targets, platforms, extra_requirements,
                   executable_file_content):
    """Pick a unique, well-known directory name for the chroot with the specified parameters.

    TODO: How many of these do we expect to have? Currently they are all under a single
    directory, and some filesystems (e.g., HFS+) don't handle directories with thousands of
    entries well. GC'ing old chroots may be enough of a solution, assuming this is even a problem.
    """
    fingerprint_components = [str(interpreter.identity)]

    if pex_info:
      # TODO(John Sirois): When https://rbcommons.com/s/twitter/r/2517/ lands, leverage the dump
      # **kwargs to sort keys or else find some other better way to get a stable fingerprint of
      # PexInfo.
      fingerprint_components.append(json.dumps(json.loads(pex_info.dump()), sort_keys=True))

    fingerprint_components.extend(sorted(t.transitive_invalidation_hash() for t in set(targets)))

    if platforms:
      fingerprint_components.extend(sorted(set(platforms)))

    if extra_requirements:
      # TODO(John Sirois): The extras should be uniqified before fingerprinting, but
      # PythonRequirement arguably does not have a proper __eq__.  For now we lean on the cache_key
      # of unique PythonRequirement being unique - which is probably good enough (the cache key is
      # narrower than the full scope of PythonRequirement attributes at present, thus the hedge).
      fingerprint_components.extend(sorted(set(r.cache_key() for r in extra_requirements)))

    if executable_file_content is not None:
      fingerprint_components.append(executable_file_content)

    fingerprint = hash_utils.hash_all(fingerprint_components)
    return os.path.join(self.chroot_cache_dir, fingerprint)
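
The json.loads/json.dumps round-trip in the pex_info branch exists purely to canonicalize key order. A minimal demonstration of why sort_keys=True makes the fingerprint input stable (the JSON values are illustrative):

import json

a = '{"entry_point": "app", "zip_safe": true}'
b = '{"zip_safe": true, "entry_point": "app"}'
# Same data, different key order: the canonical dumps compare equal, so a
# fingerprint computed from them does not churn between runs.
assert json.dumps(json.loads(a), sort_keys=True) == json.dumps(json.loads(b), sort_keys=True)
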
Example #3
    def combine_ids(ids):
        """Generates a combined id for a set of ids.

        :API: public
        """
        return hash_all(sorted(ids))  # We sort so that the id isn't sensitive to order.

    def resolve_requirement_strings(self, interpreter, requirement_strings):
        """Resolve a list of pip-style requirement strings."""
        requirement_strings = sorted(requirement_strings)
        if len(requirement_strings) == 0:
            req_strings_id = "no_requirements"
        elif len(requirement_strings) == 1:
            req_strings_id = requirement_strings[0]
        else:
            req_strings_id = hash_all(requirement_strings)

        path = os.path.realpath(
            os.path.join(self.workdir, str(interpreter.identity),
                         req_strings_id))
        if not os.path.isdir(path):
            reqs = [
                PythonRequirement(req_str) for req_str in requirement_strings
            ]
            with safe_concurrent_creation(path) as safe_path:
                pex_builder = PexBuilderWrapper.Factory.create(
                    builder=PEXBuilder(path=safe_path,
                                       interpreter=interpreter,
                                       copy=True),
                    log=self.context.log,
                )
                pex_builder.add_resolved_requirements(reqs)
                pex_builder.freeze()
        return PEX(path, interpreter=interpreter)
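
The shape of resolve_requirement_strings recurs throughout these examples: derive a deterministic id, map it to a well-known path, and build into that path only when it is absent, behind an atomic-creation guard so concurrent runs never observe a half-built PEX. The pattern in miniature (names other than hash_all and safe_concurrent_creation are illustrative):

req_strings_id = hash_all(sorted(requirement_strings))  # stable across input orderings
path = os.path.realpath(os.path.join(workdir, str(interpreter.identity), req_strings_id))
if not os.path.isdir(path):
    with safe_concurrent_creation(path) as safe_path:
        build_pex_into(safe_path)  # hypothetical stand-in for the PEXBuilder calls above
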
Example #6
    def combine_cache_keys(cache_keys):
        """Returns a cache key for a list of target sets that already have cache keys.

    This operation is 'idempotent' in the sense that if cache_keys contains a single key
    then that key is returned.

    Note that this operation is commutative but not associative.  We use the term 'combine' rather
    than 'merge' or 'union' to remind the user of this. Associativity is not a necessary property,
    in practice.
    """
        if len(cache_keys) == 1:
            return cache_keys[0]
        else:
            combined_id = Target.maybe_readable_combine_ids(
                cache_key.id for cache_key in cache_keys)
            combined_hash = hash_all(
                sorted(cache_key.hash for cache_key in cache_keys))
            combined_payloads = sorted(
                list(
                    itertools.chain(
                        *[cache_key.payloads for cache_key in cache_keys])))
            summed_chunking_units = sum(
                [cache_key.num_chunking_units for cache_key in cache_keys])
            return CacheKey(combined_id, combined_hash, summed_chunking_units,
                            combined_payloads)

  def compute_fingerprint(self, target):
    super_fingerprint = super(JvmFingerprintStrategy, self).compute_fingerprint(target)

    if not isinstance(target, JvmTarget):
      return super_fingerprint

    hasher = hashlib.sha1()
    hasher.update(super_fingerprint)
    hasher.update(bytes(hash_all(sorted(self.platform_data))))
    return hasher.hexdigest()
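
The docstring's "commutative but not associative" claim is easy to check: sorting the hashes makes a single combine call order-insensitive, but combining in stages re-hashes intermediate digests, so grouping changes the result. A toy check with a pared-down key (assumes a hash_all like the sketch after Example #1):

from collections import namedtuple

Key = namedtuple('Key', ['id', 'hash'])

def combine(keys):
    # Same shape as combine_cache_keys above, minus payloads and chunking units.
    if len(keys) == 1:
        return keys[0]
    return Key('+'.join(k.id for k in keys),
               hash_all(sorted(k.hash for k in keys)))

a, b, c = Key('a', '111'), Key('b', '222'), Key('c', '333')
assert combine([a, b]).hash == combine([b, a]).hash  # commutative
assert combine([combine([a, b]), c]).hash != combine([a, combine([b, c])]).hash  # not associative
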
Example #8
    def compute_fingerprint(self, target):
        target_fp = target.payload.fingerprint()

        if not isinstance(target, JvmTarget):
            return target_fp

        hasher = hashlib.sha1()
        hasher.update(target_fp)
        hasher.update(bytes(hash_all(self.platform_data)))
        return hasher.hexdigest()

  def compute_fingerprint(self, target):
    target_fp = target.payload.fingerprint()

    if not isinstance(target, JvmTarget):
      return target_fp

    hasher = hashlib.sha1()
    hasher.update(target_fp)
    hasher.update(bytes(hash_all(self.platform_data)))
    return hasher.hexdigest()
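
One portability caveat for both variants above: hashlib's update() takes bytes, and bytes(hash_all(...)) only works on Python 2, where bytes is an alias for str; on Python 3 it raises TypeError without an encoding argument. A Python 3 rendering of the same mixing would encode explicitly (a sketch, not the ported pants code):

def compute_platform_fingerprint(target_fp, platform_data):
    hasher = hashlib.sha1()
    hasher.update(target_fp.encode('utf-8'))
    # Sorting platform_data, as the Example #6 variant does, keeps the digest
    # independent of iteration order.
    hasher.update(hash_all(sorted(platform_data)).encode('utf-8'))
    return hasher.hexdigest()
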
Example #10
    def checker_pex(self, interpreter):
        # TODO(John Sirois): Formalize in pants.base?
        pants_dev_mode = os.environ.get('PANTS_DEV')

        if pants_dev_mode:
            checker_id = self.checker_target.transitive_invalidation_hash()
        else:
            checker_id = hash_all([self._CHECKER_REQ])

        pex_path = os.path.join(self.workdir, 'checker', checker_id,
                                str(interpreter.identity))

        if not os.path.exists(pex_path):
            with self.context.new_workunit(name='build-checker'):
                with safe_concurrent_creation(pex_path) as chroot:
                    pex_builder = PexBuilderWrapper(
                        PEXBuilder(path=chroot, interpreter=interpreter),
                        PythonRepos.global_instance(),
                        PythonSetup.global_instance(), self.context.log)

                    # Constraining is required to guard against the case where the user
                    # has a pexrc file set.
                    pex_builder.add_interpreter_constraint(
                        str(interpreter.identity.requirement))

                    if pants_dev_mode:
                        pex_builder.add_sources_from(self.checker_target)
                        req_libs = [
                            tgt for tgt in self.checker_target.closure()
                            if isinstance(tgt, PythonRequirementLibrary)
                        ]

                        pex_builder.add_requirement_libs_from(
                            req_libs=req_libs)
                    else:
                        try:
                            # The checker is already on sys.path, e.g. embedded in pants.pex.
                            working_set = WorkingSet(entries=sys.path)
                            for dist in working_set.resolve(
                                [Requirement.parse(self._CHECKER_REQ)]):
                                pex_builder.add_direct_requirements(
                                    dist.requires())
                                pex_builder.add_distribution(dist)
                            pex_builder.add_direct_requirements(
                                [self._CHECKER_REQ])
                        except DistributionNotFound:
                            # We need to resolve the checker from a local or remote distribution repo.
                            pex_builder.add_resolved_requirements(
                                [PythonRequirement(self._CHECKER_REQ)])

                    pex_builder.set_entry_point(self._CHECKER_ENTRYPOINT)
                    pex_builder.freeze()

        return PEX(pex_path, interpreter=interpreter)

    def compute_fingerprint(self, target):
        super_fingerprint = super(JvmFingerprintStrategy,
                                  self).compute_fingerprint(target)

        if not isinstance(target, JvmTarget):
            return super_fingerprint

        hasher = hashlib.sha1()
        hasher.update(super_fingerprint)
        hasher.update(bytes(hash_all(sorted(self.platform_data))))
        return hasher.hexdigest()
Example #12
  def checker_pex(self, interpreter):
    # TODO(John Sirois): Formalize in pants.base?
    pants_dev_mode = os.environ.get('PANTS_DEV')

    if pants_dev_mode:
      checker_id = self.checker_target.transitive_invalidation_hash()
    else:
      checker_id = hash_all([self._CHECKER_REQ])

    pex_path = os.path.join(self.workdir, 'checker', checker_id, str(interpreter.identity))

    if not os.path.exists(pex_path):
      with self.context.new_workunit(name='build-checker'):
        with safe_concurrent_creation(pex_path) as chroot:
          pex_builder = PexBuilderWrapper.Factory.create(
            builder=PEXBuilder(path=chroot, interpreter=interpreter),
            log=self.context.log)

          # Constraining is required to guard against the case where the user
          # has a pexrc file set.
          pex_builder.add_interpreter_constraint(str(interpreter.identity.requirement))

          if pants_dev_mode:
            pex_builder.add_sources_from(self.checker_target)
            req_libs = [tgt for tgt in self.checker_target.closure()
                        if isinstance(tgt, PythonRequirementLibrary)]

            pex_builder.add_requirement_libs_from(req_libs=req_libs)
          else:
            try:
              # The checker is already on sys.path, e.g. embedded in pants.pex.
              platform = Platform.current()
              platform_name = platform.platform
              env = Environment(search_path=sys.path,
                                platform=platform_name,
                                python=interpreter.version_string)
              working_set = WorkingSet(entries=sys.path)
              for dist in working_set.resolve([Requirement.parse(self._CHECKER_REQ)], env=env):
                pex_builder.add_direct_requirements(dist.requires())
              # NB: We add the dist location instead of the dist itself to make sure it's a
                # distribution style pex knows how to package.
                pex_builder.add_dist_location(dist.location)
              pex_builder.add_direct_requirements([self._CHECKER_REQ])
            except (DistributionNotFound, PEXBuilder.InvalidDistribution):
              # We need to resolve the checker from a local or remote distribution repo.
              pex_builder.add_resolved_requirements(
                [PythonRequirement(self._CHECKER_REQ)])

          pex_builder.set_entry_point(self._CHECKER_ENTRYPOINT)
          pex_builder.freeze()

    return PEX(pex_path, interpreter=interpreter)
Example #13
    def _get_mypy_pex(self, py3_interpreter: PythonInterpreter,
                      *extra_pexes: PEX) -> PEX:
        mypy_version = self._mypy_subsystem.options.version
        extras_hash = hash_utils.hash_all(
            hash_utils.hash_dir(Path(extra_pex.path()))
            for extra_pex in extra_pexes)

        path = Path(self.workdir, str(py3_interpreter.identity),
                    f"{mypy_version}-{extras_hash}")
        pex_dir = str(path)
        if not path.is_dir():
            mypy_requirement_pex = self.resolve_requirement_strings(
                py3_interpreter, [mypy_version])
            pex_info = PexInfo.default()
            pex_info.entry_point = "pants_mypy_launcher"
            with self.merged_pex(
                    path=pex_dir,
                    pex_info=pex_info,
                    interpreter=py3_interpreter,
                    pexes=[mypy_requirement_pex, *extra_pexes],
            ) as builder:
                with temporary_file(binary_mode=False) as exe_fp:
                    # MyPy searches for types for a package in packages containing a `py.typed` marker file
                    # or else in a sibling `<package>-stubs` package as per PEP-0561. Going further than that
                    # PEP, MyPy restricts its search to `site-packages`. Since PEX deliberately isolates
                    # itself from `site-packages` as part of its raison d'etre, we monkey-patch
                    # `site.getsitepackages` to look inside the scrubbed PEX sys.path before handing off to
                    # `mypy`.
                    #
                    # See:
                    #   https://mypy.readthedocs.io/en/stable/installed_packages.html#installed-packages
                    #   https://www.python.org/dev/peps/pep-0561/#stub-only-packages
                    exe_fp.write(
                        dedent("""
                            import runpy
                            import site
                            import sys
                
                
                            site.getsitepackages = lambda: sys.path[:]
                
                            
                            runpy.run_module('mypy', run_name='__main__')
                            """))
                    exe_fp.flush()
                    builder.set_executable(
                        filename=exe_fp.name,
                        env_filename=f"{pex_info.entry_point}.py")
                builder.freeze(bytecode_compile=False)

        return PEX(pex_dir, py3_interpreter)

  def combine_cache_keys(cls, cache_keys):
    """Returns a cache key for a list of target sets that already have cache keys.

    This operation is 'idempotent' in the sense that if cache_keys contains a single key
    then that key is returned.

    Note that this operation is commutative but not associative.  We use the term 'combine' rather
    than 'merge' or 'union' to remind the user of this. Associativity is not a necessary property,
    in practice.
    """
    if len(cache_keys) == 1:
      return cache_keys[0]
    else:
      combined_id = Target.maybe_readable_combine_ids(cache_key.id for cache_key in cache_keys)
      combined_hash = hash_all(sorted(cache_key.hash for cache_key in cache_keys))
      return cls(combined_id, combined_hash)
Example #15
  def combine_cache_keys(cache_keys):
    """Returns a cache key for a list of target sets that already have cache keys.

    This operation is 'idempotent' in the sense that if cache_keys contains a single key
    then that key is returned.

    Note that this operation is commutative but not associative.  We use the term 'combine' rather
    than 'merge' or 'union' to remind the user of this. Associativity is not a necessary property,
    in practice.
    """
    if len(cache_keys) == 1:
      return cache_keys[0]
    else:
      combined_id = Target.maybe_readable_combine_ids(cache_key.id for cache_key in cache_keys)
      combined_hash = hash_all(sorted(cache_key.hash for cache_key in cache_keys))
      return CacheKey(combined_id, combined_hash)

  def resolve_requirement_strings(self, interpreter, requirement_strings):
    """Resolve a list of pip-style requirement strings."""
    requirement_strings = sorted(requirement_strings)
    if len(requirement_strings) == 0:
      req_strings_id = 'no_requirements'
    elif len(requirement_strings) == 1:
      req_strings_id = requirement_strings[0]
    else:
      req_strings_id = hash_all(requirement_strings)

    path = os.path.realpath(os.path.join(self.workdir, str(interpreter.identity), req_strings_id))
    if not os.path.isdir(path):
      reqs = [PythonRequirement(req_str) for req_str in requirement_strings]
      with safe_concurrent_creation(path) as safe_path:
        builder = PEXBuilder(path=safe_path, interpreter=interpreter, copy=True)
        dump_requirements(builder, interpreter, reqs, self.context.log)
        builder.freeze()
    return PEX(path, interpreter=interpreter)
Example #17
  def _chroot_path(self, python_setup, interpreter, pex_info, targets, platforms,
                   extra_requirements, executable_file_content):
    """Pick a unique, well-known directory name for the chroot with the specified parameters.

    TODO: How many of these do we expect to have? Currently they are all under a single
    directory, and some filesystems (e.g., HFS+) don't handle directories with thousands of
    entries well. GC'ing old chroots may be enough of a solution, assuming this is even a problem.
    """
    fingerprint_components = [str(interpreter.identity)]
    if pex_info:
      fingerprint_components.append(pex_info.dump())
    fingerprint_components.extend(filter(None, [t.payload.fingerprint() for t in targets]))
    if platforms:
      fingerprint_components.extend(platforms)
    if extra_requirements:
      fingerprint_components.extend([r.cache_key() for r in extra_requirements])
    if executable_file_content is not None:
      fingerprint_components.append(executable_file_content)

    fingerprint = hash_utils.hash_all(fingerprint_components)
    return os.path.join(python_setup.chroot_cache_dir, fingerprint)
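
Unlike the variant in Example #2, this one extends fingerprint_components in raw iteration order and without de-duplication, so logically identical inputs can fingerprint differently. The order sensitivity is visible in hash_all itself (the platform strings are illustrative):

# Different orderings concatenate differently, hence hash differently.
assert hash_all(['linux-x86_64', 'macosx-10.13-x86_64']) != \
       hash_all(['macosx-10.13-x86_64', 'linux-x86_64'])
# Sorting first, as the Example #2 variant does, restores a stable fingerprint.
assert hash_all(sorted(['b', 'a'])) == hash_all(sorted(['a', 'b']))
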
Example #18
    def _chroot_path(self, python_setup, interpreter, pex_info, targets,
                     platforms, extra_requirements, executable_file_content):
        """Pick a unique, well-known directory name for the chroot with the specified parameters.

    TODO: How many of these do we expect to have? Currently they are all under a single
    directory, and some filesystems (E.g., HFS+) don't handle directories with thousands of
    entries well. GC'ing old chroots may be enough of a solution, assuming this is even a problem.
    """
        fingerprint_components = [str(interpreter.identity)]
        if pex_info:
            fingerprint_components.append(pex_info.dump())
        fingerprint_components.extend(
            filter(None, [t.payload.fingerprint() for t in targets]))
        if platforms:
            fingerprint_components.extend(platforms)
        if extra_requirements:
            fingerprint_components.extend(
                [r.cache_key() for r in extra_requirements])
        if executable_file_content is not None:
            fingerprint_components.append(executable_file_content)

        fingerprint = hash_utils.hash_all(fingerprint_components)
        return os.path.join(self.chroot_cache_dir, fingerprint)
Example #19
    def _consolidate_classpath(self, targets, classpath_products):
        """Convert loose directories in classpath_products into jars."""
        # TODO: find a way to not process classpath entries for valid VTs.

        # NB: It is very expensive to call to get entries for each target one at a time.
        # For performance reasons we look them all up at once.
        entries_map = defaultdict(list)
        for cp, target in classpath_products.get_product_target_mappings_for_targets(
                targets, True):
            entries_map[target].append(cp)

        with self.invalidated(targets=targets,
                              invalidate_dependents=True) as invalidation:
            for vt in invalidation.all_vts:
                entries = entries_map.get(vt.target, [])
                for conf, entry in entries:
                    relpath = fast_relpath(entry.path,
                                           self.get_options().pants_workdir)
                    suffix = hash_all([relpath])[:6]
                    if ClasspathUtil.is_dir(entry.path):
                        jarpath = os.path.join(vt.results_dir,
                                               f"output-{suffix}.jar")

                        # Regenerate artifact for invalid vts.
                        if not vt.valid:
                            with self.open_jar(jarpath,
                                               overwrite=True,
                                               compressed=False) as jar:
                                jar.write(entry.path)

                        # Replace directory classpath entry with its jarpath.
                        classpath_products.remove_for_target(
                            vt.target, [(conf, entry)])
                        classpath_products.add_for_target(
                            vt.target, [(conf, jarpath)])
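
The six-character slice of the digest trades collision resistance for short jar names; with only a handful of classpath entries per results_dir, 24 bits of hash is ample. The naming scheme in miniature (the relpath and results_dir are illustrative):

relpath = 'compile/zinc/current/classes'  # hypothetical workdir-relative path
suffix = hash_all([relpath])[:6]          # first 6 hex chars of the digest
jarpath = os.path.join(results_dir, f'output-{suffix}.jar')
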
Example #20
 def test_hash_all(self):
     expected_hash = hashlib.md5()
     expected_hash.update(b'jakejones')
     self.assertEqual(expected_hash.hexdigest(),
                      hash_all(['jake', 'jones'], digest=hashlib.md5()))
Example #21
 def test_hash_all(self):
   expected_hash = hashlib.md5()
   expected_hash.update(b'jakejones')
   self.assertEqual(expected_hash.hexdigest(), hash_all(['jake', 'jones'], digest=hashlib.md5()))
Example #22
 def test_hash_all(self):
     expected_hash = hashlib.sha1()
     expected_hash.update(b"jakejones")
     assert expected_hash.hexdigest() == hash_all(["jake", "jones"])
Example #23
    def _get_mypy_pex(self, py3_interpreter: PythonInterpreter, *extra_pexes: PEX) -> PEX:
        mypy_version = self._mypy_subsystem.options.version
        extras_hash = hash_utils.hash_all(
            hash_utils.hash_dir(Path(extra_pex.path())) for extra_pex in extra_pexes
        )

        path = Path(self.workdir, str(py3_interpreter.identity), f"{mypy_version}-{extras_hash}")
        pex_dir = str(path)
        if not path.is_dir():
            mypy_requirement_pex = self.resolve_requirement_strings(py3_interpreter, [mypy_version])
            pex_info = PexInfo.default()
            pex_info.entry_point = "pants_mypy_launcher"
            with self.merged_pex(
                path=pex_dir,
                pex_info=pex_info,
                interpreter=py3_interpreter,
                pexes=[mypy_requirement_pex, *extra_pexes],
            ) as builder:
                with temporary_file(binary_mode=False) as exe_fp:
                    # MyPy searches for types for a package in packages containing a `py.typed`
                    # marker file or else in a sibling `<package>-stubs` package as per PEP-0561.
                    # Going further than that PEP, MyPy restricts its search to `site-packages`.
                    # Since PEX deliberately isolates itself from `site-packages` as part of its
                    # raison d'etre, we monkey-patch `site.getsitepackages` to look inside the
                    # scrubbed PEX sys.path before handing off to `mypy`.
                    #
                    # As a complication, MyPy does its own validation to ensure packages aren't
                    # both available in site-packages and on the PYTHONPATH. As such, we elide all
                    # PYTHONPATH entries from artificial site-packages we set up since MyPy will
                    # manually scan PYTHONPATH outside this PEX to find packages.
                    #
                    # See:
                    #   https://mypy.readthedocs.io/en/stable/installed_packages.html#installed-packages
                    #   https://www.python.org/dev/peps/pep-0561/#stub-only-packages
                    exe_fp.write(
                        dedent(
                            """
                            import os
                            import runpy
                            import site
                            import sys

                            PYTHONPATH = frozenset(
                                os.path.realpath(p)
                                for p in os.environ.get('PYTHONPATH', '').split(os.pathsep)
                            )

                            site.getsitepackages = lambda: [
                                p for p in sys.path if os.path.realpath(p) not in PYTHONPATH
                            ]

                            runpy.run_module('mypy', run_name='__main__')
                            """
                        )
                    )
                    exe_fp.flush()
                    builder.set_executable(
                        filename=exe_fp.name, env_filename=f"{pex_info.entry_point}.py"
                    )
                builder.freeze(bytecode_compile=False)

        return PEX(pex_dir, py3_interpreter)
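
The realpath-based scrubbing in that launcher can be exercised on its own: entries that arrived via PYTHONPATH are excluded from the faked site-packages list, so MyPy's duplicate-package validation does not trip on them. A standalone check (the paths are illustrative):

import os

PYTHONPATH = frozenset(os.path.realpath(p) for p in ['/repo/src'])
sys_path = ['/repo/src', '/pex/.deps/mypy']
site_packages = [p for p in sys_path if os.path.realpath(p) not in PYTHONPATH]
assert site_packages == ['/pex/.deps/mypy']
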
Example #24
 def test_hash_all(self):
     expected_hash = hashlib.md5()
     expected_hash.update(b"jakejones")
     self.assertEqual(expected_hash.hexdigest(),
                      hash_all(["jake", "jones"], digest=hashlib.md5()))
Example #25
 def combine_ids(ids):
   """Generates a combined id for a set of ids."""
   return hash_all(sorted(ids))  # We sort so that the id isn't sensitive to order.
Example #26
    def _get_mypy_pex(self, py3_interpreter: PythonInterpreter,
                      *extra_pexes: PEX) -> PEX:
        def get_mypy_version() -> str:
            task_version_configured = not self.get_options().is_default('version')
            subsystem_version_configured = (
                not self._mypy_subsystem.get_options().is_default('version'))
            if task_version_configured and subsystem_version_configured:
                raise ValueError(
                    "Conflicting options for the MyPy version used. You used the new, preferred "
                    "`--mypy-version`, but also used the deprecated `--lint-mypy-version`.\n"
                    "Please use only one of these (preferably `--mypy-version`).")
            if task_version_configured:
                return f"mypy=={self.get_options().version}"
            return cast(str, self._mypy_subsystem.get_options().version)

        mypy_version = get_mypy_version()
        extras_hash = hash_utils.hash_all(
            hash_utils.hash_dir(Path(extra_pex.path()))
            for extra_pex in extra_pexes)

        path = Path(self.workdir, str(py3_interpreter.identity),
                    f'{mypy_version}-{extras_hash}')
        pex_dir = str(path)
        if not path.is_dir():
            mypy_requirement_pex = self.resolve_requirement_strings(
                py3_interpreter, [mypy_version])
            pex_info = PexInfo.default()
            pex_info.entry_point = 'pants_mypy_launcher'
            with self.merged_pex(path=pex_dir,
                                 pex_info=pex_info,
                                 interpreter=py3_interpreter,
                                 pexes=[mypy_requirement_pex,
                                        *extra_pexes]) as builder:
                with temporary_file(binary_mode=False) as exe_fp:
                    # MyPy searches for types for a package in packages containing a `py.typed` marker file
                    # or else in a sibling `<package>-stubs` package as per PEP-0561. Going further than that
                    # PEP, MyPy restricts its search to `site-packages`. Since PEX deliberately isolates
                    # itself from `site-packages` as part of its raison d'etre, we monkey-patch
                    # `site.getsitepackages` to look inside the scrubbed PEX sys.path before handing off to
                    # `mypy`.
                    #
                    # See:
                    #   https://mypy.readthedocs.io/en/stable/installed_packages.html#installed-packages
                    #   https://www.python.org/dev/peps/pep-0561/#stub-only-packages
                    exe_fp.write(
                        dedent("""
                            import runpy
                            import site
                            import sys


                            site.getsitepackages = lambda: sys.path[:]


                            runpy.run_module('mypy', run_name='__main__')
                            """))
                    exe_fp.flush()
                    builder.set_executable(
                        filename=exe_fp.name,
                        env_filename=f'{pex_info.entry_point}.py')
                builder.freeze(bytecode_compile=False)

        return PEX(pex_dir, py3_interpreter)