Example #1
 def _as_local_file_path(self, url):
   path = re.sub(r'^//', '', strip_prefix(url, 'file:'))
   if path.startswith('/'):
     return path
   elif url.startswith('file:'):
     return os.path.join(self._root_dir, path)
   else:
     return None
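Every example on this page relies on the same `strip_prefix` helper. Its source is not shown here, but the tests in Examples #2-#4 pin down the behavior: the prefix is removed at most once, only when the string literally starts with it, and there is no regex matching. A minimal sketch consistent with those tests:

def strip_prefix(string: str, prefix: str) -> str:
    # Remove `prefix` from the start of `string` if present; otherwise
    # return `string` unchanged. One removal at most, no regex.
    return string[len(prefix):] if string.startswith(prefix) else string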
Example #2
 def test_strip_prefix(self):
   self.assertEqual('testString', strip_prefix('testString', '//'))
   self.assertEqual('/testString', strip_prefix('/testString', '//'))
   self.assertEqual('testString', strip_prefix('//testString', '//'))
   self.assertEqual('/testString', strip_prefix('///testString', '//'))
   self.assertEqual('//testString', strip_prefix('////testString', '//'))
   self.assertEqual('test//String', strip_prefix('test//String', '//'))
   self.assertEqual('testString//', strip_prefix('testString//', '//'))
Example #3
def test_strip_prefix() -> None:
    assert "testString" == strip_prefix("testString", "//")
    assert "/testString" == strip_prefix("/testString", "//")
    assert "testString" == strip_prefix("//testString", "//")
    assert "/testString" == strip_prefix("///testString", "//")
    assert "//testString" == strip_prefix("////testString", "//")
    assert "test//String" == strip_prefix("test//String", "//")
    assert "testString//" == strip_prefix("testString//", "//")
Example #4
 def test_strip_prefix(self):
      self.assertEqual('testString', strip_prefix('testString', '//'))
      self.assertEqual('/testString', strip_prefix('/testString', '//'))
      self.assertEqual('testString', strip_prefix('//testString', '//'))
      self.assertEqual('/testString', strip_prefix('///testString', '//'))
      self.assertEqual('//testString', strip_prefix('////testString', '//'))
      self.assertEqual('test//String', strip_prefix('test//String', '//'))
      self.assertEqual('testString//', strip_prefix('testString//', '//'))
Example #5
def initialize_stdio_raw(
    global_level: LogLevel,
    log_show_rust_3rdparty: bool,
    show_target: bool,
    log_levels_by_target: dict[str, LogLevel],
    print_stacktrace: bool,
    ignore_warnings: list[str],
    pants_workdir: str,
) -> Iterator[None]:
    literal_filters = []
    regex_filters = []
    for filt in ignore_warnings:
        if filt.startswith("$regex$"):
            regex_filters.append(strip_prefix(filt, "$regex$"))
        else:
            literal_filters.append(filt)

    # Set the pants log destination.
    log_path = str(pants_log_path(PurePath(pants_workdir)))
    safe_mkdir_for(log_path)

    # Initialize thread-local stdio, and replace sys.std* with proxies.
    original_stdin, original_stdout, original_stderr = sys.stdin, sys.stdout, sys.stderr
    try:
        raw_stdin, sys.stdout, sys.stderr = native_engine.stdio_initialize(
            global_level.level,
            log_show_rust_3rdparty,
            show_target,
            {k: v.level
             for k, v in log_levels_by_target.items()},
            tuple(literal_filters),
            tuple(regex_filters),
            log_path,
        )
        sys.stdin = TextIOWrapper(
            BufferedReader(raw_stdin),
            # NB: We set the default encoding explicitly to bypass logic in the TextIOWrapper
            # constructor that would poke the underlying file (which is not valid until a
            # `stdio_destination` is set).
            encoding=locale.getpreferredencoding(False),
        )

        sys.__stdin__, sys.__stdout__, sys.__stderr__ = sys.stdin, sys.stdout, sys.stderr  # type: ignore[assignment]
        # Install a Python logger that will route through the Rust logger.
        with _python_logging_setup(global_level,
                                   log_levels_by_target,
                                   print_stacktrace=print_stacktrace):
            yield
    finally:
        sys.stdin, sys.stdout, sys.stderr = original_stdin, original_stdout, original_stderr
        sys.__stdin__, sys.__stdout__, sys.__stderr__ = sys.stdin, sys.stdout, sys.stderr  # type: ignore[assignment]
Example #6
def download_pex_bin() -> Iterator[Path]:
    """Download PEX and return the path to the binary."""
    try:
        pex_version = next(
            strip_prefix(ln, "pex==").rstrip() for ln in Path(
                "3rdparty/python/requirements.txt").read_text().splitlines()
            if ln.startswith("pex=="))
    except (FileNotFoundError, StopIteration) as exc:
        die("Could not find a requirement starting with `pex==` in "
            f"3rdparty/python/requirements.txt: {repr(exc)}")

    with TemporaryDirectory() as tempdir:
        resp = requests.get(
            f"https://github.com/pantsbuild/pex/releases/download/v{pex_version}/pex"
        )
        resp.raise_for_status()
        result = Path(tempdir, "pex")
        result.write_bytes(resp.content)
        yield result
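The version lookup above reduces to a one-liner: find the `pex==` line and strip that prefix. A self-contained sketch with an invented requirements file (the version number here is made up):

def strip_prefix(string: str, prefix: str) -> str:
    # Assumed implementation; see the tests in Examples #2-#4.
    return string[len(prefix):] if string.startswith(prefix) else string

requirements = "ansicolors==1.1.8\npex==2.1.42\n"  # hypothetical contents
pex_version = next(
    strip_prefix(ln, "pex==").rstrip()
    for ln in requirements.splitlines()
    if ln.startswith("pex==")
)
assert pex_version == "2.1.42"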
Example #7
 def normalize_absolute_refs(ref):
   return strip_prefix(ref, '//')
Example #8
 def normalize_absolute_refs(ref: str) -> str:
     return strip_prefix(ref, "//")
Example #9
    def parse(
        cls,
        spec: str,
        relative_to: str | None = None,
        subproject_roots: Sequence[str] | None = None,
    ) -> AddressInput:
        """Parse a string into an AddressInput.

        :param spec: Target address spec.
        :param relative_to: path to use for sibling specs, e.g. ':another_in_same_build_family';
          a missing spec_path part is interpreted as `relative_to`.
        :param subproject_roots: Paths that correspond with embedded build roots under
          the current build root.

        For example:

            some_target(
                name='mytarget',
                dependencies=['path/to/buildfile:targetname'],
            )

        Where `path/to/buildfile:targetname` is the dependent target address spec.

        If there is no target name component, it defaults to the default target in the resulting
        Address's spec_path.

        Optionally, specs can be prefixed with '//' to denote an absolute spec path. This is
        normally not significant except when a spec referring to a root level target is needed
        from deeper in the tree. For example, in `path/to/buildfile/BUILD`:

            some_target(
                name='mytarget',
                dependencies=[':targetname'],
            )

        The `targetname` spec refers to a target defined in `path/to/buildfile/BUILD*`. If instead
        you want to reference `targetname` in a root level BUILD file, use the absolute form.
        For example:

            some_target(
                name='mytarget',
                dependencies=['//:targetname'],
            )

        The spec may be for a generated target: `dir:generator#generated`.

        The spec may be a file, such as `a/b/c.txt`. It may include a relative address spec at the
        end, such as `a/b/c.txt:original` or `a/b/c.txt:../original`, to disambiguate which target
        the file comes from; otherwise, it will be assumed to come from the default target in the
        directory, i.e. a target which leaves off `name`.
        """
        subproject = (longest_dir_prefix(relative_to, subproject_roots)
                      if relative_to and subproject_roots else None)

        def prefix_subproject(spec_path: str) -> str:
            if not subproject:
                return spec_path
            if spec_path:
                return os.path.join(subproject, spec_path)
            return os.path.normpath(subproject)

        spec_parts = spec.split(":", maxsplit=1)
        path_component = spec_parts[0]
        if len(spec_parts) == 1:
            target_component = None
            generated_parts = path_component.split("#", maxsplit=1)
            if len(generated_parts) == 1:
                generated_component = None
            else:
                path_component, generated_component = generated_parts
        else:
            generated_parts = spec_parts[1].split("#", maxsplit=1)
            if len(generated_parts) == 1:
                target_component = generated_parts[0]
                generated_component = None
            else:
                target_component, generated_component = generated_parts

        normalized_relative_to = None
        if relative_to:
            normalized_relative_to = (fast_relpath(relative_to, subproject)
                                      if subproject else relative_to)
        if path_component.startswith("./") and normalized_relative_to:
            path_component = os.path.join(normalized_relative_to,
                                          path_component[2:])
        if not path_component and normalized_relative_to:
            path_component = normalized_relative_to

        path_component = prefix_subproject(strip_prefix(path_component, "//"))

        return cls(path_component, target_component, generated_component)
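The path normalization at the end of `parse` is the part that uses `strip_prefix`: a `./` spec is joined onto `relative_to`, an empty path falls back to `relative_to`, and the leading `//` absolute-spec marker described in the docstring is stripped. A standalone sketch of just that step (subproject handling omitted, `strip_prefix` assumed as in Examples #2-#4):

import os

def strip_prefix(string: str, prefix: str) -> str:
    # Assumed implementation; see the tests in Examples #2-#4.
    return string[len(prefix):] if string.startswith(prefix) else string

def normalize_path(path_component: str, relative_to: str | None) -> str:
    # Mirrors the tail of AddressInput.parse, minus subproject prefixing.
    if path_component.startswith("./") and relative_to:
        path_component = os.path.join(relative_to, path_component[2:])
    if not path_component and relative_to:
        path_component = relative_to
    return strip_prefix(path_component, "//")

assert normalize_path("//src/app", None) == "src/app"
assert normalize_path("", "path/to") == "path/to"
assert normalize_path("./lib", "path/to") == "path/to/lib"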
Example #10
def initialize_stdio(
        global_bootstrap_options: OptionValueContainer) -> Iterator[None]:
    """Mutates sys.std* and logging to route stdio for a Pants process to thread local destinations.

    In this context, `sys.std*` and logging handlers will route through Rust code that uses
    thread-local information to decide whether to write to a file, or to stdio file handles.

    To control the stdio destination set by this method, use the `stdio_destination` context manager.

    This is called in two different processes:
    * PantsRunner, after it has determined that LocalPantsRunner will be running in process, and
      immediately before setting a `stdio_destination` for the remainder of the run.
    * PantsDaemon, immediately on startup. The process will then default to sending stdio to the log
      until client connections arrive, at which point `stdio_destination` is used per-connection.
    """
    global_level = global_bootstrap_options.level
    log_show_rust_3rdparty = global_bootstrap_options.log_show_rust_3rdparty
    show_target = global_bootstrap_options.show_log_target
    log_levels_by_target = _get_log_levels_by_target(global_bootstrap_options)
    print_stacktrace = global_bootstrap_options.print_stacktrace
    local_cleanup = global_bootstrap_options.process_execution_local_cleanup

    literal_filters = []
    regex_filters = []
    for filt in cast("list[str]", global_bootstrap_options.ignore_warnings):
        if filt.startswith("$regex$"):
            regex_filters.append(strip_prefix(filt, "$regex$"))
        else:
            literal_filters.append(filt)

    # Set the pants log destination.
    log_path = str(
        pants_log_path(PurePath(global_bootstrap_options.pants_workdir)))
    safe_mkdir_for(log_path)

    # Initialize thread-local stdio, and replace sys.std* with proxies.
    original_stdin, original_stdout, original_stderr = sys.stdin, sys.stdout, sys.stderr
    try:
        raw_stdin, sys.stdout, sys.stderr = native_engine.stdio_initialize(
            global_level.level,
            log_show_rust_3rdparty,
            show_target,
            {k: v.level
             for k, v in log_levels_by_target.items()},
            tuple(literal_filters),
            tuple(regex_filters),
            log_path,
        )
        sys.stdin = TextIOWrapper(
            BufferedReader(raw_stdin),
            # NB: We set the default encoding explicitly to bypass logic in the TextIOWrapper
            # constructor that would poke the underlying file (which is not valid until a
            # `stdio_destination` is set).
            encoding=locale.getpreferredencoding(False),
        )

        sys.__stdin__, sys.__stdout__, sys.__stderr__ = sys.stdin, sys.stdout, sys.stderr
        # Install a Python logger that will route through the Rust logger.
        with _python_logging_setup(global_level,
                                   print_stacktrace=print_stacktrace,
                                   local_cleanup=local_cleanup):
            yield
    finally:
        sys.stdin, sys.stdout, sys.stderr = original_stdin, original_stdout, original_stderr
        sys.__stdin__, sys.__stdout__, sys.__stderr__ = sys.stdin, sys.stdout, sys.stderr
Example #11
async def download_and_analyze_third_party_packages(
    request: AllThirdPartyPackagesRequest, ) -> AllThirdPartyPackages:
    # NB: We download all modules to GOPATH=$(pwd)/gopath. Running `go list ...` from $(pwd) would
    # naively try to analyze the contents of the GOPATH as if they were first-party packages. This
    # results in errors like this:
    #
    #   package <import_path>/gopath/pkg/mod/golang.org/x/text@<version>/unicode: can only use
    #   path@version syntax with 'go get' and 'go install' in module-aware mode
    #
    # Instead, we run `go list` from a subdirectory of the chroot. It can still access the
    # contents of `GOPATH`, but won't incorrectly treat its contents as first-party packages.
    go_mod_prefix = "go_mod_prefix"
    go_mod_prefixed_digest = await Get(
        Digest, AddPrefix(request.go_mod_stripped_digest, go_mod_prefix))

    list_argv = (
        "list",
        # This rule can't modify `go.mod` and `go.sum` as it would require mutating the workspace.
        # Instead, we expect them to be well-formed already.
        #
        # It would be convenient to set `-mod=mod` to allow edits, and then compare the resulting
        # files to the input so that we could print a diff for the user to know how to update. But
        # `-mod=mod` results in more packages being downloaded and added to `go.mod` than is
        # actually necessary.
        # TODO: nice error when `go.mod` and `go.sum` would need to change. Right now, it's a
        #  message from Go and won't be intuitive for Pants users what to do.
        "-mod=readonly",
        # There may be some packages in the transitive closure that cannot be built, but we should
        # not blow up Pants.
        #
        # For example, a package that sets the special value `package documentation` and has no
        # source files would naively error due to `build constraints exclude all Go files`, even
        # though we should not error on that package.
        "-e",
        "-json",
        # This matches all packages. `all` only matches first-party packages and complains that
        # there are no `.go` files.
        "...",
    )
    list_result = await Get(
        ProcessResult,
        GoSdkProcess(
            command=list_argv,
            # TODO: make this more descriptive: point to the actual `go_mod` target or path.
            description=
            "Run `go list` to download and analyze all third-party Go packages",
            input_digest=go_mod_prefixed_digest,
            output_directories=("gopath/pkg/mod", ),
            working_dir=go_mod_prefix,
            allow_downloads=True,
        ),
    )
    stripped_result_digest = await Get(
        Digest, RemovePrefix(list_result.output_digest, "gopath/pkg/mod"))

    all_digest_subset_gets = []
    all_pkg_info_kwargs = []
    all_failed_pkg_info = []
    for pkg_json in ijson.items(list_result.stdout, "", multiple_values=True):
        if "Standard" in pkg_json:
            continue
        import_path = pkg_json["ImportPath"]

        maybe_error, maybe_failed_pkg_info = maybe_raise_or_create_error_or_create_failed_pkg_info(
            pkg_json, import_path)
        if maybe_failed_pkg_info:
            all_failed_pkg_info.append(maybe_failed_pkg_info)
            continue

        dir_path = strip_prefix(strip_v2_chroot_path(pkg_json["Dir"]),
                                "gopath/pkg/mod/")
        all_pkg_info_kwargs.append(
            dict(
                import_path=import_path,
                subpath=dir_path,
                imports=tuple(pkg_json.get("Imports", ())),
                go_files=tuple(pkg_json.get("GoFiles", ())),
                s_files=tuple(pkg_json.get("SFiles", ())),
                minimum_go_version=pkg_json.get("Module", {}).get("GoVersion"),
                error=maybe_error,
            ))
        all_digest_subset_gets.append(
            Get(
                Digest,
                DigestSubset(
                    stripped_result_digest,
                    PathGlobs(
                        [os.path.join(dir_path, "*")],
                        glob_match_error_behavior=GlobMatchErrorBehavior.error,
                        description_of_origin=f"downloading {import_path}",
                    ),
                ),
            ))

    all_digest_subsets = await MultiGet(all_digest_subset_gets)
    import_path_to_info = {
        pkg_info_kwargs["import_path"]: ThirdPartyPkgInfo(digest=digest_subset,
                                                          **pkg_info_kwargs)
        for pkg_info_kwargs, digest_subset in zip(all_pkg_info_kwargs,
                                                  all_digest_subsets)
    }
    import_path_to_info.update(
        (pkg_info.import_path, pkg_info) for pkg_info in all_failed_pkg_info)
    return AllThirdPartyPackages(list_result.output_digest,
                                 FrozenDict(import_path_to_info))
Example #12
    def parse(
        cls,
        spec: str,
        *,
        relative_to: str | None = None,
        subproject_roots: Sequence[str] | None = None,
        description_of_origin: str,
    ) -> AddressInput:
        """Parse a string into an AddressInput.

        :param spec: Target address spec.
        :param relative_to: path to use for sibling specs, e.g. ':another_in_same_build_family';
          a missing spec_path part is interpreted as `relative_to`.
        :param subproject_roots: Paths that correspond with embedded build roots under
          the current build root.
        :param description_of_origin: where the AddressInput comes from, e.g. "CLI arguments" or
          "the option `--paths-from`". This is used for better error messages.

        For example:

            some_target(
                name='mytarget',
                dependencies=['path/to/buildfile:targetname'],
            )

        Where `path/to/buildfile:targetname` is the dependent target address spec.

        If there is no target name component, it defaults to the default target in the resulting
        Address's spec_path.

        Optionally, specs can be prefixed with '//' to denote an absolute spec path. This is
        normally not significant except when a spec referring to a root level target is needed
        from deeper in the tree. For example, in `path/to/buildfile/BUILD`:

            some_target(
                name='mytarget',
                dependencies=[':targetname'],
            )

        The `targetname` spec refers to a target defined in `path/to/buildfile/BUILD*`. If instead
        you want to reference `targetname` in a root level BUILD file, use the absolute form.
        For example:

            some_target(
                name='mytarget',
                dependencies=['//:targetname'],
            )

        The spec may be for a generated target: `dir:generator#generated`.

        The spec may be a file, such as `a/b/c.txt`. It may include a relative address spec at the
        end, such as `a/b/c.txt:original` or `a/b/c.txt:../original`, to disambiguate which target
        the file comes from; otherwise, it will be assumed to come from the default target in the
        directory, i.e. a target which leaves off `name`.
        """
        subproject = (longest_dir_prefix(relative_to, subproject_roots)
                      if relative_to and subproject_roots else None)

        def prefix_subproject(spec_path: str) -> str:
            if not subproject:
                return spec_path
            if spec_path:
                return os.path.join(subproject, spec_path)
            return os.path.normpath(subproject)

        (
            (
                path_component,
                target_component,
                generated_component,
                parameters,
            ),
            wildcard,
        ) = native_engine.address_spec_parse(spec)

        if wildcard:
            raise UnsupportedWildcard(
                softwrap(f"""
                    The address `{spec}` from {description_of_origin} ended in a wildcard
                    (`{wildcard}`), which is not supported.
                    """))

        normalized_relative_to = None
        if relative_to:
            normalized_relative_to = (fast_relpath(relative_to, subproject)
                                      if subproject else relative_to)
        if path_component.startswith("./") and normalized_relative_to:
            path_component = os.path.join(normalized_relative_to,
                                          path_component[2:])
        if not path_component and normalized_relative_to:
            path_component = normalized_relative_to

        path_component = prefix_subproject(strip_prefix(path_component, "//"))

        return cls(
            path_component,
            target_component,
            generated_component=generated_component,
            parameters=FrozenDict(sorted(parameters)),
            description_of_origin=description_of_origin,
        )
Example #13
def test_pex_lockfile_generation(no_binary: bool, only_binary: bool) -> None:
    rule_runner = RuleRunner(rules=[
        *lockfile_rules(),
        *pex.rules(),
        QueryRule(GenerateLockfileResult, [GeneratePythonLockfile]),
    ])
    args = []
    if no_binary:
        args.append("--python-no-binary=ansicolors")
    if only_binary:
        args.append("--python-only-binary=ansicolors")
    rule_runner.set_options(args, env_inherit=PYTHON_BOOTSTRAP_ENV)

    pex_header = dedent("""\
        // This lockfile was autogenerated by Pants. To regenerate, run:
        //
        //    ./pants generate-lockfiles --resolve=test
        //
        // --- BEGIN PANTS LOCKFILE METADATA: DO NOT EDIT OR REMOVE ---
        // {
        //   "version": 2,
        //   "valid_for_interpreter_constraints": [],
        //   "generated_with_requirements": [
        //     "ansicolors==1.1.8"
        //   ]
        // }
        // --- END PANTS LOCKFILE METADATA ---
        """)
    pex_lock = _generate(rule_runner=rule_runner, use_pex=True)
    assert pex_lock.startswith(pex_header)
    lock_entry = json.loads(strip_prefix(pex_lock, pex_header))
    reqs = lock_entry["locked_resolves"][0]["locked_requirements"]
    assert len(reqs) == 1
    assert reqs[0]["project_name"] == "ansicolors"
    assert reqs[0]["version"] == "1.1.8"

    wheel = {
        "algorithm": "sha256",
        "hash": "00d2dde5a675579325902536738dd27e4fac1fd68f773fe36c21044eb559e187",
        "url": ("https://files.pythonhosted.org/packages/53/18/"
                "a56e2fe47b259bb52201093a3a9d4a32014f9d85071ad07e9d60600890ca/"
                "ansicolors-1.1.8-py2.py3-none-any.whl"),
    }
    sdist = {
        "algorithm": "sha256",
        "hash": "99f94f5e3348a0bcd43c82e5fc4414013ccc19d70bd939ad71e0133ce9c372e0",
        "url": ("https://files.pythonhosted.org/packages/76/31/"
                "7faed52088732704523c259e24c26ce6f2f33fbeff2ff59274560c27628e/"
                "ansicolors-1.1.8.zip"),
    }

    artifacts = reqs[0]["artifacts"]

    if not no_binary and not only_binary:
        # Don't assume that the order in artifacts is deterministic.
        # We can't just convert to a set because dicts aren't hashable.
        assert len(artifacts) == 2
        assert wheel in artifacts
        assert sdist in artifacts
    elif no_binary:
        assert artifacts == [sdist]
    elif only_binary:
        assert artifacts == [wheel]
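The header check in this test is the essential trick: the lockfile starts with a fixed comment block, so `strip_prefix` removes exactly that block and leaves parseable JSON behind. Reduced to a toy case with an invented header and payload:

import json

def strip_prefix(string: str, prefix: str) -> str:
    # Assumed implementation; see the tests in Examples #2-#4.
    return string[len(prefix):] if string.startswith(prefix) else string

header = "// generated file, do not edit\n"  # hypothetical fixed header
document = header + '{"version": 2}\n'
assert json.loads(strip_prefix(document, header)) == {"version": 2}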
Example #14
def run_python_test(test_target, pytest, python_setup, source_root_config,
                    subprocess_encoding_environment):
    """Runs pytest for one target."""

    # TODO(7726): replace this with a proper API to get the `closure` for a
    # TransitiveHydratedTarget.
    transitive_hydrated_targets = yield Get(
        TransitiveHydratedTargets, BuildFileAddresses((test_target.address, )))
    all_targets = [t.adaptor for t in transitive_hydrated_targets.closure]

    interpreter_constraints = {
        constraint
        for target_adaptor in all_targets
        for constraint in python_setup.compatibility_or_constraints(
            getattr(target_adaptor, 'compatibility', None))
    }

    # Produce a pex containing pytest and all transitive 3rdparty requirements.
    output_pytest_requirements_pex_filename = 'pytest-with-requirements.pex'
    all_target_requirements = []
    for maybe_python_req_lib in all_targets:
        # This is a python_requirement()-like target.
        if hasattr(maybe_python_req_lib, 'requirement'):
            all_target_requirements.append(
                str(maybe_python_req_lib.requirement))
        # This is a python_requirement_library()-like target.
        if hasattr(maybe_python_req_lib, 'requirements'):
            for py_req in maybe_python_req_lib.requirements:
                all_target_requirements.append(str(py_req.requirement))
    all_requirements = all_target_requirements + list(
        pytest.get_requirement_strings())
    resolved_requirements_pex = yield Get(
        RequirementsPex,
        RequirementsPexRequest(
            output_filename=output_pytest_requirements_pex_filename,
            requirements=tuple(sorted(all_requirements)),
            interpreter_constraints=tuple(sorted(interpreter_constraints)),
            entry_point="pytest:main",
        ))

    # Get the file names for the test_target, adjusted for the source root. This allows us to
    # specify to Pytest which files to test and thus to avoid the test auto-discovery defined by
    # https://pytest.org/en/latest/goodpractices.html#test-discovery. In addition to a performance
    # optimization, this ensures that any transitive sources, such as a test project file named
    # test_fail.py, do not unintentionally end up being run as tests.
    source_roots = source_root_config.get_source_roots()
    test_target_sources_file_names = []
    for source_target_filename in test_target.sources.files_relative_to_buildroot:
        source_root = source_roots.find_by_path(source_target_filename)
        test_target_sources_file_names.append(
            strip_prefix(source_target_filename,
                         prefix=f"{source_root.path}/"))

    # Gather sources and adjust for source roots.
    # TODO: make TargetAdaptor return a 'sources' field with an empty snapshot instead of raising to
    # simplify the hasattr() checks here!
    sources_digest_to_source_roots: Dict[Digest, Optional[SourceRoot]] = {}
    for maybe_source_target in all_targets:
        if not hasattr(maybe_source_target, 'sources'):
            continue
        digest = maybe_source_target.sources.snapshot.directory_digest
        source_root = source_roots.find_by_path(
            maybe_source_target.address.spec_path)
        if maybe_source_target.type_alias == Files.alias():
            # Loose `Files`, as opposed to `Resources` or `PythonTarget`s, have no (implied) package
            # structure and so we do not remove their source root like we normally do, so that Python
            # filesystem APIs may still access the files. See pex_build_util.py's `_create_source_dumper`.
            source_root = None
        sources_digest_to_source_roots[
            digest] = source_root.path if source_root else ""

    stripped_sources_digests = yield [
        Get(
            Digest,
            DirectoryWithPrefixToStrip(directory_digest=digest,
                                       prefix=source_root))
        for digest, source_root in sources_digest_to_source_roots.items()
    ]

    sources_digest = yield Get(
        Digest,
        DirectoriesToMerge(directories=tuple(stripped_sources_digests)),
    )

    inits_digest = yield Get(InjectedInitDigest, Digest, sources_digest)

    all_input_digests = [
        sources_digest,
        inits_digest.directory_digest,
        resolved_requirements_pex.directory_digest,
    ]
    merged_input_files = yield Get(
        Digest,
        DirectoriesToMerge,
        DirectoriesToMerge(directories=tuple(all_input_digests)),
    )

    interpreter_search_paths = create_path_env_var(
        python_setup.interpreter_search_paths)
    pex_exe_env = {
        'PATH': interpreter_search_paths,
        **subprocess_encoding_environment.invocation_environment_dict
    }

    # NB: we use the hardcoded and generic bin name `python`, rather than something dynamic like
    # `sys.executable`, to ensure that the interpreter may be discovered both locally and in remote
    # execution (so long as `env` is populated with a `PATH` env var and `python` is discoverable
    # somewhere on that PATH). This is only used to run the downloaded PEX tool; it is not
    # necessarily the interpreter that PEX will use to execute the generated .pex file.
    request = ExecuteProcessRequest(
        argv=("python", f'./{output_pytest_requirements_pex_filename}',
              *sorted(test_target_sources_file_names)),
        env=pex_exe_env,
        input_files=merged_input_files,
        description=f'Run Pytest for {test_target.address.reference()}',
    )

    result = yield Get(FallibleExecuteProcessResult, ExecuteProcessRequest,
                       request)
    status = Status.SUCCESS if result.exit_code == 0 else Status.FAILURE

    yield TestResult(
        status=status,
        stdout=result.stdout.decode(),
        stderr=result.stderr.decode(),
    )
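In this last example, `strip_prefix` converts buildroot-relative file names into source-root-relative ones by removing the source root plus its trailing slash, matching the layout of the stripped sources digest that is passed to pytest. A toy version with invented paths:

def strip_prefix(string: str, prefix: str) -> str:
    # Assumed implementation; see the tests in Examples #2-#4.
    return string[len(prefix):] if string.startswith(prefix) else string

source_root = "src/python"  # hypothetical source root
filename = "src/python/project/tests/test_fail.py"
assert strip_prefix(filename, f"{source_root}/") == "project/tests/test_fail.py"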