Example #1
async def _pylint_interpreter_constraints(
    first_party_plugins: PylintFirstPartyPlugins,
    python_setup: PythonSetup,
) -> InterpreterConstraints:
    # While Pylint will run in partitions, we need a set of constraints that works with every
    # partition. We must also consider any 3rd-party requirements used by 1st-party plugins.
    #
    # This first computes the constraints for each individual target. Then, it ORs all unique
    # resulting interpreter constraints. The net effect is that every possible Python interpreter
    # used will be covered.
    all_tgts = await Get(AllTargets, AllTargetsRequest())

    unique_constraints = {
        InterpreterConstraints.create_from_compatibility_fields(
            (
                tgt[InterpreterConstraintsField],
                *first_party_plugins.interpreter_constraints_fields,
            ),
            python_setup,
        )
        for tgt in all_tgts
        if PylintFieldSet.is_applicable(tgt)
    }
    if not unique_constraints:
        unique_constraints.add(
            InterpreterConstraints.create_from_compatibility_fields(
                first_party_plugins.interpreter_constraints_fields,
                python_setup,
            )
        )
    constraints = InterpreterConstraints(itertools.chain.from_iterable(unique_constraints))
    return constraints or InterpreterConstraints(python_setup.interpreter_constraints)
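
The OR-by-flattening step above is easy to model outside of Pants. A minimal standalone sketch with made-up constraint strings, using plain tuples in place of the real InterpreterConstraints type (commas inside one string AND together, while distinct strings OR together):

from itertools import chain

# Hypothetical per-target constraint sets. Distinct strings are alternatives,
# both within one target's set and across targets.
unique_constraints = {
    ("CPython>=2.7,<3",),                # a Python 2-only target
    ("CPython>=3.6,<3.10",),             # a Python 3 target
    ("CPython==2.7.*", "CPython>=3.6"),  # a target that accepts either
}

# Flattening the unique sets ORs everything: any interpreter satisfying at
# least one of the surviving strings is covered by the merged constraints.
merged = sorted(set(chain.from_iterable(unique_constraints)))
print(merged)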
Example #2
async def _flake8_interpreter_constraints(
    first_party_plugins: Flake8FirstPartyPlugins,
    python_setup: PythonSetup,
) -> InterpreterConstraints:
    # While Flake8 will run in partitions, we need a set of constraints that works with every
    # partition.
    #
    # This ORs all unique interpreter constraints. The net effect is that every possible Python
    # interpreter used will be covered.
    all_tgts = await Get(AllTargets, AllTargetsRequest())
    unique_constraints = {
        InterpreterConstraints.create_from_compatibility_fields(
            (
                tgt[InterpreterConstraintsField],
                *first_party_plugins.interpreter_constraints_fields,
            ),
            python_setup,
        )
        for tgt in all_tgts if Flake8FieldSet.is_applicable(tgt)
    }
    if not unique_constraints:
        unique_constraints.add(
            InterpreterConstraints.create_from_compatibility_fields(
                first_party_plugins.interpreter_constraints_fields,
                python_setup,
            ))
    constraints = InterpreterConstraints(
        itertools.chain.from_iterable(unique_constraints))
    return constraints or InterpreterConstraints(
        python_setup.interpreter_constraints)
Example #3
async def setup_pylint_lockfile(
    _: PylintLockfileSentinel,
    first_party_plugins: PylintFirstPartyPlugins,
    pylint: Pylint,
    python_setup: PythonSetup,
) -> GeneratePythonLockfile:
    if not pylint.uses_lockfile:
        return GeneratePythonLockfile.from_tool(
            pylint, use_pex=python_setup.generate_lockfiles_with_pex)

    # While Pylint will run in partitions, we need a single lockfile that works with every
    # partition. We must also consider any 3rd-party requirements used by 1st-party plugins.
    #
    # This first computes the constraints for each individual target, including its direct
    # dependencies (which will AND across each target in the closure). Then, it ORs all unique
    # resulting interpreter constraints. The net effect is that every possible Python interpreter
    # used will be covered.
    all_tgts = await Get(AllTargets, AllTargetsRequest())
    relevant_targets = tuple(tgt for tgt in all_tgts
                             if PylintFieldSet.is_applicable(tgt))
    direct_deps_per_target = await MultiGet(
        Get(Targets, DependenciesRequest(tgt.get(Dependencies)))
        for tgt in relevant_targets)

    unique_constraints = set()
    for tgt, direct_deps in zip(relevant_targets, direct_deps_per_target):
        constraints_fields = (t[InterpreterConstraintsField]
                              for t in (tgt, *direct_deps)
                              if t.has_field(InterpreterConstraintsField))
        unique_constraints.add(
            InterpreterConstraints.create_from_compatibility_fields(
                (*constraints_fields,
                 *first_party_plugins.interpreter_constraints_fields),
                python_setup,
            ))
    if not unique_constraints:
        unique_constraints.add(
            InterpreterConstraints.create_from_compatibility_fields(
                first_party_plugins.interpreter_constraints_fields,
                python_setup,
            ))

    constraints = InterpreterConstraints(
        itertools.chain.from_iterable(unique_constraints))
    return GeneratePythonLockfile.from_tool(
        pylint,
        constraints
        or InterpreterConstraints(python_setup.interpreter_constraints),
        extra_requirements=first_party_plugins.requirement_strings,
        use_pex=python_setup.generate_lockfiles_with_pex,
    )
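
The "AND across each target in the closure, then OR across targets" shape is the subtle part of this rule. A standalone sketch under the usual cross-product semantics for AND-ing OR-sets of specifiers (plain frozensets stand in for the real constraint types; the specifiers are made up):

from itertools import product

# Model: an alternative is a frozenset of atomic specifiers (all must hold);
# a constraint set is a set of alternatives (any may hold).
def and_constraint_sets(x, y):
    # AND two OR-sets: every pair of alternatives merges into one.
    return {a | b for a, b in product(x, y)}

target = {frozenset({">=2.7", "<3"}), frozenset({">=3.6"})}  # py2 or py3
direct_dep = {frozenset({">=3.6"})}                          # py3 only

# AND-ing the target with its dependency leaves the Python 2 alternative
# self-contradictory (>=2.7,<3 plus >=3.6), so it can never match an
# interpreter; effectively only the py3 branch survives.
per_target = and_constraint_sets(target, direct_dep)
print(per_target)
# OR-ing the unique per-target results is then the same flatten as Example #1.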
Example #4
async def setup_ipython_lockfile(
        _: IPythonLockfileSentinel, ipython: IPython,
        python_setup: PythonSetup) -> GeneratePythonLockfile:
    if not ipython.uses_lockfile:
        return GeneratePythonLockfile.from_tool(
            ipython, use_pex=python_setup.generate_lockfiles_with_pex)

    # IPython is often run against the whole repo (`./pants repl ::`), but it is possible to run
    # on subsets of the codebase with disjoint interpreter constraints, such as
    # `./pants repl py2::` and then `./pants repl py3::`. Still, even with those subsets possible,
    # we need a single lockfile that works with all possible Python interpreters in use.
    #
    # This ORs all unique interpreter constraints. The net effect is that every possible Python
    # interpreter used will be covered.
    all_tgts = await Get(AllTargets, AllTargetsRequest())
    unique_constraints = {
        InterpreterConstraints.create_from_compatibility_fields(
            [tgt[InterpreterConstraintsField]], python_setup)
        for tgt in all_tgts if tgt.has_field(InterpreterConstraintsField)
    }
    constraints = InterpreterConstraints(
        itertools.chain.from_iterable(unique_constraints))
    return GeneratePythonLockfile.from_tool(
        ipython,
        constraints
        or InterpreterConstraints(python_setup.interpreter_constraints),
        use_pex=python_setup.generate_lockfiles_with_pex,
    )
Example #5
async def flake8_lint(
    request: Flake8Request,
    flake8: Flake8,
    python_setup: PythonSetup,
    first_party_plugins: Flake8FirstPartyPlugins,
) -> LintResults:
    if flake8.skip:
        return LintResults([], linter_name=request.name)

    # NB: Flake8 output depends upon which Python interpreter version it's run with
    # (http://flake8.pycqa.org/en/latest/user/invocation.html). We batch targets by their
    # constraints to ensure, for example, that all Python 2 targets run together and all Python 3
    # targets run together.
    results = defaultdict(set)
    for fs in request.field_sets:
        constraints = InterpreterConstraints.create_from_compatibility_fields(
            [
                fs.interpreter_constraints,
                *first_party_plugins.interpreter_constraints_fields
            ],
            python_setup,
        )
        results[constraints].add(fs)

    partitioned_results = await MultiGet(
        Get(
            LintResult,
            Flake8Partition(
                tuple(sorted(field_sets, key=lambda fs: fs.address)),
                constraints),
        ) for constraints, field_sets in sorted(results.items()))
    return LintResults(partitioned_results, linter_name=request.name)
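
The batching pattern here, keying a defaultdict(set) by the computed constraints and sorting both partitions and members for determinism, is generic. A standalone sketch with hypothetical field sets (plain dataclasses, not the real Pants types):

from collections import defaultdict
from dataclasses import dataclass

@dataclass(frozen=True)
class FakeFieldSet:  # hypothetical stand-in for Flake8FieldSet
    address: str
    constraints: str  # stand-in for InterpreterConstraints

field_sets = [
    FakeFieldSet("src/py2/app", "CPython>=2.7,<3"),
    FakeFieldSet("src/py3/app", "CPython>=3.6"),
    FakeFieldSet("src/py3/lib", "CPython>=3.6"),
]

partitions = defaultdict(set)
for fs in field_sets:
    partitions[fs.constraints].add(fs)

# Sort partitions and their members so scheduling is deterministic, mirroring
# the sorted(...) calls in the rule above.
for constraints, members in sorted(partitions.items()):
    batch = tuple(sorted(members, key=lambda fs: fs.address))
    print(constraints, [fs.address for fs in batch])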
Example #6
async def setup_black(setup_request: SetupRequest, black: Black,
                      python_setup: PythonSetup) -> Setup:
    # Black requires 3.6+ but uses the typed-ast library to work with 2.7, 3.4, 3.5, 3.6, and 3.7.
    # However, typed-ast does not understand 3.8+, so instead we must run Black with Python 3.8+
    # when relevant. We only do this if <3.8 can't be used, as we don't want a loose requirement
    # like `>=3.6` to result in requiring Python 3.8, which would error if 3.8 is not installed on
    # the machine.
    all_interpreter_constraints = InterpreterConstraints.create_from_compatibility_fields(
        (field_set.interpreter_constraints
         for field_set in setup_request.request.field_sets),
        python_setup,
    )
    tool_interpreter_constraints = (
        all_interpreter_constraints
        if (
            black.options.is_default("interpreter_constraints")
            and all_interpreter_constraints.requires_python38_or_newer(
                python_setup.interpreter_universe
            )
        )
        else black.interpreter_constraints
    )

    black_pex_get = Get(
        VenvPex,
        PexRequest,
        black.to_pex_request(
            interpreter_constraints=tool_interpreter_constraints),
    )

    source_files_get = Get(
        SourceFiles,
        SourceFilesRequest(field_set.source
                           for field_set in setup_request.request.field_sets),
    )

    source_files, black_pex = await MultiGet(source_files_get, black_pex_get)
    source_files_snapshot = (
        source_files.snapshot
        if setup_request.request.prior_formatter_result is None
        else setup_request.request.prior_formatter_result
    )

    config_files = await Get(ConfigFiles, ConfigFilesRequest,
                             black.config_request(source_files_snapshot.dirs))
    input_digest = await Get(
        Digest,
        MergeDigests(
            (source_files_snapshot.digest, config_files.snapshot.digest)))

    process = await Get(
        Process,
        VenvPexProcess(
            black_pex,
            argv=generate_argv(source_files,
                               black,
                               check_only=setup_request.check_only),
            input_digest=input_digest,
            output_files=source_files_snapshot.files,
            concurrency_available=len(setup_request.request.field_sets),
            description=f"Run Black on {pluralize(len(setup_request.request.field_sets), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    return Setup(process, original_snapshot=source_files_snapshot)
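
The "only upgrade when forced" decision can be modeled in isolation. A hedged sketch: requires_python38_or_newer_minors is a hypothetical helper approximating what the rule asks of the real requires_python38_or_newer, namely that every interpreter in the universe satisfying the merged constraints is already 3.8+:

def requires_python38_or_newer_minors(compatible_minors):
    # Hypothetical approximation: True only if every compatible interpreter
    # version is already >= 3.8.
    return bool(compatible_minors) and all(v >= (3, 8) for v in compatible_minors)

# If only 3.8/3.9 satisfy the repo's constraints, Black must run on them...
assert requires_python38_or_newer_minors([(3, 8), (3, 9)])
# ...but a loose requirement like >=3.6 still admits 3.6/3.7, so the rule
# keeps Black's own default rather than force 3.8 onto the machine.
assert not requires_python38_or_newer_minors([(3, 6), (3, 7), (3, 8)])
print("ok")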
Example #7
async def setup_flake8_lockfile(
    _: Flake8LockfileSentinel,
    first_party_plugins: Flake8FirstPartyPlugins,
    flake8: Flake8,
    python_setup: PythonSetup,
) -> GeneratePythonLockfile:
    if not flake8.uses_lockfile:
        return GeneratePythonLockfile.from_tool(
            flake8, use_pex=python_setup.generate_lockfiles_with_pex
        )

    # While Flake8 will run in partitions, we need a single lockfile that works with every
    # partition.
    #
    # This ORs all unique interpreter constraints. The net effect is that every possible Python
    # interpreter used will be covered.
    all_tgts = await Get(AllTargets, AllTargetsRequest())
    relevant_targets = tuple(tgt for tgt in all_tgts if Flake8FieldSet.is_applicable(tgt))
    unique_constraints = set()
    for tgt in relevant_targets:
        if tgt.has_field(InterpreterConstraintsField):
            constraints_field = tgt[InterpreterConstraintsField]
            unique_constraints.add(
                InterpreterConstraints.create_from_compatibility_fields(
                    (constraints_field, *first_party_plugins.interpreter_constraints_fields),
                    python_setup,
                )
            )
    if not unique_constraints:
        unique_constraints.add(
            InterpreterConstraints.create_from_compatibility_fields(
                first_party_plugins.interpreter_constraints_fields,
                python_setup,
            )
        )
    constraints = InterpreterConstraints(itertools.chain.from_iterable(unique_constraints))
    return GeneratePythonLockfile.from_tool(
        flake8,
        constraints or InterpreterConstraints(python_setup.interpreter_constraints),
        extra_requirements=first_party_plugins.requirement_strings,
        use_pex=python_setup.generate_lockfiles_with_pex,
    )
Example #8
async def pylint_lint(
    request: PylintRequest,
    pylint: Pylint,
    python_setup: PythonSetup,
    first_party_plugins: PylintFirstPartyPlugins,
) -> LintResults:
    if pylint.skip:
        return LintResults([], linter_name="Pylint")

    # Pylint needs direct dependencies in the chroot to ensure that imports are valid. However, it
    # doesn't lint those direct dependencies nor does it care about transitive dependencies.
    linted_targets = await Get(
        Targets, Addresses(field_set.address for field_set in request.field_sets)
    )
    per_target_dependencies = await MultiGet(
        Get(Targets, DependenciesRequest(field_set.dependencies))
        for field_set in request.field_sets
    )

    # We batch targets by their interpreter constraints to ensure, for example, that all Python 2
    # targets run together and all Python 3 targets run together.
    # Note that Pylint uses the AST of the interpreter that runs it. So, we include any plugin
    # targets in this interpreter constraints calculation.
    interpreter_constraints_to_target_setup = defaultdict(set)
    for field_set, tgt, dependencies in zip(
        request.field_sets, linted_targets, per_target_dependencies
    ):
        target_setup = PylintTargetSetup(field_set, Targets([tgt, *dependencies]))
        interpreter_constraints = InterpreterConstraints.create_from_compatibility_fields(
            (
                *(
                    tgt[InterpreterConstraintsField]
                    for tgt in [tgt, *dependencies]
                    if tgt.has_field(InterpreterConstraintsField)
                ),
                *first_party_plugins.interpreter_constraints_fields,
            ),
            python_setup,
        )
        interpreter_constraints_to_target_setup[interpreter_constraints].add(target_setup)

    partitions = (
        PylintPartition(
            tuple(sorted(target_setups, key=lambda tgt_setup: tgt_setup.field_set.address)),
            interpreter_constraints,
        )
        for interpreter_constraints, target_setups in sorted(
            interpreter_constraints_to_target_setup.items()
        )
    )
    partitioned_results = await MultiGet(
        Get(LintResult, PylintPartition, partition) for partition in partitions
    )
    return LintResults(partitioned_results, linter_name="Pylint")
Example #9
async def setup_user_lockfile_requests(
    requested: _SpecifiedUserResolves, python_setup: PythonSetup
) -> _UserLockfileRequests:
    # First, associate all resolves with their consumers.
    all_build_targets = await Get(UnexpandedTargets, AddressSpecs([DescendantAddresses("")]))
    resolves_to_roots = defaultdict(list)
    for tgt in all_build_targets:
        if not tgt.has_field(PythonResolveField):
            continue
        tgt[PythonResolveField].validate(python_setup)
        resolve = tgt[PythonResolveField].value
        if resolve is None:
            continue
        resolves_to_roots[resolve].append(tgt.address)

    # Expand the resolves for all specified.
    transitive_targets_per_resolve = await MultiGet(
        Get(TransitiveTargets, TransitiveTargetsRequest(resolves_to_roots[resolve]))
        for resolve in requested
    )
    pex_requirements_per_resolve = []
    interpreter_constraints_per_resolve = []
    for transitive_targets in transitive_targets_per_resolve:
        req_fields = []
        ic_fields = []
        for tgt in transitive_targets.closure:
            if tgt.has_field(PythonRequirementsField):
                req_fields.append(tgt[PythonRequirementsField])
            if tgt.has_field(InterpreterConstraintsField):
                ic_fields.append(tgt[InterpreterConstraintsField])
        pex_requirements_per_resolve.append(
            PexRequirements.create_from_requirement_fields(req_fields)
        )
        interpreter_constraints_per_resolve.append(
            InterpreterConstraints.create_from_compatibility_fields(ic_fields, python_setup)
        )

    requests = (
        PythonLockfileRequest(
            requirements.req_strings,
            interpreter_constraints,
            resolve_name=resolve,
            lockfile_dest=python_setup.resolves_to_lockfiles[resolve],
        )
        for resolve, requirements, interpreter_constraints in zip(
            requested, pex_requirements_per_resolve, interpreter_constraints_per_resolve
        )
    )
    return _UserLockfileRequests(requests)
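
A standalone sketch of the first step, associating resolves with their consuming roots while skipping targets without a resolve (made-up addresses, tuples in place of real targets):

from collections import defaultdict

targets = [
    ("src/app", "python-default"),
    ("src/ds/model", "data-science"),
    ("src/legacy", None),  # no resolve set: skipped, as in the rule above
]

resolves_to_roots = defaultdict(list)
for address, resolve in targets:
    if resolve is None:
        continue
    resolves_to_roots[resolve].append(address)

print(dict(resolves_to_roots))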
Example #10
async def pylint_determine_partitions(
        request: PylintRequest, python_setup: PythonSetup,
        first_party_plugins: PylintFirstPartyPlugins) -> PylintPartitions:
    # We batch targets by their interpreter constraints + resolve to ensure, for example, that all
    # Python 2 targets run together and all Python 3 targets run together.
    #
    # Note that Pylint uses the AST of the interpreter that runs it. So, we include any plugin
    # targets in this interpreter constraints calculation. However, we don't have to consider the
    # resolve of the plugin targets, per https://github.com/pantsbuild/pants/issues/14320.
    transitive_targets_per_field_set = await MultiGet(
        Get(TransitiveTargets, TransitiveTargetsRequest([field_set.address]))
        for field_set in request.field_sets)

    resolve_and_interpreter_constraints_to_transitive_targets = defaultdict(set)
    for transitive_targets in transitive_targets_per_field_set:
        resolve = transitive_targets.roots[0][PythonResolveField].normalized_value(
            python_setup
        )
        interpreter_constraints = InterpreterConstraints.create_from_compatibility_fields(
            (
                *(tgt[InterpreterConstraintsField]
                  for tgt in transitive_targets.closure
                  if tgt.has_field(InterpreterConstraintsField)),
                *first_party_plugins.interpreter_constraints_fields,
            ),
            python_setup,
        )
        resolve_and_interpreter_constraints_to_transitive_targets[
            (resolve, interpreter_constraints)
        ].add(transitive_targets)

    partitions = []
    for (_resolve, interpreter_constraints), all_transitive_targets in sorted(
            resolve_and_interpreter_constraints_to_transitive_targets.items()):
        combined_roots: OrderedSet[Target] = OrderedSet()
        combined_closure: OrderedSet[Target] = OrderedSet()
        for transitive_targets in all_transitive_targets:
            combined_roots.update(transitive_targets.roots)
            combined_closure.update(transitive_targets.closure)
        partitions.append(
            # Note that we don't need to pass the resolve. pex_from_targets.py will already
            # calculate it by inspecting the roots & validating that all dependees are valid.
            PylintPartition(
                FrozenOrderedSet(combined_roots),
                FrozenOrderedSet(combined_closure),
                interpreter_constraints,
            ))
    return PylintPartitions(partitions)
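
A standalone sketch of the composite-key batching above: group by (resolve, constraints) pairs, then union roots/closures per partition while preserving insertion order (plain dicts stand in for OrderedSet; the addresses are made up):

from collections import defaultdict

groups = defaultdict(list)
groups[("python-default", "CPython>=3.7")].append(["src/a", "src/b"])
groups[("python-default", "CPython>=3.7")].append(["src/b", "src/c"])
groups[("data-science", "CPython>=3.7")].append(["src/ds"])

for (resolve, ics), closures in sorted(groups.items()):
    # dict.fromkeys dedupes while keeping first-seen order, like OrderedSet.
    combined = dict.fromkeys(addr for closure in closures for addr in closure)
    print(resolve, ics, list(combined))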
Example #11
async def pylint_determine_partitions(
        request: PylintRequest, python_setup: PythonSetup,
        first_party_plugins: PylintFirstPartyPlugins) -> PylintPartitions:
    resolve_and_interpreter_constraints_to_coarsened_targets = (
        await partition._by_interpreter_constraints_and_resolve(
            request.field_sets, python_setup))

    first_party_ics = InterpreterConstraints.create_from_compatibility_fields(
        first_party_plugins.interpreter_constraints_fields, python_setup)

    return PylintPartitions(
        PylintPartition(
            FrozenOrderedSet(roots),
            FrozenOrderedSet(CoarsenedTargets(root_cts).closure()),
            resolve if len(python_setup.resolves) > 1 else None,
            InterpreterConstraints.merge((interpreter_constraints, first_party_ics)),
        )
        for (resolve, interpreter_constraints), (roots, root_cts) in sorted(
            resolve_and_interpreter_constraints_to_coarsened_targets.items()
        )
    )
Example #12
async def black_fmt(request: BlackRequest, black: Black, python_setup: PythonSetup) -> FmtResult:
    if black.skip:
        return FmtResult.skip(formatter_name=request.name)
    # Black requires 3.6+ but uses the typed-ast library to work with 2.7, 3.4, 3.5, 3.6, and 3.7.
    # However, typed-ast does not understand 3.8+, so instead we must run Black with Python 3.8+
    # when relevant. We only do this if <3.8 can't be used, as we don't want a loose requirement
    # like `>=3.6` to result in requiring Python 3.8, which would error if 3.8 is not installed on
    # the machine.
    tool_interpreter_constraints = black.interpreter_constraints
    if black.options.is_default("interpreter_constraints"):
        try:
            # Don't compute this unless we have to, since it might fail.
            all_interpreter_constraints = InterpreterConstraints.create_from_compatibility_fields(
                (field_set.interpreter_constraints for field_set in request.field_sets),
                python_setup,
            )
        except ValueError:
            raise ValueError(
                softwrap(
                    """
                    Could not compute an interpreter to run Black on, due to conflicting requirements
                    in the repo.

                    Please set `[black].interpreter_constraints` explicitly in pants.toml to a
                    suitable interpreter.
                    """
                )
            )
        if all_interpreter_constraints.requires_python38_or_newer(
            python_setup.interpreter_universe
        ):
            tool_interpreter_constraints = all_interpreter_constraints

    black_pex_get = Get(
        VenvPex,
        PexRequest,
        black.to_pex_request(interpreter_constraints=tool_interpreter_constraints),
    )
    config_files_get = Get(
        ConfigFiles, ConfigFilesRequest, black.config_request(request.snapshot.dirs)
    )

    black_pex, config_files = await MultiGet(black_pex_get, config_files_get)

    input_digest = await Get(
        Digest, MergeDigests((request.snapshot.digest, config_files.snapshot.digest))
    )

    result = await Get(
        ProcessResult,
        VenvPexProcess(
            black_pex,
            argv=(
                *(("--config", black.config) if black.config else ()),
                "-W",
                "{pants_concurrency}",
                *black.args,
                *request.snapshot.files,
            ),
            input_digest=input_digest,
            output_files=request.snapshot.files,
            concurrency_available=len(request.field_sets),
            description=f"Run Black on {pluralize(len(request.field_sets), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    output_snapshot = await Get(Snapshot, Digest, result.output_digest)
    return FmtResult.create(request, result, output_snapshot, strip_chroot_path=True)
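
Compared with Example #6, this version computes the repo-wide constraints lazily and translates the failure into actionable advice. A standalone sketch of that guard pattern (all names hypothetical; merge_constraints stands in for the Pants call that can raise ValueError on conflicting requirements):

def choose_tool_constraints(tool_default, user_overrode, merge_constraints, requires_38_plus):
    if user_overrode:
        return tool_default  # nothing to compute; skip the failable merge
    try:
        merged = merge_constraints()
    except ValueError as e:
        raise ValueError(
            "Could not compute an interpreter to run the tool on; set its "
            "interpreter_constraints explicitly."
        ) from e
    return merged if requires_38_plus(merged) else tool_default

# A repo that only admits 3.9 forces the upgrade path.
print(choose_tool_constraints(
    "CPython>=3.6,<3.9", False, lambda: "CPython==3.9.*", lambda ics: "3.9" in ics
))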
Example #13
async def setup_pytest_for_target(
    request: TestSetupRequest,
    pytest: PyTest,
    test_subsystem: TestSubsystem,
    python_setup: PythonSetup,
    coverage_config: CoverageConfig,
    coverage_subsystem: CoverageSubsystem,
    test_extra_env: TestExtraEnv,
    global_options: GlobalOptions,
) -> TestSetup:
    transitive_targets, plugin_setups = await MultiGet(
        Get(TransitiveTargets,
            TransitiveTargetsRequest([request.field_set.address])),
        Get(AllPytestPluginSetups,
            AllPytestPluginSetupsRequest(request.field_set.address)),
    )
    all_targets = transitive_targets.closure

    interpreter_constraints = InterpreterConstraints.create_from_compatibility_fields(
        [request.field_set.interpreter_constraints], python_setup)

    requirements_pex_get = Get(
        Pex, RequirementsPexRequest([request.field_set.address]))
    pytest_pex_get = Get(
        Pex,
        PexRequest(
            output_filename="pytest.pex",
            requirements=pytest.pex_requirements(),
            interpreter_constraints=interpreter_constraints,
            internal_only=True,
        ),
    )

    # Ensure that the empty extra output dir exists.
    extra_output_directory_digest_get = Get(
        Digest, CreateDigest([Directory(_EXTRA_OUTPUT_DIR)]))

    prepared_sources_get = Get(
        PythonSourceFiles,
        PythonSourceFilesRequest(all_targets, include_files=True))

    # Get the file names for the test_target so that we can specify to Pytest precisely which files
    # to test, rather than using auto-discovery.
    field_set_source_files_get = Get(
        SourceFiles, SourceFilesRequest([request.field_set.source]))

    field_set_extra_env_get = Get(
        Environment,
        EnvironmentRequest(request.field_set.extra_env_vars.value or ()))

    (
        pytest_pex,
        requirements_pex,
        prepared_sources,
        field_set_source_files,
        field_set_extra_env,
        extra_output_directory_digest,
    ) = await MultiGet(
        pytest_pex_get,
        requirements_pex_get,
        prepared_sources_get,
        field_set_source_files_get,
        field_set_extra_env_get,
        extra_output_directory_digest_get,
    )

    local_dists = await Get(
        LocalDistsPex,
        LocalDistsPexRequest(
            [request.field_set.address],
            internal_only=True,
            interpreter_constraints=interpreter_constraints,
            sources=prepared_sources,
        ),
    )

    pytest_runner_pex_get = Get(
        VenvPex,
        PexRequest(
            output_filename="pytest_runner.pex",
            interpreter_constraints=interpreter_constraints,
            main=pytest.main,
            internal_only=True,
            pex_path=[pytest_pex, requirements_pex, local_dists.pex],
        ),
    )
    config_files_get = Get(
        ConfigFiles,
        ConfigFilesRequest,
        pytest.config_request(field_set_source_files.snapshot.dirs),
    )
    pytest_runner_pex, config_files = await MultiGet(pytest_runner_pex_get,
                                                     config_files_get)

    # The coverage and pytest config may live in the same config file (e.g., setup.cfg, tox.ini
    # or pyproject.toml), and we may have rewritten those files to augment the coverage config,
    # in which case we must ensure that the original and rewritten files don't collide.
    pytest_config_digest = config_files.snapshot.digest
    if coverage_config.path in config_files.snapshot.files:
        subset_paths = list(config_files.snapshot.files)
        # Remove the original file, and rely on the rewritten file, which contains all the
        # pytest-related config unchanged.
        subset_paths.remove(coverage_config.path)
        pytest_config_digest = await Get(
            Digest, DigestSubset(pytest_config_digest,
                                 PathGlobs(subset_paths)))

    input_digest = await Get(
        Digest,
        MergeDigests((
            coverage_config.digest,
            local_dists.remaining_sources.source_files.snapshot.digest,
            pytest_config_digest,
            extra_output_directory_digest,
            *(plugin_setup.digest for plugin_setup in plugin_setups),
        )),
    )

    add_opts = [f"--color={'yes' if global_options.colors else 'no'}"]
    output_files = []

    results_file_name = None
    if not request.is_debug:
        results_file_name = f"{request.field_set.address.path_safe_spec}.xml"
        add_opts.extend((f"--junitxml={results_file_name}", "-o",
                         f"junit_family={pytest.junit_family}"))
        output_files.append(results_file_name)

    coverage_args = []
    if test_subsystem.use_coverage and not request.is_debug:
        pytest.validate_pytest_cov_included()
        output_files.append(".coverage")

        if coverage_subsystem.filter:
            cov_args = [f"--cov={morf}" for morf in coverage_subsystem.filter]
        else:
            # N.B.: Passing `--cov=` or `--cov=.` to communicate "record coverage for all sources"
            # fails in certain contexts as detailed in:
            #   https://github.com/pantsbuild/pants/issues/12390
            # Instead we focus coverage on just the directories containing python source files
            # materialized to the Process chroot.
            cov_args = [
                f"--cov={source_root}"
                for source_root in prepared_sources.source_roots
            ]

        coverage_args = [
            "--cov-report=",  # Turn off output.
            f"--cov-config={coverage_config.path}",
            *cov_args,
        ]

    extra_env = {
        "PYTEST_ADDOPTS": " ".join(add_opts),
        "PEX_EXTRA_SYS_PATH": ":".join(prepared_sources.source_roots),
        **test_extra_env.env,
        # NOTE: field_set_extra_env intentionally after `test_extra_env` to allow overriding within
        # `python_tests`.
        **field_set_extra_env,
    }

    # Cache test runs only if they are successful, or not at all if `--test-force`.
    cache_scope = (ProcessCacheScope.PER_SESSION
                   if test_subsystem.force else ProcessCacheScope.SUCCESSFUL)
    process = await Get(
        Process,
        VenvPexProcess(
            pytest_runner_pex,
            argv=(*pytest.args, *coverage_args, *field_set_source_files.files),
            extra_env=extra_env,
            input_digest=input_digest,
            output_directories=(_EXTRA_OUTPUT_DIR,),
            output_files=output_files,
            timeout_seconds=request.field_set.timeout.calculate_from_global_options(pytest),
            execution_slot_variable=pytest.execution_slot_var,
            description=f"Run Pytest for {request.field_set.address}",
            level=LogLevel.DEBUG,
            cache_scope=cache_scope,
        ),
    )
    return TestSetup(process, results_file_name=results_file_name)
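
One detail in extra_env above deserves a sketch: with dict unpacking, later ** expansions win, which is why field_set_extra_env is deliberately last. A standalone example with made-up values:

# Later `**` expansions override earlier keys, so per-target env vars from
# `python_tests(extra_env_vars=...)` take precedence over the global test env.
test_extra_env = {"PYTEST_TIMEOUT": "60", "TZ": "UTC"}
field_set_extra_env = {"TZ": "America/New_York"}

extra_env = {
    "PYTEST_ADDOPTS": "--color=yes",
    **test_extra_env,
    **field_set_extra_env,  # intentionally last
}
assert extra_env["TZ"] == "America/New_York"
print(extra_env)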