Example #1
async def setup(setup_request: SetupRequest, isort: Isort) -> Setup:
    requirements_pex_request = Get(
        Pex,
        PexRequest(
            output_filename="isort.pex",
            requirements=PexRequirements(isort.all_requirements),
            interpreter_constraints=PexInterpreterConstraints(
                isort.interpreter_constraints),
            entry_point=isort.entry_point,
        ),
    )

    config_digest_request = Get(
        Digest,
        PathGlobs(
            globs=isort.config,
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            conjunction=GlobExpansionConjunction.all_match,
            description_of_origin="the option `--isort-config`",
        ),
    )

    source_files_request = Get(
        SourceFiles,
        SourceFilesRequest(field_set.sources
                           for field_set in setup_request.request.field_sets),
    )

    source_files, requirements_pex, config_digest = await MultiGet(
        source_files_request, requirements_pex_request, config_digest_request)
    source_files_snapshot = (
        source_files.snapshot
        if setup_request.request.prior_formatter_result is None else
        setup_request.request.prior_formatter_result)

    input_digest = await Get(
        Digest,
        MergeDigests((source_files_snapshot.digest, requirements_pex.digest,
                      config_digest)),
    )

    address_references = ", ".join(
        sorted(field_set.address.spec
               for field_set in setup_request.request.field_sets))

    process = await Get(
        Process,
        PexProcess(
            requirements_pex,
            argv=generate_args(source_files=source_files,
                               isort=isort,
                               check_only=setup_request.check_only),
            input_digest=input_digest,
            output_files=source_files_snapshot.files,
            description=
            (f"Run isort on {pluralize(len(setup_request.request.field_sets), 'target')}: "
             f"{address_references}."),
        ),
    )
    return Setup(process, original_digest=source_files_snapshot.digest)
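
The `MultiGet` above awaits the three independent `Get` requests concurrently rather than one after another, so the PEX build, the config glob expansion, and the source-file resolution all proceed in parallel. Conceptually it plays the same role as `asyncio.gather` over plain coroutines; a minimal sketch of that pattern with the standard library (the `fetch_*` coroutines are hypothetical stand-ins, not Pants APIs):

import asyncio

async def fetch_sources() -> str:  # stand-in for Get(SourceFiles, SourceFilesRequest(...))
    return "<source files>"

async def fetch_pex() -> str:      # stand-in for Get(Pex, PexRequest(...))
    return "isort.pex"

async def fetch_config() -> str:   # stand-in for Get(Digest, PathGlobs(...))
    return "<config digest>"

async def main() -> None:
    # All three requests are in flight at once, mirroring `await MultiGet(...)` above.
    sources, pex, config = await asyncio.gather(fetch_sources(), fetch_pex(), fetch_config())
    print(sources, pex, config)

asyncio.run(main())
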
Example #2
async def map_first_party_modules_to_addresses(
) -> FirstPartyModuleToAddressMapping:
    all_expanded_targets = await Get(Targets,
                                     AddressSpecs([DescendantAddresses("")]))
    candidate_targets = tuple(tgt for tgt in all_expanded_targets
                              if tgt.has_field(PythonSources))
    stripped_sources_per_explicit_target = await MultiGet(
        Get(StrippedSourceFiles, SourceFilesRequest([tgt[PythonSources]]))
        for tgt in candidate_targets)

    modules_to_addresses: Dict[str, Address] = {}
    modules_with_multiple_owners: Set[str] = set()
    for tgt, stripped_sources in zip(candidate_targets,
                                     stripped_sources_per_explicit_target):
        for stripped_f in stripped_sources.snapshot.files:
            module = PythonModule.create_from_stripped_path(
                PurePath(stripped_f)).module
            if module in modules_to_addresses:
                modules_with_multiple_owners.add(module)
            else:
                modules_to_addresses[module] = tgt.address

    # Remove modules with ambiguous owners.
    for module in modules_with_multiple_owners:
        modules_to_addresses.pop(module)
    return FirstPartyModuleToAddressMapping(
        FrozenDict(sorted(modules_to_addresses.items())))
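
Each stripped file path is turned into a dotted module name before being recorded as a key, and any module claimed by more than one target is dropped as ambiguous. A rough sketch of the path-to-module conversion (an illustrative helper, not the actual `PythonModule.create_from_stripped_path` implementation):

from pathlib import PurePath

def module_from_stripped_path(path: PurePath) -> str:
    # "project/util/dirutil.py" -> "project.util.dirutil"; an "__init__.py" maps to
    # its containing package, e.g. "project/util/__init__.py" -> "project.util".
    parts = list(path.with_suffix("").parts)
    if parts[-1] == "__init__":
        parts = parts[:-1]
    return ".".join(parts)

assert module_from_stripped_path(PurePath("project/util/dirutil.py")) == "project.util.dirutil"
assert module_from_stripped_path(PurePath("project/util/__init__.py")) == "project.util"
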
Example #3
async def infer_python_dependencies(
    request: InferPythonDependencies, python_inference: PythonInference
) -> InferredDependencies:
    if not python_inference.imports:
        return InferredDependencies()

    stripped_sources = await Get(StrippedSourceFiles, SourceFilesRequest([request.sources_field]))
    modules = tuple(
        PythonModule.create_from_stripped_path(PurePath(fp))
        for fp in stripped_sources.snapshot.files
    )
    digest_contents = await Get(DigestContents, Digest, stripped_sources.snapshot.digest)
    imports_per_file = tuple(
        find_python_imports(file_content.content.decode(), module_name=module.module)
        for file_content, module in zip(digest_contents, modules)
    )
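    # NB: `combined_stdlib` below is assumed to be a module-level set of standard-library
    # module names defined elsewhere in the original file; imports of stdlib modules are
    # skipped because they can never have a first-party owner.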
    owner_per_import = await MultiGet(
        Get(PythonModuleOwner, PythonModule(imported_module))
        for file_imports in imports_per_file
        for imported_module in file_imports.explicit_imports
        if imported_module not in combined_stdlib
    )
    return InferredDependencies(
        owner.address
        for owner in owner_per_import
        if (
            owner.address
            and owner.address.maybe_convert_to_base_target() != request.sources_field.address
        )
    )
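
The heavy lifting happens in `find_python_imports`, which parses each file and reports the modules it imports. A stripped-down sketch of the idea using only the standard library (the real helper is more thorough, e.g. it distinguishes `explicit_imports` from other categories, as the attribute access above suggests):

import ast
from typing import Set

def explicit_imports(source: str) -> Set[str]:
    # Collect the module names appearing in `import x` and `from x import y` statements.
    imported: Set[str] = set()
    for node in ast.walk(ast.parse(source)):
        if isinstance(node, ast.Import):
            imported.update(alias.name for alias in node.names)
        elif isinstance(node, ast.ImportFrom) and node.module:
            imported.add(node.module)
    return imported

assert explicit_imports("import os\nfrom project.util import dirutil\n") == {"os", "project.util"}
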
Example #4
 def run_docformatter(
     self,
     targets: List[Target],
     *,
     passthrough_args: Optional[str] = None,
     skip: bool = False,
 ) -> Tuple[LintResults, FmtResult]:
     args = ["--backend-packages=pants.backend.python.lint.docformatter"]
     if passthrough_args:
         args.append(f"--docformatter-args='{passthrough_args}'")
     if skip:
         args.append("--docformatter-skip")
     options_bootstrapper = create_options_bootstrapper(args=args)
     field_sets = [DocformatterFieldSet.create(tgt) for tgt in targets]
     lint_results = self.request_single_product(
         LintResults,
         Params(DocformatterRequest(field_sets), options_bootstrapper))
     input_sources = self.request_single_product(
         SourceFiles,
         Params(
             SourceFilesRequest(field_set.sources
                                for field_set in field_sets),
             options_bootstrapper,
         ),
     )
     fmt_result = self.request_single_product(
         FmtResult,
         Params(
             DocformatterRequest(
                 field_sets, prior_formatter_result=input_sources.snapshot),
             options_bootstrapper,
         ),
     )
     return lint_results, fmt_result
Example #5
async def format_python_target(
    python_fmt_targets: PythonFmtTargets, union_membership: UnionMembership
) -> LanguageFmtResults:
    original_sources = await Get(
        SourceFiles,
        SourceFilesRequest(target[PythonSources] for target in python_fmt_targets.targets),
    )
    prior_formatter_result = original_sources.snapshot

    results: List[FmtResult] = []
    fmt_request_types: Iterable[Type[PythonFmtRequest]] = union_membership.union_rules[
        PythonFmtRequest
    ]
    for fmt_request_type in fmt_request_types:
        result = await Get(
            FmtResult,
            PythonFmtRequest,
            fmt_request_type(
                (
                    fmt_request_type.field_set_type.create(target)
                    for target in python_fmt_targets.targets
                ),
                prior_formatter_result=prior_formatter_result,
            ),
        )
        if result != FmtResult.noop():
            results.append(result)
        if result.did_change:
            prior_formatter_result = await Get(Snapshot, Digest, result.output)
    return LanguageFmtResults(
        tuple(results),
        input=original_sources.snapshot.digest,
        output=prior_formatter_result.digest,
    )
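
The formatters deliberately run one after another, each starting from the previous formatter's output so their edits compose instead of clobbering each other; `prior_formatter_result` is only re-snapshotted when a formatter actually changed something. Stripped of the engine types, the chaining pattern looks like this (the formatter callables are hypothetical):

from typing import Callable, List, Tuple

# Each hypothetical formatter returns (new_content, did_change).
Formatter = Callable[[str], Tuple[str, bool]]

def chain_formatters(content: str, formatters: List[Formatter]) -> str:
    prior = content
    for fmt in formatters:
        new_content, did_change = fmt(prior)
        if did_change:
            # The next formatter sees this formatter's output, mirroring how
            # `prior_formatter_result` is refreshed from `result.output` above.
            prior = new_content
    return prior
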
Example #6
async def setup(setup_request: SetupRequest, docformatter: Docformatter) -> Setup:
    requirements_pex_request = Get(
        Pex,
        PexRequest(
            output_filename="docformatter.pex",
            requirements=PexRequirements(docformatter.all_requirements),
            interpreter_constraints=PexInterpreterConstraints(docformatter.interpreter_constraints),
            entry_point=docformatter.entry_point,
        ),
    )

    source_files_request = Get(
        SourceFiles,
        SourceFilesRequest(field_set.sources for field_set in setup_request.request.field_sets),
    )

    source_files, requirements_pex = await MultiGet(source_files_request, requirements_pex_request)

    source_files_snapshot = (
        source_files.snapshot
        if setup_request.request.prior_formatter_result is None
        else setup_request.request.prior_formatter_result
    )

    input_digest = await Get(
        Digest, MergeDigests((source_files_snapshot.digest, requirements_pex.digest))
    )

    address_references = ", ".join(
        sorted(field_set.address.spec for field_set in setup_request.request.field_sets)
    )

    process = await Get(
        Process,
        PexProcess(
            requirements_pex,
            argv=generate_args(
                source_files=source_files,
                docformatter=docformatter,
                check_only=setup_request.check_only,
            ),
            input_digest=input_digest,
            output_files=source_files_snapshot.files,
            description=(
                f"Run Docformatter on {pluralize(len(setup_request.request.field_sets), 'target')}: "
                f"{address_references}."
            ),
        ),
    )
    return Setup(process, original_digest=source_files_snapshot.digest)
Example #7
 def assert_sources_resolved(
     self,
     sources_fields: Iterable[SourcesField],
     *,
     expected: Iterable[TargetSources],
     expected_unrooted: Iterable[str] = (),
 ) -> None:
     result = self.request_single_product(
         SourceFiles, Params(SourceFilesRequest(sources_fields), create_options_bootstrapper()),
     )
     assert list(result.snapshot.files) == sorted(
         set(itertools.chain.from_iterable(sources.full_paths for sources in expected))
     )
     assert list(result.unrooted_files) == sorted(expected_unrooted)
Example #8
 def run_isort(
     self,
     targets: List[Target],
     *,
     config: Optional[str] = None,
     passthrough_args: Optional[str] = None,
     skip: bool = False,
 ) -> Tuple[LintResults, FmtResult]:
     args = ["--backend-packages=pants.backend.python.lint.isort"]
     if config is not None:
         self.create_file(relpath=".isort.cfg", contents=config)
         args.append("--isort-config=.isort.cfg")
     if passthrough_args:
         args.append(f"--isort-args='{passthrough_args}'")
     if skip:
         args.append("--isort-skip")
     options_bootstrapper = create_options_bootstrapper(args=args)
     field_sets = [IsortFieldSet.create(tgt) for tgt in targets]
     lint_results = self.request_single_product(
         LintResults, Params(IsortRequest(field_sets),
                             options_bootstrapper))
     input_sources = self.request_single_product(
         SourceFiles,
         Params(
             SourceFilesRequest(field_set.sources
                                for field_set in field_sets),
             options_bootstrapper,
         ),
     )
     fmt_result = self.request_single_product(
         FmtResult,
         Params(
             IsortRequest(field_sets,
                          prior_formatter_result=input_sources.snapshot),
             options_bootstrapper,
         ),
     )
     return lint_results, fmt_result
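
A call from a test might look like the following fragment (the config contents and passthrough flag are made-up examples, and `targets` is assumed to have been created earlier in the test):

# Hypothetical usage inside a test case:
lint_results, fmt_result = self.run_isort(
    targets,
    config="[settings]\nline_length = 100\n",  # written to `.isort.cfg` by the helper
    passthrough_args="--combine-as",
)
assert fmt_result.did_change
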
Example #9
async def prepare_python_sources(
        request: PythonSourceFilesRequest,
        union_membership: UnionMembership) -> PythonSourceFiles:
    sources = await Get(
        SourceFiles,
        SourceFilesRequest(
            (tgt.get(Sources) for tgt in request.targets),
            for_sources_types=request.valid_sources_types,
            enable_codegen=True,
        ),
    )

    missing_init_files = await Get(
        AncestorFiles,
        AncestorFilesRequest("__init__.py", sources.snapshot),
    )

    init_injected = await Get(
        Snapshot,
        MergeDigests(
            (sources.snapshot.digest, missing_init_files.snapshot.digest)),
    )

    source_root_objs = await MultiGet(
        Get(SourceRoot, SourceRootRequest, SourceRootRequest.for_target(tgt))
        for tgt in request.targets
        if (tgt.has_field(PythonSources) or tgt.has_field(ResourcesSources) or
            tgt.get(Sources).can_generate(PythonSources, union_membership) or
            tgt.get(Sources).can_generate(ResourcesSources, union_membership)))
    source_root_paths = {
        source_root_obj.path
        for source_root_obj in source_root_objs
    }
    return PythonSourceFiles(
        SourceFiles(init_injected, sources.unrooted_files),
        tuple(sorted(source_root_paths)))
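
`AncestorFilesRequest("__init__.py", sources.snapshot)` asks for `__init__.py` files that live in ancestor directories of the snapshot's files but are not already in the snapshot (hence the name `missing_init_files`), so packages remain importable after the merge. The candidate ancestor paths for a single file can be pictured like this (illustrative helper, not the Pants implementation):

from pathlib import PurePath
from typing import List

def candidate_init_files(path: str) -> List[str]:
    # For "src/project/util/dirutil.py" this yields the __init__.py of every
    # ancestor directory, nearest first.
    return [str(parent / "__init__.py") for parent in PurePath(path).parents if str(parent) != "."]

assert candidate_init_files("src/project/util/dirutil.py") == [
    "src/project/util/__init__.py",
    "src/project/__init__.py",
    "src/__init__.py",
]
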
Example #10
async def generate_python_from_protobuf(
    request: GeneratePythonFromProtobufRequest, protoc: Protoc
) -> GeneratedSources:
    download_protoc_request = Get(
        DownloadedExternalTool, ExternalToolRequest, protoc.get_request(Platform.current)
    )

    output_dir = "_generated_files"
    # TODO(#9650): replace this with a proper intrinsic to create empty directories.
    create_output_dir_request = Get(
        ProcessResult,
        Process(
            ("/bin/mkdir", output_dir),
            description=f"Create the directory {output_dir}",
            level=LogLevel.DEBUG,
            output_directories=(output_dir,),
        ),
    )

    # Protoc needs all transitive dependencies on `protobuf_libraries` to work properly. It won't
    # actually generate those dependencies; it only needs to look at their .proto files to work
    # with imports.
    transitive_targets = await Get(TransitiveTargets, Addresses([request.protocol_target.address]))
    # NB: By stripping the source roots, we avoid having to set the `--proto_path` option
    # for Protobuf imports to be discoverable.
    all_stripped_sources_request = Get(
        StrippedSourceFiles,
        SourceFilesRequest(
            (tgt.get(Sources) for tgt in transitive_targets.closure),
            for_sources_types=(ProtobufSources,),
        ),
    )
    target_stripped_sources_request = Get(
        StrippedSourceFiles, SourceFilesRequest([request.protocol_target[ProtobufSources]]),
    )

    (
        downloaded_protoc_binary,
        create_output_dir_result,
        all_sources_stripped,
        target_sources_stripped,
    ) = await MultiGet(
        download_protoc_request,
        create_output_dir_request,
        all_stripped_sources_request,
        target_stripped_sources_request,
    )

    input_digest = await Get(
        Digest,
        MergeDigests(
            (
                all_sources_stripped.snapshot.digest,
                downloaded_protoc_binary.digest,
                create_output_dir_result.output_digest,
            )
        ),
    )

    result = await Get(
        ProcessResult,
        Process(
            (
                downloaded_protoc_binary.exe,
                "--python_out",
                output_dir,
                *target_sources_stripped.snapshot.files,
            ),
            input_digest=input_digest,
            description=f"Generating Python sources from {request.protocol_target.address}.",
            level=LogLevel.DEBUG,
            output_directories=(output_dir,),
        ),
    )

    # We must do some path manipulation on the output digest for it to look like normal sources,
    # including adding back a source root.
    py_source_root = request.protocol_target.get(PythonSourceRootField).value
    if py_source_root:
        # Verify that the python source root specified by the target is in fact a source root.
        source_root_request = SourceRootRequest(PurePath(py_source_root))
    else:
        # The target didn't specify a python source root, so use the protobuf_library's source root.
        source_root_request = SourceRootRequest.for_target(request.protocol_target)

    normalized_digest, source_root = await MultiGet(
        Get(Digest, RemovePrefix(result.output_digest, output_dir)),
        Get(SourceRoot, SourceRootRequest, source_root_request),
    )

    source_root_restored = (
        await Get(Snapshot, AddPrefix(normalized_digest, source_root.path))
        if source_root.path != "."
        else await Get(Snapshot, Digest, normalized_digest)
    )
    return GeneratedSources(source_root_restored)
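
The final step rewrites paths inside the output digest: `RemovePrefix` drops the scratch `_generated_files` directory and `AddPrefix` puts the resolved source root in front, so the generated files end up laid out like ordinary Python sources. On plain path strings the same transformation is simply (illustrative only; the file name is a made-up example of protoc's `*_pb2.py` output):

import os

def restore_source_root(generated_path: str, output_dir: str, source_root: str) -> str:
    # "_generated_files/project/foo_pb2.py" -> "src/python/project/foo_pb2.py"
    relative = os.path.relpath(generated_path, output_dir)
    return relative if source_root == "." else os.path.join(source_root, relative)

assert (
    restore_source_root("_generated_files/project/foo_pb2.py", "_generated_files", "src/python")
    == "src/python/project/foo_pb2.py"
)
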
Example #11
async def flake8_lint_partition(partition: Flake8Partition, flake8: Flake8,
                                lint_subsystem: LintSubsystem) -> LintResult:
    requirements_pex_request = Get(
        Pex,
        PexRequest(
            output_filename="flake8.pex",
            requirements=PexRequirements(flake8.all_requirements),
            interpreter_constraints=(partition.interpreter_constraints
                                     or PexInterpreterConstraints(
                                         flake8.interpreter_constraints)),
            entry_point=flake8.entry_point,
        ),
    )

    config_digest_request = Get(
        Digest,
        PathGlobs(
            globs=[flake8.config] if flake8.config else [],
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            description_of_origin="the option `--flake8-config`",
        ),
    )

    source_files_request = Get(
        SourceFiles,
        SourceFilesRequest(field_set.sources
                           for field_set in partition.field_sets))

    requirements_pex, config_digest, source_files = await MultiGet(
        requirements_pex_request, config_digest_request, source_files_request)

    input_digest = await Get(
        Digest,
        MergeDigests((source_files.snapshot.digest, requirements_pex.digest,
                      config_digest)),
    )

    address_references = ", ".join(
        sorted(field_set.address.spec for field_set in partition.field_sets))
    report_path = (lint_subsystem.reports_dir /
                   "flake8_report.txt" if lint_subsystem.reports_dir else None)

    result = await Get(
        FallibleProcessResult,
        PexProcess(
            requirements_pex,
            argv=generate_args(
                source_files=source_files,
                flake8=flake8,
                output_file=report_path.name if report_path else None,
            ),
            input_digest=input_digest,
            output_files=(report_path.name, ) if report_path else None,
            description=
            (f"Run Flake8 on {pluralize(len(partition.field_sets), 'target')}: "
             f"{address_references}."),
        ),
    )

    results_file = None
    if report_path:
        report_file_snapshot = await Get(
            Snapshot,
            DigestSubset(result.output_digest, PathGlobs([report_path.name])))
        if len(report_file_snapshot.files) != 1:
            raise Exception(
                f"Unexpected report file snapshot: {report_file_snapshot.files}"
            )
        results_file = LintResultFile(output_path=report_path,
                                      digest=report_file_snapshot.digest)

    return LintResult.from_fallible_process_result(result,
                                                   linter_name="Flake8",
                                                   results_file=results_file)
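
`DigestSubset` narrows the process's output digest down to just the report file before it is attached to the `LintResult`. As a plain-Python analogy, it behaves like filtering a mapping of file paths by glob (illustrative only):

from fnmatch import fnmatch
from typing import Dict, Tuple

def digest_subset(files: Dict[str, bytes], patterns: Tuple[str, ...]) -> Dict[str, bytes]:
    # Keep only the entries whose path matches one of the globs, analogous to
    # DigestSubset(result.output_digest, PathGlobs([report_path.name])) above.
    return {path: data for path, data in files.items() if any(fnmatch(path, p) for p in patterns)}

assert digest_subset(
    {"flake8_report.txt": b"...", "src/app.py": b"..."}, ("flake8_report.txt",)
) == {"flake8_report.txt": b"..."}
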
Example #12
async def pylint_lint_partition(partition: PylintPartition,
                                pylint: Pylint) -> LintResult:
    # We build one PEX with Pylint requirements and another with all direct 3rd-party dependencies.
    # Splitting this into two PEXes gives us finer-grained caching. We then merge via `--pex-path`.
    plugin_requirements = PexRequirements.create_from_requirement_fields(
        plugin_tgt[PythonRequirementsField]
        for plugin_tgt in partition.plugin_targets
        if plugin_tgt.has_field(PythonRequirementsField))
    target_requirements = PexRequirements.create_from_requirement_fields(
        tgt[PythonRequirementsField]
        for tgt in partition.targets_with_dependencies
        if tgt.has_field(PythonRequirementsField))
    pylint_pex_request = Get(
        Pex,
        PexRequest(
            output_filename="pylint.pex",
            requirements=PexRequirements(
                [*pylint.all_requirements, *plugin_requirements]),
            interpreter_constraints=partition.interpreter_constraints,
        ),
    )
    requirements_pex_request = Get(
        Pex,
        PexRequest(
            output_filename="requirements.pex",
            requirements=target_requirements,
            interpreter_constraints=partition.interpreter_constraints,
        ),
    )
    # TODO(John Sirois): Support shading python binaries:
    #   https://github.com/pantsbuild/pants/issues/9206
    # Right now any Pylint transitive requirements will shadow corresponding user
    # requirements, which could lead to problems.
    pylint_runner_pex_args = [
        "--pex-path", ":".join(["pylint.pex", "requirements.pex"])
    ]
    pylint_runner_pex_request = Get(
        Pex,
        PexRequest(
            output_filename="pylint_runner.pex",
            entry_point=pylint.entry_point,
            interpreter_constraints=partition.interpreter_constraints,
            additional_args=pylint_runner_pex_args,
        ),
    )

    config_digest_request = Get(
        Digest,
        PathGlobs(
            globs=[pylint.config] if pylint.config else [],
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            description_of_origin="the option `--pylint-config`",
        ),
    )

    prepare_plugin_sources_request = Get(
        StrippedPythonSourceFiles,
        PythonSourceFilesRequest(partition.plugin_targets),
    )
    prepare_python_sources_request = Get(
        PythonSourceFiles,
        PythonSourceFilesRequest(partition.targets_with_dependencies),
    )
    field_set_sources_request = Get(
        SourceFiles,
        SourceFilesRequest(field_set.sources
                           for field_set in partition.field_sets),
    )

    (
        pylint_pex,
        requirements_pex,
        pylint_runner_pex,
        config_digest,
        prepared_plugin_sources,
        prepared_python_sources,
        field_set_sources,
    ) = await MultiGet(
        pylint_pex_request,
        requirements_pex_request,
        pylint_runner_pex_request,
        config_digest_request,
        prepare_plugin_sources_request,
        prepare_python_sources_request,
        field_set_sources_request,
    )

    prefixed_plugin_sources = (await Get(
        Digest,
        AddPrefix(
            prepared_plugin_sources.stripped_source_files.snapshot.digest,
            "__plugins"),
    ) if pylint.source_plugins else EMPTY_DIGEST)

    pythonpath = list(prepared_python_sources.source_roots)
    if pylint.source_plugins:
        # NB: Pylint source plugins must be explicitly loaded via PEX_EXTRA_SYS_PATH. The value must
        # point to the plugin's directory, rather than to a parent's directory, because
        # `load-plugins` takes a module name rather than a path to the module; i.e. `plugin`, but
        # not `path.to.plugin`. (This means users must have specified the parent directory as a
        # source root.)
        pythonpath.append("__plugins")

    input_digest = await Get(
        Digest,
        MergeDigests((
            pylint_pex.digest,
            requirements_pex.digest,
            pylint_runner_pex.digest,
            config_digest,
            prefixed_plugin_sources,
            prepared_python_sources.source_files.snapshot.digest,
        )),
    )

    address_references = ", ".join(
        sorted(field_set.address.spec for field_set in partition.field_sets))

    result = await Get(
        FallibleProcessResult,
        PexProcess(
            pylint_runner_pex,
            argv=generate_args(source_files=field_set_sources, pylint=pylint),
            input_digest=input_digest,
            extra_env={"PEX_EXTRA_SYS_PATH": ":".join(pythonpath)},
            description=
            (f"Run Pylint on {pluralize(len(partition.field_sets), 'target')}: "
             f"{address_references}."),
        ),
    )
    return LintResult.from_fallible_process_result(result,
                                                   linter_name="Pylint")
Example #13
async def setup_pytest_for_target(
    field_set: PythonTestFieldSet,
    pytest: PyTest,
    test_subsystem: TestSubsystem,
    python_setup: PythonSetup,
    coverage_config: CoverageConfig,
    coverage_subsystem: CoverageSubsystem,
) -> TestTargetSetup:
    test_addresses = Addresses((field_set.address,))

    transitive_targets = await Get(TransitiveTargets, Addresses, test_addresses)
    all_targets = transitive_targets.closure

    interpreter_constraints = PexInterpreterConstraints.create_from_compatibility_fields(
        (
            tgt[PythonInterpreterCompatibility]
            for tgt in all_targets
            if tgt.has_field(PythonInterpreterCompatibility)
        ),
        python_setup,
    )

    # Ensure all pexes we merge via PEX_PATH to form the test runner use the interpreter constraints
    # of the tests. This is handled by PexFromTargetsRequest, but we must pass this through for
    # plain PexRequests.
    pex_request = functools.partial(PexRequest, interpreter_constraints=interpreter_constraints)

    # NB: We set `--not-zip-safe` because Pytest plugin discovery, which uses
    # `importlib_metadata` and thus `zipp`, does not play nicely when doing import magic directly
    # from zip files. `zipp` has pathologically bad behavior with large zipfiles.
    # TODO: this does have a performance cost as the pex must now be expanded to disk. Long term,
    # it would be better to fix Zipp (whose fix would then need to be used by importlib_metadata
    # and then by Pytest). See https://github.com/jaraco/zipp/pull/26.
    additional_args_for_pytest = ("--not-zip-safe",)

    pytest_pex_request = Get(
        Pex,
        PexRequest,
        pex_request(
            output_filename="pytest.pex",
            requirements=PexRequirements(pytest.get_requirement_strings()),
            additional_args=additional_args_for_pytest,
        ),
    )

    requirements_pex_request = Get(
        Pex,
        PexFromTargetsRequest(
            addresses=test_addresses,
            output_filename="requirements.pex",
            include_source_files=False,
            additional_args=additional_args_for_pytest,
        ),
    )

    test_runner_pex_request = Get(
        Pex,
        PexRequest,
        pex_request(
            output_filename="test_runner.pex",
            entry_point="pytest:main",
            interpreter_constraints=interpreter_constraints,
            additional_args=(
                "--pex-path",
                # TODO(John Sirois): Support shading python binaries:
                #   https://github.com/pantsbuild/pants/issues/9206
                # Right now any pytest transitive requirements will shadow corresponding user
                # requirements which will lead to problems when APIs that are used by either
                # `pytest:main` or the tests themselves break between the two versions.
                ":".join(
                    (
                        pytest_pex_request.subject.output_filename,
                        requirements_pex_request.subject.output_filename,
                    )
                ),
            ),
        ),
    )

    prepared_sources_request = Get(
        PythonSourceFiles, PythonSourceFilesRequest(all_targets, include_files=True)
    )

    # Get the file names for the test_target so that we can specify to Pytest precisely which files
    # to test, rather than using auto-discovery.
    field_set_source_files_request = Get(SourceFiles, SourceFilesRequest([field_set.sources]))

    (
        pytest_pex,
        requirements_pex,
        test_runner_pex,
        prepared_sources,
        field_set_source_files,
    ) = await MultiGet(
        pytest_pex_request,
        requirements_pex_request,
        test_runner_pex_request,
        prepared_sources_request,
        field_set_source_files_request,
    )

    input_digest = await Get(
        Digest,
        MergeDigests(
            (
                coverage_config.digest,
                prepared_sources.source_files.snapshot.digest,
                requirements_pex.digest,
                pytest_pex.digest,
                test_runner_pex.digest,
            )
        ),
    )

    coverage_args = []
    if test_subsystem.use_coverage:
        cov_paths = coverage_subsystem.filter if coverage_subsystem.filter else (".",)
        coverage_args = [
            "--cov-report=",  # Turn off output.
            *itertools.chain.from_iterable(["--cov", cov_path] for cov_path in cov_paths),
        ]
    return TestTargetSetup(
        test_runner_pex=test_runner_pex,
        args=(*pytest.options.args, *coverage_args, *field_set_source_files.files),
        input_digest=input_digest,
        source_roots=prepared_sources.source_roots,
        timeout_seconds=field_set.timeout.calculate_from_global_options(pytest),
        xml_dir=pytest.options.junit_xml_dir,
        junit_family=pytest.options.junit_family,
        execution_slot_variable=pytest.options.execution_slot_var,
    )
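
With coverage enabled, the `coverage_args` assembled above expand to one `--cov <path>` pair per filter entry, preceded by `--cov-report=` to silence pytest-cov's own terminal report. A small worked example (the filter values are placeholders):

import itertools

cov_paths = ("project/util", "project/app")  # e.g. the value of `coverage_subsystem.filter`
coverage_args = [
    "--cov-report=",  # Turn off output, as in the rule above.
    *itertools.chain.from_iterable(["--cov", cov_path] for cov_path in cov_paths),
]
assert coverage_args == ["--cov-report=", "--cov", "project/util", "--cov", "project/app"]
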