Example no. 1
async def merge_coverage_data(
        data_collection: PytestCoverageDataCollection,
        coverage_setup: CoverageSetup) -> MergedCoverageData:
    """Merge per-test `.coverage` data files into a single coverage data file.

    Runs `coverage combine` inside the coverage PEX. With exactly one input
    there is nothing to combine, so that input's digest is returned as-is.
    """
    if len(data_collection) == 1:
        return MergedCoverageData(data_collection[0].digest)
    # We prefix each .coverage file with its corresponding address to avoid collisions.
    coverage_digests = await MultiGet(
        Get(Digest, AddPrefix(data.digest, prefix=data.address.path_safe_spec))
        for data in data_collection)
    input_digest = await Get(
        Digest, MergeDigests((*coverage_digests, coverage_setup.pex.digest)))
    # `coverage combine` takes explicit paths to each prefixed data file;
    # sorted for a deterministic argv (and thus a cacheable process).
    prefixes = sorted(f"{data.address.path_safe_spec}/.coverage"
                      for data in data_collection)
    result = await Get(
        ProcessResult,
        PexProcess(
            coverage_setup.pex,
            argv=("combine", *prefixes),
            input_digest=input_digest,
            # `combine` writes the merged data to `.coverage` in the CWD.
            output_files=(".coverage", ),
            description=f"Merge {len(prefixes)} Pytest coverage reports.",
            level=LogLevel.DEBUG,
        ),
    )
    return MergedCoverageData(result.output_digest)
Example no. 2
async def generate_coverage_reports(
    merged_coverage_data: MergedCoverageData,
    coverage_setup: CoverageSetup,
    coverage_config: CoverageConfig,
    coverage_subsystem: CoverageSubsystem,
    transitive_targets: TransitiveTargets,
) -> CoverageReports:
    """Generate every user-requested coverage report from the merged data.

    Launches one `coverage <report_name>` process per requested report type,
    except RAW, which just materializes the merged `.coverage` data file and
    needs no process.
    """
    sources = await Get(
        PythonSourceFiles,
        PythonSourceFilesRequest(transitive_targets.closure,
                                 include_resources=False),
    )
    input_digest = await Get(
        Digest,
        MergeDigests((
            merged_coverage_data.coverage_data,
            coverage_config.digest,
            coverage_setup.pex.digest,
            sources.source_files.snapshot.digest,
        )),
    )

    pex_processes = []
    report_types = []
    coverage_reports: List[CoverageReport] = []
    for report_type in coverage_subsystem.reports:
        if report_type == CoverageReportType.RAW:
            # RAW needs no process: the merged data file *is* the report.
            coverage_reports.append(
                FilesystemCoverageReport(
                    report_type=CoverageReportType.RAW,
                    result_digest=merged_coverage_data.coverage_data,
                    directory_to_materialize_to=coverage_subsystem.output_dir,
                    report_file=coverage_subsystem.output_dir / ".coverage",
                ))
            continue
        report_types.append(report_type)
        # XML/JSON reports are captured as a single file; HTML is captured as
        # a directory below, and other types produce no captured output file.
        output_file = (f"coverage.{report_type.value}" if report_type in {
            CoverageReportType.XML, CoverageReportType.JSON
        } else None)
        pex_processes.append(
            PexProcess(
                coverage_setup.pex,
                # We pass `--ignore-errors` because Pants dynamically injects missing `__init__.py`
                # files and this will cause Coverage to fail.
                argv=(report_type.report_name, "--ignore-errors"),
                input_digest=input_digest,
                output_directories=("htmlcov", )
                if report_type == CoverageReportType.HTML else None,
                output_files=(output_file, ) if output_file else None,
                description=
                f"Generate Pytest {report_type.report_name} coverage report.",
                level=LogLevel.DEBUG,
            ))
    # Run all report-generating processes concurrently.
    results = await MultiGet(
        Get(ProcessResult, PexProcess, process) for process in pex_processes)
    coverage_reports.extend(
        _get_coverage_reports(coverage_subsystem.output_dir, report_types,
                              results))
    return CoverageReports(tuple(coverage_reports))
Example no. 3
async def setup(setup_request: SetupRequest, isort: Isort) -> Setup:
    """Assemble the process that runs isort over the requested sources.

    The isort PEX build, the `--isort-config` file load, and the source-file
    gathering are requested concurrently, then merged into one input digest
    for a single `Process`.
    """
    requirements_pex_request = Get(
        Pex,
        PexRequest(
            output_filename="isort.pex",
            requirements=PexRequirements(isort.all_requirements),
            interpreter_constraints=PexInterpreterConstraints(
                isort.interpreter_constraints),
            entry_point=isort.entry_point,
        ),
    )

    config_digest_request = Get(
        Digest,
        PathGlobs(
            # NOTE(review): unlike the Black/Flake8 setups in this file,
            # `isort.config` is passed directly — presumably it is already a
            # list of globs; confirm against the Isort subsystem definition.
            globs=isort.config,
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            conjunction=GlobExpansionConjunction.all_match,
            description_of_origin="the option `--isort-config`",
        ),
    )

    source_files_request = Get(
        SourceFiles,
        SourceFilesRequest(field_set.sources
                           for field_set in setup_request.request.field_sets),
    )

    source_files, requirements_pex, config_digest = await MultiGet(
        source_files_request, requirements_pex_request, config_digest_request)
    # When chained after another formatter, operate on that formatter's
    # output rather than the pristine sources.
    source_files_snapshot = (
        source_files.snapshot
        if setup_request.request.prior_formatter_result is None else
        setup_request.request.prior_formatter_result)

    input_digest = await Get(
        Digest,
        MergeDigests((source_files_snapshot.digest, requirements_pex.digest,
                      config_digest)),
    )

    address_references = ", ".join(
        sorted(field_set.address.spec
               for field_set in setup_request.request.field_sets))

    process = await Get(
        Process,
        PexProcess(
            requirements_pex,
            argv=generate_args(source_files=source_files,
                               isort=isort,
                               check_only=setup_request.check_only),
            input_digest=input_digest,
            # Capture the (possibly reformatted) source files as output.
            output_files=source_files_snapshot.files,
            description=
            (f"Run isort on {pluralize(len(setup_request.request.field_sets), 'target')}: "
             f"{address_references}."),
        ),
    )
    return Setup(process, original_digest=source_files_snapshot.digest)
Example no. 4
async def run_setup_py(req: RunSetupPyRequest,
                       setuptools_setup: SetuptoolsSetup) -> RunSetupPyResult:
    """Run a setup.py command on a single exported target.

    Executes `setup.py <args>` via the setuptools PEX inside the target's
    chroot and returns a digest of the resulting dist directory, with the
    `dist/` prefix stripped.
    """
    input_digest = await Get(
        Digest,
        MergeDigests(
            (req.chroot.digest, setuptools_setup.requirements_pex.digest)))
    # The setuptools dist dir, created by it under the chroot (not to be confused with
    # pants's own dist dir, at the buildroot).
    dist_dir = "dist/"
    result = await Get(
        ProcessResult,
        PexProcess(
            setuptools_setup.requirements_pex,
            argv=("setup.py", *req.args),
            input_digest=input_digest,
            # setuptools commands that create dists write them to the distdir.
            # TODO: Could there be other useful files to capture?
            output_directories=(dist_dir, ),
            description=
            f"Run setuptools for {req.exported_target.target.address}",
        ),
    )
    # Strip "dist/" so callers see the dists at the root of the digest.
    output_digest = await Get(Digest,
                              RemovePrefix(result.output_digest, dist_dir))
    return RunSetupPyResult(output_digest)
Example no. 5
async def create_python_awslambda(
        field_set: PythonAwsLambdaFieldSet,
        lambdex_setup: LambdexSetup) -> CreatedAWSLambda:
    """Build an AWS Lambda zip for a target by wrapping its PEX with lambdex.

    The target's code is packaged into a platform-specific PEX, which lambdex
    then rewrites in place so that AWS invokes `lambdex_handler.handler` — a
    wrapper that dispatches to the user's actual handler.
    """
    # Lambdas typically use the .zip suffix, so we use that instead of .pex.
    pex_filename = f"{field_set.address.target_name}.zip"
    # We hardcode the platform value to the appropriate one for each AWS Lambda runtime.
    # (Running the "hello world" lambda in the example code will report the platform, and can be
    # used to verify correctness of these platform strings.)
    py_major, py_minor = field_set.runtime.to_interpreter_version()
    platform = f"linux_x86_64-cp-{py_major}{py_minor}-cp{py_major}{py_minor}"
    # Add the pymalloc ABI flag ("m") for interpreters before 3.8, where it
    # was removed (https://bugs.python.org/issue36707). Comparing the version
    # as a tuple handles the pair atomically; the previous check
    # (`py_major <= 3 and py_minor < 8`) tested the minor version
    # independently of the major, which is wrong in general even though it
    # happened to agree for the real Lambda runtimes (2.7, 3.6, 3.7, 3.8).
    if (py_major, py_minor) < (3, 8):
        platform += "m"
    # CPython 2.7 on Lambda uses the wide-unicode ABI, i.e. cp27mu.
    if (py_major, py_minor) == (2, 7):
        platform += "u"
    pex_request = TwoStepPexFromTargetsRequest(
        PexFromTargetsRequest(
            addresses=[field_set.address],
            internal_only=False,
            entry_point=None,
            output_filename=pex_filename,
            platforms=PexPlatforms([platform]),
            additional_args=[
                # Ensure we can resolve manylinux wheels in addition to any AMI-specific wheels.
                "--manylinux=manylinux2014",
                # When we're executing Pex on Linux, allow a local interpreter to be resolved if
                # available and matching the AMI platform.
                "--resolve-local-platforms",
            ],
        ))

    pex_result = await Get(TwoStepPex, TwoStepPexFromTargetsRequest,
                           pex_request)
    input_digest = await Get(
        Digest,
        MergeDigests(
            (pex_result.pex.digest, lambdex_setup.requirements_pex.digest)))

    # NB: Lambdex modifies its input pex in-place, so the input file is also the output file.
    result = await Get(
        ProcessResult,
        PexProcess(
            lambdex_setup.requirements_pex,
            argv=("build", "-e", field_set.handler.value, pex_filename),
            input_digest=input_digest,
            output_files=(pex_filename, ),
            description=f"Setting up handler in {pex_filename}",
        ),
    )
    return CreatedAWSLambda(
        digest=result.output_digest,
        zip_file_relpath=pex_filename,
        runtime=field_set.runtime.value,
        # The AWS-facing handler function is always lambdex_handler.handler, which is the wrapper
        # injected by lambdex that manages invocation of the actual handler.
        handler="lambdex_handler.handler",
    )
Example no. 6
async def setup_black(setup_request: SetupRequest, black: Black) -> Setup:
    """Assemble the process that runs Black over the requested sources.

    The Black PEX build, the `--black-config` file load, and the source-file
    gathering are requested concurrently, then merged into one input digest
    for a single `Process`.
    """
    requirements_pex_request = Get(
        Pex,
        PexRequest(
            output_filename="black.pex",
            internal_only=True,
            requirements=PexRequirements(black.all_requirements),
            interpreter_constraints=PexInterpreterConstraints(
                black.interpreter_constraints),
            entry_point=black.entry_point,
        ),
    )

    config_digest_request = Get(
        Digest,
        PathGlobs(
            # Error (rather than warn) if the user-specified config is missing.
            globs=[black.config] if black.config else [],
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            description_of_origin="the option `--black-config`",
        ),
    )

    source_files_request = Get(
        SourceFiles,
        SourceFilesRequest(field_set.sources
                           for field_set in setup_request.request.field_sets),
    )

    source_files, requirements_pex, config_digest = await MultiGet(
        source_files_request, requirements_pex_request, config_digest_request)
    # When chained after another formatter, operate on that formatter's
    # output rather than the pristine sources.
    source_files_snapshot = (
        source_files.snapshot
        if setup_request.request.prior_formatter_result is None else
        setup_request.request.prior_formatter_result)

    input_digest = await Get(
        Digest,
        MergeDigests((source_files_snapshot.digest, requirements_pex.digest,
                      config_digest)),
    )

    process = await Get(
        Process,
        PexProcess(
            requirements_pex,
            argv=generate_args(source_files=source_files,
                               black=black,
                               check_only=setup_request.check_only),
            input_digest=input_digest,
            # Capture the (possibly reformatted) source files as output.
            output_files=source_files_snapshot.files,
            description=
            f"Run Black on {pluralize(len(setup_request.request.field_sets), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    return Setup(process, original_digest=source_files_snapshot.digest)
Example no. 7
async def setup_docformatter(setup_request: SetupRequest,
                             docformatter: Docformatter) -> Setup:
    """Assemble the process that runs Docformatter over the requested sources.

    The Docformatter PEX build and the source-file gathering are requested
    concurrently, then merged into one input digest for a single `Process`.
    Docformatter takes no config file, so there is no config digest here.
    """
    requirements_pex_request = Get(
        Pex,
        PexRequest(
            output_filename="docformatter.pex",
            internal_only=True,
            requirements=PexRequirements(docformatter.all_requirements),
            interpreter_constraints=PexInterpreterConstraints(
                docformatter.interpreter_constraints),
            entry_point=docformatter.entry_point,
        ),
    )

    source_files_request = Get(
        SourceFiles,
        SourceFilesRequest(field_set.sources
                           for field_set in setup_request.request.field_sets),
    )

    source_files, requirements_pex = await MultiGet(source_files_request,
                                                    requirements_pex_request)

    # When chained after another formatter, operate on that formatter's
    # output rather than the pristine sources.
    source_files_snapshot = (
        source_files.snapshot
        if setup_request.request.prior_formatter_result is None else
        setup_request.request.prior_formatter_result)

    input_digest = await Get(
        Digest,
        MergeDigests((source_files_snapshot.digest, requirements_pex.digest)))

    process = await Get(
        Process,
        PexProcess(
            requirements_pex,
            argv=generate_args(
                source_files=source_files,
                docformatter=docformatter,
                check_only=setup_request.check_only,
            ),
            input_digest=input_digest,
            # Capture the (possibly reformatted) source files as output.
            output_files=source_files_snapshot.files,
            description=
            (f"Run Docformatter on {pluralize(len(setup_request.request.field_sets), 'file')}."
             ),
            level=LogLevel.DEBUG,
        ),
    )
    return Setup(process, original_digest=source_files_snapshot.digest)
Example no. 8
async def flake8_lint_partition(
    partition: Flake8Partition, flake8: Flake8, lint_subsystem: LintSubsystem
) -> LintResult:
    """Run Flake8 over one partition of field sets and collect the result.

    Builds the Flake8 PEX, loads any `--flake8-config` file, and gathers the
    partition's sources concurrently; optionally captures a report file when
    `lint_subsystem.reports_dir` is set.
    """
    requirements_pex_request = Get(
        Pex,
        PexRequest(
            output_filename="flake8.pex",
            internal_only=True,
            requirements=PexRequirements(flake8.all_requirements),
            # Prefer the partition's own interpreter constraints; fall back
            # to the Flake8 subsystem's constraints when the partition has none.
            interpreter_constraints=(
                partition.interpreter_constraints
                or PexInterpreterConstraints(flake8.interpreter_constraints)
            ),
            entry_point=flake8.entry_point,
        ),
    )

    config_digest_request = Get(
        Digest,
        PathGlobs(
            # Error (rather than warn) if the user-specified config is missing.
            globs=[flake8.config] if flake8.config else [],
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            description_of_origin="the option `--flake8-config`",
        ),
    )

    source_files_request = Get(
        SourceFiles, SourceFilesRequest(field_set.sources for field_set in partition.field_sets)
    )

    requirements_pex, config_digest, source_files = await MultiGet(
        requirements_pex_request, config_digest_request, source_files_request
    )

    input_digest = await Get(
        Digest,
        MergeDigests((source_files.snapshot.digest, requirements_pex.digest, config_digest)),
    )

    report_file_name = "flake8_report.txt" if lint_subsystem.reports_dir else None

    result = await Get(
        # Fallible: lint findings make Flake8 exit non-zero, which is not an error here.
        FallibleProcessResult,
        PexProcess(
            requirements_pex,
            argv=generate_args(
                source_files=source_files, flake8=flake8, report_file_name=report_file_name
            ),
            input_digest=input_digest,
            output_files=(report_file_name,) if report_file_name else None,
            description=f"Run Flake8 on {pluralize(len(partition.field_sets), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )

    report = None
    if report_file_name:
        # Only warn (don't fail) if Flake8 did not produce the report file.
        report_digest = await Get(
            Digest,
            DigestSubset(
                result.output_digest,
                PathGlobs(
                    [report_file_name],
                    glob_match_error_behavior=GlobMatchErrorBehavior.warn,
                    description_of_origin="Flake8 report file",
                ),
            ),
        )
        report = LintReport(report_file_name, report_digest)

    return LintResult.from_fallible_process_result(
        result, partition_description=str(sorted(partition.interpreter_constraints)), report=report
    )
Example no. 9
async def mypy_typecheck(request: MyPyRequest, mypy: MyPy) -> TypecheckResults:
    """Run MyPy over the transitive closure of the requested field sets.

    MyPy is fed an explicit file list (only `.py` files) via a generated
    `__files.txt`, and the source roots are put on `PEX_EXTRA_SYS_PATH` so
    imports resolve. Returns an empty result when `--mypy-skip` is set.
    """
    if mypy.skip:
        return TypecheckResults([], typechecker_name="MyPy")

    # Typechecking needs the full transitive closure, not just the roots.
    transitive_targets = await Get(
        TransitiveTargets, Addresses(fs.address for fs in request.field_sets))

    prepared_sources_request = Get(
        PythonSourceFiles,
        PythonSourceFilesRequest(transitive_targets.closure),
    )
    pex_request = Get(
        Pex,
        PexRequest(
            output_filename="mypy.pex",
            internal_only=True,
            requirements=PexRequirements(mypy.all_requirements),
            # NB: This only determines what MyPy is run with. The user can specify what version
            # their code is with `--python-version`. See
            # https://mypy.readthedocs.io/en/stable/config_file.html#platform-configuration. We do
            # not auto-configure this for simplicity and to avoid Pants magically setting values for
            # users.
            interpreter_constraints=PexInterpreterConstraints(
                mypy.interpreter_constraints),
            entry_point=mypy.entry_point,
        ),
    )
    config_digest_request = Get(
        Digest,
        PathGlobs(
            # Error (rather than warn) if the user-specified config is missing.
            globs=[mypy.config] if mypy.config else [],
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            description_of_origin="the option `--mypy-config`",
        ),
    )
    prepared_sources, pex, config_digest = await MultiGet(
        prepared_sources_request, pex_request, config_digest_request)

    srcs_snapshot = prepared_sources.source_files.snapshot
    # MyPy reads the files to check from this list file; non-.py files
    # (e.g. resources) are excluded.
    file_list_path = "__files.txt"
    python_files = "\n".join(f for f in srcs_snapshot.files
                             if f.endswith(".py"))
    file_list_digest = await Get(
        Digest,
        CreateDigest([FileContent(file_list_path, python_files.encode())]),
    )

    merged_input_files = await Get(
        Digest,
        MergeDigests([
            file_list_digest, srcs_snapshot.digest, pex.digest, config_digest
        ]),
    )

    result = await Get(
        # Fallible: type errors make MyPy exit non-zero, which is not an error here.
        FallibleProcessResult,
        PexProcess(
            pex,
            argv=generate_args(mypy, file_list_path=file_list_path),
            input_digest=merged_input_files,
            extra_env={
                "PEX_EXTRA_SYS_PATH": ":".join(prepared_sources.source_roots)
            },
            description=
            f"Run MyPy on {pluralize(len(srcs_snapshot.files), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    return TypecheckResults(
        [TypecheckResult.from_fallible_process_result(result)],
        typechecker_name="MyPy")
Example no. 10
async def flake8_lint_partition(partition: Flake8Partition, flake8: Flake8,
                                lint_subsystem: LintSubsystem) -> LintResult:
    """Run Flake8 over one partition of field sets and collect the result.

    Builds the Flake8 PEX, loads any `--flake8-config` file, and gathers the
    partition's sources concurrently; optionally captures a report file when
    `lint_subsystem.reports_dir` is set.
    """
    requirements_pex_request = Get(
        Pex,
        PexRequest(
            output_filename="flake8.pex",
            requirements=PexRequirements(flake8.all_requirements),
            # Prefer the partition's own interpreter constraints; fall back
            # to the Flake8 subsystem's constraints when the partition has none.
            interpreter_constraints=(partition.interpreter_constraints
                                     or PexInterpreterConstraints(
                                         flake8.interpreter_constraints)),
            entry_point=flake8.entry_point,
        ),
    )

    config_digest_request = Get(
        Digest,
        PathGlobs(
            # Error (rather than warn) if the user-specified config is missing.
            globs=[flake8.config] if flake8.config else [],
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            description_of_origin="the option `--flake8-config`",
        ),
    )

    source_files_request = Get(
        SourceFiles,
        SourceFilesRequest(field_set.sources
                           for field_set in partition.field_sets))

    requirements_pex, config_digest, source_files = await MultiGet(
        requirements_pex_request, config_digest_request, source_files_request)

    input_digest = await Get(
        Digest,
        MergeDigests((source_files.snapshot.digest, requirements_pex.digest,
                      config_digest)),
    )

    address_references = ", ".join(
        sorted(field_set.address.spec for field_set in partition.field_sets))
    report_path = (lint_subsystem.reports_dir /
                   "flake8_report.txt" if lint_subsystem.reports_dir else None)

    result = await Get(
        # Fallible: lint findings make Flake8 exit non-zero, which is not an error here.
        FallibleProcessResult,
        PexProcess(
            requirements_pex,
            argv=generate_args(
                source_files=source_files,
                flake8=flake8,
                # Flake8 writes to the bare file name; the full report_path
                # is only used when materializing results later.
                output_file=report_path.name if report_path else None,
            ),
            input_digest=input_digest,
            output_files=(report_path.name, ) if report_path else None,
            description=
            (f"Run Flake8 on {pluralize(len(partition.field_sets), 'target')}: "
             f"{address_references}."),
        ),
    )

    results_file = None
    if report_path:
        report_file_snapshot = await Get(
            Snapshot,
            DigestSubset(result.output_digest, PathGlobs([report_path.name])))
        # Exactly one report file must have been produced.
        if len(report_file_snapshot.files) != 1:
            raise Exception(
                f"Unexpected report file snapshot: {report_file_snapshot.files}"
            )
        results_file = LintResultFile(output_path=report_path,
                                      digest=report_file_snapshot.digest)

    return LintResult.from_fallible_process_result(result,
                                                   linter_name="Flake8",
                                                   results_file=results_file)
Example no. 11
async def pylint_lint_partition(partition: PylintPartition,
                                pylint: Pylint) -> LintResult:
    """Run Pylint over one partition of field sets and collect the result.

    Builds three PEXes — Pylint (plus plugin requirements), the targets'
    third-party requirements, and a thin runner that joins the first two via
    `--pex-path` — then runs the runner against the partition's sources.
    """
    # We build one PEX with Pylint requirements and another with all direct 3rd-party dependencies.
    # Splitting this into two PEXes gives us finer-grained caching. We then merge via `--pex-path`.
    plugin_requirements = PexRequirements.create_from_requirement_fields(
        plugin_tgt[PythonRequirementsField]
        for plugin_tgt in partition.plugin_targets
        if plugin_tgt.has_field(PythonRequirementsField))
    target_requirements = PexRequirements.create_from_requirement_fields(
        tgt[PythonRequirementsField]
        for tgt in partition.targets_with_dependencies
        if tgt.has_field(PythonRequirementsField))
    pylint_pex_request = Get(
        Pex,
        PexRequest(
            output_filename="pylint.pex",
            requirements=PexRequirements(
                [*pylint.all_requirements, *plugin_requirements]),
            interpreter_constraints=partition.interpreter_constraints,
        ),
    )
    requirements_pex_request = Get(
        Pex,
        PexRequest(
            output_filename="requirements.pex",
            requirements=target_requirements,
            interpreter_constraints=partition.interpreter_constraints,
        ),
    )
    # TODO(John Sirois): Support shading python binaries:
    #   https://github.com/pantsbuild/pants/issues/9206
    # Right now any Pylint transitive requirements will shadow corresponding user
    # requirements, which could lead to problems.
    pylint_runner_pex_args = [
        "--pex-path", ":".join(["pylint.pex", "requirements.pex"])
    ]
    pylint_runner_pex_request = Get(
        Pex,
        PexRequest(
            output_filename="pylint_runner.pex",
            entry_point=pylint.entry_point,
            interpreter_constraints=partition.interpreter_constraints,
            additional_args=pylint_runner_pex_args,
        ),
    )

    config_digest_request = Get(
        Digest,
        PathGlobs(
            # Error (rather than warn) if the user-specified config is missing.
            globs=[pylint.config] if pylint.config else [],
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            description_of_origin="the option `--pylint-config`",
        ),
    )

    prepare_plugin_sources_request = Get(
        StrippedPythonSourceFiles,
        PythonSourceFilesRequest(partition.plugin_targets),
    )
    prepare_python_sources_request = Get(
        PythonSourceFiles,
        PythonSourceFilesRequest(partition.targets_with_dependencies),
    )
    field_set_sources_request = Get(
        SourceFiles,
        SourceFilesRequest(field_set.sources
                           for field_set in partition.field_sets),
    )

    # Await all seven requests concurrently.
    (
        pylint_pex,
        requirements_pex,
        pylint_runner_pex,
        config_digest,
        prepared_plugin_sources,
        prepared_python_sources,
        field_set_sources,
    ) = await MultiGet(
        pylint_pex_request,
        requirements_pex_request,
        pylint_runner_pex_request,
        config_digest_request,
        prepare_plugin_sources_request,
        prepare_python_sources_request,
        field_set_sources_request,
    )

    # Isolate source plugins under "__plugins" so they don't collide with
    # the sources under lint.
    prefixed_plugin_sources = (await Get(
        Digest,
        AddPrefix(
            prepared_plugin_sources.stripped_source_files.snapshot.digest,
            "__plugins"),
    ) if pylint.source_plugins else EMPTY_DIGEST)

    pythonpath = list(prepared_python_sources.source_roots)
    if pylint.source_plugins:
        # NB: Pylint source plugins must be explicitly loaded via PEX_EXTRA_SYS_PATH. The value must
        # point to the plugin's directory, rather than to a parent's directory, because
        # `load-plugins` takes a module name rather than a path to the module; i.e. `plugin`, but
        # not `path.to.plugin`. (This means users must have specified the parent directory as a
        # source root.)
        pythonpath.append("__plugins")

    input_digest = await Get(
        Digest,
        MergeDigests((
            pylint_pex.digest,
            requirements_pex.digest,
            pylint_runner_pex.digest,
            config_digest,
            prefixed_plugin_sources,
            prepared_python_sources.source_files.snapshot.digest,
        )),
    )

    address_references = ", ".join(
        sorted(field_set.address.spec for field_set in partition.field_sets))

    result = await Get(
        # Fallible: lint findings make Pylint exit non-zero, which is not an error here.
        FallibleProcessResult,
        PexProcess(
            pylint_runner_pex,
            argv=generate_args(source_files=field_set_sources, pylint=pylint),
            input_digest=input_digest,
            extra_env={"PEX_EXTRA_SYS_PATH": ":".join(pythonpath)},
            description=
            (f"Run Pylint on {pluralize(len(partition.field_sets), 'target')}: "
             f"{address_references}."),
        ),
    )
    return LintResult.from_fallible_process_result(result,
                                                   linter_name="Pylint")
Example no. 12
async def run_python_test(
    field_set: PythonTestFieldSet,
    setup: TestTargetSetup,
    global_options: GlobalOptions,
    test_subsystem: TestSubsystem,
) -> TestResult:
    """Run Pytest for one test target and collect its result.

    Optionally captures a JUnit XML results file (when `setup.xml_dir` is
    configured) and a `.coverage` data file (when coverage is enabled).
    Conftest-only field sets are skipped outright.
    """
    if field_set.is_conftest():
        return TestResult.skip(field_set.address)

    # Extra Pytest options are passed via the PYTEST_ADDOPTS env var.
    add_opts = [f"--color={'yes' if global_options.options.colors else 'no'}"]

    output_files = []
    # Configure generation of JUnit-compatible test report.
    test_results_file = None
    if setup.xml_dir:
        test_results_file = f"{field_set.address.path_safe_spec}.xml"
        add_opts.extend((f"--junitxml={test_results_file}", "-o",
                         f"junit_family={setup.junit_family}"))
        output_files.append(test_results_file)

    # Configure generation of a coverage report.
    if test_subsystem.use_coverage:
        output_files.append(".coverage")

    env = {
        "PYTEST_ADDOPTS": " ".join(add_opts),
        "PEX_EXTRA_SYS_PATH": ":".join(setup.source_roots)
    }

    if test_subsystem.force:
        # This is a slightly hacky way to force the process to run: since the env var
        #  value is unique, this input combination will never have been seen before,
        #  and therefore never cached. The two downsides are:
        #  1. This leaks into the test's environment, albeit with a funky var name that is
        #     unlikely to cause problems in practice.
        #  2. This run will be cached even though it can never be re-used.
        # TODO: A more principled way of forcing rules to run?
        uuid = await Get(UUID, UUIDRequest())
        env["__PANTS_FORCE_TEST_RUN__"] = str(uuid)

    result = await Get(
        # Fallible: test failures make Pytest exit non-zero, which is not an error here.
        FallibleProcessResult,
        PexProcess(
            setup.test_runner_pex,
            argv=setup.args,
            input_digest=setup.input_digest,
            output_files=tuple(output_files) if output_files else None,
            description=f"Run Pytest for {field_set.address}",
            timeout_seconds=setup.timeout_seconds,
            extra_env=env,
            execution_slot_variable=setup.execution_slot_variable,
            level=LogLevel.DEBUG,
        ),
    )

    coverage_data = None
    if test_subsystem.use_coverage:
        coverage_snapshot = await Get(
            Snapshot,
            DigestSubset(result.output_digest, PathGlobs([".coverage"])))
        # Missing coverage output is a warning, not a hard failure.
        if coverage_snapshot.files == (".coverage", ):
            coverage_data = PytestCoverageData(field_set.address,
                                               coverage_snapshot.digest)
        else:
            logger.warning(
                f"Failed to generate coverage data for {field_set.address}.")

    xml_results_digest = None
    if test_results_file:
        xml_results_snapshot = await Get(
            Snapshot,
            DigestSubset(result.output_digest, PathGlobs([test_results_file])))
        if xml_results_snapshot.files == (test_results_file, ):
            # Re-root the XML file under the user-configured output dir.
            xml_results_digest = await Get(
                Digest,
                AddPrefix(xml_results_snapshot.digest,
                          setup.xml_dir),  # type: ignore[arg-type]
            )
        else:
            logger.warning(
                f"Failed to generate JUnit XML data for {field_set.address}.")

    return TestResult.from_fallible_process_result(
        result,
        coverage_data=coverage_data,
        xml_results=xml_results_digest,
        address=field_set.address,
    )
Example no. 13
async def setup_pytest_for_target(
    request: TestSetupRequest,
    pytest: PyTest,
    test_subsystem: TestSubsystem,
    python_setup: PythonSetup,
    coverage_config: CoverageConfig,
    coverage_subsystem: CoverageSubsystem,
    global_options: GlobalOptions,
) -> TestSetup:
    """Build the `Process` that runs Pytest for a single test target.

    Resolves the target's transitive closure, builds three PEXes in
    parallel (Pytest itself, the target's third-party requirements, and a
    `pytest:main` runner that chains the first two via `--pex-path`),
    merges their digests with the prepared sources and coverage config,
    and assembles the Pytest command line — adding JUnit XML and coverage
    flags when the relevant options are enabled and this is not a debug
    (interactive) run.

    Returns a `TestSetup` wrapping the ready-to-run `Process` plus the
    JUnit results file name (or `None` when XML output is disabled).
    """
    test_addresses = Addresses((request.field_set.address, ))

    transitive_targets = await Get(TransitiveTargets, Addresses,
                                   test_addresses)
    all_targets = transitive_targets.closure

    # Interpreter constraints come from every target in the closure that
    # declares compatibility, so the PEXes below can satisfy all of them.
    interpreter_constraints = PexInterpreterConstraints.create_from_compatibility_fields(
        (tgt[PythonInterpreterCompatibility] for tgt in all_targets
         if tgt.has_field(PythonInterpreterCompatibility)),
        python_setup,
    )

    # NB: We set `--not-zip-safe` because Pytest plugin discovery, which uses
    # `importlib_metadata` and thus `zipp`, does not play nicely when doing import magic directly
    # from zip files. `zipp` has pathologically bad behavior with large zipfiles.
    # TODO: this does have a performance cost as the pex must now be expanded to disk. Long term,
    # it would be better to fix Zipp (whose fix would then need to be used by importlib_metadata
    # and then by Pytest). See https://github.com/jaraco/zipp/pull/26.
    additional_args_for_pytest = ("--not-zip-safe", )

    # PEX containing Pytest and its plugins (resolved from the PyTest
    # subsystem's requirement strings).
    pytest_pex_request = Get(
        Pex,
        PexRequest(
            output_filename="pytest.pex",
            requirements=PexRequirements(pytest.get_requirement_strings()),
            interpreter_constraints=interpreter_constraints,
            additional_args=additional_args_for_pytest,
            internal_only=True,
        ),
    )

    # Defaults to zip_safe=False.
    requirements_pex_request = Get(
        Pex,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(test_addresses,
                                               internal_only=True),
    )

    # Thin entry-point PEX that composes the two PEXes above via
    # `--pex-path` rather than re-resolving their contents.
    test_runner_pex_request = Get(
        Pex,
        PexRequest(
            interpreter_constraints=interpreter_constraints,
            output_filename="test_runner.pex",
            entry_point="pytest:main",
            additional_args=(
                "--pex-path",
                # TODO(John Sirois): Support shading python binaries:
                #   https://github.com/pantsbuild/pants/issues/9206
                # Right now any pytest transitive requirements will shadow corresponding user
                # requirements which will lead to problems when APIs that are used by either
                # `pytest:main` or the tests themselves break between the two versions.
                ":".join((
                    pytest_pex_request.subject.output_filename,
                    requirements_pex_request.subject.output_filename,
                )),
            ),
            internal_only=True,
        ),
    )

    # All first-party sources (including loose files) for the closure.
    prepared_sources_request = Get(
        PythonSourceFiles,
        PythonSourceFilesRequest(all_targets, include_files=True))

    # Get the file names for the test_target so that we can specify to Pytest precisely which files
    # to test, rather than using auto-discovery.
    field_set_source_files_request = Get(
        SourceFiles, SourceFilesRequest([request.field_set.sources]))

    # Await all five independent requests concurrently.
    (
        pytest_pex,
        requirements_pex,
        test_runner_pex,
        prepared_sources,
        field_set_source_files,
    ) = await MultiGet(
        pytest_pex_request,
        requirements_pex_request,
        test_runner_pex_request,
        prepared_sources_request,
        field_set_source_files_request,
    )

    # Single input digest for the process: coverage config, sources, and
    # all three PEXes.
    input_digest = await Get(
        Digest,
        MergeDigests((
            coverage_config.digest,
            prepared_sources.source_files.snapshot.digest,
            requirements_pex.digest,
            pytest_pex.digest,
            test_runner_pex.digest,
        )),
    )

    # Options passed to Pytest via the PYTEST_ADDOPTS env var below.
    add_opts = [f"--color={'yes' if global_options.options.colors else 'no'}"]
    output_files = []

    # JUnit XML output: only for non-interactive runs, and only when a
    # destination dir is configured. The file name is derived from the
    # address so parallel test runs cannot collide.
    results_file_name = None
    if pytest.options.junit_xml_dir and not request.is_debug:
        results_file_name = f"{request.field_set.address.path_safe_spec}.xml"
        add_opts.extend((f"--junitxml={results_file_name}", "-o",
                         f"junit_family={pytest.options.junit_family}"))
        output_files.append(results_file_name)

    # Coverage (pytest-cov) args: capture the raw `.coverage` data file
    # but suppress terminal report output (`--cov-report=`); the report is
    # generated later by a separate rule.
    coverage_args = []
    if test_subsystem.use_coverage and not request.is_debug:
        output_files.append(".coverage")
        cov_paths = coverage_subsystem.filter if coverage_subsystem.filter else (
            ".", )
        coverage_args = [
            "--cov-report=",  # Turn off output.
            *itertools.chain.from_iterable(["--cov", cov_path]
                                           for cov_path in cov_paths),
        ]

    # PEX_EXTRA_SYS_PATH exposes the prepared source roots to the PEX's
    # sys.path so first-party code is importable by the tests.
    extra_env = {
        "PYTEST_ADDOPTS": " ".join(add_opts),
        "PEX_EXTRA_SYS_PATH": ":".join(prepared_sources.source_roots),
    }

    if test_subsystem.force and not request.is_debug:
        # This is a slightly hacky way to force the process to run: since the env var
        #  value is unique, this input combination will never have been seen before,
        #  and therefore never cached. The two downsides are:
        #  1. This leaks into the test's environment, albeit with a funky var name that is
        #     unlikely to cause problems in practice.
        #  2. This run will be cached even though it can never be re-used.
        # TODO: A more principled way of forcing rules to run?
        uuid = await Get(UUID, UUIDRequest())
        extra_env["__PANTS_FORCE_TEST_RUN__"] = str(uuid)

    process = await Get(
        Process,
        PexProcess(
            test_runner_pex,
            argv=(*pytest.options.args, *coverage_args,
                  *field_set_source_files.files),
            extra_env=extra_env,
            input_digest=input_digest,
            output_files=output_files,
            timeout_seconds=request.field_set.timeout.
            calculate_from_global_options(pytest),
            execution_slot_variable=pytest.options.execution_slot_var,
            description=f"Run Pytest for {request.field_set.address}",
            level=LogLevel.DEBUG,
        ),
    )
    return TestSetup(process, results_file_name=results_file_name)