Code Example #1
File: check.py  Project: codealchemy/pants
async def check_go(request: GoCheckRequest) -> CheckResults:
    build_requests = await MultiGet(
        Get(FallibleBuildGoPackageRequest,
            BuildGoPackageTargetRequest(field_set.address))
        for field_set in request.field_sets)
    invalid_requests = []
    valid_requests = []
    for fallible_request in build_requests:
        if fallible_request.request is None:
            invalid_requests.append(fallible_request)
        else:
            valid_requests.append(fallible_request.request)

    build_results = await MultiGet(
        Get(FallibleBuiltGoPackage, BuildGoPackageRequest, request)
        for request in valid_requests)

    # NB: We don't pass stdout/stderr as it will have already been rendered as streaming.
    exit_code = next(
        (
            result.exit_code  # type: ignore[attr-defined]
            for result in (*build_results, *invalid_requests)
            if result.exit_code != 0  # type: ignore[attr-defined]
        ),
        0,
    )
    return CheckResults([CheckResult(exit_code, "", "")],
                        checker_name=request.name)
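
This rule, like the kotlinc and javac rules in the later examples, collapses many fallible per-package results into a single exit code: the first non-zero exit code wins, and 0 is the default when everything succeeded. A minimal self-contained sketch of that idiom, with FakeResult as a hypothetical stand-in for FallibleBuiltGoPackage or FallibleClasspathEntry:

from dataclasses import dataclass
from typing import Iterable


@dataclass(frozen=True)
class FakeResult:
    # Hypothetical stand-in for any result object exposing an exit_code attribute.
    exit_code: int


def first_failure_exit_code(results: Iterable[FakeResult]) -> int:
    # The generator yields only failing results; next() takes the first one,
    # falling back to 0 ("all succeeded") when there is no failure.
    return next((r.exit_code for r in results if r.exit_code != 0), 0)


assert first_failure_exit_code([FakeResult(0), FakeResult(2), FakeResult(1)]) == 2
assert first_failure_exit_code([FakeResult(0), FakeResult(0)]) == 0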
Code Example #2
File: javac_test.py  Project: codealchemy/pants
def test_compile_no_deps(rule_runner: RuleRunner) -> None:
    rule_runner.write_files({
        "BUILD":
        "java_sources(name='lib')",
        "3rdparty/jvm/default.lock":
        TestCoursierWrapper.new(entries=()).serialize(),
        "ExampleLib.java":
        JAVA_LIB_SOURCE,
    })
    coarsened_target = expect_single_expanded_coarsened_target(
        rule_runner, Address(spec_path="", target_name="lib"))

    classpath = rule_runner.request(
        RenderedClasspath,
        [
            CompileJavaSourceRequest(component=coarsened_target,
                                     resolve=make_resolve(rule_runner))
        ],
    )
    assert classpath.content == {
        ".ExampleLib.java.lib.javac.jar":
        {"org/pantsbuild/example/lib/ExampleLib.class"}
    }

    # Additionally validate that `check` works.
    check_results = rule_runner.request(
        CheckResults,
        [
            JavacCheckRequest([
                JavacCheckRequest.field_set_type.create(
                    coarsened_target.representative)
            ])
        ],
    ).results
    assert set(check_results) == {CheckResult(0, "", "")}
Code Example #3
async def kotlinc_check(
    request: KotlincCheckRequest,
    classpath_entry_request: ClasspathEntryRequestFactory,
) -> CheckResults:
    coarsened_targets = await Get(
        CoarsenedTargets, Addresses(field_set.address for field_set in request.field_sets)
    )

    # NB: Each root can have an independent resolve, because there is no inherent relation
    # between them other than that they were on the commandline together.
    resolves = await MultiGet(
        Get(CoursierResolveKey, CoarsenedTargets([t])) for t in coarsened_targets
    )

    results = await MultiGet(
        Get(
            FallibleClasspathEntry,
            ClasspathEntryRequest,
            classpath_entry_request.for_targets(component=target, resolve=resolve),
        )
        for target, resolve in zip(coarsened_targets, resolves)
    )

    # NB: We don't pass stdout/stderr as it will have already been rendered as streaming.
    exit_code = next((result.exit_code for result in results if result.exit_code != 0), 0)
    return CheckResults([CheckResult(exit_code, "", "")], checker_name=request.name)
Code Example #4
def test_check(rule_runner: RuleRunner) -> None:
    rule_runner.write_files({
        "go.mod":
        dedent("""\
                module example.com/greeter
                go 1.17
                """),
        "bad/f.go":
        "invalid!!!",
        "good/f.go":
        dedent("""\
                package greeter

                import "fmt"

                func Hello() {
                    fmt.Println("Hello world!")
                }
                """),
        "BUILD":
        "go_mod(name='mod')",
    })
    targets = [
        rule_runner.get_target(
            Address("", target_name="mod", generated_name="./bad")),
        rule_runner.get_target(
            Address("", target_name="mod", generated_name="./good")),
    ]
    results = rule_runner.request(
        CheckResults,
        [GoCheckRequest(GoCheckFieldSet.create(tgt)
                        for tgt in targets)]).results
    assert set(results) == {CheckResult(1, "", "")}
Code Example #5
File: check_test.py  Project: hephex/pants
def test_streaming_output_partitions() -> None:
    results = CheckResults(
        [
            CheckResult(21, "", "", partition_description="ghc8.1"),
            CheckResult(0, "stdout", "stderr", partition_description="ghc9.2"),
        ],
        checker_name="typechecker",
    )
    assert results.level() == LogLevel.ERROR
    assert results.message() == dedent("""\
        typechecker failed (exit code 21).
        Partition #1 - ghc8.1:

        Partition #2 - ghc9.2:
        stdout
        stderr

        """)
Code Example #6
File: check_test.py  Project: hephex/pants
def check_results(self) -> CheckResults:
    addresses = [config.address for config in self.field_sets]
    return CheckResults(
        [CheckResult(
            self.exit_code(addresses),
            "",
            "",
        )],
        checker_name=self.checker_name,
    )
Code Example #7
File: check_test.py  Project: hephex/pants
def test_streaming_output_failure() -> None:
    results = CheckResults([CheckResult(18, "stdout", "stderr")],
                           checker_name="typechecker")
    assert results.level() == LogLevel.ERROR
    assert results.message() == dedent("""\
        typechecker failed (exit code 18).
        stdout
        stderr

        """)
Code Example #8
File: check_test.py  Project: hephex/pants
def test_streaming_output_success() -> None:
    results = CheckResults([CheckResult(0, "stdout", "stderr")],
                           checker_name="typechecker")
    assert results.level() == LogLevel.INFO
    assert results.message() == dedent("""\
        typechecker succeeded.
        stdout
        stderr

        """)
Code Example #9
def test_write_reports() -> None:
    rule_runner = RuleRunner()
    report_digest = rule_runner.make_snapshot_of_empty_files(["r.txt"]).digest
    no_results = CheckResults([], checker_name="none")
    _empty_result = CheckResult(0, "", "", report=EMPTY_DIGEST)
    empty_results = CheckResults([_empty_result], checker_name="empty")
    _single_result = CheckResult(0, "", "", report=report_digest)
    single_results = CheckResults([_single_result], checker_name="single")
    duplicate_results = CheckResults(
        [_single_result, _single_result, _empty_result], checker_name="duplicate"
    )
    partition_results = CheckResults(
        [
            CheckResult(0, "", "", report=report_digest, partition_description="p1"),
            CheckResult(0, "", "", report=report_digest, partition_description="p2"),
        ],
        checker_name="partition",
    )
    partition_duplicate_results = CheckResults(
        [
            CheckResult(0, "", "", report=report_digest, partition_description="p"),
            CheckResult(0, "", "", report=report_digest, partition_description="p"),
        ],
        checker_name="partition_duplicate",
    )

    def get_tool_name(res: CheckResults) -> str:
        return res.checker_name

    write_reports(
        (
            no_results,
            empty_results,
            single_results,
            duplicate_results,
            partition_results,
            partition_duplicate_results,
        ),
        Workspace(rule_runner.scheduler, _enforce_effects=False),
        DistDir(Path("dist")),
        goal_name="check",
        get_tool_name=get_tool_name,
    )

    check_dir = Path(rule_runner.build_root, "dist", "check")
    assert (check_dir / "none").exists() is False
    assert (check_dir / "empty").exists() is False
    assert (check_dir / "single/r.txt").exists() is True

    assert (check_dir / "duplicate/all/r.txt").exists() is True
    assert (check_dir / "duplicate/all_/r.txt").exists() is True

    assert (check_dir / "partition/p1/r.txt").exists() is True
    assert (check_dir / "partition/p2/r.txt").exists() is True

    assert (check_dir / "partition_duplicate/p/r.txt").exists() is True
    assert (check_dir / "partition_duplicate/p_/r.txt").exists() is True
Code Example #10
def test_compile_no_deps(rule_runner: RuleRunner) -> None:
    rule_runner.write_files({
        "BUILD":
        dedent("""\
                java_sources(
                    name = 'lib',

                )
                """),
        "coursier_resolve.lockfile":
        CoursierResolvedLockfile(entries=()).to_json().decode("utf-8"),
        "ExampleLib.java":
        JAVA_LIB_SOURCE,
    })
    coarsened_target = expect_single_expanded_coarsened_target(
        rule_runner, Address(spec_path="", target_name="lib"))

    compiled_classfiles = rule_runner.request(
        ClasspathEntry,
        [
            CompileJavaSourceRequest(component=coarsened_target,
                                     resolve=make_resolve(rule_runner))
        ],
    )

    classpath = rule_runner.request(RenderedClasspath,
                                    [compiled_classfiles.digest])
    assert classpath.content == {
        ".ExampleLib.java.lib.jar":
        {"org/pantsbuild/example/lib/ExampleLib.class"}
    }

    # Additionally validate that `check` works.
    check_results = rule_runner.request(
        CheckResults,
        [
            JavacCheckRequest([
                JavacCheckRequest.field_set_type.create(
                    coarsened_target.representative)
            ])
        ],
    ).results
    assert set(check_results) == {CheckResult(0, "", "")}
Code Example #11
async def javac_check(request: JavacCheckRequest) -> CheckResults:
    coarsened_targets = await Get(
        CoarsenedTargets,
        Addresses(field_set.address for field_set in request.field_sets))

    # TODO: This should be fallible so that we exit cleanly.
    results = await MultiGet(
        Get(FallibleCompiledClassfiles, CompileJavaSourceRequest(component=t))
        for t in coarsened_targets)

    # NB: We return CheckResults with exit codes for the root targets, but we do not pass
    # stdout/stderr because it will already have been rendered as streaming.
    return CheckResults(
        [
            CheckResult(
                result.exit_code,
                stdout="",
                stderr="",
                partition_description=str(coarsened_target),
            ) for result, coarsened_target in zip(results, coarsened_targets)
        ],
        checker_name="javac",
    )
Code Example #12
File: check.py  Project: hephex/pants
async def javac_check(
    request: JavacCheckRequest,
    union_membership: UnionMembership,
) -> CheckResults:
    coarsened_targets = await Get(
        CoarsenedTargets, Addresses(field_set.address for field_set in request.field_sets)
    )

    resolves = await MultiGet(
        Get(CoursierResolveKey, Targets(t.members)) for t in coarsened_targets
    )

    results = await MultiGet(
        Get(
            FallibleClasspathEntry,
            ClasspathEntryRequest,
            ClasspathEntryRequest.for_targets(union_membership, component=target, resolve=resolve),
        )
        for target, resolve in zip(coarsened_targets, resolves)
    )

    # NB: We don't pass stdout/stderr as it will have already been rendered as streaming.
    exit_code = next((result.exit_code for result in results if result.exit_code != 0), 0)
    return CheckResults([CheckResult(exit_code, "", "")], checker_name="javac")
Code Example #13
File: rules.py  Project: patricklaw/pants
async def mypy_typecheck_partition(
    partition: MyPyPartition,
    config_file: MyPyConfigFile,
    first_party_plugins: MyPyFirstPartyPlugins,
    mypy: MyPy,
    python_setup: PythonSetup,
) -> CheckResult:
    # MyPy requires 3.5+ to run, but uses the typed-ast library to work with 2.7, 3.4, 3.5, 3.6,
    # and 3.7. However, typed-ast does not understand 3.8+, so instead we must run MyPy with
    # Python 3.8+ when relevant. We only do this if <3.8 can't be used, as we don't want a
    # loose requirement like `>=3.6` to result in requiring Python 3.8+, which would error if
    # 3.8+ is not installed on the machine.
    tool_interpreter_constraints = (
        partition.interpreter_constraints
        if (
            mypy.options.is_default("interpreter_constraints")
            and partition.interpreter_constraints.requires_python38_or_newer(
                python_setup.interpreter_universe
            )
        )
        else mypy.interpreter_constraints
    )

    closure_sources_get = Get(PythonSourceFiles,
                              PythonSourceFilesRequest(partition.closure))
    roots_sources_get = Get(
        SourceFiles,
        SourceFilesRequest(
            tgt.get(PythonSources) for tgt in partition.root_targets))

    # See `requirements_venv_pex` for how this will get wrapped in a `VenvPex`.
    requirements_pex_get = Get(
        Pex,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(
            (tgt.address for tgt in partition.root_targets),
            hardcoded_interpreter_constraints=partition.interpreter_constraints,
            internal_only=True,
        ),
    )

    mypy_pex_get = Get(
        VenvPex,
        PexRequest(
            output_filename="mypy.pex",
            internal_only=True,
            main=mypy.main,
            requirements=mypy.pex_requirements(
                extra_requirements=first_party_plugins.requirement_strings,
            ),
            interpreter_constraints=tool_interpreter_constraints,
        ),
    )

    closure_sources, roots_sources, mypy_pex, requirements_pex = await MultiGet(
        closure_sources_get, roots_sources_get, mypy_pex_get,
        requirements_pex_get)

    python_files = determine_python_files(roots_sources.snapshot.files)
    file_list_path = "__files.txt"
    file_list_digest_request = Get(
        Digest,
        CreateDigest(
            [FileContent(file_list_path, "\n".join(python_files).encode())]),
    )

    # This creates a venv with all the 3rd-party requirements used by the code. We tell MyPy to
    # use this venv by setting `--python-executable`. Note that this Python interpreter is
    # different than what we run MyPy with.
    #
    # We could have directly asked the `PexFromTargetsRequest` to return a `VenvPex`, rather than
    # `Pex`, but that would mean missing out on sharing a cache with other goals like `test` and
    # `run`.
    requirements_venv_pex_request = Get(
        VenvPex,
        PexRequest(
            output_filename="requirements_venv.pex",
            internal_only=True,
            pex_path=[requirements_pex],
            interpreter_constraints=partition.interpreter_constraints,
        ),
    )

    requirements_venv_pex, file_list_digest = await MultiGet(
        requirements_venv_pex_request, file_list_digest_request)

    merged_input_files = await Get(
        Digest,
        MergeDigests([
            file_list_digest,
            first_party_plugins.sources_digest,
            closure_sources.source_files.snapshot.digest,
            requirements_venv_pex.digest,
            config_file.digest,
        ]),
    )

    all_used_source_roots = sorted(
        set(
            itertools.chain(first_party_plugins.source_roots,
                            closure_sources.source_roots)))
    env = {
        "PEX_EXTRA_SYS_PATH": ":".join(all_used_source_roots),
        "MYPYPATH": ":".join(all_used_source_roots),
    }

    result = await Get(
        FallibleProcessResult,
        VenvPexProcess(
            mypy_pex,
            argv=generate_argv(
                mypy,
                venv_python=requirements_venv_pex.python.argv0,
                file_list_path=file_list_path,
                python_version=config_file.python_version_to_autoset(
                    partition.interpreter_constraints,
                    python_setup.interpreter_universe),
            ),
            input_digest=merged_input_files,
            extra_env=env,
            output_directories=(REPORT_DIR, ),
            description=f"Run MyPy on {pluralize(len(python_files), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    report = await Get(Digest, RemovePrefix(result.output_digest, REPORT_DIR))
    return CheckResult.from_fallible_process_result(
        result,
        partition_description=str(
            sorted(str(c) for c in partition.interpreter_constraints)),
        report=report,
    )
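
For context, the final CheckResult.from_fallible_process_result call wraps the MyPy process result in a CheckResult. A rough stand-alone approximation of that conversion, assuming it simply copies the exit code and decodes the captured output (the real Pants helper may additionally clean up sandbox paths in the output):

def check_result_from_process(proc, partition_description, report):
    # `proc` is assumed to expose exit_code plus stdout/stderr as bytes,
    # matching the FallibleProcessResult used in the rule above.
    return CheckResult(
        proc.exit_code,
        proc.stdout.decode(errors="replace"),
        proc.stderr.decode(errors="replace"),
        partition_description=partition_description,
        report=report,
    )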