Example #1
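Verifies that a streaming-workunit callback receives a StreamingWorkunitContext and can resolve a captured stdout_digest artifact back to the process's output bytes.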
def test_context_object_on_streaming_workunits(
    rule_runner: RuleRunner, run_tracker: RunTracker
) -> None:
    scheduler = rule_runner.scheduler

    def callback(**kwargs) -> None:
        context = kwargs["context"]
        assert isinstance(context, StreamingWorkunitContext)

        completed_workunits = kwargs["completed_workunits"]
        for workunit in completed_workunits:
            if "artifacts" in workunit and "stdout_digest" in workunit[
                    "artifacts"]:
                digest = workunit["artifacts"]["stdout_digest"]
                output = context.single_file_digests_to_bytes([digest])
                assert output == (b"stdout output\n", )

    handler = StreamingWorkunitHandler(
        scheduler,
        run_tracker=run_tracker,
        callbacks=[callback],
        report_interval_seconds=0.01,
        max_workunit_verbosity=LogLevel.INFO,
        specs=Specs.empty(),
        options_bootstrapper=create_options_bootstrapper([]),
    )

    stdout_process = Process(
        argv=("/bin/bash", "-c", "/bin/echo 'stdout output'"),
        description="Stdout process",
    )

    with handler.session():
        rule_runner.request(ProcessResult, [stdout_process])
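
These snippets reference names from Pants' engine and test-support modules without showing the imports. A plausible import block is sketched below; the exact module paths shift between Pants releases, so treat them as assumptions rather than a definitive list.

from pants.base.specs import Specs
from pants.engine.process import Process, ProcessResult
from pants.engine.streaming_workunit_handler import (
    StreamingWorkunitContext,
    StreamingWorkunitHandler,
)
from pants.goal.run_tracker import RunTracker
from pants.testutil.option_util import create_options_bootstrapper
from pants.testutil.rule_runner import RuleRunner
from pants.util.logging import LogLevel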
Example #2
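Runs an engine-aware rule over two snapshots and checks that the workunit's snapshot artifacts round-trip through snapshots_to_file_contents.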
def test_more_complicated_engine_aware(
    rule_runner: RuleRunner, run_tracker: RunTracker
) -> None:
    tracker = WorkunitTracker()
    handler = StreamingWorkunitHandler(
        rule_runner.scheduler,
        run_tracker=run_tracker,
        callbacks=[tracker.add],
        report_interval_seconds=0.01,
        max_workunit_verbosity=LogLevel.TRACE,
        specs=Specs.empty(),
        options_bootstrapper=create_options_bootstrapper([]),
    )
    with handler.session():
        input_1 = CreateDigest((
            FileContent(path="a.txt", content=b"alpha"),
            FileContent(path="b.txt", content=b"beta"),
        ))
        digest_1 = rule_runner.request(Digest, [input_1])
        snapshot_1 = rule_runner.request(Snapshot, [digest_1])

        input_2 = CreateDigest((FileContent(path="g.txt", content=b"gamma"), ))
        digest_2 = rule_runner.request(Digest, [input_2])
        snapshot_2 = rule_runner.request(Snapshot, [digest_2])

        input = ComplicatedInput(snapshot_1=snapshot_1, snapshot_2=snapshot_2)

        rule_runner.request(Output, [input])

    finished = list(itertools.chain.from_iterable(tracker.finished_workunit_chunks))
    workunit = next(
        item for item in finished if item["name"] == "pants.engine.internals.engine_test.a_rule"
    )

    streaming_workunit_context = handler._context

    artifacts = workunit["artifacts"]
    output_snapshot_1 = artifacts["snapshot_1"]
    output_snapshot_2 = artifacts["snapshot_2"]

    output_contents_list = streaming_workunit_context.snapshots_to_file_contents(
        [output_snapshot_1, output_snapshot_2])
    assert len(output_contents_list) == 2

    assert isinstance(output_contents_list[0], DigestContents)
    assert isinstance(output_contents_list[1], DigestContents)

    digest_contents_1 = output_contents_list[0]
    digest_contents_2 = output_contents_list[1]

    assert len(tuple(x for x in digest_contents_1 if x.content == b"alpha")) == 1
    assert len(tuple(x for x in digest_contents_1 if x.content == b"beta")) == 1

    assert len(tuple(x for x in digest_contents_2 if x.content == b"gamma")) == 1
Example #3
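A test fixture helper that wires a scheduler, a WorkunitTracker, and a StreamingWorkunitHandler together at a configurable workunit verbosity.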
def _fixture_for_rules(
    self, rules, max_workunit_verbosity: LogLevel = LogLevel.INFO
) -> Tuple[SchedulerSession, WorkunitTracker, StreamingWorkunitHandler]:
    scheduler = self.mk_scheduler(rules, include_trace_on_error=False)
    tracker = WorkunitTracker()
    handler = StreamingWorkunitHandler(
        scheduler,
        run_tracker=new_run_tracker(),
        callbacks=[tracker],
        report_interval_seconds=0.01,
        max_workunit_verbosity=max_workunit_verbosity,
        specs=Specs.empty(),
        options_bootstrapper=create_options_bootstrapper([]),
        pantsd=False,
    )
    return scheduler, tracker, handler
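
The WorkunitTracker these fixtures register comes from Pants' test support code. Below is a minimal sketch of the callback protocol it appears to implement, assuming the keyword arguments shown in Example #1; the field names simply follow how the examples use the tracker, so treat the details as assumptions.

class WorkunitTracker:
    """Sketch: record every non-empty batch of completed workunits and
    remember whether the run finished."""

    def __init__(self) -> None:
        self.finished_workunit_chunks = []
        self.finished = False

    def __call__(self, **kwargs) -> None:
        if kwargs.get("finished"):
            self.finished = True
        completed = kwargs.get("completed_workunits")
        if completed:
            self.finished_workunit_chunks.append(completed)

    # Example #2 registers a bound method (tracker.add) instead of the
    # instance itself; either form works as a callback.
    def add(self, **kwargs) -> None:
        self(**kwargs)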
Example #4
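Checks that local-cache and local-execution counters, along with recorded observation histograms, surface in the streamed workunit data.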
def test_counters(rule_runner: RuleRunner, run_tracker: RunTracker) -> None:
    scheduler = rule_runner.scheduler

    tracker = WorkunitTracker()
    handler = StreamingWorkunitHandler(
        scheduler,
        run_tracker=run_tracker,
        callbacks=[tracker],
        report_interval_seconds=0.01,
        max_workunit_verbosity=LogLevel.TRACE,
        specs=Specs.empty(),
        options_bootstrapper=create_options_bootstrapper([]),
        allow_async_completion=False,
    )

    with handler:
        scheduler.record_test_observation(128)
        rule_runner.request(
            ProcessResult,
            [
                Process(
                    ["/bin/sh", "-c", "true"],
                    description="always true",
                    cache_scope=ProcessCacheScope.PER_SESSION,
                )
            ],
        )
        histograms_info = scheduler.get_observation_histograms()

    finished = list(itertools.chain.from_iterable(tracker.finished_workunit_chunks))
    workunits_with_counters = [item for item in finished if "counters" in item]
    assert workunits_with_counters[0]["counters"]["local_cache_requests"] == 1
    assert workunits_with_counters[0]["counters"]["local_cache_requests_uncached"] == 1
    assert workunits_with_counters[1]["counters"]["local_execution_requests"] == 1

    assert histograms_info["version"] == 0
    assert "histograms" in histograms_info
    assert "test_observation" in histograms_info["histograms"]
    assert (
        histograms_info["histograms"]["test_observation"] ==
        b"\x1c\x84\x93\x14\x00\x00\x00\x1fx\x9c\x93i\x99,\xcc\xc0\xc0\xc0\xcc\x00\x010\x9a\x11J3\xd9\x7f\x800\xfe32\x01\x00E\x0c\x03\x81"
    )
Example #5
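Asserts that the stdout and stderr digests attached to a process workunit match the actual process output, then exercises the scheduler's digest-to-bytes APIs.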
def test_process_digests_on_streaming_workunits(
    rule_runner: RuleRunner, run_tracker: RunTracker
) -> None:
    scheduler = rule_runner.scheduler

    tracker = WorkunitTracker()
    handler = StreamingWorkunitHandler(
        scheduler,
        run_tracker=run_tracker,
        callbacks=[tracker],
        report_interval_seconds=0.01,
        max_workunit_verbosity=LogLevel.DEBUG,
        specs=Specs.empty(),
        options_bootstrapper=create_options_bootstrapper([]),
        allow_async_completion=False,
    )

    stdout_process = Process(
        argv=("/bin/bash", "-c", "/bin/echo 'stdout output'"),
        description="Stdout process",
    )

    with handler:
        result = rule_runner.request(ProcessResult, [stdout_process])

    assert tracker.finished
    finished = list(itertools.chain.from_iterable(tracker.finished_workunit_chunks))

    process_workunit = next(item for item in finished if item["name"] == "process")
    assert process_workunit is not None
    stdout_digest = process_workunit["artifacts"]["stdout_digest"]
    stderr_digest = process_workunit["artifacts"]["stderr_digest"]

    assert result.stdout == b"stdout output\n"
    assert stderr_digest == EMPTY_FILE_DIGEST
    assert stdout_digest.serialized_bytes_length == len(result.stdout)

    tracker = WorkunitTracker()
    handler = StreamingWorkunitHandler(
        scheduler,
        run_tracker=run_tracker,
        callbacks=[tracker],
        report_interval_seconds=0.01,
        max_workunit_verbosity=LogLevel.DEBUG,
        specs=Specs.empty(),
        options_bootstrapper=create_options_bootstrapper([]),
        allow_async_completion=False,
    )
    stderr_process = Process(
        argv=("/bin/bash", "-c", "1>&2 /bin/echo 'stderr output'"),
        description="Stderr process",
    )
    with handler:
        result = rule_runner.request(ProcessResult, [stderr_process])

    assert tracker.finished
    finished = list(itertools.chain.from_iterable(tracker.finished_workunit_chunks))
    process_workunit = next(item for item in finished if item["name"] == "process")

    assert process_workunit is not None
    stdout_digest = process_workunit["artifacts"]["stdout_digest"]
    stderr_digest = process_workunit["artifacts"]["stderr_digest"]

    assert result.stderr == b"stderr output\n"
    assert stdout_digest == EMPTY_FILE_DIGEST
    assert stderr_digest.serialized_bytes_length == len(result.stderr)

    assert process_workunit["metadata"]["exit_code"] == 0

    try:
        scheduler.ensure_remote_has_recursive([stdout_digest, stderr_digest])
    except Exception as e:
        # This is the exception message we should expect from invoking ensure_remote_has_recursive()
        # in rust.
        assert str(e) == "Cannot ensure remote has blobs without a remote"

    byte_outputs = scheduler.single_file_digests_to_bytes([stdout_digest, stderr_digest])
    assert byte_outputs[0] == result.stdout
    assert byte_outputs[1] == result.stderr
Example #6
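The lint goal rule itself: it collects lint and fmt request types from the union membership, batches applicable field sets, runs all requests concurrently, and consolidates the results per linter.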
async def lint(
    console: Console,
    workspace: Workspace,
    specs: Specs,
    lint_subsystem: LintSubsystem,
    union_membership: UnionMembership,
    dist_dir: DistDir,
) -> Lint:
    lint_target_request_types = cast(
        "Iterable[type[LintTargetsRequest]]", union_membership.get(LintTargetsRequest)
    )
    fmt_target_request_types = cast("Iterable[type[FmtRequest]]", union_membership.get(FmtRequest))
    file_request_types = cast(
        "Iterable[type[LintFilesRequest]]", union_membership[LintFilesRequest]
    )

    _check_ambiguous_request_names(
        *lint_target_request_types, *fmt_target_request_types, *file_request_types
    )

    specified_names = determine_specified_tool_names(
        "lint",
        lint_subsystem.only,
        [*lint_target_request_types, *fmt_target_request_types],
        extra_valid_names={request.name for request in file_request_types},
    )

    def is_specified(request_type: type[StyleRequest] | type[LintFilesRequest]):
        return request_type.name in specified_names

    lint_target_request_types = filter(is_specified, lint_target_request_types)
    fmt_target_request_types = filter(is_specified, fmt_target_request_types)
    file_request_types = filter(is_specified, file_request_types)

    _get_targets = Get(
        FilteredTargets,
        Specs,
        specs if lint_target_request_types or fmt_target_request_types else Specs.empty(),
    )
    _get_specs_paths = Get(SpecsPaths, Specs, specs if file_request_types else Specs.empty())
    targets, specs_paths = await MultiGet(_get_targets, _get_specs_paths)

    def batch(field_sets: Iterable[FieldSet]) -> Iterator[list[FieldSet]]:
        partitions = partition_sequentially(
            field_sets,
            key=lambda fs: fs.address.spec,
            size_target=lint_subsystem.batch_size,
            size_max=4 * lint_subsystem.batch_size,
        )
        for partition in partitions:
            yield partition

    def batch_by_type(
        request_types: Iterable[type[_SR]],
    ) -> tuple[tuple[type[_SR], list[FieldSet]], ...]:
        return tuple(
            (request_type, field_set_batch)
            for request_type in request_types
            for field_set_batch in batch(
                request_type.field_set_type.create(target)
                for target in targets
                if request_type.field_set_type.is_applicable(target)
            )
        )

    lint_target_requests = (
        request_type(batch) for request_type, batch in batch_by_type(lint_target_request_types)
    )

    fmt_requests: Iterable[FmtRequest] = ()
    if not lint_subsystem.skip_formatters:
        batched_fmt_request_pairs = batch_by_type(fmt_target_request_types)
        all_fmt_source_batches = await MultiGet(
            Get(
                SourceFiles,
                SourceFilesRequest(
                    cast(
                        SourcesField,
                        getattr(field_set, "sources", getattr(field_set, "source", None)),
                    )
                    for field_set in batch
                ),
            )
            for _, batch in batched_fmt_request_pairs
        )
        fmt_requests = (
            request_type(
                batch,
                snapshot=source_files_snapshot.snapshot,
            )
            for (request_type, batch), source_files_snapshot in zip(
                batched_fmt_request_pairs, all_fmt_source_batches
            )
        )

    file_requests = (
        tuple(request_type(specs_paths.files) for request_type in file_request_types)
        if specs_paths.files
        else ()
    )

    all_requests = [
        *(Get(LintResults, LintTargetsRequest, request) for request in lint_target_requests),
        *(Get(FmtResult, FmtRequest, request) for request in fmt_requests),
        *(Get(LintResults, LintFilesRequest, request) for request in file_requests),
    ]
    all_batch_results = cast(
        "tuple[LintResults | FmtResult, ...]",
        await MultiGet(all_requests),  # type: ignore[arg-type]
    )

    def key_fn(results: LintResults | FmtResult):
        if isinstance(results, FmtResult):
            return results.formatter_name
        return results.linter_name

    # NB: We must pre-sort the data for itertools.groupby() to work properly.
    sorted_all_batch_results = sorted(all_batch_results, key=key_fn)

    formatter_failed = False

    def coerce_to_lintresult(batch_results: LintResults | FmtResult) -> tuple[LintResult, ...]:
        if isinstance(batch_results, FmtResult):
            nonlocal formatter_failed
            formatter_failed = formatter_failed or batch_results.did_change
            return (
                LintResult(
                    1 if batch_results.did_change else 0,
                    batch_results.stdout,
                    batch_results.stderr,
                ),
            )
        return batch_results.results

    # We consolidate all results for each linter into a single `LintResults`.
    all_results = tuple(
        sorted(
            (
                LintResults(
                    itertools.chain.from_iterable(
                        coerce_to_lintresult(batch_results) for batch_results in results
                    ),
                    linter_name=linter_name,
                )
                for linter_name, results in itertools.groupby(sorted_all_batch_results, key=key_fn)
            ),
            key=key_fn,
        )
    )

    def get_name(res: LintResults) -> str:
        return res.linter_name

    write_reports(
        all_results,
        workspace,
        dist_dir,
        goal_name=LintSubsystem.name,
        get_name=get_name,
    )

    _print_results(
        console,
        all_results,
        formatter_failed,
    )
    return Lint(_get_error_code(all_results))
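
The "NB" comment above is load-bearing: itertools.groupby() only merges adjacent equal keys, so without the pre-sort a linter whose batches are interleaved with another's would produce several partial LintResults instead of one. A standalone illustration:

import itertools

data = ["flake8", "black", "flake8"]
# Unsorted: "flake8" appears as two separate groups.
assert [k for k, _ in itertools.groupby(data)] == ["flake8", "black", "flake8"]
# Sorted first: one consolidated group per key, which is what the rule relies on.
assert [k for k, _ in itertools.groupby(sorted(data))] == ["black", "flake8"]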
Example #7
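Checks how NoApplicableTargetsException renders its error message for empty, file-only, target-only, and mixed Specs.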
def test_no_applicable_targets_exception() -> None:
    # Check that we correctly render the error message.
    class Tgt1(Target):
        alias = "tgt1"
        core_fields = ()

    class Tgt2(Target):
        alias = "tgt2"
        core_fields = (MultipleSourcesField,)

    class Tgt3(Target):
        alias = "tgt3"
        core_fields = ()

    # No targets/files specified. Because none of the relevant targets have a sources field, we do
    # not give the filedeps command.
    exc = NoApplicableTargetsException(
        [],
        Specs.empty(),
        UnionMembership({}),
        applicable_target_types=[Tgt1],
        goal_description="the `foo` goal",
    )
    remedy = (
        "Please specify relevant file and/or target arguments. Run `./pants "
        "--filter-target-type=tgt1 list ::` to find all applicable targets in your project."
    )
    assert (dedent(f"""\
            No files or targets specified. The `foo` goal works with these target types:

              * tgt1

            {remedy}""") in str(exc))

    invalid_tgt = Tgt3({}, Address("blah"))
    exc = NoApplicableTargetsException(
        [invalid_tgt],
        Specs(
            includes=RawSpecs(
                file_literals=(FileLiteralSpec("foo.ext"),),
                description_of_origin="tests",
            ),
            ignores=RawSpecs(description_of_origin="tests"),
        ),
        UnionMembership({}),
        applicable_target_types=[Tgt1, Tgt2],
        goal_description="the `foo` goal",
    )
    remedy = (
        "Please specify relevant file and/or target arguments. Run `./pants "
        "--filter-target-type=tgt1,tgt2 list ::` to find all applicable targets in your project, "
        "or run `./pants --filter-target-type=tgt1,tgt2 filedeps ::` to find all "
        "applicable files.")
    assert (dedent(f"""\
            No applicable files or targets matched. The `foo` goal works with these target types:

              * tgt1
              * tgt2

            However, you only specified file arguments with these target types:

              * tgt3

            {remedy}""") in str(exc))

    # Only target (address literal) arguments specified.
    exc = NoApplicableTargetsException(
        [invalid_tgt],
        Specs(
            includes=RawSpecs(
                address_literals=(AddressLiteralSpec("foo", "bar"),),
                description_of_origin="tests",
            ),
            ignores=RawSpecs(description_of_origin="tests"),
        ),
        UnionMembership({}),
        applicable_target_types=[Tgt1],
        goal_description="the `foo` goal",
    )
    assert "However, you only specified target arguments with these target types:" in str(
        exc)
    exc = NoApplicableTargetsException(
        [invalid_tgt],
        Specs(
            includes=RawSpecs(
                address_literals=(AddressLiteralSpec("foo", "bar"), ),
                file_literals=(FileLiteralSpec("foo.ext"), ),
                description_of_origin="tests",
            ),
            ignores=RawSpecs(description_of_origin="tests"),
        ),
        UnionMembership({}),
        applicable_target_types=[Tgt1],
        goal_description="the `foo` goal",
    )
    assert "However, you only specified target and file arguments with these target types:" in str(
        exc)
Example #8
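A helper that drives the lint rule with mocked Gets and returns the goal's exit code together with its stderr output.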
def run_lint_rule(
    rule_runner: RuleRunner,
    *,
    lint_request_types: Sequence[Type[LintTargetsRequest]],
    fmt_request_types: Sequence[Type[FmtRequest]] = (),
    targets: list[Target],
    run_files_linter: bool = False,
    batch_size: int = 128,
    only: list[str] | None = None,
    skip_formatters: bool = False,
) -> Tuple[int, str]:
    union_membership = UnionMembership({
        LintTargetsRequest: lint_request_types,
        LintFilesRequest: [MockFilesRequest] if run_files_linter else [],
        FmtRequest: fmt_request_types,
    })
    lint_subsystem = create_goal_subsystem(
        LintSubsystem,
        batch_size=batch_size,
        only=only or [],
        skip_formatters=skip_formatters,
    )
    with mock_console(rule_runner.options_bootstrapper) as (console, stdio_reader):
        result: Lint = run_rule_with_mocks(
            lint,
            rule_args=[
                console,
                Workspace(rule_runner.scheduler, _enforce_effects=False),
                Specs.empty(),
                lint_subsystem,
                union_membership,
                DistDir(relpath=Path("dist")),
            ],
            mock_gets=[
                MockGet(
                    output_type=SourceFiles,
                    input_type=SourceFilesRequest,
                    mock=lambda _: SourceFiles(EMPTY_SNAPSHOT, ()),
                ),
                MockGet(
                    output_type=LintResults,
                    input_type=LintTargetsRequest,
                    mock=lambda mock_request: mock_request.lint_results,
                ),
                MockGet(
                    output_type=LintResults,
                    input_type=LintFilesRequest,
                    mock=lambda mock_request: mock_request.lint_results,
                ),
                MockGet(
                    output_type=FmtResult,
                    input_type=FmtRequest,
                    mock=lambda mock_request: mock_request.fmt_result,
                ),
                MockGet(
                    output_type=FilteredTargets,
                    input_type=Specs,
                    mock=lambda _: FilteredTargets(targets),
                ),
                MockGet(
                    output_type=SpecsPaths,
                    input_type=Specs,
                    mock=lambda _: SpecsPaths(("f.txt", ), ()),
                ),
            ],
            union_membership=union_membership,
        )
        assert not stdio_reader.get_stdout()
        return result.exit_code, stdio_reader.get_stderr()