def test_docker_build_fail_logs(
    rule_runner: RuleRunner,
    caplog,
    context_root: str | None,
    copy_sources: tuple[str, ...],
    build_context_files: tuple[str, ...],
    expect_logged: list[tuple[int, str]] | None,
    fail_log_contains: list[str],
) -> None:
    """A failing `docker build` should raise and surface the expected log output.

    Parametrized over the image's `context_root`, the sources copied into the
    build context, and the log lines/fragments we expect to see on failure.
    """
    caplog.set_level(logging.INFO)

    rule_runner.write_files(
        {"docker/test/BUILD": f"docker_image(context_root={context_root!r})"}
    )

    # The Dockerfile itself is always part of the build context.
    all_context_files = ("docker/test/Dockerfile", *build_context_files)
    snapshot = rule_runner.make_snapshot_of_empty_files(all_context_files)

    # The build is rigged to exit non-zero, so the process failure must propagate.
    with pytest.raises(ProcessExecutionFailure):
        assert_build(
            rule_runner,
            Address("docker/test"),
            exit_code=1,
            copy_sources=copy_sources,
            build_context_snapshot=snapshot,
        )

    assert_logged(caplog, expect_logged)
    # All expected fragments must appear in the first captured log record.
    for fragment in fail_log_contains:
        assert fragment in caplog.records[0].message
def test_write_reports() -> None:
    """`write_reports` should lay out checker reports under dist/check/<tool>/...

    Covers: no results, an empty report digest (skipped), a single report,
    duplicate results (disambiguated with a trailing underscore), distinct
    partitions, and duplicate partition descriptions.
    """
    rule_runner = RuleRunner()
    report_digest = rule_runner.make_snapshot_of_empty_files(["r.txt"]).digest

    empty_report = CheckResult(0, "", "", report=EMPTY_DIGEST)
    real_report = CheckResult(0, "", "", report=report_digest)

    all_results = (
        CheckResults([], checker_name="none"),
        CheckResults([empty_report], checker_name="empty"),
        CheckResults([real_report], checker_name="single"),
        # Two identical reports plus an empty one: the second copy must be
        # written to a deduplicated directory name.
        CheckResults([real_report, real_report, empty_report], checker_name="duplicate"),
        CheckResults(
            [
                CheckResult(0, "", "", report=report_digest, partition_description="p1"),
                CheckResult(0, "", "", report=report_digest, partition_description="p2"),
            ],
            checker_name="partition",
        ),
        CheckResults(
            [
                CheckResult(0, "", "", report=report_digest, partition_description="p"),
                CheckResult(0, "", "", report=report_digest, partition_description="p"),
            ],
            checker_name="partition_duplicate",
        ),
    )

    write_reports(
        all_results,
        Workspace(rule_runner.scheduler, _enforce_effects=False),
        DistDir(Path("dist")),
        goal_name="check",
        get_tool_name=lambda res: res.checker_name,
    )

    check_dir = Path(rule_runner.build_root, "dist", "check")

    # Checkers with no report content get no directory at all.
    assert not (check_dir / "none").exists()
    assert not (check_dir / "empty").exists()

    assert (check_dir / "single/r.txt").exists()
    # Duplicate results land in "all" and the underscore-suffixed "all_".
    assert (check_dir / "duplicate/all/r.txt").exists()
    assert (check_dir / "duplicate/all_/r.txt").exists()
    # Distinct partitions each get their own subdirectory.
    assert (check_dir / "partition/p1/r.txt").exists()
    assert (check_dir / "partition/p2/r.txt").exists()
    # Identical partition descriptions are disambiguated the same way.
    assert (check_dir / "partition_duplicate/p/r.txt").exists()
    assert (check_dir / "partition_duplicate/p_/r.txt").exists()
def run_lint_rule(
    rule_runner: RuleRunner,
    *,
    lint_request_types: Sequence[Type[LintTargetsRequest]],
    targets: list[Target],
    run_files_linter: bool = False,
    batch_size: int = 128,
    only: list[str] | None = None,
) -> Tuple[int, str]:
    """Run the `lint` goal rule with mocked linter requests.

    Returns the goal's exit code and everything written to stderr; asserts
    that the goal wrote nothing to stdout.
    """
    file_request_types = [MockFilesRequest] if run_files_linter else []
    membership = UnionMembership(
        {
            LintTargetsRequest: lint_request_types,
            LintFilesRequest: file_request_types,
        }
    )
    subsystem = create_goal_subsystem(
        LintSubsystem,
        batch_size=batch_size,
        only=only or [],
    )
    snapshot = SpecsSnapshot(rule_runner.make_snapshot_of_empty_files(["f.txt"]))

    with mock_console(rule_runner.options_bootstrapper) as (console, stdio_reader):
        # Both request unions resolve straight to the results baked into the
        # mock request objects — no real linters run.
        lint_result: Lint = run_rule_with_mocks(
            lint,
            rule_args=[
                console,
                Workspace(rule_runner.scheduler, _enforce_effects=False),
                Targets(targets),
                snapshot,
                subsystem,
                membership,
                DistDir(relpath=Path("dist")),
            ],
            mock_gets=[
                MockGet(
                    output_type=LintResults,
                    input_type=LintTargetsRequest,
                    mock=lambda req: req.lint_results,
                ),
                MockGet(
                    output_type=LintResults,
                    input_type=LintFilesRequest,
                    mock=lambda req: req.lint_results,
                ),
            ],
            union_membership=membership,
        )
        # The lint goal reports exclusively via stderr.
        assert not stdio_reader.get_stdout()
        stderr = stdio_reader.get_stderr()

    return lint_result.exit_code, stderr