def test_check_all_files_with_combined_coverage() -> None:
    report = create_report(
        meta=ReportMetadata(branch_coverage=True),
        files={
            "a.py": FileCoverageModel(summary=CoverageSummaryModel(
                covered_lines=5,
                num_statements=5,
                covered_branches=1,
                num_branches=5,
            )),
            "b.py": FileCoverageModel(summary=CoverageSummaryModel(
                covered_lines=5,
                num_statements=5,
                covered_branches=3,
                num_branches=5,
            )),
        },
    )
    # Combined coverage = (covered_lines + covered_branches) / (num_statements + num_branches):
    # a.py is (5 + 1) / (5 + 5) = 60%, b.py is (5 + 3) / (5 + 5) = 80%.
    assert (check_all(
        report, Config(file_combined_coverage_min=Decimal("60.0"))) == Pass())
    assert check_all(
        report, Config(file_combined_coverage_min=Decimal("80.0"))) == Fail([
            'File: "a.py" failed COMBINED line plus branch coverage metric'
            + ", expected 80.0, was 60.0000"
        ])
def test_check_all_files_with_branch_coverage() -> None:
    report = create_report(
        meta=ReportMetadata(branch_coverage=True),
        files={
            "a.py": FileCoverageModel(summary=CoverageSummaryModel(
                covered_lines=5,
                num_statements=5,
                covered_branches=1,
                num_branches=2,
            )),
            "b.py": FileCoverageModel(summary=CoverageSummaryModel(
                covered_lines=5,
                num_statements=5,
                covered_branches=3,
                num_branches=4,
            )),
        },
    )
    # Branch coverage: a.py is 1/2 = 50%, b.py is 3/4 = 75%.
    assert check_all(
        report, Config(file_branch_coverage_min=Decimal("50.0"))) == Pass()
    assert check_all(
        report, Config(file_branch_coverage_min=Decimal("75.0"))
    ) == Fail([
        'File: "a.py" failed BRANCH coverage metric, expected 75.0, was 50.0000'
    ])
def test_module_level_config() -> None:
    report = create_report(
        meta=ReportMetadata(branch_coverage=True),
        files={
            "src/model/a.py": FileCoverageModel(summary=CoverageSummaryModel(
                covered_lines=5,
                num_statements=5,
                covered_branches=1,
                num_branches=2,
            )),
            "src/model/b.py": FileCoverageModel(summary=CoverageSummaryModel(
                covered_lines=5,
                num_statements=5,
                covered_branches=3,
                num_branches=4,
            )),
            "src/cli/command.py": FileCoverageModel(summary=CoverageSummaryModel(
                covered_lines=5,
                num_statements=5,
                covered_branches=4,
                num_branches=4,
            )),
        },
    )
    assert (check_all(
        report,
        Config(modules={
            "src/model/": ModuleConfig(file_branch_coverage_min=Decimal("50.0"))
        }),
    ) == Pass())
    # The more specific module prefix takes precedence: "src/model/a" overrides
    # "src/model/" for a.py, so its 50% branch coverage still passes here.
    assert (check_all(
        report,
        Config(
            modules={
                "src/model/": ModuleConfig(file_branch_coverage_min=Decimal("75.0")),
                "src/model/a": ModuleConfig(file_branch_coverage_min=Decimal("50.0")),
            }),
    ) == Pass())
    assert check_all(
        report,
        Config(
            modules={
                "src/model/": ModuleConfig(file_branch_coverage_min=Decimal("80.0")),
                "src/model/a": ModuleConfig(file_branch_coverage_min=Decimal("50.0")),
            }),
    ) == Fail([
        'File: "src/model/b.py" failed BRANCH coverage metric'
        + ", expected 80.0, was 75.0000"
    ])
def read_config(config_file_name: Optional[str]) -> Config:
    DEFAULT_FILENAME = "./pyproject.toml"
    # An explicitly named config file must contain a [coverage-threshold] table;
    # the default pyproject.toml may omit it, in which case defaults apply.
    if config_file_name is not None:
        return Config.parse(toml.load(config_file_name)["coverage-threshold"])
    if os.path.isfile(DEFAULT_FILENAME):
        return Config.parse(
            toml.load(DEFAULT_FILENAME).get("coverage-threshold", {}))
    return Config()
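# For illustration only: the threshold values below are made up, not taken from
# any real pyproject.toml, and this helper is hypothetical, not part of the
# codebase. Config.parse receives the [coverage-threshold] table produced by
# toml.load as a plain dict, so a table such as
#
#   [coverage-threshold]
#   line_coverage_min = 90
#   [coverage-threshold.modules."src/model/"]
#   file_branch_coverage_min = 80
#
# would be parsed roughly like this:
def _example_read_coverage_threshold_table() -> Config:
    return Config.parse({
        "line_coverage_min": 90,
        "modules": {"src/model/": {"file_branch_coverage_min": 80}},
    })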
def test_check_totals() -> None:
    assert (check_all(
        create_report(
            totals=CoverageSummaryModel(covered_lines=3, num_statements=4)),
        Config(line_coverage_min=Decimal("75.0")),
    ) == Pass())
    assert (check_all(
        create_report(
            totals=CoverageSummaryModel(covered_lines=2, num_statements=3)),
        Config(line_coverage_min=Decimal("67.0")),
    ) == Fail(
        ["Total line coverage metric failed, expected 67.0, was 66.6667"]))
def test_checking_branch_coverage_fails_without_branch_report() -> None:
    report = create_report(meta=ReportMetadata(branch_coverage=False))
    expected_error_message = "missing number of branches or number of branches covered"
    with pytest.raises(ValueError) as e:
        check_all(report, Config(branch_coverage_min=Decimal("50.0")))
    assert str(e.value) == expected_error_message
    with pytest.raises(ValueError) as e:
        check_all(report, Config(combined_coverage_min=Decimal("50.0")))
    assert str(e.value) == expected_error_message
    with pytest.raises(ValueError) as e:
        check_all(report, Config(file_branch_coverage_min=Decimal("75.0")))
    assert str(e.value) == expected_error_message
def test_check_totals__with_number_missing_lines_max() -> None:
    # 7 statements with 3 covered leaves 4 missing lines.
    report = create_report(
        totals=CoverageSummaryModel(covered_lines=3, num_statements=7))
    assert (check_all(
        report,
        Config(line_coverage_min=Decimal(0), number_missing_lines_max=5)) == Pass())
    assert (check_all(
        report,
        Config(line_coverage_min=Decimal(0), number_missing_lines_max=4)) == Pass())
    assert check_all(
        report,
        Config(line_coverage_min=Decimal(0), number_missing_lines_max=3)) == Fail([
            "Total number missing lines max failed, expected 3, was 4"
        ])
def test_check_all_files() -> None:
    report = create_report(
        files={
            "a.py": FileCoverageModel(
                summary=CoverageSummaryModel(covered_lines=1, num_statements=2)),
            "b.py": FileCoverageModel(
                summary=CoverageSummaryModel(covered_lines=3, num_statements=4)),
        })
    assert check_all(report, Config(file_line_coverage_min=Decimal("50.0"))) == Pass()
    assert check_all(
        report, Config(file_line_coverage_min=Decimal("75.0"))
    ) == Fail([
        'File: "a.py" failed LINE coverage metric, expected 75.0, was 50.0000'
    ])
def test_check_totals_with_branch_coverage() -> None:
    report = create_report(
        meta=ReportMetadata(branch_coverage=True),
        totals=CoverageSummaryModel(
            covered_lines=5,
            num_statements=5,
            covered_branches=3,
            num_branches=4,
        ),
    )
    assert (check_all(
        report,
        Config(branch_coverage_min=Decimal("75.0")),
    ) == Pass())
    assert (check_all(
        report,
        Config(branch_coverage_min=Decimal("75.001")),
    ) == Fail(
        ["Total branch coverage metric failed, expected 75.001, was 75.0000"]))
def combine_config_with_args(args: ArgsNamespace, config: Config) -> Config:
    # Command-line arguments take precedence over values from the config file.
    return Config(
        line_coverage_min=fallback(args.line_coverage_min,
                                   config.line_coverage_min),
        branch_coverage_min=fallback(args.branch_coverage_min,
                                     config.branch_coverage_min),
        combined_coverage_min=fallback(args.combined_coverage_min,
                                       config.combined_coverage_min),
        number_missing_lines_max=fallback(args.number_missing_lines_max,
                                          config.number_missing_lines_max),
        file_line_coverage_min=fallback(args.file_line_coverage_min,
                                        config.file_line_coverage_min),
        file_branch_coverage_min=fallback(args.file_branch_coverage_min,
                                          config.file_branch_coverage_min),
        file_combined_coverage_min=fallback(args.file_combined_coverage_min,
                                            config.file_combined_coverage_min),
        modules=config.modules,
    )
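# `fallback` is defined elsewhere and not shown in this section. A minimal
# sketch, assuming it mirrors the "arg if not None else config" pattern spelled
# out in the expanded variant of combine_config_with_args below:
from typing import Optional, TypeVar

T = TypeVar("T")


def fallback(override: Optional[T], default: Optional[T]) -> Optional[T]:
    # Prefer the explicitly supplied (command-line) value; otherwise fall back
    # to the value read from the config file.
    return override if override is not None else default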
def combine_config_with_args(args: ArgsNamespace, config: Config) -> Config:
    return Config(
        line_coverage_min=(args.line_coverage_min
                           if args.line_coverage_min is not None
                           else config.line_coverage_min),
        branch_coverage_min=(args.branch_coverage_min
                             if args.branch_coverage_min is not None
                             else config.branch_coverage_min),
        combined_coverage_min=(args.combined_coverage_min
                               if args.combined_coverage_min is not None
                               else config.combined_coverage_min),
        file_line_coverage_min=(args.file_line_coverage_min
                                if args.file_line_coverage_min is not None
                                else config.file_line_coverage_min),
        file_branch_coverage_min=(args.file_branch_coverage_min
                                  if args.file_branch_coverage_min is not None
                                  else config.file_branch_coverage_min),
        file_combined_coverage_min=(args.file_combined_coverage_min
                                    if args.file_combined_coverage_min is not None
                                    else config.file_combined_coverage_min),
        modules=config.modules,
    )
def test_config_parse__empty() -> None:
    assert Config.parse({}) == Config()
def test_config_parse__modules_optional_decimals(field_name: str) -> None:
    assert Config.parse({"modules": {"src/lib/": {field_name: 123}}}) == Config(
        modules={"src/lib/": ModuleConfig(**{field_name: Decimal("123")})}
    )
def test_config_parse__modules_empty() -> None:
    assert Config.parse({"modules": {"src/lib/": {}}}) == Config(
        modules={"src/lib/": ModuleConfig()}
    )
def test_config_parse__optional_decimals(field_name: str) -> None:
    assert Config.parse({field_name: 123}) == Config(
        **{field_name: Decimal("123")}  # type: ignore
    )
    assert Config.parse({field_name: None}) == Config()
def test_config_parse__ignores_extra_fields() -> None:
    assert Config.parse({"lol": 123}) == Config()