Example #1
    def test_map_reduce(self, num_files, num_jobs, num_checkers):
        """Compares the 3 key parameters for check_parallel() produces the same results.

        The intent here is to validate the reduce step: no stats should be lost.

        Checks regression of https://github.com/PyCQA/pylint/issues/4118
        """

        # define the file infos to feed to the runs; the results should only vary
        # with the number of files.
        file_infos = _gen_file_datas(num_files)

        # Loop for single-proc and multi-proc, so we can ensure the same linter-config
        for do_single_proc in range(2):
            linter = PyLinter(reporter=Reporter())

            # Assign between 1 and 3 checkers to the linter; they should not change
            # the results of the lint
            linter.register_checker(ParallelTestChecker(linter))
            if num_checkers > 1:
                linter.register_checker(ExtraParallelTestChecker(linter))
            if num_checkers > 2:
                linter.register_checker(ThirdParallelTestChecker(linter))

            if do_single_proc:
                # establish the baseline
                assert (
                    linter.namespace.jobs == 1
                ), "jobs>1 are ignored when calling _check_files"
                linter._check_files(linter.get_ast, file_infos)
                stats_single_proc = linter.stats
            else:
                check_parallel(
                    linter,
                    jobs=num_jobs,
                    files=file_infos,
                    arguments=None,
                )
                stats_check_parallel = linter.stats
        assert str(stats_single_proc.by_msg) == str(
            stats_check_parallel.by_msg
        ), "Single-proc and check_parallel() should return the same thing"
Example #2
    def test_compare_workers_to_single_proc(self, num_files, num_jobs, num_checkers):
        """Checks that check_parallel() produces the same results across the 3 key parameters.

        The intent here is to ensure that the check_parallel() operates on each file,
        without ordering issues, irrespective of the number of workers used and the
        number of checkers applied.

        This test becomes more important if we want to change how we parametrise the
        checkers, for example if we aim to batch the files across jobs.
        """

        # define the stats we expect to get back from the runs; these should only
        # vary with the number of files.
        expected_stats = LinterStats(
            by_module={
                # pylint: disable-next=consider-using-f-string
                "--test-file_data-name-%d--" % idx: ModuleStats(
                    convention=0,
                    error=0,
                    fatal=0,
                    info=0,
                    refactor=0,
                    statement=18,
                    warning=0,
                )
                for idx in range(num_files)
            }
        )
        expected_stats.by_msg = {}
        expected_stats.convention = 0
        expected_stats.error = 0
        expected_stats.fatal = 0
        expected_stats.info = 0
        expected_stats.refactor = 0
        expected_stats.statement = 18 * num_files
        expected_stats.warning = 0

        file_infos = _gen_file_datas(num_files)

        # Loop for single-proc and multi-proc, so we can ensure the same linter-config
        for do_single_proc in range(2):
            linter = PyLinter(reporter=Reporter())

            # Assign between 1 and 3 checkers to the linter; they should not change
            # the results of the lint
            linter.register_checker(SequentialTestChecker(linter))
            if num_checkers > 1:
                linter.register_checker(ExtraSequentialTestChecker(linter))
            if num_checkers > 2:
                linter.register_checker(ThirdSequentialTestChecker(linter))

            if do_single_proc:
                # establish the baseline
                assert (
                    linter.namespace.jobs == 1
                ), "jobs>1 are ignored when calling _check_files"
                linter._check_files(linter.get_ast, file_infos)
                assert linter.msg_status == 0, "We should not fail the lint"
                stats_single_proc = linter.stats
            else:
                check_parallel(
                    linter,
                    jobs=num_jobs,
                    files=file_infos,
                    arguments=None,
                )
                stats_check_parallel = linter.stats
                assert linter.msg_status == 0, "We should not fail the lint"

        assert str(stats_single_proc) == str(
            stats_check_parallel
        ), "Single-proc and check_parallel() should return the same thing"
        assert str(stats_check_parallel) == str(
            expected_stats
        ), "The lint is returning unexpected results, has something changed?"