Example #1
def test_analyze_explicit_script(linter: PyLinter) -> None:
    linter.set_reporter(testutils.GenericTestReporter())
    linter.check([os.path.join(DATA_DIR, "ascript")])
    assert len(linter.reporter.messages) == 1
    assert linter.reporter.messages[0] == Message(
        msg_id="C0301",
        symbol="line-too-long",
        msg="Line too long (175/100)",
        confidence=interfaces.Confidence(
            name="UNDEFINED",
            description="Warning without any associated confidence level.",
        ),
        location=MessageLocationTuple(
            abspath=os.path.join(abspath(dirname(__file__)),
                                 "ascript").replace(
                                     f"lint{os.path.sep}ascript",
                                     f"data{os.path.sep}ascript"),
            path=f"tests{os.path.sep}data{os.path.sep}ascript",
            module="data.ascript",
            obj="",
            line=2,
            column=0,
            end_line=None,
            end_column=None,
        ),
    )
Example #2
def test_new_message(
        message_definitions: ValuesView[MessageDefinition]) -> None:
    def build_message(message_definition_: MessageDefinition,
                      location_value: MessageLocationTuple) -> Message:
        return Message(
            symbol=message_definition_.symbol,
            msg_id=message_definition_.msgid,
            location=location_value,
            msg=message_definition_.msg,
            confidence=HIGH,
        )

    template = "{path}:{line}:{column}: {msg_id}: {msg} ({symbol})"
    for message_definition in message_definitions:
        if message_definition.msgid == "E1234":
            e1234_message_definition = message_definition
        if message_definition.msgid == "W1234":
            w1234_message_definition = message_definition
    e1234_location_values = MessageLocationTuple(
        abspath="1",
        path="2",
        module="3",
        obj="4",
        line=5,
        column=6,
        end_line=5,
        end_column=9,
    )
    w1234_location_values = MessageLocationTuple(
        abspath="7",
        path="8",
        module="9",
        obj="10",
        line=11,
        column=12,
        end_line=11,
        end_column=14,
    )
    expected = (
        "2:5:6: E1234: Duplicate keyword argument %r in %s call (duplicate-keyword-arg)"
    )
    e1234 = build_message(e1234_message_definition, e1234_location_values)
    w1234 = build_message(w1234_message_definition, w1234_location_values)
    assert e1234.format(template) == expected
    assert w1234.format(template) == "8:11:12: W1234: message (msg-symbol)"
Example #3
def test_addmessage(linter: PyLinter) -> None:
    linter.set_reporter(testutils.GenericTestReporter())
    linter.open()
    linter.set_current_module("0123")
    linter.add_message("C0301", line=1, args=(1, 2))
    linter.add_message("line-too-long", line=2, args=(3, 4))
    assert len(linter.reporter.messages) == 2
    assert linter.reporter.messages[0] == Message(
        msg_id="C0301",
        symbol="line-too-long",
        msg="Line too long (1/2)",
        confidence=interfaces.Confidence(
            name="UNDEFINED",
            description="Warning without any associated confidence level.",
        ),
        location=MessageLocationTuple(
            abspath="0123",
            path="0123",
            module="0123",
            obj="",
            line=1,
            column=0,
            end_line=None,
            end_column=None,
        ),
    )
    assert linter.reporter.messages[1] == Message(
        msg_id="C0301",
        symbol="line-too-long",
        msg="Line too long (3/4)",
        confidence=interfaces.Confidence(
            name="UNDEFINED",
            description="Warning without any associated confidence level.",
        ),
        location=MessageLocationTuple(
            abspath="0123",
            path="0123",
            module="0123",
            obj="",
            line=2,
            column=0,
            end_line=None,
            end_column=None,
        ),
    )
Example #4
def inner(confidence: Confidence = HIGH) -> Message:
    return Message(
        symbol="missing-docstring",
        msg_id="C0123",
        location=MessageLocationTuple("abspath", "path", "module", "obj",
                                      1, 2, 1, 3),
        msg="msg",
        confidence=confidence,
    )
Example #5
def check_parallel(
    linter: "PyLinter",
    jobs: int,
    files: Iterable[FileItem],
    arguments: Union[None, str, Sequence[str]] = None,
) -> None:
    """Use the given linter to lint the files with given amount of workers (jobs).

    This splits the work filestream-by-filestream. If you need to do work across
    multiple files, as in the similarity-checker, then inherit from MapReduceMixin and
    implement the map/reduce mixin functionality.
    """
    # The linter is inherited by all the pool's workers, i.e. the linter
    # is identical to the linter object here. This is required so that
    # a custom PyLinter object can be used.
    initializer = functools.partial(_worker_initialize, arguments=arguments)
    with multiprocessing.Pool(jobs,
                              initializer=initializer,
                              initargs=[dill.dumps(linter)]) as pool:
        linter.open()
        all_stats = []
        all_mapreduce_data = collections.defaultdict(list)

        # Maps each file to be worked on by a single _worker_check_single_file() call,
        # collecting any map/reduce data by checker module so that we can 'reduce' it
        # later.
        for (
                worker_idx,  # used to merge map/reduce data across workers
                module,
                file_path,
                base_name,
                messages,
                stats,
                msg_status,
                mapreduce_data,
        ) in pool.imap_unordered(_worker_check_single_file, files):
            linter.file_state.base_name = base_name
            linter.set_current_module(module, file_path)
            for msg in messages:
                msg = Message(msg[0], msg[1], MessageLocationTuple(*msg[2]),
                              msg[3], msg[4])
                linter.reporter.handle_message(msg)
            all_stats.append(stats)
            all_mapreduce_data[worker_idx].append(mapreduce_data)
            linter.msg_status |= msg_status

        pool.close()
        pool.join()

    _merge_mapreduce_data(linter, all_mapreduce_data)
    linter.stats = merge_stats([linter.stats] + all_stats)

    # Insert stats data to local checkers.
    for checker in linter.get_checkers():
        if checker is not linter:
            checker.stats = linter.stats
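
Example #5 is the driver itself; a minimal sketch of calling it directly might look like the following. It assumes the pylint 2.x layout in which check_parallel lives in pylint.lint.parallel and FileItem in pylint.typing; the reporter choice, the job count, and the "my_module.py" path are illustrative only.

from pylint.lint import PyLinter
from pylint.lint.parallel import check_parallel
from pylint.testutils import GenericTestReporter
from pylint.typing import FileItem

if __name__ == "__main__":  # guard required for multiprocessing on spawn-based platforms
    linter = PyLinter()
    linter.load_default_plugins()
    linter.set_reporter(GenericTestReporter())  # keeps emitted messages in memory

    # FileItem(name, filepath, modpath); the path is purely illustrative.
    files = [FileItem("my_module", "my_module.py", "my_module")]
    check_parallel(linter, jobs=2, files=files)

    for message in linter.reporter.messages:
        print(message.path, message.msg_id, message.msg)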
Example #6
    def __init__(
        self,
        msg_id: str,
        symbol: str,
        location: tuple[str, str, str, str, int, int] | MessageLocationTuple,
        msg: str,
        confidence: Confidence | None,
    ) -> None:
        if not isinstance(location, MessageLocationTuple):
            warn(
                "In pylint 3.0, Messages will only accept a MessageLocationTuple as location parameter",
                DeprecationWarning,
            )
            location = MessageLocationTuple(
                location[0],
                location[1],
                location[2],
                location[3],
                location[4],
                location[5],
                None,
                None,
            )

        self.msg_id = msg_id
        self.symbol = symbol
        self.msg = msg
        self.C = msg_id[0]
        self.category = MSG_TYPES[msg_id[0]]
        self.confidence = confidence or UNDEFINED
        self.abspath = location.abspath
        self.path = location.path
        self.module = location.module
        self.obj = location.obj
        self.line = location.line
        self.column = location.column
        self.end_line = location.end_line
        self.end_column = location.end_column
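
Example #6 shows why the tests above build a MessageLocationTuple explicitly: a plain tuple is still accepted but deprecated. Below is a short sketch of both call forms, assuming the import locations of recent pylint 2.x releases; the field values are placeholders.

import warnings

from pylint.interfaces import HIGH
from pylint.message import Message
from pylint.typing import MessageLocationTuple

# Preferred form: pass an explicit MessageLocationTuple.
location = MessageLocationTuple("abspath", "path", "module", "obj", 1, 2, 1, 3)
message = Message("C0123", "missing-docstring", location, "msg", HIGH)

# Deprecated form: a plain 6-tuple is converted internally, end_line and
# end_column become None, and a DeprecationWarning is emitted.
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    Message("C0123", "missing-docstring",
            ("abspath", "path", "module", "obj", 1, 2), "msg", HIGH)
assert any(issubclass(w.category, DeprecationWarning) for w in caught)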
Example #7
def check_parallel(linter, jobs, files: Iterable[FileItem], arguments=None):
    """Use the given linter to lint the files with given amount of workers (jobs)
    This splits the work filestream-by-filestream. If you need to do work across
    multiple files, as in the similarity-checker, then inherit from MapReduceMixin and
    implement the map/reduce mixin functionality"""
    # The reporter does not need to be passed to worker processes, i.e. the reporter
    # does not need to be pickleable.
    original_reporter = linter.reporter
    linter.reporter = None

    # The linter is inherited by all the pool's workers, i.e. the linter
    # is identical to the linter object here. This is required so that
    # a custom PyLinter object can be used.
    initializer = functools.partial(_worker_initialize, arguments=arguments)
    pool = multiprocessing.Pool(  # pylint: disable=consider-using-with
        jobs,
        initializer=initializer,
        initargs=[linter])
    # ..and now when the workers have inherited the linter, the actual reporter
    # can be set back here on the parent process so that results get stored into
    # correct reporter
    linter.set_reporter(original_reporter)
    linter.open()
    try:
        all_stats = []
        all_mapreduce_data = collections.defaultdict(list)

        # Maps each file to be worked on by a single _worker_check_single_file() call,
        # collecting any map/reduce data by checker module so that we can 'reduce' it
        # later.
        for (
                worker_idx,  # used to merge map/reduce data across workers
                module,
                file_path,
                base_name,
                messages,
                stats,
                msg_status,
                mapreduce_data,
        ) in pool.imap_unordered(_worker_check_single_file, files):
            linter.file_state.base_name = base_name
            linter.set_current_module(module, file_path)
            for msg in messages:
                msg = Message(msg[0], msg[1], MessageLocationTuple(*msg[2]),
                              msg[3], msg[4])
                linter.reporter.handle_message(
                    msg
                )  # type: ignore[attr-defined]  # linter.set_reporter() call above makes linter have a reporter attr
            all_stats.append(stats)
            all_mapreduce_data[worker_idx].append(mapreduce_data)
            linter.msg_status |= msg_status
    finally:
        pool.close()
        pool.join()

    _merge_mapreduce_data(linter, all_mapreduce_data)
    linter.stats = merge_stats([linter.stats] + all_stats)

    # Insert stats data to local checkers.
    for checker in linter.get_checkers():
        if checker is not linter:
            checker.stats = linter.stats
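
Both check_parallel variants point to MapReduceMixin for work that has to span files. Below is a hedged sketch of a checker hooking into that map/reduce pass, assuming MapReduceMixin is importable from pylint.checkers as in pylint 2.x; the checker name and the tallied data are invented for illustration.

from pylint.checkers import BaseChecker, MapReduceMixin


class TokenTallyChecker(BaseChecker, MapReduceMixin):
    """Hypothetical checker: each worker collects data, the parent merges it."""

    name = "token-tally"
    msgs = {}  # this sketch only aggregates data; it emits no messages

    def __init__(self, linter=None):
        super().__init__(linter)
        self._counts = []

    def get_map_data(self):
        # check_parallel gathers this value from every worker process.
        return self._counts

    def reduce_map_data(self, linter, data):
        # Called once in the parent process with one entry per worker; merge them.
        self._counts = [count for worker_counts in data for count in worker_counts]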