def write_message(self, msg: Message) -> None:
    """Convenience method to write a formatted message with class default template.

    ``None`` end positions are rendered as empty strings so the fixed
    template always formats cleanly.
    """
    self_dict = msg._asdict()
    for key in ("end_line", "end_column"):
        # Only substitute for missing values: `or ""` would also clobber a
        # legitimate 0 (column numbers start at 0).
        if self_dict[key] is None:
            self_dict[key] = ""
    self.writeln(self._fixed_template.format(**self_dict))
def test_analyze_explicit_script(linter: PyLinter) -> None:
    """Checking an explicit script path reports exactly one line-too-long."""
    linter.set_reporter(testutils.GenericTestReporter())
    linter.check([os.path.join(DATA_DIR, "ascript")])

    reported = linter.reporter.messages
    assert len(reported) == 1

    # The checked path lives under tests/lint but the fixture under tests/data.
    expected_abspath = os.path.join(abspath(dirname(__file__)), "ascript").replace(
        f"lint{os.path.sep}ascript", f"data{os.path.sep}ascript"
    )
    expected = Message(
        msg_id="C0301",
        symbol="line-too-long",
        msg="Line too long (175/100)",
        confidence=interfaces.Confidence(
            name="UNDEFINED",
            description="Warning without any associated confidence level.",
        ),
        location=MessageLocationTuple(
            abspath=expected_abspath,
            path=f"tests{os.path.sep}data{os.path.sep}ascript",
            module="data.ascript",
            obj="",
            line=2,
            column=0,
            end_line=None,
            end_column=None,
        ),
    )
    assert reported[0] == expected
def check_parallel(linter, jobs, files, arguments=None):
    """Use the given linter to lint the files with given amount of workers (jobs).

    This splits the work filestream-by-filestream. If you need to do work across
    multiple files, as in the similarity-checker, then inherit from MapReduceMixin
    and implement the map/reduce mixin functionality.

    :param linter: PyLinter instance (possibly a custom subclass); it is
        inherited by every worker process via the pool's initargs
    :param jobs: number of worker processes to spawn
    :param files: iterable of file descriptions fed to _worker_check_single_file
    :param arguments: extra arguments forwarded to _worker_initialize
    """
    # The reporter does not need to be passed to worker processes, i.e. the
    # reporter does not need to be pickleable: detach it before the pool
    # inherits the linter, restore it once the workers have been created.
    original_reporter = linter.reporter
    linter.reporter = None

    # The linter is inherited by all the pool's workers, i.e. the linter
    # is identical to the linter object here. This is required so that
    # a custom PyLinter object can be used.
    initializer = functools.partial(_worker_initialize, arguments=arguments)
    pool = multiprocessing.Pool(  # pylint: disable=consider-using-with
        jobs, initializer=initializer, initargs=[linter]
    )
    # ..and now when the workers have inherited the linter, the actual reporter
    # can be set back here on the parent process so that results get stored into
    # correct reporter
    linter.set_reporter(original_reporter)
    linter.open()
    try:
        all_stats = []
        all_mapreduce_data = collections.defaultdict(list)

        # Maps each file to be worked on by a single _worker_check_single_file()
        # call, collecting any map/reduce data by checker module so that we can
        # 'reduce' it later.
        for (
            worker_idx,  # used to merge map/reduce data across workers
            module,
            file_path,
            base_name,
            messages,
            stats,
            msg_status,
            mapreduce_data,
        ) in pool.imap_unordered(_worker_check_single_file, files):
            linter.file_state.base_name = base_name
            linter.set_current_module(module, file_path)
            # Messages arrive from the workers as plain tuples; rebuild
            # Message objects before routing them through the parent reporter.
            for msg in messages:
                msg = Message(*msg)
                linter.reporter.handle_message(msg)
            all_stats.append(stats)
            all_mapreduce_data[worker_idx].append(mapreduce_data)
            linter.msg_status |= msg_status
    finally:
        # Always shut the pool down cleanly, even if reporting raised.
        pool.close()
        pool.join()

    _merge_mapreduce_data(linter, all_mapreduce_data)
    linter.stats = _merge_stats(all_stats)

    # Insert stats data to local checkers.
    for checker in linter.get_checkers():
        if checker is not linter:
            checker.stats = linter.stats
def test_addmessage(linter: PyLinter) -> None:
    """Messages added by id and by symbol are both recorded by the reporter."""
    linter.set_reporter(testutils.GenericTestReporter())
    linter.open()
    linter.set_current_module("0123")
    linter.add_message("C0301", line=1, args=(1, 2))
    linter.add_message("line-too-long", line=2, args=(3, 4))

    def expected(line_no: int, text: str) -> Message:
        # Both calls produce the same message apart from line number and text.
        return Message(
            msg_id="C0301",
            symbol="line-too-long",
            msg=text,
            confidence=interfaces.Confidence(
                name="UNDEFINED",
                description="Warning without any associated confidence level.",
            ),
            location=MessageLocationTuple(
                abspath="0123",
                path="0123",
                module="0123",
                obj="",
                line=line_no,
                column=0,
                end_line=None,
                end_column=None,
            ),
        )

    assert len(linter.reporter.messages) == 2
    assert linter.reporter.messages[0] == expected(1, "Line too long (1/2)")
    assert linter.reporter.messages[1] == expected(2, "Line too long (3/4)")
def build_message(
    message_definition_: MessageDefinition, location_value: MessageLocationTuple
) -> Message:
    """Assemble a high-confidence Message from a definition and a location."""
    definition = message_definition_
    return Message(
        msg_id=definition.msgid,
        symbol=definition.symbol,
        msg=definition.msg,
        location=location_value,
        confidence=HIGH,
    )
def inner(confidence: Confidence = HIGH) -> Message:
    """Build a canned missing-docstring message carrying *confidence*."""
    fixed_location = MessageLocationTuple(
        "abspath", "path", "module", "obj", 1, 2, 1, 3
    )
    return Message(
        msg_id="C0123",
        symbol="missing-docstring",
        msg="msg",
        location=fixed_location,
        confidence=confidence,
    )
def check_parallel( linter: "PyLinter", jobs: int, files: Iterable[FileItem], arguments: Union[None, str, Sequence[str]] = None, ) -> None: """Use the given linter to lint the files with given amount of workers (jobs). This splits the work filestream-by-filestream. If you need to do work across multiple files, as in the similarity-checker, then inherit from MapReduceMixin and implement the map/reduce mixin functionality. """ # The linter is inherited by all the pool's workers, i.e. the linter # is identical to the linter object here. This is required so that # a custom PyLinter object can be used. initializer = functools.partial(_worker_initialize, arguments=arguments) with multiprocessing.Pool(jobs, initializer=initializer, initargs=[dill.dumps(linter)]) as pool: linter.open() all_stats = [] all_mapreduce_data = collections.defaultdict(list) # Maps each file to be worked on by a single _worker_check_single_file() call, # collecting any map/reduce data by checker module so that we can 'reduce' it # later. for ( worker_idx, # used to merge map/reduce data across workers module, file_path, base_name, messages, stats, msg_status, mapreduce_data, ) in pool.imap_unordered(_worker_check_single_file, files): linter.file_state.base_name = base_name linter.set_current_module(module, file_path) for msg in messages: msg = Message(msg[0], msg[1], MessageLocationTuple(*msg[2]), msg[3], msg[4]) linter.reporter.handle_message(msg) all_stats.append(stats) all_mapreduce_data[worker_idx].append(mapreduce_data) linter.msg_status |= msg_status pool.close() pool.join() _merge_mapreduce_data(linter, all_mapreduce_data) linter.stats = merge_stats([linter.stats] + all_stats) # Insert stats data to local checkers. for checker in linter.get_checkers(): if checker is not linter: checker.stats = linter.stats
def handle_message(self, msg: Message) -> None:
    """Manage message of different types, and colorize output using ANSI
    escape codes.
    """
    # Emit a module banner the first time a module is seen.
    if msg.module not in self._modules:
        banner_style = self._get_decoration("S")
        if msg.module:
            banner = colorize_ansi(f"************* Module {msg.module}", banner_style)
        else:
            banner = colorize_ansi(f"************* {msg.module}", banner_style)
        self.writeln(banner)
        self._modules.add(msg.module)
    # Colorize the message fields in place according to its category.
    field_style = self._get_decoration(msg.C)
    for field in ("msg", "symbol", "category", "C"):
        setattr(msg, field, colorize_ansi(getattr(msg, field), field_style))
    self.write_message(msg)
def build_message(message_definition, location_value):
    """Assemble a Message from a definition object and a location mapping."""
    # Ordered to match MessageLocationTuple's positional fields.
    location_fields = ("abspath", "path", "module", "obj", "line", "column")
    return Message(
        symbol=message_definition.symbol,
        msg_id=message_definition.msgid,
        location=[location_value[field] for field in location_fields],
        msg=message_definition.msg,
        confidence="high",
    )
def check_parallel(linter, jobs, files, arguments=None):
    """Use the given linter to lint the files with given amount of workers (jobs)."""
    # Detach the reporter before the pool forks: the workers never use it, so
    # it does not need to be pickleable.
    saved_reporter = linter.reporter
    linter.reporter = None
    # Every worker inherits this exact linter object, so a custom PyLinter
    # subclass keeps working inside the workers.
    initializer = functools.partial(_worker_initialize, arguments=arguments)
    with multiprocessing.Pool(jobs, initializer=initializer, initargs=[linter]) as pool:
        # The workers now hold their copies, so the real reporter can safely
        # be re-attached in the parent process to collect the results.
        linter.set_reporter(saved_reporter)
        linter.open()
        collected_stats = []
        for (
            module,
            file_path,
            base_name,
            messages,
            stats,
            msg_status,
        ) in pool.imap_unordered(_worker_check_single_file, files):
            linter.file_state.base_name = base_name
            linter.set_current_module(module, file_path)
            for raw_message in messages:
                linter.reporter.handle_message(Message(*raw_message))
            collected_stats.append(stats)
            linter.msg_status |= msg_status
    linter.stats = _merge_stats(collected_stats)
    # Propagate the merged stats to every checker except the linter itself.
    for checker in linter.get_checkers():
        if checker is not linter:
            checker.stats = linter.stats
def handle_message(self, msg: Message) -> None:
    """Manage message of different types, and colorize output using ANSI
    escape codes."""
    # Print a module separator the first time a module shows up.
    if msg.module not in self._modules:
        separator_style = self._get_decoration("S")
        if msg.module:
            separator = colorize_ansi(
                f"************* Module {msg.module}", separator_style
            )
        else:
            separator = colorize_ansi(f"************* {msg.module}", separator_style)
        self.writeln(separator)
        self._modules.add(msg.module)
    # Message is immutable, so produce a colorized copy via _replace.
    style = self._get_decoration(msg.C)
    msg = msg._replace(
        msg=colorize_ansi(msg.msg, style),
        symbol=colorize_ansi(msg.symbol, style),
        category=colorize_ansi(msg.category, style),
        C=colorize_ansi(msg.C, style),
    )
    self.write_message(msg)
def write_message(self, msg: Message) -> None:
    """Convenience method to write a formatted message with class default template."""
    rendered = msg.format(self._template)
    self.writeln(rendered)