def _parse_line(self, it: Iterator[str],
                line: str) -> Tuple[List[Report], str]:
    """Parse a single output line.

    Returns the reports found in ``line`` (at most one) together with
    the next line to process, or '' when the input is exhausted.
    """
    match = self.message_line_re.match(line)
    if not match:
        return [], next(it)

    # Paths in the output are relative to the analyzer result file.
    base_dir = os.path.dirname(self.analyzer_result)
    file_path = os.path.normpath(
        os.path.join(base_dir, match.group('path')))

    report = Report(
        get_or_create_file(file_path, self._file_cache),
        int(match.group('line')),
        0,
        match.group('message').strip(),
        '')

    try:
        next_line = next(it)
    except StopIteration:
        next_line = ''

    return [report], next_line
def __parse_report(self, bug) -> Optional[Report]:
    """Parse the given bug dict into a Report.

    Returns None when the source file path cannot be resolved.
    """
    report_hash = bug['hash']
    checker_name = bug['bug_type']
    message = bug['qualifier']
    line = int(bug['line'])
    # Negative column values are normalized to 0.
    column = max(int(bug['column']), 0)

    source_path = self.__get_abs_path(bug['file'])
    if not source_path:
        return None

    report = Report(
        get_or_create_file(
            os.path.abspath(source_path), self.__file_cache),
        line,
        column,
        message,
        checker_name,
        report_hash=report_hash,
        bug_path_events=[])

    # Attach the trace events, then close the path with the main
    # report location as the final event.
    for bug_trace in bug['bug_trace']:
        event = self.__parse_bug_trace(bug_trace)
        if event:
            report.bug_path_events.append(event)

    report.bug_path_events.append(BugPathEvent(
        report.message, report.file, report.line, report.column))

    return report
def __create_report(
    self,
    analyzer_result_file_path: str,
    diag: Dict,
    files: Dict[int, File],
    metadata: Dict[str, Any]
) -> Report:
    """Build a Report object from a single diagnostic dict.

    Location fields fall back to -1 when missing; the file id in the
    diagnostic is resolved through the ``files`` mapping.
    """
    location = diag.get('location', {})
    checker_name = diag.get('check_name', "unknown")

    return Report(
        analyzer_result_file_path=analyzer_result_file_path,
        file=files[location['file']],
        line=location.get('line', -1),
        column=location.get('col', -1),
        message=diag.get('description', ''),
        checker_name=checker_name,
        severity=self.get_severity(checker_name),
        report_hash=diag.get('issue_hash_content_of_line_in_context'),
        analyzer_name=self.__get_analyzer_name(checker_name, metadata),
        category=diag.get('category'),
        source_line=None,
        bug_path_events=self.__get_bug_path_events(diag, files),
        bug_path_positions=self.__get_bug_path_positions(diag, files),
        notes=self.__get_notes(diag, files),
        macro_expansions=self.__get_macro_expansions(diag, files))
def test_report_to_gerrit_conversion(self):
    """ Conversion without directory path just the source filename. """
    report = Report(self._src_files[0], 3, 3, 'some description',
                    'my_checker', report_hash='dummy_hash',
                    severity='LOW')

    os.environ["CC_REPO_DIR"] = self._test_files_dir

    res = gerrit.convert([report])

    expected = {
        "tag": "jenkins",
        "message": "CodeChecker found 1 issue(s) in the code.",
        "labels": {
            "Code-Review": -1,
            "Verified": -1
        },
        "comments": {
            "main.cpp": [{
                "range": {
                    "start_line": 3,
                    "start_character": 3,
                    "end_line": 3,
                    "end_character": 3,
                },
                "message": "[LOW] main.cpp:3:3: "
                           "some description [my_checker]\n sizeof(42);\n",
            }]
        },
    }
    # FIX: assertEquals is a deprecated alias removed in Python 3.12;
    # use assertEqual instead.
    self.assertEqual(res, expected)
def get_reports(self, file_path: str) -> List[Report]:
    """ Get reports from the given analyzer result. """
    reports: List[Report] = []

    if not os.path.exists(file_path):
        LOG.error("Report file does not exist: %s", file_path)
        return reports

    try:
        with open(file_path, 'r',
                  encoding="utf-8", errors="ignore") as f:
            diagnostics = json.load(f)
    except (IOError, json.decoder.JSONDecodeError):
        LOG.error(
            "Failed to parse the given analyzer result '%s'. Please "
            "give a valid json file generated by ESLint.", file_path)
        return reports

    file_cache: Dict[str, File] = {}
    # Source paths in the result are relative to the directory of the
    # analyzer result file.
    result_dir = os.path.dirname(file_path)
    for diag in diagnostics:
        # FIX: the loop previously reassigned 'file_path', so from the
        # second diagnostic onward relative paths were resolved against
        # the previous source file's path instead of the result file's
        # directory.
        source_path = os.path.join(result_dir, diag.get('filePath'))

        if not os.path.exists(source_path):
            LOG.warning("Source file does not exists: %s", source_path)
            continue

        for bug in diag.get('messages', []):
            reports.append(
                Report(
                    get_or_create_file(
                        os.path.abspath(source_path), file_cache),
                    int(bug['line']),
                    int(bug['column']),
                    bug['message'],
                    bug['ruleId']))

    return reports
def test_report_to_gerrit_conversion_abs_filepath(self):
    """ Conversion report with absolute filepath. """
    report = Report(self._src_files[0], 3, 3, 'some description',
                    'my_checker', report_hash='dummy_hash',
                    severity='LOW')

    res = gerrit.convert([report])

    expected = {
        "tag": "jenkins",
        "message": "CodeChecker found 1 issue(s) in the code.",
        "labels": {
            "Code-Review": -1,
            "Verified": -1
        },
        "comments": {
            self._src_files[0].path: [{
                "range": {
                    "start_line": 3,
                    "start_character": 3,
                    "end_line": 3,
                    "end_character": 3,
                },
                "message": "[LOW] {0}:3:3: some description "
                           "[my_checker]\n sizeof(42);\n".format(
                               self._src_files[0].path),
            }]
        },
    }
    # FIX: assertEquals is a deprecated alias removed in Python 3.12;
    # use assertEqual instead.
    self.assertEqual(res, expected)
def to_report(report: ReportData) -> Report:
    """Convert the given thrift report data into a Report object."""
    if report.severity:
        severity = Severity._VALUES_TO_NAMES[report.severity]
    else:
        severity = 'UNSPECIFIED'

    return Report(
        File(report.checkedFile),
        report.line,
        report.column,
        report.checkerMsg,
        report.checkerId,
        severity,
        report.bugHash,
        report.analyzerName)
def to_report(report: ReportData,
              get_file: Callable[[int, str], File]) -> Report:
    """Convert thrift report data (including optional path details)
    into a Report object.

    ``get_file`` resolves a (file id, file path) pair to a File.
    """
    if report.severity:
        severity = Severity._VALUES_TO_NAMES[report.severity]
    else:
        severity = 'UNSPECIFIED'

    bug_path_events: List[BugPathEvent] = []
    bug_path_positions: List[BugPathPosition] = []
    notes: List[BugPathEvent] = []
    macro_expansions: List[MacroExpansion] = []

    details = report.details
    if details:
        def make_range(d):
            # All detail records share the same start/end position fields.
            return Range(d.startLine, d.startCol, d.endLine, d.endCol)

        for event in details.pathEvents:
            bug_path_events.append(BugPathEvent(
                event.msg,
                get_file(event.fileId, event.filePath),
                event.startLine,
                event.startCol,
                make_range(event)))

        for pos in details.executionPath:
            bug_path_positions.append(BugPathPosition(
                get_file(pos.fileId, pos.filePath),
                make_range(pos)))

        for extended in details.extendedData:
            if extended.type == ExtendedReportDataType.NOTE:
                notes.append(BugPathEvent(
                    extended.message,
                    get_file(extended.fileId, extended.filePath),
                    extended.startLine,
                    extended.startCol,
                    make_range(extended)))

            if extended.type == ExtendedReportDataType.MACRO:
                # The macro name is not present in the thrift data.
                macro_expansions.append(MacroExpansion(
                    extended.message,
                    '',
                    get_file(extended.fileId, extended.filePath),
                    extended.startLine,
                    extended.startCol,
                    make_range(extended)))

    return Report(
        get_file(report.fileId, report.checkedFile),
        report.line,
        report.column,
        report.checkerMsg,
        report.checkerId,
        severity,
        report.bugHash,
        report.analyzerName,
        bug_path_events=bug_path_events or None,
        bug_path_positions=bug_path_positions,
        notes=notes,
        macro_expansions=macro_expansions)
def _parse_line(self, it: Iterator[str],
                line: str) -> Tuple[List[Report], str]:
    """Parse one message line and any note lines that follow it.

    Returns the reports found (at most one) and the first line that is
    not part of this message, or '' when the input is exhausted.
    """
    match = self.message_line_re.match(line)
    if match is None:
        return [], next(it)

    # Paths in the output are relative to the analyzer result file.
    result_dir = os.path.dirname(self.analyzer_result)

    report = Report(
        get_or_create_file(
            os.path.normpath(
                os.path.join(result_dir, match.group('path'))),
            self._file_cache),
        int(match.group('line')),
        int(match.group('column')),
        match.group('message').strip(),
        '',
        bug_path_events=[])

    next_line = ''
    try:
        # Consume consecutive note lines; each becomes a path event.
        next_line = next(it)
        note = self.note_line_re.match(next_line)
        while note:
            note_path = os.path.normpath(
                os.path.join(result_dir, note.group('path')))
            report.bug_path_events.append(BugPathEvent(
                note.group('message').strip(),
                get_or_create_file(note_path, self._file_cache),
                int(note.group('line')),
                int(note.group('column'))))

            next_line = next(it)
            note = self.note_line_re.match(next_line)
    except StopIteration:
        next_line = ''
    finally:
        # The main report location always closes the event path.
        report.bug_path_events.append(BugPathEvent(
            report.message, report.file, report.line, report.column))

    return [report], next_line
def __parse_bug(self, bug):
    """Parse one bug XML element into a Report.

    Returns None when the source path cannot be resolved.
    """
    report_hash = bug.attrib.get('instanceHash')
    checker_name = bug.attrib.get('type')
    long_message = bug.find('LongMessage').text

    source_line = bug.find('SourceLine')
    source_path = self.__get_abs_path(
        source_line.attrib.get('sourcepath'))
    if not source_path:
        return

    line = source_line.attrib.get('start')
    col = 0

    # Collect path events from the child elements.
    events = []
    for child in bug:
        if child.tag == 'Class':
            event = self.__event_from_class(child)
        elif child.tag == 'Method':
            event = self.__event_from_method(child)
        else:
            event = None

        if event:
            events.append(event)

    if line is None:
        # <SourceLine> had no 'start' attribute: fall back to the last
        # event with a positive line number (or 0 if there is none).
        line = next((e.line for e in reversed(events) if e.line > 0), 0)

    report = Report(
        get_or_create_file(source_path, self.__file_cache),
        int(line),
        col,
        long_message,
        checker_name,
        report_hash=report_hash,
        bug_path_events=events)

    # The main report location closes the event path.
    report.bug_path_events.append(BugPathEvent(
        report.message, report.file, report.line, report.column))

    return report
def create_report(self, events: List[BugPathEvent], file: File,
                  line: int, column: int, message: str,
                  stack_traces: List[str]) -> Report:
    """ Create a report for the sanitizer output. """
    # The original message must be the last element of the path: it is
    # what quick check displays and the main event shown by the web
    # interface. Note: this appends to the caller's list in place.
    events.append(BugPathEvent(message, file, line, column))

    if stack_traces:
        notes = [BugPathEvent(''.join(stack_traces), file, line, column)]
    else:
        notes = None

    return Report(
        file, line, column, message, self.checker_name,
        bug_path_events=events,
        notes=notes)
def _parse_line(self, it: Iterator[str],
                line: str) -> Tuple[List[Report], str]:
    """Parse one message line and its trailing code/fixit/note sections.

    Returns the generated reports (one per checker name on the line)
    and the first unconsumed line, or '' when input is exhausted.
    """
    match = self.message_line_re.match(line)
    if match is None:
        return [], next(it)

    # One line may carry several comma-separated checker names
    # (a checker plus its aliases).
    checker_names = match.group('checker').strip().split(",")

    report = Report(
        get_or_create_file(
            os.path.abspath(match.group('path')), self._file_cache),
        int(match.group('line')),
        int(match.group('column')),
        match.group('message').strip(),
        checker_names[0],
        bug_path_events=[])

    try:
        next_line = next(it)
        next_line = self._parse_code(it, next_line)
        next_line = self._parse_fixits(report, it, next_line)
        next_line = self._parse_notes(report, it, next_line)
    except StopIteration:
        next_line = ''
    finally:
        # The main report location always closes the event path.
        report.bug_path_events.append(BugPathEvent(
            report.message, report.file, report.line, report.column))

    # When a checker name and the alias of this checker is turned on,
    # Clang Tidy (>v11) will generate only one report where the checker
    # names are concatenated with ',' mark. With this we will generate
    # multiple reports for each checker name / alias.
    reports = []
    for checker_name in checker_names:
        duplicate = deepcopy(report)
        duplicate.checker_name = checker_name
        duplicate.category = self._get_category(checker_name)
        reports.append(duplicate)

    return reports, next_line
def test_Report_to_ReportData(self):
    """ Report to reportData conversion. """
    checker_name = "checker.name"
    report = Report(
        file=File("main.cpp"),
        line=10,
        column=8,
        message="some checker message",
        checker_name=checker_name,
        report_hash="2343we23",
        analyzer_name="dummy.analyzer",
        severity="LOW")

    rep_data = report_type_converter.to_report_data(report)

    # Every field must survive the conversion unchanged.
    self.assertEqual(rep_data.checkerId, report.checker_name)
    self.assertEqual(rep_data.bugHash, report.report_hash)
    self.assertEqual(rep_data.checkedFile, report.file.path)
    self.assertEqual(rep_data.line, report.line)
    self.assertEqual(rep_data.column, report.column)
    self.assertEqual(rep_data.analyzerName, report.analyzer_name)
    self.assertEqual(rep_data.severity, ttypes.Severity.LOW)
def get_reports(self, result_file_path: str) -> List[Report]:
    """ Parse the given analyzer result. """
    reports: List[Report] = []

    if not os.path.exists(result_file_path):
        LOG.error("Report file does not exist: %s", result_file_path)
        return reports

    try:
        with open(result_file_path, 'r', encoding="utf-8",
                  errors="ignore") as report_f:
            bugs = json.load(report_f)
    except (IOError, json.decoder.JSONDecodeError):
        LOG.error(
            "Failed to parse the given analyzer result '%s'. Please "
            "give a valid json file generated by TSLint.",
            result_file_path)
        return reports

    file_cache: Dict[str, File] = {}
    for bug in bugs:
        # Source paths are relative to the analyzer result file.
        file_path = os.path.join(os.path.dirname(result_file_path),
                                 bug.get('name'))
        if not os.path.exists(file_path):
            LOG.warning("Source file does not exists: %s", file_path)
            continue

        # FIX: the variable was misleadingly named 'end_pos' although it
        # holds the report's start position. TSLint positions are
        # zero-based; convert them to one-based.
        start_pos = bug['startPosition']
        line = int(start_pos['line'] + 1)
        col = int(start_pos['character'] + 1)

        reports.append(
            Report(
                get_or_create_file(
                    os.path.abspath(file_path), file_cache),
                line, col, bug['failure'], bug['ruleName']))

    return reports
def test_report_to_gerrit_conversion_report_url(self):
    """ Conversion report with absolute filepath and CC_REPORT_URL env. """
    report = Report(self._src_files[0], 3, 3, 'some description',
                    'my_checker', report_hash='dummy_hash',
                    severity='LOW')

    os.environ["CC_REPO_DIR"] = self._test_files_dir
    os.environ["CC_REPORT_URL"] = "localhost:8080/index.html"

    res = gerrit.convert([report])

    expected = {
        "tag": "jenkins",
        "message": "CodeChecker found 1 issue(s) in the code. "
                   "See: 'localhost:8080/index.html'",
        "labels": {
            "Code-Review": -1,
            "Verified": -1
        },
        "comments": {
            "main.cpp": [{
                "range": {
                    "start_line": 3,
                    "start_character": 3,
                    "end_line": 3,
                    "end_character": 3,
                },
                "message": "[LOW] main.cpp:3:3: "
                           "some description [my_checker]\n sizeof(42);\n",
            }]
        },
    }
    # FIX: assertEquals is a deprecated alias removed in Python 3.12;
    # use assertEqual instead.
    self.assertEqual(res, expected)
def _add_report_hash(self, report: Report):
    """ Generate report hash for the given plist data. """
    # Compute the hash with the CONTEXT_FREE strategy and store it on
    # the report. NOTE(review): presumably this makes the hash
    # independent of surrounding source context — confirm against
    # get_report_hash's documentation.
    report.report_hash = get_report_hash(report, HashType.CONTEXT_FREE)
# Skeleton Report for a 'Division by zero' finding (core.DivideZero);
# presumably used as an expected value when checking parsed analyzer
# output — confirm against the test cases that reference it.
div_zero_skel = Report(
    SRC_FILES[1], 7, 14,
    'Division by zero',
    'core.DivideZero',
    report_hash='79e31a6ba028f0b7d9779faf4a6cb9cf',
    category='Logic error',
    type='Division by zero',
    # Interprocedural path: 'base' is initialized to 0 and passed down
    # through 'test_func' and 'generate_id' to the division site, which
    # is always the last event.
    bug_path_events=[
        BugPathEvent("'base' initialized to 0",
                     SRC_FILES[0], 20, 5,
                     Range(20, 5, 20, 12)),
        BugPathEvent("Passing the value 0 via 1st parameter 'base'",
                     SRC_FILES[0], 21, 15,
                     Range(21, 15, 21, 18)),
        BugPathEvent("Calling 'test_func'",
                     SRC_FILES[0], 21, 5,
                     Range(21, 5, 21, 19)),
        BugPathEvent("Entered call from 'main'",
                     SRC_FILES[0], 6, 1,
                     Range(6, 1, 6, 1)),
        BugPathEvent("Passing the value 0 via 1st parameter 'num'",
                     SRC_FILES[0], 8, 22,
                     Range(8, 22, 8, 25)),
        BugPathEvent("Calling 'generate_id'",
                     SRC_FILES[0], 8, 10,
                     Range(8, 10, 8, 26)),
        BugPathEvent("Entered call from 'test_func'",
                     SRC_FILES[1], 6, 1,
                     Range(6, 1, 6, 1)),
        BugPathEvent("Division by zero",
                     SRC_FILES[1], 7, 14,
                     Range(7, 12, 7, 17))
    ],
    # Control-flow positions visited along the bug path.
    bug_path_positions=[
        BugPathPosition(SRC_FILES[0], Range(19, 5, 19, 7)),
        BugPathPosition(SRC_FILES[0], Range(20, 5, 20, 7)),
        BugPathPosition(SRC_FILES[0], Range(21, 5, 21, 13)),
        BugPathPosition(SRC_FILES[0], Range(6, 1, 6, 4)),
        BugPathPosition(SRC_FILES[0], Range(7, 5, 7, 7)),
        BugPathPosition(SRC_FILES[0], Range(8, 5, 8, 6)),
        BugPathPosition(SRC_FILES[0], Range(8, 22, 8, 25)),
        BugPathPosition(SRC_FILES[0], Range(8, 10, 8, 20)),
        BugPathPosition(SRC_FILES[1], Range(6, 1, 6, 3)),
        BugPathPosition(SRC_FILES[1], Range(7, 14, 7, 14))
    ],
    notes=[],
    macro_expansions=[])
def _add_metadata(self, report: Report):
    """ Add metadata information to the given plist data. """
    # Stamp the report with this tool's name as the analyzer name.
    report.analyzer_name = self.TOOL_NAME
def test_report_to_gerrit_conversion_filter_changed_files(self):
    """Conversion report with changed files filter.

    Reports from the lib.cpp file should be not in the converted list.
    """
    report = Report(self._src_files[0], 3, 3, 'some description',
                    'my_checker', report_hash='dummy_hash',
                    severity='LOW')
    lib_report = Report(self._src_files[1], 3, 3, 'some description',
                        'my_checker', report_hash='dummy_hash',
                        severity='LOW')

    dummy_changed_files_content = {
        "/COMMIT_MSG": {
            "status": "A",
            "lines_inserted": 1,
            "size_delta": 1,
            "size": 100,
        },
        "main.cpp": {
            "lines_inserted": 1,
            "lines_deleted": 1,
            "size_delta": 1,
            "size": 100,
        }
    }
    fd, changed_files_file = tempfile.mkstemp()
    os.write(fd, json.dumps(dummy_changed_files_content).encode("utf-8"))
    os.close(fd)

    os.environ["CC_CHANGED_FILES"] = changed_files_file

    # FIX: clean up the temp file and the environment variable even if
    # the conversion raises, so other tests are not influenced.
    try:
        res = gerrit.convert([report, report, lib_report])
    finally:
        os.remove(os.environ["CC_CHANGED_FILES"])
        # Remove environment variable not to influence the other tests.
        os.environ.pop("CC_CHANGED_FILES")

    review_comments = res["comments"]

    # FIX: assertEquals is a deprecated alias removed in Python 3.12;
    # use assertEqual instead.
    # Reports were found in two source files.
    self.assertEqual(len(review_comments), 1)

    # Two reports in the main.cpp file.
    self.assertEqual(len(review_comments[report.file.path]), 2)

    self.assertIn("CodeChecker found 3 issue(s) in the code.",
                  res["message"])
    self.assertIn(
        "following reports are introduced in files which are not changed",
        res["message"])
    self.assertIn(lib_report.file.path, res["message"])