def test_report_to_gerrit_conversion_abs_filepath(self):
    """ Conversion report with absolute filepath. """
    report = Report(
        self._src_files[0], 3, 3, 'some description', 'my_checker',
        report_hash='dummy_hash', severity='LOW')

    res = gerrit.convert([report])

    # No CC_REPO_DIR is set, so the comment is keyed on the absolute
    # source-file path.
    expected = {
        "tag": "jenkins",
        "message": "CodeChecker found 1 issue(s) in the code.",
        "labels": {
            "Code-Review": -1,
            "Verified": -1
        },
        "comments": {
            self._src_files[0].path: [{
                "range": {
                    "start_line": 3,
                    "start_character": 3,
                    "end_line": 3,
                    "end_character": 3,
                },
                "message": "[LOW] {0}:3:3: some description "
                           "[my_checker]\n sizeof(42);\n".format(
                               self._src_files[0].path),
            }]
        },
    }
    # assertEquals is a deprecated alias, removed in Python 3.12.
    self.assertEqual(res, expected)
def test_report_to_gerrit_conversion(self):
    """ Conversion without directory path just the source filename. """
    report = Report(
        self._src_files[0], 3, 3, 'some description', 'my_checker',
        report_hash='dummy_hash', severity='LOW')

    os.environ["CC_REPO_DIR"] = self._test_files_dir
    # Remove the environment variable after this test so it cannot
    # influence the other tests (previously it leaked).
    self.addCleanup(os.environ.pop, "CC_REPO_DIR", None)

    res = gerrit.convert([report])

    # With CC_REPO_DIR set, the file path is relative to the repo dir.
    expected = {
        "tag": "jenkins",
        "message": "CodeChecker found 1 issue(s) in the code.",
        "labels": {
            "Code-Review": -1,
            "Verified": -1
        },
        "comments": {
            "main.cpp": [{
                "range": {
                    "start_line": 3,
                    "start_character": 3,
                    "end_line": 3,
                    "end_character": 3,
                },
                "message": "[LOW] main.cpp:3:3: "
                           "some description [my_checker]\n sizeof(42);\n",
            }]
        },
    }
    # assertEquals is a deprecated alias, removed in Python 3.12.
    self.assertEqual(res, expected)
def test_report_to_gerrit_conversion_report_url(self):
    """ Conversion report with absolute filepath and CC_REPORT_URL env. """
    report = Report(
        self._src_files[0], 3, 3, 'some description', 'my_checker',
        report_hash='dummy_hash', severity='LOW')

    os.environ["CC_REPO_DIR"] = self._test_files_dir
    os.environ["CC_REPORT_URL"] = "localhost:8080/index.html"
    # Remove the environment variables after this test so they cannot
    # influence the other tests (previously they leaked).
    self.addCleanup(os.environ.pop, "CC_REPO_DIR", None)
    self.addCleanup(os.environ.pop, "CC_REPORT_URL", None)

    res = gerrit.convert([report])

    # CC_REPORT_URL is appended to the summary message.
    expected = {
        "tag": "jenkins",
        "message": "CodeChecker found 1 issue(s) in the code. "
                   "See: 'localhost:8080/index.html'",
        "labels": {
            "Code-Review": -1,
            "Verified": -1
        },
        "comments": {
            "main.cpp": [{
                "range": {
                    "start_line": 3,
                    "start_character": 3,
                    "end_line": 3,
                    "end_character": 3,
                },
                "message": "[LOW] main.cpp:3:3: "
                           "some description [my_checker]\n sizeof(42);\n",
            }]
        },
    }
    # assertEquals is a deprecated alias, removed in Python 3.12.
    self.assertEqual(res, expected)
def test_report_to_gerrit_conversion_filter_changed_files(self):
    """Conversion report with changed files filter.

    Reports from the lib.cpp file should be not in the converted list.
    """
    report = Report(
        self._src_files[0], 3, 3, 'some description', 'my_checker',
        report_hash='dummy_hash', severity='LOW')
    lib_report = Report(
        self._src_files[1], 3, 3, 'some description', 'my_checker',
        report_hash='dummy_hash', severity='LOW')

    # Only main.cpp is listed as changed; lib.cpp reports must be
    # filtered out of the review comments.
    dummy_changed_files_content = {
        "/COMMIT_MSG": {
            "status": "A",
            "lines_inserted": 1,
            "size_delta": 1,
            "size": 100,
        },
        "main.cpp": {
            "lines_inserted": 1,
            "lines_deleted": 1,
            "size_delta": 1,
            "size": 100,
        }
    }
    fd, changed_files_file = tempfile.mkstemp()
    os.write(fd, json.dumps(dummy_changed_files_content).encode("utf-8"))
    os.close(fd)

    os.environ["CC_CHANGED_FILES"] = changed_files_file

    # Clean up the temp file and the environment variable even if the
    # conversion raises, so other tests are not influenced.
    try:
        res = gerrit.convert([report, report, lib_report])
    finally:
        os.remove(os.environ["CC_CHANGED_FILES"])
        os.environ.pop("CC_CHANGED_FILES")

    review_comments = res["comments"]

    # Only the changed file (main.cpp) gets review comments.
    self.assertEqual(len(review_comments), 1)
    # Two reports in the main.cpp file.
    self.assertEqual(len(review_comments[report.file.path]), 2)

    self.assertIn("CodeChecker found 3 issue(s) in the code.",
                  res["message"])
    self.assertIn(
        "following reports are introduced in files which are not changed",
        res["message"])
    self.assertIn(lib_report.file.path, res["message"])
def main(args):
    """
    Entry point for parsing some analysis results and printing them to the
    stdout in a human-readable format.
    """
    # If the given output format is not 'table', redirect logger's output to
    # the stderr.
    stream = None
    if 'export' in args and args.export not in [None, 'table', 'html']:
        stream = 'stderr'

    init_logger(args.verbose if 'verbose' in args else None, stream)

    try:
        cmd_config.check_config_file(args)
    except FileNotFoundError as fnerr:
        LOG.error(fnerr)
        sys.exit(1)

    export = args.export if 'export' in args else None

    # HTML export writes multiple files, so an output directory is mandatory.
    if export == 'html' and 'output_path' not in args:
        LOG.error("Argument --export not allowed without argument --output "
                  "when exporting to HTML.")
        sys.exit(1)

    # Gerrit export is driven by environment variables; bail out early if
    # the mandatory ones are missing (the helper logs the details).
    if export == 'gerrit' and not gerrit.mandatory_env_var_is_set():
        sys.exit(1)

    if export and export not in EXPORT_TYPES:
        LOG.error("Unknown export format: %s", export)
        sys.exit(1)

    context = analyzer_context.get_context()

    # To ensure the help message prints the default folder properly,
    # the 'default' for 'args.input' is a string, not a list.
    # But we need lists for the foreach here to work.
    if isinstance(args.input, str):
        args.input = [args.input]

    src_comment_status_filter = args.review_status

    # Build a suppress handler when a suppress file is given; optionally
    # create the file first when '--export-source-suppress' is requested.
    suppr_handler = None
    if 'suppress' in args:
        __make_handler = False
        if not os.path.isfile(args.suppress):
            if 'create_suppress' in args:
                with open(args.suppress, 'w',
                          encoding='utf-8', errors='ignore') as _:
                    # Just create the file.
                    __make_handler = True
                    LOG.info("Will write source-code suppressions to "
                             "suppress file: %s", args.suppress)
            else:
                LOG.warning("Suppress file '%s' given, but it does not exist"
                            " -- will not suppress anything.", args.suppress)
        else:
            __make_handler = True

        if __make_handler:
            suppr_handler = suppress_handler.\
                GenericSuppressHandler(args.suppress,
                                       'create_suppress' in args,
                                       src_comment_status_filter)
    elif 'create_suppress' in args:
        # '--export-source-suppress' is meaningless without a target file.
        LOG.error("Can't use '--export-source-suppress' unless '--suppress "
                  "SUPPRESS_FILE' is also given.")
        sys.exit(1)

    # Resolve the output location: HTML gets a directory, every other export
    # type gets a single file (whose parent directory is created on demand).
    output_dir_path = None
    output_file_path = None
    if 'output_path' in args:
        output_path = os.path.abspath(args.output_path)

        if export == 'html':
            output_dir_path = output_path
        else:
            if os.path.exists(output_path) and os.path.isdir(output_path):
                # For backward compatibility reason we handle the use case
                # when directory is provided to this command.
                LOG.error("Please provide a file path instead of a directory "
                          "for '%s' export type!", export)
                sys.exit(1)

            if export == 'baseline' and not baseline.check(output_path):
                LOG.error("Baseline files must have '.baseline' extensions.")
                sys.exit(1)

            output_file_path = output_path
            output_dir_path = os.path.dirname(output_file_path)

        if not os.path.exists(output_dir_path):
            os.makedirs(output_dir_path)

    def get_output_file_path(default_file_name: str) -> Optional[str]:
        """ Return an output file path.

        Prefer the explicitly given file path; otherwise place the default
        file name inside the output directory. Returns None when neither
        was configured.
        """
        if output_file_path:
            return output_file_path

        if output_dir_path:
            return os.path.join(output_dir_path, default_file_name)

    # Combine the '--file' filters (include listed files, exclude the rest)
    # with the optional skip file into a single handler list.
    skip_handlers = SkipListHandlers()
    if 'files' in args:
        items = [f"+{file_path}" for file_path in args.files]
        items.append("-*")
        skip_handlers.append(SkipListHandler("\n".join(items)))
    if 'skipfile' in args:
        with open(args.skipfile, 'r',
                  encoding='utf-8', errors='ignore') as f:
            skip_handlers.append(SkipListHandler(f.read()))

    trim_path_prefixes = args.trim_path_prefix if \
        'trim_path_prefix' in args else None

    all_reports = []
    statistics = Statistics()
    file_cache = {}  # For memory efficiency.
    changed_files: Set[str] = set()
    processed_path_hashes = set()
    processed_file_paths = set()
    print_steps = 'print_steps' in args

    html_builder: Optional[report_to_html.HtmlBuilder] = None
    if export == 'html':
        html_builder = report_to_html.HtmlBuilder(
            context.path_plist_to_html_dist,
            context.checker_labels)

    # Walk every analyzer result file, filtering duplicates, skipped and
    # suppressed reports, while collecting statistics on the fly.
    for dir_path, file_paths in report_file.analyzer_result_files(args.input):
        metadata = get_metadata(dir_path)
        for file_path in file_paths:
            reports = report_file.get_reports(
                file_path, context.checker_labels, file_cache)

            reports = reports_helper.skip(
                reports, processed_path_hashes, skip_handlers,
                suppr_handler, src_comment_status_filter)

            statistics.num_of_analyzer_result_files += 1
            for report in reports:
                if report.changed_files:
                    changed_files.update(report.changed_files)
                statistics.add_report(report)

                if trim_path_prefixes:
                    report.trim_path_prefixes(trim_path_prefixes)

            all_reports.extend(reports)

            # Print reports continuously.
            if not export:
                file_report_map = plaintext.get_file_report_map(
                    reports, file_path, metadata)
                plaintext.convert(
                    file_report_map, processed_file_paths, print_steps)
            elif export == 'html':
                print(f"Parsing input file '{file_path}'.")
                report_to_html.convert(
                    file_path, reports, output_dir_path, html_builder)

    # Emit the aggregated output in the requested format.
    if export is None:  # Plain text output
        statistics.write()
    elif export == 'html':
        html_builder.finish(output_dir_path, statistics)
    elif export == 'json':
        data = report_to_json.convert(all_reports)
        dump_json_output(data, get_output_file_path("reports.json"))
    elif export == 'codeclimate':
        data = codeclimate.convert(all_reports)
        dump_json_output(data, get_output_file_path("reports.json"))
    elif export == 'gerrit':
        data = gerrit.convert(all_reports)
        dump_json_output(data, get_output_file_path("reports.json"))
    elif export == 'baseline':
        data = baseline.convert(all_reports)
        output_path = get_output_file_path("reports.baseline")
        if output_path:
            baseline.write(output_path, data)

    reports_helper.dump_changed_files(changed_files)

    # Non-zero exit code signals to CI systems that reports were found.
    if statistics.num_of_reports:
        sys.exit(2)