Example #1
    def test_report_path_hash_generation(self):
        """
        Test report path hash generation.
        """
        clang50_trunk_plist = os.path.join(self.__plist_test_files,
                                           'clang-5.0-trunk.plist')
        files, reports = plist_parser.parse_plist(clang50_trunk_plist, None,
                                                  False)
        self.assertEqual(len(reports), 3,
                         "This test is prepared to handle 3 reports.")

        # Generate dummy file_ids which should come from the database.
        file_ids = {}
        for i, file_name in enumerate(files, 1):
            file_ids[file_name] = i

        report_hash_to_path_hash = {
            '79e31a6ba028f0b7d9779faf4a6cb9cf':
            'c473c1a55df72ea4c6e055e18370ac65',
            '8714f42d8328bc78d5d7bff6ced918cc':
            '94f2a6eee8af6462a810218dff35056a',
            'a6d3464f8aab9eb31a8ea7e167e84322':
            '11f410136724cf43c63526841007897e'
        }

        for report in reports:
            path_hash = get_report_path_hash(report, files)
            bug_hash = report.main['issue_hash_content_of_line_in_context']
            self.assertEqual(path_hash, report_hash_to_path_hash[bug_hash])
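
The expected values above are fixed MD5 digests keyed by bug hash. As a rough illustration of what a path hash covers, a minimal sketch that digests the ordered bug-path locations might look like the following; the exact fields and separators CodeChecker hashes are assumptions here, not the project's real implementation:

    import hashlib

    def example_path_hash(report, files):
        """Hypothetical sketch: digest every bug path position in order."""
        path_content = ''
        for event in report.bug_path:
            loc = event['location']
            # Assumed ingredients: file name, line and column of each event.
            path_content += '%s|%s|%s' % (files[loc['file']],
                                          loc['line'], loc['col'])
        return hashlib.md5(path_content.encode('utf-8')).hexdigest()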
Example #2
    def get_report_dir_results(reportdir):
        all_reports = []
        processed_path_hashes = set()
        for filename in os.listdir(reportdir):
            if filename.endswith(".plist"):
                file_path = os.path.join(reportdir, filename)
                LOG.debug("Parsing:" + file_path)
                try:
                    files, reports = plist_parser.parse_plist(file_path)
                    for report in reports:
                        path_hash = get_report_path_hash(report, files)
                        if path_hash in processed_path_hashes:
                            LOG.debug("Not showing report because it is a "
                                      "deduplication of an already processed "
                                      "report!")
                            LOG.debug("Path hash: %s", path_hash)
                            LOG.debug(report)
                            continue

                        if skip_report_dir_result(report):
                            continue

                        processed_path_hashes.add(path_hash)
                        report.main['location']['file_name'] = \
                            files[int(report.main['location']['file'])]
                        all_reports.append(report)

                except Exception as ex:
                    LOG.error("The plist file '%s' is not valid!", file_path)
                    LOG.error(ex)
        return all_reports
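
A short usage sketch for the helper above; the directory path is illustrative, and the printed keys are the ones the snippet itself populates ('file_name' is filled in before each report is collected):

    reports = get_report_dir_results('/path/to/reportdir')  # illustrative path
    for report in reports:
        main = report.main
        print('%s:%s [%s]' % (main['location']['file_name'],
                              main['location']['line'],
                              main['check_name']))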
Example #3
    def skip_html_report_data_handler(report_hash, source_file, report_line,
                                      checker_name, diag, files):
        """
        Report handler which skips bugs which were suppressed by source code
        comments.
        """
        report = Report(None, diag['path'], files)
        path_hash = get_report_path_hash(report, files)
        if path_hash in processed_path_hashes:
            LOG.debug("Skip report because it is a deduplication of an "
                      "already processed report!")
            LOG.debug("Path hash: %s", path_hash)
            LOG.debug(diag)
            return True

        skip = plist_parser.skip_report(report_hash, source_file, report_line,
                                        checker_name, suppr_handler)
        if not skip:
            processed_path_hashes.add(path_hash)

        return skip
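
skip_html_report_data_handler closes over processed_path_hashes and suppr_handler from its enclosing scope. A hedged sketch of a driver that applies it to plist diagnostic dicts follows; the diag keys mirror the ones the surrounding examples read from report.main, and the driver itself is hypothetical:

    processed_path_hashes = set()  # shared dedup state the handler closes over

    def filter_html_report_data(diags, files):
        """Hypothetical driver: keep diagnostics the handler does not skip."""
        kept = []
        for diag in diags:
            report_hash = diag['issue_hash_content_of_line_in_context']
            source_file = files[diag['location']['file']]
            report_line = diag['location']['line']
            checker_name = diag['check_name']
            if not skip_html_report_data_handler(report_hash, source_file,
                                                 report_line, checker_name,
                                                 diag, files):
                kept.append(diag)
        return kept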
Example #4
    def write(self, file_report_map, output=sys.stdout):
        """
        Format an already parsed plist report file to a more
        human readable format.
        The formatted text is written to the output.
        During writing the output statistics are collected.

        Write out the bugs to the output and collect report statistics.
        """

        severity_stats = defaultdict(int)
        file_stats = defaultdict(int)
        report_count = defaultdict(int)

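        # Process files in increasing order of how many reports they contain.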
        for file_path in sorted(file_report_map,
                                key=lambda key: len(file_report_map[key])):

            non_suppressed = 0
            sorted_reports = sorted(file_report_map[file_path],
                                    key=lambda r: r.main['location']['line'])

            for report in sorted_reports:
                path_hash = get_report_path_hash(report, report.files)
                if path_hash in self._processed_path_hashes:
                    LOG.debug("Not showing report because it is a "
                              "deduplication of an already processed report!")
                    LOG.debug("Path hash: %s", path_hash)
                    LOG.debug(report)
                    continue

                self._processed_path_hashes.add(path_hash)

                events = [i for i in report.bug_path
                          if i.get('kind') == 'event']
                f_path = report.files[events[-1]['location']['file']]
                if self.skiplist_handler and \
                        self.skiplist_handler.should_skip(f_path):
                    LOG.debug("Skipped report in '%s'", f_path)
                    LOG.debug(report)
                    continue

                last_report_event = report.bug_path[-1]
                source_file = \
                    report.files[last_report_event['location']['file']]
                report_line = last_report_event['location']['line']
                report_hash = \
                    report.main['issue_hash_content_of_line_in_context']
                checker_name = report.main['check_name']

                if skip_report(report_hash, source_file, report_line,
                               checker_name, self.src_comment_handler):
                    continue

                file_stats[f_path] += 1
                severity = self.__severity_map.get(checker_name)
                severity_stats[severity] += 1
                report_count["report_count"] += 1

                output.write(self.__format_bug_event(checker_name,
                                                     severity,
                                                     last_report_event,
                                                     source_file))
                output.write('\n')
                output.write(self.__format_location(last_report_event,
                                                    source_file))
                output.write('\n')

                if self.print_steps:
                    output.write('  Report hash: ' + report_hash + '\n')
                    output.write('  Steps:\n')

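                    # Pad step indices to the width of the largest step number.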
                    index_format = '    %%%dd, ' % \
                                   int(math.floor(math.log10(len(events))) + 1)

                    for index, event in enumerate(events):
                        output.write(index_format % (index + 1))
                        source_file = report.files[event['location']['file']]
                        output.write(self.__format_bug_event(None,
                                                             None,
                                                             event,
                                                             source_file))
                        output.write('\n')
                output.write('\n')

                non_suppressed += 1

            base_file = os.path.basename(file_path)
            if non_suppressed == 0:
                output.write('Found no defects in %s\n' % base_file)
            else:
                output.write('Found %d defect(s) in %s\n\n' %
                             (non_suppressed, base_file))

        return {"severity": severity_stats,
                "files":  file_stats,
                "reports": report_count}