Example #1
0
    def __print_bugs(self, bugs):

        report_num = len(bugs)
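        # Width of the step-index column: the number of digits needed to
        # print report_num (e.g. 12 reports -> '    %2d, ').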
        if report_num > 0:
            index_format = '    %%%dd, ' % int(
                math.floor(math.log10(report_num)) + 1)

        non_suppressed = 0
        for bug in bugs:
            last_event = bug.get_last_event()

            if self.skiplist_handler and \
                self.skiplist_handler.should_skip(
                    last_event.start_pos.file_path):
                LOG.debug(bug.hash_value + ' is skipped (in ' +
                          last_event.start_pos.file_path + ")")
                continue

            sp_handler = suppress_handler.SourceSuppressHandler(bug)

            # Check for suppress comment.
            if sp_handler.get_suppressed():
                continue

            self.__output.write(
                self.__format_bug_event(bug.checker_name, last_event))
            self.__output.write('\n')
            self.__output.write(self.__format_location(last_event))
            self.__output.write('\n')
            if self.__print_steps:
                self.__output.write('  Steps:\n')
                for index, event in enumerate(bug.events()):
                    self.__output.write(index_format % (index + 1))
                    self.__output.write(self.__format_bug_event(None, event))
                    self.__output.write('\n')
            self.__output.write('\n')

            non_suppressed += 1

        if non_suppressed == 0:
            self.__output.write('%s found no defects while analyzing %s\n' %
                                (self.buildaction.analyzer_type,
                                 ntpath.basename(self.analyzed_source_file)))
        else:
            self.__output.write(
                '%s found %d defect(s) while analyzing %s\n\n' %
                (self.buildaction.analyzer_type, non_suppressed,
                 ntpath.basename(self.analyzed_source_file)))
Example #2
0
    def get_diff_report_dir(client, baseids, report_dir, cmp_data):
        filtered_reports = []
        report_dir_results = get_report_dir_results(report_dir)
        new_hashes = {}
        suppressed_in_code = []

        for rep in report_dir_results:
            bughash = rep.main['issue_hash_content_of_line_in_context']
            source_file = rep.main['location']['file_name']
            bug_line = rep.main['location']['line']
            new_hashes[bughash] = rep
            sp_handler = suppress_handler.SourceSuppressHandler(
                    source_file,
                    bug_line,
                    bughash,
                    rep.main['check_name'])
            if sp_handler.get_suppressed():
                suppressed_in_code.append(bughash)
                LOG.debug("Bug " + bughash +
                          "is suppressed in code. file:" + source_file +
                          "Line "+str(bug_line))

        base_hashes = client.getDiffResultsHash(baseids,
                                                new_hashes.keys(),
                                                cmp_data.diffType)
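        # Hashes returned by the server that satisfy the requested diff type
        # (e.g. for NEW these are the report dir hashes missing from the
        # baseline).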

        if cmp_data.diffType == ttypes.DiffType.NEW or \
           cmp_data.diffType == ttypes.DiffType.UNRESOLVED:
            # Show reports from the report dir which are not present in the
            # baseline (NEW reports) or appear on both sides (UNRESOLVED
            # reports) and are not suppressed in the code.
            for result in report_dir_results:
                h = result.main['issue_hash_content_of_line_in_context']
                if h in base_hashes and h not in suppressed_in_code:
                    filtered_reports.append(result)
        elif cmp_data.diffType == ttypes.DiffType.RESOLVED:
            # Show bugs in the baseline (server) which are not present in the
            # report dir or are suppressed in the code.
            results = get_diff_base_results(client, baseids, base_hashes,
                                            suppressed_in_code)
            for result in results:
                filtered_reports.append(result)

        return filtered_reports
Example #3
0
    def get_diff_report_dir(client, baseids, report_dir, diff_type):

        report_filter = ttypes.ReportFilter()
        add_filter_conditions(report_filter, args.filter)

        sort_mode = [(ttypes.SortMode(ttypes.SortType.FILENAME,
                                      ttypes.Order.ASC))]
        limit = constants.MAX_QUERY_SIZE
        offset = 0

        base_results = []
        results = client.getRunResults(baseids, limit, offset, sort_mode,
                                       report_filter, None)
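        # Page through the baseline run results limit items at a time until
        # the server returns an empty batch.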
        while results:
            base_results.extend(results)
            offset += limit
            results = client.getRunResults(baseids, limit, offset, sort_mode,
                                           report_filter, None)
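        # Index the baseline results by bug hash for the comparisons below.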
        base_hashes = {}
        for res in base_results:
            base_hashes[res.bugHash] = res

        filtered_reports = []
        new_results = get_report_dir_results(report_dir)
        new_hashes = {}
        suppressed_in_code = []

        for rep in new_results:
            bughash = rep.main['issue_hash_content_of_line_in_context']
            source_file = rep.main['location']['file_name']
            bug_line = rep.main['location']['line']
            new_hashes[bughash] = rep
            sp_handler = suppress_handler.SourceSuppressHandler(
                source_file, bug_line, bughash, rep.main['check_name'])
            if sp_handler.get_suppressed():
                suppressed_in_code.append(bughash)
                LOG.debug("Bug " + bughash + "is suppressed in code. file:" +
                          source_file + "Line " + str(bug_line))

        if diff_type == 'new':
            # Show new reports from the report dir which are not present in
            # the baseline (server) and are not suppressed in the code.
            for result in new_results:
                new_hash = result.main['issue_hash_content_of_line_in_context']
                if new_hash not in base_hashes and \
                        new_hash not in suppressed_in_code:
                    filtered_reports.append(result)
        elif diff_type == 'resolved':
            # Show bugs in the baseline (server) which are not present in the
            # report dir or are suppressed in the code.
            for result in base_results:
                if result.bugHash not in new_hashes or \
                        result.bugHash in suppressed_in_code:
                    filtered_reports.append(result)
        elif diff_type == 'unresolved':
            # Show bugs in the report dir which are not suppressed and are
            # also present in the baseline (server).

            for result in new_results:
                new_hash = result.main['issue_hash_content_of_line_in_context']
                if new_hash in base_hashes and \
                        new_hash not in suppressed_in_code:
                    filtered_reports.append(result)
        return filtered_reports
Example #4
0
    def __store_bugs(self, files, reports, connection, analysis_id):
        file_ids = {}
        # Send content of file to the server if needed.
        for file_name in files:
            file_descriptor = connection.need_file_content(
                self.__run_id, file_name)
            file_ids[file_name] = file_descriptor.fileId

            # Sometimes the file doesn't exist, e.g. when the input of the
            # analysis is pure plist files.
            if not os.path.isfile(file_name):
                LOG.debug(file_name + ' not found, and will not be stored.')
                continue

            if file_descriptor.needed:
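                # The server indicates via 'needed' that it does not have
                # this file's content yet; send it zlib-compressed to keep
                # the upload small.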
                with open(file_name, 'r') as source_file:
                    file_content = source_file.read()
                compressed_file = zlib.compress(file_content,
                                                zlib.Z_BEST_COMPRESSION)
                # TODO: we may not use the file content in the end
                # depending on skippaths.
                LOG.debug('storing file content to the database')
                connection.add_file_content(file_descriptor.fileId,
                                            compressed_file)

        # Skipping reports in header files is handled here.
        report_ids = []
        for report in reports:
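            # The plist bug path mixes 'event' and 'control' entries; keep
            # only the 'event' entries here, the 'control' edges are handled
            # below.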
            events = [i for i in report.bug_path if i.get('kind') == 'event']

            # Skip list handler can be None if no config file is set.
            if self.skiplist_handler:
                # Skip is checked based on the file path of the last reported
                # event.
                # TODO: this should be changed in later versions
                # to use the main diag section to check if the report
                # should be skipped or not.
                if events:
                    f_path = files[events[-1]['location']['file']]
                    if self.skiplist_handler.should_skip(f_path):
                        LOG.debug("%s is skipped (in %s)" % (report, f_path))
                        continue

            # Create remaining data for bugs and send them to the server.
            # In the plist file the source and target of an arrow are given
            # as its starting and ending ranges. The path A->B->C is provided
            # as A->B and B->C, so range B appears twice. Therefore only the
            # target points of the arrows are stored in the loop, and an
            # extra insertion is done for the source of the first arrow
            # before the loop.
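            # Illustration (hypothetical data): for a path A->B->C the plist
            # holds two control edges,
            #     {'edges': [{'start': <range A>, 'end': <range B>}]},
            #     {'edges': [{'start': <range B>, 'end': <range C>}]},
            # so storing the 'end' of every edge plus the 'start' of the
            # first edge yields A, B and C exactly once.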
            bug_paths = []
            report_path = [
                i for i in report.bug_path if i.get('kind') == 'control'
            ]

            if report_path:
                try:
                    start_range = report_path[0]['edges'][0]['start']
                    start1_line = start_range[0]['line']
                    start1_col = start_range[0]['col']
                    start2_line = start_range[1]['line']
                    start2_col = start_range[1]['col']
                    source_file_path = files[start_range[1]['file']]
                    bug_paths.append(
                        shared.ttypes.BugPathPos(start1_line, start1_col,
                                                 start2_line, start2_col,
                                                 file_ids[source_file_path]))
                except IndexError:
                    pass

            for path in report_path:
                try:
                    end_range = path['edges'][0]['end']
                    end1_line = end_range[0]['line']
                    end1_col = end_range[0]['col']
                    end2_line = end_range[1]['line']
                    end2_col = end_range[1]['col']
                    source_file_path = files[end_range[1]['file']]
                    bug_paths.append(
                        shared.ttypes.BugPathPos(end1_line, end1_col,
                                                 end2_line, end2_col,
                                                 file_ids[source_file_path]))
                except IndexError:
                    # Edges might be empty; nothing can be stored.
                    continue

            bug_events = []
            for event in events:
                file_path = files[event['location']['file']]
                bug_events.append(
                    shared.ttypes.BugPathEvent(event['location']['line'],
                                               event['location']['col'],
                                               event['location']['line'],
                                               event['location']['col'],
                                               event['message'],
                                               file_ids[file_path]))

            bug_hash = report.main['issue_hash_content_of_line_in_context']
            checker_name = report.main['check_name']
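            # Map the checker name to a severity level (unknown checkers fall
            # back to 'UNSPECIFIED'), then convert the name to its Thrift
            # enum value.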
            severity_name = self.severity_map.get(checker_name, 'UNSPECIFIED')
            severity = shared.ttypes.Severity._NAMES_TO_VALUES[severity_name]

            last_report_event = report.bug_path[-1]
            source_file = files[last_report_event['location']['file']]
            report_line = last_report_event['location']['line']
            sp_handler = suppress_handler.SourceSuppressHandler(
                source_file, report_line, bug_hash, checker_name)

            # Check for suppress comment.
            supp = sp_handler.get_suppressed()
            if supp:
                connection.add_suppress_bug(self.__run_id, [supp])

            LOG.debug('Storing check results to the database.')

            fpath = files[report.main['location']['file']]
            msg = report.main['description']
            category = report.main['category']
            report_type = report.main['type']
            report_id = connection.add_report(analysis_id, file_ids[fpath],
                                              bug_hash, msg, bug_paths,
                                              bug_events, checker_name,
                                              category, report_type, severity,
                                              supp is not None)

            report_ids.append(report_id)
Example #5
0
    def __store_bugs(self, files, bugs, connection, analysis_id):
        file_ids = {}
        # Send content of file to the server if needed.
        for file_name in files:
            file_descriptor = connection.need_file_content(
                self.__run_id, file_name)
            file_ids[file_name] = file_descriptor.fileId

            # Sometimes the file doesn't exist, e.g. when the input of the
            # analysis is pure plist files.
            if not os.path.isfile(file_name):
                LOG.debug(file_name + ' not found, and will not be stored.')
                continue

            if file_descriptor.needed:
                with open(file_name, 'r') as source_file:
                    file_content = source_file.read()
                compressed_file = zlib.compress(file_content,
                                                zlib.Z_BEST_COMPRESSION)
                # TODO: we may not use the file content in the end
                # depending on skippaths.
                LOG.debug('storing file content to the database')
                connection.add_file_content(file_descriptor.fileId,
                                            compressed_file)

        # Skipping bugs in header files is handled here.
        report_ids = []
        for bug in bugs:
            events = bug.events()

            # Skip list handler can be None if no config file is set.
            if self.skiplist_handler:
                if events and self.skiplist_handler.should_skip(
                        events[-1].start_pos.file_path):
                    # Issue #20: this bug is in a file which should be skipped
                    LOG.debug(bug.hash_value + ' is skipped (in ' +
                              events[-1].start_pos.file_path + ")")
                    continue

            # Create remaining data for bugs and send them to the server.
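            # Every path segment and event is converted into the Thrift types
            # (BugPathPos / BugPathEvent) the server expects.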
            bug_paths = []
            for path in bug.paths():
                bug_paths.append(
                    shared.ttypes.BugPathPos(
                        path.start_pos.line, path.start_pos.col,
                        path.end_pos.line, path.end_pos.col,
                        file_ids[path.start_pos.file_path]))

            bug_events = []
            for event in bug.events():
                bug_events.append(
                    shared.ttypes.BugPathEvent(
                        event.start_pos.line, event.start_pos.col,
                        event.end_pos.line, event.end_pos.col, event.msg,
                        file_ids[event.start_pos.file_path]))

            bug_hash = bug.hash_value

            severity_name = self.severity_map.get(bug.checker_name,
                                                  'UNSPECIFIED')
            severity = shared.ttypes.Severity._NAMES_TO_VALUES[severity_name]

            sp_handler = suppress_handler.SourceSuppressHandler(bug)

            # Check for suppress comment.
            supp = sp_handler.get_suppressed()
            if supp:
                connection.add_suppress_bug(self.__run_id, [supp])

            LOG.debug('Storing check results to the database.')

            report_id = connection.add_report(analysis_id,
                                              file_ids[bug.file_path],
                                              bug_hash, bug.msg, bug_paths,
                                              bug_events, bug.checker_name,
                                              bug.category, bug.type, severity,
                                              supp is not None)

            report_ids.append(report_id)
Example #6
0
    def __print_bugs(self, reports, files):

        report_num = len(reports)
        if report_num > 0:
            index_format = '    %%%dd, ' % \
                int(math.floor(math.log10(report_num)) + 1)

        non_suppressed = 0
        for report in reports:
            events = [i for i in report.bug_path if i.get('kind') == 'event']
            f_path = files[events[-1]['location']['file']]
            if self.skiplist_handler and \
                    self.skiplist_handler.should_skip(f_path):
                LOG.debug("%s is skipped (in %s)" % (report, f_path))
                continue

            bug = {
                'hash_value':
                report.main['issue_hash_content_of_line_in_context'],
                'file_path': f_path
            }
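            # The suppress file lookup only needs the report hash and the
            # file path of the report.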
            if self.suppress_handler and \
                    self.suppress_handler.get_suppressed(bug):
                LOG.debug("Suppressed by suppress file: {0}".format(report))
                continue

            last_report_event = report.bug_path[-1]
            source_file = files[last_report_event['location']['file']]
            report_line = last_report_event['location']['line']
            report_hash = report.main['issue_hash_content_of_line_in_context']
            checker_name = report.main['check_name']
            sp_handler = suppress_handler.SourceSuppressHandler(
                source_file, report_line, report_hash, checker_name)

            # Check for suppress comment.
            suppress_data = sp_handler.get_suppressed()
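            # When a source-code suppress comment is found, suppress_data is
            # a (bug hash, file name, comment) tuple; it is stored in the
            # suppress file below and the report is not printed.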
            if suppress_data:
                if self.suppress_handler:
                    LOG.info("Writing source-code suppress at '{0}:{1}' to "
                             "suppress file".format(source_file, report_line))
                    hash_value, file_name, comment = suppress_data
                    self.suppress_handler.store_suppress_bug_id(
                        hash_value, file_name, comment)

                continue

            self.__output.write(
                self.__format_bug_event(checker_name, last_report_event,
                                        source_file))
            self.__output.write('\n')
            self.__output.write(
                self.__format_location(last_report_event, source_file))
            self.__output.write('\n')
            if self.__print_steps:
                self.__output.write('  Steps:\n')
                for index, event in enumerate(events):
                    self.__output.write(index_format % (index + 1))
                    source_file = files[event['location']['file']]
                    self.__output.write(
                        self.__format_bug_event(None, event, source_file))
                    self.__output.write('\n')
            self.__output.write('\n')

            non_suppressed += 1

        basefile_print = (' ' + os.path.basename(self.analyzed_source_file)
                          if self.analyzed_source_file else '')

        if non_suppressed == 0:
            self.__output.write(
                '%s found no defects while analyzing%s\n' %
                (self.buildaction.analyzer_type, basefile_print))
        else:
            self.__output.write('%s found %d defect(s) while analyzing%s\n\n' %
                                (self.buildaction.analyzer_type,
                                 non_suppressed, basefile_print))
Example #7
0
    def __write_bugs(self, output, reports, files, analyzed_source_file,
                     report_stats):
        """
        Write out the bugs to the output and collect report statistics.
        """

        severity_stats = defaultdict(int)
        file_stats = defaultdict(int)
        report_count = defaultdict(int)
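        # Statistics collected while writing the reports: counts per
        # severity, counts per source file and the total number of
        # non-suppressed reports.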

        report_num = len(reports)
        if report_num > 0:
            index_format = '    %%%dd, ' % \
                           int(math.floor(math.log10(report_num)) + 1)

        non_suppressed = 0
        for report in reports:
            events = [i for i in report.bug_path if i.get('kind') == 'event']
            f_path = files[events[-1]['location']['file']]
            if self.skiplist_handler and \
                    self.skiplist_handler.should_skip(f_path):
                LOG.debug("%s is skipped (in %s)" % (report, f_path))
                continue
            hash_value = report.main['issue_hash_content_of_line_in_context']
            bug = {'hash_value': hash_value, 'file_path': f_path}
            if self.suppress_handler and \
                    self.suppress_handler.get_suppressed(bug):
                LOG.debug("Suppressed by suppress file: {0}".format(report))
                continue

            last_report_event = report.bug_path[-1]
            source_file = files[last_report_event['location']['file']]
            report_line = last_report_event['location']['line']
            report_hash = report.main['issue_hash_content_of_line_in_context']
            checker_name = report.main['check_name']
            sp_handler = suppress_handler.SourceSuppressHandler(
                source_file, report_line, report_hash, checker_name)

            # Check for suppress comment.
            suppress_data = sp_handler.get_suppressed()
            if suppress_data:
                if self.suppress_handler:
                    hash_value, file_name, comment = suppress_data
                    self.suppress_handler.store_suppress_bug_id(
                        hash_value, file_name, comment)

                continue

            file_stats[f_path] += 1
            severity = self.__severity_map.get(checker_name, 'UNSPECIFIED')
            severity_stats[severity] += 1
            report_count["report_count"] += 1

            output.write(
                self.__format_bug_event(checker_name, severity,
                                        last_report_event, source_file))
            output.write('\n')
            output.write(self.__format_location(last_report_event,
                                                source_file))
            output.write('\n')

            if self.print_steps:
                output.write('  Report hash: ' + report_hash + '\n')
                output.write('  Steps:\n')
                for index, event in enumerate(events):
                    output.write(index_format % (index + 1))
                    source_file = files[event['location']['file']]
                    output.write(
                        self.__format_bug_event(None, None, event,
                                                source_file))
                    output.write('\n')
            output.write('\n')

            non_suppressed += 1

        basefile_print = (' ' + os.path.basename(analyzed_source_file)
                          if analyzed_source_file else '')

        if non_suppressed == 0:
            output.write('Found no defects while analyzing%s\n' %
                         (basefile_print))
        else:
            output.write('Found %d defect(s) while analyzing%s\n\n' %
                         (non_suppressed, basefile_print))

        report_stats["severity"] = severity_stats
        report_stats["files"] = file_stats
        report_stats["reports"] = report_count