Example #1
File: report.py  Project: luoyaa/clang
def read_bugs(output_dir, html):
    """ Generate a unique sequence of bugs from given output directory.

    Duplicates can be in a project if the same module was compiled multiple
    times with different compiler options. These would be better to show in
    the final report (cover) only once. """

    parser = parse_bug_html if html else parse_bug_plist
    pattern = '*.html' if html else '*.plist'

    duplicate = duplicate_check(
        lambda bug: '{bug_line}.{bug_path_length}:{bug_file}'.format(**bug))

    bugs = itertools.chain.from_iterable(
        # parser creates a bug generator not the bug itself
        parser(filename)
        for filename in glob.iglob(os.path.join(output_dir, pattern)))

    return (bug for bug in bugs if not duplicate(bug))
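
The `duplicate_check` helper is referenced here but not defined in the snippet. Below is a minimal sketch of how such a stateful predicate factory could look, assuming it simply remembers the hashes it has already seen in an in-memory set (the sketch is an illustration, not the project's actual implementation):

def duplicate_check(hash_function):
    # Hypothetical sketch: return a predicate that is False the first time
    # a given hash is seen and True on every later occurrence.
    seen = set()

    def predicate(entry):
        key = hash_function(entry)
        if key in seen:
            return True   # already emitted once, treat as duplicate
        seen.add(key)
        return False      # first occurrence, keep it

    return predicate

With this shape, `read_bugs` stays lazy: each bug is hashed on the fly and only the first occurrence of a given line/path-length/file combination is yielded.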
Example #2
File: report.py  Project: JaredCJR/clang
def read_bugs(output_dir, html):
    """ Generate a unique sequence of bugs from given output directory.

    Duplicates can be in a project if the same module was compiled multiple
    times with different compiler options. These would be better to show in
    the final report (cover) only once. """

    parser = parse_bug_html if html else parse_bug_plist
    pattern = '*.html' if html else '*.plist'

    duplicate = duplicate_check(
        lambda bug: '{bug_line}.{bug_path_length}:{bug_file}'.format(**bug))

    bugs = itertools.chain.from_iterable(
        # parser creates a bug generator not the bug itself
        parser(filename)
        for filename in glob.iglob(os.path.join(output_dir, pattern)))

    return (bug for bug in bugs if not duplicate(bug))
Example #3
File: intercept.py  Project: blep/Beye
def post_processing(commands):
    # run post processing only if that was requested
    if 'raw_entries' not in args or not args.raw_entries:
        # create entries from the current run
        current = itertools.chain.from_iterable(
            # creates a sequence of entry generators from an exec,
            # but filter out non compiler calls before.
            (format_entry(x) for x in commands if is_compiler_call(x)))
        # read entries from previous run
        if 'append' in args and args.append and os.path.exists(args.cdb):
            with open(args.cdb) as handle:
                previous = iter(json.load(handle))
        else:
            previous = iter([])
        # filter out duplicate entries from both
        duplicate = duplicate_check(entry_hash)
        return (entry for entry in itertools.chain(previous, current)
                if os.path.exists(entry['file']) and not duplicate(entry))
    return commands
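
The `entry_hash` function used for de-duplication is not shown either. A hypothetical sketch, assuming each entry is a compilation-database dictionary with 'directory', 'file' and 'command' keys; the exact keys and normalization are assumptions:

import os

def entry_hash(entry):
    # Hypothetical: identify an entry by its resolved source path plus the
    # command line, so re-recording the same compilation does not add a
    # second record to the database.
    source = os.path.normpath(os.path.join(entry['directory'], entry['file']))
    return '{0}:{1}'.format(source, entry['command'])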
Example #4
def read_bugs(output_dir, html):
    # type: (str, bool) -> Generator[Dict[str, Any], None, None]
    """ Generate a unique sequence of bugs from given output directory.

    Duplicates can be in a project if the same module was compiled multiple
    times with different compiler options. These would be better to show in
    the final report (cover) only once. """
    def empty(file_name):
        return os.stat(file_name).st_size == 0

    duplicate = duplicate_check(
        lambda bug: '{bug_line}.{bug_path_length}:{bug_file}'.format(**bug))

    # get the right parser for the job.
    parser = parse_bug_html if html else parse_bug_plist
    # get the input files, which are not empty.
    pattern = os.path.join(output_dir, '*.html' if html else '*.plist')
    bug_files = (file for file in glob.iglob(pattern) if not empty(file))

    for bug_file in bug_files:
        for bug in parser(bug_file):
            if not duplicate(bug):
                yield bug
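
A hypothetical call site for this generator; the output directory path below is a placeholder, not taken from the snippet:

# consume the generator once and count the unique bugs
bugs = list(read_bugs('/tmp/scan-build-output', html=False))
print('unique bugs found:', len(bugs))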
Example #5
    def post_processing(commands):
        """ To make a compilation database, it needs to filter out commands
        which are not compiler calls. Needs to find the source file name
        from the arguments. And do shell escaping on the command.

        To support incremental builds, it is desired to read elements from
        an existing compilation database from a previous run. These elements
        shall be merged with the new elements. """

        # create entries from the current run
        current = itertools.chain.from_iterable(
            # creates a sequence of entry generators from an exec,
            format_entry(command) for command in commands)
        # read entries from previous run
        if 'append' in args and args.append and os.path.isfile(args.cdb):
            with open(args.cdb) as handle:
                previous = iter(json.load(handle))
        else:
            previous = iter([])
        # filter out duplicate entries from both
        duplicate = duplicate_check(entry_hash)
        return (entry for entry in itertools.chain(previous, current)
                if os.path.exists(entry['file']) and not duplicate(entry))
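
The merged generator returned here is typically written back out as a JSON compilation database. A minimal sketch of that follow-up step, assuming the standard compile_commands.json on-disk format; the function name is hypothetical:

import json

def write_compilation_database(path, entries):
    # materialize the (previous + current) generator and store it as a
    # JSON array, the on-disk format of a compilation database
    with open(path, 'w') as handle:
        json.dump(list(entries), handle, sort_keys=True, indent=4)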