Example #1
0
    def test_no_skip_from_parse(self):
        """Keep everything for analysis, no skipping there."""
        compile_cmds = [
            {"directory": "/tmp/lib1",
             "command": "g++ /tmp/lib1/a.cpp",
             "file": "a.cpp"},
            {"directory": "/tmp/lib1",
             "command": "g++ /tmp/lib1/b.cpp",
             "file": "b.cpp"},
            {"directory": "/tmp/lib2",
             "command": "g++ /tmp/lib2/a.cpp",
             "file": "a.cpp"}]

        skip_list = """
        -*/lib1/*
        """
        # Only the pre-analysis phase filters; analysis keeps everything.
        analysis_skip = SkipListHandlers([SkipListHandler("")])
        pre_analysis_skip = SkipListHandlers([SkipListHandler(skip_list)])

        build_actions, _ = log_parser.parse_unique_log(
            compile_cmds, self.__this_dir,
            analysis_skip_handlers=analysis_skip,
            pre_analysis_skip_handlers=pre_analysis_skip)

        # All three build actions must survive for the analysis phase.
        self.assertEqual(len(build_actions), 3)
Example #2
0
    def test_skip_everything_from_parse(self):
        """Same skip file for pre analysis and analysis. Skip everything."""
        compile_cmds = [
            {"directory": "/tmp/lib1",
             "command": "g++ /tmp/lib1/a.cpp",
             "file": "a.cpp"},
            {"directory": "/tmp/lib1",
             "command": "g++ /tmp/lib1/b.cpp",
             "file": "b.cpp"},
            {"directory": "/tmp/lib2",
             "command": "g++ /tmp/lib2/a.cpp",
             "file": "a.cpp"}]

        skip_list = """
        -*/lib1/*
        -*/lib2/*
        """
        # Both phases use the very same skip patterns.
        analysis_skip = SkipListHandlers([SkipListHandler(skip_list)])
        pre_analysis_skip = SkipListHandlers([SkipListHandler(skip_list)])

        build_actions, _ = log_parser.parse_unique_log(
            compile_cmds, self.__this_dir,
            analysis_skip_handlers=analysis_skip,
            pre_analysis_skip_handlers=pre_analysis_skip)

        # Every source matched a '-' pattern, so nothing remains.
        self.assertEqual(len(build_actions), 0)
Example #3
0
    def test_skip_no_pre_from_parse(self):
        """Keep everything pre analysis needs it in ctu or statistics mode."""
        compile_cmds = [
            {"directory": "/tmp/lib1",
             "command": "g++ /tmp/lib1/a.cpp",
             "file": "a.cpp"},
            {"directory": "/tmp/lib1",
             "command": "g++ /tmp/lib1/b.cpp",
             "file": "b.cpp"},
            {"directory": "/tmp/lib2",
             "command": "g++ /tmp/lib2/a.cpp",
             "file": "a.cpp"}]

        skip_list = """
        -*/lib1/*
        """
        # The analysis phase filters, the pre-analysis phase does not.
        analysis_skip = SkipListHandlers([SkipListHandler(skip_list)])
        pre_analysis_skip = SkipListHandlers([SkipListHandler("")])

        build_actions, _ = log_parser.parse_unique_log(
            compile_cmds, self.__this_dir,
            analysis_skip_handlers=analysis_skip,
            ctu_or_stats_enabled=True,
            pre_analysis_skip_handlers=pre_analysis_skip)

        # With CTU/statistics enabled, pre-analysis keeps every action.
        self.assertEqual(len(build_actions), 3)
    def test_skip_x_header(self):
        """ Test skipping a header file. """
        # Build the skip handlers from the on-disk skip file.
        with open('skip_x_header.txt', encoding="utf-8",
                  errors="ignore") as skip_file:
            handlers = SkipListHandlers([SkipListHandler(skip_file.read())])

        self.__test_skip_reports(
            'x.plist', 'skip_x_header.expected.plist', handlers)
    def test_keep_only_empty(self):
        """ Test skipping all files except empty. """
        # Build the skip handlers from the on-disk skip file.
        with open('keep_only_empty.txt', encoding="utf-8",
                  errors="ignore") as skip_file:
            handlers = SkipListHandlers([SkipListHandler(skip_file.read())])

        self.__test_skip_reports(
            'x.plist', 'keep_only_empty.expected.plist', handlers)
Example #6
0
    def test_skip_everything_from_parse_relative_path(self):
        """
        Same skip file for pre analysis and analysis. Skip everything.
        Source file contains relative path.
        """
        compile_cmds = [
            {"directory": "/tmp/lib1/Debug",
             "command": "g++ ../a.cpp",
             "file": "../a.cpp"},
            {"directory": "/tmp/lib1/Debug/rel",
             "command": "g++ ../../b.cpp",
             "file": "../../b.cpp"},
            {"directory": "/tmp/lib1/Debug",
             "command": "g++ ../d.cpp",
             "file": "../d.cpp"},
            {"directory": "/tmp/lib2/Debug",
             "command": "g++ ../a.cpp",
             "file": "../a.cpp"}]

        skip_list = """
        +/tmp/lib1/d.cpp
        -*/lib1/Debug/rel/../../*
        -*/lib1/a.cpp
        -/tmp/lib2/a.cpp
        """
        # Both phases use the very same skip patterns.
        analysis_skip = SkipListHandlers([SkipListHandler(skip_list)])
        pre_analysis_skip = SkipListHandlers([SkipListHandler(skip_list)])

        build_actions, _ = log_parser.parse_unique_log(
            compile_cmds, self.__this_dir,
            analysis_skip_handlers=analysis_skip,
            pre_analysis_skip_handlers=pre_analysis_skip)

        # Only d.cpp is explicitly kept by the '+' pattern; note its
        # relative path has been resolved to an absolute one.
        self.assertEqual(len(build_actions), 1)
        self.assertEqual(build_actions[0].source, '/tmp/lib1/d.cpp')
Example #7
0
    def test_skip_all_in_pre_from_parse(self):
        """Pre analysis skips everything but keep build action for analysis."""
        compile_cmds = [
            {"directory": "/tmp/lib1",
             "command": "g++ /tmp/lib1/a.cpp",
             "file": "a.cpp"},
            {"directory": "/tmp/lib1",
             "command": "g++ /tmp/lib1/b.cpp",
             "file": "b.cpp"},
            {"directory": "/tmp/lib2",
             "command": "g++ /tmp/lib2/a.cpp",
             "file": "a.cpp"}]

        # The lib2 entry is the only one not skipped by the analysis phase.
        expected = compile_cmds[2]

        skip_list = """
        -*/lib1/*
        """
        pre_skip_list = """
        -*
        """
        analysis_skip = SkipListHandlers([SkipListHandler(skip_list)])
        pre_analysis_skip = SkipListHandlers([SkipListHandler(pre_skip_list)])

        build_actions, _ = log_parser.parse_unique_log(
            compile_cmds, self.__this_dir,
            analysis_skip_handlers=analysis_skip,
            pre_analysis_skip_handlers=pre_analysis_skip)

        self.assertEqual(len(build_actions), 1)

        expected_source = os.path.join(expected['directory'],
                                       expected['file'])
        self.assertEqual(build_actions[0].source, expected_source)
        self.assertEqual(build_actions[0].original_command,
                         expected['command'])
Example #8
0
def main(args):
    """
    Entry point for parsing some analysis results and printing them to the
    stdout in a human-readable format.

    Exits with status 1 on configuration/argument errors and with status 2
    when at least one report was found.
    """
    # If the given output format is not 'table', redirect logger's output to
    # the stderr so log lines do not corrupt the exported data on stdout.
    stream = None
    if 'export' in args and args.export not in [None, 'table', 'html']:
        stream = 'stderr'

    init_logger(args.verbose if 'verbose' in args else None, stream)

    try:
        cmd_config.check_config_file(args)
    except FileNotFoundError as fnerr:
        LOG.error(fnerr)
        sys.exit(1)

    # Validate the requested export format before doing any real work.
    export = args.export if 'export' in args else None
    if export == 'html' and 'output_path' not in args:
        LOG.error("Argument --export not allowed without argument --output "
                  "when exporting to HTML.")
        sys.exit(1)

    if export == 'gerrit' and not gerrit.mandatory_env_var_is_set():
        sys.exit(1)

    if export and export not in EXPORT_TYPES:
        LOG.error("Unknown export format: %s", export)
        sys.exit(1)

    context = analyzer_context.get_context()

    # To ensure the help message prints the default folder properly,
    # the 'default' for 'args.input' is a string, not a list.
    # But we need lists for the foreach here to work.
    if isinstance(args.input, str):
        args.input = [args.input]

    src_comment_status_filter = args.review_status

    # Set up source-code suppression handling when a suppress file is given;
    # the file is created on demand if '--create-suppress' was requested.
    suppr_handler = None
    if 'suppress' in args:
        __make_handler = False
        if not os.path.isfile(args.suppress):
            if 'create_suppress' in args:
                with open(args.suppress,
                          'w',
                          encoding='utf-8',
                          errors='ignore') as _:
                    # Just create the file.
                    __make_handler = True
                    LOG.info(
                        "Will write source-code suppressions to "
                        "suppress file: %s", args.suppress)
            else:
                LOG.warning(
                    "Suppress file '%s' given, but it does not exist"
                    " -- will not suppress anything.", args.suppress)
        else:
            __make_handler = True

        if __make_handler:
            suppr_handler = suppress_handler.\
                GenericSuppressHandler(args.suppress,
                                       'create_suppress' in args,
                                       src_comment_status_filter)
    elif 'create_suppress' in args:
        LOG.error("Can't use '--export-source-suppress' unless '--suppress "
                  "SUPPRESS_FILE' is also given.")
        sys.exit(1)

    # Resolve the output location: a directory for HTML export, a single
    # file for every other export type.
    output_dir_path = None
    output_file_path = None
    if 'output_path' in args:
        output_path = os.path.abspath(args.output_path)

        if export == 'html':
            output_dir_path = output_path
        else:
            if os.path.exists(output_path) and os.path.isdir(output_path):
                # For backward compatibility reason we handle the use case
                # when directory is provided to this command.
                LOG.error(
                    "Please provide a file path instead of a directory "
                    "for '%s' export type!", export)
                sys.exit(1)

            if export == 'baseline' and not baseline.check(output_path):
                LOG.error("Baseline files must have '.baseline' extensions.")
                sys.exit(1)

            output_file_path = output_path
            output_dir_path = os.path.dirname(output_file_path)

        if not os.path.exists(output_dir_path):
            os.makedirs(output_dir_path)

    def get_output_file_path(default_file_name: str) -> Optional[str]:
        """ Return an output file path.

        Prefers the explicitly given file path, otherwise joins the default
        file name onto the output directory; returns None when neither an
        output file nor an output directory was configured.
        """
        if output_file_path:
            return output_file_path

        if output_dir_path:
            return os.path.join(output_dir_path, default_file_name)

    # Build the skip handlers: '--file' keeps only the listed files (the
    # trailing "-*" excludes everything else), and an optional skip file
    # contributes its own +/- patterns.
    skip_handlers = SkipListHandlers()
    if 'files' in args:
        items = [f"+{file_path}" for file_path in args.files]
        items.append("-*")
        skip_handlers.append(SkipListHandler("\n".join(items)))
    if 'skipfile' in args:
        with open(args.skipfile, 'r', encoding='utf-8', errors='ignore') as f:
            skip_handlers.append(SkipListHandler(f.read()))

    trim_path_prefixes = args.trim_path_prefix if \
        'trim_path_prefix' in args else None

    all_reports = []
    statistics = Statistics()
    file_cache = {}  # For memory efficiency.
    changed_files: Set[str] = set()
    processed_path_hashes = set()
    processed_file_paths = set()
    print_steps = 'print_steps' in args

    html_builder: Optional[report_to_html.HtmlBuilder] = None
    if export == 'html':
        html_builder = report_to_html.HtmlBuilder(
            context.path_plist_to_html_dist, context.checker_labels)

    # Collect, filter and (for plain text / HTML) immediately emit reports
    # from every analyzer result file found under the input paths.
    for dir_path, file_paths in report_file.analyzer_result_files(args.input):
        metadata = get_metadata(dir_path)
        for file_path in file_paths:
            reports = report_file.get_reports(file_path,
                                              context.checker_labels,
                                              file_cache)

            # Drop skipped, suppressed and already-seen reports.
            reports = reports_helper.skip(reports, processed_path_hashes,
                                          skip_handlers, suppr_handler,
                                          src_comment_status_filter)

            statistics.num_of_analyzer_result_files += 1
            for report in reports:
                if report.changed_files:
                    changed_files.update(report.changed_files)

                statistics.add_report(report)

                if trim_path_prefixes:
                    report.trim_path_prefixes(trim_path_prefixes)

            all_reports.extend(reports)

            # Print reports continuously.
            if not export:
                file_report_map = plaintext.get_file_report_map(
                    reports, file_path, metadata)
                plaintext.convert(file_report_map, processed_file_paths,
                                  print_steps)
            elif export == 'html':
                print(f"Parsing input file '{file_path}'.")
                report_to_html.convert(file_path, reports, output_dir_path,
                                       html_builder)

    # Emit the aggregated output for the chosen export format.
    if export is None:  # Plain text output
        statistics.write()
    elif export == 'html':
        html_builder.finish(output_dir_path, statistics)
    elif export == 'json':
        data = report_to_json.convert(all_reports)
        dump_json_output(data, get_output_file_path("reports.json"))
    elif export == 'codeclimate':
        data = codeclimate.convert(all_reports)
        dump_json_output(data, get_output_file_path("reports.json"))
    elif export == 'gerrit':
        data = gerrit.convert(all_reports)
        dump_json_output(data, get_output_file_path("reports.json"))
    elif export == 'baseline':
        data = baseline.convert(all_reports)
        output_path = get_output_file_path("reports.baseline")
        if output_path:
            baseline.write(output_path, data)

    reports_helper.dump_changed_files(changed_files)

    # Exit code 2 signals to callers that at least one report was found.
    if statistics.num_of_reports:
        sys.exit(2)