    def __test_skip_reports(self, plist_file_path: str,
                            expected_plist_file_path: str,
                            skip_handlers: SkipListHandlers):
        """ Test skipping reports from a plist file. """
        reports = report_file.get_reports(plist_file_path)
        reports = reports_helper.skip(reports, skip_handlers=skip_handlers)

        expected_reports = report_file.get_reports(expected_plist_file_path)

        self.assertEqual(reports, expected_reports)
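
A minimal sketch of a caller for the helper above, assuming the '+pattern' / '-*' skip-list syntax that main() builds later on this page; the test name and plist paths are hypothetical:

    def test_skip_everything_outside_src(self):
        """ Hypothetical caller: keep only reports from /project/src. """
        skip_handlers = SkipListHandlers()
        skip_handlers.append(SkipListHandler("+/project/src/*\n-*"))

        self.__test_skip_reports(
            "/project/reports/all.plist",       # hypothetical input plist
            "/project/reports/src_only.plist",  # hypothetical expected plist
            skip_handlers)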
Example #2
    def __test_html_builder(self, proj: str) -> str:
        """
        Test building html file from the given proj's plist file.
        """
        html_builder = report_to_html.HtmlBuilder(self.layout_dir)

        proj_dir = os.path.join(self.test_workspace, 'test_files', proj)
        output_dir = os.path.join(proj_dir, 'html')
        if not os.path.exists(output_dir):
            os.mkdir(output_dir)

        processed_path_hashes = set()
        for file_path in glob.glob(os.path.join(proj_dir, "*.plist")):
            file_name = os.path.basename(file_path)
            output_path = os.path.join(output_dir, f"{file_name}.html")

            reports = report_file.get_reports(file_path)
            reports = reports_helper.skip(
                reports, processed_path_hashes)

            report_to_html.convert(
                file_path, reports, output_dir, html_builder)

            html_file_exists = os.path.exists(output_path)
            if reports:
                self.assertTrue(html_file_exists)
            else:
                self.assertFalse(html_file_exists)

        html_builder.create_index_html(output_dir)
        html_builder.create_statistics_html(output_dir)

        index_html = os.path.join(output_dir, 'index.html')
        self.assertTrue(os.path.exists(index_html))

        return output_dir
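
A sketch of a test that might drive the helper above; the project name 'simple' is hypothetical:

    def test_html_builder_simple_project(self):
        """ Hypothetical caller for the HTML builder helper. """
        output_dir = self.__test_html_builder('simple')
        self.assertTrue(os.path.isdir(output_dir))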
Example #3
def main(args):
    """
    Entry point for parsing some analysis results and printing them to the
    stdout in a human-readable format.
    """
    # If the output is exported in a machine-readable format (anything other
    # than 'table' or 'html'), redirect the logger's output to stderr so log
    # messages do not mix with the exported data.
    stream = None
    if 'export' in args and args.export not in [None, 'table', 'html']:
        stream = 'stderr'

    init_logger(args.verbose if 'verbose' in args else None, stream)

    try:
        cmd_config.check_config_file(args)
    except FileNotFoundError as fnerr:
        LOG.error(fnerr)
        sys.exit(1)

    export = args.export if 'export' in args else None
    if export == 'html' and 'output_path' not in args:
        LOG.error("Argument --export not allowed without argument --output "
                  "when exporting to HTML.")
        sys.exit(1)

    if export == 'gerrit' and not gerrit.mandatory_env_var_is_set():
        sys.exit(1)

    if export and export not in EXPORT_TYPES:
        LOG.error("Unknown export format: %s", export)
        sys.exit(1)

    context = analyzer_context.get_context()

    # To ensure the help message prints the default folder properly, the
    # 'default' for 'args.input' is a string, not a list. But we need a list
    # for the iteration below to work.
    if isinstance(args.input, str):
        args.input = [args.input]

    src_comment_status_filter = args.review_status

    suppr_handler = None
    if 'suppress' in args:
        __make_handler = False
        if not os.path.isfile(args.suppress):
            if 'create_suppress' in args:
                with open(args.suppress,
                          'w',
                          encoding='utf-8',
                          errors='ignore') as _:
                    # Just create the file.
                    __make_handler = True
                    LOG.info(
                        "Will write source-code suppressions to "
                        "suppress file: %s", args.suppress)
            else:
                LOG.warning(
                    "Suppress file '%s' given, but it does not exist"
                    " -- will not suppress anything.", args.suppress)
        else:
            __make_handler = True

        if __make_handler:
            suppr_handler = suppress_handler.\
                GenericSuppressHandler(args.suppress,
                                       'create_suppress' in args,
                                       src_comment_status_filter)
    elif 'create_suppress' in args:
        LOG.error("Can't use '--export-source-suppress' unless '--suppress "
                  "SUPPRESS_FILE' is also given.")
        sys.exit(1)

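    # Resolve where the results should be written: HTML export needs a
    # directory, every other export format is written to a single file.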
    output_dir_path = None
    output_file_path = None
    if 'output_path' in args:
        output_path = os.path.abspath(args.output_path)

        if export == 'html':
            output_dir_path = output_path
        else:
            if os.path.exists(output_path) and os.path.isdir(output_path):
                # For backward compatibility reasons we handle the case
                # when a directory is provided to this command.
                LOG.error(
                    "Please provide a file path instead of a directory "
                    "for '%s' export type!", export)
                sys.exit(1)

            if export == 'baseline' and not baseline.check(output_path):
                LOG.error("Baseline files must have '.baseline' extensions.")
                sys.exit(1)

            output_file_path = output_path
            output_dir_path = os.path.dirname(output_file_path)

        if not os.path.exists(output_dir_path):
            os.makedirs(output_dir_path)

    def get_output_file_path(default_file_name: str) -> Optional[str]:
        """ Return an output file path. """
        if output_file_path:
            return output_file_path

        if output_dir_path:
            return os.path.join(output_dir_path, default_file_name)

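    # Build the skip handlers: explicitly listed files become '+path'
    # patterns followed by a catch-all '-*', and the contents of a given
    # skip file are appended as a separate handler.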
    skip_handlers = SkipListHandlers()
    if 'files' in args:
        items = [f"+{file_path}" for file_path in args.files]
        items.append("-*")
        skip_handlers.append(SkipListHandler("\n".join(items)))
    if 'skipfile' in args:
        with open(args.skipfile, 'r', encoding='utf-8', errors='ignore') as f:
            skip_handlers.append(SkipListHandler(f.read()))

    trim_path_prefixes = args.trim_path_prefix if \
        'trim_path_prefix' in args else None

    all_reports = []
    statistics = Statistics()
    file_cache = {}  # For memory efficiency.
    changed_files: Set[str] = set()
    processed_path_hashes = set()
    processed_file_paths = set()
    print_steps = 'print_steps' in args

    html_builder: Optional[report_to_html.HtmlBuilder] = None
    if export == 'html':
        html_builder = report_to_html.HtmlBuilder(
            context.path_plist_to_html_dist, context.checker_labels)

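    # Walk every analyzer result file found under the input paths: load the
    # reports, filter them with the skip and suppress handlers, collect
    # statistics and changed files, then emit per-file output for the plain
    # text and HTML formats.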
    for dir_path, file_paths in report_file.analyzer_result_files(args.input):
        metadata = get_metadata(dir_path)
        for file_path in file_paths:
            reports = report_file.get_reports(file_path,
                                              context.checker_labels,
                                              file_cache)

            reports = reports_helper.skip(reports, processed_path_hashes,
                                          skip_handlers, suppr_handler,
                                          src_comment_status_filter)

            statistics.num_of_analyzer_result_files += 1
            for report in reports:
                if report.changed_files:
                    changed_files.update(report.changed_files)

                statistics.add_report(report)

                if trim_path_prefixes:
                    report.trim_path_prefixes(trim_path_prefixes)

            all_reports.extend(reports)

            # Print reports continuously.
            if not export:
                file_report_map = plaintext.get_file_report_map(
                    reports, file_path, metadata)
                plaintext.convert(file_report_map, processed_file_paths,
                                  print_steps)
            elif export == 'html':
                print(f"Parsing input file '{file_path}'.")
                report_to_html.convert(file_path, reports, output_dir_path,
                                       html_builder)

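    # Write the aggregated output: plain text mode only needs the summary
    # here (the reports were already printed above), while the export
    # formats convert the collected reports in one go.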
    if export is None:  # Plain text output
        statistics.write()
    elif export == 'html':
        html_builder.finish(output_dir_path, statistics)
    elif export == 'json':
        data = report_to_json.convert(all_reports)
        dump_json_output(data, get_output_file_path("reports.json"))
    elif export == 'codeclimate':
        data = codeclimate.convert(all_reports)
        dump_json_output(data, get_output_file_path("reports.json"))
    elif export == 'gerrit':
        data = gerrit.convert(all_reports)
        dump_json_output(data, get_output_file_path("reports.json"))
    elif export == 'baseline':
        data = baseline.convert(all_reports)
        output_path = get_output_file_path("reports.baseline")
        if output_path:
            baseline.write(output_path, data)

    reports_helper.dump_changed_files(changed_files)

    if statistics.num_of_reports:
        sys.exit(2)
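
EXPORT_TYPES itself does not appear in this snippet; a plausible definition, assumed only from the export branches handled in main() above, might look like this:

# Assumption: EXPORT_TYPES is defined elsewhere in the real module. The
# values below merely mirror the branches main() handles and may not match
# the original list exactly.
EXPORT_TYPES = ['html', 'json', 'codeclimate', 'gerrit', 'baseline']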