Example 1
def collect_results(args, start_time):
    """Walks through buck-gen, collects results for the different buck targets
    and stores them in args.infer_out/results.csv.
    """
    buck_stats = get_buck_stats()
    logging.info(buck_stats)
    with open(os.path.join(args.infer_out, ANALYSIS_SUMMARY_OUTPUT), 'w') as f:
        f.write(buck_stats)

    all_csv_rows = set()
    all_json_rows = set()
    headers = []
    stats = init_stats(args, start_time)

    accumulation_whitelist = list(
        map(re.compile, [
            '^cores$',
            '^time$',
            '^start_time$',
            '.*_pc',
        ]))

    expected_analyzer = stats['normal']['analyzer']
    expected_version = stats['normal']['infer_version']

    for root, _, files in os.walk(DEFAULT_BUCK_OUT_GEN):
        for f in [f for f in files if f.endswith('.jar')]:
            path = os.path.join(root, f)
            try:
                with zipfile.ZipFile(path) as jar:
                    # Accumulate integers and float values
                    target_stats = load_stats(jar)

                    found_analyzer = target_stats['normal']['analyzer']
                    found_version = target_stats['normal']['infer_version']

                    if (found_analyzer != expected_analyzer
                            or found_version != expected_version):
                        continue
                    else:
                        for type_k in ['int', 'float']:
                            items = target_stats.get(type_k, {}).items()
                            for key, value in items:
                                if not any(
                                        map(lambda r: r.match(key),
                                            accumulation_whitelist)):
                                    old_value = stats[type_k].get(key, 0)
                                    stats[type_k][key] = old_value + value

                    csv_rows = load_csv_report(jar)
                    if len(csv_rows) > 0:
                        headers.append(csv_rows[0])
                        for row in csv_rows[1:]:
                            all_csv_rows.add(tuple(row))

                    json_rows = load_json_report(jar)
                    for row in json_rows:
                        all_json_rows.add(json.dumps(row))

                    # Override normals
                    stats['normal'].update(target_stats.get('normal', {}))
            except NotFoundInJar:
                pass
            except zipfile.BadZipfile:
                logging.warning('Bad zip file %s', path)

    csv_report = os.path.join(args.infer_out, config.CSV_REPORT_FILENAME)
    json_report = os.path.join(args.infer_out, config.JSON_REPORT_FILENAME)
    bugs_out = os.path.join(args.infer_out, config.BUGS_FILENAME)

    if len(headers) == 0:
        with open(csv_report, 'w'):
            pass
        logging.info('No reports found')
        return
    elif len(headers) > 1:
        if any(map(lambda x: x != headers[0], headers)):
            raise Exception('Inconsistent reports found')

    # Convert all float values to integer values
    for key, value in stats.get('float', {}).items():
        stats['int'][key] = int(round(value))

    # Delete the float entries before exporting the results
    del stats['float']

    with open(csv_report, 'w') as report:
        writer = csv.writer(report)
        all_csv_rows = [list(row) for row in all_csv_rows]
        writer.writerows([headers[0]] + all_csv_rows)
        report.flush()

    with open(json_report, 'w') as report:
        json_string = '['
        json_string += ','.join(all_json_rows)
        json_string += ']'
        report.write(json_string)
        report.flush()

    print('\n')
    issues.print_and_save_errors(json_report, bugs_out)

    stats['int']['total_time'] = int(round(utils.elapsed_time(start_time)))

    store_performances_csv(args.infer_out, stats)

    stats_filename = os.path.join(args.infer_out, config.STATS_FILENAME)
    utils.dump_json_to_path(stats, stats_filename)

    basic_stats = get_basic_stats(stats)

    if args.print_harness:
        harness_code = get_harness_code()
        basic_stats += harness_code

    logging.info(basic_stats)

    with open(os.path.join(args.infer_out, ANALYSIS_SUMMARY_OUTPUT), 'a') as f:
        f.write(basic_stats)
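
The example above relies on helpers that are not shown: load_stats, load_csv_report, load_json_report, and the NotFoundInJar exception. The following is a minimal sketch of how such helpers could read report members out of each target jar; the member names ('infer/stats.json', 'infer/report.csv', 'infer/report.json') are illustrative assumptions, not the paths actually used by Infer.

import csv
import io
import json


class NotFoundInJar(Exception):
    """Raised when an expected report member is missing from a target jar."""


def _read_member(jar, name):
    # ZipFile.open raises KeyError for a missing member; translate that into
    # the domain-specific exception that collect_results catches.
    try:
        with jar.open(name) as member:
            return member.read().decode('utf-8')
    except KeyError:
        raise NotFoundInJar(name)


def load_stats(jar):
    # Assumed member name, for illustration only.
    return json.loads(_read_member(jar, 'infer/stats.json'))


def load_csv_report(jar):
    # Returns a list of rows, header row first.
    text = _read_member(jar, 'infer/report.csv')
    return list(csv.reader(io.StringIO(text)))


def load_json_report(jar):
    # Assumed member name, for illustration only.
    return json.loads(_read_member(jar, 'infer/report.json'))
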
Example 2
def collect_results(args, start_time):
    """Walks through buck-gen, collects results for the different buck targets
    and stores them in args.infer_out/results.csv.
    """
    buck_stats = get_buck_stats()
    logging.info(buck_stats)
    with open(os.path.join(args.infer_out, ANALYSIS_SUMMARY_OUTPUT), "w") as f:
        f.write(buck_stats)

    all_csv_rows = set()
    all_json_rows = set()
    headers = []
    stats = init_stats(args, start_time)

    accumulation_whitelist = list(map(re.compile, ["^cores$", "^time$", "^start_time$", ".*_pc"]))

    expected_analyzer = stats["normal"]["analyzer"]
    expected_version = stats["normal"]["infer_version"]

    for root, _, files in os.walk(DEFAULT_BUCK_OUT_GEN):
        for f in [f for f in files if f.endswith(".jar")]:
            path = os.path.join(root, f)
            try:
                with zipfile.ZipFile(path) as jar:
                    # Accumulate integers and float values
                    target_stats = load_stats(jar)

                    found_analyzer = target_stats["normal"]["analyzer"]
                    found_version = target_stats["normal"]["infer_version"]

                    if found_analyzer != expected_analyzer or found_version != expected_version:
                        continue
                    else:
                        for type_k in ["int", "float"]:
                            items = target_stats.get(type_k, {}).items()
                            for key, value in items:
                                if not any(map(lambda r: r.match(key), accumulation_whitelist)):
                                    old_value = stats[type_k].get(key, 0)
                                    stats[type_k][key] = old_value + value

                    csv_rows = load_csv_report(jar)
                    if len(csv_rows) > 0:
                        headers.append(csv_rows[0])
                        for row in csv_rows[1:]:
                            all_csv_rows.add(tuple(row))

                    json_rows = load_json_report(jar)
                    for row in json_rows:
                        all_json_rows.add(json.dumps(row))

                    # Override normals
                    stats["normal"].update(target_stats.get("normal", {}))
            except NotFoundInJar:
                pass
            except zipfile.BadZipfile:
                logging.warn("Bad zip file %s", path)

    csv_report = os.path.join(args.infer_out, config.CSV_REPORT_FILENAME)
    json_report = os.path.join(args.infer_out, config.JSON_REPORT_FILENAME)
    bugs_out = os.path.join(args.infer_out, config.BUGS_FILENAME)

    if len(headers) > 1:
        if any(map(lambda x: x != headers[0], headers)):
            raise Exception("Inconsistent reports found")

    # Convert all float values to integer values
    for key, value in stats.get("float", {}).items():
        stats["int"][key] = int(round(value))

    # Delete the float entries before exporting the results
    del (stats["float"])

    with open(csv_report, "w") as report:
        if len(headers) > 0:
            writer = csv.writer(report)
            all_csv_rows = [list(row) for row in all_csv_rows]
            writer.writerows([headers[0]] + all_csv_rows)
            report.flush()

    with open(json_report, "w") as report:
        json_string = "["
        json_string += ",".join(all_json_rows)
        json_string += "]"
        report.write(json_string)
        report.flush()

    print("\n")
    xml_out = None
    if args.pmd_xml:
        xml_out = os.path.join(args.infer_out, config.PMD_XML_FILENAME)
    issues.print_and_save_errors(json_report, bugs_out, xml_out)

    stats["int"]["total_time"] = int(round(utils.elapsed_time(start_time)))

    store_performances_csv(args.infer_out, stats)

    stats_filename = os.path.join(args.infer_out, config.STATS_FILENAME)
    utils.dump_json_to_path(stats, stats_filename)

    basic_stats = get_basic_stats(stats)

    if args.print_harness:
        harness_code = get_harness_code()
        basic_stats += harness_code

    logging.info(basic_stats)

    with open(os.path.join(args.infer_out, ANALYSIS_SUMMARY_OUTPUT), "a") as f:
        f.write(basic_stats)
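
The accumulation_whitelist decides which stats keys are excluded from per-target summation, presumably absolute values such as core counts, timestamps, and percentage-style keys that should not be added across targets. A small self-contained sketch of the matching behaviour, reusing the same patterns:

import re

accumulation_whitelist = list(map(re.compile, [
    '^cores$',
    '^time$',
    '^start_time$',
    '.*_pc',
]))


def is_whitelisted(key):
    # re.match anchors at the start of the string only, so '.*_pc' also
    # matches keys that merely contain '_pc' after the start.
    return any(r.match(key) for r in accumulation_whitelist)


assert is_whitelisted('cores')           # never summed across targets
assert is_whitelisted('reported_pc')     # percentage-style key
assert not is_whitelisted('total_time')  # accumulated normally
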
Example 3
def collect_results(args, start_time, targets):
    """Walks through buck-gen, collects results for the different buck targets
    and stores them in in args.infer_out/results.csv.
    """
    buck_stats = get_buck_stats()
    logging.info(buck_stats)
    with open(os.path.join(args.infer_out, ANALYSIS_SUMMARY_OUTPUT), 'w') as f:
        f.write(buck_stats)

    all_csv_rows = set()
    all_json_rows = set()
    headers = []
    stats = init_stats(args, start_time)

    accumulation_whitelist = list(map(re.compile, [
        '^cores$',
        '^time$',
        '^start_time$',
        '.*_pc',
    ]))

    expected_analyzer = stats['normal']['analyzer']
    expected_version = stats['normal']['infer_version']

    for path in get_output_jars(targets):
        try:
            with zipfile.ZipFile(path) as jar:
                # Accumulate integers and float values
                target_stats = load_stats(jar)

                found_analyzer = target_stats['normal']['analyzer']
                found_version = target_stats['normal']['infer_version']

                if found_analyzer != expected_analyzer \
                        or found_version != expected_version:
                    continue
                else:
                    for type_k in ['int', 'float']:
                        items = target_stats.get(type_k, {}).items()
                        for key, value in items:
                            if not any(map(lambda r: r.match(key),
                                           accumulation_whitelist)):
                                old_value = stats[type_k].get(key, 0)
                                stats[type_k][key] = old_value + value

                csv_rows = load_csv_report(jar)
                if len(csv_rows) > 0:
                    headers.append(csv_rows[0])
                    for row in csv_rows[1:]:
                        all_csv_rows.add(tuple(row))

                json_rows = load_json_report(jar)
                for row in json_rows:
                    all_json_rows.add(json.dumps(row))

                # Override normals
                stats['normal'].update(target_stats.get('normal', {}))
        except NotFoundInJar:
            pass
        except zipfile.BadZipfile:
            logging.warning('Bad zip file %s', path)

    csv_report = os.path.join(args.infer_out, config.CSV_REPORT_FILENAME)
    json_report = os.path.join(args.infer_out, config.JSON_REPORT_FILENAME)
    bugs_out = os.path.join(args.infer_out, config.BUGS_FILENAME)

    if len(headers) > 1:
        if any(map(lambda x: x != headers[0], headers)):
            raise Exception('Inconsistent reports found')

    # Convert all float values to integer values
    for key, value in stats.get('float', {}).items():
        stats['int'][key] = int(round(value))

    # Delete the float entries before exporting the results
    del stats['float']

    with open(csv_report, 'w') as report:
        if len(headers) > 0:
            writer = csv.writer(report)
            all_csv_rows = [list(row) for row in all_csv_rows]
            writer.writerows([headers[0]] + all_csv_rows)
            report.flush()

    with open(json_report, 'w') as report:
        json_string = '['
        json_string += ','.join(all_json_rows)
        json_string += ']'
        report.write(json_string)
        report.flush()

    print('\n')
    xml_out = None
    if args.pmd_xml:
        xml_out = os.path.join(args.infer_out,
                               config.PMD_XML_FILENAME)
    issues.print_and_save_errors(json_report, bugs_out, xml_out)

    stats['int']['total_time'] = int(round(utils.elapsed_time(start_time)))

    store_performances_csv(args.infer_out, stats)

    stats_filename = os.path.join(args.infer_out, config.STATS_FILENAME)
    utils.dump_json_to_path(stats, stats_filename)

    basic_stats = get_basic_stats(stats)

    if args.print_harness:
        harness_code = get_harness_code()
        basic_stats += harness_code

    logging.info(basic_stats)

    with open(os.path.join(args.infer_out, ANALYSIS_SUMMARY_OUTPUT), 'a') as f:
        f.write(basic_stats)
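
Examples 3 and 4 replace the walk over DEFAULT_BUCK_OUT_GEN with get_output_jars(targets), whose implementation is not shown. Below is a minimal sketch of one way such a helper could resolve jar paths; shelling out to 'buck targets --show-output' and the 'target path' line format are assumptions made for illustration, not a description of the real helper.

import subprocess


def get_output_jars(targets):
    # Hypothetical sketch: ask buck for each target's output file and keep
    # only jar outputs. The flag name and output format are assumptions.
    jars = []
    for target in targets:
        out = subprocess.check_output(
            ['buck', 'targets', '--show-output', target])
        for line in out.decode('utf-8').splitlines():
            parts = line.split()
            if len(parts) == 2 and parts[1].endswith('.jar'):
                jars.append(parts[1])
    return jars
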
Example 4
def collect_results(args, start_time, targets):
    """Walks through buck-gen, collects results for the different buck targets
    and stores them in in args.infer_out/results.csv.
    """
    buck_stats = get_buck_stats()
    logging.info(buck_stats)
    with open(os.path.join(args.infer_out, ANALYSIS_SUMMARY_OUTPUT), 'w') as f:
        f.write(buck_stats)

    all_json_rows = set()
    stats = init_stats(args, start_time)

    accumulation_whitelist = list(map(re.compile, [
        '^cores$',
        '^time$',
        '^start_time$',
        '.*_pc',
    ]))

    expected_analyzer = stats['normal']['analyzer']
    expected_version = stats['normal']['infer_version']

    for path in get_output_jars(targets):
        try:
            with zipfile.ZipFile(path) as jar:
                # Accumulate integers and float values
                target_stats = load_stats(jar)

                found_analyzer = target_stats['normal']['analyzer']
                found_version = target_stats['normal']['infer_version']

                if found_analyzer != expected_analyzer \
                        or found_version != expected_version:
                    continue
                else:
                    for type_k in ['int', 'float']:
                        items = target_stats.get(type_k, {}).items()
                        for key, value in items:
                            if not any(map(lambda r: r.match(key),
                                           accumulation_whitelist)):
                                old_value = stats[type_k].get(key, 0)
                                stats[type_k][key] = old_value + value

                json_rows = load_json_report(jar)
                for row in json_rows:
                    all_json_rows.add(json.dumps(row))

                # Override normals
                stats['normal'].update(target_stats.get('normal', {}))
        except NotFoundInJar:
            pass
        except zipfile.BadZipfile:
            logging.warning('Bad zip file %s', path)

    json_report = os.path.join(args.infer_out, config.JSON_REPORT_FILENAME)

    # Convert all float values to integer values
    for key, value in stats.get('float', {}).items():
        stats['int'][key] = int(round(value))

    # Delete the float entries before exporting the results
    del stats['float']

    with open(json_report, 'w') as report:
        json_string = '['
        json_string += ','.join(all_json_rows)
        json_string += ']'
        report.write(json_string)
        report.flush()

    print('\n')
    bugs_out = os.path.join(args.infer_out, config.BUGS_FILENAME)
    issues.print_and_save_errors(args.infer_out, args.project_root,
                                 json_report, bugs_out, args.pmd_xml)

    stats['int']['total_time'] = int(round(utils.elapsed_time(start_time)))

    store_performances_csv(args.infer_out, stats)

    stats_filename = os.path.join(args.infer_out, config.STATS_FILENAME)
    utils.dump_json_to_path(stats, stats_filename)

    basic_stats = get_basic_stats(stats)

    if args.print_harness:
        harness_code = get_harness_code()
        basic_stats += harness_code

    logging.info(basic_stats)

    with open(os.path.join(args.infer_out, ANALYSIS_SUMMARY_OUTPUT), 'a') as f:
        f.write(basic_stats)
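
The JSON report is assembled by joining pre-serialized rows, presumably because the rows are dicts, which cannot be stored directly in a set for deduplication. An equivalent sketch that re-parses the rows and lets json.dump produce the array (the helper name is hypothetical):

import json


def write_json_report(all_json_rows, json_report_path):
    # all_json_rows is a set of JSON-encoded row strings; decode them and
    # serialize the whole list in one call. Sorting keeps the output stable
    # across runs.
    rows = [json.loads(row) for row in sorted(all_json_rows)]
    with open(json_report_path, 'w') as report:
        json.dump(rows, report)
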