Example #1
def remove_duplicate_testcases(virgin_bits: list,
                               queue: Queue,
                               csv_path: Path = None) -> None:
    """
    Retrieve coverage information from the queue, and delete the testcase if it
    does *not* lead to new coverage.
    """
    while True:
        testcase, cov = queue.get()
        new_bits, virgin_bits = has_new_bits(cov, virgin_bits)

        if new_bits == 0:
            os.unlink(testcase)
        elif csv_path:
            # If a CSV file has been provided, write coverage information to the
            # CSV file
            t_bytes = count_non_255_bytes(virgin_bits)
            t_byte_ratio = (t_bytes * 100.0) / MAP_SIZE
            execs = int(testcase.name.split('id:')[1])
            csv_dict = dict(unix_time='%d' % testcase.stat().st_ctime,
                            map_size='%.02f' % t_byte_ratio,
                            execs=execs)

            with open(csv_path, 'a') as outf:
                CsvDictWriter(outf,
                              fieldnames=CSV_FIELDNAMES).writerow(csv_dict)

        queue.task_done()
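These examples rely on project-specific helpers that are not shown: CsvDictWriter, has_new_bits, count_non_255_bytes, MAP_SIZE and CSV_FIELDNAMES. A minimal sketch of the assumed pieces, inferred from how they are called (the alias and the AFL-style virgin-bitmap semantics are assumptions, not code from the original project):

# Assumed alias used throughout these examples (not shown in the snippets)
from csv import DictWriter as CsvDictWriter

def has_new_bits(cov: list, virgin_bits: list) -> tuple:
    """Sketch of an AFL-style virgin-bitmap check (assumed semantics).

    Returns (new_bits, virgin_bits): new_bits is non-zero if `cov` touches
    any bit still set in virgin_bits, and virgin_bits is updated to clear
    the bits that have now been seen.
    """
    new_bits = 0
    for i, byte in enumerate(cov):
        if byte and (byte & virgin_bits[i]):
            new_bits = 1
            virgin_bits[i] &= ~byte & 0xff
    return new_bits, virgin_bits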
Example #2
def main() -> None:
    """The main function."""
    args = parse_args()

    # Check the target
    target = args.target
    if not os.path.isfile(target):
        raise Exception('Target `%s` does not exist' % target)

    # Maximum number of tasks. Two tasks are required as a minimum: one for
    # running afl-showmap, and another for updating the coverage bitmap
    max_task = args.jobs
    if max_task == 0:
        max_task = multiprocessing.cpu_count()
    elif max_task <= 2:
        max_task = 2

    # Check afl-showmap
    if not which('afl-showmap'):
        raise Exception('Could not find afl-showmap. Check PATH')

    # Check that fuzzer_stats exists and read the fuzzer statistics
    out_dir = Path(args.out_dir)
    fuzzer_stats_path = out_dir / 'fuzzer_stats'
    if not fuzzer_stats_path.exists():
        raise Exception('No fuzzer_stats in `%s`' % out_dir)
    with open(fuzzer_stats_path, 'r') as inf:
        afl_stats = FuzzerStats(inf)

    # Open CSV plot_data
    csv_path = args.csv
    if csv_path:
        csv_path = Path(csv_path)
        with open(csv_path, 'w') as outf:
            CsvDictWriter(outf, fieldnames=CSV_FIELDNAMES).writeheader()

    with Executor(max_workers=max_task) as executor, \
            open(out_dir / 'blackbox.tar.gz', 'wb') as tar_file:
        # The coverage bitmap
        cov_bitmap = [255] * MAP_SIZE
        cov_queue = Queue(max_task)

        # Thread responsible for deduplicating entries in the output directory
        # and logging coverage to a CSV
        cov_thread = Thread(target=remove_duplicate_testcases,
                            args=(cov_bitmap, cov_queue, tar_file, csv_path))
        cov_thread.daemon = True
        cov_thread.start()

        # Start the monitor
        handler = TestCaseHandler(executor, cov_queue, target, afl_stats,
                                  args.timeout)
        observer = Observer()
        observer.schedule(handler, out_dir / 'queue' / '.blackbox')
        observer.start()

        # Continue until interrupted
        try:
            while observer.is_alive():
                observer.join(1)
        except KeyboardInterrupt:
            print('\nCtrl-C detected, goodbye')
            observer.stop()
            observer.join()
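Design note: this version of main() appears to follow a producer/consumer pattern. TestCaseHandler pushes (testcase, coverage) pairs onto cov_queue, and the remove_duplicate_testcases thread consumes them; marking that thread as a daemon means it will not keep the interpreter alive once the observer has been stopped.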
Example #3
def export_csv(output_name, rows):
    """Export a list of dicts to a CSV file."""
    with open(output_name, 'w', newline='') as file:
        first_row = rows[0]
        keys = first_row.keys()
        writer = CsvDictWriter(file, fieldnames=keys)

        writer.writeheader()
        writer.writerows(rows)
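A hypothetical invocation (the file name and rows are invented for illustration):

# Hypothetical usage. Note that export_csv raises IndexError on an empty
# list, since the header fields are taken from the first row
export_csv('counts.csv', [{'name': 'foo', 'count': 1},
                          {'name': 'bar', 'count': 2}])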
Example #4
def main(input_file, output_file) -> None:
    csv_reader = CsvDictReader(input_file)
    if not csv_reader.fieldnames:
        raise ValueError("Could not parse input CSV data")
    csv_writer = CsvDictWriter(output_file,
                               fieldnames=csv_reader.fieldnames,
                               lineterminator="\n")
    csv_writer.writeheader()
    for row in normalize(csv_reader):
        csv_writer.writerow(row)
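The normalize() helper is not shown in these examples. A purely illustrative stand-in, assuming it is a generator that cleans up each row, might strip surrounding whitespace from every field:

# Illustrative stand-in only; the real normalize() is project-specific
def normalize(rows):
    for row in rows:
        yield {key: value.strip() for key, value in row.items()}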
Example #5
def main() -> None:
    """The main function."""
    args = parse_args()

    # Check the target
    target = args.target
    if not os.path.isfile(target):
        raise Exception('Target `%s` does not exist' % target)

    # Maximum number of tasks. Two tasks are required as a minimum: one for
    # running afl-showmap, and another for updating the coverage bitmap
    max_task = args.j
    if max_task == 0:
        max_task = multiprocessing.cpu_count()
    elif max_task <= 2:
        max_task = 2

    # Check afl-showmap
    if not which('afl-showmap'):
        raise Exception('Could not find afl-showmap. Check PATH')

    # Wait for fuzzer_stats to exist
    out_dir = Path(args.out_dir)
    fuzzer_stats_path = out_dir / 'fuzzer_stats'
    while not fuzzer_stats_path.exists():
        sleep(1)
    with open(fuzzer_stats_path, 'r') as inf:
        afl_stats = FuzzerStats(inf)

    # Open CSV plot_data
    csv_path = args.csv
    if csv_path:
        csv_path = Path(csv_path)
        with open(csv_path, 'w') as outf:
            CsvDictWriter(outf, fieldnames=CSV_FIELDNAMES).writeheader()

    # Start the watchdog
    handler = TestCaseHandler(max_task - 1, target, afl_stats, csv_path)
    observer = Observer()
    observer.schedule(handler, out_dir / 'queue' / '.blackbox')
    observer.start()

    # Continue until interrupted
    try:
        while observer.is_alive():
            observer.join(1)
    except KeyboardInterrupt:
        print('\nCtrl-C detected, goodbye')
        observer.stop()
        observer.join()
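        # Sending SIGKILL to PID 0 targets the whole process group, taking
        # down any in-flight afl-showmap workers along with this script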
        os.kill(0, 9)
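FuzzerStats is another helper that is not shown. Assuming it simply parses AFL's fuzzer_stats file (lines of the form `key : value`), a minimal sketch could be:

# Minimal sketch, assuming the standard AFL fuzzer_stats format; the real
# class is not shown in these examples
class FuzzerStats(dict):
    def __init__(self, stats_file):
        super().__init__()
        for line in stats_file:
            key, _, value = line.partition(':')
            if key.strip():
                self[key.strip()] = value.strip()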
Example #6
def main():
    """The main function."""
    args = parse_args()

    identifiers = set()

    for path in args.json:
        with open(path, 'r') as inf:
            for ast in parse_ast(inf):
                identifiers |= walk_ast_rec(ast)

    identifiers = [dict(identifier=iden, type=iden_type)
                   for iden, iden_type in identifiers]

    csv_writer = CsvDictWriter(sys.stdout,
                               fieldnames=('identifier', 'type'))
    csv_writer.writeheader()
    csv_writer.writerows(identifiers)
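From the comprehension above, walk_ast_rec is evidently expected to return a set of (identifier, type) tuples. An illustrative stand-in for a JSON-style AST (the field names are guesses, not the original implementation):

# Illustrative stand-in only; the 'identifier' and 'type' keys are assumed
def walk_ast_rec(node) -> set:
    found = set()
    if isinstance(node, dict):
        if 'identifier' in node:
            found.add((node['identifier'], node.get('type', 'unknown')))
        for child in node.values():
            found |= walk_ast_rec(child)
    elif isinstance(node, list):
        for child in node:
            found |= walk_ast_rec(child)
    return found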
Example #7
def remove_duplicate_testcases(virgin_bits: list,
                               queue: Queue,
                               tar_file: BufferedWriter,
                               csv_path: Path = None) -> None:
    """
    Retrieve coverage information from the queue, and delete the testcase if it
    does *not* lead to new coverage.
    """
    while True:
        testcase, cov = queue.get()
        new_bits, virgin_bits = has_new_bits(cov, virgin_bits)

        if new_bits:
            # Write testcase to GZIP. TarInfo.size defaults to 0, so it must
            # be set explicitly or an empty member is written
            with open(testcase, 'rb') as inf, \
                    TarFile.open(fileobj=tar_file, mode='w:gz') as tar:
                tar_info = TarInfo(testcase.name)
                tar_info.size = testcase.stat().st_size
                tar.addfile(tar_info, inf)
            tar_file.flush()

            if csv_path:
                # If a CSV file has been provided, write coverage information to
                # the CSV file
                t_bytes = count_non_255_bytes(virgin_bits)
                t_byte_ratio = (t_bytes * 100.0) / MAP_SIZE
                execs = int(testcase.name.split('id:')[1])
                csv_dict = dict(unix_time='%d' % testcase.stat().st_ctime,
                                map_size='%.02f' % t_byte_ratio,
                                execs=execs)

                with open(csv_path, 'a') as outf:
                    writer = CsvDictWriter(outf, fieldnames=CSV_FIELDNAMES)
                    writer.writerow(csv_dict)

        # Delete the testcase on a separate thread so the consumer loop is
        # not blocked on filesystem I/O
        Thread(target=lambda: os.unlink(testcase)).start()
        queue.task_done()
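Design note: opening TarFile with mode='w:gz' on the same underlying file object once per testcase appends a separate gzip-compressed archive each time, so blackbox.tar.gz ends up as a concatenation of archives; extracting every member typically requires something like `tar -xzf blackbox.tar.gz --ignore-zeros`.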
Example #8
def begin(self, fields):
    self.writer = CsvDictWriter(self.fp, fieldnames=fields)
    self.writer.writeheader()
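A hypothetical caller (the CsvOutput class and sys.stdout are invented for illustration; only begin() comes from the original snippet):

import sys

# Hypothetical host class; self.fp is assumed to be an open file handle
class CsvOutput:
    def __init__(self, fp):
        self.fp = fp

    def begin(self, fields):
        self.writer = CsvDictWriter(self.fp, fieldnames=fields)
        self.writer.writeheader()

out = CsvOutput(sys.stdout)
out.begin(['name', 'count'])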