def main(argv):
    """Entry point of afl-vcrash.

    Re-executes the target binary on every crash sample found in
    <collection_dir> and reports the samples that did not lead to a crash
    or that timed out.  Depending on the command line those samples are
    printed, written to a file list and/or removed from disk.
    """
    show_info()
    parser = argparse.ArgumentParser(description="afl-vcrash verifies that afl-fuzz crash samples lead to crashes in \
the target binary.",
                                     usage="afl-vcrash [-f LIST_FILENAME] [-h] [-j THREADS] [-q] [-r] [-t TIMEOUT] collection_dir -- target_command")

    parser.add_argument("collection_dir",
                        help="Directory holding all crash samples that will be verified.")
    parser.add_argument("target_cmd", nargs="+", help="Target binary including command line \
options. Use '@@' to specify crash sample input file position (see afl-fuzz usage).")
    parser.add_argument("-f", "--filelist", dest="list_filename", default=None,
                        help="Writes all crash sample file names that do not lead to crashes into a file.")
    parser.add_argument("-j", "--threads", dest="num_threads", default=1,
                        help="Enable parallel verification by specifying the number of threads afl-vcrash \
will utilize.")
    parser.add_argument("-q", "--quiet", dest="quiet", action="store_const", const=True, default=False,
                        help="Suppress output of crash sample file names that do not lead to crashes. This is \
particularly useful when combined with '-r' or '-f'.")
    parser.add_argument("-r", "--remove", dest="remove", action="store_const", const=True, default=False,
                        help="Remove crash samples that do not lead to crashes.")
    parser.add_argument("-t", "--timeout", dest="timeout_secs", default=60,
                        help="Define the timeout in seconds before killing the verification of a crash sample")

    args = parser.parse_args(argv[1:])

    # Normalize the sample directory ('~' expansion, absolute path) and
    # bail out early if it does not exist.
    input_dir = os.path.abspath(os.path.expanduser(args.collection_dir))
    if not os.path.exists(input_dir):
        print_err("No valid directory provided for <collection_dir>!")
        return

    num_crashes, crash_samples = afl_collect.get_samples_from_dir(input_dir, True)
    print_ok("Verifying %d crash samples..." % num_crashes)

    # Normalize the target command: make the binary path absolute and
    # verify that it exists before re-joining the command into one string.
    args.target_cmd = " ".join(args.target_cmd).split()
    args.target_cmd[0] = os.path.abspath(os.path.expanduser(args.target_cmd[0]))
    if not os.path.exists(args.target_cmd[0]):
        print_err("Target binary not found!")
        return
    args.target_cmd = " ".join(args.target_cmd)

    # Run the actual verification, possibly in parallel ('-j').
    invalid_samples, timeout_samples = verify_samples(int(args.num_threads), crash_samples, args.target_cmd,
                                                      int(args.timeout_secs))

    print_warn("Found %d invalid crash samples." % len(invalid_samples))
    print_warn("%d samples caused a timeout." % len(timeout_samples))

    if args.remove:
        # '-r': delete both non-crashing and timed-out samples.
        print_ok("Removing invalid crash samples.")
        remove_samples(invalid_samples, args.quiet)
        print_ok("Removing timeouts.")
        remove_samples(timeout_samples, args.quiet)
    elif not args.quiet:
        # Without '-r' and '-q': just print the non-crashing sample names.
        for ci in invalid_samples:
            print(ci)

    # generate filelist of collected crash samples
    if args.list_filename:
        afl_collect.generate_sample_list(args.list_filename, invalid_samples + timeout_samples)
        print_ok("Generated invalid crash sample list '%s'." % args.list_filename)
def main(argv):
    """Entry point of afl-vcrash.

    Re-executes the target binary on every crash sample found in
    <collection_dir> and reports the samples that did not lead to a crash
    or that timed out.  Depending on the command line those samples are
    printed, written to a file list and/or removed from disk.  Exits with
    status 1 when <collection_dir> is invalid.
    """
    show_info()
    parser = argparse.ArgumentParser(description="afl-vcrash verifies that afl-fuzz crash samples lead to crashes in \
the target binary.",
                                     usage="afl-vcrash [-f LIST_FILENAME] [-h] [-j THREADS] [-q] [-r] [-t TIMEOUT] collection_dir -- target_command")

    parser.add_argument("collection_dir",
                        help="Directory holding all crash samples that will be verified.")
    parser.add_argument("target_cmd", nargs="+", help="Target binary including command line \
options. Use '@@' to specify crash sample input file position (see afl-fuzz usage).")
    parser.add_argument("-f", "--filelist", dest="list_filename", default=None,
                        help="Writes all crash sample file names that do not lead to crashes into a file.")
    parser.add_argument("-j", "--threads", dest="num_threads", default=1,
                        help="Enable parallel verification by specifying the number of threads afl-vcrash \
will utilize.")
    parser.add_argument("-q", "--quiet", dest="quiet", action="store_const", const=True, default=False,
                        help="Suppress output of crash sample file names that do not lead to crashes. This is \
particularly useful when combined with '-r' or '-f'.")
    parser.add_argument("-r", "--remove", dest="remove", action="store_const", const=True, default=False,
                        help="Remove crash samples that do not lead to crashes.")
    parser.add_argument("-t", "--timeout", dest="timeout_secs", default=60,
                        help="Define the timeout in seconds before killing the verification of a crash sample")

    args = parser.parse_args(argv[1:])

    # Normalize the sample directory ('~' expansion, absolute path) and
    # exit with a non-zero status if it does not exist.
    input_dir = os.path.abspath(os.path.expanduser(args.collection_dir))
    if not os.path.exists(input_dir):
        print_err("No valid directory provided for <collection_dir>!")
        sys.exit(1)

    num_crashes, crash_samples = afl_collect.get_samples_from_dir(input_dir, True)
    print_ok("Verifying %d crash samples..." % num_crashes)

    # Normalize/validate the target command via the shared helper.
    args.target_cmd = build_target_cmd(args.target_cmd)

    # Configure AddressSanitizer for crash verification: abort on error so
    # the crash is observable as a signal, don't count leaks as crashes,
    # and symbolize reports.
    os.environ["ASAN_OPTIONS"] = "abort_on_error=1:detect_leaks=0:symbolize=1:allocator_may_return_null=1"
    # NOTE(review): hard-coded llvm-3.8 symbolizer path — this breaks on
    # systems with a different LLVM version; consider locating the
    # symbolizer dynamically or honouring a pre-set environment value.
    os.environ["ASAN_SYMBOLIZER_PATH"] = "/usr/lib/llvm-3.8/bin/llvm-symbolizer"

    # Run the actual verification, possibly in parallel ('-j').
    invalid_samples, timeout_samples = verify_samples(int(args.num_threads), crash_samples, args.target_cmd,
                                                      int(args.timeout_secs))

    print_warn("Found %d invalid crash samples." % len(invalid_samples))
    print_warn("%d samples caused a timeout." % len(timeout_samples))

    if args.remove:
        # '-r': delete both non-crashing and timed-out samples.
        print_ok("Removing invalid crash samples.")
        remove_samples(invalid_samples, args.quiet)
        print_ok("Removing timeouts.")
        remove_samples(timeout_samples, args.quiet)
    elif not args.quiet:
        # Without '-r' and '-q': just print the non-crashing sample names.
        for ci in invalid_samples:
            print(ci)

    # generate filelist of collected crash samples
    if args.list_filename:
        afl_collect.generate_sample_list(args.list_filename, invalid_samples + timeout_samples)
        print_ok("Generated invalid crash sample list '%s'." % args.list_filename)
def test_get_samples_from_dir(self):
    """get_samples_from_dir returns (count, names); with abs_path=True the
    names are prefixed with the queue directory."""
    queue_dir = 'testdata/queue'
    names = ['sample0', 'sample1', 'sample2']

    # Plain call: bare file names.
    count, samples = afl_collect.get_samples_from_dir(queue_dir)
    self.assertEqual(3, count)
    self.assertListEqual(names, sorted(samples))

    # abs_path=True: names joined with the queue directory.
    count, samples = afl_collect.get_samples_from_dir(queue_dir, abs_path=True)
    self.assertEqual(3, count)
    self.assertListEqual([os.path.join(queue_dir, n) for n in names], sorted(samples))
def test_get_samples_from_dir(self):
    """get_samples_from_dir returns (count, names); with abs_path=True the
    names are prefixed with the queue directory."""
    queue_dir = 'testdata/queue'
    names = ['sample%d' % i for i in range(5)]

    # Plain call: bare file names.
    count, samples = afl_collect.get_samples_from_dir(queue_dir)
    self.assertEqual(5, count)
    self.assertListEqual(names, sorted(samples))

    # abs_path=True: names joined with the queue directory.
    count, samples = afl_collect.get_samples_from_dir(queue_dir, abs_path=True)
    self.assertEqual(5, count)
    self.assertListEqual([os.path.join(queue_dir, n) for n in names], sorted(samples))
def main(argv):
    """Entry point of afl-vcrash.

    Re-executes the target binary on every crash sample found in
    <collection_dir> and reports the samples that did not lead to a
    crash.  Depending on the command line those samples are printed,
    written to a file list and/or removed from disk.
    """
    show_info()
    parser = argparse.ArgumentParser(description="afl-vcrash verifies that afl-fuzz crash samples lead to crashes in \
the target binary.",
                                     usage="afl-vcrash [-f LIST_FILENAME] [-h] [-j THREADS] [-q] [-r] collection_dir -- target_command")

    parser.add_argument("collection_dir",
                        help="Directory holding all crash samples that will be verified.")
    parser.add_argument("target_cmd", nargs="+", help="Target binary including command line \
options. Use '@@' to specify crash sample input file position (see afl-fuzz usage).")
    parser.add_argument("-f", "--filelist", dest="list_filename", default=None,
                        help="Writes all crash sample file names that do not lead to crashes into a file.")
    parser.add_argument("-j", "--threads", dest="num_threads", default=1,
                        help="Enable parallel verification by specifying the number of threads afl-vcrash \
will utilize.")
    parser.add_argument("-q", "--quiet", dest="quiet", action="store_const", const=True, default=False,
                        help="Suppress output of crash sample file names that do not lead to crashes. This is \
particularly useful when combined with '-r' or '-f'.")
    parser.add_argument("-r", "--remove", dest="remove", action="store_const", const=True, default=False,
                        help="Remove crash samples that do not lead to crashes.")

    args = parser.parse_args(argv[1:])

    # Fix: 'collection_dir' is a required positional and therefore always
    # truthy, so the former "if args.collection_dir" guard never rejected
    # anything and a bogus path only failed later inside
    # get_samples_from_dir().  Expand and validate the path up front
    # instead (mirrors the check applied to the target binary below).
    input_dir = os.path.abspath(os.path.expanduser(args.collection_dir))
    if not os.path.exists(input_dir):
        print("No valid directory provided for <collection_dir>!")
        return

    num_crashes, crash_samples = afl_collect.get_samples_from_dir(input_dir, True)
    print("Verifying %d crash samples..." % num_crashes)

    # Normalize the target command: make the binary path absolute and
    # verify that it exists before re-joining the command into one string.
    args.target_cmd = " ".join(args.target_cmd).split()
    args.target_cmd[0] = os.path.abspath(os.path.expanduser(args.target_cmd[0]))
    if not os.path.exists(args.target_cmd[0]):
        print("Target binary not found!")
        return
    args.target_cmd = " ".join(args.target_cmd)

    # Run the actual verification, possibly in parallel ('-j').
    invalid_samples = verify_samples(int(args.num_threads), crash_samples, args.target_cmd)

    print("Found %d invalid crash samples." % len(invalid_samples))

    if args.remove:
        # '-r': delete the non-crashing samples.
        print("Removing invalid crash samples.")
        remove_samples(invalid_samples, args.quiet)
    elif not args.quiet:
        # Without '-r' and '-q': just print the non-crashing sample names.
        for ci in invalid_samples:
            print(ci)

    # generate filelist of collected crash samples
    if args.list_filename:
        afl_collect.generate_sample_list(args.list_filename, invalid_samples)
        print("Generated invalid crash sample list '%s'." % args.list_filename)
def main(argv):
    """Entry point of afl-minimize.

    Optionally collects all queue samples from an afl synchronisation dir
    into a collection dir ('-c'), minimizes that collection with afl-cmin
    ('--cmin') and/or afl-tmin ('--tmin'), and finally performs a dry-run
    ('-d') that moves samples causing crashes or hangs out of the corpus.
    """
    show_info()
    parser = argparse.ArgumentParser(description="afl-minimize performs several optimization steps to reduce the size\n \
of an afl-fuzz corpus.",
                                     usage="afl-minimize [-c COLLECTION_DIR [--cmin [opts]] [--tmin [opts]]] [-d] [-h]\n \
[-j] sync_dir -- target_cmd\n")

    parser.add_argument("-c", "--collect", dest="collection_dir",
                        help="Collect all samples from the synchronisation dir and store them in the collection dir. \
Existing files in the collection directory will be overwritten!", default=None)
    parser.add_argument("--cmin", dest="invoke_cmin", action="store_const", const=True, default=False,
                        help="Run afl-cmin on collection dir. Has no effect without '-c'.")
    parser.add_argument("--cmin-mem-limit", dest="cmin_mem_limit", default=None, help="Set memory limit for afl-cmin.")
    parser.add_argument("--cmin-timeout", dest="cmin_timeout", default=None, help="Set timeout for afl-cmin.")
    parser.add_argument("--tmin", dest="invoke_tmin", action="store_const", const=True, default=False,
                        help="Run afl-tmin on minimized collection dir if used together with '--cmin'\
or on unoptimized collection dir otherwise. Has no effect without '-c'.")
    parser.add_argument("--tmin-mem-limit", dest="tmin_mem_limit", default=None, help="Set memory limit for afl-tmin.")
    parser.add_argument("--tmin-timeout", dest="tmin_timeout", default=None, help="Set timeout for afl-tmin.")
    parser.add_argument("-d", "--dry-run", dest="dry_run", action="store_const", const=True, default=False,
                        help="Perform dry-run on collection dir, if '-c' is provided or on \
synchronisation dir otherwise. Dry-run will move intermittent crashes out of the corpus.")
    parser.add_argument("-j", "--threads", dest="num_threads", default=1,
                        help="Enable parallel dry-run and t-minimization step by specifying the number of threads \
afl-minimize will utilize.")
    parser.add_argument("sync_dir", help="afl synchronisation directory containing multiple fuzzers and their queues.")
    parser.add_argument("target_cmd", nargs="+", help="Path to the target binary and its command line arguments. \
Use '@@' to specify crash sample input file position (see afl-fuzz usage).")

    args = parser.parse_args(argv[1:])

    # At least one operation ('-c' or '-d') must have been requested.
    if not args.collection_dir and not args.dry_run:
        print_err("No operation requested. You should at least provide '-c'")
        print_err("for sample collection or '-d' for a dry-run. Use '--help' for")
        print_err("usage instructions or checkout README.md for details.")
        return

    # Normalize and validate the synchronisation directory.
    sync_dir = os.path.abspath(os.path.expanduser(args.sync_dir))
    if not os.path.exists(sync_dir):
        print_err("No valid directory provided for <SYNC_DIR>!")
        return

    # Normalize the target command: make the binary path absolute and
    # verify that it exists before re-joining the command into one string.
    args.target_cmd = " ".join(args.target_cmd).split()
    args.target_cmd[0] = os.path.abspath(os.path.expanduser(args.target_cmd[0]))
    if not os.path.exists(args.target_cmd[0]):
        print_err("Target binary not found!")
        return
    args.target_cmd = " ".join(args.target_cmd)

    # '-j': number of worker threads for tmin/dry-run steps (default 1).
    if not args.num_threads:
        threads = 1
    else:
        threads = int(args.num_threads)

    if args.collection_dir:
        out_dir = os.path.abspath(os.path.expanduser(args.collection_dir))
        # Only collect into a missing or empty collection dir; otherwise
        # keep the existing contents and skip the collection step.
        if not os.path.exists(out_dir) or len(os.listdir(out_dir)) == 0:
            os.makedirs(out_dir, exist_ok=True)
            print_ok("Looking for fuzzing queues in '%s'." % sync_dir)
            fuzzers = afl_collect.get_fuzzer_instances(sync_dir, crash_dirs=False)
            # collect samples from fuzzer queues
            print_ok("Found %d fuzzers, collecting samples." % len(fuzzers))
            sample_index = afl_collect.build_sample_index(sync_dir, out_dir, fuzzers)
            print_ok("Successfully indexed %d samples." % len(sample_index.index))
            print_ok("Copying %d samples into collection directory..." % len(sample_index.index))
            afl_collect.copy_samples(sample_index)
        else:
            print_warn("Collection directory exists and is not empty!")
            print_warn("Skipping collection step...")
        if args.invoke_cmin:
            # invoke cmin on collection
            print_ok("Executing: afl-cmin -i %s -o %s.cmin -- %s" % (out_dir, out_dir, args.target_cmd))
            invoke_cmin(out_dir, "%s.cmin" % out_dir, args.target_cmd, mem_limit=args.cmin_mem_limit,
                        timeout=args.cmin_timeout)
            if args.invoke_tmin:
                # invoke tmin on minimized collection
                print_ok("Executing: afl-tmin -i %s.cmin/* -o %s.cmin.tmin/* -- %s" % (out_dir, out_dir,
                                                                                       args.target_cmd))
                tmin_num_samples, tmin_samples = afl_collect.get_samples_from_dir("%s.cmin" % out_dir, abs_path=True)
                invoke_tmin(tmin_samples, "%s.cmin.tmin" % out_dir, args.target_cmd, num_threads=threads,
                            mem_limit=args.tmin_mem_limit, timeout=args.tmin_timeout)
        elif args.invoke_tmin:
            # invoke tmin on collection
            print_ok("Executing: afl-tmin -i %s/* -o %s.tmin/* -- %s" % (out_dir, out_dir, args.target_cmd))
            tmin_num_samples, tmin_samples = afl_collect.get_samples_from_dir(out_dir, abs_path=True)
            invoke_tmin(tmin_samples, "%s.tmin" % out_dir, args.target_cmd, num_threads=threads,
                        mem_limit=args.tmin_mem_limit, timeout=args.tmin_timeout)
        if args.dry_run:
            # invoke dry-run on collected/minimized corpus
            # The input dir depends on which minimization steps ran, so the
            # dry-run always operates on the most minimized corpus present.
            if args.invoke_cmin and args.invoke_tmin:
                print_ok("Performing dry-run in %s.cmin.tmin..." % out_dir)
                print_warn("Be patient! Depending on the corpus size this step can take hours...")
                dryrun_num_samples, dryrun_samples = afl_collect.get_samples_from_dir("%s.cmin.tmin" % out_dir,
                                                                                      abs_path=True)
                invoke_dryrun(dryrun_samples, "%s.cmin.tmin.crashes" % out_dir, "%s.cmin.tmin.hangs" % out_dir,
                              args.target_cmd, num_threads=threads)
            elif args.invoke_cmin:
                print_ok("Performing dry-run in %s.cmin..." % out_dir)
                print_warn("Be patient! Depending on the corpus size this step can take hours...")
                dryrun_num_samples, dryrun_samples = afl_collect.get_samples_from_dir("%s.cmin" % out_dir,
                                                                                      abs_path=True)
                invoke_dryrun(dryrun_samples, "%s.cmin.crashes" % out_dir, "%s.cmin.hangs" % out_dir,
                              args.target_cmd, num_threads=threads)
            elif args.invoke_tmin:
                print_ok("Performing dry-run in %s.tmin..." % out_dir)
                print_warn("Be patient! Depending on the corpus size this step can take hours...")
                dryrun_num_samples, dryrun_samples = afl_collect.get_samples_from_dir("%s.tmin" % out_dir,
                                                                                      abs_path=True)
                invoke_dryrun(dryrun_samples, "%s.tmin.crashes" % out_dir, "%s.tmin.hangs" % out_dir,
                              args.target_cmd, num_threads=threads)
            else:
                print_ok("Performing dry-run in %s..." % out_dir)
                print_warn("Be patient! Depending on the corpus size this step can take hours...")
                dryrun_num_samples, dryrun_samples = afl_collect.get_samples_from_dir(out_dir, abs_path=True)
                invoke_dryrun(dryrun_samples, "%s.crashes" % out_dir, "%s.hangs" % out_dir, args.target_cmd,
                              num_threads=threads)
    else:
        if args.dry_run:
            print_ok("Looking for fuzzing queues in '%s'." % sync_dir)
            fuzzers = afl_collect.get_fuzzer_instances(sync_dir, crash_dirs=False)
            print_ok("Found %d fuzzers, performing dry run." % len(fuzzers))
            print_warn("Be patient! Depending on the corpus size this step can take hours...")
            # invoke dry-run on original corpus
            # Per the usage below, f[0] appears to be the fuzzer instance
            # name and f[1] its queue dirs; crashes/hangs found during the
            # dry-run go to that fuzzer's own crashes/hangs directories.
            for f in fuzzers:
                for q_dir in f[1]:
                    q_dir_complete = os.path.join(sync_dir, f[0], q_dir)
                    print_ok("Processing %s..." % q_dir_complete)
                    dryrun_num_samples, dryrun_samples = afl_collect.get_samples_from_dir(q_dir_complete,
                                                                                          abs_path=True)
                    invoke_dryrun(dryrun_samples, os.path.join(sync_dir, f[0], "crashes"),
                                  os.path.join(sync_dir, f[0], "hangs"), args.target_cmd, num_threads=threads)
def main(argv):
    """Entry point of afl-minimize.

    Optionally collects all queue samples from an afl synchronisation dir
    into a collection dir ('-c'), minimizes that collection with afl-cmin
    ('--cmin') and/or afl-tmin ('--tmin'), and finally performs a dry-run
    ('-d') that moves intermittent crashes out of the corpus.
    """
    show_info()
    parser = argparse.ArgumentParser(
        description=
        "afl-minimize performs several optimization steps to reduce the size\n \
of an afl-fuzz corpus.",
        usage=
        "afl-minimize [-c COLLECTION_DIR [--cmin] [--tmin]] [-d] [-h] [-j] sync_dir \
-- target_cmd\n")
    parser.add_argument(
        "-c", "--collect", dest="collection_dir",
        help=
        "Collect all samples from the synchronisation dir and store them in the collection dir. \
Existing files in the collection directory will be overwritten!",
        default=None)
    parser.add_argument(
        "--cmin", dest="invoke_cmin", action="store_const", const=True, default=False,
        help="Run afl-cmin on collection dir. Has no effect without '-c'.")
    parser.add_argument(
        "--tmin", dest="invoke_tmin", action="store_const", const=True, default=False,
        help=
        "Run afl-tmin on minimized collection dir if used together with '--cmin'\
or on unoptimized collection dir otherwise. Has no effect without '-c'.")
    parser.add_argument(
        "-d", "--dry-run", dest="dry_run", action="store_const", const=True, default=False,
        help="Perform dry-run on collection dir, if '-c' is provided or on \
synchronisation dir otherwise. Dry-run will move intermittent crashes out of the corpus.")
    parser.add_argument(
        "-j", "--threads", dest="num_threads", default=1,
        help=
        "Enable parallel dry-run and t-minimization step by specifying the number of threads \
afl-minimize will utilize.")
    parser.add_argument(
        "sync_dir",
        help=
        "afl synchronisation directory containing multiple fuzzers and their queues.")
    parser.add_argument(
        "target_cmd", nargs="+",
        help="Path to the target binary and its command line arguments. \
Use '@@' to specify crash sample input file position (see afl-fuzz usage).")

    args = parser.parse_args(argv[1:])

    # At least one operation ('-c' or '-d') must have been requested.
    if not args.collection_dir and not args.dry_run:
        print_err("No operation requested. You should at least provide '-c'")
        print_err("for sample collection or '-d' for a dry-run. Use '--help' for")
        print_err("usage instructions or checkout README.md for details.")
        return

    # Normalize and validate the synchronisation directory.
    sync_dir = os.path.abspath(os.path.expanduser(args.sync_dir))
    if not os.path.exists(sync_dir):
        print_err("No valid directory provided for <SYNC_DIR>!")
        return

    # Normalize the target command: make the binary path absolute and
    # verify that it exists before re-joining the command into one string.
    args.target_cmd = " ".join(args.target_cmd).split()
    args.target_cmd[0] = os.path.abspath(os.path.expanduser(args.target_cmd[0]))
    if not os.path.exists(args.target_cmd[0]):
        print_err("Target binary not found!")
        return
    args.target_cmd = " ".join(args.target_cmd)

    # '-j': number of worker threads for tmin/dry-run steps (default 1).
    if not args.num_threads:
        threads = 1
    else:
        threads = int(args.num_threads)

    if args.collection_dir:
        out_dir = os.path.abspath(os.path.expanduser(args.collection_dir))
        # Only collect into a missing or empty collection dir; otherwise
        # keep the existing contents and skip the collection step.
        if not os.path.exists(out_dir) or len(os.listdir(out_dir)) == 0:
            os.makedirs(out_dir, exist_ok=True)
            print_ok("Looking for fuzzing queues in '%s'." % sync_dir)
            fuzzers = afl_collect.get_fuzzer_instances(sync_dir, crash_dirs=False)
            # collect samples from fuzzer queues
            print_ok("Found %d fuzzers, collecting samples." % len(fuzzers))
            sample_index = afl_collect.build_sample_index(sync_dir, out_dir, fuzzers)
            print_ok("Successfully indexed %d samples." % len(sample_index.index))
            print_ok("Copying %d samples into collection directory..." % len(sample_index.index))
            afl_collect.copy_samples(sample_index)
        else:
            print_warn("Collection directory exists and is not empty!")
            print_warn("Skipping collection step...")
        if args.invoke_cmin:
            # invoke cmin on collection
            print_ok("Executing: afl-cmin -i %s -o %s.cmin -- %s" % (out_dir, out_dir, args.target_cmd))
            invoke_cmin(out_dir, "%s.cmin" % out_dir, args.target_cmd)
            if args.invoke_tmin:
                # invoke tmin on minimized collection
                print_ok("Executing: afl-tmin -i %s.cmin/* -o %s.cmin.tmin/* -- %s" %
                         (out_dir, out_dir, args.target_cmd))
                tmin_num_samples, tmin_samples = afl_collect.get_samples_from_dir("%s.cmin" % out_dir, abs_path=True)
                tmin_num_samples_processed = invoke_tmin(tmin_samples, "%s.cmin.tmin" % out_dir, args.target_cmd,
                                                         num_threads=threads)
        elif args.invoke_tmin:
            # invoke tmin on collection
            print_ok("Executing: afl-tmin -i %s/* -o %s.tmin/* -- %s" % (out_dir, out_dir, args.target_cmd))
            tmin_num_samples, tmin_samples = afl_collect.get_samples_from_dir(out_dir, abs_path=True)
            tmin_num_samples_processed = invoke_tmin(tmin_samples, "%s.tmin" % out_dir, args.target_cmd,
                                                     num_threads=threads)
        if args.dry_run:
            # invoke dry-run on collected/minimized corpus
            # The input dir depends on which minimization steps ran, so the
            # dry-run always operates on the most minimized corpus present.
            if args.invoke_cmin and args.invoke_tmin:
                print_ok("Performing dry-run in %s.cmin.tmin..." % out_dir)
                print_warn("Be patient! Depending on the corpus size this step can take hours...")
                dryrun_num_samples, dryrun_samples = afl_collect.get_samples_from_dir("%s.cmin.tmin" % out_dir,
                                                                                      abs_path=True)
                invoke_dryrun(dryrun_samples, "%s.cmin.tmin.crashes" % out_dir, args.target_cmd,
                              num_threads=threads)
            elif args.invoke_cmin:
                print_ok("Performing dry-run in %s.cmin..." % out_dir)
                print_warn("Be patient! Depending on the corpus size this step can take hours...")
                dryrun_num_samples, dryrun_samples = afl_collect.get_samples_from_dir("%s.cmin" % out_dir,
                                                                                      abs_path=True)
                invoke_dryrun(dryrun_samples, "%s.cmin.crashes" % out_dir, args.target_cmd, num_threads=threads)
            elif args.invoke_tmin:
                print_ok("Performing dry-run in %s.tmin..." % out_dir)
                print_warn("Be patient! Depending on the corpus size this step can take hours...")
                dryrun_num_samples, dryrun_samples = afl_collect.get_samples_from_dir("%s.tmin" % out_dir,
                                                                                      abs_path=True)
                invoke_dryrun(dryrun_samples, "%s.tmin.crashes" % out_dir, args.target_cmd, num_threads=threads)
            else:
                print_ok("Performing dry-run in %s..." % out_dir)
                print_warn("Be patient! Depending on the corpus size this step can take hours...")
                dryrun_num_samples, dryrun_samples = afl_collect.get_samples_from_dir(out_dir, abs_path=True)
                # NOTE(review): unlike the sibling branches above, this
                # passes the corpus dir itself instead of a
                # "%s.crashes" % out_dir style directory — confirm this is
                # intentional and not a copy/paste slip.
                invoke_dryrun(dryrun_samples, out_dir, args.target_cmd, num_threads=threads)
    else:
        if args.dry_run:
            print_ok("Looking for fuzzing queues in '%s'." % sync_dir)
            fuzzers = afl_collect.get_fuzzer_instances(sync_dir, crash_dirs=False)
            print_ok("Found %d fuzzers, performing dry run." % len(fuzzers))
            print_warn("Be patient! Depending on the corpus size this step can take hours...")
            # invoke dry-run on original corpus
            # Per the usage below, f[0] appears to be the fuzzer instance
            # name and f[1] its queue dirs; crashes found during the
            # dry-run go to that fuzzer's own crashes directory.
            for f in fuzzers:
                for q_dir in f[1]:
                    q_dir_complete = os.path.join(sync_dir, f[0], q_dir)
                    print_ok("Processing %s..." % q_dir_complete)
                    dryrun_num_samples, dryrun_samples = afl_collect.get_samples_from_dir(q_dir_complete,
                                                                                          abs_path=True)
                    invoke_dryrun(dryrun_samples, os.path.join(sync_dir, f[0], "crashes"), args.target_cmd,
                                  num_threads=threads)