Example #1
def twitter_init():
    try:
        config_settings["twitter_creds_file"] = os.path.abspath(
            os.path.expanduser(config_settings["twitter_creds_file"])
        )
        if not os.path.exists(config_settings["twitter_creds_file"]):
            twitter.oauth_dance(
                "fuzzer_stats",
                config_settings["twitter_consumer_key"],
                config_settings["twitter_consumer_secret"],
                config_settings["twitter_creds_file"],
            )
        oauth_token, oauth_secret = twitter.read_token_file(config_settings["twitter_creds_file"])
        twitter_instance = twitter.Twitter(
            auth=twitter.OAuth(
                oauth_token,
                oauth_secret,
                config_settings["twitter_consumer_key"],
                config_settings["twitter_consumer_secret"],
            )
        )
        return twitter_instance
    except (twitter.TwitterHTTPError, URLError):
        print_err("Network error, twitter login failed! Check your connection!")
        sys.exit(1)
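
A hypothetical usage sketch (not part of the original source): twitter_init() reads the module-level config_settings dict, whose keys mirror those populated by read_config() in Example #12. The values below are placeholders for your own Twitter app credentials.

# hypothetical setup; placeholder credentials
config_settings = {
    "twitter_consumer_key": "<consumer_key>",
    "twitter_consumer_secret": "<consumer_secret>",
    "twitter_creds_file": "~/.afl-stats.creds",
}

twitter_instance = twitter_init()  # triggers twitter.oauth_dance() on first run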
Example #2
def __invoke_rsync(self, rsync_cmdline):
    ret = True
    try:
        subprocess.check_call(' '.join(rsync_cmdline), shell=True)
    except subprocess.CalledProcessError as e:
        print_err('rsync failed with exit code {}'.format(e.returncode))
        ret = False
    return ret
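
For comparison, a minimal no-shell sketch (an alternative, not the project's code), assuming rsync_cmdline is already a list of arguments: passing the list straight to check_call sidesteps shell word splitting and quoting issues.

def __invoke_rsync_no_shell(self, rsync_cmdline):
    # the argument list is passed verbatim, so no shell quoting is needed
    try:
        subprocess.check_call(rsync_cmdline)
        return True
    except subprocess.CalledProcessError as e:
        print_err('rsync failed with exit code {}'.format(e.returncode))
        return False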
Example #3
def build_target_cmd(conf_settings):
    target_cmd = [conf_settings["target"], conf_settings["cmdline"]]
    target_cmd = " ".join(target_cmd).split()
    target_cmd[0] = os.path.abspath(os.path.expanduser(target_cmd[0]))
    if not os.path.exists(target_cmd[0]):
        print_err("Target binary not found!")
        sys.exit(1)
    target_cmd = " ".join(target_cmd)
    return target_cmd
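
A hypothetical call (paths are placeholders): the function joins target and cmdline, resolves the binary to an absolute path, aborts if it does not exist, and returns the flattened command string.

conf_settings = {"target": "~/targets/parser", "cmdline": "-f @@"}
target_cmd = build_target_cmd(conf_settings)
# target_cmd is now e.g. "/home/user/targets/parser -f @@"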
Example #4
def main(argv):
    show_info()

    parser = argparse.ArgumentParser(description='afl-sync synchronizes fuzzer state directories between different \
locations. Remote transfers through rsync are supported and may use transport compression.',
                                     usage='afl-sync [-S SESSION] <cmd> <src_sync_dir> <dst_storage_dir>')

    parser.add_argument('cmd',
                        help='Command to perform: push, pull or sync. Push transmits the local state from '
                             '<src_sync_dir> to the destination <dst_storage_dir>. Pull fetches remote state(s) into '
                             'the local synchronization dir appending the \'.sync\' extension. Sync performs a '
                             'pull operation followed by a push.')
    parser.add_argument('src_sync_dir',
                        help='Source afl synchronisation directory containing state directories of afl instances.')
    parser.add_argument('dst_storage_dir',
                        help='Destination directory used as fuzzer state storage. This shouldn\'t be an afl sync dir!')
    parser.add_argument('-S', '--session', dest='session', default=None,
                        help='Name of an afl-multicore session. If provided, only fuzzers belonging to '
                             'the specified session will be synced with the destination. Otherwise state '
                             'directories of all fuzzers inside the synchronisation dir will be exchanged. '
                             'Directories ending on \'.sync\' will never be pushed back to the destination!')

    args = parser.parse_args(argv[1:])

    args.cmd = args.cmd.lower()
    if args.cmd not in ['push', 'pull', 'sync']:
        print_err('Sorry, unknown command requested!')
        sys.exit(1)

    if not os.path.exists(args.src_sync_dir):
        if args.cmd in ['pull', 'sync']:
            print_warn('Local afl sync dir does not exist! Will create it for you!')
            os.makedirs(args.src_sync_dir)
        else:
            print_err('Local afl sync dir does not exist!')
            sys.exit(1)

    server_config = {
        'remote_path':      args.dst_storage_dir,
    }

    fuzzer_config = {
        'sync_dir':         args.src_sync_dir,
        'session':          args.session,
        'exclude_crashes':  False,
        'exclude_hangs':    False,
    }

    rsyncEngine = AflRsync(server_config, fuzzer_config)

    if args.cmd == 'push':
        rsyncEngine.push()
    elif args.cmd == 'pull':
        rsyncEngine.pull()
    elif args.cmd == 'sync':
        rsyncEngine.sync()
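
A hypothetical programmatic invocation mirroring the usage string above (argv[0] is skipped by parse_args; the destination is a placeholder for any rsync-reachable path):

main(["afl-sync", "push", "./sync_dir", "user@host:/fuzzing/storage"])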
Example #5
def read_config(config_file):
    config_file = os.path.abspath(os.path.expanduser(config_file))

    if not os.path.isfile(config_file):
        print_err("Config file not found!")
        sys.exit(1)

    with open(config_file, 'r') as raw_config:
        config = json.load(raw_config)
        return config
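
This JSON-based reader pairs with a config file along these lines (a hypothetical example; the key names follow the settings used by fetch_stats() in Example #8):

# hypothetical ~/.afl-stats.json:
# {
#     "interval": "30",
#     "fuzz_dirs": ["/fuzzing/sync/target00", "/fuzzing/sync/target01"]
# }
config = read_config("~/.afl-stats.json")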
Example #6
def generate_sample_list(list_filename, files_collected):
    list_filename = os.path.abspath(os.path.expanduser(list_filename))

    try:
        fd = open(list_filename, "w")
        for f in files_collected:
            fd.writelines("%s\n" % f)

        fd.close()
    except (FileExistsError, PermissionError):
        print_err("Could not create file list '%s'!" % list_filename)
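
A hypothetical call (sample names are placeholders): one collected sample name is written per line of the list file.

generate_sample_list("collection/files.lst",
                     ["fuzzer000:id:000000", "fuzzer000:id:000001"])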
Example #7
def generate_gdb_exploitable_script(script_filename, sample_index, target_cmd, script_id=0, intermediate=False):
    target_cmd = target_cmd.split()
    gdb_target_binary = target_cmd[0]
    gdb_run_cmd = " ".join(target_cmd[1:])

    if not intermediate:
        script_filename = os.path.abspath(os.path.expanduser(script_filename))
        print_ok(
            "Generating final gdb+exploitable script '%s' for %d samples..."
            % (script_filename, len(sample_index.outputs()))
        )
    else:
        script_filename = os.path.abspath(os.path.expanduser("%s.%d" % (script_filename, script_id)))
        print_ok(
            "Generating intermediate gdb+exploitable script '%s' for %d samples..."
            % (script_filename, len(sample_index.outputs()))
        )

    try:
        fd = open(script_filename, "w")

        # <script header>
        # source exploitable.py if necessary
        if gdb_exploitable_path:
            fd.writelines("source %s\n" % gdb_exploitable_path)

        # load executable
        fd.writelines("file %s\n" % gdb_target_binary)
        # </script_header>

        # fill script with content
        for f in sample_index.index:
            fd.writelines("echo Crash\ sample:\ '%s'\\n\n" % f["output"])

            if not stdin_mode(target_cmd):
                run_cmd = "run " + gdb_run_cmd + "\n"
            else:
                run_cmd = "run " + gdb_run_cmd + "< @@" + "\n"

            if intermediate:
                run_cmd = run_cmd.replace("@@", f["input"])
            else:
                run_cmd = run_cmd.replace("@@", os.path.join(sample_index.output_dir, f["output"]))

            fd.writelines(run_cmd)
            fd.writelines("exploitable\n")

        # <script_footer>
        fd.writelines("quit")
        # </script_footer>

        fd.close()
    except (FileExistsError, PermissionError):
        print_err("Could not open script file '%s' for writing!" % script_filename)
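
For orientation, a sketch of the script this function emits, assuming two file-mode samples, a target cmdline of "-f @@" and exploitable.py not yet sourced in ~/.gdbinit (all names are placeholders):

# source /path/to/exploitable/exploitable.py
# file /path/to/target
# echo Crash\ sample:\ 'fuzzer000:id:000000'\n
# run -f collection/fuzzer000:id:000000
# exploitable
# echo Crash\ sample:\ 'fuzzer000:id:000001'\n
# run -f collection/fuzzer000:id:000001
# exploitable
# quit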
Example #8
def fetch_stats(config_settings, twitter_inst):
    doExit = False
    # { 'fuzzer_dir': (stat, old_stat) }
    stat_dict = dict()
    while not doExit:
        try:
            for fuzzer in config_settings["fuzz_dirs"]:
                stats = load_stats(fuzzer)

                if not stats:
                    continue

                sum_stats = summarize_stats(stats)

                try:
                    # stat_dict has already been initialized for fuzzer
                    #  old_stat <- last_stat
                    old_stats = stat_dict[fuzzer][0].copy()
                except KeyError:
                    # stat_dict has not yet been initialized for fuzzer
                    #  old_stat <- cur_stat
                    old_stats = sum_stats.copy()

                # initialize/update stat_dict
                stat_dict[fuzzer] = (sum_stats, old_stats)

                stat_change = diff_stats(sum_stats, old_stats)

                if not stat_change:
                    continue

                print(prettify_stat(sum_stats, stat_change, True))

                tweet = prettify_stat(sum_stats, stat_change, False)

                l = len(tweet)
                c = clr.LRD if l > 140 else clr.LGN
                print_ok("Tweeting status (%s%d" % (c, l) + clr.RST + " chars)...")

                try:
                    twitter_inst.statuses.update(status=shorten_tweet(tweet))
                except (twitter.TwitterHTTPError, URLError):
                    print_warn("Problem connecting to Twitter! Tweet not sent!")
                except Exception as e:
                    print_err("Sending tweet failed (Reason: " + clr.GRA + "%s" % e.__cause__ + clr.RST + ")")

            if float(config_settings["interval"]) < 0:
                doExit = True
            else:
                time.sleep(float(config_settings["interval"]) * 60)
        except KeyboardInterrupt:
            print("\b\b")
            print_ok("Aborted by user. Good bye!")
            doExit = True
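
diff_stats() is not part of this excerpt; a minimal sketch of what it plausibly computes, assuming both arguments are flat dicts of numeric counters:

def diff_stats(sum_stats, old_stats):
    # element-wise difference of two counter dicts;
    # None signals "no usable diff" to the caller above
    try:
        return {key: sum_stats[key] - old_stats[key] for key in sum_stats}
    except (KeyError, TypeError):
        return None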
Example #9
def generate_sample_list(list_filename, files_collected):
    list_filename = os.path.abspath(os.path.expanduser(list_filename))
    # open() raises on failure rather than returning a falsy value,
    # so guard the call with an exception handler (cf. Example #6)
    try:
        fd = open(list_filename, "w")
    except OSError:
        print_err("Could not create file list '%s'!" % list_filename)
        return

    for f in files_collected:
        fd.writelines("%s\n" % f)

    fd.close()
Example #10
def kill_session(session):
    if os.path.isfile("/tmp/afl_multicore.PGID.%s" % session):
        f = open("/tmp/afl_multicore.PGID.%s" % session)
        pgids = f.readlines()

        for pgid in pgids:
            try:
                print_ok("Killing jobs with PGID %s" % pgid.strip("\r\n"))
                os.killpg(int(pgid), signal.SIGTERM)
            except ProcessLookupError:
                print_warn("No processes with PGID %s found!" % (pgid.strip("\r\n")))

        f.close()
        os.remove("/tmp/afl_multicore.PGID.%s" % session)
    else:
        print_err("PGID file '/tmp/afl_multicore.PGID.%s' not found! Aborting!" % session)
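
The writer side of this PGID file appears in Example #20; condensed, every non-interactive afl-multicore run appends its process group id so kill_session() can later signal the whole group:

# condensed from Example #20 (session is a placeholder)
with open("/tmp/afl_multicore.PGID.%s" % session, "a") as f:
    f.write("%d\n" % os.getpgid(0))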
Example #11
def fetch_stats(config_settings, twitter_inst):
    stat_dict = dict()
    for fuzzer in config_settings['fuzz_dirs']:
        stats = load_stats(fuzzer)

        if not stats:
            continue

        sum_stats = summarize_stats(stats)

        try:
            with open('.afl_stats.{}'.format(os.path.basename(fuzzer)), 'r') as f:
                old_stats = json.load(f)
        except FileNotFoundError:
            old_stats = sum_stats.copy()

        # initialize/update stat_dict
        stat_dict[fuzzer] = (sum_stats, old_stats)

        stat_change = diff_stats(sum_stats, old_stats)

        with open('.afl_stats.{}'.format(os.path.basename(fuzzer)), 'w') as f:
            json.dump(sum_stats, f)

        print(prettify_stat(sum_stats, stat_change, True))

        tweet = prettify_stat(sum_stats, stat_change, False)

        l = len(tweet)
        c = clr.LRD if l > 140 else clr.LGN
        print_ok("Tweeting status (%s%d" % (c, l) + clr.RST + " chars)...")

        try:
            twitter_inst.statuses.update(status=shorten_tweet(tweet))
        except (twitter.TwitterHTTPError, URLError):
            print_warn("Problem connecting to Twitter! Tweet not sent!")
        except Exception as e:
            print_err("Sending tweet failed (Reason: " + clr.GRA + "%s" % e.__cause__ + clr.RST + ")")
Example #12
def read_config(config_file):
    global config_settings

    try:
        config_file = os.path.abspath(os.path.expanduser(config_file))

        if not os.path.isfile(config_file):
            print_err("Config file not found!")
            sys.exit(1)

        config = ConfigParser()
        config.read(config_file)
    except (MissingSectionHeaderError, UnicodeDecodeError):
        print_err("No valid configuration file specified!")
        sys.exit(1)

    try:
        config_settings["interval"] = config.get("core", "interval", raw=True)
        config_settings["twitter_consumer_key"] = config.get("twitter", "consumer_key", raw=True)
        config_settings["twitter_consumer_secret"] = config.get("twitter", "consumer_secret", raw=True)
        config_settings["twitter_creds_file"] = config.get("twitter", "credentials_file", raw=True)
    except NoSectionError as e:
        print_err(
            "No valid configuration file specified! Section '" + clr.GRA + "%s" % e.section + clr.RST + "' not found!"
        )
        sys.exit(1)
    except NoOptionError as e:
        print_err(
            "No valid configuration file specified! Option '"
            + clr.GRA
            + "%s.%s" % (e.section, e.option)
            + clr.RST
            + "' not found!"
        )
        sys.exit(1)

    exists = True
    i = 0
    while exists:
        try:
            config_settings["fuzz_dirs"].append(config.get("fuzzers", str(i), raw=True))
            i += 1
        except NoOptionError:
            exists = False

    return config_settings
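
A hypothetical config file matching this reader; note that the [fuzzers] section enumerates sync dirs under consecutive integer option names starting at 0, which is exactly what the while loop above collects:

# hypothetical afl-stats.conf:
# [core]
# interval = 30
#
# [twitter]
# consumer_key = <consumer_key>
# consumer_secret = <consumer_secret>
# credentials_file = ~/.afl-stats.creds
#
# [fuzzers]
# 0 = /fuzzing/sync/target00
# 1 = /fuzzing/sync/target01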
Example #13
def main(argv):
    show_info()

    parser = argparse.ArgumentParser(
        description=
        'afl-sync synchronizes fuzzer state directories between different \
locations. Remote transfers through rsync are supported and may use transport compression.',
        usage='afl-sync [-S SESSION] <cmd> <src_sync_dir> <dst_storage_dir>')

    parser.add_argument(
        'cmd',
        help=
        'Command to perform: push, pull or sync. Push transmits the local state from '
        '<src_sync_dir> to the destination <dst_storage_dir>. Pull fetches remote state(s) into '
        'the local synchronization dir appending the \'.sync\' extension. Sync performs a '
        'pull operation followed by a push.')
    parser.add_argument(
        'src_sync_dir',
        help=
        'Source afl synchronisation directory containing state directories of afl instances.'
    )
    parser.add_argument(
        'dst_storage_dir',
        help=
        'Destination directory used as fuzzer state storage. This shouldn\'t be an afl sync dir!'
    )
    parser.add_argument(
        '-S',
        '--session',
        dest='session',
        default=None,
        help=
        'Name of an afl-multicore session. If provided, only fuzzers belonging to '
        'the specified session will be synced with the destination. Otherwise state '
        'directories of all fuzzers inside the synchronisation dir will be exchanged. '
        'Directories ending on \'.sync\' will never be pushed back to the destination!'
    )

    args = parser.parse_args(argv[1:])

    args.cmd = args.cmd.lower()
    if args.cmd not in ['push', 'pull', 'sync']:
        print_err('Sorry, unknown command requested!')
        sys.exit(1)

    if not os.path.exists(args.src_sync_dir):
        if args.cmd in ['pull', 'sync']:
            print_warn(
                'Local afl sync dir does not exist! Will create it for you!')
            os.makedirs(args.src_sync_dir)
        else:
            print_err('Local afl sync dir does not exist!')
            sys.exit(1)

    server_config = {
        'remote_path': args.dst_storage_dir,
    }

    fuzzer_config = {
        'sync_dir': args.src_sync_dir,
        'session': args.session,
        'exclude_crashes': False,
        'exclude_hangs': False,
    }

    rsyncEngine = AflRsync(server_config, fuzzer_config)

    if args.cmd == 'push':
        rsyncEngine.push()
    elif args.cmd == 'pull':
        rsyncEngine.pull()
    elif args.cmd == 'sync':
        rsyncEngine.sync()
Example #14
def main(argv):
    show_info()

    parser = argparse.ArgumentParser(description="afl-multicore starts several parallel fuzzing jobs, that are run \
in the background. For fuzzer stats see 'out_dir/SESSION###/fuzzer_stats'!",
                                     usage="afl-multicore [-c config] [-h] [-s secs] [-t] [-v] <cmd> <jobs>")

    parser.add_argument("-c", "--config", dest="config_file",
                        help="afl-multicore config file (Default: afl-multicore.conf)!", default="afl-multicore.conf")
    parser.add_argument("-s", "--startup-delay", dest="startup_delay", default=None, help="Wait a configurable  amount \
of time after starting/resuming each afl instance to avoid interference during fuzzer startup. Provide wait time in \
seconds.")
    parser.add_argument("-t", "--test", dest="test_run", action="store_const", const=True, default=False, help="Perform \
a test run by starting a single afl instance in interactive mode using a test output directory.")
    parser.add_argument("-v", "--verbose", dest="verbose", action="store_const", const=True,
                        default=False, help="For debugging purposes do not redirect stderr/stdout of the created \
subprocesses to /dev/null (Default: off). Check 'nohup.out' for further outputs.")
    parser.add_argument("cmd", help="afl-multicore command to execute: start, resume, add.")
    parser.add_argument("jobs", help="Number of instances to start/resume/add.")

    args = parser.parse_args(argv[1:])

    conf_settings = read_config(os.path.abspath(os.path.expanduser(args.config_file)))

    if args.test_run:
        signal.signal(signal.SIGINT, sigint_handler)
        conf_settings["output"] += "_test"
        conf_settings["interactive"] = False
        args.jobs = 1
        args.cmd = "start"

    if args.cmd != "resume":
        conf_settings["input"] = os.path.abspath(os.path.expanduser(conf_settings["input"]))
        if not os.path.exists(conf_settings["input"]):
            print_err("No valid directory provided for <INPUT_DIR>!")
            sys.exit(1)
    else:
        conf_settings["input"] = "-"

    conf_settings["output"] = os.path.abspath(os.path.expanduser(conf_settings["output"]))

    slave_off, slave_start = get_slave_count(args.cmd, conf_settings)

    if "interactive" in conf_settings and conf_settings["interactive"]:
        if not check_screen():
            print_err("When using screen mode, please run afl-multicore from inside a screen session!")
            sys.exit(1)

        if "environment" in conf_settings:
            setup_screen(int(args.jobs), conf_settings["environment"])
        else:
            setup_screen(int(args.jobs), [])

    target_cmd = build_target_cmd(conf_settings)
    master_cmd = build_master_cmd(conf_settings, target_cmd)

    if args.test_run:
        with subprocess.Popen(master_cmd.split()) as test_proc:
            print_ok("Test instance started (PID: %d)" % test_proc.pid)

    if "slave_only" not in conf_settings or ("slave_only" in conf_settings and not conf_settings["slave_only"]):
        print_ok("Starting master instance...")

        if "interactive" in conf_settings and conf_settings["interactive"]:
            subprocess.Popen("screen -X select 1".split())
            screen_cmd = ["screen", "-X", "eval", "exec %s" % master_cmd, "next"]
            subprocess.Popen(screen_cmd)
            print(" Master 000 started inside new screen window")
        else:
            if not args.verbose:
                master = subprocess.Popen(" ".join(['nohup', master_cmd]).split(), stdout=subprocess.DEVNULL,
                                          stderr=subprocess.DEVNULL)
            else:
                master = subprocess.Popen(" ".join(['nohup', master_cmd]).split())
            print(" Master 000 started (PID: %d)" % master.pid)

        if args.startup_delay is not None:
            time.sleep(int(args.startup_delay))

    print_ok("Starting slave instances...")
    num_slaves = int(args.jobs)+slave_start-slave_off
    for i in range(slave_start, num_slaves, 1):
        slave_cmd = build_slave_cmd(conf_settings, i, target_cmd)

        if "interactive" in conf_settings and conf_settings["interactive"]:
            subprocess.Popen(["screen", "-X", "select", "%d" % (i + 1)])
            screen_cmd = ["screen", "-X", "eval", "exec %s" % slave_cmd, "next"]
            subprocess.Popen(screen_cmd)
            print(" Slave %03d started inside new screen window" % i)
        else:
            if not args.verbose:
                slave = subprocess.Popen(" ".join(['nohup', slave_cmd]).split(), stdout=subprocess.DEVNULL,
                                         stderr=subprocess.DEVNULL)
            else:
                slave = subprocess.Popen(" ".join(['nohup', slave_cmd]).split())
            print(" Slave %03d started (PID: %d)" % (i, slave.pid))

        if args.startup_delay is not None and i < (num_slaves-1):
            time.sleep(int(args.startup_delay))

    write_pgid_file(conf_settings)
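
A hypothetical command line for this entry point (config file, startup delay and job count are placeholders):

main(["afl-multicore", "-c", "afl-multicore.conf", "-s", "2", "start", "8"])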
Example #15
def generate_gdb_exploitable_script(script_filename,
                                    sample_index,
                                    target_cmd,
                                    script_id=0,
                                    intermediate=False,
                                    asan_mode=False):
    target_cmd = target_cmd.split()
    gdb_target_binary = target_cmd[0]
    gdb_run_cmd = " ".join(target_cmd[1:])

    if not intermediate:
        script_filename = os.path.abspath(os.path.expanduser(script_filename))
        print_ok(
            "Generating final gdb+exploitable script '%s' for %d samples..." %
            (script_filename, len(sample_index.outputs())))
    else:
        script_filename = os.path.abspath(
            os.path.expanduser("%s.%d" % (script_filename, script_id)))
        print_ok(
            "Generating intermediate gdb+exploitable script '%s' for %d samples..."
            % (script_filename, len(sample_index.outputs())))

    # source the bundled exploitable.py unless ~/.gdbinit already does so
    gdb_exploitable_path = None
    gdbinit = os.path.expanduser("~/.gdbinit")
    gdbinit_sources_exploitable = False
    if os.path.exists(gdbinit):
        with open(gdbinit, "rb") as gdbinit_file:
            gdbinit_sources_exploitable = b"exploitable.py" in gdbinit_file.read()
    if not gdbinit_sources_exploitable:
        gdb_exploitable_path = os.path.join(exploitable.__path__[0],
                                            "exploitable.py")

    try:
        fd = open(script_filename, "w")

        # <script header>
        # source exploitable.py if necessary
        if gdb_exploitable_path:
            fd.writelines("source %s\n" % gdb_exploitable_path)

        if asan_mode:
            fd.writelines("set pagination off\n")
            fd.writelines("handle SIGSEGV nostop\n")
            asan_logname = '/tmp/{}.{}'.format(
                asan_log_tmpstring, ''.join(
                    random.choice(string.ascii_lowercase + string.digits)
                    for _ in range(10)))

        # load executable
        fd.writelines("file %s\n" % gdb_target_binary)
        # </script_header>

        # fill script with content
        for f in sample_index.index:
            fd.writelines("echo Crash\ sample:\ '%s'\\n\n" % f['output'])

            if not asan_mode:
                if not stdin_mode(target_cmd):
                    run_cmd = "run " + gdb_run_cmd + "\n"
                else:
                    run_cmd = "run " + gdb_run_cmd + "< @@" + "\n"
            else:
                if not stdin_mode(target_cmd):
                    run_cmd = "run " + gdb_run_cmd + " 2> {}".format(
                        asan_logname) + "\n"
                else:
                    run_cmd = "run " + gdb_run_cmd + "< @@" + " 2> {}".format(
                        asan_logname) + "\n"

            if intermediate:
                run_cmd = run_cmd.replace("@@", "'{}'".format(f['input']))
            else:
                run_cmd = run_cmd.replace(
                    "@@",
                    os.path.join(sample_index.output_dir,
                                 "'{}'".format(f['output'])))

            fd.writelines(run_cmd)
            if not asan_mode:
                fd.writelines("exploitable\n")
            else:
                fd.writelines("exploitable -a {}\n".format(asan_logname))

        # <script_footer>
        fd.writelines("quit")
        # </script_footer>

        fd.close()
    except (FileExistsError, PermissionError):
        print_err("Could not open script file '%s' for writing!" %
                  script_filename)
Example #16
def run(serial_number, cmds):
    cmd = ['adb', '-s', serial_number] + cmds.split(" ")
    ret = subprocess.call(' '.join(cmd), shell=True)
    if ret != 0:
        print_err("adb command failed to run: %s!" % " ".join(cmd))
        sys.exit(1)
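
A minimal no-shell sketch (an alternative, not the project's code): passing the argument list directly avoids quoting problems, e.g. with file names containing spaces.

def run_no_shell(serial_number, cmds):
    cmd = ['adb', '-s', serial_number] + cmds.split(" ")
    # the list form bypasses the shell entirely
    if subprocess.call(cmd) != 0:
        print_err("adb command failed to run: %s!" % " ".join(cmd))
        sys.exit(1)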
Example #17
def read_config(config_file):
    try:
        config_file = os.path.abspath(os.path.expanduser(config_file))

        if not os.path.isfile(config_file):
            print_err("Config file not found!")
            sys.exit(1)

        config = ConfigParser()
        # override optionxform so config file items are not converted to lower case
        config.optionxform = str
        config.read(config_file)
    except (MissingSectionHeaderError, UnicodeDecodeError):
        print_err("No valid configuration file specified!")
        sys.exit(1)

    try:
        conf_settings = dict()

        # Get required settings
        conf_settings["input"] = config.get("afl.dirs", "input", raw=True)
        conf_settings["output"] = config.get("afl.dirs", "output", raw=True)

        conf_settings["target"] = config.get("target", "target", raw=True)
        conf_settings["cmdline"] = config.get("target", "cmdline", raw=True)

        # Get optional settings
        if config.has_option("afl.ctrl", "file"):
            conf_settings["file"] = config.get("afl.ctrl", "file", raw=True)
        else:
            conf_settings["file"] = None

        if config.has_option("afl.ctrl", "timeout"):
            conf_settings["timeout"] = config.get("afl.ctrl", "timeout", raw=True)
        else:
            conf_settings["timeout"] = None

        if config.has_option("afl.ctrl", "afl_margs"):
            conf_settings["afl_margs"] = config.get("afl.ctrl", "afl_margs", raw=True)
        else:
            conf_settings["afl_margs"] = None

        if config.has_option("afl.ctrl", "mem_limit"):
            conf_settings["mem_limit"] = config.get("afl.ctrl", "mem_limit", raw=True)
        else:
            conf_settings["mem_limit"] = None

        if config.has_option("afl.ctrl", "qemu"):
            conf_settings["qemu"] = config.get("afl.ctrl", "qemu", raw=True)
        else:
            conf_settings["qemu"] = None

        if config.has_option("afl.behavior", "dirty"):
            conf_settings["dirty"] = config.get("afl.behavior", "dirty", raw=True)
        else:
            conf_settings["dirty"] = None

        if config.has_option("afl.behavior", "dumb"):
            conf_settings["dumb"] = config.get("afl.behavior", "dumb", raw=True)
        else:
            conf_settings["dumb"] = None

        if config.has_option("afl.behavior", "dict"):
            conf_settings["dict"] = config.get("afl.behavior", "dict", raw=True)
        else:
            conf_settings["dict"] = None

        if config.has_option("job", "session"):
            conf_settings["session"] = config.get("job", "session", raw=True)
        else:
            conf_settings["session"] = "SESSION"

        if config.has_option("job", "slave_only"):
            if config.get("job", "slave_only", raw=True) == "on":
                conf_settings["slave_only"] = True
            else:
                conf_settings["slave_only"] = False
        else:
            conf_settings["slave_only"] = False

        if config.has_option("job", "interactive"):
            if config.get("job", "interactive", raw=True) == "on":
                conf_settings["interactive"] = True
            else:
                conf_settings["interactive"] = False
        else:
            conf_settings["interactive"] = False

        if config.has_section("environment"):
            environment = []
            env_list = config.options("environment")
            for env in env_list:
                environment.append((env, config.get("environment", env, raw=True)))
        else:
            environment = None
    except NoOptionError as e:
        print_err("No valid configuration file specified! Option '" + clr.GRA + "%s.%s" % (e.section, e.option) +
                  clr.RST + "' not found!")
        sys.exit(1)
    except NoSectionError as e:
        print_err("No valid configuration file specified! Section '" + clr.GRA + "%s" % e.section + clr.RST +
                  "' not found!")
        sys.exit(1)

    return conf_settings, environment
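
The optional lookups above all follow one pattern; a compact helper sketch (not part of the original source) that could replace the repeated has_option/get pairs:

def get_opt(config, section, option, default=None):
    # return the raw option value, or the default if the option is absent
    if config.has_option(section, option):
        return config.get(section, option, raw=True)
    return default

# e.g.: conf_settings["timeout"] = get_opt(config, "afl.ctrl", "timeout")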
Example #18
def main(argv):
    show_info()

    parser = argparse.ArgumentParser(
        description=
        "afl-minimize performs several optimization steps to reduce the size\n \
of an afl-fuzz corpus.",
        usage=
        "afl-minimize [-c COLLECTION_DIR [--cmin [opts]] [--tmin [opts]]] [--reseed]\n \
                   [-d] [-h] [-j] sync_dir -- target_cmd\n")

    parser.add_argument(
        "-c",
        "--collect",
        dest="collection_dir",
        help=
        "Collect all samples from the synchronisation dir and store them in the collection dir.",
        default=None)
    parser.add_argument(
        "--cmin",
        dest="invoke_cmin",
        action="store_const",
        const=True,
        default=False,
        help="Run afl-cmin on collection dir. Has no effect without '-c'.")
    parser.add_argument("--cmin-mem-limit",
                        dest="cmin_mem_limit",
                        default=None,
                        help="Set memory limit for afl-cmin.")
    parser.add_argument("--cmin-timeout",
                        dest="cmin_timeout",
                        default=None,
                        help="Set timeout for afl-cmin.")
    parser.add_argument("--reseed",
                        dest="reseed",
                        default=False,
                        action="store_const",
                        const=True,
                        help="Reseed afl-fuzz with the \
collected (and optimized) corpus. This replaces all sync_dir queues with the newly generated corpus."
                        )
    parser.add_argument(
        "--tmin",
        dest="invoke_tmin",
        action="store_const",
        const=True,
        default=False,
        help=
        "Run afl-tmin on minimized collection dir if used together with '--cmin'\
or on unoptimized collection dir otherwise. Has no effect without '-c'.")
    parser.add_argument("--tmin-mem-limit",
                        dest="tmin_mem_limit",
                        default=None,
                        help="Set memory limit for afl-tmin.")
    parser.add_argument("--tmin-timeout",
                        dest="tmin_timeout",
                        default=None,
                        help="Set timeout for afl-tmin.")
    parser.add_argument(
        "-d",
        "--dry-run",
        dest="dry_run",
        action="store_const",
        const=True,
        default=False,
        help="Perform dry-run on collection dir, if '-c' is provided or on \
synchronisation dir otherwise. Dry-run will move intermittent crashes out of the corpus."
    )
    parser.add_argument(
        "-j",
        "--threads",
        dest="num_threads",
        default=1,
        help=
        "Enable parallel dry-run and t-minimization step by specifying the number of threads \
afl-minimize will utilize.")
    parser.add_argument(
        "sync_dir",
        help=
        "afl synchronisation directory containing multiple fuzzers and their queues."
    )
    parser.add_argument(
        "target_cmd",
        nargs="+",
        help="Path to the target binary and its command line arguments. \
Use '@@' to specify crash sample input file position (see afl-fuzz usage).")

    args = parser.parse_args(argv[1:])

    if not args.collection_dir and not args.dry_run:
        print_err("No operation requested. You should at least provide '-c'")
        print_err(
            "for sample collection or '-d' for a dry-run. Use '--help' for")
        print_err("usage instructions or checkout README.md for details.")
        return

    sync_dir = os.path.abspath(os.path.expanduser(args.sync_dir))
    if not os.path.exists(sync_dir):
        print_err("No valid directory provided for <SYNC_DIR>!")
        return

    args.target_cmd = " ".join(args.target_cmd).split()
    args.target_cmd[0] = os.path.abspath(os.path.expanduser(
        args.target_cmd[0]))
    if not os.path.exists(args.target_cmd[0]):
        print_err("Target binary not found!")
        return
    args.target_cmd = " ".join(args.target_cmd)

    threads = int(args.num_threads)

    if args.collection_dir:
        out_dir = os.path.abspath(os.path.expanduser(args.collection_dir))
        if not os.path.exists(out_dir) or len(os.listdir(out_dir)) == 0:
            os.makedirs(out_dir, exist_ok=True)

            print_ok("Looking for fuzzing queues in '%s'." % sync_dir)
            fuzzers = afl_collect.get_fuzzer_instances(sync_dir,
                                                       crash_dirs=False)

            # collect samples from fuzzer queues
            print_ok("Found %d fuzzers, collecting samples." % len(fuzzers))
            sample_index = afl_collect.build_sample_index(
                sync_dir, out_dir, fuzzers, omit_fuzzer_name=True)

            print_ok("Successfully indexed %d samples." %
                     len(sample_index.index))
            print_ok("Copying %d samples into collection directory..." %
                     len(sample_index.index))
            afl_collect.copy_samples(sample_index)
        else:
            print_warn("Collection directory exists and is not empty!")
            print_warn("Skipping collection step...")

        if args.invoke_cmin:
            # invoke cmin on collection
            print_ok("Executing: afl-cmin -i %s -o %s.cmin -- %s" %
                     (out_dir, out_dir, args.target_cmd))
            invoke_cmin(out_dir,
                        "%s.cmin" % out_dir,
                        args.target_cmd,
                        mem_limit=args.cmin_mem_limit,
                        timeout=args.cmin_timeout)
            if args.invoke_tmin:
                # invoke tmin on minimized collection
                print_ok(
                    "Executing: afl-tmin -i %s.cmin/* -o %s.cmin.tmin/* -- %s"
                    % (out_dir, out_dir, args.target_cmd))
                tmin_num_samples, tmin_samples = afl_collect.get_samples_from_dir(
                    "%s.cmin" % out_dir, abs_path=True)
                invoke_tmin(tmin_samples,
                            "%s.cmin.tmin" % out_dir,
                            args.target_cmd,
                            num_threads=threads,
                            mem_limit=args.tmin_mem_limit,
                            timeout=args.tmin_timeout)
        elif args.invoke_tmin:
            # invoke tmin on collection
            print_ok("Executing: afl-tmin -i %s/* -o %s.tmin/* -- %s" %
                     (out_dir, out_dir, args.target_cmd))
            tmin_num_samples, tmin_samples = afl_collect.get_samples_from_dir(
                out_dir, abs_path=True)
            invoke_tmin(tmin_samples,
                        "%s.tmin" % out_dir,
                        args.target_cmd,
                        num_threads=threads,
                        mem_limit=args.tmin_mem_limit,
                        timeout=args.tmin_timeout)
        if args.dry_run:
            # invoke dry-run on collected/minimized corpus
            if args.invoke_cmin and args.invoke_tmin:
                print_ok("Performing dry-run in %s.cmin.tmin..." % out_dir)
                print_warn(
                    "Be patient! Depending on the corpus size this step can take hours..."
                )
                dryrun_num_samples, dryrun_samples = afl_collect.get_samples_from_dir(
                    "%s.cmin.tmin" % out_dir, abs_path=True)
                invoke_dryrun(dryrun_samples,
                              "%s.cmin.tmin.crashes" % out_dir,
                              "%s.cmin.tmin.hangs" % out_dir,
                              args.target_cmd,
                              num_threads=threads)
            elif args.invoke_cmin:
                print_ok("Performing dry-run in %s.cmin..." % out_dir)
                print_warn(
                    "Be patient! Depending on the corpus size this step can take hours..."
                )
                dryrun_num_samples, dryrun_samples = afl_collect.get_samples_from_dir(
                    "%s.cmin" % out_dir, abs_path=True)
                invoke_dryrun(dryrun_samples,
                              "%s.cmin.crashes" % out_dir,
                              "%s.cmin.hangs" % out_dir,
                              args.target_cmd,
                              num_threads=threads)
            elif args.invoke_tmin:
                print_ok("Performing dry-run in %s.tmin..." % out_dir)
                print_warn(
                    "Be patient! Depending on the corpus size this step can take hours..."
                )
                dryrun_num_samples, dryrun_samples = afl_collect.get_samples_from_dir(
                    "%s.tmin" % out_dir, abs_path=True)
                invoke_dryrun(dryrun_samples,
                              "%s.tmin.crashes" % out_dir,
                              "%s.tmin.hangs" % out_dir,
                              args.target_cmd,
                              num_threads=threads)
            else:
                print_ok("Performing dry-run in %s..." % out_dir)
                print_warn(
                    "Be patient! Depending on the corpus size this step can take hours..."
                )
                dryrun_num_samples, dryrun_samples = afl_collect.get_samples_from_dir(
                    out_dir, abs_path=True)
                invoke_dryrun(dryrun_samples,
                              "%s.crashes" % out_dir,
                              "%s.hangs" % out_dir,
                              args.target_cmd,
                              num_threads=threads)
        elif args.reseed:
            optimized_corpus = out_dir

            if args.invoke_cmin:
                optimized_corpus = optimized_corpus + ".cmin"

            if args.invoke_tmin:
                optimized_corpus = optimized_corpus + ".tmin"

            afl_reseed(sync_dir, optimized_corpus)
    else:
        if args.dry_run:
            print_ok("Looking for fuzzing queues in '%s'." % sync_dir)
            fuzzers = afl_collect.get_fuzzer_instances(sync_dir,
                                                       crash_dirs=False)
            print_ok("Found %d fuzzers, performing dry run." % len(fuzzers))
            print_warn(
                "Be patient! Depending on the corpus size this step can take hours..."
            )
            # invoke dry-run on original corpus
            for f in fuzzers:
                for q_dir in f[1]:
                    q_dir_complete = os.path.join(sync_dir, f[0], q_dir)
                    print_ok("Processing %s..." % q_dir_complete)

                    dryrun_num_samples, dryrun_samples = afl_collect.get_samples_from_dir(
                        q_dir_complete, abs_path=True)
                    invoke_dryrun(dryrun_samples,
                                  os.path.join(sync_dir, f[0], "crashes"),
                                  os.path.join(sync_dir, f[0], "hangs"),
                                  args.target_cmd,
                                  num_threads=threads)
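
A hypothetical invocation combining collection, both minimization steps and a dry-run (paths and target are placeholders; '--' separates the target command as in the usage string):

main(["afl-minimize", "-c", "./collection", "--cmin", "--tmin", "-d",
      "-j", "4", "./sync", "--", "./target", "@@"])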
Example #19
def read_config(config_file):
    try:
        config_file = os.path.abspath(os.path.expanduser(config_file))

        if not os.path.isfile(config_file):
            print_err("Config file not found!")
            sys.exit(1)

        config = ConfigParser()
        # override optionxform so config file items are not converted to lower case
        config.optionxform = str
        config.read(config_file)
    except (MissingSectionHeaderError, UnicodeDecodeError):
        print_err("No valid configuration file specified!")
        sys.exit(1)

    try:
        conf_settings = dict()

        # Get required settings
        conf_settings["input"] = config.get("afl.dirs", "input", raw=True)
        conf_settings["output"] = config.get("afl.dirs", "output", raw=True)

        conf_settings["target"] = config.get("target", "target", raw=True)
        conf_settings["cmdline"] = config.get("target", "cmdline", raw=True)

        # Get optional settings
        if config.has_option("afl.ctrl", "file"):
            conf_settings["file"] = config.get("afl.ctrl", "file", raw=True)
        else:
            conf_settings["file"] = None

        if config.has_option("afl.ctrl", "timeout"):
            conf_settings["timeout"] = config.get("afl.ctrl",
                                                  "timeout",
                                                  raw=True)
        else:
            conf_settings["timeout"] = None

        if config.has_option("afl.ctrl", "afl_margs"):
            conf_settings["afl_margs"] = config.get("afl.ctrl",
                                                    "afl_margs",
                                                    raw=True)
        else:
            conf_settings["afl_margs"] = None

        if config.has_option("afl.ctrl", "mem_limit"):
            conf_settings["mem_limit"] = config.get("afl.ctrl",
                                                    "mem_limit",
                                                    raw=True)
        else:
            conf_settings["mem_limit"] = None

        if config.has_option("afl.ctrl", "cpu_affinity"):
            conf_settings["cpu_affinity"] = config.get("afl.ctrl",
                                                       "cpu_affinity",
                                                       raw=True).split()
        else:
            conf_settings["cpu_affinity"] = None

        if config.has_option("afl.ctrl", "qemu"):
            conf_settings["qemu"] = config.get("afl.ctrl", "qemu", raw=True)
        else:
            conf_settings["qemu"] = None

        if config.has_option("afl.behavior", "dirty"):
            conf_settings["dirty"] = config.get("afl.behavior",
                                                "dirty",
                                                raw=True)
        else:
            conf_settings["dirty"] = None

        if config.has_option("afl.behavior", "dumb"):
            conf_settings["dumb"] = config.get("afl.behavior",
                                               "dumb",
                                               raw=True)
        else:
            conf_settings["dumb"] = None

        if config.has_option("afl.behavior", "dict"):
            conf_settings["dict"] = config.get("afl.behavior",
                                               "dict",
                                               raw=True)
        else:
            conf_settings["dict"] = None

        if config.has_option("job", "session"):
            conf_settings["session"] = config.get("job", "session", raw=True)
        else:
            conf_settings["session"] = "SESSION"

        if config.has_option("job", "slave_only"):
            if config.get("job", "slave_only", raw=True) == "on":
                conf_settings["slave_only"] = True
            else:
                conf_settings["slave_only"] = False
        else:
            conf_settings["slave_only"] = False

        if config.has_option("job", "interactive"):
            if config.get("job", "interactive", raw=True) == "on":
                conf_settings["interactive"] = True
            else:
                conf_settings["interactive"] = False
        else:
            conf_settings["interactive"] = False

        if config.has_section("environment"):
            environment = []
            env_list = config.options("environment")
            for env in env_list:
                environment.append(
                    (env, config.get("environment", env, raw=True)))
        else:
            environment = None
    except NoSectionError as e:
        print_err("No valid configuration file specified! Section '" +
                  clr.GRA + "%s" % e.section + clr.RST + "' not found!")
        sys.exit(1)
    except NoOptionError as e:
        print_err("No valid configuration file specified! Option '" + clr.GRA +
                  "%s.%s" % (e.section, e.option) + clr.RST + "' not found!")
        sys.exit(1)

    return conf_settings, environment
Example #20
def main(argv):
    show_info()

    parser = argparse.ArgumentParser(description="afl-multicore starts several parallel fuzzing jobs, that are run \
in the background. For fuzzer stats see 'out_dir/SESSION###/fuzzer_stats'!",
                                     usage="afl-multicore [-c config] [-h] [-t] [-v] <cmd> <jobs>")

    parser.add_argument("-c", "--config", dest="config_file",
                        help="afl-multicore config file (Default: afl-multicore.conf)!", default="afl-multicore.conf")
    parser.add_argument("-t", "--test", dest="test_run", action="store_const", const=True, default=False, help="Perform \
a test run by starting a single afl instance in interactive mode using a test output directory.")
    parser.add_argument("-v", "--verbose", dest="verbose", action="store_const", const=True,
                        default=False, help="For debugging purposes do not redirect stderr/stdout of the created \
subprocesses to /dev/null (Default: off). Check 'nohup.out' for further outputs.")
    parser.add_argument("cmd", help="afl-multicore command to execute: start, resume, add.")
    parser.add_argument("jobs", help="Number of instances to start/resume/add.")

    args = parser.parse_args(argv[1:])

    conf_settings, environment = read_config(os.path.abspath(os.path.expanduser(args.config_file)))

    if args.test_run:
        signal.signal(signal.SIGINT, sigint_handler)
        conf_settings["output"] += "_test"
        conf_settings["interactive"] = False
        args.jobs = 1
        args.cmd = "start"

    if args.cmd != "resume":
        conf_settings["input"] = os.path.abspath(os.path.expanduser(conf_settings["input"]))
        if not os.path.exists(conf_settings["input"]):
            print_err("No valid directory provided for <INPUT_DIR>!")
            return
    else:
        conf_settings["input"] = "-"

    conf_settings["output"] = os.path.abspath(os.path.expanduser(conf_settings["output"]))

    if args.cmd == "add":
        slave_start = 0
        slave_off = 0
        dirs = os.listdir(conf_settings["output"])
        for d in dirs:
            if os.path.isdir(os.path.abspath(os.path.join(conf_settings["output"], d))) \
                    and conf_settings["session"] in d:
                slave_start += 1
        conf_settings["slave_only"] = True
    else:
        slave_start = 1
        slave_off = 1

    target_cmd = [conf_settings["target"], conf_settings["cmdline"]]
    target_cmd = " ".join(target_cmd).split()
    target_cmd[0] = os.path.abspath(os.path.expanduser(target_cmd[0]))
    if not os.path.exists(target_cmd[0]):
        print_err("Target binary not found!")
        return
    target_cmd = " ".join(target_cmd)

    if conf_settings["interactive"]:
        if not check_screen():
            print_err("When using screen mode, please run afl-multicore from inside a screen session!")
            return

        setup_screen(int(args.jobs), environment)

    # compile command-line for master
    # $ afl-fuzz -i <input_dir> -o <output_dir> -M <session_name>000 <afl_args> \
    #   </path/to/target.bin> <target_args>
    master_cmd = [afl_path] + afl_cmdline_from_config(conf_settings)
    master_cmd += ["-M", "%s000" % conf_settings["session"], "--", target_cmd]
    master_cmd = " ".join(master_cmd)

    if args.test_run:
        with subprocess.Popen(master_cmd.split()) as test_proc:
            print_ok("Test instance started (PID: %d)" % test_proc.pid)

    if not conf_settings["slave_only"]:
        print_ok("Starting master instance...")

        if not conf_settings["interactive"]:
            if not args.verbose:
                master = subprocess.Popen(" ".join(['nohup', master_cmd]).split(), stdout=subprocess.DEVNULL,
                                          stderr=subprocess.DEVNULL)
            else:
                master = subprocess.Popen(" ".join(['nohup', master_cmd]).split())
            print(" Master 000 started (PID: %d)" % master.pid)
        else:
            subprocess.Popen("screen -X select 1".split())
            screen_cmd = ["screen", "-X", "eval", "exec %s" % master_cmd, "next"]
            subprocess.Popen(screen_cmd)
            print(" Master 000 started inside new screen window")

    # compile command-line for slaves
    print_ok("Starting slave instances...")
    for i in range(slave_start, int(args.jobs)+slave_start-slave_off, 1):
        # $ afl-fuzz -i <input_dir> -o <output_dir> -S <session_name>NNN <afl_args> \
        #   </path/to/target.bin> <target_args>
        slave_cmd = [afl_path] + afl_cmdline_from_config(conf_settings)
        slave_cmd += ["-S", "%s%03d" % (conf_settings["session"], i), "--", target_cmd]
        slave_cmd = " ".join(slave_cmd)

        if not conf_settings["interactive"]:
            if not args.verbose:
                slave = subprocess.Popen(" ".join(['nohup', slave_cmd]).split(), stdout=subprocess.DEVNULL,
                                         stderr=subprocess.DEVNULL)
            else:
                slave = subprocess.Popen(" ".join(['nohup', slave_cmd]).split())
            print(" Slave %03d started (PID: %d)" % (i, slave.pid))
        else:
            subprocess.Popen(["screen", "-X", "select", "%d" % (i+1)])
            screen_cmd = ["screen", "-X", "eval", "exec %s" % slave_cmd, "next"]
            subprocess.Popen(screen_cmd)
            print(" Slave %03d started inside new screen window" % i)

    print("")
    if not conf_settings["interactive"]:
        # write/append PGID to file /tmp/afl_multicore.PGID.<SESSION>
        f = open("/tmp/afl_multicore.PGID.%s" % conf_settings["session"], "a")
        if f.writable():
            f.write("%d\n" % os.getpgid(0))
        f.close()
        print_ok("For progress info check: %s/%sxxx/fuzzer_stats!" % (conf_settings["output"],
                                                                      conf_settings["session"]))
    else:
        print_ok("Check the newly created screen windows!")
Example #21
# afl-collect global settings
global_crash_subdirs = "crashes"
global_queue_subdirs = "queue"
global_exclude_files = [
    "README.txt",
]

fuzzer_stats_filename = "fuzzer_stats"

# gdb settings

# Path to gdb binary
gdb_binary = shutil.which("gdb")
if gdb_binary is None:
    print_err("gdb binary not found!")
    sys.exit(1)

# Path to 'exploitable.py' (https://github.com/rc0r/exploitable)
# Set to None if you already source exploitable.py in your .gdbinit file!
gdb_exploitable_path = None


def show_info():
    print(clr.CYA + "afl-collect " + clr.BRI + "%s" % afl_utils.__version__ + clr.RST + " by %s" % afl_utils.__author__)
    print("Crash sample collection and processing utility for afl-fuzz.")
    print("")


def get_fuzzer_instances(sync_dir, crash_dirs=True):
    if not os.path.isabs(sync_dir):
Example #22
def find_fuzzer_binary(fuzzer_bin):
    afl_path = shutil.which(fuzzer_bin)
    if afl_path is None:
        print_err("Fuzzer binary not found!")
        sys.exit(1)
    return afl_path
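
A hypothetical call: resolve afl-fuzz from the PATH or exit with an error.

afl_path = find_fuzzer_binary("afl-fuzz")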
Example #23
# afl-collect global settings
global_crash_subdirs = "crashes"
global_queue_subdirs = "queue"
global_exclude_files = [
    "README.txt",
]

fuzzer_stats_filename = "fuzzer_stats"

# gdb settings

# Path to gdb binary
gdb_binary = shutil.which("gdb")
if gdb_binary is None:
    print_err("gdb binary not found!")
    sys.exit(1)

# Path to 'exploitable.py' (https://github.com/rc0r/exploitable)
# Set to None if you already source exploitable.py in your .gdbinit file!
gdb_exploitable_path = None


# afl-collect database table spec
db_table_spec = """`Sample` TEXT PRIMARY KEY NOT NULL, `Classification` TEXT NOT NULL,
`Classification_Description` TEXT NOT NULL, `Hash` TEXT, `User_Comment` TEXT"""


def show_info():
    print(clr.CYA + "afl-collect " + clr.BRI + "%s" % afl_utils.__version__ + clr.RST + " by %s" % afl_utils.__author__)
    print("Crash sample collection and processing utility for afl-fuzz.")
Example #24
def main(argv):
    show_info()

    parser = argparse.ArgumentParser(description="afl-collect copies all crash sample files from an afl sync dir used \
by multiple fuzzers when fuzzing in parallel into a single location providing easy access for further crash analysis.",
                                     usage="afl-collect [-d DATABASE] [-e|-g GDB_EXPL_SCRIPT_FILE] [-f LIST_FILENAME]\n \
[-h] [-j THREADS] [-m] [-r] [-rr] sync_dir collection_dir -- target_cmd")

    parser.add_argument("sync_dir", help="afl synchronisation directory crash samples will be collected from.")
    parser.add_argument("collection_dir",
                        help="Output directory that will hold a copy of all crash samples and other generated files. \
Existing files in the collection directory will be overwritten!")
    parser.add_argument("-d", "--database", dest="database_file", help="Submit sample data into an sqlite3 database (\
only when used together with '-e'). afl-collect skips processing of samples already found in existing database.",
                        default=None)
    parser.add_argument("-e", "--execute-gdb-script", dest="gdb_expl_script_file",
                        help="Generate and execute a gdb+exploitable script after crash sample collection for crash \
classification. (Like option '-g', plus script execution.)",
                        default=None)
    parser.add_argument("-f", "--filelist", dest="list_filename", default=None,
                        help="Writes all collected crash sample filenames into a file in the collection directory.")
    parser.add_argument("-g", "--generate-gdb-script", dest="gdb_script_file",
                        help="Generate gdb script to run 'exploitable.py' on all collected crash samples. Generated \
script will be placed into collection directory.", default=None)
    parser.add_argument("-j", "--threads", dest="num_threads", default=1,
                        help="Enable parallel analysis by specifying the number of threads afl-collect will utilize.")
    parser.add_argument("-m", "--minimize-filenames", dest="min_filename", action="store_const", const=True,
                        default=False, help="Minimize crash sample file names by only keeping fuzzer name and ID.")
    parser.add_argument("-r", "--remove-invalid", dest="remove_invalid", action="store_const", const=True,
                        default=False, help="Verify collected crash samples and remove samples that do not lead to \
crashes (runs 'afl-vcrash.py -r' on collection directory). This step is done prior to any script file \
or file list generation/execution.")
    parser.add_argument("-rr", "--remove-unexploitable", dest="remove_unexploitable", action="store_const", const=True,
                        default=False, help="Remove crash samples that have an exploitable classification of \
'NOT_EXPLOITABLE' or 'PROBABLY_NOT_EXPLOITABLE'. Sample file removal will take place after gdb+exploitable \
script execution. Has no effect without '-e'.")
    parser.add_argument("target_cmd", nargs="+", help="Path to the target binary and its command line arguments. \
Use '@@' to specify crash sample input file position (see afl-fuzz usage).")

    args = parser.parse_args(argv[1:])

    sync_dir = os.path.abspath(os.path.expanduser(args.sync_dir))
    if not os.path.exists(sync_dir):
        print_err("No valid directory provided for <SYNC_DIR>!")
        return

    if args.collection_dir:
        out_dir = os.path.abspath(os.path.expanduser(args.collection_dir))
        if not os.path.exists(out_dir):
            os.makedirs(out_dir, exist_ok=True)
    else:
        print_err("No valid directory provided for <OUT_DIR>!")
        return

    args.target_cmd = " ".join(args.target_cmd).split()
    args.target_cmd[0] = os.path.abspath(os.path.expanduser(args.target_cmd[0]))
    if not os.path.exists(args.target_cmd[0]):
        print_err("Target binary not found!")
        return
    args.target_cmd = " ".join(args.target_cmd)

    if args.database_file:
        db_file = os.path.abspath(os.path.expanduser(args.database_file))
    else:
        db_file = None

    print_ok("Going to collect crash samples from '%s'." % sync_dir)

    # initialize database
    if db_file:
        lite_db = con_sqlite.sqliteConnector(db_file)
        lite_db.init_database()
    else:
        lite_db = None

    fuzzers = get_fuzzer_instances(sync_dir)
    print_ok("Found %d fuzzers, collecting crash samples." % len(fuzzers))

    sample_index = build_sample_index(sync_dir, out_dir, fuzzers, lite_db, args.min_filename)

    if len(sample_index.index) > 0:
        print_ok("Successfully indexed %d crash samples." % len(sample_index.index))
    elif db_file:
        print_warn("No unseen samples found. Check your database for results!")
        return
    else:
        print_warn("No samples found. Check directory settings!")
        return

    if args.remove_invalid:
        from afl_utils import afl_vcrash
        invalid_samples = afl_vcrash.verify_samples(int(args.num_threads), sample_index.inputs(), args.target_cmd)

        # store invalid samples in db
        if args.gdb_expl_script_file and db_file:
            print_ok("Saving invalid sample info to database.")
            for sample in invalid_samples:
                sample_name = sample_index.outputs(input_file=sample)
                dataset = {'sample': sample_name[0]['output'], 'classification': 'INVALID',
                           'description': 'Sample does not cause a crash in the target.', 'hash': ''}
                if not lite_db.dataset_exists(dataset):
                    lite_db.insert_dataset(dataset)

        # remove invalid samples from sample index
        sample_index.remove_inputs(invalid_samples)
        print_warn("Removed %d invalid crash samples from index." % len(invalid_samples))

    # generate gdb+exploitable script
    if args.gdb_expl_script_file:
        divided_index = sample_index.divide(int(args.num_threads))

        for i in range(int(args.num_threads)):
            generate_gdb_exploitable_script(os.path.join(out_dir, args.gdb_expl_script_file), divided_index[i],
                                            args.target_cmd, i, intermediate=True)

        # execute gdb+exploitable script
        classification_data = execute_gdb_script(out_dir, args.gdb_expl_script_file, len(sample_index.inputs()),
                                                 int(args.num_threads))

        # Submit crash classification data into database
        if db_file:
            print_ok("Saving sample classification info to database.")
            for dataset in classification_data:
                if not lite_db.dataset_exists(dataset):
                    lite_db.insert_dataset(dataset)

        # de-dupe by exploitable hash
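        # seen_add(h) returns None (falsy), so the condition only passes, and
        # records the hash, the first time a given hash is encountered; this
        # keeps one sample per exploitable hash while preserving input order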
        seen = set()
        seen_add = seen.add
        classification_data_dedupe = [x for x in classification_data
                                      if x['hash'] not in seen and not seen_add(x['hash'])]

        # remove dupe samples identified by exploitable hash
        uninteresting_samples = [x['sample'] for x in classification_data
                                 if x not in classification_data_dedupe]

        sample_index.remove_outputs(uninteresting_samples)

        print_warn("Removed %d duplicate samples from index. Will continue with %d remaining samples." %
                   (len(uninteresting_samples), len(sample_index.index)))

        # remove crash samples that are classified uninteresting
        if args.remove_unexploitable:
            classification_unexploitable = [
                'NOT_EXPLOITABLE',
                'PROBABLY_NOT_EXPLOITABLE',
            ]

            uninteresting_samples = []

            for c in classification_data_dedupe:
                if c['classification'] in classification_unexploitable:
                    uninteresting_samples.append(c['sample'])

            sample_index.remove_outputs(uninteresting_samples)
            print_warn("Removed %d uninteresting crash samples from index." % len(uninteresting_samples))

        # generate output gdb script
        generate_gdb_exploitable_script(os.path.join(out_dir, args.gdb_expl_script_file), sample_index,
                                        args.target_cmd, 0)
    elif args.gdb_script_file:
        generate_gdb_exploitable_script(os.path.join(out_dir, args.gdb_script_file), sample_index, args.target_cmd)

    print_ok("Copying %d samples into output directory..." % len(sample_index.index))
    files_collected = copy_samples(sample_index)

    # generate filelist of collected crash samples
    if args.list_filename:
        generate_sample_list(os.path.abspath(os.path.expanduser(args.list_filename)), files_collected)
        print_ok("Generated crash sample list '%s'." % os.path.abspath(os.path.expanduser(args.list_filename)))
Example #25
0
def main(argv):
    show_info()

    parser = argparse.ArgumentParser(
        description=
        "afl-multicore starts several parallel fuzzing jobs, that are run \
in the background. For fuzzer stats see 'out_dir/SESSION###/fuzzer_stats'!",
        usage=
        "afl-multicore [-c config] [-h] [-s secs] [-t] [-v] <cmd> <jobs[,offset]>"
    )

    parser.add_argument(
        "-c",
        "--config",
        dest="config_file",
        help="afl-multicore config file (Default: afl-multicore.conf)!",
        default="afl-multicore.conf")
    parser.add_argument("-s",
                        "--startup-delay",
                        dest="startup_delay",
                        default=None,
                        help="Wait a configurable  amount \
of time after starting/resuming each afl instance to avoid interference during fuzzer startup. Provide wait time in \
seconds.")
    parser.add_argument("-t",
                        "--test",
                        dest="test_run",
                        action="store_const",
                        const=True,
                        default=False,
                        help="Perform \
a test run by starting a single afl instance in interactive mode using a test output directory."
                        )
    parser.add_argument(
        "-v",
        "--verbose",
        dest="verbose",
        action="store_const",
        const=True,
        default=False,
        help=
        "For debugging purposes do not redirect stderr/stdout of the created \
subprocesses to /dev/null (Default: off). Check 'nohup.out' for further outputs."
    )
    parser.add_argument(
        "-r",
        "--redirect",
        dest="redirect",
        type=str,
        default=None,
        help="For debugging purposes, redirect stdout/stderr of the created subprocesses to the specified file.")
    parser.add_argument(
        "cmd", help="afl-multicore command to execute: start, resume, add.")
    parser.add_argument(
        "jobs",
        help=
        "Number of instances to start/resume/add. For resumes you may specify an optional \
job offset that allows to resume specific (ranges of) afl-instances.")

    args = parser.parse_args(argv[1:])

    conf_settings = read_config(
        os.path.abspath(os.path.expanduser(args.config_file)))

    if args.test_run:
        signal.signal(signal.SIGINT, sigint_handler)
        conf_settings["output"] += "_test"
        conf_settings["interactive"] = False
        args.jobs = 1
        args.cmd = "start"

    jobs_count, jobs_offset = get_job_counts(args.jobs)

    if args.cmd != "resume":
        conf_settings["input"] = os.path.abspath(
            os.path.expanduser(conf_settings["input"]))
        jobs_offset = 0
        if not os.path.exists(conf_settings["input"]):
            print_err("No valid directory provided for <INPUT_DIR>!")
            sys.exit(1)
    else:
        conf_settings["input"] = "-"

    conf_settings["output"] = os.path.abspath(
        os.path.expanduser(conf_settings["output"]))

    instances_started = get_started_instance_count(args.cmd, conf_settings)
    master_count = get_master_count(conf_settings)

    if "interactive" in conf_settings and conf_settings["interactive"]:
        if not check_screen():
            print_err(
                "When using screen mode, please run afl-multicore from inside a screen session!"
            )
            sys.exit(1)

        if "environment" in conf_settings:
            setup_screen(jobs_count, conf_settings["environment"])
        else:
            setup_screen(jobs_count, [])

    target_cmd = build_target_cmd(conf_settings)

    if args.test_run:
        cmd = build_master_cmd(conf_settings, 0, target_cmd)
        with subprocess.Popen(cmd.split()) as test_proc:
            print_ok("Test instance started (PID: %d)" % test_proc.pid)
        # the 'with' block waits for the test instance to exit; a test run
        # must not fall through to regular instance startup afterwards
        sys.exit(0)

    print_ok("Starting fuzzer instance(s)...")
    jobs_offset += instances_started
    jobs_count += jobs_offset
    instances = []
    for i in range(jobs_offset, jobs_count):
        is_master = has_master(conf_settings, i)

        if is_master:
            cmd = build_master_cmd(conf_settings, i, target_cmd)
        else:
            cmd = build_slave_cmd(conf_settings, i, target_cmd)

        if "interactive" in conf_settings and conf_settings["interactive"]:
            subprocess.Popen(["screen", "-X", "select", "%d" % (i + 1)])
            screen_cmd = ["screen", "-X", "eval", "exec %s" % cmd, "next"]
            subprocess.Popen(screen_cmd)
            if is_master:
                if master_count == 1:
                    print(" Master %03d started inside new screen window" % i)
                else:
                    print(
                        " Master %03d/%03d started inside new screen window" %
                        (i, master_count - 1))
            else:
                print(" Slave %03d started inside new screen window" % i)
        else:
            if not args.verbose and not args.redirect:
                fuzzer_inst = subprocess.Popen(" ".join(['nohup',
                                                         cmd]).split(),
                                               stdout=subprocess.DEVNULL,
                                               stderr=subprocess.DEVNULL)
            elif args.redirect:
                with open(args.redirect, "ab") as fp:
                    fuzzer_inst = subprocess.Popen(" ".join(['nohup',
                                                             cmd]).split(),
                                                   stdout=fp,
                                                   stderr=subprocess.STDOUT)
                    instances.append(fuzzer_inst)
            else:
                fuzzer_inst = subprocess.Popen(" ".join(['nohup',
                                                         cmd]).split())
            if is_master:
                if master_count == 1:
                    print(" Master %03d started (PID: %d)" % (i, fuzzer_inst.pid))
                else:
                    print(" Master %03d/%03d started (PID: %d)" %
                          (i, master_count - 1, fuzzer_inst.pid))
            else:
                print(" Slave %03d started (PID: %d)" % (i, fuzzer_inst.pid))

        if i < (jobs_count - 1):
            startup_delay(conf_settings, i, args.cmd, args.startup_delay)

    write_pgid_file(conf_settings)
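
A minimal usage sketch for the example above. The config file name, delay and
job count are placeholders; the entry point is assumed to be
afl_utils.afl_multicore.main:

from afl_utils import afl_multicore

# start eight afl instances, waiting two seconds between instance startups
afl_multicore.main(["afl-multicore", "-c", "afl-multicore.conf",
                    "-s", "2", "start", "8"])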
Example #26
0
def main(argv):
    show_info()

    parser = argparse.ArgumentParser(description="afl-collect copies all crash sample files from an afl sync dir used \
by multiple fuzzers when fuzzing in parallel into a single location providing easy access for further crash analysis.",
                                     usage="afl-collect [-d DATABASE] [-e|-g GDB_EXPL_SCRIPT_FILE] [-f LIST_FILENAME]\n \
[-h] [-j THREADS] [-m] [-r] [-rr] sync_dir collection_dir -- target_cmd")

    parser.add_argument("sync_dir", help="afl synchronisation directory crash samples will be collected from.")
    parser.add_argument("collection_dir",
                        help="Output directory that will hold a copy of all crash samples and other generated files. \
Existing files in the collection directory will be overwritten!")
    parser.add_argument("-d", "--database", dest="database_file", help="Submit sample data into an sqlite3 database (\
only when used together with '-e'). afl-collect skips processing of samples already found in an existing database.",
                        default=None)
    parser.add_argument("-e", "--execute-gdb-script", dest="gdb_expl_script_file",
                        help="Generate and execute a gdb+exploitable script after crash sample collection for crash \
classification. (Like option '-g', plus script execution.)",
                        default=None)
    parser.add_argument("-f", "--filelist", dest="list_filename", default=None,
                        help="Writes all collected crash sample filenames into a file in the collection directory.")
    parser.add_argument("-g", "--generate-gdb-script", dest="gdb_script_file",
                        help="Generate gdb script to run 'exploitable.py' on all collected crash samples. Generated \
script will be placed into collection directory.", default=None)
    parser.add_argument("-j", "--threads", dest="num_threads", default=1,
                        help="Enable parallel analysis by specifying the number of threads afl-collect will utilize.")
    parser.add_argument("-m", "--minimize-filenames", dest="min_filename", action="store_const", const=True,
                        default=False, help="Minimize crash sample file names by only keeping fuzzer name and ID.")
    parser.add_argument("-r", "--remove-invalid", dest="remove_invalid", action="store_const", const=True,
                        default=False, help="Verify collected crash samples and remove samples that do not lead to \
crashes or cause timeouts (runs 'afl-vcrash.py -r' on collection directory). This step is done prior to any script \
file execution or file list generation.")
    parser.add_argument("-rr", "--remove-unexploitable", dest="remove_unexploitable", action="store_const", const=True,
                        default=False, help="Remove crash samples that have an exploitable classification of \
'NOT_EXPLOITABLE' or 'PROBABLY_NOT_EXPLOITABLE'. Sample file removal will take place after gdb+exploitable \
script execution. Has no effect without '-e'.")
    parser.add_argument("target_cmd", nargs="+", help="Path to the target binary and its command line arguments. \
Use '@@' to specify crash sample input file position (see afl-fuzz usage).")

    args = parser.parse_args(argv[1:])

    sync_dir = os.path.abspath(os.path.expanduser(args.sync_dir))
    if not os.path.exists(sync_dir):
        print_err("No valid directory provided for <SYNC_DIR>!: %s" % sync_dir)
        return

    out_dir = os.path.abspath(os.path.expanduser(args.collection_dir))
    if not os.path.exists(out_dir):
        os.makedirs(out_dir, exist_ok=True)

    args.target_cmd = " ".join(args.target_cmd).split()
    args.target_cmd[0] = os.path.abspath(os.path.expanduser(args.target_cmd[0]))
    if not os.path.exists(args.target_cmd[0]):
        print_err("Target binary not found!")
        return
    args.target_cmd = " ".join(args.target_cmd)

    if args.database_file:
        db_file = os.path.abspath(os.path.expanduser(args.database_file))
    else:
        db_file = None

    print_ok("Going to collect crash samples from '%s'." % sync_dir)

    # initialize database
    if db_file:
        lite_db = con_sqlite.sqliteConnector(db_file)
        lite_db.init_database('Data', db_table_spec)
    else:
        lite_db = None

    fuzzers = get_fuzzer_instances(sync_dir)
    print_ok("Found %d fuzzers, collecting crash samples." % len(fuzzers))

    sample_index = build_sample_index(sync_dir, out_dir, fuzzers, lite_db, args.min_filename)
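    # keep a shallow copy of the index so every collected crash can be moved
    # to the 'all_crashes' folder later (this relies on the remove_* helpers
    # rebinding, not mutating, the underlying index list)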
    sample_index_original = copy.copy(sample_index)
    if len(sample_index.index) > 0:
        print_ok("Successfully indexed %d crash samples." % len(sample_index.index))
    elif db_file:
        print_warn("No unseen samples found. Check your database for results!")
        return
    else:
        print_warn("No samples found. Check directory settings!")
        return

    if args.remove_invalid:
        from afl_utils import afl_vcrash
        invalid_samples, timeout_samples = afl_vcrash.verify_samples(int(args.num_threads), sample_index.inputs(),
                                                                     args.target_cmd, timeout_secs=2)

        # store invalid samples in db
        if args.gdb_expl_script_file and db_file:
            print_ok("Saving invalid sample info to database.")
            for sample in invalid_samples:
                sample_name = sample_index.outputs(input_file=sample)
                dataset = {'Sample': sample_name[0], 'Classification': 'INVALID',
                           'Classification_Description': 'Sample does not cause a crash in the target.', 'Hash': '',
                           'User_Comment': ''}
                if not lite_db.dataset_exists('Data', dataset, ['Sample']):
                    lite_db.insert_dataset('Data', dataset)

            for sample in timeout_samples:
                sample_name = sample_index.outputs(input_file=sample)
                dataset = {'Sample': sample_name[0], 'Classification': 'TIMEOUT',
                           'Classification_Description': 'Sample caused a target execution timeout.', 'Hash': '',
                           'User_Comment': ''}
                if not lite_db.dataset_exists('Data', dataset, ['Sample']):
                    lite_db.insert_dataset('Data', dataset)

        # remove invalid samples from sample index
        sample_index.remove_inputs(invalid_samples + timeout_samples)
        print_warn("Removed %d invalid crash samples from index." % len(invalid_samples))
        print_warn("Removed %d timed out samples from index." % len(timeout_samples))

    # generate gdb+exploitable script
    if args.gdb_expl_script_file:
        divided_index = sample_index.divide(int(args.num_threads))

        for i in range(int(args.num_threads)):
            generate_gdb_exploitable_script(os.path.join(out_dir, args.gdb_expl_script_file), divided_index[i],
                                            args.target_cmd, i, intermediate=True)

        # execute gdb+exploitable script
        classification_data = execute_gdb_script(out_dir, args.gdb_expl_script_file, len(sample_index.inputs()),
                                                 int(args.num_threads))

        # Submit crash classification data into database
        if db_file:
            print_ok("Saving sample classification info to database.")
            for dataset in classification_data:
                if not lite_db.dataset_exists('Data', dataset, ['Sample']):
                    lite_db.insert_dataset('Data', dataset)

        # de-dupe by exploitable hash
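        # same first-occurrence filter as in the earlier example, but hashes
        # are compared only on the part before the first '.', so samples whose
        # hashes differ merely in a dot-separated suffix count as duplicates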
        seen = set()
        seen_add = seen.add
        classification_data_dedupe = [x for x in classification_data
                                      if x['Hash'].split(".")[0] not in seen and not seen_add(x['Hash'].split(".")[0])]

        # remove dupe samples identified by exploitable hash
        uninteresting_samples = [x['Sample'] for x in classification_data
                                 if x not in classification_data_dedupe]

        sample_index.remove_outputs(uninteresting_samples)

        print_warn("Removed %d duplicate samples from index. Will continue with %d remaining samples." %
                   (len(uninteresting_samples), len(sample_index.index)))

        # remove crash samples that are classified uninteresting
        if args.remove_unexploitable:
            classification_unexploitable = [
                'NOT_EXPLOITABLE',
                'PROBABLY_NOT_EXPLOITABLE',
            ]

            uninteresting_samples = []

            for c in classification_data_dedupe:
                if c['Classification'] in classification_unexploitable:
                    uninteresting_samples.append(c['Sample'])

            sample_index.remove_outputs(uninteresting_samples)
            print_warn("Removed %d uninteresting crash samples from index." % len(uninteresting_samples))

        # generate output gdb script
        generate_gdb_exploitable_script(os.path.join(out_dir, args.gdb_expl_script_file), sample_index,
                                        args.target_cmd, 0)
    elif args.gdb_script_file:
        generate_gdb_exploitable_script(os.path.join(out_dir, args.gdb_script_file), sample_index, args.target_cmd)

    print_ok("Copying %d samples into output directory..." % len(sample_index.index))
    files_collected = copy_samples(sample_index)

    # move every indexed crash sample into the special 'all_crashes' folder
    all_crashes_dir = os.path.join(sample_index_original.output_dir, "all_crashes")
    print_ok("Moving all crashes to %s (Samples: %d)" % (all_crashes_dir, len(sample_index_original.index)))
    os.makedirs(all_crashes_dir, exist_ok=True)
    for sample in sample_index_original.index:
        shutil.move(sample['input'], os.path.join(all_crashes_dir, sample['output']))

    # generate filelist of collected crash samples
    if args.list_filename:
        generate_sample_list(os.path.abspath(os.path.expanduser(args.list_filename)), files_collected)
        print_ok("Generated crash sample list '%s'." % os.path.abspath(os.path.expanduser(args.list_filename)))

    # write db contents to file and close db connection
    if db_file:
        lite_db.commit_close()
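
A usage sketch exercising the sample-removal paths of the example above
(hypothetical paths, assumed afl_utils.afl_collect entry point):

from afl_utils import afl_collect

# verify samples ('-r'), classify them ('-e') and drop samples rated
# NOT_EXPLOITABLE or PROBABLY_NOT_EXPLOITABLE ('-rr')
afl_collect.main(["afl-collect", "-r", "-rr", "-e", "gdb_script",
                  "-d", "crashes.db", "./sync_dir", "./collected",
                  "--", "./target_binary", "@@"])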
Example #27
0
def main(argv):
    show_info()

    parser = argparse.ArgumentParser(description="Post selected contents of fuzzer_stats to Twitter.",
                                     usage="afl-stats [-h] [-c config]\n")

    parser.add_argument("-c", "--config", dest="config_file",
                        help="afl-stats config file (Default: afl-stats.conf)!", default="afl-stats.conf")

    args = parser.parse_args(argv[1:])

    config_settings = read_config(args.config_file)

    twitter_inst = twitter_init()

    doExit = False

    # { 'fuzzer_dir': (stat, old_stat) }
    stat_dict = dict()

    while not doExit:
        try:
            for fuzzer in config_settings['fuzz_dirs']:
                stats = load_stats(fuzzer)

                if not stats:
                    continue

                sum_stats = summarize_stats(stats)

                try:
                    # stat_dict has already been initialized for fuzzer
                    #  old_stat <- last_stat
                    old_stats = stat_dict[fuzzer][0].copy()
                except KeyError:
                    # stat_dict has not yet been initialized for fuzzer
                    #  old_stat <- cur_stat
                    old_stats = sum_stats.copy()

                # initialize/update stat_dict
                stat_dict[fuzzer] = (sum_stats, old_stats)

                stat_change = diff_stats(sum_stats, old_stats)

                if not stat_change:
                    continue

                print(prettify_stat(sum_stats, stat_change, True))

                tweet = prettify_stat(sum_stats, stat_change, False)

                tweet_len = len(tweet)
                c = clr.LRD if tweet_len > 140 else clr.LGN
                print_ok("Tweeting status (%s%d" % (c, tweet_len) + clr.RST + " chars)...")

                try:
                    twitter_inst.statuses.update(status=shorten_tweet(tweet))
                except (twitter.TwitterHTTPError, URLError):
                    print_warn("Problem connecting to Twitter! Tweet not sent!")
                except Exception as e:
                    print_err("Sending tweet failed (Reason: " + clr.GRA + "%s" % e.__cause__ + clr.RST + ")")

            # a negative interval means "post once, then exit"; otherwise the
            # loop sleeps for 'interval' minutes between posting rounds
            if float(config_settings['interval']) < 0:
                doExit = True
            else:
                time.sleep(float(config_settings['interval'])*60)
        except KeyboardInterrupt:
            print("\b\b")
            print_ok("Aborted by user. Good bye!")
            doExit = True
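
A minimal usage sketch for the example above, assuming the entry point is
afl_utils.afl_stats.main and that afl-stats.conf configures 'fuzz_dirs',
'interval' and the Twitter credentials:

from afl_utils import afl_stats

# post fuzzer_stats summaries to Twitter until interrupted
afl_stats.main(["afl-stats", "-c", "afl-stats.conf"])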
Example #28
0
def main(argv):
    show_info()

    parser = argparse.ArgumentParser(description="afl-minimize performs several optimization steps to reduce the size\n \
of an afl-fuzz corpus.",
                                     usage="afl-minimize [-c COLLECTION_DIR [--cmin [opts]] [--tmin [opts]]] [-d] [-h]\n \
                   [-j] sync_dir -- target_cmd\n")

    parser.add_argument("-c", "--collect", dest="collection_dir",
                        help="Collect all samples from the synchronisation dir and store them in the collection dir. \
Existing files in the collection directory will be overwritten!", default=None)
    parser.add_argument("--cmin", dest="invoke_cmin", action="store_const", const=True,
                        default=False, help="Run afl-cmin on collection dir. Has no effect without '-c'.")
    parser.add_argument("--cmin-mem-limit", dest="cmin_mem_limit", default=None, help="Set memory limit for afl-cmin.")
    parser.add_argument("--cmin-timeout", dest="cmin_timeout", default=None, help="Set timeout for afl-cmin.")
    parser.add_argument("--tmin", dest="invoke_tmin", action="store_const", const=True,
                        default=False, help="Run afl-tmin on minimized collection dir if used together with '--cmin'\
or on unoptimized collection dir otherwise. Has no effect without '-c'.")
    parser.add_argument("--tmin-mem-limit", dest="tmin_mem_limit", default=None, help="Set memory limit for afl-tmin.")
    parser.add_argument("--tmin-timeout", dest="tmin_timeout", default=None, help="Set timeout for afl-tmin.")
    parser.add_argument("-d", "--dry-run", dest="dry_run", action="store_const", const=True,
                        default=False, help="Perform dry-run on collection dir, if '-c' is provided or on \
synchronisation dir otherwise. Dry-run will move intermittent crashes out of the corpus.")
    parser.add_argument("-j", "--threads", dest="num_threads", default=1,
                        help="Enable parallel dry-run and t-minimization step by specifying the number of threads \
afl-minimize will utilize.")
    parser.add_argument("sync_dir", help="afl synchronisation directory containing multiple fuzzers and their queues.")
    parser.add_argument("target_cmd", nargs="+", help="Path to the target binary and its command line arguments. \
Use '@@' to specify crash sample input file position (see afl-fuzz usage).")

    args = parser.parse_args(argv[1:])

    if not args.collection_dir and not args.dry_run:
        print_err("No operation requested. You should at least provide '-c'")
        print_err("for sample collection or '-d' for a dry-run. Use '--help' for")
        print_err("usage instructions or checkout README.md for details.")
        return

    sync_dir = os.path.abspath(os.path.expanduser(args.sync_dir))
    if not os.path.exists(sync_dir):
        print_err("No valid directory provided for <SYNC_DIR>!")
        return

    args.target_cmd = " ".join(args.target_cmd).split()
    args.target_cmd[0] = os.path.abspath(os.path.expanduser(args.target_cmd[0]))
    if not os.path.exists(args.target_cmd[0]):
        print_err("Target binary not found!")
        return
    args.target_cmd = " ".join(args.target_cmd)

    if not args.num_threads:
        threads = 1
    else:
        threads = int(args.num_threads)

    if args.collection_dir:
        out_dir = os.path.abspath(os.path.expanduser(args.collection_dir))
        if not os.path.exists(out_dir) or len(os.listdir(out_dir)) == 0:
            os.makedirs(out_dir, exist_ok=True)

            print_ok("Looking for fuzzing queues in '%s'." % sync_dir)
            fuzzers = afl_collect.get_fuzzer_instances(sync_dir, crash_dirs=False)

            # collect samples from fuzzer queues
            print_ok("Found %d fuzzers, collecting samples." % len(fuzzers))
            sample_index = afl_collect.build_sample_index(sync_dir, out_dir, fuzzers)

            print_ok("Successfully indexed %d samples." % len(sample_index.index))
            print_ok("Copying %d samples into collection directory..." % len(sample_index.index))
            afl_collect.copy_samples(sample_index)
        else:
            print_warn("Collection directory exists and is not empty!")
            print_warn("Skipping collection step...")

        if args.invoke_cmin:
            # invoke cmin on collection
            print_ok("Executing: afl-cmin -i %s -o %s.cmin -- %s" % (out_dir, out_dir, args.target_cmd))
            invoke_cmin(out_dir, "%s.cmin" % out_dir, args.target_cmd, mem_limit=args.cmin_mem_limit,
                        timeout=args.cmin_timeout)
            if args.invoke_tmin:
                # invoke tmin on minimized collection
                print_ok("Executing: afl-tmin -i %s.cmin/* -o %s.cmin.tmin/* -- %s" % (out_dir, out_dir,
                                                                                       args.target_cmd))
                tmin_num_samples, tmin_samples = afl_collect.get_samples_from_dir("%s.cmin" % out_dir, abs_path=True)
                invoke_tmin(tmin_samples, "%s.cmin.tmin" % out_dir, args.target_cmd, num_threads=threads,
                            mem_limit=args.tmin_mem_limit, timeout=args.tmin_timeout)
        elif args.invoke_tmin:
            # invoke tmin on collection
            print_ok("Executing: afl-tmin -i %s/* -o %s.tmin/* -- %s" % (out_dir, out_dir, args.target_cmd))
            tmin_num_samples, tmin_samples = afl_collect.get_samples_from_dir(out_dir, abs_path=True)
            invoke_tmin(tmin_samples, "%s.tmin" % out_dir, args.target_cmd, num_threads=threads,
                        mem_limit=args.tmin_mem_limit, timeout=args.tmin_timeout)
        if args.dry_run:
            # invoke dry-run on collected/minimized corpus
            if args.invoke_cmin and args.invoke_tmin:
                print_ok("Performing dry-run in %s.cmin.tmin..." % out_dir)
                print_warn("Be patient! Depending on the corpus size this step can take hours...")
                dryrun_num_samples, dryrun_samples = afl_collect.get_samples_from_dir("%s.cmin.tmin" % out_dir,
                                                                                      abs_path=True)
                invoke_dryrun(dryrun_samples, "%s.cmin.tmin.crashes" % out_dir, "%s.cmin.tmin.hangs" % out_dir,
                              args.target_cmd, num_threads=threads)
            elif args.invoke_cmin:
                print_ok("Performing dry-run in %s.cmin..." % out_dir)
                print_warn("Be patient! Depending on the corpus size this step can take hours...")
                dryrun_num_samples, dryrun_samples = afl_collect.get_samples_from_dir("%s.cmin" % out_dir,
                                                                                      abs_path=True)
                invoke_dryrun(dryrun_samples, "%s.cmin.crashes" % out_dir, "%s.cmin.hangs" % out_dir, args.target_cmd,
                              num_threads=threads)
            elif args.invoke_tmin:
                print_ok("Performing dry-run in %s.tmin..." % out_dir)
                print_warn("Be patient! Depending on the corpus size this step can take hours...")
                dryrun_num_samples, dryrun_samples = afl_collect.get_samples_from_dir("%s.tmin" % out_dir,
                                                                                      abs_path=True)
                invoke_dryrun(dryrun_samples, "%s.tmin.crashes" % out_dir, "%s.tmin.hangs" % out_dir, args.target_cmd,
                              num_threads=threads)
            else:
                print_ok("Performing dry-run in %s..." % out_dir)
                print_warn("Be patient! Depending on the corpus size this step can take hours...")
                dryrun_num_samples, dryrun_samples = afl_collect.get_samples_from_dir(out_dir, abs_path=True)
                invoke_dryrun(dryrun_samples, "%s.crashes" % out_dir, "%s.hangs" % out_dir, args.target_cmd,
                              num_threads=threads)
    else:
        if args.dry_run:
            print_ok("Looking for fuzzing queues in '%s'." % sync_dir)
            fuzzers = afl_collect.get_fuzzer_instances(sync_dir, crash_dirs=False)
            print_ok("Found %d fuzzers, performing dry run." % len(fuzzers))
            print_warn("Be patient! Depending on the corpus size this step can take hours...")
            # invoke dry-run on original corpus
            for f in fuzzers:
                for q_dir in f[1]:
                    q_dir_complete = os.path.join(sync_dir, f[0], q_dir)
                    print_ok("Processing %s..." % q_dir_complete)

                    dryrun_num_samples, dryrun_samples = afl_collect.get_samples_from_dir(q_dir_complete, abs_path=True)
                    invoke_dryrun(dryrun_samples, os.path.join(sync_dir, f[0], "crashes"),
                                  os.path.join(sync_dir, f[0], "hangs"), args.target_cmd, num_threads=threads)
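
A minimal usage sketch for the example above (hypothetical paths), assuming
the entry point is afl_utils.afl_minimize.main:

from afl_utils import afl_minimize

# collect queue samples into ./collection, then run afl-cmin and afl-tmin
# on the result, followed by a dry-run of the minimized corpus
afl_minimize.main(["afl-minimize", "-c", "./collection", "--cmin", "--tmin",
                   "-d", "-j", "4", "./sync_dir",
                   "--", "./target_binary", "@@"])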
Example #29
0
def main(argv):
    show_info()

    parser = argparse.ArgumentParser(
        description=
        "afl-multicore starts several parallel fuzzing jobs, that are run \
in the background. For fuzzer stats see 'out_dir/SESSION###/fuzzer_stats'!",
        usage="afl-multicore [-c config] [-h] [-t] [-v] <cmd> <jobs>")

    parser.add_argument(
        "-c",
        "--config",
        dest="config_file",
        help="afl-multicore config file (Default: afl-multicore.conf)!",
        default="afl-multicore.conf")
    parser.add_argument("-t",
                        "--test",
                        dest="test_run",
                        action="store_const",
                        const=True,
                        default=False,
                        help="Perform \
a test run by starting a single afl instance in interactive mode using a test output directory."
                        )
    parser.add_argument(
        "-v",
        "--verbose",
        dest="verbose",
        action="store_const",
        const=True,
        default=False,
        help=
        "For debugging purposes do not redirect stderr/stdout of the created \
subprocesses to /dev/null (Default: off). Check 'nohup.out' for further outputs."
    )
    parser.add_argument(
        "cmd", help="afl-multicore command to execute: start, resume, add.")
    parser.add_argument("jobs",
                        help="Number of instances to start/resume/add.")

    args = parser.parse_args(argv[1:])

    conf_settings, environment = read_config(
        os.path.abspath(os.path.expanduser(args.config_file)))

    if args.test_run:
        signal.signal(signal.SIGINT, sigint_handler)
        conf_settings["output"] += "_test"
        conf_settings["interactive"] = False
        args.jobs = 1
        args.cmd = "start"

    if args.cmd != "resume":
        conf_settings["input"] = os.path.abspath(
            os.path.expanduser(conf_settings["input"]))
        if not os.path.exists(conf_settings["input"]):
            print_err("No valid directory provided for <INPUT_DIR>!")
            sys.exit(1)
    else:
        conf_settings["input"] = "-"

    conf_settings["output"] = os.path.abspath(
        os.path.expanduser(conf_settings["output"]))

    slave_off, slave_start = get_slave_count(args.cmd, conf_settings)

    if conf_settings["interactive"]:
        if not check_screen():
            print_err(
                "When using screen mode, please run afl-multicore from inside a screen session!"
            )
            sys.exit(1)

        setup_screen(int(args.jobs), environment)

    target_cmd = build_target_cmd(conf_settings)
    master_cmd = build_master_cmd(conf_settings, target_cmd)

    if args.test_run:
        with subprocess.Popen(master_cmd.split()) as test_proc:
            print_ok("Test instance started (PID: %d)" % test_proc.pid)
        # the 'with' block waits for the test instance to exit; a test run
        # must not fall through to regular master/slave startup afterwards
        sys.exit(0)

    if not conf_settings["slave_only"]:
        print_ok("Starting master instance...")

        if not conf_settings["interactive"]:
            if not args.verbose:
                master = subprocess.Popen(" ".join(['nohup',
                                                    master_cmd]).split(),
                                          stdout=subprocess.DEVNULL,
                                          stderr=subprocess.DEVNULL)
            else:
                master = subprocess.Popen(" ".join(['nohup',
                                                    master_cmd]).split())
            print(" Master 000 started (PID: %d)" % master.pid)
        else:
            subprocess.Popen("screen -X select 1".split())
            screen_cmd = [
                "screen", "-X", "eval",
                "exec %s" % master_cmd, "next"
            ]
            subprocess.Popen(screen_cmd)
            print(" Master 000 started inside new screen window")

    print_ok("Starting slave instances...")
    for i in range(slave_start, int(args.jobs) + slave_start - slave_off):
        slave_cmd = build_slave_cmd(conf_settings, i, target_cmd)

        if not conf_settings["interactive"]:
            if not args.verbose:
                slave = subprocess.Popen(" ".join(['nohup',
                                                   slave_cmd]).split(),
                                         stdout=subprocess.DEVNULL,
                                         stderr=subprocess.DEVNULL)
            else:
                slave = subprocess.Popen(" ".join(['nohup',
                                                   slave_cmd]).split())
            print(" Slave %03d started (PID: %d)" % (i, slave.pid))
        else:
            subprocess.Popen(["screen", "-X", "select", "%d" % (i + 1)])
            screen_cmd = [
                "screen", "-X", "eval",
                "exec %s" % slave_cmd, "next"
            ]
            subprocess.Popen(screen_cmd)
            print(" Slave %03d started inside new screen window" % i)

    write_pgid_file(conf_settings)