def test_multicb_spawn():
    """Test that the fuzzer spins up for a multicb challenge."""
    import logging
    logging.getLogger("fuzzer").setLevel("DEBUG")

    # The two CBs that together form the multicb challenge.
    binaries = [
        os.path.join(bin_location, "./cgc_qualifier_event/cgc/251abc02_01"),
        os.path.join(bin_location, "./cgc_qualifier_event/cgc/251abc02_02"),
    ]

    f = fuzzer.Fuzzer(binaries, "work", create_dictionary=True)
    f.start()

    # Give the fuzzer up to ~15 seconds to come alive.
    attempts_left = 15
    while attempts_left and not f.alive:
        time.sleep(1)
        attempts_left -= 1

    nose.tools.assert_true(f.alive)

    # A dictionary should have been generated for the first CB.
    dictionary_path = os.path.join("work", "251abc02_01", "251abc02_01.dict")
    nose.tools.assert_true(os.path.isfile(dictionary_path))

    if f.alive:
        f.kill()
def test_multicb_spawn():
    """Test that the fuzzer spins up for a multicb challenge."""
    # NOTE(review): a function with this same name appears earlier in the
    # file with different binary paths; if both live in one module the later
    # definition shadows the earlier one — confirm which is intended.
    binaries = [
        os.path.join(bin_location, "tests/cgc/251abc02_01"),
        os.path.join(bin_location, "tests/cgc/251abc02_02"),
    ]

    f = fuzzer.Fuzzer(binaries, "work", create_dictionary=True)
    f.start()

    # Poll for up to ~15 seconds while the fuzzer starts up.
    attempts_left = 15
    while attempts_left and not f.alive:
        time.sleep(1)
        attempts_left -= 1

    nose.tools.assert_true(f.alive)

    # The dictionary for the first CB should have been written out.
    dictionary_path = os.path.join("work", "251abc02_01", "251abc02_01.dict")
    nose.tools.assert_true(os.path.isfile(dictionary_path))

    if f.alive:
        f.kill()
def fuzzer(self):
    """The fuzzer instance, built lazily on first access and cached."""
    # Fast path: already constructed.
    if self.__fuzzer is not None:
        return self.__fuzzer

    new_fuzzer = fuzzer.Fuzzer(
        self.target,
        self.work_dir,
        afl_count=self.threads,
        qemu=self.qemu,
        target_opts=self.target_args,
        memory="none",
    )
    new_fuzzer.dictionary = self.dictionary
    self.__fuzzer = new_fuzzer
    return self.__fuzzer
def __init__(self, driver_path="C:/Program Files (x86)/Google/Chrome/Application/"
                               "chromedriver_win32/chromedriver.exe"):
    """Create the fuzzer/vscanner helpers and a configured Chrome session."""
    self.fuzzer = fuzzer.Fuzzer()
    self.vscanner = vscanner.Vscanner()

    # Configure Chrome so PDFs are treated as downloads rather than being
    # rendered by the browser/system reader.
    chrome_options = webdriver.ChromeOptions()
    chrome_options.add_experimental_option("prefs", {
        "download.open_pdf_in_system_reader": False,
        "download.prompt_for_download": True,
        "plugins.always_open_pdf_externally": False,
    })

    self.driver = webdriver.Chrome(executable_path=driver_path, options=chrome_options)
def main():
    """Set up an AFL-ready environment and drop the user into a shell."""
    args = parse_args()
    with tempfile.TemporaryDirectory() as tmpdir:
        # Constructing the Fuzzer sets up the base environment variables;
        # the instance itself is not used afterwards.
        f = fuzzer.Fuzzer(args.file, tmpdir)

        # Prepend the AFL binary directories to PATH.
        afl_path = os.path.abspath(os.environ['AFL_PATH'])
        afl_bin_dir = os.path.join(afl_path, "..", "..")
        os.environ['PATH'] = ":".join((afl_path, afl_bin_dir, os.environ['PATH']))

        # Give them a shell
        os.system("/bin/bash")
def test_fuzzer_spawn():
    """Test that the fuzzer spawns correctly"""
    binary = os.path.join(bin_location, "shellphish/PIZZA_00001")
    f = fuzzer.Fuzzer(binary, "work")
    f.start()

    # Allow up to ~15 seconds for the fuzzer to report itself alive.
    attempts_left = 15
    while attempts_left and not f.alive:
        time.sleep(1)
        attempts_left -= 1

    nose.tools.assert_true(f.alive)

    if f.alive:
        f.kill()
def start(self):
    """Start the fuzzing campaign: enable the driller callback, build the
    AFL fuzzer with it as the stuck-callback, and launch it. Idempotent —
    a second call returns immediately.
    """
    # Guard: only ever start once.
    if self.started:
        return
    self.started = True
    print "[*] Enable driller..."
    # Driller workers are invoked via the stuck_callback below when AFL
    # stops making progress.
    self.drill_extension = driller.LocalCallback(
        num_workers=config.DRILLER_WORKERS)
    print "[*] Initialize fuzzer..."
    self.fuzzer = fuzzer.Fuzzer(
        self.binary,
        config.FUZZ_DIR,
        afl_count=config.FUZZER_WORKERS,
        force_interval=config.FUZZER_FORCE_INTERVAL,
        create_dictionary=True,
        stuck_callback=self.drill_extension,
        time_limit=self.time_limit)
    print "[*] Starting fuzzer..."
    self.fuzzer.start()
def analyze_dir(DIR):
    """Load the input hierarchy for a finished fuzzing job directory, print
    per-input stats, and pickle the hierarchy to '<DIR>.results'.

    Returns the InputHierarchy, or None if results already exist.
    (Python 2 code: uses print statements and text-mode pickling.)
    """
    DIR = DIR.rstrip('/')
    pname = DIR + '.results'
    # Skip work that has already been done.
    if os.path.exists(pname):
        print "ALREADY EXISTS:", pname
        return
    # The binary name is the last '-'-separated component of the dir name.
    BIN = os.path.basename(DIR).split('-')[-1]
    f = fuzzer.Fuzzer('/results/bins/%s' % BIN, '', job_dir=DIR)
    h = fuzzer.InputHierarchy(fuzzer=f, load_crashes=True)
    for i in h.inputs.values():
        print i, len(i.block_set), len(i._trace)
    #for o,v in h.technique_contributions():
    #    print o.timestamp,o,v
    print "SAVING TO:", pname
    # Protocol -1 = highest available pickle protocol.
    with open(pname, 'w') as of:
        pickle.dump(h, of, -1)
    return h
def main(opts):
    """Fuzz a TLS endpoint by mutating a ClientHello record and probing the
    server with handshake/heartbeat records. (Python 2 / scapy-ssl_tls style
    layering via the '/' operator.)

    NOTE(review): reconstructed from an unindented source — the placement of
    exit() (here: inside the except handler, bailing out on the first
    unparseable response) should be confirmed against the original file.
    """
    ip = opts['ip']
    port = opts['port']
    print ip, port
    fuzz = fuzzer.Fuzzer()
    # NOTE(review): the ip/port parsed from opts are printed but the TCP
    # connection below uses hard-coded 172.16.0.55:443 — presumably a
    # leftover; verify intent.
    tcp = TCP(ip="172.16.0.55", port=443, buffer=16 * 1024, timeout=0.5)
    # Base packet: TLS 1.0 record carrying a TLS 1.1 ClientHello.
    p = TLSRecord(version=TLSRecord.PROTOCOL_TLS_1_0) / TLSHandshake(
        data=TLSClientHello(version=TLSRecord.PROTOCOL_TLS_1_1))
    resp = tcp / Raw(data=p)
    # Send each mutated variant of the base packet and try to parse the
    # reply as a TLS record.
    for pf in fuzz.mutate_layer(p):
        print "X", pf
        tcp = TCP(ip="172.16.0.55", port=443, buffer=16 * 1024)
        resp = tcp / Raw(data=pf)
        try:
            resp = TLSRecord(__raw=resp)
        except:
            print "resp - not a tlsRecord structure"
            print resp
            exit()
    print "[ -> ] sending TLS Handshake"
    resp = tcp / (TLSRecord(version=0x0302, content_type=0x16) /
                  TLSHandshake(version=0x0302))
    print "[ <- ] response: %s" % (len(resp) if resp else 0)
    hexdump_squashed(resp)
    print repr(TLSRecord(__raw=resp))
    print "[ -> ] sending TLS Handshake"
    # Heartbeat probe with an oversized payload_length (0x4000) — a
    # heartbleed-style request.
    resp = tcp / (TLSRecord(version=0x0302) /
                  TLSHeartBeat(payload_length=0x4000))
    print "[ <- ] response: %s" % (len(resp) if resp else 0)
    if not resp:
        print "no response!"
        return
    hexdump_squashed(resp)
    print repr(TLSRecord(__raw=resp))
def _spawn_singlecb_fuzzer(self, path):
    """Spawn a single-CB fuzzer; with 4+ requested cores, reserve one of
    them for the extender extension (falling back to a plain AFL instance
    if the extender cannot be started)."""
    requested = self._job.request_cpu
    use_extender = requested >= 4
    if use_extender:
        LOG.debug("4 or more cores specified, dedicating one to the extender")
    fuzz_cores = requested - 1 if use_extender else requested

    fzzr = fuzzer.Fuzzer(path, self._workdir, fuzz_cores,
                         create_dictionary=True, never_resume=True)

    if use_extender and not fzzr.add_extension('extender'):
        LOG.warning(
            "Unable to spin-up the extender, using a normal AFL instance instead"
        )
        # Reclaim the reserved core as an ordinary fuzzer worker.
        fzzr.add_fuzzer()

    return fzzr
#!/usr/bin/env python import os import sys import tqdm import json import fuzzer DIR = sys.argv[1].rstrip('/') BIN = os.path.basename(DIR).split('-')[-1] print(DIR,BIN) f = fuzzer.Fuzzer('/results/bins/%s'%BIN, '', job_dir=DIR) h = fuzzer.InputHierarchy(fuzzer=f, load_crashes=True) def good(_i): return _i.instance not in ('fuzzer-1', 'fuzzer-2', 'fuzzer-3', 'fuzzer-4', 'fuzzer-5') all_blocks = set() all_transitions = set() all_inputs = [ i for i in h.inputs.values() if not i.crash and good(i) ] all_crashes = [ i for i in h.inputs.values() if i.crash ] min_timestamp = min(i.timestamp for i in all_inputs) if all_crashes: first_crash = min(all_crashes, key=lambda i: i.timestamp) time_to_crash = first_crash.timestamp - min_timestamp first_crash_techniques = first_crash.contributing_techniques if 'grease' in first_crash_techniques : # TODO: figure out how long that input took time_to_crash += 120 else: first_crash = None
def deploy_string(fuzz, testing_dir, findings_dir, binary, config, profile):
    """Initialize the module-level Fuzzer singleton on first call; later
    calls are no-ops."""
    global fuzz_instance
    # Already deployed — nothing to do.
    if fuzz_instance:
        return
    fuzz_instance = fuzzer.Fuzzer(fuzz, testing_dir, findings_dir, binary,
                                  config, profile)
print("[*] Seeding...")
# Collect every regular file in the seed directories as a seed input.
for dirpath in args.seed_dir:
    for filename in os.listdir(dirpath):
        filepath = os.path.join(dirpath, filename)
        if os.path.isfile(filepath):
            with open(filepath, 'rb') as seedfile:
                seeds.append(seedfile.read())

print("[*] Creating fuzzer...")
# NOTE(review): this rebinds the name 'fuzzer' from the module to the
# instance — intentional here since the module is not used afterwards.
fuzzer = fuzzer.Fuzzer(
    args.binary,
    args.work_dir,
    afl_count=args.afl_cores,
    force_interval=args.force_interval,
    create_dictionary=not args.no_dictionary,
    stuck_callback=stuck_callback,
    time_limit=args.timeout,
    memory=args.memory,
    seeds=seeds,
    timeout=args.run_timeout,
)

# start it!
print("[*] Starting fuzzer...")
fuzzer.start()

if args.ipython:
    print("[!]")
    print("[!] Launching ipython shell. Relevant variables:")
    print("[!]")
    print("[!] fuzzer")
def fuzz(binary):
    """Fuzz one binary, invoking driller when AFL runs out of pending
    favorites, and publish any crash found to redis.

    Returns True if a crash was found (including a crash on the dummy
    testcase), False otherwise.
    """
    l.info("beginning to fuzz \"%s\"", binary)
    binary_path = os.path.join(config.BINARY_DIR, binary)

    seeds = _get_seeds()
    # Prefer seeds extracted from a pcap of real traffic when one exists.
    pcap_path = os.path.join(config.PCAP_DIR, "%s.pcap" % binary)
    if os.path.isfile(pcap_path):
        l.info("found pcap for binary %s", binary)
        seeds = pcap.process(pcap_path)
    else:
        l.warning(
            "unable to find pcap file, will seed fuzzer with the default")

    # TODO enable dictionary creation, this may require fixing parts of the fuzzer module
    fzr = fuzzer.Fuzzer(binary_path,
                        config.FUZZER_WORK_DIR,
                        config.FUZZER_INSTANCES,
                        seeds=seeds,
                        create_dictionary=True)

    early_crash = False
    # BUG FIX: driller_jobs used to be assigned inside the try block, so an
    # EarlyCrash raised by fzr.start() left it undefined and the revoke loop
    # below raised NameError. Initialize it before the try instead.
    driller_jobs = []  # list of 'driller request' celery async result objects
    try:
        fzr.start()

        # start listening for inputs produced by driller
        start_listener(fzr)

        # clean all stale redis data
        clean_redis(fzr)

        # poll for a crash, timeout, or the need for driller assistance
        while not fzr.found_crash() and not fzr.timed_out():
            # request driller once AFL has no pending favorites left
            if 'fuzzer-1' in fzr.stats and 'pending_favs' in fzr.stats[
                    'fuzzer-1']:
                if not int(fzr.stats['fuzzer-1']['pending_favs']) > 0:
                    l.info("[%s] driller being requested!", binary)
                    driller_jobs.extend(request_drilling(fzr))

            time.sleep(config.CRASH_CHECK_INTERVAL)

        # make sure to kill the fuzzers when we're done
        fzr.kill()
    except fuzzer.EarlyCrash:
        l.info("binary crashed on dummy testcase, moving on...")
        early_crash = True

    # we found a crash!
    if early_crash or fzr.found_crash():
        l.info("found crash for \"%s\"", binary)

        # publish the crash
        redis_inst = redis.Redis(host=config.REDIS_HOST,
                                 port=config.REDIS_PORT,
                                 db=config.REDIS_DB)
        redis_inst.publish("crashes", binary)

        # revoke any driller jobs which are still working
        for job in driller_jobs:
            if job.status == 'PENDING':
                job.revoke(terminate=True)

    if fzr.timed_out():
        l.info("timed out while fuzzing \"%s\"", binary)

    # TODO end drilling jobs working on the binary

    return fzr.found_crash() or early_crash
# Fuzz the target binaries two at a time and report crash counts.
# (Python 2 code: print statements and generator .next().)
if not os.path.exists(os.path.join(target_dir, which)):
    usage()

target = os.path.join(target_dir, which)
target_bins = glob.glob(target + "/*")
# Generator yielding binaries from the target set.
get_bin = gen_from_list(target_bins)

while True:
    # Take the next pair of binaries to race against each other.
    bin1 = get_bin.next()
    bin2 = get_bin.next()
    bin1_id = os.path.basename(bin1)
    bin2_id = os.path.basename(bin2)

    fuzzer1 = fuzzer.Fuzzer(bin1, which)
    fuzzer2 = fuzzer.Fuzzer(bin2, which)

    print "fuzzing %s..."%(bin1_id)
    fuzzer1.start()
    print "fuzzing %s..."%(bin2_id)
    fuzzer2.start()

    # Let both fuzzers run for the configured number of seconds, then
    # compare how many crashes each one found.
    time.sleep(sec)

    bin1_crash_num = fuzzer1.get_crash_num()
    bin2_crash_num = fuzzer2.get_crash_num()
    print "%s Crash count : %d"%(bin1_id, bin1_crash_num)
    print "%s Crash count : %d"%(bin2_id, bin2_crash_num)
def _spawn_multicb_fuzzer(self, paths):
    """Spawn a dictionary-enabled, non-resuming fuzzer over all CB paths,
    using every CPU the job requested."""
    cores = self._job.request_cpu
    return fuzzer.Fuzzer(
        paths,
        self._workdir,
        cores,
        create_dictionary=True,
        never_resume=True,
    )