def main(args):
    """Entry point: load a JSON test case from the given path and run it."""
    # Bail out early when no filename was supplied.
    if not args:
        logger.warning("please provide a filename")
        return

    path = args[0]

    # Read the test case; I/O or JSON errors propagate to the caller.
    with open(path, "r") as handle:
        testcase = json.load(handle)

    # Wire up console logging before starting the daemons.
    logger.setLevel(logging.DEBUG)
    stream_handler = logging.StreamHandler()
    stream_handler.setLevel(logging.INFO)
    stream_handler.setFormatter(
        logging.Formatter('%(asctime)s - %(levelname)s - %(message)s'))
    logger.addHandler(stream_handler)

    # Start all docker daemons that we'll use during the execution
    fuzzer = Fuzzer(config=Config(dummy()))
    fuzzer.start_daemons()
    runTest(testcase, os.path.basename(path), fuzzer)
def fuzz(self):
    """Build parameter and payload collections, run the fuzzer, and return its output."""
    # target = "http://192.168.234.161/login.php"
    params = http_params.HttpParams()
    params.load_from_string_list(self.parameter_list)

    loads = payloads.Payloads()
    loads.load_from_string_list(self.payload_list)

    engine = Fuzzer(self.target, params.parameters, loads.payload_list)
    engine.fuzz()
    return engine.output
def start_test(self):
    """Push the agent to every selected device, fuzz until the user presses Enter, then clean up."""
    if not self.devices:
        self.error = 'Wybierz urzadzenia do testu!'
        return

    base_port = settings.config['devices_start_port']
    print('Pushing agents')
    for offset, device in enumerate(self.devices):
        device.push(settings.config['agent_path'],
                    '/data/local/tmp/agent', 0o755)
        self.fuzzers.append(Fuzzer(device, base_port + offset, self.devpath))

    # One daemon thread per fuzzer so a blocked run() never prevents exit.
    for fuzzer in self.fuzzers:
        worker = threading.Thread(target=fuzzer.run, daemon=True)
        self.threads.append(worker)
        worker.start()

    print('Fuzzing started!')
    print('{} active threads'.format(threading.active_count() - 1))
    print('Press any key to stop testing...')
    input()
    print('Stopping..')

    for fuzzer in self.fuzzers:
        fuzzer.stop()
    for worker in self.threads:
        worker.join()
    self.fuzzers = []
    self.threads = []
    print('{} active threads'.format(threading.active_count() - 1))
def create_dict(binary, dict_filename):
    """Run angr's create_dict.py on *binary* and write the AFL dictionary to *dict_filename*.

    Returns a truthy value when the script exited cleanly and produced a
    non-empty dictionary file, falsy otherwise.
    """
    create_dict_script = os.path.join(__angr_Fuzzer._get_base(), "bin", "create_dict.py")
    args = [sys.executable, create_dict_script, binary]

    # The `with` block closes the file on exit; the explicit df.close()
    # calls in the original were redundant.
    with open(dict_filename, 'wb') as df:
        p = subprocess.Popen(args, stdout=df)
        retcode = p.wait()

    return_ok = retcode == 0 and os.path.getsize(dict_filename)
    if return_ok:
        # angr prints 'wtf' on some lines, I think due to this file https://github.com/angr/angr/blob/8b1f0325187f28ba7721ee1e9a1f33f46394c487/angr/analyses/cfg/cfg_fast.py
        # so I remove these lines and log it
        with open(dict_filename, 'rb') as df:
            lines = df.readlines()
        WTF = b'wtf\n'
        if WTF in lines:
            # logger.warn() is a deprecated alias for warning().
            logger.warning("Found 'wtf' lines in dictionary. Removing them")
            content = b''.join(line for line in lines if line != WTF)
            with open(dict_filename, 'wb') as df:
                df.write(content)
    return return_ok
def __init__(self):
    # Register parseTnef as this fuzzer's parser callback.
    Fuzzer.__init__(self, self.parseTnef)
    self.props = {}   # names starting with 'ID_' -> value, from TnefEnum
    self.pmapi = {}   # names starting with 'PR_' -> value, from MapiEnum
    self.types = []   # values whose names start with 'TYPE_' (TnefEnum)
    self.tmapi = []   # values whose names start with 'PT_' (MapiEnum)
    # NOTE: .iteritems() means this block is Python 2 code.
    for x, y in TnefEnum().allValues().iteritems():
        if x.startswith('TYPE_'):
            self.types += [y]
        elif x.startswith('ID_'):
            self.props[x] = y
    for x, y in MapiEnum().allValues().iteritems():
        # Two independent checks: a name could in principle match both.
        if x.startswith('PT_'):
            self.tmapi += [y]
        if x.startswith('PR_'):
            self.pmapi[x] = y
def minimize(self, test_case_asm: str, outfile: str, num_inputs: int, add_fences: bool):
    """Reproduce a violation for *test_case_asm*, shrink the test case, and write it to *outfile*."""
    # initialize fuzzer
    fuzzer: Fuzzer = Fuzzer(self.instruction_set_spec, "", test_case_asm)
    fuzzer.initialize_modules()

    # Parse the test case and inputs
    test_case: TestCase = fuzzer.generator.parse_existing_test_case(test_case_asm)
    inputs: List[Input] = fuzzer.input_gen.generate(CONF.input_gen_seed, num_inputs)

    # Load, boost inputs, and trace
    fuzzer.model.load_test_case(test_case)
    boosted_inputs: List[Input] = fuzzer.boost_inputs(inputs, CONF.model_max_nesting)

    print("Trying to reproduce...")
    violations = self._get_all_violations(fuzzer, test_case, boosted_inputs)
    if not violations:
        print("Could not reproduce the violation. Exiting...")
        return
    print(f"Found {len(violations)} violations")

    # Input-set minimization is currently disabled; all boosted inputs are used.
    # print("Searching for a minimal input set...")
    # min_inputs = self.minimize_inputs(fuzzer, test_case, boosted_inputs, violations)
    min_inputs = boosted_inputs

    print("Minimizing the test case...")
    min_test_case: TestCase = self.minimize_test_case(fuzzer, test_case, min_inputs)

    if add_fences:
        print("Trying to add fences...")
        min_test_case = self.add_fences(fuzzer, min_test_case, min_inputs)

    print("Storing the results")
    copy(min_test_case.asm_path, outfile)
def check_fuzzer(args, factory):
    """Implementation of "fx fuzz check"."""
    device = factory.create_device()
    found_any = False
    for package, executable in device.buildenv.fuzzers(args.name):
        fuzzer = Fuzzer(device, package, executable)
        # Without an explicit name filter, report only running fuzzers.
        if not args.name and not fuzzer.is_running():
            continue
        if not fuzzer.is_resolved():
            factory.host.echo('{}: NOT INSTALLED'.format(fuzzer))
        elif fuzzer.is_running():
            factory.host.echo('{}: RUNNING'.format(fuzzer))
        else:
            factory.host.echo('{}: STOPPED'.format(fuzzer))
        if fuzzer.is_resolved():
            num, size = fuzzer.corpus.measure()
            factory.host.echo(
                ' Corpus size: {} inputs / {} bytes'.format(num, size))
        artifacts = fuzzer.list_artifacts()
        if artifacts:
            factory.host.echo(' Artifacts:')
            for artifact in artifacts:
                factory.host.echo(' {}'.format(artifact))
        factory.host.echo('')
        found_any = True
    if not found_any:
        factory.host.echo('No fuzzers are running.',
                          'Include \'name\' to check specific fuzzers.')
def __init__(self, tprogram, seed_files, workdir, target_opts=None,
             input_placeholder='@@', afl_opts=None, afl_count=1,
             library_path=None):
    """Classify seed files (crash / timeout / usable) and start a shellphish
    fuzzer (AFL wrapper) for *tprogram*.

    :param tprogram: target program object (exposes program_name/program_path)
    :param seed_files: paths to seed inputs, or None for a predefined seed
    :param workdir: AFL working directory
    :param target_opts: argv options for the target
    :param input_placeholder: marker replaced by the input file path
    :param afl_opts: extra options forwarded to AFL
    :param afl_count: number of AFL instances to launch
    :param library_path: library path forwarded to the fuzzer
    """
    self.tprogram = tprogram
    self.target_opts = target_opts
    self.workdir = workdir
    self.stat_file = os.path.join(self.workdir, tprogram.program_name, 'stat')
    self.stat = {}
    self.crashing_inputs = []
    self.tmout_inputs = []

    # PEP 8: identity comparison with None (was `== None`).
    if seed_files is None:
        seed_files = self.__prepare_predefined_seeds()

    seeds = []
    logger.debug("Starting to classify the seeds for %s", tprogram.program_name)
    for sf in seed_files:
        logger.debug("Trying running %s", sf)
        ret = self.__get_exec_result_on_input(sf)
        if ret == 2:
            # the seed results in crash
            self.crashing_inputs.append(sf)
        elif ret == 1:
            # the seed results in timeout
            self.tmout_inputs.append(sf)
        else:
            # NOTE(review): seeds are read in text mode here, while the
            # sibling implementation reads them in binary mode — confirm
            # which is intended for non-UTF-8 seed files.
            with open(sf, 'r') as f:
                seeds.append(f.read())
    logger.debug("Classifying the seeds for %s ended", tprogram.program_name)

    use_qemu = True
    logger.debug("fuzzing %s with opts: %s", self.tprogram.program_path,
                 str(target_opts))
    self._fuzzer = SFFuzzer(self.tprogram.program_path, self.workdir,
                            seeds=seeds, qemu=use_qemu,
                            create_dictionary=False, target_opts=target_opts,
                            extra_opts=afl_opts, afl_count=afl_count,
                            library_path=library_path)

    # save the seed files resulting in crashes and timeout
    self.save_crash_and_tmout_inputs()
def create_dict(binary, dict_filename):
    """Generate an AFL dictionary for *binary* via angr's create_dict.py.

    Truthy iff the script exited with status 0 and wrote a non-empty file.
    """
    script = os.path.join(__angr_Fuzzer._get_base(), "bin", "create_dict.py")
    command = [sys.executable, script, binary]
    with open(dict_filename, 'wb') as out:
        retcode = subprocess.Popen(command, stdout=out).wait()
    return retcode == 0 and os.path.getsize(dict_filename)
def __init__(self, binary_path, testcase):
    """
    :param binary_path: path to the binary which the testcase applies to
    :param testcase: string representing the contents of the testcase
    """
    self.binary_path = binary_path
    self.testcase = testcase

    # Verify AFL prerequisites before doing any work.
    Fuzzer._perform_env_checks()
    self.base = Fuzzer._get_base()
    l.debug("got base dir %s", self.base)

    # unfortunately here is some code reuse between Fuzzer and Minimizer
    p = angr.Project(self.binary_path)
    # CGC binaries use the dedicated 'cgc' tracer id; everything else
    # uses the QEMU name of the target architecture.
    tracer_id = 'cgc' if p.loader.main_bin.os == 'cgc' else p.arch.qemu_name
    self.tmin_path = os.path.join(afl_wrapper.afl_dir(tracer_id), "afl-tmin")
    self.afl_path_var = afl_wrapper.afl_path_var(tracer_id)
    l.debug("tmin_path: %s", self.tmin_path)
    l.debug("afl_path_var: %s", self.afl_path_var)
    os.environ['AFL_PATH'] = self.afl_path_var

    # create temp
    self.work_dir = tempfile.mkdtemp(prefix='tmin-', dir='/tmp/')

    # flag for work directory removal
    self._removed = False

    self.input_testcase = os.path.join(self.work_dir, 'testcase')
    self.output_testcase = os.path.join(self.work_dir, 'minimized_result')
    l.debug("input_testcase: %s", self.input_testcase)
    l.debug("output_testcase: %s", self.output_testcase)

    # populate contents of input testcase
    with open(self.input_testcase, 'w') as f:
        f.write(testcase)
def main():
    """CLI entry point: argv = config path, endpoints JSON, junit output[, custom payloads]."""
    config_file_path = sys.argv[1]
    endpoints_description = sys.argv[2]
    junit_output = sys.argv[3]
    custom_payloads_path = sys.argv[4] if len(sys.argv) == 5 else None

    with open(config_file_path, 'r') as config_fp:
        ConfigurationManager(config_fp)
    target = ConfigurationManager.config["target"]

    # Load and generate default payloads
    load_default_payloads(target["hostname"])

    # If user specified file with custom payloads, we add them to our mutations
    payloads_loader = PayloadsLoader(target["hostname"])
    payloads_loader.load_payloads(custom_payloads_path,
                                  FuzzPayloads.CUSTOM_PAYLOADS_KEY)

    with open(junit_output, 'w', encoding='utf8') as junit_fp, \
            open(FUZZING_LOG_FILE, "w", encoding='utf8') as log_fp:
        text_logger = TextLogger(log_fp)
        junit_logger = JUnitLogger(junit_fp,
                                   test_suite_name_delimiter=":",
                                   hostname=target["hostname"])
        protocol = 'ssl' if target["ssl"] is True else 'tcp'
        with open(endpoints_description, 'r') as endpoints_fp:
            endpoints = json.load(endpoints_fp)
        fuzzer = Fuzzer(endpoints, text_logger, junit_logger, protocol)
        fuzzer.fuzz()
        return fuzzer.was_there_any_failure()
def create_dict(binary, dict_filename):
    """Generate an AFL dictionary for *binary*, keeping only well-formed
    'string_N=...' entries produced by angr's create_dict.py.

    The raw script output goes to `<dict_filename>.org`; the filtered
    dictionary goes to *dict_filename*. Truthy iff the script exited with
    status 0 and the filtered file is non-empty.
    """
    create_dict_script = os.path.join(__angr_Fuzzer._get_base(), "bin", "create_dict.py")
    args = [sys.executable, create_dict_script, binary]
    raw_dict = dict_filename + '.org'
    with open(raw_dict, 'wb') as df:
        p = subprocess.Popen(args, stdout=df)
        retcode = p.wait()

    # Copy only well-formed dictionary entries. Using `with` closes (and
    # flushes) both handles before getsize() below — the original left
    # them open, so the size check could run on unflushed output, and it
    # also shadowed the `file` builtin.
    entry_re = re.compile(r"string_[\d]+=.+[\n]{0,1}")
    with open(raw_dict) as src, open(dict_filename, 'w') as dst:
        for line in src:
            if entry_re.match(line):
                dst.write(line)

    return retcode == 0 and os.path.getsize(dict_filename)
def afl(binary):
    """Fuzz *binary* with AFL and record the outcome in the `binarys` table.

    Returns False when the fuzzer raises InstallError; otherwise returns None.
    """
    l.info("beginning to fuzz \"%s\"", binary)
    binary_path = os.path.join(config.BINARY_DIR, binary)
    # Default seeds when no corpus is available.
    seeds = ["111", "fuzz"]
    fzr = Fuzzer(binary_path,
                 config.AFL_WORK_DIR,
                 config.AFL_INSTANCES,
                 time_limit=config.FUZZ_TIMEOUT,
                 qemu=False,
                 seeds=seeds,
                 create_dictionary=False)
    try:
        fzr.start()
        # clean all stale redis data
        clean_redis(fzr)
        time.sleep(2)
        # start the fuzzer and poll for a crash, timeout, or concolic assistance
        while not fzr.found_crash() and not fzr.timed_out():
            time.sleep(5)
        # Status codes: 5 when timed out, 4 otherwise — TODO confirm meanings.
        if fzr.timed_out():
            sql = 'update binarys SET status=5 WHERE binary_name = %s'
        else:
            sql = 'update binarys SET status=4 WHERE binary_name = %s'
        db.execute(sql, binary)
        fzr.kill()
    except InstallError:
        return False
def create_fuzzer(self, args, device=None):
    """Constructs a Fuzzer from command line arguments, showing a
    disambiguation menu if specified name matches more than one fuzzer."""
    device = device or self.create_device()
    package, executable = self._resolve_fuzzer(device.buildenv, args.name)
    fuzzer = Fuzzer(device, package, executable)
    # Settable Fuzzer properties may be overridden from the parsed args.
    settable = {
        name
        for name, attr in vars(Fuzzer).items()
        if isinstance(attr, property) and attr.fset
    }
    for name, value in vars(args).items():
        if name in settable and value is not None:
            setattr(fuzzer, name, value)
    return fuzzer
def minimize_inputs(self, fuzzer: Fuzzer, test_case: TestCase,
                    inputs: List[Input],
                    violations: List[EquivalenceClass]) -> List[Input]:
    """Try to shrink *inputs* to a smaller set that still reproduces *violations*.

    Falls back to the full input list when the reduced set fails to
    reproduce or is not actually smaller.
    """
    min_inputs: List[Input] = []
    for violation in violations:
        # Build a priming sequence for every measurement in the class.
        for i in range(len(violation)):
            measurement = violation.measurements[i]
            primer, _ = fuzzer.build_batch_primer(inputs, measurement.input_id,
                                                  measurement.htrace, 1)
            min_inputs.extend(primer)

    # Make sure these inputs indeed reproduce
    violations = self._get_all_violations(fuzzer, test_case, min_inputs)
    if not violations or len(min_inputs) > len(inputs):
        print("Failed to build a minimal input sequence. Falling back to using all inputs...")
        min_inputs = inputs
    else:
        print(f"Reduced to {len(min_inputs)} inputs")
    return min_inputs
def start_run(self, chosen_devices, methods):
    """Deploy the agent to each chosen device and launch one fuzzer thread per device."""
    print(methods)
    base_port = settings.config['devices_start_port']
    self.view.log('Instaluję agenta na urządzeniach ({})...\n'.format(
        len(chosen_devices)))
    for offset, idx in enumerate(chosen_devices):
        target = self.devices[idx]
        target.device.push(settings.config['agent_path'],
                           '/data/local/tmp/agent', 0o755)
        #TODO: add devices.shell run agent
        # Snapshot of the mutation selections, re-read for each device.
        mutations = [entry[1].get() for entry in self.mutations]
        self.fuzzers.append(
            Fuzzer(target, base_port + offset, self.project.devpath,
                   self.corpus, methods, mutations))
    for fuzzer in self.fuzzers:
        worker = threading.Thread(target=fuzzer.run, daemon=True)
        self.threads.append(worker)
        worker.start()
    self.view.log('Rozpoczęto testowanie!\n')
def fuzz(self, api_resources):
    """Run APIFuzzer against the given API definition.

    :type api_resources: dict
    """
    runner = Fuzzer(
        api_resources=api_resources,
        report_dir=self.report_dir,
        test_level=1,
        alternate_url=self.test_app_url,
        test_result_dst=None,
        log_level='Debug',
        basic_output=False,
        auth_headers={},
    )
    runner.prepare()
    runner.run()
def fuzz(self, api_resources):
    """Run APIFuzzer against the given API definition; expects a SystemExit.

    :type api_resources: dict
    """
    with pytest.raises(SystemExit):
        runner = Fuzzer(
            api_resources=api_resources,
            report_dir=self.report_dir,
            test_level=1,
            alternate_url=self.test_app_url,
            test_result_dst=None,
            log_level='Debug',
            auth_headers={},
        )
        runner.prepare()
        runner.run()
def read_fuzzers(self, pathname):
    """Parses the available fuzzers from an fuzzers.json pathname."""
    with self.host.open(
            pathname,
            on_error=[
                'Failed to read fuzzers from {}.'.format(pathname),
                'Have you run "fx set ... --fuzz-with <sanitizer>"?'
            ]) as opened:
        metadata = json.load(opened)
    fuzz_specs = []
    # v2 entries are keyed by their 'label'; repeated labels merge fields.
    by_label = defaultdict(dict)
    for entry in metadata:
        # Try v2 metadata first.
        label = entry.get('label')
        if label:
            by_label[label].update(entry)
            continue
        # Fallback to v1 metadata.
        package = entry['fuzzers_package']
        package_url = 'fuchsia-pkg://fuchsia.com/{}'.format(package)
        for fuzzer in entry['fuzzers']:
            fuzz_specs.append({
                'package': package,
                'package_url': package_url,
                'fuzzer': fuzzer,
                'manifest': '{}.cmx'.format(fuzzer),
                'label': '//generated/{}:{}'.format(package, fuzzer),
            })
    fuzz_specs += by_label.values()
    self._fuzzers = [
        Fuzzer(self._factory, fuzz_spec) for fuzz_spec in fuzz_specs
    ]
    self._fuzzers.sort()
def _get_all_violations(self, fuzzer: Fuzzer, test_case: TestCase,
                        inputs: List[Input]) -> List[EquivalenceClass]:
    """Trace *inputs* on model and executor, filter violations, and keep
    only those that survive priming (unless priming is disabled in CONF)."""
    # Initial measurement
    fuzzer.model.load_test_case(test_case)
    fuzzer.executor.load_test_case(test_case)
    ctraces = fuzzer.model.trace_test_case(inputs, CONF.model_max_nesting)
    htraces: List[HTrace] = fuzzer.executor.trace_test_case(inputs)

    # Check for violations
    violations: List[EquivalenceClass] = fuzzer.analyser.filter_violations(
        inputs, ctraces, htraces, stats=True)
    if not violations:
        return []
    if CONF.no_priming:
        return violations

    # Try priming the inputs that disagree with the other ones within the same eq. class
    true_violations = []
    while violations:
        violation: EquivalenceClass = violations.pop()
        if fuzzer.survives_priming(violation, inputs):
            true_violations.append(violation)
    return true_violations
def fuzz(self, api_resources, headers):
    """Run APIFuzzer against the given API definition.

    :type api_resources: dict
    :param headers: headers to add fuzz request
    """
    # No headers supplied: fall back to randomly generated auth headers.
    if headers is None:
        self.generate_random_auth_headers()
    else:
        self.auth_headers = headers
    runner = Fuzzer(
        api_resources=api_resources,
        report_dir=self.report_dir,
        test_level=1,
        alternate_url=self.test_app_url,
        test_result_dst=None,
        log_level='Debug',
        basic_output=False,
        auth_headers=self.auth_headers,
    )
    runner.prepare()
    runner.run()
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='A DOM fuzzer')
    parser.add_argument("-m", dest="mode", help="Fuzzing mode")
    parser.add_argument("-n", dest="num", help="Number of generated testcases", required=False)
    parser.add_argument("-i", dest="index", help="Fuzzer ID")
    parser.add_argument("-o", dest="output", required=False)
    args = parser.parse_args()
    # Map the textual mode onto a FuzzMode value; unknown modes abort.
    mode = fuzz_modes.get(args.mode)
    if mode is None:
        parser.print_help()
        sys.exit(1)
    init()
    if mode == FuzzMode.GenerateOnly:
        # Generate-only mode writes testcases to disk without running them.
        if args.output is None or args.num is None:
            print(
                "Number of testcases (-n) and output directory (-o) are required in generated-only mode."
            )
            sys.exit(1)
        manager = Manager(int(args.index), True, args.output)
        fuzzer = Fuzzer(None, manager)
        fuzzer.generate_only(int(args.num))
    else:
        # Other fuzzing modes are not handled in this chunk.
        pass
def main():
    """CLI entry point: dispatch to fuzzing or test-case minimization."""
    parser = ArgumentParser(description='', add_help=False)
    subparsers = parser.add_subparsers(dest='subparser_name')

    # Fuzzing
    parser_fuzz = subparsers.add_parser('fuzz')
    parser_fuzz.add_argument("-s", "--instruction-set", type=str, required=True)
    parser_fuzz.add_argument("-c", "--config", type=str, required=False)
    parser_fuzz.add_argument(
        "-n",
        "--num-test-cases",
        type=int,
        default=1,
        help="Number of test cases.",
    )
    parser_fuzz.add_argument(
        "-i",
        "--num-inputs",
        type=int,
        default=100,
        help="Number of inputs per test case.",
    )
    parser_fuzz.add_argument(
        '-w',
        '--working-directory',
        type=str,
        default='',
    )
    parser_fuzz.add_argument('-t',
                             '--testcase',
                             type=str,
                             default=None,
                             help="Use an existing test case")
    parser_fuzz.add_argument(
        '--timeout',
        type=int,
        default=0,
        help=
        "Run fuzzing with a time limit [seconds]. No timeout when set to zero."
    )
    parser_fuzz.add_argument(
        '--nonstop',
        action='store_true',
        help="Don't stop after detecting an unexpected result")

    # Minimization
    parser_mini = subparsers.add_parser('minimize')
    parser_mini.add_argument(
        '--infile',
        '-i',
        type=str,
        required=True,
    )
    parser_mini.add_argument(
        '--outfile',
        '-o',
        type=str,
        required=True,
    )
    parser_mini.add_argument("-c", "--config", type=str, required=False)
    parser_mini.add_argument(
        "-n",
        "--num-inputs",
        type=int,
        default=100,
        help="Number of inputs per test case.",
    )
    parser_mini.add_argument(
        "-f",
        "--add-fences",
        action='store_true',
        default=False,
        help="Add as many LFENCEs as possible, while preserving the violation.",
    )
    parser_mini.add_argument("-s", "--instruction-set", type=str, required=True)

    args = parser.parse_args()

    # Update configuration
    if args.config:
        CONF.config_path = args.config
        with open(args.config, "r") as f:
            config_update: Dict = yaml.safe_load(f)
        for var, value in config_update.items():
            CONF.set(var, value)
    CONF.sanity_check()
    LOGGER.set_logging_modes()

    # Fuzzing
    if args.subparser_name == 'fuzz':
        # Make sure we're ready for fuzzing.
        # BUGFIX: SystemExit was previously constructed but never raised,
        # so a missing working directory did not stop execution.
        if args.working_directory and not os.path.isdir(args.working_directory):
            raise SystemExit("The working directory does not exist")

        # Normal fuzzing mode
        fuzzer = Fuzzer(args.instruction_set, args.working_directory,
                        args.testcase)
        fuzzer.start(
            args.num_test_cases,
            args.num_inputs,
            args.timeout,
            args.nonstop,
        )
        return

    # Test Case minimisation
    if args.subparser_name == "minimize":
        CONF.coverage_type = 'none'
        postprocessor = Postprocessor(args.instruction_set)
        postprocessor.minimize(args.infile, args.outfile, args.num_inputs,
                               args.add_fences)
        return

    raise Exception("Unreachable")
def __init__(self):
    # Base-class setup first; then pin the request method for this fuzzer.
    Fuzzer.__init__(self)
    self.method = "POST"  # fixed HTTP method used by this fuzzer variant
def __init__(self, settings):
    # ids is created before base init and handed to Fuzzer — presumably
    # shared mutable state between this object and the base; confirm.
    self.ids = []
    # Explicit base-class initialisation for both parents (multiple
    # inheritance); the call order here is deliberate — do not reorder.
    Fuzzer.__init__(self, settings, self.ids)
    WebServer.__init__(self, settings)
report_path = sys.argv[1] else: report_path = os.path.join(rp.get_path(source_path), "report") if not os.path.exists(report_path): os.makedirs(report_path) print "Directory " + dirName + " Created" try: with open(config_path) as f: config = json.load(f) rospy.loginfo("Building Fuzzer with configuration.") except IOError: config = None rospy.loginfo("The configuration file does not exist. Working with empty dictionary.") fuzzer = Fuzzer(config) # Anything necesary before the execution of state # As built a mock robot or initialize the necessary services. # # skills = base_skills # rospy.loginfo("Building robot with skills: ".join(skills)) # robot = robot_factory.build(skills) sm = state_machine # Only if you need or want a instronspection server to see the state machine structure: # sis = smach_ros.IntrospectionServer("server_name", sm, "/SM_ROOT") # sis.start() # It can handle multiple state machines, only add to machines and it will test all togethers machines = [sm._states]
def __init__(self):
    # Register parseCert as this fuzzer's parser callback.
    Fuzzer.__init__(self, self.parseCert)
def fuzz(binary):
    """Fuzz *binary* with AFL, assisted by concolic drilling; track status in DB.

    Returns False on InstallError, otherwise True iff at least one crash
    was found.
    """
    l.info("beginning to fuzz \"%s\"", binary)
    binary_path = os.path.join(config.BINARY_DIR, binary)
    seeds = ["fuzzz", "111"]
    # look for a pcap
    pcap_path = os.path.join(config.PCAP_DIR, "%s.pcap" % binary)
    if os.path.isfile(pcap_path):
        l.info("found pcap for binary %s", binary)
        seeds = pcap.process(pcap_path)
    else:
        l.warning(
            "unable to find pcap file, will seed fuzzer with the default")
    # TODO enable dictionary creation, this may require fixing parts of the fuzzer module
    fzr = Fuzzer(binary_path,
                 config.FUZZER_WORK_DIR,
                 config.FUZZER_INSTANCES,
                 time_limit=config.FUZZ_TIMEOUT,
                 qemu=False,
                 seeds=seeds,
                 create_dictionary=False)
    try:
        fzr.start()
        # start a listening for inputs produced by concolic
        start_listener(fzr)
        # clean all stale redis data
        clean_redis(fzr)
        # list of 'concolic request' each is a celery async result object
        concolic_jobs = []
        time.sleep(2)
        # start the fuzzer and poll for a crash, timeout, or concolic assistance
        while not fzr.found_crash() and not fzr.timed_out():
            # check to see if concolic should be invoked
            # status codes 1/2/3/-1 below — TODO confirm their exact meanings
            sql = 'update binarys SET status=1 WHERE binary_name = %s'
            db.execute(sql, binary)
            if 'fuzzer-1' in fzr.stats and 'pending_favs' in fzr.stats[
                    'fuzzer-1']:
                if not int(fzr.stats['fuzzer-1']['pending_favs']) > 0:
                    # No pending favourites left: request concolic drilling.
                    sql = 'update binarys SET status=2 WHERE binary_name = %s'
                    db.execute(sql, binary)
                    concolic_jobs.extend(request_drilling(fzr))
            time.sleep(config.CRASH_CHECK_INTERVAL)
        # make sure to kill the fuzzers when we're done
        fzr.kill()
    except InstallError:
        l.info("fuzzer InstallError")
        return False
    # we found a crash!
    if fzr.found_crash():
        l.info("found crash for \"%s\"", binary)
        l.info("time for found_crash %d", fzr.compute_time())
        sql = 'update binarys SET status=3 WHERE binary_name = %s'
        db.execute(sql, binary)
        # publish the crash
        redis_inst = redis.Redis(host=config.REDIS_HOST,
                                 port=config.REDIS_PORT,
                                 db=config.REDIS_DB)
        redis_inst.publish("crashes", binary)
        # revoke any concolic jobs which are still working
        for job in concolic_jobs:
            if job.status == 'PENDING':
                job.revoke(terminate=True)
    if fzr.timed_out():
        l.info("timed out while fuzzing \"%s\"", binary)
        sql = 'update binarys SET status=-1 WHERE binary_name = %s'
        db.execute(sql, binary)
    # TODO end drilling jobs working on the binary
    return len(fzr.crashes()) > 0
# Populate certificate validity window, public key, and signature fields.
cert.cert.validity.setup('170420132219Z', '180420132219Z')
cert.cert.pubkey.setup(hexdump.dehex(pubkey))
cert.signature.setup(hexdump.dehex(signature))
# Build the issuer-and-serial structure and splice it into the signed data.
iss = p7s.IssuerAndSerial()
iss[0] = st
obj.signedData.certificates[0].replace(cert)
si = obj.signedData.signerInfos[0]
si.issuerAndSerial.replace(iss)
si.attributes.signingTime.setup('170502094738Z')
si.attributes.messageDigest.setup(hexdump.dehex(msgDigest))
si.attributes.msEncryptionCert.setup(iss)
si.attributes.keyPref.setup(iss)
si.digest.setup(hexdump.dehex(digest))
data = obj.dump()
# Compare the serialized object against the reference .p7s file on disk.
with open("../mails/cert.p7s", "rb") as f:
    res = f.read()
print "test3:", "OK" if checkBuffers(res, data, True) else "FAILED"
# obj.pprint()
# Replay a previously recorded crash input through the TNEF parser.
fuzz = Fuzzer()
fuzz.parseArgs(None, ['-v'])
with open("crashdata0.log", "rb") as f:
    data = f.read()
print fuzz.parseTnef(fuzz.variant(data))
class Fuzzer(object):
    '''
    Wrapper class of shellphish fuzzer, which is, in turn, a python
    wrapper for AFL.
    '''

    def __init__(self, tprogram, seed_files, workdir, target_opts=None,
                 input_placeholder='@@', afl_opts=None):
        """Classify seeds (crash / timeout / usable) and start the
        underlying shellphish fuzzer for *tprogram*.

        :param tprogram: target program object (program_name/program_path/is_cgc)
        :param seed_files: paths to seed inputs, or None for a predefined seed
        :param workdir: AFL working directory
        :param target_opts: argv options for the target
        :param input_placeholder: marker replaced by the input file path
        :param afl_opts: extra options forwarded to AFL
        """
        self.tprogram = tprogram
        self.target_opts = target_opts
        self.workdir = workdir
        self.stat_file = os.path.join(self.workdir, tprogram.program_name, 'stat')
        self.stat = {}
        self.crashing_inputs = []
        self.tmout_inputs = []
        # PEP 8: identity comparison with None (was `== None`).
        if seed_files is None:
            seed_files = self.__prepare_predefined_seeds()
        seeds = []
        logger.debug("Starting to classify the seeds for %s", tprogram.program_name)
        for sf in seed_files:
            logger.debug("Trying running %s", sf)
            ret = self.__get_exec_result_on_input(sf)
            if ret == 2:
                # the seed results in crash
                self.crashing_inputs.append(sf)
            elif ret == 1:
                # the seed results in timeout
                self.tmout_inputs.append(sf)
            else:
                # `with` closes the handle (the original leaked it).
                with open(sf, "br") as f:
                    seeds.append(f.read())
        logger.debug("Classifying the seeds for %s ended", tprogram.program_name)
        use_qemu = False
        if tprogram.is_cgc():
            use_qemu = True
        logger.debug("fuzzing %s with opts: %s", self.tprogram.program_path,
                     str(target_opts))
        self._fuzzer = SFFuzzer(self.tprogram.program_path, self.workdir,
                                seeds=seeds, qemu=use_qemu,
                                create_dictionary=False,
                                target_opts=target_opts, extra_opts=afl_opts)
        # save the seed files resulting in crashes and timeout
        self.save_crash_and_tmout_inputs()

    def __prepare_predefined_seeds(self):
        """Create a single default seed file containing 'fuzz' and return its path."""
        _pre_seeds_dir = os.path.join(self.workdir, '_pre_seeds')
        _pre_seed_file = os.path.join(_pre_seeds_dir, 'seed-0')
        if not os.path.exists(_pre_seeds_dir):
            os.makedirs(_pre_seeds_dir)
        with open(_pre_seed_file, 'w') as f:
            f.write('fuzz')
        return [_pre_seed_file]

    def __str__(self):
        return "<Fuzzer:(tprogram:%s, workdir:%s)>" % \
            (self.tprogram, self.workdir)

    def __repr__(self):
        return self.__str__()

    def start(self):
        """Mark the job running and start AFL."""
        self.stat['status'] = 'running'
        self._fuzzer.start()

    def stop(self):
        """Kill AFL, mark the job stopped, and persist the stat file."""
        self._fuzzer.kill()
        self.stat['status'] = 'stopped'
        self.write_stat()

    def __find_generated_files(self, afl_instance, subdir):
        """Return paths of all 'id:*' files under *subdir* of *afl_instance*."""
        if not os.path.exists(
                os.path.join(self._fuzzer.out_dir, afl_instance, subdir)):
            raise ValueError("subdir: '%s' in afl instance '%s' does not exist"
                             % (subdir, afl_instance))
        subdir = os.path.join(self._fuzzer.out_dir, afl_instance, subdir)
        generated_files = [
            x for x in os.listdir(subdir) if x.startswith('id:')
        ]
        return [os.path.join(subdir, x) for x in generated_files]

    def generated_inputs(self, afl_instance='fuzzer-master'):
        """Inputs generated by AFL (queue directory)."""
        return self.__find_generated_files(afl_instance, 'queue')

    def crashes_found(self, afl_instance='fuzzer-master'):
        """Crash inputs found by AFL (crashes directory)."""
        return self.__find_generated_files(afl_instance, 'crashes')

    def crash_seeds(self):
        """Seed files that crashed the target during classification."""
        cs_dir = os.path.join(self._fuzzer.job_dir, 'crashing_seeds')
        cs_files = [
            x for x in os.listdir(cs_dir) if x.startswith('crash_seed_')
        ]
        return [os.path.join(cs_dir, x) for x in cs_files]

    def timeout_seeds(self):
        """Seed files that timed out during classification.

        BUGFIX: the original referenced the undefined names cs_dir/cs_files
        (copy-paste from crash_seeds) and raised NameError when called.
        """
        timeout_seed_dir = os.path.join(self._fuzzer.job_dir, 'tmout_seeds')
        timeout_seed_files = [
            x for x in os.listdir(timeout_seed_dir)
            if x.startswith('tmout_seed_')
        ]
        return [os.path.join(timeout_seed_dir, x) for x in timeout_seed_files]

    def failed_to_start(self):
        """True when the AFL master log is missing or reports PROGRAM ABORT."""
        afl_log_file = os.path.join(self._fuzzer.job_dir, 'fuzzer-master.log')
        if not os.path.exists(afl_log_file):
            return True
        ansi_escape = re.compile(r'\x1b[^m]*m')
        with open(afl_log_file, 'r') as f:
            for l in f.readlines():
                l = l.strip()
                # BUGFIX: the original discarded sub()'s return value, so
                # the abort marker was searched in the still-colored line.
                l = ansi_escape.sub('', l)
                if "[-] PROGRAM ABORT :" in l:
                    return True
        return False

    def __get_exec_result_on_input(self, input_file):
        """Run the target once on *input_file*: 0 = ok, 1 = timeout, 2 = crash."""
        args = replace_input_placeholder(self.target_opts, input_file)
        # here we hard-code the max time to run a target program by 1 sec
        executor = Executor(self.tprogram.program_path, target_opts=args,
                            timeout=1)
        if executor.tmout:
            return 1
        if executor.crash:
            return 2
        return 0

    def save_crash_and_tmout_inputs(self, additional_crash_input_files=None,
                                    additional_tmout_input_files=None):
        """Copy crashing and timeout seeds into the job directory."""
        ci_dir = os.path.join(self._fuzzer.job_dir, 'crashing_seeds')
        tmout_dir = os.path.join(self._fuzzer.job_dir, 'tmout_seeds')
        if not os.path.exists(ci_dir):
            os.makedirs(ci_dir)
        if not os.path.exists(tmout_dir):
            os.makedirs(tmout_dir)
        if additional_crash_input_files is not None:
            for c in additional_crash_input_files:
                self.crashing_inputs.append(c)
        if additional_tmout_input_files is not None:
            for c in additional_tmout_input_files:
                self.tmout_inputs.append(c)
        for idx, i in enumerate(self.crashing_inputs):
            shutil.copyfile(i, os.path.join(ci_dir, 'crash_seed_' + str(idx)))
        for idx, i in enumerate(self.tmout_inputs):
            shutil.copyfile(i, os.path.join(tmout_dir,
                                            'tmout_seed_' + str(idx)))

    def resuming(self):
        """Whether the underlying fuzzer resumed a previous session."""
        return self._fuzzer.resuming

    def is_stuck(self):
        """True when the master instance reports zero pending favorites."""
        try:
            pending_favs = self._fuzzer.stats['fuzzer-master']['pending_favs']
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # are no longer swallowed; missing stats still mean "not stuck".
            return False
        return int(pending_favs) == 0

    def write_stat(self):
        """Persist self.stat as 'key:value' lines to self.stat_file."""
        if not os.path.exists(os.path.dirname(self.stat_file)):
            os.makedirs(os.path.dirname(self.stat_file))
        with open(self.stat_file, 'w') as f:
            for key, val in list(self.stat.items()):
                f.write("%s:%s\n" % (key, val))

    def __del__(self):
        # Best effort: persist stats at teardown.
        self.write_stat()
def __init__(self):
    # Nothing beyond base-class initialisation is needed here.
    Fuzzer.__init__(self)