def attach(self):
    """
    Attach frida to the target process and prepare it for fuzzing.

    The target is selected via the project's 'pid' setting (preferred)
    or its 'process_name'. After attaching, the frida script is loaded,
    the loaded modules are queried and the module filter list is built.

    Returns:
        bool -- True on success, False if no target process was specified
    """
    # Prefer an explicit pid over a process name.
    if self.project.pid is not None:
        target_process = self.project.pid
    elif self.project.process_name is not None:
        target_process = self.project.process_name
    else:
        log.warn("No process specified with 'process_name' or 'pid'!")
        return False

    if self.project.remote_frida:
        self.frida_session = frida.get_remote_device().attach(target_process)
    else:
        self.frida_session = frida.attach(target_process)

    self.loadScript()
    # Ask the injected script for the real pid (resolves a process name
    # to a pid and keeps the saved project state accurate).
    pid = self.frida_script.exports.getpid()
    log.info("Attached to pid %d!" % pid)
    self.project.pid = pid

    # Query the loaded modules from the target
    self.getModuleMap()
    # ... and create the module filter list
    self.createModuleFilterList()
    return True
def init(args):
    """Create a fresh project directory unless one already exists."""
    project_path = args.project
    if os.path.exists(project_path):
        log.warn("Project '%s' already exists!" % project_path)
        return
    log.info("Creating project '%s'!" % project_path)
    created = project.createProject(project_path)
    if not created:
        log.warn("Could not create project!")
def createModuleFilterList(self):
    """
    Creates the list of modules in which coverage information
    should be collected. This list is created by querying frida
    for the loaded modules and comparing them to the modules
    the user selected in the project settings.

    Must be called after frida was attached to the target and
    after getModuleMap() populated self.modules.

    Returns:
        bool -- True if at least one module matched, False otherwise
    """
    if self.modules is None:
        log.warn("'%s'.createModuleFilterList: self.modules is None!" % self.name)
        return False

    # Keep every loaded module whose path contains one of the
    # user-supplied search terms.
    self.watched_modules = []
    for module in self.modules:
        for search_term in self.modules_to_watch:
            if search_term in module["path"]:
                self.watched_modules.append(module)
                # Stop after the first match so a module matching two
                # search terms is not added (and counted) twice.
                break

    if len(self.watched_modules) == 0:
        paths = "\n".join([m["path"] for m in self.modules])
        log.warn("'%s'.createModuleFilterList: No module was selected! Possible choices:\n%s"
                 % (self.name, paths))
        return False
    else:
        paths = "\n".join([m["path"] for m in self.watched_modules])
        log.info("'%s'.createModuleFilterList: Filter coverage to only include the following modules:\n%s"
                 % (self.name, paths))
        return True
def main():
    """Entry point: parse arguments and dispatch the chosen subcommand."""
    args = parse_args()

    # Every subcommand except 'init' operates on an existing project.
    needs_project = args.command != "init"
    if needs_project and not project.loadProject(args.project):
        log.warn("Error: Could not load project '%s'!" % args.project)
        return

    if args.command in ["fuzz", "replay", "minimize"]:
        # These subcommands need a live, attached target.
        fuzzer = FridaFuzzer(project.getInstance())
        if not fuzzer.attach():
            return
        args.func(args, fuzzer)
        log.info("Detach Fuzzer ...")
        fuzzer.detach()
    else:
        args.func(args)

    log.info("Done")
    return
def getModuleMap(self):
    """
    Query the frida script for the modules loaded in the target
    process and cache them in self.modules.

    Returns:
        list -- module dicts with keys id/path/base/end/range/size,
                or None when the script is missing or the RPC fails
    """
    if self.frida_script is None:
        log.warn("'%s'.getModuleMap: self.frida_script is None!" % self.name)
        return None
    try:
        modulemap = self.frida_script.exports.makemaps()
    except frida.core.RPCException as e:
        log.info("RPCException: " + repr(e))
        return None

    self.modules = []
    for image in modulemap:
        # base/end arrive as strings (e.g. "0x7f..."), hence int(x, 0).
        base = int(image['base'], 0)
        end = int(image['end'], 0)
        self.modules.append({
            'id':    image['id'],
            'path':  image['path'],
            'base':  base,
            'end':   end,
            'range': range(base, end),
            'size':  image['size'],
        })
    return self.modules
def buildCorpus(self):
    """
    Collect baseline coverage for every file in the corpus directory
    and accumulate it in self.accumulated_coverage.

    Each input is replayed 5 times to detect non-deterministic
    coverage; the coverage of the last run is written as a drcov file.

    Returns:
        bool -- False when the corpus directory is empty, True otherwise
    """
    log.info("Initializing Corpus...")
    # Resetting Corpus to avoid at restart to have with ASLR more blocks than needed
    self.accumulated_coverage = set()

    corpus = [self.project.corpus_dir + "/" + x
              for x in os.listdir(self.project.corpus_dir)]
    corpus.sort()

    if len(corpus) == 0:
        log.warn("Corpus is empty, please add files/directories with 'add'")
        return False

    for infile in corpus:
        # 'with' makes sure the corpus file handle is closed again.
        with open(infile, "rb") as f:
            fuzz_pkt = f.read()
        coverage_last = None
        for i in range(5):
            t = time.strftime("%Y-%m-%d %H:%M:%S")
            log.update(t + " [iteration=%d] %s" % (i, infile))
            # send packet to target
            coverage = self.getCoverageOfPayload(fuzz_pkt)
            if coverage is None or len(coverage) == 0:
                log.warn("No coverage was returned! you might want to delete %s from corpus if it happens more often" % infile)

            # Coverage should be deterministic; warn when two runs of
            # the same input differ.
            if coverage_last is not None and coverage_last != coverage:
                log.warn(t + " [iteration=%d] Inconsistent coverage for %s!" % (i, infile))
            coverage_last = coverage

        # Accumulate coverage:
        self.accumulated_coverage = self.accumulated_coverage.union(coverage_last)
        write_drcov_file(self.modules, coverage_last,
                         self.project.coverage_dir + "/" + infile.split("/")[-1])

    log.finish_update("Using %d input files which cover a total of %d basic blocks!"
                      % (len(corpus), len(self.accumulated_coverage)))
    self.corpus = corpus
    return True
def minimize(args, fuzzer):
    """
    Subcommand: minimize the corpus with the already-attached fuzzer.

    main() creates and attaches the fuzzer before dispatching here, so
    the passed-in instance is used directly; the previous code shadowed
    the parameter and attached a second fuzzer to the same target.
    """
    if fuzzer.doMinimize():
        log.info("Minimized the Corpus. Start again without the minimizing option!")
    else:
        log.warn("Failed to minimize the corpus!")
def sendFuzzPayloadInProcess(self, payload):
    """
    Send fuzzing payload to target process by invoking the target
    function directly in frida
    """
    # The payload travels over the RPC channel hex-encoded.
    hex_payload = payload.hex()
    try:
        # Call function under fuzz:
        self.frida_script.exports.fuzz(hex_payload)
    except frida.core.RPCException as e:
        log.info("RPCException: " + repr(e))
        log.info("CRASH?")
def add(args):
    """Copy the given files (or directory contents) into the corpus."""
    collected = []
    for path in args.input:
        if not os.path.exists(path):
            log.warn("File or directory '%s' does not exist!" % path)
            return
        if os.path.isdir(path):
            for entry in os.listdir(path):
                collected.append(path + "/" + entry)
        else:
            collected.append(path)

    corpus_dir = project.getInstance().corpus_dir
    for src in collected:
        basename = src.split("/")[-1]
        # Skip files that are already present in the corpus.
        if os.path.exists(corpus_dir + "/" + basename):
            continue
        log.info("Copying '%s' to corpus directory: " % src)
        shutil.copy2(src, corpus_dir)
def getCoverageOfPayload(self, payload, timeout=0.1, retry=5):
    """
    Sends of the payload and checks the returned coverage.

    Important: Frida appears to have a bug sometimes in collecting
    traces with the stalker.. no idea how to fix this yet.. hence
    we do a retry. This can however screw up the replay functionality
    and should be fixed in the future.

    Arguments:
        payload {bytes} -- payload which is sent to the target

    Keyword Arguments:
        timeout {float} -- seconds to poll for coverage (default: {0.1})
        retry {int} -- number of re-sends on empty coverage (default: {5})

    Returns:
        set -- basic blocks covered by the payload (empty set on failure)
    """
    cnt = 0
    while cnt <= retry:
        try:
            # Deliver the payload either in-process (direct function
            # call via frida) or over the network.
            if self.project.fuzz_in_process:
                self.sendFuzzPayloadInProcess(payload)
            else:
                self.sendFuzzPayload(payload)
            # Poll the script for coverage until something arrives or
            # the timeout expires.
            start = time.time()
            cov = None
            while (cov == None or len(cov) == 0) and (time.time() - start) < timeout:
                cov = self.frida_script.exports.getcoverage()
            if cov != None and len(cov) > 0:
                break
            cnt += 1
            # NOTE(review): this early return sits inside the retry loop,
            # so an empty coverage result returns immediately and the
            # 'retry' parameter never takes effect — confirm intent.
            if cov == None or len(cov) == 0:
                log.info("getCoverageOfPayload: got nothing!")
                return set()
        except frida.InvalidOperationError as e:
            # The frida session broke (e.g. target died): re-attach
            # after a grace period and try again.
            log.warning("Error communicating with the frida script: %s" % str(e))
            self.detach()
            time.sleep(30)
            self.attach()
    return parse_coverage(cov, self.watched_modules)
def fuzzerLoop(self):
    """Run fuzzing iterations until one fails or the user interrupts."""
    try:
        self.start_time = time.time()
        self.total_executions = 0
        while True:
            ok = self.doIteration()
            if not ok:
                log.info("stopping fuzzer loop")
                return False
            # Re-read the corpus: doIteration may have added new files.
            corpus_dir = self.project.corpus_dir
            refreshed = [corpus_dir + "/" + name for name in os.listdir(corpus_dir)]
            refreshed.sort()
            self.corpus = refreshed
            # Persist progress so a restart can resume at this seed.
            self.project.seed += 1
            self.project.saveState()
    except KeyboardInterrupt:
        log.info("Interrupted by user..")
def doReplay(self):
    """
    This function replays the last Session. This function will
    later implement also probes to test when the process is crashing

    Returns:
        bool -- False in all paths (True would mean a confirmed crash
                reproduction, which is not implemented yet)
    """
    log.info("Starting the full Replay")
    # The history file records one "<corpus file>|<seed>" pair per
    # payload that was sent during the last fuzzing session.
    with open(self.project.project_dir + "/frida_fuzzer.history") as fp:
        for line in fp:
            pkt_file, seed = line.split("|")
            try:
                # Re-create the exact mutated payload from file + seed.
                fuzz_pkt = self.getMutatedPayload(pkt_file, int(seed.strip()))
                if fuzz_pkt == None:
                    continue
                if self.project.debug_mode:
                    open(self.project.debug_dir + "/history", "a").write(
                        "file: {} seed: {} \n{}\n".format(
                            pkt_file,
                            seed,
                            fuzz_pkt,
                        ))
                coverage = self.getCoverageOfPayload(fuzz_pkt)
                log.info("Current iteration: " + time.strftime("%Y-%m-%d %H:%M:%S") +
                         " [seed=%d] [file=%s]" % (int(seed.strip()), pkt_file))
            except (frida.TransportError, frida.InvalidOperationError,
                    frida.core.RPCException) as e:
                # A frida transport/RPC error during replay is taken as
                # evidence that the target crashed on this payload.
                log.finish_update("doReplay: Got a frida error: " + str(e))
                log.debug("Full Stack Trace:\n" + traceback.format_exc())
                log.finish_update("Current iteration: " + time.strftime("%Y-%m-%d %H:%M:%S") +
                                  " [seed=%d] [file=%s]" % (int(seed.strip()), pkt_file))
                log.finish_update("Server Crashed! Lets narrow it down")
                #crash_file = self.crash_dir + time.strftime("/%Y%m%d_%H%M%S_crash")
                #with open(crash_file, "wb") as f:
                #    f.write(fuzz_pkt)
                #log.info("Payload is written to " + crash_file)
                return False
            if coverage == None:
                # NOTE(review): "%d" is applied to 'seed', which is still a
                # string here — this log line would raise TypeError; confirm.
                log.warn("No coverage was generated for [%d] %s!" % (seed, pkt_file))
    # The whole history was replayed without a frida error.
    log.warn("Replay did not crash the Server!")
    return False
def attach(self):
    """
    Attach frida to all specified targets (project.targets)

    Returns:
        bool -- True when every target attached and produced a module
                map plus a module filter list, False on the first failure
    """
    scriptfile = os.path.join(os.path.dirname(__file__), 'frida_script.js')
    log.info("Loading script: %s" % scriptfile)
    # Read the script once and reuse it for every target; 'with'
    # closes the file handle again.
    with open(scriptfile, "r") as f:
        script_code = f.read()
    for target in self.targets:
        if not target.attach(script_code):
            return False
        # Query the loaded modules from the target ...
        if target.getModuleMap() is None:
            return False
        # ... and create the module filter list
        if not target.createModuleFilterList():
            return False
    return True
def loadScript(self):
    """
    Create, load and initialize the frida script inside the attached
    session and store it in self.frida_script.

    Returns:
        the loaded frida script object
    """
    scriptfile = os.path.join(os.path.dirname(__file__), 'frida_script.js')
    log.info("Loading script: %s" % scriptfile)
    # 'with' closes the script file handle again.
    with open(scriptfile, "r") as f:
        script_code = f.read()
    script = self.frida_session.create_script(script_code)

    def on_message(message, data):
        # The script posts "finished" after every fuzz call; only
        # unexpected messages are worth logging.
        if 'payload' in message and str(message['payload']) == "finished":
            pass
        else:
            log.info("on_message: " + str(message))

    script.on('message', on_message)
    script.load()
    script.exports.settarget(self.project.target_function)
    self.frida_script = script
    return script
def attach(self, script_code):
    """
    Attach frida to the target

    Arguments:
        script_code {str} -- source of the frida script to load

    Returns:
        bool -- True on success, False when no target is configured,
                the process is not found, or the symbol cannot be resolved
    """
    # Prefer an explicit pid over a process name.
    if self.process_pid is not None:
        target_process = int(self.process_pid)
    elif self.process_name is not None:
        target_process = self.process_name
    else:
        log.warn("'%s'.attach: No process specified with 'process_name' or 'pid'!" % self.name)
        return False
    try:
        # self.frida_instance already points at the right device
        # (remote or local), so a single attach call covers both
        # cases — the previous remote/local branches were identical.
        self.frida_session = self.frida_instance.attach(target_process)
    except frida.ProcessNotFoundError as e:
        log.warn("'%s'.attach: %s" % (self.name, str(e)))
        return False
    self.loadScript(script_code)
    pid = self.frida_script.exports.getpid()
    log.info("'%s'.attach: Attached to pid %d!" % (self.name, pid))
    self.process_pid = pid
    # The target function may be configured as a symbol name; resolve
    # it to an address inside the target process.
    function_address = self.function
    if isinstance(function_address, str):
        function_address = int(self.frida_script.exports.resolvesymbol(self.function), 0)
        if function_address > 0:
            log.info("Target function '%s' is at address %s!" % (self.function, function_address))
        else:
            log.warn("No symbol with name '%s' was found!" % self.function)
            self.detach()
            return False
    self.frida_script.exports.settarget(function_address)
    return True
def main():
    """
    Entry point: parse arguments, load the project (unless 'init'),
    configure logging, optionally attach the fuzzer and dispatch to
    the selected subcommand.
    """
    args = parse_args()

    if args.command != "init":
        # Load project
        if not project.loadProject(args.project, args):
            log.warn("Error: Could not load project '%s'!" % args.project)
            return
        if project.getInstance().logfile_name is not None:
            # Unbuffered binary mode so log lines hit the disk immediately.
            log.logfile = open(project.getInstance().logfile_name, "wb", 0)
        if not project.getInstance().colored_output:
            log.use_color = False
            log.CLEAR_LINE = ""  # no escape sequences for the no-color option!

    if args.command in ["fuzz", "replay", "minimize"]:
        # Create Fuzzer and attach to target
        fuzzer = FridaFuzzer(project.getInstance())
        if not fuzzer.attach():
            return
        if not fuzzer.loadPayloadFilter():
            return
        # Invoke subcommand function with instantiated fuzzer
        args.func(args, fuzzer)
        log.info("Detach Fuzzer ...")
        fuzzer.detach()
    else:
        # Invoke subcommand function
        args.func(args)

    log.info("Done")
    if log.logfile is not None:
        log.logfile.close()
    return
def doReplay(self):
    """
    This function replays the last Session. This function will later
    implement also probes to test when the process is crashing

    Returns:
        bool -- False in all paths (no confirmed crash reproduction yet)
    """
    log.info("Starting the full Replay")
    # The history file records one "<corpus file>|<seed>" pair per
    # payload sent during the last fuzzing session.
    with open(self.project.project_dir + "/frida_fuzzer.history") as fp:
        for line in fp:
            pkt_file, seed = line.split("|")
            try:
                # Re-create the mutated payload deterministically via radamsa.
                fuzz_pkt = check_output(
                    ["radamsa", "-s", str(seed.strip()), pkt_file])
                # NOTE(review): other code paths use self.project.debug_mode;
                # confirm that 'debug' exists on the project object.
                if self.project.debug:
                    open(self.project.debug_dir + "/history",
                         "a").write("file: {} seed: {} \n{}\n".format(
                             pkt_file,
                             seed,
                             fuzz_pkt,
                         ))
                coverage = self.getCoverageOfPayload(fuzz_pkt)
                log.info("Current iteration: " + time.strftime("%Y-%m-%d %H:%M:%S") +
                         " [seed=%d] [file=%s]" % (int(seed.strip()), pkt_file))
            except (frida.TransportError, frida.InvalidOperationError) as e:
                # A frida transport error during replay is taken as
                # evidence that the target crashed on this payload.
                log.success("doReplay: Got a frida error: " + str(e))
                log.success("Current iteration: " + time.strftime("%Y-%m-%d %H:%M:%S") +
                            " [seed=%d] [file=%s]" % (int(seed.strip()), pkt_file))
                log.success("Server Crashed! Lets narrow it down")
                #crash_file = self.crash_dir + time.strftime("/%Y%m%d_%H%M%S_crash")
                #with open(crash_file, "wb") as f:
                #    f.write(fuzz_pkt)
                #log.info("Payload is written to " + crash_file)
                return False
            if coverage == None:
                # NOTE(review): "%d" is applied to 'seed', which is still a
                # string here — this log line would raise TypeError; confirm.
                log.warn("No coverage was generated for [%d] %s!" % (seed, pkt_file))
    # History fully replayed without an error: probe once more with a
    # dummy payload to check whether the server silently died.
    log.info("Sending Empty Package to verify the crashing server")
    try:
        coverage = self.getCoverageOfPayload(b'FOOBAR')
    except (frida.TransportError, frida.InvalidOperationError) as e:
        log.success("Server Crashed! Lets narrow it down")
        # TODO
        # Rabbit Mode here
    log.warning(
        "History did not crash the Server! Might be due to some race conditions."
    )
    return False
def doMinimize(self):
    """
    This Function will minimize the current Corpus

    Phase 1 collects (up to 5 runs of) coverage for every corpus file;
    phase 2 moves every file whose coverage is a subset of another
    file's coverage into the corpus trash directory.

    Returns:
        bool -- False when the corpus is empty, True otherwise
    """
    log.info("Minimizing Corpus...")
    # Reset the accumulated coverage
    self.accumulated_coverage = set()
    corpus = [
        self.project.corpus_dir + "/" + x
        for x in os.listdir(self.project.corpus_dir)
    ]
    corpus.sort()
    if len(corpus) == 0:
        log.warn(
            "Corpus is empty, please use the 'add' subcommand to add files to it."
        )
        return False
    # Collect coverage
    dict_of_infile_coverages = {}
    loop_counter = 0
    for infile in corpus:
        loop_counter += 1
        # NOTE(review): file handle is not closed explicitly — consider
        # a 'with' block.
        fuzz_pkt = open(infile, "rb").read()
        failed_coverage_count = 0
        tmp_accu_cov = set()
        RETRIES = 5
        for i in range(RETRIES):
            t = time.strftime("%Y-%m-%d %H:%M:%S")
            log.update(
                t + " Collecting coverage for corpus files (%d/%d) ... [iteration=%d] %s"
                % (loop_counter, len(corpus), i, infile))
            # send packet to target
            coverage = self.getCoverageOfPayload(fuzz_pkt, timeout=0.2)
            if coverage == None or len(coverage) == 0:
                failed_coverage_count += 1
                continue
            # Accumulate coverage over all runs of this file (coverage
            # may be non-deterministic).
            tmp_accu_cov = tmp_accu_cov.union(coverage)
        if failed_coverage_count == RETRIES:
            log.warn("Coverage for %s was always 0 (%d retries)" % (infile, RETRIES))
            # note: file will be removed later..
        dict_of_infile_coverages[infile] = tmp_accu_cov
        self.accumulated_coverage = self.accumulated_coverage.union(tmp_accu_cov)
        write_drcov_file(
            self.active_target.modules, tmp_accu_cov,
            self.project.coverage_dir + "/" + infile.split("/")[-1])
    log.finish_update(
        "Collected coverage for corpus (%d basic blocks from %d files in corpus)"
        % (len(self.accumulated_coverage), len(corpus)))
    # Filter all corpus files with a coverage that is a direct subset of another corpus file
    loop_counter = 0
    for infile in corpus:
        loop_counter += 1
        log.update(
            "(%d/%d) Comparing %s (%d bblocks) against rest of the corpus..."
            % (loop_counter, len(corpus), infile,
               len(dict_of_infile_coverages[infile])))
        for other_infile in [f for f in corpus if f != infile]:
            # NOTE(review): two files with identical coverage are mutual
            # subsets, so both can end up moved to trash — confirm intent.
            if dict_of_infile_coverages[infile].issubset(
                    dict_of_infile_coverages[other_infile]):
                log.info(
                    "%s coverage is direct subset of %s. Moving to trash..."
                    % (infile, other_infile))
                backup_file = self.project.corpus_trash_dir + "/" + infile.split(
                    "/")[-1]
                shutil.move(infile, backup_file)
                break
    corpus_new = [
        self.project.corpus_dir + "/" + x
        for x in os.listdir(self.project.corpus_dir)
    ]
    acc_cov_new = set.union(*dict_of_infile_coverages.values())
    log.finish_update(
        "Remaining input files: %d (total of %d basic blocks)."
        % (len(corpus_new), len(acc_cov_new)))
    self.corpus = corpus_new
    return True
def replay(args, fuzzer):
    """Subcommand: replay the recorded fuzzing history on the target."""
    log.info("Replay Mode!")
    fuzzer.doReplay()
def doMinimize(self):
    """
    This Function will minimize the current Corpus

    Each file is replayed 5 times; files that never return coverage
    are moved to trash, files adding no new basic blocks are deleted,
    the rest are kept.

    Returns:
        bool -- False when the corpus is empty, True otherwise
    """
    log.info("Minimizing Corpus...")
    # Reset the accumulated coverage
    self.accumulated_coverage = set()
    corpus = [
        self.project.corpus_dir + "/" + x
        for x in os.listdir(self.project.corpus_dir)
    ]
    corpus.sort()
    if len(corpus) == 0:
        log.warn(
            "Corpus is empty, please specify an input directory with --indir"
        )
        return False
    for infile in corpus:
        # NOTE(review): file handle is not closed explicitly — consider
        # a 'with' block.
        fuzz_pkt = open(infile, "rb").read()
        coverage_last = None
        cov_cnt = 0
        tmp_accu_cov = set()
        for i in range(5):
            t = time.strftime("%Y-%m-%d %H:%M:%S")
            log.update(t + " [iteration=%d] %s" % (i, infile))
            # send packet to target
            coverage = self.getCoverageOfPayload(fuzz_pkt)
            if coverage == None or len(coverage) == 0:
                cov_cnt += 1
            coverage_last = coverage
            # Accumulate coverage:
            # NOTE(review): union() would raise TypeError if coverage were
            # ever None here — getCoverageOfPayload appears to always
            # return a set; confirm.
            tmp_accu_cov = tmp_accu_cov.union(coverage_last)
        # At least 4 of 5 runs returned no coverage: treat the file as dead.
        if cov_cnt >= 4:
            if os.path.exists(infile):
                log.warn(
                    "Moving %s from corpus since the returned coverage was always 0"
                    % infile)
                #TODO
                backup_file = self.project.corpus_trash_dir + "/" + infile.split(
                    "/")[-1]
                shutil.move(infile, backup_file)
        if not tmp_accu_cov.issubset(self.accumulated_coverage):
            # New Paths found. Add it to the overall coverage
            log.success("File: %s looks good for the corpus! Keeping it" % infile)
            self.accumulated_coverage = self.accumulated_coverage.union(
                coverage_last)
            write_drcov_file(
                self.modules, coverage_last,
                self.project.coverage_dir + "/" + infile.split("/")[-1])
        else:
            # No new paths found with current file... better delete it ;-)
            if os.path.exists(infile):
                log.warn(
                    "Deleting %s from corpus since there was no new coverage in it"
                    % infile)
                os.remove(infile)
    log.finish_update(
        "Using %d input files which cover a total of %d basic blocks!"
        % (len(corpus), len(self.accumulated_coverage)))
    self.corpus = corpus
    return True
def minimize(args, fuzzer):
    """Subcommand: reduce the corpus to inputs with unique coverage."""
    success = fuzzer.doMinimize()
    if success:
        log.info("Minimized the Corpus. Start again without the minimizing option!")
    else:
        log.warn("Failed to minimize the corpus!")
def loadProject(self):
    """
    Load the project's TOML config file and, if present, the saved
    state file (seed/pid/crashes/last_new_path) into this object.

    Returns:
        bool -- True on success, False when the config file is missing
                or required sections/keys are absent
    """
    # Load config file
    if not os.path.exists(self.config_file):
        log.warn("Config file %s does not exist!" % self.config_file)
        return False
    proj = toml.loads(open(self.config_file).read())
    log.info("Project: " + repr(proj))
    # Optional [fuzzer] section.
    if "fuzzer" in proj:
        if "log_level" in proj["fuzzer"]:
            log.log_level = proj["fuzzer"]["log_level"]
        if "debug_mode" in proj["fuzzer"]:
            self.debug_mode = proj["fuzzer"]["debug_mode"]
    # Mandatory [target] section with a mandatory 'function' key.
    if not "target" in proj:
        log.warn("Section 'target' was not found in config file.")
        return False
    target = proj["target"]
    if "function" in target:
        self.target_function = target["function"]
    else:
        log.warn("No 'function' in section 'target'!")
        return False
    # All remaining target keys are optional overrides.
    if "process_name" in target:
        self.process_name = target["process_name"]
    if "host" in target:
        self.host = target["host"]
    if "port" in target:
        self.port = target["port"]
    if "ssl" in target:
        self.ssl = target["ssl"]
    if "remote_frida" in target:
        self.remote_frida = target["remote_frida"]
    if "recv_timeout" in target:
        self.recv_timeout = target["recv_timeout"]
    if "fuzz_in_process" in target:
        self.fuzz_in_process = target["fuzz_in_process"]
    if "modules" in target:
        self.modules = target["modules"]
    # Load state file
    if os.path.exists(self.state_file):
        state = toml.loads(open(self.state_file).read())
        if "seed" in state:
            self.seed = state["seed"]
        if "pid" in state:
            self.pid = state["pid"]
        if "crashes" in state:
            self.crashes = state["crashes"]
        if "last_new_path" in state:
            self.last_new_path = state["last_new_path"]
        # NOTE(review): if the state file lacks 'pid' and self.pid is
        # still None, "%d" % self.pid raises TypeError — confirm defaults.
        log.info("Found old state. Continuing at seed=%d pid=%d" %
                 (self.seed, self.pid))
    return True
def on_message(message, data):
    """Frida on-message handler: ignore routine 'finished' notifications."""
    is_finished = 'payload' in message and str(message['payload']) == "finished"
    if is_finished:
        return
    log.info("on_message: " + str(message))
def doIteration(self, seed=None, corpus=None):
    """
    Run one fuzzing iteration: mutate every corpus file with radamsa
    at the given seed, send it, and keep inputs that reach new basic
    blocks.

    Keyword Arguments:
        seed {int} -- radamsa seed (default: project's current seed)
        corpus {list} -- corpus file paths (default: self.corpus)

    Returns:
        bool -- False when a frida error suggests a crash, True otherwise
    """
    if seed == None:
        seed = self.project.seed
    if corpus == None:
        corpus = self.corpus
    start_time = time.time()
    for pkt_file in corpus:
        log.update("[seed=%d] " % seed + time.strftime("%Y-%m-%d %H:%M:%S") +
                   " %s" % pkt_file)
        #log.info(time.strftime("%Y-%m-%d %H:%M:%S") + " %s" % pkt_file)
        # Deterministic mutation: same file + seed always yields the
        # same payload (enables replay).
        fuzz_pkt = check_output(["radamsa", "-s", str(seed), pkt_file])
        # Writing History file for replaying
        open(self.project.project_dir + "/frida_fuzzer.history",
             "a").write(str(pkt_file) + "|" + str(seed) + "\n")
        try:
            coverage = self.getCoverageOfPayload(fuzz_pkt)
        except (frida.TransportError, frida.InvalidOperationError) as e:
            # Frida connection broke: assume the target crashed and
            # preserve the offending payload.
            log.warn("doIteration: Got a frida error: " + str(e))
            log.info("Current iteration: " + time.strftime("%Y-%m-%d %H:%M:%S") +
                     " [seed=%d] [file=%s]" % (seed, pkt_file))
            crash_file = self.project.crash_dir + time.strftime(
                "/%Y%m%d_%H%M%S_crash")
            # NOTE(review): self.project.pid looks like an int — the str
            # concatenation here would raise TypeError; confirm (str(...)
            # may be missing).
            with open(crash_file + "_" + self.project.pid, "wb") as f:
                f.write(fuzz_pkt)
            log.info("Payload is written to " + crash_file)
            self.project.crashes += 1
            return False
        if coverage == None:
            log.warn("No coverage was generated for [%d] %s!" %
                     (seed, pkt_file))
            continue
        if not coverage.issubset(self.accumulated_coverage):
            # New basic blocks covered!
            log.info("Found new path: [%d] %s" % (seed, pkt_file))
            # Keep the mutated payload as a new corpus entry.
            newfile = open(
                self.project.corpus_dir + "/" + str(seed) + "_" +
                pkt_file.split("/")[-1], "wb")
            newfile.write(fuzz_pkt)
            newfile.close()
            cov_file = self.project.coverage_dir + "/" + pkt_file.split(
                "/")[-1]
            write_drcov_file(self.modules, coverage, cov_file)
            # Separate drcov file for just the newly discovered blocks.
            write_drcov_file(
                self.modules, coverage.difference(self.accumulated_coverage),
                cov_file + "_diff")
            self.project.last_new_path = seed
            self.accumulated_coverage = self.accumulated_coverage.union(
                coverage)
        self.total_executions += 1
    end_time = time.time()
    speed = len(corpus) / (end_time - start_time)
    avg_speed = self.total_executions / (end_time - self.start_time)
    log.finish_update(
        "[seed=%d] speed=[%3d exec/sec (avg: %d)] coverage=[%d bblocks] corpus=[%d files] "
        "last new path: [%d] crashes: [%d]" %
        (seed, speed, avg_speed, len(self.accumulated_coverage),
         len(corpus), self.project.last_new_path, self.project.crashes))
    return True
def loadProject(self, args):
    """
    Load the project's TOML config ([fuzzer] section plus one or more
    [target...] sections), the saved state file, and finally apply
    command-line overrides from 'args'.

    Arguments:
        args -- parsed command-line namespace (may be None)

    Returns:
        bool -- True on success, False when the config file or required
                sections are missing
    """
    # Load config file
    if not os.path.exists(self.config_file):
        log.warn("Config file %s does not exist!" % self.config_file)
        return False
    proj = toml.loads(open(self.config_file).read())
    log.info("Project: " + repr(proj))
    if not "fuzzer" in proj:
        log.warn("Section 'fuzzer' was not found in config file.")
        return False
    fuzzer = proj["fuzzer"]
    # NOTE(review): this check is redundant — the guard above already
    # returned when 'fuzzer' is absent.
    if "fuzzer" in proj:
        if "log_level" in fuzzer:
            log.log_level = fuzzer["log_level"]
        if "write_logfile" in fuzzer:
            if fuzzer["write_logfile"]:
                # Timestamped log file inside the project directory.
                self.logfile_name = self.project_dir + time.strftime(
                    "/%Y%m%d_%H%M%S_stdout.log")
        if "colored_output" in fuzzer:
            self.colored_output = fuzzer["colored_output"]
        if "debug_mode" in fuzzer:
            self.debug_mode = fuzzer["debug_mode"]
        if "host" in fuzzer:
            self.host = fuzzer["host"]
        if "port" in fuzzer:
            self.port = fuzzer["port"]
        if "ssl" in fuzzer:
            self.ssl = fuzzer["ssl"]
        if "udp" in fuzzer:
            self.udp = fuzzer["udp"]
            if self.udp and 'ssl' in fuzzer and self.ssl:
                log.warn("SSL can not be used with UDP sockets. SSL will be ignored.")
        if "recv_timeout" in fuzzer:
            self.recv_timeout = fuzzer["recv_timeout"]
        if "fuzz_in_process" in fuzzer:
            self.fuzz_in_process = fuzzer["fuzz_in_process"]
        if "max_payload_size" in fuzzer:
            self.max_payload_size = fuzzer["max_payload_size"]
        if "payload_filter" in fuzzer:
            self.payload_filter = fuzzer["payload_filter"]
    # Every section whose name starts with 'target' describes one target.
    targets = [t for t in proj.keys() if t.startswith('target')]
    if len(targets) == 0:
        log.warn("No 'target' sections were not found in config file (section starting with 'target...').")
        return False
    for target in targets:
        targetobj = Target(target, proj[target])
        self.targets.append(targetobj)
    # Load state file
    if os.path.exists(self.state_file):
        state = toml.loads(open(self.state_file).read())
        if "seed" in state:
            self.seed = state["seed"]
        if "crashes" in state:
            self.crashes = state["crashes"]
        if "last_new_path" in state:
            self.last_new_path = state["last_new_path"]
        log.info("Found old state. Continuing at seed=%d" % (self.seed))
    # Load command line parameters
    # (command line overrides config-file and state-file values)
    if args != None:
        if 'pid' in args and args.pid != None:
            self.pid = args.pid
        if 'seed' in args and args.seed != None:
            self.seed = args.seed
        if 'function' in args and args.function != None:
            self.target_function = args.function
        if 'debug' in args and args.debug == True:
            self.debug_mode = True
    return True
def getCoverageOfPayload(self, payload, timeout=0.04, retry=0):
    """
    Sends of the payload and checks the returned coverage.

    If the payload_filter was specified by the user, the payload
    will first be passed through it. All targets will then be
    checked for coverage. The function only succeeds if just one
    target has produced a coverage.

    Important: Frida appears to have a bug sometimes in collecting
    traces with the stalker.. no idea how to fix this yet.. hence
    we do a retry. This can however screw up the replay
    functionality and should be fixed in the future.

    Arguments:
        payload {bytes} -- payload which shall be sent to the target

    Keyword Arguments:
        timeout {float} -- seconds to wait for a stalker to attach
                           (default: {0.04})
        retry {int} -- re-sends when no target was hit (default: {0})

    Returns:
        {set} -- set of basic blocks covered by the payload
    """
    # Optionally transform the payload via the user's filter function;
    # a None result means "skip this payload".
    payload = self.runPayloadFilterFunction(payload)
    if payload == None:
        return set()
    cov = None
    cnt = 0
    while cnt <= retry:
        # Clear coverage info in all targets:
        for target in self.targets:
            target.frida_script.exports.clearcoverage()
        # Send payload
        if self.project.fuzz_in_process:
            self.sendFuzzPayloadInProcess(payload)
        else:
            self.sendFuzzPayload(payload)
        # Wait for timeout seconds for any of the stalkers to get attached
        target, stalker_attached, stalker_finished = self.waitForCoverage(
            timeout)
        if target != None:
            # Found a target that has attached their stalker. Wait for the stalker
            # to finish and then extract the coverage.
            # Wait for 1 second <- maybe this should be adjusted / configurable ?
            start = time.time()
            while not stalker_finished and (time.time() - start) < 1:
                stalker_attached, stalker_finished = target.frida_script.exports.checkstalker(
                )
            if not stalker_finished:
                log.info(
                    "getCoverageOfPayload: Stalker did not finish after 1 second!"
                )
                break
            cov = target.frida_script.exports.getcoverage()
            if cov != None and len(cov) > 0:
                break
        else:
            # None of the targets' function was hit. next try..
            cnt += 1
    if cov == None or len(cov) == 0:
        log.debug("getCoverageOfPayload: got nothing!")
        return set()
    # Translate the raw coverage blob into basic blocks, restricted to
    # the watched modules of the active target.
    return parse_coverage(cov, self.active_target.watched_modules)