def build_corpus(self):
    """Re-scan the corpus directory and replay each input to collect baseline coverage.

    Resets ``self.accumulated_coverage`` first, replays every non-blacklisted
    corpus file five times (to surface nondeterministic coverage), accumulates
    the observed basic blocks, and writes one drcov file per input.

    Returns:
        bool: True on success, False when no usable corpus files were found.
    """
    log.info("Initializing Corpus...")
    # Reset the accumulated set so a restart does not carry over stale basic
    # blocks (ASLR may have shifted module bases, inflating the set).
    self.accumulated_coverage = set()

    corpus = sorted(
        self.project.corpus_dir + "/" + x
        for x in os.listdir(self.project.corpus_dir)
    )
    # Filter blacklisted entries in one pass instead of repeated list.remove().
    blacklist = set(self.corpus_blacklist)
    corpus = [c for c in corpus if c not in blacklist]

    if not corpus:
        log.warn("Corpus is empty, please add files/directories with 'add'")
        return False

    for infile in corpus:
        # Context manager fixes the leaked file handle of the original
        # open(infile, "rb").read().
        with open(infile, "rb") as f:
            fuzz_pkt = f.read()
        coverage_last = None
        # Replay 5 times so inconsistent (flaky) coverage can be detected.
        for i in range(5):
            t = time.strftime("%Y-%m-%d %H:%M:%S")
            log.update(t + " [iteration=%d] %s" % (i, infile))
            # send packet to target
            coverage = self.get_coverage_of_payload(fuzz_pkt, infile, corpus)
            if coverage is None or len(coverage) == 0:
                log.warn(f"No coverage was returned! you might want to delete {infile} from corpus if it happens "
                         f"more often")
            if coverage_last is not None and coverage_last != coverage:
                log.warn(t + " [iteration=%d] Inconsistent coverage for %s!"
                         % (i, infile))
            coverage_last = coverage
        # Accumulate coverage. Guard against None: the original raised
        # TypeError on set.union(None) when the final replay produced no
        # coverage at all.
        if coverage_last is not None:
            self.accumulated_coverage = self.accumulated_coverage.union(
                coverage_last)
            write_drcov_file(
                self.modules, coverage_last,
                self.project.coverage_dir + "/" + infile.split("/")[-1])

    log.finish_update(
        "Using %d input files which cover a total of %d basic blocks!"
        % (len(corpus), len(self.accumulated_coverage)))
    self.corpus = corpus
    return True
def do_iteration(self, seed=None, corpus=None):
    """Run one fuzzing pass: mutate and send every corpus file with one seed.

    Args:
        seed: radamsa seed; defaults to ``self.project.seed``.
        corpus: list of input file paths; defaults to ``self.corpus``.

    Returns:
        bool: True when the whole pass completed; False when a frida
        transport/operation error occurred (target presumably crashed),
        in which case the offending payload is saved to the crash dir.
    """
    if seed is None:
        seed = self.project.seed
    if corpus is None:
        corpus = self.corpus
    start_time = time.time()
    for pkt_file in corpus:
        # --- mutation (timed) ---
        start = ms_time_now()
        if not use_libradamsa():
            # External radamsa process; payload is capped at 672 bytes.
            fuzz_pkt = check_output(["radamsa", "-s", str(seed), pkt_file])
            if len(fuzz_pkt) > 672:
                fuzz_pkt = fuzz_pkt[:672]
        else:
            if pkt_file not in self.corpus_cache:
                log.finish_update("%s not in cache" % pkt_file)
                with open(pkt_file, 'rb') as input_pkt_file:
                    self.corpus_cache[pkt_file] = input_pkt_file.read()
            (fuzz_pkt, fuzz_pkt_len) = radamsa_mutate(
                self.corpus_cache[pkt_file], 672, seed)
            fuzz_pkt = fuzz_pkt[:fuzz_pkt_len]
        end = ms_time_now()
        self.mutation_time += end - start

        # Do any protocol- or target-specific transformations on the fuzzing
        # payload. If the user did not define process_payload() the payload
        # is used untouched.
        try:
            start = ms_time_now()
            fuzz_bin = self.frida_script.process_payload(fuzz_pkt.hex())
            fuzz_pkt = binascii.unhexlify(fuzz_bin)
            end = ms_time_now()
            self.process_payload_time += end - start
        except Exception:
            # Best-effort hook: a missing/broken process_payload() must not
            # abort fuzzing. Narrowed from a bare `except:` that also
            # swallowed KeyboardInterrupt/SystemExit.
            pass

        # Write the history file for replaying. The with-block fixes the
        # leaked file handle of the original open(...).write(...).
        with open(self.project.project_dir + "/frida_fuzzer.history",
                  "a") as history:
            history.write(str(pkt_file) + "|" + str(seed) + "\n")

        try:
            start = ms_time_now()
            coverage = self.get_coverage_of_payload(
                fuzz_pkt, pkt_file, corpus)
            end = ms_time_now()
            self.get_coverage_of_payload_time += end - start
        except (frida.TransportError, frida.InvalidOperationError) as e:
            # Target likely crashed: persist the payload and stop this pass.
            log.warn("doIteration: Got a frida error: " + str(e))
            truncated_payload = str(binascii.hexlify(fuzz_pkt))[:25]
            log.warn("had payload: " + truncated_payload + " [...]")
            log.info("Current iteration: "
                     + time.strftime("%Y-%m-%d %H:%M:%S")
                     + " [seed=%d] [file=%s]" % (seed, pkt_file))
            crash_file = self.project.crash_dir + time.strftime(
                "/%Y%m%d_%H%M%S_crash")
            with open(crash_file + "_" + str(self.project.pid), "wb") as f:
                f.write(fuzz_pkt)
            log.info("Payload is written to " + crash_file)
            self.project.crashes += 1
            return False

        if coverage is None:
            log.warn("No coverage was generated for [%d] %s!"
                     % (seed, pkt_file))
            continue

        if not coverage.issubset(self.accumulated_coverage):
            # New basic blocks covered! Save the mutated input to the corpus
            # and write both full and differential drcov files.
            log.info("Found new path: [%d] %s" % (seed, pkt_file))
            with open(self.project.corpus_dir + "/" + str(seed) + "_"
                      + pkt_file.split("/")[-1], "wb") as newfile:
                newfile.write(fuzz_pkt)
            cov_file = self.project.coverage_dir + "/" + pkt_file.split(
                "/")[-1]
            write_drcov_file(self.modules, coverage, cov_file)
            write_drcov_file(
                self.modules,
                coverage.difference(self.accumulated_coverage),
                cov_file + "_diff")
            self.project.last_new_path = seed
            self.accumulated_coverage = self.accumulated_coverage.union(
                coverage)
        self.total_executions += 1

    end_time = time.time()
    elapsed = end_time - start_time
    # Guard against ZeroDivisionError on an extremely fast/empty pass.
    speed = len(corpus) / elapsed if elapsed > 0 else 0.0
    avg_speed = self.total_executions / (end_time - self.start_time)
    self.current_speed_avg = avg_speed
    log.finish_update(
        "[seed=%d] speed=[%3d exec/sec (avg: %d)] coverage=[%d bblocks] corpus=[%d files] "
        "last new path: [%d] crashes: [%d]" %
        (seed, speed, avg_speed, len(self.accumulated_coverage),
         len(corpus), self.project.last_new_path, self.project.crashes))
    return True