def mutate_internal(self, template):
    """Mutate @template by writing random bytes at a random offset.

    Retries until the generated block is neither in self.discard_data nor
    identical to the template's own bytes at that offset.
    Returns (offset, size, buf) where buf is the mutated bytearray.
    """
    while 1:
        buf = bytearray(template)
        key = None  # NOTE(review): unused local, kept as-is
        size = random.randint(0, self.max_size)
        offset = 0
        # Pick an offset past the configured header bytes when possible.
        if len(buf) - size > self.skip_bytes:
            offset = random.randint(self.skip_bytes, len(buf) - size)
        else:
            offset = self.skip_bytes
        values = []
        for i in range(size):
            c = random.randint(0, 255)
            values.append(chr(c))  # 1-char strings; Python 2 bytearray item assignment accepts them
        if [offset, size, "".join(values)] in self.discard_data or \
           template[offset:offset+size] == "".join(values):
            debug( "Generated a mutated block with already discarded data...")
            continue
        break
    for i in range(size):
        # NOTE(review): precedence is offset + (i % len(buf)); if wrapping
        # around the buffer was intended, (offset + i) % len(buf) would be
        # needed — as written the index can exceed len(buf) when the
        # skip_bytes fallback offset was used. Confirm intent.
        buf[offset + i % len(buf)] = values[i % len(values)]
    return offset, size, buf
def mutate_internal(self, template):
    """Mutate @template by writing random bytes at a random offset.

    Retries until the generated block is neither in self.discard_data nor
    identical to the template's own bytes at that offset.
    Returns (offset, size, buf) where buf is the mutated bytearray.
    """
    while 1:
        buf = bytearray(template)
        key = None  # NOTE(review): unused local, kept as-is
        size = random.randint(0, self.max_size)
        offset = 0
        # Pick an offset past the configured header bytes when possible.
        if len(buf)-size>self.skip_bytes:
            offset = random.randint(self.skip_bytes, len(buf)-size)
        else:
            offset = self.skip_bytes
        values = []
        for i in range(size):
            c = random.randint(0, 255)
            values.append(chr(c))  # 1-char strings; Python 2 bytearray item assignment accepts them
        if [offset, size, "".join(values)] in self.discard_data or \
           template[offset:offset+size] == "".join(values):
            debug("Generated a mutated block with already discarded data...")
            continue
        break
    for i in range(size):
        # NOTE(review): precedence is offset + (i % len(buf)); if wrapping
        # was intended, (offset + i) % len(buf) would be needed — confirm.
        buf[offset+i%len(buf)] = values[i%len(values)]
    return offset, size, buf
def create_sample(self, pe):
    """Create one mutated sample for project-engine @pe and enqueue it.

    Picks a random template file, runs the external mutation command on it
    and puts the base64-encoded result into the "<tube_prefix>-samples"
    queue, updating statistics. On failure the temporary file (and its
    optional .diff) is removed. The whole queue interaction is serialized
    through self.queue_lock.
    """
    subfolder = pe.subfolder
    tube_prefix = pe.tube_prefix
    command = pe.command
    project_id = pe.project_id
    mutation_engine_id = pe.mutation_engine_id
    filename = self.read_random_file(subfolder)
    debug("Random template file %s" % filename)
    cmd, temp_file = self.get_command(command, filename, subfolder)
    log("Generating mutated file %s" % temp_file)
    debug("*** Command: %s" % cmd)
    os.system(cmd)
    self.queue_lock.acquire()
    try:
        log("Putting it in queue and updating statistics...")
        # Python 2 file() builtin; the handle is never explicitly closed.
        buf = file(temp_file, "rb").read()
        q = get_queue(watch=False, name="%s-samples" % tube_prefix)
        json_buf = json.dumps([base64.b64encode(buf), temp_file])
        q.put(json_buf)
        self.update_statistics(project_id, mutation_engine_id)
    except:
        # Best-effort clean-up: drop the sample and its .diff, if any.
        log("Error putting job in queue: %s" % str(sys.exc_info()[1]))
        log("Removing temporary file %s" % temp_file)
        try:
            os.remove(temp_file)
        except:
            pass
        if os.path.exists("%s.diff" % temp_file):
            log("Removing temporary diff file %s" % temp_file)
            os.remove("%s.diff" % temp_file)
    finally:
        self.queue_lock.release()
def debug_server(self, shared_queue):
    """Run the target under the debugger as the configured server user.

    Drops privileges to server_uid/server_gid, exports the configured
    environment, runs the pre/post commands around the run and retries the
    debugger launch up to 3 times. A detected crash is stored in
    self.crash_info and pushed to @shared_queue.
    Returns True when a crash was found, False otherwise.
    """
    self.read_configuration()
    uid = int(self.server_uid)
    if os.getuid() != uid:
        os.setresuid(uid, uid, uid)
    gid = int(self.server_gid)
    if os.getgid() != gid:
        os.setresgid(gid, gid, gid)
    for key in self.env:
        debug("Setting environment variable %s=%s" % (key, self.env[key]))
        os.putenv(key, self.env[key])
    if self.pre_command is not None:
        os.system(self.pre_command)
    crash = None
    # Retry the launch a few times; transient debugger failures are logged.
    for i in range(0,3):
        try:
            crash = self.launch_debugger(self.timeout, self.command, "")
            break
        except:
            log("Exception: %s" % sys.exc_info()[1])
            continue
    if self.post_command is not None:
        os.system(self.post_command)
    if crash is not None:
        self.crash_info = crash
        shared_queue.put(crash)
        return True
    return False
def mutate_from_templates(self, template):
    """Mutate @template by splicing in a chunk taken from a random file in
    self.templates_path.

    Returns (offset, size, chunk) for the chunk that was copied in.
    """
    # Keep picking directory entries until a regular file is found.
    while 1:
        filename = random.choice(os.listdir(self.templates_path))
        filename = os.path.join(self.templates_path, filename)
        if os.path.isfile(filename):
            break
    debug("Randomly selected template file %s" % filename)
    buf = open(filename, "rb").read()
    # TODO: Check this...
    size = random.randint(0, self.max_size)
    offset = 0
    if min(len(buf)-size, len(template)-size)>self.skip_bytes:
        offset = random.randint(self.skip_bytes, min(len(buf)-size, len(template)-size))
    else:
        offset = self.skip_bytes
    chunk = buf[offset:offset+size]
    buf = bytearray(template)
    # Let's flip a coin to choose if we are going to put in the same
    # offset as the template file or in a random location
    # NOTE(review): len(chunk) <= size, so "offset+size < len(chunk)" is
    # almost never true; also "len(chunk)-offset+size" parses as
    # (len(chunk)-offset)+size — confirm whether len(chunk)-(offset+size)
    # was intended.
    if random.randint(0, 1) == 1 and offset+size < len(chunk):
        offset = random.randint(0, len(chunk)-offset+size)
    buf[offset:offset+size] = chunk
    return offset, size, chunk
def mutate_from_templates(self, template):
    """Mutate @template by splicing in a chunk taken from a random file in
    self.templates_path.

    Returns (offset, size, chunk) for the chunk that was copied in.
    """
    # Keep picking directory entries until a regular file is found.
    while 1:
        filename = random.choice(os.listdir(self.templates_path))
        filename = os.path.join(self.templates_path, filename)
        if os.path.isfile(filename):
            break
    debug("Randomly selected template file %s" % filename)
    buf = open(filename, "rb").read()
    # TODO: Check this...
    size = random.randint(0, self.max_size)
    offset = 0
    if min(len(buf) - size, len(template) - size) > self.skip_bytes:
        offset = random.randint(self.skip_bytes, min(len(buf) - size, len(template) - size))
    else:
        offset = self.skip_bytes
    chunk = buf[offset:offset + size]
    buf = bytearray(template)
    # Let's flip a coin to choose if we are going to put in the same
    # offset as the template file or in a random location
    # NOTE(review): len(chunk) <= size, so "offset + size < len(chunk)" is
    # almost never true; also "len(chunk) - offset + size" parses as
    # (len(chunk)-offset)+size — confirm whether len(chunk)-(offset+size)
    # was intended.
    if random.randint(0, 1) == 1 and offset + size < len(chunk):
        offset = random.randint(0, len(chunk) - offset + size)
    buf[offset:offset + size] = chunk
    return offset, size, chunk
def debug_server(self, shared_queue):
    """Run the target under the debugger as the configured server user.

    Drops privileges to server_uid/server_gid, exports the configured
    environment, runs the pre/post commands around the run and retries the
    debugger launch up to 3 times. A detected crash is stored in
    self.crash_info and pushed to @shared_queue.
    Returns True when a crash was found, False otherwise.
    """
    self.read_configuration()
    uid = int(self.server_uid)
    if os.getuid() != uid:
        os.setresuid(uid, uid, uid)
    gid = int(self.server_gid)
    if os.getgid() != gid:
        os.setresgid(gid, gid, gid)
    for key in self.env:
        debug("Setting environment variable %s=%s" % (key, self.env[key]))
        os.putenv(key, self.env[key])
    if self.pre_command is not None:
        os.system(self.pre_command)
    crash = None
    # Retry the launch a few times; transient debugger failures are logged.
    for i in range(0, 3):
        try:
            crash = self.launch_debugger(self.timeout, self.command, "")
            break
        except:
            log("Exception: %s" % sys.exc_info()[1])
            continue
    if self.post_command is not None:
        os.system(self.post_command)
    if crash is not None:
        self.crash_info = crash
        shared_queue.put(crash)
        return True
    return False
def target():
    # Worker executed in the watchdog thread: spawn the configured command
    # and block until it exits.
    debug('Thread started')
    on_windows = os.name == "nt"
    if on_windows:
        cmd_line = self.cmd
    else:
        # Unix based — presumably 'exec' replaces the shell with the target
        # so the child process can be managed directly.
        cmd_line = "exec %s" % self.cmd
    use_shell = not on_windows
    self.process = subprocess.Popen(cmd_line, shell=use_shell)
    self.process.communicate()
    debug('Thread finished')
def read_fuzzer_configuration(self, parser):
    """ Read this specific fuzzer additional configuration options from the
    config file instead of adding a gazilion command line options. """
    section = "BCF"
    if section not in parser.sections():
        # FIX: the message previously interpolated the undefined name
        # BININST_TOOL, which raised a NameError instead of the intended
        # Exception; report the missing section name instead.
        raise Exception("Binary instrumentation toolkit section %s does not exists in the given configuration file" % section)
    try:
        # templates-path is optional: fall back to None when absent.
        self.templates_path = parser.get(section, 'templates-path')
        debug("Templates path configured to %s" % self.templates_path)
    except:
        self.templates_path = None
def read_fuzzer_configuration(self, parser):
    """Read fuzzer-specific options from the [BCF] configuration section.

    Sets self.templates_path (None when the option is absent) and raises an
    Exception when the BCF section itself is missing from @parser.
    """
    section = "BCF"
    if section not in parser.sections():
        msg = "Binary instrumentation toolkit section %s does not exists in the given configuration file"
        raise Exception(msg % section)
    try:
        self.templates_path = parser.get("BCF", 'templates-path')
        debug("Templates path configured to %s" % self.templates_path)
    except:
        # Optional option: silently default to no templates path.
        self.templates_path = None
def generate(self):
    """Main generator loop: keep every project's sample queue topped up.

    Forever: refresh templates, collect crashes, purge obsolete files, then
    create new mutated samples for every project/engine whose tube is not
    yet full. Sleeps briefly when nothing was created to avoid busy-spinning.
    """
    log("Starting generator...")
    while 1:
        debug("Add templates...")
        self.add_templates()
        debug("Finding crashes...")
        self.find_crashes()
        debug("Checking files to remove...")
        self.remove_obsolete_files()
        debug("Reading project engines...")
        project_engines = self.get_project_engines()
        created = False
        for pe in project_engines:
            tube_prefix = pe.tube_prefix
            tube_name = "%s-samples" % tube_prefix
            maximum = pe.maximum_samples
            # NOTE(review): queue_is_full()/get_pending_elements() append
            # "-samples" to their prefix argument themselves; passing the
            # already-suffixed tube_name yields "<prefix>-samples-samples".
            # Confirm which name the tubes actually use.
            if not self.queue_is_full(tube_name, maximum):
                for i in range(self.get_pending_elements(tube_name, maximum)):
                    if self.queue_is_full(tube_name, maximum):
                        break
                    line = "Creating sample for %s from folder %s for tube %s mutator %s"
                    log(line % (pe.project_name, pe.subfolder, pe.tube_prefix, pe.mutation_generator))
                    try:
                        self.create_sample(pe)
                        created = True
                    except:
                        log("Error creating sample: %s" % str(sys.exc_info()[1]))
                        # NOTE(review): this re-raise aborts the whole
                        # generator on any sample error; the commented-out
                        # break suggests a softer handling was considered.
                        raise
                        #break
        if not created:
            time.sleep(0.1)
def fuzz(self):
    """Main fuzzing loop: pull samples from the tube and run them.

    Crashing samples are moved to the crash tube together with their crash
    data; non-crashing sample names are queued for deletion. Stops after a
    single iteration when using the GDB interface.
    """
    log("Launching fuzzer, listening in tube %s" % self.tube_name)
    while 1:
        value = self.q.stats_tube(self.tube_name)["current-jobs-ready"]
        debug("Total of %d job(s) in queue" % value)
        job = self.q.reserve()
        buf, temp_file = json.loads(job.body)
        buf = base64.b64decode(buf)
        debug("Launching sample %s..." % os.path.basename(temp_file))
        if self.launch_sample(buf):
            log("We have a crash, moving to %s queue..." % self.crash_tube)
            # launch_sample() stored the crash data in self.crash_info.
            crash = self.crash_info
            d = {temp_file:self.crash_info}
            self.crash_q.put(json.dumps(d))
            self.crash_info = None
            log("$PC 0x%08x Signal %s Exploitable %s " % (crash["pc"], crash["signal"], crash["exploitable"]))
            if crash["disasm"] is not None:
                log("%08x: %s" % (crash["disasm"][0], crash["disasm"][1]))
        else:
            file_delete = os.path.basename(temp_file)
            self.delete_q.put(str(file_delete))
        if self.cleanup is not None:
            debug("Running clean-up command %s" % self.cleanup)
            os.system(self.cleanup)
        debug("Done")
        job.delete()
        if self.iface == gdb_iface:
            break
def process_manager(total_procs, target, args, wait_time=0.2):
    """ Always maintain a total of @total_procs running @target and waiting
    for each thread to finish @wait_time second(s). """
    procs = []
    debug("Maximum number of processes in pool is %d" % total_procs)
    try:
        while 1:
            if len(procs) < total_procs:
                debug("Starting process %d" % (len(procs) + 1))
                p = Process(target=target, args=args)
                p.start()
                procs.append(p)
                debug("Total of %d process(es) started" % len(procs))
            else:
                i = 0
                # Walk a copy of the list; finished processes are removed
                # from the live list by index.
                for p in list(procs):
                    p.join(wait_time)
                    if not p.is_alive():
                        debug(
                            "Process finished, deleting and starting a new one..."
                        )
                        del procs[i]
                        # Do not advance i: items shifted left after del.
                        continue
                    i += 1
    except KeyboardInterrupt:
        # Allow a clean Ctrl-C exit of the pool loop.
        pass
def read_bininst_configuration(self, parser):
    """Read the "binary instrumentation toolkit" configuration.

    Reads the bininst-tool name from [BCF] and then the tool's own 'path'
    option from the section named after the tool. Raises an Exception with
    a descriptive message when any mandatory entry is missing.
    """
    # FIX: the docstring above was previously sitting in the middle of the
    # function body, where it was just a dead string literal instead of the
    # function's docstring.
    try:
        self.bininst_tool = parser.get("BCF", 'bininst-tool')
        debug("Binary instrumentation tool configured to %s" % self.bininst_tool)
    except:
        raise Exception("Binary instrumentation toolkit parameter bininst-tool does not exists in the given configuration file")
    if self.bininst_tool not in parser.sections():
        raise Exception("Binary instrumentation toolkit section %s does not exists in the given configuration file" % self.bininst_tool)
    try:
        self.bininst_path = parser.get(self.bininst_tool, 'path')
    except:
        raise Exception("No binary instrumentation toolkit path specified in the configuration file")
def iterative_mutator(self, template):
    """Deterministically mutate one byte of @template per call.

    Walks every value (0..255) at the current iteration offset (after
    skip_bytes) before moving on to the next offset. The shared iteration
    counters in self.stats are protected by self.lock.
    Returns (iteration, 1, buf).
    """
    debug("Acquiring lock")
    self.lock.acquire()
    try:
        buf = bytearray(template)
        # NOTE(review): assigning a 1-char str into a bytearray only works
        # on Python 2; Python 3 would require the plain int here. Also the
        # returned offset is stats["iteration"] while the written index is
        # skip_bytes + stats["iteration"] — confirm that mismatch is wanted.
        buf[self.skip_bytes + self.stats["iteration"]] = chr(self.stats["iteration_char"])
        ret = self.stats["iteration"], 1, buf
        self.stats["iteration_char"] += 1
        if self.stats["iteration_char"] > 255:
            # Exhausted all byte values at this offset: advance to the next.
            self.stats["iteration_char"] = 0
            self.stats["iteration"] += 1
            log("Current iteration %d" % self.stats["iteration"])
    finally:
        debug("Releasing lock")
        self.lock.release()
    return ret
def create_sample(self, pe):
    """Create one mutated sample for project-engine @pe and enqueue it.

    Variant that reads templates from "<WORKING_PATH>/<subfolder>/templates"
    and enqueues a JSON dict with the zlib-compressed, base64-encoded sample,
    its path and the template's name, then updates statistics and the
    iteration counter. On failure temporary files are removed. The queue
    interaction is serialized through self.queue_lock.
    """
    template_folder = os.path.join(self.config["WORKING_PATH"], pe.subfolder, "templates")
    tube_prefix = pe.tube_prefix
    command = pe.command
    project_id = pe.project_id
    mutation_engine_id = pe.mutation_engine_id
    filename = self.read_random_file(template_folder)
    # The template's base name doubles as its hash identifier here.
    template_hash = os.path.basename(filename)
    debug("Random template file %s" % filename)
    cmd, temp_file = self.get_command(command, filename, template_folder)
    log("Generating mutated file %s" % temp_file)
    debug("*** Command: %s" % cmd)
    os.system(cmd)
    self.queue_lock.acquire()
    try:
        log("Putting it in queue and updating statistics...")
        # Python 2 file() builtin; the handle is never explicitly closed.
        buf = file(temp_file, "rb").read()
        q = get_queue(watch=False, name="%s-samples" % tube_prefix)
        data = {
            'sample': base64.b64encode(zlib.compress(buf)),
            'temp_file': temp_file,
            'template_hash': template_hash
        }
        q.put(json.dumps(data))
        self.update_statistics(project_id, mutation_engine_id)
        self.update_iteration(project_id)
    except:
        # Best-effort clean-up: drop the sample and its .diff, if any.
        log("Error putting job in queue: %s" % str(sys.exc_info()[1]))
        log("Removing temporary file %s" % temp_file)
        try:
            os.remove(temp_file)
        except:
            pass
        if os.path.exists("%s.diff" % temp_file):
            log("Removing temporary diff file %s" % temp_file)
            os.remove("%s.diff" % temp_file)
    finally:
        self.queue_lock.release()
def launch_sample(self, buf):
    """Write @buf to a temporary file and run the target on it under the
    debugger.

    The pre/post commands are executed around the debugger run; in the
    pre-command, '@@' is replaced with the full sample path and '$$' with
    its base name. Returns True and keeps the file when a crash was
    detected (crash data stored in self.crash_info); otherwise removes the
    file and returns False.
    """
    # Re-read configuration each time we're running the fuzzer so the
    # new changes are immediately applied.
    self.read_configuration()
    filename = tempfile.mktemp(suffix=self.extension)
    f = open(filename, "wb")
    f.write(buf)
    f.close()
    # Renamed from 'file' to avoid shadowing the builtin.
    base_name = filename.split('/')[-1]
    #os.putenv("NIGHTMARE_TIMEOUT", str(self.timeout))
    for key in self.env:
        debug("Setting environment variable %s=%s" % (key, self.env[key]))
        os.putenv(key, self.env[key])
    if self.pre_command is not None:
        # FIX: this block referenced the undefined local 'pre_command'
        # (a NameError at runtime) and discarded the str.replace() return
        # values, so the placeholders were never actually substituted.
        # Substitute into a local so self.pre_command keeps the template
        # for the next run.
        # NOTE(review): the condition requires BOTH placeholders to be
        # present before substituting either — confirm 'or' wasn't intended.
        pre_command = self.pre_command
        if pre_command.find("@@") > -1 and pre_command.find("$$") > -1:
            pre_command = pre_command.replace('@@', filename)
            pre_command = pre_command.replace('$$', base_name)
        log(pre_command)
        os.system(pre_command)
    crash = None
    # Retry the launch a few times; transient debugger failures are logged.
    for i in range(0,3):
        try:
            crash = self.launch_debugger(self.timeout, self.command, filename)
            break
        except:
            log("Exception: %s" % sys.exc_info()[1])
            continue
    if self.post_command is not None:
        os.system(self.post_command)
    if crash is not None:
        self.crash_info = crash
        return True
    else:
        os.remove(filename)
        return False
def read_bininst_configuration(self, parser):
    """Read the "binary instrumentation toolkit" configuration.

    Reads the bininst-tool name from [BCF] and then the tool's own 'path'
    option from the section named after the tool. Raises an Exception with
    a descriptive message when any mandatory entry is missing.
    """
    # FIX: the docstring above was previously sitting in the middle of the
    # function body, where it was just a dead string literal instead of the
    # function's docstring.
    try:
        self.bininst_tool = parser.get("BCF", 'bininst-tool')
        debug("Binary instrumentation tool configured to %s" % self.bininst_tool)
    except:
        raise Exception(
            "Binary instrumentation toolkit parameter bininst-tool does not exists in the given configuration file"
        )
    if self.bininst_tool not in parser.sections():
        raise Exception(
            "Binary instrumentation toolkit section %s does not exists in the given configuration file"
            % self.bininst_tool)
    try:
        self.bininst_path = parser.get(self.bininst_tool, 'path')
    except:
        raise Exception(
            "No binary instrumentation toolkit path specified in the configuration file"
        )
def coverage(self, command, timeout=36000, hide_output=True):
    """Run @command under Pin's ccovtrace tool and collect code coverage.

    Picks the 32 or 64 bit tool build depending on self.arch, writes the
    trace to a temporary log file, parses it with read_coverage_log() and
    returns a CCoverResults built from it plus the command's exit status.
    @timeout bounds the run in seconds.
    """
    tool_path = self.path + "/source/tools/RunTracer"
    if int(self.arch) == 32:
        tool_path = tool_path + "/obj-ia32/ccovtrace.so"
    elif int(self.arch) == 64:
        tool_path = tool_path + "/obj-intel64/ccovtrace.so"
    logfile = mkstemp()[1]
    # XXX: Do we want to use the .sh script? Using this we're limiting
    # ourselves to only Linux and MacOSX.
    cmdline = "%s/pin.sh -t %s -o %s -- %s"
    if hide_output:
        # ...although, when using "hide_output", we're already doing it...
        cmdline += " >/dev/null 2>/dev/null"
    cmdline = cmdline % (self.path, tool_path, logfile, command)
    debug("Running command %s" % cmdline)
    cmd = TimeoutCommand(cmdline)
    ret = cmd.run(timeout)
    coverage = self.read_coverage_log(logfile)
    debug("Removing temporary file %s " % logfile)
    os.remove(logfile)
    debug("Returning coverage data...")
    cover = CCoverResults(coverage[0], coverage[1], ret)
    return cover
def coverage(self, command, timeout=36000, hide_output=True):
    """Trace @command with Pin's ccovtrace tool and return its coverage.

    The trace goes to a temporary log file which is parsed with
    read_coverage_log() and wrapped, together with the command's return
    code, in a CCoverResults object. @timeout bounds the run in seconds.
    """
    tool_path = self.path + "/source/tools/RunTracer"
    arch_bits = int(self.arch)
    if arch_bits == 32:
        tool_path += "/obj-ia32/ccovtrace.so"
    elif arch_bits == 64:
        tool_path += "/obj-intel64/ccovtrace.so"
    logfile = mkstemp()[1]
    # XXX: Do we want to use the .sh script? Using this we're limiting
    # ourselves to only Linux and MacOSX.
    template = "%s/pin.sh -t %s -o %s -- %s"
    if hide_output:
        # ...although, when using "hide_output", we're already doing it...
        template += " >/dev/null 2>/dev/null"
    full_cmd = template % (self.path, tool_path, logfile, command)
    debug("Running command %s" % full_cmd)
    exit_code = TimeoutCommand(full_cmd).run(timeout)
    cov_data = self.read_coverage_log(logfile)
    debug("Removing temporary file %s " % logfile)
    os.remove(logfile)
    debug("Returning coverage data...")
    return CCoverResults(cov_data[0], cov_data[1], exit_code)
def target():
    # Worker executed in the watchdog thread: run self.cmd, optionally
    # capturing a truncated copy of its output streams.
    debug('Thread started')
    on_windows = os.name == "nt"
    if on_windows:
        cmd_line = self.cmd
    else:
        # Unix based
        cmd_line = "exec %s" % self.cmd
    use_shell = not on_windows
    if get_output:
        # Keep at most the first 8 KB of each stream.
        self.process = subprocess.Popen(cmd_line, stdout=subprocess.PIPE,
                                        stderr=subprocess.PIPE,
                                        shell=use_shell)
        self.pid = self.process.pid
        out, err = self.process.communicate()
        self.stdout = out[:8192]
        self.stderr = err[:8192]
    else:
        self.process = subprocess.Popen(cmd_line, shell=use_shell)
        self.pid = self.process.pid
        self.process.communicate()
    debug('Thread finished')
def launch_client(self, shared_queue):
    """Run the client command on the next queued sample and file any crash
    reported through @shared_queue into the crash tube.
    """
    self.read_configuration()
    # Drop group and then user privileges to the configured client ids.
    gid = int(self.client_gid)
    if gid != os.getgid():
        os.setgid(gid)
    uid = int(self.client_uid)
    if uid != os.getuid():
        os.setuid(uid)
    value = self.q.stats_tube(self.tube_name)["current-jobs-ready"]
    debug("Total of %d job(s) in queue" % value)
    job = self.q.reserve()
    buf, temp_file = json.loads(job.body)
    buf = base64.b64decode(buf)
    debug("Launching sample %s..." % os.path.basename(temp_file))
    cmd = "%s %s" % (self.client_command, temp_file)
    ret = os.system(cmd)
    try:
        crash_info = shared_queue.get(timeout=1)
        # Python 2 debugging left-over prints below.
        print "AT CLIENT", crash_info
    except:
        print "AT CLIENT, except", sys.exc_info()[1]
        crash_info = None
    print "AT CLIENT, before check?", shared_queue
    if not shared_queue.empty():
        log("We have a crash, moving to %s queue..." % self.crash_tube)
        # NOTE(review): this uses self.crash_info, not the crash_info just
        # read from @shared_queue — confirm which one is intended.
        crash = self.crash_info
        d = {temp_file: self.crash_info}
        self.crash_q.put(json.dumps(d))
        self.crash_info = None
        log("$PC 0x%08x Signal %s Exploitable %s " % (crash["pc"], crash["signal"], crash["exploitable"]))
        if crash["disasm"] is not None:
            log("%08x: %s" % (crash["disasm"][0], crash["disasm"][1]))
    else:
        file_delete = os.path.basename(temp_file)
        self.delete_q.put(str(file_delete))
    if self.cleanup is not None:
        debug("Running clean-up command %s" % self.cleanup)
        os.system(self.cleanup)
    debug("Done")
    job.delete()
def launch_client(self, shared_queue):
    """Run the client command on the next queued sample and file any crash
    reported through @shared_queue into the crash tube.
    """
    self.read_configuration()
    # Drop group and then user privileges to the configured client ids.
    gid = int(self.client_gid)
    if gid != os.getgid():
        os.setgid(gid)
    uid = int(self.client_uid)
    if uid != os.getuid():
        os.setuid(uid)
    value = self.q.stats_tube(self.tube_name)["current-jobs-ready"]
    debug("Total of %d job(s) in queue" % value)
    job = self.q.reserve()
    buf, temp_file = json.loads(job.body)
    buf = base64.b64decode(buf)
    debug("Launching sample %s..." % os.path.basename(temp_file))
    cmd = "%s %s" % (self.client_command, temp_file)
    ret = os.system(cmd)
    try:
        crash_info = shared_queue.get(timeout=1)
        # Python 2 debugging left-over prints below.
        print "AT CLIENT", crash_info
    except:
        print "AT CLIENT, except", sys.exc_info()[1]
        crash_info = None
    print "AT CLIENT, before check?", shared_queue
    if not shared_queue.empty():
        log("We have a crash, moving to %s queue..." % self.crash_tube)
        # NOTE(review): this uses self.crash_info, not the crash_info just
        # read from @shared_queue — confirm which one is intended.
        crash = self.crash_info
        d = {temp_file:self.crash_info}
        self.crash_q.put(json.dumps(d))
        self.crash_info = None
        log("$PC 0x%08x Signal %s Exploitable %s " % (crash["pc"], crash["signal"], crash["exploitable"]))
        if crash["disasm"] is not None:
            log("%08x: %s" % (crash["disasm"][0], crash["disasm"][1]))
    else:
        file_delete = os.path.basename(temp_file)
        self.delete_q.put(str(file_delete))
    if self.cleanup is not None:
        debug("Running clean-up command %s" % self.cleanup)
        os.system(self.cleanup)
    debug("Done")
    job.delete()
def launch_sample(self, buf):
    """Write @buf to a temporary file, run the target on it under the
    debugger and report whether it crashed.

    Returns True (keeping the sample on disk and storing the crash data in
    self.crash_info) on a crash; otherwise removes the file and returns
    False. Pre/post commands are executed around the run.
    """
    # Re-read configuration each time we're running the fuzzer so the
    # new changes are immediately applied.
    self.read_configuration()
    filename = tempfile.mktemp(suffix=self.extension)
    f = open(filename, "wb")
    f.write(buf)
    f.close()
    #os.putenv("NIGHTMARE_TIMEOUT", str(self.timeout))
    for key in self.env:
        debug("Setting environment variable %s=%s" % (key, self.env[key]))
        os.putenv(key, self.env[key])
    if self.pre_command is not None:
        os.system(self.pre_command)
    crash = None
    for i in range(0,3):
        try:
            crash = self.launch_debugger(self.timeout, self.command, filename)
            break
        except:
            # FIX: a stray bare 'raise' here re-raised the exception, making
            # the 'continue' — and therefore the whole retry loop —
            # unreachable. Log and retry instead, matching the behavior of
            # the other launch_*/debug_server variants.
            log("Exception: %s" % sys.exc_info()[1])
            continue
    if self.post_command is not None:
        os.system(self.post_command)
    if crash is not None:
        self.crash_info = crash
        return True
    else:
        os.remove(filename)
        return False
def process_manager(total_procs, target, args, wait_time=0.2):
    """ Always maintain a total of @total_procs running @target and waiting
    for each thread to finish @wait_time second(s). """
    procs = []
    debug("Maximum number of processes in pool is %d" % total_procs)
    while 1:
        if len(procs) < total_procs:
            debug("Starting process %d" % (len(procs)+1))
            p = Process(target=target, args=args)
            p.start()
            procs.append(p)
            debug("Total of %d process(es) started" % len(procs))
        else:
            i = 0
            # Walk a copy of the list; finished processes are removed from
            # the live list by index.
            for p in list(procs):
                p.join(wait_time)
                if not p.is_alive():
                    debug("Process finished, deleting and starting a new one...")
                    del procs[i]
                    # Do not advance i: items shifted left after del.
                    continue
                i += 1
def coverage(self, command, timeout=36000, hide_output=True):
    """Collect code coverage for @command using DynamoRIO's drcov tool.

    The textual drcov logs go into a temporary directory that is parsed by
    read_coverage_log() and removed afterwards. Returns a CCoverResults
    with the coverage data and the command's exit status. @timeout bounds
    the run in seconds.
    """
    logdir = mkdtemp()
    cmdline = "%s/bin%s/drrun -t drcov -dump_text -logdir %s -- %s"
    if hide_output:
        cmdline += " >/dev/null 2>/dev/null"
    cmdline = cmdline % (self.path, self.arch, logdir, command)
    debug("Running command %s" % cmdline)
    cmd = TimeoutCommand(cmdline)
    ret = cmd.run(timeout)
    coverage = self.read_coverage_log(logdir)
    debug("Removing temporary directory %s " % logdir)
    shutil.rmtree(logdir)
    debug("Returning coverage data...")
    cover = CCoverResults(coverage[0], coverage[1], ret)
    return cover
def coverage(self, command, timeout=36000, hide_output=True):
    """Collect code coverage for @command with DynamoRIO's drcov tool.

    Textual drcov logs are written into a temporary directory, parsed via
    read_coverage_log() and returned as a CCoverResults along with the
    command's exit status. @timeout bounds the run in seconds.
    """
    logdir = mkdtemp()
    template = "%s/bin%s/drrun -t drcov -dump_text -logdir %s -- %s"
    if hide_output:
        template += " >/dev/null 2>/dev/null"
    full_cmd = template % (self.path, self.arch, logdir, command)
    debug("Running command %s" % full_cmd)
    exit_code = TimeoutCommand(full_cmd).run(timeout)
    cov_data = self.read_coverage_log(logdir)
    debug("Removing temporary directory %s " % logdir)
    shutil.rmtree(logdir)
    debug("Returning coverage data...")
    return CCoverResults(cov_data[0], cov_data[1], exit_code)
def get_pending_elements(self, prefix, maximum):
    """Return how many more samples fit in the "<prefix>-samples" tube."""
    tube = "%s-samples" % prefix
    queue = get_queue(watch=True, name=tube)
    ready = queue.stats_tube(tube)["current-jobs-ready"]
    debug("Total of %d job(s) in queue" % ready)
    return maximum - ready
def queue_is_full(self, prefix, maximum):
    """Tell whether the "<prefix>-samples" tube already holds @maximum jobs."""
    tube = "%s-samples" % prefix
    queue = get_queue(watch=True, name=tube)
    ready = queue.stats_tube(tube)["current-jobs-ready"]
    debug("Total of %d job(s) in queue" % ready)
    # Equivalent to the original "ready > maximum - 1" for integer counts.
    return ready >= maximum
def fuzz_one_internal(self, template):
    """Run one fuzzing round: mutate @template, measure code coverage and
    update the generation statistics; dump a proof-of-concept when the
    target dies with a signal.
    """
    # Get mutated data using @template as the template buffer.
    offset, size, buf = self.mutate(template)
    filename = mktemp(suffix = self.extension)
    debug("Creating temporary file %s" % filename)
    with open(filename, "wb") as f:
        f.write(buf)
    debug("Performing code coverage...")
    metrics = []
    self.record_metric(filename, metrics)
    for metric in metrics:
        bbs = int(metric.unique_bbs)
        # NOTE(review): dead branch — the "False and" guard disables the
        # save-unseen-basic-block logic entirely.
        if False and len(metric.all_unique_bbs-self.stats["all"])>0:
            if len(self.stats["all"])==0:
                log("=+= Found yet unseen basic block! Saving to templates.")
                shutil.copyfile(filename,os.path.join(self.templates_path,os.path.basename(filename)))
            self.stats["all"]=self.stats["all"] | metric.all_unique_bbs
        if bbs > self.stats["max"]:
            # More coverage than ever seen: keep the mutation.
            if not self.radamsa:
                log("GOOD! Found an interesting change at 0x%x! Covered basic blocks %d, original maximum %d" % (offset, bbs, self.stats["max"]))
            else:
                log("GOOD! Found an interesting change! Covered basic blocks %d, original maximum %d" % (bbs, self.stats["max"]))
            if self.iterative:
                # Restart the per-byte walk for the iterative mutator.
                self.stats["iteration_char"] = 0
                self.stats["iteration"] += 1
            increase = (bbs - self.stats["max"])
            self.generation_value += increase
            self.apply_bytes(offset, size, buf)
            self.generation_value = 0
            old_stats = self.mgr.dict(self.stats)
            self.lock.acquire()
            try:
                debug("Recalculating statistics...")
                self.recalculate_statistics(old_stats, bbs)
            finally:
                self.lock.release()
        elif bbs < self.stats["min"]:
            debug("Bad metric found: minimum basic block(s) %d, current test-case basic block(s) %d" % (self.stats["min"], bbs))
            self.discard_bytes(offset, size, buf)
            self.generation_value -= 3
        else:
            line = "Uninteresting data with current test-case: min %d, max %d, current %d"
            line = line % (self.stats["min"], self.stats["max"], bbs)
            debug(line)
            self.discard_bytes(offset, size, buf)
            self.generation_value -= 1
        if metric.exit_code in RETURN_SIGNALS:
            # The target died with a signal: reward the generation.
            self.generation_value += abs(self.generation_bottom_level)
            ret = metric.exit_code
            if RETURN_SIGNALS[ret] != "SIGTERM":
                log("*** Found a BUG, caught signal %d (%s), hurra!" % (ret, RETURN_SIGNALS[ret]))
                self.dump_poc(filename, offset, size, buf)
                self.bugs += 1
            else:
                # SIGTERM is ambiguous (watchdog kill vs. real bug): back off.
                log("*** Target received signal SIGTERM. Bug found or the box is running out of resources...")
                log("Waiting for at least 1 minute...")
                time.sleep(60)
                log("Resuming...")
    debug("Removing test-case %s" % filename)
    os.remove(filename)
def test(self):
    """Run every registered test case through run_test()."""
    debug("Running tests...")
    # FIX: the loop fetched the case data into an unused local and then
    # looked the dict up a second time; iterate over the items instead.
    for name, data in self.test_cases.items():
        self.run_test(name, data)
    debug("Done")
def fuzz_one_internal(self, template):
    """Run one fuzzing round: mutate @template, measure code coverage and
    update the generation statistics; dump a proof-of-concept when the
    target dies with a signal.
    """
    # Get mutated data using @template as the template buffer.
    offset, size, buf = self.mutate(template)
    filename = mktemp(suffix=self.extension)
    debug("Creating temporary file %s" % filename)
    with open(filename, "wb") as f:
        f.write(buf)
    debug("Performing code coverage...")
    metrics = []
    self.record_metric(filename, metrics)
    for metric in metrics:
        bbs = int(metric.unique_bbs)
        # NOTE(review): dead branch — the "False and" guard disables the
        # save-unseen-basic-block logic entirely.
        if False and len(metric.all_unique_bbs - self.stats["all"]) > 0:
            if len(self.stats["all"]) == 0:
                log("=+= Found yet unseen basic block! Saving to templates." )
                shutil.copyfile(
                    filename,
                    os.path.join(self.templates_path, os.path.basename(filename)))
            self.stats["all"] = self.stats["all"] | metric.all_unique_bbs
        if bbs > self.stats["max"]:
            # More coverage than ever seen: keep the mutation.
            if not self.radamsa:
                log("GOOD! Found an interesting change at 0x%x! Covered basic blocks %d, original maximum %d" % (offset, bbs, self.stats["max"]))
            else:
                log("GOOD! Found an interesting change! Covered basic blocks %d, original maximum %d" % (bbs, self.stats["max"]))
            if self.iterative:
                # Restart the per-byte walk for the iterative mutator.
                self.stats["iteration_char"] = 0
                self.stats["iteration"] += 1
            increase = (bbs - self.stats["max"])
            self.generation_value += increase
            self.apply_bytes(offset, size, buf)
            self.generation_value = 0
            old_stats = self.mgr.dict(self.stats)
            self.lock.acquire()
            try:
                debug("Recalculating statistics...")
                self.recalculate_statistics(old_stats, bbs)
            finally:
                self.lock.release()
        elif bbs < self.stats["min"]:
            debug(
                "Bad metric found: minimum basic block(s) %d, current test-case basic block(s) %d"
                % (self.stats["min"], bbs))
            self.discard_bytes(offset, size, buf)
            self.generation_value -= 3
        else:
            line = "Uninteresting data with current test-case: min %d, max %d, current %d"
            line = line % (self.stats["min"], self.stats["max"], bbs)
            debug(line)
            self.discard_bytes(offset, size, buf)
            self.generation_value -= 1
        if metric.exit_code in RETURN_SIGNALS:
            # The target died with a signal: reward the generation and dump
            # the proof-of-concept.
            self.generation_value += abs(self.generation_bottom_level)
            ret = metric.exit_code
            log("*** Found a BUG, caught signal %d (%s), hurra!" % (ret, RETURN_SIGNALS[ret]))
            self.dump_poc(filename, offset, size, buf)
            self.bugs += 1
    debug("Removing test-case %s" % filename)
    os.remove(filename)
def multi_coverage(self, command, times, timeout=36000):
    """Run coverage() @times times on @command and return the results list."""
    debug("Performing coverage %d time(s)" % times)
    return [self.coverage(command, timeout) for _ in range(times)]
def target():
    # Worker executed in the watchdog thread: run the command through the
    # shell (via 'exec') and block until it finishes.
    debug('Thread started')
    proc = subprocess.Popen("exec %s" % self.cmd, shell=True)
    self.process = proc
    proc.communicate()
    debug('Thread finished')
def run_test(self, name, data):
    """Run one pintool test case for both architectures and both modes.

    @data is a 4-item sequence: expected output line count and return code
    for the tracker-only run, then the same pair for the mitigator run.
    Failures are logged (with the captured output) and set self.failed.
    """
    track_lines = int(data[0])
    track_return = int(data[1])
    mitigate_lines = int(data[2])
    mitigate_return = int(data[3])
    args = ["-track 1", "-track 1 -mitigate 1"]
    archs = ["ia32", "intel64"]
    for arch in archs:
        failed = False
        tmp_cmd = "%s/pin -t %s/obj-%s/%s" % (
            self.pin_path, self.tool_path, arch, self.tool_name)
        for arg in args:
            # 32 bit test binaries are suffixed with "32".
            suffix = ""
            if arch == "ia32":
                suffix = "32"
            cmd = "%s %s -- %s/%s%s" % (
                tmp_cmd, arg, self.testcases_directory, name, suffix)
            debug("Running %s" % cmd)
            t = TimeoutCommand(cmd)
            code = t.run(get_output=True)
            stdout = t.stdout
            if arg.find("mitigate") == -1:
                # Tracker-only run: compare return code and output lines.
                if code != track_return:
                    failed = True
                    line = "*** TEST %s FAILED *** Different return code for tracker: got %d, expected %d"
                    log(line % (repr(name), code, track_return))
                    print "-" * 80
                    print repr(stdout)
                    print "-" * 80
                lines = stdout.count("\n")
                if lines != track_lines:
                    failed = True
                    line = "*** TEST %s FAILED *** Different number of lines for tracker: got %d, expected %d"
                    log(line % (repr(name), lines, track_lines))
                    print "-" * 80
                    print repr(stdout)
                    print "-" * 80
            else:
                # Tracker + mitigator run.
                if code != mitigate_return:
                    failed = True
                    line = "*** TEST %s FAILED *** Different return code for mitigator: got %d, expected %d"
                    log(line % (repr(name), code, mitigate_return))
                    print "-" * 80
                    print repr(stdout)
                    print "-" * 80
                lines = stdout.count("\n")
                if lines != mitigate_lines:
                    failed = True
                    line = "*** TEST %s FAILED *** Different number of lines for mitigator: got %d, expected %d"
                    log(line % (repr(name), lines, mitigate_lines))
                    print "-" * 80
                    print repr(stdout)
                    print "-" * 80
        if not failed:
            # NOTE(review): 'arg' is the last value from the loop above, and
            # the condition labels a run WITHOUT "-mitigate" as "mitigator" —
            # this looks inverted; confirm the intended labelling.
            test_type = "tracker"
            if arg.find("mitigate") == -1:
                test_type = "mitigator"
            log("TEST %s FOR %s ARCH %s PASSED" % (repr(name), test_type, arch))
        else:
            self.failed = True
def read_target_configuration(self, parser):
    """Read the target's section of the configuration file into attributes.

    Mandatory options (command, tube, extension) raise an Exception with a
    descriptive message when missing; every other option falls back to a
    sensible default.
    """
    if self.section not in parser.sections():
        raise Exception("Section %s does not exists in the given configuration file" % self.section)
    try:
        self.pre_command = parser.get(self.section, 'pre-command')
    except:
        # Ignore it, it isn't mandatory
        self.pre_command = None
    try:
        self.post_command = parser.get(self.section, 'post-command')
    except:
        # Ignore it, it isn't mandatory
        self.post_command = None
    try:
        self.command = parser.get(self.section, 'command')
    except:
        raise Exception("No command specified in the configuration file for section %s" % self.section)
    try:
        self.tube_name = parser.get(self.section, 'tube')
    except:
        # FIX: a stray bare 'raise' here re-raised the parser error and
        # made the descriptive Exception below unreachable.
        raise Exception("No tube specified in the configuration file for section %s" % self.section)
    try:
        self.crash_tube = parser.get(self.section, 'crash-tube')
    except:
        self.crash_tube = "%s-crash" % self.tube_name
    try:
        self.extension = parser.get(self.section, 'extension')
    except:
        raise Exception("No extension specified in the configuration file for section %s" % self.section)
    try:
        self.timeout = parser.get(self.section, 'timeout')
    except:
        # Default timeout is 90 seconds
        self.timeout = 90
    self.timeout = int(self.timeout)
    try:
        # 'environment' names another section whose items become env vars.
        environment = parser.get(self.section, 'environment')
        self.env = dict(parser.items(environment))
    except:
        self.env = {}
    try:
        self.cleanup = parser.get(self.section, 'cleanup-command')
    except:
        self.cleanup = None
    try:
        self.iterative = bool(int(parser.get(self.section, 'iterative')))
        if self.iterative:
            debug("Iterative algorithm in use")
    except:
        self.iterative = False
    try:
        self.save_generations = bool(int(parser.get(self.section, 'save-generations')))
    except:
        self.save_generations = False
    try:
        # NOTE(review): bool("0") is True — any non-empty value enables
        # radamsa here; confirm whether an int conversion was intended.
        self.radamsa = bool(parser.get(self.section, 'radamsa'))
        if self.radamsa:
            debug("Radamsa algorithm in use")
    except:
        self.radamsa = False
    try:
        self.state_file = parser.get(self.section, 'state-file')
    except:
        #raise Exception("No state file specified for target %s" % self.section)
        self.state_file = None
    try:
        self.current_state = parser.get(self.section, 'current-state-file')
    except:
        self.current_state = "current-state"
    try:
        is_debug = parser.getboolean(self.section, 'debug')
        config.DEBUG = is_debug
    except:
        # Silently ignore the exception
        pass
    try:
        self.generation_bottom_level = int(parser.get(self.section, 'generation-bottom-level'))
    except:
        self.generation_bottom_level = -100
    try:
        self.hide_output = bool(int(parser.get(self.section, 'hide-output')))
    except:
        self.hide_output = True
    try:
        self.skip_bytes = int(parser.get(self.section, 'skip-bytes'))
    except:
        self.skip_bytes = 4
    try:
        self.non_uniques = bool(parser.get(self.section, 'non-uniques'))
    except:
        self.non_uniques = False
def read_target_configuration(self, parser):
    """Load this fuzzing target's settings from @parser into attributes.

    Reads section ``self.section`` of a ConfigParser-like object using
    the typed getters (getboolean/getint) for flags and numbers.
    'command', 'tube' and 'extension' are mandatory; every other option
    falls back to a default.  The bare ``except`` clauses follow the
    file's existing style: a missing optional option selects its default.

    Raises Exception when the section or a mandatory option is missing.
    """
    if self.section not in parser.sections():
        raise Exception(
            "Section %s does not exists in the given configuration file" % self.section)

    try:
        self.pre_command = parser.get(self.section, 'pre-command')
    except:
        # Ignore it, it isn't mandatory
        self.pre_command = None

    try:
        self.post_command = parser.get(self.section, 'post-command')
    except:
        # Ignore it, it isn't mandatory
        self.post_command = None

    try:
        self.command = parser.get(self.section, 'command')
    except:
        raise Exception(
            "No command specified in the configuration file for section %s" % self.section)

    try:
        self.tube_name = parser.get(self.section, 'tube')
    except:
        # BUGFIX: a stray bare 'raise' preceded this statement, which
        # re-raised the parser's own error and made this informative
        # message unreachable dead code.
        raise Exception(
            "No tube specified in the configuration file for section %s" % self.section)

    try:
        self.crash_tube = parser.get(self.section, 'crash-tube')
    except:
        # Default: derive the crash tube name from the samples tube.
        self.crash_tube = "%s-crash" % self.tube_name

    try:
        self.extension = parser.get(self.section, 'extension')
    except:
        raise Exception(
            "No extension specified in the configuration file for section %s" % self.section)

    try:
        self.timeout = parser.get(self.section, 'timeout')
    except:
        # Default timeout is 90 seconds
        self.timeout = 90
    self.timeout = int(self.timeout)

    try:
        # 'environment' names another section whose items become the
        # environment variables for the target process.
        environment = parser.get(self.section, 'environment')
        self.env = dict(parser.items(environment))
    except:
        self.env = {}

    try:
        self.cleanup = parser.get(self.section, 'cleanup-command')
    except:
        self.cleanup = None

    try:
        self.iterative = parser.getboolean(self.section, 'iterative')
        if self.iterative:
            debug("Iterative algorithm in use")
    except:
        self.iterative = False

    try:
        self.save_generations = parser.getboolean(self.section, 'save-generations')
    except:
        self.save_generations = False

    try:
        # BUGFIX: this was the only flag still read with
        # bool(parser.get(...)), where any non-empty string (even "0" or
        # "false") is truthy.  Use getboolean() like the other flags.
        self.radamsa = parser.getboolean(self.section, 'radamsa')
        if self.radamsa:
            debug("Radamsa algorithm in use")
    except:
        self.radamsa = False

    try:
        self.state_file = parser.get(self.section, 'state-file')
    except:
        #raise Exception("No state file specified for target %s" % self.section)
        self.state_file = None

    try:
        self.current_state = parser.get(self.section, 'current-state-file')
    except:
        self.current_state = "current-state"

    try:
        # Optional global debug switch; ignored if absent or if the
        # 'config' module is unavailable.
        is_debug = parser.getboolean(self.section, 'debug')
        config.DEBUG = is_debug
    except:
        # Silently ignore the exception
        pass

    try:
        self.generation_bottom_level = parser.getint(
            self.section, 'generation-bottom-level')
    except:
        self.generation_bottom_level = -100

    try:
        self.hide_output = parser.getboolean(self.section, 'hide-output')
    except:
        self.hide_output = True

    try:
        self.skip_bytes = parser.getint(self.section, 'skip-bytes')
    except:
        self.skip_bytes = 4

    try:
        self.non_uniques = parser.getboolean(self.section, 'non-uniques')
    except:
        self.non_uniques = False
def fuzz_one_internal(self, template): # Get mutated data using @template as the template buffer. offset, size, buf = self.mutate(template) filename = mktemp(suffix = self.extension) debug("Creating temporary file %s" % filename) with open(filename, "wb") as f: f.write(buf) debug("Performing code coverage...") metrics = [] self.record_metric(filename, metrics) for metric in metrics: bbs = int(metric.unique_bbs) if bbs > self.stats["max"]: if not self.radamsa: log("GOOD! Found an interesting change at 0x%x! Covered basic blocks %d, original maximum %d" % (offset, bbs, self.stats["max"])) else: log("GOOD! Found an interesting change! Covered basic blocks %d, original maximum %d" % (bbs, self.stats["max"])) if self.iterative: self.stats["iteration_char"] = 0 self.stats["iteration"] += 1 increase = (bbs - self.stats["max"]) self.generation_value += increase self.apply_bytes(offset, size, buf) self.generation_value = 0 old_stats = self.mgr.dict(self.stats) self.lock.acquire() try: debug("Recalculating statistics...") self.recalculate_statistics(old_stats, bbs) finally: self.lock.release() elif bbs < self.stats["min"]: debug("Bad metric found: minimum basic block(s) %d, current test-case basic block(s) %d" % (self.stats["min"], bbs)) self.discard_bytes(offset, size, buf) self.generation_value -= 3 else: line = "Uninteresting data with current test-case: min %d, max %d, current %d" line = line % (self.stats["min"], self.stats["max"], bbs) debug(line) self.discard_bytes(offset, size, buf) self.generation_value -= 1 if metric.exit_code in RETURN_SIGNALS: self.generation_value += abs(self.generation_bottom_level) ret = metric.exit_code log("*** Found a BUG, caught signal %d (%s), hurra!" % (ret, RETURN_SIGNALS[ret])) self.dump_poc(filename, offset, size, buf) self.bugs += 1 debug("Removing test-case %s" % filename) os.remove(filename)
def mutate(self, template): if self.is_dir: self.radamsa = True if self.iterative: debug("Iterative2?") return self.iterative_mutator(template) elif self.radamsa: debug("Radamsa") return self.mutate_radamsa(template) else: method = random.randint(0, 3) if method == 0: debug("Mutate internal") return self.mutate_internal(template) elif method == 1: debug("Mutate from templates") return self.mutate_from_templates(template) elif method == 2: debug("Iterative") return self.iterative_mutator(template) elif method == 3: debug("Radamsa") return self.mutate_radamsa(template)
def mutate(self, template): if self.iterative: debug("Iterative2?") return self.iterative_mutator(template) elif self.radamsa: debug("Radamsa") return self.mutate_radamsa(template) else: method = random.randint(0, 3) if method == 0: debug("Mutate internal") return self.mutate_internal(template) elif method == 1: debug("Mutate from templates") return self.mutate_from_templates(template) elif method == 2: debug("Iterative") return self.iterative_mutator(template) elif method == 3: debug("Radamsa") return self.mutate_radamsa(template)
def apply_bytes(self, offset, size, buf): debug("Acquiring lock...") self.lock.acquire() try: debug("Saving old generation (%s)" % sha1(self.template).hexdigest()) if len(self.generations) >= self.max_generations: del self.generations[0] self.generations.append([ bytearray(self.template), dict(self.stats), self.generation_value ]) if self.save_generations and buf != "": file_hash = sha1(buf).hexdigest() ext = os.path.splitext(self.input_file)[1] filename = "generation_%s%s" % (file_hash, ext) filename = os.path.join(self.output, filename) log("Writing discovered generation file %s (%s)" % (file_hash, filename)) with open(filename, "wb") as f: f.write(buf) if not self.radamsa: debug("Applying patch at offset %d of size %d" % (offset, size)) else: debug("Replacing old buffer") self.template = buf """ if self.skip_bytes > 0: header = self.template[0:self.skip_bytes] if len(buf) > len(self.template): self.template = bytearray(buf) else: for i in range(size): self.template[offset+i] = buf[i] if self.skip_bytes > 0: self.template[0:self.skip_bytes] = header """ if self.current_state is not None: ext = os.path.splitext(self.input_file)[1] filename = "%s%s" % (self.current_state, ext) filename = os.path.join(self.output, filename) file_hash = sha1(self.template).hexdigest() debug("Creating or updating current state file %s (%s)" % (filename, file_hash)) with open(filename, "wb") as f: f.write(self.template) finally: debug("Releasing lock...") self.lock.release()
def apply_bytes(self, offset, size, buf): debug("Acquiring lock...") self.lock.acquire() try: debug("Saving old generation (%s)" % sha1(self.template).hexdigest()) if len(self.generations) >= self.max_generations: del self.generations[0] self.generations.append([bytearray(self.template), dict(self.stats), self.generation_value]) if self.save_generations: file_hash = sha1(buf).hexdigest() ext = os.path.splitext(self.input_file)[1] filename = "generation_%s%s" % (file_hash, ext) filename = os.path.join(self.output, filename) log("Writing discovered generation file %s (%s)" % (file_hash, filename)) with open(filename, "wb") as f: f.write(buf) if not self.radamsa: debug("Applying patch at offset %d of size %d" % (offset, size)) else: debug("Replacing old buffer") self.template = buf """ if self.skip_bytes > 0: header = self.template[0:self.skip_bytes] if len(buf) > len(self.template): self.template = bytearray(buf) else: for i in range(size): self.template[offset+i] = buf[i] if self.skip_bytes > 0: self.template[0:self.skip_bytes] = header """ if self.current_state is not None: ext = os.path.splitext(self.input_file)[1] filename = "%s%s" % (self.current_state, ext) filename = os.path.join(self.output, filename) file_hash = sha1(self.template).hexdigest() debug("Creating or updating current state file %s (%s)" % (filename, file_hash)) with open(filename, "wb") as f: f.write(self.template) finally: debug("Releasing lock...") self.lock.release()