def __init__(self, cfg, section):
    """Create a worker bound to one configuration section.

    The configuration is read first because it defines tube_name and
    crash_tube; the three queue connections are opened afterwards.
    """
    self.cfg = cfg
    self.section = section
    self.read_configuration()
    # The main tube is watched (we consume from it); the delete and
    # crash tubes are write-only.
    self.q = get_queue(watch=True, name=self.tube_name)
    self.delete_q = get_queue(watch=False, name="delete")
    self.crash_q = get_queue(watch=False, name=self.crash_tube)
    self.crash_info = None
def create_sample(self, pe):
    """Mutate a random template for project element `pe` and enqueue it.

    Picks a random file from pe.subfolder, runs the project's mutation
    command over it and puts the resulting buffer (base64-encoded,
    together with its temporary filename) into the '<prefix>-samples'
    tube.  On any queueing error the temporary files are removed.
    """
    subfolder = pe.subfolder
    tube_prefix = pe.tube_prefix
    command = pe.command
    project_id = pe.project_id
    mutation_engine_id = pe.mutation_engine_id
    filename = self.read_random_file(subfolder)
    debug("Random template file %s" % filename)
    cmd, temp_file = self.get_command(command, filename, subfolder)
    log("Generating mutated file %s" % temp_file)
    debug("*** Command: %s" % cmd)
    os.system(cmd)
    self.queue_lock.acquire()
    try:
        log("Putting it in queue and updating statistics...")
        # BUGFIX: file(...).read() leaked the file handle; use a
        # context manager (also drops the Python-2-only file() builtin).
        with open(temp_file, "rb") as f:
            buf = f.read()
        q = get_queue(watch=False, name="%s-samples" % tube_prefix)
        json_buf = json.dumps([base64.b64encode(buf), temp_file])
        q.put(json_buf)
        self.update_statistics(project_id, mutation_engine_id)
    except Exception:
        # Best effort cleanup: the sample never made it to the queue,
        # so nobody else will ever delete these temporary files.
        log("Error putting job in queue: %s" % str(sys.exc_info()[1]))
        log("Removing temporary file %s" % temp_file)
        try:
            os.remove(temp_file)
        except Exception:
            pass
        if os.path.exists("%s.diff" % temp_file):
            log("Removing temporary diff file %s" % temp_file)
            os.remove("%s.diff" % temp_file)
    finally:
        self.queue_lock.release()
def __init__(self, tube_prefix):
    """Bind to the '<tube_prefix>-samples' queue.

    Accepts either the bare prefix or a name that already carries the
    '-samples' suffix (the suffix is stripped with a notice).
    """
    if tube_prefix.endswith("-samples"):
        log("Notice: Removing '-samples' suffix from the queue name")
        # BUGFIX: str.replace() would remove *every* '-samples'
        # occurrence in the name; only the trailing suffix should go.
        tube_prefix = tube_prefix[:-len("-samples")]
    self.tube_prefix = tube_prefix
    self.q = get_queue(watch=False, name="%s-samples" % tube_prefix)
def list_queues():
    """Print every non-default beanstalk tube with its ready-job count."""
    q = get_queue(watch=False, name="default")
    fmt = "Tube %s, total of %d job(s)"
    for tube in q.tubes():
        if tube == "default":
            continue
        try:
            print(fmt % (tube, q.stats_tube(tube)["current-jobs-ready"]))
        except:
            # A tube can disappear between tubes() and stats_tube();
            # report and keep listing the rest.
            sys.stderr.write("Error reading tube %s: %s\n" % (tube, str(sys.exc_info()[1])))
            sys.stderr.flush()
def find_crashes(self):
    """Drain every enabled project's crash tube into the database."""
    projects = self.db.select("projects",
                              what="project_id, subfolder, tube_prefix",
                              where="enabled = 1")
    for project in projects:
        crash_tube = "%s-crash" % project.tube_prefix
        queue = get_queue(watch=True, name=crash_tube)
        # Consume only the jobs that are ready right now; anything that
        # arrives later waits for the next invocation.
        while queue.stats_tube(crash_tube)["current-jobs-ready"] > 0:
            job = queue.reserve()
            crash = json.loads(job.body)
            self.insert_crash(project.project_id, project.subfolder, crash)
            job.delete()
def empty_queue():
    """Delete every ready job from the tube named in sys.argv[2]."""
    # BUGFIX: the original tested == 2 only, so invoking with no
    # argument at all would raise IndexError at sys.argv[2].
    if len(sys.argv) < 3:
        print("Command '-e' needs a queue to empty.")
    else:
        tube = sys.argv[2]
        q = get_queue(watch=True, name=tube)
        while 1:
            # 1 second timeout: reserve() returns None once the tube
            # is drained, which ends the loop.
            job = q.reserve(1)
            if not job:
                break
            job.delete()
def find_crashes(self):
    """Record every queued crash for the enabled projects."""
    projects = self.db.select("projects",
                              what="project_id, tube_prefix",
                              where="enabled = 1")
    for project in projects:
        crash_tube = "%s-crash" % project.tube_prefix
        queue = get_queue(watch=True, name=crash_tube)
        while queue.stats_tube(crash_tube)["current-jobs-ready"] > 0:
            job = queue.reserve()
            # Each job body is a single-entry {temp_file: crash_data}
            # mapping.
            crash_info = json.loads(job.body)
            temp_file, crash_data = list(crash_info.items())[0]
            self.insert_crash(project.project_id, temp_file, crash_data)
            job.delete()
def remove_obsolete_files(self):
    """Consume the 'delete' tube, removing each named sample file (and
    its '.diff' companion) from the temporary directory."""
    queue = get_queue(watch=True, name="delete")
    while queue.stats_tube("delete")["current-jobs-ready"] > 0:
        # Archive pending crashes before their sample files disappear.
        self.find_crashes()
        job = queue.reserve()
        # Refuse anything that could escape the temporary directory.
        if "." in job.body or "/" in job.body:
            raise Exception("Invalid filename %s" % job.body)
        sample_file = os.path.join(self.config["TEMPORARY_PATH"], job.body)
        log("Deleting sample file %s" % sample_file)
        try:
            os.remove(sample_file)
            diff_file = sample_file + ".diff"
            if os.path.exists(diff_file):
                os.remove(diff_file)
        except:
            # The file may already be gone; log and move on.
            log("Error removing temporary file: %s" % str(sys.exc_info()[1]))
        job.delete()
def remove_obsolete_files(self):
    """Process the 'delete' tube: each job names a sample under
    SAMPLES_PATH that is no longer needed and gets removed, together
    with its '.diff' companion if present."""
    delete_q = get_queue(watch=True, name="delete")
    while delete_q.stats_tube("delete")["current-jobs-ready"] > 0:
        # Flush crashes first so no crash references a deleted sample.
        self.find_crashes()
        job = delete_q.reserve()
        body = job.body
        # Reject names containing '.' or '/' (path traversal guard).
        if "." in body or "/" in body:
            raise Exception("Invalid filename %s" % body)
        sample_file = os.path.join(self.config["SAMPLES_PATH"], body)
        log("Deleting sample file %s" % sample_file)
        try:
            os.remove(sample_file)
            if os.path.exists(sample_file + ".diff"):
                os.remove(sample_file + ".diff")
        except:
            # Best effort: a missing file is not fatal.
            log("Error removing temporary file: %s" % str(sys.exc_info()[1]))
        job.delete()
def create_sample(self, pe):
    """Mutate a random template for project element `pe` and enqueue it.

    Picks a random template under WORKING_PATH/<subfolder>/templates,
    runs the project's mutation command and puts a JSON object with the
    zlib-compressed, base64-encoded sample, its temporary filename and
    the template's hash into the '<prefix>-samples' tube.  On failure
    the temporary files are removed.
    """
    template_folder = os.path.join(self.config["WORKING_PATH"],
                                   pe.subfolder, "templates")
    tube_prefix = pe.tube_prefix
    command = pe.command
    project_id = pe.project_id
    mutation_engine_id = pe.mutation_engine_id
    filename = self.read_random_file(template_folder)
    # Template files are named after their hash; keep it for triage.
    template_hash = os.path.basename(filename)
    debug("Random template file %s" % filename)
    cmd, temp_file = self.get_command(command, filename, template_folder)
    log("Generating mutated file %s" % temp_file)
    debug("*** Command: %s" % cmd)
    os.system(cmd)
    self.queue_lock.acquire()
    try:
        log("Putting it in queue and updating statistics...")
        # BUGFIX: file(...).read() leaked the file handle; use a
        # context manager (also drops the Python-2-only file() builtin).
        with open(temp_file, "rb") as f:
            buf = f.read()
        q = get_queue(watch=False, name="%s-samples" % tube_prefix)
        data = {
            'sample': base64.b64encode(zlib.compress(buf)),
            'temp_file': temp_file,
            'template_hash': template_hash
        }
        q.put(json.dumps(data))
        self.update_statistics(project_id, mutation_engine_id)
        self.update_iteration(project_id)
    except Exception:
        # The sample never reached the queue, so nobody else will
        # clean up these temporary files.
        log("Error putting job in queue: %s" % str(sys.exc_info()[1]))
        log("Removing temporary file %s" % temp_file)
        try:
            os.remove(temp_file)
        except Exception:
            pass
        if os.path.exists("%s.diff" % temp_file):
            log("Removing temporary diff file %s" % temp_file)
            os.remove("%s.diff" % temp_file)
    finally:
        self.queue_lock.release()
def GET(self):
    """Render the statistics page: per-project totals, crash breakdowns
    by exploitability / signal / disassembly, the top crashing
    addresses and the per-tube queue depths.

    Redirects to the login form when there is no authenticated user.
    NOTE(review): the SQL below uses MySQL-only functions (ifnull,
    concat, conv) — see the inline XXX markers.
    """
    # Require a logged-in session; anonymous users get the login form.
    if not 'user' in session or session.user is None:
        f = register_form()
        return render.login(f)

    # Per-project totals: sample count, crash count and the engine-wide
    # iteration counter (mutation_engine_id = -1 row).
    # XXX: TODO: IFNULL is not supported in PgSQL
    sql = """ select p.name, sum(total) total_samples,
              ifnull((
                select count(*)
                  from samples s, crashes c
                 where c.sample_id = s.sample_id
                   and project_id = p.project_id
                 group by project_id
              ), 0) crashes,
              (
                select iteration
                  from statistics st
                 where st.project_id = p.project_id
                   and st.mutation_engine_id = -1
              ) iteration
              from statistics s, projects p, mutation_engines m
             where p.project_id = s.project_id
               and m.mutation_engine_id = s.mutation_engine_id
               and p.enabled = 1
             group by p.name """
    db = init_web_db()
    project_stats = db.query(sql)

    # Crash counts grouped by exploitability verdict.
    sql = """ select distinct exploitability, count(*) count
                from crashes c, projects p
               where p.project_id = c.project_id
                 and p.enabled = 1
               group by exploitability """
    exploitables = db.query(sql)

    # Crash counts grouped by the signal that killed the target.
    sql = """ select distinct crash_signal, count(*) count
                from crashes c, projects p
               where p.project_id = c.project_id
                 and p.enabled = 1
               group by crash_signal """
    signals = db.query(sql)

    # Crash counts grouped by faulting instruction; substr/instr strip
    # the leading address token from the disassembly line.
    sql = """select substr(disassembly, instr(disassembly, ' ')+1) dis, count(*) count
               from crashes c, projects p
              where p.project_id = c.project_id
                and p.enabled = 1
              group by 1"""
    disassemblies = db.query(sql)

    # XXX: TODO: Neither concat nor conv are supported in either PgSQL
    # or SQLite so I need to create a function for these databases.
    # Distinct "bugs": masked crash address + signal + instruction,
    # ordered by how often each one was hit.
    sql = """ select concat('0x???????', substr(conv(program_counter, 10, 16), length(conv(program_counter, 10, 16))-2)) address,
                     crash_signal,
                     substr(disassembly, instr(disassembly, ' ')+1) dis,
                     count(*) count
                from crashes c, projects p
               where p.project_id = c.project_id
                 and crash_signal != 'UNKNOWN'
                 and p.enabled = 1
               group by 1
               order by 4 desc"""
    bugs = db.query(sql)

    # Ready-job counts for every non-default beanstalk tube.
    tubes = {}
    q = get_queue(watch=True, name="delete")
    for tube in q.tubes():
        if tube != "default":
            tubes[tube] = q.stats_tube(tube)["current-jobs-ready"]

    return render.statistics(project_stats, exploitables, signals,
                             disassemblies, bugs, tubes)
def queue_is_full(self, prefix, maximum):
    """Return True when the '<prefix>-samples' tube already holds
    `maximum` or more ready jobs."""
    tube = "%s-samples" % prefix
    queue = get_queue(watch=True, name=tube)
    ready = queue.stats_tube(tube)["current-jobs-ready"]
    debug("Total of %d job(s) in queue" % ready)
    # Equivalent to the original `ready > maximum - 1` for integers.
    return ready >= maximum
def read_configuration(self):
    """Populate the minimizer's settings from section `self.section` of
    the INI file `self.cfg`.

    Mandatory options: 'command' and 'extension' (an Exception is
    raised when either is missing, as it is when the file or the
    section does not exist).  Every other option falls back to a
    default.
    """
    if not os.path.exists(self.cfg):
        raise Exception("Invalid configuration file given")
    parser = ConfigParser.SafeConfigParser()
    # Preserve option-name case: environment sections map directly to
    # environment variable names.
    parser.optionxform = str
    parser.read(self.cfg)
    self.parser = parser

    if self.section not in parser.sections():
        raise Exception(
            "Section %s does not exists in the given configuration file" % self.section)

    try:
        self.pre_command = parser.get(self.section, 'pre-command')
    except Exception:
        # Ignore it, it isn't mandatory
        self.pre_command = None

    try:
        self.pre_iterations = int(
            parser.get(self.section, 'pre-iterations'))
    except Exception:
        # Ignore it, it isn't mandatory
        self.pre_iterations = 1

    try:
        self.post_command = parser.get(self.section, 'post-command')
    except Exception:
        # Ignore it, it isn't mandatory
        self.post_command = None

    try:
        self.post_iterations = int(
            parser.get(self.section, 'post-iterations'))
    except Exception:
        # Ignore it, it isn't mandatory
        self.post_iterations = 1

    try:
        self.command = parser.get(self.section, 'command')
    except Exception:
        raise Exception(
            "No command specified in the configuration file for section %s" % self.section)

    try:
        self.extension = parser.get(self.section, 'extension')
    except Exception:
        raise Exception(
            "No extension specified in the configuration file for section %s" % self.section)

    try:
        self.timeout = parser.get(self.section, 'minimize-timeout')
    except Exception:
        # Default timeout is 90 seconds.  BUGFIX: keep it a *string*
        # here — the .lower() check below raised AttributeError when
        # the default int 90 was used.
        self.timeout = "90"
    if self.timeout.lower() != "auto":
        self.timeout = int(self.timeout)

    try:
        environment = parser.get(self.section, 'environment')
        self.env = dict(parser.items(environment))
    except Exception:
        self.env = {}

    try:
        self.cleanup = parser.get(self.section, 'cleanup-command')
    except Exception:
        self.cleanup = None

    try:
        self.signal = int(parser.get(self.section, 'signal'))
    except Exception:
        self.signal = None

    try:
        self.mode = parser.get(self.section, 'mode')
        if self.mode.isdigit():
            self.mode = int(self.mode)
    except Exception:
        self.mode = 32

    try:
        self.windbg_path = parser.get(self.section, 'windbg-path')
    except Exception:
        self.windbg_path = None

    try:
        self.exploitable_path = parser.get(self.section,
                                           'exploitable-path')
    except Exception:
        self.exploitable_path = None

    try:
        self.debugging_interface = parser.get(
            self.section, 'minimize-debugging-interface')
        if self.debugging_interface == "pykd":
            self.iface = pykd_iface
        elif self.debugging_interface == "gdb":
            self.iface = gdb_iface
        elif self.debugging_interface == "asan":
            self.iface = asan_iface
        else:
            self.iface = vtrace_iface
    except Exception:
        self.debugging_interface = None
        self.iface = None

    try:
        self.asan_symbolizer_path = parser.get(self.section,
                                               'asan-symbolizer-path')
    except Exception:
        # The symbolizer is only mandatory for the ASAN interface.
        if self.debugging_interface == "asan":
            raise Exception(
                "No asan-symbolizer-path specified in the configuration file for section %s" % self.section)
        self.asan_symbolizer_path = None

    # If the crash-tube is specified, put every single crash found while
    # minimizing a proof-of-concept.
    try:
        self.crash_tube = parser.get(self.section, 'crash-tube')
        self.crash_q = get_queue(self.crash_tube, False)
    except Exception:
        self.crash_tube = None
        self.crash_q = None

    try:
        # BUGFIX: bool() of any non-empty string (even "false" or "0")
        # is True; getboolean() parses yes/no/true/false/1/0 properly.
        self.local_files = parser.getboolean(
            self.section, 'minimize-local-files')
    except Exception:
        # By default, consider we're minimizing in a box distinct to the
        # one were nfp_engine.py is running
        self.local_files = False
def read_configuration(self):
    """Load this section's minimizer settings from the INI file.

    'command' and 'extension' are mandatory (Exception when missing);
    everything else has a default.  Also raises Exception when the
    configuration file or the section itself is missing.
    """
    if not os.path.exists(self.cfg):
        raise Exception("Invalid configuration file given")
    parser = ConfigParser.SafeConfigParser()
    # Keep option-name case intact for the environment mapping.
    parser.optionxform = str
    parser.read(self.cfg)
    self.parser = parser

    if self.section not in parser.sections():
        raise Exception("Section %s does not exists in the given configuration file" % self.section)

    try:
        self.pre_command = parser.get(self.section, 'pre-command')
    except Exception:
        # Ignore it, it isn't mandatory
        self.pre_command = None

    try:
        self.pre_iterations = int(parser.get(self.section, 'pre-iterations'))
    except Exception:
        # Ignore it, it isn't mandatory
        self.pre_iterations = 1

    try:
        self.post_command = parser.get(self.section, 'post-command')
    except Exception:
        # Ignore it, it isn't mandatory
        self.post_command = None

    try:
        self.post_iterations = int(parser.get(self.section, 'post-iterations'))
    except Exception:
        # Ignore it, it isn't mandatory
        self.post_iterations = 1

    try:
        self.command = parser.get(self.section, 'command')
    except Exception:
        raise Exception("No command specified in the configuration file for section %s" % self.section)

    try:
        self.extension = parser.get(self.section, 'extension')
    except Exception:
        raise Exception("No extension specified in the configuration file for section %s" % self.section)

    try:
        self.timeout = parser.get(self.section, 'minimize-timeout')
    except Exception:
        # Default timeout is 90 seconds.  BUGFIX: keep it a *string*
        # because the .lower() call below raised AttributeError when
        # the default was the int 90.
        self.timeout = "90"
    if self.timeout.lower() != "auto":
        self.timeout = int(self.timeout)

    try:
        environment = parser.get(self.section, 'environment')
        self.env = dict(parser.items(environment))
    except Exception:
        self.env = {}

    try:
        self.cleanup = parser.get(self.section, 'cleanup-command')
    except Exception:
        self.cleanup = None

    try:
        self.signal = int(parser.get(self.section, 'signal'))
    except Exception:
        self.signal = None

    try:
        self.mode = parser.get(self.section, 'mode')
        if self.mode.isdigit():
            self.mode = int(self.mode)
    except Exception:
        self.mode = 32

    try:
        self.windbg_path = parser.get(self.section, 'windbg-path')
    except Exception:
        self.windbg_path = None

    try:
        self.exploitable_path = parser.get(self.section, 'exploitable-path')
    except Exception:
        self.exploitable_path = None

    try:
        self.debugging_interface = parser.get(self.section, 'minimize-debugging-interface')
        if self.debugging_interface == "pykd":
            self.iface = pykd_iface
        elif self.debugging_interface == "gdb":
            self.iface = gdb_iface
        elif self.debugging_interface == "asan":
            self.iface = asan_iface
        else:
            self.iface = vtrace_iface
    except Exception:
        self.debugging_interface = None
        self.iface = None

    try:
        self.asan_symbolizer_path = parser.get(self.section, 'asan-symbolizer-path')
    except Exception:
        # Only mandatory when the ASAN interface was selected above.
        if self.debugging_interface == "asan":
            raise Exception("No asan-symbolizer-path specified in the configuration file for section %s" % self.section)
        self.asan_symbolizer_path = None

    # If the crash-tube is specified, put every single crash found while
    # minimizing a proof-of-concept.
    try:
        self.crash_tube = parser.get(self.section, 'crash-tube')
        self.crash_q = get_queue(self.crash_tube, False)
    except Exception:
        self.crash_tube = None
        self.crash_q = None

    try:
        # BUGFIX: bool() of any non-empty string (even "false" or "0")
        # is True; getboolean() parses yes/no/true/false/1/0 properly.
        self.local_files = parser.getboolean(self.section, 'minimize-local-files')
    except Exception:
        # By default, consider we're minimizing in a box distinct to the
        # one were nfp_engine.py is running
        self.local_files = False
def get_pending_elements(self, prefix, maximum):
    """Return how many more jobs fit in the '<prefix>-samples' tube
    before it reaches `maximum` ready jobs (negative when over-full)."""
    tube = "%s-samples" % prefix
    queue = get_queue(watch=True, name=tube)
    ready = queue.stats_tube(tube)["current-jobs-ready"]
    debug("Total of %d job(s) in queue" % ready)
    return maximum - ready