def __pre_sync_handler(self):
    """Answer the master's pre-sync request once every expected response arrived.

    Returns True (and reports the number of untouched treemap nodes to the
    master) when the local round counter has caught up with the count the
    master announced; False while responses are still outstanding.
    """
    expected = self.round_counter_master_pre
    seen = self.round_counter
    log_mapserver("__pre_sync_handler: " + str(expected) + " / " + str(seen))
    # NOTE: original carried a disabled extra condition: "or self.abortion_alredy_sent"
    if expected != seen:
        return False
    send_msg(KAFL_TAG_UNTOUCHED_NODES,
             self.treemap.get_num_of_untouched_nodes(),
             self.comm.to_master_from_mapserver_queue)
    return True
def __sync_handler(self):
    """Drive every pending synchronisation phase in a fixed order.

    Each phase flag is re-checked every call; once the matching handler
    reports completion the phase's state is reset so it can be re-armed.
    """
    if self.redqueen_sync and self.__redqueen_sync_handler():
        self.redqueen_sync = False
        self.round_counter = 0
    if self.effector_sync and self.__effector_sync_handler():
        # Effector run finished: drop the baseline hash and the byte map.
        self.effector_sync = False
        self.effector_initial_bitmap = None
        self.effector_map = []
    if self.verification_sync and self.__verification_sync_handler():
        self.verification_sync = False
    if self.pre_sync and self.__pre_sync_handler():
        self.pre_sync = False
        self.round_counter_master_pre = 0
        log_mapserver("ShadowMap Size: " + str(len(self.shadow_map)))
    if self.post_sync and self.__post_sync_handler():
        self.post_sync = False
        self.round_counter_master_post = 0
        self.round_counter = 0
def __verification_sync_handler(self):
    """Answer the master's verification-sync request once all responses arrived.

    Returns True (and acknowledges the sync to the master) when the local
    round counter matches the announced count; False otherwise.
    """
    # Fix: log message previously said "__verificatiom_sync_handler" (typo),
    # mislabeling the diagnostic output.
    log_mapserver("__verification_sync_handler: " + str(self.round_counter_verification_sync) +
                  " / " + str(self.round_counter))
    if self.round_counter_verification_sync == self.round_counter:
        send_msg(KAFL_TAG_REQ_VERIFY_SYNC, 0, self.comm.to_master_from_mapserver_queue)
        return True
    return False
def __result_tag_handler(self, request):
    """Consume a batch of execution results from one slave.

    Under the slave's lock B, copies payloads and bitmaps for results with
    new coverage out of shared memory (releasing lock A afterwards). Each
    bitmap is hashed and fed to __check_hash; effector-map bytes are marked
    when the bitmap hash differs from the baseline; too many reload events
    trigger a one-shot stage-abortion request to the master.
    """
    import struct  # local import so this fix does not touch the file header

    self.comm.slave_locks_B[request.source].acquire()
    results = request.data
    payloads = []
    bitmaps = []
    payload_shm = self.comm.get_mapserver_payload_shm(request.source)
    bitmap_shm = self.comm.get_bitmap_shm(request.source)
    for result in results:
        if result.new_bits:
            bitmap_shm.seek(result.pos * self.comm.get_bitmap_shm_size())
            payload_shm.seek(result.pos * self.comm.get_mapserver_payload_shm_size())
            # Fix: payload records start with a little-endian u32 length
            # prefix. The previous ord()-based decode only worked on
            # Python 2 (bytes indexing yields int on Python 3, making
            # ord() raise); struct.unpack is identical on both.
            data_len = struct.unpack("<I", payload_shm.read(4))[0]
            payloads.append(payload_shm.read(data_len))
            bitmaps.append(bitmap_shm.read(self.comm.get_bitmap_shm_size()))
        else:
            payloads.append(None)
            bitmaps.append(None)
            #log_mapserver("[MAPS]\t\ SKIP")
    self.comm.slave_locks_A[request.source].release()

    for i in range(len(results)):
        if results[i].reloaded:
            self.abortion_counter += 1
        if results[i].new_bits:
            if results[i].timeout:
                self.mapserver_state_obj.timeout += 1
            new_hash = mmh3.hash64(bitmaps[i])
            self.__check_hash(new_hash, bitmaps[i], payloads[i], results[i].crash,
                              results[i].timeout, results[i].kasan, results[i].slave_id,
                              results[i].reloaded, results[i].performance,
                              results[i].qid, results[i].pos)
            self.last_hash = new_hash
            self.round_counter += 1
            if self.effector_initial_bitmap:
                if self.effector_initial_bitmap != new_hash:
                    # Coverage changed relative to the baseline: every byte
                    # the mutation touched is marked as effective.
                    for j in results[i].affected_bytes:
                        if not self.effector_map[j]:
                            self.effector_map[j] = True
        else:
            self.round_counter += 1

    # TODO: Replace const value by performance*(1/50)s
    if self.abortion_counter >= self.abortion_threshold:
        if not self.abortion_alredy_sent:
            log_mapserver("Stage abortion limit (" + str(self.abortion_threshold) + ") reached!")
            send_msg(KAFL_TAG_ABORT_REQ, self.mapserver_state_obj, self.comm.to_master_queue)
            self.abortion_alredy_sent = True
            self.comm.stage_abortion_notifier.value = True
def __effector_sync_handler(self):
    """Ship the effector map to the master once all responses arrived or the stage aborted.

    Returns True when the map was sent, False while responses are outstanding.
    """
    log_mapserver("__effector_sync_handler: " + str(self.round_counter_effector_sync) +
                  " / " + str(self.round_counter))
    finished = (self.round_counter_effector_sync == self.round_counter) or self.abortion_alredy_sent
    if finished:
        send_msg(KAFL_TAG_GET_EFFECTOR, self.effector_map,
                 self.comm.to_master_from_mapserver_queue)
    return finished
def mapserver_loader(comm):
    """Entry point of the mapserver process.

    Runs the mapserver main loop until a KeyboardInterrupt, then signals
    slave termination and persists the treemap and mapserver state.
    """
    log_mapserver("PID: " + str(os.getpid()))
    mapserver_process = MapserverProcess(comm)
    try:
        mapserver_process.loop()
    except KeyboardInterrupt:
        # Ask the slaves to shut down, then flush all in-memory state.
        mapserver_process.comm.slave_termination.value = True
        mapserver_process.treemap.save_data()
        mapserver_process.save_data()
        # Fix: log message previously read "Date saved!" (typo).
        log_mapserver("Data saved!")
def __fin_preliminary_tag_handler(self, request):
    """Switch preliminary mode per the master's request and acknowledge it.

    The reply always carries the treemap's own toggle result, even when the
    mode did not change.
    """
    # Todo flush shadow map
    wanted = request.data
    if wanted != self.preliminary_mode:
        self.preliminary_mode = wanted
        if wanted:
            # Entering preliminary mode: restart its counter and hash cursor.
            self.state["preliminary"] = 0
            self.last_hash = ""
        log_mapserver("Preliminary Mode: " + str(self.preliminary_mode))
    send_msg(KAFL_TAG_REQ_PRELIMINARY,
             self.treemap.toggle_preliminary_mode(wanted),
             self.comm.to_master_from_mapserver_queue)
def __result_tag_handler(self, request):
    """Consume a batch of execution results whose bitmap hash was precomputed by the slave.

    Under the slave's lock B, copies payloads and bitmaps for results with
    new coverage out of shared memory (releasing lock A afterwards), then
    feeds each finding to __check_hash and updates the effector map when
    the hash differs from the baseline.
    """
    import struct  # local import so this fix does not touch the file header

    self.comm.slave_locks_B[request.source].acquire()
    results = request.data
    payloads = []
    bitmaps = []
    payload_shm = self.comm.get_mapserver_payload_shm(request.source)
    bitmap_shm = self.comm.get_bitmap_shm(request.source)
    bitmap_hashes = []
    for result in results:
        # Fix: original tested "result.bitmap_hash and result.bitmap_hash"
        # (the same attribute twice) - one check is sufficient.
        if result.new_bits and result.bitmap_hash:
            bitmap_shm.seek(result.pos * self.comm.get_bitmap_shm_size())
            payload_shm.seek(result.pos * self.comm.get_mapserver_payload_shm_size())
            # Fix: payload records start with a little-endian u32 length
            # prefix. The previous ord()-based decode only worked on
            # Python 2; struct.unpack is identical on both versions.
            data_len = struct.unpack("<I", payload_shm.read(4))[0]
            payloads.append(payload_shm.read(data_len))
            bitmaps.append(bitmap_shm.read(self.comm.get_bitmap_shm_size()))
            bitmap_hashes.append(result.bitmap_hash)
        else:
            payloads.append(None)
            bitmaps.append(None)
            bitmap_hashes.append(None)
    self.comm.slave_locks_A[request.source].release()

    for i in range(len(results)):
        if bitmap_hashes[i] is not None and results[i].new_bits:
            self.__check_hash(bitmap_hashes[i], bitmaps[i], payloads[i], results[i].crash,
                              results[i].timeout, results[i].kasan, results[i].slave_id,
                              results[i].reloaded, results[i].performance,
                              results[i].qid, results[i].pos, results[i].methode)
            self.last_hash = bitmap_hashes[i]
            self.round_counter += 1
            if self.effector_initial_bitmap:
                if self.effector_initial_bitmap != bitmap_hashes[i]:
                    # Coverage changed relative to the baseline: mark every
                    # byte the mutation touched as effective.
                    for j in results[i].affected_bytes:
                        log_mapserver("affected_bytes: " + str(j))
                        if not self.effector_map[j]:
                            self.effector_map[j] = True
        else:
            self.round_counter += 1
def mapserver_loader(comm, reload):
    """Entry point of the mapserver process (reload-capable variant).

    Installs a SIGUSR1 pdb hook, runs the main loop until Ctrl-C, and
    persists treemap and mapserver state if the process was constructed.
    """
    signal.signal(signal.SIGUSR1, handle_pdb)
    log_mapserver("PID: " + str(os.getpid()))
    proc = None
    try:
        proc = MapserverProcess(comm, reload=reload)
        proc.loop()
    except KeyboardInterrupt:
        print('mapserver keyboard interrupt')
        # Only save when construction succeeded before the interrupt hit.
        if proc:
            print('mapserver saving')
            proc.treemap.save_data()
            proc.save_data()
            print('mapserver saved')
def __sync_handler(self):
    """Drive the pending synchronisation phases (effector, pre, post) in order.

    Each phase flag is re-checked every call; once the matching handler
    reports completion the phase's state is reset so it can be re-armed.
    """
    if self.effector_sync and self.__effector_sync_handler():
        # Effector run finished: drop the baseline hash and the byte map.
        self.effector_sync = False
        self.effector_initial_bitmap = None
        self.effector_map = []
        #log_mapserver("Deactivate Effector Mapping...")
        #log_mapserver("ShadowMap Size: " + str(len(self.shadow_map)))
    if self.pre_sync and self.__pre_sync_handler():
        self.pre_sync = False
        self.round_counter_master_pre = 0
        log_mapserver("ShadowMap Size: " + str(len(self.shadow_map)))
    if self.post_sync and self.__post_sync_handler():
        self.post_sync = False
        self.round_counter_master_post = 0
        self.round_counter = 0
def __next_tag_handler(self, request):
    """Arm the post-sync phase from a master 'next' request.

    Remembers the request tag for the later acknowledgement and records
    the expected response count plus the reported performance value.
    """
    self.post_sync_master_tag = request.tag
    self.post_sync = True
    expected_count, perf = request.data[0], request.data[1]
    self.round_counter_master_post = expected_count
    self.performance = perf
    log_mapserver("Performance: " + str(self.performance))
def __check_hash(self, new_hash, bitmap, payload, crash, timeout, kasan, slave_id, reloaded, performance, qid, pos):
    """Classify one execution result by its bitmap hash.

    Crash/kasan/timeout results are deduplicated against crash_list and the
    treemap; unique ones are appended to the treemap and their ring buffer is
    dumped to disk. Otherwise, a hash not seen in hash_list/shadow_map is
    treated as a new finding. Counters on mapserver_state_obj are updated
    throughout.

    NOTE(review): qid/pos are currently unused except by the commented-out
    corpus-copy snippet below — confirm before removing.
    """
    # Keep the raw payload in the per-slave ring buffer for post-mortem dumps.
    self.ring_buffers[slave_id].append(str(payload))
    hash_was_new = False
    if new_hash != self.last_hash:
        # First-ever hash counts as new; afterwards it must be absent from
        # both the known-hash list and the shadow map.
        if len(self.hash_list) == 0:
            hash_was_new = True
        if new_hash not in self.hash_list and new_hash not in self.shadow_map:
            hash_was_new = True
    if crash or kasan or timeout:
        #log_mapserver("CRASH: " + str(crash) + " KASAN: " + str(kasan) + " TIMOUT: " + str(timeout))
        #log_mapserver(str(payload))
        # fugly workaround
        #if fastCount(bitmap) >= (32 << 10):
        #    return
        if crash:
            if new_hash in self.crash_list:
                # Known crash hash: only bump the total counter.
                self.mapserver_state_obj.crashes += 1
            else:
                if self.treemap.is_unique_crash(bitmap):
                    # Unique coverage for a crash: reset the abortion counter,
                    # record it, and dump the slave's ring buffer.
                    self.abortion_counter = 0
                    log_mapserver("Unique crash submited by slave #" + str(slave_id) + " ...")
                    self.crash_list.append(new_hash)
                    if self.treemap.append(payload, bitmap, node_type=KaflNodeType.crash):
                        self.__save_ring_buffer(slave_id, self.config.argument_values['work_dir'] + "/rbuf/crash_" + str(self.mapserver_state_obj.unique) + ".rbuf")
                    self.mapserver_state_obj.crashes += 1
                    self.mapserver_state_obj.unique += 1
                else:
                    self.crash_list.append(new_hash)
                    self.mapserver_state_obj.crashes += 1
        elif kasan:
            if new_hash in self.crash_list:
                self.mapserver_state_obj.kasan += 1
            else:
                if self.treemap.is_unique_kasan(bitmap):
                    self.abortion_counter = 0
                    log_mapserver("Unique kasan report submited by slave #" + str(slave_id) + " ...")
                    self.crash_list.append(new_hash)
                    if self.treemap.append(payload, bitmap, node_type=KaflNodeType.kasan):
                        self.__save_ring_buffer(slave_id, self.config.argument_values['work_dir'] + "/rbuf/kasan_" + str(self.mapserver_state_obj.unique_kasan) + ".rbuf")
                    self.mapserver_state_obj.kasan += 1
                    self.mapserver_state_obj.unique_kasan += 1
                else:
                    self.crash_list.append(new_hash)
                    self.mapserver_state_obj.kasan += 1
        elif timeout:
            if new_hash in self.crash_list:
                self.mapserver_state_obj.timeout += 1
            else:
                if self.treemap.is_unique_timeout(bitmap):
                    self.abortion_counter = 0
                    log_mapserver("Unique timeout detected by slave #" + str(slave_id) + " ...")
                    self.crash_list.append(new_hash)
                    if self.treemap.append(payload, bitmap, node_type=KaflNodeType.timeout):
                        self.__save_ring_buffer(slave_id, self.config.argument_values['work_dir'] + "/rbuf/timeout_" + str(self.mapserver_state_obj.unique_timeout) + ".rbuf")
                    self.mapserver_state_obj.timeout += 1
                    self.mapserver_state_obj.unique_timeout += 1
                else:
                    self.crash_list.append(new_hash)
                    self.mapserver_state_obj.timeout += 1
    elif hash_was_new:
        #log_mapserver("NEW FINDING :" + str(payload))
        # Regular (non-crashing) new coverage: hand it to __add_new_hash.
        if self.__add_new_hash(new_hash, bitmap, payload, performance):
            #log_mapserver("NEW FINDING :" + str(payload))
            """
            os.system("cp /dev/shm/kafl_raw_" + str(qid) + "_" + str(pos) + " " + self.config.argument_values['work_dir'] + "/corpus/pt_buf_" + str(self.new_findings))
            """
            self.__update_state()
            self.new_findings += 1
            #self.mapserver_state_obj.path_pending += 1
            self.mapserver_state_obj.last_hash_time = time.time()
    if reloaded:
        # Target was reloaded: the ring buffer no longer reflects live state.
        self.ring_buffers[slave_id].clear()
def __fin_verification_tag_handler(self, request):
    """Arm the verification-sync phase with the response count announced by the master."""
    log_mapserver("__fin_verification_tag_handler: " + str(request.data))
    self.verification_sync = True
    self.round_counter_verification_sync = request.data
def __req_effector_tag_handler(self, request):
    """Start effector-map tracking for the payload in request.data.

    Stores the baseline bitmap hash and grows the effector map by one
    False flag per payload byte (PAYLOAD_SHM_SIZE entries).
    """
    log_mapserver("New Effector Map (" + str(len(request.data)) + ")")
    self.effector_initial_bitmap = mmh3.hash64(request.data)
    # Idiom fix: bulk-extend instead of appending one element per loop
    # iteration; same resulting list, single C-level operation.
    self.effector_map.extend([False] * self.config.config_values['PAYLOAD_SHM_SIZE'])
def __check_hash(self, new_hash, bitmap, payload, crash, timeout, kasan, slave_id, reloaded, performance, qid, pos, methode):
    """Classify one execution result by its (slave-computed) bitmap hash.

    Crash/kasan/timeout results are appended to the treemap; unique ones are
    logged and dumped (ring buffer), non-unique ones are written to the
    findings/non_uniq and evaluation CSV files. Non-crashing results with a
    new hash become regular findings. In preliminary mode only the
    "preliminary" counter is updated.

    NOTE(review): qid/pos appear unused in this variant — confirm against
    callers before removing.
    """
    # Keep the raw payload in the per-slave ring buffer for post-mortem dumps.
    self.ring_buffers[slave_id].append(str(payload))
    if self.preliminary_mode:
        # Preliminary mode accepts every hash as new.
        hash_was_new = True
    else:
        hash_was_new = False
        if new_hash != self.last_hash:
            # First-ever hash counts as new; afterwards it must be absent
            # from both the known-hash set and the shadow map.
            if len(self.hash_list) == 0:
                hash_was_new = True
            if new_hash not in self.hash_list and new_hash not in self.shadow_map:
                hash_was_new = True
    if crash or kasan or timeout:
        # Map the result class to its label and treemap node type.
        if crash:
            state_str = "crash"
            node_type = KaflNodeType.crash
        elif kasan:
            state_str = "kasan"
            node_type = KaflNodeType.kasan
        elif timeout:
            state_str = "timeout"
            node_type = KaflNodeType.timeout
        if self.treemap.append(payload, bitmap, methode, node_type=node_type):
            # treemap accepted it as unique.
            if not self.preliminary_mode:
                log_mapserver("Unique " + state_str + " submited by slave #" + str(slave_id) + " ...")
                self.__save_ring_buffer(
                    slave_id,
                    self.config.argument_values['work_dir'] + "/rbuf/" + state_str + "_" + str(self.state[state_str + "_unique"]) + ".rbuf")
                self.state[state_str] += 1
                self.state[state_str + "_unique"] += 1
            else:
                self.state["preliminary"] += 1
                log_mapserver("Unique " + state_str + " submited by slave #" + str(slave_id) + " [preliminary]...")
        else:
            # Non-unique crash-class result: persist it for later evaluation.
            if not self.preliminary_mode:
                self.state[state_str] += 1
                path = FuzzerConfiguration().argument_values[
                    'work_dir'] + "/findings/non_uniq/" + state_str + "_non_uniq_" + str(
                    self.state[state_str])
                with open(path, "w") as f:
                    f.write(payload)
                # Append [elapsed_seconds, path] as a JSON line to the
                # findings CSV (opened in append mode).
                with open(
                        FuzzerConfiguration().argument_values['work_dir'] + "/evaluation/findings.csv", 'ab') as f:
                    f.write("%s\n" % json.dumps([
                        time.time() - GlobalState().values["inittime"], path
                    ]))
    elif hash_was_new:
        if self.treemap.append(payload, bitmap, methode, performance=performance):
            if not self.preliminary_mode:
                if methode.get_type() == METHODE_IMPORT:
                    self.state["imports"] += 1
                self.hash_list.add(new_hash)
                self.new_findings += 1
                self.state["last_hash_time"] = time.time()
                self.__update_state()
            else:
                self.state["preliminary"] += 1
        else:
            # treemap rejected the finding: shadow the hash so it is not
            # re-evaluated.
            if not self.preliminary_mode:
                self.shadow_map.add(new_hash)
    if reloaded:
        # Target was reloaded: the ring buffer no longer reflects live state.
        self.ring_buffers[slave_id].clear()