Example no. 1
    def __append_to_level(self, new_node):
        if self.current not in self.references.keys():
            self.references[self.current] = []
        self.all_nodes.append(new_node)
        self.references[self.current].append(self.all_nodes.index(new_node))
        if new_node.level > self.max_level:
            self.max_level = new_node.level

        if new_node.node_type == KaflNodeType.favorite:
            self.favorite_buf.append(self.__get_ref(new_node))
        elif new_node.node_type == KaflNodeType.regular and new_node.node_state < KaflNodeState.finished:
            self.regular_buf.append(self.__get_ref(new_node))
        elif new_node.node_state >= KaflNodeState.finished:
            self.finished_buf.append(self.__get_ref(new_node))

        self.payload_hashes[new_node.payload_hash] = self.__get_ref(new_node)

        if self.max_min_bucketing_enabled:
            for i in list(set(self.next_max_bucket)):
                log_tree("===> New max bucket " + str(i) + " = " + str(self.max_bucket_values[i]) + "\tID: "+ str(new_node.node_id))
                # If another node already holds this max bucket slot, remember it so its
                # favorite status can be re-evaluated later, then hand the slot to the new node.
                if self.max_bucket_ref[i]:
                    log_tree("Replacing ...")
                    self.old_pending_node = self.max_bucket_ref[i]
                self.max_bucket_ref[i] = new_node

            self.next_max_bucket = []
Example no. 2
    def __enable_preliminary_mode(self):
        log_tree("creating bitmap backups...")
        self.bitmap.seek(0)
        self.crash_bitmap.seek(0)
        self.kasan_bitmap.seek(0)
        self.timeout_bitmap.seek(0)
        self.backup_bitmap.seek(0)
        self.backup_crash_bitmap.seek(0)
        self.backup_kasan_bitmap.seek(0)
        self.backup_timeout_bitmap.seek(0)

        self.backup_bitmap.write(self.bitmap.read(self.bitmap_size))
        self.backup_crash_bitmap.write(self.crash_bitmap.read(self.bitmap_size))
        self.backup_kasan_bitmap.write(self.kasan_bitmap.read(self.bitmap_size))
        self.backup_timeout_bitmap.write(self.timeout_bitmap.read(self.bitmap_size))

        self.backup_bitmap.flush()
        self.backup_crash_bitmap.flush()
        self.backup_kasan_bitmap.flush()
        self.backup_timeout_bitmap.flush()

        if self.max_min_bucketing_enabled:
            self.next_max_bucket = []
            for i in range(len(self.max_bucket_values)):
                self.backup_max_bucket_values[i] = self.max_bucket_values[i]

        for e in self.preliminary_mode_queue:
            e.remove()
        self.preliminary_mode_queue = []

        self.qemu_lookup.enable_preliminary_mode()
        self.preliminary_mode = True
        self.graph.update(self.__get_from_ref(self.current))

        return 0
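
The snapshot in __enable_preliminary_mode() is a plain byte-for-byte copy between file-like bitmap objects; __disable_preliminary_mode() below runs the same copy in the opposite direction. A minimal, self-contained sketch of that seek/read/write/flush pattern, assuming the bitmap attributes are mmap-backed buffers of bitmap_size bytes (an assumption; these examples never show how the attributes are created):

# Minimal sketch of the snapshot/restore pattern, assuming mmap-backed bitmaps
# of bitmap_size bytes (an assumption, not shown in these examples).
import mmap

bitmap_size = 16
live_bitmap = mmap.mmap(-1, bitmap_size)     # stand-in for self.bitmap
backup_bitmap = mmap.mmap(-1, bitmap_size)   # stand-in for self.backup_bitmap

live_bitmap.write(b"\x01" * bitmap_size)     # pretend this is current coverage state

# __enable_preliminary_mode(): snapshot the live bitmap into the backup buffer.
live_bitmap.seek(0)
backup_bitmap.seek(0)
backup_bitmap.write(live_bitmap.read(bitmap_size))
backup_bitmap.flush()

# __disable_preliminary_mode(): copy the snapshot back over the live bitmap.
live_bitmap.seek(0)
backup_bitmap.seek(0)
live_bitmap.write(backup_bitmap.read(bitmap_size))
live_bitmap.flush()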
Example no. 3
    def __is_unique_timeout(self, bitmap):
        empty_bitmap = True
        for i in range(len(bitmap)):
            if bitmap[i] != '\xff':
                empty_bitmap = False 
                break
        if empty_bitmap:
            log_tree("Very suspicious...kAFL bitmap is empty.\n\t\tWrong address range configured or buggy userspace agent in use?\n\n")

        return self.__is_finding_unique(bitmap, self.c_timeout_bitmap, timeout=True)
Example no. 4
 def load_json(cls, json_data):
     log_tree("Restoring: " + str(json_data['node_id']))
     obj = cls(json_data['level'], None, None,
               node_state=int(json_data['node_state']), node_type=int(json_data['node_type']),
               current=json_data['current'], write_data=False)
     obj.node_id = int(json_data['node_id'])
     obj.bits = json_data['bits']
     obj.identifier = json_data['identifier']
     obj.payload_len = json_data['payload_len']
     obj.payload_hash = json_data['payload_hash']
     return obj
Example no. 5
 def __restore_graph(self):
     log_tree("__restore_graph()")
     if self.current != self.MASTER_NODE_ID:
         self.__get_from_ref(self.current).current = True
     for key, value in self.references.items():
         key = int(key)
         for next_key in value:
             next_node = self.all_nodes[next_key]
             if key == self.MASTER_NODE_ID:
                 self.graph.append(None, next_node)
             else:
                 self.graph.append(self.all_nodes[key], next_node)
Example no. 6
 def __are_new_bits_present(self, new_bitmap):
     log_tree('are_new_bits_present')
     cnt = 0
     for b in new_bitmap:
         if b != 255:
             cnt += 1
     log_tree('new bitmap has %d bytes' % cnt)
     cnt = 0
     for i in range(len(self.bitmap)):
         if self.bitmap[i] != 0:
             cnt += 1
     log_tree('global bitmap has %d bytes' % cnt)
     found = False
     counter = 0
     for i in range(len(new_bitmap)):
         # Check if bit within the shm bitmap is set and the bucketing bitmap field is not already fully populated...
         if new_bitmap[i] != 255 and self.bitmap[i] != 255:
             for j in reversed(range(len(self.buckets))):
                 # Find the most significant bit ...
                  if ((new_bitmap[i] + 1) & self.buckets[j]) != 0:
                     # Check if the bucket slot is free ...
                      if (self.bitmap[i] & self.buckets[j]) == 0:  # and j > 1:
                         counter += 1
                         found = True
                         self.bitmap[i] = self.bitmap[i] + self.buckets[j]
                     # If not, skip this bit ...
                     break
     if found:
         log_tree("New path found!\t(" + str(counter) + " Bits)")
     return found
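
The nested loop above is the core of the coverage comparison: the (value + 1) & bucket test suggests each bitmap byte stores hit_count - 1, with 0xff meaning "never hit" (an inference from the arithmetic, not stated in these examples). A small standalone sketch of the same logic on plain lists, assuming the usual power-of-two bucket thresholds for self.buckets (also an assumption):

# Standalone sketch of the bucketing check, assuming self.buckets is the
# power-of-two list below (an assumption; the attribute is set elsewhere).
BUCKETS = [1, 2, 4, 8, 16, 32, 64, 128]

def new_bits_present(new_bitmap, global_bitmap, buckets=BUCKETS):
    found = False
    counter = 0
    for i in range(len(new_bitmap)):
        # 0xff means "not hit"; skip bytes already saturated in the global bitmap.
        if new_bitmap[i] != 255 and global_bitmap[i] != 255:
            for j in reversed(range(len(buckets))):
                # Most significant bucket bit of the hit count (value + 1).
                if ((new_bitmap[i] + 1) & buckets[j]) != 0:
                    # Claim the bucket slot if it is still free in the global map.
                    if (global_bitmap[i] & buckets[j]) == 0:
                        counter += 1
                        found = True
                        global_bitmap[i] += buckets[j]
                    break
    return found, counter

# Toy run: byte 0 was hit three times (stored as 2), byte 1 was never hit.
print(new_bits_present([2, 255], [0, 0]))   # -> (True, 1); the global bitmap byte becomes 2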
Example no. 7
 def load_data(cls, enable_graphviz=False):
     log_tree("Restore from json file...")
     obj = cls([], enable_graphviz=enable_graphviz, flush=False)
     KaflNode.reset_node_id()
     with open(FuzzerConfiguration().argument_values['work_dir'] + "/tree.json", 'r') as infile:
         dump = json.load(infile)
         for key, value in dump.iteritems():
             if key == 'all_nodes':
                 obj.all_nodes = []
                 for var in value:
                     obj.all_nodes.append(KaflNode.load_json(var))
             else:
                 setattr(obj, key, value)
     obj.__restore_graph()
     return obj
Example no. 8
    def save_data(self):
        ignore = [
            "bitmap_fd", "crash_bitmap_fd", "kasan_bitmap_fd",
            "timeout_bitmap_fd", "bitmap", "crash_bitmap", "kasan_bitmap",
            "timeout_bitmap"
        ]
        dump = {}
        for key, value in self.__dict__.items():
            if key != 'graph' and key not in ignore:
                dump[key] = value

        log_tree(str(dump))
        with open(
                FuzzerConfiguration().argument_values['work_dir'] +
                "/tree.json", 'w') as outfile:
            json.dump(dump, outfile, default=json_dumper)
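
json_dumper is passed as the default serializer above but is not among these examples. A minimal sketch of what such a hook typically looks like (an assumption, not the actual implementation), so that objects such as KaflNode instances fall back to their attribute dictionary when json.dump cannot serialize them natively:

# Hypothetical default-serializer for json.dump (not shown in these examples):
# non-JSON-serializable objects are emitted as their __dict__, anything else as a string.
def json_dumper(obj):
    try:
        return obj.__dict__
    except AttributeError:
        return str(obj)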
Example no. 9
 def __is_finding_unique(self, bitmap, finding_bitmap, timeout=False):
     found = False
     counter = 0
     for i in range(len(bitmap)):
         # Check if bit within the shm bitmap is set and the bucketing bitmap field is not already fully populated...
         if bitmap[i] != 255 and finding_bitmap[i] != 255:
             for j in reversed(range(len(self.buckets))):
                 # Find the most significant bit ...
                  if ((bitmap[i] + 1) & self.buckets[j]) != 0:
                     # Check if the bucket slot is free ...
                     if (finding_bitmap[i] & self.buckets[j]) == 0:
                         counter += 1
                         found = True
                          finding_bitmap[i] += self.buckets[j]
                     # If not, skip this bit ...
                     break
     if found:
         log_tree("New finding!\t(" + str(counter) + " Bits)")
     return found
Example no. 10
 def __are_new_bits_present(self, new_bitmap):
     found = False
     counter = 0
     for i in range(len(new_bitmap)):
         # Check if bit within the shm bitmap is set and the bucketing bitmap field is not already fully populated...
         if new_bitmap[i] != '\xff' and self.bitmap[i] != '\xff':
             for j in reversed(range(len(self.buckets))):
                 # Find the most significant bit ...
                  if ((ord(new_bitmap[i]) + 1) & self.buckets[j]) != 0:
                     # Check if the bucket slot is free ...
                      if (ord(self.bitmap[i]) & self.buckets[j]) == 0:  # and j > 1:
                         counter += 1
                         found = True
                          self.bitmap[i] = chr(ord(self.bitmap[i]) + self.buckets[j])
                     # If not, skip this bit ...
                     break
     if found:
         log_tree("New path found!\t(" + str(counter) + " Bits)")
     return found
Example no. 11
    def get_next(self, performance, finished=False):
        tmp = self.__get_from_ref(self.current)

        if self.current != self.MASTER_NODE_ID:
            if tmp.node_state != KaflNodeState.finished:
                if finished:
                    self.__set_finished(self.__get_from_ref(self.current))
                else:
                    self.__set_unfinished(self.__get_from_ref(self.current))
        

        next_node = self.__get_favorites()
        if not next_node:
            if RAND(20) == 0:
                next_node = self.__get_regular()
            else:
                next_node = self.__get_finished()
        if not next_node:
            self.__restore_state()
            return self.get_next(performance)
        self.draw()

        log_tree("log_tree: " + str(self.__get_score(self.__get_ref(next_node))) + " " + str(next_node.fav_bits) + " " + str(next_node.new_bit_count) + " " + str(next_node.level) + " " + str(next_node.performance))
        return next_node
Example no. 12
    def __check_if_max_bucket(self, value, field):
        if (self.max_bucket_values[field] is None) or value > self.max_bucket_values[field]:
            log_tree("=====>>> " + str(value) + " vs " + str(self.max_bucket_values[field]) + "\t" + str(field))
            self.max_bucket_values[field] = value

            self.next_max_bucket.append(field)
            if self.preliminary_mode:
                log_tree("Max bucket found (v: " + str(value) + " @ " + str(field) + ")... [preliminary]")
            else:
                log_tree("Max bucket found (v: " + str(value) + " @ " + str(field) + ")...")

            return True
        return False
Example no. 13
    def __disable_preliminary_mode(self):
        log_tree("restoring bitmaps...")

        self.bitmap.seek(0)
        self.crash_bitmap.seek(0)
        self.kasan_bitmap.seek(0)
        self.timeout_bitmap.seek(0)
        self.backup_bitmap.seek(0)
        self.backup_crash_bitmap.seek(0)
        self.backup_kasan_bitmap.seek(0)
        self.backup_timeout_bitmap.seek(0)

        self.bitmap.write(self.backup_bitmap.read(self.bitmap_size))
        self.crash_bitmap.write(self.backup_crash_bitmap.read(self.bitmap_size))
        self.kasan_bitmap.write(self.backup_kasan_bitmap.read(self.bitmap_size))
        self.timeout_bitmap.write(self.backup_timeout_bitmap.read(self.bitmap_size))

        self.bitmap.flush()
        self.crash_bitmap.flush()
        self.kasan_bitmap.flush()
        self.timeout_bitmap.flush()

        if self.max_min_bucketing_enabled:
            self.next_max_bucket = []
            for i in range(len(self.max_bucket_values)):
                self.max_bucket_values[i] = self.backup_max_bucket_values[i]


        log_tree("flushing preliminary queue...")
        for node in self.preliminary_mode_queue:
            try:
                self.graph.remove_node(node)
            except Exception:
                pass

        log_tree("flushing preliminary qemu map...")
        self.qemu_lookup.disable_preliminary_mode()
        self.preliminary_mode = False
        self.graph.update(self.__get_from_ref(self.current))

        return len(self.preliminary_mode_queue)
Example no. 14
    def append(self, payload, bitmap, methode, node_state=None, node_type=None, performance=0.0):
        accepted = False

        new_byte_count = 0
        new_bit_count = 0

        if self.preliminary_mode:
            if node_type >= KaflNodeType.crash:
                if node_type == KaflNodeType.crash:
                    accepted = self.__is_unique_crash(bitmap)
                elif node_type == KaflNodeType.kasan:
                    accepted = self.__is_unique_kasan(bitmap)
                elif node_type == KaflNodeType.timeout:
                    accepted = self.__is_unique_timeout(bitmap)
            else:
                new_byte_count, new_bit_count = self.__are_new_bits_present(bitmap)
                if new_bit_count != 0 and not self.__check_if_duplicate(payload):
                    accepted = True

            if accepted:
                log_tree("new preliminary input found: " + repr(payload[:32]))
                new_node = KaflNode((self.level + 1), payload, bitmap, methode, node_state=KaflNodeState.untouched, node_type=KaflNodeType.preliminary, performance=performance)
                self.preliminary_mode_queue.append(new_node)
                self.graph.append(self.__get_from_ref(self.current), new_node)
                self.draw()
                return True
            return False

        if node_type:
            if node_type >= KaflNodeType.crash:
                if node_type == KaflNodeType.crash:
                    accepted = self.__is_unique_crash(bitmap)
                elif node_type == KaflNodeType.kasan:
                    accepted = self.__is_unique_kasan(bitmap)
                elif node_type == KaflNodeType.timeout:
                    accepted = self.__is_unique_timeout(bitmap)
                if not accepted:
                    return False

        if not accepted:
            new_byte_count, new_bit_count = self.__are_new_bits_present(bitmap)
            found = (new_bit_count != 0)
            if self.__check_if_duplicate(payload):
                return False
            if found:
                if not node_type >= KaflNodeType.crash:
                    self.paths += 1
                accepted = True

        if accepted:
            if self.__check_if_duplicate(payload):
                return False

            log_tree("new input found: " + repr(payload[:32]))
            new_node = KaflNode((self.level + 1), payload, bitmap, methode, node_state=node_state, node_type=node_type, performance=performance, new_byte_count=new_byte_count, new_bit_count=new_bit_count)
            if methode:
                self.fuzz_yield.append_result(methode)
                self.fuzz_yield.write_result(FuzzerConfiguration().argument_values['work_dir'] + "/yield.txt")
            self.__append_to_level(new_node)
            self.graph.append(self.__get_from_ref(self.current), new_node)
            if not node_type >= KaflNodeType.crash:
                self.__check_if_favorite(new_node)
                self.__is_favorite(new_node)
                if self.max_min_bucketing_enabled and self.old_pending_node:
                    self.__is_favorite(self.old_pending_node)
                    self.old_pending_node = None
            self.draw()
            return True
        else:
            return False