    def run(self, input_id_map_list, cov_file_list):
        """
        For each input,
            -convert the AFL input to a ktest seed
            -create sync dir
            -build cmd
            -create new process job
        """
        se_info("{0} activated. input list : {1}".format(self, input_id_map_list))
        input_idx = 0
        for input_id_map in input_id_map_list:
            pid = self.get_new_pid()

            #--generate a klee seed ktest from the afl input
            afl_input = input_id_map['input']
            klee_seed = self.seed_dir+"/klee_instance_sym_"+str(pid).zfill(6)+".ktest"
            self.call_converter("a2k", afl_input, klee_seed, self.bitmodel, self.input_type)
            if not os.path.exists(klee_seed):
                print "no seed" + klee_seed
                continue

            #--create sync_dir for new klee instance
            new_sync_dir = self.sync_dir_base+"/klee_instance_sym_"+str(pid).zfill(6)+"/queue"
            utils.mkdir_force(new_sync_dir)

            #--build klee instance cmd
            edge_ids = list(input_id_map['interesting_edges'])
            stdin_len = os.path.getsize(afl_input)
            klee_cmd = self.build_cmd(klee_seed, edge_ids, new_sync_dir, stdin_len, afl_input, cov_file_list[input_idx])
            print ' '.join(klee_cmd)

            #--construct process meta data, add to jobs list
            kw = {'mock_eof': True, 'mem_cap': self.max_mem}
            p = multiprocessing.Process(target=utils.exec_async, args=[klee_cmd], kwargs=kw)
            p.daemon = True
            task_st = {}
            task_st['instance'] = p
            task_st['sync_dir'] = new_sync_dir
            task_st['seed'] = klee_seed
            task_st['cmd'] = klee_cmd
            if "AFLUnCovSearcher" in self.get_search_heuristics():
                task_st['afl_cov'] = self.fuzzer_cov_file
            self.jobs[pid] = task_st
            input_idx += 1

        for pid, task in self.jobs.iteritems():
            try:
                if pid not in self.started_jobs:
                    task['instance'].start()
                    task['real_pid'] = task['instance'].pid
                    # print "starting klee process: ", task['real_pid']
                    self.started_jobs.add(pid)
                else:
                    se_info("WTF the process {0} is already started".format(pid))
            except Exception:
                # a process can fail to start (e.g. resource limits); skip it
                pass
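
A hypothetical call site for the method above (the SymExplorer name, the queue paths, and the edge ids are illustrative assumptions, not from the source): run expects one dict per AFL input, each carrying the input path and its interesting edges, plus a coverage file per input.

# Sketch only: SymExplorer, the paths, and the edge ids are assumptions.
explorer = SymExplorer("/path/to/config.ini", "/path/to/target")
input_id_map_list = [
    {'input': '/out/master/queue/id:000001', 'interesting_edges': [12, 97]},
    {'input': '/out/master/queue/id:000002', 'interesting_edges': [44]},
]
cov_file_list = ['/out/.tmp_se_0.cov', '/out/.tmp_se_1.cov']
explorer.run(input_id_map_list, cov_file_list)  # forks one klee job per input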
Example #2
    def __init__(self, config, target):
        self.jobs = {}
        self.started_jobs = set()
        self.config = config
        self.target = target
        self.get_config()
        utils.mkdir_force(self.seed_dir)
        self.pid_ctr = 0
        se_info("Concolic Explorer using searcher[{0}]".format(
            ''.join(self.get_search_heuristics())))
Example #3
    def get_moriarty_config(self):
        config = ConfigParser.ConfigParser()
        config.read(self.config)
        if "moriarty" not in config.sections() or "afl" not in config.sections(
        ):
            moriarty_info("Config file read error")
            sys.exit()
        try:
            self.compile_script = config.get("moriarty", "compile_script")
        except Exception:
            self.compile_script = None
        self.target_bin = config.get("moriarty", "target_bin").replace(
            "@target", self.proj_dir)
        self.target_bc = config.get("moriarty", "target_bc").replace(
            "@target", self.proj_dir)
        self.cov_file_base = config.get("moriarty", "sync_dir").replace(
            "@target", self.proj_dir)
        self.switch_heuristic = config.get("switch oracle", "strategy")
        self.max_allow_se_num = int(
            config.get("moriarty", "max_explorer_instance"))
        self.num_of_fuzzers = int(config.get("afl", "slave_num")) + 1
        self.num_of_explorers = (self.se_factory.get_se_size() *
                                 self.max_allow_se_num)
        self.is_sym_explorer_activated = config.has_option("sym_explorer", "bin")
        self.is_conc_explorer_activated = config.has_option("conc_explorer", "bin")

        try:
            self.batch_run_seed_num = int(
                config.get("moriarty", "batch_run_input_num"))
        except Exception:
            self.batch_run_seed_num = 1
        moriarty_info(
            "Number of explorers: {0}, each batch run {1} inputs".format(
                self.num_of_explorers, self.batch_run_seed_num))

        try:
            self.only_count_se_cov = True if "true" in config.get(
                "moriarty", "only_count_se_cov") else False
        except Exception:
            self.only_count_se_cov = True

        try:
            self.log_explored_seed_dir = config.get(
                "moriarty",
                "explored_seed_dir").replace("@target", self.proj_dir)
            utils.mkdir_force(self.log_explored_seed_dir)
            moriarty_info("Saving explored inputs to {0}".format(
                self.log_explored_seed_dir))
        except Exception:
            moriarty_info("Will not save explored inputs")
            self.log_explored_seed_dir = None

        for explorer_id in xrange(self.num_of_explorers):
            cov = self.cov_file_base + "/.tmp_se_" + str(explorer_id) + ".cov"
            self.explorer_cov_file_list.append(cov)

        for slave_id in xrange(self.num_of_fuzzers):
            # construct per-fuzzer coverage and sanitizer-edge file lists
            cov_suffix = "/coverage.csv"
            san_suffix = "/edge_sanitizer.csv"
            cov = self.cov_file_base
            san = self.cov_file_base
            if slave_id == 0:
                cov = cov + "/master" + cov_suffix
                san = san + "/master" + san_suffix
            else:
                cov = cov + "/slave_" + str(slave_id).zfill(6) + cov_suffix
                san = san + "/slave_" + str(slave_id).zfill(6) + san_suffix
            self.cov_file_list.append(cov)
            self.san_file_list.append(san)
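
        # For example, with slave_num = 1 and sync_dir "/out" (hypothetical
        # values), the loops above yield:
        #   explorer_cov_file_list: /out/.tmp_se_0.cov, /out/.tmp_se_1.cov, ...
        #   cov_file_list: /out/master/coverage.csv,
        #                  /out/slave_000001/coverage.csv
        #   san_file_list: /out/master/edge_sanitizer.csv,
        #                  /out/slave_000001/edge_sanitizer.csv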

        self.clean_up_last_session()

        #collecting the recommended edges for reasoning
        try:
            self.loc_map = self.proj_dir + '/locmap.csv'
            self.find_loc_script = config.get("afl", "root") + "/find_source_loc.sh"
            self.recommend_edge_log = config.get(
                "moriarty", "recommend_edge_log").replace("@target", self.proj_dir)
        except Exception:
            self.recommend_edge_log = None
            self.loc_map = None
            self.find_loc_script = None
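
get_moriarty_config implies a config layout like the sketch below. Every value is a placeholder, @target is expanded to the project directory, and the options listed in comments are the optional ones guarded by try/except above.

[moriarty]
target_bin = @target/bin/app
target_bc = @target/bin/app.bc
sync_dir = @target/out
max_explorer_instance = 1
; optional: compile_script, batch_run_input_num (default 1),
; only_count_se_cov (default true), explored_seed_dir, recommend_edge_log

[afl]
slave_num = 1
root = /opt/afl

[switch oracle]
strategy = placeholder_strategy

; the mere presence of these options toggles the corresponding explorer
[sym_explorer]
bin = /opt/se/sym_explorer

[conc_explorer]
bin = /opt/se/conc_explorer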
Example #4
    def make_dirs(self):
        mkdir_force(self.seed_dir)
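
mkdir_force comes from utils and its body is not shown on this page; a plausible sketch, assuming "force" means wipe-and-recreate so every session starts from an empty directory:

import os
import shutil

def mkdir_force(path):
    # Assumed semantics: drop any stale contents, then recreate the directory.
    if os.path.exists(path):
        shutil.rmtree(path)
    os.makedirs(path)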
Example #5
    def run(self, input_id_map_list, cov_file):
        """
            -create seed-out-dir
            For each input,
                -convert ktest move to seed-out-dir
            -create sync dir
            -build cmd
            -create new process job
        """
        pid = self.get_new_pid()
        # key is defined up front so the final return works even when
        # input_id_map_list is empty
        key = "qsym_instance_conc_" + str(pid).zfill(6)
        qsym_seed_dir = self.my_in_dir(str(pid))
        mkdir_force(qsym_seed_dir)

        se_info("{0} activated. input list : {1}".format(
            self, [x['input'] for x in input_id_map_list]))
        se_info("{0} activated. input score : {1}".format(
            self, [x['score'] for x in input_id_map_list]))
        se_info("{0} activated. input size: {1}".format(
            self, [x['size'] for x in input_id_map_list]))

        # sync previously generated seeds; redundant if the QSYM explorer
        # cycle always runs shorter than the main cycle
        self.sync_gen_seeds()

        # launch QSYM for each input in my_in_dir
        # (QSYM does not support batch mode, so at most one input is expected)
        assert len(input_id_map_list) <= 1
        for input_id_map in input_id_map_list:
            afl_input = input_id_map['input']
            qsym_seed = os.path.join(qsym_seed_dir, os.path.basename(afl_input))
            shutil.copy2(afl_input, qsym_seed)
            if not os.path.exists(qsym_seed):
                se_info("no seed created: " + qsym_seed)
                continue

            #--create sync_dir for new qsym instance
            new_sync_dir = self.my_sync_dir(key)
            mkdir_force(new_sync_dir)

            # temp dir holding freshly generated seeds; filtered seeds are
            # transferred to new_sync_dir later (mkdtemp already creates it)
            tmp_dir = tempfile.mkdtemp()

            #--build qsym instance cmd
            q, qsym_cmd = self.build_cmd(qsym_seed, tmp_dir, self.bitmap)
            print ' '.join(qsym_cmd)

            #--construct process meta data, add to jobs list
            kw = {
                'stdin': q.stdin,
                'mem_cap': self.max_mem,
                'use_shell': True,
                'testcase_dir': q.testcase_dir,
                'target_base_path': self.base_dir_from_afl_input(afl_input)
            }
            p = multiprocessing.Process(
                target=utils.qsym_exec_async, args=[qsym_cmd],
                kwargs=kw)  # Needs docker implementation
            p.daemon = True
            task_st = {}
            task_st['instance'] = p
            task_st['sync_dir'] = new_sync_dir
            task_st['cmd'] = qsym_cmd
            task_st['tmp_dir'] = tmp_dir
            task_st['qsym'] = q
            task_st['seed_index'] = 0
            task_st['synced'] = False
            task_st['key'] = key
            task_st['processed'] = False
            self.jobs[pid] = task_st

        for pid, task in self.jobs.iteritems():
            try:
                if pid not in self.started_jobs:
                    task['instance'].start()
                    task['real_pid'] = task['instance'].pid
                    self.started_jobs.add(pid)
            except Exception:
                # a process can fail to start; skip it
                pass
        return (key, [x['input'] for x in input_id_map_list])
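
Unlike the KLEE variants, this run() hands back the instance key and the consumed inputs so the caller can track the job. A hypothetical call site (the explorer object, path, and score/size values are assumptions):

# Sketch only: the explorer object and the input dict values are assumptions.
key, consumed = explorer.run(
    [{'input': '/out/master/queue/id:000003', 'score': 1.0, 'size': 512}],
    cov_file=None)
se_info("launched {0} for {1}".format(key, consumed))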
Example #6
    def run(self, input_id_map_list, cov_file):
        """
            -create seed-out-dir
            For each input,
                -convert ktest move to seed-out-dir
            -create sync dir
            -build cmd
            -create new process job
        """
        pid = self.get_new_pid()
        klee_seed_dir = self.seed_dir + "/klee_instance_conc_" + str(pid)
        utils.mkdir_force(klee_seed_dir)
        input_counter = 0
        max_input_size = 0

        se_info("{0} activated. input list : {1}".format(
            self, [x['input'] for x in input_id_map_list]))
        se_info("{0} activated. input score : {1}".format(
            self, [x['score'] for x in input_id_map_list]))
        try:
            se_info("{0} activated. input size: {1}".format(
                self, [x['size'] for x in input_id_map_list]))
        except Exception:
            pass
        for input_id_map in input_id_map_list:
            #--generate a klee seed ktest from the afl input
            afl_input = utils.from_simple_to_afl_name(input_id_map['input'])
            if not afl_input:
                continue
            max_input_size = max(max_input_size, os.path.getsize(afl_input))
            klee_seed = klee_seed_dir + "/" + str(input_counter).zfill(
                6) + ".ktest"
            # print "before calling converter"
            self.call_converter("a2k", afl_input, klee_seed, self.bitmodel,
                                self.input_type)
            input_counter += 1
            if not os.path.exists(klee_seed):
                print "no seed" + klee_seed
                continue

            #--create sync_dir for new klee instance
            new_sync_dir = self.sync_dir_base + "/klee_instance_conc_" + str(
                pid).zfill(6) + "/queue"
            utils.mkdir_force(new_sync_dir)

            #--build klee instance cmd
            edge_ids = list(input_id_map['interesting_edges'])
            klee_cmd = self.build_cmd(klee_seed_dir, edge_ids, new_sync_dir,
                                      max_input_size, afl_input, cov_file)
            print ' '.join(klee_cmd)

            #--construct process meta data, add to jobs list
            kw = {'mock_eof': True, 'mem_cap': self.max_mem, 'use_shell': True}
            p = multiprocessing.Process(target=utils.exec_async,
                                        args=[klee_cmd],
                                        kwargs=kw)
            p.daemon = True
            task_st = {}
            task_st['instance'] = p
            task_st['sync_dir'] = new_sync_dir
            task_st['seed'] = klee_seed
            task_st['cmd'] = klee_cmd
            if "AFLUnCovSearcher" in self.get_search_heuristics():
                task_st['afl_cov'] = self.fuzzer_cov_file
            self.jobs[pid] = task_st

        for pid, task in self.jobs.iteritems():
            try:
                if pid not in self.started_jobs:
                    task['instance'].start()
                    task['real_pid'] = task['instance'].pid
                    # print "starting klee process: ", task['real_pid']
                    self.started_jobs.add(pid)
                else:
                    se_info("process {0} was already started".format(pid))
            except Exception:
                # a process can fail to start; skip it
                pass
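
The started processes live on in self.jobs, so a caller eventually has to reap them. A minimal polling sketch under that assumption (the actual reaping policy is not shown on this page):

# Hypothetical reaper for the jobs table that run() builds.
for pid, task in list(self.jobs.items()):
    proc = task['instance']
    if pid in self.started_jobs and not proc.is_alive():
        proc.join()
        se_info("instance {0} finished, queue left in {1}".format(
            pid, task['sync_dir']))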