Example #1
def system_test_runner(prog, args_list, test_filename, repo_root_dir, \
                            exe_path_map=None, env_vars=None, timeout=None, \
                            collected_output=None, using_wrapper=False, \
                            dbg_log_execution_out=False):
    try:
        tmp_env = os.environ.copy()
        if env_vars is not None:
            #for e, v in env_vars.items():
            #    tmp_env[e] = v
            tmp_env.update(env_vars)
        if test_filename is not None:
            tmp_env[TEST_FILE_NAME_ENV_VAR] = test_filename
        if using_wrapper and timeout is not None:
            tmp_env[TEST_EXECUTION_TIMEOUT_ENV_VAR] = str(timeout)
            timeout = None

        if collected_output is None:
            retcode, out, err = DriversUtils.execute_and_get_retcode_out_err(\
                                prog=prog, args_list=args_list, env=tmp_env,\
                                timeout=timeout, out_on=dbg_log_execution_out,\
                                err_on=dbg_log_execution_out, \
                                merge_err_to_out=True)
        else:
            retcode, out, err = DriversUtils.execute_and_get_retcode_out_err(\
                                prog=prog, args_list=args_list, env=tmp_env,\
                                timeout=timeout, merge_err_to_out=True)
            collected_output.append(retcode)
            collected_output.append(out)
            collected_output.append(
                retcode in DriversUtils.EXEC_TIMED_OUT_RET_CODE)

        if dbg_log_execution_out:
            logging.debug("(DBG - Test Output):\n" + out)
    #except (ValueError, OSError) as os_e:
    except OSError as os_e:
        # ERROR
        logging.warning(
            "\ntest execution error in system_test_runner (bellow)")
        logging.warning(str(os_e))
        logging.warning("COMMAND: " + " ".join([prog] + args_list))
        if collected_output is not None:
            collected_output.append(None)
            collected_output.append(str(os_e))
            collected_output.append(False)
        return GlobalConstants.TEST_EXECUTION_ERROR
    except ValueError as v_e:
        logging.warning(
            "\ntest execution valueerror in system_test_runner (bellow)")
        logging.warning(str(v_e))
        logging.warning("COMMAND: " + " ".join([prog] + args_list))
        return GlobalConstants.TEST_EXECUTION_ERROR

    # Parse the result
    hasfail = False
    hasfail |= (retcode != 0)

    return GlobalConstants.FAIL_TEST_VERDICT if hasfail else \
                                            GlobalConstants.PASS_TEST_VERDICT
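
A minimal standard-library sketch of the execution pattern above (copied environment, merged stderr, timeout, pass/fail verdict). The helper and the verdict strings are hypothetical stand-ins for DriversUtils.execute_and_get_retcode_out_err and GlobalConstants; the real implementations may differ.

# Sketch only: subprocess-based approximation of the runner above.
import os
import subprocess

PASS, FAIL, ERROR = "PASS", "FAIL", "ERROR"   # hypothetical verdict values

def run_system_test(prog, args_list, env_vars=None, timeout=None):
    tmp_env = os.environ.copy()
    if env_vars is not None:
        tmp_env.update(env_vars)              # overlay caller-provided variables
    try:
        proc = subprocess.run([prog] + args_list, env=tmp_env,
                              stdout=subprocess.PIPE,
                              stderr=subprocess.STDOUT,   # merge stderr into stdout
                              timeout=timeout)
    except (OSError, ValueError) as e:        # bad program path or bad arguments
        return ERROR, str(e)
    except subprocess.TimeoutExpired as e:
        return FAIL, (e.output or b"").decode(errors="replace")
    out = proc.stdout.decode(errors="replace")
    return (FAIL if proc.returncode != 0 else PASS), out
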
Example #2
    def _execute_a_test (self, testcase, exe_path_map, env_vars, \
                    callback_object=None, timeout=None, collect_output=False):
        """ Execute a test given that the executables have been set 
            properly
        """
        prog = 'klee-replay'

        if timeout is None:
            timeout = self.config.ONE_TEST_EXECUTION_TIMEOUT

        ERROR_HANDLER.assert_true(len(exe_path_map) == 1, \
                                    "support a single exe for now", __file__)
        ERROR_HANDLER.assert_true(callback_object is None, \
                                        'TODO: handle callback_obj', __file__)

        repo_exe = list(exe_path_map.keys())[0]
        local_exe = os.path.join(self.klee_used_tmp_build_dir, repo_exe)
        if repo_exe not in self.repo_exe_to_local_to_remote:
            if not os.path.isdir(os.path.dirname(local_exe)):
                os.makedirs(os.path.dirname(local_exe))
            self.repo_exe_to_local_to_remote[repo_exe] = {local_exe: None}

        remote_exe = exe_path_map[repo_exe]
        if remote_exe is None:
            remote_exe = repo_exe

        if remote_exe != self.repo_exe_to_local_to_remote[repo_exe][local_exe]:
            if remote_exe == repo_exe:
                self.code_builds_factory.repository_manager.\
                                    set_repo_to_build_default(\
                                        also_copy_to_map={repo_exe: local_exe})
            else:
                shutil.copy2(remote_exe, local_exe)

        args = [local_exe, os.path.join(self.tests_storage_dir, testcase)]
        tmp_env = os.environ.copy()
        #tmp_env.update(env_vars)
        tmp_env['KLEE_REPLAY_TIMEOUT'] = str(timeout)
        if collect_output:
            retcode, out, err = DriversUtils.execute_and_get_retcode_out_err(\
                                    prog=prog, args_list=args, env=tmp_env, \
                                                        merge_err_to_out=True)
            output_err = (retcode, out)
        else:
            retcode, out, err = DriversUtils.execute_and_get_retcode_out_err(\
                                    prog=prog, args_list=args, env=tmp_env, \
                                                    out_on=False, err_on=False)
            output_err = None

        if retcode in (DriversUtils.EXEC_TIMED_OUT_RET_CODE + \
                                    DriversUtils.EXEC_SEGFAULT_OUT_RET_CODE):
            verdict = common_mix.GlobalConstants.FAIL_TEST_VERDICT
        else:
            verdict = common_mix.GlobalConstants.PASS_TEST_VERDICT

        return verdict, output_err
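
A hedged stand-alone sketch of the klee-replay invocation pattern above: the replay timeout goes through the KLEE_REPLAY_TIMEOUT environment variable, and the native executable plus the .ktest file are the positional arguments. The paths in the usage comment are placeholders.

# Sketch only: replay one .ktest file against a native executable.
import os
import subprocess

def replay_ktest(exe_path, ktest_path, timeout_s=30):
    env = os.environ.copy()
    env["KLEE_REPLAY_TIMEOUT"] = str(timeout_s)   # enforced by klee-replay itself
    proc = subprocess.run(["klee-replay", exe_path, ktest_path], env=env,
                          stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    return proc.returncode, proc.stdout.decode(errors="replace")

# Example (placeholder paths):
#   rc, log = replay_ktest("./a.out", "klee-out-0/test000001.ktest")
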
Example #3
def make_build_func(repo_root_dir, exe_rel_paths, compiler, flags_list, clean,\
                                                                reconfigure):
    """ Helper for GNU make
    """
    def print_err(out, msg):
        out = out.splitlines()
        print(out, msg)

    #~ def print_err()

    cwd = os.getcwd()
    os.chdir(repo_root_dir)

    #try:
    tmp_env = os.environ.copy()
    if compiler is not None:
        tmp_env["CC"] = compiler
    if flags_list is not None:
        tmp_env["CFLAGS"] = " ".join(flags_list)

    if reconfigure:
        args_list = ['clean']
        retcode, out, _ = DriversUtils.execute_and_get_retcode_out_err(\
                                prog='make', args_list=args_list, \
                                env=tmp_env, merge_err_to_out=True)
        if retcode != 0:
            print_err(out, "reconfigure failed")
            os.chdir(cwd)
            return GlobalConstants.COMMAND_FAILURE
    if clean:
        args_list = ['clean']
        retcode, out, _ = DriversUtils.execute_and_get_retcode_out_err(\
                                prog='make', args_list=args_list, \
                                env=tmp_env, merge_err_to_out=True)
        if retcode != 0:
            print_err(out, "clean failed")
            os.chdir(cwd)
            return GlobalConstants.COMMAND_FAILURE

    retcode, out, _ = DriversUtils.execute_and_get_retcode_out_err(\
                        prog='make', env=tmp_env, merge_err_to_out=True)
    if retcode != 0:
        print_err(out, "make")
        os.chdir(cwd)
        return GlobalConstants.COMMAND_FAILURE
    #except:
    #    os.chdir(cwd)
    #    assert False, "Build Unexpected Error in "+__file__
    #return GlobalConstants.COMMAND_FAILURE

    os.chdir(cwd)
    return GlobalConstants.COMMAND_SUCCESS


#~ def make_build_func()
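
A minimal sketch of the same GNU make pattern using only the standard library: the compiler and flags are injected through CC/CFLAGS in a copied environment, make clean runs first when requested, and any non-zero exit code aborts the build. The function name is illustrative.

# Sketch: drive `make` with CC/CFLAGS overrides, mirroring make_build_func.
import os
import subprocess

def simple_make(repo_dir, compiler=None, flags_list=None, clean=False):
    env = os.environ.copy()
    if compiler is not None:
        env["CC"] = compiler
    if flags_list:
        env["CFLAGS"] = " ".join(flags_list)
    if clean:
        if subprocess.run(["make", "clean"], cwd=repo_dir,
                          env=env).returncode != 0:
            return False
    return subprocess.run(["make"], cwd=repo_dir, env=env).returncode == 0
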
Example #4
    def _do_instrument_code (self, outputdir, exe_path_map, \
                                        code_builds_factory, \
                                        enabled_criteria, parallel_count=1):
        # Setup
        if os.path.isdir(self.instrumented_code_storage_dir):
            shutil.rmtree(self.instrumented_code_storage_dir)
        os.mkdir(self.instrumented_code_storage_dir)
        if os.path.isdir(self.gc_files_dir):
            shutil.rmtree(self.gc_files_dir)
        os.mkdir(self.gc_files_dir)

        prog = 'gcc'

        flags = ['--coverage', '-fprofile-dir='+self.gc_files_dir, '-O0']
        additionals = ["-fkeep-inline-functions"]
        
        # get gcc version
        ret, out, err = DriversUtils.execute_and_get_retcode_out_err(\
                                                        prog, ['-dumpversion'])
        ERROR_HANDLER.assert_true(ret == 0, "'gcc -dumpversion' failed'")
        
        # if version > 6.5
        if int(out.split('.')[0]) >= 6:
            if int(out.split('.')[0]) > 6 or int(out.split('.')[1]) > 5:
                additionals += ["-fkeep-static-functions"]
        
        flags += additionals
        
        rel_path_map = {}
        exes, _ = code_builds_factory.repository_manager.\
                                                    get_relative_exe_path_map()
        for exe in exes:
            filename = os.path.basename(exe)
            rel_path_map[exe] = os.path.join(\
                                self.instrumented_code_storage_dir, filename)

        self.instrument_callback_obj = self.InstrumentCallbackObject()
        self.instrument_callback_obj.set_post_callback_args(\
                                            (self.gc_files_dir, rel_path_map))
        pre_ret, ret, post_ret = code_builds_factory.transform_src_into_dest(\
                        src_fmt=CodeFormats.C_SOURCE,\
                        dest_fmt=CodeFormats.NATIVE_CODE,\
                        src_dest_files_paths_map=None,\
                        compiler=prog, flags_list=flags, clean_tmp=True, \
                        reconfigure=True, \
                        callback_object=self.instrument_callback_obj)
        
        # Check
        if ret == common_mix.GlobalConstants.COMMAND_FAILURE:
            ERROR_HANDLER.error_exit("Program {} {}.".format(prog,\
                                        'build failed'), __file__)

        # write down the rel_path_map
        ERROR_HANDLER.assert_true(not os.path.isfile(\
                self.instrumentation_details), "must not exist here", __file__)
        common_fs.dumpJSON(rel_path_map, self.instrumentation_details)
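
The version check above splits the output of gcc -dumpversion on dots; newer GCC releases may print only the major component (for example "9"), so a defensive parse such as the sketch below can help. The 6.5 threshold is taken from the snippet; everything else is an assumption.

# Sketch: decide whether the extra -fkeep-static-functions flag applies
# (version > 6.5), tolerating both "6.5.0" and bare-major -dumpversion output.
import subprocess

def supports_keep_static_functions(gcc="gcc"):
    out = subprocess.run([gcc, "-dumpversion"], stdout=subprocess.PIPE,
                         check=True).stdout.decode().strip()
    parts = out.split(".")
    major = int(parts[0])
    minor = int(parts[1]) if len(parts) > 1 else 0
    return (major, minor) > (6, 5)
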
Example #5
    def _do_instrument_code (self, exe_path_map, \
                                        code_builds_factory, \
                                        enabled_criteria, parallel_count=1):
        # Setup
        if os.path.isdir(self.instrumented_code_storage_dir):
            shutil.rmtree(self.instrumented_code_storage_dir)
        os.mkdir(self.instrumented_code_storage_dir)
        if os.path.isdir(self.mutant_data):
            shutil.rmtree(self.mutant_data)
        os.mkdir(self.mutant_data)

        prog = 'gpmutation'
        if self.custom_binary_dir is not None:
            prog = os.path.join(self.custom_binary_dir, prog)
            ERROR_HANDLER.assert_true(os.path.isfile(prog), \
                            "The tool {} is missing from the specified dir {}"\
                                        .format(os.path.basename(prog), \
                                            self.custom_binary_dir), __file__)

        exes, _ = code_builds_factory.repository_manager.\
                                                    get_relative_exe_path_map()

        ERROR_HANDLER.assert_true(len(exes) == 1, \
                                        "Support only a singe exe", __file__)

        gpmutation_subj = os.path.basename(exes[0])
        args=[gpmutation_subj, self.separate_muts_dir]

        # Execute GPMutation
        ret, out, err = DriversUtils.execute_and_get_retcode_out_err(\
                                                        prog, args_list=args)
        if (ret != 0):
            logging.error(out)
            logging.error(err)
            logging.error("\n>> CMD: " + " ".join([prog]+args) + '\n')
            ERROR_HANDLER.error_exit("gpmutation failed!", __file__)
        
        # write down the rel_path_map
        ERROR_HANDLER.assert_true(not os.path.isfile(\
                self.instrumentation_details), "must not exist here", __file__)
        
        with open (self.instrumentation_details, 'w') as f:
            for exe in exes:
                f.write(exe+'\n')

        # Archive separated if on
        if self.archive_separated:
            err_msg = common_fs.TarGz.compressDir(self.separate_muts_dir, \
                                                    remove_in_directory=True)
            ERROR_HANDLER.assert_true(err_msg is None,\
                                "Compression failed: "+str(err_msg), __file__)
Example #6
    def _dir_chmod777(self, dirpath):
        try:
            for root_, dirs_, files_ in os.walk(dirpath):
                for sub_d in dirs_:
                    if os.path.isdir(os.path.join(root_, sub_d)):
                        os.chmod(os.path.join(root_, sub_d), 0o777)
                for f_ in files_:
                    if os.path.isfile(os.path.join(root_, f_)):
                        os.chmod(os.path.join(root_, f_), 0o777)
        except PermissionError:
            ret,_,_ = DriversUtils.execute_and_get_retcode_out_err('sudo', \
                                args_list=['chmod 777 -R {}'.format(dirpath)])
            ERROR_HANDLER.assert_true(ret == 0, \
                        "'sudo chmod 777 -R "+dirpath+"' failed (returned "+\
                                                        str(ret)+")", __file__)
Example #7
    def _collect_temporary_coverage_data(self, criteria_name_list, \
                                            test_execution_verdict, \
                                            used_environment_vars, \
                                                    result_dir_tmp):
        ''' get gcov files from gcda files into result_dir_tmp
        '''
        prog = 'gcov'

        cov2flags = {
                    TestCriteria.STATEMENT_COVERAGE: [],
                    TestCriteria.BRANCH_COVERAGE: ['-b', '-c'],
                    TestCriteria.FUNCTION_COVERAGE: ['-f'],
                }

        args_list = []
        for criterion in criteria_name_list:
            args_list += cov2flags[criterion]

        gcda_files = self._get_gcda_list()

        raw_filename_list = [os.path.splitext(f)[0] for f in gcda_files]
        args_list += raw_filename_list
        
        if len(gcda_files) > 0:
            # TODO: When gcov generates coverage for different files with the
            # same filename but located in different dirs, avoid overriding.
            # Go where the gcov will be looked for
            cwd = os.getcwd()
            os.chdir(self.gc_files_dir)

            # collect gcda (gcno)
            r, _, _ = DriversUtils.execute_and_get_retcode_out_err(prog=prog, \
                                        args_list=args_list, out_on=False, \
                                                                err_on=False)

            os.chdir(cwd)
            
            if r != 0:
                ERROR_HANDLER.error_exit("Program {} {}.".format(prog,\
                        'failed while collecting coverage'), __file__)
            
            # delete gcda
            for gcda_f in gcda_files:
                os.remove(gcda_f)
            
            common_fs.dumpJSON(self._get_gcov_list(), \
                                os.path.join(result_dir_tmp,\
                                                self.gcov_files_list_filename))
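
A minimal sketch of the gcov collection step: run gcov from the directory holding the .gcda/.gcno files, pass the per-criterion flags (-b -c for branches, -f for functions) plus the profile stems, then pick up the generated .gcov files. The directory layout is an assumption.

# Sketch: invoke gcov over the .gcda files found under gc_files_dir and
# return the .gcov files it produced there.
import glob
import os
import subprocess

def collect_gcov(gc_files_dir, branch=False, functions=False):
    gcda_files = glob.glob(os.path.join(gc_files_dir, "**", "*.gcda"),
                           recursive=True)
    if not gcda_files:
        return []
    flags = (["-b", "-c"] if branch else []) + (["-f"] if functions else [])
    stems = [os.path.splitext(f)[0] for f in gcda_files]
    subprocess.run(["gcov"] + flags + stems, cwd=gc_files_dir, check=True,
                   stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
    return glob.glob(os.path.join(gc_files_dir, "*.gcov"))
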
Example #8
def python_unittest_runner(test_name, repo_root_dir, exe_path_map, env_vars, \
                                            timeout, collected_output=None):
    # TODO: use exe_path_map 

    def parse_test(s):
        return s.split('...')[0].replace(':','/').replace(' ','')

    cwd = os.getcwd()
    os.chdir(repo_root_dir)

    try:
        args_list = ['-m', 'unittest', test_name, '-v']

        if collected_output is None:
            retcode, stdout, _ = DriversUtils.execute_and_get_retcode_out_err(\
                                prog=sys.executable, args_list=args_list, \
                                        timeout=timeout, merge_err_to_out=True)
            stdout = stdout.splitlines()
        else:
            # collected_output is a list ([retcode, out_err_log])
            # TODO: use wrapper? or parse out for particular codes... 
            assert False, "TO BE Implemented"
    except:
        # ERROR
        os.chdir(cwd)
        return GlobalConstants.TEST_EXECUTION_ERROR
    
    # Parse the result
    subtests_verdicts = {}
    hasfail = False
    hasfail |= (retcode != 0)
    for s in stdout:
        if s.endswith('... FAIL'):
            hasfail = True
            subtests_verdicts[parse_test(s)] = True
        elif s.endswith('... ok'):
            subtests_verdicts[parse_test(s)] = False
    #print(subtests_verdicts)
    os.chdir(cwd)
    return GlobalConstants.FAIL_TEST_VERDICT if hasfail else \
                                            GlobalConstants.PASS_TEST_VERDICT
#~ def python_unittest_runner()
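
A stand-alone sketch of the unittest parsing done above: run the target under python -m unittest <name> -v with stderr merged into stdout, then classify each line ending in "... ok" or "... FAIL". The name normalization mirrors parse_test in the snippet.

# Sketch: run a unittest target verbosely and collect per-subtest verdicts
# (True means the subtest failed).
import subprocess
import sys

def run_unittest(test_name, cwd, timeout=None):
    proc = subprocess.run([sys.executable, "-m", "unittest", test_name, "-v"],
                          cwd=cwd, timeout=timeout,
                          stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    verdicts = {}
    for line in proc.stdout.decode(errors="replace").splitlines():
        name = line.split("...")[0].replace(":", "/").replace(" ", "")
        if line.endswith("... FAIL"):
            verdicts[name] = True
        elif line.endswith("... ok"):
            verdicts[name] = False
    failed = proc.returncode != 0 or any(verdicts.values())
    return failed, verdicts
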
def make_build(repo_root_dir, exe_rel_paths, compiler, flags_list, clean,\
                                                                reconfigure):
    cwd = os.getcwd()
    os.chdir(repo_root_dir)
    if flags_list is None:
        flags_list = ["-DUSING_MUTERIA"]

    else:
        flags_list.append("-DUSING_MUTERIA")
    # try:
    tmp_env = os.environ.copy()
    if compiler is not None:
        tmp_env["CC"] = compiler
    if flags_list is not None:
        tmp_env["CFLAGS"] = " ".join(flags_list)
    args_list = ['build_project.sh']
    if reconfigure:
        args_list.append('1')
    else:
        args_list.append('0')

    if clean:
        args_list.append('1')
    else:
        args_list.append('0')

    retcode, out, _ = DriversUtils.execute_and_get_retcode_out_err(
        prog='bash', args_list=args_list, env=tmp_env, merge_err_to_out=True)

    def print_err(out, msg):
        #out = out.splitlines()
        #print(out, msg)
        logging.error(str(out) + msg)

    if retcode != 0:
        print_err(out, "make")
        os.chdir(cwd)
        return GlobalConstants.COMMAND_FAILURE
    os.chdir(cwd)
    return GlobalConstants.COMMAND_SUCCESS
Example #10
    def _do_instrument_code (self, outputdir, exe_path_map, \
                                        code_builds_factory, \
                                        enabled_criteria, parallel_count=1):
        # Setup
        if os.path.isdir(self.instrumented_code_storage_dir):
            shutil.rmtree(self.instrumented_code_storage_dir)
        os.mkdir(self.instrumented_code_storage_dir)
        if os.path.isdir(self.mutant_data):
            shutil.rmtree(self.mutant_data)
        os.mkdir(self.mutant_data)

        prog = 'mart'

        # get llvm compiler path
        ret, out, err = DriversUtils.execute_and_get_retcode_out_err(\
                                                        prog, ['--version'])
        llvm_compiler_path = None
        for line in out.splitlines():
            line = line.strip()
            if line.startswith('LLVM tools dir:'):
                llvm_compiler_path = line.split()[3]  #[1:-1]
                break

        ERROR_HANDLER.assert_true(llvm_compiler_path is not None, \
                                'Problem getting llvm path for mart', __file__)

        # Build into LLVM
        back_llvm_compiler = 'clang'
        rel_path_map = {}
        exes, _ = code_builds_factory.repository_manager.\
                                                    get_relative_exe_path_map()
        for exe in exes:
            filename = os.path.basename(exe)
            rel_path_map[exe] = os.path.join(self.mutant_data, filename)
        pre_ret, ret, post_ret = code_builds_factory.transform_src_into_dest(\
                        src_fmt=CodeFormats.C_SOURCE,\
                        dest_fmt=CodeFormats.LLVM_BITCODE,\
                        src_dest_files_paths_map=rel_path_map,\
                        compiler=back_llvm_compiler, flags_list=['-g'], \
                        clean_tmp=True, reconfigure=True, \
                        llvm_compiler_path=llvm_compiler_path)
        if ret == common_mix.GlobalConstants.TEST_EXECUTION_ERROR:
            ERROR_HANDLER.error_exit("Program {}.".format(\
                                'LLVM (clang) build failed'), __file__)

        # Update exe_map to reflect bitcode extension
        rel2bitcode = {}
        for r_file, b_file in list(rel_path_map.items()):
            bc = b_file + '.bc'
            ERROR_HANDLER.assert_true(os.path.isfile(bc), \
                                    "Bitcode file not existing: "+bc, __file__)
            rel2bitcode[r_file] = bc

        ERROR_HANDLER.assert_true(len(rel_path_map) == 1, \
                            "Support single bitcode module for now", __file__)

        bitcode_file = rel2bitcode[list(rel2bitcode.keys())[0]]

        # mart params
        bool_param, k_v_params = self._get_default_params()
        if TestCriteria.STRONG_MUTATION in enabled_criteria:
            bool_param['-write-mutants'] = True

        args = [bp for bp, en in list(bool_param.items()) if en]
        for k, v in list(k_v_params.items()):
            if v is not None:
                args += [k, v]
        args.append(bitcode_file)

        # Execute Mart
        cwd = os.getcwd()
        os.chdir(self.mutant_data)
        ret, out, err = DriversUtils.execute_and_get_retcode_out_err(\
                                                                    prog, args)
        os.chdir(cwd)

        if (ret != 0):
            logging.error(out)
            logging.error(err)
            logging.error("\n>> CMD: " + " ".join([prog] + args) + '\n')
            ERROR_HANDLER.error_exit("mart failed'", __file__)

        # write down the rel_path_map
        ERROR_HANDLER.assert_true(not os.path.isfile(\
                self.instrumentation_details), "must not exist here", __file__)
        store_obj = {c.get_str(): {} for c in enabled_criteria}
        for k, v in list(rel_path_map.items()):
            exe_file = os.path.basename(v)
            if TestCriteria.WEAK_MUTATION in enabled_criteria:
                crit_str = TestCriteria.WEAK_MUTATION.get_str()
                store_obj[crit_str][k] = exe_file + '.WM'
            if TestCriteria.MUTANT_COVERAGE in enabled_criteria:
                crit_str = TestCriteria.MUTANT_COVERAGE.get_str()
                store_obj[crit_str][k] = exe_file + '.COV'
            if TestCriteria.STRONG_MUTATION in enabled_criteria:
                crit_str = TestCriteria.STRONG_MUTATION.get_str()
                store_obj[crit_str][k] = exe_file + '.MetaMu'
        common_fs.dumpJSON(store_obj, self.instrumentation_details)
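
A hedged sketch of the version-output parsing used above to locate the LLVM tools directory. The "LLVM tools dir:" line format is taken from the snippet, not from mart's documentation, so treat it as an assumption.

# Sketch: extract the LLVM tools directory from a tool's --version output,
# assuming a line of the form "LLVM tools dir: <path>".
import subprocess

def llvm_tools_dir(tool="mart"):
    proc = subprocess.run([tool, "--version"], stdout=subprocess.PIPE,
                          stderr=subprocess.STDOUT)
    for line in proc.stdout.decode(errors="replace").splitlines():
        line = line.strip()
        if line.startswith("LLVM tools dir:"):
            # take everything after the colon, dropping spaces and quotes
            return line.split(":", 1)[1].strip().strip('"')
    return None
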
Example #11
    def _do_generate_tests (self, exe_path_map, outputdir, \
                                        code_builds_factory, max_time=None):
        # Setup
        if os.path.isdir(self.tests_working_dir):
            shutil.rmtree(self.tests_working_dir)
        os.mkdir(self.tests_working_dir)
        if os.path.isdir(self.tests_storage_dir):
            shutil.rmtree(self.tests_storage_dir)

        prog = 'klee'
        default_sym_args = ['-sym-arg', '5']
        back_llvm_compiler = None  #'clang'

        rel_path_map = {}
        exes, _ = code_builds_factory.repository_manager.\
                                                    get_relative_exe_path_map()
        for exe in exes:
            filename = os.path.basename(exe)
            rel_path_map[exe] = os.path.join(self.tests_working_dir, filename)
        pre_ret, ret, post_ret = code_builds_factory.transform_src_into_dest(\
                        src_fmt=CodeFormats.C_SOURCE,\
                        dest_fmt=CodeFormats.LLVM_BITCODE,\
                        src_dest_files_paths_map=rel_path_map,\
                        compiler=back_llvm_compiler, \
                        clean_tmp=True, reconfigure=True)
        if ret == common_mix.GlobalConstants.TEST_EXECUTION_ERROR:
            ERROR_HANDLER.error_exit("Program {}.".format(\
                                'LLVM build failed'), __file__)

        # Update exe_map to reflect bitcode extension
        rel2bitcode = {}
        for r_file, b_file in list(rel_path_map.items()):
            bc = b_file + '.bc'
            ERROR_HANDLER.assert_true(os.path.isfile(bc), \
                                    "Bitcode file not existing: "+bc, __file__)
            rel2bitcode[r_file] = bc

        ERROR_HANDLER.assert_true(len(rel_path_map) == 1, \
                            "Support single bitcode module for now", __file__)

        bitcode_file = rel2bitcode[list(rel2bitcode.keys())[0]]

        # klee params
        bool_param, k_v_params = self._get_default_params()
        if max_time is not None:
            k_v_params['-max-time'] = str(max_time)

        args = [bp for bp, en in list(bool_param.items()) if en]
        for k, v in list(k_v_params.items()):
            if v is not None:
                args += [k, str(v)]
        args.append(bitcode_file)

        # sym args
        klee_sym_args = default_sym_args
        uc = self.config.get_tool_user_custom()
        if uc is not None:
            post_bc_cmd = uc.POST_TARGET_CMD_ORDERED_FLAGS_LIST
            if post_bc_cmd is not None:
                klee_sym_args = []
                for tup in post_bc_cmd:
                    klee_sym_args += list(tup)
        args += klee_sym_args

        # Execute Klee
        ret, out, err = DriversUtils.execute_and_get_retcode_out_err(\
                                                                    prog, args)

        if (ret != 0):
            logging.error(out)
            logging.error(err)
            logging.error("\n>> CMD: " + " ".join([prog] + args) + '\n')
            ERROR_HANDLER.error_exit("klee failed'", __file__)

        store_obj = {r: os.path.basename(b) for r, b in rel2bitcode.items()}
        common_fs.dumpJSON(store_obj, self.test_details_file)

    #~ def _do_generate_tests()


#~ class CustomTestcases
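
Both the Mart and KLEE snippets assemble their command line the same way: boolean flags come from a dict of flag to enabled, key/value options are appended as separate tokens, and the bitcode file goes last. A small sketch of that convention, with purely illustrative parameter names:

# Sketch: build a command line from the bool_param / k_v_params convention.
def build_args(bool_param, k_v_params, bitcode_file):
    args = [flag for flag, enabled in bool_param.items() if enabled]
    for key, value in k_v_params.items():
        if value is not None:
            args += [key, str(value)]
    args.append(bitcode_file)
    return args

# Example (illustrative names):
#   build_args({"-write-mutants": True}, {"-max-time": 120}, "prog.bc")
#   -> ['-write-mutants', '-max-time', '120', 'prog.bc']
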
Example #12
    def get_instrumented_executable_paths_map(self, enabled_criteria):
        using_gdb_wrapper = self.driver_config.get_use_gdb_wrapper()
        if using_gdb_wrapper:
            # use wrapper if gdb is installed
            using_gdb_wrapper = DriversUtils.check_tool(prog='gdb', \
                                                    args_list=['--version'], \
                                                    expected_exit_codes=[0])

            if using_gdb_wrapper:
                # XXX: gdb has issues running programs inside Docker,
                # unless the container is run with the following arguments:
                #
                # docker run --cap-add=SYS_PTRACE \
                #               --security-opt seccomp=unconfined ...
                #
                # We check that it is fine by testing on echo
                ret, o_e, _ = DriversUtils.execute_and_get_retcode_out_err(
                                        prog='gdb', \
                                        args_list=['--batch-silent', \
                                                    '--quiet',
                                                    '--return-child-result',
                                                    '-ex', 'run',
                                                    '--args', 'echo'], \
                                        )#out_on=False, err_on=False)
                using_gdb_wrapper = (ret == 0)
                if not using_gdb_wrapper:
                    logging.warning("use gdb is enabled but call to gdb fails"
                                    " (retcode {}) with msg: {}".format(
                                        ret, o_e))
            else:
                logging.warning("use gdb is enabled but gdb is not installed")

        crit_to_exes_map = {}
        obj = common_fs.loadJSON(self.instrumentation_details)
        #exes = [p for _, p in list(obj.items())]
        for name in obj:
            obj[name] = os.path.join(self.instrumented_code_storage_dir, \
                                                                    obj[name])

        if using_gdb_wrapper:
            # Using GDB WRAPPER
            with open(self.gcov_gdb_wrapper_template) as f:
                template_str = f.read()
            single_exe = obj[list(obj)[0]]
            template_str = template_str.replace(\
                                'MUTERIA_GCOV_PROGRAMEXE_PATHNAME', single_exe)
            with open(self.gcov_gdb_wrapper_sh, 'w') as f:
                f.write(template_str)
            shutil.copymode(single_exe, self.gcov_gdb_wrapper_sh)
            obj[list(obj)[0]] = self.gcov_gdb_wrapper_sh

        exes_map = obj
        for criterion in enabled_criteria:
            crit_to_exes_map[criterion] = exes_map

        #logging.debug("DBG: {} {} {}".format(\
        #                  "Using gdb wrapper is {}.".format(using_gdb_wrapper), \
        #                  "crit_to_exe_map is {}.".format(crit_to_exes_map), \
        #                  "wraper path is {}!".format(self.gcov_gdb_wrapper_sh)))

        return crit_to_exes_map
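
A stand-alone sketch of the gdb sanity check above: confirm that gdb is installed, then run a trivial program (echo) under gdb to detect environments where ptrace is blocked, such as containers started without --cap-add=SYS_PTRACE.

# Sketch: decide whether a gdb-based wrapper can be used.
import shutil
import subprocess

def gdb_wrapper_usable():
    if shutil.which("gdb") is None:
        return False
    probe = subprocess.run(["gdb", "--batch-silent", "--quiet",
                            "--return-child-result", "-ex", "run",
                            "--args", "echo"],
                           stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
    return probe.returncode == 0   # non-zero usually means ptrace is blocked
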
Example #13
    def execute_test(cls, executable_file, test_file, env_vars, stdin=None, \
                                        must_exist_dir_list=None, \
                                        timeout=None, collected_output=None, \
                                        custom_replay_tool_binary_dir=None):

        prog, args = cls._get_replay_prog_args(executable_file, test_file, \
                                                custom_replay_tool_binary_dir)

        # klee-replay may create files or dir. in KLEE version with LLVM-3.4,
        # those are created in a temporary dir set as <cwd>.temps
        # XXX XXX. make sure each test has its own
        test_work_dir = test_file + ".execdir"
        klee_replay_temps = test_work_dir + '.temps'
        for d in (test_work_dir, klee_replay_temps):
            if os.path.isdir(d):
                try:
                    shutil.rmtree(d)
                except PermissionError:
                    cls._dir_chmod777(d)
                    shutil.rmtree(d)

        if not os.path.isdir(test_work_dir):
            os.mkdir(test_work_dir)

        if must_exist_dir_list is not None:
            for d in must_exist_dir_list:
                td = os.path.join(test_work_dir, d)
                if not os.path.isdir(td):
                    os.makedirs(td)

        # XXX Execution setup
        tmp_env = os.environ.copy()
        if env_vars is not None:
            #for e, v in env_vars.items():
            #    tmp_env[e] = v
            tmp_env.update(env_vars)

        timeout_return_codes = cls.timedout_retcodes + \
                                        DriversUtils.EXEC_TIMED_OUT_RET_CODE

        if timeout is not None:
            tmp_env['KLEE_REPLAY_TIMEOUT'] = str(timeout)
            kt_over = 10  # extra grace added to the klee-replay timeout
            timeout += kt_over
        else:
            # DBG
            logging.warning("@KTEST: calling ktest execution without timeout.")

        # XXX Get the parsing regexes to use
        retcode, out, err = DriversUtils.execute_and_get_retcode_out_err(\
                                prog=prog, args_list=['--help'], \
                                merge_err_to_out=True)
        clean_regex, status_regex = cls._get_regexes(out, \
                                                        clean_everything=True)

        # XXX Execute the ktest
        #logging.debug("DBG: test_work_dir is {}. its content is {}".format(
        #                    test_work_dir, list(os.listdir(test_work_dir))))
        #if collected_output is not None:

        # XXX: Use stdbuf to line-buffer the output and avoid mixing of
        # stderr between klee-replay and the executed prog
        use_stdbuf = True
        if use_stdbuf:
            args = ["--output=L", "--error=L", prog] + args
            prog = "stdbuf"
            # TODO: check that stdbuf is installed

        retcode, out, err = DriversUtils.execute_and_get_retcode_out_err(\
                                prog=prog, args_list=args, env=tmp_env, \
                                stdin=stdin, \
                                timeout=timeout, timeout_grace_period=5, \
                                merge_err_to_out=True, cwd=test_work_dir)
        retcode, out, exit_status = cls._remove_output_noise(retcode, out, \
                                                  clean_regex, status_regex)
        # In klee-replay, when exit_status here is not None, retcode is 0
        # When there is an issue, like timeout, exit_status is None and
        # retcode has the code of the issue
        if exit_status is None:
            exit_status = retcode
        if collected_output is not None:
            collected_output.extend((exit_status, out, \
                                     (retcode in timeout_return_codes or \
                            retcode in DriversUtils.EXEC_TIMED_OUT_RET_CODE)))
        #else:
        #    retcode, out, err = DriversUtils.execute_and_get_retcode_out_err(\
        #                        prog=prog, args_list=args, env=tmp_env, \
        #                        timeout=timeout, timeout_grace_period=5, \
        #                                        out_on=False, err_on=False, \
        #                                        cwd=test_work_dir)

        # XXX: Go back to previous CWD
        for d in (test_work_dir, klee_replay_temps):
            if os.path.isdir(d):
                try:
                    shutil.rmtree(d)
                except PermissionError:
                    cls._dir_chmod777(d)
                    shutil.rmtree(d)

        #if must_exist_dir_list is not None:
        #    try:
        #        shutil.rmtree(test_work_dir)
        #    except PermissionError:
        #        cls._dir_chmod777(test_work_dir)
        #        shutil.rmtree(test_work_dir)

        if retcode in timeout_return_codes + \
                                    DriversUtils.EXEC_SEGFAULT_OUT_RET_CODE:
            verdict = common_mix.GlobalConstants.FAIL_TEST_VERDICT
        else:
            verdict = common_mix.GlobalConstants.PASS_TEST_VERDICT

        return verdict
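
A minimal sketch of the stdbuf wrapping applied above, which line-buffers the wrapped command's streams so klee-replay's own messages are less likely to interleave mid-line with the replayed program's output once the streams are merged.

# Sketch: prefix a command with stdbuf so stdout/stderr are line buffered.
import shutil

def with_line_buffering(prog, args):
    if shutil.which("stdbuf") is None:      # fall back silently if unavailable
        return prog, args
    return "stdbuf", ["--output=L", "--error=L", prog] + args

# Usage: prog, args = with_line_buffering(prog, args) before executing.
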
Example #14
    def _do_instrument_code (self, exe_path_map, \
                                        code_builds_factory, \
                                        enabled_criteria, parallel_count=1):
        # Setup
        if os.path.isdir(self.instrumented_code_storage_dir):
            shutil.rmtree(self.instrumented_code_storage_dir)
        os.mkdir(self.instrumented_code_storage_dir)
        if os.path.isdir(self.gc_files_dir):
            try:
                shutil.rmtree(self.gc_files_dir)
            except PermissionError:
                self._dir_chmod777(self.gc_files_dir)
                shutil.rmtree(self.gc_files_dir)
        os.mkdir(self.gc_files_dir, mode=0o777)

        prog = 'gcc'
        if self.custom_binary_dir is not None:
            prog = os.path.join(self.custom_binary_dir, prog)
            ERROR_HANDLER.assert_true(os.path.isfile(prog), \
                            "The tool {} is missing from the specified dir {}"\
                                        .format(os.path.basename(prog), \
                                            self.custom_binary_dir), __file__)

        flags = [
            '-g', '--coverage', '-fprofile-dir=' + self.gc_files_dir, '-O0'
        ]
        additionals = ["-fkeep-inline-functions"]
        #additionals.append('-fprofile-abs-path')
        #additionals.append('-fprofile-prefix-path='+code_builds_factory\
        #                        .repository_manager.get_repository_dir_path())

        # get gcc version
        ret, out, err = DriversUtils.execute_and_get_retcode_out_err(\
                                            prog, args_list=['-dumpversion'])
        ERROR_HANDLER.assert_true(ret == 0, "'gcc -dumpversion' failed'")

        # if version > 6.5
        if int(out.split('.')[0]) >= 6:
            if int(out.split('.')[0]) > 6 or int(out.split('.')[1]) > 5:
                additionals += ["-fkeep-static-functions"]

        flags += additionals

        rel_rel_path_map = {}
        rel_abs_path_map = {}
        exes, _ = code_builds_factory.repository_manager.\
                                                    get_relative_exe_path_map()
        ERROR_HANDLER.assert_true(len(exes) == 1, \
                                "Only single exe supported in GCOV", __file__)
        for exe in exes:
            filename = os.path.basename(exe)
            rel_rel_path_map[exe] = filename
            rel_abs_path_map[exe] = os.path.join(\
                                self.instrumented_code_storage_dir, filename)

        self.instrument_callback_obj = self.InstrumentCallbackObject()
        self.instrument_callback_obj.set_post_callback_args(\
                                        (self.gc_files_dir, rel_abs_path_map))
        pre_ret, ret, post_ret = code_builds_factory.transform_src_into_dest(\
                        src_fmt=CodeFormats.C_SOURCE,\
                        dest_fmt=CodeFormats.NATIVE_CODE,\
                        src_dest_files_paths_map=None,\
                        compiler=prog, flags_list=flags, clean_tmp=True, \
                        reconfigure=True, \
                        callback_object=self.instrument_callback_obj)

        # Check
        if ret == common_mix.GlobalConstants.COMMAND_FAILURE:
            ERROR_HANDLER.error_exit("Program {} {}.".format(prog,\
                                        'build failed'), __file__)

        # write down the rel_path_map
        ERROR_HANDLER.assert_true(not os.path.isfile(\
                self.instrumentation_details), "must not exist here", __file__)
        common_fs.dumpJSON(rel_rel_path_map, self.instrumentation_details)
Example #15
    def _collect_temporary_coverage_data(self, criteria_name_list, \
                                            test_execution_verdict, \
                                            used_environment_vars, \
                                            result_dir_tmp, \
                                            testcase):
        ''' get gcov files from gcda files into result_dir_tmp
        '''
        prog = 'gcov'
        if self.custom_binary_dir is not None:
            prog = os.path.join(self.custom_binary_dir, prog)
            ERROR_HANDLER.assert_true(os.path.isfile(prog), \
                            "The tool {} is missing from the specified dir {}"\
                                        .format(os.path.basename(prog), \
                                            self.custom_binary_dir), __file__)

        cov2flags = {
            TestCriteria.STATEMENT_COVERAGE: [],
            TestCriteria.BRANCH_COVERAGE: ['-b', '-c'],
            TestCriteria.FUNCTION_COVERAGE: ['-f'],
        }

        args_list = []
        for criterion in criteria_name_list:
            args_list += cov2flags[criterion]

        gcda_files = self._get_gcda_list()

        raw_filename_list = [os.path.splitext(f)[0] for f in gcda_files]
        args_list += raw_filename_list

        if len(gcda_files) > 0:
            # TODO: When gcov generates coverage for different files with the
            # same filename but located in different dirs, avoid overriding.
            # Go where the gcov will be looked for
            cwd = os.getcwd()
            os.chdir(self.gc_files_dir)

            # collect gcda (gcno)
            r, _, err_str = DriversUtils.execute_and_get_retcode_out_err(\
                                        prog=prog, \
                                        args_list=args_list, out_on=False, \
                                        err_on=True, merge_err_to_out=False)

            os.chdir(cwd)

            if r != 0:  # or err_str:
                ERROR_HANDLER.error_exit("Program {} {}.".format(prog,\
                        'failed while collecting coverage. ')+
                        "The error msg is {}. \nThe command:\n{}".format(\
                                err_str, " ".join([prog]+args_list)), __file__)

            dot_gcov_file_list = self._get_gcov_list()
            # Sources of interest
            _, src_map = self.code_builds_factory.repository_manager.\
                                                    get_relative_exe_path_map()
            base_dot_gcov = [os.path.basename(f) for f in dot_gcov_file_list]
            base_raw = [os.path.basename(f) for f in raw_filename_list]
            interest = [os.path.basename(s) for s, o in src_map.items()]
            interest = [s+'.gcov' for s in interest if os.path.splitext(s)[0] \
                                                                in base_raw]
            missed = set(interest) - set(base_dot_gcov)

            # FIXME: Check for partial failure (compare number of gcno and gcov)
            ERROR_HANDLER.assert_true(len(missed) == 0,
                    "{} did not generate the '.gcov' files {}.".format(\
                        prog, missed)+ "\nIts stderr is {}.\n CMD: {}".format(\
                            err_str, " ".join([prog]+args_list)), __file__)

            # delete gcda
            for gcda_f in gcda_files:
                os.remove(gcda_f)
        else:
            if not self.driver_config.get_allow_missing_coverage():
                ERROR_HANDLER.error_exit(\
                    "Testcase '{}' did not generate gcda, {}".format(\
                        testcase, "when allow missing coverage is disabled"))
            dot_gcov_file_list = []

        common_fs.dumpJSON(dot_gcov_file_list, \
                                os.path.join(result_dir_tmp,\
                                                self.gcov_files_list_filename))
Example #16
    def _call_generation_run(self, runtool, args):
        ## locate max-time
        timeout_grace_period = 600
        max_time = None
        cur_max_time = float(self.get_value_in_arglist(args, 'max-time'))
        #to give time to klee add FRAMEWORK GRACE
        max_time = cur_max_time + \
                                self.config.TEST_GEN_TIMEOUT_FRAMEWORK_GRACE

        # set stack to unlimited
        stack_ulimit_soft, stack_ulimit_hard = \
                                    resource.getrlimit(resource.RLIMIT_STACK)
        if stack_ulimit_soft != -1:
            resource.setrlimit(resource.RLIMIT_STACK, (-1, stack_ulimit_hard))

        # Execute Klee
        if self.driver_config.get_suppress_generation_stdout():
            ret, out, err = DriversUtils.execute_and_get_retcode_out_err(\
                                    runtool, args_list=args, timeout=max_time,\
                                    timeout_grace_period=timeout_grace_period, \
                                    out_on=False, err_on=True, \
                                    merge_err_to_out=False)
            out, err = err, out
        else:
            ret, out, err = DriversUtils.execute_and_get_retcode_out_err(\
                                    runtool, args_list=args, timeout=max_time,\
                                    timeout_grace_period=timeout_grace_period)
            #out_on=False, err_on=False)
        '''o_d_dbg = self.get_value_in_arglist(args, "output-dir") #DBG
        if os.path.isdir(o_d_dbg): #DBG
            shutil.rmtree(o_d_dbg) #DBG
        import subprocess #DBG
        p = subprocess.Popen([runtool]+args, env=None, cwd=None, \
                                                            #close_fds=True, \
                                                        stdin=None, \
                                                        stderr=subprocess.STDOUT, \
                                                        stdout=None, shell=True, \
                                                        preexec_fn=os.setsid) #DBG
        try: #DBG
            stdout, stderr = p.communicate(timeout=max_time) #DBG
        except subprocess.TimeoutExpired: #DBG
            stdout, stderr = p.communicate(timeout=max_time) #DBG
        #os.system(" ".join([runtool]+args)) #DBG'''

        # restore stack
        if stack_ulimit_soft != -1:
            resource.setrlimit(resource.RLIMIT_STACK, \
                                        (stack_ulimit_soft, stack_ulimit_hard))

        if (ret != 0 and ret not in DriversUtils.EXEC_TIMED_OUT_RET_CODE \
                and not out.rstrip().endswith(": ctrl-c detected, exiting.")):
            logging.error(out)
            logging.error(err)
            logging.error("\n>> CMD: " + " ".join([runtool] + args) + '\n')
            ERROR_HANDLER.error_exit(
                "call to klee testgen failed, " +
                "error code is {}".format(ret), __file__)

        if self.driver_config.get_verbose_generation():
            logging.debug(out)
            logging.debug(err)
            logging.debug("\nCMD: " + " ".join([runtool] + args))
Example #17
    def convert_code(self, src_fmt, dest_fmt, file_src_dest_map, \
                                                repository_manager, **kwargs):
        # TODO: add can_fail parameter, in kwarg, for case like mutant
        # compilation that can fail but should not terminate execution
        # but return a specific value
        ERROR_HANDLER.assert_true(src_fmt in self.src_formats, \
                                    "Unsupported src format", __file__)

        # post build callbacks
        class CopyCallbackObject(DefaultCallbackObject):
            def after_command(self):
                if self.op_retval == \
                                    common_mix.GlobalConstants.COMMAND_FAILURE:
                    return common_mix.GlobalConstants.COMMAND_FAILURE
                for sf, df in list(file_src_dest_map.items()):
                    abs_sf = repository_manager.repo_abs_path(sf)
                    if not os.path.isfile(abs_sf):
                        ERROR_HANDLER.error_exit(\
                                "an expected file missing after build: "+\
                                                            abs_sf, __file__)
                    if df is not None:
                        shutil.copy2(abs_sf, df)
                return None

            #~ def after_command()

        #~ class CopyCallbackObject

        # Should not have callback_object and file_src_dest_map at the
        # same time
        callbak_obj_key = 'callback_object'
        if callbak_obj_key in kwargs:
            ERROR_HANDLER.assert_true(file_src_dest_map is None,\
                            "file_src_dest_map must be None "+ \
                            "if callback_object is passed", __file__)
        elif file_src_dest_map is not None and len(file_src_dest_map) > 0:
            kwargs[callbak_obj_key] = CopyCallbackObject()
        else:
            kwargs[callbak_obj_key] = None

        # Actual Processing
        if (dest_fmt == ccs.CodeFormats.C_PREPROCESSED_SOURCE):
            if (src_fmt == ccs.CodeFormats.C_SOURCE):
                ERROR_HANDLER.error_exit("Must Implement1", __file__)
            else:
                for src, dest in list(file_src_dest_map.items()):
                    shutil.copy2(src, dest)
        if (dest_fmt == ccs.CodeFormats.LLVM_BITCODE):
            # XXX: This build overrides passed clean_tmp and reconfigure
            # also overrides the compiler if wllvm is found
            # and does not use the callback object
            # and require file_src_dest_map to have the place to store
            # generated .bc by specifying the corresponding native file.
            # EX: {x.c: /path/to/main} passed to have /path/to/main.bc
            # generated

            spec_compiler = kwargs['compiler'] if 'compiler' in kwargs \
                                                                    else None
            # special kwargs
            spec_llvm_compiler_path = None
            if 'llvm_compiler_path' in kwargs:
                spec_llvm_compiler_path = kwargs['llvm_compiler_path']
                del kwargs['llvm_compiler_path']

            if spec_compiler is not None:
                bak_llvm_compiler = os.environ['LLVM_COMPILER']
                os.environ['LLVM_COMPILER'] = spec_compiler
            if spec_llvm_compiler_path is not None:
                bak_llvm_compiler_path = os.environ['LLVM_COMPILER_PATH']
                os.environ['LLVM_COMPILER_PATH'] = spec_llvm_compiler_path

            #1. Ensure wllvm is installed (For now use default llvm compiler)
            has_wllvm = DriversUtils.check_tool('wllvm', ['--version'])
            ERROR_HANDLER.assert_true(has_wllvm, 'wllvm not found', __file__)

            # tmp['LLVM_COMPILER_PATH'] = ...
            kwargs['compiler'] = 'wllvm'
            kwargs['clean_tmp'] = True
            kwargs['reconfigure'] = True

            # Normal build followed by executable copying
            pre_ret, ret, post_ret = repository_manager.build_code(**kwargs)
            ERROR_HANDLER.assert_true(\
                    ret != common_mix.GlobalConstants.COMMAND_FAILURE and\
                    pre_ret != common_mix.GlobalConstants.COMMAND_FAILURE and\
                    post_ret != common_mix.GlobalConstants.COMMAND_FAILURE,\
                                        "Build LLVM bitcode failed!", __file__)

            # extract bitcode from copied executables and remove non bitcode
            if file_src_dest_map is not None:
                for src, dest in list(file_src_dest_map.items()):
                    ret, out, err = \
                            DriversUtils.execute_and_get_retcode_out_err( \
                                                        "extract-bc", [dest])
                    ERROR_HANDLER.assert_true(ret == 0, \
                                        '{}. \n# OUT: {}\n# ERR: {}'.format(\
                                    'extract-bc failed', out, err), __file__)
                    os.remove(dest)

            if spec_compiler is not None:
                os.environ['LLVM_COMPILER'] = bak_llvm_compiler
            if spec_llvm_compiler_path is not None:
                os.environ['LLVM_COMPILER_PATH'] = bak_llvm_compiler_path

            # Clean build
            kwargs['compiler'] = None
            pre_ret, ret, post_ret = repository_manager.build_code(**kwargs)

        if (dest_fmt == ccs.CodeFormats.OBJECT_FILE):
            ERROR_HANDLER.error_exit("Must Implement3", __file__)
        if (dest_fmt == ccs.CodeFormats.NATIVE_CODE):
            pre_ret, ret, post_ret = repository_manager.build_code(**kwargs)
        return pre_ret, ret, post_ret
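
A hedged sketch of the wllvm bitcode path used above: the build runs with CC=wllvm (wllvm picks the underlying compiler from the LLVM_COMPILER / LLVM_COMPILER_PATH environment variables), and extract-bc <exe> then writes <exe>.bc next to the linked executable. The make invocation stands in for whatever build command the repository manager actually runs.

# Sketch: build through wllvm and pull the LLVM bitcode out of the linked
# executable with extract-bc.
import os
import subprocess

def build_bitcode(repo_dir, exe_path, llvm_compiler="clang",
                  llvm_compiler_path=None):
    env = os.environ.copy()
    env["LLVM_COMPILER"] = llvm_compiler
    if llvm_compiler_path is not None:
        env["LLVM_COMPILER_PATH"] = llvm_compiler_path
    env["CC"] = "wllvm"
    subprocess.run(["make"], cwd=repo_dir, env=env, check=True)
    subprocess.run(["extract-bc", exe_path], check=True)
    return exe_path + ".bc"   # extract-bc's default output name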