def run(project_path, program_path):
    emitter.title("Repairing Program")
    ## Generate all possible solutions by running the synthesizer.
    time_check = time.time()
    # satisfied = utilities.check_budget(values.DEFAULT_TIME_DURATION)
    initial_patch_list = generator.generate_patch_set(project_path)
    result_list = parallel.remove_duplicate_patches_parallel(initial_patch_list)
    filtered_patch_list = []
    for result in result_list:
        is_redundant, index = result
        patch = initial_patch_list[index]
        if not is_redundant:
            filtered_patch_list.append(patch)

    index_map = generator.generate_patch_index_map(filtered_patch_list)
    writer.write_as_json(index_map, definitions.FILE_PATCH_RANK_INDEX)
    for patch in filtered_patch_list:
        patch_constraint_str = app.generator.generate_formula_from_patch(patch).serialize()
        patch_index = utilities.get_hash(patch_constraint_str)
        if patch_index in values.LIST_PATCH_SCORE:
            emitter.warning("\tcollision detected in patch score map")
        values.LIST_PATCH_SCORE[patch_index] = 0
        values.LIST_PATCH_OVERAPPROX_CHECK[patch_index] = False
        values.LIST_PATCH_UNDERAPPROX_CHECK[patch_index] = False
        values.LIST_PATCH_SPACE[patch_index] = generator.generate_patch_space(patch)
    emitter.note("\t\t|P|=" + str(utilities.count_concrete_patches(filtered_patch_list)) +
                 ":" + str(len(filtered_patch_list)))
    if values.DEFAULT_PATCH_TYPE == values.OPTIONS_PATCH_TYPE[1]:
        values.COUNT_PATCH_START = utilities.count_concrete_patches(filtered_patch_list)
        values.COUNT_TEMPLATE_START = len(filtered_patch_list)
    else:
        values.COUNT_PATCH_START = len(filtered_patch_list)
    duration = format((time.time() - time_check) / 60, '.3f')
    values.TIME_TO_GENERATE = str(duration)
    definitions.FILE_PATCH_SET = definitions.DIRECTORY_OUTPUT + "/patch-set-gen"
    writer.write_patch_set(filtered_patch_list, definitions.FILE_PATCH_SET)
    if values.CONF_ONLY_GEN:
        return
    if values.DEFAULT_REDUCE_METHOD == "cpr":
        run_cpr(program_path, filtered_patch_list)
    elif values.DEFAULT_REDUCE_METHOD == "cegis":
        run_cegis(program_path, project_path, filtered_patch_list)

    values.COUNT_PATHS_EXPLORED_GEN = len(concolic.list_path_explored)
    values.COUNT_PATHS_DETECTED = len(concolic.list_path_detected)
    values.COUNT_PATHS_SKIPPED = len(concolic.list_path_infeasible)
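# Note on the pool-size message above (illustrative numbers only): a template
# patch such as "x < c" with c ranging over ten constants would print as
#   |P|=10:1   -- ten concrete patches instantiated from one template.
# Correspondingly, when DEFAULT_PATCH_TYPE selects the template representation
# (OPTIONS_PATCH_TYPE[1]), COUNT_PATCH_START holds the concrete count and
# COUNT_TEMPLATE_START the number of templates; otherwise only the list length
# is recorded.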
def collect_trace(file_path, project_path):
    """
    This function reads the output log of a KLEE concolic execution and
    extracts the instruction trace.
    """
    emitter.normal("\textracting instruction trace")
    list_trace = list()
    if os.path.exists(file_path):
        with open(file_path, 'r') as trace_file:
            for line in trace_file:
                if '[klee:trace]' in line:
                    if project_path in line:
                        trace_line = str(line.replace("[klee:trace] ", ''))
                        trace_line = trace_line.strip()
                        source_path, line_number = trace_line.split(":")
                        source_path = os.path.abspath(source_path)
                        trace_line = source_path + ":" + str(line_number)
                        if (not list_trace) or (list_trace[-1] != trace_line):
                            list_trace.append(trace_line)
    if values.CONF_LOC_PATCH:
        if values.CONF_LOC_PATCH in list_trace:
            emitter.note("\t\t[note] patch location detected in trace")
            values.COUNT_HIT_PATCH_LOC = values.COUNT_HIT_PATCH_LOC + 1
    if values.CONF_LOC_BUG:
        if values.CONF_LOC_BUG in list_trace:
            emitter.note("\t\t[note] fault location detected in trace")
            values.COUNT_HIT_BUG_LOG = values.COUNT_HIT_BUG_LOG + 1
    if values.CONF_LOC_LIST_CRASH:
        if not set(values.CONF_LOC_LIST_CRASH).isdisjoint(list_trace):
            emitter.note("\t\t[note] a crash location detected in trace")
            values.COUNT_HIT_CRASH_LOC = values.COUNT_HIT_CRASH_LOC + 1
    is_crash = collect_crash_point(values.FILE_MESSAGE_LOG)
    if is_crash:
        values.IS_CRASH = True
        values.COUNT_HIT_CRASH = values.COUNT_HIT_CRASH + 1
        emitter.note("\t\t[note] program crashed")
    else:
        values.IS_CRASH = False
        emitter.note("\t\t[note] program did not crash")
    return list_trace
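# Sketch of the trace lines collect_trace() consumes (paths are hypothetical).
# Only lines tagged "[klee:trace]" that mention the project path are kept, the
# "<source>:<line>" part is normalised to an absolute path, and consecutive
# duplicates are collapsed:
#
#   [klee:trace] /data/project/src/foo.c:42    -> "/data/project/src/foo.c:42"
#   [klee:trace] /data/project/src/foo.c:42    -> dropped (same as previous entry)
#   [klee:trace] /usr/include/string.h:70      -> dropped (outside project_path)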
def load_component_list():
    emitter.normal("loading custom/general components")
    # base_list = ["equal.smt2", "not-equal.smt2", "less-than.smt2", "less-or-equal.smt2"]
    base_list = []
    if definitions.DIRECTORY_TESTS in values.CONF_PATH_PROJECT:
        base_list = []
    gen_comp_files = []
    os.chdir(definitions.DIRECTORY_COMPONENTS)
    if values.CONF_GENERAL_COMP_LIST and not values.CONF_ALL_COMPS:
        comp_list = list(set(values.CONF_GENERAL_COMP_LIST + base_list))
        for component_name in comp_list:
            gen_comp_files.append(Path(component_name))
            emitter.note("\tloading component: " + str(component_name))
    else:
        component_file_list = os.listdir(definitions.DIRECTORY_COMPONENTS)
        for comp_file in component_file_list:
            if ".smt2" in comp_file:
                if any(x in comp_file for x in [
                        "logical-not", "post-decrement", "post-increment",
                        "minus", "constant", "assignment", "sequence",
                        "greater", "remainder"
                ]):
                    continue
                gen_comp_files.append(Path(comp_file))
                emitter.note("\tloading component: " + str(comp_file))
    gen_comp_files = list(set(gen_comp_files))
    general_components = synthesis.load_components(gen_comp_files)

    proj_comp_files = []
    os.chdir(values.CONF_PATH_PROJECT)
    for component_name in values.CONF_CUSTOM_COMP_LIST:
        proj_comp_files.append(Path(component_name))
        emitter.note("\tloading component: " + str(component_name))
    project_components = synthesis.load_components(proj_comp_files)

    values.LIST_COMPONENTS = project_components + general_components
    values.COUNT_COMPONENTS = len(values.LIST_COMPONENTS)
    values.COUNT_COMPONENTS_CUS = len(project_components)
    values.COUNT_COMPONENTS_GEN = len(general_components)
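# Illustration (component names are hypothetical except those mentioned above):
# with CONF_GENERAL_COMP_LIST = ["less-than.smt2", "addition.smt2"] and
# CONF_CUSTOM_COMP_LIST = ["my-predicate.smt2"], the two general components are
# read from DIRECTORY_COMPONENTS, the custom one from the project root, and the
# counters end up as COUNT_COMPONENTS=3, COUNT_COMPONENTS_GEN=2,
# COUNT_COMPONENTS_CUS=1.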
def select_new_input(patch_list=None):
    """
    This function selects a new path for the next concolic execution and
    generates the inputs that satisfy the path.
    log_path: log file of the previous concolic execution that captures PPC
    project_path: root directory of the program, used to filter PPC from libraries
    """
    logger.info("generating new input for new path")
    global list_path_explored, list_path_inprogress, count_discovered
    # input_file_byte_list = list()
    # input_file_stat_byte_list = list()
    generated_path_list = values.LIST_GENERATED_PATH
    var_expr_map = reader.collect_symbolic_expression(values.FILE_EXPR_LOG)
    # generated_path_list = generate_new_symbolic_paths(constraint_list)
    # list_path_explored = list(set(list_path_explored + current_path_list))
    selected_patch = None
    patch_constraint = TRUE
    new_path_count = 0
    for (control_loc, generated_path, ppc_len), arg_list, poc_path, bin_path in generated_path_list:
        path_str = str(generated_path.serialize())
        if path_str not in (list_path_detected + list_path_explored):
            reach_patch_loc = 100 - path_str.count("angelic!")
            reach_obs_loc = 100 - path_str.count("obs!")
            ppc_len = 10000 - ppc_len
            list_path_inprogress.append(
                (control_loc, generated_path, ppc_len, reach_patch_loc,
                 reach_obs_loc, arg_list, poc_path, bin_path))
            list_path_detected.append(str(generated_path.serialize()))
            new_path_count = new_path_count + 1
    count_discovered = count_discovered + new_path_count
    emitter.highlight("\tidentified " + str(new_path_count) + " new path(s)")
    emitter.highlight("\ttotal discovered: " + str(count_discovered) + " path(s)")
    emitter.highlight("\ttotal remaining: " + str(len(list_path_inprogress)) + " path(s)")
    emitter.highlight("\ttotal infeasible: " + str(len(list_path_infeasible)) + " path(s)")
    if not list_path_inprogress:
        emitter.note("\t\tCount paths explored: " + str(len(list_path_explored)))
        emitter.note("\t\tCount paths remaining: " + str(len(list_path_inprogress)))
        return None, None, patch_list, None, None, None
    values.LIST_GENERATED_PATH = []
    patch_constraint = None
    selected_new_path = ""
    selected_control_loc = ""
    if patch_list:
        while not patch_constraint:
            emitter.normal("\tfinding a feasible path for current patch set")
            if not list_path_inprogress:
                emitter.note("\t\tCount paths explored: " + str(len(list_path_explored)))
                emitter.note("\t\tCount paths remaining: " + str(len(list_path_inprogress)))
                return None, None, patch_list, None, None, None
            selected_new_path, selected_control_loc, argument_list, poc_path, bin_path = \
                select_new_path_condition()
            patch_constraint = select_patch_constraint_for_input(patch_list, selected_new_path)
            if patch_constraint:
                list_path_explored.append(str(selected_new_path.serialize()))
                if is_sat(And(selected_new_path, patch_constraint)):
                    selected_new_path = And(selected_new_path, patch_constraint)
                else:
                    emitter.warning("\t[warning] no model generated")
            else:
                list_path_infeasible.append(str(selected_new_path.serialize()))
    else:
        selected_new_path, selected_control_loc, argument_list, poc_path, bin_path = \
            select_new_path_condition()
        list_path_explored.append(str(selected_new_path.serialize()))
    emitter.highlight("\tSelected control location: " + selected_control_loc)
    emitter.highlight("\tSelected path: " + str(selected_new_path))
    emitter.highlight("\tSelected binary: " + str(bin_path))
    emitter.highlight("\tSelected arguments for mutation: " + str(argument_list))
    if poc_path:
        emitter.highlight("\tSelected seed file: " + str(poc_path))

    input_arg_list, input_var_list = generator.generate_new_input(
        selected_new_path, argument_list, poc_path)
    if input_arg_list is None and input_var_list is None:
        return None, None, patch_list, argument_list, poc_path, bin_path
    return input_arg_list, input_var_list, patch_list, argument_list, poc_path, bin_path
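# Ranking sketch for list_path_inprogress (numbers are made up): the scores are
# stored inverted, presumably so that smaller values rank first when the next
# path is picked.  For a path whose serialized constraint mentions "angelic!"
# three times and "obs!" once, with an original ppc_len of 250:
#   reach_patch_loc = 100 - 3      # 97
#   reach_obs_loc   = 100 - 1      # 99
#   ppc_len         = 10000 - 250  # 9750
# select_new_path_condition() then chooses among these entries, presumably
# according to the configured selection strategy (CONF_SELECTION_STRATEGY).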
def run_cpr(program_path, patch_list):
    emitter.sub_title("Evaluating Patch Pool")
    values.CONF_TIME_CHECK = None
    satisfied = utilities.check_budget(values.DEFAULT_TIME_DURATION)
    if satisfied:
        emitter.warning("\t[warning] ending due to timeout of " +
                        str(values.DEFAULT_TIME_DURATION) + " minutes")
    iteration = 0
    assertion_template = values.SPECIFICATION_TXT
    binary_dir_path = "/".join(program_path.split("/")[:-1])
    while not satisfied and len(patch_list) > 0:
        if iteration == 0:
            test_input_list = values.LIST_TEST_INPUT
            seed_id = 0
            for argument_list in test_input_list:
                time_check = time.time()
                poc_path = None
                iteration = iteration + 1
                seed_id = seed_id + 1
                values.ITERATION_NO = iteration
                klee_out_dir = binary_dir_path + "/klee-out-" + str(test_input_list.index(argument_list))
                argument_list = app.configuration.extract_input_arg_list(argument_list)
                generalized_arg_list = []
                for arg in argument_list:
                    if arg in (values.LIST_SEED_FILES + list(values.LIST_TEST_FILES.values())):
                        poc_path = arg
                        values.FILE_POC_SEED = arg
                        values.FILE_POC_GEN = arg
                        generalized_arg_list.append("$POC")
                    else:
                        generalized_arg_list.append(arg)
                emitter.sub_sub_title("Iteration: " + str(iteration) + " - Using Seed #" + str(seed_id))
                emitter.highlight("\tUsing Arguments: " + str(generalized_arg_list))
                emitter.highlight("\tUsing Input: " + str(poc_path))
                values.ARGUMENT_LIST = generalized_arg_list
                _, second_var_list = generator.generate_angelic_val(
                    klee_out_dir, generalized_arg_list, poc_path)
                exit_code = run_concolic_execution(program_path + ".bc",
                                                   generalized_arg_list, second_var_list, True)
                # assert exit_code == 0
                duration = (time.time() - time_check) / 60
                generated_path_list = app.parallel.generate_symbolic_paths(
                    values.LIST_PPC, generalized_arg_list, poc_path)
                if generated_path_list:
                    values.LIST_GENERATED_PATH = generated_path_list + values.LIST_GENERATED_PATH
                values.LIST_PPC = []
                values.TIME_TO_EXPLORE = values.TIME_TO_EXPLORE + duration
                # check if new path hits patch location / fault location
                gen_masked_byte_list = generator.generate_mask_bytes(klee_out_dir, poc_path)
                if values.FILE_POC_SEED not in values.MASK_BYTE_LIST:
                    values.MASK_BYTE_LIST[values.FILE_POC_SEED] = gen_masked_byte_list
                else:
                    current_mask_list = values.MASK_BYTE_LIST[values.FILE_POC_SEED]
                    values.MASK_BYTE_LIST[values.FILE_POC_SEED] = sorted(
                        list(set(current_mask_list + gen_masked_byte_list)))
                distance.update_distance_map()
                if not oracle.is_loc_in_trace(values.CONF_LOC_PATCH):
                    continue
                if not values.SPECIFICATION_TXT and not oracle.is_loc_in_trace(values.CONF_LOC_BUG):
                    continue
                time_check = time.time()
                assertion, count_obs = generator.generate_assertion(
                    assertion_template, Path(binary_dir_path + "/klee-last/").resolve())
                # print(assertion.serialize())
                patch_list = reduce(patch_list,
                                    Path(binary_dir_path + "/klee-last/").resolve(), assertion)
                emitter.note("\t\t|P|=" + str(utilities.count_concrete_patches(patch_list)) +
                             ":" + str(len(patch_list)))
                duration = (time.time() - time_check) / 60
                values.TIME_TO_REDUCE = values.TIME_TO_REDUCE + duration
                satisfied = utilities.check_budget(values.DEFAULT_TIME_DURATION)
                if satisfied:
                    emitter.warning("\t[warning] ending due to timeout of " +
                                    str(values.DEFAULT_TIME_DURATION) + " minutes")
                    break
            emitter.success("\t\tend of concolic exploration using user-provided seeds")
            emitter.success("\t\t\t|P|=" + str(utilities.count_concrete_patches(patch_list)) +
                            ":" + str(len(patch_list)))
            values.COUNT_TEMPLATE_END_SEED = len(patch_list)
            values.COUNT_PATCH_END_SEED = utilities.count_concrete_patches(patch_list)
        else:
            iteration = iteration + 1
            values.ITERATION_NO = iteration
            emitter.sub_sub_title("Iteration: " + str(iteration))
            time_check = time.time()
            argument_list = values.ARGUMENT_LIST
            second_var_list = values.SECOND_VAR_LIST
            # if oracle.is_loc_in_trace(values.CONF_LOC_PATCH):
            # select_new_input() returns a 6-tuple, including the selected binary path.
            gen_arg_list, gen_var_list, patch_list, argument_list, poc_path, bin_path = \
                select_new_input(patch_list)
            if not patch_list:
                emitter.warning("\t\t[warning] unable to generate a patch")
                break
            elif not gen_arg_list and not gen_var_list:
                emitter.warning("\t\t[warning] no more paths to generate new input")
                break
            assert gen_arg_list  # there should be a concrete input
            # print(">> new input: " + str(gen_arg_list))
            ## Concolic execution of concrete input and patch candidate to retrieve path constraint.
            exit_code = run_concolic_execution(program_path + ".bc", gen_arg_list, gen_var_list)
            # assert exit_code == 0
            duration = (time.time() - time_check) / 60
            values.TIME_TO_EXPLORE = values.TIME_TO_EXPLORE + duration
            # Checks for the current coverage.
            satisfied = utilities.check_budget(values.DEFAULT_TIME_DURATION)
            time_check = time.time()
            values.LIST_GENERATED_PATH = app.parallel.generate_symbolic_paths(
                values.LIST_PPC, argument_list, poc_path)
            values.LIST_PPC = []
            # check if new path hits patch location / fault location
            if not oracle.is_loc_in_trace(values.CONF_LOC_PATCH):
                continue
            if not values.SPECIFICATION_TXT and not oracle.is_loc_in_trace(values.CONF_LOC_BUG):
                continue
            distance.update_distance_map()
            ## Reduces the set of patch candidates based on the current path constraint
            assertion, count_obs = generator.generate_assertion(
                assertion_template, Path(binary_dir_path + "/klee-last/").resolve())
            # print(assertion.serialize())
            patch_list = reduce(patch_list,
                                Path(binary_dir_path + "/klee-last/").resolve(), assertion)
            emitter.note("\t\t|P|=" + str(utilities.count_concrete_patches(patch_list)) +
                         ":" + str(len(patch_list)))
            duration = (time.time() - time_check) / 60
            values.TIME_TO_REDUCE = values.TIME_TO_REDUCE + duration
            if satisfied:
                emitter.warning("\t[warning] ending due to timeout of " +
                                str(values.DEFAULT_TIME_DURATION) + " minutes")
        if values.DEFAULT_COLLECT_STAT:
            ranked_patch_list = rank_patches(patch_list)
            update_rank_matrix(ranked_patch_list, iteration)
            definitions.FILE_PATCH_SET = definitions.DIRECTORY_OUTPUT + "/patch-set-ranked-" + str(iteration)
            writer.write_patch_set(ranked_patch_list, definitions.FILE_PATCH_SET)
            writer.write_as_json(values.LIST_PATCH_RANKING, definitions.FILE_PATCH_RANK_MATRIX)

    if not patch_list:
        values.COUNT_PATCH_END = len(patch_list)
        emitter.warning("\t\t[warning] unable to generate a patch")
    else:
        definitions.FILE_PATCH_SET = definitions.DIRECTORY_OUTPUT + "/patch-set-original"
        writer.write_patch_set(patch_list, definitions.FILE_PATCH_SET)
        ranked_patch_list = rank_patches(patch_list)
        print_patch_list(ranked_patch_list)
        definitions.FILE_PATCH_SET = definitions.DIRECTORY_OUTPUT + "/patch-set-ranked"
        writer.write_patch_set(ranked_patch_list, definitions.FILE_PATCH_SET)
        if values.DEFAULT_COLLECT_STAT:
            check_infeasible_paths(patch_list)
        if values.DEFAULT_PATCH_TYPE == values.OPTIONS_PATCH_TYPE[1]:
            values.COUNT_PATCH_END = utilities.count_concrete_patches(ranked_patch_list)
            values.COUNT_TEMPLATE_END = len(patch_list)
        else:
            values.COUNT_PATCH_END = len(ranked_patch_list)
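# Flow sketch of one run_cpr() refinement round (informal summary, no extra
# behaviour): iteration 0 replays every user-provided seed concolically to
# populate LIST_GENERATED_PATH; each later iteration asks select_new_input()
# for a path that is satisfiable together with at least one remaining patch,
# re-runs the concolic execution on the generated input, and calls reduce()
# only when the trace reaches CONF_LOC_PATCH (and CONF_LOC_BUG when no
# specification is provided), shrinking patch_list until the time budget or
# the patch pool is exhausted.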
def read_conf_file():
    emitter.normal("reading configuration values from configuration file")
    emitter.note("\t[file] " + values.FILE_CONFIGURATION)
    logger.information(values.FILE_CONFIGURATION)
    if not os.path.exists(values.FILE_CONFIGURATION):
        emitter.error("[NOT FOUND] Configuration file " + values.FILE_CONFIGURATION)
        exit()
    if os.path.getsize(values.FILE_CONFIGURATION) == 0:
        emitter.error("[EMPTY] Configuration file " + values.FILE_CONFIGURATION)
        exit()
    with open(values.FILE_CONFIGURATION, 'r') as conf_file:
        configuration_list = [i.strip() for i in conf_file.readlines()]

    for configuration in configuration_list:
        if definitions.CONF_PATH_PROJECT in configuration:
            values.CONF_PATH_PROJECT = configuration.replace(definitions.CONF_PATH_PROJECT, '')
        elif definitions.CONF_BINARY_PATH in configuration:
            values.CONF_PATH_PROGRAM = configuration.replace(definitions.CONF_BINARY_PATH, '')
        elif definitions.CONF_COMMAND_BUILD in configuration:
            values.CONF_COMMAND_BUILD = configuration.replace(definitions.CONF_COMMAND_BUILD, '')
        elif definitions.CONF_COMMAND_CONFIG in configuration:
            values.CONF_COMMAND_CONFIG = configuration.replace(definitions.CONF_COMMAND_CONFIG, '')
        elif definitions.CONF_RANK_LIMIT in configuration:
            values.CONF_RANK_LIMIT = int(configuration.replace(definitions.CONF_RANK_LIMIT, ''))
        elif definitions.CONF_SEED_FILE in configuration:
            seed_file_path = configuration.replace(definitions.CONF_SEED_FILE, '')
            if not os.path.isfile(seed_file_path):
                seed_file_path = values.CONF_PATH_PROJECT + "/" + seed_file_path
                if not os.path.isfile(seed_file_path):
                    error_exit("Seed file " + seed_file_path + " not found")
            values.CONF_SEED_FILE = seed_file_path
        elif definitions.CONF_TEST_INPUT_FILE in configuration:
            test_file_path = configuration.replace(definitions.CONF_TEST_INPUT_FILE, '')
            if not os.path.isfile(test_file_path):
                test_file_path = values.CONF_PATH_PROJECT + "/" + test_file_path
                if not os.path.isfile(test_file_path):
                    error_exit("Test input file " + test_file_path + " not found")
            values.CONF_TEST_INPUT_FILE = test_file_path
        elif definitions.CONF_SEED_DIR in configuration:
            seed_dir_path = configuration.replace(definitions.CONF_SEED_DIR, '')
            if not os.path.isdir(seed_dir_path):
                seed_dir_path = values.CONF_PATH_PROJECT + "/" + seed_dir_path
                if not os.path.isdir(seed_dir_path):
                    error_exit("Seed directory " + seed_dir_path + " not found")
            values.CONF_SEED_DIR = seed_dir_path
        elif definitions.CONF_TEST_OUTPUT_DIR in configuration:
            output_dir_path = configuration.replace(definitions.CONF_TEST_OUTPUT_DIR, '')
            if not os.path.isdir(output_dir_path):
                output_dir_path = values.CONF_PATH_PROJECT + "/" + output_dir_path
                if not os.path.isdir(output_dir_path):
                    error_exit("Test output directory " + output_dir_path + " not found")
            values.CONF_TEST_OUTPUT_DIR = output_dir_path
        elif definitions.CONF_TEST_INPUT_DIR in configuration:
            input_dir_path = configuration.replace(definitions.CONF_TEST_INPUT_DIR, '')
            if not os.path.isdir(input_dir_path):
                input_dir_path = values.CONF_PATH_PROJECT + "/" + input_dir_path
                if not os.path.isdir(input_dir_path):
                    error_exit("Test input directory " + input_dir_path + " not found")
            values.CONF_TEST_INPUT_DIR = input_dir_path
        elif definitions.CONF_TEST_OUTPUT_LIST in configuration:
            values.CONF_TEST_OUTPUT_LIST = configuration.replace(
                definitions.CONF_TEST_OUTPUT_LIST, '').split(",")
        elif definitions.CONF_TEST_INPUT_LIST in configuration:
            input_list = configuration.replace(definitions.CONF_TEST_INPUT_LIST, '').split("],[")
            processed_list = []
            for input_str in input_list:
                processed_list.append(input_str.replace("[", "").replace("]", ""))
            values.CONF_TEST_INPUT_LIST = processed_list
        elif definitions.CONF_SEED_LIST in configuration:
            seed_list = configuration.replace(definitions.CONF_SEED_LIST, '').split("],[")
            processed_list = []
            for seed_input in seed_list:
                processed_list.append(seed_input.replace("[", "").replace("]", ""))
            values.CONF_SEED_LIST = processed_list
        elif definitions.CONF_PATH_SPECIFICATION in configuration:
            values.CONF_PATH_SPECIFICATION = configuration.replace(
                definitions.CONF_PATH_SPECIFICATION, '')
            assertion_file_path = values.CONF_PATH_PROJECT + "/" + values.CONF_PATH_SPECIFICATION
            values.SPECIFICATION_TXT = reader.collect_specification(assertion_file_path)
        elif definitions.CONF_CUSTOM_COMP_LIST in configuration:
            values.CONF_CUSTOM_COMP_LIST = configuration.replace(
                definitions.CONF_CUSTOM_COMP_LIST, '').split(",")
        elif definitions.CONF_GENERAL_COMP_LIST in configuration:
            values.CONF_GENERAL_COMP_LIST = configuration.replace(
                definitions.CONF_GENERAL_COMP_LIST, '').split(",")
        elif definitions.CONF_DEPTH_VALUE in configuration:
            values.CONF_DEPTH_VALUE = configuration.replace(definitions.CONF_DEPTH_VALUE, '')
        elif definitions.CONF_DIR_SRC in configuration:
            values.CONF_DIR_SRC = configuration.replace(
                definitions.CONF_DIR_SRC, '').replace("//", "/")
            if values.CONF_DIR_SRC:
                if values.CONF_DIR_SRC[-1] == "/":
                    values.CONF_DIR_SRC = values.CONF_DIR_SRC[:-1]
        elif definitions.CONF_LOC_BUG in configuration:
            values.CONF_LOC_BUG = configuration.replace(definitions.CONF_LOC_BUG, '')
        elif definitions.CONF_LOC_PATCH in configuration:
            values.CONF_LOC_PATCH = configuration.replace(definitions.CONF_LOC_PATCH, '')
        elif definitions.CONF_PATH_POC in configuration:
            values.CONF_PATH_POC = configuration.replace(definitions.CONF_PATH_POC, '')
            if not os.path.isfile(values.CONF_PATH_POC):
                poc_path = values.CONF_PATH_PROJECT + "/" + values.CONF_PATH_POC
                if os.path.isfile(poc_path):
                    values.CONF_PATH_POC = poc_path
                else:
                    error_exit("Test file " + values.CONF_PATH_POC + " not found")
        elif definitions.CONF_LOW_BOUND in configuration:
            values.CONF_LOW_BOUND = int(configuration.replace(definitions.CONF_LOW_BOUND, ''))
        elif definitions.CONF_MAX_BOUND in configuration:
            values.CONF_MAX_BOUND = int(configuration.replace(definitions.CONF_MAX_BOUND, ''))
        elif definitions.CONF_MAX_FORK in configuration:
            values.CONF_MAX_FORK = int(configuration.replace(definitions.CONF_MAX_FORK, ''))
        elif definitions.CONF_GEN_SEARCH_LIMIT in configuration:
            values.CONF_GEN_SEARCH_LIMIT = int(
                configuration.replace(definitions.CONF_GEN_SEARCH_LIMIT, ''))
        elif definitions.CONF_TAG_ID in configuration:
            values.CONF_TAG_ID = configuration.replace(definitions.CONF_TAG_ID, '')
        elif definitions.CONF_STATIC in configuration:
            conf_text = configuration.replace(definitions.CONF_STATIC, '')
            if "true" in str(conf_text).lower():
                values.CONF_STATIC = True
        elif definitions.CONF_IS_CRASH in configuration:
            conf_text = configuration.replace(definitions.CONF_IS_CRASH, '')
            if "true" in str(conf_text).lower():
                values.CONF_IS_CRASH = True
        elif definitions.CONF_GEN_SPECIAL_PATH in configuration:
            conf_text = configuration.replace(definitions.CONF_GEN_SPECIAL_PATH, '')
            if "false" in str(conf_text).lower():
                values.CONF_GEN_PATH_SPECIAL = False
        elif definitions.CONF_IS_CPP in configuration:
            conf_text = configuration.replace(definitions.CONF_IS_CPP, '')
            if "true" in str(conf_text).lower():
                values.CONF_IS_CPP = True
        elif definitions.CONF_FLAG_ASAN in configuration:
            values.CONF_FLAG_ASAN = configuration.replace(definitions.CONF_FLAG_ASAN, '')
        elif definitions.CONF_FLAGS_C in configuration:
            values.CONF_FLAGS_C = configuration.replace(definitions.CONF_FLAGS_C, '')
        elif definitions.CONF_FLAGS_CXX in configuration:
            values.CONF_FLAGS_CXX = configuration.replace(definitions.CONF_FLAGS_CXX, '')
        elif definitions.CONF_SELECTION_STRATEGY in configuration:
            values.CONF_SELECTION_STRATEGY = configuration.replace(
                definitions.CONF_SELECTION_STRATEGY, '')
            if values.CONF_SELECTION_STRATEGY not in values.OPTIONS_SELECT_METHOD:
                error_exit("Invalid configuration for " + definitions.CONF_SELECTION_STRATEGY)
        elif definitions.CONF_DISTANCE_METRIC in configuration:
            values.CONF_DISTANCE_METRIC = configuration.replace(
                definitions.CONF_DISTANCE_METRIC, '')
            if values.CONF_DISTANCE_METRIC not in values.OPTIONS_DIST_METRIC.values():
                error_exit("Invalid configuration for " + definitions.CONF_DISTANCE_METRIC)
        elif definitions.CONF_PATCH_TYPE in configuration:
            values.CONF_PATCH_TYPE = configuration.replace(definitions.CONF_PATCH_TYPE, '')
            if values.CONF_PATCH_TYPE not in values.OPTIONS_PATCH_TYPE.values():
                error_exit("Invalid configuration for " + definitions.CONF_PATCH_TYPE)
        elif definitions.CONF_OPERATION_MODE in configuration:
            values.CONF_OPERATION_MODE = configuration.replace(
                definitions.CONF_OPERATION_MODE, '')
            if values.CONF_OPERATION_MODE not in values.OPTIONS_OPERATION_MODE:
                error_exit("Invalid configuration for " + definitions.CONF_OPERATION_MODE)
        elif definitions.CONF_BUILD_FLAGS in configuration:
            values.CONF_BUILD_FLAGS = configuration.replace(definitions.CONF_BUILD_FLAGS, '')
        elif definitions.CONF_KLEE_FLAGS in configuration:
            values.CONF_KLEE_FLAGS = configuration.replace(definitions.CONF_KLEE_FLAGS, '')
        elif definitions.CONF_ITERATION_LIMIT in configuration:
            values.CONF_ITERATION_LIMIT = int(
                configuration.replace(definitions.CONF_ITERATION_LIMIT, ''))
        elif definitions.CONF_STACK_SIZE in configuration:
            values.CONF_STACK_SIZE = int(configuration.replace(definitions.CONF_STACK_SIZE, ''))
        elif definitions.CONF_MASK_ARG in configuration:
            values.CONF_MASK_ARG = configuration.replace(definitions.CONF_MASK_ARG, '').split(",")
        elif definitions.CONF_TIMEOUT_SAT in configuration:
            values.CONF_TIMEOUT_SAT = int(configuration.replace(definitions.CONF_TIMEOUT_SAT, ''))
        elif definitions.CONF_TIMEOUT_KLEE in configuration:
            values.CONF_TIMEOUT_KLEE = int(configuration.replace(definitions.CONF_TIMEOUT_KLEE, ''))

    if not values.CONF_TAG_ID:
        emitter.error("[NOT FOUND] Tag ID ")
        exit()
    if values.CONF_DIR_SRC:
        if "/" != values.CONF_DIR_SRC[0]:
            values.CONF_DIR_SRC = values.CONF_PATH_PROJECT + "/" + values.CONF_DIR_SRC
    else:
        values.CONF_DIR_SRC = values.CONF_PATH_PROJECT
    if "/" != values.CONF_PATH_PROGRAM[0]:
        values.CONF_PATH_PROGRAM = values.CONF_DIR_SRC + "/" + values.CONF_PATH_PROGRAM
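# Example configuration file accepted by read_conf_file() (all key prefixes and
# values below are hypothetical; the real prefixes are whatever the
# definitions.CONF_* constants are set to).  Each line is matched by substring,
# the prefix is stripped, and relative paths are retried under the project path:
#
#   project_path:/data/example-project
#   binary_path:src/app
#   config_command:./configure CC=wllvm
#   build_command:make CC=wllvm -j4
#   test_input_list:[$POC],[-v $POC]
#   poc_path:tests/crash-input
#   loc_patch:/data/example-project/src/app.c:120
#   tag_id:example-bug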