def generate_symbolic_paths(ppc_list, arg_list, poc_path):
    """
    Analyse the partial path conditions collected at each branch location,
    isolate the branch condition added at each location, and negate that
    constraint to create a new path.
        ppc_list : a dictionary containing the partial path condition at each branch location
        returns a list of new partial path conditions
    """
    emitter.normal("\tgenerating new paths")
    path_list = []
    if values.DEFAULT_GEN_SPECIAL_PATH:
        path_list = generate_special_paths(ppc_list, arg_list, poc_path)
    path_count = len(path_list)
    result_list = generate_flipped_paths(ppc_list)
    for result in result_list:
        path_count = path_count + 1
        path_list.append((result, arg_list, poc_path))
    emitter.highlight("\t\tgenerated " + str(path_count) + " flipped path(s)")
    return path_list
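# --- Illustrative sketch (not part of CPR) ---------------------------------------------------
# The docstring above describes flipping a branch condition to obtain a new path. The helper
# below shows the idea with pysmt: keep the prefix of branch constraints and negate the last
# one. The function name and its inputs are hypothetical; the real logic lives in
# generate_flipped_paths, which is not shown in this section.
from pysmt.shortcuts import And, GT, Int, LT, Not, Symbol
from pysmt.typing import INT


def flip_last_branch_condition(branch_conditions):
    """Return a new path condition with the last branch constraint negated."""
    prefix = list(branch_conditions[:-1])
    flipped = Not(branch_conditions[-1])
    return And(prefix + [flipped]) if prefix else flipped


# Example: path (x > 0) AND (x < 10) becomes (x > 0) AND NOT(x < 10).
# x = Symbol("x", INT)
# flip_last_branch_condition([GT(x, Int(0)), LT(x, Int(10))]).serialize()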
def initialize():
    emitter.title("Initializing Program")
    program_path = values.CONF_PATH_PROGRAM
    extractor.extract_byte_code(program_path)
    test_input_list = values.LIST_TEST_INPUT
    second_var_list = list()
    directory_path = "/".join(str(program_path).split("/")[:-1])
    klee_out_dir = directory_path + "/klee-last"
    emitter.sub_title("Running Test-Suite")
    test_case_id = 0
    for argument_list in test_input_list:
        print_argument_list = app.configuration.extract_input_arg_list(argument_list)
        generalized_arg_list = []
        seed_file = None
        test_case_id = test_case_id + 1
        for arg in print_argument_list:
            if arg in (values.LIST_SEED_FILES + list(values.LIST_TEST_FILES.values())):
                generalized_arg_list.append("$POC")
                seed_file = arg
            else:
                generalized_arg_list.append(arg)
        emitter.sub_sub_title("Test Case #" + str(test_case_id))
        emitter.highlight("\tUsing Arguments: " + str(generalized_arg_list))
        emitter.highlight("\tUsing Input: " + str(seed_file))
        emitter.debug("input list in test case:" + argument_list)
        argument_list = app.configuration.extract_input_arg_list(argument_list)
        exit_code = run_concrete_execution(program_path + ".bc", argument_list, True)
        assert exit_code == 0
        # set location of bug/crash
        values.IS_CRASH = False
        latest_crash_loc = reader.collect_crash_point(values.FILE_MESSAGE_LOG)
        if latest_crash_loc:
            values.IS_CRASH = True
            emitter.success("\t\t\t[info] identified a crash location: " + str(latest_crash_loc))
            if latest_crash_loc not in values.CONF_LOC_LIST_CRASH:
                values.CONF_LOC_LIST_CRASH.append(latest_crash_loc)
def initialize():
    emitter.title("Initializing Program")
    test_input_list = values.LIST_TEST_INPUT
    second_var_list = list()
    output_dir_path = definitions.DIRECTORY_OUTPUT
    emitter.sub_title("Running Test-Suite")
    test_case_id = 0
    count_seeds = len(values.LIST_SEED_INPUT)
    count_inputs = len(test_input_list)
    for argument_list in test_input_list[:count_inputs - count_seeds]:
        print_argument_list = app.configuration.extract_input_arg_list(argument_list)
        generalized_arg_list = []
        seed_file = None
        test_case_id = test_case_id + 1
        for arg in print_argument_list:
            if arg in (list(values.LIST_SEED_FILES.values()) + list(values.LIST_TEST_FILES.values())):
                generalized_arg_list.append("$POC")
                seed_file = arg
            else:
                generalized_arg_list.append(arg)
        emitter.sub_sub_title("Test Case #" + str(test_case_id))
        emitter.highlight("\tUsing Arguments: " + str(generalized_arg_list))
        emitter.highlight("\tUsing Input File: " + str(seed_file))
        emitter.debug("input list in test case:" + argument_list)
        argument_list = app.configuration.extract_input_arg_list(argument_list)
        klee_out_dir = output_dir_path + "/klee-out-test-" + str(test_case_id - 1)
        if values.LIST_TEST_BINARY:
            program_path = values.LIST_TEST_BINARY[test_case_id - 1]
            values.CONF_PATH_PROGRAM = program_path
        else:
            program_path = values.CONF_PATH_PROGRAM
        emitter.highlight("\tUsing Binary: " + str(program_path))
        extractor.extract_byte_code(program_path)
        if not os.path.isfile(program_path + ".bc"):
            app.utilities.error_exit("Unable to generate bytecode for " + program_path)
        exit_code = run_concrete_execution(program_path + ".bc", argument_list, True, klee_out_dir)
        assert exit_code == 0
        # set location of bug/crash
        values.IS_CRASH = False
        latest_crash_loc = reader.collect_crash_point(values.FILE_MESSAGE_LOG)
        # if oracle.is_loc_in_trace(values.CONF_LOC_PATCH):
        #     values.USEFUL_SEED_ID_LIST.append(test_case_id)
        if latest_crash_loc:
            values.IS_CRASH = True
            emitter.success("\t\t\t[info] identified a crash location: " + str(latest_crash_loc))
            if latest_crash_loc not in values.CONF_LOC_LIST_CRASH:
                values.CONF_LOC_LIST_CRASH.append(latest_crash_loc)
def check_infeasible_paths(patch_list):
    global list_path_inprogress, list_path_infeasible, list_path_detected
    emitter.sub_title("Evaluating Path Pool")
    emitter.normal("\tcomputing infeasibility on remaining paths")
    count = 0
    # iterate over a copy so that removing infeasible paths does not skip entries
    for path in list(list_path_inprogress):
        count = count + 1
        emitter.sub_sub_title("Path #" + str(count))
        control_loc, generated_path, ppc_len, reach_patch_loc, reach_obs_loc, *_ = path
        feasible_patch_list = select_patch_constraint_for_input(patch_list, generated_path)
        if not feasible_patch_list:
            list_path_infeasible.append(path)
            list_path_inprogress.remove(path)
    emitter.highlight("\ttotal discovered: " + str(len(list_path_detected)) + " path(s)")
    emitter.highlight("\ttotal remaining: " + str(len(list_path_inprogress)) + " path(s)")
    emitter.highlight("\ttotal infeasible: " + str(len(list_path_infeasible)) + " path(s)")
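# --- Illustrative sketch (not part of CPR) ---------------------------------------------------
# The infeasibility test above ultimately reduces to a satisfiability check: a path is
# feasible for a patch only if some input satisfies both the path condition and the patch
# constraint. A minimal pysmt version of that check, with hypothetical names:
from pysmt.shortcuts import And, GE, Int, LE, Symbol, is_sat
from pysmt.typing import INT


def is_path_feasible_for_patch(path_condition, patch_constraint):
    """True if at least one input satisfies both the path and the patch constraint."""
    return is_sat(And(path_condition, patch_constraint))


# Example: a path requiring x >= 10 is infeasible under a patch constraining x <= 5.
# x = Symbol("x", INT)
# is_path_feasible_for_patch(GE(x, Int(10)), LE(x, Int(5)))  # -> False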
def run_concolic_exploration(program_path, patch_list):
    values.CONF_TIME_CHECK = None
    satisfied = utilities.check_budget(values.DEFAULT_TIMEOUT_CEGIS_EXPLORE)
    iteration = 0
    emitter.sub_title("Concolic Path Exploration")
    assertion_template = values.SPECIFICATION_TXT
    max_count = 0
    largest_assertion = None
    largest_path_condition = None
    while not satisfied:
        if iteration == 0:
            second_var_list = list()
            seed_id = 0
            test_input_list = values.LIST_TEST_INPUT
            for argument_list in test_input_list:
                poc_path = None
                iteration = iteration + 1
                seed_id = seed_id + 1
                values.ITERATION_NO = iteration
                emitter.sub_sub_title("Iteration: " + str(iteration))
                output_dir_path = definitions.DIRECTORY_OUTPUT
                klee_out_dir = output_dir_path + "/klee-out-repair-" + str(iteration - 1)
                argument_list = app.configuration.extract_input_arg_list(argument_list)
                generalized_arg_list = []
                for arg in argument_list:
                    if arg in (list(values.LIST_SEED_FILES.values()) + list(values.LIST_TEST_FILES.values())):
                        poc_path = arg
                        values.FILE_POC_SEED = arg
                        values.FILE_POC_GEN = arg
                        generalized_arg_list.append("$POC")
                    else:
                        generalized_arg_list.append(arg)
                emitter.sub_sub_title("Iteration: " + str(iteration) + " - Using Seed #" + str(seed_id))
                emitter.highlight("\tUsing Arguments: " + str(generalized_arg_list))
                emitter.highlight("\tUsing Input File: " + str(poc_path))
                if values.LIST_TEST_BINARY:
                    program_path = values.LIST_TEST_BINARY[seed_id - 1]
                    values.CONF_PATH_PROGRAM = program_path
                else:
                    program_path = values.CONF_PATH_PROGRAM
                extractor.extract_byte_code(program_path)
                if not os.path.isfile(program_path + ".bc"):
                    app.utilities.error_exit("Unable to generate bytecode for " + program_path)
                values.ARGUMENT_LIST = generalized_arg_list
                _, second_var_list = generator.generate_angelic_val(klee_out_dir, generalized_arg_list, poc_path)
                exit_code = run_concolic_execution(program_path + ".bc", generalized_arg_list,
                                                   second_var_list, True, klee_out_dir)
                # assert exit_code == 0
                generated_path_list = app.parallel.generate_symbolic_paths(
                    values.LIST_PPC, generalized_arg_list, poc_path, program_path)
                if generated_path_list:
                    values.LIST_GENERATED_PATH = generated_path_list + values.LIST_GENERATED_PATH
                values.LIST_PPC = []
                # klee_dir = Path(binary_dir_path + "/klee-last/").resolve()
                assertion, count_obs = generator.generate_assertion(assertion_template, klee_out_dir)
                if count_obs > max_count:
                    max_count = count_obs
                    largest_assertion = assertion
                    path_constraint_file_path = str(klee_out_dir) + "/test000001.smt2"
                    largest_path_condition = extractor.extract_formula_from_file(path_constraint_file_path)
                satisfied = utilities.check_budget(values.DEFAULT_TIMEOUT_CEGIS_EXPLORE)
                # check if new path hits patch location / fault location
                gen_masked_byte_list = generator.generate_mask_bytes(klee_out_dir, poc_path)
                if values.FILE_POC_SEED not in values.MASK_BYTE_LIST:
                    values.MASK_BYTE_LIST[values.FILE_POC_SEED] = gen_masked_byte_list
                else:
                    current_mask_list = values.MASK_BYTE_LIST[values.FILE_POC_SEED]
                    values.MASK_BYTE_LIST[values.FILE_POC_SEED] = sorted(
                        list(set(current_mask_list + gen_masked_byte_list)))
                distance.update_distance_map()
                if not oracle.is_loc_in_trace(values.CONF_LOC_PATCH):
                    continue
                if not values.SPECIFICATION_TXT and not oracle.is_loc_in_trace(values.CONF_LOC_BUG):
                    continue
            if satisfied:
                emitter.warning("\t[warning] ending due to timeout of " +
                                str(values.DEFAULT_TIMEOUT_CEGIS_EXPLORE) + " minutes")
        else:
            iteration = iteration + 1
            values.ITERATION_NO = iteration
            emitter.sub_sub_title("Iteration: " + str(iteration))
            argument_list = values.ARGUMENT_LIST
            second_var_list = values.SECOND_VAR_LIST
            gen_arg_list, gen_var_list, patch_list, argument_list, poc_path, program_path = select_new_input(patch_list)
            output_dir_path = definitions.DIRECTORY_OUTPUT
            klee_out_dir = output_dir_path + "/klee-out-repair-" + str(iteration - 1)
            if not patch_list:
                emitter.warning("\t\t[warning] unable to generate a patch")
                break
            elif not gen_arg_list and not gen_var_list:
                emitter.warning("\t\t[warning] no more paths to generate new input")
                break
            assert gen_arg_list  # there should be a concrete input
            time_check = time.time()
            ## Concolic execution of concrete input and patch candidate to retrieve path constraint.
            exit_code = run_concolic_execution(program_path + ".bc", gen_arg_list,
                                               gen_var_list, False, klee_out_dir)
            duration = (time.time() - time_check) / 60
            values.TIME_TO_EXPLORE = values.TIME_TO_EXPLORE + duration
            # assert exit_code == 0
            # klee_dir = Path(binary_dir_path + "/klee-last/").resolve()
            assertion, count_obs = generator.generate_assertion(assertion_template, klee_out_dir)
            if count_obs > max_count:
                max_count = count_obs
                largest_assertion = assertion
                path_constraint_file_path = str(klee_out_dir) + "/test000001.smt2"
                largest_path_condition = extractor.extract_formula_from_file(path_constraint_file_path)
            # Checks for the current coverage.
            satisfied = utilities.check_budget(values.DEFAULT_TIMEOUT_CEGIS_EXPLORE)
            values.LIST_GENERATED_PATH = app.parallel.generate_symbolic_paths(
                values.LIST_PPC, argument_list, poc_path, program_path)
            values.LIST_PPC = []
            # check if new path hits patch location / fault location
            if not oracle.is_loc_in_trace(values.CONF_LOC_PATCH):
                continue
            if not values.SPECIFICATION_TXT and not oracle.is_loc_in_trace(values.CONF_LOC_BUG):
                continue
            distance.update_distance_map()
            if satisfied:
                emitter.warning("\t[warning] ending due to timeout of " +
                                str(values.DEFAULT_TIMEOUT_CEGIS_EXPLORE) + " minutes")
    return largest_assertion, largest_path_condition
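# --- Illustrative sketch (not part of CPR) ---------------------------------------------------
# run_concolic_exploration reads the path condition that KLEE dumps as an SMT-LIB file
# (test000001.smt2) via extractor.extract_formula_from_file. A minimal way to do the same with
# pysmt's SMT-LIB parser, assuming the file is a plain SMT-LIB2 script:
from pysmt.smtlib.parser import SmtLibParser


def read_path_condition(smt2_file_path):
    """Parse an SMT-LIB2 file and return the conjunction of its assertions."""
    parser = SmtLibParser()
    with open(smt2_file_path, "r") as smt_file:
        script = parser.get_script(smt_file)
    return script.get_last_formula()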
def select_new_input(patch_list=None):
    """
    Select a new path for the next concolic execution and generate the inputs
    that satisfy that path.
        log_path : log file of the previous concolic execution that captures the PPC
        project_path : root directory of the program, used to filter PPC from libraries
    """
    logger.info("generating new input for new path")
    global list_path_explored, list_path_inprogress, count_discovered
    # input_file_byte_list = list()
    # input_file_stat_byte_list = list()
    generated_path_list = values.LIST_GENERATED_PATH
    var_expr_map = reader.collect_symbolic_expression(values.FILE_EXPR_LOG)
    # generated_path_list = generate_new_symbolic_paths(constraint_list)
    # list_path_explored = list(set(list_path_explored + current_path_list))
    selected_patch = None
    patch_constraint = TRUE
    new_path_count = 0
    for (control_loc, generated_path, ppc_len), arg_list, poc_path, bin_path in generated_path_list:
        path_str = str(generated_path.serialize())
        if path_str not in (list_path_detected + list_path_explored):
            reach_patch_loc = 100 - path_str.count("angelic!")
            reach_obs_loc = 100 - path_str.count("obs!")
            ppc_len = 10000 - ppc_len
            list_path_inprogress.append(
                (control_loc, generated_path, ppc_len, reach_patch_loc,
                 reach_obs_loc, arg_list, poc_path, bin_path))
            list_path_detected.append(str(generated_path.serialize()))
            new_path_count = new_path_count + 1
    count_discovered = count_discovered + new_path_count
    emitter.highlight("\tidentified " + str(new_path_count) + " new path(s)")
    emitter.highlight("\ttotal discovered: " + str(count_discovered) + " path(s)")
    emitter.highlight("\ttotal remaining: " + str(len(list_path_inprogress)) + " path(s)")
    emitter.highlight("\ttotal infeasible: " + str(len(list_path_infeasible)) + " path(s)")
    if not list_path_inprogress:
        emitter.note("\t\tCount paths explored: " + str(len(list_path_explored)))
        emitter.note("\t\tCount paths remaining: " + str(len(list_path_inprogress)))
        return None, None, patch_list, None, None, None
    values.LIST_GENERATED_PATH = []
    patch_constraint = None
    selected_new_path = ""
    selected_control_loc = ""
    if patch_list:
        while not patch_constraint:
            emitter.normal("\tfinding a feasible path for current patch set")
            if not list_path_inprogress:
                emitter.note("\t\tCount paths explored: " + str(len(list_path_explored)))
                emitter.note("\t\tCount paths remaining: " + str(len(list_path_inprogress)))
                return None, None, patch_list, None, None, None
            selected_new_path, selected_control_loc, argument_list, poc_path, bin_path = select_new_path_condition()
            patch_constraint = select_patch_constraint_for_input(patch_list, selected_new_path)
            if patch_constraint:
                list_path_explored.append(str(selected_new_path.serialize()))
                if is_sat(And(selected_new_path, patch_constraint)):
                    selected_new_path = And(selected_new_path, patch_constraint)
                else:
                    emitter.warning("\t[warning] no model generated")
            else:
                list_path_infeasible.append(str(selected_new_path.serialize()))
    else:
        selected_new_path, selected_control_loc, argument_list, poc_path, bin_path = select_new_path_condition()
        list_path_explored.append(str(selected_new_path.serialize()))
    emitter.highlight("\tSelected control location: " + selected_control_loc)
    emitter.highlight("\tSelected path: " + str(selected_new_path))
    emitter.highlight("\tSelected binary: " + str(bin_path))
    emitter.highlight("\tSelected arguments for mutation: " + str(argument_list))
    if poc_path:
        emitter.highlight("\tSelected seed file: " + str(poc_path))
    input_arg_list, input_var_list = generator.generate_new_input(selected_new_path, argument_list, poc_path)
    if input_arg_list is None and input_var_list is None:
        return None, None, patch_list, argument_list, poc_path, bin_path
    return input_arg_list, input_var_list, patch_list, argument_list, poc_path, bin_path
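# --- Illustrative sketch (not part of CPR) ---------------------------------------------------
# select_new_input stores inverted counters (100 - hits, 10000 - ppc_len) so that a plain
# ascending sort prefers paths that reach the patch/observation locations more often and that
# carry longer partial path conditions. The actual ordering is implemented in
# select_new_path_condition (not shown); the sort key below is an assumption for illustration.

def pick_next_path(path_pool):
    """Return the highest-priority path tuple, or None if the pool is empty."""
    # Entry layout: (control_loc, path, ppc_len, reach_patch_loc, reach_obs_loc,
    #                arg_list, poc_path, bin_path)
    ranked = sorted(path_pool, key=lambda entry: (entry[3], entry[4], entry[2]))
    return ranked[0] if ranked else None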
def run_cpr(program_path, patch_list):
    emitter.sub_title("Evaluating Patch Pool")
    values.CONF_TIME_CHECK = None
    satisfied = utilities.check_budget(values.DEFAULT_TIME_DURATION)
    if satisfied:
        emitter.warning("\t[warning] ending due to timeout of " +
                        str(values.DEFAULT_TIME_DURATION) + " minutes")
    iteration = 0
    assertion_template = values.SPECIFICATION_TXT
    binary_dir_path = "/".join(program_path.split("/")[:-1])
    while not satisfied and len(patch_list) > 0:
        if iteration == 0:
            test_input_list = values.LIST_TEST_INPUT
            seed_id = 0
            for argument_list in test_input_list:
                time_check = time.time()
                poc_path = None
                iteration = iteration + 1
                seed_id = seed_id + 1
                values.ITERATION_NO = iteration
                klee_out_dir = binary_dir_path + "/klee-out-" + str(test_input_list.index(argument_list))
                argument_list = app.configuration.extract_input_arg_list(argument_list)
                generalized_arg_list = []
                for arg in argument_list:
                    if arg in (values.LIST_SEED_FILES + list(values.LIST_TEST_FILES.values())):
                        poc_path = arg
                        values.FILE_POC_SEED = arg
                        values.FILE_POC_GEN = arg
                        generalized_arg_list.append("$POC")
                    else:
                        generalized_arg_list.append(arg)
                emitter.sub_sub_title("Iteration: " + str(iteration) + " - Using Seed #" + str(seed_id))
                emitter.highlight("\tUsing Arguments: " + str(generalized_arg_list))
                emitter.highlight("\tUsing Input: " + str(poc_path))
                values.ARGUMENT_LIST = generalized_arg_list
                _, second_var_list = generator.generate_angelic_val(klee_out_dir, generalized_arg_list, poc_path)
                exit_code = run_concolic_execution(program_path + ".bc", generalized_arg_list,
                                                   second_var_list, True)
                # assert exit_code == 0
                duration = (time.time() - time_check) / 60
                generated_path_list = app.parallel.generate_symbolic_paths(
                    values.LIST_PPC, generalized_arg_list, poc_path)
                if generated_path_list:
                    values.LIST_GENERATED_PATH = generated_path_list + values.LIST_GENERATED_PATH
                values.LIST_PPC = []
                values.TIME_TO_EXPLORE = values.TIME_TO_EXPLORE + duration
                # check if new path hits patch location / fault location
                gen_masked_byte_list = generator.generate_mask_bytes(klee_out_dir, poc_path)
                if values.FILE_POC_SEED not in values.MASK_BYTE_LIST:
                    values.MASK_BYTE_LIST[values.FILE_POC_SEED] = gen_masked_byte_list
                else:
                    current_mask_list = values.MASK_BYTE_LIST[values.FILE_POC_SEED]
                    values.MASK_BYTE_LIST[values.FILE_POC_SEED] = sorted(
                        list(set(current_mask_list + gen_masked_byte_list)))
                distance.update_distance_map()
                if not oracle.is_loc_in_trace(values.CONF_LOC_PATCH):
                    continue
                if not values.SPECIFICATION_TXT and not oracle.is_loc_in_trace(values.CONF_LOC_BUG):
                    continue
                time_check = time.time()
                assertion, count_obs = generator.generate_assertion(
                    assertion_template, Path(binary_dir_path + "/klee-last/").resolve())
                # print(assertion.serialize())
                patch_list = reduce(patch_list, Path(binary_dir_path + "/klee-last/").resolve(), assertion)
                emitter.note("\t\t|P|=" + str(utilities.count_concrete_patches(patch_list)) +
                             ":" + str(len(patch_list)))
                duration = (time.time() - time_check) / 60
                values.TIME_TO_REDUCE = values.TIME_TO_REDUCE + duration
                satisfied = utilities.check_budget(values.DEFAULT_TIME_DURATION)
                if satisfied:
                    emitter.warning("\t[warning] ending due to timeout of " +
                                    str(values.DEFAULT_TIME_DURATION) + " minutes")
                    break
            emitter.success("\t\tend of concolic exploration using user-provided seeds")
            emitter.success("\t\t\t|P|=" + str(utilities.count_concrete_patches(patch_list)) +
                            ":" + str(len(patch_list)))
            values.COUNT_TEMPLATE_END_SEED = len(patch_list)
            values.COUNT_PATCH_END_SEED = utilities.count_concrete_patches(patch_list)
        else:
            iteration = iteration + 1
            values.ITERATION_NO = iteration
            emitter.sub_sub_title("Iteration: " + str(iteration))
            time_check = time.time()
            argument_list = values.ARGUMENT_LIST
            second_var_list = values.SECOND_VAR_LIST
            # if oracle.is_loc_in_trace(values.CONF_LOC_PATCH):
            gen_arg_list, gen_var_list, patch_list, argument_list, poc_path = select_new_input(patch_list)
            if not patch_list:
                emitter.warning("\t\t[warning] unable to generate a patch")
                break
            elif not gen_arg_list and not gen_var_list:
                emitter.warning("\t\t[warning] no more paths to generate new input")
                break
            assert gen_arg_list  # there should be a concrete input
            # print(">> new input: " + str(gen_arg_list))
            ## Concolic execution of concrete input and patch candidate to retrieve path constraint.
            exit_code = run_concolic_execution(program_path + ".bc", gen_arg_list, gen_var_list)
            # assert exit_code == 0
            duration = (time.time() - time_check) / 60
            values.TIME_TO_EXPLORE = values.TIME_TO_EXPLORE + duration
            # Checks for the current coverage.
            satisfied = utilities.check_budget(values.DEFAULT_TIME_DURATION)
            time_check = time.time()
            values.LIST_GENERATED_PATH = app.parallel.generate_symbolic_paths(
                values.LIST_PPC, argument_list, poc_path)
            values.LIST_PPC = []
            # check if new path hits patch location / fault location
            if not oracle.is_loc_in_trace(values.CONF_LOC_PATCH):
                continue
            if not values.SPECIFICATION_TXT and not oracle.is_loc_in_trace(values.CONF_LOC_BUG):
                continue
            distance.update_distance_map()
            ## Reduces the set of patch candidates based on the current path constraint
            assertion, count_obs = generator.generate_assertion(
                assertion_template, Path(binary_dir_path + "/klee-last/").resolve())
            # print(assertion.serialize())
            patch_list = reduce(patch_list, Path(binary_dir_path + "/klee-last/").resolve(), assertion)
            emitter.note("\t\t|P|=" + str(utilities.count_concrete_patches(patch_list)) +
                         ":" + str(len(patch_list)))
            duration = (time.time() - time_check) / 60
            values.TIME_TO_REDUCE = values.TIME_TO_REDUCE + duration
            if satisfied:
                emitter.warning("\t[warning] ending due to timeout of " +
                                str(values.DEFAULT_TIME_DURATION) + " minutes")
        if values.DEFAULT_COLLECT_STAT:
            ranked_patch_list = rank_patches(patch_list)
            update_rank_matrix(ranked_patch_list, iteration)
            definitions.FILE_PATCH_SET = definitions.DIRECTORY_OUTPUT + "/patch-set-ranked-" + str(iteration)
            writer.write_patch_set(ranked_patch_list, definitions.FILE_PATCH_SET)
            writer.write_as_json(values.LIST_PATCH_RANKING, definitions.FILE_PATCH_RANK_MATRIX)
    if not patch_list:
        values.COUNT_PATCH_END = len(patch_list)
        emitter.warning("\t\t[warning] unable to generate a patch")
    else:
        definitions.FILE_PATCH_SET = definitions.DIRECTORY_OUTPUT + "/patch-set-original"
        writer.write_patch_set(patch_list, definitions.FILE_PATCH_SET)
        ranked_patch_list = rank_patches(patch_list)
        print_patch_list(ranked_patch_list)
        definitions.FILE_PATCH_SET = definitions.DIRECTORY_OUTPUT + "/patch-set-ranked"
        writer.write_patch_set(ranked_patch_list, definitions.FILE_PATCH_SET)
        if values.DEFAULT_COLLECT_STAT:
            check_infeasible_paths(patch_list)
        if values.DEFAULT_PATCH_TYPE == values.OPTIONS_PATCH_TYPE[1]:
            values.COUNT_PATCH_END = utilities.count_concrete_patches(ranked_patch_list)
            values.COUNT_TEMPLATE_END = len(patch_list)
        else:
            values.COUNT_PATCH_END = len(ranked_patch_list)
def run_cegis(program_path, project_path, patch_list):
    test_output_list = values.LIST_TEST_OUTPUT
    test_template = reader.collect_specification(test_output_list[0])
    binary_dir_path = "/".join(program_path.split("/")[:-1])
    time_check = time.time()
    assertion, largest_path_condition = concolic.run_concolic_exploration(program_path, patch_list)
    duration = (time.time() - time_check) / 60
    values.TIME_TO_EXPLORE = duration
    emitter.normal("\tcombining explored program paths")
    if not assertion:
        patch = patch_list[0]
        emitter.emit_patch(patch, message="\tfinal patch: ")
        return
    program_specification = generator.generate_program_specification(binary_dir_path)
    complete_specification = And(Not(assertion), program_specification)
    emitter.normal("\tcomputed the program specification formula")
    emitter.sub_title("Evaluating Patch Pool")
    iteration = 0
    output_dir = definitions.DIRECTORY_OUTPUT
    counter_example_list = []
    time_check = time.time()
    values.CONF_TIME_CHECK = None
    satisfied = utilities.check_budget(values.DEFAULT_TIMEOUT_CEGIS_REFINE)
    patch_generator = generator.generate_patch(project_path, counter_example_list)
    count_throw = 0
    while not satisfied:
        iteration = iteration + 1
        values.ITERATION_NO = iteration
        emitter.sub_sub_title("Iteration: " + str(iteration))
        patch = next(patch_generator, None)
        if not patch:
            emitter.error("[error] cannot generate a patch")
        patch_formula = app.generator.generate_formula_from_patch(patch)
        emitter.emit_patch(patch, message="\tgenerated patch: ")
        patch_formula_extended = generator.generate_extended_patch_formula(patch_formula, largest_path_condition)
        violation_check = And(complete_specification, patch_formula_extended)
        if is_sat(violation_check):
            model = generator.generate_model(violation_check)
            # print(model)
            arg_list = values.ARGUMENT_LIST
            poc_path = values.CONF_PATH_POC
            values.FILE_POC_GEN = definitions.DIRECTORY_OUTPUT + "/violation-" + str(values.ITERATION_NO)
            gen_path = values.FILE_POC_GEN
            input_arg_list, input_var_list = generator.generate_new_input(violation_check, arg_list, poc_path, gen_path)
            klee_out_dir = output_dir + "/klee-output-" + str(iteration)
            klee_test_file = output_dir + "/klee-test-" + str(iteration)
            exit_code = concolic.run_concrete_execution(program_path + ".bc", input_arg_list, True, klee_out_dir)
            # assert exit_code == 0
            emitter.normal("\t\tgenerating new assertion")
            test_assertion, count_obs = generator.generate_assertion(test_template, klee_out_dir)
            write_smtlib(test_assertion, klee_test_file)
            counter_example_list.append((klee_test_file, klee_out_dir))
            emitter.highlight("\t\tnew counter-example added")
            patch = None
            emitter.highlight("\t\tremoving current patch")
            count_throw = count_throw + 1
        else:
            klee_test_file = output_dir + "/klee-test-FINAL"
            # print(to_smtlib(violation_check, False))
            write_smtlib(violation_check, klee_test_file)
            break
        satisfied = utilities.check_budget(values.DEFAULT_TIMEOUT_CEGIS_REFINE)
        if satisfied:
            emitter.warning("\t[warning] ending due to timeout of " +
                            str(values.DEFAULT_TIMEOUT_CEGIS_REFINE) + " minutes")
    duration = (time.time() - time_check) / 60
    values.TIME_TO_REDUCE = duration
    # patch_list = [patch]
    # definitions.FILE_PATCH_SET = definitions.DIRECTORY_OUTPUT + "/patch-set-cegis"
    # writer.write_patch_set(patch_list, definitions.FILE_PATCH_SET)
    # patch = next(patch_generator, None)
    # while patch is not None:
    #     patch_formula = app.generator.generate_formula_from_patch(patch)
    #     patch_formula_extended = generator.generate_extended_patch_formula(patch_formula, largest_path_condition)
    #     violation_check = And(complete_specification, patch_formula_extended)
    #     if is_unsat(violation_check):
    #         count_final = count_final + 1
    #     patch = next(patch_generator, None)
    emitter.emit_patch(patch, message="\tfinal patch: ")
    values.COUNT_PATCH_END = values.COUNT_PATCH_START - count_throw
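# --- Illustrative sketch (not part of CPR) ---------------------------------------------------
# The refinement loop above rejects a candidate patch as soon as
# And(Not(assertion), program_specification, patch_formula) is satisfiable, i.e. some behaviour
# allowed by the patch violates the assertion. A self-contained pysmt version of that check,
# with placeholder formulas standing in for the generated ones:
from pysmt.shortcuts import And, GE, GT, Int, Not, Symbol, get_model, is_sat
from pysmt.typing import INT


def cegis_violation_demo():
    x = Symbol("x", INT)
    assertion = GE(x, Int(0))          # placeholder for the generated output assertion
    program_spec = GT(x, Int(-100))    # placeholder for the extracted program specification
    patch_formula = GT(x, Int(-10))    # placeholder for a candidate patch constraint
    violation_check = And(Not(assertion), program_spec, patch_formula)
    if is_sat(violation_check):
        # A satisfying model is a counter-example input; the loop above turns it into a new
        # test case and discards the current candidate.
        return get_model(violation_check)
    return None  # no violation found: the candidate survives this round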
def print_patch_list(patch_list):
    template_count = 0
    emitter.sub_title("List of Top " + str(values.DEFAULT_PATCH_RANK_LIMIT) + " Correct Patches")
    if not patch_list:
        emitter.warning("\t[warning] unable to generate any patch")
        return
    for patch in patch_list:
        template_count = template_count + 1
        emitter.sub_sub_title("Patch #" + str(template_count))
        emitter.emit_patch(patch, message="\t\t")
        patch_formula = app.generator.generate_formula_from_patch(patch)
        patch_formula_str = patch_formula.serialize()
        patch_index = utilities.get_hash(patch_formula_str)
        patch_score = values.LIST_PATCH_SCORE[patch_index]
        concrete_patch_count = 1
        if values.DEFAULT_PATCH_TYPE == values.OPTIONS_PATCH_TYPE[1]:
            patch_space = values.LIST_PATCH_SPACE[patch_index]
            partition_count = 0
            for partition in patch_space:
                partition_count = partition_count + 1
                emitter.highlight("\t\tPartition: " + str(partition_count))
                for constant_name in partition:
                    emitter.highlight("\t\t\tConstant: " + constant_name)
                    constant_info = partition[constant_name]
                    lower_bound = str(constant_info['lower-bound'])
                    upper_bound = str(constant_info['upper-bound'])
                    emitter.highlight("\t\t\tRange: " + lower_bound + " <= " +
                                      constant_name + " <= " + upper_bound)
                    dimension = len(range(int(lower_bound), int(upper_bound) + 1))
                    emitter.highlight("\t\t\tDimension: " + str(dimension))
            concrete_patch_count = utilities.count_concrete_patches_per_template(patch)
        emitter.highlight("\t\tPatch Count: " + str(concrete_patch_count))
        emitter.highlight("\t\tPath Coverage: " + str(patch_score))
        emitter.highlight("\t\tIs Under-approximating: " +
                          str(values.LIST_PATCH_UNDERAPPROX_CHECK[patch_index]))
        emitter.highlight("\t\tIs Over-approximating: " +
                          str(values.LIST_PATCH_OVERAPPROX_CHECK[patch_index]))
        if template_count == values.DEFAULT_PATCH_RANK_LIMIT:
            break
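# --- Illustrative sketch (not part of CPR) ---------------------------------------------------
# The "Dimension" printed above is the number of integer values in a constant's range
# (upper - lower + 1). Assuming a template's concrete patch count is the sum over partitions of
# the product of its constants' dimensions, it could be computed as below; the helper name is
# hypothetical and utilities.count_concrete_patches_per_template may differ in detail.
from math import prod


def count_concrete_patches_in_space(patch_space):
    total = 0
    for partition in patch_space:
        dimensions = [info['upper-bound'] - info['lower-bound'] + 1
                      for info in partition.values()]
        total += prod(dimensions) if dimensions else 0
    return total


# Example: one partition with a in [0, 4] and b in [1, 2] gives 5 * 2 = 10 concrete patches.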
def generate_patch_set(project_path, model_list=None) -> List[Dict[str, Program]]:
    definitions.FILE_PATCH_SET = definitions.DIRECTORY_OUTPUT + "/patch-set"
    definitions.FILE_PATCH_RANK_INDEX = definitions.DIRECTORY_OUTPUT + "/patch-index"
    definitions.FILE_PATCH_RANK_MATRIX = definitions.DIRECTORY_OUTPUT + "/patch-rank-matrix"
    if values.CONF_SKIP_GEN:
        emitter.sub_title("Loading Patch Pool")
        list_of_patches = reader.read_pickle(definitions.FILE_PATCH_SET)
        emitter.normal("\tnumber of patches in pool: " + str(len(list_of_patches)))
        return list_of_patches
    emitter.sub_title("Generating Patch Pool")
    test_output_list = values.LIST_TEST_OUTPUT
    test_input_list = values.LIST_TEST_INPUT
    test_file_list = values.LIST_TEST_FILES
    components = values.LIST_COMPONENTS
    depth = values.DEFAULT_DEPTH
    if values.CONF_DEPTH_VALUE.isnumeric():
        depth = int(values.CONF_DEPTH_VALUE)
    spec_files = []
    binary_dir_path = "/".join(values.CONF_PATH_PROGRAM.split("/")[:-1])
    emitter.sub_sub_title("Loading Test-Results")
    test_index = -1
    for arg_list in test_input_list:
        seed_file = None
        test_index = test_index + 1
        expected_output_file = None
        output_spec_path = None
        for arg in arg_list:
            if arg in list(test_file_list.values()):
                seed_file = arg
                break
        if seed_file:
            seed_name = seed_file.split("/")[-1].split(".")[0]
            expected_output_file = project_path + "/" + values.CONF_TEST_OUTPUT_DIR + "/" + seed_name
            if os.path.isfile(expected_output_file):
                output_spec_path = Path(os.path.abspath(expected_output_file))
            arg_list = [x.replace(seed_file, "$POC") for x in arg_list]
        else:
            expected_output_file = project_path + "/" + test_output_list[test_index]
            if os.path.isfile(expected_output_file):
                output_spec_path = Path(os.path.abspath(expected_output_file))
        klee_spec_path = None
        if output_spec_path:
            klee_spec_path = Path(binary_dir_path + "/klee-out-" + str(test_index))
            spec_files.append((output_spec_path, klee_spec_path))
        emitter.normal("\tTest #" + str(test_index + 1))
        emitter.highlight("\tInput Arg: " + str(arg_list))
        if seed_file:
            emitter.highlight("\tInput file: " + str(seed_file))
        emitter.highlight("\tOutput file: " + str(expected_output_file))
        emitter.highlight("\tKlee Run: " + str(klee_spec_path))
    if model_list:
        for output_spec_path, klee_spec_path in model_list:
            spec_files.append((Path(output_spec_path), Path(klee_spec_path)))
    specification = load_specification(spec_files)
    values.TEST_SPECIFICATION = specification
    concrete_enumeration = False
    if values.DEFAULT_PATCH_TYPE == values.OPTIONS_PATCH_TYPE[0]:
        concrete_enumeration = True
    lower_bound = values.DEFAULT_PATCH_LOWER_BOUND
    upper_bound = values.DEFAULT_PATCH_UPPER_BOUND + 1
    emitter.sub_sub_title("Synthesising Patches")
    emitter.normal("\tenumerating patch space")
    if values.DEFAULT_PATCH_TYPE == values.OPTIONS_PATCH_TYPE[0]:
        result = synthesize_lazy(components, depth, specification,
                                 concrete_enumeration, lower_bound, upper_bound)
    else:
        result = synthesize_parallel(components, depth, specification,
                                     concrete_enumeration, lower_bound, upper_bound)
    list_of_patches = [_ for _ in result]
    filtered_patch_list = []
    # writer.write_as_pickle(list_of_patches, definitions.FILE_PATCH_SET)
    # values.COUNT_TEMPLATE_GEN = len(list_of_patches)
    # values.COUNT_PATCH_GEN = utilities.count_concrete_patches(list_of_patches)
    emitter.highlight("\tnumber of patches in pool: " + str(len(list_of_patches)))
    return list_of_patches