def one_inv_pass(go, cp, junit_torun, this_hash, refined_target_set,
                 test_selection, analysis_only=False):
    """One invariant-inference pass for revision this_hash: check out and
    compile the revision, run seq_get_invs over the refined target set
    (sequentially or across a process pool), then clean up .inv files."""
    if not analysis_only:
        git_adapter.checkout(this_hash)
    if SHOW_DEBUG_INFO:
        print "\n===full classpath===\n" + cp + "\n"
    java_cmd = " ".join([
        "java", "-cp", cp,
        # "-Xms" + config.min_heap,
        "-Xmx" + config.max_heap,
        "-XX:+UseConcMarkSweepGC",
        # "-XX:-UseGCOverheadLimit",
        # "-XX:-UseSplitVerifier",  # FIXME: JDK 8- only!
    ])
    maven_adapter.compile_tests(this_hash)
    if SHOW_DEBUG_INFO:
        print "\n===junit torun===\n" + junit_torun + "\n"

    # v3.2, v4 execute with 4 cores
    num_primary_workers = config.num_master_workers
    auto_parallel_targets = config.auto_fork
    slave_load = config.classes_per_fork
    target_map = daikon.target_s2m(refined_target_set)
    all_classes = target_map.keys()
    consider_expansion = (not analysis_only)

    if len(refined_target_set) <= num_primary_workers or (
            num_primary_workers == 1 and not auto_parallel_targets):
        single_set_tuple = (refined_target_set, "0")
        seq_get_invs(single_set_tuple, java_cmd, junit_torun, go, this_hash,
                     consider_expansion, test_selection)
    elif num_primary_workers > 1:
        # FIXME: this distribution is buggy
        target_set_inputs = []
        all_target_set_list = list(refined_target_set)
        each_bulk_size = int(len(refined_target_set) / num_primary_workers)
        seq_func = partial(seq_get_invs,
                           java_cmd=java_cmd,
                           junit_torun=junit_torun,
                           go=go,
                           this_hash=this_hash,
                           consider_expansion=consider_expansion,
                           test_selection=test_selection)
        for i in range(num_primary_workers):
            if not (i == num_primary_workers - 1):
                sub_list_tuple = (
                    all_target_set_list[each_bulk_size * i:each_bulk_size * (i + 1)],
                    str(i))
                target_set_inputs.append(sub_list_tuple)
            else:
                sub_list_tuple = (all_target_set_list[each_bulk_size * i:], str(i))
                target_set_inputs.append(sub_list_tuple)
        input_pool = Pool(num_primary_workers)
        input_pool.map(seq_func, target_set_inputs)
        input_pool.close()
        input_pool.join()
    elif num_primary_workers == 1 and auto_parallel_targets and slave_load >= 1:
        # elastic automatic processing
        target_set_inputs = []
        num_processes = 0
        # target_map has been calculated already:
        #   target_map = daikon.target_s2m(refined_target_set)
        #   all_classes = target_map.keys()
        num_keys = len(all_classes)
        seq_func = partial(seq_get_invs,
                           java_cmd=java_cmd,
                           junit_torun=junit_torun,
                           go=go,
                           this_hash=this_hash,
                           consider_expansion=consider_expansion,
                           test_selection=test_selection)
        for i in range(0, num_keys, slave_load):
            # (inclusive) lower bound is i
            # (exclusive) upper bound:
            j = min(i + slave_load, num_keys)
            sublist = []
            for k in range(i, j):
                the_key = all_classes[k]
                sublist.append(the_key)  # so it won't miss class/object invariants
                sublist += target_map[the_key]
            sublist_tuple = (sublist, str(num_processes))
            target_set_inputs.append(sublist_tuple)
            num_processes += 1
        max_parallel_processes = config.num_slave_workers
        if not analysis_only:
            profiler.log_csv(
                ["class_count", "process_count", "max_parallel_processes", "slave_load"],
                [[num_keys, num_processes, max_parallel_processes, slave_load]],
                go + "_getty_y_elastic_count_" + this_hash + "_.profile.readable")
        input_pool = Pool(max_parallel_processes)
        input_pool.map(seq_func, target_set_inputs)
        input_pool.close()
        input_pool.join()
    else:
        print "\nIncorrect option for one center pass:"
        print "\tnum_primary_workers:", str(num_primary_workers)
        print "\tauto_parallel_targets:", str(auto_parallel_targets)
        print "\tslave_load:", str(slave_load)
        sys.exit(1)

    if config.compress_inv:
        os.remove_many_files(go, "*.inv.gz")
    else:
        os.remove_many_files(go, "*.inv")

    # include coverage report for comparison
    if config.analyze_test_coverage and not analysis_only:
        try:
            maven_adapter.generate_test_report(go, this_hash)
        except:
            pass
    if not analysis_only:
        git.clear_temp_checkout(this_hash)
    if config.class_level_expansion:
        extra_expansion = get_expansion_set(go)
        os.remove_many_files(go, config.expansion_tmp_files + "*")
    else:
        extra_expansion = None
    return all_classes, extra_expansion
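

# The elastic branch above chunks targets by class: each fork gets up to
# config.classes_per_fork classes, and every class name is kept alongside its
# methods so class/object invariants are not missed. A minimal standalone
# sketch of that grouping (the helper name chunk_targets_by_class and the
# example data are hypothetical, not part of this module):
def chunk_targets_by_class(target_map, classes_per_fork):
    """Yield (sublist, process_id) tuples, one per forked worker."""
    all_classes = list(target_map.keys())
    chunks = []
    for i in range(0, len(all_classes), classes_per_fork):
        sublist = []
        for the_key in all_classes[i:i + classes_per_fork]:
            sublist.append(the_key)          # keep the class itself ...
            sublist += target_map[the_key]   # ... plus all of its methods
        chunks.append((sublist, str(len(chunks))))
    return chunks

# Example with classes_per_fork=1 and two classes: two forks, each seeing one
# class plus its methods (dict iteration order decides which fork gets which):
#   chunk_targets_by_class({"p.A": ["p.A.f", "p.A.g"], "p.B": ["p.B.h"]}, 1)
#   => [(["p.A", "p.A.f", "p.A.g"], "0"), (["p.B", "p.B.h"], "1")]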
def one_info_pass(junit_path, sys_classpath, agent_path, cust_mvn_repo,
                  dyng_go, go, this_hash, target_set,
                  changed_methods, changed_tests, json_filepath):
    """One dynamic information pass for revision this_hash: build the
    classpath, copy source trees aside, run the instrumented tests, and use
    the resulting call/flow graphs to refine the target set."""
    bin_path = maven_adapter.get_bin_path(this_hash)
    test_bin_path = maven_adapter.get_test_bin_path(this_hash)
    cp = maven_adapter.get_full_class_path(this_hash, junit_path, sys_classpath,
                                           bin_path, test_bin_path)
    if SHOW_DEBUG_INFO:
        print "\n===full classpath===\n" + cp + "\n"

    print "\ncopying all code to specific directory ...\n"
    all_code_dirs = [
        maven_adapter.get_source_directory(this_hash),
        maven_adapter.get_test_source_directory(this_hash)
    ]
    getty_code_store = go + '_getty_allcode_' + this_hash + '_/'
    print 'copy to ' + getty_code_store + '\n'
    makedirs(getty_code_store)
    for adir in all_code_dirs:
        os.sys_call(" ".join(["cp -r", adir + "/*", getty_code_store]),
                    ignore_bad_exit=True)

    if config.use_special_junit_for_dyn:
        info_junit_path = os.rreplace(junit_path, config.default_junit_version,
                                      config.special_junit_version, 1)
        infocp = maven_adapter.get_full_class_path(this_hash, info_junit_path,
                                                   sys_classpath, bin_path,
                                                   test_bin_path)
    else:
        infocp = cp
    maven_adapter.compile_tests(this_hash)
    junit_torun = maven_adapter.get_junit_torun(cust_mvn_repo, this_hash)
    if SHOW_DEBUG_INFO:
        print "\n===junit torun===\n" + junit_torun + "\n"

    #### dynamic run one round for all information
    prefixes = daikon.common_prefixes(target_set)
    common_package = ''
    if len(prefixes) == 1:
        last_period_index = prefixes[0].rindex('.')
        if last_period_index > 0:
            # the common package should be at least one period away from the rest
            common_package = prefixes[0][:last_period_index]
    prefix_regexes = []
    for p in prefixes:
        prefix_regexes.append(p + "*")
    instrument_regex = "|".join(prefix_regexes)
    if SHOW_DEBUG_INFO:
        print "\n===instrumentation pattern===\n" + instrument_regex + "\n"

    if not path.exists(dyng_go):
        makedirs(dyng_go)
    full_info_exfile = java.run_instrumented_tests(
        this_hash, go, infocp, agent_path, instrument_regex, junit_torun)
    full_method_info_map = {}
    ext_start_index = len(config.method_info_line_prefix)
    with open(full_info_exfile, 'r') as f:
        contents = f.read().split("\n")
        for line in contents:
            line = line.strip()
            if line.startswith(config.method_info_line_prefix):
                rawdata = line[ext_start_index:]
                k, v = rawdata.split(" : ")
                full_method_info_map[k.strip()] = v.strip()

    print "dyng_go=", dyng_go, " go=", go
    os.merge_dyn_files(dyng_go, go, "_getty_dyncg_-hash-_.ex", this_hash)
    os.merge_dyn_files(dyng_go, go, "_getty_dynfg_-hash-_.ex", this_hash)
    caller_of, callee_of = agency.caller_callee(go, this_hash)
    pred_of, succ_of = agency.pred_succ(go, this_hash)

    if json_filepath != "":
        junit_torun, target_set, test_set = get_tests_and_target_set(
            go, json_filepath, junit_torun, this_hash)
    else:
        test_set = agency.get_test_set_dyn(callee_of, junit_torun)

    # test_set is correct; reset target set here
    refined_target_set, changed_methods, changed_tests = \
        agency.refine_targets(full_method_info_map, target_set, test_set,
                              caller_of, callee_of, pred_of, succ_of,
                              changed_methods, changed_tests, json_filepath)

    profiler.log_csv(["method_count", "test_count", "refined_target_count"],
                     [[len(target_set), len(test_set), len(refined_target_set)]],
                     go + "_getty_y_method_count_" + this_hash + "_.profile.readable")

    git.clear_temp_checkout(this_hash)
    return common_package, test_set, refined_target_set, changed_methods, changed_tests, \
        cp, junit_torun, full_method_info_map
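

# The parse loop above expects each agent-emitted info line to look like
#   <config.method_info_line_prefix><method-signature> : <info>
# A minimal, self-contained illustration of that parsing, assuming a
# hypothetical prefix "#info# " (the real value comes from
# config.method_info_line_prefix):
def parse_method_info_lines(lines, prefix="#info# "):
    """Map method signatures to their info payloads, skipping other lines."""
    info_map = {}
    for line in lines:
        line = line.strip()
        if line.startswith(prefix):
            k, v = line[len(prefix):].split(" : ")
            info_map[k.strip()] = v.strip()
    return info_map

# Example:
#   parse_method_info_lines(["#info# p.A.f(int) : pure"])
#   => {"p.A.f(int)": "pure"}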
def one_info_pass(
    junit_path,
    sys_classpath,
    agent_path,
    cust_mvn_repo,
    dyng_go,
    go,
    this_hash,
    target_set,
    changed_methods,
    changed_tests,
    inner_dataflow_methods,
    outer_dataflow_methods,
):
    os.sys_call("git checkout " + this_hash)
    os.sys_call("mvn clean")
    bin_path = mvn.path_from_mvn_call("outputDirectory")
    test_bin_path = mvn.path_from_mvn_call("testOutputDirectory")
    cp = mvn.full_classpath(junit_path, sys_classpath, bin_path, test_bin_path)
    if SHOW_DEBUG_INFO:
        print "\n===full classpath===\n" + cp + "\n"

    print "\ncopying all code to specific directory ...\n"
    all_code_dirs = [
        mvn.path_from_mvn_call("sourceDirectory"),
        # mvn.path_from_mvn_call("scriptSourceDirectory"),
        mvn.path_from_mvn_call("testSourceDirectory"),
    ]
    getty_code_store = go + "_getty_allcode_" + this_hash + "_/"
    print "copy to " + getty_code_store + "\n"
    makedirs(getty_code_store)
    for adir in all_code_dirs:
        os.sys_call(" ".join(["cp -r", adir + "/*", getty_code_store]),
                    ignore_bad_exit=True)

    if config.use_special_junit_for_dyn:
        info_junit_path = os.rreplace(junit_path, config.default_junit_version,
                                      config.special_junit_version, 1)
        infocp = mvn.full_classpath(info_junit_path, sys_classpath,
                                    bin_path, test_bin_path)
    else:
        infocp = cp
    java_cmd = " ".join(
        [
            "java",
            "-cp",
            infocp,
            # "-Xms" + config.min_heap,
            "-Xmx" + config.max_heap,
            "-XX:+UseConcMarkSweepGC",
            # "-XX:-UseGCOverheadLimit",
            "-XX:-UseSplitVerifier",  # FIXME: JDK 8- only!
        ]
    )
    # os.sys_call("mvn test -DskipTests", ignore_bad_exit=True)
    os.sys_call("mvn test-compile")
    junit_torun = mvn.junit_torun_str(cust_mvn_repo)
    if SHOW_DEBUG_INFO:
        print "\n===junit torun===\n" + junit_torun + "\n"

    #### dynamic run one round for all information
    prefixes = daikon.common_prefixes(target_set)
    common_package = ""
    if len(prefixes) == 1:
        last_period_index = prefixes[0].rindex(".")
        if last_period_index > 0:
            # the common package should be at least one period away from the rest
            common_package = prefixes[0][:last_period_index]
    prefix_regexes = []
    for p in prefixes:
        prefix_regexes.append(p + "*")
    instrument_regex = "|".join(prefix_regexes)
    if SHOW_DEBUG_INFO:
        print "\n===instrumentation pattern===\n" + instrument_regex + "\n"

    # run tests with instrumentation
    run_instrumented_tests = " ".join(
        [java_cmd,
         "-javaagent:" + agent_path + '="' + instrument_regex + '"',
         junit_torun]
    )
    if SHOW_DEBUG_INFO:
        print "\n=== Instrumented testing command to run: \n" + run_instrumented_tests
    if not path.exists(dyng_go):
        makedirs(dyng_go)
    os.sys_call(run_instrumented_tests, ignore_bad_exit=True)
    os.merge_dyn_files(dyng_go, go, "_getty_dyncg_-hash-_.ex", this_hash)
    os.merge_dyn_files(dyng_go, go, "_getty_dynfg_-hash-_.ex", this_hash)
    caller_of, callee_of = agency.caller_callee(go, this_hash)
    pred_of, succ_of = agency.pred_succ(go, this_hash)

    # add test methods into target set
    test_set = agency.get_test_set_dyn(target_set, callee_of, junit_torun)

    # set target set here
    refined_target_set = agency.refine_targets(
        target_set,
        test_set,
        caller_of,
        callee_of,
        pred_of,
        succ_of,
        changed_methods,
        changed_tests,
        inner_dataflow_methods,
        outer_dataflow_methods,
    )

    profiler.log_csv(
        ["method_count", "test_count", "refined_target_count"],
        [[len(target_set), len(test_set), len(refined_target_set)]],
        go + "_getty_y_method_count_" + this_hash + "_.profile.readable",
    )

    git.clear_temp_checkout(this_hash)
    return common_package, test_set, refined_target_set, cp, junit_torun
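

# Both one_info_pass variants derive the instrumentation pattern the same way:
# each common package prefix gets a trailing "*" and the prefixes are joined
# with "|". A worked example with a made-up prefix list (not from a real run):
#
#   prefixes = ["org.foo.bar."]
#   instrument_regex = "|".join([p + "*" for p in prefixes])   # "org.foo.bar.*"
#
# With a single prefix, common_package is everything before its last period:
# "org.foo.bar.".rindex('.') == 11, so common_package == "org.foo.bar".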
def one_inv_pass(go, cp, junit_torun, this_hash, refined_target_set, analysis_only=False):
    if not analysis_only:
        os.sys_call("git checkout " + this_hash)
        os.sys_call("mvn clean")
    if SHOW_DEBUG_INFO:
        print "\n===full classpath===\n" + cp + "\n"
    java_cmd = " ".join(
        [
            "java",
            "-cp",
            cp,
            # "-Xms" + config.min_heap,
            "-Xmx" + config.max_heap,
            "-XX:+UseConcMarkSweepGC",
            # "-XX:-UseGCOverheadLimit",
            "-XX:-UseSplitVerifier",  # FIXME: JDK 8- only!
        ]
    )
    # os.sys_call("mvn test -DskipTests", ignore_bad_exit=True)
    os.sys_call("mvn test-compile")
    if SHOW_DEBUG_INFO:
        print "\n===junit torun===\n" + junit_torun + "\n"

    # v3.2, v4 execute with 4 cores
    num_primary_workers = config.num_master_workers
    auto_parallel_targets = config.auto_fork
    slave_load = config.classes_per_fork
    target_map = daikon.target_s2m(refined_target_set)
    all_classes = target_map.keys()

    if len(refined_target_set) <= num_primary_workers or (
            num_primary_workers == 1 and not auto_parallel_targets):
        single_set_tuple = (refined_target_set, "0")
        seq_get_invs(single_set_tuple, java_cmd, junit_torun, go, this_hash)
    elif num_primary_workers > 1:
        # FIXME: this distribution is buggy
        target_set_inputs = []
        all_target_set_list = list(refined_target_set)
        each_bulk_size = int(len(refined_target_set) / num_primary_workers)
        seq_func = partial(seq_get_invs,
                           java_cmd=java_cmd,
                           junit_torun=junit_torun,
                           go=go,
                           this_hash=this_hash)
        for i in range(num_primary_workers):
            if not (i == num_primary_workers - 1):
                sub_list_tuple = (
                    all_target_set_list[each_bulk_size * i : each_bulk_size * (i + 1)],
                    str(i))
                target_set_inputs.append(sub_list_tuple)
            else:
                sub_list_tuple = (all_target_set_list[each_bulk_size * i :], str(i))
                target_set_inputs.append(sub_list_tuple)
        input_pool = Pool(num_primary_workers)
        input_pool.map(seq_func, target_set_inputs)
        input_pool.close()
        input_pool.join()
    elif num_primary_workers == 1 and auto_parallel_targets and slave_load >= 1:
        # elastic automatic processing
        target_set_inputs = []
        num_processes = 0
        # target_map has been calculated already:
        #   target_map = daikon.target_s2m(refined_target_set)
        #   all_classes = target_map.keys()
        num_keys = len(all_classes)
        seq_func = partial(seq_get_invs,
                           java_cmd=java_cmd,
                           junit_torun=junit_torun,
                           go=go,
                           this_hash=this_hash)
        for i in range(0, num_keys, slave_load):
            # (inclusive) lower bound is i
            # (exclusive) upper bound:
            j = min(i + slave_load, num_keys)
            sublist = []
            for k in range(i, j):
                the_key = all_classes[k]
                sublist.append(the_key)  # so it won't miss class/object invariants
                sublist += target_map[the_key]
            sublist_tuple = (sublist, str(num_processes))
            target_set_inputs.append(sublist_tuple)
            num_processes += 1
        max_parallel_processes = config.num_slave_workers
        if not analysis_only:
            profiler.log_csv(
                ["class_count", "process_count", "max_parallel_processes", "slave_load"],
                [[num_keys, num_processes, max_parallel_processes, slave_load]],
                go + "_getty_y_elastic_count_" + this_hash + "_.profile.readable",
            )
        input_pool = Pool(max_parallel_processes)
        input_pool.map(seq_func, target_set_inputs)
        input_pool.close()
        input_pool.join()
    else:
        print "\nIncorrect option for one center pass:"
        print "\tnum_primary_workers:", str(num_primary_workers)
        print "\tauto_parallel_targets:", str(auto_parallel_targets)
        print "\tslave_load:", str(slave_load)
        sys.exit(1)

    if config.compress_inv:
        os.remove_many_files(go, "*.inv.gz")
    else:
        os.remove_many_files(go, "*.inv")

    # include coverage report for comparison
    if config.analyze_test_coverage and not analysis_only:
        try:
            mvn.generate_coverage_report(go, this_hash)
        except:
            pass
    if not analysis_only:
        git.clear_temp_checkout(this_hash)
    return all_classes
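

# Note on the FIXME above: integer division gives every non-final worker
# floor(n / num_primary_workers) targets and leaves the whole remainder (up to
# num_primary_workers - 1 extra items) to the last worker; e.g. 10 targets
# across 4 workers split as 2/2/2/4. A more even round-robin split would look
# like this sketch (the helper name round_robin_split is hypothetical):
def round_robin_split(items, num_workers):
    """Deal items into num_workers buckets, returning (bucket, id) tuples."""
    buckets = [[] for _ in range(num_workers)]
    for idx, item in enumerate(items):
        buckets[idx % num_workers].append(item)
    return [(bucket, str(i)) for i, bucket in enumerate(buckets)]

# Example: round_robin_split(range(10), 4) yields bucket sizes 3/3/2/2
# instead of 2/2/2/4.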
def one_info_pass(junit_path, sys_classpath, agent_path, cust_mvn_repo,
                  dyng_go, go, this_hash, target_set,
                  changed_methods, changed_tests,
                  inner_dataflow_methods, outer_dataflow_methods):
    os.sys_call("git checkout " + this_hash)
    os.sys_call("mvn clean")
    bin_path = mvn.path_from_mvn_call("outputDirectory")
    test_bin_path = mvn.path_from_mvn_call("testOutputDirectory")
    cp = mvn.full_classpath(junit_path, sys_classpath, bin_path, test_bin_path)
    if SHOW_DEBUG_INFO:
        print "\n===full classpath===\n" + cp + "\n"

    print "\ncopying all code to specific directory ...\n"
    all_code_dirs = [mvn.path_from_mvn_call("sourceDirectory"),
                     # mvn.path_from_mvn_call("scriptSourceDirectory"),
                     mvn.path_from_mvn_call("testSourceDirectory")]
    getty_code_store = go + '_getty_allcode_' + this_hash + '_/'
    print 'copy to ' + getty_code_store + '\n'
    makedirs(getty_code_store)
    for adir in all_code_dirs:
        os.sys_call(" ".join(["cp -r", adir + "/*", getty_code_store]),
                    ignore_bad_exit=True)

    if config.use_special_junit_for_dyn:
        info_junit_path = os.rreplace(junit_path, config.default_junit_version,
                                      config.special_junit_version, 1)
        infocp = mvn.full_classpath(info_junit_path, sys_classpath,
                                    bin_path, test_bin_path)
    else:
        infocp = cp
    java_cmd = " ".join(["java", "-cp", infocp,
                         # "-Xms" + config.min_heap,
                         "-Xmx" + config.max_heap,
                         "-XX:+UseConcMarkSweepGC",
                         # "-XX:-UseGCOverheadLimit",
                         "-XX:-UseSplitVerifier",  # FIXME: JDK 8- only!
                         ])
    # os.sys_call("mvn test -DskipTests", ignore_bad_exit=True)
    os.sys_call("mvn test-compile")
    junit_torun = mvn.junit_torun_str(cust_mvn_repo)
    if SHOW_DEBUG_INFO:
        print "\n===junit torun===\n" + junit_torun + "\n"

    #### dynamic run one round for all information
    prefixes = daikon.common_prefixes(target_set)
    common_package = ''
    if len(prefixes) == 1:
        last_period_index = prefixes[0].rindex('.')
        if last_period_index > 0:
            # the common package should be at least one period away from the rest
            common_package = prefixes[0][:last_period_index]
    prefix_regexes = []
    for p in prefixes:
        prefix_regexes.append(p + "*")
    instrument_regex = "|".join(prefix_regexes)
    if SHOW_DEBUG_INFO:
        print "\n===instrumentation pattern===\n" + instrument_regex + "\n"

    # run tests with instrumentation
    run_instrumented_tests = \
        " ".join([java_cmd, "-ea",
                  "-javaagent:" + agent_path + "=\"" + instrument_regex + "\"",
                  junit_torun])
    if SHOW_DEBUG_INFO:
        print "\n=== Instrumented testing command to run: \n" + run_instrumented_tests
    if not path.exists(dyng_go):
        makedirs(dyng_go)
    full_info_exfile = go + "_getty_binary_info_" + this_hash + "_.ex"
    os.sys_call(run_instrumented_tests + " > " + full_info_exfile +
                ("" if config.show_stack_trace_info else " 2> /dev/null"),
                ignore_bad_exit=True)
    full_method_info_map = {}
    ext_start_index = len(config.method_info_line_prefix)
    with open(full_info_exfile, 'r') as f:
        contents = f.read().split("\n")
        for line in contents:
            line = line.strip()
            if line.startswith(config.method_info_line_prefix):
                rawdata = line[ext_start_index:]
                k, v = rawdata.split(" : ")
                full_method_info_map[k.strip()] = v.strip()

    os.merge_dyn_files(dyng_go, go, "_getty_dyncg_-hash-_.ex", this_hash)
    os.merge_dyn_files(dyng_go, go, "_getty_dynfg_-hash-_.ex", this_hash)
    caller_of, callee_of = agency.caller_callee(go, this_hash)
    pred_of, succ_of = agency.pred_succ(go, this_hash)

    # add test methods into target set
    test_set = agency.get_test_set_dyn(target_set, callee_of, junit_torun)

    # reset target set here
    refined_target_set, changed_methods, changed_tests = \
        agency.refine_targets(full_method_info_map, target_set, test_set,
                              caller_of, callee_of, pred_of, succ_of,
                              changed_methods, changed_tests,
                              inner_dataflow_methods, outer_dataflow_methods)

    profiler.log_csv(["method_count", "test_count", "refined_target_count"],
                     [[len(target_set), len(test_set), len(refined_target_set)]],
                     go + "_getty_y_method_count_" + this_hash + "_.profile.readable")

    git.clear_temp_checkout(this_hash)
    return common_package, test_set, refined_target_set, changed_methods, changed_tests, \
        cp, junit_torun, full_method_info_map
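

# How the two passes presumably chain in a caller, shown for this variant's
# signatures (a sketch only; the argument values are placeholders, not the
# actual driver code):
#
#   common_package, test_set, refined_target_set, changed_methods, changed_tests, \
#       cp, junit_torun, full_method_info_map = \
#       one_info_pass(junit_path, sys_classpath, agent_path, cust_mvn_repo,
#                     dyng_go, go, this_hash, target_set,
#                     changed_methods, changed_tests,
#                     inner_dataflow_methods, outer_dataflow_methods)
#   all_classes = one_inv_pass(go, cp, junit_torun, this_hash, refined_target_set)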