def link_generated_junit_test_cases(variant_dir, target_variant_dir):
    """Expose one variant's generated JUnit tests to another variant via a symlink.

    The target variant's test directory is resolved without being created
    (``force_mkdir=False``) so the symlink can take its place.
    """
    variant_name = get_file_name_without_ext(variant_dir)
    logger.info(f"Linking JUnit Test for variant [{variant_name}]")
    source_tests = get_test_dir(variant_dir)
    target_tests = get_test_dir(target_variant_dir, force_mkdir=False)
    create_symlink(source_tests, target_tests)
def run_junit_test_cases_with_coverage(variant_dir,
                                       lib_paths=None,
                                       halt_on_failure=False,
                                       halt_on_error=True,
                                       custom_ant=None):
    """Run the variant's JUnit tests with coverage via the ant-based plugin.

    Returns True when no failures/errors are reported, False when there are
    test failures but halting is not requested. Raises RuntimeError when
    ``halt_on_failure`` is set, or when ``halt_on_error`` is set and the
    build itself failed ("BUILD FAILED" in the output).
    """
    lib_paths = [] if lib_paths is None else lib_paths
    variant_name = get_file_name_without_ext(variant_dir)
    logger.info(
        f"Running JUnit Test for variant [{variant_name}] - Using custom ant [{custom_ant}]"
    )
    plugin_args = [
        {"-src": get_src_dir(variant_dir)},
        {"-test": get_test_dir(variant_dir)},
        {"-build.classes": get_compiled_source_classes_dir(variant_dir)},
        {"-build.testclasses": get_compiled_test_classes_dir(variant_dir)},
        {"-report.coveragedir": get_test_coverage_dir(variant_dir)},
        {"-junit.haltonfailure": "yes" if halt_on_failure else "no"},
        {"-ant.name": custom_ant},
        {"-lib_path": ":".join(lib_paths)},
    ]
    output_log = execute_shell_command(f'/bin/sh {JUNIT_PLUGIN_PATH}',
                                       extra_args=plugin_args,
                                       log_to_file=True)
    # A non-zero failure/error count or a build failure marks the run as failed.
    failure_match = re.search(
        "(Failures: [1-9]+|Errors: [1-9]+|BUILD FAILED)", output_log)
    if failure_match is None:
        return True
    build_failed = "BUILD FAILED" in failure_match.group()
    if halt_on_failure or (halt_on_error and build_failed):
        logger.fatal(
            "Some test cases were failed, see log for more detail\n{}".
            format(output_log))
        raise RuntimeError("Test case failures")
    return False
def get_all_test_case_name_from_source_files(variant_dir):
    """Collect the qualified names of all generated test cases for a variant.

    Greps ``void testN() throws`` declarations from every ``*_ESTest.java``
    file under the variant's test dir, then rewrites each matched file path
    and method into a dotted qualified name (``pkg.Class_ESTest.testN``).

    Returns:
        set[str]: the qualified test case names.

    Raises:
        AssertionError: if the same qualified name appears more than once.
    """
    test_dir = get_test_dir(variant_dir) + "/"
    # rf-string so the regex/sed backslashes (\( \) \; \/) stay literal —
    # in the previous plain f-string they were invalid escape sequences
    # (DeprecationWarning; SyntaxWarning since Python 3.12). The runtime
    # string is byte-identical to before.
    shell_command = rf"""find {test_dir} -name "*_ESTest.java" -exec egrep -oH "void test[0-9]+\(\) throws" {{}} \; | sed 's|{test_dir}||1' | sed 's/^\///1; s/\//./g; s/\.java:void /./1; s/() throws//1;'"""
    output = execute_shell_command(shell_command, show_command=False)
    # Drop the trailing empty element produced by the final newline.
    test_cases_names = output.split("\n")[0:-1]
    test_cases_name_set = set(test_cases_names)
    assert len(test_cases_names) == len(
        test_cases_name_set
    ), f"\n---\nDuplicate test case names\nBefore [LIST]: {test_cases_names}\nAfter [SET]: {test_cases_name_set}\n\nVariant dir: {variant_dir}\n---"
    return test_cases_name_set
def generate_junit_test_cases(lib_paths, variant_dir):
    """Generate EvoSuite JUnit tests for a variant's compiled classes.

    Any previously generated tests and the EvoSuite scratch dir are deleted
    first, then EvoSuite CTG is invoked to regenerate tests into the
    variant's test dir.

    Args:
        lib_paths: extra classpath entries appended after the compiled
            classes dir.
        variant_dir: variant whose compiled sources are the generation target.
    """
    logger.info(
        f"Generating JUnit Test for variant [{get_file_name_without_ext(variant_dir)}]"
    )
    compiled_classes_dir = get_compiled_source_classes_dir(variant_dir)
    # Scratch dir name keyed on a hash of the variant path so parallel
    # variants do not collide.
    evosuite_temp_dir = join_path(".evosuite_" + hash_md5(variant_dir))
    delete_dir(evosuite_temp_dir)
    test_cases_dir = get_test_dir(variant_dir, force_mkdir=False)
    delete_dir(test_cases_dir)
    # The command's output log is not inspected here, so the previously
    # unused `output_log` assignment was removed.
    execute_shell_command(
        f'java -jar {EVOSUITE_PLUGIN_PATH}',
        extra_args=[
            {"-projectCP": ":".join([compiled_classes_dir] + lib_paths)},
            {"-seed": 1583738192420},  # fixed seed for reproducible generation
            {"-target": compiled_classes_dir},
            {"-continuous": "execute"},
            {"-Dctg_memory": "4000"},
            {"-Dctg_cores": "4"},
            {"-Dctg_dir": evosuite_temp_dir},
            {"-Dctg_export_folder": test_cases_dir},
        ],
        log_to_file=True)
def get_failed_test_info_from_junit_report(failed_variant_dir):
    """Parse the JUnit HTML report and locate every failed test case.

    For each ``<tr class="Error">`` row that belongs to a "TestCase" table,
    the stack trace is searched for a frame inside the generating test file
    to pinpoint the failing assertion line. When the trace lacks that frame
    (e.g. a ``java.lang.StackOverflowError``), the test source itself is
    scanned for the test method's closing brace instead.

    Returns:
        list[tuple]: (test_case_file_path, test_case_name,
        failed_assertion_line_number) per failed test.
    """
    junit_report_path = get_junit_report_path(failed_variant_dir)
    failed_test_info_list = []
    test_dir = get_test_dir(failed_variant_dir, force_mkdir=False)
    with open(junit_report_path) as input_file:
        soup = BeautifulSoup(input_file, "html.parser")
        for elm in soup.find_all('tr', {'class': 'Error'}):
            # Skip error rows that are not under a "TestCase" heading.
            if not str(elm.parent.previous_sibling.previous_sibling.text
                       ).startswith("TestCase"):
                continue
            test_file_name = elm.parent.find_previous_sibling(
                "a")["name"].strip() + SOURCE_CODE_EXTENSION
            tr = elm
            td_children = list(tr.findChildren("td", recursive=False))
            test_case_name = td_children[0].text.strip()
            test_case_stack_trace_elm = td_children[2].find("code")
            failed_test_info = None
            # Walk the trace's text nodes (skipping the first 5 children,
            # which hold the exception header) for a frame in the test file.
            for code_elm in list(test_case_stack_trace_elm.children)[5:]:
                if not isinstance(code_elm, NavigableString):
                    continue
                trace_info = str(code_elm)
                if test_file_name in trace_info:
                    # NOTE(review): the unescaped '.' before the test name
                    # matches any character — kept as-is to preserve behavior.
                    qualified_test_case_class_name = re.search(
                        rf"([a-zA-Z._]+).{test_case_name}",
                        trace_info).group(1)
                    test_case_file_path = join_path(
                        test_dir,
                        *qualified_test_case_class_name.split(".")[:-1],
                        test_file_name)
                    # rf-string: \d and \) must stay literal regex escapes —
                    # they were invalid escape sequences in the old plain
                    # f-string (SyntaxWarning on Python 3.12+).
                    failed_assertion_line_number = int(
                        re.search(rf"{SOURCE_CODE_EXTENSION}:(\d+)\)",
                                  trace_info).group(1))
                    failed_test_info = (test_case_file_path, test_case_name,
                                        failed_assertion_line_number)
                    break
            if not failed_test_info:
                # "java.lang.StackOverflowError" does not show detail source
                # test file, so all the test is taken
                test_case_file_path = find_file_by_wildcard(
                    test_dir, "**/" + test_file_name, recursive=True)
                test_case_method_signature = TEST_CASE_METHOD_SIGNATURE_TEMPLATE.format(
                    test_case_name=test_case_name)
                # Use a context manager so the file handle is not leaked
                # (was a bare open(...).readlines()).
                with open(test_case_file_path) as test_source_file:
                    source_code_lines = test_source_file.readlines()
                indentation_count = -1
                for index, line in enumerate(source_code_lines):
                    if test_case_method_signature in line:
                        # Remember the method's indentation to find its
                        # matching closing brace.
                        indentation_count = len(line) - len(line.lstrip())
                    elif indentation_count >= 0 and line.startswith(
                            " " * indentation_count + "}"):
                        # EvoSuite's verifyException blocks put the failing
                        # call a fixed number of lines above the brace.
                        if 'verifyException("' in source_code_lines[index - 2]:
                            failed_assertion_line_number = index - 8
                        else:
                            failed_assertion_line_number = index
                        break
                failed_test_info = (test_case_file_path, test_case_name,
                                    failed_assertion_line_number)
            failed_test_info_list.append(failed_test_info)
    return failed_test_info_list