def main() -> None:
    parser = argparse.ArgumentParser(
        description="A script that creates a README and bug_report directory for a test and its result_dir."
    )

    parser.add_argument(
        "source_dir", help="Source directory containing test.json and shaders."
    )

    parser.add_argument(
        "result_dir",
        help="Path to the result_dir of a test containing e.g. the intermediate shader files, log.txt, etc.",
    )

    parser.add_argument(
        "--output_dir",
        help="Output directory where the README and bug_report directory will be written.",
        default=".",
    )

    parsed_args = parser.parse_args(sys.argv[1:])

    source_dir = Path(parsed_args.source_dir)
    result_dir = Path(parsed_args.result_dir)
    output_dir = Path(parsed_args.output_dir)

    check_dir_exists(source_dir)
    check_dir_exists(result_dir)

    test = test_util.metadata_read_from_path(source_dir / test_util.TEST_METADATA)
    binary_manager = binaries_util.get_default_binary_manager(
        settings=Settings()
    ).get_child_binary_manager(
        binary_list=list(test.binaries) + list(test.device.binaries)
    )

    check(
        test.HasField("glsl"), AssertionError("Only glsl tests currently supported")
    )
    check(
        test.device.HasField("preprocess"),
        AssertionError("Only preprocess device tests currently supported"),
    )

    fuzz_test_util.tool_crash_summary_bug_report_dir(
        source_dir, result_dir, output_dir, binary_manager
    )
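# Usage sketch for the entry point above (the script name is a placeholder;
# invoke however this module is exposed in the repository):
#
#   python this_script.py /path/to/source_dir /path/to/result_dir --output_dir bug_output
#
# where source_dir contains the test metadata (test.json, i.e.
# test_util.TEST_METADATA) and the shader files, and result_dir is the
# per-run result directory (intermediate shader files, log.txt, etc.) from
# which the README and bug_report directory are generated.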
def main_helper(  # pylint: disable=too-many-locals, too-many-branches, too-many-statements;
    settings_path: Path,
    iteration_seed_override: Optional[int] = None,
    fuzzing_tool_pattern: Optional[List[FuzzingTool]] = None,
    allow_no_stack_traces: bool = False,
    override_sigint: bool = True,
    use_amber_vulkan_loader: bool = False,
    active_device_names: Optional[List[str]] = None,
    update_ignored_crash_signatures_gerrit_cookie: Optional[str] = None,
) -> None:

    if not fuzzing_tool_pattern:
        fuzzing_tool_pattern = [FuzzingTool.GLSL_FUZZ]

    util.update_gcov_environment_variable_if_needed()

    if override_sigint:
        interrupt_util.override_sigint()

    try_get_root_file()

    settings = settings_util.read_or_create(settings_path)

    binary_manager = binaries_util.get_default_binary_manager(settings=settings)

    temp_dir = Path() / "temp"

    # Note: we use "is not None" so that if the user passes an empty Gerrit cookie, we still try to execute this code.
    if update_ignored_crash_signatures_gerrit_cookie is not None:
        git_tool = util.tool_on_path("git")
        downloaded_graphicsfuzz_tests_dir = (
            temp_dir / f"graphicsfuzz_cts_tests_{get_random_name()[:8]}"
        )
        work_dir = temp_dir / f"graphicsfuzz_cts_run_{get_random_name()[:8]}"
        download_cts_gf_tests.download_cts_graphicsfuzz_tests(
            git_tool=git_tool,
            cookie=update_ignored_crash_signatures_gerrit_cookie,
            output_tests_dir=downloaded_graphicsfuzz_tests_dir,
        )
        download_cts_gf_tests.extract_shaders(
            tests_dir=downloaded_graphicsfuzz_tests_dir, binaries=binary_manager
        )
        with util.file_open_text(work_dir / "results.csv", "w") as results_out_handle:
            run_cts_gf_tests.main_helper(
                tests_dir=downloaded_graphicsfuzz_tests_dir,
                work_dir=work_dir,
                binaries=binary_manager,
                settings=settings,
                active_devices=devices_util.get_active_devices(settings.device_list),
                results_out_handle=results_out_handle,
                updated_settings_output_path=settings_path,
            )
        return

    active_devices = devices_util.get_active_devices(
        settings.device_list, active_device_names=active_device_names
    )

    # Add host_preprocessor device from device list if it is missing.
    if not active_devices[0].HasField("preprocess"):
        for device in settings.device_list.devices:
            if device.HasField("preprocess"):
                active_devices.insert(0, device)
                break

    # Add host_preprocessor device (from scratch) if it is still missing.
    if not active_devices[0].HasField("preprocess"):
        active_devices.insert(
            0, Device(name="host_preprocessor", preprocess=DevicePreprocess())
        )

    reports_dir = Path() / "reports"
    fuzz_failures_dir = reports_dir / FUZZ_FAILURES_DIR_NAME
    references_dir = Path() / REFERENCES_DIR
    donors_dir = Path() / DONORS_DIR
    spirv_fuzz_shaders_dir = Path() / "spirv_fuzz_shaders"

    # Log a warning if there is no tool on the PATH for printing stack traces.
    prepended = util.prepend_catchsegv_if_available([], log_warning=True)
    if not allow_no_stack_traces and not prepended:
        raise AssertionError("Stopping because we cannot get stack traces.")

    spirv_fuzz_shaders: List[Path] = []
    references: List[Path] = []

    if FuzzingTool.SPIRV_FUZZ in fuzzing_tool_pattern:
        check_dir_exists(spirv_fuzz_shaders_dir)
        spirv_fuzz_shaders = sorted(spirv_fuzz_shaders_dir.rglob("*.json"))

    if FuzzingTool.GLSL_FUZZ in fuzzing_tool_pattern:
        check_dir_exists(references_dir)
        check_dir_exists(donors_dir)
        # TODO: make GraphicsFuzz find donors recursively.
        references = sorted(references_dir.rglob("*.json"))
        # Filter to only include .json files that have at least one shader (.frag, .vert, .comp) file.
        references = [
            ref for ref in references if shader_job_util.get_related_files(ref)
        ]

    if use_amber_vulkan_loader:
        library_path = binary_manager.get_binary_path_by_name(
            binaries_util.AMBER_VULKAN_LOADER_NAME
        ).path.parent
        util.add_library_paths_to_environ([library_path], os.environ)

    fuzzing_tool_index = 0

    while True:
        interrupt_util.interrupt_if_needed()

        # We have to use "is not None" because the seed could be 0.
        if iteration_seed_override is not None:
            iteration_seed = iteration_seed_override
        else:
            iteration_seed = secrets.randbits(ITERATION_SEED_BITS)

        log(f"Iteration seed: {iteration_seed}")
        random.seed(iteration_seed)

        staging_name = get_random_name()[:8]
        staging_dir = temp_dir / staging_name

        try:
            util.mkdir_p_new(staging_dir)
        except FileExistsError:
            if iteration_seed_override is not None:
                raise
            log(f"Staging directory already exists: {str(staging_dir)}")
            log("Starting new iteration.")
            continue

        # Pseudocode:
        #  - Create test_dir(s) in staging directory.
        #  - Run test_dir(s) on all active devices (stop early if appropriate).
        #  - For each test failure on each device, copy the test to reports_dir, adding the device and crash signature.
        #  - Reduce each report (on the given device).
        #  - Produce a summary for each report.

        fuzzing_tool = fuzzing_tool_pattern[fuzzing_tool_index]
        fuzzing_tool_index = (fuzzing_tool_index + 1) % len(fuzzing_tool_pattern)

        if fuzzing_tool == FuzzingTool.SPIRV_FUZZ:
            fuzz_spirv_test.fuzz_spirv(
                staging_dir,
                reports_dir,
                fuzz_failures_dir,
                active_devices,
                spirv_fuzz_shaders,
                settings,
                binary_manager,
            )
        elif fuzzing_tool == FuzzingTool.GLSL_FUZZ:
            fuzz_glsl_test.fuzz_glsl(
                staging_dir,
                reports_dir,
                fuzz_failures_dir,
                active_devices,
                references,
                donors_dir,
                settings,
                binary_manager,
            )
        else:
            raise AssertionError(f"Unknown fuzzing tool: {fuzzing_tool}")

        if iteration_seed_override is not None:
            log("Stopping due to iteration_seed")
            break

        shutil.rmtree(staging_dir)
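# Standalone sketch of the |fuzzing_tool_pattern| round-robin used above: the
# index advances modulo the pattern length, so a pattern such as
# [GLSL_FUZZ, GLSL_FUZZ, SPIRV_FUZZ] interleaves two glsl-fuzz iterations with
# one spirv-fuzz iteration. The demo below uses plain strings as stand-ins for
# FuzzingTool values and is illustrative only.
def _fuzzing_tool_pattern_demo() -> None:
    pattern = ["GLSL_FUZZ", "GLSL_FUZZ", "SPIRV_FUZZ"]
    index = 0
    for _ in range(6):
        tool = pattern[index]
        index = (index + 1) % len(pattern)
        print(tool)  # GLSL_FUZZ, GLSL_FUZZ, SPIRV_FUZZ, GLSL_FUZZ, GLSL_FUZZ, SPIRV_FUZZ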
def main_helper(  # pylint: disable=too-many-locals, too-many-branches, too-many-statements;
    settings_path: Path,
    iteration_seed_override: Optional[int],
    use_spirv_fuzz: bool,
    force_no_stack_traces: bool,
) -> None:

    util.update_gcov_environment_variable_if_needed()

    try:
        artifact_util.artifact_path_get_root()
    except FileNotFoundError:
        log(
            "Could not find ROOT file (in the current directory or above) to mark where binaries should be stored. "
            "Creating a ROOT file in the current directory."
        )
        util.file_write_text(Path(artifact_util.ARTIFACT_ROOT_FILE_NAME), "")

    settings = settings_util.read_or_create(settings_path)

    active_devices = devices_util.get_active_devices(settings.device_list)

    reports_dir = Path() / "reports"
    fuzz_failures_dir = reports_dir / FUZZ_FAILURES_DIR_NAME
    temp_dir = Path() / "temp"
    references_dir = Path() / "references"
    donors_dir = Path() / "donors"
    spirv_fuzz_shaders_dir = Path() / "spirv_fuzz_shaders"

    # Log a warning if there is no tool on the PATH for printing stack traces.
    prepended = util.prepend_catchsegv_if_available([], log_warning=True)
    if not force_no_stack_traces and not prepended:
        raise AssertionError("Stopping because we cannot get stack traces.")

    spirv_fuzz_shaders: List[Path] = []
    references: List[Path] = []

    if use_spirv_fuzz:
        check_dir_exists(spirv_fuzz_shaders_dir)
        spirv_fuzz_shaders = sorted(spirv_fuzz_shaders_dir.rglob("*.json"))
    else:
        check_dir_exists(references_dir)
        check_dir_exists(donors_dir)
        # TODO: make GraphicsFuzz find donors recursively.
        references = sorted(references_dir.rglob("*.json"))
        # Filter to only include .json files that have at least one shader (.frag, .vert, .comp) file.
        references = [
            ref for ref in references if shader_job_util.get_related_files(ref)
        ]

    binary_manager = binaries_util.get_default_binary_manager(
        settings=settings
    ).get_child_binary_manager(list(settings.custom_binaries), prepend=True)

    while True:
        # We have to use "is not None" because the seed could be 0.
        if iteration_seed_override is not None:
            iteration_seed = iteration_seed_override
        else:
            iteration_seed = secrets.randbits(ITERATION_SEED_BITS)

        log(f"Iteration seed: {iteration_seed}")
        random.seed(iteration_seed)

        staging_name = get_random_name()[:8]
        staging_dir = temp_dir / staging_name

        try:
            util.mkdir_p_new(staging_dir)
        except FileExistsError:
            if iteration_seed_override is not None:
                raise
            log(f"Staging directory already exists: {str(staging_dir)}")
            log("Starting new iteration.")
            continue

        # Pseudocode:
        #  - Create test_dir(s) in staging directory.
        #  - Run test_dir(s) on all active devices (stop early if appropriate).
        #  - For each test failure on each device, copy the test to reports_dir, adding the device and crash signature.
        #  - Reduce each report (on the given device).
        #  - Produce a summary for each report.

        if use_spirv_fuzz:
            fuzz_spirv_test.fuzz_spirv(
                staging_dir,
                reports_dir,
                fuzz_failures_dir,
                active_devices,
                spirv_fuzz_shaders,
                settings,
                binary_manager,
            )
        else:
            fuzz_glsl_test.fuzz_glsl(
                staging_dir,
                reports_dir,
                fuzz_failures_dir,
                active_devices,
                references,
                donors_dir,
                settings,
                binary_manager,
            )

        shutil.rmtree(staging_dir)

        if iteration_seed_override is not None:
            log("Stopping due to iteration_seed")
            break
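# Reproducibility sketch for the iteration seed logic above: each iteration
# seeds Python's PRNG with a fresh random value (or with
# |iteration_seed_override|), so a failing iteration can be replayed by passing
# the logged seed back in. The bit width below is illustrative and may differ
# from ITERATION_SEED_BITS.
def _seed_replay_demo() -> None:
    seed = secrets.randbits(256)
    random.seed(seed)
    first = [random.random() for _ in range(3)]
    random.seed(seed)  # Re-seeding with the same value reproduces the sequence.
    assert first == [random.random() for _ in range(3)]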
def main() -> None:  # pylint: disable=too-many-locals;
    parser = argparse.ArgumentParser(
        description="Classifies spirv-fuzz tests via the set of remaining transformation types."
    )

    parser.add_argument(
        "--tests_dir",
        help="The directory in which to search for tests by looking for summary/ directories.",
        default=str(Path("") / "reports" / "crashes" / "bad_image"),
    )

    parsed_args = parser.parse_args(sys.argv[1:])

    tests_dir: Path = Path(parsed_args.tests_dir)

    check_dir_exists(tests_dir)

    summary_dirs: List[Path] = list(tests_dir.glob("**/summary"))
    summary_dirs = [d for d in summary_dirs if d.is_dir()]
    summary_dirs = sorted(summary_dirs)

    check(
        bool(summary_dirs),
        AssertionError(f"No summary dirs found under {str(tests_dir)}"),
    )

    signature_to_dirs: Dict[str, List[Path]] = {}

    # For each summary directory, get its signature based on the transformation types remaining
    # and add the info to |signature_to_dirs|.
    for summary_dir in summary_dirs:
        log(f"Checking {summary_dir}")
        transformations_json = util.file_read_text(
            summary_dir / "reduced_1" / "variant" / "shader.frag.transformations_json"
        )
        transformations = json.loads(transformations_json)
        # E.g.
        # {
        #   "transformation": [
        #     {
        #       "addConstantScalar": {...}
        #     },
        #     {
        #       "addConstantComposite": {...}
        #     },
        #     ...,
        #   ]
        # }
        transformation_types: Set[str] = set()

        transformation_list = transformations["transformation"]
        check(
            bool(transformation_list),
            AssertionError(
                f"No transformations found for {str(transformations_json)}"
            ),
        )
        for transformation in transformation_list:
            keys = transformation.keys()
            check(
                len(keys) == 1,
                AssertionError(
                    f"Transformation had more than one key: {transformation}"
                ),
            )
            transformation_types.add(list(keys)[0])

        transformation_types -= COMMON_TRANSFORMATION_TYPES

        transformation_types_sorted = sorted(transformation_types)
        signature = "_".join(transformation_types_sorted)
        log(f"signature: {signature}")

        # Add to or update the map.
        signature_to_dirs.setdefault(signature, []).append(summary_dir)

    log("\n\nTable:\n")
    for signature, cases in sorted(signature_to_dirs.items(), key=lambda item: item[0]):
        log(f"{signature}:")
        for case in cases:
            log(f"  {str(case)}")
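# Worked example of the signature computation above (the transformations data
# below is made up, and a literal set stands in for COMMON_TRANSFORMATION_TYPES,
# which is assumed here to contain "addConstantScalar"):
def _signature_demo() -> None:
    transformations = {
        "transformation": [
            {"addConstantScalar": {}},
            {"addDeadBlock": {}},
            {"replaceIdWithSynonym": {}},
        ]
    }
    types = {list(t.keys())[0] for t in transformations["transformation"]}
    types -= {"addConstantScalar"}  # Stand-in for COMMON_TRANSFORMATION_TYPES.
    print("_".join(sorted(types)))  # Prints: addDeadBlock_replaceIdWithSynonym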