def main() -> None:
    """Splits a whitespace-separated seed file into files of 1000 seeds each.

    Reads the seed file given on the command line and writes the seeds out as
    seeds_0.txt, seeds_1.txt, etc., in the current directory.

    Raises (via check) if the number of seeds is not a multiple of the chunk
    size, so no partial chunk is ever written.
    """
    parser = argparse.ArgumentParser(description="Splits a seed file.")

    parser.add_argument(
        "seed_file",
        help="Seed file to process.",
    )

    parsed_args = parser.parse_args(sys.argv[1:])

    seed_file: Path = Path(parsed_args.seed_file)

    # Get chunks of seeds and write one file per chunk.
    seeds: List[str] = util.file_read_text(seed_file).split()
    chunk_size = 1000

    check(
        (len(seeds) % chunk_size) == 0,
        AssertionError("The number of seeds should be a multiple of chunk_size."),
    )

    num_chunks = len(seeds) // chunk_size
    for chunk_num in range(num_chunks):
        # Slicing replaces the manual running-index bookkeeping of the
        # original loop; each slice is exactly chunk_size seeds.
        chunk = seeds[chunk_num * chunk_size : (chunk_num + 1) * chunk_size]
        util.file_write_text(Path(f"seeds_{chunk_num}.txt"), " ".join(chunk) + "\n")

    check(
        num_chunks * chunk_size == len(seeds),
        AssertionError("Expected to have processed all seeds."),
    )
def spirv_asm_shader_job_to_amber_script(
    shader_job_file_amber_test: ShaderJobFileBasedAmberTest,
    output_amber_script_file_path: Path,
    amberfy_settings: AmberfySettings,
) -> Path:
    """Converts a SPIR-V assembly shader job Amber test into an Amber script file.

    Dispatches on the type of the first variant (graphics vs. compute),
    writes the resulting Amber script text to |output_amber_script_file_path|
    and returns that path.
    """
    variant_json_paths = [
        str(variant.asm_spirv_shader_job_json)
        for variant in shader_job_file_amber_test.variants_asm_spirv_job
    ]
    reference_part = ""
    if shader_job_file_amber_test.reference_asm_spirv_job:
        reference_part = f"with reference {str(shader_job_file_amber_test.reference_asm_spirv_job.asm_spirv_shader_job_json)} "
    log(
        f"Amberfy: {variant_json_paths} "
        + reference_part
        + f"to {str(output_amber_script_file_path)}"
    )

    shader_job_amber_test = shader_job_file_amber_test.to_shader_job_based()

    first_variant = shader_job_amber_test.variants[0]
    if isinstance(first_variant, GraphicsShaderJob):
        result = graphics_shader_job_amber_test_to_amber_script(
            shader_job_amber_test, amberfy_settings
        )
    elif isinstance(first_variant, ComputeShaderJob):
        result = compute_shader_job_amber_test_to_amber_script(
            shader_job_amber_test, amberfy_settings
        )
    else:
        raise AssertionError(
            f"Unknown shader job type: {shader_job_amber_test.variants[0]}"
        )

    util.file_write_text(output_amber_script_file_path, result)

    return output_amber_script_file_path
def message_to_file(
    message: Message,
    output_file_path: Path,
    including_default_value_fields: bool = True,
) -> Path:
    """Serializes |message| as JSON and writes it to |output_file_path|.

    Returns |output_file_path| so calls can be chained.
    """
    serialized = message_to_json(message, including_default_value_fields)
    util.file_write_text(output_file_path, serialized)
    return output_file_path
def write_status(
    result_output_dir: Path, status: str, bad_shader_name: Optional[str] = None
) -> None:
    """Writes |status| to the status file under |result_output_dir|.

    When |bad_shader_name| is given, it is additionally recorded in the
    bad-shader-name file alongside the status.
    """
    status_file = get_status_path(result_output_dir)
    util.file_write_text(status_file, status)
    if not bad_shader_name:
        return
    bad_shader_file = get_status_bad_shader_name_path(result_output_dir)
    util.file_write_text(bad_shader_file, bad_shader_name)
def try_get_root_file() -> Path:
    """Returns the path of the ROOT marker file, creating one if none exists.

    Searches the current directory and its ancestors; on failure, writes an
    empty ROOT file in the current directory and returns its path.
    """
    try:
        return artifact_util.artifact_path_get_root()
    except FileNotFoundError:
        log(
            "Could not find ROOT file (in the current directory or above) to mark where binaries should be stored. "
            "Creating a ROOT file in the current directory."
        )
        root_file = Path(artifact_util.ARTIFACT_ROOT_FILE_NAME)
        return util.file_write_text(root_file, "")
def run_generate_on_shader_job(
    spirv_fuzz_path: Path,
    reference_shader_json: Path,
    output_shader_json: Path,
    donor_shader_job_paths: Optional[List[Path]] = None,
    seed: Optional[str] = None,
    other_args: Optional[List[str]] = None,
) -> Path:
    """Runs spirv-fuzz generate on every SPIR-V shader of a shader job.

    Copies the reference shader job JSON to |output_shader_json|, then for
    each related SPIR-V shader suffix that exists, writes a donors list file
    next to the reference JSON and invokes run_generate on that shader.

    Returns |output_shader_json|.
    """
    if donor_shader_job_paths is None:
        donor_shader_job_paths = []

    util.copy_file(reference_shader_json, output_shader_json)

    suffixes_that_exist = shader_job_util.get_related_suffixes_that_exist(
        reference_shader_json, shader_job_util.EXT_ALL, [shader_job_util.SUFFIX_SPIRV]
    )

    for suffix in suffixes_that_exist:
        # Create a donors list file "donors.{suffix}.txt" containing the file paths to all relevant donor .spv shaders.
        # str.join avoids the quadratic string concatenation of the original
        # "+=" loop; the output text is identical.
        donor_list_contents = "".join(
            f"{str(donor_shader_path)}\n"
            for donor_shader_path in (
                donor_shader_job_path.with_suffix(suffix)
                for donor_shader_job_path in donor_shader_job_paths
            )
            if donor_shader_path.exists()
        )
        donors_list_path = util.file_write_text(
            reference_shader_json.parent / f"donors{suffix}.txt", donor_list_contents
        )

        run_generate(
            spirv_fuzz_path,
            reference_shader_json.with_suffix(suffix),
            output_shader_json.with_suffix(suffix),
            donors_list_path=donors_list_path,
            seed=seed,
            other_args=other_args,
        )

    return output_shader_json
def write_shader(
    shader_asm: str,
    amber_file: Path,
    output_dir: Path,
    shader_type: str,
    shader_name: str,
    binaries: binaries_util.BinaryManager,
) -> List[Path]:
    """Writes a shader's assembly, assembled SPIR-V binary and llpc test file.

    Writes three files into |output_dir| (the .spv is produced by running
    spirv-as on the written assembly) and returns the list of written paths.
    """
    shader_type_to_suffix = {
        "fragment": shader_job_util.EXT_FRAG,
        "vertex": shader_job_util.EXT_VERT,
        "compute": shader_job_util.EXT_COMP,
    }
    shader_type_suffix = shader_type_to_suffix[shader_type]

    base_name = f"{amber_file.stem}.{shader_name}{shader_type_suffix}"

    # E.g. ifs-and-whiles.variant_fragment_shader.frag.asm
    shader_asm_file_path = output_dir / (base_name + shader_job_util.SUFFIX_ASM_SPIRV)

    # E.g. ifs-and-whiles.variant_fragment_shader.frag.spv
    shader_spirv_file_path = output_dir / (base_name + shader_job_util.SUFFIX_SPIRV)

    # E.g. dEQP-VK.graphicsfuzz.ifs-and-whiles.variant_fragment_shader.spvas
    # These files can be added to the llpc repo as a shader test.
    shader_llpc_asm_test_file_path = output_dir / (
        f"dEQP-VK.graphicsfuzz.{amber_file.stem}.{shader_name}.spvas"
    )

    files_written: List[Path] = []

    util.file_write_text(shader_asm_file_path, shader_asm)
    files_written.append(shader_asm_file_path)

    spirv_as_path = binaries.get_binary_path_by_name("spirv-as").path
    subprocess_util.run(
        [
            str(spirv_as_path),
            "-o",
            str(shader_spirv_file_path),
            str(shader_asm_file_path),
            "--target-env",
            "spv1.0",
        ],
        verbose=True,
    )
    files_written.append(shader_spirv_file_path)

    util.file_write_text(
        shader_llpc_asm_test_file_path,
        """; BEGIN_SHADERTEST
; RUN: amdllpc -verify-ir -spvgen-dir=%spvgendir% -v %gfxip %s | FileCheck -check-prefix=SHADERTEST %s
; SHADERTEST-LABEL: {{^// LLPC.*}} SPIRV-to-LLVM translation results
; SHADERTEST: AMDLLPC SUCCESS
; END_SHADERTEST
;
"""
        + f"; Based on dEQP-VK.graphicsfuzz.{amber_file.stem}\n\n"
        + shader_asm,
    )
    files_written.append(shader_llpc_asm_test_file_path)

    return files_written
def run_amber_on_device_helper(
    amber_script_file: Path,
    output_dir: Path,
    dump_image: bool,
    dump_buffer: bool,
    skip_render: bool = False,
    serial: Optional[str] = None,
) -> Path:
    """Runs an Amber script on an Android device via adb.

    Pushes |amber_script_file| to the device, runs Amber with arguments
    derived from |dump_image|/|dump_buffer|/|skip_render|, pulls the on-device
    result directory into |output_dir|, dumps logcat, and writes a status file
    (SUCCESS / CRASH / TIMEOUT / UNRESPONSIVE / UNEXPECTED_ERROR) into
    |output_dir|. Returns |output_dir|.
    """
    next_logcat_timestamp_after_clear = prepare_device(wait_for_screen=True, serial=serial)

    adb_check(serial, ["push", str(amber_script_file), ANDROID_DEVICE_AMBER_SCRIPT_FILE])

    amber_args = [
        "-d",  # Disables validation layers.
        ANDROID_DEVICE_AMBER_SCRIPT_FILE,
        "--log-graphics-calls-time",
        "--disable-spirv-val",
    ]

    if skip_render:
        # -ps tells amber to stop after pipeline creation
        amber_args.append("-ps")
    else:
        if dump_image:
            amber_args += [
                "-I",
                "variant_framebuffer",
                "-i",
                f"{ANDROID_DEVICE_RESULT_DIR}/{fuzz.VARIANT_IMAGE_FILE_NAME}",
                "-I",
                "reference_framebuffer",
                "-i",
                f"{ANDROID_DEVICE_RESULT_DIR}/{fuzz.REFERENCE_IMAGE_FILE_NAME}",
            ]
        if dump_buffer:
            amber_args += [
                "-b",
                f"{ANDROID_DEVICE_RESULT_DIR}/{fuzz.BUFFER_FILE_NAME}",
                "-B",
                "0",
            ]

    cmd = [
        "shell",
        get_amber_adb_shell_cmd(amber_args),
    ]

    status = "UNEXPECTED_ERROR"
    # |result| stays None when the run times out, so the result-handling
    # branch below is skipped and the TIMEOUT status is preserved.
    result: Optional[types.CompletedProcess] = None

    # Before running, try to ensure the app is not already running.
    adb_can_fail(serial, ["shell", "am force-stop com.google.amber"])

    try:
        result = adb_can_fail(serial, cmd, verbose=True, timeout=fuzz.AMBER_RUN_TIME_LIMIT)
    except subprocess.TimeoutExpired:
        status = fuzz.STATUS_TIMEOUT
        # Kill the app so a hung run does not interfere with later runs.
        adb_can_fail(serial, ["shell", "am force-stop com.google.amber"])

    try:
        if result:
            if result.returncode != 0:
                # NOTE(review): message mentions "am instrument" but the
                # command above runs Amber via an adb shell command —
                # presumably a leftover from an older launch mechanism.
                log(
                    "WARNING: am instrument command failed, which is unexpected, even if the GPU driver crashed!"
                )
                status = fuzz.STATUS_CRASH
            elif "shortMsg=Process crashed" in result.stdout:
                status = fuzz.STATUS_CRASH
            else:
                status = fuzz.STATUS_SUCCESS

            adb_check(
                serial,
                # The /. syntax means the contents of the results directory will be copied into output_dir.
                ["pull", ANDROID_DEVICE_RESULT_DIR + "/.", str(output_dir)],
            )

            gflogging.log_a_file(output_dir / "amber_stdout.txt")
            gflogging.log_a_file(output_dir / "amber_stderr.txt")

        # Grab the log.
        logcat_dump_cmd = ["logcat", "-d"]
        if next_logcat_timestamp_after_clear:
            # Only include logcat events after the previous logcat clear.
            logcat_dump_cmd += ["-T", next_logcat_timestamp_after_clear]
        # Use a short time limit to increase the chance of detecting a device reboot.
        adb_check(serial, logcat_dump_cmd, verbose=True, timeout=ADB_SHORT_LOGCAT_TIME_LIMIT)
    except subprocess.SubprocessError:
        # If we fail in getting the results directory or log, assume the device has rebooted.
        status = fuzz.STATUS_UNRESPONSIVE

    log("\nSTATUS " + status + "\n")

    # NOTE(review): other helpers in this project call util.file_write_text;
    # confirm file_write_text is imported directly in this module.
    file_write_text(result_util.get_status_path(output_dir), status)

    return output_dir
def tool_crash_summary_bug_report_dir(  # pylint: disable=too-many-locals;
    reduced_glsl_source_dir: Path,
    variant_reduced_glsl_result_dir: Path,
    output_dir: Path,
    binary_manager: binaries_util.BinaryManager,
) -> Optional[Path]:
    """Prepares a bug report directory plus README for a tool crash.

    Copies |variant_reduced_glsl_result_dir| into |output_dir|/bug_report and
    writes |output_dir|/README.md describing tool versions, reproduction
    commands and the shader files involved. Returns the bug_report directory,
    or None if the reduced source has no shader job file.
    """
    # Create a simple script and README.

    shader_job = reduced_glsl_source_dir / test_util.VARIANT_DIR / test_util.SHADER_JOB

    if not shader_job.is_file():
        return None

    test_metadata: Test = test_util.metadata_read_from_path(
        reduced_glsl_source_dir / test_util.TEST_METADATA
    )

    shader_files = shader_job_util.get_related_files(
        shader_job,
        shader_job_util.EXT_ALL,
        (shader_job_util.SUFFIX_GLSL, shader_job_util.SUFFIX_SPIRV),
    )
    check(
        len(shader_files) > 0,
        AssertionError(f"Need at least one shader for {shader_job}"),
    )
    # E.g. ".frag" — used below to classify the copied shader files.
    shader_extension = shader_files[0].suffix

    bug_report_dir = util.copy_dir(variant_reduced_glsl_result_dir, output_dir / "bug_report")

    # Re-scan the copied directory; |shader_files| now refers to the copies.
    shader_files = sorted(bug_report_dir.rglob("shader.*"))

    glsl_files = [
        shader_file for shader_file in shader_files if shader_file.suffix == shader_extension
    ]

    asm_files = [
        shader_file
        for shader_file in shader_files
        if shader_file.name.endswith(shader_extension + shader_job_util.SUFFIX_ASM_SPIRV)
    ]

    spv_files = [
        shader_file
        for shader_file in shader_files
        if shader_file.name.endswith(shader_extension + shader_job_util.SUFFIX_SPIRV)
    ]

    readme = "\n\n"
    readme += (
        "Issue found using [GraphicsFuzz](https://github.com/google/graphicsfuzz).\n\n"
    )
    readme += "Tool versions:\n\n"

    # noinspection PyTypeChecker
    if test_metadata.HasField("glsl"):
        readme += f"* glslangValidator commit hash: {binary_manager.get_binary_by_name(binaries_util.GLSLANG_VALIDATOR_NAME).version}\n"

    if test_metadata.glsl.spirv_opt_args or test_metadata.spirv_fuzz.spirv_opt_args:
        readme += f"* spirv-opt commit hash: {binary_manager.get_binary_by_name(binaries_util.SPIRV_OPT_NAME).version}\n"

    readme += "\nTo reproduce:\n\n"
    readme += f"`glslangValidator -V shader{shader_extension} -o shader{shader_extension}.spv`\n\n"

    if (
        test_metadata.HasField("glsl")
        and spv_files
        and not test_metadata.glsl.spirv_opt_args
    ):
        # GLSL was converted to SPIR-V, and spirv-opt was not run, so indicate that we should validate the SPIR-V.
        readme += f"`spirv-val shader{shader_extension}.spv`\n\n"

    if test_metadata.glsl.spirv_opt_args or test_metadata.spirv_fuzz.spirv_opt_args:
        readme += f"`spirv-opt shader{shader_extension}.spv -o temp.spv --validate-after-all {' '.join(test_metadata.glsl.spirv_opt_args)}`\n\n"

    files_to_list = glsl_files + spv_files + asm_files
    files_to_list.sort()

    # Only text shaders (GLSL and SPIR-V assembly) are shown inline.
    files_to_show = glsl_files + asm_files
    files_to_show.sort()

    readme += "The following shader files are included in the attached archive, some of which are also shown inline below:\n\n"

    for file_to_list in files_to_list:
        short_path = file_to_list.relative_to(bug_report_dir).as_posix()
        readme += f"* {short_path}\n"

    for file_to_show in files_to_show:
        short_path = file_to_show.relative_to(bug_report_dir).as_posix()
        file_contents = util.file_read_text(file_to_show)
        readme += f"\n{short_path}:\n\n"
        readme += f"```\n{file_contents}\n```\n"

    util.file_write_text(output_dir / "README.md", readme)

    return bug_report_dir
def metadata_write_to_path(metadata: Test, test_metadata_path: Path) -> Path:
    """Serializes |metadata| to JSON and writes it to |test_metadata_path|.

    Returns |test_metadata_path| so calls can be chained.
    """
    util.file_write_text(test_metadata_path, proto_util.message_to_json(metadata))
    return test_metadata_path
def run_amber_helper(
    amber_script_file: Path,
    output_dir: Path,
    dump_image: bool,
    dump_buffer: bool,
    amber_path: Path,
    skip_render: bool = False,
    debug_layers: bool = False,
    icd: Optional[Path] = None,
) -> Path:
    """Runs an Amber script on the host and records the outcome.

    Builds the Amber command line from the dump/skip/debug options, runs it
    with a time limit (optionally under a chosen Vulkan ICD), then writes a
    status file (SUCCESS / CRASH / TIMEOUT / UNEXPECTED_ERROR) into
    |output_dir|. Returns |output_dir|.
    """
    variant_image_file = output_dir / fuzz.VARIANT_IMAGE_FILE_NAME
    reference_image_file = output_dir / fuzz.REFERENCE_IMAGE_FILE_NAME
    buffer_file = output_dir / fuzz.BUFFER_FILE_NAME

    cmd = [
        str(amber_path),
        str(amber_script_file),
        "--log-graphics-calls-time",
        "--disable-spirv-val",
    ]

    if not debug_layers:
        cmd.append("-d")

    if skip_render:
        # -ps tells amber to stop after pipeline creation
        cmd.append("-ps")
    else:
        if dump_image:
            cmd += [
                "-I",
                "variant_framebuffer",
                "-i",
                str(variant_image_file),
                "-I",
                "reference_framebuffer",
                "-i",
                str(reference_image_file),
            ]
        if dump_buffer:
            cmd += [
                "-b",
                str(buffer_file),
                "-B",
                "0",
            ]

    cmd = util.prepend_catchsegv_if_available(cmd)

    status = "UNEXPECTED_ERROR"
    # |result| remains None on timeout, leaving the TIMEOUT status in place.
    result: Optional[types.CompletedProcess] = None
    env: Optional[Dict[str, str]] = None
    if icd:
        env = {"VK_ICD_FILENAMES": str(icd)}

    try:
        result = subprocess_util.run(
            cmd,
            check_exit_code=False,
            timeout=fuzz.AMBER_RUN_TIME_LIMIT,
            verbose=True,
            env=env,
        )
    except subprocess.TimeoutExpired:
        status = fuzz.STATUS_TIMEOUT

    if result:
        status = fuzz.STATUS_SUCCESS if result.returncode == 0 else fuzz.STATUS_CRASH

    log("\nSTATUS " + status + "\n")

    util.file_write_text(result_util.get_status_path(output_dir), status)

    return output_dir
def main_helper(  # pylint: disable=too-many-locals, too-many-branches, too-many-statements;
    settings_path: Path,
    iteration_seed_override: Optional[int],
    use_spirv_fuzz: bool,
    force_no_stack_traces: bool,
) -> None:
    """Main fuzzing loop.

    Sets up the working directories and binary manager, then repeatedly
    creates a staging directory, seeds the RNG, and runs one fuzzing
    iteration (GLSL-based or spirv-fuzz-based) across all active devices.
    Loops forever unless |iteration_seed_override| is set, in which case
    exactly one iteration runs.
    """
    util.update_gcov_environment_variable_if_needed()

    # Ensure a ROOT marker file exists so binaries have a storage location.
    try:
        artifact_util.artifact_path_get_root()
    except FileNotFoundError:
        log(
            "Could not find ROOT file (in the current directory or above) to mark where binaries should be stored. "
            "Creating a ROOT file in the current directory."
        )
        util.file_write_text(Path(artifact_util.ARTIFACT_ROOT_FILE_NAME), "")

    settings = settings_util.read_or_create(settings_path)

    active_devices = devices_util.get_active_devices(settings.device_list)

    # All working directories are relative to the current directory.
    reports_dir = Path() / "reports"
    fuzz_failures_dir = reports_dir / FUZZ_FAILURES_DIR_NAME
    temp_dir = Path() / "temp"
    references_dir = Path() / "references"
    donors_dir = Path() / "donors"
    spirv_fuzz_shaders_dir = Path() / "spirv_fuzz_shaders"

    # Log a warning if there is no tool on the PATH for printing stack traces.
    prepended = util.prepend_catchsegv_if_available([], log_warning=True)
    if not force_no_stack_traces and not prepended:
        raise AssertionError("Stopping because we cannot get stack traces.")

    spirv_fuzz_shaders: List[Path] = []
    references: List[Path] = []

    if use_spirv_fuzz:
        check_dir_exists(spirv_fuzz_shaders_dir)
        spirv_fuzz_shaders = sorted(spirv_fuzz_shaders_dir.rglob("*.json"))
    else:
        check_dir_exists(references_dir)
        check_dir_exists(donors_dir)
        # TODO: make GraphicsFuzz find donors recursively.
        references = sorted(references_dir.rglob("*.json"))
        # Filter to only include .json files that have at least one shader (.frag, .vert, .comp) file.
        references = [
            ref for ref in references if shader_job_util.get_related_files(ref)
        ]

    # Custom binaries take precedence over the defaults.
    binary_manager = binaries_util.get_default_binary_manager(
        settings=settings
    ).get_child_binary_manager(list(settings.custom_binaries), prepend=True)

    while True:
        # We have to use "is not None" because the seed could be 0.
        if iteration_seed_override is not None:
            iteration_seed = iteration_seed_override
        else:
            iteration_seed = secrets.randbits(ITERATION_SEED_BITS)

        log(f"Iteration seed: {iteration_seed}")
        random.seed(iteration_seed)

        staging_name = get_random_name()[:8]
        staging_dir = temp_dir / staging_name

        try:
            util.mkdir_p_new(staging_dir)
        except FileExistsError:
            # With a fixed seed the staging name is deterministic, so a
            # collision means this exact iteration already ran: re-raise.
            if iteration_seed_override is not None:
                raise
            # Otherwise just pick a new seed and retry.
            log(f"Staging directory already exists: {str(staging_dir)}")
            log(f"Starting new iteration.")
            continue

        # Pseudocode:
        #  - Create test_dir(s) in staging directory.
        #  - Run test_dir(s) on all active devices (stop early if appropriate).
        #  - For each test failure on each device, copy the test to reports_dir, adding the device and crash signature.
        #  - Reduce each report (on the given device).
        #  - Produce a summary for each report.

        if use_spirv_fuzz:
            fuzz_spirv_test.fuzz_spirv(
                staging_dir,
                reports_dir,
                fuzz_failures_dir,
                active_devices,
                spirv_fuzz_shaders,
                settings,
                binary_manager,
            )
        else:
            fuzz_glsl_test.fuzz_glsl(
                staging_dir,
                reports_dir,
                fuzz_failures_dir,
                active_devices,
                references,
                donors_dir,
                settings,
                binary_manager,
            )

        # Iteration results have been copied out to reports_dir; discard staging.
        shutil.rmtree(staging_dir)

        if iteration_seed_override is not None:
            log("Stopping due to iteration_seed")
            break