def main() -> None:
    """Splits a seed file into numbered files of 1000 seeds each.

    Reads whitespace-separated seeds from the given file and writes them to
    seeds_0.txt, seeds_1.txt, ... in the current directory.
    """
    arg_parser = argparse.ArgumentParser(description="Splits a seed file.")
    arg_parser.add_argument(
        "seed_file",
        help="Seed file to process.",
    )
    args = arg_parser.parse_args(sys.argv[1:])
    seed_path: Path = Path(args.seed_file)

    # Read all seeds up front; they must divide evenly into chunks.
    all_seeds: List[str] = util.file_read_text(seed_path).split()
    chunk_size = 1000
    check(
        (len(all_seeds) % chunk_size) == 0,
        AssertionError(
            "The number of seeds should be a multiple of chunk_size."),
    )

    # Write each chunk via slicing rather than per-seed index bookkeeping.
    written = 0
    for chunk_index in range(len(all_seeds) // chunk_size):
        chunk_seeds: List[str] = all_seeds[written : written + chunk_size]
        written += chunk_size
        util.file_write_text(
            Path(f"seeds_{chunk_index}.txt"), " ".join(chunk_seeds) + "\n"
        )
    check(written == len(all_seeds),
          AssertionError("Expected to have processed all seeds."))
def check_result() -> None:
    """Verifies the expected crash report layout produced by the test run."""
    # no_signature because we use a Release build.
    report_bucket = Path() / "reports" / "crashes" / "no_signature"
    assert report_bucket.is_dir()
    report_dirs = list(report_bucket.iterdir())
    assert len(report_dirs) == 1
    report_dir = report_dirs[0]
    assert "opt_O" in report_dir.name
    log_file = report_dir / "results" / "amdllpc" / "result" / "log.txt"
    hit_count = util.file_read_text(log_file).count(
        PREPROCESSOR_CACHE_HIT_STRING)
    assert hit_count == 4, f"{log_file}"
    check_common_summary_files(report_dir / "summary")
def check_result() -> None:
    """Verifies the expected crash report layout produced by the test run."""
    report_bucket = Path() / "reports" / "crashes" / "vkGetInstanceProcAddr"
    assert report_bucket.is_dir()
    report_dirs = list(report_bucket.iterdir())
    assert len(report_dirs) == 1
    report_dir = report_dirs[0]
    assert "no_opt" in report_dir.name
    log_file = report_dir / "results" / "swift_shader" / "result" / "log.txt"
    hit_count = util.file_read_text(log_file).count(
        PREPROCESSOR_CACHE_HIT_STRING)
    assert hit_count == 2, f"{log_file}"
    check_common_summary_files(report_dir / "summary")
def get_shader_contents(
    shader_job_file_path: pathlib.Path,
    extension: str,
    language_suffix: str = SUFFIX_GLSL,
    must_exist: bool = False,
) -> Optional[str]:
    """Returns the text of the shader file derived from a shader job path.

    The shader file path is the job path with its suffix replaced by
    |extension| + |language_suffix|. Returns None when the file is missing,
    unless |must_exist| is set, in which case an AssertionError is raised.
    """
    candidate = shader_job_file_path.with_suffix(extension + language_suffix)
    if not candidate.exists():
        if must_exist:
            raise AssertionError(f"could not read {candidate}")
        return None
    return util.file_read_text(candidate)
def check_result() -> None:
    """Verifies the expected crash report layout produced by the test run."""
    report_bucket = Path() / "reports" / "crashes" / "vkGetInstanceProcAddr"
    assert report_bucket.is_dir()
    report_dirs = list(report_bucket.iterdir())
    assert len(report_dirs) == 1
    report_dir = report_dirs[0]
    assert "no_opt" in report_dir.name
    log_file = report_dir / "results" / "swift_shader" / "result" / "log.txt"
    hit_count = util.file_read_text(log_file).count(
        PREPROCESSOR_CACHE_HIT_STRING)
    assert hit_count == 1, f"{log_file}"
    reduced_dir = report_dir / "summary" / "reduced"
    assert not reduced_dir.is_dir()  # No reduction because of regex below.
def main() -> None:
    """Extracts a crash signature from a log file and prints it via log()."""
    arg_parser = argparse.ArgumentParser(
        description="A tool for extracting a signature from a log file.")
    arg_parser.add_argument(
        "log_file",
        help="The log file from which a signature should be extracted.",
    )
    args = arg_parser.parse_args(sys.argv[1:])
    log_path: Path = Path(args.log_file)
    contents = util.file_read_text(log_path)
    log(get_signature_from_log_contents(contents))
def check_result() -> None:
    """Verifies the expected crash report layout produced by the test run.

    Expects exactly one "no_opt" report under the
    UNIMPLEMENTED_extensionFeaturessType crash bucket, with one preprocessor
    cache hit recorded in the swift_shader log, and the common summary files
    present.
    """
    bucket = Path(
    ) / "reports" / "crashes" / "UNIMPLEMENTED_extensionFeaturessType"
    assert bucket.is_dir()
    test_dirs = list(bucket.iterdir())
    assert len(test_dirs) == 1
    assert "no_opt" in test_dirs[0].name
    log_path = test_dirs[
        0] / "results" / "swift_shader" / "result" / "log.txt"
    assert (util.file_read_text(log_path).count(
        PREPROCESSOR_CACHE_HIT_STRING) == 1), f"{log_path}"
    # Fix: |summary_dir| was assigned twice (once before |log_path| and again
    # here); the redundant earlier assignment has been removed.
    summary_dir = test_dirs[0] / "summary"
    check_common_summary_files(summary_dir)
def main() -> None:
    """Processes a 10,000-seed file in ten 1,000-seed chunks.

    For each chunk, calls process_chunk with the chunk's seed set, the
    log_*.txt files found in the current directory, and the output handle.
    """
    arg_parser = argparse.ArgumentParser(description="Processes a seed file.")
    arg_parser.add_argument(
        "seed_file",
        help="Seed file to process.",
    )
    arg_parser.add_argument(
        "--out",
        help="Output file.",
        default="signatures_chunked.txt",
    )
    args = arg_parser.parse_args(sys.argv[1:])
    seed_path: Path = Path(args.seed_file)
    out_path: Path = Path(args.out)

    # Get a list of all log files.
    logs: List[Path] = sorted(Path().glob("log_*.txt"))

    # Get chunks of seeds and call process_chunk.
    seeds: List[str] = util.file_read_text(seed_path).split()
    check(len(seeds) == 10_000, AssertionError("Expected 10,000 seeds."))

    with util.file_open_text(out_path, "w") as output:
        consumed = 0
        for chunk_index in range(10):
            # Take the next 1,000 seeds as a set via slicing.
            chunk_seeds: Set[str] = set(seeds[consumed : consumed + 1_000])
            consumed += 1_000
            process_chunk(chunk_index, chunk_seeds, logs, output)
        check(
            consumed == 10_000,
            AssertionError("Expected to have processed 10,000 seeds.")
        )
def metadata_read_from_path(test_metadata_path: Path) -> Test:
    """Reads a Test proto message from the JSON file at |test_metadata_path|."""
    metadata = Test()
    json_text = util.file_read_text(test_metadata_path)
    proto_util.json_to_message(json_text, metadata)
    return metadata
def main() -> None:  # pylint: disable=too-many-locals,too-many-branches,too-many-statements;
    """Runs GraphicsFuzz AmberScript tests on all active devices.

    Writes a CSV results table (one row per test, one column per device) and,
    with --update_ignored_signatures, appends newly-seen crash signatures to
    each device's ignored_crash_signatures and rewrites the settings file.
    """
    parser = argparse.ArgumentParser(
        description="Runs GraphicsFuzz AmberScript tests on the active devices listed in "
        "the settings.json file.",
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    )
    parser.add_argument(
        "--settings",
        help="Path to the settings JSON file for this instance.",
        default=str(settings_util.DEFAULT_SETTINGS_FILE_PATH),
    )
    parser.add_argument(
        "--tests",
        help="Path to the directory of AmberScript tests with shaders extracted.",
        default="graphicsfuzz",
    )
    parser.add_argument(
        "--update_ignored_signatures",
        help="As the tests are run for each device, add any crash signatures to the device's ignored_crash_signatures "
        "property and write out the updated settings.json file.",
        action="store_true",
    )
    parser.add_argument(
        "--results_out",
        help="Output file path for the CSV results table.",
        default="results.csv",
    )
    parsed_args = parser.parse_args(sys.argv[1:])

    # Args.
    tests_dir: Path = Path(parsed_args.tests)
    settings_path: Path = Path(parsed_args.settings)
    update_ignored_signatures: bool = parsed_args.update_ignored_signatures
    results_out_path: Path = Path(parsed_args.results_out)

    # Settings and devices.
    settings = settings_util.read_or_create(settings_path)
    active_devices = devices_util.get_active_devices(settings.device_list)

    # Binaries.
    binaries = binaries_util.get_default_binary_manager(settings=settings)

    # Each run gets a fresh randomly-named work directory under temp/.
    work_dir = Path() / "temp" / f"cts_run_{fuzz.get_random_name()[:8]}"
    util.mkdirs_p(work_dir)

    with util.file_open_text(results_out_path, "w") as results_handle:

        # CSV helpers: every entry is followed by ", "; flush eagerly so the
        # partial table is readable while the (long) run is in progress.
        def write_entry(entry: str) -> None:
            results_handle.write(entry)
            results_handle.write(", ")
            results_handle.flush()

        def write_newline() -> None:
            results_handle.write("\n")
            results_handle.flush()

        spirv_opt_path: Optional[Path] = None
        swift_shader_path: Optional[Path] = None
        amber_path: Optional[Path] = None

        # Small hack to ensure we have three devices for spirv-opt, each with a different name.
        # The first active device (if named "host_preprocessor") is renamed to
        # SPIRV_OPT_O and two copies (SPIRV_OPT_OS, SPIRV_OPT_CUSTOM) are
        # inserted, so spirv-opt runs with three different argument sets.
        main_spirv_opt_device: Optional[Device] = None
        if active_devices and active_devices[0].name == "host_preprocessor":
            main_spirv_opt_device = active_devices[0]
            main_spirv_opt_device.name = SPIRV_OPT_O

            spirv_opt_custom = Device()
            spirv_opt_custom.CopyFrom(main_spirv_opt_device)
            spirv_opt_custom.name = SPIRV_OPT_CUSTOM
            active_devices.insert(1, spirv_opt_custom)

            spirv_opt_os = Device()
            spirv_opt_os.CopyFrom(main_spirv_opt_device)
            spirv_opt_os.name = SPIRV_OPT_OS
            active_devices.insert(1, spirv_opt_os)

        # Enumerate active devices, writing their name and storing binary paths if needed.
        write_entry("test")
        for device in active_devices:
            write_entry(device.name)

            if device.HasField("preprocess"):
                spirv_opt_path = binaries.get_binary_path_by_name(
                    binaries_util.SPIRV_OPT_NAME
                ).path

            if device.HasField("swift_shader"):
                swift_shader_path = binaries.get_binary_path_by_name(
                    binaries_util.SWIFT_SHADER_NAME
                ).path

            if device.HasField("swift_shader") or device.HasField("host"):
                amber_path = binaries.get_binary_path_by_name(
                    binaries_util.AMBER_NAME
                ).path

        write_newline()

        # Enumerate tests and devices, writing the results.
        for test in sorted(tests_dir.glob("*.amber")):
            test_name = util.remove_end(test.name, ".amber")
            write_entry(test_name)
            # Matching extracted SPIR-V shaders share the test's base name.
            spirv_shaders = sorted(
                tests_dir.glob(util.remove_end(test.name, "amber") + "*.spv")
            )
            for device in active_devices:
                test_run_dir = work_dir / f"{test_name}_{device.name}"
                util.mkdirs_p(test_run_dir)
                ignored_signatures_set: Set[str] = set(device.ignored_crash_signatures)
                # All logging during the run is mirrored into log.txt.
                with util.file_open_text(test_run_dir / "log.txt", "w") as log_stream:
                    try:
                        gflogging.push_stream_for_logging(log_stream)
                        if device.HasField("preprocess"):
                            # This just means spirv-opt for now.

                            assert spirv_opt_path  # noqa
                            assert main_spirv_opt_device  # noqa

                            # Pick spirv-opt arguments based on device name.
                            if device.name == SPIRV_OPT_O:
                                spirv_opt_args = ["-O"]
                            elif device.name == SPIRV_OPT_OS:
                                spirv_opt_args = ["-Os"]
                            elif device.name == SPIRV_OPT_CUSTOM:
                                spirv_opt_args = (
                                    spirv_opt_util.OPT_INTERESTING_SUBSET_OF_PASSES
                                )
                            else:
                                raise AssertionError(
                                    f"Can't tell how to run device {device.name}; "
                                    f"must be named host_preprocessor and be the first active device."
                                )

                            # Reset device and ignored_crash_signatures.
                            # NOTE: |device| is rebound to the main spirv-opt
                            # device so signature updates below apply to it.
                            device = main_spirv_opt_device
                            ignored_signatures_set = set(
                                device.ignored_crash_signatures
                            )

                            try:
                                for spirv_shader in spirv_shaders:
                                    spirv_opt_util.run_spirv_opt_on_spirv_shader(
                                        spirv_shader,
                                        test_run_dir,
                                        spirv_opt_args,
                                        spirv_opt_path,
                                    )
                                result_util.write_status(
                                    test_run_dir,
                                    fuzz.STATUS_SUCCESS,
                                )
                            except subprocess.CalledProcessError:
                                result_util.write_status(
                                    test_run_dir,
                                    fuzz.STATUS_TOOL_CRASH,
                                )
                            except subprocess.TimeoutExpired:
                                result_util.write_status(
                                    test_run_dir,
                                    fuzz.STATUS_TOOL_TIMEOUT,
                                )
                        elif device.HasField("shader_compiler"):
                            try:
                                for spirv_shader in spirv_shaders:
                                    shader_compiler_util.run_shader(
                                        shader_compiler_device=device.shader_compiler,
                                        shader_path=spirv_shader,
                                        output_dir=test_run_dir,
                                        compiler_path=binaries.get_binary_path_by_name(
                                            device.shader_compiler.binary
                                        ).path,
                                        timeout=DEFAULT_TIMEOUT,
                                    )
                                result_util.write_status(
                                    test_run_dir,
                                    fuzz.STATUS_SUCCESS,
                                )
                            except subprocess.CalledProcessError:
                                result_util.write_status(
                                    test_run_dir,
                                    fuzz.STATUS_CRASH,
                                )
                            except subprocess.TimeoutExpired:
                                result_util.write_status(
                                    test_run_dir,
                                    fuzz.STATUS_TIMEOUT,
                                )
                        elif device.HasField("swift_shader"):
                            assert swift_shader_path  # noqa
                            assert amber_path  # noqa
                            host_device_util.run_amber(
                                test,
                                test_run_dir,
                                amber_path=amber_path,
                                dump_image=False,
                                dump_buffer=False,
                                icd=swift_shader_path,
                            )
                        elif device.HasField("host"):
                            assert amber_path  # noqa
                            host_device_util.run_amber(
                                test,
                                test_run_dir,
                                amber_path=amber_path,
                                dump_image=False,
                                dump_buffer=False,
                                custom_launcher=list(device.host.custom_launcher),
                            )
                        elif device.HasField("android"):
                            android_device.run_amber_on_device(
                                test,
                                test_run_dir,
                                dump_image=False,
                                dump_buffer=False,
                                serial=device.android.serial,
                            )
                        else:
                            raise AssertionError(f"Unsupported device {device.name}")
                    finally:
                        gflogging.pop_stream_for_logging()

                # Summarize the outcome as a single CSV cell:
                # P = pass, T = timeout, F = everything else.
                status = result_util.get_status(test_run_dir)
                if status == fuzz.STATUS_SUCCESS:
                    write_entry("P")
                elif status in (fuzz.STATUS_TIMEOUT, fuzz.STATUS_TOOL_TIMEOUT):
                    write_entry("T")
                else:
                    write_entry("F")

                # Update ignored signatures.
                if (
                    status
                    in (
                        fuzz.STATUS_TOOL_CRASH,
                        fuzz.STATUS_CRASH,
                        fuzz.STATUS_UNRESPONSIVE,
                    )
                    and update_ignored_signatures
                ):
                    log_contents = util.file_read_text(
                        result_util.get_log_path(test_run_dir)
                    )
                    signature = signature_util.get_signature_from_log_contents(
                        log_contents
                    )
                    if signature == signature_util.NO_SIGNATURE:
                        log(f"NOT updating ignored signatures to include {signature}")
                    elif signature in ignored_signatures_set:
                        log(f"Signature is already ignored: {signature}")
                    else:
                        log(f"Adding ignored signature: {signature}")
                        device.ignored_crash_signatures.append(signature)

            write_newline()

    if update_ignored_signatures:
        # Reset main_spirv_opt_device name before writing it back out.
        if main_spirv_opt_device:
            main_spirv_opt_device.name = "host_preprocessor"
        settings_util.write(settings, settings_path)
def tool_crash_summary_bug_report_dir(  # pylint: disable=too-many-locals;
        reduced_glsl_source_dir: Path,
        variant_reduced_glsl_result_dir: Path,
        output_dir: Path,
        binary_manager: binaries_util.BinaryManager,
) -> Optional[Path]:
    """Creates a bug report directory plus a README.md for a tool crash.

    Copies |variant_reduced_glsl_result_dir| to |output_dir|/bug_report,
    then writes |output_dir|/README.md containing tool versions, reproduction
    commands, and the (GLSL and SPIR-V assembly) shader sources inline.
    Returns the bug_report directory, or None if the variant shader job file
    is missing.
    """
    # Create a simple script and README.

    shader_job = reduced_glsl_source_dir / test_util.VARIANT_DIR / test_util.SHADER_JOB

    if not shader_job.is_file():
        return None

    test_metadata: Test = test_util.metadata_read_from_path(
        reduced_glsl_source_dir / test_util.TEST_METADATA)

    shader_files = shader_job_util.get_related_files(
        shader_job,
        shader_job_util.EXT_ALL,
        (shader_job_util.SUFFIX_GLSL, shader_job_util.SUFFIX_SPIRV),
    )
    check(
        len(shader_files) > 0,
        AssertionError(f"Need at least one shader for {shader_job}"),
    )
    # E.g. ".frag" / ".vert" / ".comp"; used to classify the copied files.
    shader_extension = shader_files[0].suffix

    bug_report_dir = util.copy_dir(variant_reduced_glsl_result_dir,
                                   output_dir / "bug_report")

    # Re-scan the copied directory so the paths below are inside bug_report.
    shader_files = sorted(bug_report_dir.rglob("shader.*"))

    glsl_files = [
        shader_file for shader_file in shader_files
        if shader_file.suffix == shader_extension
    ]

    asm_files = [
        shader_file for shader_file in shader_files
        if shader_file.name.endswith(shader_extension +
                                     shader_job_util.SUFFIX_ASM_SPIRV)
    ]

    spv_files = [
        shader_file for shader_file in shader_files
        if shader_file.name.endswith(shader_extension +
                                     shader_job_util.SUFFIX_SPIRV)
    ]

    readme = "\n\n"
    readme += (
        "Issue found using [GraphicsFuzz](https://github.com/google/graphicsfuzz).\n\n"
    )
    readme += "Tool versions:\n\n"

    # noinspection PyTypeChecker
    if test_metadata.HasField("glsl"):
        readme += f"* glslangValidator commit hash: {binary_manager.get_binary_by_name(binaries_util.GLSLANG_VALIDATOR_NAME).version}\n"

    if test_metadata.glsl.spirv_opt_args or test_metadata.spirv_fuzz.spirv_opt_args:
        readme += f"* spirv-opt commit hash: {binary_manager.get_binary_by_name(binaries_util.SPIRV_OPT_NAME).version}\n"

    readme += "\nTo reproduce:\n\n"
    readme += f"`glslangValidator -V shader{shader_extension} -o shader{shader_extension}.spv`\n\n"

    if (test_metadata.HasField("glsl") and spv_files
            and not test_metadata.glsl.spirv_opt_args):
        # GLSL was converted to SPIR-V, and spirv-opt was not run, so indicate that we should validate the SPIR-V.
        readme += f"`spirv-val shader{shader_extension}.spv`\n\n"

    if test_metadata.glsl.spirv_opt_args or test_metadata.spirv_fuzz.spirv_opt_args:
        readme += f"`spirv-opt shader{shader_extension}.spv -o temp.spv --validate-after-all {' '.join(test_metadata.glsl.spirv_opt_args)}`\n\n"

    # List every shader file in the archive, but only inline GLSL and SPIR-V
    # assembly (binary .spv files are listed, not shown).
    files_to_list = glsl_files + spv_files + asm_files
    files_to_list.sort()

    files_to_show = glsl_files + asm_files
    files_to_show.sort()

    readme += "The following shader files are included in the attached archive, some of which are also shown inline below:\n\n"

    for file_to_list in files_to_list:
        short_path = file_to_list.relative_to(bug_report_dir).as_posix()
        readme += f"* {short_path}\n"

    for file_to_show in files_to_show:
        short_path = file_to_show.relative_to(bug_report_dir).as_posix()
        file_contents = util.file_read_text(file_to_show)
        readme += f"\n{short_path}:\n\n"
        readme += f"```\n{file_contents}\n```\n"

    util.file_write_text(output_dir / "README.md", readme)

    return bug_report_dir
def main() -> None:  # pylint: disable=too-many-statements, too-many-locals, too-many-branches;
    """Interestingness test entry point.

    Runs the test described by source_dir/test.json, derives a crash
    signature from the resulting log, and exits with status 0 (interesting)
    or 1 (not interesting).
    """
    parser = argparse.ArgumentParser(
        description="Interestingness test that runs a test using Amber, "
        "calculates the crash signature based on the result, and returns 0 "
        "if the signature matches the expected crash signature.")

    parser.add_argument(
        "source_dir",
        help=
        "The source directory containing the shaders and the test.json file that describes how to run the test.",
    )
    parser.add_argument(
        "--override_shader_job",
        nargs=2,
        metavar=("shader_job_name", "shader_job_json"),
        help=
        'Override one of the shader jobs. E.g.: "--override_shader_job variant temp/variant.json". Note that '
        "the output directory will be set to shader_job_json/ (with the .json extension removed) by default in this case. ",
    )
    parser.add_argument(
        "--override_shader",
        nargs=3,
        metavar=("shader_name", "suffix", "shader_path"),
        help=
        'Override one of the shaders. E.g.: "--override_shader variant .frag.spv temp/my_shader.spv". Note that '
        "the output directory will be set to shader_path/ (with the .spv extension removed) by default in this case. ",
    )
    parser.add_argument(
        "--use_default_binaries",
        help="Use the latest binaries, ignoring those defined in the test.json. "
        "Implies --fallback_binaries. Passing --settings is recommended to ensure the latest binaries are used.",
        action="store_true",
    )
    parser.add_argument(
        "--fallback_binaries",
        help=
        "Fallback to the latest binaries if they are not defined in the test.json.",
        action="store_true",
    )
    parser.add_argument(
        "--output",
        help=
        "Output directory. Required unless --override_shader[_job] is used; see --override_shader[_job] for details.",
        default=None,
    )
    parser.add_argument(
        "--settings",
        help="Path to a settings JSON file for this instance. "
        "Unlike with gfauto_fuzz, the default value is an empty string, which is ignored. "
        "You only need to use a settings file if you pass --use_default_binaries and you want to use the latest binary versions. "
        'In this case, use e.g. "--settings settings.json" so that a default settings file is generated with the latest binary version numbers '
        "and then run gfauto_interestingness_test again to use those latest binaries.",
        default="",
    )

    parsed_args = parser.parse_args(sys.argv[1:])

    source_dir: Path = Path(parsed_args.source_dir)
    override_shader_job: Optional[Tuple[str, str]] = parsed_args.override_shader_job
    override_shader: Optional[Tuple[str, str, str]] = parsed_args.override_shader
    settings_str: str = parsed_args.settings

    # An empty --settings means "use defaults" (no file is read or written).
    settings = Settings()
    if settings_str:
        settings = settings_util.read_or_create(Path(settings_str))

    use_default_binaries: bool = parsed_args.use_default_binaries
    fallback_binaries: bool = parsed_args.fallback_binaries or use_default_binaries

    # Derive the output directory from the override arguments when --output
    # was not given (see the help text above).
    output: Path
    if parsed_args.output:
        output = Path(parsed_args.output)
    elif override_shader_job:
        output = Path(override_shader_job[1]).with_suffix("")
    elif override_shader:
        output = Path(override_shader[2]).with_suffix("")
    else:
        raise AssertionError(
            "Need --output or --override_shader[_job] parameter.")

    binary_manager = binaries_util.get_default_binary_manager(
        settings=settings)

    # Without fallback binaries, start from an empty binary list so only the
    # binaries defined in the test/device are used.
    if not fallback_binaries:
        binary_manager = binary_manager.get_child_binary_manager(
            binary_list=[])

    shader_job_overrides: List[tool.NameAndShaderJob] = []

    if override_shader_job:
        shader_job_overrides.append(
            tool.NameAndShaderJob(name=override_shader_job[0],
                                  shader_job=Path(override_shader_job[1])))

    shader_overrides: tool.ShaderJobNameToShaderOverridesMap = {}

    if override_shader:
        override = tool.ShaderPathWithNameAndSuffix(
            name=override_shader[0],
            suffix=override_shader[1],
            path=Path(override_shader[2]),
        )
        shader_overrides[override.name] = {override.suffix: override}

        # E.g. shader_overrides ==
        # {
        #   "variant": {
        #     ".frag.spv": ShaderPathWithNameAndSuffix("variant", ".frag.spv", Path("path/to/shader.frag.spv"))
        #   }
        # }

    # We don't need to read this to run the shader, but we need it afterwards anyway.
    test = test_util.metadata_read_from_path(source_dir /
                                             test_util.TEST_METADATA)

    output_dir = fuzz_glsl_test.run_shader_job(
        source_dir=source_dir,
        output_dir=output,
        binary_manager=binary_manager,
        test=test,
        ignore_test_and_device_binaries=use_default_binaries,
        shader_job_overrides=shader_job_overrides,
        shader_job_shader_overrides=shader_overrides,
    )

    log(f"gfauto_interestingness_test: finished running {str(source_dir)} in {str(output_dir)}."
        )
    if override_shader_job:
        log(f"The {override_shader_job[0]} shader was overridden with {override_shader_job[1]}"
            )

    status = result_util.get_status(output_dir)
    if test.expected_status:
        log("")
        log(f"Expected status: {test.expected_status}")
        log(f"Actual status: {status}")

    log_contents = util.file_read_text(result_util.get_log_path(output_dir))
    signature = signature_util.get_signature_from_log_contents(log_contents)

    log("")
    log(f"Expected signature: {test.crash_signature}")
    log(f"Actual signature: {signature}")
    log("")

    # The |crash_regex_override| overrides all other checks.
    if test.crash_regex_override:
        log(f"Testing crash_regex_override: {test.crash_regex_override}")
        override_pattern: Pattern[str] = re.compile(test.crash_regex_override,
                                                    re.DOTALL)
        match: Optional[Match[str]] = override_pattern.fullmatch(log_contents)
        if match:
            log("Match!")
            log("Interesting")
            sys.exit(0)
        else:
            log("No match; not interesting")
            sys.exit(1)

    if test.expected_status:
        if status != test.expected_status:
            log("status != expected_status; not interesting")
            sys.exit(1)
    else:
        # There is no expected status given, so just assume it needs to be one of the "bad" statuses:
        if status not in (
                fuzz.STATUS_CRASH,
                fuzz.STATUS_TOOL_CRASH,
                fuzz.STATUS_UNRESPONSIVE,
        ):
            log("shader run did not fail; not interesting.")
            sys.exit(1)

    if signature != test.crash_signature:
        log("signature != crash_signature; not interesting")
        sys.exit(1)

    log("Interesting!")
def main_helper(  # pylint: disable=too-many-locals,too-many-branches,too-many-statements;
        tests_dir: Path,
        work_dir: Path,
        binaries: binaries_util.BinaryManager,
        settings: Settings,
        active_devices: List[Device],
        results_out_handle: Optional[TextIO],
        updated_settings_output_path: Optional[Path],
) -> None:
    """Runs the AmberScript tests in |tests_dir| on all |active_devices|.

    Writes a CSV results table (one row per test, one column per device) to
    |results_out_handle| (if given) and, when |updated_settings_output_path|
    is given, records newly-seen crash signatures in each device's
    ignored_crash_signatures and writes the updated settings there.
    """
    util.mkdirs_p(work_dir)

    # CSV helpers: no-ops when no results handle was provided; flush eagerly
    # so the partial table is readable while the (long) run is in progress.
    def write_entry(entry: str) -> None:
        if not results_out_handle:
            return
        results_out_handle.write(entry)
        results_out_handle.write(", ")
        results_out_handle.flush()

    def write_newline() -> None:
        if not results_out_handle:
            return
        results_out_handle.write("\n")
        results_out_handle.flush()

    spirv_opt_path: Optional[Path] = None
    swift_shader_path: Optional[Path] = None
    amber_path: Optional[Path] = None

    # Small hack to ensure we have three devices for spirv-opt, each with a different name.
    # The first active device (if named "host_preprocessor") is renamed to
    # SPIRV_OPT_O and two copies (SPIRV_OPT_OS, SPIRV_OPT_CUSTOM) are
    # inserted, so spirv-opt runs with three different argument sets.
    main_spirv_opt_device: Optional[Device] = None
    if active_devices and active_devices[0].name == "host_preprocessor":
        main_spirv_opt_device = active_devices[0]
        main_spirv_opt_device.name = SPIRV_OPT_O

        spirv_opt_custom = Device()
        spirv_opt_custom.CopyFrom(main_spirv_opt_device)
        spirv_opt_custom.name = SPIRV_OPT_CUSTOM
        active_devices.insert(1, spirv_opt_custom)

        spirv_opt_os = Device()
        spirv_opt_os.CopyFrom(main_spirv_opt_device)
        spirv_opt_os.name = SPIRV_OPT_OS
        active_devices.insert(1, spirv_opt_os)

    # Enumerate active devices, writing their name and storing binary paths if needed.
    write_entry("test")
    for device in active_devices:
        write_entry(device.name)

        if device.HasField("preprocess"):
            spirv_opt_path = binaries.get_binary_path_by_name(
                binaries_util.SPIRV_OPT_NAME).path

        if device.HasField("swift_shader"):
            swift_shader_path = binaries.get_binary_path_by_name(
                binaries_util.SWIFT_SHADER_NAME).path

        if device.HasField("swift_shader") or device.HasField("host"):
            amber_path = binaries.get_binary_path_by_name(
                binaries_util.AMBER_NAME).path

    write_newline()

    # Enumerate tests and devices, writing the results.
    for test in sorted(tests_dir.glob("*.amber")):
        test_name = util.remove_end(test.name, ".amber")
        write_entry(test_name)
        # Matching extracted SPIR-V shaders share the test's base name.
        spirv_shaders = sorted(
            tests_dir.glob(util.remove_end(test.name, "amber") + "*.spv"))
        for device in active_devices:
            test_run_dir = work_dir / f"{test_name}_{device.name}"
            util.mkdirs_p(test_run_dir)
            ignored_signatures_set: Set[str] = set(
                device.ignored_crash_signatures)
            # All logging during the run is mirrored into log.txt.
            with util.file_open_text(test_run_dir / "log.txt",
                                     "w") as log_stream:
                try:
                    gflogging.push_stream_for_logging(log_stream)
                    if device.HasField("preprocess"):
                        # This just means spirv-opt for now.

                        assert spirv_opt_path  # noqa
                        assert main_spirv_opt_device  # noqa

                        # Pick spirv-opt arguments based on device name.
                        if device.name == SPIRV_OPT_O:
                            spirv_opt_args = ["-O"]
                        elif device.name == SPIRV_OPT_OS:
                            spirv_opt_args = ["-Os"]
                        elif device.name == SPIRV_OPT_CUSTOM:
                            spirv_opt_args = (
                                spirv_opt_util.OPT_INTERESTING_SUBSET_OF_PASSES)
                        else:
                            raise AssertionError(
                                f"Can't tell how to run device {device.name}; "
                                f"must be named host_preprocessor and be the first active device."
                            )

                        # Reset device and ignored_crash_signatures.
                        # NOTE: |device| is rebound to the main spirv-opt
                        # device so signature updates below apply to it.
                        device = main_spirv_opt_device
                        ignored_signatures_set = set(
                            device.ignored_crash_signatures)

                        try:
                            for spirv_shader in spirv_shaders:
                                spirv_opt_util.run_spirv_opt_on_spirv_shader(
                                    spirv_shader,
                                    test_run_dir,
                                    spirv_opt_args,
                                    spirv_opt_path,
                                )
                            result_util.write_status(
                                test_run_dir,
                                fuzz.STATUS_SUCCESS,
                            )
                        except subprocess.CalledProcessError:
                            result_util.write_status(
                                test_run_dir,
                                fuzz.STATUS_TOOL_CRASH,
                            )
                        except subprocess.TimeoutExpired:
                            result_util.write_status(
                                test_run_dir,
                                fuzz.STATUS_TOOL_TIMEOUT,
                            )
                    elif device.HasField("shader_compiler"):
                        try:
                            for spirv_shader in spirv_shaders:
                                shader_compiler_util.run_shader(
                                    shader_compiler_device=device.shader_compiler,
                                    shader_path=spirv_shader,
                                    output_dir=test_run_dir,
                                    compiler_path=binaries.get_binary_path_by_name(
                                        device.shader_compiler.binary).path,
                                    timeout=DEFAULT_TIMEOUT,
                                )
                            result_util.write_status(
                                test_run_dir,
                                fuzz.STATUS_SUCCESS,
                            )
                        except subprocess.CalledProcessError:
                            result_util.write_status(
                                test_run_dir,
                                fuzz.STATUS_CRASH,
                            )
                        except subprocess.TimeoutExpired:
                            result_util.write_status(
                                test_run_dir,
                                fuzz.STATUS_TIMEOUT,
                            )
                    elif device.HasField("swift_shader"):
                        assert swift_shader_path  # noqa
                        assert amber_path  # noqa
                        host_device_util.run_amber(
                            test,
                            test_run_dir,
                            amber_path=amber_path,
                            dump_image=False,
                            dump_buffer=False,
                            icd=swift_shader_path,
                        )
                    elif device.HasField("host"):
                        assert amber_path  # noqa
                        host_device_util.run_amber(
                            test,
                            test_run_dir,
                            amber_path=amber_path,
                            dump_image=False,
                            dump_buffer=False,
                            custom_launcher=list(device.host.custom_launcher),
                        )
                    elif device.HasField("android"):
                        android_device.run_amber_on_device(
                            test,
                            test_run_dir,
                            dump_image=False,
                            dump_buffer=False,
                            serial=device.android.serial,
                        )
                    else:
                        raise AssertionError(
                            f"Unsupported device {device.name}")
                finally:
                    gflogging.pop_stream_for_logging()

            # Summarize the outcome as a single CSV cell:
            # P = pass, T = timeout, F = everything else.
            status = result_util.get_status(test_run_dir)
            if status == fuzz.STATUS_SUCCESS:
                write_entry("P")
            elif status in (fuzz.STATUS_TIMEOUT, fuzz.STATUS_TOOL_TIMEOUT):
                write_entry("T")
            else:
                write_entry("F")

            # Update ignored signatures.
            if (status in (
                    fuzz.STATUS_TOOL_CRASH,
                    fuzz.STATUS_CRASH,
                    fuzz.STATUS_UNRESPONSIVE,
            ) and updated_settings_output_path):
                log_contents = util.file_read_text(
                    result_util.get_log_path(test_run_dir))
                signature = signature_util.get_signature_from_log_contents(
                    log_contents)
                if signature == signature_util.NO_SIGNATURE:
                    log(f"NOT updating ignored signatures to include {signature}"
                        )
                elif signature in ignored_signatures_set:
                    log(f"Signature is already ignored: {signature}")
                else:
                    log(f"Adding ignored signature: {signature}")
                    device.ignored_crash_signatures.append(signature)

        write_newline()

    if updated_settings_output_path:
        # Reset main_spirv_opt_device name before writing it back out.
        if main_spirv_opt_device:
            main_spirv_opt_device.name = "host_preprocessor"
        settings_util.write(settings, updated_settings_output_path)
def to_shader_job(self) -> ShaderJob:
    """Builds a ComputeShaderJob or GraphicsShaderJob from this job's files."""
    uniforms_json = util.file_read_text(self.asm_spirv_shader_job_json)

    if is_compute_job(self.asm_spirv_shader_job_json):
        # Optional GLSL source for the compute shader.
        comp_glsl = (shader_job_util.get_shader_contents(
            self.glsl_source_json, shader_job_util.EXT_COMP)
                     if self.glsl_source_json else None)
        comp_asm = shader_job_util.get_shader_contents(
            self.asm_spirv_shader_job_json,
            shader_job_util.EXT_COMP,
            shader_job_util.SUFFIX_ASM_SPIRV,
            must_exist=True,
        )
        # Guaranteed
        assert comp_asm  # noqa
        return ComputeShaderJob(
            self.name_prefix,
            amberscript_uniform_buffer_def(uniforms_json, self.name_prefix),
            amberscript_uniform_buffer_bind(uniforms_json, self.name_prefix),
            Shader(
                ShaderType.COMPUTE,
                comp_asm,
                comp_glsl,
                self.processing_info,
            ),
            amberscript_comp_buff_def(uniforms_json),
            amberscript_comp_buff_def(uniforms_json, make_empty_buffer=True),
            amberscript_comp_num_groups_def(uniforms_json),
        )

    # Graphics job: gather the optional GLSL sources.
    vert_glsl = None
    frag_glsl = None
    if self.glsl_source_json:
        vert_glsl = shader_job_util.get_shader_contents(
            self.glsl_source_json, shader_job_util.EXT_VERT)
        frag_glsl = shader_job_util.get_shader_contents(
            self.glsl_source_json, shader_job_util.EXT_FRAG)

    # Gather the SPIR-V assembly; only the fragment shader must exist.
    vert_asm = shader_job_util.get_shader_contents(
        self.asm_spirv_shader_job_json,
        shader_job_util.EXT_VERT,
        shader_job_util.SUFFIX_ASM_SPIRV,
    )
    frag_asm = shader_job_util.get_shader_contents(
        self.asm_spirv_shader_job_json,
        shader_job_util.EXT_FRAG,
        shader_job_util.SUFFIX_ASM_SPIRV,
        must_exist=True,
    )
    return GraphicsShaderJob(
        self.name_prefix,
        amberscript_uniform_buffer_def(uniforms_json, self.name_prefix),
        amberscript_uniform_buffer_bind(uniforms_json, self.name_prefix),
        Shader(
            ShaderType.VERTEX,
            vert_asm,
            vert_glsl,
            self.processing_info,
        ),
        Shader(
            ShaderType.FRAGMENT,
            frag_asm,
            frag_glsl,
            self.processing_info,
        ),
    )
def log_a_file(log_file: Path) -> None:
    """Best-effort: logs the contents of |log_file|, or a note if unreadable."""
    log(f"Logging the contents of {str(log_file)}")
    try:
        contents = util.file_read_text(log_file)
        log(contents)
    except IOError:
        log(f"Failed to read {str(log_file)}")
def file_to_message(input_file_path: Path, message: M) -> M:
    """Parses the JSON file at |input_file_path| into |message| and returns it."""
    json_text = util.file_read_text(input_file_path)
    return json_to_message(json_text, message)
def artifact_read_recipe(artifact_path: str = "") -> Recipe:
    """Reads and returns the Recipe proto for the artifact at |artifact_path|."""
    result = Recipe()
    recipe_file = artifact_get_recipe_file_path(artifact_path)
    proto_util.json_to_message(util.file_read_text(recipe_file), result)
    return result
def maybe_add_report(  # pylint: disable=too-many-locals;
        test_dir: Path, reports_dir: Path, device: Device,
        settings: Settings) -> Optional[Path]:
    """Copies |test_dir| into |reports_dir| if its result warrants a report.

    A report is created only for crash/tool-crash/unresponsive statuses,
    bucketed under the crash signature extracted from the run's log. Returns
    the report's test directory, or None when no report is created (wrong
    status, signature marked NOT_INTERESTING, or duplicate limit reached).
    """
    result_output_dir = test_util.get_results_directory(test_dir, device.name)

    status = result_util.get_status(result_output_dir)

    report_subdirectory_name = ""

    if status == fuzz.STATUS_CRASH:
        report_subdirectory_name = "crashes"
    elif status == fuzz.STATUS_TOOL_CRASH:
        report_subdirectory_name = "tool_crashes"
    elif status == fuzz.STATUS_UNRESPONSIVE:
        report_subdirectory_name = "unresponsive"

    if not report_subdirectory_name:
        return None
    log_path = result_util.get_log_path(result_output_dir)

    log_contents = util.file_read_text(log_path)
    signature = signature_util.get_signature_from_log_contents(log_contents)

    signature_dir = reports_dir / report_subdirectory_name / signature

    util.mkdirs_p(signature_dir)

    # If the signature_dir contains a NOT_INTERESTING file, then don't bother creating a report.
    if (signature_dir / "NOT_INTERESTING").exists():
        return None

    if signature != signature_util.BAD_IMAGE_SIGNATURE:
        # If we have reached the maximum number of crashes per signature for this device, don't create a report.
        num_duplicates = [
            report_dir for report_dir in signature_dir.iterdir()
            if report_dir.is_dir() and report_dir.name.endswith(
                f"_{device.name}")
        ]
        if len(num_duplicates) >= settings.maximum_duplicate_crashes:
            return None

    # We include the device name in the directory name because it is possible that this test crashes on two
    # different devices but gives the same crash signature in both cases (e.g. for generic signatures
    # like "compile_error"). This would lead to two test copies having the same path.
    # It also means we can limit duplicates per device using the directory name.
    test_dir_in_reports = signature_dir / f"{test_dir.name}_{device.name}"

    util.copy_dir(test_dir, test_dir_in_reports)

    if signature != signature_util.BAD_IMAGE_SIGNATURE:
        # If we found a crash, rename the directories for all shaders other than the variant. Thus, only the variant
        # shader will run.

        bad_shader_name = result_util.get_status_bad_shader_name(
            test_util.get_results_directory(test_dir_in_reports, device.name))

        # TODO: Could possibly improve this. Could try scanning the Amber log to figure out which shader failed?

        if not bad_shader_name:
            log("WARNING: assuming that the bad shader is the variant")
            bad_shader_name = test_util.VARIANT_DIR

        shader_jobs = tool.get_shader_jobs(
            test_util.get_source_dir(test_dir_in_reports))

        found_bad_shader = False
        for shader_job in shader_jobs:
            if shader_job.name == bad_shader_name:
                found_bad_shader = True
            else:
                # Prefix the directory with "_" so it is skipped when running.
                shader_job.shader_job.parent.rename(
                    shader_job.shader_job.parent.parent /
                    f"_{shader_job.name}")
        check(
            found_bad_shader,
            AssertionError(
                f"Could not find bad shader at: {test_util.get_source_dir(test_dir_in_reports) / bad_shader_name}"
            ),
        )

    # Record the signature, device, and status in the copied test's metadata.
    test_metadata = test_util.metadata_read(test_dir_in_reports)
    test_metadata.crash_signature = signature
    test_metadata.device.CopyFrom(device)
    test_metadata.expected_status = status
    test_util.metadata_write(test_metadata, test_dir_in_reports)

    return test_dir_in_reports
def artifact_read_metadata(artifact_path: str = "") -> ArtifactMetadata:
    """Reads and parses the metadata file of the artifact at |artifact_path|.

    :param artifact_path: Artifact path; defaults to the current artifact ("").
    :return: The parsed ArtifactMetadata message.
    """
    metadata = ArtifactMetadata()
    metadata_file = artifact_get_metadata_file_path(artifact_path)
    proto_util.json_to_message(util.file_read_text(metadata_file), metadata)
    return metadata
def to_shader_job(self) -> ShaderJob:
    """Builds a ShaderJob (compute or graphics) from this shader job's files.

    Reads the .json uniform/metadata file, then gathers the SPIR-V assembly
    shader contents (and optionally the corresponding GLSL sources, when
    |self.glsl_source_json| is set). Returns a ComputeShaderJob when the job
    is a compute job, otherwise a GraphicsShaderJob with vertex and fragment
    stages.

    :return: The constructed ShaderJob.
    """
    json_contents = util.file_read_text(self.asm_spirv_shader_job_json)

    if is_compute_job(self.asm_spirv_shader_job_json):
        # Compute pipeline: a single COMP shader plus input/output buffers.
        glsl_comp_contents = None
        if self.glsl_source_json:
            glsl_comp_contents = shader_job_util.get_shader_contents(
                self.glsl_source_json, shader_job_util.EXT_COMP)
        comp_asm_contents = shader_job_util.get_shader_contents(
            self.asm_spirv_shader_job_json,
            shader_job_util.EXT_COMP,
            shader_job_util.SUFFIX_ASM_SPIRV,
            must_exist=True,
        )

        # Guaranteed
        assert comp_asm_contents  # noqa

        return ComputeShaderJob(
            self.name_prefix,
            amberscript_uniform_buffer_def(json_contents, self.name_prefix),
            amberscript_uniform_buffer_bind(json_contents, self.name_prefix),
            Shader(
                ShaderType.COMPUTE,
                comp_asm_contents,
                glsl_comp_contents,
                self.processing_info,
            ),
            amberscript_comp_buff_def(json_contents),
            # A second, zero-filled copy of the buffer definition — presumably
            # used as the initial/expected output buffer; TODO confirm.
            amberscript_comp_buff_def(json_contents, make_empty_buffer=True),
            amberscript_comp_num_groups_def(json_contents),
            amberscript_comp_buffer_bind(json_contents),
        )

    # Get GLSL contents
    glsl_vert_contents = None
    glsl_frag_contents = None
    if self.glsl_source_json:
        glsl_vert_contents = shader_job_util.get_shader_contents(
            self.glsl_source_json, shader_job_util.EXT_VERT)
        glsl_frag_contents = shader_job_util.get_shader_contents(
            self.glsl_source_json, shader_job_util.EXT_FRAG)

    # Get spirv asm contents
    # The vertex shader is optional (a default is presumably used when
    # absent); the fragment shader must exist.
    vert_contents = shader_job_util.get_shader_contents(
        self.asm_spirv_shader_job_json,
        shader_job_util.EXT_VERT,
        shader_job_util.SUFFIX_ASM_SPIRV,
    )
    frag_contents = shader_job_util.get_shader_contents(
        self.asm_spirv_shader_job_json,
        shader_job_util.EXT_FRAG,
        shader_job_util.SUFFIX_ASM_SPIRV,
        must_exist=True,
    )

    # Figure out if we want to draw a rectangle or a grid.
    draw_command = derive_draw_command(json_contents)

    # Default framebuffer size, overridable via an optional "$framebuffer"
    # entry in the shader job JSON.
    framebuffer_width = 256
    framebuffer_height = 256

    shader_job_info = json.loads(json_contents)

    if "$framebuffer" in shader_job_info.keys():
        framebuffer_width = shader_job_info["$framebuffer"]["width"]
        framebuffer_height = shader_job_info["$framebuffer"]["height"]

    return GraphicsShaderJob(
        self.name_prefix,
        amberscript_uniform_buffer_def(json_contents, self.name_prefix),
        amberscript_uniform_buffer_bind(json_contents, self.name_prefix),
        Shader(
            ShaderType.VERTEX,
            vert_contents,
            glsl_vert_contents,
            self.processing_info,
        ),
        Shader(
            ShaderType.FRAGMENT,
            frag_contents,
            glsl_frag_contents,
            self.processing_info,
        ),
        draw_command,
        framebuffer_width,
        framebuffer_height,
    )
def main() -> None:  # pylint: disable=too-many-locals;
    """Classifies spirv-fuzz tests by their remaining transformation types.

    Scans |--tests_dir| for summary/ directories, extracts the set of
    transformation type names remaining in each reduced test (minus the
    common/uninteresting types), and groups tests that share the same set.
    Prints a table mapping each signature to its test cases.
    """
    parser = argparse.ArgumentParser(
        description=
        "Classifies spirv-fuzz tests via the set of remaining transformation types."
    )

    parser.add_argument(
        "--tests_dir",
        help=
        "The directory in which to search for tests by looking for summary/ directories.",
        default=str(Path("") / "reports" / "crashes" / "bad_image"),
    )

    parsed_args = parser.parse_args(sys.argv[1:])

    tests_dir: Path = Path(parsed_args.tests_dir)

    check_dir_exists(tests_dir)

    summary_dirs: List[Path] = sorted(
        d for d in tests_dir.glob("**/summary") if d.is_dir())

    check(
        bool(summary_dirs),
        AssertionError(f"No summary dirs found under {str(tests_dir)}"),
    )

    # Maps a signature (joined transformation type names) to the summary
    # directories that exhibit it.
    signature_to_dirs: Dict[str, List[Path]] = {}

    # For each summary directory, get its signature based on the transformation types remaining
    # and add the info to |signature_to_dirs|.
    for summary_dir in summary_dirs:
        log(f"Checking {summary_dir}")
        transformations_json_path = (
            summary_dir / "reduced_1" / "variant" /
            "shader.frag.transformations_json")
        transformations_json = util.file_read_text(transformations_json_path)
        transformations = json.loads(transformations_json)
        # E.g.
        # {
        #  "transformation": [
        #    {
        #      "addConstantScalar": {...}
        #    },
        #    {
        #      "addConstantComposite": {...}
        #    },
        #    ...,
        # }
        transformation_types: Set[str] = set()

        transformation_list = transformations["transformation"]

        check(
            bool(transformation_list),
            AssertionError(
                # Bug fix: previously this interpolated the JSON file
                # *contents* (transformations_json), dumping the whole file
                # into the error message; report the file path instead.
                f"No transformations found for {str(transformations_json_path)}"
            ),
        )

        for transformation in transformation_list:
            keys = transformation.keys()
            check(
                len(keys) == 1,
                AssertionError(
                    f"Transformation had more than one key: {transformation}"),
            )
            # Each transformation dict has exactly one key: its type name.
            transformation_types.add(next(iter(keys)))

        transformation_types -= COMMON_TRANSFORMATION_TYPES

        signature = "_".join(sorted(transformation_types))
        log(f"signature: {signature}")

        # Add to or update the map.
        signature_to_dirs.setdefault(signature, []).append(summary_dir)

    log("\n\nTable:\n")

    # Signatures are unique keys, so sorting the items sorts by signature.
    for signature, cases in sorted(signature_to_dirs.items()):
        log(f"{signature}:")
        for case in cases:
            log(f" {str(case)}")