def make_shader_test_string(shader_job: str, nodraw: bool) -> str:
    """
    Build a piglit shader_test string from a shader job and its shader.

    :param shader_job: The path to the shader job file.
    :param nodraw: determines if the draw command is added to draw the shader.
    :return: the shader_test
    """
    job_properties = get_json_properties(shader_job)
    with gfuzz_common.open_helper(get_shader_from_job(shader_job), 'r') as shader:
        shader_source = shader.read()
    # The version header always has to be on the first line of the shader.
    version_header = shader_source.split('\n')[0]
    sections = [
        make_require_header(version_header),
        make_vertex_shader_header(),
        make_fragment_shader_header(shader_source),
        make_test_header(job_properties, nodraw),
    ]
    # Joining with '\n' yields the same text as appending '\n' after each
    # section except the last.
    return '\n'.join(sections)
def main_helper(args: List[str]) -> None:
    """
    Main function. Parses arguments, delegates to other functions to write the shader_test
    string, and writes the string to file.

    :param args: the command line arguments.
    """
    description = (
        'Given a GraphicsFuzz shader job JSON file, produce a Mesa piglit shader_test file. '
        'The shader test will be the same name as the shader job.')
    parser = argparse.ArgumentParser(description=description)
    parser.add_argument(
        'shader_job',
        help='Path to the GraphicsFuzz shader job JSON file.')
    parser.add_argument(
        '--nodraw',
        action='store_true',
        help='Do not draw the shader output when running the test. Useful for crash testing.')
    parsed_args = parser.parse_args(args)
    # Build the shader_test content, then write it next to the shader job.
    test_string = make_shader_test_string(parsed_args.shader_job, parsed_args.nodraw)
    with gfuzz_common.open_helper(
            get_shader_test_from_job(parsed_args.shader_job), 'w') as shader_test:
        shader_test.write(test_string)
def get_json_properties(shader_job: str) -> dict:
    """
    Helper function to parse a shader job JSON file into a dict of properties.
    Throws IOError if the file can't be parsed.

    :param shader_job: the path to the shader job file.
    :return: a dict of JSON properties.
    """
    # Parse and return in one step; the context manager still closes the file.
    with gfuzz_common.open_helper(shader_job, 'r') as job:
        return json.load(job)
def dump_glxinfo(filename: str) -> None:
    """
    Helper function that dumps the stable results of 'glxinfo -B' to a JSON file.
    Removes any file with the same name as filename before writing. Will throw an exception
    if 'glxinfo' fails or the JSON file can't be written.

    :param filename: the filename to write to.
    """
    # There are some useless or unstable lines in glxinfo we need to remove before trying to parse
    # into JSON, so keep only the 'OpenGL ...' descriptor lines.
    glxinfo_lines = filter(
        lambda glx_line: 'OpenGL' in glx_line,
        gfuzz_common.subprocess_helper(glxinfo_cmd()).stdout.split('\n'))
    # We form keys out of the OpenGL info descriptors and values out of the hardware dependent
    # strings. For example, "OpenGL version string: 4.6.0 NVIDIA 430.14" would become
    # { "OpenGL version string": "4.6.0 NVIDIA 430.14" }.
    glx_dict = dict()
    for line in glxinfo_lines:
        # maxsplit=1 keeps any ': ' occurring inside the value intact, so a well-formed
        # line always splits into exactly [descriptor, value].
        prop = line.split(': ', 1)
        # Bug fix: the original used 'is 2' (identity, not equality) which only worked by
        # accident of CPython small-int caching.
        assert len(prop) == 2, 'Unexpected glxinfo line: ' + line
        glx_dict[prop[0]] = prop[1]
    with gfuzz_common.open_helper(filename, 'w') as info_file:
        # json.dumps produces the same output as json.JSONEncoder().encode.
        info_file.write(json.dumps(glx_dict))
def main():
    """Piglit worker entry point: connect to a GraphicsFuzz server and process image jobs forever.

    Parses command-line arguments, collects glxinfo-based worker information (best effort),
    then loops: connect via thrift, fetch a job, run it with the piglit GLES3 shader runner,
    and report the result back. Never returns under normal operation.
    """
    description = (
        'Uses the piglit GLES3 shader runner to render shader jobs.')
    parser = argparse.ArgumentParser(description=description)
    # Required
    parser.add_argument('worker_name', help='The name that will refer to this worker.')
    # Optional
    parser.add_argument(
        '--server', default='http://localhost:8080',
        help='Server URL to connect to (default: http://localhost:8080 )')
    args = parser.parse_args()
    # Fail fast if the shader runner binary is missing from PATH.
    gfuzz_common.tool_on_path('shader_runner_gles3')
    gfuzz_common.log('Worker: ' + args.worker_name)
    server = args.server + '/request'
    gfuzz_common.log('server: ' + server)
    # Get worker info. The info string is sent to the server on connect; if collection
    # fails we proceed with an empty JSON object rather than aborting.
    worker_info_json_string = '{}'
    gfuzz_common.log('Dumping glxinfo to file for worker info string...')
    try:
        dump_glxinfo(WORKER_INFO_FILE)
        with gfuzz_common.open_helper(WORKER_INFO_FILE, 'r') as info_file:
            worker_info_json_string = info_file.read()
    except Exception as ex:
        gfuzz_common.log(str(ex))
        gfuzz_common.log('Could not get worker info, continuing without it.')
    service = None
    worker = None
    while True:
        # (Re-)establish the thrift connection whenever we have no live service handle.
        if not service:
            gfuzz_common.log('Connecting to server...')
            service, worker = thrift_connect(server, args.worker_name, worker_info_json_string)
            if not service:
                gfuzz_common.log('Failed to connect, retrying...')
                time.sleep(1)
                continue
            assert worker
            # The worker name doubles as the working directory for job output.
            os.makedirs(worker, exist_ok=True)
        try:
            job = service.getJob(worker)
            if job.noJob is not None:
                gfuzz_common.log("No job")
            elif job.skipJob is not None:
                gfuzz_common.log("Skip job")
                service.jobDone(worker, job)
            else:
                assert job.imageJob
                if job.imageJob.computeSource:
                    # This worker only renders image jobs; report compute jobs as errors.
                    gfuzz_common.log("Got a compute job, but this worker "
                                     "doesn't support compute shaders.")
                    job.imageJob.result = tt.ImageJobResult()
                    job.imageJob.result.status = tt.JobStatus.UNEXPECTED_ERROR
                else:
                    gfuzz_common.log("#### Image job: " + job.imageJob.name)
                    job.imageJob.result = do_image_job(job.imageJob, work_dir=worker)
                gfuzz_common.log("Sending back, results status: {}".format(
                    job.imageJob.result.status))
                service.jobDone(worker, job)
            # Clean up the per-job working directory before fetching the next job.
            gfuzz_common.remove(worker)
            continue
        except (TApplicationException, ConnectionError):
            # Dropping the service handle forces a reconnect on the next iteration.
            gfuzz_common.log("Connection to server lost. Re-initialising client.")
            service = None
        time.sleep(1)
def run_image_job(json_file: str, status_file: str, png_file: str, output_dir: str,
                  skip_render: bool) -> None:
    """
    Runs an image job. Converts the shader job to a piglit shader_test file, then delegates
    to run_shader_test to render with shader_runner. Writes the status of the job to file.

    :param json_file: The JSON uniforms to use with the shader.
    :param status_file: The status file to write to.
    :param png_file: The PNG file to write to.
    :param output_dir: The directory to use for the job.
    :param skip_render: whether to skip rendering or not.
    """
    # Prefer running under catchsegv (to capture crash backtraces) when it is available.
    use_catchsegv = True
    try:
        gfuzz_common.tool_on_path('catchsegv')
    except gfuzz_common.ToolNotOnPathError:
        use_catchsegv = False
    assert os.path.isdir(output_dir)
    assert os.path.isfile(json_file)
    arglist = [json_file]
    if skip_render:
        arglist.append(NO_DRAW_ARG)
    shader_test_file = graphicsfuzz_piglit_converter.get_shader_test_from_job(json_file)
    try:
        gfuzz_common.log('Creating shader_test file...')
        graphicsfuzz_piglit_converter.main_helper(arglist)
    except Exception as ex:
        gfuzz_common.log('Could not create shader_test from the given job.')
        raise ex
    shader_runner_cmd_list = shader_runner_cmd() + \
        [shader_test_file,
         SHADER_RUNNER_ARG_AUTO,
         SHADER_RUNNER_ARG_UNIFORMS,
         SHADER_RUNNER_ARG_FBO,
         SHADER_RUNNER_ARG_SUBTESTS]
    if use_catchsegv:
        shader_runner_cmd_list.insert(0, catchsegv_cmd())
    if not skip_render:
        shader_runner_cmd_list.append(SHADER_RUNNER_ARG_PNG)
    # Remove any stale renders from a previous job before running.
    gfuzz_common.remove(PNG_FILENAME)
    gfuzz_common.remove(COMPARE_PNG_FILENAME)
    status = \
        gfuzz_common.run_catchsegv(shader_runner_cmd_list, timeout=TIMEOUT, verbose=True) \
        if use_catchsegv else \
        gfuzz_common.subprocess_helper(shader_runner_cmd_list, timeout=TIMEOUT, verbose=True)
    # Piglit throws the output PNG render into whatever the current working directory is
    # (and there's no way to specify a location to write to) - we need to move it to wherever our
    # output is.
    if not skip_render and status == STATUS_SUCCESS:
        try:
            # An image was rendered, so we need to check for nondet. We do this by renaming the
            # rendered image, rendering a second image, and using filecmp to compare the files.
            assert os.path.isfile(PNG_FILENAME), \
                "Shader runner successfully rendered, but no image was dumped?"
            gfuzz_common.log(
                'An image was rendered - rendering again to check for nondet.')
            os.rename(PNG_FILENAME, COMPARE_PNG_FILENAME)
            status = \
                gfuzz_common.run_catchsegv(shader_runner_cmd_list, timeout=TIMEOUT,
                                           verbose=True) \
                if use_catchsegv else \
                gfuzz_common.subprocess_helper(shader_runner_cmd_list, timeout=TIMEOUT,
                                               verbose=True)
            # Something is horribly wrong if shader crashes/timeouts are inconsistent per shader.
            assert status == STATUS_SUCCESS, \
                "Shader inconsistently fails - check your graphics drivers?"
            assert os.path.isfile(PNG_FILENAME), \
                "Shader runner successfully rendered, but no image was dumped?"
            gfuzz_common.log('Comparing dumped PNG images...')
            if filecmp.cmp(PNG_FILENAME, COMPARE_PNG_FILENAME):
                gfuzz_common.log('Images are identical.')
                shutil.move(PNG_FILENAME, png_file)
            else:
                # The two renders differ: record both images as nondeterminism evidence.
                gfuzz_common.log('Images are different.')
                status = STATUS_NONDET
                shutil.move(COMPARE_PNG_FILENAME, os.path.join(output_dir, NONDET0_PNG))
                shutil.move(PNG_FILENAME, os.path.join(output_dir, NONDET1_PNG))
        finally:
            # Always clear the scratch renders from the working directory.
            gfuzz_common.log('Removing dumped images...')
            gfuzz_common.remove(PNG_FILENAME)
            gfuzz_common.remove(COMPARE_PNG_FILENAME)
    gfuzz_common.log('STATUS: ' + status)
    # The status file is how do_image_job learns the outcome of this run.
    with gfuzz_common.open_helper(status_file, 'w') as f:
        f.write(status)
def do_image_job(image_job: tt.ImageJob, work_dir: str) -> tt.ImageJobResult:
    """
    Does an image job. Sets up directories and some files, then delegates to run_image_job
    to convert the job to a shader_test and run it. Sets a global logfile to log to for the
    lifetime of the function. Gets the status of the shader job from a file that is written
    to by run_image_job.

    :param image_job: the image job containing the shader/uniforms.
    :param work_dir: the directory to work in.
    :return: the result of the image job, including the log, PNG and status.
    """
    # Output directory is based on the name of job.
    output_dir = os.path.join(work_dir, image_job.name)
    # Delete and create output directory.
    gfuzz_common.remove(output_dir)
    os.makedirs(output_dir, exist_ok=True)
    name = image_job.name
    if name.endswith('.frag'):
        name = gfuzz_common.remove_end(name, '.frag')
    frag_file = os.path.join(output_dir, name + FRAG_SUFFIX)
    json_file = os.path.join(output_dir, name + JSON_SUFFIX)
    log_file = os.path.join(output_dir, LOGFILE_NAME)
    status_file = os.path.join(output_dir, STATUS_FILENAME)
    png_file = os.path.join(output_dir, name + PNG_SUFFIX)
    nondet_0 = os.path.join(output_dir, NONDET0_PNG)
    nondet_1 = os.path.join(output_dir, NONDET1_PNG)
    # Materialise the job's shader and uniforms so the converter/runner can read them.
    gfuzz_common.write_to_file(image_job.fragmentSource, frag_file)
    gfuzz_common.write_to_file(image_job.uniformsInfo, json_file)
    res = tt.ImageJobResult()
    # Set nice defaults to fields we will not update anyway
    res.passSanityCheck = True
    res.log = 'Start: ' + name + '\n'
    with gfuzz_common.open_helper(log_file, 'w') as f:
        try:
            # Route all gfuzz_common.log output to this job's log file while it runs.
            gfuzz_common.set_logfile(f)
            run_image_job(json_file, status_file, png_file, output_dir, image_job.skipRender)
        except Exception as ex:
            # A failed run leaves no status file, which maps to UNEXPECTED_ERROR below.
            gfuzz_common.log(str(ex))
            gfuzz_common.log('Removing status file and continuing...')
            gfuzz_common.remove(status_file)
        finally:
            gfuzz_common.unset_logfile()
    if os.path.isfile(log_file):
        with gfuzz_common.open_helper(log_file, 'r') as f:
            res.log += f.read()
    if os.path.isfile(png_file):
        with gfuzz_common.open_bin_helper(png_file, 'rb') as f:
            res.PNG = f.read()
    if os.path.isfile(status_file):
        # Map the status string written by run_image_job onto the thrift JobStatus enum.
        with gfuzz_common.open_helper(status_file, 'r') as f:
            status = f.read().rstrip()
        if status == STATUS_SUCCESS:
            res.status = tt.JobStatus.SUCCESS
        elif status == STATUS_CRASH:
            res.status = tt.JobStatus.CRASH
        elif status == STATUS_TIMEOUT:
            res.status = tt.JobStatus.TIMEOUT
        elif status == STATUS_UNEXPECTED:
            res.status = tt.JobStatus.UNEXPECTED_ERROR
        elif status == STATUS_NONDET:
            # Attach both differing renders as evidence of nondeterminism.
            res.status = tt.JobStatus.NONDET
            with gfuzz_common.open_bin_helper(nondet_0, 'rb') as f:
                res.PNG = f.read()
            with gfuzz_common.open_bin_helper(nondet_1, 'rb') as f:
                res.PNG2 = f.read()
        else:
            res.log += '\nUnknown status value: ' + status + '\n'
            res.status = tt.JobStatus.UNEXPECTED_ERROR
    else:
        # Not even a status file?
        res.log += '\nNo STATUS file\n'
        res.status = tt.JobStatus.UNEXPECTED_ERROR
    return res
def main():
    """Vulkan worker entry point: process shader jobs from a server (or one local job).

    Parses arguments, optionally collects worker information, then loops fetching jobs
    from the server and dispatching them to do_image_job/do_compute_job. With
    --local-shader-job, runs a single locally-stored job and returns.
    """
    parser = argparse.ArgumentParser()
    # Required arguments
    parser.add_argument(
        'worker',
        help='Worker name to identify to the server')
    parser.add_argument(
        'target',
        help=runspv.TARGET_HELP)
    # Optional arguments
    parser.add_argument(
        '--force',
        action='store_true',
        help=runspv.FORCE_OPTION_HELP)
    parser.add_argument(
        '--legacy-worker',
        action='store_true',
        help=runspv.LEGACY_OPTION_HELP)
    parser.add_argument(
        '--serial',
        help=runspv.SERIAL_OPTION_HELP)
    parser.add_argument(
        '--server',
        default='http://localhost:8080',
        help='Server URL (default: http://localhost:8080 )')
    parser.add_argument(
        '--spirvopt',
        help=runspv.SPIRV_OPT_OPTION_HELP + 'Pass RANDOM to have a random selection of '
             'optimization arguments used for each shader job that '
             'is processed. Note that this makes repeat runs of '
             'the same shader job nondeterministic, as different '
             'optimizer arguments may be chosen.')
    parser.add_argument(
        '--local-shader-job',
        help='Execute a single, locally stored shader job (for debugging), instead of using the '
             'server.')
    parser.add_argument(
        '--seed',
        help='A seed to control random number generation.')
    args = parser.parse_args()
    # Seed the random number generator. If no seed is provided then 'None' will be used, which
    # seeds the generator based on system time.
    random.seed(args.seed)
    spirvopt_args = None  # type: Union[Optional[List[str]], str]
    if args.spirvopt:
        # "RANDOM" is kept as a sentinel string; anything else is a literal argument list.
        if args.spirvopt == "RANDOM":
            spirvopt_args = "RANDOM"
        else:
            spirvopt_args = args.spirvopt.split()
    # Check the target is known.
    if not (args.target == 'android' or args.target == 'host'):
        raise ValueError('Target must be "android" or "host"')
    # Record whether or not we are targeting Android.
    is_android = (args.target == 'android')
    # Check the optional arguments are consistent with the target.
    if not is_android and args.force:
        raise ValueError('"force" option is only compatible with "android" target')
    if not is_android and args.serial:
        raise ValueError('"serial" option is only compatible with "android" target')
    print('Worker: ' + args.worker)
    server = args.server + '/request'
    print('server: ' + server)
    if args.serial:
        os.environ['ANDROID_SERIAL'] = args.serial
    service = None
    worker = None
    # Get worker info
    worker_info_file = 'worker_info.json'
    gfuzz_common.remove(worker_info_file)
    worker_info_json_string = '{}'
    try:
        if is_android:
            runspv.dump_info_android_legacy(wait_for_screen=not args.force)
        else:
            runspv.dump_info_host_legacy()
        if not os.path.isfile(worker_info_file):
            raise Exception(
                'Failed to retrieve worker information. If targeting Android, make sure '
                'the app permission to write to external storage is enabled.'
            )
        with gfuzz_common.open_helper(worker_info_file, 'r') as f:
            worker_info_json_string = f.read()
    except Exception as ex:
        # Legacy workers require the info; modern workers can proceed without it.
        if args.legacy_worker:
            # Bare 'raise' re-raises with the original traceback intact ('raise ex' resets it).
            raise
        print(ex)
        print('Continuing without worker information.')
    # Main loop
    while True:
        # Abort early if a pinned Android device has gone away.
        if is_android \
                and 'ANDROID_SERIAL' in os.environ and \
                not is_device_available(os.environ['ANDROID_SERIAL']):
            raise Exception(
                '#### ABORT: device {} is not available (either offline or not connected?)'
                .format(os.environ['ANDROID_SERIAL'])
            )
        # Special case: local shader job for debugging.
        if args.local_shader_job:
            # Bug fix: the original assert message contained a '{}' placeholder but never
            # called .format(), so the path was missing from the failure message.
            assert args.local_shader_job.endswith('.json'), \
                'Expected local shader job "{}" to end with .json'.format(args.local_shader_job)
            shader_job_prefix = gfuzz_common.remove_end(args.local_shader_job, '.json')
            fake_job = tt.ImageJob()
            fake_job.name = os.path.basename(shader_job_prefix)
            assert os.path.isfile(args.local_shader_job), \
                'Shader job {} does not exist'.format(args.local_shader_job)
            with gfuzz_common.open_helper(args.local_shader_job, 'r') as f:
                fake_job.uniformsInfo = f.read()
            # Attach whichever shader stages exist next to the JSON file.
            if os.path.isfile(shader_job_prefix + '.frag'):
                with gfuzz_common.open_helper(shader_job_prefix + '.frag', 'r') as f:
                    fake_job.fragmentSource = f.read()
            if os.path.isfile(shader_job_prefix + '.vert'):
                with gfuzz_common.open_helper(shader_job_prefix + '.vert', 'r') as f:
                    fake_job.vertexSource = f.read()
            if os.path.isfile(shader_job_prefix + '.comp'):
                with gfuzz_common.open_helper(shader_job_prefix + '.comp', 'r') as f:
                    fake_job.computeSource = f.read()
                fake_job.computeInfo = fake_job.uniformsInfo
            do_image_job(args, fake_job, spirvopt_args, work_dir='out')
            return
        if not service:
            service, worker = get_service(server, args, worker_info_json_string)
            if not service:
                print("Cannot connect to server, retry in a second...")
                time.sleep(1)
                continue
        assert worker is not None
        os.makedirs(worker, exist_ok=True)
        try:
            job = service.getJob(worker)
            if job.noJob is not None:
                print("No job")
            elif job.skipJob is not None:
                print("Skip job")
                service.jobDone(worker, job)
            else:
                assert job.imageJob is not None
                if job.imageJob.computeSource:
                    print("#### Compute job: " + job.imageJob.name)
                    job.imageJob.result = do_compute_job(
                        args, job.imageJob, spirvopt_args, work_dir=worker
                    )
                else:
                    print("#### Image job: " + job.imageJob.name)
                    job.imageJob.result = do_image_job(
                        args, job.imageJob, spirvopt_args, work_dir=worker
                    )
                print("Send back, results status: {}".format(job.imageJob.result.status))
                service.jobDone(worker, job)
            continue
        except (TApplicationException, ConnectionError):
            # Drop the service handle so the next iteration reconnects.
            print("Connection to server lost. Re-initialising client.")
            service = None
        time.sleep(1)
def do_compute_job(
        args,
        comp_job: tt.ImageJob,
        spirv_opt_args: Optional[List[str]],
        work_dir: str
) -> tt.ImageJobResult:
    """Run a compute shader job via runspv's Amber path and collect its result.

    Writes the job's shader and uniforms to a fresh per-job output directory, runs
    runspv.run_compute_amber with its logger pointed at the job's log file, then reads the
    STATUS file the run produced to build the result (including SSBO output on success).

    :param args: parsed command-line arguments (legacy_worker, force and target are read).
    :param comp_job: the compute job containing the shader source and compute info.
    :param spirv_opt_args: optional spirv-opt arguments to apply, or None.
    :param work_dir: the directory in which to create the job's output directory.
    :return: the result of the compute job, including log, status and SSBO outputs.
    """
    # Output directory is based on the name of job.
    output_dir = os.path.join(work_dir, comp_job.name)
    # Delete and create output directory.
    gfuzz_common.remove(output_dir)
    os.makedirs(output_dir, exist_ok=True)
    tmpcomp = os.path.join(output_dir, 'tmp.comp')
    tmpjson = os.path.join(output_dir, 'tmp.json')
    log_file = os.path.join(output_dir, runspv.LOGFILE_NAME)
    ssbo_json_file = os.path.join(output_dir, 'ssbo.json')
    # Output files from running the app.
    status_file = os.path.join(output_dir, 'STATUS')
    gfuzz_common.write_to_file(comp_job.computeSource, tmpcomp)
    gfuzz_common.write_to_file(comp_job.computeInfo, tmpjson)
    res = tt.ImageJobResult()
    res.log = '#### Start compute shader\n\n'
    # Compute jobs are only supported on the non-legacy (Amber) worker path.
    assert not args.legacy_worker
    # Set runspv logger. Use try-finally to clean up.
    with gfuzz_common.open_helper(log_file, 'w') as f:
        try:
            runspv.log_to_file = f
            runspv.run_compute_amber(
                comp_original=tmpcomp,
                json_file=tmpjson,
                output_dir=output_dir,
                force=args.force,
                is_android=(args.target == 'android'),
                skip_render=comp_job.skipRender,
                spirv_opt_args=spirv_opt_args
            )
        except Exception as ex:
            # A failed run leaves no STATUS file, which maps to UNEXPECTED_ERROR below.
            runspv.log('Exception: ' + str(ex))
            runspv.log('Removing STATUS file.')
            gfuzz_common.remove(status_file)
            runspv.log('Continuing.')
        finally:
            runspv.log_to_file = None
    if os.path.isfile(log_file):
        with gfuzz_common.open_helper(log_file, 'r') as f:
            res.log += f.read()
    if os.path.isfile(status_file):
        # Map the STATUS string written by runspv onto the thrift JobStatus enum.
        with gfuzz_common.open_helper(status_file, 'r') as f:
            status = f.read().rstrip()
        if status == 'SUCCESS':
            res.status = tt.JobStatus.SUCCESS
            # On success the SSBO contents must have been dumped for the server.
            assert (os.path.isfile(ssbo_json_file))
            with gfuzz_common.open_helper(ssbo_json_file, 'r') as f:
                res.computeOutputs = f.read()
        elif status == 'CRASH':
            res.status = tt.JobStatus.CRASH
        elif status == 'TIMEOUT':
            res.status = tt.JobStatus.TIMEOUT
        else:
            res.log += '\nUnknown status value: ' + status + '\n'
            res.status = tt.JobStatus.UNEXPECTED_ERROR
    else:
        # Not even a status file?
        res.log += '\nNo STATUS file\n'
        res.status = tt.JobStatus.UNEXPECTED_ERROR
    return res
def do_image_job(
        args,
        image_job,
        spirv_opt_args: Optional[List[str]],
        work_dir: str
) -> tt.ImageJobResult:
    """Run an image shader job via runspv and collect its result.

    Writes the job's fragment shader and uniforms to a fresh per-job output directory,
    dispatches to the legacy host/Android runner or the Amber runner depending on args,
    then reads the STATUS file the run produced to build the result (log, PNG(s), status).

    :param args: parsed command-line arguments (legacy_worker, target and force are read).
    :param image_job: the image job containing the shader/uniforms.
    :param spirv_opt_args: optional spirv-opt arguments (may be the "RANDOM" sentinel,
        resolved per-job below), or None.
    :param work_dir: the directory in which to create the job's output directory.
    :return: the result of the image job, including the log, PNG and status.
    """
    # Output directory is based on the name of job.
    output_dir = os.path.join(work_dir, image_job.name)
    # Delete and create output directory.
    gfuzz_common.remove(output_dir)
    os.makedirs(output_dir, exist_ok=True)
    name = image_job.name
    if name.endswith('.frag'):
        name = gfuzz_common.remove_end(name, '.frag')
    # TODO(324): the worker currently assumes that no vertex shader is present in the image job.
    vert_file = prepare_vert_file(output_dir) if args.legacy_worker else None
    frag_file = os.path.join(output_dir, name + '.frag')
    json_file = os.path.join(output_dir, name + '.json')
    png_file = os.path.join(output_dir, 'image_0.png')
    log_file = os.path.join(output_dir, runspv.LOGFILE_NAME)
    status_file = os.path.join(output_dir, 'STATUS')
    nondet_0 = os.path.join(output_dir, 'nondet0.png')
    nondet_1 = os.path.join(output_dir, 'nondet1.png')
    res = tt.ImageJobResult()
    skip_render = image_job.skipRender
    # Set nice defaults to fields we will not update anyway
    res.passSanityCheck = True
    res.log = 'Start: ' + name + '\n'
    gfuzz_common.write_to_file(image_job.fragmentSource, frag_file)
    gfuzz_common.write_to_file(image_job.uniformsInfo, json_file)
    # Set runspv logger. Use try-finally to clean up.
    with gfuzz_common.open_helper(log_file, 'w') as f:
        try:
            runspv.log_to_file = f
            # Resolve the "RANDOM" sentinel (if used) into a concrete argument list per job.
            resolved_spirvopt_args = resolve_spirvopt_args(spirv_opt_args)
            if args.legacy_worker:
                if args.target == 'host':
                    runspv.run_image_host_legacy(
                        vert_original=vert_file,
                        frag_original=frag_file,
                        json_file=json_file,
                        output_dir=output_dir,
                        skip_render=skip_render,
                        spirv_opt_args=resolved_spirvopt_args,
                    )
                else:
                    assert args.target == 'android'
                    runspv.run_image_android_legacy(
                        vert_original=vert_file,
                        frag_original=frag_file,
                        json_file=json_file,
                        output_dir=output_dir,
                        force=args.force,
                        skip_render=skip_render,
                        spirv_opt_args=resolved_spirvopt_args,
                    )
            else:
                runspv.run_image_amber(
                    vert_original=vert_file,
                    frag_original=frag_file,
                    json_file=json_file,
                    output_dir=output_dir,
                    force=args.force,
                    is_android=(args.target == 'android'),
                    skip_render=skip_render,
                    spirv_opt_args=resolved_spirvopt_args
                )
        except Exception as ex:
            # A failed run leaves no STATUS file, which maps to UNEXPECTED_ERROR below.
            runspv.log('Exception: ' + str(ex))
            runspv.log('Removing STATUS file.')
            gfuzz_common.remove(status_file)
            runspv.log('Continuing.')
        finally:
            runspv.log_to_file = None
    if os.path.isfile(log_file):
        with gfuzz_common.open_helper(log_file, 'r') as f:
            res.log += f.read()
    if os.path.isfile(png_file):
        with gfuzz_common.open_bin_helper(png_file, 'rb') as f:
            res.PNG = f.read()
    if os.path.isfile(status_file):
        # Map the STATUS string written by runspv onto the thrift JobStatus enum.
        with gfuzz_common.open_helper(status_file, 'r') as f:
            status = f.read().rstrip()
        if status == 'SUCCESS':
            res.status = tt.JobStatus.SUCCESS
        elif status == 'CRASH':
            res.status = tt.JobStatus.CRASH
        elif status == 'TIMEOUT':
            res.status = tt.JobStatus.TIMEOUT
        elif status == 'SANITY_ERROR':
            res.status = tt.JobStatus.SANITY_ERROR
        elif status == 'UNEXPECTED_ERROR':
            res.status = tt.JobStatus.UNEXPECTED_ERROR
        elif status == 'NONDET':
            # Attach both differing renders as evidence of nondeterminism.
            res.status = tt.JobStatus.NONDET
            with gfuzz_common.open_bin_helper(nondet_0, 'rb') as f:
                res.PNG = f.read()
            with gfuzz_common.open_bin_helper(nondet_1, 'rb') as f:
                res.PNG2 = f.read()
        else:
            res.log += '\nUnknown status value: ' + status + '\n'
            res.status = tt.JobStatus.UNEXPECTED_ERROR
    else:
        # Not even a status file?
        res.log += '\nNo STATUS file\n'
        res.status = tt.JobStatus.UNEXPECTED_ERROR
    return res
def get_ssbo(result_json_filename: str) -> List:
    """
    Read an Amber result JSON file and return its SSBO output data.

    :param result_json_filename: path to the result JSON file.
    :raises ValueError: if the file holds no 'outputs'/'ssbo' data.
    :return: the SSBO data list.
    """
    with gfuzz_common.open_helper(result_json_filename, 'r') as f:
        result = json.load(f)
    # Guard against an empty document or a missing outputs/ssbo structure.
    if not result or 'outputs' not in result or 'ssbo' not in result['outputs']:
        raise ValueError('No SSBO data found')
    return result['outputs']['ssbo']