Example #1
def run_generate_on_shader_job(
    spirv_fuzz_path: Path,
    reference_shader_json: Path,
    output_shader_json: Path,
    donor_shader_job_paths: Optional[List[Path]] = None,
    seed: Optional[str] = None,
    other_args: Optional[List[str]] = None,
) -> Path:
    """Runs spirv-fuzz on each shader of the reference shader job (using the donor shaders, if any) to produce a variant shader job."""
    if donor_shader_job_paths is None:
        donor_shader_job_paths = []

    util.copy_file(reference_shader_json, output_shader_json)

    suffixes_that_exist = shader_job_util.get_related_suffixes_that_exist(
        reference_shader_json, shader_job_util.EXT_ALL,
        [shader_job_util.SUFFIX_SPIRV])

    for suffix in suffixes_that_exist:

        # Create a donors list file "donors{suffix}.txt" (e.g. "donors.frag.spv.txt")
        # listing the paths of all relevant donor .spv shaders.
        donor_list_contents = ""
        for donor_shader_job_path in donor_shader_job_paths:
            donor_shader_path = donor_shader_job_path.with_suffix(suffix)
            if donor_shader_path.exists():
                donor_list_contents += f"{donor_shader_path}\n"
        donors_list_path = util.file_write_text(
            reference_shader_json.parent / f"donors{suffix}.txt",
            donor_list_contents)

        run_generate(
            spirv_fuzz_path,
            reference_shader_json.with_suffix(suffix),
            output_shader_json.with_suffix(suffix),
            donors_list_path=donors_list_path,
            seed=seed,
            other_args=other_args,
        )

    return output_shader_json
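
A minimal usage sketch for the function above, assuming the call is made from the same module so that run_generate, util, and shader_job_util are already in scope; all paths, the donor layout, and the seed are placeholders:

from pathlib import Path

# Hypothetical paths; the spirv-fuzz location and shader job layout are placeholders.
variant_json = run_generate_on_shader_job(
    spirv_fuzz_path=Path("binaries/spirv-fuzz"),
    reference_shader_json=Path("reference/shader.json"),
    output_shader_json=Path("variant/shader.json"),
    donor_shader_job_paths=[Path("donors/donor_0/shader.json")],
    seed="1234",
)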
Example #2
def run_replay_on_shader_job(
    spirv_fuzz_path: Path,
    variant_shader_job_json: Path,
    output_shader_job_json: Path,
    other_args: Optional[List[str]] = None,
) -> Path:
    """Replays all transformations except the last on all shaders to get a similar variant shader job."""
    util.copy_file(variant_shader_job_json, output_shader_job_json)

    suffixes_that_exist = shader_job_util.get_related_suffixes_that_exist(
        variant_shader_job_json, shader_job_util.EXT_ALL,
        [shader_job_util.SUFFIX_SPIRV])

    for suffix in suffixes_that_exist:

        run_replay(
            spirv_fuzz_path,
            variant_shader_job_json.with_suffix(suffix),
            output_shader_job_json.with_suffix(suffix),
            other_args=other_args,
        )

    return output_shader_job_json
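
A usage sketch for the replay helper above, again assuming it runs in the same module as run_replay; the paths are placeholders:

from pathlib import Path

# Hypothetical paths; the result is a shader job similar to the variant,
# minus its last transformation.
similar_variant_json = run_replay_on_shader_job(
    spirv_fuzz_path=Path("binaries/spirv-fuzz"),
    variant_shader_job_json=Path("variant/shader.json"),
    output_shader_job_json=Path("variant_replayed/shader.json"),
)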
Example #3
def run_generate_on_shader_job(
    spirv_fuzz_path: Path,
    reference_shader_json: Path,
    output_shader_json: Path,
    seed: Optional[str] = None,
    other_args: Optional[List[str]] = None,
) -> Path:
    """Runs spirv-fuzz on each shader of the reference shader job to produce a variant shader job."""
    util.copy_file(reference_shader_json, output_shader_json)

    suffixes_that_exist = shader_job_util.get_related_suffixes_that_exist(
        reference_shader_json, shader_job_util.EXT_ALL,
        [shader_job_util.SUFFIX_SPIRV])

    for suffix in suffixes_that_exist:
        run_generate(
            spirv_fuzz_path,
            reference_shader_json.with_suffix(suffix),
            output_shader_json.with_suffix(suffix),
            seed=seed,
            other_args=other_args,
        )

    return output_shader_json
Example #4
def run_reduction(
    source_dir_to_reduce: Path,
    reduction_output_dir: Path,
    binary_manager: binaries_util.BinaryManager,
    settings: Settings,
) -> Path:
    """Reduces the shader job in |source_dir_to_reduce| (transformations first, then SPIR-V where applicable) and returns a symlink to the best reduction."""
    test = test_util.metadata_read_from_source_dir(source_dir_to_reduce)
    shader_jobs = tool.get_shader_jobs(source_dir_to_reduce)

    # TODO: if needed, this could become a parameter to this function.
    shader_job_to_reduce = shader_jobs[0]

    if len(shader_jobs) > 1:
        check(
            len(shader_jobs) == 2
            and shader_jobs[1].name == test_util.VARIANT_DIR,
            AssertionError(
                "Can only reduce tests with shader jobs reference and variant, or just variant."
            ),
        )
        shader_job_to_reduce = shader_jobs[1]

    shader_transformation_suffixes = shader_job_util.get_related_suffixes_that_exist(
        shader_job_to_reduce.shader_job,
        language_suffix=(shader_job_util.SUFFIX_TRANSFORMATIONS, ),
    )

    shader_spv_suffixes = shader_job_util.get_related_suffixes_that_exist(
        shader_job_to_reduce.shader_job,
        language_suffix=(shader_job_util.SUFFIX_SPIRV, ))

    reduced_source_dir = source_dir_to_reduce

    for index, suffix in enumerate(shader_transformation_suffixes):
        # E.g. .frag.transformations -> .frag
        extension_to_reduce = str(Path(suffix).with_suffix(""))
        reduced_source_dir = run_reduction_part(
            reduction_part_output_dir=reduction_output_dir /
            f"0_{index}_{suffix.split('.')[1]}",
            source_dir_to_reduce=reduced_source_dir,
            shader_job_name_to_reduce=shader_job_to_reduce.name,
            extension_to_reduce=extension_to_reduce,
            preserve_semantics=True,
            binary_manager=binary_manager,
            settings=settings,
        )

    if (test.crash_signature != signature_util.BAD_IMAGE_SIGNATURE
            and not settings.skip_spirv_reduce):
        for index, suffix in enumerate(shader_spv_suffixes):
            # E.g. .frag.spv -> .frag
            extension_to_reduce = str(Path(suffix).with_suffix(""))
            reduced_source_dir = run_reduction_part(
                reduction_part_output_dir=reduction_output_dir /
                f"1_{index}_{suffix.split('.')[1]}",
                source_dir_to_reduce=reduced_source_dir,
                shader_job_name_to_reduce=shader_job_to_reduce.name,
                extension_to_reduce=extension_to_reduce,
                preserve_semantics=False,
                binary_manager=binary_manager,
                settings=settings,
            )

    # Create and return a symlink to the "best" reduction.
    return util.make_directory_symlink(
        new_symlink_file_path=reduction_output_dir / fuzz.BEST_REDUCTION_NAME,
        existing_dir=reduced_source_dir.parent,
    )
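
A sketch of driving run_reduction, reusing the settings and binary-manager helpers that appear in Example #5; the directory layout is a placeholder:

from pathlib import Path

# Hypothetical directories; settings_util and binaries_util are used as in Example #5.
settings = settings_util.read_or_create(Path("settings.json"))
binary_manager = binaries_util.get_default_binary_manager(settings)

best_reduction_symlink = run_reduction(
    source_dir_to_reduce=Path("tests/test_xyz/source"),
    reduction_output_dir=Path("tests/test_xyz/reductions"),
    binary_manager=binary_manager,
    settings=settings,
)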
Example #5
def main() -> None:  # pylint: disable=too-many-locals;
    """Generates an AmberScript test from a .json shader job."""
    parser = argparse.ArgumentParser(
        description="Generates an AmberScript test from a shader job.")

    parser.add_argument(
        "shader_job",
        help="The input .json shader job file.",
    )

    parser.add_argument(
        "--output",
        help="Output directory.",
        default="output",
    )

    parser.add_argument(
        "--spirv_opt_args",
        help=
        "Arguments for spirv-opt as a space-separated string, or an empty string to skip running spirv-opt.",
        default="",
    )

    parser.add_argument(
        "--settings",
        help=
        "Path to a settings JSON file for this instance. The file will be generated if needed.",
        default="settings.json",
    )

    parsed_args = parser.parse_args(sys.argv[1:])

    shader_job: Path = Path(parsed_args.shader_job)
    out_dir: Path = Path(parsed_args.output)
    spirv_opt_args_str: str = parsed_args.spirv_opt_args
    settings_path: Path = Path(parsed_args.settings)

    spirv_opt_args: List[str] = []
    if spirv_opt_args_str:
        spirv_opt_args = spirv_opt_args_str.split(" ")

    settings = settings_util.read_or_create(settings_path)

    binary_manager = binaries_util.get_default_binary_manager(settings)

    staging_dir = out_dir / "staging"

    template_source_dir = staging_dir / "source_template"
    test_dir = staging_dir / "test"

    run_output_dir: Path = out_dir / "run"

    # Remove stale directories.
    if staging_dir.is_dir():
        shutil.rmtree(staging_dir)
    if run_output_dir.is_dir():
        shutil.rmtree(run_output_dir)

    # Create source template and call |make_test|.

    if shader_job_util.get_related_suffixes_that_exist(
            shader_job, language_suffix=[shader_job_util.SUFFIX_SPIRV]):
        # This is a SPIR-V shader job.

        shader_job_util.copy(
            shader_job,
            template_source_dir / test_util.VARIANT_DIR / test_util.SHADER_JOB,
            language_suffix=shader_job_util.SUFFIXES_SPIRV_FUZZ_INPUT,
        )

        fuzz_spirv_amber_test.make_test(
            template_source_dir,
            test_dir,
            spirv_opt_args=spirv_opt_args,
            binary_manager=binary_manager,
            derived_from=shader_job.stem,
            stable_shader=False,
            common_spirv_args=list(settings.common_spirv_args),
        )

    elif shader_job_util.get_related_suffixes_that_exist(
            shader_job, language_suffix=[shader_job_util.SUFFIX_GLSL]):
        # This is a GLSL shader job.

        # The "graphicsfuzz-tool" tool is designed to be on your PATH so that e.g. ".bat" will be appended on Windows.
        # So we use tool_on_path with a custom PATH to get the actual file we want to execute.
        graphicsfuzz_tool_path = util.tool_on_path(
            "graphicsfuzz-tool",
            str(
                binary_manager.get_binary_path_by_name(
                    "graphicsfuzz-tool").path.parent),
        )

        with util.file_open_text(staging_dir / "log.txt", "w") as log_file:
            try:
                gflogging.push_stream_for_logging(log_file)

                # Create the prepared (for Vulkan GLSL) reference.
                glsl_generate_util.run_prepare_reference(
                    graphicsfuzz_tool_path,
                    shader_job,
                    template_source_dir / test_util.VARIANT_DIR / test_util.SHADER_JOB,
                    legacy_graphics_fuzz_vulkan_arg=settings.legacy_graphics_fuzz_vulkan_arg,
                )
            finally:
                gflogging.pop_stream_for_logging()

        fuzz_glsl_amber_test.make_test(
            template_source_dir,
            test_dir,
            spirv_opt_args=spirv_opt_args,
            binary_manager=binary_manager,
            derived_from=shader_job.stem,
            stable_shader=False,
            common_spirv_args=list(settings.common_spirv_args),
        )

    else:
        raise AssertionError(
            "Unexpected shader job type; expected GLSL or SPIR-V shaders.")

    preprocessor_cache = util.CommandCache()

    fuzz_test_util.run_shader_job(
        source_dir=test_util.get_source_dir(test_dir),
        output_dir=run_output_dir,
        binary_manager=binary_manager,
        device=Device(host=DeviceHost()),
        preprocessor_cache=preprocessor_cache,
        stop_after_amber=True,
    )
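
Since the entry point above is argparse-driven, one way to exercise it programmatically is to fake the command line; the program name and argument values below are placeholders, with the flags taken from the parser definitions:

import sys

# Hypothetical invocation; only the positional shader_job argument is required,
# the remaining flags have defaults.
sys.argv = [
    "make_amber_test",  # placeholder program name
    "shader.json",
    "--output", "output",
    "--spirv_opt_args", "-O",
    "--settings", "settings.json",
]
main()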
Example #6
def compile_shader_job(  # pylint: disable=too-many-locals;
    name: str,
    input_json: Path,
    work_dir: Path,
    binary_paths: binaries_util.BinaryGetter,
    spirv_opt_args: Optional[List[str]] = None,
    shader_overrides: Optional[ShaderSuffixToShaderOverride] = None,
    preprocessor_cache: Optional[util.CommandCache] = None,
    skip_validation: bool = False,
    common_spirv_args: Optional[List[str]] = None,
) -> SpirvCombinedShaderJob:
    """Compiles a GLSL or SPIR-V shader job to SPIR-V, disassembles it, optionally runs spirv-opt, and validates the result."""
    result = input_json

    glsl_source_shader_job: Optional[Path] = None

    glsl_suffixes = shader_job_util.get_related_suffixes_that_exist(
        result, language_suffix=(shader_job_util.SUFFIX_GLSL,)
    )

    spirv_suffixes = shader_job_util.get_related_suffixes_that_exist(
        result, language_suffix=[shader_job_util.SUFFIX_SPIRV]
    )

    # If GLSL:
    if glsl_suffixes:
        glsl_source_shader_job = result

        result = shader_job_util.copy(result, work_dir / "0_glsl" / result.name)

        if shader_overrides:
            raise AssertionError("Shader overrides are not supported for GLSL")

        result = glslang_glsl_shader_job_to_spirv(
            result,
            work_dir / "1_spirv" / result.name,
            binary_paths,
            preprocessor_cache=preprocessor_cache,
        )
    # If SPIR-V:
    elif spirv_suffixes:

        result = shader_job_util.copy(
            result,
            work_dir / "1_spirv" / result.name,
            # Copy all spirv-fuzz related files too:
            language_suffix=shader_job_util.SUFFIXES_SPIRV_FUZZ,
        )

        if shader_overrides:
            for suffix in spirv_suffixes:
                shader_override = shader_overrides.get(suffix)
                if shader_override:
                    check(
                        name == shader_override.name,
                        AssertionError(
                            f"shader job name {name} does not match shader override job name {shader_override.name}"
                        ),
                    )
                    check(
                        shader_override.suffix == suffix,
                        AssertionError(
                            f"shader suffix {suffix} does not match shader override suffix {shader_override.suffix}"
                        ),
                    )

                    # These will be used as prefixes via .with_suffix().
                    # E.g. path/to/temp.spv
                    source_prefix = shader_override.path
                    # E.g. path/to/shader.json -> path/to/shader.frag.spv
                    dest_prefix = result.with_suffix(suffix)

                    util.copy_file_if_exists(source_prefix, dest_prefix)
                    util.copy_file_if_exists(
                        source_prefix.with_suffix(
                            shader_job_util.SUFFIX_TRANSFORMATIONS
                        ),
                        dest_prefix.with_suffix(shader_job_util.SUFFIX_TRANSFORMATIONS),
                    )
                    util.copy_file_if_exists(
                        source_prefix.with_suffix(
                            shader_job_util.SUFFIX_TRANSFORMATIONS_JSON
                        ),
                        dest_prefix.with_suffix(
                            shader_job_util.SUFFIX_TRANSFORMATIONS_JSON
                        ),
                    )
    else:
        # result has not changed, which means nothing was executed above.
        raise AssertionError(f"Unrecognized shader job type: {str(input_json)}")

    result_spirv = result

    result = spirv_dis_shader_job(
        result, work_dir / "1_spirv_asm" / result.name, binary_paths
    )

    if not skip_validation:
        validate_spirv_shader_job(
            result_spirv, binary_paths, extra_args=common_spirv_args,
        )

    if spirv_opt_args:
        result = result_spirv
        result = spirv_opt_shader_job(
            result,
            spirv_opt_args,
            work_dir / "2_spirv_opt" / result.name,
            binary_paths,
            preprocessor_cache=preprocessor_cache,
            skip_validation=skip_validation,
        )
        result_spirv = result
        result = spirv_dis_shader_job(
            result, work_dir / "2_spirv_opt_asm" / result.name, binary_paths
        )

        if not skip_validation:
            validate_spirv_shader_job(
                result_spirv, binary_paths, extra_args=common_spirv_args,
            )

    return SpirvCombinedShaderJob(
        name=name,
        spirv_asm_shader_job=result,
        spirv_shader_job=result_spirv,
        glsl_source_shader_job=glsl_source_shader_job,
    )
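
A usage sketch for compile_shader_job; the work directory and shader job path are placeholders, and the binary manager obtained as in Example #5 is assumed to satisfy the BinaryGetter interface:

from pathlib import Path

# Hypothetical inputs; the returned SpirvCombinedShaderJob bundles the SPIR-V job,
# its disassembly, and (for GLSL inputs) the original GLSL shader job.
binary_manager = binaries_util.get_default_binary_manager(
    settings_util.read_or_create(Path("settings.json")))
combined = compile_shader_job(
    name="variant",
    input_json=Path("variant/shader.json"),
    work_dir=Path("work/variant"),
    binary_paths=binary_manager,
    spirv_opt_args=["-O"],
)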
Example #7
def run_reduction_on_report(  # pylint: disable=too-many-locals;
        test_dir: Path, reports_dir: Path,
        binary_manager: binaries_util.BinaryManager) -> None:
    """Reduces the test in |test_dir| (transformations first, then SPIR-V) and symlinks the best reduction; failed reductions are linked under |reports_dir|."""
    test = test_util.metadata_read(test_dir)

    check(
        bool(test.device and test.device.name),
        AssertionError(
            f"Cannot reduce {str(test_dir)}; "
            f"device must be specified in {str(test_util.get_metadata_path(test_dir))}"
        ),
    )

    check(
        bool(test.crash_signature),
        AssertionError(
            f"Cannot reduce {str(test_dir)} because there is no crash string specified."
        ),
    )

    source_dir = test_util.get_source_dir(test_dir)

    shader_jobs = tool.get_shader_jobs(source_dir)

    # TODO: if needed, this could become a parameter to this function.
    shader_job_to_reduce = shader_jobs[0]

    if len(shader_jobs) > 1:
        check(
            len(shader_jobs) == 2
            and shader_jobs[1].name == test_util.VARIANT_DIR,
            AssertionError(
                "Can only reduce tests with shader jobs reference and variant, or just variant."
            ),
        )
        shader_job_to_reduce = shader_jobs[1]

    shader_transformation_suffixes = shader_job_util.get_related_suffixes_that_exist(
        shader_job_to_reduce.shader_job,
        language_suffix=(shader_job_util.SUFFIX_TRANSFORMATIONS, ),
    )

    shader_spv_suffixes = shader_job_util.get_related_suffixes_that_exist(
        shader_job_to_reduce.shader_job,
        language_suffix=(shader_job_util.SUFFIX_SPIRV, ))

    try:
        reduced_test = test_dir

        for index, suffix in enumerate(shader_transformation_suffixes):
            # E.g. .frag.transformations -> .frag
            extension_to_reduce = str(Path(suffix).with_suffix(""))
            reduced_test = run_reduction(
                test_dir_reduction_output=test_dir,
                test_dir_to_reduce=reduced_test,
                shader_job_name_to_reduce=shader_job_to_reduce.name,
                extension_to_reduce=extension_to_reduce,
                preserve_semantics=True,
                binary_manager=binary_manager,
                reduction_name=f"0_{index}_{suffix.split('.')[1]}",
            )

        if test.crash_signature != signature_util.BAD_IMAGE_SIGNATURE:
            for index, suffix in enumerate(shader_spv_suffixes):
                # E.g. .frag.spv -> .frag
                extension_to_reduce = str(Path(suffix).with_suffix(""))
                reduced_test = run_reduction(
                    test_dir_reduction_output=test_dir,
                    test_dir_to_reduce=reduced_test,
                    shader_job_name_to_reduce=shader_job_to_reduce.name,
                    extension_to_reduce=extension_to_reduce,
                    preserve_semantics=False,
                    binary_manager=binary_manager,
                    reduction_name=f"1_{index}_{suffix.split('.')[1]}",
                )

        device_name = test.device.name

        # Create a symlink to the "best" reduction.
        best_reduced_test_link = test_util.get_reduced_test_dir(
            test_dir, device_name, fuzz.BEST_REDUCTION_NAME)
        util.make_directory_symlink(
            new_symlink_file_path=best_reduced_test_link,
            existing_dir=reduced_test)
    except ReductionFailedError as ex:
        # Create a symlink to the failed reduction so it is easy to investigate failed reductions.
        link_to_failed_reduction_path = (
            reports_dir / "failed_reductions" /
            f"{test_dir.name}_{ex.reduction_name}")
        util.make_directory_symlink(
            new_symlink_file_path=link_to_failed_reduction_path,
            existing_dir=ex.reduction_work_dir,
        )
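
Finally, a sketch of calling run_reduction_on_report; the test and reports directories are placeholders, the test's metadata must name a device and a crash signature, and the binary manager setup is borrowed from Example #5:

from pathlib import Path

# Hypothetical paths; the binary manager is obtained as in Example #5.
settings = settings_util.read_or_create(Path("settings.json"))
run_reduction_on_report(
    test_dir=Path("reports/crashes/test_xyz"),
    reports_dir=Path("reports"),
    binary_manager=binaries_util.get_default_binary_manager(settings),
)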