Example #1
def get_next_logcat_timestamp(serial: Optional[str]) -> Optional[str]:
    # Get last log event.
    res = adb_check(serial, ["logcat", "-d", "-T", "1"])
    out: Optional[str] = res.stdout
    if not out:
        return None

    lines = out.splitlines()
    if not lines:
        return None

    # Get last line. E.g. "04-02 19:23:23.579   611   611 D logd    :  etc.".
    last_line = lines[-1]

    parts = last_line.split(" ")
    # The first two parts are the timestamp.
    if len(parts) < 2:
        return None
    timestamp = f"{parts[0]} {parts[1]}"
    if not TIMESTAMP_PATTERN.fullmatch(timestamp):
        return None

    check(
        last_line.startswith(timestamp),
        AssertionError(
            f"Last line and extracted timestamp did not match: \n{last_line}\n{timestamp}"
        ),
    )

    # We want a timestamp that is _later_ than the final timestamp.
    # Timestamp arguments passed to adb are rounded (by adb), so concatenating a "9"
    # is a simple way to get the "next" timestamp at the finest granularity.
    return timestamp + "9"
Example #2
    def __init__(
        self,
        binary_list: Optional[List[Binary]] = None,
        platform: Optional[str] = None,
        built_in_binary_recipes: Optional[Dict[str, Recipe]] = None,
    ):
        self._binary_list = binary_list or DEFAULT_BINARIES
        self._resolved_paths = {}
        self._platform = platform or util.get_platform()
        self._binary_artifacts = []
        self._built_in_binary_recipes = {}

        self._binary_artifacts.extend(
            artifact_util.binary_artifacts_find(BINARY_RECIPES_PREFIX))

        # When changing this constructor, check self.get_child_binary_manager().

        if built_in_binary_recipes:
            self._built_in_binary_recipes = built_in_binary_recipes
            # For each recipe, add a tuple (ArchiveSet, artifact_path) to self._binary_artifacts.
            for (artifact_path,
                 recipe) in self._built_in_binary_recipes.items():
                check(
                    recipe.HasField("download_and_extract_archive_set"),
                    AssertionError(f"Bad built-in recipe: {recipe}"),
                )
                archive_set: RecipeDownloadAndExtractArchiveSet = recipe.download_and_extract_archive_set
                self._binary_artifacts.append(
                    (archive_set.archive_set, artifact_path))
Example #3
    def get_binary_path(self, binary: Binary) -> Path:
        # Special case: allow the path to be specified in the binary object itself for testing purposes:
        if binary.path:
            return Path(binary.path)
        # Try resolved cache first.
        result = self._resolved_paths.get(binary.SerializePartialToString())
        if result:
            return result
        log(f"Finding path of binary:\n{binary}")

        # Try list (cache) of binary artifacts on disk.
        result = self._get_binary_path_from_binary_artifacts(binary)
        if result:
            return result

        # Try online.
        wrapped_recipe = get_github_release_recipe(binary)
        # Execute the recipe to download the binaries.
        artifact_util.artifact_execute_recipe_if_needed(
            wrapped_recipe.path, {wrapped_recipe.path: wrapped_recipe.recipe})
        # Add to binary artifacts list (cache).
        self._binary_artifacts.append((
            wrapped_recipe.recipe.download_and_extract_archive_set.archive_set,
            wrapped_recipe.path,
        ))
        # Now we should be able to find it in the binary artifacts list.
        result = self._get_binary_path_from_binary_artifacts(binary)
        check(
            bool(result),
            AssertionError(
                f"Could not find:\n{binary} even though we just added it:\n{wrapped_recipe}"
            ),
        )
        assert result  # noqa
        return result
Example #4
def update_details(binary_manager: binaries_util.BinaryManager,
                   device: Device) -> None:

    check(
        device.HasField("android"),
        AssertionError(f"Expected Android device: {device}"),
    )

    build_fingerprint = ""
    try:
        adb_fingerprint_result = adb_check(
            device.android.serial,
            ["shell", "getprop ro.build.fingerprint"],
            verbose=True,
        )
        build_fingerprint = adb_fingerprint_result.stdout
        build_fingerprint = build_fingerprint.strip()
    except subprocess.CalledProcessError:
        log("Failed to get device fingerprint")

    device_properties = ""
    ensure_amber_installed(device.android.serial, binary_manager)
    try:
        device_properties = get_device_driver_details(device.android.serial)
    except devices_util.GetDeviceDetailsError as ex:
        log(f"WARNING: Failed to get device driver details: {ex}")

    device.android.build_fingerprint = build_fingerprint
    device.device_properties = device_properties
Example #5
def main() -> None:
    parser = argparse.ArgumentParser(description="Splits a seed file.")

    parser.add_argument(
        "seed_file",
        help="Seed file to process.",
    )

    parsed_args = parser.parse_args(sys.argv[1:])

    seed_file: Path = Path(parsed_args.seed_file)

    # Get chunks of seeds and call process_chunk.
    seeds: List[str] = util.file_read_text(seed_file).split()

    chunk_size = 1000

    check(
        (len(seeds) % chunk_size) == 0,
        AssertionError(
            "The number of seeds should be a multiple of chunk_size."),
    )

    index = 0
    for chunk_num in range(0, len(seeds) // chunk_size):
        chunk: List[str] = []
        for _ in range(0, chunk_size):
            chunk.append(seeds[index])
            index += 1
        util.file_write_text(Path(f"seeds_{chunk_num}.txt"),
                             " ".join(chunk) + "\n")

    check(index == len(seeds),
          AssertionError("Expected to have processed all seeds."))
Example #6
def run_reduction_on_report(  # pylint: disable=too-many-locals;
    test_dir: Path,
    reports_dir: Path,
    binary_manager: binaries_util.BinaryManager,
    settings: Settings,
) -> None:
    test = test_util.metadata_read(test_dir)

    check(
        bool(test.device and test.device.name),
        AssertionError(
            f"Cannot reduce {str(test_dir)}; "
            f"device must be specified in {str(test_util.get_metadata_path(test_dir))}"
        ),
    )

    source_dir = test_util.get_source_dir(test_dir)

    try:
        run_reduction(
            source_dir_to_reduce=source_dir,
            reduction_output_dir=test_util.get_reductions_dir(
                test_dir, test.device.name),
            binary_manager=binary_manager,
            settings=settings,
        )
    except fuzz_glsl_test.ReductionFailedError as ex:
        # Create a symlink to the failed reduction so it is easy to investigate failed reductions.
        link_to_failed_reduction_path = (
            reports_dir / "failed_reductions" /
            f"{test_dir.name}_{ex.reduction_work_dir.name}")
        util.make_directory_symlink(
            new_symlink_file_path=link_to_failed_reduction_path,
            existing_dir=ex.reduction_work_dir,
        )
Example #7
def override_sigint() -> None:
    global original_sigint_handler  # pylint: disable=invalid-name,global-statement;
    util.check(
        original_sigint_handler is None,
        AssertionError("Called override_sig_int more than once"),
    )
    original_sigint_handler = signal.signal(signal.SIGINT, _sigint_handler)
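
The global statement implies a module-level original_sigint_handler = None. A sketch of that assumed state, plus a hypothetical restore helper (not part of the code above) for symmetry:

original_sigint_handler = None  # assumed module-level default, implied by the check above

def restore_sigint() -> None:
    """Hypothetical inverse of override_sigint; restores the saved handler."""
    global original_sigint_handler  # pylint: disable=invalid-name,global-statement;
    if original_sigint_handler is not None:
        signal.signal(signal.SIGINT, original_sigint_handler)
        original_sigint_handler = None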
Example #8
def run_helper(
    cmd: List[str],
    check_exit_code: bool = True,
    timeout: Optional[float] = None,
    env: Optional[Dict[str, str]] = None,
    working_dir: Optional[Path] = None,
) -> types.CompletedProcess:
    check(
        bool(cmd) and cmd[0] is not None and isinstance(cmd[0], str),
        AssertionError("run takes a list of str, not a str"),
    )

    # When using catchsegv, only SIGSEGV will cause a backtrace to be printed.
    # We can also include SIGABRT by setting the following environment variable.
    if cmd[0].endswith("catchsegv"):
        if env is None:
            env = {}
        env["SEGFAULT_SIGNALS"] = "SEGV ABRT"

    env_child: Optional[Dict[str, str]] = None
    if env:
        log(f"Extra environment variables are: {env}")
        env_child = os.environ.copy()
        env_child.update(env)

    with subprocess.Popen(
            cmd,
            encoding="utf-8",
            errors="ignore",
            start_new_session=True,
            env=env_child,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            cwd=working_dir,
    ) as process:
        try:
            stdout, stderr = process.communicate(input=None, timeout=timeout)
        except subprocess.TimeoutExpired:
            try:
                posix_kill_group(process)
            except AttributeError:
                process.kill()
            stdout, stderr = process.communicate()
            assert timeout  # noqa
            raise subprocess.TimeoutExpired(process.args, timeout, stdout,
                                            stderr)
        except:  # noqa
            try:
                posix_kill_group(process)
            except AttributeError:
                process.kill()
            raise

        exit_code = process.poll()
        if check_exit_code and exit_code != 0:
            raise subprocess.CalledProcessError(exit_code, process.args,
                                                stdout, stderr)
        return subprocess.CompletedProcess(process.args, exit_code, stdout,
                                           stderr)
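
A hypothetical call showing the contract: the command is a list of str, env entries are merged into a copy of the parent environment, and on timeout the whole process group is killed before TimeoutExpired is raised. The command and values are illustrative:

result = run_helper(
    ["echo", "hello"],    # a list of str, never a single str
    timeout=10,           # seconds; expiry kills the process group, then raises
    env={"MY_VAR": "1"},  # merged into a copy of os.environ for the child only
)
print(result.stdout.strip())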
Example #9
def get_config_from_binary(binary: Binary) -> str:
    tags = list(binary.tags)
    configs = [c for c in tags if c in CONFIGS_SET]
    if not configs:
        raise AssertionError(f"Could not find a config in tags: {tags}")
    check(len(configs) == 1, AssertionError(f"More than one config in: {binary}"))
    config = configs[0]
    return config
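
A sketch of how this might be called. CONFIGS_SET is not shown above, so its contents here are an assumption, as are the Binary field values:

CONFIGS_SET = {"Debug", "Release"}  # assumed contents; only the name comes from the code above

binary = Binary(name="amber", tags=["Release", "x64"])  # hypothetical tag values
assert get_config_from_binary(binary) == "Release"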
Example #10
def artifact_get_inner_file_path(inner_file: str,
                                 artifact_path: str) -> pathlib.Path:
    check(
        not inner_file.startswith("//"),
        AssertionError(
            "bad inner_file argument passed to artifact_get_inner_file_path"),
    )
    # TODO: Consider absolute paths that we might want to support for quick hacks.
    return util.norm_path(
        artifact_get_directory_path(artifact_path) / pathlib.Path(inner_file))
Example #11
def get_platform_from_binary(binary: Binary) -> str:
    tags = list(binary.tags)
    platforms = [p for p in tags if p in PLATFORMS_SET]
    if platforms:
        check(
            len(platforms) == 1,
            AssertionError(f"More than one platform in: {binary}"))
        platform = platforms[0]
    else:
        platform = util.get_platform()
    return platform
Example #12
def is_compute_job(input_asm_spirv_job_json_path: pathlib.Path) -> bool:
    comp_files = shader_job_util.get_related_files(
        input_asm_spirv_job_json_path,
        [shader_job_util.EXT_COMP],
        [shader_job_util.SUFFIX_ASM_SPIRV],
    )
    check(
        len(comp_files) <= 1,
        AssertionError(f"Expected 1 or 0 compute shader files: {comp_files}"),
    )
    return len(comp_files) == 1
Example #13
def run_generate(
    spirv_fuzz_path: Path,
    reference_shader_spv: Path,
    output_shader_spv: Path,
    donors_list_path: Path,
    seed: Optional[str] = None,
    other_args: Optional[List[str]] = None,
) -> Path:

    util.check(
        output_shader_spv.suffix == shader_job_util.SUFFIX_SPIRV,
        AssertionError(f"Expected {str(output_shader_spv)} to end with .spv"),
    )

    util.file_mkdirs_parent(output_shader_spv)
    cmd = [
        str(spirv_fuzz_path),
        str(reference_shader_spv),
        "-o",
        str(output_shader_spv),
        f"--donors={str(donors_list_path)}",
        "--fuzzer-pass-validation",
    ]

    if seed:
        cmd.append(f"--seed={seed}")

    if other_args:
        cmd.extend(other_args)

    subprocess_util.run(cmd)

    # reference.spv -> output.spv_orig

    util.copy_file(
        reference_shader_spv,
        output_shader_spv.with_suffix(shader_job_util.SUFFIX_SPIRV_ORIG),
    )

    # reference.spv.facts -> output.spv.facts

    source_facts_path = reference_shader_spv.with_suffix(
        shader_job_util.SUFFIX_FACTS)
    dest_facts_path = output_shader_spv.with_suffix(
        shader_job_util.SUFFIX_FACTS)

    if source_facts_path.exists():
        util.copy_file(source_facts_path, dest_facts_path)

    return output_shader_spv
Example #14
def gerrit_get(url: str, path: str, params: Dict[str, str],
               cookie: str) -> Any:
    response = requests.get(url + path,
                            params=params,
                            cookies={
                                "GerritAccount": cookie
                            }).text

    check(
        response.startswith(RESPONSE_PREFIX),
        AssertionError(f"Unexpected response from Gerrit: {response}"),
    )
    response = util.remove_start(response, RESPONSE_PREFIX)
    return json.loads(response)
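
Gerrit's REST API prepends a magic prefix to every JSON response to defeat cross-site script inclusion, which is why the prefix must be stripped before json.loads. A sketch with the assumed constant value (the server URL and cookie are placeholders):

RESPONSE_PREFIX = ")]}'\n"  # assumed value: Gerrit's anti-XSSI prefix

changes = gerrit_get(
    "https://gerrit.example.com",  # placeholder server
    "/changes/",
    params={"q": "status:merged", "n": "10"},
    cookie="<GerritAccount cookie value>",  # placeholder
)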
Example #15
def main() -> None:
    parser = argparse.ArgumentParser(
        description=
        "A script that creates a README and bug_report directory for a test and its result_dir."
    )

    parser.add_argument(
        "source_dir",
        help="Source directory containing test.json and shaders.")

    parser.add_argument(
        "result_dir",
        help=
        "Path to the result_dir of a test containing e.g. the intermediate shader files, log.txt, etc.",
    )

    parser.add_argument(
        "--output_dir",
        help=
        "Output directory where the README and bug_report directory will be written.",
        default=".",
    )

    parsed_args = parser.parse_args(sys.argv[1:])

    source_dir = Path(parsed_args.source_dir)
    result_dir = Path(parsed_args.result_dir)
    output_dir = Path(parsed_args.output_dir)

    check_dir_exists(source_dir)
    check_dir_exists(result_dir)

    test = test_util.metadata_read_from_path(source_dir /
                                             test_util.TEST_METADATA)

    binary_manager = binaries_util.get_default_binary_manager(
        settings=Settings()).get_child_binary_manager(
            binary_list=list(test.binaries) + list(test.device.binaries))

    check(test.HasField("glsl"),
          AssertionError("Only glsl tests currently supported"))

    check(
        test.device.HasField("preprocess"),
        AssertionError("Only preprocess device tests currently supported"),
    )

    fuzz_test_util.tool_crash_summary_bug_report_dir(source_dir, result_dir,
                                                     output_dir,
                                                     binary_manager)
Example #16
def strip_root(path: str) -> str:
    path_stripped = path
    if os.path.isabs(path_stripped):
        # Most of this coverage code only works on Linux, so we assume Linux here.
        # If we had Windows paths that could be on different drives etc., we would need to be more careful.
        util.check(
            path_stripped.startswith("/"),
            AssertionError(f"Non-posix absolute file path? {path}"),
        )
        path_stripped = path_stripped[1:]
    util.check(
        not path_stripped.startswith("/"),
        AssertionError(
            f"Internal error trying to make a relative path: {path_stripped}"),
    )
    return path_stripped
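
Illustrative behavior on a POSIX system:

assert strip_root("/usr/lib/libc.so") == "usr/lib/libc.so"
assert strip_root("usr/lib/libc.so") == "usr/lib/libc.so"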
Example #17
def get_all_android_devices(  # pylint: disable=too-many-locals;
    binary_manager: binaries_util.BinaryManager,
    include_device_details: bool = True,
) -> List[Device]:
    result: List[Device] = []

    log("Getting the list of connected Android devices via adb\n")

    adb_devices = adb_check(None, ["devices", "-l"], verbose=True)
    stdout: str = adb_devices.stdout
    lines: List[str] = stdout.splitlines()
    # Remove empty lines.
    lines = [line for line in lines if line]
    check(
        lines[0].startswith("List of devices"),
        AssertionError("Could not find list of devices from 'adb devices'"),
    )
    for i in range(1, len(lines)):
        fields = lines[i].split()
        device_serial = fields[0]
        device_state = fields[1]
        if device_state != "device":
            log(
                f'Skipping adb device with serial {device_serial} as its state "{device_state}" is not "device".'
            )
            continue
        # Set a simple model name, but then try to find the actual model name.
        device_model = "android_device"
        for field_index in range(2, len(fields)):
            if fields[field_index].startswith("model:"):
                device_model = util.remove_start(fields[field_index], "model:")
                break

        log(f"Found Android device: {device_model}, {device_serial}")

        device = Device(
            name=f"{device_model}_{device_serial}",
            android=DeviceAndroid(serial=device_serial, model=device_model),
        )
        if include_device_details:
            update_details(binary_manager, device)
            log(f"Android device details:\n{str(device)}")
        else:
            log(f"Skipped getting Android device details:\n{str(device)}")

        result.append(device)

    return result
Example #18
def run_replay(
    spirv_fuzz_path: Path,
    variant_shader_spv: Path,
    output_shader_spv: Path,
    other_args: Optional[List[str]] = None,
) -> Path:
    """Replays all transformations except the last to get a similar variant shader."""
    util.check(
        output_shader_spv.suffix == shader_job_util.SUFFIX_SPIRV,
        AssertionError(f"Expected {str(output_shader_spv)} to end with .spv"),
    )

    util.file_mkdirs_parent(output_shader_spv)

    # Copy shader.<STAGE>.facts.
    if variant_shader_spv.with_suffix(shader_job_util.SUFFIX_FACTS).is_file():
        util.copy_file(
            variant_shader_spv.with_suffix(shader_job_util.SUFFIX_FACTS),
            output_shader_spv.with_suffix(shader_job_util.SUFFIX_FACTS),
        )

    # Copy shader.<STAGE>.spv_orig.
    orig_spv = util.copy_file(
        variant_shader_spv.with_suffix(shader_job_util.SUFFIX_SPIRV_ORIG),
        output_shader_spv.with_suffix(shader_job_util.SUFFIX_SPIRV_ORIG),
    )

    transformations = variant_shader_spv.with_suffix(
        shader_job_util.SUFFIX_TRANSFORMATIONS)

    cmd = [
        str(spirv_fuzz_path),
        str(orig_spv),
        "-o",
        str(output_shader_spv),
        f"--replay={str(transformations)}",
        "--replay-range=-1",  # replays all transformations except the last
    ]

    if other_args:
        cmd.extend(other_args)

    subprocess_util.run(cmd)

    return output_shader_spv
Example #19
def find_latest_change(changes: Any) -> Any:
    check(
        len(changes) > 0,
        AssertionError(f"Expected at least one CL but got: {changes}"))

    # Find the latest submit date (the default order is based on when the CL was last updated).

    latest_change = changes[0]
    latest_date = parse_date(latest_change["submitted"])

    for i in range(1, len(changes)):
        change = changes[i]
        submitted_date = parse_date(change["submitted"])
        if submitted_date > latest_date:
            latest_change = change
            latest_date = submitted_date

    return latest_change
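
The loop is equivalent to a single max() over the parsed submit dates; a sketch of that alternative (assuming, as the > comparison above already does, that parse_date returns comparable values; on ties both versions keep the earlier entry):

def find_latest_change_alt(changes: Any) -> Any:
    check(
        len(changes) > 0,
        AssertionError(f"Expected at least one CL but got: {changes}"))
    return max(changes, key=lambda change: parse_date(change["submitted"]))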
Example #20
def _update_device_host(binary_manager: binaries_util.BinaryManager,
                        device: Device) -> None:
    check(
        device.HasField("host"),
        AssertionError(f"Expected host device: {device}"),
    )

    amber_path = binary_manager.get_binary_path_by_name(
        binaries_util.AMBER_NAME).path

    driver_details = ""
    try:
        driver_details = host_device_util.get_driver_details(
            amber_path, custom_launcher=list(device.host.custom_launcher))
    except GetDeviceDetailsError as ex:
        log(f"WARNING: Failed to get device driver details: {ex}")

    device.device_properties = driver_details
Example #21
def amberscript_comp_buffer_bind(comp_json: str) -> str:
    """
    Returns a string (template) containing an AmberScript command for binding the in/out buffer.

    Only the "$compute" key is read.

      {
        "myuniform": {
          "func": "glUniform1f",
          "args": [ 42.0 ],
          "binding": 3
        },

        "$compute": {
          "num_groups": [12, 13, 14];
          "buffer": {
            "binding": 123,
            "fields":
            [
              { "type": "int", "data": [ 0 ] },
              { "type": "int", "data": [ 1, 2 ] },
            ]
          }
        }

      }

    becomes:

      BIND BUFFER {} AS storage DESCRIPTOR_SET 0 BINDING 123

    The string template argument (use `format()`) is the name of the SSBO buffer.
    """
    comp = json.loads(comp_json)

    check(
        "$compute" in comp.keys(),
        AssertionError("Cannot find '$compute' key in JSON file"),
    )

    compute_info = comp["$compute"]
    assert "binding" in compute_info["buffer"].keys()
    return f"  BIND BUFFER {{}} AS storage DESCRIPTOR_SET 0 BINDING {compute_info['buffer']['binding']}\n"
Example #22
def extract_shaders_amber_script(
    amber_file: Path,
    lines: List[str],
    output_dir: Path,
    binaries: binaries_util.BinaryManager,
) -> List[Path]:
    files_written: List[Path] = []
    i = -1
    while i < len(lines) - 1:
        i += 1
        line = lines[i]
        if not line.strip().startswith("SHADER"):
            continue
        parts = line.strip().split()
        shader_type = parts[1]
        shader_name = parts[2]
        shader_language = parts[3]
        if shader_language == "PASSTHROUGH":
            continue
        check(
            shader_language == "SPIRV-ASM",
            AssertionError(
                f"For {str(amber_file)}: unsupported shader language: {shader_language}"
            ),
        )
        i += 1
        shader_asm = ""
        while not lines[i].strip().startswith("END"):
            shader_asm += lines[i]
            i += 1

        files_written += write_shader(
            shader_asm=shader_asm,
            amber_file=amber_file,
            output_dir=output_dir,
            shader_type=shader_type,
            shader_name=shader_name,
            binaries=binaries,
        )

    return files_written
Example #23
def _update_device_shader_compiler(binary_manager: binaries_util.BinaryManager,
                                   device: Device) -> None:
    check(
        device.HasField("shader_compiler"),
        AssertionError(f"Expected shader_compiler device: {device}"),
    )

    # The only thing we can do is update the shader compiler binary if it is a built-in binary.

    if binaries_util.is_built_in_binary_name(device.shader_compiler.binary):
        # Remove existing binaries with this name from the device's binaries list.
        binaries = list(device.binaries)
        binaries = [
            b for b in binaries if b.name != device.shader_compiler.binary
        ]
        del device.binaries[:]
        device.binaries.extend(binaries)

        # Add our latest version of the binary.
        device.binaries.extend(
            [binary_manager.get_binary_by_name(device.shader_compiler.binary)])
Example #24
def get_amber_script_shader_def(shader: Shader, name: str) -> str:
    result = ""
    if shader.shader_source:
        result += f"\n# {name} is derived from the following GLSL:\n"
        result += get_text_as_comment(shader.shader_source)
    if shader.shader_spirv_asm:
        groups = re.findall(r"\n; Version: ([\d.]*)", shader.shader_spirv_asm)
        check(
            len(groups) == 1,
            AssertionError(
                f"Could not find version comment in SPIR-V shader {name} (or there were multiple)"
            ),
        )
        spirv_version = groups[0]
        result += f"\nSHADER {str(shader.shader_type.value)} {name} SPIRV-ASM TARGET_ENV spv{spirv_version}\n"
        result += shader.shader_spirv_asm
        result += "END\n"
    else:
        result += f"\nSHADER {str(shader.shader_type.value)} {name} PASSTHROUGH\n"

    return result
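
The version comment the regex expects, on a minimal assembly fragment (illustrative):

import re

asm = "; SPIR-V\n; Version: 1.0\n; Generator: Khronos Glslang\nOpCapability Shader\n"
assert re.findall(r"\n; Version: ([\d.]*)", asm) == ["1.0"]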
Example #25
def extract_shaders_vkscript(
    amber_file: Path,
    lines: List[str],
    output_dir: Path,
    binaries: binaries_util.BinaryManager,
) -> List[Path]:
    files_written: List[Path] = []
    i = -1
    while i < len(lines) - 1:
        i += 1
        line = lines[i]
        match: Optional[Match[str]] = re.match(VK_SCRIPT_SHADER_REGEX,
                                               line.strip())
        if not match:
            continue
        shader_type = match.group(1)
        shader_language = match.group(2)
        if shader_language == "passthrough":
            continue
        check(
            shader_language == "spirv",
            AssertionError(
                f"For {str(amber_file)}: unsupported shader language: {shader_language}"
            ),
        )
        i += 1
        shader_asm = ""
        while not lines[i].strip().startswith("["):
            shader_asm += lines[i]
            i += 1
        files_written += write_shader(
            shader_asm=shader_asm,
            amber_file=amber_file,
            output_dir=output_dir,
            shader_type=shader_type,
            shader_name="shader",
            binaries=binaries,
        )
    return files_written
Example #26
def create_spirv_fuzz_variant_2(
    source_dir: Path, binary_manager: binaries_util.BinaryManager, settings: Settings,
) -> Optional[Path]:
    """
    Replays all transformations except the last to get variant_2.

    Replays all transformations except the last to get a variant_2 shader job, such that variant <-> variant_2 are
    likely even more similar than reference <-> variant.

    |source_dir| must be a spirv_fuzz test.
    """
    test_metadata: Test = test_util.metadata_read_from_source_dir(source_dir)
    check(test_metadata.HasField("spirv_fuzz"), AssertionError("Not a spirv_fuzz test"))

    variant_shader_job = source_dir / test_util.VARIANT_DIR / test_util.SHADER_JOB
    variant_2_shader_job = (
        source_dir / f"{test_util.VARIANT_DIR}_2" / test_util.SHADER_JOB
    )
    if not variant_shader_job.is_file():
        log(
            f"Skip generating variant_2 for {str(source_dir)} because the variant shader job was not found."
        )
        return None

    if variant_2_shader_job.is_file():
        log(
            f"Skip generating variant_2 for {str(source_dir)} because variant_2 shader job already exists."
        )
        return None

    return spirv_fuzz_util.run_replay_on_shader_job(
        spirv_fuzz_path=binary_manager.get_binary_path_by_name(
            binaries_util.SPIRV_FUZZ_NAME
        ).path,
        variant_shader_job_json=variant_shader_job,
        output_shader_job_json=variant_2_shader_job,
        other_args=list(settings.common_spirv_args),
    )
Example #27
def _update_device_swiftshader(binary_manager: binaries_util.BinaryManager,
                               device: Device) -> None:

    check(
        device.HasField("swift_shader"),
        AssertionError(f"Expected SwiftShader device: {device}"),
    )

    amber_path = binary_manager.get_binary_path_by_name(
        binaries_util.AMBER_NAME).path

    swift_shader_binary_and_path = binary_manager.get_binary_path_by_name(
        binaries_util.SWIFT_SHADER_NAME)
    driver_details = ""
    try:
        driver_details = host_device_util.get_driver_details(
            amber_path, swift_shader_binary_and_path.path)
    except GetDeviceDetailsError as ex:
        log(f"WARNING: Failed to get device driver details: {ex}")

    device.device_properties = driver_details

    del device.binaries[:]
    device.binaries.extend([swift_shader_binary_and_path.binary])
Example #28
def extract_shaders_amber_script(
    amber_file: Path,
    lines: List[str],
    output_dir: Path,
    binaries: binaries_util.BinaryManager,
) -> List[Path]:
    files_written: List[Path] = []
    i = -1
    while i < len(lines) - 1:
        i += 1
        line = lines[i]
        if not line.strip().startswith("SHADER"):
            continue
        parts = line.strip().split()
        shader_type = parts[1]
        shader_name = parts[2]
        shader_format = parts[3]
        if shader_format in ("PASSTHROUGH", "GLSL"):
            continue
        check(
            shader_format == "SPIRV-ASM",
            AssertionError(
                f"{str(amber_file)}:{i+1}: unsupported shader format: {shader_format}"
            ),
        )

        # Get the target environment string. We do an extra check because this element was introduced more recently.
        check(
            len(parts) >= 6 and parts[4] == "TARGET_ENV",
            AssertionError(f"{str(amber_file)}:{i+1}: missing TARGET_ENV"),
        )

        shader_target_env = parts[5]

        # We only support target environments that specify a version of SPIR-V.
        # E.g. TARGET_ENV spv1.5
        check(
            shader_target_env.startswith("spv"),
            AssertionError(
                f"{str(amber_file)}:{i+1}: TARGET_ENV must start with spv"),
        )

        spirv_version_from_target_env = util.remove_start(
            shader_target_env, "spv")

        i += 1
        shader_asm = ""
        spirv_version_from_comment = ""
        while not lines[i].startswith("END"):
            # We should come across the version comment. E.g.
            # "; Version: 1.0"
            if lines[i].startswith("; Version: "):
                check(
                    not spirv_version_from_comment,
                    AssertionError(
                        f"{str(amber_file)}:{i+1}: Multiple version comments?"
                    ),
                )
                spirv_version_from_comment = lines[i].split()[2]
                check(
                    spirv_version_from_comment ==
                    spirv_version_from_target_env,
                    AssertionError(
                        f"{str(amber_file)}:{i+1}: TARGET_ENV and version comment mismatch."
                    ),
                )

            shader_asm += lines[i]
            i += 1

        check(
            bool(spirv_version_from_comment),
            AssertionError(
                f"{str(amber_file)}:{i+1}: missing version comment in SPIRV-ASM."
            ),
        )

        files_written += write_shader(
            shader_asm=shader_asm,
            amber_file=amber_file,
            output_dir=output_dir,
            shader_type=shader_type,
            shader_name=shader_name,
            binaries=binaries,
        )

    return files_written
Example #29
def amberscript_comp_buff_def(comp_json: str,
                              make_empty_buffer: bool = False) -> str:
    """
    Returns a string (template) containing AmberScript commands for defining the initial in/out buffer.

    Only the "$compute" key is read.

      {
        "myuniform": {
          "func": "glUniform1f",
          "args": [ 42.0 ],
          "binding": 3
        },

        "$compute": {
          "num_groups": [12, 13, 14];
          "buffer": {
            "binding": 123,
            "fields":
            [
              { "type": "int", "data": [ 0 ] },
              { "type": "int", "data": [ 1, 2 ] },
            ]
          }
        }

      }

    becomes:

      BUFFER {} DATA_TYPE int32 DATA
        0 1 2
      END

    Or, if |make_empty_buffer| is True:

      BUFFER {} DATA_TYPE int32 SIZE 3 0


    :param comp_json: The shader job JSON as a string.
    :param make_empty_buffer: If true, an "empty" buffer is created that is of the same size and type as the normal
    in/out buffer; the empty buffer can be used to store the contents of the in/out buffer via the Amber COPY command.
    The only difference is the "empty" buffer is initially filled with just one value, which avoids redundantly
    listing hundreds of values that will just be overwritten, and makes it clear(er) for those reading the AmberScript
    file that the initial state of the buffer is unused.
    """
    ssbo_types = {
        "int": "int32",
        "ivec2": "vec2<int32>",
        "ivec3": "vec3<int32>",
        "ivec4": "vec4<int32>",
        "uint": "uint32",
        "float": "float",
        "vec2": "vec2<float>",
        "vec3": "vec3<float>",
        "vec4": "vec4<float>",
    }

    comp = json.loads(comp_json)

    check(
        "$compute" in comp.keys(),
        AssertionError("Cannot find '$compute' key in JSON file"),
    )

    compute_info = comp["$compute"]

    check(
        len(compute_info["buffer"]["fields"]) > 0,
        AssertionError("Compute shader test with empty SSBO"),
    )

    field_types_set = {
        field["type"]
        for field in compute_info["buffer"]["fields"]
    }

    check(
        len(field_types_set) == 1,
        AssertionError("All field types must be the same"))

    ssbo_type = compute_info["buffer"]["fields"][0]["type"]
    if ssbo_type not in ssbo_types.keys():
        raise ValueError(f"Unsupported SSBO datum type: {ssbo_type}")
    ssbo_type_amber = ssbo_types[ssbo_type]

    # E.g. [[0, 0], [5], [1, 2, 3]]
    field_data = [field["data"] for field in compute_info["buffer"]["fields"]]

    # E.g. [0, 0, 5, 1, 2, 3]
    # |*| unpacks the list so each element is passed as an argument.
    # |chain| takes a list of iterables and concatenates them.
    field_data_flattened = itertools.chain(*field_data)

    # E.g. ["0", "0", "5", "1", "2", "3"]
    field_data_flattened_str = [str(field) for field in field_data_flattened]

    result = ""
    if make_empty_buffer:
        # We just use the first value to initialize every element of the "empty" buffer.
        result += f"BUFFER {{}} DATA_TYPE {ssbo_type_amber} SIZE {len(field_data_flattened_str)} {field_data_flattened_str[0]}\n"
    else:
        result += f"BUFFER {{}} DATA_TYPE {ssbo_type_amber} DATA\n"
        result += f" {' '.join(field_data_flattened_str)}\n"
        result += "END\n"

    return result
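
A usage sketch showing both output forms for the docstring's JSON; the buffer names given to format() are illustrative:

comp_json = """
{
  "$compute": {
    "num_groups": [12, 13, 14],
    "buffer": {
      "binding": 123,
      "fields": [
        { "type": "int", "data": [ 0 ] },
        { "type": "int", "data": [ 1, 2 ] }
      ]
    }
  }
}
"""

print(amberscript_comp_buff_def(comp_json).format("buf_in"), end="")
# BUFFER buf_in DATA_TYPE int32 DATA
#  0 1 2
# END

print(amberscript_comp_buff_def(comp_json, make_empty_buffer=True).format("buf_ref"), end="")
# BUFFER buf_ref DATA_TYPE int32 SIZE 3 0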