Example #1
def prepend_catchsegv_if_available(cmd: List[str],
                                   log_warning: bool = False) -> List[str]:
    try:
        return [str(tool_on_path("catchsegv"))] + cmd
    except ToolNotOnPathError:
        pass

    try:
        # cdb is the command line version of WinDbg.
        return [
            str(tool_on_path("cdb")),
            "-g",
            "-G",
            "-lines",
            "-nosqm",
            "-o",
            "-x",
            "-c",
            "kp;q",
        ] + cmd
    except ToolNotOnPathError:
        pass

    if log_warning:
        gflogging.log(
            "WARNING: Could not find catchsegv (Linux) or cdb (Windows) on your PATH; you will not be able to get "
            "stack traces from tools or the host driver.")

    return cmd
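
A minimal usage sketch, assuming this helper and run_helper (Example #8) are importable from the same context; the tool name, arguments, and timeout are placeholders, not prescribed values:

# Hedged sketch: wrap a tool invocation so a crash yields a stack trace where possible.
cmd = prepend_catchsegv_if_available(["spirv-opt", "shader.spv", "-O"],
                                     log_warning=True)
result = run_helper(cmd, timeout=60)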
Example #2
    def get_binary_path(self, binary: Binary) -> Path:
        # Special case: allow the path to be specified in the binary object itself for testing purposes:
        if binary.path:
            return Path(binary.path)
        # Try resolved cache first.
        result = self._resolved_paths.get(binary.SerializePartialToString())
        if result:
            return result
        log(f"Finding path of binary:\n{binary}")

        # Try list (cache) of binary artifacts on disk.
        result = self._get_binary_path_from_binary_artifacts(binary)
        if result:
            return result

        # Try online.
        wrapped_recipe = get_github_release_recipe(binary)
        # Execute the recipe to download the binaries.
        artifact_util.artifact_execute_recipe_if_needed(
            wrapped_recipe.path, {wrapped_recipe.path: wrapped_recipe.recipe})
        # Add to binary artifacts list (cache).
        self._binary_artifacts.append((
            wrapped_recipe.recipe.download_and_extract_archive_set.archive_set,
            wrapped_recipe.path,
        ))
        # Now we should be able to find it in the binary artifacts list.
        result = self._get_binary_path_from_binary_artifacts(binary)
        check(
            bool(result),
            AssertionError(
                f"Could not find:\n{binary} even though we just added it:\n{wrapped_recipe}"
            ),
        )
        assert result  # noqa
        return result
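
The special case at the top lets tests pin a binary without touching the cache or network. A hedged sketch (constructing the Binary message by keyword is an assumption about the proto definition, though the name and path fields appear elsewhere in these examples, and binary_manager is a hypothetical instance):

binary = Binary(name="spirv-opt", path="/usr/local/bin/spirv-opt")
# Returns Path(binary.path) immediately, skipping the caches and the download recipe.
path = binary_manager.get_binary_path(binary)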
Example #3
def run_shader_job(
    shader_compiler_device: DeviceShaderCompiler,
    spirv_shader_job_path: Path,
    output_dir: Path,
    binary_manager: binaries_util.BinaryManager,
) -> List[Path]:
    compiler_path = binary_manager.get_binary_path_by_name(
        shader_compiler_device.binary).path

    log(f"Running {str(compiler_path)} on shader job {str(spirv_shader_job_path)}"
        )

    shader_paths = shader_job_util.get_related_files(
        spirv_shader_job_path, language_suffix=[shader_job_util.SUFFIX_SPIRV])

    log(f"Running {str(compiler_path)} on shaders: {shader_paths}")

    result = []

    for shader_path in shader_paths:
        result.append(
            run_shader(
                shader_compiler_device,
                compiler_path=compiler_path,
                shader_path=shader_path,
                output_dir=output_dir,
            ))

    return result
Example #4
def copy_file(
    source_file_path: pathlib.Path, dest_file_path: pathlib.Path
) -> pathlib.Path:
    file_mkdirs_parent(dest_file_path)
    gflogging.log(f"Copying {str(source_file_path)} to {str(dest_file_path)}")
    shutil.copy(str(source_file_path), str(dest_file_path))
    return dest_file_path
Example #5
def update_details(binary_manager: binaries_util.BinaryManager,
                   device: Device) -> None:

    check(
        device.HasField("android"),
        AssertionError(f"Expected Android device: {device}"),
    )

    build_fingerprint = ""
    try:
        adb_fingerprint_result = adb_check(
            device.android.serial,
            ["shell", "getprop ro.build.fingerprint"],
            verbose=True,
        )
        build_fingerprint = adb_fingerprint_result.stdout
        build_fingerprint = build_fingerprint.strip()
    except subprocess.CalledProcessError:
        log("Failed to get device fingerprint")

    device_properties = ""
    ensure_amber_installed(device.android.serial, binary_manager)
    try:
        device_properties = get_device_driver_details(device.android.serial)
    except devices_util.GetDeviceDetailsError as ex:
        log(f"WARNING: Failed to get device driver details: {ex}")

    device.android.build_fingerprint = build_fingerprint
    device.device_properties = device_properties
Example #6
def make_directory_symlink(new_symlink_file_path: Path,
                           existing_dir: Path) -> Path:
    gflogging.log(
        f"symlink: from {str(new_symlink_file_path)} to {str(existing_dir)}")
    check(existing_dir.is_dir(),
          AssertionError(f"Not a directory: {existing_dir}"))
    file_mkdirs_parent(new_symlink_file_path)

    # symlink_to takes a path relative to the location of the new file (or an absolute path, but we avoid this).
    symlink_contents = os.path.relpath(str(existing_dir),
                                       start=str(new_symlink_file_path.parent))
    try:
        new_symlink_file_path.symlink_to(symlink_contents,
                                         target_is_directory=True)
    except OSError:
        if get_platform() != "Windows":
            raise
        # Retry using junctions under Windows.
        try:
            # noinspection PyUnresolvedReferences
            import _winapi  # pylint: disable=import-error,import-outside-toplevel;

            # Unlike symlink_to, CreateJunction takes a path relative to the current directory.
            _winapi.CreateJunction(str(existing_dir),
                                   str(new_symlink_file_path))
            return new_symlink_file_path
        except ModuleNotFoundError:
            pass
        raise

    return new_symlink_file_path
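
The relpath computation is the key detail: storing a target relative to the link's own parent keeps the link valid when the whole tree is relocated. A stdlib-only sketch of just that idea:

import os
from pathlib import Path

def relative_dir_symlink(new_link: Path, existing_dir: Path) -> None:
    # The symlink target is resolved relative to the link's directory, not the CWD.
    target = os.path.relpath(str(existing_dir), start=str(new_link.parent))
    new_link.symlink_to(target, target_is_directory=True)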
Example #7
def spirv_asm_shader_job_to_amber_script(
    shader_job_file_amber_test: ShaderJobFileBasedAmberTest,
    output_amber_script_file_path: Path,
    amberfy_settings: AmberfySettings,
) -> Path:

    log(f"Amberfy: {[str(variant.asm_spirv_shader_job_json) for variant in shader_job_file_amber_test.variants_asm_spirv_job]} "
        +
        (f"with reference {str(shader_job_file_amber_test.reference_asm_spirv_job.asm_spirv_shader_job_json)} "
         if shader_job_file_amber_test.reference_asm_spirv_job else "") +
        f"to {str(output_amber_script_file_path)}")

    shader_job_amber_test = shader_job_file_amber_test.to_shader_job_based()

    if isinstance(shader_job_amber_test.variants[0], GraphicsShaderJob):
        result = graphics_shader_job_amber_test_to_amber_script(
            shader_job_amber_test, amberfy_settings)

    elif isinstance(shader_job_amber_test.variants[0], ComputeShaderJob):
        result = compute_shader_job_amber_test_to_amber_script(
            shader_job_amber_test, amberfy_settings)
    else:
        raise AssertionError(
            f"Unknown shader job type: {shader_job_amber_test.variants[0]}")

    util.file_write_text(output_amber_script_file_path, result)
    return output_amber_script_file_path
Example #8
def run_helper(
    cmd: List[str],
    check_exit_code: bool = True,
    timeout: Optional[float] = None,
    env: Optional[Dict[str, str]] = None,
    working_dir: Optional[Path] = None,
) -> subprocess.CompletedProcess:
    check(
        bool(cmd) and cmd[0] is not None and isinstance(cmd[0], str),
        AssertionError("run takes a list of str, not a str"),
    )

    # When using catchsegv, only SIGSEGV will cause a backtrace to be printed.
    # We can also include SIGABRT by setting the following environment variable.
    if cmd[0].endswith("catchsegv"):
        if env is None:
            env = {}
        env["SEGFAULT_SIGNALS"] = "SEGV ABRT"

    env_child: Optional[Dict[str, str]] = None
    if env:
        log(f"Extra environment variables are: {env}")
        env_child = os.environ.copy()
        env_child.update(env)

    with subprocess.Popen(
            cmd,
            encoding="utf-8",
            errors="ignore",
            start_new_session=True,
            env=env_child,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            cwd=working_dir,
    ) as process:
        try:
            stdout, stderr = process.communicate(input=None, timeout=timeout)
        except subprocess.TimeoutExpired:
            try:
                posix_kill_group(process)
            except AttributeError:
                process.kill()
            stdout, stderr = process.communicate()
            assert timeout  # noqa
            raise subprocess.TimeoutExpired(process.args, timeout, stdout,
                                            stderr)
        except:  # noqa
            try:
                posix_kill_group(process)
            except AttributeError:
                process.kill()
            raise

        exit_code = process.poll()
        if check_exit_code and exit_code != 0:
            raise subprocess.CalledProcessError(exit_code, process.args,
                                                stdout, stderr)
        return subprocess.CompletedProcess(process.args, exit_code, stdout,
                                           stderr)
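
A plausible call site combining this with prepend_catchsegv_if_available (Example #1); the tool name and timeout are illustrative:

cmd = prepend_catchsegv_if_available(["glslangValidator", "shader.frag"])
try:
    completed = run_helper(cmd, timeout=30)
except subprocess.TimeoutExpired:
    log("Tool timed out; its process group was killed.")
except subprocess.CalledProcessError as ex:
    log(f"Tool failed with exit code {ex.returncode}:\n{ex.stderr}")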
Example #9
def try_get_root_file() -> Path:
    try:
        return artifact_util.artifact_path_get_root()
    except FileNotFoundError:
        log("Could not find ROOT file (in the current directory or above) to mark where binaries should be stored. "
            "Creating a ROOT file in the current directory.")
        return util.file_write_text(
            Path(artifact_util.ARTIFACT_ROOT_FILE_NAME), "")
Example #10
def _sigint_handler(signum: int, _: Any) -> None:
    global exit_triggered  # pylint: disable=invalid-name,global-statement;
    msg = f"\nCaught signal {signum}. Terminating at next safe point.\n"
    log(msg)
    print(msg, flush=True, file=sys.stderr)  # noqa: T001
    exit_triggered = True
    # Restore signal handler.
    signal.signal(signal.SIGINT, original_sigint_handler)
Example #11
def get_gerrit_change_details(change_number: str, cookie: str) -> Any:
    log(f"Getting change details for change number: {change_number}")
    return gerrit_get(
        KHRONOS_GERRIT_URL,
        f"/changes/{change_number}/detail",
        params={"O": "10004"},
        cookie=cookie,
    )
Example #12
def handle_test(
    test_dir: Path,
    reports_dir: Path,
    active_devices: List[Device],
    binary_manager: binaries_util.BinaryManager,
    settings: Settings,
) -> bool:
    report_paths: List[Path] = []
    issue_found = False
    preprocessor_cache = util.CommandCache()

    # Run on all devices.
    for device in active_devices:
        status = run(test_dir,
                     binary_manager,
                     device,
                     preprocessor_cache=preprocessor_cache)
        if status in (
                fuzz.STATUS_CRASH,
                fuzz.STATUS_TOOL_CRASH,
                fuzz.STATUS_UNRESPONSIVE,
        ):
            issue_found = True

        # No need to run further on real devices if the pre-processing step failed.
        if status == fuzz.STATUS_TOOL_CRASH:
            break

        # Skip devices if interrupted, but finish reductions, if needed.
        if interrupt_util.interrupted():
            break

    # For each device that saw a crash, copy the test to reports_dir, adding the signature and device info to the test
    # metadata.
    for device in active_devices:
        report_dir = fuzz_test_util.maybe_add_report(test_dir, reports_dir,
                                                     device, settings)
        if report_dir:
            report_paths.append(report_dir)

    # For each report, run a reduction on the target device with the device-specific crash signature.
    for test_dir_in_reports in report_paths:
        if fuzz_test_util.should_reduce_report(settings, test_dir_in_reports):
            run_reduction_on_report(
                test_dir_in_reports,
                reports_dir,
                binary_manager=binary_manager,
                settings=settings,
            )
        else:
            log("Skipping reduction due to settings.")

    # For each report, create a summary and reproduce the bug.
    for test_dir_in_reports in report_paths:
        fuzz.create_summary_and_reproduce(test_dir_in_reports, binary_manager,
                                          settings)

    return issue_found
Example #13
def create_zip(output_file_path: Path, entries: List[ZipEntry]) -> Path:
    gflogging.log(f"Creating zip {str(output_file_path)}:")
    with zipfile.ZipFile(
        output_file_path, "w", compression=zipfile.ZIP_DEFLATED
    ) as file_handle:
        for entry in entries:
            file_handle.write(entry.path, entry.path_in_archive)
            gflogging.log(f"Adding: {entry.path} {entry.path_in_archive or ''}")
    return output_file_path
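
Judging from the loop, ZipEntry pairs an on-disk path with a path inside the archive; assuming a positional constructor (an assumption about the class, which is not shown here), usage might look like:

entries = [
    ZipEntry(Path("results/log.txt"), "log.txt"),
    ZipEntry(Path("results/variant/image.png"), "variant/image.png"),
]
create_zip(Path("results.zip"), entries)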
Example #14
def device_host(binary_manager: binaries_util.BinaryManager) -> Device:
    amber_path = binary_manager.get_binary_path_by_name(binaries_util.AMBER_NAME).path

    driver_details = ""
    try:
        driver_details = host_device_util.get_driver_details(amber_path)
    except GetDeviceDetailsError as ex:
        log(f"WARNING: Failed to get device driver details: {ex}")

    return Device(name="host", host=DeviceHost(), device_properties=driver_details)
Example #15
def stay_awake_warning(serial: Optional[str] = None) -> None:
    try:
        res = adb_check(
            serial, ["shell", "settings get global stay_on_while_plugged_in"])
        if str(res.stdout).strip() == "0":
            log('\nWARNING: please enable "Stay Awake" from developer settings\n'
                )
    except subprocess.CalledProcessError:
        log("Failed to check Stay Awake setting. This can happen if the device has just booted."
            )
Example #16
def extract_archive(archive_file: Path, output_dir: Path) -> Path:
    """
    Extract/unpack an archive.

    :return: output_dir
    """
    gflogging.log(f"Extracting {str(archive_file)} to {str(output_dir)}")
    shutil.unpack_archive(str(archive_file), extract_dir=str(output_dir))
    gflogging.log("Done")
    return output_dir
Example #17
def read_or_create(settings_path: Path) -> Settings:
    try:
        return read(settings_path)
    except FileNotFoundError as exception:
        if settings_path.exists():
            raise
        log(f'\ngfauto could not find "{settings_path}" so one will be created for you\n'
            )
        write_default(settings_path)
        raise NoSettingsFile(
            f'\ngfauto could not find "{settings_path}" so one was created for you. Please review "{settings_path}" and try again.\n'
        ) from exception
Example #18
def move_file(source_path: Path, dest_path: Path) -> Path:
    check_file_exists(source_path)
    check(
        not dest_path.is_dir(),
        AssertionError(
            f"Tried to move {str(source_path)} to a directory {str(dest_path)}"
        ),
    )
    file_mkdirs_parent(dest_path)
    gflogging.log(f"Move file {str(source_path)} to {str(dest_path)}")
    source_path.replace(dest_path)
    return dest_path
Example #19
def run_helper(
    cmd: List[str],
    check_exit_code: bool = True,
    timeout: Optional[float] = None,
    env: Optional[Dict[str, str]] = None,
    working_dir: Optional[Path] = None,
) -> subprocess.CompletedProcess:
    check(
        bool(cmd) and cmd[0] is not None and isinstance(cmd[0], str),
        AssertionError("run takes a list of str, not a str"),
    )

    env_child: Optional[Dict[str, str]] = None
    if env:
        log(f"Extra environment variables are: {env}")
        env_child = os.environ.copy()
        env_child.update(env)

    with subprocess.Popen(
            cmd,
            encoding="utf-8",
            errors="ignore",
            start_new_session=True,
            env=env_child,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            cwd=working_dir,
    ) as process:
        try:
            stdout, stderr = process.communicate(input=None, timeout=timeout)
        except subprocess.TimeoutExpired:
            try:
                posix_kill_group(process)
            except AttributeError:
                process.kill()
            stdout, stderr = process.communicate()
            assert timeout  # noqa
            raise subprocess.TimeoutExpired(process.args, timeout, stdout,
                                            stderr)
        except:  # noqa
            try:
                posix_kill_group(process)
            except AttributeError:
                process.kill()
            raise

        exit_code = process.poll()
        if check_exit_code and exit_code != 0:
            raise subprocess.CalledProcessError(exit_code, process.args,
                                                stdout, stderr)
        return subprocess.CompletedProcess(process.args, exit_code, stdout,
                                           stderr)
Example #20
def get_latest_deqp_change(cookie: str) -> Any:
    log("Getting latest deqp change")
    changes = gerrit_get(
        KHRONOS_GERRIT_URL,
        "/changes/",
        params={
            "q": "project:vk-gl-cts status:merged branch:master",
            "n": "1000"
        },
        cookie=cookie,
    )

    return find_latest_change(changes)
Example #21
def extract_shaders(amber_file: Path, output_dir: Path,
                    binaries: binaries_util.BinaryManager) -> List[Path]:
    files_written: List[Path] = []
    with util.file_open_text(amber_file, "r") as file_handle:
        lines = file_handle.readlines()
        if lines[0].startswith("#!amber"):
            files_written += extract_shaders_amber_script(
                amber_file, lines, output_dir, binaries)
        else:
            log(f"Skipping VkScript file {str(amber_file)} for now.")
            files_written += extract_shaders_vkscript(amber_file, lines,
                                                      output_dir, binaries)

    return files_written
Example #22
def main() -> None:
    parser = argparse.ArgumentParser(
        description="A tool for extracting a signature from a log file.")

    parser.add_argument(
        "log_file",
        help="The log file from which a signature should be extracted.",
    )

    parsed_args = parser.parse_args(sys.argv[1:])

    log_file: Path = Path(parsed_args.log_file)

    log(get_signature_from_log_contents(util.file_read_text(log_file)))
Example #23
def update_test_json(test_json: Path) -> Path:
    test = test_util.metadata_read_from_path(test_json)

    for test_binary in itertools.chain(test.binaries,
                                       test.device.binaries):  # type: Binary
        for default_binary in binaries_util.DEFAULT_BINARIES:
            if (test_binary.name == default_binary.name
                    and test_binary.version != default_binary.version):
                log(f"Updating version: {test_binary.version} -> {default_binary.version}"
                    )
                test_binary.version = default_binary.version
                break

    return test_util.metadata_write_to_path(test, test_json)
Example #24
def get_device_list(
    binary_manager: binaries_util.BinaryManager,
    device_list: Optional[DeviceList] = None,
) -> DeviceList:

    if not device_list:
        device_list = DeviceList()

    # We use |extend| below (instead of |append|) because you cannot append to a list of non-scalars in protobuf.
    # |extend| copies the elements from the list and appends them.

    # Host preprocessor.
    device = device_preprocessor()
    device_list.devices.extend([device])
    device_list.active_device_names.append(device.name)

    # SwiftShader.
    device = swift_shader_device(binary_manager)
    device_list.devices.extend([device])
    device_list.active_device_names.append(device.name)

    # Host device.
    device = device_host(binary_manager)
    device_list.devices.extend([device])
    device_list.active_device_names.append(device.name)

    try:
        # Android devices.
        android_devices = android_device.get_all_android_devices(binary_manager)
        device_list.devices.extend(android_devices)
        device_list.active_device_names.extend([d.name for d in android_devices])
    except ToolNotOnPathError:
        log(
            "WARNING: adb was not found on PATH nor was ANDROID_HOME set; "
            "Android devices will not be added to settings.json"
        )

    # Offline compiler.
    device = Device(
        name="amdllpc",
        shader_compiler=DeviceShaderCompiler(
            binary="amdllpc", args=["-gfxip=9.0.0", "-verify-ir", "-auto-layout-desc"]
        ),
        binaries=[binary_manager.get_binary_path_by_name("amdllpc").binary],
    )
    device_list.devices.extend([device])
    # Don't add to active devices, since this is mostly just an example.

    return device_list
Example #25
def is_screen_off_or_locked(serial: Optional[str] = None) -> bool:
    """:return: True: the screen is off or locked. False: unknown."""
    res = adb_can_fail(serial, ["shell", "dumpsys nfc"])
    if res.returncode != 0:
        log("Failed to run dumpsys.")
        return False

    stdout = str(res.stdout)
    # You will often find "mScreenState=OFF_LOCKED", but this catches OFF too, which is good.
    if "mScreenState=OFF" in stdout:
        return True
    if "mScreenState=ON_LOCKED" in stdout:
        return True

    return False
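
A hypothetical pre-run guard that combines this with stay_awake_warning (Example #15); the wrapper name is illustrative:

def warn_if_device_not_ready(serial: str) -> None:
    stay_awake_warning(serial)
    if is_screen_off_or_locked(serial):
        log("WARNING: screen is off or locked; rendering tests may be "
            "throttled or fail.")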
Example #26
def recipe_download_and_extract_archive_set(
        recipe: RecipeDownloadAndExtractArchiveSet,
        output_artifact_path: str) -> None:

    for archive in recipe.archive_set.archives:  # type: Archive
        util.check_field_truthy(archive.url, "url")
        util.check_field_truthy(archive.output_file, "output_file")
        util.check_field_truthy(archive.output_directory, "output_directory")

        output_file_path = artifact_util.artifact_get_inner_file_path(
            archive.output_file, output_artifact_path)

        output_directory_path = artifact_util.artifact_get_inner_file_path(
            archive.output_directory, output_artifact_path)

        log(f"Downloading {archive.url} to {str(output_file_path)}")
        urllib.request.urlretrieve(archive.url, str(output_file_path))

        if output_file_path.name.lower().endswith(".zip"):
            with ZipFile(str(output_file_path), "r") as zip_file:
                for info in zip_file.infolist():  # type: ZipInfo
                    extracted_file = zip_file.extract(
                        info, str(output_directory_path))
                    # If the file was created on a UNIX-y system:
                    if info.create_system == 3:
                        # Shift away first 2 bytes to get permission bits.
                        zip_file_exec_bits = info.external_attr >> 16
                        # Just consider the executable bits.
                        zip_file_exec_bits = (zip_file_exec_bits
                                              & ALL_EXECUTABLE_PERMISSION_BITS)
                        current_attribute_bits = os.stat(
                            extracted_file).st_mode
                        if (current_attribute_bits |
                                zip_file_exec_bits) != current_attribute_bits:
                            os.chmod(
                                extracted_file,
                                current_attribute_bits | zip_file_exec_bits,
                            )
        else:
            shutil.unpack_archive(str(output_file_path),
                                  str(output_directory_path))

    output_metadata = ArtifactMetadata()
    output_metadata.data.extracted_archive_set.archive_set.CopyFrom(
        recipe.archive_set)

    artifact_util.artifact_write_metadata(output_metadata,
                                          output_artifact_path)
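
The chmod handling above exists because ZipFile.extract does not restore Unix permissions; they are stored in the upper 16 bits of ZipInfo.external_attr. A self-contained sketch of just that technique:

import os
import stat
from zipfile import ZipFile

_ALL_EXEC_BITS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH

def extract_zip_keeping_exec_bits(zip_path: str, out_dir: str) -> None:
    with ZipFile(zip_path) as zip_file:
        for info in zip_file.infolist():
            extracted = zip_file.extract(info, out_dir)
            if info.create_system == 3:  # Archive was created on a UNIX-y system.
                exec_bits = (info.external_attr >> 16) & _ALL_EXEC_BITS
                if exec_bits:
                    os.chmod(extracted, os.stat(extracted).st_mode | exec_bits)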
Example #27
def download_latest_binary_version_numbers() -> List[Binary]:
    log("Downloading the latest binary version numbers...")

    # Deep copy of DEFAULT_BINARIES.
    binaries: List[Binary] = []
    for binary in DEFAULT_BINARIES:
        new_binary = Binary()
        new_binary.CopyFrom(binary)
        binaries.append(new_binary)

    # Update version numbers.
    for binary in binaries:
        project_name = binary_name_to_project_name(binary.name)
        binary.version = _download_latest_version_number(project_name)

    return binaries
Example #28
def process_chunk(  # pylint: disable=too-many-locals;
    chunk_num: int, chunk: Set[str], log_files: List[Path], output_file: TextIO
) -> None:

    log(f"\nChunk {chunk_num}:")
    output_file.write(f"\nChunk {chunk_num}:\n")

    unique_signatures: Set[str] = set()

    for log_file in log_files:
        with util.file_open_text(log_file, "r") as f:
            first_line = f.readline()
            match = re.fullmatch(r"Iteration seed: (\d+)\n", first_line)
            assert match  # noqa
            seed = match.group(1)
            if seed not in chunk:
                continue

            lines = f.readlines()
            start_line = 0
            end_line = 0
            found_bug = False
            for i, line in enumerate(lines):
                match = re.fullmatch(r"STATUS (\w+)\n", line)
                if not match:
                    continue
                status = match.group(1)
                if status == "SUCCESS":
                    start_line = i + 1
                    continue
                found_bug = True
                end_line = i + 1
                break

            if not found_bug:
                continue

            failure_log = "\n".join(lines[start_line:end_line])

            signature = signature_util.get_signature_from_log_contents(failure_log)

            unique_signatures.add(signature)

    # Print the signatures.
    for signature in sorted(unique_signatures):
        log(signature)
        output_file.write(f"{signature}\n")
Example #29
def main() -> int:
    parser = argparse.ArgumentParser(
        description=
        "Runs a binary given the binary name and settings.json file. "
        "Use -- to separate args to run_bin and your binary. ")

    parser.add_argument(
        "--settings",
        help="Path to the settings JSON file for this instance.",
        default=str(settings_util.DEFAULT_SETTINGS_FILE_PATH),
    )

    parser.add_argument(
        "binary_name",
        help="The name of the binary to run. E.g. spirv-opt, glslangValidator",
        type=str,
    )

    parser.add_argument(
        "arguments",
        metavar="arguments",
        type=str,
        nargs="*",
        help="The arguments to pass to the binary",
    )

    parsed_args = parser.parse_args(sys.argv[1:])

    # Args.
    settings_path: Path = Path(parsed_args.settings)
    binary_name: str = parsed_args.binary_name
    arguments: List[str] = parsed_args.arguments

    try:
        settings = settings_util.read_or_create(settings_path)
    except settings_util.NoSettingsFile:
        log(f"Settings file {str(settings_path)} was created for you; using this."
            )
        settings = settings_util.read_or_create(settings_path)

    binary_manager = binaries_util.get_default_binary_manager(
        settings=settings)

    cmd = [str(binary_manager.get_binary_path_by_name(binary_name).path)]
    cmd.extend(arguments)
    return subprocess.run(cmd, check=False).returncode
Example #30
def main() -> None:

    parser = argparse.ArgumentParser(
        description="Outputs number of lines covered from .cov files.",
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    )

    parser.add_argument(
        "coverage_files",
        metavar="coverage_files",
        type=str,
        nargs="*",
        help="The .cov files to process, one after the other.",
    )

    parser.add_argument(
        "--out",
        type=str,
        help="Output results text file.",
        default="out.txt",
    )

    parsed_args = parser.parse_args(sys.argv[1:])

    input_files: List[str] = parsed_args.coverage_files
    output_file: str = parsed_args.out

    with util.file_open_text(Path(output_file), "w") as out:

        for coverage_file in input_files:
            with open(coverage_file, mode="rb") as f:
                all_line_counts: cov_util.LineCounts = pickle.load(f)

            total_num_lines = 0
            total_num_covered_lines = 0

            # |all_line_counts| maps from source file to another map. We just need the map.
            for line_counts in all_line_counts.values():
                # |line_counts| maps from line number to execution count. We just need the execution count.
                for execution_count in line_counts.values():
                    total_num_lines += 1
                    if execution_count > 0:
                        total_num_covered_lines += 1

            log(f"{total_num_covered_lines}, {total_num_lines}")
            out.write(f"{total_num_covered_lines}, {total_num_lines}\n")
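
For reference, LineCounts is a nested mapping from source file to line number to execution count (as the comments above note), so the tallying reduces to the following minimal illustration with made-up data:

all_line_counts = {
    "shader_compiler.cc": {10: 5, 11: 0, 12: 3},
    "util.cc": {7: 0},
}
total_num_lines = sum(len(counts) for counts in all_line_counts.values())
total_num_covered_lines = sum(
    1
    for counts in all_line_counts.values()
    for count in counts.values()
    if count > 0
)
assert (total_num_covered_lines, total_num_lines) == (2, 4)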