Example #1
def create_version_objects(
        srcdir: Directory) -> tuple[list[GameEdition], list[GameExpansion]]:
    """
    Create GameEdition and GameExpansion objects from auxiliary
    config files.
    """
    game_expansion_list = []
    game_edition_list = []

    # initialize necessary paths
    game_edition_path = srcdir.joinpath("game_editions.toml")
    game_expansion_path = srcdir.joinpath("game_expansions.toml")

    # load the TOML config files into dictionaries
    with game_edition_path.open() as game_edition_toml:
        game_editions = toml.loads(game_edition_toml.read())

    with game_expansion_path.open() as game_expansion_toml:
        game_expansions = toml.loads(game_expansion_toml.read())

    # create and list GameEdition objects
    game_editions.pop("file_version")
    for game in game_editions:
        aux_path = srcdir[game_editions[game]["subfolder"]]
        game_obj = create_game_obj(game_editions[game], aux_path)
        game_edition_list.append(game_obj)

    # create and list GameExpansion objects
    game_expansions.pop("file_version")
    for game in game_expansions:
        aux_path = srcdir[game_expansions[game]["subfolder"]]
        game_obj = create_game_obj(game_expansions[game], aux_path, True)
        game_expansion_list.append(game_obj)

    return game_edition_list, game_expansion_list
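A minimal usage sketch (the import path and the aux file location are assumptions, not taken from this code):

# Hypothetical invocation.
from openage.util.fslike.directory import Directory

srcdir = Directory("converter/aux_files").root
editions, expansions = create_version_objects(srcdir)
print(f"{len(editions)} editions, {len(expansions)} expansions known")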
Example #2
def get_gamespec(srcdir: Directory, game_version: GameVersion,
                 pickle_cache: bool) -> ArrayMember:
    """
    Read the empires.dat file and return the parsed game data.
    """
    if game_version.edition.game_id in ("ROR", "AOE1DE", "AOC", "HDEDITION",
                                        "AOE2DE"):
        filepath = srcdir.joinpath(
            game_version.edition.media_paths[MediaType.DATFILE][0])

    elif game_version.edition.game_id == "SWGB":
        expansion_ids = [expansion.game_id
                         for expansion in game_version.expansions]
        if "SWGB_CC" in expansion_ids:
            # use the dat file of the Clone Campaigns expansion, wherever
            # it appears in the expansion list
            cc_expansion = game_version.expansions[
                expansion_ids.index("SWGB_CC")]
            filepath = srcdir.joinpath(
                cc_expansion.media_paths[MediaType.DATFILE][0])

        else:
            filepath = srcdir.joinpath(
                game_version.edition.media_paths[MediaType.DATFILE][0])

    else:
        raise Exception(
            "No service found for reading data file of version %s" %
            game_version.edition.game_id)

    cache_file = os.path.join(
        gettempdir(), f"{game_version.edition.game_id}_{filepath.name}.pickle")

    with filepath.open('rb') as empiresdat_file:
        gamespec = load_gamespec(empiresdat_file, game_version, cache_file,
                                 pickle_cache)

    return gamespec
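A hedged call site; srcdir and game_version come from earlier steps (see the detection code in a later example), and the parsed result is cached as a pickle in the system temp directory so repeated runs are faster:

# Hypothetical invocation.
gamespec = get_gamespec(srcdir, game_version, pickle_cache=True)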
Example #3
def debug_modpack(debugdir: Directory, loglevel: int,
                  modpack: Modpack) -> None:
    """
    Create debug output for a modpack.

    :param debugdir: Output directory for the debug info.
    :type debugdir: Directory
    :param loglevel: Determines how detailed the output is.
    :type loglevel: int
    :param modpack: Modpack container.
    :type modpack: Modpack
    """
    if loglevel < 1:
        return

    # Export info and manifest file
    logdir = debugdir.joinpath(f"export/{modpack.name}")

    with logdir[modpack.info.filename].open('wb') as outfile:
        outfile.write(modpack.info.dump().encode('utf-8'))

    with logdir[modpack.manifest.filename].open('wb') as outfile:
        outfile.write(modpack.manifest.dump().encode('utf-8'))

    if loglevel < 2:
        return

    logfile = debugdir.joinpath(f"export/{modpack.name}")["summary"]
    logtext = ""

    logtext += f"name: {modpack.name}\n"

    file_count = (len(modpack.get_data_files()) +
                  len(modpack.get_media_files()) +
                  len(modpack.get_metadata_files()))
    logtext += f"file count: {file_count}\n"
    logtext += f"    data: {len(modpack.get_data_files())}\n"
    logtext += f"    media: {len(modpack.get_media_files())}\n"

    # Count the files by type
    media_dict = {}
    for media_type, files in modpack.get_media_files().items():
        media_dict[media_type.value] = len(files)

    # Sort by type name
    media_dict = dict(sorted(media_dict.items(), key=lambda item: item[0]))

    for media_type, file_count in media_dict.items():
        logtext += f"        {media_type}: {file_count}\n"

    logtext += f"    metadata: {len(modpack.get_metadata_files())}\n"

    with logfile.open("w") as log:
        log.write(logtext)
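In this and the other debug_* helpers, loglevel is a verbosity threshold: below 1 nothing is written, 1 writes the info and manifest files, 2 or higher also writes the summary. A hedged sketch (debugdir and modpack construction are assumed):

# Hypothetical invocations.
debug_modpack(debugdir, loglevel=1, modpack=modpack)  # info + manifest only
debug_modpack(debugdir, loglevel=2, modpack=modpack)  # additionally the summary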
Example #4
def debug_game_version(debugdir: Directory, loglevel: int,
                       args: Namespace) -> None:
    """
    Create debug output for the detected game version.

    :param debugdir: Output directory for the debug info.
    :type debugdir: Directory
    :param loglevel: Determines how detailed the output is.
    :type loglevel: int
    :param args: CLI arguments.
    :type args: Namespace
    """
    if loglevel < 2:
        return

    # Log game version
    logfile = debugdir.joinpath("init/")["game_version"]
    logtext = ""

    logtext += (f"game edition:\n" f"    - {args.game_version.edition}\n")

    if len(args.game_version.expansions) > 0:
        logtext += "game expansions:\n"
        for expansion in args.game_version.expansions:
            logtext += f"    - {expansion}\n"

    else:
        logtext += "game expansions: none detected"

    with logfile.open("w") as log:
        log.write(logtext)
Example #5
def debug_converter_objects(debugdir: Directory, loglevel: int,
                            dataset: GenieObjectContainer) -> None:
    """
    Create debug output for ConverterObject instances from the
    conversion preprocessor.

    :param debugdir: Output directory for the debug info.
    :type debugdir: Directory
    :param loglevel: Determines how detailed the output is.
    :type loglevel: int
    :param dataset: Dataset containing converter objects from pre-processing.
    :type dataset: GenieObjectContainer
    """
    if loglevel < 2:
        return

    logfile = debugdir.joinpath("conversion/")["preprocessor_objects"]
    logtext = ""

    logtext += (
        f"unit objects count: {len(dataset.genie_units)}\n"
        f"tech objects count: {len(dataset.genie_techs)}\n"
        f"civ objects count: {len(dataset.genie_civs)}\n"
        f"effect bundles count: {len(dataset.genie_effect_bundles)}\n"
        f"age connections count: {len(dataset.age_connections)}\n"
        f"building connections count: {len(dataset.building_connections)}\n"
        f"unit connections count: {len(dataset.unit_connections)}\n"
        f"tech connections count: {len(dataset.tech_connections)}\n"
        f"graphics objects count: {len(dataset.genie_graphics)}\n"
        f"sound objects count: {len(dataset.genie_sounds)}\n"
        f"terrain objects count: {len(dataset.genie_terrains)}\n")

    with logfile.open("w") as log:
        log.write(logtext)
Example #6
def debug_registered_graphics(debugdir: Directory, loglevel: int,
                              existing_graphics: list[str]) -> None:
    """
    Create debug output for found graphics files.

    :param debugdir: Output directory for the debug info.
    :type debugdir: Directory
    :param loglevel: Determines how detailed the output is.
    :type loglevel: int
    :param existing_graphics: List of graphic ids of graphic files.
    :type existing_graphics: list
    """
    if loglevel < 2:
        return

    logfile = debugdir.joinpath("read/")["existing_graphics"]
    logtext = ""

    logtext += f"file count: {len(existing_graphics)}\n\n"

    sorted_graphics = sorted(existing_graphics)
    logtext += "\n".join(sorted_graphics)

    with logfile.open("w") as log:
        log.write(logtext)
Example #7
def debug_string_resources(debugdir: Directory, loglevel: int,
                           string_resources: StringResource) -> None:
    """
    Create debug output for found string resources.

    :param debugdir: Output directory for the debug info.
    :type debugdir: Directory
    :param loglevel: Determines how detailed the output is.
    :type loglevel: int
    :param string_resources: Language and string information.
    :type string_resources: StringResource
    """
    if loglevel < 2:
        return

    logfile = debugdir.joinpath("read/")["string_resources"]
    logtext = ""

    logtext += "found languages: "
    logtext += ", ".join(string_resources.get_tables().keys())
    logtext += "\n\n"

    for language, strings in string_resources.get_tables().items():
        logtext += f"{language}: {len(strings)} IDs\n"

    with logfile.open("w") as log:
        log.write(logtext)
Example #8
def debug_gamedata_format(debugdir: Directory, loglevel: int,
                          game_version: GameVersion) -> None:
    """
    Create debug output for the converted .dat format.

    :param debugdir: Output directory for the debug info.
    :type debugdir: Directory
    :param loglevel: Determines how detailed the output is.
    :type loglevel: int
    :param game_version: Game version the .dat file comes with.
    :type game_version: GameVersion
    """
    if loglevel < 2:
        return

    logfile = debugdir.joinpath("read/")["data_format"]
    logtext = ""

    discovered_structs = {EmpiresDatWrapper}
    handled_structs = set()

    while discovered_structs:
        struct = discovered_structs.pop()

        if struct in handled_structs:
            continue

        members = struct.get_data_format_members(game_version)
        logtext += f"total member count: {len(members)}\n"

        max_name_width = 1
        max_vmemb_width = 1
        for member in members:
            # Find out width of columns for table formatting
            if len(str(member[1])) > max_name_width:
                max_name_width = len(str(member[1]))

            if len(str(member[2])) > max_vmemb_width:
                max_vmemb_width = len(str(member[2]))

            # Search for sub-structs
            if isinstance(member[3], IncludeMembers):
                discovered_structs.add(member[3].cls)

            elif isinstance(member[3], MultisubtypeMember):
                discovered_structs.update(member[3].class_lookup.values())

        for member in members:
            logtext += (f"{str(member[0].value):8}  "
                        f"{str(member[1]):{max_name_width}}  "
                        f"{str(member[2]):{max_vmemb_width}}  "
                        f"{str(member[3])}\n")

        handled_structs.add(struct)
        logtext += "\n"

    with logfile.open("w") as log:
        log.write(logtext)
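The member table above relies on Python's nested format specs, where the column width itself is a format field. A standalone illustration with invented rows:

# Standalone example of dynamic column widths via nested format specs.
rows = [("id", "int32_t"), ("unit_count", "uint16_t")]
name_width = max(len(name) for name, _ in rows)
for name, type_name in rows:
    print(f"{name:{name_width}}  {type_name}")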
Example #9
def conversion_required(asset_dir: Directory, args: Namespace) -> bool:
    """
    Returns true if an asset conversion is required to run the game.

    Sets options in args according to what sorts of conversion are required.

    TODO: Reimplement change detection for new converter.
    """
    version_path = asset_dir / 'converted' / changelog.ASSET_VERSION_FILENAME
    # determine the version of assets
    try:
        with version_path.open() as fileobj:
            asset_version = fileobj.read().strip()

        try:
            asset_version = int(asset_version)
        except ValueError:
            info("Converted asset version has improper format; "
                 "expected integer number")
            asset_version = -1

    except FileNotFoundError:
        # assets have not been converted yet
        info("No converted assets have been found")
        asset_version = -1

    changes = changelog.changes(asset_version)

    if not changes:
        dbg("Converted assets are up to date")
        return False

    if asset_version >= 0 and asset_version != changelog.ASSET_VERSION:
        info("Found converted assets with version %d, "
             "but need version %d", asset_version, changelog.ASSET_VERSION)

    info("Converting %s", ", ".join(sorted(changes)))

    # try to resolve the output path
    target_path = asset_dir.resolve_native_path_w()
    if not target_path:
        raise OSError(f"could not resolve a writable asset path in {asset_dir}")

    info("Will save to '%s'", target_path.decode(errors="replace"))

    for component in changelog.COMPONENTS:
        if component not in changes:
            # don't reconvert this component:
            setattr(args, f"no_{component}", True)

    if "metadata" in changes:
        args.no_pickle_cache = True

    return True
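A minimal sketch of the version-file convention checked above: a single integer in converted/<ASSET_VERSION_FILENAME>, with missing or malformed files treated as "never converted". The helper name is hypothetical:

# Hypothetical helper mirroring the logic above.
from pathlib import Path

def read_asset_version(version_file: Path) -> int:
    try:
        return int(version_file.read_text().strip())
    except (FileNotFoundError, ValueError):
        return -1  # no converted assets, or a malformed version file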
Example #10
def get_asset_path(custom_asset_dir: str = None) -> Directory:
    """
    Returns a Path object for the game assets.

    `custom_asset_dir` can be a custom asset directory, which is mounted at
    the top of the union filesystem (i.e. it has the highest priority).

    This function is used by both the conversion process and the game
    startup. The conversion uses it for its output; the game uses it as its
    data source(s).
    """

    # if we're in devmode, use only the in-repo asset folder
    if not custom_asset_dir and config.DEVMODE:
        return Directory(os.path.join(config.BUILD_SRC_DIR, "assets")).root

    # else, mount the possible locations in a union:
    # overlay the global dir and the user dir.
    result = Union().root

    # the cmake-determined folder for storing assets
    global_data = Path(config.GLOBAL_ASSET_DIR)
    if global_data.is_dir():
        result.mount(WriteBlocker(Directory(global_data).root).root)

    # user-data directory as provided by environment variables
    # and platform standards
    # we always create this!
    home_data = default_dirs.get_dir("data_home") / "openage"
    result.mount(Directory(home_data, create_if_missing=True).root / "assets")

    # the program argument overrides it all
    if custom_asset_dir:
        result.mount(Directory(custom_asset_dir).root)

    return result
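Hedged call sites (the custom path is an assumption). Because the custom directory is mounted last, it shadows both the global and the user data directory:

# Hypothetical calls.
assets = get_asset_path()                       # global dir union user dir
assets = get_asset_path("/tmp/openage-assets")  # mounted last, highest priority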
Example #11
    def export(data_files: list[DataDefinition], exportdir: Directory) -> None:
        """
        Exports data files.

        :param data_files: Data definitions for data files.
        :type data_files: list
        :param exportdir: Directory the resulting file(s) will be exported to.
                          Target subfolder and target filename should be
                          stored in the export request.
        :type exportdir: Directory
        """
        for data_file in data_files:
            output_dir = exportdir.joinpath(data_file.targetdir)
            output_content = data_file.dump()

            # generate human-readable file
            with output_dir[data_file.filename].open('wb') as outfile:
                outfile.write(output_content.encode('utf-8'))
Example #12
    def save_smx(path: Path, target: Path, palette: ColorTable = None) -> None:
        """
        Save an SMX file as a PNG.
        """
        from ..entity_object.export.texture import Texture
        from ..value_object.read.media.smx import SMX
        from ..service.read.palette import get_palettes
        from ..processor.export.texture_merge import merge_frames

        if not palette:
            # data and game_version come from the enclosing scope
            palette = get_palettes(data, game_version)

        with path.open("rb") as smxfile:
            tex = Texture(SMX(smxfile.read()), palette)

        merge_frames(tex)

        out_path, filename = os.path.split(target)
        MediaExporter.save_png(tex, Directory(out_path).root, filename)
Example #13
def debug_media_cache(debugdir: Directory, loglevel: int, sourcedir: Directory,
                      cachedata: dict, game_version: GameVersion) -> None:
    """
    Create media cache data for graphics files. This allows using deterministic
    packer and compression settings for graphics file conversion.

    :param debugdir: Output directory for the debug info.
    :type debugdir: Directory
    :param loglevel: Determines how detailed the output is.
    :type loglevel: int
    :param sourcedir: Sourcedir where the graphics files are mounted.
    :type sourcedir: Directory
    :param cachedata: Dict with cache data.
    :type cachedata: dict
    :param game_version: Game version.
    :type game_version: GameVersion
    """
    if loglevel < 6:
        return

    cache_file = MediaCacheFile("export/", "media_cache.toml", game_version)
    cache_file.set_hash_func("sha3_256")

    # Sort the output by filename
    cache_data = dict(
        sorted(cachedata.items(), key=lambda item: item[0].source_filename))

    for request, cache in cache_data.items():
        filepath = sourcedir[request.get_type().value, request.source_filename]

        cache_file.add_cache_data(request.get_type(), request.source_filename,
                                  hash_file(filepath), cache[1], cache[0])

    logfile = debugdir.joinpath("export/")["media_cache.toml"]
    logtext = cache_file.dump()

    with logfile.open("w") as log:
        log.write(logtext)
Example #14
def iterate_game_versions(srcdir: Directory, avail_game_eds: list[GameEdition],
                          avail_game_exps: list[GameExpansion]) -> GameVersion:
    """
    Determine what editions and expansions of a game are installed in srcdir
    by iterating through all versions the converter knows about.
    """
    best_edition = None
    expansions = []

    for game_edition in avail_game_eds:
        # Check for files that we know exist in the game's folder
        for detection_hints in game_edition.game_file_versions:
            check_paths = detection_hints.get_paths()

            # Check if any of the known paths for the file exists
            found_file = False
            for required_path in check_paths:
                required_file = srcdir.joinpath(required_path)

                if required_file.is_file():
                    hash_val = hash_file(required_file,
                                         hash_algo=detection_hints.hash_algo)

                    if hash_val not in detection_hints.get_hashes():
                        dbg(f"Found required file {required_file.resolve_native_path()} "
                            "but could not determine version number")

                    else:
                        version_no = detection_hints.get_hashes()[hash_val]
                        dbg(f"Found required file {required_file.resolve_native_path()} "
                            f"for version {version_no}")

                    found_file = True
                    break

            if not found_file:
                break

        else:
            # All files were found. Now check if the version is supported.
            if game_edition.support == Support.NOPE:
                dbg(f"Found unsupported game edition: {game_edition}")

                if best_edition is None:
                    best_edition = game_edition

                # Continue to look for supported editions
                continue

            if game_edition.support == Support.BREAKS:
                dbg(f"Found broken game edition: {game_edition}")

                if best_edition is None or best_edition.support == Support.NOPE:
                    best_edition = game_edition

                # Continue to look for supported editions
                continue

            # We found a fully supported edition!
            # No need to check for better editions
            best_edition = game_edition
            break

    else:
        # Either no edition was found, or only unsupported/broken ones.
        # Return the best edition detected so far (possibly None).
        return GameVersion(edition=best_edition)

    for game_expansion in best_edition.expansions:
        for existing_game_expansion in avail_game_exps:
            if game_expansion == existing_game_expansion.game_id:
                # replace the id string with the GameExpansion object
                game_expansion = existing_game_expansion
                break

        # Check for files that we know exist in the game expansion's folder
        for detection_hints in game_expansion.game_file_versions:
            check_paths = detection_hints.get_paths()

            # Check if any of the known paths for the file exists
            found_file = False
            for required_path in check_paths:
                required_file = srcdir.joinpath(required_path)

                if required_file.is_file():
                    found_file = True
                    break

            if not found_file:
                break

        else:
            if game_expansion.support == Support.NOPE:
                info(f"Found unsupported game expansion: {game_expansion}")
                # Continue to look for supported expansions
                continue

            if game_expansion.support == Support.BREAKS:
                info(f"Found broken game expansion: {best_edition}")
                # Continue to look for supported expansions
                continue

            expansions.append(game_expansion)

    return GameVersion(edition=best_edition, expansions=expansions)
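Both detection loops above rely on Python's for/else: the else branch runs only when the loop was never broken, i.e. when every required file was found (inner loops) or no fully supported edition turned up (outer loop). A standalone illustration:

# Standalone example of the for/else detection pattern.
required = ["a.dat", "b.dat"]
present = {"a.dat", "b.dat"}
for filename in required:
    if filename not in present:
        break  # a required file is missing
else:
    print("all required files found")  # runs only if the loop never broke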
Example #15
def debug_converter_object_groups(debugdir: Directory, loglevel: int,
                                  dataset: GenieObjectContainer) -> None:
    """
    Create debug output for ConverterObjectGroup instances from the
    conversion preprocessor.

    :param debugdir: Output directory for the debug info.
    :type debugdir: Directory
    :param loglevel: Determines how detailed the output is.
    :type loglevel: int
    :param dataset: Dataset containing converter object groups from processing.
    :type dataset: GenieObjectContainer
    """
    if loglevel < 3:
        return

    entity_groups = {}
    entity_groups.update(dataset.unit_lines)
    entity_groups.update(dataset.building_lines)
    entity_groups.update(dataset.ambient_groups)
    entity_groups.update(dataset.variant_groups)

    entity_name_lookup_dict = get_entity_lookups(dataset.game_version)
    tech_name_lookup_dict = get_tech_lookups(dataset.game_version)
    civ_name_lookup_dict = get_civ_lookups(dataset.game_version)
    terrain_name_lookup_dict = get_terrain_lookups(dataset.game_version)

    # Used when a name lookup fails
    nnn = ("NameNotFound", "NameNotFound")

    for key, line in entity_groups.items():
        logfile = debugdir.joinpath("conversion/entity_groups/")[str(key)]
        logtext = ""

        logtext += f"repr: {line}\n"
        logtext += (
            f"nyan name: "
            f"{entity_name_lookup_dict.get(line.get_head_unit_id(), nnn)[0]}\n"
        )

        logtext += f"is_creatable: {line.is_creatable()}\n"
        logtext += f"is_harvestable: {line.is_harvestable()}\n"
        logtext += f"is_garrison: {line.is_garrison()}\n"
        logtext += f"is_gatherer: {line.is_gatherer()}\n"
        logtext += f"is_passable: {line.is_passable()}\n"
        logtext += f"is_projectile_shooter: {line.is_projectile_shooter()}\n"
        logtext += f"is_ranged: {line.is_ranged()}\n"
        logtext += f"is_melee: {line.is_melee()}\n"
        logtext += f"is_repairable: {line.is_repairable()}\n"
        logtext += f"is_unique: {line.is_unique()}\n"

        logtext += f"class id: {line.get_class_id()}\n"
        logtext += f"garrison mode: {line.get_garrison_mode()}\n"
        logtext += f"head unit: {line.get_head_unit()}\n"
        logtext += f"train location id: {line.get_train_location_id()}\n"

        logtext += "line:\n"
        for unit in line.line:
            logtext += f"    - {unit}\n"

        if len(line.creates) > 0:
            logtext += "creates:\n"
            for unit in line.creates:
                logtext += (
                    f"    - {unit} "
                    f"({entity_name_lookup_dict.get(unit.get_head_unit_id(), nnn)[0]})\n"
                )

        else:
            logtext += "creates: nothing\n"

        if len(line.researches) > 0:
            logtext += "researches:\n"
            for tech in line.researches:
                logtext += (
                    f"    - {tech} "
                    f"({tech_name_lookup_dict.get(tech.get_id(), nnn)[0]})\n")

        else:
            logtext += "researches: nothing\n"

        if len(line.garrison_entities) > 0:
            logtext += "garrisons units:\n"
            for unit in line.garrison_entities:
                logtext += (
                    f"    - {unit} "
                    f"({entity_name_lookup_dict.get(unit.get_head_unit_id(), nnn)[0]})\n"
                )

        else:
            logtext += "garrisons units: nothing\n"

        if len(line.garrison_locations) > 0:
            logtext += "garrisons in:\n"
            for unit in line.garrison_locations:
                logtext += (
                    f"    - {unit} "
                    f"({entity_name_lookup_dict.get(unit.get_head_unit_id(), nnn)[0]})\n"
                )

        else:
            logtext += "garrisons in: nothing\n"

        if isinstance(line, GenieUnitLineGroup):
            logtext += "\n"
            logtext += (
                f"civ id: {line.get_civ_id()} "
                f"({civ_name_lookup_dict.get(line.get_civ_id(), nnn)[0]})\n")
            logtext += (
                f"enabling research id: {line.get_enabling_research_id()} "
                f"({tech_name_lookup_dict.get(line.get_enabling_research_id(), nnn)[0]})\n"
            )

        if isinstance(line, GenieBuildingLineGroup):
            logtext += "\n"
            logtext += f"has_foundation: {line.has_foundation()}\n"
            logtext += f"is_dropsite: {line.is_dropsite()}\n"
            logtext += f"is_trade_post {line.is_trade_post()}\n"
            logtext += (
                f"enabling research id: {line.get_enabling_research_id()} "
                f"({tech_name_lookup_dict.get(line.get_enabling_research_id(), nnn)[0]})\n"
            )
            logtext += f"dropoff gatherer ids: {line.get_gatherer_ids()}\n"

        if isinstance(line, GenieStackBuildingGroup):
            logtext += "\n"
            logtext += f"is_gate: {line.is_gate()}\n"
            logtext += f"stack unit: {line.get_stack_unit()}\n"

        if isinstance(line, GenieUnitTransformGroup):
            logtext += "\n"
            logtext += f"transform unit: {line.get_transform_unit()}\n"

        if isinstance(line, GenieMonkGroup):
            logtext += "\n"
            logtext += f"switch unit: {line.get_switch_unit()}\n"

        with logfile.open("w") as log:
            log.write(logtext)

    for key, civ in dataset.civ_groups.items():
        logfile = debugdir.joinpath("conversion/civ_groups/")[str(key)]
        logtext = ""

        logtext += f"repr: {civ}\n"
        logtext += (f"nyan name: "
                    f"{civ_name_lookup_dict.get(civ.get_id(), nnn)[0]}\n")

        logtext += f"team bonus: {civ.team_bonus}\n"
        logtext += f"tech tree: {civ.tech_tree}\n"

        logtext += "civ bonus ids:\n"
        for bonus in civ.civ_boni:
            logtext += f"    - {bonus}\n"

        logtext += "unique unit ids:\n"
        for unit in civ.unique_entities:
            logtext += (f"    - {unit} "
                        f"({entity_name_lookup_dict.get(unit, nnn)[0]})\n")

        logtext += "unique tech ids:\n"
        for tech in civ.unique_techs:
            logtext += (f"    - {tech} "
                        f"({tech_name_lookup_dict.get(tech, nnn)[0]})\n")

        with logfile.open("w") as log:
            log.write(logtext)

    for key, tech in dataset.tech_groups.items():
        logfile = debugdir.joinpath("conversion/tech_groups/")[str(key)]
        logtext = ""

        logtext += f"repr: {tech}\n"
        logtext += (f"nyan name: "
                    f"{tech_name_lookup_dict.get(tech.get_id(), nnn)[0]}\n")

        logtext += f"is_researchable: {tech.is_researchable()}\n"
        logtext += f"is_unique: {tech.is_unique()}\n"
        logtext += (
            f"research location id: {tech.get_research_location_id()} "
            f"({entity_name_lookup_dict.get(tech.get_research_location_id(), nnn)[0]})\n"
        )

        logtext += f"required tech count: {tech.get_required_tech_count()}\n"
        logtext += "required techs:\n"
        for req_tech in tech.get_required_techs():
            logtext += (f"    - {req_tech} "
                        f"({tech_name_lookup_dict.get(req_tech, nnn)[0]})\n")

        if isinstance(tech, AgeUpgrade):
            logtext += "\n"
            logtext += f"researched age id: {tech.age_id}\n"

        if isinstance(tech, UnitLineUpgrade):
            logtext += "\n"
            logtext += f"upgraded line id: {tech.get_line_id()}\n"
            logtext += (
                f"upgraded line: {tech.get_upgraded_line()} "
                f"({entity_name_lookup_dict.get(tech.get_line_id(), nnn)[0]})\n"
            )
            logtext += f"upgrade target id: {tech.get_upgrade_target_id()}\n"

        if isinstance(tech, BuildingLineUpgrade):
            logtext += "\n"
            logtext += f"upgraded line id: {tech.get_line_id()}\n"
            logtext += (
                f"upgraded line: {tech.get_upgraded_line()} "
                f"({entity_name_lookup_dict.get(tech.get_line_id(), nnn)[0]})\n"
            )
            logtext += f"upgrade target id: {tech.get_upgrade_target_id()}\n"

        if isinstance(tech, UnitUnlock):
            logtext += "\n"
            logtext += (
                f"unlocked line: {tech.get_unlocked_line()} "
                f"({entity_name_lookup_dict.get(tech.get_unlocked_line().get_head_unit_id(), nnn)[0]})\n"
            )

        if isinstance(tech, BuildingUnlock):
            logtext += "\n"
            logtext += (
                f"unlocked line: {tech.get_unlocked_line()} "
                f"({entity_name_lookup_dict.get(tech.get_unlocked_line().get_head_unit_id(), nnn)[0]})\n"
            )

        with logfile.open("w") as log:
            log.write(logtext)

    for key, terrain in dataset.terrain_groups.items():
        logfile = debugdir.joinpath("conversion/terrain_groups/")[str(key)]
        logtext = ""

        logtext += f"repr: {terrain}\n"
        logtext += (
            f"nyan name: "
            f"{terrain_name_lookup_dict.get(terrain.get_id(), nnn)[1]}\n")

        logtext += f"has_subterrain: {terrain.has_subterrain()}\n"

        with logfile.open("w") as log:
            log.write(logtext)
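The lookup dicts used above map Genie ids to tuples whose first entry is the nyan name (the terrain lookup reads the second entry instead), and nnn is a loud fallback so a missing id never crashes the debug output. A hedged illustration of that shape:

# Hypothetical lookup contents; the real tables come from get_*_lookups().
entity_name_lookup_dict = {4: ("Archer", "archer")}
nnn = ("NameNotFound", "NameNotFound")
print(entity_name_lookup_dict.get(4, nnn)[0])     # Archer
print(entity_name_lookup_dict.get(9999, nnn)[0])  # NameNotFound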
Example #16
def debug_mounts(debugdir: Directory, loglevel: int, args: Namespace) -> None:
    """
    Create debug output for the mounted files and folders.

    :param debugdir: Output directory for the debug info.
    :type debugdir: Directory
    :param loglevel: Determines how detailed the output is.
    :type loglevel: int
    :param args: CLI arguments.
    :type args: Namespace
    """
    if loglevel < 2:
        return

    # Log mounts
    logfile = debugdir.joinpath("init/")["mounts"]
    logtext = ""

    mounts = args.srcdir.fsobj.obj.fsobj.mounts

    # Group the mounted resources by mountpoint
    mount_dict = {}
    for mount in mounts:
        mount_dict.setdefault(mount[0], []).append(mount[1])

    # Sort by mountpoint name
    mount_dict = dict(sorted(mount_dict.items(), key=lambda item: item[0]))

    # Format mounts
    for mountpoint, resources in mount_dict.items():
        if len(mountpoint) == 0:
            logtext += "mountpoint: ${srcdir}/\n"

        else:
            logtext += f"mountpoint: ${{srcdir}}/{mountpoint[0].decode()}/\n"

        for resource in resources:
            resource_type = None
            abs_path = ""
            file_count = 0

            if type(resource) is Path:
                resource_type = "dir"
                abs_path = resource.fsobj.path.decode()

            elif type(resource) is FileCollectionPath:
                resource_type = "file collection"
                abs_path = resource.fsobj.fileobj.name.decode()
                file_count = len(resource.fsobj.rootentries[0])

            logtext += f"    resource type: {resource_type}\n"
            logtext += f"    source path: {abs_path}\n"

            if resource_type == "file collection":
                logtext += f"    file count: {file_count}\n"

            logtext += "    ----\n"

    with logfile.open("w") as log:
        log.write(logtext)
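For illustration, the resulting "mounts" log has roughly this shape (paths and counts invented):

mountpoint: ${srcdir}/
    resource type: dir
    source path: /usr/share/openage/assets
    ----
mountpoint: ${srcdir}/graphics/
    resource type: file collection
    source path: /games/aoe2/resources/graphics.drs
    file count: 4242
    ----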