Ejemplo n.º 1
0
    def unpack(self, bnd_buffer):
        """Unpack a BND header and its entries from `bnd_buffer` (bytes or a buffer).

        The endian-agnostic start of the header is read first to determine byte
        order; the endian-sensitive remainder and all entry headers are then read
        with the detected byte order.
        """
        if isinstance(bnd_buffer, bytes):
            bnd_buffer = BytesIO(bnd_buffer)

        # Start of header is always read little-endian.
        self.header_struct = BinaryStruct(*self.HEADER_STRUCT_START,
                                          byte_order='<')
        header = self.header_struct.unpack(bnd_buffer)
        self.bnd_version = header.bnd_version
        self.bnd_signature = header.bnd_signature
        self.bnd_magic = header.bnd_magic
        # Endianness may be flagged explicitly in the header or implied by the magic value.
        self.big_endian = header.big_endian or is_big_endian(self.bnd_magic)
        byte_order = '>' if self.big_endian else '<'
        # Re-read the endian-sensitive header fields with the detected byte order.
        header.update(
            self.header_struct.unpack(bnd_buffer,
                                      *self.HEADER_STRUCT_ENDIAN,
                                      byte_order=byte_order))
        self.unknown = header.unknown

        # Entry header layout varies with flags encoded in `bnd_magic`.
        self.entry_header_struct = BinaryStruct(*self.BND_ENTRY_HEADER,
                                                byte_order=byte_order)
        if has_id(self.bnd_magic):
            self.entry_header_struct.add_fields(self.ENTRY_ID,
                                                byte_order=byte_order)
        if has_path(self.bnd_magic):
            self.entry_header_struct.add_fields(self.NAME_OFFSET,
                                                byte_order=byte_order)
        if has_uncompressed_size(self.bnd_magic):
            self.entry_header_struct.add_fields(self.UNCOMPRESSED_DATA_SIZE,
                                                byte_order=byte_order)

        # Entry paths in this BND format are Shift-JIS encoded.
        for entry in BNDEntry.unpack(bnd_buffer,
                                     self.entry_header_struct,
                                     path_encoding='shift-jis',
                                     count=header.entry_count):
            self.add_entry(entry)
Ejemplo n.º 2
0
    def load_unpacked_dir(self, directory):
        """Load BND settings and entries from a previously unpacked directory.

        Reads `bnd_manifest.txt` inside `directory`, then rebuilds the header
        and entry-header structs from the loaded settings.
        """
        directory = Path(directory)
        if not directory.is_dir():
            raise ValueError(f"Could not find unpacked BND directory {repr(directory)}.")
        manifest_path = directory / "bnd_manifest.txt"
        with manifest_path.open("rb") as manifest:
            # Settings must be consumed in the exact order they were written.
            self.bnd_version = self.read_bnd_setting(manifest.readline(), "version")
            self.bnd_signature = self.read_bnd_setting(manifest.readline(), "bnd_signature")
            self.bnd_magic = self.read_bnd_setting(manifest.readline(), "bnd_magic", assert_type=int)
            self.big_endian = self.read_bnd_setting(manifest.readline(), "big_endian", assert_type=bool)
            self.unknown = self.read_bnd_setting(manifest.readline(), "unknown", assert_type=bool)
            self.dcx = self.read_bnd_setting(manifest.readline(), "dcx", assert_type=tuple)
            self.add_entries_from_manifest_paths(manifest, directory)

        # Rebuild header structs from the loaded settings.
        byte_order = ">" if self.big_endian else "<"
        self.header_struct = BinaryStruct(*self.HEADER_STRUCT_START, byte_order="<")
        self.header_struct.add_fields(*self.HEADER_STRUCT_ENDIAN, byte_order=byte_order)
        self.entry_header_struct = BinaryStruct(*self.BND_ENTRY_HEADER, byte_order=byte_order)
        # Optional entry-header fields depend on flags in `bnd_magic`.
        for flag_test, extra_fields in (
            (has_id, self.ENTRY_ID),
            (has_path, self.NAME_OFFSET),
            (has_uncompressed_size, self.UNCOMPRESSED_DATA_SIZE),
        ):
            if flag_test(self.bnd_magic):
                self.entry_header_struct.add_fields(extra_fields, byte_order=byte_order)
Ejemplo n.º 3
0
    def unpack(self, bnd_buffer):
        """Unpack a BND header, entries, and optional hash table from `bnd_buffer`.

        Endianness is read from an explicit header field (not inferred from the
        magic value as in older formats). Entry paths may be UTF-16 or Shift-JIS
        depending on the `utf16_paths` header flag.
        """
        if isinstance(bnd_buffer, bytes):
            bnd_buffer = BytesIO(bnd_buffer)

        # Start of header is always read little-endian.
        self.header_struct = BinaryStruct(*self.HEADER_STRUCT_START, byte_order="<")
        header = self.header_struct.unpack(bnd_buffer)
        self.bnd_flags = (header["flag_1"], header["flag_2"])
        self.bnd_version = header["bnd_version"]
        self.big_endian = header["big_endian"] == 0x00000100  # Magic not used to infer endianness here.
        byte_order = ">" if self.big_endian else "<"
        # Re-read the endian-sensitive header fields with the detected byte order.
        header.update(self.header_struct.unpack(bnd_buffer, *self.HEADER_STRUCT_ENDIAN, byte_order=byte_order))
        self.bnd_signature = header["bnd_signature"]
        self.bnd_magic = header["bnd_magic"]
        self.utf16_paths = header["utf16_paths"]
        self.hash_table_type = header["hash_table_type"]
        self.hash_table_offset = header["hash_table_offset"]
        # UTF-16 path byte order follows the file's byte order.
        path_encoding = ("utf-16be" if self.big_endian else "utf-16le") if self.utf16_paths else "shift-jis"

        if header["entry_header_size"] != header_size(self.bnd_magic):
            raise ValueError(
                f"Expected BND entry header size {header_size(self.bnd_magic)} based on magic\n"
                f"{hex(self.bnd_magic)}, but BND header says {header['entry_header_size']}."
            )
        # Hash table type 4 is the only type expected to carry a table offset.
        if self.hash_table_type != 4 and self.hash_table_offset != 0:
            _LOGGER.warning(
                f"Found non-zero hash table offset {self.hash_table_offset}, but header says this BND has no hash "
                f"table."
            )
        # Entry header layout varies with flags encoded in `bnd_magic`.
        self.entry_header_struct = BinaryStruct(*self.BND_ENTRY_HEADER, byte_order=byte_order)
        if has_uncompressed_size(self.bnd_magic):
            self.entry_header_struct.add_fields(self.UNCOMPRESSED_DATA_SIZE, byte_order=byte_order)
        self.entry_header_struct.add_fields(self.DATA_OFFSET, byte_order=byte_order)
        if has_id(self.bnd_magic):
            self.entry_header_struct.add_fields(self.ENTRY_ID, byte_order=byte_order)
        if has_path(self.bnd_magic):
            self.entry_header_struct.add_fields(self.NAME_OFFSET, byte_order=byte_order)
        if self.bnd_magic == 0x20:
            # Extra pad.
            self.entry_header_struct.add_fields("8x")
        # Sanity check only; mismatch is logged rather than raised.
        if header["entry_header_size"] != self.entry_header_struct.size:
            _LOGGER.warning(
                f"Entry header size given in BND header ({header['entry_header_size']}) does not match actual entry "
                f"header size ({self.entry_header_struct.size})."
            )
        for entry in BNDEntry.unpack(
            bnd_buffer, self.entry_header_struct, path_encoding=path_encoding, count=header["entry_count"]
        ):
            self.add_entry(entry)

        # Read hash table.
        if self.hash_table_type == 4:
            bnd_buffer.seek(self.hash_table_offset)
            # Raw table bytes are kept so they can be re-written on repack.
            self._most_recent_hash_table = bnd_buffer.read(header["data_offset"] - self.hash_table_offset)
        self._most_recent_entry_count = len(self.binary_entries)
        self._most_recent_paths = [entry.path for entry in self.binary_entries]
Ejemplo n.º 4
0
    def __init__(self,
                 luainfo_source=None,
                 big_endian=False,
                 use_struct_64=False):
        """Hold Lua goal information, optionally loading it from `luainfo_source`.

        `luainfo_source` may be None, a list/tuple of goals, a file path
        (str or Path), raw bytes, a readable buffer, or any object exposing
        a `.data` attribute (e.g. a BND entry).
        """
        self.big_endian = big_endian
        self.use_struct_64 = use_struct_64
        self.luainfo_path = None
        byte_order = ">" if self.big_endian else "<"
        self.header_struct = BinaryStruct(*self.HEADER_STRUCT, byte_order=byte_order)

        self.goals = []  # type: List[LuaGoal]

        if luainfo_source is None:
            return
        if isinstance(luainfo_source, (list, tuple)):
            # Already a sequence of goals; adopt it directly.
            self.goals = luainfo_source  # type: List[LuaGoal]
            return
        if isinstance(luainfo_source, (str, Path)):
            # File path: remember it and unpack the file contents.
            self.luainfo_path = Path(luainfo_source)
            with self.luainfo_path.open("rb") as info_file:
                self.unpack(info_file)
            return
        if hasattr(luainfo_source, "data"):
            # e.g. a BNDEntry; unwrap its raw bytes first.
            luainfo_source = luainfo_source.data
        if isinstance(luainfo_source, bytes):
            luainfo_source = BytesIO(luainfo_source)
        if isinstance(luainfo_source, BufferedIOBase):
            self.unpack(luainfo_source)
Ejemplo n.º 5
0
class EventArg(BaseEventArg):
    """Event argument replacement record; all four fields are unsigned 64-bit."""

    STRUCT = BinaryStruct(
        ("instruction_line", "Q"),
        ("write_from_byte", "Q"),
        ("read_from_byte", "Q"),
        ("bytes_to_write", "Q"),
    )
Ejemplo n.º 6
0
    def pack(self):
        """Pack header, goal structs, and shift-JIS name strings into LuaInfo bytes."""
        goal_struct = BinaryStruct(
            *(self.GOAL_STRUCT_64 if self.use_struct_64 else self.GOAL_STRUCT_32),
            byte_order=">" if self.big_endian else "<",
        )
        header = self.header_struct.pack(goal_count=len(self.goals))
        # All string data is appended after the header and the full goal table.
        strings_start = len(header) + len(self.goals) * goal_struct.size
        goal_data = b""
        string_data = b""
        for goal in self.goals:
            # Record each string's absolute offset before appending it.
            name_offset = strings_start + len(string_data)
            string_data += goal.goal_name.encode(encoding="shift-jis") + b"\0"
            goal_kwargs = goal.get_interrupt_details()
            logic_interrupt_name = goal_kwargs.pop("logic_interrupt_name")
            if logic_interrupt_name:
                logic_interrupt_name_offset = strings_start + len(string_data)
                string_data += logic_interrupt_name.encode(encoding="shift-jis") + b"\0"
            else:
                logic_interrupt_name_offset = 0  # zero offset marks "no logic interrupt"
            goal_data += goal_struct.pack(
                goal_id=goal.goal_id,
                name_offset=name_offset,
                logic_interrupt_name_offset=logic_interrupt_name_offset,
                **goal_kwargs,
            )

        return header + goal_data + string_data
Ejemplo n.º 7
0
class EMEVD(BaseEMEVD):
    """Dark Souls 1 EMEVD container (32-bit offsets, UTF-8 strings)."""

    Event = Event
    GAME_MODULE = sys.modules["soulstruct.events.darksouls1"]
    STRING_ENCODING = 'utf-8'
    DCX_MAGIC = (36, 44)
    STRUCT = BinaryStruct(
        ('version', '4s', b'EVD\x00'),
        ('ds1_marker_1', 'I', 0),
        ('ds1_marker_2', 'I', 204),
        ('file_size_1', 'I'),
        ('event_count', 'I'),
        ('event_table_offset', 'I'),
        ('instruction_count', 'I'),
        ('instruction_table_offset', 'I'),
        '4x',  # unknown table, unused in all games
        ('unknown_table_offset', 'I'),
        ('event_layers_count', 'I'),  # unused in DS1
        ('event_layers_table_offset', 'I'),  # unused in DS1
        ('event_arg_count', 'I'),
        ('event_arg_table_offset', 'I'),
        ('linked_files_count', 'I'),
        ('linked_files_table_offset', 'I'),
        ('base_arg_data_size', 'I'),
        ('base_arg_data_offset', 'I'),
        ('packed_strings_size', 'I'),
        ('packed_strings_offset', 'I'),
        '4x',
    )

    def compute_base_args_size(self, existing_data_size):
        """Total size of all event args plus the trailing four-byte terminator."""
        return 4 + sum(event.total_args_size for event in self.events.values())

    def pad_after_base_args(self, emevd_binary_after_base_args):
        """Append the four-byte null terminator after base arg data."""
        return emevd_binary_after_base_args + b'\x00\x00\x00\x00'
Ejemplo n.º 8
0
class EventArg(BaseEventArg):
    """Event argument replacement record, packed as four unsigned 64-bit fields."""

    STRUCT = BinaryStruct(
        ('instruction_line', 'Q'),
        ('write_from_byte', 'Q'),
        ('read_from_byte', 'Q'),
        ('bytes_to_write', 'Q'),
    )
Ejemplo n.º 9
0
class EMEVD(BaseEMEVD):
    """Dark Souls 1 EMEVD container (32-bit offsets, UTF-8 strings)."""

    Event = Event
    GAME_MODULE = sys.modules["soulstruct.events.darksouls1"]
    STRING_ENCODING = "utf-8"
    DCX_MAGIC = (36, 44)
    STRUCT = BinaryStruct(
        ("version", "4s", b"EVD\x00"),
        ("ds1_marker_1", "I", 0),
        ("ds1_marker_2", "I", 204),
        ("file_size_1", "I"),
        ("event_count", "I"),
        ("event_table_offset", "I"),
        ("instruction_count", "I"),
        ("instruction_table_offset", "I"),
        "4x",  # unknown table, unused in all games
        ("unknown_table_offset", "I"),
        ("event_layers_count", "I"),  # unused in DS1
        ("event_layers_table_offset", "I"),  # unused in DS1
        ("event_arg_count", "I"),
        ("event_arg_table_offset", "I"),
        ("linked_files_count", "I"),
        ("linked_files_table_offset", "I"),
        ("base_arg_data_size", "I"),
        ("base_arg_data_offset", "I"),
        ("packed_strings_size", "I"),
        ("packed_strings_offset", "I"),
        "4x",
    )

    def compute_base_args_size(self, existing_data_size):
        """Total size of all event args plus the trailing four-byte terminator."""
        return 4 + sum(event.total_args_size for event in self.events.values())

    def pad_after_base_args(self, emevd_binary_after_base_args):
        """Append the four-byte null terminator after base arg data."""
        return emevd_binary_after_base_args + b"\x00\x00\x00\x00"
Ejemplo n.º 10
0
class EventLayers(BaseEventLayers):
    """Event layer record: a 32-bit bit field bracketed by fixed marker values."""

    STRUCT = BinaryStruct(
        ("two", "I", 2),
        ("event_layers", "I"),  # 32-bit bit field
        ("zero", "Q", 0),
        ("minus_one", "q", -1),
        ("one", "Q", 1),
    )
Ejemplo n.º 11
0
class EventLayers(BaseEventLayers):
    """Event layer record holding a 32-bit bit field plus constant markers."""

    STRUCT = BinaryStruct(
        ('two', 'I', 2),
        ('event_layers', 'I'),  # 32-bit bit field
        ('zero', 'Q', 0),
        ('minus_one', 'q', -1),
        ('one', 'Q', 1),
    )
Ejemplo n.º 12
0
    def _set_version(self, version):
        """Resolve `version` to an internal index, set endianness, and rebuild structs.

        Raises ValueError for unrecognized version names.
        """
        # Map every accepted alias to its internal version index.
        aliases = {
            'des': 0, '0': 0,
            'ds1': 1, 'ptd': 1, 'ptde': 1, 'dsr': 1, 'ds2': 1, 'bb': 1, '1': 1,
            'ds3': 2, '2': 2,
        }
        key = str(version).lower()
        if key not in aliases:
            raise ValueError(f"Unrecognized FMG version: {version}. Try one in: ('ds1', 'ds2', 'bb', 'ds3').")
        self.version = v = aliases[key]
        self.big_endian = v == 0  # only version 0 (Demon's Souls) is big-endian

        byte_order = '>' if self.big_endian else '<'
        self.header_struct = BinaryStruct(*self.HEADER_STRUCTS[v], byte_order=byte_order)
        self.range_struct = BinaryStruct(*self.RANGE_STRUCTS[v], byte_order=byte_order)
        self.string_offset_struct = BinaryStruct(*self.STRING_OFFSET_STRUCTS[v], byte_order=byte_order)
Ejemplo n.º 13
0
 def unpack(self, info_buffer):
     """Unpack goal entries from a LuaInfo buffer, skipping duplicate goals."""
     self.big_endian = self._check_big_endian(info_buffer)
     self.header_struct = BinaryStruct(*self.HEADER_STRUCT, byte_order=">" if self.big_endian else "<")
     header = self.header_struct.unpack(info_buffer)
     # TODO: auto-detect `use_struct_64` for 64-bit offsets (PTDE and DSR both use 32-bit).
     goal_struct = BinaryStruct(*(self.GOAL_STRUCT_64 if self.use_struct_64 else self.GOAL_STRUCT_32),
                                byte_order=">" if self.big_endian else "<")
     self.goals = []
     for _ in range(header.goal_count):
         goal = self.unpack_goal(info_buffer, goal_struct)
         # A repeated script name indicates a duplicate (same ID and type); only the first is kept.
         if goal.script_name in [g.script_name for g in self.goals]:
             _LOGGER.warning(
                 f"Goal '{goal.goal_id}' is referenced multiple times in LuaInfo (same ID and type). Each goal ID "
                 f"should have (at most) one 'battle' goal and one 'logic' goal. All goal entries after the first "
                 f"will be ignored.")
         else:
             self.goals.append(goal)
Ejemplo n.º 14
0
class EventLayers(BaseEventLayers):
    """Never used in DS1 and very probably not actually supported by the engine."""

    STRUCT = BinaryStruct(
        ("two", "I", 2),
        ("event_layers", "I"),  # 32-bit bit field
        ("zero", "I", 0),  # format is a guess
        ("minus_one", "i", -1),  # format is a guess
        ("one", "I", 1),  # format is a guess
    )
Ejemplo n.º 15
0
class EventLayers(BaseEventLayers):
    """Never used in DS1 and very probably not actually supported by the engine."""
    STRUCT = BinaryStruct(
        ('two', 'I', 2),
        ('event_layers', 'I'),  # 32-bit bit field
        ('zero', 'I', 0),  # format is a guess
        ('minus_one', 'i', -1),  # format is a guess
        ('one', 'I', 1),  # format is a guess
    )
Ejemplo n.º 16
0
class Event(BaseEvent):
    """Event header record with 32-bit counts and offsets."""

    EVENT_ARG_TYPES = {}
    Instruction = Instruction
    EventArg = EventArg
    STRUCT = BinaryStruct(
        ("event_id", "I"),
        ("instruction_count", "I"),
        ("first_instruction_offset", "I"),
        ("event_arg_count", "I"),
        ("first_event_arg_offset", "i"),
        ("restart_type", "I"),
        "4x",
    )
Ejemplo n.º 17
0
    def __init__(self, fmg_source, remove_empty_entries=True, version=None):
        """Build an FMG from a dict of entries, raw bytes, a file path, or a BNDEntry.

        `version` may only be given with a dict source; for file content the
        version is detected automatically during unpacking.
        """
        self.pre_header_struct = BinaryStruct(*self.PRE_HEADER_STRUCT)
        self.version = None
        self.big_endian = False
        # Version-specific structs are filled in by `_set_version()`.
        self.header_struct = BinaryStruct()
        self.range_struct = BinaryStruct()
        self.string_offset_struct = BinaryStruct()
        self.fmg_path = None
        self.entries = {}

        if fmg_source is None:
            return

        if isinstance(fmg_source, dict):
            # Already-unpacked entries; adopt them and apply the requested version.
            self.entries = fmg_source
            self._set_version(version)
            return

        if version is not None:
            raise ValueError(
                "You cannot specify 'version' when loading an FMG from file content. The version will\n"
                "be automatically detected.")

        if isinstance(fmg_source, bytes):
            self.unpack(io.BytesIO(fmg_source), remove_empty_entries)
        elif isinstance(fmg_source, str):
            self.fmg_path = fmg_source
            with open(fmg_source, "rb") as fmg_file:
                self.unpack(fmg_file, remove_empty_entries)
        elif isinstance(fmg_source, BNDEntry):
            # Unpack directly from the BND entry's raw data.
            self.unpack(io.BytesIO(fmg_source.data), remove_empty_entries)
        else:
            raise TypeError(f"Invalid `fmg_source` type: {type(fmg_source)}")
Ejemplo n.º 18
0
class Event(BaseEvent):
    """Event header record with 64-bit counts and offsets."""

    Instruction = Instruction
    EventArg = EventArg
    EVENT_ARG_TYPES = {}
    STRUCT = BinaryStruct(
        ('event_id', 'Q'),
        ('instruction_count', 'Q'),
        ('first_instruction_offset', 'Q'),
        ('event_arg_count', 'Q'),
        ('first_event_arg_offset', 'q'),
        ('restart_type', 'I'),
        '4x',
    )
Ejemplo n.º 19
0
    def _set_version(self, version):
        """Resolve `version` to an internal index, set endianness, and rebuild structs.

        Raises ValueError for unrecognized version names.
        """
        # Every accepted alias mapped to its internal version index.
        aliases = {
            "des": 0, "0": 0,
            "ds1": 1, "ptd": 1, "ptde": 1, "dsr": 1, "ds2": 1, "bb": 1, "1": 1,
            "ds3": 2, "2": 2,
        }
        key = str(version).lower()
        if key not in aliases:
            raise ValueError(
                f"Unrecognized FMG version: {version}. Try one in: ('ds1', 'ds2', 'bb', 'ds3')."
            )
        self.version = v = aliases[key]
        self.big_endian = v == 0  # only version 0 (Demon's Souls) is big-endian

        byte_order = ">" if self.big_endian else "<"
        self.header_struct = BinaryStruct(*self.HEADER_STRUCTS[v],
                                          byte_order=byte_order)
        self.range_struct = BinaryStruct(*self.RANGE_STRUCTS[v],
                                         byte_order=byte_order)
        self.string_offset_struct = BinaryStruct(
            *self.STRING_OFFSET_STRUCTS[v], byte_order=byte_order)
Ejemplo n.º 20
0
class EMEVD(BaseEMEVD):
    """Bloodborne EMEVD container (64-bit offsets, UTF-16LE strings)."""

    Event = Event
    GAME_MODULE = sys.modules["soulstruct.events.bloodborne"]
    STRING_ENCODING = 'utf-16le'
    DCX_MAGIC = (68, 76)
    STRUCT = BinaryStruct(
        ('version', '4s', b'EVD\x00'),
        ('bloodborne_marker', 'I', 65280),
        ('unknown', 'I', 204),
        ('file_size_1', 'I'),
        ('event_count', 'Q'),
        ('event_table_offset', 'Q'),
        ('instruction_count', 'Q'),
        ('instruction_table_offset', 'Q'),
        '8x',  # unknown table, unused in all games
        ('unknown_table_offset', 'Q'),
        ('event_layers_count', 'Q'),  # unused in BB
        ('event_layers_table_offset', 'Q'),  # unused in BB
        ('event_arg_count', 'Q'),
        ('event_arg_table_offset', 'Q'),
        ('linked_files_count', 'Q'),
        ('linked_files_table_offset', 'Q'),
        ('base_arg_data_size', 'Q'),
        ('base_arg_data_offset', 'Q'),
        ('packed_strings_size', 'Q'),
        ('packed_strings_offset', 'Q'),
        # No more 4x at the end.
    )

    def compute_base_args_size(self, existing_data_size):
        """Sum of all event arg sizes, padded so the data ends on a 16-byte boundary."""
        total_arg_size = sum(event.total_args_size for event in self.events.values())
        # Pad so that (existing_data_size + total_arg_size) is a multiple of 16.
        total_arg_size += -(existing_data_size + total_arg_size) % 16
        return total_arg_size

    def pad_after_base_args(self, emevd_binary_after_base_args):
        """Null-pad packed base arg data out to a multiple of 16 bytes."""
        padding = -len(emevd_binary_after_base_args) % 16
        return emevd_binary_after_base_args + b'\0' * padding
Ejemplo n.º 21
0
class EMEVD(BaseEMEVD):
    """Bloodborne EMEVD container (64-bit offsets, UTF-16LE strings)."""

    Event = Event
    GAME_MODULE = sys.modules["soulstruct.events.bloodborne"]
    STRING_ENCODING = "utf-16le"
    DCX_MAGIC = (68, 76)
    STRUCT = BinaryStruct(
        ("version", "4s", b"EVD\x00"),
        ("bloodborne_marker", "I", 65280),
        ("unknown", "I", 204),
        ("file_size_1", "I"),
        ("event_count", "Q"),
        ("event_table_offset", "Q"),
        ("instruction_count", "Q"),
        ("instruction_table_offset", "Q"),
        "8x",  # unknown table, unused in all games
        ("unknown_table_offset", "Q"),
        ("event_layers_count", "Q"),  # unused in BB
        ("event_layers_table_offset", "Q"),  # unused in BB
        ("event_arg_count", "Q"),
        ("event_arg_table_offset", "Q"),
        ("linked_files_count", "Q"),
        ("linked_files_table_offset", "Q"),
        ("base_arg_data_size", "Q"),
        ("base_arg_data_offset", "Q"),
        ("packed_strings_size", "Q"),
        ("packed_strings_offset", "Q"),
        # No more 4x at the end.
    )

    def compute_base_args_size(self, existing_data_size):
        """Sum of all event arg sizes, padded so the data ends on a 16-byte boundary."""
        total_arg_size = sum(event.total_args_size for event in self.events.values())
        # Pad so that (existing_data_size + total_arg_size) is a multiple of 16.
        total_arg_size += -(existing_data_size + total_arg_size) % 16
        return total_arg_size

    def pad_after_base_args(self, emevd_binary_after_base_args):
        """Null-pad packed base arg data out to a multiple of 16 bytes."""
        padding = -len(emevd_binary_after_base_args) % 16
        return emevd_binary_after_base_args + b"\0" * padding
Ejemplo n.º 22
0
class ParamTable(object):
    """A table of game parameter entries, keyed by integer entry ID.

    Entries are unpacked/packed against a ParamDef definition obtained from
    `paramdef_bnd`. Supports dict-style access (`table[entry_id]`), iteration,
    and binary round-tripping via `unpack()` / `pack()` / `write_packed()`.
    """

    # TODO: This is currently for DeS/DS1 only.
    HEADER_STRUCT = BinaryStruct(
        ('name_data_offset', 'I'),
        ('entry_data_offset', 'H'),
        ('magic0', 'H'),  # 0 or 1
        ('magic1', 'H'),  # 1, 2, or 3
        ('entry_count', 'H'),
        ('param_name', '32j'),
        ('big_endian', 'b', 0),  # TODO: check, rather than assert
        ('magic2', 'H'),  # TODO: Actually two format flag bytes.
        ('unknown', 'B'),  # TODO: sometimes -1 in later formats.
    )

    ENTRY_POINTER_STRUCT = BinaryStruct(
        # These are packed together, and contain offsets into packed entry data and packed names.
        ('id', 'i'),
        ('data_offset', 'i'),
        ('name_offset', 'i'),
    )

    entries: Dict[int, ParamEntry]

    def __init__(self, param_source, paramdef_bnd):
        """Load a ParamTable from a dict of entries, raw bytes, a file path, or
        an object with a `.data` attribute (e.g. a BNDEntry).
        """
        # TODO: Need to specify params type somewhere.
        self.param_path = ''
        self.param_name = ''  # internal name (shift-jis) with capitals and underscores
        self.paramdef_bnd = PARAMDEF_BND(paramdef_bnd) if isinstance(
            paramdef_bnd, str) else paramdef_bnd
        self.entries = {}
        self.__magic = []
        self.__unknown = None
        self.nickname = ''

        if isinstance(param_source, dict):
            self.entries = param_source

        elif isinstance(param_source, bytes):
            self.unpack(BytesIO(param_source))

        elif isinstance(param_source, str):
            self.param_path = param_source
            with open(param_source, 'rb') as data:
                self.unpack(data)

        elif hasattr(param_source, 'data'):
            # Try reading .data attribute (e.g. BNDEntry).
            try:
                self.unpack(BytesIO(param_source.data))
            except ValueError:
                raise ValueError(
                    "ParamTable source has a '.data' attribute, but it could not be interpreted."
                )

    def __getitem__(self, entry_id):
        """Return the entry with the given ID, or raise KeyError."""
        if entry_id in self.entries:
            return self.entries[entry_id]
        raise KeyError(f"No entry with ID {entry_id} in {self.param_name}.")

    def __setitem__(self, entry_index, entry):
        """Assign a ParamEntry (or a dict with at least a 'name' field) to an ID."""
        if isinstance(entry, dict):
            if 'name' not in entry:
                raise ValueError("New entry must have a 'name' field.")
            entry = ParamEntry(entry, self.paramdef_bnd[self.param_name])
        if isinstance(entry, ParamEntry):
            self.entries[entry_index] = entry
        else:
            raise TypeError(
                "New entry must be a ParamEntry or a dictionary that contains all required fields."
            )

    def keys(self):
        """Entry IDs, in dict order."""
        return self.entries.keys()

    def values(self):
        """ParamEntry values, in dict order."""
        return self.entries.values()

    def items(self):
        """(entry_id, ParamEntry) pairs, in dict order."""
        return self.entries.items()

    def __iter__(self):
        return iter(self.entries)

    def __len__(self):
        return len(self.entries)

    @property
    def field_names(self):
        """Field names of an arbitrary entry (all entries share the same fields)."""
        # TODO: hack job. get nice field names and structure from fields.py.
        return self.entries[list(self.entries)[0]].field_names

    def get_field_info(self,
                       param_entry: ParamEntry = None,
                       field_name: str = None):
        """Return display info for one field (or the whole table if `field_name` is None)."""
        param_info = GAME_PARAM_INFO.get(self.param_name, None)
        if param_info is None:
            raise KeyError(
                f"No field info available for param table {self.param_name}.")
        if field_name is None:
            return param_info
        # Fall back to a placeholder tuple for undocumented fields.
        field_info = param_info.get(field_name,
                                    (field_name, True, None, "DOC-TODO"))
        if callable(field_info):
            # Some field info depends on the entry's own values.
            field_info = field_info(param_entry)
        return field_info

    # TODO: __repr__ method returns basic information about ParamTable (but not entire entry list).

    def unpack(self, param_buffer):
        """Unpack header, entry pointers, entry data, and entry names from a buffer."""
        header = self.HEADER_STRUCT.unpack(param_buffer)
        self.param_name = header.param_name
        self.__magic = [header.magic0, header.magic1, header.magic2]
        self.__unknown = header.unknown
        entry_data_offset = header.entry_data_offset
        name_data_offset = header.name_data_offset  # CANNOT BE TRUSTED IN VANILLA FILES! Off by +12 bytes.

        # Load entry pointer data.
        entry_pointers = self.ENTRY_POINTER_STRUCT.unpack(
            param_buffer, count=header.entry_count)

        # Entry size is lazily determined. TODO: Unpack entry data in sequence and associate with names separately.
        if len(entry_pointers) == 0:
            return
        elif len(entry_pointers) == 1:
            # NOTE: The only vanilla param in Dark Souls with one entry is LEVELSYNC_PARAM_ST (Remastered only).
            # Otherwise, we can trust the repacked name_data_offset from Soulstruct.
            if self.param_name == 'LEVELSYNC_PARAM_ST':
                entry_size = 220
            else:
                entry_size = name_data_offset - entry_data_offset
        else:
            # Entries are tightly packed, so the gap between the first two
            # pointers is the common entry size.
            entry_size = entry_pointers[1].data_offset - entry_pointers[
                0].data_offset

        # Store packed data blocks.
        param_buffer.seek(entry_data_offset)
        packed_entry_data = param_buffer.read(entry_size * header.entry_count)
        name_data_offset = param_buffer.tell(
        )  # Overrides untrustworthy value from header.
        packed_name_data = param_buffer.read()

        # Note that we no longer need to track buffer offset.
        for entry_struct in entry_pointers:
            relative_entry_offset = entry_struct.data_offset - entry_data_offset
            entry_data = packed_entry_data[
                relative_entry_offset:relative_entry_offset + entry_size]
            if entry_struct.name_offset != 0:
                relative_name_offset = entry_struct.name_offset - name_data_offset
                try:
                    name = read_chars_from_bytes(
                        packed_name_data,
                        offset=relative_name_offset,
                        encoding='shift_jis_2004',
                        ignore_encoding_error_for_these_chars=JUNK_ENTRY_NAMES)
                except ValueError:
                    _LOGGER.error(
                        f"Could not find null termination for entry name string in {self.param_name}.\n"
                        f"    Header: {header}\n"
                        f"    Entry Struct: {entry_struct}\n"
                        f"    Buffer: {packed_name_data}")
                    raise
            else:
                # Zero name offset means the entry is unnamed.
                name = ''
            self.entries[entry_struct.id] = ParamEntry(
                entry_data, self.paramdef_bnd[self.param_name], name=name)

    def pack(self, sort=True):
        """Pack the table to bytes: header, entry pointers, entry data, names.

        With `sort=True` (default), entries are packed in ascending ID order.
        """
        sorted_entries = sorted(
            self.entries.items()) if sort else self.entries.items()

        current_name_offset = 0
        name_offset_list = []
        data_offset = 0
        data_offset_list = []
        packed_names = b''
        packed_data = b''

        for entry_id, entry in sorted_entries:

            # Pack names with relative offsets (to be globally offset later).
            name_z_str = entry.name.encode('shift_jis_2004') + b'\x00'
            packed_names += name_z_str
            name_offset_list.append(current_name_offset)
            current_name_offset += len(name_z_str)

            # Pack entry data.
            packed_entry = entry.pack()
            packed_data += packed_entry
            data_offset_list.append(data_offset)
            data_offset += len(packed_entry)

        entry_pointer_table_offset = self.HEADER_STRUCT.size
        entry_data_offset = entry_pointer_table_offset + self.ENTRY_POINTER_STRUCT.size * len(
            sorted_entries)
        name_data_offset = entry_data_offset + len(packed_data)

        # Entries.
        entry_pointer_data = b''
        for i, (entry_id, _) in enumerate(sorted_entries):
            entry_pointer_data += self.ENTRY_POINTER_STRUCT.pack(
                dict(id=entry_id,
                     data_offset=entry_data_offset + data_offset_list[i],
                     name_offset=name_data_offset + name_offset_list[i]))

        # Header.
        header = self.HEADER_STRUCT.pack(
            dict(
                name_data_offset=name_data_offset,
                entry_data_offset=entry_data_offset,
                magic0=self.__magic[0],
                magic1=self.__magic[1],
                entry_count=len(sorted_entries),
                param_name=self.param_name,
                magic2=self.__magic[2],
                unknown=self.__unknown,
            ))

        return header + entry_pointer_data + packed_data + packed_names

    def write_packed(self, param_path=None):
        """Pack the table and write it to `param_path` (default: the source path)."""
        if param_path is None:
            if self.param_path:
                param_path = self.param_path
            else:
                raise ValueError(
                    "Param path could not be determined automatically (must be specified)."
                )
        if not param_path.endswith('.param'):
            param_path += '.param'

        with open(param_path, 'wb') as output:
            output.write(self.pack())

    def get_range(self, start, count):
        """Return `count` (id, entry) pairs starting at sorted index `start`."""
        return [(param_id, self[param_id])
                for param_id in sorted(self.entries)[start:start + count]]

    def pop(self, entry_id):
        """Useful for changing entry ID, for example."""
        return self.entries.pop(entry_id)
Ejemplo n.º 23
0
class ParamDef(object):
    """Read-only description of every field in one type of param table.

    No pack/write methods; these are essentially hard-coded structures, and therefore read-only.
    """

    HEADER_STRUCT = BinaryStruct(
        ('size', 'i'),
        ('field_table_offset', 'H', 48),
        ('unk1', 'H'),
        ('field_count', 'H'),
        ('field_size', 'H', 176),
        ('param_name', '32j'),
        ('unk2', 'h'),
        ('relative_field_description_offset', 'h', 104),
    )

    FIELD_STRUCT = BinaryStruct(
        ('debug_name', '64j'),
        ('debug_type', '8j'),
        ('debug_format', '8j'),  # %i, %u, %d, etc.
        ('default', 'f'),
        ('minimum', 'f'),
        ('maximum', 'f'),
        ('increment', 'f'),
        ('debug_display', 'i'),
        ('size', 'i'),
        ('description_offset', 'i'),  # offset of null-terminated string (unlimited length)
        ('internal_type', '32j'),  # could be an enum name (see params.enums)
        ('name', '32j'),
        ('id', 'i'),  # TODO: what is this?
    )

    def __init__(self, paramdef_source, param_name=None):
        """Build a ParamDef from one of several source types.

        Args:
            paramdef_source: a list of pre-built fields, packed `bytes`, a
                '.paramdef' file path (or a base name key of
                `PARAMDEF_BASE_NAMES`), or any object with a `.data` attribute
                of packed bytes (e.g. `BNDEntry`).
            param_name: required only when `paramdef_source` is a field list,
                since the name cannot be read from anywhere else.

        Raises:
            ValueError: if a field list is given without `param_name`.
            TypeError: if `paramdef_source` is not one of the supported types.
        """
        self.param_name = None
        self.fields = []
        # Name -> field lookup; set to None in `unpack()` if a duplicate field
        # name is ever encountered (disables lookup by name).
        self.fields_by_name = {}

        if isinstance(paramdef_source, list):
            if param_name is None:
                raise ValueError("`param_name` must be given to ParamDef if a list of fields is passed.")
            self.param_name = param_name
            self.fields = paramdef_source

        elif isinstance(paramdef_source, bytes):
            self.unpack(BytesIO(paramdef_source))

        elif isinstance(paramdef_source, str):
            if paramdef_source in PARAMDEF_BASE_NAMES:
                paramdef_source = PARAMDEF_BASE_NAMES[paramdef_source] + '.paramdef'
            self.paramdef_path = paramdef_source
            with open(paramdef_source, 'rb') as file:
                self.unpack(file)

        elif hasattr(paramdef_source, 'data'):
            # Try reading .data attribute (e.g. BNDEntry).
            self.unpack(BytesIO(paramdef_source.data))

        else:
            # Previously fell through silently, leaving an empty ParamDef.
            raise TypeError(f"Invalid `paramdef_source` type: {type(paramdef_source)}")

    def unpack(self, paramdef_buffer):
        """Unpack the header, field table, and description strings from a packed paramdef buffer."""
        header = self.HEADER_STRUCT.unpack(paramdef_buffer)
        self.param_name = header.param_name
        fields = self.FIELD_STRUCT.unpack(paramdef_buffer, count=header.field_count)
        # Everything after the field table is packed description string data.
        description_table_offset = paramdef_buffer.tell()
        packed_desc_data = paramdef_buffer.read()

        for field_index, field in enumerate(fields):
            if field.description_offset != 0:
                # Description offsets are absolute; rebase into the local blob.
                fdo = field.description_offset - description_table_offset
                field.description = read_chars_from_bytes(packed_desc_data, offset=fdo, encoding='shift_jis_2004')
            else:
                field.description = ''

            # Bit-size convention: 'name: N' or 'name:N' marks an N-bit field;
            # 'dummy8' fields named 'name[N]' are N bytes of padding.
            is_bits = field.name.find(': ')
            if is_bits == -1:
                is_bits = field.name.find(':')
            if is_bits != -1:
                try:
                    bit_size = int(field.name[is_bits + 1])
                except ValueError:
                    bit_size = int(field.name[is_bits + 2])
            elif field.internal_type == 'dummy8':
                is_pad = field.name.find('[')
                if is_pad != -1:
                    bit_size = int(field.name[is_pad + 1]) * 8
                else:
                    bit_size = 8
            else:
                bit_size = field.size * 8

            field.index = field_index
            field.bit_size = bit_size

            self.fields.append(field)
            if self.fields_by_name is not None:
                if field.name in self.fields_by_name:
                    _LOGGER.warning(
                        f"ParamDef field with name '{field.name}' was unpacked more than once, so you will not be able "
                        f"to access fields by name. (Should NOT happen in any known files.)")
                    # BUG FIX: actually disable name lookup, as the warning and
                    # the None check in `__getitem__` both promise.
                    self.fields_by_name = None
                else:
                    self.fields_by_name[field.name] = field

    def __getitem__(self, field_name):
        """Look up a field by name.

        Raises:
            AttributeError: if name lookup was disabled by duplicate field names.
            KeyError: if no field has the given name.
        """
        if self.fields_by_name is None:
            # BUG FIX: this previously *returned* the AttributeError instance
            # instead of raising it, silently handing an exception object to the caller.
            raise AttributeError("Cannot access ParamDef fields by name due to one or more repeated field name.\n"
                                 "This should NOT happen unless you've edited the ParamDef for some ungodly reason.")
        return self.fields_by_name[field_name]

    def __repr__(self):
        return f"ParamDef {self.param_name}:\n  " + "\n  ".join(
            [f"{field.index} | {field.debug_name} | {field.description}" for field in self.fields])
# ----- Ejemplo n.º 24 -----
class Instruction(BaseInstruction):
    """Game-script instruction subclass that declares the argument types of every
    known instruction, keyed by (instruction class, instruction index)."""

    EventLayers = EventLayers

    # Maps instruction class -> {instruction index -> argument type string}.
    # The format characters follow Python `struct` conventions ('b'/'B' int8/uint8,
    # 'h'/'H' int16/uint16, 'i'/'I' int32/uint32, 'f' float); presumably 's' marks
    # a string argument (class 2013 only) — TODO confirm against the arg packer.
    INSTRUCTION_ARG_TYPES = {
        2000: {
            0: "iII",
            1: "iI",
            2: "B",
            3: "B",
            4: "I",
            5: "B"
        },
        2001: {},
        2002: {
            1: "iI",
            2: "iIiBB",
            3: "iIi",
            4: "iIiBBi",
            5: "iIffifi",
            6: "iIiBBiB",
            7: "iIiB",
            8: "iBB"
        },
        2003: {
            1: "iiBB",
            2: "iB",
            3: "iB",
            4: "i",
            5: "iiiiiii",
            6: "iB",
            7: "iB",
            8: "iiB",
            9: "i",
            10: "h",
            11: "bihi",
            12: "i",
            13: "iIB",
            14: "BBi",
            15: "i",
            16: "I",
            17: "IIB",
            18: "iiBBB",
            19: "hh",
            20: "i",
            21: "B",
            22: "iiB",
            23: "i",
            24: "iii",
            25: "iiiii",
            26: "iB",
            27: "B",
            28: "i",
            29: "Bb",
            30: "B",
            31: "iII",
            32: "iI",
            33: "i",
            34: "iiii",
            35: "ii",
            36: "i",
            37: "",
            38: "",
            39: "",
            40: "",
            41: "iIiiIb",
            42: "iiiI",
            43: "iiiI",
            44: "B",
            45: "ihhhB",
            46: "hB",
            47: "hhhB",
            48: "iBfi",
            49: "i",
            50: "i",
            51: "iiiii",
            52: "i",
            53: "ib",
            54: "i",
        },
        2004: {
            1: "iB",
            2: "iB",
            3: "iBii",
            4: "iB",
            5: "iB",
            6: "iiB",
            7: "i",
            8: "iiB",
            9: "iiiiii",
            10: "iB",
            11: "ii",
            12: "iB",
            13: "ii",
            14: "iiiB",
            15: "iB",
            16: "i",
            17: "iiB",
            18: "iif",
            19: "ii",
            20: "i",
            21: "ii",
            22: "ihhiffBB",
            23: "iiiB",
            24: "iiii",
            25: "iif",
            26: "iBB",
            27: "iBB",
            28: "ii",
            29: "iB",
            30: "iB",
            31: "iB",
            32: "iiBii",
            33: "ii",
            34: "iBb",
            35: "iB",
            36: "iii",
            37: "i",
            38: "B",
            39: "iB",
            40: "iBiii",
            41: "iBii",
            42: "iBiii",
            43: "iB",
            44: "iB",
            45: "ii",
            46: "B",
            47: "",
            48: "iBB",
            49: "ii",
            50: "if",
            51: "iiiiii",
            52: "i",
            53: "i",
            54: "iB",
            55: "iiB",
        },
        2005: {
            1: "ib",
            2: "i",
            3: "iB",
            4: "iB",
            5: "iii",
            6: "iiB",
            7: "ii",
            8: "ib",
            9: "iiiiifff",
            10: "iBBB",
            11: "iih",
            12: "i",
            13: "iB",
            14: "iiiB",
            15: "i",
            16: "iiii",
            17: "iB",
        },
        2006: {
            1: "iB",
            2: "i",
            3: "iiii",
            4: "iii",
            5: "ii"
        },
        2007: {
            1: "ihhif",
            2: "B",
            3: "iB",
            4: "iB",
            5: "i",
            6: "i",
            7: "i",
            8: "i",
            9: "i"
        },
        2008: {
            1: "ii",
            2: "iiiiff",
            3: "BBH"
        },
        2009: {
            0: "iii",
            1: "iii",
            2: "iii",
            3: "iiffi",
            4: "i",
            5: "iiffii",
            6: "B"
        },
        2010: {
            1: "BHiii",
            2: "iii",
            3: "iB",
            4: "iB",
            5: "iB"
        },
        2011: {
            1: "iB",
            2: "iB",
            3: "iB"
        },
        2012: {
            1: "iB"
        },
        2013: {
            1: "s",
            2: "IsB",
            3: "I",
            4: "BsB"
        },
        1000: {
            0: "Bb",
            1: "BBb",
            2: "BBb",
            3: "B",
            4: "B",
            5: "Bbii",
            6: "Bbii",
            7: "BBb",
            8: "BBb",
            9: "f",
            101: "BBb",
            103: "B",
            105: "Bbii",
            107: "BBb",
        },
        1001: {
            0: "f",
            1: "i",
            2: "ff",
            3: "ii"
        },
        1003: {
            0: "BBi",
            1: "BBBi",
            2: "BBBi",
            3: "BBBii",
            4: "BBBii",
            5: "Bb",
            6: "Bb",
            7: "BBBB",
            8: "BBBB",
            9: "BBB",
            10: "BBB",
            11: "BB",
            12: "BB",
            13: "BB",
            14: "BBBB",
            15: "BBBB",
            16: "BBBB",
            101: "BBBi",
            103: "BBBii",
            105: "Bb",
            107: "BBBB",
            109: "BBB",
        },
        1005: {
            0: "Bi",
            1: "BBi",
            2: "BBi",
            101: "BBi"
        },
        1014: {
            0: "",
            1: "",
            2: "",
            3: "",
            4: "",
            5: "",
            6: "",
            7: "",
            8: "",
            9: ""
        },
        0: {
            0: "bBb",
            1: "bbii"
        },
        1: {
            0: "bf",
            1: "bi",
            2: "bff",
            3: "bii"
        },
        3: {
            0: "bBBi",
            1: "bBBii",
            2: "bBii",
            3: "bBiif",
            4: "bBiB",
            5: "biifhfiBi",
            6: "bb",
            7: "bBi",
            8: "bBBB",
            9: "bI",
            10: "bBiibi",
            11: "bBBB",
            12: "biBBI",
            13: "biifhfiBi",
            14: "bi",
            15: "bii",
            16: "bBiB",
            17: "bBB",
            18: "biifhfiBii",
            19: "biifhfiBii",
            20: "biBBiB",
            21: "bB",
            22: "bB",
            23: "biiB",
            24: "bii",
            25: "bBii",
            26: "bBb",
            27: "bb",
            28: "bb",
            29: "bBBB",
        },
        4: {
            0: "biB",
            1: "bii",
            2: "bibf",
            3: "bib",
            4: "biiB",
            5: "biiB",
            6: "biiib",
            7: "biB",
            8: "biiB",
            9: "biB",
            10: "bB",
            11: "bB",
            12: "bB",
            13: "bBI",
            14: "biBi",
            15: "biB",
        },
        5: {
            0: "bBi",
            1: "bii",
            2: "bi",
            3: "bibi"
        },
        11: {
            0: "bi",
            1: "bi",
            2: "bi"
        },
    }

    # Packed binary layout of one instruction; the "4x" entries are four bytes
    # of padding each (struct-style pad format).
    STRUCT = BinaryStruct(
        ("instruction_class", "I"),
        ("instruction_index", "I"),
        ("base_args_size", "Q"),
        ("first_base_arg_offset", "i"),
        "4x",
        ("first_event_layers_offset", "i"),  # unused in BB
        "4x",
    )
# ----- Ejemplo n.º 25 -----
class ParamDef:
    """Read-only specification of the fields in one type of param table.

    No pack/write methods; these are essentially hard-coded structures, and therefore read-only.
    """

    HEADER_STRUCT = BinaryStruct(
        ("size", "i"),
        ("field_table_offset", "H", 48),
        ("unk1", "H"),
        ("field_count", "H"),
        ("field_size", "H", 176),
        ("param_name", "32j"),
        ("unk2", "h"),
        ("relative_field_description_offset", "h", 104),
    )

    def __init__(self, paramdef_source, param_name=None):
        """Build a ParamDef from a field list, packed bytes, a file path, or a BNDEntry."""
        self.param_name = None
        self.paramdef_path = None
        self.fields = []
        self.param_info = {}

        if isinstance(paramdef_source, list):
            # Pre-built field list; the param name cannot be read from anywhere else.
            if param_name is None:
                raise ValueError(
                    "`param_name` must be given to `ParamDef` constructor if a list of fields is passed."
                )
            self.param_name = param_name
            self.fields = paramdef_source
        elif isinstance(paramdef_source, bytes):
            self.unpack(io.BytesIO(paramdef_source))
        elif isinstance(paramdef_source, str):
            # Known base names resolve to real '.paramdef' file names.
            if paramdef_source in PARAMDEF_BASE_NAMES:
                paramdef_source = PARAMDEF_BASE_NAMES[paramdef_source] + ".paramdef"
            self.paramdef_path = Path(paramdef_source)
            with self.paramdef_path.open("rb") as file:
                self.unpack(file)
        elif isinstance(paramdef_source, BNDEntry):
            self.unpack(io.BytesIO(paramdef_source.data))
        else:
            raise TypeError(
                f"Invalid `paramdef_source` type: {type(paramdef_source)}")

        try:
            self.param_info = get_param_info(self.param_name)
        except KeyError:
            # This param has no extra information.
            self.param_info = None

    def unpack(self, paramdef_buffer):
        """Read the header and all field definitions from a packed paramdef buffer."""
        header = self.HEADER_STRUCT.unpack(paramdef_buffer)
        self.param_name = header["param_name"]
        self.fields = ParamDefField.unpack_fields(
            self.param_name, paramdef_buffer, header["field_count"])

    def __getitem__(self, field_name) -> ParamDefField:
        """Look up a field by name; raises AttributeError if absent or ambiguous."""
        matches = [field for field in self.fields if field.name == field_name]
        if not matches:
            raise AttributeError(
                f"Field {field_name} does not exist in ParamDef.")
        if len(matches) > 1:
            raise AttributeError(
                f"Field {field_name} appears more than once in ParamDef.\n"
                "This should NOT happen unless you've edited the ParamDef for some ungodly reason."
            )
        return matches[0]

    def __repr__(self):
        # NOTE(review): `field.index` — `ParamDefField` defines `field_index`,
        # not `index`; confirm which attribute these field objects carry.
        field_lines = [
            f"{field.index} | {field.debug_name} | {field.description}"
            for field in self.fields
        ]
        return f"ParamDef {self.param_name}:\n  " + "\n  ".join(field_lines)
# ----- Ejemplo n.º 26 -----
class Instruction(BaseInstruction):
    """Game-script instruction subclass that declares the argument types of every
    known instruction, keyed by (instruction class, instruction index)."""

    EventLayers = EventLayers

    # Maps instruction class -> {instruction index -> argument type string}.
    # The format characters follow Python `struct` conventions ('b'/'B' int8/uint8,
    # 'h'/'H' int16/uint16, 'i'/'I' int32/uint32, 'f' float); presumably 's' marks
    # a string argument (class 2013 only) — TODO confirm against the arg packer.
    INSTRUCTION_ARG_TYPES = {
        2000: {
            0: 'iII',
            1: 'iI',
            2: 'B',
            3: 'B',
            4: 'I',
            5: 'B'
        },
        2001: {},
        2002: {
            1: 'iI',
            2: 'iIiBB',
            3: 'iIi',
            4: 'iIiBBi',
            5: 'iIffifi',
            6: 'iIiBBiB',
            7: 'iIiB',
            8: 'iBB'
        },
        2003: {
            1: 'iiBB',
            2: 'iB',
            3: 'iB',
            4: 'i',
            5: 'iiiiiii',
            6: 'iB',
            7: 'iB',
            8: 'iiB',
            9: 'i',
            10: 'h',
            11: 'bihi',
            12: 'i',
            13: 'iIB',
            14: 'BBi',
            15: 'i',
            16: 'I',
            17: 'IIB',
            18: 'iiBBB',
            19: 'hh',
            20: 'i',
            21: 'B',
            22: 'iiB',
            23: 'i',
            24: 'iii',
            25: 'iiiii',
            26: 'iB',
            27: 'B',
            28: 'i',
            29: 'Bb',
            30: 'B',
            31: 'iII',
            32: 'iI',
            33: 'i',
            34: 'iiii',
            35: 'ii',
            36: 'i',
            37: '',
            38: '',
            39: '',
            40: '',
            41: 'iIiiIb',
            42: 'iiiI',
            43: 'iiiI',
            44: 'B',
            45: 'ihhhB',
            46: 'hB',
            47: 'hhhB',
            48: 'iBfi',
            49: 'i',
            50: 'i',
            51: 'iiiii',
            52: 'i',
            53: 'ib',
            54: 'i'
        },
        2004: {
            1: 'iB',
            2: 'iB',
            3: 'iBii',
            4: 'iB',
            5: 'iB',
            6: 'iiB',
            7: 'i',
            8: 'iiB',
            9: 'iiiiii',
            10: 'iB',
            11: 'ii',
            12: 'iB',
            13: 'ii',
            14: 'iiiB',
            15: 'iB',
            16: 'i',
            17: 'iiB',
            18: 'iif',
            19: 'ii',
            20: 'i',
            21: 'ii',
            22: 'ihhiffBB',
            23: 'iiiB',
            24: 'iiii',
            25: 'iif',
            26: 'iBB',
            27: 'iBB',
            28: 'ii',
            29: 'iB',
            30: 'iB',
            31: 'iB',
            32: 'iiBii',
            33: 'ii',
            34: 'iBb',
            35: 'iB',
            36: 'iii',
            37: 'i',
            38: 'B',
            39: 'iB',
            40: 'iBiii',
            41: 'iBii',
            42: 'iBiii',
            43: 'iB',
            44: 'iB',
            45: 'ii',
            46: 'B',
            47: '',
            48: 'iBB',
            49: 'ii',
            50: 'if',
            51: 'iiiiii',
            52: 'i',
            53: 'i',
            54: 'iB',
            55: 'iiB'
        },
        2005: {
            1: 'ib',
            2: 'i',
            3: 'iB',
            4: 'iB',
            5: 'iii',
            6: 'iiB',
            7: 'ii',
            8: 'ib',
            9: 'iiiiifff',
            10: 'iBBB',
            11: 'iih',
            12: 'i',
            13: 'iB',
            14: 'iiiB',
            15: 'i',
            16: 'iiii',
            17: 'iB'
        },
        2006: {
            1: 'iB',
            2: 'i',
            3: 'iiii',
            4: 'iii',
            5: 'ii'
        },
        2007: {
            1: 'ihhif',
            2: 'B',
            3: 'iB',
            4: 'iB',
            5: 'i',
            6: 'i',
            7: 'i',
            8: 'i',
            9: 'i'
        },
        2008: {
            1: 'ii',
            2: 'iiiiff',
            3: 'BBH'
        },
        2009: {
            0: 'iii',
            1: 'iii',
            2: 'iii',
            3: 'iiffi',
            4: 'i',
            5: 'iiffii',
            6: 'B'
        },
        2010: {
            1: 'BHiii',
            2: 'iii',
            3: 'iB',
            4: 'iB',
            5: 'iB'
        },
        2011: {
            1: 'iB',
            2: 'iB',
            3: 'iB'
        },
        2012: {
            1: 'iB'
        },
        2013: {
            1: 's',
            2: 'IsB',
            3: 'I',
            4: 'BsB'
        },
        1000: {
            0: 'Bb',
            1: 'BBb',
            2: 'BBb',
            3: 'B',
            4: 'B',
            5: 'Bbii',
            6: 'Bbii',
            7: 'BBb',
            8: 'BBb',
            9: 'f',
            101: 'BBb',
            103: 'B',
            105: 'Bbii',
            107: 'BBb'
        },
        1001: {
            0: 'f',
            1: 'i',
            2: 'ff',
            3: 'ii'
        },
        1003: {
            0: 'BBi',
            1: 'BBBi',
            2: 'BBBi',
            3: 'BBBii',
            4: 'BBBii',
            5: 'Bb',
            6: 'Bb',
            7: 'BBBB',
            8: 'BBBB',
            9: 'BBB',
            10: 'BBB',
            11: 'BB',
            12: 'BB',
            13: 'BB',
            14: 'BBBB',
            15: 'BBBB',
            16: 'BBBB',
            101: 'BBBi',
            103: 'BBBii',
            105: 'Bb',
            107: 'BBBB',
            109: 'BBB'
        },
        1005: {
            0: 'Bi',
            1: 'BBi',
            2: 'BBi',
            101: 'BBi'
        },
        1014: {
            0: '',
            1: '',
            2: '',
            3: '',
            4: '',
            5: '',
            6: '',
            7: '',
            8: '',
            9: ''
        },
        0: {
            0: 'bBb',
            1: 'bbii'
        },
        1: {
            0: 'bf',
            1: 'bi',
            2: 'bff',
            3: 'bii'
        },
        3: {
            0: 'bBBi',
            1: 'bBBii',
            2: 'bBii',
            3: 'bBiif',
            4: 'bBiB',
            5: 'biifhfiBi',
            6: 'bb',
            7: 'bBi',
            8: 'bBBB',
            9: 'bI',
            10: 'bBiibi',
            11: 'bBBB',
            12: 'biBBI',
            13: 'biifhfiBi',
            14: 'bi',
            15: 'bii',
            16: 'bBiB',
            17: 'bBB',
            18: 'biifhfiBii',
            19: 'biifhfiBii',
            20: 'biBBiB',
            21: 'bB',
            22: 'bB',
            23: 'biiB',
            24: 'bii',
            25: 'bBii',
            26: 'bBb',
            27: 'bb',
            28: 'bb',
            29: 'bBBB'
        },
        4: {
            0: 'biB',
            1: 'bii',
            2: 'bibf',
            3: 'bib',
            4: 'biiB',
            5: 'biiB',
            6: 'biiib',
            7: 'biB',
            8: 'biiB',
            9: 'biB',
            10: 'bB',
            11: 'bB',
            12: 'bB',
            13: 'bBI',
            14: 'biBi',
            15: 'biB'
        },
        5: {
            0: 'bBi',
            1: 'bii',
            2: 'bi',
            3: 'bibi'
        },
        11: {
            0: 'bi',
            1: 'bi',
            2: 'bi'
        }
    }

    # Packed binary layout of one instruction; the '4x' entries are four bytes
    # of padding each (struct-style pad format).
    STRUCT = BinaryStruct(
        ('instruction_class', 'I'),
        ('instruction_index', 'I'),
        ('base_args_size', 'Q'),
        ('first_base_arg_offset', 'i'),
        '4x',
        ('first_event_layers_offset', 'i'),  # unused in BB
        '4x',
    )
# ----- Ejemplo n.º 27 -----
class ParamDefField:
    """Information about a single field in a ParamTable."""

    FIELD_STRUCT = BinaryStruct(
        ("debug_name", "64j"),
        ("debug_type", "8j"),
        ("debug_format", "8j"),  # %i, %u, %d, etc.
        ("default", "f"),
        ("minimum", "f"),
        ("maximum", "f"),
        ("increment", "f"),
        ("debug_display", "i"),
        ("size", "i"),
        ("description_offset", "i"),  # offset of null-terminated string (unlimited length)
        ("internal_type", "32j"),  # could be an enum name (see params.enums)
        ("name", "32j"),
        ("id", "i"),  # TODO: what is this?
    )

    def __init__(self,
                 field_struct: dict,
                 index: int,
                 description: str,
                 field_info: FieldDisplayInfo = None):
        """Store one unpacked field struct and compute the field's bit size."""
        self.field_index = index
        self.description = description
        self._display_info = field_info

        # Core field data.
        self.name = field_struct["name"]
        self.size = field_struct["size"]
        self.internal_type = field_struct["internal_type"]
        self.field_id = field_struct["id"]

        # Debug/display metadata.
        self.debug_name = field_struct["debug_name"]
        self.debug_type = field_struct["debug_type"]
        self.debug_format = field_struct["debug_format"]
        self.debug_display = field_struct["debug_display"]

        # Value constraints.
        self.default = field_struct["default"]
        self.minimum = field_struct["minimum"]
        self.maximum = field_struct["maximum"]
        self.increment = field_struct["increment"]

        self.bit_size = self.get_bit_size(self.name, self.internal_type, self.size)

    def get_display_info(self, entry: ParamEntry):
        """Return display information for `entry`; raises ValueError if none was given."""
        if not self._display_info:
            raise ValueError(
                f"No display information given for field '{self.name}'.")
        return self._display_info(entry)

    @classmethod
    def unpack_fields(cls, param_name: str, paramdef_buffer: io.BytesIO,
                      field_count: int):
        """Buffer should be at the start of the packed fields (which are followed by the packed descriptions)."""
        field_structs = cls.FIELD_STRUCT.unpack_count(paramdef_buffer,
                                                      count=field_count)
        # Everything after the field table is packed description string data.
        description_table_offset = paramdef_buffer.tell()
        packed_desc_data = paramdef_buffer.read()

        unpacked = []
        for index, field_struct in enumerate(field_structs):
            offset = field_struct["description_offset"]
            if offset != 0:
                # Offsets are absolute; rebase into the local description blob.
                description = read_chars_from_bytes(
                    packed_desc_data,
                    offset=offset - description_table_offset,
                    encoding="shift_jis_2004",
                )
            else:
                description = ""
            try:
                info = get_param_info_field(param_name, field_struct["name"])
            except KeyError:
                # No information given for this field.
                info = None
            unpacked.append(cls(field_struct, index, description, info))
        return unpacked

    @staticmethod
    def get_bit_size(name, internal_type, size):
        """Infer the field's size in bits from its name and type.

        'name: N' or 'name:N' marks an N-bit field; 'dummy8' fields named
        'name[N]' are N bytes of padding; otherwise `size` is a byte count.
        """
        marker = name.find(": ")
        if marker == -1:
            marker = name.find(":")
        if marker != -1:
            try:
                return int(name[marker + 1])
            except ValueError:
                return int(name[marker + 2])
        if internal_type == "dummy8":
            pad = name.find("[")
            return int(name[pad + 1]) * 8 if pad != -1 else 8
        return size * 8
# ----- Ejemplo n.º 28 -----
class ParamTable:
    """Table of `ParamEntry` instances keyed by entry ID.

    Can be loaded from a dict, packed bytes, a file path, or a `BNDEntry`, and
    packed back to the binary param format.
    """

    # TODO: This is currently for DeS/DS1 only.
    HEADER_STRUCT = BinaryStruct(
        ("name_data_offset", "I"),
        ("entry_data_offset", "H"),
        ("magic0", "H"),  # 0 or 1
        ("magic1", "H"),  # 1, 2, or 3
        ("entry_count", "H"),
        ("param_name", "32j"),
        ("big_endian", "b", 0),  # TODO: check, rather than assert
        ("magic2", "H"),  # TODO: Actually two format flag bytes.
        ("unknown", "B"),  # TODO: sometimes -1 in later formats.
    )

    ENTRY_POINTER_STRUCT = BinaryStruct(
        # These are packed together, and contain offsets into packed entry data and packed names.
        ("id", "i"),
        ("data_offset", "i"),
        ("name_offset", "i"),
    )

    # Maps entry ID -> `ParamEntry`.
    entries: tp.Dict[int, ParamEntry]

    def __init__(self, param_source, paramdef_bnd):
        """Load a ParamTable from a dict, packed bytes, a file path, or a BNDEntry.

        `paramdef_bnd` may be a path string (loaded via `PARAMDEF_BND`) or an
        already-loaded paramdef BND object.
        """
        self.param_path = ""
        self.param_name = ""  # internal name (shift-jis) with capitals and underscores
        if isinstance(paramdef_bnd, str):
            paramdef_bnd = PARAMDEF_BND(paramdef_bnd)
        self._paramdef_bnd = paramdef_bnd
        self.entries = {}
        self.__magic = []
        self.__unknown = None
        self._nickname = ""

        if isinstance(param_source, dict):
            self.entries = param_source
        elif isinstance(param_source, bytes):
            self.unpack(io.BytesIO(param_source))
        elif isinstance(param_source, str):
            self.param_path = param_source
            with open(param_source, "rb") as data:
                self.unpack(data)
        elif isinstance(param_source, BNDEntry):
            self.unpack(io.BytesIO(param_source.data))
        else:
            raise TypeError(
                f"Invalid `param_source` type: {type(param_source)}")

    def __getitem__(self, entry_id):
        """Return the entry with the given ID, or raise KeyError."""
        if entry_id not in self.entries:
            raise KeyError(f"No entry with ID {entry_id} in {self.param_name}.")
        return self.entries[entry_id]

    def __setitem__(self, entry_index, entry):
        """Insert `entry` — a `ParamEntry` or a field dict (which must include 'name')."""
        if isinstance(entry, dict):
            if "name" not in entry:
                raise ValueError("New entry must have a 'name' field.")
            entry = ParamEntry(entry, self._paramdef_bnd[self.param_name])
        if not isinstance(entry, ParamEntry):
            raise TypeError(
                "New entry must be a ParamEntry or a dictionary that contains all required fields."
            )
        self.entries[entry_index] = entry

    def keys(self):
        """Entry IDs, as a dict view."""
        return self.entries.keys()

    def values(self):
        """`ParamEntry` instances, as a dict view."""
        return self.entries.values()

    def items(self):
        """(entry_id, entry) pairs, as a dict view."""
        return self.entries.items()

    def __iter__(self):
        """Iterate over entry IDs."""
        return iter(self.entries)

    def __len__(self):
        """Number of entries in the table."""
        return len(self.entries)

    def pop(self, entry_id):
        """Remove and return the entry with the given ID."""
        return self.entries.pop(entry_id)

    @property
    def paramdef(self):
        """The `ParamDef` for this table, looked up by internal param name."""
        return self._paramdef_bnd[self.param_name]

    @property
    def param_info(self):
        """Extra information attached to this table's ParamDef (may be None)."""
        return self.paramdef.param_info

    @property
    def field_names(self):
        """Field names, taken from param info when available, else from entry 0."""
        info = self.paramdef.param_info
        if info:
            return [field.name for field in info["fields"]]
        return list(self.entries[0].fields.keys())

    @property
    def nickname(self):
        """Could return None for ambiguous tables like 'PlayerBehaviors'. Handled separately."""
        return self.paramdef.param_info["nickname"]

    # TODO: __repr__ method returns basic information about ParamTable (but not entire entry list).

    def unpack(self, param_buffer):
        """Unpack all entries from a packed param table buffer.

        Reads the header and entry pointer table, infers the entry size (it is
        not stored reliably in the header), then reads each entry's data and
        optional shift-JIS name.
        """
        header = self.HEADER_STRUCT.unpack(param_buffer)
        self.param_name = header["param_name"]
        self.__magic = [header["magic0"], header["magic1"], header["magic2"]]
        self.__unknown = header["unknown"]
        # Entry data offset in header not used. (It's an unsigned short, yet doesn't limit entry count to 5461.)
        name_data_offset = header[
            "name_data_offset"]  # CANNOT BE TRUSTED IN VANILLA FILES! Off by +12 bytes.

        # Load entry pointer data.
        entry_pointers = self.ENTRY_POINTER_STRUCT.unpack_count(
            param_buffer, count=header["entry_count"])
        entry_data_offset = param_buffer.tell()  # Reliable entry data offset.

        # Entry size is lazily determined. TODO: Unpack entry data in sequence and associate with names separately.
        if len(entry_pointers) == 0:
            return
        elif len(entry_pointers) == 1:
            # NOTE: The only vanilla param in Dark Souls with one entry is LEVELSYNC_PARAM_ST (Remastered only),
            # for which the entry size is hard-coded here. Otherwise, we can trust the repacked offset from Soulstruct
            # (and SoulsFormats, etc.).
            if self.param_name == "LEVELSYNC_PARAM_ST":
                entry_size = 220
            else:
                entry_size = name_data_offset - entry_data_offset
        else:
            # With two or more entries, size is the gap between consecutive data offsets.
            entry_size = entry_pointers[1]["data_offset"] - entry_pointers[0][
                "data_offset"]

        # Note that we no longer need to track buffer offset.
        for entry_struct in entry_pointers:
            param_buffer.seek(entry_struct["data_offset"])
            entry_data = param_buffer.read(entry_size)
            if entry_struct["name_offset"] != 0:
                try:
                    name = read_chars_from_buffer(
                        param_buffer,
                        offset=entry_struct["name_offset"],
                        encoding="shift_jis_2004",
                        reset_old_offset=False,  # no need to reset
                        ignore_encoding_error_for_these_chars=JUNK_ENTRY_NAMES,
                    )
                except ValueError:
                    param_buffer.seek(entry_struct["name_offset"])
                    _LOGGER.error(
                        f"Could not find null termination for entry name string in {self.param_name}.\n"
                        f"    Header: {header}\n"
                        f"    Entry Struct: {entry_struct}\n"
                        f"    30 chrs of name data: {param_buffer.read(30)}")
                    raise
            else:
                name = ""
            self.entries[entry_struct["id"]] = ParamEntry(entry_data,
                                                          self.paramdef,
                                                          name=name)

    def pack(self, sort=True):
        """Pack this param table into binary Param file data and return it as `bytes`.

        Args:
            sort: if True (default), entries are written in ascending ID order.

        Layout is header, entry pointer table, entry data, then null-terminated entry names. Offsets in the
        pointer table are computed relative to each section first and made absolute once section sizes are known.
        """
        items = sorted(self.entries.items()) if sort else self.entries.items()

        entry_ids = []
        name_chunks = []
        name_offsets = []
        data_chunks = []
        data_offsets = []
        names_size = 0
        data_size = 0

        for entry_id, entry in items:
            entry_ids.append(entry_id)

            # Junk names are raw bytes that were never decoded; real names are Shift JIS 2004.
            if entry.name in JUNK_ENTRY_NAMES:
                encoded_name = entry.name + b"\0"
            else:
                encoded_name = entry.name.encode("shift_jis_2004") + b"\0"
            name_chunks.append(encoded_name)
            name_offsets.append(names_size)
            names_size += len(encoded_name)

            entry_bytes = entry.pack()
            data_chunks.append(entry_bytes)
            data_offsets.append(data_size)
            data_size += len(entry_bytes)

        # Absolute section offsets.
        pointer_table_offset = self.HEADER_STRUCT.size
        entry_data_offset = pointer_table_offset + self.ENTRY_POINTER_STRUCT.size * len(entry_ids)
        name_data_offset = entry_data_offset + data_size

        # Entry pointer table with absolute data/name offsets.
        pointer_chunks = [
            self.ENTRY_POINTER_STRUCT.pack(
                dict(
                    id=entry_id,
                    data_offset=entry_data_offset + data_offsets[i],
                    name_offset=name_data_offset + name_offsets[i],
                ))
            for i, entry_id in enumerate(entry_ids)
        ]

        header = self.HEADER_STRUCT.pack(
            dict(
                name_data_offset=name_data_offset,
                # This ushort field isn't actually used, so it is simply clamped to the ushort range.
                entry_data_offset=min(entry_data_offset, 2**16 - 1),
                magic0=self.__magic[0],
                magic1=self.__magic[1],
                entry_count=len(entry_ids),
                param_name=self.param_name,
                magic2=self.__magic[2],
                unknown=self.__unknown,
            ))

        return header + b"".join(pointer_chunks) + b"".join(data_chunks) + b"".join(name_chunks)

    def write_packed(self, param_path=None):
        """Pack this param table and write it to `param_path`.

        Args:
            param_path: destination file path (string or `Path`). A ".param" suffix is appended if missing.
                If None, falls back to `self.param_path`; raises ValueError if that is also unset.

        Raises:
            ValueError: if no path is given and `self.param_path` is empty/None.
        """
        if param_path is None:
            if self.param_path:
                param_path = self.param_path
            else:
                raise ValueError(
                    "Param path could not be determined automatically (must be specified)."
                )
        # Coerce to `str` so `Path` instances (which have no `.endswith`) are accepted as well.
        param_path = str(param_path)
        if not param_path.endswith(".param"):
            param_path += ".param"

        with open(param_path, "wb") as output:
            output.write(self.pack())

    def get_range(self, start, count):
        """Return a list of ``(param_id, entry)`` pairs for `count` entries, starting at index `start` of the
        table's sorted entry IDs."""
        selected = []
        for param_id in sorted(self.entries)[start:start + count]:
            selected.append((param_id, self[param_id]))
        return selected

    def copy(self):
        """Return a deep copy of this param table, with all entries duplicated."""
        duplicate = copy.deepcopy(self)
        return duplicate
# Ejemplo n.º 29
# 0
class BND4(BaseBND):
    """BND4 binder archive.

    Handles headers in either byte order, optional UTF-16 entry paths, and the optional path hash table
    present in some BND4 resources (``hash_table_type == 4``).
    """

    # Endian-independent start of the BND4 header; always read little-endian first.
    HEADER_STRUCT_START = (
        ('bnd_version', '4s', b'BND4'), ('flag_1', '?'), ('flag_2', '?'), '2x',
        ('big_endian', 'i'))  # 0x00010000 (False) or 0x00000100 (True)
    # Remainder of the header; byte order depends on `big_endian` above.
    HEADER_STRUCT_ENDIAN = (
        ('entry_count', 'i'),
        ('header_size', 'q', 64),
        ('bnd_signature',
         '8s'),  # Real signature may be shorter, but packing will pad it out.
        ('entry_header_size', 'q'),
        ('data_offset', 'q'),
        ('utf16_paths', '?'),
        ('bnd_magic', 'b'),
        ('hash_table_type', 'B'),  # 0, 1, 4, or 128
        '5x',
        ('hash_table_offset', 'q'),  # only non-zero if hash_table_type == 4
    )

    # Base entry header fields; the optional fields below are appended according to `bnd_magic` (see unpack).
    BND_ENTRY_HEADER = (('entry_magic', 'B'), '3x', ('minus_one', 'i', -1),
                        ('compressed_data_size', 'q'))
    UNCOMPRESSED_DATA_SIZE = ('uncompressed_data_size', 'q')
    DATA_OFFSET = ('data_offset', 'I')
    ENTRY_ID = ('entry_id', 'i')
    NAME_OFFSET = ('path_offset', 'i')

    # Structures for the optional path hash table (hash_table_type == 4).
    HASH_TABLE_HEADER = BinaryStruct('8x', ('path_hashes_offset', 'q'),
                                     ('hash_group_count', 'I'),
                                     ('unknown', 'i', 0x00080810))
    PATH_HASH_STRUCT = BinaryStruct(
        ('hashed_value', 'I'),
        ('entry_index', 'i'),
    )
    HASH_GROUP_STRUCT = BinaryStruct(
        ('length', 'i'),
        ('index', 'i'),
    )

    def __init__(self, bnd_source=None, entry_class=None):
        """Create a BND4 archive.

        Args:
            bnd_source: optional source to load from; forwarded to `BaseBND.__init__`.
            entry_class: optional class used to wrap entry data; forwarded to `BaseBND.__init__`.
        """
        self.bnd_flags = (False, False)  # Two unknown bools.
        self.utf16_paths = False  # If False, paths are written in Shift-JIS.
        self.hash_table_type = 0
        self.hash_table_offset = 0
        super().__init__(bnd_source, entry_class)

    def unpack(self, bnd_buffer):
        """Unpack a BND4 archive from `bnd_buffer` (bytes or a file-like object) into this instance.

        Reads the endian-independent start of the header, detects byte order, re-reads the remaining header
        fields, builds the per-entry header struct from the BND magic flags, unpacks every entry, and caches
        the raw hash table bytes (if present) for re-use when repacking.

        Raises:
            ValueError: if the entry header size in the BND header disagrees with the size implied by the magic.
        """
        if isinstance(bnd_buffer, bytes):
            bnd_buffer = BytesIO(bnd_buffer)

        # First header fields are byte-order-independent; read them as little-endian.
        self.header_struct = BinaryStruct(*self.HEADER_STRUCT_START,
                                          byte_order='<')
        header = self.header_struct.unpack(bnd_buffer)
        self.bnd_flags = (header.flag_1, header.flag_2)
        self.bnd_version = header.bnd_version
        self.big_endian = header.big_endian == 0x00000100  # Magic not used to infer endianness here.
        byte_order = '>' if self.big_endian else '<'
        # Re-read the rest of the header with the detected byte order.
        header.update(
            self.header_struct.unpack(bnd_buffer,
                                      *self.HEADER_STRUCT_ENDIAN,
                                      byte_order=byte_order))
        self.bnd_signature = header.bnd_signature
        self.bnd_magic = header.bnd_magic
        self.utf16_paths = header.utf16_paths
        self.hash_table_type = header.hash_table_type
        self.hash_table_offset = header.hash_table_offset
        # UTF-16 paths follow the archive's byte order; otherwise Shift-JIS.
        path_encoding = ('utf-16be' if self.big_endian else
                         'utf-16le') if self.utf16_paths else 'shift-jis'

        if header.entry_header_size != header_size(self.bnd_magic):
            raise ValueError(
                f"Expected BND entry header size {header_size(self.bnd_magic)} based on magic\n"
                f"{hex(self.bnd_magic)}, but BND header says {header.entry_header_size}."
            )
        if self.hash_table_type != 4 and self.hash_table_offset != 0:
            _LOGGER.warning(
                f"Found non-zero hash table offset {self.hash_table_offset}, but header says this BND has no hash "
                f"table.")
        # Build the entry header struct: base fields plus whatever the magic value declares, in fixed order.
        self.entry_header_struct = BinaryStruct(*self.BND_ENTRY_HEADER,
                                                byte_order=byte_order)
        if has_uncompressed_size(self.bnd_magic):
            self.entry_header_struct.add_fields(self.UNCOMPRESSED_DATA_SIZE,
                                                byte_order=byte_order)
        self.entry_header_struct.add_fields(self.DATA_OFFSET,
                                            byte_order=byte_order)
        if has_id(self.bnd_magic):
            self.entry_header_struct.add_fields(self.ENTRY_ID,
                                                byte_order=byte_order)
        if has_path(self.bnd_magic):
            self.entry_header_struct.add_fields(self.NAME_OFFSET,
                                                byte_order=byte_order)
        if self.bnd_magic == 0x20:
            # Extra pad.
            self.entry_header_struct.add_fields('8x')
        if header.entry_header_size != self.entry_header_struct.size:
            _LOGGER.warning(
                f"Entry header size given in BND header ({header.entry_header_size}) does not match actual entry "
                f"header size ({self.entry_header_struct.size}).")
        for entry in BNDEntry.unpack(bnd_buffer,
                                     self.entry_header_struct,
                                     path_encoding=path_encoding,
                                     count=header.entry_count):
            self.add_entry(entry)

        # Read hash table. Raw bytes are cached so an unchanged BND can be repacked without rebuilding it.
        if self.hash_table_type == 4:
            bnd_buffer.seek(self.hash_table_offset)
            self._most_recent_hash_table = bnd_buffer.read(
                header.data_offset - self.hash_table_offset)
        self._most_recent_entry_count = len(self.binary_entries)
        self._most_recent_paths = [entry.path for entry in self.binary_entries]

    def load_unpacked_dir(self, directory):
        """Load this BND4 from a previously unpacked directory containing a `bnd_manifest.txt` file.

        Args:
            directory: path (string or `Path`) of the unpacked BND directory.

        Raises:
            ValueError: if `directory` does not exist or is not a directory.
        """
        directory = Path(directory)
        if not directory.is_dir():
            raise ValueError(f"Could not find unpacked BND directory {repr(directory)}.")
        manifest = directory / 'bnd_manifest.txt'
        with manifest.open('rb') as f:
            # Settings must be read in the exact order they were written by `bnd_manifest_header`.
            self.bnd_version = self.read_bnd_setting(f.readline(), 'version', assert_values=[b'BND4'])
            self.bnd_signature = self.read_bnd_setting(f.readline(), 'bnd_signature')
            self.bnd_magic = self.read_bnd_setting(f.readline(), 'bnd_magic', assert_type=int)
            self.big_endian = self.read_bnd_setting(f.readline(), 'big_endian', assert_type=bool)
            self.utf16_paths = self.read_bnd_setting(f.readline(), 'utf16_paths', assert_type=bool)
            self.hash_table_type = self.read_bnd_setting(f.readline(), 'hash_table_type', assert_type=int)
            self.bnd_flags = self.read_bnd_setting(f.readline(), 'unknown_flags', assert_type=tuple)
            self.dcx = self.read_bnd_setting(f.readline(), 'dcx', assert_type=tuple)

            self.add_entries_from_manifest_paths(f, directory)

            self._most_recent_hash_table = b''  # Hash table will need to be built on first pack.
            self._most_recent_entry_count = len(self.binary_entries)
            self._most_recent_paths = [entry.path for entry in self.binary_entries]

    def pack(self):
        """Pack this BND4 archive and return the complete binary file data as `bytes`.

        Entry headers, paths, and data are first assembled with section-relative offsets, then the offsets
        are made absolute once the final layout (header, entry headers, paths, optional hash table, data)
        is known. The path hash table is rebuilt only if the entry count or any entry path has changed since
        the last unpack/pack; otherwise the cached raw table bytes are written back unchanged.

        Raises:
            ValueError: if the classed and binary entry lists are out of sync.
            AttributeError: if a classed entry has no `pack()` method.
        """
        entry_header_dicts = []
        packed_entry_headers = b''
        packed_entry_paths = b''
        relative_entry_path_offsets = []
        packed_entry_data = b''
        relative_entry_data_offsets = []
        rebuild_hash_table = not self._most_recent_hash_table
        path_encoding = ('utf-16be' if self.big_endian else
                         'utf-16le') if self.utf16_paths else 'shift-jis'

        if len(self.binary_entries) != len(self._entries):
            raise ValueError(
                "Number of classed entries does not match number of binary entries.\n"
                "You must use the add_entry() method to add new BND entries.")

        # Any change in entry count or any entry path invalidates the cached hash table.
        if len(self.binary_entries) != self._most_recent_entry_count:
            rebuild_hash_table = True
        for i, entry in enumerate(self._entries):
            if not isinstance(entry, BNDEntry):
                if not hasattr(entry, 'pack'):
                    raise AttributeError(
                        f"Cannot pack BND: entry class {self._entry_class} has no pack() method."
                    )
                # Refresh binary entry data from the classed entry before packing.
                self.binary_entries[i].data = entry.pack()
                entry = self.binary_entries[i]
            if not rebuild_hash_table and entry.path != self._most_recent_paths[
                    i]:
                rebuild_hash_table = True

        self._most_recent_entry_count = len(self.binary_entries)
        self._most_recent_paths = [entry.path for entry in self.binary_entries]

        # Build entry headers (with section-relative offsets), packed paths, and packed data.
        for entry in self.binary_entries:

            packed_entry_data += b'\0' * 10  # Each entry is separated by ten pad bytes. (Probably not necessary.)

            entry_header_dict = {
                'entry_magic': entry.magic,
                'compressed_data_size': entry.data_size,
                'data_offset': len(packed_entry_data),
            }
            if has_id(self.bnd_magic):
                entry_header_dict['entry_id'] = entry.id
            if has_path(self.bnd_magic):
                entry_header_dict['path_offset'] = len(packed_entry_paths)
                relative_entry_path_offsets.append(
                    len(packed_entry_paths
                        ))  # Relative to start of packed entry paths.
                packed_entry_paths += entry.get_packed_path(path_encoding)
            if has_uncompressed_size(self.bnd_magic):
                entry_header_dict['uncompressed_data_size'] = entry.data_size

            relative_entry_data_offsets.append(len(packed_entry_data))
            entry_data, is_compressed = entry.get_data_for_pack()
            if is_compressed:
                entry_header_dict['compressed_data_size'] = len(entry_data)
            packed_entry_data += entry_data
            entry_header_dicts.append(entry_header_dict)

        # Compute absolute section offsets now that section sizes are known.
        entry_header_table_offset = self.header_struct.size
        entry_path_table_offset = entry_header_table_offset + self.entry_header_struct.size * len(
            self._entries)
        if self.hash_table_type == 4:
            hash_table_offset = entry_path_table_offset + len(
                packed_entry_paths)
            if rebuild_hash_table:
                packed_hash_table = self.build_hash_table()
            else:
                packed_hash_table = self._most_recent_hash_table
            entry_packed_data_offset = hash_table_offset + len(
                packed_hash_table)
        else:
            hash_table_offset = 0
            packed_hash_table = b''
            entry_packed_data_offset = entry_path_table_offset + len(
                packed_entry_paths)
        # BND file size not needed.

        packed_header = self.header_struct.pack(
            flag_1=self.bnd_flags[0],
            flag_2=self.bnd_flags[1],
            big_endian=self.big_endian,
            entry_count=len(self._entries),
            bnd_signature=self.bnd_signature,
            entry_header_size=self.entry_header_struct.size,
            data_offset=entry_packed_data_offset,
            utf16_paths=self.utf16_paths,
            bnd_magic=self.bnd_magic,
            hash_table_type=self.hash_table_type,
            hash_table_offset=hash_table_offset,
        )

        # Convert relative offsets to absolute and pack entry headers.
        for entry_header_dict in entry_header_dicts:
            entry_header_dict['data_offset'] += entry_packed_data_offset
            if has_path(self.bnd_magic):
                entry_header_dict['path_offset'] += entry_path_table_offset
            packed_entry_headers += self.entry_header_struct.pack(
                entry_header_dict)

        return packed_header + packed_entry_headers + packed_entry_paths + packed_hash_table + packed_entry_data

    @property
    def bnd_manifest_header(self):
        """Header text written at the top of `bnd_manifest.txt` when this BND is unpacked to a directory."""
        bnd_signature = self.bnd_signature.rstrip(b'\0').decode()
        manifest_lines = [
            f"version = BND4",
            f"bnd_signature = {bnd_signature}",
            f"bnd_magic = {repr(self.bnd_magic)}",
            f"big_endian = {self.big_endian}",
            f"utf16_paths = {self.utf16_paths}",
            f"hash_table_type = {self.hash_table_type}",
            f"unknown_flags = {repr(self.bnd_flags)}",
            f"dcx = {repr(self.dcx)}",
        ]
        # Each setting on its own line, followed by a blank separator line.
        return "\n".join(manifest_lines) + "\n\n"

    @staticmethod
    def is_prime(p):
        if p < 2:
            return False
        if p == 2:
            return True
        if (p % 2) == 0:
            return False
        for i in range(3, p // 2, 2):
            if (p % i) == 0:
                return False
            if i**2 > p:
                return True
        return True

    def build_hash_table(self):
        """ Some BND4 resources include tables of hashed entry paths, which aren't needed to read file contents, but
        need to be re-hashed to properly pack the file in case any paths have changed (or the number of entries). """

        # Group count set to first prime number greater than or equal to the number of entries divided by 7.
        for p in range(len(self._entries) // 7, 100000):
            if self.is_prime(p):
                group_count = p
                break
        else:
            raise ValueError("Hash group count could not be determined.")

        # Bucket each entry's path hash into group `hash % group_count`.
        hashes = []
        hash_lists = [[] for _ in range(group_count)]

        for entry_index, entry in enumerate(self.binary_entries):
            hashes.append(self.path_hash(entry.path))
            list_index = hashes[-1] % group_count
            hash_lists[list_index].append((hashes[-1], entry_index))

        for hash_list in hash_lists:
            hash_list.sort()  # Sort by hash value.

        # Flatten buckets into one hash array plus per-group (index, length) records.
        hash_groups = []
        path_hashes = []

        total_hash_count = 0
        for hash_list in hash_lists:
            first_hash_index = total_hash_count
            for path_hash in hash_list:
                path_hashes.append({
                    'hashed_value': path_hash[0],
                    'entry_index': path_hash[1]
                })
                total_hash_count += 1
            hash_groups.append({
                'index': first_hash_index,
                'length': total_hash_count - first_hash_index
            })

        # Table layout: header, group records, then the flat hash array.
        packed_hash_groups = self.HASH_GROUP_STRUCT.pack(hash_groups)
        packed_hash_table_header = self.HASH_TABLE_HEADER.pack(
            path_hashes_offset=self.HASH_TABLE_HEADER.size +
            len(packed_hash_groups),
            hash_group_count=group_count,
        )
        packed_path_hashes = self.PATH_HASH_STRUCT.pack(path_hashes)

        return packed_hash_table_header + packed_hash_groups + packed_path_hashes

    @staticmethod
    def path_hash(path_string):
        """ Simple string-hashing algorithm used by FROM. Strings use forward-slash path separators and always start
        with a forward slash. """
        hashable = path_string.replace('\\', '/')
        if not hashable.startswith('/'):
            hashable = '/' + hashable
        h = 0
        for i, s in enumerate(hashable):
            h += i * 37 + ord(s)
        return h