def pack_bone_indices(self, writer: BinaryWriter, bone_indices_start: int):
    """Fill the reserved bone index offset and, when present, write this mesh's bone indices."""
    if self.bone_indices:
        writer.fill("__bone_offset", writer.position, obj=self)
        writer.pack(f"{len(self.bone_indices)}i", *self.bone_indices)
    else:
        # Weird case for byte-perfect writing: empty meshes point at the shared start offset.
        writer.fill("__bone_offset", bone_indices_start, obj=self)
def pack(self) -> bytes:
    """Pack TPF file to bytes."""
    # Xbox 360 and PS3 TPFs are big-endian; PC/other platforms are little-endian.
    console_big_endian = self.platform in {TPFPlatform.Xbox360, TPFPlatform.PS3}
    writer = BinaryWriter(big_endian=console_big_endian)
    writer.append(b"TPF\0")
    writer.reserve("data_size", "i")
    writer.pack("i", len(self.textures))
    for header_byte in (self.platform, self.tpf_flags, self.encoding):
        writer.pack("b", header_byte)
    writer.pad(1)
    for index, tex in enumerate(self.textures):
        tex.pack_header(writer, index, self.platform, self.tpf_flags)
    for index, tex in enumerate(self.textures):
        tex.pack_name(writer, index, self.encoding)
    data_start = writer.position
    for index, tex in enumerate(self.textures):
        # TKGP notes: padding varies wildly across games, so don't worry about it too much.
        if tex.data:
            writer.pad_align(4)
        tex.pack_data(writer, index)
    writer.fill("data_size", writer.position - data_start)
    return writer.finish()
def pack(self) -> tp.Tuple[bytes, bytes]:
    """Pack split binder to bytes: returns (BHD3 header file bytes, BDT3 data file bytes)."""
    header_writer = BinaryWriter(
        big_endian=self.big_endian or self.flags.is_big_endian)
    data_writer = BinaryWriter(
        big_endian=self.big_endian or self.flags.is_big_endian)
    self.pack_header(header_writer)
    # Entries are always written in ascending ID order.
    entries = list(sorted(self._entries, key=lambda e: e.id))
    entry_headers = [entry.get_header(self.flags) for entry in entries]
    for entry_header in entry_headers:
        entry_header.pack_bnd3(header_writer, self.flags, self.bit_big_endian)
    if self.flags.has_names:
        for entry, entry_header in zip(entries, entry_headers):
            header_writer.fill("path_offset", header_writer.position, obj=entry_header)
            # NOTE: BND paths are *not* encoded in `shift_jis_2004`, unlike most other strings, but are `shift-jis`.
            # The relevant difference is that escaped backslashes are encoded as the yen symbol in `shift_jis_2004`.
            header_writer.append(
                entry.get_packed_path(encoding="shift-jis"))
    # Useless BDT3 header.
    data_writer.append(b"BDF3")
    data_writer.pack("8s", self.signature.encode("ascii"))
    data_writer.pad(4)
    for entry, entry_header in zip(entries, entry_headers):
        # Data offsets reserved in the header file point into the *data* file.
        header_writer.fill("data_offset", data_writer.position, obj=entry_header)
        data_writer.append(entry.data)
    return header_writer.finish(), data_writer.finish()
def pack_vertex_buffer_indices(self, writer: BinaryWriter, first_vertex_buffer_index: int):
    """Fill the reserved vertex buffer offset and write this mesh's consecutive buffer indices."""
    writer.fill("__vertex_buffer_offset", writer.position, obj=self)
    count = len(self.vertex_buffers)
    indices = range(first_vertex_buffer_index, first_vertex_buffer_index + count)
    writer.pack(f"{count}i", *indices)
def pack_data(self, writer: BinaryWriter, index: int):
    """Fill this texture's reserved data offset/size fields and append its data.

    Texture flags 2 and 3 indicate zlib-compressed data.
    """
    writer.fill(f"file_data_{index}", writer.position)
    compress = self.texture_flags in {2, 3}
    data = zlib.compress(self.data, level=7) if compress else self.data
    writer.fill(f"file_size_{index}", len(data))
    writer.append(data)
def pack_name(self, writer: BinaryWriter, index: int, encoding: int):
    """Fill this texture's reserved name offset and append its null-terminated name.

    Raises:
        ValueError: if `encoding` is not 0, 1, or 2.
    """
    writer.fill(f"file_name_{index}", writer.position)
    if encoding == 1:
        # UTF-16 in the writer's endianness, double-null terminated.
        utf16 = "utf-16-be" if writer.big_endian else "utf-16-le"
        packed_name = self.name.encode(encoding=utf16) + b"\0\0"
    elif encoding in {0, 2}:
        packed_name = self.name.encode(encoding="shift-jis") + b"\0"
    else:
        raise ValueError(
            f"Invalid TPF texture encoding: {encoding}. Must be 0, 1, or 2."
        )
    writer.append(packed_name)
def pack(self) -> bytes:
    """Pack BND4 binder to bytes.

    Caches the entry count, paths, and hash table between calls so the hash
    table is only rebuilt when entries have been added, removed, or renamed.
    """
    writer = BinaryWriter(big_endian=self.big_endian)
    self.pack_header(writer)
    path_encoding = ("utf-16-be" if self.big_endian else "utf-16-le") if self.unicode else "shift-jis"

    # Decide whether the cached hash table is stale. (Fix: removed a dead
    # pre-assignment of `rebuild_hash_table` that was always superseded here —
    # in the `else` branch the cached table is truthy, so it was always False.)
    if not self._most_recent_hash_table or len(self._entries) != self._most_recent_entry_count:
        rebuild_hash_table = True
    else:
        # Rebuild only if any entry path has changed since the last pack.
        rebuild_hash_table = any(
            entry.path != self._most_recent_paths[i]
            for i, entry in enumerate(self._entries)
        )
    self._most_recent_entry_count = len(self._entries)
    self._most_recent_paths = [entry.path for entry in self._entries]

    # Entries are always written in ascending ID order.
    entries = sorted(self._entries, key=lambda e: e.id)
    entry_headers = [entry.get_header(self.flags) for entry in entries]
    for entry_header in entry_headers:
        entry_header.pack_bnd4(writer, self.flags, self.bit_big_endian)
    if self.flags.has_names:
        for entry, entry_header in zip(entries, entry_headers):
            writer.fill("path_offset", writer.position, obj=entry_header)
            # NOTE: BND paths are *not* encoded in `shift_jis_2004`, unlike most other strings, but are `shift-jis`.
            # The relevant difference is that escaped backslashes are encoded as the yen symbol in `shift_jis_2004`.
            writer.append(entry.get_packed_path(encoding=path_encoding))
    if self.hash_table_type == 4:
        writer.fill("hash_table_offset", writer.position)
        if rebuild_hash_table:
            writer.append(BinderHashTable.build_hash_table(self._entries))
        else:
            writer.append(self._most_recent_hash_table)
    writer.fill("data_offset", writer.position)
    for entry, entry_header in zip(entries, entry_headers):
        writer.fill("data_offset", writer.position, obj=entry_header)
        # Ten pad bytes between each entry (for byte-perfect writes).
        writer.append(entry.data + b"\0" * 10)
    return writer.finish()
def pack(self) -> bytes:
    """Pack BND3 binder to bytes."""
    use_big_endian = self.big_endian or self.flags.is_big_endian
    writer = BinaryWriter(big_endian=use_big_endian)
    self.pack_header(writer)
    # Entries are always written in ascending ID order.
    sorted_entries = sorted(self._entries, key=lambda e: e.id)
    headers = [e.get_header(self.flags) for e in sorted_entries]
    for hdr in headers:
        hdr.pack_bnd3(writer, self.flags, self.bit_big_endian)
    if self.flags.has_names:
        for entry, hdr in zip(sorted_entries, headers):
            writer.fill("path_offset", writer.position, obj=hdr)
            # NOTE: BND paths are *not* encoded in `shift_jis_2004`, unlike most other strings, but are `shift-jis`.
            # The relevant difference is that escaped backslashes are encoded as the yen symbol in `shift_jis_2004`.
            writer.append(entry.get_packed_path(encoding="shift-jis"))
    for entry, hdr in zip(sorted_entries, headers):
        writer.fill("data_offset", writer.position, obj=hdr)
        writer.append(entry.data)
    writer.fill("file_size", writer.position)
    return writer.finish()
def pack_face_set_indices(self, writer: BinaryWriter, first_face_set_index: int):
    """Fill the reserved face set offset and write this mesh's consecutive face set indices."""
    writer.fill("__face_set_offset", writer.position, obj=self)
    count = len(self.face_sets)
    indices = range(first_face_set_index, first_face_set_index + count)
    writer.pack(f"{count}i", *indices)
def pack_bounding_box(self, writer: BinaryWriter):
    """Fill the reserved bounding box offset; pack the box if present, else write a zero offset."""
    if self.bounding_box is not None:
        writer.fill("__bounding_box_offset", writer.position, obj=self)
        self.bounding_box.pack(writer)
    else:
        writer.fill("__bounding_box_offset", 0, obj=self)
def pack(self) -> tp.Tuple[bytes, bytes]:
    """Pack split binder to bytes: returns (BHD4 header file bytes, BDT4 data file bytes).

    Caches the entry count, paths, and hash table between calls so the hash
    table is only rebuilt when entries have been added, removed, or renamed.
    """
    use_big_endian = self.big_endian or self.flags.is_big_endian
    header_writer = BinaryWriter(big_endian=use_big_endian)
    data_writer = BinaryWriter(big_endian=use_big_endian)
    self.pack_header(header_writer)
    path_encoding = ("utf-16-be" if self.big_endian else "utf-16-le") if self.unicode else "shift-jis"
    # Entries are always written in ascending ID order.
    entries = sorted(self._entries, key=lambda e: e.id)

    # Decide whether the cached hash table is stale. (Fix: removed a dead
    # pre-assignment of `rebuild_hash_table` that was always superseded here —
    # in the `else` branch the cached table is truthy, so it was always False.)
    if not self._most_recent_hash_table or len(entries) != self._most_recent_entry_count:
        rebuild_hash_table = True
    else:
        # Rebuild only if any entry path has changed since the last pack.
        rebuild_hash_table = any(
            entry.path != self._most_recent_paths[i]
            for i, entry in enumerate(entries)
        )
    self._most_recent_entry_count = len(entries)
    self._most_recent_paths = [entry.path for entry in entries]

    entry_headers = [entry.get_header(self.flags) for entry in entries]
    for entry_header in entry_headers:
        entry_header.pack_bnd4(header_writer, self.flags, self.bit_big_endian)
    if self.flags.has_names:
        for entry, entry_header in zip(entries, entry_headers):
            header_writer.fill("path_offset", header_writer.position, obj=entry_header)
            # NOTE: BND paths are *not* encoded in `shift_jis_2004`, unlike most other strings, but are `shift-jis`.
            # The relevant difference is that escaped backslashes are encoded as the yen symbol in `shift_jis_2004`.
            header_writer.append(entry.get_packed_path(encoding=path_encoding))
    if self.hash_table_type == 4:
        header_writer.fill("hash_table_offset", header_writer.position)
        if rebuild_hash_table:
            header_writer.append(BinderHashTable.build_hash_table(self._entries))
        else:
            header_writer.append(self._most_recent_hash_table)
    header_writer.fill("data_offset", header_writer.position)

    # Useless BDT4 header.
    data_writer.append(b"BDF4")
    data_writer.pack("?", self.unknown1)
    data_writer.pack("?", self.unknown2)
    data_writer.pad(3)
    data_writer.pack("?", self.big_endian)
    data_writer.pack("?", not self.bit_big_endian)
    data_writer.pad(5)
    data_writer.pack("q", 0x30)  # header size
    data_writer.pack("8s", self.signature.encode("ascii"))
    data_writer.pad(16)

    for entry, entry_header in zip(entries, entry_headers):
        # Data offsets reserved in the header file point into the *data* file.
        header_writer.fill("data_offset", data_writer.position, obj=entry_header)
        # Ten pad bytes between each entry (for byte-perfect writes).
        data_writer.append(entry.data + b"\0" * 10)
    return header_writer.finish(), data_writer.finish()
def pack_members(self, writer: BinaryWriter):
    """Fill the reserved member offset and pack each layout member at its running struct offset."""
    writer.fill("__member_offset", writer.position, obj=self)
    offset = 0
    for member in self.members:
        member.pack(writer, offset)
        offset += member.size
def fill_gx_offset(self, writer: BinaryWriter, gx_offsets: tp.List[int]):
    """Fill the reserved GX offset: zero when this material has no GX list (index -1)."""
    if self.gx_index == -1:
        gx_offset = 0
    else:
        gx_offset = gx_offsets[self.gx_index]
    writer.fill("__gx_offset", gx_offset, obj=self)
def pack_textures(self, writer: BinaryWriter, first_texture_index: int):
    """Fill the reserved first texture index and pack each of this material's textures."""
    writer.fill("_first_texture_index", first_texture_index, obj=self)
    for tex in self.textures:
        tex.pack(writer)