def unpack(self, reader: BinaryReader, bounding_box_has_unknown: bool = False):
    """Read this mesh's struct and its indirect data (bounding box, bone/face-set/vertex-buffer indices).

    Args:
        reader: positioned at the start of the mesh struct.
        bounding_box_has_unknown: if True, the bounding box is read as `BoundingBoxWithUnknown`
            instead of plain `BoundingBox`. (Default was `None`, contradicting the `bool`
            annotation; `False` is behaviorally identical since both are falsy.)
    """
    mesh = reader.unpack_struct(self.STRUCT)

    bounding_box_offset = mesh.pop("__bounding_box_offset")
    if bounding_box_offset == 0:
        self.bounding_box = None  # mesh has no bounding box
    else:
        with reader.temp_offset(bounding_box_offset):
            self.bounding_box = (
                BoundingBoxWithUnknown(reader) if bounding_box_has_unknown else BoundingBox(reader)
            )

    bone_count = mesh.pop("__bone_count")
    with reader.temp_offset(mesh.pop("__bone_offset")):
        self.bone_indices = list(reader.unpack(f"<{bone_count}i"))

    face_set_count = mesh.pop("__face_set_count")
    with reader.temp_offset(mesh.pop("__face_set_offset")):
        self._face_set_indices = list(reader.unpack(f"<{face_set_count}i"))

    vertex_count = mesh.pop("__vertex_buffer_count")
    with reader.temp_offset(mesh.pop("__vertex_buffer_offset")):
        self._vertex_buffer_indices = list(reader.unpack(f"<{vertex_count}i"))

    # Remaining struct fields are assigned directly as attributes.
    self.set(**mesh)
def unpack_from(cls, reader: BinaryReader, data_offset: int):
    """Read one FSB sample: its header, trailing metadata bytes, and compressed audio data."""
    header = FSBSampleHeader(reader)
    # Metadata is whatever remains of the header's declared total size.
    metadata = reader.read(header.total_size - FSBSampleHeader.STRUCT.size)
    with reader.temp_offset(data_offset):
        sample_data = reader.read(header.compressed_length)
    return FSBSample(header, metadata, sample_data)
def _check_big_endian(reader: BinaryReader):
    """Return True if the LuaInfo byte-order marker at offset 4 indicates big-endian."""
    with reader.temp_offset(4):
        marker = reader.unpack_value("i")
    if marker == 0x1000000:
        return True
    if marker == 0x1:
        return False
    raise ValueError(f"Invalid marker for LuaInfo byte order: {hex(marker)}")
def unpack(self, reader: BinaryReader):
    """Read the buffer layout struct and all of its member definitions."""
    layout = reader.unpack_struct(self.STRUCT)
    member_count = layout.pop("__member_count")
    self.members = []
    with reader.temp_offset(layout.pop("__member_offset")):
        # Each member's offset within the vertex struct is the running size so far.
        offset_in_struct = 0
        for _ in range(member_count):
            member = LayoutMember(reader, struct_offset=offset_in_struct)
            self.members.append(member)
            offset_in_struct += member.layout_type.size()
def unpack(
    self,
    reader: BinaryReader,
    encoding: str,
    version: Version,
    gx_lists: tp.List[GXList],
    gx_list_indices: tp.Dict[int, int],
):
    """Read the material struct, resolve its name/MTD strings, and register its GX list.

    `gx_lists` and `gx_list_indices` are shared accumulators: a GX list at a given file
    offset is read only once and referenced by index thereafter.
    """
    material = reader.unpack_struct(self.STRUCT)
    self.name = reader.unpack_string(offset=material.pop("__name__z"), encoding=encoding)
    self.mtd_path = reader.unpack_string(offset=material.pop("__mtd_path__z"), encoding=encoding)
    gx_offset = material.pop("__gx_offset")
    if gx_offset == 0:
        self.gx_index = -1  # material has no GX list
    else:
        known_index = gx_list_indices.get(gx_offset)
        if known_index is not None:
            # GX list at this offset was already read; just reference it.
            self.gx_index = known_index
        else:
            # New GX list: record its index, then read it.
            self.gx_index = gx_list_indices[gx_offset] = len(gx_lists)
            with reader.temp_offset(gx_offset):
                gx_lists.append(GXList(reader, version))
    self.set(**material)
def unpack(self, reader: BinaryReader, header_vertex_index_size: int, vertex_data_offset: int):
    """Read the face set struct and its vertex indices.

    A stored index size of 0 means "use the FLVER header's index size". Size 8
    (edge compression) is unsupported; only 16- and 32-bit indices are read.
    """
    face_set = reader.unpack_struct(self.STRUCT)
    index_size = face_set.pop("__vertex_index_size") or header_vertex_index_size
    if index_size == 8:
        raise NotImplementedError("Soulstruct cannot support edge-compressed FLVER face sets.")
    if index_size not in {16, 32}:
        raise ValueError(f"Unsupported face set index size: {index_size}")
    index_count = face_set.pop("__vertex_indices_count")
    index_offset = face_set.pop("__vertex_indices_offset")
    with reader.temp_offset(vertex_data_offset + index_offset):
        type_char = "H" if index_size == 16 else "I"
        self.vertex_indices = list(reader.unpack(f"<{index_count}{type_char}"))
    self.set(**face_set)
def read_buffer(
    self,
    reader: BinaryReader,
    layouts: list[BufferLayout],
    vertices: list[Vertex],
    vertex_data_offset: int,
    uv_factor: int,
):
    """Read all vertices of this buffer from `reader` into the given `Vertex` instances.

    Raises:
        ValueError: if the buffer has zero vertices, or the stored vertex size does not
            match `buffer_length / vertex_count`.
        VertexBufferSizeError: if the stored vertex size does not match the layout size.
    """
    layout = layouts[self.layout_index]
    layout_size = layout.get_total_size()
    if self.vertex_count == 0:
        # Guard: the size check below would otherwise raise a bare ZeroDivisionError.
        raise ValueError("Vertex buffer has zero vertices; cannot validate vertex size.")
    if self._vertex_size != self._buffer_length / self.vertex_count:
        raise ValueError(
            f"Vertex buffer size ({self._vertex_size}) != buffer length / vertex count "
            f"({self._buffer_length / self.vertex_count}).")
    if self._vertex_size != layout_size:
        # This happens for a few vanilla meshes; we ignore such meshes.
        # TODO: I've looked at the buffer data for mesh 0 of m8000B2A10, and it appears very abnormal. In fact,
        #  some of the 28-byte data clusters appear to just be counting upward as integers; there definitely does
        #  not seem to be any position float data in there. Later on, they appear to change into random shorts.
        raise VertexBufferSizeError(self._vertex_size, layout_size)
    with reader.temp_offset(vertex_data_offset + self._buffer_offset):
        for vertex in vertices:
            vertex.read(reader, layout, uv_factor)
def unpack_from(
    cls,
    reader: BinaryReader,
    platform: TPFPlatform,
    tpf_flags: int,
    encoding: str,
    tpf_path: tp.Union[None, str, Path] = None,
):
    """Read one texture entry from a TPF, including its (possibly DCX-compressed) data.

    Args:
        reader: positioned at the start of the texture entry.
        platform: determines which extra console header fields are present.
        tpf_flags: TPF-level flags; on PS3, a nonzero value adds an extra header field.
        encoding: string encoding used for the texture name.
        tpf_path: optional source path recorded on the texture.

    Raises:
        ValueError: on invalid flag/header field values.
    """
    self = cls()
    self.tpf_path = tpf_path
    file_offset = reader.unpack_value("I")
    file_size = reader.unpack_value("i")
    self.format = reader.unpack_value("B")
    self.texture_type = TextureType(reader.unpack_value("B"))
    self.mipmaps = reader.unpack_value("B")
    self.texture_flags = reader.unpack_value("B")
    if self.texture_flags not in {0, 1, 2, 3}:
        raise ValueError(
            f"`TPFTexture.flags1` was {self.texture_flags}, but expected 0, 1, 2, or 3."
        )
    if platform != TPFPlatform.PC:
        # BUG FIX: original assigned the `TextureHeader` CLASS itself, so every attribute
        # write below mutated shared class state across all textures. Instantiate it.
        # (Assumes `TextureHeader` has a zero-argument constructor — TODO confirm.)
        self.header = TextureHeader()
        self.header.width = reader.unpack_value("h")
        self.header.height = reader.unpack_value("h")
        if platform == TPFPlatform.Xbox360:
            reader.assert_pad(4)
        elif platform == TPFPlatform.PS3:
            self.header.unk1 = reader.unpack_value("i")
            if tpf_flags != 0:
                self.header.unk2 = reader.unpack_value("i")
                if self.header.unk2 not in {0, 0x68E0, 0xAAE4}:
                    raise ValueError(
                        f"`TextureHeader.unk2` was {self.header.unk2}, but expected 0, 0x68E0, or 0xAAE4."
                    )
        elif platform in {TPFPlatform.PS4, TPFPlatform.XboxOne}:
            self.header.texture_count = reader.unpack_value("i")
            if self.header.texture_count not in {1, 6}:
                # BUG FIX: original left this message as a bare no-op f-string expression.
                raise ValueError(
                    f"`TextureHeader.texture_count` was {self.header.texture_count}, but expected 1 or 6."
                )
            self.header.unk2 = reader.unpack_value("i")
            if self.header.unk2 != 0xD:
                # BUG FIX: original left this message as a bare no-op f-string expression.
                raise ValueError(
                    f"`TextureHeader.unk2` was {self.header.unk2}, but expected 0xD."
                )
    name_offset = reader.unpack_value("I")
    has_float_struct = reader.unpack_value("i") == 1
    if platform in {TPFPlatform.PS4, TPFPlatform.XboxOne}:
        self.header.dxgi_format = reader.unpack_value("i")
    if has_float_struct:
        self.float_struct = FloatStruct.unpack_from(reader)
    with reader.temp_offset(file_offset):
        self.data = reader.read(file_size)
    if self.texture_flags in {2, 3}:
        # Data is DCX-compressed.
        # TODO: should enforce DCX type as 'DCP_EDGE'?
        self.data = decompress(self.data)
    self.name = reader.unpack_string(offset=name_offset, encoding=encoding)
    return self
def read(self, reader: BinaryReader, layout: BufferLayout, uv_factor: float):
    """Read one vertex's members from `reader` according to `layout`.

    Populates position, normal(+w), bone weights/indices, UVs, tangents, bitangent, and
    colors, depending on which semantics the layout declares. A raw byte copy of the
    whole vertex is also stored in `self.raw`.

    Raises:
        NotImplementedError: for edge-compressed positions or unsupported
            semantic/type combinations.
        ValueError: on malformed data (non-integer `normal_w` float, nonzero UV padding).
    """
    self.uvs = []
    self.tangents = []
    self.colors = []
    with reader.temp_offset(reader.position):
        # Keep the raw bytes of this vertex without consuming them.
        self.raw = reader.read(layout.get_total_size())
    for member in layout:
        not_implemented = False
        if member.semantic == LayoutSemantic.Position:
            if member.layout_type == LayoutType.Float3:
                self.position = Vector3(reader.unpack("<3f"))
            elif member.layout_type == LayoutType.Float4:
                # BUG FIX: a Float4 member is 16 bytes. The original read only "<3f"
                # (12 bytes) and sliced the resulting Vector3, leaving 4 unread bytes
                # that misaligned every subsequent member. Read all four floats and
                # drop the `w` component.
                self.position = Vector3(reader.unpack("<4f")[:3])
            elif member.layout_type == LayoutType.EdgeCompressed:
                raise NotImplementedError(
                    "Soulstruct cannot load FLVERs with edge-compressed vertex positions."
                )
            else:
                not_implemented = True
        elif member.semantic == LayoutSemantic.BoneWeights:
            if member.layout_type == LayoutType.Byte4A:
                self.bone_weights = VertexBoneWeights(
                    *[w / 127.0 for w in reader.unpack("<4b")])
            elif member.layout_type == LayoutType.Byte4C:
                self.bone_weights = VertexBoneWeights(
                    *[w / 255.0 for w in reader.unpack("<4B")])
            elif member.layout_type in {LayoutType.UVPair, LayoutType.Short4ToFloat4A}:
                self.bone_weights = VertexBoneWeights(
                    *[w / 32767.0 for w in reader.unpack("<4h")])
            else:
                not_implemented = True
        elif member.semantic == LayoutSemantic.BoneIndices:
            if member.layout_type in {LayoutType.Byte4B, LayoutType.Byte4E}:
                self.bone_indices = VertexBoneIndices(*reader.unpack("<4B"))
            elif member.layout_type == LayoutType.ShortBoneIndices:
                self.bone_indices = VertexBoneIndices(*reader.unpack("<4h"))
            else:
                not_implemented = True
        elif member.semantic == LayoutSemantic.Normal:
            if member.layout_type == LayoutType.Float3:
                self.normal = Vector3(reader.unpack("<3f"))
            elif member.layout_type == LayoutType.Float4:
                self.normal = Vector3(reader.unpack("<3f"))
                float_normal_w = reader.unpack_value("<f")
                self.normal_w = int(float_normal_w)
                if self.normal_w != float_normal_w:
                    raise ValueError(f"`normal_w` float was not a whole number.")
            elif member.layout_type in {
                LayoutType.Byte4A, LayoutType.Byte4B, LayoutType.Byte4C, LayoutType.Byte4E
            }:
                self.normal = Vector3([(x - 127) / 127.0 for x in reader.unpack("<3B")])
                self.normal_w = reader.unpack_value("<B")
            elif member.layout_type == LayoutType.Short2toFloat2:
                # Note: `normal_w` byte precedes the three signed normal bytes here.
                self.normal_w = reader.unpack_value("<B")
                self.normal = Vector3([x / 127.0 for x in reader.unpack("<3b")])
            elif member.layout_type == LayoutType.Short4ToFloat4A:
                self.normal = Vector3([x / 32767.0 for x in reader.unpack("<3h")])
                self.normal_w = reader.unpack_value("<h")
            elif member.layout_type == LayoutType.Short4ToFloat4B:
                self.normal = Vector3([(x - 32767) / 32767.0 for x in reader.unpack("<3H")])
                self.normal_w = reader.unpack_value("<h")
            else:
                not_implemented = True
        elif member.semantic == LayoutSemantic.UV:
            if member.layout_type == LayoutType.Float2:
                self.uvs.append(Vector3(*reader.unpack("<2f"), 0.0))
            elif member.layout_type == LayoutType.Float3:
                self.uvs.append(Vector3(*reader.unpack("<3f")))
            elif member.layout_type == LayoutType.Float4:
                # Float4 UV member holds two 2D UVs.
                self.uvs.append(Vector3(*reader.unpack("<2f"), 0.0))
                self.uvs.append(Vector3(*reader.unpack("<2f"), 0.0))
            elif member.layout_type in {
                LayoutType.Byte4A, LayoutType.Byte4B, LayoutType.Short2toFloat2,
                LayoutType.Byte4C, LayoutType.UV
            }:
                self.uvs.append(Vector3(*reader.unpack("<2h"), 0) / uv_factor)
            elif member.layout_type == LayoutType.UVPair:
                self.uvs.append(Vector3(*reader.unpack("<2h"), 0) / uv_factor)
                self.uvs.append(Vector3(*reader.unpack("<2h"), 0) / uv_factor)
            elif member.layout_type == LayoutType.Short4ToFloat4B:
                self.uvs.append(Vector3(*reader.unpack("<3h")) / uv_factor)
                if reader.unpack_value("<h") != 0:
                    raise ValueError(
                        "Expected zero short after reading UV | Short4ToFloat4B vertex member."
                    )
            else:
                not_implemented = True
        elif member.semantic == LayoutSemantic.Tangent:
            if member.layout_type == LayoutType.Float4:
                self.tangents.append(Vector4(*reader.unpack("<4f")))
            elif member.layout_type in {
                LayoutType.Byte4A,
                LayoutType.Byte4B,
                LayoutType.Byte4C,
                LayoutType.Short4ToFloat4A,
                LayoutType.Byte4E,
            }:
                tangent = Vector4([(x - 127) / 127.0 for x in reader.unpack("<4B")])
                self.tangents.append(tangent)
            else:
                not_implemented = True
        elif member.semantic == LayoutSemantic.Bitangent:
            if member.layout_type in {
                LayoutType.Byte4A, LayoutType.Byte4B, LayoutType.Byte4C, LayoutType.Byte4E
            }:
                self.bitangent = Vector4([(x - 127) / 127.0 for x in reader.unpack("<4B")])
            else:
                not_implemented = True
        elif member.semantic == LayoutSemantic.VertexColor:
            if member.layout_type == LayoutType.Float4:
                self.colors.append(ColorRGBA(*reader.unpack("<4f")))
            elif member.layout_type in {LayoutType.Byte4A, LayoutType.Byte4C}:
                # Convert byte channels [0-255] to float channels [0-1].
                self.colors.append(ColorRGBA(*[b / 255.0 for b in reader.unpack("<4B")]))
            else:
                not_implemented = True
        else:
            not_implemented = True
        if not_implemented:
            raise NotImplementedError(
                f"Unsupported vertex member semantic/type combination: "
                f"{member.semantic.name} | {member.layout_type.name}")
def from_header(cls, binder_reader: BinaryReader, entry_header: BinderEntryHeader) -> BinderEntry:
    """Construct a `BinderEntry` by reading its (compressed) data from `binder_reader`."""
    with binder_reader.temp_offset(entry_header.data_offset):
        entry_data = binder_reader.read(entry_header.compressed_size)
    return cls(
        entry_id=entry_header.id,
        path=entry_header.path,
        data=entry_data,
        flags=entry_header.flags,
    )
def _is_dcx(reader: BinaryReader) -> bool:
    """Return True if the file data begins with the "DCX" magic bytes."""
    with reader.temp_offset(offset=0):
        magic = reader.read(4)
    return magic == b"DCX\0"