class RLT(FresObject):
    """FRES relocation table."""
    _magic = b'_RLT'
    _reader = StructReader(
        ('4s', 'magic'),
        ('I', 'unk04'), # offset of the RLT?
        ('I', 'unk08'), # 5
        ('I', 'unk0C'), # 0
        ('I', 'unk10'), # 0
        ('I', 'unk14'), # 0
        ('I', 'unk18'), # 0
        ('I', 'unk1C'), # D49E
        ('I', 'unk20'), # 0
        ('I', 'unk24'), # 3D
        ('I', 'unk28'), # 0
        ('I', 'unk2C'), # 0
        Offset('data_start'),
        size = 0x34,
    )

    def validate(self):
        super().validate()
        return True
class StringTable(BinaryObject):
    """String table."""
    _magic = b'_STR'
    _reader = StructReader(
        ('4s', 'magic'),
        Padding(4),
        ('I', 'size'),
        Padding(4),
        ('I', 'num_strs'),
        Padding(4),
        size = 0x18,
    )

    def _unpackFromData(self, data):
        super()._unpackFromData(data)
        self.strings = []
        self._file.seek(self._file_offset + self._reader.size)
        for i in range(self.num_strs):
            offs = self._file.tell()
            offs += (offs & 1) # pad to u16 boundary
            self.strings.append(readStringWithLength(self._file, '<H', offs))
            #log.debug('Str 0x%04X: "%s"', i, self.strings[-1])
        return self

    def validate(self):
        super().validate()
        return True
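
# The string table entries above are length-prefixed: a u16 character count
# followed by the text, with each entry starting on a 2-byte boundary (hence
# the `offs & 1` adjustment). A minimal sketch of reading one such entry from
# a plain binary file object; the real readStringWithLength helper used above
# may differ in signature and behavior:
def _read_u16_prefixed_string(file, offset, encoding='utf-8'):
    """Illustrative only: read a u16-length-prefixed string at offset."""
    import struct
    file.seek(offset)
    (length,) = struct.unpack('<H', file.read(2))
    return file.read(length).decode(encoding)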
class Header(BinaryObject):
    """SARC file header."""
    _reader = StructReader(
        ('4s', 'magic'),      # 'SARC'
        ('H', 'header_len'),  # always 0x14
        ('H', 'byte_order'),  # 0xFEFF or 0xFFFE
        ('I', 'file_size'),
        ('I', 'data_offset'),
        ('H', 'version'),     # always 0x0100
        ('H', 'reserved12'),
    )

    def validate(self):
        assert self.magic == b'SARC', "Not a SARC file"
        assert self.version == 0x0100, \
            "Unsupported version: " + str(self.version)
        assert self.byte_order in (0xFEFF, 0xFFFE), \
            "Invalid byte order mark: 0x%04X" % self.byte_order
        if self.header_len != 0x14:
            log.warn("SARC header length is %d, should be 20",
                self.header_len)
        if self.reserved12 != 0:
            log.warn("SARC reserved field 0x12 is %d, should be 0",
                self.reserved12)
        return True
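
# byte_order is a standard byte-order mark. Assuming the header was unpacked
# little-endian (as Switch-era files are), 0xFEFF means the rest of the
# archive is little-endian and 0xFFFE means big-endian. A tiny sketch of
# turning it into a struct-module prefix (not part of the parser above):
def _endian_prefix(byte_order):
    """Return '<' or '>' for the struct module, based on the SARC BOM."""
    return '<' if byte_order == 0xFEFF else '>'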
class Header(BinaryObject):
    """AAMP file header."""
    _reader = StructReader(
        ('4s', 'magic'),         # 'AAMP'
        ('I', 'version'),
        ('I', 'unk08'),
        ('I', 'filesize'),
        ('I', 'unk10'),
        ('I', 'xml_str_len'),    # length of `str_xml` field
        ('I', 'num_root_nodes'),
        ('I', 'num_children'),   # num direct children of root node
        ('I', 'total_nodes'),
        ('I', 'data_buf_size'),  # no idea what these are used for
        ('I', 'str_buf_size'),
        ('I', 'unk2C'),
        ('4s', 'str_xml'),
    )

    def validate(self):
        assert self.magic == b'AAMP', "Not an AAMP file"
        assert self.version == 2, \
            "Unsupported version: " + str(self.version)
        if self.str_xml != b'xml\0':
            log.warn("AAMP header XML string is %s, should be b'xml\\0'",
                self.str_xml)
        log.debug("AAMP filesize: %d", self.filesize)
        log.debug("AAMP #roots: %d", self.num_root_nodes)
        log.debug("AAMP #root children: %d", self.num_children)
        log.debug("AAMP total nodes: %d", self.total_nodes)
        log.debug("AAMP data buf size: %d", self.data_buf_size)
        log.debug("AAMP string buf size: %d", self.str_buf_size)
        log.debug("AAMP unknown fields: 0x%X, 0x%X, 0x%X",
            self.unk08, self.unk10, self.unk2C)
class FSHP(FresObject):
    """A FSHP in an FMDL."""
    _magic = b'FSHP'
    _reader = StructReader(
        ('4s', 'magic'),
        ('I', 'unk04'),
        ('I', 'unk08'),
        ('I', 'unk0C'),
        StrOffs('name'),
        Padding(4),
        Offset64('fvtx_offset'),         # => FVTX
        Offset64('lod_offset'),          # => LOD models
        Offset64('fskl_idx_array_offs'), # => 00030002 00050004 00070006 00090008
                                         #    000B000A 000D000C 000F000E 00110010
        Offset64('unk30'), # 0
        Offset64('unk38'), # 0
        # bounding box and bounding radius
        Offset64('bbox_offset'),    # => about 24 floats, or 8 Vec3s, or 6 Vec4s
        Offset64('bradius_offset'), # => 3F03ADA8 3EFC1658 00000000 00000D14
                                    #    00000000 00000000 00000000 00000000
        Offset64('unk50'),
        ('I', 'flags'),
        ('H', 'index'),
        ('H', 'fmat_idx'),
        ('H', 'single_bind'),
        ('H', 'fvtx_idx'),
        ('H', 'skin_bone_idx_cnt'),
        ('B', 'vtx_skin_cnt'),
        ('B', 'lod_cnt'),
        ('I', 'vis_group_cnt'),
        ('H', 'fskl_array_cnt'),
        Padding(2),
        size=0x70,
    )

    def readFromFRES(self, fres, offset=None, reader=None):
        """Read the FSHP from given FRES."""
        super().readFromFRES(fres, offset, reader)
        #log.debug("FSHP name='%s'", self.name)
        self.dumpToDebugLog()
        #self.dumpOffsets()

        self.fvtx = FVTX().readFromFRES(fres, self.fvtx_offset)
        self.lods = []
        for i in range(self.lod_cnt):
            model = LODModel().readFromFRES(fres,
                self.lod_offset + (i * LODModel._reader.size))
            self.lods.append(model)
        #self.lods = [self.lods[1]] # XXX DEBUG only keep one model
        return self

    def validate(self):
        super().validate()
        return True
class NX(BinaryObject):
    """A 'NX' texture in a BNTX."""
    _magic = b'NX  '
    _reader = StructReader(
        ('4s', 'magic'),
        ('I', 'num_textures'),
        Offset64('info_ptrs_offset'),
        Offset64('data_blk_offset'),
        Offset64('dict_offset'),
        ('I', 'str_dict_len'),
    )
class Dict(BinaryObject):
    """Dictionary of names."""
    _reader = StructReader(
        ('i', 'unk00'),    # always 0?
        ('i', 'numItems'),
        size=8,
    )
    _itemReader = StructReader(
        ('i', 'search'),   # mostly increases, not always by 1
        ('h', 'left'),     # usually -1 for first item
        ('h', 'right'),    # usually 1 for first item
        StrOffs('nameoffs'),
        Padding(4),
        size=0x10,
    )

    def _unpackFromData(self, data):
        super()._unpackFromData(data)
        self.items = []
        for i in range(self.numItems + 1):
            self._file.seek(self._file_offset + self._reader.size +
                (i * self._itemReader.size))
            item = self._itemReader.unpackFromFile(self._file)
            if item['nameoffs'] == 0:
                break
            item['name'] = readStringWithLength(self._file, '<H',
                item['nameoffs'])
            self.items.append(item)
        return self

    def dumpToDebugLog(self):
        """Dump to debug log."""
        log.debug("Dict with %d items; unk00 = %d",
            self.numItems, self.unk00)
        for i, item in enumerate(self.items):
            log.debug('%4d: %4d, %4d, %4d, "%s"', i,
                item['search'], item['left'], item['right'], item['name'])

    def validate(self):
        super().validate()
        return True
class NameTableHeader(BinaryObject):
    """SARC file SFAT filename table header."""
    _reader = StructReader(
        ('4s', 'magic'),      # 'SFNT'
        ('H', 'header_len'),  # always 8
        ('H', 'reserved06'),
    )

    def validate(self):
        assert self.magic == b'SFNT', "Not an SFNT object"
        if self.header_len != 8:
            log.warn("SFNT header length is %d, should be 8",
                self.header_len)
        if self.reserved06 != 0:
            log.warn("SFNT reserved06 is %d, should be 0",
                self.reserved06)
        return True
class ShaderAssign(FresObject):
    """Shader assignment data referenced by an FMAT."""
    _reader = StructReader(
        StrOffs('name'),
        Padding(4),
        StrOffs('name2'),
        Padding(4),
        Offset64('vtx_attr_names'), # -> offsets of attr names
        Offset64('vtx_attr_dict'),
        Offset64('tex_attr_names'),
        Offset64('tex_attr_dict'),
        Offset64('mat_param_vals'), # names from dict
        Offset64('mat_param_dict'),
        Padding(4),
        ('B', 'num_vtx_attrs'),
        ('B', 'num_tex_attrs'),
        ('H', 'num_mat_params'),
    )
class SFATHeader(BinaryObject):
    """SARC file SFAT structure header."""
    _reader = StructReader(
        ('4s', 'magic'),      # 'SFAT'
        ('H', 'header_len'),  # always 0xC
        ('H', 'node_count'),
        ('I', 'hash_key'),    # always 0x65
    )

    def validate(self):
        assert self.magic == b'SFAT', "Not a SFAT header"
        if self.header_len != 0xC:
            log.warn("SFAT header length is %d, should be 12",
                self.header_len)
        if self.hash_key != 0x65:
            log.warn("SFAT hash_key is 0x%X, should be 0x65",
                self.hash_key)
        return True
class BNTX(BinaryObject):
    """BNTX texture pack."""
    _magic = b'BNTX'
    _reader = StructReader(
        ('4s', 'magic'),
        Padding(4),
        ('I', 'data_len'),
        ('H', 'byte_order'),   # FFFE or FEFF
        ('H', 'version'),
        StrOffs('name'),
        Padding(2),
        ('H', 'strings_offs'), # relative to start of BNTX
        Offset('reloc_offs'),
        ('I', 'file_size'),
        size=0x20,
    )

    def _unpackFromData(self, data):
        super()._unpackFromData(data)
        self.name = readString(self._file, self.name)
        self.strings = StringTable().readFromFile(self._file,
            self._file_offset + self.strings_offs)
        self.nx = NX().readFromFile(self._file,
            self._file_offset + self._reader.size)
        #self.nx.dumpToDebugLog()

        self.textures = []
        for i in range(self.nx.num_textures):
            offs = self._file.read('Q',
                self.nx.info_ptrs_offset + (i * 8))
            brti = BRTI().readFromFile(self._file, offs)
            log.debug("Tex %d = %s", i, brti)
            self.textures.append(brti)
        return self

    def validate(self):
        super().validate()
        return True

    def __str__(self):
        return "<BNTX '%s' at 0x%X>" % (self.name, id(self))
class EmbeddedFile(FresObject):
    """Generic file embedded in FRES archive."""
    _reader = StructReader(
        Offset64('data_offset'),
        ('I', 'size'),
        Padding(4),
        size=0x10,
    )

    def readFromFRES(self, fres, offset=None, reader=None):
        """Read the embedded file from the given FRES."""
        super().readFromFRES(fres, offset, reader)
        #self.dumpToDebugLog()
        #self.dumpOffsets()
        return self

    def toData(self):
        return self.fres.file.read(self.size, self.data_offset)
class SFATNode(BinaryObject):
    """SARC file SFAT file node."""
    _reader = StructReader(
        ('I', 'name_hash'),  # filename hash
        ('I', 'file_attrs'),
        ('I', 'data_start'), # file data offset, relative to SARC header data_offset
        ('I', 'data_end'),
    )

    def readFromFile(self, file):
        """Read the node from the given file."""
        super().readFromFile(file)
        if self.file_attrs & 0x01000000:
            self.name_offset = (self.file_attrs & 0xFFFF) * 4
        else:
            self.name_offset = None
        return self

    def validate(self):
        assert self.data_start <= self.data_end, \
            "File size is negative"
        return True
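
# For reference, name_hash is the usual SARC filename hash: each character of
# the file's path folds into a running value multiplied by the key stored in
# SFATHeader.hash_key (0x65). A minimal sketch, separate from the parser
# above:
def sarc_name_hash(name, key=0x65):
    """Compute the SFAT name hash for a file path string."""
    h = 0
    for c in name:
        h = (h * key + ord(c)) & 0xFFFFFFFF
    return h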
class Attribute(FresObject):
    """An attribute in a FRES."""
    _reader = StructReader(
        StrOffs('name'),
        ('I', 'unk04'),
        ('H', 'format'),
        Padding(2),
        ('H', 'buf_offs'),
        ('H', 'buf_idx'),
        size=0x10,
    )

    def readFromFRES(self, fres, offset=None, reader=None):
        """Read the attribute from given FRES."""
        super().readFromFRES(fres, offset, reader)
        log.debug("Attr name='%s' fmt=%04X offs=%d idx=%d unk=%d",
            self.name, self.format, self.buf_offs, self.buf_idx,
            self.unk04)
        #self.dumpToDebugLog()
        self.fvtx = None # to be filled in by the FVTX that reads it
        return self

    def validate(self):
        super().validate()
        return True
class FMAT(FresObject):
    """A FMAT in an FMDL."""
    _magic = b'FMAT'
    _reader = StructReader(
        ('4s', 'magic'),
        ('I', 'size'),
        ('I', 'size2'),
        Padding(4),
        StrOffs('name'),
        Padding(4),
        Offset64('render_param_offs'),
        Offset64('render_param_dict_offs'),
        Offset64('shader_assign_offs'), # -> name offsets
        Offset64('unk30_offs'),
        Offset64('tex_ref_array_offs'),
        Offset64('unk40_offs'),
        Offset64('sampler_list_offs'),
        Offset64('sampler_dict_offs'),
        Offset64('shader_param_array_offs'),
        Offset64('shader_param_dict_offs'),
        Offset64('shader_param_data_offs'),
        Offset64('user_data_offs'),
        Offset64('user_data_dict_offs'),
        Offset64('volatile_flag_offs'),
        Offset64('user_offs'),
        Offset64('sampler_slot_offs'),
        Offset64('tex_slot_offs'),
        ('I', 'mat_flags'),
        ('H', 'section_idx'),
        ('H', 'render_param_cnt'),
        ('B', 'tex_ref_cnt'),
        ('B', 'sampler_cnt'),
        ('H', 'shader_param_cnt'),
        ('H', 'shader_param_data_size'),
        ('H', 'raw_param_data_size'),
        ('H', 'user_data_cnt'),
        Padding(2),
        ('I', 'unkB4'),
        size=0xB8,
    )

    def readFromFRES(self, fres, offset=None, reader=None):
        """Read the FMAT from given FRES."""
        super().readFromFRES(fres, offset, reader)
        log.debug("FMAT name='%s'", self.name)
        self.dumpToDebugLog()
        self.dumpOffsets()

        self._readDicts()
        self._readRenderParams()
        self._readShaderParams()
        self._readTextureList()
        self._readSamplerList()
        self._readShaderAssign()
        return self

    def _readDicts(self):
        dicts = ('render_param', 'sampler', 'shader_param', 'user_data')
        for name in dicts:
            offs = getattr(self, name + '_dict_offs')
            if offs:
                #d = IndexGroup().readFromFile(self.fres.file, offs)
                #log.debug("FMAT %s dict:\n%s", name, d.dump())
                data = self._readDict(offs, name)
            else:
                data = None
            setattr(self, name + '_dict', data)

    def _readDict(self, offs, name):
        d = Dict().readFromFile(self.fres.file, offs)
        log.debug("FMAT dict %s:", name)
        d.dumpToDebugLog()
        return d

    def _readTextureList(self):
        self.textures = []
        log.debug("Texture list:")
        for i in range(self.tex_ref_cnt):
            name = self.fres.readStrPtr(self.tex_ref_array_offs + (i * 8))
            slot = self.fres.read('q', self.tex_slot_offs + (i * 8))
            log.debug("%3d (%2d): %s", i, slot, name)
            self.textures.append({'name': name, 'slot': slot})

    def _readSamplerList(self):
        self.samplers = []
        log.debug("Sampler list:")
        for i in range(self.sampler_cnt):
            data = self.fres.readHexWords(8,
                self.sampler_list_offs + (i * 32))
            slot = self.fres.read('q', self.sampler_slot_offs + (i * 8))
            log.debug("%3d (%2d): %s", i, slot, data)
            self.samplers.append({'slot': slot, 'data': data})
            # XXX no idea what to do with this data

    def _readRenderParams(self):
        self.renderParams = {}
        types = ('?', 'float', 'str')
        for i in range(self.render_param_cnt):
            name, offs, cnt, typ, pad = self.fres.read('QQHHI',
                self.render_param_offs + (i * 24))
            name = self.fres.readStr(name)
            if pad != 0:
                log.warning("Render info '%s' padding=0x%X", name, pad)
            try:
                typeName = types[typ]
            except IndexError:
                typeName = '0x%X' % typ

            param = {
                'name':  name,
                'count': cnt,
                'type':  typeName,
                'vals':  [],
            }
            for j in range(cnt):
                if typ == 0:
                    val = self.fres.readHex(8, offs)
                elif typ == 1:
                    val = self.fres.read('f', offs)
                elif typ == 2:
                    val = self.fres.readStrPtr(offs)
                else:
                    log.warning("Render param '%s' unknown type 0x%X",
                        name, typ)
                    val = '<unknown>'
                param['vals'].append(val)

            #log.debug("Render param: %-5s[%d] %-32s: %s",
            #    typeName, cnt, name, ', '.join(map(str, param['vals'])))
            if name in self.renderParams:
                log.warning("Duplicate render param '%s'", name)
            self.renderParams[name] = param

    def _readShaderParams(self):
        self.shaderParams = {}
        #log.debug("Shader params:")
        array_offs = self.shader_param_array_offs
        data_offs = self.shader_param_data_offs
        for i in range(self.shader_param_cnt):
            # unk0: always 0; unk14: always -1
            # idx0, idx1: both always == i
            unk0, name, type, size, offset, unk14, idx0, idx1 = \
                self.fres.read('QQBBHiHH', array_offs + (i * 32))
            name = self.fres.readStr(name)
            type = shaderParamTypes[type]
            if unk0:
                log.debug("Shader param '%s' unk0=0x%X", name, unk0)
            if unk14 != -1:
                log.debug("Shader param '%s' unk14=%d", name, unk14)
            if idx0 != i or idx1 != i:
                log.debug("Shader param '%s' idxs=%d, %d (expected %d)",
                    name, idx0, idx1, i)

            data = self.fres.read(size, data_offs + offset)
            data = struct.unpack(type['fmt'], data)
            #log.debug("%-38s %-5s %s", name, type['name'],
            #    type['outfmt'] % data)
            if name in self.shaderParams:
                log.warning("Duplicate shader param '%s'", name)
            self.shaderParams[name] = {
                'name':   name,
                'type':   type,
                'size':   size,
                'offset': offset,
                'idxs':   (idx0, idx1),
                'unk00':  unk0,
                'unk14':  unk14,
                'data':   data,
            }

    def _readShaderAssign(self):
        assign = ShaderAssign()
        assign.readFromFRES(self.fres, self.shader_assign_offs)
        self.shader_assign = assign
        log.debug("shader assign: %d vtx attrs, %d tex attrs, %d mat params",
            assign.num_vtx_attrs, assign.num_tex_attrs,
            assign.num_mat_params)

        self.vtxAttrs = []
        for i in range(assign.num_vtx_attrs):
            name = self.fres.readStrPtr(assign.vtx_attr_names + (i * 8))
            log.debug("vtx attr %d: '%s'", i, name)
            self.vtxAttrs.append(name)

        self.texAttrs = []
        for i in range(assign.num_tex_attrs):
            name = self.fres.readStrPtr(assign.tex_attr_names + (i * 8))
            log.debug("tex attr %d: '%s'", i, name)
            self.texAttrs.append(name)

        self.mat_param_dict = self._readDict(assign.mat_param_dict,
            "mat_params")
        self.mat_params = {}
        #log.debug("material params:")
        for i in range(assign.num_mat_params):
            name = self.mat_param_dict.items[i + 1]['name']
            val = self.fres.readStrPtr(assign.mat_param_vals + (i * 8))
            #log.debug("%-40s: %s", name, val)
            if name in self.mat_params:
                log.warning("duplicate mat_param '%s'", name)
            if name != '':
                self.mat_params[name] = val

    def validate(self):
        super().validate()
        return True
class FMDL(FresObject):
    """FMDL object header."""
    #defaultFileExt = 'x3d'
    defaultFileExt = 'dae'

    # Offsets in this struct are relative to the beginning of
    # the FRES file.
    # I'm assuming they're 64-bit since most are a 32-bit offset
    # followed by 4 zero bytes.
    _magic = b'FMDL'
    _reader = StructReader(
        ('4s', 'magic'),
        ('I', 'size'),
        ('I', 'size2'),
        Padding(4),
        StrOffs('name'),
        Padding(4),
        Offset64('str_tab_end'),
        Offset64('fskl_offset'),
        Offset64('fvtx_offset'),
        Offset64('fshp_offset'),
        Offset64('fshp_dict_offset'),
        Offset64('fmat_offset'),
        Offset64('fmat_dict_offset'),
        Offset64('udata_offset'),
        Offset64('unk60'),
        Offset64('unk68'), # probably dict for unk60
        ('H', 'fvtx_count'),
        ('H', 'fshp_count'),
        ('H', 'fmat_count'),
        ('H', 'udata_count'),
        ('H', 'total_vtxs'),
        Padding(6),
        size = 0x78,
    )

    def readFromFRES(self, fres, offset=None, reader=None):
        """Read the model from the given FRES."""
        super().readFromFRES(fres, offset, reader)
        log.debug("FMDL name: '%s'", self.name)
        self.dumpToDebugLog()
        #self.dumpOffsets()

        log.debug("FMDL '%s' contains %d skeletons, %d FVTXs, %d FSHPs, %d FMATs, %d udatas, total %d vertices",
            self.name,
            1 if self.fskl_offset > 0 else 0, # can this ever be 0?
            self.fvtx_count, self.fshp_count, self.fmat_count,
            self.udata_count, self.total_vtxs)

        # read skeleton
        self.skeleton = FSKL().readFromFRES(fres, self.fskl_offset)

        # read vertex objects
        self.fvtxs = []
        for i in range(self.fvtx_count):
            vtx = FVTX().readFromFRES(fres,
                self.fvtx_offset + (i * FVTX._reader.size))
            self.fvtxs.append(vtx)

        # read shapes
        self.fshps = []
        for i in range(self.fshp_count):
            self.fshps.append(FSHP().readFromFRES(fres,
                self.fshp_offset + (i * FSHP._reader.size)))

        # read materials
        self.fmats = []
        for i in range(self.fmat_count):
            self.fmats.append(FMAT().readFromFRES(fres,
                self.fmat_offset + (i * FMAT._reader.size)))

        #self.fshps = [self.fshps[1]] # XXX DEBUG only keep one model
        return self

    def validate(self):
        super().validate()
        return True

    def toData(self):
        """Export model to COLLADA file."""
        #writer = X3DWriter(self)
        writer = ColladaWriter()
        for i, fmat in enumerate(self.fmats):
            writer.addFMAT(fmat)
        for i, fvtx in enumerate(self.fvtxs):
            writer.addFVTX(fvtx, name=self.fshps[i].name)
            writer.addFSHP(self.fshps[i]) # XXX this is weird
        writer.addFSKL(self.skeleton)
        writer.addScene()
        return writer.toXML().tostring(pretty_print=True)
class LODModel(FresObject):
    """A Level-of-Detail Model."""
    _reader = StructReader(
        Offset64('submesh_array_offs'),
        Offset64('unk08'),
        Offset64('unk10'),
        Offset64('idx_buf_offs'),  # -> buffer size in bytes
        ('I', 'face_offs'),        # offset into index buffer
        ('I', 'prim_fmt'),         # how to draw the faces
        ('I', 'idx_type'),         # data type of index buffer entries
        ('I', 'idx_cnt'),          # total number of indices
        ('H', 'visibility_group'),
        ('H', 'submesh_cnt'),
        ('I', 'unk34'),
        size=0x38,
    )

    primTypes = {
        # id: (min, incr, name)
        0x01: (1, 1, 'points'),
        0x02: (2, 2, 'lines'),
        0x03: (2, 1, 'line_strip'),
        0x04: (3, 3, 'triangles'),
        0x05: (3, 1, 'triangle_fan'),
        0x06: (3, 1, 'triangle_strip'),
        0x0A: (4, 4, 'lines_adjacency'),
        0x0B: (4, 1, 'line_strip_adjacency'),
        0x0C: (6, 1, 'triangles_adjacency'),
        0x0D: (6, 6, 'triangle_strip_adjacency'),
        0x11: (3, 3, 'rects'),
        0x12: (2, 1, 'line_loop'),
        0x13: (4, 4, 'quads'),
        0x14: (4, 2, 'quad_strip'),
        0x82: (2, 2, 'tesselate_lines'),
        0x83: (2, 1, 'tesselate_line_strip'),
        0x84: (3, 3, 'tesselate_triangles'),
        0x86: (3, 1, 'tesselate_triangle_strip'),
        0x93: (4, 4, 'tesselate_quads'),
        0x94: (4, 2, 'tesselate_quad_strip'),
    }

    idxFormats = {
        0x00: '<I', # I/H are backward from gx2Enum.h???
        0x01: '<H',
        0x02: '<I',
        0x04: '>I',
        0x09: '>H',
    }

    def readFromFRES(self, fres, offset=None, reader=None):
        """Read the model from given FRES."""
        super().readFromFRES(fres, offset, reader)
        self.dumpToDebugLog()
        self.dumpOffsets()

        #self.groups = []
        ##fres.file.seek(self.face_offs)
        #for i in range(self.visibility_group):
        #    offs, count = fres.read('2I')
        #    log.debug("group %2d: 0x%X, %d", i, offs, count)
        #    self.groups.append((offs, count))

        self.prim_fmt_id = self.prim_fmt
        self.prim_min, self.prim_size, self.prim_fmt = \
            self.primTypes[self.prim_fmt]
        self.idx_fmt = self.idxFormats[self.idx_type]
        log.debug(
            "prim fmt: 0x%02X (%s), idx type: 0x%02X (%s) (min=%d inc=%d)",
            self.prim_fmt_id, self.prim_fmt, self.idx_type, self.idx_fmt,
            self.prim_min, self.prim_size)

        # read index buffer
        log.debug("Read %d idxs in fmt %s from 0x%X; idx_buf_offs=0x%X",
            self.idx_cnt, self.idx_fmt, self.face_offs, self.idx_buf_offs)
        self.idx_buf = fres.read(self.idx_fmt,
            pos=self.face_offs, count=self.idx_cnt, use_rlt=True)
        for i in range(self.idx_cnt):
            self.idx_buf[i] += self.visibility_group
        log.debug("idxs(%d): %s...", len(self.idx_buf),
            ' '.join(map(str, self.idx_buf[0:16])))

        # read submeshes
        self.submeshes = []
        for i in range(self.submesh_cnt + 1): # XXX is this right?
            offs, cnt = self.fres.read('2I',
                self.submesh_array_offs + (i * 8))
            idxs = self.idx_buf[offs:offs + cnt] # XXX offs / size?
            log.debug("LOD submesh %d offs=%d cnt=%d: %s...",
                i, offs, cnt, ' '.join(map(str, idxs[0:16])))
            self.submeshes.append({'offset': offs, 'count': cnt, 'idxs': idxs})
        return self

    def validate(self):
        super().validate()
        return True
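
# The (min, incr) pairs in primTypes describe how many indices a valid draw
# call needs: at least `min`, growing in steps of `incr`. A small sanity
# check built on that table (illustrative only; assumes the module-level
# `log` used elsewhere in this file, and is not called by the parser above):
def _check_prim_count(prim_fmt_id, idx_cnt):
    """Return True if idx_cnt is a legal index count for this primitive type."""
    pmin, incr, name = LODModel.primTypes[prim_fmt_id]
    ok = idx_cnt >= pmin and (idx_cnt - pmin) % incr == 0
    if not ok:
        log.warning("%d indices is not valid for %s (min %d, step %d)",
            idx_cnt, name, pmin, incr)
    return ok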
class Bone(FresObject):
    """A bone in an FSKL."""
    # offsets in this struct are relative to the beginning of
    # the FRES file.
    # I'm assuming they're 64-bit.
    _reader = StructReader(
        StrOffs('name'),
        ('5I', 'unk04'),
        ('H', 'bone_idx'),
        ('h', 'parent_idx'),
        ('h', 'smooth_mtx_idx'),
        ('h', 'rigid_mtx_idx'),
        ('h', 'billboard_idx'),
        ('H', 'udata_count'),
        Flags('flags', {
            'VISIBLE': 0x00000001,
            'EULER':   0x00001000, # use euler rotn, not quaternion
            'BB_CHILD':0x00010000, # child billboarding
            'BB_WORLD_VEC':0x00020000, # World View Vector.
                # The Z axis is parallel to the camera.
            'BB_WORLD_POINT':0x00030000, # World View Point.
                # The Z axis is equal to the direction the camera
                # is pointing to.
            'BB_SCREEN_VEC':0x00040000, # Screen View Vector.
                # The Z axis is parallel to the camera, the Y axis is
                # equal to the up vector of the camera.
            'BB_SCREEN_POINT':0x00050000, # Screen View Point.
                # The Z axis is equal to the direction the camera is
                # pointing to, the Y axis is equal to the up vector of
                # the camera.
            'BB_Y_VEC':0x00060000, # Y-Axis View Vector.
                # The Z axis has been made parallel to the camera view
                # by rotating the Y axis.
            'BB_Y_POINT':0x00070000, # Y-Axis View Point.
                # The Z axis has been made equal to the direction
                # the camera is pointing to by rotating the Y axis.
            'SEG_SCALE_COMPENSATE':0x00800000, # Segment scale
                # compensation. Set for bones scaled in Maya whose
                # scale is not applied to child bones.
            'UNIFORM_SCALE': 0x01000000, # Scale uniformly.
            'SCALE_VOL_1':   0x02000000, # Scale volume by 1.
            'NO_ROTATION':   0x04000000,
            'NO_TRANSLATION':0x08000000,
            # same as previous but for hierarchy of bones
            'GRP_UNIFORM_SCALE':  0x10000000,
            'GRP_SCALE_VOL_1':    0x20000000,
            'GRP_NO_ROTATION':    0x40000000,
            'GRP_NO_TRANSLATION': 0x80000000,
        }),
        Vec3f('scale'),
        Vec4f('rot'),
        Vec3f('pos'),
        size = 80,
    )

    def readFromFRES(self, fres, offset=None, reader=None):
        """Read the bone from given FRES."""
        super().readFromFRES(fres, offset, reader)
        #self.s60 = readStringWithLength(file, '<H', self.unk60)
        #self.s70 = readStringWithLength(file, '<H', self.unk70)
        #self.s88 = readStringWithLength(file, '<H', self.unk88)
        self.parent = None # to be set by the FSKL
        self.fskl   = None # to be set by the FSKL

        #self.rot *= -1
        #self.rot.x %= (2*math.pi)
        #self.rot.y %= (2*math.pi)
        #self.rot.z %= (2*math.pi)
        self.pos.x *= -1
        self.pos.z *= -1
        self.rot.x *= -1

        flagStr = []
        names = (
            'VISIBLE', 'EULER', 'BB_CHILD', 'BB_WORLD_VEC',
            'BB_WORLD_POINT', 'BB_SCREEN_VEC', 'BB_SCREEN_POINT',
            'BB_Y_VEC', 'BB_Y_POINT', 'SEG_SCALE_COMPENSATE',
            'UNIFORM_SCALE', 'SCALE_VOL_1', 'NO_ROTATION',
            'NO_TRANSLATION', 'GRP_UNIFORM_SCALE', 'GRP_SCALE_VOL_1',
            'GRP_NO_ROTATION', 'GRP_NO_TRANSLATION',
        )
        for name in names:
            if self.flags[name]:
                flagStr.append(name)
        self._flagStr = ' '.join(flagStr)

        #log.debug("Bone %d: '%s', parent=%d smooth=%d rigid=%d billboard=%d udata=%d scale=%s rot=%s pos=%s flags=0x%08X %s",
        #    self.bone_idx, self.name, self.parent_idx,
        #    self.smooth_mtx_idx, self.rigid_mtx_idx,
        #    self.billboard_idx, self.udata_count,
        #    self.scale, self.rot, self.pos,
        #    self.flags['_raw'], ', '.join(flagStr))
        #log.debug("Bone name = '%s'", self.name)
        #log.debug("Bone s60 = '%s'", self.s60)
        #log.debug("Bone s70 = '%s'", self.s70)
        #log.debug("Bone s88 = '%s'", self.s88)
        #self.dumpToDebugLog()
        return self

    def validate(self):
        super().validate()
        return True

    def computeTransform(self):
        """Compute final transformation matrix."""
        T = self.pos
        S = self.scale
        R = self.rot

        # why have these flags instead of just setting the
        # values to 0/1? WTF Nintendo.
        # they seem to only be set when the values already are
        # 0 (or 1, for scale) anyway.
        #if self.flags['NO_ROTATION']:    R = Vec4(0, 0, 0, 1)
        #if self.flags['NO_TRANSLATION']: T = Vec3(0, 0, 0)
        #if self.flags['SCALE_VOL_1']:    S = Vec3(1, 1, 1)

        if self.flags['SEG_SCALE_COMPENSATE']:
            # apply inverse of parent's scale
            if self.parent:
                S *= 1 / self.parent.scale
            else:
                log.error(
                    "Bone '%s' has flag SEG_SCALE_COMPENSATE but no parent",
                    self.name)

        # no idea what "scale uniformly" actually means.
        # XXX billboarding, rigid mtxs, if ever used.

        # Build matrices from these transformations.
        print("BONE", self.name)
        T = Matrix.Translate(4, T)
        _printMtx(T, 'T')
        S = Matrix.Scale(4, S)
        _printMtx(S, 'S')
        print("R input", R.x, R.y, R.z)
        R = Quaternion.fromEulerAngles(R).toMatrix()
        _printMtx(R, 'R')
        if self.parent:
            P = self.parent.computeTransform()
            print("P:", self.parent.pos, self.parent.rot, self.parent.scale)
            _printMtx(P, 'P:' + self.parent.name)
        else:
            P = Matrix.I(4)
            _printMtx(P, 'P:none')
        M = Matrix.I(4)
        #log.debug("Bone '%8s' @ %s R %s: T=\n%srot=\n%s =>\n%s",
        #    self.name, self.pos, self.rot, T, R, R @ T)

        # multiply by the smooth matrix if any
        #if self.smooth_mtx_idx >= 0:
        #    mtx = self.fskl.smooth_mtxs[self.smooth_mtx_idx]
        #    # convert 4x3 to 4x4
        #    mtx = Matrix(mtx[0], mtx[1], mtx[2], (0, 0, 0, 1))
        #    M = M @ mtx

        # apply the transformations
        # SRTP is the order used by BFRES-Viewer...
        M = M @ S
        M = M @ R
        M = M @ T
        M = M @ P
        _printMtx(M, 'Final')
        return M

    def printHeirarchy(self, fskl, _depth=0):
        """Dump bone hierarchy to console."""
        ind = (' ' * _depth)
        name = '%s%02X %s' % (ind, self.bone_idx, self.name)
        M = self.computeTransform()
        T, S, R = M.decomposeTransform()
        def D(r): return r / (math.pi / 180)
        log.debug("%s%s|%5.2f %5.2f %5.2f|%4d %4d %4d|%5.2f %5.2f %5.2f|%4d %4d %4d",
            name, ' ' * (26 - len(name)),
            T.x, T.y, T.z,
            D(R.x), D(R.y), D(R.z),
            self.pos.x, self.pos.y, self.pos.z,
            D(self.rot.x), D(self.rot.y), D(self.rot.z),
        )
        for bone in fskl.bones:
            if bone.parent_idx == self.bone_idx:
                bone.printHeirarchy(fskl, _depth + 1)
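
# The BB_* values above are not independent bits: they form a 3-bit billboard
# mode field in bits 16-18 of the flags word, which is why BB_WORLD_POINT
# (0x00030000) overlaps BB_CHILD | BB_WORLD_VEC. A small helper sketch for
# pulling that mode out of the raw flag word (assuming flags['_raw'] holds
# the packed u32, as the commented-out log call above implies):
_BILLBOARD_MODES = (
    None, 'child', 'world_vec', 'world_point',
    'screen_vec', 'screen_point', 'y_vec', 'y_point',
)

def _billboardMode(rawFlags):
    """Return the billboard mode name encoded in a bone's flag word."""
    return _BILLBOARD_MODES[(rawFlags >> 16) & 7]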
class BRTI(BinaryObject):
    """A BRTI in a BNTX."""

    class ChannelType(Enum):
        Zero  = 0
        One   = 1
        Red   = 2
        Green = 3
        Blue  = 4
        Alpha = 5

    class TextureType(Enum):
        Image1D = 0
        Image2D = 1
        Image3D = 2
        Cube    = 3
        CubeFar = 8

    class TextureDataType(Enum):
        UNorm  = 1
        SNorm  = 2
        UInt   = 3
        SInt   = 4
        Single = 5
        SRGB   = 6
        UHalf  = 10

    defaultFileExt = 'png'
    _magic = b'BRTI'
    _reader = StructReader(
        ('4s', 'magic'),
        ('I', 'length'),
        ('Q', 'length2'),
        ('B', 'flags'),
        ('B', 'dimensions'),
        ('H', 'tile_mode'),
        ('H', 'swizzle_size'),
        ('H', 'mipmap_cnt'),
        ('H', 'multisample_cnt'),
        ('H', 'reserved1A'),
        ('B', 'fmt_dtype', lambda v: BRTI.TextureDataType(v)),
        ('B', 'fmt_type',  lambda v: TextureFormat.get(v)()),
        Padding(2),
        ('I', 'access_flags'),
        ('i', 'width'),
        ('i', 'height'),
        ('i', 'depth'),
        ('i', 'array_cnt'),
        ('i', 'block_height', lambda v: 2**v),
        ('H', 'unk38'),
        ('H', 'unk3A'),
        ('i', 'unk3C'),
        ('i', 'unk40'),
        ('i', 'unk44'),
        ('i', 'unk48'),
        ('i', 'unk4C'),
        ('i', 'data_len'),
        ('i', 'alignment'),
        ('4B', 'channel_types', lambda v: tuple(map(BRTI.ChannelType, v))),
        ('i', 'tex_type'),
        StrOffs('name'),
        Padding(4),
        Offset64('parent_offset'),
        Offset64('ptrs_offset'),
    )

    def _unpackFromData(self, data):
        super()._unpackFromData(data)
        self.name = readStringWithLength(self._file, '<H', self.name)
        #self.dumpToDebugLog()

        self.swizzle = BlockLinearSwizzle(self.width,
            self.fmt_type.bytesPerPixel,
            self.block_height)
        #self.swizzle = Swizzle(self.width,
        #    self.fmt_type.bytesPerPixel,
        #    self.block_height)

        self._readMipmaps()
        self._readData()
        log.debug("Texture '%s' size %dx%dx%d, len=%d: %s",
            self.name, self.width, self.height, self.depth, self.data_len,
            ' '.join(map(lambda b: '%02X' % b, self.data[0:16])))
        return self

    def _readMipmaps(self):
        self.mipOffsets = []
        for i in range(self.mipmap_cnt):
            offs = self.ptrs_offset + (i * 8)
            entry = self._file.read('I', offs) #- base
            self.mipOffsets.append(entry)
        log.debug("mipmap offsets: %s",
            list(map(lambda o: '%08X' % o, self.mipOffsets)))

    def _readData(self):
        base = self._file.read('Q', self.ptrs_offset)
        log.debug("Data at 0x%X => 0x%X", self.ptrs_offset, base)
        self.data = self._file.read(self.data_len, base)

    def decode(self):
        self.pixels, self.depth = self.fmt_type.decode(self)
        return self.pixels

    def toData(self):
        self.decode()
        tempFile = tempfile.SpooledTemporaryFile()
        png = PNG(width=self.width, height=self.height,
            pixels=self.pixels, bpp=self.depth)
        png.writeToFile(tempFile)
        tempFile.seek(0)
        return tempFile.read()

    def validate(self):
        super().validate()
        return True
class Header(FresObject):
    """FRES file header."""
    _magic = (b'FRES', b'FRES    ')

    _reader_wiiu = StructReader( # WiiU header
        ('4s', 'magic'),       # 'FRES'
        ('I', 'version'),
        ('H', 'byte_order'),   # FFFE=little, FEFF=big
        ('H', 'header_len'),   # always 0x10
        ('I', 'file_size'),
        ('I', 'alignment'),    # memory alignment to load this file to
        StrOffs('name', None), # offset of file name without extension
        ('I', 'str_tab_len'),  # bytes, string table size
        Offset('str_tab_offset'), # string table offset
        ('12I', 'group_offset'),  # offset of each group (0=not present)
        ('12H', 'group_nfiles'),  # num files in each group (excl root)
        ('I', 'user_ptr'),     # always 0, changed in memory at runtime
        size = 0x6C,
    )

    _reader_switch = StructReader( # Switch header
        # `name` is the offset of a null-terminated string.
        # `name2` is the offset of a length-prefixed string.
        # Both seem to be the filename without extension.
        ('8s', 'magic'),       # 'FRES    ' (four spaces)
        ('I', 'version'),
        ('H', 'byte_order'),   # FFFE=little, FEFF=big
        ('H', 'header_len'),   # always 0x0C
        Offset('name_offset'),
        Offset('alignment'),
        Offset('rlt_offset'),
        Offset('file_size'),   # size of this file
        StrOffs('name2'),
        Offset('unk24'),       # probably padding
        #Padding(4),
        Offset64('fmdl_offset'),
        Offset64('fmdl_dict_offset'),
        Offset64('fska_offset'),
        Offset64('fska_dict_offset'),
        Offset64('fmaa_offset'),
        Offset64('fmaa_dict_offset'),
        Offset64('fvis_offset'),
        Offset64('fvis_dict_offset'),
        Offset64('fshu_offset'),
        Offset64('fshu_dict_offset'),
        Offset64('fscn_offset'),
        Offset64('fscn_dict_offset'),
        Offset64('buf_mem_pool'),
        Offset64('buf_mem_pool_info'),
        Offset64('embed_offset'),
        Offset64('embed_dict_offset'),
        Offset64('unkA8'),
        Offset64('str_tab_offset'),
        Offset('unkB8'),       # str tab size?
        ('H', 'fmdl_cnt'),
        ('H', 'fska_cnt'),
        ('H', 'fmaa_cnt'),
        ('H', 'fvis_cnt'),
        ('H', 'fshu_cnt'),
        ('H', 'fscn_cnt'),
        ('H', 'embed_cnt'),
        ('H', 'unkCA'),
        ('H', 'unkCC'),
        ('H', 'unkCE'),
        size = 0xD0,
    )

    def readFromFile(self, file):
        """Read this object from given file."""
        # "FRES    " (with 4 spaces) is Switch format.
        # Without the spaces is WiiU format.
        pos = file.tell()
        magic = file.read(8)
        file.seek(pos, 0)
        if magic == b'FRES    ':
            self.type = 'switch'
            reader = self._reader_switch
        else:
            self.type = 'wiiu'
            reader = self._reader_wiiu
        self._reader = reader
        super().readFromFile(file, reader=reader)
        return self

    def validate(self):
        super().validate()
        assert self.byte_order in (0xFEFF, 0xFFFE), \
            "Invalid byte order mark: 0x%04X" % self.byte_order
        if self.type == 'wiiu':
            if self.header_len != 0x10:
                log.warn("FRES header length is %d, should be 16",
                    self.header_len)
            if self.user_ptr != 0:
                log.warn("FRES user_ptr is 0x%X, should be 0",
                    self.user_ptr)
            if not isPowerOf2(self.alignment):
                log.warn("FRES alignment is 0x%X, should be a power of 2",
                    self.alignment)
        elif self.type == 'switch':
            if self.header_len != 0xC:
                log.warn("FRES header length is %d, should be 12",
                    self.header_len)
        else:
            log.error("FRES unknown type '%s'", self.type)
        return True
class FVTX(FresObject):
    """A FVTX in an FMDL."""
    # vertex buffer object attributes
    _magic = b'FVTX'
    _reader = StructReader(
        ('4s', 'magic'),
        ('3I', 'unk04'),
        Offset64('vtx_attrib_array_offs'),
        Offset64('vtx_attrib_dict_offs'),
        Offset64('unk10'),
        Offset64('unk18'),
        Offset64('unk20'),
        Offset64('vtx_bufsize_offs'),
        Offset64('vtx_stridesize_offs'),
        Offset64('vtx_buf_array_offs'),
        Offset('vtx_buf_offs'),
        ('B', 'num_attrs'),
        ('B', 'num_bufs'),
        ('H', 'index'), # Section index: index into FVTX array of this entry.
        ('I', 'num_vtxs'),
        ('I', 'skin_weight_influence'),
        size = 0x60,
    )

    def readFromFRES(self, fres, offset=None, reader=None):
        """Read the FVTX from given FRES."""
        super().readFromFRES(fres, offset, reader)
        self.dumpToDebugLog()
        #self.dumpOffsets()
        self._readDicts()
        self._readBuffers()
        self._readAttrs()
        self._readVtxs()
        return self

    def _readDicts(self):
        self.vtx_attrib_dict = Dict().readFromFile(self.fres.file,
            self.vtx_attrib_dict_offs)
        log.debug("FVTX attrib dict:")
        self.vtx_attrib_dict.dumpToDebugLog()

    def _readBuffers(self):
        dataOffs = self.fres.rlt.data_start + self.vtx_buf_offs
        self.buffers = []
        file = self.fres.file
        for i in range(self.num_bufs):
            n = i * 0x10
            size   = file.read('I', self.vtx_bufsize_offs    + n)
            stride = file.read('I', self.vtx_stridesize_offs + n)
            buf = Buffer(file, size, stride, dataOffs)
            self.buffers.append(buf)
            dataOffs += buf.size

    def _readAttrs(self):
        self.attrs = []
        self.attrsByName = {}
        for i in range(self.num_attrs):
            attr = Attribute().readFromFRES(self.fres,
                self.vtx_attrib_array_offs + (i * Attribute._reader.size))
            #log.debug("Attr: %s", attr)
            attr.fvtx = self
            self.attrs.append(attr)
            self.attrsByName[attr.name] = attr

    def _readVtxs(self):
        self.vtxs = []
        for i in range(self.num_vtxs):
            vtx = Vertex()
            for attr in self.attrs:
                buf  = self.buffers[attr.buf_idx]
                offs = attr.buf_offs + (i * buf.stride)
                fmt  = attrFmts.get(attr.format, None)
                if fmt is None:
                    log.error("Unsupported attribute data type: 0x%04X",
                        attr.format)
                    break
                func = None
                if type(fmt) is dict:
                    func = fmt.get('func', None)
                    fmt  = fmt['fmt']
                data = struct.unpack_from(fmt, buf.data, offs)
                if func:
                    data = func(data)
                #log.debug("vtx %d attr %s = %s", i, attr.name, data)
                vtx.setAttr(attr, data)
            self.vtxs.append(vtx)

    def validate(self):
        super().validate()
        return True
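
# _readVtxs expects each attrFmts entry to be either a plain struct format
# string or a dict with a 'fmt' string plus an optional 'func' decoder that
# post-processes the unpacked tuple. A purely hypothetical entry showing that
# shape; the real table, its format IDs, and its decoders live elsewhere in
# this codebase and may differ:
_example_attrFmts_entry = {
    0x0000: {  # placeholder format ID, not a real one
        'fmt':  '4B',                                         # four unsigned bytes
        'func': lambda vals: tuple(v / 255.0 for v in vals),  # normalize to 0..1
    },
}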
class FSKL(FresObject):
    """FSKL object header."""
    # offsets in this struct are relative to the beginning of
    # the FRES file.
    # I'm assuming they're 64-bit.
    _magic = b'FSKL'
    _reader = StructReader(
        ('4s', 'magic'),
        ('I', 'size'),
        ('I', 'size2'),
        Padding(4),
        Offset64('bone_idx_group_offs'),
        Offset64('bone_array_offs'),
        Offset64('smooth_idx_offs'),
        Offset64('smooth_mtx_offs'),
        Offset64('unk30'),
        Flags('flags', {
            #'SCALE_NONE': 0x00000000, # no scaling
            'SCALE_STD':  0x00000100, # standard scaling
            'SCALE_MAYA': 0x00000200, # Respects Maya's segment scale
                # compensation which offsets child bones rather than
                # scaling them with the parent.
            'SCALE_SOFTIMAGE': 0x00000300, # Respects the scaling method
                # of Softimage.
            'EULER': 0x00001000, # euler rotn, not quaternion
        }),
        ('H', 'num_bones'),
        ('H', 'num_smooth_idxs'),
        ('H', 'num_rigid_idxs'),
        ('H', 'num_extra'),
        ('I', 'unk44'),
        size=0x48,
    )

    def readFromFRES(self, fres, offset=None, reader=None):
        """Read the skeleton from given FRES."""
        super().readFromFRES(fres, offset, reader)
        self.dumpToDebugLog()
        self.dumpOffsets()

        scaleModes = ('none', 'standard', 'maya', 'softimage')
        log.debug(
            "Skeleton contains %d bones, %d smooth idxs, %d rigid idxs, %d extras; scale mode=%s, rotation=%s; smooth_mtx_offs=0x%X",
            self.num_bones, self.num_smooth_idxs, self.num_rigid_idxs,
            self.num_extra,
            scaleModes[(self.flags['_raw'] >> 8) & 3],
            'euler' if self.flags['EULER'] else 'quaternion',
            self.smooth_mtx_offs)

        self._readSmoothIdxs(fres)
        self._readSmoothMtxs(fres)
        self._readBones(fres)
        return self

    def _readBones(self, fres):
        self.bones = []
        self.bonesByName = {}
        self.boneIdxGroups = []
        offs = self.bone_array_offs
        for i in range(self.num_bones):
            b = Bone().readFromFRES(fres, offs)
            self.bones.append(b)
            if b.name in self.bonesByName:
                log.warn("Duplicate bone name '%s'", b.name)
            self.bonesByName[b.name] = b
            offs += Bone._reader.size

        # set parents
        for bone in self.bones:
            bone.fskl = self
            if bone.parent_idx >= len(self.bones):
                log.error("Bone %s has invalid parent_idx %d (max is %d)",
                    bone.name, bone.parent_idx, len(self.bones))
                bone.parent = None
            elif bone.parent_idx >= 0:
                bone.parent = self.bones[bone.parent_idx]
            else:
                bone.parent = None

        log.debug(
            "Skeleton: |Final Position |Final Rotation|Raw Position |Raw Rotation"
        )
        self.bones[0].printHeirarchy(self)
        #log.debug("Final bone transforms:")
        #for bone in self.bones:
        #    log.debug("%s\n%s", bone.name, bone.computeTransform())

        self.boneIdxGroups = Dict().readFromFile(self.fres.file,
            self.bone_idx_group_offs)

    def _readSmoothIdxs(self, fres):
        self.smooth_idxs = fres.read('h',
            pos=self.smooth_idx_offs, count=self.num_smooth_idxs)
        log.debug("Smooth idxs: %s", self.smooth_idxs)

    def _readSmoothMtxs(self, fres):
        """Read the smooth matrices."""
        self.smooth_mtxs = []
        for i in range(max(self.smooth_idxs)):
            mtx = fres.read('3f', count=4,
                pos=self.smooth_mtx_offs + (i * 16 * 3))

            # warn about invalid values
            for y in range(4):
                for x in range(3):
                    n = mtx[y][x]
                    if math.isnan(n) or math.isinf(n):
                        log.warning(
                            "Skeleton smooth mtx %d element [%d,%d] is %s",
                            i, x, y, n)

            # replace all invalid values with zeros
            flt = lambda e: \
                0 if (math.isnan(e) or math.isinf(e)) else e
            mtx = list(map(lambda row: list(map(flt, row)), mtx))
            #mtx[3][3] = 1 # debug
            mtx = Matrix(*mtx)

            # transpose
            #m = [[0,0,0,0], [0,0,0,0], [0,0,0,0], [0,0,0,0]]
            #for y in range(4):
            #    for x in range(4):
            #        m[x][y] = mtx[y][x]
            #mtx = m

            # log values to debug
            #log.debug("Inverse mtx %d:", i)
            #for y in range(4):
            #    log.debug(" %s", ' '.join(map(
            #        lambda v: '%+3.2f' % v, mtx[y])))
            self.smooth_mtxs.append(mtx)

    def validate(self):
        #for field in self._reader.fields.values():
        #    val = getattr(self, field['name'])
        #    if type(val) is int:
        #        log.debug("FMDL[%04X] %16s = 0x%08X", field['offset'],
        #            field['name'], val)
        #    else:
        #        log.debug("FMDL[%04X] %16s = %s", field['offset'],
        #            field['name'], val)
        super().validate()
        return True
class Node(BinaryObject):
    """AAMP node."""
    xmlns      = xmlns
    xmlnsmap   = xmlnsmap
    isListable = False
    _reader = StructReader(
        ('I', 'name_hash'),
        ('H', 'data_offset'),
        ('B', 'num_children'),
        ('B', 'data_type'),
    )

    def __init__(self, file=None):
        """Create new Node.

        file: File to read it from. (optional)
        """
        self.children = []
        self.data = None
        if file is not None:
            self.readFromFile(file)

    def readFromFile(self, file):
        """Read the node from the given file."""
        super().readFromFile(file)
        curPos = file.tell()
        offset = (self.data_offset * 4) - self._reader.size
        file.seek(offset, 1)
        if self.num_children > 0:
            for i in range(self.num_children):
                self.children.append(Node(file))
        elif hasattr(self, 'data_type'):
            self.data = read_aamp_type(file, self.data_type)
        else: # root node
            pass
        file.seek(curPos) # restore position
        self.name = getName(self.name_hash)

    def toXML(self, _depth=0):
        """Convert node to XML node object."""
        elem = ET.Element(self.name, nsmap=self.xmlnsmap)
        #elem.set('{'+self.xmlns+'}namehash',
        #    '%08X' % self.name_hash)
        if self.num_children > 0:
            for child in self.children:
                elem.append(child.toXML())
        elif hasattr(self, 'data_type'):
            elem.set('type', get_type_name(self.data_type))
            try:
                elem.text = str(self.data)
            except ValueError:
                log.error("Error writing string to XML: %s", self.data)
        else: # root node
            pass
        return elem

    def __str__(self):
        return "<AAMP node '%s' at 0x%x>" % (self.name, id(self))
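
# name_hash is, as far as I know, the CRC32 of the parameter's name; getName
# above presumably reverses it using a table of known names. A minimal sketch
# of building such a lookup table, assuming CRC32 hashing:
import zlib

def build_name_table(names):
    """Map CRC32 hashes back to known AAMP parameter names."""
    return {zlib.crc32(n.encode('utf-8')) & 0xFFFFFFFF: n for n in names}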