def deserialize(self, fs: FileStream, meta_type: MetadataType):
    """Read one object from *fs* following the field layout described by *meta_type*.

    Returns a dict keyed by field name. Array fields map to
    ``{'size': count, 'data': ...}``; nested struct fields recurse into
    sub-dicts. An empty dict is returned when *meta_type* is falsy.
    """
    result = {}
    if not meta_type:
        return result
    type_map = meta_type.type_tree.type_dict
    for node in meta_type.fields:
        if node.is_array:
            # The element-type node sits two slots past the array node
            # (presumably the node in between is the array's size field —
            # TODO confirm against the type-tree layout).
            element_type = meta_type.type_tree.nodes[node.index + 2]
            count = fs.read_sint32()
            array = {'size': count}
            result[node.name] = array
            if count == 0:
                continue
            if element_type.byte_size == 1:
                # Single-byte elements are read as one raw blob, then aligned.
                array['data'] = fs.read(count) if count > 0 else b''
                fs.align()
                continue
            if element_type.type in self.__premitive_decoders:
                read_element = self.__premitive_decoders.get(element_type.type)
                items = [read_element(fs) for _ in range(count)]
            elif element_type.type == 'string':
                items = []
                for _ in range(count):
                    length = fs.read_sint32()
                    items.append(fs.read(length) if length > 0 else b'')
                    fs.align()  # each string is padded to the alignment boundary
            else:
                # Array of nested structs: recurse per element, align once afterwards.
                element_meta = type_map.get(element_type.index)
                items = [self.deserialize(fs, meta_type=element_meta)
                         for _ in range(count)]
                fs.align()
            array['data'] = items
        elif node.type == 'string':
            length = fs.read_sint32()
            result[node.name] = fs.read(length) if length > 0 else b''
            fs.align()
        elif node.type in self.__premitive_decoders:
            result[node.name] = self.__premitive_decoders.get(node.type)(fs)
            if node.meta_flags & 0x4000:  # alignment flag set on this field
                fs.align()
        elif node.byte_size == 0:
            # Zero-sized field: nothing to read from the stream.
            continue
        else:
            # Nested struct field: recurse with the field's own metadata.
            result[node.name] = self.deserialize(
                fs, meta_type=type_map.get(node.index))
    return result
def decode(self, fs: FileStream) -> None:
    """Decode a serialized asset file from *fs*.

    Reads, in order: the file header, the type-tree table, the object
    table, the script-type table and the external-reference table,
    populating ``self.header``, ``self.version``, ``self.platform``,
    ``self.type_trees``, ``self.objects``, ``self.typeinfos`` and
    ``self.externals``. When type trees are enabled, each tree's raw
    bytes are also written to its cache file on disk.
    """
    fs.seek(self.node.offset)  # jump to this node's payload in the stream
    header = self.header
    header.metadata_size = fs.read_sint32()
    header.file_size = fs.read_sint32()
    # NOTE(review): `assert` is stripped under `python -O`; an explicit
    # raise would be more robust, but callers may rely on AssertionError.
    assert self.node.size == header.file_size, '{} != {}'.format(
        self.node.size, header.file_size)
    header.version = fs.read_sint32()
    header.data_offset = fs.read_sint32()
    header.endianess = fs.read_boolean()
    fs.read(3)  # reserved bytes
    # All subsequent multi-byte reads honour the endianness flag from the header.
    fs.endian = '>' if header.endianess else '<'
    self.print(vars(header))
    self.version = fs.read_string()
    self.platform = fs.read_uint32()
    self.type_tree_enabled = fs.read_boolean()
    self.print('version={} platform={} type_tree_enabled={}'.format(
        self.version, self.platform, self.type_tree_enabled))
    # --- type-tree table ---
    self.type_trees = []
    type_count = fs.read_uint32()
    self.print('type', type_count)
    for _ in range(type_count):
        offset = fs.position  # remember where this tree's raw bytes begin
        type_tree = MetadataTypeTree(
            type_tree_enabled=self.type_tree_enabled)
        type_tree.decode(fs)
        if self.type_tree_enabled:
            # Re-read the span just decoded and persist it to the tree's cache file.
            position = fs.position
            fs.seek(offset)
            type_data = fs.read(position - offset)
            with open(type_tree.get_cache_path(auto_create=True), 'wb') as fp:
                fp.write(type_data)
        self.type_trees.append(type_tree)
        self.register_type_tree(type_tree=type_tree)
        self.print(type_tree)
    # --- object table ---
    object_count = fs.read_sint32()
    self.print('object', object_count)
    for _ in range(object_count):
        fs.align(4)  # object records are 4-byte aligned
        obj = ObjectInfo()
        obj.decode(fs)
        # type_id indexes into the type-tree table decoded above.
        type_tree = self.type_trees[obj.type_id]
        obj.name = type_tree.name
        self.objects.append(obj)
        self.print(vars(obj))
    # --- script-type table ---
    script_type_count = fs.read_sint32()
    self.print('typeinfo', script_type_count)
    for _ in range(script_type_count):
        st = ScriptTypeInfo()
        st.decode(fs)
        self.typeinfos.append(st)
        self.print(vars(st))
    # --- external-reference table ---
    external_count = fs.read_sint32()
    self.print('external', external_count)
    for _ in range(external_count):
        ext = ExternalInfo()
        ext.decode(fs)
        self.externals.append(ext)
        self.print(ext)
    # Trailing string is consumed but discarded — purpose unclear from here.
    fs.read_string()
def decode(self, fs: FileStream) -> None:
    """Read a local object reference from *fs*: the index of a serialized
    file plus the object's identifier within that file."""
    self.local_serialized_file_index = fs.read_sint32()
    fs.align(4)  # the identifier that follows is 4-byte aligned in the stream
    self.local_identifier_in_file = fs.read_sint64()