def main():
    """Dump Unity type-tree records from a raw type file to stdout.

    Command line:
        --file/-f    input file containing serialized type trees (required)
        --output/-o  directory for extracted type data (default '__types')

    Relies on module-level imports for ``os``, ``os.path as p``, ``uuid``,
    ``FileStream`` and ``MetadataTypeTree``.
    """
    import argparse
    import sys
    arguments = argparse.ArgumentParser()
    arguments.add_argument('--file', '-f', required=True)
    arguments.add_argument('--output', '-o', default='__types')
    options = arguments.parse_args(sys.argv[1:])
    output = p.abspath(options.output)
    if not p.exists(output):
        os.makedirs(output)
    MONO_BEHAVIOUR_PERSISTENT_ID = 114
    stream = FileStream(file_path=options.file)
    stream.endian = '<'  # type data is stored little-endian
    while stream.bytes_available:
        persistent_id = stream.read_uint32()
        # Null-hash sentinel for non-MonoBehaviour types. Fixed from
        # b'0' * 16 (sixteen ASCII '0' characters), which printed as the
        # bogus UUID 30303030-3030-... instead of the null UUID.
        script_hash = b'\x00' * 16
        if persistent_id == MONO_BEHAVIOUR_PERSISTENT_ID:
            # Only MonoBehaviour records carry a script hash on the wire.
            script_hash = stream.read(16)
        type_hash = stream.read(16)
        size = stream.read_uint32()
        offset = stream.position
        print(persistent_id, uuid.UUID(bytes=script_hash),
              uuid.UUID(bytes=type_hash), size)
        # Keyword argument for consistency with the other construction site.
        type_tree = MetadataTypeTree(type_tree_enabled=True)
        type_tree.persistent_type_id = persistent_id
        type_tree.type_hash = type_hash
        type_tree.mono_hash = script_hash
        type_tree.decode_type_tree(fs=stream)
        print(type_tree)
        # Sanity check: the decoded tree must consume exactly `size` bytes.
        assert stream.position == offset + size
def deserialize(self, fs: FileStream, meta_type: MetadataType):
    """Decode one serialized object described by *meta_type* from *fs*.

    Returns a dict keyed by field name. Arrays become
    ``{'size': n, 'data': ...}`` where ``data`` is raw bytes for
    byte-sized elements and a list otherwise. Returns ``{}`` when
    *meta_type* is falsy.

    NOTE(review): the source for this block was whitespace-collapsed;
    the placement of the per-element ``fs.align()`` calls inside the
    string/object array loops was reconstructed — confirm against the
    Unity serialization layout.
    """
    result = {}
    if not meta_type:
        return result
    # Maps a node index to the MetadataType used for nested objects.
    type_map = meta_type.type_tree.type_dict
    for n in range(len(meta_type.fields)):
        node = meta_type.fields[n]
        if node.is_array:
            # Array layout on the wire: the node two slots after the array
            # node describes the element type; a signed 32-bit count follows.
            element_type = meta_type.type_tree.nodes[node.index + 2]
            element_count = fs.read_sint32()
            array = result[node.name] = {'size': element_count}
            if element_count == 0:
                # Empty array: no payload and no 'data' key is emitted.
                continue
            if element_type.byte_size == 1:
                # Byte-sized elements are read as one raw chunk, then aligned.
                array['data'] = fs.read(
                    element_count) if element_count > 0 else b''
                fs.align()
            else:
                items = []
                if element_type.type in self.__premitive_decoders:
                    # Primitive elements: look up the decoder once, apply per
                    # element. (Attribute name 'premitive' is a pre-existing
                    # typo kept for name-mangling compatibility.)
                    decode = self.__premitive_decoders.get(
                        element_type.type)
                    for _ in range(element_count):
                        items.append(decode(fs))
                elif element_type.type == 'string':
                    # Strings: signed length prefix, raw bytes, then align.
                    for _ in range(element_count):
                        size = fs.read_sint32()
                        items.append(fs.read(size) if size > 0 else b'')
                        fs.align()
                else:
                    # Nested objects: recurse with the element's own type.
                    for m in range(element_count):
                        it = self.deserialize(fs, meta_type=type_map.get(
                            element_type.index))
                        items.append(it)
                        fs.align()
                array['data'] = items
        elif node.type == 'string':
            # Scalar string: signed length prefix, raw bytes, then align.
            size = fs.read_sint32()
            result[node.name] = fs.read(size) if size > 0 else b''
            fs.align()
        elif node.type in self.__premitive_decoders:
            result[node.name] = self.__premitive_decoders.get(
                node.type)(fs)
            # Meta flag 0x4000 marks fields that require post-read alignment.
            if node.meta_flags & 0x4000 != 0:
                fs.align()
        elif node.byte_size == 0:
            # Zero-sized field: nothing on the wire and no entry in result.
            continue
        else:
            # Nested object field: recurse using the node's own type entry.
            result[node.name] = self.deserialize(fs, meta_type=type_map.get(
                node.index))
    return result
def decode(self, file_path: str):
    """Open an asset bundle, inflate its storage blocks and return the
    reassembled payload as a new in-memory FileStream.

    Also writes the concatenated payload to 'data.bin' for inspection.
    """
    import io
    fs = FileStream()
    fs.open(file_path)
    self.header.decode(fs)
    blocks_info_offset = self.header.get_blocks_info_offset()
    self.print(vars(self.header), blocks_info_offset,
               fs.position, self.header.compression_type)
    fs.seek(blocks_info_offset)
    if self.header.compression_type == CompressionType.NONE:
        # Uncompressed blocks info is read straight from the bundle stream.
        assert self.header.compressed_blocks_info_size == self.header.uncompressed_blocks_info_size
        self.read_blocks_and_directory(fs)
    else:
        raw = fs.read(self.header.compressed_blocks_info_size)
        assert len(raw) == self.header.compressed_blocks_info_size
        inflated = lz4.block.decompress(
            raw, self.header.uncompressed_blocks_info_size)
        self.read_blocks_and_directory(FileStream(data=inflated))
    # Concatenate every storage block into a single payload buffer.
    payload = io.BytesIO()
    for block in self.blocks_info.blocks:
        if block.compression_type == CompressionType.NONE:
            payload.write(fs.read(block.uncompressed_size))
        else:
            chunk = lz4.block.decompress(
                fs.read(block.compressed_size), block.uncompressed_size)
            assert len(chunk) == block.uncompressed_size, chunk
            payload.write(chunk)
    # The block table must account for every byte of the bundle file.
    assert fs.position == fs.length
    data = payload.getvalue()
    with open('data.bin', 'wb') as fp:
        fp.write(data)
    return FileStream(data=data)
def decode(self, fs: FileStream):
    """Read one type-tree record header; load the node data either inline
    from *fs* or, when the file ships no trees, from the on-disk cache."""
    offset = fs.position
    self.persistent_type_id = fs.read_sint32()
    self.is_stripped = fs.read_boolean()
    self.script_index = fs.read_sint16()
    # MonoBehaviour records carry an extra 16-byte script hash.
    if self.persistent_type_id == MONO_BEHAVIOUR_PERSISTENT_ID:
        self.mono_hash = fs.read(16)
    self.type_hash = fs.read(16)
    self.nodes = []
    self.strings = {}
    if self.type_tree_enabled:
        # Tree data is embedded right after the header.
        self.decode_type_tree(fs)
        return
    # No embedded tree: fall back to a previously cached copy, if any.
    cache_path = self.get_cache_path()
    if not p.exists(cache_path):
        return
    cached = FileStream(file_path=cache_path)
    cached.endian = '<'
    cached_id = cached.read_sint32()
    assert cached_id == self.persistent_type_id, '{} != {}'.format(
        cached_id, self.persistent_type_id)
    # Skip the same number of header bytes we just consumed from *fs*.
    cached.seek(fs.position - offset)
    self.decode_type_tree(fs=cached)
def decode(self, fs: FileStream):
    """Parse a Unity serialized-file: header, type trees, object table,
    script-type infos and external references, in that strict wire order.

    Side effects: caches each embedded type tree's raw bytes to disk via
    ``get_cache_path``, and appends to ``self.type_trees``, ``self.objects``,
    ``self.typeinfos`` and ``self.externals``.

    NOTE(review): the source for this block was whitespace-collapsed; the
    final ``fs.read_string()`` was reconstructed as sitting AFTER the
    externals loop (a trailing string field) — confirm against the format.
    """
    fs.seek(self.node.offset)
    header = self.header
    header.metadata_size = fs.read_sint32()
    header.file_size = fs.read_sint32()
    # The directory-node size must match the header's own file size.
    assert self.node.size == header.file_size, '{} != {}'.format(
        self.node.size, header.file_size)
    header.version = fs.read_sint32()
    header.data_offset = fs.read_sint32()
    header.endianess = fs.read_boolean()
    fs.read(3)  # reserved bytes
    # Everything after the header honours the declared byte order.
    fs.endian = '>' if header.endianess else '<'
    self.print(vars(header))
    self.version = fs.read_string()
    self.platform = fs.read_uint32()
    self.type_tree_enabled = fs.read_boolean()
    self.print('version={} platform={} type_tree_enabled={}'.format(
        self.version, self.platform, self.type_tree_enabled))
    self.type_trees = []
    type_count = fs.read_uint32()
    self.print('type', type_count)
    for _ in range(type_count):
        offset = fs.position
        type_tree = MetadataTypeTree(
            type_tree_enabled=self.type_tree_enabled)
        type_tree.decode(fs)
        if self.type_tree_enabled:
            # Snapshot the raw bytes of this tree and persist them so files
            # without embedded trees can reuse them later (see L4 decode).
            position = fs.position
            fs.seek(offset)
            type_data = fs.read(position - offset)
            with open(type_tree.get_cache_path(auto_create=True), 'wb') as fp:
                fp.write(type_data)
        self.type_trees.append(type_tree)
        self.register_type_tree(type_tree=type_tree)
        self.print(type_tree)
    object_count = fs.read_sint32()
    self.print('object', object_count)
    for _ in range(object_count):
        # Object entries are 4-byte aligned within the metadata section.
        fs.align(4)
        obj = ObjectInfo()
        obj.decode(fs)
        # type_id indexes into the type-tree table read above.
        type_tree = self.type_trees[obj.type_id]
        obj.name = type_tree.name
        self.objects.append(obj)
        self.print(vars(obj))
    script_type_count = fs.read_sint32()
    self.print('typeinfo', script_type_count)
    for _ in range(script_type_count):
        st = ScriptTypeInfo()
        st.decode(fs)
        self.typeinfos.append(st)
        self.print(vars(st))
    external_count = fs.read_sint32()
    self.print('external', external_count)
    for _ in range(external_count):
        ext = ExternalInfo()
        ext.decode(fs)
        self.externals.append(ext)
        self.print(ext)
    # Trailing string field; value is not used.
    fs.read_string()
def decode(self, fs: FileStream):
    """Populate this external reference from the stream.

    Wire layout: a leading string (discarded), a 16-byte GUID, a signed
    32-bit type id, and the referenced asset path.
    """
    _ = fs.read_string()  # leading name field; not retained
    self.guid = fs.read(16)
    self.type = fs.read_sint32()
    self.path = fs.read_string()
def decode(self, fs: FileStream):
    """Read the storage-block table: a 16-byte hash of the uncompressed
    data followed by a uint32-counted list of block entries."""
    self.uncompressed_data_hash = fs.read(16)
    entry_count = fs.read_uint32()
    for _ in range(entry_count):
        entry = StorageBlock()
        entry.decode(fs)
        self.blocks.append(entry)