def _out_one(self, build_look_map, is_force): edit_dict = {} # load old sheets if not is_force and os.path.isfile(self._ulo_path): ba = Bit.ByteArray() with open(self._ulo_path, 'rb') as fp: ba.init_buffer(fp.read()) nn = ba.read_u16() for i in range(nn): excel_name, sheet_name = Text.unpack(ba.read_utf8(), '.') sheet_buf = ba.read_bytes(ba.read_u32()) book_look_map = build_look_map.get(excel_name) if not book_look_map: continue # 这边被剔除了 new_sheet_dic = Collect.gen_dict(edit_dict, excel_name) if book_look_map == _Fmt.NO_DIFF_FLAG: new_sheet_dic[sheet_name] = sheet_buf # 这个excel没有变更,全部保留。 continue if sheet_name in book_look_map: new_sheet_dic[sheet_name] = sheet_buf # 只保留记录过的表 # merge sheets for excel_name, build_dict in self._new_dict.items(): Collect.gen_dict(edit_dict, excel_name).update(build_dict) # output sheets sheet_nums = 0 if edit_dict: ba = Bit.ByteArray().init_capacity(1024 * 1024 * 8) ba.set_position(2) for excel_name, sheet_dict in edit_dict.items(): sheet_nums += self._build_buf(excel_name, sheet_dict, ba) # log.i('write:', sheet_nums, excel_name, sheet_name) ba.set_position(0).write_u16(sheet_nums) # 写入表的数量 with open(self._ulo_path, 'wb') as fp: fp.write(ba.slim_buffer()) return sheet_nums
def split_res(self):
    """Split a packed 'patch.was' archive into individual files under
    ``self.odir``.

    Record layout (inferred from the reads below — confirm against the
    packer): 32-byte archive header, then per entry 4 skipped bytes, a u32
    file size, 20 more skipped bytes, a u32 name size, the UTF-16-ish name
    (``Text.unicodes2str``) plus one terminator byte, then the file bytes.
    Entries ending in '.luac' are routed to 'src/', everything else to 'res/'.
    """
    # with open(os.path.join(self.idir, 'package.was'), 'rb') as fp:
    with open(os.path.join(self.idir, 'patch.was'), 'rb') as fp:
        buffer = fp.read()
    buffer = buffer[32:]  # drop the fixed-size archive header
    e = len(buffer)
    p = 0
    while p < e:
        p += 4
        b = buffer[p:p + 4]
        fs = Bit.u32_from(b)  # file size
        p += 24
        b = buffer[p:p + 4]
        ns = Bit.u32_from(b)  # name size in bytes
        p += 4
        fn = Text.unicodes2str(buffer[p:p + ns])
        p += ns + 1  # +1 skips the name terminator
        if fn.endswith('.luac'):
            fn = 'src/' + fn
        else:
            fn = 'res/' + fn
        print(p, fs, fn)
        fb = buffer[p:p + fs]
        p += fs
        # fix: use a distinct name for the output path instead of shadowing
        # the file-handle variable 'fp'
        out_path = os.path.join(self.odir, fn)
        FS.make_parent(out_path)
        with open(out_path, 'wb') as fp:
            fp.write(fb)
    print(p, e)
def encode_shader(shader):
    """XOR-obfuscate *shader* with the key ``b'shader'`` and print the result
    as a C array literal, followed by a decode round-trip for verification.

    Printed layout: hex of the key bytes, 4-byte BKDR hash of the source,
    4-byte encrypted length, then the encrypted bytes ('0x..' tokens, 32 per
    line).

    :param shader: shader source text to obfuscate.
    """
    shader_size = len(shader)
    shader_hash = Math.hash_bkdr(shader)
    print(shader_size, shader_hash)
    print('-------------------------------------------')
    print(shader)
    print('-------------------------------------------')
    encrypt_keys = b'shader'
    encrypt_klen = len(encrypt_keys)
    parts, array = [], []
    for i in range(shader_size):
        s = '0x%x' % (ord(shader[i]) ^ encrypt_keys[i % encrypt_klen])
        array.append(s)
        parts.append(s + ',')
        if (i + 1) % 32 == 0:
            parts.append('\n')
    encrypt_size = len(array)
    # join once instead of the original quadratic '+=' accumulation
    hexes = ''.join(parts)
    hexes = Bit.bytes2hex(encrypt_keys) + Bit.bytes2hex(
        Bit.u32_bytes(shader_hash)) + Bit.bytes2hex(
            Bit.u32_bytes(encrypt_size)) + '\n' + hexes[0:-1]
    # header overhead = key bytes + 4-byte hash + 4-byte length
    # (was a hard-coded 14, which silently assumed the 6-byte key)
    encrypt_size += encrypt_klen + 8
    print(('[%d]={\n' % encrypt_size) + hexes + '\n};')
    print('-------------------------------------------')
    # decode round-trip to verify the emitted table
    shader = ''.join(chr(int(c, 16)) for c in array)
    print(shader)
    print('-------------------------------------------')
def laya_sk(skpath, imagepath=None, outputdir=None):
    """Read a LayaAir .sk skeleton file and crop every texture frame it
    references out of its atlas image(s) into individual PNG files.

    :param skpath: path to the .sk file; atlas images are resolved relative
        to its directory.
    :param imagepath: unused in this function — TODO confirm whether any
        caller still depends on it before removing.
    :param outputdir: output directory; defaults to '<skname>_images' next to
        the .sk file.
    """
    from jonlin.utils import Bit
    with open(skpath, 'rb') as fp:
        ba = Bit.ByteArray().init_buffer(fp.read())
    # header fields; read mainly to advance the stream to the data offsets
    ani_version = ba.read_utf8()
    ani_classname = ba.read_utf8()
    ani_names = ba.read_utf8()
    ani_count = ba.read_u8()
    pub_data_pos = ba.read_u32()
    ext_data_pos = ba.read_u32()
    # print(pub_data_pos, ext_data_pos)
    ba.set_position(ext_data_pos)
    ext_buffer = ba.read_bytes(ba.get_available())
    ba = Bit.ByteArray().init_buffer(ext_buffer)
    tex_count = ba.read_int()
    # name table alternates: atlas image file, frame name, atlas, frame, ...
    tex_array = ba.read_utf8().split('\n')
    tex_books = {}   # frame name -> atlas image file
    tex_frames = []  # (name, x, y, w, h, fx, fy, fw, fh) per frame
    for i in range(tex_count):
        tex_name = tex_array[i * 2 + 1]
        tex_books[tex_name] = tex_array[i * 2]
        x = ba.read_float()
        y = ba.read_float()
        w = ba.read_float()
        h = ba.read_float()
        # fx/fy/fw/fh look like the untrimmed frame rect — unused below
        fx = ba.read_float()
        fy = ba.read_float()
        fw = ba.read_float()
        fh = ba.read_float()
        # print(tex_name, x, y, w, h, fx, fy, fw, fh)
        tex_frames.append((tex_name, x, y, w, h, fx, fy, fw, fh))
    # crop images
    atlas_root = os.path.dirname(skpath)
    if outputdir is None:
        imagesdir = os.path.join(atlas_root, FS.filename(skpath) + '_images')
    else:
        imagesdir = outputdir
    # if not os.path.isdir(imagesdir):
    #     os.makedirs(imagesdir)
    # open each atlas image once, shared across all its frames
    image_map = {}
    for src in set(tex_books.values()):
        image_map[src] = Image.open(os.path.join(atlas_root, src))
    for frame in tex_frames:
        tex_name = frame[0]
        x = frame[1]
        y = frame[2]
        w = frame[3]
        h = frame[4]
        rect = (x, y, x + w, y + h)
        image = image_map[tex_books[tex_name]]
        item_img = image.crop(rect)
        item_src = os.path.join(imagesdir, tex_name + '.png')
        FS.make_parent(item_src)
        item_img.save(item_src)
def _gen_header(self, filenum):
    """Build the archive header bytes.

    Layout: encryption record (a single 0 byte when no blurer is set,
    otherwise whatever ``self._blurer.submit`` appends), u16 hash seed,
    u16 file count, then per file u32 hashcode + u32 position + u32 size.

    :param filenum: number of entries in ``self._hashmap``.
    :return: the header as a ``bytearray``.
    """
    header = bytearray()
    if self._blurer is None:
        header.append(0)  # key length 0 => unencrypted archive
    else:
        self._blurer.submit(header)
    header.extend(Bit.u16_bytes(self._hseed))
    header.extend(Bit.u16_bytes(filenum))
    # idiom fix: iterate items() instead of keys + repeated lookup
    for hashcode, info in self._hashmap.items():
        header.extend(Bit.u32_bytes(hashcode))
        header.extend(Bit.u32_bytes(info[0]))  # data position
        header.extend(Bit.u32_bytes(info[1]))  # data size
    return header
def _gen_buffer(self, sheet_obj, index_info, sheet_area, data_cols, errors):
    """Serialize one sheet into a binary buffer.

    Layout: 1 byte list-table flag, 1 byte index value type, u8 column
    count, per column (utf8 key + 1 byte value type), u16 row count, then
    per row the index value (map tables only) followed by each cell value.

    :param sheet_obj: workbook sheet; only ``cell_value(row, col)`` is used.
    :param index_info: row index metadata (``amount``, ``row_ns``,
        ``values``, ``let``) — presumably produced by the sheet scanner;
        confirm against the caller.
    :param sheet_area: provides ``header.keys`` / ``header.lets``; entry 0
        is the index column and is skipped here.
    :param data_cols: sheet column numbers for the data columns.
    :param errors: list that collects ``_CellError`` instances; bad cells
        are recorded and skipped rather than aborting the build.
    :return: the trimmed buffer bytes.
    """
    data_ncol = len(data_cols)
    data_keys = sheet_area.header.keys[1:]
    data_lets = sheet_area.header.lets[1:]
    is_list_t = _Lua2.is_list_table(index_info)
    # write to buffer
    ba = Bit.ByteArray().init_capacity(index_info.amount * data_ncol * 256)
    if is_list_t:
        ba.write_byte(1)
    else:
        ba.write_byte(0)
    ba.write_byte(self._ulo_type[index_info.let])  # write index let type
    ba.write_u8(data_ncol)
    for n in range(data_ncol):
        self._write_string(ba, data_keys[n])
        ba.write_byte(self._ulo_type[data_lets[n]])  # write key let type
    ba.write_u16(index_info.amount)
    for m in range(index_info.amount):
        r = index_info.row_ns[m]
        if not is_list_t:
            # map tables carry an explicit index value per row
            try:
                self._write_cell(ba, index_info.values[m], index_info.let)
            except _CellError as err:
                errors.append(err)
        for n in range(data_ncol):
            t = data_lets[n]
            try:
                v = _Fmt.guess_value(sheet_obj.cell_value(r, data_cols[n]), t)
                self._write_cell(ba, v, t)
            except _CellError as err:
                errors.append(err)
    return ba.slim_buffer()
def pack_all(self, source):
    """Serialize every sheet in *source* into one buffer.

    Layout: u16 sheet count, then per sheet its utf8 name, a u32 payload
    size, and the payload produced by ``pack_one``.

    :param source: mapping of sheet-info objects (with a ``name``) to their
        key lists.
    :return: the trimmed buffer bytes.
    """
    out = Bit.ByteArray().init_capacity(1024 * 1024 * 2)
    out.write_u16(len(source))  # 写入表的数量
    for sheet_info, key_list in source.items():
        payload = self.pack_one(sheet_info, key_list)
        payload_size = len(payload)
        out.write_utf8(sheet_info.name)
        out.write_u32(payload_size)
        out.write_bytes(payload, payload_size)
    return out.slim_buffer()
def _out_pkg(self, build_look_map):
    """Write one '<excel>.ulo' file per newly built excel into the
    ``self._ulo_path`` directory, then prune files for excels that are no
    longer part of the build.

    :param build_look_map: excel names that remain valid; any file in the
        directory whose stem is not a key here gets deleted.
        NOTE(review): this removes ANY file with an unknown stem, not just
        .ulo files — confirm the directory holds nothing else.
    :return: total sheet count across all surviving .ulo files.
    """
    if not os.path.isdir(self._ulo_path):
        os.makedirs(self._ulo_path)
    for excel_name, sheet_dict in self._new_dict.items():
        ba = Bit.ByteArray().init_capacity(1024 * 1024 * 8)
        ba.set_position(2)  # reserve 2 bytes for the sheet count
        sheet_nums = self._build_buf(excel_name, sheet_dict, ba)
        ba.set_position(0).write_u16(sheet_nums)  # write the sheet count
        with open(os.path.join(self._ulo_path, excel_name + '.ulo'), 'wb') as fp:
            fp.write(ba.slim_buffer())
    total_nums = 0
    for name in os.listdir(self._ulo_path):
        ulofile = os.path.join(self._ulo_path, name)
        if FS.filename(name) not in build_look_map:
            log.w('删除表文件:', ulofile)
            os.remove(ulofile)
        else:
            # the leading u16 of each file is its sheet count
            with open(ulofile, 'rb') as fp:
                total_nums += Bit.u16_from(fp.read(2))
    return total_nums
def _read_header(self):
    """Parse the archive header from ``self._fp`` and populate
    ``self._parser`` (when encrypted), ``self._hseed`` and
    ``self._hashmap`` (tag -> (position, size)).

    The first u32 of the stream is the absolute offset of the header.
    """
    offset = Bit.u32_from(self._fp.read(4))
    self._fp.seek(offset)
    # read the encryption key; a key length of 0 means unencrypted
    keylen = self._fp.read(1)[0]
    if keylen > 0:
        self._parser = self.Parser(keylen)
        self._parser.load(self._fp.read(keylen + 4))  # key bytes + u32 let
    # read the hash seed and the file count
    buffer = self._fp.read(4)
    self._hseed = Bit.u16_from(buffer)
    filenum = Bit.u16_from(buffer[2:])
    # read the file table
    buffer = self._fp.read(filenum * 12)  # tag+pos+len=12
    offset = 0
    self._hashmap = {}
    for i in range(filenum):
        tag = Bit.u32_from(buffer[offset:offset + 4])
        offset += 4
        pos = Bit.u32_from(buffer[offset:offset + 4])
        offset += 4
        size = Bit.u32_from(buffer[offset:offset + 4])
        offset += 4
        self._hashmap[tag] = (pos, size)
    log.d('unpack bale files:', filenum)
def pack_one(self, si, keys):
    """Serialize a single sheet into a binary buffer.

    Layout: u16 row count, u8 column count, then per column its utf8 field
    name, a u8 type id, and the column's cell values written row by row
    with the type-specific writer (array types go through ``_write_json``).

    :param si: sheet info with ``datas`` (rows) and ``types`` (per column).
    :param keys: key objects carrying the column index ``i`` and name ``k``.
    :return: the trimmed buffer bytes.
    """
    ncols = len(keys)
    nrows = len(si.datas)
    buf = Bit.ByteArray().init_capacity(ncols * nrows * 32)
    buf.write_u16(nrows)
    buf.write_u8(ncols)
    for key in keys:
        col = key.i
        vtype = si.types[col]
        writer = self._write_json if vtype.endswith('[]') else self.writer_dict[vtype]
        buf.write_utf8(key.k)  # 字段
        buf.write_u8(self.itypes[vtype])  # 类型
        for row in si.datas:
            writer(buf, row[col])
    return buf.slim_buffer()
def _build(self, source, fillmb):
    """Assemble the asset archive: magic, placeholder header offset, the
    file payloads, then the real header, patching the u32 offset in place.

    :param source: directory whose files (from ``_load_taglist``) are packed.
    :param fillmb: when > 0, a Filler pads the stream — presumably to
        obfuscate or pad the archive size; confirm against AssetBuilder.Filler.
    """
    taglist = self._load_taglist(source)
    filenum = len(taglist)
    # the header stores the file count as a u16
    assert filenum <= 0xFFFF, 'file nums exceeds %u' % 0xFFFF
    self._hashmap = {}
    self._fp.write(self._magic)
    self._fp.write(b'0000')  # 4-byte placeholder for the header offset
    if fillmb > 0:
        self._filler = AssetBuilder.Filler(fillmb, filenum + 2)
        self._filler.consume(self._fp)
    for tag in taglist:
        self._add_file(tag, os.path.join(source, tag))
    offset = self._fp.tell()
    print('header offset:', offset)
    # back-patch the placeholder with the real header offset
    self._fp.seek(len(self._magic))
    self._fp.write(Bit.u32_bytes(offset))
    self._fp.seek(offset)
    self._fp.write(self._gen_header(filenum))
    if self._filler is not None:
        self._filler.consume(self._fp)
def load(self, buffer):
    """Restore the key material from *buffer*: the first ``self._keylen``
    bytes are the key, the remainder is the u32 'let' value."""
    split = self._keylen
    self._key = buffer[:split]
    self._let = Bit.u32_from(buffer[split:])
def submit(self, buffer):
    """Append the encryption record to the resource-pack header buffer:
    a 1-byte key length, the key bytes, then the 4-byte 'let' value."""
    record = bytearray()
    record.append(self._keylen)
    record.extend(self._key)
    record.extend(Bit.u32_bytes(self._let))
    buffer.extend(record)