def readRotCurve(f, startoff, type, constant):
    if constant:
        const = f.readUShort()
        unk = f.readUShort()
        common.logDebug(" constant curve", type, const, unk)
    else:
        readSampledCurve(f, startoff, type, readSampledRotCurve)
def extract(file, outfolder, guessextension=None):
    common.logDebug("Processing", file, "...")
    common.makeFolder(outfolder)
    cpk = readCPK(file)
    if cpk is None:
        common.logError("Error reading CPK")
        return
    if len(cpk.filetable) == 0:
        common.logError("No files in CPK filetable")
        return
    with common.Stream(file, "rb") as f:
        for entry in cpk.filetable:
            if entry.filetype != "FILE":
                continue
            folder, filename = entry.getFolderFile(outfolder)
            f.seek(entry.fileoffset)
            data = f.read(entry.filesize)
            f.seek(entry.fileoffset)
            checkcomp = f.readString(8)
            if checkcomp == "CRILAYLA":
                extractsize = entry.extractsize if entry.extractsize != 0 else entry.filesize
                if extractsize != 0:
                    data = compression.decompressCRILAYLA(f, entry.fileoffset)
            if guessextension is not None:
                filename = guessextension(data, entry, filename)
            if not os.path.isdir(folder):
                common.makeFolders(folder)
            with common.Stream(folder + filename, "wb") as fout:
                fout.write(data)
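# Hedged usage sketch (not from the original sources; the path and callback are
# hypothetical): extract a CPK archive, optionally renaming entries based on
# their decompressed data via the guessextension callback.
def _extractCPKExample():
    def guessext(data, entry, filename):
        # Illustrative only: keep the name stored in the TOC.
        return filename
    extract("data/GAME.cpk", "data/extract_CPK/", guessext)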
def repackBinaryStrings(elf, section, infile, outfile, readfunc, writefunc, encoding="shift_jis", elfsections=[".rodata"]):
    with common.Stream(infile, "rb") as fi:
        with common.Stream(outfile, "r+b") as fo:
            for sectionname in elfsections:
                rodata = elf.sectionsdict[sectionname]
                fi.seek(rodata.offset)
                while fi.tell() < rodata.offset + rodata.size:
                    pos = fi.tell()
                    check = readfunc(fi, encoding)
                    if check != "":
                        if check in section and section[check][0] != "":
                            common.logDebug("Replacing string", check, "at", common.toHex(pos), "with", section[check][0])
                            fo.seek(pos)
                            endpos = fi.tell() - 1
                            newlen = writefunc(fo, section[check][0], endpos - pos + 1)
                            if newlen < 0:
                                fo.writeZero(1)
                                common.logError("String", section[check][0], "is too long.")
                            else:
                                fo.writeZero(endpos - fo.tell())
                        else:
                            pos = fi.tell() - 1
                    fi.seek(pos + 1)
def readScaleCurve(f, startoff, type, constant):
    if constant:
        const = readFP(f)
        unk = readFP(f)
        common.logDebug(" constant curve", type, const, unk)
    else:
        readSampledCurve(f, startoff, type, readSampledScaleCurve)
def run():
    infile = "data/extract/arm9.bin"
    outfile = "data/bin_output.txt"
    # Set to False to analyze the whole file
    limit = True
    common.logMessage("Extracting BIN to", outfile, "...")
    with codecs.open(outfile, "w", "utf-8") as out:
        with common.Stream(infile, "rb") as f:
            # Skip the beginning and end of the file to avoid false-positives
            f.seek(992000 if limit else 900000)
            foundstrings = []
            while f.tell() < 1180000:
                pos = f.tell()
                if not limit or pos < 1010000 or pos > 1107700:
                    check = game.detectShiftJIS(f)
                    # Save the string if we detected one
                    if check != "":
                        if check not in foundstrings:
                            common.logDebug("Found string at", pos)
                            foundstrings.append(check)
                            out.write(check + "=\n")
                        pos = f.tell() - 1
                f.seek(pos + 1)
    common.logMessage("Done! Extracted", len(foundstrings), "lines")
def readMappedImage(f, outfile, mapstart=0, num=1, bpp=2, width=0, height=0):
    f.seek(mapstart)
    maps = []
    for j in range(num):
        map = TileMap()
        if num > 1:
            map.name = outfile.replace(".png", "_" + str(j + 1).zfill(2) + ".png")
        else:
            map.name = outfile
        map.offset = f.tell()
        map.width = width
        if map.width == 0:
            map.width = f.readByte()
        map.height = height
        if map.height == 0:
            map.height = f.readByte()
        map.bpp = bpp
        common.logDebug(" ", mapstart, vars(map))
        map.map = []
        for i in range(map.width * map.height):
            tilemap = TileMap()
            tilemap.data = f.readUShort()
            tilemap.tile = tilemap.data & 0x1ff
            tilemap.pal = (tilemap.data >> 9) & 0xf
            tilemap.bank = (tilemap.data >> 13) & 1
            if tilemap.bank != 0 and bpp == 2:
                common.logError("Bank is not 0")
            tilemap.hflip = ((tilemap.data >> 14) & 1) == 1
            tilemap.vflip = ((tilemap.data >> 15) & 1) == 1
            map.map.append(tilemap)
        maps.append(map)
    common.logDebug("Map data ended at", common.toHex(f.tell()))
    return maps
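# Worked example of the 16-bit map entry layout decoded by readMappedImage above:
# 0x1205 -> tile 5 (bits 0-8), palette 9 (bits 9-12), bank 0 (bit 13), no flips
# (bits 14-15). The value is illustrative.
_entry = 0x1205
assert _entry & 0x1ff == 5
assert (_entry >> 9) & 0xf == 9
assert (_entry >> 13) & 1 == 0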
def repackFontData(infile, outfile, datafile):
    common.logMessage("Repacking font data from", datafile, "...")
    common.copyFile(infile, outfile)
    glyphs = getFontGlyphs(infile)
    with codecs.open(datafile, "r", "utf-8") as f:
        section = common.getSection(f, "")
        if len(section) == 0:
            return
    with common.Stream(outfile, "rb+", False) as f:
        # Header
        f.seek(36)
        hdwcoffset = f.readUInt()
        # HDWC
        f.seek(hdwcoffset - 4)
        hdwclen = f.readUInt()
        tilenum = (hdwclen - 16) // 3
        f.seek(8, 1)
        for i in range(tilenum):
            found = False
            for glyph in glyphs.values():
                if glyph.index == i:
                    sectionglyph = glyph.char if glyph.char != "=" else "<3D>"
                    if sectionglyph in section:
                        common.logDebug("Writing", section[sectionglyph][0], "at", f.tell())
                        fontdata = section[sectionglyph][0].split(",")
                        f.writeSByte(int(fontdata[0]))
                        f.writeByte(int(fontdata[1]))
                        f.writeByte(int(fontdata[2]))
                        found = True
                    break
            if not found:
                f.seek(3, 1)
    common.logMessage("Done!")
def run(data, allfile=False):
    infolder = data + "extract/"
    outfile = data + "bin_output.txt"
    with codecs.open(data + "table_input.txt", "r", "utf-8") as tablef:
        table = common.getSection(tablef, "")
    if allfile:
        game.fileranges = {"bank_1d.bin": [(0x0, 0xfff0)]}
    with codecs.open(outfile, "w", "utf-8") as out:
        common.logMessage("Extracting bin to", outfile, "...")
        for file in common.showProgress(game.fileranges):
            out.write("!FILE:" + file + "\n")
            with common.Stream(infolder + file, "rb") as f:
                for range in game.fileranges[file]:
                    f.seek(range[0])
                    while f.tell() < range[1]:
                        if len(range) >= 3:
                            f.seek(range[2], 1)
                        pos = f.tell()
                        binstr = game.readString(f, table, True)
                        if allfile and len(binstr) > 50:
                            f.seek(pos + 2)
                            continue
                        if binstr.startswith("|"):
                            f.seek(pos + 2)
                            continue
                        if binstr != "":
                            common.logDebug("Found string at", common.toHex(pos), binstr)
                            out.write(binstr + "=\n")
    common.logMessage("Done!")
def readELF(infile):
    elf = ELF()
    with common.Stream(infile, "rb") as f:
        f.seek(0x20)
        sectionsoff = f.readUInt()
        f.seek(0x2E)
        sectionsize = f.readUShort()
        sectionnum = f.readUShort()
        shstrndx = f.readUShort()
        common.logDebug("sectionsoff:", sectionsoff, "sectionsize:", sectionsize, "sectionnum", sectionnum, "shstrndx", shstrndx)
        # Read section headers
        f.seek(sectionsoff)
        for i in range(sectionnum):
            section = ELFSection()
            section.nameoff = f.readUInt()
            section.type = f.readUInt()
            section.flags = f.readUInt()
            section.addr = f.readUInt()
            section.offset = f.readUInt()
            section.size = f.readUInt()
            section.link = f.readUInt()
            section.info = f.readUInt()
            section.addralign = f.readUInt()
            section.entsize = f.readUInt()
            elf.sections.append(section)
        # Read section names
        for section in elf.sections:
            f.seek(elf.sections[shstrndx].offset + section.nameoff)
            section.name = f.readNullString()
            elf.sectionsdict[section.name] = section
        for i in range(sectionnum):
            common.logDebug(i, vars(elf.sections[i]))
    return elf
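# Hedged usage sketch (assumption, not part of the original sources; the path is
# hypothetical): read the section headers of an extracted ELF and locate .rodata,
# which the binary-string helpers in this section look up in elf.sectionsdict.
def _readELFExample():
    elf = readELF("data/extract/main.elf")
    rodata = elf.sectionsdict[".rodata"]
    common.logMessage(".rodata at", common.toHex(rodata.offset), "size", common.toHex(rodata.size))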
def readGMOChunk(f, gmo, maxsize, nesting=""):
    offset = f.tell()
    id = f.readUShort()
    headerlen = f.readUShort()
    blocklen = f.readUInt()
    common.logDebug(nesting + "GMO ID", common.toHex(id), "at", common.toHex(offset), "len", common.toHex(headerlen), common.toHex(blocklen))
    if id == 0xa:
        # Texture name
        f.seek(8, 1)
        texname = f.readEncodedString().replace(":", "")
        while texname in gmo.names:
            texname += "_"
        common.logDebug(nesting + "0x0A at", common.toHex(offset), common.toHex(offset + blocklen), texname)
        gmo.names.append(texname)
    elif id == 0x8013:
        # Texture data
        f.seek(4, 1)
        gmo.offsets.append(f.tell())
        common.logDebug(nesting + "0x8013 at", common.toHex(f.tell()), common.toHex(offset), common.toHex(offset + blocklen))
    if id != 0x7 and id != 0xc and headerlen > 0:
        f.seek(offset + headerlen)
        common.logDebug(nesting + "Reading nested blocks:")
        while f.tell() < offset + blocklen - 1 and f.tell() < maxsize:
            readGMOChunk(f, gmo, maxsize, nesting + " ")
        common.logDebug(nesting + "Done")
        f.seek(offset + blocklen)
    else:
        f.seek(offset + blocklen)
def readTransCurve(f, startoff, type, constant, add=0):
    if constant:
        const = readFP(f)
        if add != 0:
            f.seek(-4, 1)
            writeFP(f, const + add)
        common.logDebug(" constant curve", type, const)
    else:
        readSampledCurve(f, startoff, type, readSampledTransCurve, add)
def readPaletteData(paldata):
    palettes = []
    for j in range(len(paldata) // 32):
        palette = []
        for i in range(0, 32, 2):
            p = struct.unpack("<H", paldata[j * 32 + i:j * 32 + i + 2])[0]
            palette.append(common.readPalette(p))
        palettes.append(palette)
    common.logDebug("Loaded", len(palettes), "palettes")
    return palettes
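# Illustrative sketch of the conversion assumed above: each 32-byte block is 16
# little-endian color words, and common.readPalette is expected to decode a
# 15-bit BGR word into an RGBA tuple like the (r, g, b, 0xff) tuples used
# elsewhere in this code. This helper is hypothetical and only shows the math,
# e.g. 0x7fff -> (248, 248, 248, 255).
def _decodeBGR555(p):
    r = (p & 0x1f) << 3
    g = ((p >> 5) & 0x1f) << 3
    b = ((p >> 10) & 0x1f) << 3
    return (r, g, b, 0xff)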
def extractARC(infolder, outfolder):
    common.makeFolder(outfolder)
    common.logMessage("Extracting ARC to", outfolder, "...")
    files = common.getFiles(infolder, ".arc")
    for file in common.showProgress(files):
        common.logDebug("Processing", file, "...")
        common.execute("wszst EXTRACT " + infolder + file + " -D " + outfolder + file, False)
    common.logMessage("Done! Extracted", len(files), "files")
def extractTPL(infolder, outfolder, splitName=True):
    common.makeFolder(outfolder)
    common.logMessage("Extracting TPL to", outfolder, "...")
    files = common.getFiles(infolder, ".tpl")
    for file in common.showProgress(files):
        common.logDebug("Processing", file, "...")
        filename = file.split("/")[0] if splitName else file
        common.execute("wimgt DECODE " + infolder + file + " -D " + outfolder + filename + "/" + os.path.basename(file).replace(".tpl", ".png"), False)
    common.logMessage("Done! Extracted", len(files), "files")
def readUTF(f, baseoffset, storeraw=False):
    offset = f.tell()
    headercheck = f.readString(4)
    if headercheck != "@UTF":
        common.logError("Wrong UTF header", headercheck)
        return None
    utf = UTF()
    utf.tablesize = f.readInt()
    utf.rowsoffset = f.readInt() + offset + 8
    utf.stringsoffset = f.readInt() + offset + 8
    utf.dataoffset = f.readInt() + offset + 8
    utf.tablename = f.readInt()
    utf.numcolumns = f.readShort()
    utf.rowlength = f.readShort()
    utf.numrows = f.readInt()
    utf.baseoffset = baseoffset
    if storeraw:
        utf.rawpacket = f
    # common.logDebug("UTF", vars(utf))
    for i in range(utf.numcolumns):
        column = UTFColumn()
        column.flags = f.readByte()
        if column.flags == 0:
            common.logDebug("Column flag is 0, skipping 3 bytes")
            f.seek(3, 1)
            column.flags = f.readByte()
        column.storagetype = column.flags & UTFColumnFlags.STORAGE_MASK
        nameoffset = f.readInt() + utf.stringsoffset
        # Assume ASCII, might be better to assume UTF8?
        column.name = f.readNullStringAt(nameoffset)
        if column.flags & UTFColumnFlags.STORAGE_MASK == UTFColumnFlags.STORAGE_CONSTANT:
            column.position = f.tell()
            column.data, column.type = readUTFTypedData(f, utf, column.flags)
        utf.columns.append(column)
        utf.columnlookup[column.name] = i
        common.logDebug("UTFColumn", i, vars(column))
    for j in range(utf.numrows):
        f.seek(utf.rowsoffset + (j * utf.rowlength))
        rows = []
        for i in range(utf.numcolumns):
            column = utf.columns[i]
            row = UTFRow()
            if column.storagetype == UTFColumnFlags.STORAGE_ZERO:
                row.data = 0
            elif column.storagetype == UTFColumnFlags.STORAGE_CONSTANT:
                row.data = column.data
            elif column.storagetype == UTFColumnFlags.STORAGE_PERROW:
                row.position = f.tell()
                row.data, row.type = readUTFTypedData(f, utf, column.flags)
            rows.append(row)
            # common.logDebug("UTFRow", j, i, column.name, vars(row))
        utf.rows.append(rows)
    return utf
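# Hedged usage sketch: reading one column out of a parsed @UTF table, mirroring
# how readTOC below consumes it. The stream, base offset and column name are
# hypothetical and depend on the table being read.
def _readUTFExample(f):
    utf = readUTF(f, 0, True)
    if utf is not None:
        for i in range(utf.numrows):
            filename, _, _ = utf.getColumnDataType(i, "FileName")
            common.logDebug("Row", i, filename)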
def writeMappedImage(f, tilestart, maps, palettes, num=1, skipzero=False):
    maxtile = tilesize = 0
    for i in range(num):
        mapdata = maps[i]
        if mapdata.width == 0:
            common.logError("Width is 0")
            continue
        if mapdata.height == 0:
            common.logError("Height is 0")
            continue
        imgwidth = mapdata.width * 8
        imgheight = mapdata.height * 8
        pali = 0
        if mapdata.bpp == 4 and palettes != colpalette:
            imgwidth += 40
            for palette in palettes:
                if palette.count((0x0, 0x0, 0x0, 0xff)) == 16:
                    break
                pali += 1
            imgheight = max(imgheight, pali * 10)
        img = Image.new("RGB", (imgwidth, imgheight), (0x0, 0x0, 0x0))
        pixels = img.load()
        x = y = 0
        for map in mapdata.map:
            tilesize = (16 if mapdata.bpp == 2 else 32)
            if map.tile > maxtile:
                maxtile = map.tile
            if (map.tile > 0 or not skipzero) and (mapdata.bpp != 2 or map.bank == 0):
                f.seek(tilestart + map.bank * 0x4000 + map.tile * tilesize)
                try:
                    readTile(f, pixels, x * 8, y * 8, palettes[map.pal] if map.pal < len(palettes) else palettes[0], map.hflip, map.vflip, mapdata.bpp)
                except struct.error:
                    pass
                except IndexError:
                    pass
            x += 1
            if x == mapdata.width:
                y += 1
                x = 0
        if pali > 0:
            palstart = 0
            for i in range(pali):
                pixels = common.drawPalette(pixels, palettes[i], imgwidth - 40, palstart * 10)
                palstart += 1
        img.save(mapdata.name, "PNG")
    common.logDebug("Tile data ended at", common.toHex(tilestart + maxtile * tilesize + tilesize))
def readGMO(file):
    gmo = GMO()
    with common.Stream(file, "rb") as f:
        f.seek(16 + 4)
        gmo.size = f.readUInt()
        f.seek(8, 1)
        while f.tell() < gmo.size + 16:
            readGMOChunk(f, gmo, gmo.size + 16)
        for gimoffset in gmo.offsets:
            common.logDebug("Reading GIM at", common.toHex(gimoffset))
            gim = readGIM(file, gimoffset)
            gmo.gims.append(gim)
    return gmo
def repackEXE(binrange, freeranges=None, manualptrs=None, readfunc=common.detectEncodedString, writefunc=common.writeEncodedString,
              encoding="shift_jis", comments="#", exein="", exeout="", ptrfile="data/manualptrs.asm", exefile="data/exe_input.txt"):
    if not os.path.isfile(exefile):
        common.logError("Input file", exefile, "not found")
        return False
    common.copyFile(exein, exeout)
    common.logMessage("Repacking EXE from", exefile, "...")
    section = {}
    with codecs.open(exefile, "r", "utf-8") as bin:
        section = common.getSection(bin, "", comments)
        chartot, transtot = common.getSectionPercentage(section)
    if type(binrange) == tuple:
        binrange = [binrange]
    notfound = common.repackBinaryStrings(section, exein, exeout, binrange, freeranges, readfunc, writefunc, encoding, 0x8000F800)
    # Handle not found pointers by manually replacing the opcodes
    if len(notfound) > 0 and manualptrs is not None:
        with open(ptrfile, "w") as f:
            for ptr in notfound:
                if ptr.old not in manualptrs:
                    common.logError("Manual pointer", common.toHex(ptr.old), "->", common.toHex(ptr.new), "not found for string", ptr.str)
                    continue
                for manualptr in manualptrs[ptr.old]:
                    ptrloc = manualptr[0]
                    ptrreg = manualptr[1]
                    common.logDebug("Reassembling manual pointer", common.toHex(ptr.old), "->", common.toHex(ptr.new), "at", common.toHex(ptrloc), ptrreg)
                    f.write(".org 0x" + common.toHex(ptrloc) + "\n")
                    f.write(".area 0x8,0x0\n")
                    f.write(" li " + ptrreg + ",0x" + common.toHex(ptr.new) + "\n")
                    f.write(".endarea\n\n")
    common.logMessage("Done! Translation is at {0:.2f}%".format((100 * transtot) / chartot))
    return True
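# Hedged usage sketch (all values hypothetical): manualptrs maps an original
# string address to the (opcode location, register) pairs that load it, so the
# pass above can emit li instructions into data/manualptrs.asm for strings whose
# pointers were not relocated automatically.
def _repackEXEExample():
    manualptrs = {0x8001a2c0: [(0x1540, "a1"), (0x1588, "a1")]}
    repackEXE((0x1a000, 0x4f000), freeranges=[(0x70000, 0x7f000)], manualptrs=manualptrs,
              exein="data/extract/GAME.BIN", exeout="data/repack/GAME.BIN")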
def readTIM(f, forcesize=0):
    tim = TIM()
    # Read header
    header = f.readUInt()
    if header != 0x10:
        return None
    type = f.readUInt()
    if type == 0x08:
        tim.bpp = 4
    elif type == 0x09:
        tim.bpp = 8
    elif type == 0x02:
        tim.bpp = 16
    elif type == 0x03:
        tim.bpp = 24
    else:
        common.logError("Unknown TIM type", common.toHex(type))
        return None
    # Read palettes
    if tim.bpp == 4 or tim.bpp == 8:
        tim.clutsize = f.readUInt()
        tim.clutposx = f.readUShort()
        tim.clutposy = f.readUShort()
        tim.clutwidth = f.readUShort()
        tim.clutheight = f.readUShort()
        tim.clutoff = f.tell()
        for i in range(tim.clutheight):
            clut = readCLUTData(f, tim.clutwidth)
            tim.cluts.append(clut)
    # Read size
    tim.size = f.readUInt()
    tim.posx = f.readUShort()
    tim.posy = f.readUShort()
    tim.width = f.readUShort()
    tim.height = f.readUShort()
    if tim.bpp == 4:
        tim.width *= 4
    elif tim.bpp == 8:
        tim.width *= 2
    elif tim.bpp == 24:
        tim.width //= 1.5
    tim.dataoff = f.tell()
    common.logDebug("TIM bpp", tim.bpp, "width", tim.width, "height", tim.height, "size", tim.size)
    pixelnum = forcesize if forcesize != 0 else (((tim.size - 12) * 8) // tim.bpp)
    readTIMData(f, tim, pixelnum)
    return tim
def readShiftJIS(f, len2=False, untilZero=False, encoding="shift_jis"):
    if untilZero:
        strlen2 = 999
    else:
        if len2:
            strlen = f.readUShort()
            strlen2 = f.readUShort()
        else:
            strlen = f.readByte()
            strlen2 = f.readByte()
    sjis = ""
    i = j = 0
    padding = 0
    while i < strlen2:
        b1 = f.readByte()
        if b1 == 0x00:
            i += 1
            j += 1
            padding += 1
            if untilZero:
                return sjis, i
        else:
            b2 = f.readByte()
            if b1 == 0x0d and b2 == 0x0a:
                sjis += "|"
                i += 2
                j += 2
            elif b1 == 0x81 and b2 == 0xa5:
                sjis += ">>"
                i += 2
                j += 1
            elif not common.checkShiftJIS(b1, b2):
                f.seek(-1, 1)
                sjis += chr(b1)
                i += 1
                j += 1
            else:
                f.seek(-2, 1)
                try:
                    sjis += f.read(2).decode(encoding).replace("〜", "~")
                except UnicodeDecodeError:
                    common.logDebug("UnicodeDecodeError at", f.tell() - 2)
                    sjis += "UNK(" + common.toHex(b1) + common.toHex(b2) + ")"
                i += 2
                j += 1
    if not untilZero and j != strlen:
        common.logWarning("Wrong strlen", strlen, j)
    return sjis, i
def readSampledCurve(f, startoff, type, samplefunc, add=0):
    startframe = f.readUShort()
    other = f.readUShort()
    endframe = other & 0b111111111111
    width = (other >> 12) & 0b11
    lograte = (other >> 14) & 0b11
    numsamples = 31  # int((endframe - startframe) / math.pow(2, lograte))
    samplesoff = f.readUInt() + startoff
    common.logDebug(" sampled curve", type, "startframe", startframe, "endframe", endframe, "width", width,
                    "lograte", lograte, "numsamples", numsamples, "samplesoff", common.toHex(samplesoff))
    savepos = f.tell()
    f.seek(samplesoff)
    for i in range(numsamples):
        samplefunc(f, width, add)
    f.seek(savepos)
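# Worked example of the packed header word split by readSampledCurve above:
# 0x5064 -> endframe 100 (low 12 bits), width 1 (bits 12-13), lograte 1
# (bits 14-15). The value is illustrative.
_other = 0x5064
assert _other & 0b111111111111 == 100
assert (_other >> 12) & 0b11 == 1
assert (_other >> 14) & 0b11 == 1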
def extractBREFT(infolder, tempfolder, outfolder):
    common.makeFolder(tempfolder)
    common.makeFolder(outfolder)
    common.logMessage("Extracting BREFT to", outfolder, "...")
    files = common.getFiles(infolder, ".breft")
    for file in common.showProgress(files):
        common.logDebug("Processing", file, "...")
        outfile = file.split("/")
        outfile = "/" + outfile[1] + "/" + outfile[3]
        common.execute("wszst EXTRACT " + infolder + file + " -D " + tempfolder + outfile, False)
        for imgfile in os.listdir(tempfolder + outfile + "/files"):
            common.execute("wimgt DECODE " + tempfolder + outfile + "/files/" + imgfile + " -D " + outfolder + outfile + "/" + imgfile + ".png", False)
    common.logMessage("Done! Extracted", len(files), "files")
def extractMappedImage(f, outfile, tilestart, mapstart, num=1, readpal=False, bpp=2, forcewidth=0, forceheight=0):
    common.logDebug("Extracting", outfile)
    maps = readMappedImage(f, outfile, mapstart, num, bpp, forcewidth, forceheight)
    if readpal:
        f.seek(mapstart - 32)
        palettes = readPalette(f, maps[0].bpp)
    else:
        palettes = bwpalette
    writeMappedImage(f, tilestart, maps, palettes, num)
def run():
    infolder = "data/extract/data/graphics/"
    outfolder = "data/out_IMG/"
    common.logMessage("Extracting KBG to", outfolder, "...")
    files = common.getFiles(infolder, ".kbg")
    for file in common.showProgress(files):
        common.logDebug("Processing", file, "...")
        with common.Stream(infolder + file, "rb") as f:
            palettes, ncgr = game.readKBG(f)
            tiledata = f.read(ncgr.tilelen)
            nitro.readNCGRTiles(ncgr, tiledata)
            # Export img
            common.makeFolders(outfolder + os.path.dirname(file))
            outfile = outfolder + file.replace(".kbg", ".png")
            nitro.drawNCGR(outfile, None, ncgr, palettes, ncgr.width, ncgr.height)
    common.logMessage("Done! Extracted", len(files), "files")
def run():
    infolder = "data/extract/data/graphics/"
    outfolder = "data/repack/data/graphics/"
    workfolder = "data/work_IMG/"
    common.logMessage("Repacking KBG from", workfolder, "...")
    files = common.getFiles(infolder, ".kbg")
    for file in common.showProgress(files):
        pngfile = workfolder + file.replace(".kbg", ".png")
        if not os.path.isfile(pngfile):
            continue
        common.logDebug("Processing", file, "...")
        with common.Stream(infolder + file, "rb") as fin:
            with common.Stream(outfolder + file, "wb") as f:
                palettes, ncgr = game.readKBG(fin)
                fin.seek(0)
                f.write(fin.read(ncgr.tileoffset))
        nitro.writeNCGR(outfolder + file, ncgr, pngfile, palettes)
    common.logMessage("Done!")
def readTOC(f, cpk, tocoffset, contentoffset):
    addoffset = 0
    if tocoffset > 0x800:
        tocoffset = 0x800
    if contentoffset < 0:
        addoffset = tocoffset
    elif tocoffset < 0:
        addoffset = contentoffset
    else:
        addoffset = min(contentoffset, tocoffset)
    f.seek(tocoffset)
    headercheck = f.readString(4)
    if headercheck != "TOC ":
        common.logError("Wrong TOC header", headercheck)
        return
    utfoffset = f.tell()
    utfpacket, utfsize, encrypted = readUTFData(f)
    tocentry = cpk.getFileEntry("TOC_HDR")
    tocentry.encrypted = encrypted
    tocentry.filesize = utfsize
    files = readUTF(utfpacket, utfoffset, True)
    tocentry.utf = files
    for i in range(files.numrows):
        entry = CPKFileEntry()
        entry.tocname = "TOC"
        entry.dirname, _, _ = files.getColumnDataType(i, "DirName")
        entry.filename, _, _ = files.getColumnDataType(i, "FileName")
        entry.filesize, entry.filesizepos, entry.filesizetype = files.getColumnDataType(i, "FileSize")
        entry.extractsize, entry.extractsizepos, entry.extractsizetype = files.getColumnDataType(i, "ExtractSize")
        entry.fileoffset, entry.fileoffsetpos, entry.fileoffsettype = files.getColumnDataType(i, "FileOffset")
        entry.fileoffset += addoffset
        entry.filetype = "FILE"
        entry.offset = addoffset
        entry.id, _, _ = files.getColumnDataType(i, "ID")
        entry.userstring, _, _ = files.getColumnDataType(i, "UserString")
        cpk.filetable.append(entry)
        common.logDebug("TOC", i, vars(entry))
def extractBinaryStrings(elf, foundstrings, infile, func, encoding="shift_jis", elfsections=[".rodata"]):
    with common.Stream(infile, "rb") as f:
        for sectionname in elfsections:
            rodata = elf.sectionsdict[sectionname]
            f.seek(rodata.offset)
            while f.tell() < rodata.offset + rodata.size:
                pos = f.tell()
                check = func(f, encoding)
                if check != "":
                    if check not in foundstrings:
                        common.logDebug("Found string at", common.toHex(pos), check)
                        foundstrings.append(check)
                    pos = f.tell() - 1
                f.seek(pos + 1)
    return foundstrings
def decompress(f, complength):
    header = f.readUInt()
    type = header & 0xFF
    decomplength = (header & 0xFFFFFF00) >> 8
    common.logDebug("Compression header:", common.toHex(header), "type:", common.toHex(type), "length:", decomplength)
    with common.Stream() as data:
        data.write(f.read(complength))
        data.seek(0)
        if type == CompressionType.LZ10:
            return compression.decompressLZ10(data, complength, decomplength)
        elif type == CompressionType.LZ11:
            return compression.decompressLZ11(data, complength, decomplength)
        elif type == CompressionType.Huff4:
            return compression.decompressHuffman(data, complength, decomplength, 4)
        elif type == CompressionType.Huff8:
            return compression.decompressHuffman(data, complength, decomplength, 8)
        elif type == CompressionType.RLE:
            return compression.decompressRLE(data, complength, decomplength)
        else:
            common.logError("Unsupported compression type", common.toHex(type))
            return data.read()
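# Worked example of the 32-bit header split by decompress above, assuming the
# usual DS convention where the low byte is the compression type and the upper
# 24 bits are the decompressed length: 0x00020010 -> type 0x10 (LZ10), 0x200
# bytes decompressed. The value is illustrative.
_header = 0x00020010
assert _header & 0xff == 0x10
assert (_header & 0xffffff00) >> 8 == 0x200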
def extractTIM(infolder, outfolder, extensions=".tim", readfunc=None):
    common.makeFolder(outfolder)
    common.logMessage("Extracting TIM to", outfolder, "...")
    files = common.getFiles(infolder, extensions)
    for file in common.showProgress(files):
        common.logDebug("Processing", file, "...")
        extension = os.path.splitext(file)[1]
        if readfunc is not None:
            tim, transp, forcepal = readfunc(infolder + file)
        else:
            transp = False
            forcepal = -1
            with common.Stream(infolder + file, "rb") as f:
                tim = readTIM(f)
        if tim is None:
            continue
        # Export img
        common.makeFolders(outfolder + os.path.dirname(file))
        outfile = outfolder + file.replace(extension, ".png")
        drawTIM(outfile, tim, transp, forcepal)
    common.logMessage("Done! Extracted", len(files), "files")
def read(f):
    f.seek(4)  # Magic: ARCH
    archive = ARCHArchive()
    archive.files = []
    archive.filenum = f.readUInt()
    archive.tableoff = f.readUInt()
    archive.fatoff = f.readUInt()
    archive.nameindexoff = f.readUInt()
    archive.dataoff = f.readUInt()
    common.logDebug("Archive:", vars(archive))
    for i in range(archive.filenum):
        f.seek(archive.fatoff + i * 16)
        subfile = ARCHFile()
        subfile.length = f.readUInt()
        subfile.declength = f.readUInt()
        subfile.offset = f.readUInt()
        subfile.nameoffset = f.readUShort()
        subfile.encoded = f.readUShort() == 1
        f.seek(archive.tableoff + subfile.nameoffset)
        subfile.name = f.readNullString()
        common.logDebug("File", i, vars(subfile))
        archive.files.append(subfile)
    return archive