def parseData(self):
    """Parse self.data as a child table: a 4-byte count, 4 skipped bytes,
    then `count` Child records, stored in self.children keyed by each
    child's string name."""
    parser = StreamParser(self.data)
    entry_count = parser.readInt(4)
    parser.burn(4)  # skip 4 bytes between the count and the first record
    for _ in range(entry_count):
        entry = self.Child(parser)
        self.children.update({entry.string: entry})
def compressor(data):
    """Split `data` into fixed-size chunks in preparation for compression.

    NOTE(review): the visible portion of this function only chunkifies the
    input; `header` is created but never used here and there is no return
    statement — the remainder of the function (compressing the chunks and
    writing the header) appears to be missing from this view. Confirm
    against the full file before relying on this block.
    """
    stream = StreamParser(data)
    chunks = []
    header = StreamWriter()  # presumably filled with chunk offsets later — TODO confirm
    # chunkify the data into chunks of 0x20000 bytes
    while (chunk := stream.read(0x20000)):
        chunks.append(chunk)
def parseData(self):
    """Two-pass parse of self.data: first read the 4-byte count and build
    every Child record, then let each child resolve its offset from the
    same stream (offsets are stored after all the name records)."""
    stream = StreamParser(self.data)
    total = stream.readInt(4)
    for _ in range(total):
        record = self.Child(stream)
        self.children.update({record.string: record})
    # second pass: offsets follow the name table in the stream
    for record in self.children.values():
        record.parseOffset(stream)
def parseData(self):
    """Parse self.data using the embedded imeta block: the first 0x290008
    bytes hold the imeta table, whose entries supply the offset/size of
    each Child record in the main stream."""
    stream = StreamParser(self.data)
    _count = stream.readInt(4)  # value unused; the read advances the stream past the count field
    # the imeta table occupies a fixed-size prefix of the data blob
    self.imeta.data = self.data[0:0x290008]
    self.imeta.parseData()
    for meta_entry in self.imeta.children.values():
        child = self.Child(stream, meta_entry.offset, meta_entry.size)
        self.children.update({meta_entry.string: child})
def recursiveTemplateImport(target: SaberPak, imeta: Imeta, path):
    """Import every file under `path` into `target`/`imeta`.

    Files containing ".imeta_child" in their name are loaded as imeta
    children (and registered as Textures in SceneData); all other files
    are added to the pak as "Template" entries (and registered as
    Templates in SceneData). If the pak has a SceneData child it is
    updated and recompiled at the end; otherwise a warning is printed and
    execution continues without SceneData updates.

    Returns the (target, imeta) pair.

    Fixes over the original: file handles are closed via `with`, and a
    file's bytes are only read when they are actually needed (the
    Template branch passes a path to addEntry, not the contents).
    """
    # load the SceneData
    scene_data = None
    if "SceneData" not in target.children:
        print("s3dpak not compatible. May be corrupted, SceneData not present.\n"
              "Execution can continue, but SceneData wont be updated")
    else:
        scene_data = SceneData()
        scene_data.load(target.children["SceneData"].data)

    files = os.listdir(path)
    for file in files:
        name = file.split(".")[0].split("/")[-1]
        if ".imeta_child" in file:
            # only this branch needs the file contents
            with open(path + "/" + file, "rb") as fh:
                stream = StreamParser(fh.read())
            new_child = imeta.Child()
            new_child.loadFromStream(stream)
            imeta.importChild(file.split(".")[0], new_child)
            if scene_data:
                if name not in scene_data.Textures:
                    scene_data.Textures.append(name)
        else:
            target.addEntry(path + "/" + file, "Template")
            if scene_data:
                if name not in scene_data.Templates:
                    scene_data.Templates.append(name)

    if scene_data:
        target.children["SceneData"].data = scene_data.compile_data()
    return (target, imeta)
def decompressor(data):
    """Decompress a chunked blob (8-byte header variant).

    Layout: an 8-byte chunk count, then `count` 8-byte absolute offsets;
    each chunk `data[offsets[i]:offsets[i+1]]` is an independent zlib
    stream, with the final chunk running to the end of `data`.

    Returns the concatenated decompressed bytes.

    Fixes over the original: removed a leftover debug print, and the
    round-trip through an on-disk "tmp" file (slow, race-prone, and the
    read-back handle was never closed) — chunks are now joined in memory.
    """
    stream = StreamParser(data)
    count = stream.readInt(8)
    offsets = [stream.readInt(8) for _ in range(count)]
    offsets.append(len(data))  # sentinel so the last chunk extends to end of data
    return b"".join(
        zlib.decompress(data[start:end])
        for start, end in zip(offsets, offsets[1:])
    )
def decompressor(data):
    """Decompress a chunked blob (4-byte header variant).

    Layout: a 4-byte chunk count, then `count` 4-byte offsets that are
    relative to the start of the offset data (+4 rebases them onto the
    whole buffer); each chunk is an independent zlib stream, with the
    final chunk running to the end of `data`.

    Returns the concatenated decompressed bytes.

    Fix over the original: removed the round-trip through an on-disk
    "tmp" file (slow and race-prone) — chunks are joined in memory.
    """
    stream = StreamParser(data)
    # read the chunk count
    count = stream.readInt(4)
    # read offsets (stored relative; +4 accounts for the count field)
    offsets = [(stream.readInt(4) + 4) for _ in range(count)]
    # temporary offset to calculate last - end
    offsets.append(len(data))
    return b"".join(
        zlib.decompress(data[start:end])
        for start, end in zip(offsets, offsets[1:])
    )
def load(self, stream: StreamParser):
    """Read a pixel-format record from `stream`: size, flags, a 4-char
    FourCC code, then five 4-byte integer fields (bit count and the
    R/G/B/A channel masks), in that order."""
    self.size = stream.readInt(4)
    self.flags = stream.readInt(4)
    self.fourCC = stream.readString(length=4)
    # the remaining fields are all plain 4-byte integers
    for attr in ("RGBBitCount", "RBitMask", "GBitMask", "BBitMask", "ABitMask"):
        setattr(self, attr, stream.readInt(4))
def __init__(self, data):
    """Parse a raw data blob: skip to the name, then jump past the header
    and collect the dependency name list.

    Fix over the original: `self.dependecies` is a historical misspelling
    kept for backward compatibility; `self.dependencies` is added as a
    correctly-spelled alias bound to the same list object.
    """
    self.data = data
    stream = StreamParser(data)
    self.dependecies = []  # misspelled name kept — existing callers read it
    self.dependencies = self.dependecies  # correctly-spelled alias (same list)
    # for now, this will do for gathering the dependencies
    stream.burn(8)
    header_size = stream.readInt(4)
    stream.burn(4)
    self.name = stream.readString()
    stream.seek(header_size)
    stream.burn(15)  # fixed padding before the dependency count — format detail, TODO confirm
    dependency_count = stream.readInt(4)
    stream.burn(6)
    for _ in range(dependency_count):
        self.dependecies.append(stream.readString())
        stream.burn(6)  # NOTE(review): assumed per-entry trailing bytes — confirm against the format spec
def loadFromFile(self, file):
    """Load this object from the file at `file` by delegating to
    loadFromStream.

    Fixes over the original: the file handle is closed via `with`, and a
    dead trailing `pass` was removed.
    """
    with open(file, "rb") as fh:
        stream = StreamParser(fh.read())
    self.loadFromStream(stream)
def load(self, path):
    """Load a texture from the file at `path`: parse the header, then keep
    the remaining bytes as pixel data.

    Fix over the original: the file handle is closed via `with`.
    """
    with open(path, "rb") as fh:
        stream = StreamParser(fh.read())
    self.header.load(
        stream)  # if stream passed instead of path, stream will be used
    self.pixelData = stream.read()  # read pixel data
def load(self, path):
    """Parse a texture header from either a filesystem path or an
    already-open StreamParser.

    Reads: 4-char magic, then size/flags/height/width/pitchOrLinearSize/
    depth/mipmap_count as 4-byte ints, a run of reserved dwords
    (discarded), the nested pixel-format block, the four caps fields, and
    a final reserved dword.

    Fixes over the original: `isinstance` instead of a `type(...) !=`
    comparison (accepts StreamParser subclasses), the file handle is
    closed via `with`, and the unused loop variable is named `_`.
    """
    if not isinstance(path, StreamParser):
        with open(path, "rb") as fh:
            stream = StreamParser(fh.read())
    else:
        stream = path
    self.magic = stream.readString(length=4)
    self.size = stream.readInt(4)
    self.flags = stream.readInt(4)
    self.height = stream.readInt(4)
    self.width = stream.readInt(4)
    self.pitchOrLinearSize = stream.readInt(4)
    self.depth = stream.readInt(4)
    self.mipmap_count = stream.readInt(4)
    # reserved dwords: read to advance the stream, values intentionally discarded
    for _ in self.reserved:
        stream.readInt(4)
    self.ddspf.load(stream)
    self.caps = stream.readInt(4)
    self.caps2 = stream.readInt(4)
    self.caps3 = stream.readInt(4)
    self.caps4 = stream.readInt(4)
    self.reserved2 = stream.readInt(4)
def loadFromRawData(self, data):
    """Load this object from an in-memory bytes blob by wrapping it in a
    StreamParser and delegating to loadFromStream."""
    self.loadFromStream(StreamParser(data))
def loadFromFileRaw(self, file):
    """Load this object from a raw file on disk, delegating to
    loadFromStream with an explicit payload size.

    Fix over the original: the file handle is closed via `with`.
    """
    with open(file, "rb") as fh:
        data = fh.read()
    stream = StreamParser(data)
    # NOTE(review): 58 looks like the size of a fixed trailer/header excluded
    # from the payload — confirm against the raw file format
    self.loadFromStream(stream, size=len(data) - 58)