def read_xpress(self, baddr, BlockSize):
    """Return the (decompressed) block stored at *baddr*, via the page cache.

    On a cache miss the block is read from ``self.base``; a full-size
    block (0x10000 bytes) is stored uncompressed and returned as-is,
    anything smaller is Xpress-decoded first. The result is cached
    before being returned.
    """
    cached = self.PageCache.Get(baddr)
    if cached is not None:
        return cached
    raw = self.base.read(baddr, BlockSize)
    # 0x10000 means "uncompressed": the block already spans a full 64 KiB.
    block = raw if BlockSize == 0x10000 else xpress.xpress_decode(raw)
    self.PageCache.Put(baddr, block)
    return block
def convert_to_raw(self, ofile):
    """Decompress every Xpress block and write its pages into *ofile*
    at their raw-image offsets.

    ``self.PageDict`` maps a block's file offset to a list of
    ``(page_number, compressed_size, offset_in_block)`` tuples.
    Yields the cumulative number of pages written after each block,
    so callers can report progress.
    """
    page_count = 0
    # Fixed: iterate the dict directly instead of enumerate() with an
    # unused index over .keys().
    for xb in self.PageDict:
        size = self.PageDict[xb][0][1]
        # Skip the 0x20-byte block header; a full 0x10000-byte block is
        # stored uncompressed.
        data_z = self.base.read(xb + 0x20, size)
        data_uz = data_z if size == 0x10000 else xpress.xpress_decode(data_z)
        # Fixed: the unpacked per-page size no longer shadows the
        # block-level `size` read above (it is unused per page anyway).
        for page, _page_size, offset in self.PageDict[xb]:
            ofile.seek(page * 0x1000)
            ofile.write(data_uz[offset * 0x1000:offset * 0x1000 + 0x1000])
            page_count += 1
        # Release the potentially large buffers before the next block.
        del data_z, data_uz
        yield page_count
def convert_to_raw(self, ofile):
    """Write every compressed page back to its raw offset in *ofile*.

    Each entry of ``self.PageDict`` describes one Xpress block; the
    block payload starts 0x20 bytes past the recorded address.  Yields
    a running total of pages written after each block is flushed.
    """
    written = 0
    for _i, block_addr in enumerate(self.PageDict.keys()):
        entries = self.PageDict[block_addr]
        block_size = entries[0][1]
        compressed = self.base.read(block_addr + 0x20, block_size)
        # A 64 KiB block is stored uncompressed.
        if block_size == 0x10000:
            plain = compressed
        else:
            plain = xpress.xpress_decode(compressed)
        for page, block_size, offset in entries:
            start = offset * 0x1000
            ofile.seek(page * 0x1000)
            ofile.write(plain[start:start + 0x1000])
            written += 1
        del compressed, plain
        yield written
def decompress(self, descriptor, data):
    """Return the decompressed page for *data*, or None on failure.

    A buffer that is already exactly one page long is returned
    untouched, since page-sized records are stored uncompressed.
    Returns None when the algorithm is not Xpress, decoding raises,
    or the decoded result is not page-sized.
    """
    # In case the compressed size is larger than a page, the data
    # does not seem to be compressed, so we just return data.
    if len(data) == PAGE_SIZE:
        return data

    # Only the Xpress algorithm is handled here.
    if descriptor.st_data_mgr.CompressionAlgorithm.v() != XPRESS_ALGO:
        return None

    try:
        # The low 12 bits of CompressedSize carry the payload length.
        length = descriptor.record.CompressedSize.v() & 0xFFF
        result = xpress.xpress_decode(data[:length])
    except Exception as e:
        self.debug("Error decompressing: {0}".format(str(e)))
        return None

    if len(result) != PAGE_SIZE:
        self.debug("Decompressed data is not the "
                   "size of a page: {0:#x}".format(len(result)))
        return None
    return result