def search4cave(stream: io.RawIOBase, section_name: str, section_size: int, section_info, cave_size: int, virtaddr: int, _bytes: bytes):
    """Scan ``section_size`` bytes of ``stream`` for "caves": runs of at least
    ``cave_size`` consecutive bytes equal to ``_bytes``.

    Returns a list of MiningResult objects describing each cave (file offsets
    relative to the stream start, plus the matching virtual address range).
    The stream position is restored to its starting point before returning.

    :param section_name: label copied into each result's ``name``.
    :param section_info: opaque metadata copied into each result's ``info``.
    :param virtaddr: virtual address of the section's first byte.
    """
    caves = []
    byte_count = 0  # length of the current run of cave bytes
    base = stream.tell()
    offset = 0

    def _record(begin_off: int, end_off: int):
        # One cave of `byte_count` bytes spanning [begin_off, end_off) in the section.
        mr = MiningResult()
        mr.name = section_name
        mr.cave_begin = base + begin_off
        mr.cave_end = base + end_off
        mr.cave_size = byte_count
        mr.virtaddr = virtaddr + begin_off
        mr.info = section_info
        caves.append(mr)

    while section_size > 0:
        rb = stream.read(1)
        section_size -= 1
        offset += 1
        if _bytes not in rb:
            # Run terminated (also hit on EOF, where rb == b'').
            if byte_count >= cave_size:
                _record(offset - byte_count - 1, offset - 1)
            byte_count = 0
            continue
        byte_count += 1
    # BUG FIX: a cave extending to the very end of the section was previously
    # dropped because caves were only flushed when a non-cave byte appeared.
    if byte_count >= cave_size:
        _record(offset - byte_count, offset)
    stream.seek(base)
    return caves
def __init__(self, stream: RawIOBase, chunk_size=4096, *args, **kwargs):
    """Wrap ``stream``, forwarding its total length to the base class.

    :param stream: seekable stream whose size is measured up front.
    :param chunk_size: preferred read granularity, stored on the instance.
    """
    self.chunk_size = chunk_size
    # Measure the stream: jump to the end, record the position, rewind.
    stream.seek(0, os.SEEK_END)
    size = stream.tell()
    stream.seek(0, os.SEEK_SET)
    super().__init__(stream, content_len=size, *args, **kwargs)
def verifycave(stream: io.RawIOBase, cave_size, _byte: bytes):
    """Return True iff the next ``cave_size`` bytes of ``stream`` all match
    ``_byte``; the stream position is restored before returning.

    A stream that ends before ``cave_size`` bytes are available fails the
    check (the empty read cannot contain a non-empty ``_byte``).
    Non-positive ``cave_size`` trivially succeeds.
    """
    base = stream.tell()
    # PERF: one chunked read instead of the previous read(1)-per-byte loop.
    data = stream.read(max(cave_size, 0))
    stream.seek(base)
    # Per-byte membership mirrors the original `_byte in rb` semantics exactly
    # (including the degenerate empty-`_byte` case, which always matches).
    return all(_byte in data[i:i + 1] for i in range(max(cave_size, 0)))
def _read_block_into(in_stream: io.RawIOBase, out_stream: io.RawIOBase):
    """Read one data block from ``in_stream`` and write its (decompressed)
    payload to ``out_stream``.

    Block layout: a 0x10-byte header followed by data.
    Header fields (all little-endian int32):
      0x00 magic (must be 0x10), 0x04 unknown/zero,
      0x08 size-in-source, 0x0C raw (inflated) size.
    A size-in-source >= 0x7D00 marks the payload as stored uncompressed.

    :raises EOFError: header or payload is truncated.
    :raises NotImplementedError: magic number mismatch.
    :raises RuntimeError: inflated payload size disagrees with the header.
    """
    MAGIC = 0x00000010
    HEADER_LENGTH = 0x10
    MAGIC_OFFSET = 0x00
    SOURCE_SIZE_OFFSET = 0x08
    RAW_SIZE_OFFSET = 0x0C
    BLOCK_PADDING = 0x80
    COMPRESSION_THRESHOLD = 0x7D00

    header = in_stream.read(HEADER_LENGTH)
    if len(header) != HEADER_LENGTH:
        raise EOFError
    magic_check, = struct.unpack_from('<l', header, MAGIC_OFFSET)
    source_size, = struct.unpack_from('<l', header, SOURCE_SIZE_OFFSET)
    raw_size, = struct.unpack_from('<l', header, RAW_SIZE_OFFSET)
    if magic_check != MAGIC:
        raise NotImplementedError("Magic number not present")
    is_compressed = source_size < COMPRESSION_THRESHOLD
    block_size = source_size if is_compressed else raw_size
    if is_compressed and (
            (block_size + HEADER_LENGTH) % BLOCK_PADDING) != 0:
        # Compressed payloads are padded so header+data spans a whole
        # multiple of 0x80 bytes; consume the padding with the payload.
        block_size += BLOCK_PADDING - (
            (block_size + HEADER_LENGTH) % BLOCK_PADDING)
    buffer = in_stream.read(block_size)
    if len(buffer) != block_size:
        raise EOFError
    if is_compressed:
        # Raw DEFLATE stream (wbits=-15); trailing pad bytes are ignored by
        # one-shot decompression. (FIX: removed unused `current_position`.)
        if raw_size != out_stream.write(zlib.decompress(buffer, -15)):
            raise RuntimeError(
                "Inflated block does not match indicated size")
    else:
        out_stream.write(buffer)
def __init__(self, stream: io.RawIOBase):
    """Parse a fixed 0x50-byte image header from ``stream``.

    Header fields (little-endian int16): format at 0x04, width at 0x08,
    height at 0x0A. Raises EOFError when fewer than 0x50 bytes remain.
    """
    HEADER_SIZE = 0x50
    FMT_AT, WIDTH_AT, HEIGHT_AT = 0x04, 0x08, 0x0A
    self._buffer = stream.read(HEADER_SIZE)
    if len(self._buffer) != HEADER_SIZE:
        raise EOFError
    raw_format, = struct.unpack_from('<h', self._buffer, FMT_AT)
    self.__width, = struct.unpack_from('<h', self._buffer, WIDTH_AT)
    self.__height, = struct.unpack_from('<h', self._buffer, HEIGHT_AT)
    self.__imgformat = ImageFormat(raw_format)
    # Remember where the header ends so callers can locate the pixel data.
    self.__end_of_header = stream.tell()
def __read(self, stream: io.RawIOBase):
    """Read a variable-length file header from ``stream``.

    The first int32 gives the total header length; the whole header is
    buffered into ``self._buffer``. Populates ``self._file_type``,
    ``self._length`` (stored in units of 0x80 bytes, shifted to a byte
    count) and ``self._end_of_header``.

    :raises EOFError: the stream ends before the full header is read.
    """
    FILE_TYPE_OFFSET = 0x04
    FILE_LENGTH_OFFSET = 0x10
    FILE_LENGTH_SHIFT = 7
    self._buffer = stream.read(4)
    # FIX: truncated streams previously surfaced as struct.error; raise
    # EOFError instead, consistent with the other header readers here.
    if len(self._buffer) != 4:
        raise EOFError
    length, = struct.unpack_from('<l', self._buffer, 0)
    remaining = length - 4
    self._buffer += stream.read(remaining)
    if len(self._buffer) != length:
        raise EOFError
    self._file_type, = struct.unpack_from('<l', self._buffer, FILE_TYPE_OFFSET)
    # Stored length is in 0x80-byte units; shift left 7 to get bytes.
    self._length = struct.unpack_from('<l', self._buffer, FILE_LENGTH_OFFSET)[0] << FILE_LENGTH_SHIFT
    self._end_of_header = stream.tell()