def read(self, offset: int, length: int, pad: bool = False) -> bytes:
    """Reads from the file at offset for length.

    Args:
        offset: address at which to start reading
        length: number of bytes to read
        pad: if True, a short read is zero-padded up to length instead of
            raising

    Returns:
        The bytes read (zero-padded to length when pad is True).

    Raises:
        exceptions.InvalidAddressException: if the request falls outside the
            buffer boundaries, or if insufficient bytes could be read and
            pad is False.
    """
    if not self.is_valid(offset, length):
        invalid_address = offset
        # Report the first invalid address when the read starts inside the
        # buffer but runs off the end
        if self.minimum_address < offset <= self.maximum_address:
            invalid_address = self.maximum_address + 1
        raise exceptions.InvalidAddressException(
            self.name, invalid_address,
            "Offset outside of the buffer boundaries")
    self.reads_that_happened += 1
    try:
        with self._lock:
            # The two original branches differed only in the `source` flag,
            # so compute it once instead of duplicating the call
            data = self.memscrimper_interface.vol_read(
                offset, length,
                source=(self._analyze == self.A_SOURCE),
                force_reload=True)
    except Exception:
        # BUG FIX: the original bare `except:` logged the traceback and then
        # fell through to `len(data)` with `data` unbound, masking the real
        # error with a NameError.  Log and re-raise instead.
        import traceback
        vollog.critical(traceback.format_exc())
        raise
    if len(data) < length:
        if pad:
            data += (b"\x00" * (length - len(data)))
        else:
            raise exceptions.InvalidAddressException(
                self.name, offset + len(data),
                "Could not read sufficient bytes from the " + self.name +
                " file")
    return data
def mapping(
        self, offset: int, length: int, ignore_errors: bool = False
) -> Iterable[Tuple[int, int, int, str]]:
    """Returns a sorted iterable of (offset, mapped_offset, length, layer)
    mappings.

    This allows translation layers to provide maps of contiguous regions in
    one layer
    """
    # Zero-length request: still translate and validate the single address
    # so callers can probe whether `offset` is mappable.
    if length == 0:
        try:
            mapped_offset, _, layer_name = self._translate(offset)
            if not self._context.layers[layer_name].is_valid(
                    mapped_offset):
                raise exceptions.InvalidAddressException(
                    layer_name=layer_name, invalid_address=mapped_offset)
        except exceptions.InvalidAddressException:
            if not ignore_errors:
                raise
            # Swallowed: yield nothing for an unmappable probe
            return
        yield (offset, mapped_offset, length, layer_name)
        return
    while length > 0:
        try:
            # Translate one address and clamp the chunk to the end of the
            # containing page (or the remaining request, if shorter)
            chunk_offset, page_size, layer_name = self._translate(offset)
            chunk_size = min(page_size - (chunk_offset % page_size), length)
            if not self._context.layers[layer_name].is_valid(
                    chunk_offset, chunk_size):
                raise exceptions.InvalidAddressException(
                    layer_name=layer_name, invalid_address=chunk_offset)
        except (exceptions.PagedInvalidAddressException,
                exceptions.InvalidAddressException) as excp:
            if not ignore_errors:
                raise
            # We can jump more if we know where the page fault failed
            if isinstance(excp, exceptions.PagedInvalidAddressException):
                mask = (1 << excp.invalid_bits) - 1
            else:
                mask = (1 << self._page_size_in_bits) - 1
            # Skip forward to the start of the next page-aligned region
            length_diff = (mask + 1 - (offset & mask))
            length -= length_diff
            offset += length_diff
        else:
            # Successful translation: emit the chunk and advance
            yield (offset, chunk_offset, chunk_size, layer_name)
            length -= chunk_size
            offset += chunk_size
def write(self, offset: int, value: bytes) -> None:
    """Writes a value at offset, distributing the writing across any
    underlying mapping.

    Each mapped chunk is read back, has the new bytes spliced in, is
    re-encoded, and is written to the underlying layer.
    """
    current_offset = offset
    length = len(value)
    for (layer_offset, mapped_offset, mapped_length,
         layer) in self.mapping(offset, length):
        if layer_offset > current_offset:
            # A gap in the mapping cannot be written through
            raise exceptions.InvalidAddressException(
                self.name, current_offset,
                "Layer {} cannot map offset: {}".format(
                    self.name, current_offset))
        # Read back the existing chunk so a partial write preserves the
        # bytes before the write position
        original_data = self._context.layers.read(layer, mapped_offset,
                                                  mapped_length)
        # Always chunk the value based on the mapping
        value_to_write = original_data[:current_offset - layer_offset] + \
            value[:mapped_length - (current_offset - layer_offset)]
        value = value[mapped_length - (current_offset - layer_offset):]
        # NOTE(review): the tail of original_data past the end of `value` is
        # never re-appended, so a final chunk shorter than mapped_length will
        # fail the length check below — confirm intended for short writes.
        encoded_value = self._encode(value_to_write, mapped_offset,
                                     layer_offset)
        if len(encoded_value) != mapped_length:
            raise exceptions.LayerException(
                self.name,
                "Unable to write new value, does not map to the same dimensions"
            )
        self._context.layers.write(layer, mapped_offset, encoded_value)
        # NOTE(review): advances by the spliced chunk length (preserved
        # prefix + new bytes), not by the number of `value` bytes consumed —
        # verify this accounting for multi-chunk writes.
        current_offset += len(value_to_write)
def read(self, offset: int, length: int, pad: bool = False) -> bytes:
    """Reads an offset for length bytes and returns 'bytes' (not 'str') of
    length size.

    Unmappable gaps are zero-filled when pad is True; otherwise they raise.
    """
    position = offset
    chunks = []  # type: List[bytes]
    for layer_offset, mapped_offset, mapped_length, layer in self.mapping(
            offset, length, ignore_errors=pad):
        if layer_offset > position:
            if not pad:
                raise exceptions.InvalidAddressException(
                    self.name, position,
                    "Layer {} cannot map offset: {}".format(
                        self.name, position))
            # Zero-fill the unmapped gap before this chunk
            chunks.append(b"\x00" * (layer_offset - position))
            position = layer_offset
        # A layer_offset below position is legal for non-linearly mapped
        # layers: the requested data sits inside an encoded block, it is
        # not an overlap.
        if mapped_length > 0:
            raw = self._context.layers.read(layer, mapped_offset,
                                            mapped_length, pad)
            decoded = self._decode(raw, mapped_offset, layer_offset)
            # Trim the front of the decoded block up to the read position,
            # then trim the back to the span the caller requested
            decoded = decoded[position - layer_offset:]
            decoded = decoded[:length - (position - offset)]
            chunks.append(decoded)
            position += len(decoded)
    data = b"".join(chunks)
    return data + b"\x00" * (length - len(data))
def read(self, offset: int, length: int, pad: bool = False) -> bytes:
    """Reads an offset for length bytes and returns 'bytes' (not 'str') of
    length size.

    Chunks must arrive in order: gaps are zero-filled (pad) or raise, and
    overlapping chunks are a layer error.
    """
    position = offset
    pieces = []  # type: List[bytes]
    for chunk_offset, _, mapped_offset, mapped_length, layer in self.mapping(
            offset, length, ignore_errors=pad):
        if chunk_offset > position:
            if not pad:
                raise exceptions.InvalidAddressException(
                    self.name, position,
                    "Layer {} cannot map offset: {}".format(
                        self.name, position))
            # Zero-fill the unmapped gap before this chunk
            pieces.append(b"\x00" * (chunk_offset - position))
            position = chunk_offset
        elif chunk_offset < position:
            raise exceptions.LayerException(
                self.name, "Mapping returned an overlapping element")
        if mapped_length > 0:
            pieces.append(self._context.layers.read(layer, mapped_offset,
                                                    mapped_length, pad))
            position += mapped_length
    data = b"".join(pieces)
    return data + b"\x00" * (length - len(data))
def read(self, offset: int, length: int, pad: bool = False) -> bytes:
    """Reads an offset for length bytes and returns 'bytes' (not 'str') of
    length size.

    Each mapped chunk is decoded and must come back exactly sublength bytes
    long; the result is zero-padded to the requested length.
    """
    position = offset
    result = b''  # type: bytes
    for chunk_offset, chunk_length, mapped_offset, mapped_length, layer in \
            self.mapping(offset, length, ignore_errors=pad):
        if chunk_offset > position:
            if not pad:
                raise exceptions.InvalidAddressException(
                    self.name, position,
                    "Layer {} cannot map offset: {}".format(
                        self.name, position))
            # Zero-fill the unmapped gap before this chunk
            result += b"\x00" * (chunk_offset - position)
            position = chunk_offset
        # A chunk_offset below position is expected for non-linearly mapped
        # layers: the data lives inside an encoded block, it is not an
        # overlap.
        if mapped_length > 0:
            raw = self._context.layers.read(layer, mapped_offset,
                                            mapped_length, pad)
            decoded = self._decode_data(raw, mapped_offset, chunk_offset,
                                        chunk_length)
            if len(decoded) != chunk_length:
                raise ValueError(
                    "ProcessedData length does not match expected length of chunk"
                )
            result += decoded
            position += chunk_length
    return result + (b"\x00" * (length - len(result)))
def read(self, address: int, length: int, pad: bool = False) -> bytes:
    """Reads the data from the buffer."""
    if self.is_valid(address, length):
        return self._buffer[address:address + length]
    # Report the first address past the end when the request starts inside
    # the buffer but runs off it; otherwise report the requested address
    if self.minimum_address < address <= self.maximum_address:
        bad_address = self.maximum_address + 1
    else:
        bad_address = address
    raise exceptions.InvalidAddressException(
        self.name, bad_address, "Offset outside of the buffer boundaries")
def read(self, offset: int, length: int, pad: bool = False) -> bytes:
    """Reads from the file at offset for length.

    Short reads near end-of-file are zero-padded when pad is True,
    otherwise they raise.
    """
    if not self.is_valid(offset, length):
        bad_address = offset
        # Point at the first invalid address if the read starts in-bounds
        if self.minimum_address < offset <= self.maximum_address:
            bad_address = self.maximum_address + 1
        raise exceptions.InvalidAddressException(
            self.name, bad_address,
            "Offset outside of the buffer boundaries")
    # Serialize seek+read so concurrent readers cannot interleave
    with self._lock:
        self._file.seek(offset)
        data = self._file.read(length)
    missing = length - len(data)
    if missing > 0:
        if not pad:
            raise exceptions.InvalidAddressException(
                self.name, offset + len(data),
                "Could not read sufficient bytes from the " + self.name +
                " file")
        data += b"\x00" * missing
    return data
def write(self, offset: int, data: bytes) -> None:
    """Writes to the file.

    This will technically allow writes beyond the extent of the file
    """
    if self.is_valid(offset, len(data)):
        # Serialize writes against concurrent readers/writers
        with self._lock:
            self.memscrimper_interface.write(offset, data)
        return
    bad_address = offset
    if self.minimum_address < offset <= self.maximum_address:
        bad_address = self.maximum_address + 1
    raise exceptions.InvalidAddressException(
        self.name, bad_address,
        "Data segment outside of the " + self.name + " file boundaries")
def write(self, offset: int, value: bytes) -> None:
    """Writes a value at offset, distributing the writing across any
    underlying mapping.

    The mapping must be gap-free and non-overlapping for a write.
    """
    position = offset
    remaining = value
    for chunk_offset, mapped_offset, chunk_length, layer in self.mapping(
            offset, len(value)):
        if chunk_offset > position:
            # A gap in the mapping cannot be written through
            raise exceptions.InvalidAddressException(
                self.name, position,
                "Layer {} cannot map offset: {}".format(
                    self.name, position))
        if chunk_offset < position:
            raise exceptions.LayerException(
                self.name, "Mapping returned an overlapping element")
        # Consume the front of the value for this chunk
        self._context.layers.write(layer, mapped_offset,
                                   remaining[:chunk_length])
        remaining = remaining[chunk_length:]
        position += chunk_length
def translate(self, offset: int,
              ignore_errors: bool = False) -> Tuple[Optional[int], Optional[str]]:
    """Translates a single offset through this layer's mapping.

    Returns (mapped_offset, layer_name), or (None, None) when the offset is
    untranslatable and ignore_errors is set.
    """
    mappings = list(self.mapping(offset, 0, ignore_errors))
    if len(mappings) != 1:
        # Only reachable when errors were ignored, but guard regardless
        if ignore_errors:
            return None, None
        raise exceptions.InvalidAddressException(
            self.name, offset,
            "Cannot translate {} in layer {}".format(offset, self.name))
    original_offset, mapped_offset, _, layer = mappings[0]
    if original_offset != offset:
        raise exceptions.LayerException(
            self.name,
            "Layer {} claims to map linearly but does not".format(self.name))
    return mapped_offset, layer
def write(self, offset: int, value: bytes) -> None:
    """Writes a value at offset, distributing the writing across any
    underlying mapping.

    Each chunk of the value is encoded for its target layer before being
    written out.
    """
    position = offset
    for chunk_offset, chunk_length, mapped_offset, mapped_length, layer in \
            self.mapping(offset, len(value)):
        if chunk_offset > position:
            # A gap in the mapping cannot be written through
            raise exceptions.InvalidAddressException(
                self.name, position,
                "Layer {} cannot map offset: {}".format(
                    self.name, position))
        # Slice out the part of the value that belongs to this chunk
        start = chunk_offset - offset
        chunk = value[start:start + chunk_length]
        encoded = self._encode_data(layer, mapped_offset, chunk_offset, chunk)
        self._context.layers.write(layer, mapped_offset, encoded)
        position += len(encoded)
def mapping(self,
            offset: int,
            length: int,
            ignore_errors: bool = False) -> Iterable[Tuple[int, int, int, int, str]]:
    """Maps a range of this layer onto pages of the base layer.

    Args:
        offset: address at which the range starts
        length: number of bytes to map
        ignore_errors: if True, chunks falling outside the known page list
            are silently skipped instead of raising

    Yields:
        (offset, chunk_size, mapped_offset, chunk_size, base_layer_name)
        tuples, one per page-aligned chunk.

    Raises:
        exceptions.InvalidAddressException: when a chunk lies beyond the
            page list and ignore_errors is False.
    """
    returned = 0
    page_size = self._pdb_layer.page_size
    while length > 0:
        # BUG FIX: use integer floor division instead of
        # math.floor(a / b) — float division silently loses precision for
        # offsets at or above 2**53
        page = (offset + returned) // page_size
        page_position = (offset + returned) % page_size
        # Clamp the chunk to the end of the current page
        chunk_size = min(page_size - page_position, length)
        if page >= self._pages_len:
            if not ignore_errors:
                raise exceptions.InvalidAddressException(
                    layer_name = self.name,
                    invalid_address = offset + returned)
        else:
            yield (offset + returned, chunk_size,
                   (self._pages[page] * page_size) + page_position,
                   chunk_size, self._base_layer)
        returned += chunk_size
        length -= chunk_size
def get_peb(self) -> interfaces.objects.ObjectInterface:
    """Constructs a PEB object"""
    if constants.BANG not in self.vol.type_name:
        raise ValueError("Invalid symbol table name syntax (no {} found)".format(constants.BANG))
    # add_process_layer can raise InvalidAddressException; if it does we let
    # the exception propagate upwards
    layer_name = self.add_process_layer()
    if not self._context.layers[layer_name].is_valid(self.Peb):
        raise exceptions.InvalidAddressException(
            layer_name, self.Peb,
            "Invalid address at {:0x}".format(self.Peb))
    symbol_table = self.vol.type_name.split(constants.BANG)[0]
    return self._context.object(
        "{}{}_PEB".format(symbol_table, constants.BANG),
        layer_name = layer_name,
        offset = self.Peb)
def _find_segment(self, offset: int, next: bool = False) -> Tuple[int, int, int, int]:
    """Finds the segment containing a given offset.

    Returns the segment tuple (offset, mapped_offset, length, mapped_length);
    with next=True, returns the first segment starting after the offset.
    """
    if not self._segments:
        self._load_segments()
    # Index of the first segment that starts strictly after `offset`
    top = self.context.layers[self._base_layer].maximum_address
    index = bisect_right(self._segments, (offset, top))
    if next:
        if index < len(self._segments):
            return self._segments[index]
    elif index:
        candidate = self._segments[index - 1]
        # Only a hit if the offset actually falls inside the segment
        if candidate[0] <= offset < candidate[0] + candidate[2]:
            return candidate
    raise exceptions.InvalidAddressException(
        self.name, offset, "Invalid address at {:0x}".format(offset))