def generate_suggestions(self):
    """Generates a list of possible KDBG structure locations"""

    if not has_distorm:
        raise StopIteration("The distorm3 Python library is required")

    overlap = 20
    offset = 0
    current_offset = offset

    addr_space = self.obj_vm
    addresses = sorted(addr_space.get_available_addresses())

    for (range_start, range_size) in addresses:
        # Jump to the next available point to scan from
        current_offset = max(range_start, current_offset)
        range_end = range_start + range_size

        if current_offset < 0xf80000000000:
            continue

        while (current_offset < range_end):
            # Figure out how much data to read
            l = min(constants.SCAN_BLOCKSIZE + overlap, range_end - current_offset)
            data = addr_space.zread(current_offset, l)

            for addr in utils.iterfind(data, "\x80\x3D"):
                full_addr = addr + current_offset
                result = self.copy_data_block(full_addr)
                if result:
                    yield result

            current_offset += min(constants.SCAN_BLOCKSIZE, l)
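Every scanner in this listing relies on utils.iterfind to walk a buffer for a needle. The following is a minimal sketch of the behavior these loops assume (a generator yielding every offset of the needle within the data, including overlapping hits); Volatility's actual helper may differ in detail.

def iterfind(data, needle, start=0):
    # Yield each offset at which needle occurs in data, advancing one byte
    # past every hit so overlapping matches are also reported.
    offset = start
    while True:
        offset = data.find(needle, offset)
        if offset == -1:
            break
        yield offset
        offset += 1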
def scan(self, address_space, offset = None, maxlen = None):
    if offset is None:
        current_offset = 0
    else:
        current_offset = offset

    for (range_start, range_size) in sorted(address_space.get_available_addresses()):
        # Jump to the next available point to scan from
        # self.base_offset jumps up to be at least range_start
        current_offset = max(range_start, current_offset)
        range_end = range_start + range_size

        # If we have a maximum length, we make sure it's less than the range_end
        if maxlen is not None:
            range_end = min(range_end, current_offset + maxlen)

        while (current_offset < range_end):
            # We've now got range_start <= self.base_offset < range_end

            # Figure out how much data to read
            l = min(constants.SCAN_BLOCKSIZE + self.overlap, range_end - current_offset)
            data = address_space.zread(current_offset, l)

            for needle in self.needles:
                for addr in utils.iterfind(data, needle):
                    # this scanner yields the matched pool tag as well as
                    # the offset, to save the caller from having to perform
                    # another .read() just to see which tag was matched
                    yield data[addr:addr + 4], addr + current_offset

            current_offset += min(constants.SCAN_BLOCKSIZE, l)
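The scan method above expects the instance to carry self.needles and self.overlap. Below is a hypothetical minimal wrapper and call site, only to show how the (tag, offset) pairs are consumed; the PoolTagScanner constructor and the kernel_space object are assumptions, not part of the original class.

class PoolTagScanner(object):
    # Hypothetical holder for the attributes scan() expects.
    def __init__(self, needles, overlap=20):
        self.needles = needles   # list of byte-string needles, e.g. 4-byte pool tags
        self.overlap = overlap   # bytes re-read across block boundaries

    scan = scan                  # reuse the generator defined above

scanner = PoolTagScanner(needles=["Proc", "File"])
for tag, hit in scanner.scan(kernel_space):   # kernel_space: an assumed kernel address space
    print("tag %r at %#x" % (tag, hit))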
def search_process_memory(self, s):
    """Search process memory.

    @param s: a list of strings like ["one", "two"]
    """

    # Allow for some overlap in case objects are
    # right on page boundaries
    overlap = 1024
    scan_blk_sz = 1024 * 1024 * 10

    addr_space = self.get_process_address_space()

    for vma in self.get_proc_maps():
        offset = vma.links.start
        out_of_range = vma.links.start + (vma.links.end - vma.links.start)
        while offset < out_of_range:
            # Read some data and match it.
            to_read = min(scan_blk_sz + overlap, out_of_range - offset)
            data = addr_space.zread(offset, to_read)
            if not data:
                break
            for x in s:
                for hit in utils.iterfind(data, x):
                    yield offset + hit
            offset += min(to_read, scan_blk_sz)
def search_process_memory(self, s, heap_only=False):
    # Allow for some overlap in case objects are
    # right on page boundaries
    overlap = 1024

    # Make sure s is a list. This allows you to search for
    # multiple strings at once, without changing the API.
    if type(s) != list:
        debug.warning("Single strings to search_process_memory is deprecated, use a list instead")
        s = [s]

    scan_blk_sz = 1024 * 1024 * 10

    addr_space = self.get_process_address_space()

    for vma in self.get_proc_maps():
        if heap_only:
            if not (vma.vm_start <= self.mm.start_brk and vma.vm_end >= self.mm.brk):
                continue

        offset = vma.vm_start
        out_of_range = vma.vm_start + (vma.vm_end - vma.vm_start)
        while offset < out_of_range:
            # Read some data and match it.
            to_read = min(scan_blk_sz + overlap, out_of_range - offset)
            data = addr_space.zread(offset, to_read)
            if not data:
                break
            for x in s:
                for hit in utils.iterfind(data, x):
                    yield offset + hit
            offset += min(to_read, scan_blk_sz)
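A hypothetical call site for the heap_only variant above. It assumes task is a task_struct obtained from a process-listing plugin; the needles and output formatting are illustrative only.

# Hypothetical usage sketch
needles = ["PRIVATE KEY", "password="]
proc_as = task.get_process_address_space()
for hit in task.search_process_memory(needles, heap_only=True):
    # hit is an absolute address inside the heap; grab a little context around it
    print("%#x %r" % (hit, proc_as.zread(hit, 64)))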
def calculate(self):
    common.set_plugin_members(self)

    procs = pstasks.mac_tasks.calculate(self)

    for proc in procs:
        space = proc.get_process_address_space()

        for map in proc.get_proc_maps():
            # only read/write without filebacks
            if not (map.get_perms() == "rw-" and not map.get_path()):
                continue

            # check the header for sqlite3 signature
            header = space.zread(map.links.start, 32)
            if "SQLite format" not in header:
                continue

            # get the whole sqlite3 data now
            data = space.zread(map.links.start, map.links.end - map.links.start)

            for offset in utils.iterfind(data, ":ABPerson"):
                person = obj.Object("String",
                                    offset = map.links.start + offset,
                                    vm = space,
                                    encoding = "utf8",
                                    length = 256)
                yield proc, person
def search_vmas(s, vmas, task):
    """Searches VMAs for lists of strings.

    volatility.plugins.overlays.linux.linux.task_struct.search_process_memory
    could be used, but we want to search more than the heap and less than all
    of process memory. This code is mostly copied from there.
    """

    # Allow for some overlap in case objects are
    # right on page boundaries
    overlap = 1024
    scan_blk_sz = 1024 * 1024 * 10

    addr_space = task.get_process_address_space()

    for vma in vmas:
        offset = vma.vm_start
        out_of_range = vma.vm_start + (vma.vm_end - vma.vm_start)
        while offset < out_of_range:
            # Read some data and match it.
            to_read = min(scan_blk_sz + overlap, out_of_range - offset)
            data = addr_space.zread(offset, to_read)
            if not data:
                break
            for x in s:
                for hit in utils.iterfind(data, x):
                    yield offset + hit
            offset += min(to_read, scan_blk_sz)
def scan(self, address_space, offset = None, maxlen = None):
    with UpdateCounterForScope('MultiPoolScanner'):
        if offset is None:
            current_offset = 0
        else:
            current_offset = offset

        for (range_start, range_size) in sorted(address_space.get_available_addresses()):
            # Jump to the next available point to scan from
            # self.base_offset jumps up to be at least range_start
            current_offset = max(range_start, current_offset)
            range_end = range_start + range_size

            # If we have a maximum length, we make sure it's less than the range_end.
            # Use current_offset here: offset may be None, which would raise a TypeError.
            if maxlen is not None:
                range_end = min(range_end, current_offset + maxlen)

            while (current_offset < range_end):
                # We've now got range_start <= self.base_offset < range_end

                # Figure out how much data to read
                l = min(constants.SCAN_BLOCKSIZE + self.overlap, range_end - current_offset)
                data = address_space.zread(current_offset, l)

                for needle in self.needles:
                    for addr in utils.iterfind(data, needle):
                        # this scanner yields the matched pool tag as well as
                        # the offset, to save the caller from having to perform
                        # another .read() just to see which tag was matched
                        yield data[addr:addr + 4], addr + current_offset

                current_offset += min(constants.SCAN_BLOCKSIZE, l)
def search_process_memory(self, s):
    """Search memory for a simple byte string.

    FIXME: as of 2.3 this parameter can also be a list to search for
    multiple strings concurrently. The single string will be deprecated
    in 3.0.

    @param s: the string to search for.

    @returns every occurrence of the string in process memory (as an
    absolute address).
    """

    # Allow for some overlap in case objects are
    # right on page boundaries
    overlap = 1024

    # Make sure s is a list. This allows you to search for
    # multiple strings at once, without changing the API.
    if type(s) != list:
        debug.warning("Single strings to search_process_memory is deprecated, use a list instead")
        s = [s]

    # All MMVADs that belong to this process.
    for vad, address_space in self.get_vads(skip_max_commit=True):
        offset = vad.Start
        out_of_range = vad.Start + vad.Length
        while offset < out_of_range:
            # Read some data and match it.
            to_read = min(constants.SCAN_BLOCKSIZE + overlap, out_of_range - offset)
            data = address_space.zread(offset, to_read)
            if not data:
                break
            for x in s:
                for hit in utils.iterfind(data, x):
                    yield offset + hit
            offset += min(to_read, constants.SCAN_BLOCKSIZE)
def search_process_memory(self, s, vad_filter=None):
    """Search memory for a simple byte string.

    FIXME: as of 2.3 this parameter can also be a list to search for
    multiple strings concurrently. The single string will be deprecated
    in 3.0.

    @param s: the string to search for.

    @returns every occurrence of the string in process memory (as an
    absolute address).
    """

    # Allow for some overlap in case objects are
    # right on page boundaries
    overlap = 1024

    # Make sure s is a list. This allows you to search for
    # multiple strings at once, without changing the API.
    if type(s) != list:
        debug.warning("Single strings to search_process_memory is deprecated, use a list instead")
        s = [s]

    # All MMVADs that belong to this process.
    for vad, address_space in self.get_vads(vad_filter, skip_max_commit=True):
        offset = vad.Start
        out_of_range = vad.Start + vad.Length
        while offset < out_of_range:
            # Read some data and match it.
            to_read = min(constants.SCAN_BLOCKSIZE + overlap, out_of_range - offset)
            data = address_space.zread(offset, to_read)
            if not data:
                break
            for x in s:
                for hit in utils.iterfind(data, x):
                    yield offset + hit
            offset += min(to_read, constants.SCAN_BLOCKSIZE)
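A hypothetical call site for the VAD-based variant above, assuming vad_filter is a predicate applied to each VAD by get_vads() and task is an _EPROCESS object; the predicate and needle are illustrative only.

# Hypothetical usage sketch: skip very large regions to keep the scan fast.
small_only = lambda vad: vad.Length < 16 * 1024 * 1024

for hit in task.search_process_memory(["MZ"], vad_filter=small_only):
    print("possible PE header at %#x" % hit)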
def calculate(self):
    common.set_plugin_members(self)

    ##-----------------------------------------------------------
    # Local Calendar Events
    ##-----------------------------------------------------------

    guid_re = re.compile("[A-F0-9]{8}-[A-F0-9]{4}-[A-F0-9]{4}-[A-F0-9]{4}-[A-F0-9]{12}")
    guid_length = 36

    seen = []

    for page, size in self.addr_space.get_available_pages():
        data = self.addr_space.read(page, size)
        if not data:
            continue

        for offset in utils.iterfind(data, "local_"):
            event = obj.Object("String",
                               offset = page + offset,
                               vm = self.addr_space,
                               encoding = "utf8",
                               length = 512)

            if "ACCEPTED" not in str(event):
                continue

            # determine where the next field starts
            field_len = len("local_") + guid_length
            next_field = str(event)[field_len:]

            # the next field is either a description or GUID
            match = guid_re.search(next_field)
            if not match:
                # no GUID follows, so the record cannot be split
                continue

            if match.start() == 0:
                description = ""
                last_field = next_field[guid_length:]
            else:
                description = next_field[:match.start()]
                last_field = next_field[match.start() + guid_length:]

            location = last_field.split("ACCEPTED")[0]

            if (description, location) in seen:
                continue
            seen.append((description, location))

            yield None, description, location

    ##-----------------------------------------------------------
    # Shared / Global Calendar Events
    ##-----------------------------------------------------------

    procs = pstasks.mac_tasks.calculate(self)

    guid_re2 = re.compile("\x25\x00\x00\x00[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}\x00")

    for proc in procs:
        if proc.p_comm.find("Calendar") == -1:
            continue

        space = proc.get_process_address_space()

        for map in proc.get_proc_maps():
            # only read/write without filebacks
            if not (map.get_perms() == "rw-" and not map.get_path()):
                continue

            pages = (map.links.end - map.links.start) / 4096

            for i in range(pages):
                start = map.links.start + i * 4096
                data = space.zread(start, 4096)

                for match in guid_re2.finditer(data):
                    event = obj.Object("String",
                                       vm = space,
                                       length = 128,
                                       offset = start + match.start() + 40 + 40)
                    yield proc, "", event
def services_from_memory_list(addr_space):
    """Enumerate services by walking the SCM's linked list"""

    services = {}

    pre_vista = addr_space.profile.metadata.get('major', 0) < 6
    mem_model = addr_space.profile.metadata.get('memory_model', '32bit')

    if mem_model != "32bit":
        return {}

    ## find the service control manager process
    for process in tasks.pslist(addr_space):
        if str(process.ImageFileName) != "services.exe":
            continue

        ## create a DOS header at the process' image base address
        process_space = process.get_process_address_space()
        image_base = process.Peb.ImageBaseAddress
        dos_header = obj.Object("_IMAGE_DOS_HEADER", offset=image_base, vm=process_space)

        if not dos_header:
            break

        ## the first section (.text) contains the values we need
        try:
            sections = list(dos_header.get_nt_header().get_sections())
            text_seg = sections[0]
        except ValueError:
            ## couldn't parse the PE header
            break
        except IndexError:
            ## no sections were found in the array
            break

        ## acquire the text section's data
        virtual_address = text_seg.VirtualAddress + image_base
        data = process_space.zread(virtual_address, text_seg.Misc.VirtualSize)

        list_head = None

        ## look for the ScInitDatabase signature: five MOV [imm32], EAX
        ## instructions (opcode 0xA3) spaced five bytes apart, followed by
        ## a CALL rel32 (opcode 0xE8)
        for offset in utils.iterfind(data, "\xA3"):
            if not (data[offset + 5] == "\xA3" and
                    data[offset + 10] == "\xA3" and
                    data[offset + 15] == "\xA3" and
                    data[offset + 20] == "\xA3" and
                    data[offset + 25] == "\xE8"):
                continue

            ## the beginning of the service database list (the 32-bit
            ## address operand of the final MOV)
            list_head = obj.Object("unsigned long",
                                   offset=virtual_address + offset + 21,
                                   vm=process_space)

        ## unable to find the signature...means list walking won't work
        if not list_head:
            break

        record = obj.Object("_SERVICE_RECORD", offset=list_head, vm=process_space)

        while record:
            name = str(record.ServiceName.dereference() or '')
            name = name.lower()
            services[name] = record
            record = record.ServiceList.Flink.dereference()

    return services
def services_from_memory_list(addr_space):
    """Enumerate services by walking the SCM's linked list"""

    services = {}

    pre_vista = addr_space.profile.metadata.get('major', 0) < 6
    mem_model = addr_space.profile.metadata.get('memory_model', '32bit')

    if mem_model != "32bit":
        return {}

    ## find the service control manager process
    for process in tasks.pslist(addr_space):
        if str(process.ImageFileName) != "services.exe":
            continue

        ## create a DOS header at the process' image base address
        process_space = process.get_process_address_space()
        image_base = process.Peb.ImageBaseAddress
        dos_header = obj.Object("_IMAGE_DOS_HEADER", offset = image_base, vm = process_space)

        if not dos_header:
            debug.warning("Unable to parse DOS header")
            break

        ## the first section (.text) contains the values we need
        try:
            sections = list(dos_header.get_nt_header().get_sections())
            text_seg = sections[0]
        except ValueError:
            ## couldn't parse the PE header
            debug.warning("Could not parse the PE header")
            break
        except IndexError:
            ## no sections were found in the array
            debug.warning("No sections were found in the array")
            break

        ## acquire the text section's data
        virtual_address = text_seg.VirtualAddress + image_base
        data = process_space.zread(virtual_address, text_seg.Misc.VirtualSize)

        list_head = None

        ## look for the ScInitDatabase signature
        for offset in utils.iterfind(data, "\xA3"):
            if not (data[offset + 5] == "\xA3" and
                    data[offset + 10] == "\xA3" and
                    data[offset + 15] == "\xA3" and
                    data[offset + 20] == "\xA3" and
                    data[offset + 25] == "\xE8"):
                continue

            ## the beginning of the service database list
            list_head = obj.Object("unsigned long",
                                   offset = virtual_address + offset + 21,
                                   vm = process_space)

        ## unable to find the signature...means list walking won't work
        if not list_head:
            debug.warning("Unable to find the signature")
            break

        record = obj.Object("_SERVICE_RECORD", offset = list_head, vm = process_space)

        while record:
            name = str(record.ServiceName.dereference() or '')
            name = name.lower()
            services[name] = record
            record = record.ServiceList.Flink.dereference()

    return services
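A hypothetical consumer of the dictionary returned above, assuming addr_space is the kernel address space obtained elsewhere; only attributes already used in the function (service name keys, obj.Object offsets) are relied on.

# Hypothetical usage sketch
services = services_from_memory_list(addr_space)
for name, record in sorted(services.items()):
    # record is a _SERVICE_RECORD object; obj_offset is its address in services.exe
    print("%-40s %#x" % (name, record.obj_offset))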