def generate_suggestions(self):
    """Yield candidate _KDDEBUGGER_DATA64 structures located by the KDBG scanner.

    Scans self.obj_vm for the profile's KDBG header signature and wraps
    every hit offset in a _KDDEBUGGER_DATA64 object.
    """
    with UpdateCounterForScope('KDBGScanner'):
        # The needle is the profile-specific KDBG header byte signature.
        header_sig = obj.VolMagic(self.obj_vm).KDBGHeader.v()
        kdbg_scanner = kdbg.KDBGScanner(needles = [header_sig])
        for hit in kdbg_scanner.scan(self.obj_vm):
            yield obj.Object("_KDDEBUGGER_DATA64", offset = hit, vm = self.obj_vm)
def scan(self, address_space, offset = None, maxlen = None):
    """Scan all available regions of *address_space* for self.needles.

    Args:
        address_space: the address space to scan.
        offset: starting offset; None means "start from 0".
        maxlen: optional maximum number of bytes to scan past `offset`.

    Yields:
        (tag, hit_offset) tuples, where `tag` is the 4 bytes at the match
        (the pool tag) and `hit_offset` is its absolute offset.
    """
    with UpdateCounterForScope('MultiPoolScanner'):
        # BUG FIX: normalize the start offset up front. The original left
        # `offset` as None and later computed `offset + maxlen`, which
        # raises TypeError whenever maxlen is supplied without an
        # explicit offset.
        if offset is None:
            offset = 0
        current_offset = offset
        for (range_start, range_size) in sorted(address_space.get_available_addresses()):
            # Jump to the next available point to scan from:
            # current_offset jumps up to be at least range_start.
            current_offset = max(range_start, current_offset)
            range_end = range_start + range_size
            # If we have a maximum length, cap the scan at offset + maxlen.
            if maxlen is not None:
                range_end = min(range_end, offset + maxlen)
            while (current_offset < range_end):
                # We've now got range_start <= current_offset < range_end.
                # Read one block plus self.overlap extra bytes so needles
                # straddling a block boundary are still found.
                l = min(constants.SCAN_BLOCKSIZE + self.overlap, range_end - current_offset)
                data = address_space.zread(current_offset, l)
                for needle in self.needles:
                    for addr in utils.iterfind(data, needle):
                        # Yield the matched pool tag as well as the offset,
                        # to save the caller from having to perform another
                        # .read() just to see which tag was matched.
                        yield data[addr:addr + 4], addr + current_offset
                # Advance by the block size only (not the overlap), so the
                # overlap region is re-scanned with the next block.
                current_offset += min(constants.SCAN_BLOCKSIZE, l)
def testAllProcessMemoryWithFilesOnDisk(self):
    """For each interesting process, compare its in-memory module images
    against the corresponding files on disk via the validator.

    Walks the Volatility process list, switches into each process's
    address space, reads every loaded module's image and hands it to
    validator.CompareExe together with the on-disk path.
    """
    validator = MemoryValidatorClass()
    # Working/output directory for the validator.
    validator.Initialize('c:\\mem\\user\\')
    CounterMonitor.Start()
    with UpdateCounterForScope('main'):
        addr_space = utils.load_as(config)
        processList = tasks.pslist(addr_space)
        for processIndex, eprocess in enumerate(processList):
            imagename = str(eprocess.ImageFileName).lower()
            logging.info("---------------------------------------------")
            # Message is attached to any warning/error the validator emits.
            validator.Message = "ImageFileName:{0} UniqueProcessId:{1} DirectoryTableBase:{2}".format(
                eprocess.ImageFileName, eprocess.UniqueProcessId,
                eprocess.Pcb.DirectoryTableBase)
            # Only scan processes whose name matches the configured list.
            if not any(s in imagename for s in self.PROCESS_TO_SCAN):
                continue
            validator.ImageName = imagename
            print '------process {} {}-------'.format(
                processIndex, imagename)
            # Point the global config at this process so address-space
            # construction below uses its DTB.
            config.process_id = eprocess.UniqueProcessId
            config.dtb = eprocess.Pcb.DirectoryTableBase
            task_space = eprocess.get_process_address_space()
            if not task_space:
                logging.error("Cannot acquire process AS")
                continue
            # Materialize: get_load_modules() is consumed more than once.
            all_mods = list(eprocess.get_load_modules())
            # PEB is paged out or no DLLs loaded
            if not all_mods:
                logging.error("Cannot load DLLs in process AS")
                continue
            validator.BuildLoadedModuleAddressesFromVol(all_mods)
            totalMods = len(all_mods)
            for modIndex, mod in enumerate(all_mods):
                print 'module {}/{} {}'.format(modIndex, totalMods,
                                               str(mod.BaseDllName))
                # ExceptionHandler logs and swallows per-module failures so
                # one bad module does not abort the whole run.
                with validator.ExceptionHandler(
                        'Failed comparing {0}'.format(imagename)):
                    validator.InitializeModuleInfoFromVol(mod)
                    if not task_space.is_valid_address(validator.DllBase):
                        logging.error("Address is not valid in process AS")
                        continue
                    # zread pads unreadable pages with zeroes.
                    memoryData = task_space.zread(validator.DllBase,
                                                  validator.SizeOfImage)
                    if not memoryData:
                        validator.Warn('failed to read memory data')
                        continue
                    validator.CompareExe(memoryData, validator.FullDllPath)
    CounterMonitor.Stop()
    validator.DumpFinalStats()
def testRunningProcesses(self):
    """Compare module images of live (running) processes against their
    on-disk files, using WinAppDbg to enumerate processes and read memory.
    """
    validator = MemoryValidatorClass()
    # Working/output directory for the validator.
    validator.Initialize('c:\\mem\\user\\')
    CounterMonitor.Start()
    # Debug privilege is required to open/read other processes' memory.
    System.request_debug_privileges()
    with UpdateCounterForScope('main'):
        system = System()
        system.scan_processes()
        totalProcesses = system.get_process_count()
        for processIndex, process in enumerate(system.iter_processes()):
            fileName = getattr(process, 'fileName')
            pid = getattr(process, 'dwProcessId')
            # Skip system idle / entries WinAppDbg could not resolve.
            if not fileName or not pid:
                continue
            validator.ImageName = fileName
            logging.info("---------------------------------------------")
            validator.Message = "[{}] fileName:{} pid:{}".format(processIndex,
                                                                 fileName, pid)
            logging.info(validator.Message)
            # Only scan processes whose name matches the configured list.
            if not any(s in fileName for s in self.PROCESS_TO_SCAN):
                continue
            print '------process {}/{} {}-------'.format(processIndex,
                                                         totalProcesses,
                                                         fileName)
            with validator.ExceptionHandler('Failed comparing {0}'.format(fileName)):
                process.scan_modules()
                # Group modules by base name: the same DLL can be mapped
                # more than once, hence a list per name.
                mods = {}
                for module in process.iter_modules():
                    baseDllName = ntpath.basename(module.get_filename().lower())
                    mod = {
                        'BaseDllName' : baseDllName,
                        'FullDllName' : module.get_filename().lower(),
                        'StartAddr' : module.get_base(),
                        'EndAddr' : module.get_base() + module.get_size(),
                        'SizeOfImage' : module.get_size()
                    }
                    if not mods.get(baseDllName):
                        mods[baseDllName] = []
                    mods[baseDllName].append(mod)
                validator.BuildLoadedModuleAddressesFromWinAppDbg(mods)
                totalMods = len(mods)
                for modIndex, modList in enumerate(mods.itervalues()):
                    print 'module {}/{} {}'.format(modIndex, totalMods,
                                                   modList[0]['BaseDllName'])
                    # NOTE(review): this inner loop reuses the name
                    # `modIndex`, shadowing the outer counter used in the
                    # progress print above — presumably unintended; confirm
                    # before renaming.
                    for modIndex, mod in enumerate(modList):
                        validator.InitializeModuleInfoFromWinAppDbg(mod)
                        with validator.ExceptionHandler('failed comparing {0}'.format(mod)):
                            memoryData = process.read(validator.DllBase,
                                                      validator.SizeOfImage)
                            if not memoryData:
                                validator.Warn('failed to read memory data')
                                continue
                            validator.CompareExe(memoryData, validator.FullDllPath)
    CounterMonitor.Stop()
    validator.DumpFinalStats()
def get_available_addresses(self):
    """A generator that returns (addr, size) for each valid address block.

    Coalesces the page-granular runs from self.get_available_pages()
    (assumed sorted by offset) into maximal contiguous runs.
    """
    with UpdateCounterForScope('get_available_addresses'):
        runLength = None
        currentOffset = None
        for (offset, size) in self.get_available_pages():
            if runLength is None:
                # First page starts the first run.
                runLength = size
                currentOffset = offset
            elif offset <= (currentOffset + runLength):
                # Page is adjacent to or overlaps the current run: extend
                # the run to cover it.
                # BUG FIX: the original did
                #   runLength += (currentOffset + runLength - offset) + size
                # which double-counts the overlapping bytes whenever
                # offset < currentOffset + runLength (adjacent pages were
                # unaffected). The run's new end is simply the max of the
                # old end and this page's end.
                runLength = max(runLength, (offset + size) - currentOffset)
            else:
                # Gap found: emit the finished run and start a new one.
                yield (currentOffset, runLength)
                runLength = size
                currentOffset = offset
        # Emit the final run, if any pages were seen at all.
        if runLength is not None and currentOffset is not None:
            yield (currentOffset, runLength)
        # BUG FIX: removed the trailing `raise StopIteration` — simply
        # returning ends a generator, and an explicit raise becomes a
        # RuntimeError under PEP 479 (Python 3.7+).
def testAllKernelModulesInMemoryToFilesOnDisk(self):
    """Compare every loaded kernel module's in-memory image against its
    file on disk via the validator.
    """
    validator = MemoryValidatorClass()
    # Working/output directory for the validator.
    validator.Initialize('c:\\mem\\kernel\\')
    CounterMonitor.Start()
    with UpdateCounterForScope('main'):
        addr_space = utils.load_as(config)
        # BUG FIX: materialize the module list. Modules.calculate() is a
        # generator in Volatility; the original passed it to
        # BuildLoadedModuleAddressesFromVol and then iterated it again,
        # so the second pass would see an exhausted iterator. This also
        # matches the sibling test's `list(eprocess.get_load_modules())`.
        all_mods = list(modules.Modules(config).calculate())
        validator.BuildLoadedModuleAddressesFromVol(all_mods)
        for mod in all_mods:
            # ExceptionHandler logs and swallows per-module failures so
            # one bad module does not abort the whole run.
            with validator.ExceptionHandler(
                    'Failed comparing {0}'.format(mod)):
                validator.InitializeModuleInfoFromVol(mod)
                #bytearray is fast but screwing up pefile
                #memoryData = bytearray(addr_space.zread(self.DllBase, self.size_to_read))
                memoryData = addr_space.zread(validator.DllBase,
                                              validator.SizeOfImage)
                if not memoryData:
                    logging.error(
                        'failed to read memory data for {0}'.format(
                            validator.FullDllPath))
                    continue
                validator.CompareExe(memoryData, validator.FullDllPath)
    CounterMonitor.Stop()
    validator.DumpFinalStats()
def load_as(config, astype='virtual', **kwargs):
    """Loads an address space by stacking valid ASes on top of each other
    (priority order first)"""
    with UpdateCounterForScope('load_as'):
        base_as = None
        # Accumulates per-class failure reasons for diagnostics.
        error = exceptions.AddrSpaceError()
        # Start off requiring another round
        found = True
        ## A full iteration through all the classes without anyone
        ## selecting us means we are done:
        while found:
            debug.debug("Voting round")
            found = False
            #print registry.get_plugin_classes(addrspace.BaseAddressSpace)
            # Classes without an explicit `order` attribute default to 10.
            for cls in sorted(registry.get_plugin_classes(
                    addrspace.BaseAddressSpace).values(),
                    key=lambda x: x.order if hasattr(x, 'order') else 10):
                debug.debug("Trying {0} ".format(cls))
                try:
                    # Each successful class wraps the previous one; restart
                    # the voting round from the highest-priority class.
                    base_as = cls(base_as, config, astype=astype, **kwargs)
                    debug.debug("Succeeded instantiating {0} order {1}".format(
                        base_as, cls.order))
                    found = True
                    break
                except addrspace.ASAssertionError, e:
                    # Class declined to stack here — record why and move on.
                    debug.debug(
                        "Failed instantiating {0}: {1}".format(
                            cls.__name__, e), 2)
                    error.append_reason(cls.__name__, e)
                    continue
                except Exception, e:
                    # Unexpected failure — record it and keep voting.
                    debug.debug(
                        "Failed instantiating (exception): {0}".format(e))
                    error.append_reason(cls.__name__ + " - EXCEPTION", e)
                    continue
        # NOTE(review): no return statement is visible in this span, so as
        # written the function yields None to callers. This looks truncated —
        # presumably the tail (returning base_as, or raising `error` when
        # nothing stacked) lies outside this chunk; confirm before relying on
        # the return value.
if module in cmds.keys(): command = cmds[module](config) ## Register the help cb from the command itself config.set_help_hook(obj.Curry(command_help, command)) config.parse_options() if not config.LOCATION: debug.error("Please specify a location (-l) or filename (-f)") command.execute() except exceptions.VolatilityException, e: print e if __name__ == "__main__": config.set_usage( usage="Volatility - A memory forensics analysis platform.") config.add_help_hook(list_plugins) config.process_id = None try: with UpdateCounterForScope('main'): main() CounterMonitor.Stop() except Exception, e: if config.DEBUG: debug.post_mortem() except SystemExit, e: pass CounterMonitor.Stop()
def scan(self, address_space, offset=0, maxlen=None):
    """Scan *address_space* yielding every offset that satisfies all of the
    configured ScannerCheck constraints.

    Args:
        address_space: the address space to scan.
        offset: starting offset (defaults to 0).
        maxlen: optional cap — scanning stops at offset + maxlen.

    Yields:
        Absolute offsets at which every check in self.checks passed.
    """
    with UpdateCounterForScope('BaseScanner'):
        self.buffer.profile = address_space.profile
        current_offset = offset
        ## Build our constraints from the specified ScannerCheck
        ## classes:
        self.constraints = []
        for class_name, args in self.checks:
            check = registry.get_plugin_classes(ScannerCheck)[class_name](
                self.buffer, **args)
            self.constraints.append(check)
        ## Which checks also have skippers?
        skippers = [c for c in self.constraints if hasattr(c, "skip")]
        for (range_start, range_size) in sorted(
                address_space.get_available_addresses()):
            # Jump to the next available point to scan from:
            # current_offset jumps up to be at least range_start.
            current_offset = max(range_start, current_offset)
            range_end = range_start + range_size
            # If we have a maximum length, make sure it's less than range_end.
            if maxlen:
                range_end = min(range_end, offset + maxlen)
            while (current_offset < range_end):
                # We've now got range_start <= current_offset < range_end.
                # Read one block plus self.overlap bytes so patterns that
                # straddle a block boundary are still matched.
                l = min(constants.SCAN_BLOCKSIZE + self.overlap,
                        range_end - current_offset)
                # Populate the buffer with data.
                # We use zread to scan what we can because there are often
                # invalid pages in the DTB.
                data = address_space.zread(current_offset, l)
                self.buffer.assign_buffer(data, current_offset)
                ## Run checks throughout this block of data:
                i = 0
                while i < l:
                    if self.check_addr(i + current_offset):
                        ## yield the offset to the start of the memory
                        ## (after the pool tag)
                        yield i + current_offset
                    ## Where should we go next? By default we advance one
                    ## byte, but checkers that expose a skip() method can
                    ## tell us there is no possible match before a farther
                    ## offset, letting us leap over unmatchable regions.
                    ## FIXME - currently skippers assume that the check must
                    ## match, therefore we can skip the unmatchable region,
                    ## but it's possible that a scanner needs to match only
                    ## some checkers.
                    skip = 1
                    for s in skippers:
                        skip = max(skip, s.skip(data, i))
                    i += skip
                # Advance by the block size only (not the overlap), so the
                # overlap region is re-scanned with the next block.
                current_offset += min(constants.SCAN_BLOCKSIZE, l)
def generate_suggestions(self):
    """Yield each KPCR candidate offset found by KPCRScanner in this
    address space."""
    with UpdateCounterForScope('KPCRScanner'):
        kpcr_scanner = kpcr.KPCRScanner()
        for hit in kpcr_scanner.scan(self.obj_vm):
            yield hit