def task_files(self, offset):
    """Dump cached file data for the FILE_OBJECT at *offset* and print a
    summary line for each result returned by the DumpFiles plugin.

    :param offset: physical offset of the target FILE_OBJECT.
    :returns: None (output is printed).
    """
    # DumpFiles reads its target and output directory from the shared
    # config rather than taking arguments, so set them before constructing.
    self._config.PHYSOFFSET = offset
    self._config.DUMP_DIR = "."
    dump_files = filedump.DumpFiles(self._config)
    for dump in dump_files.calculate():
        # Parenthesized single-argument prints behave identically under
        # Python 2's print statement and are valid Python 3.
        print(dump.keys())
        print("[*] {0} : {1} : {2} : {3} : {4} : {5} : {6}".format(
            dump['name'], dump['ofpath'], dump['pid'], dump['fobj'],
            dump['pad'], dump['type'], dump['present']))
def get_tasks(self):
    """Scan memory for Windows Task Scheduler job files and parse their XML.

    Finds FILE_OBJECTs under ``system32\\tasks\\`` (Microsoft-shipped tasks
    are skipped unless VERBOSE is set), reassembles each file's
    memory-resident pages via DumpFiles, and parses the task XML.

    :returns: list of (task_name, parsed_dict, raw_xml, pids) tuples.
    """
    addr_space = utils.load_as(self._config)
    scanner = filescan.FileScan(self._config)
    tasks = []
    parsed_tasks = []
    for fobj in scanner.calculate():  # renamed from 'file' (shadowed builtin)
        filename = str(fobj.file_name_with_device() or '')
        lowered = filename.lower()  # hoisted: was computed three times
        # Only user/third-party tasks by default; Microsoft-shipped tasks
        # are included only when the user asked for verbose output.
        if "system32\\tasks\\" in lowered and (
                'system32\\tasks\\microsoft' not in lowered
                or self._config.VERBOSE):
            tasks.append((fobj.obj_offset, filename))
            debug.debug("Found task: 0x{0:x} {1}".format(
                fobj.obj_offset, filename))
    for offset, name in tasks:
        self._config.PHYSOFFSET = '0x{:x}'.format(offset)
        df = dumpfiles.DumpFiles(self._config)
        self._config.DUMP_DIR = '.'
        for data in df.calculate():
            # Reassemble the file from its memory-resident pages; each
            # 'present' entry appears to be (phys_addr, file_offset, length)
            # based on how it is indexed below.
            chopped_file = {}
            for mdata in data['present']:
                rdata = addr_space.base.read(mdata[0], mdata[2])
                chopped_file[mdata[1]] = rdata
            # Concatenate the chunks ordered by their offset in the file.
            task_xml = "".join(
                part[1] for part in sorted(chopped_file.items(),
                                           key=lambda x: x[0]))
            parsed = self.parse_task_xml(task_xml)
            if parsed:
                # Fold any Arguments into the Command string so callers get
                # the full command line in one field.
                args = parsed['Actions']['Exec'].get("Arguments", None)
                if args:
                    parsed['Actions']['Exec']['Command'] += " {}".format(
                        args)
                pids = self.find_pids_for_imagepath(
                    parsed['Actions']['Exec']['Command'])
                parsed_tasks.append(
                    (name.split('\\')[-1], parsed, task_xml, pids))
    return parsed_tasks
def dump_files(self, offset):
    """Extract file contents from the Cache Manager for the object at *offset*.

    Walks DumpFiles results, and for the first readable page run of a
    DataSectionObject returns the file's base name and its raw bytes.

    :param offset: physical offset of the target FILE_OBJECT.
    :returns: (filename, raw_bytes) on success, or None if nothing could
              be extracted.
    """
    # hex() already returns a str, so the extra str() wrapper was redundant.
    self._config.update('PHYSOFFSET', hex(offset))
    self._config.update('NAME', True)
    for summaryinfo in dumpfiles.DumpFiles(self._config).calculate():
        if summaryinfo.get('type') != 'DataSectionObject':
            continue
        for mdata in summaryinfo.get('present', []):
            # Skip malformed or zero-address entries; layout appears to be
            # (phys_addr, file_offset, length) -- confirm against DumpFiles.
            if len(mdata) < 3 or not mdata[0]:
                continue
            try:
                rdata = self.addr_space.base.read(mdata[0], mdata[2])
            except Exception:
                # Narrowed from a bare 'except:'; a failed page read is
                # expected for paged-out data, so treat it as best-effort.
                rdata = None
            if rdata:
                filename = os.path.basename(summaryinfo['name'])
                debug_info('[+] Extracted from Cache Manager: %s' % filename)
                return filename, rdata
    # Warn once after exhausting all candidates (previously this fired for
    # every DataSectionObject that yielded nothing).
    debug_warning('[-] Failed to extract from Cache Manager')
    return None