def create_cache_folders(self):
    """Creates the cache folder for this dump and the record cache subfolder."""
    dumpname = self.memory_handler.get_name()
    # create the cache folder
    config.create_cache_folder(dumpname)
    # and the record subfolder
    self.create_record_cache_folder()
def persist(self, _context):
    """Saves this object's similarity array into the context's cache folder.

    :param _context: context providing the cache folder location
    """
    outdir = _context.get_folder_cache()
    config.create_cache_folder(outdir)
    # FIX: both assignments to outname were commented out, leaving it
    # undefined and raising a NameError on the save call below. Restore the
    # same naming scheme used by the other persist() implementations.
    outname = os.path.sep.join([outdir, self._name])
    utils.int_array_save(outname, self._similarities)
    return
def persist(self):
    """Writes this group's similarity values to its signature-groups cache file."""
    cache_dir = config.get_cache_filename(
        config.CACHE_SIGNATURE_GROUPS_DIR, self._context.dumpname)
    config.create_cache_folder(cache_dir)
    # one file per group, named after the group
    target = os.path.sep.join([cache_dir, self._name])
    utils.int_array_save(target, self._similarities)
    return
def persist(self):
    """Persists the similarity array under the signature-groups cache directory."""
    folder = config.get_cache_filename(config.CACHE_SIGNATURE_GROUPS_DIR,
                                       self._context.dumpname)
    config.create_cache_folder(folder)
    filename = os.path.sep.join([folder, self._name])
    utils.int_array_save(filename, self._similarities)
    return
def cacheLoad(cls, memory_handler, heap_addr):
    """Loads a pickled context for heap_addr from the dump's cache folder.

    Deletes the cache file and raises IOError when it is corrupted, so the
    next run rebuilds it from scratch.
    """
    dumpname = os.path.abspath(memory_handler.get_name())
    config.create_cache_folder(dumpname)
    context_cache = config.get_cache_filename(config.CACHE_CONTEXT, dumpname, heap_addr)
    try:
        fin = open(context_cache, 'rb')
        try:
            ctx = pickle.load(fin)
        finally:
            fin.close()
    except (ValueError, EOFError) as e:
        # corrupted/truncated pickle: remove it so a fresh one gets built
        os.remove(context_cache)
        log.error('Error in the context file. File cleaned. Please restart.')
        raise IOError('Error in the context file. File cleaned. Please restart.')
    log.debug('\t[-] loaded my context from cache')
    # re-attach the live resources that were not pickled
    ctx.config = config
    ctx.memory_handler = memory_handler
    ctx.heap = ctx.memory_handler.get_mapping_for_address(ctx._heap_start)
    # and initialize
    ctx._init2()
    return ctx
def cacheLoad(cls, memory_handler, heap_addr):
    """Loads a pickled context for heap_addr from the dump's cache folder.

    Deletes the cache file and raises RuntimeError when it is corrupted, so
    the next run rebuilds it from scratch.
    """
    dumpname = os.path.abspath(memory_handler.get_name())
    config.create_cache_folder(dumpname)
    context_cache = config.get_cache_filename(config.CACHE_CONTEXT, dumpname, heap_addr)
    try:
        # FIX: use open() in binary mode instead of the py2-only file() in
        # text mode -- pickles are binary data; text mode corrupts them on
        # some platforms. Matches the sibling cacheLoad implementation.
        with open(context_cache, 'rb') as fin:
            ctx = pickle.load(fin)
    # FIX: a corrupted/truncated pickle can raise ValueError as well as
    # EOFError; catch both so the broken cache file always gets cleaned.
    except (ValueError, EOFError) as e:
        os.remove(context_cache)
        log.error('Error in the context file. File cleaned. Please restart.')
        raise RuntimeError('Error in the context file. File cleaned. Please restart.')
    log.debug('\t[-] loaded my context from cache')
    # re-attach the live resources that were not pickled
    ctx.config = config
    ctx.memory_handler = memory_handler
    ctx.heap = ctx.memory_handler.get_mapping_for_address(ctx._heap_start)
    # and initialize
    ctx._init2()
    return ctx
def cacheSizes(self):
    """Find the number of different sizes, and creates that much numpyarray.

    Groups all allocated chunk addresses by their malloc size and saves one
    int array per size ('size.%0.4x') under the signature-sizes cache dir,
    then drops a tag file marking the step as done.
    """
    # if not os.access
    outdir = config.get_cache_filename(
        config.CACHE_SIGNATURE_SIZES_DIR, self._context.dumpname)
    config.create_cache_folder(outdir)
    #
    sizes = map(int, set(self._context._malloc_sizes))
    arrays = dict([(s, []) for s in sizes])
    # sort all addr in all sizes..
    # FIX: plain loop instead of a side-effect list comprehension, which
    # built and threw away a list of None.
    for i, addr in enumerate(self._context._malloc_addresses):
        arrays[self._context._malloc_sizes[i]].append(long(addr))
    # saving all sizes dictionary in files...
    for size, lst in arrays.items():
        fout = os.path.sep.join([outdir, 'size.%0.4x' % (size)])
        arrays[size] = utils.int_array_save(fout, lst)
    # saved all sizes dictionaries.
    # tag it as done
    # FIX: file(...) opened the tag file and leaked the descriptor; create
    # it with a context manager so it is closed immediately.
    with open(os.path.sep.join([outdir, config.CACHE_SIGNATURE_SIZES_DIR_TAG]), 'w'):
        pass
    self._sizes = arrays
    return
def _init2(self): log.debug('[+] HeapContext on heap 0x%x', self._heap_start) # Check that cache folder exists config.create_cache_folder(self.dumpname) # re-open the heap walker heap_mapping = self.memory_handler.get_mapping_for_address(self._heap_start) finder = self.memory_handler.get_heap_finder() self.walker = finder.get_heap_walker(heap_mapping) # we need a heap walker to parse all allocations log.debug('[+] Searching pointers in heap') # get all pointers found in from allocated space. all_offsets, all_values = self.get_heap_pointers_from_allocated(self.walker) self._pointers_values = all_values self._pointers_offsets = all_offsets log.debug('[+] Gathering allocated heap chunks') res = utils.cache_get_user_allocations(self, self.walker) self._structures_addresses, self._structures_sizes = res # clean a bit the open fd's self.walker = None self.memory_handler.reset_mappings() # CAUTION: all heap walker, mappings are resetted. # Segmentation Fault will ensue if we don't restore heap walkers. heap_mapping = self.memory_handler.get_mapping_for_address(self._heap_start) finder = self.memory_handler.get_heap_finder() self.walker = finder.get_heap_walker(heap_mapping) #if self.memory_handler.get_target_platform().get_os_name() not in ['winxp', 'win7']: # log.info('[+] Reversing function pointers names') # # TODO in reversers # # dict(libdl.reverseLocalFonctionPointerNames(self) ) # self._function_names = dict() return
def cacheSizes(self):
    """Find the number of different sizes, and creates that much numpyarray.

    Groups all allocated chunk addresses by their malloc size and saves one
    int array per size ('size.%0.4x') under the signature-sizes cache dir,
    then drops a tag file marking the step as done.
    """
    # if not os.access
    outdir = config.get_cache_filename(config.CACHE_SIGNATURE_SIZES_DIR,
                                       self._context.dumpname)
    config.create_cache_folder(outdir)
    #
    sizes = map(int, set(self._context._malloc_sizes))
    arrays = dict([(s, []) for s in sizes])
    # sort all addr in all sizes..
    # FIX: plain loop instead of a side-effect list comprehension, which
    # built and threw away a list of None.
    for i, addr in enumerate(self._context._malloc_addresses):
        arrays[self._context._malloc_sizes[i]].append(long(addr))
    # saving all sizes dictionary in files...
    for size, lst in arrays.items():
        fout = os.path.sep.join([outdir, 'size.%0.4x' % (size)])
        arrays[size] = utils.int_array_save(fout, lst)
    # saved all sizes dictionaries.
    # tag it as done
    # FIX: file(...) opened the tag file and leaked the descriptor; create
    # it with a context manager so it is closed immediately.
    with open(os.path.sep.join([outdir, config.CACHE_SIGNATURE_SIZES_DIR_TAG]), 'w'):
        pass
    self._sizes = arrays
    return