def retrieve_selected_functions(self, funcs):
    """Match every selected function against BinaryAI.

    A wait box tracks progress, the user may cancel mid-run, and a
    one-line succ/fail/skip summary is printed either way.
    """
    if not self.check_before_use():
        return
    # public search uses no funcset restriction
    funcset_ids = None if self.cfg['usepublic'] else [self.funcset]
    matched = skipped = failed = 0

    def report():
        # single summary line, printed on completion or cancellation
        print(
            "[{}] {} functions successfully matched, {} functions failed, {} functions skipped"
            .format(self.name, matched, failed, skipped))

    targets = list(funcs)
    total = len(targets)
    idaapi.show_wait_box("Matching... (0/{})".format(total))
    for done, ea in enumerate(targets, 1):
        idaapi.replace_wait_box("Matching... ({}/{})".format(done, total))
        if idaapi.user_cancelled():
            idaapi.hide_wait_box()
            report()
            return
        code = self.retrieve_function_with_check(ea, 1, funcset_ids)
        if code == 0:
            matched += 1
        elif code == 1:
            skipped += 1
        else:
            failed += 1
    idaapi.hide_wait_box()
    report()
def run_query(qf, ea_list, qs):
    """Run a hexrays-toolbox query over every address in ea_list.

    Matches are appended to a chooser as they are found. The wait box is
    cancellable, and a failure on one function is printed without
    stopping the remaining functions. Returns the chooser.
    """
    caption = qs.help
    if len(caption) >= 80:
        caption = "%s..." % caption[:77]
    chooser = hxtb.ic_t(title="Shell [%s]" % caption)
    want_ast = qs.ast_type == 1
    idaapi.show_wait_box("Processing")
    try:
        total = len(ea_list)
        for idx, ea in enumerate(ea_list, 1):
            if idaapi.user_cancelled():
                break
            idaapi.replace_wait_box("Processing function %d/%d" % (idx, total))
            try:
                hits = hxtb.exec_query(qf, [ea], want_ast,
                                       parents=True,
                                       flags=idaapi.DECOMP_NO_WAIT)
                for hit in hits:
                    chooser.append(hit)
            except Exception as e:
                # best-effort: report and continue with the next function
                print("%s: %s" % (SCRIPT_NAME, e))
    finally:
        idaapi.hide_wait_box()
    return chooser
def match_funcs(self, funcs):
    """Match each function in *funcs* against BinaryAI, with progress UI.

    Shows a cancellable wait box; a succ/skip/fail summary is emitted
    via BinaryAILog.summary on completion or cancellation.
    """
    if not self.check_before_use():
        return
    # i = processed count, succ/skip/fail = per-function outcomes
    i, fail, skip, succ = 0, 0, 0, 0

    def stop():
        # close the wait box and emit the final summary line
        idaapi.hide_wait_box()
        BinaryAILog.summary(succ, skip, fail, "matched")

    funcs_len = len(funcs)
    idaapi.show_wait_box("Matching... (0/{})".format(funcs_len))
    for ea in funcs:
        # refresh process status
        i += 1
        idaapi.replace_wait_box("Matching... ({}/{})".format(i, funcs_len))
        # check cancelled or not
        if idaapi.user_cancelled():
            stop()
            return
        status = None
        try:
            status = self._match_with_check(ea)
        finally:
            # status: 1 = matched, 0 = skipped; anything else (including
            # status left None by an exception) counts as a failure.
            # NOTE(review): if _match_with_check raises, the exception
            # propagates after this finally block and stop() never runs,
            # so the wait box stays open — presumably handled by the
            # caller; confirm.
            if status == 1:
                succ += 1
            elif status == 0:
                skip += 1
            else:
                fail += 1
    stop()
def _refresh_database_coverage(self):
    """
    Refresh all the database coverage mappings managed by the director.
    """
    logger.debug("Refreshing database coverage mappings")

    total = len(self.all_names)
    for index, mapping_name in enumerate(self.all_names, 1):
        logger.debug(" - %s" % mapping_name)
        # keep the user informed while each mapping is rebuilt
        idaapi.replace_wait_box(
            "Refreshing coverage mapping %u/%u" % (index, total)
        )
        mapping = self.get_coverage(mapping_name)
        mapping.update_metadata(self.metadata)
        mapping.refresh()
def revert_selected_functions(self, funcs):
    """Revert BinaryAI matches for the selected functions.

    Shows a cancellable progress wait box and prints a one-line summary.
    Addresses with no enclosing function, or with no BinaryAI mark to
    revert, are counted as skipped; `fail` is retained for summary-format
    symmetry with the match/upload paths.
    """
    i, succ, skip, fail = 0, 0, 0, 0

    def report():
        # single summary line, printed on completion or cancellation
        print(
            "[{}] {} functions successfully reverted, {} functions failed, {} functions skipped"
            .format(self.name, succ, fail, skip))

    _funcs = [ea for ea in funcs]
    funcs_len = len(_funcs)
    idaapi.show_wait_box("reverting... (0/{})".format(funcs_len))
    for ea in _funcs:
        i += 1
        idaapi.replace_wait_box("reverting... ({}/{})".format(
            i, funcs_len))
        # FIX: allow the user to abort long batch reverts, consistent
        # with retrieve_selected_functions / upload_selected_functions
        if idaapi.user_cancelled():
            idaapi.hide_wait_box()
            report()
            return
        # FIX: get_func may return None (address no longer in a
        # function); previously this raised AttributeError and left the
        # wait box stuck open
        pfn = idaapi.get_func(ea)
        if pfn is None:
            skip += 1
            continue
        if bai_mark.revert_bai_func(pfn.start_ea):
            succ += 1
        else:
            skip += 1
    idaapi.hide_wait_box()
    report()
def upload_funcs(self, funcs):
    """Upload each function in *funcs* to the user's BinaryAI funcset.

    Functions whose flowchart has fewer blocks than the configured
    `minsize` are skipped. A cancellable wait box tracks progress, and
    BinaryAILog.summary reports succ/skip/fail when done or cancelled.
    """
    if not self.check_before_use(check_funcset=True):
        return
    i, succ, skip, fail = 0, 0, 0, 0

    def stop():
        # close the wait box and emit the final summary line
        idaapi.hide_wait_box()
        BinaryAILog.summary(succ, skip, fail, "uploaded")

    funcs_len = len(funcs)
    idaapi.show_wait_box("Uploading... (0/{})".format(funcs_len))
    for ea in funcs:
        i += 1
        idaapi.replace_wait_box("Uploading... ({}/{})".format(
            i, funcs_len))
        if idaapi.user_cancelled():
            stop()
            return
        # skip functions below the configured minimum flowchart size
        pfn = idaapi.get_func(ea)
        if idaapi.FlowChart(pfn).size < bai_config['minsize']:
            skip += 1
            continue
        # try upload
        func_id = None
        try:
            func_id = self.mgr.upload(ea, self.mgr.funcset)
        except DecompilationFailure as e:
            # decompiler errors are per-function failures; keep going
            BinaryAILog.fail(idaapi.get_func_name(ea), str(e))
            fail += 1
            continue
        except BinaryAIException as e:
            # service-level error: close the UI, then report fatally.
            # NOTE(review): assumes BinaryAILog.fatal does not return
            # (no `continue` follows it) — confirm.
            stop()
            BinaryAILog.fatal(e)
        # upload returned no id -> count as a failure
        if not func_id:
            fail += 1
            continue
        succ += 1
    stop()
def upload_selected_functions(self, funcs):
    """Upload the selected functions to the current BinaryAI funcset.

    Functions whose flowchart has fewer blocks than the configured
    `minsize` are skipped; per-function decompilation/upload errors are
    printed and counted as failures. A one-line succ/fail/skip summary
    is printed on completion or cancellation.

    Raises RuntimeError on a service-level BinaryAIException.
    """
    if not self.check_before_use(check_funcset=True):
        return
    i, succ, skip, fail = 0, 0, 0, 0

    def report():
        # single summary line, printed on completion or cancellation
        print(
            "[{}] {} functions successfully uploaded, {} functions failed, {} functions skipped"
            .format(self.name, succ, fail, skip))

    _funcs = [ea for ea in funcs]
    funcs_len = len(_funcs)
    idaapi.show_wait_box("Uploading... (0/{})".format(funcs_len))
    for ea in _funcs:
        i += 1
        idaapi.replace_wait_box("Uploading... ({}/{})".format(
            i, funcs_len))
        if idaapi.user_cancelled():
            idaapi.hide_wait_box()
            report()
            return
        # skip functions below the configured minimum flowchart size
        pfn = idaapi.get_func(ea)
        if idaapi.FlowChart(pfn).size < self.cfg['minsize']:
            skip += 1
            continue
        func_id = None
        try:
            func_id = self.upload_function(ea, self.funcset)
        except DecompilationFailure:
            # falls through to the generic upload-error path below
            pass
        except BinaryAIException as e:
            # FIX: was `assert False, ...` — asserts are stripped under
            # `python -O`, which would let the loop silently continue
            # with a hidden wait box; raise unconditionally instead.
            idaapi.hide_wait_box()
            raise RuntimeError("[BinaryAI] {}".format(e._msg))
        func_name = idaapi.get_func_name(ea)
        if not func_id:
            print("[{}] {} failed because upload error".format(
                self.name, func_name))
            fail += 1
            continue
        succ += 1
    idaapi.hide_wait_box()
    report()
def revert_funcs(self, funcs):
    """Revert BinaryAI marks on the given functions, with progress UI.

    Cancellable via the IDA wait box; a summary is logged either way.
    Functions without a revertable mark are counted as skipped.
    """
    reverted = skipped = 0
    total = len(funcs)

    def finish():
        # close the wait box and log the final summary
        idaapi.hide_wait_box()
        BinaryAILog.summary(reverted, skipped, 0, "reverted")

    idaapi.show_wait_box("Reverting... (0/{})".format(total))
    for done, ea in enumerate(funcs, 1):
        idaapi.replace_wait_box("Reverting... ({}/{})".format(done, total))
        if idaapi.user_cancelled():
            finish()
            return
        if bai_mark.revert_bai_func(ea):
            reverted += 1
        else:
            skipped += 1
    finish()
def _aggregate_batch(self, loaded_files):
    """
    Aggregate the given loaded_files data into a single coverage object.
    """
    idaapi.replace_wait_box("Aggregating coverage batch...")

    # a fresh coverage set to manually aggregate all the file data into
    aggregate = DatabaseCoverage({}, self.palette)

    #
    # walk the coverage data loaded from disk, normalizing each file
    # (translate / filter / flatten) into a list of instruction
    # addresses before folding it into the aggregate
    #

    total = len(loaded_files)
    for index, data in enumerate(loaded_files, 1):

        # keep the user informed about our progress while aggregating
        idaapi.replace_wait_box(
            "Aggregating batch data %u/%u" % (index, total)
        )

        # normalize coverage data to the open database
        try:
            addresses = self._normalize_coverage(data, self.director.metadata)
        except Exception as e:
            # normalization failed; print & log it, then move on
            lmsg("Failed to map coverage %s" % data.filepath)
            lmsg("- %s" % e)
            logger.exception("Error details:")
            continue

        # fold the normalized addresses into the aggregate (defer refresh)
        aggregate.add_addresses(addresses, False)

    # return the aggregated coverage object
    return aggregate
def log(msg):
    """Echo *msg* to the IDA output window (timestamped) and wait box."""
    stamped = "[%s] %s\n" % (time.asctime(), msg)
    Message(stamped)
    replace_wait_box(msg)
# Decompile every function in func_list, feeding parsed matches into the
# chooser. Progress is shown as a 10-segment text bar in the wait box.
aborted = False
i = 0
# bar bucket size: roughly one segment per 10% of the function count.
# NOTE(review): Python 2 semantics here — integer '/' truncates (the
# `print` statement below confirms py2).
x = nfuncs / 10 if nfuncs >= 10 else nfuncs
idaapi.show_wait_box("Working...")
for ea in func_list:
    # number of filled '#' segments for the current progress.
    # NOTE(review): `i` is never incremented within this chunk, so the
    # bar/percentage stay at zero unless it advances elsewhere — confirm.
    bars = (int(round(i / (x), 0)))
    funcname = idaapi.get_func_name(ea)
    # truncate long names so the wait box stays readable
    funcname = funcname if len(
        funcname) < 20 else funcname[:20] + "..."
    progress = "[%s%s] : %3.2f%%" % (bars * '#', (10 - bars) * '=',
                                     (float(i) / float(nfuncs)) * 100.0)
    idaapi.replace_wait_box("Total progress: %s\n\nScanning: %s\n\n"
                            % (progress, funcname))
    # decompile without blocking the UI; failures are reported and the
    # function is simply skipped
    try:
        cfunc = idaapi.decompile(ea, flags=idaapi.DECOMP_NO_WAIT)
    except idaapi.DecompilationFailure:
        print "Error decompiling function @ 0x%x" % ea
        cfunc = None
    if cfunc:
        # collect matches from the decompiled body and feed the chooser
        fp = func_parser_t(cfunc)
        fp.apply_to(cfunc.body, None)
        choser.feed(fp.data)
    if idaapi.user_cancelled():
        aborted = True
        break
def search_gadgets(self):
    """Search backwards from every collected RETN for ROP gadgets.

    For each pointer in self.retns, caches a block of memory behind the
    return instruction and tries to build a gadget at every offset up to
    self.maxRopOffset. Results accumulate in self.gadgets /
    self.gadgets_cache. A wait box reports progress and the user may
    cancel (GUI only).
    """
    count_total = len(self.retns)
    count_notify = 0
    count_curr = 0

    # BUG: A separate flag is used to track user canceling the search,
    #      because multiple calls to idaapi.wasBreak() do not properly
    #      detect cancellations.
    breakFlag = False

    # Show wait dialog
    if not self.debug:
        idaapi.show_wait_box("Searching gadgets: 00 %")

    try:
        for ea_end in self.retns:

            # Flush the gadgets cache for each new retn pointer
            self.gadgets_cache = dict()

            # Flush memory cache for each new retn pointer
            self.dbg_mem_cache = None

            # CACHE: It is faster to read as much memory in one blob than to
            #        make incremental reads backwards. Try to read and cache
            #        self.maxRopOffset bytes back; where that is not possible,
            #        fall back to the largest readable chunk.
            # NOTE: Read a bit extra to cover correct decoding of RETN,
            #       RETN imm16, CALL /2, and JMP /4 instructions.
            # Bug on end of segments: dbg_read_extra must be 0
            dbg_read_extra = self.dbg_read_extra
            seg_start, seg_end = idc.SegStart(ea_end), idc.SegEnd(ea_end)
            if ea_end + dbg_read_extra > seg_end:
                dbg_read_extra = 0

            for i in range(self.maxRopOffset):
                self.dbg_mem_cache = idc.GetManyBytes(
                    ea_end - self.maxRopOffset + i,
                    self.maxRopOffset - i + self.dbg_read_extra)
                if self.dbg_mem_cache != None:
                    break

            # Error while reading memory (Ida sometimes does not want to read
            # uninit data): retry with progressively smaller chunks.
            if self.dbg_mem_cache == None:
                for backward_size in range(self.maxRopOffset, 0, -1):
                    self.dbg_mem_cache = idc.GetManyBytes(ea_end - backward_size, backward_size)
                    if self.dbg_mem_cache != None:
                        break

            # Nothing readable at all behind this retn.
            # FIX: skip this pointer instead of falling through — the loop
            # below would otherwise call len(None), raising a TypeError that
            # the outer except turns into an abort of the whole search.
            if self.dbg_mem_cache == None:
                logging.error("[Ida Search Error] could not read bytes [0x%x, 0x%x]" % (
                    ea_end - self.maxRopOffset + i,
                    ea_end - self.maxRopOffset + i + self.maxRopOffset - i + self.dbg_read_extra))
                continue

            # Search all possible gadgets up to maxoffset bytes back
            # NOTE: Try all byte combinations to capture longer/more
            #       instructions even with bad bytes in the middle.
            for i in range(1, len(self.dbg_mem_cache) - self.dbg_read_extra):
                ea = ea_end - i

                # Try to build a gadget at the pointer
                gadget = self.build_gadget(ea, ea_end)

                # Successfully built the gadget
                if gadget:

                    # Filter gadgets with too many instruction
                    if gadget.size > self.maxRopSize:
                        break

                    # Append newly built gadget
                    self.gadgets.append(gadget)
                    self.gadgets_cache[ea] = gadget

                    # Exceeded maximum number of gadgets
                    if self.maxRops and len(self.gadgets) >= self.maxRops:
                        breakFlag = True
                        print("[Ida Rop] Maximum number of gadgets exceeded.")
                        break
                else:
                    self.gadgets_cache[ea] = None

                if breakFlag or idaapi.wasBreak():
                    breakFlag = True
                    break

            # Canceled
            # NOTE: Only works when started from GUI not script.
            if breakFlag or idaapi.wasBreak():
                breakFlag = True
                print("[IdaRopSearch] Canceled.")
                break

            # Progress report
            if not self.debug and count_curr >= count_notify:
                # FIX: integer division — plain '/' yields a float on
                # Python 3 and the {:02d} format spec rejects floats
                # with a ValueError.
                percent_progression = count_curr * 100 // count_total
                progression_str = "Searching gadgets: {progression:02d} %".format(progression=percent_progression)
                idaapi.replace_wait_box(progression_str)
                count_notify += 0.10 * count_total
            count_curr += 1

        print("[IdaRopSearch] Found %d gadgets." % len(self.gadgets))

    except Exception:
        # FIX: narrowed from a bare `except:` so KeyboardInterrupt /
        # SystemExit are not swallowed; errors are still logged best-effort.
        logging.error("[IdaRopSearch] Exception raised while search for gadgets : %s." % sys.exc_info())
    finally:
        if not self.debug:
            idaapi.hide_wait_box()
def interactive_load_file(self):
    """
    Interactive loading of individual coverage files.

    Prompts the user for coverage files, normalizes each against the
    database metadata, registers each as its own coverage set in the
    director, and selects the first successfully loaded one.
    """
    self.palette.refresh_colors()
    created_coverage = []

    #
    # kick off an asynchronous metadata refresh. this collects underlying
    # database metadata while the user will be busy selecting coverage files.
    #

    future = self.director.metadata.refresh(progress_callback=metadata_progress)

    #
    # we will now prompt the user with an interactive file dialog so they
    # can select the coverage files they would like to load from disk.
    #

    loaded_files = self._select_and_load_coverage_files()

    # if no valid coverage files were selected (and loaded), bail
    if not loaded_files:
        self.director.metadata.abort_refresh()
        return

    #
    # to continue any further, we need the database metadata. hopefully
    # it has finished with its asynchronous collection, otherwise we will
    # block until it completes. the user will be shown a progress dialog.
    #

    idaapi.show_wait_box("Building database metadata...")
    await_future(future)

    #
    # stop the director's aggregate from updating. this is in the interest
    # of better performance when loading more than one new coverage set
    # into the director.
    #

    self.director.suspend_aggregation()

    #
    # loop through the coverage data we have loaded from disk, and begin
    # the normalization process to translate / filter / flatten its blocks
    # into a generic format the director can understand (a list of addresses)
    #

    for i, data in enumerate(loaded_files, 1):

        # keep the user informed about our progress while loading coverage
        idaapi.replace_wait_box(
            "Normalizing and mapping coverage %u/%u" % (i, len(loaded_files))
        )

        # normalize coverage data to the open database
        try:
            addresses = self._normalize_coverage(data, self.director.metadata)

        # normalization failed; print & log it, then skip this file
        except Exception as e:
            lmsg("Failed to map coverage %s" % data.filepath)
            lmsg("- %s" % e)
            logger.exception("Error details:")
            continue

        #
        # ask the director to create and track a new coverage set from
        # the normalized coverage data we provide
        #

        coverage_name = os.path.basename(data.filepath)
        self.director.create_coverage(coverage_name, addresses)

        # save the coverage name to the list of successful loads
        created_coverage.append(coverage_name)

    #
    # resume the director's aggregation capabilities, triggering an update
    # to recompute the aggregate with the newly loaded coverage
    #

    idaapi.replace_wait_box("Recomputing coverage aggregate...")
    self.director.resume_aggregation()

    # if nothing was mapped, then there's nothing else to do
    if not created_coverage:
        lmsg("No coverage files could be mapped...")
        idaapi.hide_wait_box()
        return

    #
    # select one (the first) of the newly loaded coverage file(s)
    #

    idaapi.replace_wait_box("Selecting coverage...")
    self.director.select_coverage(created_coverage[0])

    # all done, hide the IDA wait box
    idaapi.hide_wait_box()
    lmsg("Successfully loaded %u coverage file(s)..." % len(created_coverage))

    # show the coverage overview
    self.open_coverage_overview()
def interactive_load_batch(self):
    """
    Interactive loading & aggregation of coverage files.

    Prompts the user for coverage files and a batch name, aggregates all
    of them into a single named coverage set, and selects it.
    """
    self.palette.refresh_colors()

    #
    # kick off an asynchronous metadata refresh. this collects underlying
    # database metadata while the user will be busy selecting coverage files.
    #

    future = self.director.metadata.refresh(progress_callback=metadata_progress)

    #
    # we will now prompt the user with an interactive file dialog so they
    # can select the coverage files they would like to load from disk.
    #

    loaded_files = self._select_and_load_coverage_files()

    # if no valid coverage files were selected (and loaded), bail
    if not loaded_files:
        self.director.metadata.abort_refresh()
        return

    # prompt the user to name the new coverage aggregate
    default_name = "BATCH_%s" % self.director.peek_shorthand()
    ok, coverage_name = prompt_string(
        "Batch Name:",
        "Please enter a name for this coverage",
        default_name
    )

    # if user didn't enter a name for the batch, or hit cancel, we abort
    if not (ok and coverage_name):
        lmsg("Aborting batch load...")
        return

    #
    # to continue any further, we need the database metadata. hopefully
    # it has finished with its asynchronous collection, otherwise we will
    # block until it completes. the user will be shown a progress dialog.
    #

    idaapi.show_wait_box("Building database metadata...")
    await_future(future)

    # aggregate all the selected files into one new coverage set
    new_coverage = self._aggregate_batch(loaded_files)

    # inject the aggregated coverage set into the director
    idaapi.replace_wait_box("Mapping coverage...")
    self.director.create_coverage(coverage_name, new_coverage.data)

    # select the newly created batch coverage
    idaapi.replace_wait_box("Selecting coverage...")
    self.director.select_coverage(coverage_name)

    # all done, hide the IDA wait box
    idaapi.hide_wait_box()
    lmsg("Successfully loaded batch %s..." % coverage_name)

    # show the coverage overview
    self.open_coverage_overview()
def metadata_progress(completed, total):
    """
    Handler for metadata collection callback, updates progress dialog.
    """
    status = "Collected metadata for %u/%u Functions" % (completed, total)
    idaapi.replace_wait_box(status)
def analysis_finish_cb(self, outfname, logfname, cfaoutfname, ea=None):
    """Parse BinCAT analyzer output, store results in the IDB, colorize.

    :param outfname: path to the analyzer result file (out.ini)
    :param logfname: path to the analyzer log file
    :param cfaoutfname: optional path to the marshalled CFA (may be None)
    :param ea: if given, address to focus after loading; otherwise the
               address of node 0 is used (falling back to current_ea)
    Returns None on parse or IDB-update failure.
    """
    idaapi.show_wait_box("HIDECANCEL\nParsing BinCAT analysis results")
    bc_log.debug("Parsing analyzer result file")
    # Here we can't check for user_cancelled because the UI is
    # unresponsive when parsing.
    try:
        cfa = cfa_module.CFA.parse(outfname, logs=logfname)
    except (pybincat.PyBinCATException, NoSectionError):
        idaapi.hide_wait_box()
        bc_log.error("Could not parse result file")
        return None
    self.clear_background()
    self.cfa = cfa
    if cfa:
        # XXX add user preference for saving to idb? in that case, store
        # reference to marshalled cfa elsewhere
        bc_log.info("Storing analysis results to idb...")
        # persist raw result + log files in the netnode so the analysis
        # can be reloaded from the IDB later
        with open(outfname, 'rb') as f:
            self.netnode["out.ini"] = f.read()
        with open(logfname, 'rb') as f:
            self.netnode["analyzer.log"] = f.read()
        if self.remapped_bin_path:
            self.netnode["remapped_bin_path"] = self.remapped_bin_path
        self.netnode["remap_binary"] = self.remap_binary
        if cfaoutfname is not None and os.path.isfile(cfaoutfname):
            with open(cfaoutfname, 'rb') as f:
                self.last_cfaout_marshal = f.read()
        bc_log.info("Analysis results have been stored idb.")
    else:
        bc_log.info("Empty or unparseable result file.")
    bc_log.debug("----------------------------")
    idaapi.replace_wait_box("Updating IDB with BinCAT results")
    # Update current RVA to start address (nodeid = 0)
    # by default, use current ea - e.g, in case there is no results (cfa is
    # None) or no node 0 (happens in backward mode)
    current_ea = self.current_ea
    if ea is not None:
        current_ea = ea
    else:
        try:
            node0 = cfa['0']
            if node0:
                current_ea = node0.address.value
        except (KeyError, TypeError):
            # no cfa is None, or no node0
            pass
    try:
        self.set_current_ea(current_ea, force=True)
    except TypeError as e:
        bc_log.warn("Could not load results from IDB")
        bc_log.warn("------ BEGIN EXCEPTION -----")
        bc_log.exception(e)
        bc_log.warn("------ END EXCEPTION -----")
        idaapi.hide_wait_box()
        return None
    self.netnode["current_ea"] = current_ea
    if not cfa:
        return
    # colorize every analyzed address: tainted addresses get a color
    # derived from their (first) taint source, untainted ones light grey
    for addr, nodeids in cfa.addr_nodes.items():
        # user_cancelled only exists on recent IDA versions, hence the
        # hasattr guard
        if hasattr(idaapi, "user_cancelled") and idaapi.user_cancelled() > 0:
            bc_log.info("User cancelled!")
            idaapi.hide_wait_box()
            return None
        ea = addr.value
        tainted = False
        taint_id = 1
        for n_id in nodeids:
            # is it tainted?
            # find child nodes
            node = cfa[n_id]
            if node.tainted:
                tainted = True
                if node.taintsrc:
                    # Take the first one
                    taint_id = int(node.taintsrc[0].split("-")[1])
                break
        if tainted:
            idaapi.set_item_color(ea, taint_color(taint_id))
        else:
            idaapi.set_item_color(ea, 0xF0F0F0)
    idaapi.hide_wait_box()
    self.gui.focus_registers()
def _process_ctx_menu_action(self, action):
    """
    Process the given (user selected) context menu action.
    """

    # no context menu action was actually clicked; nothing to do
    if not action:
        return

    # grab the rows currently selected in the coverage table
    selected_rows = self._table.selectionModel().selectedRows()
    if len(selected_rows) == 0:
        return

    # resolve each selected row to its function address up-front,
    # as the handlers below will want them
    function_addresses = [
        self._model.row2func[row.row()] for row in selected_rows
    ]

    #
    # universal actions first (valid for any number of selected rows)
    #

    # 'Prefix functions'
    if action == self._action_prefix:
        gui_prefix_functions(function_addresses)

    # 'Clear prefix'
    elif action == self._action_clear_prefix:
        clear_prefixes(function_addresses)

    # 'Refresh metadata'
    elif action == self._action_refresh_metadata:
        idaapi.show_wait_box("Building database metadata...")
        self._director.refresh()

        # ensure the table's model gets refreshed
        idaapi.replace_wait_box("Refreshing Coverage Overview...")
        self.refresh()

        # all done
        idaapi.hide_wait_box()

    #
    # the remaining actions only make sense for a single-row selection;
    # don't check multi-function selections against them
    #

    if len(selected_rows) != 1:
        return

    # unpack the single QModelIndex
    index = selected_rows[0]
    function_address = function_addresses[0]

    # 'Rename'
    if action == self._action_rename:
        gui_rename_function(function_address)

    # 'Copy name'
    elif action == self._action_copy_name:
        name_index = self._model.index(index.row(), FUNC_NAME)
        function_name = self._model.data(name_index, QtCore.Qt.DisplayRole)
        copy_to_clipboard(function_name)

    # 'Copy address'
    elif action == self._action_copy_address:
        copy_to_clipboard("0x%X" % function_address)
def search_gadgets(self):
    """Search backwards from every collected RETN for ROP gadgets.

    For each (address, module) pair in self.retns, caches a block of
    memory behind the return instruction, filters candidate pointers by
    the configured charsets, and tries to build a gadget at every offset
    up to self.maxRopOffset. Results accumulate in self.gadgets.
    Python 2 code (uses `print` statements).
    """
    count_total = len(self.retns)
    count_notify = 0
    count_curr = 0

    # BUG: A separate flag is used to track user canceling the search,
    #      because multiple calls to idaapi.wasBreak() do not properly
    #      detect cancellations.
    breakFlag = False

    # Show wait dialog
    if not self.debug:
        idaapi.show_wait_box("Searching gadgets: 00%%%%")

    for (ea_end, module) in self.retns:

        # Flush the gadgets cache for each new retn pointer
        self.gadgets_cache = dict()

        # Flush memory cache for each new retn pointer
        self.dbg_mem_cache = None

        # CACHE: It is faster to read as much memory in one blob than to
        #        make incremental reads backwards. Try to read and cache
        #        self.maxRopOffset bytes back; where that is not possible,
        #        fall back to the largest readable chunk.
        # NOTE: Read a bit extra to cover correct decoding of RETN,
        #       RETN imm16, CALL /2, and JMP /4 instructions.
        for i in range(self.maxRopOffset):
            self.dbg_mem_cache = read_module_memory(
                ea_end - self.maxRopOffset + i,
                self.maxRopOffset - i + self.dbg_read_extra)
            if self.dbg_mem_cache != None:
                break

        # Check to make sure we have actual data to work with.
        if self.dbg_mem_cache == None:
            continue

        # Search all possible gadgets up to maxoffset bytes back
        # NOTE: Try all byte combinations to capture longer/more
        #       instructions even with bad bytes in the middle.
        for i in range(1, len(self.dbg_mem_cache) - self.dbg_read_extra):
            ea = ea_end - i

            # Get pointer charset
            ptr_charset = self.sploiter.get_ptr_charset(ea)

            # Filter the pointer against the user's charset constraints
            if ptr_charset == None:
                continue
            if self.ptrNonull and not "nonull" in ptr_charset:
                continue
            if self.ptrUnicode and not "unicode" in ptr_charset:
                continue
            if self.ptrAscii and not "ascii" in ptr_charset:
                continue
            if self.ptrAsciiPrint and not "asciiprint" in ptr_charset:
                continue
            if self.ptrAlphaNum and not "alphanum" in ptr_charset:
                continue
            if self.ptrNum and not "numeric" in ptr_charset:
                continue
            if self.ptrAlpha and not "alpha" in ptr_charset:
                continue

            # Try to build a gadget at the pointer
            gadget = self.build_gadget(ea, ea_end)

            # Successfully built the gadget
            if gadget:

                # Populate gadget object with more data
                gadget.address = ea
                gadget.module = module
                gadget.ptr_charset = ptr_charset

                # Filter gadgets with too many instruction
                if gadget.size > self.maxRopSize:
                    break

                # Append newly built gadget
                self.gadgets.append(gadget)
                self.gadgets_cache[ea] = gadget

                # Exceeded maximum number of gadgets
                if self.maxRops and len(self.gadgets) >= self.maxRops:
                    breakFlag = True
                    print "[idasploiter] Maximum number of gadgets exceeded."
                    break
            else:
                # remember the miss so build_gadget can short-circuit later
                self.gadgets_cache[ea] = None

            if breakFlag or idaapi.wasBreak():
                breakFlag = True
                break

        # Canceled
        # NOTE: Only works when started from GUI not script.
        if breakFlag or idaapi.wasBreak():
            breakFlag = True
            print "[idasploiter] Canceled."
            break

        # Progress report
        if not self.debug and count_curr >= count_notify:
            # NOTE: Need to use %%%% to escape both Python and IDA's format strings
            idaapi.replace_wait_box("Searching gadgets: %02d%%%%" % (count_curr * 100 / count_total))

            # throttle: only refresh the wait box every ~10% of pointers
            count_notify += 0.10 * count_total

        count_curr += 1

    print "[idasploiter] Found %d gadgets." % len(self.gadgets)

    if not self.debug:
        idaapi.hide_wait_box()