def term(self):
    """ On IDA's close event, export the Rop gadget list in a default csv file """
    idaapi.show_wait_box("Saving gadgets ...")
    try:
        idarop_manager.save_internal_db()
    except Exception as e:
        pass
    idaapi.hide_wait_box()
def term(self):
    """ Called by IDA upon termination. """
    idaapi.hide_wait_box()

    # Restore user descriptions so that they will be saved.
    self._restore_users_description()
    self._remove_tags()
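# A minimal sketch (not taken from any of the plugins in this listing) of the
# wait-box idiom most of these examples follow. Wrapping the work in
# try/finally guarantees hide_wait_box() runs even if the work raises, so the
# modal dialog cannot be leaked; several examples below call hide_wait_box()
# outside a finally and can leave the box up on an unexpected exception.
# `do_work` is a hypothetical placeholder.
import idaapi

def with_wait_box(message, do_work):
    idaapi.show_wait_box(message)
    try:
        return do_work()
    finally:
        idaapi.hide_wait_box()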
def procanalyzer_on_finish(self):
    bc_log.info("Analyzer process finished")
    exitcode = self.exitCode()
    if exitcode != 0:
        bc_log.error("analyzer returned exit code=%i", exitcode)
    try:
        self.process_output()
    except Exception as e:
        idaapi.hide_wait_box()
        bc_log.error("Caught exception, hiding wait box", exc_info=True)
def refreshitems(self):
    # Problem: the rop engine has not been initialized
    if self.idarop.rop == None:
        return

    # No new data present
    if self.rop_list_cache == self.idarop.rop.gadgets:
        return

    self.items = []

    # No data present
    if len(self.idarop.rop.gadgets) == 0:
        return

    if len(self.idarop.rop.gadgets) > 10000:
        idaapi.show_wait_box("Ida rop : loading rop list ...")

    for i, g in enumerate(self.idarop.rop.gadgets):
        # reconstruct disas
        if g.opcodes == "":
            bad_gadget = False

            opcodes = idc.GetManyBytes(g.address, g.ret_address - g.address + 1)
            instructions = list()
            ea = g.address
            while ea <= g.ret_address:
                instructions.append(idc.GetDisasmEx(ea, idaapi.GENDSM_FORCE_CODE))
                ea += idaapi.decode_insn(ea)

                # Badly decoded gadget
                if idaapi.decode_insn(ea) == 0:
                    bad_gadget = True
                    break

            if not bad_gadget:
                h = Gadget(
                    address=g.address,
                    ret_address=g.ret_address,
                    instructions=instructions,
                    opcodes=opcodes,
                    size=len(opcodes))
                self.idarop.rop.gadgets[i] = h
                self.items.append(h.get_display_list(self.idarop.addr_format))
        else:
            self.items.append(g.get_display_list(self.idarop.addr_format))

    self.rop_list_cache = self.idarop.rop.gadgets

    if len(self.idarop.rop.gadgets) > 10000:
        idaapi.hide_wait_box()
def __init__(self):
    idaapi.show_wait_box("REDB Plugin is loading, please wait...")
    self.functions = {}
    utils._backup_idb_file()
    utils.Configuration.assert_config_file_validity()
    self._collect_string_addresses()
    self.cur_history_item_index = 0
    self.first_undo = 1
    print "*** REDB Plugin loaded. ***"
    idaapi.hide_wait_box()
def ghidraaas_checkin(bin_file_path, filename, ghidra_server_url):
    """ Upload the .bytes file to Ghidraaas. One time only
    (until IDA is restarted...)
    """
    idaapi.show_wait_box("Connecting to Ghidraaas. Sending bytes file...")
    try:
        md5_hash = idautils.GetInputFileMD5()
        queue = Queue.Queue()
        my_args = (bin_file_path, filename, ghidra_server_url, md5_hash, queue)
        t1 = threading.Thread(target=ghidraaas_checkin_thread, args=my_args)
        t1.start()

        counter = 0
        stop = False
        while not stop:
            time.sleep(SLEEP_LENGTH)
            counter += 1

            # User terminated action
            if idaapi.user_cancelled():
                stop = True
                print("GhIDA:: [!] Check-in interrupted.")
                continue

            # Reached TIMEOUT
            if counter > COUNTER_MAX:
                stop = True
                print("GhIDA:: [!] Timeout reached.")
                continue

            # Thread terminated
            if not t1.isAlive():
                stop = True
                print("GhIDA:: [DEBUG] Thread terminated.")
                continue

        print("GhIDA:: [DEBUG] Joining check-in thread.")
        t1.join(0)
        q_result = queue.get_nowait()
        print("GhIDA:: [DEBUG] Thread joined. Got queue result.")
        idaapi.hide_wait_box()
        return q_result

    except Exception:
        idaapi.hide_wait_box()
        print("GhIDA:: [!] Check-in error.")
        idaapi.warning("GhIDA check-in error")
        return False
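# A condensed sketch of the polling pattern the GhIDA check-in/check-out
# helpers use: run the blocking work in a background thread, poll it in short
# sleeps under a wait box, and bail out on user cancellation or timeout.
# `work`, `timeout_ticks`, and `tick` are hypothetical placeholders, and
# Thread.isAlive() is assumed to match the Python 2 naming used by the
# surrounding examples (it is is_alive() on modern Python 3).
import threading
import time
import idaapi

def wait_for_thread(work, message, timeout_ticks, tick=0.1):
    idaapi.show_wait_box(message)
    t = threading.Thread(target=work)
    t.start()
    try:
        ticks = 0
        while t.isAlive():
            time.sleep(tick)
            ticks += 1
            # Cancellation or timeout: give up waiting. The worker thread
            # keeps running; the caller decides how to clean it up.
            if idaapi.user_cancelled() or ticks > timeout_ticks:
                return False
        t.join(0)
        return True
    finally:
        idaapi.hide_wait_box()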
def _submit_one(self):
    """ Submits the user's description. """
    if self._assert_currently_pointing_at_a_function():
        if self._assert_current_function_is_handled():
            idaapi.show_wait_box("Submitting...")
            try:
                self._cur_func.submit_description()
            except Exception as e:
                print "REDB: Unexpected exception thrown while submitting:"
                print e
            idaapi.hide_wait_box()
def _request_one(self):
    """ Request descriptions for a function. """
    if self._assert_currently_pointing_at_a_function():
        if self._assert_current_function_is_handled():
            idaapi.show_wait_box("Requesting...")
            try:
                self._cur_func.request_descriptions()
            except Exception as e:
                print "REDB: Unexpected exception thrown while requesting:"
                print e
            idaapi.hide_wait_box()
def show_rop_view(self):
    """ Show the list of rop gadgets found """

    # If the default csv exists but has not been loaded, load it here
    if self.defered_loading == True:
        idaapi.show_wait_box("loading gadgets db ...")
        self.load_default_csv(force=True)
        idaapi.hide_wait_box()
        self.defered_loading = False

    # Show the ROP gadgets view
    self.ropView.refreshitems()
    self.ropView.show()
def _submit_all_handled(self):
    """ Submit user description for all handled functions. """
    num_of_funcs = str(len(self._handled_functions))
    idaapi.show_wait_box("Submitting " + num_of_funcs + " function...")
    try:
        for function in self._handled_functions:
            self._handled_functions[function].submit_description()
    except Exception as e:
        print "REDB: Unexpected exception thrown while submitting:"
        print e
    idaapi.hide_wait_box()
def ghidraaas_checkout(ghidra_server_url):
    """ That's all. Remove the .bytes file from the Ghidraaas server. """
    if not GLOBAL_CHECKIN:
        return

    idaapi.show_wait_box(
        "Connecting to Ghidraaas. Removing temporary files...")
    try:
        md5_hash = idautils.GetInputFileMD5()
        aargs = (md5_hash, ghidra_server_url)
        t1 = threading.Thread(target=ghidraaas_checkout_thread, args=aargs)
        t1.start()

        counter = 0
        stop = False
        while not stop:
            # print("waiting check-out 1 zzz")
            # idaapi.request_refresh(idaapi.IWID_DISASMS)
            time.sleep(0.1)

            if wasbreak():
                print("GhIDA:: [!] Check-out interrupted.")
                stop = True
                continue

            if counter > COUNTER_MAX * 10:
                print("GhIDA:: [!] Timeout reached.")
                stop = True
                continue

            if not t1.isAlive():
                stop = True
                print("GhIDA:: [DEBUG] Thread terminated.")
                continue

        print("GhIDA:: [DEBUG] Joining check-out thread.")
        t1.join(0)
        print("GhIDA:: [DEBUG] Thread joined")
        idaapi.hide_wait_box()
        return

    except Exception:
        idaapi.hide_wait_box()
        print("GhIDA:: [!] Check-out error")
        idaapi.warning("GhIDA check-out error")
        return
def _make_run_prepereations(self):
    """ Preparations which take place in the loading process. """
    redb_client_utils._parse_config_file()
    self._collect_string_addresses()
    self._collect_imported_modules()

    # Main dictionary holding all handled functions information.
    # The keys are the functions' first addresses.
    # The values are REDB_Functions - one for each handled function.
    self._handled_functions = {}

    idaapi.hide_wait_box()
def run(self, arg):
    """ This function is called by IDA when the user uses one of the
    plugin's hotkeys.
    """
    # Establish whether the cursor is pointing at a function, and if so,
    # whether the function is in the handled functions list.
    # Updates self._cur_function.
    self._collect_pre_run_info()

    # Call the requested function
    getattr(self, CALLBACK_FUNCTIONS[arg][2])()
    idaapi.hide_wait_box()
def _request_all_handled(self):
    """ Request descriptions for all handled functions. """
    num_of_funcs = str(len(self._handled_functions))
    idaapi.show_wait_box("Requesting Descriptions for " + num_of_funcs +
                         " function...")
    try:
        for function in self._handled_functions:
            self._handled_functions[function].request_descriptions()
    except Exception as e:
        print "REDB: Unexpected exception thrown while requesting:"
        print e
    idaapi.hide_wait_box()
def submit_description(self):
    # Preparing Post
    post = utils.Post()
    post.add_data('type', 'submit')
    post.add_data('attributes', self._attributes)
    cur_desc = descriptions.DescriptionUtils.get_all(self._first_addr)
    post.add_data('description', cur_desc)

    # Submitting
    idaapi.show_wait_box("Submitting...")
    res_data = post.send()
    idaapi.hide_wait_box()

    # Handling response
    if isinstance(res_data, str):  # a message from the server
        return res_data
    else:
        return "Error: Illegal response format."
def revert_selected_functions(self, funcs):
    i, succ, skip, fail = 0, 0, 0, 0
    _funcs = [ea for ea in funcs]
    funcs_len = len(_funcs)
    idaapi.show_wait_box("reverting... (0/{})".format(funcs_len))
    for ea in _funcs:
        i += 1
        idaapi.replace_wait_box("reverting... ({}/{})".format(i, funcs_len))
        pfn = idaapi.get_func(ea)
        res = bai_mark.revert_bai_func(pfn.start_ea)
        if res:
            succ += 1
        else:
            skip += 1
    idaapi.hide_wait_box()
    print(
        "[{}] {} functions successfully reverted, {} functions failed, {} functions skipped"
        .format(self.name, succ, fail, skip))
def init(self):
    idaapi.show_wait_box("Looking for classes...")
    all_virtual_functions.clear()
    all_virtual_tables.clear()

    classes = []
    for ordinal in range(1, idaapi.get_ordinal_qty(idaapi.cvar.idati)):
        result = Class.create_class(ordinal)
        if result:
            classes.append(result)

    for class_row, class_ in enumerate(classes):
        class_item = TreeItem(class_, class_row, None)
        for vtable_row, vtable in class_.vtables.items():
            vtable_item = TreeItem(vtable, vtable_row, class_item)
            vtable_item.children = [TreeItem(function, 0, vtable_item)
                                    for function in vtable.virtual_functions]
            class_item.children.append(vtable_item)
        self.tree_data.append(class_item)

    idaapi.hide_wait_box()
def activate(self, ctx):
    print("Suggesting variable names...")
    idaapi.show_wait_box("Suggesting variable names... please wait")
    ea = idaapi.get_screen_ea()
    vuu = ida_hexrays.get_widget_vdui(ctx.widget)
    if ea is None:
        idaapi.warning("Current function not found.")
    else:
        f = StringIO()
        with jsonlines.Writer(f) as writer:
            try:
                info, cfunc = func(ea, vuu)
                # We must set the working directory to the dire dir to open the model correctly
                os.chdir(dire_dir)
                p = subprocess.Popen(
                    ['python', '-m', 'DIRE.prediction_plugin.run_one',
                     '--model', MODEL],
                    stdout=subprocess.PIPE,
                    stdin=subprocess.PIPE,
                    stderr=subprocess.PIPE,
                    encoding=sys.getdefaultencoding())
                # print(info)
                writer.write(info)
                comm = p.communicate(input=f.getvalue())
                json_results = comm[0]
                stderr = comm[1]
                if p.returncode != 0:
                    print(stderr)
                    raise ValueError("Variable prediction failed")
                results = json.loads(json_results)
                best_results = results[0][0]
                # print("best: ", best_results)
                tuples = map(
                    lambda x: (varnames[x[0]] if x[0] in varnames else x[0],
                               x[1]['new_name']),
                    best_results.items())
                FinalRename(dict(tuples), cfunc, vuu).apply_to(cfunc.body, None)
                # Force the UI to update
                # vuu.refresh_ctext()
            except ida_hexrays.DecompilationFailure:
                idaapi.warning("Decompilation failed")
            except ValueError as e:
                idaapi.warning(str(e) + ". See output window for more details.")
    idaapi.hide_wait_box()
    return 1
def StartDump(self):
    # print self.start
    # print self.endorlen
    self.filepath = idaapi.ask_file(1, "*.dump", "save dump file")
    if self.dumptype == 0:
        ea = self.getHexNum(self.start)
        len = self.getHexNum(self.endorlen)
        if not idaapi.is_loaded(ea) or not idaapi.is_loaded(ea + len):
            idaapi.warning("array is out of bounds")
            return -1
        if len <= 0:
            idaapi.warning("len is <= 0")
            return -1
        print("start read bytes")
        self.Close(0)
        idaapi.show_wait_box("read bytes")
        self.memdata = idaapi.get_bytes(ea, len)
        print("read bytes end")
        # idaapi.hide_wait_box("read end")
        idaapi.hide_wait_box()
    elif self.dumptype == 1:
        ea = self.getHexNum(self.start)
        len = self.getHexNum(self.endorlen) - self.getHexNum(self.start)
        if not idaapi.is_loaded(ea) or not idaapi.is_loaded(ea + len):
            idaapi.warning("array is out of bounds")
            return -1
        if len <= 0:
            idaapi.warning("len is <= 0")
            return -1
        print("start read bytes")
        self.Close(0)
        idaapi.show_wait_box("read bytes")
        self.memdata = idaapi.get_bytes(ea, len)
        print("read bytes end")
        # idaapi.hide_wait_box("read end")
        idaapi.hide_wait_box()
    fp = open(self.filepath, 'wb')
    fp.write(self.memdata)
    fp.close()
    idaapi.msg("save:" + self.filepath)
    return 1
def setupModelData(self, root):
    idaapi.show_wait_box("Looking for classes...")
    all_virtual_functions.clear()
    all_virtual_tables.clear()

    classes = []
    for ordinal in range(1, idaapi.get_ordinal_qty(idaapi.cvar.idati)):
        result = Class.create_class(ordinal)
        if result:
            classes.append(result)

    for class_ in classes:
        class_item = TreeItem(class_, root)
        for vtable in class_.vtables.values():
            vtable_item = TreeItem(vtable, class_item)
            vtable_item.children = [TreeItem(function, vtable_item)
                                    for function in vtable.virtual_functions]
            class_item.appendChild(vtable_item)
        root.appendChild(class_item)

    idaapi.hide_wait_box()
def button_chart_on_click(self):
    try:
        idaapi.show_wait_box("Making chart...")
        tab_title = self.get_tab_title()
        if self.config.chart_type == ChartTypes.ENTROPY:
            if self.config.use_disk_binary and not self.config.entropy['segm_exists']:
                msg1 = "Do you want to create a new segment with the binary content?\n"
                msg2 = "This will allow you to navigate over the file by double-clicking on the chart"
                if idaapi.ask_yn(1, "HIDECANCEL\n" + msg1 + msg2) == 1:
                    self.create_segment_with_binary()
            self.parent.tabs.addTab(Entropy(self), tab_title)
        elif self.config.chart_type == ChartTypes.HISTOGRAM:
            self.parent.tabs.addTab(Histogram(self), tab_title)
    except Exception as e:
        idaapi.warning("%s" % traceback.format_exc())
    idaapi.hide_wait_box()
def request_descriptions(self):
    # Making room for new descriptions
    self._public_descriptions = []

    # Preparing Post
    post = utils.Post()
    post.add_data('type', 'request')
    post.add_data('attributes', self._attributes)

    # Requesting
    idaapi.show_wait_box("Requesting...")
    res_data = post.send()
    idaapi.hide_wait_box()

    # Handling response
    if isinstance(res_data, str):  # a message from the server
        return res_data
    elif isinstance(res_data, list):
        for description in res_data:
            self._add_description(description)
        return "Received " + str(len(res_data)) + " descriptions."
    else:
        return "Error: Illegal response format."
def _match_with_check(self, ea, topk, funcset_ids):
    fail, skip, succ = -1, 0, 1
    # < minsize
    pfn = idaapi.get_func(ea)
    if idaapi.FlowChart(pfn).size < bai_config['minsize']:
        return skip
    # do match
    try:
        targets = self.mgr.retrieve(
            ea, topk=bai_config['topk'], funcset_ids=funcset_ids)
    except DecompilationFailure as e:
        BinaryAILog.fail(idaapi.get_func_name(ea), str(e))
        return fail
    except BinaryAIException as e:
        idaapi.hide_wait_box()
        BinaryAILog.fatal(e)
    if targets is None:
        return fail
    if not bai_mark.apply_bai_high_score(
            ea, targets[0]['function']['name'], targets[0]['score']):
        return skip
    return succ
def process_output(self):
    """ Try to process analyzer output. """
    self.timer.destroy()

    bc_log.info("---- stdout ----------------")
    bc_log.info(str(self.readAllStandardOutput()))
    bc_log.info("---- stderr ----------------")
    bc_log.info(str(self.readAllStandardError()))
    bc_log.info("---- logfile ---------------")
    if os.path.exists(self.logfname):
        with open(self.logfname, 'rb') as f:
            log_lines = f.readlines()
        log_lines = dedup_loglines(log_lines, max=100)
        if len(log_lines) > 100:
            bc_log.info("---- Only the last 100 log lines (deduped) are displayed here ---")
            bc_log.info("---- See full log in %s ---" % self.logfname)
        for line in log_lines:
            bc_log.info(line.rstrip())
    bc_log.info("====== end of logfile ======")
    idaapi.hide_wait_box()
    self.finish_cb(self.outfname, self.logfname, self.cfaoutfname)
def PopulateItems(self):
    min_entropy = self.config['min_entropy']
    cur_ea = self.config['start_addr']
    idaapi.show_wait_box("Searching xrefs...")
    while cur_ea < self.config['end_addr']:
        xrefs = list(idautils.XrefsTo(cur_ea))
        if len(xrefs) > 0 and xrefs[0].type != idaapi.fl_F:  # discard ordinary flow
            data = idaapi.get_bytes(cur_ea, self.config['block_size'])
            assert len(data) == self.config['block_size']
            ent = entropy(data)
            if ent >= min_entropy:
                self.items.append([
                    "%08X" % cur_ea,
                    "%.04f" % ent,
                    "%d" % len(xrefs),
                    "%d" % xrefs[0].iscode,
                    "%s" % idautils.XrefTypeName(xrefs[0].type)
                ])
        cur_ea += 1
    idaapi.hide_wait_box()
def upload_selected_functions(self, funcs):
    if not self.check_before_use(check_funcset=True):
        return
    i, succ, skip, fail = 0, 0, 0, 0
    _funcs = [ea for ea in funcs]
    funcs_len = len(_funcs)
    idaapi.show_wait_box("Uploading... (0/{})".format(funcs_len))
    for ea in _funcs:
        i += 1
        idaapi.replace_wait_box("Uploading... ({}/{})".format(i, funcs_len))
        if idaapi.user_cancelled():
            idaapi.hide_wait_box()
            print(
                "[{}] {} functions successfully uploaded, {} functions failed, {} functions skipped"
                .format(self.name, succ, fail, skip))
            return
        pfn = idaapi.get_func(ea)
        if idaapi.FlowChart(pfn).size < self.cfg['minsize']:
            skip += 1
            continue
        func_id = None
        try:
            func_id = self.upload_function(ea, self.funcset)
        except DecompilationFailure:
            pass
        except BinaryAIException as e:
            idaapi.hide_wait_box()
            assert False, "[BinaryAI] {}".format(e._msg)
        func_name = idaapi.get_func_name(ea)
        if not func_id:
            print("[{}] {} failed because of an upload error".format(
                self.name, func_name))
            fail += 1
            continue
        succ += 1
    idaapi.hide_wait_box()
    print(
        "[{}] {} functions successfully uploaded, {} functions failed, {} functions skipped"
        .format(self.name, succ, fail, skip))
    lexer = shlex.shlex(x.iIndentCommand.value)
    lexer.wordchars += "\:-."
    indent_cmd = list(lexer)

    importer = CIDABinaryToSourceImporter()
    importer.min_level = min_level
    importer.min_display_level = min_display_level
    importer.use_decompiler = x.rUseDecompiler.checked
    importer.import_src(database)
  finally:
    hide_wait_box()

if __name__ == "__main__":
  try:
    try:
      if os.getenv("DIAPHORA_PROFILE") is not None:
        import cProfile
        profiler = cProfile.Profile()
        profiler.runcall(main)
        exported = True
        profiler.print_stats(sort="time")
      else:
        main()
    except:
      log("ERROR: %s" % str(sys.exc_info()[1]))
      traceback.print_exc()
      raise
  finally:
    hide_wait_box()
def search_pointers(self):

    # HACK: A separate flag is used to track user canceling the search,
    #       because multiple calls to idaapi.wasBreak() do not properly
    #       detect cancellations.
    breakFlag = False

    # Show wait dialog
    idaapi.show_wait_box("Searching writable function pointers...")

    for m in self.modules:

        ###################################################################
        # Locate all of the CALL and JMP instructions in the current module
        # which use an immediate operand.

        # List of call/jmp pointer calls in a given module
        ptr_calls = list()

        # Iterate over segments in the module
        # BUG: Iterating over all loaded segments is more stable than looking up by address
        for n in xrange(idaapi.get_segm_qty()):
            seg = idaapi.getnseg(n)

            # Segment in a selected module
            if seg and seg.startEA >= m.addr and seg.endEA <= (m.addr + m.size):

                # Locate executable segments
                # NOTE: Each module may have multiple executable segments
                # TODO: Search for "MOV REG, PTR # CALL REG"
                if seg.perm & idaapi.SEGPERM_EXEC:

                    # Search all instances of CALL /2 imm32/64 - FF 15
                    # TODO: Alternative pointer calls using SIB: FF 14 E5 11 22 33 44 - call dword/qword ptr [0x44332211]
                    #                                            FF 14 65 11 22 33 44
                    #                                            FF 14 25 11 22 33 44
                    call_ea = seg.startEA
                    while True:
                        call_ea = idaapi.find_binary(call_ea + 1, seg.endEA, "FF 15", 16, idaapi.SEARCH_DOWN)
                        if call_ea == idaapi.BADADDR:
                            break
                        ptr_calls.append(call_ea)

                    # Search all instances of JMP /2 imm32/64 - FF 25
                    # TODO: Alternative pointer calls using SIB: FF 24 E5 11 22 33 44 - jmp dword/qword ptr [0x44332211]
                    #                                            FF 24 65 11 22 33 44
                    #                                            FF 24 25 11 22 33 44
                    call_ea = seg.startEA
                    while True:
                        call_ea = idaapi.find_binary(call_ea + 1, seg.endEA, "FF 25", 16, idaapi.SEARCH_DOWN)
                        if call_ea == idaapi.BADADDR:
                            break
                        ptr_calls.append(call_ea)

        ###################################################################
        # Extract all of the function pointers and make sure they are
        # writable.

        # List of writable function pointer objects in a given module
        ptrs = list()

        for call_ea in ptr_calls:

            # Decode CALL/JMP instruction
            # NOTE: May result in invalid disassembly of split instructions
            insn_size = idaapi.decode_insn(call_ea)

            if insn_size:
                insn = idaapi.cmd
                insn_op1 = insn.Operands[0].type

                # Verify first operand is a direct memory reference
                if insn.Operands[0].type == idaapi.o_mem:

                    # Get operand address
                    ptr_ea = insn.Operands[0].addr

                    # Apply pointer offset
                    ptr_ea -= self.ptrOffset

                    # Locate segment where the pointer is located
                    ptr_seg = idaapi.getseg(ptr_ea)

                    # Make sure a valid writable segment was found
                    if ptr_seg and ptr_seg.perm & idaapi.SEGPERM_WRITE:

                        # Get pointer charset
                        ptr_charset = self.sploiter.get_ptr_charset(ptr_ea)

                        # Filter the pointer
                        if not self.filterP2P:
                            if ptr_charset == None:
                                continue
                            if self.ptrNonull and not "nonull" in ptr_charset:
                                continue
                            if self.ptrUnicode and not "unicode" in ptr_charset:
                                continue
                            if self.ptrAscii and not "ascii" in ptr_charset:
                                continue
                            if self.ptrAsciiPrint and not "asciiprint" in ptr_charset:
                                continue
                            if self.ptrAlphaNum and not "alphanum" in ptr_charset:
                                continue
                            if self.ptrNum and not "numeric" in ptr_charset:
                                continue
                            if self.ptrAlpha and not "alpha" in ptr_charset:
                                continue

                        # Increment the fptr counter

                        # Get pointer disassembly
                        insn_disas = idc.GetDisasmEx(call_ea, idaapi.GENDSM_FORCE_CODE)

                        # Add pointer to the list
                        ptr = Ptr(m.file, ptr_ea, self.ptrOffset, ptr_charset, call_ea, insn_disas)
                        ptrs.append(ptr)

        ###################################################################
        # Cache Pointers to Pointers

        ptr_ea_prefix_cache = dict()

        if self.searchP2P:

            # CACHE: Running repeated searches over the entire memory space is
            #        very expensive. Let's cache all of the addresses containing
            #        bytes corresponding to discovered function pointers in a
            #        single search and simply reference this cache for each
            #        function pointer. Specifically running idaapi.find_binary()
            #        is much more expensive than idaapi.dbg_read_memory().
            #
            # NOTE: For performance considerations, the cache works on a per
            #       module basis, but could be expanded for the entire memory
            #       space.
            #
            # prefix_offset - how many bytes of discovered function
            #                 pointers to cache.
            #
            # Example: For function pointers 0x00401234, 0x00404321, 0x000405678
            #          we are going to use prefix_offset 2, so we will cache all of the
            #          values located at addresses 0x0040XXXX

            if self.sploiter.addr64:
                pack_format = "<Q"
                addr_bytes = 8
                prefix_offset = 6
            else:
                pack_format = "<I"
                addr_bytes = 4
                prefix_offset = 2

            # Set of unique N-byte address prefixes to search in memory
            ea_prefix_set = set()

            for ptr in ptrs:
                ptr_ea = ptr.ptr_ea

                ptr_bytes = struct.pack(pack_format, ptr_ea)
                ptr_bytes = ptr_bytes[-prefix_offset:]

                ea_prefix_set.add(ptr_bytes)

            # Search the module for all bytes corresponding to the prefix
            # and use them as candidates for pointers-to-pointers
            for ea_prefix in ea_prefix_set:

                # NOTE: Make sure you search using 44 33 22 11 format and not 11223344
                ea_prefix_str = " ".join(["%02x" % ord(b) for b in ea_prefix])

                # Initialize search parameters for a given module
                ea = m.addr
                maxea = m.addr + m.size

                while True:
                    ea = idaapi.find_binary(ea + 1, maxea, ea_prefix_str, 16, idaapi.SEARCH_DOWN)
                    if ea == idaapi.BADADDR:
                        break

                    p2p_ea = ea - (addr_bytes - prefix_offset)

                    dbg_mem = read_module_memory(p2p_ea, addr_bytes)
                    ptr_ea_prefix = unpack(pack_format, dbg_mem)[0]

                    if ptr_ea_prefix in ptr_ea_prefix_cache:
                        ptr_ea_prefix_cache[ptr_ea_prefix].add(p2p_ea)
                    else:
                        ptr_ea_prefix_cache[ptr_ea_prefix] = set([p2p_ea, ])

                # Detect search cancellation, but allow the loop below
                # to run to create already cached/found function pointers

                # Canceled
                if breakFlag or idaapi.wasBreak():
                    breakFlag = True
                    break

            # Canceled
            if breakFlag or idaapi.wasBreak():
                breakFlag = True
                break

        ###################################################################
        # Locate Pointer to Pointers

        for ptr in ptrs:

            ptr_ea = ptr.ptr_ea

            # Locate pointers-to-pointers for a given function pointer in the cache
            if self.searchP2P and ptr_ea in ptr_ea_prefix_cache:

                for p2p_ea in ptr_ea_prefix_cache[ptr_ea]:

                    # Apply pointer-to-pointer offset
                    p2p_ea -= self.p2pOffset

                    p2p_charset = self.sploiter.get_ptr_charset(p2p_ea)

                    # Filter the pointer
                    if self.filterP2P:
                        if p2p_charset == None:
                            continue
                        if self.ptrNonull and not "nonull" in p2p_charset:
                            continue
                        if self.ptrUnicode and not "unicode" in p2p_charset:
                            continue
                        if self.ptrAscii and not "ascii" in p2p_charset:
                            continue
                        if self.ptrAsciiPrint and not "asciiprint" in p2p_charset:
                            continue
                        if self.ptrAlphaNum and not "alphanum" in p2p_charset:
                            continue
                        if self.ptrNum and not "numeric" in p2p_charset:
                            continue
                        if self.ptrAlpha and not "alpha" in p2p_charset:
                            continue

                    # Copy existing pointer object to modify it for the particular p2p
                    p2p = copy.copy(ptr)
                    p2p.p2p_ea = p2p_ea
                    p2p.p2p_offset = self.p2pOffset
                    p2p.p2p_charset = p2p_charset

                    # Append p2p specific pointer object to the global list
                    self.ptrs.append(p2p)

                    # Exceeded maximum number of pointers
                    if self.maxPtrs and len(self.ptrs) >= self.maxPtrs:
                        breakFlag = True
                        print "[idasploiter] Maximum number of pointers exceeded."
                        break

            # Simply append pointer object to the global list
            else:
                self.ptrs.append(ptr)

                # Exceeded maximum number of pointers
                if self.maxPtrs and len(self.ptrs) >= self.maxPtrs:
                    breakFlag = True
                    print "[idasploiter] Maximum number of pointers exceeded."
                    break

            if breakFlag or idaapi.wasBreak():
                breakFlag = True
                break

        # Canceled
        # NOTE: Only works when started from GUI not script.
        if breakFlag or idaapi.wasBreak():
            breakFlag = True
            print "[idasploiter] Canceled."
            break

    print "[idasploiter] Found %d total pointers." % len(self.ptrs)
    idaapi.hide_wait_box()
def search_gadgets(self):

    count_total = len(self.retns)
    count_notify = 0
    count_curr = 0

    # BUG: A separate flag is used to track user canceling the search,
    #      because multiple calls to idaapi.wasBreak() do not properly
    #      detect cancellations.
    breakFlag = False

    # Show wait dialog
    if not self.debug:
        idaapi.show_wait_box("Searching gadgets: 00%%%%")

    for (ea_end, module) in self.retns:

        # Flush the gadgets cache for each new retn pointer
        self.gadgets_cache = dict()

        # Flush memory cache for each new retn pointer
        self.dbg_mem_cache = None

        # CACHE: It is faster to read as much memory in one blob than to make incremental reads backwards.
        #        Try to read and cache self.maxRopOffset bytes back. In cases where it is not possible,
        #        then simply try to read the largest chunk.
        # NOTE: Read a bit extra to cover correct decoding of RETN, RETN imm16, CALL /2, and JMP /4 instructions.
        for i in range(self.maxRopOffset):
            self.dbg_mem_cache = read_module_memory(
                ea_end - self.maxRopOffset + i,
                self.maxRopOffset - i + self.dbg_read_extra)
            if self.dbg_mem_cache != None:
                break

        # Check to make sure we have actual data to work with.
        if self.dbg_mem_cache == None:
            continue

        # Search all possible gadgets up to maxoffset bytes back
        # NOTE: Try all byte combinations to capture longer/more instructions
        #       even with bad bytes in the middle.
        for i in range(1, len(self.dbg_mem_cache) - self.dbg_read_extra):

            ea = ea_end - i

            # Get pointer charset
            ptr_charset = self.sploiter.get_ptr_charset(ea)

            # Filter the pointer
            if ptr_charset == None:
                continue
            if self.ptrNonull and not "nonull" in ptr_charset:
                continue
            if self.ptrUnicode and not "unicode" in ptr_charset:
                continue
            if self.ptrAscii and not "ascii" in ptr_charset:
                continue
            if self.ptrAsciiPrint and not "asciiprint" in ptr_charset:
                continue
            if self.ptrAlphaNum and not "alphanum" in ptr_charset:
                continue
            if self.ptrNum and not "numeric" in ptr_charset:
                continue
            if self.ptrAlpha and not "alpha" in ptr_charset:
                continue

            # Try to build a gadget at the pointer
            gadget = self.build_gadget(ea, ea_end)

            # Successfully built the gadget
            if gadget:

                # Populate gadget object with more data
                gadget.address = ea
                gadget.module = module
                gadget.ptr_charset = ptr_charset

                # Filter gadgets with too many instructions
                if gadget.size > self.maxRopSize:
                    break

                # Append newly built gadget
                self.gadgets.append(gadget)
                self.gadgets_cache[ea] = gadget

                # Exceeded maximum number of gadgets
                if self.maxRops and len(self.gadgets) >= self.maxRops:
                    breakFlag = True
                    print "[idasploiter] Maximum number of gadgets exceeded."
                    break
            else:
                self.gadgets_cache[ea] = None

            if breakFlag or idaapi.wasBreak():
                breakFlag = True
                break

        # Canceled
        # NOTE: Only works when started from GUI not script.
        if breakFlag or idaapi.wasBreak():
            breakFlag = True
            print "[idasploiter] Canceled."
            break

        # Progress report
        if not self.debug and count_curr >= count_notify:
            # NOTE: Need to use %%%% to escape both Python and IDA's format strings
            idaapi.replace_wait_box("Searching gadgets: %02d%%%%" % (count_curr * 100 / count_total))
            count_notify += 0.10 * count_total

        count_curr += 1

    print "[idasploiter] Found %d gadgets." % len(self.gadgets)
    if not self.debug:
        idaapi.hide_wait_box()
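# A minimal sketch of progress reporting with replace_wait_box() plus
# cooperative cancellation. The idasploiter examples above note that repeated
# idaapi.wasBreak() calls misreport cancellations; on IDA 7.x and later,
# idaapi.user_cancelled() is the check the BinaryAI and GhIDA examples use
# instead. `items` and `process` are hypothetical placeholders; this is an
# illustration of the pattern, not code from any of the plugins above.
import idaapi

def process_with_progress(items, process):
    total = len(items)
    idaapi.show_wait_box("Processing... (0/%d)" % total)
    try:
        for i, item in enumerate(items, 1):
            # Poll the wait box's Cancel button between units of work
            if idaapi.user_cancelled():
                return False
            idaapi.replace_wait_box("Processing... (%d/%d)" % (i, total))
            process(item)
        return True
    finally:
        idaapi.hide_wait_box()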
def idenLib():
    global g_func_sigs
    global g_main_sigs

    new_sigs = False
    cached = False
    if os.path.isfile(idenLibCache):
        try:
            with open(idenLibCache, "rb") as f:
                g_func_sigs = pickle.load(f)
            if os.path.isfile(idenLibCacheMain):
                with open(idenLibCacheMain, "rb") as f:
                    g_main_sigs = pickle.load(f)
            cached = True
        except Exception as e:
            # continue with new sigs after exception
            print("[idenLib] load cache files error: %s" % str(e))

    if cached:
        ret = idaapi.ask_yn(
            idaapi.ASKBTN_YES,
            "Do you want to select other signatures than the currently cached ones?")
        if ret == idaapi.ASKBTN_CANCEL:
            print("[idenLib] user cancelled")
            return
        new_sigs = (ret == idaapi.ASKBTN_YES)
    else:
        new_sigs = True

    idaapi.msg_clear()
    if new_sigs:
        if not idenLibProcessSignatures():
            return

    idaapi.show_wait_box("Please wait, scanning and applying signatures...")

    # function sigs from the current binary
    func_bytes_addr = {}
    for addr, size in getFuncRanges():
        f_bytes = getOpcodes(addr, size)
        func_bytes_addr[f_bytes] = addr

    # apply sigs
    counter = 0
    mainDetected = False
    for sig_opcodes, addr in func_bytes_addr.items():
        if g_func_sigs.has_key(sig_opcodes):
            set_func_library(addr)
            func_name = g_func_sigs[sig_opcodes][0]
            current_name = idc.get_func_name(addr)
            if current_name != func_name:
                idc.set_name(addr, func_name, idaapi.SN_FORCE)
                print("{}: {}".format(hex(addr).rstrip("L"), func_name))
                counter = counter + 1
        if g_main_sigs.has_key(sig_opcodes):  # "main" sig
            callInstr = g_main_sigs[sig_opcodes][1] + addr
            if idaapi.print_insn_mnem(callInstr) == "call":
                call_target = idc.get_operand_value(callInstr, 0)
                set_func_library(call_target)
                func_name = g_main_sigs[sig_opcodes][0]
                current_name = idc.get_func_name(call_target)
                if current_name != func_name:
                    idaapi.set_name(call_target, func_name, idaapi.SN_FORCE)
                    print("{}: {}".format(hex(call_target).rstrip("L"), func_name))
                    counter = counter + 1
                mainDetected = True

    if not mainDetected:
        for entry in idautils.Entries():
            for sig_opcodes, name_funcRva_EntryRva in g_main_sigs.items():
                callInstr = name_funcRva_EntryRva[2] + entry[2]  # from EP
                if idaapi.print_insn_mnem(callInstr) == "call":
                    fromFunc = name_funcRva_EntryRva[1]
                    func_start = callInstr - fromFunc
                    func_opcodes = getOpcodes(func_start, MAX_FUNC_SIZE)
                    if func_opcodes.startswith(sig_opcodes):
                        call_target = idc.get_operand_value(callInstr, 0)
                        set_func_library(call_target)
                        current_name = idc.get_func_name(call_target)
                        func_name = g_main_sigs[sig_opcodes][0]
                        if current_name != func_name:
                            idaapi.set_name(call_target, func_name, idaapi.SN_FORCE)
                            print("{}: {}".format(hex(call_target).rstrip("L"), func_name))
                            counter = counter + 1
                        mainDetected = True
                        break

    idaapi.hide_wait_box()
    print("[idenLib] Applied to {} function(s)".format(counter))
def interactive_load_batch(self):
    """
    Interactive loading & aggregation of coverage files.
    """
    self.palette.refresh_colors()

    #
    # kick off an asynchronous metadata refresh. this collects underlying
    # database metadata while the user will be busy selecting coverage files.
    #

    future = self.director.metadata.refresh(progress_callback=metadata_progress)

    #
    # we will now prompt the user with an interactive file dialog so they
    # can select the coverage files they would like to load from disk.
    #

    loaded_files = self._select_and_load_coverage_files()

    # if no valid coverage files were selected (and loaded), bail
    if not loaded_files:
        self.director.metadata.abort_refresh()
        return

    # prompt the user to name the new coverage aggregate
    default_name = "BATCH_%s" % self.director.peek_shorthand()
    ok, coverage_name = prompt_string(
        "Batch Name:",
        "Please enter a name for this coverage",
        default_name
    )

    # if the user didn't enter a name for the batch, or hit cancel, we abort
    if not (ok and coverage_name):
        lmsg("Aborting batch load...")
        return

    #
    # to continue any further, we need the database metadata. hopefully
    # it has finished with its asynchronous collection, otherwise we will
    # block until it completes. the user will be shown a progress dialog.
    #

    idaapi.show_wait_box("Building database metadata...")
    await_future(future)

    # aggregate all the selected files into one new coverage set
    new_coverage = self._aggregate_batch(loaded_files)

    # inject the aggregated coverage set
    idaapi.replace_wait_box("Mapping coverage...")
    self.director.create_coverage(coverage_name, new_coverage.data)

    # select the newly created batch coverage
    idaapi.replace_wait_box("Selecting coverage...")
    self.director.select_coverage(coverage_name)

    # all done, hide the IDA wait box
    idaapi.hide_wait_box()
    lmsg("Successfully loaded batch %s..." % coverage_name)

    # show the coverage overview
    self.open_coverage_overview()
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.connect((host, port))
        d = zlib.compress(pickle.dumps(var))
        s.send(d)
        s.close()
        return None
    except Exception, e:
        return str(e)

idaapi.info("Please run the Hiew-Names-Server script and press OK")
idaapi.show_wait_box("Gathering and sending names to %s:%d" % (HOST, PORT))

info = []
for ea, name in idautils.Names():
    offs = idaapi.get_fileregion_offset(ea)
    if offs == idaapi.BADADDR:
        continue
    is_func = False if idaapi.get_func(ea) is None else True
    info.append((offs, name, is_func))

ok = pickle_sendz(HOST, PORT, info)
idaapi.hide_wait_box()

if ok is not None:
    idaapi.warning("Failed to send names:\n" + ok)
else:
    idaapi.info("Names successfully transferred!")
def interactive_load_file(self):
    """
    Interactive loading of individual coverage files.
    """
    self.palette.refresh_colors()
    created_coverage = []

    #
    # kick off an asynchronous metadata refresh. this collects underlying
    # database metadata while the user will be busy selecting coverage files.
    #

    future = self.director.metadata.refresh(progress_callback=metadata_progress)

    #
    # we will now prompt the user with an interactive file dialog so they
    # can select the coverage files they would like to load from disk.
    #

    loaded_files = self._select_and_load_coverage_files()

    # if no valid coverage files were selected (and loaded), bail
    if not loaded_files:
        self.director.metadata.abort_refresh()
        return

    #
    # to continue any further, we need the database metadata. hopefully
    # it has finished with its asynchronous collection, otherwise we will
    # block until it completes. the user will be shown a progress dialog.
    #

    idaapi.show_wait_box("Building database metadata...")
    await_future(future)

    #
    # stop the director's aggregate from updating. this is in the interest
    # of better performance when loading more than one new coverage set
    # into the director.
    #

    self.director.suspend_aggregation()

    #
    # loop through the coverage data we have loaded from disk, and begin
    # the normalization process to translate / filter / flatten its blocks
    # into a generic format the director can understand (a list of addresses)
    #

    for i, data in enumerate(loaded_files, 1):

        # keep the user informed about our progress while loading coverage
        idaapi.replace_wait_box(
            "Normalizing and mapping coverage %u/%u" % (i, len(loaded_files))
        )

        # normalize coverage data to the open database
        try:
            addresses = self._normalize_coverage(data, self.director.metadata)
        except Exception as e:
            lmsg("Failed to map coverage %s" % data.filepath)
            lmsg("- %s" % e)
            logger.exception("Error details:")
            continue

        #
        # ask the director to create and track a new coverage set from
        # the normalized coverage data we provide
        #

        coverage_name = os.path.basename(data.filepath)
        self.director.create_coverage(coverage_name, addresses)

        # save the coverage name to the list of successful loads
        created_coverage.append(coverage_name)

    #
    # resume the director's aggregation capabilities, triggering an update
    # to recompute the aggregate with the newly loaded coverage
    #

    idaapi.replace_wait_box("Recomputing coverage aggregate...")
    self.director.resume_aggregation()

    # if nothing was mapped, then there's nothing else to do
    if not created_coverage:
        lmsg("No coverage files could be mapped...")
        idaapi.hide_wait_box()
        return

    #
    # select one (the first) of the newly loaded coverage file(s)
    #

    idaapi.replace_wait_box("Selecting coverage...")
    self.director.select_coverage(created_coverage[0])

    # all done, hide the IDA wait box
    idaapi.hide_wait_box()
    lmsg("Successfully loaded %u coverage file(s)..." % len(created_coverage))

    # show the coverage overview
    self.open_coverage_overview()
def ghidraaas_decompile(address, xml_file_path, bin_file_path, ghidra_server_url):
    """ Send the xml file to ghidraaas and ask to decompile a function
    """
    global GLOBAL_CHECKIN

    # Filename without the .xml extension
    filename = GLOBAL_FILENAME

    if not GLOBAL_CHECKIN:
        if ghidraaas_checkin(bin_file_path, filename, ghidra_server_url):
            GLOBAL_CHECKIN = True
        else:
            raise Exception("[!] Ghidraaas Check-in error")

    idaapi.show_wait_box(
        "Connecting to Ghidraaas. Decompiling function %s" % address)
    try:
        md5_hash = idautils.GetInputFileMD5()
        queue = Queue.Queue()
        aargs = (address, xml_file_path, bin_file_path,
                 ghidra_server_url, filename, md5_hash, queue)
        t1 = threading.Thread(target=ghidraaas_decompile_thread, args=aargs)
        t1.start()

        counter = 0
        stop = False
        while not stop:
            # idaapi.request_refresh(idaapi.IWID_DISASMS)
            # print("waiting decompile 1 zzz")
            time.sleep(0.1)

            if idaapi.wasBreak():
                print("GhIDA:: [!] decompilation interrupted.")
                stop = True
                continue

            if counter > COUNTER_MAX * 10:
                print("GhIDA:: [!] Timeout reached.")
                stop = True
                continue

            if not t1.isAlive():
                stop = True
                print("GhIDA:: [DEBUG] Thread terminated.")
                continue

        print("GhIDA:: [DEBUG] Joining decompilation thread.")
        t1.join(0)
        q_result = queue.get_nowait()
        print("GhIDA:: [DEBUG] Thread joined. Got queue result.")
        idaapi.hide_wait_box()
        return q_result

    except Exception:
        idaapi.hide_wait_box()
        print("GhIDA:: [!] Unexpected decompilation error")
        idaapi.warning("GhIDA decompilation error")
        return None
def ghidra_headless(address, xml_file_path, bin_file_path,
                    ghidra_headless_path, ghidra_plugins_path):
    """ Call Ghidra in headless mode and run the plugin
    FunctionDecompile.py to decompile the code of the function.
    """
    try:
        if not os.path.isfile(ghidra_headless_path):
            print("GhIDA:: [!] ghidra analyzeHeadless not found.")
            raise Exception("analyzeHeadless not found")

        decompiled_code = None
        idaapi.show_wait_box("Ghida decompilation started")

        prefix = "%s_" % address
        output_temp = tempfile.NamedTemporaryFile(prefix=prefix, delete=False)
        output_path = output_temp.name
        # print("GhIDA:: [DEBUG] output_path: %s" % output_path)
        output_temp.close()

        cmd = [
            ghidra_headless_path,
            ".",
            "Temp",
            "-import",
            xml_file_path,
            '-scriptPath',
            ghidra_plugins_path,
            '-postScript',
            'FunctionDecompile.py',
            address,
            output_path,
            "-noanalysis",
            "-deleteProject"
        ]

        # Options to 'safely' terminate the process
        if os.name == 'posix':
            kwargs = {'preexec_fn': os.setsid}
        else:
            kwargs = {
                'creationflags': subprocess.CREATE_NEW_PROCESS_GROUP,
                'shell': True
            }

        p = subprocess.Popen(cmd,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.STDOUT,
                             **kwargs)

        stop = False
        counter = 0
        print("GhIDA:: [INFO] Ghidra headless (timeout: %ds)" % TIMEOUT)
        print("GhIDA:: [INFO] Waiting for Ghidra headless analysis to finish...")

        while not stop:
            time.sleep(0.1)
            counter += 1
            subprocess.Popen.poll(p)

            # Process terminated
            if p.returncode is not None:
                stop = True
                print("GhIDA:: [INFO] Ghidra analysis completed!")
                continue

            # User terminated action
            if idaapi.wasBreak():
                # Terminate the process!
                if os.name == 'posix':
                    os.killpg(os.getpgid(p.pid), signal.SIGTERM)
                else:
                    os.kill(p.pid, -9)
                stop = True
                print("GhIDA:: [!] Ghidra analysis interrupted.")
                continue

            # Process timeout
            if counter > COUNTER_MAX * 10:
                os.killpg(os.getpgid(p.pid), signal.SIGTERM)
                stop = True
                print("GhIDA:: [!] Decompilation error - timeout reached")
                continue

        # Check if JSON response is available
        if os.path.isfile(output_path):
            with open(output_path) as f_in:
                j = json.load(f_in)
                if j['status'] == "completed":
                    decompiled_code = j['decompiled']
                else:
                    print("GhIDA:: [!] Decompilation error -",
                          " JSON response is malformed")

            # Remove the temporary JSON response file
            os.remove(output_path)
        else:
            print("GhIDA:: [!] Decompilation error - JSON response not found")
            idaapi.warning("Ghidra headless decompilation error")

    except Exception as e:
        print("GhIDA:: [!] %s" % e)
        print("GhIDA:: [!] Ghidra headless analysis failed")
        idaapi.warning("Ghidra headless analysis failed")
        decompiled_code = None

    finally:
        idaapi.hide_wait_box()

    return decompiled_code
            cfunc = idaapi.decompile(ea, flags=idaapi.DECOMP_NO_WAIT)
        except idaapi.DecompilationFailure:
            print "Error decompiling function @ 0x%x" % ea
            cfunc = None

        if cfunc:
            fp = func_parser_t(cfunc)
            fp.apply_to(cfunc.body, None)
            choser.feed(fp.data)

        if idaapi.user_cancelled():
            aborted = True
            break

        i += 1

    idaapi.hide_wait_box()
    if aborted:
        idaapi.warning("Aborted.")
# IDA <= 7.2
else:
    for ea in func_list:
        try:
            cfunc = idaapi.decompile(ea)
        except idaapi.DecompilationFailure:
            print "Error decompiling function @ 0x%x" % ea
            cfunc = None

        if cfunc:
            fp = func_parser_t(cfunc)
            fp.apply_to(cfunc.body, None)