def write_file(path, data):
    """Save *data* (bytes) to *path*.

    If a file already exists at *path*, the user is asked for confirmation
    first; nothing is written unless they answer "yes".
    """
    overwrite_ok = True
    if os.path.exists(path):
        overwrite_ok = idaapi.ask_yn(1, "The file already exists. Overwrite?") == 1
    if not overwrite_ok:
        return
    with open(path, "wb") as out:
        out.write(data)
def promptForSearchTypes(self):
    """Ask the user (via yes/no dialogs) which hash searches to perform.

    Sets ``self.params.searchPushArgs`` and ``self.params.searchDwordArray``
    accordingly, and raises ``RuntimeError`` when neither search type was
    selected.
    """
    # Only run if QT not available, so not bothering with ida7 check
    logger.debug("Promping for search types")
    if using_ida7api:
        push_answer = idaapi.ask_yn(
            idaapi.ASKBTN_YES, str('Search for push argument hash values?'))
        if push_answer == idaapi.ASKBTN_YES:
            self.params.searchPushArgs = True
        dword_answer = idaapi.ask_yn(
            idaapi.ASKBTN_YES, str('Search for DWORD array of hashes?'))
        if dword_answer == idaapi.ASKBTN_YES:
            self.params.searchDwordArray = True
    else:
        if 1 == idc.AskYN(1, str('Search for push argument hash value?')):
            self.params.searchPushArgs = True
        if 1 == idc.AskYN(1, str('Search for DWORD array of hashes?')):
            self.params.searchDwordArray = True
    if not (self.params.searchDwordArray or self.params.searchPushArgs):
        raise RuntimeError('No search types selected')
def slot_export_to_x64dbg_script(self):
    """Export capa results as an x64dbg script (comments + bookmarks).

    Each line of the parsed tag data has the form ``<rva>;<comment>``.
    For every entry the script sets a comment (truncated to 226 chars —
    presumably x64dbg's command-length limit; original behavior kept) and
    a bookmark at ``$base+<rva>``.
    """
    if not self.doc:
        idaapi.info("No capa results to export.")
        return
    filename = idaapi.ask_file(
        True,
        os.path.splitext(ida_nalt.get_root_filename())[0] + ".x64dbg.txt",
        "Choose file")
    if not filename:
        return
    if os.path.exists(filename) and 1 != idaapi.ask_yn(
            1, "File already exists. Overwrite?"):
        return
    Tag_file = self.Parse_json(self.doc)
    Basename = os.path.splitext(ida_nalt.get_root_filename())[0]
    Base_define = "$base=" + "\"" + Basename + ":base" + "\""
    script_lines = [Base_define]
    # Split the tag data ONCE (the original re-split inside the loop on
    # every access, which is quadratic); the final element after
    # split("\n") is an empty trailing line, so skip it.
    for line in Tag_file.split("\n")[:-1]:
        parts = line.split(';')
        rva, comment = parts[0], parts[1]
        script_lines.append(
            "cmt $base+" + rva + "," + "\"" + comment[:226] + "\"")
        script_lines.append("bookmark $base+" + rva)
    Xdbg_script = "".join(l + "\n" for l in script_lines)
    # Use a context manager so the handle is closed deterministically
    # (was: f = open(filename, "w").write(...) which leaked the handle).
    with open(filename, "w") as f:
        f.write(Xdbg_script)
def autoenum(self):
    """Interactively create (or extend) an enum for the value under the cursor.

    Prompts the user for an enum name and a constant value, creates the
    enum if it does not exist (or asks whether to modify it if it does),
    then applies the resulting member name via ``apply_enum_by_name``.
    """
    common_value = get_common_value()
    enum_name = idaapi.ask_str(self._last_enum, 0, "Enum Name")
    if enum_name is None:
        # user cancelled the dialog
        return
    if not enum_name:
        # empty input: treated as an anonymous enum downstream
        enum_name = None
    self._last_enum = enum_name
    # Can't ask with negative numbers.
    if common_value >> ((8 * sark.core.get_native_size()) - 1):
        common_value = 0
    const_value = idaapi.ask_long(common_value, "Const Value")
    if const_value is None:
        return
    modify = True
    try:
        enum = sark.add_enum(enum_name)
    except sark.exceptions.EnumAlreadyExists:
        enum = sark.Enum(enum_name)
        yes_no_cancel = idaapi.ask_yn(idaapi.ASKBTN_NO,
                                      "Enum already exists. Modify?\n")
        if yes_no_cancel == idaapi.ASKBTN_CANCEL:
            return
        elif yes_no_cancel == idaapi.ASKBTN_YES:
            modify = True
        else:  # yes_no_cancel == idaapi.ASKBTN_NO:
            modify = False
    member_name = const_name(enum, const_value)
    if modify:
        try:
            enum.members.add(member_name, const_value)
        except sark.exceptions.SarkErrorAddEnumMemeberFailed as ex:
            idaapi.msg("[AutoEnum] Adding enum member failed: {}.".format(
                ex.message))
    else:
        # Not modifying: reuse an existing member with the same value, or
        # bail out if none exists (for-else: no break means not found).
        for member in enum.members:
            if member.value == const_value:
                member_name = member.name
                break
        else:
            return
    # Apply the enum
    apply_enum_by_name(enum, member_name)
def change_rules_dir(self):
    """Let the user pick a new rules directory; offer to re-run analysis."""
    selected = self.ask_user_directory()
    if not selected:
        logger.warning("no rules directory selected. nothing to do.")
        return
    self.rule_path = selected
    if idaapi.ask_yn(1, "Run analysis now?") == 1:
        self.reload()
def dump_binary(path): max_addr = 0 # Check if we have a buggy IDA or not try: idaapi.get_many_bytes_ex(0, 1) except TypeError: buggy = True else: buggy = False if buggy: f = idaapi.qfile_t() try: f.open(path, 'wb+') except TypeError: # Another ugly hack for IDA 6/7 compat (unicode strings) f.open(str(path), 'wb+') segments = [idaapi.getnseg(x) for x in range(idaapi.get_segm_qty())] # no need for IDA 7 compat, it's not buggy max_addr = segments[-1].endEA if max_addr > 200 * 1024 * 1024: if idaapi.ask_yn( idaapi.ASKBTN_NO, "Dump file is over 200MB," " do you want to dump it anyway ?") != idaapi.ASKBTN_YES: return None idaapi.base2file(f.get_fp(), 0, 0, max_addr) f.close() return [("dump", 0, max_addr, 0, max_addr)] else: sections = [] current_offset = 0 with open(path, 'wb+') as f: # over all segments for n in range(idaapi.get_segm_qty()): seg = idaapi.getnseg(n) if hasattr(seg, "start_ea"): start_ea = seg.start_ea else: start_ea = seg.startEA if hasattr(seg, "end_ea"): end_ea = seg.end_ea else: end_ea = seg.endEA size = end_ea - start_ea # Only works with fixed IDAPython. f.write(idaapi.get_many_bytes_ex(start_ea, size)[0]) sections.append((idaapi.get_segm_name(seg), start_ea, size, current_offset, size)) current_offset += size dump_log.debug(repr(sections)) return sections
def run(self, arg):
    """Create (or update) a struct from the offsets used in the current selection.

    Infers member offsets from instructions that index a common register,
    builds/updates the struct, and applies it to the selected operands.
    """
    start, end = sark.get_selection()
    if not sark.structure.selection_has_offsets(start, end):
        message('No structure offsets in selection. Operation cancelled.')
        idaapi.warning(
            'No structure offsets in selection. Operation cancelled.')
        return
    struct_name = idaapi.ask_str(self._prev_struct_name, 0, "Struct Name")
    if not struct_name:
        message("No structure name provided. Operation cancelled.")
        return
    # remember the name so the next invocation offers it as the default
    self._prev_struct_name = struct_name
    common_reg = sark.structure.get_common_register(start, end)
    reg_name = idaapi.ask_str(common_reg, 0, "Register")
    if not reg_name:
        message("No offsets found. Operation cancelled.")
        return
    try:
        offsets, operands = sark.structure.infer_struct_offsets(
            start, end, reg_name)
    except sark.exceptions.InvalidStructOffset:
        message(
            "Invalid offset found. Cannot create structure.",
            "Make sure there are no negative offsets in the selection.")
        return
    except sark.exceptions.SarkInvalidRegisterName:
        message(
            "Invalid register name {!r}. Cannot create structs.".format(
                reg_name))
        return
    try:
        sark.structure.create_struct_from_offsets(struct_name, offsets)
    except sark.exceptions.SarkStructAlreadyExists:
        # struct exists: ask whether to update its offsets, keep it as-is,
        # or abort without applying anything
        yes_no_cancel = idaapi.ask_yn(
            idaapi.ASKBTN_NO, "Struct already exists. Modify?\n"
            "Cancel to avoid applying the struct.")
        if yes_no_cancel == idaapi.ASKBTN_CANCEL:
            return
        elif yes_no_cancel == idaapi.ASKBTN_YES:
            sid = sark.structure.get_struct(struct_name)
            sark.structure.set_struct_offsets(offsets, sid)
        else:  # yes_no_cancel == idaapi.ASKBTN_NO:
            pass
    sark.structure.apply_struct(start, end, reg_name, struct_name)
def export_json(self):
    """Export capa results as a UTF-8 encoded JSON file.

    Prompts for a destination, confirms before overwriting, and writes the
    rendered document with ``CapaJsonObjectEncoder``.
    """
    if not self.doc:
        idaapi.info("No capa results to export.")
        return
    path = idaapi.ask_file(True, "*.json", "Choose file")
    # user cancelled / blank input — without this guard the
    # os.path.exists()/open() calls below would fail on None
    # (matches the sibling export handlers)
    if not path:
        return
    if os.path.exists(path) and 1 != idaapi.ask_yn(
            1, "File already exists. Overwrite?"):
        return
    with open(path, "wb") as export_file:
        export_file.write(
            json.dumps(
                self.doc,
                sort_keys=True,
                cls=capa.render.CapaJsonObjectEncoder).encode("utf-8"))
def _handle_btn_save_tbq_file(self, filepath):
    """Persist the current settings to *filepath*, confirming overwrites.

    Asks before clobbering an existing file, commits the in-GUI settings,
    then reports success on the console or failure via a warning dialog.
    """
    if os.path.exists(filepath):
        # default button is "No" so an accidental Enter doesn't overwrite
        if idaapi.ASKBTN_YES != idaapi.ask_yn(
                idaapi.ASKBTN_NO,
                # fixed user-facing typo: was "Overwerite"
                "File exists!\n\nOverwrite %s?" % filepath):
            return
    self._commit_settings()
    success, e = self._get_settings().save(filepath)
    if success:
        print("[%s] saved to \"%s\"" % (SCRIPT_NAME, filepath))
    else:
        idaapi.warning("Could not save file.\n\n%s" % e)
    return
def table_click(row, column):
    """On row click, offer to delete the clicked Yara rule and rebuild the table."""
    variable_name = tableWidget.item(row, 0).text()
    answer = idaapi.ask_yn(idaapi.ASKBTN_NO,
                           "Delete Yara Rule : " + variable_name)
    if answer != idaapi.ASKBTN_YES:
        return
    del ruleset_list[variable_name]
    # rebuild the widget from the remaining rules
    tableWidget.setRowCount(len(ruleset_list.keys()))
    tableWidget.setColumnCount(4)
    tableWidget.setHorizontalHeaderLabels(
        ["Variable_name", "Rule", "Start", "End"])
    for idx, name in enumerate(ruleset_list.keys()):
        rule_entry = ruleset_list[name]
        tableWidget.setItem(idx, 0, QTableWidgetItem(name))
        tableWidget.setItem(idx, 1, QTableWidgetItem(rule_entry[0]))
        tableWidget.setItem(idx, 2, QTableWidgetItem(rule_entry[1]))
        tableWidget.setItem(idx, 3, QTableWidgetItem(rule_entry[2]))
    layout.addWidget(tableWidget)
def rebase_remote(self, offset):
    """Translate a local *offset* into the remote module's address space.

    When the remote base is still unknown, offers to request it and
    returns None instead of an address.
    """
    if self.base_remote is None:
        wants_request = idaapi.ask_yn(
            idaapi.ASKBTN_YES,
            "HIDECANCEL\nRemote module base is not yet resolved. Do you want to request it?\n"
        )
        if wants_request == idaapi.ASKBTN_YES:
            self.remote_base_noticie()
        return None
    if self.base != self.base_remote:
        offset = (offset - self.base) + self.base_remote
    return offset
def DeleteRule(self):
    """Clear ALL Yara rules (after confirmation) and refresh the table widget."""
    global ruleset_list, tableWidget, layout
    answer = idaapi.ask_yn(idaapi.ASKBTN_NO, "Delete Yara Rule")
    if answer != idaapi.ASKBTN_YES:
        return
    ruleset_list = {}
    # rebuild the (now empty) widget
    tableWidget.setRowCount(len(ruleset_list.keys()))
    tableWidget.setColumnCount(4)
    tableWidget.setHorizontalHeaderLabels(
        ["Variable_name", "Rule", "Start", "End"])
    for idx, name in enumerate(ruleset_list.keys()):
        rule_entry = ruleset_list[name]
        tableWidget.setItem(idx, 0, QTableWidgetItem(name))
        tableWidget.setItem(idx, 1, QTableWidgetItem(rule_entry[0]))
        tableWidget.setItem(idx, 2, QTableWidgetItem(rule_entry[1]))
        tableWidget.setItem(idx, 3, QTableWidgetItem(rule_entry[2]))
    layout.addWidget(tableWidget)
def slot_export_to_tag_file(self):
    """Export capa results to an x64dbg ".tag" file.

    Prompts for a destination, confirms before overwriting, then writes
    the parsed tag data.
    """
    if not self.doc:
        idaapi.info("No capa results to export.")
        return
    Tag_file = self.Parse_json(self.doc)
    filename = idaapi.ask_file(
        True,
        os.path.splitext(ida_nalt.get_root_filename())[0] + ".tag",
        "Choose file")
    if not filename:
        return
    if os.path.exists(filename) and 1 != idaapi.ask_yn(
            1, "File already exists. Overwrite?"):
        return
    # was: f = open(filename, "w").write(Tag_file) — leaked the handle;
    # use a context manager so it is closed deterministically
    with open(filename, "w") as f:
        f.write(Tag_file)
def slot_change_rules_dir(self):
    """Let the user change the rules directory.

    The selection is stored in settings so future runs reuse it; the user
    is then offered the chance to re-run analysis immediately.
    """
    new_dir = self.ask_user_directory()
    if not new_dir:
        logger.warning("No rule directory selected, nothing to do.")
        return
    self.rule_path = new_dir
    settings.user["rule_path"] = new_dir
    if idaapi.ask_yn(1, "Run analysis now?") == 1:
        self.slot_analyze()
def slot_export_pickle(self):
    """Serialize the current capa results document to a ".capa" pickle file."""
    if not self.doc:
        idaapi.info("No capa results to export.")
        return
    target = idaapi.ask_file(
        True,
        os.path.splitext(ida_nalt.get_root_filename())[0] + ".capa",
        "Choose file")
    if not target:
        return
    already_there = os.path.exists(target)
    if already_there and idaapi.ask_yn(
            1, "File already exists. Overwrite?") != 1:
        return
    with open(target, "wb") as export_file:
        # protocol -1: highest pickle protocol available
        pickle.dump(self.doc, export_file, -1)
def slot_import_pickle(self):
    """Load previously-exported capa results from a ".capa" pickle file.

    Resets the models/views, unpickles the document, warns when the saved
    imagebase differs from the current database, then renders the results
    (or restores the idle state on failure).
    """
    path = idaapi.ask_file(True, "*.capa", "Choose file")
    if not path:
        return
    if not os.path.exists(path):
        msg('capa: File does not exists !')
        return
    data = None
    with open(path, "rb") as import_file:
        # clear all views/models before loading new results
        self.range_model_proxy.invalidate()
        self.search_model_proxy.invalidate()
        self.model_data.reset()
        self.model_data.clear()
        self.disable_controls()
        self.set_view_status_label("Loading...")
        success = False
        act = True
        try:
            # NOTE(review): pickle.load on a user-chosen file executes
            # arbitrary code if the file is untrusted — confirm acceptable here
            self.doc = pickle.load(import_file)
            # results saved against a different imagebase may not line up;
            # let the user decide whether to continue
            if self.doc["meta"]["analysis"][
                    "base_address"] != ida_nalt.get_imagebase():
                if 0 == idaapi.ask_yn(
                        1, "Imagebase is not match! Continue to load ?"):
                    act = False
        except Exception as e:
            act = False
            logger.error("Failed to load capa results (error: %s)", e)
        if act:
            try:
                self.model_data.render_capa_doc(self.doc)
                self.render_capa_doc_mitre_summary()
                self.enable_controls()
                success = True
            except Exception as e:
                logger.error("Failed to render results (error: %s)", e)
                self.reset_view_tree()
        if not success:
            # restore the idle prompt so the UI is usable again
            self.set_view_status_label("Click Analyze to get started...")
            logger.info("Loading Analysis failed.")
        else:
            logger.info("Loading Analysis completed.")
def ask(string, **default):
    """Ask the user a question providing the option to choose "yes", "no", or "cancel".
    If any of the options are specified as a boolean, then it is assumed that this
    option will be the default. If the user chooses "cancel", then this value will
    be returned instead of the value ``None``.
    """
    state = {'no': 0, 'yes': 1, 'cancel': -1}
    results = {0: False, 1: True}
    if default:
        # was: default.viewkeys(), which is Python-2-only; iterating the
        # mapping directly is equivalent and works on both Python 2 and 3.
        # Keep only option names whose value is truthy, lowercased.
        keys = {n.lower() for n in default if default.get(n, False)}
        # first default found wins; fall back to "cancel"
        dflt = next((k for k in keys), 'cancel')
    else:
        dflt = 'cancel'
    res = idaapi.ask_yn(state[dflt], internal.utils.string.to(string))
    # -1 (cancel) is not in `results`, so it maps to None
    return results.get(res, None)
def ask(string, **default):
    """Ask the user a question providing the option to choose "yes", "no", or "cancel".
    If any of the options are specified as a boolean, then it is assumed that this
    option will be the default. If the user chooses "cancel", then this value will
    be returned instead of the value ``None``.
    """
    state = {'no': 0, 'yes': 1, 'cancel': -1}
    results = {0: False, 1: True}
    if default:
        # was: default.viewkeys(), which is Python-2-only; iterating the
        # mapping directly is equivalent and works on both Python 2 and 3.
        # Keep only option names whose value is truthy, lowercased.
        keys = {n.lower() for n in default if default.get(n, False)}
        # first default found wins; fall back to "cancel"
        dflt = next((k for k in keys), 'cancel')
    else:
        dflt = 'cancel'
    res = idaapi.ask_yn(state[dflt], string)
    # -1 (cancel) is not in `results`, so it maps to None
    return results.get(res, None)
def reload_gui_info(self, from_arena_cb=False):
    """Refresh the heap-inspection widgets from the (suspended) debuggee.

    If the process is running, asks permission to suspend it first. On any
    failure a warning is shown instead of the widgets being populated.
    `from_arena_cb` skips re-populating the arena list (caller already did).
    """
    if self.heap is None:
        return
    try:
        if not misc.is_process_suspended():
            answer = idaapi.ask_yn(
                idaapi.ASKBTN_YES,
                "HIDECANCEL\nThe process must be suspended to reload the info.\n\
Do you want to suspend it?")
            if answer == idaapi.ASKBTN_NO:
                return
            if not idaapi.suspend_process():
                warning("Unable to suspend the process")
                return
        # memory may have changed since the last suspend
        idaapi.refresh_debugger_memory()
        if not self.heap.get_heap_base():
            self.show_warning("Heap not initialized")
            return
        if not config.libc_base:
            self.show_warning("Unable to resolve glibc base address.")
            return
        self.hide_warning()
        self.arenas_widget.setVisible(True)
        if not from_arena_cb:
            self.populate_arenas()
        self.arena_widget.populate_table()
        self.tcache_widget.populate_table()
        self.bins_widget.populate_tables()
    except Exception as e:
        # surface the error both inline and via an IDA warning dialog
        self.show_warning(str(e))
        idaapi.warning(traceback.format_exc())
def button_chart_on_click(self):
    """Build the configured chart (entropy or histogram) in a new tab.

    Shows a wait box for the duration; for entropy charts optionally
    creates a segment backed by the on-disk binary so chart clicks can
    navigate the file.
    """
    try:
        idaapi.show_wait_box("Making chart...")
        tab_title = self.get_tab_title()
        if self.config.chart_type == ChartTypes.ENTROPY:
            if self.config.use_disk_binary and not self.config.entropy['segm_exists']:
                msg1 = "Do you want to create new segment with the binary content?\n"
                msg2 = "This will allow you to navigate over the file by double-clicking on the chart"
                if idaapi.ask_yn(1, "HIDECANCEL\n" + msg1 + msg2) == 1:
                    self.create_segment_with_binary()
            self.parent.tabs.addTab(Entropy(self), tab_title)
        elif self.config.chart_type == ChartTypes.HISTOGRAM:
            self.parent.tabs.addTab(Histogram(self), tab_title)
    except Exception:
        idaapi.warning("%s" % traceback.format_exc())
    finally:
        # always dismiss the wait box, whichever path was taken —
        # otherwise it could stay on screen forever
        idaapi.hide_wait_box()
def slot_export_json(self):
    """export capa results as JSON file"""
    if not self.doc:
        idaapi.info("No capa results to export.")
        return
    target = idaapi.ask_file(True, "*.json", "Choose file")
    # user cancelled, entered blank input, etc.
    if not target:
        return
    # confirm before clobbering an existing file
    declined = os.path.exists(target) and idaapi.ask_yn(
        1, "The selected file already exists. Overwrite?") != 1
    if declined:
        return
    encoded = json.dumps(
        self.doc,
        sort_keys=True,
        cls=capa.render.CapaJsonObjectEncoder).encode("utf-8")
    with open(target, "wb") as export_file:
        export_file.write(encoded)
def slot_export_to_r2_script(self):
    """Export capa results as a radare2/Cutter script.

    Each line of the parsed tag data has the form ``<rva>;<comment>``;
    the script adds a base64-encoded comment (``CCu base64:``) at
    ``$B+0x<rva>`` for every entry.
    """
    if not self.doc:
        idaapi.info("No capa results to export.")
        return
    filename = idaapi.ask_file(
        True,
        os.path.splitext(ida_nalt.get_root_filename())[0] + ".cutter.r2",
        "Choose file")
    if not filename:
        return
    if os.path.exists(filename) and 1 != idaapi.ask_yn(
            1, "File already exists. Overwrite?"):
        return
    Tag_file = self.Parse_json(self.doc)
    script_lines = []
    # Split the tag data ONCE (the original re-split on every access,
    # which is quadratic); the last element after split("\n") is an empty
    # trailing line, so skip it.
    for line in Tag_file.split("\n")[:-1]:
        parts = line.split(';')
        rva, comment = parts[0], parts[1]
        encoded = base64.b64encode(comment.encode("utf-8")).decode()
        script_lines.append("CCu base64:" + encoded + " @ " + "$B+0x" + rva)
    Cutter_script = "".join(l + "\n" for l in script_lines)
    # was: f = open(filename, "w").write(...) — leaked the handle
    with open(filename, "w") as f:
        f.write(Cutter_script)
def ask_file_option(option_prompt, file_mask, file_promt):
    """Ask a yes/no question; when affirmed, prompt for a file path.

    Returns the chosen path, or None when the user declines the option.
    """
    wants_file = idaapi.ask_yn(0, option_prompt)
    if not wants_file:
        return None
    return idaapi.ask_file(0, file_mask, file_promt)
def idenLib():
    """Scan the database against idenLib signatures and rename matches.

    Loads cached signature dictionaries (or rebuilds them on request),
    matches every function's opcode signature, marks matches as library
    functions, renames them, and additionally tries to locate "main" via
    the call-site offsets stored in the main-signature table.
    """
    global g_func_sigs
    global g_main_sigs
    new_sigs = False
    cached = False
    if os.path.isfile(idenLibCache):
        try:
            with open(idenLibCache, "rb") as f:
                g_func_sigs = pickle.load(f)
            if os.path.isfile(idenLibCacheMain):
                with open(idenLibCacheMain, "rb") as f:
                    g_main_sigs = pickle.load(f)
            cached = True
        except Exception as e:
            # continue with new sigs after excption
            print("[idenLib] load cache files error: %s" % str(e))
    if cached:
        ret = idaapi.ask_yn(
            idaapi.ASKBTN_YES,
            "Do you want to select another signatures than current cached ?")
        if ret == idaapi.ASKBTN_CANCEL:
            print("[idenLib] user cancelled")
            return
        new_sigs = ret == idaapi.ASKBTN_YES
    else:
        new_sigs = True
    idaapi.msg_clear()
    if new_sigs:
        if not idenLibProcessSignatures():
            return
    idaapi.show_wait_box("Please wait scan and apply signatures...")
    # function sigs from the current binary
    func_bytes_addr = {}
    for addr, size in getFuncRanges():
        f_bytes = getOpcodes(addr, size)
        func_bytes_addr[f_bytes] = addr
    # apply sigs
    counter = 0
    mainDetected = False
    for sig_opcodes, addr in func_bytes_addr.items():
        # was: g_func_sigs.has_key(...) — Python-2-only, crashes on py3;
        # the `in` operator is equivalent on both versions
        if sig_opcodes in g_func_sigs:
            set_func_library(addr)
            func_name = g_func_sigs[sig_opcodes][0]
            current_name = idc.get_func_name(addr)
            if current_name != func_name:
                idc.set_name(addr, func_name, idaapi.SN_FORCE)
                print("{}: {}".format(hex(addr).rstrip("L"), func_name))
                counter = counter + 1
        if sig_opcodes in g_main_sigs:  # "main" sig
            # entry [1] holds the call-site offset relative to the function
            callInstr = g_main_sigs[sig_opcodes][1] + addr
            if idaapi.print_insn_mnem(callInstr) == "call":
                call_target = idc.get_operand_value(callInstr, 0)
                set_func_library(call_target)
                func_name = g_main_sigs[sig_opcodes][0]
                current_name = idc.get_func_name(call_target)
                if current_name != func_name:
                    idaapi.set_name(call_target, func_name, idaapi.SN_FORCE)
                    print("{}: {}".format(hex(call_target).rstrip("L"),
                                          func_name))
                    counter = counter + 1
                mainDetected = True
    if not mainDetected:
        # fall back: walk entry points and test each main signature's
        # entry-relative call-site offset
        for entry in idautils.Entries():
            for sig_opcodes, name_funcRva_EntryRva in g_main_sigs.items():
                callInstr = name_funcRva_EntryRva[2] + entry[2]  # from EP
                if idaapi.print_insn_mnem(callInstr) == "call":
                    fromFunc = name_funcRva_EntryRva[1]
                    func_start = callInstr - fromFunc
                    func_opcodes = getOpcodes(func_start, MAX_FUNC_SIZE)
                    if func_opcodes.startswith(sig_opcodes):
                        call_target = idc.get_operand_value(callInstr, 0)
                        set_func_library(call_target)
                        current_name = idc.get_func_name(call_target)
                        func_name = g_main_sigs[sig_opcodes][0]
                        if current_name != func_name:
                            idaapi.set_name(call_target, func_name,
                                            idaapi.SN_FORCE)
                            print("{}: {}".format(
                                hex(call_target).rstrip("L"), func_name))
                            counter = counter + 1
                        mainDetected = True
                        break
    idaapi.hide_wait_box()
    print("[idenLib] Applied to {} function(s)".format(counter))
# Module bootstrap (Python 2: print statements, StringIO module).
import shutil
import distutils.dir_util as dir_util
import StringIO
import os
import os.path
import sys
import idaapi

# Best-effort check for the optional 'requests' dependency; offer to
# pip-install it from inside IDA when it is missing.
try:
    import requests
    print "'requests' is installed, good."
except ImportError:
    if idaapi.ask_yn(
        idaapi.ASKBTN_NO,
        "'requests' is not installed, do you want to install it ?\n"
        "Choose 'no' if you do not intend to use a distant BinCAT server"
    ) == idaapi.ASKBTN_YES:
        print "requests is not installed, trying to install"
        import pip
        # Fugly hack (cause IDA console is not a real one)
        # pip writes to real stdout/stderr; capture into StringIO buffers
        # and replay into IDA's console afterwards.
        saved_stdout = sys.stdout
        saved_stderr = sys.stderr
        sys.stdout = StringIO.StringIO()
        sys.stderr = StringIO.StringIO()
        pip.main(['install', "requests"])
        sys.stdout.seek(0)
        sys.stderr.seek(0)
        saved_stdout.write(sys.stdout.read())
        saved_stderr.write(sys.stderr.read())
        sys.stdout = saved_stdout
        sys.stderr = saved_stderr
def process_data_result(start, data):
    """Hexdump *data* (bytes starting at ea *start*) and offer follow-up actions.

    Prints up to 1024 bytes as a classic hex+ASCII dump, comments the start
    address with the text if everything is printable, and asks whether to
    patch the range and/or dump the full data to a file.
    """
    # 16 bytes on a line
    # one byte take 4 char: 2 hex char, a space and a char if isalnum
    # one line take 3 char addtion: two space and \n, and ea hex address
    BYTES_PER_LINE = 16
    MAX_BYTES_HEX_DUMP = BYTES_PER_LINE * 64  # 64 lines
    printLen = len(data)
    if printLen > MAX_BYTES_HEX_DUMP:
        printLen = MAX_BYTES_HEX_DUMP
        plg_print("Only hexdump first %d bytes" % MAX_BYTES_HEX_DUMP)
    nLines = printLen // BYTES_PER_LINE  # Number of lines
    nOdd = printLen % BYTES_PER_LINE  # Number of bytes at last line
    isStr = True  # stays True only if every dumped byte is printable
    sHex = str()
    for i in range(printLen):
        if isStr and chr(data[i]) not in string.printable:
            isStr = False
        if i % BYTES_PER_LINE == 0:
            # start of a line: prefix with the effective address
            sHex += "%s: " % idaapi.ea2str(start + i)
        sHex += "%02X " % data[i]
        if (i % BYTES_PER_LINE == BYTES_PER_LINE - 1) or (i == printLen - 1):
            # add the end of data or end of a line
            if nLines:
                lineIdx = i // BYTES_PER_LINE  # current line number
                low = lineIdx * BYTES_PER_LINE
                high = i + 1
            else:
                low = 0
                high = printLen
            sHex += " "
            # Padding last line (3 chars per missing byte keeps the ASCII
            # column aligned)
            if i == printLen - 1 and nLines and nOdd:
                sHex += " " * (BYTES_PER_LINE - nOdd) * 3
            for j in range(low, high):
                ch = chr(data[j])
                sHex += ch if ch.isalnum() else "."
            sHex += "\n"
    # Print out the hexdump string
    print(sHex)
    if isStr:
        # NOTE(review): str(data) on py3 bytes yields "b'...'" — presumably
        # intended for display; confirm against the plugin's Python target
        txt = str(data)
        print("String result: '%s'" % txt)
        idaapi.set_cmt(start, txt, 1)
    if idaapi.ask_yn(idaapi.ASKBTN_NO, "Do you want to patch selected range with result data ?") == idaapi.ASKBTN_YES:
        idaapi.patch_bytes(start, bytes(data))
    if idaapi.ask_yn(idaapi.ASKBTN_YES, "Do you want to dump result data to file ?") == idaapi.ASKBTN_YES:
        dump_data_to_file("Dump_At_0x%X_Size_%d.dump" % (start, len(data)), data)
def upload_all_callback(self, __):
    """Upload every function in the database, after user confirmation."""
    confirmed = idaapi.ask_yn(0, "Are you sure to upload all functions?")
    if confirmed == 1:
        self.upload_selected_functions(idautils.Functions())
def retrieve_all_callback(self, __):
    """Run matching over every function in the database, after confirmation."""
    confirmed = idaapi.ask_yn(0, "Are you sure to match all functions?")
    if confirmed == 1:
        self.retrieve_selected_functions(idautils.Functions())