def promptForSearchTypes(self):
    """Ask the user which hash-search strategies to enable.

    Sets self.params.searchDwordArray and/or self.params.searchPushArgs
    according to two yes/no prompts.

    Raises:
        RuntimeError: if the user declines both search types.
    """
    # FIX: corrected "Promping" typo in the debug log message.
    self.logger.debug("Prompting for search types")
    # idc.AskYN returns 1 for "Yes"; first argument is the default answer.
    if idc.AskYN(1, 'Search for DWORD array of hashes?') == 1:
        self.params.searchDwordArray = True
    if idc.AskYN(1, 'Search for push argument hash value?') == 1:
        self.params.searchPushArgs = True
    if (not self.params.searchDwordArray) and (not self.params.searchPushArgs):
        raise RuntimeError('No search types selected')
def promptForSearchTypes(self):
    # Only run if QT not available, so not bothering with ida7 check
    """Ask the user which hash-search strategies to enable (non-QT fallback).

    Sets self.params.searchDwordArray and/or self.params.searchPushArgs
    according to two yes/no prompts.

    Raises:
        RuntimeError: if the user declines both search types.
    """
    # FIX: corrected "Promping" typo in the debug log message.
    # NOTE(review): uses a module-level `logger` (unlike the sibling variant
    # that uses self.logger) -- confirm it exists at module scope.
    logger.debug("Prompting for search types")
    if idc.AskYN(1, 'Search for DWORD array of hashes?') == 1:
        self.params.searchDwordArray = True
    if idc.AskYN(1, 'Search for push argument hash value?') == 1:
        self.params.searchPushArgs = True
    if (not self.params.searchDwordArray) and (not self.params.searchPushArgs):
        raise RuntimeError('No search types selected')
def run(self, arg):
    """Infer a struct from the selected instructions and apply it.

    Prompts for a struct name and the base register common to the
    selection, infers member offsets, creates (or optionally updates)
    the struct, then applies it to the selected operands.
    """
    start, end = sark.get_selection()
    struct_name = idc.AskStr(self._prev_struct_name, "Struct Name")
    if not struct_name:
        return
    # Remember the name so it becomes the default on the next invocation.
    self._prev_struct_name = struct_name
    common_reg = sark.structure.get_common_register(start, end)
    reg_name = idc.AskStr(common_reg, "Register")
    if not reg_name:
        return
    offsets, operands = sark.structure.infer_struct_offsets(
        start, end, reg_name)
    try:
        sark.structure.create_struct_from_offsets(struct_name, offsets)
    except sark.exceptions.SarkStructAlreadyExists:
        # Struct exists: let the user decide between updating it,
        # keeping it as-is, or aborting entirely.
        yes_no_cancel = idc.AskYN(
            idaapi.ASKBTN_NO, "Struct already exists. Modify?\n"
            "Cancel to avoid applying the struct.")
        if yes_no_cancel == idaapi.ASKBTN_CANCEL:
            return
        elif yes_no_cancel == idaapi.ASKBTN_YES:
            sid = sark.structure.get_struct(struct_name)
            sark.structure.set_struct_offsets(offsets, sid)
        else:  # yes_no_cancel == idaapi.ASKBTN_NO:
            pass
    sark.structure.apply_struct(start, end, reg_name, struct_name)
def OnCommand(self, n, cmd):
    """Handle chooser context-menu commands.

    Args:
        n: index of the selected chooser row (unused).
        cmd: command id previously registered with the chooser.

    Returns:
        0, as required by the chooser callback API.
    """
    if cmd == self.show_all_toggle_cmd:
        # BUG FIX: the original used two sequential `if` statements, so after
        # setting min_xrefs to 0 the second condition (`!= MIN_XREFS`) was
        # immediately true and reset it back -- the filter could never be
        # disabled. An if/else makes it a real toggle.
        if self.min_xrefs == self.MIN_XREFS:
            self.min_xrefs = 0
        else:
            self.min_xrefs = self.MIN_XREFS
        # Toggle the loop requirement in lockstep with the xref filter.
        if self.must_have_loop == self.MUST_HAVE_LOOP:
            self.must_have_loop = False
        else:
            self.must_have_loop = self.MUST_HAVE_LOOP
    elif cmd == self.rename_cmd:
        if idc.AskYN(
                0,
                "Are you sure you want to rename all 'sub_XXXXXX' functions to 'leaf_XXXXXX'?"
        ) == 1:
            for item in self.items:
                # Is this a leaf function? (last column is the flag; kept as
                # an explicit == True comparison since rows may hold
                # non-boolean values)
                if item[-1] == True:
                    current_name = item[0]
                    if current_name.startswith('sub_'):
                        new_name = current_name.replace('sub_', 'leaf_')
                        idc.MakeName(idc.LocByName(current_name), new_name)
    # Rebuild the listing after any command.
    self.populate_items()
    return 0
def main(doAllFuncs=True): #doAllFuncs=False #jayutils.configLogger('', logging.DEBUG) jayutils.configLogger('', logging.INFO) logger = jayutils.getLogger('stackstrings') logger.debug('Starting up now') filePath = jayutils.getInputFilepath() if filePath is None: self.logger.info('No input file provided. Stopping') return vw = jayutils.loadWorkspace(filePath) ea = idc.ScreenEA() res = idc.AskYN(0, 'Use basic-block local aggregator') if res == -1: print 'User canceled' return uselocalagg = (res == 1) ranges = getFuncRanges(ea, doAllFuncs) for funcStart, funcEnd in ranges: try: logger.debug('Starting on function: 0x%x', funcStart) stringList = runStrings(vw, funcStart, uselocalagg) for node, string in stringList: if isLikelyFalsePositiveString(string): #if it's very likely a FP, skip annotating continue print '0x%08x: %s' % (node[0], string) #print '0x%08x: 0x%08x: %s %s' % (node[0], node[1], binascii.hexlify(string), string) idc.MakeComm(node[0], string.strip()) except Exception, err: logger.exception('Error during parse: %s', str(err))
def Initialize():
    """Resolve every configured function and offset address and print it."""
    global Rename
    Rename = idc.AskYN(0, "Automaticly Update Names? (sub_549570 => PrintChat)")
    if Rename == -1:
        print("Exiting...")
        return
    print("")
    print("++ Uhrwerk: Offsets (%s)" % datetime.datetime.now())
    print("Why do they keep breaking...")
    print("")
    print("++ Functions")
    # Dispatch table: one lookup strategy per entry type.
    finders = {
        1: FindFuncPattern,
        2: FindFuncCall,
        3: FindFuncFirstReference,
    }
    for Alias, Reference, Type in Functions:
        finder = finders.get(Type)
        if finder is not None:
            PrintWrapper(Alias, finder(Reference), 1)
    print("")
    print("++ Offsets")
    for Alias, Reference, Type, Operand in Offsets:
        if Type == 1:
            PrintWrapper(Alias, FindOffsetPattern(Reference, Operand), 2)
    print("")
def __init__(self):
    """Warn the user (and allow aborting) when the IDB defines no data segments."""
    if self.get_start_ea(self.DATA) != idc.BADADDR:
        return
    answer = idc.AskYN(
        0,
        "There are no data segments defined! This probably won't end well. Continue?"
    )
    if answer != 1:
        raise Exception("Action cancelled by user.")
def OnClose(self):
    """On window close, offer to remove the path-node highlighting."""
    # TODO: Add a 'do not ask again' feature?
    undo = idc.AskYN(
        1,
        "Path nodes have been highlighted in the disassembly window. Undo highlighting?"
    )
    if undo == 1:
        self.unhighlight_all()
def term(self):
    """Plugin teardown: offer to persist an unsaved DIE DB, then remove menus."""
    with ignored(AttributeError):
        unsaved = not self.die_manager.die_db.is_saved
        if unsaved:
            answer = idc.AskYN(
                1, "DIE DB was not saved, Would you like to save it now?")
            if answer == 1:
                self.die_manager.save_db()
        self.die_manager.del_menu_items()
def autoenum(self):
    """Create (or extend) an enum from the selected operand's common value.

    Prompts for an enum name and a constant value, adds a member for the
    value (or reuses an existing member holding it), then applies the enum
    to the relevant operands.
    """
    common_value = get_common_value()
    enum_name = idc.AskStr(self._last_enum, "Enum Name")
    if enum_name is None:
        # Dialog cancelled.
        return
    # An empty string means "anonymous enum" further down.
    if not enum_name:
        enum_name = None
    self._last_enum = enum_name
    # Can't ask with negative numbers; clamp values whose sign bit is set.
    if common_value >> ((8 * sark.core.get_native_size()) - 1):
        common_value = 0
    const_value = idc.AskLong(common_value, "Const Value")
    if const_value is None:
        return
    modify = True
    try:
        enum = sark.add_enum(enum_name)
    except sark.exceptions.EnumAlreadyExists:
        # Enum exists: ask whether to add the member or just reuse one.
        enum = sark.Enum(enum_name)
        yes_no_cancel = idc.AskYN(idaapi.ASKBTN_NO,
                                  "Enum already exists. Modify?\n")
        if yes_no_cancel == idaapi.ASKBTN_CANCEL:
            return
        elif yes_no_cancel == idaapi.ASKBTN_YES:
            modify = True
        else:  # yes_no_cancel == idaapi.ASKBTN_NO:
            modify = False
    member_name = const_name(enum, const_value)
    if modify:
        try:
            enum.members.add(member_name, const_value)
        except sark.exceptions.SarkErrorAddEnumMemeberFailed as ex:
            idaapi.msg("[AutoEnum] Adding enum member failed: {}.".format(
                ex.message))
    else:
        # Not modifying: find an existing member holding this value;
        # bail out if none exists (for-else).
        for member in enum.members:
            if member.value == const_value:
                member_name = member.name
                break
        else:
            return
    # Apply the enum
    apply_enum_by_name(enum, member_name)
def reset(self):
    '''
    Resets all settings to the defaults.
    '''
    confirmed = idc.AskYN(
        0, "Are you sure you want to undo all changes and reset?") == 1
    if confirmed:
        self.sim.Reset()
        self.Refresh()
def sync_ask_rename():
    """Apply the name received from the sync peer, prompting unless forced."""
    rename_flag = 0
    # Short-circuit: when `force` is set, no dialog is shown at all.
    approved = force or idc.AskYN(
        rename_flag,
        "Replace %s by %s" % (get_name(), name["data"])) == 1
    if approved:
        g_logger.debug("[x] renaming %s @ 0x%x as %s", get_name(),
                       name["address"], name["data"])
        idc.MakeName(name["address"], name["data"].encode('ascii', 'ignore'))
def run(self):
    """ Launch the hooks! """
    idaapi.disable_script_timeout()
    if self.skel_settings.initial_sync:
        init_sync = 0
        # Ask about each sync category and run the matching sender.
        sync_actions = (
            ("Do you want to synchronize defined names?", self.send_names),
            ("Do you want to synchronize defined comments?",
             self.send_comments),
        )
        for question, action in sync_actions:
            if idc.AskYN(init_sync, question) == 1:
                action()
    self.skel_ui.Show()
    self.skel_sync_agent.start()
    self.skel_hooks.hook()
def rename_fuzzy(self, n):
    """Rename functions using a battery of fuzzy string-matching regexes."""
    confirmed = idc.AskYN(
        0, "Really rename functions based on fuzzy string "
        "matching? (Save your database first!)") == 1
    if not confirmed:
        return
    # Order matters: more specific patterns are tried first.
    patterns = (
        "\[(.*?)\]",
        "(\w*)\(\)",
        "^In.(\w*)(?i)",
        "(^\w*):",
        "(\w*)\.c(?i)",
        "(\w*):",
        "(^\w*)",
    )
    for pattern in patterns:
        self.rename_regex(n, regex_str=pattern)
def run(self, arg):
    """Infer struct offsets from the current selection and apply the struct.

    Validates that the selection contains structure offsets, prompts for a
    struct name and base register, infers member offsets, creates (or
    optionally updates) the struct and finally applies it.
    """
    start, end = sark.get_selection()
    if not sark.structure.selection_has_offsets(start, end):
        message('No structure offsets in selection. Operation cancelled.')
        idaapi.warning(
            'No structure offsets in selection. Operation cancelled.')
        return
    struct_name = idc.AskStr(self._prev_struct_name, "Struct Name")
    if not struct_name:
        message("No structure name provided. Operation cancelled.")
        return
    # Remember the chosen name as the default for the next run.
    self._prev_struct_name = struct_name
    common_reg = sark.structure.get_common_register(start, end)
    reg_name = idc.AskStr(common_reg, "Register")
    if not reg_name:
        message("No offsets found. Operation cancelled.")
        return
    try:
        offsets, operands = sark.structure.infer_struct_offsets(
            start, end, reg_name)
    except sark.exceptions.InvalidStructOffset:
        message(
            "Invalid offset found. Cannot create structure.",
            "Make sure there are no negative offsets in the selection.")
        return
    except sark.exceptions.SarkInvalidRegisterName:
        message(
            "Invalid register name {!r}. Cannot create structs.".format(
                reg_name))
        return
    try:
        sark.structure.create_struct_from_offsets(struct_name, offsets)
    except sark.exceptions.SarkStructAlreadyExists:
        # Struct exists: let the user update it, keep it, or abort.
        yes_no_cancel = idc.AskYN(
            idaapi.ASKBTN_NO, "Struct already exists. Modify?\n"
            "Cancel to avoid applying the struct.")
        if yes_no_cancel == idaapi.ASKBTN_CANCEL:
            return
        elif yes_no_cancel == idaapi.ASKBTN_YES:
            sid = sark.structure.get_struct(struct_name)
            sark.structure.set_struct_offsets(offsets, sid)
        else:  # yes_no_cancel == idaapi.ASKBTN_NO:
            pass
    sark.structure.apply_struct(start, end, reg_name, struct_name)
def promptForHashTypes(self):
    '''
    Iterate over the known hash types in the DB. Prompt the user
    for each one. Kind of painful here since only can do y/n prompt.
    TODO: Find a better/less painful prompt method!
    '''
    hashTypes = self.dbstore.getAllHashTypes()
    # Keep every hash the user answered "yes" for.
    selected = [h for h in hashTypes
                if idc.AskYN(1, str('Include hash: %s' % h.hashName)) == 1]
    self.params.hashTypes.extend(selected)
    if not self.params.hashTypes:
        raise RuntimeError('No hashes selected')
def TwoProfColor(self):
    """Load two callgrind profiles and colorize functions hit in each/both.

    Returns:
        True on success, False if the user cancelled file selection.
    """
    self.is_singleprofile = False
    self.is_twoprofile = True
    if not self.is_reanalyse:
        self.ProgectWarning()
        self.fname = idc.AskFile(0, '*.*', 'Select first profile file, plz')
        self.sname = idc.AskFile(0, '*.*', 'Select second profile file, plz')
    if (self.fname is None) or (self.sname is None):
        return False
    # FIX: use context managers so the profile files are always closed
    # (the originals were opened and never closed).
    with open(self.fname, 'rb') as first:
        firstprofile = first.read().split('\n\n')
    with open(self.sname, 'rb') as second:
        secondprofile = second.read().split('\n\n')
    idaapi.msg("Analysing profiles...\n")
    firstprof = self.kern.analyze_callgr_profile(firstprofile)
    secondprof = self.kern.analyze_callgr_profile(secondprofile)
    if not self.is_reanalyse:
        YN = idc.AskYN(
            0,
            'Do you want to make additional funcs, based on callgrind logs?\n(If func not already exist)'
        )
        if YN == 1:
            idaapi.msg("Tryind add funcs...\n")
            num = self.kern.make_funcs_from_profiles(firstprof, secondprof)
            idc.Warning("%d funcs was added" % num)
    actfuncs_dict = self.kern.color_profs(firstprof, secondprof, BLUE, ORG,
                                          GREEN)
    self.ColorFuncsView = FuncsColorChooser("Colored Funcs", actfuncs_dict,
                                            self.fname, self.sname)
    # self.ColorFuncsView.show()
    idaapi.msg("Done, enjoy work!\n")
    idaapi.msg(
        "\nHelp:\n - Click Functions window and Type Alt+1 to see actually executed funcs from profiles\n"
    )
    self.is_reanalyse = False
    return True
def update():
    """Check GitHub for a newer plugin release and optionally prompt the user.

    Honors the autocheck / autoupdate / skipped settings; a "No" answer
    records the offered version so it is not offered again.
    """
    if not config['settings']['update']['autocheck']:
        return
    url = ("repos/{owner}/{repo}/releases/latest"
           "").format(owner=config['git']['owner'],
                      repo=config['git']['repository'])
    try:
        r = network.query("GET", url, server=config['git']['server'],
                          token="", json=True)
        local_version = StrictVersion(__version__)
        remote_version = StrictVersion(r['tag_name'])
        logger('updater').info("local version: {}, latest version: {}".format(
            local_version, remote_version))
        if remote_version < local_version:
            logger('updater').debug("You're using a version newer than latest")
            return
        if remote_version == local_version:
            logger('updater').debug("Version is up to date")
            return
        logger('updater').info("update is available")
        # NOTE(review): 'skipped' stores strings while remote_version is a
        # StrictVersion; this relies on StrictVersion coercing strings during
        # comparison -- verify (the sibling handler compares str(...) instead).
        if remote_version in config['settings']['update']['skipped']:
            logger('updater').info("version update marked skip")
            return
        if config['settings']['update']['autoupdate']:
            # Auto-update enabled: fall through without prompting.
            pass
        else:
            update = idc.AskYN(
                1, "An update is available for the rematch IDA "
                "plugin.\nVersion {} is available, while you're "
                "using {}.\nWould you like to update your version?".format(
                    remote_version, local_version))
            if update == 0:
                # "No": remember this version so we stop nagging about it.
                config['settings']['update']['skipped'].append(
                    str(remote_version))
                logger('updater').info("Version update supressed")
                return
            if update == -1:
                # Dialog cancelled.
                return
    except exceptions.NotFoundException:
        logger('updater').info("Couldn't find latest release for plugin")
def ask(askType, defaultVal, prompt):
    """Prompt the user for a value of the given type via the matching idc dialog.

    Returns None when the user cancels; for `file`, defaultVal selects
    write (True) vs read (False) mode.
    """
    if askType is int or askType is long:
        return idc.AskLong(defaultVal, prompt)
    if askType is str:
        return idc.AskStr(defaultVal, prompt)
    if askType is bool:
        answer = idc.AskYN(defaultVal, prompt)
        if answer == -1:
            return None
        return bool(answer)
    if askType is file:
        typeAssert(defaultVal, bool)
        fname = idc.AskFile(defaultVal, "", prompt)
        if not isString(fname):
            return None
        mode = "w" if defaultVal else "r"
        return open(fname, mode)
def handle_update(self, response):
    """Handle the version-query response and start an update when appropriate.

    Sets self.status to "ready"/"skip" or proceeds to download the newest
    package, depending on version comparison, the skip-list and user consent.
    """
    self.q = None
    local_version = StrictVersion(__version__)
    raw_remote_version = response['info']['version']
    remote_version = StrictVersion(raw_remote_version)
    log('update').info("local version: %s, latest version: %s", local_version,
                       remote_version)
    if remote_version < local_version:
        log('update').debug("You're using a version newer than latest")
        self.status = "ready"
        return
    if remote_version == local_version:
        log('update').debug("Version is up to date")
        self.status = "ready"
        return
    log('update').info("update is available")
    if str(remote_version) in config['settings']['update']['skipped']:
        log('update').info("version update marked skip")
        self.status = "skip"
        return
    if not config['settings']['update']['autoupdate']:
        update = idc.AskYN(
            1, "An update is available for the rematch IDA "
            "plugin.\nVersion {} is available, while you're "
            "using {}.\nWould you like to update your version?".format(
                remote_version, local_version))
        if update == 0:
            # "No": record the version so it is not offered again.
            config['settings']['update']['skipped'].append(
                str(remote_version))
            log('update').info("Version update suppressed")
            self.status = "skip"
            return
        if update == -1:
            # Dialog cancelled.
            self.status = "skip"
            return
    # get latest version's package url
    new_release = response['releases'][raw_remote_version]
    new_url = new_release[0]['url']
    self.update_version(new_url)
def GenerateAddresses():
    """Scan signatures, then resolve and print every function/object address."""
    SignatureSearch()
    global Rename
    Rename = idc.AskYN(0, "Automaticly Update Names? (sub_549570 => PrintChat)")
    if Rename == -1:
        print("Exiting...")
        return
    print("")
    print("++ Offsets (%s)" % datetime.datetime.now())
    print("Why do they keep breaking...")
    print("")
    print("++ Functions")
    # Dispatch table: one lookup strategy per entry type.
    func_finders = {
        1: FindFuncPattern,
        2: FindFuncCall,
        3: FindFuncFirstReference,
    }
    for x in Functions:
        finder = func_finders.get(int(x.Type))
        if finder is None:
            continue
        # `== True` kept deliberately: matches the original's exact semantics.
        flag = 1 if x.HasPrefix == True else 0
        PrintWrapper(x.Alias, finder(x.Reference), flag)
    print("")
    print("++ Objects")
    for x in Objects:
        if int(x.Type) != 1:
            continue
        flag = 2 if x.HasPrefix == True else 0
        PrintWrapper(x.Alias, FindOffsetPattern(x.Reference, int(x.Operand)),
                     flag)
    print("")
def OneProfColor(self):
    """Load one callgrind profile and colorize the executed functions.

    Returns:
        True on success, False if the user cancelled file selection.
    """
    self.is_singleprofile = True
    self.is_twoprofile = False
    if not self.is_reanalyse:
        self.singlname = idc.AskFile(0, '*.*', 'Select profile file, plz')
    if self.singlname is None:
        return False
    # FIX: use a context manager so the profile file is always closed
    # (the original opened it and never closed it).
    with open(self.singlname, 'rb') as prof:
        binprofile = prof.read().split('\n\n')
    idaapi.msg("Analysing profile...\n")
    binprof = self.kern.analyze_callgr_profile(binprofile)
    actfuncs = self.kern.color_single_profile(binprof, GREEN)
    if not self.is_reanalyse:
        YN = idc.AskYN(
            0,
            'Do you want to make additional funcs, based on callgrind logs?\n(If func not already exist)'
        )
        if YN == 1:
            idaapi.msg("Tryind add funcs...\n")
            num = self.kern.make_funcs_from_prof(binprof)
            idc.Warning("%d funcs was added" % num)
    self.ColorFuncsView = FuncsUniqueProfile("Actually funcs", actfuncs, GREEN)
    # self.ColorFuncsView.show()
    idaapi.msg("Done, enjoy work!")
    idaapi.msg(
        "\nHelp:\n - Click Functions window and Type Alt+1 to see actually executed funcs from profile\n"
    )
    self.is_reanalyse = False
    return True
def handle_update(response):
    """Handle the version-query response: compare versions, consult the
    skip-list, ask the user, and kick off the download when appropriate."""
    local_version = StrictVersion(__version__)
    remote_version = StrictVersion(response['info']['version'])
    logger('update').info("local version: %s, latest version: %s",
                          local_version, remote_version)
    if remote_version < local_version:
        logger('update').debug("You're using a version newer than latest")
        return
    if remote_version == local_version:
        logger('update').debug("Version is up to date")
        return
    logger('update').info("update is available")
    if str(remote_version) in config['settings']['update']['skipped']:
        logger('update').info("version update marked skip")
        return
    if config['settings']['update']['autoupdate']:
        # Auto-update enabled: fall through without prompting.
        pass
    else:
        update = idc.AskYN(
            1, "An update is available for the rematch IDA plugin."
            "\nVersion {} is available, while you're using {}. "
            "\nWould you like to update your version?".format(
                remote_version, local_version))
        if update == 0:
            # "No": remember this version so it is not offered again.
            config['settings']['update']['skipped'].append(str(remote_version))
            logger('update').info("Version update suppressed")
            return
        if update == -1:
            # Dialog cancelled.
            return
    # get latest version's package url
    new_release = response['releases'][str(remote_version)]
    new_url = new_release[0]['url']
    update_version(new_url)
def main():
    """Export the current IDB into a BinNavi SQL database.

    Detects the architecture, reads the ida2sql configuration, connects to
    the database and exports functions/instructions, optionally in batch
    mode (exiting IDA with a status code when done).
    """
    global tm_start
    # Probe the architecture modules until one matches the current processor;
    # the for-else fires when no module matched.
    for mod in ('metapc', 'ppc', 'arm'):
        arch_mod = __import__('arch.%s' % mod, globals(), locals(), ['*'])
        arch = arch_mod.Arch()
        if arch:
            if arch.check_arch():
                # This is a valid module for the current architecure
                # so the search has finished
                log_message('Using architecture module [%s]' % mod)
                break
    else:
        log_message(
            'No module found to process the current architecure [%s]. Exiting.'
            % (arch.processor_name))
        return
    global instrumentation
    log_message('Initialization sucessful.')
    db_engine, db_host, db_name, db_user, db_password = (None, ) * 5
    batch_mode = False
    module_comment = ''
    process_sections = False
    # If the configuration filename has been fetched from the
    # environment variables, then use that.
    if CONFIG_FILE_NAME:
        config_file_path = CONFIG_FILE_NAME
    # Otherwise fallback into the one expected in the IDA directory
    else:
        config_file_path = os.path.join(idaapi.idadir(''), 'ida2sql.cfg')
    if os.path.exists(config_file_path):
        cfg = ConfigParser.ConfigParser()
        cfg.read(config_file_path)
        if cfg.has_section('database'):
            if cfg.has_option('database', 'engine'):
                db_engine = getattr(DB_ENGINE, cfg.get('database', 'engine'))
            if cfg.has_option('database', 'host'):
                db_host = cfg.get('database', 'host')
            if cfg.has_option('database', 'schema'):
                db_name = cfg.get('database', 'schema')
            if cfg.has_option('database', 'user'):
                db_user = cfg.get('database', 'user')
            if cfg.has_option('database', 'password'):
                db_password = cfg.get('database', 'password')
            # NOTE(review): the 'importing' options are read under the
            # 'database' section guard -- confirm they shouldn't be under a
            # has_section('importing') check instead.
            if cfg.has_option('importing', 'mode'):
                batch_mode = cfg.get('importing', 'mode')
                if batch_mode.lower() in ('batch', 'auto'):
                    batch_mode = True
            if cfg.has_option('importing', 'comment'):
                module_comment = cfg.get('importing', 'comment')
            if cfg.has_option('importing', 'process_sections'):
                process_sections = cfg.get('importing', 'process_sections')
                if process_sections.lower() in ('no', 'false'):
                    process_sections = False
                else:
                    process_sections = True
    # Fall back to interactively querying any credential still missing.
    if None in (db_engine, db_host, db_name, db_user, db_password):
        (db_engine, db_host, db_name, db_user,
         db_password) = query_configuration()
        if None in (db_engine, db_host, db_name, db_user, db_password):
            log_message('User cancelled the exporting.')
            return
    failed = False
    try:
        sqlexporter = SQLExporter(arch, db_engine, db=db_name, user=db_user,
                                  passwd=db_password, host=db_host,
                                  use_new_schema=USE_NEW_SCHEMA)
    except ImportError:
        print "Error connecting to the database, error importing required module: %s" % sys.exc_info()[0]
        failed = True
    except Exception:
        print "Error connecting to the database, Reason: %s" % sys.exc_info()[0]
        failed = True
    if failed:
        # Can't connect to the database, indicate that to BinNavi
        if batch_mode is True:
            idc.Exit(FATAL_CANNOT_CONNECT_TO_DATABASE)
        else:
            return
    if not sqlexporter.is_database_ready():
        # In batch mode create the tables unconditionally; otherwise ask.
        if batch_mode is False:
            result = idc.AskYN(
                1,
                'Database has not been initialized yet. Do you want to create now the basic tables? (This step is performed only once)'
            )
        else:
            result = 1
        if result == 1:
            sqlexporter.init_database()
        else:
            log_message('User requested abort.')
            return
    # Batch orchestration state is passed through environment variables.
    iteration = os.environ.get('EXPORT_ITERATION', None)
    module_id = os.environ.get('MODULE_ID', None)
    if iteration is None and module_id == None:
        # Export manually
        print "Exporting manually ..."
        iteration = -1
        sqlexporter.set_callgraph_only(False)
        sqlexporter.set_exporting_manually(True)
        status = sqlexporter.new_module(idc.GetInputFilePath(),
                                        arch.get_architecture_name(),
                                        idaapi.get_imagebase(),
                                        module_comment, batch_mode)
    elif iteration is not None and module_id is not None:
        # Export the next k functions or the call graph
        sqlexporter.set_exporting_manually(False)
        sqlexporter.set_callgraph_only(int(iteration) == -1)
        sqlexporter.set_module_id(int(module_id))
        status = True
    else:
        sqlexporter.set_exporting_manually(False)
        status = sqlexporter.new_module(idc.GetInputFilePath(),
                                        arch.get_architecture_name(),
                                        idaapi.get_imagebase(),
                                        module_comment, batch_mode)
        sqlexporter.set_callgraph_only(False)
    if status is False:
        log_message('Export aborted')
        return
    elif status is None:
        log_message(
            'The database appears to contain data exported with different schemas, exporting not allowed.'
        )
        if batch_mode:
            idc.Exit(FATAL_INVALID_SCHEMA_VERSION)
    # Wire the exporter callbacks into the instrumentation layer.
    instrumentation = Instrumentation()
    instrumentation.new_function_callable(sqlexporter.process_function)
    instrumentation.new_packet_callable(sqlexporter.process_packet)
    instrumentation.new_section_callable(sqlexporter.process_section)
    tm_start = time.time()
    already_imported = sqlexporter.db.get_already_imported()
    incomplete = process_binary(arch, process_sections, int(iteration),
                                already_imported)
    sqlexporter.finish()
    log_message(
        'Results: %d functions, %d instructions, %d basic blocks, %d address references'
        % (len(sqlexporter.exported_functions),
           len(sqlexporter.exported_instructions),
           sqlexporter.basic_blocks_next_id - 1,
           sqlexporter.address_references_values_count))
    log_message(
        'Results: %d expression substitutions, %d operand expressions, %d operand tuples'
        % (sqlexporter.expression_substitutions_values_count,
           sqlexporter.operand_expressions_values_count,
           sqlexporter.operand_tuples___operands_values_count))
    log_message('Exporting completed in %s' % get_time_delta_string())
    # If running in batch mode, exit when done
    if batch_mode:
        if incomplete:
            # More functions remain: encode module id with the 0xFF marker.
            shiftedModule = (sqlexporter.db.module_id << 0x10) | 0xFF
            idc.Exit(shiftedModule)
        elif not sqlexporter.callgraph_only:
            shiftedModule = (sqlexporter.db.module_id << 0x10) | 0xFE
            idc.Exit(shiftedModule)
        else:
            idc.Exit(0)
def refreshitems(self):
    """Rebuild the chooser rows from the Mono bundled-assembly list.

    Prompts whether to auto-locate the bundle data (answering "No" uses
    the current ScreenEA instead), then formats one row per bundled
    assembly into self.items / self.items_data.

    Returns:
        True on success, False when nothing was found or an error occurred.
    """
    #print "refreshitems"
    self.items_data = []
    self.items = []
    try:
        # idc.AskYN: -1 = cancel, 0 = no, 1 = yes; treat cancel as the
        # default "yes" (auto-detect the data location).
        UseScreenEAInt = idc.AskYN(1, "是否自动获取数据位置?(否则使用ScreenEA)".decode('utf-8').encode(sys.getfilesystemencoding()))
        if UseScreenEAInt == -1:
            UseScreenEAInt = 1
        # "Yes" means auto-detect, i.e. do NOT use the screen EA.
        if UseScreenEAInt == 1:
            UseScreenEA = False
        else:
            UseScreenEA = True
        print "UseScreenEA:{}".format(UseScreenEA)
        self.tool = MKBundleTool()
        asms = self.tool.GetBundledAssemblyList(UseScreenEA)
        if not asms:
            return False
        for BundledAssemblyItem in asms:
            #print BundledAssemblyItem  (debug dump of all item fields)
            # Choose the pointer width for address formatting.
            if self.tool.Is64Bit:
                fstr = "0x%016X"
            else:
                fstr = "0x%08X"
            self.items_data.append(BundledAssemblyItem)
            self.items.append(["%d" % BundledAssemblyItem.Index,
                               fstr % BundledAssemblyItem.FileItemStructOffset,
                               fstr % BundledAssemblyItem.FileNameOffset,
                               fstr % BundledAssemblyItem.FileDataOffset,
                               fstr % BundledAssemblyItem.FileSize,
                               fstr % BundledAssemblyItem.FileSizeOffset,
                               fstr % BundledAssemblyItem.FileCompressedSizeOffset,
                               fstr % BundledAssemblyItem.FileCompressedSize,
                               BundledAssemblyItem.IsCompressed,
                               BundledAssemblyItem.IsGZip,
                               BundledAssemblyItem.IsME,
                               BundledAssemblyItem.FileName])
        return True
    except:
        # Broad catch: a chooser refresh must never propagate; log and
        # fail soft instead.
        traceback.print_exc()
        return False
class_name = derived_ctor_name.replace("::ctor", "") print("rename: %s %s 0x%x" % (category, class_name, derived_ctor_ea)) do_rename(category, derived_ctor_ea, class_name) derived_classes.append(derived_ctor_name) for derived_class in derived_classes: rename_derived_classes(category, derived_class) def rename_derived_classes_for_category(category): # type: (str) -> None base_ctor_name = "AI_%sBase::ctor" % category rename_derived_classes(category, base_ctor_name) STRUCT_SIZE = 0x10 first_time = idc.AskYN(0, "Is it the first time this script is being run?") for category, address, size in TABLES: for i in range(size): print("%s [%u/%u]" % (category, i + 1, size)) entry = idaapi.get_many_bytes(address + STRUCT_SIZE * i, STRUCT_SIZE) crc, padding, fn = struct.unpack('<IIQ', entry) name = aidef_crc.get(crc, "Unknown_%08x" % crc) if first_time: function_name = "AI_F_%s_%s" % (category, name) idc.MakeNameEx(fn, function_name, idc.SN_NOWARN) idc.SetType(fn, "void* makeHandler(void* param, sead::Heap* heap);") if "BL operator new" not in idc.GetDisasm( fn + 6 * 4) or idc.GetMnem(fn + 9 * 4) != "BL": continue
def OnPopupMenu (self, menu_id):
    """Dispatch the ROP-payload viewer's context-menu actions.

    Returns:
        True when the menu id was handled, False otherwise.
    """
    global ph
    global isArm
    if menu_id == self.menu_loadfromfile:
        fileName = idc.AskFile (0, "", "Import ROP binary")
        if fileName and ph.load_from_file (fileName):
            self.refresh ()
    elif menu_id == self.menu_savetofile:
        fileName = idc.AskFile (1, "", "Export ROP binary")
        if fileName and ph.save_to_file (fileName):
            print "payload saved to %s" % fileName
    elif menu_id == self.menu_jumpto:
        # Jump to the address stored in the payload line under the cursor.
        n = self.GetLineNo ()
        Jump (ph.get_value (n))
    elif menu_id == self.menu_autorec:
        ph.analyze ()
        self.refresh ()
    elif menu_id == self.menu_autorec2:
        # TODO: add stack-pointer dependent analysis algorithm for x86 :D
        Warning ("Not implemented yet")
    elif isArm and menu_id == self.menu_armPCtrace:
        # ARM-only: PC-trace based reconstruction.
        ph.traceArmPC ()
        self.refresh ()
    elif menu_id == self.menu_reset:
        if idc.AskYN (1, "Are you sure?") == 1:
            ph.reset_type ()
            self.refresh ()
    elif menu_id == self.menu_disasm:
        # Reuse the existing disassembly viewer if one was created before;
        # AttributeError on first use falls through to creating it.
        try:
            self.disasm
            self.disasm.refresh ()
            self.disasm.Show ()
        except:
            self.disasm = disasmviewer_t ()
            if self.disasm.Create ():
                self.disasm.Show ()
            else:
                del self.disasm
    elif menu_id == self.menu_toggle:
        self.toggle_item ()
    elif menu_id == self.menu_deleteitem:
        self.delete_item ()
    elif menu_id == self.menu_insertitem:
        self.insert_item ()
    elif menu_id == self.menu_edititem:
        self.edit_item ()
    elif menu_id == self.menu_copyitem:
        self.copy_item ()
    elif menu_id == self.menu_cutitem:
        self.cut_item ()
    elif menu_id == self.menu_pasteitem:
        self.paste_item ()
    elif menu_id == self.menu_findinsn:
        s = AskStr ("", "Find instruction(s)")
        if s:
            find (s, False)
    else:
        # Unknown menu id: report "not handled".
        return False
    return True
cpp_all = ''' #include "overrides.h" #include "ep.h" #include "GL/gl.h" #line 2 "%s" ''' % decomp.cpp_decomp_tag return decomp.run(externs, text_fns, cpp_filter, cpp_all, decompile) intro = ''' #include <stdlib.h> #include <stdio.h> #include <string.h> #include <math.h> #include <assert.h> #include <time.h> #include <GL/gl.h> #include <GL/glu.h> ''' if __name__ == '__main__': p = idc.AskYN(1, 'Run the decompiler?') if p == -1: print 'Aborting.' elif p == 1: print intro print run(decompile=True) else: run(decompile=False) # just load it (for interactive use/testing)
def unpickle(source_file=None,
             overwrite=False,
             ignore_segment_change=False,
             ignore_md5_mismatch=False,
             visual_merge=True):
    """
    Loads information from the given pickled file.

    Verifies the input-file MD5 and the segment layout (unless told to
    ignore them), then applies names/comments/structures. With
    visual_merge and not overwrite, non-conflicting updates are applied
    directly and conflicts are pushed into the IDB MERGE form. IDB-push
    hooks are disabled for the duration and restored afterwards.
    """
    if source_file is None:
        source_file = QtWidgets.QFileDialog.getOpenFileName()[0]
    # Remember the hook state so it can be restored in the finally block.
    hooks_enabled = idb_push.g_hooks_enabled
    try:
        print "Started unpickling at %s" % (time.ctime())
        idb_push.g_hooks_enabled = False
        with open(source_file, 'rb') as f:
            idb_data = cPickle.load(f)
        # verify MD5
        if idb_data[INPUT_FILE_MD5_FIELD] != idc.GetInputMD5(
        ) and not ignore_md5_mismatch:
            print "MD5 mismatch - unpickled file for %s but working on %s" % (
                idb_data[INPUT_FILE_MD5_FIELD], idc.GetInputMD5())
            return
        # check for segment change
        if not ignore_segment_change:
            # using iteritems since iterating on a dictionary returns only the dictionary keys
            if psida_common.get_segments() != idb_data[SEGMENTS]:
                if 1 != idc.AskYN(
                        0, SEGMENT_WARNING %
                        (format_segments(idb_data[SEGMENTS]),
                         format_segments(psida_common.get_segments()))):
                    # user replied No or Cancel
                    return
        # apply idb_data (unless overwrite==True, in which case
        # you just overwrite everything)
        if visual_merge and not overwrite:
            # apply non-conflicting updates
            name_conflicts = {}
            comment_conflicts = {}
            structure_conflicts = {}
            set_names(idb_data[NAMES_FIELD], overwrite, name_conflicts)
            set_all_comments(idb_data[COMMENTS_FIELD], overwrite,
                             comment_conflicts)
            set_all_structures(idb_data[STRUCTURES_FIELD], overwrite,
                               structure_conflicts)
            conflicts = {
                NAMES_FIELD: name_conflicts,
                COMMENTS_FIELD: comment_conflicts,
                STRUCTURES_FIELD: structure_conflicts
            }
            if len(name_conflicts) + len(comment_conflicts) + len(
                    structure_conflicts) == 0:
                # no conflicts need solving
                print "Unpickling complete at %s" % (time.ctime())
                return
            # Conflicts remain: (re)open the merge form for manual resolution.
            global g_form
            if g_form is not None:
                g_form.Close(idaapi.PluginForm.FORM_SAVE)
            g_form = IDBMergeForm()
            g_form.Show("IDB MERGE")
            populate_form_with_items(make_items(conflicts))
        else:
            # Unconditional apply (no conflict tracking).
            set_names(idb_data[NAMES_FIELD], overwrite)
            set_all_comments(idb_data[COMMENTS_FIELD], overwrite)
            idc.Refresh()
            idc.RefreshLists()
            print "Unpickling complete at %s" % (time.ctime())
    except:
        traceback.print_exc()
        raise
    finally:
        # Always restore the hook state captured on entry.
        idb_push.g_hooks_enabled = hooks_enabled
# -------------------------------------------------------------------------- def ProcessUiActions(actions, flags=0): """ @param actions: A string containing a list of actions separated by semicolon, a list or a tuple @param flags: flags to be passed to process_ui_action() @return: Boolean. Returns False if the action list was empty or execute_ui_requests() failed. """ # Instantiate a helper helper = __process_ui_actions_helper(actions, flags) return False if len(helper) < 1 else idaapi.execute_ui_requests((helper, )) # -------------------------------------------------------------------------- class print_req_t(object): def __init__(self, s): self.s = s def __call__(self): idaapi.msg("%s" % self.s) return False # Don't reschedule if idc.AskYN(1, ("HIDECANCEL\nDo you want to run execute_ui_requests() example?\n" "Press NO to execute ProcessUiActions() example\n")): idaapi.execute_ui_requests((print_req_t("Hello"), print_req_t(" world\n"))) else: ProcessUiActions("JumpQ;JumpName")