def run(self, arg):
    print("Running")
    PE = peutils_t()
    print("Image base is %016X" % PE.imagebase)
    print("Exporting functions...")
    filename = os.path.splitext(idc.GetIdbPath())[0] + ".sym"
    rawOffsetsFilename = os.path.splitext(idc.GetIdbPath())[0] + ".raw.sym"
    f = open(filename, 'w')
    rawOffsetsFile = open(rawOffsetsFilename, 'w')
    count = 0
    for address, name in Names():
        offset = address - PE.imagebase
        rawOffset = idaapi.get_fileregion_offset(address)
        if idc.GetFunctionFlags(address) != -1:
            size = idc.FindFuncEnd(address) - address
        else:
            size = 4
        #namesList.append((offset, name))
        count += 1
        f.write("%08X %08X;%s\n" % (offset, size, name))
        rawOffsetsFile.write("%08X %08X;%s\n" % (rawOffset, size, name))
    f.close()
    rawOffsetsFile.close()
    print("%d functions exported" % count)
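# A minimal round-trip sketch for the "%08X %08X;%s" records written above.
# parse_sym_line is a hypothetical helper, not part of the exporter; it
# assumes only the format string used by run().
def parse_sym_line(line):
    addr_and_size, _, name = line.rstrip("\n").partition(";")
    offset_hex, size_hex = addr_and_size.split()
    return int(offset_hex, 16), int(size_hex, 16), name

# parse_sym_line("00001000 0000002C;main") -> (4096, 44, 'main')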
def create_idb_for_worker(self, worker_id, recycle_idb_from_id=None):
    if recycle_idb_from_id is not None:
        idb_filename_for_export_old = get_local_idb_name(
            idc.GetIdbPath(), "_cache_export_%08i" % (recycle_idb_from_id),
            self.export_dir)
        if os.path.exists(idb_filename_for_export_old):
            idb_filename_for_export_new = get_local_idb_name(
                idc.GetIdbPath(), "_cache_export_%08i" % (worker_id),
                self.export_dir)
            if os.path.exists(idb_filename_for_export_new):
                os.remove(idb_filename_for_export_new)
            os.rename(idb_filename_for_export_old, idb_filename_for_export_new)
            remove_ida_temporary_files(idb_filename_for_export_new)
        else:
            copy_idb_to_local_file("_cache_export_%08i" % (worker_id),
                                   self.export_dir)
    else:
        # copy current idb to temp idb for export
        # check if file already exists
        idb_filename_for_export = get_local_idb_name(
            idc.GetIdbPath(), "_cache_export_%08i" % (worker_id),
            self.export_dir)
        if os.path.exists(idb_filename_for_export):
            os.remove(idb_filename_for_export)
        logger.debug("Copying IDB file %s" % idb_filename_for_export)
        idb_filename_for_export = copy_idb_to_local_file(
            "_cache_export_%08i" % (worker_id), self.export_dir,
            use_hardlink=(COPY_WITH_HARDLINKS and sys.platform == "linux2"))
def retrieve_reset(self, *args):
    if not idaapi.askyn_c(
            False,
            "All your local changes will be lost!\nDo you really want to proceed?"):
        return
    # disable all yaco hooks
    self.YaCoUI.unhook()
    # create a backup of current idb, e.g. "_bkp_Mon_Jun__1_12_00_00_2020"
    yatools.copy_idb_to_local_file(
        "_bkp_%s" % time.ctime().replace(" ", "_").replace(":", "_"))
    # delete all modified objects
    self.repo_manager.repo.checkout_head()
    # get reset
    self.repo_manager.fetch_origin()
    self.repo_manager.rebase_from_origin()
    original_idb_name = yatools.get_original_idb_name(idc.GetIdbPath())
    # remove current idb
    os.remove(idc.GetIdbPath())
    # recreate local idb
    shutil.copy(original_idb_name,
                yatools.get_local_idb_name(original_idb_name))
    # local should not be overwritten, so we have to close IDA brutally!
    idaapi.cvar.database_flags |= idaapi.DBFL_KILL
    idc.Warning("Force pull complete, you can restart IDA")
    idc.Exit(0)
def _on_new_branch_reply(self, repo, branch, _):
    self._plugin.core.branch = branch.uuid

    # Save the current database
    self._plugin.core.save_netnode()
    idc.save_database(idc.GetIdbPath(), 0)

    # Create the packet that will hold the database
    packet = UploadDatabase.Query(repo.hash, branch.uuid)
    inputPath = idc.GetIdbPath()
    with open(inputPath, 'rb') as inputFile:
        packet.content = inputFile.read()

    # Create the progress dialog
    text = "Uploading database to server, please wait..."
    progress = QProgressDialog(text, "Cancel", 0, len(packet.content))
    progress.setCancelButton(None)  # Remove cancel button
    progress.setModal(True)  # Set as a modal dialog
    windowFlags = progress.windowFlags()  # Disable close button
    progress.setWindowFlags(windowFlags & ~Qt.WindowCloseButtonHint)
    progress.setWindowTitle("Save to server")
    iconPath = self._plugin.resource('upload.png')
    progress.setWindowIcon(QIcon(iconPath))
    progress.show()

    # Send the packet to upload the file
    packet.upback = partial(self._progress_callback, progress)
    d = self._plugin.network.send_packet(packet)
    d.add_callback(partial(self._database_uploaded, repo, branch))
    d.add_errback(logger.exception)
def init(self, *args, **kwargs):
    if not is_enabled():
        print("YaCo: disabled")
        return idaapi.PLUGIN_SKIP
    try:
        # check if we are in a YaCo project
        input_filename = idc.GetIdbPath()
        if input_filename.count("_local.") > 0 and os.path.exists(".git"):
            print("YaCo: initializing")
            import_yaco_paths()
            import YaCo
            YaCo.start()
            return idaapi.PLUGIN_KEEP
        elif "_local." not in input_filename and os.path.exists(".git"):
            print("""
*******************************************************
WARNING: You have opened a database in a git project,
WARNING: but your database doesn't match a YaCo project.
WARNING: YaCo is disabled!
*******************************************************
""")
            return idaapi.PLUGIN_OK
        else:
            return idaapi.PLUGIN_KEEP
    except Exception as e:
        print("YaCo: error during initialization")
        print(traceback.format_exc())
        logger = logging.getLogger("YaCo")
        if logger is not None:
            try:
                logger.error("YaCo: error during initialization")
                logger.error(traceback.format_exc())
            except:
                pass
        raise e
def databaseFile(self):
    """Return the (full) path of the database file.

    Return Value:
        Path to the database file
    """
    return idc.GetIdbPath()
def LoadSavedAssemblers(seg_ea=None):
    assemblers = {}
    opty_dir = idc.GetIdbPath()
    opty_dir = opty_dir[:opty_dir.rfind(os.sep)] + os.sep + \
        "optimice_%s" % idc.GetInputFile() + os.sep
    if not os.path.isdir(opty_dir):
        return None
    if seg_ea is None:
        for fname in os.listdir(opty_dir):
            # accept lowercase hex too: SaveState() writes "%08x" filenames
            if re.match(r"Assembler_[0-9a-fA-F]+\.pickle\.zlib", fname) is not None:
                seg_ea = re.match(r"Assembler_([0-9a-fA-F]+)\.pickle\.zlib",
                                  fname).group(1)
                fp = open(opty_dir + fname, "rb")
                asm = pickle.loads(zlib.decompress(fp.read()))
                fp.close()
                assemblers[seg_ea] = asm
    else:
        fname = "Assembler_%08x.pickle.zlib" % (seg_ea)
        try:
            # binary mode: "r" would corrupt the zlib stream on Windows
            fp = open(opty_dir + fname, "rb")
            asm = pickle.loads(zlib.decompress(fp.read()))
            assemblers[seg_ea] = asm
            fp.close()
        except:
            return None
    return assemblers
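# Hedged usage sketch for LoadSavedAssemblers(). Note the asymmetry kept
# from the original: with seg_ea=None the dict is keyed by the hex string
# taken from each filename, with an explicit seg_ea it is keyed by the
# value passed in.
assemblers = LoadSavedAssemblers()
if assemblers is not None:
    for seg_ea, asm in assemblers.items():
        print("restored assembler state for segment %s" % seg_ea)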
def init(self):
    if not _G['inited']:
        if hasattr(Config, "debug") and Config.debug:
            Logger.setVerbose(True)
        HexraysPlugin()
        UpdateTypeAction("Ctrl-Shift-U").register().addMenu()
        RenameTypeAction().register().addMenu()
        SubclassTypeAction().register().addMenu()
        MapInterfacesAction("Ctrl-Shift-I").register().addMenu()
        ExploreAction("Ctrl-Shift-E").register().addMenu()
        ExportTypesAction().register().addMenu()
        TestAction("Ctrl-Shift-]").register().addMenu()
        IDAAction('Reload', "Ctrl-Shift-R").setProc(self.reload).register().addMenu()
        IDAAction('Verbose').setProc(self.verbose).register().addMenu()
        IDAAction('About').setProc(self.version).register().addMenu()
        _G['inited'] = True
    # run per-database init commands named after the IDB file
    doc = os.path.split(idc.GetIdbPath())[1].split('.')[0]
    inits = ["ida_init", "ida_init_" + doc.lower()]
    for y in inits:
        if not hasattr(Config, y):
            continue
        for x in getattr(Config, y).split(';'):
            x = x.strip()
            Logger.debug("initcmd: %s", x)
            try:
                exec(x, globals())
            except Exception as e:
                print("Error running init cmd %s: %s" % (x, str(e)))
    return idaapi.PLUGIN_KEEP
def exit(tests):
    idbname = re.sub(r'\.i(db|64)$', '_' + tests + '.i\\1', idc.GetIdbPath())
    if tests == 'yatest':
        YaCo.yaco.commit_cache()
    # save intermediate bases
    if debug:
        idc.SaveBase(idbname)
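# Worked example of the substitution above (plain re, no IDA required):
#   re.sub(r'\.i(db|64)$', '_yatest.i\\1', "sample.i64") -> "sample_yatest.i64"
#   re.sub(r'\.i(db|64)$', '_yatest.i\\1', "sample.idb") -> "sample_yatest.idb"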
def OnEditLine(self, n):
    # this is "Delete Segment"
    ans = idaapi.askyn_c(
        1,
        "HIDECANCEL\nAre you sure you want to delete segment and all optimized data from disk?")
    if ans == 1:
        opty_dir = idc.GetIdbPath()
        opty_dir = opty_dir[:opty_dir.rfind(os.sep)] + os.sep + \
            "optimice_%s" % idc.GetInputFile()
        print(opty_dir)
        if not os.path.isdir(opty_dir):
            print(">GUI_FunctionManager:OnEditLine - Error [%s] not a directory!" % opty_dir)
            return 0
        shutil.rmtree(opty_dir)
        print(">GUI_FunctionManager: Optimice directory deleted: [%s]" % opty_dir)
        idc.SegDelete(int(self.items[n][0], 16), 0)
        print(">GUI_FunctionManager: Optimice segment deleted: [%s]" % self.items[n][0])
        self.populate_items()
    return 0
def check_valid_cache_startup(self):
    logger.debug("check_valid_cache_startup")
    if "origin" not in self.repo.get_remotes():
        logger.debug("WARNING origin not defined: ignoring origin and master sync check!")
    else:
        if self.repo.get_commit("origin/master") != self.repo.get_commit("master"):
            message = "Master and origin/master don't point to the same commit, please update your master."
            logger.debug(message)
    if IDA_RUNNING is True:
        try:
            os.mkdir("cache/")
        except OSError:
            pass
        idbname = os.path.basename(idc.GetIdbPath())
        idbname_prefix = os.path.splitext(idbname)[0]
        idbname_extension = os.path.splitext(idbname)[1]
        if not idbname_prefix.endswith('_local'):
            local_idb_name = "%s_local%s" % (idbname_prefix, idbname_extension)
            if not os.path.exists(local_idb_name):
                copy_idb_to_local_file()
            if IDA_IS_INTERACTIVE:
                message = "To use YaCo you must name your IDB with a _local suffix. "
                message += "YaCo will create one for you.\nRestart IDA and open %s." % local_idb_name
                logger.debug(message)
                idaapi.cvar.database_flags |= idaapi.DBFL_KILL
                idc.Warning(message)
                idc.Exit(0)
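# The "_local" naming convention used above and in the other YaCo snippets,
# illustrated on a hypothetical database name:
#   sample.i64 -> sample_local.i64  (copy created at first startup)
# sample_local.i64 is then the database YaCo actually works on; the plugin
# init() snippets below only start when the IDB name contains "_local.".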
def ask_to_checkout_modified_files(self):
    modified_objects = ""
    checkout_head = False
    for modified_object in self.repo.get_modified_objects():
        # check if the modified file is the original idb
        original_idb = get_original_idb_name(idc.GetIdbPath())
        if original_idb == modified_object:
            # original idb modification detected, create a backup
            try:
                shutil.copy(
                    original_idb,
                    "%s_bkp_%s" % (original_idb,
                                   time.ctime().replace(" ", "_").replace(":", "_")))
            except Exception as e:
                traceback.print_exc()
                raise e
            checkout_head = True
        else:
            modified_objects += "%s\n" % modified_object
    if len(modified_objects) > 0:
        message = "%s\nhas been modified, this is not normal, do you want to checkout these files? "
        message += "(Rebasing will be disabled if you answer no)"
        if idaapi.askyn_c(True, message % modified_objects):
            self.repo.checkout_head()
        else:
            self.repo_auto_sync = False
            return
    if checkout_head:
        # checkout silently
        self.repo.checkout_head()
def init(self, *args, **kwargs):
    if not is_enabled():
        print("yaco: disabled")
        return idaapi.PLUGIN_SKIP
    root_dir = os.path.abspath(os.path.join(
        inspect.getsourcefile(lambda: 0), "..", "YaTools"))
    for path in ["YaCo", "bin"]:
        path = os.path.join(root_dir, path)
        print("yaco: using %s" % path)
        sys.path.append(path)
    input_filename = idc.GetIdbPath()
    if input_filename.count("_local.") > 0 and os.path.exists(".git"):
        print("yaco: initializing")
        start()
        if not yaco.is_started():
            return idaapi.PLUGIN_SKIP
        return idaapi.PLUGIN_KEEP
    if "_local." not in input_filename and os.path.exists(".git"):
        print("""
*******************************************************
WARNING: You have opened a database in a git project,
WARNING: but your database doesn't match a YaCo project.
WARNING: YaCo is disabled!
*******************************************************
""")
        return idaapi.PLUGIN_OK
    return idaapi.PLUGIN_KEEP
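# For reference (standard IDA plugin semantics, not defined in this snippet):
#   PLUGIN_SKIP - do not load the plugin for this database
#   PLUGIN_OK   - keep the plugin available, loaded on demand
#   PLUGIN_KEEP - keep the plugin resident for this database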
def exportLib(self, **kwargs):
    file = os.path.splitext(idc.GetInputFile())[0]
    path = os.path.split(idc.GetIdbPath())[0]
    idapath = idc.GetIdaDirectory()
    tilname = os.path.join(path, file + ".til")
    outfile = os.path.join(idapath, 'til', 'pc', file + ".til")
    shutil.copy(tilname, outfile)
    os.system(os.path.join(idapath, "tilib64.exe") + " -#- " + outfile)
def __init__(self, settings_filename):
    """
    Prepare for execution
    """
    SkelUtils.header()
    g_logger.info("[+] Init Skelenox")
    # Load settings
    self.skel_settings = SkelConfig(settings_filename)
    self.skel_conn = SkelConnection(self.skel_settings)
    # If having 3 idbs in your current path bothers you, change this
    self.crit_backup_file = idc.GetIdbPath()[:-4] + "_backup_preskel_.idb"
    self.backup_file = idc.GetIdbPath()[:-4] + "_backup_.idb"
    atexit.register(self.end_skelenox)
    g_logger.info("Backing up IDB before any intervention (_backup_preskel_)")
    idc.SaveBase(self.crit_backup_file, idaapi.DBFL_TEMP)
    g_logger.info("Creating regular backup file IDB (_backup_)")
    idc.SaveBase(self.backup_file, idaapi.DBFL_TEMP)
    self.last_saved = time.time()
    if self.skel_hooks is not None:
        self.skel_hooks.cleanup_hooks()
    if not self.skel_conn.get_online():
        g_logger.error("Cannot get online =(")
    # Synchronize the sample
    self.skel_sync_agent = SkelSyncAgent()
    self.skel_sync_agent.setup_config(settings_filename)
    # setup hooks
    self.skel_hooks = SkelHooks(self.skel_conn)
    # setup UI
    self.skel_ui = SkelUI(settings_filename)
    # setup skelenox terminator
    self.setup_terminator()
    g_logger.info("Skelenox init finished")
def get_idb_file_path():
    """
    TODO: there should be another, better approach
    """
    # idaapi.get_input_file_path() returns the original path of the input
    # file recorded in the idb, even if the idb was copied to another
    # directory, so derive the directory from the idb path itself
    file_path = idc.GetIdbPath()
    if file_path is None or len(file_path) == 0:
        return None
    return os.path.dirname(file_path)
def check_yadb(self):
    path = idc.GetIdbPath()
    path = re.sub(r'\w+_local\.i(db|64)$', 'database/database.yadb', path)
    import yadb.Root
    data = None
    with open(path, 'rb') as fh:
        data = bytearray(fh.read())
    root = yadb.Root.Root.GetRootAsRoot(data, 0)
    read = "export." + str(idaapi.IDA_SDK_VERSION)

    def tostr(index):
        return root.Strings(index).decode()

    def getname(version):
        name = version.Username()
        return tostr(name.Value()) if name else ""

    data = {}
    versions = [
        ("bin", root.Binaries, root.BinariesLength(), lambda x: x.Address()),
        ("str", root.Structs, root.StructsLength(), lambda x: getname(x)),
        ("stm", root.StructMembers, root.StructMembersLength(),
         lambda x: (getname(data[x.ParentId()]), x.Address())),
        ("enu", root.Enums, root.EnumsLength(), lambda x: getname(x)),
        ("enm", root.EnumMembers, root.EnumMembersLength(),
         lambda x: (getname(data[x.ParentId()]), x.Address())),
        ("seg", root.Segments, root.SegmentsLength(), lambda x: x.Address()),
        ("chk", root.SegmentChunks, root.SegmentChunksLength(), lambda x: x.Address()),
        ("fun", root.Functions, root.FunctionsLength(), lambda x: x.Address()),
        ("stk", root.Stackframes, root.StackframesLength(), lambda x: x.Address()),
        ("stm", root.StackframeMembers, root.StackframeMembersLength(),
         lambda x: (data[x.ParentId()].Address(), x.Address())),
        ("ref", root.ReferenceInfos, root.ReferenceInfosLength(), lambda x: x.Address()),
        ("cod", root.Codes, root.CodesLength(), lambda x: x.Address()),
        ("dat", root.Datas, root.DatasLength(), lambda x: x.Address()),
        ("bbk", root.BasicBlocks, root.BasicBlocksLength(), lambda x: x.Address()),
    ]
    with open(read, "wb") as fh:
        fh.write("objects: %d\n" % root.ObjectsLength())
        fh.write("systems: %d\n" % root.SystemsLength())
        for (prefix, getter, size, getkey) in versions:
            fh.write("\n%s: %d\n" % (prefix, size))
            values = []
            for it in iterate(getter, size):
                data[it.ObjectId()] = it
                name = getname(it)
                prototype = tostr(it.Prototype())
                # workaround broken usercall arguments on 32-bit binaries in ida64
                if "__usercall" in prototype:
                    prototype = re.sub(r"@<r(\w)x>", r"@<e\1x>", prototype)
                # remove ids from prototypes which are unstable
                prototype = re.sub(r"/\*%(?:[^%]+)%\*/", "", prototype)
                # clean-up artifacts after prototype id removal
                prototype = prototype.replace(" ,", ",")
                prototype = prototype.replace(" )", ")")
                prototype = prototype.replace("  ", " ")
                prototype = prototype.replace(" []", "[]")
                values.append((getkey(it), prefix, it.Address(), name, prototype))
            values.sort(cmp=lambda x, y: cmp(x[0], y[0]))
            for (key, prefix, ea, name, prototype) in values:
                line = "%s_%-2x %s %s" % (prefix, ea, name, prototype)
                fh.write(line.rstrip() + "\n")
    self.check_golden(read)
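# Worked example of the __usercall workaround above: the 64-bit register
# annotations are rewritten to their 32-bit names in place,
#   re.sub(r"@<r(\w)x>", r"@<e\1x>", "int __usercall f@<rax>(int a@<rbx>)")
#   -> "int __usercall f@<eax>(int a@<ebx>)"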
def on_import_clicked(self):
    dir_path = os.path.dirname(idc.GetIdbPath())
    file_path, _ = QFileDialog.getOpenFileName(
        self, 'Select File to Import', dir_path, 'Dump file (*.json)')
    if not file_path:
        return
    self._import_dialog = ImportDialog(file_path, self)
    self._import_dialog.show()
def __init__(self):
    if PROFILE_YACO_LOADING:
        self.pr = cProfile.Profile()
        self.pr.enable()

    """
    Create and initialize native subsystem
    """
    name, ext = os.path.splitext(idc.GetIdbPath())
    ya.StartYatools(name)

    logging.basicConfig()
    global logger
    logger = logging.getLogger("YaCo")
    logger.setLevel(LOGGING_LEVEL)
    logger.propagate = True
    for h in logger.handlers:
        h.setLevel(logging.WARN)
    handler = YaLogHandler()
    handler.setLevel(LOGGING_LEVEL)
    logger.addHandler(handler)
    # logger.setLevel(logging.DEBUG)

    def set_console_level_(level):
        for h in logger.handlers:
            if h not in [handler]:
                h.setLevel(level)

    """
    Create and initialize Python subsystem
    """
    idaapi.msg("YaCo %s\n" % YACO_VERSION)
    self.hash_provider = ya.YaToolsHashProvider()
    self.repo_manager = repository.YaToolRepoManager(idc.GetIdbPath())
    self.repo_manager.check_valid_cache_startup()
    self.ida_hooks = hooks.Hooks(self.hash_provider, self.repo_manager)
    self.YaCoUI = hooks.YaCoUI_Hooks(self)
def get_db_folder(self, _local, _create):
    if _local == 1:
        # local db
        _folder = os.path.dirname(idc.GetIdbPath()) + os.sep + 'SD_local_DB'
    else:
        # global db
        _folder = os.path.dirname(os.path.realpath(__file__)) + os.sep + "SnippetDetector_DB"
    if not os.path.isdir(_folder):
        if _create:
            # First saved snippet, SnippetDetector database folder doesn't exist. Create it!
            # (mode must be the octal literal 0o755, not decimal 755)
            os.makedirs(_folder, 0o755)
    return _folder
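# Why the octal literal matters in the os.makedirs call above:
#   0o755 == 493     -> rwxr-xr-x, the intended permissions
#   755   == 0o1363  -> what a decimal 755 would actually request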
def save_call_graph():
    cur = idc.MinEA()
    end = idc.MaxEA()
    # D:\source\test1\test_dir\p_16
    #path_dir = os.path.dirname(idc.GetIdbPath())
    #p_dir = path_dir.split('\\')[-1]
    #elf_name = os.path.basename(idc.GetIdbPath()).split('.')[-2]
    i64_file = idc.GetIdbPath()
    first_dot = i64_file.find('.')
    no_dot_file = i64_file[:first_dot]
    gdl_path = no_dot_file + '.gdl'
    idc.GenCallGdl(gdl_path, 'Call Gdl', idc.CHART_GEN_GDL)
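# The find('.') slice above truncates at the FIRST dot anywhere in the path,
# so a directory such as D:\source\test.dir\p_16.i64 would break it (the same
# pattern appears in save_function_name_addr below). A safer sketch using
# os.path.splitext; idb_sibling_path is a hypothetical helper:
def idb_sibling_path(ext):
    # ext like '.gdl' or '.funcs'
    return os.path.splitext(idc.GetIdbPath())[0] + ext

# idb_sibling_path('.gdl') on D:\source\test1\test_dir\p_16.i64
# -> D:\source\test1\test_dir\p_16.gdl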
def __init__(self, sim):
    '''
    Class constructor.

    @sim - idasimulator_t class instance.

    Returns None.
    '''
    self.cfg = None
    self.sim = sim
    self.idb = idc.GetIdbPath()
    self.confile = os.path.join(idaapi.get_user_idadir(), self.CONFIG_FILE)
def SaveState(self):
    opty_dir = idc.GetIdbPath()
    opty_dir = opty_dir[:opty_dir.rfind(os.sep)] + os.sep + \
        "optimice_%s" % idc.GetInputFile()
    if not os.path.isdir(opty_dir):
        os.mkdir(opty_dir)
    save_fname = "%s%sAssembler_%08x.pickle.zlib" % (opty_dir, os.sep, self.segment_start)
    fw = open(save_fname, "wb+")
    s1 = pickle.dumps(self)
    fw.write(zlib.compress(s1))
    fw.close()
    print(">Assembler:SaveState - File [%s] saved" % (save_fname))
def load_metadata():
    global metadata
    print('load_metadata')
    dir = os.path.dirname(idc.GetIdbPath())
    metafile = dir + '/' + 'global-metadata.dat'
    if not os.path.exists(metafile):
        print(' File not found. %s' % metafile)
        return
    metadata = ida_il2cpp_metadata.IL2CppMetaData(metafile)
def launch_worker(self, create_idb=True, recycle_idb_from_id=None):
    worker_id = self.next_worker_id
    self.next_worker_id += 1
    idb_filename_for_export = get_local_idb_name(
        idc.GetIdbPath(), "_cache_export_%08i" % worker_id, self.export_dir)
    if create_idb:
        self.create_idb_for_worker(worker_id, recycle_idb_from_id)
    logger.debug("Running job %i." % worker_id)
    idaq = sys.executable
    if sys.platform == "linux2":
        idaq = "idaq"
        if idc.GetIdbPath().endswith(idaapi.IDB_EXT64):
            idaq = "idaq64"
        idaq_path = os.path.abspath(
            os.path.join(os.path.dirname(inspect.getsourcefile(idaapi)), '..'))
        idaq = os.path.join(idaq_path, idaq)
    ida_args = []
    if self.disable_plugin:
        ida_args += ["-Oyaco:disable_plugin"]
    if not debug:
        ida_args += ["-A"]
    cmd = exec_ida.Exec(idaq, os.path.abspath(idb_filename_for_export), *ida_args)
    cmd.set_idle(True)
    script = os.path.abspath(os.path.join(__file__, "..", "export_all.py"))
    cmd.with_script(script, "--slave", self.bin_dir)
    logger.info(str(cmd))
    cmd.start()
    process_conn = self.listener.accept()
    self.connection_fds[worker_id] = process_conn.fileno()
    self.connections[process_conn.fileno()] = (worker_id, process_conn, cmd)
def copy_idb_to_local_file(suffix=None, subdir=None, use_hardlink=False):
    local_file_name = get_local_idb_name(idc.GetIdbPath(), suffix)
    if subdir is not None:
        (head, tail) = os.path.split(local_file_name)
        local_file_name = os.path.join(head, subdir, tail)
        (head, tail) = os.path.split(local_file_name)
        if os.path.exists(head) is False:
            os.mkdir(head)
    if use_hardlink:
        (idb_dir, idb_name) = os.path.split(idc.GetIdbPath())
        original_idb_name = os.path.splitext(idb_name)[0]
        new_idb_name = os.path.splitext(local_file_name)[0]
        (head, tail) = os.path.split(local_file_name)
        logger.info("looking for copy-possible files in %s" % head)
        for f in os.listdir(head):
            (list_file_name, list_file_ext) = os.path.splitext(f)
            logger.info(
                "checking if %s:%s is to be copied to %s as source name" %
                (list_file_name, list_file_ext, original_idb_name))
            if (list_file_name == original_idb_name and
                    (list_file_ext in set([".nam", ".til"]) or
                     (list_file_ext.startswith(".id") and list_file_ext[-1:].isdigit()))):
                new_name = os.path.join(idb_dir, new_idb_name + list_file_ext)
                f = os.path.join(idb_dir, f)
                logger.info("Linking %s to %s" % (f, new_name))
                try:
                    os.remove(new_name)
                except:
                    pass
                os.system("/bin/cp --reflink=auto %s %s" % (f, new_name))
    else:
        idc.SaveBase(local_file_name)
    remove_ida_temporary_files(local_file_name)
    return local_file_name
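# Hedged alternative for the os.system copy above: the unquoted "%s" breaks
# on paths containing spaces. A sketch of the same reflink copy with proper
# argument passing (assumes a coreutils cp that understands --reflink):
#   import subprocess
#   subprocess.check_call(["/bin/cp", "--reflink=auto", f, new_name])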
def store_idb(self):
    ssid = None
    try:
        ssid = self.cfg.ssid
    except AttributeError:
        # this is the first store
        pass
    if not ssid:
        ssid = '0'
        p = os.path.join(os.getcwd(), idc.GetInputFile())
        tag = 'init'
    elif self.session_is_exec(ssid):
        ssid = '0'
        p = idc.GetIdbPath()
    else:
        p = idc.GetIdbPath()
    if p == idc.GetIdbPath():
        tag = idc.AskStr('tag', 'Please give it a name')
        if not tag:
            return
    with open(p, 'rb') as f:
        data = f.read()
    h = hashlib.sha256(data).hexdigest()
    data = {'ssid': ssid, 'uid': self.cfg.uid, 'hash': h, 'tag': tag}
    with open(p, 'rb') as fd:
        r = self.cmd('create_idb', data=data, files={'data': fd}).json()
    if r['session'] != ssid:
        self.cfg.ssid = r['session']
    print(self.ch)
    self.ch.start()
def init(self):
    msg("[IReal]: Init done\n")
    msg("[IReal]: Waiting for auto analysis\n")
    constants.create_general_config_file()
    constants.create_config_file()
    shared.BASE_URL = constants.get_data_from_config_file("server")
    shared.LOG = constants.get_data_from_config_file("log")
    if idc.GetIdbPath() and ida_auto.auto_is_ok():
        log("Start")
        if not shared.MASTER_PAUSE_HOOK:
            shared.PAUSE_HOOK = False
        else:
            shared.PAUSE_HOOK = True
        if shared.USERID != -1 and shared.IS_COMMUNICATION_MANAGER_STARTED:
            # started
            if not shared.PAUSE_HOOK:
                pass_to_manager(StartIDAEvent())
            constants.send_data_to_window(
                shared.COMMUNICATION_MANAGER_WINDOW_ID,
                constants.CHANGE_PROJECT_ID,
                json.dumps({
                    "project-id": shared.PROJECT_ID,
                    "need-to-pull": shared.MASTER_PAUSE_HOOK
                }))
            constants.send_data_to_window(
                shared.COMMUNICATION_MANAGER_WINDOW_ID,
                constants.CHANGE_USER,
                json.dumps({
                    "username": shared.USERNAME,
                    "id": shared.USERID,
                    "token": shared.USER_TOKEN
                }))
            constants.send_data_to_window(
                shared.COMMUNICATION_MANAGER_WINDOW_ID,
                constants.CHANGE_BASE_URL,
                json.dumps({"url": shared.BASE_URL}))
        self.idb_hook = LiveHook()
        self.ui_hook = ClosingHook()
        self.idp_hook = LiveHookIDP()
        self.view_hook = CursorChangeHook()
        self.idb_hook.hook()
        self.ui_hook.hook()
        self.idp_hook.hook()
        self.view_hook.hook()
        self.hook()
    return idaapi.PLUGIN_KEEP
def save_function_name_addr():
    # D:\source\test1\test_dir\p_16
    # .i64
    i64_file = idc.GetIdbPath()
    first_dot = i64_file.find('.')
    no_dot_file = i64_file[:first_dot]
    funcs_path = no_dot_file + '.funcs'
    #funcs_path = idc.GetIdbPath()[:-4] + '.funcs'
    myfile = open(funcs_path, 'w')
    ea = BeginEA()
    for funcea in Functions(SegStart(ea), SegEnd(ea)):
        functionName = GetFunctionName(funcea)
        myfile.write(hex(funcea) + ' ' + functionName + '\n')
        #print hex(funcea), functionName
    myfile.close()
def guess_filepath(self):
    filepath = self.current_config.binary_filepath
    if os.path.isfile(filepath):
        return filepath
    # try to use idaapi.get_input_file_path
    filepath = idaapi.get_input_file_path()
    if os.path.isfile(filepath):
        return filepath
    # get_input_file_path returns the file path from the IDB, which may not
    # exist locally if the IDB has been moved (e.g. send idb+binary to
    # another analyst); swap the extension rather than str.replace, which
    # could also rewrite an "idb" substring in a directory name
    filepath = os.path.splitext(idc.GetIdbPath())[0] + '.exe'
    if os.path.isfile(filepath):
        return filepath
    # give up
    return None
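# Worked example of the extension swap used above:
#   os.path.splitext("/samples/sample.idb")[0] + ".exe" -> "/samples/sample.exe"
# and it also handles .i64 databases, which contain no "idb" substring at all.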