def run(self): try: logger.debug("Starting up") dbFile = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', 'shellcode_hashes', 'sc_hashes.db')) logger.debug('Trying default db path: %s', dbFile) if not os.path.exists(dbFile): if using_ida7api: dbFile = idc.AskFile(0, "*.db", "Select shellcode hash database") else: dbFile = idaapi.ask_file(False, "*.db", "Select shellcode hash database") if (dbFile is None) or (not os.path.isfile(dbFile)): logger.debug("No file select. Stopping now") return self.dbstore = DbStore(dbFile) logger.debug("Loaded db file: %s", dbFile) if QT_AVAILABLE: self.launchGuiInput() else: self.launchManualPrompts() searcher = ShellcodeHashSearcher(self.dbstore, self.params) logger.debug('Starting to run the searcher now') searcher.run() logger.debug("Done") except RejectionException: logger.info('User canceled action') except Exception, err: logger.exception("Exception caught: %s", str(err))
def load_nids(directory, library):
    """Load NID definitions for *library* from the IDA install directory.

    Returns a dict (space-delimited csv, AEROLIB) or a list of stripped
    lines (any other library). When the default file is missing, the user
    is prompted for a replacement, which is then cached back into place.
    Returns an empty dict/list when nothing could be loaded (BUGFIX: the
    original left NIDS unbound and raised NameError in that case).
    """
    def _parse(database):
        # AEROLIB ships as space-delimited "name nid" rows
        if library == AEROLIB:
            return dict(row for row in csv.reader(database, delimiter=' '))
        return [row.strip() for row in database]

    nids = {} if library == AEROLIB else []
    path = '%s/%s/%s' % (idc.idadir(), directory, library)
    try:
        with open(path) as database:
            nids = _parse(database)
    except IOError:
        retry = idaapi.ask_file(0, '%s|*.csv|All files (*.*)|*.*', 'Please gimme your %s file' % (library))
        if retry is not None:
            try:
                with open(retry) as database:
                    nids = _parse(database)
                # cache the user-provided file at the default location
                shutil.copy2(retry, path)
            # narrowed from a bare except: I/O, copy and csv-parse failures
            except (IOError, OSError, ValueError, csv.Error):
                print('Ok, no NIDs for you!')
        else:
            print('Ok, no NIDs for you!')
    return nids
def OnButtonPress(self, code=0):
    """Dispatch a button press to its handler by button code."""
    if code == 0:
        # load a saved query
        tbq_path = idaapi.ask_file(False, "*.tbq", "Load hxtb query from file...")
        if tbq_path:
            self._handle_btn_load_tbq_file(tbq_path)
        return
    if code == 1:
        # save the current query
        tbq_path = idaapi.ask_file(True, "*.tbq", "Save hxtb query to file...")
        if tbq_path:
            self._handle_btn_save_tbq_file(tbq_path)
        return
    if code == 2:
        self._handle_btn_run_query()
    elif code == 3:
        self._handle_btn_new()
    else:
        idaapi.warning("wtf?")
def slot_export_to_x64dbg_script(self):
    """Export capa results as an x64dbg script (comments + bookmarks)."""
    if not self.doc:
        idaapi.info("No capa results to export.")
        return
    filename = idaapi.ask_file(
        True,
        os.path.splitext(ida_nalt.get_root_filename())[0] + ".x64dbg.txt",
        "Choose file")
    if not filename:
        return
    if os.path.exists(filename) and 1 != idaapi.ask_yn(
            1, "File already exists. Overwrite?"):
        return
    Tag_file = self.Parse_json(self.doc)
    Basename = os.path.splitext(ida_nalt.get_root_filename())[0]
    Base_define = "$base=" + "\"" + Basename + ":base" + "\""
    Xdbg_script = Base_define + "\n"
    # PERF: Parse_json yields "RVA;comment" lines; split once instead of
    # re-splitting the whole blob on every index (was O(n^2)). The last
    # element after split("\n") is empty and is skipped, as before.
    lines = Tag_file.split("\n")
    for line in lines[:len(lines) - 1]:
        parts = line.split(';')
        rva, comment = parts[0], parts[1]
        # keep the original 226-char comment truncation for x64dbg
        Xdbg_script += "cmt $base+" + rva + "," + "\"" + comment[:226] + "\"" + "\n"
        Xdbg_script += "bookmark $base+" + rva + "\n"
    # BUGFIX: the original leaked the file handle via open(...).write(...)
    with open(filename, "w") as f:
        f.write(Xdbg_script)
def getInputFilepath_ida7():
    """Return the idb's input-file path, prompting the user when the stored
    path no longer exists.

    Returns None if the user cancels. Updates the filepath in the idb on
    success.
    """
    filePath = idc.get_input_file_path()
    if not os.path.exists(filePath):
        # stored path is stale (idb moved to another machine, etc.)
        print 'IDB input file not found. Prompting for new one: %s' % filePath
        filePath = idaapi.ask_file(False, '*.*', 'Enter path to idb input file')
        if filePath is not None:
            # persist the corrected path back into the idb
            idc.set_root_filename(filePath)
    return filePath
def run(self, arg):
    """Prompt for an ASM file and (re)open the viewer on it."""
    if self.view:
        self.Close()
    asm_path = idaapi.ask_file(0, "*.asm", "Select ASM file to view")
    if not asm_path:
        return
    self.view = asmview_t()
    # only show the viewer when creation succeeded
    if self.view.Create(asm_path):
        self.view.Show()
def dump_data_to_file(fName, data):
    """Ask the user for a destination (defaulting next to the input binary)
    and write *data* there as raw bytes."""
    default_path = os.path.join(
        os.path.dirname(idaapi.get_input_file_path()), fName)
    dump_path = idaapi.ask_file(1, default_path, "*.dump")
    if not dump_path:
        return
    try:
        with open(dump_path, "wb") as out:
            out.write(data)
        plg_print("Dump %d bytes to file %s successed" % (len(data), dump_path))
    except IOError as err:
        plg_print(str(err))
def get_pat_file():
    """Ask the user for a .pat output path, defaulting to <input>.pat.

    Returns the chosen filename, or None when the user cancels.
    """
    logger = logging.getLogger("idb2pat:get_pat_file")
    root, _ = os.path.splitext(idc.get_input_file_path())
    filename = idaapi.ask_file(1, root + ".pat", "Enter the name of the pattern file")
    if filename is None:
        logger.debug("User did not choose a pattern file")
        return None
    return filename
def OnKeydown(self, vkey, shift):
    """Handle key presses in the custom viewer.

    ESC closes, ENTER jumps to the address under the cursor, G prompts for
    a target address, E edits the current line, L/S load/save the line db.
    Returns True when the key was handled, False otherwise.
    """
    if vkey == 27:  # ESC
        self.Close()
    elif vkey == ord('\r'):  # ENTER: jump to the word under the cursor
        ea = self.GetCurrentWord()
        try:
            ea = int(ea, base=0)
            idc.Jump(ea)
        # BUGFIX: narrowed from a bare `except:` (which also swallowed
        # KeyboardInterrupt/SystemExit); only conversion failures expected
        except (ValueError, TypeError):
            print("Cannot jump to '%s'" % (ea))
    elif vkey == ord('G'):  # go to an address entered by the user
        n = self.GetLineNo()
        if n is not None:
            v = idc.AskLong(n, "Where to go?")
            if v:
                self.Jump(v)
    elif vkey == ord('E'):  # edit the current line in place
        idx = self.GetLineNo()
        if idx is not None:
            l = self.lines[idx]
            new_l = idc.AskStr(l, 'Insert line:')
            if new_l is not None:
                self.lines[idx] = new_l
                self.EditLine(idx, new_l)
                self.Refresh()
    elif vkey == ord('L'):  # load lines from a json file
        path = idaapi.ask_file(0, '*.json', 'Insert lines source')
        self.load(path)
    elif vkey == ord('S'):  # save lines to a json file
        path = idaapi.ask_file(1, '*.json', 'Insert json path to save db to')
        self.save(path)
    else:
        return False
    return True
def export_json(self):
    """
    export capa results as JSON file
    """
    if not self.doc:
        idaapi.info("No capa results to export.")
        return
    path = idaapi.ask_file(True, "*.json", "Choose file")
    # BUGFIX: the original passed a cancelled selection (None/"") straight
    # to os.path.exists/open, raising TypeError; bail out like the sibling
    # export slots do.
    if not path:
        return
    if os.path.exists(path) and 1 != idaapi.ask_yn(1, "File already exists. Overwrite?"):
        return
    with open(path, "wb") as export_file:
        export_file.write(
            json.dumps(self.doc, sort_keys=True, cls=capa.render.CapaJsonObjectEncoder).encode("utf-8")
        )
def ida_generate_db(db_path=None):
    """Serialize segments, functions, labels, heads and xrefs of the current
    idb into a gzip'd pickle at *db_path* (prompting for a path when None).
    Raises when no path is provided."""
    import time

    if db_path is None:
        db_path = idaapi.ask_file(1, "*.geckoidb", "Enter the DB's path")
        if not db_path:
            raise Exception("Invalid DB path")

    db = {
        'segments': [],
        'functions': [],
        'labels': {},
        'heads': [],
        'code_references': [],
        'data_references': [],
    }

    t0 = time.time()
    print('Exporting segments')
    db['segments'].extend(ida_iter_segments())
    print('Exporting functions')
    db['functions'].extend(ida_iter_functions())
    print('Exporting labels')
    for ea, name in ida_iter_names():
        # TODO: assert name not in db['labels']
        db['labels'][name] = ea
    print('Exporting heads + xrefs')
    for head in ida_iter_heads():
        db['heads'].append(head)
        # TODO: Should i also do XrefsFrom?
        db['code_references'].extend(
            ReferenceTuple(frm=head, to=ref)
            for ref in idautils.CodeRefsFrom(head, 1))
        # TODO: Should i also do DataRefsFrom?
        db['data_references'].extend(
            ReferenceTuple(frm=head, to=ref)
            for ref in idautils.DataRefsFrom(head))
    print('Finished processing in ', int(time.time() - t0), 'seconds')

    t0 = time.time()
    print('Storing to DB')
    with _gzip.open(db_path, 'wb') as f:
        # protocol=1 kept for compatibility with existing .geckoidb files
        _pickle.dump(db, f, protocol=1)
    print('Finished storing DB in ', int(time.time() - t0), 'seconds')
def save_program_analysis(self):
    """Serialize the current capa analysis document to a user-chosen JSON file."""
    if not self.doc:
        idaapi.info("No program analysis to save.")
        return
    serialized = json.dumps(
        self.doc, sort_keys=True, cls=capa.render.json.CapaJsonObjectEncoder
    ).encode("utf-8")
    path = idaapi.ask_file(True, "*.json", "Choose file to save capa program analysis JSON")
    if path:
        write_file(path, serialized)
def activate(self, ctx):
    """Prompt for a PDB file and wire up the DIA-backed plugin actions.

    Returns 1 on success, 0 when cancelled or initialization fails.
    """
    prompt_args = (False, "*.pdb", "Enter path to PDB file")
    try:
        pdbpath = idaapi.ask_file(*prompt_args)
    except AttributeError:
        # pre-IDA7 API fallback
        pdbpath = idc.AskFile(*prompt_args)
    if pdbpath is None:
        return 0
    if not ali_plugin.init_dia(inbin_path=pdbpath):
        return 0
    if not ali_plugin.attach_actions():
        return 0
    return 1
def slot_export_to_tag_file(self):
    """Export capa results as an x64dbg-style .tag file."""
    if not self.doc:
        idaapi.info("No capa results to export.")
        return
    Tag_file = self.Parse_json(self.doc)
    filename = idaapi.ask_file(
        True,
        os.path.splitext(ida_nalt.get_root_filename())[0] + ".tag",
        "Choose file")
    if not filename:
        return
    if os.path.exists(filename) and 1 != idaapi.ask_yn(
            1, "File already exists. Overwrite?"):
        return
    # BUGFIX: use a context manager instead of open(...).write(...), which
    # leaked the handle and relied on GC to flush/close it
    with open(filename, "w") as f:
        f.write(Tag_file)
def slot_export_pickle(self):
    """
    export capa results as pickled document (.capa file)
    """
    if not self.doc:
        idaapi.info("No capa results to export.")
        return
    default_name = os.path.splitext(ida_nalt.get_root_filename())[0] + ".capa"
    path = idaapi.ask_file(True, default_name, "Choose file")
    if not path:
        return
    overwrite_declined = os.path.exists(path) and 1 != idaapi.ask_yn(
        1, "File already exists. Overwrite?")
    if overwrite_declined:
        return
    with open(path, "wb") as export_file:
        # -1 selects the highest available pickle protocol
        pickle.dump(self.doc, export_file, -1)
def slot_import_pickle(self):
    """Load a previously exported .capa pickle and render it in the view."""
    # BUGFIX: this is an *open* dialog; the original passed True, which
    # shows a save-file dialog instead
    path = idaapi.ask_file(False, "*.capa", "Choose file")
    if not path:
        return
    if not os.path.exists(path):
        msg('capa: File does not exists !')
        return
    with open(path, "rb") as import_file:
        self.range_model_proxy.invalidate()
        self.search_model_proxy.invalidate()
        self.model_data.reset()
        self.model_data.clear()
        self.disable_controls()
        self.set_view_status_label("Loading...")
        success = False
        act = True
        try:
            # NOTE(security): pickle.load executes arbitrary code; only load
            # files produced by this plugin itself
            self.doc = pickle.load(import_file)
            # warn when the results were produced against another imagebase
            if self.doc["meta"]["analysis"][
                    "base_address"] != ida_nalt.get_imagebase():
                if 0 == idaapi.ask_yn(
                        1, "Imagebase is not match! Continue to load ?"):
                    act = False
        except Exception as e:
            act = False
            logger.error("Failed to load capa results (error: %s)", e)
        if act:
            try:
                self.model_data.render_capa_doc(self.doc)
                self.render_capa_doc_mitre_summary()
                self.enable_controls()
                success = True
            except Exception as e:
                logger.error("Failed to render results (error: %s)", e)
        self.reset_view_tree()
        if not success:
            self.set_view_status_label("Click Analyze to get started...")
            logger.info("Loading Analysis failed.")
        else:
            logger.info("Loading Analysis completed.")
def StartDump(self):
    """Dump a memory range to a file chosen by the user.

    self.start/self.endorlen are hex strings; dumptype 0 means
    (start, length), dumptype 1 means (start, end address).
    Returns 1 on success, -1 on error/cancel.
    """
    self.filepath = idaapi.ask_file(1, "*.dump", "save dump file")
    # BUGFIX: the original passed a cancelled selection (None) to open()
    if not self.filepath:
        return -1
    start_ea = self.getHexNum(self.start)
    # compute the byte count; the two original branches were identical
    # except for this line (and `len` shadowed the builtin)
    if self.dumptype == 0:
        size = self.getHexNum(self.endorlen)
    elif self.dumptype == 1:
        size = self.getHexNum(self.endorlen) - start_ea
    else:
        return -1  # unknown dump type
    if not idaapi.is_loaded(start_ea) or not idaapi.is_loaded(start_ea + size):
        idaapi.warning("arrary is out of bound")
        return -1
    if size <= 0:
        idaapi.warning("len is <= 0")
        return -1
    print("start read bytes")
    self.Close(0)
    idaapi.show_wait_box("read bytes")
    self.memdata = idaapi.get_bytes(start_ea, size)
    print("read bytes end")
    idaapi.hide_wait_box()
    # context manager so the handle is closed even if write fails
    with open(self.filepath, 'wb') as fp:
        fp.write(self.memdata)
    idaapi.msg("save:" + self.filepath)
    return 1
def OnCommand(self, cmd_id):
    """Export the recovered class hierarchy as a Graphviz DOT file."""
    if self.cmd_dot != cmd_id:
        return
    fname = ask_file(1, "*.dot", "Export DOT file")
    if not fname:
        return
    # BUGFIX/PERF: the original called self.classes.keys().index(c) inside
    # the loops -- O(n^2), and broken on Python 3 where dict_keys has no
    # .index(). Build the name->index map once instead.
    names = list(self.classes)
    index_of = {name: i for i, name in enumerate(names)}
    buf = "digraph G {\n graph [overlap=scale]; node [fontname=Courier]; rankdir=\"LR\";\n\n"
    # one node per class
    for c in names:
        buf += ' a%s [shape=box, label = "%s", color="blue"]\n' % (index_of[c], c)
    buf += "\n"
    # one edge per known base-class relation
    for c in names:
        for base in self.classes[c]:
            if base in index_of:
                buf += ' a%s -> a%s [style = bold]\n' % (index_of[c], index_of[base])
    buf += "}"
    # text mode ("wb" + str write raises TypeError on py3); context manager
    # replaces the manual open/close pair
    with open(fname, "w") as f:
        f.write(buf)
def slot_export_json(self):
    """export capa results as JSON file"""
    if not self.doc:
        idaapi.info("No capa results to export.")
        return
    path = idaapi.ask_file(True, "*.json", "Choose file")
    if not path:
        # user cancelled, entered blank input, etc.
        return
    # check file exists, ask to override
    if os.path.exists(path) and idaapi.ask_yn(
            1, "The selected file already exists. Overwrite?") != 1:
        return
    payload = json.dumps(
        self.doc, sort_keys=True, cls=capa.render.CapaJsonObjectEncoder
    ).encode("utf-8")
    with open(path, "wb") as export_file:
        export_file.write(payload)
def slot_export_to_r2_script(self):
    """Export capa results as a radare2/Cutter script (base64 comments)."""
    if not self.doc:
        idaapi.info("No capa results to export.")
        return
    filename = idaapi.ask_file(
        True,
        os.path.splitext(ida_nalt.get_root_filename())[0] + ".cutter.r2",
        "Choose file")
    if not filename:
        return
    if os.path.exists(filename) and 1 != idaapi.ask_yn(
            1, "File already exists. Overwrite?"):
        return
    Tag_file = self.Parse_json(self.doc)
    # PERF: Parse_json yields "RVA;comment" lines; split once instead of
    # re-splitting the whole blob per index (was O(n^2)). The trailing empty
    # element after split("\n") is skipped, as before.
    lines = Tag_file.split("\n")
    Cutter_script = ""
    for line in lines[:len(lines) - 1]:
        parts = line.split(';')
        rva, comment = parts[0], parts[1]
        # base64 payload in CCu avoids quoting/escaping issues in r2
        Cutter_script += "CCu base64:" + base64.b64encode(
            comment.encode("utf-8")).decode() + " @ " + "$B+0x" + rva + "\n"
    # BUGFIX: close the output deterministically (was open(...).write(...))
    with open(filename, "w") as f:
        f.write(Cutter_script)
def get_input_file(freeze=True):
    """
    get input file path

    freeze (bool): if True, get freeze file if it exists
    """
    idb_path = idc.get_idb_path()
    # original binary usually sits next to the idb, minus the .idb/.i64 ext
    candidate = idb_path[:-4]
    if freeze:
        # prefer a frozen capa file when present
        frozen = "%s%s" % (candidate, CAPA_EXTENSION)
        if os.path.isfile(frozen):
            return frozen
    if not os.path.isfile(candidate):
        # TM naming convention
        candidate = "%s.mal_" % idb_path[:-4]
    if not os.path.isfile(candidate):
        # last resort: ask the user
        candidate = idaapi.ask_file(0, "*.*", "Please specify input file.")
    if not candidate:
        raise ValueError("could not find input file")
    return candidate
def load_nids(location, nids=None):
    """Parse a space-delimited NID csv at *location* into {name: nid}.

    On IOError the user is prompted for a replacement file, which is then
    cached back to *location*. Returns the (possibly empty) mapping.
    """
    # BUGFIX: the default was a mutable dict literal shared across calls
    if nids is None:
        nids = {}
    try:
        with open(location) as database:
            nids = dict(row for row in csv.reader(database, delimiter=' '))
    except IOError:
        retry = idaapi.ask_file(0, 'aerolib.csv|*.csv|All files (*.*)|*.*', 'Please gimme your aerolib.csv file')
        if retry is not None:
            try:
                with open(retry) as database:
                    nids = dict(row for row in csv.reader(database, delimiter=' '))
                # cache the user-provided file at the default location
                shutil.copy2(retry, location)
            # narrowed from a bare except: I/O, copy and parse failures only
            except (IOError, OSError, ValueError, csv.Error):
                print('Ok, no NIDs for you!')
        else:
            print('Ok, no NIDs for you!')
    return nids
def getDat(start, length):
    # Read *length* raw bytes from absolute offset *start* of the open exe_file.
    global exe_file
    exe_file.seek(start, 0)
    result = exe_file.read(length)
    return result


def getVal(start, length):
    # Little-endian unsigned int of up to 4 bytes, zero-padded to 4 for '<I'.
    result = int(
        struct.unpack('<I', getDat(start, length) + b"\00" * (4 - length))[0])
    return result


# Script body: parse the NE (16-bit New Executable) header of a user-chosen EXE.
exe_name = idaapi.ask_file(0, "", "FILTER *.EXE\nSelect EXE file")
if exe_name is not None and os.path.isfile(exe_name):
    if os.path.getsize(exe_name) >= 64:  # minimum size of a DOS MZ header
        with open(exe_name, "r+b") as exe_file:
            # NOTE(review): getDat returns bytes (file opened "r+b") but is
            # compared against str literals below -- on Python 3 these
            # comparisons are always False; confirm the intended interpreter.
            MZ_Magic = getDat(0, 2)
            if MZ_Magic == "MZ":
                NEAddr = getVal(60, 4)  # e_lfanew: offset of the NE header
                NE_Magic = getDat(0 + NEAddr, 2)
                if NE_Magic == "NE":
                    LinkVer = getVal(2 + NEAddr, 1)
                    if LinkVer == 4 or LinkVer == 5:
                        SegTableAddr = getVal(34 + NEAddr, 2)
                        NumSeg = getVal(28 + NEAddr, 2)
                        count = 0
                        position = SegTableAddr + NEAddr
                        # NOTE(review): the loop body is not present in this
                        # view -- the source appears truncated here.
                        while count < NumSeg:
def ask_file_option(option_prompt, file_mask, file_promt):
    """Ask a yes/no question; on yes, open a file-selection dialog.

    Returns the chosen path, or None when the user declines or cancels.
    """
    wants_file = idaapi.ask_yn(0, option_prompt)
    if not wants_file:
        return None
    return idaapi.ask_file(0, file_mask, file_promt)
class BasicBlocks(Base):
    """SQLAlchemy model recording whether a basic block was reached by a testcase."""
    __tablename__ = 'basic_blocks'

    # NOTE(review): column semantics inferred only from __init__ usage below
    id = Column(Integer, primary_key=True)
    offset = Column(Integer)
    reached = Column(Boolean)
    when = Column(Date)
    testcase = Column(String)

    def __init__(self, offset, reached, when, testcase):
        self.offset = offset
        self.reached = reached
        self.when = when
        self.testcase = testcase


# Script body: read block offsets from a user-selected csv (5th column),
# rebase them against the loaded image and look up the containing function.
filename = idaapi.ask_file(False, '*.csv', 'Please select csv')
blocks = []
with open(filename) as csvfile:
    readCSV = csv.reader(csvfile, delimiter=";", quotechar='"')
    next(readCSV)  # skip header row
    for row in readCSV:
        print row
        blocks.append(int(row[4]))
for bb in blocks:
    # offsets in the csv are image-relative; rebase to the loaded imagebase
    absPos = bb + ida_nalt.get_imagebase()
    f = idaapi.get_func(absPos)
    # NOTE(review): processing of `f` appears to continue beyond this view
# PROGRAM START if __name__ == '__main__': filename = idaapi.get_root_filename().split('.')[0] print('# PS4 IOCTL Nabber') print('# Searching for ioctl requests...') for function in idat.Functions(): if 'ioctl' in ida.get_func_name(function): IOCTL = nab(function) if IOCTL: filename = ida.ask_file( True, 'Text files|*.txt|All files (*.*)|*.*', 'Where do you want to save your ioctl requests?') if filename != None: print('# Saving ioctl requests...') with open(filename, 'w') as OUTPUT: for key, value in sorted(IOCTL.items(), key=lambda kv: (kv[1], kv[0])): OUTPUT.write('%s %s\n' % (value, key)) else: print('# Printing them instead...') for key, value in sorted(IOCTL.items(), key=lambda kv: (kv[1], kv[0])): print(value + ' ' + key) else: print('# ioctl was not found or no named functions found!')
from csv_symbols_io import read_symbols_csv
from tools import import_functions

import idaapi
import ida_auto


if __name__ == "__main__":
    # Ask for a symbols csv and import its function entries into the idb.
    selected_csv = idaapi.ask_file(0, "*.csv", "Select symbol csv")
    functions, _ = read_symbols_csv(selected_csv)
    import_functions(functions, always_thumb=False)
if _special is True: Abs = [ "__ACRTUSED", "PLOCALHEAP", "PATOMTABLE", "PSTACKTOP", "PSTACKMIN", "PSTACKBOT", "PLOCALHEA2", "PATOMTABL2", "PSTACKMI2" ] if name.upper() in Abs: _space = " Abs " else: _space = " Imp " else: _space = " " return " " + "0" * (4 - len(_seg)) + _seg + ":" + "0" * ( 4 - len(addr)) + addr + _space + name filename = idaapi.ask_file(0, "", "FILTER *.SYM *.MAP\nSelect MAP file") if filename is not None and os.path.isfile(filename): if filename.endswith("SYM"): map_header = "\n Start Length Name Class\n 0000:0000 00000H _TEXT CODE ; placeholder\n 0000:0000 00000H _DATA DATA ; placeholder\n\n Origin Group\n" map_origin = "" map_body = "\n Address Publics by Value\n" map_content = "\n" with open(filename, "rb") as sym_file: # ver_major = getVal(-1, 1, 2) # ver_minor = getVal(-2, 1, 2) ver = getVal(-2, 2, 2) if ver < 520 or ver > 778: idaapi.warning( ".SYM version not supported! Output may be incorrect!")