def main():
    """Batch entry point: wait for auto-analysis, then run the MSVC class
    reconstruction and print the result before exiting IDA."""
    # Block until IDA's initial auto-analysis has completed.
    idc.Wait()
    if autoIsOk():
        # Analysis finished cleanly: reconstruct MSVC classes and report them.
        classes = run_msvc()
        print classes
    else:
        print "Take it easy, man"
    print "Done"
    # Terminate the headless IDA instance with a success status.
    idc.Exit(0)
def main(): #test code if (len(idc.ARGV) < 2): print_help() ana_fun_name = '%s version %s protocol version %d%s' else: ana_fun_name = idc.ARGV[1] #要分析的函数名 para_num = 0 #参数数量 pos = ana_fun_name.find('%') while (not pos == -1): para_num += 1 pos += 1 pos = ana_fun_name.find('%', pos) ana = AnayBinFil() dic = ana.Anayl_Func_Call(ana_fun_name, para_num + 1) print '在函数中', '其调用参数为' for item in dic: print item, dic[item] sf = open("out.dat", 'w') if not sf: sf.write('parameter:' + str(idc.ARGV[0]) + str(idc.ARGV[1]) + '\n') idc.Exit(0) for item in dic: sf.write('In function : ' + item + '\n') x = (dic[item]) s = ' ' for i in range(len(x)): if x[i] is None: continue s += str(x[i]) + ' , ' sf.write(s + '\n') sf.close() ''' # get all names and it's addr for x in Names(): print x ''' idc.Exit(0)
def find_sscanf_vulns():
    """Locate `_sscanf` and inspect every code reference to it for
    potentially vulnerable call sites."""
    # Let auto-analysis settle before walking cross-references.
    idc.Wait()
    sscanf_ea = idc.LocByName('_sscanf')
    if sscanf_ea == idc.BADADDR:
        print("sscanf not found")
        return
    # Visit each call/jump site that targets sscanf.
    for xref_ea in idautils.CodeRefsTo(sscanf_ea, False):
        process_sscanf_callers(xref_ea, sscanf_ea)
    # When driven headlessly, close IDA once the scan is complete.
    if ida_kernwin.cvar.batch:
        idc.Exit(0)
def extract_bbs():
    """Write the basic-block list of every function to a file.

    If the user cancels the file dialog, fall back to '<input>.bblist',
    additionally emit .asm/.lst listings, and exit IDA when done (batch use).
    """
    filename = idc.AskFile(1, "*.*", "Save list of basic blocks")
    do_exit = False  # renamed from `exit`: don't shadow the builtin
    if not filename:
        basename = idc.GetInputFile()
        filename = basename + ".bblist"
        idc.GenerateFile(idc.OFILE_ASM, basename + ".asm", 0, idc.BADADDR, 0)
        idc.GenerateFile(idc.OFILE_LST, basename + ".lst", 0, idc.BADADDR, 0)
        do_exit = True
    # BUG FIX: the output handle was never closed; use a context manager
    # (matching the corrected variant of this function elsewhere in the file).
    with open(filename, 'w') as fp:
        for f in idautils.Functions():
            cls_main(fp, f)
    if do_exit:
        idc.Exit(0)
def extract_functions():
    """Write '<address> <name>' for every function to a file.

    If the user cancels the file dialog, fall back to '<input>.fcn',
    additionally emit .asm/.lst listings, and exit IDA when done (batch use).
    """
    filename = idc.AskFile(1, "*.*", "Save list of functions")
    do_exit = False  # renamed from `exit`: don't shadow the builtin
    if not filename:
        basename = idc.GetInputFile()
        filename = basename + ".fcn"
        idc.GenerateFile(idc.OFILE_ASM, basename + ".asm", 0, idc.BADADDR, 0)
        idc.GenerateFile(idc.OFILE_LST, basename + ".lst", 0, idc.BADADDR, 0)
        do_exit = True
    # BUG FIX: the output handle was never closed; use a context manager.
    with open(filename, 'w') as fp:
        for f in idautils.Functions():
            print >> fp, "%#010x %s" % (f, GetFunctionName(f))
    if do_exit:
        idc.Exit(0)
def main():
    """Decompile the function at the (hex) address in ARGV[2] and write the
    pseudocode to the terminal/file named in ARGV[1]."""
    out_path = idc.ARGV[1]
    func_ea = int(idc.ARGV[2], 16)  # target function address, hex string
    # The decompiler plugins must be loaded before idaapi.decompile works.
    idaapi.load_plugin('hexrays')
    idaapi.load_plugin('hexx64')
    pseudocode = idaapi.decompile(idaapi.get_func(func_ea))
    # Send the output to the given tty/file instead of IDA's Output window.
    with open(out_path, 'w') as out:
        out.write("{}\n".format(pseudocode))
    idc.Exit(0)
def retrieve_reset(self, *args):
    """Discard every local IDB change and pull the remote state (force pull),
    then kill IDA so the stale in-memory database cannot be written back."""
    proceed = idaapi.askyn_c(
        False,
        "All your local changes will be lost !\nDo you really want to proceed ?"
    )
    if not proceed:
        return
    # Detach every YaCo hook before touching the repository.
    self.YaCoUI.unhook()
    self.repo_manager.discard_and_pull_idb()
    # The current idb must not overwrite the pulled one, so close IDA
    # brutally without saving the database.
    idaapi.set_database_flag(idaapi.DBFL_KILL)
    idc.Warning("Force pull complete, you can restart IDA")
    idc.Exit(0)
def main():
    """Run the full code-pointer extraction pipeline and exit IDA.

    Pipeline: init -> data-dependency analysis -> pointer extraction from
    code and read-only sections -> dump results.
    """
    # Wait for auto-analysis to finish before running script
    idaapi.autoWait()
    print "Initialize databse"
    init()
    print "Analyze data dependency"
    analyze_data_dep()
    print "Extracting code pointers"
    extract_code_ptr_from_code()
    extract_code_ptr_from_ro_sections()
    dump_result()
    # Batch mode: terminate IDA once the dump is written.
    idc.Exit(0)
def create_reset(self, *args):
    """Force-push the current IDB after user confirmation.

    Backs up the working idb, restores the original one, replaces the XML
    cache with the idb in the repository, commits, pushes, and asks the
    user to restart IDA.
    """
    title = "YaCo Force Push"
    text = "You are going to force push your IDB. Other YaCo users will need to stop working & force pull.\n" "Do you really want to force push ?"
    val = idaapi.askbuttons_c(
        "Yes", "No", "", idaapi.ASKBTN_NO,
        "TITLE %s\nICON QUESTION\nAUTOHIDE SESSION\n" "HIDECANCEL\n%s" %
        (title, text))
    if val != idaapi.ASKBTN_YES:
        return
    # Disable all YaCo hooks before rewriting repository state.
    self.YaCoUI.unhook()
    # Create a timestamped backup of the current idb.
    yatools.copy_idb_to_local_file(
        "_bkp_%s" % time.ctime().replace(" ", "_").replace(":", "_"))
    # Restore the original idb; its path goes into the commit below.
    original_file = yatools.copy_idb_to_original_file()
    # Collect the XML cache files that will be removed from the repo.
    # FIXES: `file` shadowed the Python 2 builtin, and "%s/%s" produced
    # double-separator paths like 'cache//x' (os.walk roots already end
    # with the separator for the top directory given here).
    xml_files = []
    for root, dirs, files in os.walk('cache/'):
        for name in files:
            xml_files.append(os.path.join(root, name))
    # add idb
    self.repo_manager.repo.add_file(original_file)
    # remove xml cache
    self.repo_manager.repo.remove_files(xml_files)
    for xml_file in xml_files:
        os.remove(xml_file)
    # create commit
    self.repo_manager.repo.commit("YaCo force push")
    # push commit
    self.repo_manager.push_origin_master()
    idc.Warning(
        "Force push complete, you can restart IDA and other YaCo users can \"Force pull\""
    )
    idc.Exit(0)
def extract_bbs():
    """Dump every function's basic-block list, optionally in batch mode.

    When the save dialog is cancelled, output names are derived from the
    input binary, listing files are generated too, and IDA exits when done.
    """
    filename = idc.AskFile(1, '*.*', 'Save list of basic blocks')
    should_exit = False
    if not filename:
        # Headless/batch path: derive all output names from the input file.
        basename = idc.GetInputFile()
        filename = '%s.bblist' % basename
        idc.GenerateFile(idc.OFILE_ASM, '%s.asm' % basename, 0, idc.BADADDR, 0)
        idc.GenerateFile(idc.OFILE_LST, '%s.lst' % basename, 0, idc.BADADDR, 0)
        should_exit = True
    with open(filename, 'w') as out:
        for func_ea in idautils.Functions():
            cls_main(out, func_ea)
    if should_exit:
        idc.Exit(0)
def main(): idc.Wait() print ["Check begin-----------------------------------------------------------------"] file_type = idaapi.get_file_type_name() print "It's",[file_type],"file." danger_func_check() #iOS_file = ["Fat Mach-O file, 1. ARMv7","Fat Mach-O file, 2. ARM64","Mach-O file (EXECUTE). ARM64","Mach-O file (EXECUTE). ARMv7","Mach-O file (EXECUTE). ARM","Mach-O file (EXECUTE). ARMv7s","Mach-O file (EXECUTE). ARMv6"] #Android_so_file = [] if ("Mach-O file" in file_type) and ("ARM" in file_type): iOS_check() if "PE" in file_type: CreateProcessAsUserW_check() #if file_type in Android_so_file: #Android_so_check() strcpy_buffer_check() print "-----------------------------------------------------------------" print ["Check over-----------------------------------------------------------------"] idc.Exit(0)
def main():
    """Count instructions over all functions, print the total via Message,
    and append it to data.txt located next to this script."""
    # FIXES: the per-function Name/GetFunctionAttr lookups were unused,
    # `locals` shadowed the builtin, and the log handle was closed manually.
    total_instrs = 0
    for func_ea in Functions():
        total_instrs += getInstrsByAddr(func_ea)
    Message("Total: %d Instructions\n" % (total_instrs))
    log_file_uri = os.path.dirname(os.path.realpath(__file__)) + '/data.txt'
    with open(log_file_uri, 'a') as log_file:
        log_file.write('Total Instructions: ' + str(total_instrs) + '\n')
    # Batch mode: terminate IDA once the count is logged.
    idc.Exit(0)
def main():
    """Count basic blocks over all functions, print the total via Message,
    and append it to data.txt located next to this script."""
    # FIXES: the per-function Name/GetFunctionAttr lookups were unused,
    # `locals` shadowed the builtin, stray semicolons removed, and the log
    # handle is now closed via a context manager.
    total_blocks = 0
    for func_ea in Functions():
        total_blocks += getBasicblocksByAddr(func_ea)
    Message("Total: %d blocks\n" % (total_blocks))
    log_file_uri = os.path.dirname(os.path.realpath(__file__)) + '/data.txt'
    with open(log_file_uri, 'a') as log_file:
        log_file.write('Total basicblocks: ' + str(total_blocks) + '\n')
    idc.Exit(0)  # Exit IDA Pro
def create_reset(self, *args):
    """Ask for confirmation, then force-push the original IDB and tell the
    user to restart IDA."""
    title = "YaCo Force Push"
    text = "You are going to force push your IDB. Other YaCo users will need to stop working & force pull.\n" "Do you really want to force push ?"
    prompt = "TITLE %s\nICON QUESTION\nAUTOHIDE SESSION\n" "HIDECANCEL\n%s" % (title, text)
    answer = idaapi.askbuttons_c("Yes", "No", "", idaapi.ASKBTN_NO, prompt)
    if answer != idaapi.ASKBTN_YES:
        return
    # Detach every YaCo hook before rewriting repository state.
    self.YaCoUI.unhook()
    self.repo_manager.sync_and_push_original_idb()
    idc.Warning(
        "Force push complete, you can restart IDA and other YaCo users can \"Force pull\""
    )
    idc.Exit(0)
def parse_hdr(self): ''' Refer: function [go12Init()] in https://golang.org/src/debug/gosym/pclntab.go ''' magic = idc.Dword(self.start_addr) & 0xFFFFFFFF if magic != Pclntbl.MAGIC: print magic, Pclntbl.MAGIC common._error("Invalid pclntbl header magic number!") idc.Exit(1) #raise Exception("Invalid pclntbl header magic number!") if idc.Word(self.start_addr + 4) & 0xFFFF != 0: raise Exception("Invalid pclntbl header") self.min_lc = idc.Byte(self.start_addr + 6) & 0xFF if (self.min_lc != 1) and (self.min_lc != 2) and (self.min_lc != 4): raise Exception("Invalid pclntbl minimum LC!") self.ptr_sz = idc.Byte(self.start_addr + 7) & 0xFF if (self.ptr_sz != 4) and (self.ptr_sz != 8): raise Exception("Invalid pclntbl pointer size!")
def findFuncPreds(func_ea): pool = set([GetFunctionName(func_ea)]) flag = set([GetFunctionName(func_ea)]) result = [] while pool: fname = pool.pop() f_ea = getFunctionWithName(fname) #print f_ea for ref_ea in CodeRefsTo(f_ea, 0): #print ref_ea.type, XrefTypeName(ref_ea.type) #print "04%X" ,ref_ea func_name = GetFunctionName(ref_ea) if len(func_name) > 0 and func_name not in flag: #G.add_edge(func_name, fname) #func = get_func(ref_ea) result.append((fname, func_name, ref_ea)) pool.add(func_name) flag.add(func_name) elif len(func_name) == 0: print 'Thumb' idc.Exit(0) return result
def main():
    """Start an rpyc SlaveService on the port given in ARGV[1] (default
    18861), either on a daemon thread ('threaded' in ARGV[2]) or by serving
    exactly one client inline."""
    if idc.ARGV[1:]:
        port = int(idc.ARGV[1])
    else:
        port = 18861
    if idc.ARGV[2:]:
        thread_mode = idc.ARGV[2] == 'threaded'
    else:
        thread_mode = False
    print('Received arguments: port=%s, thread_mode=%s' % (port, thread_mode))
    # :note: For speed, we don't want to idc.Wait() here,
    # but you might want to call it in your code
    # to make sure that autoanalysis has finished.
    if thread_mode:
        worker = threading.Thread(target=main_thread, args=(port, thread_mode))
        worker.daemon = True
        worker.start()
    else:
        srv = OneShotServer(SlaveService, port=port)
        # OneShotServer.start() would not return control to us, so replicate
        # the relevant steps of rpyc's Server.start() by hand — copied from
        # https://github.com/tomerfiliba/rpyc/blob/master/rpyc/utils/server.py
        # (the start method). If stuff breaks look there!
        srv._listen()
        srv._register()
        srv.accept()
    idc.Exit(0)
def main(): f = None if len(idc.ARGV) > 1: out_name = idc.ARGV[1] print "Writing to %s" % out_name f = open(out_name, "w+") rel_addr_llvm.ensure_all() strings = [ "+xsimstate=1", "Sending internal notification %s", "activation ticket accepted... drive thru" ] if f: f.write("#pragma once\n") f.write("REF_ENTRY ref_table[] = {\n") f.write("// TODO: version\n") f.write( "// generated automatically with commcenter.py, IDA and IdaPython\n" ) print "// TODO: version" print "// generated automatically with commcenter.py, IDA and IdaPython" for s in strings: if not process_func_for_string(s, f): if f: f.close() os.unlink(f.name) raise Exception("Failed for string %s", s) if f: f.write("};\n") f.close() idc.Exit(0) print
FEATURE_extract_time[str(DG.number_of_nodes())].append( str(time.time() - t_feature_start)) function_str = str(cur_function_name) + "," + str(DG.number_of_nodes()) + "," + \ str(DG.number_of_edges()) + ","+ \ str(program) + "," + str(version) + "," + str(bin_path) + ",\n" function_list_fp = open(function_list_file, 'a') # a 追加 function_list_fp.write(function_str) function_list_fp.close() binary_name = '_'.join(bin_path.split('\\')[-1].split('.')[:-1]) with open( os.path.join(fea_path_origion, binary_name + "_cfg_extractor_time.json"), 'w') as fp: json.dump(CFG_extract_time, fp, indent=4) if config.STEP2_GEN_DFG: with open( os.path.join(fea_path_origion, binary_name + "_dfg_extractor_time.json"), 'w') as fp: json.dump(DFG_extract_time, fp, indent=4) with open( os.path.join(fea_path_origion, binary_name + "_mfe_extractor_time.json"), 'w') as fp: json.dump(FEATURE_extract_time, fp, indent=4) return if __name__ == '__main__': main() idc.Exit(0)
def main():
    """ida2sql/BinNavi exporter entry point: detect the architecture module,
    read database settings from ida2sql.cfg (or prompt), connect, and export
    the current binary, exiting IDA with a status code in batch mode."""
    global tm_start
    # Probe the architecture modules until one accepts the current processor.
    for mod in ('metapc', 'ppc', 'arm'):
        arch_mod = __import__('arch.%s' % mod, globals(), locals(), ['*'])
        arch = arch_mod.Arch()
        if arch:
            if arch.check_arch():
                # This is a valid module for the current architecure
                # so the search has finished
                log_message('Using architecture module [%s]' % mod)
                break
    else:
        # NOTE(review): reconstructed as a for/else — runs only when no
        # module matched; confirm against the original layout.
        log_message(
            'No module found to process the current architecure [%s]. Exiting.'
            % (arch.processor_name))
        return
    global instrumentation
    log_message('Initialization sucessful.')
    # Database connection settings; None means "not configured yet".
    db_engine, db_host, db_name, db_user, db_password = (None, ) * 5
    batch_mode = False
    module_comment = ''
    process_sections = False
    # If the configuration filename has been fetched from the
    # environment variables, then use that.
    #
    if CONFIG_FILE_NAME:
        config_file_path = CONFIG_FILE_NAME
    # Otherwise fallback into the one expected in the IDA directory
    #
    else:
        config_file_path = os.path.join(idaapi.idadir(''), 'ida2sql.cfg')
    if os.path.exists(config_file_path):
        cfg = ConfigParser.ConfigParser()
        cfg.read(config_file_path)
        # NOTE(review): the 'importing' options are read inside the
        # 'database' section check, mirroring the original token order —
        # confirm this nesting is intended.
        if cfg.has_section('database'):
            if cfg.has_option('database', 'engine'):
                db_engine = getattr(DB_ENGINE, cfg.get('database', 'engine'))
            if cfg.has_option('database', 'host'):
                db_host = cfg.get('database', 'host')
            if cfg.has_option('database', 'schema'):
                db_name = cfg.get('database', 'schema')
            if cfg.has_option('database', 'user'):
                db_user = cfg.get('database', 'user')
            if cfg.has_option('database', 'password'):
                db_password = cfg.get('database', 'password')
            if cfg.has_option('importing', 'mode'):
                batch_mode = cfg.get('importing', 'mode')
                if batch_mode.lower() in ('batch', 'auto'):
                    batch_mode = True
            if cfg.has_option('importing', 'comment'):
                module_comment = cfg.get('importing', 'comment')
            if cfg.has_option('importing', 'process_sections'):
                process_sections = cfg.get('importing', 'process_sections')
                if process_sections.lower() in ('no', 'false'):
                    process_sections = False
                else:
                    process_sections = True
    # Anything still unset is asked from the user interactively.
    if None in (db_engine, db_host, db_name, db_user, db_password):
        (db_engine, db_host, db_name, db_user,
         db_password) = query_configuration()
    if None in (db_engine, db_host, db_name, db_user, db_password):
        log_message('User cancelled the exporting.')
        return
    failed = False
    try:
        sqlexporter = SQLExporter(arch, db_engine, db=db_name, user=db_user,
                                  passwd=db_password, host=db_host,
                                  use_new_schema=USE_NEW_SCHEMA)
    except ImportError:
        print "Error connecting to the database, error importing required module: %s" % sys.exc_info()[0]
        failed = True
    except Exception:
        print "Error connecting to the database, Reason: %s" % sys.exc_info()[0]
        failed = True
    if failed:
        # Can't connect to the database, indicate that to BinNavi
        if batch_mode is True:
            idc.Exit(FATAL_CANNOT_CONNECT_TO_DATABASE)
        else:
            return
    if not sqlexporter.is_database_ready():
        # First-time setup: create the base tables (ask only when interactive).
        if batch_mode is False:
            result = idc.AskYN(
                1,
                'Database has not been initialized yet. Do you want to create now the basic tables? (This step is performed only once)'
            )
        else:
            result = 1
        if result == 1:
            sqlexporter.init_database()
        else:
            log_message('User requested abort.')
            return
    # EXPORT_ITERATION/MODULE_ID are set by the controlling process when the
    # export is split across multiple IDA invocations.
    iteration = os.environ.get('EXPORT_ITERATION', None)
    module_id = os.environ.get('MODULE_ID', None)
    if iteration is None and module_id == None:
        # Export manually
        print "Exporting manually ..."
        iteration = -1
        sqlexporter.set_callgraph_only(False)
        sqlexporter.set_exporting_manually(True)
        status = sqlexporter.new_module(idc.GetInputFilePath(),
                                        arch.get_architecture_name(),
                                        idaapi.get_imagebase(),
                                        module_comment, batch_mode)
    elif iteration is not None and module_id is not None:
        # Export the next k functions or the call graph
        sqlexporter.set_exporting_manually(False)
        sqlexporter.set_callgraph_only(int(iteration) == -1)
        sqlexporter.set_module_id(int(module_id))
        status = True
    else:
        sqlexporter.set_exporting_manually(False)
        status = sqlexporter.new_module(idc.GetInputFilePath(),
                                        arch.get_architecture_name(),
                                        idaapi.get_imagebase(),
                                        module_comment, batch_mode)
        sqlexporter.set_callgraph_only(False)
    if status is False:
        log_message('Export aborted')
        return
    elif status is None:
        log_message(
            'The database appears to contain data exported with different schemas, exporting not allowed.'
        )
        if batch_mode:
            idc.Exit(FATAL_INVALID_SCHEMA_VERSION)
    # Wire the exporter's callbacks into the instrumentation layer.
    instrumentation = Instrumentation()
    instrumentation.new_function_callable(sqlexporter.process_function)
    instrumentation.new_packet_callable(sqlexporter.process_packet)
    instrumentation.new_section_callable(sqlexporter.process_section)
    tm_start = time.time()
    already_imported = sqlexporter.db.get_already_imported()
    incomplete = process_binary(arch, process_sections, int(iteration),
                                already_imported)
    sqlexporter.finish()
    log_message(
        'Results: %d functions, %d instructions, %d basic blocks, %d address references'
        % (len(sqlexporter.exported_functions),
           len(sqlexporter.exported_instructions),
           sqlexporter.basic_blocks_next_id - 1,
           sqlexporter.address_references_values_count))
    log_message(
        'Results: %d expression substitutions, %d operand expressions, %d operand tuples'
        % (sqlexporter.expression_substitutions_values_count,
           sqlexporter.operand_expressions_values_count,
           sqlexporter.operand_tuples___operands_values_count))
    log_message('Exporting completed in %s' % get_time_delta_string())
    # If running in batch mode, exit when done
    if batch_mode:
        # Exit status encodes the module id plus a marker byte: 0xFF means
        # more functions remain, 0xFE means functions done (callgraph next).
        if incomplete:
            shiftedModule = (sqlexporter.db.module_id << 0x10) | 0xFF
            idc.Exit(shiftedModule)
        elif not sqlexporter.callgraph_only:
            shiftedModule = (sqlexporter.db.module_id << 0x10) | 0xFE
            idc.Exit(shiftedModule)
        else:
            idc.Exit(0)
# Export non-sub functions for xrkpydbg: map the loaded .idb back to its
# original binary via exts_dict and write the symbol dump next to it.
idb_path = idc.GetIdbPath()
output_file = None
for (ext, ext_x) in exts_dict.items():
    if os.path.exists(idb_path.replace(".idb", ext)):
        output_file = idb_path.replace(".idb", ext_x)
        break
if output_file is None:
    msg("corresponding pe file not exists: %s" % idb_path)
    # BUG FIX: `assert False` made the following exit(1) unreachable (and
    # asserts are stripped under -O); abort the batch run through IDA instead.
    idc.Exit(1)
# we control whether replace or not from _gen_symbols.py, not from here
# if os.path.exists(output_file):
#     msg("can't export, file already exists: %s" % output_file)
if os.path.exists(output_file):
    msg("replacing existing file: %s" % output_file)
if output_file:
    save_non_sub_function(output_file, is_offset=True, is_hex=True)
    msg("xrkexport for xrkpydbg, finish: %s" % output_file)
else:
    msg("xrkexport for xrkpydbg, no idb loaded")
# we then exit
idc.Exit(1)
pass eps = [] try: if args.exports_to_lift: eps = args.exports_to_lift.readlines() elif args.entry_symbol is None: eps = getAllExports() eps = [ep.strip() for ep in eps] except IOError as e: DEBUG( "Could not open file of exports to lift. See source for details\n" ) idc.Exit(-1) if args.entry_symbol: eps.extend(args.entry_symbol) assert len(eps) > 0, "Need to have at least one entry point to lift" DEBUG("Will lift {0} exports\n".format(len(eps))) if args.make_export_stubs: DEBUG("Generating export stubs...\n") outdef = path.join(outpath, "{0}.def".format(myname)) DEBUG("Output .DEF file: {0}\n".format(outdef)) generateDefFile(outdef, eps) outstub = path.join(outpath, "{0}_exportstub.c".format(myname))
def exit(self):
    """Terminate IDA when running without a UI; do nothing otherwise."""
    if not self.headless:
        return
    idc.Exit(0)
def exit(self):
    """Exit the disassembler (cleanly) with status code 0."""
    idc.Exit(0)
import os import sys import shutil from glob import glob try: import epydoc.apidoc import epydoc.cli except ImportError as e: import idc import traceback idc.Message("Couldn't import module %s\n" % traceback.format_exc()) idc.Exit(-1) # -------------------------------------------------------------------------- DOC_DIR = 'hr-html' # -------------------------------------------------------------------------- def log(msg): #print msg pass # -------------------------------------------------------------------------- def add_footer(lines): S1 = 'Generated by Epydoc' S2 = '</table>' p = lines.find(S1) if p == -1:
def killIDA(self):
    """Terminate IDA Pro process."""
    # End this IDA instance with exit status 0.
    idc.Exit(0)
def exit(code=0):
    """Leave IDA with the given status, but only when running under an
    idascript wrapper; a no-op in an interactive session."""
    if not __idascript_active__:
        return
    idc.Exit(code)
def dump_exp_gadgets(): import gadget exp_gadgets = gadget.find_exp_gadgets() with open("%s.payload.gadgets" % get_input_file_path(), "wb") as f: pickle.dump(exp_gadgets, f) return def load_exp_gadgets(dump_file): with open(dump_file, "rb") as f: exp_gadgets = pickle.load(f) return exp_gadgets def disasm(ea): return idc.GetDisasm(ea) """ if __name__ == "__main__": import sys sys.setrecursionlimit(40000) idc.Wait() # Wait for IDA to complete its auto analysis print '-------- DUMP start -----------' dump_data() print '-------- DUMP end -----------' ''' (all_blocks, all_opcodes) = load_codes_blocks(target_file) show_blocks(all_blocks) print all_opcodes ''' idc.Exit(0) # Exit IDA when done;
args.arch)) init_for_arch(args.arch) log.info("Analysing {}.".format(idc.GetInputFile())) # Wait for auto-analysis to finish. log.info("Waiting for IDA to finish its auto-analysis") idaapi.autoWait() analyse_data(ADDRESS_SIZE[args.arch]) analyse_subroutines() find_imported_subroutines() find_exported_subroutines() analyse_indirect_jumps() analyse_function_returns() analyse_callbacks() log.info("Saving CFG to {}".format(args.output.name)) cfg.save_to_stream(args.output) log.info("Done") idc.Exit(0) except SystemExit as e: idc.Exit(e.code) except: log.error(traceback.format_exc()) idc.Exit(1)
def main():
    """Log every named function, then classify functions that call nothing
    ('atomic') and functions nobody calls ('entry'), writing everything to
    the configured log file. Returns 0."""
    with open(os.path.join(__LOG_ROOT_PATH, __LOG_FILE_NAME), 'w') as f:
        if __DEBUG:
            f.write("successfully opened {}\n".format(
                os.path.join(__LOG_ROOT_PATH, __LOG_FILE_NAME)))
        f.write("{} \t=========zyc_test begins!=========\n\n".format(
            str(datetime.datetime.now())))
        # Split idautils.Names() into parallel address/name lists.
        names = idautils.Names()
        name_addresses = []
        name_literals = []
        functions_found = []
        for item in names:
            name_addresses.append(item[0])
            name_literals.append(item[1])
        if __DEBUG:
            f.write("\nNAME Address Pairs:\n")
            for i in range(len(name_addresses)):
                f.write(
                    "Name and Address Pair -- Name: {}, Address: {}\n".format(
                        str(name_literals[i]), str(hex(name_addresses[i]))))
            f.write('\n')
        func_gen = idautils.Functions()
        if __DEBUG:
            f.write("succesfully called idautils.Functions()\n")
        if __DEBUG:
            f.write("\nFunction Names:\n")
        # Pair each function address with its literal name, if present.
        for item in func_gen:
            func_literal = get_func_literal(item, name_addresses, name_literals)
            if func_literal is None:
                f.write("function at {} cannot be found in idautils.Names()\n".
                        format(str(hex(item))))
            else:
                if __DEBUG:
                    f.write("function at {} is named: {}\n".format(
                        str(hex(item)), func_literal))
                functions_found.append((item, func_literal))
        # Classify: leaf functions (call nothing) and roots (called by nobody).
        atomic_functions = []
        entry_functions = []
        for function_address_name_pairs in functions_found:
            if is_the_function_calling_nothing(function_address_name_pairs, f):
                atomic_functions.append(function_address_name_pairs)
            if is_the_function_called_by_others(function_address_name_pairs, f):
                entry_functions.append(function_address_name_pairs)
        for pairs in atomic_functions:
            f.write("I call nothing: {}, at {}\n".format(pairs[1], pairs[0]))
        for pairs in entry_functions:
            f.write("nothings called me: {}, at {}\n".format(
                pairs[1], pairs[0]))
        f.write("\n{} \t========zyc_test terminates!========\n".format(
            str(datetime.datetime.now())))
    #uncomment this line when starting batch analysis
    idc.Exit(0)
    return 0