def main():
    """Recover RTTI from the current input binary, dump the class tree, draw it.

    Loads the IDB's input file into angr, picks the ELF or PE RTTI walker,
    serialises the recovered ``class_list`` to NodeTreejson.txt and renders it.
    """
    target = idc.GetInputFilePath()
    proj = angr.Project(
        target,
        load_options={'auto_load_libs': False, 'extern_size': 0x800000},
    )
    # Any loaded PE object means the target is PE; otherwise treat it as ELF.
    filetype = "PE" if proj.loader.all_pe_objects else "ELF"
    if filetype == "ELF":
        rtti_gcc()
    elif filetype == "PE":
        rtti_msvc()
    # Persist the recovered class hierarchy as JSON for the later stages.
    with open('NodeTreejson.txt', 'w') as out:
        out.write(json.dumps(class_list))
    statistics()
    draw()
def OnCreate(self, form): """ Called when the plugin form is created """ # Initalize Data exename = idc.GetInputFile() path = os.path.dirname(idc.GetInputFilePath()) filename = os.path.join(path, exename) if False: self.infoparser = InfoParser(infoname) self.infoparser.load() self.flamegraph = FlameGraphReader(filename) self.flamegraph.parse() if False: self.infoparser.flow() self.canvas = None # Get parent widget self.parent = self.FormToPyQtWidget(form) self.PopulateForm() drawing = Drawing(self.flamegraph) for idx in xrange(0, len(self.flamegraph.roots)): root = self.flamegraph.roots[idx] self._combobox.addItem("%d: %x" % (idx, root['size']), idx) if drawing.activeIndex is None: drawing.activeIndex = idx self.canvas.setDrawing(drawing)
def save_file(output_file): """ Save the patched file """ DIFF_RE = re.compile(r'([A-F0-9]+): ([A-F0-9]+) ([A-F0-9]+)') idc.GenerateFile(idaapi.OFILE_DIF, output_file, 0, idc.MaxEA(), 0) diff_file = open(output_file, "rb").read() orig_file = open(idc.GetInputFilePath(), "rb").read() print "OK" diff_file = diff_file.split("\n") total = 0 success = 0 for line in diff_file: match = DIFF_RE.match(line) if match: groups = match.groups() total += 1 offset = int(groups[0], 16) orig_byte = groups[1].decode('hex') new_byte = groups[2].decode('hex') if orig_file[offset] == orig_byte: orig_file = orig_file[:offset] + new_byte + orig_file[offset + 1:] success += 1 else: print "Error matching %02x at offset %x..." % (groups[1], offset) new_file = open(output_file, 'wb') new_file.write(orig_file) new_file.close() print "%i/%i patches applied" % (success, total)
def _collect_data(self, collect_args): # @UnusedVariable try: exe_file_path = idc.GetInputFilePath() md5_obj = hashlib.md5(open(exe_file_path).read()) self._exe_md5 = md5_obj.hexdigest() except: # exe does not exist. self._exe_md5 = ""
def activate(self, ctx): if ctypes.windll.shell32.IsUserAnAdmin() == 0: print "Admin privileges required" return name = idc.GetInputFile().split('.')[0] driver = driverlib.Driver(idc.GetInputFilePath(), name) loaded = driver.load() started = driver.start()
def __init__(self):
    """Initialise analyser state from the currently open IDB."""
    # Parsed-structure storage, keyed later by the processing code.
    self.storage = {}
    # Bitness object derived from the database's first address —
    # presumably 16/32/64 detection; confirm against Utils.get_bitness.
    self.bt_obj = Utils.get_bitness(idc.BeginEA())
    self.structCreator = Utils.StructCreator(self.bt_obj)
    self.processor = None
    self.typer = None
    # Path of the binary this IDB was created from.
    self.binaryPath = idc.GetInputFilePath()
    self.structsDef = {}
def activate(self, ctx):
    """Stop and unload the driver built from the current input file.

    Requires an elevated (administrator) IDA session.
    """
    if ctypes.windll.shell32.IsUserAnAdmin() == 0:
        print("Admin privileges required")
        return
    #name = idc.GetInputFile().split('.')[0]
    # NOTE(review): unlike the commented-out line above, this splits the full
    # *path* on '.', so `name` keeps directory components — confirm this is
    # intended (the load-side activate uses GetInputFile()).
    name = idc.get_input_file_path().split('.')[0]
    driver = driverlib.Driver(idc.GetInputFilePath(), name)
    driver.stop()
    driver.unload()
def getInputFilepath(): '''Returns None if the uesr cancels. Updates the filepath in the idb on success''' filePath = idc.GetInputFilePath() if not os.path.exists(filePath): print 'IDB input file not found. Prompting for new one: %s' % filePath filePath = idc.AskFile(False, '*.*', 'Enter path to idb input file') if filePath is not None: idc.SetInputFilePath(filePath) return filePath
def decode():
    """Parse the 'Rich' header of the input PE and return its product records.

    Returns a list of products (entries with id == 1 are skipped), or None
    when the file is not a valid PE or carries no Rich/DanS signature.
    """
    with open(idc.GetInputFilePath(), 'rb') as fh:
        # Read the PE header offset from the MZ header.
        fh.seek(0x3C)
        pe_offset = __read_u32(fh)
        # print "PE header offset at %08X" % (pe_offset)
        fh.seek(pe_offset)
        pe_magic = __read_u32(fh)
        if pe_magic != 0x00004550:
            print "PE header magic is invalid: expected %08x, got %08x." % (
                0x00004550, pe_magic)
            return None
        # Seek to the end of MZ header, scan for "Rich" magic.
        fh.seek(0x40)
        while fh.tell() < pe_offset:
            if __read_u32(fh) == 0x68636952:
                # Records end right before the "Rich" marker; the XOR mask
                # follows it.
                records_end = fh.tell() - 4
                mask = __read_u32(fh)
                # print "Rich signature found, mask: %08X" % (mask)
                break
        else:
            print "Rich signature not found."
            return None
        # Seek to the end of MZ header, scan for "DanS" magic.
        fh.seek(0x40)
        while fh.tell() < records_end:
            d = __read_u32(fh) ^ mask
            if d == 0x536E6144:
                # print "DanS signature found."
                # Skip the three masked padding dwords after "DanS".
                fh.seek(12, os.SEEK_CUR)
                break
        else:
            print "DanS signature not found."
            return None
        product_list = []
        while fh.tell() < records_end:
            # Each record is two masked dwords: comp.id/build and use count
            # (`t` is read to advance the stream but not used further).
            d = __read_u32(fh) ^ mask
            t = __read_u32(fh) ^ mask
            # print "%08X %08X" % (d, t)
            prod = __lookup(d >> 16, d & 0xFFFF)
            if prod.id == 1:
                continue
            product_list.append(prod)
            # print prod
        return product_list
def load_keil_sfr_descr(): filename = os.path.dirname(idc.GetInputFilePath()) + "\\XC2xxx.INC" f = open(filename, "r+") pr_line = "" for line in f: words = line.split() if len(words) == 3 and (words[1] == "EQU" or words[1] == "DEFR"): addr = int(words[2], 0) name = GetTrueName(addr) print addr, name MakeName(addr, words[0]) MakeRptCmt(addr, pr_line.strip()) pr_line = line f.close()
def read_rsds_codeview():
    """Return the PDB GUID string from the RSDS CodeView debug record.

    Reads the IMAGE_DEBUG_DIRECTORY at the file position IDA recorded in the
    PE netnode, follows it to the CodeView blob, and formats the GUID.
    Returns None when no debug directory / CodeView data is present.
    """
    guid = None
    penode = idaapi.netnode()
    penode.create(idautils.peutils_t.PE_NODE)
    fpos = penode.altval(idautils.peutils_t.PE_ALT_DBG_FPOS)
    if fpos == 0:
        print("[*] No debug directory")
        return guid
    input_file = idc.GetInputFilePath()
    if not os.path.exists(input_file):
        print("[*] input file not available")
    else:
        # BUG FIX: open in binary mode — text mode would mangle the raw
        # structures on Windows; with-statement releases the handle.
        with open(input_file, 'rb') as fd:
            fd.seek(fpos)
            raw = fd.read(0x1C)
            # typedef struct _IMAGE_DEBUG_DIRECTORY {
            #     DWORD Characteristics;
            #     DWORD TimeDateStamp;
            #     WORD  MajorVersion;
            #     WORD  MinorVersion;
            #     DWORD Type;
            #     DWORD SizeOfData;
            #     DWORD AddressOfRawData;
            #     DWORD PointerToRawData;
            # } IMAGE_DEBUG_DIRECTORY, *PIMAGE_DEBUG_DIRECTORY;
            dbgdir = struct.unpack('LLHHLLLL', raw)
            # 2, IMAGE_DEBUG_TYPE_CODEVIEW
            if not (dbgdir[4] == 2):
                print("[*] not CODEVIEW data")
            else:
                fd.seek(dbgdir[7])
                # BUG FIX: the signature was read but never bound, so the
                # error branch referenced an undefined name `sig`.
                sig = fd.read(4)
                if sig != "RSDS":
                    print(
                        "[*] unsupported CODEVIEW information format (%s)" %
                        sig)
                else:
                    d1, d2, d3 = struct.unpack('LHH', fd.read(0x8))
                    d4 = struct.unpack('>H', fd.read(0x2))[0]
                    d5 = binascii.hexlify(fd.read(0x6)).upper()
                    guid = "%08X-%04X-%04X-%04X-%s" % (d1, d2, d3, d4, d5)
    return guid
def main(host, port):
    """Resolve indirect control flow seen in a REVEN trace into IDA xrefs."""
    print('\n**** Resolve executed XREFS *****')
    server = reven2.RevenServer(host, port)
    print("\n* REVEN Server: {}:{}".format(host, port))
    module_name = os.path.basename(idc.GetInputFilePath()).lower()
    target = find_binary(server.ossi, module_name)
    print('\n* Binary path: {}'.format(target))
    print('\n* Compute Trace XREFS')
    breaking_flows = search_breaking_control_flow(server.trace.search, target)
    trace_xrefs = compute_trace_xrefs(breaking_flows)
    print('\n* Convert Trace XREFS to IDA indirect XREFS')
    trace_xrefs = convert_to_ida_indirect_xrefs(trace_xrefs)
    print('\n* Apply XREFS in IDA')
    ida_apply_xrefs(trace_xrefs)
    print('\n* Finish')
def main(host, port):
    """Colour the IDB by execution frequency measured from a REVEN trace."""
    print('\n**** Coverage Info *****')
    server = reven2.RevenServer(host, port)
    print("\n* REVEN Server: {}:{}".format(host, port))
    module_name = os.path.basename(idc.GetInputFilePath()).lower()
    target = find_binary(server.ossi, module_name)
    print('\n* binary path: {}'.format(target))
    print('\n* Compute Trace coverage')
    cov = compute_coverage(server.trace.search, target)
    display_coverage(cov)
    print('\n* Compute frequency colors')
    freq_colors = colors.compute_frequency_colors(cov.values())
    display_colors(freq_colors)
    print('\n* Apply colors in IDA')
    ida_apply_colors(cov, freq_colors)
    print('\n* Finished')
def get_input_file_path():
    """Return the input file path via the API matching the IDA SDK generation.

    SDK <= 6.99 only has the legacy idc call; newer SDKs use ida_nalt.
    """
    if idaapi.IDA_SDK_VERSION <= 699:
        return idc.GetInputFilePath()
    return ida_nalt.get_input_file_path()
for ea, name in idautils.Names(): flag = idc.GetFlags(ea) if not idc.hasUserName(flag): continue seg_ea = idc.SegStart(ea) seg_name = idc.SegName(ea) if seg_name not in self.sections: continue sym_type = 'function' if idc.isCode(flag) else 'object' self.symbols[name] = (seg_name, ea - seg_ea, sym_type) def run(self): self.load_symbols_from_ida() err, err_msg = self.objcopy() return err, err_msg if is_ida: inp_file = idc.GetInputFilePath() if not os.path.isfile(inp_file): inp_file = idc.AskFile(0, '', 'Input ELF file') out_file = idc.AskFile(1, '', 'Output file') a = AddSym(inp_file, out_file) err, err_msg = a.run() if err != 0: idc.Warning(err_msg) if out_file != inp_file: os.remove(out_file) else: idc.Message('Saved to {}\n'.format(out_file))
UNUSED = None _YARA_MATCHES = [] # Codecs used to detect encoding of strings. CODE_PAGES = [ 'ascii', 'utf-32-be', 'utf-32-le', 'utf-16-be', 'utf-16-le', 'utf-8', # General (utf-7 omitted) 'gb18030', 'gbk', # Unified Chinese 'gb2312', 'hz', # Simplified Chinese 'big5hkscs', 'big5', # Traditional Chinese (cp950 omitted) 'koi8-r', 'iso8859-5', 'cp1251', 'mac-cyrillic', # Cyrillic (cp866, cp855 omitted) 'cp949', # Korean (johab, iso2022-kr omitted) 'iso8859-6', 'cp1256', # Arabic (cp864, cp720 omitted) 'latin1', # If all else fails, latin1 is always is successful. ] INPUT_FILE_PATH = idc.GetInputFilePath() # Put these here for increased robustness. Please don't depend on these very often. ENCODED_STRINGS = [] DECODED_STRINGS = [] # CODEC to use for displaying strings in IDA, etc. DISPLAY_CODE = 'cp437' if sys.platform == 'win32' else 'ascii' class SuperFunc_t(object): """ Description: Effectively extends func_t to also know its name and all its non-recursive xrefs and knows how to rename itself. Fields:
def launch_on_funcs(architecture, abi, funcs, test_set, map_addr=None,
                    jitter=None, buf_size=2000):
    """Launch identification on functions.
    @architecture: str standing for current architecture
    @abi: str standing for expected ABI
    @funcs: list of function addresses (int) to check
    @test_set: list of test sets to run
    Optional arguments:
    @map_addr: (optional) the base address where the binary has to be loaded
    if format is not recognized
    @jitter: (optional) jitter engine to use (gcc, tcc, llvm, python, qemu)
    @buf_size: (optional) number of argument to pass to each instance of
    sibyl. High number means speed; low number means less ressources and
    higher frequency of report
    """
    # Check Sibyl availability
    global identify_binary
    if not identify_binary:
        raise ValueError("A valid Sibyl path to find.py must be supplied")
    # Get binary information
    filename = str(idc.GetInputFilePath())
    nb_func = len(funcs)
    # Prepare run
    starttime = time.time()
    nb_found = 0
    add_map = []
    if isinstance(map_addr, int):
        add_map = ["-m", hex(map_addr)]
    # Launch identification
    print "Launch identification on %d function(s)" % nb_func
    options = ["-a", architecture, "-b", abi, "-o", "JSON"]
    for test_name in test_set:
        options += ["-t", test_name]
    if jitter is not None:
        options += ["-j", jitter]
    options += add_map
    res = {}
    # Process functions in batches of buf_size per Sibyl invocation.
    for i in xrange(0, len(funcs), buf_size):
        # Build command line
        addresses = funcs[i:i + buf_size]
        command_line = [identify_binary, "find"]
        command_line += options
        command_line += [filename]
        # NOTE(review): `addresses` are ints appended to a command line —
        # presumably parse_output stringifies them before spawning; confirm.
        command_line += addresses
        # Call Sibyl and keep only stdout
        for addr, candidates in parse_output(command_line):
            handle_found(addr, candidates)
            res[addr] = candidates
            nb_found += 1
        # Print current status and estimated time
        curtime = (time.time() - starttime)
        maxi = min(i + buf_size, len(funcs))
        estimatedtime = (curtime * nb_func) / maxi
        remaintime = estimatedtime - curtime
        print "Current: %.02f%% (sub_%s)| Estimated time remaining: %.02fs" % (
            ((100. / nb_func) * maxi), addresses[-1], remaintime)
    print "Finished ! Found %d candidates in %.02fs" % (
        nb_found, time.time() - starttime)
    return res
def activate(self, ctx):
    """Open the IOCTL details form for the chooser row selected in ctx."""
    row = ctx.chooser_selection.at(0)
    # The chooser selection index is offset by one relative to self.items.
    ioctl = self.items[row - 1]
    base_name = idc.GetInputFile().split('.')[0]
    drv = driverlib.Driver(idc.GetInputFilePath(), base_name)
    DisplayIOCTLSForm(ioctl, drv)
def run_bap_with(argument_string):
    """
    Run bap with the given argument_string.

    Uses the currently open file, dumps latest symbols from IDA and runs
    BAP with the argument_string

    Also updates the 'BAP View'
    """
    from bap.plugins.bap_view import BAP_View
    from bap.utils import config
    import ida
    import idc
    import tempfile

    check_and_configure_bap()
    bap_executable_path = config.get('bap_executable_path')
    if bap_executable_path is None:
        return  # The user REALLY doesn't want us to run it

    # Temp files collect BAP's output, the dumped symbols and the C header.
    args = {
        'bap_executable_path': bap_executable_path,
        'bap_output_file': tempfile.mkstemp(suffix='.out',
                                            prefix='ida-bap-')[1],
        'input_file_path': idc.GetInputFilePath(),
        'symbol_file_location': tempfile.mkstemp(suffix='.sym',
                                                 prefix='ida-bap-')[1],
        'header_path': tempfile.mkstemp(suffix='.h', prefix='ida-bap-')[1],
        'remaining_args': argument_string
    }

    bap_api_enabled = (config.get('enabled',
                                  default='0',
                                  section='bap_api').lower()
                       in ('1', 'true', 'yes'))

    ida.dump_symbol_info(args['symbol_file_location'])

    if bap_api_enabled:
        ida.dump_c_header(args['header_path'])
        idc.Exec("\
            \"{bap_executable_path}\" \
            --api-add=c:\"{header_path}\" \
            ".format(**args))

    command = ("\
        \"{bap_executable_path}\" \"{input_file_path}\" \
        --read-symbols-from=\"{symbol_file_location}\" --symbolizer=file \
        {remaining_args} \
        -d > \"{bap_output_file}\" 2>&1 \
        ".format(**args))

    idc.Exec(command)

    with open(args['bap_output_file'], 'r') as f:
        BAP_View.update(
            "BAP execution string\n" +
            "--------------------\n" +
            "\n" +
            '\n --'.join(('bap' + argument_string).split('--')) +
            "\n" +
            "\n" +
            "Output\n" +
            "------\n" +
            "\n" +
            f.read()
        )

    # Force close BAP View
    # This forces the user to re-open the new view if needed
    # This "hack" is needed since IDA decides to give a different BAP_View
    # class here, than the cls parameter it sends to BAP_View
    # TODO: Fix this
    import idaapi
    tf = idaapi.find_tform("BAP View")
    if tf:
        idaapi.close_tform(tf, 0)

    # Do a cleanup of all the temporary files generated/added
    # NOTE(review): `basename` backticks and `rm -f` assume a POSIX shell —
    # confirm this is not expected to work on Windows.
    if bap_api_enabled:
        idc.Exec("\
            \"{bap_executable_path}\" \
            --api-remove=c:`basename \"{header_path}\"` \
            ".format(**args))
    idc.Exec("\
        rm -f \
        \"{symbol_file_location}\" \
        \"{header_path}\" \
        \"{bap_output_file}\" \
        ".format(**args))
def main():
    """Build the IDB's call graph and print/persist its fuzzy-hash signatures.

    Classifies every function (normal/library/thunk), adds imports, connects
    non-flow xrefs, then emits MD5/SHA1/SHA256/ssdeep hashes of the graph
    pattern; in batch mode results are appended to ./out/result.
    """
    imp_funcs = []
    xrefs = []
    cg = CallGraph()
    file_name = idc.get_root_filename()
    file_path = idc.GetInputFilePath()

    def get_file_ssdeep():
        # ssdeep is optional; degrade to an explanatory string when missing.
        if 'ssdeep' in sys.modules:
            return ssdeep.hash_from_file(file_path)
        else:
            return 'No ssdeep Modules. Please Install ssdeep.'

    def imp_cb(ea, name, ord):
        # Import-enumeration callback: collect addresses, keep enumerating.
        imp_funcs.append(ea)
        return True

    if 'batch' in idc.ARGV:
        # Headless run: wait for auto-analysis before walking functions.
        idaapi.autoWait()

    for fea in Functions():
        func_flags = get_func_flags(fea)
        # NORMAL = 0
        # LIBRARY = 1
        # IMPORTED = 2
        # THUNK = 3
        if func_flags & FUNC_LIB:
            func_type = 1
        elif func_flags & FUNC_THUNK:
            func_type = 3
        else:
            func_type = 0
        cg.add_vertex(fea, func_type)
        cg.add_root(fea)
        items = FuncItems(fea)
        for item in items:
            for xref in XrefsFrom(item, 0):
                # https://www.hex-rays.com/products/ida/support/idadoc/313.shtml
                # Skip ordinary-flow xrefs; keep calls/jumps/data refs.
                if xref.type != fl_F:
                    xrefs.append([fea, xref.to])

    # List Import Functions and Add to cg
    num_imp_module = idaapi.get_import_module_qty()
    for i in range(0, num_imp_module):
        idaapi.enum_import_names(i, imp_cb)
    imp_funcs.sort()
    for imp_func_ea in imp_funcs:
        cg.add_vertex(imp_func_ea, 2)

    # Only connect edges whose destination is a known vertex.
    for xref in xrefs:
        if xref[1] in cg.vertices:
            cg.connect_vertex(xref[0], xref[1])

    cg.set_roots()
    for root in cg.roots:
        cg.build_graph_pattern(root)

    # Interactive run (no script args): print every signature.
    if len(idc.ARGV) == 0:
        print('Graph MD5: %s' % cg.get_graph_md5())
        print('Graph SHA1: %s' % cg.get_graph_sha1())
        print('Graph SHA256: %s' % cg.get_graph_sha256())
        print('Graph SSDEEP: %s' % cg.get_graph_ssdeep())
        print('File SSDEEP: %s' % get_file_ssdeep())

    if 'out_pattern' in idc.ARGV:
        if not os.path.isdir('./out'):
            os.mkdir('./out')
        f = open('./out/' + file_name + '.bin', 'wb')
        f.write(cg.graph_pattern)
        f.close()

    if 'batch' in idc.ARGV:
        if not os.path.isdir('./out'):
            os.mkdir('./out')
        f = open('./out/result', 'a+')
        f.write('%s,%s,%s,%s\n' % (file_name, cg.get_graph_md5(),
                                   cg.get_graph_ssdeep(), get_file_ssdeep()))
        f.close()
        # Batch mode terminates IDA when done.
        idc.Exit(0)
return if __name__ == '__main__': #清空输出窗口 form = idaapi.find_tform("Output window") idaapi.switchto_tform(form, True) idaapi.process_ui_action("msglist:Clear") #save to file path = os.path.abspath(__file__) path = os.path.realpath(__file__) path = os.path.dirname(path) # target_path = idc.GetInputFilePath() target_file = idc.GetInputFile() if idaapi.init_hexrays_plugin(): #print("Hex-rays version %s has been detected" % idaapi.get_hexrays_version()) pass else: load_plugin_decompiler(is_bit64) # #get_strings() sub_value = idaapi.ask_long(0x20, 'please input stack additional value') if sub_value is not None: #0x20 为在原来基础上再增加的空间大小 patch_vul_func(sub_value) else:
def main():
    """Export the current IDB into a BinNavi SQL database (ida2sql).

    Detects the architecture module, reads DB credentials from ida2sql.cfg
    (or prompts), creates/continues a module export, and in batch mode exits
    IDA with a status code encoding the module id and completion state.
    """
    global tm_start
    # Probe architecture modules until one claims the current processor.
    for mod in ('metapc', 'ppc', 'arm'):
        arch_mod = __import__('arch.%s' % mod, globals(), locals(), ['*'])
        arch = arch_mod.Arch()
        if arch:
            if arch.check_arch():
                # This is a valid module for the current architecure
                # so the search has finished
                log_message('Using architecture module [%s]' % mod)
                break
    else:
        log_message(
            'No module found to process the current architecure [%s]. Exiting.'
            % (arch.processor_name))
        return
    global instrumentation
    log_message('Initialization sucessful.')
    db_engine, db_host, db_name, db_user, db_password = (None, ) * 5
    batch_mode = False
    module_comment = ''
    process_sections = False
    # If the configuration filename has been fetched from the
    # environment variables, then use that.
    #
    if CONFIG_FILE_NAME:
        config_file_path = CONFIG_FILE_NAME
    # Otherwise fallback into the one expected in the IDA directory
    #
    else:
        config_file_path = os.path.join(idaapi.idadir(''), 'ida2sql.cfg')
    if os.path.exists(config_file_path):
        cfg = ConfigParser.ConfigParser()
        cfg.read(config_file_path)
        # NOTE(review): the [importing] options below are only read when a
        # [database] section exists — confirm that nesting is intended.
        if cfg.has_section('database'):
            if cfg.has_option('database', 'engine'):
                db_engine = getattr(DB_ENGINE, cfg.get('database', 'engine'))
            if cfg.has_option('database', 'host'):
                db_host = cfg.get('database', 'host')
            if cfg.has_option('database', 'schema'):
                db_name = cfg.get('database', 'schema')
            if cfg.has_option('database', 'user'):
                db_user = cfg.get('database', 'user')
            if cfg.has_option('database', 'password'):
                db_password = cfg.get('database', 'password')
            if cfg.has_option('importing', 'mode'):
                batch_mode = cfg.get('importing', 'mode')
                if batch_mode.lower() in ('batch', 'auto'):
                    batch_mode = True
            if cfg.has_option('importing', 'comment'):
                module_comment = cfg.get('importing', 'comment')
            if cfg.has_option('importing', 'process_sections'):
                process_sections = cfg.get('importing', 'process_sections')
                if process_sections.lower() in ('no', 'false'):
                    process_sections = False
                else:
                    process_sections = True
    # Anything missing from the config is asked interactively.
    if None in (db_engine, db_host, db_name, db_user, db_password):
        (db_engine, db_host, db_name, db_user,
         db_password) = query_configuration()
    if None in (db_engine, db_host, db_name, db_user, db_password):
        log_message('User cancelled the exporting.')
        return
    failed = False
    try:
        sqlexporter = SQLExporter(arch, db_engine, db=db_name, user=db_user,
                                  passwd=db_password, host=db_host,
                                  use_new_schema=USE_NEW_SCHEMA)
    except ImportError:
        print "Error connecting to the database, error importing required module: %s" % sys.exc_info(
        )[0]
        failed = True
    except Exception:
        print "Error connecting to the database, Reason: %s" % sys.exc_info(
        )[0]
        failed = True
    if failed:
        # Can't connect to the database, indicate that to BinNavi
        if batch_mode is True:
            idc.Exit(FATAL_CANNOT_CONNECT_TO_DATABASE)
        else:
            return
    if not sqlexporter.is_database_ready():
        if batch_mode is False:
            result = idc.AskYN(
                1,
                'Database has not been initialized yet. Do you want to create now the basic tables? (This step is performed only once)'
            )
        else:
            result = 1
        if result == 1:
            sqlexporter.init_database()
        else:
            log_message('User requested abort.')
            return
    # Batch orchestration state is passed in through the environment.
    iteration = os.environ.get('EXPORT_ITERATION', None)
    module_id = os.environ.get('MODULE_ID', None)
    if iteration is None and module_id == None:
        # Export manually
        print "Exporting manually ..."
        iteration = -1
        sqlexporter.set_callgraph_only(False)
        sqlexporter.set_exporting_manually(True)
        status = sqlexporter.new_module(idc.GetInputFilePath(),
                                        arch.get_architecture_name(),
                                        idaapi.get_imagebase(),
                                        module_comment, batch_mode)
    elif iteration is not None and module_id is not None:
        # Export the next k functions or the call graph
        sqlexporter.set_exporting_manually(False)
        sqlexporter.set_callgraph_only(int(iteration) == -1)
        sqlexporter.set_module_id(int(module_id))
        status = True
    else:
        sqlexporter.set_exporting_manually(False)
        status = sqlexporter.new_module(idc.GetInputFilePath(),
                                        arch.get_architecture_name(),
                                        idaapi.get_imagebase(),
                                        module_comment, batch_mode)
        sqlexporter.set_callgraph_only(False)
    if status is False:
        log_message('Export aborted')
        return
    elif status is None:
        log_message(
            'The database appears to contain data exported with different schemas, exporting not allowed.'
        )
        if batch_mode:
            idc.Exit(FATAL_INVALID_SCHEMA_VERSION)
    # Hook the exporter's processors into the instrumentation callbacks.
    instrumentation = Instrumentation()
    instrumentation.new_function_callable(sqlexporter.process_function)
    instrumentation.new_packet_callable(sqlexporter.process_packet)
    instrumentation.new_section_callable(sqlexporter.process_section)
    tm_start = time.time()
    already_imported = sqlexporter.db.get_already_imported()
    incomplete = process_binary(arch, process_sections, int(iteration),
                                already_imported)
    sqlexporter.finish()
    log_message(
        'Results: %d functions, %d instructions, %d basic blocks, %d address references'
        % (len(sqlexporter.exported_functions),
           len(sqlexporter.exported_instructions),
           sqlexporter.basic_blocks_next_id - 1,
           sqlexporter.address_references_values_count))
    log_message(
        'Results: %d expression substitutions, %d operand expressions, %d operand tuples'
        % (sqlexporter.expression_substitutions_values_count,
           sqlexporter.operand_expressions_values_count,
           sqlexporter.operand_tuples___operands_values_count))
    log_message('Exporting completed in %s' % get_time_delta_string())
    # If running in batch mode, exit when done
    # (exit code encodes module id; 0xFF = more functions remain,
    #  0xFE = functions done, plain 0 = call graph pass done).
    if batch_mode:
        if incomplete:
            shiftedModule = (sqlexporter.db.module_id << 0x10) | 0xFF
            idc.Exit(shiftedModule)
        elif not sqlexporter.callgraph_only:
            shiftedModule = (sqlexporter.db.module_id << 0x10) | 0xFE
            idc.Exit(shiftedModule)
        else:
            idc.Exit(0)
def main():
    """Drive the full class-hierarchy analysis of the current input binary.

    Loads the binary into angr, runs InforExtraction, reads its intermediate
    JSON dumps (vftable/vbtable/VTT/ctor/symbol), builds the ctor CFG, runs
    the static taint (overwrite) analysis and finally reconstructs and draws
    the inheritance tree.
    """
    # For command-line mode, uncomment the getopt block below and comment out
    # the IDA python calls, i.e. run InforExtraction as a standalone IDA
    # script and the rest outside IDA; useful when debugging.
    '''
    try:
        options,args = getopt.getopt(sys.argv[1:],"hf:", ["help","file="])
    except getopt.GetoptError:
        sys.exit()
    binary = None
    for name,value in options:
        if name in ("-h","--help"):
            usage()
            sys.exit()
        if name in ("-f","--file"):
            binary = value
    if binary == None:
        usage()
        sys.exit()
    '''
    print "[+]log: Start analysis"
    binary = idc.GetInputFilePath()
    isPIE = idc.GetDisasm(0)
    # Image base starts at 0.
    if len(isPIE) == 0:
        proj = angr.Project(binary,
                            load_options={'auto_load_libs': False,
                                          'extern_size': 0x800000})
    # Image base is non-zero: some ELF files need the base forced to 0, or
    # IDA's addresses would disagree with angr's.
    else:
        # In newer angr versions: custom_base_addr -> base_addr
        proj = angr.Project(binary,
                            load_options={'main_opts': {'custom_base_addr': 0},
                                          'auto_load_libs': False,
                                          'extern_size': 0x800000})
    isPE = proj.loader.all_pe_objects
    if len(isPE) == 0:
        filetype = "ELF"
    else:
        filetype = "PE"
    InforExtraction.main(filetype)
    # Load the intermediate JSON artefacts InforExtraction just produced.
    vftable_file = open("vftable", "r")
    vftable_jsonstr = vftable_file.read()
    vftable_list = json.loads(vftable_jsonstr)
    vftable_file.close()
    if filetype == "PE":
        vbtable_file = open("vbtable", "r")
        vbtable_jsonstr = vbtable_file.read()
        vbtable_list = json.loads(vbtable_jsonstr)
        vbtable_file.close()
        VTT_list = None
    elif filetype == "ELF":
        VTT_file = open("VTT", "r")
        VTT_jsonstr = VTT_file.read()
        VTT_list = json.loads(VTT_jsonstr)
        VTT_file.close()
        vbtable_list = None
    ctor_file = open("ctor", "r")
    ctor_jsonstr = ctor_file.read()
    ctor_list = json.loads(ctor_jsonstr)
    ctor_file.close()
    symbol_file = open("symbol", "r")
    symbol_jsonstr = symbol_file.read()
    symbol_list = json.loads(symbol_jsonstr)
    symbol_file.close()
    #print vftable_list
    #print vbtable_list
    #print ctor_list
    # Build the ctor CFG: start from every constructor and every vftable's
    # known destructor.
    start = time.time()
    start_points = []
    for ctor_addr in ctor_list:
        start_points.append(int(ctor_addr, 16))
    for vftable in vftable_list:
        if vftable_list[vftable]["dtor"] != 0:
            start_points.append(int(vftable_list[vftable]["dtor"], 16))
    mycfg = cfg.CFG(proj=proj, start_points=start_points,
                    symbol_list=symbol_list, thread_num=1)
    end = time.time()
    print "[+]log: Build ctor cfg completion. Time:%fs" % (end - start)
    #print_cfg(mycfg)
    print "[*]log: The number of analysis functions:%d" % len(mycfg.functions)
    # Run the overwrite (static taint) analysis.
    start = time.time()
    myoverwrite = StaticTaintAnalysis.StaticTaintAnalysis(
        proj, mycfg, vftable_list, vbtable_list, VTT_list, ctor_list,
        symbol_list, filetype)
    end = time.time()
    print "[+]log: Overwrite analysis completion. Time:%fs" % (end - start)
    #print_overwrite(myoverwrite)
    #sys.exit()
    # Generate the inheritance tree.
    start = time.time()
    inheritance_tree = HeuristicReasoning.HeuristicReasoning(
        proj, mycfg, myoverwrite.ctor_list, vftable_list, symbol_list)
    end = time.time()
    print "[+]log: Build inherTree completion. Time:%fs" % (end - start)
    inheritance_tree.statistics()
    #inheritance_tree.draw_ctor()
    print_CHT(inheritance_tree)
    inheritance_tree.draw()