def do_reloc(ea, differential):
    """Record an OFF32 fixup for the dword stored at *ea*.

    ea           -- linear address holding a 32-bit pointer
    differential -- rebase delta added to the stored pointer (0 = leave it)
    """
    fixup = idaapi.fixup_data_t(idaapi.FIXUP_OFF32)
    pointer = idaapi.get_dword(ea)
    fixup.off = pointer
    if differential != 0:
        # Patch the stored dword and keep the fixup target in sync with it.
        fixup.off += differential
        idaapi.set_dword(ea, pointer + differential)
    # NOTE(review): the segment lookup uses the pre-rebase pointer even when
    # differential != 0 — confirm that is intended.
    seg = idaapi.getseg(pointer)
    if seg is not None:
        fixup.sel = seg.sel
        # Express the fixup offset relative to the segment's paragraph base.
        fixup.off -= idaapi.sel2para(seg.sel) * 16
    idaapi.set_fixup(ea, fixup)
def load_cro(li, is_crs):
    """Load a 3DS CRO/CRS relocatable module into the IDA database.

    li     -- loader input (IDA file-like object)
    is_crs -- True for a CRS (static module, mapped at base 0); a CRO is
              mapped at an arbitrary non-zero base instead.

    NOTE(review): this file contains a second, later definition of
    load_cro which shadows this one at import time — confirm which of
    the two versions is meant to survive.
    """
    if is_crs:
        base = 0
    else:
        base = 0x00100000  # arbitrary

    def read_cstring(offset):
        # Read a NUL-terminated string at *offset*. Also stop on an empty
        # read (EOF) so a truncated file cannot spin forever — the original
        # loop only checked for '\0'.
        li.seek(offset)
        chars = ""
        while True:
            c = li.read(1)
            if not c or c == '\0':
                break
            chars += c
        return chars

    # CRO header: 46 little-endian dwords starting at file offset 0x80.
    li.seek(0x80)
    (Magic, NameOffset, NextCRO, PreviousCRO, FileSize, BssSize,
     FixedSize, UnknownZero, UnkSegmentTag, OnLoadSegmentTag,
     OnExitSegmentTag, OnUnresolvedSegmentTag, CodeOffset, CodeSize,
     DataOffset, DataSize, ModuleNameOffset, ModuleNameSize,
     SegmentTableOffset, SegmentNum, ExportNamedSymbolTableOffset,
     ExportNamedSymbolNum, ExportIndexedSymbolTableOffset,
     ExportIndexedSymbolNum, ExportStringsOffset, ExportStringsSize,
     ExportTreeTableOffset, ExportTreeNum, ImportModuleTableOffset,
     ImportModuleNum, ExternalPatchTableOffset, ExternalPatchNum,
     ImportNamedSymbolTableOffset, ImportNamedSymbolNum,
     ImportIndexedSymbolTableOffset, ImportIndexedSymbolNum,
     ImportAnonymousSymbolTableOffset, ImportAnonymousSymbolNum,
     ImportStringsOffset, ImportStringsSize,
     StaticAnonymousSymbolTableOffset, StaticAnonymousSymbolNum,
     InternalPatchTableOffset, InternalPatchNum,
     StaticPatchTableOffset, StaticPatchNum) = struct.unpack(
        "<IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII",
        li.read(0x138 - 0x80))

    if not is_crs:
        li.file2base(0, base, base + FileSize, 0)
    idaapi.add_segm(0, base, base + 0x80, "header", "RODATA")
    idaapi.add_segm(0, base + SegmentTableOffset, base + DataOffset,
                    "tables", "RODATA")

    # set segments
    li.seek(SegmentTableOffset)
    segmentDic = [("CODE", ".text"), ("DATA", ".rodata"),
                  ("DATA", ".data"), ("BSS", ".bss")]
    segmentAddress = []
    for i in range(SegmentNum):
        SegmentOffset, SegmentSize, SegmentType = struct.unpack(
            "<III", li.read(12))
        if SegmentType == 3:
            # .bss has no file backing: park it at a fixed address and mark
            # the range as virtually allocated.
            SegmentOffset = 0x08000000
            idaapi.enable_flags(base + SegmentOffset,
                                base + SegmentOffset + SegmentSize,
                                idaapi.STT_VA)
        segmentAddress.append(base + SegmentOffset)
        if SegmentSize:
            idaapi.add_segm(0, segmentAddress[i],
                            segmentAddress[i] + SegmentSize,
                            segmentDic[SegmentType][1],
                            segmentDic[SegmentType][0])

    # do internal relocations
    li.seek(InternalPatchTableOffset)
    for i in range(InternalPatchNum):
        target, patch_type, source, _, _, shift = struct.unpack(
            "<IBBBBI", li.read(12))
        target_offset = DecodeTag(segmentAddress, target)
        source_offset = segmentAddress[source] + shift
        if patch_type == 2:
            # absolute 32-bit address
            value = source_offset
        elif patch_type == 3:
            # 32-bit offset relative to the patched location (mod 2**32)
            rel = source_offset - target_offset
            if rel < 0:
                rel += 0x100000000
            value = rel
        else:
            # Unknown patch type: the original fell through and reused a
            # stale/undefined `value`; skip the entry instead.
            continue
        idaapi.patch_long(target_offset, value)
        f = idaapi.fixup_data_t()
        f.type = idaapi.FIXUP_OFF32
        f.off = value
        idaapi.set_fixup(target_offset, f)

    # import
    li.seek(ImportNamedSymbolTableOffset)
    importNamedSymbolTable = []
    for i in range(ImportNamedSymbolNum):
        importNamedSymbolTable.append(struct.unpack('<II', li.read(8)))
    for nameOffset, batchOffset in importNamedSymbolTable:
        do_import_batch(li, segmentAddress, batchOffset,
                        read_cstring(nameOffset))

    li.seek(ImportModuleTableOffset)
    module = []
    for i in range(ImportModuleNum):
        module.append(struct.unpack('<IIIII', li.read(20)))
    for moduleNameOffset, indexed, indexedNum, anonymous, anonymousNum in module:
        mname = read_cstring(moduleNameOffset)
        indexeds = []
        li.seek(indexed)
        for i in range(indexedNum):
            indexeds.append(struct.unpack('<II', li.read(8)))
        anonymouses = []
        li.seek(anonymous)
        for i in range(anonymousNum):
            anonymouses.append(struct.unpack('<II', li.read(8)))
        for index, batchOffset in indexeds:
            do_import_batch(li, segmentAddress, batchOffset,
                            "%s_%d" % (mname, index))
        for tag, batchOffset in anonymouses:
            do_import_batch(li, segmentAddress, batchOffset,
                            "%s_%08X" % (mname, tag))

    # export
    li.seek(ExportNamedSymbolTableOffset)
    exportNamedSymbolTable = []
    for i in range(ExportNamedSymbolNum):
        exportNamedSymbolTable.append(struct.unpack('<II', li.read(8)))
    for nameOffset, target in exportNamedSymbolTable:
        target_offset = DecodeTag(segmentAddress, target)
        name = read_cstring(nameOffset)
        if idaapi.segtype(target_offset) == idaapi.SEG_CODE:
            target_offset &= ~1  # strip the Thumb bit from code exports
        idaapi.add_entry(target_offset, target_offset, name,
                         idaapi.segtype(target_offset) == idaapi.SEG_CODE)
        idaapi.make_name_public(target_offset)

    li.seek(ExportIndexedSymbolTableOffset)
    for i in range(ExportIndexedSymbolNum):
        target, = struct.unpack('<I', li.read(4))
        target_offset = DecodeTag(segmentAddress, target)
        if idaapi.segtype(target_offset) == idaapi.SEG_CODE:
            target_offset &= ~1  # strip the Thumb bit
        idaapi.add_entry(i, target_offset, "indexedExport_%d" % i,
                         idaapi.segtype(target_offset) == idaapi.SEG_CODE)
        idaapi.make_name_public(target_offset)
def load_cro(li, is_crs):
    """Load a 3DS CRO/CRS relocatable module into the IDA database.

    li     -- loader input (IDA file-like object)
    is_crs -- True for a CRS (static module, mapped at base 0); a CRO is
              mapped at an arbitrary non-zero base instead.

    NOTE(review): duplicate definition — this later load_cro shadows an
    earlier one in this file (which additionally creates header/tables
    segments and strips the Thumb bit from code exports). Confirm which
    version is intended; this one's behavior is preserved as-is.
    """
    if is_crs:
        base = 0
    else:
        base = 0x00100000  # arbitrary

    def read_cstring(offset):
        # Read a NUL-terminated string at *offset*. Also stop on an empty
        # read (EOF) so a truncated file cannot spin forever — the original
        # loop only checked for '\0'.
        li.seek(offset)
        chars = ""
        while True:
            c = li.read(1)
            if not c or c == '\0':
                break
            chars += c
        return chars

    # CRO header: 46 little-endian dwords starting at file offset 0x80.
    li.seek(0x80)
    (Magic, NameOffset, NextCRO, PreviousCRO, FileSize, BssSize,
     FixedSize, UnknownZero, UnkSegmentTag, OnLoadSegmentTag,
     OnExitSegmentTag, OnUnresolvedSegmentTag, CodeOffset, CodeSize,
     DataOffset, DataSize, ModuleNameOffset, ModuleNameSize,
     SegmentTableOffset, SegmentNum, ExportNamedSymbolTableOffset,
     ExportNamedSymbolNum, ExportIndexedSymbolTableOffset,
     ExportIndexedSymbolNum, ExportStringsOffset, ExportStringsSize,
     ExportTreeTableOffset, ExportTreeNum, ImportModuleTableOffset,
     ImportModuleNum, ExternalPatchTableOffset, ExternalPatchNum,
     ImportNamedSymbolTableOffset, ImportNamedSymbolNum,
     ImportIndexedSymbolTableOffset, ImportIndexedSymbolNum,
     ImportAnonymousSymbolTableOffset, ImportAnonymousSymbolNum,
     ImportStringsOffset, ImportStringsSize,
     StaticAnonymousSymbolTableOffset, StaticAnonymousSymbolNum,
     InternalPatchTableOffset, InternalPatchNum,
     StaticPatchTableOffset, StaticPatchNum) = struct.unpack(
        "<IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII",
        li.read(0x138 - 0x80))

    if not is_crs:
        li.file2base(0, base, base + FileSize, 0)

    # set segments
    li.seek(SegmentTableOffset)
    segmentDic = [("CODE", ".text"), ("DATA", ".rodata"),
                  ("DATA", ".data"), ("BSS", ".bss")]
    segmentAddress = []
    for i in range(SegmentNum):
        SegmentOffset, SegmentSize, SegmentType = struct.unpack(
            "<III", li.read(12))
        if SegmentType == 3:
            # .bss has no file backing: park it at a fixed address and mark
            # the range as virtually allocated.
            SegmentOffset = 0x08000000
            idaapi.enable_flags(base + SegmentOffset,
                                base + SegmentOffset + SegmentSize,
                                idaapi.STT_VA)
        segmentAddress.append(base + SegmentOffset)
        if SegmentSize:
            idaapi.add_segm(0, segmentAddress[i],
                            segmentAddress[i] + SegmentSize,
                            segmentDic[SegmentType][1],
                            segmentDic[SegmentType][0])

    # do internal relocations
    li.seek(InternalPatchTableOffset)
    for i in range(InternalPatchNum):
        target, patch_type, source, _, _, shift = struct.unpack(
            "<IBBBBI", li.read(12))
        target_offset = DecodeTag(segmentAddress, target)
        source_offset = segmentAddress[source] + shift
        if patch_type == 2:
            # absolute 32-bit address
            value = source_offset
        elif patch_type == 3:
            # 32-bit offset relative to the patched location (mod 2**32)
            rel = source_offset - target_offset
            if rel < 0:
                rel += 0x100000000
            value = rel
        else:
            # Unknown patch type: the original fell through and reused a
            # stale/undefined `value`; skip the entry instead.
            continue
        idaapi.patch_long(target_offset, value)
        f = idaapi.fixup_data_t()
        f.type = idaapi.FIXUP_OFF32
        f.off = value
        idaapi.set_fixup(target_offset, f)

    # import
    li.seek(ImportNamedSymbolTableOffset)
    importNamedSymbolTable = []
    for i in range(ImportNamedSymbolNum):
        importNamedSymbolTable.append(struct.unpack('<II', li.read(8)))
    for nameOffset, batchOffset in importNamedSymbolTable:
        do_import_batch(li, segmentAddress, batchOffset,
                        read_cstring(nameOffset))

    li.seek(ImportModuleTableOffset)
    module = []
    for i in range(ImportModuleNum):
        module.append(struct.unpack('<IIIII', li.read(20)))
    for moduleNameOffset, indexed, indexedNum, anonymous, anonymousNum in module:
        mname = read_cstring(moduleNameOffset)
        indexeds = []
        li.seek(indexed)
        for i in range(indexedNum):
            indexeds.append(struct.unpack('<II', li.read(8)))
        anonymouses = []
        li.seek(anonymous)
        for i in range(anonymousNum):
            anonymouses.append(struct.unpack('<II', li.read(8)))
        for index, batchOffset in indexeds:
            do_import_batch(li, segmentAddress, batchOffset,
                            "%s_%d" % (mname, index))
        for tag, batchOffset in anonymouses:
            do_import_batch(li, segmentAddress, batchOffset,
                            "%s_%08X" % (mname, tag))

    # export
    li.seek(ExportNamedSymbolTableOffset)
    exportNamedSymbolTable = []
    for i in range(ExportNamedSymbolNum):
        exportNamedSymbolTable.append(struct.unpack('<II', li.read(8)))
    for nameOffset, target in exportNamedSymbolTable:
        target_offset = DecodeTag(segmentAddress, target)
        name = read_cstring(nameOffset)
        idaapi.add_entry(target_offset, target_offset, name,
                         idaapi.segtype(target_offset) == idaapi.SEG_CODE)
        idaapi.make_name_public(target_offset)

    li.seek(ExportIndexedSymbolTableOffset)
    for i in range(ExportIndexedSymbolNum):
        target, = struct.unpack('<I', li.read(4))
        target_offset = DecodeTag(segmentAddress, target)
        idaapi.add_entry(i, target_offset, "indexedExport_%d" % i,
                         idaapi.segtype(target_offset) == idaapi.SEG_CODE)
        idaapi.make_name_public(target_offset)
def load_file(li, neflags, format): # Read in the bFLT header fields li.seek(0) (magic, version, entry, data_start, data_end, bss_end, stack_size, reloc_start, reloc_count, flags) = struct.unpack(">IIIIIIIIII", li.read(4 * 10)) # Check for the GZIP flag. # The loader doesn't de-compress GZIP'd files, as these can be easily decompressed with external tools prior to loading the file into IDA if (flags & FLAGS_GZIP) == FLAGS_GZIP: Warning( "Code/data is GZIP compressed. You probably want to decompress the bFLT file with the flthdr or gunzip_bflt utilities before loading it into IDA." ) # Load the file data into IDA li.file2base(BFLT_HEADER_SIZE, BFLT_HEADER_SIZE, data_end, True) # Add the .text .data and .bss segments idaapi.add_segm(0, BFLT_HEADER_SIZE, data_start, ".text", "CODE") idaapi.add_segm(0, data_start, data_end, ".data", "DATA") idaapi.add_segm(0, data_end, bss_end, ".bss", "BSS") if DEBUG: print "Created File Segments: " print "\t.text 0x%.8X - 0x%.8X" % (BFLT_HEADER_SIZE, data_start) print "\t.data 0x%.8X - 0x%.8X" % (data_start, data_end) print "\t.bss 0x%.8X - 0x%.8X" % (data_end, bss_end) # Entry point is at the beginning of the .text section idaapi.add_entry(entry, entry, "_start", 1) # Set default processor idaapi.set_processor_type(DEFAULT_CPU, SETPROC_ALL) # Explicitly set 32 bit addressing on .text segment idaapi.set_segm_addressing(idaapi.getseg(entry), 1) # prepare structure for set_fixup() fd = idaapi.fixup_data_t() fd.type = idaapi.FIXUP_OFF32 # Is there a global offset table? if (flags & FLAGS_GOTPIC) == FLAGS_GOTPIC: # Add a reptable comment and name the offset so that all references to GOT are obvious MakeRptCmt(data_start, "GLOBAL_OFFSET_TABLE") MakeName(data_start, "GOT") if DEBUG: print "Global Offset Table detected, patching..." # GOT starts at the beginning of the data section; loop through the data section, patching up valid GOT entries. 
i = data_start while i < data_end: # Get the next GOT entry li.seek(i) got_entry = struct.unpack("<I", li.read(4))[0] # The last GOT entry is -1 if got_entry == 0xFFFFFFFF: if DEBUG: print "Finished processing Global Offset Table." break # All other non-zero entries are valid GOT entries elif got_entry > 0: # The actual data is located at <original GOT entry> + <BFLT_HEADER_SIZE> new_entry = got_entry + BFLT_HEADER_SIZE if DEBUG: print "Replacing GOT entry value 0x%.8X with 0x%.8X at offset 0x%.8X" % ( got_entry, new_entry, i) # Replace the GOT entry with the correct pointer idaapi.put_long(i, new_entry) # add info about relocation to help analyzer fd.off = new_entry idaapi.set_fixup(i, fd) # Make each GOT entry a DWORD MakeDword(i) # Point i at the next GOT entry address i = i + 4 # Patch relocation addresses for i in range(0, reloc_count): try: # Get the next relocation entry. # Relocation entry = <address of bytes to be patched> - <BFLT_HEADER_SIZE> li.seek(reloc_start + (i * 4)) reloc_offset = struct.unpack(">I", li.read(4))[0] + BFLT_HEADER_SIZE # Sanity check, make sure the relocation offset is in a defined segment if reloc_offset < bss_end: try: # reloc_offset + base_offset == <pointer to actual data> - <BFLT_HEADER_SIZE> li.seek(reloc_offset) reloc_val = struct.unpack(">I", li.read(4))[0] if reloc_val == 0: # skip zero relocs # see fs/binfmt_flat.c if DEBUG: print "Skipping zero reloc at (0x%.8X)" % reloc_offset continue reloc_data_offset = reloc_val + BFLT_HEADER_SIZE if DEBUG: print "Patching reloc: (0x%.8X) == 0x%.8X" % ( reloc_offset, reloc_data_offset) # Replace pointer at reloc_offset with the address of the actual data idaapi.put_long(reloc_offset, reloc_data_offset) # add info about relocation to help analyzer fd.off = reloc_data_offset idaapi.set_fixup(reloc_offset, fd) except Exception, e: print "Error patching relocation entry #%d: %s" % (i, str(e)) elif DEBUG: print "Relocation entry #%d outside of defined file sections, skipping..." % i