def vm_load_pe(vm, fdata, align_s=True, load_hdr=True, name="", **kargs): """Load a PE in memory (@vm) from a data buffer @fdata @vm: VmMngr instance @fdata: data buffer to parse @align_s: (optional) If False, keep gaps between section @load_hdr: (optional) If False, do not load the NThdr in memory Return the corresponding PE instance. Extra arguments are passed to PE instanciation. If all sections are aligned, they will be mapped on several different pages Otherwise, a big page is created, containing all sections """ # Parse and build a PE instance pe = pe_init.PE(fdata, **kargs) # Check if all section are aligned aligned = True for section in pe.SHList: if section.addr & 0xFFF: aligned = False break if aligned: # Loader NT header if load_hdr: # Header length hdr_len = max(0x200, pe.NThdr.sizeofheaders) # Page minimum size min_len = min(pe.SHList[0].addr, 0x1000) # Get and pad the pe_hdr pe_hdr = pe.content[:hdr_len] + max( 0, (min_len - hdr_len)) * "\x00" vm.add_memory_page(pe.NThdr.ImageBase, PAGE_READ | PAGE_WRITE, pe_hdr, "%r: PE Header" % name) # Align sections size if align_s: # Use the next section address to compute the new size for i, section in enumerate(pe.SHList[:-1]): new_size = pe.SHList[i + 1].addr - section.addr section.size = new_size section.rawsize = new_size section.data = strpatchwork.StrPatchwork( section.data[:new_size]) section.offset = section.addr # Last section alignement last_section = pe.SHList[-1] last_section.size = (last_section.size + 0xfff) & 0xfffff000 # Pad sections with null bytes and map them for section in pe.SHList: data = str(section.data) data += "\x00" * (section.size - len(data)) attrib = PAGE_READ if section.flags & 0x80000000: attrib |= PAGE_WRITE vm.add_memory_page(pe.rva2virt(section.addr), attrib, data, "%r: %r" % (name, section.name)) return pe # At least one section is not aligned log.warning('PE is not aligned, creating big section') min_addr = 0 if load_hdr else None max_addr = None data = "" for i, section in enumerate(pe.SHList): if i < len(pe.SHList) - 1: # If it is not the last section, use next section address section.size = pe.SHList[i + 1].addr - section.addr section.rawsize = section.size section.offset = section.addr # Update min and max addresses if min_addr is None or section.addr < min_addr: min_addr = section.addr max_section_len = max(section.size, len(section.data)) if max_addr is None or section.addr + max_section_len > max_addr: max_addr = section.addr + max_section_len min_addr = pe.rva2virt(min_addr) max_addr = pe.rva2virt(max_addr) log.debug('Min: 0x%x, Max: 0x%x, Size: 0x%x', min_addr, max_addr, (max_addr - min_addr)) # Create only one big section containing the whole PE vm.add_memory_page(min_addr, PAGE_READ | PAGE_WRITE, (max_addr - min_addr) * "\x00") # Copy each sections content in memory for section in pe.SHList: log.debug('Map 0x%x bytes to 0x%x', len(section.data), pe.rva2virt(section.addr)) vm.set_mem(pe.rva2virt(section.addr), str(section.data)) return pe
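
# ---------------------------------------------------------------------------
# Usage sketch (illustrative addition, not part of the original module).
# It assumes the module-level names used above (pe_init, strpatchwork,
# PAGE_READ/PAGE_WRITE, log) are provided by Miasm's loader imports and a
# logging logger, and it assumes the miasm2-era API in which
# Machine("x86_32").jitter("python") builds a jitter whose .vm attribute is
# the VmMngr expected by vm_load_pe. "sample.exe" is a placeholder path.
if __name__ == "__main__":
    from miasm2.analysis.machine import Machine

    jitter = Machine("x86_32").jitter("python")
    with open("sample.exe", "rb") as fdesc:
        loaded_pe = vm_load_pe(jitter.vm, fdesc.read(), name="sample.exe")

    # Virtual address of the entry point of the freshly mapped PE
    print(hex(loaded_pe.rva2virt(loaded_pe.Opthdr.AddressOfEntryPoint)))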