def formatBadBytes(badbytes):
    """Convert a hex string of bad bytes into raw bytes.

    :param badbytes: hex-encoded string, e.g. ``'000a0d'``; its length
        must be even (two hex digits per byte)
    :return: the decoded bytes
    :raises RopperError: if the length is odd or the string contains
        non-hexadecimal characters
    """
    if len(badbytes) % 2 > 0:
        raise RopperError('The length of badbytes has to be a multiple of two')
    try:
        badbytes = unhexlify(badbytes)
    # Fix: was a bare `except:` which also swallowed KeyboardInterrupt and
    # SystemExit.  unhexlify only raises TypeError/ValueError (binascii.Error
    # is a ValueError subclass) on bad input.
    except (TypeError, ValueError):
        raise RopperError('Invalid characters in badbytes string')
    return badbytes
def addFile(self, name, bytes=None, arch=None, raw=False):
    """Open a binary and register it with this service.

    Rejects a file that is already registered and refuses to mix
    architectures with already-loaded files.
    """
    if self._getFileFor(name):
        raise RopperError('file is already added: %s' % name)
    resolved_arch = getArchitecture(arch) if arch else arch
    loader = Loader.open(name, bytes=bytes, raw=raw, arch=resolved_arch)
    # All loaded files must share one architecture.
    if len(self.__files) > 0 and self.__files[0].loader.arch != loader.arch:
        raise RopperError(
            'It is not supported to open file with different architectures! Loaded: %s; File to open: %s'
            % (str(self.__files[0].loader.arch), str(loader.arch)))
    self.__files.append(FileContainer(loader))
def getSection(self, name):
    """Return the section called *name* as a :class:`Section`.

    :raises RopperError: if no section with that name exists
    """
    for shdr in self._binary.sections:
        if shdr.name != name:
            continue
        # Offset is the virtual address rebased against the image base.
        offset = shdr.header.sh_addr - self._binary.imageBase
        return Section(shdr.name, shdr.raw, shdr.header.sh_addr, offset)
    raise RopperError('No such section: %s' % name)
def searchString(self, string='', name=None):
    """Search for printable strings in the data sections of opened files.

    :param string: regex-like filter; empty means "any run of two or
        more printable ASCII characters"
    :param name: restrict the search to one opened file, or all files
        when ``None``
    :return: dict mapping file name -> list of ``(address, match)`` tuples
    :raises RopperError: if *name* is given but not opened

    Fix: the address selection compared ``f.imageBase != None``; use the
    identity test ``is not None`` as required for None checks.
    """
    def search(f, string):
        data = []
        # Default pattern: two or more printable ASCII characters.
        if not string or string == '[ -~]{2}[ -~]*':
            string = '[ -~]{2}[ -~]*'
        else:
            string = f.arch.searcher.prepareFilter(string)
        sections = list(f.dataSections)
        string = string.encode('ascii')  # python 3 compatibility
        for section in sections:
            b = bytes(bytearray(section.bytes))
            for match in re.finditer(string, b):
                # Prefer the rebased address when an image base is set,
                # otherwise fall back to the section's virtual address.
                vaddr = f.imageBase + section.offset if f.imageBase is not None else section.virtualAddress
                data.append((match.start() + vaddr, match.group()))
        return data

    to_return = {}
    if not name:
        for file in self.__files:
            to_return[file.loader.fileName] = search(file.loader, string)
    else:
        fc = self._getFileFor(name)
        if not fc:
            raise RopperError('No such file opened: %s' % name)
        to_return[name] = search(fc.loader, string)
    return to_return
def semanticSearch(self, search, stableRegs=None, name=None):
    """Yield ``(file name, gadget)`` pairs matching a semantic search.

    :param search: the semantic constraint expression
    :param stableRegs: registers that must not be clobbered (default:
        none)
    :param name: search only the named file, or every opened file when
        ``None``
    :raises RopperError: if *name* is given but not opened

    Fix: ``stableRegs=[]`` was a mutable default argument shared across
    calls; replaced with the ``None`` sentinel idiom (same behavior for
    all callers).
    """
    if stableRegs is None:
        stableRegs = []
    count = 0
    if name:
        fc = self._getFileFor(name)
        if not fc:
            raise RopperError('No such file opened: %s' % name)
        s = fc.loader.arch.searcher
        for gadget in s.semanticSearch(fc.gadgets, search, self.options.inst_count, stableRegs):
            # count_of_findings == 0 means "unlimited".
            if self.options.count_of_findings == 0 or self.options.count_of_findings > count:
                yield (fc.name, gadget)
            else:
                break
            count += 1
        self.__saveCache(fc)
    else:
        for fc in self.__files:
            s = fc.loader.arch.searcher
            for gadget in s.semanticSearch(fc.gadgets, search, self.options.inst_count, stableRegs):
                if self.options.count_of_findings == 0 or self.options.count_of_findings > count:
                    yield (fc.name, gadget)
                else:
                    break
                count += 1
            self.__saveCache(fc)
def setArchitectureFor(self, name, arch):
    """Override the architecture of the opened file *name*.

    Reloads the file's gadgets when they were already loaded, so they
    are disassembled for the new architecture.

    :raises RopperError: if *name* is not an opened file
    """
    container = self.getFileFor(name)
    if not container:
        raise RopperError('No such file opened: %s' % name)
    container.loader.arch = getArchitecture(arch)
    if container.loaded:
        self.loadGadgetsFor(name)
def setImageBaseFor(self, name, imagebase):
    """Rebase the opened file *name* to *imagebase* and refresh gadgets.

    :raises RopperError: if *name* is not an opened file
    """
    container = self._getFileFor(name)
    if not container:
        raise RopperError('No such file opened: %s' % name)
    container.loader.imageBase = imagebase
    Gadget.IMAGE_BASES[container.loader.checksum] = container.loader.imageBase
    # Re-filter only when an address-dependent filter is active: badbytes,
    # or CFG-only filtering of PE files.  Parentheses make the original
    # precedence (`and` binds tighter than `or`) explicit.
    address_filter_active = self.options.badbytes or (self.options.cfg_only and container.type == Type.PE)
    if container.loaded and address_filter_active:
        container.gadgets = self.__prepareGadgets(container, container.allGadgets, container.type)
def _loadDefaultArch(self):
    """Derive the architecture from the ELF header of the loaded binary.

    Combines machine type, ELF class (32/64 bit) and endianness, plus the
    entry point, into a lookup via ``getArch``.

    :raises RopperError: wrapping any error raised while reading the header

    Fix: ``except BaseException`` also caught KeyboardInterrupt and
    SystemExit and converted them into a RopperError; narrowed to
    ``Exception``.
    """
    try:
        machine = elf.EM[self._binary.elfHeader.header.e_machine]
        cls = elf.ELFCLASS[self._binary.elfHeader.header.e_ident[elf.EI.CLASS]]
        # Endianness byte comes straight from the raw e_ident bytes.
        end = self._binary._bytes[elf.EI.DATA]
        return getArch((machine, cls, end), self._binary.elfHeader.header.e_entry)
    except Exception as e:
        raise RopperError(e)
def asm(self, code, arch='x86', format='hex'):
    """Assemble *code* for *arch* and return it in the requested format.

    :param format: one of ``'hex'``, ``'string'`` or ``'raw'``
    :raises RopperError: for any other format value
    """
    # Map the textual format names onto the Format enum.
    format_map = {'hex': Format.HEX, 'string': Format.STRING, 'raw': Format.RAW}
    if format not in format_map:
        raise RopperError(
            'Invalid format: %s\n Valid formats are: hex, string, raw' % format)
    return self.ropper.assemble(code, arch=getArchitecture(arch), format=format_map[format])
def load(self, binary):
    """Load previously stored gadgets for *binary* from the sqlite database.

    Reads the ``sections``, ``gadgets`` and ``lines`` tables, matches the
    stored sections against the binary's executable sections via an md5
    checksum, and rebuilds Gadget objects line by line.

    :param binary: the loaded binary whose executable sections are matched
    :return: list of Gadget objects, or ``None`` when sqlite3 is missing
    :raises RopperError: on a section checksum mismatch
    """
    if 'sqlite3' not in globals():
        self._printer.printError('sqlite is not installed!')
        return
    execSect = binary.executableSections
    gcount = 0    # running index into gadgetrows
    lcount = 0    # running index into linerows
    endcount = 0  # total gadget count, for progress reporting
    conn = sqlite3.connect(self.__dbname)
    c = conn.cursor()
    c.execute('select * from sections')
    sectionrows = c.fetchall()
    c.execute('select * from gadgets')
    gadgetrows = c.fetchall()
    c.execute('select * from lines')
    linerows = c.fetchall()
    # endcount is only needed for the progress bar, so it is only computed
    # when a printer is attached.  Row layout (by index): presumably
    # s[0]=id, s[1]=name, s[2]=offset, s[3]=gadget count, s[4]=md5 —
    # TODO confirm against the schema in the save/store counterpart.
    if self._printer:
        for i in sectionrows:
            endcount += i[3]
    gadgets = []
    sections = {}
    for s in sectionrows:
        # Match the stored section against the binary's executable
        # sections and verify the bytes have not changed since caching.
        for section in execSect:
            if s[1] == section.name and int(s[2]) == section.offset:
                if s[4] != hashlib.md5(section.bytes).hexdigest():
                    raise RopperError(
                        'wrong checksum: ' + s[4] + ' and ' + hashlib.md5(section.bytes).hexdigest())
                else:
                    sections[s[0]] = section
        # Rebuild the s[3] gadgets stored for this section; gadget rows are
        # stored in section order, so gcount walks gadgetrows sequentially.
        for g in range(s[3]):
            grow = gadgetrows[gcount]
            gcount += 1
            gadget = Gadget(binary, sections[grow[1]])
            gadgets.append(gadget)
            # grow[2] lines per gadget, consumed sequentially from linerows.
            for l in range(grow[2]):
                lrow = linerows[lcount]
                lcount += 1
                gadget.append(int(lrow[1]), lrow[3], lrow[4])
            if self._printer:
                self._printer.printProgress('loading gadgets...', float(gcount) / endcount)
    if self._printer:
        self._printer.finishProgress('gadgets loaded from: ' + self.__dbname)
    conn.close()
    return gadgets
def disassAddress(self, name, address, length):
    """Disassemble *length* instructions at *address* in file *name*.

    :param name: an opened file
    :param address: virtual address inside one of its executable sections
    :param length: instruction count (a negative value disassembles
        backwards in ``disassembleAddress``)
    :return: the disassembly string, or ``''`` when *address* is not in
        any executable section
    :raises RopperError: if the file is not opened or the address cannot
        be disassembled
    """
    fc = self.getFileFor(name)
    if not fc:
        raise RopperError('No such file opened: %s' % name)
    eSections = fc.loader.executableSections
    for section in eSections:
        if section.virtualAddress <= address and section.virtualAddress + section.size > address:
            ropper = Ropper()
            g = ropper.disassembleAddress(section, fc.loader, address,
                                          address - (fc.loader.imageBase + section.offset), length)
            if not g:
                raise RopperError('Cannot disassemble address: %s' % toHex(address))
            # Fix: the original negated a negative `length` here, but the
            # result was never used afterwards — dead code removed.
            return g.disassemblyString()
    return ''
def addFile(self, name, bytes=None, arch=None, raw=False):
    """Open a binary (optionally raw, optionally with a forced
    architecture) and register it with this service.

    :raises RopperError: if the file was already added
    """
    if self._getFileFor(name):
        raise RopperError('file is already added: %s' % name)
    resolved_arch = getArchitecture(arch) if arch else arch
    loader = Loader.open(name, bytes=bytes, raw=raw, arch=resolved_arch)
    self.__files.append(FileContainer(loader))
def search(self, search, quality=None, name=None):
    """Yield ``(file name, gadget)`` pairs matching the search string.

    Searches a single opened file when *name* is given, otherwise every
    opened file.

    :raises RopperError: if *name* is given but not opened
    """
    # Resolve the set of file containers to search first, then run one
    # common loop over it.
    if name:
        fc = self._getFileFor(name)
        if not fc:
            raise RopperError('No such file opened: %s' % name)
        targets = [fc]
    else:
        targets = self.__files
    for fc in targets:
        s = fc.loader.arch.searcher
        for gadget in s.search(fc.gadgets, search, quality):
            yield (fc.name, gadget)
def searchInstructions(self, code, name=None):
    """Search assembled *code* bytes in one or all opened files.

    :return: dict mapping file name -> matches, filtered for bad bytes
    :raises RopperError: if *name* is given but not opened
    """
    results = {}
    if name:
        fc = self.getFileFor(name)
        if not fc:
            raise RopperError('No such file opened: %s' % name)
        results[name] = self.__ropper.searchInstructions(fc.loader, code)
    else:
        for container in self.__files:
            results[container.loader.fileName] = self.__ropper.searchInstructions(container.loader, code)
    return self.__filterBadBytes(results)
def searchJmpReg(self, regs=None, name=None):
    """Search jmp/call-register gadgets in one or all opened files.

    :param regs: registers to search jumps through (default ``['esp']``)
    :param name: restrict the search to one opened file, or all when
        ``None``
    :return: dict mapping file name -> matches, filtered for bad bytes
    :raises RopperError: if *name* is given but not opened

    Fix: ``regs=['esp']`` was a mutable default argument shared across
    calls; replaced with the ``None`` sentinel idiom (same default
    behavior).
    """
    if regs is None:
        regs = ['esp']
    to_return = {}
    if not name:
        for file in self.__files:
            to_return[file.loader.fileName] = self.__ropper.searchJmpReg(file.loader, regs)
    else:
        fc = self._getFileFor(name)
        if not fc:
            raise RopperError('No such file opened: %s' % name)
        to_return[name] = self.__ropper.searchJmpReg(fc.loader, regs)
    return self.__filterBadBytes(to_return)
def createRopChain(self, chain, arch, options=None):
    """Build a ROP chain of type *chain* from the gadgets of all opened
    files matching *arch*.

    :param chain: chain type understood by :class:`RopChain`
    :param arch: architecture name; only files of this architecture
        contribute gadgets
    :param options: extra generator options (default: empty)
    :raises RopperError: if no generator supports this chain type

    Fix: ``options={}`` was a mutable default argument shared across
    calls (and handed to ``generator.create``, which may mutate it);
    replaced with the ``None`` sentinel idiom.
    """
    if options is None:
        options = {}
    callback = None
    if self.__callbacks and hasattr(self.__callbacks, '__ropchainMessages__'):
        callback = self.__callbacks.__ropchainMessages__
    b = []
    gadgets = {}
    # Collect loaders and gadgets of every file matching the architecture.
    for binary in self.__files:
        if str(binary.arch) == arch:
            gadgets[binary.loader] = binary.gadgets
            b.append(binary.loader)
    generator = RopChain.get(b, gadgets, chain, callback, unhexlify(self.options.badbytes))
    if not generator:
        raise RopperError('%s does not have support for %s chain generation at the moment. Its a future feature.' % (self.files[0].loader.arch.__class__.__name__, chain))
    return generator.create(options)
def extractValues(self, constraints, analysis, arch):
    """Parse constraint strings into ``(destination, source)`` register
    name pairs.

    :param constraints: iterable of constraint strings matching
        ``Searcher.CONSTRAINT_REGEX``
    :param analysis: unused here, kept for interface compatibility
    :param arch: architecture used to canonicalize register names
    :return: list of ``(reg1, reg2)`` tuples; ``reg2`` starts as ``None``
        when the source is a numeric literal
    :raises RopperError: on a malformed constraint
    """
    if not constraints:
        return []
    pairs = []
    for constraint in constraints:
        match = re.match(Searcher.CONSTRAINT_REGEX, constraint)
        if not match:
            raise RopperError('Not a valid constraint')
        # Strip memory-dereference brackets and canonicalize names.
        lhs = match.group(1).replace('[', '').replace(']', '')
        lhs = arch.getRegisterName(lhs)
        rhs = match.group(3).replace('[', '').replace(']', '')
        if rhs.isdigit() or isHex(rhs):
            rhs = None
        # NOTE(review): rhs may be None here, so this presumably relies on
        # getRegisterName accepting None — TODO confirm.
        rhs = arch.getRegisterName(rhs)
        pairs.append((lhs, rhs))
    return pairs
def semanticSearch(self, gadgets, constraints, maxLen, stableRegs=[]):
    """Yield gadgets that provably satisfy the given semantic constraints.

    Uses pyvex/archinfo-derived expressions and the z3 solver: for each
    candidate gadget the constraints are compiled, the gadget's symbolic
    expressions are sliced to the relevant registers, and the combined
    formula is checked.  Results are cached per gadget under a key built
    from the constraint set, so repeated searches skip the solver.

    :param gadgets: candidate gadgets (searched shortest-first up to
        *maxLen* instructions)
    :param constraints: constraint strings for the ConstraintCompiler
    :param maxLen: maximum gadget length to consider
    :param stableRegs: registers the gadget must not clobber
        (NOTE(review): mutable default argument — left unchanged here)
    :raises RopperError: when z3, archinfo or pyvex is not importable
    """
    if 'z3' not in globals():
        raise RopperError(
            'z3 has to be installed in order to use semantic search')
    if 'archinfo' not in globals():
        raise RopperError(
            'archinfo has to be installed in order to use semantic search')
    if 'pyvex' not in globals():
        raise RopperError(
            'pyvex has to be installed in order to use semantic search')
    to_return = []
    count = 0
    max_count = len(gadgets)
    count = 0
    found = False
    found_gadgets = []
    slicer = Slicer()
    # Cache key: order-insensitive fingerprint of the constraint set.
    constraint_key = " ".join(list(set(constraints)))
    # Prefer shorter gadgets: scan by length, shortest first.
    for glen in range(1, maxLen + 1):
        for gadget in gadgets:
            if len(gadget) != glen:
                continue
            semantic_info = gadget.info
            if not semantic_info:  # no symbolic analysis available
                continue
            #constraint_values = self.extractValues(constraints, semantic_info, gadget.arch)
            cc = z3helper.ConstraintCompiler(gadget.arch, semantic_info)
            constraint_values = cc.getSymbols(constraints)
            # Skip near-duplicates of already-found gadgets, gadgets that
            # never touch the constrained registers, and gadgets that
            # clobber registers the caller needs kept stable.
            if self.__isSimilarGadget(gadget, found_gadgets) \
                    or self.__areRegistersNotUsed(constraint_values, semantic_info) \
                    or self.__areStableRegistersClobbered(stableRegs, semantic_info.clobberedRegisters):
                continue
            constraint_string = cc.compile(';'.join(constraints))
            if constraint_key not in semantic_info.checkedConstraints:
                # Not cached yet: run the solver for this constraint set.
                set_reg = constraint_values[0][0]
                slice_instructions = []
                # Slice the gadget's expressions down to the written regs.
                slice = slicer.slice(
                    semantic_info.expressions,
                    [set_reg for set_reg, get_reg in constraint_values])
                count += 1
                solver = z3.Solver()
                expr_len = len(semantic_info.expressions)
                expr = None
                tmp = None
                # Conjoin all sliced expressions (built as a z3 source
                # string, materialized by ExpressionBuilder below).
                for inst in slice.expressions:
                    # tmp = inst
                    # if tmp == False:
                    #     continue
                    if expr is None:
                        expr = inst
                    else:
                        expr = 'And(%s, %s)' % (expr, inst)
                expr = ExpressionBuilder().build(semantic_info.regs,
                                                 semantic_info.mems,
                                                 expr,
                                                 constraint_string)
                solver.add(expr)
                # unsat here means the gadget satisfies the constraints
                # (presumably the negated constraint is checked — the
                # negation would happen inside ConstraintCompiler/
                # ExpressionBuilder; TODO confirm).
                if solver.check() == z3.unsat:
                    found = True
                    found_gadgets.append(gadget)
                    semantic_info.checkedConstraints[constraint_key] = True
                    yield gadget
                else:
                    semantic_info.checkedConstraints[
                        constraint_key] = False
            elif semantic_info.checkedConstraints[constraint_key]:
                # Cached positive result: yield without re-solving.
                count += 1
                found_gadgets.append(gadget)
                yield gadget
            else:
                # Cached negative result.
                count += 1
def _setGadgets(self, name, gadgets):
    """Attach *gadgets* to the opened file *name* and rebuild its
    filtered gadget view.

    :raises RopperError: if *name* is not an opened file
    """
    container = self.getFileFor(name)
    if not container:
        raise RopperError('No such file opened: %s' % name)
    container.allGadgets = gadgets
    container.gadgets = self.__prepareGadgets(container, container.allGadgets, container.type)
def __loadCache(self, file):
    """Load cached gadgets for *file* from the cache folder.

    Two cache layouts exist: a single cache file, read directly, and a
    split cache (``<name>_1`` .. ``<name>_CACHE_FILE_COUNT``), read either
    sequentially or — on multi-core non-Windows systems — in parallel via
    worker processes.  Progress is reported through the optional
    callbacks object.

    :param file: the file container whose cache name is derived via
        ``__getCacheFileName``
    :return: list of gadgets (sorted in the multiprocessing path), or
        ``None`` when no cache exists or loading fails
    """
    mp = False
    nan = 0
    processes = []
    single = False
    cache_file = None
    try:
        temp = RopperService.CACHE_FOLDER
        cache_file = temp + os.path.sep + self.__getCacheFileName(file)
        if not os.path.exists(cache_file):
            # No single-file cache; check for the split cache layout.
            if not os.path.exists(cache_file + '_%d' % 1):
                return
            else:
                if isWindows():
                    # Split caches are not supported on Windows.
                    raise RopperError('Cache has to be cleared.')
                # Parallel loading only pays off with more than one core.
                mp = True and multiprocessing.cpu_count() > 1
        else:
            single = True
        if self.__callbacks and hasattr(self.__callbacks, '__message__'):
            self.__callbacks.__message__('Load gadgets from cache')
        if self.__callbacks and hasattr(self.__callbacks, '__gadgetSearchProgress__'):
            self.__callbacks.__gadgetSearchProgress__(None, [], 0)
        if not mp:
            all_gadgets = []
            if single:
                with open(cache_file, 'rb') as f:
                    data = f.read()
                # NOTE(review): eval() of (zlib-decoded) cache content —
                # executes whatever the cache file contains; safe only if
                # the cache folder is trusted.
                all_gadgets.extend(eval(decode(data, 'zip')))
                if self.__callbacks and hasattr(
                        self.__callbacks, '__gadgetSearchProgress__'):
                    self.__callbacks.__gadgetSearchProgress__(
                        None, all_gadgets, 1.0)
            else:
                # Sequentially read every existing cache part.
                for i in range(1, RopperService.CACHE_FILE_COUNT + 1):
                    if os.path.exists(cache_file + '_%d' % i):
                        with open(cache_file + '_%d' % i, 'rb') as f:
                            data = f.read()
                        all_gadgets.extend(eval(decode(data, 'zip')))
                        if self.__callbacks and hasattr(
                                self.__callbacks, '__gadgetSearchProgress__'):
                            self.__callbacks.__gadgetSearchProgress__(
                                None, all_gadgets,
                                float(i) / RopperService.CACHE_FILE_COUNT)
            return all_gadgets
        else:
            # Parallel path: workers pull cache-part names from fqueue and
            # push gadget lists to gqueue; one None per worker terminates it.
            count = min(multiprocessing.cpu_count(), RopperService.CACHE_FILE_COUNT)
            gqueue = multiprocessing.Queue()
            fqueue = multiprocessing.JoinableQueue()
            for i in range(1, RopperService.CACHE_FILE_COUNT + 1):
                fqueue.put(cache_file + '_%d' % i)
            all_gadgets = []
            for i in range(count):
                p = multiprocessing.Process(
                    target=self.__loadCachePerProcess, args=(fqueue, gqueue))
                p.start()
                processes.append(p)
            for i in range(count):
                fqueue.put(None)
            # One result per cache part is expected on gqueue.
            for i in range(RopperService.CACHE_FILE_COUNT):
                gadgets = gqueue.get()
                all_gadgets.extend(gadgets)
                if self.__callbacks and hasattr(
                        self.__callbacks, '__gadgetSearchProgress__'):
                    self.__callbacks.__gadgetSearchProgress__(
                        None, all_gadgets,
                        float(i + 1) / RopperService.CACHE_FILE_COUNT)
            return sorted(all_gadgets, key=Gadget.simpleInstructionString)
    except KeyboardInterrupt:
        # User abort: stop workers; fall through returning None.
        if mp:
            for p in processes:
                if p and p.is_alive():
                    p.terminate()
    except BaseException as e:
        # NOTE(review): very broad catch — any failure (even SystemExit)
        # tears down the workers and deletes the (presumably corrupt)
        # split cache parts, then returns None so gadgets are re-searched.
        if mp:
            for p in processes:
                if p and p.is_alive():
                    p.terminate()
        if cache_file:
            for i in range(1, RopperService.CACHE_FILE_COUNT + 1):
                if os.path.exists(cache_file + '_%d' % i):
                    os.remove(cache_file + '_%d' % i)