def dbg_mem_read(state):
    """Debug hook: log every memory read (value expression and source address)."""
    read_addr = state.inspect.mem_read_address
    read_expr = state.inspect.mem_read_expr
    message = "state{}: read {} from addr {}".format(
        state.spectre.uid, describeAst(read_expr), describeAst(read_addr))
    l.info(message)
def detected_spectre_write(state):
    """Report an unsafe write to stdout and record the violation on the state's spectre plugin."""
    write_addr = state.inspect.mem_write_address
    write_expr = state.inspect.mem_write_expr
    # one concrete assignment of the top-level arguments that satisfies the current constraints
    satisfying_args = {}
    for (arg_name, (arg_bvs, _)) in state.globals['args'].items():
        satisfying_args[arg_name] = state.solver.eval(arg_bvs)
    report = "\n!!!!!!!! UNSAFE WRITE !!!!!!!!\n Instruction Address {}\n Write Address {}\n Write Value {}\n A set of argument values meeting constraints is: {}\n constraints were {}\n".format(
        hex(state.addr),
        describeAst(write_addr),
        describeAst(write_expr),
        satisfying_args,
        state.solver.constraints)
    print(report)
    # remember the violation so the exploration driver can find it later
    state.spectre.violation = ('write', state.addr, write_addr, write_expr)
def display(self):
    """
    Return a string describing the MemoryLayout in detail
    """
    pieces = ["\nSecret intervals:"]
    for (lo, hi) in self.secretIntervals:
        pieces.append("\n[{}, {})".format(describeAst(lo), describeAst(hi)))
    pieces.append("\nAssignments:")
    for (addr, (val, bits)) in self.concreteAssignments.items():
        pieces.append("\nAddress {} gets value {}, {} bits".format(describeAst(addr), describeAst(val), bits))
    pieces.append("\nNot-secret addresses:")
    pieces.extend("\n{}".format(describeAst(addr)) for addr in self.notSecretAddresses)
    return ''.join(pieces)
def run(self, type_int):
    """
    Stub for OpenSSL's EVP_PKEY_meth_find.

    Searches the binary's static list of EVP_PKEY_METHOD objects for one whose
    first field (the pkey_id) can equal the provided `type_int` argument, and
    returns that method's address, constraining `type_int` to equal its id.

    Raises ValueError if no candidate method symbol matches.
    """
    l.info("stubbing out a call to EVP_PKEY_meth_find")
    # In my current understanding, this method searches through a static list of EVP_PKEY_METHOD objects
    # looking for one that has its first field equal to the provided `type_int` argument.
    # We simply perform this functionality here rather than symexing the binary search.
    names = [
        "rsa_pkey_meth", "dh_pkey_meth", "dsa_pkey_meth", "ec_pkey_meth",
        "hmac_pkey_meth", "cmac_pkey_meth", "rsa_pss_pkey_meth", "dhx_pkey_meth",
        "scrypt_pkey_meth", "tls1_prf_pkey_meth", "ecx25519_pkey_meth", "ecx448_pkey_meth",
        "hkdf_pkey_meth", "poly1305_pkey_meth", "siphash_pkey_meth", "ed25519_pkey_meth",
        "ed448_pkey_meth", "sm2_pkey_meth"
    ]
    for name in names:
        # BUGFIX: find_symbol() returns None when the symbol is absent from this
        # binary; the original dereferenced `.rebased_addr` before its None check,
        # which would raise AttributeError. Check the symbol itself first.
        sym = self.proj.loader.find_symbol(name)
        if sym is None:
            continue
        meth = sym.rebased_addr
        meth_pkey_id = self.state.mem[meth].int32_t
        if self.state.solver.solution(type_int, meth_pkey_id):
            # Since I'm not sure how to fork for each possible return value,
            # for now we just use the first match
            self.state.add_constraints(type_int == meth_pkey_id)
            return meth
    raise ValueError("couldn't find a valid method, type was {}".format(describeAst(type_int)))
def detected_spectre_branch(state):
    """Report an unsafe branch; skip (info-log only) if the instruction is whitelisted for SpectreExplicitState."""
    branch_target = state.inspect.exit_target
    branch_guard = state.inspect.exit_guard
    if isinstance(state.spectre, SpectreExplicitState) and state.addr in state.spectre.whitelist:
        # whitelisted: note it and move on without recording a violation
        l.info("Detected whitelisted unsafe branch:\n Instruction Address {}\n Branch Target {}\n Guard {}".format(
            hex(state.addr), branch_target, describeAst(branch_guard)))
        return
    if state.has_plugin('spec'):
        path = ''.join(state.spec.path)
    else:
        path = 'not available'
    # one concrete assignment of the top-level arguments that satisfies the current constraints
    satisfying_args = {}
    for (arg_name, (arg_bvs, _)) in state.globals['args'].items():
        satisfying_args[arg_name] = state.solver.eval(arg_bvs)
    l.error("\n!!!!!!!! UNSAFE BRANCH !!!!!!!!\n Instruction Address {}\n Branch Target {}\n Guard {}\n Path {}\n A set of argument values meeting constraints is: {}\n constraints were {}\n".format(
        hex(state.addr), branch_target, describeAst(branch_guard), path,
        satisfying_args, state.solver.constraints))
    state.spectre.violation = ('branch', state.addr, branch_target, branch_guard)
def detected_spectre_write(state):
    """Report an unsafe write; skip (info-log only) if the instruction is whitelisted for SpectreExplicitState."""
    write_addr = state.inspect.mem_write_address
    write_expr = state.inspect.mem_write_expr
    if isinstance(state.spectre, SpectreExplicitState) and state.addr in state.spectre.whitelist:
        # whitelisted: note it and move on without recording a violation
        l.info("Detected whitelisted unsafe write:\n Instruction Address {}\n Write Address {}\n Write Value {}".format(
            hex(state.addr), describeAst(write_addr), describeAst(write_expr)))
        return
    if state.has_plugin('spec'):
        path = ''.join(state.spec.path)
    else:
        path = 'not available'
    # one concrete assignment of the top-level arguments that satisfies the current constraints
    satisfying_args = {}
    for (arg_name, (arg_bvs, _)) in state.globals['args'].items():
        satisfying_args[arg_name] = state.solver.eval(arg_bvs)
    l.error("\n!!!!!!!! UNSAFE WRITE !!!!!!!!\n Instruction Address {}\n Write Address {}\n Write Value {}\n Path {}\n A set of argument values meeting constraints is: {}\n constraints were {}\n".format(
        hex(state.addr), describeAst(write_addr), describeAst(write_expr), path,
        satisfying_args, state.solver.constraints))
    state.spectre.violation = ('write', state.addr, write_addr, write_expr)
def dbg_tmp_write(state):
    """Debug hook: log every VEX temporary write (value expression and tmp number)."""
    tmp_num = state.inspect.tmp_write_num
    tmp_expr = state.inspect.tmp_write_expr
    message = "state{}: wrote {} to tmp {}".format(
        state.spectre.uid, describeAst(tmp_expr), tmp_num)
    l.info(message)
def dbg_reg_write(state):
    """Debug hook: log every register write (value expression and register offset)."""
    reg_offset = state.inspect.reg_write_offset
    reg_expr = state.inspect.reg_write_expr
    message = "state{}: wrote {} to offset {}".format(
        state.spectre.uid, describeAst(reg_expr), reg_offset)
    l.info(message)
def dbg_tmp_read(state):
    """Debug hook: log every VEX temporary read (value expression and tmp number)."""
    tmp_num = state.inspect.tmp_read_num
    tmp_expr = state.inspect.tmp_read_expr
    message = "state{}: read {} from tmp {}".format(
        state.spectre.uid, describeAst(tmp_expr), tmp_num)
    l.info(message)
def dbg_reg_read(state):
    """Debug hook: log every register read (value expression and register offset)."""
    reg_offset = state.inspect.reg_read_offset
    reg_expr = state.inspect.reg_read_expr
    message = "state{}: read {} from offset {}".format(
        state.spectre.uid, describeAst(reg_expr), reg_offset)
    l.info(message)
def detected_spectre_branch(state):
    """Report an unsafe branch to stdout and record the violation on the state's spectre plugin."""
    branch_target = state.inspect.exit_target
    branch_guard = state.inspect.exit_guard
    # one concrete assignment of the top-level arguments that satisfies the current constraints
    satisfying_args = {}
    for (arg_name, (arg_bvs, _)) in state.globals['args'].items():
        satisfying_args[arg_name] = state.solver.eval(arg_bvs)
    report = "\n!!!!!!!! UNSAFE BRANCH !!!!!!!!\n Branch Address {}\n Branch Target {}\n Guard {}\n A set of argument values meeting constraints is: {}\n constraints were {}\n".format(
        hex(state.addr),
        branch_target,
        describeAst(branch_guard),
        satisfying_args,
        state.solver.constraints)
    print(report)
    # remember the violation so the exploration driver can find it later
    state.spectre.violation = ('branch', state.addr, branch_target, branch_guard)
def do_store(self, state, addr, expr, condition, endness, action):
    """
    Defer a store instead of committing it to memory immediately.

    The store is appended to the state's in-flight store queue (state.spec.stores);
    the final False flag marks it as not-yet-poisoned. The 'mem_write' inspect
    points are still triggered now so that SpectreExplicitState's breakpoints fire
    at the architecturally expected time.
    """
    l.debug("time {}: deferring a store of {} to addr {}".format(
        state.spec.ins_executed, describeAst(expr), describeAst(addr)))
    state.spec.stores.append(
        (addr, expr, condition, endness, action, False))
    # this is also hacky, but works for our purposes:
    # SpectreExplicitState wants the inspect points on 'mem_write' to trigger _now_
    # even though we may or may not (semantically) be doing / have done the store now.
    # (Future loads will consider both the possibility that we have or have not done
    # this store, until it drops out of the speculation window.)
    # The inspect point will trigger again when the store drops out of the speculation
    # window, and it's theoretically bad to trigger it twice, but doesn't do any harm
    # for our uses, so for now it's fine.
    state._inspect('mem_write', BP_BEFORE, mem_write_address=addr,
                   mem_write_length=len(expr) // 8,  # expr length is in bits
                   mem_write_expr=expr, mem_write_condition=condition)
    state._inspect(
        'mem_write', BP_AFTER
    )  # angr/storage/memory.py passes only these arguments to the BP_AFTER point, so we do the same here for consistency
def log_concretization(state):
    """Debug hook: log each address concretization (symbolic expression and concrete results)."""
    symbolic_desc = describeAst(state.inspect.address_concretization_expr)
    results = state.inspect.address_concretization_result
    concrete_desc = "[{}]".format(', '.join(describeAst(r) for r in results))
    l.debug("instr {}: concretized {} to {}".format(hex(state.addr), symbolic_desc, concrete_desc))
def performLoadWithPossibleForwarding(state, load_addr, load_size_bytes, load_endness):
    """
    Perform a load while accounting for possible store-to-load forwarding from
    the in-flight (speculative) store queue.

    Forks the given state as needed: one outcome per plausible forwarding source,
    plus the plain from-memory read.

    returns: list of pairs (state, load_value)
    """
    l.debug("time {}: handling load of addr {}".format(state.spec.ins_executed, load_addr))
    returnPairs = []
    # one valid option is to read from memory, ignoring all inflight stores (not forwarding)
    memory_value = state.memory.load(load_addr, load_size_bytes, endness=load_endness)
    returnPairs.append((state, memory_value))
    # 'correct_state' will be continuously updated, but it always stores our current idea of
    # which state has the 'correct' (not mis-speculated) load value
    correct_state = state
    correct_value = memory_value  # explained later
    notOverlapStates = []
    # collect them into a list once right away, so then we aren't worrying about iterating
    # over state.spec.stores while modifying it
    stores = list(enumerate(state.spec.stores.getAllOldestFirst()))
    for (storenum, (s_addr, s_value, s_cond, s_endness, _, _)) in stores:
        l.debug(" - checking whether it could alias with store of {} to {}".format(describeAst(s_value), describeAst(s_addr)))
        s_size_bytes = len(s_value) // 8  # store value length is in bits
        loadOverlapsStore = overlaps(load_addr, load_size_bytes, s_addr, s_size_bytes)
        if not correct_state.solver.satisfiable(extra_constraints=[loadOverlapsStore]):
            # it is impossible for the load to overlap this store
            continue
        if s_cond is not None and correct_state.solver.satisfiable(extra_constraints=[claripy.Not(s_cond)]):
            raise ValueError("not yet implemented: conditional store where condition could be False")
        if correct_state.solver.symbolic(load_size_bytes):
            raise ValueError("not yet implemented: load could overlap with an inflight store but has symbolic size")
        if correct_state.solver.symbolic(s_size_bytes):
            raise ValueError("not yet implemented: load could overlap with an inflight store, but store has symbolic size")
        if load_size_bytes > s_size_bytes:
            # load is wider than the store: forwarding would need to combine multiple
            # sources, which we don't model -- only the no-overlap case is considered.
            #l.warn("load could overlap with an inflight store, but load is larger. We are only considering the case where they do not overlap. This will miss some possible paths.")
            #correct_state.add_constraints(claripy.Not(loadOverlapsStore))
            continue
        # if we got here, the load may overlap the store, but doesn't necessarily have to
        if correct_state.solver.satisfiable(extra_constraints=[claripy.Not(loadOverlapsStore)]):
            # in this case, it's possible both that the load either does or does not overlap the store
            # We create a notOverlapState, for which forwarding from the previous store was _actually correct_
            # (it will not alias with this store or any newer inflight stores)
            # (We could also consider the possibility that the load not-aliases with this
            # store and does-alias with a newer inflight store, but that would lead to a
            # lot more blowup and it's unclear it would be useful. We're approximating
            # elsewhere anyway, e.g. concretization)
            notOverlapState = correct_state.copy()
            notOverlapStates.append(notOverlapState)
            returnPairs.append((notOverlapState, correct_value))  # it reads the previous correct value
        # on the other hand, the other states are going to assume the load and store alias, so we should constrain that
        # (we add this before the fork that will happen below, because both of the forked states assume that the aliasing happens)
        correct_state.add_constraints(loadOverlapsStore)
        for s in notOverlapStates:
            # all of these states got their _correct values_ already, so they cannot alias with this store
            s.add_constraints(claripy.Not(loadOverlapsStore))
        # now we're left with the case where the load does overlap the store
        if isDefinitelyNotEqual_Solver(correct_state, load_addr, s_addr):
            # misaligned overlap: not modeled -- only the no-overlap case is considered
            #l.warn("load could overlap with an inflight store, but load has a different address (they are misaligned). We are only considering the case where they do not overlap. This will miss some possible paths.")
            continue
        elif not isDefinitelyEqual_Solver(correct_state, load_addr, s_addr):
            l.warn("load could overlap with store misaligned, but we are only considering the aligned case")
            # we choose to only consider cases where they're exactly equal, so we add that constraint
            correct_state.add_constraints(load_addr == s_addr)
        # fork a new state, that will forward from this inflight store
        forwarding_state = correct_state.copy()  # note that nothing is poisoned in correct_state yet
        # the previous 'correct' state must discover that it's incorrect when this store retires, at the latest
        # (since it _definitely does_ alias with this store -- either that was already the case, or we constrained it to be so)
        correct_state.spec.stores.updateAt(storenum, poison)
        # we are now the 'correct' state, to our knowledge -- we have the most recently stored value to this address
        correct_state = forwarding_state
        # we are a valid state, and this is the value we think the load has
        returnPairs.append((forwarding_state, alignedLoadFromStoredValue(load_size_bytes, s_value, s_size_bytes, load_endness, s_endness)))
    if len(returnPairs) == 1:
        l.debug(" - final results: only one possible value, {}".format(returnPairs[0][1]))
    else:
        l.debug(" – final results: {} possible values: {}".format(len(returnPairs), list(v for (_, v) in returnPairs)))
    return returnPairs