def build_field_tuple_and_path(self, ctx, path):
    typ = self._type
    fields = []
    newpath = []
    while len(path) > 0:
        if typ.is_int():
            assert False, "Can't have base type if there is more left in path"
        elif typ.is_array() or typ.is_pointer():
            if typ.is_array():
                # The index must be provably within the array bounds on this path.
                if not util.path_condition_implies(
                        ctx, z3.ULT(path[0], typ.length()), print_model=True):
                    util.print_stacktrace(ctx)
                    raise IndexError(
                        "Can not prove index %s is within array bounds %s" %
                        (path[0], typ.length()))
            if typ.is_pointer():
                # Only a zero offset through a pointer is supported.
                if not util.path_condition_implies(ctx, path[0] == 0):
                    util.print_stacktrace(ctx)
                    raise RuntimeError("Pointer arithmetic not supported")
            typ = typ.deref()
            newpath.append(path[0])
        elif typ.is_struct():
            # Struct field selectors must simplify to concrete integers.
            field = util.simplify(path[0]).as_long()
            fields.append(field)
            typ = typ.field(field)
        else:
            assert False, "Unhandled case"
        path = path[1:]
    return tuple(fields), newpath
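# A minimal standalone sketch of the bounds-check obligation above, assuming
# util.path_condition_implies is an unsatisfiability check: the goal holds on every
# path iff (path condition AND NOT goal) is unsat. The helper and variable names
# below are hypothetical illustrations, not the project's implementation.

import z3

def implies_under(path_conditions, goal):
    s = z3.Solver()
    s.add(*path_conditions)       # everything assumed along the current path
    s.add(z3.Not(goal))           # look for a counterexample to the goal
    return s.check() == z3.unsat  # no counterexample => path condition implies goal

idx = z3.BitVec('idx', 64)
path = [z3.ULT(idx, z3.BitVecVal(8, 64))]        # a guard already on the path
in_bounds = z3.ULT(idx, z3.BitVecVal(16, 64))    # array bound to prove
assert implies_under(path, in_bounds)            # provable: idx < 8 implies idx < 16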
def branch(self, ctx, cond, cond_type, iftrue, iffalse):
    self.assertion(ctx, not cond.is_poison(), "path condition is poison")
    assert itypes.integer_size(cond_type) == 1
    scond = util.simplify(cond)
    can_be_true = not z3.is_false(scond)
    can_be_false = not z3.is_true(scond)
    if ctx['depth'] >= ctx['prune_depth']:
        # Past the prune depth, ask the solver whether each direction is feasible
        # under the current path condition instead of relying on syntactic checks.
        can_be_true = not z3.is_false(
            scond) and not util.path_condition_implies(ctx, z3.Not(scond))
        can_be_false = not can_be_true or (
            not z3.is_true(scond) and not util.path_condition_implies(ctx, scond))
    true_branch_exc = None
    false_branch_exc = None
    trueval = None
    falseval = None
    if can_be_true:
        try:
            ctx.push(path_condition=scond)
            trueval = iftrue()
        except ex.UnreachableException as e:
            stacktrace = getattr(e, 'stacktrace', None)
            self.assertion(ctx,
                           util.path_condition_implies(ctx, z3.BoolVal(False),
                                                       print_model=True),
                           "Panic " + repr(e), stacktrace=stacktrace)
            can_be_true = False
        except BaseException as e:
            true_branch_exc = e
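# A small sketch of the pruning logic above, again assuming path_condition_implies
# means "(path condition AND NOT goal) is unsat". A direction is pruned only when the
# accumulated path condition already forces the other one; the names below are
# illustrative and omit the real code's extra rule that the false direction is kept
# whenever the true one is infeasible.

import z3

def feasible_directions(path_conditions, scond):
    def implies(goal):
        s = z3.Solver()
        s.add(*path_conditions)
        s.add(z3.Not(goal))
        return s.check() == z3.unsat

    can_be_true = not z3.is_false(scond) and not implies(z3.Not(scond))
    can_be_false = not z3.is_true(scond) and not implies(scond)
    return can_be_true, can_be_false

x = z3.BitVec('x', 8)
# With x == 3 on the path, a branch on x < 5 can only go the "true" way.
print(feasible_directions([x == 3], z3.ULT(x, z3.BitVecVal(5, 8))))  # (True, False)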
def learn(self, transitions, rewards):
    # Prepare for learning
    self.updateDicts()
    attributes = ["X_pos", "Y_pos", "X_size", "Y_size", "Colour", "Shape", "Nothing", "Reward"]
    model_updated = False
    if transitions and rewards:
        att_list = range(REWARD + 1)
    elif transitions and not rewards:
        att_list = range(REWARD)
    elif not transitions and rewards:
        att_list = [REWARD]
    else:
        return

    # For each object attribute or reward
    for i in att_list:
        remaining = dict(zip(self.data[i].keys(), [[] for key in self.data[i].keys()]))

        # For each attribute/reward value to be predicted
        for key in self.data[i].keys():

            # If the maximum number of schemas has already been learnt we skip this round of learning
            if len(self.schemas[i][key]) >= LIMIT:
                remaining[key] = self.data[i][key]
                continue

            # If we are predicting rewards the learning data is constructed from all objects that have changed
            if i == REWARD:
                # Form positive cases
                xYes = []
                xNo = []
                for datum in self.data[i][key]:
                    predicted = False
                    for o in datum.keys():
                        if self.checkDatum([datum[o], key], i, consistency_check=True):
                            predicted = True
                            break
                    if not predicted:
                        xYes += [datum[c] for c in self.obsChanges]
                        xNo += [datum[o] for o in datum.keys() if o not in self.obsChanges]

                # Form negative cases
                for other in self.data[i].keys():
                    if other != key:
                        xNo += util.flatten([[datum[o] for o in datum.keys()]
                                             for datum in self.data[i][other] + self.evidence[i][other]])

            # Otherwise we construct learning data in the standard way
            else:
                # Form positive cases
                xYes = []
                for datum in self.data[i][key]:
                    if datum[0][i] != key:
                        if not self.checkDatum([datum, key], i, consistency_check=True):
                            xYes.append(datum)
                self.data[i][key] = [datum for datum in self.data[i][key]
                                     if datum not in self.evidence[i][key]]

                # Form negative cases
                xNo = [self.data[i][other] + self.evidence[i][other]
                       for other in self.data[i].keys() if other != key]
                xNo = util.flatten(xNo)

            # If there are no changes in this attribute of the primary object then we skip this round of learning
            if len(xYes) == 0:
                remaining[key] = self.data[i][key]
                continue

            # Form binary vectors for learning
            xYes = [util.toBinary(self, item) for item in xYes]
            xNo = [util.toBinary(self, item) for item in xNo]
            schemas = [util.toBinarySchema(self, schema) for schema in self.schemas[i][key]]
            oldSchemas = deepcopy(schemas)

            # Learn and output schemas, new evidence, and remaining positive cases
            if i == REWARD:
                [binarySchemas, _, _] = lern.learnSchemas(xYes, xNo, schemas, self.deterministic)
            else:
                [binarySchemas, binaryEvidence, binaryRemaining] = lern.learnSchemas(xYes, xNo, schemas,
                                                                                     self.deterministic)

            # Name new schemas
            new_names = []
            new_schemas = [util.fromBinarySchema(self, s, key) for s in binarySchemas if s not in oldSchemas]
            for s in new_schemas:
                s.name = self.num_schemas
                new_names.append(s.name)
                self.num_schemas += 1

            # Convert learnt schemas and evidence from binary output and add to model
            self.schemas[i][key] += new_schemas
            self.schemas[i][key] = util.simplify(self, self.schemas[i][key], key, attributes[i])

            # Get initial counts of and display new schemas
            new_printed = False
            for s in self.schemas[i][key]:
                if s.name in new_names:
                    if not new_printed:
                        print("New schemas: ")
                        new_printed = True
                    model_updated = True
                    if not self.deterministic:
                        s.get_initial_counts(self, i)
                    print(attributes[i] + " = " + str(key) + " <- " + s.display(no_head=True))

            # # If they are reward schemas then the binary evidence and remaining data are not in the correct form to be stored
            # if i == REWARD:
            #     for datum in self.data[i][key]:
            #         predicted = False
            #         for o in datum.keys():
            #             if self.checkDatum([datum[o], key], i)[0]:
            #                 predicted = True
            #                 self.evidence[i][key].append(datum)
            #                 break
            #         if not predicted:
            #             remaining[key].append(datum)
            #
            # # Otherwise we can convert directly back from the binary data and store the result
            # else:
            #     self.evidence[i][key] += [util.fromBinary(self, datum) for datum in binaryEvidence]
            #     remaining[key] = [util.fromBinary(self, datum) for datum in binaryRemaining]
            #
            # self.data[i] = remaining

    return model_updated
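# A minimal sketch (not the project's lern.learnSchemas) of the kind of binary schema
# learning assumed above: examples are 0/1 vectors, a schema is a bit mask, and a schema
# "predicts" an example when every bit set in the mask is also set in the example.
# The greedy loop below starts from one uncovered positive example, drops bits while the
# mask still excludes every negative example, then removes the positives it now covers.

def predicts(mask, x):
    # Every attribute required by the schema is present in the example.
    return all(x[j] for j, bit in enumerate(mask) if bit)

def learn_schemas_sketch(xYes, xNo, max_schemas=5):
    schemas, remaining = [], list(xYes)
    while remaining and len(schemas) < max_schemas:
        mask = list(remaining[0])  # seed from one uncovered positive example
        for j in range(len(mask)):
            if not mask[j]:
                continue
            mask[j] = 0  # tentatively generalise by dropping this attribute
            if any(predicts(mask, neg) for neg in xNo):
                mask[j] = 1  # too general: it now covers a negative, restore the bit
        if any(predicts(mask, neg) for neg in xNo):
            break  # even the ungeneralised example clashes with a negative
        schemas.append(mask)
        remaining = [x for x in remaining if not predicts(mask, x)]
    return schemas, remaining

# Example: the single learnt schema requires only the first attribute.
# learn_schemas_sketch([[1, 1, 0, 1], [1, 1, 1, 0]], [[0, 1, 0, 1]]) -> ([[1, 0, 0, 0]], [])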