def score(self, h):
    """
    Score hypothesis ``h`` against the stored window of examples.

    Currently using the simple accuracy measure from page 41 of
    Langley's ML book: the fraction of examples in ``self.kset`` that
    ``h`` classifies correctly (covers the positives and rejects the
    negatives).

    :param h: the hypothesis (a set of literals) to evaluate.
    :return: accuracy in ``[0.0, 1.0]``; ``0.0`` when no examples have
        been stored yet.
    """
    # BUG FIX: previously divided by len(self.kset) unconditionally,
    # raising ZeroDivisionError before any examples were stored.
    if not self.kset:
        return 0.0

    correct = 0
    for mapping, x, y in self.kset:
        hit = covers(h, x, mapping)
        if (y == 1 and hit) or (y == 0 and not hit):
            correct += 1
    return correct / len(self.kset)
def ifit(self, t, x, y):
    """
    Incrementally specializes the hypothesis set.

    Positive examples prune hypotheses that fail to cover them;
    negative examples trigger specialization of every hypothesis that
    (incorrectly) covers them, keeping only specializations consistent
    with all stored positives.

    :param t: tuple of constants bound to ``self.args``.
    :param x: the example (a set of relational literals).
    :param y: 1 for a positive example, 0 for a negative one.
    :raises Exception: if ``y`` is neither 0 nor 1.
    """
    mapping = {arg: t[i] for i, arg in enumerate(self.args)}

    if y == 1:
        self.pset.append((x, mapping))
        # Discard every hypothesis that fails to cover the positive.
        failing = {h for h in self.hset
                   if not covers(h.union(self.constraints), x, mapping)}
        self.hset.difference_update(failing)
    elif y == 0:
        # Any hypothesis covering a negative must be specialized.
        covering = {h for h in self.hset
                    if covers(h.union(self.constraints), x, mapping)}
        for h in covering:
            self.hset.remove(h)
            candidates = specialize(h, self.constraints, self.args,
                                    self.pset, x, mapping,
                                    lambda: self.gensym())
            # Keep only specializations consistent with every stored
            # positive example.
            for p, pm in self.pset:
                candidates = {g for g in candidates
                              if covers(g.union(self.constraints), p, pm)}
            self.hset.update(candidates)

        self.remove_subsumed()

        # Impose a limit on the number of hypotheses.
        # NOTE(review): set order is arbitrary, so which ten survive is
        # nondeterministic — preserved from the original behavior.
        self.hset = set(list(self.hset)[:10])
    else:
        raise Exception("y must be 0 or 1")
def ifit(self, t, x, y):
    """
    Incrementally revise the single current hypothesis.

    Maintains a sliding window (``self.kset``) of at most ``self.k``
    recent examples for scoring. On an uncovered positive the
    hypothesis is generalized; on a covered negative it is specialized.
    The candidate revision (or the current hypothesis) with the best
    window accuracy is kept, with random tie-breaking.

    :param t: tuple of constants bound to ``self.args``.
    :param x: the example (a set of relational literals).
    :param y: 1 for a positive example, 0 for a negative one.
    """
    mapping = {a: t[i] for i, a in enumerate(self.args)}

    # First positive example: adopt it, renamed into the argument
    # variables, as the initial hypothesis.
    if self.h is None and y == 1:
        rm = {t[i]: a for i, a in enumerate(self.args)}
        self.h = set([rename(rm, l) for l in x])

    # Maintain the sliding window of the k most recent examples.
    self.kset.append((mapping, x, y))
    while len(self.kset) > self.k:
        self.kset.pop(0)

    # BUG FIX: if a negative example arrives before any positive there
    # is no hypothesis to revise yet; previously this fell through and
    # crashed on ``None.union`` below.
    if self.h is None:
        return

    hset = None
    if y == 1:
        # BUG FIX: record the most recent positive unconditionally.
        # Previously ``self.last_pos`` was set only for *uncovered*
        # positives, so a covered negative arriving after only covered
        # positives raised AttributeError in the specialize branch.
        self.last_pos = (x, mapping)
        if not covers(self.h.union(self.constraints), x, mapping):
            reverse_mapping = {t[i]: a for i, a in enumerate(self.args)}
            renamed_x = set([rename(reverse_mapping, ele) for ele in x])
            hset = set([generalize(self.h, renamed_x)])
    else:
        if covers(self.h.union(self.constraints), x, mapping):
            hset = specialize(self.h, self.constraints, self.args,
                              [self.last_pos], x, mapping,
                              lambda: self.gensym())

    if hset is None or len(hset) == 0:
        return

    # Rank candidates (and the current hypothesis) by window accuracy;
    # random() breaks ties so the unorderable hypothesis sets in the
    # third slot are never compared.
    scored = [(self.score(new_h.union(self.constraints)), random(), new_h)
              for new_h in hset]
    scored.append((self.score(self.h.union(self.constraints)), random(),
                   self.h))
    scored.sort(reverse=True)
    # BUG FIX: removed leftover debug ``print`` of the candidate list.
    _, _, best = scored[0]
    self.h = best
def ifit(self, t, x, y):
    """
    Incrementally generalize the hypothesis via antiunification.

    When a positive example is encountered that the current hypothesis
    does not cover, the hypothesis is replaced by the least general
    generalization (lgg) of the two. Negative examples are ignored.

    :param t: tuple of constants bound to ``self.args``.
    :param x: the example (a set of relational literals).
    :param y: 1 for a positive example, 0 for a negative one.
    :raises Exception: if ``y`` is neither 0 nor 1.
    """
    forward = {a: t[i] for i, a in enumerate(self.args)}
    backward = {t[i]: a for i, a in enumerate(self.args)}
    renamed = {rename(backward, literal) for literal in x}

    if y == 0:
        return  # negative examples are ignored
    if y != 1:
        raise Exception("y must be 0 or 1")

    if self.h is None:
        # First positive example becomes the hypothesis as-is.
        self.h = renamed
    elif not covers(self.h.union(self.constraints), x, forward):
        self.h = generalize(self.h, renamed)
def goal_test(self, node):
    """
    Return True when the node's hypothesis is a goal.

    A hypothesis is a goal for this search when, combined with the
    constraints, it no longer covers the negative example carried in
    ``node.extra``.
    """
    hypothesis = node.state
    _args, constraints, _pset, neg, neg_mapping, _gensym = node.extra
    return not covers(hypothesis.union(constraints), neg, neg_mapping)
def goal_test(self, node):
    """
    Return True when the node's hypothesis is a goal.

    A hypothesis is a goal for this search when it covers the positive
    example carried in ``node.extra``.
    """
    hypothesis = node.state
    _args, pos, pos_mapping, _gensym = node.extra
    return covers(hypothesis, pos, pos_mapping)