def forward(facts, kb):
    '''An exhaustive forward chaining algorithm for first-order definite clauses.

    facts: iterable of initial literals (the agenda seeds).
    kb: iterable of definite clauses, readable by parse.antecedent/consequent.
    Returns a list of [consequent, triggers] pairs, one for each entailed
    literal, where triggers are the literals that fired the derivation.
    '''
    from collections import deque  # O(1) popleft vs O(n) list.pop(0)
    # each production is [antecedent_list, consequent_literal, triggers]
    agenda = deque(facts)
    productions = [[parse.antecedent(k), parse.consequent(k), []] for k in kb]
    entailed = []
    while agenda:
        current = agenda.popleft()  # FIFO preserves the original processing order
        # NOTE: productions grows while being iterated; Python's list iterator
        # visits the appended partial productions in this same pass, which the
        # exhaustiveness of the algorithm relies on. Do not snapshot the list.
        for prod in productions:
            for ant in prod[0]:
                theta = unify.unify(current, ant)
                if theta is not None:
                    new_consequent = unify.subst(theta, prod[1])
                    new_triggers = prod[2] + [current]
                    if len(prod[0]) == 1:  # last unsatisfied antecedent: rule fires
                        entailed.append([new_consequent, new_triggers])
                        agenda.append(new_consequent)
                    else:  # partially satisfied rule: queue a reduced production
                        new_antecedent = list(prod[0])
                        new_antecedent.remove(ant)
                        new_antecedent = [unify.subst(theta, x) for x in new_antecedent]
                        productions.append([new_antecedent, new_consequent, new_triggers])
    return entailed
def and_or_leaflists(remaining, indexed_kb, depth, antecedents=None, assumptions=None):
    '''Returns list of all entailing sets of leafs in the and-or backchaining tree.

    remaining: literals still to prove at the current level.
    indexed_kb: dict mapping a predicate to the rules concluding that predicate.
    depth: number of backchaining levels still allowed.
    antecedents: literals queued for the next (deeper) level.
    assumptions: leaf literals accumulated so far.
    Returns an iterable of assumption lists; empty on failure.
    '''
    # None sentinels avoid the shared-mutable-default pitfall; behavior is
    # unchanged for callers using the old [] defaults.
    antecedents = [] if antecedents is None else antecedents
    assumptions = [] if assumptions is None else assumptions
    if depth == 0 and len(antecedents) > 0:  # out of depth with work left: fail
        return []  # (empty) list of lists
    elif len(remaining) == 0:  # done with this level
        if len(antecedents) == 0:  # found one complete leaf set
            return [assumptions]  # list of lists
        else:  # descend one level on the queued antecedents
            return and_or_leaflists(antecedents, indexed_kb, depth - 1, [], assumptions)
    else:  # more to go on this level
        literal = remaining[0]  # first of remaining
        predicate = literal[0]
        if predicate not in indexed_kb:  # no rule concludes it: assume it
            return and_or_leaflists(remaining[1:], indexed_kb, depth,
                                    antecedents, [literal] + assumptions)
        else:
            revisions = []
            for rule in indexed_kb[predicate]:  # indexed by predicate of literal
                theta = unify.unify(literal, parse.consequent(rule))
                if theta is not None:
                    if depth == 0:  # no depth for revision
                        return []  # (empty) list of lists
                    revisions.append([
                        unify.subst(theta, remaining[1:]),  # new remaining with substitutions
                        indexed_kb,
                        depth,
                        unify.standardize(unify.subst(theta, parse.antecedent(rule))) +
                        unify.subst(theta, antecedents),  # new antecedents with substitutions
                        unify.subst(theta, assumptions)])  # new assumptions with substitutions
            return itertools.chain(*[and_or_leaflists(*rev) for rev in revisions])  # list of lists (if any)
def crunch(conjunction):
    '''Return a list of all possible ways to unify literals within conjunction.

    The unmodified (deduplicated) conjunction is always the first solution;
    further solutions come from applying every consistent merge of pairwise
    unifiers and deduplicating the rewritten conjunction.
    '''
    # Deduplicate by sorting then collapsing equal runs.
    deduped = [lit for lit, _ in itertools.groupby(sorted(conjunction))]
    solutions = [deduped]  # start with one solution
    # Gather every pairwise unifier between distinct literals.
    candidate_thetas = []
    for left, right in itertools.combinations(deduped, 2):
        theta = unify.unify(left, right)
        if theta is not None:
            candidate_thetas.append(theta)
    # Try each nonempty subset of unifiers that merges consistently.
    for thetaset in powerset(candidate_thetas):
        if len(thetaset) == 0:
            continue
        merged = mergethetas(thetaset)
        if merged:
            rewritten = [lit for lit, _ in
                         itertools.groupby(sorted(unify.subst(merged, deduped)))]
            if rewritten not in solutions:  # linear membership scan -- expensive!
                solutions.append(rewritten)
    return solutions
def cruncher(conjunction, idx = 0):
    '''Enumerate conjunctions reachable by unifying the literal at idx with a
    later literal, recursing on each rewrite; deduplicates finished solutions.
    '''
    if idx >= len(conjunction) - 1:  # no later literal left to pair with
        # Sort + groupby collapses duplicate literals in the finished solution.
        return [[lit for lit, _ in itertools.groupby(sorted(conjunction))]]
    results = []
    for later in range(idx + 1, len(conjunction)):
        theta = unify.unify(conjunction[idx], conjunction[later])
        if theta:
            # Drop the merged literal, substitute everywhere, and keep going
            # from the same index.
            shrunk = conjunction[:later] + conjunction[later + 1:]
            results.extend(cruncher(unify.subst(theta, shrunk), idx))
    # Also explore leaving the literal at idx unmerged.
    results.extend(cruncher(conjunction, idx + 1))
    return results
def crunch( conjunction ): # returns a list of all possible ways to unify conjunction literals
    # NOTE(review): this is a duplicate of the earlier crunch() definition in
    # this file (identical logic, different formatting). At import time this
    # later definition shadows the earlier one. Confirm which copy callers
    # expect and delete the other.
    conjunction = [k for k, v in itertools.groupby(sorted(conjunction)) ]  # remove duplicates
    res = [conjunction]  # start with one solution
    pairs = itertools.combinations(conjunction, 2)
    # keep only the pairwise unifiers that succeeded
    thetas = [ theta for theta in [unify.unify(p[0], p[1]) for p in pairs] if theta is not None ]
    ps = powerset(thetas)
    for thetaset in ps:
        if len(thetaset) > 0:
            consistent = mergethetas(thetaset)
            if consistent:
                # apply the merged substitution, then dedupe the rewrite
                rewrite = [ k for k, v in itertools.groupby( sorted(unify.subst(consistent, conjunction))) ]
                if rewrite not in res:  # expensive!
                    res.append(rewrite)
    return res
def contextual_and_or_leaflists(remaining, indexed_kb, depth, context,
                                antecedents=None, assumptions=None):
    '''Returns list of all entailing sets of leafs in the and-or backchaining
    tree, with belief context.

    remaining: literals still to prove at the current level.
    indexed_kb: dict mapping a predicate to the rules concluding that predicate.
    depth: number of backchaining levels still allowed.
    context: belief-context literals a goal literal may unify against directly.
    antecedents: literals queued for the next (deeper) level.
    assumptions: leaf literals accumulated so far.
    Returns an iterable of assumption lists; empty on failure.
    '''
    # None sentinels avoid the shared-mutable-default pitfall; behavior is
    # unchanged for callers using the old [] defaults.
    antecedents = [] if antecedents is None else antecedents
    assumptions = [] if assumptions is None else assumptions
    if depth == 0 and len(antecedents) > 0:  # fail
        return []  # (empty) list of lists
    elif len(remaining) == 0:  # done with this level
        if len(antecedents) == 0:  # found one
            return [assumptions]  # list of lists
        else:
            return contextual_and_or_leaflists(antecedents, indexed_kb, depth - 1,
                                               context, [], assumptions)
    else:  # more to go on this level
        literal = remaining[0]  # first of remaining
        predicate = literal[0]
        if predicate not in indexed_kb:
            return contextual_and_or_leaflists(
                remaining[1:], indexed_kb, depth, context,
                antecedents, [literal] + assumptions)  # shift literal to assumptions
        else:
            revisions = []
            for rule in indexed_kb[predicate]:  # indexed by predicate of literal
                theta = unify.unify(literal, parse.consequent(rule))
                if theta is not None:
                    if depth == 0:  # no depth for revision
                        return []  # (empty) list of lists
                    revisions.append([
                        unify.subst(theta, remaining[1:]),  # new remaining with substitutions
                        indexed_kb,
                        depth,
                        context,
                        unify.standardize(unify.subst(theta, parse.antecedent(rule))) +
                        unify.subst(theta, antecedents),  # new antecedents with substitutions
                        unify.subst(theta, assumptions)])  # new assumptions with substitutions
            for contextliteral in context:
                theta = unify.unify(literal, contextliteral)
                if theta is not None:  # literal unifies with context
                    #print(str(literal) + " unifies with " + str(contextliteral) + " with theta " + str(theta) + " creating " + str(unify.subst(theta, literal)))
                    revisions.append([
                        unify.subst(theta, remaining[1:]),  # new remaining with substitutions
                        indexed_kb,
                        depth,
                        context,  # not revised
                        unify.subst(theta, antecedents),  # antecedents
                        unify.subst(theta, assumptions)  # assumptions
                    ])  # should we "break" here now that we've found one?
            return itertools.chain(*[
                contextual_and_or_leaflists(*rev) for rev in revisions
            ])  # list of lists (if any)