Example #1
import itertools

import parse  # project-local module
import unify  # project-local module

def and_or_leaflists(remaining, indexed_kb, depth, antecedents=None, assumptions=None):
    '''Returns list of all entailing sets of leaves in the and-or backchaining tree'''
    antecedents = antecedents if antecedents is not None else []  # avoid mutable default arguments
    assumptions = assumptions if assumptions is not None else []
    if depth == 0 and len(antecedents) > 0: # fail
        return [] # (empty) list of lists
    elif len(remaining) == 0: # done with this level
        if len(antecedents) == 0: # found one
            return [assumptions] # list of lists
        else:
            return and_or_leaflists(antecedents, indexed_kb, depth - 1, [], assumptions)
    else: # more to go on this level
        literal = remaining[0] # first of remaining
        predicate = literal[0]
        if predicate not in indexed_kb:
            return and_or_leaflists(remaining[1:], indexed_kb, depth, antecedents,
                                    [literal] + assumptions) # shift literal to assumptions
        else:
            revisions = []
            for rule in indexed_kb[predicate]: # indexed by predicate of literal
                theta = unify.unify(literal, parse.consequent(rule))
                if theta is not None:
                    if depth == 0: # no depth for revision
                        return [] # (empty) list of lists
                    revisions.append([unify.subst(theta, remaining[1:]), # new remaining with substitutions
                                      indexed_kb,
                                      depth,
                                      unify.standardize(unify.subst(theta, parse.antecedent(rule))) +
                                      unify.subst(theta, antecedents),  # new antecedents with substitutions
                                      unify.subst(theta, assumptions)]) # new assumptions with substitutions
            return list(itertools.chain(*[and_or_leaflists(*rev) for rev in revisions])) # list of lists (if any)
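
A minimal, self-contained demo of the call shape, using hypothetical ground-only stand-ins for the project-local unify and parse modules (assumptions inferred from the code above: literals are lists headed by a predicate, substitutions are dicts, and indexed_kb maps each consequent predicate to its rules, as built in Example #3):

class unify:
    @staticmethod
    def unify(x, y):             # ground "unification": exact match or failure
        return {} if x == y else None
    @staticmethod
    def subst(theta, x):         # nothing to substitute in ground literals
        return x
    @staticmethod
    def standardize(x):          # nothing to rename in ground literals
        return x

class parse:
    @staticmethod
    def antecedent(rule):        # assumed rule shape: [antecedent_literals, consequent]
        return rule[0]
    @staticmethod
    def consequent(rule):
        return rule[1]

rule = [[['rain'], ['etc_wet']], ['wet', 'street']]  # rain ^ etc_wet -> wet(street)
print(and_or_leaflists([['wet', 'street']], {'wet': [rule]}, 2))
# [[['etc_wet'], ['rain']]] : the one leaf set that entails the goal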
Example #2
import parse  # project-local module
import unify  # project-local module

def forward(facts, kb):
    '''An exhaustive forward chaining algorithm for first-order definite clauses'''
    # each production is [antecedent_list, consequent_literal, triggers]
    stack = list(facts)  # FIFO work queue; a collections.deque would avoid O(n) pops
    productions = [[parse.antecedent(k), parse.consequent(k), []] for k in kb]
    entailed = []
    while len(stack) > 0:
        current = stack.pop(0)
        for prod in productions:  # appending inside this loop is deliberate:
            # newly created partial productions are also tried against `current`
            for ant in prod[0]:
                theta = unify.unify(current, ant)
                if theta is not None:
                    new_consequent = unify.subst(theta, prod[1])
                    new_triggers = prod[2] + [current]
                    if len(prod[0]) == 1: # last antecedent literal satisfied
                        entailed.append([new_consequent,
                                         new_triggers])
                        stack.append(new_consequent)
                    else: # drop the satisfied literal, apply theta to the rest
                        new_antecedent = list(prod[0])
                        new_antecedent.remove(ant)
                        new_antecedent = [unify.subst(theta, x) for x in new_antecedent]
                        productions.append([new_antecedent,
                                            new_consequent,
                                            new_triggers])
    return entailed
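
The same kind of hypothetical ground-only stand-ins (see the sketch after Example #1) are enough to watch one production fire:

class unify:
    @staticmethod
    def unify(x, y):
        return {} if x == y else None
    @staticmethod
    def subst(theta, x):
        return x

class parse:
    @staticmethod
    def antecedent(rule):        # assumed rule shape: [antecedent_literals, consequent]
        return rule[0]
    @staticmethod
    def consequent(rule):
        return rule[1]

kb = [[[['man', 'socrates']], ['mortal', 'socrates']]]  # man(socrates) -> mortal(socrates)
print(forward([['man', 'socrates']], kb))
# [[['mortal', 'socrates'], [['man', 'socrates']]]] : each entailment carries its triggers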
Example #3
import parse  # project-local module

def index_by_consequent_predicate(kb):
    '''Returns a dict mapping each consequent predicate to its definite clauses'''
    res = {}
    for dc in kb:
        predicate = parse.consequent(dc)[0]
        res.setdefault(predicate, []).append(dc)
    return res
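
A small demo under the same assumed clause shape, grouping two clauses by their shared consequent predicate:

class parse:
    @staticmethod
    def consequent(rule):        # assumed rule shape: [antecedent_literals, consequent]
        return rule[1]

kb = [[[['rain']], ['wet', 'street']],
      [[['sprinkler']], ['wet', 'lawn']]]
print(index_by_consequent_predicate(kb))
# both clauses land under the single key 'wet'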
Example #4
import parse  # project-local module

def existential_warnings(definite_clauses):
    '''Warns about definite clauses whose consequent contains variables that do not appear in the antecedent (existential variables)'''
    warnings = ""
    for dc in definite_clauses:
        va = parse.all_variables(parse.antecedent(dc))
        vc = parse.all_variables(parse.consequent(dc))
        for v in vc:
            if v not in va:
                warnings += "\n  existential variables in the consequent: " + parse.display(dc)
                break
    if len(warnings) == 0:
        return "none"
    else:
        return warnings
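
A demo with hypothetical stand-ins; variables are assumed here to be strings prefixed with '?' (the real parse module's convention may differ):

class parse:
    @staticmethod
    def antecedent(dc):
        return dc[0]
    @staticmethod
    def consequent(dc):
        return dc[1]
    @staticmethod
    def all_variables(x):        # collect '?'-prefixed atoms from nested lists
        if isinstance(x, str):
            return [x] if x.startswith('?') else []
        return [v for item in x for v in parse.all_variables(item)]
    @staticmethod
    def display(dc):
        return str(dc)

dc = [[['person', '?x']], ['parent', '?y', '?x']]  # ?y occurs only in the consequent
print(existential_warnings([dc]))  # flags the clause because ?y never occurs in the antecedent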
Example #5
import parse  # project-local module

def missing_axiom_warnings(definite_clauses):
    '''Warns about predicates that appear in antecedents but not in any consequent'''
    warnings = ""
    seen_antecedent_predicates = set()
    seen_consequent_predicates = set()
    for dc in definite_clauses:
        seen_consequent_predicates.add(parse.consequent(dc)[0])
        for literal in parse.antecedent(dc):
            if not literal[0].startswith('etc'):  # skip etcetera literals
                seen_antecedent_predicates.add(literal[0])
    for predicate in seen_antecedent_predicates - seen_consequent_predicates:
        warnings += "\n  no etcetera axiom for literal: " + predicate
    if len(warnings) == 0:
        return "none"
    else:
        return warnings
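
In the same assumed representation, a predicate that occurs in an antecedent but heads no clause is flagged, while etc-prefixed literals are skipped:

class parse:
    @staticmethod
    def antecedent(dc):
        return dc[0]
    @staticmethod
    def consequent(dc):
        return dc[1]

kb = [[[['rain'], ['etc_wet']], ['wet', 'street']]]
print(missing_axiom_warnings(kb))
# prints a blank line, then:   no etcetera axiom for literal: rain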
Example #6
    # requires module-level: import collections, itertools; import parse
    def __init__(self, kb):
        super(clark_completion_cnf_t, self).__init__()

        explanations = collections.defaultdict(list)

        # collect the set of bodies sharing the same head.
        for rule in kb:
            explanations[tuple(parse.consequent(rule))] += [parse.antecedent(rule)]

        # complete the knowledge base.
        for head, bodies in explanations.items():  # .iteritems() in the Python 2 original

            # h <=> e1 v e2 v ...

            # from each body to the head:
            # e1 => h, e2 => h, ...
            for body in bodies:
                gnid_or = self._create_node("v")
                gnid_head = self._create_node(tuple(head))
                self.nxg.add_edge(gnid_or, gnid_head)

                for b in body:
                    gnid_b = self._create_node(tuple(parse.negate(b)))
                    self.nxg.add_edge(gnid_or, gnid_b)

            # from the head to the bodies, distributed into CNF:
            # h => e1a v e2a v ...
            # h => e1a v e2b v ...
            for body in itertools.product(*bodies):
                gnid_or = self._create_node("v")
                gnid_head = self._create_node(tuple(parse.negate(head)))
                self.nxg.add_edge(gnid_or, gnid_head)

                for b in body:
                    gnid_b = self._create_node(tuple(b))
                    self.nxg.add_edge(gnid_or, gnid_b)
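
A quick illustration of the product expansion in the second loop: for h <=> (p ^ q) v r, picking one literal per body distributes the right-hand side into the CNF clauses (~h v p v r) and (~h v q v r):

import itertools
bodies = [[['p'], ['q']], [['r']]]
print(list(itertools.product(*bodies)))
# [(['p'], ['r']), (['q'], ['r'])] : one CNF clause per combination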
Example #7
    # requires module-level: import collections; import parse
    def __init__(self, kb):
        super(clark_completion_t, self).__init__()

        explanations = collections.defaultdict(list)

        # collect the set of bodies sharing the same head.
        for rule in kb:
            explanations[tuple(parse.consequent(rule))] += [parse.antecedent(rule)]

        # complete the knowledge base.
        for head, bodies in explanations.items():  # .iteritems() in the Python 2 original
            gnid_dimp = self._create_node("<->")
            gnid_head = self._create_node(tuple(head))
            self.nxg.add_edge(gnid_dimp, gnid_head)

            # create an "OR" node representing the disjunction of explanations.
            gnid_expl_from = gnid_dimp

            if len(bodies) > 1:
                gnid_disj = self._create_node("|")
                self.nxg.add_edge(gnid_dimp, gnid_disj)
                gnid_expl_from = gnid_disj

            for body in bodies:
                gnid_body_from = gnid_expl_from

                # create an "AND" node representing the conjunction of body literals.
                if len(body) > 1:
                    gnid_conj = self._create_node("^")
                    self.nxg.add_edge(gnid_expl_from, gnid_conj)
                    gnid_body_from = gnid_conj

                for literal in body:
                    gnid_lit = self._create_node(tuple(literal))
                    self.nxg.add_edge(gnid_body_from, gnid_lit)
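
A self-contained sketch of the tree this constructor builds for one completed head, under the assumption (suggested by the naming) that self.nxg is a networkx graph and _create_node adds a labeled node and returns a fresh id:

import networkx as nx

g = nx.DiGraph()
counter = [0]

def create_node(label):              # hypothetical stand-in for self._create_node
    counter[0] += 1
    g.add_node(counter[0], label=label)
    return counter[0]

# build h <-> (p ^ q) | r
dimp = create_node('<->')
g.add_edge(dimp, create_node(('h',)))
disj = create_node('|')              # two bodies, so an OR node is created
g.add_edge(dimp, disj)
conj = create_node('^')              # the first body has two literals, so an AND node
g.add_edge(disj, conj)
g.add_edge(conj, create_node(('p',)))
g.add_edge(conj, create_node(('q',)))
g.add_edge(disj, create_node(('r',)))  # a single-literal body hangs directly off the OR
print(g.number_of_nodes(), g.number_of_edges())  # 7 6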
Example #8
import itertools

import parse  # project-local module
import unify  # project-local module

def contextual_and_or_leaflists(remaining, indexed_kb, depth, context,
                                antecedents=None, assumptions=None):
    '''Returns list of all entailing sets of leaves in the and-or backchaining tree, with belief context'''
    antecedents = antecedents if antecedents is not None else []  # avoid mutable default arguments
    assumptions = assumptions if assumptions is not None else []
    if depth == 0 and len(antecedents) > 0:  # fail
        return []  # (empty) list of lists
    elif len(remaining) == 0:  # done with this level
        if len(antecedents) == 0:  # found one
            return [assumptions]  # list of lists
        else:
            return contextual_and_or_leaflists(antecedents, indexed_kb, depth - 1,
                                               context, [], assumptions)
    else:  # more to go on this level
        literal = remaining[0]  # first of remaining
        predicate = literal[0]
        if predicate not in indexed_kb:
            return contextual_and_or_leaflists(remaining[1:], indexed_kb, depth, context,
                                               antecedents, [literal] + assumptions)  # shift literal to assumptions
        else:
            revisions = []
            for rule in indexed_kb[predicate]:  # indexed by predicate of literal
                theta = unify.unify(literal, parse.consequent(rule))
                if theta is not None:
                    if depth == 0:  # no depth for revision
                        return []  # (empty) list of lists
                    revisions.append([unify.subst(theta, remaining[1:]),  # new remaining with substitutions
                                      indexed_kb,
                                      depth,
                                      context,
                                      unify.standardize(unify.subst(theta, parse.antecedent(rule))) +
                                      unify.subst(theta, antecedents),  # new antecedents with substitutions
                                      unify.subst(theta, assumptions)])  # new assumptions with substitutions
            for contextliteral in context:
                theta = unify.unify(literal, contextliteral)
                if theta is not None:  # literal unifies with a context literal
                    #print(str(literal) + " unifies with " + str(contextliteral) + " with theta " + str(theta) + " creating " + str(unify.subst(theta, literal)))
                    revisions.append([unify.subst(theta, remaining[1:]),  # new remaining with substitutions
                                      indexed_kb,
                                      depth,
                                      context,  # not revised
                                      unify.subst(theta, antecedents),  # antecedents
                                      unify.subst(theta, assumptions)])  # assumptions
                    # should we "break" here now that we've found one?
            return list(itertools.chain(*[contextual_and_or_leaflists(*rev)
                                          for rev in revisions]))  # list of lists (if any)
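
A ground-only demo of the context branch, reusing the hypothetical stand-ins from Example #1: a goal literal that unifies with a context literal can be closed without assuming anything:

class unify:
    @staticmethod
    def unify(x, y):
        return {} if x == y else None
    @staticmethod
    def subst(theta, x):
        return x
    @staticmethod
    def standardize(x):
        return x

class parse:
    @staticmethod
    def antecedent(rule):
        return rule[0]
    @staticmethod
    def consequent(rule):
        return rule[1]

rule = [[['night'], ['etc_dark']], ['dark']]  # night ^ etc_dark -> dark
print(contextual_and_or_leaflists([['dark']], {'dark': [rule]}, 2, [['dark']]))
# [[['etc_dark'], ['night']], []] : backchain through the rule, or close the
# goal against the believed context with nothing left to assume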