def __init__(self, ruleStore, name=None,
             initialWorkingMemory=None,
             inferredTarget=None,
             nsMap=None,
             graphVizOutFile=None,
             dontFinalize=False,
             goal=None):
    """
    Build a RETE network from the rules held in ``ruleStore``.

    :param ruleStore: store whose ``.rules`` seed the network; its
        ``_finalize()`` is invoked unless ``dontFinalize`` is set.
    :param name: identifier for this network; a fresh ``BNode()`` is used
        when not supplied.
    :param initialWorkingMemory: optional initial fact set; when given it
        becomes ``self.workingMemory`` and is fed through
        ``feedFactsToAdd`` to compute the closure.
    :param inferredTarget: optional ``Graph`` to collect inferred facts;
        when ``None`` a new ``Graph`` is created and the prefixes in
        ``nsMap`` are bound on it.
    :param nsMap: prefix -> namespace mapping (defaults to an empty dict).
    :param graphVizOutFile: when given, the rendered network is written to
        this file via ``renderNetwork``.
    :param dontFinalize: skip ``ruleStore._finalize()``.
    :param goal: stored on ``self.goal``; semantics depend on callers
        elsewhere in the module (not visible here).
    """
    # FIX: the signature previously used the mutable default ``nsMap={}``,
    # so every network constructed without an explicit mapping shared (and
    # could silently mutate) one dict object.
    if nsMap is None:
        nsMap = {}
    self.leanCheck = {}
    self.goal = goal
    self.nsMap = nsMap
    self.name = name and name or BNode()
    self.nodes = {}
    self.alphaPatternHash = {}
    self.ruleSet = set()
    # One bucket per ground('1')/variable('0') signature of the triple.
    for alphaPattern in xcombine(('1', '0'), ('1', '0'), ('1', '0')):
        self.alphaPatternHash[tuple(alphaPattern)] = {}
    if inferredTarget is None:
        self.inferredFacts = Graph()
        namespace_manager = NamespaceManager(self.inferredFacts)
        for k, v in nsMap.items():
            namespace_manager.bind(k, v)
        self.inferredFacts.namespace_manager = namespace_manager
    else:
        self.inferredFacts = inferredTarget
    self.workingMemory = initialWorkingMemory and initialWorkingMemory or set()
    self.proofTracers = {}
    self.terminalNodes = set()
    self.instantiations = {}
    self.ruleStore = ruleStore
    self.justifications = {}
    self.dischargedBindings = {}
    if not dontFinalize:
        self.ruleStore._finalize()
    self.filteredFacts = Graph()
    # 'Universal truths' for a rule set are rules where the LHS is empty.
    # Rather than automatically adding them to the working set, alpha nodes
    # are 'notified' of them, so they can be checked for while performing
    # inter element tests.
    self.universalTruths = []
    from FuXi.Horn.HornRules import Ruleset
    self.rules = set()
    self.negRules = set()
    for rule in Ruleset(n3Rules=self.ruleStore.rules, nsMapping=self.nsMap):
        import warnings
        warnings.warn(
            "Rules in a network should be built *after* construction via "+
            " self.buildNetworkClause(HornFromN3(n3graph)) for instance",
            DeprecationWarning, 2)
        self.buildNetworkFromClause(rule)
    self.alphaNodes = [node for node in self.nodes.values()
                       if isinstance(node, AlphaNode)]
    self.alphaBuiltInNodes = [node for node in self.nodes.values()
                              if isinstance(node, BuiltInAlphaNode)]
    self._setupDefaultRules()
    if initialWorkingMemory:
        start = time.time()
        self.feedFactsToAdd(initialWorkingMemory)
        print >>sys.stderr,"Time to calculate closure on working memory: %s m seconds"%((time.time() - start) * 1000)
    if graphVizOutFile:
        print >>sys.stderr,"Writing out RETE network to ", graphVizOutFile
        renderNetwork(self,nsMap=nsMap).write(graphVizOutFile)
def clear(self):
    """
    Reset the network to a pristine, rule- and fact-free state.

    Discards all nodes, rules, proof tracers, justifications and
    working-memory facts, rebuilds an empty alpha-pattern hash, and resets
    the instantiation statistics via the helper.
    """
    self.nodes = {}
    # Fresh bucket for every subject/predicate/object ground('1') /
    # variable('0') signature.
    freshHash = {}
    for signature in xcombine(('1', '0'), ('1', '0'), ('1', '0')):
        freshHash[tuple(signature)] = {}
    self.alphaPatternHash = freshHash
    self.rules = set()
    self.proofTracers = {}
    self.terminalNodes = set()
    self.justifications = {}
    # NOTE(review): a sibling clear() variant in this file spells this
    # helper `_resetinstanciationStats` -- confirm which one the class
    # actually defines.
    self._resetinstantiationStats()
    self.workingMemory = set()
    self.dischargedBindings = {}
def clear(self):
    """
    Return the network to its initial empty state: no nodes, no rules,
    no working memory, and no proof/justification bookkeeping.
    """
    self.nodes = {}
    # Re-seed the alpha pattern hash: one empty dict per triple-position
    # ground('1')/variable('0') combination.  A generator expression keeps
    # each value a distinct dict object.
    self.alphaPatternHash = dict(
        (tuple(signature), {})
        for signature in xcombine(('1', '0'), ('1', '0'), ('1', '0')))
    self.rules = set()
    self.proofTracers = {}
    self.terminalNodes = set()
    self.justifications = {}
    self._resetinstanciationStats()
    self.workingMemory = set()
    self.dischargedBindings = {}
def _generateBindings(self):
    """
    Generates a list of dictionaries - each a unique variable substitution
    (binding) which applies to the ReteTokens in this PartialInstanciation

    >>> aNode = AlphaNode((Variable('S'),Variable('P'),Variable('O')))
    >>> token1 = ReteToken((URIRef('urn:uuid:alpha'),OWL_NS.differentFrom,URIRef('urn:uuid:beta')))
    >>> token2 = ReteToken((URIRef('urn:uuid:beta'),OWL_NS.differentFrom,URIRef('urn:uuid:alpha')))
    >>> cVars = { Variable('P') : OWL_NS.differentFrom }
    >>> inst = PartialInstanciation([token1.bindVariables(aNode),token2.bindVariables(aNode)],consistentBindings=cVars)
    >>> inst
    <PartialInstanciation (joined on ?P): Set([<ReteToken: S->urn:uuid:beta,P->http://www.w3.org/2002/07/owl#differentFrom,O->urn:uuid:alpha>, <ReteToken: S->urn:uuid:alpha,P->http://www.w3.org/2002/07/owl#differentFrom,O->urn:uuid:beta>])>
    >>> inst.joinedBindings
    {u'P': u'http://www.w3.org/2002/07/owl#differentFrom'}
    >>> inst.tokens
    Set([<ReteToken: S->urn:uuid:beta,P->http://www.w3.org/2002/07/owl#differentFrom,O->urn:uuid:alpha>, <ReteToken: S->urn:uuid:alpha,P->http://www.w3.org/2002/07/owl#differentFrom,O->urn:uuid:beta>])
    >>> inst.bindings
    [{u'P': u'http://www.w3.org/2002/07/owl#differentFrom', u'S': u'urn:uuid:beta', u'O': u'urn:uuid:alpha'}, {u'P': u'http://www.w3.org/2002/07/owl#differentFrom', u'S': u'urn:uuid:alpha', u'O': u'urn:uuid:beta'}]

    Ensure unjoined variables with different names aren't bound to the same
    value (B and Y aren't both bound to "Bart Simpson" simultaneously)

    >>> aNode1 = AlphaNode((Variable('A'),URIRef('urn:uuid:name'),Variable('B')))
    >>> aNode2 = AlphaNode((Variable('X'),URIRef('urn:uuid:name'),Variable('Y')))
    >>> token1 = ReteToken((URIRef('urn:uuid:bart'),URIRef('urn:uuid:name'),Literal("Bart Simpson")))
    >>> token1 = token1.bindVariables(aNode1)
    >>> token2 = ReteToken((URIRef('urn:uuid:b'),URIRef('urn:uuid:name'),Literal("Bart Simpson")))
    >>> token2 = token2.bindVariables(aNode2)
    >>> inst = PartialInstanciation([token1,token2])
    >>> pprint(inst.bindings)
    [{u'A': u'urn:uuid:bart',
      u'B': rdflib.Literal('Bart Simpson',language=None,datatype=None),
      u'X': u'urn:uuid:b',
      u'Y': rdflib.Literal('Bart Simpson',language=None,datatype=None)}]

    Ensure different variables which bind to the same value *within* a token
    includes this combination in the resulting bindings

    >>> aNode1 = AlphaNode((Variable('P1'),RDF.type,URIRef('urn:uuid:Prop1')))
    >>> aNode2 = AlphaNode((Variable('P2'),RDF.type,URIRef('urn:uuid:Prop1')))
    >>> aNode3 = AlphaNode((Variable('P1'),Variable('P2'),RDFS.Class))
    >>> token1 = ReteToken((RDFS.domain,RDFS.domain,RDFS.Class))
    >>> token2 = ReteToken((RDFS.domain,RDF.type,URIRef('urn:uuid:Prop1')))
    >>> token3 = ReteToken((RDFS.range,RDF.type,URIRef('urn:uuid:Prop1')))
    >>> token4 = ReteToken((RDFS.range,RDFS.domain,RDFS.Class))
    >>> inst = PartialInstanciation([token1.bindVariables(aNode3),token2.bindVariables(aNode1),token3.bindVariables(aNode2),token4.bindVariables(aNode3)])
    >>> len(inst.bindings)
    3
    >>> inst.bindings
    [{u'P2': u'http://www.w3.org/2000/01/rdf-schema#range', u'P1': u'http://www.w3.org/2000/01/rdf-schema#domain'}, {u'P2': u'http://www.w3.org/2000/01/rdf-schema#domain', u'P1': u'http://www.w3.org/2000/01/rdf-schema#domain'}, {u'P2': u'http://www.w3.org/2000/01/rdf-schema#domain', u'P1': u'http://www.w3.org/2000/01/rdf-schema#range'}]

    >>> aNode1 = AlphaNode((Variable('X'),RDF.value,Literal(2)))
    >>> aNode2 = AlphaNode((Variable('X'),RDF.type,Variable('Y')))
    >>> aNode3 = AlphaNode((Variable('Z'),URIRef('urn:uuid:Prop1'),Variable('W')))
    >>> token2 = ReteToken((URIRef('urn:uuid:Foo'),RDF.value,Literal(2))).bindVariables(aNode1)
    >>> token3 = ReteToken((URIRef('urn:uuid:Foo'),RDF.type,URIRef('urn:uuid:Baz'))).bindVariables(aNode2)
    >>> token5 = ReteToken((URIRef('urn:uuid:Bar'),URIRef('urn:uuid:Prop1'),URIRef('urn:uuid:Beezle'))).bindVariables(aNode3)
    >>> inst = PartialInstanciation([token2,token3,token5],consistentBindings={Variable('X'):URIRef('urn:uuid:Foo')})
    >>> pprint(list(inst.tokens))
    [<ReteToken: Z->urn:uuid:Bar,W->urn:uuid:Beezle>,
     <ReteToken: X->urn:uuid:Foo>,
     <ReteToken: X->urn:uuid:Foo,Y->urn:uuid:Baz>]
    >>> inst.bindings
    [{u'Y': u'urn:uuid:Baz', u'X': u'urn:uuid:Foo', u'Z': u'urn:uuid:Bar', u'W': u'urn:uuid:Beezle'}]
    >>> inst = PartialInstanciation([token2],consistentBindings={Variable('X'):URIRef('urn:uuid:Foo')})
    >>> inst.bindings
    [{u'X': u'urn:uuid:Foo'}]

    >>> aNode1 = AlphaNode((Variable('P'),OWL_NS.inverseOf,Variable('Q')))
    >>> aNode2 = AlphaNode((Variable('P'),RDF.type,OWL_NS.InverseFunctionalProperty))
    >>> token1 = ReteToken((URIRef('urn:uuid:Foo'),OWL_NS.inverseOf,URIRef('urn:uuid:Bar'))).bindVariables(aNode1)
    >>> token2 = ReteToken((URIRef('urn:uuid:Foo'),RDF.type,OWL_NS.InverseFunctionalProperty)).bindVariables(aNode1)
    >>> inst = PartialInstanciation([token1,token2],consistentBindings={Variable('P'):URIRef('urn:uuid:Foo'),Variable('Q'):URIRef('urn:uuid:Bar')})
    >>> inst._generateBindings()
    >>> inst.bindings
    [{u'Q': u'urn:uuid:Bar', u'P': u'urn:uuid:Foo'}]
    """
    # Trivial case: a single token's binding dict is the only substitution.
    if len(self.tokens) == 1:
        self.bindings = [list(self.tokens)[0].bindingDict.copy()]
        return
    bindings = []
    forcedBindings = []
    isolatedBindings = {}
    # Partition each token's *unjoined* variables: a token contributing
    # exactly one unjoined variable yields an 'isolated' binding (candidate
    # values to combine), while a token contributing several yields a
    # 'forced' dict that must be kept together.
    for token in self.tokens:
        noIterations = 0
        newDict = {}
        for key in ifilter(lambda x: x not in self.joinedBindings,
                           token.bindingDict.keys()):
            var = key
            newDict[var] = token.bindingDict[var]
            noIterations += 1
        if noIterations == 1:
            isolatedBindings.setdefault(var, Set()).add(token.bindingDict[var])
        elif noIterations > 1:
            forcedBindings.append(newDict)
    # Reverse map: value -> set of isolated variables bound to that value.
    # FIX: the original iterated itervalues() and tagged every value with
    # the stale loop variable ``var`` leaked from the token loop above, so
    # a value was never associated with its owning variable and the
    # shared-value filter below could never exclude a combination.
    revIsolBindings = {}
    for isolVar, vals in iteritems(isolatedBindings):
        for val in vals:
            revIsolBindings.setdefault(val, Set()).add(isolVar)
    if isolatedBindings:
        # Enumerate every cross-product combination of isolated values.
        for i in xcombine(*tuple([tuple([(key, val) for val in vals])
                                  for key, vals in iteritems(isolatedBindings)])):
            isolatedDict = dict(i)
            for val in isolatedDict.itervalues():
                keysForVal = revIsolBindings[val]
                # Only keep combinations whose values are owned by a single
                # isolated variable.
                if len(keysForVal) <= 1:
                    newDict = isolatedDict.copy()
                    newDict.update(self.joinedBindings)
                    if newDict not in bindings:
                        bindings.append(newDict)

    def collapse(left, right):
        # Fold ``right`` (a dict, or list of dicts) into the accumulated
        # ``left`` via self.unify, flattening to a list of dicts.
        if isinstance(left, list):
            if not left:
                if isinstance(right, list):
                    return right
                else:
                    return [right]
            elif isinstance(right, list):
                # FIX: the original read ``reduce([left, right])`` --
                # reduce with a single argument unconditionally raises
                # TypeError if this branch is ever reached.  Fold each
                # entry of ``right`` into the accumulated ``left``.
                return reduce(collapse, right, left)
            elif len(left) == 1:
                u = self.unify(left[0], right)
                if isinstance(u, list):
                    return u
                else:
                    return [u]
            else:
                return left + [right]
        elif isinstance(right, list) and not right and left:
            return [left]
        return self.unify(left, right)

    for forcedBinding in forcedBindings:
        newDict = forcedBinding.copy()
        newDict.update(self.joinedBindings)
        if newDict not in bindings:
            bindings.append(newDict)
    self.bindings = reduce(collapse, bindings, [])
    # Fall back to the joined bindings alone when nothing else was produced.
    if not self.bindings:
        self.bindings = [self.joinedBindings]
def __init__(self, ruleStore, name=None,
             initialWorkingMemory=None,
             inferredTarget=None,
             nsMap=None,
             graphVizOutFile=None,
             dontFinalize=False,
             goal=None,
             rulePrioritizer=None,
             alphaNodePrioritizer=None):
    """
    Build a RETE network from the rules held in ``ruleStore``.

    :param ruleStore: store whose ``.rules`` seed the network; its
        ``_finalize()`` is invoked unless ``dontFinalize`` is set.
    :param name: identifier for this network; a fresh ``BNode()`` is used
        when not supplied.
    :param initialWorkingMemory: optional initial fact set; when given it
        becomes ``self.workingMemory`` and is fed through
        ``feedFactsToAdd`` to compute the closure.
    :param inferredTarget: optional ``Graph`` to collect inferred facts;
        when ``None`` a new ``Graph`` is created and the prefixes in
        ``nsMap`` are bound on it.
    :param nsMap: prefix -> namespace mapping (defaults to an empty dict).
    :param graphVizOutFile: when given, the rendered network is written to
        this file via ``renderNetwork``.
    :param dontFinalize: skip ``ruleStore._finalize()``.
    :param goal: stored on ``self.goal``; semantics depend on callers
        elsewhere in the module (not visible here).
    :param rulePrioritizer: stored on ``self.rulePrioritizer``; used by
        code elsewhere in the module (not visible here).
    :param alphaNodePrioritizer: stored on ``self.alphaNodePrioritizer``;
        used by code elsewhere in the module (not visible here).
    """
    # FIX: the signature previously used the mutable default ``nsMap={}``,
    # so every network constructed without an explicit mapping shared (and
    # could silently mutate) one dict object.
    if nsMap is None:
        nsMap = {}
    self.leanCheck = {}
    self.goal = goal
    self.nsMap = nsMap
    self.name = name and name or BNode()
    self.nodes = {}
    self.alphaPatternHash = {}
    self.ruleSet = set()
    # One bucket per ground('1')/variable('0') signature of the triple.
    for alphaPattern in xcombine(('1', '0'), ('1', '0'), ('1', '0')):
        self.alphaPatternHash[tuple(alphaPattern)] = {}
    if inferredTarget is None:
        self.inferredFacts = Graph()
        namespace_manager = NamespaceManager(self.inferredFacts)
        for k, v in nsMap.items():
            namespace_manager.bind(k, v)
        self.inferredFacts.namespace_manager = namespace_manager
    else:
        self.inferredFacts = inferredTarget
    self.workingMemory = initialWorkingMemory and initialWorkingMemory or set()
    self.proofTracers = {}
    self.terminalNodes = set()
    self.instanciations = {}
    self.ruleStore = ruleStore
    self.justifications = {}
    self.dischargedBindings = {}
    if not dontFinalize:
        self.ruleStore._finalize()
    self.filteredFacts = Graph()
    self.rulePrioritizer = rulePrioritizer
    self.alphaNodePrioritizer = alphaNodePrioritizer
    # 'Universal truths' for a rule set are rules where the LHS is empty.
    # Rather than automatically adding them to the working set, alpha nodes
    # are 'notified' of them, so they can be checked for while performing
    # inter element tests.
    self.universalTruths = []
    from FuXi.Horn.HornRules import Ruleset
    self.rules = set()
    self.negRules = set()
    for rule in Ruleset(n3Rules=self.ruleStore.rules, nsMapping=self.nsMap):
        import warnings
        warnings.warn(
            "Rules in a network should be built *after* construction via "+
            " self.buildNetworkClause(HornFromN3(n3graph)) for instance",
            DeprecationWarning, 2)
        self.buildNetworkFromClause(rule)
    self.alphaNodes = [node for node in self.nodes.values()
                       if isinstance(node, AlphaNode)]
    self.alphaBuiltInNodes = [node for node in self.nodes.values()
                              if isinstance(node, BuiltInAlphaNode)]
    self._setupDefaultRules()
    if initialWorkingMemory:
        start = time.time()
        self.feedFactsToAdd(initialWorkingMemory)
        print >>sys.stderr,"Time to calculate closure on working memory: %s m seconds"%((time.time() - start) * 1000)
    if graphVizOutFile:
        print >>sys.stderr,"Writing out RETE network to ", graphVizOutFile
        renderNetwork(self,nsMap=nsMap).write(graphVizOutFile)