def RDFTuplesToSPARQL(conjunct, edb, isGround=False, vars=None,
                      symmAtomicInclusion=False, specialBNodeHandling=None):
    """
    Takes a conjunction of Horn literals and returns the corresponding
    SPARQL query string.

    :param conjunct: sized iterable of Horn literals (the query body)
    :param edb: graph used to resolve qnames / triple patterns
    :param isGround: if True emit an ASK query, otherwise SELECT over `vars`
    :param vars: variables to project in a SELECT (may be mutated: one
        variable is popped when `symmAtomicInclusion` is set)
    :param symmAtomicInclusion: collapse class-membership literals into a
        single ``?KIND`` FILTER disjunction
    :param specialBNodeHandling: passed through to tripleToTriplePattern
    """
    # Fix: the original used the mutable default ``vars=[]`` and popped from
    # it, leaking state across calls that rely on the default.
    if vars is None:
        vars = []
    queryType = "ASK" if isGround else "SELECT %s" % (
        ' '.join(v.n3() for v in vars))
    queryShell = "%s {\n%s\n}" if len(conjunct) > 1 else "%s { %s }"
    if symmAtomicInclusion:
        if vars:
            var = vars.pop()
            prefix = "%s a ?KIND" % var.n3()
        else:
            # Fall back to the first argument of the first literal.
            prefix = "%s a ?KIND" % first(
                [first(iterCondition(lit)).arg[0].n3() for lit in conjunct])
        conjunct = (i.formulae[0] if isinstance(i, And) else i
                    for i in conjunct)
        subquery = queryShell % (
            queryType,
            "%s\nFILTER(%s)" % (
                prefix,
                ' ||\n'.join('?KIND = %s' % edb.qname(GetOp(lit))
                             for lit in conjunct)))
    else:
        subquery = queryShell % (
            queryType,
            ' .\n'.join('\t' + tripleToTriplePattern(
                edb, lit, specialBNodeHandling) for lit in conjunct))
    return subquery
def _testNegative(uri, manifest):
    """
    Run one negative parser test from the manifest: the input document is
    expected to FAIL to parse.  Records a PassingRun/FailingRun triple in
    the module-level ``results`` graph and returns 1 on failure, 0 on pass.
    """
    if verbose:
        write("TESTING: %s" % uri)
    result = 0  # 1=failed, 0=passed
    inDoc = first(manifest.objects(uri, TEST["inputDocument"]))
    if isinstance(inDoc, BNode):
        # Indirect reference: dereference the rdf:about of the blank node.
        inDoc = first(manifest.objects(inDoc, RDFVOC.about))
    if verbose:
        write(u"TESTING: %s" % inDoc)
    store = Graph()
    test = BNode()
    results.add((test, RESULT["test"], inDoc))
    results.add((test, RESULT["system"], system))
    try:
        # Choose the parser format from the file extension.
        if inDoc[-3:] == ".nt":
            format = "nt"
        else:
            format = "xml"
        store.parse(cached_file(inDoc), publicID=inDoc, format=format)
    except ParserError:
        # A parse error is the EXPECTED outcome for a negative test.
        results.add((test, RDF.type, RESULT["PassingRun"]))
        # pass
    else:
        # Parsing succeeded where it should not have.
        write("""Failed: '%s'""" % inDoc)
        results.add((test, RDF.type, RESULT["FailingRun"]))
        result = 1
    return result
def Th(owlGraph,_class,variable=Variable('X'),position=LHS):
    """
    DLP head (antecedent) knowledge assertional forms (ABox assertions,
    conjunction of ABox assertions, and universal role restriction assertions)

    Generator: yields Clause/Uniterm/And structures translating the OWL
    class description ``_class`` into rule-head form over ``variable``.
    """
    props = list(set(owlGraph.predicates(subject=_class)))
    if OWL_NS.allValuesFrom in props:
        #http://www.w3.org/TR/owl-semantics/#owl_allValuesFrom
        for s,p,o in owlGraph.triples((_class,OWL_NS.allValuesFrom,None)):
            prop = list(owlGraph.objects(subject=_class,predicate=OWL_NS.onProperty))[0]
            # Fresh variable for the role filler; recurse into the range class.
            newVar = Variable(BNode())
            body = Uniterm(prop,[variable,newVar],newNss=owlGraph.namespaces())
            for head in Th(owlGraph,o,variable=newVar):
                yield Clause(body,head)
    elif OWL_NS.hasValue in props:
        prop = list(owlGraph.objects(subject=_class,predicate=OWL_NS.onProperty))[0]
        o = first(owlGraph.objects(subject=_class,predicate=OWL_NS.hasValue))
        yield Uniterm(prop,[variable,o],newNss=owlGraph.namespaces())
    elif OWL_NS.someValuesFrom in props:
        #http://www.w3.org/TR/owl-semantics/#someValuesFrom
        for s,p,o in owlGraph.triples((_class,OWL_NS.someValuesFrom,None)):
            prop = list(owlGraph.objects(subject=_class,predicate=OWL_NS.onProperty))[0]
            # NOTE: here the fresh term is a plain BNode (an existential
            # witness), not a Variable as in the allValuesFrom branch.
            newVar = BNode()
            yield And([Uniterm(prop,[variable,newVar],newNss=owlGraph.namespaces()),
                       generatorFlattener(Th(owlGraph,o,variable=newVar))])
    elif OWL_NS.intersectionOf in props:
        from FuXi.Syntax.InfixOWL import BooleanClass
        # Conjunction: first translation of each conjunct.
        yield And([first(Th(owlGraph,h,variable)) for h in BooleanClass(_class)])
    else:
        #Simple class
        yield Uniterm(RDF.type,
                      [variable,
                       isinstance(_class,BNode) and SkolemizeExistentialClasses(_class) or _class],
                      newNss=owlGraph.namespaces())
def testUnivInversion(self):
    """
    After UniversalNominalRangeTransformer, the universal restriction on
    propFoo should become the complement of an existential restriction
    over the complementary enumeration; subsequent Nominal/Demorgan
    transforms should yield a conjunction of negated hasValue atoms.

    Fix: the deprecated ``failUnless*`` aliases were removed in
    Python 3.12; use the canonical ``assertTrue``/``assertEqual``.
    """
    UniversalNominalRangeTransformer().transform(self.ontGraph)
    self.assertEqual(len(list(self.foo.subClassOf)), 1,
                     "There should still be one subsumed restriction")
    subC = CastClass(first(self.foo.subClassOf))
    self.assertTrue(not isinstance(subC, Restriction),
                    "subclass of a restriction")
    self.assertTrue(subC.complementOf is not None, "Should be a complement!")
    innerC = CastClass(subC.complementOf)
    self.assertTrue(isinstance(innerC, Restriction),
                    "complement of a restriction, not %r" % innerC)
    self.assertEqual(innerC.onProperty, EX_NS.propFoo,
                     "restriction on propFoo")
    self.assertTrue(
        innerC.someValuesFrom,
        "converted to an existential restriction not %r" % innerC)
    invertedC = CastClass(innerC.someValuesFrom)
    self.assertTrue(isinstance(invertedC, EnumeratedClass),
                    "existencial restriction on enumerated class")
    self.assertEqual(
        len(invertedC), 2,
        "existencial restriction on enumerated class of length 2")
    self.assertEqual(repr(invertedC), "{ ex:individual2 ex:individual3 }",
                     "The negated partition should exclude individual1")
    NominalRangeTransformer().transform(self.ontGraph)
    DemorganTransformer().transform(self.ontGraph)
    subC = CastClass(first(self.foo.subClassOf))
    self.assertEqual(
        repr(subC),
        "( ( not ( ex:propFoo value ex:individual2 ) ) and ( not ( ex:propFoo value ex:individual3 ) ) )"
    )
def extractRule(self,rule):
    """
    Build a FuXi Rule from a RIF rule resource.

    Fix: the original assigned ``map(...)`` to ``body`` and ``allVars``.
    On Python 3 ``map`` returns a lazy iterator, so the subsequent
    ``isinstance(body, list)`` check was always False and the body was
    never wrapped in ``And`` — and ``allVars`` could only be consumed
    once.  Materialize both as lists (works identically on Python 2).
    """
    vars,impl = self.rules[rule]
    body,bodyType,head,headType = self.implications[impl]
    allVars = [self.extractTerm(t) for t in Collection(self.graph,vars)]
    head = first(self.extractPredication(head,headType))
    if bodyType == RIF_NS.And:
        # Conjunctive body: extract each formula in the rif:formulas list.
        body = [
            first(self.extractPredication(
                i,
                first(self.graph.objects(i,RDF.type))))
            for i in Collection(
                self.graph,
                first(self.graph.objects(body,RIF_NS.formulas)))
        ]
    else:
        body = self.extractPredication(body,bodyType)

    if isinstance(body,list):
        body = And([first(body)]) if len(body) == 1 else And(body)

    nsMapping = {}
    nsMapping.update(self.nsBindings)

    return Rule(
        Clause(body,head),
        declare=allVars,
        nsMapping=nsMapping
    )
def Th(owlGraph, _class, variable=Variable('X'), position=LHS):
    """
    DLP head (antecedent) knowledge assertional forms (ABox assertions,
    conjunction of ABox assertions, and universal role restriction assertions)

    Generator: yields Clause/Uniterm/And structures translating the OWL
    class description ``_class`` into rule-head form over ``variable``.
    """
    props = list(set(owlGraph.predicates(subject=_class)))
    if OWL_NS.allValuesFrom in props:
        #http://www.w3.org/TR/owl-semantics/#owl_allValuesFrom
        for s, p, o in owlGraph.triples((_class, OWL_NS.allValuesFrom, None)):
            prop = list(owlGraph.objects(
                subject=_class, predicate=OWL_NS.onProperty))[0]
            # Fresh variable for the role filler; recurse into the range class.
            newVar = Variable(BNode())
            body = Uniterm(prop, [variable, newVar],
                           newNss=owlGraph.namespaces())
            for head in Th(owlGraph, o, variable=newVar):
                yield Clause(body, head)
    elif OWL_NS.hasValue in props:
        prop = list(owlGraph.objects(
            subject=_class, predicate=OWL_NS.onProperty))[0]
        o = first(owlGraph.objects(subject=_class, predicate=OWL_NS.hasValue))
        yield Uniterm(prop, [variable, o], newNss=owlGraph.namespaces())
    elif OWL_NS.someValuesFrom in props:
        #http://www.w3.org/TR/owl-semantics/#someValuesFrom
        for s, p, o in owlGraph.triples((_class, OWL_NS.someValuesFrom, None)):
            prop = list(owlGraph.objects(
                subject=_class, predicate=OWL_NS.onProperty))[0]
            # NOTE: here the fresh term is a plain BNode (an existential
            # witness), not a Variable as in the allValuesFrom branch.
            newVar = BNode()
            yield And([Uniterm(prop, [variable, newVar],
                               newNss=owlGraph.namespaces()),
                       generatorFlattener(Th(owlGraph, o, variable=newVar))])
    elif OWL_NS.intersectionOf in props:
        from FuXi.Syntax.InfixOWL import BooleanClass
        # Conjunction: first translation of each conjunct.
        yield And([first(Th(owlGraph, h, variable))
                   for h in BooleanClass(_class)])
    else:
        #Simple class
        yield Uniterm(RDF.type,
                      [variable,
                       isinstance(_class, BNode) and
                       SkolemizeExistentialClasses(_class) or _class],
                      newNss=owlGraph.namespaces())
def testUnivInversion(self):
    """
    After UniversalNominalRangeTransformer, the universal restriction on
    propFoo should become the complement of an existential restriction
    over the complementary enumeration; subsequent Nominal/Demorgan
    transforms should yield a conjunction of negated hasValue atoms.

    Fix: the deprecated ``failUnless*`` aliases were removed in
    Python 3.12; use the canonical ``assertTrue``/``assertEqual``.
    """
    UniversalNominalRangeTransformer().transform(self.ontGraph)
    self.assertEqual(len(list(self.foo.subClassOf)), 1,
                     "There should still be one subsumed restriction")
    subC = CastClass(first(self.foo.subClassOf))
    self.assertTrue(not isinstance(subC, Restriction),
                    "subclass of a restriction")
    self.assertTrue(subC.complementOf is not None, "Should be a complement.")
    innerC = CastClass(subC.complementOf)
    self.assertTrue(isinstance(innerC, Restriction),
                    "complement of a restriction, not %r" % innerC)
    self.assertEqual(innerC.onProperty, EX_NS.propFoo,
                     "restriction on propFoo")
    self.assertTrue(innerC.someValuesFrom,
                    "converted to an existential restriction not %r" % innerC)
    invertedC = CastClass(innerC.someValuesFrom)
    self.assertTrue(isinstance(invertedC, EnumeratedClass),
                    "existential restriction on enumerated class")
    self.assertEqual(
        len(invertedC), 2,
        "existencial restriction on enumerated class of length 2")
    self.assertEqual(repr(invertedC), "{ ex:individual2 ex:individual3 }",
                     "The negated partition should exclude individual1")
    NominalRangeTransformer().transform(self.ontGraph)
    DemorganTransformer().transform(self.ontGraph)
    subC = CastClass(first(self.foo.subClassOf))
    self.assertEqual(
        repr(subC),
        "( ( not ( ex:propFoo value ex:individual2 ) ) and ( not ( ex:propFoo value ex:individual3 ) ) )")
def finalize(self):
    """
    Rewrite this adorned predicate occurrence into its final "query" form:
    the operator name is mangled with the adornment string and, when only
    one argument remains distinguished, the atom is collapsed into a unary
    rdf:type assertion.  Mutates ``self.op``/``self.arg`` in place.
    """
    if self.adornment:
        if self.hasBindings():
            if len(self.adornment) == 1:
                # adorned predicate occurrence with one out of two arguments bound
                # convert: It becomes a unary predicate
                # (an rdf:type assertion)
                self.arg[-1] = URIRef(GetOp(self) + "_query_" +
                                      first(self.adornment))
                self.arg[0] = first(self.getDistinguishedVariables())
                self.op = RDF.type
            elif "".join(self.adornment) == "bb":
                # Two bound args
                self.setOperator(URIRef(self.op + "_query_bb"))
            else:
                # remove unbound argument, and reduce arity
                singleArg = first(self.getDistinguishedVariables())
                self.arg[-1] = URIRef(GetOp(self) + "_query_" +
                                      "".join(self.adornment))
                self.arg[0] = singleArg
                self.op = RDF.type
        else:
            # Adorned but with no bindings: becomes an "open query" marker.
            currentOp = GetOp(self)
            self.op = RDF.type
            self.arg = [currentOp, BFP_RULE.OpenQuery]
    else:
        # Unadorned: plain "_query" mangling, except for higher-order queries.
        if GetOp(self) != HIGHER_ORDER_QUERY:
            self.setOperator(URIRef(GetOp(self) + "_query"))
    # op/arg were mutated above; the cached hash must be recomputed.
    self._recalculateHash()
def _testPositive(uri, manifest):
    """
    Run one positive parser test from the manifest: the input document is
    expected to parse and to match the expected output document.

    NOTE(review): this function uses Python-2-only ``except X, e`` syntax
    and appears to end without a return statement — possibly truncated.
    """
    if verbose:
        write(u"TESTING: %s" % uri)
    result = 0  # 1=failed, 0=passed
    inDoc = first(manifest.objects(uri, TEST["inputDocument"]))
    outDoc = first(manifest.objects(uri, TEST["outputDocument"]))
    expected = Graph()
    # Pick the parser format from the output document's extension.
    if outDoc[-3:] == ".nt":
        format = "nt"
    else:
        format = "xml"
    expected.parse(cached_file(outDoc), publicID=outDoc, format=format)
    store = TestStore(expected)
    if inDoc[-3:] == ".nt":
        format = "nt"
    else:
        format = "xml"
    try:
        store.parse(cached_file(inDoc), publicID=inDoc, format=format)
    except ParserError, pe:  # NOTE(review): Python 2-only except syntax
        write("Failed '")
        write(inDoc)
        write("' failed with")
        raise pe
        # NOTE(review): everything below is unreachable after ``raise pe``;
        # the diagnostics and ``result = 1`` never execute.
        try:
            write(type(pe))
        except:
            write("sorry could not dump out error.")
        result = 1
def RDFTuplesToSPARQL(conjunct, edb, isGround=False, vars=None,
                      symmAtomicInclusion=False):
    """
    Takes a conjunction of Horn literals and returns the corresponding
    SPARQL query string.

    :param conjunct: sized iterable of Horn literals (the query body)
    :param edb: graph used to resolve qnames / triple patterns
    :param isGround: if True emit an ASK query, otherwise SELECT over `vars`
    :param vars: variables to project in a SELECT (may be mutated: one
        variable is popped when `symmAtomicInclusion` is set)
    :param symmAtomicInclusion: collapse class-membership literals into a
        single ``?KIND`` FILTER disjunction
    """
    # Fix: the original used the mutable default ``vars=[]`` and popped from
    # it, leaking state across calls that rely on the default.
    if vars is None:
        vars = []
    queryType = "ASK" if isGround else "SELECT %s" % (
        ' '.join(v.n3() for v in vars))
    queryShell = "%s {\n%s\n}" if len(conjunct) > 1 else "%s { %s }"
    if symmAtomicInclusion:
        if vars:
            var = vars.pop()
            prefix = "%s a ?KIND" % var.n3()
        else:
            # Fall back to the first argument of the first literal.
            prefix = "%s a ?KIND" % first(
                [first(iterCondition(lit)).arg[0].n3() for lit in conjunct])
        conjunct = (i.formulae[0] if isinstance(i, And) else i
                    for i in conjunct)
        subquery = queryShell % (
            queryType,
            "%s\nFILTER(%s)" % (prefix, ' ||\n'.join(
                '?KIND = %s' % edb.qname(GetOp(lit)) for lit in conjunct)))
    else:
        subquery = queryShell % (queryType, ' .\n'.join(
            '\t' + tripleToTriplePattern(edb, lit) for lit in conjunct))
    return subquery
def IncomingSIPArcs(sip,predOcc):
    """
    Yield the incoming sideways-information-passing arcs for the given
    predicate occurrence as ``(sources, bindings)`` pairs.
    """
    for subj, pred, _obj in sip.triples((None, None, predOcc)):
        # Only edges typed as SIP arcs carry binding information.
        if (pred, RDF.type, MAGIC.SipArc) not in sip:
            continue
        arcBindings = Collection(sip, first(sip.objects(pred, MAGIC.bindings)))
        if (subj, RDF.type, MAGIC.BoundHeadPredicate) in sip:
            # A bound head predicate is a single source node.
            yield [subj], arcBindings
        else:
            yield Collection(sip, subj), arcBindings
def extractTerm(self, term):
    """
    Map a RIF term resource onto the corresponding rdflib term:
    a Variable, a URIRef (IRI constant), or a plain value.
    """
    graph = self.graph
    if (term, RDF.type, RIF_NS.Var) in graph:
        # RIF variable: named by its rif:varname.
        return Variable(first(graph.objects(term, RIF_NS.varname)))
    if (term, RIF_NS.constIRI, None) in graph:
        # IRI constant carried as an xsd:anyURI-typed literal.
        iriLiteral = first(graph.objects(term, RIF_NS.constIRI))
        assert iriLiteral.datatype == XSD_NS.anyURI
        return URIRef(iriLiteral)
    # Otherwise a plain constant value.
    return first(graph.objects(term, RIF_NS.value))
def testHiddenDemorgan(self):
    """
    NormalFormReduction should expose the hidden De Morgan form: the
    subclass becomes the negation of the conjunction (alpha and omega).

    Fix: the deprecated ``failUnless`` alias was removed in Python 3.12;
    use the canonical ``assertTrue``.
    """
    NormalFormReduction(self.ontGraph)
    self.assertTrue(first(self.foo.subClassOf).complementOf,
                    "should be the negation of a boolean class")
    innerC = CastClass(first(self.foo.subClassOf).complementOf)
    self.assertTrue(isinstance(innerC, BooleanClass) and
                    innerC._operator == OWL_NS.intersectionOf,
                    "should be the negation of a conjunct")
    self.assertEqual(repr(innerC), "( ex:alpha and ex:omega )")
def testHiddenDemorgan(self):
    """
    NormalFormReduction should expose the hidden De Morgan form: the
    subclass becomes the negation of the conjunction (alpha and omega).

    Fix: the deprecated ``failUnless`` alias was removed in Python 3.12;
    use the canonical ``assertTrue``.
    """
    NormalFormReduction(self.ontGraph)
    self.assertTrue(first(self.foo.subClassOf).complementOf,
                    "should be the negation of a boolean class")
    innerC = CastClass(first(self.foo.subClassOf).complementOf)
    self.assertTrue(isinstance(innerC, BooleanClass) and
                    innerC._operator == OWL_NS.intersectionOf,
                    "should be the negation of a conjunct")
    self.assertEqual(repr(innerC), "( ex:alpha and ex:omega )")
def extractFrame(self, frame):
    """
    Convert a RIF Frame into a list of Uniterm assertions, one per
    slot: ``slotkey(object, slotvalue)``.
    """
    obj, slots = self.frames[frame]
    assertions = []
    for slot in Collection(self.graph, slots):
        slotKey = self.extractTerm(
            first(self.graph.objects(slot, RIF_NS.slotkey)))
        slotValue = self.extractTerm(
            first(self.graph.objects(slot, RIF_NS.slotvalue)))
        assertions.append(
            Uniterm(slotKey, [self.extractTerm(obj), slotValue]))
    return assertions
def extractImp(self, impl):
    """
    Build a FuXi Rule from a RIF implication resource.

    Fix: the rif:And branch contained a bare ``raise`` with no active
    exception, which surfaces as an opaque
    ``RuntimeError: No active exception to re-raise``.  Raise an explicit
    NotImplementedError so the unsupported case is self-describing.
    """
    body, bodyType, head, headType = self.implications[impl]
    head = first(self.extractPredication(head, headType))
    if bodyType == RIF_NS.And:
        raise NotImplementedError(
            "Conjunctive (rif:And) implication bodies are not supported")
    else:
        body = self.extractPredication(body, bodyType)
    # Normalize a predication list into an And conjunction.
    body = And([first(body)]) if len(body) == 1 else And(body)
    return Rule(Clause(body, head), declare=[])
def IncomingSIPArcs(sip, predOcc):
    """
    Generator over the incoming sideways-information-passing arcs of
    ``predOcc``, yielding ``(sources, bindings)`` pairs.
    """
    for s, p, o in sip.triples((None, None, predOcc)):
        if (p, RDF.type, MAGIC.SipArc) in sip:
            # Bindings carried on the arc node itself.
            bindings = Collection(sip, first(sip.objects(p, MAGIC.bindings)))
            isBoundHead = (s, RDF.type, MAGIC.BoundHeadPredicate) in sip
            # A bound head predicate stands alone; otherwise the subject
            # denotes an RDF collection of source predicates.
            sources = [s] if isBoundHead else Collection(sip, s)
            yield sources, bindings
def predicate(self, predicate, object, depth=1):
    """
    Serialize a single (predicate, object) pair for the current subject.
    Literals are written inline; resources are either referenced
    (rdf:resource / rdf:nodeID) or recursively inlined, with RDF lists
    emitted as rdf:parseType="Collection".
    """
    writer = self.writer
    store = self.store
    writer.push(predicate)
    if isinstance(object, Literal):
        attributes = ""  # NOTE(review): unused local, kept as-is
        if object.language:
            writer.attribute(XMLLANG, object.language)
        if object.datatype:
            writer.attribute(RDF.datatype, object.datatype)
        writer.text(object)
    elif object in self.__serialized or not (object, None, None) in store:
        # Already serialized, or no statements about it: emit a reference.
        if isinstance(object, BNode):
            if more_than(store.triples((None, None, object)), 0):
                writer.attribute(RDF.nodeID, fix(object))
        else:
            writer.attribute(RDF.resource, self.relativize(object))
    else:
        if first(store.objects(object, RDF.first)):
            # may not have type RDF.List
            collection = object
            self.__serialized[object] = 1
            # TODO: warn that any assertions on object other than
            # RDF.first and RDF.rest are ignored... including RDF.List
            writer.attribute(RDF.parseType, "Collection")
            col = Collection(store, object)
            for item in col:
                if isinstance(item, URIRef):
                    self.forceRDFAbout.add(item)
                self.subject(item)
                if not isinstance(item, URIRef):
                    self.__serialized[item] = 1
        else:
            # Named classes are referenced rather than inlined.
            if first(store.triples_choices(
                    (object, RDF.type, [OWL_NS.Class, RDFS.Class]))) and \
                    isinstance(object, URIRef):
                writer.attribute(RDF.resource, self.relativize(object))
            elif depth <= self.max_depth:
                self.subject(object, depth + 1)
            elif isinstance(object, BNode):
                if not object in self.__serialized and \
                        (object, None, None) in store and \
                        len(list(store.subjects(object=object))) == 1:
                    #inline blank nodes if they haven't been serialized yet and are
                    #only referenced once (regardless of depth)
                    self.subject(object, depth + 1)
                else:
                    writer.attribute(RDF.nodeID, fix(object))
            else:
                writer.attribute(RDF.resource, self.relativize(object))
    writer.pop(predicate)
def predicate(self, predicate, object, depth=1):
    """
    Serialize a single (predicate, object) pair for the current subject.
    Literals are written inline; resources are either referenced
    (rdf:resource / rdf:nodeID) or recursively inlined, with RDF lists
    emitted as rdf:parseType="Collection".
    """
    writer = self.writer
    store = self.store
    writer.push(predicate)

    if isinstance(object, Literal):
        attributes = ""  # NOTE(review): unused local, kept as-is
        if object.language:
            writer.attribute(XMLLANG, object.language)

        if object.datatype:
            writer.attribute(RDF.datatype, object.datatype)

        writer.text(object)

    elif object in self.__serialized or not (object, None, None) in store:
        # Already serialized, or no statements about it: emit a reference.
        if isinstance(object, BNode):
            if more_than(store.triples((None, None, object)), 0):
                writer.attribute(RDF.nodeID, fix(object))
        else:
            writer.attribute(RDF.resource, self.relativize(object))

    else:
        if first(store.objects(object, RDF.first)):
            # may not have type RDF.List
            collection = object
            self.__serialized[object] = 1

            # TODO: warn that any assertions on object other than
            # RDF.first and RDF.rest are ignored... including RDF.List
            writer.attribute(RDF.parseType, "Collection")

            col = Collection(store, object)

            for item in col:
                if isinstance(item, URIRef):
                    self.forceRDFAbout.add(item)

                self.subject(item)

                if not isinstance(item, URIRef):
                    self.__serialized[item] = 1
        else:
            # Named classes are referenced rather than inlined.
            if first(store.triples_choices(
                    (object, RDF.type,
                     [OWL_NS.Class, RDFS.Class]))) and isinstance(
                         object, URIRef):
                writer.attribute(RDF.resource, self.relativize(object))

            elif depth <= self.max_depth:
                self.subject(object, depth + 1)

            elif isinstance(object, BNode):
                if (
                    not object in self.__serialized
                    and (object, None, None) in store
                    and len(list(store.subjects(object=object))) == 1
                ):
                    # inline blank nodes if they haven't been serialized yet and are
                    # only referenced once (regardless of depth)
                    self.subject(object, depth + 1)
                else:
                    writer.attribute(RDF.nodeID, fix(object))
            else:
                writer.attribute(RDF.resource, self.relativize(object))

    writer.pop(predicate)
def testOtherForm2(self):
    """
    An existential restriction over an enumerated class should be reduced
    by NormalFormReduction to a disjunction of hasValue restrictions.

    Fix: the deprecated ``assertEquals`` alias was removed in
    Python 3.12; use the canonical ``assertEqual``.
    """
    hasCoronaryBypassConduit = Property(EX_NS.hasCoronaryBypassConduit)
    ITALeft = EX.ITALeft
    ITALeft += (hasCoronaryBypassConduit | some | EnumeratedClass(
        members=[
            EX_NS.CoronaryBypassConduit_internal_thoracic_artery_left_insitu,
            EX_NS.CoronaryBypassConduit_internal_thoracic_artery_left_free]))
    from FuXi.DLP.DLNormalization import NormalFormReduction
    self.assertEqual(repr(Class(first(ITALeft.subSumpteeIds()))),
                     "Some Class SubClassOf: Class: ex:ITALeft ")
    NormalFormReduction(self.ontGraph)
    self.assertEqual(
        repr(Class(first(ITALeft.subSumpteeIds()))),
        "Some Class SubClassOf: Class: ex:ITALeft . EquivalentTo: ( ( ex:hasCoronaryBypassConduit value ex:CoronaryBypassConduit_internal_thoracic_artery_left_insitu ) or ( ex:hasCoronaryBypassConduit value ex:CoronaryBypassConduit_internal_thoracic_artery_left_free ) )")
def Tb(owlGraph, _class, variable=Variable('X')):
    """
    DLP body (consequent knowledge assertional forms (ABox assertions,
    conjunction / disjunction of ABox assertions, and exisential role
    restriction assertions) These are all common EL++ templates for KR

    Returns a Uniterm / And / Or structure translating the OWL class
    description ``_class`` into rule-body form over ``variable``.
    """
    props = list(set(owlGraph.predicates(subject=_class)))
    if OWL_NS.intersectionOf in props and not isinstance(_class, URIRef):
        # Anonymous conjunction: flatten all conjuncts into one And.
        for s, p, o in owlGraph.triples((_class, OWL_NS.intersectionOf, None)):
            conj = []
            handleConjunct(conj, owlGraph, o, variable)
            return And(conj)
    elif OWL_NS.unionOf in props and not isinstance(_class, URIRef):
        # http://www.w3.org/TR/owl-semantics/#owl_unionOf
        for s, p, o in owlGraph.triples((_class, OWL_NS.unionOf, None)):
            return Or([
                Tb(owlGraph, c, variable=variable)
                for c in Collection(owlGraph, o)
            ])
    elif OWL_NS.someValuesFrom in props:
        # http://www.w3.org/TR/owl-semantics/#owl_someValuesFrom
        prop = list(
            owlGraph.objects(subject=_class, predicate=OWL_NS.onProperty))[0]
        o = list(
            owlGraph.objects(
                subject=_class, predicate=OWL_NS.someValuesFrom))[0]
        # Fresh variable for the role filler; recurse into the filler class.
        newVar = Variable(BNode())
        # body = Uniterm(prop, [variable, newVar], newNss=owlGraph.namespaces())
        # head = Th(owlGraph, o, variable=newVar)
        return And([
            Uniterm(prop, [variable, newVar], newNss=owlGraph.namespaces()),
            Tb(owlGraph, o, variable=newVar)
        ])
    elif OWL_NS.hasValue in props:
        # http://www.w3.org/TR/owl-semantics/#owl_hasValue
        # Domain-specific rules for hasValue
        # Can be achieved via pD semantics
        prop = list(
            owlGraph.objects(subject=_class, predicate=OWL_NS.onProperty))[0]
        o = first(owlGraph.objects(subject=_class, predicate=OWL_NS.hasValue))
        return Uniterm(prop, [variable, o], newNss=owlGraph.namespaces())
    elif OWL_NS.complementOf in props:
        # Negation is delegated to Tc.
        return Tc(owlGraph,
                  first(owlGraph.objects(_class, OWL_NS.complementOf)))
    else:
        # simple class
        # "Named" Uniterm
        _classTerm = SkolemizeExistentialClasses(_class)
        return Uniterm(RDF.type, [variable, _classTerm],
                       newNss=owlGraph.namespaces())
def Tb(owlGraph, _class, variable=Variable('X')):
    """
    DLP body (consequent knowledge assertional forms (ABox assertions,
    conjunction / disjunction of ABox assertions, and exisential role
    restriction assertions) These are all common EL++ templates for KR

    Returns a Uniterm / And / Or structure translating the OWL class
    description ``_class`` into rule-body form over ``variable``.
    """
    props = list(set(owlGraph.predicates(subject=_class)))
    if OWL_NS.intersectionOf in props and not isinstance(_class, URIRef):
        # Anonymous conjunction: flatten all conjuncts into one And.
        for s, p, o in owlGraph.triples((_class, OWL_NS.intersectionOf, None)):
            conj = []
            handleConjunct(conj, owlGraph, o, variable)
            return And(conj)
    elif OWL_NS.unionOf in props and not isinstance(_class, URIRef):
        #http://www.w3.org/TR/owl-semantics/#owl_unionOf
        for s, p, o in owlGraph.triples((_class, OWL_NS.unionOf, None)):
            return Or([Tb(owlGraph, c, variable=variable)
                       for c in Collection(owlGraph, o)])
    elif OWL_NS.someValuesFrom in props:
        #http://www.w3.org/TR/owl-semantics/#owl_someValuesFrom
        prop = list(
            owlGraph.objects(subject=_class, predicate=OWL_NS.onProperty))[0]
        o = list(owlGraph.objects(
            subject=_class, predicate=OWL_NS.someValuesFrom))[0]
        # Fresh variable for the role filler; recurse into the filler class.
        newVar = Variable(BNode())
        # @FIXME: unused code
        # body = Uniterm(
        #     prop, [variable, newVar], newNss=owlGraph.namespaces())
        # head = Th(owlGraph, o, variable=newVar)
        return And(
            [Uniterm(prop, [variable, newVar], newNss=owlGraph.namespaces()),
             Tb(owlGraph, o, variable=newVar)])
    elif OWL_NS.hasValue in props:
        # http://www.w3.org/TR/owl-semantics/#owl_hasValue
        # Domain-specific rules for hasValue
        # Can be achieved via pD semantics
        prop = list(
            owlGraph.objects(subject=_class, predicate=OWL_NS.onProperty))[0]
        o = first(owlGraph.objects(subject=_class, predicate=OWL_NS.hasValue))
        return Uniterm(prop, [variable, o], newNss=owlGraph.namespaces())
    elif OWL_NS.complementOf in props:
        # Negation is delegated to Tc.
        return Tc(
            owlGraph, first(owlGraph.objects(_class, OWL_NS.complementOf)))
    else:
        # simple class
        # "Named" Uniterm
        _classTerm = SkolemizeExistentialClasses(_class)
        return Uniterm(
            RDF.type, [variable, _classTerm], newNss=owlGraph.namespaces())
def subject(self, subject, depth=1):
    """
    Serialize one subject as an HTML ``<div typeof=...>`` element
    (RDFa-style), recursing into its predicate/object pairs.  Subjects
    already serialized are skipped via ``self.__serialized``.
    """
    store = self.store
    write = self.write
    indent = "\n" + indent_string * depth
    if not subject in self.__serialized:
        self.__serialized[subject] = 1
        type = first(store.objects(subject, RDF.type))
        try:
            # Only keep the type if it can be qname-abbreviated.
            self.nm.qname(type)
        except:
            type = None
        element = type or RDFS.Resource
        if isinstance(subject, BNode):  # not referenced more than once
            if more_than(store.triples((None, None, subject)), 1):
                # Referenced elsewhere: needs an explicit about= identifier.
                write("%s<div typeof=\"%s\" about=\"%s\">" % (
                    indent, self.getQName(element), fix(subject)))
            else:
                write("%s<div typeof=\"%s\">" % (
                    indent, self.getQName(element)))
        else:
            write("%s<div typeof=\"%s\" about=\"%s\">" % (
                indent, self.getQName(element), self.relativize(subject)))
        if (subject, None, None) in store:
            for predicate, object in store.predicate_objects(subject):
                # The typeof= attribute already covers the rdf:type triple.
                if not (predicate == RDF.type and object == type):
                    self.predicate(predicate, object, depth + 1)
        write("%s</div>" % indent)
def testOtherForm2(self):
    """
    An existential restriction over an enumerated class should be reduced
    by NormalFormReduction to a disjunction of hasValue restrictions.

    Fix: the deprecated ``assertEquals`` alias was removed in
    Python 3.12; use the canonical ``assertEqual``.
    """
    hasCoronaryBypassConduit = Property(EX_NS.hasCoronaryBypassConduit)
    ITALeft = EX.ITALeft
    ITALeft += (hasCoronaryBypassConduit | some | EnumeratedClass(members=[
        EX_NS.CoronaryBypassConduit_internal_thoracic_artery_left_insitu,
        EX_NS.CoronaryBypassConduit_internal_thoracic_artery_left_free
    ]))
    from FuXi.DLP.DLNormalization import NormalFormReduction
    self.assertEqual(repr(Class(first(ITALeft.subSumpteeIds()))),
                     "Some Class SubClassOf: Class: ex:ITALeft ")
    NormalFormReduction(self.ontGraph)
    self.assertEqual(
        repr(Class(first(ITALeft.subSumpteeIds()))),
        'Some Class SubClassOf: Class: ex:ITALeft . EquivalentTo: ( ( ex:hasCoronaryBypassConduit VALUE <http://example.com/CoronaryBypassConduit_internal_thoracic_artery_left_insitu> ) OR ( ex:hasCoronaryBypassConduit VALUE <http://example.com/CoronaryBypassConduit_internal_thoracic_artery_left_free> ) )'
    )
def ProperSipOrderWithNegation(body): """ Ensures the list of literals has the negated literals at the end of the list """ # from FuXi.Rete.SidewaysInformationPassing import iterCondition #import pdb;pdb.set_trace() firstNegLiteral = None bodyIterator = list(body) for idx, literal in enumerate(bodyIterator): if literal.naf: firstNegLiteral = literal break if firstNegLiteral: #There is a first negative literal, are there subsequent positive literals? subsequentPosLits = first( itertools.dropwhile(lambda i: i.naf, bodyIterator[idx:])) if len(body) - idx > 1: #if this is not the last term in the body #then we succeed only if there are no subsequent positive literals return not subsequentPosLits else: #this is the last term, so we are successful return True else: #There are no negative literals return True
def ComplementExpand(tBoxGraph, complementAnnotation):
    """
    For every class that appears negated (owl:complementOf) inside an
    intersection or union list, expand its complement — unless the
    containing class carries the given annotation marking it as already
    expanded.
    """
    expanded = []
    for negated in tBoxGraph.subjects(predicate=OWL_NS.complementOf):
        # Walk rdf:rest links backwards to find the head of any RDF list
        # that contains the negated class.
        node = first(tBoxGraph.subjects(RDF.first, negated))
        listHead = None
        while node:
            listHead = node
            node = first(tBoxGraph.subjects(RDF.rest, node))
        if not listHead:
            continue
        # Any boolean class description built on that list is a candidate.
        for subj, _pred, _obj in tBoxGraph.triples_choices(
                (None, [OWL_NS.intersectionOf, OWL_NS.unionOf], listHead)):
            if (subj, complementAnnotation, None) in tBoxGraph:
                # Annotated: skip classes already expanded.
                continue
            _class = Class(subj)
            expanded.append(subj)
            print("Added %s to complement expansion" % _class)
            ComplementExpansion(_class)
def __call__(self, tNode, inferredTriple, token, binding, debug=False):
    """
    Called when a (EDB) query literal is triggered with given bindings.

    Dispatches a (possibly conjunctive) EDB query against the fact graph,
    once per distinct (literal, node, token) key, and feeds each answer
    back through ``handleQueryAnswer``.
    """
    assert len(tNode.consequent) == 1
    key = (self.queryLiteral, tNode, token)
    # Guard: each (literal, node, token) combination fires at most once.
    if key not in self.bfp.firedEDBQueries:
        self.bfp.firedEDBQueries.add(key)
        for binding in token.bindings:
            # Drop unbound (None) variables from the binding mapping.
            _bindings = dict([(k, v) for k, v in list(binding.items())
                              if v != None])

            closure = ReadOnlyGraphAggregate(
                [self.factGraph,
                 self.bfp.metaInterpNetwork.inferredFacts])
            closure.templateMap = self.factGraph.templateMap

            # For each mapping that unifies with theory
            if self.edbConj:
                # Maximal EDB conjunction: query all literals together,
                # projecting the union of their variables.
                _vars = set()
                for lit in self.edbConj:
                    _vars.update(list(GetVariables(lit, secondOrder=True)))
                _qLit = EDBQuery(
                    [copy.deepcopy(lit) for lit in self.edbConj],
                    self.factGraph,  # closure,
                    _vars,
                    specialBNodeHandling=self.bfp.specialBNodeHandling,
                )
            else:
                _qLit = copy.deepcopy(self.queryLiteral)
                _qLit = EDBQuery(
                    [_qLit],
                    self.factGraph,  # closure,
                    list(GetVariables(_qLit, secondOrder=True)),
                    specialBNodeHandling=self.bfp.specialBNodeHandling,
                )
            # Keep the un-grounded query for answer handling; ground the
            # working copy with the current bindings.
            origQuery = _qLit.copy()
            _qLit.ground(_bindings)
            if self.bfp.debug:
                print(
                    "%sQuery triggered for " % (
                        " maximal db conjunction "
                        if self.edbConj else ""),
                    tNode.clauseRepresentation(),
                )
            self.bfp.edbQueries.add(_qLit)
            # queryVars = origQuery.getOpenVars()
            # tokens2Propagate=[
            #     t for t in token.tokens
            #     if [
            #         v for v in t.getVarBindings()
            #         if v not in queryVars
            #     ]
            # ]
            # A fully-ground query is a boolean (ASK-style) test.
            isGround = not _qLit.returnVars
            rt = self.tabledQuery(_qLit)
            if isGround:
                if first(rt):
                    self.handleQueryAnswer(
                        origQuery, token, self.bfp.debug, ({}, binding))
            else:
                for ans in rt:
                    if self.bfp.debug:
                        pprint(ans)
                    self.handleQueryAnswer(
                        origQuery, token, self.bfp.debug, (ans, binding))
def subject(self, subject, depth=1):
    """
    Serialize one subject as an HTML ``<div typeof=...>`` element
    (RDFa-style), recursing into its predicate/object pairs.  Subjects
    already serialized are skipped via ``self.__serialized``.
    """
    store = self.store
    write = self.write
    indent = "\n" + indent_string * depth
    if not subject in self.__serialized:
        self.__serialized[subject] = 1
        type = first(store.objects(subject, RDF.type))
        try:
            # Only keep the type if it can be qname-abbreviated.
            self.nm.qname(type)
        except:
            type = None
        element = type or RDFS.Resource
        if isinstance(subject, BNode):  # not referenced more than once
            if more_than(store.triples((None, None, subject)), 1):
                # Referenced elsewhere: needs an explicit about= identifier.
                write("%s<div typeof=\"%s\" about=\"%s\">" %
                      (indent, self.getQName(element), fix(subject)))
            else:
                write("%s<div typeof=\"%s\">" %
                      (indent, self.getQName(element)))
        else:
            write(
                "%s<div typeof=\"%s\" about=\"%s\">" %
                (indent, self.getQName(element), self.relativize(subject)))
        if (subject, None, None) in store:
            for predicate, object in store.predicate_objects(subject):
                # The typeof= attribute already covers the rdf:type triple.
                if not (predicate == RDF.type and object == type):
                    self.predicate(predicate, object, depth + 1)
        write("%s</div>" % indent)
def invokeDecisionProcedure(self, tp, factGraph, bindings, debug,
                            sipCollection):
    """
    Run the backward fixpoint procedure (BFP) for the goal ``tp``.

    Generator: for a non-ground goal, yields each ``(solution, None)``
    pair from the BFP's goal solutions; for a ground goal, yields a
    single ``(True, None)``.
    """
    # Any Variable in the goal triple makes the query non-ground.
    isNotGround = first(filter(lambda i: isinstance(i, Variable), tp))
    rule_store, rule_graph, network = SetupRuleStore(makeNetwork=True)
    bfp = BackwardFixpointProcedure(factGraph,
                                    network,
                                    self.derivedPredicates,
                                    tp,
                                    sipCollection,
                                    hybridPredicates=self.hybridPredicates,
                                    debug=self.DEBUG)
    bfp.createTopDownReteNetwork(self.DEBUG)
    # rt = bfp.answers(debug=self.DEBUG)
    self.queryNetworks.append((bfp.metaInterpNetwork, tp))
    self.edbQueries.update(bfp.edbQueries)
    if self.DEBUG:
        print("Goal/Query: ", tp)
        print("Query was not ground" \
                  if isNotGround is not None else "Query was ground")
    if isNotGround is not None:
        for item in bfp.goalSolutions:
            yield item, None
    else:
        yield True, None
    if debug:
        print(bfp.metaInterpNetwork)
        bfp.metaInterpNetwork.reportConflictSet(True, sys.stderr)
        for query in self.edbQueries:
            print("Dispatched query against dataset: ", query.asSPARQL())
def subject(self, subject, depth=1):
    """
    Serialize one subject as an RDF/XML element (typed element when the
    rdf:type is qname-able, rdf:Description otherwise), recursing into
    its predicate/object pairs.  Already-serialized subjects are skipped,
    except those queued in ``self.forceRDFAbout`` which get a minimal
    rdf:Description reference.
    """
    store = self.store
    writer = self.writer
    if not subject in self.__serialized:
        self.__serialized[subject] = 1
        type = first(store.objects(subject, RDF.type))
        try:
            # Only keep the type if it can be qname-abbreviated.
            self.nm.qname(type)
        except:
            type = None
        element = type or RDF.Description
        writer.push(element)
        if isinstance(subject, BNode):
            def subj_as_obj_more_than(ceil):
                # True when the subject occurs as an object in more than
                # ``ceil`` triples.
                return more_than(store.triples((None, None, subject)), ceil)
            if (depth == 1 and subj_as_obj_more_than(0)
                    ) or subj_as_obj_more_than(1):
                # Referenced elsewhere: needs a stable nodeID.
                writer.attribute(RDF.nodeID, fix(subject))
        else:
            writer.attribute(RDF.about, self.relativize(subject))
        if (subject, None, None) in store:
            for predicate, object in store.predicate_objects(subject):
                # The element name already encodes the rdf:type triple.
                if not (predicate==RDF.type and object==type):
                    self.predicate(predicate, object, depth+1)
        writer.pop(element)
    elif subject in self.forceRDFAbout:
        # Emit a bare reference element for subjects forced into scope
        # (e.g. URIRefs seen inside collections).
        writer.push(RDF.Description)
        writer.attribute(RDF.about, self.relativize(subject))
        writer.pop(RDF.Description)
        self.forceRDFAbout.remove(subject)
def _testNegative(uri, manifest):
    """
    Run one negative parser test from the manifest: the input document is
    expected to FAIL to parse.  Records a PassingRun/FailingRun triple in
    the module-level ``results`` graph and returns 1 on failure, 0 on pass.
    """
    if verbose:
        write(u"TESTING: %s" % uri)
    result = 0  # 1=failed, 0=passed
    inDoc = first(manifest.objects(uri, TEST["inputDocument"]))
    store = Graph()
    test = BNode()
    results.add((test, RESULT["test"], uri))
    results.add((test, RESULT["system"], system))
    try:
        # Choose the parser format from the file extension.
        if inDoc[-3:] == ".nt":
            format = "nt"
        else:
            format = "xml"
        store.parse(cached_file(inDoc), publicID=inDoc, format=format)
    except ParserError:
        # A parse error is the EXPECTED outcome for a negative test.
        results.add((test, RDF.type, RESULT["PassingRun"]))
        # pass
    else:
        # Parsing succeeded where it should not have.
        write(u"""Failed: '%s'""" % uri)
        results.add((test, RDF.type, RESULT["FailingRun"]))
        result = 1
    return result
def extractExists(self,exists_resource):
    """
    Build an Exists quantifier from a RIF Exists resource.

    Fix: the original assigned ``map(...)`` to ``allVars`` and wrapped a
    ``map(...)`` iterator in ``And``.  On Python 3 ``map`` is lazy and
    single-use, so downstream consumers that iterate more than once (or
    take ``len``) would see an exhausted iterator.  Materialize both as
    lists (identical behavior on Python 2).
    """
    formula, formulaType, vars = self.exists[exists_resource]
    allVars = [self.extractTerm(t) for t in Collection(self.graph,vars)]
    if formulaType == RIF_NS.And:
        # Conjunctive formula: extract each member of rif:formulas.
        formula = And([
            first(self.extractPredication(
                i,
                first(self.graph.objects(i,RDF.type))))
            for i in Collection(
                self.graph,
                first(self.graph.objects(formula,RIF_NS.formulas)))
        ])
    else:
        formula = self.extractPredication(formula,formulaType)
    return Exists(formula,allVars)
def testEvaluateBuiltIns(self):
    """
    The closure delta graph should contain the inferred :pred1 assertion
    with value 3.

    Fix: the deprecated ``failUnless`` alias was removed in Python 3.12;
    use the canonical ``assertTrue``.
    """
    # from FuXi.Rete.RuleStore import N3Builtin
    # from FuXi.Rete.AlphaNode import BuiltInAlphaNode
    self.assertTrue(first(
        self.closureDeltaGraph.triples(
            (None, URIRef('http://test/pred1'), Literal(3)))),
        "Missing inferred :pred1 assertions")
def ProperSipOrderWithNegation(body):
    """
    Ensures the list of literals has the negated literals at the end
    of the list.  Returns True iff no positive literal follows the first
    negated (naf) literal.
    """
    from FuXi.Rete.SidewaysInformationPassing import iterCondition
    # import pdb;pdb.set_trace()
    firstNegLiteral = None
    bodyIterator = list(body)
    for idx, literal in enumerate(bodyIterator):
        if literal.naf:
            firstNegLiteral = literal
            break
    if firstNegLiteral:
        # NOTE: ``idx`` here is the index of the first negated literal,
        # left over from the loop above.
        # There is a first negative literal, are there subsequent positive literals?
        subsequentPosLits = first(itertools.dropwhile(lambda i: i.naf,
                                                      bodyIterator[idx:]))
        # NOTE(review): this relies on the truthiness of the returned
        # literal (or None from ``first``) — confirm literals are truthy.
        if len(body) - idx > 1:
            # if this is not the last term in the body
            # then we succeed only if there are no subsequent positive literals
            return not subsequentPosLits
        else:
            # this is the last term, so we are successful
            return True
    else:
        # There are no negative literals
        return True
def subject(self, subject, depth=1):
    """
    Serialize one subject as an RDF/XML element (typed element when the
    rdf:type is qname-able, rdf:Description otherwise), recursing into
    its predicate/object pairs.  Already-serialized subjects are skipped,
    except those queued in ``self.forceRDFAbout`` which get a minimal
    rdf:Description reference.
    """
    store = self.store
    writer = self.writer
    if not subject in self.__serialized:
        self.__serialized[subject] = 1
        type = first(store.objects(subject, RDF.type))
        try:
            # Only keep the type if it can be qname-abbreviated.
            self.nm.qname(type)
        except:
            type = None
        element = type or RDF.Description
        writer.push(element)
        if isinstance(subject, BNode):
            def subj_as_obj_more_than(ceil):
                # True when the subject occurs as an object in more than
                # ``ceil`` triples.
                return more_than(store.triples((None, None, subject)), ceil)
            if (depth == 1 and subj_as_obj_more_than(0)) or \
                    subj_as_obj_more_than(1):
                # Referenced elsewhere: needs a stable nodeID.
                writer.attribute(RDF.nodeID, fix(subject))
        else:
            writer.attribute(RDF.about, self.relativize(subject))
        if (subject, None, None) in store:
            for predicate, object in store.predicate_objects(subject):
                # The element name already encodes the rdf:type triple.
                if not (predicate == RDF.type and object == type):
                    self.predicate(predicate, object, depth + 1)
        writer.pop(element)
    elif subject in self.forceRDFAbout:
        # Emit a bare reference element for subjects forced into scope
        # (e.g. URIRefs seen inside collections).
        writer.push(RDF.Description)
        writer.attribute(RDF.about, self.relativize(subject))
        writer.pop(RDF.Description)
        self.forceRDFAbout.remove(subject)
def invokeDecisionProcedure(self,tp,factGraph,bindings,debug,sipCollection):
    """
    Evaluate the goal triple pattern *tp* against *factGraph* with a
    backward fixpoint procedure over a freshly built RETE network,
    yielding (solution, None) pairs.
    """
    # Non-ground iff the triple pattern still contains a Variable term.
    isNotGround = first(itertools.ifilter(lambda i:isinstance(i,Variable),
                                          tp))
    rule_store, rule_graph, network = SetupRuleStore(makeNetwork=True)
    bfp = BackwardFixpointProcedure(
                factGraph,
                network,
                self.derivedPredicates,
                tp,
                sipCollection,
                hybridPredicates=self.hybridPredicates,
                debug=self.DEBUG)
    bfp.createTopDownReteNetwork(self.DEBUG)
    # rt = bfp.answers(debug=self.DEBUG)
    self.queryNetworks.append((bfp.metaInterpNetwork,tp))
    self.edbQueries.update(bfp.edbQueries)
    if self.DEBUG:
        print >>sys.stderr, "Goal/Query: ", tp
        print >>sys.stderr, "Query was not ground" if isNotGround is not None else "Query was ground"
    if isNotGround is not None:
        # Non-ground query: yield every solution binding found.
        for item in bfp.goalSolutions:
            yield item,None
    else:
        # Ground query.  NOTE(review): yields True unconditionally —
        # presumably success/failure is decided by the caller; confirm.
        yield True,None
    if debug:
        print >>sys.stderr, bfp.metaInterpNetwork
        bfp.metaInterpNetwork.reportConflictSet(True,sys.stderr)
        for query in self.edbQueries:
            print >>sys.stderr, "Dispatched query against dataset: ", query.asSPARQL()
def LloydToporTransformation(clause,fullReduction=True):
    """
    Tautological, common horn logic forms (useful for normalizing
    conjunctive & disjunctive clauses)

        (H ^ H0) :- B   ->  { H :- B, H0 :- B }
        (H :- H0) :- B  ->  H :- B ^ H0
        H :- (B v B0)   ->  { H :- B, H :- B0 }

    Generator of NormalizeClause'd clauses.
    """
    assert isinstance(clause,OriginalClause),repr(clause)
    assert isinstance(clause.body,Condition),repr(clause)
    if isinstance(clause.body,Or):
        # Disjunctive body: one clause per disjunct.
        for atom in clause.body.formulae:
            if hasattr(atom, 'next'):
                # atom is itself an iterator; take its first element
                atom=first(atom)
            yield NormalizeClause(Clause(atom,clause.head))
    elif isinstance(clause.head,OriginalClause):
        # Nested rule in the head: (H :- H0) :- B  =>  H :- B ^ H0
        yield NormalizeClause(Clause(And([clause.body,clause.head.body]),
                                     clause.head.head))
    elif fullReduction and isinstance(clause.head,And):
        # Conjunctive head: recurse once per conjunct.
        for i in clause.head:
            for j in LloydToporTransformation(Clause(clause.body,i),
                                              fullReduction=fullReduction):
                if [i for i in breadth_first(j.head) if isinstance(i,And)]:
                    #Ands in the head need to be further flattened
                    yield NormalizeClause(j)
                else:
                    yield j
    else:
        # Already in normal form.
        yield clause
def clauseRepresentation(self):
    """Return a Datalog-style string for this rule set ('' when empty)."""
    ruleCount = len(self.rules)
    if ruleCount > 1:
        headRepr = ' '.join(repr(atom) for atom in self.headAtoms)
        return "And(%s) :- %s" % (headRepr, self.antecedent)
    if ruleCount > 0:
        return repr(first(self.rules).formula)
    return ''
def IsAtomicInclusionAxiomRHS(rule, dPreds):
    """
    This is an atomic inclusion axiom with a variable (or bound) RHS:
    uniPred(?ENTITY)
    """
    literals = list(iterCondition(rule.formula.body))
    onlyLiteral = first(literals)
    return (GetOp(onlyLiteral) not in dPreds
            and len(literals) == 1
            and onlyLiteral.op == RDF.type)
def testEvaluateBuiltIns(self):
    """The closure delta graph must contain the inferred :pred1 assertion."""
    pattern = (None, URIRef('http://test/pred1'), Literal(3))
    self.failUnless(
        first(self.closureDeltaGraph.triples(pattern)),
        "Missing inferred :pred1 assertions")
def predicate(self, predicate, object, depth=1):
    """
    Serialize one predicate/object arc of the current subject as RDF/XML,
    recursing into the object (up to self.max_depth) and special-casing
    Literals and RDF collections.
    """
    writer = self.writer
    store = self.store
    writer.push(predicate)
    if isinstance(object, Literal):
        attributes = ""  # NOTE(review): unused local
        if object.language:
            writer.attribute(XMLLANG, object.language)
        if object.datatype:
            writer.attribute(RDF.datatype, object.datatype)
        writer.text(object)
    elif object in self.__serialized or not (object, None, None) in store:
        # Already serialized, or the object has no arcs of its own:
        # emit a reference rather than nesting a description.
        if isinstance(object, BNode):
            if more_than(store.triples((None, None, object)), 0):
                writer.attribute(RDF.nodeID, fix(object))
        else:
            writer.attribute(RDF.resource, self.relativize(object))
    else:
        # NOTE(review): `items` is computed but never used below, and
        # setting it to None then appending on a later iteration would
        # raise AttributeError — looks like dead/vestigial code; confirm.
        items = []
        for item in store.items(object): # add a strict option to items?
            if isinstance(item, Literal):
                items = None # can not serialize list with literal values in them with rdf/xml
            else:
                items.append(item)
        if first(store.objects(object, RDF.first)): # may not have type RDF.List
            # Serialize the rdf:first/rdf:rest chain as rdf:parseType="Collection".
            collection = object
            self.__serialized[object] = 1
            # TODO: warn that any assertions on object other than
            # RDF.first and RDF.rest are ignored... including RDF.List
            writer.attribute(RDF.parseType, "Collection")
            while collection:
                item = first(store.objects(collection, RDF.first))
                if item:
                    self.subject(item)
                collection = first(store.objects(collection, RDF.rest))
                self.__serialized[collection] = 1
        else:
            if depth <= self.max_depth:
                # Nest the object's own description inline.
                self.subject(object, depth + 1)
            elif isinstance(object, BNode):
                writer.attribute(RDF.nodeID, fix(object))
            else:
                writer.attribute(RDF.resource, self.relativize(object))
    writer.pop(predicate)
def predicate(self, predicate, object, depth=1):
    """
    Serialize one predicate/object arc as RDF/XML (near-duplicate of the
    other `predicate` implementation in this file; only whitespace differs).
    """
    writer = self.writer
    store = self.store
    writer.push(predicate)
    if isinstance(object, Literal):
        attributes = ""  # NOTE(review): unused local
        if object.language:
            writer.attribute(XMLLANG, object.language)
        if object.datatype:
            writer.attribute(RDF.datatype, object.datatype)
        writer.text(object)
    elif object in self.__serialized or not (object, None, None) in store:
        # Already serialized, or the object has no arcs of its own:
        # emit a reference rather than nesting a description.
        if isinstance(object, BNode):
            if more_than(store.triples((None, None, object)), 0):
                writer.attribute(RDF.nodeID, fix(object))
        else:
            writer.attribute(RDF.resource, self.relativize(object))
    else:
        # NOTE(review): `items` is computed but never used below, and
        # setting it to None then appending on a later iteration would
        # raise AttributeError — looks like dead/vestigial code; confirm.
        items = []
        for item in store.items(object): # add a strict option to items?
            if isinstance(item, Literal):
                items = None # can not serialize list with literal values in them with rdf/xml
            else:
                items.append(item)
        if first(store.objects(object, RDF.first)): # may not have type RDF.List
            # Serialize the rdf:first/rdf:rest chain as rdf:parseType="Collection".
            collection = object
            self.__serialized[object] = 1
            # TODO: warn that any assertions on object other than
            # RDF.first and RDF.rest are ignored... including RDF.List
            writer.attribute(RDF.parseType, "Collection")
            while collection:
                item = first(store.objects(collection, RDF.first))
                if item:
                    self.subject(item)
                collection = first(store.objects(collection, RDF.rest))
                self.__serialized[collection] = 1
        else:
            if depth<=self.max_depth:
                # Nest the object's own description inline.
                self.subject(object, depth+1)
            elif isinstance(object, BNode):
                writer.attribute(RDF.nodeID, fix(object))
            else:
                writer.attribute(RDF.resource, self.relativize(object))
    writer.pop(predicate)
def ComplementExpand(tBoxGraph, complementAnnotation):
    """
    Expand owl:complementOf classes that occur inside owl:intersectionOf /
    owl:unionOf lists, skipping classes already marked with
    *complementAnnotation*.
    """
    complementExpanded = []
    for negativeClass in tBoxGraph.subjects(predicate=OWL_NS.complementOf):
        # Walk the rdf:first/rdf:rest links back to the head of the
        # containing collection.
        listNode = first(tBoxGraph.subjects(RDF.first, negativeClass))
        listHead = None
        while listNode:
            listHead = listNode
            listNode = first(tBoxGraph.subjects(RDF.rest, listNode))
        if listHead:
            booleanProps = [OWL_NS.intersectionOf, OWL_NS.unionOf]
            for s, p, o in tBoxGraph.triples_choices((None, booleanProps, listHead)):
                if (s, complementAnnotation, None) in tBoxGraph:
                    continue
                _class = Class(s)
                complementExpanded.append(s)
                print("Added %s to complement expansion" % _class)
                ComplementExpansion(_class)
def clauseRepresentation(self):
    """String form of this rule set: conjunctive head, single rule, or ''."""
    if not self.rules:
        return ''
    if len(self.rules) == 1:
        return repr(first(self.rules).formula)
    atoms = ' '.join(repr(a) for a in self.headAtoms)
    return "And(%s) :- %s" % (atoms, self.antecedent)
def IsAtomicInclusionAxiomRHS(rule, dPreds):
    """
    This is an atomic inclusion axiom with a variable (or bound) RHS:
    uniPred(?ENTITY)
    """
    conjuncts = list(iterCondition(rule.formula.body))
    lone = first(conjuncts)
    if GetOp(lone) in dPreds:
        return False
    return len(conjuncts) == 1 and lone.op == RDF.type
def stupidAssLambda(term):
    """
    True iff *term* is an owl:Restriction whose owl:onProperty value is in
    the enclosing scope's `roles` collection.
    """
    if isinstance(term, Individual):
        termId = term.identifier
    else:
        termId = term
        term = Individual(term)
    if OWL.Restriction not in term.type:
        return False
    onProp = first(Individual.factoryGraph.objects(termId, OWL.onProperty))
    return onProp in roles
def _testPositive(uri, manifest):
    """
    Run one positive parser test: parse the input document and check the
    result is isomorphic to the expected output document.

    Returns 1 on failure, 0 on success (matching _testNegative).
    """
    if verbose:
        write(u"TESTING: %s" % uri)
    result = 0  # 1=failed, 0=passed
    inDoc = first(manifest.objects(uri, TEST["inputDocument"]))
    outDoc = first(manifest.objects(uri, TEST["outputDocument"]))
    expected = Graph()
    if outDoc[-3:] == ".nt":
        format = "nt"
    else:
        format = "xml"
    expected.parse(cached_file(outDoc), publicID=outDoc, format=format)
    store = TestStore(expected)
    if inDoc[-3:] == ".nt":
        format = "nt"
    else:
        format = "xml"
    try:
        store.parse(cached_file(inDoc), publicID=inDoc, format=format)
    except ParserError as pe:
        # BUG FIX: the original re-raised `pe` immediately, making the
        # diagnostic output and `result = 1` below unreachable and aborting
        # the whole suite.  Record the failure instead, like _testNegative.
        write("Failed '")
        write(inDoc)
        write("' failed with")
        try:
            write(type(pe))
        except Exception:
            write("sorry could not dump out error.")
        result = 1
    else:
        if not store.isomorphic(expected):
            write(u"""Failed: '%s'""" % uri)
            if verbose:
                write("""  In:\n""")
                for s, p, o in store:
                    write("%s %s %s." % (repr(s), repr(p), repr(o)))
                write("""  Out:\n""")
                for s, p, o in expected:
                    write("%s %s %s." % (repr(s), repr(p), repr(o)))
            result += 1
    return result
def isRecursive(self):
    """
    True iff some adorned literal in the rule body has the same hash
    (operator + summed adornment) as the rule head, i.e. the rule is
    directly recursive.
    """
    def hashOfTerm(term):
        return GetOp(term), reduce(lambda acc, a: acc + a, term.adornment)
    goalHash = hashOfTerm(self.formula.head)
    def matchesHead(term):
        return isinstance(term, AdornedUniTerm) and hashOfTerm(term) == goalHash
    return bool(first(itertools.ifilter(matchesHead,
                                        iterCondition(self.formula.body))))
def extractRule(self, rule):
    """
    Build a Rule object from the RIF-in-RDF resources recorded for *rule*
    in self.rules / self.implications.
    """
    vars, impl = self.rules[rule]
    body, bodyType, head, headType = self.implications[impl]
    # Declared rule variables, extracted from the RDF collection of ?vars.
    allVars = map(self.extractTerm, Collection(self.graph, vars))
    head = first(self.extractPredication(head, headType))
    if bodyType == RIF_NS.And:
        # Conjunctive body: extract one predication per member of the
        # rif:formulas collection, typed by each member's rdf:type.
        body = [first(self.extractPredication(
                    i, first(self.graph.objects(i, RDF.type)))
        ) for i in Collection(self.graph, first(self.graph.objects(body, RIF_NS.formulas)))]
    else:
        body = self.extractPredication(body, bodyType)
    # Normalize to an And(..) even for a single-literal body.
    # NOTE(review): assumes extractPredication returned something len()-able
    # (a list, not a generator) in the non-And branch — confirm.
    body = And([first(body)]) if len(body) == 1 else And(body)
    return Rule(
        Clause(body, head),
        declare=allVars,
        nsMapping=dict(self.graph.namespaces())
    )
def testNegative(self):
    """Run every APPROVED NegativeParserTest and assert that none fail."""
    manifest = self.manifest
    num_failed = total = 0
    approved = Literal("APPROVED")
    for neg in sorted(manifest.subjects(RDF.type, TEST["NegativeParserTest"])):
        status = first(manifest.objects(neg, TEST["status"]))
        if status == approved:
            total += 1
            num_failed += _testNegative(neg, manifest)
    self.assertEquals(num_failed, 0,
                      "Failed: %s of %s." % (num_failed, total))
def RenderSIPCollection(sipGraph, dot=None):
    """
    Render a sideways-information-passing (SIP) graph as a pydot digraph.

    Each magic:SipArc becomes an edge from the node set N (left) to the
    bound query predicate q (right), labelled with the arc's bindings.
    """
    try:
        from pydot import Node, Edge, Dot
    except ImportError:
        # Narrowed from a bare except: only a missing pydot should be
        # tolerated; any other failure ought to propagate.
        # NOTE(review): if pydot is missing and dot is None, Dot() below
        # will still raise NameError — consider returning early.
        import warnings
        warnings.warn("Missing pydot library", ImportWarning)
    if not dot:
        dot = Dot(graph_type='digraph')
        dot.leftNodesLookup = {}
    nodes = {}
    for N, prop, q in sipGraph.query(
            'SELECT ?N ?prop ?q {  ?prop a magic:SipArc . ?N ?prop ?q . }',
            initNs={u'magic': MAGIC}):
        # A bound head predicate stands alone; otherwise N is a collection.
        if MAGIC.BoundHeadPredicate in sipGraph.objects(subject=N, predicate=RDF.type):
            NCol = [N]
        else:
            NCol = Collection(sipGraph, N)
        if q not in nodes:
            newNode = Node(makeMD5Digest(q),
                           label=normalizeTerm(q, sipGraph),
                           shape='plaintext')
            nodes[q] = newNode
            dot.add_node(newNode)
        bNode = BNode()
        nodeLabel = ', '.join([normalizeTerm(term, sipGraph) for term in NCol])
        edgeLabel = ', '.join([var.n3() for var in Collection(
            sipGraph, first(sipGraph.objects(prop, MAGIC.bindings)))])
        markedEdgeLabel = ''
        if nodeLabel in dot.leftNodesLookup:
            # Reuse an existing left-hand node for this label.
            bNode, leftNode, markedEdgeLabel = dot.leftNodesLookup[nodeLabel]
        else:
            leftNode = Node(makeMD5Digest(bNode),
                            label=nodeLabel,
                            shape='plaintext')
            dot.leftNodesLookup[nodeLabel] = (bNode, leftNode, edgeLabel)
            nodes[bNode] = leftNode
            dot.add_node(leftNode)
        # Only add an edge when its binding label differs from the one
        # already recorded for this left node (avoids duplicate edges).
        if not edgeLabel == markedEdgeLabel:
            edge = Edge(leftNode, nodes[q], label=edgeLabel)
            dot.add_edge(edge)
    return dot
def subject(self, subject: Identifier, depth: int = 1):
    """
    Serialize one subject node (and, recursively, its predicate/object
    arcs) as an RDF/XML element, honoring the forceRDFAbout set.
    """
    store = self.store
    writer = self.writer
    if subject in self.forceRDFAbout:
        # Forced bare reference: emit <rdf:Description rdf:about=...>.
        writer.push(RDFVOC.Description)
        writer.attribute(RDFVOC.about, self.relativize(subject))
        writer.pop(RDFVOC.Description)
        self.forceRDFAbout.remove(subject)  # type: ignore[arg-type]
    elif subject not in self.__serialized:
        self.__serialized[subject] = 1
        type = first(store.objects(subject, RDF.type))
        try:
            self.nm.qname(type)
        except Exception:
            # Narrowed from a bare except: type is None or not QName-able,
            # so fall back to rdf:Description below.
            type = None
        element = type or RDFVOC.Description
        writer.push(element)
        if isinstance(subject, BNode):
            def subj_as_obj_more_than(ceil):
                return True
                # more_than(store.triples((None, None, subject)), ceil)

            # here we only include BNode labels if they are referenced
            # more than once (this reduces the use of redundant BNode
            # identifiers)
            if subj_as_obj_more_than(1):
                writer.attribute(RDFVOC.nodeID, fix(subject))
        else:
            writer.attribute(RDFVOC.about, self.relativize(subject))
        if (subject, None, None) in store:
            for predicate, object in store.predicate_objects(subject):
                # The rdf:type arc already named the element; skip it.
                if not (predicate == RDF.type and object == type):
                    self.predicate(predicate, object, depth + 1)
        writer.pop(element)
    elif subject in self.forceRDFAbout:
        # TODO FIXME?: this looks like a duplicate of first condition
        writer.push(RDFVOC.Description)
        writer.attribute(RDFVOC.about, self.relativize(subject))
        writer.pop(RDFVOC.Description)
        self.forceRDFAbout.remove(subject)  # type: ignore[arg-type]
def isRecursive(self):
    """
    True iff some adorned literal in the rule body carries the same hash
    (operator + summed adornment) as the rule head — i.e. the rule is
    directly recursive.
    """
    def hashOfTerm(term):
        return GetOp(term), reduce(lambda acc, a: acc + a, term.adornment)
    goalHash = hashOfTerm(self.formula.head)
    def matchesHead(term):
        return isinstance(term, AdornedUniTerm) and hashOfTerm(term) == goalHash
    return bool(first(filter(matchesHead, iterCondition(self.formula.body))))