def _consume_area(self):
    """Translate the 'area' query parameter into triple patterns.

    'ANY' binds ?area as a query variable; any other value is treated
    as a fixed area URI.
    """
    requested = self.params['area']
    if requested == 'ANY':
        self.helper.push_triple_variables(
            Variable("source"), DEB.area, Variable("area"))
    else:
        self.helper.push_triple(
            Variable("source"), DEB.area, URIRef(requested))
def testQueryMemoization(self):
    """Regression test for EDB query memoization (currently skipped).

    The SkipTest raise makes everything below intentionally unreachable;
    the body is kept as a reference for test/testBFPQueryMemoization.py.
    """
    raise SkipTest(
        "SKIPFAIL testQueryMemoization, see test/testBFPQueryMemoization.py"
    )
    # NOTE: dead code beyond this point (kept for reference only).
    topDownStore = TopDownSPARQLEntailingStore(
        self.owlGraph.store,
        self.owlGraph,
        idb=self.program,
        DEBUG=False,
        nsBindings=nsMap,
        decisionProcedure=BFP_METHOD,
        identifyHybridPredicates=True)
    targetGraph = Graph(topDownStore)
    for pref, nsUri in nsMap.items():
        targetGraph.bind(pref, nsUri)
    goal = (Variable('SUBJECT'), RDF.type, EX.C)
    queryLiteral = EDBQuery([BuildUnitermFromTuple(goal)],
                            self.owlGraph,
                            [Variable('SUBJECT')])
    query = queryLiteral.asSPARQL()
    # rt=targetGraph.query(query,initNs=nsMap)
    # if len(topDownStore.edbQueries) == len(set(topDownStore.edbQueries)):
    #     pprint(topDownStore.edbQueries)
    print("Queries dispatched against EDB")
    for query in self.owlGraph.queriesDispatched:
        print(query)
    self.failUnlessEqual(len(self.owlGraph.queriesDispatched), 4,
                         "Duplicate query")
def encodeAction(tNode, inferredTriple, token, binding, debug=False):
    """Rete network action: assert a foaf:mbox_sha1sum triple.

    Hashes the bound ?email value with SHA-1 and adds
    (?person, foaf:mbox_sha1sum, <hexdigest>) to the network's
    inferred facts.
    """
    from hashlib import sha1
    who = binding[Variable('person')]
    mbox = binding[Variable('email')]
    digest = sha1(mbox.encode('utf-8')).hexdigest()
    tNode.network.inferredFacts.add(
        (who, FOAF['mbox_sha1sum'], Literal(digest)))
def contexts(self, triple=None):
    """
    Iterates over results to SELECT ?NAME { GRAPH ?NAME { ?s ?p ?o } }
    returning instances of this store with the SPARQL wrapper
    object updated via addNamedGraph(?NAME)

    This causes a named-graph-uri key / value pair to be sent over
    the protocol
    """
    if triple:
        s, p, o = triple
    else:
        s = p = o = None
    # Unbound positions become SPARQL variables, so the pattern
    # matches any triple inside each named graph.
    params = ((s if s else Variable('s')).n3(),
              (p if p else Variable('p')).n3(),
              (o if o else Variable('o')).n3())
    self.setQuery('SELECT ?name WHERE { GRAPH ?name { %s %s %s }}' % params)
    doc = ElementTree.parse(SPARQLWrapper.query(self).response)
    # Yield only the ?name binding from each result row.
    return (
        rt.get(Variable("name"))
        for rt, vars in TraverseSPARQLResultDOM(doc, asDictionary=True))
def contexts(self, triple=None):
    """
    Iterates over results to "SELECT ?NAME { GRAPH ?NAME { ?s ?p ?o } }"
    or "SELECT ?NAME { GRAPH ?NAME {} }" if triple is `None`.

    Returns instances of this store with the SPARQL wrapper object
    updated via addNamedGraph(?NAME).

    This causes a named-graph-uri key / value pair to be sent over
    the protocol.

    Please note that some SPARQL endpoints are not able to find empty
    named graphs.
    """
    self.resetQuery()
    if triple:
        nts = self.node_to_sparql
        s, p, o = triple
        # Unbound positions become SPARQL variables so any triple in
        # each graph matches the pattern.
        params = (nts(s if s else Variable('s')),
                  nts(p if p else Variable('p')),
                  nts(o if o else Variable('o')))
        self.setQuery('SELECT ?name WHERE { GRAPH ?name { %s %s %s }}' %
                      params)
    else:
        self.setQuery('SELECT ?name WHERE { GRAPH ?name {} }')
    # Ensure the HTTP response is closed once the DOM has been parsed.
    with contextlib.closing(SPARQLWrapper.query(self).response) as res:
        doc = etree.parse(res)
    return (rt.get(
        Variable("name")
    ) for rt, vars in _traverse_sparql_result_dom(
        doc, as_dictionary=True,
        node_from_result=self.node_from_result))
def contexts(self, triple=None):
    """
    Iterates over results to "SELECT ?NAME { GRAPH ?NAME { ?s ?p ?o } }"
    or "SELECT ?NAME { GRAPH ?NAME {} }" if triple is `None`.

    Returns instances of this store with the SPARQL wrapper object
    updated via addNamedGraph(?NAME).

    This causes a named-graph-uri key / value pair to be sent over
    the protocol.

    Please note that some SPARQL endpoints are not able to find empty
    named graphs.
    """
    self.resetQuery()
    if triple:
        s, p, o = triple
        # Unbound positions become SPARQL variables so any triple in
        # each graph matches the pattern.
        params = ((s if s else Variable('s')).n3(),
                  (p if p else Variable('p')).n3(),
                  (o if o else Variable('o')).n3())
        self.setQuery('SELECT ?name WHERE { GRAPH ?name { %s %s %s }}' %
                      params)
    else:
        self.setQuery('SELECT ?name WHERE { GRAPH ?name {} }')
    doc = ElementTree.parse(SPARQLWrapper.query(self).response)
    # Yield only the ?name binding from each result row.
    return (
        rt.get(Variable("name"))
        for rt, vars in TraverseSPARQLResultDOM(doc, asDictionary=True))
def __init__(self, source, content_type=None):
    """Parse a SPARQL XML results document (SELECT or ASK).

    :param source: file-like object (or filename) holding the result XML.
    :param content_type: accepted for interface compatibility; unused here.
    :raises ResultException: if the document contains neither a
        <boolean> nor a <results> element.
    """
    tree = etree.parse(source)
    boolean = tree.find(RESULTS_NS_ET + 'boolean')
    results = tree.find(RESULTS_NS_ET + 'results')
    # The presence of <boolean> vs <results> distinguishes ASK from SELECT.
    if boolean is not None:
        type_ = 'ASK'
    elif results is not None:
        type_ = 'SELECT'
    else:
        raise ResultException(
            "No RDF result-bindings or boolean answer found!")
    Result.__init__(self, type_)
    if type_ == 'SELECT':
        self.bindings = []
        for result in results:
            r = {}
            for binding in result:
                # Each <binding name="..."> wraps exactly one term element.
                r[Variable(binding.get('name'))] = parseTerm(binding[0])
            self.bindings.append(r)
        self.vars = [
            Variable(x.get("name"))
            for x in tree.findall('./%shead/%svariable' %
                                  (RESULTS_NS_ET, RESULTS_NS_ET))
        ]
    else:
        self.askAnswer = boolean.text.lower().strip() == "true"
def remove(self, spo, context):
    """Remove a triple (possibly with wildcards) from the store.

    Unbound positions are replaced by SPARQL variables, so a
    partially-specified pattern deletes every matching triple.
    Appends the DELETE to the current transaction and commits
    immediately when autocommit is set.
    """
    if not self.update_endpoint:
        raise Exception("UpdateEndpoint is not set - call 'open'")

    subject, predicate, obj = spo
    subject = subject or Variable("S")
    predicate = predicate or Variable("P")
    obj = obj or Variable("O")

    nts = self.node_to_sparql
    pattern = "%s %s %s ." % (nts(subject), nts(predicate), nts(obj))
    if self._is_contextual(context):
        # Scope the DELETE to the context's named graph.
        q = "WITH %(graph)s DELETE { %(triple)s } WHERE { %(triple)s }" % {
            "graph": nts(context.identifier),
            "triple": pattern,
        }
    else:
        q = "DELETE { %s } WHERE { %s } " % (pattern, pattern)
    self._transaction().append(q)
    if self.autocommit:
        self.commit()
def TraverseSPARQLResultDOM(doc, asDictionary=False): """ Returns a generator over tuples of results """ # namespace handling in elementtree xpath sub-set is not pretty :( vars = [ Variable(v.attrib["name"]) for v in doc.findall('./{http://www.w3.org/2005/sparql-results#}head/' + '{http://www.w3.org/2005/sparql-results#}variable') ] for result in doc.findall( './{http://www.w3.org/2005/sparql-results#}results/' + '{http://www.w3.org/2005/sparql-results#}result'): currBind = {} values = [] for binding in result.findall( '{http://www.w3.org/2005/sparql-results#}binding'): varVal = binding.attrib["name"] var = Variable(varVal) term = CastToTerm(binding.findall('*')[0]) values.append(term) currBind[var] = term if asDictionary: yield currBind, vars else: def __locproc(values): if len(values) == 1: return values[0] else: return tuple(values) yield __locproc(values), vars
def mappings_to_ctx(mappings: Dict[str, str]) -> dict:
    """Re-create a rdflib context (ctx) from a sage bag of mappings.

    Args:
        * mappings: A dictionary of mappings whose keys are
          '?'-prefixed variable names and whose values are serialized
          RDF terms.

    Returns:
        * A context compatible with rdflib: a dict mapping
          rdflib Variable to URIRef / BNode / Literal.

    Ex: mappings={'?s': 'http://auth12/scma/s3',
                  '?p': 'http://common/scma/p5', '?o': 'o14'}
    returns:
    {rdflib.term.Variable('s'): rdflib.term.URIRef('http://auth12/scma/s3'),
     rdflib.term.Variable('p'): rdflib.term.URIRef('http://common/scma/p5'),
     rdflib.term.Variable('o'): rdflib.term.Literal('o14')}
    """
    # NOTE: the original return annotation was the dict literal `{}`,
    # which is a value, not a type; it is now a proper `dict` annotation.
    ctx = dict()
    for key, value in mappings.items():
        if key.startswith('?'):
            key = key[1:]
        else:
            # Keys are expected to carry the '?' prefix; warn otherwise.
            print("mappings_to_ctx found " + key + " as key in mappings")
        # Classify the serialized term by its lexical prefix
        # (plain startswith is equivalent to the anchored regexes used before).
        if value.startswith("http://"):
            ctx[Variable(key)] = URIRef(value)
        elif value.startswith("_:"):
            ctx[Variable(key)] = BNode(value)
        else:
            ctx[Variable(key)] = Literal(value)
    return ctx
def contexts(self, triple=None):
    """
    Iterates over results to "SELECT ?NAME { GRAPH ?NAME { ?s ?p ?o } }"
    or "SELECT ?NAME { GRAPH ?NAME {} }" if triple is `None`.

    Returns instances of this store with the SPARQL wrapper object
    updated via addNamedGraph(?NAME).

    This causes a named-graph-uri key / value pair to be sent over
    the protocol.

    Please note that some SPARQL endpoints are not able to find empty
    named graphs.
    """
    if not triple:
        q = "SELECT ?name WHERE { GRAPH ?name {} }"
    else:
        to_sparql = self.node_to_sparql
        subj, pred, obj = triple
        # Unbound positions become SPARQL variables so any triple in
        # each graph matches the pattern.
        pattern = "%s %s %s" % (
            to_sparql(subj if subj else Variable("s")),
            to_sparql(pred if pred else Variable("p")),
            to_sparql(obj if obj else Variable("o")),
        )
        q = "SELECT ?name WHERE { GRAPH ?name { %s }}" % pattern
    result = self._query(q)
    return (row.name for row in result)
def Th(owlGraph, _class, variable=Variable('X'), position=LHS):
    """
    DLP head (antecedent) knowledge assertional forms (ABox assertions,
    conjunction of ABox assertions, and universal role restriction
    assertions)

    Yields Uniterm / Clause / And forms translated from the OWL class
    description `_class` found in `owlGraph`, with `variable` as the
    individual being described.
    """
    props = list(set(owlGraph.predicates(subject=_class)))
    if OWL_NS.allValuesFrom in props:
        #http://www.w3.org/TR/owl-semantics/#owl_allValuesFrom
        for s, p, o in owlGraph.triples((_class, OWL_NS.allValuesFrom, None)):
            prop = list(owlGraph.objects(subject=_class,
                                         predicate=OWL_NS.onProperty))[0]
            # NOTE(review): this branch wraps a fresh BNode label in a
            # Variable, while the someValuesFrom branch below uses a bare
            # BNode — confirm the asymmetry is intentional.
            newVar = Variable(BNode())
            body = Uniterm(prop, [variable, newVar],
                           newNss=owlGraph.namespaces())
            for head in Th(owlGraph, o, variable=newVar):
                yield Clause(body, head)
    elif OWL_NS.hasValue in props:
        prop = list(owlGraph.objects(subject=_class,
                                     predicate=OWL_NS.onProperty))[0]
        o = first(owlGraph.objects(subject=_class,
                                   predicate=OWL_NS.hasValue))
        yield Uniterm(prop, [variable, o], newNss=owlGraph.namespaces())
    elif OWL_NS.someValuesFrom in props:
        #http://www.w3.org/TR/owl-semantics/#someValuesFrom
        for s, p, o in owlGraph.triples((_class, OWL_NS.someValuesFrom, None)):
            prop = list(owlGraph.objects(subject=_class,
                                         predicate=OWL_NS.onProperty))[0]
            newVar = BNode()
            yield And([Uniterm(prop, [variable, newVar],
                               newNss=owlGraph.namespaces()),
                       generatorFlattener(Th(owlGraph, o, variable=newVar))])
    elif OWL_NS.intersectionOf in props:
        from FuXi.Syntax.InfixOWL import BooleanClass
        # Conjunction: translate each operand and take the first form.
        yield And([first(Th(owlGraph, h, variable))
                   for h in BooleanClass(_class)])
    else:
        #Simple class
        # Named classes map to rdf:type assertions; existential BNode
        # classes are skolemized first.
        yield Uniterm(RDF.type,
                      [variable,
                       isinstance(_class, BNode) and
                       SkolemizeExistentialClasses(_class) or _class],
                      newNss=owlGraph.namespaces())
def Tc(owlGraph, negatedFormula):
    """
    Handles the conversion of negated DL concepts into a general logic
    programming condition for the body of a rule that fires
    when the body conjunct is in the minimal model

    :raises UnsupportedNegation: for negated concepts other than
        hasValue restrictions, someValuesFrom restrictions, and named
        classes.
    """
    if (negatedFormula, OWL_NS.hasValue, None) in owlGraph:
        #not ( R value i )
        bodyUniTerm = Uniterm(RDF.type,
                              [Variable("X"),
                               NormalizeBooleanClassOperand(negatedFormula,
                                                            owlGraph)],
                              newNss=owlGraph.namespaces())
        condition = NormalizeClause(Clause(Tb(owlGraph, negatedFormula),
                                           bodyUniTerm)).body
        assert isinstance(condition, Uniterm)
        # Negation-as-failure on the single condition literal.
        condition.naf = True
        return condition
    elif (negatedFormula, OWL_NS.someValuesFrom, None) in owlGraph:
        #not ( R some C )
        binaryRel, unaryRel = Tb(owlGraph, negatedFormula)
        # Deep copies so the negated literals don't mutate the originals.
        negatedBinaryRel = copy.deepcopy(binaryRel)
        negatedBinaryRel.naf = True
        negatedUnaryRel = copy.deepcopy(unaryRel)
        negatedUnaryRel.naf = True
        # Either no R-successor at all, or one that is not in C.
        return Or([negatedBinaryRel, And([binaryRel, negatedUnaryRel])])
    elif isinstance(negatedFormula, URIRef):
        # Negated named class: a single naf rdf:type literal.
        return Uniterm(RDF.type,
                       [Variable("X"),
                        NormalizeBooleanClassOperand(negatedFormula,
                                                     owlGraph)],
                       newNss=owlGraph.namespaces(),
                       naf=True)
    else:
        raise UnsupportedNegation("Unsupported negated concept: %s"
                                  % negatedFormula)
def test_push_triple_variables(self):
    """push_triple_variables must record the triple and both variables."""
    self.s.push_triple_variables(
        Variable("var3"), RDFS.type, Variable("var4"))
    self.assertEqual(1, len(self.s.query.whereclause.stmts))
    self.assertEqual(self.triple3, self.s.query.whereclause.stmts[0])
    varlist = [Variable("var3"), Variable("var4")]
    # Compare sorted copies: list.sort() returns None, so the original
    # assertEqual(varlist.sort(), ...) vacuously compared None == None
    # and could never fail.
    self.assertEqual(sorted(varlist), sorted(self.s.query.variables))
def test_add_filter_regex_str_var(self):
    """add_filter_regex_str_var should push FILTER regex(str(?var), "regex")."""
    self.s.add_filter_regex_str_var(Variable("var"), "regex")
    self.assertEqual(1, len(self.s.query.whereclause.stmts))
    # Renamed local: the original bound the builtin name `str`.
    str_call = FunCall("str", Variable("var"))
    regex_call = FunCall("regex", str_call, '"regex"')
    f1 = Filter(regex_call)
    self.assertEqual(f1, self.s.query.whereclause.stmts[0])
def _traverse_sparql_result_dom(doc, as_dictionary=False, node_from_result=_node_from_result): """ Returns a generator over tuples of results """ # namespace handling in elementtree xpath sub-set is not pretty :( vars_ = [ Variable(v.attrib["name"]) for v in doc.findall( './{http://www.w3.org/2005/sparql-results#}head/' '{http://www.w3.org/2005/sparql-results#}variable') ] for result in doc.findall( './{http://www.w3.org/2005/sparql-results#}results/' '{http://www.w3.org/2005/sparql-results#}result'): curr_bind = {} values = [] for binding in result.findall( '{http://www.w3.org/2005/sparql-results#}binding'): var_val = binding.attrib["name"] var = Variable(var_val) term = node_from_result(binding.findall('*')[0]) values.append(term) curr_bind[var] = term if as_dictionary: yield curr_bind, vars_ else: def __locproc(values_): if len(values_) == 1: return values_[0] else: return tuple(values_) yield __locproc(values), vars_
def test_comparisons_var():
    """Numeric comparisons against a bound ?x must coerce across
    integer, decimal, and double literal forms."""
    ctx = QueryContext()
    ctx[Variable('x')] = Literal(2)
    # 2 is less than 3 in every numeric spelling.
    eq(bool(_eval(_translate((p.Expression.parseString('?x<3')[0])),
                  ctx)), True)
    eq(bool(_eval(_translate((p.Expression.parseString('?x<3.0')[0])),
                  ctx)), True)
    eq(bool(_eval(_translate((p.Expression.parseString('?x<3e0')[0])),
                  ctx)), True)
    eq(bool(_eval(_translate((p.Expression.parseString('?x<2.1')[0])),
                  ctx)), True)
    eq(bool(_eval(_translate((p.Expression.parseString('?x<21e-1')[0])),
                  ctx)), True)
    # Equality also coerces across numeric forms; strings never equal numbers.
    eq(bool(_eval(_translate((p.Expression.parseString('?x=2.0')[0])),
                  ctx)), True)
    eq(bool(_eval(_translate((p.Expression.parseString('?x=2e0')[0])),
                  ctx)), True)
    eq(
        bool(_eval(_translate((p.Expression.parseString('?x="cake"')[0])),
                   ctx)),
        False)
    # Rebind ?x to 4: the same < 3 comparisons now fail.
    ctx = QueryContext()
    ctx[Variable('x')] = Literal(4)
    eq(bool(_eval(_translate((p.Expression.parseString('?x<3')[0])),
                  ctx)), False)
    eq(bool(_eval(_translate((p.Expression.parseString('?x<3.0')[0])),
                  ctx)), False)
    eq(bool(_eval(_translate((p.Expression.parseString('?x<3e0')[0])),
                  ctx)), False)
def TraverseSPARQLResultDOM(doc, asDictionary=False): """ Returns a generator over tuples of results by (4Suite) XPath evaluation over the result XML """ # namespace handling in elementtree xpath sub-set is not pretty :( vars = [ Variable(v.attrib["name"]) for v in doc.findall( './{http://www.w3.org/2005/sparql-results#}head/{http://www.w3.org/2005/sparql-results#}variable' ) ] for result in doc.findall( './{http://www.w3.org/2005/sparql-results#}results/{http://www.w3.org/2005/sparql-results#}result' ): # # and broken in < 1.3, according to two FutureWarnings: # # 1. # # FutureWarning: This search is broken in 1.3 and earlier, and will # # be fixed in a future version. If you rely on the current behaviour, # # change it to # # './{http://www.w3.org/2005/sparql-results#}head/{http://www.w3.org/2005/sparql-results#}variable' # # 2. # # FutureWarning: This search is broken in 1.3 and earlier, and will be # # fixed in a future version. If you rely on the current behaviour, # # change it to # # './{http://www.w3.org/2005/sparql-results#}results/{http://www.w3.org/2005/sparql-results#}result' # # Handle ElementTree warning # variablematch = '/{http://www.w3.org/2005/sparql-results#}head/{http://www.w3.org/2005/sparql-results#}variable' # resultmatch = '/{http://www.w3.org/2005/sparql-results#}results/{http://www.w3.org/2005/sparql-results#}result' # # with warnings.catch_warnings(record=True) as w: # # warnings.simplefilter("always") # # matched_variables = doc.findall(variablematch) # # if len(w) == 1: # # variablematch = '.' + variablematch # # resultmatch = '.' 
+ resultmatch # # # Could be wrong result, re-do from start # # matched_variables = doc.findall(variablematch) # vars = [Variable(v.attrib["name"]) for v in matched_variables] # for result in doc.findall(resultmatch): currBind = {} values = [] for binding in result.findall( '{http://www.w3.org/2005/sparql-results#}binding'): varVal = binding.attrib["name"] var = Variable(varVal) term = CastToTerm(binding.findall('*')[0]) values.append(term) currBind[var] = term if asDictionary: yield currBind, vars else: def stab(values): if len(values) == 1: return values[0] else: return tuple(values) yield stab(values), vars
def test_visit_BinaryExpression(self):
    """Nested '||' BinaryExpressions should render flattened left-to-right."""
    left = FunCall("regex", Variable("v1"), "r1")
    middle = FunCall("regex", Variable("v2"), "r2")
    right = FunCall("regex", Variable("v3"), "r3")
    inner = BinaryExpression(middle, "||", right)
    outer = BinaryExpression(left, "||", inner)
    expected = 'regex(?v1,r1)||regex(?v2,r2)||regex(?v3,r3)'
    self.assertEqual(expected, self.v.visit(outer))
def test_issue_41(self):
    """Registering a Rete action for an all-variable head must not raise."""
    ruleStore, ruleGraph, network = SetupRuleStore(makeNetwork=True)
    for rule in self.rules:
        network.buildNetworkFromClause(rule)

    def noop(*args, **kwargs):
        pass

    pattern = (Variable("x"), Variable("y"), Variable("z"))
    network.registerReteAction(pattern, False, noop)
def fixup(o):
    """Normalize a node for comparison: a BNode becomes a Variable of
    its label, a Graph becomes a URIRef of its identifier, and any
    resulting Variable is rendered as a URIRef of its n3 form (both
    URIRefs resolved against `bbbb`).

    NOTE(review): the exact `type(o) == ...` checks (rather than
    isinstance) look deliberate — presumably to avoid matching
    subclasses — confirm before changing.
    """
    if type(o) == BNode:
        o = Variable(str(o))
    elif isinstance(o, Graph):
        o = URIRef(o.identifier, base=bbbb)
    # Not elif: a BNode converted above falls through to this rendering.
    if type(o) == Variable:
        o = URIRef(o.n3(), base=bbbb)
    return o
def test__consume_distribution_any(self):
    """'ANY' distribution should push a variable-bound triple pattern."""
    self.builder.params['distribution'] = "ANY"
    helper_mock = self.mox.CreateMock(SelectQueryHelper)
    helper_mock.push_triple_variables(
        Variable("source"), DEB.distribution, Variable("distribution"))
    self.builder.helper = helper_mock
    self.mox.ReplayAll()
    self.builder._consume_distribution()
    self.mox.VerifyAll()
def _consume_distribution(self):
    """Translate the 'distribution' query parameter into triple patterns.

    'ANY' binds ?distribution as a query variable; any other value is
    treated as a fixed distribution URI.
    """
    requested = self.params['distribution']
    if requested == 'ANY':
        self.helper.push_triple_variables(
            Variable("source"), DEB.distribution, Variable("distribution"))
    else:
        self.helper.push_triple(
            Variable("source"), DEB.distribution, URIRef(requested))
def test__consume_area_any(self):
    """'ANY' area should push a variable-bound triple pattern."""
    self.builder.params['area'] = "ANY"
    helper_mock = self.mox.CreateMock(SelectQueryHelper)
    helper_mock.push_triple_variables(
        Variable("source"), DEB.area, Variable("area"))
    self.builder.helper = helper_mock
    self.mox.ReplayAll()
    self.builder._consume_area()
    self.mox.VerifyAll()
def remove(self, spo, context):
    """Remove a triple from the store.

    Unbound positions become SPARQL variables, so a partial pattern
    removes every matching triple; context-aware stores scope the
    DELETE to the context's named graph.

    :raises Exception: if no update connection has been opened, or the
        endpoint rejects the update.
    """
    if not self.connection:
        # BUGFIX: the original raised a bare string, which is a
        # TypeError in Python 3; wrap the message in an Exception
        # (matching the sibling implementation of this method).
        raise Exception("UpdateEndpoint is not set - call 'open'")

    (subject, predicate, obj) = spo
    if not subject:
        subject = Variable("S")
    if not predicate:
        predicate = Variable("P")
    if not obj:
        obj = Variable("O")

    triple = "%s %s %s ." % (subject.n3(), predicate.n3(), obj.n3())
    if self.context_aware and context is not None:
        q = "DELETE { GRAPH %s { %s } } WHERE { GRAPH %s { %s } }" % (
            context.identifier.n3(), triple,
            context.identifier.n3(), triple)
    else:
        q = "DELETE { %s } WHERE { %s } " % (triple, triple)
    r = self._do_update(q)
    content = r.read()  # we expect no content
    if r.status not in (200, 204):
        raise Exception("Could not update: %d %s\n%s" % (
            r.status, r.reason, content))
def CreateHybridPredicateRule(hybridPred, program, nsMap=None):
    """Build the rule  p_derived(..) :- p(..)  for a hybrid predicate.

    Collects every literal appearing in the bodies and heads of the
    program's rules; for the first literal whose operator is
    `hybridPred`, derives a rule mapping the base predicate to its
    '_derived' counterpart — unary predicates use the rdf:type form,
    binary ones the property form.

    NOTE(review): returns on the FIRST matching literal, and implicitly
    returns None when no literal mentions hybridPred — confirm callers
    expect both behaviors. The declared variable order also differs
    between the unary ([X]) and binary ([Y, X]) cases.
    """
    hPred = URIRef(hybridPred + u'_derived')
    literals = set(
        reduce(lambda l, r: l + r, [
            list(iterCondition(clause.formula.body)) +
            list(iterCondition(clause.formula.head))
            for clause in program
        ]))
    for literal in literals:
        if GetOp(literal) == hybridPred:
            noArgs = len(GetArgs(literal))
            if noArgs == 1:
                # p_derived(X) :- p(X)
                body = BuildUnitermFromTuple(
                    (Variable('X'), RDF.type, hybridPred),
                    newNss=nsMap)
                head = BuildUnitermFromTuple((Variable('X'), RDF.type, hPred),
                                             newNss=nsMap)
                vars = [Variable('X')]
            else:
                # p_derived(X,Y) :- p(X,Y)
                body = BuildUnitermFromTuple(
                    (Variable('X'), hybridPred, Variable('Y')),
                    newNss=nsMap)
                head = BuildUnitermFromTuple(
                    (Variable('X'), hPred, Variable('Y')),
                    newNss=nsMap)
                vars = [Variable('Y'), Variable('X')]
            return Rule(Clause(And([body]), head),
                        nsMapping=nsMap,
                        declare=vars)
def test_ldp_result_to_dataset(self):
    """Every SELECT binding row must appear as a quad in the dataset."""
    result = self.mock.collection_result()
    bindings = result.bindings
    dataset = self.sparql.result_to_dataset(result)
    g, s, p, o = (Variable(name) for name in ("g", "s", "p", "o"))
    self.assertEqual(len(bindings), len(dataset))
    for row in result.bindings:
        self.assertIn((row[s], row[p], row[o], row[g]), dataset)
def test__consume_vcs_one(self):
    """A single 'SVN' vcs parameter should add repository triples."""
    self.builder.params['vcs'] = ["SVN"]
    helper_mock = self.mox.CreateMock(SelectQueryHelper)
    helper_mock.push_triple(
        Variable("source"), DEB.repository, Variable("repobnode"))
    helper_mock.add_triple(
        Triple(Variable("repobnode"), RDF.type, DOAP.SVNRepository))
    self.builder.helper = helper_mock
    self.mox.ReplayAll()
    self.builder._consume_vcs()
    self.mox.VerifyAll()
def test__consume_maintainer_filter(self):
    """A maintainerfilter keyword should regex-match name and email."""
    self.builder.params['maintainerfilter'] = "keyword"
    helper_mock = self.mox.CreateMock(SelectQueryHelper)
    helper_mock.push_triple(
        Variable("maint"), FOAF.name, Variable("maintname"))
    helper_mock.add_or_filter_regex(
        {Variable("maintmail"): "keyword",
         Variable("maintname"): "keyword"})
    self.builder.helper = helper_mock
    self.mox.ReplayAll()
    self.builder._consume_maintainer_filter()
    self.mox.VerifyAll()
def test__consume_homepage_true(self):
    """homepage=True should select ?homepage via an OPTIONAL pattern."""
    self.builder.params['homepage'] = True
    helper_mock = self.mox.CreateMock(SelectQueryHelper)
    helper_mock.add_variable(Variable("homepage"))
    helper_mock.add_optional(
        Triple(Variable("source"), FOAF.page, Variable("homepage")))
    self.builder.helper = helper_mock
    self.mox.ReplayAll()
    self.builder._consume_homepage()
    self.mox.VerifyAll()
def remove(self, spo, context):
    """Remove a triple from the store.

    Unbound positions become SPARQL variables, so a partial pattern
    removes every matching triple; context-aware stores scope the
    DELETE to the context's named graph.

    :raises Exception: if no update connection has been opened, or the
        endpoint rejects the update.
    """
    if not self.connection:
        raise Exception("UpdateEndpoint is not set - call 'open'")
    (subject, predicate, obj) = spo
    if not subject:
        subject = Variable("S")
    if not predicate:
        predicate = Variable("P")
    if not obj:
        obj = Variable("O")
    triple = "%s %s %s ." % (subject.n3(), predicate.n3(), obj.n3())
    if self.context_aware and context is not None:
        q = "DELETE { GRAPH %s { %s } } WHERE { GRAPH %s { %s } }" % (
            context.identifier.n3(), triple,
            context.identifier.n3(), triple)
    else:
        q = "DELETE { %s } WHERE { %s } " % (triple, triple)
    r = self._do_update(q)
    content = r.read()  # we expect no content
    if r.status not in (200, 204):
        raise Exception("Could not update: %d %s\n%s" % (
            r.status, r.reason, content))
def triples(self, xxx_todo_changeme2, context=None):
    """
    SELECT ?subj ?pred ?obj WHERE { ?subj ?pred ?obj }

    Yields ((s, p, o), None) for every matching triple; unbound
    positions in the input pattern become SPARQL variables.
    """
    (s, p, o) = xxx_todo_changeme2
    # A store BNode cannot be addressed by label over the SPARQL
    # protocol, so BNode patterns are rejected outright.
    if (
        isinstance(s, BNode) or
        isinstance(p, BNode) or
        isinstance(o, BNode)
    ):
        raise Exception("SPARQLStore does not support Bnodes! See http://www.w3.org/TR/sparql11-query/#BGPsparqlBNodes")

    vars = []
    if not s:
        s = Variable('s')
        vars.append(s)
    if not p:
        p = Variable('p')
        vars.append(p)
    if not o:
        o = Variable('o')
        vars.append(o)

    if vars:
        v = ' '.join([term.n3() for term in vars])
    else:
        # Fully-bound pattern: project everything (existence check).
        v = '*'

    if self.context_aware and context is not None:
        query = "SELECT %s WHERE { GRAPH %s { %s %s %s } }" % \
            (v, context.identifier.n3(), s.n3(), p.n3(), o.n3())
    else:
        query = "SELECT %s WHERE { %s %s %s }" % \
            (v, s.n3(), p.n3(), o.n3())

    self.setQuery(query)
    doc = ElementTree.parse(SPARQLWrapper.query(self).response)
    # ElementTree.dump(doc)
    # Fall back to the pattern's own term when a position was bound
    # (and therefore absent from the result bindings).
    for rt, vars in TraverseSPARQLResultDOM(doc, asDictionary=True):
        yield (rt.get(s, s), rt.get(p, p), rt.get(o, o)), None
def remove(self, spo, context):
    """Remove a triple from the store.

    Unbound positions become SPARQL variables, so a partial pattern
    deletes every matching triple; contextual stores scope the DELETE
    to the context's named graph.
    """
    if not self.endpoint:
        raise Exception("UpdateEndpoint is not set - call 'open'")

    subject, predicate, obj = spo
    subject = subject if subject else Variable("S")
    predicate = predicate if predicate else Variable("P")
    obj = obj if obj else Variable("O")

    pattern = "%s %s %s ." % (subject.n3(), predicate.n3(), obj.n3())
    if self._is_contextual(context):
        graph = context.identifier.n3()
        q = "DELETE { GRAPH %s { %s } } WHERE { GRAPH %s { %s } }" % (
            graph, pattern, graph, pattern)
    else:
        q = "DELETE { %s } WHERE { %s } " % (pattern, pattern)
    self._do_update(q)
#do something #Removes LIMIT and OFFSET if not required for the next triple() calls del a_graph.LIMIT del a_graph.OFFSET `` """ if ( isinstance(s, BNode) or isinstance(p, BNode) or isinstance(o, BNode) ): raise Exception("SPARQLStore does not support Bnodes! " "See http://www.w3.org/TR/sparql11-query/#BGPsparqlBNodes") vars = [] if not s: s = Variable('s') vars.append(s) if not p: p = Variable('p') vars.append(p) if not o: o = Variable('o') vars.append(o) if vars: v = ' '.join([term.n3() for term in vars]) else: v = '*' query = "SELECT %s WHERE { %s %s %s }" % \
triple_generator = a_graph.triples(mytriple): #do something #Removes LIMIT and OFFSET if not required for the next triple() calls del a_graph.LIMIT del a_graph.OFFSET `` """ if isinstance(s, BNode) or isinstance(p, BNode) or isinstance(o, BNode): raise Exception( "SPARQLStore does not support Bnodes! " "See http://www.w3.org/TR/sparql11-query/#BGPsparqlBNodes" ) vars = [] if not s: s = Variable("s") vars.append(s) if not p: p = Variable("p") vars.append(p) if not o: o = Variable("o") vars.append(o) if vars: v = " ".join([term.n3() for term in vars]) else: v = "*" query = "SELECT %s WHERE { %s %s %s }" % (v, s.n3(), p.n3(), o.n3())
def triples(self, xxx_todo_changeme2, context=None):
    """
    - tuple **(s, o, p)**
      the triple used as filter for the SPARQL select.
      (None, None, None) means anything.
    - context **context**
      the graph effectively calling this method.

    Returns a tuple of triples executing essentially a SPARQL like
    SELECT ?subj ?pred ?obj WHERE { ?subj ?pred ?obj }

    **context** may include three parameter
    to refine the underlying query:
    * LIMIT: an integer to limit the number of results
    * OFFSET: an integer to enable paging of results
    * ORDERBY: an instance of Variable('s'), Variable('o') or
      Variable('p') or, by default, the first 'None' from the given
      triple

    .. warning::
       - Using LIMIT or OFFSET automatically include ORDERBY otherwise
         this is because the results are retrieved in a not
         deterministic way (depends on the walking path on the graph)
       - Using OFFSET without defining LIMIT will discard the first
         OFFSET - 1 results

    ``
    a_graph.LIMIT = limit
    a_graph.OFFSET = offset
    triple_generator = a_graph.triples(mytriple):
        #do something
    #Removes LIMIT and OFFSET if not required for the next triple() calls
    del a_graph.LIMIT
    del a_graph.OFFSET
    ``
    """
    (s, p, o) = xxx_todo_changeme2
    # A store BNode cannot be addressed by label over the SPARQL
    # protocol, so BNode patterns are rejected outright.
    if (
        isinstance(s, BNode) or
        isinstance(p, BNode) or
        isinstance(o, BNode)
    ):
        raise Exception("SPARQLStore does not support Bnodes! "
                        "See http://www.w3.org/TR/sparql11-query/#BGPsparqlBNodes")

    vars = []
    if not s:
        s = Variable('s')
        vars.append(s)
    if not p:
        p = Variable('p')
        vars.append(p)
    if not o:
        o = Variable('o')
        vars.append(o)

    if vars:
        v = ' '.join([term.n3() for term in vars])
    else:
        # Fully-bound pattern: project everything (existence check).
        v = '*'

    query = "SELECT %s WHERE { %s %s %s }" % \
        (v, s.n3(), p.n3(), o.n3())

    # The ORDER BY is necessary
    # NOTE(review): LIMIT / OFFSET / ORDERBY appear to be module-level
    # attribute-name constants looked up on `context` — confirm.
    if hasattr(context, LIMIT) or hasattr(context, OFFSET) \
            or hasattr(context, ORDERBY):
        var = None
        # Order by the first unbound position, or an explicit ORDERBY
        # Variable set on the context.
        if isinstance(s, Variable):
            var = s
        elif isinstance(p, Variable):
            var = p
        elif isinstance(o, Variable):
            var = o
        elif hasattr(context, ORDERBY) \
                and isinstance(getattr(context, ORDERBY), Variable):
            var = getattr(context, ORDERBY)
        query = query + ' %s %s' % (ORDERBY, var.n3())

    # Silently skip LIMIT / OFFSET when absent or non-integer.
    try:
        query = query + ' LIMIT %s' % int(getattr(context, LIMIT))
    except (ValueError, TypeError, AttributeError):
        pass
    try:
        query = query + ' OFFSET %s' % int(getattr(context, OFFSET))
    except (ValueError, TypeError, AttributeError):
        pass

    self.resetQuery()
    if self._is_contextual(context):
        self.addDefaultGraph(context.identifier)
    self.setQuery(query)

    doc = ElementTree.parse(SPARQLWrapper.query(self).response)
    # ElementTree.dump(doc)
    # Fall back to the pattern's own term when a position was bound
    # (and therefore absent from the result bindings).
    for rt, vars in TraverseSPARQLResultDOM(doc, asDictionary=True):
        yield (rt.get(s, s), rt.get(p, p), rt.get(o, o)), None