def handleImport(self):
    """
    Process the import directives found in the RIF document graph.

    Runs the IMPORT_PARTS query against self.graph to obtain
    (location, profile) pairs and handles each according to its profile:

      * ENT.RDF           -- parse the referenced RDF document and collect
                             its triples as additional facts.
      * ENT['OWL-Direct'] -- additionally compile the referenced OWL graph
                             into rules via Description Logic Programming
                             (only when self.owlEmbeddings is enabled).

    Returns:
        (additionalFacts, additionalRules) -- two sets: imported RDF
        statements and rules derived from embedded OWL 2 RL.
    """
    additionalRules = set()
    additionalFacts = set()
    for location, profile in self.graph.query(IMPORT_PARTS,
                                              initNs=rif_namespaces):
        # Placeholder so len(graph) below is safe even if no branch fired
        graph = []
        if profile == ENT.RDF:
            graph = Graph().parse(location)
            additionalFacts.update(graph)
            if self.debug:
                print "Importing RDF referenced from RIF document"
        if profile == ENT['OWL-Direct'] and self.owlEmbeddings:
            # Fresh rule network used only to run the DLP translation
            rule_store, rule_graph, network = SetupRuleStore(makeNetwork=True)
            graph = Graph().parse(location)
            additionalFacts.update(graph)
            additionalRules.update(network.setupDescriptionLogicProgramming(
                graph,
                addPDSemantics=False,
                constructNetwork=False))
            if self.debug:
                print "Embedded %s rules from %s (imported OWL 2 RL)"%(
                    len(additionalRules), location
                )
                print "Added %s RDF statements from RDF Graph"%(len(graph))
    return additionalFacts,additionalRules
def testAggregateSPARQL():
    """
    SPARQL queries on a ConjunctiveGraph should aggregate results across
    every named graph in the shared store (the three fixture graphs plus
    the RDFS vocabulary parsed from its namespace URI).
    """
    memStore = plugin.get('IOMemory', Store)()
    graph1 = Graph(memStore, URIRef("graph1"))
    graph2 = Graph(memStore, URIRef("graph2"))
    graph3 = Graph(memStore, URIRef("graph3"))
    # Load the three fixture graphs into the same store under distinct names
    for n3Str, graph in [(testGraph1N3, graph1),
                         (testGraph2N3, graph2),
                         (testGraph3N3, graph3)]:
        graph.parse(StringIO(n3Str), format='n3')
    print '-------------------testAggregateSPARQL()----------------------'
    print RDFS.RDFSNS
    print '---------------------------------------------------------------'
    # Fourth graph: the RDFS vocabulary itself, fetched over the network
    graph4 = Graph(memStore, RDFS.RDFSNS)
    graph4.parse(RDFS.RDFSNS)
    #print graph4.serialize()
    G = ConjunctiveGraph(memStore)
    rt = G.query(sparqlQ)
    print '-------------------G.query(sparqlQ)----------------------'
    #print rt.serialize(format='xml')
    print '---------------------------------------------------------------'
    # The conjunctive view must yield results from more than one graph
    assert len(rt) > 1
    #print rt.serialize(format='xml')
    LOG_NS = Namespace(u'http://www.w3.org/2000/10/swap/log#')
    # initBindings pre-binds ?graph so the query is restricted to graph3
    rt = G.query(sparqlQ2, initBindings={u'?graph': URIRef("graph3")})
    #print rt.serialize(format='json')
    assert rt.serialize('python')[0] == LOG_NS.N3Document, str(rt)
def reason_func(resource_name):
    """
    Return the names of resources related to *resource_name*.

    Loads the ganglia metric facts and Horn rules, wraps them in a
    top-down SPARQL-entailing store, queries for mtc:relateTo targets
    and strips each result URI down to its fragment after '#'.
    """
    metricNs = Namespace('file:///code/ganglia/metric.n3#')
    bindings = {'mtc': metricNs}

    horn_rules = HornFromN3('ganglia/metric/metric_rule.n3')
    facts = Graph().parse('ganglia/metric/metric.n3', format='n3')
    facts.bind('mtc', metricNs)

    entailing = TopDownSPARQLEntailingStore(facts.store,
                                            facts,
                                            idb=horn_rules,
                                            derivedPredicates=[metricNs.relateTo],
                                            nsBindings=bindings)
    target = Graph(entailing)
    target.bind('ex', metricNs)

    # Ask for everything the resource relates to
    query = 'SELECT ?RELATETO { mtc:%s mtc:relateTo ?RELATETO}' % resource_name
    # Each row is a URI ending in "#<name>"; keep just the fragment
    return [str(row).split("#")[1]
            for row in target.query(query, initNs=bindings)]
def testAggregateRaw():
    """ReadOnlyGraphAggregate must behave like the union of its members."""
    store = plugin.get('IOMemory', Store)()
    g1, g2, g3 = Graph(store), Graph(store), Graph(store)
    for text, target in ((testGraph1N3, g1),
                         (testGraph2N3, g2),
                         (testGraph3N3, g3)):
        target.parse(StringIO(text), format='n3')

    aggregate = ReadOnlyGraphAggregate([g1, g2, g3])

    # triples() spans all member graphs
    assert len(list(aggregate.triples((None, RDF.type, None)))) == 4
    assert len(list(aggregate.triples(
        (URIRef("http://test/bar"), None, None)))) == 2
    assert len(list(aggregate.triples(
        (None, URIRef("http://test/d"), None)))) == 3

    # __len__ counts every triple in the union
    assert len(aggregate) == 8

    # __contains__ consults every member graph
    assert (URIRef("http://test/foo"), RDF.type, RDFS.Resource) in aggregate

    # triples_choices() accepts a list of alternatives in one position
    barPredicates = [URIRef("http://test/d"), RDFS.isDefinedBy]
    assert len(list(aggregate.triples_choices(
        (URIRef("http://test/bar"), barPredicates, None)))) == 2
def testGraphValue(self):
    """Two GraphValues over graphs with the same statements compare equal,
    regardless of insertion order."""
    from rdflib.Graph import GraphValue

    backing = self.graph
    alice, bob = URIRef("alice"), URIRef("bob")
    pizza, cheese = URIRef("pizza"), URIRef("cheese")

    g1 = Graph()
    for subj, obj in ((alice, pizza), (bob, cheese), (bob, pizza)):
        g1.add((subj, RDF.value, obj))

    # Same statements as g1, inserted in a different order
    g2 = Graph()
    for subj, obj in ((bob, pizza), (bob, cheese), (alice, pizza)):
        g2.add((subj, RDF.value, obj))

    gv1 = GraphValue(store=backing.store, graph=g1)
    gv2 = GraphValue(store=backing.store, graph=g2)
    backing.add((gv1, RDF.value, gv2))

    # Looking the value back up must yield something equal to gv2
    v = backing.value(gv1)
    self.assertEquals(gv2, v)

    backing.remove((gv1, RDF.value, gv2))
def setUp(self):
    """Build a fresh rule network and parse the TBox/ABox fixtures."""
    (self.rule_store,
     self.rule_graph,
     self.network) = SetupRuleStore(makeNetwork=True)

    def parse_n3(text):
        return Graph().parse(StringIO(text), format='n3')

    self.tBoxGraph = parse_n3(TBOX)
    self.aBoxGraph = parse_n3(ABOX)
    # Rewrite the TBox in place into the expected normal form
    NormalFormReduction(self.tBoxGraph)
def __init__(self, connection, ontology):
    """Remember the connection/ontology and start with empty local state."""
    self._connection = connection
    self._ontology = ontology
    # Cache of wrapped RDF objects, keyed lazily as they are requested
    self._rdfObjects = {}
    # Local triple buffers: current view, pending additions, pending removals
    self._graph, self._added, self._removed = Graph(), Graph(), Graph()
def endElementNS(self, name, qname):
    """
    SAX end-of-element handler for the TriX parser.

    Dispatches on the element's local name and on self.state:
    state 3 = expecting a graph identifier, state 4 = collecting the
    terms of a triple.  The successive ``if`` tests are mutually
    exclusive since ``name[1]`` can only match one local name.
    """
    if name[0] != TRIXNS:
        self.error("Only elements in the TriX namespace are allowed.")

    if name[1] == "uri":
        if self.state == 3:
            # The URI names the graph now being populated
            self.graph = Graph(store=self.store.store,
                               identifier=URIRef(self.chars.strip()))
            self.state = 2
        elif self.state == 4:
            # The URI is one term of the triple under construction
            self.triple += [URIRef(self.chars.strip())]
        else:
            self.error(
                "Illegal internal self.state - This should never happen if the SAX parser ensures XML syntax correctness"
            )

    if name[1] == "id":
        if self.state == 3:
            # Blank-node graph identifier
            self.graph = Graph(self.store.store,
                               identifier=self.get_bnode(
                                   self.chars.strip()))
            self.state = 2
        elif self.state == 4:
            # Blank-node term of the current triple
            self.triple += [self.get_bnode(self.chars.strip())]
        else:
            self.error(
                "Illegal internal self.state - This should never happen if the SAX parser ensures XML syntax correctness"
            )

    if name[1] == "plainLiteral" or name[1] == "typedLiteral":
        if self.state == 4:
            # Literal term; self.lang/self.datatype presumably set by the
            # start-element handler -- confirm against startElementNS
            self.triple += [
                Literal(self.chars, lang=self.lang, datatype=self.datatype)
            ]
        else:
            self.error(
                "This should never happen if the SAX parser ensures XML syntax correctness"
            )

    if name[1] == "triple":
        if self.state == 4:
            if len(self.triple) != 3:
                self.error("Triple has wrong length, got %d elements: %s" %
                           (len(self.triple), self.triple))
            self.graph.add(self.triple)
            #self.store.store.add(self.triple,context=self.graph)
            #self.store.addN([self.triple+[self.graph]])
            # Back to "inside a graph", ready for the next triple
            self.state = 2
        else:
            self.error(
                "This should never happen if the SAX parser ensures XML syntax correctness"
            )

    if name[1] == "graph":
        self.state = 1

    if name[1] == "TriX":
        self.state = 0
def len_graph(request):
    """
    Django view: build a small demo graph of document metadata, then
    parse the RDF file at *myfile* and render its triple count.

    Fixes over the original: all triples are now added to a defined
    ``graph`` object (the original called ``graph.add`` while only
    ``store`` existed, raising NameError on first use), the ``html``
    string is built by plain %-formatting instead of ``exec``, and the
    "lenght" typo in the rendered message is corrected.
    """
    # Declare namespaces to use.
    ns_sn = Namespace("http://www.snee.com/ns/misc#")
    ns_sd = Namespace("http://www.snee.com/docs/")
    ns_dc = Namespace("http://purl.org/dc/elements/1.1/")
    ns_pr = Namespace("http://prismstandard.org/1.0#")

    myfile = '/var/rdf/municipality.rdf'

    # Create storage object for triples.
    graph = Graph()

    # Add sample document metadata triples.
    graph.add(
        (ns_sd["d1001"], ns_dc["title"], Literal("Sample Acrobat document")))
    graph.add((ns_sd["d1001"], ns_dc["format"], Literal("PDF")))
    graph.add((ns_sd["d1001"], ns_dc["creator"], Literal("Billy Shears")))
    graph.add(
        (ns_sd["d1001"], ns_pr["publicationTime"], Literal("2002-12-19")))
    graph.add((ns_sd["d1002"], ns_dc["title"], Literal("Sample RTF document")))
    graph.add((ns_sd["d1002"], ns_dc["format"], Literal("RTF")))
    graph.add((ns_sd["d1002"], ns_dc["creator"], Literal("Nanker Phelge")))
    graph.add(
        (ns_sd["d1002"], ns_pr["publicationTime"], Literal("2002-12-15")))
    graph.add(
        (ns_sd["d1003"], ns_dc["title"], Literal("Sample LaTeX document")))
    graph.add((ns_sd["d1003"], ns_dc["format"], Literal("LaTeX")))
    graph.add((ns_sd["d1003"], ns_dc["creator"], Literal("Richard Mutt")))
    graph.add(
        (ns_sd["d1003"], ns_pr["publicationTime"], Literal("2002-12-16")))
    graph.add((ns_sd["d1003"], ns_sn["quality"], Literal("pretty good")))

    # Illustrative API calls (kept from the original; only the first
    # selection is actually rendered below).
    rdf_subjects = graph.subjects()
    rdf_predicates = graph.predicates()
    rdf_objects = graph.objects()
    select_predicate_by_subject = graph.predicates(subject=ns_sd["d1001"])
    select_object_by_predicate = graph.objects(predicate=ns_dc["title"])

    # Parse the on-disk RDF/XML file whose size we report.
    g = Graph()
    g.parse(myfile, format="xml")

    # Plain assignment -- the original reached the same result via exec.
    html = 'the length of the graph is: %s' % len(g)
    context = {'html': html, 'g': select_predicate_by_subject}
    return render_to_response('len_graph.html', context)
def commit(self):
    """
    Commits changes to the remote graph and flushes local caches.
    """
    # Push the buffered additions and removals to the server in one update
    self._connection.update(add=self._added, remove=self._removed)
    # Reset all local state now that the remote side is authoritative
    self._rdfObjects = {}
    self._graph, self._added, self._removed = Graph(), Graph(), Graph()
def setUp(self):
    """Parse the two FOAF fixtures and expose their read-only union."""
    def load(identifier, text):
        # Each fixture graph gets its own identifying URI
        g = Graph(identifier=URIRef(identifier))
        g.parse(StringIO(text), format="n3")
        return g

    self.graph1 = load('http://example.org/foaf/aliceFoaf', test_graph_a)
    self.graph2 = load('http://example.org/foaf/bobFoaf', test_graph_b)
    self.unionGraph = ReadOnlyGraphAggregate(
        graphs=[self.graph1, self.graph2])
def rollback(self):
    """
    Undo the transaction by replaying the recorded reverse operations.

    Acquires the rollback lock, applies each (s, p, o, context, op)
    entry in forward order -- 'add' entries are re-added, everything
    else is removed -- then clears the operation log.

    Fix over the original: the lock is released in a ``finally`` block,
    so an exception while replaying operations no longer leaves the
    rollback lock held forever (deadlocking every later rollback).
    """
    self.rollbackLock.acquire()
    try:
        for subject, predicate, obj, context, op in self.reverseOps:
            # Each operation targets the graph for its recorded context
            target = Graph(self.storage, context)
            if op == 'add':
                self.storage.add((subject, predicate, obj), target)
            else:
                self.storage.remove((subject, predicate, obj), target)
        self.reverseOps = []
    finally:
        self.rollbackLock.release()
def addStuffInMultipleContexts(self):
    """Insert one triple into the default graph and into contexts c1, c2."""
    triple = (self.pizza, self.hates, self.tarek)  # revenge!
    # add to default context
    self.graph.add(triple)
    # add the same statement to each named context
    for ctx in (self.c1, self.c2):
        Graph(self.graph.store, ctx).add(triple)
def test(self):
    """Literals must survive an N3 serialize/parse round trip unchanged."""
    NS = Namespace("http://quoting.test/")

    original = Graph()
    for idx, text in enumerate(cases):
        original.add((NS['subj'], NS['case%s' % idx], Literal(text)))

    # Round-trip through the N3 serializer
    n3txt = original.serialize(format="n3")
    restored = Graph()
    restored.parse(StringInputSource(n3txt), format="n3")

    for idx, text in enumerate(cases):
        value = restored.value(NS['subj'], NS['case%s' % idx])
        self.assertEqual(value, Literal(text))
def SetupRuleStore(n3Stream=None, additionalBuiltins=None, makeNetwork=False):
    """
    Create an N3RuleStore and a Graph backed by it.

    If *n3Stream* is given, its N3 content is parsed into the rule graph.
    Returns (ruleStore, ruleGraph), or (ruleStore, ruleGraph, network)
    when *makeNetwork* is true, where *network* is a ReteNetwork whose
    inferred triples land in a fresh delta graph.
    """
    ruleStore = N3RuleStore(additionalBuiltins=additionalBuiltins)
    nsMgr = NamespaceManager(Graph(ruleStore))
    ruleGraph = Graph(ruleStore, namespace_manager=nsMgr)
    if n3Stream:
        ruleGraph.parse(n3Stream, format='n3')

    if not makeNetwork:
        return ruleStore, ruleGraph

    # Import kept local, as in the original
    from Network import ReteNetwork
    delta = Graph()
    network = ReteNetwork(ruleStore, inferredTarget=delta)
    return ruleStore, ruleGraph, network
def testDefaultGraph():
    """A ConjunctiveGraph query sees every named graph in the store;
    a query on one member graph must not."""
    store = plugin.get('IOMemory', Store)()
    fixtures = ((URIRef("graph1"), testGraph1N3),
                (URIRef("graph2"), testGraph2N3),
                (URIRef("graph3"), testGraph3N3))
    graphs = {}
    for name, text in fixtures:
        member = Graph(store, name)
        member.parse(StringIO(text), format='n3')
        graphs[name] = member

    G = ConjunctiveGraph(store)
    #test that CG includes triples from all 3
    assert G.query(sparqlQ3), "CG as default graph should *all* triples"
    assert not graphs[URIRef("graph2")].query(
        sparqlQ3
    ), "Graph as default graph should *not* include triples from other graphs"
def setUp(self):
    """Build a Rete network primed with the rules and facts in testN3."""
    from FuXi.Rete.RuleStore import N3RuleStore
    from FuXi.Rete import ReteNetwork
    from FuXi.Rete.Util import generateTokenSet

    self.testGraph = Graph()
    self.ruleStore = N3RuleStore()
    self.ruleGraph = Graph(self.ruleStore)
    # The same N3 document supplies both the rules and the initial facts
    for target in (self.ruleGraph, self.testGraph):
        target.parse(StringIO(testN3), format='n3')

    self.closureDeltaGraph = Graph()
    self.network = ReteNetwork(
        self.ruleStore,
        initialWorkingMemory=generateTokenSet(self.testGraph),
        inferredTarget=self.closureDeltaGraph,
        nsMap={})
def setUp(self):
    """Disable GC for the benchmark and open a store-specific graph."""
    # Remember whether GC was on so tearDown can presumably restore it
    self.gcold = gc.isenabled()
    gc.collect()
    gc.disable()

    self.graph = Graph(store=self.store)
    if self.store == "MySQL":
        # MySQL reuses a configured database; wipe it before the run
        from test.mysql import configString
        from rdflib.store.MySQL import MySQL
        path = configString
        MySQL().destroy(path)
    else:
        # File-backed stores get a throwaway temporary directory
        path = mkdtemp()
    self.graph.open(path, create=True)

    # Source data to benchmark with, fetched over HTTP
    self.input = source = Graph()
    source.parse("http://eikeon.com")
def testAggregateRaw():
    """
    Verbose variant of the ReadOnlyGraphAggregate test: loads three N3
    fixture graphs into one store, prints intermediate contents, then
    asserts triples(), __len__, __contains__ and triples_choices()
    behave like the union of the member graphs.
    """
    memStore = plugin.get('IOMemory', Store)()
    graph1 = Graph(memStore)  # Create a graph instance
    graph2 = Graph(memStore)
    graph3 = Graph(memStore)
    for n3Str, graph in [(testGraph1N3, graph1),
                         (testGraph2N3, graph2),
                         (testGraph3N3, graph3)]:
        # Read the graphs from text in N3 format
        graph.parse(StringIO(n3Str), format='n3')
    for s, p, o in graph2:  # Examining the graph's contents
        print 'tripleta:', s, p, o, '-'

    # Utility class for treating a set of graphs as a single graph
    # Only read operations are supported (hence the name).
    #Essentially a ConjunctiveGraph over an explicit subset of the entire store.
    G = ReadOnlyGraphAggregate([graph1, graph2, graph3])

    # print '----------------------------------------'
    # NOTE(review): this prints the generator object, not the triples
    print G.triples((None, RDF.type, None))
    for g in G.triples((URIRef("http://test/bar"), None, None)):
        print g
    print '---------------------------------------'
    #Test triples
    assert len(list(G.triples((None, RDF.type, None)))) == 4
    assert len(list(G.triples((URIRef("http://test/bar"), None, None)))) == 2
    assert len(list(G.triples((None, URIRef("http://test/d"), None)))) == 3
    #Test __len__
    assert len(G) == 8
    print '----------Analizando G:------------------------------'
    for g in G:
        print g
    print '----------------------------------------'
    #Test __contains__
    assert (URIRef("http://test/foo"), RDF.type, RDFS.Resource) in G
    print '----------Comprobando contenido G:------------------------------'
    print(URIRef("http://test/foo"), RDF.type, RDFS.Resource) in G
    print '----------------------------------------'
    barPredicates = [URIRef("http://test/d"), RDFS.isDefinedBy]
    assert len(
        list(
            G.triples_choices(
                (URIRef("http://test/bar"), barPredicates, None)))) == 2
def makeOutputGraph():
    """Return a fresh Graph with the project's prefix bindings applied."""
    graph = Graph()
    bindings = (
        ('pre', 'http://bigasterisk.com/pre/general/'),
        ('local', 'http://bigasterisk.com/pre/drew/'),  # todo
        ('ad', 'http://bigasterisk.com/pre/general/accountDataType/'),
        ('mt', 'http://bigasterisk.com/pre/general/messageType/'),
    )
    for prefix, uri in bindings:
        graph.bind(prefix, uri)
    return graph
def read_input(self, filenames, infmt):
    """Read the given RDF file(s) and return an rdflib Graph object.

    Each entry in *filenames* is parsed into one shared graph; '-' reads
    from stdin.  *infmt*, when given, forces the rdflib parser format;
    otherwise the format is guessed from the file extension (default:
    'xml').  On any parse failure the error is logged and the process
    exits with status 1.

    Fixes over the original: the bare ``except:`` now catches only
    ``Exception`` (so SystemExit/KeyboardInterrupt propagate), opened
    files are closed via ``finally`` instead of leaking, and the
    extension checks use a single ``endswith`` tuple.
    """
    rdf = Graph()
    for filename in filenames:
        if infmt:
            fmt = infmt
        else:
            # determine format based on file extension
            fmt = 'xml'  # default
            if filename.endswith(('n3', 'ttl')):
                fmt = 'n3'
            elif filename.endswith('nt'):
                fmt = 'nt'
        logging.debug("Parsing input file %s (format: %s)", filename, fmt)

        if filename == '-':
            f = sys.stdin
        else:
            f = open(filename, 'r')
        try:
            rdf.parse(f, format=fmt)
        except Exception:
            logging.critical("Parsing failed. Exception: %s",
                             str(sys.exc_info()[1]))
            sys.exit(1)
        finally:
            # Never close the shared stdin stream
            if f is not sys.stdin:
                f.close()
    return rdf
def _extract_rdfa(self, filename):
    """Parse *filename* as RDFa 1.0, tidy the resulting graph, return it.

    Fix over the original: the file is opened in a ``with`` block so the
    handle is always closed (previously it leaked on every call).
    """
    g = Graph()
    with open(filename) as source:
        g.parse(source, format="rdfa", rdfa_version="1.0")
    # Post-process the graph (project-local cleanup helper)
    self.__tidy_graph(g)
    return g
def parse_from_soup(self,soup,basefile):
    """
    Extract the text of an EU treaty from a parsed HTML soup.

    *basefile* selects the treaty: "teu" (Treaty on European Union) or
    "tfeu" (Treaty on the Functioning of the EU).  The treaty text sits
    between rows of fifty dashes in the source document; paragraph lines
    are collected until the closing separator and assembled via
    make_body()/process_body().

    Returns a dict with the metadata graph ('meta'), the document body
    ('body'), the language ('lang') and the canonical URI ('uri').

    NOTE(review): any other *basefile* value leaves ``uri`` and
    ``startnode`` unassigned and raises UnboundLocalError -- confirm
    callers only ever pass these two values.
    """
    g = Graph()
    self.log.info("%s: Parsing" % basefile)
    if basefile == "teu":
        # FIXME: Use a better base URI?
        uri = 'http://rinfo.lagrummet.se/extern/celex/12008M'
        # TEU text starts after the second 50-dash separator
        startnode = soup.findAll(text="-"*50)[1].parent
        g.add((URIRef(uri),DCT['title'],Literal("Treaty on European Union")))
    elif basefile == "tfeu":
        uri = 'http://rinfo.lagrummet.se/extern/celex/12008E'
        # TFEU text starts after the third separator
        startnode = soup.findAll(text="-"*50)[2].parent
        g.add((URIRef(uri),DCT['title'],Literal("Treaty on the Functioning of the European Union")))
    lines = deque()
    for p in startnode.findNextSiblings("p"):
        if p.string == "-" * 50:
            # Another dash row marks the end of the treaty text
            self.log.info("found the end")
            break
        else:
            # Skip paragraphs with mixed/absent string content
            if p.string:
                lines.append(unicode(p.string))
    self.log.info("%s: Found %d lines" % (basefile,len(lines)))
    body = self.make_body(lines)
    self.process_body(body, '', uri)
    # print serialize(body)
    return {'meta':g, 'body':body, 'lang':'en', 'uri':uri}
def _loadAndEscape(ruleStore, n3, outputPatterns):
    """Parse *n3* into *ruleStore*, escaping output statements first."""
    ruleGraph = Graph(ruleStore)

    # The rule store does not support removals, so escapeOutputStatements
    # cannot run on it directly; and copying a plain graph into ruleGraph
    # loses the bodies of rules inside quoted graphs.  Working around both
    # by escaping on a scratch graph and round-tripping through N3 text.
    # Slow (~400ms), but it only runs when the file changes.
    scratch = Graph()
    scratch.parse(StringInputSource(n3.encode('utf8')), format='n3')
    escapeOutputStatements(scratch, outputPatterns=outputPatterns)

    escapedN3 = scratch.serialize(format='n3')
    ruleGraph.parse(StringInputSource(escapedN3), format='n3')
def __init__(self, fname):
    """Load XMP metadata from *fname* into self.graph.

    Image files are read via their embedded header; anything else is
    treated as a standalone XMP document.
    """
    self.graph = Graph()
    extension = os.path.splitext(fname)[1].lower()
    if extension in IMAGE_EXTENSIONS:
        self.parseImageHeader(fname)
    else:
        self.parseXmp(fname)
    self.xmpName = fname
def __init__(self, case):
    """Initialise the writer for *case* with an empty output graph."""
    super(RDFWriter, self).__init__(case)
    # All emitted triples accumulate here
    self.store = Graph()
    # Map of Bus objects to BNodes, filled in as buses are written
    self.bus_map = {}
def _parse_rdf(self, file): """ Returns a case from the given file. """ store = Graph() store.parse(file) print len(store)
def testConjunction(self):
    """Conjunctive view and context-1 graph agree on length after adding
    one extra triple to context 1."""
    self.addStuffInMultipleContexts()
    extra = (self.pizza, self.likes, self.pizza)
    # add to context 1
    contextOne = Graph(self.graph.store, self.c1)
    contextOne.add(extra)
    self.assertEquals(len(self.graph), len(contextOne))
def intro_sparql(request):
    """ Introduction to using SPARQL to query an rdflib graph -
    http://code.google.com/p/rdflib/wiki/IntroSparql

    Fixes over the original: each result line is built with plain
    %-formatting instead of ``exec`` (exec'ing code assembled from
    remote data breaks on quotes and is an injection risk), the
    side-effecting list comprehension is a normal loop, and the no-op
    ``row = row`` statement is gone.
    """
    g = Graph()
    g.parse("http://bigasterisk.com/foaf.rdf")
    g.parse("http://www.w3.org/People/Berners-Lee/card.rdf")
    FOAF = Namespace("http://xmlns.com/foaf/0.1/")
    g.parse("http://danbri.livejournal.com/data/foaf")
    # Copy foaf:member_name values over to foaf:name
    for s, _, n in g.triples((None, FOAF['member_name'], None)):
        g.add((s, FOAF['name'], n))

    graph_as_list = []
    # The Graph.query 'initNs' argument is a dictionary of namespaces to
    # be expanded in the query string.
    # Example 'row':
    #   (rdflib.Literal('Dan Brickley', language=u'en', datatype=None),
    #    rdflib.Literal('Brad Fitzpatrick', language=u'en', datatype=None))
    for row in g.query(
            'SELECT ?aname ?bname \
             WHERE {\
             ?a foaf:knows ?b .\
             ?a foaf:name ?aname .\
             ?b foaf:name ?bname .\
             }',
            initNs=dict(foaf=Namespace("http://xmlns.com/foaf/0.1/"))):
        # row is a (aname, bname) pair of literals
        line = '%s knows %s' % row
        graph_as_list.append(line)

    context = {
        'row': row,
        'graph': graph_as_list,
    }
    return render_to_response('rdf/intro_sparql.html', context)
def __init__(self, graphs): """ Constructor takes a list of URLs that point to RDF/OWL files. :type graphs: list :param graphs: a list RDF/OWL files """ self.g = Graph() if type(graphs) == str: self.g.parse(graphs) else: for i in range(0, len(graphs)): self.g.parse(graphs[i]) # for testing - todo: load all namespaces from a merged graph in the code below self.owl = Namespace("http://www.w3.org/2002/07/owl#") self.rdf = Namespace("http://www.w3.org/1999/02/22-rdf-syntax-ns#") self.biblio = Namespace( "http://www.linguistics-ontology.org/bibliography/bibliography.owl#" ) self.goldbib = Namespace( "http://www.linguistics-ontology.org/bibliography/gold-bibliography.rdf#" ) self.gold = Namespace("http://purl.org/linguistics/gold/") self.rdfs = Namespace("http://www.w3.org/2000/01/rdf-schema#") self.bibtex = Namespace("http://purl.oclc.org/NET/nknouf/ns/bibtex#") self.person = Namespace( "http://www.linguistics-ontology.org/bibliography/person.rdf#") # get namespaces from graph self.namespaces = [] namespaces = self.g.namespaces()