def create_ontology(self, tr, predicate, subClass, address, booktitle):
    """Build the JFPO property ontology and one subject graph.

    tr         -- the subject term (becomes an OWL class in the subject graph)
    predicate  -- relation name linking tr to subClass
    subClass   -- object term; also becomes an OWL class
    address    -- provenance, recorded as dc:source on tr
    booktitle  -- provenance, recorded as dc:title on tr

    Side effects: serializes the property graph to 'trtst.rdf' and the
    subject graph to 'test2.owl', both in Turtle.
    """
    LDT = Namespace("http://www.JceFinalProjectOntology.com/")
    ut = Namespace("http://www.JceFinalProjectOntology.com/subject/#")
    usubClass = URIRef("http://www.JceFinalProjectOntology.com/subject/"
                       + subClass.strip() + '#')
    print(ut)
    print(usubClass)

    # Both graphs share one in-memory store.
    store = IOMemory()
    sty = LDT[predicate]
    g = rdflib.Graph(store=store, identifier=LDT)
    t = ConjunctiveGraph(store=store, identifier=ut)
    print('Triples in graph before add: ', len(t))

    g.add((URIRef(LDT), RDF.type, RDFS.Class))
    g.add((URIRef(LDT), RDFS.label, Literal("JFPO")))
    g.add((URIRef(LDT), RDFS.comment, Literal('class of all properties')))
    for v in self.symbols.values():
        # Compound terms contribute only their head term as a property name.
        vs = self.splitTerms(v)[0] if self.if_compoTerm(v) else v
        g.add((LDT[vs], RDF.type, RDF.Property))
        g.add((LDT[vs], RDFS.label, Literal('has' + vs)))
        g.add((LDT[vs], RDFS.comment, Literal(v)))
        g.add((LDT[vs], RDFS.range, OWL.Class))
        g.add((LDT[vs], RDFS.domain, Literal(vs)))
    g.bind('JFPO', LDT)
    g.serialize('trtst.rdf', format='turtle')

    t.add((ut[tr], RDF.type, OWL.Class))
    t.add((ut[tr], RDFS.subClassOf, OWL.Thing))
    t.add((ut[tr], RDFS.label, Literal(tr)))
    t.add((ut[tr], DC.title, Literal(booktitle)))
    t.add((ut[tr], DC.source, Literal(address)))
    # FIX: the original added this identical triple three times; an RDF graph
    # is a set, so a single add is equivalent. Also dropped the unused local
    # `relation = 'has' + predicate`.
    t.add((ut[tr], DC[predicate], URIRef(usubClass)))
    t.add((ut[tr], LDT[predicate], RDF.Property))
    t.add((ut[tr], LDT.term(predicate), URIRef(usubClass)))
    t.add((usubClass, RDF.type, OWL.Class))
    t.add((usubClass, RDFS.subClassOf, OWL.Thing))
    t.add((usubClass, RDFS.subClassOf, URIRef(sty)))
    t.add((usubClass, RDFS.label, Literal(subClass)))
    # FIX: the dc prefix was bound to "http://http://purl.org/..." (doubled
    # scheme); this is the correct Dublin Core elements namespace.
    t.bind("dc", "http://purl.org/dc/elements/1.1/")
    t.bind('JFPO', LDT)
    t.commit()
    t.serialize('test2.owl', format='turtle')
def main(fd, store_type=None, store_id=None, graph_id=None, gzipped=False):
    """Converts MARC21 data stored in fd to a RDFlib graph.

    fd         -- path of the MARC21 file to read
    store_type -- rdflib store plugin name; a disk-based store also needs
                  store_id and graph_id
    store_id   -- identifier for the disk-based store
    graph_id   -- identifier for the graph inside the store
    gzipped    -- accepted for interface compatibility; not used here

    Returns the populated Graph; commits every 100 triples and once more
    in the finally block so partial progress is persisted.
    """
    from rdflib import plugin

    if store_type:
        msg = "Need a {} identifier for a disk-based store."
        assert store_id, msg.format('store')
        assert graph_id, msg.format('graph')
        store = plugin.get(store_type, Store)(store_id)
    else:
        store = 'default'
    graph = Graph(store=store, identifier=graph_id)
    try:
        # FIX: the file handle from open(fd) was never closed; a context
        # manager guarantees it is released even on error.
        with open(fd) as marc_file:
            records = MARCReader(marc_file)
            for i, triple in enumerate(process_records(records)):
                graph.add(triple)
                if i % 100 == 0:
                    graph.commit()
                if i % 10000 == 0:
                    print(i)  # progress marker every 10k triples
    finally:
        graph.commit()
    return graph
def create_ontology(self, tr, predicate, subClass, address, booktitle):
    """Build the JFPO property ontology and one subject graph.

    Serializes the property graph to 'trtst.rdf' and the subject graph to
    'test2.owl' (Turtle). tr/subClass become OWL classes related through
    `predicate`; booktitle and address are stored as dc:title / dc:source.
    """
    LDT = Namespace("http://www.JceFinalProjectOntology.com/")
    ut = Namespace("http://www.JceFinalProjectOntology.com/subject/#")
    usubClass = URIRef("http://www.JceFinalProjectOntology.com/subject/"
                       + subClass.strip() + '#')
    print(ut)
    print(usubClass)

    store = IOMemory()  # shared backing store for both graphs
    sty = LDT[predicate]
    g = rdflib.Graph(store=store, identifier=LDT)
    t = ConjunctiveGraph(store=store, identifier=ut)
    print('Triples in graph before add: ', len(t))

    g.add((URIRef(LDT), RDF.type, RDFS.Class))
    g.add((URIRef(LDT), RDFS.label, Literal("JFPO")))
    g.add((URIRef(LDT), RDFS.comment, Literal('class of all properties')))
    for v in self.symbols.values():
        # Compound terms are reduced to their head term.
        vs = self.splitTerms(v)[0] if self.if_compoTerm(v) else v
        g.add((LDT[vs], RDF.type, RDF.Property))
        g.add((LDT[vs], RDFS.label, Literal('has' + vs)))
        g.add((LDT[vs], RDFS.comment, Literal(v)))
        g.add((LDT[vs], RDFS.range, OWL.Class))
        g.add((LDT[vs], RDFS.domain, Literal(vs)))
    g.bind('JFPO', LDT)
    g.serialize('trtst.rdf', format='turtle')

    t.add((ut[tr], RDF.type, OWL.Class))
    t.add((ut[tr], RDFS.subClassOf, OWL.Thing))
    t.add((ut[tr], RDFS.label, Literal(tr)))
    t.add((ut[tr], DC.title, Literal(booktitle)))
    t.add((ut[tr], DC.source, Literal(address)))
    # FIX: this triple was added three times in the original; RDF graphs are
    # sets, so one add suffices. The unused `relation` local was dropped.
    t.add((ut[tr], DC[predicate], URIRef(usubClass)))
    t.add((ut[tr], LDT[predicate], RDF.Property))
    t.add((ut[tr], LDT.term(predicate), URIRef(usubClass)))
    t.add((usubClass, RDF.type, OWL.Class))
    t.add((usubClass, RDFS.subClassOf, OWL.Thing))
    t.add((usubClass, RDFS.subClassOf, URIRef(sty)))
    t.add((usubClass, RDFS.label, Literal(subClass)))
    # FIX: dc was bound to a malformed "http://http://purl.org/..." URI.
    t.bind("dc", "http://purl.org/dc/elements/1.1/")
    t.bind('JFPO', LDT)
    t.commit()
    t.serialize('test2.owl', format='turtle')
def write_graph(data_handle, out_handle, format='n3'):
    """Load every record from data_handle into a Graph and serialize it.

    Writes a progress marker per record to stderr (the running count at
    every 1000th record, a dot otherwise), serializes the graph to
    out_handle with the current site's https URL as base, and returns
    the number of records processed.
    """
    graph = Graph()
    processed = 0
    for record in generate_records(data_handle):
        processed += 1
        marker = "." if processed % 1000 else str(processed)
        sys.stderr.write(marker)
        for triple in get_triples(record):
            graph.add(triple)
        graph.commit()
    current_site = Site.objects.get_current()
    base_uri = 'https://%s' % current_site.domain
    serialized = graph.serialize(format=format, base=base_uri,
                                 include_base=True)
    out_handle.write(serialized)
    return processed
def rdf_description(name, notation='xml' ): """ Funtion takes title of node, and rdf notation. """ valid_formats = ["xml", "n3", "ntriples", "trix"] default_graph_uri = "http://gstudio.gnowledge.org/rdfstore" configString = "/var/tmp/rdfstore" # Get the Sleepycat plugin. store = plugin.get('Sleepycat', Store)('rdfstore') # Open previously created store, or create it if it doesn't exist yet graph = Graph(store="Sleepycat", identifier = URIRef(default_graph_uri)) path = mkdtemp() rt = graph.open(path, create=False) if rt == NO_STORE: #There is no underlying Sleepycat infrastructure, create it graph.open(path, create=True) else: assert rt == VALID_STORE, "The underlying store is corrupt" # Now we'll add some triples to the graph & commit the changes rdflib = Namespace('http://sbox.gnowledge.org/gstudio/') graph.bind("gstudio", "http://gnowledge.org/") exclusion_fields = ["id", "rght", "node_ptr_id", "image", "lft", "_state", "_altnames_cache", "_tags_cache", "nid_ptr_id", "_mptt_cached_fields"] node=Objecttype.objects.get(title=name) node_dict=node.__dict__ subject=str(node_dict['id']) for key in node_dict: if key not in exclusion_fields: predicate=str(key) pobject=str(node_dict[predicate]) graph.add((rdflib[subject], rdflib[predicate], Literal(pobject))) graph.commit() print graph.serialize(format=notation) graph.close()
def rdf_description(name, notation='xml'): """ Funtion takes title of node, and rdf notation. """ valid_formats = ["xml", "n3", "ntriples", "trix"] default_graph_uri = "http://gstudio.gnowledge.org/rdfstore" configString = "/var/tmp/rdfstore" # Get the Sleepycat plugin. store = plugin.get('Sleepycat', Store)('rdfstore') # Open previously created store, or create it if it doesn't exist yet graph = Graph(store="Sleepycat", identifier=URIRef(default_graph_uri)) path = mkdtemp() rt = graph.open(path, create=False) if rt == NO_STORE: #There is no underlying Sleepycat infrastructure, create it graph.open(path, create=True) else: assert rt == VALID_STORE, "The underlying store is corrupt" # Now we'll add some triples to the graph & commit the changes rdflib = Namespace('http://sbox.gnowledge.org/gstudio/') graph.bind("gstudio", "http://gnowledge.org/") exclusion_fields = [ "id", "rght", "node_ptr_id", "image", "lft", "_state", "_altnames_cache", "_tags_cache", "nid_ptr_id", "_mptt_cached_fields" ] node = Objecttype.objects.get(title=name) node_dict = node.__dict__ subject = str(node_dict['id']) for key in node_dict: if key not in exclusion_fields: predicate = str(key) pobject = str(node_dict[predicate]) graph.add((rdflib[subject], rdflib[predicate], Literal(pobject))) graph.commit() print graph.serialize(format=notation) graph.close()
def rdf_all(notation='xml'):
    """
    Funtion takes title of node, and rdf notation.

    Walks every NID node, re-fetches it through its concrete model class,
    derives an RDF namespace for it, and adds one triple per non-excluded
    model field to a single IOMemory-backed graph, which is serialized at
    the end.
    """
    valid_formats = ["xml", "n3", "ntriples", "trix"]
    default_graph_uri = "http://gstudio.gnowledge.org/rdfstore"
    configString = "/var/tmp/rdfstore"

    # Get the IOMemory plugin.
    store = plugin.get('IOMemory', Store)('rdfstore')

    # Open previously created store, or create it if it doesn't exist yet
    graph = Graph(store="IOMemory", identifier=URIRef(default_graph_uri))
    path = mkdtemp()
    rt = graph.open(path, create=False)
    if rt == NO_STORE:
        graph.open(path, create=True)
    else:
        assert rt == VALID_STORE, "The underlying store is corrupt"

    # Now we'll add some triples to the graph & commit the changes
    graph.bind("gstudio", "http://gnowledge.org/")
    exclusion_fields = ["id", "rght", "node_ptr_id", "image", "lft",
                        "_state", "_altnames_cache", "_tags_cache",
                        "nid_ptr_id", "_mptt_cached_fields"]
    for node in NID.objects.all():
        node_dict = node.ref.__dict__
        node_type = node.reftype
        try:
            # Re-fetch through the concrete model so model-specific fields
            # are present. NOTE(review): `node` (a NID) is used as the
            # lookup title here -- presumably it stringifies to its title;
            # confirm against the NID model.
            if (node_type == 'Gbobject'):
                node = Gbobject.objects.get(title=node)
            elif (node_type == 'None'):
                node = Gbobject.objects.get(title=node)
            elif (node_type == 'Processes'):
                node = Gbobject.objects.get(title=node)
            elif (node_type == 'System'):
                node = Gbobject.objects.get(title=node)
                rdflib = link(node)
                url_addr = link1(node)
                a = fstore_dump(url_addr)
            elif (node_type == 'Objecttype'):
                node = Objecttype.objects.get(title=node)
            elif (node_type == 'Attributetype'):
                node = Attributetype.objects.get(title=node)
            elif (node_type == 'Complement'):
                node = Complement.objects.get(title=node)
            elif (node_type == 'Union'):
                node = Union.objects.get(title=node)
            elif (node_type == 'Intersection'):
                node = Intersection.objects.get(title=node)
            elif (node_type == 'Expression'):
                node = Expression.objects.get(title=node)
            elif (node_type == 'Processtype'):
                node = Processtype.objects.get(title=node)
            elif (node_type == 'Systemtype'):
                node = Systemtype.objects.get(title=node)
            elif (node_type == 'AttributeSpecification'):
                node = AttributeSpecification.objects.get(title=node)
            elif (node_type == 'RelationSpecification'):
                node = RelationSpecification.objects.get(title=node)
            # NOTE(review): indentation reconstructed from a collapsed
            # source line -- these three statements appear to run for every
            # node type, giving `rdflib` a value via link(); confirm.
            rdflib = link(node)
            url_addr = link1(node)
            a = fstore_dump(url_addr)
            if (node_type == 'Attribute'):
                node = Attribute.objects.get(title=node)
                rdflib = Namespace('http://sbox.gnowledge.org/gstudio/')
            elif (node_type == 'Relationtype'):
                node = Relationtype.objects.get(title=node)
                rdflib = Namespace('http://sbox.gnowledge.org/gstudio/')
            elif (node_type == 'Metatype'):
                node = Metatype.objects.get(title=node)
                rdflib = Namespace('http://sbox.gnowledge.org/gstudio/')
                url_addr = 'http://sbox.gnowledge.org/gstudio/'
                a = fstore_dump(url_addr)
        except:
            # NOTE(review): bare except hides all errors (including typos
            # and interrupts); `rdflib` stays unbound for node types not
            # listed below, which would raise NameError in graph.add.
            if (node_type == 'Attribute'):
                rdflib = Namespace('http://sbox.gnowledge.org/gstudio/')
            if (node_type == 'Relationtype'):
                rdflib = Namespace('http://sbox.gnowledge.org/gstudio/')
            if (node_type == 'Metatype'):
                rdflib = Namespace('http://sbox.gnowledge.org/gstudio/')
        subject = str(node_dict['id'])
        for key in node_dict:
            if key not in exclusion_fields:
                predicate = str(key)
                pobject = str(node_dict[predicate])
                graph.add((rdflib[subject], rdflib[predicate], Literal(pobject)))
    rdf_code = graph.serialize(format=notation)
    # path to store the rdf in a file
    #x = os.path.join(os.path.dirname(__file__), 'rdffiles.rdf')
    graph.commit()
    graph.close()
class TestLevelDBConjunctiveGraphCore(unittest.TestCase):
    """Store-level tests for a LevelDB-backed ConjunctiveGraph."""

    def setUp(self):
        # Opens (and creates) the store at the module-level configString path.
        store = "LevelDB"
        self.graph = ConjunctiveGraph(store=store)
        self.path = configString
        self.graph.open(self.path, create=True)

    def tearDown(self):
        # NOTE(review): destroy() is called before close(); close() failures
        # are deliberately swallowed so cleanup always proceeds.
        self.graph.destroy(self.path)
        try:
            self.graph.close()
        except:
            pass
        if getattr(self, 'path', False) and self.path is not None:
            if os.path.exists(self.path):
                if os.path.isdir(self.path):
                    # Remove every store file, then the directory itself.
                    for f in os.listdir(self.path):
                        os.unlink(self.path + '/' + f)
                    os.rmdir(self.path)
                elif len(self.path.split(':')) == 1:
                    os.unlink(self.path)
                else:
                    os.remove(self.path)

    def test_namespaces(self):
        # NOTE(review): the dc URI has a doubled "http://" scheme -- the test
        # only counts bindings, so it still passes; confirm if intentional.
        self.graph.bind("dc", "http://http://purl.org/dc/elements/1.1/")
        self.graph.bind("foaf", "http://xmlns.com/foaf/0.1/")
        # 5 = the store's default bindings plus the two added above.
        self.assert_(len(list(self.graph.namespaces())) == 5)
        self.assert_(('foaf',
                      rdflib.term.URIRef(u'http://xmlns.com/foaf/0.1/'))
                     in list(self.graph.namespaces()))

    def test_readable_index(self):
        print(readable_index(111))

    def test_triples_context_reset(self):
        # After a commit, triples() with the store as context must still
        # see both triples.
        michel = rdflib.URIRef(u'michel')
        likes = rdflib.URIRef(u'likes')
        pizza = rdflib.URIRef(u'pizza')
        cheese = rdflib.URIRef(u'cheese')
        self.graph.add((michel, likes, pizza))
        self.graph.add((michel, likes, cheese))
        self.graph.commit()
        ntriples = self.graph.triples((None, None, None),
                                      context=self.graph.store)
        self.assert_(len(list(ntriples)) == 2)

    def test_remove_context_reset(self):
        # Removing one triple directly through the store must leave one.
        michel = rdflib.URIRef(u'michel')
        likes = rdflib.URIRef(u'likes')
        pizza = rdflib.URIRef(u'pizza')
        cheese = rdflib.URIRef(u'cheese')
        self.graph.add((michel, likes, pizza))
        self.graph.add((michel, likes, cheese))
        self.graph.commit()
        self.graph.store.remove((michel, likes, cheese), self.graph.store)
        self.graph.commit()
        self.assert_(
            len(
                list(
                    self.graph.triples((None, None, None),
                                       context=self.graph.store))) == 1)

    def test_remove_db_exception(self):
        # __len__ with a context argument must not corrupt iteration state.
        michel = rdflib.URIRef(u'michel')
        likes = rdflib.URIRef(u'likes')
        pizza = rdflib.URIRef(u'pizza')
        cheese = rdflib.URIRef(u'cheese')
        self.graph.add((michel, likes, pizza))
        self.graph.add((michel, likes, cheese))
        self.graph.commit()
        self.graph.store.__len__(context=self.graph.store)
        self.assert_(
            len(
                list(
                    self.graph.triples((None, None, None),
                                       context=self.graph.store))) == 2)
def rdf_description(name, notation='xml'): """ Funtion takes title of node, and rdf notation. """ valid_formats = ["xml", "n3", "ntriples", "trix"] default_graph_uri = "http://gstudio.gnowledge.org/rdfstore" # default_graph_uri = "http://example.com/" configString = "/var/tmp/rdfstore" # Get the IOMemory plugin. store = plugin.get('IOMemory', Store)('rdfstore') # Open previously created store, or create it if it doesn't exist yet graph = Graph(store="IOMemory", identifier=URIRef(default_graph_uri)) path = mkdtemp() rt = graph.open(path, create=False) if rt == NO_STORE: graph.open(path, create=True) else: assert rt == VALID_STORE, "The underlying store is corrupt" # Now we'll add some triples to the graph & commit the changes #rdflib = Namespace('http://sbox.gnowledge.org/gstudio/') graph.bind("gstudio", "http://gnowledge.org/") exclusion_fields = [ "id", "rght", "node_ptr_id", "image", "lft", "_state", "_altnames_cache", "_tags_cache", "nid_ptr_id", "_mptt_cached_fields" ] #verifies the type of node node = NID.objects.get(title=name) node_type = node.reftype if (node_type == 'Gbobject'): node = Gbobject.objects.get(title=name) rdflib = link(node) elif (node_type == 'None'): node = Gbobject.objects.get(title=name) rdflib = link(node) elif (node_type == 'Processes'): node = Gbobject.objects.get(title=name) rdflib = link(node) elif (node_type == 'System'): node = Gbobject.objects.get(title=name) rdflib = link(node) elif (node_type == 'Objecttype'): node = Objecttype.objects.get(title=name) rdflib = link(node) elif (node_type == 'Attributetype'): node = Attributetype.objects.get(title=name) rdflib = link(node) elif (node_type == 'Complement'): node = Complement.objects.get(title=name) rdflib = link(node) elif (node_type == 'Union'): node = Union.objects.get(title=name) rdflib = link(node) elif (node_type == 'Intersection'): node = Intersection.objects.get(title=name) rdflib = link(node) elif (node_type == 'Expression'): node = Expression.objects.get(title=name) rdflib 
= link(node) elif (node_type == 'Processtype'): node = Processtype.objects.get(title=name) rdflib = link(node) elif (node_type == 'Systemtype'): node = Systemtype.objects.get(title=name) rdflib = link(node) elif (node_type == 'AttributeSpecification'): node = AttributeSpecification.objects.get(title=name) rdflib = link(node) elif (node_type == 'RelationSpecification'): node = RelationSpecification.objects.get(title=name) rdflib = link(node) elif (node_type == 'Attribute'): node = Attribute.objects.get(title=name) rdflib = Namespace('http://sbox.gnowledge.org/gstudio/') elif (node_type == 'Relationtype'): node = Relationtype.objects.get(title=name) rdflib = Namespace('http://sbox.gnowledge.org/gstudio/') elif (node_type == 'Metatype'): node = Metatype.objects.get(title=name) rdflib = Namespace('http://sbox.gnowledge.org/gstudio/') else: rdflib = Namespace('http://sbox.gnowledge.org/gstudio/') node_dict = node.__dict__ subject = str(node_dict['id']) for key in node_dict: if key not in exclusion_fields: predicate = str(key) pobject = str(node_dict[predicate]) graph.add((rdflib[subject], rdflib[predicate], Literal(pobject))) rdf_code = graph.serialize(format=notation) graph.commit() print rdf_code graph.close()
class TestKyotoCabinetConjunctiveGraphCore(unittest.TestCase):
    """Store-level tests for a KyotoCabinet-backed ConjunctiveGraph."""

    def setUp(self):
        # Opens (and creates) the store at the module-level configString path.
        store = "KyotoCabinet"
        self.graph = ConjunctiveGraph(store=store)
        self.path = configString
        self.graph.open(self.path, create=True)

    def tearDown(self):
        # NOTE(review): destroy() precedes close(); close() failures are
        # swallowed so filesystem cleanup always runs.
        self.graph.destroy(self.path)
        try:
            self.graph.close()
        except:
            pass
        if getattr(self, "path", False) and self.path is not None:
            if os.path.exists(self.path):
                if os.path.isdir(self.path):
                    # Remove each store file, then the directory itself.
                    for f in os.listdir(self.path):
                        os.unlink(self.path + "/" + f)
                    os.rmdir(self.path)
                elif len(self.path.split(":")) == 1:
                    os.unlink(self.path)
                else:
                    os.remove(self.path)

    def test_namespaces(self):
        # NOTE(review): the dc URI has a doubled "http://" scheme; the test
        # only counts bindings so it still passes -- confirm if intentional.
        self.graph.bind("dc", "http://http://purl.org/dc/elements/1.1/")
        self.graph.bind("foaf", "http://xmlns.com/foaf/0.1/")
        # 5 = the store's default bindings plus the two added above.
        self.assert_(len(list(self.graph.namespaces())) == 5)
        self.assert_(("foaf",
                      rdflib.term.URIRef(u"http://xmlns.com/foaf/0.1/"))
                     in list(self.graph.namespaces()))

    def test_play_journal(self):
        # Journal replay is not implemented for this backend.
        self.assertRaises(NotImplementedError,
                          self.graph.store.play_journal,
                          {"graph": self.graph})

    def test_readable_index(self):
        print(readable_index(111))

    def test_triples_context_reset(self):
        # After a commit, triples() with the store as context must still
        # see both triples.
        michel = rdflib.URIRef(u"michel")
        likes = rdflib.URIRef(u"likes")
        pizza = rdflib.URIRef(u"pizza")
        cheese = rdflib.URIRef(u"cheese")
        self.graph.add((michel, likes, pizza))
        self.graph.add((michel, likes, cheese))
        self.graph.commit()
        ntriples = self.graph.triples((None, None, None),
                                      context=self.graph.store)
        self.assert_(len(list(ntriples)) == 2)

    def test_remove_context_reset(self):
        # Removing one triple directly through the store must leave one.
        michel = rdflib.URIRef(u"michel")
        likes = rdflib.URIRef(u"likes")
        pizza = rdflib.URIRef(u"pizza")
        cheese = rdflib.URIRef(u"cheese")
        self.graph.add((michel, likes, pizza))
        self.graph.add((michel, likes, cheese))
        self.graph.commit()
        self.graph.store.remove((michel, likes, cheese), self.graph.store)
        self.graph.commit()
        self.assert_(len(list(self.graph.triples(
            (None, None, None), context=self.graph.store))) == 1)

    def test_remove_db_exception(self):
        # __len__ with a context argument must not corrupt iteration state.
        michel = rdflib.URIRef(u"michel")
        likes = rdflib.URIRef(u"likes")
        pizza = rdflib.URIRef(u"pizza")
        cheese = rdflib.URIRef(u"cheese")
        self.graph.add((michel, likes, pizza))
        self.graph.add((michel, likes, cheese))
        self.graph.commit()
        self.graph.store.__len__(context=self.graph.store)
        self.assert_(len(list(self.graph.triples(
            (None, None, None), context=self.graph.store))) == 2)
# Demo script: open (or create) a MySQL-backed rdflib store, add two
# triples, and print the graph.
default_graph_uri = "http://id.southampton.ac.uk/dataset/places"
# FIX: `configString` had no value at all in the original (a syntax error);
# the MySQL connection string was evidently lost.
# TODO(review): supply the real credentials, e.g.
# "host=localhost,user=...,password=...,db=rdfstore".
configString = ""

# Get the mysql plugin. You may have to install the python mysql libraries
store = plugin.get('MySQL', Store)('rdfstore')

# Open previously created store, or create it if it doesn't exist yet
rt = store.open(configString, create=False)
if rt == NO_STORE:
    # There is no underlying MySQL infrastructure, create it
    store.open(configString, create=True)
else:
    assert rt == VALID_STORE, "There underlying store is corrupted"

# There is a store, use it
graph = Graph(store, identifier=URIRef(default_graph_uri))

print("Triples in graph before add: %s" % len(graph))

# Now we'll add some triples to the graph & commit the changes
rdflib = Namespace('http://rdflib.net/test/')
graph.add((rdflib['pic:1'], rdflib['name'], Literal('Jane & Bob')))
graph.add((rdflib['pic:2'], rdflib['name'], Literal('Squirrel in Tree')))
graph.commit()

# FIX: the format string was "%" with no conversion specifier, which raises
# ValueError at runtime; "%s" matches the "before add" line above.
print("Triples in graph after add: %s" % len(graph))

# display the graph in RDF/XML
print(graph.serialize())
class Project:
    """A SemPipe project: a directory-shaped module identified by a URI.

    Aggregates configuration graphs, data graphs and update instructions
    into one ConjunctiveGraph, then builds/publishes representations of
    resources into a build directory.
    """

    def __init__(self, uri, storePath):
        # uri: module URI, must end with "/"; storePath: optional directory
        # used to cache the aggregated graph as N-Quads.
        if (uri[-1] != "/"):
            raise SemPipeException("A Module must be a directory and its URI must end with a /")
        self.n = URIRef(uri)
        self.g = ConjunctiveGraph('IOMemory')
        self.storePath = storePath
        # NOTE(review): the existence check looks for "store.nquads" but the
        # file parsed here -- and the one commit() writes -- is "store.nq".
        # As written, this cached branch can never trigger; confirm intent.
        if storePath and os.path.exists(storePath + "/store.nquads"):
            self.g.parse(storePath + "/store.nq", format='nquads')
            self.confGraph = self.g.get_context(URIRef("sempipe:confgraph"))
        else:
            # Aggregate graphs: load config, referenced data graphs, and
            # apply any declared SPARQL update instructions, then persist.
            self.confGraph = self.g.get_context(URIRef("sempipe:confgraph"))
            self._loadconf()
            for graph in self.confGraph.objects(self.n, semp.dataGraph):
                self.loadData(graph)
            for updateList in self.confGraph.objects(self.n, semp.update):
                for updateInstruction in Collection(self.confGraph, updateList):
                    self.updateGraph(str(updateInstruction))
            self.commit()

        # Cache HostedSpaces
        self.hostedSpaces = []
        res = self.confGraph.query("""
            SELECT ?baseURI ?mapTo ?index ?htaccess {
                ?baseURI a semp:HostedSpace ;
                    semp:mapTo ?mapTo ;
                    semp:mapIndexTo ?index ;
                    semp:mapHTAccessTo ?htaccess .
            }
            """, initNs={"semp": semp})
        for s in res:
            self.hostedSpaces.append(HostedSpace._make(s))

    def __str__(self):
        return str(self.n)

    def __repr__(self):
        return "{0}({1},{2})".format(self.__class__.__name__,
                                     repr(self.n), repr(self.storePath))

    def _loadconf(self, uri=None):
        """Loads a graph and all config-graphs it references as configuration graphs

        @param uri: a URIRef, defaults to self.n+SempPipe.conffilename"""
        uri = uri or URIRef(self.n + conffilename)
        if self.g.get_context(uri):
            print("ConfGraph {} already in database".format(uri), file=sys.stderr)
            return
        print("Loading {} as config graph".format(uri), file=sys.stderr)
        newgraph = self.g.parse(uri, format="n3")
        self.confGraph += newgraph
        self.confGraph.add((uri, rdf.type, semp.ConfGraph))
        # Follow semp:confGraph links from both the loaded graph's URI and
        # the project node, skipping graphs already present.
        imports = set(newgraph.objects(uri, semp.confGraph))
        imports |= set(newgraph.objects(self.n, semp.confGraph))
        imports = filter(lambda x: not self.g.get_context(x), imports)
        # Recursively load additional graphs
        for imp in imports:
            self._loadconf(imp)

    def loadData(self, url):
        """Loads a data graph"""
        return parse(self.g, url)

    def updateGraph(self, sparql):
        # Apply one SPARQL update instruction to the aggregated graph.
        # NOTE(review): the bare except discards the original exception;
        # consider `except Exception as e: raise ... from e`.
        try:
            self.g.update(sparql)
        except:
            raise SemPipeException("Update instruction failed:\n{}".format(str(sparql)))

    def hostedSpace(self, resource, reverse=False):
        """Picks the best matching hostedSpace for the given resource.

        If reverse is set, resource is considered to be a path relative
        to the buildDir and the corresponding URIRef is returned."""
        if reverse:
            hostedSpaces = filter(lambda s: resource.startswith(self.buildDir + s.mapTo), self.hostedSpaces)
        else:
            hostedSpaces = filter(lambda s: resource.startswith(s.baseURI), self.hostedSpaces)
        # Find the best match, which is the most specific one:
        try:
            return max(hostedSpaces, key=lambda s: len(s.baseURI))
        except ValueError:
            # max() on an empty iterator raises ValueError.
            raise SemPipeException("No hosted space found for {}".format(resource))

    def contentLocation(self, base, ending):
        # Directory-style URIs get the hosted space's index name inserted
        # before the ending; plain URIs just get the ending appended.
        if str(base)[-1] == '/':
            index = self.hostedSpace(base).index
            return str(base) + index + ending
        else:
            return str(base) + ending

    @property
    def buildDir(self):
        # The build directory configured for this project node.
        return next(self.confGraph.objects(self.n, semp.buildDir))

    def buildLocation(self, resource):
        """Determines the filename in the build directory corresponding to a URI."""
        hs = self.hostedSpace(resource)
        return self.buildDir + hs.mapTo + resource[len(hs.baseURI):]

    def buildLocationToResource(self, buildLocation):
        """Determines the filename in the build directory corresponding to a URI."""
        if not buildLocation.startswith(self.buildDir):
            raise SemPipeException("{} is not in buildDir".format(buildLocation))
        hs = self.hostedSpace(buildLocation, reverse=True)
        return URIRef(hs.baseURI + buildLocation[len(self.buildDir + hs.mapTo):])

    def copy(self, source, dest):
        """Publish a resource by copying a file

        Note that dest is the URI where the resource should be
        published, the corresponding directory in the build directory
        is derived automatically."""
        dest = self.buildLocation(dest)
        print("copy {0} to {1}".format(source, dest), file=sys.stderr)
        directory = dest.rsplit("/", 1)[0]
        directory = fileurl2path(directory)
        print(" Making shure directory {0} exists".format(directory), file=sys.stderr)
        os.makedirs(directory, mode=0o777, exist_ok=True)
        shutil.copy(fileurl2path(source), fileurl2path(dest))
        print(" done", file=sys.stderr)

    def write(self, dest, data):
        """Publishes a file with contents data"""
        dest = self.buildLocation(dest)
        print("writing data to {0}".format(dest), file=sys.stderr)
        directory = dest.rsplit("/", 1)[0]
        directory = fileurl2path(directory)
        print(" Making shure directory {0} exists".format(directory), file=sys.stderr)
        os.makedirs(directory, mode=0o777, exist_ok=True)
        with open(fileurl2path(dest), mode="wb") as f:
            f.write(data)
        print(" done", file=sys.stderr)

    def buildResource(self, resource):
        """Looks up the description of the resource and builds it

        Creates all representations of the resource and adds
        information to the .htaccess if required.

        semp:Reource
            type of a Resource
        semp:subject
            What the page is mainly about. This is used by semp:Render
            to know which one is the root node.
        semp:source
            points to a source file
        semp:representation
            A variant of the resource, obtainable by content nogtiation
        semp:content-type
            indicates the targetted content type
        semp:buildCommand
            tells how to build the representation.
            Use semp:Render to render with fresnel Lenses and an XSLT.
            Use semp:Raw to just take the surce file.
        semp:content-type
            used on a source file or representation to indicate the
            content type
        """
        representations = self.confGraph.objects(resource, semp.representation)
        for r in representations:
            content_type = next(self.confGraph.objects(r, semp["content-type"]))
            # source/language/quality are all optional on a representation.
            try:
                source = next(self.confGraph.objects(r, semp.source))
            except(StopIteration):
                source = None
            try:
                language = next(self.confGraph.objects(r, semp.language))
            except(StopIteration):
                language = None
            try:
                quality = next(self.confGraph.objects(r, semp.quality))
            except(StopIteration):
                quality = None
            contentLocation = URIRef(self.contentLocation(resource, self.defaultEnding(content_type, language)))
            if semp.Raw in self.confGraph.objects(r, semp.buildCommand):
                # Raw: the source file is the representation.
                self.copy(source, contentLocation)
            elif semp.Render in self.confGraph.objects(r, semp.buildCommand):
                # Render with Fresnel lenses over the aggregated graph.
                fresnelGraph = self.g
                instanceGraph = self.g
                ctx = Fresnel.Context(fresnelGraph=fresnelGraph, instanceGraph=instanceGraph)
                box = Fresnel.ContainerBox(ctx)
                box.append(resource)
                box.select()
                box.portray()
                tree = box.transform()
                # Fresnel.prettify(tree) results in bad whitespace
                self.write(contentLocation, etree.tostring(tree, encoding="UTF-8", xml_declaration=True))
            elif semp.Serialize in self.confGraph.objects(r, semp.buildCommand):
                # Serialize: dump the resource's own context graph.
                graph = self.g.get_context(resource)
                self.write(contentLocation, graph.serialize())
            else:
                raise SemPipeException("Failed to produce representation {0} of {1}".format(r, resource))
            # Optional post-processing XSLT chain, applied in place.
            try:
                xslt_files = Collection(self.confGraph, next(self.confGraph.objects(r, semp.transformation)))
                buildloc = self.buildLocation(contentLocation)
                for xslt_file in xslt_files:
                    command = ["xsltproc", "--output", fileurl2path(buildloc), fileurl2path(str(xslt_file)), fileurl2path(buildloc)]
                    print("Running transformation", *command, file=sys.stderr)
                    subprocess.call(command)
            except (StopIteration):
                pass

        # write typemap
        typemap = self.typemap(resource)
        if typemap is not None:
            self.write(resource, typemap)

    def typemap(self, resource):
        """ Returns the contents of a type-map file for all
        representations of the given resource.

        Returns None if no typemap is necessary. """
        representations = sorted(self.confGraph.objects(resource, semp.representation))
        typemap_url = lambda url: str(url).rsplit("/", 1)[-1]
        typemap = ["URI: {}\n\n".format(typemap_url(resource))]
        typemap_needed = False
        for r in representations:
            content_type = next(self.confGraph.objects(r, semp["content-type"]))
            try:
                source = next(self.confGraph.objects(r, semp.source))
            except(StopIteration):
                source = None
            try:
                language = next(self.confGraph.objects(r, semp.language))
            except(StopIteration):
                language = None
            try:
                quality = next(self.confGraph.objects(r, semp.quality))
            except(StopIteration):
                quality = None
            contentLocation = URIRef(self.contentLocation(resource, self.defaultEnding(content_type, language)))
            typemap.append("URI: {}\n".format(typemap_url(contentLocation)))
            typemap.append("Content-type: {}".format(content_type))
            if quality is not None:
                # A quality value is the only thing that makes a type-map
                # worthwhile; without one, plain MultiViews suffices.
                typemap[-1] += "; q={}\n".format(quality)
                typemap_needed = True
            else:
                typemap[-1] += "\n"
            if language is not None:
                typemap.append("Content-language: {}\n".format(language))
            typemap.append("\n")
        if typemap_needed:
            return "".join(typemap).encode("UTF-8")
        else:
            return None

    def defaultEnding(self, content_type=None, language=None):
        # NOTE(review): `cts` is unused; extensions actually come from
        # semp:defaultExtension statements in the confGraph.
        cts = {
            "application/rdf+xml": ".rdf",
            "application/xhtml+xml": ".xhtml",
            "text/html": ".html",
            None: ""
        }
        if content_type:
            typeendings = list(self.confGraph.objects(URIRef("http://purl.org/NET/mediatypes/" + content_type), semp.defaultExtension))
            if len(typeendings) > 1:
                raise SemPipeException("ambiguous extension for content-type {} in confGraph.".format(content_type))
            elif len(typeendings) < 1:
                raise SemPipeException("No extension for content-type {} found".format(content_type))
            else:
                typeending = typeendings[0]
        else:
            typeending = ""
        return ("." + language if language else "") + "." + typeending

    def write_htaccess(self):
        """Writes all required .htaccess files."""
        # First generate the directives for each resource
        filesinfo = []
        resources = self.resources
        for resource in resources:
            info = []
            filesinfo.append((resource, info))
            if self.typemap(resource) is not None:
                info.append("SetHandler type-map\n")

        # Generate the .htaccess files
        htaccessfiles = dict()
        for resource, info in filter(lambda x: x[1], filesinfo):
            directory, filename = resource.rsplit("/", 1)
            ht = htaccessfiles.setdefault(directory, [])
            ht.append('<Files "{}">\n'.format(filename))
            ht += info
            ht.append('</Files>\n')

        for directory, ht in htaccessfiles.items():
            print("Writing a .htaccess in {}".format(directory), file=sys.stderr)
            # NOTE(review): `resource` is the leftover loop variable from
            # the loop above, not one derived from `directory`; confirm the
            # htaccess name really is space-independent here.
            filename = self.hostedSpace(resource).htaccess
            self.write(directory + "/" + filename, "".join(ht).encode("UTF-8"))

    def publish(self):
        import getpass
        import subprocess
        """Walks through HostedSpaces and upload the respective files from the build diretory.

        (Instead we should walk through the build directory. Will be changed later.)"""
        hostedSpacesQuery = """
            SELECT ?space ?method ?command ?invocation
            WHERE {
                ?space a semp:HostedSpace .
                ?space semp:publishMethod ?method .
                ?method semp:command ?command .
                ?method semp:invocation ?invocation .
            }"""
        askForQuery = """
            SELECT ?variable
            WHERE {
                { ?method semp:askFor ?variable . }
                UNION
                { ?method semp:askForHidden ?variable . }
            }"""
        #?hidden
        #{ ?method semp:askFor ?variable . }
        #UNION
        #{ ?method semp:askForHidden ?variable .
        #  BIND ("true"^^xsd:boolean as ?hidden) }
        for spaceRow in self.confGraph.query(hostedSpacesQuery, initNs={"semp": semp}).bindings:
            space = spaceRow[Variable("?space")]
            method = spaceRow[Variable("?method")]
            # Interactively collect credentials the publish method asks for.
            answers = dict()
            for question in self.confGraph.query(askForQuery, initNs={"semp": semp}, initBindings={"method": method}).bindings:
                answers[question[Variable("?variable")]] = getpass.getpass("{} for method {}".format(question[Variable("?variable")], method))
            spacedir = self.buildLocation(space)
            command = []
            # NOTE(review): Variable("command") lacks the "?" used for all
            # other variables above -- confirm this key actually resolves.
            for arg in Collection(self.confGraph, spaceRow[Variable("command")]):
                command.append(str(arg).format("", fileurl2path(spacedir), str(space), **answers))
            print("Running {}".format(command[0]), file=sys.stderr)
            subprocess.call(command)

    @property
    def resources(self):
        # All subjects typed semp:Resource in the configuration graph.
        return self.confGraph.subjects(rdf.type, semp.Resource)

    def commit(self):
        # Persist the aggregated graph to storePath/store.nq when caching
        # is enabled (see the NOTE in __init__ about the filename mismatch).
        self.g.commit()
        if self.storePath:
            self.g.serialize(destination=self.storePath + "/store.nq", format='nquads', encoding='UTF-8')

    def serialize(self):
        return self.g.serialize()

    def close(self):
        self.g.close()
def rdf_description(notation='xml'):
    """
    Build an RDF graph describing the node titled 'student' and print it.

    Every non-excluded field of the node becomes one triple
    (subject = node id, predicate = field name, object = str(field value)),
    serialized in the given notation (e.g. "xml", "n3", "ntriples", "trix").

    :param notation: rdflib serialization format name.
    :raises ValueError: if get_nodetype() returns an unknown node type
        (previously this fell through the if/elif ladder and raised a
        NameError on the undefined 'node').
    """
    name = 'student'
    default_graph_uri = "http://gstudio.gnowledge.org/rdfstore"

    # In-memory store; the open/create dance mirrors the Sleepycat recipe
    # this code was derived from (NO_STORE/VALID_STORE checks).
    graph = Graph(store="IOMemory", identifier=URIRef(default_graph_uri))
    path = mkdtemp()
    rt = graph.open(path, create=False)
    if rt == NO_STORE:
        # There is no underlying store infrastructure yet -- create it.
        graph.open(path, create=True)
    else:
        assert rt == VALID_STORE, "The underlying store is corrupt"

    graph.bind("gstudio", "http://gnowledge.org/")

    # Internal bookkeeping fields that must not be serialized.
    exclusion_fields = [
        "id", "rght", "node_ptr_id", "image", "lft", "_state",
        "_altnames_cache", "_tags_cache", "nid_ptr_id", "_mptt_cached_fields",
    ]

    # Dispatch table replaces the 12-branch if/elif chain.
    node_models = {
        'gbobject': Gbobject,
        'objecttype': Objecttype,
        'metatype': Metatype,
        'attributetype': Attributetype,
        'relationtype': Relationtype,
        'attribute': Attribute,
        'complement': Complement,
        'union': Union,
        'intersection': Intersection,
        'expression': Expression,
        'processtype': Processtype,
        'systemtype': Systemtype,
    }
    node_type = get_nodetype()
    try:
        model = node_models[node_type]
    except KeyError:
        raise ValueError("Unknown node type: {0}".format(node_type))
    node = model.objects.get(title=name)

    # Build the node's absolute URL from the first site it is attached to.
    node_url = node.get_absolute_url()
    host_name = node.sites.all()[0].name
    url_add = 'http://' + host_name + node_url
    # Renamed from 'rdflib' -- the old name shadowed the rdflib module.
    ns = Namespace(url_add)

    node_dict = node.__dict__
    subject = str(node_dict['id'])
    for key in node_dict:
        if key not in exclusion_fields:
            predicate = str(key)
            pobject = str(node_dict[predicate])
            graph.add((ns[subject], ns[predicate], Literal(pobject)))

    rdf_code = graph.serialize(format=notation)
    graph.commit()
    print(rdf_code)
    graph.close()
def rdf_description(notation='xml'):
    """
    Print an RDF serialization of the node titled 'student'.

    NOTE(review): this is a near-verbatim duplicate of an earlier
    rdf_description definition in this file; at import time this later
    definition shadows the earlier one -- consider removing one of them.

    :param notation: rdflib serialization format ("xml", "n3", "ntriples",
        "trix").
    :raises ValueError: for an unknown node type (the old if/elif ladder
        left 'node' undefined and crashed with NameError instead).
    """
    name = 'student'
    default_graph_uri = "http://gstudio.gnowledge.org/rdfstore"

    graph = Graph(store="IOMemory", identifier=URIRef(default_graph_uri))
    path = mkdtemp()
    rt = graph.open(path, create=False)
    if rt == NO_STORE:
        graph.open(path, create=True)   # create the backing store on first use
    else:
        assert rt == VALID_STORE, "The underlying store is corrupt"

    graph.bind("gstudio", "http://gnowledge.org/")

    # Django bookkeeping fields that should not become triples.
    exclusion_fields = ["id", "rght", "node_ptr_id", "image", "lft", "_state",
                        "_altnames_cache", "_tags_cache", "nid_ptr_id",
                        "_mptt_cached_fields"]

    # Map node-type names to their model classes (replaces the if/elif chain).
    model_for_type = {
        'gbobject': Gbobject, 'objecttype': Objecttype,
        'metatype': Metatype, 'attributetype': Attributetype,
        'relationtype': Relationtype, 'attribute': Attribute,
        'complement': Complement, 'union': Union,
        'intersection': Intersection, 'expression': Expression,
        'processtype': Processtype, 'systemtype': Systemtype,
    }
    node_type = get_nodetype()
    if node_type not in model_for_type:
        raise ValueError("Unknown node type: {0}".format(node_type))
    node = model_for_type[node_type].objects.get(title=name)

    # Absolute URL of the node on the first site it is attached to.
    node_url = node.get_absolute_url()
    site = node.sites.all()[0]
    url_add = 'http://' + site.name + node_url
    # Renamed from 'rdflib': the old local shadowed the rdflib module.
    ns = Namespace(url_add)

    node_dict = node.__dict__
    subject = str(node_dict['id'])
    for key in node_dict:
        if key in exclusion_fields:
            continue
        predicate = str(key)
        graph.add((ns[subject], ns[predicate], Literal(str(node_dict[predicate]))))

    rdf_code = graph.serialize(format=notation)
    graph.commit()
    print(rdf_code)
    graph.close()
default_graph_uri = "http://rdflib.net/rdfstore" configString = "/var/tmp/rdfstore" # Get the Sleepycat plugin. store = plugin.get('Sleepycat', Store)('rdfstore') # Open previously created store, or create it if it doesn't exist yet graph = Graph(store="Sleepycat", identifier=URIRef(default_graph_uri)) path = mkdtemp() rt = graph.open(path, create=False) if rt == NO_STORE: # There is no underlying Sleepycat infrastructure, create it graph.open(path, create=True) else: assert rt == VALID_STORE, "The underlying store is corrupt" print "Triples in graph before add: ", len(graph) # Now we'll add some triples to the graph & commit the changes rdflib = Namespace('http://rdflib.net/test/') graph.bind("test", "http://rdflib.net/test/") graph.add((rdflib['pic:1'], rdflib['name'], Literal('Jane & Bob'))) graph.add((rdflib['pic:2'], rdflib['name'], Literal('Squirrel in Tree'))) graph.commit() print "Triples in graph after add: ", len(graph) # display the graph in RDF/XML print graph.serialize()