def test_proxy(self):
    """Fetch a remote document through the /proxy endpoint and verify the
    response parses as N3."""
    query = urlencode({
        "uri": "http://www.w3.org/People/EM/contact#me",
        "format": "text/n3",
    })
    response = self.app.get("/proxy?" + query)
    graph = Graph()
    graph.parse(StringIO(response.body), format="n3")
def command(self): uri = self.get_uri() g = Graph(identifier=uri) g.parse(uri, format="n3") if self.options.format == "thimbl": print serialize_thimbl(g) else: print g.serialize(format=self.options.format)
def getContent(self):
    """Return the form content: the master and custom fresnel graphs merged
    and serialized as N3 under the 'lens' key."""
    registry = getUtility(IRegistry)
    settings = registry.forInterface(IRDFSettings, check=False)
    graph = Graph(identifier=settings.fresnel_graph_uri)
    for source in (master, custom):
        graph.parse(StringIO(source), format='n3')
    return {'lens': graph.serialize(format='n3')}
def test_05_put(self):
    """Round-trip a graph: GET it as RDF/XML, PUT it back, and check the
    response contains no urn:uuid: identifiers."""
    get_response = self.app.get(
        url("/graph", uri=test_graph, format="application/rdf+xml"))
    graph = Graph()
    graph.parse(StringIO(get_response.body), format="xml")
    ## now put it back
    put_response = self.app.put(
        url("/graph", uri=test_graph),
        params=graph.serialize(format="pretty-xml"),
        headers={"Content-type": "application/rdf+xml"})
    assert put_response.body.find("urn:uuid:") == -1
def load_file(fileobj):
    '''Loads the specified COFOG-like file into the database with key names
    'cofog1', 'cofog2' and 'cofog3'.
    '''
    # TODO: replace with simple import of the cofog rdf data which already has
    # relevant structure
    from wdmmgrdf.model import handler
    ctx = handler.context(u'importer', u'loading cofog')
    graph = Graph(identifier=COFOG_IDENTIFIER)
    graph.parse(fileobj, format='n3')
    log.info('add %s' % graph.identifier)
    ctx.add(graph)
    log.info('commit changes')
    ctx.commit()
def applyChanges(self, data):
    """Parse the submitted lens turtle, replace the fresnel context in the
    local graph with it, and clear the ORDF cache."""
    # NOTE: the incoming ``data`` parameter is immediately replaced by the
    # form's extracted data (z3c.form convention).
    data, errors = self.extractData()
    if errors:
        self.status = self.formErrorsMessage
        return
    registry = getUtility(IRegistry)
    settings = registry.forInterface(IRDFSettings, check=False)
    graph = Graph(identifier=settings.fresnel_graph_uri)
    graph.parse(StringIO(data['lens']), format='turtle')
    local_graph = self.localGraph
    local_graph.remove_context(graph)
    local_graph.addN(
        (s, p, o, graph) for (s, p, o) in graph.triples((None, None, None)))
    getUtility(IORDF).clearCache()
def rdf_data():
    """Yield the core OBP schema graph, then one graph per bundled .n3 lens."""
    base_uri = "http://purl.org/okfn/obp#"
    log.info("Loading %s" % base_uri)
    graph = Graph(identifier=base_uri)
    fp = pkg_resources.resource_stream("openbiblio",
                                       os.path.join("n3", "obp.n3"))
    graph.parse(fp, format="n3")
    fp.close()
    yield graph
    for lens in pkg_resources.resource_listdir("openbiblio", "lenses"):
        # only .n3 resources are lenses; skip anything else
        if not lens.endswith(".n3"):
            continue
        graph = Graph(identifier=OBPL[lens[:-3]])
        fp = pkg_resources.resource_stream("openbiblio",
                                           os.path.join("lenses", lens))
        graph.parse(fp, format="n3")
        fp.close()
        yield graph
def setUp(cls):
    """One-time class fixture: load the initial data graphs, then the bibtex
    test record GB9361575, committing each batch via the handler."""
    from openbiblio import handler
    if cls.done:
        return
    ctx = handler.context(getuser(), "Initial Data")
    for graph in cls.data():
        ## delete any stale history
        ctx.add(graph)
    ctx.commit()
    ctx = handler.context(getuser(), "Bibtex Graph data")
    ident = URIRef("http://bnb.bibliographica.org/entry/GB9361575")
    record = Graph(identifier=ident)
    record.parse(os.path.join(cls.testdata, "GB9361575.rdf"))
    ctx.add(record)
    ctx.commit()
    cls.done = True
def importLocalRDF(context):
    """GenericSetup import step: load each <external> graph listed in
    ontologies.xml into the external triple store via the IORDF tool."""
    # FIXME: there is no internal store at the moment.
    # maybe we can do something like import to named store
    # or just let IORDF tool decide where to store it?
    # TODO: allow to replace / add
    #       clear whole store
    #       clear single graphs
    #       support not just turtle
    xml = context.readDataFile('ontologies.xml')
    if xml is None:
        LOG.debug('Nothing to import.')
        return
    LOG.info('Import RDF data into local triple store')
    tool = getUtility(IORDF)
    for node in ET.fromstring(xml):
        if node.tag not in ('local', 'external'):
            raise ValueError('Unknown node: {}'.format(node.tag))
        if node.tag == 'local':
            LOG.warn("Import to local store no longer supported.")
            continue
        # node.tag == 'external'
        fname = node.get('file')
        uri = node.get('uri')
        filename = 'ontologies/{}'.format(fname)
        data = context.readDataFile(filename)
        if data is None:
            raise ValueError('File missing: {}'.format(filename))
        if not uri:
            raise ValueError('Missing URI for graph: {}'.format(filename))
        LOG.info('load {} into external store.'.format(fname))
        graph = Graph(identifier=uri)
        graph.parse(data=data, format='turtle')
        tool.getHandler().put(graph)
def create_collection(user, object_dict=None):
    """Create a collection graph for *user* and commit it to the store.

    object_dict may override the defaults ('uri', 'title', 'user', 'works');
    each entry in 'works' is added as an rdfs:member of the collection.
    Returns the URI of the new collection.
    """
    # Fix: the default was previously a mutable ``{}`` shared across calls
    # (classic mutable-default-argument pitfall); use a None sentinel instead.
    if object_dict is None:
        object_dict = {}
    values = {
        'uri': 'http://bibliographica.org/collection/' + str(uuid.uuid4()),
        'title': 'Untitled',
        'user': user,
        'works': [],
    }
    values.update(object_dict)
    uri = values['uri']
    ident = URIRef(uri)
    data = Graph(identifier=ident)
    # base collection description plus one rdfs:member triple per work
    ourdata = collection_n3 % values
    for work in values['works']:
        ourdata += '<%s> rdfs:member <%s> .\n' % (work, ident)
    data.parse(data=ourdata, format='n3')
    ctx = handler.context(user, "Creating collection: %s" % uri)
    ctx.add(data)
    ctx.commit()
    return uri
def rdf_data():
    """Yield license graphs: the CC schema, our fresnel lens, then one graph
    per license reported by LicensesService2."""
    service = LicensesService2()

    g = Graph(identifier=CC[""])
    g.parse("http://creativecommons.org/schema.rdf")
    yield g

    fp = pkg_resources.resource_stream("licenses",
                                       os.path.join("n3", "license.n3"))
    g = Graph(identifier=LICENSES["lens"])
    g.parse(fp, format="n3")
    fp.close()
    yield g

    for ld in service.get_licenses():
        ident = LICENSES[ld["id"]]
        g = Graph(identifier=ident)
        lic = License(ident, graph=g)
        lic.label = Literal(ld["title"])
        lic.prefLabel = Literal(ld["title"])
        lic.notation = Literal(ld["id"])
        lic.lens = LICENSES.lens
        if ld.get("url"):
            url = URIRef(ld["url"])
            sa = Graph()
            # Best-effort fetch of the license document; narrowed the bare
            # ``except:`` to Exception so KeyboardInterrupt/SystemExit still
            # propagate while parse failures stay non-fatal.
            try:
                sa.parse(url)
            except Exception:
                pass
            try:
                sa.parse(url, format="rdfa")
            except Exception:
                pass
            # strip presentation-only statements
            sa.remove((url, XHV.icon, None))
            sa.remove((url, XHV.alternate, None))
            sa.remove((url, XHV.stylesheet, None))
            for ll in sa.distinct_objects(url, XHV.license):
                lic.license = ll
            sa.remove((url, XHV.license, None))
            if sa.bnc((url, None, None)):
                # copy remaining statements about the url onto our identifier
                # (was a side-effect list comprehension; now a plain loop)
                for _s, p, o in sa.bnc((url, None, None)):
                    g.add((ident, p, o))
                lic.sameAs = url
        else:
            # NOTE(review): reached when ld.get("url") is falsy, yet reads
            # ld["url"] — raises KeyError if the key is absent. Preserved
            # as-is; confirm the service always supplies "url".
            lic.seeAlso = URIRef(ld["url"])
        yield g
def rdf_data():
    """Yield license graphs: the CC schema, our fresnel lens, then one graph
    per license reported by LicensesService2."""
    service = LicensesService2()

    g = Graph(identifier=CC[""])
    g.parse("http://creativecommons.org/schema.rdf")
    yield g

    fp = pkg_resources.resource_stream("licenses",
                                       os.path.join("n3", "license.n3"))
    g = Graph(identifier=LICENSES["lens"])
    g.parse(fp, format="n3")
    fp.close()
    yield g

    for ld in service.get_licenses():
        ident = LICENSES[ld["id"]]
        g = Graph(identifier=ident)
        lic = License(ident, graph=g)
        lic.label = Literal(ld["title"])
        lic.prefLabel = Literal(ld["title"])
        lic.notation = Literal(ld["id"])
        lic.lens = LICENSES.lens
        if ld.get("url"):
            url = URIRef(ld["url"])
            sa = Graph()
            # Best-effort fetch of the license document; narrowed the bare
            # ``except:`` to Exception so KeyboardInterrupt/SystemExit still
            # propagate while parse failures stay non-fatal.
            try:
                sa.parse(url)
            except Exception:
                pass
            try:
                sa.parse(url, format="rdfa")
            except Exception:
                pass
            # strip presentation-only statements
            sa.remove((url, XHV.icon, None))
            sa.remove((url, XHV.alternate, None))
            sa.remove((url, XHV.stylesheet, None))
            for ll in sa.distinct_objects(url, XHV.license):
                lic.license = ll
            sa.remove((url, XHV.license, None))
            if sa.bnc((url, None, None)):
                # copy remaining statements about the url onto our identifier
                # (was a side-effect list comprehension; now a plain loop)
                for _s, p, o in sa.bnc((url, None, None)):
                    g.add((ident, p, o))
                lic.sameAs = url
        else:
            # NOTE(review): reached when ld.get("url") is falsy, yet reads
            # ld["url"] — raises KeyError if the key is absent. Preserved
            # as-is; confirm the service always supplies "url".
            lic.seeAlso = URIRef(ld["url"])
        yield g
def test_04_autoneg(self):
    """Content negotiation: requesting text/n3 via Accept must yield
    parseable N3."""
    response = self.app.get(url("/graph", uri=test_graph + '.n3'),
                            headers={"Accept": "text/n3"})
    graph = Graph()
    graph.parse(StringIO(response.body), format="n3")
def test_03_rdfxml(self):
    """The graph endpoint must return parseable RDF/XML when asked for it."""
    response = self.app.get(
        url("/graph", uri=test_graph, format="application/rdf+xml"))
    graph = Graph()
    graph.parse(StringIO(response.body), format="xml")
def test_02_n3(self):
    """The graph endpoint must return parseable N3 when asked for text/n3."""
    response = self.app.get(url("/graph", uri=test_graph, format="text/n3"))
    graph = Graph()
    graph.parse(StringIO(response.body), format="n3")
def data(cls):
    """Yield the fixture graph loaded from fixtures.rdf in the test data dir."""
    ident = URIRef("http://bibliographica.org/test")
    fixture = Graph(identifier=ident)
    fixture.parse(os.path.join(cls.testdata, "fixtures.rdf"))
    yield fixture