def test_multiple_sessions(self):
    """ Test that multiple sessions coexist correctly. """
    s1 = surf.Session(surf.Store(reader="rdflib"))
    P = s1.get_class(surf.ns.FOAF.Person)
    # assertEquals is a deprecated alias; use assertEqual.
    self.assertEqual(P.session, s1)
    _ = surf.Session(surf.Store(reader="rdflib"))
    # Making another session shouldn't change session of already
    # instantiated classes and instances:
    self.assertEqual(P.session, s1)
def _get_store_session(self, use_default_context=True):
    """ Return initialized SuRF store and session objects. """
    # FIXME: take endpoint from configuration file,
    params = dict(
        reader="allegro_franz",
        writer="allegro_franz",
        server="localhost",
        port=6789,
        catalog="repositories",
        repository="test_surf",
    )
    if use_default_context:
        params["default_context"] = URIRef("http://surf_test_graph/dummy2")
    store = surf.Store(**params)
    session = surf.Session(store)
    # Fresh start: wipe every context the tests touch, then the whole store.
    for context in (URIRef("http://surf_test_graph/dummy2"),
                    URIRef("http://my_context_1"),
                    URIRef("http://other_context_1")):
        store.clear(context)
    store.clear()
    return store, session
def store(self):
    """Factory for store objects """
    # Reuse the cached store if one was already built.
    if self._store is not None:
        return self._store
    store = surf.Store(reader='rdflib',
                       writer='rdflib',
                       rdflib_store='IOMemory')
    # Register the well-known namespace prefixes on the underlying graph.
    prefixes = (
        ('dc', surf.ns.DC),
        ('dcterms', surf.ns.DCTERMS),
        ('eea', surf.ns.EEA),
        ('skos', surf.ns.SKOS),
        ('geo', surf.ns.GEO),
        ('owl', surf.ns.OWL),
        ('dcat', surf.ns.DCAT),
        ('schema', surf.ns.SCHEMA),
        ('foaf', surf.ns.FOAF),
        ('odrs', surf.ns.ODRS),
    )
    for prefix, namespace in prefixes:
        store.reader.graph.bind(prefix, namespace, override=True)
    self._store = store
    return store
def get_store_session():
    """ Return initialized SuRF store and session objects. """
    # maybe we can mock SPARQL endpoint.
    params = {
        "reader": "sparql_protocol",
        "writer": "sparql_protocol",
        "endpoint": "http://localhost:9999/blazegraph/sparql",
        "use_subqueries": True,
        "combine_queries": False,
    }
    # Verbose logging helps when debugging queries against the endpoint.
    surf.log.setup_logger()
    surf.log.set_logger_level(logging.DEBUG)
    store = surf.Store(**params)
    session = surf.Session(store)
    # Fresh start!
    store.clear("http://surf_test_graph/dummy2")
    store.clear()
    return store, session
def __init__(self, url):
    """Build an in-memory rdflib store/session and load *url* into it."""
    backend = surf.Store(reader='rdflib',
                         writer='rdflib',
                         rdflib_store='IOMemory')
    self.store = backend
    # Map the SOER NationalStory class onto our Python wrapper.
    self.session = surf.Session(
        backend, mapping={surf.ns.SOER.NationalStory: NationalStory})
    self.loadUrl(url)
def _get_store_session(self, use_default_context=True):
    """ Return initialized SuRF store and session objects.

    :param use_default_context: when True, configure the store with a
        default graph context. Previously this flag was silently ignored
        because of a hard-coded ``if True:``.
    """
    # FIXME: take endpoint from configuration file,
    # maybe we can mock SPARQL endpoint.
    kwargs = {
        "reader": "sparql_protocol",
        "writer": "sparql_protocol",
        "endpoint": "http://localhost:9980/sparql",
        "use_subqueries": True,
        "combine_queries": True,
    }
    # Honor the parameter instead of the previous hard-coded `if True:`.
    if use_default_context:
        kwargs["default_context"] = "http://surf_test_graph/dummy2"
    store = surf.Store(**kwargs)
    session = surf.Session(store)
    # Fresh start!
    store.clear("http://surf_test_graph/dummy2")
    store.clear(URIRef("http://my_context_1"))
    store.clear(URIRef("http://other_context_1"))
    # store.clear()
    return store, session
def _get_store_session(self, use_default_context=True):
    """ Return initialized SuRF store and session objects. """
    params = dict(
        reader="virtuoso_protocol",
        writer="virtuoso_protocol",
        endpoint=ENDPOINT,
        use_subqueries=True,
        combine_queries=True,
        default_write_context=self.CONTEXT,
    )
    if use_default_context:
        params["default_context"] = self.CONTEXT
    store = surf.Store(**params)
    session = surf.Session(store)
    # Fresh start!
    store.clear(self.CONTEXT)
    Person = session.get_class(surf.ns.FOAF + "Person")
    # Seed a few well-known people so the tests have data to query.
    for name in ["John", "Mary", "Jane"]:
        individual = session.get_resource("http://%s" % name, Person)
        individual.foaf_name = name
        individual.save()
    return store, session
def get_rdflib_store(db_conn, load_triples=True, force_create=False):
    """Open (creating or repairing if needed) a Sleepycat-backed SuRF store.

    Triples are (re)loaded from the configured sources when the database
    was just created or *force_create* is set, provided *load_triples*.
    """
    backend = rdflib.plugin.get('Sleepycat', rdflib.store.Store)()
    # rdflib can create necessary structures if the store is empty
    status = backend.open(db_conn, create=False)
    fresh = False
    if status == rdflib.store.NO_STORE:
        backend.open(db_conn, create=True)
        fresh = True
    elif status == rdflib.store.CORRUPTED_STORE:
        # Corrupted database: wipe it and start over.
        backend.destroy(db_conn)
        backend.open(db_conn, create=True)
        fresh = True
    surfstore = surf.Store(reader='rdflib',
                           writer='rdflib',
                           rdflib_store=backend)
    if (fresh or force_create) and load_triples:
        sources = [
            os.path.join(settings.PROJECT_ROOT, "stores", "researchareas.rdf"),
            'http://www.eionet.europa.eu/gemet/gemet-backbone.rdf',
            'http://www.eionet.europa.eu/gemet/gemet-skoscore.rdf',
            'http://www.eionet.europa.eu/gemet/gemet-groups.rdf?langcode=en',
            'http://www.eionet.europa.eu/gemet/gemet-groups.rdf?langcode=it',
            'http://www.eionet.europa.eu/gemet/gemet-definitions.rdf?langcode=en',
            'http://www.eionet.europa.eu/gemet/gemet-definitions.rdf?langcode=it',
            'http://dublincore.org/2010/10/11/dcterms.rdf',
            'http://www.geonames.org/ontology/ontology_v3.01.rdf',
        ]
        for source in sources:
            if not surfstore.load_triples(source=source):
                raise Exception('Cannot load %s' % source)
    return surfstore
def __init__(self, config_file):
    """Initialize the knowledge base from an INI-style *config_file*.

    Reads the ``[surf]`` section into store parameters, builds the SuRF
    store and session and, for in-memory (rdflib) stores, parses the
    bundled knowledge-base sources into the graph.

    TODO: read default configuration file if none is provided
    """
    self._authors = None
    self._works = None
    self._author_abbreviations = None
    self._work_abbreviations = None
    config = configparser.ConfigParser()
    # readfp() is deprecated; use read_file() and close the handle via `with`.
    with open(config_file) as config_handle:
        config.read_file(config_handle)
    self._store_params = dict(config.items("surf"))
    if 'port' in self._store_params:
        # force the `port` to be an integer
        self._store_params['port'] = int(self._store_params['port'])
    self._store = surf.Store(**self._store_params)
    self._session = surf.Session(self._store, {})
    if 'rdflib_store' in self._store_params:
        # In-memory store: load the packaged RDF sources into the graph.
        basedir = pkg_resources.resource_filename('knowledge_base', 'data/kb/')
        sources = [
            "%s%s" % (basedir, file)
            for file in self._store_params["knowledge_base_sources"].split(",")
        ]
        source_format = self._store_params["sources_format"]
        for source_path in sources:
            self._store.writer._graph.parse(source=source_path,
                                            format=source_format)
    # NOTE(review): log placement relative to the `if` inferred — confirm.
    logger.info("The KnowledgeBase contains %i triples" % self._store.size())
    self._register_namespaces()
    self._register_mappings()
def test_get_by(self):
    """ Test Resource.get_by() method. """
    session = surf.Session(surf.Store())
    Person = session.get_class(ns.FOAF['Person'])
    # Querying by attribute must not raise.
    Person.get_by(foaf_name=u"John")
def setUp(self):
    """ Prepare store and session. """
    # Hack to make RDFQueryReader available as it was provided by plugin.
    surf.store.__readers__["query_reader"] = query_reader.RDFQueryReader
    self.store = surf.Store(use_subqueries=True, reader="query_reader")
    self.session = surf.Session(self.store)
def test_rdflib_load():
    """Loading the card fixture should yield exactly 76 triples."""
    rdf_store = surf.Store(reader="rdflib",
                           writer="rdflib",
                           rdflib_store="IOMemory")
    print("Load RDF data")
    rdf_store.load_triples(source=_card_file)
    assert len(rdf_store) == 76
def connect(self, config):
    """Open an AllegroGraph-backed SuRF session using *config* credentials."""
    params = dict(
        reader='allegro_franz',
        writer='allegro_franz',
        server=config['AGRAPH_HOST'],
        port=config['AGRAPH_PORT'],
        catalog='ewetasker',
        repository='ewetasker-db',
    )
    store = surf.Store(**params)
    print('Create the session')
    return surf.Session(store, {})
def test_get_by():
    """ Test Resource.get_by() method. """
    session = surf.Session(surf.Store())
    Person = session.get_class(ns.FOAF['Person'])
    found = Person.get_by(foaf_name=u"John")
    # The query result is either a plain list or a lazy ResultProxy.
    assert isinstance(found, (list, ResultProxy))
def default_session():
    """ Prepare store and session. """
    # Hack to make RDFQueryReader available as it was provided by plugin.
    surf.plugin.manager.__readers__["query_reader"] = \
        query_reader.RDFQueryReader
    query_store = surf.Store(use_subqueries=True, reader="query_reader")
    return surf.Session(query_store)
def test_rdflib_query():
    """Querying the card fixture should find exactly one FOAF person."""
    rdf_store = surf.Store(reader="rdflib",
                           writer="rdflib",
                           rdflib_store="IOMemory")
    session = surf.Session(rdf_store)
    rdf_store.load_triples(source=_card_file)
    Person = session.get_class(surf.ns.FOAF["Person"])
    people = Person.all()
    assert len(people) == 1
    assert people.one().foaf_name.first == Literal(u'Timothy Berners-Lee')
def test_auto_load(self):
    """ Test that session.auto_load works. """
    session = surf.Session(surf.Store(reader="rdflib", writer="rdflib"),
                           auto_load=True)
    Person = session.get_class(surf.ns.FOAF.Person)
    john = Person()
    john.foaf_name = "John"
    john.save()
    # Re-instantiating the same subject should eagerly load its data.
    reloaded = Person(john.subject)
    # Check that rdf_direct is filled
    self.assertTrue(surf.ns.FOAF.name in reloaded.rdf_direct)
def schematize(store):
    """Return a new in-memory store holding only the schema.org triples
    (by predicate or object) found in *store*."""
    out = surf.Store(reader='rdflib', writer='rdflib',
                     rdflib_store='IOMemory')
    for subject, predicate, value in store.reader.graph:
        if (value.startswith('http://schema.org')
                or predicate.startswith('http://schema.org')):
            out.add_triple(subject, predicate, value)
    return out
def __init__(self):
    """Create the query and metadata AllegroGraph stores and a session
    for each, with reasoning enabled on both."""
    common = dict(reader='allegro_franz', writer='allegro_franz',
                  reasoning=True)
    self.store = surf.Store(server=Config.query_server,
                            port=Config.query_port,
                            username=Config.query_username,
                            password=Config.query_password,
                            catalog=Config.query_catalog,
                            repository=Config.query_repository,
                            **common)
    self.main_session = surf.Session(self.store, {})
    self.metadatastore = surf.Store(server=Config.metadata_server,
                                    port=Config.metadata_port,
                                    username=Config.metadata_username,
                                    password=Config.metadata_password,
                                    catalog=Config.metadata_catalog,
                                    repository=Config.metadata_repository,
                                    **common)
    self.meta_session = surf.Session(self.metadatastore, {})
def json_serialize(res):
    """Serialize the triples in *res* to JSON-LD using a fixed prefix context."""
    store = surf.Store(reader='rdflib', writer='rdflib',
                       rdflib_store='IOMemory')
    for subject, predicate, value in res:
        store.add_triple(subject, predicate, value)
    context = {
        "odrs": "http://schema.theodi.org/odrs#",
        "dct": "http://purl.org/dc/terms/",
        "rdfs": "http://www.w3.org/2000/01/rdf-schema#",
        "dcat": "http://www.w3.org/ns/dcat#",
    }
    return store.reader.graph.serialize(format='json-ld', context=context)
def test_exceptions(self):
    """ Test that exceptions are raised on invalid queries. """
    store = surf.Store(reader="sparql_protocol",
                       writer="sparql_protocol",
                       endpoint="invalid")
    query = select("?a")
    # Reading through a bogus endpoint must raise the reader exception.
    self.assertRaises(SparqlReaderException, lambda: store.execute(query))
    # Writing must likewise surface the writer exception.
    self.assertRaises(SparqlWriterException,
                      lambda: store.add_triple("?s", "?p", "?o"))
def make_store():
    """ store initialization """
    store = surf.Store(reader='rdflib', writer='rdflib',
                       rdflib_store='IOMemory')
    # Register the well-known namespace prefixes on the underlying graph.
    prefixes = (
        ('dc', surf.ns.DC),
        ('dcterms', surf.ns.DCTERMS),
        ('eea', surf.ns.EEA),
        ('geo', surf.ns.GEO),
        ('owl', surf.ns.OWL),
        ('dcat', surf.ns.DCAT),
        ('schema', surf.ns.SCHEMA),
        ('foaf', surf.ns.FOAF),
        ('article', surf.ns.ARTICLE),
    )
    for prefix, namespace in prefixes:
        store.reader.graph.bind(prefix, namespace, override=True)
    return store
def load_objects_from_rdf(self):
    """Load every URI in self.uris into a fresh in-memory store and
    return all fully-loaded instances of self.object_class."""
    store = surf.Store(reader='rdflib', writer='rdflib',
                       rdflib_store='IOMemory')
    session = surf.Session(store)
    # Pull the remote RDF data into the local graph.
    for uri in self.uris:
        store.load_triples(source=uri)
    Object = session.get_class(self.object_class)
    return Object.all().full()
def test_rdflib_store():
    """ Create a SuRF rdflib based store """
    # No default_context is configured for the in-memory rdflib backend.
    kwargs = {"reader": "rdflib", "writer": "rdflib"}
    try:
        store = surf.Store(**kwargs)
        session = surf.Session(store)
        # clean store
        store.clear()
    except Exception as e:
        pytest.fail(error_message(e), pytrace=True)
def test_dict_access(self):
    """ Test that resources support dictionary-style attribute access. """
    session = surf.Session(surf.Store(reader="rdflib"))
    Person = session.get_class(surf.ns.FOAF.Person)
    person = Person()
    person.foaf_name = "John"
    # Reading — assertEquals is a deprecated alias; use assertEqual.
    self.assertEqual(person["foaf_name"].first, "John")
    self.assertEqual(person[surf.ns.FOAF.name].first, "John")
    # Writing
    person["foaf_name"] = "Dave"
    self.assertEqual(person.foaf_name.first, "Dave")
    # Deleting
    del person["foaf_name"]
    self.assertIsNone(person.foaf_name.first)
def _get_store_session(self, use_default_context=True):
    """ Return initialized SuRF store and session objects. """
    # FIXME: take endpoint from configuration file,
    # maybe we can mock SPARQL endpoint.
    # NOTE(review): *use_default_context* is currently ignored — no
    # default_context is configured for the rdflib backend.
    store = surf.Store(reader="rdflib", writer="rdflib")
    session = surf.Session(store)
    # Fresh start!
    store.clear()
    return store, session
def __setstate__(self, state):
    """Restore pickled state and rebuild the unpicklable store/session.

    The parameter was renamed from ``dict`` (which shadowed the builtin)
    to ``state``; pickle invokes ``__setstate__`` positionally, so
    callers are unaffected.
    """
    self.__dict__.update(state)
    self._store = surf.Store(**self._store_params)
    self._session = surf.Session(self._store, {})
    # don't forget to reload the triples if it's an in-memory store!
    if 'rdflib_store' in self._store_params:
        basedir = pkg_resources.resource_filename('knowledge_base',
                                                  'data/kb/')
        sources = [
            "%s%s" % (basedir, file)
            for file in self._store_params["knowledge_base_sources"].split(",")
        ]
        source_format = self._store_params["sources_format"]
        for source_path in sources:
            self._store.writer._graph.parse(source=source_path,
                                            format=source_format)
    # NOTE(review): log placement relative to the `if` inferred — confirm.
    logger.info("The KnowledgeBase contains %i triples" % self._store.size())
    self._register_namespaces()
    self._register_mappings()
def test_dict_access():
    """ Test that resources support dictionary-style attribute access. """
    session = surf.Session(surf.Store(reader="rdflib"))
    Person = session.get_class(surf.ns.FOAF.Person)
    someone = Person()
    someone.foaf_name = "John"
    # Reading: both the shorthand key and the full URI must work.
    assert someone["foaf_name"].first == Literal(u"John")
    assert someone[surf.ns.FOAF.name].first == Literal(u"John")
    # Writing
    someone["foaf_name"] = "Dave"
    assert someone.foaf_name.first == Literal(u"Dave")
    # Deleting
    del someone["foaf_name"]
    assert someone.foaf_name.first is None
def initialize_rdflib(engine=None, mysql_connstr=None, virtuoso_connstr=None,
                      clear=False, logging=True):
    """Register the SPARQL plugins, build the Virtuoso-backed SuRF
    store/session and initialize the model.

    NOTE(review): *engine*, *mysql_connstr* and *virtuoso_connstr* are
    currently unused (the alternate backends are disabled); they are kept
    for interface compatibility. The *logging* parameter also shadows the
    stdlib module name within this function.
    """
    rdflib.plugin.register('sparql', rdflib.query.Processor,
                           'rdfextras.sparql.processor', 'Processor')
    rdflib.plugin.register('sparql', rdflib.query.Result,
                           'rdfextras.sparql.query', 'SPARQLQueryResult')
    rdf_store = surf.Store(reader='virtuoso_protocol',
                           writer='virtuoso_protocol',
                           endpoint='http://localhost:8890/sparql',
                           default_context='http://default')
    if clear:
        rdf_store.clear()
    # the surf session
    rdf_session = surf.Session(rdf_store, {})
    rdf_session.enable_logging = logging
    init_model(rdf_session)
def _get_store_session(self, use_default_context=True):
    """ Return initialized SuRF store and session objects. """
    # FIXME: take endpoint from configuration file,
    params = dict(
        reader="sesame2",
        writer="sesame2",
        server="localhost",
        port=8080,
        root_path="/openrdf-sesame",
        repository="test",
    )
    if use_default_context:
        params["default_context"] = "http://surf_test_graph/dummy2"
    store = surf.Store(**params)
    session = surf.Session(store)
    # Fresh start: wipe every context the tests touch, then the whole store.
    for context in (URIRef("http://surf_test_graph/dummy2"),
                    URIRef("http://my_context_1"),
                    URIRef("http://other_context_1")):
        store.clear(context)
    store.clear()
    return store, session