def setUp(self):
    """Create and open a fresh SQLAlchemy-backed graph, seeded with one triple."""
    # Make sure the "SQLAlchemy" store plugin is available before get().
    registerplugins()
    backing_store = plugin.get("SQLAlchemy", Store)(identifier=self.ident)
    self.graph = Graph(backing_store, identifier=self.ident)
    self.graph.open(self.uri, create=True)
    # Seed the store with a single reified-statement subject triple.
    statement_node = BNode()
    self.graph.add((statement_node, RDF.subject, URIRef("http://www.google.com/")))
def test_registerplugins(self):
    """Exercise registerplugins(), including the re-registration path.

    I doubt this is quite right for a fresh pip installation; this test is
    mainly here to fill a coverage gap.
    """
    registerplugins()
    # FIX: `self.assert_` is deprecated and removed in Python 3.12;
    # use the modern assertIsNotNone / assertIn equivalents instead.
    self.assertIsNotNone(plugin.get("SQLAlchemy", Store))
    registry = plugin._plugins
    self.assertIn(("SQLAlchemy", Store), registry, registry)
    # Unregister the plugin, then confirm registerplugins() restores it.
    del registry[("SQLAlchemy", Store)]
    plugin._plugins = registry
    registerplugins()
    self.assertIn(("SQLAlchemy", Store), registry, registry)
def test_registerplugins(self):
    """Exercise registerplugins(), including the re-registration path.

    I doubt this is quite right for a fresh pip installation; this test is
    mainly here to fill a coverage gap.
    """
    from rdflib_sqlalchemy import registerplugins
    from rdflib import plugin
    from rdflib.store import Store

    registerplugins()
    # FIX: `self.assert_` is deprecated and removed in Python 3.12;
    # use the modern assertIsNotNone / assertIn equivalents instead.
    self.assertIsNotNone(plugin.get('SQLAlchemy', Store))
    registry = plugin._plugins
    self.assertIn(('SQLAlchemy', Store), registry, registry)
    # Unregister the plugin, then confirm registerplugins() restores it.
    del registry[('SQLAlchemy', Store)]
    plugin._plugins = registry
    registerplugins()
    self.assertIn(('SQLAlchemy', Store), registry, registry)
def generate_graph():
    """Parse every Lattes page in the cache folder and build an RDF graph.

    Run this using: ``python graph.py <path_to_dir_with_lattes_pages>``.
    The graph is persisted to a SQLite database in the current directory.
    """
    sys.excepthook = exception_handler
    registerplugins()

    # This is our ontology
    schema = Namespace("http://schema.org/version/latest/schema.nt#")
    identifier = URIRef("knowlattes_uriref")

    # Create the Graph, backed by a local SQLite file via rdflib-sqlalchemy.
    SQLALCHEMY_URL = "sqlite:///%(here)s/database.sqlite" % {"here": os.getcwd()}
    print(f"Creating the file to output: {SQLALCHEMY_URL}")
    store = plugin.get("SQLAlchemy", Store)(identifier=identifier)
    graph = Graph(store, identifier=identifier)
    graph.open(SQLALCHEMY_URL, create=True)

    # Collect the Lattes HTML pages, discarding anything that is not one.
    base_path = sys.argv[1]
    lattes_profile_list = all_the_files_in_directory(base_path)
    non_lattes_page = find_non_lattes_pages(base_path, lattes_profile_list)
    lattes_pages = [i for i in lattes_profile_list if i not in non_lattes_page]

    for lattes_page in tqdm(lattes_pages):
        lattes_id = re.sub(".html", "", lattes_page)
        # FIX: use a context manager so the handle is closed even if read() raises
        # (the original open()/read()/close() leaked the handle on error).
        with open(base_path + "/" + lattes_page, "r", encoding="ISO-8859-1") as handle:
            lattes_file = handle.read()
        try:
            lattes_page = ParserLattes(lattes_id, lattes_file)
            add_lattes_reasercher_to_graph(lattes_page, graph, schema)
        except Exception as inst:  # broad on purpose: one bad page must not stop the batch
            print(f"{inst}")
    graph.close()
def setup(graph_id):
    """Open (creating if needed) the Postgres-backed RDF graph.

    Note that this DBURI does not specify host or username: instead, the
    connection is made via UNIX domain sockets, which are self-authenticating.
    To use this, this code needs to be run as the user in question.
    Note also that pip install is in general user-local.
    """
    rdf_store_ident = URIRef(sys.argv[1])
    dburi = Literal('postgresql+psycopg2:///kendraio_facta')
    registerplugins()
    sqlalchemy_store = plugin.get("SQLAlchemy", Store)(identifier=rdf_store_ident)
    graph = Graph(sqlalchemy_store, identifier=URIRef(graph_id))
    graph.open(dburi, create=True)
    return graph, rdf_store_ident
def load_grah(path=None):
    """Open the existing SQLite-backed graph (does not create it).

    Parameters
    ----------
    path : str, optional
        Directory containing ``database.sqlite``; defaults to the CWD.
    """
    registerplugins()
    graph_identifier = URIRef("knowlattes_uriref")
    # Resolve the directory that holds the SQLite database file.
    base_dir = os.getcwd() if path is None else path
    SQLALCHEMY_URL = "sqlite:///%(here)s/database.sqlite" % {"here": base_dir}
    print(f"Reading the file to output: {SQLALCHEMY_URL}")
    sqlalchemy_store = plugin.get("SQLAlchemy", Store)(identifier=graph_identifier)
    graph = Graph(sqlalchemy_store, identifier=graph_identifier)
    graph.open(SQLALCHEMY_URL, create=False)
    return graph
def open(self):
    """Open the SQLAlchemy-backed conjunctive graph, creating it if needed.

    Raises
    ------
    OpenFailError
        If the optional rdflib-sqlalchemy dependency is not installed.
    """
    try:
        from rdflib_sqlalchemy import registerplugins
    except ImportError:
        raise OpenFailError(
            'The rdflib-sqlalchemy package is not installed.'
            ' You may need to install one of "sqlite_source", "mysql_source", or'
            ' "postgresql_source" extra for owmeta_core.'
            ' For example, change "owmeta_core" in your setup.py or'
            ' requirements.txt to "owmeta_core[sqlite_source]" and reinstall'
        )
    registerplugins()
    sqlalchemy_store = plugin.get("SQLAlchemy", Store)(**self._initargs())
    self.graph = ConjunctiveGraph(sqlalchemy_store)
    self.graph.open(self._openconfig(), create=True)
import os from rdflib import plugin, ConjunctiveGraph, Literal, URIRef from rdflib.store import Store from rdflib_sqlalchemy import registerplugins def read_configuration_file(config_file_path): if not os.path.isfile(config_file_path): raise Exception("The config file does not exist!") config_parser = configparser.ConfigParser() config_parser.read(config_file_path) return config_parser registerplugins() config_parser = read_configuration_file('config.ini') server = config_parser.get('POSTGRES', 'server', fallback='localhost') port = config_parser.get('POSTGRES', 'port', fallback='5432') database = config_parser.get('POSTGRES', 'database', fallback='benchmark') password = config_parser.get('POSTGRES', 'password', fallback='mysecretpassword') identifier = URIRef("benchmark") db_uri = Literal('postgresql+psycopg2://postgres:{0}@{1}:{2}/{3}'.format( password, server, port, database)) def do(action):
def setup_rdf_store(ident):
    """Register the SQLAlchemy plugin and return a store bound to *ident*."""
    registerplugins()
    store_cls = plugin.get("SQLAlchemy", Store)
    return store_cls(identifier=ident)
self.graph.commit()

    # (tail of a method whose definition lies above this chunk)
    @classmethod
    def _is_graph_add_exception_acceptable(cls, ex):
        """Checks if a graph-add exception can be safely ignored.
        """
        # integrity errors due to violating unique constraints should be safe
        # to ignore since the only unique constraints in rdflib-sqlalchemy are
        # on index columns
        return 'UNIQUE constraint failed' in text_type(ex)


# Module-level singleton holding the currently active metadata cache object.
_METADATA_CACHE = None

registerplugins()


def set_metadata_cache(cache):
    """Sets the metadata cache object to use.
    """
    global _METADATA_CACHE
    # Close the previous cache (if one is open) before swapping in the new one.
    if _METADATA_CACHE and _METADATA_CACHE.is_open:
        _METADATA_CACHE.close()
    _METADATA_CACHE = cache


def get_metadata_cache():
# limitations under the License.
#
#===============================================================================
#
# A simple knowledge base...
#
#===============================================================================

from contextlib import ContextDecorator

#===============================================================================

import rdflib
import rdflib_sqlalchemy as sqlalchemy

sqlalchemy.registerplugins()

from rdflib.plugins.sparql.results.jsonresults import JSONResultSerializer

#===============================================================================


class KnowledgeBase(rdflib.Graph, ContextDecorator):
    """An rdflib Graph persisted in SQLite via rdflib-sqlalchemy.

    Also a ContextDecorator, so an instance can wrap functions that need
    the knowledge base available.
    """

    def __init__(self, kb_path, create=False):
        # Fixed store identifier shared by every KnowledgeBase instance.
        SPARC = rdflib.URIRef('SPARC')
        backing = rdflib.plugin.get('SQLAlchemy', rdflib.store.Store)(identifier=SPARC)
        super().__init__(backing, identifier=SPARC)
        # SQLite connection URI pointing at the on-disk database file.
        self.open(rdflib.Literal('sqlite:///{}'.format(kb_path)), create=create)