예제 #1
0
 def __init__(self, chainsupplier, instance, dbname, clear):
     """Connect to the repository, set up URI namespaces and keep the supplier.

     Credentials and endpoint are taken from the *instance* URL; when *clear*
     is true the store is wiped, unused indices are dropped and the RDF model
     file is loaded.
     """
     parsed = urlparse(instance)
     self._dbname = dbname
     # Connection options derived from the instance URL; the store is
     # created on demand and optionally cleared.
     self._connargs = dict(
         host=parsed.hostname,
         port=parsed.port,
         user=parsed.username,
         password=parsed.password,
         create=True,
         clear=clear,
         autocommit=True,
     )
     self._connection = ag_connect(self._dbname, **self._connargs)
     # Batch adds into large commits to speed up bulk loading.
     self._connection.setAddCommitSize(10000)
     self._mns = self._connection.namespace(BTC_RDF_MODEL)
     self._dns = self._connection.namespace(BTC_RDF_DATA)
     self._supplier = chainsupplier
     if not clear:
         return
     # Fresh store: drop the index flavors this application never queries.
     for index_name in ("gpsoi", "gposi", "gospi", "gopsi", "gspoi"):
         self._connection.dropIndex(index_name)
     # Register prefixes and load the model into the repository.
     self._connection.setNamespace('btcm', BTC_RDF_MODEL)
     self._connection.setNamespace('btcd', BTC_RDF_DATA)
     self._connection.addFile('model.ttl')
예제 #2
0
def test_ag_connect_recreate(conn):
    """Reconnecting with clear=True must wipe previously added triples."""
    store = conn.repository.database_name
    with conn:
        conn.addTriple('<http://franz.com/s>', '<http://franz.com/p>',
                       '<http://franz.com/o>')
    cleared = ag_connect(store, clear=True, **common_args)
    with cleared as conn:
        assert conn.size() == 0
예제 #3
0
    def alt_connect_to_kabob(self):
        """Open a connection to the KaBOB release repository and report its size.

        Uses the fixed settings from KaBOB_Constants; the store must already
        exist (create=False) and is not cleared.
        """
        settings = {
            'host': KaBOB_Constants.HOST,
            'port': KaBOB_Constants.PORT,
            'user': KaBOB_Constants.USER,
            'password': KaBOB_Constants.PASSWORD,
            'create': False,
            'clear': False,
        }
        self.kabob = ag_connect(KaBOB_Constants.RELEASE, **settings)

        print('Statements in KaBOB:', self.kabob.size())
    def __init__(self, kg_config_path):
        """Read connection settings from an ini file and open the repository."""
        config = ConfigParser()
        config.read(kg_config_path)

        # URI template for entities in the local knowledge graph.
        self.prefix = "<http://localkg.cn/entity/%s>"
        self.conn = ag_connect(
            repo=config.get("repository", "repo"),
            catalog=config.get("repository", "catalog"),
            host=config.get("server", "host"),
            user=config.get("account", "user"),
            password=config.get("account", "password"),
        )
예제 #5
0
 def __init__(self, instance, dbname, clear):
     """Parse the instance URL and open (creating on demand) the repository."""
     parts = urlparse(instance)
     self._dbname = dbname
     # Credentials and endpoint come from the URL; *clear* optionally wipes
     # the store on connect.
     self._connargs = dict(
         host=parts.hostname,
         port=parts.port,
         user=parts.username,
         password=parts.password,
         create=True,
         clear=clear,
     )
     self._connection = ag_connect(self._dbname, **self._connargs)
예제 #6
0
def my_ag_conn(clear=False):
    """Return the module-wide AllegroGraph connection, creating it lazily.

    Connection settings come from SEMANTIC_DESKTOP_AGRAPH_* environment
    variables; common namespaces are registered once on first connect.
    """
    global conn
    if conn:
        return conn
    host = os.environ['SEMANTIC_DESKTOP_AGRAPH_HOST']
    port = os.environ['SEMANTIC_DESKTOP_AGRAPH_PORT']
    log = logging.getLogger(__name__)
    log.info(f'connecting to {host}:{port}...')
    conn = ag_connect(repo=os.environ['SEMANTIC_DESKTOP_AGRAPH_REPO'],
                      host=host,
                      port=port,
                      user=os.environ['SEMANTIC_DESKTOP_AGRAPH_USER'],
                      password=os.environ['SEMANTIC_DESKTOP_AGRAPH_PASS'],
                      clear=clear)
    log.info(f'connected.')
    ensure_common_namespaces_are_defined(conn)
    return conn
예제 #7
0
def ag_init(repo_name):
    """
    Initialize and connect to an Allegrograph database.
    https://franz.com/agraph/support/documentation/6.4.0/python/index.html

    :param repo_name: name of the RDFstore record (whitespace is stripped)
        whose saved settings are used to connect; the process exits when the
        client library or the record cannot be found.
    """
    repo_name = repo_name.strip()

    try:
        from franz.openrdf.connect import ag_connect
    except ImportError:
        # Narrowed from a bare except: only a missing client library should
        # trigger this message (a bare except also caught SystemExit etc.).
        print("\nCould not find the AllegroGraph client.\n")
        exit(1)

    session = Session()
    rec = session.query(RDFstore).filter_by(name=repo_name).first()

    # `is None` is the correct identity check (was `== None`).
    if rec is None:
        print("\nCould not find a RDF repository named: " + repo_name)
        exit(1)

    print('Checking AllegroGraph connections.\n')
    # Set environment variables for AllegroGraph
    os.environ['AGRAPH_HOST'] = rec.host
    os.environ['AGRAPH_PORT'] = rec.port
    os.environ['AGRAPH_USER'] = rec.user
    os.environ['AGRAPH_PASSWORD'] = rec.pw

    try:
        with ag_connect(rec.dbname,
                        host=os.environ.get('AGRAPH_HOST'),
                        port=os.environ.get('AGRAPH_PORT'),
                        user=os.environ.get('AGRAPH_USER'),
                        password=os.environ.get('AGRAPH_PASSWORD')) as conn:
            # Start from an empty store, then load the reference model files
            # server-side.
            conn.clear(contexts='ALL_CONTEXTS')
            print('Initial Kunteksto RDF Repository Size: ', conn.size(), '\n')
            print("Loading RM OWL and RDF.")
            conn.addFile(os.path.join(rmdir, 's3model.owl'), serverSide=True)
            conn.addFile(os.path.join(rmdir, 's3model_' + RMVERSION + '.rdf'),
                         serverSide=True)
            print('Current Kunteksto RDF Repository Size: ', conn.size(), '\n')
            print('AllegroGraph connections are okay.\n')
            print(
                "Remember to upload the Data Model RDF file(s) after exporting them.\n\n"
            )
    except Exception:
        # Narrowed from a bare except: report the failure while letting
        # KeyboardInterrupt/SystemExit propagate normally.
        print(
            "Could not establish a connection to AllegroGraph. Check to see that the server is running and the RDF repository values are correct.\n\n"
        )
예제 #8
0
def get_data():
    """Page through all syllabi (1000 per page) and record the similarity of
    every distinct pair of syllabi within each page.

    NOTE(review): Python 2 code (print statements). `offset` starts at 1586,
    presumably to resume an interrupted run -- confirm. `while (limit)` never
    becomes falsy on its own; the loop exits via the `break` below once
    `offset` passes the total syllabus count.
    """

    offset = 1586
    limit = 1000
    numero_total_de_silabos = 0
    silabos_procesados = 1586
    # First connection: used only to count how many syllabi exist in total.
    with ag_connect(allegroREPOSITORY,
                    host=allegroHOST,
                    create=False,
                    clear=False,
                    port=allegroPORT,
                    user=allegroUSER,
                    password=allegroPASSWORD) as conn:
        get_numero_de_silabos_query = str(get_count_syllabus())
        result_num_syllabus = executeSparql(get_numero_de_silabos_query, conn)
        # The count comes back as a quoted literal; take the text between quotes.
        numero_total_de_silabos = int(
            result_num_syllabus['values'][0][0].split('"')[1])
        conn.close()  # NOTE(review): redundant -- the with block closes it anyway

    while (limit):

        # A fresh connection per page of work.
        with ag_connect(allegroREPOSITORY,
                        host=allegroHOST,
                        create=False,
                        clear=False,
                        port=allegroPORT,
                        user=allegroUSER,
                        password=allegroPASSWORD) as conn:
            # Fetch the current page of syllabus titles.
            get_syllabus_query = str(get_title_query()).format(offset, limit)
            result = executeSparql(get_syllabus_query, conn)
            num_silabo_por_proceso = 0
            for silabosA in result['values']:
                print "Silabo procesado: " + str(silabos_procesados)
                json_to_send = []
                jsonsilaboA = get_content("0", silabosA, conn)
                json_to_send.append(jsonsilaboA)

                # Pair A with every syllabus from A's position onward in this page.
                for silabosB in result['values'][
                        num_silabo_por_proceso:len(result['values'])]:
                    if (
                            silabosA[0] != silabosB[0]
                    ):  # syllabus (URI) B must not be the same as syllabus (URI) A
                        jsonsilaboB = get_content("1", silabosB, conn)
                        json_to_send.append(
                            jsonsilaboB
                        )  # at this point both syllabi are in the JSON payload
                        #print str(silabosA[1].encode('utf-8').strip() + " -- vs -- " + silabosB[1].encode('utf-8').strip())
                        #print json_to_send
                        try:
                            similarity = json.loads(
                                get_similarity(json_to_send))['value']
                            # Persist the score for this (A, B) pair; URIs are
                            # stored with their surrounding angle brackets stripped.
                            save_similarity(
                                offset, limit,
                                silabosA[0][1:-1].encode('utf-8').strip(),
                                silabosB[0][1:-1].encode('utf-8').strip(),
                                similarity)
                        except ValueError:
                            # Similarity service returned non-JSON; log and continue.
                            save_error(
                                "Error  en consulta de similitud o construccion del JSON. Silabos que se estaban analizando:"
                                + silabosA[0][1:-1].encode('utf-8').strip() +
                                " vs " +
                                silabosB[0][1:-1].encode('utf-8').strip() +
                                " - Silabo procesado: " +
                                str(silabos_procesados))
                            print "Error  en consulta de similitud o construccion del JSON. Silabos que se estaban analizando:" + silabosA[
                                0][1:-1].encode('utf-8').strip(
                                ) + " vs " + silabosB[0][1:-1].encode(
                                    'utf-8').strip(
                                    ) + " - Silabo procesado: " + str(
                                        silabos_procesados)
                            # error detected when a chapter has no subchapters; should
                            # be handled by comparing similarity of chapter titles.
                        # Reset the payload so the next B is paired with A only.
                        json_to_send = []
                        json_to_send.append(jsonsilaboA)

                num_silabo_por_proceso = num_silabo_por_proceso + 1
                silabos_procesados = silabos_procesados + 1
        #print json_to_send
        conn.close()  # NOTE(review): redundant -- the with block already closed it
        if (offset > numero_total_de_silabos):
            break
        # Advance to the next page of 1000 syllabi.
        offset = offset + 1000
        limit = limit + 1000
예제 #9
0
# This document contains definitions of functions and variables used 
# in multiple tutorial examples. Each code fragment is actually passed to Sphinx
# twice:
#   - This whole file is imported in a hidden, global setup block
#   - The example in which the function is defined includes its code from this
#     file.
#
# The reason for all this is that definitions in one document, even if contained 
# in a 'testsetup:: *' block, are not visible in other documents. The global 
# setup block is the only way of sharing definitions, but we still want them
# presented in the appropriate part of the tutorial.
#
# Each fragment is surrounded by BEGIN-<NAME> and END-<NAME> markers to allow
# easy inclusions through the literalinclude directive.

# BEGIN-CONNECT
from franz.openrdf.connect import ag_connect

# Open the tutorial repository, creating it if missing and clearing any
# triples left over from a previous run.
conn = ag_connect('python-tutorial', create=True, clear=True)
# END-CONNECT

예제 #10
0
# This document contains definitions of functions and variables used
# in multiple tutorial examples. Each code fragment is actually passed to Sphinx
# twice:
#   - This whole file is imported in a hidden, global setup block
#   - The example in which the function is defined includes its code from this
#     file.
#
# The reason for all this is that definitions in one document, even if contained
# in a 'testsetup:: *' block, are not visible in other documents. The global
# setup block is the only way of sharing definitions, but we still want them
# presented in the appropriate part of the tutorial.
#
# Each fragment is surrounded by BEGIN-<NAME> and END-<NAME> markers to allow
# easy inclusions through the literalinclude directive.

# BEGIN-CONNECT
from franz.openrdf.connect import ag_connect

# Open the tutorial repository, creating it if missing and clearing any
# triples left over from a previous run.
conn = ag_connect('python-tutorial', create=True, clear=True)
# END-CONNECT
예제 #11
0
def test_ag_connect_create_exists():
    """Creating an already-existing store with fail_if_exists must raise."""
    kwargs = dict(common_args, create=True, fail_if_exists=True)
    with pytest.raises(Exception):
        ag_connect(STORE, **kwargs)
예제 #12
0
File: app.py  Project: ppsirg/puj_dbs
import os
from fastapi import FastAPI
from franz.openrdf.connect import ag_connect
from populate import check_data
from queries import search

app = FastAPI()

# WAIT_HOSTS set -> reach the database by its service name, otherwise use
# localhost. NOTE(review): presumably WAIT_HOSTS is set by the container
# orchestration -- confirm against the deployment config.
host = 'allegrograph-db' if os.getenv('WAIT_HOSTS') else 'localhost'

# Connect to the 'try1' repository and seed it from the bundled data file at
# import time (module-level side effect).
conn = ag_connect('try1', host=host, port=10035, user='******', password='******')
check_data(conn, 'data.txt')


@app.get('/list/hashtag/{hashtag}')
def list_hashtag(hashtag: str):
    """
    Listar todos los mensajes de un hashtag dado en orden cronológico.
    """
    query = """prefix  res:   <http://example.com/resource/>
        prefix  ex:    <http://example.com/>
        prefix  class: <http://example.com/class/>
        prefix  prop:  <http://example.com/property/>
        prefix  rdf:   <http://www.w3.org/1999/02/22-rdf-syntax-ns#>

        SELECT ?message ?txt ?ht ?dt WHERE {
        ?message rdf:type class:Message. 
        ?message prop:content ?txt. 
        ?message prop:hashtag ?ht.
        ?message prop:date ?dt 
        FILTER( ?ht = "#--hashtag--")}
예제 #13
0
def test_ag_connect_open_no_create(non_existing_repo):
    """Opening a missing repository without create=True must fail."""
    opts = dict(common_args, create=False)
    with pytest.raises(Exception):
        ag_connect(non_existing_repo, **opts)
예제 #14
0
def test_ag_connect_fail_if_exists_but_not_create(repo_name):
    """fail_if_exists has no effect when create=False; the open succeeds."""
    connection = ag_connect(repo_name, create=False, fail_if_exists=True,
                            **common_args)
    with connection as conn:
        assert conn.size() == 0
예제 #15
0
def test_ag_connect_create(non_existing_repo):
    """A new repository is created on demand and starts out empty."""
    connection = ag_connect(non_existing_repo, create=True, **common_args)
    with connection as conn:
        assert conn.size() == 0
예제 #16
0
def test_ag_connect_open(repo_name):
    """Opening an existing repository succeeds and the store is empty."""
    with ag_connect(repo_name, create=False, **common_args) as connection:
        assert connection.size() == 0
예제 #17
0
def test_ag_connect_session(repo_name):
    """session=True must hand back a connection with an active session."""
    session_conn = ag_connect(repo_name, create=False, session=True,
                              **common_args)
    with session_conn as conn:
        assert conn.is_session_active