Example No. 1
def query_database(query_type: str, query: str, database: str) -> (bool, dict):
    """
    Process a SELECT or UPDATE query

    :param query_type: A string = 'select' or 'update'
    :param query: The text of a SPARQL query
    :param database: The database (name) to be queried
    :return: True if successful; False otherwise
             Query results (if the query_type is 'select'); An empty dictionary otherwise
    """
    logging.info(
        f'Querying database, {database}, using {query_type}, with query, {query}'
    )
    if query_type != 'select' and query_type != 'update':
        capture_error(f'Invalid query_type {query_type} for query_db', True)
        return False, dict()
    try:
        conn = stardog.Connection(database, **sd_conn_details)
        if query_type == 'select':
            # Select query, which will return results, if successful
            query_results = conn.select(
                query, content_type='application/sparql-results+json')
            if query_results['results']['bindings']:
                return True, query_results
            else:
                return True, dict()
        else:
            # Update query; No results (either success or failure)
            conn.update(query)
            return True, dict()
    except Exception as e:
        capture_error(
            f'Database ({database}) query exception for {query}: {str(e)}',
            True)
        return False, dict()
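
A minimal usage sketch for the helper above, assuming sd_conn_details and capture_error are configured elsewhere in the module as in the snippet; the database name 'test-db' and the query are placeholders, not from the source:

# Hypothetical call against a placeholder database.
success, results = query_database(
    'select',
    'SELECT ?s ?p ?o WHERE { ?s ?p ?o } LIMIT 10',
    'test-db')
if success and results:
    for binding in results['results']['bindings']:
        print(binding['s']['value'])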
Example No. 2
def getTerms(input_term):
    with stardog.Admin(**conn_details) as admin:
        with stardog.Connection('capability', **conn_details) as conn:
            # get all equivalent classes query
            # No. 1 : get all parents
            query_for_parents = """
                SELECT ?entity where {{
                    ?src {0}* <http://stardog.com/{1}> .
                    FILTER(?src != <http://stardog.com/ManufacturingCapabilities>)
                    BIND(?src AS ?entity) .                   
                }}
                """.format(PARENT_OF, input_term)
            result_for_parents = conn.select(query_for_parents, reasoning=True)

            # No. 2 : get all requires
            query_for_requires = """
                SELECT ?entity where {{
                    <http://stardog.com/{1}> {0}* ?tgt .
                    FILTER(?tgt != <http://stardog.com/ManufacturingCapabilities>)
                    BIND(?tgt AS ?entity) .                   
                }}
                """.format(REQUIRES, input_term)
            result_for_requires = conn.select(query_for_requires,
                                              reasoning=True)

            stardog_responses = [result_for_parents, result_for_requires]

            result_list = set()
            for response in stardog_responses:
                for result in response['results']['bindings']:
                    temp_out = result[next(iter(result))]['value']
                    temp_out = temp_out.rsplit("/", 1)[-1]
                    result_list.add(temp_out)

            return result_list
Example No. 3
    def download_ontology(self, save_as, progress_callback=None):
        try:
            assert self.conn_details
            if progress_callback:
                progress_callback("establishing connection ...")

            dbname = self.conn_details['dbname']

            with stardog.Connection(dbname, **self.open_conn_details) as conn:
                if progress_callback: progress_callback("connection OK")
                # conn.export(content_type='application/rdf+xml')  # to get RDF/XML
                contents = conn.export()

                # write to file
                ext = '.ttl'
                if not save_as.endswith(ext):
                    save_as += ext
                if progress_callback:
                    progress_callback("writing to file: " + save_as[-50:])
                with open(save_as, 'wb') as f:
                    f.write(contents)

            if progress_callback:
                progress_callback("dropping database if set so...")
            self.stardog_drop_db_if_set()

            if progress_callback: progress_callback("finished!")
        except Exception as e:
            print(e)
            return str(e)
Example No. 4
    def stardog_create_db_if_set(self):
        # conn_details = {
        #   'endpoint':   self.server_url_var.get(),
        #   'username': '******',
        #   'password': '******',
        #   'schemafile': self.schema_file_var.get(),
        #   'dbname':     self.server_dbname_var.get(),
        #   'createdb':   self.server_db_create_var.get(),
        #   'dropdb':     self.server_db_drop_var.get(),
        # }
        assert self.conn_details

        if self.conn_details['createdb']:
            dbname = self.conn_details['dbname']

            schema_file = stardog.content.File(self.conn_details['schemafile'])
            print('Probably schema_file OK: ', self.conn_details['schemafile'])

            with stardog.Admin(**self.open_conn_details) as admin:
                # check whether the database already exists and, if so, drop it
                try:
                    self.stardog_drop_db_if_set(force=True)
                except Exception:
                    pass

                db = admin.new_database(dbname)
                db = None  # forget pointer
                # init schema
                with stardog.Connection(dbname,
                                        **self.open_conn_details) as conn:
                    conn.begin()
                    conn.add(schema_file)
                    conn.commit()
Example No. 5
    def get_query(self, sparql, prefix):
        conn = stardog.Connection(
            DATASOURCE_YML_CONFIG_FILE["database"]["stardog"]["db"],
            **STARDOG_CONN_DETAILS)

        results = conn.select(sparql,
                              content_type='application/sparql-results+xml')

        return results
Example No. 6
 def runQuery(self, query):
     results = {}
     if self.timeout == 0:
         return query, results
     try:
         with stardog.Connection(self.database, **conn_details) as conn:
             output = conn.select(query)
             results = output.get("results").get("bindings")
             logging.info("Results: " + str(len(results)))
     except Exception as inst:
         logging.error(inst)
     return query, results
Example No. 7
 def post(self):
     if not request.json:
         return jsonify({'status': 'could not get requested data', 'statuscode': 409})
     else:
         query = buildQueryFromJSON(request.json)
         # query Stardog
         with stardog.Connection(dbName, **conn_details) as conn:
             conn.begin()
             print(query)
             results = conn.select(query)
             print(results)
         return results
Example No. 8
def append_rule(message):
    print("new rule received")
    rule_obj = json.loads(message)
    rule = rule_obj['semantic_ruleminer']
    with open("rule_tmp.ttl", "w") as text_file:
        text_file.write(rule)

    for i in range(0, DATABASES):
        with stardog.Connection('db'+str(i), **conn_details) as conn:
            conn.begin()
            conn.add(stardog.content.File('rule_tmp.ttl'))
            conn.commit()
Example No. 9
def stardog_eg_csv(ark):
    conn_details = {
        'endpoint': 'http://stardog.uvadcos.io',
        'username': '******',
        'password': '******'
    }
    with stardog.Connection('db', **conn_details) as conn:
        conn.begin()
        #results = conn.select('select * { ?a ?p ?o }')
        results = conn.paths("PATHS START ?x=<" + ark + "> END ?y VIA ?p",
                             content_type='text/csv')
    with open(root_dir + '/star/test.csv', 'wb') as f:
        f.write(results)

    return
Example No. 10
def create_named_graph(meta, id):
    with open(root_dir + '/star/meta.json', 'w') as f:
        json.dump(meta, f)
    conn_details = {
        'endpoint': 'http://stardog.uvadcos.io',
        'username': '******',
        'password': '******'
    }
    with stardog.Connection('db', **conn_details) as conn:
        conn.begin()
        conn.add(stardog.content.File(root_dir + "/star/meta.json"),
                 graph_uri='http://ors.uvadcos/' + id)
        conn.commit()
    # cmd = 'stardog data add --named-graph http://ors.uvadcos.io/' + id + ' -f JSONLD test "/star/meta.json"'
    # test = os.system(cmd)
    # warnings.warn('Creating named graph returned: ' + str(test))
    return
Example No. 11
def add_remove_data(op_type: str,
                    triples: str,
                    database: str,
                    graph: str = '') -> bool:
    """
    Add or remove data to/from the database/store

    :param op_type: A string = 'add' or 'remove'
    :param triples: A string with the triples to be inserted/removed
    :param database: The database name
    :param graph: An optional named graph in which to insert/remove the triples
    :return: True if successful; False otherwise
    """
    logging.info(
        f'Data {"added to" if op_type == "add" else "removed from"} {database}'
        f'{" and graph, " if graph else ""}{graph}')
    if op_type != 'add' and op_type != 'remove':
        capture_error(f'Invalid op_type {op_type} for add_remove_graph', True)
        return False
    try:
        conn = stardog.Connection(database, **sd_conn_details)
        conn.begin()
        if op_type == 'add':
            # Add to the database
            if graph:
                conn.add(stardog.content.Raw(triples, 'text/turtle'),
                         graph_uri=graph)
            else:
                conn.add(stardog.content.Raw(triples, 'text/turtle'))
        else:
            # Remove from the database
            if graph:
                conn.remove(stardog.content.Raw(triples, 'text/turtle'),
                            graph_uri=graph)
            else:
                conn.remove(stardog.content.Raw(triples, 'text/turtle'))
        conn.commit()
        return True
    except Exception as e:
        capture_error(f'Database ({op_type}) exception: {str(e)}', True)
        return False
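
A brief, hedged usage sketch for add_remove_data; the Turtle string, database name, and graph URI are illustrative placeholders, and sd_conn_details is assumed to be set up as elsewhere in the module:

# Placeholder triple, database name and named graph for illustration only.
ttl = '<urn:ex:subject> <urn:ex:predicate> "some value" .'
if add_remove_data('add', ttl, 'test-db', graph='urn:graph:staging'):
    print('Triples added')
# Later, remove the same triples from the same graph.
add_remove_data('remove', ttl, 'test-db', graph='urn:graph:staging')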
Example No. 12
def create_delete_database(op_type: str, database: str) -> str:
    """
    Create or delete a database. If created, add the DNA ontologies.

    :param op_type: A string = 'create' or 'delete'
    :param database: The database name
    :return: Empty string if successful or the details of an exception
    """
    logging.info(f'Database {database} being {op_type}d')
    if op_type != 'create' and op_type != 'delete':
        capture_error(f'Invalid op_type {op_type} for create_delete_db', True)
        return ''
    try:
        admin = stardog.Admin(**sd_conn_details)
        if op_type == 'create':
            # Create database
            admin.new_database(
                database, {
                    'search.enabled': True,
                    'edge.properties': True,
                    'reasoning': True,
                    'reasoning.punning.enabled': True,
                    'query.timeout': '20m'
                })
            # Load ontologies to the newly created database
            conn = stardog.Connection(database, **sd_conn_details)
            conn.begin()
            logging.info(f'Loading DNA ontologies to {database}')
            _load_directory_to_database(ontol_path, conn)
            _load_directory_to_database(f'{ontol_path}domain-context/', conn)
            conn.commit()
        else:
            # Delete database
            database_obj = admin.database(database)
            database_obj.drop()
        return ''
    except Exception as e:
        return f'Database ({op_type}) exception: {str(e)}'
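
For illustration, one possible way to drive create_delete_database; the database name is a placeholder, and ontol_path and sd_conn_details are assumed to exist as in the snippet:

# Create a database (the DNA ontologies are loaded on success), use it, then drop it.
error = create_delete_database('create', 'dna-test')  # placeholder name
if error:
    print(f'Creation failed: {error}')
else:
    # ... run queries against 'dna-test' here ...
    create_delete_database('delete', 'dna-test')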
Example No. 13
    def create_dataset_database(self, database_name, dataset_path):
        connection_details = {
            'endpoint': self.endpoint,
            'username': self.username,
            'password': self.password
        }

        with stardog.Admin(**connection_details) as admin:
            if database_name in [db.name for db in admin.databases()]:
                admin.database(database_name).drop()
            db = admin.new_database(database_name)

            with stardog.Connection(database_name,
                                    **connection_details) as conn:
                conn.begin()
                conn.add(stardog.content.File(str(dataset_path)))
                conn.commit()

                if conn.size(exact=True) <= 0:
                    admin.database(database_name).drop()
                    raise StardogException('No triples loaded!')

        return database_name
Example No. 14
    def upload_triples(self, triples, progress_callback=None):
        try:
            assert self.conn_details
            if progress_callback:
                progress_callback("establishing connection ...")

            self.stardog_create_db_if_set()

            prefix_str = """
            BASE <http://vstu.ru/poas/se/c_schema_2020-01#>
            PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
            """
            f = lambda s: make_namespace_prefix(
                s, default_prefix=':', known_prefixes={'rdf'})

            dbname = self.conn_details['dbname']

            with stardog.Connection(dbname, **self.open_conn_details) as conn:
                if progress_callback: progress_callback("connection OK")

                for i, trpl in enumerate(triples):
                    # ensure prefixes OK
                    trpl = tuple(f(a) for a in trpl)

                    q = triple_to_sparql_insert(trpl, prefix_str)
                    print('Send: ', trpl, end='')
                    # send the SPARQL UPDATE request
                    results = conn.update(q, reasoning=False)
                    print(' done!')
                    if progress_callback:
                        progress_callback("%d/%d done" %
                                          ((i + 1), len(triples)))
            if progress_callback: progress_callback("100% finished!")
        except Exception as e:
            print(e)
            return str(e)
Example No. 15
def reason_group(message_group, db_i, box, roomtype):
    try:
        with stardog.Connection('db'+str(db_i), **conn_details) as conn:
            task.reason(conn, message_group, db_i, box, roomtype)
    except Exception as e:
        print(e)
Example No. 16
#session = requests.Session()
#session.auth = (SD_USERNAME,SD_PASSWORD)

SD_URL = os.environ.get('STARDOG_URL','http://stardog.uvadcos.io')
SD_USERNAME = os.environ.get('STARDOG_USERNAME')
SD_PASSWORD = os.environ.get('STARDOG_PASSWORD')
HOST_URL = os.environ.get('HOST_URL','')

ORS_URL = os.environ.get("ORS_URL","ors.uvadco.io/")
EVI_PREFIX = 'evi:'
conn_details = {
        'endpoint': SD_URL,
        'username': SD_USERNAME,
        'password': SD_PASSWORD
    }
conn = stardog.Connection('ors', **conn_details)

class EverythingConverter(PathConverter):
    regex = '.*?'

def mint_eg_id(eg):
    '''
    Mints Id for newly created evidence graph
    '''

    r = requests.post(ORS_URL + "shoulder/ark:99999",data = json.dumps(eg))

    if 'created' in r.json():
        return HOST_URL + 'evidence/' + r.json()['created']

def add_eg_to_og_id(ark,eg_id):
    time.sleep(30)

    manager = Manager()
    db_pool = manager.list([True for x in range(0, DATABASES)])

    # create database pool
    for i in range(0, DATABASES):
        try:
            with stardog.Admin(**conn_details) as admin:
                admin.database('db'+str(i)).drop()
        except:
            print("no database to drop")
        print('db'+str(i))
        with stardog.Admin(**conn_details) as admin:
            admin.new_database('db'+str(i), {'index.type': 'Memory'})
            with stardog.Connection('db'+str(i), **conn_details) as conn:
                conn.begin()
                conn.add(stardog.content.File('rule.ttl'))
                conn.commit()

    message_queue = {}
    processes = []
    counter = 0
    start = time.time()
    for message in consumer:
        counter += 1
        plain_message = message.value

        if counter%100==0:
            print("processed: 100 messages in ", time.time()-start)
            start = time.time()
Example No. 18


# Next, there should be a loop that loads all of the triples ...

import stardog

conn_details = {
  'endpoint': 'http://localhost:5820',
  'username': '******',
  'password': '******'
}

with stardog.Admin(**conn_details) as admin:

    with stardog.Connection('с_owl', **conn_details) as conn:

        for trpl in triples:
            q = triple_to_sparql_insert(trpl, prefix_str)
            print('Go...', end='')
            # I am not sure what the method for sending SPARQL UPDATE requests is called; check it. It is definitely not `select`.
            results = conn.update(q, reasoning=False)
            print(' done!')

            # pprint(results)

        # conn.export() returns bytes; strip the b'...' wrapper and unescape newlines
        contents = str(conn.export())
        contents = contents[2:]
        contents = contents[:-1]
        contents = contents.replace('\\n', '\n')
Example No. 19
 def update(self, query: str):
     with stardog.Connection(self.__database, **self.__credentials) as conn:
         conn.update(query)
Example No. 20
 def select(self, query: str, reasoning: bool = False):
     with stardog.Connection(self.__database, **self.__credentials) as conn:
         results = conn.select(query, reasoning=reasoning)
         return results["results"]["bindings"]
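
Examples No. 19 and No. 20 are methods of a larger client class that is not shown; a self-contained sketch of what such a wrapper might look like (the class name and constructor below are assumptions, not from the source):

import stardog


class StardogClient:
    """Hypothetical wrapper holding the private attributes the two methods above rely on."""

    def __init__(self, database: str, endpoint: str, username: str, password: str):
        self.__database = database
        self.__credentials = {'endpoint': endpoint,
                              'username': username,
                              'password': password}

    def update(self, query: str):
        # Run a SPARQL UPDATE inside a short-lived connection.
        with stardog.Connection(self.__database, **self.__credentials) as conn:
            conn.update(query)

    def select(self, query: str, reasoning: bool = False):
        # Run a SELECT query and return only the result bindings.
        with stardog.Connection(self.__database, **self.__credentials) as conn:
            results = conn.select(query, reasoning=reasoning)
            return results["results"]["bindings"]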
Example No. 21
class ConnectionFactory():
    '''
    A class used to represent and manage a Stardog database connection
    
    Attributes
    ----------
    
    connectionPool : dict
        A dict with three items used to connect to stardog.
            endpoint : str
                the host where stardog is running. Default = 'http://localhost:5820'
            username : str
                the username used to log in to the database. Default = 'admin'
            password : str
                the password used to log in to the database. Default = 'admin'
    '''
    connectionPool = {
        'endpoint': 'http://192.168.56.104:5820',
        'username': '******',
        'password': '******'
    }

    connectionPoolUfes = {
        'endpoint': 'http://200.137.66.31:5820',
        'username': '******',
        'password': '******'
    }

    databaseName = 'riodoce'

    connection = stardog.Connection(databaseName, **connectionPool)
    if (connection):
        print(str(connection))
    else:
        print('null')

    def __init__(self):
        pass

    def __listAllProps(self, prd):
        return self.connection.select(
            'PREFIX prd: <' + Prefixes.PREFIX + '>' +
            ' SELECT distinct ?prop WHERE{?prop rdf:type prd:' + prd + '}')

    def listProperty(self):
        return self.__listAllProps('Measurable_Property')

    def listParty(self):
        return self.__listAllProps('Agent_Party')

    def listRiver(self):
        return self.__listAllProps('River')

    def getFilteredData(self, allFilters):
        finalQuery = ''
        filterPropertiesSelected = ''
        print('all filters ' + allFilters)
        finalQuery = (
            'PREFIX prd: <' + Prefixes.PREFIX + '>' +
            'SELECT ?author ?property ?value ?unity ?sample' +
            ' ?sampleDate ?lat ?long ?monitoringFacility ?locale' + ' WHERE {' +
            ' ?measurement rdf:type prd:Measurement.' +
            ' ?measurement prd:measure_unit ?unity.' +
            ' ?measurement prd:measures ?property.' +
            ' ?measurement prd:val ?value.' +
            ' ?measurement prd:uses ?sample.' +
            ' ?measurement prd:isDoneBy ?author.' + filterPropertiesSelected +
            ' ?sample rdf:type prd:Sampling.' +
            ' ?sample prd:date ?sampleDate.' +
            ' ?monitoringFacility rdf:type prd:Monitoring_Facility.' +
            ' ?monitoringFacility prd:performs ?sample.' +
            ' ?monitoringFacility prd:locates ?locale.' +
            ' ?locale rdf:type prd:Geographic_Point.' +
            ' ?locale prd:latitude ?lat.' + ' ?locale prd:longitude ?long' +
            allFilters + '.}LIMIT 10')
        return self.connection.select(finalQuery)

    def measurement(self, allFilters):
        finalQuery = ''
        filterPropertiesSelected = ''
        print('all filters ' + allFilters)
        finalQuery = (
            'PREFIX prd: <' + Prefixes.PREFIX + '>' +
            'SELECT ?author ?property ?value ?unity ?sample' +
            ' ?sampleDate ?lat ?long ?monitoringFacility ?locale' + ' WHERE {' +
            ' ?measurement rdf:type prd:Measurement.' +
            ' ?measurement prd:measure_unit ?unity.' +
            ' ?measurement prd:measures ?property.' +
            ' ?measurement prd:val ?value.' +
            ' ?measurement prd:uses ?sample.' +
            ' ?measurement prd:isDoneBy ?author.' + filterPropertiesSelected +
            ' ?sample rdf:type prd:Sampling.' +
            ' ?sample prd:date ?sampleDate.' +
            ' ?monitoringFacility rdf:type prd:Monitoring_Facility.' +
            ' ?monitoringFacility prd:performs ?sample.' +
            ' ?monitoringFacility prd:locates ?locale.' +
            ' ?locale rdf:type prd:Geographic_Point.' +
            ' ?locale prd:latitude ?lat.' + ' ?locale prd:longitude ?long' +
            allFilters + '.}LIMIT 10')
        return self.connection.select(finalQuery)
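
Given the class above, a hedged usage sketch; it assumes a Stardog server is reachable at the configured endpoint and that the Prefixes helper is importable as in the source:

# Instantiate the factory and list the measurable properties it knows about.
factory = ConnectionFactory()
properties = factory.listProperty()
for row in properties['results']['bindings']:
    print(row['prop']['value'])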