def load_data_from_file(self, fp, urlbase):
    """Load a locally created ttl file into the triplestore.

    The file is loaded either over HTTP (load_data(url)) or by direct
    file upload (upload_data(fp.name)) when askomics.file_upload_url
    is configured.

    :param fp: a file handle for the file to load
    :param urlbase: the base URL of the current askomics instance; it lets
        the triplestore fetch temporary ttl files over HTTP
    :return: a dictionary with information on the success or failure
        of the operation
    """
    if not fp.closed:
        # Flush so the data is really on disk before the triplestore reads it.
        fp.flush()
    ql = QueryLauncher(self.settings, self.session)
    if self.is_defined('askomics.load_url'):
        urlbase = self.settings['askomics.load_url']
    url = urlbase + "/ttl/" + self.session['username'] + '/' + os.path.basename(fp.name)
    data = {"status": "ok"}
    try:
        # Return values of upload_data/load_data were never used; drop the
        # dead local binding.
        if self.is_defined("askomics.file_upload_url"):
            ql.upload_data(fp.name, self.graph)
        else:
            ql.load_data(url, self.graph)
    except Exception as e:
        self.log.error(self._format_exception(e))
        # Bare raise preserves the original traceback (raise e rewrites it).
        raise
    finally:
        # Always clean up the temporary ttl file.
        os.remove(fp.name)
    return data
def get_value(self):
    """Build a SPARQL request from the JSON body (variates,
    constraintesRelations, constraintesFilters), run it and return the
    values, plus a CSV export file when body['export'] is set.
    """
    self.log.debug("== Attribute Value ==")
    data = {}
    tse = TripleStoreExplorer(self.settings, self.request.session)
    body = self.request.json_body
    try:
        # The generated query text is not needed here — only the results.
        # (The original bound it to an unused local.)
        results, _ = tse.build_sparql_query_from_json(
            body["variates"], body["constraintesRelations"], body["limit"], True)
        data['values'] = results
        if not body['export']:
            return data
        # Provide results file
        ql = QueryLauncher(self.settings, self.request.session)
        rb = ResultsBuilder(self.settings, self.request.session)
        data['file'] = ql.format_results_csv(rb.build_csv_table(results))
    except Exception as e:
        # View boundary: report the failure in the payload rather than crash.
        traceback.print_exc(file=sys.stdout)
        data['values'] = ""
        data['file'] = ""
        data['error'] = traceback.format_exc(limit=8) + "\n\n\n" + str(e)
        self.log.error(str(e))
    return data
def get_start_points(self):
    """Return the possible starting points of the graph.

    :return: list of node dicts (g, uri, label, public, private)
    :rtype: list
    """
    self.log.debug(" =========== TripleStoreExplorer:get_start_points ===========")
    sqg = SparqlQueryGraph(self.settings, self.session)
    launcher = QueryLauncher(self.settings, self.session)
    rows = launcher.process_query(sqg.get_start_point().query)
    nodes = []
    for row in rows:
        # A node is private when its access level mentions 'private'.
        is_private = 'private' in row['accesLevel']
        nodes.append({
            'g': row["g"],
            'uri': row["nodeUri"],
            'label': row["nodeLabel"],
            'public': not is_private,
            'private': is_private,
        })
    return nodes
def positionable_attr(self):
    """Return the positionable attributes shared by the two positionable
    entities given in the request body ('node' and 'second_node').

    :return: dict with 'results' mapping attribute name -> bool,
        or 'error' when an entity is not positionable
    """
    # FIXME: Rewrite this ugly method
    body = self.request.json_body
    data = {}
    sqb = SparqlQueryBuilder(self.settings, self.request.session)
    ql = QueryLauncher(self.settings, self.request.session)
    # Check if the two entities are positionable.
    # BUG FIX: the second check previously queried body['node'] twice,
    # so 'second_node' was never actually validated.
    positionable1 = ql.process_query(sqb.get_if_positionable(body['node']).query)
    positionable2 = ql.process_query(sqb.get_if_positionable(body['second_node']).query)
    # NOTE(review): process_query returns a result list at other call sites,
    # so comparing it with 0 may never be true — confirm intended semantics.
    if positionable1 == 0 or positionable2 == 0:
        data['error'] = 'not positionable nodes'
        return data
    results = ql.process_query(
        sqb.get_common_positionable_attributes(body['node'], body['second_node']).query)
    self.log.debug(results)
    # Hoist the ontology prefix instead of repeating the literal.
    prefix = "http://www.semanticweb.org/irisa/ontologies/2016/1/igepp-ontology#"
    data['results'] = {}
    list_pos_attr = []
    for elem in results:
        if elem['pos_attr'] not in list_pos_attr:
            list_pos_attr.append(elem['pos_attr'].replace(prefix, ""))
    for elem in list_pos_attr:
        # True only when every status for this attribute is truthy.
        data['results'][elem] = False not in [
            bool(int(p['status'])) for p in results
            if p['pos_attr'] == prefix + elem
        ]
    return data
def empty(self):
    """Empty the database by deleting every named graph."""
    builder = SparqlQueryBuilder(self.settings, self.request.session)
    launcher = QueryLauncher(self.settings, self.request.session)
    for graph in self.list_named_graphs():
        launcher.execute_query(builder.get_delete_query_string(graph).query)
def delete_apikey(self, key):
    """Remove the given API key from the triplestore."""
    sqa = SparqlQueryAuth(self.settings, self.session)
    launcher = QueryLauncher(self.settings, self.session)
    delete_query = sqa.delete_apikey(key).query
    launcher.execute_query(delete_query)
def __init__(self, settings, session, lendpoints):
    """Build a federated query engine over the given endpoints.

    Each entry of lendpoints must carry an 'askomics' boolean flag; a
    comment header listing all endpoints is prepended to SPARQL requests.
    """
    QueryLauncher.__init__(self, settings, session)
    self.log = logging.getLogger(__name__)
    self.log.debug(" =================== Federation Request ====================")
    # Comment lines added to SPARQL requests to declare every endpoint URL.
    self.commentsForFed = ""
    for endp in lendpoints:
        if 'askomics' not in endp:
            raise ValueError("endpoint var have to defined an 'askomics' key with a boolean value endp=" + str(endp))
        kind = 'askomics' if endp['askomics'] else 'external'
        self.commentsForFed += "#endpoint," + kind + "," + endp['name'] + ',' + endp['endpoint'] + ',false\n'
    if not self.is_defined("askomics.fdendpoint"):
        raise ValueError("can not find askomics.fdendpoint property in the config file !")
    self.name = 'FederationEngine'
    self.endpoint = self.get_param("askomics.fdendpoint")
    self.username = None
    self.password = None
    self.urlupdate = None
    self.auth = 'Basic'
    self.allowUpdate = False
def delete_askograph(self):
    """Delete the askomics graph."""
    sqb = SparqlQueryGraph(self.settings, self.request.session)
    query_laucher = QueryLauncher(self.settings, self.request.session)
    # Consistency fix: pass the SPARQL text (.query) as every other
    # call site does (e.g. delete_users) — process_query expects the
    # query string, not the builder result object.
    query_laucher.process_query(
        sqb.get_drop_named_graph('urn:sparql:test_askomics').query)
def load_data_from_file(self, fp, urlbase):
    """Load a locally created ttl file into the triplestore.

    The file is loaded either over HTTP (load_data(url)) or by direct
    file upload (upload_data(fp.name)) when askomics.file_upload_url
    is configured.

    :param fp: a file handle for the file to load
    :param urlbase: the base URL of the current askomics instance; it lets
        the triplestore fetch temporary ttl files over HTTP
    :return: a dictionary with information on the success or failure
        of the operation
    """
    if not fp.closed:
        # Flush so the data is really on disk before the triplestore reads it.
        fp.flush()
    ql = QueryLauncher(self.settings, self.session)
    if self.is_defined('askomics.load_url'):
        urlbase = self.settings['askomics.load_url']
    url = urlbase + "/ttl/" + self.session['username'] + '/' + os.path.basename(fp.name)
    data = {"status": "ok"}
    try:
        # Return values of upload_data/load_data were never used; drop the
        # dead local binding.
        if self.is_defined("askomics.file_upload_url"):
            ql.upload_data(fp.name, self.graph)
        else:
            ql.load_data(url, self.graph)
    except Exception as e:
        self.log.error(self._format_exception(e))
        # Bare raise preserves the original traceback (raise e rewrites it).
        raise
    finally:
        # Keep the temporary ttl file only in ttl-debug mode.
        if self.settings['askomics.debug_ttl'] != 'true':
            os.remove(fp.name)
    return data
def test_triple_presence(self, graph, triple):
    """Test the presence of a triple in the triplestore

    get if a triple is present in a specific graph of the triplestore

    :param graph: the named graph
    :type graph: string
    :param triple: the triple to test
    :type triple: string
    :returns: Result of the test
    :rtype: bool
    """
    sqb = SparqlQueryGraph(self.settings, self.request.session)
    query_laucher = QueryLauncher(self.settings, self.request.session)
    query = sqb.prepare_query("""
    SELECT count(*) AS ?count
    WHERE {
        GRAPH <""" + graph + """> {
            """ + triple + """ .
        }
    }
    """)
    res = query_laucher.process_query(query)
    # Fix: removed a leftover debug print() of the result; use the logger
    # if tracing is ever needed here.
    return bool(int(res[0]['count']))
def insert_metadatas(self, accessL):
    """Insert the graph metadata (timestamp, creator, access level, group,
    provenance, version, server, endpoint) into the parent graph.

    :param accessL: True for a public graph, False for a private one
    """
    self.log.debug('--- insert_metadatas ---')
    builder = SparqlQueryBuilder(self.settings, self.session)
    launcher = QueryLauncher(self.settings, self.session)
    access_level = 'public' if accessL else 'private'
    subject = '<' + self.graph + '> '
    statements = [
        'prov:generatedAtTime "' + self.timestamp + '"^^xsd:dateTime',
        'dc:creator "' + self.session['username'] + '"',
        ':accessLevel "' + access_level + '"',
        'foaf:Group "' + self.session['group'] + '"',
        'prov:wasDerivedFrom "' + self.name + '"',
        'dc:hasVersion "' + get_distribution('Askomics').version + '"',
        'prov:describesService "' + os.uname()[1] + '"',
    ]
    if self.is_defined("askomics.endpoint"):
        statements.append('prov:atLocation "' + self.get_param("askomics.endpoint") + '"')
    else:
        raise ValueError("askomics.endpoint does not exit.")
    ttl = ''.join(subject + statement + ' .\n' for statement in statements)
    sparql_header = builder.header_sparql_config('')
    launcher.insert_data(ttl, self.graph, sparql_header)
def compare_file_to_database(self, headers):
    """Compare the headers of a file to convert with the corresponding
    class abstraction stored in the database.

    :param headers: column headers; headers[0] is the entity class name
    :return: (missing_headers, new_headers, present_headers)
    """
    entity = headers[0]
    builder = SparqlQueryBuilder(self.settings, self.session)
    launcher = QueryLauncher(self.settings, self.session)
    template = self.get_template_sparql(self.ASKOMICS_get_class_info_from_abstraction_queryFile)
    query = builder.load_from_file(template, {"#nodeClass#": entity}).query
    results = launcher.process_query(query)
    if not results:
        # Entity unknown in the abstraction: everything is new.
        return [], headers, []
    prefix = self.get_param("askomics.prefix")
    bdd_relations = []
    missing_headers = []
    for result in results:
        relation = result["relation"].replace(prefix, "").replace("has_", "")
        bdd_relations.append(relation)
        if relation not in headers:
            self.log.warning('Relation "%s" not found in tables columns: %s.', relation, repr(headers))
            missing_headers.append(relation)
    new_headers = []
    present_headers = []
    for header in headers:
        if header == entity:
            continue
        if header not in bdd_relations:
            self.log.info('Adding column "%s".', header)
            new_headers.append(header)
        elif header not in missing_headers:
            present_headers.append(header)
    return missing_headers, new_headers, present_headers
def delete_galaxy(self):
    """Remove the galaxy triples of the current user."""
    sqa = SparqlQueryAuth(self.settings, self.session)
    launcher = QueryLauncher(self.settings, self.session)
    delete_query = sqa.delete_galaxy(self.username).query
    launcher.execute_query(delete_query)
def update_email(self):
    """Update the stored email address of the current user."""
    sqa = SparqlQueryAuth(self.settings, self.session)
    launcher = QueryLauncher(self.settings, self.session)
    update_query = sqa.update_mail(self.username, self.email).query
    launcher.process_query(update_query)
def delete_askograph(self):
    """Delete the askomics graph."""
    sqb = SparqlQueryGraph(self.settings, self.request.session)
    query_laucher = QueryLauncher(self.settings, self.request.session)
    # Consistency fix: pass the SPARQL text (.query), as the other
    # process_query call sites do — not the builder result object.
    query_laucher.process_query(
        sqb.get_drop_named_graph('urn:sparql:test_askomics').query)
def update_passwd(self):
    """Store the new password hash and random salt of the user."""
    sqa = SparqlQueryAuth(self.settings, self.session)
    launcher = QueryLauncher(self.settings, self.session)
    update_query = sqa.update_passwd(self.username, self.sha256_pw, self.randomsalt).query
    launcher.process_query(update_query)
def delete_users(self):
    """Drop the named graph that holds the test users."""
    sqb = SparqlQueryGraph(self.settings, self.request.session)
    launcher = QueryLauncher(self.settings, self.request.session)
    drop_query = sqb.get_drop_named_graph('urn:sparql:test_askomics:users').query
    launcher.execute_query(drop_query)
def check_galaxy(self):
    """Return True when the user has galaxy triples in the store."""
    sqa = SparqlQueryAuth(self.settings, self.session)
    launcher = QueryLauncher(self.settings, self.session)
    rows = launcher.process_query(sqa.check_galaxy(self.username).query)
    return ParamManager.Bool(rows[0]['status'])
def update_email(self):
    """Persist the user's new mail address in the triplestore."""
    sqa = SparqlQueryAuth(self.settings, self.session)
    launcher = QueryLauncher(self.settings, self.session)
    launcher.process_query(sqa.update_mail(self.username, self.email).query)
def deleteMoState(self, urimo):
    """Remove every triple about the given module URI from the modules graph."""
    self.log.debug(' ***** Delete module ' + urimo + ' on TPS ***** ')
    builder = SparqlQueryBuilder(self.settings, self.session)
    launcher = QueryLauncher(self.settings, self.session)
    delete_query = builder.prepare_query(
        """
        DELETE WHERE { GRAPH <""" + self.graph_modules + """> { <""" + urimo + """> ?p ?o } }
        """).query
    launcher.execute_query(delete_query)
def check_galaxy(self):
    """Check whether galaxy triples exist for the current user."""
    sqa = SparqlQueryAuth(self.settings, self.session)
    launcher = QueryLauncher(self.settings, self.session)
    status_query = sqa.check_galaxy(self.username).query
    rows = launcher.process_query(status_query)
    return ParamManager.Bool(rows[0]['status'])
def update_passwd(self):
    """Change the password of the user together with his random salt."""
    sqa = SparqlQueryAuth(self.settings, self.session)
    launcher = QueryLauncher(self.settings, self.session)
    launcher.process_query(
        sqa.update_passwd(self.username, self.sha256_pw, self.randomsalt).query)
def deleteMoState(self, urimo):
    """Delete the state triples of a module from the modules graph."""
    self.log.debug(' ***** Delete module ' + urimo + ' on TPS ***** ')
    builder = SparqlQueryBuilder(self.settings, self.session)
    launcher = QueryLauncher(self.settings, self.session)
    prepared = builder.prepare_query("""
        DELETE WHERE { GRAPH <""" + self.graph_modules + """> { <""" + urimo + """> ?p ?o } }
        """)
    launcher.execute_query(prepared.query)
def set_username_by_email(self):
    """Resolve and store self.username from self.email."""
    sqa = SparqlQueryAuth(self.settings, self.session)
    launcher = QueryLauncher(self.settings, self.session)
    rows = launcher.process_query(sqa.get_username_by_email(self.email).query)
    self.username = rows[0]['username']
def get_galaxy_infos(self):
    """Return the Galaxy url/apikey record of the user, or [] if none."""
    sqa = SparqlQueryAuth(self.settings, self.session)
    launcher = QueryLauncher(self.settings, self.session)
    rows = launcher.process_query(sqa.get_galaxy_infos(self.username).query)
    return rows[0] if rows else []
def add_apikey(self, keyname):
    """Register a new API key for the current user.

    :param keyname: the keyname
    :type keyname: string
    """
    sqa = SparqlQueryAuth(self.settings, self.session)
    launcher = QueryLauncher(self.settings, self.session)
    insert_query = sqa.add_apikey(self.username, keyname).query
    launcher.execute_query(insert_query)
def test_statistics(self): #load files self.it.empty() self.it.load_test2() ql = QueryLauncher(self.settings, self.request.session) queryResults = ql.insert_data(':sujet :predicat :objet .', 'test', 'prefix :<test>') server = queryResults.info()['server'] self.request.json_body = {'namedGraphs': ['test']} self.askview.delete_graph() data = self.askview.statistics() assert data['ntriples'] == 279 assert data['nclasses'] == '6' assert data['nentities'] == '19' assert data['ngraphs'] == '5' assert data['class'] == { 'Personne': { 'count': '7' }, 'Sexe': { 'count': '2' }, 'Instrument': { 'count': '2' } } for key in data['metadata'].keys(): self.assertRegexpMatches( key, r'^urn:sparql:(instrument|enseigne|connait|joue|personne)\.tsv_[0-9]+\.[0-9]+$' ) for key2 in data['metadata'][key]: self.assertRegexpMatches( key2, r'^(version|username|filename|loadDate|server)$') if key2 == 'version': assert data['metadata'][key][key2] == '2.0' elif key2 == 'username': assert data['metadata'][key][key2] == getpass.getuser() elif key2 == 'filename': self.assertRegexpMatches( data['metadata'][key][key2], r'^(instrument|enseigne|connait|joue|personne)\.tsv$') elif key2 == 'loadDate': self.assertRegexpMatches(data['metadata'][key][key2], r'^[0-9]+\.[0-9]+$') elif key2 == 'server': assert data['metadata'][key][key2] == server
def empty_database(self):
    """Delete every triple in the triplestore."""
    self.log.debug("=== DELETE ALL TRIPLES ===")
    builder = SparqlQueryBuilder(self.settings, self.request.session)
    launcher = QueryLauncher(self.settings, self.request.session)
    launcher.execute_query(builder.get_delete_query_string().query)
def set_username_by_email(self):
    """Look up the username matching self.email and store it on self."""
    sqa = SparqlQueryAuth(self.settings, self.session)
    launcher = QueryLauncher(self.settings, self.session)
    lookup_query = sqa.get_username_by_email(self.email).query
    rows = launcher.process_query(lookup_query)
    self.username = rows[0]['username']
def list_named_graphs(self):
    """Return the URI of every named graph in the triplestore.

    :return: list of graph URIs
    :rtype: list
    """
    sqb = SparqlQueryBuilder(self.settings, self.request.session)
    ql = QueryLauncher(self.settings, self.request.session)
    res = ql.execute_query(sqb.get_list_named_graphs().query)
    # Iterate the bindings directly instead of indexing by range(len(...)).
    return [binding['g']['value'] for binding in res['results']['bindings']]
def get_galaxy_infos(self):
    """Fetch the Galaxy url and apikey of the user ([] when absent)."""
    sqa = SparqlQueryAuth(self.settings, self.session)
    launcher = QueryLauncher(self.settings, self.session)
    infos_query = sqa.get_galaxy_infos(self.username).query
    rows = launcher.process_query(infos_query)
    if not rows:
        return []
    return rows[0]
def add_apikey(self, keyname):
    """Add an API key named *keyname* for the current user.

    :param keyname: the keyname
    :type keyname: string
    """
    sqa = SparqlQueryAuth(self.settings, self.session)
    launcher = QueryLauncher(self.settings, self.session)
    launcher.execute_query(sqa.add_apikey(self.username, keyname).query)
def list_named_graphs(self):
    """List the URIs of all named graphs.

    :return: list of graph URIs
    :rtype: list
    """
    sqb = SparqlQueryBuilder(self.settings, self.request.session)
    ql = QueryLauncher(self.settings, self.request.session)
    res = ql.execute_query(sqb.get_list_named_graphs().query)
    # Idiom fix: iterate bindings directly rather than via range(len(...)).
    return [binding['g']['value'] for binding in res['results']['bindings']]
def ckeck_key_belong_user(self, key):
    """Return True when the API key belongs to the current user.

    (The misspelled method name is kept for interface compatibility.)
    """
    sqa = SparqlQueryAuth(self.settings, self.session)
    launcher = QueryLauncher(self.settings, self.session)
    rows = launcher.process_query(
        sqa.ckeck_key_belong_user(self.username, key).query)
    self.log.debug('---> result: ' + str(rows))
    if not rows:
        return False
    return ParamManager.Bool(rows[0]['count'])
def create_user_graph(self):
    """Create the user's subgraph; all his data will be inserted there."""
    sqa = SparqlQueryAuth(self.settings, self.session)
    launcher = QueryLauncher(self.settings, self.session)
    main_graph = self.settings['askomics.graph']
    ttl = '<' + main_graph + ':' + self.username + \
        '> rdfg:subGraphOf <' + main_graph + '>'
    header_ttl = sqa.header_sparql_config(ttl)
    launcher.insert_data(ttl, self.settings["askomics.graph"], header_ttl)
def get_number_of_users(self):
    """Return how many users are registered in the triplestore."""
    sqa = SparqlQueryAuth(self.settings, self.session)
    launcher = QueryLauncher(self.settings, self.session)
    rows = launcher.process_query(sqa.get_number_of_users().query)
    if not rows:
        return 0
    return int(rows[0]['count'])
def launch_query(self):
    """Convert the constraints table built by the graph into a SPARQL
    query, send it to the database and compile the results."""
    body = self.request.json_body
    data = {}
    export = bool(int(body['export']))
    return_only_query = bool(int(body['return_only_query']))
    sqb = SparqlQueryBuilder(self.settings, self.request.session)
    if body['uploaded'] != '':
        query = body['uploaded']
        if export:
            # Exports need the full result set, not the 30-row preview.
            query = query.replace('LIMIT 30', 'LIMIT 10000')
    else:
        query = sqb.load_from_query_json(body).query
    if return_only_query:
        data['query'] = query
        return data
    launcher = QueryLauncher(self.settings, self.request.session)
    results_builder = ResultsBuilder(self.settings, self.request.session)
    results = launcher.process_query(query)
    if export:
        data['file'] = launcher.format_results_csv(results_builder.build_csv_table(results))
    else:
        entity_names, entity_attributes = results_builder.organize_attribute_and_entity(results, body['constraint'])
        data['results_entity_name'] = entity_names
        data['results_entity_attributes'] = entity_attributes
        prefix = self.settings["askomics.prefix"]
        data['results'] = [
            {key: value.replace(prefix, '') for key, value in row.items()}
            for row in results
        ]
    self.log.debug("== results ==")
    for row in results:
        self.log.debug(row)
    return data
def get_admins_emails(self):
    """Return the email address of every admin (mailto: prefix stripped)."""
    sqa = SparqlQueryAuth(self.settings, self.session)
    launcher = QueryLauncher(self.settings, self.session)
    rows = launcher.process_query(sqa.get_admins_emails().query)
    return [re.sub(r'^mailto:', '', row['email']) for row in rows]
def ckeck_key_belong_user(self, key):
    """Check whether an API key is owned by the current user.

    (Misspelled name kept: callers depend on it.)
    """
    sqa = SparqlQueryAuth(self.settings, self.session)
    launcher = QueryLauncher(self.settings, self.session)
    ownership_query = sqa.ckeck_key_belong_user(self.username, key).query
    rows = launcher.process_query(ownership_query)
    self.log.debug('---> result: ' + str(rows))
    return ParamManager.Bool(rows[0]['count']) if rows else False
def getUserAbstraction(self, service):
    """Get the user abstraction (relations and entities as subject/object).

    :return: dict of query results keyed by abstraction facet
    :rtype: dict
    """
    self.log.debug(
        " =========== TripleStoreExplorer:getUserAbstraction ===========")
    sqg = SparqlQueryGraph(self.settings, self.session)
    launcher = QueryLauncher(self.settings, self.session)
    # One query per facet of the abstraction; run them in declaration order.
    queries = {
        'relations': sqg.get_abstraction_relation('owl:ObjectProperty').query,
        'subclassof': sqg.get_isa_relation_entities().query,
        'entities': sqg.get_abstraction_entity().query,
        'attributes': sqg.get_abstraction_attribute_entity().query,
        'categories': sqg.get_abstraction_category_entity().query,
        'positionable': sqg.get_abstraction_positionable_entity().query,
    }
    data = {facet: launcher.process_query(query) for facet, query in queries.items()}
    data['graph'] = sqg.getGraphUser()
    return data
def check_email_in_database(self):
    """Return True when self.email exists in the triplestore."""
    sqa = SparqlQueryAuth(self.settings, self.session)
    launcher = QueryLauncher(self.settings, self.session)
    rows = launcher.process_query(sqa.check_email_presence(self.email).query)
    if not rows:
        return False
    return ParamManager.Bool(rows[0]['status'])
def moStateOnTPS(self):
    """Check whether the module files state is saved on the TPS.

    Any module file not yet known to the TPS is registered with the
    'off' (unchecked) status, in which case False is returned so the
    caller can query again; otherwise the module state rows are returned.
    """
    builder = SparqlQueryBuilder(self.settings, self.session)
    launcher = QueryLauncher(self.settings, self.session)
    results = launcher.process_query(
        builder.build_query_on_the_fly({
            'select': '?uri ?module ?comment ?version ?graph ?state',
            'from': [self.graph_modules],
            'query': '{\n'
                     '?uri rdfs:label ?module .\n'
                     '?uri rdfs:comment ?comment .\n'
                     '?uri :module_version ?version .\n'
                     '?uri :module_state ?state .\n'
                     'OPTIONAL { ?uri :module_graph ?graph . } \n'
                     '}\n'
        }, True).query)
    self.log.debug(' ***** module on TPS ***** ')
    known_modules = set()
    for result in results:
        result['checked'] = (result['state'] == "ok")
        result['wait'] = (result['state'] == "wait")
        known_modules.add(result['module'])
        self.log.debug('module : ' + result['module'])
    self.log.debug(' ***** check Available Modules ***** ')
    new_module_found = False
    for mo in self.moduleFiles:
        self.log.debug(" --> module " + mo)
        if mo not in known_modules:
            self.log.debug(" --====== > new module < =======")
            self.importMoSate(self.moduleFiles[mo], 'off')
            new_module_found = True
    if new_module_found:
        return False
    return results
def get_owner_of_apikey(self, key):
    """Set self.username to the owner of the given API key, if any.

    :param key: The API key
    :type key: string
    """
    sqa = SparqlQueryAuth(self.settings, self.session)
    launcher = QueryLauncher(self.settings, self.session)
    rows = launcher.process_query(sqa.get_owner_apikey(key).query)
    if rows:
        self.username = rows[0]['username']
def check_email_in_database(self):
    """Check whether the email address is present in the TS."""
    sqa = SparqlQueryAuth(self.settings, self.session)
    launcher = QueryLauncher(self.settings, self.session)
    presence_query = sqa.check_email_presence(self.email).query
    rows = launcher.process_query(presence_query)
    return ParamManager.Bool(rows[0]['status']) if rows else False
def get_attributes_of(self, uri):
    """Get all attributes of a node class (identified by its uri),
    derived from the domain knowledge of the RDF database.

    :param uri: Uri of the node class
    :type uri: str
    :return: All attributes of the node class
    :rtype: Attribute list
    """
    self.log.debug(" =========== TripleStoreExplorer:get_attributes_of ===========")
    builder = SparqlQueryBuilder(self.settings, self.session)
    launcher = QueryLauncher(self.settings, self.session)
    fragment = uri.rsplit('#', 1)[-1]
    parent = fragment + str(self.get_new_id(fragment))
    # Ask for every neighbor of the node identified by uri.
    template = self.get_template_sparql(self.ASKOMICS_neighbor_query_file)
    query = builder.load_from_file(template, {
        "nodeClass": '<%s>' % uri,
        "neighborClass": "?nodeUri"
    }).query
    attributes = []
    for row in launcher.process_query(query):
        relation_uri = row["relationUri"]
        label = row["nodeLabel"] if 'nodeLabel' in row else row["relationLabel"]
        # get_new_id is called for every row (before the filter), as in the
        # original, so id allocation keeps the same order.
        attr_id = label + str(self.get_new_id(label))
        if self.has_setting(row["nodeUri"], 'attribute') or self.has_setting(relation_uri, 'attribute'):
            attributes.append(
                Attribute(attr_id, relation_uri, row["nodeUri"], label, parent))
    return attributes
def empty(self):
    """Delete all test data.

    Get the list of all public and private graphs and delete them.
    """
    sqb = SparqlQueryGraph(self.settings, self.request.session)
    query_laucher = QueryLauncher(self.settings, self.request.session)
    private_graphs = self.list_private_graphs()
    public_graphs = self.list_public_graphs()
    # Consistency fix: pass the SPARQL text (.query), as the other
    # get_delete_query_string call sites in this codebase do.
    for graph in private_graphs:
        query_laucher.process_query(sqb.get_delete_query_string(graph).query)
    for graph in public_graphs:
        query_laucher.process_query(sqb.get_delete_query_string(graph).query)
def get_metadatas(self):
    """Create metadata triples and insert them into the AskOmics main graph."""
    self.log.debug("====== INSERT METADATAS ======")
    builder = SparqlQueryBuilder(self.settings, self.session)
    launcher = QueryLauncher(self.settings, self.session)
    subject = "<" + self.metadatas['graphName'] + "> "
    statements = [
        'prov:generatedAtTime "' + self.metadatas['loadDate'] + '"^^xsd:dateTime',
        'dc:creator "' + self.metadatas['username'] + '"^^xsd:string',
        'prov:wasDerivedFrom "' + self.metadatas['fileName'] + '"^^xsd:string',
        'dc:hasVersion "' + self.metadatas['version'] + '"^^xsd:string',
        'prov:describesService "' + self.metadatas['server'] + '"^^xsd:string',
    ]
    # Every statement but the last is newline-terminated; the last ends
    # with ' .' only, matching the original output byte-for-byte.
    ttl = ' .\n'.join(subject + statement for statement in statements) + ' .'
    sparql_header = builder.header_sparql_config("")
    launcher.insert_data(ttl, self.get_param("askomics.graph"), sparql_header)
def positionable_attr(self):
    """Return the positionable attributes in common between the two
    positionable entities of the request ('node' and 'second_node').

    :return: dict with 'results' mapping attribute name -> bool,
        or 'error' when an entity is not positionable
    """
    # FIXME: Rewrite this ugly method
    body = self.request.json_body
    data = {}
    sqb = SparqlQueryBuilder(self.settings, self.request.session)
    ql = QueryLauncher(self.settings, self.request.session)
    # Check if the two entities are positionable.
    # BUG FIX: the second check used body['node'] twice, leaving
    # 'second_node' unvalidated.
    positionable1 = ql.process_query(
        sqb.get_if_positionable(body['node']).query)
    positionable2 = ql.process_query(
        sqb.get_if_positionable(body['second_node']).query)
    # NOTE(review): process_query returns a list at other call sites, so the
    # == 0 comparison may never hold — confirm intended semantics.
    if positionable1 == 0 or positionable2 == 0:
        data['error'] = 'not positionable nodes'
        return data
    results = ql.process_query(
        sqb.get_common_positionable_attributes(body['node'],
                                               body['second_node']).query)
    self.log.debug(results)
    prefix = "http://www.semanticweb.org/irisa/ontologies/2016/1/igepp-ontology#"
    data['results'] = {}
    list_pos_attr = []
    for elem in results:
        if elem['pos_attr'] not in list_pos_attr:
            list_pos_attr.append(elem['pos_attr'].replace(prefix, ""))
    for elem in list_pos_attr:
        # True only when every status flag for this attribute is truthy.
        data['results'][elem] = False not in [
            bool(int(p['status'])) for p in results
            if p['pos_attr'] == prefix + elem
        ]
    return data
def existing_relations(self):
    """Fetch from the triplestore the existing relations if entities of
    the same name exist.

    :return: a List of relation names
    :rtype: List
    """
    # FIXME: Useless function, always returns an empty list
    self.log.debug("existing_relations")
    sqg = SparqlQueryGraph(self.settings, self.session)
    ql = QueryLauncher(self.settings, self.session)
    # The query result was bound to an unused local; keep the call (in case
    # callers rely on its side effects) but drop the dead assignment.
    ql.process_query(
        sqg.get_class_info_from_abstraction(self.headers[0]).query)
    return []
def list_private_graphs(self):
    """List the private graphs

    :returns: description of the private graphs
    :rtype: list
    """
    sqb = SparqlQueryGraph(self.settings, self.request.session)
    query_laucher = QueryLauncher(self.settings, self.request.session)
    res = query_laucher.process_query(sqb.get_user_graph_infos_with_count().query)
    # Idiom fix: iterate rows directly instead of indexing by range(len(...)).
    return [row['g'] for row in res]
def list_public_graphs(self):
    """list the public graphs

    :returns: description of the public graphs
    :rtype: list
    """
    sqb = SparqlQueryGraph(self.settings, self.request.session)
    query_laucher = QueryLauncher(self.settings, self.request.session)
    # Consistency fix: pass the SPARQL text (.query) as every other
    # process_query call site does; also removed a stray debug print.
    res = query_laucher.process_query(sqb.get_public_graphs().query)
    return [row['g'] for row in res]
def check_email_password(self):
    """Return True when self.passwd matches the password stored for
    self.email (salted SHA-256 comparison)."""
    sqa = SparqlQueryAuth(self.settings, self.session)
    launcher = QueryLauncher(self.settings, self.session)
    rows = launcher.process_query(
        sqa.get_password_with_email(self.email).query)
    stored_salt = rows[0]['salt']
    stored_hash = rows[0]['shapw']
    # Same hashing scheme as account creation: app salt + password + user salt.
    candidate = self.settings["askomics.salt"] + self.passwd + stored_salt
    candidate_hash = hashlib.sha256(candidate.encode('utf8')).hexdigest()
    return stored_hash == candidate_hash
def persist_user(self, host_url):
    """Persist all user infos in the TS.

    Inserts the user's TTL description into the users graph, then mails
    every admin about the new account.

    :param host_url: base URL of the running instance, used in the
        notification mail body and subject
    """
    query_laucher = QueryLauncher(self.settings, self.session)
    sqa = SparqlQueryAuth(self.settings, self.session)
    #check if user is the first. if yes, set him admin
    if self.get_number_of_users() == 0:
        # First account: admin and unblocked.
        admin = 'true'
        blocked = 'false'
        self.set_admin(True)
        self.set_blocked(False)
    else:
        # Later accounts start blocked until an admin unblocks them.
        admin = 'false'
        blocked = 'true'
        self.set_admin(False)
        self.set_blocked(True)
    # Build the TTL description of the user; `indent` aligns the
    # continuation predicates under the subject for readability.
    chunk = ':' + self.username + ' rdf:type foaf:Person ;\n'
    indent = len(self.username) * ' ' + ' '
    chunk += indent + 'foaf:name \"' + self.username + '\" ;\n'
    chunk += indent + ':password \"' + self.sha256_pw + '\" ;\n'
    chunk += indent + 'foaf:mbox <mailto:' + self.email + '> ;\n'
    chunk += indent + ':isadmin \"' + admin + '\"^^xsd:boolean ;\n'
    chunk += indent + ':isblocked \"' + blocked + '\"^^xsd:boolean ;\n'
    chunk += indent + ':randomsalt \"' + self.randomsalt + '\" .\n'
    header_ttl = sqa.header_sparql_config(chunk)
    query_laucher.insert_data(chunk, self.settings["askomics.users_graph"], header_ttl)
    emails = self.get_admins_emails()
    # Send a mail to all admins
    body = 'Hello,\n'
    body += 'User \'' + self.username + '\' just created an account on Askomics.\n'
    body += 'Log into the admin interface in order to unblock this user, or contact him '
    body += 'at ' + self.email + '.\n\n\n'
    body += host_url + '\n\n'
    self.send_mails(host_url, emails, '[AskOmics@' + host_url + '] New account created', body)
def get_list_named_graphs(self):
    """Return a list with all the named graphs.

    :return: list of graph URIs
    :rtype: list
    """
    self.log.debug("=== LIST OF NAMED GRAPHS ===")
    sqb = SparqlQueryBuilder(self.settings, self.request.session)
    ql = QueryLauncher(self.settings, self.request.session)
    res = ql.execute_query(sqb.get_list_named_graphs().query)
    # Idiom fix: iterate the bindings directly rather than via range(len(...)).
    return [binding['g']['value'] for binding in res['results']['bindings']]
def list_public_graphs(self):
    """list the public graphs

    :returns: description of the public graphs
    :rtype: list
    """
    sqb = SparqlQueryGraph(self.settings, self.request.session)
    query_laucher = QueryLauncher(self.settings, self.request.session)
    res = query_laucher.execute_query(sqb.get_public_graphs().query)
    # Idiom fix: iterate the bindings directly rather than via range(len(...)).
    return [binding['g']['value'] for binding in res['results']['bindings']]
def test_statistics(self):
    """Smoke-test the statistics query builders and the per-graph
    delete/metadata query builders."""
    request = testing.DummyRequest()
    builder = SparqlQueryBuilder(self.settings, request.session)
    launcher = QueryLauncher(self.settings, request.session)
    # Exercise every statistics builder (results unused: build-only check).
    builder.get_statistics_number_of_triples()
    builder.get_statistics_number_of_entities()
    builder.get_statistics_distinct_classes()
    builder.get_statistics_list_classes()
    builder.get_statistics_nb_instances_by_classe()
    builder.get_statistics_by_startpoint()
    builder.get_list_named_graphs()
    res = launcher.execute_query(builder.get_list_named_graphs().query)
    for binding in res['results']['bindings']:
        graph = binding['g']['value']
        builder.get_delete_query_string(graph)
        builder.get_metadatas(graph)