def delete_apikey(self, key):
    """delete an apikey"""
    launcher = QueryLauncher(self.settings, self.session)
    auth_query = SparqlQueryAuth(self.settings, self.session)
    # build the DELETE query for this key and fire it at the triplestore
    launcher.execute_query(auth_query.delete_apikey(key).query)
def empty(self):
    # empty database: drop the content of every named graph
    builder = SparqlQueryBuilder(self.settings, self.request.session)
    launcher = QueryLauncher(self.settings, self.request.session)
    for named_graph in self.list_named_graphs():
        launcher.execute_query(builder.get_delete_query_string(named_graph).query)
def delete_galaxy(self):
    """Delete galaxy triple for the user"""
    launcher = QueryLauncher(self.settings, self.session)
    auth_query = SparqlQueryAuth(self.settings, self.session)
    # remove the galaxy url/key triples attached to the current user
    launcher.execute_query(auth_query.delete_galaxy(self.username).query)
def delete_askograph(self):
    """Delete the askomics graph"""
    graph_query = SparqlQueryGraph(self.settings, self.request.session)
    launcher = QueryLauncher(self.settings, self.request.session)
    drop_query = graph_query.get_drop_named_graph('urn:sparql:test_askomics')
    launcher.execute_query(drop_query.query)
def deleteMoState(self, urimo):
    """Delete every state triple of module *urimo* from the modules graph."""
    self.log.debug(' ***** Delete module ' + urimo + ' on TPS ***** ')
    builder = SparqlQueryBuilder(self.settings, self.session)
    launcher = QueryLauncher(self.settings, self.session)
    delete_query = builder.prepare_query(""" DELETE WHERE { GRAPH <""" + self.graph_modules + """> { <""" + urimo + """> ?p ?o } } """)
    launcher.execute_query(delete_query.query)
def deleteMoState(self, urimo):
    """Remove the state triples describing the module identified by *urimo*."""
    self.log.debug(' ***** Delete module ' + urimo + ' on TPS ***** ')
    query_builder = SparqlQueryBuilder(self.settings, self.session)
    query_launcher = QueryLauncher(self.settings, self.session)
    sparql = (""" DELETE WHERE { GRAPH <""" + self.graph_modules
              + """> { <""" + urimo + """> ?p ?o } } """)
    query_launcher.execute_query(query_builder.prepare_query(sparql).query)
def empty_database(self):
    """
    Delete all triples in the triplestore
    """
    self.log.debug("=== DELETE ALL TRIPLES ===")
    builder = SparqlQueryBuilder(self.settings, self.request.session)
    launcher = QueryLauncher(self.settings, self.request.session)
    launcher.execute_query(builder.get_delete_query_string().query)
def add_apikey(self, keyname):
    """Add an api key

    :param keyname: the keyname
    :type keyname: string
    """
    launcher = QueryLauncher(self.settings, self.session)
    auth_query = SparqlQueryAuth(self.settings, self.session)
    insert_query = auth_query.add_apikey(self.username, keyname)
    launcher.execute_query(insert_query.query)
def add_apikey(self, keyname):
    """Add an api key

    :param keyname: the keyname
    :type keyname: string
    """
    query_launcher = QueryLauncher(self.settings, self.session)
    sparql_auth = SparqlQueryAuth(self.settings, self.session)
    # insert the (user, keyname) api key triples
    query_launcher.execute_query(
        sparql_auth.add_apikey(self.username, keyname).query)
def test_triple_presence(self, graph, triple):
    """Test the presence of a triple in the triplestore

    get if a triple is present in a specific graph of the triplestore

    :param graph: the named graph
    :type graph: string
    :param triple: the triple to test
    :type triple: string
    :returns: Result of the test
    :rtype: bool
    """
    sqb = SparqlQueryGraph(self.settings, self.request.session)
    query_launcher = QueryLauncher(self.settings, self.request.session)
    # NOTE: graph and triple are concatenated into the query verbatim;
    # callers must supply trusted values
    query = sqb.prepare_query("""
    SELECT count(*) AS ?count
    WHERE {
        GRAPH <""" + graph + """> {
            """ + triple + """ .
        }
    }
    """)
    res = query_launcher.execute_query(query.query)
    # evaluate the count once (previous version printed it and computed it twice)
    count = int(res['results']['bindings'][0]['count']['value'])
    return bool(count)
def delete_graph(self):
    """
    Delete triples from a list of graph
    """
    self.log.debug("=== DELETE SELECTED GRAPHS ===")
    builder = SparqlQueryBuilder(self.settings, self.request.session)
    launcher = QueryLauncher(self.settings, self.request.session)
    for graph in self.request.json_body['namedGraphs']:
        self.log.debug("--- DELETE GRAPH : %s", graph)
        # drop the graph itself, then its metadata triples
        launcher.execute_query(builder.get_drop_named_graph(graph).query)
        launcher.execute_query(builder.get_delete_metadatas_of_graph(graph).query)
def empty(self):
    """Delete all test data

    Get the list of all public and private graphs and delete them
    """
    graph_query = SparqlQueryGraph(self.settings, self.request.session)
    launcher = QueryLauncher(self.settings, self.request.session)
    # private graphs first, then public ones, as in the original ordering
    for graph in self.list_private_graphs() + self.list_public_graphs():
        launcher.execute_query(graph_query.get_delete_query_string(graph).query)
def list_named_graphs(self):
    """Return the URI of every named graph present in the triplestore.

    :returns: list of graph URIs
    :rtype: list
    """
    sqb = SparqlQueryBuilder(self.settings, self.request.session)
    ql = QueryLauncher(self.settings, self.request.session)
    res = ql.execute_query(sqb.get_list_named_graphs().query)
    # iterate bindings directly instead of indexing via range(len(...))
    return [binding['g']['value'] for binding in res['results']['bindings']]
def add_galaxy(self, url, key):
    """Connect a galaxy account to Askomics

    add triples for the url of galaxy, and the user api key

    :param url: the galaxy url
    :type url: string
    :param key: the galaxy user api key
    :type key: string
    :raises Exception: propagated from check_galaxy_instance when the
        galaxy server cannot be reached
    """
    # try to connect to the galaxy server; a failure simply propagates
    # (the previous try/except that re-raised the same exception was a no-op)
    galaxy = GalaxyConnector(self.settings, self.session, url, key)
    galaxy.check_galaxy_instance()

    query_launcher = QueryLauncher(self.settings, self.session)
    sqa = SparqlQueryAuth(self.settings, self.session)
    query_launcher.execute_query(sqa.add_galaxy(self.username, url, key).query)
def empty_database(self):
    """
    Delete all triples in the triplestore

    On failure, returns a dict with an 'error' key; on success the
    return value is None (unchanged behaviour).
    """
    data = {}
    self.log.debug("=== DELETE ALL TRIPLES ===")
    try:
        builder = SparqlQueryBuilder(self.settings, self.request.session)
        launcher = QueryLauncher(self.settings, self.request.session)
        graphs = self.get_list_named_graphs()
        # the main askomics graph is not in the named-graph list: add it
        graphs.append(launcher.get_param("askomics.graph"))
        for graph in graphs:
            launcher.execute_query(builder.get_delete_query_string(graph).query)
    except Exception as e:
        traceback.print_exc(file=sys.stdout)
        data['error'] = traceback.format_exc(limit=8) + "\n\n\n" + str(e)
        self.log.error(str(e))
        return data
def empty_database(self):
    """
    Delete all triples in the triplestore

    Returns an {'error': ...} dict when a query fails, otherwise None.
    """
    data = {}
    self.log.debug("=== DELETE ALL TRIPLES ===")
    try:
        query_builder = SparqlQueryBuilder(self.settings, self.request.session)
        query_launcher = QueryLauncher(self.settings, self.request.session)
        # delete every named graph plus the main askomics graph
        target_graphs = self.get_list_named_graphs()
        target_graphs.append(query_launcher.get_param("askomics.graph"))
        for target in target_graphs:
            query_launcher.execute_query(
                query_builder.get_delete_query_string(target).query)
    except Exception as e:
        traceback.print_exc(file=sys.stdout)
        data['error'] = traceback.format_exc(limit=8) + "\n\n\n" + str(e)
        self.log.error(str(e))
        return data
def list_named_graphs(self):
    """List the named graphs of the triplestore.

    :returns: list of graph URIs
    :rtype: list
    """
    sqb = SparqlQueryBuilder(self.settings, self.request.session)
    ql = QueryLauncher(self.settings, self.request.session)
    res = ql.execute_query(sqb.get_list_named_graphs().query)
    # direct iteration over bindings replaces the range(len(...)) loop
    return [binding['g']['value'] for binding in res['results']['bindings']]
def add_galaxy(self, url, key):
    """Connect a galaxy account to Askomics

    add triples for the url of galaxy, and the user api key

    :param url: the galaxy url
    :type url: string
    :param key: the galaxy user api key
    :type key: string
    :raises Exception: propagated from check_galaxy_instance when the
        galaxy server cannot be reached
    """
    # try to connect to the galaxy server; let failures propagate
    # (the former `except Exception as e: raise e` added nothing)
    galaxy = GalaxyConnector(self.settings, self.session, url, key)
    galaxy.check_galaxy_instance()

    query_launcher = QueryLauncher(self.settings, self.session)
    sqa = SparqlQueryAuth(self.settings, self.session)
    query_launcher.execute_query(
        sqa.add_galaxy(self.username, url, key).query)
def test_statistics(self):
    """Smoke-test every statistics query builder."""
    request = testing.DummyRequest()
    builder = SparqlQueryBuilder(self.settings, request.session)
    launcher = QueryLauncher(self.settings, request.session)
    builder.get_statistics_number_of_triples()
    builder.get_statistics_number_of_entities()
    builder.get_statistics_distinct_classes()
    builder.get_statistics_list_classes()
    builder.get_statistics_nb_instances_by_classe()
    builder.get_statistics_by_startpoint()
    builder.get_list_named_graphs()
    res = launcher.execute_query(builder.get_list_named_graphs().query)
    # build the per-graph delete/metadata queries for every result row
    for binding in res['results']['bindings']:
        graph_uri = binding['g']['value']
        builder.get_delete_query_string(graph_uri)
        builder.get_metadatas(graph_uri)
def get_list_named_graphs(self):
    """
    Return a list with all the named graphs.

    :returns: list of graph URIs
    :rtype: list
    """
    self.log.debug("=== LIST OF NAMED GRAPHS ===")
    sqb = SparqlQueryBuilder(self.settings, self.request.session)
    ql = QueryLauncher(self.settings, self.request.session)
    res = ql.execute_query(sqb.get_list_named_graphs().query)
    # iterate bindings directly instead of indexing via range(len(...))
    return [binding['g']['value'] for binding in res['results']['bindings']]
def list_public_graphs(self):
    """list the public graphs

    :returns: URIs of the public graphs
    :rtype: list
    """
    # docstring previously claimed dict; the method builds and returns a list
    sqb = SparqlQueryGraph(self.settings, self.request.session)
    query_launcher = QueryLauncher(self.settings, self.request.session)
    res = query_launcher.execute_query(sqb.get_public_graphs().query)
    return [binding['g']['value'] for binding in res['results']['bindings']]
def get_list_named_graphs(self):
    """
    Return a list with all the named graphs.

    :returns: list of graph URIs
    :rtype: list
    """
    self.log.debug("=== LIST OF NAMED GRAPHS ===")
    sqb = SparqlQueryBuilder(self.settings, self.request.session)
    ql = QueryLauncher(self.settings, self.request.session)
    res = ql.execute_query(sqb.get_list_named_graphs().query)
    # comprehension over the bindings replaces the range(len(...)) loop
    return [binding['g']['value'] for binding in res['results']['bindings']]
def test_statistics(self):
    """Exercise each statistics-related query builder once."""
    request = testing.DummyRequest()
    sqb = SparqlQueryBuilder(self.settings, request.session)
    ql = QueryLauncher(self.settings, request.session)
    sqb.get_statistics_number_of_triples()
    sqb.get_statistics_number_of_entities()
    sqb.get_statistics_distinct_classes()
    sqb.get_statistics_list_classes()
    sqb.get_statistics_nb_instances_by_classe()
    sqb.get_statistics_by_startpoint()
    sqb.get_list_named_graphs()
    result = ql.execute_query(sqb.get_list_named_graphs().query)
    for row in result['results']['bindings']:
        uri = row['g']['value']
        sqb.get_delete_query_string(uri)
        sqb.get_metadatas(uri)
def build_sparql_query_from_json(self, variates, constraintesRelations, limit, sendRequestToTPS):
    """Build a SPARQL query from the JSON query description.

    :param variates: variable names to SELECT
    :param constraintesRelations: nested constraint blocks (see buildRecursiveBlock)
    :param limit: maximum number of results; applied when not None and > 0
    :param sendRequestToTPS: if True, execute the query against the triplestore
    :returns: tuple (results, query); results is {} when the query is not sent
    """
    self.log.debug("variates")
    self.log.debug(variates)
    self.log.debug("constraintesRelations")
    self.log.debug(constraintesRelations)

    sqb = SparqlQueryBuilder(self.settings, self.session)
    ql = QueryLauncher(self.settings, self.session)
    # the named-graph listing is still issued for parity with the original
    # code; its result is intentionally unused (see TODO below)
    ql.execute_query(sqb.get_list_named_graphs().query)

    #TODO OFI: External Service do not work and, anyway, graphes have to be
    # selectionned by the user in the UI (FROM clauses removed)

    req = ""
    req += "SELECT DISTINCT " + ' '.join(variates) + "\n"
    req += "WHERE \n"
    req += self.buildRecursiveBlock('', constraintesRelations)
    # PEP 8: identity comparison with None
    if limit is not None and limit > 0:
        req += " LIMIT " + str(limit)

    # reuse the builder created above instead of instantiating a second one
    prefixes = sqb.header_sparql_config(req)
    query = prefixes + req

    results = {}
    if sendRequestToTPS:
        results = ql.process_query(query)
    else:
        # add comment inside query to inform user
        query = "# endpoint = " + self.get_param(
            "askomics.endpoint") + "\n" + query

    return results, query
def build_sparql_query_from_json(self, variates, constraintesRelations, limit, sendRequestToTPS):
    """Build a SPARQL query from the JSON query description.

    :param variates: variable names to SELECT
    :param constraintesRelations: nested constraint blocks (see buildRecursiveBlock)
    :param limit: maximum number of results; applied when not None and > 0
    :param sendRequestToTPS: if True, execute the query against the triplestore
    :returns: tuple (results, query); results is {} when the query is not sent
    """
    self.log.debug("variates")
    self.log.debug(variates)
    self.log.debug("constraintesRelations")
    self.log.debug(constraintesRelations)

    sqb = SparqlQueryBuilder(self.settings, self.session)
    ql = QueryLauncher(self.settings, self.session)
    # issued for parity with the original code; result intentionally unused
    ql.execute_query(sqb.get_list_named_graphs().query)

    #TODO OFI: External Service do not work and, anyway, graphes have to be
    # selectionned by the user in the UI (FROM clauses removed)

    req = ""
    req += "SELECT DISTINCT " + ' '.join(variates) + "\n"
    req += "WHERE \n"
    req += self.buildRecursiveBlock('', constraintesRelations)
    # PEP 8: identity comparison with None
    if limit is not None and limit > 0:
        req += " LIMIT " + str(limit)

    # reuse the builder created above instead of instantiating a second one
    prefixes = sqb.header_sparql_config(req)
    query = prefixes + req

    results = {}
    if sendRequestToTPS:
        results = ql.process_query(query)
    else:
        # add comment inside query to inform user
        query = "# endpoint = " + self.get_param("askomics.endpoint") + "\n" + query

    return results, query
def manageModules(self, host_url, urimodule, namemodule, active):
    """Activate or deactivate a module.

    :param host_url: base URL of the AskOmics host, forwarded to importRDF
    :param urimodule: URI identifying the module on the triplestore
    :param namemodule: module name, key into self.moduleFiles
    :param active: True to activate the module, False to deactivate it
    :raises ValueError: when urimodule is not known on the triplestore
    """
    self.log.debug(
        " --======================> manageModules <========================--- "
    )
    self.log.debug(" uri:" + urimodule)
    self.log.debug(" namemodule:" + namemodule)
    self.log.debug(" active:" + str(active))

    listMo = self.getListModules()
    mo = None
    for i in listMo:
        if i["uri"] == urimodule:
            mo = i
            break
    if mo is None:  # PEP 8: identity test instead of '== None'
        raise ValueError("Can not find Mo on TPS !")

    # another operation is already processing this module: do nothing
    if mo['state'] == 'wait':
        self.log.debug(
            " ****************** WAIT MODE **************** :" + urimodule)
        return

    # mark the module as busy while we (de)activate it
    self.log.debug(" delete MO state :" + urimodule)
    self.deleteMoState(urimodule)
    self.log.debug(" insert new MO state :" + urimodule)
    self.importMoSate(mo, "wait")

    ql = QueryLauncher(self.settings, self.session)
    if active:
        try:
            self.importRDF(mo, namemodule, host_url)
            #loading owl file
            if 'owl' in self.moduleFiles[namemodule] and self.moduleFiles[
                    namemodule]['owl'].strip() != '':
                ql.load_data(self.moduleFiles[namemodule]['owl'], mo['graph'])
        except Exception as e:
            # activation failed: roll the state back to "off" and re-raise
            self.log.error('failed: ' + str(e))
            self.log.debug(" delete MO state :" + urimodule)
            self.deleteMoState(urimodule)
            self.log.debug(" insert new MO state :" + urimodule)
            self.importMoSate(mo, "off")
            raise e

        self.log.debug(" delete MO state :" + urimodule)
        self.deleteMoState(urimodule)
        self.log.debug(" insert new MO state :" + urimodule)
        self.importMoSate(mo, "ok")

        # manage owl if dos not exist in the MO file
        if 'rdf' not in self.moduleFiles[namemodule]:
            self.moduleFiles[namemodule]['rdf'] = []
        if len(self.moduleFiles[namemodule]['rdf']) <= 0:
            # NOTE(review): import/save assumed to belong to the empty-rdf
            # branch, matching the generated abstract RDF — confirm intent
            self.moduleFiles[namemodule][
                'rdf'] = self.generateAbstractAskomicsRDF(mo['graph'])
            self.importRDF(mo, namemodule, host_url, mo['graph'])
            self.saveMo(namemodule)
    else:
        # deactivation: drop the module graph and its metadata, mark "off"
        if 'graph' in mo:
            sqb = SparqlQueryBuilder(self.settings, self.session)
            ql.execute_query(sqb.get_drop_named_graph(mo['graph']).query)
            ql.execute_query(
                sqb.get_delete_metadatas_of_graph(mo['graph']).query)
        self.log.debug(" delete MO state :" + urimodule)
        self.deleteMoState(urimodule)
        self.log.debug(" insert new MO state :" + urimodule)
        self.importMoSate(mo, "off")
def source_files_overview(self):
    """
    Get preview data for all the available files

    Builds a dict with a 'files' list (one entry per source file, shaped
    by its type: tsv / gff / ttl) and a 'taxons' list fetched from the
    triplestore.
    """
    self.log.debug(" ========= Askview:source_files_overview =============")
    sfc = SourceFileConvertor(self.settings, self.request.session)
    source_files = sfc.get_source_files()
    data = {}
    data['files'] = []
    # get all taxon in the TS
    sqb = SparqlQueryBuilder(self.settings, self.request.session)
    ql = QueryLauncher(self.settings, self.request.session)
    res = ql.execute_query(sqb.get_all_taxon().query)
    taxons_list = []
    for elem in res['results']['bindings']:
        taxons_list.append(elem['taxon']['value'])
    data['taxons'] = taxons_list
    for src_file in source_files:
        infos = {}
        infos['name'] = src_file.name
        infos['type'] = src_file.type
        if src_file.type == 'tsv':
            try:
                infos['headers'] = src_file.headers
                infos['preview_data'] = src_file.get_preview_data()
                infos['column_types'] = []
                # header_num mirrors ih throughout the loop (kept as-is)
                header_num = 0
                for ih in range(0, len(infos['headers'])):
                    #if infos['headers'][ih].find("@")>0:
                    #    infos['column_types'].append("entity")
                    #else:
                    infos['column_types'].append(src_file.guess_values_type(infos['preview_data'][ih], infos['headers'][header_num]))
                    header_num += 1
            except Exception as e:
                # preview failure is reported per-file, not fatal
                traceback.print_exc(file=sys.stdout)
                infos['error'] = 'Could not read input file, are you sure it is a valid tabular file?'
                self.log.error(str(e))
            data['files'].append(infos)
        elif src_file.type == 'gff':
            try:
                entities = src_file.get_entities()
                infos['entities'] = entities
            except Exception as e:
                # GFF parsing errors are also reported per-file
                self.log.debug('error !!')
                traceback.print_exc(file=sys.stdout)
                infos['error'] = 'Could not parse the file, are you sure it is a valid GFF3 file?'
                self.log.error('error with gff examiner: ' + str(e))
            data['files'].append(infos)
        elif src_file.type == 'ttl':
            infos['preview'] = src_file.get_preview_ttl()
            data['files'].append(infos)
    return data
def statistics(self):
    """
    Get information about triplet store

    Returns a dict with triple/entity/class/graph counts, per-graph
    metadata, and per-class instance counts.
    """
    self.log.debug("== STATS ==")
    data = {}
    pm = ParamManager(self.settings, self.request.session)
    sqb = SparqlQueryBuilder(self.settings, self.request.session)
    ql = QueryLauncher(self.settings, self.request.session)
    tse = TripleStoreExplorer(self.settings, self.request.session)
    # total triples = user graphs + AskOmics internal graphs
    results = ql.process_query(sqb.get_statistics_number_of_triples().query)
    resultsGraphs = ql.process_query(sqb.get_statistics_number_of_triples_AskOmics_graphs().query)
    data["ntriples"] = int(results[0]["no"]) + int(resultsGraphs[0]["no"])
    results = ql.process_query(sqb.get_statistics_number_of_entities().query)
    data["nentities"] = results[0]["no"]
    results = ql.process_query(sqb.get_statistics_distinct_classes().query)
    data["nclasses"] = results[0]["no"]
    # Get the number of graphs
    results = ql.process_query(sqb.get_statistics_number_of_graphs().query)
    data["ngraphs"] = results[0]["no"]
    self.log.debug("=== LIST OF METADATAS ")
    # Get the list of named graphs
    namedGraphsResults = ql.execute_query(sqb.get_list_named_graphs().query)
    namedGraphsMetadatas = {}
    # Get a dictionnary containing the metadatas for each graph
    for indexResult in range(len(namedGraphsResults['results']['bindings'])):
        metadatasResults = ql.execute_query(sqb.get_metadatas(namedGraphsResults['results']['bindings'][indexResult]['g']['value']).query)
        metadatas = {}
        # map each provenance/Dublin Core predicate onto a friendly key
        for indexMetadatasResults in range(len(metadatasResults['results']['bindings'])):
            if metadatasResults['results']['bindings'][indexMetadatasResults]['p']['value'] == "http://www.w3.org/ns/prov#generatedAtTime":
                metadatas['loadDate'] = metadatasResults['results']['bindings'][indexMetadatasResults]['o']['value']
            if metadatasResults['results']['bindings'][indexMetadatasResults]['p']['value'] == "http://purl.org/dc/elements/1.1/creator":
                metadatas['username'] = metadatasResults['results']['bindings'][indexMetadatasResults]['o']['value']
            if metadatasResults['results']['bindings'][indexMetadatasResults]['p']['value'] == "http://purl.org/dc/elements/1.1/hasVersion":
                metadatas['version'] = metadatasResults['results']['bindings'][indexMetadatasResults]['o']['value']
            if metadatasResults['results']['bindings'][indexMetadatasResults]['p']['value'] == "http://www.w3.org/ns/prov#describesService":
                metadatas['server'] = metadatasResults['results']['bindings'][indexMetadatasResults]['o']['value']
            if metadatasResults['results']['bindings'][indexMetadatasResults]['p']['value'] == "http://www.w3.org/ns/prov#wasDerivedFrom":
                metadatas['filename'] = metadatasResults['results']['bindings'][indexMetadatasResults]['o']['value']
        namedGraphsMetadatas[namedGraphsResults['results']['bindings'][indexResult]['g']['value']] = metadatas
    data['metadata'] = namedGraphsMetadatas
    # Get the list of classes
    res_list_classes = ql.process_query(sqb.get_statistics_list_classes().query)
    data["class"] = {}
    for obj in res_list_classes:
        print(obj['class'])
        class_name = pm.remove_prefix(obj['class'])
        print(class_name)
        data["class"][class_name] = {}
    # Get the number of instances by class
    res_nb_instances = ql.process_query(sqb.get_statistics_nb_instances_by_classe().query)
    for obj in res_nb_instances:
        if 'class' in obj:
            print(data['class'])
            class_name = pm.remove_prefix(obj['class'])
            data["class"][class_name]["count"] = obj['count']
    return data
def drop_graph(self, graph):
    """Drop the given named graph from the triplestore."""
    builder = SparqlQueryBuilder(self.settings, self.request.session)
    launcher = QueryLauncher(self.settings, self.request.session)
    launcher.execute_query(builder.get_drop_named_graph(graph).query)
def statistics(self):
    """
    Get information about triplet store

    Returns a dict with triple/entity/class/graph counts, per-graph
    metadata, and per-class instance counts.
    """
    self.log.debug("== STATS ==")
    data = {}
    pm = ParamManager(self.settings, self.request.session)
    sqb = SparqlQueryBuilder(self.settings, self.request.session)
    ql = QueryLauncher(self.settings, self.request.session)
    tse = TripleStoreExplorer(self.settings, self.request.session)
    # total triples = user graphs + AskOmics internal graphs
    results = ql.process_query(
        sqb.get_statistics_number_of_triples().query)
    resultsGraphs = ql.process_query(
        sqb.get_statistics_number_of_triples_AskOmics_graphs().query)
    data["ntriples"] = int(results[0]["no"]) + int(resultsGraphs[0]["no"])
    results = ql.process_query(
        sqb.get_statistics_number_of_entities().query)
    data["nentities"] = results[0]["no"]
    results = ql.process_query(sqb.get_statistics_distinct_classes().query)
    data["nclasses"] = results[0]["no"]
    # Get the number of graphs
    results = ql.process_query(sqb.get_statistics_number_of_graphs().query)
    data["ngraphs"] = results[0]["no"]
    self.log.debug("=== LIST OF METADATAS ")
    # Get the list of named graphs
    namedGraphsResults = ql.execute_query(
        sqb.get_list_named_graphs().query)
    namedGraphsMetadatas = {}
    # Get a dictionnary containing the metadatas for each graph
    for indexResult in range(len(
            namedGraphsResults['results']['bindings'])):
        metadatasResults = ql.execute_query(
            sqb.get_metadatas(namedGraphsResults['results']['bindings']
                              [indexResult]['g']['value']).query)
        metadatas = {}
        # map each provenance/Dublin Core predicate onto a friendly key
        for indexMetadatasResults in range(
                len(metadatasResults['results']['bindings'])):
            if metadatasResults['results']['bindings'][
                    indexMetadatasResults]['p'][
                        'value'] == "http://www.w3.org/ns/prov#generatedAtTime":
                metadatas['loadDate'] = metadatasResults['results'][
                    'bindings'][indexMetadatasResults]['o']['value']
            if metadatasResults['results']['bindings'][
                    indexMetadatasResults]['p'][
                        'value'] == "http://purl.org/dc/elements/1.1/creator":
                metadatas['username'] = metadatasResults['results'][
                    'bindings'][indexMetadatasResults]['o']['value']
            if metadatasResults['results']['bindings'][
                    indexMetadatasResults]['p'][
                        'value'] == "http://purl.org/dc/elements/1.1/hasVersion":
                metadatas['version'] = metadatasResults['results'][
                    'bindings'][indexMetadatasResults]['o']['value']
            if metadatasResults['results']['bindings'][
                    indexMetadatasResults]['p'][
                        'value'] == "http://www.w3.org/ns/prov#describesService":
                metadatas['server'] = metadatasResults['results'][
                    'bindings'][indexMetadatasResults]['o']['value']
            if metadatasResults['results']['bindings'][
                    indexMetadatasResults]['p'][
                        'value'] == "http://www.w3.org/ns/prov#wasDerivedFrom":
                metadatas['filename'] = metadatasResults['results'][
                    'bindings'][indexMetadatasResults]['o']['value']
        namedGraphsMetadatas[namedGraphsResults['results']['bindings']
                             [indexResult]['g']['value']] = metadatas
    data['metadata'] = namedGraphsMetadatas
    # Get the list of classes
    res_list_classes = ql.process_query(
        sqb.get_statistics_list_classes().query)
    data["class"] = {}
    for obj in res_list_classes:
        print(obj['class'])
        class_name = pm.remove_prefix(obj['class'])
        print(class_name)
        data["class"][class_name] = {}
    # Get the number of instances by class
    res_nb_instances = ql.process_query(
        sqb.get_statistics_nb_instances_by_classe().query)
    for obj in res_nb_instances:
        if 'class' in obj:
            print(data['class'])
            class_name = pm.remove_prefix(obj['class'])
            data["class"][class_name]["count"] = obj['count']
    return data
def manageModules(self, host_url, urimodule, namemodule, active):
    """Activate or deactivate a module.

    :param host_url: base URL of the AskOmics host, forwarded to importRDF
    :param urimodule: URI identifying the module on the triplestore
    :param namemodule: module name, key into self.moduleFiles
    :param active: True to activate the module, False to deactivate it
    :raises ValueError: when urimodule is not known on the triplestore
    """
    self.log.debug(" --======================> manageModules <========================--- ")
    self.log.debug(" uri:" + urimodule)
    self.log.debug(" namemodule:" + namemodule)
    self.log.debug(" active:" + str(active))

    listMo = self.getListModules()
    mo = None
    for i in listMo:
        if i["uri"] == urimodule:
            mo = i
            break
    if mo is None:  # PEP 8: identity test instead of '== None'
        raise ValueError("Can not find Mo on TPS !")

    # another operation is already processing this module: do nothing
    if mo['state'] == 'wait':
        self.log.debug(" ****************** WAIT MODE **************** :" + urimodule)
        return

    # mark the module as busy while we (de)activate it
    self.log.debug(" delete MO state :" + urimodule)
    self.deleteMoState(urimodule)
    self.log.debug(" insert new MO state :" + urimodule)
    self.importMoSate(mo, "wait")

    ql = QueryLauncher(self.settings, self.session)
    if active:
        try:
            self.importRDF(mo, namemodule, host_url)
            #loading owl file
            if 'owl' in self.moduleFiles[namemodule] and self.moduleFiles[namemodule]['owl'].strip() != '':
                ql.load_data(self.moduleFiles[namemodule]['owl'], mo['graph'])
        except Exception as e:
            # activation failed: roll the state back to "off" and re-raise
            self.log.error('failed: ' + str(e))
            self.log.debug(" delete MO state :" + urimodule)
            self.deleteMoState(urimodule)
            self.log.debug(" insert new MO state :" + urimodule)
            self.importMoSate(mo, "off")
            raise e

        self.log.debug(" delete MO state :" + urimodule)
        self.deleteMoState(urimodule)
        self.log.debug(" insert new MO state :" + urimodule)
        self.importMoSate(mo, "ok")

        # manage owl if dos not exist in the MO file
        if 'rdf' not in self.moduleFiles[namemodule]:
            self.moduleFiles[namemodule]['rdf'] = []
        if len(self.moduleFiles[namemodule]['rdf']) <= 0:
            # NOTE(review): import/save assumed to belong to the empty-rdf
            # branch, matching the generated abstract RDF — confirm intent
            self.moduleFiles[namemodule]['rdf'] = self.generateAbstractAskomicsRDF(mo['graph'])
            self.importRDF(mo, namemodule, host_url, mo['graph'])
            self.saveMo(namemodule)
    else:
        # deactivation: drop the module graph and its metadata, mark "off"
        if 'graph' in mo:
            sqb = SparqlQueryBuilder(self.settings, self.session)
            ql.execute_query(sqb.get_drop_named_graph(mo['graph']).query)
            ql.execute_query(sqb.get_delete_metadatas_of_graph(mo['graph']).query)
        self.log.debug(" delete MO state :" + urimodule)
        self.deleteMoState(urimodule)
        self.log.debug(" insert new MO state :" + urimodule)
        self.importMoSate(mo, "off")