def load_data_from_file(self, fp, urlbase):
    """Load a locally created ttl file in the triplestore.

    The file is loaded using http (with load_data(url)) or with the
    filename for Fuseki (with fuseki_load_data(fp.name)).

    :param fp: a file handle for the file to load
    :param urlbase: the base URL of current askomics instance. It is used
        to let triple stores access some askomics temporary ttl files
        using http.
    :return: a dictionary with information on the success or failure
        of the operation
    :raises Exception: re-raises any triplestore error after logging it
    """
    if not fp.closed:
        # Required: otherwise data might not be really written to the file
        # before being sent to the triplestore.
        fp.flush()

    ql = QueryLauncher(self.settings, self.session)

    # An explicitly configured load URL overrides the caller-supplied base
    # (useful when the triplestore reaches askomics through a proxy).
    if self.is_defined('askomics.load_url'):
        urlbase = self.settings['askomics.load_url']

    url = urlbase + "/ttl/" + self.session['username'] + '/' + os.path.basename(fp.name)

    data = {"status": "ok"}
    try:
        if self.is_defined("askomics.file_upload_url"):
            # Direct upload (Fuseki-style): no need to expose the file over http.
            ql.upload_data(fp.name, self.graph)
        else:
            ql.load_data(url, self.graph)
    except Exception as e:
        self.log.error(self._format_exception(e))
        # Bare raise preserves the original traceback.
        raise
    finally:
        # Keep the temporary ttl file only when ttl debugging is enabled.
        if self.settings['askomics.debug_ttl'] != 'true':
            os.remove(fp.name)
    return data
def load_data_from_file(self, fp, urlbase):
    """Load a locally created ttl file in the triplestore.

    The file is loaded using http (with load_data(url)) or with the
    filename for Fuseki (with fuseki_load_data(fp.name)).

    :param fp: a file handle for the file to load
    :param urlbase: the base URL of current askomics instance. It is used
        to let triple stores access some askomics temporary ttl files
        using http.
    :return: a dictionary with information on the success or failure
        of the operation
    :raises Exception: re-raises any triplestore error after logging it
    """
    if not fp.closed:
        # Required: otherwise data might not be really written to the file
        # before being sent to the triplestore.
        fp.flush()

    ql = QueryLauncher(self.settings, self.session)

    # A configured load URL takes precedence over the caller-supplied base.
    if self.is_defined('askomics.load_url'):
        urlbase = self.settings['askomics.load_url']

    url = urlbase + "/ttl/" + self.session['username'] + '/' + os.path.basename(fp.name)

    data = {"status": "ok"}
    try:
        if self.is_defined("askomics.file_upload_url"):
            # Direct upload (Fuseki-style) instead of http fetch by the store.
            ql.upload_data(fp.name, self.graph)
        else:
            ql.load_data(url, self.graph)
    except Exception as e:
        self.log.error(self._format_exception(e))
        # Bare raise preserves the original traceback.
        raise
    finally:
        # The temporary ttl file is always removed, success or failure.
        os.remove(fp.name)
    return data
def load_data_from_file(self, fp, urlbase):
    """Load a locally created ttl file in the triplestore.

    The file is loaded over http (load_data(url)) or directly by
    filename for Fuseki-like endpoints (upload_data(fp.name)).

    :param fp: a file handle for the file to load
    :param urlbase: the base URL of current askomics instance, used so
        triple stores can fetch temporary ttl files over http
    :return: a dict describing the success or failure of the operation
    """
    if not fp.closed:
        # make sure everything is on disk before the triplestore reads it
        fp.flush()

    query_builder = SparqlQueryBuilder(self.settings, self.session)
    launcher = QueryLauncher(self.settings, self.session)

    graph_name = "askomics:graph:" + self.name + '_' + self.timestamp
    self.metadatas['graphName'] = graph_name

    # declare the new named graph as a sub-graph of the main askomics graph
    named_graph_ttl = "<{}> rdfg:subGraphOf <{}> .".format(
        graph_name, self.get_param("askomics.graph"))
    header = query_builder.header_sparql_config("")
    launcher.insert_data(named_graph_ttl, self.get_param("askomics.graph"), header)

    url = urlbase + "/ttl/" + os.path.basename(fp.name)
    self.log.debug(url)

    data = {}
    try:
        if self.is_defined("askomics.file_upload_url"):
            results = launcher.upload_data(fp.name, graph_name)
            self.metadatas['server'] = results.headers['Server']
        else:
            results = launcher.load_data(url, graph_name)
            self.metadatas['server'] = results.info()['server']
        self.metadatas['loadDate'] = self.timestamp
        data['status'] = 'ok'
    except Exception as e:
        self._format_exception(e, data=data)
    finally:
        if self.settings["askomics.debug"]:
            data['url'] = url
        else:
            # done with the temporary ttl file, drop it
            os.remove(fp.name)

    self.get_metadatas()
    return data
def load_data_from_file(self, fp, urlbase):
    """Load a locally created ttl file in the triplestore.

    Loading happens over http (load_data(url)) or by filename for
    Fuseki-like endpoints (upload_data(fp.name)).

    :param fp: file handle of the ttl file to load
    :param urlbase: base URL of the current askomics instance, letting
        the triplestore fetch temporary ttl files over http
    :return: dict with the outcome of the operation
    """
    if not fp.closed:
        fp.flush()  # data may otherwise still sit in the write buffer

    builder = SparqlQueryBuilder(self.settings, self.session)
    runner = QueryLauncher(self.settings, self.session)

    graph = "urn:sparql:" + self.name + '_' + self.timestamp
    self.metadatas['graphName'] = graph

    # record the new named graph as a sub-graph of the main graph
    subgraph_stmt = ("<" + graph + "> rdfg:subGraphOf <"
                     + self.get_param("askomics.graph") + "> .")
    sparql_header = builder.header_sparql_config("")
    runner.insert_data(subgraph_stmt, self.get_param("askomics.graph"), sparql_header)

    url = urlbase + "/ttl/" + os.path.basename(fp.name)
    self.log.debug(url)

    data = {}
    try:
        if self.is_defined("askomics.file_upload_url"):
            response = runner.upload_data(fp.name, graph)
            server = response.headers['Server']
        else:
            response = runner.load_data(url, graph)
            server = response.info()['server']
        self.metadatas['server'] = server
        self.metadatas['loadDate'] = self.timestamp
        data['status'] = 'ok'
    except Exception as error:
        self._format_exception(error, data=data)
    finally:
        if self.settings["askomics.debug"]:
            data['url'] = url
        else:
            os.remove(fp.name)  # clean up the temporary ttl file

    self.get_metadatas()
    return data
def load_data_from_url(self, url, public):
    """Insert the ttl sourcefile at ``url`` in the triplestore.

    :param url: http URL of the ttl file the triplestore will fetch
    :param public: visibility flag, forwarded to insert_metadatas
    :return: a dict with the status of the operation (plus the source
        url when debug is enabled)
    :raises Exception: re-raises any triplestore error after logging it
    """
    data = {}
    ql = QueryLauncher(self.settings, self.session)
    try:
        ql.load_data(url, self.graph)
    except Exception as e:
        self.log.error(self._format_exception(e))
        # Bare raise keeps the original traceback intact.
        raise
    finally:
        if self.settings["askomics.debug"]:
            data['url'] = url
    data["status"] = "ok"
    self.insert_metadatas(public)
    return data
def load_data_from_url(self, url, public):
    """Insert the ttl sourcefile in the TS.

    :param url: http URL of the ttl file to load
    :param public: visibility flag passed on to insert_metadatas
    :return: dict with the status of the operation
    """
    result = {}
    launcher = QueryLauncher(self.settings, self.session)
    try:
        launcher.load_data(url, self.graph)
    except Exception as e:
        self.log.error(self._format_exception(e))
        raise e
    finally:
        if self.settings["askomics.debug"]:
            result['url'] = url
    result["status"] = "ok"
    self.insert_metadatas(public)
    return result
def manageModules(self, host_url, urimodule, namemodule, active):
    """Activate or deactivate a module.

    :param host_url: base URL of the askomics host, passed to importRDF
    :param urimodule: URI identifying the module to manage
    :param namemodule: name of the module (key into self.moduleFiles)
    :param active: True to activate the module, False to deactivate it
    :raises ValueError: when no module with ``urimodule`` is found
    :raises Exception: re-raises any loading error after resetting the
        module state to "off"
    """
    self.log.debug(" --======================> manageModules <========================--- ")
    self.log.debug(" uri:" + urimodule)
    self.log.debug(" namemodule:" + namemodule)
    self.log.debug(" active:" + str(active))

    listMo = self.getListModules()
    mo = None
    for i in listMo:
        if i["uri"] == urimodule:
            mo = i
            break
    if mo is None:
        raise ValueError("Can not find Mo on TPS !")

    ##########################################################################################
    # A module already being processed is left alone.
    if mo['state'] == 'wait':
        self.log.debug(" ****************** WAIT MODE **************** :" + urimodule)
        return

    # Mark the module as "wait" while we work on it.
    self.log.debug(" delete MO state :" + urimodule)
    self.deleteMoState(urimodule)
    self.log.debug(" insert new MO state :" + urimodule)
    self.importMoSate(mo, "wait")

    ql = QueryLauncher(self.settings, self.session)

    if active:
        try:
            self.importRDF(mo, namemodule, host_url)
            # Loading owl file, when the module ships one.
            if 'owl' in self.moduleFiles[namemodule] and self.moduleFiles[namemodule]['owl'].strip() != '':
                ql.load_data(self.moduleFiles[namemodule]['owl'], mo['graph'])
        except Exception as e:
            # Loading failed: put the module back in the "off" state
            # before propagating the error (bare raise keeps traceback).
            self.log.error('failed: ' + str(e))
            self.log.debug(" delete MO state :" + urimodule)
            self.deleteMoState(urimodule)
            self.log.debug(" insert new MO state :" + urimodule)
            self.importMoSate(mo, "off")
            raise

        self.log.debug(" delete MO state :" + urimodule)
        self.deleteMoState(urimodule)
        self.log.debug(" insert new MO state :" + urimodule)
        self.importMoSate(mo, "ok")

        ##########################################################################################
        # Generate an abstraction when the MO file does not provide rdf.
        if 'rdf' not in self.moduleFiles[namemodule]:
            self.moduleFiles[namemodule]['rdf'] = []
        if len(self.moduleFiles[namemodule]['rdf']) <= 0:
            self.moduleFiles[namemodule]['rdf'] = self.generateAbstractAskomicsRDF(mo['graph'])
            self.importRDF(mo, namemodule, host_url, mo['graph'])
            self.saveMo(namemodule)
    else:
        # Deactivation: drop the module's named graph and its metadatas.
        if 'graph' in mo:
            sqb = SparqlQueryBuilder(self.settings, self.session)
            ql.execute_query(sqb.get_drop_named_graph(mo['graph']).query)
            ql.execute_query(sqb.get_delete_metadatas_of_graph(mo['graph']).query)
        self.log.debug(" delete MO state :" + urimodule)
        self.deleteMoState(urimodule)
        self.log.debug(" insert new MO state :" + urimodule)
        self.importMoSate(mo, "off")
def manageModules(self, host_url, urimodule, namemodule, active):
    """Activate or deactivate the module identified by ``urimodule``.

    :param host_url: base URL of the askomics host, passed to importRDF
    :param urimodule: URI of the module to manage
    :param namemodule: module name (key into self.moduleFiles)
    :param active: True to activate, False to deactivate
    :raises ValueError: when the module cannot be found
    """
    self.log.debug(" --======================> manageModules <========================--- ")
    self.log.debug(" uri:" + urimodule)
    self.log.debug(" namemodule:" + namemodule)
    self.log.debug(" active:" + str(active))

    mo = next((module for module in self.getListModules()
               if module["uri"] == urimodule), None)
    if mo is None:
        raise ValueError("Can not find Mo on TPS !")

    def _switch_state(state):
        # replace the stored MO state with *state*
        self.log.debug(" delete MO state :" + urimodule)
        self.deleteMoState(urimodule)
        self.log.debug(" insert new MO state :" + urimodule)
        self.importMoSate(mo, state)

    ##########################################################################################
    if mo['state'] == 'wait':
        self.log.debug(" ****************** WAIT MODE **************** :" + urimodule)
        return

    _switch_state("wait")

    ql = QueryLauncher(self.settings, self.session)

    if active:
        try:
            self.importRDF(mo, namemodule, host_url)
            # loading owl file
            module_files = self.moduleFiles[namemodule]
            if 'owl' in module_files and module_files['owl'].strip() != '':
                ql.load_data(module_files['owl'], mo['graph'])
        except Exception as e:
            self.log.error('failed: ' + str(e))
            _switch_state("off")
            raise e
        _switch_state("ok")

        ##########################################################################################
        # manage owl if it does not exist in the MO file
        if 'rdf' not in self.moduleFiles[namemodule]:
            self.moduleFiles[namemodule]['rdf'] = []
        if not self.moduleFiles[namemodule]['rdf']:
            self.moduleFiles[namemodule]['rdf'] = self.generateAbstractAskomicsRDF(mo['graph'])
            self.importRDF(mo, namemodule, host_url, mo['graph'])
            self.saveMo(namemodule)
    else:
        if 'graph' in mo:
            sqb = SparqlQueryBuilder(self.settings, self.session)
            ql.execute_query(sqb.get_drop_named_graph(mo['graph']).query)
            ql.execute_query(sqb.get_delete_metadatas_of_graph(mo['graph']).query)
        _switch_state("off")