def __init__(self, relation_type, identifier, label, identifier_prefix, rdfs_domain, prefixDomain, rdfs_range, prefixRange):
    """Describe an abstracted relation between RDF entities.

    :param relation_type: source column type ("entity*", "goterm", or a literal type)
    :param identifier: raw header, optionally suffixed with "@range"
    :param label: human label; falls back to the bare identifier when empty
    :param identifier_prefix: kept for interface compatibility (unused here)
    :param rdfs_domain: domain entity name, encoded with prefixDomain
    :param prefixDomain: URI prefix for the domain
    :param rdfs_range: default rdfs:range (may be overridden for goterms)
    :param prefixRange: kept for interface compatibility (unused here)
    """
    # Strip an optional "@range" suffix to obtain the bare relation name.
    # (fix: removed the unused local `type_range` and the duplicated
    # prefix-stripping logic the label branch repeated.)
    idx = identifier.find("@")
    uridi = identifier[0:idx] if idx > 0 else identifier

    # An empty label defaults to the bare identifier.
    self.label = uridi if label == "" else label

    self.uri = ParamManager.encode_to_rdf_uri(uridi, prefix="askomics:")
    self.rdfs_range = rdfs_range

    if relation_type.startswith("entity"):
        self.relation_type = "owl:ObjectProperty"
    elif relation_type == "goterm":
        # GO terms link to OWL classes instead of plain literals.
        self.relation_type = "owl:ObjectProperty"
        self.rdfs_range = "owl:Class"
    else:
        self.relation_type = "owl:DatatypeProperty"

    self.rdfs_domain = ParamManager.encode_to_rdf_uri(rdfs_domain, prefixDomain)
    self.log = logging.getLogger(__name__)
def test_get_database_user_directory(self):
    """get_database_user_directory must return an existing directory,
    both with a logged-in user and after the username is removed."""
    manager = ParamManager(self.settings, self.request.session)
    assert os.path.isdir(manager.get_database_user_directory())
    # Dropping the username must still yield a valid (shared) directory.
    del self.request.session['username']
    assert os.path.isdir(manager.get_database_user_directory())
def test_get_param(self):
    """set_param/get_param round-trip; an unknown key yields an empty string."""
    manager = ParamManager(self.settings, self.request.session)
    manager.set_param("test", "testValue")
    assert manager.get_param("test") == "testValue"
    assert manager.get_param("test2") == ""
def __init__(self, settings, session):
    """Open the per-user jobs SQLite database, creating the schema if needed."""
    ParamManager.__init__(self, settings, session)
    self.log = logging.getLogger(__name__)
    self.databasename = "jobs.db"
    self.pathdb = self.get_database_user_directory() + "/" + self.databasename
    self.log.info(" ==> " + self.pathdb + "<==")

    connection = sqlite3.connect("file:" + self.pathdb, uri=True)
    cursor = connection.cursor()
    # Idempotent schema creation: one row per submitted job.
    create_table = '''CREATE TABLE IF NOT EXISTS jobs
        (jobID INTEGER PRIMARY KEY AUTOINCREMENT,
         type text,
         state text,
         start int,
         end int,
         data text,
         file text,
         preview string,
         requestGraph string,
         nr int)'''
    cursor.execute(create_table)
    connection.commit()
    connection.close()
def __init__(self, settings, session):
    """Locate the shared endpoints database and make sure it exists."""
    ParamManager.__init__(self, settings, session)
    self.log = logging.getLogger(__name__)
    self.databasename = "endpoints.db"
    # The endpoints DB is shared across users, hence the common directory.
    self.pathdb = self.get_common_user_directory() + "/" + self.databasename
    self.create_db()
def __init__(self, settings, session):
    """Open the per-user jobs SQLite database (schema with `variates`)."""
    ParamManager.__init__(self, settings, session)
    self.log = logging.getLogger(__name__)
    self.databasename = "jobs.db"
    self.pathdb = self.get_database_user_directory() + "/" + self.databasename
    self.log.info(" ==> " + self.pathdb + "<==")

    connection = sqlite3.connect("file:" + self.pathdb, uri=True)
    cursor = connection.cursor()
    # Idempotent schema creation; `variates` stores the query's variables.
    schema_sql = '''CREATE TABLE IF NOT EXISTS jobs
        (jobID INTEGER PRIMARY KEY AUTOINCREMENT,
         type text,
         state text,
         start int,
         end int,
         data text,
         file text,
         preview string,
         requestGraph string,
         variates string,
         nr int)'''
    cursor.execute(schema_sql)
    connection.commit()
    connection.close()
def test_get_rdf_directory(self):
    """The RDF directory must exist and be transparently recreated."""
    manager = ParamManager(self.settings, self.request.session)
    rdf_dir = manager.get_rdf_directory()
    assert os.path.isdir(rdf_dir)
    # Wipe it and check that a second call recreates it.
    shutil.rmtree(rdf_dir)
    assert os.path.isdir(manager.get_rdf_directory())
def test_encode_to_rdf_uri(self):
    """Values are wrapped in <>, pre-wrapped prefixes are accepted,
    and reserved characters are escaped."""
    uri = ParamManager.encode_to_rdf_uri("A", prefix="http://helloworld/test/")
    assert uri == "<http://helloworld/test/A>"
    # A prefix already wrapped in angle brackets must not be double-wrapped.
    uri = ParamManager.encode_to_rdf_uri("A", prefix="<http://helloworld/test/>")
    assert uri == "<http://helloworld/test/A>"
    # '<' and '>' inside the value must be escaped.
    uri = ParamManager.encode_to_rdf_uri("A<A>A", prefix="<http://helloworld/test/>")
    assert uri == "<http://helloworld/test/A_s3_3CA_s3_3EA>"
def __init__(self, settings, session):
    """Record the RDF serialisations this component can handle."""
    ParamManager.__init__(self, settings, session)
    # Accepted RDF formats (MIME type or file extension).
    self.manage_rdf_format = [
        'application/rdf+xml',
        'owl',
        'rdf',
        'n3',
        'nt',
        'json-ld',
    ]
    self.log = logging.getLogger(__name__)
def test_get_turtle_template(self):
    """None must raise; a valid turtle snippet must be accepted."""
    manager = ParamManager(self.settings, self.request.session)
    raised = False
    try:
        manager.get_turtle_template(None)
    except Exception:
        raised = True
    assert raised
    # A well-formed triple must not raise.
    manager.get_turtle_template(":a :b :c.")
def test_get_rdf_user_directory(self):
    """The per-user RDF directory must be recreated after removal,
    even once the username is gone from the session."""
    manager = ParamManager(self.settings, self.request.session)
    user_dir = manager.get_rdf_user_directory()
    assert os.path.isdir(user_dir)
    shutil.rmtree(user_dir)
    # Anonymous sessions must still get a usable directory.
    del self.request.session['username']
    assert os.path.isdir(manager.get_rdf_user_directory())
def test_get_json_user_directory(self):
    """The per-user JSON directory must be recreated after removal,
    even once the username is gone from the session."""
    manager = ParamManager(self.settings, self.request.session)
    user_dir = manager.get_json_user_directory()
    assert os.path.isdir(user_dir)
    shutil.rmtree(user_dir)
    # Anonymous sessions must still get a usable directory.
    del self.request.session['username']
    assert os.path.isdir(manager.get_json_user_directory())
def __init__(self, settings, session, path, preview_limit):
    """Represent one tabular source file to be converted to RDF.

    :param settings: application settings (forwarded to ParamManager)
    :param session: user session (forwarded to ParamManager)
    :param path: filesystem path of the tabular file
    :param preview_limit: number of lines shown in the preview
    """
    ParamManager.__init__(self, settings, session)

    self.timestamp = str(time.time())
    self.path = path
    # The name should not contain extension as dots are not allowed in rdf names
    self.name = os.path.splitext(os.path.basename(path))[0]
    # FIXME check name uniqueness as we remove extension (collision if uploading example.tsv and example.txt)

    self.preview_limit = preview_limit

    # First column is always the entity itself; other columns get their
    # type guessed/forced later.
    self.forced_column_types = ['entity']
    # Distinct values seen per category column (filled during parsing).
    self.category_values = defaultdict(set)

    # Map from column type to the xsd/RDF type used in the generated turtle.
    # ':' means "an askomics-prefixed resource", '' means "raw value".
    self.type_dict = {
        'numeric': 'xsd:decimal',
        'text': 'xsd:string',
        'category': ':',
        'taxon': ':',
        'ref': ':',
        'strand': ':',
        'start': 'xsd:decimal',
        'end': 'xsd:decimal',
        'entity': ':',
        'entitySym': ':',
        'entity_start': ':',
        'entityGoterm': ''}

    # (open, close) delimiters wrapped around each serialized value,
    # e.g. quotes for literals, ':' prefix for resources.
    self.delims = {
        'numeric': ('', ''),
        'text': ('"', '"'),
        'category': (':', ''),
        'taxon': (':', ''),
        'ref': (':', ''),
        'strand': (':', ''),
        'start': ('', ''),
        'end': ('', ''),
        'entity': (':', ''),
        'entitySym': (':', ''),
        'entity_start': (':', ''),
        'entityGoterm': ('"', '"')}

    # Provenance recorded alongside the integrated data; loadDate/server/
    # graphName are filled at integration time.
    self.metadatas = {
        'loadDate': '',
        'username': getpass.getuser(),
        'fileName': self.name,
        'version': get_distribution('Askomics').version,
        'server': '',
        'graphName': ''}

    self.log = logging.getLogger(__name__)

    self.reset_cache()
def upload_file_into_history(self, filename):
    """Upload a file (present in user upload directory) into the testing history

    :param filename: the file to upload
    """
    upload_dir = ParamManager(self.settings, self.request.session).get_upload_directory()
    self.galaxy_instance.tools.upload_file(upload_dir + filename,
                                           self.history_id,
                                           file_name=filename)
def statistics(self): """ Get information about triplet store """ self.log.debug("== STATS ==") data = {} pm = ParamManager(self.settings, self.request.session) sqb = SparqlQueryBuilder(self.settings, self.request.session) ql = QueryLauncher(self.settings, self.request.session) tse = TripleStoreExplorer(self.settings, self.request.session) results = ql.process_query(sqb.get_statistics_number_of_triples().query) data["ntriples"] = results[0]["no"] results = ql.process_query(sqb.get_statistics_number_of_entities().query) data["nentities"] = results[0]["no"] results = ql.process_query(sqb.get_statistics_distinct_classes().query) data["nclasses"] = results[0]["no"] # Get the list of classes res_list_classes = ql.process_query(sqb.get_statistics_list_classes().query) data["class"] = {} for obj in res_list_classes: class_name = pm.remove_prefix(obj['class']) data["class"][class_name] = {} # Get the number of instances by class res_nb_instances = ql.process_query(sqb.get_statistics_nb_instances_by_classe().query) for obj in res_nb_instances: if 'class' in obj: class_name = pm.remove_prefix(obj['class']) data["class"][class_name]["count"] = obj['count'] # Get details on relations for each classes for obj in res_list_classes: if 'class' in obj: class_name = pm.remove_prefix(obj['class']) uri = obj['class'] shortcuts_list = tse.has_setting(uri, 'shortcut') src = Node(class_name, # We don't care about counter in stats uri, class_name, shortcuts_list) attributes, nodes, links = tse.get_neighbours_for_node(src, None) data["class"][class_name]["attributes"] = [a.to_dict() for a in attributes] data["class"][class_name]["neighbours"] = [n.to_dict() for n in nodes] data["class"][class_name]["relations"] = [l.to_dict() for l in links] return data
def __init__(self, settings, session):
    """Initialise the database connector and ensure every table exists."""
    ParamManager.__init__(self, settings, session)
    self.database_path = self.get_param("askomics.database_path")

    # Create tables (all creators are idempotent).
    for create_table in (self.create_user_table,
                         self.create_galaxy_table,
                         self.create_integration_table,
                         self.create_query_table,
                         self.create_endpoints_table):
        create_table()
def test_send_to_history(self):
    """Test the send_to_history method"""
    connector = GalaxyConnector(self.settings, self.request.session,
                                self.galaxy['url'], self.galaxy['key'])
    upload_dir = ParamManager(self.settings, self.request.session).get_upload_directory()
    connector.send_to_history(upload_dir + 'play_instrument.tsv',
                              'play_instrument.tsv',
                              'tabular')
    assert self.interface_galaxy.check_dataset_presence('play_instrument.tsv') is True
def __init__(self, settings, session, name=None, endpoint=None, username=None, password=None, urlupdate=None):
    """Describe a remote SPARQL endpoint and its (optional) credentials.

    :param name: display name of the endpoint
    :param endpoint: SPARQL query URL
    :param username: optional login for the endpoint
    :param password: optional password for the endpoint
    :param urlupdate: optional SPARQL update URL
    """
    ParamManager.__init__(self, settings, session)
    self.log = logging.getLogger(__name__)

    # Keep the connection description on the instance.
    self.name = name
    self.endpoint = endpoint
    self.username = username
    self.password = password
    self.urlupdate = urlupdate

    # Updates stay disabled until explicitly enabled by the caller.
    self.allowUpdate = False
def __init__(self, settings, session):
    """Manage AskOmics modules (on-disk OWL/RDF bundles)."""
    ParamManager.__init__(self, settings, session)

    # Every module description must provide these keys.
    self.latt = ['module', 'comment', 'version', 'owl', 'rdf']
    self.moduleFiles = {}
    # Named graph under which module triples are stored.
    self.graph_modules = "askomics:graph:module"
    # Where module definition files live on disk.
    self.modulesdir = 'askomics/static/modules/'
    self.data = {}
def __init__(self, settings, session, name=None, endpoint=None, username=None, password=None, urlupdate=None, auth='Basic'):
    """Describe a remote SPARQL endpoint with an HTTP auth scheme.

    :param auth: HTTP authentication scheme, 'Basic' or 'Digest'
    :raises ValueError: if `auth` is neither 'Basic' nor 'Digest'
    """
    ParamManager.__init__(self, settings, session)
    self.log = logging.getLogger(__name__)

    self.name = name
    self.endpoint = endpoint
    self.username = username
    self.password = password
    self.urlupdate = urlupdate
    self.auth = auth
    # Updates stay disabled until explicitly enabled.
    self.allowUpdate = False

    # Only the two schemes supported by the HTTP layer are accepted.
    if self.auth not in ('Basic', 'Digest'):
        raise ValueError("Invalid Auth parameter :" + self.auth)
def __init__(self, settings, session):
    """Manage AskOmics modules (on-disk OWL/RDF bundles)."""
    ParamManager.__init__(self, settings, session)

    # Keys every module description must carry.
    self.latt = ['module', 'comment', 'version', 'owl', 'rdf']
    self.moduleFiles = {}
    # Named graph holding module triples.
    self.graph_modules = "askomics:graph:module"
    # Location of module definition files.
    self.modulesdir = 'askomics/static/modules/'
    self.data = {}
def saveStartSparqlJob(self, typeJob, requestGraph="{}", variates="{}"):
    """Insert a new job row in state 'Wait' and return its row ID.

    :param typeJob: job type label stored in the `type` column
    :param requestGraph: JSON description of the query graph (URL-quoted before storage)
    :param variates: query variables (encoded before storage)
    :return: the auto-incremented jobID of the inserted row
    """
    conn = sqlite3.connect(self.pathdb, uri=True)
    c = conn.cursor()
    requestGraph = urllib.parse.quote(requestGraph)
    variates = ParamManager.encode(str(variates))
    # (fix) Use a parameterized query: the original interpolated typeJob,
    # requestGraph and variates straight into the SQL string, which is an
    # SQL-injection vector and breaks on values containing quotes.
    reqSql = ("INSERT INTO jobs VALUES ("
              "NULL, ?, 'Wait', strftime('%s','now'), 0, NULL, '', '', ?, ?, -1)")
    c.execute(reqSql, (typeJob, requestGraph, variates))
    ID = c.lastrowid
    conn.commit()
    conn.close()
    return ID
def updateEndSparqlJob(self, jobid, state, nr=-1, data=None, file=None):
    """Mark a job as finished: record its state, end time and results.

    :param jobid: jobID of the row to update
    :param state: final state label
    :param nr: number of results (-1 when unknown)
    :param data: optional result payload, JSON-serialized then encoded
    :param file: optional result file name
    """
    import json
    conn = sqlite3.connect(self.pathdb, uri=True)
    c = conn.cursor()
    # Encode the payload; None maps to SQL NULL through the placeholder.
    encoded_data = None
    if data:
        encoded_data = ParamManager.encode(json.dumps(data, ensure_ascii=False))
    result_file = file if file else None
    # (fix) Parameterized query: the original concatenated state/data/file
    # into the SQL string, an SQL-injection vector that also broke on quotes.
    reqSql = ("UPDATE jobs SET state = ?, end = strftime('%s','now'), "
              "nr = ?, data = ?, file = ? WHERE jobID = ?")
    c.execute(reqSql, (state, nr, encoded_data, result_file, jobid))
    conn.commit()
    conn.close()
def saveStartSparqlJob(self, typeJob, requestGraph="{}", variates="{}"):
    """Insert a new 'Wait' job row and return its auto-incremented ID.

    :param typeJob: job type label stored in the `type` column
    :param requestGraph: JSON description of the query graph (URL-quoted before storage)
    :param variates: query variables (encoded before storage)
    :return: the jobID of the inserted row
    """
    conn = sqlite3.connect(self.pathdb, uri=True)
    c = conn.cursor()
    requestGraph = urllib.parse.quote(requestGraph)
    variates = ParamManager.encode(str(variates))
    # (fix) Bind values through placeholders instead of concatenating them
    # into the SQL text (SQL injection / quoting bugs in the original).
    reqSql = ("INSERT INTO jobs VALUES ("
              "NULL, ?, 'Wait', strftime('%s','now'), 0, NULL, '', '', ?, ?, -1)")
    c.execute(reqSql, (typeJob, requestGraph, variates))
    ID = c.lastrowid
    conn.commit()
    conn.close()
    return ID
def __init__(self, settings, session, type_dict=None, delims=None):
    """Hold the column-type to RDF-type mapping and value delimiters.

    :param type_dict: optional override of the default column-type mapping
    :param delims: optional override of the default value delimiters
    """
    #FIXME: Can we get dict()s from config ?
    # Default mappings are used whenever the caller passes None.
    if type_dict is None:
        type_dict = {
            'Numeric': 'xsd:decimal',
            'Text': 'xsd:string',
            'Category': ':',
            'Entity': ':'}
    if delims is None:
        delims = {
            'Numeric': ('', ''),
            'Text': ('"', '"'),
            'Category': (':', ''),
            'Entity': (':', '')}
    self.type_dict = type_dict
    self.delims = delims
    ParamManager.__init__(self, settings, session)
    self.log = logging.getLogger(__name__)
def updateEndSparqlJob(self, jobid, state, nr=-1, data=None, file=None):
    """Record the final state, end time and results of a job.

    :param jobid: jobID of the row to update
    :param state: final state label
    :param nr: number of results (-1 when unknown)
    :param data: optional result payload, JSON-serialized then encoded
    :param file: optional result file name
    """
    import json
    conn = sqlite3.connect(self.pathdb, uri=True)
    c = conn.cursor()
    # None values map to SQL NULL through the placeholders.
    payload = None
    if data:
        payload = ParamManager.encode(json.dumps(data, ensure_ascii=False))
    out_file = file if file else None
    # (fix) Parameterized statement instead of string concatenation, which
    # exposed the original to SQL injection and quoting failures.
    reqSql = ("UPDATE jobs SET state = ?, end = strftime('%s','now'), "
              "nr = ?, data = ?, file = ? WHERE jobID = ?")
    c.execute(reqSql, (state, nr, payload, out_file, jobid))
    conn.commit()
    conn.close()
def test_send_to_history(self):
    """Test the send_to_history method"""
    galaxy_connector = GalaxyConnector(self.settings,
                                       self.request.session,
                                       self.galaxy['url'],
                                       self.galaxy['key'])
    param_manager = ParamManager(self.settings, self.request.session)
    filepath = param_manager.get_upload_directory() + 'play_instrument.tsv'
    galaxy_connector.send_to_history(filepath, 'play_instrument.tsv', 'tabular')
    present = self.interface_galaxy.check_dataset_presence('play_instrument.tsv')
    assert present is True
def source_file_overview(self):
    """Get the first lines of the tabulated files to convert."""
    converter = SourceFileConvertor(self.settings, self.request.session)
    params = ParamManager(self.settings, self.request.session)

    # FIXME handle default value/value validation
    limit = int(self.settings["askomics.overview_lines_limit"])
    first_lines = converter.get_first_lines(limit)

    # FIXME there must be a more elegant solution to fetch the templates
    data = {
        "sourceFiles": {s.get_name(): s.to_dict() for s in first_lines},
        "html_template": converter.get_template(params.ASKOMICS_html_template),
        "turtle_template": params.turtle_template(),
    }
    return data
def __init__(self, settings, session):
    """Load LDAP connection parameters from the application settings."""
    ParamManager.__init__(self, settings, session)
    self.log = logging.getLogger(__name__)

    # Server location.
    self.ldap_server = self.settings['askomics.ldap_host']
    self.ldap_port = self.settings['askomics.ldap_port']
    # Bind credentials are currently unused:
    # self.ldap_bind_dn = self.settings['askomics.ldap_bind_dn']
    # self.ldap_bind_passwd = self.settings['askomics.ldap_bind_passwd']

    # How to search for a user entry and which attributes to read from it.
    self.ldap_user_search_base = self.settings['askomics.ldap_user_search_base']
    self.ldap_user_filter = self.settings['askomics.ldap_user_filter']
    self.ldap_username_attr = self.settings['askomics.ldap_username_attr']
    self.ldap_email_attr = self.settings['askomics.ldap_email_attr']

    # Filled in once a user authenticates.
    self.username = None
    self.password = None
    self.email = None
def __init__(self, settings, session, name=None, endpoint=None, username=None, password=None, urlupdate=None):
    """Keep the description of a remote SPARQL endpoint.

    :param name: display name of the endpoint
    :param endpoint: SPARQL query URL
    :param username: optional login
    :param password: optional password
    :param urlupdate: optional SPARQL update URL
    """
    ParamManager.__init__(self, settings, session)
    self.log = logging.getLogger(__name__)

    # Store the connection description as-is.
    for attribute, value in (('name', name),
                             ('endpoint', endpoint),
                             ('username', username),
                             ('password', password),
                             ('urlupdate', urlupdate)):
        setattr(self, attribute, value)

    # Updates are off by default.
    self.allowUpdate = False
def __init__(self, settings, session, username, email, password, password2):
    """Create a user candidate: store identity and a salted password hash.

    :param username: requested user name
    :param email: user email address
    :param password: chosen password
    :param password2: confirmation of the password (checked elsewhere)
    """
    ParamManager.__init__(self, settings, session)
    self.log = logging.getLogger(__name__)
    self.username = str(username)
    self.email = str(email)
    self.passwd = str(password)
    self.passwd2 = str(password2)
    # New users start non-admin and blocked until validated.
    self.admin = False
    self.blocked = True
    self.galaxy = False
    # Concatenate the askomics salt, the password and a random salt, then
    # hash with SHA-512.
    # NOTE(review): the attribute is named `sha256_pw` but actually holds a
    # SHA-512 digest (hashlib.sha512 below). The name is kept because other
    # code reads this attribute — renaming would break callers.
    # see --"https://en.wikipedia.org/wiki/Salt_(cryptography)"-- for more info about salt
    alpabet = "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
    self.randomsalt = ''.join(random.choice(alpabet) for i in range(20))
    salted_pw = self.settings["askomics.salt"] + self.passwd + self.randomsalt
    self.sha256_pw = hashlib.sha512(salted_pw.encode('utf8')).hexdigest()
def __init__(self, settings, session, username, email, password, password2):
    """Create a user candidate: store identity and a salted SHA-256 hash.

    :param username: requested user name
    :param email: user email address
    :param password: chosen password
    :param password2: confirmation of the password (checked elsewhere)
    """
    ParamManager.__init__(self, settings, session)
    self.log = logging.getLogger(__name__)

    self.username = str(username)
    self.email = str(email)
    self.passwd = str(password)
    self.passwd2 = str(password2)
    # New users start non-admin and blocked until validated.
    self.admin = False
    self.blocked = True
    self.galaxy = False

    # Hash of askomics-salt + password + per-user random salt (SHA-256).
    # see --"https://en.wikipedia.org/wiki/Salt_(cryptography)"-- for more
    # info about salting.
    alpabet = "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
    self.randomsalt = ''.join(random.choice(alpabet) for _ in range(20))
    salted_pw = self.settings["askomics.salt"] + self.passwd + self.randomsalt
    self.sha256_pw = hashlib.sha256(salted_pw.encode('utf8')).hexdigest()
def check_galaxy(self):
    """Check if user have galaxy triples"""
    launcher = QueryLauncher(self.settings, self.session)
    auth_query = SparqlQueryAuth(self.settings, self.session)
    rows = launcher.process_query(auth_query.check_galaxy(self.username).query)
    return ParamManager.Bool(rows[0]['status'])
def __init__(self, settings, session, path, uri_set=None):
    """Represent a source file and its per-column URI prefixes.

    :param path: filesystem path of the source file
    :param uri_set: optional {column index: uri prefix} overrides
    """
    ParamManager.__init__(self, settings, session)

    self.timestamp = datetime.datetime.now().isoformat()
    self.path = path
    self.metadatas = {}
    # The name should not contain extension as dots are not allowed in rdf names
    # self.name = os.path.splitext(os.path.basename(path))[0]
    self.name = os.path.basename(path)
    # Graph name can't contain any non alphanumeric characters; replace all with _.
    self.alphanum_name = re.sub('[^0-9a-zA-Z]+', '_', self.name)

    self.graph = 'askomics:unkown:uri:graph'
    if 'graph' in self.session:
        self.graph = self.session['graph']
    self.graph = self.graph + ':' + self.alphanum_name + '_' + self.timestamp

    self.log = logging.getLogger(__name__)
    self.reset_cache()

    # (fix) removed the dead "self.uri = {}" the original assigned and then
    # immediately overwrote with the list below.
    pref_uri = "http://semanticweb.org/askomics/entity/"
    if self.is_defined('askomics.prefix'):
        pref_uri = self.get_param("askomics.prefix")
    # One prefix slot per column (20 is the supported maximum).
    self.uri = [pref_uri for _ in range(20)]

    if uri_set is not None:  # (fix) identity comparison instead of != None
        for idx, uri in uri_set.items():
            if uri:
                # A URI prefix has to end with # or /.
                if not uri.endswith('#') and not uri.endswith('/'):
                    uri = uri + "/"
                self.uri[int(idx)] = uri
            else:
                self.uri[int(idx)] = self.get_param("askomics.prefix")
def __init__(self, settings, session, path, uri_set=None):
    """Represent a source file and its per-column URI prefixes.

    :param path: filesystem path of the source file
    :param uri_set: optional {column index: uri prefix} overrides
    """
    ParamManager.__init__(self, settings, session)

    self.timestamp = datetime.datetime.now().isoformat()
    self.path = path
    self.metadatas = {}
    # The name should not contain extension as dots are not allowed in rdf names
    # self.name = os.path.splitext(os.path.basename(path))[0]
    self.name = os.path.basename(path)
    # Graph names must be alphanumeric; replace everything else with _.
    self.alphanum_name = re.sub('[^0-9a-zA-Z]+', '_', self.name)

    self.graph = 'askomics:unkown:uri:graph'
    if 'graph' in self.session:
        self.graph = self.session['graph']
    self.graph = self.graph + ':' + self.alphanum_name + '_' + self.timestamp

    self.log = logging.getLogger(__name__)
    self.reset_cache()

    # (fix) dropped the dead "self.uri = {}" assignment that was always
    # overwritten by the list below.
    default_prefix = "http://semanticweb.org/askomics/entity/"
    if self.is_defined('askomics.prefix'):
        default_prefix = self.get_param("askomics.prefix")
    # One prefix slot per column (up to 20 columns).
    self.uri = [default_prefix for _ in range(20)]

    if uri_set is not None:  # (fix) identity comparison instead of != None
        for idx, uri in uri_set.items():
            if uri:
                # A URI prefix has to end with # or /.
                if not uri.endswith('#') and not uri.endswith('/'):
                    uri = uri + "/"
                self.uri[int(idx)] = uri
            else:
                self.uri[int(idx)] = self.get_param("askomics.prefix")
def get_admin_blocked_by_email(self):
    """ get the admin status of the user by his username

    :return: dict with 'blocked' and 'admin' booleans for self.email
    """
    query_laucher = QueryLauncher(self.settings, self.session)
    sqa = SparqlQueryAuth(self.settings, self.session)
    result = query_laucher.process_query(
        sqa.get_admin_blocked_by_email(self.email).query)

    results = {}
    if len(result) <= 0:
        # Unknown email: fail safe — blocked and NOT admin.
        # (fix) the original defaulted admin to True here, silently granting
        # admin status for any email absent from the triple store; the SQL
        # variant of this method defaults admin to False.
        results['blocked'] = True
        results['admin'] = False
    else:
        results['blocked'] = ParamManager.Bool(result[0]['blocked'])
        results['admin'] = ParamManager.Bool(result[0]['admin'])

    return results
def __init__(self, relation_type, identifier, rdfs_domain, rdfs_range):
    """Describe a relation (column) and derive its URI, range and domain.

    :param relation_type: column type ("entity*", "goterm", a category-like
        type, or a literal type)
    :param identifier: raw header, optionally of the form "label@range"
    :param rdfs_domain: domain entity name (encoded below)
    :param rdfs_range: rdfs:range used for datatype properties
    """
    idx = identifier.find("@")
    type_range = identifier
    # Keep compatibility with old version: "label@range" headers carry the
    # range after the '@'; a plain header is both label and range.
    if idx != -1:
        type_range = identifier[idx + 1:len(identifier)]
        self.label = identifier[0:idx]
    else:
        self.label = identifier

    # A label already containing ':' is assumed to be a prefixed URI.
    if self.label.find(":") < 0:
        self.uri = ":" + ParamManager.encode_to_rdf_uri(self.label)
    else:
        self.uri = self.label

    self.col_type = relation_type

    if relation_type.startswith("entity"):
        # Entity columns become object properties pointing at the range entity.
        self.relation_type = "owl:ObjectProperty"
        if type_range.find(":") < 0:
            self.rdfs_range = ":" + ParamManager.encode_to_rdf_uri(
                type_range)
        else:
            self.rdfs_range = type_range
    elif relation_type == "goterm":
        # GO terms point at OWL classes.
        self.relation_type = "owl:ObjectProperty"
        self.rdfs_range = "owl:Class"
    elif relation_type.lower() in ('category', 'taxon', 'ref', 'strand'):
        # Category-like columns point at a generated "<range>Category" class.
        self.relation_type = "owl:ObjectProperty"
        self.rdfs_range = ":" + ParamManager.encode_to_rdf_uri(type_range + "Category")
    else:
        # Everything else is a literal-valued datatype property.
        self.relation_type = "owl:DatatypeProperty"
        self.rdfs_range = rdfs_range

    self.rdfs_domain = ":" + ParamManager.encode_to_rdf_uri(rdfs_domain)

    self.log = logging.getLogger(__name__)
def listJobs(self):
    """Return every job row as a list of dicts (empty when the DB is absent).

    :return: list of job dicts with decoded data/requestGraph/variates
    """
    import ast
    import json
    data = []
    conn = None
    try:
        conn = sqlite3.connect(self.pathdb, uri=True)
        conn.row_factory = sqlite3.Row
        c = conn.cursor()
        reqSql = """ SELECT jobid, type, state, start, end, data, file,
                     preview, requestGraph, variates, nr FROM jobs"""
        c.execute(reqSql)
        for row in c.fetchall():
            d = {}
            d['jobid'] = row['jobid']
            d['type'] = row['type']
            d['state'] = row['state']
            d['start'] = row['start']
            d['end'] = row['end']
            if row['data'] is not None:
                d['data'] = json.loads(ParamManager.decode(row['data']))
            if row['file'] is not None:
                d['file'] = row['file']
            d['preview'] = row['preview']
            d['requestGraph'] = urllib.parse.unquote(row['requestGraph'])
            # (fix) variates is the repr of a Python literal; parse it with
            # ast.literal_eval instead of eval(), which would execute
            # arbitrary code from the database.
            d['variates'] = ast.literal_eval(ParamManager.decode(row['variates']))
            d['nr'] = row['nr']
            data.append(d)
    except sqlite3.OperationalError:
        # Table missing: report no jobs. (fix) the original re-executed the
        # SELECT after this log line, which raised the same error again.
        self.log.info("Jobs database does not exist .")
    finally:
        # (fix) always release the connection, even on the error path.
        if conn is not None:
            conn.close()
    return data
def listJobs(self):
    """Return every job row as a list of dicts (empty when the DB is absent).

    :return: list of job dicts with decoded data/requestGraph/variates
    """
    import ast
    import json
    data = []
    conn = None
    try:
        conn = sqlite3.connect(self.pathdb, uri=True)
        conn.row_factory = sqlite3.Row
        cursor = conn.cursor()
        reqSql = """ SELECT jobid, type, state, start, end, data, file,
                     preview, requestGraph, variates, nr FROM jobs"""
        cursor.execute(reqSql)
        for row in cursor.fetchall():
            job = {}
            job['jobid'] = row['jobid']
            job['type'] = row['type']
            job['state'] = row['state']
            job['start'] = row['start']
            job['end'] = row['end']
            if row['data'] is not None:
                job['data'] = json.loads(ParamManager.decode(row['data']))
            if row['file'] is not None:
                job['file'] = row['file']
            job['preview'] = row['preview']
            job['requestGraph'] = urllib.parse.unquote(row['requestGraph'])
            # (fix) parse the stored Python-literal with ast.literal_eval
            # instead of eval(), which executed arbitrary code from the DB.
            job['variates'] = ast.literal_eval(ParamManager.decode(row['variates']))
            job['nr'] = row['nr']
            data.append(job)
    except sqlite3.OperationalError:
        # Table missing: report no jobs. (fix) the original re-ran the SELECT
        # after logging, which simply raised the same error again.
        self.log.info("Jobs database does not exist .")
    finally:
        # (fix) close the connection on every path.
        if conn is not None:
            conn.close()
    return data
def __init__(self, request):
    """Configure the upload view: CORS headers, upload directory and limits.

    :param request: the incoming pyramid request
    """
    self.log = logging.getLogger(__name__)
    self.request = request
    # CORS: allow any origin and the standard verbs on upload responses.
    request.response.headers['Access-Control-Allow-Origin'] = '*'
    request.response.headers['Access-Control-Allow-Methods'] = 'OPTIONS, HEAD, GET, POST, PUT, DELETE'
    #self.dir_string = '__' + self.request.session['username'] + '__'
    # Set the tmp dir
    #if 'upload_directory' not in request.session.keys() or self.dir_string not in request.session['upload_directory'] or not os.path.isdir(request.session['upload_directory']):
    #    request.session['upload_directory'] = tempfile.mkdtemp(suffix='_tmp', prefix='__' + self.request.session['username'] + '__')
    self.settings = request.registry.settings
    # The upload directory now comes from ParamManager instead of the
    # session-scoped tempdir above.
    pm = ParamManager(self.settings, self.request.session)
    self.upload_dir = pm.get_upload_directory()
    #self.upload_dir = request.session['upload_directory']
    self.log.debug("upload_directory => "+self.upload_dir)
    # Upload policy from settings: accepted types, delete behaviour, size bounds.
    self.allowed_types = self.settings["askomics.allowed_file_types"]
    self.delete_method = self.settings["askomics.delete_method"]
    self.min_size = int(self.settings["askomics.upload_min_size"])
    self.max_size = int(self.settings["askomics.upload_max_size"])
def get_admin_blocked_by_email(self):
    """ get the admin status of the user by his username """
    database = DatabaseConnector(self.settings, self.session)
    query = '''
    SELECT admin, blocked FROM users
    WHERE email=?
    '''
    rows = database.execute_sql_query(query, (self.email, ))

    # Unknown email: fail safe (blocked, not admin).
    if not rows:
        return {'blocked': True, 'admin': False}
    return {
        'blocked': ParamManager.Bool(rows[0][1]),
        'admin': ParamManager.Bool(rows[0][0]),
    }
def __init__(self, settings, session, path):
    """Represent a source file plus the metadata recorded at integration.

    :param path: filesystem path of the source file
    """
    ParamManager.__init__(self, settings, session)

    self.timestamp = datetime.datetime.now().isoformat()
    self.path = path
    # The name should not contain extension as dots are not allowed in rdf names
    # FIXME check name uniqueness as we remove extension (collision if
    # uploading example.tsv and example.txt)
    self.name = os.path.splitext(os.path.basename(path))[0]

    self.log = logging.getLogger(__name__)

    # Provenance recorded with the data; loadDate/server/graphName are
    # filled in at integration time.
    self.metadatas = {
        'loadDate': '',
        'username': getpass.getuser(),
        'fileName': self.name,
        'version': get_distribution('Askomics').version,
        'server': '',
        'graphName': ''}

    self.reset_cache()
def ckeck_key_belong_user(self, key):
    """Check if a key belong to a user

    NOTE(review): the method name is misspelled ("ckeck" for "check");
    it is kept as-is because external callers use this name.

    :param key: the API key to verify against self.username
    :return: True when the key belongs to the user, False otherwise
    """
    query_laucher = QueryLauncher(self.settings, self.session)
    sqa = SparqlQueryAuth(self.settings, self.session)
    result = query_laucher.process_query(
        sqa.ckeck_key_belong_user(self.username, key).query)
    self.log.debug('---> result: ' + str(result))
    # No row at all means the key is unknown for this user.
    if len(result) <= 0:
        return False
    return ParamManager.Bool(result[0]['count'])
def test_get_users_infos(self):
    """Test get_users_infos

    Covers three cases: non-admin access (must raise), admin access with no
    users, and admin access after one signup.
    """
    self.tps.clean_up()

    # first test with non admin: get_users_infos must refuse the call.
    try:
        data = self.askview.get_users_infos()
        assert False
    except Exception as e:
        assert True

    # then, is user is admin
    self.request.session['admin'] = True
    data = self.askview.get_users_infos()

    assert data == {'result': [], 'me': 'jdoe'}  #result is empty cause there is no user

    #test with user (credential values are masked in this fixture)
    self.request.json_body = {
        'username': '******',
        'email': '*****@*****.**',
        'password': '******',
        'password2': 'iamjohndoe'
    }

    # get dir size for the expected payload below
    pm = ParamManager(self.settings, self.request.session)
    dir_size = pm.get_size(pm.get_user_dir_path())
    human_dir_size = humanize.naturalsize(dir_size)

    self.askview.signup()
    data = self.askview.get_users_infos()
    assert data == {'result': [{'ldap': False,
                                'username': '******',
                                'email': '*****@*****.**',
                                'admin': True,
                                'blocked': False,
                                'gurl': None,
                                'nquery': 0,
                                'nintegration': 0,
                                'dirsize': dir_size,
                                'hdirsize': human_dir_size}],
                    'me': 'jdoe',
                    'error': [],
                    'user_id': 1,
                    'username': '******',
                    'email': '*****@*****.**',
                    'admin': True,
                    'blocked': False,
                    'galaxy': None}
def check_email_in_database(self):
    """ Check if the email is present in the TS """
    launcher = QueryLauncher(self.settings, self.session)
    auth_query = SparqlQueryAuth(self.settings, self.session)
    rows = launcher.process_query(
        auth_query.check_email_presence(self.email).query)
    # No row means the email is unknown.
    if not rows:
        return False
    return ParamManager.Bool(rows[0]['status'])
def __init__(self, settings, session):
    """Minimal initialiser: delegate to ParamManager and set up logging."""
    ParamManager.__init__(self, settings, session)
    self.log = logging.getLogger(__name__)
def test_send_mails(self):
    """send_mails must not fail whatever subset of SMTP settings is defined."""
    manager = ParamManager(self.settings, self.request.session)
    # First call with no SMTP settings, then progressively define each one.
    smtp_steps = [None,
                  ('askomics.smtp_host', 'smtp.test.fr'),
                  ('askomics.smtp_port', '20'),
                  ('askomics.smtp_login', 'test'),
                  ('askomics.smtp_password', 'test')]
    for step in smtp_steps:
        if step is not None:
            manager.set_param(*step)
        manager.send_mails("bidon_url", "*****@*****.**", "Subject", "Message")
def test_get_rdf_directory(self):
    """get_rdf_directory must return an existing directory."""
    manager = ParamManager(self.settings, self.request.session)
    assert os.path.isdir(manager.get_rdf_directory())
def test_update_list_prefix(self):
    """update_list_prefix must accept a list of prefixes without error."""
    manager = ParamManager(self.settings, self.request.session)
    manager.update_list_prefix(["eat", "toto"])
def test_is_defined(self):
    """is_defined reports only keys that were actually set."""
    manager = ParamManager(self.settings, self.request.session)
    manager.set_param("test", "testValue")
    assert manager.is_defined("test")
    assert not manager.is_defined("test2")
def test_set_param(self):
    """set_param must accept a key/value pair without error."""
    manager = ParamManager(self.settings, self.request.session)
    manager.set_param("test", "test")