def delete_datasets(self):
    """Remove every dataset from both the triplestore and the database"""
    query_launcher = SparqlQueryLauncher(self.app, self.session)

    for dataset in self.datasets:
        # Drop the graph from the triplestore (retry up to 3 times, 1s apart)
        if dataset.graph_name:
            Utils.redo_if_failure(self.log, 3, 1, query_launcher.drop_dataset, dataset.graph_name)
        # Remove the database record
        dataset.delete_from_db()
def delete_user_rdf(self, username):
    """Drop every triplestore graph belonging to a user

    Delete in DB, TS and filesystem

    Parameters
    ----------
    username : string
        Username to delete
    """
    explorer = TriplestoreExplorer(self.app, self.session)
    launcher = SparqlQueryLauncher(self.app, self.session)

    for graph in explorer.get_graph_of_user(username):
        # Retry up to 3 times with a 1 second delay on failure
        Utils.redo_if_failure(self.log, 3, 1, launcher.drop_dataset, graph)
def delete_datasets(self):
    """Delete the datasets from the database and the triplestore"""
    query_launcher = SparqlQueryLauncher(self.app, self.session)
    explorer = TriplestoreExplorer(self.app, self.session)

    for dataset in self.datasets:
        # Drop the graph from the triplestore (retry up to 3 times, 1s apart)
        if dataset.graph_name:
            Utils.redo_if_failure(self.log, 3, 1, query_launcher.drop_dataset, dataset.graph_name)
        # Remove the database record
        dataset.delete_from_db()
        # Invalidate the cached abstraction for this visibility level
        explorer.uncache_abstraction(public=dataset.public)
def rdfize(self, string, custom_namespace=None):
    """Rdfize a string

    Return the literal if string is an url, else, prefix it with askomics prefix

    Parameters
    ----------
    string : string
        Term to rdfize
    custom_namespace : rdflib namespace, optional
        Fallback namespace used instead of self.namespace_data

    Returns
    -------
    rdflib.???
        Rdfized term
    """
    if Utils.is_valid_url(string):
        # Already a full URI: wrap it verbatim
        return rdflib.URIRef(string)
    elif ":" in string and len(string.split(":")) == 2:
        prefix, val = string.split(":")
        if prefix:
            prefix_manager = PrefixManager(self.app, self.session)
            namespace = prefix_manager.get_namespace(prefix)
            if namespace:
                return rdflib.URIRef("{}{}".format(namespace, val))
            # NOTE(review): unknown prefixes fall through below with the
            # full "prefix:val" string still intact — confirm intended
        else:
            # If not prefix, default to entity prefix
            string = val
    # Not a URI / known CURIE: namespace the (formatted) term
    if custom_namespace:
        return custom_namespace[self.format_uri(string)]
    else:
        return self.namespace_data[self.format_uri(string)]
def get_file_name(self):
    """Get a random file name, unused in the upload directory

    Returns
    -------
    string
        file name
    """
    while True:
        candidate = Utils.get_random_string(10)
        # Keep drawing names until one is free on disk
        if not os.path.isfile("{}/{}".format(self.upload_path, candidate)):
            return candidate
def create_reset_token(self, login):
    """Insert a token into the db

    Parameters
    ----------
    login : str
        username or email

    Returns
    -------
    str
        The reset token
    """
    # Token embeds its creation time so its age can be checked later
    token = "{}:{}".format(int(time.time()), Utils.get_random_string(20))

    # Look the user up by email when the login is a valid address
    database_field = 'email' if validate_email(login) else 'username'

    database = Database(self.app, self.session)
    # Column name comes from the fixed whitelist above, never from user input
    query = """
    UPDATE users SET
    reset_token=?
    WHERE {}=?
    """.format(database_field)

    database.execute_sql_query(query, (token, login))
    return token
def replace_variables_in_blocks(self, var_to_replace):
    """Replace variables in blocks

    Parameters
    ----------
    var_to_replace : list of tuples
        var to replace in block
    """
    for var_source, var_target in var_to_replace:
        # Walk every sub-block of every block
        for block in self.triples_blocks:
            for sblock in block["sblocks"]:
                # Rewrite each triple part, leaving the optional flag alone
                for triple_dict in sblock["triples"]:
                    for key in triple_dict:
                        if key != "optional":
                            triple_dict[key] = triple_dict[key].replace(var_source, var_target)
                # Rewrite filters and values in place
                for i, filtr in enumerate(sblock["filters"]):
                    sblock["filters"][i] = filtr.replace(var_source, var_target)
                for i, val in enumerate(sblock["values"]):
                    sblock["values"][i] = val.replace(var_source, var_target)
                # Substitution may have produced duplicate triples
                sblock["triples"] = Utils.unique(sblock["triples"])
def update_apikey(self, user):
    """Create a new api key and store in the database

    Parameters
    ----------
    user : dict
        The current user

    Returns
    -------
    dict
        error, error message and updated user
    """
    database = Database(self.app, self.session)

    # Generate a fresh 20-char key and persist it
    fresh_key = Utils.get_random_string(20)
    query = '''
    UPDATE users SET
    apikey=?
    WHERE username=?
    '''
    database.execute_sql_query(query, (fresh_key, user['username']))

    user['apikey'] = fresh_key
    # No failure path here: error fields are always empty
    return {'error': False, 'error_message': '', 'user': user}
def get_uri_label(self, string):
    """Labelize a string

    Try to extract a label from an URI

    Parameters
    ----------
    string : string
        Term to extract label from

    Returns
    -------
    String
        Label
    """
    if Utils.is_valid_url(string):
        string = string.rstrip("/")
        if "/" in string:
            # Last path segment, with any trailing '#' removed
            end_term = string.split("/")[-1].rstrip("#")
            if "#" in end_term:
                # Fragment identifier: keep only the part after '#'
                end_term = end_term.split("#")[-1]
        else:
            end_term = string
    elif ":" in string and len(string.split(":")) == 2:
        # CURIE (prefix:value): the label is the value part
        end_term = string.split(":")[-1]
    else:
        end_term = string
    return end_term
def integrate(self, public=False):
    """Integrate the file into the triplestore

    Parameters
    ----------
    public : bool, optional
        Integrate in private or public graph
    """
    query_launcher = SparqlQueryLauncher(self.app, self.session)
    explorer = TriplestoreExplorer(self.app, self.session)

    self.public = public
    upload_method = self.settings.get('triplestore', 'upload_method')

    # Parse the RDF file into the in-memory graph
    self.graph_chunk.parse(self.path, format=self.type_dict[self.type])

    # Extract the metadata, then strip it from the data graph
    self.set_metadata()
    self.delete_metadata_location()

    # Metadata go in first
    query_launcher.insert_data(self.graph_metadata, self.file_graph, metadata=True)

    if upload_method == "load":
        # Dump the graph to a temp file and load it (5 retries, 5s apart)
        temp_file_name = 'tmp_{}_{}.{}'.format(Utils.get_random_string(5), self.name, self.rdf_extention)
        Utils.redo_if_failure(self.log, 5, 5, self.load_graph, self.graph_chunk, temp_file_name)
    else:
        # Direct SPARQL insert (5 retries, 5s apart)
        Utils.redo_if_failure(self.log, 5, 5, query_launcher.insert_data, self.graph_chunk, self.file_graph)

    # Invalidate the cached abstraction
    explorer.uncache_abstraction(public=self.public)

    self.set_triples_number()
def set_graphs_and_endpoints(self, entities=None, graphs=None, endpoints=None):
    """Get all public and private graphs containing the given entities

    Parameters
    ----------
    entities : list, optional
        list of entity uri
    graphs : list, optional
        restrict the result to these graphs
    endpoints : list, optional
        restrict the result to these endpoints
    """
    substrlst = []
    filter_entity_string = ''
    if entities:
        for entity in entities:
            substrlst.append("?entity_uri = <{}>".format(entity))
        filter_entity_string = 'FILTER (' + ' || '.join(substrlst) + ')'

    # Anonymous users only see public graphs; logged-in users also see their own
    filter_public_string = 'FILTER (?public = <true>)'
    if 'user' in self.session:
        filter_public_string = 'FILTER (?public = <true> || ?creator = <{}>)'.format(self.session["user"]["username"])

    query = '''
    SELECT DISTINCT ?graph ?endpoint
    WHERE {{
        ?graph :public ?public .
        ?graph dc:creator ?creator .
        GRAPH ?graph {{
            ?graph prov:atLocation ?endpoint .
            ?entity_uri a :entity .
        }}
        {}
        {}
    }}
    '''.format(filter_public_string, filter_entity_string)

    query_launcher = SparqlQueryLauncher(self.app, self.session)
    header, results = query_launcher.process_query(self.prefix_query(query))

    self.graphs = []
    self.endpoints = []
    for res in results:
        if not graphs or res["graph"] in graphs:
            self.graphs.append(res["graph"])
        # If local triplestore url is not accessible by federated query engine,
        # substitute the federation-visible url
        if res["endpoint"] == self.settings.get('triplestore', 'endpoint') and self.local_endpoint_f is not None:
            endpoint = self.local_endpoint_f
        else:
            endpoint = res["endpoint"]
        if not endpoints or endpoint in endpoints:
            self.endpoints.append(endpoint)

    self.endpoints = Utils.unique(self.endpoints)
    # More than one endpoint means the query must be federated
    self.federated = len(self.endpoints) > 1
def get_file_name(self):
    """Get a random file name

    Returns
    -------
    string
        file name
    """
    # 10-character random identifier used as the on-disk name
    random_name = Utils.get_random_string(10)
    return random_name
def replace_variables_in_triples(self, var_to_replace):
    """Replace variables in triples

    Parameters
    ----------
    var_to_replace : list of tuples
        var to replace in triples
    """
    for var_source, var_target in var_to_replace:
        # Rewrite each triple part, leaving the optional flag alone
        for triple_dict in self.triples:
            for key in triple_dict:
                if key != "optional":
                    triple_dict[key] = triple_dict[key].replace(var_source, var_target)

        for i, select in enumerate(self.selects):
            self.selects[i] = select.replace(var_source, var_target)

        for i, filtr in enumerate(self.filters):
            self.filters[i] = filtr.replace(var_source, var_target)

        # Substitution may have produced duplicates
        self.triples = Utils.unique(self.triples)
        self.selects = Utils.unique(self.selects)
def get_checked_asked_graphs(self, asked_graphs):
    """Check if asked graphs are present in public and private graphs

    Parameters
    ----------
    asked_graphs : list
        list of graphs asked by the user

    Returns
    -------
    list
        list of graphs asked by the user, in the public and private graphs
    """
    # Only graphs the user is actually allowed to see survive the intersection
    known_graphs = self.private_graphs + self.public_graphs
    return Utils.intersect(asked_graphs, known_graphs)
def persist_user_admin(self, inputs):
    """Persist a new user (admin action)

    A random 8-character password is generated for the new account and
    returned alongside the created user.

    Parameters
    ----------
    inputs : User input
        The new user info

    Returns
    -------
    dict
        The new user
    """
    generated_password = Utils.get_random_string(8)
    inputs["password"] = generated_password
    return self.persist_user(inputs, return_password=True)
def update_users_table(self):
    """Add the quota col on the users table

    Update the users table for the instance who don't have this column
    """
    default_quota = Utils.humansize_to_bytes(self.settings.get("askomics", "quota"))

    query = '''
    ALTER TABLE users
    ADD quota INTEGER NOT NULL
    DEFAULT {}
    '''.format(default_quota)

    # Best effort: the ALTER fails harmlessly when the column already exists
    try:
        self.execute_sql_query(query)
    except Exception:
        pass
def create_reset_token(self, username, old_token=False):
    """Create a reset token

    With old_token, forge a token whose timestamp lies 4 hours in the
    past; otherwise delegate to LocalAuth.

    Parameters
    ----------
    username : str
        The concerned username
    old_token : bool, optional
        Forge an aged token instead of a fresh one

    Returns
    -------
    str
        The reset token
    """
    if not old_token:
        auth = LocalAuth(self.app, self.session)
        return auth.create_reset_token(username)

    # 14400 s = 4 hours in the past
    old_timestamp = int(time.time()) - 14400
    token = "{}:{}".format(old_timestamp, Utils.get_random_string(20))
    database = Database(self.app, self.session)
    query = """
    UPDATE users SET
    reset_token=?
    WHERE username=?
    """
    database.execute_sql_query(query, (token, username))
    return token
def __init__(self, app, session, result_info, force_no_db=False):
    """init object

    Parameters
    ----------
    app : Flask
        flask app
    session :
        AskOmics session, contain the user
    result_info : dict
        Result file info
    force_no_db : bool, optional
        Ignore the db record even if result_info carries an id
    """
    Params.__init__(self, app, session)
    if "user" in self.session:
        # NOTE(review): result_path is only set when a user is in session,
        # yet file_path below depends on it — confirm callers guarantee a user
        self.result_path = "{}/{}_{}/results".format(
            self.settings.get("askomics", "data_directory"),
            self.session['user']['id'], self.session['user']['username'])

    if "id" in result_info and not force_no_db:
        # Known result: load everything from the database
        self.id = result_info["id"]
        if not self.set_info_from_db_with_id():
            return None
    else:
        # Fresh result: take values from result_info, with defaults
        self.id = result_info["id"] if "id" in result_info else None
        self.graph_state = result_info[
            "graph_state"] if "graph_state" in result_info else None
        self.graphs = result_info[
            "graphs"] if "graphs" in result_info else []
        self.endpoints = result_info[
            "endpoints"] if "endpoints" in result_info else []
        self.sparql_query = result_info[
            "sparql_query"] if "sparql_query" in result_info else None
        self.celery_id = result_info[
            "celery_id"] if "celery_id" in result_info else None
        self.file_name = result_info[
            "file_name"] if "file_name" in result_info else Utils.get_random_string(
                10)
        self.file_path = "{}/{}".format(self.result_path, self.file_name)
        self.start = None
        self.end = None
        self.nrows = 0
        self.has_form_attr = False
        self.template = False
        self.form = False
def set_quota(self, quota, username):
    """Set a new quota to a user

    Parameters
    ----------
    quota : int
        New quota
    username : string
        The concerned username
    """
    database = Database(self.app, self.session)

    query = '''
    UPDATE users SET
    quota=?
    WHERE username=?
    '''

    # Quota is provided human-readable; store it as a byte count
    quota_bytes = Utils.humansize_to_bytes(quota)
    database.execute_sql_query(query, (quota_bytes, username))
def rdfize(self, string):
    """Rdfize a string

    Return the literal if string is an url, else, prefix it with askomics prefix

    Parameters
    ----------
    string : string
        Term to rdfize

    Returns
    -------
    rdflib.???
        Rdfized term
    """
    if Utils.is_valid_url(string):
        # Full URI: use it verbatim
        return rdflib.URIRef(string)
    # Otherwise place the formatted term under the AskOmics prefix
    return self.askomics_prefix[self.format_uri(string)]
def get_uri_filter_value(self, value):
    """Get full uri from a filter value (curie or uri)

    :xxx --> :xxx
    uniprot:xxx --> <http://purl.uniprot.org/core/xxx>
    http://example.org/xxx --> <http://example.org/xxx>
    xxx --> xxx is not a valid URI or CURIE
    bla:xxx --> bla: is not a known prefix

    Parameters
    ----------
    value : string
        Input filter

    Returns
    -------
    string
        corresponding uri

    Raises
    ------
    ValueError
        Invalid URI or CURIE return a value error
    """
    if Utils().is_url(value):
        return "<{}>".format(value)
    elif ":" in value:
        # Split on the first ':' only: values holding several colons
        # (e.g. "urn:uuid:xxx") would otherwise crash with an unpacking
        # ValueError instead of raising the explicit errors below
        prefix, val = value.split(":", 1)
        if prefix:
            prefix_manager = PrefixManager(self.app, self.session)
            namespace = prefix_manager.get_namespace(prefix)
            if namespace:
                return "<{}{}>".format(namespace, val)
            else:
                raise ValueError("{}: is not a known prefix".format(prefix))
        else:
            # Leading ':' means the default AskOmics prefix: keep as-is
            return value

    raise ValueError("{} is not a valid URI or CURIE".format(value))
def download_datasets(self, datasets_id):
    """Download galaxy datasets into AskOmics

    Parameters
    ----------
    datasets_id : list
        List of Galaxy datasets id
    """
    galaxy_instance = galaxy.GalaxyInstance(self.url, self.apikey)
    files_handler = FilesHandler(self.app, self.session)

    for dataset_id in datasets_id:
        dataset = galaxy_instance.datasets.show_dataset(dataset_id)

        # Store the payload under a random local name
        local_name = Utils.get_random_string(10)
        local_path = "{}/{}".format(self.upload_path, local_name)

        galaxy_instance.datasets.download_dataset(
            dataset_id, file_path=local_path, use_default_filename=False)

        # Register the file (original Galaxy name, extension and size) in the db
        files_handler.store_file_info_in_db(
            dataset["name"], dataset["file_ext"], local_name, dataset["file_size"])
def integrate(self, public=False):
    """Integrate the file into the triplestore

    Parameters
    ----------
    public : bool, optional
        Integrate in private or public graph
    """
    sparql = SparqlQueryLauncher(self.app, self.session)

    self.public = public
    upload_method = self.settings.get('triplestore', 'upload_method')

    # Metadata go in first
    sparql.insert_data(self.get_metadata(), self.file_graph, metadata=True)

    if upload_method == "load":
        # Copy the turtle file into the ttl dir under a temp name and load it
        tmp_file_name = 'tmp_{}_{}.ttl'.format(Utils.get_random_string(5), self.name)
        temp_file_path = '{}/{}'.format(self.ttl_dir, tmp_file_name)
        copyfile(self.path, temp_file_path)

        sparql.load_data(tmp_file_name, self.file_graph, self.host_url)

        # Keep the temp file only when ttl debugging is enabled
        if not self.settings.getboolean('askomics', 'debug_ttl'):
            os.remove(temp_file_path)
    else:
        # Insert the raw turtle content directly
        with open(self.path) as ttl_file:
            sparql.insert_ttl_string(ttl_file.read(), self.user_graph)

    self.set_triples_number()
def update_pw_db(self, username, password): """Update a password in database Parameters ---------- username : str User username password : str New password """ database = Database(self.app, self.session) salt = Utils.get_random_string(20) salted_pw = self.settings.get('askomics', 'password_salt') + password + salt sha512_pw = hashlib.sha512(salted_pw.encode('utf-8')).hexdigest() query = ''' UPDATE users SET password=?, salt=? WHERE username=? ''' database.execute_sql_query(query, (sha512_pw, salt, username))
def integrate(self, dataset_id=None):
    """Integrate the file into the triplestore

    The generated RDF content is flushed chunk by chunk; the abstraction
    and domain-knowledge graphs are inserted last.

    Parameters
    ----------
    dataset_id : optional
        Dataset id used to report integration progress in the db
    """
    sparql = SparqlQueryLauncher(self.app, self.session)

    # insert metadata
    sparql.insert_data(self.get_metadata(), self.file_graph, metadata=True)

    content_generator = self.generate_rdf_content()

    # Insert content
    chunk_number = 0
    for _ in content_generator:
        # Flush the accumulated graph every time it reaches the chunk limit
        if self.graph_chunk.ntriple >= self.max_chunk_size:
            if self.graph_chunk.percent and dataset_id:
                self.update_percent_in_db(self.graph_chunk.percent, dataset_id)

            if self.method == 'load':
                # write rdf into a tmpfile and load it
                temp_file_name = 'tmp_{}_{}_chunk_{}.{}'.format(
                    Utils.get_random_string(5), self.name, chunk_number,
                    self.rdf_extention)
                # Try to load data. if failure, wait 5 sec and retry 5 time
                Utils.redo_if_failure(self.log, 5, 5, self.load_graph,
                                      self.graph_chunk, temp_file_name)
            else:
                # Insert
                # Try to insert data. if failure, wait 5 sec and retry 5 time
                Utils.redo_if_failure(self.log, 5, 5, sparql.insert_data,
                                      self.graph_chunk, self.file_graph)

            chunk_number += 1
            # Start a fresh graph for the next chunk
            self.graph_chunk = RdfGraph(self.app, self.session)

    # Load the last chunk
    if self.graph_chunk.percent and dataset_id:
        self.update_percent_in_db(100, dataset_id)

    if self.method == 'load':
        temp_file_name = 'tmp_{}_{}_chunk_{}.{}'.format(
            Utils.get_random_string(5), self.name, chunk_number,
            self.rdf_extention)
        # Try to load data. if failure, wait 5 sec and retry 5 time
        Utils.redo_if_failure(self.log, 5, 5, self.load_graph,
                              self.graph_chunk, temp_file_name)
    else:
        # Insert
        # Try to insert data. if failure, wait 5 sec and retry 5 time
        Utils.redo_if_failure(self.log, 5, 5, sparql.insert_data,
                              self.graph_chunk, self.file_graph)

    # Content is inserted, now insert abstraction and domain_knowledge
    self.set_rdf_abstraction_domain_knowledge()

    if self.method == 'load':
        temp_file_name = 'tmp_{}_{}_abstraction_domain_knowledge.{}'.format(
            Utils.get_random_string(5), self.name, self.rdf_extention)
        self.load_graph(self.graph_abstraction_dk, temp_file_name)
    else:
        # Insert
        sparql.insert_data(self.graph_abstraction_dk, self.file_graph)

    self.set_triples_number()
def build_query_from_json(self, json_query, preview=False, for_editor=False):
    """Build a sparql query for the json dict of the query builder

    Parameters
    ----------
    json_query : dict
        The json query from the query builder
    preview : bool, optional
        Add a LIMIT clause for preview results
    for_editor : bool, optional
        Build a query without FROM clauses and federation directives

    Returns
    -------
    str
        SPARQL query
    """
    entities = []
    attributes = {}
    linked_attributes = []
    self.selects = []
    triples_relations = []
    triples_attributes = []
    values = []
    filters = []
    start_end = []
    strands = []
    var_to_replace = []

    # Browse node to get graphs
    for node in json_query["nodes"]:
        if not node["suggested"]:
            entities.append(node["uri"])

    self.set_graphs_and_endpoints(entities=entities)

    # self.log.debug(json_query)

    # Browse links (relations)
    for link in json_query["links"]:
        if not link["suggested"]:
            source = self.format_sparql_variable("{}{}_uri".format(
                link["source"]["label"], link["source"]["id"]))
            target = self.format_sparql_variable("{}{}_uri".format(
                link["target"]["label"], link["target"]["id"]))

            # Position (faldo-based genomic relation)
            if link["uri"] in ('included_in', 'overlap_with'):
                common_block = self.format_sparql_variable(
                    "block_{}_{}".format(link["source"]["id"], link["target"]["id"]))

                # Get start & end sparql variables
                for attr in json_query["attr"]:
                    if not attr["faldo"]:
                        continue
                    if attr["nodeId"] == link["source"]["id"]:
                        if attr["faldo"].endswith("faldoStart"):
                            start_end.append(attr["id"])
                            start_1 = self.format_sparql_variable(
                                "{}{}_{}".format(attr["entityLabel"], attr["nodeId"], attr["label"]))
                        if attr["faldo"].endswith("faldoEnd"):
                            start_end.append(attr["id"])
                            end_1 = self.format_sparql_variable(
                                "{}{}_{}".format(attr["entityLabel"], attr["nodeId"], attr["label"]))
                        if attr["faldo"].endswith("faldoStrand"):
                            strand_1 = self.format_sparql_variable(
                                "{}{}_{}_faldoStrand".format(
                                    attr["entityLabel"], attr["nodeId"], attr["label"]))
                            strands.append(attr["id"])
                    if attr["nodeId"] == link["target"]["id"]:
                        if attr["faldo"].endswith("faldoStart"):
                            start_end.append(attr["id"])
                            start_2 = self.format_sparql_variable(
                                "{}{}_{}".format(attr["entityLabel"], attr["nodeId"], attr["label"]))
                        if attr["faldo"].endswith("faldoEnd"):
                            start_end.append(attr["id"])
                            end_2 = self.format_sparql_variable(
                                "{}{}_{}".format(attr["entityLabel"], attr["nodeId"], attr["label"]))
                        if attr["faldo"].endswith("faldoStrand"):
                            strand_2 = self.format_sparql_variable(
                                "{}{}_{}_faldoStrand".format(
                                    attr["entityLabel"], attr["nodeId"], attr["label"]))
                            strands.append(attr["id"])

                # Both features must live in the same reference block
                triples_relations.append({
                    "subject": source,
                    "predicate": "askomics:{}".format("includeInReference" if link["sameRef"] else "includeIn"),
                    "object": common_block,
                    "optional": False
                })
                triples_relations.append({
                    "subject": target,
                    "predicate": "askomics:{}".format("includeInReference" if link["sameRef"] else "includeIn"),
                    "object": common_block,
                    "optional": False
                })

                if link["sameStrand"]:
                    # Force both strand variables to be the same SPARQL var
                    var_to_replace.append((strand_1, strand_2))
                else:
                    strands = []

                # Non-strict comparison allows touching boundaries
                equal_sign = "" if link["strict"] else "="

                if link["uri"] == "included_in":
                    filters.append(
                        "FILTER ({start1} >{equalsign} {start2} && {end1} <{equalsign} {end2}) ."
                        .format(start1=start_1, start2=start_2, end1=end_1, end2=end_2, equalsign=equal_sign))
                elif link["uri"] == "overlap_with":
                    filters.append(
                        "FILTER (({start2} >{equalsign} {start1} && {start2} <{equalsign} {end1}) || ({end2} >{equalsign} {start1} && {end2} <{equalsign} {end1}))"
                        .format(start1=start_1, start2=start_2, end1=end_1, end2=end_2, equalsign=equal_sign))

            # Classic relation
            else:
                relation = "<{}>".format(link["uri"])
                triples_relations.append({
                    "subject": source,
                    "predicate": relation,
                    "object": target,
                    "optional": False
                })

    # Store linked attributes
    for attribute in json_query["attr"]:
        attributes[attribute["id"]] = {
            "label": attribute["label"],
            "entity_label": attribute["entityLabel"],
            "entity_id": attribute["nodeId"]
        }
        if attribute["linked"]:
            linked_attributes.extend((attribute["id"], attribute["linkedWith"]))

    # Browse attributes
    for attribute in json_query["attr"]:
        # URI ---
        if attribute["type"] == "uri":
            subject = self.format_sparql_variable("{}{}_uri".format(
                attribute["entityLabel"], attribute["nodeId"]))
            predicate = attribute["uri"]
            obj = "<{}>".format(attribute["entityUri"])
            # Blank-node entities get no type triple
            if not self.is_bnode(attribute["entityUri"], json_query["nodes"]):
                triples_attributes.append({
                    "subject": subject,
                    "predicate": predicate,
                    "object": obj,
                    "optional": False
                })
            if attribute["visible"]:
                self.selects.append(subject)
            # filters/values
            if attribute["filterValue"] != "" and not attribute["linked"]:
                filter_value = self.get_uri_filter_value(attribute["filterValue"])
                if attribute["filterType"] == "regexp":
                    negative_sign = ""
                    if attribute["negative"]:
                        negative_sign = "!"
                    filters.append("FILTER ({}regex({}, {}, 'i'))".format(
                        negative_sign, subject, filter_value))
                elif attribute["filterType"] == "exact":
                    if attribute["negative"]:
                        filters.append("FILTER (str({}) != {}) .".format(
                            subject, filter_value))
                    else:
                        values.append("VALUES {} {{ {} }} .".format(
                            subject, filter_value))
            if attribute["linked"]:
                # Merge with the linked attribute's variable later
                var_2 = self.format_sparql_variable("{}{}_uri".format(
                    attributes[attribute["linkedWith"]]["entity_label"],
                    attributes[attribute["linkedWith"]]["entity_id"]))
                var_to_replace.append((subject, var_2))

        # Text
        if attribute["type"] == "text":
            if attribute["visible"] or attribute["filterValue"] != "" or attribute["id"] in linked_attributes:
                subject = self.format_sparql_variable("{}{}_uri".format(
                    attribute["entityLabel"], attribute["nodeId"]))
                if attribute["uri"] == "rdfs:label":
                    predicate = attribute["uri"]
                else:
                    predicate = "<{}>".format(attribute["uri"])
                obj = self.format_sparql_variable("{}{}_{}".format(
                    attribute["entityLabel"], attribute["nodeId"], attribute["label"]))
                triples_attributes.append({
                    "subject": subject,
                    "predicate": predicate,
                    "object": obj,
                    "optional": True if attribute["optional"] else False
                })
                if attribute["visible"]:
                    self.selects.append(obj)
                # filters/values
                if attribute["filterValue"] != "" and not attribute["optional"] and not attribute["linked"]:
                    if attribute["filterType"] == "regexp":
                        negative_sign = ""
                        if attribute["negative"]:
                            negative_sign = "!"
                        filters.append(
                            "FILTER ({}regex({}, '{}', 'i'))".format(
                                negative_sign, obj, attribute["filterValue"]))
                    elif attribute["filterType"] == "exact":
                        if attribute["negative"]:
                            filters.append("FILTER (str({}) != '{}') .".format(
                                obj, attribute["filterValue"]))
                        else:
                            values.append(
                                "VALUES {} {{ '{}'^^xsd:string }} .".format(
                                    obj, attribute["filterValue"]))
                if attribute["linked"]:
                    var_2 = self.format_sparql_variable("{}{}_{}".format(
                        attributes[attribute["linkedWith"]]["entity_label"],
                        attributes[attribute["linkedWith"]]["entity_id"],
                        attributes[attribute["linkedWith"]]["label"]))
                    var_to_replace.append((obj, var_2))

        # Numeric
        if attribute["type"] == "decimal":
            if attribute["visible"] or attribute["filterValue"] != "" or attribute["id"] in start_end or attribute["id"] in linked_attributes:
                subject = self.format_sparql_variable("{}{}_uri".format(
                    attribute["entityLabel"], attribute["nodeId"]))
                if attribute["faldo"]:
                    # Faldo positions are reached through the location path
                    predicate = "faldo:location/faldo:{}/faldo:position".format(
                        "begin" if attribute["faldo"].endswith("faldoStart") else "end")
                else:
                    predicate = "<{}>".format(attribute["uri"])
                obj = self.format_sparql_variable("{}{}_{}".format(
                    attribute["entityLabel"], attribute["nodeId"], attribute["label"]))
                triples_attributes.append({
                    "subject": subject,
                    "predicate": predicate,
                    "object": obj,
                    "optional": True if attribute["optional"] else False
                })
                if attribute["visible"]:
                    self.selects.append(obj)
                # filters
                if attribute["filterValue"] != "" and not attribute["optional"] and not attribute["linked"]:
                    if attribute['filterSign'] == "=":
                        values.append("VALUES {} {{ {} }} .".format(
                            obj, attribute["filterValue"]))
                    else:
                        filter_string = "FILTER ( {} {} {} ) .".format(
                            obj, attribute["filterSign"], attribute["filterValue"])
                        filters.append(filter_string)
                if attribute["linked"]:
                    var_2 = self.format_sparql_variable("{}{}_{}".format(
                        attributes[attribute["linkedWith"]]["entity_label"],
                        attributes[attribute["linkedWith"]]["entity_id"],
                        attributes[attribute["linkedWith"]]["label"]))
                    var_to_replace.append((obj, var_2))

        # Category
        if attribute["type"] == "category":
            if attribute["visible"] or attribute["filterSelectedValues"] != [] or attribute["id"] in strands or attribute["id"] in linked_attributes:
                node_uri = self.format_sparql_variable("{}{}_uri".format(
                    attribute["entityLabel"], attribute["nodeId"]))
                category_value_uri = self.format_sparql_variable(
                    "{}{}_{}Category".format(attribute["entityLabel"],
                                             attribute["nodeId"],
                                             attribute["label"]))
                category_label = self.format_sparql_variable(
                    "{}{}_{}".format(attribute["entityLabel"],
                                     attribute["nodeId"], attribute["label"]))
                faldo_strand = self.format_sparql_variable(
                    "{}{}_{}_faldoStrand".format(attribute["entityLabel"],
                                                 attribute["nodeId"],
                                                 attribute["label"]))
                if attribute["faldo"] and attribute["faldo"].endswith("faldoReference"):
                    # Reference (chromosome) is reached via the begin position
                    category_name = 'faldo:location/faldo:begin/faldo:reference'
                    triples_attributes.append({
                        "subject": node_uri,
                        "predicate": category_name,
                        "object": category_value_uri,
                        "optional": True if attribute["optional"] else False
                    })
                    if attribute["visible"]:
                        triples_attributes.append({
                            "subject": category_value_uri,
                            "predicate": "rdfs:label",
                            "object": category_label,
                            "optional": True if attribute["optional"] else False
                        })
                elif attribute["faldo"] and attribute["faldo"].endswith("faldoStrand"):
                    # Strand is modelled as the rdf:type of the begin position
                    category_name = 'faldo:location/faldo:begin/rdf:type'
                    triples_attributes.append({
                        "subject": node_uri,
                        "predicate": category_name,
                        "object": category_value_uri,
                        "optional": True if attribute["optional"] else False
                    })
                    triples_attributes.append({
                        "subject": faldo_strand,
                        "predicate": "a",
                        "object": category_value_uri,
                        "optional": True if attribute["optional"] else False
                    })
                    if attribute["visible"]:
                        triples_attributes.append({
                            "subject": faldo_strand,
                            "predicate": "rdfs:label",
                            "object": category_label,
                            "optional": False
                        })
                    # Restrict to the two faldo strand classes
                    values.append(
                        "VALUES {} {{ faldo:ReverseStrandPosition faldo:ForwardStrandPosition }} ."
                        .format(category_value_uri))
                else:
                    category_name = "<{}>".format(attribute["uri"])
                    triples_attributes.append({
                        "subject": node_uri,
                        "predicate": category_name,
                        "object": category_value_uri,
                        "optional": True if attribute["optional"] else False
                    })
                    if attribute["visible"]:
                        triples_attributes.append({
                            "subject": category_value_uri,
                            "predicate": "rdfs:label",
                            "object": category_label,
                            "optional": True if attribute["optional"] else False
                        })
                if attribute["visible"]:
                    self.selects.append(category_label)
                # values
                if attribute["filterSelectedValues"] != [] and not attribute["optional"] and not attribute["linked"]:
                    uri_val_list = []
                    for value in attribute["filterSelectedValues"]:
                        if attribute["faldo"] and attribute["faldo"].endswith("faldoStrand"):
                            value_var = faldo_strand
                            uri_val_list.append("<{}>".format(value))
                        else:
                            value_var = category_value_uri
                            uri_val_list.append("<{}>".format(value))
                    if uri_val_list:
                        values.append("VALUES {} {{ {} }}".format(
                            value_var, ' '.join(uri_val_list)))
                if attribute["linked"]:
                    var_2 = self.format_sparql_variable(
                        "{}{}_{}Category".format(
                            attributes[attribute["linkedWith"]]["entity_label"],
                            attributes[attribute["linkedWith"]]["entity_id"],
                            attributes[attribute["linkedWith"]]["label"]))
                    var_to_replace.append((category_value_uri, var_2))

    from_string = self.get_froms_from_graphs(self.graphs)
    federated_from_string = self.get_federated_froms_from_graphs(self.graphs)
    endpoints_string = self.get_endpoints_string()

    # Linked attributes: replace SPARQL variable target by source
    for tpl_var in var_to_replace:
        var_source = tpl_var[0]
        var_target = tpl_var[1]
        for i, triple_dict in enumerate(triples_relations):
            for key, value in triple_dict.items():
                if key != "optional":
                    triples_relations[i][key] = value.replace(var_source, var_target)
        for i, triple_dict in enumerate(triples_attributes):
            for key, value in triple_dict.items():
                if key != "optional":
                    triples_attributes[i][key] = value.replace(var_source, var_target)
        for i, select in enumerate(self.selects):
            self.selects[i] = select.replace(var_source, var_target)
        for i, filtr in enumerate(filters):
            filters[i] = filtr.replace(var_source, var_target)

    # uniq lists
    triples_relations = Utils.unique(triples_relations)
    triples_attributes = Utils.unique(triples_attributes)
    self.selects = Utils.unique(self.selects)

    # Write the query
    # query is for editor (no froms, no federated)
    if for_editor:
        query = """
        SELECT DISTINCT {selects}
        WHERE {{
            {relations}
            {attributes}
            {filters}
            {values}
        }}
        """.format(selects=' '.join(self.selects),
                   relations='\n    '.join([
                       self.triple_dict_to_string(triple_dict)
                       for triple_dict in triples_relations
                   ]),
                   attributes='\n    '.join([
                       self.triple_dict_to_string(triple_dict)
                       for triple_dict in triples_attributes
                   ]),
                   filters='\n    '.join(filters),
                   values='\n    '.join(values))
    # Query is federated, add federated lines (@federate & @from)
    elif self.federated:
        query = """
        {endpoints}
        {federated}

        SELECT DISTINCT {selects}
        WHERE {{
            {relations}
            {attributes}
            {filters}
            {values}
        }}
        """.format(endpoints=endpoints_string,
                   federated=federated_from_string,
                   selects=' '.join(self.selects),
                   relations='\n    '.join([
                       self.triple_dict_to_string(triple_dict)
                       for triple_dict in triples_relations
                   ]),
                   attributes='\n    '.join([
                       self.triple_dict_to_string(triple_dict)
                       for triple_dict in triples_attributes
                   ]),
                   filters='\n    '.join(filters),
                   values='\n    '.join(values))
    # Query on the local endpoint (add froms)
    elif self.endpoints == [self.local_endpoint_f]:
        query = """
        SELECT DISTINCT {selects}
        {froms}
        WHERE {{
            {relations}
            {attributes}
            {filters}
            {values}
        }}
        """.format(selects=' '.join(self.selects),
                   froms=from_string,
                   relations='\n    '.join([
                       self.triple_dict_to_string(triple_dict)
                       for triple_dict in triples_relations
                   ]),
                   attributes='\n    '.join([
                       self.triple_dict_to_string(triple_dict)
                       for triple_dict in triples_attributes
                   ]),
                   filters='\n    '.join(filters),
                   values='\n    '.join(values))
    # Query an external endpoint (no froms)
    else:
        query = """
        SELECT DISTINCT {selects}
        WHERE {{
            {relations}
            {attributes}
            {filters}
            {values}
        }}
        """.format(selects=' '.join(self.selects),
                   relations='\n    '.join([
                       self.triple_dict_to_string(triple_dict)
                       for triple_dict in triples_relations
                   ]),
                   attributes='\n    '.join([
                       self.triple_dict_to_string(triple_dict)
                       for triple_dict in triples_attributes
                   ]),
                   filters='\n    '.join(filters),
                   values='\n    '.join(values))

    if preview:
        query += "\nLIMIT {}".format(
            self.settings.getint('triplestore', 'preview_limit'))

    return self.prefix_query(textwrap.dedent(query))
    def build_query_from_json(self, preview=False, for_editor=False):
        """Build a sparql query for the json dict of the query builder

        The query is assembled in four passes over ``self.json`` and stored
        in ``self.sparql``:

        1. Collect entities of visible attributes and set graphs/endpoints.
        2. Browse links (relations): classic relations become one triple;
           FALDO positional links (``included_in``/``overlap_with``) become
           ``includeIn`` triples plus position FILTERs.
        3. Browse attributes: each attribute contributes triples, filters
           and VALUES clauses depending on its type.
        4. Write one of four query shapes (editor / federated / local /
           external endpoint).

        Parameters
        ----------
        preview : bool, optional
            Build a preview query (with LIMIT)
        for_editor : bool, optional
            Remove FROMS and @federate
        """
        entities = []
        attributes = {}
        linked_attributes = []
        self.selects = []
        self.triples = []
        self.triples_blocks = []
        self.values = []
        self.filters = []
        start_end = []
        strands = []
        # (source_var, target_var) pairs to merge at the end
        var_to_replace = []

        # Browse attributes to get entities
        for attr in self.json["attr"]:
            if attr["visible"]:
                entities = entities + attr["entityUris"]
        entities = list(set(entities))  # uniq list

        # Set graphs in function of entities needed
        self.set_graphs_and_endpoints(entities=entities)

        # self.log.debug(self.json)

        # Browse links (relations)
        for link in self.json["links"]:
            if not link["suggested"]:
                # if link is special, replace the special node variable with its real node
                if link["type"] == "specialLink":
                    special_node = link["source"] if link["source"]["type"] in ("unionNode", "minusNode") else link["target"]
                    real_node = link["target"] if link["source"]["type"] in ("unionNode", "minusNode") else link["source"]
                    var_to_replace.append((
                        self.format_sparql_variable("{}{}_uri".format(special_node["label"], special_node["id"])),
                        self.format_sparql_variable("{}{}_uri".format(real_node["label"], real_node["id"]))
                    ))
                    continue

                source = self.format_sparql_variable("{}{}_uri".format(link["source"]["label"], link["source"]["id"]))
                target = self.format_sparql_variable("{}{}_uri".format(link["target"]["label"], link["target"]["id"]))

                # Check if relation is in a block (UNION / MINUS sub-block)
                block_id = None
                sblock_id = None
                pblock_ids = (None, None)
                if link["source"]["specialNodeId"] or link["target"]["specialNodeId"]:
                    block_id = link["source"]["specialNodeId"]
                    sblock_id = link["source"]["specialNodeGroupId"] if link["source"]["specialNodeGroupId"] else link["target"]["specialNodeGroupId"]
                    pblock_ids = link["source"]["specialPreviousIds"]

                # Position (FALDO genomic interval links)
                if link["uri"] in ('included_in', 'overlap_with'):
                    # If source of target is a special node, replace the id with the id of the concerned node
                    source_id = link["source"]["id"]
                    target_id = link["target"]["id"]
                    if link["source"]["type"] in ("unionNode", "minusNode"):
                        source_id = self.get_source_of_special_node(link["source"]["id"])
                    if link["target"]["type"] in ("unionNode", "minusNode"):
                        target_id = self.get_source_of_special_node(link["target"]["id"])

                    common_block = self.format_sparql_variable("block_{}_{}".format(link["source"]["id"], link["target"]["id"]))

                    # Get start & end sparql variables
                    # NOTE(review): start_1/end_1/strand_1 (and the _2 variants) are
                    # only bound if a matching FALDO attribute exists in the json;
                    # a malformed query dict would raise NameError below — presumably
                    # guaranteed by the frontend, verify against callers.
                    for attr in self.json["attr"]:
                        if not attr["faldo"]:
                            continue
                        if attr["nodeId"] == source_id:
                            if attr["faldo"].endswith("faldoStart"):
                                start_end.append(attr["id"])
                                start_1 = self.format_sparql_variable("{}{}_{}".format(attr["entityLabel"], attr["nodeId"], attr["label"]))
                            if attr["faldo"].endswith("faldoEnd"):
                                start_end.append(attr["id"])
                                end_1 = self.format_sparql_variable("{}{}_{}".format(attr["entityLabel"], attr["nodeId"], attr["label"]))
                            if attr["faldo"].endswith("faldoStrand"):
                                strand_1 = self.format_sparql_variable("{}{}_{}_faldoStrand".format(attr["entityLabel"], attr["nodeId"], attr["label"]))
                                strands.append(attr["id"])
                        if attr["nodeId"] == target_id:
                            if attr["faldo"].endswith("faldoStart"):
                                start_end.append(attr["id"])
                                start_2 = self.format_sparql_variable("{}{}_{}".format(attr["entityLabel"], attr["nodeId"], attr["label"]))
                            if attr["faldo"].endswith("faldoEnd"):
                                start_end.append(attr["id"])
                                end_2 = self.format_sparql_variable("{}{}_{}".format(attr["entityLabel"], attr["nodeId"], attr["label"]))
                            if attr["faldo"].endswith("faldoStrand"):
                                strand_2 = self.format_sparql_variable("{}{}_{}_faldoStrand".format(attr["entityLabel"], attr["nodeId"], attr["label"]))
                                strands.append(attr["id"])

                    # Both nodes are tied to the same reference block
                    self.store_triple({
                        "subject": source,
                        "predicate": "askomics:{}".format("includeInReference" if link["sameRef"] else "includeIn"),
                        "object": common_block,
                        "optional": False
                    }, block_id, sblock_id, pblock_ids)
                    self.store_triple({
                        "subject": target,
                        "predicate": "askomics:{}".format("includeInReference" if link["sameRef"] else "includeIn"),
                        "object": common_block,
                        "optional": False
                    }, block_id, sblock_id, pblock_ids)

                    if link["sameStrand"]:
                        # Merge the two strand variables so the query requires equality
                        var_to_replace.append((strand_1, strand_2))
                    else:
                        # NOTE(review): this clears ALL strand ids collected so far,
                        # not only this link's — confirm this global reset is intended.
                        strands = []

                    # Strict comparison uses > / <, non-strict >= / <=
                    equal_sign = "" if link["strict"] else "="

                    if link["uri"] == "included_in":
                        self.store_filter("FILTER ({start1} >{equalsign} {start2} && {end1} <{equalsign} {end2}) .".format(
                            start1=start_1,
                            start2=start_2,
                            end1=end_1,
                            end2=end_2,
                            equalsign=equal_sign
                        ), block_id, sblock_id, pblock_ids)
                    elif link["uri"] == "overlap_with":
                        self.store_filter("FILTER (({start2} >{equalsign} {start1} && {start2} <{equalsign} {end1}) || ({end2} >{equalsign} {start1} && {end2} <{equalsign} {end1}))".format(
                            start1=start_1,
                            start2=start_2,
                            end1=end_1,
                            end2=end_2,
                            equalsign=equal_sign
                        ), block_id, sblock_id, pblock_ids)

                # Classic relation
                else:
                    relation = "<{}>".format(link["uri"])
                    triple = {
                        "subject": source,
                        "predicate": relation,
                        "object": target,
                        "optional": False
                    }
                    self.store_triple(triple, block_id, sblock_id, pblock_ids)

        # Store linked attributes (id -> labels map, used to resolve "linkedWith")
        for attribute in self.json["attr"]:
            attributes[attribute["id"]] = {
                "label": attribute["label"],
                "entity_label": attribute["entityLabel"],
                "entity_id": attribute["nodeId"]
            }
            if attribute["linked"]:
                linked_attributes.extend((attribute["id"], attribute["linkedWith"]))

        # Browse attributes
        for attribute in self.json["attr"]:
            # Get blockid and sblockid of the attribute node
            block_id, sblock_id, pblock_ids = self.get_block_ids(attribute["nodeId"])

            # URI ---
            if attribute["type"] == "uri":
                subject = self.format_sparql_variable("{}{}_uri".format(attribute["entityLabel"], attribute["nodeId"]))
                predicate = attribute["uri"]
                obj = "<{}>".format(attribute["entityUris"][0])
                # bnode entities get no typing triple
                if not self.is_bnode(attribute["entityUris"][0], self.json["nodes"]):
                    self.store_triple({
                        "subject": subject,
                        "predicate": predicate,
                        "object": obj,
                        "optional": False
                    }, block_id, sblock_id, pblock_ids)
                if attribute["visible"]:
                    self.selects.append(subject)
                # filters/values
                if attribute["filterValue"] != "" and not attribute["linked"]:
                    filter_value = self.get_uri_filter_value(attribute["filterValue"])
                    if attribute["filterType"] == "regexp":
                        negative_sign = ""
                        if attribute["negative"]:
                            negative_sign = "!"
                        self.store_filter("FILTER ({}regex({}, {}, 'i'))".format(negative_sign, subject, filter_value), block_id, sblock_id, pblock_ids)
                    elif attribute["filterType"] == "exact":
                        if attribute["negative"]:
                            self.store_filter("FILTER (str({}) != {}) .".format(subject, filter_value), block_id, sblock_id, pblock_ids)
                        else:
                            self.store_value("VALUES {} {{ {} }} .".format(subject, filter_value), block_id, sblock_id, pblock_ids)
                if attribute["linked"]:
                    var_2 = self.format_sparql_variable("{}{}_uri".format(
                        attributes[attribute["linkedWith"]]["entity_label"],
                        attributes[attribute["linkedWith"]]["entity_id"]
                    ))
                    var_to_replace.append((subject, var_2))

            # Boolean ---
            if attribute["type"] == "boolean":
                if attribute["visible"] or attribute["filterSelectedValues"] != [] or attribute["id"] in linked_attributes:
                    subject = self.format_sparql_variable("{}{}_uri".format(attribute["entityLabel"], attribute["nodeId"]))
                    predicate = "<{}>".format(attribute["uri"])
                    obj = self.format_sparql_variable("{}{}_{}".format(attribute["entityLabel"], attribute["humanNodeId"], attribute["label"]))
                    self.store_triple({
                        "subject": subject,
                        "predicate": predicate,
                        "object": obj,
                        "optional": True if attribute["optional"] else False
                    }, block_id, sblock_id, pblock_ids)
                    if attribute["visible"]:
                        self.selects.append(obj)
                    # values
                    if attribute["filterSelectedValues"] != [] and not attribute["optional"] and not attribute["linked"]:
                        uri_val_list = []
                        for value in attribute["filterSelectedValues"]:
                            if value == "true":
                                bool_value = "'true'^^xsd:boolean"
                            else:
                                bool_value = "'false'^^xsd:boolean"
                            value_var = obj
                            uri_val_list.append(bool_value)
                        if uri_val_list:
                            self.store_value("VALUES {} {{ {} }}".format(value_var, ' '.join(uri_val_list)), block_id, sblock_id, pblock_ids)
                    if attribute["linked"]:
                        var_2 = self.format_sparql_variable("{}{}_{}".format(
                            attributes[attribute["linkedWith"]]["entity_label"],
                            attributes[attribute["linkedWith"]]["entity_id"],
                            attributes[attribute["linkedWith"]]["label"]
                        ))
                        var_to_replace.append((obj, var_2))

            # Text
            if attribute["type"] == "text":
                if attribute["visible"] or attribute["filterValue"] != "" or attribute["id"] in linked_attributes:
                    subject = self.format_sparql_variable("{}{}_uri".format(attribute["entityLabel"], attribute["nodeId"]))
                    if attribute["uri"] == "rdfs:label":
                        predicate = attribute["uri"]
                    else:
                        predicate = "<{}>".format(attribute["uri"])
                    obj = self.format_sparql_variable("{}{}_{}".format(attribute["entityLabel"], attribute["humanNodeId"], attribute["label"]))
                    self.store_triple({
                        "subject": subject,
                        "predicate": predicate,
                        "object": obj,
                        "optional": True if attribute["optional"] else False
                    }, block_id, sblock_id, pblock_ids)
                    if attribute["visible"]:
                        self.selects.append(obj)
                    # filters/values
                    # NOTE(review): filterValue is interpolated into the query
                    # unescaped — a value containing a quote breaks the SPARQL
                    # (possible injection vector); consider escaping upstream.
                    if attribute["filterValue"] != "" and not attribute["optional"] and not attribute["linked"]:
                        if attribute["filterType"] == "regexp":
                            negative_sign = ""
                            if attribute["negative"]:
                                negative_sign = "!"
                            self.store_filter("FILTER ({}regex({}, '{}', 'i'))".format(negative_sign, obj, attribute["filterValue"]), block_id, sblock_id, pblock_ids)
                        elif attribute["filterType"] == "exact":
                            if attribute["negative"]:
                                self.store_filter("FILTER (str({}) != '{}') .".format(obj, attribute["filterValue"]), block_id, sblock_id, pblock_ids)
                            else:
                                self.store_value("VALUES {} {{ '{}' }} .".format(obj, attribute["filterValue"]), block_id, sblock_id, pblock_ids)
                    if attribute["linked"]:
                        var_2 = self.format_sparql_variable("{}{}_{}".format(
                            attributes[attribute["linkedWith"]]["entity_label"],
                            attributes[attribute["linkedWith"]]["entity_id"],
                            attributes[attribute["linkedWith"]]["label"]
                        ))
                        var_to_replace.append((obj, var_2))

            # Numeric
            if attribute["type"] == "decimal":
                if attribute["visible"] or Utils.check_key_in_list_of_dict(attribute["filters"], "filterValue") or attribute["id"] in start_end or attribute["id"] in linked_attributes:
                    subject = self.format_sparql_variable("{}{}_uri".format(attribute["entityLabel"], attribute["nodeId"]))
                    if attribute["faldo"]:
                        # FALDO start/end become a property path to the position
                        predicate = "faldo:location/faldo:{}/faldo:position".format("begin" if attribute["faldo"].endswith("faldoStart") else "end")
                    else:
                        predicate = "<{}>".format(attribute["uri"])
                    obj = self.format_sparql_variable("{}{}_{}".format(attribute["entityLabel"], attribute["nodeId"], attribute["label"]))
                    self.store_triple({
                        "subject": subject,
                        "predicate": predicate,
                        "object": obj,
                        "optional": True if attribute["optional"] else False
                    }, block_id, sblock_id, pblock_ids)
                    if attribute["visible"]:
                        self.selects.append(obj)
                    # filters
                    for filtr in attribute["filters"]:
                        if filtr["filterValue"] != "" and not attribute["optional"] and not attribute["linked"]:
                            if filtr['filterSign'] == "=":
                                self.store_value("VALUES {} {{ {} }} .".format(obj, filtr["filterValue"]), block_id, sblock_id, pblock_ids)
                            else:
                                filter_string = "FILTER ( {} {} {} ) .".format(obj, filtr["filterSign"], filtr["filterValue"])
                                self.store_filter(filter_string, block_id, sblock_id, pblock_ids)
                    if attribute["linked"]:
                        var_2 = self.format_sparql_variable("{}{}_{}".format(
                            attributes[attribute["linkedWith"]]["entity_label"],
                            attributes[attribute["linkedWith"]]["entity_id"],
                            attributes[attribute["linkedWith"]]["label"]
                        ))
                        var_to_replace.append((obj, var_2))

            # Category
            if attribute["type"] == "category":
                if attribute["visible"] or attribute["filterSelectedValues"] != [] or attribute["id"] in strands or attribute["id"] in linked_attributes:
                    node_uri = self.format_sparql_variable("{}{}_uri".format(attribute["entityLabel"], attribute["nodeId"]))
                    category_value_uri = self.format_sparql_variable("{}{}_{}Category".format(attribute["entityLabel"], attribute["nodeId"], attribute["label"]))
                    category_label = self.format_sparql_variable("{}{}_{}".format(attribute["entityLabel"], attribute["humanNodeId"], attribute["label"]))
                    faldo_strand = self.format_sparql_variable("{}{}_{}_faldoStrand".format(attribute["entityLabel"], attribute["nodeId"], attribute["label"]))
                    if attribute["faldo"] and attribute["faldo"].endswith("faldoReference"):
                        # Category is the FALDO reference (chromosome/contig)
                        category_name = 'faldo:location/faldo:begin/faldo:reference'
                        self.store_triple({
                            "subject": node_uri,
                            "predicate": category_name,
                            "object": category_value_uri,
                            "optional": True if attribute["optional"] else False
                        }, block_id, sblock_id, pblock_ids)
                        if attribute["visible"]:
                            self.store_triple({
                                "subject": category_value_uri,
                                "predicate": "rdfs:label",
                                "object": category_label,
                                "optional": True if attribute["optional"] else False
                            }, block_id, sblock_id, pblock_ids)
                    elif attribute["faldo"] and attribute["faldo"].endswith("faldoStrand"):
                        # Category is the FALDO strand (forward/reverse)
                        category_name = 'faldo:location/faldo:begin/rdf:type'
                        self.store_triple({
                            "subject": node_uri,
                            "predicate": category_name,
                            "object": category_value_uri,
                            "optional": True if attribute["optional"] else False
                        }, block_id, sblock_id, pblock_ids)
                        self.store_triple({
                            "subject": faldo_strand,
                            "predicate": "a",
                            "object": category_value_uri,
                            "optional": True if attribute["optional"] else False
                        }, block_id, sblock_id, pblock_ids)
                        if attribute["visible"]:
                            self.store_triple({
                                "subject": faldo_strand,
                                "predicate": "rdfs:label",
                                "object": category_label,
                                "optional": False
                            }, block_id, sblock_id, pblock_ids)
                        # Restrict the strand class to the two FALDO strand positions
                        self.store_value("VALUES {} {{ faldo:ReverseStrandPosition faldo:ForwardStrandPosition }} .".format(category_value_uri), block_id, sblock_id, pblock_ids)
                    else:
                        # Plain category attribute
                        category_name = "<{}>".format(attribute["uri"])
                        self.store_triple({
                            "subject": node_uri,
                            "predicate": category_name,
                            "object": category_value_uri,
                            "optional": True if attribute["optional"] else False
                        }, block_id, sblock_id, pblock_ids)
                        if attribute["visible"]:
                            self.store_triple({
                                "subject": category_value_uri,
                                "predicate": "rdfs:label",
                                "object": category_label,
                                "optional": True if attribute["optional"] else False
                            }, block_id, sblock_id, pblock_ids)
                    if attribute["visible"]:
                        self.selects.append(category_label)
                    # values
                    if attribute["filterSelectedValues"] != [] and not attribute["optional"] and not attribute["linked"]:
                        uri_val_list = []
                        for value in attribute["filterSelectedValues"]:
                            if attribute["faldo"] and attribute["faldo"].endswith("faldoStrand"):
                                value_var = faldo_strand
                                uri_val_list.append("<{}>".format(value))
                            else:
                                value_var = category_value_uri
                                uri_val_list.append("<{}>".format(value))
                        if uri_val_list:
                            self.store_value("VALUES {} {{ {} }}".format(value_var, ' '.join(uri_val_list)), block_id, sblock_id, pblock_ids)
                    if attribute["linked"]:
                        var_2 = self.format_sparql_variable("{}{}_{}Category".format(
                            attributes[attribute["linkedWith"]]["entity_label"],
                            attributes[attribute["linkedWith"]]["entity_id"],
                            attributes[attribute["linkedWith"]]["label"]
                        ))
                        var_to_replace.append((category_value_uri, var_2))

        from_string = self.get_froms_from_graphs(self.graphs)
        federated_from_string = self.get_federated_froms_from_graphs(self.graphs)
        endpoints_string = self.get_endpoints_string()

        # Linked attributes: replace SPARQL variable target by source
        self.replace_variables_in_blocks(var_to_replace)
        self.replace_variables_in_triples(var_to_replace)

        # Write the query
        # query is for editor (no froms, no federated)
        if for_editor:
            query = """
SELECT DISTINCT {selects}
WHERE {{
    {triples}
    {blocks}
    {filters}
    {values}
}}
""".format(
                selects=' '.join(self.selects),
                triples='\n    '.join([self.triple_dict_to_string(triple_dict) for triple_dict in self.triples]),
                blocks='\n    '.join([self.triple_block_to_string(triple_block) for triple_block in self.triples_blocks]),
                filters='\n    '.join(self.filters),
                values='\n    '.join(self.values))

        # Query is federated, add federated lines @federate & @from)
        elif self.federated:
            query = """
{endpoints}
{federated}
SELECT DISTINCT {selects}
WHERE {{
    {triples}
    {blocks}
    {filters}
    {values}
}}
""".format(
                endpoints=endpoints_string,
                federated=federated_from_string,
                selects=' '.join(self.selects),
                triples='\n    '.join([self.triple_dict_to_string(triple_dict) for triple_dict in self.triples]),
                blocks='\n    '.join([self.triple_block_to_string(triple_block) for triple_block in self.triples_blocks]),
                filters='\n    '.join(self.filters),
                values='\n    '.join(self.values)
            )

        # Query on the local endpoint (add froms)
        elif self.endpoints == [self.local_endpoint_f]:
            query = """
SELECT DISTINCT {selects}
{froms}
WHERE {{
    {triples}
    {blocks}
    {filters}
    {values}
}}
""".format(
                selects=' '.join(self.selects),
                froms=from_string,
                triples='\n    '.join([self.triple_dict_to_string(triple_dict) for triple_dict in self.triples]),
                blocks='\n    '.join([self.triple_block_to_string(triple_block) for triple_block in self.triples_blocks]),
                filters='\n    '.join(self.filters),
                values='\n    '.join(self.values))

        # Query an external endpoint (no froms)
        else:
            query = """
SELECT DISTINCT {selects}
WHERE {{
    {triples}
    {blocks}
    {filters}
    {values}
}}
""".format(
                selects=' '.join(self.selects),
                triples='\n    '.join([self.triple_dict_to_string(triple_dict) for triple_dict in self.triples]),
                blocks='\n    '.join([self.triple_block_to_string(triple_block) for triple_block in self.triples_blocks]),
                filters='\n    '.join(self.filters),
                values='\n    '.join(self.values))

        if preview:
            query += "\nLIMIT {}".format(self.settings.getint('triplestore', 'preview_limit'))

        self.sparql = self.prefix_query(textwrap.dedent(query))
def persist_user(self, inputs, ldap_login=False, return_password=False): """ Persist user in the TS Parameters ---------- inputs : dict User infos ldap_login : bool, optional If True, user is ldap Returns ------- dict The user """ database = Database(self.app, self.session) # Check if user is the first. if yes, set him admin if self.get_number_of_users() == 0: admin = True blocked = False else: admin = False blocked = self.settings.getboolean('askomics', 'default_locked_account') api_key = Utils.get_random_string(20) if "apikey" not in inputs else inputs["apikey"] query = ''' INSERT INTO users VALUES( NULL, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, NULL ) ''' salt = None sha512_pw = None email = None fname = None lname = None if not ldap_login: # Create a salt salt = Utils.get_random_string(20) if "salt" not in inputs else inputs["salt"] # Concat askomics_salt + user_password + salt salted_pw = self.settings.get('askomics', 'password_salt') + inputs['password'] + salt # hash sha512_pw = hashlib.sha512(salted_pw.encode('utf8')).hexdigest() email = inputs["email"] fname = inputs["fname"] lname = inputs["lname"] # Store user in db user_id = database.execute_sql_query( query, (ldap_login, fname, lname, inputs['username'], email, sha512_pw, salt, api_key, admin, blocked, Utils.humansize_to_bytes(self.settings.get("askomics", "quota")), int(time.time())), True) user = { 'id': user_id, 'ldap': ldap_login, 'fname': fname, 'lname': lname, 'username': inputs['username'], 'email': email, 'admin': admin, 'blocked': blocked, 'quota': Utils.humansize_to_bytes(self.settings.get("askomics", "quota")), 'apikey': api_key, 'galaxy': None } if return_password and not ldap_login: user["password"] = inputs["password"] # Return user infos return user