def glycan_to_biosynthesis_enzymes(query_obj, config_obj):
    """Map a glycan to the enzymes involved in its biosynthesis.

    Looks up the glycan record, collects the UniProt canonical accessions of
    its "enzyme" entries (filtered by query_obj["tax_id"]; 0 means any
    organism), caches the accession list, and returns {"list_id": ...}
    ({} when no enzymes matched).
    """
    db_obj = config_obj[config_obj["server"]]["dbinfo"]
    dbh, error_obj = util.connect_to_mongodb(db_obj)  #connect to mongodb
    if error_obj != {}:
        return error_obj
    #Collect errors
    error_list = errorlib.get_errors_in_query("usecases_group_one", query_obj, config_obj)
    if error_list != []:
        return {"error_list": error_list}
    mongo_query = get_mongo_query("glycan_to_biosynthesis_enzymes", query_obj)
    collection = "c_glycan"
    search_type = "glycan_to_biosynthesis_enzymes"
    record_type = "protein"
    record_list = []
    prj_obj = {"enzyme": 1}
    obj = dbh[collection].find_one(mongo_query, prj_obj)
    seen = {}
    if obj != None:
        for o in obj["enzyme"]:
            if o["uniprot_canonical_ac"] in seen:
                continue
            seen[o["uniprot_canonical_ac"]] = True
            tax_id = o["tax_id"]
            # tax_id == 0 in the query means "any organism"
            if query_obj["tax_id"] == 0 or tax_id == query_obj["tax_id"]:
                record_list.append(o["uniprot_canonical_ac"])
    query_obj["organism"] = {
        "id": query_obj["tax_id"],
        "name": config_obj["taxid2name"][str(query_obj["tax_id"])]
    }
    query_obj.pop("tax_id")
    res_obj = {}
    ts_format = "%Y-%m-%d %H:%M:%S %Z%z"
    ts = datetime.datetime.now(pytz.timezone('US/Eastern')).strftime(ts_format)
    cache_coll = "c_cache"
    list_id = ""
    if len(record_list) != 0:
        # encode to bytes so hashlib.md5 works under both Python 2 and 3
        hash_obj = hashlib.md5((record_type + "_" + json.dumps(query_obj)).encode("utf-8"))
        list_id = hash_obj.hexdigest()
        cache_info = {
            "query": query_obj,
            "ts": ts,
            "record_type": record_type,
            "search_type": search_type
        }
        util.cache_record_list(dbh, list_id, record_list, cache_info, cache_coll, config_obj)
        res_obj = {"list_id": list_id}
    return res_obj
def auth_userid(config_obj):
    """Generate, persist and return a new unique 32-character user id."""
    db_obj = config_obj[config_obj["server"]]["dbinfo"]
    dbh, error_obj = util.connect_to_mongodb(db_obj)  #connect to mongodb
    if error_obj != {}:
        return error_obj
    #Collect errors
    error_list = errorlib.get_errors_in_query("auth_userid", {}, config_obj)
    if error_list != []:
        return {"error_list": error_list}
    collection = "c_userid"
    attempts = 0
    while True:
        candidate = util.get_random_string(32).lower()
        new_doc = {"userid": candidate}
        if dbh[collection].find(new_doc).count() == 0:
            ts = datetime.datetime.now(
                pytz.timezone('US/Eastern')).strftime('%Y-%m-%d %H:%M:%S %Z%z')
            new_doc["created_ts"] = ts
            dbh[collection].insert_one(new_doc)
            return {"user": candidate}
        # keep trying on collisions, but give up eventually
        if attempts > 100000:
            return {"error_list": [{"error_code": "userid-generator-failed"}]}
        attempts += 1
def disease_to_glycosyltransferases(query_obj, config_obj):
    """Direct search: glycosyltransferase proteins linked to a disease.

    NOTE(review): this module defines disease_to_glycosyltransferases twice;
    a later definition shadows this one at import time — confirm which is
    intended.
    """
    db_obj = config_obj[config_obj["server"]]["dbinfo"]
    dbh, error_obj = util.connect_to_mongodb(db_obj)  #connect to mongodb
    if error_obj != {}:
        return error_obj
    #Collect errors
    error_list = errorlib.get_errors_in_query(
        "disease_to_glycosyltransferases_direct", query_obj, config_obj)
    if error_list != []:
        return {"error_list": error_list}
    mongo_query = usecases_apilib.get_mongo_query(
        "disease_to_glycosyltransferases", query_obj)
    main_id = "uniprot_canonical_ac"
    max_hits = config_obj["max_results_count"]["protein"]
    results_dict = {}
    n_seen = 0
    for doc in dbh["c_protein"].find(mongo_query):
        canon = doc[main_id]
        n_seen += 1
        if n_seen > max_hits:
            break
        results_dict[canon] = doc
    return get_results_batch(results_dict, query_obj, config_obj)
def commonquery_search_protein(query_obj, config_obj, result_type):
    """Run a common protein query and shape results by result_type.

    "protein_list"/"count_list" -> {canonical_ac: protein-list-record};
    "glycan_list" -> {glytoucan_ac: glycan-list-record} for glycans attached
    via the protein's glycosylation entries.
    """
    collection = "c_protein"
    db_obj = config_obj[config_obj["server"]]["dbinfo"]
    dbh, error_obj = util.connect_to_mongodb(db_obj)  #connect to mongodb
    if error_obj != {}:
        return error_obj
    query_obj = clean_protein_query(dbh, query_obj, config_obj)
    error_list = errorlib.get_errors_in_query("commonquery_search_protein", query_obj, config_obj)
    if error_list != []:
        return {"error_list": error_list}
    mongo_query = get_protein_mongo_query(query_obj)
    res_obj = {}
    seen = {}
    prj_obj = {"uniprot_canonical_ac": 1, "glycosylation": 1}
    for obj in dbh[collection].find(mongo_query, prj_obj):
        if result_type in ["protein_list", "count_list"]:
            canon = obj["uniprot_canonical_ac"]
            res_obj[canon] = get_protein_list_record(obj)
        elif result_type == "glycan_list":
            if "glycosylation" in obj:
                for xobj in obj["glycosylation"]:
                    glytoucan_ac = xobj["glytoucan_ac"]
                    # PERF FIX: check seen before hitting the DB; the original
                    # ran find_one for every duplicate accession
                    if glytoucan_ac in seen:
                        continue
                    seen[glytoucan_ac] = True
                    o = dbh["c_glycan"].find_one(
                        {"glytoucan_ac": glytoucan_ac})
                    if o != None:
                        res_obj[glytoucan_ac] = get_glycan_list_record(o)
    return res_obj
def protein_to_glycosequons(query_obj, config_obj):
    """List N-glycosylation sequons annotated on a protein.

    Matches query_obj["uniprot_canonical_ac"] against either the canonical
    accession or the plain UniProt accession, caches the matching
    site_annotation entries, and returns {"list_id": ...} ("" when the
    protein has no sequons).
    """
    db_obj = config_obj[config_obj["server"]]["dbinfo"]
    dbh, error_obj = util.connect_to_mongodb(db_obj)  #connect to mongodb
    if error_obj != {}:
        return error_obj
    #Collect errors
    error_list = errorlib.get_errors_in_query("usecases_group_seven", query_obj, config_obj)
    if error_list != []:
        return {"error_list": error_list}
    mongo_query = {
        "$or": [{
            "uniprot_canonical_ac": {'$eq': query_obj["uniprot_canonical_ac"]}
        }, {
            "uniprot_ac": {'$eq': query_obj["uniprot_canonical_ac"]}
        }]
    }
    collection = "c_protein"
    cache_collection = "c_cache"
    obj = dbh[collection].find_one(mongo_query)
    # BUG FIX: guard both a missing record and a record without the
    # site_annotation key (the original raised KeyError on the latter)
    tmp_list = obj.get("site_annotation", []) if obj != None else []
    results = [o for o in tmp_list if o["annotation"] == "n_glycosylation_sequon"]
    res_obj = {}
    if len(results) == 0:
        res_obj = {"list_id": ""}
    else:
        ts = datetime.datetime.now(
            pytz.timezone('US/Eastern')).strftime('%Y-%m-%d %H:%M:%S %Z%z')
        # encode to bytes so hashlib.md5 works under both Python 2 and 3
        hash_obj = hashlib.md5(json.dumps(query_obj).encode("utf-8"))
        list_id = hash_obj.hexdigest()
        search_results_obj = {
            "list_id": list_id,
            "cache_info": {
                "query": query_obj,
                "ts": ts,
                "record_type": "glycosequon",
                "search_type": "protein_to_glycosequons"
            },
            "results": results
        }
        # replace any previously cached result set for this list_id
        dbh[cache_collection].delete_many({"list_id": list_id})
        dbh[cache_collection].insert_one(search_results_obj)
        res_obj["list_id"] = list_id
    return res_obj
def gene(query_obj, config_obj):
    """Direct gene search: return a batch of matching protein records."""
    db_obj = config_obj[config_obj["server"]]["dbinfo"]
    dbh, error_obj = util.connect_to_mongodb(db_obj)  #connect to mongodb
    if error_obj != {}:
        return error_obj
    #Collect errors
    error_list = errorlib.get_errors_in_query("gene_search_direct", query_obj, config_obj)
    if error_list != []:
        return {"error_list": error_list}
    mongo_query = protein_apilib.get_mongo_query(query_obj)
    main_id = "uniprot_canonical_ac"
    max_hits = config_obj["max_results_count"]["protein"]
    results_dict = {}
    n_seen = 0
    for doc in dbh["c_protein"].find(mongo_query):
        n_seen += 1
        if n_seen > max_hits:
            break
        # skip records without the primary identifier
        if main_id in doc:
            results_dict[doc[main_id]] = doc
    return get_results_batch(results_dict, query_obj, config_obj)
def auth_userinfo(query_obj, config_obj):
    """Return the stored user record (minus _id/password) for query_obj["email"].

    The caller's token must resolve to a user with write access.
    """
    db_obj = config_obj[config_obj["server"]]["dbinfo"]
    dbh, error_obj = util.connect_to_mongodb(db_obj)  #connect to mongodb
    if error_obj != {}:
        return error_obj
    #Collect errors
    error_list = errorlib.get_errors_in_query("auth_userinfo", query_obj, config_obj)
    if error_list != []:
        return {"error_list": error_list}
    token_obj = auth_tokenstatus({"token": query_obj["token"]}, config_obj)
    # BUG FIX: propagate token failures instead of crashing with KeyError
    # on a missing "email" key
    if "error_list" in token_obj:
        return token_obj
    if "email" not in token_obj:
        return {"error_list": [{"error_code": "invalid-token"}]}
    user_info = dbh["c_users"].find_one({'email': token_obj["email"].lower()})
    # guard: the session's user may have been removed (find_one -> None)
    if user_info is None or "access" not in user_info:
        return {"error_list": [{"error_code": "no-write-access"}]}
    if user_info["access"] != "write":
        return {"error_list": [{"error_code": "no-write-access"}]}
    res_obj = dbh["c_users"].find_one({'email': query_obj["email"].lower()})
    # guard: the requested user may not exist
    if res_obj is None:
        return {"error_list": [{"error_code": "record-not-found"}]}
    res_obj.pop("_id", None)
    res_obj.pop("password", None)
    return res_obj
def biosynthesis_enzyme_to_glycans(query_obj, config_obj):
    """Return glycan records whose biosynthesis involves the given enzyme.

    Results are capped at config_obj["max_results_count"]["glycan"].
    """
    db_obj = config_obj[config_obj["server"]]["dbinfo"]
    dbh, error_obj = util.connect_to_mongodb(db_obj)  #connect to mongodb
    if error_obj != {}:
        return error_obj
    #Collect errors
    error_list = errorlib.get_errors_in_query(
        "biosynthesis_enzyme_to_glycans_direct", query_obj, config_obj)
    if error_list != []:
        return {"error_list": error_list}
    mongo_query = usecases_apilib.get_mongo_query(
        "biosynthesis_enzyme_to_glycans", query_obj)
    collection = "c_glycan"
    main_id = "glytoucan_ac"
    results_dict = {}
    i = 0
    for obj_two in dbh[collection].find(mongo_query):
        if main_id not in obj_two:
            continue
        glytoucan_ac = obj_two[main_id]
        # BUG FIX: the counter was never incremented, so the
        # max_results_count cap was never enforced
        i += 1
        if i > config_obj["max_results_count"]["glycan"]:
            break
        results_dict[glytoucan_ac] = obj_two
    res_obj = get_results_batch(results_dict, query_obj, config_obj)
    return res_obj
def auth_register(query_obj, config_obj):
    """Register a new user account keyed by email.

    New accounts start inactive (status 0) with read-only access and no role;
    the password is bcrypt-hashed before storage.
    """
    db_obj = config_obj[config_obj["server"]]["dbinfo"]
    dbh, error_obj = util.connect_to_mongodb(db_obj)  #connect to mongodb
    if error_obj != {}:
        return error_obj
    #Collect errors
    error_list = errorlib.get_errors_in_query("auth_register", query_obj, config_obj)
    if error_list != []:
        return {"error_list": error_list}
    collection = "c_users"
    query_obj["email"] = query_obj["email"].lower()
    query_obj["password"] = bcrypt.hashpw(
        query_obj["password"].encode('utf-8'), bcrypt.gensalt())
    query_obj["status"] = 0
    query_obj["access"] = "readonly"
    query_obj["role"] = ""
    if dbh[collection].find({"email": query_obj["email"]}).count() != 0:
        # NOTE(review): error code is misspelled ("regisgered") but clients
        # may match on it — confirm before correcting
        return {"error_list": [{"error_code": "email-already-regisgered"}]}
    dbh[collection].insert_one(query_obj)
    return {"type": "success"}
def protein_alignment(query_obj, config_obj):
    """Return the multiple-sequence alignment for a protein's cluster.

    Selects the cluster whose id contains query_obj["cluster_type"], loads
    its alignment, and moves the queried canonical sequence to the front of
    the sequence list.
    """
    db_obj = config_obj[config_obj["server"]]["dbinfo"]
    dbh, error_obj = util.connect_to_mongodb(db_obj)  #connect to mongodb
    if error_obj != {}:
        return error_obj
    #Collect errors
    error_list = errorlib.get_errors_in_query("protein_alignment", query_obj, config_obj)
    if error_list != []:
        return {"error_list": error_list}
    collection = "c_cluster"
    mongo_query = {"uniprot_canonical_ac": query_obj["uniprot_canonical_ac"]}
    obj = dbh[collection].find_one(mongo_query)
    # BUG FIX: an unknown accession yields no cluster document; the original
    # raised TypeError on obj["clusterlist"]
    if obj == None:
        return {"error_list": [{"error_code": "non-existent-record"}]}
    selected_cls_id = ""
    for cls_id in obj["clusterlist"]:
        if cls_id.find(query_obj["cluster_type"]) != -1:
            selected_cls_id = cls_id
            break
    if selected_cls_id == "":
        return {"error_list": [{"error_code": "non-existent-cluster-type"}]}
    #check for post-access error, error_list should be empty upto this line
    # (the original re-checked selected_cls_id == "" here; that branch was
    # unreachable and has been removed)
    post_error_list = []
    collection = "c_alignment"
    mongo_query = {"cls_id": selected_cls_id}
    obj = dbh[collection].find_one(mongo_query)
    if obj == None:
        post_error_list.append({"error_code": "non-existent-record"})
        return {"error_list": post_error_list}
    #If the object has a property that is not in the specs, remove it
    util.clean_obj(obj, config_obj["removelist"]["c_alignment"], "c_alignment")
    #make canonical sequence first in the list
    new_list_one = []
    new_list_two = []
    for o in obj["sequences"]:
        if o["uniprot_ac"] == query_obj["uniprot_canonical_ac"]:
            new_list_one.append(o)
        else:
            new_list_two.append(o)
    obj["sequences"] = new_list_one + new_list_two
    return obj
def home_init(config_obj):
    """Assemble the home-page payload: release versions, per-species
    statistics, and visible events (newest first)."""
    db_obj = config_obj[config_obj["server"]]["dbinfo"]
    dbh, error_obj = util.connect_to_mongodb(db_obj)  #connect to mongodb
    if error_obj != {}:
        return error_obj
    #Collect errors
    error_list = errorlib.get_errors_in_query("pages_home_init", {}, config_obj)
    if error_list != []:
        return {"error_list": error_list}
    res_obj = {"version": [], "statistics": []}
    for doc in dbh["c_version"].find({}):
        doc.pop("_id")
        res_obj["version"].append(doc)
    path_obj = config_obj[config_obj["server"]]["pathinfo"]
    species_obj = {}
    in_file = path_obj["datareleasespath"]
    in_file += "data/v-%s/misc/species_info.csv" % (config_obj["datarelease"])
    libgly.load_species_info(species_obj, in_file)
    # reference species from the release metadata ...
    tax_id_list = []
    for k in species_obj:
        obj = species_obj[k]
        if obj["is_reference"] == "yes":
            tax_id_list.append(str(obj["tax_id"]))
    # ... plus any tax ids already present in the stat documents
    for doc in dbh["c_stat"].find({}):
        for tax_id in sorted(doc["oldstat"]):
            if tax_id not in tax_id_list:
                tax_id_list.append(tax_id)
    for doc in dbh["c_stat"].find({}):
        doc.pop("_id")
        for tax_id in list(set(tax_id_list)):
            # BUG FIX: a tax_id collected from the species file (or another
            # stat doc) may be absent from this doc's oldstat; skip instead
            # of raising KeyError
            if tax_id not in doc["oldstat"]:
                continue
            res_obj["statistics"].append(doc["oldstat"][tax_id])
        #uncomment this when the frontend is ready to consume new stat format
        #res_obj["statistics"] = doc["newstat"]
    res_obj["events"] = []
    doc_list = dbh["c_event"].find({"visibility": "visible"}).sort('createdts', pymongo.DESCENDING)
    for doc in doc_list:
        doc["id"] = str(doc["_id"])
        doc.pop("_id")
        for k in ["createdts", "updatedts"]:
            if k not in doc:
                continue
            doc[k] = doc[k].strftime('%Y-%m-%d %H:%M:%S %Z%z')
        res_obj["events"].append(doc)
    return res_obj
def verlist(config_obj):
    """List data-release versions for which a c_bco_v-<release> collection
    exists."""
    db_obj = config_obj[config_obj["server"]]["dbinfo"]
    dbh, error_obj = util.connect_to_mongodb(db_obj)  #connect to mongodb
    # consistency fix: surface connection failures like the other endpoints
    if error_obj != {}:
        return error_obj
    out_obj = []
    prefix = "c_bco_v-"
    for coll in dbh.collection_names():
        # BUG FIX: the original matched the prefix anywhere in the name
        # (str.find != -1) and then sliced a fixed 8 characters, which is
        # only correct when the match is at position 0
        if coll.startswith(prefix):
            out_obj.append(coll[len(prefix):])
    return out_obj
def categorized_typeahead(query_obj, config_obj):
    """Case-insensitive typeahead over GO term names, grouped by category.

    Returns at most query_obj["categorywise_limit"] suggestions per category
    and query_obj["total_limit"] overall, each shaped as
    {"label": term, "category": category}.
    """
    db_obj = config_obj[config_obj["server"]]["dbinfo"]
    dbh, error_obj = util.connect_to_mongodb(db_obj)  #connect to mongodb
    if error_obj != {}:
        return error_obj
    #Collect errors
    error_list = errorlib.get_errors_in_query("categorized_typeahead", query_obj, config_obj)
    if error_list != []:
        return {"error_list": error_list}
    collection = "c_protein"
    mongo_query = {}
    if query_obj["field"] == "go_term":
        mongo_query = {
            "go_annotation.categories.go_terms.name": {
                '$regex': query_obj["value"],
                '$options': 'i'
            }
        }
    hit_dict = {}
    seen = {}
    total = 0
    limit_one = query_obj["total_limit"]
    limit_two = query_obj["categorywise_limit"]
    prj_obj = {"go_annotation": 1}
    value_lc = query_obj["value"].lower()  # hoisted out of the loops
    done = False
    for obj in dbh[collection].find(mongo_query, prj_obj):
        # guard: a matched document may still lack go_annotation
        if "go_annotation" not in obj:
            continue
        for cat_obj in obj["go_annotation"]["categories"]:
            cat = cat_obj["name"]
            for term_obj in cat_obj["go_terms"]:
                term = term_obj["name"]
                if term.lower().find(value_lc) == -1:
                    continue
                if cat not in hit_dict:
                    hit_dict[cat] = []
                    seen[cat] = {}
                if term not in seen[cat] and len(hit_dict[cat]) < limit_two:
                    hit_dict[cat].append({"label": term, "category": cat})
                    seen[cat][term] = True
                    total += 1
                if total >= limit_one:
                    # BUG FIX: the original break only exited the innermost
                    # loop, so total_limit was not actually enforced
                    done = True
                    break
            if done:
                break
        if done:
            break
    res_obj = []
    for cat in hit_dict:
        for o in hit_dict[cat]:
            res_obj.append(o)
    return res_obj
def site_detail(query_obj, config_obj):
    """Return the site record for a "<accession>.<start>.<end>" site id,
    augmented with fields from the parent protein record."""
    db_obj = config_obj[config_obj["server"]]["dbinfo"]
    dbh, error_obj = util.connect_to_mongodb(db_obj)  #connect to mongodb
    if error_obj != {}:
        return error_obj
    #Collect errors
    error_list = errorlib.get_errors_in_query("site_detail", query_obj, config_obj)
    if error_list != []:
        return {"error_list": error_list}
    collection = "c_protein"
    canon, start_pos, end_pos = query_obj["site_id"].split(".")
    mongo_query = {"uniprot_canonical_ac": canon}
    # an accession without "-" is a plain UniProt ac, not a canonical ac
    if canon.find("-") == -1:
        mongo_query = {"uniprot_ac": canon}
    canon_doc = dbh[collection].find_one(mongo_query)
    #check for post-access error, error_list should be empty upto this line
    post_error_list = []
    # BUG FIX: an unknown accession left canon_doc as None, and the original
    # later crashed on canon_doc["uniprot_canonical_ac"]
    if canon_doc == None:
        post_error_list.append({"error_code": "non-existent-record"})
        return {"error_list": post_error_list}
    collection = "c_site"
    mongo_query = {"id": query_obj["site_id"]}
    site_doc = dbh[collection].find_one(mongo_query)
    if site_doc == None:
        # retry with the canonical accession substituted into the site id
        canon = canon_doc["uniprot_canonical_ac"]
        mongo_query = {"id": "%s.%s.%s" % (canon, start_pos, end_pos)}
        site_doc = dbh[collection].find_one(mongo_query)
    if site_doc == None:
        post_error_list.append({"error_code": "non-existent-record"})
        return {"error_list": post_error_list}
    url = config_obj["urltemplate"]["uniprot"] % (
        canon_doc["uniprot_canonical_ac"])
    # BUG FIX: uniprot_id may be absent; the original guarded one access but
    # then read canon_doc["uniprot_id"] unguarded on the next line
    uniprot_id = canon_doc["uniprot_id"] if "uniprot_id" in canon_doc else ""
    site_doc["uniprot_id"] = uniprot_id
    site_doc["uniprot"] = {
        "uniprot_canonical_ac": canon_doc["uniprot_canonical_ac"],
        "uniprot_id": uniprot_id,
        "url": url,
        "length": canon_doc["sequence"]["length"]
    }
    # copy remaining protein-level fields not already on the site record
    for k in [
            "uniprot", "sequence", "mass", "protein_names", "gene",
            "gene_names", "species", "refseq"
    ]:
        if k in canon_doc and k not in site_doc:
            site_doc[k] = canon_doc[k]
    return site_doc
def site_search_init(config_obj):
    """Return the site section of the search initialization document."""
    db_obj = config_obj[config_obj["server"]]["dbinfo"]
    dbh, error_obj = util.connect_to_mongodb(db_obj)  #connect to mongodb
    if error_obj != {}:
        return error_obj
    init_doc = dbh["c_searchinit"].find_one({})
    return init_doc["site"]
def glycan_detail(query_obj, config_obj):
    """Return the detail record for a GlyTouCan accession, including id
    history, a zero-count-free composition, and spec-ordered fields."""
    db_obj = config_obj[config_obj["server"]]["dbinfo"]
    dbh, error_obj = util.connect_to_mongodb(db_obj)  #connect to mongodb
    if error_obj != {}:
        return error_obj
    #Collect errors
    error_list = errorlib.get_errors_in_query("glycan_detail", query_obj, config_obj)
    if error_list != []:
        return {"error_list": error_list}
    ac = query_obj["glytoucan_ac"].upper()
    history_obj = dbh["c_idtrack"].find_one({"record_id": {'$eq': ac}})
    obj = dbh["c_glycan"].find_one({"glytoucan_ac": {'$eq': ac}})
    #check for post-access error, error_list should be empty upto this line
    if obj == None:
        res_obj = {"error_list": [{"error_code": "non-existent-record"}]}
        if history_obj != None:
            # explain why the record no longer exists
            res_obj["reason"] = history_obj["history"]
        return res_obj
    url = config_obj["urltemplate"]["glytoucan"] % (obj["glytoucan_ac"])
    obj["glytoucan"] = {
        "glytoucan_ac": obj["glytoucan_ac"],
        "glytoucan_url": url
    }
    obj["history"] = [] if history_obj == None else history_obj["history"]
    #Remove 0 count residues
    obj["composition"] = [o for o in obj["composition"] if o["count"] > 0]
    util.clean_obj(obj, config_obj["removelist"]["c_glycan"], "c_glycan")
    if "enzyme" in obj:
        for o in obj["enzyme"]:
            if "gene_url" in o:
                o["gene_link"] = o["gene_url"]
    return util.order_obj(obj, config_obj["objectorder"]["glycan"])
def disease_to_glycosyltransferases(query_obj, config_obj):
    """Find glycosyltransferases linked to a disease, cache the accession
    list, and return {"list_id": ...} ({} when nothing matched).

    NOTE(review): this module defines disease_to_glycosyltransferases twice;
    this later definition shadows the earlier one — confirm which is intended.
    """
    db_obj = config_obj[config_obj["server"]]["dbinfo"]
    dbh, error_obj = util.connect_to_mongodb(db_obj)  #connect to mongodb
    if error_obj != {}:
        return error_obj
    #Collect errors
    error_list = errorlib.get_errors_in_query("usecases_group_six", query_obj, config_obj)
    if error_list != []:
        return {"error_list": error_list}
    mongo_query = get_mongo_query("disease_to_glycosyltransferases", query_obj)
    collection = "c_protein"
    search_type = "disease_to_glycosyltransferases"
    record_type = "protein"
    record_list = []
    for obj in dbh[collection].find(mongo_query,
                                    config_obj["projectedfields"][collection]):
        record_list.append(obj["uniprot_canonical_ac"])
    query_obj["organism"] = {
        "id": query_obj["tax_id"],
        "name": config_obj["taxid2name"][str(query_obj["tax_id"])]
    }
    query_obj.pop("tax_id")
    res_obj = {}
    ts_format = "%Y-%m-%d %H:%M:%S %Z%z"
    ts = datetime.datetime.now(pytz.timezone('US/Eastern')).strftime(ts_format)
    cache_coll = "c_cache"
    list_id = ""
    if len(record_list) != 0:
        # encode to bytes so hashlib.md5 works under both Python 2 and 3
        hash_obj = hashlib.md5((record_type + "_" + json.dumps(query_obj)).encode("utf-8"))
        list_id = hash_obj.hexdigest()
        cache_info = {
            "query": query_obj,
            "ts": ts,
            "record_type": record_type,
            "search_type": search_type
        }
        util.cache_record_list(dbh, list_id, record_list, cache_info, cache_coll, config_obj)
        res_obj = {"list_id": list_id}
    return res_obj
def auth_login(query_obj, config_obj):
    """Validate email/password; on success create a session and return its
    token."""
    db_obj = config_obj[config_obj["server"]]["dbinfo"]
    dbh, error_obj = util.connect_to_mongodb(db_obj)  #connect to mongodb
    if error_obj != {}:
        return error_obj
    #Collect errors
    error_list = errorlib.get_errors_in_query("auth_login", query_obj, config_obj)
    if error_list != []:
        return {"error_list": error_list}
    query_obj["email"] = query_obj["email"].lower()
    login_user = dbh["c_users"].find_one({'email': query_obj["email"]})
    bad_combo = {
        "error_list": [{
            "error_code": "invalid-email/password-combination"
        }]
    }
    if not login_user:
        return bad_combo
    stored_password = login_user['password'].encode('utf-8')
    submitted_password = query_obj['password'].encode('utf-8')
    # account must be activated before any login succeeds
    if login_user["status"] == 0:
        return {"error_list": [{"error_code": "inactive-account"}]}
    if bcrypt.hashpw(submitted_password, stored_password) != stored_password:
        return bad_combo
    token = make_hash_string() + make_hash_string()
    session_obj = {
        "email": query_obj["email"],
        "token": token,
        "createdts": datetime.datetime.now()
    }
    dbh["c_session"].insert_one(session_obj)
    return {"type": "success", "token": token}
def glycan_search_init(config_obj):
    """Return the glycan section of the search initialization document."""
    db_obj = config_obj[config_obj["server"]]["dbinfo"]
    dbh, error_obj = util.connect_to_mongodb(db_obj)  #connect to mongodb
    if error_obj != {}:
        return error_obj
    error_list = errorlib.get_errors_in_query("glycan_searchinit", {}, config_obj)
    if error_list != []:
        return {"error_list": error_list}
    collection = "c_searchinit"
    res_obj = dbh[collection].find_one({})
    # BUG FIX / consistency: guard a missing or incomplete init document
    # (same pattern as the usecases search_init endpoint) instead of
    # raising TypeError/KeyError
    if res_obj == None or "glycan" not in res_obj:
        return {"error_list": [{"error_code": "non-existent-search-init"}]}
    return res_obj["glycan"]
def protein_search(query_obj, config_obj):
    """Run a protein search, cache the matching canonical accessions, and
    return {"list_id": ...} (list_id is "" when there were no hits)."""
    db_obj = config_obj[config_obj["server"]]["dbinfo"]
    dbh, error_obj = util.connect_to_mongodb(db_obj)  #connect to mongodb
    if error_obj != {}:
        return error_obj
    #Collect errors
    error_list = errorlib.get_errors_in_query("protein_search", query_obj, config_obj)
    if error_list != []:
        return {"error_list": error_list}
    mongo_query = get_mongo_query(query_obj)
    collection = "c_protein"
    record_list = []
    record_type = "protein"
    prj_obj = {"uniprot_canonical_ac": 1}
    for obj in dbh[collection].find(mongo_query, prj_obj):
        record_list.append(obj["uniprot_canonical_ac"])
    ts_format = "%Y-%m-%d %H:%M:%S %Z%z"
    ts = datetime.datetime.now(pytz.timezone('US/Eastern')).strftime(ts_format)
    cache_coll = "c_cache"
    list_id = ""
    # BUG FIX: res_obj was only assigned inside the non-empty branch, so an
    # empty result set raised UnboundLocalError on return
    res_obj = {"list_id": list_id}
    if len(record_list) != 0:
        # encode to bytes so hashlib.md5 works under both Python 2 and 3
        hash_obj = hashlib.md5((record_type + "_" + json.dumps(query_obj)).encode("utf-8"))
        list_id = hash_obj.hexdigest()
        cache_info = {
            "query": query_obj,
            "ts": ts,
            "record_type": record_type,
            "search_type": "search"
        }
        util.cache_record_list(dbh, list_id, record_list, cache_info, cache_coll, config_obj)
        res_obj = {"list_id": list_id}
    return res_obj
def event_delete(query_obj, config_obj):
    """Soft-delete an event (set visibility to "hidden") by ObjectId.

    Requires a valid token belonging to a user with write access.
    """
    db_obj = config_obj[config_obj["server"]]["dbinfo"]
    dbh, error_obj = util.connect_to_mongodb(db_obj)  #connect to mongodb
    # consistency fix: surface connection failures like the other endpoints
    if error_obj != {}:
        return error_obj
    #Collect errors
    error_list = errorlib.get_errors_in_query("event_delete", query_obj, config_obj)
    if error_list != []:
        return {"error_list": error_list}
    res_obj = auth_apilib.auth_tokenstatus({"token": query_obj["token"]}, config_obj)
    #check validity of token
    if "error_list" in res_obj:
        return res_obj
    if "status" not in res_obj:
        return {"error_list": [{"error_code": "invalid-token"}]}
    if res_obj["status"] != 1:
        return {"error_list": [{"error_code": "invalid-token"}]}
    #check write-access
    user_info = dbh["c_users"].find_one({'email': res_obj["email"].lower()})
    # guard: the session's user may have been removed (find_one -> None)
    if user_info is None or "access" not in user_info:
        return {"error_list": [{"error_code": "no-write-access"}]}
    if user_info["access"] != "write":
        return {"error_list": [{"error_code": "no-write-access"}]}
    res_obj = {}
    try:
        q_obj = {"_id": ObjectId(query_obj["id"])}
        doc = dbh["c_event"].find_one(q_obj)
        if doc == None:
            res_obj = {"error_list": [{"error_code": "record-not-found"}]}
        else:
            update_obj = {"visibility": "hidden"}
            dbh["c_event"].update_one(q_obj, {'$set': update_obj}, upsert=True)
            res_obj = {"type": "success"}
    except Exception as e:
        res_obj = {"error_list": [{"error_code": str(e)}]}
    return res_obj
def auth_userupdate(query_obj, config_obj):
    """Update a user record in c_users.

    Two paths are permitted:
      * self-service: a user may change only their own password
        (query_obj carries just token/email/password, and the token's email
        matches query_obj["email"]);
      * admin: a user whose role is "admin" may update any field except
        token and email.
    Passwords are bcrypt-hashed before storage.
    """
    db_obj = config_obj[config_obj["server"]]["dbinfo"]
    path_obj = config_obj[config_obj["server"]]["pathinfo"]
    dbh, error_obj = util.connect_to_mongodb(db_obj)  #connect to mongodb
    #Collect errors
    error_list = errorlib.get_errors_in_query("auth_userupdate", query_obj, config_obj)
    if error_list != []:
        return {"error_list": error_list}
    # validate the session token before touching any record
    res_obj = auth_tokenstatus({"token": query_obj["token"]}, config_obj)
    if "error_list" in res_obj:
        return res_obj
    if "status" not in res_obj:
        return {"error_list": [{"error_code": "invalid-token"}]}
    if res_obj["status"] != 1:
        return {"error_list": [{"error_code": "invalid-token"}]}
    try:
        user_info = dbh["c_users"].find_one(
            {'email': res_obj["email"].lower()})
        q_obj = {"email": query_obj["email"]}
        update_obj = {}
        # self-service path: a pure password change on one's own account
        if "access" not in query_obj and "role" not in query_obj and "password" in query_obj and res_obj[
                "email"].lower() == query_obj["email"]:
            update_obj["password"] = bcrypt.hashpw(
                query_obj["password"].encode('utf-8'), bcrypt.gensalt())
        else:
            # admin path: any other update requires the admin role
            # NOTE(review): user_info can be None here (raising TypeError,
            # which is swallowed by the broad except below) — confirm this
            # is the intended failure mode
            if "role" not in user_info:
                return {"error_list": [{"error_code": "no-admin-role"}]}
            if user_info["role"] != "admin":
                return {"error_list": [{"error_code": "no-admin-role"}]}
            for k in query_obj:
                if k == "password":
                    update_obj[k] = bcrypt.hashpw(query_obj[k].encode('utf-8'),
                                                  bcrypt.gensalt())
                elif k not in ["token", "email"]:
                    # token/email are never written back to the record
                    update_obj[k] = query_obj[k]
        res = dbh["c_users"].update_one(q_obj, {'$set': update_obj},
                                        upsert=True)
        return {"type": "success"}
    except Exception as e:
        return {"error_list": [{"error_code": str(e)}]}
def bcolist(config_obj):
    """Map each BCO id in c_bco to the file name of its first output
    subdomain entry."""
    db_obj = config_obj[config_obj["server"]]["dbinfo"]
    dbh, error_obj = util.connect_to_mongodb(db_obj)  #connect to mongodb
    out_obj = {}
    for doc in dbh["c_bco"].find({}):
        # flattened guard chain: require bco_id, io_domain and a non-empty
        # output_subdomain before reading the file name
        if "bco_id" not in doc or "io_domain" not in doc:
            continue
        subdomain = doc["io_domain"].get("output_subdomain", [])
        if subdomain != []:
            out_obj[doc["bco_id"]] = subdomain[0]["uri"]["filename"]
    return out_obj
def protein(query_obj, config_obj):
    """Direct protein search; returns a batch of matching protein records."""
    db_obj = config_obj[config_obj["server"]]["dbinfo"]
    dbh, error_obj = util.connect_to_mongodb(db_obj)  #connect to mongodb
    if error_obj != {}:
        return error_obj
    #glycan.attached is not implemented in c_protein jsons yet
    glycan_part = query_obj.get("glycan", {})
    if "relation" in glycan_part:
        if glycan_part["relation"] not in ["attached", "binding", "any"]:
            return {
                "error_list": [{
                    "error_code": "invalid-parameter-value",
                    "field": "glycan.relation"
                }]
            }
    #Collect errors
    error_list = errorlib.get_errors_in_query("protein_search_direct", query_obj, config_obj)
    if error_list != []:
        return {"error_list": error_list}
    mongo_query = protein_apilib.get_mongo_query(query_obj)
    main_id = "uniprot_canonical_ac"
    max_hits = config_obj["max_results_count"]["protein"]
    results_dict = {}
    n_seen = 0
    for doc in dbh["c_protein"].find(mongo_query):
        n_seen += 1
        if n_seen > max_hits:
            break
        # skip records without the primary identifier
        if main_id in doc:
            results_dict[doc[main_id]] = doc
    return get_results_batch(results_dict, query_obj, config_obj)
def event_addnew(query_obj, config_obj):
    """Create a new event document from query_obj (minus the auth token).

    Requires a valid token belonging to a user with write access.
    """
    db_obj = config_obj[config_obj["server"]]["dbinfo"]
    dbh, error_obj = util.connect_to_mongodb(db_obj)  #connect to mongodb
    if error_obj != {}:
        return error_obj
    #Collect errors
    error_list = errorlib.get_errors_in_query("event_addnew", query_obj, config_obj)
    if error_list != []:
        return {"error_list": error_list}
    res_obj = auth_apilib.auth_tokenstatus({"token": query_obj["token"]}, config_obj)
    #check validity of token
    if "error_list" in res_obj:
        return res_obj
    if "status" not in res_obj:
        return {"error_list": [{"error_code": "invalid-token"}]}
    if res_obj["status"] != 1:
        return {"error_list": [{"error_code": "invalid-token"}]}
    #check write-access
    user_info = dbh["c_users"].find_one({'email': res_obj["email"].lower()})
    # BUG FIX: the session's user may have been removed (find_one -> None);
    # the original raised TypeError on the membership test
    if user_info is None or "access" not in user_info:
        return {"error_list": [{"error_code": "no-write-access"}]}
    if user_info["access"] != "write":
        return {"error_list": [{"error_code": "no-write-access"}]}
    res_obj = {}
    try:
        query_obj.pop("token")
        query_obj["createdts"] = datetime.datetime.now()
        query_obj["updatedts"] = query_obj["createdts"]
        dbh["c_event"].insert_one(query_obj)
        res_obj = {"type": "success"}
    except Exception as e:
        res_obj = {"error_list": [{"error_code": str(e)}]}
    return res_obj
def search_init(config_obj):
    """Return the usecases section of the search initialization document."""
    db_obj = config_obj[config_obj["server"]]["dbinfo"]
    dbh, error_obj = util.connect_to_mongodb(db_obj)  #connect to mongodb
    if error_obj != {}:
        return error_obj
    #Collect errors
    error_list = errorlib.get_errors_in_query("usecases_search_init", {}, config_obj)
    if error_list != []:
        return {"error_list": error_list}
    init_doc = dbh["c_searchinit"].find_one({})
    # guard a missing or incomplete init document
    if init_doc is None or "usecases" not in init_doc:
        return {"error_list": [{"error_code": "non-existent-search-init"}]}
    return init_doc["usecases"]
def messagelist(config_obj):
    """Return all c_message documents, newest first, with timestamps
    formatted as strings."""
    db_obj = config_obj[config_obj["server"]]["dbinfo"]
    dbh, error_obj = util.connect_to_mongodb(db_obj)  #connect to mongodb
    import pymongo
    out_obj = []
    cursor = dbh["c_message"].find({}).sort('creation_time', pymongo.DESCENDING)
    for doc in cursor:
        doc.pop("_id")
        for ts_field in ["creation_time", "update_time", "ts"]:
            if ts_field in doc:
                doc[ts_field] = doc[ts_field].strftime('%Y-%m-%d %H:%M:%S %Z%z')
        out_obj.append(doc)
    return out_obj
def protein_search_simple(query_obj, config_obj):
    """Run a simple protein search, cache the matching canonical accessions,
    and return {"list_id": ...} (list_id is "" when there were no hits)."""
    db_obj = config_obj[config_obj["server"]]["dbinfo"]
    dbh, error_obj = util.connect_to_mongodb(db_obj)  #connect to mongodb
    if error_obj != {}:
        return error_obj
    #Collect errors
    error_list = errorlib.get_errors_in_query("protein_search_simple", query_obj, config_obj)
    if error_list != []:
        return {"error_list": error_list}
    mongo_query = get_simple_mongo_query(query_obj)
    collection = "c_protein"
    record_list = []
    record_type = "protein"
    prj_obj = {"uniprot_canonical_ac": 1}
    for obj in dbh[collection].find(mongo_query, prj_obj):
        record_list.append(obj["uniprot_canonical_ac"])
    ts_format = "%Y-%m-%d %H:%M:%S %Z%z"
    ts = datetime.datetime.now(pytz.timezone('US/Eastern')).strftime(ts_format)
    cache_coll = "c_cache"
    list_id = ""
    # BUG FIX: res_obj was only assigned inside the non-empty branch, so an
    # empty result set raised UnboundLocalError on return
    res_obj = {"list_id": list_id}
    if len(record_list) != 0:
        # encode to bytes so hashlib.md5 works under both Python 2 and 3
        hash_obj = hashlib.md5((record_type + "_" + json.dumps(query_obj)).encode("utf-8"))
        list_id = hash_obj.hexdigest()
        cache_info = {
            "query": query_obj,
            "ts": ts,
            "record_type": record_type,
            "search_type": "search_simple"
        }
        util.cache_record_list(dbh, list_id, record_list, cache_info, cache_coll, config_obj)
        res_obj = {"list_id": list_id}
    return res_obj
def auth_contactlist(query_obj, config_obj):
    """List contact messages filtered by visibility ("all" for everything),
    newest first, with formatted timestamps.

    Requires a valid session token.
    """
    db_obj = config_obj[config_obj["server"]]["dbinfo"]
    dbh, error_obj = util.connect_to_mongodb(db_obj)  #connect to mongodb
    # consistency fix: surface connection failures like the other endpoints
    if error_obj != {}:
        return error_obj
    #Collect errors
    error_list = errorlib.get_errors_in_query("auth_contactlist", query_obj, config_obj)
    if error_list != []:
        return {"error_list": error_list}
    res_obj = auth_tokenstatus({"token": query_obj["token"]}, config_obj)
    if "error_list" in res_obj:
        return res_obj
    if "status" not in res_obj:
        return {"error_list": [{"error_code": "invalid-token"}]}
    if res_obj["status"] != 1:
        return {"error_list": [{"error_code": "invalid-token"}]}
    import pymongo
    doc_list = []
    try:
        q_obj = {} if query_obj["visibility"] == "all" else {
            "visibility": query_obj["visibility"]
        }
        doc_list = dbh["c_message"].find(q_obj).sort('creation_time',
                                                     pymongo.DESCENDING)
    except Exception as e:
        return {"error_list": [{"error_code": str(e)}]}
    out_obj = []
    for doc in doc_list:
        doc["id"] = str(doc["_id"])
        doc.pop("_id")
        for k in ["creation_time", "update_time", "ts"]:
            if k not in doc:
                continue
            doc[k] = doc[k].strftime('%Y-%m-%d %H:%M:%S %Z%z')
        out_obj.append(doc)
    return out_obj
def protein_to_homologs(query_obj, config_obj):
    """Collect ortholog protein records for a given protein accession.

    Matches either the canonical or plain UniProt accession, resolves each
    ortholog to its full protein record, and returns a results batch capped
    at config_obj["max_results_count"]["protein"].
    """
    db_obj = config_obj[config_obj["server"]]["dbinfo"]
    dbh, error_obj = util.connect_to_mongodb(db_obj)  #connect to mongodb
    if error_obj != {}:
        return error_obj
    #Collect errors
    error_list = errorlib.get_errors_in_query("protein_to_homologs_direct", query_obj, config_obj)
    if error_list != []:
        return {"error_list": error_list}
    collection = "c_protein"
    mongo_query = {
        "$or": [{
            "uniprot_canonical_ac": {'$eq': query_obj["uniprot_canonical_ac"]}
        }, {
            "uniprot_ac": {'$eq': query_obj["uniprot_canonical_ac"]}
        }]
    }
    main_id = "uniprot_canonical_ac"
    results_dict = {}
    i = 0
    reached_limit = False
    for obj_one in dbh[collection].find(mongo_query):
        # BUG FIX: a record may lack an orthologs list (KeyError before)
        for o in obj_one.get("orthologs", []):
            canon = o[main_id]
            obj_two = dbh[collection].find_one({"uniprot_canonical_ac": canon})
            i += 1
            if i > config_obj["max_results_count"]["protein"]:
                # BUG FIX: the original break only exited the inner loop,
                # so the cap was not enforced across matched records
                reached_limit = True
                break
            # guard: ortholog accession may have no record of its own
            if obj_two is not None:
                results_dict[canon] = obj_two
        if reached_limit:
            break
    res_obj = get_results_batch(results_dict, query_obj, config_obj)
    return res_obj