def get_jobs(author_uid=None, job_id=None):
    """
    Fetch all non-expired jobs (job_valid_date >= today), each with its
    keyword count and GROUP_CONCATed keyword strings, optionally filtered
    by author and/or job id.

    @author_uid: optional scholar luid to filter on jobs.uid
    @job_id:     optional job id to filter on jobs.jobid

    Returns: list of job dicts, each passed through prepare_job()
    """
    # always filter out expired jobs
    constraints = ['job_valid_date >= CURDATE()']
    # caller-provided values go through driver-side parameterization
    # (they were previously string-interpolated => SQL-injection prone)
    params = []
    if author_uid:
        constraints.append('jobs.uid = %s')
        params.append(author_uid)
    if job_id:
        constraints.append('jobs.jobid = %s')
        params.append(job_id)
    db = connect_db()
    db_c = db.cursor(DictCursor)
    sql_q = """SELECT jobs.*,
                      COUNT(job_kw.kwid) AS kwid_nb,
                      GROUP_CONCAT(keywords.kwstr) AS keywords
               FROM jobs
               LEFT JOIN job_kw
                    ON jobs.jobid = job_kw.jobid
               LEFT JOIN keywords
                    ON keywords.kwid = job_kw.kwid
               WHERE %s
               GROUP BY jobs.jobid;""" % " AND ".join(constraints)
    mlog("DEBUGSQL", "get_jobs", sql_q)
    db_c.execute(sql_q, params or None)
    job_rows = [prepare_job(jr) for jr in db_c.fetchall()]
    db.close()
    return job_rows
def __init__(self, luid, doors_uid=None):
    """
    Build a user from one of two possible id sources.

    Normal user syntax: User(luid)
        (user already in db)
        => has luid

    Empty user syntax:  User(None, doors_uid=foobar)
        (user exists only in doors but not in db)
        => no luid, but has doors_uid

    NB load_user() wants a *single id for both*,
       which is provided by self.get_id()

    Raises ValueError when luid has no matching scholar row (stale cookie),
    TypeError when neither id is given.
    """
    mlog('DEBUG',
         'new User(luid=%s, doors_uid="%s")' % (str(luid), str(doors_uid)))
    # normal user has a nice info dict
    if luid is not None:
        luid = int(luid)
        scholar = get_full_scholar(luid)
        if scholar is None:
            # luid points nowhere: most likely an old session cookie
            raise ValueError(
                'this uid %i references a scholar that is not really in the DB... Did you change the database recently and have still some old cookies with IDs?'
                % luid)
        else:
            self.uid = luid
            self.info = scholar
            self.doors_uid = self.info['doors_uid']
            self.empty = False

            # add pic_src into info
            if 'pic_fname' in self.info and self.info['pic_fname']:
                # locally saved picture file
                self.info['pic_src'] = '/' + '/'.join(
                    IMAGE_SAVING_POINT + [self.info['pic_fname']])
            elif 'pic_url' in self.info and self.info['pic_url']:
                # remote picture
                self.info['pic_src'] = self.info['pic_url']
            else:
                self.info['pic_src'] = None

            # self.info --> js uinfo for client-side
            self.json_info = dumps(prejsonize(self.info))

    # user exists in doors but has nothing in scholars DB yet
    elif doors_uid is not None:
        self.uid = None
        self.info = {}
        self.json_info = "{}"
        self.doors_uid = doors_uid
        self.doors_info = get_doors_temp_user(doors_uid)
        self.empty = True
    else:
        raise TypeError(
            "User can either be initialized with comex_db luid or with doors_uid"
        )
def save_pairs_fkey_tok(pairings_list, cmx_db=None, map_table='sch_kw'):
    """
    Simply save all pairings (luid, kwid) or (luid, htid) in the list

    @pairings_list: list of 2-tuples
    @cmx_db: optional already-open connection (left open if provided)
    @map_table: one of 'sch_kw', 'sch_ht', 'job_kw'
    """
    # whitelist the table name: it is interpolated into the SQL string
    # (consistent with delete_pairs_fkey_tok)
    if map_table not in ('sch_kw', 'sch_ht', 'job_kw'):
        raise TypeError('ERROR: Unknown map_table')
    if cmx_db:
        db = cmx_db
    else:
        db = connect_db()
    db_cursor = db.cursor()
    # set() deduplicates the pairs before insertion
    for id_pair in set(pairings_list):
        # pair values are parameterized (they were interpolated raw before)
        db_cursor.execute('INSERT INTO %s VALUES (%%s, %%s)' % map_table,
                          id_pair)
        mlog("DEBUG", "%s: saved %s pair" % (map_table, str(id_pair)))
    db.commit()
    if not cmx_db:
        db.close()
def rm_scholar(luid, cmx_db=None):
    """
    Remove a scholar by id
    (removals from sch_kw and sch_ht maps are triggered by cascade)

    @luid: local user id (int or int str)
    @cmx_db: optional already-open connection (left open if provided)
    """
    if cmx_db:
        db = cmx_db
    else:
        db = connect_db()
    db_c = db.cursor()
    # cast once: validates the id and makes the %i interpolation safe
    luid = int(luid)
    stmt = 'DELETE FROM scholars WHERE luid = %i' % luid
    mlog("DEBUGSQL", "rm_scholar STATEMENT:\n-- SQL\n%s\n-- /SQL" % stmt)
    db_c.execute(stmt)
    db.commit()
    mlog('INFO', 'deleted user %i at his request' % luid)
    if not cmx_db:
        db.close()
def update_scholar_cols(selected_safe_recs, cmx_db, where_luid=None):
    """
    For modification of selected columns:
        -> *update* row with the values that are present and are real columns
           (if values are absent, they are left unchanged)
           (if values are present and None, they become NULL in db)

    @selected_safe_recs: dict colname => value (values already sanitized)
    @cmx_db: open connection (caller owns it; committed but not closed here)
    @where_luid: luid of the row to update

    Returns where_luid.

    see also COLS variable and doc/table_specifications.md
    """
    # column names and column quoted values
    db_tgtcols = []
    db_qstrvals = []
    mlog("INFO", "DB selective update %s" % selected_safe_recs)
    for colinfo in USER_COLS:
        colname = colinfo[0]
        # selective updating: only provided columns, never the primary key
        if colname in selected_safe_recs and colname != 'luid':
            val = selected_safe_recs[colname]
            if val is None:
                quotedstrval = "NULL"
            else:
                # NOTE(review): hand quoting relies on upstream sanitize() —
                # values must already contain no quote chars
                quotedstrval = "'" + str(val) + "'"
            mlog("DEBUG", "DB selective update %s %s" % (colname, quotedstrval))
            db_tgtcols.append(colname)
            db_qstrvals.append(quotedstrval)
    # nothing selected => avoid emitting a malformed "SET  WHERE" statement
    if not db_tgtcols:
        return where_luid
    cmx_db_c = cmx_db.cursor()
    set_full_str = key_val_expr(db_tgtcols, db_qstrvals)
    # UPDATE: full_statement with formated values
    full_statmt = 'UPDATE scholars SET %s WHERE luid = "%s"' % (set_full_str,
                                                                where_luid)
    cmx_db_c.execute(full_statmt)
    cmx_db.commit()
    return where_luid
def load_user(mixedid):
    """
    Used by flask-login to bring back user object from a special id
    stored in session... this special id is defined in User.get_id()

    @mixedid: "normal/luid:<int>" or "empty/doors:<uuid>" (or None)

    Returns a User, or None when mixedid is None / matches neither format.
    """
    u = None
    mlog("DEBUG", "load_user: %s" % mixedid)
    if mixedid is not None:
        # raw strings: '\d' in a plain literal is an invalid escape sequence
        testluid = match(r'normal/luid:(\d+)$', mixedid)
        testduid = match(r'empty/doors:([a-f\d-]+)$', mixedid)
        if testluid:
            # user already present in comex_db
            luid = int(testluid.groups()[0])
            u = User(luid)
        elif testduid:
            # user only known to doors so far
            doors_uid = testduid.groups()[0]
            u = User(None, doors_uid=doors_uid)
            mlog("DEBUG", "load_user: empty user recreated from doors_uid")
    return u
def doors_register(email, password, name, config=REALCONFIG):
    """
    Remote query to Doors API to register a user

    @email/@password/@name: credentials forwarded to the Doors service
    @config: app config dict (uses DOORS_HOST, DOORS_PORT, DOORS_NOSSL)

    Returns the new doors userID on success, None otherwise.
    """
    sentdata = {'login': email.lower(), 'password': password, 'name': name}
    http_scheme = "https:"
    ssl_verify = True
    if config['DOORS_NOSSL']:
        # /!\ unsafe param: only useful for local tests /!\
        http_scheme = 'http:'
        ssl_verify = False
        mlog(
            "WARNING",
            "user.doors_register: SSL and HTTPS turned off (after tests remove DOORS_NOSSL from config file)"
        )
    if config['DOORS_PORT'] in ['80', '443']:
        # implicit port
        doors_base_url = http_scheme + '//' + config['DOORS_HOST']
    else:
        doors_base_url = http_scheme + '//' + config[
            'DOORS_HOST'] + ':' + config['DOORS_PORT']
    doors_response = post(doors_base_url + '/api/register',
                          data=sentdata,
                          verify=ssl_verify)
    mlog("INFO", "/api/register doors_response", doors_response)
    if doors_response.ok:
        # eg doors_response.content = b'{"status":"registration email sent",
        #                                "email":"*****@*****.**"}'
        answer = loads(doors_response.content.decode())
        # mlog("INFO", "/api/register answer",answer)
        return answer['userID']
    else:
        return None
def save_job(job_infos, optional_job_id_to_update=None):
    """
    Save a new row in jobs table (or update an existing row).

    @job_infos: dict of column => value (only JOB_COLS names are kept)
    @optional_job_id_to_update: if set, UPDATE that jobid instead of INSERT

    Returns the jobid of the saved row.
    """
    db = connect_db()
    db_cursor = db.cursor()
    db_tgtcols = []
    db_qstrvals = []
    for colinfo in JOB_COLS:
        colname = colinfo[0]
        val = job_infos.get(colname, None)
        if val is not None:
            # normalization also strips quote chars (rm_qt) before re-quoting
            val = str(normalize_forms(normalize_chars(val, rm_qt=True)))
            if val and len(val):
                quotedstrval = "'" + val + "'"
                # for insert, if needed later
                db_tgtcols.append(colname)
                db_qstrvals.append(quotedstrval)
    if optional_job_id_to_update:
        # an updated job: jobid is parameterized (was raw-interpolated)
        job_id = optional_job_id_to_update
        key_values = key_val_expr(db_tgtcols, db_qstrvals)
        db_cursor.execute('UPDATE jobs SET %s WHERE jobid = %%s' % key_values,
                          (job_id,))
    else:
        # a new job
        db_cursor.execute('INSERT INTO jobs(%s) VALUES (%s)' %
                          (','.join(db_tgtcols), ','.join(db_qstrvals)))
        job_id = db_cursor.lastrowid
    db.commit()
    db.close()
    mlog("DEBUG", "jobs #%s: saved %s infos" % (job_id, job_infos))
    return job_id
def delete_pairs_fkey_tok(idkey, cmx_db=None, map_table='sch_kw'):
    """
    Simply deletes all pairings (foreign_key, *) in the table

    @idkey: value of the foreign key (uid or jobid depending on the table)
    @cmx_db: optional already-open connection (left open if provided)
    @map_table: one of 'sch_kw', 'sch_ht', 'job_kw'
    """
    # validate BEFORE opening a connection (no leak on TypeError)
    if map_table not in ['sch_kw', 'sch_ht', 'job_kw']:
        raise TypeError('ERROR: Unknown map_table')
    table_to_col = {'sch_kw': 'uid', 'sch_ht': 'uid', 'job_kw': 'jobid'}
    fkey = table_to_col[map_table]
    if cmx_db:
        db = cmx_db
    else:
        db = connect_db()
    # single cursor (the original created two identical ones)
    db_cursor = db.cursor()
    # table and column come from the whitelist above; idkey is parameterized
    n = db_cursor.execute('DELETE FROM %s WHERE %s = %%s' % (map_table, fkey),
                          (str(idkey),))
    db.commit()
    mlog("DEBUG",
         "%s: DELETED %i pairings for %s" % (map_table, n, str(idkey)))
    if not cmx_db:
        db.close()
def sanitize(value, specific_type=None):
    """
    One of the main goals is to remove ';'

    @value: any int or str to sanitize
    @specific_type: None or one of {surl, sdate, sbool, sorg, scountry}

    Returns the sanitized value cast back to the original type.
    Raises ValueError when value is neither int nor str.
    """
    vtype = type(value)
    if vtype not in [int, str]:
        raise ValueError("Value has an incorrect type %s" % str(vtype))
    str_val = str(value)
    if specific_type == "sbool":
        # DB uses int(0) or int(1)
        if match('^[01]$', str_val):
            san_val = int(str_val)
        else:
            san_val = 0
        # NB san_val_bool = bool(san_val)
    elif specific_type == "surl":
        try:
            url_elts = [elt for elt in map(quote, list(urlparse(str_val)))]
            if len(url_elts) and url_elts[0] == 'http':
                # force https to prevent mixed content
                url_elts[0] = 'https'
            san_val = urlunparse(url_elts)
        except Exception:
            # was a bare except: don't swallow SystemExit/KeyboardInterrupt
            mlog(
                "WARNING",
                "sanitize via urllib couldn't parse url '%s', using regexp sanitize instead"
                % str_val)
            san_val = sub(r'[^\w@\.: /~_+$?=&%-]', '_', str_val)
    elif specific_type == "sdate":
        # allow digits, '/', ':' and '-'
        # (previous class [^0-9/-:] made '/-:' a char RANGE excluding the
        #  literal dash, which mangled ISO dates like 2017-05-22)
        san_val = sub(r'[^0-9/:-]', '_', str_val)
    elif specific_type == "scountry":
        # cc unifies variants like France, france, U.S.A., USA
        found = cc.searchCode(str_val)
        if found is not None:
            san_val = cc.ISO[found][0]
        else:
            # default: "some input" capitalized => "Some Input"
            san_val = str_val.capitalize()
    # free string types
    else:
        clean_val = normalize_forms(normalize_chars(str_val))
        # neutralize SQL keywords
        san_val = sub(r'\b(?:drop|select|update|delete)\b', '_', clean_val)
        if not specific_type:
            san_val = sub(r'[^\w@\.:,()# \n-]', '_', san_val)
        elif specific_type == "sorg":
            # most troublesome because we'll want to parse the label
            # (to split name and acronym and perhaps suggest similar org)
            san_val = sub(r'[\n;"\']', '_', san_val)
    # cast back to orginal type
    san_typed_val = vtype(san_val)
    return san_typed_val
def get_or_create_org(org_info, oclass, cmx_db=None):
    """
    (scholar's parent org(s)) ---> lookup/add to *orgs* table -> orgid

    1) query to *orgs* table
        <= unicity constraint is oclass + name + acro + org_type (<=> is_key)
        => £TODO if institution almost matches API to send suggestion
            - then TODO also allow completing existing entry

    2) return id
        => if an institution matches return orgid
        => if no institution matches create new and return orgid

    @cmx_db: optional already-open connection (left open if provided)
    """
    if cmx_db:
        db = cmx_db
    else:
        db = connect_db()
    the_aff_id = None
    db_tgtcols = []
    db_qstrvals = []
    db_constraints = []
    if oclass:
        org_info['class'] = oclass
    mlog("DEBUG", "get_or_create_org, org_info:", org_info)
    for colinfo in ORG_COLS:
        colname = colinfo[0]
        # is_key <=> field is part of the distinctive "signature" of a known org
        if colname == 'inst_type':
            is_key = (oclass == "inst")
        else:
            is_key = colinfo[3]
        val = org_info.get(colname, None)
        if val is not None:
            val = str(normalize_forms(normalize_chars(val, rm_qt=True)))
            if val and len(val):
                quotedstrval = "'" + val + "'"
                # for insert, if needed later
                db_tgtcols.append(colname)
                db_qstrvals.append(quotedstrval)
                if is_key:
                    # for select
                    db_constraints.append("%s = %s" % (colname, quotedstrval))
        # being NULL is also a distinctive feature if is_key
        else:
            if is_key:
                db_constraints.append("%s IS NULL" % colname)
    # FIX: use the resolved connection `db`, not `cmx_db` — the original
    # crashed with AttributeError whenever cmx_db was None (connect_db path)
    db_cursor = db.cursor()
    mlog("DEBUGSQL",
         "SELECT org.. \nWHERE %s" % ("\n  AND ".join(db_constraints)))
    n_matched = db_cursor.execute('SELECT orgid FROM orgs WHERE %s' %
                                  " AND ".join(db_constraints))
    # ok existing affiliation => row id
    if n_matched == 1:
        the_aff_id = db_cursor.fetchone()[0]
        mlog(
            "INFO", "dbcrud: found affiliation (orgid %i) (WHERE %s)" %
            (the_aff_id, " AND ".join(db_constraints)))
    # no matching affiliation => add => row id
    elif n_matched == 0:
        db_cursor.execute('INSERT INTO orgs(%s) VALUES (%s)' %
                          (','.join(db_tgtcols), ','.join(db_qstrvals)))
        the_aff_id = db_cursor.lastrowid
        # FIX: commit on `db` (was cmx_db.commit(): same None crash)
        db.commit()
        mlog("INFO", "dbcrud: added org '%s'" % str(db_qstrvals))
    else:
        raise Exception("ERROR: get_or_create_org non-unique match '%s'" %
                        str(db_qstrvals))
    if not cmx_db:
        db.close()
    return the_aff_id
def get_or_create_tokitems(tok_list, cmx_db=None, tok_table='keywords'):
    """
    kw_str -> lookup/add to *keywords* table -> kw_id
    ht_str -> lookup/add to *hashtags* table -> ht_id
    -------------------------------------------------

    tok_list is an array of strings

    NB keywords are mandatory, hashtags are not
       (minimum number of entries is checked client-side)

    for loop
        1) query to *keywords* or *hashtags* table (exact match)
        2) return id
            => if a keyword/tag matches return kwid/htid
            => if no keyword/tag matches create new and return kwid/htid
    """
    if cmx_db:
        db = cmx_db
    else:
        db = connect_db()
    # sql identifier names: internal whitelist, safe to interpolate
    fill = {'tb': tok_table}
    if tok_table == 'keywords':
        fill['idc'] = 'kwid'
        fill['strc'] = 'kwstr'
    elif tok_table == 'hashtags':
        fill['idc'] = 'htid'
        fill['strc'] = 'htstr'
    else:
        raise TypeError('ERROR: Unknown tok_table')
    db_cursor = db.cursor()
    found_ids = []
    for tok_str in tok_list:
        # TODO better string normalization here or in read_record
        tok_str = tok_str.lower()
        # token value is parameterized
        # (was interpolated into the SQL string => injection-prone)
        # ex: SELECT kwid FROM keywords WHERE kwstr = %s
        n_matched = db_cursor.execute(
            'SELECT %(idc)s FROM %(tb)s WHERE %(strc)s = %%s' % fill,
            (tok_str,))
        # ok existing keyword => row id
        if n_matched == 1:
            found_ids.append(db_cursor.fetchone()[0])
        # no matching keyword => add => row id
        elif n_matched == 0:
            # ex: INSERT INTO keywords(kwstr) VALUES (%s)
            db_cursor.execute(
                'INSERT INTO %(tb)s(%(strc)s) VALUES (%%s)' % fill,
                (tok_str,))
            db.commit()
            mlog("INFO", "Added '%s' to %s table" % (tok_str, tok_table))
            found_ids.append(db_cursor.lastrowid)
        else:
            raise Exception("ERROR: non-unique token '%s'" % tok_str)
    if not cmx_db:
        db.close()
    return found_ids
def save_full_scholar(safe_recs, cmx_db, uactive=True, update_user=None):
    """
    For new registration:
        -> add to *scholars* table, return new local uid

    For profile change (just pass previous local user info in update_user)
        -> *update* entire scholar row
           (if values are null or absent in safe_recs, they become null)

    @safe_recs: dict colname => already-sanitized value
    @cmx_db: open connection (committed here, NOT closed — caller owns it)
    @uactive: if True, also force record_status = "active"
    @update_user: previous row as a dict => switches to UPDATE mode

    Returns the scholar's luid (new lastrowid on INSERT,
    unchanged update_user['luid'] on UPDATE).

    see also COLS variable and doc/table_specifications.md
    """
    # column names and column quoted values
    db_tgtcols = []
    db_qstrvals = []
    actual_len_dbg = 0  # debug counter: nb of columns actually written
    for colinfo in USER_COLS:
        colname = colinfo[0]
        # NB: each val already contains no quotes because of sanitize()
        val = safe_recs.get(colname, None)
        # when updating, we keep all values that have changed, including None
        if update_user:
            if colname in ["luid", "email"]:
                # these two can't be updated
                continue
            old_val = update_user[colname]
            if val != old_val:
                actual_len_dbg += 1
                # None => SQL NULL; anything else is hand-quoted
                # NOTE(review): relies entirely on upstream sanitize();
                # parameterized queries would be safer — confirm before changing
                if val == None:
                    quotedstrval = "NULL"
                else:
                    quotedstrval = "'" + str(val) + "'"
                mlog("DEBUG",
                     "DB update %s (was: %s)" % (quotedstrval, str(old_val)))
                db_tgtcols.append(colname)
                db_qstrvals.append(quotedstrval)
        # when inserting, we keep all values != None
        else:
            if val != None:
                actual_len_dbg += 1
                quotedstrval = "'" + str(val) + "'"
                mlog("DEBUG", "DB saving" + quotedstrval)
                db_tgtcols.append(colname)
                db_qstrvals.append(quotedstrval)
    if uactive:
        # new/updated profiles are flagged active
        db_tgtcols.append('record_status')
        db_qstrvals.append('"active"')
    cmx_db_c = cmx_db.cursor()
    if not update_user:
        # expected colnames "(doors_uid, last_modified_date, email, ...)"
        db_tgtcols_str = ','.join(db_tgtcols)
        # fields converted to sql syntax
        db_vals_str = ','.join(db_qstrvals)
        # INSERT: full_statement with formated values
        full_statmt = 'INSERT INTO scholars (%s) VALUES (%s)' % (
            db_tgtcols_str, db_vals_str)
    else:
        # "col1 = val1, col2 = val2, ..." expression
        set_full_str = key_val_expr(db_tgtcols, db_qstrvals)
        # UPDATE: full_statement with formated values
        full_statmt = 'UPDATE scholars SET %s WHERE luid = "%s"' % (
            set_full_str, update_user['luid'])
    # NB mlog is presumably variadic here (multiple message args) — confirm
    mlog("DEBUG", "UPDATE" if update_user else "INSERT", "SQL statement:",
         full_statmt)
    cmx_db_c.execute(full_statmt)
    if not update_user:
        # fresh registration: the new row's auto-increment id
        luid = cmx_db_c.lastrowid
    else:
        luid = update_user['luid']
    cmx_db.commit()
    return luid
def get_full_scholar(uid, cmx_db=None):
    """
    uid : int or int str
          local user id aka luid

    Autonomous function to be used by User class
      => Retrieves one line from *scholars* table,
         with joined optional concatenated *affiliations*,
         *keywords* and *linked_ids*
      => Parse it all into a structured python user info dict
      => NB: None if user doesn't exist in cmx_db (but may exist in doors db)

    @cmx_db: optional already-open connection (left open if provided)
    """
    u_row = None
    if cmx_db:
        db = cmx_db
    else:
        db = connect_db()
    db_c = db.cursor(DictCursor)
    #
    # print('DBG', 'uid', uid)
    # print('DBG', 'type(uid)', type(uid))

    # one user + all linked infos concatenated in one row
    #  <= 3 LEFT JOINS sequentially GROUPed
    #     (b/c if simultaneous, loses unicity)
    # NB int(uid) both validates the id and keeps the %i interpolation safe
    one_usr_stmt = """
        SELECT
            sch_n_aff_n_kws_n_hts.*,

            -- linked_ids info condensed
            -- (format : "type1:ID1,type2:ID2,...")
            GROUP_CONCAT(
                CONCAT(linked_ids.ext_id_type,":", linked_ids.ext_id)
            ) AS linked_ids,
            COUNT(linked_ids.ext_id) AS linked_ids_nb

        FROM (
            SELECT
                sch_n_aff_n_kws.*,
                -- hts info condensed
                COUNT(hashtags.htid) AS hashtags_nb,
                -- GROUP_CONCAT(hashtags.htid) AS htids,
                GROUP_CONCAT(hashtags.htstr) AS hashtags

            FROM (
                SELECT
                    sch_n_orgs.*,
                    -- kws info condensed
                    COUNT(keywords.kwid) AS keywords_nb,
                    -- GROUP_CONCAT(keywords.kwid) AS kwids,
                    GROUP_CONCAT(keywords.kwstr) AS keywords

                FROM (
                    SELECT
                        sch_n_labs.*,
                        COUNT(insts.orgid) AS insts_ids_nb,
                        GROUP_CONCAT(insts.orgid) AS insts_ids
                    FROM (
                        SELECT
                            scholars.*,
                            COUNT(labs.orgid) AS labs_ids_nb,
                            GROUP_CONCAT(labs.orgid) AS labs_ids
                        FROM scholars
                        LEFT JOIN sch_org AS map_labs
                            ON map_labs.uid = luid
                        LEFT JOIN (
                            -- class constraint can't appear later,
                            -- it would give no scholar when empty
                            SELECT * FROM orgs WHERE class='lab'
                        ) AS labs
                            ON map_labs.orgid = labs.orgid
                        GROUP BY luid
                    ) AS sch_n_labs
                    LEFT JOIN sch_org AS map_insts
                        ON map_insts.uid = luid
                    LEFT JOIN (
                        SELECT * FROM orgs WHERE class='inst'
                    ) AS insts
                        ON map_insts.orgid = insts.orgid
                    GROUP BY luid
                ) AS sch_n_orgs

                -- two step JOIN for keywords
                LEFT JOIN sch_kw
                    ON sch_kw.uid = luid
                LEFT JOIN keywords
                    ON sch_kw.kwid = keywords.kwid
                GROUP BY luid
            ) AS sch_n_aff_n_kws

            -- also two step JOIN for hashtags
            LEFT JOIN sch_ht
                ON sch_ht.uid = luid
            LEFT JOIN hashtags
                ON sch_ht.htid = hashtags.htid
            GROUP BY luid
        ) AS sch_n_aff_n_kws_n_hts

        LEFT JOIN linked_ids
            ON linked_ids.uid = luid

        -- WHERE our user UID
        WHERE luid = %i

        GROUP BY luid
    """ % int(uid)

    mlog("DEBUGSQL",
         "DB get_full_scholar STATEMENT:\n-- SQL\n%s\n-- /SQL" % one_usr_stmt)

    n_rows = db_c.execute(one_usr_stmt)

    # grouped on the primary key => more than 1 row is impossible
    if n_rows > 1:
        raise IndexError(
            "DB one_usr_stmt returned %i rows instead of 1 for user %s" %
            (n_rows, uid))

    urow_dict = db_c.fetchone()

    # break with None if no results
    if urow_dict is None:
        mlog("WARNING",
             "DB get_full_scholar attempt got no rows for: %s" % uid)
        return None

    # normal case <=> exactly one row

    # Exemple initial data in urow_dict
    # ----------------------------------
    # {'hashtags': '#something, #another',
    #  'country': 'France', 'doors_uid': '5e3adbc1-bcfb-42da-a2c4-4af006fe2b91',
    #  'email': '*****@*****.**', 'first_name': 'John', 'gender': 'M',
    #  'home_url': 'http://localhost/regcomex/', 'hon_title': 'Student',
    #  'initials': 'JFK', 'interests_text': 'Blablabla',
    #  'job_looking_date': datetime.date(2019, 9, 28),
    #  'hashtags': '#eccs15', 'hashtags_nb': 1,
    #  'keywords': 'complex networks,complex systems,text mining,machine learning', 'keywords_nb': 4,
    #  'labs_ids': '3888,3444', 'labs_ids_nb': 2,
    #  'insts_ids': '3295', 'insts_ids_nb': 1,
    #  'last_modified_date': datetime.datetime(2017, 2, 22, 12, 25, 59),
    #  'last_name': 'Kennedy',
    #  'linked_ids': 'twitter:@jfk,yoyo:42,foobar:XWING', 'linked_ids_nb': 3,
    #  'middle_name': 'Fitzgerald',
    #  'pic_fname': '12345.jpg', 'pic_url': None, 'position': 'Research Fellow',
    #  'record_status': 'legacy', 'valid_date': datetime.date(2017, 5, 22)}

    # post-treatments
    # ---------------
    # 1/ split concatenated kw, ht, lab id, inst id lists
    #    and check correct length against the *_nb COUNT columns
    for toktype in ['keywords', 'hashtags', 'labs_ids', 'insts_ids']:
        if urow_dict[toktype + '_nb'] == 0:
            urow_dict[toktype] = []
        else:
            tokarray = urow_dict[toktype].split(',')
            if len(tokarray) != urow_dict[toktype + '_nb']:
                # NOTE(review): a comma INSIDE a token would trigger this —
                # presumably prevented upstream by sanitize(); confirm
                raise ValueError("Can't correctly split %s for user %s" %
                                 (toktype, uid))
            else:
                urow_dict[toktype] = tokarray

    # 2/ must do a secondary SELECT for detailed org info
    #    dict['labs_ids']: [id1, id2 ..]
    #    => dict['labs'] : [{info1},{info2}..]
    for orgclass in ['labs', 'insts']:
        id_list = urow_dict[orgclass + "_ids"]  # <- ! naming convention
        if not len(id_list):
            urow_dict[orgclass] = []
        else:
            # ids come from our own previous query (ints joined by commas)
            org_info = """SELECT name, acro, locname,
                                 inst_type, lab_code, label
                          FROM orgs WHERE orgid IN (%s)""" % ','.join(id_list)
            mlog('DEBUGSQL', "org_info stmt :", org_info)
            new_cursor = db.cursor(DictCursor)
            new_cursor.execute(org_info)
            urow_dict[orgclass] = new_cursor.fetchall()
        # print("get_full_scholar orgs::", urow_dict[orgclass])

    # print('===urow_dict with orgs[]===')
    # print(urow_dict)
    # print('==/urow_dict with orgs[]===')

    # 3/ also split and parse all linked_ids
    if urow_dict['linked_ids_nb'] == 0:
        urow_dict['linked_ids'] = {}
    else:
        lkids_array = urow_dict['linked_ids'].split(',')
        if len(lkids_array) != urow_dict['linked_ids_nb']:
            raise ValueError("Can't correctly split linked_ids for user %s" %
                             uid)
        else:
            # additionaly reparse dict for linked_ids
            # exemple ==> {type1:ID1, type2:ID2}
            urow_dict['linked_ids'] = {}
            for lkid_str in lkids_array:
                lkid_couple = lkid_str.split(':')
                if len(lkid_couple) != 2:
                    raise ValueError(
                        "Can't correctly find type and id value in linked_id string '%s' for user %s"
                        % (lkid_str, uid))
                else:
                    lkid_type = lkid_couple[0]
                    lkid_id = lkid_couple[1]
                    urow_dict['linked_ids'][lkid_type] = lkid_id

    mlog("INFO", "get_full_scholar %s: OK" % uid)

    if not cmx_db:
        db.close()

    # full user info as a dict
    return urow_dict
def doors_login(email, password, config=REALCONFIG):
    """
    Remote query to the Doors API to authenticate a user.

    Doors responses look like this:
    {'status': 'login ok',
     'userInfo': {'hashAlgorithm': 'PBKDF2',
                  'password': '******',
                  'id': {'id': '9e30ce89-72a1-46cf-96ca-cf2713b7fe9d'},
                  'name': 'Corser, Peter',
                  'hashParameters': {'iterations': 1000, 'keyLenght': 128}}}

    NB: returned doors_uid will be None if user not found
    Raises Exception when the HTTP call itself fails.
    """
    payload = {'login': email.lower(), 'password': password}

    # default transport: HTTPS with certificate checking
    http_scheme = "https:"
    ssl_verify = True
    if config['DOORS_NOSSL']:
        # /!\ unsafe param: only useful for local tests /!\
        http_scheme = 'http:'
        ssl_verify = False
        mlog(
            "WARNING",
            "user.doors_login: SSL and HTTPS turned off (after tests remove DOORS_NOSSL from config file)"
        )

    # implicit port for standard http(s) ports, explicit suffix otherwise
    if config['DOORS_PORT'] in ['80', '443']:
        doors_base_url = http_scheme + '//' + config['DOORS_HOST']
    else:
        doors_base_url = (http_scheme + '//' + config['DOORS_HOST'] + ':' +
                          config['DOORS_PORT'])

    doors_response = post(doors_base_url + '/api/user',
                          data=payload,
                          verify=ssl_verify)
    mlog("INFO", "/api/user doors_response", doors_response)

    # guard clause: any transport-level failure aborts
    if not doors_response.ok:
        raise Exception('Doors request failed')

    uid = None
    login_info = loads(doors_response.content.decode())
    if login_info['status'] == "LoginOK":
        uid = login_info['userID']
        # ID is a string of the form:
        # "UserID(12849e74-b039-481f-b8eb-1e52562fbda6)" => keep the bare uuid
        capture = match(r'UserID\(([0-9a-f-]+)\)', uid)
        if capture:
            uid = capture.groups()[0]
    elif match(r'User .* not found$', doors_response.json()):
        uid = None
        mlog('INFO', "doors_login says user '%s' was not found" % email)
    return uid