def add_user():
    """Create a new user from the submitted form.

    Rejects duplicate usernames and phone numbers that are not 11 digits.
    """
    existing = db.db_execute(conn, cursor, 'select username from users')
    print(existing)
    taken = [record[0] for record in existing]
    username = request.form['name']
    if username in taken:
        return resultmsg(200, '用户名已存在!', '', '')
    password = request.form['pwd']
    right = 2  # default privilege level for newly created accounts
    phone = request.form['phone']
    if len(phone) != 11:
        return resultmsg(200, '手机号错误', '', '')
    realname = request.form['realname']
    data_user = (username, password, right, phone, realname)
    sql_add_user = '******' % data_user
    print(sql_add_user)
    result = db.db_execute(conn, cursor, sql_add_user)
    print(result)
    if not result:
        return resultmsg(200, 'ok', result, '')
    return resultmsg(500, '服务器错误,请重新尝试!', result, '')
def del_product_name():
    """Soft-delete a product: stamp its deletion flag, time and operator."""
    flag = int(request.form['tag'])
    if flag != 1:
        return resultmsg(400, '删除标记错误!', '', '')
    deleted_time = request.form['time']
    id = int(request.form['id'])
    operator = request.form['user']
    sql_find_id = 'select id from product_category where product_name_be_del is null'
    pids = db.db_execute(conn, cursor, sql_find_id)
    print(pids)
    active_ids = [pid[0] for pid in pids]
    if id not in active_ids:
        return resultmsg(200, '当前产品不存在!', [], '')
    # NOTE(review): request values are interpolated directly into the SQL
    # text without escaping — consider parameterized queries.
    sql_del_product_name = ('update product_category set product_name_be_del=%d,'
                            ' product_name_be_del_time="%s", '
                            'product_name_be_del_operator="%s"'
                            ' where id=%d ') % (flag, deleted_time, operator, id)
    print(sql_del_product_name)
    result = db.db_execute(conn, cursor, sql_del_product_name)
    print(result)
    if not result:
        return resultmsg(200, '操作成功!', result, '')
    return resultmsg(500, '服务器错误,请重新尝试!', result, '')
def total():
    """Return the stock total of each of the two warehouses plus their sum.

    :return: resultmsg with {"allstock", "seat1_stock", "seat2_stock"}.
    """
    warehouse_name1 = int(request.form['seat1'])
    print(warehouse_name1)
    warehouse_name2 = int(request.form['seat2'])
    print(warehouse_name2)
    totals = []
    for name in (warehouse_name1, warehouse_name2):
        sql = 'select sum(num*out_in) from warehouse where warehouse_name=%d' % name
        print(sql)
        rows = db.db_execute(conn, cursor, sql)
        value = rows[0][0]
        # SUM() yields NULL (None) when the warehouse has no movements yet;
        # use `is None` (not `== None`) per Python idiom.
        totals.append(0 if value is None else int(value))
    print(totals)
    warehouse1_name_total, warehouse2_name_total = totals
    all_total = warehouse1_name_total + warehouse2_name_total
    result = {
        "allstock": all_total,
        "seat1_stock": warehouse1_name_total,
        "seat2_stock": warehouse2_name_total
    }
    return resultmsg(200, 'ok', result, '')
def warehouse():
    """List every active product with its stock in the requested warehouse.

    :return: resultmsg whose data is a list of product dicts with "value"
             holding the summed stock (0 when there are no movements).
    """
    warehouse_name = int(request.form['seat'])
    sql1 = 'select id,product_name,batch,`size`,mark,note from product_category where product_name_be_del is null'
    product_names = db.db_execute(conn, cursor, sql1)
    print('product_names:\n', product_names)
    if product_names is None:  # defensive: treat a missing result set as empty
        product_names = []
    result = []
    for p in product_names:
        sql2 = 'select sum(num*out_in) from warehouse ' \
               'where product_name=%d' % int(p[0]) + ' and warehouse_name=%d' % warehouse_name
        stock = db.db_execute(conn, cursor, sql2)[0][0]
        # SUM() is NULL when the product has no stock movements yet.
        stock = 0 if stock is None else stock
        print(stock)
        result.append({
            'id': p[0],
            "name": p[1],
            "batch": p[2],
            "size": p[3],
            "mark": p[4],
            "note": p[5],
            "value": int(stock)
        })
    return resultmsg(200, 'ok', result, '')
def get_product_del_list():
    """Return the details of every soft-deleted product."""
    sql_product_del_list = 'select id,product_name,DATE_FORMAT(product_name_be_del_time,"%Y-%m-%d %H:%i:%s"),product_name_be_del_operator,batch,`size`,mark,note ' \
                           'from product_category ' \
                           'where product_name_be_del is not null'
    rows = db.db_execute(conn, cursor, sql_product_del_list)
    print(rows)
    if rows is None:
        rows = []
    # Column order matches the SELECT list above.
    keys = ('id', 'product_name', 'product_name_be_del_time',
            'product_name_be_del_operator', 'batch', 'size', 'mark', 'note')
    result = [{key: str(value) for key, value in zip(keys, row)} for row in rows]
    return resultmsg(200, '已删除产品名录详情', result, '已全部返回.')
def get_db_dict_array(self):
    """Load download-stats rows (joined with their users) filtered by this
    object's media_file_id and/or media_id into self.db_dict_array.

    Raises ValueError when neither id is set.
    """
    if self.media_file_id and self.media_id is None:
        where = '`media_file_id` = %s'
        args = self.media_file_id
    elif self.media_file_id is None and self.media_id:
        where = '`media_id` = %s'
        args = self.media_id
    elif self.media_file_id and self.media_id:
        where = '`media_file_id` = %s AND `media_id` = %s'
        args = [self.media_file_id, self.media_id]
    else:
        raise ValueError()
    sql = """
        SELECT * FROM `ms_media_download_stats` AS s
        LEFT JOIN ca_users AS u ON u.user_id = s.user_id
        WHERE """ + where + """
    """
    c = self.init_db()
    self.db_dict_array = db.db_execute(c, sql, args)
def warehouse_manage():
    """Record a stock movement; an outbound move ('-1') first checks that
    the remaining balance covers the requested quantity.

    :return: resultmsg — 200 on success, 422 on a quantity/value error.
    """
    warehouse_name = int(request.form['seat'])
    product_name = int(request.form['produce_name'])
    out_in = request.form['out_in']
    num = request.form['num']
    date_managed = request.form['date']
    cost = request.form['fare']  # must be limited to two decimal places
    operator = request.form['operator']
    s = (warehouse_name, int(out_in), product_name, int(num), operator,
         date_managed, float(cost))
    if out_in == '-1':
        sql_sum_remaining = 'select sum(num*out_in) from warehouse where warehouse_name=%d' % warehouse_name + ' and product_name=%d' % product_name
        print(sql_sum_remaining)
        product_name_sum_num = db.db_execute(conn, cursor, sql_sum_remaining)[0][0]
        # SUM() is NULL when there are no movements yet.
        if product_name_sum_num is None:
            product_name_sum_num = 0
        else:
            product_name_sum_num = int(product_name_sum_num)
        print("product_name_sum_num:\n", product_name_sum_num)
        if product_name_sum_num >= int(num) > 0:
            # BUG FIX: '%2f' only sets a minimum field width (still prints six
            # decimals); '%.2f' actually rounds the cost to two decimal places
            # as the comment above requires.
            sql = 'insert into warehouse(warehouse_name, out_in, product_name,num, operator, date_managed, cost) ' \
                  'values ("%s",%d,"%s",%d,"%s","%s",%.2f)' % s
            result = db.db_execute(conn, cursor, sql)
            return resultmsg(200, 'OK', result, '数据插入成功')
        else:
            return resultmsg(422, '数值错误,请重新输入!', '', '')
    elif out_in == '1':
        # Quantities longer than 11 digits exceed the column width.
        if len(str(num)) > 11:
            return resultmsg(422, '入库数量超限!', '', '')
        elif int(num) > 0:
            sql = 'insert into warehouse(warehouse_name, out_in, product_name,num, operator, date_managed, cost)' \
                  'values ("%s",%d,"%s",%d,"%s","%s",%.2f)' % s
            print(sql)
            result = db.db_execute(conn, cursor, sql)
            return resultmsg(200, 'OK', result, '数据插入成功')
        else:
            # BUG FIX: resultmsg is called with four arguments everywhere
            # else; the original call here passed only two and would raise
            # TypeError at runtime.
            return resultmsg(422, '入库数量不可小于1!', '', '')
def get_num_mf_derived_from(mf_id):
    """Count the media files derived from the given media file id."""
    connection = db.db_conn_socket()
    cursor = connection.cursor()
    sql = """
        SELECT * FROM `ms_media_files`
        WHERE `derived_from_media_file_id` = %s
    """
    rows = db.db_execute(cursor, sql, mf_id)
    return len(rows)
def reuse_product_name():
    """Restore a soft-deleted product by clearing its deletion columns."""
    id = int(request.form['id'])
    sql_reuse_product_name = ('update product_category '
                              'set product_name_be_del=NULL,product_name_be_del_time=NULL,product_name_be_del_operator=NULL '
                              'where id=%d' % id)
    outcome = db.db_execute(conn, cursor, sql_reuse_product_name)
    print(outcome)
    return resultmsg(200, '恢复成功!', '', '')
def product_category_info():
    """Return parallel lists (ids, names) of all active products."""
    sql = 'select id, product_name from product_category where product_name_be_del is null'
    rows = db.db_execute(conn, cursor, sql)
    ids = [row[0] for row in rows]
    names = [row[1] for row in rows]
    return ids, names
def warehouse_category_info():
    """Return parallel lists (ids as strings, names) of all warehouses."""
    sql = 'select id, warehouse_name from warehouse_category'
    rows = db.db_execute(conn, cursor, sql)
    ids = [str(row[0]) for row in rows]
    names = [row[1] for row in rows]
    return ids, names
def check():
    """Cross-check MS specimen records against iDigBio and collect mismatches.

    For every specimen with a uuid, fetch the iDigBio record and compare
    institution/collection codes, catalog number, genus and species; each
    mismatch is appended to mislinked_specimens_it.csv as a checkpoint.
    """
    conn = db.db_conn()
    c = conn.cursor()
    sql = """
        SELECT * FROM `ms_specimens` AS s
        LEFT JOIN ms_specimens_x_taxonomy AS x ON x.specimen_id = s.specimen_id
        LEFT JOIN ms_taxonomy_names AS n ON n.alt_id = x.alt_id
    """
    r = db.db_execute(c, sql)
    mislinked = pandas.DataFrame(columns=[
        'specimen_id',
        'MS_institution_code', 'iDB_institution_code',
        'MS_collection_code', 'iDB_collection_code',
        'MS_catalog_number', 'iDB_catalog_number',
        'MS_genus', 'iDB_genus',
        'MS_species', 'iDB_species'
    ])
    # mislinked = pandas.read_csv('mislinked_specimens.csv')
    for s in r:
        if not s['uuid']:
            continue
        # Skip specimens already recorded as mislinked.
        if int(s['specimen_id']) in list(mislinked['specimen_id']):
            continue
        print(s['uuid'])
        resp = requests.get('https://search.idigbio.org/v2/view/records/' + s['uuid'])
        json = resp.json()
        # check institution code, collection code, specimen number, genus, and species
        if (get_db_field(s, 'institution_code') != get_json_field(json, ['indexTerms', 'institutioncode']).lower()
                or get_db_field(s, 'collection_code') != get_json_field(json, ['indexTerms', 'collectioncode']).lower()
                or get_db_field(s, 'catalog_number') != get_json_field(json, ['indexTerms', 'catalognumber']).lower()
                or get_db_field(s, 'genus') != get_json_field(json, ['indexTerms', 'genus']).lower()
                or get_db_field(s, 'species') != get_json_field(json, ['indexTerms', 'specificepithet']).lower()):
            print('Found mislinked specimen')
            row = {
                'specimen_id': s['specimen_id'],
                'MS_institution_code': s['institution_code'],
                'MS_collection_code': s['collection_code'],
                'MS_catalog_number': s['catalog_number'],
                'MS_genus': s['genus'],
                'MS_species': s['species'],
                'iDB_institution_code': get_json_field(json, ['indexTerms', 'institutioncode']),
                'iDB_collection_code': get_json_field(json, ['indexTerms', 'collectioncode']),
                'iDB_catalog_number': get_json_field(json, ['indexTerms', 'catalognumber']),
                'iDB_genus': get_json_field(json, ['indexTerms', 'genus']),
                'iDB_species': get_json_field(json, ['indexTerms', 'specificepithet'])
            }
            # BUG FIX: DataFrame.append(row, ignore_index=True) was deprecated
            # and removed in pandas 2.0; pandas.concat is the supported
            # equivalent.
            mislinked = pandas.concat([mislinked, pandas.DataFrame([row])],
                                      ignore_index=True)
            mislinked.to_csv('mislinked_specimens_it.csv', index=False,
                             index_label=False)
def del_user():
    """Delete the user account identified by the posted id."""
    id = int(request.form['id'])
    sql_del_user = '******' % id
    result = db.db_execute(conn, cursor, sql_del_user)
    if not result:
        return resultmsg(200, '账号已删除!', result, '')
    return resultmsg(500, '服务器错误,请重新尝试!', result, '')
def addproduct():
    """Add a product to the product_category catalogue.

    Rejects blank name/date and names that already exist among active rows.
    """
    if str(request.form['product_name']) == '' or str(request.form['add_date']) == '':
        return resultmsg(400, '信息添加错误,请重新添加.', '', '')
    sql = 'select id,product_name from product_category where product_name_be_del is null'
    productlist = db.db_execute(conn, cursor, sql)
    existing_names = [row[1] for row in productlist]
    if request.form['product_name'] in existing_names:
        return resultmsg(200, '产品已存在(或者从已删除列表中恢复)!', '', '')
    add_product_info = tuple(
        str(request.form[field])
        for field in ('product_name', 'add_date', 'batch', 'size', 'mark', 'note'))
    sql = 'insert into product_category(product_name,add_product_name_date,batch,`size`,mark,note) ' \
          'values ("%s","%s","%s","%s","%s","%s")' % add_product_info
    result = db.db_execute(conn, cursor, sql)
    return resultmsg(200, '成功', result, '产品名称已添加.')
def file_controller_post(new_file_name, new_file_content):
    """Insert a new file row and return (JSON body, 201-created status)."""
    new_file_id = db.db_execute(
        "INSERT INTO file VALUES (null, ?, ?, 0)",
        (new_file_name, new_file_content))
    file_info = {
        'id': new_file_id,
        'file_name': new_file_name,
        'path': '/file/%s/%s' % (new_file_id, new_file_name)
    }
    response_body = {'response': {'status': 'OK', 'file': file_info}}
    return json.dumps(response_body), http_const.HTTP_CREATED
def file_controller_put(file_id, file_name, file_content):
    """Update a non-deleted file row; return 404 when it does not exist."""
    found = db.db_select(
        'SELECT count(*) FROM file WHERE id = ? AND deleted = 0',
        (file_id, ), True)
    if found[0] == 0:
        return json.dumps({'response': {
            'status': 'error'
        }}), http_const.HTTP_NOT_FOUND
    db.db_execute(
        "UPDATE file SET file_name = ?, file_content_b64 = ? WHERE id = ?",
        (file_name, file_content, file_id))
    file_info = {
        'id': file_id,
        'file_name': file_name,
        'path': '/file/%s/%s' % (file_id, file_name)
    }
    response_body = {'response': {'status': 'OK', 'file': file_info}}
    return json.dumps(response_body), http_const.HTTP_OK
def allwarehouse():
    """Total the remaining stock of every active product across all warehouses.

    :return: resultmsg whose data is a list of {id, name, value} dicts.
    """
    sql1 = 'select id,product_name from product_category where product_name_be_del is Null'
    product_names = db.db_execute(conn, cursor, sql1)
    result = []
    for pid, pname in product_names:
        sql3 = 'select sum(num*out_in) from warehouse where product_name=%d' % pid
        print(sql3)
        total = db.db_execute(conn, cursor, sql3)[0][0]
        # SUM() is NULL for products with no stock movements; use `is None`
        # (not `== None`) per Python idiom.
        total = 0 if total is None else total
        print(total)
        result.append({'id': pid, "name": pname, "value": int(total)})
    return resultmsg(200, 'ok', result, '')
def login():
    """Authenticate a user against the users table."""
    if request.form['username'] == '' or request.form['password'] == '':
        return jsonify(resultmsg(400, "input wrong", "", ''))
    # NOTE(review): credentials appear to be interpolated into the SQL text
    # and compared in plain text — worth revisiting for injection/hashing.
    sql = 'select username,`right` from users where username="******" and password="******"' % (
        request.form['username'], request.form['password'])
    db_result = db.db_execute(conn, cursor, sql)
    if not db_result:
        return jsonify(resultmsg(400, "账号或密码错误,请重新输入!", "", ''))
    matched = {"username": db_result[0][0], "right": db_result[0][1]}
    return jsonify(resultmsg(200, 'ok', matched, 'page'))
def edit():
    """Update an existing stock-movement record by id."""
    id = int(request.form['id'])
    product_name = int(request.form['product_name'])
    out_in = int(request.form['out_in'])
    num = int(request.form['num'])
    date_managed = request.form['date']
    cost = float(request.form['fare'])
    # BUG FIX: '%2f' only sets a minimum field width (still prints six
    # decimals); '%.2f' rounds the cost to two decimal places, consistent
    # with how cost is stored by the insert handlers.
    sql = 'update warehouse set num=%d,date_managed="%s",cost=%.2f,product_name=%d,out_in=%d' % (
        num, date_managed, cost, product_name, out_in) + ' where id=%d' % (id)
    print(sql)
    result = db.db_execute(conn, cursor, sql)
    return resultmsg(200, 'ok', result, '')
def userlist():
    """Return id/username/right/phone/realname for every user."""
    sql_userlist = 'select id,username,`right`,phone,realname from users'
    rows = db.db_execute(conn, cursor, sql_userlist)
    print(rows)
    # Keys follow the SELECT column order.
    fields = ('id', 'username', 'right', 'phone', 'realname')
    data = [dict(zip(fields, row)) for row in rows]
    return resultmsg(200, 'ok', data, '')
def warehouseinfo():
    """Return id/name/position/telphone/master for every warehouse."""
    sql = 'select id, warehouse_name,`position`,telphone,master from warehouse_category'
    rows = db.db_execute(conn, cursor, sql)
    print(rows)
    # Keys follow the SELECT column order.
    fields = ("id", "name", "position", "telphone", "master")
    data = [dict(zip(fields, row)) for row in rows]
    return resultmsg(200, '', data, '')
def get_productlist():
    """Return the catalogue of active (not soft-deleted) products.

    :return: resultmsg whose data is a list of product dicts.
    """
    sql_productlist = 'select id,product_name,DATE_FORMAT(add_product_name_date,"%Y-%m-%d %H:%i:%s"),batch,`size`,mark,note ' \
                      'from product_category ' \
                      'where product_name_be_del is null'
    productlist = db.db_execute(conn, cursor, sql_productlist)
    print(productlist)
    if productlist is None:
        # BUG FIX: the original built this resultmsg but never returned it,
        # then fell through and crashed on len(None) below.
        return resultmsg(200, 'ok', '', '查询结果为空!')
    result = []
    for row in productlist:
        result.append({
            'id': str(row[0]),
            'product_name': str(row[1]),
            'add_product_name_date': str(row[2]),
            'batch': row[3],
            'size': row[4],
            'mark': row[5],
            'note': row[6]
        })
    return resultmsg(200, '产品名录详情', result, '已全部返回.')
def correct():
    """Insert a correction record for a product's warehouse data."""
    warehouse_name = int(request.form['seat'])
    out_in = int(request.form['out_in'])
    product_name = int(request.form['name'])
    num = int(request.form['num'])
    operator = request.form['operator']
    date_managed = request.form['time']
    cost = float(request.form['fare'])
    correct = int(request.form['correct'])
    correct_data = (warehouse_name, out_in, product_name, num, operator,
                    date_managed, cost, correct)
    # BUG FIX: '%2f' only sets a minimum field width (still prints six
    # decimals); '%.2f' rounds the cost to two decimal places as intended.
    sql_correct = 'insert into warehouse(warehouse_name,out_in,product_name,num,operator,date_managed,cost,correct) ' \
                  'values (%d,%d,%d,%d,"%s","%s",%.2f,%d)' % correct_data
    print(sql_correct)
    result = db.db_execute(conn, cursor, sql_correct)
    if len(result) == 0:
        print(result)
        return resultmsg(200, '修正完成。', result, '')
    else:
        return resultmsg(500, '服务器错误,请重新尝试!', result, '')
# OR m.grant_support COLLATE UTF8_GENERAL_CI LIKE '%1702143%' # OR m.grant_support COLLATE UTF8_GENERAL_CI LIKE '%1701769%' # """ sql = """ SELECT * FROM `ms_media_files` AS mf LEFT JOIN `ms_media` AS m ON m.media_id = mf.media_id LEFT JOIN `ms_specimens` AS s ON s.specimen_id = m.specimen_id LEFT JOIN `ms_specimens_x_taxonomy` AS sxt ON sxt.specimen_id = s.specimen_id LEFT JOIN `ms_taxonomy_names` AS tn ON tn.alt_id = sxt.alt_id LEFT JOIN `ca_users` AS u ON u.user_id = mf.user_id LEFT JOIN `ms_scanners` AS sc ON sc.scanner_id = m.scanner_id LEFT JOIN `ms_facilities` AS f ON f.facility_id = m.facility_id """ media_files = db.db_execute(c, sql) mf_n = len(media_files) mf_i = 1 for mf in media_files: print(str(mf_i) + '/' + str(mf_n)) mf_i += 1 mf['external_genus'] = '' mf['specimen_views'] = 0 mf['media_group_views'] = 0 mf['media_file_views'] = 0 if 'specimen_id' in mf and mf['specimen_id'] is not None: # Look up resolved taxonomy names eg_sql = """ SELECT * FROM `ms_specimens_x_resolved_taxonomy` AS sxrt
# Backfill ms_specimens.recordset from iDigBio for every specimen with a uuid.
import sys, os
sys.path.append(os.path.abspath(os.path.join('..', '..', 'lib')))
import db
import requests

conn = db.db_conn()
c = conn.cursor()
sql = """
    SELECT * FROM `ms_specimens`
"""
r = db.db_execute(c, sql)
for s in r:
    recordset = None
    if s['uuid']:
        resp = requests.get('https://search.idigbio.org/v2/view/records/' + s['uuid'])
        json = resp.json()
        recordset = json['indexTerms']['recordset']
        print(recordset)
        # NOTE(review): recordset is concatenated into the SQL text unescaped.
        sql = ('UPDATE `ms_specimens` SET `recordset` = "' + recordset +
               '" WHERE `specimen_id` = ' + str(s['specimen_id']))
        print(sql)
        db.db_execute(c, sql)
def main(args_dict):
    """(Re)create the kataster politician tables and load reference data.

    Builds the division_level enum and the offices/terms/parties/politicians/
    politicianterms tables, then loads the JSON fixtures from DIR_DATA.
    """
    # Connect to the database
    db = psycopg2.connect(user='******', dbname='kataster')

    # Set up enum type division_level, table parties, table offices, table terms
    q = """
    DO $$
    BEGIN
        IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'division_level') THEN
            CREATE TYPE kataster.division_level AS ENUM
                ('country', 'region', 'district', 'municipality', 'borough');
        END IF;
    END $$;
    """
    q += """
    DROP TABLE kataster.politicianterms;
    DROP TABLE kataster.politicians;
    DROP TABLE kataster.parties;
    DROP TABLE kataster.terms;
    DROP TABLE kataster.offices;
    """
    q += """
    CREATE TABLE IF NOT EXISTS kataster.offices(
        id int PRIMARY KEY,
        name_male text UNIQUE,
        name_female text UNIQUE,
        level kataster.division_level
    );
    CREATE TABLE IF NOT EXISTS kataster.terms(
        id int PRIMARY KEY,
        officeid int REFERENCES kataster.offices(id),
        start int,
        finish int
    );
    CREATE TABLE IF NOT EXISTS kataster.parties(
        id int PRIMARY KEY,
        abbreviation text,
        name text UNIQUE
    );
    CREATE TABLE IF NOT EXISTS kataster.politicians(
        id serial PRIMARY KEY,
        firstname text,
        surname text,
        title text,
        dobhash int
    );
    CREATE TABLE IF NOT EXISTS kataster.politicianterms(
        politicianid int REFERENCES kataster.politicians(id),
        termid int REFERENCES kataster.terms(id),
        party_nomid int REFERENCES kataster.parties(id),
        partyid int REFERENCES kataster.parties(id),
        source_url text,
        picture_url text,
        PRIMARY KEY (politicianid, termid)
    );
    """
    db_execute(db, q)

    # Insert offices
    path_offices = DIR_DATA + 'offices.json'
    offices = json_load(path_offices)
    columns_offices = ['id', 'name_male', 'name_female', 'level']
    db_insert_jsons(db, 'kataster.offices', offices, columns_offices)

    # Insert terms
    path_terms = DIR_DATA + 'terms.json'
    terms = json_load(path_terms)
    columns_terms = ['id', 'officeid', 'start', 'finish']
    db_insert_jsons(db, 'kataster.terms', terms, columns_terms)

    # Insert parties
    path_terms = DIR_DATA + 'parties.json'
    parties = json_load(path_terms)
    columns_parties = ['id', 'abbreviation', 'name']
    db_insert_jsons(db, 'kataster.parties', parties, columns_parties)

    # Insert poslanci NRSR, deduplicated by PoslanecID (keeping the record
    # with the lowest CisObdobia thanks to the pre-sort).
    path_poslanci_NRSR = DIR_DATA + 'poslanci_NRSR.json'
    poslanci_NRSR = json_load(path_poslanci_NRSR)
    for poslanec in poslanci_NRSR:
        poslanec['dobhash'] = hash_timestamp(
            datetime.strptime(poslanec['birthdate'], '%Y-%m-%dT%H:%M:%SZ'))
    poslanci_NRSR_sorted = sorted(
        poslanci_NRSR, key=lambda p: (p['PoslanecID'], p['CisObdobia']))
    poslanci_NRSR_unique = [
        next(group)
        for key, group in groupby(poslanci_NRSR_sorted, key=lambda p: p['PoslanecID'])]
    columns_politicians = ['firstname', 'surname', 'title', 'dobhash']
    db_insert_jsons(db, 'kataster.politicians', poslanci_NRSR_unique,
                    columns_politicians)

    # Obtain assigned IDs
    q = """SELECT id, firstname, surname, dobhash FROM kataster.politicians;"""
    politicians = db_query(db, q)
    fsd_to_politicianid = {
        (p['firstname'], p['surname'], p['dobhash']): p['id'] for p in politicians}

    # Construct politicianterms relations for poslanci NRSR
    CisObdobia_to_termid = {
        term['CisObdobia']: term['id'] for term in terms if 'CisObdobia' in term}
    party_name_to_id = {party['name']: party['id'] for party in parties}
    for poslanec in poslanci_NRSR:
        # Obtain politicianid
        key_fsd = (poslanec['firstname'], poslanec['surname'], poslanec['dobhash'])
        politicianid = fsd_to_politicianid[key_fsd]
        # Obtain termid
        termid = CisObdobia_to_termid[poslanec['CisObdobia']]
        # Obtain partynom
        # BUG FIX: the original fell back to '\N' (the COPY-format NULL
        # marker), which a parameterized INSERT would store as a literal
        # string — and '\N' is a malformed escape under Python 3. Passing
        # None makes the driver insert SQL NULL.
        party_nom_id = party_name_to_id.get(poslanec['party_nom'])
        # Insert the relation
        q = """
            INSERT INTO kataster.politicianterms(politicianid, termid, party_nomid, partyid, source_url, picture_url)
            VALUES (%s, %s, %s, %s, %s, %s)
            ON CONFLICT DO NOTHING;
        """
        q_data = (politicianid, termid, party_nom_id, None,
                  poslanec['source'], poslanec['picture'])
        db_execute(db, q, q_data)

    # Commit changes and close connection to database
    db.commit()
    db.close()
# Backfill ms_media.grant_support from a MorphoSource CSV export.
import sys, os
sys.path.append(os.path.abspath(os.path.join('..', '..', 'lib')))
import db
import pandas

csv = pandas.read_csv('MorphoSource_oVert_update_3_15_18.csv')
for index, row in csv.iterrows():
    # Strip the leading character, then keep the part before the first '-'
    # — presumably the media id; confirm against the CSV format.
    ms_code = row['media'][1:].split('-', 2)[0]
    grant_attrib = row['grant support']
    if type(ms_code) is str and ms_code and type(grant_attrib) is str and grant_attrib:
        print(ms_code)
        print(grant_attrib)
        conn = db.db_conn()
        c = conn.cursor()
        sql = """
            UPDATE `ms_media` SET `grant_support` = %s WHERE `media_id` = %s
        """
        db.db_execute(c, sql, [grant_attrib, ms_code])
conn = db.db_conn_socket() c = conn.cursor() sql = """ SELECT * FROM `ms_media_download_requests` AS r LEFT JOIN `ms_media` AS m ON m.media_id = r.media_id LEFT JOIN `ms_specimens` AS s ON s.specimen_id = m.specimen_id LEFT JOIN `ca_users` AS u ON u.user_id = r.user_id LEFT JOIN `ms_specimens_x_taxonomy` AS sxt ON sxt.specimen_id = s.specimen_id LEFT JOIN `ms_taxonomy_names` AS t ON t.alt_id = sxt.alt_id WHERE `recordset` = "bd7cfd55-bf55-46fc-878d-e6e11f574ccd" """ requests = db.db_execute(c, sql) request_report = pandas.DataFrame(columns=[ 'request_time', 'media_id', 'specimen_institution_code', 'specimen_collection_code', 'specimen_catalog_number', 'specimen_genus', 'specimen_species', 'request_text', 'request_user_first_name', 'request_user_last_name', 'request_user_email', 'request_approval' ]) for r in requests: row = { 'request_time': datetime.datetime.fromtimestamp( r['requested_on']).strftime('%Y-%m-%d %H:%M:%S'), 'media_id': r['media_id'],
return len(db.db_execute(c, sql, mf_id)) conn = db.db_conn_socket() c = conn.cursor() sql = """ SELECT * FROM `ms_media_files` AS mf LEFT JOIN `ms_media` AS m ON m.media_id = mf.media_id LEFT JOIN `ms_specimens` AS s ON s.specimen_id = m.specimen_id LEFT JOIN `ms_specimens_x_taxonomy` AS sxt ON sxt.specimen_id = s.specimen_id LEFT JOIN `ms_taxonomy_names` AS t ON t.alt_id = sxt.alt_id WHERE `recordset` = "bd7cfd55-bf55-46fc-878d-e6e11f574ccd" """ media = db.db_execute(c, sql) m_stats = {} for m in media: m_id = m['media_id'] mf_id = m['media_file_id'] if m_id not in m_stats: # New entry, must compile media group stats m_stats[m_id] = { 'mg': m, 'mg_stats': stats.MediaDownloadStats(None, m_id), 'mf': {}, 'mf_stats': {} } m_stats[m_id]['mf'][mf_id] = m
def update_specimen_taxon():
    """Re-link specimens to the taxon that iDigBio currently reports.

    For each specimen in specimens_to_be_renamed.csv, compare the MS genus
    with iDigBio's; when they differ, either create a brand-new taxon (and
    taxonomy-name record) from the iDigBio data, or re-link the specimen to
    an already-existing matching taxon. Each iteration checkpoints its
    outcome to update_taxon_success_summary.csv.
    """
    success_csv = pandas.DataFrame(
        columns=['specimen_id', 'success', 'new_taxon'])
    specimen_csv = pandas.read_csv('specimens_to_be_renamed.csv')
    for index, row in specimen_csv.iterrows():
        # Re-read the summary so each iteration extends the on-disk state.
        success_csv = pandas.read_csv('update_taxon_success_summary.csv')
        specimen_id = str(row['specimen_id'])
        new_row = {}
        new_taxon = 0
        print(specimen_id)

        # Get MS specimen record
        conn = db.db_conn()
        c = conn.cursor()
        sql = """
            SELECT * FROM `ms_specimens` AS s
            JOIN `ms_specimens_x_taxonomy` AS sxt ON sxt.specimen_id = s.specimen_id
            JOIN `ms_taxonomy_names` AS n ON n.alt_id = sxt.alt_id
            WHERE s.specimen_id = %s
        """
        spec_rec = db.db_execute(c, sql, specimen_id)
        spec_rec = spec_rec[0]

        # Get iDB specimen record for new UUID
        idb = requests.get('https://search.idigbio.org/v2/view/records/' +
                           spec_rec['uuid']).json()
        item = idb

        # ------------------TAXONOMY RECORD-------------------------
        # Do taxa differ?
        if get_json_field(item, ['indexTerms', 'genus']).lower() != spec_rec['genus'].lower():
            print('Current taxon and real taxon differ')
            print(get_json_field(item, ['indexTerms', 'genus']).lower())
            print(spec_rec['genus'].lower())
            # Taxa differ. Is there a MS taxon for the 'new' taxon?
            conn = db.db_conn()
            c = conn.cursor()
            sql = """
                SELECT * FROM `ms_taxonomy` AS t
                JOIN `ms_taxonomy_names` AS n ON n.taxon_id = t.taxon_id
                WHERE n.genus = %s AND n.species = %s
            """
            r = db.db_execute(c, sql, [
                get_json_field(item, ['indexTerms', 'genus']),
                get_json_field(item, ['indexTerms', 'specificepithet'])
            ])
            if len(r) == 0:
                print('Need to generate new taxon')
                # Steps:
                # 1) Delete the sxt link to the current taxon
                # 2) Create a new ms_taxonomy record for this taxon
                # 3) Create a new ms_taxonomy_names record, using iDB data
                # 4) Create a new sxt link from specimen to new taxon

                # Get old taxon_id
                conn = db.db_conn()
                c = conn.cursor()
                sql = """
                    SELECT * FROM `ms_specimens_x_taxonomy` WHERE specimen_id = %s
                """
                old_sxt = db.db_execute(c, sql, int(specimen_id))
                if len(old_sxt) == 1:
                    old_taxon_id = old_sxt[0]['taxon_id']
                    old_link_id = old_sxt[0]['link_id']
                else:
                    raise ValueError('More than one sxt for specimen_id ' +
                                     str(specimen_id))

                # delete old link
                conn = db.db_conn()
                c = conn.cursor()
                sql = """
                    DELETE FROM `ms_specimens_x_taxonomy` WHERE link_id = %s
                """
                del_res = db.db_execute(c, sql, int(old_link_id))

                # create new ms_taxonomy record
                taxon_vals = [
                    int(spec_rec['project_id']),
                    int(spec_rec['user_id']),
                    int(time.time()),
                    int(time.time())
                ]
                conn = db.db_conn()
                c = conn.cursor()
                sql = """
                    INSERT INTO `ms_taxonomy` (project_id, user_id, created_on, last_modified_on)
                    VALUES (%s, %s, %s, %s)
                """
                tax_res = db.db_execute(c, sql, taxon_vals)

                # create new ms_taxonomy_names
                new_taxon_id = c.lastrowid
                tn_vals = [
                    int(spec_rec['project_id']),
                    int(new_taxon_id),
                    int(spec_rec['user_id']),
                    get_json_field(item, ['indexTerms', 'specificepithet']),
                    get_json_field(item, ['indexTerms', 'kingdom']),
                    get_json_field(item, ['indexTerms', 'phylum']),
                    get_json_field(item, ['indexTerms', 'class']),
                    get_json_field(item, ['indexTerms', 'order']),
                    get_json_field(item, ['indexTerms', 'family']),
                    int(time.time()),
                    int(time.time()),
                    get_json_field(item, ['indexTerms', 'genus']).capitalize()
                ]
                conn = db.db_conn()
                c = conn.cursor()
                sql = """
                    INSERT INTO `ms_taxonomy_names`
                    (project_id, taxon_id, user_id, species, ht_kingdom, ht_phylum,
                     ht_class, ht_order, ht_family, created_on, last_modified_on,
                     genus, is_primary)
                    VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, 1)
                """
                tn_res = db.db_execute(c, sql, tn_vals)

                # create new sxt
                new_alt_id = c.lastrowid
                conn = db.db_conn()
                c = conn.cursor()
                sql = """
                    INSERT INTO `ms_specimens_x_taxonomy` (specimen_id, taxon_id, alt_id, user_id)
                    VALUES (%s, %s, %s, %s)
                """
                ins_res = db.db_execute(c, sql, [
                    int(specimen_id),
                    int(new_taxon_id),
                    int(new_alt_id),
                    int(spec_rec['user_id'])
                ])
                success = 1
                new_taxon = 1
            elif len(r) == 1:
                print('Matching specimen to currently existing taxon record')
                # Steps:
                # 1) Get old taxon id
                # 2) Get new taxon id
                # 3) Check for previously existing link specimen -> new_taxon_id
                # 4) delete old link
                # 5) create new link

                # Get old taxon_id
                conn = db.db_conn()
                c = conn.cursor()
                sql = """
                    SELECT * FROM `ms_specimens_x_taxonomy` WHERE specimen_id = %s
                """
                old_sxt = db.db_execute(c, sql, [specimen_id])
                if len(old_sxt) == 1:
                    old_taxon_id = int(old_sxt[0]['taxon_id'])
                    old_link_id = int(old_sxt[0]['link_id'])
                else:
                    raise ValueError('More than one sxt for specimen_id ' +
                                     specimen_id)

                # Get new taxon_id
                new_taxon_id = int(r[0]['taxon_id'])
                new_alt_id = int(r[0]['alt_id'])

                # Is there already an sxt for the new_taxon_id?
                conn = db.db_conn()
                c = conn.cursor()
                sql = """
                    SELECT * FROM `ms_specimens_x_taxonomy`
                    WHERE specimen_id = %s AND taxon_id = %s
                """
                new_sxt = db.db_execute(c, sql, [int(specimen_id), new_taxon_id])
                if len(new_sxt) > 0:
                    raise ValueError(
                        'Already existing link between taxon id ' +
                        str(new_taxon_id) + ' and specimen_id ' + str(specimen_id))
                else:
                    # delete old sxt
                    conn = db.db_conn()
                    c = conn.cursor()
                    sql = """
                        DELETE FROM `ms_specimens_x_taxonomy` WHERE link_id = %s
                    """
                    del_res = db.db_execute(c, sql, old_link_id)

                    # create new sxt
                    conn = db.db_conn()
                    c = conn.cursor()
                    sql = """
                        INSERT INTO `ms_specimens_x_taxonomy` (specimen_id, taxon_id, alt_id, user_id)
                        VALUES (%s, %s, %s, %s)
                    """
                    ins_res = db.db_execute(c, sql, [
                        int(specimen_id), new_taxon_id, new_alt_id,
                        int(spec_rec['user_id'])
                    ])
                    success = 1
                    new_taxon = 1
            else:
                raise ValueError(
                    str(len(r)) + ' MS taxon records for specimen uuid ' +
                    str(row['New_iDB_uuid']))
        else:
            success = 0
            warnings.warn('Genera do not differ for specimen ' +
                          str(specimen_id) + ', skipping')

        new_specimen_id = int(specimen_id)
        # BUG FIX: new_specimen_id was computed but never stored, so the
        # summary CSV's 'specimen_id' column was always empty.
        new_row['specimen_id'] = new_specimen_id
        new_row['success'] = success
        new_row['new_taxon'] = new_taxon
        # BUG FIX: DataFrame.append was removed in pandas 2.0; pandas.concat
        # is the supported equivalent.
        success_csv = pandas.concat([success_csv, pandas.DataFrame([new_row])],
                                    ignore_index=True)
        success_csv.to_csv('update_taxon_success_summary.csv', index=False,
                           index_label=False)