def save_user_info(info):
    time = datetime.datetime.now()
    info['create_time'] = time
    info['update_time'] = time
    info['status'] = 'R'  # 'R' means review
    if len(info['password']) < 10:
        # Message: "Password too short, please use at least 10 characters."
        return {'data': '', 'errcode': 3, 'msg': "密码设置过短,请设置为10位以上!"}
    db = DBConn()
    name_result = db.execute(
        "select id, status from user_info where username='%s'" % info.get('username'),
        get_all=False)
    tel_result = db.execute(
        "select id, status from user_info where tel='%s'" % info.get('tel'),
        get_all=False)

    def fail_info(sql_result, check_type):
        # Builds the user-facing (Chinese) failure message: pending review,
        # already registered, or already exists.
        status = sql_result[1]
        if status == 'R':
            msg = '该{check_type}之前已经注册,但状态处于等待中,请使用其它{check_type}注册!'
        elif status == 'Y':
            msg = '注册失败:该{check_type}已经注册,请使用其它{check_type}!'
        else:
            msg = '注册失败:该{check_type}已经存在!'
        return {'data': '', 'errcode': 2, 'msg': msg.format(check_type=check_type)}

    if name_result:
        ret = fail_info(name_result, '名字')
    elif tel_result:
        ret = fail_info(tel_result, '电话号码')
    else:
        db.insert('user_info', info)
        ret = {'data': '', 'errcode': 0, 'msg': '注册成功:等待审核!'}
    return ret

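# Hedged illustration (not this project's DBConn API): save_user_info above interpolates
# user input into SQL with the % operator, which is open to SQL injection. With a plain
# DB-API 2.0 connection -- sqlite3 is used here purely as a stand-in -- the same existence
# check would bind parameters instead. The helper name and the connection are hypothetical.
import sqlite3


def _username_taken(conn, username):
    """Return True if the username already exists (parameterized lookup, illustration only)."""
    cur = conn.execute(
        "select id, status from user_info where username = ?", (username,))
    return cur.fetchone() is not None
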
def get_manager_list():
    db = DBConn()
    cmd = "select username from user_info where role='manager'"
    result = db.execute(cmd)
    return [i[0] for i in result]

def get_one_project_data(project_number):
    # Select across multiple tables.
    # Why project_id = spm.id: see save_info, where data_info['project_id'] = int(project_id).
    cmd = """select * from sample_project_master spm, sample_species ss,
        sample_type st, dna_sample_sequencing_type dsst,
        rna_sample_sequencing_type rsst, sample_other other
        where ss.project_id=spm.id and st.project_id=spm.id
        and dsst.project_id=spm.id and rsst.project_id=spm.id
        and other.project_id=spm.id
        and spm.project_number='%s'""" % (project_number)
    db = DBConn()
    result = db.execute(cmd, get_all=False)
    data = dict(result)
    # Drop the sample_info_detail table query for now:
    '''
    cmd = "select * from sample_info_detail where project_id=%s" % result['project_id']
    table_data = []
    results = db.execute(cmd)
    for result in results:
        table_data.append(dict(result))
    data['table_data'] = table_data
    '''
    return data

def main():
    '''Application main entry point.'''
    logger = logging.Logger('mainApp')
    logger.setLevel(logging.INFO)
    fh = logging.FileHandler('envo.log')
    fmt = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    fh.setFormatter(fmt)
    logger.addHandler(fh)
    logger.info('ENVOMUSE DELIVERY TOOL STARTED')

    db_conn = DBConn()
    db_conn.check()  # prepare database

    app = MainApp(conn=db_conn, session=requests.session())
    app.title("ENVOMUSE交付工具")
    app.minsize(825, 600)
    app.geometry("825x600")
    app.configure(background="white")
    app.rowconfigure(0, weight=1)
    app.columnconfigure(0, weight=1)
    app.mainloop()

def get_one_project_data(project_number):
    cmd = """select * from sample_project_master spm, sample_species ss,
        sample_type st, dna_sample_sequencing_type dsst,
        rna_sample_sequencing_type rsst, sample_other other
        where ss.project_id=spm.id and st.project_id=spm.id
        and dsst.project_id=spm.id and rsst.project_id=spm.id
        and other.project_id=spm.id
        and spm.project_number=%s""" % project_number
    db = DBConn()
    result = db.execute(cmd, get_all=False)
    data = dict(result)
    cmd = "select * from sample_info_detail where project_id=%s" % result['project_id']
    table_data = []
    results = db.execute(cmd)
    for result in results:
        table_data.append(dict(result))
    data['table_data'] = table_data
    return data

def get_user_role(username, password=''):
    db = DBConn()
    cmd = "select role from user_info where username='%s'" % username
    if password:
        cmd += " and password='%s'" % password
    result = db.execute(cmd, get_all=False)
    role = result[0] if result else ''
    return role

def get_analysis_data(username, role, project_id):
    if not project_id:
        return {}
    cmd = """select * from analysis_master where project_id=%s""" % project_id
    db = DBConn()
    result = db.execute(cmd, get_all=False)
    return dict(result) if result else {}

def get_other_info(username):
    db = DBConn()
    if phone_check(username):
        cmd = "select e_mail, tel, company, field, customer_name from user_info where tel='%s'" % username
    else:
        cmd = "select e_mail, tel, company, field, customer_name from user_info where username='%s'" % username
    result = db.execute(cmd, get_all=False)
    # Five columns are selected, so the empty fallback has five fields as well.
    return result if result else ('', '', '', '', '')

def get_sample_list_by_project(project_id):
    sample_list = {}
    db = DBConn()
    cmd = "SELECT id_alias,sample_name FROM SEQ_SA_INFO.sample_info_detail where project_id=%s" % project_id
    results = db.execute(cmd)
    for result in results:
        sample_list[result[0]] = result[1]
    return sample_list

def get_sample_table_data(project_id):
    all_project_sample_data = []
    db = DBConn()
    cmd = "SELECT * FROM SEQ_SA_INFO.sample_table where project_id=%s" % project_id
    results = db.execute(cmd)
    for result in results:
        all_project_sample_data.append(dict(result))
    return all_project_sample_data

def get_project_number_list(username, user_role):
    if user_role == 'manager':
        cmd = "select concat(project_name, '-', project_number) from sample_project_master where project_leader='%s'" % username
    elif user_role == 'user':
        cmd = "select concat(project_name, '-', project_number) from sample_project_master where created_by='%s'" % username
    db = DBConn()
    results = db.execute(cmd)
    return [i[0] for i in results]

def get_detail_sample_data(project_number):
    data = []
    cmd = """SELECT d.* FROM sample_info_detail d, sample_project_master m
        where m.id=d.project_id and m.project_number=%s""" % project_number
    db = DBConn()
    results = db.execute(cmd)
    for result in results:
        data.append(dict(result))
    return data

def get_analysis_data(username, role, selected_project):
    if not selected_project:
        return {}
    project_number = selected_project.split('-')[-1]
    cmd = """select a.* from analysis_master a, sample_project_master s
        where a.project_id=s.id and s.project_number=%s""" % project_number
    db = DBConn()
    result = db.execute(cmd, get_all=False)
    return dict(result) if result else {}

def get_detail_sample_data(project_number):
    data = []
    # The d alias must be declared in the FROM clause for d.* and the join to work.
    cmd = """SELECT d.* FROM sample_info_detail d, sample_project_master m
        where m.id=d.project_id and m.project_number='%s'""" % project_number
    db = DBConn()
    results = db.execute(cmd)
    for result in results:
        data.append(dict(result))
    return data

def save_user_info(info):
    time = datetime.datetime.now()
    info['create_time'] = time
    info['update_time'] = time
    db = DBConn()
    result = db.execute(
        "select id from user_info where username='%s'" % info.get('username'),
        get_all=False)
    if not result:
        db.insert('user_info', info)
        return {'data': '', 'errcode': 0, 'msg': 'SUCCESS'}
    else:
        return {'data': '', 'errcode': 2, 'msg': '注册失败:该用户已经注册,请使用其它名字!'}

def get_user_role(username, password=''):
    db = DBConn()
    if phone_check(username):
        cmd = "select role,status from user_info where tel='%s'" % username
    else:
        cmd = "select role,status from user_info where username='%s'" % username
    if password:
        cmd += " and password='%s'" % password
    result = db.execute(cmd, get_all=False)
    role = result[0] if result else ''
    status = result[1] if result else ''
    return role, status

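# Usage sketch (hypothetical caller; the real login handling lives elsewhere in this project):
# role, status = get_user_role('alice', password='her-password')
# if not role:
#     ...  # unknown user or wrong password
# elif status == 'R':
#     ...  # account still waiting for review (see save_user_info: 'R' means review)
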
def modify_base_info(info):
    time = datetime.datetime.now()
    info['update_time'] = time
    username = info.get('username')
    tel = info.get('tel')
    db = DBConn()
    cmd = "select id from user_info where username!='%s' and tel='%s'" % (username, tel)
    check_result = db.execute(cmd, get_all=False)
    if check_result:
        return {'data': '', 'errcode': 1, 'msg': '该电话号码已经被其它用户注册,请使用其它号码!'}
    else:
        db.update('user_info', {'username': username}, info)
        return {'data': '', 'errcode': 0, 'msg': '更新成功!'}

def get_project_number_list(username, user_role):
    if user_role == 'manager':
        cmd = "select id, concat(project_name, '-', project_number) from sample_project_master where project_leader='%s'" % username
    elif user_role == 'user':
        cmd = "select id, concat(project_name, '-', project_number) from sample_project_master where created_by='%s'" % username
    db = DBConn()
    results = db.execute(cmd)
    data = {}
    for result in results:
        data[result[0]] = result[1]
    return data

def get_analysis_table_data(project_id):
    data = {}
    db = DBConn()
    cmd = """select info.* from sample_packet_information info, analysis_master m
        where m.id=info.master_id and m.project_id=%s""" % project_id
    results = db.execute(cmd)
    data['sample_packet_information'] = [dict(i) for i in results]
    cmd = """select info.* from compare_table info, analysis_master m
        where m.id=info.master_id and m.project_id=%s""" % project_id
    results = db.execute(cmd)
    data['compare_table'] = [dict(i) for i in results]
    return data

def save_simple_data(project_id, data):
    try:
        table_name = "sample_table"
        db = DBConn()
        db.delete(table_name, {'project_id': project_id})
        for row in data:
            row['project_id'] = project_id
            db.insert(table_name, row)
        ret = {'data': '', 'errcode': 0, 'msg': ''}
    except Exception as e:
        import traceback
        traceback.print_exc()
        ret = {'data': '', 'errcode': 1, 'msg': '%s DB issue!' % e}
    return ret

def main():
    logger = logging.getLogger('')
    handler = logging.handlers.WatchedFileHandler('/var/log/cfmrda.qrz.log')
    handler.setFormatter(logging.Formatter(
        '%(asctime)s %(name)-12s %(levelname)-8s %(message)s'))
    logger.setLevel(logging.DEBUG)
    logger.addHandler(handler)
    handler.setLevel(logging.DEBUG)

    conf = site_conf()
    _db = DBConn(conf.items('db'))
    yield from _db.connect()

    qrzru = QRZRuLink(asyncio.get_event_loop())
    pfx = Pfx('/usr/local/webcluster/cty.dat')

    @asyncio.coroutine
    def db_write(data):
        yield from _db.execute(
            """insert into callsigns_rda (callsign, source, rda)
            values (%(callsign)s, 'QRZ.ru', %(rda)s)""", data)

    callsigns = yield from _db.execute(
        """select distinct hunter from rda_hunter""")
    logging.debug('callsigns list received -- ' + str(len(callsigns)))
    params = []
    cnt = 0
    ru_cnt = 0
    fnd_cnt = 0
    for _cs in callsigns:
        cnt += 1
        cs_pfx = pfx.get(_cs)
        if cs_pfx in ['R', 'R2F', 'R9']:
            m_special = RE_SPECIAL.search(_cs)
            if m_special:
                continue
            ru_cnt += 1
            data = yield from qrzru.query(_cs)
            if data and 'state' in data and data['state']:
                fnd_cnt += 1
                params.append({'callsign': _cs, 'rda': data['state']})
                logging.debug(_cs + ' found')
            if len(params) >= 100:
                yield from db_write(params)
                params = []
                logging.debug('Processed ' + str(cnt) + '/' + str(ru_cnt) + '/'
                              + str(fnd_cnt) + ' of ' + str(len(callsigns)))
                cnt += 1
    logging.debug('qrz query complete')

def db_setup():
    '''
    PURPOSE: Using the db.DBConn() class, this method creates the avatardata
    database table if the table does not already exist.
    '''
    global DBC
    #CREATE DB INSTANCE
    DBC = DBConn('db_config.json')
    #ENSURE TABLE(S) IS/ARE CREATED
    DBC.create()

def get_analysis_table_data(username, selected_project):
    data = {}
    db = DBConn()
    project_number = selected_project.split('-')[-1]
    project_id = get_project_id_by_num(db, project_number)
    cmd = """select info.* from sample_packet_information info, analysis_master m
        where m.id=info.master_id and m.project_id=%s""" % project_id
    results = db.execute(cmd)
    data['sample_packet_information'] = [dict(i) for i in results]
    cmd = """select info.* from compare_table info, analysis_master m
        where m.id=info.master_id and m.project_id=%s""" % project_id
    results = db.execute(cmd)
    data['compare_table'] = [dict(i) for i in results]
    return data

def change_password(info):
    username = info['username']
    db = DBConn()
    cmd = "select password from user_info where username='%s'" % username
    result = db.execute(cmd, get_all=False)
    if result and result[0] == info['old_passwd'] and info['new_passwd'] != info['old_passwd']:
        db.update('user_info', {'username': username},
                  {'password': info['new_passwd'], 'update_time': datetime.datetime.now()})
        return {'data': '', 'errcode': 0, 'msg': '更新成功!'}
    elif result and result[0] != info['old_passwd']:
        return {'data': '', 'errcode': 1, 'msg': '旧密码错误!'}
    elif info['new_passwd'] == info['old_passwd']:
        return {'data': '', 'errcode': 1, 'msg': '新密码和旧密码不能一样!'}
    else:
        return {'data': '', 'errcode': 1, 'msg': '用户名不存在!'}

def export_rankings(conf):
    """rebuild rankings table in db and export top100 to json file for web"""
    logging.debug('export rankings')
    _db = DBConn(conf.items('db'))
    yield from _db.connect()
    yield from _db.execute("select from build_rankings()")
    logging.debug('rankings table rebuilt')
    rankings = yield from _db.execute(
        """select rankings_json('_rank < 104') as data""", None, False)
    save_json(rankings, conf.get('web', 'root') + '/json/rankings.json')
    logging.debug('export rankings finished')

def upper(self):
    sql_get = 'SELECT id,suburb FROM overall_sold_history_increment WHERE id<=1000'
    sql_update = 'UPDATE overall_sold_history_increment SET suburb=\'%s\' WHERE id=%s'
    with DBConn() as db:
        history = db.fetchall(sql_get)
        for item in history:
            db.execute(sql_update % (item[1].upper(), item[0]))

def get_all_user_data():
    data = []
    cmd = "select username,e_mail email,tel,company,age,role,field,status,notes from user_info"
    results = DBConn().execute(cmd)
    for result in results:
        data.append(dict(result))
    return data

def show_all_data(username, role='user'):
    data = []
    cmd = """select project_number,project_name,cust_organization,cust_user,status,create_time
        from sample_project_master spm where """
    if role == 'manager':
        cmd += "spm.project_leader='%s'" % username
    elif role == 'user':
        cmd += "spm.created_by='%s'" % username
    db = DBConn()
    result = db.execute(cmd)
    for i in result:
        temp_dict = dict(i)
        data.append(temp_dict)
    return data

def main():
    logger = logging.getLogger('')
    logger.setLevel(logging.DEBUG)
    conf = site_conf()
    _db = DBConn(conf.items('db'))
    yield from _db.connect()
    yield from _db.execute('delete from old_rda;')
    yield from _db.execute('delete from rda;')

    rda_rus = requests.get('http://rdaward.org/rda_rus.txt').text
    params_old = []
    params = []
    lines = rda_rus.split('\r\n')
    re_rda_line = re.compile(
        r'(^[A-Z][A-Z]-\d\d)\s+[^\t]+\t*([A-Z][A-Z]-\d\d|\*\*\*)?')
    for line in lines:
        match_rda_line = re_rda_line.match(line)
        if match_rda_line:
            if match_rda_line.group(2):
                old = match_rda_line.group(1)
                new = None if match_rda_line.group(2) == '***' else match_rda_line.group(2)
                params_old.append({'old': old, 'new': new})
            else:
                params.append({'rda': match_rda_line.group(1)})
    yield from _db.execute("""insert into old_rda values (%(old)s, %(new)s)""",
                           params_old, progress=True)
    yield from _db.execute("""insert into rda values (%(rda)s)""",
                           params, progress=True)

def main():
    logger = logging.getLogger('')
    logger.setLevel(logging.DEBUG)
    conf = site_conf()
    _db = DBConn(conf.items('db'))
    yield from _db.connect()
    with open('/var/www/adxc.test/csv/rda_old_new.csv', 'r') as f_data:
        params = []
        for line in f_data.readlines():
            fields = {}
            fields['old'], fields['new'] = line.strip().split(';')
            if fields['old'] and fields['new']:
                params.append(fields)
    yield from _db.execute("""insert into old_rda values (%(old)s, %(new)s)""",
                           params, progress=True)

def save_info(all_info, username, action='new'):
    db_instance = DBConn()
    try:
        project_id = save_sample_project_master_info(
            db_instance, all_info.get('sample_project_master_info'), username, action)
    except Exception as e:
        print(e)
        return {'data': '', 'errcode': 1, 'msg': '保存失败!项目编号已经存在,请另外选择项目编号。'}

def export_msc(conf):
    """export misc db data to json file for web"""
    logging.debug('export misc')
    _db = DBConn(conf.items('db'))
    yield from _db.connect()
    data = {}
    data['qsoCount'] = (yield from _db.execute(
        """select count(*) as qso_count from qso;""", None, False))
    data['userActivity'] = (yield from _db.execute(
        """
        select json_agg(data) from
            (select json_build_object('callsign',
                coalesce(qsl_wait.callsign, qsl_today.callsign, email.callsign),
                'qslWait', qsl_wait, 'qslToday', qsl_today, 'email', email) as data
            from
                (select user_cs as callsign, count(*) as qsl_wait
                from cfm_qsl_qso
                where state is null
                group by user_cs) as qsl_wait
            full join
                (select user_cs as callsign, count(*) as qsl_today
                from cfm_qsl_qso
                where state
                group by user_cs) as qsl_today
                on qsl_wait.callsign = qsl_today.callsign
            full join
                (select user_cs as callsign, count(*) as email
                from cfm_request_qso
                where not sent and user_cs is not null
                group by user_cs) as email
                on coalesce(qsl_wait.callsign, qsl_today.callsign) = email.callsign
            order by coalesce(qsl_wait.callsign, qsl_today.callsign, email.callsign)
            ) as data""", None, False))
    save_json(data, conf.get('web', 'root') + '/json/msc.json')
    logging.debug('export misc finished')

def export_table_info(table_name, project_id, title_list):
    book = Workbook()
    sheet1 = book.worksheets[0]
    # Connect to the database.
    db = DBConn()
    cmd = "select * from {table_name} where project_id = '{project_id}'".format(
        table_name=table_name, project_id=project_id)
    results = db.execute(cmd)
    # Header row from the given titles; header_map (defined elsewhere in this module)
    # maps each title to its column name in the result rows.
    for i, title in enumerate(title_list):
        sheet1.cell(row=1, column=i + 1).value = title
    for i, result in enumerate(results):
        for j, title in enumerate(title_list):
            sheet1.cell(row=i + 2, column=j + 1).value = result.get(header_map[title])
    export_path = os.path.dirname(__file__) + '/static/export/'
    file_name = (table_name + '_' + project_id + '_' +
                 datetime.datetime.now().strftime("%Y%m%d%H%M%S") + '.xls')
    full_path_name = os.path.join(export_path, file_name)
    book.save(filename=full_path_name)
    return file_name

def export_recent_uploads(conf):
    """export 20 recent uploaded file batches to json file for web"""
    logging.debug('export recent uploads')
    _db = DBConn(conf.items('db'))
    yield from _db.connect()
    data = yield from _db.execute(
        """
        select json_agg(json_build_object(
                'activators', activators,
                'rda', rda,
                'uploadDate', to_char(max_tstamp, 'DD mon YYYY'),
                'uploadTime', to_char(max_tstamp, 'HH24:MI'),
                'uploadType', upload_type,
                'uploader', uploader)) as data
        from
            (select user_cs as uploader, upload_type,
                max(tstamp) as max_tstamp, array_agg(id) as ids
            from uploads
            where ext_logger_id is null
            group by date(tstamp), user_cs, upload_type
            order by max_tstamp desc
            limit 20) as ru,
            lateral
            (select array_agg(distinct station_callsign) as activators
            from qso
            where upload_id = any(ids)) as acts,
            lateral
            (select array_agg(rda) as rda
            from
                (select json_build_object('rda', array_agg(distinct rda),
                    'id', upload_id) as rda
                from qso
                where upload_id = any(ids)
                group by upload_id) as rdas0) as rdas
        """, None, False)
    save_json(data, conf.get('web', 'root') + '/json/recent_uploads.json')
    logging.debug('export recent uploads finished')

def show_all_data(username, role='user'):
    data = []
    cmd = """select * from sample_project_master spm, sample_species ss,
        sample_type st, dna_sample_sequencing_type dsst,
        rna_sample_sequencing_type rsst
        where ss.project_id=spm.id and st.project_id=spm.id
        and dsst.project_id=spm.id and rsst.project_id=spm.id"""
    if role == 'manager':
        cmd += " and spm.project_leader='%s'" % username
    elif role == 'user':
        cmd += " and spm.created_by='%s'" % username
    db = DBConn()
    result = db.execute(cmd)
    for i in result:
        data.append(dict(i))
    return data

def export_stat(conf):
    """export statistic data to json file for web"""
    logging.debug('export stats')
    _db = DBConn(conf.items('db'))
    yield from _db.connect()
    data = {}
    data['qso by rda'] = (yield from _db.execute(
        """
        select json_object_agg(rda, data) as data from
            (select rda, json_object_agg(band, data) as data from
                (select rda, band, json_object_agg(mode, qso_count) as data from
                    (select count(*) as qso_count, rda, band, mode
                    from qso
                    where upload_id is null or
                        (select enabled from uploads where id=upload_id)
                    group by mode, band, rda) as q0
                group by rda, band) as q1
            group by rda) as q2
        """, None, False))
    # Roll the per-mode counts up into per-band totals and an overall per-rda total.
    for rda_data in data['qso by rda'].values():
        rda_total = {'total': 0}
        for band_data in rda_data.values():
            band_total = 0
            for mode, qso_count in band_data.items():
                band_total += qso_count
                if mode not in rda_total:
                    rda_total[mode] = 0
                rda_total[mode] += qso_count
            band_data['total'] = band_total
            rda_total['total'] += band_total
        rda_data['total'] = rda_total
    save_json(data, conf.get('web', 'root') + '/json/stat.json')
    logging.debug('export stats finished')

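# Worked illustration of the roll-up above (hypothetical counts, not real data): for one RDA,
# {'80M': {'CW': 2, 'SSB': 1}, '40M': {'CW': 3}} becomes
# {'80M': {'CW': 2, 'SSB': 1, 'total': 3},
#  '40M': {'CW': 3, 'total': 3},
#  'total': {'CW': 5, 'SSB': 1, 'total': 6}}.
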
def save_compare_text(project_id, all_info, current_time):
    try:
        db = DBConn()
        result = db.execute(
            "select project_name from sample_project_master where id=%s" % project_id,
            get_all=False)
        project_name = result[0]
        project_dir = os.path.join(os.path.dirname(__file__), 'static/export/' + project_name)
        if not os.path.exists(project_dir):
            os.mkdir(project_dir)
        group_file_name = os.path.join(project_dir, 'sample_inf' + current_time + '.txt')
        compare_file_name = os.path.join(project_dir, 'group_compare' + current_time + '.txt')
        sample_packet_information = all_info['sample_packet_information']
        compare_table = all_info['compare_table']
        fd_group_file = open(group_file_name, 'w+')
        fd_compare_file = open(compare_file_name, 'w+')
        fd_group_file.write(all_info['reference_genome'] + '\n')
        for row in sample_packet_information:
            fd_group_file.write(row['sample_group'] + '\t' + row['sample_name'] + '\n')
        for row in compare_table:
            fd_compare_file.write(row['comparison_name'] + '\t' + row['sample_group1'] +
                                  '\t' + row['sample_group2'] + '\n')
        fd_group_file.close()
        fd_compare_file.close()
    except Exception as e:
        print(e)

def distct_history(self):
    sql_startid = 'SELECT MIN(id) FROM %s WHERE processed_date IS NULL'
    sql_getid = 'SELECT id FROM %s WHERE id NOT IN (SELECT MIN(id) FROM overall_sold_history_increment GROUP BY url) AND id>=%s'
    sql_delete = 'DELETE FROM %s WHERE id=%s'
    sql_date = 'UPDATE %s SET processed_date=\'%s\' WHERE processed_date IS NULL'
    current_date = datetime.datetime.now().strftime('%Y-%m-%d')
    with DBConn() as db:
        startid = db.fetchall(sql_startid % TB_INCREM)[0][0]
        if startid is not None:
            id_set = db.fetchall(sql_getid % (TB_INCREM, startid))
            for id in id_set:
                db.execute(sql_delete % (TB_INCREM, id[0]))
                print('[*]Deleting id=%s' % id[0])
            db.execute(sql_date % (TB_INCREM, current_date))
        else:
            print('[*]No new record is found.')

def post(self):
    try:
        # Read the request.
        parser = reqparse.RequestParser()
        parser.add_argument('data', type=str)
        args = parser.parse_args()
        _userData = json.loads(args['data'])
        pprint(_userData)

        # Parse the payload before committing to the db.
        _userMagnetic = _userData['Magnetic']
        _userWifiInfo = _userData['WifiInfo']
        sampleMagnetic = dict()
        sampleMagnetic[u'x'] = _userMagnetic[0]
        sampleMagnetic[u'y'] = _userMagnetic[1]
        sampleMagnetic[u'z'] = _userMagnetic[2]
        sampleWifi = dict()
        for i, data in enumerate(_userWifiInfo):
            sampleWifi[str(i)] = data
        pprint(sampleMagnetic)
        pprint(sampleWifi)

        # Commit to the db.
        point = DBConn.Point(x=1, y=1, magnetic=sampleMagnetic, wifiscan=sampleWifi)
        db.setData('it2', point.to_dict())
        pprint('done')
        return {"status": "success"}
        '''
        return {
            'Email': _userEmail,
            'UserName': _userName,
            'Password': _userPassword
        }
        '''
    except Exception as e:
        return {'error': str(e)}

def post(self):
    try:
        # Read the request.
        parser = reqparse.RequestParser()
        parser.add_argument('data', type=str)
        args = parser.parse_args()
        _userData = json.loads(args['data'])
        pprint(_userData)
        # Other ways to read the payload:
        # print("data,", request.data)              # read all payload
        # print("get_data(),", request.get_data())  # read data
        # print("get_json(),", request.get_json())  # read only json
        # print("stream.read(),", request.stream.read())

        # Parse the payload before committing to the db.
        _userMagnetic = _userData['Magnetic']
        _userWifiInfo = _userData['WifiInfo']
        _userX = _userData['x']
        _userY = _userData['y']
        sampleMagnetic = dict()
        sampleMagnetic[u'x'] = _userMagnetic[0]
        sampleMagnetic[u'y'] = _userMagnetic[1]
        sampleMagnetic[u'z'] = _userMagnetic[2]
        sampleWifi = dict()
        for i, data in enumerate(_userWifiInfo):
            sampleWifi[str(i)] = data
        pprint(sampleMagnetic)
        pprint(sampleWifi)

        # Commit to the db.
        point = DBConn.Point(x=_userX, y=_userY, magnetic=sampleMagnetic, wifiscan=sampleWifi)
        db.setData('it5', point.to_dict())
        pprint('done')
        return {"status": "success"}
    except Exception as e:
        print("Error:", str(e))
        return {'error': str(e)}

def add_subid(self, *, date=None):
    dbwrapper = DBWrapper()
    if date is not None:
        date_format = re.compile(r'(\d{4})\-(\d{1,2})\-(\d{1,2})')
        date_YMD = date_format.match(date).groups()
        date_YMD = list(map(int, date_YMD))
        if len(date_YMD) != 3:
            print('[!]Error: Optional param of add_subid requires a string like \'2017-1-1\'')
            raise TypeError
        if (date_YMD[0] > 1900 and date_YMD[1] <= 12 and date_YMD[2] <= 31
                and date_YMD[1] > 0 and date_YMD[2] > 0):
            pass
        else:
            print('[!]Error: Optional param of add_subid requires a string like \'2017-1-1\'')
            raise ValueError
    else:
        sql_latestdate = 'SELECT MAX(update_date) FROM %s' % TB_INCREM
        latest_date = dbwrapper.db_fetchall(sql_latestdate)
        date = latest_date[0][0].strftime('%Y-%m-%d')
    sql_gethistory = 'SELECT id,state,postcode,suburb FROM %s WHERE suburb_id is NULL AND update_date=\'%s\''
    latest_history = dbwrapper.db_fetchall(sql_gethistory % (TB_INCREM, date))
    sql_getsubid = 'SELECT id FROM %s WHERE state=\'%s\' AND postcode=\'%s\' AND suburb=\'%s\''
    sql_updatesubid = 'UPDATE %s SET suburb_id=%s WHERE id=%s'
    with DBConn() as db:
        for item in latest_history:
            suburb_id = db.fetchall(sql_getsubid % (TB_POSTCODES, item[1], item[2], item[3]))
            print('[*]id=%s, state=%s, postcode=%s, suburb=%s'
                  % (item[0], item[1], item[2], item[3]))
            if len(suburb_id) != 1 or len(suburb_id[0]) != 1:
                print('[*]Error: Fetch suburb_id failed: id=%s, state=%s, postcode=%s, suburb=%s'
                      % (item[0], item[1], item[2], item[3]))
            else:
                suburb_id = suburb_id[0][0]
                db.execute(sql_updatesubid % (TB_INCREM, suburb_id, item[0]))

class ImgLoader:
    def __init__(self, db_conf):
        self.db = DBConn(db_conf['host'], db_conf['user'], db_conf['pwd'], db_conf['db'])

    def load_tasks(self, count):
        # Slow variant:
        # sql = 'select id, content from documents where id not in (select id from thumb) order by updated_at desc limit %d' % count
        sql = 'select id, content from documents where thumb_status = 1 order by updated_at desc limit %d' % count
        rets = self.db.execute_short(sql)
        if not rets:
            return None
        tasks = []
        for rec in rets:
            url = self._get_image_url(rec['content'])
            if not url:
                self.on_done({'id': rec['id']}, None, 2)
                continue
            tasks.append({'id': rec['id'], 'url': url})
        return tasks

    def on_done(self, task, data, status=3):
        sql = 'insert into thumb(id, data) values (%s, %s)'
        if not self._thumb_exist(task['id']):
            self.db.update(sql, (task['id'], data if data else None))
        self._update_doc_status(task['id'], 0 if data else status)

    def _update_doc_status(self, id, status):
        sql = 'update documents set thumb_status = %s where id = %s'
        print('update doc set status: %d: %s' % (id, status))
        self.db.update(sql, (status, id))

    def _thumb_exist(self, id):
        sql = 'select id from thumb where id=%d' % int(id)
        return self.db.execute_short(sql)

    def _get_image_url(self, content):
        if not content:
            return ''
        soup = BeautifulSoup(content)
        imgs = soup.find('img')
        return imgs.get('src') if imgs else ''

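# Usage sketch (hypothetical config values; fetch_and_resize is not part of this class and is
# only named here for illustration):
# loader = ImgLoader({'host': 'localhost', 'user': 'img', 'pwd': 'secret', 'db': 'docs'})
# for task in (loader.load_tasks(20) or []):
#     thumb = fetch_and_resize(task['url'])
#     loader.on_done(task, thumb)
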
#!/usr/bin/python3
#coding=utf-8
import json

from db import DBConn
from conf import CONF

DB = DBConn(CONF.items('db'))
DB.connect()

feeders_data = DB.execute("""
    select id, props from devices
    where device_type_id = 3""")
for feeder_data in feeders_data:
    # Append a fourth field (0) to every timer entry in the feeder's props,
    # then write the updated props back as JSON.
    for timer in feeder_data['props'][2]:
        timer.append(0)
    feeder_data['props'] = json.dumps(feeder_data['props'])
    DB.execute(
        """update devices
        set props = %(props)s
        where id = %(id)s""", feeder_data)

def admin_save_user_info(username, status):
    db = DBConn()
    db.update('user_info', {'username': username}, {'status': status})

def get_project_info(project_id):
    db = DBConn()
    cmd = "SELECT project_name, project_number FROM sample_project_master where id=%s" % project_id
    result = db.execute(cmd, get_all=False)
    return dict(result)

def save_compare_input(all_info, username, project_id, action='new'):
    current_time = datetime.datetime.now()
    save_compare_text(project_id, all_info, str(current_time))
    db_instance = DBConn()
    project_log = get_project_log_by_num(db_instance, project_id)
    all_info['project_id'] = project_id
    cmd = "select id from analysis_master where created_by='%s' and project_id=%s" % (username, project_id)
    result = db_instance.execute(cmd, get_all=False)
    if result:
        action = 'update'
        master_id = result[0]
        db_instance.delete('sample_packet_information', {'master_id': master_id})
        db_instance.delete('compare_table', {'master_id': master_id})
    sample_packet_information = all_info['sample_packet_information']
    compare_table = all_info['compare_table']
    del all_info['sample_packet_information']
    del all_info['compare_table']
    all_info['updated_by'] = username
    all_info['update_time'] = current_time
    if action == 'new':
        all_info['create_time'] = current_time
        all_info['created_by'] = username
        master_id = db_instance.insert('analysis_master', all_info)
        project_log += '\n%s: %s created compare method.\n' % (current_time, username)
        db_instance.update('sample_project_master', {'id': project_id},
                           {'project_log': project_log})
    else:
        project_log += '\n%s: %s update compare method.\n' % (current_time, username)
        db_instance.update('sample_project_master', {'id': project_id},
                           {'project_log': project_log})
        db_instance.update('analysis_master', {'id': master_id}, all_info)
    for row in sample_packet_information:
        del row['id']
        row['master_id'] = master_id
        db_instance.insert('sample_packet_information', row)
    for row in compare_table:
        del row['id']
        row['master_id'] = master_id
        db_instance.insert('compare_table', row)
    msg = '更新成功!' if action == 'update' else '保存成功!'
    return {'data': '', 'errcode': 0, 'msg': msg}

def save_compare_input(all_info, username, selected_project, action='new'):
    time = datetime.datetime.now()
    db_instance = DBConn()
    project_number = selected_project.split('-')[-1]
    project_id = get_project_id_by_num(db_instance, project_number)
    all_info['project_id'] = project_id
    cmd = "select id from analysis_master where created_by='%s' and project_id=%s" % (username, project_id)
    result = db_instance.execute(cmd, get_all=False)
    if result:
        action = 'update'
        master_id = result[0]
        db_instance.delete('sample_packet_information', {'master_id': master_id})
        db_instance.delete('compare_table', {'master_id': master_id})
    sample_packet_information = all_info['sample_packet_information']
    compare_table = all_info['compare_table']
    del all_info['sample_packet_information']
    del all_info['compare_table']
    all_info['updated_by'] = username
    all_info['update_time'] = time
    if action == 'new':
        all_info['create_time'] = time
        all_info['created_by'] = username
        master_id = db_instance.insert('analysis_master', all_info)
    else:
        db_instance.update('analysis_master', {'id': master_id}, all_info)
    for row in sample_packet_information:
        del row['id']
        row['master_id'] = master_id
        db_instance.insert('sample_packet_information', row)
    for row in compare_table:
        del row['id']
        row['master_id'] = master_id
        db_instance.insert('compare_table', row)
    save_compare_text(selected_project, compare_table)
    msg = '更新成功!' if action == 'update' else '保存成功!'
    return {'data': '', 'errcode': 0, 'msg': msg}

def save_status(project_number, status):
    db = DBConn()
    db.update('sample_project_master', {'project_number': project_number}, {'status': status})