def upper(self):
    sql_get = 'SELECT id,suburb FROM overall_sold_history_increment WHERE id<=1000'
    sql_update = 'UPDATE overall_sold_history_increment SET suburb=\'%s\' WHERE id=%s'
    with DBConn() as db:
        history = db.fetchall(sql_get)
        for item in history:
            db.execute(sql_update % (item[1].upper(), item[0]))

def get_one_project_data(project_number):
    # Select from multiple tables.
    # Why join on project_id = spm.id: see save_info, where
    # data_info['project_id'] = int(project_id).
    cmd = """select * from sample_project_master spm, sample_species ss,
        sample_type st, dna_sample_sequencing_type dsst,
        rna_sample_sequencing_type rsst, sample_other other
        where ss.project_id=spm.id and st.project_id=spm.id
        and dsst.project_id=spm.id and rsst.project_id=spm.id
        and other.project_id=spm.id
        and spm.project_number='%s'""" % (project_number)
    db = DBConn()
    result = db.execute(cmd, get_all=False)
    data = dict(result)
    # Dropped: sample_info_detail table query.
    '''
    cmd = "select * from sample_info_detail where project_id=%s" % result['project_id']
    table_data = []
    results = db.execute(cmd)
    for result in results:
        table_data.append(dict(result))
    data['table_data'] = table_data
    '''
    return data

def get_manager_list():
    db = DBConn()
    cmd = "select username from user_info where role='manager'"
    result = db.execute(cmd)
    return [i[0] for i in result]

def main():
    logger = logging.getLogger('')
    logger.setLevel(logging.DEBUG)
    conf = site_conf()
    _db = DBConn(conf.items('db'))
    yield from _db.connect()
    yield from _db.execute('delete from old_rda;')
    yield from _db.execute('delete from rda;')

    rda_rus = requests.get('http://rdaward.org/rda_rus.txt').text
    params_old = []
    params = []
    lines = rda_rus.split('\r\n')
    re_rda_line = re.compile(
        r'(^[A-Z][A-Z]-\d\d)\s+[^\t]+\t*([A-Z][A-Z]-\d\d|\*\*\*)?')
    for line in lines:
        match_rda_line = re_rda_line.match(line)
        if match_rda_line:
            if match_rda_line.group(2):
                old = match_rda_line.group(1)
                new = None if match_rda_line.group(2) == '***' \
                    else match_rda_line.group(2)
                params_old.append({'old': old, 'new': new})
            else:
                params.append({'rda': match_rda_line.group(1)})
    yield from _db.execute("""insert into old_rda values (%(old)s, %(new)s)""",
                           params_old, progress=True)
    yield from _db.execute("""insert into rda values (%(rda)s)""",
                           params, progress=True)

def get_all_user_data():
    data = []
    cmd = "select username,e_mail email,tel,company,age,role,field,status,notes from user_info"
    results = DBConn().execute(cmd)
    for result in results:
        data.append(dict(result))
    return data

def get_other_info(username):
    db = DBConn()
    if phone_check(username):
        cmd = "select e_mail, tel, company, field, customer_name from user_info where tel='%s'" % username
    else:
        cmd = "select e_mail, tel, company, field, customer_name from user_info where username='%s'" % username
    result = db.execute(cmd, get_all=False)
    # Fall back to one empty string per selected column.
    return result if result else ('', '', '', '', '')

def save_info(all_info, username, action='new'):
    db_instance = DBConn()
    try:
        project_id = save_sample_project_master_info(
            db_instance, all_info.get('sample_project_master_info'),
            username, action)
    except Exception as e:
        print(e)
        return {'data': '', 'errcode': 1,
                'msg': 'Save failed: the project number already exists; please choose a different one.'}

def get_detail_sample_data(project_number):
    data = []
    # Alias d refers to sample_info_detail, the per-sample detail table
    # (cf. the commented-out query in get_one_project_data).
    cmd = """SELECT d.* FROM sample_info_detail d, sample_project_master m
        where m.id=d.project_id and m.project_number='%s'""" % project_number
    db = DBConn()
    results = db.execute(cmd)
    for result in results:
        data.append(dict(result))
    return data

def get_project_number_list(username, user_role):
    if user_role == 'manager':
        cmd = "select concat(project_name, '-', project_number) from sample_project_master where project_leader='%s'" % username
    elif user_role == 'user':
        cmd = "select concat(project_name, '-', project_number) from sample_project_master where created_by='%s'" % username
    else:
        # Unknown role: nothing to list.
        return []
    db = DBConn()
    results = db.execute(cmd)
    return [i[0] for i in results]

def get_analysis_data(username, role, selected_project):
    if not selected_project:
        return {}
    project_number = selected_project.split('-')[-1]
    cmd = """select a.* from analysis_master a,sample_project_master s
        where a.project_id=s.id and s.project_number='%s'""" % project_number
    db = DBConn()
    result = db.execute(cmd, get_all=False)
    return dict(result) if result else {}

def get_user_role(username, password=''):
    db = DBConn()
    if phone_check(username):
        cmd = "select role,status from user_info where tel='%s'" % username
    else:
        cmd = "select role,status from user_info where username='%s'" % username
    if password:
        cmd += " and password='%s'" % password
    result = db.execute(cmd, get_all=False)
    role = result[0] if result else ''
    status = result[1] if result else ''
    return role, status

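# Aside: the lookups above splice user input into SQL with the % operator,
# which is injection-prone. The asyncio DBConn used in other snippets here
# already accepts %(name)s placeholders plus a params argument (see the
# old_rda inserts), so a bound-parameter variant could be sketched as below,
# assuming the synchronous DBConn.execute() also forwards a params argument
# to the driver; these snippets do not show that, so verify first:
def get_user_role_bound(username, password=''):
    cmd = "select role,status from user_info where username=%(username)s"
    params = {'username': username}
    if password:
        cmd += " and password=%(password)s"
        params['password'] = password
    result = DBConn().execute(cmd, params, get_all=False)  # assumed signature
    return (result[0], result[1]) if result else ('', '')
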
def main():
    logger = logging.getLogger('')
    handler = logging.handlers.WatchedFileHandler('/var/log/cfmrda.qrz.log')
    handler.setFormatter(logging.Formatter(
        '%(asctime)s %(name)-12s %(levelname)-8s %(message)s'))
    logger.setLevel(logging.DEBUG)
    logger.addHandler(handler)
    handler.setLevel(logging.DEBUG)
    conf = site_conf()
    _db = DBConn(conf.items('db'))
    yield from _db.connect()
    qrzru = QRZRuLink(asyncio.get_event_loop())
    pfx = Pfx('/usr/local/webcluster/cty.dat')

    @asyncio.coroutine
    def db_write(data):
        yield from _db.execute(
            """
            insert into callsigns_rda (callsign, source, rda)
            values (%(callsign)s, 'QRZ.ru', %(rda)s)""", data)

    callsigns = yield from _db.execute(
        """select distinct hunter from rda_hunter""")
    logging.debug('callsigns list received -- ' + str(len(callsigns)))
    params = []
    cnt = 0
    ru_cnt = 0
    fnd_cnt = 0
    for _cs in callsigns:
        cnt += 1
        cs_pfx = pfx.get(_cs)
        if cs_pfx in ['R', 'R2F', 'R9']:
            m_special = RE_SPECIAL.search(_cs)
            if m_special:
                continue
            ru_cnt += 1
            data = yield from qrzru.query(_cs)
            if data and 'state' in data and data['state']:
                fnd_cnt += 1
                params.append({'callsign': _cs, 'rda': data['state']})
                logging.debug(_cs + ' found')
                if len(params) >= 100:
                    yield from db_write(params)
                    params = []
                    logging.debug('Processed ' + str(cnt) + '/' + str(ru_cnt) + '/'
                                  + str(fnd_cnt) + ' of ' + str(len(callsigns)))
    if params:
        # Flush the last, partially filled batch.
        yield from db_write(params)
    logging.debug('qrz query complete')

def db_setup():
    '''
    PURPOSE: Using the db.DBConn() class, this method creates the
    avatardata database table if it does not already exist.
    '''
    global DBC
    #CREATE DB INSTANCE
    DBC = DBConn('db_config.json')
    #ENSURE TABLE(S) IS/ARE CREATED
    DBC.create()

def modify_base_info(info):
    time = datetime.datetime.now()
    info['update_time'] = time
    username = info.get('username')
    tel = info.get('tel')
    db = DBConn()
    cmd = "select id from user_info where username!='%s' and tel='%s'" % (
        username, tel)
    check_result = db.execute(cmd, get_all=False)
    if check_result:
        return {'data': '', 'errcode': 1,
                'msg': 'This phone number is already registered to another user; please use a different number!'}
    else:
        db.update('user_info', {'username': username}, info)
        return {'data': '', 'errcode': 0, 'msg': 'Update succeeded!'}

def get_analysis_table_data(username, selected_project):
    data = {}
    db = DBConn()
    project_number = selected_project.split('-')[-1]
    project_id = get_project_id_by_num(db, project_number)
    cmd = """select info.* from sample_packet_information info, analysis_master m
        where m.id=info.master_id and m.project_id=%s""" % project_id
    results = db.execute(cmd)
    data['sample_packet_information'] = [dict(i) for i in results]
    cmd = """select info.* from compare_table info, analysis_master m
        where m.id=info.master_id and m.project_id=%s""" % project_id
    results = db.execute(cmd)
    data['compare_table'] = [dict(i) for i in results]
    return data

def export_rankings(conf):
    """rebuild rankings table in db and export top100 to json file for web"""
    logging.debug('export rankings')
    _db = DBConn(conf.items('db'))
    yield from _db.connect()
    yield from _db.execute("select from build_rankings()")
    logging.debug('rankings table rebuilt')
    rankings = yield from _db.execute(
        """
        select rankings_json('_rank < 104') as data
        """, None, False)
    save_json(rankings, conf.get('web', 'root') + '/json/rankings.json')
    logging.debug('export rankings finished')

def show_all_data(username, role='user'):
    data = []
    cmd = """select project_number,project_name,cust_organization,cust_user,status,create_time
        from sample_project_master spm where """
    if role == 'manager':
        cmd += "spm.project_leader='%s'" % username
    elif role == 'user':
        cmd += "spm.created_by='%s'" % username
    else:
        # Unknown role: the query would be left without a where condition.
        return data
    db = DBConn()
    result = db.execute(cmd)
    for i in result:
        temp_dict = dict(i)
        data.append(temp_dict)
    return data

def distct_history(self):
    sql_startid = 'SELECT MIN(id) FROM %s WHERE processed_date IS NULL'
    sql_getid = ('SELECT id FROM %s WHERE id NOT IN '
                 '(SELECT MIN(id) FROM overall_sold_history_increment GROUP BY url) '
                 'AND id>=%s')
    sql_delete = 'DELETE FROM %s WHERE id=%s'
    sql_date = 'UPDATE %s SET processed_date=\'%s\' WHERE processed_date IS NULL'
    current_date = datetime.datetime.now().strftime('%Y-%m-%d')
    with DBConn() as db:
        startid = db.fetchall(sql_startid % TB_INCREM)[0][0]
        if startid is not None:
            # Keep only the first row per url; delete later duplicates.
            id_set = db.fetchall(sql_getid % (TB_INCREM, startid))
            for row in id_set:
                db.execute(sql_delete % (TB_INCREM, row[0]))
                print('[*]Deleting id=%s' % row[0])
            db.execute(sql_date % (TB_INCREM, current_date))
        else:
            print('[*]No new record is found.')

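# Worked example for the dedup above (illustrative rows): with (id, url)
# values (1, 'u1'), (2, 'u1'), (3, 'u2'), the subquery SELECT MIN(id) ...
# GROUP BY url keeps ids {1, 3}, so only id=2 (the later duplicate of 'u1')
# is deleted; the earliest row per url survives.
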
def main():
    logger = logging.getLogger('')
    logger.setLevel(logging.DEBUG)
    conf = site_conf()
    _db = DBConn(conf.items('db'))
    yield from _db.connect()
    with open('/var/www/adxc.test/csv/rda_old_new.csv', 'r') as f_data:
        params = []
        for line in f_data.readlines():
            fields = {}
            fields['old'], fields['new'] = line.strip().split(';')
            if fields['old'] and fields['new']:
                params.append(fields)
    yield from _db.execute("""insert into old_rda values (%(old)s, %(new)s)""",
                           params, progress=True)

def change_password(info):
    username = info['username']
    db = DBConn()
    cmd = "select password from user_info where username='%s'" % username
    result = db.execute(cmd, get_all=False)
    if result and result[0] == info['old_passwd'] \
            and info['new_passwd'] != info['old_passwd']:
        db.update('user_info', {'username': username}, {
            'password': info['new_passwd'],
            'update_time': datetime.datetime.now()
        })
        return {'data': '', 'errcode': 0, 'msg': 'Update succeeded!'}
    elif result and result[0] != info['old_passwd']:
        return {'data': '', 'errcode': 1, 'msg': 'Old password is incorrect!'}
    elif info['new_passwd'] == info['old_passwd']:
        return {'data': '', 'errcode': 1, 'msg': 'The new password must differ from the old password!'}
    else:
        return {'data': '', 'errcode': 1, 'msg': 'User does not exist!'}

def export_msc(conf):
    """export misc db data to json file for web"""
    logging.debug('export misc')
    _db = DBConn(conf.items('db'))
    yield from _db.connect()
    data = {}
    data['qsoCount'] = (yield from _db.execute(
        """
        select count(*) as qso_count from qso;
        """, None, False))
    data['userActivity'] = (yield from _db.execute(
        """
        select json_agg(data) from
            (select json_build_object('callsign',
                    coalesce(qsl_wait.callsign, qsl_today.callsign, email.callsign),
                    'qslWait', qsl_wait, 'qslToday', qsl_today, 'email', email) as data
            from
                (select user_cs as callsign, count(*) as qsl_wait
                from cfm_qsl_qso
                where state is null
                group by user_cs) as qsl_wait
            full join
                (select user_cs as callsign, count(*) as qsl_today
                from cfm_qsl_qso
                where state
                group by user_cs) as qsl_today
                on qsl_wait.callsign = qsl_today.callsign
            full join
                (select user_cs as callsign, count(*) as email
                from cfm_request_qso
                where not sent and user_cs is not null
                group by user_cs) as email
                on coalesce(qsl_wait.callsign, qsl_today.callsign) = email.callsign
            order by coalesce(qsl_wait.callsign, qsl_today.callsign, email.callsign)
            ) as data""", None, False))
    save_json(data, conf.get('web', 'root') + '/json/msc.json')
    logging.debug('export misc finished')

def add_subid(self, *, date=None):
    dbwrapper = DBWrapper()
    if date is not None:
        date_format = re.compile(r'(\d{4})\-(\d{1,2})\-(\d{1,2})')
        match = date_format.match(date)
        if not match:
            print("[!]Error: Optional param of add_subid requires a string like '2017-1-1'")
            raise TypeError
        date_YMD = list(map(int, match.groups()))
        if not (date_YMD[0] > 1900 and 0 < date_YMD[1] <= 12
                and 0 < date_YMD[2] <= 31):
            print("[!]Error: Optional param of add_subid requires a string like '2017-1-1'")
            raise ValueError
    else:
        sql_latestdate = 'SELECT MAX(update_date) FROM %s' % TB_INCREM
        latest_date = dbwrapper.db_fetchall(sql_latestdate)
        date = latest_date[0][0].strftime('%Y-%m-%d')
    sql_gethistory = 'SELECT id,state,postcode,suburb FROM %s WHERE suburb_id is NULL AND update_date=\'%s\''
    latest_history = dbwrapper.db_fetchall(sql_gethistory % (TB_INCREM, date))
    sql_getsubid = 'SELECT id FROM %s WHERE state=\'%s\' AND postcode=\'%s\' AND suburb=\'%s\''
    sql_updatesubid = 'UPDATE %s SET suburb_id=%s WHERE id=%s'
    with DBConn() as db:
        for item in latest_history:
            suburb_id = db.fetchall(
                sql_getsubid % (TB_POSTCODES, item[1], item[2], item[3]))
            print('[*]id=%s, state=%s, postcode=%s, suburb=%s'
                  % (item[0], item[1], item[2], item[3]))
            if len(suburb_id) != 1 or len(suburb_id[0]) != 1:
                print('[*]Error: Fetch suburb_id failed: id=%s, state=%s, postcode=%s, suburb=%s'
                      % (item[0], item[1], item[2], item[3]))
            else:
                suburb_id = suburb_id[0][0]
                db.execute(sql_updatesubid % (TB_INCREM, suburb_id, item[0]))

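# A hedged alternative to the regex-and-range validation above:
# datetime.strptime accepts the same '2017-1-1' style strings (non-padded
# month and day parse fine) and rejects month 13 or day 32 outright; only
# the year > 1900 guard still needs its own check.
import datetime

def parse_ymd(date):
    """Return a datetime.date for a 'YYYY-M-D' string, or raise ValueError."""
    parsed = datetime.datetime.strptime(date, '%Y-%m-%d').date()
    if parsed.year <= 1900:
        raise ValueError('year must be later than 1900: %r' % date)
    return parsed
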
def export_table_info(table_name, project_id, title_list):
    book = Workbook()
    sheet1 = book.worksheets[0]
    # Connect to the database.
    db = DBConn()
    cmd = "select * from {table_name} where project_id = '{project_id}'".format(
        table_name=table_name, project_id=project_id)
    results = db.execute(cmd)
    for i, title in enumerate(title_list):
        sheet1.cell(row=1, column=i + 1).value = title
    for i, result in enumerate(results):
        for j, title in enumerate(title_list):
            # header_map (module-level) maps display titles to column names.
            sheet1.cell(row=i + 2, column=j + 1).value = result.get(header_map[title])
    export_path = os.path.dirname(__file__) + '/static/export/'
    # openpyxl writes xlsx content, so name the file accordingly.
    file_name = table_name + '_' + project_id + '_' \
        + datetime.datetime.now().strftime("%Y%m%d%H%M%S") + '.xlsx'
    full_path_name = os.path.join(export_path, file_name)
    book.save(filename=full_path_name)
    return file_name

def export_recent_uploads(conf):
    """export 20 recent uploaded file batches to json file for web"""
    logging.debug('export recent uploads')
    _db = DBConn(conf.items('db'))
    yield from _db.connect()
    data = yield from _db.execute(
        """
        select json_agg(json_build_object(
            'activators', activators,
            'rda', rda,
            'uploadDate', to_char(max_tstamp, 'DD mon YYYY'),
            'uploadTime', to_char(max_tstamp, 'HH24:MI'),
            'uploadType', upload_type,
            'uploader', uploader)) as data
        from
            (select user_cs as uploader, upload_type,
                max(tstamp) as max_tstamp, array_agg(id) as ids
            from uploads
            where ext_logger_id is null
            group by date(tstamp), user_cs, upload_type
            order by max_tstamp desc
            limit 20) as ru,
            lateral
            (select array_agg(distinct station_callsign) as activators
            from qso
            where upload_id = any(ids)) as acts,
            lateral
            (select array_agg(rda) as rda
            from
                (select json_build_object('rda', array_agg(distinct rda),
                        'id', upload_id) as rda
                from qso
                where upload_id = any(ids)
                group by upload_id) as rdas0) as rdas
        """, None, False)
    save_json(data, conf.get('web', 'root') + '/json/recent_uploads.json')
    logging.debug('export recent uploads finished')

def save_user_info(info):
    time = datetime.datetime.now()
    info['create_time'] = time
    info['update_time'] = time
    info['status'] = 'R'  # 'R' means review
    if len(info['password']) < 10:
        return {'data': '', 'errcode': 3,
                'msg': 'Password is too short; please use at least 10 characters!'}
    db = DBConn()
    name_result = db.execute(
        "select id, status from user_info where username='%s'"
        % info.get('username'), get_all=False)
    tel_result = db.execute(
        "select id, status from user_info where tel='%s'"
        % info.get('tel'), get_all=False)

    def fail_info(sql_result, check_type):
        status = sql_result[1]
        if status == 'R':
            msg = 'This {check_type} was registered earlier and is pending review; please register with a different {check_type}!'
        elif status == 'Y':
            msg = 'Registration failed: this {check_type} is already registered; please use a different {check_type}!'
        else:
            msg = 'Registration failed: this {check_type} already exists!'
        return {
            'data': '',
            'errcode': 2,
            'msg': msg.format(check_type=check_type)
        }

    if name_result:
        ret = fail_info(name_result, 'username')
    elif tel_result:
        ret = fail_info(tel_result, 'phone number')
    else:
        db.insert('user_info', info)
        ret = {'data': '', 'errcode': 0, 'msg': 'Registration succeeded: awaiting review!'}
    return ret

def export_stat(conf):
    """export statistic data to json file for web"""
    logging.debug('export stats')
    _db = DBConn(conf.items('db'))
    yield from _db.connect()
    data = {}
    data['qso by rda'] = (yield from _db.execute(
        """
        select json_object_agg(rda, data) as data from
            (select rda, json_object_agg(band, data) as data from
                (select rda, band, json_object_agg(mode, qso_count) as data from
                    (select count(*) as qso_count, rda, band, mode
                    from qso
                    where upload_id is null or
                        (select enabled from uploads where id=upload_id)
                    group by mode, band, rda) as q0
                group by rda, band) as q1
            group by rda) as q2
        """, None, False))
    for rda_data in data['qso by rda'].values():
        rda_total = {'total': 0}
        for band_data in rda_data.values():
            band_total = 0
            for mode, qso_count in band_data.items():
                band_total += qso_count
                if mode not in rda_total:
                    rda_total[mode] = 0
                rda_total[mode] += qso_count
            band_data['total'] = band_total
            rda_total['total'] += band_total
        rda_data['total'] = rda_total
    save_json(data, conf.get('web', 'root') + '/json/stat.json')
    logging.debug('export stats finished')

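# Shape check for the totals loop above, with illustrative values: a single
# aggregated row (rda 'AB-01', band '20M', mode 'CW', count 5) gains 'total'
# entries at both the band and the rda level. Self-contained sketch:
example = {'AB-01': {'20M': {'CW': 5}}}
for rda_data in example.values():
    rda_total = {'total': 0}
    for band_data in rda_data.values():
        band_total = sum(band_data.values())
        for mode, qso_count in band_data.items():
            rda_total[mode] = rda_total.get(mode, 0) + qso_count
        band_data['total'] = band_total
        rda_total['total'] += band_total
    rda_data['total'] = rda_total
assert example == {'AB-01': {'20M': {'CW': 5, 'total': 5},
                             'total': {'CW': 5, 'total': 5}}}
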
#!/usr/bin/python3
#coding=utf-8
import json

from db import DBConn
from conf import CONF

DB = DBConn(CONF.items('db'))
DB.connect()

feeders_data = DB.execute("""
    select id, props
    from devices
    where device_type_id = 3""")
for feeder_data in feeders_data:
    for timer in feeder_data['props'][2]:
        timer.append(0)
    feeder_data['props'] = json.dumps(feeder_data['props'])
    DB.execute(
        """
        update devices
        set props = %(props)s
        where id = %(id)s""", feeder_data)

def admin_save_user_info(username, status):
    db = DBConn()
    db.update('user_info', {'username': username}, {'status': status})

def save_status(project_number, status):
    db = DBConn()
    db.update('sample_project_master', {'project_number': project_number},
              {'status': status})

def __init__(self, db_conf):
    self.db = DBConn(db_conf['host'], db_conf['user'], db_conf['pwd'],
                     db_conf['db'])

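# Minimal usage sketch for the constructor above. The enclosing class name is
# not shown in the snippet, so Store below is a hypothetical stand-in; the
# dict keys mirror __init__, and loading them from db_config.json follows the
# pattern in db_setup() further up.
import json

with open('db_config.json') as f:
    db_conf = json.load(f)
# Expected shape: {"host": ..., "user": ..., "pwd": ..., "db": ...}
store = Store(db_conf)  # Store is a hypothetical class name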