def mongo_update():
    """Refresh MongoDB backup statistics for one host and store them in `virtuals`.

    Reads the target host's WAN IP from the posted form, collects backup
    file count/name/size/finish-time over SSH, computes the backup duration
    from the configured start time, updates the host's row, and re-renders
    the monitor page.
    """
    host = request.form.get('ip')
    # Remote shell one-liners used to inspect the backup directory.
    back_num = 'ls /backup/mongodata/backup/full |wc -l'
    back_name = 'ls -rt /backup/mongodata/backup/full|tail -1'
    back_size = "ls -lrt --block-size=M /backup/mongodata/backup/full|tail -1|awk '{print $5}'"
    back_endtime = "ls -lrt /backup/mongodata/backup/full|tail -1|awk '{print $(NF-1)}'"
    try:
        where = "wan_ip='%s'" % host
        # Default the backup start time to 05:05 when none is configured.
        # (Hoisted: the original issued this same query twice.)
        row = json.loads(db.get_one(['back_starttime'], where, 'virtuals'))
        if row['back_starttime'] is None:
            back_startTime = '05:05'
            db.update({'back_startTime': back_startTime}, where, 'virtuals')
        else:
            back_startTime = row['back_starttime']
        raw_endtime = util.paramiko_command(host, back_endtime)
        # BUGFIX: these values are "HH:MM" (e.g. '05:05' means 5:05 AM), so
        # parse with %H:%M — the original used %M:%S, reading them as
        # minutes:seconds.
        back_startTime = datetime.datetime.strptime(back_startTime, "%H:%M")
        end_value = raw_endtime
        if end_value == 1:  # paramiko_command signals failure with 1
            end_value = '00:00'
        back_endTime = datetime.datetime.strptime(end_value, "%H:%M")
        seconds = (back_endTime - back_startTime).seconds
        # BUGFIX: convert elapsed seconds into hours and minutes. The
        # original divided by 60 (yielding minutes) but labeled it hours,
        # and used the raw seconds remainder as "minutes".
        hours = seconds // 3600
        minutes = (seconds % 3600) // 60
        back_used_time = '%s小时%s分' % (hours, minutes)
        refresh_time = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        data = {'back_num': util.paramiko_command(host, back_num),
                'back_name': util.paramiko_command(host, back_name),
                'back_size': util.paramiko_command(host, back_size),
                'back_endtime': raw_endtime,
                'back_used_time': back_used_time,
                'refresh_time': refresh_time}
        # Sizes of 4+ digits in megabytes (e.g. "1234M") are shown in GB.
        if len(str(data['back_size'])) >= 5:
            data['back_size'] = str(round(float(data['back_size'][0:-1]) / 1024, 2)) + 'G'
        db.update(data, where, 'virtuals')
    except Exception:
        # Keep the page rendering even if SSH or DB access fails.
        traceback.print_exc()
    return render_template('monitor/mongo_monitor.html', display = dis)
def server():
    # Build the server inventory, enriching each row with its virtual-machine
    # count and names, and translating cabinet/IDC ids to display names.
    server_columns = ['id', 'hostname', 'lan_ip', 'wan_ip', 'cpu', 'mem', 'disk', 'virtual_nums', 'virtual_names', 'idc_id', 'cabinet_id']
    servers = db.get_list(server_columns, 'server')
    cabinets = json.loads(cabinetinfo())
    idcs = json.loads(idcinfo())
    for server in servers:
        # How many virtuals live on this physical server.
        virtual_nums_columns = ['count(*)']
        where = 'server_id=' + str(server['id'])
        virtual_nums = json.loads(db.get_one(virtual_nums_columns, where, 'virtuals'))['count(*)']
        server['virtual_nums'] = virtual_nums
        virtual_names_columns = ['qufu']
        try:
            # db.get_one may return a single dict or a list of rows depending
            # on how many virtuals match; handle both shapes.
            x = json.loads(db.get_one(virtual_names_columns, where, 'virtuals'))
            if isinstance(x, dict):
                names = x['qufu']
            elif isinstance(x, list):
                virtual_names = [ ",".join(i) for i in json.loads(db.get_one(virtual_names_columns, where, 'virtuals'))]
                names = ",".join(virtual_names)
            else:
                # NOTE(review): `names` is left unbound on this path (on the
                # first iteration), so the assignment below raises NameError,
                # which the blanket except swallows.
                print 'not correct type'
            server['virtual_names'] = names
        except BaseException,e:
            print e
        # Replace numeric cabinet/IDC ids with their human-readable names.
        for cabinet in cabinets:
            if cabinet['id'] == server['cabinet_id']:
                server['cabinet_id'] = cabinet['name']
        for idc in idcs:
            if idc['id'] == server['idc_id']:
                server['idc_id'] = idc['name']
def item_info(self, typeid):
    """Populate self.items[typeid] with item data and its T1 blueprint data.

    Looks up the item row via the g_item query descriptor and the T1 BPO row
    via g_t1bpo, caching both on self.items. Returns True on success, False
    when no blueprint exists for the item.
    """
    try:
        # NOTE(review): (typeid) is NOT a tuple — it is just typeid. This
        # works only if get_one accepts a scalar parameter; use (typeid,)
        # if the DB-API layer requires a sequence. TODO confirm.
        item_row = get_one(g_item['sql'], (typeid))
        self.items[typeid] = {
            'typeName': item_row[g_item['typeName']],
            'typeID': item_row[g_item['typeID']],
            'productionTime': item_row[g_item['productionTime']],
            'productivityModifier': item_row[g_item['productivityModifier']],
            'wasteFactor': item_row[g_item['wasteFactor']],
            'maxProductionLimit': item_row[g_item['maxProductionLimit']],
            'chance': item_row[g_item['chance']],
            'categoryName': item_row[g_item['categoryName']],
        }
        item_row = get_one(g_t1bpo['sql'], (typeid))
        self.items[typeid]['t1bpo'] = {
            'typeID': item_row[g_t1bpo['typeID']],
            'blueprintTypeID': item_row[g_t1bpo['blueprintTypeID']],
            'researchCopyTime': item_row[g_t1bpo['researchCopyTime']],
            'maxProductionLimit': item_row[g_t1bpo['maxProductionLimit']],
        }
        # Only include the blueprint name when name resolution is enabled.
        if self.names:
            self.items[typeid]['t1bpo']['typeName'] = item_row[g_t1bpo['typeName']]
        return True
    except LookupError:
        # Ignore this error. Can't find the blueprint for this item in the db.
        app.logger.warning('Unable to locate blueprint for item <%d>' % (typeid))
        return False
def get_idc_info_date(now_year):
    """Build chart series for *now_year* from idc_bill: one series of total
    monthly spend followed by one 12-month series per IDC."""
    # Monthly totals (sum per date) and the raw per-IDC rows for the year.
    rt_list_all = db.get_one(['sum(combined)'],"date like '%s%s' group by date order by date" % (now_year,'%'),'idc_bill', list=True)
    rt_list = db.get_one(['date','idcname','combined'],"date like '%s%s' order by date" % (now_year,'%'),'idc_bill', list=True)
    # Total expenditure per month.
    rt = []
    for x in rt_list_all:
        rt.append(float(x[0]))
    all_date = []
    all_date.append({'data': rt ,'name': '总支出'})
    # Per-IDC monthly expenditure: rs accumulates one {'name', 'data'[12]}
    # entry per IDC; rows are (date, idcname, combined).
    rs = []
    for x in rt_list:
        if len(rs) != 0:
            for y in rs:
                if y['name'] == x[1]:
                    # Known IDC: slot the amount into its month (1-based).
                    months = x[0].split('-')[1]
                    y['data'][int(months)-1] = (float(x[2]))
                    status = 0
                    break
            else:
                # for/else: no existing entry matched this IDC name.
                status = 1
        else:
            status = 1
        if status == 1:
            # First row for this IDC: start a zeroed 12-month series.
            rs.append({"name": x[1], 'data': [0] * 12})
            num = x[0].split('-')[1]
            rs[-1]['data'][int(num)-1] = float(x[2])
    # Total series first, then the per-IDC series.
    return all_date + rs
def gm_update(): sql = "create table if not exists gm_update(" \ "id int auto_increment PRIMARY key not null," \ "filename varchar(50) not null comment '更新文件名'," \ "update_time varchar(30) not null comment '更新时间'," \ "status varchar(10) not null comment '更新状态'," \ "username varchar(20) not null comment '更新操作人') " \ "ENGINE=InnoDB AUTO_INCREMENT=69 DEFAULT CHARSET=utf8 COMMENT='GM更新说明表'" try: db.get_one(['id'], 'id = "1"', 'gm_update', list=True) except BaseException, e: print e db.createTable(sql)
def code_release(): sql = "create table if not exists codePublish(" \ "id int auto_increment PRIMARY key not null," \ "version int(10) not null comment '代码版本号'," \ "introduction varchar(100) not null comment '发布说明'," \ "username varchar(20) not null comment '发布人'," \ "date varchar(30) not null comment '发布日期') " \ "ENGINE=InnoDB AUTO_INCREMENT=69 DEFAULT CHARSET=utf8 COMMENT='代码发布版本说明'" try: db.get_one(['id'], 'id = "1"', 'codePublish', list=True) except BaseException, e: print e db.createTable(sql)
def add_new_before_select(params):
    """Pre-insert lookup for an existing idc_bill row matching (idcname, date).

    NOTE(review): the query result `rt_list` is never inspected — the
    function unconditionally returns (True, '进行入库操作'), so duplicates
    are not actually prevented. Confirm whether the lookup should gate the
    insert.
    """
    idcname = params.get('idcname')
    date = params.get('date')
    rt_list = db.get_one(['*'],"idcname = '%s' and date = '%s'" % (idcname,date),'idc_bill', list=True)
    return True,'进行入库操作'
def cabinetdelete():
    """Delete a cabinet row by form-posted id, logging actor and cabinet name."""
    cab_id = request.form.get('id')
    # NOTE(review): id is concatenated into the WHERE clause — confirm it is trusted.
    where = 'id=' + cab_id
    cabinet = json.loads(db.get_one(['id', 'name'], where, 'cabinet'))
    util.WriteLog('infoLogger').warning('%s delete cabinet %s' % (session['username'], cabinet['name']))
    return json.dumps(db.delete(where, 'cabinet'))
def idcdelete():
    """Delete an IDC row by form-posted id, logging actor and IDC name."""
    idc_id = request.form.get('id')
    where = "id=" + idc_id
    # Fetch the name first so the audit log can record what was removed.
    idc = json.loads(db.get_one(['id', 'name'], where, 'idc'))
    util.WriteLog('infoLogger').warning('%s delete idc %s' % (session['username'], idc['name']))
    return json.dumps(db.delete(where, 'idc'))
def virtualdel():
    """Delete a virtual-machine row by form-posted id, logging its hostname."""
    vm_id = request.form.get('id')
    where = "id=" + vm_id
    # Look the row up before deleting so the log can name the host.
    hostname = json.loads(db.get_one(['id', 'hostname'], where, 'virtuals'))
    util.WriteLog('infoLogger').warning('%s delete virtual hostname %s' % (session['username'], hostname['hostname']))
    return json.dumps(db.delete(where, 'virtuals'))
def serverdelete():
    """Delete a physical-server row by form-posted id, logging its hostname."""
    srv_id = request.form.get('id')
    where = 'id=' + srv_id
    record = json.loads(db.get_one(['id', 'hostname'], where, 'server'))
    util.WriteLog('infoLogger').warning('%s delete server hostname %s' % (session['username'], record['hostname']))
    return json.dumps(db.delete(where, 'server'))
def get_new_idcinfo(_local_date=None):
    """Return (date, rows) for the idc_bill records of *_local_date*.

    Each row is returned as a dict keyed by the idc_bill column names.

    BUGFIX: the original signature was `_local_date=month_get()`, which is
    evaluated ONCE at import time, freezing the default date for the life of
    the process. Defaulting to None and resolving at call time keeps the
    default current; explicit callers are unaffected.
    """
    if _local_date is None:
        _local_date = month_get()
    colloens = ('id', 'date', 'idcname', 'cabinet', 'cabinet_price',
                'host_amount', 'bandwidth', 'bandwidth_price',
                'bandwidth_amount', 'combined', 'status', 'info')
    rt_list = db.get_one(['*'], "date = '%s'" % (_local_date,), 'idc_bill', list=True)
    # Zip each raw tuple with the column names so callers get dicts.
    rt = [dict(zip(colloens, row)) for row in rt_list]
    return _local_date, rt
def stat_export(collections,tablename,ptname,date):
    """Export rows for *ptname* from *tablename* as dicts keyed by *collections*.

    Only rows from roughly the last two years (720 days) are loaded.
    """
    cutoff = datetime.date.today() + datetime.timedelta(days=-720)
    raw_rows = db.get_one(['*'], "ptname = '%s' and date >= '%s' order by date" % (ptname, cutoff), tablename, list=True)
    return [dict(zip(collections, row)) for row in raw_rows]
def users_delete():
    """Delete a user by id (from the query string), logging who removed whom."""
    id = request.args.get('id')
    columns=['id', 'name']
    where = "id=" + id
    # NOTE(review): unlike sibling handlers, this result is not json.loads-ed;
    # assumes db.get_one returns a dict-like object here — verify.
    user = db.get_one(columns,where,'users')
    if db.user_del(id):
        util.WriteLog('infoLogger').warning('%s delete user %s' % (session['username'], user['name']))
        return redirect('/users/')
    return 'del user failed!'
def chpwdoneself():
    """Change the logged-in user's own password and email them the outcome."""
    userinfo = request.form.to_dict()
    result = db.change_pass(session['username'],userinfo['username'], userinfo['oldpasswd'], userinfo['newpasswd'])
    user_email = json.loads(db.get_one(['email'], 'name="%s"' % session['username'], 'users'))['email']
    if result['code'] == 1:
        # Failure: notify the user by mail.
        send_mail.delay([user_email], '个人密码修改失败', result['errmsg'])
    elif result['code'] == 0:
        # Success: notify and audit-log the change.
        send_mail.delay([user_email], '个人密码修改成功', result['errmsg'])
        util.WriteLog('infoLogger').info('%s changed his password' % (session['username']))
    return json.dumps(result)
def backupServer_monitor_cron():
    """Cron job: collect per-host Mongo backup stats from the central backup
    server and upsert them into `backupServerMonitor`.

    For every virtual host, runs `ls`-based probes over SSH against its
    backup directory, then creates/updates its monitor row (collapsing
    duplicate rows if any exist).
    """
    backup_host = '121.201.72.22'
    ip_list = db.get_list(['wan_ip','qufu','hostname'], 'virtuals')
    for ip in ip_list:
        dirname = '/data/mongobackup/' + ip['wan_ip']
        ip['backNum'] = util.paramiko_command(backup_host, 'ls %s|wc -l' % dirname)
        ip['backName'] = util.paramiko_command(backup_host, 'ls -rt %s|tail -1' % dirname)
        ip['backSize'] = util.paramiko_command(backup_host, "ls -lrt --block-size=M %s |tail -1|awk '{print $5}'" % dirname)
        # Sizes of 4+ digits in megabytes (e.g. "1234M") are shown in GB.
        if len(ip['backSize']) >= 5:
            ip['backSize'] = str(round(float(ip['backSize'][0:-1]) / 1024, 2)) + 'G'
        where = "wan_ip='%s'" % str(ip['wan_ip'])
        # Hoisted: the original issued this identical db.get_one query up to
        # three times per host; run it once and branch on the row count.
        existing = len(json.loads(db.get_one(['wan_ip'], where, 'backupServerMonitor')))
        if existing > 1:
            # Duplicate rows — wipe them and recreate a single fresh record.
            db.delete(where, 'backupServerMonitor')
            db.create(ip, 'backupServerMonitor')
        elif existing == 1:
            db.update(ip, where, 'backupServerMonitor')
        elif existing == 0:
            db.create(ip, 'backupServerMonitor')
def innerUpdate():
    """GET: return one innerServer row as JSON. POST: apply form edits to it."""
    if request.method == 'GET':
        row_id = request.args.get('id')
        columns = ['id', 'hostname', 'ip', 'cpu', 'mem', 'disk', 'physicalHost', 'user']
        return json.dumps(db.get_one(columns, 'id=' + row_id, 'innerServer'))
    # POST: the form carries the row id plus the fields to update.
    form = request.form.to_dict()
    row_id = form.pop('id')
    return json.dumps(db.update(form, 'id=' + row_id, 'innerServer'))
def serverupdate():
    """GET: fetch a server row for editing. POST: validate and persist edits."""
    server_columns = ['id', 'hostname', 'lan_ip', 'wan_ip', 'cpu', 'mem', 'disk', 'virtual_nums', 'virtual_names', 'idc_id', 'cabinet_id']
    if request.method == 'GET':
        row_id = request.args.get('id')
        return json.dumps(db.get_one(server_columns, "id=" + row_id, 'server'))
    form = request.form.to_dict()
    # Reject the update if any submitted field is blank.
    if any(v == '' for v in form.values()):
        return json.dumps({'code': 1, 'errmsg': '不能为空'})
    util.WriteLog('infoLogger').warning('%s update server_id %s' % (session['username'], form['id']))
    return json.dumps(db.update(form, "id=" + form['id'], 'server'))
def virtualupdate():
    """GET: fetch a virtual-machine row for editing. POST: validate and persist edits."""
    if request.method == 'GET':
        columns = ['id', 'qufu', 'platform', 'serverid', 'hostname', 'lan_ip', 'wan_ip', 'cpu', 'mem', 'disk', 'master_slave', 'server_id']
        row_id = request.args.get('id')
        return json.dumps(db.get_one(columns, 'id=' + row_id, 'virtuals'))
    form = request.form.to_dict()
    # Every field is required; bail out on the first blank one.
    if any(v == '' for v in form.values()):
        return json.dumps({'code': 1, 'errmsg': '不能为空'})
    util.WriteLog('infoLogger').warning('%s update virtual_id %s' % (session['username'], form['id']))
    return json.dumps(db.update(form, "id=" + form['id'], 'virtuals'))
def idcupdate():
    """GET: fetch an IDC row for editing. POST: validate and persist edits."""
    idc_columns = ['id', 'name', 'address', 'adminer', 'phone', 'cabinet_num', 'switch_num']
    if request.method == 'GET':
        row_id = request.args.get('id')
        return json.dumps(db.get_one(idc_columns, "id=" + row_id, 'idc'))
    form = request.form.to_dict()
    # All fields are mandatory.
    if any(v == '' for v in form.values()):
        return json.dumps({'code': 1, 'errmsg': '不能为空'})
    util.WriteLog('infoLogger').warning('%s update idc_id %s' % (session['username'], form['id']))
    return json.dumps(db.update(form, "id=" + form['id'], 'idc'))
def idcupdate():
    """Handle the IDC edit form: serve the row on GET, validate and save on POST."""
    idc_columns = [
        'id', 'name', 'address', 'adminer', 'phone', 'cabinet_num',
        'switch_num'
    ]
    if request.method == 'GET':
        return json.dumps(
            db.get_one(idc_columns, "id=" + request.args.get('id'), 'idc'))
    form = request.form.to_dict()
    for value in form.values():
        # All fields are mandatory.
        if value == '':
            return json.dumps({'code': 1, 'errmsg': '不能为空'})
    util.WriteLog('infoLogger').warning(
        '%s update idc_id %s' % (session['username'], form['id']))
    return json.dumps(db.update(form, "id=" + form['id'], 'idc'))
def virtualupdate():
    """Handle the virtual-machine edit form: serve the row on GET, save on POST."""
    if request.method == 'GET':
        columns = [
            'id', 'qufu', 'platform', 'serverid', 'hostname', 'lan_ip',
            'wan_ip', 'cpu', 'mem', 'disk', 'master_slave', 'server_id'
        ]
        return json.dumps(
            db.get_one(columns, 'id=' + request.args.get('id'), 'virtuals'))
    form = request.form.to_dict()
    for value in form.values():
        # Every field is required.
        if value == '':
            return json.dumps({'code': 1, 'errmsg': '不能为空'})
    util.WriteLog('infoLogger').warning(
        '%s update virtual_id %s' % (session['username'], form['id']))
    return json.dumps(db.update(form, "id=" + form['id'], 'virtuals'))
def serverupdate():
    """Handle the server edit form: serve the row on GET, validate and save on POST."""
    server_columns = [
        'id', 'hostname', 'lan_ip', 'wan_ip', 'cpu', 'mem', 'disk',
        'virtual_nums', 'virtual_names', 'idc_id', 'cabinet_id'
    ]
    if request.method == 'GET':
        return json.dumps(
            db.get_one(server_columns, "id=" + request.args.get('id'),
                       'server'))
    form = request.form.to_dict()
    for value in form.values():
        # Blank fields are rejected outright.
        if value == '':
            return json.dumps({'code': 1, 'errmsg': '不能为空'})
    util.WriteLog('infoLogger').warning(
        '%s update server_id %s' % (session['username'], form['id']))
    return json.dumps(db.update(form, "id=" + form['id'], 'server'))
def roolBack():
    """Roll the deployed code back to the SVN revision recorded for verId.

    Exports the requested revision into a clean directory, records a new
    codePublish entry for the rollback, rsyncs the export to the web host
    and restarts the application remotely.
    """
    verId = request.form.get('verId')
    columns = ['version']
    where = 'id=%s' % verId
    version = json.loads(db.get_one(columns, where, 'codePublish'))['version']
    client = pysvn.Client()
    client.callback_get_login = getLogin
    # Start from a clean export directory.
    if os.path.exists(svnRoolBackPath):
        shutil.rmtree(svnRoolBackPath)
    rv = pysvn.Revision(pysvn.opt_revision_kind.number, version)
    client.export(svnurl, svnRoolBackPath, revision=rv)
    date = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    # Record the rollback itself as a new release entry.
    data = dict({'version': version, 'introduction': '回滚到版本', 'username': session['username'], 'date': date})
    db.create(data, 'codePublish')
    cmd = 'rsync -avzLPq --delete %s/* [email protected]:/data/oop/cmdb' % svnRoolBackPath
    subprocess.call(cmd, shell=True)
    paramiko_command('tools.uc.ppweb.com.cn', 'cd /data/oop;bash run.sh')
    # Strip the "代码发布" menu entry from the deployed base template.
    paramiko_command('tools.uc.ppweb.com.cn', "sed -i '/代码发布/d' /data/oop/cmdb/app/templates/base.html")
    return json.dumps({'code': 0, 'errmsg': '回滚成功'})
def cabinetupdate():
    """GET: fetch a cabinet row. POST: validate, map IDC name back to id, persist."""
    cabinet_columns = ['id', 'name', 'idc_id', 'bandwidth', 'money', 'u_num', 'server_num', 'switch_num']
    if request.method == 'GET':
        row_id = request.args.get('id')
        return json.dumps(db.get_one(cabinet_columns, "id=" + row_id, 'cabinet'))
    form = request.form.to_dict()
    if any(v == '' for v in form.values()):
        return json.dumps({'code': 1, 'errmsg': '不能为空'})
    where = "id=" + form['id']
    # The form posts the IDC's display name; translate it back to its id.
    idc_lookup = dict((idc['id'], idc['name']) for idc in db.get_list(['id', 'name'], 'idc'))
    for idc_id, idc_name in idc_lookup.items():
        if form['idc_id'] == idc_name:
            form['idc_id'] = idc_id
    util.WriteLog('infoLogger').warning('%s update cabinet_id %s' % (session['username'], form['id']))
    return json.dumps(db.update(form, where, 'cabinet'))
def data_trend_data():
    """Return JSON [(epoch_ms, revenue), ...] of daily pay_money sums,
    optionally filtered to one platform via the 'ptname' parameter."""
    params = request.args if request.method == 'GET' else request.form
    platform = params.get('ptname','all')
    data_trend_column = ['data_date','sum(pay_money)']
    # 'all' aggregates every platform; otherwise group just that platform.
    if platform == 'all':
        where = 'id > 0 group by data_date'
    else:
        where = 'platform = ' + str(platform) + ' group by data_date'
    dates = db.get_one(data_trend_column, where, 'data_day', list=True)
    data_day_money = []
    for x in dates:
        # NOTE(review): all three branches compute the same value (amount
        # divided by 10); the per-platform cases appear to be dead code —
        # confirm whether different scaling was intended.
        if platform == '13':
            data_day_money.append((int(time.mktime(x[0].timetuple()) * 1000),float(x[1]/10)))
        elif platform == '27':
            data_day_money.append((int(time.mktime(x[0].timetuple()) * 1000), float(x[1])/10))
        else:
            data_day_money.append((int(time.mktime(x[0].timetuple()) * 1000), float(x[1])/10))
    return json.dumps(data_day_money)
def kai_gua():
    """Collect cheat-ban ("开挂") records for a given date across all game
    hosts and build display rows for the report table."""
    columns_gua_headers = [ '开挂时间', '大区', 'UID', '昵称', '封禁天数', '封禁原因' ]
    date = request.args.get('date')
    # Convert YYYYMMDD into YYYY_MM_DD to address the per-day log table.
    year = date[0:4]
    month = date[4:6]
    day = date[6:8]
    date = year + '_' + month + '_' + day
    table = 'cheat_log_' + date
    host_column = ['wan_ip']
    host_list = db.get_list(host_column, 'virtuals')
    hosts = []
    [ hosts.append(str(host['wan_ip'])) for host in host_list ]
    gua_column = ['platform_id', 'server_id', 'uid', 'nick_name', 'param']
    rows = []
    for host in hosts:
        try:
            # reason=3 marks cheat bans in each host's bh_log database.
            gua_list = db.get_list(gua_column, table, where='reason=3', host=host, user='******', passwd='cygamebh2014', db='bh_log')
        except BaseException,e:
            # NOTE(review): on failure gua_list keeps the PREVIOUS host's
            # rows (or is unbound on the first host) — the loop below still
            # runs with stale data.
            print e
        for data in gua_list:
            gua_lists = []
            # param is a '|'-separated record: field 8 holds the ban start
            # timestamp (ms, truncated to seconds below) and field 10 the
            # ban length in minutes (converted to days).
            param = data['param'].split('|')
            start_kaigua = time.localtime(float(param[8].split('=')[1].replace(',','')[:-3]))
            start_kaigua = time.strftime('%Y-%m-%d %H:%M:%S', start_kaigua)
            long_fengjin = float(param[10].split('=')[1].replace(',','')) / 60 / 24
            data['start_kaigua'] = start_kaigua
            data['long_fengjin'] = long_fengjin
            gua_lists.append(data['start_kaigua'])
            platform_id = data['platform_id']
            server_id = data['server_id']
            # Translate (platform, server) into the human-readable区服 name.
            where = 'platform=' + str(platform_id) + ' and serverid=' + str(server_id)
            qufu = json.loads(db.get_one(['qufu'], where, 'virtuals'))['qufu']
            gua_lists.append(qufu)
            gua_lists.append(data['uid'])
            gua_lists.append(data['nick_name'])
            gua_lists.append(data['long_fengjin'])
            gua_lists.append('透视')
            rows.append(gua_lists)
def enrich_contact(company_load_list, contact_load_list, company_load_colnames):
    """Fill missing name/address fields on each contact from its source company.

    For every contact row, looks up the company by Source_Company_ID (first in
    the in-memory load list, then in the Company table via db.get_one) and
    copies each listed field over only when the contact's value is missing and
    the company's value is present. Contacts with no resolvable company get
    Load=False. Returns the (mutated) contact DataFrame.
    """
    # Fields copied company -> contact when the contact's cell is NaN.
    fields = ('Company_Name', 'Company_Name_CN', 'Billing_Address',
              'Billing_Address_CN', 'District', 'District_CN', 'City',
              'City_CN', 'State', 'State_CN', 'Postal_Code', 'Country')
    for index, contact in contact_load_list.iterrows():
        company_id = contact['Source_Company_ID']
        if company_id is np.nan:
            contact_load_list.loc[index, 'Load'] = False
            continue
        company = company_load_list[company_load_list['Source_ID'] == company_id]
        if company.empty:
            # Fall back to the database when the company isn't in this batch.
            company = db.get_one('Source_ID', 'Company', company_id, company_load_colnames)
            if company.empty:
                contact_load_list.loc[index, 'Load'] = False
                continue
        # BUGFIX: District/District_CN previously used
        # `contact_load_list[index, col]` (missing .loc), which created bogus
        # tuple-keyed COLUMNS instead of setting the cell. All fields now go
        # through .loc uniformly.
        for col in fields:
            if pd.isna(contact[col]) and pd.notna(company[col]).all():
                contact_load_list.loc[index, col] = company[col].values[0]
    return contact_load_list
def remove_holiday(id):
    """Remove the holidays row whose id matches *id*."""
    # NOTE(review): id is interpolated into the SQL — ensure callers pass trusted values.
    return db.get_one(connection, "delete from holidays where id=%s" % id)
def staid_to_sysid(station):
    """Map a station id to the solar system id that hosts it."""
    params = {'station': station}
    result = get_one(
        'select solarSystemId from staStations where stationID = %(station)s',
        params)
    return result[0]
def get_stations(id):
    """Fetch the station row matching *id*."""
    # NOTE(review): id goes straight into the SQL string — injection risk.
    return db.get_one(connection, "select * from station where id=%s" % id)
def remove_leave(id):
    """Delete the leave_types row whose id matches *id*."""
    # NOTE(review): id is interpolated into the SQL — ensure it is trusted.
    return db.get_one(connection, "delete from leave_types where id=%s" % id)
def get_department(id):
    """Fetch the departments row matching *id*."""
    # NOTE(review): id goes straight into the SQL string — injection risk.
    return db.get_one(connection, "select * from departments where id=%s" % id)
def get_role(id):
    """Fetch the role row matching *id*."""
    # NOTE(review): id goes straight into the SQL string — injection risk.
    return db.get_one(connection, "select * from role where id=%s" % id)
def enrich_contact(company_load_list, contact_load_list, company_load_colnames):
    """Fill missing name/address fields on each contact from its source company.

    Looks each contact's company up by Source_Company_ID (in the in-memory
    load list first, then the Company table via db.get_one) and copies each
    listed field over only when the contact's value is missing and the
    company's value is present. Contacts with no resolvable company are
    marked Load=False. Returns the (mutated) contact DataFrame.
    """
    # Fields copied company -> contact when the contact's cell is NaN.
    fields = ('Company_Name', 'Company_Name_CN', 'Billing_Address',
              'Billing_Address_CN', 'District', 'District_CN', 'City',
              'City_CN', 'State', 'State_CN', 'Postal_Code', 'Country')
    for index, contact in contact_load_list.iterrows():
        company_id = contact['Source_Company_ID']
        if company_id is np.nan:
            contact_load_list.loc[index, 'Load'] = False
            continue
        company = company_load_list[company_load_list['Source_ID'] == company_id]
        if company.empty:
            # Fall back to the database when the company isn't in this batch.
            company = db.get_one('Source_ID', 'Company', company_id, company_load_colnames)
            if company.empty:
                contact_load_list.loc[index, 'Load'] = False
                continue
        # BUGFIX: District/District_CN previously used
        # `contact_load_list[index, col]` (missing .loc), which created bogus
        # tuple-keyed COLUMNS instead of setting the cell. All fields now go
        # through .loc uniformly.
        for col in fields:
            if pd.isna(contact[col]) and pd.notna(company[col]).all():
                contact_load_list.loc[index, col] = company[col].values[0]
    return contact_load_list
def approved_pending_leave(staff_id, leave_type_id):
    """Fetch the approved/pending leave summary row for one staff member and leave type."""
    # NOTE(review): values are interpolated into the SQL — injection risk.
    return db.get_one(
        connection,
        "select * from approved_pending_leave_view where Staff_id = '%s' and leave_id = %s"
        % (staff_id, leave_type_id))
def get_leave_total_duration(leave_type_id):
    """Return the configured duration for *leave_type_id* from leave_days_types."""
    sql = ("select duration from leave_days_types where leave_id = '"
           + str(leave_type_id) + "'")
    return db.get_one(connection, sql)
def varda_diena(vards):
    """Look up name-day records for *vards* (parameterized query)."""
    return get_one("SELECT * FROM vardadienas WHERE vards=%s ORDER BY vards",
                   (vards, ))
def decryptor_category(self, typeid):
    """Store the decryptor category value for *typeid* on self.items.

    Assumes self.items[typeid] already exists (e.g. populated by item_info).
    """
    # NOTE(review): (typeid) is not a tuple, just typeid — fine only if
    # get_one accepts a scalar parameter; otherwise use (typeid,). TODO confirm.
    row = get_one(g_decryptor_category['sql'], (typeid))
    self.items[typeid]['decryptor_category'] = row[g_decryptor_category['valueInt']]
def view_holidays(id):
    """Fetch the holidays row matching *id*."""
    # NOTE(review): id goes straight into the SQL string — injection risk.
    return db.get_one(connection, "select * from holidays where id=%s" % id)
float(time.clock() - src_start_time), { 'errorMessage': 'SOURCE ERROR', 'crawlerName': 'credible crawler' }) continue error_articles = [] prev_uuid = '' for article in source.articles: url_uuid = get_uuid(clean_url(article.url)) article.id = url_uuid if prev_uuid == url_uuid: continue if get_one(url_uuid, 'errorArticles') or get_one( url_uuid, 'articles'): print('Skipped: ' + article.url) error_articles.append(article.id) prev_uuid = url_uuid source.articles = [ a for a in source.articles if a.id not in error_articles ] if PY_ENV == 'development': print('Proxy: ' + proxy['http']) print('User-Agent: ' + config.browser_user_agent) print('\nCrawler found new ' + str(len(source.articles)) + ' articles in http://' + source.domain + '\n')
def find_all(username):
    """Return a staff member's personal record joined with the line manager's
    full name and email."""
    # NOTE(review): staff number is concatenated into the SQL — injection risk.
    sql = ("select p1.*,concat(p2.first_name ,' ',p2.middle_name , ' ', p2.last_name ) line_manager_name,p2.email line_manager_email from personal_data p1 left join personal_data p2 on p1.line_manager = p2.id where p1.staff_number = '"
           + str(username) + "'")
    return db.get_one(connection, sql)
def get_leave_id(leave):
    """Resolve a leave type's name to its id."""
    # NOTE(review): leave name is interpolated into the SQL — injection risk.
    return db.get_one(connection, "Select id from leave_types where name='%s'" % leave)
def approve_reject(status, id, comment):
    """Set a leave request's status and manager comment, then return the
    joined staff/manager/request row used for the notification email.

    NOTE(review): status and comment are interpolated straight into SQL —
    injection risk if they originate from user input.
    """
    query = "update history set status='%s', Approved_Rejected_date = now(), managers_comment='%s' where id= %s" % ( status, comment, id)
    db.insert(connection, query)
    # Re-read the request joined with the requester and their line manager.
    query = "select p.*, p2.email line_manager_email, h.Approved_Rejected_date, h.Status, h.start_date ,h.end_date from personal_data p, history h, personal_data p2 where p.staff_number = h.staff_id and h.id = %s and p2.id = p.line_manager" % id
    return db.get_one(connection, query)
def get_personal_email(staff_id):
    """Return the email address stored for *staff_id*."""
    # NOTE(review): staff_id is interpolated into the SQL — injection risk.
    return db.get_one(connection, " select email from personal_data where staff_number=%s" % staff_id)
def get_count_holiday():
    """Return the total number of holidays, aliased as `holiday`."""
    return db.get_one(connection,
                      "select count(nameofholiday) holiday from holidays")
def remove_role_leave_days(id):
    """Delete the role_leave_days row whose id matches *id*."""
    # NOTE(review): id is interpolated into the SQL — ensure it is trusted.
    return db.get_one(connection, "delete from role_leave_days where id=%s" % id)
def get_count_station():
    """Return the total number of stations, aliased as `station`."""
    return db.get_one(connection,
                      "select count(name) station from station")
def get_role_leave_days(id):
    """Fetch the role_leave_days row matching *id*."""
    # NOTE(review): id goes straight into the SQL string — injection risk.
    return db.get_one(connection, "select * from role_leave_days where id=%s" % id)
def get_count_role():
    """Return the total number of roles, aliased as `role`."""
    return db.get_one(connection, "select count(role_name) role from role ")
def __init__(self):
    """Build the fullscreen Hildon main window: a root layout with a vertical
    button bar on the left and three swappable content areas (main, event,
    map), each a Table+Layout pair packed into an HBox."""
    hildon.Program.__init__(self)
    self.window = hildon.Window()
    self.window.connect("destroy", gtk.main_quit)
    self.window.connect("key-press-event", kartkomponent.gui.app.on_key_press)
    self.add_window(self.window)
    self.window.fullscreen()
    self.window.set_title("a title")
    # Background colors (both white) for the root and content layouts.
    color = gtk.gdk.color_parse("#FFFFFF")
    color2 = gtk.gdk.color_parse("#FFFFFF")
    # 2x2 table placed directly onto the window.
    table = gtk.Table(2, 2, False)
    self.window.add(table)
    # Root layout in the top-left cell; everything else is positioned on it.
    self.layout = gtk.Layout(None, None)
    self.layout.set_size(600, 600)
    table.attach(self.layout, 0, 1, 0, 1, gtk.FILL | gtk.EXPAND, gtk.FILL | gtk.EXPAND, 0, 0)
    self.layout.modify_bg(gtk.STATE_NORMAL, color)
    # --- Initial "main" view: table + layout inside an HBox. ---
    self.mainTable = gtk.Table(1, 1, False)
    self.main_hbox = gtk.HBox(homogeneous=False, spacing=0)
    self.main_hbox.set_size_request(690, 450)
    self.main_hbox.pack_start(self.mainTable, True, True, 0)
    self.mainlayout = gtk.Layout(None, None)
    self.mainlayout.set_size(700, 400)
    self.mainTable.attach(self.mainlayout, 0, 1, 0, 1, gtk.FILL | gtk.EXPAND, gtk.FILL | gtk.EXPAND, 0, 0)
    self.mainlayout.modify_bg(gtk.STATE_NORMAL, color2)
    self.mainlabel = gtk.Label("Mainbox")
    self.mainlayout.put(self.mainlabel, 10, 0)
    # --- "Event" (mission) view, same HBox/Table/Layout structure. ---
    self.eventTable = gtk.Table(1, 1, False)
    self.event_hbox = gtk.HBox(homogeneous=False, spacing=0)
    self.event_hbox.set_size_request(690, 450)
    self.event_hbox.pack_start(self.eventTable, True, True, 0)
    self.eventlayout = gtk.Layout(None, None)
    self.eventlayout.set_size(700, 400)
    self.eventTable.attach(self.eventlayout, 0, 1, 0, 1, gtk.FILL | gtk.EXPAND, gtk.FILL | gtk.EXPAND, 0, 0)
    self.eventlayout.modify_bg(gtk.STATE_NORMAL, color2)
    # Current mission from the database, shown in the event label.
    self.katt = db.get_one(Mission)
    self.eventlabel = gtk.Label("Uppdrag: " + self.katt.title + ", status: " + "Utryckning")
    self.eventlayout.put(self.eventlabel, 10, 0)
    # "Mark as done" checkbox; clicking reports back to the database.
    self.check_button = gtk.CheckButton(label="kryssa för avklarat", use_underline=True)
    self.eventlayout.put(self.check_button, 10, 30)
    self.check_button.connect("clicked", self.Send_to_DB)
    # --- "Map" view, same HBox/Table/Layout structure. ---
    self.mapTable = gtk.Table(1, 1, False)
    self.map_hbox = gtk.HBox(homogeneous=False, spacing=0)
    self.map_hbox.set_size_request(690, 450)
    self.map_hbox.pack_start(self.mapTable, True, True, 0)
    self.maplayout = gtk.Layout(None, None)
    self.maplayout.set_size(700, 400)
    self.mapTable.attach(self.maplayout, 0, 1, 0, 1, gtk.FILL | gtk.EXPAND, gtk.FILL | gtk.EXPAND, 0, 0)
    self.maplayout.modify_bg(gtk.STATE_NORMAL, color2)
    # Map widget supplied by the kartkomponent GUI module.
    self.mapView = kartkomponent.gui.app.view
    self.mapView.set_size_request(650, 400)
    self.maplayout.put(self.mapView, 10, 0)
    # Icons for the left-hand navigation buttons.
    imageEvent = gtk.Image()
    imageEvent.set_from_file("menybilder/uppdrag2.jpg")
    imageCompass = gtk.Image()
    imageCompass.set_from_file("menybilder/kompass2.jpg")
    imageTelephone = gtk.Image()
    imageTelephone.set_from_file("menybilder/telefon2.jpg")
    imageMsg = gtk.Image()
    imageMsg.set_from_file("menybilder/brev2.jpg")
    imageOff = gtk.Image()
    imageOff.set_from_file("menybilder/off.jpg")
    # --- Vertical navigation buttons along the left edge. ---
    self.button1 = gtk.Button()
    self.button1.add(imageEvent)
    self.button1.connect("clicked", self.EventView)
    self.button1.set_size_request(80, 70)
    self.layout.put(self.button1, 0, 100)
    self.button2 = gtk.Button()
    self.button2.add(imageCompass)
    self.button2.connect("clicked", self.MapView)
    self.button2.set_size_request(80, 70)
    self.layout.put(self.button2, 0, 0)
    self.button3 = gtk.Button()
    self.button3.add(imageTelephone)
    self.button3.connect("clicked", gtk.main_quit)
    self.button3.set_size_request(80, 70)
    self.layout.put(self.button3, 0, 200)
    self.button4 = gtk.Button()
    self.button4.add(imageMsg)
    self.button4.connect("clicked", self.MainView)
    self.button4.set_size_request(80, 70)
    self.layout.put(self.button4, 0, 300)
    self.button5 = gtk.Button()
    self.button5.add(imageOff)
    self.button5.connect("clicked", gtk.main_quit)
    self.button5.set_size_request(80, 70)
    self.layout.put(self.button5, 0, 400)
    # --- Horizontal buttons along the bottom of the map view. ---
    self.button6 = gtk.Button()
    self.button6.connect("clicked", self.Show_gps_pos)
    self.button6.set_size_request(150, 30)
    self.button6.set_label("Visa pos.")
    self.maplayout.put(self.button6, 100, 400)
    self.button7 = gtk.Button()
    self.button7.connect("clicked", gtk.main_quit)
    self.button7.set_size_request(150, 30)
    self.maplayout.put(self.button7, 300, 400)
    self.button8 = gtk.Button()
    self.button8.connect("clicked", gtk.main_quit)
    self.button8.set_size_request(150, 30)
    self.maplayout.put(self.button8, 500, 400)
    # Title label at the top of the root layout.
    label = gtk.Label("Nokia n810")
    label.set_size_request(500, 50)
    label.set_alignment(xalign=0.5, yalign=0)
    label.modify_font(pango.FontDescription("sans 12"))
    self.layout.put(label, 100, 0)
def get_count_staff():
    """Return the total number of staff records, aliased as `staffno`."""
    return db.get_one(connection,
                      "select count(first_name) staffno from personal_data pd")
def sys_to_id(system):
    """Resolve a solar system name (LIKE match) to its solarSystemID."""
    params = {'system': system}
    match = get_one(
        'select solarSystemID from mapSolarSystems where solarSystemName LIKE %(system)s',
        params)
    return match[0]
def sysid_list_to_object(sysids):
    """Expand solar-system ids into dicts with id, name and rounded security."""
    query = 'select solarSystemID, solarSystemName, round(security, 1) from mapSolarSystems where solarSystemID = %(system)s'
    results = []
    for system_id in sysids:
        record = get_one(query, {'system': system_id})
        results.append({'id': record[0], 'name': record[1], 'sec': record[2]})
    return results
def update_leave_types(id):
    """Fetch the leave_types row matching *id* (used to prefill the edit form)."""
    # NOTE(review): id goes straight into the SQL string — injection risk.
    return db.get_one(connection, "select * from leave_types where id=%s" % id)
def sys_to_id(system):
    """Resolve a solar system name (LIKE match) to its solarSystemID."""
    row = get_one(
        'select solarSystemID from mapSolarSystems where solarSystemName LIKE %(system)s',
        {'system': system},
    )
    return row[0]
def get_idc_info_date(now_year):
    """Build chart series for *now_year* from idc_bill: one series of total
    monthly spend followed by one 12-month series per IDC."""
    # Monthly totals (sum per date) and the raw per-IDC rows for the year.
    rt_list_all = db.get_one(['sum(combined)'], "date like '%s%s' group by date order by date" % (now_year, '%'), 'idc_bill', list=True)
    rt_list = db.get_one(['date', 'idcname', 'combined'], "date like '%s%s' order by date" % (now_year, '%'), 'idc_bill', list=True)
    # Total expenditure per month.
    rt = []
    for x in rt_list_all:
        rt.append(float(x[0]))
    all_date = []
    all_date.append({'data': rt, 'name': '总支出'})
    # Per-IDC monthly expenditure: rs accumulates one {'name', 'data'[12]}
    # entry per IDC; rows are (date, idcname, combined).
    rs = []
    for x in rt_list:
        if len(rs) != 0:
            for y in rs:
                if y['name'] == x[1]:
                    # Known IDC: slot the amount into its month (1-based).
                    months = x[0].split('-')[1]
                    y['data'][int(months) - 1] = (float(x[2]))
                    status = 0
                    break
            else:
                # for/else: no existing entry matched this IDC name.
                status = 1
        else:
            status = 1
        if status == 1:
            # First row for this IDC: start a zeroed 12-month series.
            rs.append({"name": x[1], 'data': [0] * 12})
            num = x[0].split('-')[1]
            rs[-1]['data'][int(num) - 1] = float(x[2])
    # Total series first, then the per-IDC series.
    return all_date + rs
def sta_to_sysid(station):
    """Resolve a station name (LIKE match) to its host solar system id."""
    match = get_one(
        'select solarSystemId from staStations where stationName LIKE %(station)s',
        {'station': station},
    )
    return match[0]