def get_data(db_name, db_account, sql): pc = prpcrypt() # a = Db_name.objects.filter(dbtag=hosttag)[0] #a = Db_name.objects.get(dbtag=hosttag) tar_dbname = db_name #如果instance中有备库role='read',则选择从备库读取 tar_host = db_account.instance.ip tar_port = int(db_account.instance.port) tar_username = db_account.user tar_passwd = pc.decrypt(db_account.passwd) # try: # if a.instance.all().filter(role='read')[0]: # tar_host = a.instance.all().filter(role='read')[0].ip # tar_port = a.instance.all().filter(role='read')[0].port # #如果没有设置或没有role=read,则选择第一个读到的实例读取 # except Exception,e: # tar_host = a.instance.filter(role__in=['write','all'])[0].ip # tar_port = a.instance.filter(role__in=['write','all'])[0].port # for i in a.db_account_set.all(): # if i.role == 'admin': # tar_username = i.user # tar_passwd = pc.decrypt(i.passwd) # break # #print tar_port+tar_passwd+tar_username+tar_host try: results, col = mysql_query(sql, tar_username, tar_passwd, tar_host, tar_port, tar_dbname) except Exception, e: #防止失败,返回一个wrong_message results, col = ([str(e)], ''), ['error']
def parse_binlogfirst(insname, binname, countnum):
    """Parse binlog *binname* on instance *insname* from its start and
    return the recovered SQL statements.

    Returns a one-element error-message list when the instance has no
    admin-role database account.
    """
    flag = True
    pc = prpcrypt()
    admin_accounts = Db_account.objects.filter(instance=insname,
                                               db_account_role='admin')
    # guard clause: bail out early when no admin account is configured
    if len(admin_accounts) == 0:
        return ['Instance do not have admin role db account!']
    account = admin_accounts[0]
    conn_settings = {
        'host': insname.ip,
        'port': int(insname.port),
        'user': account.user,
        'passwd': pc.decrypt(account.passwd),
    }
    parser = binlog2sql.Binlog2sql(
        connectionSettings=conn_settings,
        startFile=binname, startPos=4, endFile='', endPos=0,
        startTime='', stopTime='', only_schemas='', only_tables='',
        nopk=False, flashback=False, stopnever=False, countnum=countnum)
    parser.process_binlog()
    return parser.sqllist
def incep_exec(sqltext, myuser, mypasswd, myhost, myport, mydbname, flag=0): pc = prpcrypt() if (int(flag) == 0): flagcheck = '--enable-check' elif (int(flag) == 1): flagcheck = '--enable-execute' myuser = myuser.encode('utf8') mypasswd = pc.decrypt(mypasswd.encode('utf8')) myhost = myhost.encode('utf8') myport = int(myport) mydbname = mydbname.encode('utf8') sql1 = "/*--user=%s;--password=%s;--host=%s;%s;--port=%d;*/\ inception_magic_start;\ use %s;" % (myuser, mypasswd, myhost, flagcheck, myport, mydbname) sql2 = 'inception_magic_commit;' sql = sql1 + sqltext + sql2 try: conn = MySQLdb.connect(host=incp_host, user=incp_user, passwd=incp_passwd, db='', port=incp_port, use_unicode=True, charset="utf8") cur = conn.cursor() ret = cur.execute(sql) result = cur.fetchall() #num_fields = len(cur.description) field_names = [i[0] for i in cur.description] cur.close() conn.close() except MySQLdb.Error, e: return ([str(e)], ''), ['error']
def parse_binlogfirst(insname, binname, countnum):
    """Legacy variant: locate an admin account through the instance's
    databases, parse binlog *binname* from its start and return the SQL list.

    Returns a one-element error-message list when no admin account exists.
    """
    flag = True
    pc = prpcrypt()
    tar_username = None
    tar_passwd = None
    for a in insname.db_name_set.all():
        for i in a.db_account_set.all():
            if i.role == 'admin':
                tar_username = i.user
                tar_passwd = pc.decrypt(i.passwd)
                flag = False
                break
        if flag == False:
            break
    # BUG FIX: without an admin account the original raised NameError on
    # ``tar_username``; fail the same way the newer variant does.
    if tar_username is None:
        return ['Instance do not have admin role db account!']
    connectionSettings = {
        'host': insname.ip,
        'port': int(insname.port),
        'user': tar_username,
        'passwd': tar_passwd
    }
    binlogsql = binlog2sql.Binlog2sql(
        connectionSettings=connectionSettings, startFile=binname, startPos=4,
        endFile='', endPos=0, startTime='', stopTime='', only_schemas='',
        only_tables='', nopk=False, flashback=False, stopnever=False,
        countnum=countnum)
    binlogsql.process_binlog()
    sqllist = binlogsql.sqllist
    return sqllist
def check_mysql_host(instance_id, account_id):
    """Poll one MySQL instance: record basic metrics, then check for
    long-running SELECTs and alarm (and optionally kill them).

    NOTE(review): reconstructed from collapsed source; branch nesting of the
    autokill / alarm-mail chain should be confirmed against history.
    """
    instance = Db_instance.objects.get(id=instance_id)
    db_account = Db_account.objects.get(id=account_id)
    # collect the basic status/variables snapshot first
    mon_basic(instance, db_account)
    # longlist = []
    py = prpcrypt()
    #
    conn_info = Connect(instance.ip, instance.port, db_account.user,
                        py.decrypt(db_account.passwd))
    # active user SELECTs only: system sessions, sleeps and replication
    # threads are filtered out
    result, col = conn_info.query_mysql("select ID,USER,HOST,DB,COMMAND,TIME,STATE,INFO from information_schema.processlist where COMMAND !='Sleep' and DB not in ('information_schema','sys') and user not in ('system user','event_scheduler') and command!='Binlog Dump' and info like 'select%'")
    # result,col = conn_info.query_mysql("select ID,USER,HOST,DB,COMMAND,TIME,STATE,INFO from processlist")
    mysql_monitor = MySQL_monitor.objects.get(instance_id=instance.id)
    if mysql_monitor.check_longsql == 1:
        try:
            # keep only queries running longer than the configured threshold
            longsql_send = filter(lambda x: int(x[5]) > int(mysql_monitor.longsql_time), result)
        except Exception, e:
            longsql_send = ''
        # print longsql_send
        alarm_type = 2
        if len(longsql_send) > 0:
            flag = record_alarm(mysql_monitor, alarm_type)
            if mysql_monitor.longsql_autokill == 1:
                # autokill: terminate the offending sessions, then mail (type 3)
                idlist = map(lambda x: 'kill ' + str(x[0]) + ';', longsql_send)
                conn_info.kill_id(idlist)
                sendmail_monitor.delay(instance.id, mysql_monitor.mail_to.split(';'), longsql_send, 3)
            elif flag:
                # first occurrence of this alarm: notify only
                sendmail_monitor.delay(instance.id, mysql_monitor.mail_to.split(';'), longsql_send, alarm_type)
        else:
            # no long SQL any more: clear the alarm state if one was raised
            check_ifok(instance, alarm_type)
def run_process(insname,sql): flag = True pc = prpcrypt() for a in insname.db_name_set.all(): for i in a.db_account_set.all(): if i.role == 'admin': tar_username = i.user tar_passwd = pc.decrypt(i.passwd) flag = False break if flag == False: break # print tar_port+tar_passwd+tar_username+tar_host if vars().has_key('tar_username'): try: conn = MySQLdb.connect(host=insname.ip, user=tar_username, passwd=tar_passwd, port=int(insname.port),connect_timeout=5, charset='utf8') conn.select_db('information_schema') param=[] curs = conn.cursor() #result = curs.executemany(sql,param) for i in sql.split(';')[0:-1]: try: curs.execute(i) except Exception,e: pass conn.commit() curs.close() conn.close() return ([sql], ''), ['success'] except Exception, e: # 防止失败,返回一个wrong_message results, col = ([str(e)], ''), ['error']
def get_process_data(insname, sql): flag = True pc = prpcrypt() # for a in insname.db_name_set.all(): # for i in a.db_account_set.all(): # if i.role == 'admin': # tar_username = i.user # tar_passwd = pc.decrypt(i.passwd) # flag = False # break # if flag == False: # break db_account = Db_account.objects.filter(instance=insname, db_account_role__in=['admin']) if len(db_account) > 0: tar_username = db_account[0].user tar_passwd = pc.decrypt(db_account[0].passwd) #print tar_port+tar_passwd+tar_username+tar_host if vars().has_key('tar_username'): try: results, col = mysql_query(sql, tar_username, tar_passwd, insname.ip, int(insname.port), 'information_schema') except Exception, e: #防止失败,返回一个wrong_message results, col = ([str(e)], ), ['error'] #results,col = mysql_query(wrong_msg,user,passwd,host,int(port),dbname) return results, col
def get_dbcon(a):
    """Resolve the target host/port for Db_name *a*, preferring a 'read'
    replica and falling back to the first 'write'/'all' instance.

    NOTE(review): this legacy helper assigns tar_* locals but has no return
    statement, so it always yields None -- confirm whether any caller still
    uses it or whether a trailing ``return`` was lost.
    """
    # a = Db_name.objects.get(dbtag=hosttag)
    tar_dbname = a.dbname
    pc = prpcrypt()
    try:
        # prefer a dedicated read replica when one is configured
        if a.instance.all().filter(role='read')[0]:
            tar_host = a.instance.all().filter(role='read')[0].ip
            tar_port = a.instance.all().filter(role='read')[0].port
    except Exception, e:
        # no read replica: take the first write/all instance instead
        tar_host = a.instance.filter(role__in=['write', 'all'])[0].ip
        tar_port = a.instance.filter(role__in=['write', 'all'])[0].port
def create_acc(tags, user, passwd, dbtagli, acclist, role):
    """Create a Db_account (password stored encrypted) and link it to every
    database named in *dbtagli*; a no-op when tags/user/passwd is empty.

    NOTE(review): ``userli`` is built from *acclist* but never used here --
    presumably a loop linking users to the account was dropped; confirm
    against version history.
    """
    if len(tags) > 0 and len(user) > 0 and len(passwd) > 0:
        py = prpcrypt()
        # never persist the clear-text password
        account = Db_account(tags=tags, user=user, passwd=py.encrypt(passwd),
                             role=role)
        account.save()
        dbli = Db_name.objects.filter(dbtag__in=dbtagli)
        userli = UserInfo.objects.filter(username__in=acclist)
        for i in dbli:
            try:
                account.dbname.add(i)
            except Exception, e:
                # best-effort: skip databases that cannot be linked
                pass
def submitSql(request):
    """Render the SQL-submit page: each visible cluster with its database
    list, plus the reviewer list (the submitting user excluded)."""
    temp_name = 'archer/archer-header.html'
    masters = func.get_mysql_hostlist(request.user.username, 'incept')
    if len(masters) == 0:
        context = {'errMsg': '集群数为0,可能后端数据没有配置集群',
                   'temp_name': temp_name}
        return render(request, 'archer/error.html', context)
    pydecryp = prpcrypt()
    dictAllClusterDb = OrderedDict()
    # resolve each cluster's master connection and enumerate its databases
    for master in masters:
        # hoisted: the original ran Db_name.objects.get(dbtag=master) twice
        master_db = Db_name.objects.get(dbtag=master)
        master_up = master_db.db_account_set.get(role='admin')
        master_ip = master_db.instance.get()
        masterHost = master_ip.ip
        masterPort = master_ip.port
        masterUser = master_up.user
        # BUG FIX: decrypt with the local ``pydecryp`` created above; the
        # original referenced ``prpCryptor`` and left ``pydecryp`` unused
        masterPassword = pydecryp.decrypt(master_up.passwd)
        listDb = dao.getAlldbByCluster(masterHost, masterPort, masterUser,
                                       masterPassword)
        dictAllClusterDb[master] = listDb
    # reviewers: everyone with the 审核人 role except the current user
    loginUser = request.user.username
    reviewMen = sqlreview_role.objects.filter(role='审核人').exclude(
        userid__username=loginUser)
    if len(reviewMen) == 0:
        context = {'errMsg': '审核人为0,请配置审核人', 'temp_name': temp_name}
        return render(request, 'archer/error.html', context)
    context = {
        'currentMenu': 'submitsql',
        'dictAllClusterDb': dictAllClusterDb,
        'reviewMen': reviewMen,
        'temp_name': temp_name,
        'listDB': json.dumps(dictAllClusterDb)
    }
    return render(request, 'archer/submitSql.html', context)
def get_mysql_data(db_account, sql, useraccount, request, limitnum):
    """Audit-log a user query and run it through *db_account*.

    NOTE(review): ``results, col`` are assigned but never returned, so every
    caller receives None -- confirm whether a trailing ``return results, col``
    was lost (the sibling helpers all return this pair).
    """
    # (a long commented-out legacy read-replica/account lookup was removed
    #  here; see the hosttag-based get_data for that older selection logic)
    pc = prpcrypt()
    tar_username = db_account.user
    # passwords are stored encrypted in Db_account
    tar_passwd = pc.decrypt(db_account.passwd)
    tar_host = db_account.instance.ip
    tar_port = db_account.instance.port
    # the submitted radio value looks like '<db_account_id>:<dbname>'
    tar_dbname = request.POST['optionsRadios'].split(':')[1]
    db_tag = db_account.instance.ip + ':' + db_account.instance.port + '__' + db_account.db_account_role
    try:
        if (cmp(sql, wrong_msg)):
            # only real statements (not the wrong_msg sentinel) are logged
            log_mysql_op(useraccount, sql, tar_dbname, db_tag, request)
            results, col = mysql_query(sql, tar_username, tar_passwd,
                                       tar_host, tar_port, tar_dbname,
                                       limitnum)
    except Exception, e:
        # keep a failure in the logging DB from raising; build a wrong_message
        results, col = ([str(e)], ''), ['error']
def get_mongo_coninfo(hosttag, useraccount): pc = prpcrypt() # a = Db_name.objects.get(dbtag=hosttag) db_account_id, tar_dbname = hosttag.split(':') db_account = Db_account.objects.get(id=int(db_account_id)) try: tar_username = db_account.user tar_passwd = pc.decrypt(db_account.passwd) tar_host = db_account.instance.ip tar_port = db_account.instance.port db_tag = db_account.instance.ip + ':' + db_account.instance.port + '__' + db_account.db_account_role return tar_host, tar_port, tar_username, tar_passwd, tar_dbname except: pass
def get_dupreport_byins(insname):
    """Run pt-duplicate-key-checker against *insname* with its admin account
    and return the tool's text output.

    Returns None (implicitly) when no admin-role account can be found.
    """
    flag = True
    pc = prpcrypt()
    # find the first admin account reachable through the instance's databases
    for a in insname.db_name_set.all():
        for i in a.db_account_set.all():
            if i.role == 'admin':
                tar_username = i.user
                tar_passwd = pc.decrypt(i.passwd)
                flag = False
                break
        if flag == False:
            break
    if vars().has_key('tar_username'):
        # NOTE(review): the password is interpolated into a shell command
        # line -- visible in `ps` and unsafe with shell metacharacters;
        # consider subprocess with an argument list instead of
        # commands.getoutput.
        cmd = incept.pttool_path + '/pt-duplicate-key-checker' + ' -u %s -p %s -P %d -h %s ' % (
            tar_username, tar_passwd, int(insname.port), insname.ip)
        dup_result = commands.getoutput(cmd)
        return dup_result
def get_conn_info(db_account): try: instance = db_account.instance tar_host = instance.ip tar_port = instance.port tar_username = db_account.user #a = Db_name.objects.get(dbtag=hosttag) #如果instance中有备库role='read',则选择从备库读取 pc = prpcrypt() tar_passwd = pc.decrypt(db_account.passwd) return tar_host,tar_port,tar_username,tar_passwd except Exception,e: print e
def run_mysql_exec(db_account,sql,useraccount,request): pc = prpcrypt() tar_username = db_account.user tar_passwd = pc.decrypt(db_account.passwd) tar_host = db_account.instance.ip tar_port = db_account.instance.port tar_dbname = request.POST['optionsRadios'].split(':')[1] db_tag = db_account.instance.ip + ':' + db_account.instance.port + '__' + db_account.db_account_role try: if (sql.split()[0] != 'select'): log_mysql_op(useraccount, sql, tar_dbname, db_tag, request) results, col = mysql_exec(sql, tar_username, tar_passwd, tar_host, tar_port, tar_dbname) else: results, col = mysql_query(sql, user, passwd, host, int(port), dbname) except Exception, e: results, col = ([str(e)], ''), ['error']
def get_dbcon(a): # a = Db_name.objects.get(dbtag=hosttag) tar_dbname = 'information_schema' pc = prpcrypt() try: db_account = Db_account.objects.get(id=a.mysql_monitor.account_id) tar_host = a.ip tar_port = a.port tar_username = db_account.user tar_passwd = pc.decrypt(db_account.passwd) return tar_port, tar_passwd, tar_username, tar_host, tar_dbname # if a.instance.all().filter(role='read')[0]: # tar_host = a.instance.all().filter(role='read')[0].ip # tar_port = a.instance.all().filter(role='read')[0].port except Exception, e: print e return 0, 0, 0, 0, 0
def inception_check(tar_dbname,db_account,sql,flag=0): make_sure_mysql_usable() pc = prpcrypt() tar_username = db_account.user tar_passwd = pc.decrypt(db_account.passwd) tar_host = db_account.instance.ip tar_port = db_account.instance.port #print tar_port+tar_passwd+tar_username+tar_host try: results,col = incep_exec(sql,tar_username,tar_passwd,tar_host,tar_port,tar_dbname,flag) return results,col,tar_dbname except Exception,e: wrongmsg = e results, col = func.mysql_query(wrongmsg, user, passwd, host, int(port), dbname) return results, col, tar_dbname
def get_process_data(insname,sql): flag = True pc = prpcrypt() for a in insname.db_name_set.all(): for i in a.db_account_set.all(): if i.role == 'admin': tar_username = i.user tar_passwd = pc.decrypt(i.passwd) flag = False break if flag == False: break #print tar_port+tar_passwd+tar_username+tar_host if vars().has_key('tar_username'): try: results,col = mysql_query(sql,tar_username,tar_passwd,insname.ip,int(insname.port),'information_schema') except Exception, e: #防止失败,返回一个wrong_message results,col = ([str(e)],''),['error'] #results,col = mysql_query(wrong_msg,user,passwd,host,int(port),dbname) return results,col
def parse_binlog(serverid,binname_start,begintime,tbname,dbselected,username,countnum,flash_back,binname_end='',endtime=''): flag = True pc = prpcrypt() insname = Db_instance.objects.get(id=serverid) db_account = Db_account.objects.filter(instance=insname, db_account_role='admin') if len(db_account) > 0: tar_username = db_account[0].user tar_passwd = pc.decrypt(db_account[0].passwd) connectionSettings = {'host': insname.ip, 'port': int(insname.port), 'user': tar_username, 'passwd': tar_passwd} binlogsql = binlog2sql.Binlog2sql(connectionSettings=connectionSettings, startFile=binname_start, startPos=4, endFile=binname_end, endPos=0, startTime=begintime, stopTime=endtime, only_schemas=None if dbselected == '0' else dbselected, only_tables=None if tbname == '0' else tbname, nopk=False, flashback=flash_back, stopnever=False,countnum=countnum) binlogsql.process_binlog() sqllist = binlogsql.sqllist # sendmail_sqlparse.delay(username, dbselected, tbname, sqllist,flash_back) sendmail_sqlparse.delay(username, '{}:{}__{}'.format(insname.ip,insname.port,'ALL' if dbselected == '0' else dbselected), tbname, sqllist, flash_back) print sqllist return sqllist else: return ['Instance do not have admin role db account!']
def set_acc(old_account, tags, user, passwd, dbtagli, acclist, role):
    """Update an existing Db_account in place: role always; tags/user/passwd
    only when a non-empty value is supplied; then rebuild the database links.

    NOTE(review): *acclist* is accepted and the user links are cleared below,
    but no user links are re-added here -- presumably the caller does that;
    confirm. Reconstructed from collapsed source; save() placement inside the
    remove loops should be verified against history.
    """
    old_account.role = role
    if len(tags) > 0:
        old_account.tags = tags
        old_account.save()
    if len(user) > 0:
        old_account.user = user
        old_account.save()
    if len(passwd) > 0:
        # store the password encrypted, same as create_acc
        py = prpcrypt()
        old_account.passwd = py.encrypt(passwd)
        old_account.save()
    # drop all current database / user links before re-adding
    for i in old_account.dbname.all():
        old_account.dbname.remove(i)
        old_account.save()
    for i in old_account.account.all():
        old_account.account.remove(i)
        old_account.save()
    for i in Db_name.objects.filter(dbtag__in=dbtagli):
        try:
            old_account.dbname.add(i)
        except Exception, e:
            # best-effort: skip databases that cannot be linked
            pass
def parse_binlog(serverid, binname, begintime, tbname, dbselected, username,
                 countnum, flash_back):
    """Legacy variant: parse binlog *binname* on instance *serverid* and mail
    the recovered statements to *username*.

    NOTE(review): unlike the newer parse_binlog, this one never returns
    ``sqllist`` (implicit None) and raises NameError on ``tar_username``
    when no admin account exists -- confirm whether it is still called.
    """
    flag = True
    pc = prpcrypt()
    insname = Db_instance.objects.get(id=serverid)
    # find the first admin account reachable through the instance's databases
    for a in insname.db_name_set.all():
        for i in a.db_account_set.all():
            if i.role == 'admin':
                tar_username = i.user
                tar_passwd = pc.decrypt(i.passwd)
                flag = False
                break
        if flag == False:
            break
    connectionSettings = {
        'host': insname.ip,
        'port': int(insname.port),
        'user': tar_username,
        'passwd': tar_passwd
    }
    binlogsql = binlog2sql.Binlog2sql(
        connectionSettings=connectionSettings, startFile=binname, startPos=4,
        endFile='', endPos=0, startTime=begintime, stopTime='',
        only_schemas=dbselected, only_tables=tbname, nopk=False,
        flashback=flash_back, stopnever=False, countnum=countnum)
    binlogsql.process_binlog()
    sqllist = binlogsql.sqllist
    # asynchronous: results go out by mail, not by return value
    sendmail_sqlparse.delay(username, dbselected, tbname, sqllist, flash_back)
def get_dupreport(hosttag, db, email=''):
    """Run pt-duplicate-key-checker for database *db* on instance *hosttag*.

    With *email* set, the report is rendered and mailed (returns None);
    otherwise the raw tool output is returned. Configuration problems yield
    explanatory strings; a disabled pttool_switch returns None.
    """
    import os
    pc = prpcrypt()
    if incept.pttool_switch != 0:
        mailto = []
        mailto.append(email)
        try:
            instance = Db_instance.objects.get(id=int(hosttag))
            db_account = Db_account.objects.get(instance=instance,
                                                db_account_role='admin')
            tar_dbname = db
            tar_port = instance.port
            tar_host = instance.ip
            tar_username = db_account.user
            tar_passwd = pc.decrypt(db_account.passwd)
        except Exception, e:
            print e
            return "please check your db set"
        # NOTE(review): ``f`` below is unused, but it ends up in locals()
        # which is handed to the template context -- removing it could alter
        # rendering, so it is kept.
        f = incept.pttool_path + '/pt-duplicate-key-checker'
        if os.path.isfile(incept.pttool_path + '/pt-duplicate-key-checker'):
            # NOTE(review): password on the command line is visible in `ps`
            # and unsafe with shell metacharacters; consider subprocess with
            # an argument list.
            cmd = incept.pttool_path + '/pt-duplicate-key-checker' + ' -u %s -p %s -P %d -h %s -d %s ' % (
                tar_username, tar_passwd, int(tar_port), tar_host, tar_dbname)
            dup_result = commands.getoutput(cmd)
            dup_result = tar_host + ':' + tar_port + '----' + db + '\n' + dup_result
            if email != '':
                html_content = loader.render_to_string(
                    'include/mail_template.html', locals())
                sendmail(
                    'DUPKEY CHECK ON ' + tar_host + ':' + tar_port + '----' + db,
                    mailto, html_content)
            else:
                return dup_result
        else:
            return 'pt-tool path set wrong'
def get_db_info(task):
    """Resolve the Db_account matching *task*'s instance/role and return
    (user, decrypted_passwd, host, port, dbname)."""
    cipher = prpcrypt()
    # task.instance is formatted '<ip>:<port>'
    ins_ip = task.instance.split(':')[0]
    ins_port = task.instance.split(':')[1]
    account = Db_account.objects.get(instance__ip=ins_ip,
                                     instance__port=ins_port,
                                     db_account_role=task.db_account_role)
    return (account.user,
            cipher.decrypt(account.passwd),
            account.instance.ip,
            account.instance.port,
            task.dbtag)
return results def get_data(hosttag,sql): a = Db_name.objects.filter(dbtag=hosttag)[0] #a = Db_name.objects.get(dbtag=hosttag) tar_dbname = a.dbname #如果instance中有备库role='read',则选择从备库读取 try: if a.instance.all().filter(role='read')[0]: tar_host = a.instance.all().filter(role='read')[0].ip tar_port = a.instance.all().filter(role='read')[0].port #如果没有设置或没有role=read,则选择第一个读到的实例读取 except Exception,e: tar_host = a.instance.filter(role__in=['write','all'])[0].ip tar_port = a.instance.filter(role__in=['write','all'])[0].port pc = prpcrypt() for i in a.db_account_set.all(): if i.role == 'admin': tar_username = i.user tar_passwd = pc.decrypt(i.passwd) break #print tar_port+tar_passwd+tar_username+tar_host try: results,col = mysql_query(sql,tar_username,tar_passwd,tar_host,tar_port,tar_dbname) except Exception, e: #防止失败,返回一个wrong_message results,col = ([str(e)],''),['error'] #results,col = mysql_query(wrong_msg,user,passwd,host,int(port),dbname) return results,col,tar_dbname def process(insname,flag=1,sql=''):
def encrypt_passwd():
    """One-off migration: re-save every Db_account with its password
    encrypted.

    The stored value is re-encrypted unconditionally, so running this twice
    double-encrypts -- intended as a single migration step.
    """
    cipher = prpcrypt()
    for account in Db_account.objects.all():
        account.passwd = cipher.encrypt(account.passwd)
        account.save()
def mon_basic(instance, db_account): mysql_monitor = MySQL_monitor.objects.get(instance=instance.id) now_time = timezone.now() try: py = prpcrypt() conn = MySQLdb.connect(host=instance.ip, user=db_account.user, passwd=py.decrypt(db_account.passwd), port=int(instance.port), connect_timeout=3, charset='utf8') conn.autocommit(True) cur = conn.cursor() conn.select_db('information_schema') check_ifok(instance, 7) ############################# CHECK MYSQL #################################################### mysql_variables = get_mysql_variables(cur) mysql_status = get_mysql_status(cur) time.sleep(1) mysql_status_2 = get_mysql_status(cur) ############################# GET VARIABLES ################################################### version = get_item(mysql_variables, 'version') key_buffer_size = get_item(mysql_variables, 'key_buffer_size') sort_buffer_size = get_item(mysql_variables, 'sort_buffer_size') join_buffer_size = get_item(mysql_variables, 'join_buffer_size') max_connections = get_item(mysql_variables, 'max_connections') max_connect_errors = get_item(mysql_variables, 'max_connect_errors') open_files_limit = get_item(mysql_variables, 'open_files_limit') table_open_cache = get_item(mysql_variables, 'table_open_cache') max_tmp_tables = get_item(mysql_variables, 'max_tmp_tables') max_heap_table_size = get_item(mysql_variables, 'max_heap_table_size') max_allowed_packet = get_item(mysql_variables, 'max_allowed_packet') thread_cache_size = get_item(mysql_variables, 'thread_cache_size') ############################# GET INNODB INFO ################################################## # innodb variables innodb_version = get_item(mysql_variables, 'innodb_version') innodb_buffer_pool_instances = get_item( mysql_variables, 'innodb_buffer_pool_instances') innodb_buffer_pool_size = get_item(mysql_variables, 'innodb_buffer_pool_size') innodb_doublewrite = get_item(mysql_variables, 'innodb_doublewrite') innodb_file_per_table = get_item(mysql_variables, 'innodb_file_per_table') 
innodb_flush_log_at_trx_commit = get_item( mysql_variables, 'innodb_flush_log_at_trx_commit') innodb_flush_method = get_item(mysql_variables, 'innodb_flush_method') innodb_force_recovery = get_item(mysql_variables, 'innodb_force_recovery') innodb_io_capacity = get_item(mysql_variables, 'innodb_io_capacity') innodb_read_io_threads = get_item(mysql_variables, 'innodb_read_io_threads') innodb_write_io_threads = get_item(mysql_variables, 'innodb_write_io_threads') # innodb status innodb_buffer_pool_pages_total = int( get_item(mysql_status, 'Innodb_buffer_pool_pages_total')) innodb_buffer_pool_pages_data = int( get_item(mysql_status, 'Innodb_buffer_pool_pages_data')) innodb_buffer_pool_pages_dirty = int( get_item(mysql_status, 'Innodb_buffer_pool_pages_dirty')) innodb_buffer_pool_pages_flushed = int( get_item(mysql_status, 'Innodb_buffer_pool_pages_flushed')) innodb_buffer_pool_pages_free = int( get_item(mysql_status, 'Innodb_buffer_pool_pages_free')) innodb_buffer_pool_pages_misc = int( get_item(mysql_status, 'Innodb_buffer_pool_pages_misc')) innodb_buffer_pool_wait_free = int( get_item(mysql_status, 'Innodb_buffer_pool_wait_free')) if innodb_buffer_pool_pages_misc > 18046744073709540000: innodb_buffer_pool_pages_misc = 0 innodb_page_size = int(get_item(mysql_status, 'Innodb_page_size')) innodb_pages_created = int( get_item(mysql_status, 'Innodb_pages_created')) innodb_pages_read = int(get_item(mysql_status, 'Innodb_pages_read')) innodb_pages_written = int( get_item(mysql_status, 'Innodb_pages_written')) innodb_row_lock_current_waits = int( get_item(mysql_status, 'Innodb_row_lock_current_waits')) innodb_row_lock_time = int( get_item(mysql_status, 'Innodb_row_lock_time')) innodb_row_lock_waits = int( get_item(mysql_status, 'Innodb_row_lock_waits')) innodb_log_waits = int(get_item(mysql_status, 'Innodb_log_waits')) # innodb persecond info innodb_buffer_pool_read_requests_persecond = int( get_item( mysql_status_2, 'Innodb_buffer_pool_read_requests')) - int( 
get_item(mysql_status, 'Innodb_buffer_pool_read_requests')) innodb_buffer_pool_reads_persecond = int( get_item(mysql_status_2, 'Innodb_buffer_pool_reads')) - int( get_item(mysql_status, 'Innodb_buffer_pool_reads')) innodb_buffer_pool_write_requests_persecond = int( get_item(mysql_status_2, 'Innodb_buffer_pool_write_requests') ) - int(get_item(mysql_status, 'Innodb_buffer_pool_write_requests')) innodb_buffer_pool_pages_flushed_persecond = int( get_item( mysql_status_2, 'Innodb_buffer_pool_pages_flushed')) - int( get_item(mysql_status, 'Innodb_buffer_pool_pages_flushed')) innodb_rows_deleted_persecond = int( get_item(mysql_status_2, 'Innodb_rows_deleted')) - int( get_item(mysql_status, 'Innodb_rows_deleted')) innodb_rows_inserted_persecond = int( get_item(mysql_status_2, 'Innodb_rows_inserted')) - int( get_item(mysql_status, 'Innodb_rows_inserted')) innodb_rows_read_persecond = int( get_item(mysql_status_2, 'Innodb_rows_read')) - int( get_item(mysql_status, 'Innodb_rows_read')) innodb_rows_updated_persecond = int( get_item(mysql_status_2, 'Innodb_rows_updated')) - int( get_item(mysql_status, 'Innodb_rows_updated')) ############################# GET STATUS ################################################## connect = 1 uptime = get_item(mysql_status, 'Uptime') open_files = get_item(mysql_status, 'Open_files') open_tables = get_item(mysql_status, 'Open_tables') opened_tables = get_item(mysql_status, 'Opened_tables') threads_connected = get_item(mysql_status, 'Threads_connected') threads_running = get_item(mysql_status, 'Threads_running') threads_created = get_item(mysql_status, 'Threads_created') threads_cached = get_item(mysql_status, 'Threads_cached') # threads_waits = 20 max_used_connections = get_item(mysql_status, 'Max_used_connections') connections = get_item(mysql_status, 'Connections') aborted_clients = get_item(mysql_status, 'Aborted_clients') aborted_connects = get_item(mysql_status, 'Aborted_connects') key_blocks_not_flushed = get_item(mysql_status, 
'Key_blocks_not_flushed') key_blocks_unused = get_item(mysql_status, 'Key_blocks_unused') key_blocks_used = get_item(mysql_status, 'Key_blocks_used') slow_queries = int(get_item(mysql_status, 'Slow_queries')) ############################# GET STATUS PERSECOND ################################################## threads_created_percond = int( get_item(mysql_status_2, 'Threads_created')) - int(threads_created) connections_persecond = int(get_item( mysql_status_2, 'Connections')) - int( get_item(mysql_status, 'Connections')) bytes_received_persecond = ( int(get_item(mysql_status_2, 'Bytes_received')) - int(get_item(mysql_status, 'Bytes_received'))) / 1024 bytes_sent_persecond = ( int(get_item(mysql_status_2, 'Bytes_sent')) - int(get_item(mysql_status, 'Bytes_sent'))) / 1024 com_select_persecond = int(get_item( mysql_status_2, 'Com_select')) - int( get_item(mysql_status, 'Com_select')) com_insert_persecond = int(get_item( mysql_status_2, 'Com_insert')) - int( get_item(mysql_status, 'Com_insert')) com_update_persecond = int(get_item( mysql_status_2, 'Com_update')) - int( get_item(mysql_status, 'Com_update')) com_delete_persecond = int(get_item( mysql_status_2, 'Com_delete')) - int( get_item(mysql_status, 'Com_delete')) com_commit_persecond = int(get_item( mysql_status_2, 'Com_commit')) - int( get_item(mysql_status, 'Com_commit')) com_rollback_persecond = int(get_item( mysql_status_2, 'Com_rollback')) - int( get_item(mysql_status, 'Com_rollback')) questions_persecond = int(get_item(mysql_status_2, 'Questions')) - int( get_item(mysql_status, 'Questions')) queries_persecond = int(get_item(mysql_status_2, 'Queries')) - int( get_item(mysql_status, 'Queries')) transaction_persecond = ( int(get_item(mysql_status_2, 'Com_commit')) + int(get_item(mysql_status_2, 'Com_rollback'))) - ( int(get_item(mysql_status, 'Com_commit')) + int(get_item(mysql_status, 'Com_rollback'))) created_tmp_disk_tables_persecond = int( get_item(mysql_status_2, 'Created_tmp_disk_tables')) - int( 
get_item(mysql_status, 'Created_tmp_disk_tables')) created_tmp_files_persecond = int( get_item(mysql_status_2, 'Created_tmp_files')) - int( get_item(mysql_status, 'Created_tmp_files')) created_tmp_tables_persecond = int( get_item(mysql_status_2, 'Created_tmp_tables')) - int( get_item(mysql_status, 'Created_tmp_tables')) table_locks_immediate_persecond = int( get_item(mysql_status_2, 'Table_locks_immediate')) - int( get_item(mysql_status, 'Table_locks_immediate')) table_locks_waited_persecond = int( get_item(mysql_status_2, 'Table_locks_waited')) - int( get_item(mysql_status, 'Table_locks_waited')) key_read_requests_persecond = int( get_item(mysql_status_2, 'Key_read_requests')) - int( get_item(mysql_status, 'Key_read_requests')) key_reads_persecond = int(get_item(mysql_status_2, 'Key_reads')) - int( get_item(mysql_status, 'Key_reads')) key_write_requests_persecond = int( get_item(mysql_status_2, 'Key_write_requests')) - int( get_item(mysql_status, 'Key_write_requests')) key_writes_persecond = int(get_item( mysql_status_2, 'Key_writes')) - int( get_item(mysql_status, 'Key_writes')) ############################# GET MYSQL HITRATE ################################################## if (string.atof(get_item(mysql_status, 'Qcache_hits')) + string.atof(get_item(mysql_status, 'Com_select'))) <> 0: query_cache_hitrate = string.atof( get_item(mysql_status, 'Qcache_hits')) / ( string.atof(get_item(mysql_status, 'Qcache_hits')) + string.atof(get_item(mysql_status, 'Com_select'))) query_cache_hitrate = "%9.2f" % query_cache_hitrate else: query_cache_hitrate = 0 if string.atof(get_item(mysql_status, 'Connections')) <> 0: thread_cache_hitrate = 1 - string.atof( get_item(mysql_status, 'Threads_created')) / string.atof( get_item(mysql_status, 'Connections')) thread_cache_hitrate = "%9.2f" % thread_cache_hitrate else: thread_cache_hitrate = 0 if string.atof(get_item(mysql_status, 'Key_read_requests')) <> 0: key_buffer_read_rate = 1 - string.atof( get_item(mysql_status, 'Key_reads')) 
/ string.atof( get_item(mysql_status, 'Key_read_requests')) key_buffer_read_rate = "%9.2f" % key_buffer_read_rate else: key_buffer_read_rate = 0 if string.atof(get_item(mysql_status, 'Key_write_requests')) <> 0: key_buffer_write_rate = 1 - string.atof( get_item(mysql_status, 'Key_writes')) / string.atof( get_item(mysql_status, 'Key_write_requests')) key_buffer_write_rate = "%9.2f" % key_buffer_write_rate else: key_buffer_write_rate = 0 if (string.atof(get_item(mysql_status, 'Key_blocks_used')) + string.atof(get_item(mysql_status, 'Key_blocks_unused'))) <> 0: key_blocks_used_rate = string.atof( get_item(mysql_status, 'Key_blocks_used')) / ( string.atof(get_item(mysql_status, 'Key_blocks_used')) + string.atof(get_item(mysql_status, 'Key_blocks_unused'))) key_blocks_used_rate = "%9.2f" % key_blocks_used_rate else: key_blocks_used_rate = 0 if (string.atof(get_item(mysql_status, 'Created_tmp_disk_tables')) + string.atof(get_item(mysql_status, 'Created_tmp_tables'))) <> 0: created_tmp_disk_tables_rate = string.atof( get_item(mysql_status, 'Created_tmp_disk_tables') ) / (string.atof(get_item(mysql_status, 'Created_tmp_disk_tables')) + string.atof(get_item(mysql_status, 'Created_tmp_tables'))) created_tmp_disk_tables_rate = "%9.2f" % created_tmp_disk_tables_rate else: created_tmp_disk_tables_rate = 0 if string.atof(max_connections) <> 0: connections_usage_rate = string.atof( threads_connected) / string.atof(max_connections) connections_usage_rate = "%9.2f" % connections_usage_rate else: connections_usage_rate = 0 if string.atof(open_files_limit) <> 0: open_files_usage_rate = string.atof(open_files) / string.atof( open_files_limit) open_files_usage_rate = "%9.2f" % open_files_usage_rate else: open_files_usage_rate = 0 if string.atof(table_open_cache) <> 0: open_tables_usage_rate = string.atof(open_tables) / string.atof( table_open_cache) open_tables_usage_rate = "%9.2f" % open_tables_usage_rate else: open_tables_usage_rate = 0 # repl slave_status = cur.execute('show slave 
status;') if slave_status <> 0: role = 'slave' role_new = 's' else: role = 'master' role_new = 'm' ############################# INSERT INTO SERVER ################################################## sql_insert = "replace into mysql_status(db_ip,db_port,connect,role,uptime,version,max_connections,max_connect_errors,open_files_limit,table_open_cache,max_tmp_tables,max_heap_table_size,max_allowed_packet,open_files,open_tables,threads_connected,threads_running,threads_created,threads_cached,connections,aborted_clients,aborted_connects,connections_persecond,bytes_received_persecond,bytes_sent_persecond,com_select_persecond,com_insert_persecond,com_update_persecond,com_delete_persecond,com_commit_persecond,com_rollback_persecond,questions_persecond,queries_persecond,transaction_persecond,created_tmp_tables_persecond,created_tmp_disk_tables_persecond,created_tmp_files_persecond,table_locks_immediate_persecond,table_locks_waited_persecond,key_buffer_size,sort_buffer_size,join_buffer_size,key_blocks_not_flushed,key_blocks_unused,key_blocks_used,key_read_requests_persecond,key_reads_persecond,key_write_requests_persecond,key_writes_persecond,innodb_version,innodb_buffer_pool_instances,innodb_buffer_pool_size,innodb_doublewrite,innodb_file_per_table,innodb_flush_log_at_trx_commit,innodb_flush_method,innodb_force_recovery,innodb_io_capacity,innodb_read_io_threads,innodb_write_io_threads,innodb_buffer_pool_pages_total,innodb_buffer_pool_pages_data,innodb_buffer_pool_pages_dirty,innodb_buffer_pool_pages_flushed,innodb_buffer_pool_pages_free,innodb_buffer_pool_pages_misc,innodb_page_size,innodb_pages_created,innodb_pages_read,innodb_pages_written,innodb_row_lock_current_waits,innodb_buffer_pool_pages_flushed_persecond,innodb_buffer_pool_read_requests_persecond,innodb_buffer_pool_reads_persecond,innodb_buffer_pool_write_requests_persecond,innodb_rows_read_persecond,innodb_rows_inserted_persecond,innodb_rows_updated_persecond,innodb_rows_deleted_persecond,query_cache_hitrate,thread_
cache_hitrate,key_buffer_read_rate,key_buffer_write_rate,key_blocks_used_rate,created_tmp_disk_tables_rate,connections_usage_rate,open_files_usage_rate,open_tables_usage_rate,create_time) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);" sql_update = "update mysql_status set db_ip=%s,db_port=%s,connect=%s,role=%s,uptime=%s,version=%s,max_connections=%s,max_connect_errors=%s,open_files_limit=%s,table_open_cache=%s,max_tmp_tables=%s,max_heap_table_size=%s,max_allowed_packet=%s,open_files=%s,open_tables=%s,threads_connected=%s,threads_running=%s,threads_created=%s,threads_cached=%s,connections=%s,aborted_clients=%s,aborted_connects=%s,connections_persecond=%s,bytes_received_persecond=%s,bytes_sent_persecond=%s,com_select_persecond=%s,com_insert_persecond=%s,com_update_persecond=%s,com_delete_persecond=%s,com_commit_persecond=%s,com_rollback_persecond=%s,questions_persecond=%s,queries_persecond=%s,transaction_persecond=%s,created_tmp_tables_persecond=%s,created_tmp_disk_tables_persecond=%s,created_tmp_files_persecond=%s,table_locks_immediate_persecond=%s,table_locks_waited_persecond=%s,key_buffer_size=%s,sort_buffer_size=%s,join_buffer_size=%s,key_blocks_not_flushed=%s,key_blocks_unused=%s,key_blocks_used=%s,key_read_requests_persecond=%s,key_reads_persecond=%s,key_write_requests_persecond=%s,key_writes_persecond=%s,innodb_version=%s,innodb_buffer_pool_instances=%s,innodb_buffer_pool_size=%s,innodb_doublewrite=%s,innodb_file_per_table=%s,innodb_flush_log_at_trx_commit=%s,innodb_flush_method=%s,innodb_force_recovery=%s,innodb_io_capacity=%s,innodb_read_io_threads=%s,innodb_write_io_threads=%s,innodb_buffer_pool_pages_total=%s,innodb_buffer_pool_pages_data=%s,innodb_buffer_pool_pages_dirty=%s,innodb_buffer_pool_pages_flushed=%s,innodb_buffer_pool_pa
ges_free=%s,innodb_buffer_pool_pages_misc=%s,innodb_page_size=%s,innodb_pages_created=%s,innodb_pages_read=%s,innodb_pages_written=%s,innodb_row_lock_current_waits=%s,innodb_buffer_pool_pages_flushed_persecond=%s,innodb_buffer_pool_read_requests_persecond=%s,innodb_buffer_pool_reads_persecond=%s,innodb_buffer_pool_write_requests_persecond=%s,innodb_rows_read_persecond=%s,innodb_rows_inserted_persecond=%s,innodb_rows_updated_persecond=%s,innodb_rows_deleted_persecond=%s,query_cache_hitrate=%s,thread_cache_hitrate=%s,key_buffer_read_rate=%s,key_buffer_write_rate=%s,key_blocks_used_rate=%s,created_tmp_disk_tables_rate=%s,connections_usage_rate=%s,open_files_usage_rate=%s,open_tables_usage_rate=%s,create_time=%s where db_ip=%s and db_port=%s; " sql2 = "insert into mysql_status_his(db_ip,db_port,connect,role,uptime,version,max_connections,max_connect_errors,open_files_limit,table_open_cache,max_tmp_tables,max_heap_table_size,max_allowed_packet,open_files,open_tables,threads_connected,threads_running,threads_created,threads_cached,connections,aborted_clients,aborted_connects,connections_persecond,bytes_received_persecond,bytes_sent_persecond,com_select_persecond,com_insert_persecond,com_update_persecond,com_delete_persecond,com_commit_persecond,com_rollback_persecond,questions_persecond,queries_persecond,transaction_persecond,created_tmp_tables_persecond,created_tmp_disk_tables_persecond,created_tmp_files_persecond,table_locks_immediate_persecond,table_locks_waited_persecond,key_buffer_size,sort_buffer_size,join_buffer_size,key_blocks_not_flushed,key_blocks_unused,key_blocks_used,key_read_requests_persecond,key_reads_persecond,key_write_requests_persecond,key_writes_persecond,innodb_version,innodb_buffer_pool_instances,innodb_buffer_pool_size,innodb_doublewrite,innodb_file_per_table,innodb_flush_log_at_trx_commit,innodb_flush_method,innodb_force_recovery,innodb_io_capacity,innodb_read_io_threads,innodb_write_io_threads,innodb_buffer_pool_pages_total,innodb_buffer_pool_pag
es_data,innodb_buffer_pool_pages_dirty,innodb_buffer_pool_pages_flushed,innodb_buffer_pool_pages_free,innodb_buffer_pool_pages_misc,innodb_page_size,innodb_pages_created,innodb_pages_read,innodb_pages_written,innodb_row_lock_current_waits,innodb_buffer_pool_pages_flushed_persecond,innodb_buffer_pool_read_requests_persecond,innodb_buffer_pool_reads_persecond,innodb_buffer_pool_write_requests_persecond,innodb_rows_read_persecond,innodb_rows_inserted_persecond,innodb_rows_updated_persecond,innodb_rows_deleted_persecond,query_cache_hitrate,thread_cache_hitrate,key_buffer_read_rate,key_buffer_write_rate,key_blocks_used_rate,created_tmp_disk_tables_rate,connections_usage_rate,open_files_usage_rate,open_tables_usage_rate,create_time) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);" param = ( instance.ip, int(instance.port), connect, role, uptime, version, max_connections, max_connect_errors, open_files_limit, table_open_cache, max_tmp_tables, max_heap_table_size, max_allowed_packet, open_files, open_tables, threads_connected, threads_running, threads_created, threads_cached, connections, aborted_clients, aborted_connects, connections_persecond, bytes_received_persecond, bytes_sent_persecond, com_select_persecond, com_insert_persecond, com_update_persecond, com_delete_persecond, com_commit_persecond, com_rollback_persecond, questions_persecond, queries_persecond, transaction_persecond, created_tmp_tables_persecond, created_tmp_disk_tables_persecond, created_tmp_files_persecond, table_locks_immediate_persecond, table_locks_waited_persecond, key_buffer_size, sort_buffer_size, join_buffer_size, key_blocks_not_flushed, key_blocks_unused, key_blocks_used, key_read_requests_persecond, key_reads_persecond, key_write_requests_persecond, 
key_writes_persecond, innodb_version, innodb_buffer_pool_instances, innodb_buffer_pool_size, innodb_doublewrite, innodb_file_per_table, innodb_flush_log_at_trx_commit, innodb_flush_method, innodb_force_recovery, innodb_io_capacity, innodb_read_io_threads, innodb_write_io_threads, innodb_buffer_pool_pages_total, innodb_buffer_pool_pages_data, innodb_buffer_pool_pages_dirty, innodb_buffer_pool_pages_flushed, innodb_buffer_pool_pages_free, innodb_buffer_pool_pages_misc, innodb_page_size, innodb_pages_created, innodb_pages_read, innodb_pages_written, innodb_row_lock_current_waits, innodb_buffer_pool_pages_flushed_persecond, innodb_buffer_pool_read_requests_persecond, innodb_buffer_pool_reads_persecond, innodb_buffer_pool_write_requests_persecond, innodb_rows_read_persecond, innodb_rows_inserted_persecond, innodb_rows_updated_persecond, innodb_rows_deleted_persecond, query_cache_hitrate, thread_cache_hitrate, key_buffer_read_rate, key_buffer_write_rate, key_blocks_used_rate, created_tmp_disk_tables_rate, connections_usage_rate, open_files_usage_rate, open_tables_usage_rate, now_time, instance.ip, int(instance.port)) # print param if not MysqlStatus.objects.filter(db_ip=instance.ip, db_port=instance.port).exists(): mysql_exec(sql_insert, param[:-2]) else: mysql_exec(sql_update, param) mysql_exec(sql2, param[:-2]) if mysql_monitor.check_connections: alarm_type = 6 if mysql_monitor.connection_threshold <= int(threads_connected): if record_alarm(mysql_monitor, alarm_type): sendmail_monitor.delay(instance.id, mysql_monitor.mail_to.split(';'), threads_connected, alarm_type) else: check_ifok(instance, alarm_type) # check mysql connected connected = cur.execute( "select SUBSTRING_INDEX(host,':',1) as connect_server, user connect_user,db connect_db, count(SUBSTRING_INDEX(host,':',1)) as connect_count from information_schema.processlist where db is not null and db!='information_schema' and db !='performance_schema' group by connect_server,connect_user,connect_db;" ) if connected: 
for line in cur.fetchall(): sql = "insert into mysql_connected(db_ip,db_port,connect_server,connect_user,connect_db,connect_count,create_time) values(%s,%s,%s,%s,%s,%s,%s);" param = (instance.ip, int(instance.port), line[0], line[1], line[2], line[3], now_time) mysql_exec(sql, param) #check replication master_thread = cur.execute( "select * from information_schema.processlist where COMMAND = 'Binlog Dump' or COMMAND = 'Binlog Dump GTID';" ) slave_status = cur.execute('show slave status;') datalist = [] if master_thread >= 1: datalist.append(int(1)) if slave_status <> 0: datalist.append(int(1)) else: datalist.append(int(0)) else: datalist.append(int(0)) if slave_status <> 0: datalist.append(int(1)) else: datalist.append(int(0)) sql = "delete from mysql_replication where db_ip=%s and db_port=%s;" param = (instance.ip, instance.port) mysql_exec(sql, param) if slave_status <> 0: gtid_mode = cur.execute( "select * from information_schema.global_variables where variable_name='gtid_mode';" ) result = cur.fetchone() if result: gtid_mode = result[1] else: gtid_mode = 'OFF' datalist.append(gtid_mode) read_only = cur.execute( "select * from information_schema.global_variables where variable_name='read_only';" ) result = cur.fetchone() datalist.append(result[1]) #slave_info=cur.execute('show slave status;') if instance.replchannel <> '0': slave_info = cur.execute( "show slave status for channel '%s';" % (instance.replchannel)) else: slave_info = cur.execute('show slave status;') result = cur.fetchone() # print "result" # print slave_info master_server = result[1] master_port = result[3] slave_io_run = result[10] slave_sql_run = result[11] delay = result[32] current_binlog_file = result[9] current_binlog_pos = result[21] master_binlog_file = result[5] master_binlog_pos = result[6] try: slave_sQL_rnning_state = result[44] except Exception, e: slave_sQL_running_state = "NULL" datalist.append(master_server) datalist.append(master_port) datalist.append(slave_io_run) 
datalist.append(slave_sql_run) datalist.append(delay) datalist.append(current_binlog_file) datalist.append(current_binlog_pos) datalist.append(master_binlog_file) datalist.append(master_binlog_pos) datalist.append(0) datalist.append(slave_sQL_rnning_state) if instance.check_slave: if (slave_io_run == "Yes") and (slave_sql_run == "Yes"): alarm_type = 4 check_ifok(instance, alarm_type) if instance.check_delay: alarm_type = 5 if instance.delay_threshold <= int(delay): if record_alarm(instance, alarm_type): sendmail_monitor.delay( instance.id, mysql_monitor.mail_to.split(';'), delay, alarm_type) else: check_ifok(instance, alarm_type) else: alarm_type = 4 if record_alarm(instance, alarm_type): sendmail_monitor.delay( instance.id, mysql_monitor.mail_to.split(';'), { 'iothread': slave_io_run, 'sqlthread': slave_sql_run }, alarm_type) elif master_thread >= 1: gtid_mode = cur.execute( "select * from information_schema.global_variables where variable_name='gtid_mode';" ) result = cur.fetchone() if result: gtid_mode = result[1] else: gtid_mode = 'OFF' datalist.append(gtid_mode) read_only = cur.execute( "select * from information_schema.global_variables where variable_name='read_only';" ) result = cur.fetchone() datalist.append(result[1]) datalist.append('---') datalist.append('---') datalist.append('---') datalist.append('---') datalist.append('---') datalist.append('---') datalist.append('---') master = cur.execute('show master status;') master_result = cur.fetchone() datalist.append(master_result[0]) datalist.append(master_result[1]) binlog_file = cur.execute('show master logs;') binlogs = 0 if binlog_file: for row in cur.fetchall(): binlogs = binlogs + row[1] datalist.append(binlogs) datalist.append('---')
from django.shortcuts import render, get_object_or_404
from django.http import HttpResponse, HttpResponseRedirect
from django.contrib.auth.decorators import login_required
from django.contrib.auth.hashers import check_password
from accounts.permission import permission_verify
from .dao import Dao
from .const import Const
from .sendmail import MailSender
from .inception import InceptionDao
from .models import sqlreview_role, master_config, workflow
from django.template.loader import get_template
from django.template import Context
# Module-level singletons shared by all view functions in this module.
# NOTE(review): these are created at import time; any connection/config
# errors in their constructors will break module import — confirm intended.
dao = Dao()
inceptionDao = InceptionDao()
mailSender = MailSender()
prpCryptor = prpcrypt()
from dbmanage.myapp.include import function as func
from dbmanage.myapp.models import Db_name
# def login(request):
#     return render(request, 'login.html')
#
# def logout(request):
#     if request.User.username('login_username', False):
#         del request.session['login_username']
#     return render(request, 'login.html')
# Home page; also the page listing all SQL work orders, with pagination support
@login_required(login_url='/accounts/login/')
@permission_verify()
# verify_date # host_id # # remote_backup_path # remote_backup_host_id # binlog_max_datetime # binlog_max_pos # binlog_min_datetime # binlog_min_pos ) backuplog.save() return HttpResponse("Post the backup Data successfully!") else: if request.GET.has_key('instance_id'): pc = prpcrypt() try: db_instance = Db_instance.objects.get( id=request.GET['instance_id']) t = request.GET['t'] pc.key = t[-16:] account = db_instance.db_account_set.get( db_account_role='admin') ip = db_instance.ip port = db_instance.port user = account.user password = pc.encrypt(prpcrypt().decrypt(account.passwd)) database = 'all' data = { 'ip': ip, 'port': port,
def descrypt(values):
    """Decrypt *values* with the project's prpcrypt cipher and return the plaintext."""
    cipher = prpcrypt()
    return cipher.decrypt(values)
def run_process(request, instance, kill_list):
    """Kill the MySQL threads listed in *kill_list* on *instance* and audit each kill.

    request   -- Django request; used for the acting user and client IP.
    instance  -- DB instance model providing ip/port and its admin account.
    kill_list -- iterable of dicts; each must carry 'process_id' and 'host'.

    Returns ((rows, ''), columns) — success payload on the happy path.
    NOTE(review): the final except only assigns results/col without returning;
    the function body appears to continue beyond this view — confirm.
    """
    flag = True
    pc = prpcrypt()
    # for a in insname.db_name_set.all():
    #     for i in a.db_account_set.all():
    #         if i.role == 'admin':
    #             tar_username = i.user
    #             tar_passwd = pc.decrypt(i.passwd)
    #             flag = False
    #             break
    #     if flag == False:
    #         break
    # Look up the instance's admin account; credentials are stored encrypted.
    # NOTE(review): db_account[0] raises IndexError when no admin account
    # exists — the vars().has_key guard below never fires in that case.
    db_account = Db_account.objects.filter(instance=instance,
                                           db_account_role__in=['admin'])
    tar_username = db_account[0].user
    tar_passwd = pc.decrypt(db_account[0].passwd)
    ipaddr = get_client_ip(request)
    group_id = Host.objects.get(ip=instance.ip).group_id
    user = UserInfo.objects.get(username=request.user.username)
    # lastlogin = user.last_login+datetime.timedelta(hours=8)
    # create_time = timezone.now()+datetime.timedelta(hours=8)
    lastlogin = user.last_login
    create_time = timezone.now()
    # print tar_port+tar_passwd+tar_username+tar_host
    # Python 2 idiom: only proceed if the credential lookup above succeeded.
    if vars().has_key('tar_username'):
        try:
            conn = MySQLdb.connect(host=instance.ip,
                                   user=tar_username,
                                   passwd=tar_passwd,
                                   port=int(instance.port),
                                   connect_timeout=5,
                                   charset='utf8')
            conn.select_db('information_schema')
            param = []
            curs = conn.cursor()
            tmpstr = ''
            # Issue one KILL per requested thread, recording an audit row for
            # each; a failure on one thread is ignored so the rest still run.
            for i in kill_list:
                tmpstr = 'kill ' + i['process_id'] + ';'
                #result = curs.executemany(sql,param)
                try:
                    curs.execute(tmpstr)
                    log = Oper_log(host_id=instance.id,
                                   group_id=group_id,
                                   user=request.user.username,
                                   sqltext=json.dumps(i),
                                   sqltype='killed process',
                                   login_time=lastlogin,
                                   create_time=create_time,
                                   dbname='',
                                   dbtag=i['host'],
                                   ipaddr=ipaddr)
                    log.save()
                except Exception, e:
                    # Best-effort: skip threads that are already gone or
                    # cannot be killed; do not abort the whole batch.
                    pass
            conn.commit()
            curs.close()
            conn.close()
            # return 1
            return ([kill_list], ''), ['success']
        except Exception, e:
            # On connection failure, build a wrong_message-style payload
            # instead of raising.
            results, col = ([str(e)], ''), ['error']