def run_process(insname, sql):
    """Execute a batch of ';'-separated SQL statements on instance *insname*.

    Connects with the first account marked role='admin' among the instance's
    databases.  Returns the module's standard (data, columns) shape:
    ``([sql], ''), ['success']`` on success or ``([message], ''), ['error']``
    on failure.  Returns None when no admin account is configured.
    """
    pc = prpcrypt()
    tar_username = None
    tar_passwd = None
    # Pick the first admin account attached to any database of this instance.
    for dbname in insname.db_name_set.all():
        for acc in dbname.db_account_set.all():
            if acc.role == 'admin':
                tar_username = acc.user
                tar_passwd = pc.decrypt(acc.passwd)
                break
        if tar_username is not None:
            break
    if tar_username is None:
        # No admin credentials: keep the original implicit-None behaviour.
        return None
    try:
        conn = MySQLdb.connect(host=insname.ip, user=tar_username,
                               passwd=tar_passwd, port=int(insname.port),
                               connect_timeout=5, charset='utf8')
        conn.select_db('information_schema')
        curs = conn.cursor()
        # Run each statement; per-statement errors are deliberately
        # best-effort (matches the original silent-skip behaviour).
        for statement in sql.split(';')[0:-1]:
            try:
                curs.execute(statement)
            except Exception:
                pass
        conn.commit()
        curs.close()
        conn.close()
        return ([sql], ''), ['success']
    except Exception as e:
        # Wrap connection/commit failure in the standard error payload.
        results, col = ([str(e)], ''), ['error']
        # Bug fix: the original assigned these but never returned them,
        # so callers received None on failure.
        return results, col
def get_query_by_ipaddr(ipaddr, sql):
    """Run *sql* on the monitored host registered under *ipaddr*.

    Looks up the host record, decrypts its stored password and executes the
    query.  Returns ``(rows, column_names)`` on success, or the module's
    error shape ``([message], ''), ['error']`` on any failure.
    """
    try:
        host = get_mysqlhost(ipaddr)
        pc = prpcrypt()
        passwd = pc.decrypt(host.host_pwd)
        conn = MySQLdb.connect(host=host.hosts_ip, user=host.host_user,
                               passwd=passwd, port=int(host.host_port),
                               connect_timeout=5, charset='utf8')
        try:
            cursor = conn.cursor()
            cursor.execute(sql)
            # cursor.description carries one entry per selected column;
            # element 0 is the column name.
            col = [desc[0] for desc in cursor.description]
            result = cursor.fetchall()
            cursor.close()
        finally:
            # Bug fix: close the connection even when the query raises,
            # instead of leaking it.
            conn.close()
        return (result, col)
    except Exception as e:
        return ([str(e)], ''), ['error']
def incep_exec(sqltext, myuser, mypasswd, myhost, myport, mydbname, flag=0):
    """Send *sqltext* through the Inception SQL-review server.

    flag: 0 = check only, 1 = execute, 2 = split.
    Returns ``([message], ''), ['error']`` on a MySQLdb error; on success the
    original returned None (result/field_names were fetched but dropped).
    NOTE(review): the missing success return looks like an oversight —
    confirm against callers before adding one.
    """
    pc = prpcrypt()
    flag = int(flag)
    if flag == 0:
        flagcheck = '--enable-check'
    elif flag == 1:
        flagcheck = '--enable-execute'
    elif flag == 2:
        flagcheck = '--enable-split'
    else:
        # Bug fix: previously any other value crashed later with a
        # NameError on flagcheck; fail fast with a clear message.
        raise ValueError('flag must be 0 (check), 1 (execute) or 2 (split)')
    myuser = myuser.encode('utf8')
    mypasswd = pc.decrypt(mypasswd.encode('utf8'))
    myhost = myhost.encode('utf8')
    myport = int(myport)
    mydbname = mydbname.encode('utf8')
    # Inception protocol: credentials header, then the SQL wrapped between
    # magic start/commit markers.
    sql1 = ("/*--user=%s;--password=%s;--host=%s;%s;--port=%d;*/"
            "inception_magic_start;"
            "use %s;") % (myuser, mypasswd, myhost, flagcheck, myport, mydbname)
    sql2 = 'inception_magic_commit;'
    sql = sql1 + sqltext + sql2
    try:
        conn = MySQLdb.connect(host=incp_host, user=incp_user,
                               passwd=incp_passwd, db='', port=incp_port,
                               use_unicode=True, charset="utf8")
        cur = conn.cursor()
        cur.execute(sql)
        result = cur.fetchall()
        field_names = [i[0] for i in cur.description]
        cur.close()
        conn.close()
    except MySQLdb.Error as e:
        return ([str(e)], ''), ['error']
def check_mysql_host(db):
    """Long-SQL watchdog for one monitored instance.

    Runs the basic monitor, pulls the active process list, and (when
    ``db.check_longsql`` is on) alerts on / optionally kills queries running
    longer than ``db.longsql_time`` seconds.
    NOTE(review): this function is redefined later in the file; the later
    definition shadows this one at import time.
    """
    mon_basic(db)
    py = prpcrypt()
    conn_info = Connect(db.instance.ip, db.instance.port, db.account.user,
                        py.decrypt(db.account.passwd))
    result, col = conn_info.query_mysql(
        "select ID,USER,HOST,DB,COMMAND,TIME,STATE,INFO from processlist where command !='Sleep' and DB not in ('information_schema','sys') and user not in ('system user','event_scheduler') and command!='Binlog Dump'"
    )
    if db.check_longsql == 1:
        try:
            # List comprehension instead of filter(): keeps len() valid on
            # both Python 2 and 3. row[5] is the TIME column.
            longsql_send = [row for row in result if row[5] > db.longsql_time]
        except Exception:
            # Best-effort: a malformed row set degrades to "nothing long".
            longsql_send = ''
        alarm_type = 'long sql'
        if len(longsql_send) > 0:
            flag = record_alarm(db, alarm_type)
            if db.longsql_autokill == 1:
                idlist = ['kill ' + str(row[0]) + ';' for row in longsql_send]
                conn_info.kill_id(idlist)
                # Only mail when this is a newly recorded alarm.
                if flag:
                    sendmail_monitor.delay(db.tag + '-LongSql_AutoKilled',
                                           db.mail_to.split(';'),
                                           longsql_send, alarm_type)
            else:
                sendmail_monitor.delay(db.tag + '-LongSql_List',
                                       db.mail_to.split(';'),
                                       longsql_send, alarm_type)
        else:
            check_ifok(db, alarm_type)
def parse_binlogfirst(insname, binname, countnum):
    """Parse binlog file *binname* of instance *insname* into SQL statements.

    Uses the instance's admin account (decrypted) to drive binlog2sql,
    reading at most *countnum* events from position 4.  Returns the list of
    reconstructed SQL statements.

    Raises ValueError when no admin account is configured (the original
    crashed with a NameError in that case).
    """
    pc = prpcrypt()
    tar_username = None
    tar_passwd = None
    # Find the first admin account attached to any database of the instance.
    for dbname in insname.db_name_set.all():
        for acc in dbname.db_account_set.all():
            if acc.role == 'admin':
                tar_username = acc.user
                tar_passwd = pc.decrypt(acc.passwd)
                break
        if tar_username is not None:
            break
    if tar_username is None:
        raise ValueError('no admin account configured for instance %s'
                         % insname.ip)
    connectionSettings = {
        'host': insname.ip,
        'port': int(insname.port),
        'user': tar_username,
        'passwd': tar_passwd
    }
    binlogsql = binlog2sql.Binlog2sql(
        connectionSettings=connectionSettings, startFile=binname, startPos=4,
        endFile='', endPos=0, startTime='', stopTime='', only_schemas='',
        only_tables='', nopk=False, flashback=False, stopnever=False,
        countnum=countnum)
    binlogsql.process_binlog()
    return binlogsql.sqllist
def get_dbcon(a):
    """Resolve the connection target for database record *a*.

    Prefers an instance with role='read'; falls back to the first
    'write'/'all' instance when no read replica exists.
    NOTE(review): the computed host/port/dbname are never returned — this
    block looks truncated; confirm against the full source.
    """
    # a = Db_name.objects.get(dbtag=hosttag)
    tar_dbname = a.dbname
    pc = prpcrypt()
    try:
        # [0] raises IndexError when no read replica exists -> except below.
        if a.instance.all().filter(role='read')[0]:
            tar_host = a.instance.all().filter(role='read')[0].ip
            tar_port = a.instance.all().filter(role='read')[0].port
    except Exception, e:
        # No read replica: use the first writable/all-purpose instance.
        tar_host = a.instance.filter(role__in=['write', 'all'])[0].ip
        tar_port = a.instance.filter(role__in=['write', 'all'])[0].port
def get_advice(hosttag, sql, request):
    """Run SQLAdvisor on *sql* for the database tagged *hosttag*.

    Resolves host/port (preferring a read replica), picks a read-capable
    account for the requesting user (falling back to the public account),
    then shells out to the sqladvisor binary and collects its UTF-8 output
    lines into ``results``.
    NOTE(review): no return statement is visible here — the block may be
    truncated; confirm against the full source.
    """
    if advisor_switch != 0:
        # Resolve the target dbname record.
        a = Db_name.objects.filter(dbtag=hosttag)[0]
        # a = Db_name.objects.get(dbtag=hosttag)
        tar_dbname = a.dbname
        # Prefer a read replica (role='read') when one is configured.
        try:
            if a.instance.all().filter(role='read')[0]:
                tar_host = a.instance.all().filter(role='read')[0].ip
                tar_port = a.instance.all().filter(role='read')[0].port
        # No read replica configured: fall back to the first 'all' instance.
        except Exception, e:
            tar_host = a.instance.filter(role='all')[0].ip
            tar_port = a.instance.filter(role='all')[0].port
            # tar_host = a.instance.all()[0].ip
            # tar_port = a.instance.all()[0].port
        pc = prpcrypt()
        for i in a.db_account_set.all():
            if i.role != 'write' and i.role != 'admin':
                # find the specified account for the user
                if i.account.all().filter(username=request.user.username):
                    tar_username = i.user
                    tar_passwd = pc.decrypt(i.passwd)
                    break
        # not find specified account for the user ,specified the public account to the user
        if not vars().has_key('tar_username'):
            for i in a.db_account_set.all():
                if i.role != 'write' and i.role != 'admin':
                    # find the specified account for the user
                    if i.account.all().filter(username=public_user):
                        tar_username = i.user
                        tar_passwd = pc.decrypt(i.passwd)
                        break
        # print tar_port+tar_passwd+tar_username+tar_host
        # NOTE(review): *sql* is interpolated into a shell command below and
        # the quoting here is only partial — safe only for trusted input.
        sql = sql.replace('"', '\\"').replace('`', '\`')[:-1]
        cmd = sqladvisor + ' -u %s -p %s -P %d -h %s -d %s -v 1 -q "%s"' % (
            tar_username, tar_passwd, int(tar_port), tar_host, tar_dbname, sql)
        # print cmd
        status, result_tmp = commands.getstatusoutput(cmd)
        # print result_tmp
        result_list = result_tmp.split('\n')
        results = ''
        # Keep only the output lines that decode as valid UTF-8.
        for i in result_list:
            try:
                unicode(i, 'utf-8')
                results = results + '\n' + i
            except Exception, e:
                pass
def create_acc(tags, user, passwd, dbtagli, acclist, role):
    """Create a Db_account and attach it to the databases in *dbtagli*.

    The password is stored encrypted via prpcrypt.
    NOTE(review): ``userli`` is queried but never used in this view —
    probably consumed by code that follows; confirm the block is not
    truncated.
    """
    # Only create when all mandatory fields are non-empty.
    if len(tags) > 0 and len(user) > 0 and len(passwd) > 0:
        py = prpcrypt()
        account = Db_account(tags=tags, user=user,
                             passwd=py.encrypt(passwd), role=role)
        account.save()
        dbli = Db_name.objects.filter(dbtag__in=dbtagli)
        userli = User.objects.filter(username__in=acclist)
        for i in dbli:
            try:
                # Best-effort: an already-linked database is silently skipped.
                account.dbname.add(i)
            except Exception, e:
                pass
def get_dupreport_byins(insname):
    """Run pt-duplicate-key-checker against instance *insname*.

    Uses the first role='admin' account found among the instance's
    databases.  Returns the tool's text report, or None when no admin
    account is configured (the original fell off the end the same way).
    """
    pc = prpcrypt()
    tar_username = None
    tar_passwd = None
    for dbname in insname.db_name_set.all():
        for acc in dbname.db_account_set.all():
            if acc.role == 'admin':
                tar_username = acc.user
                tar_passwd = pc.decrypt(acc.passwd)
                break
        if tar_username is not None:
            break
    if tar_username is None:
        return None
    # NOTE(review): credentials and host are interpolated into a shell
    # command — safe only because account data comes from our own database.
    cmd = incept.pttool_path + '/pt-duplicate-key-checker' + \
        ' -u %s -p %s -P %d -h %s ' % (tar_username, tar_passwd,
                                       int(insname.port), insname.ip)
    dup_result = commands.getoutput(cmd)
    return dup_result
def monitor_h():
    """Spawn one daemon thread per 'using' MySQL host running chk_mysql_h.

    Decrypts each host's stored password, starts all checks concurrently,
    and blocks until every thread finishes.
    """
    list_hosts = func.get_mysql_hosts_list_run("using")
    pc = prpcrypt()
    threads = []
    for host in list_hosts:
        worker = threading.Thread(
            target=chk_mysql_h,
            args=(host.hosts_ip, host.host_port, host.host_user,
                  pc.decrypt(host.host_pwd)))
        # daemon=True so a stuck check cannot keep the process alive.
        # (attribute assignment replaces the deprecated setDaemon()).
        worker.daemon = True
        threads.append(worker)
    for t in threads:
        t.start()
    # Wait for all checks to complete.
    for t in threads:
        t.join()
def task_running_status(idnum):
    """Report progress / result data for Task *idnum*.

    - executed / executed failed: returns the matching Incep_error_log rows
      plus column names (leading ID column dropped).
    - running with no sqlsha: queries the live processlist on the target.
    - running with sqlsha entries: returns incep_getstatus progress for the
      first statement that reports rows, with the SQL text appended as an
      extra 'SQLTEXT' column; otherwise a 'wait in running queue' stub.
    """
    task = Task.objects.get(id=idnum)
    if task.status == 'executed failed' or task.status == 'executed':
        data = Incep_error_log.objects.filter(
            create_time=task.create_time).filter(
                finish_time=task.update_time).order_by("-myid")
        col = [f.name for f in Incep_error_log._meta.get_fields()]
        # Drop the leading "ID" column.
        del col[0]
        return data, col
    elif task.status == 'running':
        text = task.sqlsha
        if text == '':
            try:
                tar_username, tar_passwd, tar_host, tar_port, tar_dbname = \
                    get_db_info(task.dbtag)
                pc = prpcrypt()
                tar_passwd = pc.decrypt(tar_passwd)
                # NOTE(review): dbname is concatenated into SQL; assumed to
                # be a trusted internal value.
                sql = "select * from information_schema.processlist where Db='" \
                    + tar_dbname + "'" + " and USER='******' order by TIME desc"
                return func.mysql_query(sql, tar_username, tar_passwd,
                                        tar_host, int(tar_port),
                                        'information_schema')
            except Exception as e:
                return (['get info wrong'], ''), ['info']
        else:
            # Sentinel replaces the original vars().has_key('data') check.
            data = None
            cols = None
            for chunk in text.split('^^'):
                parts = chunk.split('*')
                if len(parts) >= 2:
                    sqlsha = '*' + parts[1]
                    datalist, collist, mynum = incep_getstatus(sqlsha)
                    if mynum > 0:
                        # Append the SQL text as a trailing column on each
                        # row (only the last assignment survives, matching
                        # the original loop).
                        for d in datalist:
                            data = d + (parts[0], )
                        collist.append('SQLTEXT')
                        cols = collist
                        data = (data, )
                        break
            if data is None:
                data = (['wait in running queue'], '')
                cols = ['info']
            return data, cols
def check_mysql_host(db):
    """Process-list watchdog for one monitored instance.

    Runs the basic monitor, pulls the non-idle process list, and optionally:
    alerts on / kills long-running queries, alerts when the number of active
    queries exceeds a threshold, and archives the snapshot into
    Mysql_processlist.
    NOTE(review): this redefinition shadows the earlier check_mysql_host in
    this module — only this one is effective at import time.
    """
    mon_basic(db)
    py = prpcrypt()
    conn_info = Connect(db.instance.ip, db.instance.port, db.account.user,
                        py.decrypt(db.account.passwd))
    result, col = conn_info.query_mysql("select ID,USER,HOST,DB,COMMAND,TIME,STATE,INFO from processlist where command !='Sleep' and DB not in ('information_schema','sys') and user not in ('system user','event_scheduler') and command!='Binlog Dump'")
    if db.check_longsql == 1:
        # List comprehension instead of filter(): keeps len() valid on both
        # Python 2 and 3. row[5] is the TIME column.
        longsql_send = [row for row in result if row[5] > db.longsql_time]
        alarm_type = 'long sql'
        if len(longsql_send) > 0:
            flag = record_alarm(db, alarm_type)
            if db.longsql_autokill == 1:
                idlist = ['kill ' + str(row[0]) + ';' for row in longsql_send]
                conn_info.kill_id(idlist)
                # Only mail when this is a newly recorded alarm.
                if flag:
                    sendmail_monitor.delay(db.tag + '-LongSql_AutoKilled',
                                           db.mail_to.split(';'),
                                           longsql_send, alarm_type)
            else:
                sendmail_monitor.delay(db.tag + '-LongSql_List',
                                       db.mail_to.split(';'),
                                       longsql_send, alarm_type)
        else:
            check_ifok(db, alarm_type)
    if db.check_active == 1:
        alarm_type = 'active sql'
        if len(result) > db.active_threshold:
            if record_alarm(db, alarm_type):
                sendmail_monitor.delay(db.tag + '-ActiveSql_List',
                                       db.mail_to.split(';'),
                                       result, alarm_type)
        else:
            check_ifok(db, alarm_type)
    # Archive the snapshot for later inspection.
    if len(result) > 0:
        insertlist = [Mysql_processlist(db_ip=db.instance.ip,
                                        db_port=db.instance.port,
                                        conn_id=x[0], user=x[1], host=x[2],
                                        db=x[3], command=x[4], time=x[5],
                                        state=x[6], info=x[7])
                      for x in result]
        Mysql_processlist.objects.bulk_create(insertlist)
def get_process_data(insname, sql):
    """Run *sql* against information_schema on instance *insname*.

    Uses the first role='admin' account found among the instance's
    databases.  Returns ``(results, col)`` in the module's standard shape;
    failures are wrapped as ``([message], ''), ['error']``.  Returns None
    when no admin account is configured.
    """
    pc = prpcrypt()
    tar_username = None
    tar_passwd = None
    for dbname in insname.db_name_set.all():
        for acc in dbname.db_account_set.all():
            if acc.role == 'admin':
                tar_username = acc.user
                tar_passwd = pc.decrypt(acc.passwd)
                break
        if tar_username is not None:
            break
    if tar_username is None:
        # No admin credentials: keep the original implicit-None behaviour.
        return None
    try:
        results, col = mysql_query(sql, tar_username, tar_passwd,
                                   insname.ip, int(insname.port),
                                   'information_schema')
    except Exception as e:
        # Wrap any failure as a standard error payload instead of raising.
        results, col = ([str(e)], ''), ['error']
    return results, col
def set_acc(old_account, tags, user, passwd, dbtagli, acclist, role):
    """Update an existing Db_account record in place.

    Overwrites role unconditionally; tags/user/passwd only when non-empty
    (password is re-encrypted).  Then detaches all current database and
    account links and re-attaches the databases in *dbtagli*.
    NOTE(review): *acclist* links are cleared but never re-added here —
    confirm the block is not truncated.
    """
    old_account.role = role
    if len(tags) > 0:
        old_account.tags = tags
        old_account.save()
    if len(user) > 0:
        old_account.user = user
        old_account.save()
    if len(passwd) > 0:
        py = prpcrypt()
        old_account.passwd = py.encrypt(passwd)
        old_account.save()
    # Detach all currently linked databases and user accounts.
    for i in old_account.dbname.all():
        old_account.dbname.remove(i)
        old_account.save()
    for i in old_account.account.all():
        old_account.account.remove(i)
        old_account.save()
    # Re-link the requested databases (best-effort per item).
    for i in Db_name.objects.filter(dbtag__in=dbtagli):
        try:
            old_account.dbname.add(i)
        except Exception, e:
            pass
#确认dbname a = Db_name.objects.filter(dbtag=hosttag)[0] #a = Db_name.objects.get(dbtag=hosttag) tar_dbname = a.dbname #如果instance中有备库role='read',则选择从备库读取 try: if a.instance.all().filter(role='read')[0]: tar_host = a.instance.all().filter(role='read')[0].ip tar_port = a.instance.all().filter(role='read')[0].port #如果没有设置或没有role=read,则选择第一个读到的all实例读取 except Exception, e: tar_host = a.instance.filter(role='all')[0].ip tar_port = a.instance.filter(role='all')[0].port # tar_host = a.instance.all()[0].ip # tar_port = a.instance.all()[0].port pc = prpcrypt() for i in a.db_account_set.all(): if i.role != 'write' and i.role != 'admin': # find the specified account for the user if i.account.all().filter(username=useraccount): tar_username = i.user tar_passwd = pc.decrypt(i.passwd) break #not find specified account for the user ,specified the public account to the user if not vars().has_key('tar_username'): for i in a.db_account_set.all(): if i.role != 'write' and i.role != 'admin': # find the specified account for the user if i.account.all().filter(username=public_user): tar_username = i.user tar_passwd = pc.decrypt(i.passwd)
def encrypt_passwd():
    """Encrypt the stored password of every Db_account record in place."""
    cipher = prpcrypt()
    for account in Db_account.objects.all():
        account.passwd = cipher.encrypt(account.passwd)
        account.save()
info = "Please check your ip and port input!" return info try: user = User.objects.get(username=func.public_user) dbname = Db_name(dbtag=newdbtag, dbname=newdbname) dbname.save() dbname.instance.add(insname) #for rollback except Exception, e: info = "CREATE dbname Failed!" if flag == 1: insname.delete() return info tags = newdbtag + '+p' py = prpcrypt() if len(newname_all) > 0 and len(newpass_all) > 0: try: newpass_all = py.encrypt(newpass_all) all_account = Db_account(tags=tags, user=newname_all, passwd=newpass_all, role='all') all_account.save() all_account.account.add(user) all_account.dbname.add(dbname) # for rollback except Exception, e: info = "CREATE Failed!" dbname.delete() if flag == 1:
def descrypt(values):
    """Return the plaintext obtained by decrypting *values* with prpcrypt."""
    return prpcrypt().decrypt(values)
def mon_basic(db): try: py = prpcrypt() conn = MySQLdb.connect(host=db.instance.ip, user=db.account.user, passwd=py.decrypt(db.account.passwd), port=int(db.instance.port), connect_timeout=3, charset='utf8') conn.autocommit(True) cur = conn.cursor() conn.select_db('information_schema') ############################# CHECK MYSQL #################################################### mysql_variables = get_mysql_variables(cur) mysql_status = get_mysql_status(cur) time.sleep(1) mysql_status_2 = get_mysql_status(cur) ############################# GET VARIABLES ################################################### version = get_item(mysql_variables, 'version') key_buffer_size = get_item(mysql_variables, 'key_buffer_size') sort_buffer_size = get_item(mysql_variables, 'sort_buffer_size') join_buffer_size = get_item(mysql_variables, 'join_buffer_size') max_connections = get_item(mysql_variables, 'max_connections') max_connect_errors = get_item(mysql_variables, 'max_connect_errors') open_files_limit = get_item(mysql_variables, 'open_files_limit') table_open_cache = get_item(mysql_variables, 'table_open_cache') max_tmp_tables = get_item(mysql_variables, 'max_tmp_tables') max_heap_table_size = get_item(mysql_variables, 'max_heap_table_size') max_allowed_packet = get_item(mysql_variables, 'max_allowed_packet') thread_cache_size = get_item(mysql_variables, 'thread_cache_size') ############################# GET INNODB INFO ################################################## # innodb variables innodb_version = get_item(mysql_variables, 'innodb_version') innodb_buffer_pool_instances = get_item( mysql_variables, 'innodb_buffer_pool_instances') innodb_buffer_pool_size = get_item(mysql_variables, 'innodb_buffer_pool_size') innodb_doublewrite = get_item(mysql_variables, 'innodb_doublewrite') innodb_file_per_table = get_item(mysql_variables, 'innodb_file_per_table') innodb_flush_log_at_trx_commit = get_item( mysql_variables, 'innodb_flush_log_at_trx_commit') innodb_flush_method = 
get_item(mysql_variables, 'innodb_flush_method') innodb_force_recovery = get_item(mysql_variables, 'innodb_force_recovery') innodb_io_capacity = get_item(mysql_variables, 'innodb_io_capacity') innodb_read_io_threads = get_item(mysql_variables, 'innodb_read_io_threads') innodb_write_io_threads = get_item(mysql_variables, 'innodb_write_io_threads') # innodb status innodb_buffer_pool_pages_total = int( get_item(mysql_status, 'Innodb_buffer_pool_pages_total')) innodb_buffer_pool_pages_data = int( get_item(mysql_status, 'Innodb_buffer_pool_pages_data')) innodb_buffer_pool_pages_dirty = int( get_item(mysql_status, 'Innodb_buffer_pool_pages_dirty')) innodb_buffer_pool_pages_flushed = int( get_item(mysql_status, 'Innodb_buffer_pool_pages_flushed')) innodb_buffer_pool_pages_free = int( get_item(mysql_status, 'Innodb_buffer_pool_pages_free')) innodb_buffer_pool_pages_misc = int( get_item(mysql_status, 'Innodb_buffer_pool_pages_misc')) innodb_buffer_pool_wait_free = int( get_item(mysql_status, 'Innodb_buffer_pool_wait_free')) if innodb_buffer_pool_pages_misc > 18046744073709540000: innodb_buffer_pool_pages_misc = 0 innodb_page_size = int(get_item(mysql_status, 'Innodb_page_size')) innodb_pages_created = int( get_item(mysql_status, 'Innodb_pages_created')) innodb_pages_read = int(get_item(mysql_status, 'Innodb_pages_read')) innodb_pages_written = int( get_item(mysql_status, 'Innodb_pages_written')) innodb_row_lock_current_waits = int( get_item(mysql_status, 'Innodb_row_lock_current_waits')) innodb_row_lock_time = int( get_item(mysql_status, 'Innodb_row_lock_time')) innodb_row_lock_waits = int( get_item(mysql_status, 'Innodb_row_lock_waits')) innodb_log_waits = int(get_item(mysql_status, 'Innodb_log_waits')) # innodb persecond info innodb_buffer_pool_read_requests_persecond = int( get_item( mysql_status_2, 'Innodb_buffer_pool_read_requests')) - int( get_item(mysql_status, 'Innodb_buffer_pool_read_requests')) innodb_buffer_pool_reads_persecond = int( get_item(mysql_status_2, 
'Innodb_buffer_pool_reads')) - int( get_item(mysql_status, 'Innodb_buffer_pool_reads')) innodb_buffer_pool_write_requests_persecond = int( get_item(mysql_status_2, 'Innodb_buffer_pool_write_requests') ) - int(get_item(mysql_status, 'Innodb_buffer_pool_write_requests')) innodb_buffer_pool_pages_flushed_persecond = int( get_item( mysql_status_2, 'Innodb_buffer_pool_pages_flushed')) - int( get_item(mysql_status, 'Innodb_buffer_pool_pages_flushed')) innodb_rows_deleted_persecond = int( get_item(mysql_status_2, 'Innodb_rows_deleted')) - int( get_item(mysql_status, 'Innodb_rows_deleted')) innodb_rows_inserted_persecond = int( get_item(mysql_status_2, 'Innodb_rows_inserted')) - int( get_item(mysql_status, 'Innodb_rows_inserted')) innodb_rows_read_persecond = int( get_item(mysql_status_2, 'Innodb_rows_read')) - int( get_item(mysql_status, 'Innodb_rows_read')) innodb_rows_updated_persecond = int( get_item(mysql_status_2, 'Innodb_rows_updated')) - int( get_item(mysql_status, 'Innodb_rows_updated')) ############################# GET STATUS ################################################## connect = 1 uptime = get_item(mysql_status, 'Uptime') open_files = get_item(mysql_status, 'Open_files') open_tables = get_item(mysql_status, 'Open_tables') opened_tables = get_item(mysql_status, 'Opened_tables') threads_connected = get_item(mysql_status, 'Threads_connected') threads_running = get_item(mysql_status, 'Threads_running') threads_created = get_item(mysql_status, 'Threads_created') threads_cached = get_item(mysql_status, 'Threads_cached') # threads_waits = 20 max_used_connections = get_item(mysql_status, 'Max_used_connections') connections = get_item(mysql_status, 'Connections') aborted_clients = get_item(mysql_status, 'Aborted_clients') aborted_connects = get_item(mysql_status, 'Aborted_connects') key_blocks_not_flushed = get_item(mysql_status, 'Key_blocks_not_flushed') key_blocks_unused = get_item(mysql_status, 'Key_blocks_unused') key_blocks_used = get_item(mysql_status, 
'Key_blocks_used') slow_queries = int(get_item(mysql_status, 'Slow_queries')) ############################# GET STATUS PERSECOND ################################################## threads_created_percond = int( get_item(mysql_status_2, 'Threads_created')) - int(threads_created) connections_persecond = int(get_item( mysql_status_2, 'Connections')) - int( get_item(mysql_status, 'Connections')) bytes_received_persecond = ( int(get_item(mysql_status_2, 'Bytes_received')) - int(get_item(mysql_status, 'Bytes_received'))) / 1024 bytes_sent_persecond = ( int(get_item(mysql_status_2, 'Bytes_sent')) - int(get_item(mysql_status, 'Bytes_sent'))) / 1024 com_select_persecond = int(get_item( mysql_status_2, 'Com_select')) - int( get_item(mysql_status, 'Com_select')) com_insert_persecond = int(get_item( mysql_status_2, 'Com_insert')) - int( get_item(mysql_status, 'Com_insert')) com_update_persecond = int(get_item( mysql_status_2, 'Com_update')) - int( get_item(mysql_status, 'Com_update')) com_delete_persecond = int(get_item( mysql_status_2, 'Com_delete')) - int( get_item(mysql_status, 'Com_delete')) com_commit_persecond = int(get_item( mysql_status_2, 'Com_commit')) - int( get_item(mysql_status, 'Com_commit')) com_rollback_persecond = int(get_item( mysql_status_2, 'Com_rollback')) - int( get_item(mysql_status, 'Com_rollback')) questions_persecond = int(get_item(mysql_status_2, 'Questions')) - int( get_item(mysql_status, 'Questions')) queries_persecond = int(get_item(mysql_status_2, 'Queries')) - int( get_item(mysql_status, 'Queries')) transaction_persecond = ( int(get_item(mysql_status_2, 'Com_commit')) + int(get_item(mysql_status_2, 'Com_rollback'))) - ( int(get_item(mysql_status, 'Com_commit')) + int(get_item(mysql_status, 'Com_rollback'))) created_tmp_disk_tables_persecond = int( get_item(mysql_status_2, 'Created_tmp_disk_tables')) - int( get_item(mysql_status, 'Created_tmp_disk_tables')) created_tmp_files_persecond = int( get_item(mysql_status_2, 'Created_tmp_files')) - int( 
get_item(mysql_status, 'Created_tmp_files')) created_tmp_tables_persecond = int( get_item(mysql_status_2, 'Created_tmp_tables')) - int( get_item(mysql_status, 'Created_tmp_tables')) table_locks_immediate_persecond = int( get_item(mysql_status_2, 'Table_locks_immediate')) - int( get_item(mysql_status, 'Table_locks_immediate')) table_locks_waited_persecond = int( get_item(mysql_status_2, 'Table_locks_waited')) - int( get_item(mysql_status, 'Table_locks_waited')) key_read_requests_persecond = int( get_item(mysql_status_2, 'Key_read_requests')) - int( get_item(mysql_status, 'Key_read_requests')) key_reads_persecond = int(get_item(mysql_status_2, 'Key_reads')) - int( get_item(mysql_status, 'Key_reads')) key_write_requests_persecond = int( get_item(mysql_status_2, 'Key_write_requests')) - int( get_item(mysql_status, 'Key_write_requests')) key_writes_persecond = int(get_item( mysql_status_2, 'Key_writes')) - int( get_item(mysql_status, 'Key_writes')) ############################# GET MYSQL HITRATE ################################################## if (string.atof(get_item(mysql_status, 'Qcache_hits')) + string.atof(get_item(mysql_status, 'Com_select'))) <> 0: query_cache_hitrate = string.atof( get_item(mysql_status, 'Qcache_hits')) / ( string.atof(get_item(mysql_status, 'Qcache_hits')) + string.atof(get_item(mysql_status, 'Com_select'))) query_cache_hitrate = "%9.2f" % query_cache_hitrate else: query_cache_hitrate = 0 if string.atof(get_item(mysql_status, 'Connections')) <> 0: thread_cache_hitrate = 1 - string.atof( get_item(mysql_status, 'Threads_created')) / string.atof( get_item(mysql_status, 'Connections')) thread_cache_hitrate = "%9.2f" % thread_cache_hitrate else: thread_cache_hitrate = 0 if string.atof(get_item(mysql_status, 'Key_read_requests')) <> 0: key_buffer_read_rate = 1 - string.atof( get_item(mysql_status, 'Key_reads')) / string.atof( get_item(mysql_status, 'Key_read_requests')) key_buffer_read_rate = "%9.2f" % key_buffer_read_rate else: 
key_buffer_read_rate = 0 if string.atof(get_item(mysql_status, 'Key_write_requests')) <> 0: key_buffer_write_rate = 1 - string.atof( get_item(mysql_status, 'Key_writes')) / string.atof( get_item(mysql_status, 'Key_write_requests')) key_buffer_write_rate = "%9.2f" % key_buffer_write_rate else: key_buffer_write_rate = 0 if (string.atof(get_item(mysql_status, 'Key_blocks_used')) + string.atof(get_item(mysql_status, 'Key_blocks_unused'))) <> 0: key_blocks_used_rate = string.atof( get_item(mysql_status, 'Key_blocks_used')) / ( string.atof(get_item(mysql_status, 'Key_blocks_used')) + string.atof(get_item(mysql_status, 'Key_blocks_unused'))) key_blocks_used_rate = "%9.2f" % key_blocks_used_rate else: key_blocks_used_rate = 0 if (string.atof(get_item(mysql_status, 'Created_tmp_disk_tables')) + string.atof(get_item(mysql_status, 'Created_tmp_tables'))) <> 0: created_tmp_disk_tables_rate = string.atof( get_item(mysql_status, 'Created_tmp_disk_tables') ) / (string.atof(get_item(mysql_status, 'Created_tmp_disk_tables')) + string.atof(get_item(mysql_status, 'Created_tmp_tables'))) created_tmp_disk_tables_rate = "%9.2f" % created_tmp_disk_tables_rate else: created_tmp_disk_tables_rate = 0 if string.atof(max_connections) <> 0: connections_usage_rate = string.atof( threads_connected) / string.atof(max_connections) connections_usage_rate = "%9.2f" % connections_usage_rate else: connections_usage_rate = 0 if string.atof(open_files_limit) <> 0: open_files_usage_rate = string.atof(open_files) / string.atof( open_files_limit) open_files_usage_rate = "%9.2f" % open_files_usage_rate else: open_files_usage_rate = 0 if string.atof(table_open_cache) <> 0: open_tables_usage_rate = string.atof(open_tables) / string.atof( table_open_cache) open_tables_usage_rate = "%9.2f" % open_tables_usage_rate else: open_tables_usage_rate = 0 # repl slave_status = cur.execute('show slave status;') if slave_status <> 0: role = 'slave' role_new = 's' else: role = 'master' role_new = 'm' now_time = 
datetime.datetime.now() ############################# INSERT INTO SERVER ################################################## sql = "replace into mysql_status(db_ip,db_port,connect,role,uptime,version,max_connections,max_connect_errors,open_files_limit,table_open_cache,max_tmp_tables,max_heap_table_size,max_allowed_packet,open_files,open_tables,threads_connected,threads_running,threads_created,threads_cached,connections,aborted_clients,aborted_connects,connections_persecond,bytes_received_persecond,bytes_sent_persecond,com_select_persecond,com_insert_persecond,com_update_persecond,com_delete_persecond,com_commit_persecond,com_rollback_persecond,questions_persecond,queries_persecond,transaction_persecond,created_tmp_tables_persecond,created_tmp_disk_tables_persecond,created_tmp_files_persecond,table_locks_immediate_persecond,table_locks_waited_persecond,key_buffer_size,sort_buffer_size,join_buffer_size,key_blocks_not_flushed,key_blocks_unused,key_blocks_used,key_read_requests_persecond,key_reads_persecond,key_write_requests_persecond,key_writes_persecond,innodb_version,innodb_buffer_pool_instances,innodb_buffer_pool_size,innodb_doublewrite,innodb_file_per_table,innodb_flush_log_at_trx_commit,innodb_flush_method,innodb_force_recovery,innodb_io_capacity,innodb_read_io_threads,innodb_write_io_threads,innodb_buffer_pool_pages_total,innodb_buffer_pool_pages_data,innodb_buffer_pool_pages_dirty,innodb_buffer_pool_pages_flushed,innodb_buffer_pool_pages_free,innodb_buffer_pool_pages_misc,innodb_page_size,innodb_pages_created,innodb_pages_read,innodb_pages_written,innodb_row_lock_current_waits,innodb_buffer_pool_pages_flushed_persecond,innodb_buffer_pool_read_requests_persecond,innodb_buffer_pool_reads_persecond,innodb_buffer_pool_write_requests_persecond,innodb_rows_read_persecond,innodb_rows_inserted_persecond,innodb_rows_updated_persecond,innodb_rows_deleted_persecond,query_cache_hitrate,thread_cache_hitrate,key_buffer_read_rate,key_buffer_write_rate,key_blocks_used_rate,crea
ted_tmp_disk_tables_rate,connections_usage_rate,open_files_usage_rate,open_tables_usage_rate,create_time) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);" sql2 = "insert into mysql_status_his(db_ip,db_port,connect,role,uptime,version,max_connections,max_connect_errors,open_files_limit,table_open_cache,max_tmp_tables,max_heap_table_size,max_allowed_packet,open_files,open_tables,threads_connected,threads_running,threads_created,threads_cached,connections,aborted_clients,aborted_connects,connections_persecond,bytes_received_persecond,bytes_sent_persecond,com_select_persecond,com_insert_persecond,com_update_persecond,com_delete_persecond,com_commit_persecond,com_rollback_persecond,questions_persecond,queries_persecond,transaction_persecond,created_tmp_tables_persecond,created_tmp_disk_tables_persecond,created_tmp_files_persecond,table_locks_immediate_persecond,table_locks_waited_persecond,key_buffer_size,sort_buffer_size,join_buffer_size,key_blocks_not_flushed,key_blocks_unused,key_blocks_used,key_read_requests_persecond,key_reads_persecond,key_write_requests_persecond,key_writes_persecond,innodb_version,innodb_buffer_pool_instances,innodb_buffer_pool_size,innodb_doublewrite,innodb_file_per_table,innodb_flush_log_at_trx_commit,innodb_flush_method,innodb_force_recovery,innodb_io_capacity,innodb_read_io_threads,innodb_write_io_threads,innodb_buffer_pool_pages_total,innodb_buffer_pool_pages_data,innodb_buffer_pool_pages_dirty,innodb_buffer_pool_pages_flushed,innodb_buffer_pool_pages_free,innodb_buffer_pool_pages_misc,innodb_page_size,innodb_pages_created,innodb_pages_read,innodb_pages_written,innodb_row_lock_current_waits,innodb_buffer_pool_pages_flushed_persecond,innodb_buffer_pool_read_requests_persecond,innodb_buffer_pool_reads_persecond,innodb_b
uffer_pool_write_requests_persecond,innodb_rows_read_persecond,innodb_rows_inserted_persecond,innodb_rows_updated_persecond,innodb_rows_deleted_persecond,query_cache_hitrate,thread_cache_hitrate,key_buffer_read_rate,key_buffer_write_rate,key_blocks_used_rate,created_tmp_disk_tables_rate,connections_usage_rate,open_files_usage_rate,open_tables_usage_rate,create_time) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);" param = ( db.instance.ip, int(db.instance.port), connect, role, uptime, version, max_connections, max_connect_errors, open_files_limit, table_open_cache, max_tmp_tables, max_heap_table_size, max_allowed_packet, open_files, open_tables, threads_connected, threads_running, threads_created, threads_cached, connections, aborted_clients, aborted_connects, connections_persecond, bytes_received_persecond, bytes_sent_persecond, com_select_persecond, com_insert_persecond, com_update_persecond, com_delete_persecond, com_commit_persecond, com_rollback_persecond, questions_persecond, queries_persecond, transaction_persecond, created_tmp_tables_persecond, created_tmp_disk_tables_persecond, created_tmp_files_persecond, table_locks_immediate_persecond, table_locks_waited_persecond, key_buffer_size, sort_buffer_size, join_buffer_size, key_blocks_not_flushed, key_blocks_unused, key_blocks_used, key_read_requests_persecond, key_reads_persecond, key_write_requests_persecond, key_writes_persecond, innodb_version, innodb_buffer_pool_instances, innodb_buffer_pool_size, innodb_doublewrite, innodb_file_per_table, innodb_flush_log_at_trx_commit, innodb_flush_method, innodb_force_recovery, innodb_io_capacity, innodb_read_io_threads, innodb_write_io_threads, innodb_buffer_pool_pages_total, innodb_buffer_pool_pages_data, innodb_buffer_pool_pages_dirty, 
innodb_buffer_pool_pages_flushed, innodb_buffer_pool_pages_free, innodb_buffer_pool_pages_misc, innodb_page_size, innodb_pages_created, innodb_pages_read, innodb_pages_written, innodb_row_lock_current_waits, innodb_buffer_pool_pages_flushed_persecond, innodb_buffer_pool_read_requests_persecond, innodb_buffer_pool_reads_persecond, innodb_buffer_pool_write_requests_persecond, innodb_rows_read_persecond, innodb_rows_inserted_persecond, innodb_rows_updated_persecond, innodb_rows_deleted_persecond, query_cache_hitrate, thread_cache_hitrate, key_buffer_read_rate, key_buffer_write_rate, key_blocks_used_rate, created_tmp_disk_tables_rate, connections_usage_rate, open_files_usage_rate, open_tables_usage_rate, now_time) # print param mysql_exec(sql, param) mysql_exec(sql2, param) if db.check_connections: alarm_type = 'connections' if db.connection_threshold <= int(threads_connected): if record_alarm(db, alarm_type): sendmail_monitor.delay(db.tag + '-too many connections', db.mail_to.split(';'), 'values:' + str(threads_connected), alarm_type) else: check_ifok(db, alarm_type) # check mysql connected connected = cur.execute( "select SUBSTRING_INDEX(host,':',1) as connect_server, user connect_user,db connect_db, count(SUBSTRING_INDEX(host,':',1)) as connect_count from information_schema.processlist where db is not null and db!='information_schema' and db !='performance_schema' group by connect_server,connect_user,connect_db;" ) if connected: for line in cur.fetchall(): sql = "insert into mysql_connected(db_ip,db_port,connect_server,connect_user,connect_db,connect_count,create_time) values(%s,%s,%s,%s,%s,%s,%s);" param = (db.instance.ip, int(db.instance.port), line[0], line[1], line[2], line[3], now_time) mysql_exec(sql, param) #check replication master_thread = cur.execute( "select * from information_schema.processlist where COMMAND = 'Binlog Dump' or COMMAND = 'Binlog Dump GTID';" ) slave_status = cur.execute('show slave status;') datalist = [] if master_thread >= 1: 
datalist.append(int(1)) if slave_status <> 0: datalist.append(int(1)) else: datalist.append(int(0)) else: datalist.append(int(0)) if slave_status <> 0: datalist.append(int(1)) else: datalist.append(int(0)) sql = "delete from mysql_replication where db_ip=%s and db_port=%s;" param = (db.instance.ip, db.instance.port) mysql_exec(sql, param) if slave_status <> 0: gtid_mode = cur.execute( "select * from information_schema.global_variables where variable_name='gtid_mode';" ) result = cur.fetchone() if result: gtid_mode = result[1] else: gtid_mode = 'OFF' datalist.append(gtid_mode) read_only = cur.execute( "select * from information_schema.global_variables where variable_name='read_only';" ) result = cur.fetchone() datalist.append(result[1]) #slave_info=cur.execute('show slave status;') if db.replchannel <> '0': slave_info = cur.execute( "show slave status for channel '%s';" % (db.replchannel)) else: slave_info = cur.execute('show slave status;') result = cur.fetchone() # print "result" # print slave_info master_server = result[1] master_port = result[3] slave_io_run = result[10] slave_sql_run = result[11] delay = result[32] current_binlog_file = result[9] current_binlog_pos = result[21] master_binlog_file = result[5] master_binlog_pos = result[6] try: slave_sQL_rnning_state = result[44] except Exception, e: slave_sQL_running_state = "NULL" datalist.append(master_server) datalist.append(master_port) datalist.append(slave_io_run) datalist.append(slave_sql_run) datalist.append(delay) datalist.append(current_binlog_file) datalist.append(current_binlog_pos) datalist.append(master_binlog_file) datalist.append(master_binlog_pos) datalist.append(0) datalist.append(slave_sQL_rnning_state) if db.check_slave: if (slave_io_run == "Yes") and (slave_sql_run == "Yes"): alarm_type = 'slave stop' check_ifok(db, alarm_type) if db.check_delay: alarm_type = 'slave delay' if db.delay_threshold <= int(delay): if record_alarm(db, alarm_type): sendmail_monitor.delay(db.tag + '-slave delay', 
db.mail_to.split(';'), 'values:' + str(delay), alarm_type) else: check_ifok(db, alarm_type) else: alarm_type = 'slave stop' if record_alarm(db, alarm_type): sendmail_monitor.delay(db.tag + '-slave stop', db.mail_to.split(';'), alarm_type, alarm_type) elif master_thread >= 1: gtid_mode = cur.execute( "select * from information_schema.global_variables where variable_name='gtid_mode';" ) result = cur.fetchone() if result: gtid_mode = result[1] else: gtid_mode = 'OFF' datalist.append(gtid_mode) read_only = cur.execute( "select * from information_schema.global_variables where variable_name='read_only';" ) result = cur.fetchone() datalist.append(result[1]) datalist.append('---') datalist.append('---') datalist.append('---') datalist.append('---') datalist.append('---') datalist.append('---') datalist.append('---') master = cur.execute('show master status;') master_result = cur.fetchone() datalist.append(master_result[0]) datalist.append(master_result[1]) binlog_file = cur.execute('show master logs;') binlogs = 0 if binlog_file: for row in cur.fetchall(): binlogs = binlogs + row[1] datalist.append(binlogs) datalist.append('---')