Example #1
def check_sqlserver(host, port, username, passwd, server_id, tags):
    try:
        conn = pymssql.connect(host=host,
                               port=int(port),
                               user=username,
                               password=passwd,
                               charset="utf8")

        connect = 1
        role = -1
        uptime = sqlserver.get_uptime(conn)
        version = sqlserver.get_version(conn)

        lock_timeout = sqlserver.get_variables(conn, 'LOCK_TIMEOUT')
        trancount = sqlserver.get_variables(conn, 'TRANCOUNT')
        max_connections = sqlserver.get_variables(conn, 'MAX_CONNECTIONS')
        processes = sqlserver.ger_processes(conn)
        processes_running = sqlserver.ger_processes_running(conn)
        processes_waits = sqlserver.ger_processes_waits(conn)

        connections = sqlserver.get_variables(conn, 'CONNECTIONS')
        pack_received = sqlserver.get_variables(conn, 'PACK_RECEIVED')
        pack_sent = sqlserver.get_variables(conn, 'PACK_SENT')
        packet_errors = sqlserver.get_variables(conn, 'PACKET_ERRORS')

        time.sleep(1)

        connections_2 = sqlserver.get_variables(conn, 'CONNECTIONS')
        pack_received_2 = sqlserver.get_variables(conn, 'PACK_RECEIVED')
        pack_sent_2 = sqlserver.get_variables(conn, 'PACK_SENT')
        packet_errors_2 = sqlserver.get_variables(conn, 'PACKET_ERRORS')

        connections_persecond = int(connections_2) - int(connections)
        pack_received_persecond = int(pack_received_2) - int(pack_received)
        pack_sent_persecond = int(pack_sent_2) - int(pack_sent)
        packet_errors_persecond = int(packet_errors_2) - int(packet_errors)

        sql = "insert into sqlserver_status(server_id,tags,host,port,connect,role,uptime,version,lock_timeout,trancount,max_connections,processes,processes_running,processes_waits,connections_persecond,pack_received_persecond,pack_sent_persecond,packet_errors_persecond) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);"
        param = (server_id, tags, host, port, connect, role, uptime, version,
                 lock_timeout, trancount, max_connections, processes,
                 processes_running, processes_waits, connections_persecond,
                 pack_received_persecond, pack_sent_persecond,
                 packet_errors_persecond)
        func.mysql_exec(sql, param)
        func.update_db_status_init(role, version, host, port, tags)

    except Exception, e:
        logger_msg = "check sqlserver %s:%s : %s" % (host, port, e)
        logger.warning(logger_msg)

        try:
            connect = 0
            sql = "insert into sqlserver_status(server_id,host,port,tags,connect) values(%s,%s,%s,%s,%s)"
            param = (server_id, host, port, tags, connect)
            func.mysql_exec(sql, param)

        except Exception, e:
            logger.error(e)
            sys.exit(1)
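
Every example on this page computes its per-second metrics the same way: read a set of cumulative counters, sleep for one second, read them again, and store the difference. Below is a minimal sketch of that pattern as a reusable helper, assuming the counters come back as ints or numeric strings the way the get_variables/get_item calls above return them; the name delta_per_second is illustrative and not part of the original module.

import time

def delta_per_second(read_counters, interval=1):
    # read_counters is any callable returning a dict of counter name -> value;
    # values may be ints or numeric strings.
    first = read_counters()
    time.sleep(interval)
    second = read_counters()
    return dict((key, int(second[key]) - int(first[key])) for key in first)

With such a helper the four sqlserver per-second values above could be computed in one call; the examples inline the same two-sample logic instead.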
Example #2
def check_mysql(host,port,username,password,server_id,tags):
    try:
        conn=MySQLdb.connect(host=host,user=username,passwd=password,port=int(port),connect_timeout=3,charset='utf8')
        cur=conn.cursor()
        conn.select_db('information_schema')
        #cur.execute('flush hosts;')
        ############################# CHECK MYSQL ####################################################
        mysql_variables = func.get_mysql_variables(cur)
        mysql_status = func.get_mysql_status(cur)       
        time.sleep(1)
        mysql_status_2 = func.get_mysql_status(cur)
        ############################# GET VARIABLES ###################################################
        version = func.get_item(mysql_variables,'version')
        key_buffer_size = func.get_item(mysql_variables,'key_buffer_size')
        sort_buffer_size = func.get_item(mysql_variables,'sort_buffer_size')
        join_buffer_size = func.get_item(mysql_variables,'join_buffer_size')
        max_connections = func.get_item(mysql_variables,'max_connections')
        max_connect_errors = func.get_item(mysql_variables,'max_connect_errors')
        open_files_limit = func.get_item(mysql_variables,'open_files_limit')
        table_open_cache = func.get_item(mysql_variables,'table_open_cache')
        max_tmp_tables = func.get_item(mysql_variables,'max_tmp_tables')
        max_heap_table_size = func.get_item(mysql_variables,'max_heap_table_size')
        max_allowed_packet = func.get_item(mysql_variables,'max_allowed_packet')
        ############################# GET INNODB INFO ##################################################
        #innodb variables
        innodb_version = func.get_item(mysql_variables,'innodb_version')
        innodb_buffer_pool_instances = func.get_item(mysql_variables,'innodb_buffer_pool_instances')
        innodb_buffer_pool_size = func.get_item(mysql_variables,'innodb_buffer_pool_size')
        innodb_doublewrite = func.get_item(mysql_variables,'innodb_doublewrite')
        innodb_file_per_table = func.get_item(mysql_variables,'innodb_file_per_table')
        innodb_flush_log_at_trx_commit = func.get_item(mysql_variables,'innodb_flush_log_at_trx_commit')
        innodb_flush_method = func.get_item(mysql_variables,'innodb_flush_method')
        innodb_force_recovery = func.get_item(mysql_variables,'innodb_force_recovery')
        innodb_io_capacity = func.get_item(mysql_variables,'innodb_io_capacity')
        innodb_read_io_threads = func.get_item(mysql_variables,'innodb_read_io_threads')
        innodb_write_io_threads = func.get_item(mysql_variables,'innodb_write_io_threads')
        #innodb status
        innodb_buffer_pool_pages_total = int(func.get_item(mysql_status,'Innodb_buffer_pool_pages_total'))
        innodb_buffer_pool_pages_data = int(func.get_item(mysql_status,'Innodb_buffer_pool_pages_data'))
        innodb_buffer_pool_pages_dirty = int(func.get_item(mysql_status,'Innodb_buffer_pool_pages_dirty'))
        innodb_buffer_pool_pages_flushed = int(func.get_item(mysql_status,'Innodb_buffer_pool_pages_flushed'))
        innodb_buffer_pool_pages_free = int(func.get_item(mysql_status,'Innodb_buffer_pool_pages_free'))
        innodb_buffer_pool_pages_misc = int(func.get_item(mysql_status,'Innodb_buffer_pool_pages_misc'))
        innodb_page_size = int(func.get_item(mysql_status,'Innodb_page_size'))
        innodb_pages_created = int(func.get_item(mysql_status,'Innodb_pages_created'))
        innodb_pages_read = int(func.get_item(mysql_status,'Innodb_pages_read'))
        innodb_pages_written = int(func.get_item(mysql_status,'Innodb_pages_written'))
        innodb_row_lock_current_waits = int(func.get_item(mysql_status,'Innodb_row_lock_current_waits'))
        #innodb persecond info
        innodb_buffer_pool_read_requests_persecond = int(func.get_item(mysql_status_2,'Innodb_buffer_pool_read_requests')) - int(func.get_item(mysql_status,'Innodb_buffer_pool_read_requests'))
        innodb_buffer_pool_reads_persecond = int(func.get_item(mysql_status_2,'Innodb_buffer_pool_reads')) - int(func.get_item(mysql_status,'Innodb_buffer_pool_reads'))
        innodb_buffer_pool_write_requests_persecond = int(func.get_item(mysql_status_2,'Innodb_buffer_pool_write_requests')) - int(func.get_item(mysql_status,'Innodb_buffer_pool_write_requests'))
        innodb_buffer_pool_pages_flushed_persecond = int(func.get_item(mysql_status_2,'Innodb_buffer_pool_pages_flushed')) - int(func.get_item(mysql_status,'Innodb_buffer_pool_pages_flushed'))
        innodb_rows_deleted_persecond = int(func.get_item(mysql_status_2,'Innodb_rows_deleted')) - int(func.get_item(mysql_status,'Innodb_rows_deleted'))
        innodb_rows_inserted_persecond = int(func.get_item(mysql_status_2,'Innodb_rows_inserted')) - int(func.get_item(mysql_status,'Innodb_rows_inserted'))
        innodb_rows_read_persecond = int(func.get_item(mysql_status_2,'Innodb_rows_read')) - int(func.get_item(mysql_status,'Innodb_rows_read'))
        innodb_rows_updated_persecond = int(func.get_item(mysql_status_2,'Innodb_rows_updated')) - int(func.get_item(mysql_status,'Innodb_rows_updated'))
        ############################# GET STATUS ##################################################
        connect = 1
        uptime = func.get_item(mysql_status,'Uptime')
        open_files = func.get_item(mysql_status,'Open_files')
        open_tables = func.get_item(mysql_status,'Open_tables')
        threads_connected = func.get_item(mysql_status,'Threads_connected')
        threads_running = func.get_item(mysql_status,'Threads_running')
        threads_created = func.get_item(mysql_status,'Threads_created')
        threads_cached = func.get_item(mysql_status,'Threads_cached')
        threads_waits = mysql.get_waits(conn)
        connections = func.get_item(mysql_status,'Connections')
        aborted_clients = func.get_item(mysql_status,'Aborted_clients')
        aborted_connects = func.get_item(mysql_status,'Aborted_connects')
        key_blocks_not_flushed = func.get_item(mysql_status,'Key_blocks_not_flushed')
        key_blocks_unused = func.get_item(mysql_status,'Key_blocks_unused')
        key_blocks_used = func.get_item(mysql_status,'Key_blocks_used')
        ############################# GET STATUS PERSECOND ##################################################
        connections_persecond = int(func.get_item(mysql_status_2,'Connections')) - int(func.get_item(mysql_status,'Connections'))
        bytes_received_persecond = (int(func.get_item(mysql_status_2,'Bytes_received')) - int(func.get_item(mysql_status,'Bytes_received')))/1024
        bytes_sent_persecond = (int(func.get_item(mysql_status_2,'Bytes_sent')) - int(func.get_item(mysql_status,'Bytes_sent')))/1024
        com_select_persecond = int(func.get_item(mysql_status_2,'Com_select')) - int(func.get_item(mysql_status,'Com_select'))
        com_insert_persecond = int(func.get_item(mysql_status_2,'Com_insert')) - int(func.get_item(mysql_status,'Com_insert'))
        com_update_persecond = int(func.get_item(mysql_status_2,'Com_update')) - int(func.get_item(mysql_status,'Com_update'))
        com_delete_persecond = int(func.get_item(mysql_status_2,'Com_delete')) - int(func.get_item(mysql_status,'Com_delete'))
        com_commit_persecond = int(func.get_item(mysql_status_2,'Com_commit')) - int(func.get_item(mysql_status,'Com_commit'))
        com_rollback_persecond = int(func.get_item(mysql_status_2,'Com_rollback')) - int(func.get_item(mysql_status,'Com_rollback'))
        questions_persecond = int(func.get_item(mysql_status_2,'Questions')) - int(func.get_item(mysql_status,'Questions'))
        queries_persecond = int(func.get_item(mysql_status_2,'Queries')) - int(func.get_item(mysql_status,'Queries'))
        transaction_persecond = (int(func.get_item(mysql_status_2,'Com_commit')) + int(func.get_item(mysql_status_2,'Com_rollback'))) - (int(func.get_item(mysql_status,'Com_commit')) + int(func.get_item(mysql_status,'Com_rollback')))
        created_tmp_disk_tables_persecond = int(func.get_item(mysql_status_2,'Created_tmp_disk_tables')) - int(func.get_item(mysql_status,'Created_tmp_disk_tables'))
        created_tmp_files_persecond = int(func.get_item(mysql_status_2,'Created_tmp_files')) - int(func.get_item(mysql_status,'Created_tmp_files'))
        created_tmp_tables_persecond = int(func.get_item(mysql_status_2,'Created_tmp_tables')) - int(func.get_item(mysql_status,'Created_tmp_tables'))
        table_locks_immediate_persecond = int(func.get_item(mysql_status_2,'Table_locks_immediate')) - int(func.get_item(mysql_status,'Table_locks_immediate'))
        table_locks_waited_persecond = int(func.get_item(mysql_status_2,'Table_locks_waited')) - int(func.get_item(mysql_status,'Table_locks_waited'))
        key_read_requests_persecond = int(func.get_item(mysql_status_2,'Key_read_requests')) - int(func.get_item(mysql_status,'Key_read_requests'))
        key_reads_persecond = int(func.get_item(mysql_status_2,'Key_reads')) - int(func.get_item(mysql_status,'Key_reads'))
        key_write_requests_persecond = int(func.get_item(mysql_status_2,'Key_write_requests')) - int(func.get_item(mysql_status,'Key_write_requests'))
        key_writes_persecond = int(func.get_item(mysql_status_2,'Key_writes')) - int(func.get_item(mysql_status,'Key_writes'))
        ############################# GET MYSQL HITRATE ##################################################
        if (string.atof(func.get_item(mysql_status,'Qcache_hits')) + string.atof(func.get_item(mysql_status,'Com_select'))) <> 0:
            query_cache_hitrate = string.atof(func.get_item(mysql_status,'Qcache_hits')) / (string.atof(func.get_item(mysql_status,'Qcache_hits')) + string.atof(func.get_item(mysql_status,'Com_select')))
            query_cache_hitrate =  "%9.2f" %query_cache_hitrate
        else:
            query_cache_hitrate = 0

        if string.atof(func.get_item(mysql_status,'Connections')) <> 0:
            thread_cache_hitrate = 1 - string.atof(func.get_item(mysql_status,'Threads_created')) / string.atof(func.get_item(mysql_status,'Connections'))
            thread_cache_hitrate =  "%9.2f" %thread_cache_hitrate
        else:
            thread_cache_hitrate = 0

        if string.atof(func.get_item(mysql_status,'Key_read_requests')) <> 0:
            key_buffer_read_rate = 1 - string.atof(func.get_item(mysql_status,'Key_reads')) / string.atof(func.get_item(mysql_status,'Key_read_requests'))
            key_buffer_read_rate =  "%9.2f" %key_buffer_read_rate
        else:
            key_buffer_read_rate = 0

        if string.atof(func.get_item(mysql_status,'Key_write_requests')) <> 0:
            key_buffer_write_rate = 1 - string.atof(func.get_item(mysql_status,'Key_writes')) / string.atof(func.get_item(mysql_status,'Key_write_requests'))
            key_buffer_write_rate =  "%9.2f" %key_buffer_write_rate
        else:
            key_buffer_write_rate = 0
        
        if (string.atof(func.get_item(mysql_status,'Key_blocks_used'))+string.atof(func.get_item(mysql_status,'Key_blocks_unused'))) <> 0:
            key_blocks_used_rate = string.atof(func.get_item(mysql_status,'Key_blocks_used')) / (string.atof(func.get_item(mysql_status,'Key_blocks_used'))+string.atof(func.get_item(mysql_status,'Key_blocks_unused')))
            key_blocks_used_rate =  "%9.2f" %key_blocks_used_rate
        else:
            key_blocks_used_rate = 0

        if (string.atof(func.get_item(mysql_status,'Created_tmp_disk_tables'))+string.atof(func.get_item(mysql_status,'Created_tmp_tables'))) <> 0:
            created_tmp_disk_tables_rate = string.atof(func.get_item(mysql_status,'Created_tmp_disk_tables')) / (string.atof(func.get_item(mysql_status,'Created_tmp_disk_tables'))+string.atof(func.get_item(mysql_status,'Created_tmp_tables')))
            created_tmp_disk_tables_rate =  "%9.2f" %created_tmp_disk_tables_rate
        else:
            created_tmp_disk_tables_rate = 0

        if string.atof(max_connections) <> 0:
            connections_usage_rate = string.atof(threads_connected)/string.atof(max_connections)
            connections_usage_rate =  "%9.2f" %connections_usage_rate
        else:
            connections_usage_rate = 0

        if string.atof(open_files_limit) <> 0:            
            open_files_usage_rate = string.atof(open_files)/string.atof(open_files_limit)
            open_files_usage_rate =  "%9.2f" %open_files_usage_rate
        else:
            open_files_usage_rate = 0

        if string.atof(table_open_cache) <> 0:            
            open_tables_usage_rate = string.atof(open_tables)/string.atof(table_open_cache)
            open_tables_usage_rate =  "%9.2f" %open_tables_usage_rate
        else:
            open_tables_usage_rate = 0
  
        #repl
        slave_status=cur.execute('show slave status;')
        if slave_status <> 0:
            role='slave'
            role_new='s'
        else:
            role='master'
            role_new='m'

        ############################# INSERT INTO SERVER ##################################################
        sql = "insert into mysql_status(server_id,host,port,tags,connect,role,uptime,version,max_connections,max_connect_errors,open_files_limit,table_open_cache,max_tmp_tables,max_heap_table_size,max_allowed_packet,open_files,open_tables,threads_connected,threads_running,threads_waits,threads_created,threads_cached,connections,aborted_clients,aborted_connects,connections_persecond,bytes_received_persecond,bytes_sent_persecond,com_select_persecond,com_insert_persecond,com_update_persecond,com_delete_persecond,com_commit_persecond,com_rollback_persecond,questions_persecond,queries_persecond,transaction_persecond,created_tmp_tables_persecond,created_tmp_disk_tables_persecond,created_tmp_files_persecond,table_locks_immediate_persecond,table_locks_waited_persecond,key_buffer_size,sort_buffer_size,join_buffer_size,key_blocks_not_flushed,key_blocks_unused,key_blocks_used,key_read_requests_persecond,key_reads_persecond,key_write_requests_persecond,key_writes_persecond,innodb_version,innodb_buffer_pool_instances,innodb_buffer_pool_size,innodb_doublewrite,innodb_file_per_table,innodb_flush_log_at_trx_commit,innodb_flush_method,innodb_force_recovery,innodb_io_capacity,innodb_read_io_threads,innodb_write_io_threads,innodb_buffer_pool_pages_total,innodb_buffer_pool_pages_data,innodb_buffer_pool_pages_dirty,innodb_buffer_pool_pages_flushed,innodb_buffer_pool_pages_free,innodb_buffer_pool_pages_misc,innodb_page_size,innodb_pages_created,innodb_pages_read,innodb_pages_written,innodb_row_lock_current_waits,innodb_buffer_pool_pages_flushed_persecond,innodb_buffer_pool_read_requests_persecond,innodb_buffer_pool_reads_persecond,innodb_buffer_pool_write_requests_persecond,innodb_rows_read_persecond,innodb_rows_inserted_persecond,innodb_rows_updated_persecond,innodb_rows_deleted_persecond,query_cache_hitrate,thread_cache_hitrate,key_buffer_read_rate,key_buffer_write_rate,key_blocks_used_rate,created_tmp_disk_tables_rate,connections_usage_rate,open_files_usage_rate,open_tables_usage_rate) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);"
        param = (server_id,host,port,tags,connect,role,uptime,version,max_connections,max_connect_errors,open_files_limit,table_open_cache,max_tmp_tables,max_heap_table_size,max_allowed_packet,open_files,open_tables,threads_connected,threads_running,threads_waits,threads_created,threads_cached,connections,aborted_clients,aborted_connects,connections_persecond,bytes_received_persecond,bytes_sent_persecond,com_select_persecond,com_insert_persecond,com_update_persecond,com_delete_persecond,com_commit_persecond,com_rollback_persecond,questions_persecond,queries_persecond,transaction_persecond,created_tmp_tables_persecond,created_tmp_disk_tables_persecond,created_tmp_files_persecond,table_locks_immediate_persecond,table_locks_waited_persecond,key_buffer_size,sort_buffer_size,join_buffer_size,key_blocks_not_flushed,key_blocks_unused,key_blocks_used,key_read_requests_persecond,key_reads_persecond,key_write_requests_persecond,key_writes_persecond,innodb_version,innodb_buffer_pool_instances,innodb_buffer_pool_size,innodb_doublewrite,innodb_file_per_table,innodb_flush_log_at_trx_commit,innodb_flush_method,innodb_force_recovery,innodb_io_capacity,innodb_read_io_threads,innodb_write_io_threads,innodb_buffer_pool_pages_total,innodb_buffer_pool_pages_data,innodb_buffer_pool_pages_dirty,innodb_buffer_pool_pages_flushed,innodb_buffer_pool_pages_free,innodb_buffer_pool_pages_misc,innodb_page_size,innodb_pages_created,innodb_pages_read,innodb_pages_written,innodb_row_lock_current_waits,innodb_buffer_pool_pages_flushed_persecond,innodb_buffer_pool_read_requests_persecond,innodb_buffer_pool_reads_persecond,innodb_buffer_pool_write_requests_persecond,innodb_rows_read_persecond,innodb_rows_inserted_persecond,innodb_rows_updated_persecond,innodb_rows_deleted_persecond,query_cache_hitrate,thread_cache_hitrate,key_buffer_read_rate,key_buffer_write_rate,key_blocks_used_rate,created_tmp_disk_tables_rate,connections_usage_rate,open_files_usage_rate,open_tables_usage_rate)
        func.mysql_exec(sql,param)
        func.update_db_status_init(role_new,version,host,port,tags)

        #check mysql process
        processlist=cur.execute("select * from information_schema.processlist where DB !='information_schema' and command !='Sleep';")
        if processlist:
            for line in cur.fetchall():
                sql="insert into mysql_processlist(server_id,host,port,tags,pid,p_user,p_host,p_db,command,time,status,info) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
                param=(server_id,host,port,tags,line[0],line[1],line[2],line[3],line[4],line[5],line[6],line[7])
                func.mysql_exec(sql,param)

        #check mysql connected
        connected=cur.execute("select SUBSTRING_INDEX(host,':',1) as connect_server, user connect_user,db connect_db, count(SUBSTRING_INDEX(host,':',1)) as connect_count  from information_schema.processlist where db is not null and db!='information_schema' and db !='performance_schema' group by connect_server ;");
        if connected:
            for line in cur.fetchall():
                sql="insert into mysql_connected(server_id,host,port,tags,connect_server,connect_user,connect_db,connect_count) values(%s,%s,%s,%s,%s,%s,%s,%s);"
                param =(server_id,host,port,tags,line[0],line[1],line[2],line[3])
                func.mysql_exec(sql,param)

        #check mysql replication
        master_thread=cur.execute("select * from information_schema.processlist where COMMAND = 'Binlog Dump' or COMMAND = 'Binlog Dump GTID';")
        slave_status=cur.execute('show slave status;')
        datalist=[]
        if master_thread >= 1:
            datalist.append(int(1))
            if slave_status <> 0:
                datalist.append(int(1))
            else:
                datalist.append(int(0))
        else:
            datalist.append(int(0))
            if slave_status <> 0:
                datalist.append(int(1))
            else:
                datalist.append(int(0))


        if slave_status <> 0:
            gtid_mode=cur.execute("select * from information_schema.global_variables where variable_name='gtid_mode';")
            result=cur.fetchone()
            if result:
                gtid_mode=result[1]
            else:
                gtid_mode='OFF'
            datalist.append(gtid_mode)
            read_only=cur.execute("select * from information_schema.global_variables where variable_name='read_only';")
            result=cur.fetchone()
            datalist.append(result[1])
            slave_info=cur.execute('show slave status;')
            result=cur.fetchone()
            master_server=result[1]
            master_port=result[3]
            slave_io_run=result[10]
            slave_sql_run=result[11]
            delay=result[32]
            current_binlog_file=result[9]
            current_binlog_pos=result[21]
            master_binlog_file=result[5]
            master_binlog_pos=result[6]

            datalist.append(master_server)
            datalist.append(master_port)
            datalist.append(slave_io_run)
            datalist.append(slave_sql_run)
            datalist.append(delay)
            datalist.append(current_binlog_file)
            datalist.append(current_binlog_pos)
            datalist.append(master_binlog_file)
            datalist.append(master_binlog_pos)
            datalist.append(0)

        elif master_thread >= 1:
            gtid_mode=cur.execute("select * from information_schema.global_variables where variable_name='gtid_mode';")
            result=cur.fetchone()
            if result:
                gtid_mode=result[1]
            else:
                gtid_mode='OFF'
            datalist.append(gtid_mode)
            read_only=cur.execute("select * from information_schema.global_variables where variable_name='read_only';")
            result=cur.fetchone()
            datalist.append(result[1])
            datalist.append('---')
            datalist.append('---')
            datalist.append('---')
            datalist.append('---')
            datalist.append('---')
            datalist.append('---')
            datalist.append('---')
            master=cur.execute('show master status;')
            master_result=cur.fetchone()
            datalist.append(master_result[0])
            datalist.append(master_result[1])
            binlog_file=cur.execute('show master logs;')
            binlogs=0
            if binlog_file:
                for row in cur.fetchall():
                    binlogs = binlogs + row[1]
                datalist.append(binlogs)
        else:
            datalist=[]

        result=datalist
        if result:
            sql="insert into mysql_replication(server_id,tags,host,port,is_master,is_slave,gtid_mode,read_only,master_server,master_port,slave_io_run,slave_sql_run,delay,current_binlog_file,current_binlog_pos,master_binlog_file,master_binlog_pos,master_binlog_space) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
            param=(server_id,tags,host,port,result[0],result[1],result[2],result[3],result[4],result[5],result[6],result[7],result[8],result[9],result[10],result[11],result[12],result[13])
            func.mysql_exec(sql,param)

        cur.close()
        conn.close()

    except MySQLdb.Error,e:
        logger_msg="check mysql %s:%s failure: %d %s" %(host,port,e.args[0],e.args[1])
        logger.warning(logger_msg)
        logger_msg="check mysql %s:%s failure: sleep 3 seconds and check again." %(host,port)
        logger.warning(logger_msg)
        time.sleep(3)
        try:
            conn=MySQLdb.connect(host=host,user=username,passwd=password,port=int(port),connect_timeout=3,charset='utf8')
            cur=conn.cursor()
            conn.select_db('information_schema')
        except MySQLdb.Error,e:
            logger_msg="check mysql second %s:%s failure: %d %s" %(host,port,e.args[0],e.args[1])
            logger.warning(logger_msg)
            connect = 0
            sql="insert into mysql_status(server_id,host,port,tags,connect) values(%s,%s,%s,%s,%s)"
            param=(server_id,host,port,tags,connect)
            func.mysql_exec(sql,param)
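
Each example persists its snapshot through func.mysql_exec(sql, param), whose definition is not shown on this page. The following is a minimal sketch of what such a helper typically looks like, assuming a MySQLdb connection to the monitoring repository; the connection settings (repo_host, repo_user, repo_passwd, repo_db) are placeholders, not the project's real configuration.

import MySQLdb

def mysql_exec(sql, param,
               repo_host='127.0.0.1', repo_user='monitor',
               repo_passwd='monitor', repo_db='monitor_db'):
    # Run one parameterized statement against the repository database.
    # param is a tuple (as in the examples above) or an empty string for
    # statements without placeholders.
    conn = MySQLdb.connect(host=repo_host, user=repo_user, passwd=repo_passwd,
                           db=repo_db, charset='utf8')
    try:
        cur = conn.cursor()
        if param:
            cur.execute(sql, param)
        else:
            cur.execute(sql)
        conn.commit()
        cur.close()
    finally:
        conn.close()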
Example #3
def check_memcache(host, port, server_id, tags):
    try:
        connect_db = memcache.Client(["%s:%s" % (host, port)])
        serverStatus = connect_db.get_stats()
        sleep(1)
        serverStatus_2 = connect_db.get_stats()
        connect = 1
        pid = serverStatus[0][1]['pid']
        total_items = serverStatus[0][1]['total_items']
        uptime = serverStatus[0][1]['uptime']
        version = serverStatus[0][1]['version']
        limit_maxbytes = serverStatus[0][1]['limit_maxbytes']
        rusage_user = serverStatus[0][1]['rusage_user']
        bytes_read = serverStatus[0][1]['bytes_read']
        rusage_system = serverStatus[0][1]['rusage_system']
        cmd_get = serverStatus[0][1]['cmd_get']
        curr_connections = serverStatus[0][1]['curr_connections']
        threads = serverStatus[0][1]['threads']
        total_connections = serverStatus[0][1]['total_connections']
        cmd_set = serverStatus[0][1]['cmd_set']
        curr_items = serverStatus[0][1]['curr_items']
        get_misses = serverStatus[0][1]['get_misses']
        evictions = serverStatus[0][1]['evictions']
        bytes = serverStatus[0][1]['bytes']
        connection_structures = serverStatus[0][1]['connection_structures']
        bytes_written = serverStatus[0][1]['bytes_written']
        time = serverStatus[0][1]['time']
        pointer_size = serverStatus[0][1]['pointer_size']
        get_hits = serverStatus[0][1]['get_hits']
        network_bytesIn_persecond = int(
            serverStatus_2[0][1]['bytes_written']) - int(
                serverStatus[0][1]['bytes_written'])
        network_bytesOut_persecond = int(
            serverStatus_2[0][1]['bytes_read']) - int(
                serverStatus[0][1]['bytes_read'])
        opcounters_get_persecond = int(serverStatus_2[0][1]['cmd_get']) - int(
            serverStatus[0][1]['cmd_get'])
        opcounters_set_persecond = int(serverStatus_2[0][1]['cmd_set']) - int(
            serverStatus[0][1]['cmd_set'])
        opcounters_get_rate = (int(serverStatus_2[0][1]['get_hits']) -
                               int(serverStatus[0][1]['get_hits'])) * 10000 / (
                                   int(serverStatus_2[0][1]['cmd_get']) -
                                   int(serverStatus[0][1]['cmd_get']) + 1)
        ##################### insert data to mysql server#############################
        sql = "replace into memcache_status(server_id,host,port,tags,connect,pid,total_items,uptime,version,limit_maxbytes,rusage_user,bytes_read,rusage_system,cmd_get,curr_connections,threads,total_connections,cmd_set,curr_items,get_misses,evictions,bytes,connection_structures,bytes_written,time,pointer_size,get_hits,network_bytesIn_persecond,network_bytesOut_persecond,opcounters_get_persecond,opcounters_set_persecond,opcounters_get_rate) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);"
        param = (server_id, host, port, tags, connect, pid, total_items,
                 uptime, version, limit_maxbytes, rusage_user, bytes_read,
                 rusage_system, cmd_get, curr_connections, threads,
                 total_connections, cmd_set, curr_items, get_misses, evictions,
                 bytes, connection_structures, bytes_written, time,
                 pointer_size, get_hits, network_bytesIn_persecond,
                 network_bytesOut_persecond, opcounters_get_persecond,
                 opcounters_set_persecond, opcounters_get_rate)
        func.mysql_exec(sql, param)
        role = 'm'
        func.update_db_status_init(role, version, host, port, tags)
    except Exception, e:
        logger_msg = "check memcache %s:%s : %s" % (host, port, e)
        logger.warning(logger_msg)
        try:
            curr_connections = 0
            sql = "replace into memcache_status(server_id,host,port,tags,curr_connections) values(%s,%s,%s,%s,%s)"
            param = (server_id, host, port, tags, curr_connections)
            func.mysql_exec(sql, param)
        except Exception, e:
            logger.error(e)
            sys.exit(1)
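
The repeated serverStatus[0][1][...] indexing above follows the shape of python-memcached's get_stats() result: a list with one (server_name, stats_dict) entry per reachable server, where the stat values are strings. A short illustration with a placeholder address:

import memcache

mc = memcache.Client(['127.0.0.1:11211'])  # placeholder host:port
stats = mc.get_stats()
# stats looks like [('127.0.0.1:11211 (1)', {'pid': '1234', 'curr_connections': '5', ...})]
# and is an empty list when no server answers; the values are strings,
# which is why the example wraps them in int() before doing arithmetic.
if stats:
    server_name, server_stats = stats[0]
    curr_connections = int(server_stats['curr_connections'])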
Example #4
def check_redis(host, port, passwd, server_id, tags):
    try:
        r = redis.StrictRedis(host=host,
                              port=int(port),
                              password=passwd,
                              db=0,
                              socket_timeout=3,
                              encoding='utf-8')
        info = r.info()
        time.sleep(1)
        info_2 = r.info()
        # Server
        redis_version = info['redis_version']
        redis_git_sha1 = info['redis_git_sha1']
        redis_git_dirty = info['redis_git_dirty']
        arch_bits = info['arch_bits']
        multiplexing_api = info['multiplexing_api']
        gcc_version = info['gcc_version']
        process_id = info['process_id']
        uptime_in_seconds = info['uptime_in_seconds']
        uptime_in_days = info['uptime_in_days']
        lru_clock = info['lru_clock']
        os = check_value(info, 'os')
        redis_mode = check_value(info, 'redis_mode')
        hz = check_value(info, 'hz')
        run_id = check_value(info, 'run_id')
        tcp_port = check_value(info, 'tcp_port')

        # Clients
        connected_clients = info['connected_clients']
        client_longest_output_list = info['client_longest_output_list']
        client_biggest_input_buf = info['client_biggest_input_buf']
        blocked_clients = info['blocked_clients']
        # Memory
        used_memory = info['used_memory']
        used_memory_human = info['used_memory_human']
        used_memory_rss = info['used_memory_rss']
        used_memory_peak = info['used_memory_peak']
        used_memory_peak_human = info['used_memory_peak_human']
        used_memory_lua = check_value(info, 'used_memory_lua')
        mem_fragmentation_ratio = info['mem_fragmentation_ratio']
        mem_allocator = info['mem_allocator']
        # Persistence
        loading = info['loading']
        rdb_changes_since_last_save = check_value(
            info, 'rdb_changes_since_last_save')
        rdb_bgsave_in_progress = check_value(info, 'rdb_bgsave_in_progress')
        rdb_last_save_time = check_value(info, 'rdb_last_save_time')
        rdb_last_bgsave_status = check_value(info, 'rdb_last_bgsave_status')
        rdb_last_bgsave_time_sec = check_value(info,
                                               'rdb_last_bgsave_time_sec')
        rdb_current_bgsave_time_sec = check_value(
            info, 'rdb_current_bgsave_time_sec')
        aof_enabled = check_value(info, 'aof_enabled')
        aof_rewrite_in_progress = check_value(info, 'aof_rewrite_in_progress')
        aof_rewrite_scheduled = check_value(info, 'aof_rewrite_scheduled')
        aof_last_rewrite_time_sec = check_value(info,
                                                'aof_last_rewrite_time_sec')
        aof_current_rewrite_time_sec = check_value(
            info, 'aof_current_rewrite_time_sec')
        aof_last_bgrewrite_status = check_value(info,
                                                'aof_last_bgrewrite_status')
        # Stats
        total_connections_received = check_value(info,
                                                 'total_connections_received')
        total_commands_processed = check_value(info,
                                               'total_commands_processed')
        current_commands_processed = int(info_2['total_commands_processed'] -
                                         info['total_commands_processed'])
        instantaneous_ops_per_sec = check_value(info,
                                                'instantaneous_ops_per_sec')
        rejected_connections = check_value(info, 'rejected_connections')

        expired_keys = info['expired_keys']
        evicted_keys = info['evicted_keys']
        keyspace_hits = info['keyspace_hits']
        keyspace_misses = info['keyspace_misses']
        pubsub_channels = info['pubsub_channels']
        pubsub_patterns = info['pubsub_patterns']
        latest_fork_usec = info['latest_fork_usec']
        # Replication
        role = info['role']
        connected_slaves = info['connected_slaves']

        # CPU
        used_cpu_sys = info['used_cpu_sys']
        used_cpu_user = info['used_cpu_user']
        used_cpu_sys_children = info['used_cpu_sys_children']
        used_cpu_user_children = info['used_cpu_user_children']

        # replication
        if role == 'slave':
            #print info
            master_host = info['master_host']
            master_port = info['master_port']
            master_link_status = info['master_link_status']
            master_last_io_seconds_ago = info['master_last_io_seconds_ago']
            master_sync_in_progress = info['master_sync_in_progress']
            #slave_repl_offset = info['slave_repl_offset']
            slave_priority = check_value(info, 'slave_priority')
            slave_read_only = check_value(info, 'slave_read_only')
            master_server_id = func.mysql_query(
                "SELECT id FROM db_servers_redis WHERE host='%s' AND port='%s' limit 1;"
                % (master_host, master_port))
            master_server_id = master_server_id[0][0]
            role_new = 's'
        else:
            master_host = '-1'
            master_port = '-1'
            master_link_status = '-1'
            master_last_io_seconds_ago = '-1'
            master_sync_in_progress = '-1'
            #slave_repl_offset = '---'
            slave_priority = '-1'
            slave_read_only = '-1'
            master_server_id = '-1'
            role_new = 'm'

        #add redis_status
        connect = 1
        sql = "insert into redis_status(server_id,host,port,tags,redis_role,connect,redis_version,redis_git_sha1,redis_git_dirty,redis_mode,os,arch_bits,multiplexing_api,gcc_version,process_id,run_id,tcp_port,uptime_in_seconds,uptime_in_days,hz,lru_clock,connected_clients,client_longest_output_list,client_biggest_input_buf,blocked_clients,used_memory,used_memory_human,used_memory_rss,used_memory_peak,used_memory_peak_human,used_memory_lua,mem_fragmentation_ratio,mem_allocator,loading,rdb_changes_since_last_save,rdb_bgsave_in_progress,rdb_last_save_time,rdb_last_bgsave_status,rdb_last_bgsave_time_sec,rdb_current_bgsave_time_sec,aof_enabled,aof_rewrite_in_progress,aof_rewrite_scheduled,aof_last_rewrite_time_sec,aof_current_rewrite_time_sec,aof_last_bgrewrite_status,total_connections_received,total_commands_processed,current_commands_processed,instantaneous_ops_per_sec,rejected_connections,expired_keys,evicted_keys,keyspace_hits,keyspace_misses,pubsub_channels,pubsub_patterns,latest_fork_usec,used_cpu_sys,used_cpu_user,used_cpu_sys_children,used_cpu_user_children) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);"
        param = (server_id, host, port, tags, role, connect, redis_version,
                 redis_git_sha1, redis_git_dirty, redis_mode, os, arch_bits,
                 multiplexing_api, gcc_version, process_id, run_id, tcp_port,
                 uptime_in_seconds, uptime_in_days, hz, lru_clock,
                 connected_clients, client_longest_output_list,
                 client_biggest_input_buf, blocked_clients, used_memory,
                 used_memory_human, used_memory_rss, used_memory_peak,
                 used_memory_peak_human, used_memory_lua,
                 mem_fragmentation_ratio, mem_allocator, loading,
                 rdb_changes_since_last_save, rdb_bgsave_in_progress,
                 rdb_last_save_time, rdb_last_bgsave_status,
                 rdb_last_bgsave_time_sec, rdb_current_bgsave_time_sec,
                 aof_enabled, aof_rewrite_in_progress, aof_rewrite_scheduled,
                 aof_last_rewrite_time_sec, aof_current_rewrite_time_sec,
                 aof_last_bgrewrite_status, total_connections_received,
                 total_commands_processed, current_commands_processed,
                 instantaneous_ops_per_sec, rejected_connections, expired_keys,
                 evicted_keys, keyspace_hits, keyspace_misses, pubsub_channels,
                 pubsub_patterns, latest_fork_usec, used_cpu_sys,
                 used_cpu_user, used_cpu_sys_children, used_cpu_user_children)
        func.mysql_exec(sql, param)

        #add redis_replication
        sql_1 = "insert into redis_replication(server_id,tags,host,port,role,master_server_id,master_host,master_port,master_link_status,master_last_io_seconds_ago,master_sync_in_progress,slave_priority,slave_read_only,connected_slaves) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);"
        param_1 = (server_id, tags, host, port, role, master_server_id,
                   master_host, master_port, master_link_status,
                   master_last_io_seconds_ago, master_sync_in_progress,
                   slave_priority, slave_read_only, connected_slaves)
        func.mysql_exec(sql_1, param_1)
        func.update_db_status_init(role_new, redis_version, host, port, tags)

    except Exception, e:
        logger_msg = "check redis %s:%s : %s" % (host, port, e)
        logger.warning(logger_msg)

        try:
            connect = 0
            sql = "insert into redis_status(server_id,host,port,tags,connect) values(%s,%s,%s,%s,%s)"
            param = (server_id, host, port, tags, connect)
            func.mysql_exec(sql, param)

        except Exception, e:
            logger.error(e)
            sys.exit(1)
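
check_value(info, key) is used above for INFO fields that only exist in some Redis versions, but its definition is not part of this excerpt. A plausible minimal version, assuming the convention suggested by the surrounding code (missing fields fall back to a sentinel such as -1):

def check_value(info, key, default=-1):
    # The dict returned by redis-py's info() varies across Redis versions,
    # so optional fields are read defensively.
    if key in info:
        return info[key]
    return default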
                 indexCounters_accesses, indexCounters_hits,
                 indexCounters_misses, indexCounters_resets,
                 indexCounters_missRatio, dur_commits, dur_journaledMB,
                 dur_writeToDataFilesMB, dur_compression,
                 dur_commitsInWriteLock, dur_earlyCommits, dur_timeMs_dt,
                 dur_timeMs_prepLogBuffer, dur_timeMs_writeToJournal,
                 dur_timeMs_writeToDataFiles, dur_timeMs_remapPrivateView,
                 mem_bits, mem_resident, mem_virtual, mem_supported,
                 mem_mapped, mem_mappedWithJournal, network_bytesIn_persecond,
                 network_bytesOut_persecond, network_numRequests_persecond,
                 opcounters_insert_persecond, opcounters_query_persecond,
                 opcounters_update_persecond, opcounters_delete_persecond,
                 opcounters_command_persecond)
        func.mysql_exec(sql, param)
        role = 'm'
        func.update_db_status_init(repl_role_new, version, host, port, tags)

    except Exception, e:
        logger_msg = "check mongodb %s:%s : %s" % (host, port, e)
        logger.warning(logger_msg)

        try:
            connect = 0
            sql = "insert into mongodb_status(server_id,host,port,tags,connect) values(%s,%s,%s,%s,%s)"
            param = (server_id, host, port, tags, connect)
            func.mysql_exec(sql, param)

        except Exception, e:
            logger.error(e)
            sys.exit(1)
Example #6
def check_sqlserver(host, port, username, passwd, server_id, tags):
    try:
        conn = pymssql.connect(host=host, port=int(port), user=username, password=passwd, charset="utf8")

        connect = 1
        role = -1
        uptime = sqlserver.get_uptime(conn)
        version = sqlserver.get_version(conn)

        lock_timeout = sqlserver.get_variables(conn, "LOCK_TIMEOUT")
        trancount = sqlserver.get_variables(conn, "TRANCOUNT")
        max_connections = sqlserver.get_variables(conn, "MAX_CONNECTIONS")
        processes = sqlserver.ger_processes(conn)
        processes_running = sqlserver.ger_processes_running(conn)
        processes_waits = sqlserver.ger_processes_waits(conn)

        connections = sqlserver.get_variables(conn, "CONNECTIONS")
        pack_received = sqlserver.get_variables(conn, "PACK_RECEIVED")
        pack_sent = sqlserver.get_variables(conn, "PACK_SENT")
        packet_errors = sqlserver.get_variables(conn, "PACKET_ERRORS")

        time.sleep(1)

        connections_2 = sqlserver.get_variables(conn, "CONNECTIONS")
        pack_received_2 = sqlserver.get_variables(conn, "PACK_RECEIVED")
        pack_sent_2 = sqlserver.get_variables(conn, "PACK_SENT")
        packet_errors_2 = sqlserver.get_variables(conn, "PACKET_ERRORS")

        connections_persecond = int(connections_2) - int(connections)
        pack_received_persecond = int(pack_received_2) - int(pack_received)
        pack_sent_persecond = int(pack_sent_2) - int(pack_sent)
        packet_errors_persecond = int(packet_errors_2) - int(packet_errors)

        sql = "insert into sqlserver_status(server_id,tags,host,port,connect,role,uptime,version,lock_timeout,trancount,max_connections,processes,processes_running,processes_waits,connections_persecond,pack_received_persecond,pack_sent_persecond,packet_errors_persecond) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);"
        param = (
            server_id,
            tags,
            host,
            port,
            connect,
            role,
            uptime,
            version,
            lock_timeout,
            trancount,
            max_connections,
            processes,
            processes_running,
            processes_waits,
            connections_persecond,
            pack_received_persecond,
            pack_sent_persecond,
            packet_errors_persecond,
        )
        func.mysql_exec(sql, param)
        func.update_db_status_init(role, version, host, port, tags)

    except Exception, e:
        logger_msg = "check sqlserver %s:%s : %s" % (host, port, e)
        logger.warning(logger_msg)

        try:
            connect = 0
            sql = "insert into sqlserver_status(server_id,host,port,tags,connect) values(%s,%s,%s,%s,%s)"
            param = (server_id, host, port, tags, connect)
            func.mysql_exec(sql, param)

        except Exception, e:
            logger.error(e)
            sys.exit(1)
Example #7
def check_mongodb(host, port, user, passwd, server_id, tags):
    try:
        func.mysql_exec(
            "insert into mongodb_status_his SELECT *,LEFT(REPLACE(REPLACE(REPLACE(create_time,'-',''),' ',''),':',''),12) from mongodb_status where server_id='%s';"
            % (server_id), '')
        func.mysql_exec(
            "delete from mongodb_status where server_id='%s';" % (server_id),
            '')

        #connect = pymongo.Connection(host,int(port))
        client = pymongo.MongoClient(host, int(port))
        db = client['admin']
        db.authenticate(user, passwd)
        serverStatus = client.admin.command(
            bson.son.SON([('serverStatus', 1), ('repl', 2)]))
        time.sleep(1)
        serverStatus_2 = client.admin.command(
            bson.son.SON([('serverStatus', 1), ('repl', 2)]))
        connect = 1
        ok = int(serverStatus['ok'])
        version = serverStatus['version']
        uptime = serverStatus['uptime']
        connections_current = serverStatus['connections']['current']
        connections_available = serverStatus['connections']['available']
        globalLock_activeClients = serverStatus['globalLock']['activeClients'][
            'total']
        globalLock_currentQueue = serverStatus['globalLock']['currentQueue'][
            'total']
        mem_bits = serverStatus['mem']['bits']
        mem_resident = serverStatus['mem']['resident']
        mem_virtual = serverStatus['mem']['virtual']
        mem_supported = serverStatus['mem']['supported']
        mem_mapped = serverStatus['mem']['mapped']
        mem_mappedWithJournal = serverStatus['mem']['mappedWithJournal']
        network_bytesIn_persecond = int(
            serverStatus_2['network']['bytesIn']) - int(
                serverStatus['network']['bytesIn'])
        network_bytesOut_persecond = int(
            serverStatus_2['network']['bytesOut']) - int(
                serverStatus['network']['bytesOut'])
        network_numRequests_persecond = int(
            serverStatus_2['network']['numRequests']) - int(
                serverStatus['network']['numRequests'])
        opcounters_insert_persecond = int(
            serverStatus_2['opcounters']['insert']) - int(
                serverStatus['opcounters']['insert'])
        opcounters_query_persecond = int(
            serverStatus_2['opcounters']['query']) - int(
                serverStatus['opcounters']['query'])
        opcounters_update_persecond = int(
            serverStatus_2['opcounters']['update']) - int(
                serverStatus['opcounters']['update'])
        opcounters_delete_persecond = int(
            serverStatus_2['opcounters']['delete']) - int(
                serverStatus['opcounters']['delete'])
        opcounters_command_persecond = int(
            serverStatus_2['opcounters']['command']) - int(
                serverStatus['opcounters']['command'])

        #replset
        try:
            repl = serverStatus['repl']
            setName = repl['setName']
            replset = 1
            if repl['secondary'] == True:
                repl_role = 'secondary'
                repl_role_new = 's'
            else:
                repl_role = 'master'
                repl_role_new = 'm'
        except:
            replset = 0
            repl_role = 'master'
            repl_role_new = 'm'
            pass

        ##################### insert data to mysql server#############################
        sql = "insert into mongodb_status(server_id,host,port,tags,connect,replset,repl_role,ok,uptime,version,connections_current,connections_available,globalLock_currentQueue,globalLock_activeClients,mem_bits,mem_resident,mem_virtual,mem_supported,mem_mapped,mem_mappedWithJournal,network_bytesIn_persecond,network_bytesOut_persecond,network_numRequests_persecond,opcounters_insert_persecond,opcounters_query_persecond,opcounters_update_persecond,opcounters_delete_persecond,opcounters_command_persecond) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);"
        param = (server_id, host, port, tags, connect, replset, repl_role, ok,
                 uptime, version, connections_current, connections_available,
                 globalLock_currentQueue, globalLock_activeClients, mem_bits,
                 mem_resident, mem_virtual, mem_supported, mem_mapped,
                 mem_mappedWithJournal, network_bytesIn_persecond,
                 network_bytesOut_persecond, network_numRequests_persecond,
                 opcounters_insert_persecond, opcounters_query_persecond,
                 opcounters_update_persecond, opcounters_delete_persecond,
                 opcounters_command_persecond)
        func.mysql_exec(sql, param)
        role = 'm'
        func.update_db_status_init(repl_role_new, version, host, port, tags)

    except Exception, e:
        logger_msg = "check mongodb %s:%s : %s" % (host, port, e)
        logger.warning(logger_msg)

        try:
            connect = 0
            sql = "insert into mongodb_status(server_id,host,port,tags,connect) values(%s,%s,%s,%s,%s)"
            param = (server_id, host, port, tags, connect)
            func.mysql_exec(sql, param)

        except Exception, e:
            logger.error(e)
            sys.exit(1)
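
All of the check functions on this page take the connection details plus server_id and tags, which makes them straightforward to drive from a single scheduler. Below is a hypothetical dispatch loop, assuming a get_servers(db_type) helper that yields one keyword-argument dict per configured instance; neither that helper nor the CHECKS table exists in the original code.

import threading

CHECKS = {
    'mysql': check_mysql,
    'sqlserver': check_sqlserver,
    'mongodb': check_mongodb,
    'redis': check_redis,
    'memcache': check_memcache,
}

def run_checks(get_servers):
    # Spawn one worker per configured instance; get_servers(db_type) is
    # assumed to yield dicts matching the keyword arguments of each check.
    threads = []
    for db_type, check in CHECKS.items():
        for server in get_servers(db_type):
            worker = threading.Thread(target=check, kwargs=server)
            worker.start()
            threads.append(worker)
    for worker in threads:
        worker.join()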
Example #8
def check_mysql(host, port, username, password, server_id, tags):
    logger_msg = "[BBQ]begin check mysql %s:%s " % (host, port)
    logger.info(logger_msg)
    try:
        conn = MySQLdb.connect(host=host,
                               user=username,
                               passwd=password,
                               port=int(port),
                               connect_timeout=3,
                               charset='utf8')
        cur = conn.cursor()
        conn.select_db('information_schema')
        #cur.execute('flush hosts;')
        saveMysqlStatus = {}
        saveMysqlStatus['host'] = host
        saveMysqlStatus['port'] = port
        saveMysqlStatus['server_id'] = server_id
        saveMysqlStatus['tags'] = tags
        saveMysqlStatus['connect'] = 1
        ############################# CHECK MYSQL ####################################################
        mysql_variables = func.get_mysql_variables(cur)
        mysql_status = func.get_mysql_status(cur)
        logger_msg = "[BBQ]get mysql %s:%s status1 " % (host, port)
        logger.info(logger_msg)
        time.sleep(1)
        mysql_status_2 = func.get_mysql_status(cur)
        logger_msg = "[BBQ]get mysql %s:%s status2 " % (host, port)
        logger.info(logger_msg)
        ############################# GET VARIABLES ###################################################
        version = func.get_item(mysql_variables, 'version')
        saveMysqlStatus['version'] = version
        saveMysqlStatus['innodb_stats_on_metadata'] = func.get_item(
            mysql_variables, 'innodb_stats_on_metadata')
        saveMysqlStatus['sync_binlog'] = func.get_item(mysql_variables,
                                                       'sync_binlog')
        saveMysqlStatus['key_buffer_size'] = func.get_item(
            mysql_variables, 'key_buffer_size')
        saveMysqlStatus['sort_buffer_size'] = func.get_item(
            mysql_variables, 'sort_buffer_size')
        saveMysqlStatus['join_buffer_size'] = func.get_item(
            mysql_variables, 'join_buffer_size')
        saveMysqlStatus['max_connections'] = func.get_item(
            mysql_variables, 'max_connections')
        saveMysqlStatus['max_connect_errors'] = func.get_item(
            mysql_variables, 'max_connect_errors')
        saveMysqlStatus['open_files_limit'] = func.get_item(
            mysql_variables, 'open_files_limit')
        saveMysqlStatus['table_open_cache'] = func.get_item(
            mysql_variables, 'table_open_cache')
        saveMysqlStatus['max_tmp_tables'] = func.get_item(
            mysql_variables, 'max_tmp_tables')
        saveMysqlStatus['max_heap_table_size'] = func.get_item(
            mysql_variables, 'max_heap_table_size')
        saveMysqlStatus['max_allowed_packet'] = func.get_item(
            mysql_variables, 'max_allowed_packet')

        ############################# GET INNODB INFO ##################################################
        #innodb variables
        saveMysqlStatus['innodb_version'] = func.get_item(
            mysql_variables, 'innodb_version')
        saveMysqlStatus['innodb_buffer_pool_instances'] = func.get_item(
            mysql_variables, 'innodb_buffer_pool_instances')
        saveMysqlStatus['innodb_buffer_pool_size'] = func.get_item(
            mysql_variables, 'innodb_buffer_pool_size')
        saveMysqlStatus['innodb_doublewrite'] = func.get_item(
            mysql_variables, 'innodb_doublewrite')
        saveMysqlStatus['innodb_file_per_table'] = func.get_item(
            mysql_variables, 'innodb_file_per_table')
        saveMysqlStatus['innodb_flush_log_at_trx_commit'] = func.get_item(
            mysql_variables, 'innodb_flush_log_at_trx_commit')
        saveMysqlStatus['innodb_flush_method'] = func.get_item(
            mysql_variables, 'innodb_flush_method')
        saveMysqlStatus['innodb_force_recovery'] = func.get_item(
            mysql_variables, 'innodb_force_recovery')
        saveMysqlStatus['innodb_io_capacity'] = func.get_item(
            mysql_variables, 'innodb_io_capacity')
        saveMysqlStatus['innodb_read_io_threads'] = func.get_item(
            mysql_variables, 'innodb_read_io_threads')
        saveMysqlStatus['innodb_write_io_threads'] = func.get_item(
            mysql_variables, 'innodb_write_io_threads')
        #innodb status
        saveMysqlStatus['innodb_buffer_pool_pages_total'] = int(
            func.get_item(mysql_status, 'Innodb_buffer_pool_pages_total'))
        saveMysqlStatus['innodb_buffer_pool_pages_data'] = int(
            func.get_item(mysql_status, 'Innodb_buffer_pool_pages_data'))
        saveMysqlStatus['innodb_buffer_pool_pages_dirty'] = int(
            func.get_item(mysql_status, 'Innodb_buffer_pool_pages_dirty'))
        saveMysqlStatus['innodb_buffer_pool_pages_flushed'] = int(
            func.get_item(mysql_status, 'Innodb_buffer_pool_pages_flushed'))
        saveMysqlStatus['innodb_buffer_pool_pages_free'] = int(
            func.get_item(mysql_status, 'Innodb_buffer_pool_pages_free'))
        saveMysqlStatus['innodb_buffer_pool_pages_misc'] = int(
            func.get_item(mysql_status, 'Innodb_buffer_pool_pages_misc'))
        saveMysqlStatus['innodb_page_size'] = int(
            func.get_item(mysql_status, 'Innodb_page_size'))
        saveMysqlStatus['innodb_pages_created'] = int(
            func.get_item(mysql_status, 'Innodb_pages_created'))
        saveMysqlStatus['innodb_pages_read'] = int(
            func.get_item(mysql_status, 'Innodb_pages_read'))
        saveMysqlStatus['innodb_pages_written'] = int(
            func.get_item(mysql_status, 'Innodb_pages_written'))
        saveMysqlStatus['innodb_row_lock_current_waits'] = int(
            func.get_item(mysql_status, 'Innodb_row_lock_current_waits'))
        #innodb persecond info
        saveMysqlStatus['innodb_buffer_pool_read_requests_persecond'] = int(
            func.get_item(
                mysql_status_2, 'Innodb_buffer_pool_read_requests')) - int(
                    func.get_item(mysql_status,
                                  'Innodb_buffer_pool_read_requests'))
        saveMysqlStatus['innodb_buffer_pool_reads_persecond'] = int(
            func.get_item(mysql_status_2, 'Innodb_buffer_pool_reads')) - int(
                func.get_item(mysql_status, 'Innodb_buffer_pool_reads'))
        saveMysqlStatus['innodb_buffer_pool_write_requests_persecond'] = int(
            func.get_item(
                mysql_status_2, 'Innodb_buffer_pool_write_requests')) - int(
                    func.get_item(mysql_status,
                                  'Innodb_buffer_pool_write_requests'))
        saveMysqlStatus['innodb_buffer_pool_pages_flushed_persecond'] = int(
            func.get_item(
                mysql_status_2, 'Innodb_buffer_pool_pages_flushed')) - int(
                    func.get_item(mysql_status,
                                  'Innodb_buffer_pool_pages_flushed'))
        saveMysqlStatus['innodb_rows_deleted_persecond'] = int(
            func.get_item(mysql_status_2, 'Innodb_rows_deleted')) - int(
                func.get_item(mysql_status, 'Innodb_rows_deleted'))
        saveMysqlStatus['innodb_rows_inserted_persecond'] = int(
            func.get_item(mysql_status_2, 'Innodb_rows_inserted')) - int(
                func.get_item(mysql_status, 'Innodb_rows_inserted'))
        saveMysqlStatus['innodb_rows_read_persecond'] = int(
            func.get_item(mysql_status_2, 'Innodb_rows_read')) - int(
                func.get_item(mysql_status, 'Innodb_rows_read'))
        saveMysqlStatus['innodb_rows_updated_persecond'] = int(
            func.get_item(mysql_status_2, 'Innodb_rows_updated')) - int(
                func.get_item(mysql_status, 'Innodb_rows_updated'))
        ############################# GET STATUS ##################################################
        saveMysqlStatus['uptime'] = func.get_item(mysql_status, 'Uptime')
        saveMysqlStatus['open_files'] = func.get_item(mysql_status,
                                                      'Open_files')
        saveMysqlStatus['open_tables'] = func.get_item(mysql_status,
                                                       'Open_tables')
        saveMysqlStatus['threads_connected'] = func.get_item(
            mysql_status, 'Threads_connected')
        saveMysqlStatus['threads_running'] = func.get_item(
            mysql_status, 'Threads_running')
        #saveMysqlStatus['threads_created'] = func.get_item(mysql_status,'Threads_created')
        saveMysqlStatus['threads_created'] = int(
            func.get_item(mysql_status_2, 'Threads_created')) - int(
                func.get_item(mysql_status, 'Threads_created'))
        saveMysqlStatus['threads_cached'] = func.get_item(
            mysql_status, 'Threads_cached')
        saveMysqlStatus['threads_waits'] = mysql.get_waits(conn)
        saveMysqlStatus['connections'] = func.get_item(mysql_status,
                                                       'Connections')
        saveMysqlStatus['aborted_clients'] = func.get_item(
            mysql_status, 'Aborted_clients')
        saveMysqlStatus['aborted_connects'] = func.get_item(
            mysql_status, 'Aborted_connects')
        saveMysqlStatus['key_blocks_not_flushed'] = func.get_item(
            mysql_status, 'Key_blocks_not_flushed')
        saveMysqlStatus['key_blocks_unused'] = func.get_item(
            mysql_status, 'Key_blocks_unused')
        saveMysqlStatus['key_blocks_used'] = func.get_item(
            mysql_status, 'Key_blocks_used')
        ############################# GET STATUS PERSECOND ##################################################
        saveMysqlStatus['connections_persecond'] = int(
            func.get_item(mysql_status_2, 'Connections')) - int(
                func.get_item(mysql_status, 'Connections'))
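        # network traffic deltas are divided by 1024, so the next two values
        # are stored as KB per second rather than bytes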
        saveMysqlStatus['bytes_received_persecond'] = (
            int(func.get_item(mysql_status_2, 'Bytes_received')) -
            int(func.get_item(mysql_status, 'Bytes_received'))) / 1024
        saveMysqlStatus['bytes_sent_persecond'] = (
            int(func.get_item(mysql_status_2, 'Bytes_sent')) -
            int(func.get_item(mysql_status, 'Bytes_sent'))) / 1024
        saveMysqlStatus['com_select_persecond'] = int(
            func.get_item(mysql_status_2, 'Com_select')) - int(
                func.get_item(mysql_status, 'Com_select'))
        saveMysqlStatus['com_insert_persecond'] = int(
            func.get_item(mysql_status_2, 'Com_insert')) - int(
                func.get_item(mysql_status, 'Com_insert'))
        saveMysqlStatus['com_update_persecond'] = int(
            func.get_item(mysql_status_2, 'Com_update')) - int(
                func.get_item(mysql_status, 'Com_update'))
        saveMysqlStatus['com_delete_persecond'] = int(
            func.get_item(mysql_status_2, 'Com_delete')) - int(
                func.get_item(mysql_status, 'Com_delete'))
        saveMysqlStatus['com_commit_persecond'] = int(
            func.get_item(mysql_status_2, 'Com_commit')) - int(
                func.get_item(mysql_status, 'Com_commit'))
        saveMysqlStatus['com_rollback_persecond'] = int(
            func.get_item(mysql_status_2, 'Com_rollback')) - int(
                func.get_item(mysql_status, 'Com_rollback'))
        saveMysqlStatus['questions_persecond'] = int(
            func.get_item(mysql_status_2, 'Questions')) - int(
                func.get_item(mysql_status, 'Questions'))
        saveMysqlStatus['queries_persecond'] = int(
            func.get_item(mysql_status_2, 'Queries')) - int(
                func.get_item(mysql_status, 'Queries'))
        saveMysqlStatus['transaction_persecond'] = (
            int(func.get_item(mysql_status_2, 'Com_commit')) +
            int(func.get_item(mysql_status_2, 'Com_rollback'))) - (
                int(func.get_item(mysql_status, 'Com_commit')) +
                int(func.get_item(mysql_status, 'Com_rollback')))
        saveMysqlStatus['created_tmp_disk_tables_persecond'] = int(
            func.get_item(mysql_status_2, 'Created_tmp_disk_tables')) - int(
                func.get_item(mysql_status, 'Created_tmp_disk_tables'))
        saveMysqlStatus['created_tmp_files_persecond'] = int(
            func.get_item(mysql_status_2, 'Created_tmp_files')) - int(
                func.get_item(mysql_status, 'Created_tmp_files'))
        saveMysqlStatus['created_tmp_tables_persecond'] = int(
            func.get_item(mysql_status_2, 'Created_tmp_tables')) - int(
                func.get_item(mysql_status, 'Created_tmp_tables'))
        saveMysqlStatus['table_locks_immediate_persecond'] = int(
            func.get_item(mysql_status_2, 'Table_locks_immediate')) - int(
                func.get_item(mysql_status, 'Table_locks_immediate'))
        saveMysqlStatus['table_locks_waited_persecond'] = int(
            func.get_item(mysql_status_2, 'Table_locks_waited')) - int(
                func.get_item(mysql_status, 'Table_locks_waited'))
        saveMysqlStatus['key_read_requests_persecond'] = int(
            func.get_item(mysql_status_2, 'Key_read_requests')) - int(
                func.get_item(mysql_status, 'Key_read_requests'))
        saveMysqlStatus['key_reads_persecond'] = int(
            func.get_item(mysql_status_2, 'Key_reads')) - int(
                func.get_item(mysql_status, 'Key_reads'))
        saveMysqlStatus['key_write_requests_persecond'] = int(
            func.get_item(mysql_status_2, 'Key_write_requests')) - int(
                func.get_item(mysql_status, 'Key_write_requests'))
        saveMysqlStatus['key_writes_persecond'] = int(
            func.get_item(mysql_status_2, 'Key_writes')) - int(
                func.get_item(mysql_status, 'Key_writes'))
        ############################# GET MYSQL HITRATE ##################################################
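        # the hit rates below are cumulative ratios since server start, each
        # guarded against a zero denominator, e.g.
        # query_cache_hitrate = Qcache_hits / (Qcache_hits + Com_select)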
        if (string.atof(func.get_item(mysql_status, 'Qcache_hits')) +
                string.atof(func.get_item(mysql_status, 'Com_select'))) <> 0:
            query_cache_hitrate = string.atof(
                func.get_item(mysql_status, 'Qcache_hits')) / (
                    string.atof(func.get_item(mysql_status, 'Qcache_hits')) +
                    string.atof(func.get_item(mysql_status, 'Com_select')))
            query_cache_hitrate = "%9.2f" % query_cache_hitrate
        else:
            query_cache_hitrate = 0
        saveMysqlStatus['query_cache_hitrate'] = query_cache_hitrate

        if string.atof(func.get_item(mysql_status, 'Connections')) <> 0:
            thread_cache_hitrate = 1 - string.atof(
                func.get_item(mysql_status, 'Threads_created')) / string.atof(
                    func.get_item(mysql_status, 'Connections'))
            thread_cache_hitrate = "%9.2f" % thread_cache_hitrate
        else:
            thread_cache_hitrate = 0
        saveMysqlStatus['thread_cache_hitrate'] = thread_cache_hitrate

        if string.atof(func.get_item(mysql_status, 'Key_read_requests')) <> 0:
            key_buffer_read_rate = 1 - string.atof(
                func.get_item(mysql_status, 'Key_reads')) / string.atof(
                    func.get_item(mysql_status, 'Key_read_requests'))
            key_buffer_read_rate = "%9.2f" % key_buffer_read_rate
        else:
            key_buffer_read_rate = 0
        saveMysqlStatus['key_buffer_read_rate'] = key_buffer_read_rate

        if string.atof(func.get_item(mysql_status, 'Key_write_requests')) <> 0:
            key_buffer_write_rate = 1 - string.atof(
                func.get_item(mysql_status, 'Key_writes')) / string.atof(
                    func.get_item(mysql_status, 'Key_write_requests'))
            key_buffer_write_rate = "%9.2f" % key_buffer_write_rate
        else:
            key_buffer_write_rate = 0
        saveMysqlStatus['key_buffer_write_rate'] = key_buffer_write_rate

        if (string.atof(func.get_item(mysql_status, 'Key_blocks_used')) +
                string.atof(func.get_item(mysql_status,
                                          'Key_blocks_unused'))) <> 0:
            key_blocks_used_rate = string.atof(
                func.get_item(mysql_status, 'Key_blocks_used')
            ) / (string.atof(func.get_item(mysql_status, 'Key_blocks_used')) +
                 string.atof(func.get_item(mysql_status, 'Key_blocks_unused')))
            key_blocks_used_rate = "%9.2f" % key_blocks_used_rate
        else:
            key_blocks_used_rate = 0
        saveMysqlStatus['key_blocks_used_rate'] = key_blocks_used_rate

        if (string.atof(func.get_item(
                mysql_status, 'Created_tmp_disk_tables')) + string.atof(
                    func.get_item(mysql_status, 'Created_tmp_tables'))) <> 0:
            created_tmp_disk_tables_rate = string.atof(
                func.get_item(mysql_status, 'Created_tmp_disk_tables')) / (
                    string.atof(
                        func.get_item(mysql_status, 'Created_tmp_disk_tables'))
                    + string.atof(
                        func.get_item(mysql_status, 'Created_tmp_tables')))
            created_tmp_disk_tables_rate = "%9.2f" % created_tmp_disk_tables_rate
        else:
            created_tmp_disk_tables_rate = 0
        saveMysqlStatus[
            'created_tmp_disk_tables_rate'] = created_tmp_disk_tables_rate

        max_connections = saveMysqlStatus.get('max_connections')
        threads_connected = saveMysqlStatus.get('threads_connected')
        if string.atof(max_connections) <> 0:
            connections_usage_rate = string.atof(
                threads_connected) / string.atof(max_connections)
            connections_usage_rate = "%9.2f" % connections_usage_rate
        else:
            connections_usage_rate = 0
        saveMysqlStatus['connections_usage_rate'] = connections_usage_rate

        open_files_limit = saveMysqlStatus.get('open_files_limit')
        open_files = saveMysqlStatus.get('open_files')
        if string.atof(open_files_limit) <> 0:
            open_files_usage_rate = string.atof(open_files) / string.atof(
                open_files_limit)
            open_files_usage_rate = "%9.2f" % open_files_usage_rate
        else:
            open_files_usage_rate = 0
        saveMysqlStatus['open_files_usage_rate'] = open_files_usage_rate

        table_open_cache = saveMysqlStatus.get('table_open_cache')
        open_tables = saveMysqlStatus.get('open_tables')
        if string.atof(table_open_cache) <> 0:
            open_tables_usage_rate = string.atof(open_tables) / string.atof(
                table_open_cache)
            open_tables_usage_rate = "%9.2f" % open_tables_usage_rate
        else:
            open_tables_usage_rate = 0
        saveMysqlStatus['open_tables_usage_rate'] = open_tables_usage_rate

        #repl
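        # role detection: the instance is recorded as a master when it has no
        # slave status at all, or when it still has registered slave hosts and
        # read_only is OFF; everything else is treated as a slave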
        read_only = func.get_item(mysql_variables, 'read_only')
        slave_hosts = cur.execute('show slave hosts;')
        slave_status = cur.execute('show slave status;')
        if (slave_status == 0) or (slave_status <> 0 and slave_hosts > 0 and
                                   (not cmp(read_only, "OFF"))):
            role = 'master'
            role_new = 'm'
        else:
            role = 'slave'
            role_new = 's'
        saveMysqlStatus['role'] = role

        ############################# disk size          ##################################################
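        # disk usage is not collected here; a placeholder value of 0 is stored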
        disk_size_m = 0
        saveMysqlStatus['disk_size_m'] = disk_size_m

        ############################# INSERT INTO SERVER ##################################################
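        # archive the previous sample into mysql_status_history, clear the
        # live table for this host:port, then insert the fresh snapshot; the
        # three statements are executed together via mysql_exec_many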
        updSqls = []
        updSqls.append(
            "replace into mysql_status_history SELECT *,LEFT(REPLACE(REPLACE(REPLACE(create_time,'-',''),' ',''),':',''),12) from mysql_status where host='%s' and port=%s"
            % (host, port))
        updSqls.append("delete from mysql_status where host='%s' and port=%s" %
                       (host, port))
        cols = []
        vals = []
        for key, val in saveMysqlStatus.iteritems():
            cols.append(key)
            vals.append(str(val))
        insSql = "insert into mysql_status(%s) VALUES ('%s')" % (
            ",".join(cols), "','".join(vals))
        updSqls.append(insSql)
        #logger.info(updSqls)
        func.mysql_exec_many(updSqls)
        logger_msg = "[BBQ]save mysql %s:%s status " % (host, port)
        logger.info(logger_msg)
        func.update_db_status_init(server_id, role_new, version, host, port,
                                   tags)

        # save other
        func.other_save("mysql_status", saveMysqlStatus)

        #check mysql process
        processlist = cur.execute(
            'select * from information_schema.processlist where DB !="information_schema" and command !="Sleep";'
        )
        if processlist:
            for line in cur.fetchall():
                sql = "insert into mysql_processlist(server_id,host,port,tags,pid,p_user,p_host,p_db,command,time,status,info) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
                param = (server_id, host, port, tags, line[0], line[1],
                         line[2], line[3], line[4], line[5], line[6], line[7])
                func.mysql_exec(sql, param)

        #check mysql connected
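        # aggregate current client connections by source host, user and db
        # (system schemas excluded) and store one row per combination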
        connected = cur.execute(
            "select SUBSTRING_INDEX(host,':',1) as connect_server, user connect_user,db connect_db, count(SUBSTRING_INDEX(host,':',1)) as connect_count  from information_schema.processlist where db is not null and db!='information_schema' and db !='performance_schema' group by connect_server, connect_user, connect_db ;"
        )
        if connected:
            for line in cur.fetchall():
                sql = "insert into mysql_connected(server_id,host,port,tags,connect_server,connect_user,connect_db,connect_count) values(%s,%s,%s,%s,%s,%s,%s,%s);"
                param = (server_id, host, port, tags, line[0], line[1],
                         line[2], line[3])
                func.mysql_exec(sql, param)

        #check mysql replication
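        # a running 'Binlog Dump' thread means at least one slave is attached,
        # so is_master is set to 1; is_slave is derived from the role computed
        # above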
        master_thread = cur.execute(
            "select * from information_schema.processlist where COMMAND = 'Binlog Dump' or COMMAND = 'Binlog Dump GTID';"
        )
        slave_status = cur.execute('show slave status;')
        datalist = []
        if master_thread >= 1:
            datalist.append(int(1))
            if not cmp("slave", role):
                datalist.append(int(1))
            else:
                datalist.append(int(0))
        else:
            datalist.append(int(0))
            if not cmp("slave", role):
                datalist.append(int(1))
            else:
                datalist.append(int(0))

        gtid_mode = cur.execute(
            "select * from information_schema.global_variables where variable_name='gtid_mode';"
        )
        result = cur.fetchone()
        if result:
            gtid_mode = result[1]
        else:
            gtid_mode = 'OFF'
        datalist.append(gtid_mode)

        read_only = cur.execute(
            "show variables where variable_name like 'read_only';")
        result = cur.fetchone()
        datalist.append(result[1])

        master_binlog_file = '---'
        master_binlog_pos = '---'
        master_binlog_space = 0
        slave_info = cur.execute('show slave status;')
        if slave_info == 0:
            for i in range(0, 7):
                datalist.append('-1')
        else:
            result = cur.fetchone()
            master_server = result[1]
            master_port = result[3]
            master_binlog_file = result[5]
            master_binlog_pos = result[6]
            current_binlog_file = result[9]
            slave_io_run = result[10]
            slave_sql_run = result[11]
            current_binlog_pos = result[21]
            delay = result[32]
            # delay via heartbeat table
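            # if the master is registered in db_servers_mysql, fetch its
            # server_id and recompute the delay from the dhdba.heartbeat
            # timestamp instead of the SHOW SLAVE STATUS delay column
            # (result[32]) read above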
            masterHosts = func.mysql_query(
                "select host from db_servers_mysql where host='%s' or replicate_ip='%s' limit 1"
                % (master_server, master_server))
            if masterHosts != 0:
                masterHost = masterHosts[0][0]
                connMaster = MySQLdb.connect(host=masterHost,
                                             user=username,
                                             passwd=password,
                                             port=int(master_port),
                                             connect_timeout=3,
                                             charset='utf8')
                curMaster = connMaster.cursor()
                connMaster.select_db('information_schema')
                master_variables = func.get_mysql_variables(curMaster)
                master_server_id = master_variables.get("server_id")
                connMaster.close()
                # query delay by master_server_id
                qHb = "select UNIX_TIMESTAMP()-UNIX_TIMESTAMP(STR_TO_DATE(substring_index(ts,'.',1),'%%Y-%%m-%%dT%%H:%%i:%%s')) as d from dhdba.heartbeat where server_id='%s';" % (
                    master_server_id)
                cur.execute(qHb)
                lines = cur.fetchone()
                if lines:
                    delay = int(lines[0])
                    if delay < 0:
                        delay = 0
            datalist.append(master_server)
            datalist.append(master_port)
            datalist.append(slave_io_run)
            datalist.append(slave_sql_run)
            datalist.append(delay)
            datalist.append(current_binlog_file)
            datalist.append(current_binlog_pos)
        master = cur.execute('show master status;')
        if master != 0:
            master_result = cur.fetchone()
            master_binlog_file = master_result[0]
            master_binlog_pos = master_result[1]
        binlog_file = cur.execute('show master logs;')
        if binlog_file:
            for row in cur.fetchall():
                master_binlog_space = master_binlog_space + int(row[1])

        datalist.append(master_binlog_file)
        datalist.append(master_binlog_pos)
        datalist.append(master_binlog_space)

        result = datalist
        if result:
            func.mysql_exec(
                "replace into mysql_replication_history SELECT *,LEFT(REPLACE(REPLACE(REPLACE(create_time,'-',''),' ',''),':',''),12) from mysql_replication where host='%s' and port=%s"
                % (host, port), '')
            func.mysql_exec(
                "delete from mysql_replication where host='%s' and port=%s" %
                (host, port), '')
            sql = "insert into mysql_replication(server_id,tags,host,port,is_master,is_slave,gtid_mode,read_only,master_server,master_port,slave_io_run,slave_sql_run,delay,current_binlog_file,current_binlog_pos,master_binlog_file,master_binlog_pos,master_binlog_space) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
            param = (server_id, tags, host, port, result[0], result[1],
                     result[2], result[3], result[4], result[5], result[6],
                     result[7], result[8], result[9], result[10], result[11],
                     result[12], result[13])
            func.mysql_exec(sql, param)

        cur.close()
    except MySQLdb.Error, e:
        logger_msg = "check mysql %s:%s failure: %d %s" % (
            host, port, e.args[0], e.args[1])
        logger.warning(logger_msg)
        logger_msg = "check mysql %s:%s failure: sleep 3 seconds and check again." % (
            host, port)
        logger.warning(logger_msg)
        time.sleep(3)
        try:
            conn = MySQLdb.connect(host=host,
                                   user=username,
                                   passwd=password,
                                   port=int(port),
                                   connect_timeout=3,
                                   charset='utf8')
            cur = conn.cursor()
            conn.select_db('information_schema')
        except MySQLdb.Error, e:
            logger_msg = "check mysql second %s:%s failure: %d %s" % (
                host, port, e.args[0], e.args[1])
            logger.warning(logger_msg)
            connect = 0
            sql = "replace into mysql_status(server_id,host,port,tags,connect) values(%s,%s,%s,%s,%s)"
            param = (server_id, host, port, tags, connect)
            func.mysql_exec(sql, param)
Example #9
0
def check_redis(host,port,passwd,server_id,tags):
    try:
        r=redis.StrictRedis(host=host,port=port,password=passwd,db=0,socket_timeout=3,charset='utf-8') 
        info=r.info()
        time.sleep(1)
        info_2=r.info()
        # Server
        redis_version = info['redis_version']
        redis_git_sha1 = info['redis_git_sha1']
        redis_git_dirty = info['redis_git_dirty']
        arch_bits = info['arch_bits']
        multiplexing_api = info['multiplexing_api']
        gcc_version = info['gcc_version']
        process_id = info['process_id']
        uptime_in_seconds = info['uptime_in_seconds']
        uptime_in_days = info['uptime_in_days']
        lru_clock = info['lru_clock']
        os = check_value(info,'os')
        redis_mode = check_value(info,'redis_mode')
        hz = check_value(info,'hz')
        run_id = check_value(info,'run_id')
        tcp_port = check_value(info,'tcp_port')

        # Clients 
        connected_clients = info['connected_clients']
        client_longest_output_list = info['client_longest_output_list']
        client_biggest_input_buf = info['client_biggest_input_buf']
        blocked_clients = info['blocked_clients']
        # Memory
        used_memory = info['used_memory']
        used_memory_human = info['used_memory_human']
        used_memory_rss = info['used_memory_rss']
        used_memory_peak = info['used_memory_peak']
        used_memory_peak_human = info['used_memory_peak_human']
        used_memory_lua = check_value(info,'used_memory_lua')
        mem_fragmentation_ratio = info['mem_fragmentation_ratio']
        mem_allocator = info['mem_allocator']
        # Persistence
        loading = info['loading']
        rdb_changes_since_last_save = check_value(info,'rdb_changes_since_last_save')
        rdb_bgsave_in_progress = check_value(info,'rdb_bgsave_in_progress')
        rdb_last_save_time = check_value(info,'rdb_last_save_time')
        rdb_last_bgsave_status = check_value(info,'rdb_last_bgsave_status')
        rdb_last_bgsave_time_sec = check_value(info,'rdb_last_bgsave_time_sec')
        rdb_current_bgsave_time_sec = check_value(info,'rdb_current_bgsave_time_sec')
        aof_enabled = check_value(info,'aof_enabled')
        aof_rewrite_in_progress = check_value(info,'aof_rewrite_in_progress')
        aof_rewrite_scheduled = check_value(info,'aof_rewrite_scheduled')
        aof_last_rewrite_time_sec = check_value(info,'aof_last_rewrite_time_sec')
        aof_current_rewrite_time_sec = check_value(info,'aof_current_rewrite_time_sec')
        aof_last_bgrewrite_status = check_value(info,'aof_last_bgrewrite_status')
        # Stats
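        # current_commands_processed below is the delta between the two INFO
        # snapshots taken one second apart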
        total_connections_received = check_value(info,'total_connections_received')
        total_commands_processed = check_value(info,'total_commands_processed')
        current_commands_processed = int(info_2['total_commands_processed'] - info['total_commands_processed'])
        instantaneous_ops_per_sec = check_value(info,'instantaneous_ops_per_sec')
        rejected_connections = check_value(info,'rejected_connections')

        expired_keys = info['expired_keys']
        evicted_keys = info['evicted_keys']
        keyspace_hits = info['keyspace_hits']
        keyspace_misses = info['keyspace_misses']
        pubsub_channels = info['pubsub_channels']
        pubsub_patterns = info['pubsub_patterns']
        latest_fork_usec = info['latest_fork_usec']
        # Replication
        role = info['role']
        connected_slaves = info['connected_slaves']
        
        # CPU
        used_cpu_sys = info['used_cpu_sys']
        used_cpu_user = info['used_cpu_user']
        used_cpu_sys_children = info['used_cpu_sys_children']
        used_cpu_user_children = info['used_cpu_user_children']

        # replication
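        # the slave-only INFO fields exist only when role == 'slave'; on that
        # branch the master's server_id is also resolved from db_servers_redis,
        # while a master gets '-1' placeholders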
        if role == 'slave':
           #print info
           master_host = info['master_host']
           master_port = info['master_port']
           master_link_status = info['master_link_status']
           master_last_io_seconds_ago = info['master_last_io_seconds_ago']
           master_sync_in_progress = info['master_sync_in_progress']
           #slave_repl_offset = info['slave_repl_offset']
           slave_priority = check_value(info,'slave_priority')
           slave_read_only = check_value(info,'slave_read_only')
           master_server_id = func.mysql_query("SELECT id FROM db_servers_redis WHERE host='%s' AND port='%s' limit 1;" %(master_host,master_port))
           master_server_id = master_server_id[0][0]
           role_new='s'
        else:
           master_host = '-1'
           master_port = '-1'
           master_link_status= '-1'
           master_last_io_seconds_ago = '-1'
           master_sync_in_progress = '-1'
           #slave_repl_offset = '---'
           slave_priority = '-1'
           slave_read_only = '-1'
           master_server_id = '-1'
           role_new='m'

        #add redis_status
        connect=1
        sql = "insert into redis_status(server_id,host,port,tags,redis_role,connect,redis_version,redis_git_sha1,redis_git_dirty,redis_mode,os,arch_bits,multiplexing_api,gcc_version,process_id,run_id,tcp_port,uptime_in_seconds,uptime_in_days,hz,lru_clock,connected_clients,client_longest_output_list,client_biggest_input_buf,blocked_clients,used_memory,used_memory_human,used_memory_rss,used_memory_peak,used_memory_peak_human,used_memory_lua,mem_fragmentation_ratio,mem_allocator,loading,rdb_changes_since_last_save,rdb_bgsave_in_progress,rdb_last_save_time,rdb_last_bgsave_status,rdb_last_bgsave_time_sec,rdb_current_bgsave_time_sec,aof_enabled,aof_rewrite_in_progress,aof_rewrite_scheduled,aof_last_rewrite_time_sec,aof_current_rewrite_time_sec,aof_last_bgrewrite_status,total_connections_received,total_commands_processed,current_commands_processed,instantaneous_ops_per_sec,rejected_connections,expired_keys,evicted_keys,keyspace_hits,keyspace_misses,pubsub_channels,pubsub_patterns,latest_fork_usec,used_cpu_sys,used_cpu_user,used_cpu_sys_children,used_cpu_user_children) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);"
        param = (server_id,host,port,tags,role,connect,redis_version,redis_git_sha1,redis_git_dirty,redis_mode,os,arch_bits,multiplexing_api,gcc_version,process_id,run_id,tcp_port,uptime_in_seconds,uptime_in_days,hz,lru_clock,connected_clients,client_longest_output_list,client_biggest_input_buf,blocked_clients,used_memory,used_memory_human,used_memory_rss,used_memory_peak,used_memory_peak_human,used_memory_lua,mem_fragmentation_ratio,mem_allocator,loading,rdb_changes_since_last_save,rdb_bgsave_in_progress,rdb_last_save_time,rdb_last_bgsave_status,rdb_last_bgsave_time_sec,rdb_current_bgsave_time_sec,aof_enabled,aof_rewrite_in_progress,aof_rewrite_scheduled,aof_last_rewrite_time_sec,aof_current_rewrite_time_sec,aof_last_bgrewrite_status,total_connections_received,total_commands_processed,current_commands_processed,instantaneous_ops_per_sec,rejected_connections,expired_keys,evicted_keys,keyspace_hits,keyspace_misses,pubsub_channels,pubsub_patterns,latest_fork_usec,used_cpu_sys,used_cpu_user,used_cpu_sys_children,used_cpu_user_children)
        func.mysql_exec(sql,param)

        #add redis_replication
        sql_1 = "insert into redis_replication(server_id,tags,host,port,role,master_server_id,master_host,master_port,master_link_status,master_last_io_seconds_ago,master_sync_in_progress,slave_priority,slave_read_only,connected_slaves) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);"
        param_1 = (server_id,tags,host,port,role,master_server_id,master_host,master_port,master_link_status,master_last_io_seconds_ago,master_sync_in_progress,slave_priority,slave_read_only,connected_slaves)
        func.mysql_exec(sql_1,param_1)
        func.update_db_status_init(role_new,redis_version,host,port,tags)

    except Exception, e:
        logger_msg="check redis %s:%s : %s" %(host,port,e)
        logger.warning(logger_msg)
   
        try:
            connect=0
            sql="insert into redis_status(server_id,host,port,tags,connect) values(%s,%s,%s,%s,%s)"
            param=(server_id,host,port,tags,connect)
            func.mysql_exec(sql,param)

        except Exception, e:
            logger.error(e)
            sys.exit(1)
Example #10
0
def check_mongodb(host,port,user,passwd,server_id,tags):
    try:
        connect = pymongo.Connection(host,int(port))
        db = connect['admin'] 
        db.authenticate(user,passwd)
        serverStatus=connect.admin.command(bson.son.SON([('serverStatus', 1), ('repl', 2)]))
        time.sleep(1)
        serverStatus_2=connect.admin.command(bson.son.SON([('serverStatus', 1), ('repl', 2)]))
        connect = 1
        ok = int(serverStatus['ok'])
        version = serverStatus['version']
        uptime = serverStatus['uptime']
        connections_current = serverStatus['connections']['current']
        connections_available = serverStatus['connections']['available']
        globalLock_activeClients = serverStatus['globalLock']['activeClients']['total']
        globalLock_currentQueue = serverStatus['globalLock']['currentQueue']['total']
        indexCounters_accesses = serverStatus['indexCounters']['accesses']
        indexCounters_hits = serverStatus['indexCounters']['hits']
        indexCounters_misses = serverStatus['indexCounters']['misses']
        indexCounters_resets = serverStatus['indexCounters']['resets']
        indexCounters_missRatio = serverStatus['indexCounters']['missRatio']
        #cursors_totalOpen = serverStatus['cursors']['totalOpen']
        #cursors_timeOut =  serverStatus['cursors']['timeOut']
        dur_commits = serverStatus['dur']['commits']
        dur_journaledMB = serverStatus['dur']['journaledMB']
        dur_writeToDataFilesMB = serverStatus['dur']['writeToDataFilesMB']
        dur_compression = serverStatus['dur']['compression']
        dur_commitsInWriteLock = serverStatus['dur']['commitsInWriteLock']
        dur_earlyCommits = serverStatus['dur']['earlyCommits']
        dur_timeMs_dt = serverStatus['dur']['timeMs']['dt']
        dur_timeMs_prepLogBuffer = serverStatus['dur']['timeMs']['prepLogBuffer']
        dur_timeMs_writeToJournal = serverStatus['dur']['timeMs']['writeToJournal']
        dur_timeMs_writeToDataFiles = serverStatus['dur']['timeMs']['writeToDataFiles']
        dur_timeMs_remapPrivateView = serverStatus['dur']['timeMs']['remapPrivateView']
        mem_bits = serverStatus['mem']['bits']
        mem_resident = serverStatus['mem']['resident']
        mem_virtual = serverStatus['mem']['virtual']
        mem_supported = serverStatus['mem']['supported']
        mem_mapped = serverStatus['mem']['mapped']
        mem_mappedWithJournal = serverStatus['mem']['mappedWithJournal']
        network_bytesIn_persecond = int(serverStatus_2['network']['bytesIn']) - int(serverStatus['network']['bytesIn'])
        network_bytesOut_persecond = int(serverStatus_2['network']['bytesOut']) - int(serverStatus['network']['bytesOut'])
        network_numRequests_persecond = int(serverStatus_2['network']['numRequests']) - int(serverStatus['network']['numRequests'])
        opcounters_insert_persecond = int(serverStatus_2['opcounters']['insert']) - int(serverStatus['opcounters']['insert'])
        opcounters_query_persecond = int(serverStatus_2['opcounters']['query']) - int(serverStatus['opcounters']['query'])
        opcounters_update_persecond = int(serverStatus_2['opcounters']['update']) - int(serverStatus['opcounters']['update'])
        opcounters_delete_persecond = int(serverStatus_2['opcounters']['delete']) - int(serverStatus['opcounters']['delete'])
        opcounters_command_persecond = int(serverStatus_2['opcounters']['command']) - int(serverStatus['opcounters']['command'])

        #replset
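        # if serverStatus carries a 'repl' section with a setName, the node is
        # part of a replica set; a missing key raises and the bare except falls
        # back to treating it as a standalone master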
        try:
            repl=serverStatus['repl']
            setName=repl['setName']
            replset=1
            if repl['secondary'] == True:
                repl_role='secondary'
                repl_role_new='s'
            else:
                repl_role='master'
                repl_role_new='m' 
        except:
            replset=0
            repl_role='master'
            repl_role_new='m'
            pass

        ##################### insert data to mysql server#############################
        sql = "insert into mongodb_status(server_id,host,port,tags,connect,replset,repl_role,ok,uptime,version,connections_current,connections_available,globalLock_currentQueue,globalLock_activeClients,indexCounters_accesses,indexCounters_hits,indexCounters_misses,indexCounters_resets,indexCounters_missRatio,dur_commits,dur_journaledMB,dur_writeToDataFilesMB,dur_compression,dur_commitsInWriteLock,dur_earlyCommits,dur_timeMs_dt,dur_timeMs_prepLogBuffer,dur_timeMs_writeToJournal,dur_timeMs_writeToDataFiles,dur_timeMs_remapPrivateView,mem_bits,mem_resident,mem_virtual,mem_supported,mem_mapped,mem_mappedWithJournal,network_bytesIn_persecond,network_bytesOut_persecond,network_numRequests_persecond,opcounters_insert_persecond,opcounters_query_persecond,opcounters_update_persecond,opcounters_delete_persecond,opcounters_command_persecond) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);"       
        param = (server_id,host,port,tags,connect,replset,repl_role,ok,uptime,version,connections_current,connections_available,globalLock_currentQueue,globalLock_activeClients,indexCounters_accesses,indexCounters_hits,indexCounters_misses,indexCounters_resets,indexCounters_missRatio,dur_commits,dur_journaledMB,dur_writeToDataFilesMB,dur_compression,dur_commitsInWriteLock,dur_earlyCommits,dur_timeMs_dt,dur_timeMs_prepLogBuffer,dur_timeMs_writeToJournal,dur_timeMs_writeToDataFiles,dur_timeMs_remapPrivateView,mem_bits,mem_resident,mem_virtual,mem_supported,mem_mapped,mem_mappedWithJournal,network_bytesIn_persecond,network_bytesOut_persecond,network_numRequests_persecond,opcounters_insert_persecond,opcounters_query_persecond,opcounters_update_persecond,opcounters_delete_persecond,opcounters_command_persecond)
        func.mysql_exec(sql,param)
        role='m'
        func.update_db_status_init(repl_role_new,version,host,port,tags)

    except Exception, e:
        logger_msg="check mongodb %s:%s : %s" %(host,port,e)
        logger.warning(logger_msg)

        try:
            connect=0
            sql="insert into mongodb_status(server_id,host,port,tags,connect) values(%s,%s,%s,%s,%s)"
            param=(server_id,host,port,tags,connect)
            func.mysql_exec(sql,param)

        except Exception, e:
            logger.error(e)
            sys.exit(1)
def check_mongodb(host,port,user,passwd,server_id,tags):
    try:
        func.mysql_exec("insert into mongodb_status_history SELECT *,LEFT(REPLACE(REPLACE(REPLACE(create_time,'-',''),' ',''),':',''),12) from mongodb_status where server_id='%s';" %(server_id),'')
        func.mysql_exec("delete from mongodb_status where server_id='%s';" %(server_id),'')

        #connect = pymongo.Connection(host,int(port))
        client = pymongo.MongoClient(host, int(port))
        db = client['admin'] 
        db.authenticate(user,passwd)
        serverStatus=client.admin.command(bson.son.SON([('serverStatus', 1), ('repl', 2)]))
        time.sleep(1)
        serverStatus_2=client.admin.command(bson.son.SON([('serverStatus', 1), ('repl', 2)]))
        connect = 1
        ok = int(serverStatus['ok'])
        version = serverStatus['version']
        uptime = serverStatus['uptime']
        connections_current = serverStatus['connections']['current']
        connections_available = serverStatus['connections']['available']
        globalLock_activeClients = serverStatus['globalLock']['activeClients']['total']
        globalLock_currentQueue = serverStatus['globalLock']['currentQueue']['total']
        mem_bits = serverStatus['mem']['bits']
        mem_resident = serverStatus['mem']['resident']
        mem_virtual = serverStatus['mem']['virtual']
        mem_supported = serverStatus['mem']['supported']
        mem_mapped = serverStatus['mem']['mapped']
        mem_mappedWithJournal = serverStatus['mem']['mappedWithJournal']
        network_bytesIn_persecond = int(serverStatus_2['network']['bytesIn']) - int(serverStatus['network']['bytesIn'])
        network_bytesOut_persecond = int(serverStatus_2['network']['bytesOut']) - int(serverStatus['network']['bytesOut'])
        network_numRequests_persecond = int(serverStatus_2['network']['numRequests']) - int(serverStatus['network']['numRequests'])
        opcounters_insert_persecond = int(serverStatus_2['opcounters']['insert']) - int(serverStatus['opcounters']['insert'])
        opcounters_query_persecond = int(serverStatus_2['opcounters']['query']) - int(serverStatus['opcounters']['query'])
        opcounters_update_persecond = int(serverStatus_2['opcounters']['update']) - int(serverStatus['opcounters']['update'])
        opcounters_delete_persecond = int(serverStatus_2['opcounters']['delete']) - int(serverStatus['opcounters']['delete'])
        opcounters_command_persecond = int(serverStatus_2['opcounters']['command']) - int(serverStatus['opcounters']['command'])

        #replset
        try:
            repl=serverStatus['repl']
            setName=repl['setName']
            replset=1
            if repl['secondary'] == True:
                repl_role='secondary'
                repl_role_new='s'
            else:
                repl_role='master'
                repl_role_new='m' 
        except:
            replset=0
            repl_role='master'
            repl_role_new='m'
            pass

        ##################### insert data to mysql server#############################
        sql = "insert into mongodb_status(server_id,host,port,tags,connect,replset,repl_role,ok,uptime,version,connections_current,connections_available,globalLock_currentQueue,globalLock_activeClients,mem_bits,mem_resident,mem_virtual,mem_supported,mem_mapped,mem_mappedWithJournal,network_bytesIn_persecond,network_bytesOut_persecond,network_numRequests_persecond,opcounters_insert_persecond,opcounters_query_persecond,opcounters_update_persecond,opcounters_delete_persecond,opcounters_command_persecond) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);"       
        param = (server_id,host,port,tags,connect,replset,repl_role,ok,uptime,version,connections_current,connections_available,globalLock_currentQueue,globalLock_activeClients,mem_bits,mem_resident,mem_virtual,mem_supported,mem_mapped,mem_mappedWithJournal,network_bytesIn_persecond,network_bytesOut_persecond,network_numRequests_persecond,opcounters_insert_persecond,opcounters_query_persecond,opcounters_update_persecond,opcounters_delete_persecond,opcounters_command_persecond)
        func.mysql_exec(sql,param)
        role='m'
        func.update_db_status_init(repl_role_new,version,host,port,tags)

    except Exception, e:
        logger_msg="check mongodb %s:%s : %s" %(host,port,e)
        logger.warning(logger_msg)

        try:
            connect=0
            sql="insert into mongodb_status(server_id,host,port,tags,connect) values(%s,%s,%s,%s,%s)"
            param=(server_id,host,port,tags,connect)
            func.mysql_exec(sql,param)

        except Exception, e:
            logger.error(e)
            sys.exit(1)
Example #12
0
                 instance_role, instance_status, database_role, open_mode,
                 protection_mode, host_name, database_status, startup_time,
                 uptime, version, archiver, session_total, session_actives,
                 session_waits, dg_stats, dg_delay, processes,
                 session_logical_reads_persecond, physical_reads_persecond,
                 physical_writes_persecond,
                 physical_read_io_requests_persecond,
                 physical_write_io_requests_persecond,
                 db_block_changes_persecond, os_cpu_wait_time,
                 logons_persecond, logons_current, opened_cursors_persecond,
                 opened_cursors_current, user_commits_persecond,
                 user_rollbacks_persecond, user_calls_persecond,
                 db_block_gets_persecond, flashback_on,
                 flashback_earliest_time, flashback_space_used)
        func.mysql_exec(sql, param)
        func.update_db_status_init(server_id, 'oracle', database_role_new,
                                   version, tags)
        func.mysql_exec("commit;", '')

        logger.info("Generate oracle instance alert for server: %s begin:" %
                    (server_id))
        alert.gen_alert_oracle_status(
            server_id)  # generate oracle instance alert
        logger.info("Generate oracle instance alert for server: %s end." %
                    (server_id))

        #check tablespace
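        # archive the previous oracle_tablespace rows into oracle_tablespace_his,
        # with a sysdate()-based timestamp column appended, before refreshing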
        func.mysql_exec("begin;", '')
        func.mysql_exec(
            "insert into oracle_tablespace_his SELECT *,DATE_FORMAT(sysdate(),'%%Y%%m%%d%%H%%i%%s') from oracle_tablespace where server_id = %s;"
            % (server_id), '')
        func.mysql_exec(
Example #13
0
            innodb_buffer_pool_pages_flushed, innodb_buffer_pool_pages_free,
            innodb_buffer_pool_pages_misc, innodb_page_size,
            innodb_pages_created, innodb_pages_read, innodb_pages_written,
            innodb_row_lock_current_waits,
            innodb_buffer_pool_pages_flushed_persecond,
            innodb_buffer_pool_read_requests_persecond,
            innodb_buffer_pool_reads_persecond,
            innodb_buffer_pool_write_requests_persecond,
            innodb_rows_read_persecond, innodb_rows_inserted_persecond,
            innodb_rows_updated_persecond, innodb_rows_deleted_persecond,
            query_cache_hitrate, thread_cache_hitrate, key_buffer_read_rate,
            key_buffer_write_rate, key_blocks_used_rate,
            created_tmp_disk_tables_rate, connections_usage_rate,
            open_files_usage_rate, open_tables_usage_rate)
        func.mysql_exec(sql, param)
        func.update_db_status_init(server_id, 'mysql', role_new, version, tags)

        # generate mysql status alert
        alert.gen_alert_mysql_status(server_id)

        #check mysql process
        processlist = cur.execute(
            "select * from information_schema.processlist where DB !='information_schema' and command !='Sleep';"
        )
        if processlist:
            for line in cur.fetchall():
                sql = "insert into mysql_processlist(server_id,host,port,tags,pid,p_user,p_host,p_db,command,time,status,info) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
                param = (server_id, host, port, tags, line[0], line[1],
                         line[2], line[3], line[4], line[5], line[6], line[7])
                func.mysql_exec(sql, param)
Example #14
0
        os_cpu_wait_time = sysstat_0['OS CPU Qt wait time']
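        # the following per-second figures are deltas between the two sysstat
        # snapshots (sysstat_1 minus sysstat_0)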
        logons_persecond = sysstat_1['logons cumulative']-sysstat_0['logons cumulative']
        logons_current = sysstat_0['logons current']
        opened_cursors_persecond = sysstat_1['opened cursors cumulative']-sysstat_0['opened cursors cumulative']
        opened_cursors_current = sysstat_0['opened cursors current']
        user_commits_persecond = sysstat_1['user commits']-sysstat_0['user commits']
        user_rollbacks_persecond = sysstat_1['user rollbacks']-sysstat_0['user rollbacks']
        user_calls_persecond = sysstat_1['user calls']-sysstat_0['user calls']
        db_block_gets_persecond = sysstat_1['db block gets']-sysstat_0['db block gets']
        #print session_logical_reads_persecond

        ##################### insert data to mysql server#############################
        sql = "insert into oracle_status(server_id,host,port,tags,connect,instance_name,instance_role,instance_status,database_role,open_mode,protection_mode,host_name,database_status,startup_time,uptime,version,archiver,session_total,session_actives,session_waits,dg_stats,dg_delay,processes,session_logical_reads_persecond,physical_reads_persecond,physical_writes_persecond,physical_read_io_requests_persecond,physical_write_io_requests_persecond,db_block_changes_persecond,os_cpu_wait_time,logons_persecond,logons_current,opened_cursors_persecond,opened_cursors_current,user_commits_persecond,user_rollbacks_persecond,user_calls_persecond,db_block_gets_persecond) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);"
        param = (server_id,host,port,tags,connect,instance_name,instance_role,instance_status,database_role,open_mode,protection_mode,host_name,database_status,startup_time,uptime,version,archiver,session_total,session_actives,session_waits,dg_stats,dg_delay,processes,session_logical_reads_persecond,physical_reads_persecond,physical_writes_persecond,physical_read_io_requests_persecond,physical_write_io_requests_persecond,db_block_changes_persecond,os_cpu_wait_time,logons_persecond,logons_current,opened_cursors_persecond,opened_cursors_current,user_commits_persecond,user_rollbacks_persecond,user_calls_persecond,db_block_gets_persecond)
        func.mysql_exec(sql,param) 
        func.update_db_status_init(database_role_new,version,host,port,tags)

        #check tablespace
        tablespace = oracle.get_tablespace(conn)
        if tablespace:
           for line in tablespace:
              sql="insert into oracle_tablespace(server_id,host,port,tags,tablespace_name,total_size,used_size,avail_size,used_rate) values(%s,%s,%s,%s,%s,%s,%s,%s,%s)"
              param=(server_id,host,port,tags,line[0],line[1],line[2],line[3],line[4])
              func.mysql_exec(sql,param)
           

    except Exception, e:
        logger.error(e)
        sys.exit(1)

    finally:
        pack_sent_2 = sqlserver.get_variables(conn, 'PACK_SENT')
        packet_errors_2 = sqlserver.get_variables(conn, 'PACKET_ERRORS')

        connections_persecond = int(connections_2) - int(connections)
        pack_received_persecond = int(pack_received_2) - int(pack_received)
        pack_sent_persecond = int(pack_sent_2) - int(pack_sent)
        packet_errors_persecond = int(packet_errors_2) - int(packet_errors)

        sql = "insert into sqlserver_status(server_id,tags,host,port,connect,role,uptime,version,lock_timeout,trancount,max_connections,processes,processes_running,processes_waits,connections_persecond,pack_received_persecond,pack_sent_persecond,packet_errors_persecond) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);"
        param = (server_id, tags, host, port, connect, role, uptime, version,
                 lock_timeout, trancount, max_connections, processes,
                 processes_running, processes_waits, connections_persecond,
                 pack_received_persecond, pack_sent_persecond,
                 packet_errors_persecond)
        func.mysql_exec(sql, param)
        func.update_db_status_init(server_id, 'sqlserver', role, version, tags)

        # generate sqlserver status alert
        alert.gen_alert_sqlserver_status(server_id)

        #check logspace
        logspace = sqlserver.get_logspace(conn)
        if logspace:
            for line in logspace:
                sql = "insert into sqlserver_space(server_id,host,port,tags,db_name,total_size,max_rate,status) values(%s,%s,%s,%s,%s,%s,%s,%s)"
                param = (server_id, host, port, tags, line[0], line[1],
                         line[2], line[3])
                func.mysql_exec(sql, param)

            #logger.info("Generate logspace alert for server: %s begin:" %(server_id))
            #alert.gen_alert_sqlserver_logspace(server_id)    # generate logspace alert
Example #16
0
        
        logons_persecond = sysstat_1['logons cumulative']-sysstat_0['logons cumulative']
        logons_current = sysstat_0['logons current']
        opened_cursors_persecond = sysstat_1['opened cursors cumulative']-sysstat_0['opened cursors cumulative']
        opened_cursors_current = sysstat_0['opened cursors current']
        user_commits_persecond = sysstat_1['user commits']-sysstat_0['user commits']
        user_rollbacks_persecond = sysstat_1['user rollbacks']-sysstat_0['user rollbacks']
        user_calls_persecond = sysstat_1['user calls']-sysstat_0['user calls']
        db_block_gets_persecond = sysstat_1['db block gets']-sysstat_0['db block gets']
        #print session_logical_reads_persecond

        ##################### insert data to mysql server#############################
        sql = "insert into oracle_status(server_id,host,port,tags,connect,db_name, instance_name,instance_role,instance_status,database_role,open_mode,protection_mode,host_name,database_status,startup_time,uptime,version,archiver,session_total,session_actives,session_waits,dg_stats,dg_delay,processes,session_logical_reads_persecond,physical_reads_persecond,physical_writes_persecond,physical_read_io_requests_persecond,physical_write_io_requests_persecond,db_block_changes_persecond,os_cpu_wait_time,logons_persecond,logons_current,opened_cursors_persecond,opened_cursors_current,user_commits_persecond,user_rollbacks_persecond,user_calls_persecond,db_block_gets_persecond) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);"
        param = (server_id,host,port,tags,connect,db_name,instance_name,instance_role,instance_status,database_role,open_mode,protection_mode,host_name,database_status,startup_time,uptime,version,archiver,session_total,session_actives,session_waits,dg_stats,dg_delay,processes,session_logical_reads_persecond,physical_reads_persecond,physical_writes_persecond,physical_read_io_requests_persecond,physical_write_io_requests_persecond,db_block_changes_persecond,os_cpu_wait_time,logons_persecond,logons_current,opened_cursors_persecond,opened_cursors_current,user_commits_persecond,user_rollbacks_persecond,user_calls_persecond,db_block_gets_persecond)
        func.mysql_exec(sql,param) 
        func.update_db_status_init(database_role_new,version,host,port,tags)

        #check tablespace
        tablespace = oracle.get_tablespace(conn)
        if tablespace:
           for line in tablespace:
              sql="insert into oracle_tablespace(server_id,host,port,tags,tablespace_name,total_size,used_size,avail_size,used_rate) values(%s,%s,%s,%s,%s,%s,%s,%s,%s)"
              param=(server_id,host,port,tags,line[0],line[1],line[2],line[3],line[4])
              func.mysql_exec(sql,param)
              
        #check dataguard status
        result = func.mysql_query("select count(1) from db_servers_oracle_dg where primary_db_id = '%s' or standby_db_id = '%s'" %(server_id, server_id))
        if result:
            is_dg = result[0][0]

        if is_dg > 0:
Example #17
0
def check_mysql(host, port, username, password, server_id, tags):
    try:
        conn = MySQLdb.connect(host=host,
                               user=username,
                               passwd=password,
                               port=int(port),
                               connect_timeout=3,
                               charset='utf8')
        cur = conn.cursor()
        conn.select_db('information_schema')
        #cur.execute('flush hosts;')
        ############################# CHECK MYSQL ####################################################
        mysql_variables = func.get_mysql_variables(cur)
        mysql_status = func.get_mysql_status(cur)
        time.sleep(1)
        mysql_status_2 = func.get_mysql_status(cur)
        ############################# GET VARIABLES ###################################################
        version = func.get_item(mysql_variables, 'version')
        key_buffer_size = func.get_item(mysql_variables, 'key_buffer_size')
        sort_buffer_size = func.get_item(mysql_variables, 'sort_buffer_size')
        join_buffer_size = func.get_item(mysql_variables, 'join_buffer_size')
        max_connections = func.get_item(mysql_variables, 'max_connections')
        max_connect_errors = func.get_item(mysql_variables,
                                           'max_connect_errors')
        open_files_limit = func.get_item(mysql_variables, 'open_files_limit')
        table_open_cache = func.get_item(mysql_variables, 'table_open_cache')
        max_tmp_tables = func.get_item(mysql_variables, 'max_tmp_tables')
        max_heap_table_size = func.get_item(mysql_variables,
                                            'max_heap_table_size')
        max_allowed_packet = func.get_item(mysql_variables,
                                           'max_allowed_packet')
        ############################# GET INNODB INFO ##################################################
        #innodb variables
        innodb_version = func.get_item(mysql_variables, 'innodb_version')
        innodb_buffer_pool_instances = func.get_item(
            mysql_variables, 'innodb_buffer_pool_instances')
        innodb_buffer_pool_size = func.get_item(mysql_variables,
                                                'innodb_buffer_pool_size')
        innodb_doublewrite = func.get_item(mysql_variables,
                                           'innodb_doublewrite')
        innodb_file_per_table = func.get_item(mysql_variables,
                                              'innodb_file_per_table')
        innodb_flush_log_at_trx_commit = func.get_item(
            mysql_variables, 'innodb_flush_log_at_trx_commit')
        innodb_flush_method = func.get_item(mysql_variables,
                                            'innodb_flush_method')
        innodb_force_recovery = func.get_item(mysql_variables,
                                              'innodb_force_recovery')
        innodb_io_capacity = func.get_item(mysql_variables,
                                           'innodb_io_capacity')
        innodb_read_io_threads = func.get_item(mysql_variables,
                                               'innodb_read_io_threads')
        innodb_write_io_threads = func.get_item(mysql_variables,
                                                'innodb_write_io_threads')
        #innodb status
        innodb_buffer_pool_pages_total = int(
            func.get_item(mysql_status, 'Innodb_buffer_pool_pages_total'))
        innodb_buffer_pool_pages_data = int(
            func.get_item(mysql_status, 'Innodb_buffer_pool_pages_data'))
        innodb_buffer_pool_pages_dirty = int(
            func.get_item(mysql_status, 'Innodb_buffer_pool_pages_dirty'))
        innodb_buffer_pool_pages_flushed = int(
            func.get_item(mysql_status, 'Innodb_buffer_pool_pages_flushed'))
        innodb_buffer_pool_pages_free = int(
            func.get_item(mysql_status, 'Innodb_buffer_pool_pages_free'))
        innodb_buffer_pool_pages_misc = int(
            func.get_item(mysql_status, 'Innodb_buffer_pool_pages_misc'))
        innodb_page_size = int(func.get_item(mysql_status, 'Innodb_page_size'))
        innodb_pages_created = int(
            func.get_item(mysql_status, 'Innodb_pages_created'))
        innodb_pages_read = int(
            func.get_item(mysql_status, 'Innodb_pages_read'))
        innodb_pages_written = int(
            func.get_item(mysql_status, 'Innodb_pages_written'))
        innodb_row_lock_current_waits = int(
            func.get_item(mysql_status, 'Innodb_row_lock_current_waits'))
        #innodb persecond info
        innodb_buffer_pool_read_requests_persecond = int(
            func.get_item(
                mysql_status_2, 'Innodb_buffer_pool_read_requests')) - int(
                    func.get_item(mysql_status,
                                  'Innodb_buffer_pool_read_requests'))
        innodb_buffer_pool_reads_persecond = int(
            func.get_item(mysql_status_2, 'Innodb_buffer_pool_reads')) - int(
                func.get_item(mysql_status, 'Innodb_buffer_pool_reads'))
        innodb_buffer_pool_write_requests_persecond = int(
            func.get_item(
                mysql_status_2, 'Innodb_buffer_pool_write_requests')) - int(
                    func.get_item(mysql_status,
                                  'Innodb_buffer_pool_write_requests'))
        innodb_buffer_pool_pages_flushed_persecond = int(
            func.get_item(
                mysql_status_2, 'Innodb_buffer_pool_pages_flushed')) - int(
                    func.get_item(mysql_status,
                                  'Innodb_buffer_pool_pages_flushed'))
        innodb_rows_deleted_persecond = int(
            func.get_item(mysql_status_2, 'Innodb_rows_deleted')) - int(
                func.get_item(mysql_status, 'Innodb_rows_deleted'))
        innodb_rows_inserted_persecond = int(
            func.get_item(mysql_status_2, 'Innodb_rows_inserted')) - int(
                func.get_item(mysql_status, 'Innodb_rows_inserted'))
        innodb_rows_read_persecond = int(
            func.get_item(mysql_status_2, 'Innodb_rows_read')) - int(
                func.get_item(mysql_status, 'Innodb_rows_read'))
        innodb_rows_updated_persecond = int(
            func.get_item(mysql_status_2, 'Innodb_rows_updated')) - int(
                func.get_item(mysql_status, 'Innodb_rows_updated'))
        ############################# GET STATUS ##################################################
        connect = 1
        uptime = func.get_item(mysql_status, 'Uptime')
        open_files = func.get_item(mysql_status, 'Open_files')
        open_tables = func.get_item(mysql_status, 'Open_tables')
        threads_connected = func.get_item(mysql_status, 'Threads_connected')
        threads_running = func.get_item(mysql_status, 'Threads_running')
        threads_created = func.get_item(mysql_status, 'Threads_created')
        threads_cached = func.get_item(mysql_status, 'Threads_cached')
        threads_waits = mysql.get_waits(conn)
        connections = func.get_item(mysql_status, 'Connections')
        aborted_clients = func.get_item(mysql_status, 'Aborted_clients')
        aborted_connects = func.get_item(mysql_status, 'Aborted_connects')
        key_blocks_not_flushed = func.get_item(mysql_status,
                                               'Key_blocks_not_flushed')
        key_blocks_unused = func.get_item(mysql_status, 'Key_blocks_unused')
        key_blocks_used = func.get_item(mysql_status, 'Key_blocks_used')
        ############################# GET STATUS PERSECOND ##################################################
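        # Same two-sample technique as above; the Bytes_received/Bytes_sent
        # deltas are additionally divided by 1024, so they are stored as KB/s.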
        connections_persecond = int(
            func.get_item(mysql_status_2, 'Connections')) - int(
                func.get_item(mysql_status, 'Connections'))
        bytes_received_persecond = (
            int(func.get_item(mysql_status_2, 'Bytes_received')) -
            int(func.get_item(mysql_status, 'Bytes_received'))) / 1024
        bytes_sent_persecond = (
            int(func.get_item(mysql_status_2, 'Bytes_sent')) -
            int(func.get_item(mysql_status, 'Bytes_sent'))) / 1024
        com_select_persecond = int(func.get_item(
            mysql_status_2, 'Com_select')) - int(
                func.get_item(mysql_status, 'Com_select'))
        com_insert_persecond = int(func.get_item(
            mysql_status_2, 'Com_insert')) - int(
                func.get_item(mysql_status, 'Com_insert'))
        com_update_persecond = int(func.get_item(
            mysql_status_2, 'Com_update')) - int(
                func.get_item(mysql_status, 'Com_update'))
        com_delete_persecond = int(func.get_item(
            mysql_status_2, 'Com_delete')) - int(
                func.get_item(mysql_status, 'Com_delete'))
        com_commit_persecond = int(func.get_item(
            mysql_status_2, 'Com_commit')) - int(
                func.get_item(mysql_status, 'Com_commit'))
        com_rollback_persecond = int(
            func.get_item(mysql_status_2, 'Com_rollback')) - int(
                func.get_item(mysql_status, 'Com_rollback'))
        questions_persecond = int(func.get_item(
            mysql_status_2, 'Questions')) - int(
                func.get_item(mysql_status, 'Questions'))
        queries_persecond = int(func.get_item(
            mysql_status_2, 'Queries')) - int(
                func.get_item(mysql_status, 'Queries'))
        transaction_persecond = (
            int(func.get_item(mysql_status_2, 'Com_commit')) +
            int(func.get_item(mysql_status_2, 'Com_rollback'))) - (
                int(func.get_item(mysql_status, 'Com_commit')) +
                int(func.get_item(mysql_status, 'Com_rollback')))
        created_tmp_disk_tables_persecond = int(
            func.get_item(mysql_status_2, 'Created_tmp_disk_tables')) - int(
                func.get_item(mysql_status, 'Created_tmp_disk_tables'))
        created_tmp_files_persecond = int(
            func.get_item(mysql_status_2, 'Created_tmp_files')) - int(
                func.get_item(mysql_status, 'Created_tmp_files'))
        created_tmp_tables_persecond = int(
            func.get_item(mysql_status_2, 'Created_tmp_tables')) - int(
                func.get_item(mysql_status, 'Created_tmp_tables'))
        table_locks_immediate_persecond = int(
            func.get_item(mysql_status_2, 'Table_locks_immediate')) - int(
                func.get_item(mysql_status, 'Table_locks_immediate'))
        table_locks_waited_persecond = int(
            func.get_item(mysql_status_2, 'Table_locks_waited')) - int(
                func.get_item(mysql_status, 'Table_locks_waited'))
        key_read_requests_persecond = int(
            func.get_item(mysql_status_2, 'Key_read_requests')) - int(
                func.get_item(mysql_status, 'Key_read_requests'))
        key_reads_persecond = int(func.get_item(
            mysql_status_2, 'Key_reads')) - int(
                func.get_item(mysql_status, 'Key_reads'))
        key_write_requests_persecond = int(
            func.get_item(mysql_status_2, 'Key_write_requests')) - int(
                func.get_item(mysql_status, 'Key_write_requests'))
        key_writes_persecond = int(func.get_item(
            mysql_status_2, 'Key_writes')) - int(
                func.get_item(mysql_status, 'Key_writes'))
        ############################# GET MYSQL HITRATE ##################################################
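        # Cache-efficiency ratios, each guarded against division by zero:
        #   query_cache_hitrate   = Qcache_hits / (Qcache_hits + Com_select)
        #   thread_cache_hitrate  = 1 - Threads_created / Connections
        #   key_buffer_read_rate  = 1 - Key_reads / Key_read_requests
        #   key_buffer_write_rate = 1 - Key_writes / Key_write_requests
        #   key_blocks_used_rate  = Key_blocks_used / (Key_blocks_used + Key_blocks_unused)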
        if (string.atof(func.get_item(mysql_status, 'Qcache_hits')) +
                string.atof(func.get_item(mysql_status, 'Com_select'))) <> 0:
            query_cache_hitrate = string.atof(
                func.get_item(mysql_status, 'Qcache_hits')) / (
                    string.atof(func.get_item(mysql_status, 'Qcache_hits')) +
                    string.atof(func.get_item(mysql_status, 'Com_select')))
            query_cache_hitrate = "%9.2f" % query_cache_hitrate
        else:
            query_cache_hitrate = 0

        if string.atof(func.get_item(mysql_status, 'Connections')) <> 0:
            thread_cache_hitrate = 1 - string.atof(
                func.get_item(mysql_status, 'Threads_created')) / string.atof(
                    func.get_item(mysql_status, 'Connections'))
            thread_cache_hitrate = "%9.2f" % thread_cache_hitrate
        else:
            thread_cache_hitrate = 0

        if string.atof(func.get_item(mysql_status, 'Key_read_requests')) <> 0:
            key_buffer_read_rate = 1 - string.atof(
                func.get_item(mysql_status, 'Key_reads')) / string.atof(
                    func.get_item(mysql_status, 'Key_read_requests'))
            key_buffer_read_rate = "%9.2f" % key_buffer_read_rate
        else:
            key_buffer_read_rate = 0

        if string.atof(func.get_item(mysql_status, 'Key_write_requests')) <> 0:
            key_buffer_write_rate = 1 - string.atof(
                func.get_item(mysql_status, 'Key_writes')) / string.atof(
                    func.get_item(mysql_status, 'Key_write_requests'))
            key_buffer_write_rate = "%9.2f" % key_buffer_write_rate
        else:
            key_buffer_write_rate = 0

        if (string.atof(func.get_item(mysql_status, 'Key_blocks_used')) +
                string.atof(func.get_item(mysql_status,
                                          'Key_blocks_unused'))) <> 0:
            key_blocks_used_rate = string.atof(
                func.get_item(mysql_status, 'Key_blocks_used')
            ) / (string.atof(func.get_item(mysql_status, 'Key_blocks_used')) +
                 string.atof(func.get_item(mysql_status, 'Key_blocks_unused')))
            key_blocks_used_rate = "%9.2f" % key_blocks_used_rate
        else:
            key_blocks_used_rate = 0

        if (string.atof(func.get_item(
                mysql_status, 'Created_tmp_disk_tables')) + string.atof(
                    func.get_item(mysql_status, 'Created_tmp_tables'))) <> 0:
            created_tmp_disk_tables_rate = string.atof(
                func.get_item(mysql_status, 'Created_tmp_disk_tables')) / (
                    string.atof(
                        func.get_item(mysql_status, 'Created_tmp_disk_tables'))
                    + string.atof(
                        func.get_item(mysql_status, 'Created_tmp_tables')))
            created_tmp_disk_tables_rate = "%9.2f" % created_tmp_disk_tables_rate
        else:
            created_tmp_disk_tables_rate = 0

        if string.atof(max_connections) <> 0:
            connections_usage_rate = string.atof(
                threads_connected) / string.atof(max_connections)
            connections_usage_rate = "%9.2f" % connections_usage_rate
        else:
            connections_usage_rate = 0

        if string.atof(open_files_limit) <> 0:
            open_files_usage_rate = string.atof(open_files) / string.atof(
                open_files_limit)
            open_files_usage_rate = "%9.2f" % open_files_usage_rate
        else:
            open_files_usage_rate = 0

        if string.atof(table_open_cache) <> 0:
            open_tables_usage_rate = string.atof(open_tables) / string.atof(
                table_open_cache)
            open_tables_usage_rate = "%9.2f" % open_tables_usage_rate
        else:
            open_tables_usage_rate = 0

        #repl
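        # cursor.execute() returns the number of rows the statement produced,
        # so a non-zero count for 'show slave status' marks this instance as a
        # replica.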
        slave_status = cur.execute('show slave status;')
        if slave_status <> 0:
            role = 'slave'
            role_new = 's'
        else:
            role = 'master'
            role_new = 'm'

        ############################# INSERT INTO SERVER ##################################################
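        # The column list in this INSERT must stay aligned, name for name, with
        # the param tuple that follows; every value computed above becomes one
        # row of mysql_status.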
        sql = "insert into mysql_status(server_id,host,port,tags,connect,role,uptime,version,max_connections,max_connect_errors,open_files_limit,table_open_cache,max_tmp_tables,max_heap_table_size,max_allowed_packet,open_files,open_tables,threads_connected,threads_running,threads_waits,threads_created,threads_cached,connections,aborted_clients,aborted_connects,connections_persecond,bytes_received_persecond,bytes_sent_persecond,com_select_persecond,com_insert_persecond,com_update_persecond,com_delete_persecond,com_commit_persecond,com_rollback_persecond,questions_persecond,queries_persecond,transaction_persecond,created_tmp_tables_persecond,created_tmp_disk_tables_persecond,created_tmp_files_persecond,table_locks_immediate_persecond,table_locks_waited_persecond,key_buffer_size,sort_buffer_size,join_buffer_size,key_blocks_not_flushed,key_blocks_unused,key_blocks_used,key_read_requests_persecond,key_reads_persecond,key_write_requests_persecond,key_writes_persecond,innodb_version,innodb_buffer_pool_instances,innodb_buffer_pool_size,innodb_doublewrite,innodb_file_per_table,innodb_flush_log_at_trx_commit,innodb_flush_method,innodb_force_recovery,innodb_io_capacity,innodb_read_io_threads,innodb_write_io_threads,innodb_buffer_pool_pages_total,innodb_buffer_pool_pages_data,innodb_buffer_pool_pages_dirty,innodb_buffer_pool_pages_flushed,innodb_buffer_pool_pages_free,innodb_buffer_pool_pages_misc,innodb_page_size,innodb_pages_created,innodb_pages_read,innodb_pages_written,innodb_row_lock_current_waits,innodb_buffer_pool_pages_flushed_persecond,innodb_buffer_pool_read_requests_persecond,innodb_buffer_pool_reads_persecond,innodb_buffer_pool_write_requests_persecond,innodb_rows_read_persecond,innodb_rows_inserted_persecond,innodb_rows_updated_persecond,innodb_rows_deleted_persecond,query_cache_hitrate,thread_cache_hitrate,key_buffer_read_rate,key_buffer_write_rate,key_blocks_used_rate,created_tmp_disk_tables_rate,connections_usage_rate,open_files_usage_rate,open_tables_usage_rate) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);"
        param = (
            server_id, host, port, tags, connect, role, uptime, version,
            max_connections, max_connect_errors, open_files_limit,
            table_open_cache, max_tmp_tables, max_heap_table_size,
            max_allowed_packet, open_files, open_tables, threads_connected,
            threads_running, threads_waits, threads_created, threads_cached,
            connections, aborted_clients, aborted_connects,
            connections_persecond, bytes_received_persecond,
            bytes_sent_persecond, com_select_persecond, com_insert_persecond,
            com_update_persecond, com_delete_persecond, com_commit_persecond,
            com_rollback_persecond, questions_persecond, queries_persecond,
            transaction_persecond, created_tmp_tables_persecond,
            created_tmp_disk_tables_persecond, created_tmp_files_persecond,
            table_locks_immediate_persecond, table_locks_waited_persecond,
            key_buffer_size, sort_buffer_size, join_buffer_size,
            key_blocks_not_flushed, key_blocks_unused, key_blocks_used,
            key_read_requests_persecond, key_reads_persecond,
            key_write_requests_persecond, key_writes_persecond, innodb_version,
            innodb_buffer_pool_instances, innodb_buffer_pool_size,
            innodb_doublewrite, innodb_file_per_table,
            innodb_flush_log_at_trx_commit, innodb_flush_method,
            innodb_force_recovery, innodb_io_capacity, innodb_read_io_threads,
            innodb_write_io_threads, innodb_buffer_pool_pages_total,
            innodb_buffer_pool_pages_data, innodb_buffer_pool_pages_dirty,
            innodb_buffer_pool_pages_flushed, innodb_buffer_pool_pages_free,
            innodb_buffer_pool_pages_misc, innodb_page_size,
            innodb_pages_created, innodb_pages_read, innodb_pages_written,
            innodb_row_lock_current_waits,
            innodb_buffer_pool_pages_flushed_persecond,
            innodb_buffer_pool_read_requests_persecond,
            innodb_buffer_pool_reads_persecond,
            innodb_buffer_pool_write_requests_persecond,
            innodb_rows_read_persecond, innodb_rows_inserted_persecond,
            innodb_rows_updated_persecond, innodb_rows_deleted_persecond,
            query_cache_hitrate, thread_cache_hitrate, key_buffer_read_rate,
            key_buffer_write_rate, key_blocks_used_rate,
            created_tmp_disk_tables_rate, connections_usage_rate,
            open_files_usage_rate, open_tables_usage_rate)
        func.mysql_exec(sql, param)
        func.update_db_status_init(role_new, version, host, port, tags)

        #check mysql process
        processlist = cur.execute(
            "select * from information_schema.processlist where DB !='information_schema' and command !='Sleep';"
        )
        if processlist:
            for line in cur.fetchall():
                sql = "insert into mysql_processlist(server_id,host,port,tags,pid,p_user,p_host,p_db,command,time,status,info) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
                param = (server_id, host, port, tags, line[0], line[1],
                         line[2], line[3], line[4], line[5], line[6], line[7])
                func.mysql_exec(sql, param)

        #check mysql connected
        connected = cur.execute(
            "select SUBSTRING_INDEX(host,':',1) as connect_server, user connect_user,db connect_db, count(SUBSTRING_INDEX(host,':',1)) as connect_count  from information_schema.processlist where db is not null and db!='information_schema' and db !='performance_schema' group by connect_server ;"
        )
        if connected:
            for line in cur.fetchall():
                sql = "insert into mysql_connected(server_id,host,port,tags,connect_server,connect_user,connect_db,connect_count) values(%s,%s,%s,%s,%s,%s,%s,%s);"
                param = (server_id, host, port, tags, line[0], line[1],
                         line[2], line[3])
                func.mysql_exec(sql, param)

        #check mysql replication
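        # datalist is built positionally so that it lines up with the
        # mysql_replication INSERT further down: [is_master, is_slave,
        # gtid_mode, read_only, master_server, master_port, slave_io_run,
        # slave_sql_run, delay, current_binlog_file, current_binlog_pos,
        # master_binlog_file, master_binlog_pos, master_binlog_space].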
        master_thread = cur.execute(
            "select * from information_schema.processlist where COMMAND = 'Binlog Dump' or COMMAND = 'Binlog Dump GTID';"
        )
        slave_status = cur.execute('show slave status;')
        datalist = []
        if master_thread >= 1:
            datalist.append(int(1))
            if slave_status <> 0:
                datalist.append(int(1))
            else:
                datalist.append(int(0))
        else:
            datalist.append(int(0))
            if slave_status <> 0:
                datalist.append(int(1))
            else:
                datalist.append(int(0))

        if slave_status <> 0:
            gtid_mode = cur.execute(
                "select * from information_schema.global_variables where variable_name='gtid_mode';"
            )
            result = cur.fetchone()
            if result:
                gtid_mode = result[1]
            else:
                gtid_mode = 'OFF'
            datalist.append(gtid_mode)
            read_only = cur.execute(
                "select * from information_schema.global_variables where variable_name='read_only';"
            )
            result = cur.fetchone()
            datalist.append(result[1])
            slave_info = cur.execute('show slave status;')
            result = cur.fetchone()
            master_server = result[1]
            master_port = result[3]
            slave_io_run = result[10]
            slave_sql_run = result[11]
            delay = result[32]
            current_binlog_file = result[9]
            current_binlog_pos = result[21]
            master_binlog_file = result[5]
            master_binlog_pos = result[6]

            datalist.append(master_server)
            datalist.append(master_port)
            datalist.append(slave_io_run)
            datalist.append(slave_sql_run)
            datalist.append(delay)
            datalist.append(current_binlog_file)
            datalist.append(current_binlog_pos)
            datalist.append(master_binlog_file)
            datalist.append(master_binlog_pos)
            datalist.append(0)

        elif master_thread >= 1:
            gtid_mode = cur.execute(
                "select * from information_schema.global_variables where variable_name='gtid_mode';"
            )
            result = cur.fetchone()
            if result:
                gtid_mode = result[1]
            else:
                gtid_mode = 'OFF'
            datalist.append(gtid_mode)
            read_only = cur.execute(
                "select * from information_schema.global_variables where variable_name='read_only';"
            )
            result = cur.fetchone()
            datalist.append(result[1])
            datalist.append('---')
            datalist.append('---')
            datalist.append('---')
            datalist.append('---')
            datalist.append('---')
            datalist.append('---')
            datalist.append('---')
            master = cur.execute('show master status;')
            master_result = cur.fetchone()
            datalist.append(master_result[0])
            datalist.append(master_result[1])
            binlog_file = cur.execute('show master logs;')
            binlogs = 0
            if binlog_file:
                for row in cur.fetchall():
                    binlogs = binlogs + row[1]
            # Append the total even when 'show master logs' returns nothing, so
            # datalist keeps the 14 entries the replication INSERT expects.
            datalist.append(binlogs)
        else:
            datalist = []

        result = datalist
        if result:
            sql = "insert into mysql_replication(server_id,tags,host,port,is_master,is_slave,gtid_mode,read_only,master_server,master_port,slave_io_run,slave_sql_run,delay,current_binlog_file,current_binlog_pos,master_binlog_file,master_binlog_pos,master_binlog_space) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
            param = (server_id, tags, host, port, result[0], result[1],
                     result[2], result[3], result[4], result[5], result[6],
                     result[7], result[8], result[9], result[10], result[11],
                     result[12], result[13])
            func.mysql_exec(sql, param)

        cur.close()
        conn.close()

    except MySQLdb.Error, e:
        logger_msg = "check mysql %s:%s failure: %d %s" % (
            host, port, e.args[0], e.args[1])
        logger.warning(logger_msg)
        logger_msg = "check mysql %s:%s failure: sleep 3 seconds and check again." % (
            host, port)
        logger.warning(logger_msg)
        time.sleep(3)
        try:
            conn = MySQLdb.connect(host=host,
                                   user=username,
                                   passwd=password,
                                   port=int(port),
                                   connect_timeout=3,
                                   charset='utf8')
            cur = conn.cursor()
            conn.select_db('information_schema')
        except MySQLdb.Error, e:
            logger_msg = "check mysql second %s:%s failure: %d %s" % (
                host, port, e.args[0], e.args[1])
            logger.warning(logger_msg)
            connect = 0
            sql = "insert into mysql_status(server_id,host,port,tags,connect) values(%s,%s,%s,%s,%s)"
            param = (server_id, host, port, tags, connect)
            func.mysql_exec(sql, param)
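
#############################################################################
# Illustrative sketch (not part of the original script): every *_persecond
# metric in check_mysql follows the same pattern -- sample a cumulative status
# counter twice, one second apart, and keep the difference. A hypothetical
# helper such as status_delta() below captures that pattern; it only assumes
# the func.get_item() accessor already used above.
#############################################################################
def status_delta(status_first, status_second, key):
    """Return the delta of a cumulative status counter between two snapshots."""
    return int(func.get_item(status_second, key)) - int(
        func.get_item(status_first, key))
# Hypothetical usage:
#   com_select_persecond = status_delta(mysql_status, mysql_status_2, 'Com_select')
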
def check_mongodb(host, port, user, passwd, server_id, tags):
    try:
        # Keep the connection handle separate from the 'connect' status flag
        # that is stored in mongodb_status below.
        client = pymongo.Connection(host, int(port))
        db = client['admin']
        db.authenticate(user, passwd)
        serverStatus = client.admin.command(
            bson.son.SON([('serverStatus', 1), ('repl', 2)]))
        time.sleep(1)
        serverStatus_2 = client.admin.command(
            bson.son.SON([('serverStatus', 1), ('repl', 2)]))
        connect = 1
        ok = int(serverStatus['ok'])
        version = serverStatus['version']
        uptime = serverStatus['uptime']
        connections_current = serverStatus['connections']['current']
        connections_available = serverStatus['connections']['available']
        globalLock_activeClients = serverStatus['globalLock']['activeClients'][
            'total']
        globalLock_currentQueue = serverStatus['globalLock']['currentQueue'][
            'total']
        indexCounters_accesses = serverStatus['indexCounters']['accesses']
        indexCounters_hits = serverStatus['indexCounters']['hits']
        indexCounters_misses = serverStatus['indexCounters']['misses']
        indexCounters_resets = serverStatus['indexCounters']['resets']
        indexCounters_missRatio = serverStatus['indexCounters']['missRatio']
        #cursors_totalOpen = serverStatus['cursors']['totalOpen']
        #cursors_timeOut =  serverStatus['cursors']['timeOut']
        dur_commits = serverStatus['dur']['commits']
        dur_journaledMB = serverStatus['dur']['journaledMB']
        dur_writeToDataFilesMB = serverStatus['dur']['writeToDataFilesMB']
        dur_compression = serverStatus['dur']['compression']
        dur_commitsInWriteLock = serverStatus['dur']['commitsInWriteLock']
        dur_earlyCommits = serverStatus['dur']['earlyCommits']
        dur_timeMs_dt = serverStatus['dur']['timeMs']['dt']
        dur_timeMs_prepLogBuffer = serverStatus['dur']['timeMs'][
            'prepLogBuffer']
        dur_timeMs_writeToJournal = serverStatus['dur']['timeMs'][
            'writeToJournal']
        dur_timeMs_writeToDataFiles = serverStatus['dur']['timeMs'][
            'writeToDataFiles']
        dur_timeMs_remapPrivateView = serverStatus['dur']['timeMs'][
            'remapPrivateView']
        mem_bits = serverStatus['mem']['bits']
        mem_resident = serverStatus['mem']['resident']
        mem_virtual = serverStatus['mem']['virtual']
        mem_supported = serverStatus['mem']['supported']
        mem_mapped = serverStatus['mem']['mapped']
        mem_mappedWithJournal = serverStatus['mem']['mappedWithJournal']
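        # Like the MySQL check, per-second rates come from two serverStatus
        # samples taken one second apart: the cumulative network and opcounter
        # values from the first sample are subtracted from the second.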
        network_bytesIn_persecond = int(
            serverStatus_2['network']['bytesIn']) - int(
                serverStatus['network']['bytesIn'])
        network_bytesOut_persecond = int(
            serverStatus_2['network']['bytesOut']) - int(
                serverStatus['network']['bytesOut'])
        network_numRequests_persecond = int(
            serverStatus_2['network']['numRequests']) - int(
                serverStatus['network']['numRequests'])
        opcounters_insert_persecond = int(
            serverStatus_2['opcounters']['insert']) - int(
                serverStatus['opcounters']['insert'])
        opcounters_query_persecond = int(
            serverStatus_2['opcounters']['query']) - int(
                serverStatus['opcounters']['query'])
        opcounters_update_persecond = int(
            serverStatus_2['opcounters']['update']) - int(
                serverStatus['opcounters']['update'])
        opcounters_delete_persecond = int(
            serverStatus_2['opcounters']['delete']) - int(
                serverStatus['opcounters']['delete'])
        opcounters_command_persecond = int(
            serverStatus_2['opcounters']['command']) - int(
                serverStatus['opcounters']['command'])

        #replset
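        # serverStatus carries a 'repl' section with 'setName' only when the
        # node belongs to a replica set; if that lookup fails, the except
        # branch falls back to reporting a standalone master.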
        try:
            repl = serverStatus['repl']
            setName = repl['setName']
            replset = 1
            if repl['secondary']:
                repl_role = 'secondary'
                repl_role_new = 's'
            else:
                repl_role = 'master'
                repl_role_new = 'm'
        except:
            replset = 0
            repl_role = 'master'
            repl_role_new = 'm'

        ##################### insert data to mysql server#############################
        sql = "insert into mongodb_status(server_id,host,port,tags,connect,replset,repl_role,ok,uptime,version,connections_current,connections_available,globalLock_currentQueue,globalLock_activeClients,indexCounters_accesses,indexCounters_hits,indexCounters_misses,indexCounters_resets,indexCounters_missRatio,dur_commits,dur_journaledMB,dur_writeToDataFilesMB,dur_compression,dur_commitsInWriteLock,dur_earlyCommits,dur_timeMs_dt,dur_timeMs_prepLogBuffer,dur_timeMs_writeToJournal,dur_timeMs_writeToDataFiles,dur_timeMs_remapPrivateView,mem_bits,mem_resident,mem_virtual,mem_supported,mem_mapped,mem_mappedWithJournal,network_bytesIn_persecond,network_bytesOut_persecond,network_numRequests_persecond,opcounters_insert_persecond,opcounters_query_persecond,opcounters_update_persecond,opcounters_delete_persecond,opcounters_command_persecond) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);"
        param = (server_id, host, port, tags, connect, replset, repl_role, ok,
                 uptime, version, connections_current, connections_available,
                 globalLock_currentQueue, globalLock_activeClients,
                 indexCounters_accesses, indexCounters_hits,
                 indexCounters_misses, indexCounters_resets,
                 indexCounters_missRatio, dur_commits, dur_journaledMB,
                 dur_writeToDataFilesMB, dur_compression,
                 dur_commitsInWriteLock, dur_earlyCommits, dur_timeMs_dt,
                 dur_timeMs_prepLogBuffer, dur_timeMs_writeToJournal,
                 dur_timeMs_writeToDataFiles, dur_timeMs_remapPrivateView,
                 mem_bits, mem_resident, mem_virtual, mem_supported,
                 mem_mapped, mem_mappedWithJournal, network_bytesIn_persecond,
                 network_bytesOut_persecond, network_numRequests_persecond,
                 opcounters_insert_persecond, opcounters_query_persecond,
                 opcounters_update_persecond, opcounters_delete_persecond,
                 opcounters_command_persecond)
        func.mysql_exec(sql, param)
        func.update_db_status_init(repl_role_new, version, host, port, tags)

    except Exception, e:
        logger_msg = "check mongodb %s:%s : %s" % (host, port, e)
        logger.warning(logger_msg)

        try:
            connect = 0
            sql = "insert into mongodb_status(server_id,host,port,tags,connect) values(%s,%s,%s,%s,%s)"
            param = (server_id, host, port, tags, connect)
            func.mysql_exec(sql, param)

        except Exception, e:
            logger.error(e)
            sys.exit(1)
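
#############################################################################
# Illustrative usage sketch (not part of the original script): each check_*
# function is self-contained -- it connects, samples, writes one row into the
# matching *_status table via func.mysql_exec, and logs failures. The host,
# credentials, server_id and tags below are placeholders, not real settings.
#############################################################################
if __name__ == '__main__':
    check_mysql('127.0.0.1', 3306, 'monitor', 'monitor_pass', 1, 'mysql-demo')
    check_mongodb('127.0.0.1', 27017, 'monitor', 'monitor_pass', 2, 'mongo-demo')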