def check_mysql_bigtable(host, port, username, password, server_id, tags, bigtable_size):
    try:
        conn = MySQLdb.connect(host=host,
                               user=username,
                               passwd=password,
                               port=int(port),
                               connect_timeout=2,
                               charset='utf8')
        curs = conn.cursor()
        conn.select_db('information_schema')
        try:
            bigtable = curs.execute(
                "SELECT table_schema as 'DB',table_name as 'TABLE',CONCAT(ROUND(( data_length + index_length ) / ( 1024 * 1024 ), 2), '') 'TOTAL' , table_comment as COMMENT FROM information_schema.TABLES;"
            )
            if bigtable:
                for row in curs.fetchall():
                    datalist = []
                    for r in row:
                        datalist.append(r)
                    result = datalist
                    if result:
                        table_size = float(string.atof(result[2]))
                        if table_size >= int(bigtable_size):
                            sql = "insert into mysql_bigtable(server_id,host,port,tags,db_name,table_name,table_size,table_comment) values(%s,%s,%s,%s,%s,%s,%s,%s);"
                            param = (server_id, host, port, tags, result[0],
                                     result[1], result[2], result[3])
                            func.mysql_exec(sql, param)
        except:
            pass

        finally:
            curs.close()
            conn.close()
            sys.exit(1)

    except MySQLdb.Error, e:
        print "Mysql Error %d: %s" % (e.args[0], e.args[1])
def main():
    try:
        func.mysql_exec('delete from alarm_history order by id limit 100;', '')

    except Exception, e:
        print e
        sys.exit(1)
def main():
    try:
        func.mysql_exec('delete from alarm_history;','')
    
    except Exception, e:
        print e
        sys.exit(1)
def saveTables(ip, port, dbs):
    print "save %s:%s begin" % (ip, port)
    insDbSql = "REPLACE INTO mysql_databases(db_name, db_ip, db_port) VALUES (%s, %s, %s)"
    insDbVals = []
    insTbSql = "REPLACE INTO mysql_tables(db_name, tb_name, has_primary, rows, data_len, index_count, index_len) VALUES (%s, %s, %s, %s, %s, %s, %s)"
    insTbVals = []

    for dbName, tbs in dbs.iteritems():
        insDbVals.append([dbName, ip, port])
        for tbName, tbInfos in tbs.iteritems():
            insTbVals.append([
                dbName, tbName,
                tbInfos.get("has_primary"),
                tbInfos.get("rows"),
                tbInfos.get("data_len"),
                tbInfos.get("index_count"),
                tbInfos.get("index_len")
            ])

    # save
    print insDbSql
    #print insDbVals
    func.mysql_exec(insDbSql, insDbVals)
    print insTbSql
    #print insTbVals
    func.mysql_exec(insTbSql, insTbVals)

    print "save %s:%s end" % (ip, port)
def main():

    func.mysql_exec(
        "insert into sqlserver_status_history SELECT *,LEFT(REPLACE(REPLACE(REPLACE(create_time,'-',''),' ',''),':',''),12) from sqlserver_status;",
        "",
    )
    func.mysql_exec("delete from sqlserver_status;", "")

    servers = func.mysql_query(
        "select id,host,port,username,password,tags from db_servers_sqlserver where is_delete=0 and monitor=1;"
    )

    logger.info("check sqlserver controller started.")

    if servers:
        plist = []

        for row in servers:
            server_id = row[0]
            host = row[1]
            port = row[2]
            username = row[3]
            passwd = row[4]
            tags = row[5]
            p = Process(target=check_sqlserver, args=(host, port, username, passwd, server_id, tags))
            plist.append(p)
            p.start()

        for p in plist:
            p.join()

    else:
        logger.warning("check sqlserver: not found any servers")

    logger.info("check sqlserver controller finished.")
def main():
    func.mysql_exec(
        "replace into memcache_status_history  SELECT *,LEFT(REPLACE(REPLACE(REPLACE(create_time,'-',''),' ',''),':',''),12) from memcache_status;",
        '')
    func.mysql_exec(
        'delete from memcache_status where server_id not in (select id from db_servers_memcache where monitor=1);',
        '')
    #func.mysql_exec('delete from memcache_status;','')
    #get memcache servers list
    servers = func.mysql_query(
        'select id,host,port,tags from db_servers_memcache where is_delete=0 and monitor=1;'
    )
    logger.info("check memcache controller started.")
    if servers:
        plist = []
        for row in servers:
            server_id = row[0]
            host = row[1]
            port = row[2]
            tags = row[3]
            p = Process(target=check_memcache,
                        args=(host, port, server_id, tags))
            plist.append(p)
            p.start()

        for p in plist:
            p.join()

    else:
        logger.warning("check memcache: not found any servers")
    logger.info("check memcache controller finished.")
def stat_mysql_summary():
    print("%s: stat_mysql_summary started." %
          (time.strftime('%Y-%m-%d %H:%M:%S', time.localtime()), ))
    statSql = '''
    REPLACE INTO daily_mysql
        (server_id, host, port, tags, db_type, disk_size_m
        , tps_avg, tps_min, tps_max, qps_avg, qps_min, qps_max, stat_date, create_time)
    SELECT server_id, HOST, PORT, tags, 'mysql', MAX(disk_size_m),
      CEIL(AVG(IF(com_insert_persecond>0,com_insert_persecond,0)
        + IF(com_update_persecond>0,com_update_persecond,0)
        + IF(com_delete_persecond>0,com_delete_persecond,0))),
      CEIL(MIN(IF(com_insert_persecond>0,com_insert_persecond,0)
        + IF(com_update_persecond>0,com_update_persecond,0)
        + IF(com_delete_persecond>0,com_delete_persecond,0))),
      CEIL(MAX(IF(com_insert_persecond>0,com_insert_persecond,0)
        + IF(com_update_persecond>0,com_update_persecond,0)
        + IF(com_delete_persecond>0,com_delete_persecond,0))),
      CEIL(AVG(IF(com_select_persecond>0,com_select_persecond,0))),
      MIN(IF(com_select_persecond>0,com_select_persecond,0)),
      MAX(IF(com_select_persecond>0,com_select_persecond,0)),
      DATE(create_time), NOW()
    FROM mysql_status_history
    WHERE create_time>=DATE_SUB(CURDATE(), INTERVAL 1 DAY) AND create_time<CURDATE()
    GROUP BY DATE(create_time),HOST,PORT
    '''
    func.mysql_exec(statSql)
def main():
    func.mysql_exec(
        "insert into mysql_bigtable_history SELECT *,LEFT(REPLACE(REPLACE(REPLACE(create_time,'-',''),' ',''),':',''),8) from mysql_bigtable",
        '')
    func.mysql_exec('delete from mysql_bigtable;', '')
    #get mysql servers list
    servers = func.mysql_query(
        'select id,host,port,username,password,tags,bigtable_size from db_servers_mysql where is_delete=0 and monitor=1 and bigtable_monitor=1;'
    )
    if servers:
        print("%s: check mysql bigtable controller started." %
              (time.strftime('%Y-%m-%d %H:%M:%S', time.localtime()), ))
        plist = []
        for row in servers:
            server_id = row[0]
            host = row[1]
            port = row[2]
            username = row[3]
            password = row[4]
            tags = row[5]
            bigtable_size = row[6]
            p = Process(target=check_mysql_bigtable,
                        args=(host, port, username, password, server_id, tags,
                              bigtable_size))
            plist.append(p)
        for p in plist:
            p.start()
        time.sleep(15)
        for p in plist:
            p.terminate()
        for p in plist:
            p.join()
        print("%s: check mysql bigtable controller finished." %
              (time.strftime('%Y-%m-%d %H:%M:%S', time.localtime()), ))
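Several of the controllers in this listing bound how long a polling cycle can take by starting every check in its own Process, sleeping for a fixed grace period, terminating whatever is still running, and only then joining. A minimal, self-contained sketch of that pattern follows; the worker function and the timings are illustrative, not taken from the original code.

import time
from multiprocessing import Process

def slow_check(seconds):
    # Stand-in for a per-server check that may hang on an unreachable host.
    time.sleep(seconds)

if __name__ == '__main__':
    plist = [Process(target=slow_check, args=(n,)) for n in (1, 2, 30)]
    for p in plist:
        p.start()
    time.sleep(15)          # grace period, mirroring the controller above
    for p in plist:
        p.terminate()       # kill any check that is still running
    for p in plist:
        p.join()            # reap the children; terminated ones return immediately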
def main():

    func.mysql_exec(
        "insert into mongodb_status_history SELECT *,LEFT(REPLACE(REPLACE(REPLACE(create_time,'-',''),' ',''),':',''),12) from mongodb_status;",
        '')
    func.mysql_exec('delete from mongodb_status;', '')

    #get mongodb servers list
    servers = func.mysql_query(
        'select id,host,port,username,password,tags from db_servers_mongodb where is_delete=0 and monitor=1;'
    )

    logger.info("check mongodb controller started.")

    if servers:
        plist = []
        for row in servers:
            server_id = row[0]
            host = row[1]
            port = row[2]
            username = row[3]
            password = row[4]
            tags = row[5]
            p = Process(target=check_mongodb,
                        args=(host, port, username, password, server_id, tags))
            plist.append(p)
            p.start()

        for p in plist:
            p.join()

    else:
        logger.warning("check mongodb: not found any servers")

    logger.info("check mongodb controller finished.")
def main():
    func.mysql_exec("insert into mysql_bigtable_history SELECT *,LEFT(REPLACE(REPLACE(REPLACE(create_time,'-',''),' ',''),':',''),8) from mysql_bigtable",'')
    func.mysql_exec('delete from mysql_bigtable;','')
    #get mysql servers list
    servers = func.mysql_query('select id,host,port,username,password,tags,bigtable_size from db_servers_mysql where is_delete=0 and monitor=1 and bigtable_monitor=1;')
    if servers:
        print("%s: check mysql bigtable controller started." % (time.strftime('%Y-%m-%d %H:%M:%S', time.localtime()),));
        plist = []
        for row in servers:
            server_id=row[0]
            host=row[1]
            port=row[2]
            username=row[3]
            password=row[4]
            tags=row[5]
            bigtable_size=row[6]
            p = Process(target = check_mysql_bigtable, args = (host,port,username,password,server_id,tags,bigtable_size))
            plist.append(p)
        for p in plist:
            p.start()
        time.sleep(15)
        for p in plist:
            p.terminate()
        for p in plist:
            p.join()
        print("%s: check mysql bigtable controller finished." % (time.strftime('%Y-%m-%d %H:%M:%S', time.localtime()),))
def check_sqlserver(host, port, username, passwd, server_id, tags):
    try:
        conn = pymssql.connect(host=host,
                               port=int(port),
                               user=username,
                               password=passwd,
                               charset="utf8")

        connect = 1
        role = -1
        uptime = sqlserver.get_uptime(conn)
        version = sqlserver.get_version(conn)

        lock_timeout = sqlserver.get_variables(conn, 'LOCK_TIMEOUT')
        trancount = sqlserver.get_variables(conn, 'TRANCOUNT')
        max_connections = sqlserver.get_variables(conn, 'MAX_CONNECTIONS')
        processes = sqlserver.ger_processes(conn)
        processes_running = sqlserver.ger_processes_running(conn)
        processes_waits = sqlserver.ger_processes_waits(conn)

        connections = sqlserver.get_variables(conn, 'CONNECTIONS')
        pack_received = sqlserver.get_variables(conn, 'PACK_RECEIVED')
        pack_sent = sqlserver.get_variables(conn, 'PACK_SENT')
        packet_errors = sqlserver.get_variables(conn, 'PACKET_ERRORS')

        time.sleep(1)

        connections_2 = sqlserver.get_variables(conn, 'CONNECTIONS')
        pack_received_2 = sqlserver.get_variables(conn, 'PACK_RECEIVED')
        pack_sent_2 = sqlserver.get_variables(conn, 'PACK_SENT')
        packet_errors_2 = sqlserver.get_variables(conn, 'PACKET_ERRORS')

        connections_persecond = int(connections_2) - int(connections)
        pack_received_persecond = int(pack_received_2) - int(pack_received)
        pack_sent_persecond = int(pack_sent_2) - int(pack_sent)
        packet_errors_persecond = int(packet_errors_2) - int(packet_errors)

        sql = "insert into sqlserver_status(server_id,tags,host,port,connect,role,uptime,version,lock_timeout,trancount,max_connections,processes,processes_running,processes_waits,connections_persecond,pack_received_persecond,pack_sent_persecond,packet_errors_persecond) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);"
        param = (server_id, tags, host, port, connect, role, uptime, version,
                 lock_timeout, trancount, max_connections, processes,
                 processes_running, processes_waits, connections_persecond,
                 pack_received_persecond, pack_sent_persecond,
                 packet_errors_persecond)
        func.mysql_exec(sql, param)
        func.update_db_status_init(role, version, host, port, tags)

    except Exception, e:
        logger_msg = "check sqlserver %s:%s : %s" % (host, port, e)
        logger.warning(logger_msg)

        try:
            connect = 0
            sql = "insert into sqlserver_status(server_id,host,port,tags,connect) values(%s,%s,%s,%s,%s)"
            param = (server_id, host, port, tags, connect)
            func.mysql_exec(sql, param)

        except Exception, e:
            logger.error(e)
            sys.exit(1)
def alert_to_history():
    try:
        sql = "insert into alerts_his select *,sysdate() from alerts where create_time < date_add(sysdate(), interval -3 day);"
        func.mysql_exec(sql, '')

        sql = "delete from alerts where id in(select id from alerts_his where create_time < date_add(sysdate(), interval -5 day));"
        func.mysql_exec(sql, '')
    except Exception, e:
        logger_msg = "alert_to_history: %s" % (str(e).strip('\n'))
        logger.warning(logger_msg)
def main():
    monitor_os_method = func.get_option("monitor_os_method")
    cmdFile = "%s/check_os_%s.py" % (scDir, monitor_os_method)
    os.system("python %s" % (cmdFile))

    osTables = ('os_status', 'os_net', 'os_disk', 'os_diskio')
    for osTable in osTables:
        func.mysql_exec(
            'DELETE s FROM %s AS s WHERE s.ip NOT IN (SELECT v.host FROM v_monitor_host AS v)'
            % (osTable))
def doTopsqlSummary():
    delSql = "delete from oracle_slowquery_summary"
    func.mysql_exec(delSql)
    insSql = '''
	    INSERT INTO oracle_slowquery_summary(host, port, sql_id, first_id, first_time)
	    SELECT q.host,q.port,q.sql_id,q.id,q.stat_date
	    FROM oracle_slowquery q
	    , (SELECT MIN(id) min_id FROM oracle_slowquery t1 GROUP BY HOST, PORT, sql_id) t
	    WHERE q.id=t.min_id
	    '''
    func.mysql_exec(insSql)
def saveTopsql(statTime, server_id, host, port, tags, instance_num, topSqls):
    tbName = "oracle_slowquery"

    TEXTCOLUMS = ["ID", "Module", "Text"]
    DBCOLUMN_MAP_TOPSQL = {}
    DBCOLUMN_MAP_TOPSQL["ID"] = "sql_id"
    DBCOLUMN_MAP_TOPSQL["Module"] = "module"
    DBCOLUMN_MAP_TOPSQL["Text"] = "sql_text"
    DBCOLUMN_MAP_TOPSQL["Executions"] = "executions"
    DBCOLUMN_MAP_TOPSQL["Elapsed Time (s)"] = "elapsed_time_per_exec"
    DBCOLUMN_MAP_TOPSQL["Gets per Exec"] = "gets_per_exec"
    DBCOLUMN_MAP_TOPSQL["Reads per Exec"] = "reads_per_exec"
    DBCOLUMN_MAP_TOPSQL["Physical Reads"] = "physical_read_reqs"
    DBCOLUMN_MAP_TOPSQL["Elapsed Time %Total"] = "elapsed_pct"

    for sqlID, sqlVal in topSqls.iteritems():
        dbColVals = {}
        dbColVals["server_id"] = server_id
        dbColVals["host"] = host
        dbColVals["port"] = port
        #dbColVals["tags"] = tags
        dbColVals["instance_num"] = instance_num
        dbColVals["stat_date"] = statTime
        dbColVals["create_time"] = datetime.datetime.now()
        # init default
        for key, dbCol in DBCOLUMN_MAP_TOPSQL.iteritems():
            if TEXTCOLUMS.count(key) > 0:
                iVal = ''
            else:
                iVal = 0
            dbColVals[dbCol] = iVal

        #
        for itemName, itemVal in sqlVal.iteritems():
            if DBCOLUMN_MAP_TOPSQL.has_key(itemName):
                if itemVal == None:
                    continue
                if TEXTCOLUMS.count(itemName) > 0:
                    iVal = itemVal
                else:
                    iVal = float(itemVal.replace(",", ""))
                dbColVals[DBCOLUMN_MAP_TOPSQL.get(itemName)] = iVal

        insColNames = []
        insColVals = []
        for colName, colVal in dbColVals.iteritems():
            insColNames.append(colName)
            insColVals.append(colVal)
        insSql = "replace into %s(%s) VALUES(%s) " % (
            tbName, ",".join(insColNames),
            ("%s," * len(insColVals)).rstrip(","))
        #print insSql
        #print insColVals
        func.mysql_exec(insSql, insColVals)
def stat_mysql_slowquery():
    print("%s: stat_mysql_slowquery started." %
          (time.strftime('%Y-%m-%d %H:%M:%S', time.localtime()), ))
    statSlowSql = '''
        REPLACE INTO mysql_slow_query_review_summary
            (id, serverid_max, hostname_max, db_max, user_max, checksum, ts_min, ts_max, ts_cnt,
             Query_time_pct_95, Lock_time_pct_95, Rows_sent_pct_95, Rows_examined_pct_95)
        SELECT  MIN(id), serverid_max, hostname_max, db_max, user_max, checksum, MIN(ts_min), MAX(ts_max), SUM(ts_cnt),
          MAX(Query_time_pct_95), MAX(Lock_time_pct_95), MAX(Rows_sent_pct_95), MAX(Rows_examined_pct_95)
        FROM mysql_slow_query_review_history
        GROUP BY hostname_max,db_max,CHECKSUM ORDER BY NULL;
        '''
    func.mysql_exec(statSlowSql)
def get_connect(server_id):
    url = ""
    host = ""
    port = ""
    username = ""
    password = ""
    tags = ""

    server = func.mysql_query(
        "select host,port,dsn,username,password,tags from db_cfg_oracle where id=%s;"
        % (server_id))
    if server:
        for row in server:
            host = row[0]
            port = row[1]
            username = row[3]
            password = row[4]
            tags = row[5]
            url = row[0] + ':' + row[1] + '/' + row[2]

    if host == "":
        logger.warning("get host failed, exit!")
        sys.exit(1)

    try:
        conn = cx_Oracle.connect(username,
                                 password,
                                 url,
                                 mode=cx_Oracle.SYSDBA)  # get connection object
        return conn
    except Exception, e:
        logger_msg = "check oracle %s : %s" % (url, str(e).strip('\n'))
        logger.warning(logger_msg)

        try:
            connect = 0

            func.mysql_exec("begin;", '')

            sql = "delete from oracle_status where server_id = %s; " % (
                server_id)
            func.mysql_exec(sql, '')

            sql = "insert into oracle_status(server_id,host,port,tags,connect) values(%s,%s,%s,%s,%s)"
            param = (server_id, host, port, tags, connect)
            func.mysql_exec(sql, param)

            func.mysql_exec("commit;", '')
        except Exception, e:
            func.mysql_exec("rollback;", '')
            logger.error(str(e).strip('\n'))
def main():

    func.mysql_exec("insert into oracle_status_history SELECT *,LEFT(REPLACE(REPLACE(REPLACE(create_time,'-',''),' ',''),':',''),12) from oracle_status;",'')
    func.mysql_exec('delete from oracle_status;','')

    func.mysql_exec("insert into oracle_tablespace_history SELECT *,LEFT(REPLACE(REPLACE(REPLACE(create_time,'-',''),' ',''),':',''),12) from oracle_tablespace;",'')
    func.mysql_exec('delete from oracle_tablespace;','')

    #get oracle servers list
    servers=func.mysql_query("select id,host,port,dsn,username,password,tags from db_servers_oracle where is_delete=0 and monitor=1;")

    logger.info("check oracle controller start.")
    if servers:
        plist = []
        for row in servers:
            server_id=row[0]
            host=row[1]
            port=row[2]
            dsn=row[3]
            username=row[4]
            password=row[5]
            tags=row[6]
            p = Process(target = check_oracle, args = (host,port,dsn,username,password,server_id,tags))
            plist.append(p)
            p.start()
        #time.sleep(10)
        #for p in plist:
        #    p.terminate()
        for p in plist:
            p.join()

    else:
        logger.warning("check oracle: not found any servers")

    logger.info("check oracle controller finished.")
def main():

    func.mysql_exec("insert into redis_status_history SELECT *,LEFT(REPLACE(REPLACE(REPLACE(create_time,'-',''),' ',''),':',''),12) from redis_status;",'')
    func.mysql_exec('delete from redis_status  where server_id not in (select id from db_servers_redis where monitor=1);','')
    func.mysql_exec("insert into redis_replication_history SELECT *,LEFT(REPLACE(REPLACE(REPLACE(create_time,'-',''),' ',''),':',''),12) from redis_replication;",'')
    func.mysql_exec('delete from redis_replication where server_id not in (select id from db_servers_redis where monitor=1);','')

    servers = func.mysql_query('select id,host,port,password,tags from db_servers_redis where is_delete=0 and monitor=1;')

    logger.info("check redis controller started.")

    if servers:
         plist = []

         for row in servers:
             server_id=row[0]
             host=row[1]
             port=row[2]
             passwd=row[3]
             tags=row[4]
             p = Process(target = check_redis, args = (host,port,passwd,server_id,tags))
             plist.append(p)
             p.start()

         for p in plist:
             p.join()

    else:
         logger.warning("check redis: not found any servers")

    logger.info("check redis controller finished.")
def main():

    func.mysql_exec("insert into redis_status_history SELECT *,LEFT(REPLACE(REPLACE(REPLACE(create_time,'-',''),' ',''),':',''),12) from redis_status;",'')
    func.mysql_exec('delete from redis_status;','')
    func.mysql_exec("insert into redis_replication_history SELECT *,LEFT(REPLACE(REPLACE(REPLACE(create_time,'-',''),' ',''),':',''),12) from redis_replication;",'')
    func.mysql_exec('delete from redis_replication;','')

    servers = func.mysql_query('select id,host,port,password,tags from db_servers_redis where is_delete=0 and monitor=1;')

    logger.info("check redis controller started.")

    if servers:
         plist = []

         for row in servers:
             server_id=row[0]
             host=row[1]
             port=row[2]
             passwd=row[3]
             tags=row[4]
             p = Process(target = check_redis, args = (host,port,passwd,server_id,tags))
             plist.append(p)
             p.start()

         for p in plist:
             p.join()

    else:
         logger.warning("check redis: not found any servers")

    logger.info("check redis controller finished.")
def check_mysql_bigtable2(no, host, port, username, password, server_id, tags, bigtable_size):
    try:
        print "[BBQ] check_mysql_bigtable %s %s:%s" % (no, host, port)
        conn = MySQLdb.connect(host=host, user=username, passwd=password, port=int(port), connect_timeout=2, charset='utf8')
        curs = conn.cursor()
        conn.select_db('information_schema')
        try:
            disk_size_m = -1
            # get datadir
            curs.execute("show global variables like 'datadir'")
            dataDir = (curs.fetchone())[1]

            # get database
            curs.execute("show databases")
            for line in curs.fetchall():
                dbName = line[0]
                saltCmd = "du -ch %s/%s/*" % (dataDir.rstrip("/"), dbName)
                jobID = func.exeSaltAsyncCmd(host, saltCmd)
                time.sleep(3)
                saltRess = func.getSaltJobByID(host, jobID)
                if saltRess is None:
                    break

                sizess = saltRess.split("\n")
                svals = (re.sub(r'\s+', ' ', sizess[len(sizess) - 1])).split()
                disk_size_m = int(svals[0])
                updSql = "update mysql_status set disk_size_m=%s where host='%s' and port=%s" % (disk_size_m, host, port)
                func.mysql_exec(updSql)

            # bigtable
            bigtable = curs.execute("SELECT table_schema as 'DB',table_name as 'TABLE',IFNULL(ROUND(( data_length + index_length ) / ( 1024 * 1024 ), 2),0) as 'total_length' , table_comment as COMMENT FROM information_schema.TABLES WHERE IFNULL(ROUND(( data_length + index_length ) / ( 1024 * 1024 ), 2),0) >= %s ORDER BY total_length DESC ;" % (bigtable_size))
            lines = curs.fetchall()
            for line in lines:
                updSqls = []
                updSqls.append("insert into mysql_bigtable_history SELECT *,LEFT(REPLACE(REPLACE(REPLACE(create_time,'-',''),' ',''),':',''),8) from mysql_bigtable where host='%s' and port=%s" % (host, port))
                updSqls.append("delete from mysql_bigtable  where host='%s' and port=%s" % (host, port))
                sql = "insert into mysql_bigtable(server_id,host,port,tags,db_name,table_name,table_size,table_comment) values('%s','%s','%s','%s','%s','%s','%s','%s');" % (server_id, host, port, tags, line[0], line[1], line[2], line[3])
                updSqls.append(sql)

                #func.mysql_exec_many(updSqls)
                for updSql in updSqls:
                    func.mysql_exec(updSql)

        except MySQLdb.Error, e:
            print "warn %s:%s,%s,%s" % (host, port, username, password)
            print e

        finally:
            curs.close()
            conn.close()

    except MySQLdb.Error, e:
        print "Mysql Error %d: %s" % (e.args[0], e.args[1])
def check_oracle(host,port,dsn,username,password,server_id,tags):
    url=host+':'+port+'/'+dsn

    try:
        conn=cx_Oracle.connect(username,password,url, mode=cx_Oracle.SYSDBA) # get connection object
    except Exception, e:
        logger_msg="check oracle %s : %s" %(url,str(e).strip('\n'))
        logger.warning(logger_msg)

        try:
            connect=0
            sql="insert into oracle_status(server_id,host,port,tags,connect) values(%s,%s,%s,%s,%s)"
            param=(server_id,host,port,tags,connect)
            func.mysql_exec(sql,param)
        except Exception, e:
            logger.error(str(e).strip('\n'))
            sys.exit(1)
def main():
    #get os servers list
    zbItems = func.mysql_query("SELECT item_type, stat_item_name, zabbix_item_name, zabbix_item_value_unit, zabbix_server,last_stat_time FROM zabbix_item where item_type='os';")
    zbSrvItems = {}
    for zbItem in zbItems:
        (item_type, stat_item_name, zabbix_item_name, zabbix_item_value_unit, zabbix_server, last_stat_time) = zbItem
        if not zbSrvItems.has_key(zabbix_server):
            zbSrvItems[zabbix_server] = []
        zbSrvItems.get(zabbix_server).append(zbItem)
    #print zbSrvItems
    zbSectors = ["zabbix_dc", "zabbix_dba"]
    zbApis = {}
    for zbSector in zbSectors:
        zbKey = func.get_config(zbSector, 'key')
        zbHost = func.get_config(zbSector, 'host')
        zbUser = func.get_config(zbSector, 'user')
        zbPasswd = func.get_config(zbSector, 'passwd')
        zbApis[zbKey] = zbCli(zbHost, zbUser, zbPasswd)

    logger.info("check os controller started.")
    servers = func.mysql_query("select id, host,tags, create_time from db_servers_os where is_delete=0 and monitor=1;")
    if servers:
        plist = []
        proHostsNum = len(servers) / cpus + 1
        logger.info("check os sum:%s, cpus:%s, percpu:%s" % (len(servers), cpus, proHostsNum))
        for i in range(0, len(servers), proHostsNum):
            proSrvs = servers[i:i + proHostsNum]
            p = Process(target=check_hosts, args=(i, proSrvs, zbSrvItems, zbApis))
            plist.append(p)
        intervals = exeTimeout / len(plist)
        if intervals <= 0:
            intervals = 1

        for p in plist:
            p.start()
            #time.sleep(intervals)
            time.sleep(1)

        for p in plist:
            p.join(timeout=10)

    else:
        logger.warning("check os: not found any servers")

    func.mysql_exec('update os_status set zabbix=0,create_time=now() where create_time<date_sub(now(), interval %s second)' % (min_interval))
    logger.info("check os controller finished.")
def check_oracle(host,port,dsn,username,password,server_id,tags):
    url=host+':'+port+'/'+dsn

    try:
        conn=cx_Oracle.connect(username,password,url) # get connection object

    except Exception, e:
        logger_msg="check oracle %s : %s" %(url,str(e).strip('\n'))
        logger.warning(logger_msg)

        try:
            connect=0
            sql="insert into oracle_status(server_id,host,port,tags,connect) values(%s,%s,%s,%s,%s)"
            param=(server_id,host,port,tags,connect)
            func.mysql_exec(sql,param)
        except Exception, e:
            logger.error(str(e).strip('\n'))
            sys.exit(1)
def main():
    #func.mysql_exec("insert into mysql_processlist_history SELECT * from mysql_processlist",'')
    #func.mysql_exec("delete from mysql_processlist",'')
    func.mysql_exec(
        "replace into mysql_status_history SELECT *,LEFT(REPLACE(REPLACE(REPLACE(create_time,'-',''),' ',''),':',''),12) from mysql_status",
        '')
    func.mysql_exec(
        'delete from mysql_status where server_id not in (select id from db_servers_mysql where monitor=1);',
        '')

    func.mysql_exec(
        "replace into mysql_replication_history SELECT *,LEFT(REPLACE(REPLACE(REPLACE(create_time,'-',''),' ',''),':',''),12) from mysql_replication",
        '')
    func.mysql_exec(
        'delete from mysql_replication where server_id not in (select id from db_servers_mysql where monitor=1);',
        '')
    #get mysql servers list
    servers = func.mysql_query(
        "select id,host,port,username,password,tags,repl_channel from db_servers_mysql where is_delete=0 and monitor=1 and monitor_proxy='0';"
    )

    logger.info("check mysql controller started.")

    if servers:
        plist = []
        for row in servers:
            server_id = row[0]
            host = row[1]
            port = row[2]
            username = row[3]
            password = row[4]
            tags = row[5]
            replchannel = row[6]
            #thread.start_new_thread(check_mysql, (host,port,user,passwd,server_id,application_id))
            #time.sleep(1)
            p = Process(target=check_mysql,
                        args=(host, port, username, password, server_id, tags,
                              replchannel))
            plist.append(p)
        for p in plist:
            p.start()
        time.sleep(10)
        for p in plist:
            p.terminate()
        for p in plist:
            p.join()

    else:
        logger.warning("check mysql: not found any servers")

    logger.info("check mysql controller finished.")
def check_mysql_bigtable(host, port, username, password, server_id, tags,
                         bigtable_size):
    try:
        conn = MySQLdb.connect(host=host,
                               user=username,
                               passwd=password,
                               port=int(port),
                               connect_timeout=2,
                               charset='utf8')
        curs = conn.cursor()
        conn.select_db('information_schema')
        try:
            bigtable = curs.execute(
                "SELECT table_schema as 'DB',table_name as 'TABLE',CONCAT(ROUND(( data_length + index_length ) / ( 1024 * 1024 ), 2), '') 'TOTAL' , table_comment as COMMENT FROM information_schema.TABLES where TABLE_SCHEMA not in ('information_schema','mysql','performance_schema','sys','test') and engine is not null ORDER BY data_length + index_length DESC limit 10;"
            )
            #print bigtable_size
            if bigtable:
                for row in curs.fetchall():
                    datalist = []
                    for r in row:
                        datalist.append(r)
                    result = datalist
                    if result:
                        table_size = float(string.atof(result[2]))
                        if table_size >= int(bigtable_size):
                            sql = "insert into mysql_bigtable(server_id,host,port,tags,db_name,table_name,table_size,table_comment) values(%s,%s,%s,%s,%s,%s,%s,%s);"
                            param = (server_id, host, port, tags, result[0],
                                     result[1], result[2], result[3])
                            func.mysql_exec(sql, param)
        except:
            pass

        finally:
            curs.close()
            conn.close()
            sys.exit(1)

    except MySQLdb.Error, e:
        pass
        print "Mysql Error %d: %s" % (e.args[0], e.args[1])
def check_sqlserver(host, port, username, passwd, server_id, tags):
    try:
        conn = pymssql.connect(host=host,
                               port=int(port),
                               user=username,
                               password=passwd,
                               charset="utf8")
    except Exception, e:
        func.mysql_exec("rollback;", '')
        logger_msg = "check sqlserver %s:%s : %s" % (host, port, e)
        logger.warning(logger_msg)

        try:
            connect = 0

            func.mysql_exec("begin;", '')

            sql = "delete from sqlserver_status where server_id = %s; " % (
                server_id)
            func.mysql_exec(sql, '')

            sql = "insert into sqlserver_status(server_id,host,port,tags,connect) values(%s,%s,%s,%s,%s)"
            param = (server_id, host, port, tags, connect)
            func.mysql_exec(sql, param)

            logger.info(
                "Generate sqlserver instance alert for server: %s begin:" %
                (server_id))
            alert.gen_alert_sqlserver_status(
                server_id)  # generate sqlserver instance alert
            logger.info(
                "Generate sqlserver instance alert for server: %s end." %
                (server_id))

            func.mysql_exec("commit;", '')

        except Exception, e:
            logger.error(e)
            sys.exit(1)
def check_oracle(host, port, dsn, username, password, server_id, tags):
    url = host + ':' + port + '/' + dsn

    try:
        conn = cx_Oracle.connect(username,
                                 password,
                                 url,
                                 mode=cx_Oracle.SYSDBA)  # get connection object
    except Exception, e:
        logger_msg = "check oracle %s : %s" % (url, str(e).strip('\n'))
        logger.warning(logger_msg)

        try:
            connect = 0

            func.mysql_exec("begin;", '')

            sql = "delete from oracle_status where server_id = %s; " % (
                server_id)
            func.mysql_exec(sql, '')

            sql = "insert into oracle_status(server_id,host,port,tags,connect) values(%s,%s,%s,%s,%s)"
            param = (server_id, host, port, tags, connect)
            func.mysql_exec(sql, param)

            logger.info(
                "Generate oracle instance alert for server: %s begin:" %
                (server_id))
            alert.gen_alert_oracle_status(
                server_id)  # generate oracle instance alert
            logger.info("Generate oracle instance alert for server: %s end." %
                        (server_id))

            func.mysql_exec("commit;", '')
        except Exception, e:
            func.mysql_exec("rollback;", '')
            logger.error(str(e).strip('\n'))
            sys.exit(1)
def saveLoadProfile(statTime, server_id, host, port, tags, instance_num,
                    loadProfs):
    tbName = "oracle_awrreport"

    DBCOLUMN_MAP_LOADPROFILE = {}
    DBCOLUMN_MAP_LOADPROFILE["DB Time"] = "db_time"
    DBCOLUMN_MAP_LOADPROFILE["DB CPU"] = "db_cpu"
    DBCOLUMN_MAP_LOADPROFILE["Redo size"] = "redo_size"
    DBCOLUMN_MAP_LOADPROFILE["Logical reads"] = "logical_reads"
    DBCOLUMN_MAP_LOADPROFILE["User calls"] = "user_calls"
    DBCOLUMN_MAP_LOADPROFILE["Executes"] = "executes"
    DBCOLUMN_MAP_LOADPROFILE["Transactions"] = "transactions"

    dbColVals = {}
    dbColVals["server_id"] = server_id
    dbColVals["host"] = host
    dbColVals["port"] = port
    dbColVals["tags"] = tags
    dbColVals["instance_num"] = instance_num
    dbColVals["stat_date"] = statTime
    dbColVals["create_time"] = datetime.datetime.now()
    for itemName, itemVal in loadProfs.iteritems():
        sItemName = (itemName.split("("))[0].strip()
        if re.search("^Logical read", sItemName):
            sItemName = "Logical reads"

        if DBCOLUMN_MAP_LOADPROFILE.has_key(sItemName):
            dbColVals[DBCOLUMN_MAP_LOADPROFILE.get(sItemName)] = itemVal

    insColNames = []
    insColVals = []
    for colName, colVal in dbColVals.iteritems():
        insColNames.append(colName)
        insColVals.append(colVal)
    insSql = "replace into %s(%s) VALUES(%s) " % (
        tbName, ",".join(insColNames), ("%s," * len(insColVals)).rstrip(","))
    func.mysql_exec(insSql, insColVals)
def check_hosts(i, hosts, zbSrvItems, zbApis):
    if len(hosts) <= 0:
        return

    for host in hosts:
        logger.info(host)
        (server_id, ip, tags, create_time) = host
        if not cmp(ip, ''):
            continue
        itemTypeVals = check_os_zabbix(ip, tags, zbSrvItems, zbApis)
        for itemType, itemVals in itemTypeVals.iteritems():
            if not itemTabs.has_key(itemType):
                errMsg = "no item_type %s on %s" % (itemType, ip)
                print "ERROR", errMsg, itemTabs
                func.add_alarm(server_id, tags, ip, 0, create_time, "os", 'check_os_zabbix', 'check_os_zabbix', 'warning', errMsg)
                continue
            tbName = itemTabs.get(itemType)
            colNames = ["ip", "tags", "zabbix"]
            colVals = [ip, tags]
            if len(itemVals) == 0:
                colVals.append("0")
            else:
                colVals.append("1")
                for item, statVal in itemVals.iteritems():
                    stat_item_name = item[1]
                    upd_zbitem_time(stat_item_name, statVal.get("clock"))
                    colNames.append(stat_item_name)
                    colVals.append(statVal.get("value"))

            func.mysql_exec("insert ignore into %s_history select *, LEFT(REPLACE(REPLACE(REPLACE(create_time,'-',''),' ',''),':',''),12) from %s where ip='%s';" % (tbName, tbName, ip))

            insSql = "REPLACE into %s(%s) VALUES ('%s')" % (tbName, ",".join(colNames), "','".join(colVals))
            func.mysql_exec(insSql)

            # save other database
            func.other_save("check_os_zabbix", itemVals)
def main():

    func.mysql_exec(
        "insert into oracle_status_history SELECT *,LEFT(REPLACE(REPLACE(REPLACE(create_time,'-',''),' ',''),':',''),12) from oracle_status;",
        '')
    func.mysql_exec('delete from oracle_status;', '')

    func.mysql_exec(
        "insert into oracle_tablespace_history SELECT *,LEFT(REPLACE(REPLACE(REPLACE(create_time,'-',''),' ',''),':',''),12) from oracle_tablespace;",
        '')
    func.mysql_exec('delete from oracle_tablespace;', '')

    #get oracle servers list
    servers = func.mysql_query(
        "select id,host,port,dsn,username,password,tags from db_servers_oracle where is_delete=0 and monitor=1;"
    )

    logger.info("check oracle controller start.")
    if servers:
        plist = []
        for row in servers:
            server_id = row[0]
            host = row[1]
            port = row[2]
            dsn = row[3]
            username = row[4]
            password = row[5]
            tags = row[6]
            p = Process(target=check_oracle,
                        args=(host, port, dsn, username, password, server_id,
                              tags))
            plist.append(p)
            p.start()
        #time.sleep(10)
        #for p in plist:
        #    p.terminate()
        for p in plist:
            p.join()

    else:
        logger.warning("check oracle: not found any servers")

    logger.info("check oracle controller finished.")
def main():

    func.mysql_exec("insert into mysql_status_history SELECT *,LEFT(REPLACE(REPLACE(REPLACE(create_time,'-',''),' ',''),':',''),12) from mysql_status",'')
    func.mysql_exec('delete from mysql_status;','')

    func.mysql_exec("insert into mysql_replication_history SELECT *,LEFT(REPLACE(REPLACE(REPLACE(create_time,'-',''),' ',''),':',''),12) from mysql_replication",'')
    func.mysql_exec('delete from mysql_replication;','')
    #get mysql servers list
    servers = func.mysql_query('select id,host,port,username,password,tags from db_servers_mysql where is_delete=0 and monitor=1;')

    logger.info("check mysql controller started.")

    if servers:
         plist = []
         for row in servers:
             server_id=row[0]
             host=row[1]
             port=row[2]
             username=row[3]
             password=row[4]
             tags=row[5]
             #thread.start_new_thread(check_mysql, (host,port,user,passwd,server_id,application_id))
             #time.sleep(1)
             p = Process(target = check_mysql, args = (host,port,username,password,server_id,tags))
             plist.append(p)
         for p in plist:
             p.start()
         time.sleep(10)
         for p in plist:
             p.terminate()	
         for p in plist:
             p.join()
         
    else:
         logger.warning("check mysql: not found any servers")

    logger.info("check mysql controller finished.")
def check_mysql_tablespace(no, host, port, username, password, server_id, tags,
                           bigtable_size):
    try:
        conn = MySQLdb.connect(host=host,
                               user=username,
                               passwd=password,
                               port=int(port),
                               connect_timeout=2,
                               charset='utf8')
        curs = conn.cursor()
        conn.select_db('information_schema')
        try:
            disk_size_m = 0
            saveDbs = []
            bigTables = []
            # get datadir
            curs.execute("show global variables like 'datadir'")
            dataDir = (curs.fetchone())[1]

            # get database
            curs.execute("show databases")
            for line in curs.fetchall():
                dbName = line[0]
                tbnum = 0
                dbsize_m = 0
                if igDbs.count(dbName) > 0:
                    continue
                saltCmd = "du -cm %s/%s/* 2>/dev/null" % (dataDir.rstrip("/"),
                                                          dbName)
                jobID = func.exeSaltAsyncCmd(host, saltCmd)
                time.sleep(3)
                saltRess = func.getSaltJobByID(host, jobID)
                if saltRess is None:
                    break
                slines = saltRess.split("\n")
                for sline in slines:
                    (dsize_m, dfile) = re.sub(r'\s+', ' ', sline).split()
                    dsize_m = int(dsize_m)
                    tbfile = dfile.split("/")[-1]
                    if not cmp("total", tbfile):
                        disk_size_m += dsize_m
                        continue
                    elif re.search("^db.", tbfile):
                        continue

                    tbName = tbfile.split(".")[0]
                    if re.search("frm$", tbfile):
                        tbnum += 1
                    else:
                        dbsize_m += dsize_m
                        if dsize_m > bigtable_size:
                            bigTables.append([dbName, tbName, dsize_m])
                saveDbs.append([dbName, host, port, tbnum, dbsize_m])

            print "%s:%s, disk_size_m %s, bigtbs %s" % (
                host, port, disk_size_m, len(bigTables))

            # databases
            func.mysql_exec(
                "delete from mysql_databases  where db_ip='%s' and db_port=%s"
                % (host, port))
            if len(saveDbs) > 0:
                insVals = []
                insSql = "INSERT INTO mysql_databases(db_name, db_ip, db_port, tb_count, data_size_m) values(%s,%s,%s,%s,%s);"
                func.mysql_exec(insSql, saveDbs)

            # bigtable
            func.mysql_exec(
                "delete from mysql_bigtable  where host='%s' and port=%s" %
                (host, port))
            if len(bigTables) > 0:
                insVals = []
                for bigTable in bigTables:
                    (dbName, tbName, dsize_m) = bigTable
                    insVals.append([
                        server_id, host, port, tags, dbName, tbName, dsize_m,
                        ''
                    ])
                insSql = "insert into mysql_bigtable(server_id,host,port,tags,db_name,table_name,table_size,table_comment) values(%s,%s,%s,%s,%s,%s,%s,%s);"
                func.mysql_exec(insSql, insVals)

        except MySQLdb.Error, e:
            print "warn %s:%s,%s,%s" % (host, port, username, password)
            print e

        finally:
            curs.close()
            conn.close()

    except MySQLdb.Error, e:
        print "Mysql Error %d: %s" % (e.args[0], e.args[1])
def check_mongodb(host,port,user,passwd,server_id,tags):
    try:
        func.mysql_exec("insert into mongodb_status_history SELECT *,LEFT(REPLACE(REPLACE(REPLACE(create_time,'-',''),' ',''),':',''),12) from mongodb_status where server_id='%s';" %(server_id),'')
        func.mysql_exec("delete from mongodb_status where server_id='%s';" %(server_id),'')

        #connect = pymongo.Connection(host,int(port))
        client = pymongo.MongoClient(host, int(port))
        db = client['admin'] 
        db.authenticate(user,passwd)
        serverStatus=client.admin.command(bson.son.SON([('serverStatus', 1), ('repl', 2)]))
        time.sleep(1)
        serverStatus_2=client.admin.command(bson.son.SON([('serverStatus', 1), ('repl', 2)]))
        connect = 1
        ok = int(serverStatus['ok'])
        version = serverStatus['version']
        uptime = serverStatus['uptime']
        connections_current = serverStatus['connections']['current']
        connections_available = serverStatus['connections']['available']
        globalLock_activeClients = serverStatus['globalLock']['activeClients']['total']
        globalLock_currentQueue = serverStatus['globalLock']['currentQueue']['total']
        mem_bits = serverStatus['mem']['bits']
        mem_resident = serverStatus['mem']['resident']
        mem_virtual = serverStatus['mem']['virtual']
        mem_supported = serverStatus['mem']['supported']
        mem_mapped = serverStatus['mem']['mapped']
        mem_mappedWithJournal = serverStatus['mem']['mappedWithJournal']
        network_bytesIn_persecond = int(serverStatus_2['network']['bytesIn']) - int(serverStatus['network']['bytesIn'])
        network_bytesOut_persecond = int(serverStatus_2['network']['bytesOut']) - int(serverStatus['network']['bytesOut'])
        network_numRequests_persecond = int(serverStatus_2['network']['numRequests']) - int(serverStatus['network']['numRequests'])
        opcounters_insert_persecond = int(serverStatus_2['opcounters']['insert']) - int(serverStatus['opcounters']['insert'])
        opcounters_query_persecond = int(serverStatus_2['opcounters']['query']) - int(serverStatus['opcounters']['query'])
        opcounters_update_persecond = int(serverStatus_2['opcounters']['update']) - int(serverStatus['opcounters']['update'])
        opcounters_delete_persecond = int(serverStatus_2['opcounters']['delete']) - int(serverStatus['opcounters']['delete'])
        opcounters_command_persecond = int(serverStatus_2['opcounters']['command']) - int(serverStatus['opcounters']['command'])

        #replset
        try:
            repl=serverStatus['repl']
            setName=repl['setName']
            replset=1
            if repl['secondary']== True:
                repl_role='secondary'
                repl_role_new='s'
            else:
                repl_role='master'
                repl_role_new='m' 
        except:
            replset=0
            repl_role='master'
            repl_role_new='m'
            pass

        ##################### insert data to mysql server#############################
        sql = "insert into mongodb_status(server_id,host,port,tags,connect,replset,repl_role,ok,uptime,version,connections_current,connections_available,globalLock_currentQueue,globalLock_activeClients,mem_bits,mem_resident,mem_virtual,mem_supported,mem_mapped,mem_mappedWithJournal,network_bytesIn_persecond,network_bytesOut_persecond,network_numRequests_persecond,opcounters_insert_persecond,opcounters_query_persecond,opcounters_update_persecond,opcounters_delete_persecond,opcounters_command_persecond) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);"       
        param = (server_id,host,port,tags,connect,replset,repl_role,ok,uptime,version,connections_current,connections_available,globalLock_currentQueue,globalLock_activeClients,mem_bits,mem_resident,mem_virtual,mem_supported,mem_mapped,mem_mappedWithJournal,network_bytesIn_persecond,network_bytesOut_persecond,network_numRequests_persecond,opcounters_insert_persecond,opcounters_query_persecond,opcounters_update_persecond,opcounters_delete_persecond,opcounters_command_persecond)
        func.mysql_exec(sql,param)
        role='m'
        func.update_db_status_init(repl_role_new,version,host,port,tags)

    except Exception, e:
        logger_msg="check mongodb %s:%s : %s" %(host,port,e)
        logger.warning(logger_msg)

        try:
            connect=0
            sql="insert into mongodb_status(server_id,host,port,tags,connect) values(%s,%s,%s,%s,%s)"
            param=(server_id,host,port,tags,connect)
            func.mysql_exec(sql,param)

        except Exception, e:
            logger.error(e)
            sys.exit(1)
def clean_invalid_db_status():
    try:
        func.mysql_exec(
            "insert into sqlserver_status_his SELECT *,sysdate() from sqlserver_status where server_id not in(select id from db_cfg_sqlserver where is_delete = 0);",
            '')
        func.mysql_exec(
            'delete from sqlserver_status where server_id not in(select id from db_cfg_sqlserver where is_delete = 0);',
            '')

        func.mysql_exec(
            "insert into sqlserver_mirror_p_his SELECT *,sysdate() from sqlserver_mirror_p where server_id not in(select id from db_cfg_sqlserver where is_delete = 0);",
            '')
        func.mysql_exec(
            'delete from sqlserver_mirror_p where server_id not in(select id from db_cfg_sqlserver where is_delete = 0);',
            '')

        func.mysql_exec(
            "insert into sqlserver_mirror_s_his SELECT *,sysdate() from sqlserver_mirror_s where server_id not in(select id from db_cfg_sqlserver where is_delete = 0);",
            '')
        func.mysql_exec(
            'delete from sqlserver_mirror_s where server_id not in(select id from db_cfg_sqlserver where is_delete = 0);',
            '')

        func.mysql_exec(
            "delete from db_status where db_type = 'sqlserver' and server_id not in(select id from db_cfg_sqlserver where is_delete = 0);",
            '')

    except Exception, e:
        logger.error(e)
                "Generate sqlserver instance alert for server: %s end." %
                (server_id))

            func.mysql_exec("commit;", '')

        except Exception, e:
            logger.error(e)
            sys.exit(1)
        finally:
            sys.exit(1)

    finally:
        func.check_db_status(server_id, host, port, tags, 'sqlserver')

    try:
        func.mysql_exec("begin;", '')
        func.mysql_exec(
            "insert into sqlserver_status_his SELECT *,DATE_FORMAT(sysdate(),'%%Y%%m%%d%%H%%i%%s') from sqlserver_status where server_id = %s;"
            % (server_id), '')
        func.mysql_exec(
            'delete from sqlserver_status where server_id = %s;' % (server_id),
            '')

        #func.mysql_exec("insert into sqlserver_space_his SELECT *,DATE_FORMAT(sysdate(),'%%Y%%m%%d%%H%%i%%s') from sqlserver_space where server_id = %s;" %(server_id),'')
        func.mysql_exec(
            'delete from sqlserver_space where server_id = %s;' % (server_id),
            '')

        connect = 1
        role = -1
        uptime = sqlserver.get_uptime(conn)
def delete_history_data(table_name,expire_days):
    func.mysql_exec('CREATE TABLE '+table_name+'_clean_temp AS SELECT * FROM '+table_name+' WHERE create_time >= from_unixtime(unix_timestamp(now())-('+expire_days+'*24*60*60));','')
    func.mysql_exec('TRUNCATE TABLE '+table_name,'')
    func.mysql_exec('INSERT INTO '+table_name+' SELECT * FROM '+table_name+'_clean_temp','')
    func.mysql_exec('DROP TABLE IF EXISTS '+table_name+'_clean_temp;','')
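delete_history_data() keeps a rolling window on a history table: it copies the rows newer than expire_days into a temporary table, truncates the original, copies the rows back, and drops the temporary table. Both arguments are concatenated straight into the SQL text, so expire_days is passed as a string. An illustrative call follows; the table name and retention period are examples, not taken from the original schedule.

delete_history_data('mysql_status_history', '30')   # keep roughly the last 30 days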
def check_os(ip,community,filter_os_disk,tags):

    func.mysql_exec("insert into os_status_history select *, LEFT(REPLACE(REPLACE(REPLACE(create_time,'-',''),' ',''),':',''),12) from os_status where ip='%s';" %(ip),'')
    func.mysql_exec("insert into os_disk_history select *, LEFT(REPLACE(REPLACE(REPLACE(create_time,'-',''),' ',''),':',''),12) from os_disk where ip='%s';" %(ip),'')
    func.mysql_exec("insert into os_diskio_history select *, LEFT(REPLACE(REPLACE(REPLACE(create_time,'-',''),' ',''),':',''),12) from os_diskio where ip='%s';" %(ip),'')
    func.mysql_exec("insert into os_net_history select *, LEFT(REPLACE(REPLACE(REPLACE(create_time,'-',''),' ',''),':',''),12) from os_net where ip='%s';" %(ip),'')
    func.mysql_exec("delete from os_status where ip='%s'" %(ip),'')
    func.mysql_exec("delete from os_disk where ip='%s'" %(ip),'')
    func.mysql_exec("delete from os_diskio where ip='%s'" %(ip),'')
    func.mysql_exec("delete from os_net where ip='%s'" %(ip),'')

    command="sh check_os.sh"
    try :
        os.system("%s %s %s %s %s %s %s %s %s %s"%(command,ip,dbhost,dbport,dbuser,dbpasswd,dbname,community,filter_os_disk,tags))
   
    except Exception, e:
            print e
            sys.exit(1)
def check_sqlserver(host, port, username, passwd, server_id, tags):
    try:
        conn = pymssql.connect(host=host, port=int(port), user=username, password=passwd, charset="utf8")

        connect = 1
        role = -1
        uptime = sqlserver.get_uptime(conn)
        version = sqlserver.get_version(conn)

        lock_timeout = sqlserver.get_variables(conn, "LOCK_TIMEOUT")
        trancount = sqlserver.get_variables(conn, "TRANCOUNT")
        max_connections = sqlserver.get_variables(conn, "MAX_CONNECTIONS")
        processes = sqlserver.ger_processes(conn)
        processes_running = sqlserver.ger_processes_running(conn)
        processes_waits = sqlserver.ger_processes_waits(conn)

        connections = sqlserver.get_variables(conn, "CONNECTIONS")
        pack_received = sqlserver.get_variables(conn, "PACK_RECEIVED")
        pack_sent = sqlserver.get_variables(conn, "PACK_SENT")
        packet_errors = sqlserver.get_variables(conn, "PACKET_ERRORS")

        time.sleep(1)

        connections_2 = sqlserver.get_variables(conn, "CONNECTIONS")
        pack_received_2 = sqlserver.get_variables(conn, "PACK_RECEIVED")
        pack_sent_2 = sqlserver.get_variables(conn, "PACK_SENT")
        packet_errors_2 = sqlserver.get_variables(conn, "PACKET_ERRORS")

        connections_persecond = int(connections_2) - int(connections)
        pack_received_persecond = int(pack_received_2) - int(pack_received)
        pack_sent_persecond = int(pack_sent_2) - int(pack_sent)
        packet_errors_persecond = int(packet_errors_2) - int(packet_errors)

        sql = "insert into sqlserver_status(server_id,tags,host,port,connect,role,uptime,version,lock_timeout,trancount,max_connections,processes,processes_running,processes_waits,connections_persecond,pack_received_persecond,pack_sent_persecond,packet_errors_persecond) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);"
        param = (
            server_id,
            tags,
            host,
            port,
            connect,
            role,
            uptime,
            version,
            lock_timeout,
            trancount,
            max_connections,
            processes,
            processes_running,
            processes_waits,
            connections_persecond,
            pack_received_persecond,
            pack_sent_persecond,
            packet_errors_persecond,
        )
        func.mysql_exec(sql, param)
        func.update_db_status_init(role, version, host, port, tags)

    except Exception, e:
        logger_msg = "check sqlserver %s:%s : %s" % (host, port, e)
        logger.warning(logger_msg)

        try:
            connect = 0
            sql = "insert into sqlserver_status(server_id,host,port,tags,connect) values(%s,%s,%s,%s,%s)"
            param = (server_id, host, port, tags, connect)
            func.mysql_exec(sql, param)

        except Exception, e:
            logger.error(e)
            sys.exit(1)
def check_mongodb(host,port,user,passwd,server_id,tags):
    try:
        connect = pymongo.Connection(host,int(port))
        db = connect['admin'] 
        db.authenticate(user,passwd)
        serverStatus=connect.admin.command(bson.son.SON([('serverStatus', 1), ('repl', 2)]))
        time.sleep(1)
        serverStatus_2=connect.admin.command(bson.son.SON([('serverStatus', 1), ('repl', 2)]))
        connect = 1
        ok = int(serverStatus['ok'])
        version = serverStatus['version']
        uptime = serverStatus['uptime']
        connections_current = serverStatus['connections']['current']
        connections_available = serverStatus['connections']['available']
        globalLock_activeClients = serverStatus['globalLock']['activeClients']['total']
        globalLock_currentQueue = serverStatus['globalLock']['currentQueue']['total']
        indexCounters_accesses = serverStatus['indexCounters']['accesses']
        indexCounters_hits = serverStatus['indexCounters']['hits']
        indexCounters_misses = serverStatus['indexCounters']['misses']
        indexCounters_resets = serverStatus['indexCounters']['resets']
        indexCounters_missRatio = serverStatus['indexCounters']['missRatio']
        #cursors_totalOpen = serverStatus['cursors']['totalOpen']
        #cursors_timeOut =  serverStatus['cursors']['timeOut']
        dur_commits = serverStatus['dur']['commits']
        dur_journaledMB = serverStatus['dur']['journaledMB']
        dur_writeToDataFilesMB = serverStatus['dur']['writeToDataFilesMB']
        dur_compression = serverStatus['dur']['compression']
        dur_commitsInWriteLock = serverStatus['dur']['commitsInWriteLock']
        dur_earlyCommits = serverStatus['dur']['earlyCommits']
        dur_timeMs_dt = serverStatus['dur']['timeMs']['dt']
        dur_timeMs_prepLogBuffer = serverStatus['dur']['timeMs']['prepLogBuffer']
        dur_timeMs_writeToJournal = serverStatus['dur']['timeMs']['writeToJournal']
        dur_timeMs_writeToDataFiles = serverStatus['dur']['timeMs']['writeToDataFiles']
        dur_timeMs_remapPrivateView = serverStatus['dur']['timeMs']['remapPrivateView']
        mem_bits = serverStatus['mem']['bits']
        mem_resident = serverStatus['mem']['resident']
        mem_virtual = serverStatus['mem']['virtual']
        mem_supported = serverStatus['mem']['supported']
        mem_mapped = serverStatus['mem']['mapped']
        mem_mappedWithJournal = serverStatus['mem']['mappedWithJournal']
        network_bytesIn_persecond = int(serverStatus_2['network']['bytesIn']) - int(serverStatus['network']['bytesIn'])
        network_bytesOut_persecond = int(serverStatus_2['network']['bytesOut']) - int(serverStatus['network']['bytesOut'])
        network_numRequests_persecond = int(serverStatus_2['network']['numRequests']) - int(serverStatus['network']['numRequests'])
        opcounters_insert_persecond = int(serverStatus_2['opcounters']['insert']) - int(serverStatus['opcounters']['insert'])
        opcounters_query_persecond = int(serverStatus_2['opcounters']['query']) - int(serverStatus['opcounters']['query'])
        opcounters_update_persecond = int(serverStatus_2['opcounters']['update']) - int(serverStatus['opcounters']['update'])
        opcounters_delete_persecond = int(serverStatus_2['opcounters']['delete']) - int(serverStatus['opcounters']['delete'])
        opcounters_command_persecond = int(serverStatus_2['opcounters']['command']) - int(serverStatus['opcounters']['command'])

        #replset
        try:
            repl=serverStatus['repl']
            setName=repl['setName']
            replset=1
            if repl['secondary']==True:
                repl_role='secondary'
                repl_role_new='s'
            else:
                repl_role='master'
                repl_role_new='m' 
        except:
            replset=0
            repl_role='master'
            repl_role_new='m'
            pass

        ##################### insert data to mysql server#############################
        sql = "insert into mongodb_status(server_id,host,port,tags,connect,replset,repl_role,ok,uptime,version,connections_current,connections_available,globalLock_currentQueue,globalLock_activeClients,indexCounters_accesses,indexCounters_hits,indexCounters_misses,indexCounters_resets,indexCounters_missRatio,dur_commits,dur_journaledMB,dur_writeToDataFilesMB,dur_compression,dur_commitsInWriteLock,dur_earlyCommits,dur_timeMs_dt,dur_timeMs_prepLogBuffer,dur_timeMs_writeToJournal,dur_timeMs_writeToDataFiles,dur_timeMs_remapPrivateView,mem_bits,mem_resident,mem_virtual,mem_supported,mem_mapped,mem_mappedWithJournal,network_bytesIn_persecond,network_bytesOut_persecond,network_numRequests_persecond,opcounters_insert_persecond,opcounters_query_persecond,opcounters_update_persecond,opcounters_delete_persecond,opcounters_command_persecond) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);"       
        param = (server_id,host,port,tags,connect,replset,repl_role,ok,uptime,version,connections_current,connections_available,globalLock_currentQueue,globalLock_activeClients,indexCounters_accesses,indexCounters_hits,indexCounters_misses,indexCounters_resets,indexCounters_missRatio,dur_commits,dur_journaledMB,dur_writeToDataFilesMB,dur_compression,dur_commitsInWriteLock,dur_earlyCommits,dur_timeMs_dt,dur_timeMs_prepLogBuffer,dur_timeMs_writeToJournal,dur_timeMs_writeToDataFiles,dur_timeMs_remapPrivateView,mem_bits,mem_resident,mem_virtual,mem_supported,mem_mapped,mem_mappedWithJournal,network_bytesIn_persecond,network_bytesOut_persecond,network_numRequests_persecond,opcounters_insert_persecond,opcounters_query_persecond,opcounters_update_persecond,opcounters_delete_persecond,opcounters_command_persecond)
        func.mysql_exec(sql,param)
        func.update_db_status_init(repl_role_new,version,host,port,tags)

    except Exception, e:
        logger_msg="check mongodb %s:%s : %s" %(host,port,e)
        logger.warning(logger_msg)

        try:
            connect=0
            sql="insert into mongodb_status(server_id,host,port,tags,connect) values(%s,%s,%s,%s,%s)"
            param=(server_id,host,port,tags,connect)
            func.mysql_exec(sql,param)

        except Exception, e:
            logger.error(e)
            sys.exit(1)
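
# Illustrative sketch (not part of the original example): the
# opcounters_*_persecond values above are plain differences between two
# serverStatus documents sampled one second apart. A minimal version that
# works on plain dicts, so it runs without a MongoDB server; the sample
# dicts below are made up for illustration.
def opcounter_rates(status_first, status_second):
    """Return per-second op rates from two serverStatus samples taken 1s apart."""
    rates = {}
    for op in ('insert', 'query', 'update', 'delete', 'command'):
        rates[op] = int(status_second['opcounters'][op]) - int(status_first['opcounters'][op])
    return rates

_s0 = {'opcounters': {'insert': 10, 'query': 50, 'update': 5, 'delete': 1, 'command': 200}}
_s1 = {'opcounters': {'insert': 12, 'query': 58, 'update': 6, 'delete': 1, 'command': 215}}
print(opcounter_rates(_s0, _s1))   # {'insert': 2, 'query': 8, 'update': 1, 'delete': 0, 'command': 15}
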
def get_connect(server_id):
    host = ""
    port = ""
    username = ""
    passwd = ""
    tags = ""

    server = func.mysql_query(
        "select host,port,username,password,tags from db_cfg_sqlserver where id=%s;"
        % (server_id))
    if server:
        for row in server:
            host = row[0]
            port = row[1]
            username = row[2]
            passwd = row[3]
            tags = row[4]

    if host == "":
        logger.warning("get host failed, exit!")
        sys.exit(1)

    try:
        conn = pymssql.connect(host=host,
                               port=int(port),
                               user=username,
                               password=passwd,
                               charset="utf8")
        return conn
    except Exception, e:
        func.mysql_exec("rollback;", '')
        logger_msg = "check sqlserver %s:%s : %s" % (host, port, e)
        logger.warning(logger_msg)

        try:
            connect = 0

            func.mysql_exec("begin;", '')

            sql = "delete from sqlserver_status where server_id = %s; " % (
                server_id)
            func.mysql_exec(sql, '')

            # delete for the mirror record
            sql = "delete from sqlserver_mirror_p where server_id = %s; " % (
                server_id)
            func.mysql_exec(sql, '')

            sql = "delete from sqlserver_mirror_s where server_id = %s; " % (
                server_id)
            func.mysql_exec(sql, '')

            sql = "insert into sqlserver_status(server_id,host,port,tags,connect) values(%s,%s,%s,%s,%s)"
            param = (server_id, host, port, tags, connect)
            func.mysql_exec(sql, param)

            logger.info(
                "Generate sqlserver instance alert for server: %s begin:" %
                (server_id))
            alert.gen_alert_sqlserver_status(
                server_id)  # generate sqlserver instance alert
            logger.info(
                "Generate sqlserver instance alert for server: %s end." %
                (server_id))

            func.mysql_exec("commit;", '')

        except Exception, e:
            logger.error(e)
            sys.exit(1)
Exemple #42
        db_block_changes_persecond = sysstat_1['db block changes']-sysstat_0['db block changes']
        os_cpu_wait_time = sysstat_0['OS CPU Qt wait time']
        logons_persecond = sysstat_1['logons cumulative']-sysstat_0['logons cumulative']
        logons_current = sysstat_0['logons current']
        opened_cursors_persecond = sysstat_1['opened cursors cumulative']-sysstat_0['opened cursors cumulative']
        opened_cursors_current = sysstat_0['opened cursors current']
        user_commits_persecond = sysstat_1['user commits']-sysstat_0['user commits']
        user_rollbacks_persecond = sysstat_1['user rollbacks']-sysstat_0['user rollbacks']
        user_calls_persecond = sysstat_1['user calls']-sysstat_0['user calls']
        db_block_gets_persecond = sysstat_1['db block gets']-sysstat_0['db block gets']
        #print session_logical_reads_persecond

        ##################### insert data to mysql server#############################
        sql = "insert into oracle_status(server_id,host,port,tags,connect,instance_name,instance_role,instance_status,database_role,open_mode,protection_mode,host_name,database_status,startup_time,uptime,version,archiver,session_total,session_actives,session_waits,dg_stats,dg_delay,processes,session_logical_reads_persecond,physical_reads_persecond,physical_writes_persecond,physical_read_io_requests_persecond,physical_write_io_requests_persecond,db_block_changes_persecond,os_cpu_wait_time,logons_persecond,logons_current,opened_cursors_persecond,opened_cursors_current,user_commits_persecond,user_rollbacks_persecond,user_calls_persecond,db_block_gets_persecond) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);"
        param = (server_id,host,port,tags,connect,instance_name,instance_role,instance_status,database_role,open_mode,protection_mode,host_name,database_status,startup_time,uptime,version,archiver,session_total,session_actives,session_waits,dg_stats,dg_delay,processes,session_logical_reads_persecond,physical_reads_persecond,physical_writes_persecond,physical_read_io_requests_persecond,physical_write_io_requests_persecond,db_block_changes_persecond,os_cpu_wait_time,logons_persecond,logons_current,opened_cursors_persecond,opened_cursors_current,user_commits_persecond,user_rollbacks_persecond,user_calls_persecond,db_block_gets_persecond)
        func.mysql_exec(sql,param) 
        func.update_db_status_init(database_role_new,version,host,port,tags)

        #check tablespace
        tablespace = oracle.get_tablespace(conn)
        if tablespace:
           for line in tablespace:
              sql="insert into oracle_tablespace(server_id,host,port,tags,tablespace_name,total_size,used_size,avail_size,used_rate) values(%s,%s,%s,%s,%s,%s,%s,%s,%s)"
              param=(server_id,host,port,tags,line[0],line[1],line[2],line[3],line[4])
              func.mysql_exec(sql,param)
           

    except Exception, e:
        logger.error(e)
        sys.exit(1)
def check_mirror(mirror_id, pri_id, sta_id, db_name, is_switch):
    p_id = ""
    s_id = ""
    p_conn = ""
    s_conn = ""
    if is_switch == 0:
        p_id = pri_id
        s_id = sta_id
    else:
        p_id = sta_id
        s_id = pri_id

    try:
        p_conn = get_connect(p_id)
        s_conn = get_connect(s_id)

        func.mysql_exec("begin;", '')
        func.mysql_exec(
            "insert into sqlserver_mirror_p_his SELECT *,DATE_FORMAT(sysdate(),'%%Y%%m%%d%%H%%i%%s') from sqlserver_mirror_p where db_name = '%s' and server_id in (%s, %s);"
            % (db_name, pri_id, sta_id), '')
        func.mysql_exec(
            "delete from sqlserver_mirror_p where db_name = '%s' and server_id in (%s, %s);"
            % (db_name, pri_id, sta_id), '')

        func.mysql_exec(
            "insert into sqlserver_mirror_s_his SELECT *,DATE_FORMAT(sysdate(),'%%Y%%m%%d%%H%%i%%s') from sqlserver_mirror_s where db_name = '%s' and server_id in (%s, %s);"
            % (db_name, pri_id, sta_id), '')
        func.mysql_exec(
            "delete from sqlserver_mirror_s where db_name = '%s' and server_id in (%s, %s);"
            % (db_name, pri_id, sta_id), '')

        if p_conn:
            # collect primary information
            logger.info("Generate mirror primary info for server: %s begin:" %
                        (p_id))
            mp_info = sqlserver.get_mirror_info(p_conn, db_name)
            if mp_info:
                if mp_info[4] == 1:
                    sql = "insert into sqlserver_mirror_p(mirror_id,server_id,db_id,db_name,mirroring_role,mirroring_state,mirroring_state_desc,mirroring_safety_level,mirroring_partner_name,mirroring_partner_instance,mirroring_failover_lsn,mirroring_connection_timeout,mirroring_redo_queue,mirroring_end_of_log_lsn,mirroring_replication_lsn) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
                    param = (mirror_id, p_id, mp_info[0], mp_info[1],
                             mp_info[4], mp_info[5], mp_info[6], mp_info[7],
                             mp_info[8], mp_info[9], mp_info[10], mp_info[11],
                             mp_info[12], mp_info[13], mp_info[14])
                    func.mysql_exec(sql, param)
                    logger.info(
                        "Generate mirror primary info for server: %s end:" %
                        (p_id))
                else:
                    logger.warn(
                        "The primary server: %s configured in mirror group is NOT match the mirroring_role!"
                        % (p_id))

        if s_conn:
            # collect standby information
            logger.info("Generate mirror standby info for server: %s begin:" %
                        (s_id))
            ms_info = sqlserver.get_mirror_info(s_conn, db_name)
            if ms_info:
                if ms_info[4] == 1:
                    logger.warn(
                        "The standby server: %s configured in mirror group is NOT match the mirroring_role!"
                        % (s_id))
                else:
                    sql = "insert into sqlserver_mirror_s(mirror_id,server_id,db_id,db_name,master_server,master_port,mirroring_role,mirroring_state,mirroring_state_desc,mirroring_safety_level,mirroring_partner_name,mirroring_partner_instance,mirroring_failover_lsn,mirroring_connection_timeout,mirroring_redo_queue,mirroring_end_of_log_lsn,mirroring_replication_lsn) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
                    param = (mirror_id, s_id, ms_info[0], ms_info[1],
                             ms_info[2], ms_info[3], ms_info[4], ms_info[5],
                             ms_info[6], ms_info[7], ms_info[8], ms_info[9],
                             ms_info[10], ms_info[11], ms_info[12],
                             ms_info[13], ms_info[14])
                    func.mysql_exec(sql, param)
                    logger.info(
                        "Generate mirror standby info for server: %s end:" %
                        (s_id))

        func.mysql_exec("commit;", '')
    except Exception, e:
        logger.error(e)
        func.mysql_exec("rollback;", '')
Exemple #44
def check_mysql(host,port,username,password,server_id,tags):
    try:
        conn=MySQLdb.connect(host=host,user=username,passwd=password,port=int(port),connect_timeout=3,charset='utf8')
        cur=conn.cursor()
        conn.select_db('information_schema')
        #cur.execute('flush hosts;')
        ############################# CHECK MYSQL ####################################################
        mysql_variables = func.get_mysql_variables(cur)
        mysql_status = func.get_mysql_status(cur)       
        time.sleep(1)
        mysql_status_2 = func.get_mysql_status(cur)
        ############################# GET VARIABLES ###################################################
        version = func.get_item(mysql_variables,'version')
        key_buffer_size = func.get_item(mysql_variables,'key_buffer_size')
        sort_buffer_size = func.get_item(mysql_variables,'sort_buffer_size')
        join_buffer_size = func.get_item(mysql_variables,'join_buffer_size')
        max_connections = func.get_item(mysql_variables,'max_connections')
        max_connect_errors = func.get_item(mysql_variables,'max_connect_errors')
        open_files_limit = func.get_item(mysql_variables,'open_files_limit')
        table_open_cache = func.get_item(mysql_variables,'table_open_cache')
        max_tmp_tables = func.get_item(mysql_variables,'max_tmp_tables')
        max_heap_table_size = func.get_item(mysql_variables,'max_heap_table_size')
        max_allowed_packet = func.get_item(mysql_variables,'max_allowed_packet')
        ############################# GET INNODB INFO ##################################################
        #innodb variables
        innodb_version = func.get_item(mysql_variables,'innodb_version')
        innodb_buffer_pool_instances = func.get_item(mysql_variables,'innodb_buffer_pool_instances')
        innodb_buffer_pool_size = func.get_item(mysql_variables,'innodb_buffer_pool_size')
        innodb_doublewrite = func.get_item(mysql_variables,'innodb_doublewrite')
        innodb_file_per_table = func.get_item(mysql_variables,'innodb_file_per_table')
        innodb_flush_log_at_trx_commit = func.get_item(mysql_variables,'innodb_flush_log_at_trx_commit')
        innodb_flush_method = func.get_item(mysql_variables,'innodb_flush_method')
        innodb_force_recovery = func.get_item(mysql_variables,'innodb_force_recovery')
        innodb_io_capacity = func.get_item(mysql_variables,'innodb_io_capacity')
        innodb_read_io_threads = func.get_item(mysql_variables,'innodb_read_io_threads')
        innodb_write_io_threads = func.get_item(mysql_variables,'innodb_write_io_threads')
        #innodb status
        innodb_buffer_pool_pages_total = int(func.get_item(mysql_status,'Innodb_buffer_pool_pages_total'))
        innodb_buffer_pool_pages_data = int(func.get_item(mysql_status,'Innodb_buffer_pool_pages_data'))
        innodb_buffer_pool_pages_dirty = int(func.get_item(mysql_status,'Innodb_buffer_pool_pages_dirty'))
        innodb_buffer_pool_pages_flushed = int(func.get_item(mysql_status,'Innodb_buffer_pool_pages_flushed'))
        innodb_buffer_pool_pages_free = int(func.get_item(mysql_status,'Innodb_buffer_pool_pages_free'))
        innodb_buffer_pool_pages_misc = int(func.get_item(mysql_status,'Innodb_buffer_pool_pages_misc'))
        innodb_page_size = int(func.get_item(mysql_status,'Innodb_page_size'))
        innodb_pages_created = int(func.get_item(mysql_status,'Innodb_pages_created'))
        innodb_pages_read = int(func.get_item(mysql_status,'Innodb_pages_read'))
        innodb_pages_written = int(func.get_item(mysql_status,'Innodb_pages_written'))
        innodb_row_lock_current_waits = int(func.get_item(mysql_status,'Innodb_row_lock_current_waits'))
        #innodb persecond info
        innodb_buffer_pool_read_requests_persecond = int(func.get_item(mysql_status_2,'Innodb_buffer_pool_read_requests')) - int(func.get_item(mysql_status,'Innodb_buffer_pool_read_requests'))
        innodb_buffer_pool_reads_persecond = int(func.get_item(mysql_status_2,'Innodb_buffer_pool_reads')) - int(func.get_item(mysql_status,'Innodb_buffer_pool_reads'))
        innodb_buffer_pool_write_requests_persecond = int(func.get_item(mysql_status_2,'Innodb_buffer_pool_write_requests')) - int(func.get_item(mysql_status,'Innodb_buffer_pool_write_requests'))
        innodb_buffer_pool_pages_flushed_persecond = int(func.get_item(mysql_status_2,'Innodb_buffer_pool_pages_flushed')) - int(func.get_item(mysql_status,'Innodb_buffer_pool_pages_flushed'))
        innodb_rows_deleted_persecond = int(func.get_item(mysql_status_2,'Innodb_rows_deleted')) - int(func.get_item(mysql_status,'Innodb_rows_deleted'))
        innodb_rows_inserted_persecond = int(func.get_item(mysql_status_2,'Innodb_rows_inserted')) - int(func.get_item(mysql_status,'Innodb_rows_inserted'))
        innodb_rows_read_persecond = int(func.get_item(mysql_status_2,'Innodb_rows_read')) - int(func.get_item(mysql_status,'Innodb_rows_read'))
        innodb_rows_updated_persecond = int(func.get_item(mysql_status_2,'Innodb_rows_updated')) - int(func.get_item(mysql_status,'Innodb_rows_updated'))
        ############################# GET STATUS ##################################################
        connect = 1
        uptime = func.get_item(mysql_status,'Uptime')
        open_files = func.get_item(mysql_status,'Open_files')
        open_tables = func.get_item(mysql_status,'Open_tables')
        threads_connected = func.get_item(mysql_status,'Threads_connected')
        threads_running = func.get_item(mysql_status,'Threads_running')
        threads_created = func.get_item(mysql_status,'Threads_created')
        threads_cached = func.get_item(mysql_status,'Threads_cached')
        threads_waits = mysql.get_waits(conn)
        connections = func.get_item(mysql_status,'Connections')
        aborted_clients = func.get_item(mysql_status,'Aborted_clients')
        aborted_connects = func.get_item(mysql_status,'Aborted_connects')
        key_blocks_not_flushed = func.get_item(mysql_status,'Key_blocks_not_flushed')
        key_blocks_unused = func.get_item(mysql_status,'Key_blocks_unused')
        key_blocks_used = func.get_item(mysql_status,'Key_blocks_used')
        ############################# GET STATUS PERSECOND ##################################################
        connections_persecond = int(func.get_item(mysql_status_2,'Connections')) - int(func.get_item(mysql_status,'Connections'))
        bytes_received_persecond = (int(func.get_item(mysql_status_2,'Bytes_received')) - int(func.get_item(mysql_status,'Bytes_received')))/1024
        bytes_sent_persecond = (int(func.get_item(mysql_status_2,'Bytes_sent')) - int(func.get_item(mysql_status,'Bytes_sent')))/1024
        com_select_persecond = int(func.get_item(mysql_status_2,'Com_select')) - int(func.get_item(mysql_status,'Com_select'))
        com_insert_persecond = int(func.get_item(mysql_status_2,'Com_insert')) - int(func.get_item(mysql_status,'Com_insert'))
        com_update_persecond = int(func.get_item(mysql_status_2,'Com_update')) - int(func.get_item(mysql_status,'Com_update'))
        com_delete_persecond = int(func.get_item(mysql_status_2,'Com_delete')) - int(func.get_item(mysql_status,'Com_delete'))
        com_commit_persecond = int(func.get_item(mysql_status_2,'Com_commit')) - int(func.get_item(mysql_status,'Com_commit'))
        com_rollback_persecond = int(func.get_item(mysql_status_2,'Com_rollback')) - int(func.get_item(mysql_status,'Com_rollback'))
        questions_persecond = int(func.get_item(mysql_status_2,'Questions')) - int(func.get_item(mysql_status,'Questions'))
        queries_persecond = int(func.get_item(mysql_status_2,'Queries')) - int(func.get_item(mysql_status,'Queries'))
        transaction_persecond = (int(func.get_item(mysql_status_2,'Com_commit')) + int(func.get_item(mysql_status_2,'Com_rollback'))) - (int(func.get_item(mysql_status,'Com_commit')) + int(func.get_item(mysql_status,'Com_rollback')))
        created_tmp_disk_tables_persecond = int(func.get_item(mysql_status_2,'Created_tmp_disk_tables')) - int(func.get_item(mysql_status,'Created_tmp_disk_tables'))
        created_tmp_files_persecond = int(func.get_item(mysql_status_2,'Created_tmp_files')) - int(func.get_item(mysql_status,'Created_tmp_files'))
        created_tmp_tables_persecond = int(func.get_item(mysql_status_2,'Created_tmp_tables')) - int(func.get_item(mysql_status,'Created_tmp_tables'))
        table_locks_immediate_persecond = int(func.get_item(mysql_status_2,'Table_locks_immediate')) - int(func.get_item(mysql_status,'Table_locks_immediate'))
        table_locks_waited_persecond = int(func.get_item(mysql_status_2,'Table_locks_waited')) - int(func.get_item(mysql_status,'Table_locks_waited'))
        key_read_requests_persecond = int(func.get_item(mysql_status_2,'Key_read_requests')) - int(func.get_item(mysql_status,'Key_read_requests'))
        key_reads_persecond = int(func.get_item(mysql_status_2,'Key_reads')) - int(func.get_item(mysql_status,'Key_reads'))
        key_write_requests_persecond = int(func.get_item(mysql_status_2,'Key_write_requests')) - int(func.get_item(mysql_status,'Key_write_requests'))
        key_writes_persecond = int(func.get_item(mysql_status_2,'Key_writes')) - int(func.get_item(mysql_status,'Key_writes'))
        ############################# GET MYSQL HITRATE ##################################################
        if (string.atof(func.get_item(mysql_status,'Qcache_hits')) + string.atof(func.get_item(mysql_status,'Com_select'))) <> 0:
            query_cache_hitrate = string.atof(func.get_item(mysql_status,'Qcache_hits')) / (string.atof(func.get_item(mysql_status,'Qcache_hits')) + string.atof(func.get_item(mysql_status,'Com_select')))
            query_cache_hitrate =  "%9.2f" %query_cache_hitrate
        else:
            query_cache_hitrate = 0

        if string.atof(func.get_item(mysql_status,'Connections')) <> 0:
            thread_cache_hitrate = 1 - string.atof(func.get_item(mysql_status,'Threads_created')) / string.atof(func.get_item(mysql_status,'Connections'))
            thread_cache_hitrate =  "%9.2f" %thread_cache_hitrate
        else:
            thread_cache_hitrate = 0

        if string.atof(func.get_item(mysql_status,'Key_read_requests')) <> 0:
            key_buffer_read_rate = 1 - string.atof(func.get_item(mysql_status,'Key_reads')) / string.atof(func.get_item(mysql_status,'Key_read_requests'))
            key_buffer_read_rate =  "%9.2f" %key_buffer_read_rate
        else:
            key_buffer_read_rate = 0

        if string.atof(func.get_item(mysql_status,'Key_write_requests')) <> 0:
            key_buffer_write_rate = 1 - string.atof(func.get_item(mysql_status,'Key_writes')) / string.atof(func.get_item(mysql_status,'Key_write_requests'))
            key_buffer_write_rate =  "%9.2f" %key_buffer_write_rate
        else:
            key_buffer_write_rate = 0
        
        if (string.atof(func.get_item(mysql_status,'Key_blocks_used'))+string.atof(func.get_item(mysql_status,'Key_blocks_unused'))) <> 0:
            key_blocks_used_rate = string.atof(func.get_item(mysql_status,'Key_blocks_used')) / (string.atof(func.get_item(mysql_status,'Key_blocks_used'))+string.atof(func.get_item(mysql_status,'Key_blocks_unused')))
            key_blocks_used_rate =  "%9.2f" %key_blocks_used_rate
        else:
            key_blocks_used_rate = 0

        if (string.atof(func.get_item(mysql_status,'Created_tmp_disk_tables'))+string.atof(func.get_item(mysql_status,'Created_tmp_tables'))) <> 0:
            created_tmp_disk_tables_rate = string.atof(func.get_item(mysql_status,'Created_tmp_disk_tables')) / (string.atof(func.get_item(mysql_status,'Created_tmp_disk_tables'))+string.atof(func.get_item(mysql_status,'Created_tmp_tables')))
            created_tmp_disk_tables_rate =  "%9.2f" %created_tmp_disk_tables_rate
        else:
            created_tmp_disk_tables_rate = 0

        if string.atof(max_connections) <> 0:
            connections_usage_rate = string.atof(threads_connected)/string.atof(max_connections)
            connections_usage_rate =  "%9.2f" %connections_usage_rate
        else:
            connections_usage_rate = 0

        if string.atof(open_files_limit) <> 0:            
            open_files_usage_rate = string.atof(open_files)/string.atof(open_files_limit)
            open_files_usage_rate =  "%9.2f" %open_files_usage_rate
        else:
            open_files_usage_rate = 0

        if string.atof(table_open_cache) <> 0:            
            open_tables_usage_rate = string.atof(open_tables)/string.atof(table_open_cache)
            open_tables_usage_rate =  "%9.2f" %open_tables_usage_rate
        else:
            open_tables_usage_rate = 0
  
        #repl
        slave_status=cur.execute('show slave status;')
        if slave_status <> 0:
            role='slave'
            role_new='s'
        else:
            role='master'
            role_new='m'

        ############################# INSERT INTO SERVER ##################################################
        sql = "insert into mysql_status(server_id,host,port,tags,connect,role,uptime,version,max_connections,max_connect_errors,open_files_limit,table_open_cache,max_tmp_tables,max_heap_table_size,max_allowed_packet,open_files,open_tables,threads_connected,threads_running,threads_waits,threads_created,threads_cached,connections,aborted_clients,aborted_connects,connections_persecond,bytes_received_persecond,bytes_sent_persecond,com_select_persecond,com_insert_persecond,com_update_persecond,com_delete_persecond,com_commit_persecond,com_rollback_persecond,questions_persecond,queries_persecond,transaction_persecond,created_tmp_tables_persecond,created_tmp_disk_tables_persecond,created_tmp_files_persecond,table_locks_immediate_persecond,table_locks_waited_persecond,key_buffer_size,sort_buffer_size,join_buffer_size,key_blocks_not_flushed,key_blocks_unused,key_blocks_used,key_read_requests_persecond,key_reads_persecond,key_write_requests_persecond,key_writes_persecond,innodb_version,innodb_buffer_pool_instances,innodb_buffer_pool_size,innodb_doublewrite,innodb_file_per_table,innodb_flush_log_at_trx_commit,innodb_flush_method,innodb_force_recovery,innodb_io_capacity,innodb_read_io_threads,innodb_write_io_threads,innodb_buffer_pool_pages_total,innodb_buffer_pool_pages_data,innodb_buffer_pool_pages_dirty,innodb_buffer_pool_pages_flushed,innodb_buffer_pool_pages_free,innodb_buffer_pool_pages_misc,innodb_page_size,innodb_pages_created,innodb_pages_read,innodb_pages_written,innodb_row_lock_current_waits,innodb_buffer_pool_pages_flushed_persecond,innodb_buffer_pool_read_requests_persecond,innodb_buffer_pool_reads_persecond,innodb_buffer_pool_write_requests_persecond,innodb_rows_read_persecond,innodb_rows_inserted_persecond,innodb_rows_updated_persecond,innodb_rows_deleted_persecond,query_cache_hitrate,thread_cache_hitrate,key_buffer_read_rate,key_buffer_write_rate,key_blocks_used_rate,created_tmp_disk_tables_rate,connections_usage_rate,open_files_usage_rate,open_tables_usage_rate) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);"
        param = (server_id,host,port,tags,connect,role,uptime,version,max_connections,max_connect_errors,open_files_limit,table_open_cache,max_tmp_tables,max_heap_table_size,max_allowed_packet,open_files,open_tables,threads_connected,threads_running,threads_waits,threads_created,threads_cached,connections,aborted_clients,aborted_connects,connections_persecond,bytes_received_persecond,bytes_sent_persecond,com_select_persecond,com_insert_persecond,com_update_persecond,com_delete_persecond,com_commit_persecond,com_rollback_persecond,questions_persecond,queries_persecond,transaction_persecond,created_tmp_tables_persecond,created_tmp_disk_tables_persecond,created_tmp_files_persecond,table_locks_immediate_persecond,table_locks_waited_persecond,key_buffer_size,sort_buffer_size,join_buffer_size,key_blocks_not_flushed,key_blocks_unused,key_blocks_used,key_read_requests_persecond,key_reads_persecond,key_write_requests_persecond,key_writes_persecond,innodb_version,innodb_buffer_pool_instances,innodb_buffer_pool_size,innodb_doublewrite,innodb_file_per_table,innodb_flush_log_at_trx_commit,innodb_flush_method,innodb_force_recovery,innodb_io_capacity,innodb_read_io_threads,innodb_write_io_threads,innodb_buffer_pool_pages_total,innodb_buffer_pool_pages_data,innodb_buffer_pool_pages_dirty,innodb_buffer_pool_pages_flushed,innodb_buffer_pool_pages_free,innodb_buffer_pool_pages_misc,innodb_page_size,innodb_pages_created,innodb_pages_read,innodb_pages_written,innodb_row_lock_current_waits,innodb_buffer_pool_pages_flushed_persecond,innodb_buffer_pool_read_requests_persecond,innodb_buffer_pool_reads_persecond,innodb_buffer_pool_write_requests_persecond,innodb_rows_read_persecond,innodb_rows_inserted_persecond,innodb_rows_updated_persecond,innodb_rows_deleted_persecond,query_cache_hitrate,thread_cache_hitrate,key_buffer_read_rate,key_buffer_write_rate,key_blocks_used_rate,created_tmp_disk_tables_rate,connections_usage_rate,open_files_usage_rate,open_tables_usage_rate)
        func.mysql_exec(sql,param)
        func.update_db_status_init(role_new,version,host,port,tags)

        #check mysql process
        processlist=cur.execute("select * from information_schema.processlist where DB !='information_schema' and command !='Sleep';")
        if processlist:
            for line in cur.fetchall():
                sql="insert into mysql_processlist(server_id,host,port,tags,pid,p_user,p_host,p_db,command,time,status,info) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
                param=(server_id,host,port,tags,line[0],line[1],line[2],line[3],line[4],line[5],line[6],line[7])
                func.mysql_exec(sql,param)

        #check mysql connected
        connected=cur.execute("select SUBSTRING_INDEX(host,':',1) as connect_server, user connect_user,db connect_db, count(SUBSTRING_INDEX(host,':',1)) as connect_count  from information_schema.processlist where db is not null and db!='information_schema' and db !='performance_schema' group by connect_server ;");
        if connected:
            for line in cur.fetchall():
                sql="insert into mysql_connected(server_id,host,port,tags,connect_server,connect_user,connect_db,connect_count) values(%s,%s,%s,%s,%s,%s,%s,%s);"
                param =(server_id,host,port,tags,line[0],line[1],line[2],line[3])
                func.mysql_exec(sql,param)

        #check mysql replication
        master_thread=cur.execute("select * from information_schema.processlist where COMMAND = 'Binlog Dump' or COMMAND = 'Binlog Dump GTID';")
        slave_status=cur.execute('show slave status;')
        datalist=[]
        if master_thread >= 1:
            datalist.append(int(1))
            if slave_status <> 0:
                datalist.append(int(1))
            else:
                datalist.append(int(0))
        else:
            datalist.append(int(0))
            if slave_status <> 0:
                datalist.append(int(1))
            else:
                datalist.append(int(0))


        if slave_status <> 0:
            gtid_mode=cur.execute("select * from information_schema.global_variables where variable_name='gtid_mode';")
            result=cur.fetchone()
            if result:
                gtid_mode=result[1]
            else:
                gtid_mode='OFF'
            datalist.append(gtid_mode)
            read_only=cur.execute("select * from information_schema.global_variables where variable_name='read_only';")
            result=cur.fetchone()
            datalist.append(result[1])
            slave_info=cur.execute('show slave status;')
            result=cur.fetchone()
            master_server=result[1]
            master_port=result[3]
            slave_io_run=result[10]
            slave_sql_run=result[11]
            delay=result[32]
            current_binlog_file=result[9]
            current_binlog_pos=result[21]
            master_binlog_file=result[5]
            master_binlog_pos=result[6]

            datalist.append(master_server)
            datalist.append(master_port)
            datalist.append(slave_io_run)
            datalist.append(slave_sql_run)
            datalist.append(delay)
            datalist.append(current_binlog_file)
            datalist.append(current_binlog_pos)
            datalist.append(master_binlog_file)
            datalist.append(master_binlog_pos)
            datalist.append(0)

        elif master_thread >= 1:
            gtid_mode=cur.execute("select * from information_schema.global_variables where variable_name='gtid_mode';")
            result=cur.fetchone()
            if result:
                gtid_mode=result[1]
            else:
                gtid_mode='OFF'
            datalist.append(gtid_mode)
            read_only=cur.execute("select * from information_schema.global_variables where variable_name='read_only';")
            result=cur.fetchone()
            datalist.append(result[1])
            datalist.append('---')
            datalist.append('---')
            datalist.append('---')
            datalist.append('---')
            datalist.append('---')
            datalist.append('---')
            datalist.append('---')
            master=cur.execute('show master status;')
            master_result=cur.fetchone()
            datalist.append(master_result[0])
            datalist.append(master_result[1])
            binlog_file=cur.execute('show master logs;')
            binlogs=0
            if binlog_file:
                for row in cur.fetchall():
                    binlogs = binlogs + row[1]
                datalist.append(binlogs)
        else:
            datalist=[]

        result=datalist
        if result:
            sql="insert into mysql_replication(server_id,tags,host,port,is_master,is_slave,gtid_mode,read_only,master_server,master_port,slave_io_run,slave_sql_run,delay,current_binlog_file,current_binlog_pos,master_binlog_file,master_binlog_pos,master_binlog_space) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
            param=(server_id,tags,host,port,result[0],result[1],result[2],result[3],result[4],result[5],result[6],result[7],result[8],result[9],result[10],result[11],result[12],result[13])
            func.mysql_exec(sql,param)

        cur.close()
        conn.close()

    except MySQLdb.Error,e:
        logger_msg="check mysql %s:%s failure: %d %s" %(host,port,e.args[0],e.args[1])
        logger.warning(logger_msg)
        logger_msg="check mysql %s:%s failure: sleep 3 seconds and check again." %(host,port)
        logger.warning(logger_msg)
        time.sleep(3)
        try:
            conn=MySQLdb.connect(host=host,user=username,passwd=password,port=int(port),connect_timeout=3,charset='utf8')
            cur=conn.cursor()
            conn.select_db('information_schema')
        except MySQLdb.Error,e:
            logger_msg="check mysql second %s:%s failure: %d %s" %(host,port,e.args[0],e.args[1])
            logger.warning(logger_msg)
            connect = 0
            sql="insert into mysql_status(server_id,host,port,tags,connect) values(%s,%s,%s,%s,%s)"
            param=(server_id,host,port,tags,connect)
            func.mysql_exec(sql,param)
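
# Illustrative sketch (not part of the original example): the hit-rate and
# usage-rate figures above all follow the same guarded-division pattern,
# returning 0 when the denominator is zero and otherwise a "%9.2f" string.
# A minimal version using plain float() (string.atof is simply a wrapper
# around float in Python 2); safe_ratio is an assumed helper name.
def safe_ratio(numerator, denominator):
    """Return numerator/denominator as a '%9.2f' string, or 0 if the denominator is 0."""
    denominator = float(denominator)
    if denominator == 0:
        return 0
    return "%9.2f" % (float(numerator) / denominator)

# usage sketch mirroring connections_usage_rate = threads_connected / max_connections:
print(safe_ratio('151', '1000'))   # '     0.15'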
Exemple #45
def check_mysql(host, port, username, password, server_id, tags):
    try:
        conn = MySQLdb.connect(host=host,
                               user=username,
                               passwd=password,
                               port=int(port),
                               connect_timeout=3,
                               charset='utf8')
        cur = conn.cursor()
        conn.select_db('information_schema')
        #cur.execute('flush hosts;')
        ############################# CHECK MYSQL ####################################################
        mysql_variables = func.get_mysql_variables(cur)
        mysql_status = func.get_mysql_status(cur)
        time.sleep(1)
        mysql_status_2 = func.get_mysql_status(cur)
        ############################# GET VARIABLES ###################################################
        version = func.get_item(mysql_variables, 'version')
        key_buffer_size = func.get_item(mysql_variables, 'key_buffer_size')
        sort_buffer_size = func.get_item(mysql_variables, 'sort_buffer_size')
        join_buffer_size = func.get_item(mysql_variables, 'join_buffer_size')
        max_connections = func.get_item(mysql_variables, 'max_connections')
        max_connect_errors = func.get_item(mysql_variables,
                                           'max_connect_errors')
        open_files_limit = func.get_item(mysql_variables, 'open_files_limit')
        table_open_cache = func.get_item(mysql_variables, 'table_open_cache')
        max_tmp_tables = func.get_item(mysql_variables, 'max_tmp_tables')
        max_heap_table_size = func.get_item(mysql_variables,
                                            'max_heap_table_size')
        max_allowed_packet = func.get_item(mysql_variables,
                                           'max_allowed_packet')
        ############################# GET INNODB INFO ##################################################
        #innodb variables
        innodb_version = func.get_item(mysql_variables, 'innodb_version')
        innodb_buffer_pool_instances = func.get_item(
            mysql_variables, 'innodb_buffer_pool_instances')
        innodb_buffer_pool_size = func.get_item(mysql_variables,
                                                'innodb_buffer_pool_size')
        innodb_doublewrite = func.get_item(mysql_variables,
                                           'innodb_doublewrite')
        innodb_file_per_table = func.get_item(mysql_variables,
                                              'innodb_file_per_table')
        innodb_flush_log_at_trx_commit = func.get_item(
            mysql_variables, 'innodb_flush_log_at_trx_commit')
        innodb_flush_method = func.get_item(mysql_variables,
                                            'innodb_flush_method')
        innodb_force_recovery = func.get_item(mysql_variables,
                                              'innodb_force_recovery')
        innodb_io_capacity = func.get_item(mysql_variables,
                                           'innodb_io_capacity')
        innodb_read_io_threads = func.get_item(mysql_variables,
                                               'innodb_read_io_threads')
        innodb_write_io_threads = func.get_item(mysql_variables,
                                                'innodb_write_io_threads')
        #innodb status
        innodb_buffer_pool_pages_total = int(
            func.get_item(mysql_status, 'Innodb_buffer_pool_pages_total'))
        innodb_buffer_pool_pages_data = int(
            func.get_item(mysql_status, 'Innodb_buffer_pool_pages_data'))
        innodb_buffer_pool_pages_dirty = int(
            func.get_item(mysql_status, 'Innodb_buffer_pool_pages_dirty'))
        innodb_buffer_pool_pages_flushed = int(
            func.get_item(mysql_status, 'Innodb_buffer_pool_pages_flushed'))
        innodb_buffer_pool_pages_free = int(
            func.get_item(mysql_status, 'Innodb_buffer_pool_pages_free'))
        innodb_buffer_pool_pages_misc = int(
            func.get_item(mysql_status, 'Innodb_buffer_pool_pages_misc'))
        innodb_page_size = int(func.get_item(mysql_status, 'Innodb_page_size'))
        innodb_pages_created = int(
            func.get_item(mysql_status, 'Innodb_pages_created'))
        innodb_pages_read = int(
            func.get_item(mysql_status, 'Innodb_pages_read'))
        innodb_pages_written = int(
            func.get_item(mysql_status, 'Innodb_pages_written'))
        innodb_row_lock_current_waits = int(
            func.get_item(mysql_status, 'Innodb_row_lock_current_waits'))
        #innodb persecond info
        innodb_buffer_pool_read_requests_persecond = int(
            func.get_item(
                mysql_status_2, 'Innodb_buffer_pool_read_requests')) - int(
                    func.get_item(mysql_status,
                                  'Innodb_buffer_pool_read_requests'))
        innodb_buffer_pool_reads_persecond = int(
            func.get_item(mysql_status_2, 'Innodb_buffer_pool_reads')) - int(
                func.get_item(mysql_status, 'Innodb_buffer_pool_reads'))
        innodb_buffer_pool_write_requests_persecond = int(
            func.get_item(
                mysql_status_2, 'Innodb_buffer_pool_write_requests')) - int(
                    func.get_item(mysql_status,
                                  'Innodb_buffer_pool_write_requests'))
        innodb_buffer_pool_pages_flushed_persecond = int(
            func.get_item(
                mysql_status_2, 'Innodb_buffer_pool_pages_flushed')) - int(
                    func.get_item(mysql_status,
                                  'Innodb_buffer_pool_pages_flushed'))
        innodb_rows_deleted_persecond = int(
            func.get_item(mysql_status_2, 'Innodb_rows_deleted')) - int(
                func.get_item(mysql_status, 'Innodb_rows_deleted'))
        innodb_rows_inserted_persecond = int(
            func.get_item(mysql_status_2, 'Innodb_rows_inserted')) - int(
                func.get_item(mysql_status, 'Innodb_rows_inserted'))
        innodb_rows_read_persecond = int(
            func.get_item(mysql_status_2, 'Innodb_rows_read')) - int(
                func.get_item(mysql_status, 'Innodb_rows_read'))
        innodb_rows_updated_persecond = int(
            func.get_item(mysql_status_2, 'Innodb_rows_updated')) - int(
                func.get_item(mysql_status, 'Innodb_rows_updated'))
        ############################# GET STATUS ##################################################
        connect = 1
        uptime = func.get_item(mysql_status, 'Uptime')
        open_files = func.get_item(mysql_status, 'Open_files')
        open_tables = func.get_item(mysql_status, 'Open_tables')
        threads_connected = func.get_item(mysql_status, 'Threads_connected')
        threads_running = func.get_item(mysql_status, 'Threads_running')
        threads_created = func.get_item(mysql_status, 'Threads_created')
        threads_cached = func.get_item(mysql_status, 'Threads_cached')
        threads_waits = mysql.get_waits(conn)
        connections = func.get_item(mysql_status, 'Connections')
        aborted_clients = func.get_item(mysql_status, 'Aborted_clients')
        aborted_connects = func.get_item(mysql_status, 'Aborted_connects')
        key_blocks_not_flushed = func.get_item(mysql_status,
                                               'Key_blocks_not_flushed')
        key_blocks_unused = func.get_item(mysql_status, 'Key_blocks_unused')
        key_blocks_used = func.get_item(mysql_status, 'Key_blocks_used')
        ############################# GET STATUS PERSECOND ##################################################
        connections_persecond = int(
            func.get_item(mysql_status_2, 'Connections')) - int(
                func.get_item(mysql_status, 'Connections'))
        bytes_received_persecond = (
            int(func.get_item(mysql_status_2, 'Bytes_received')) -
            int(func.get_item(mysql_status, 'Bytes_received'))) / 1024
        bytes_sent_persecond = (
            int(func.get_item(mysql_status_2, 'Bytes_sent')) -
            int(func.get_item(mysql_status, 'Bytes_sent'))) / 1024
        com_select_persecond = int(func.get_item(
            mysql_status_2, 'Com_select')) - int(
                func.get_item(mysql_status, 'Com_select'))
        com_insert_persecond = int(func.get_item(
            mysql_status_2, 'Com_insert')) - int(
                func.get_item(mysql_status, 'Com_insert'))
        com_update_persecond = int(func.get_item(
            mysql_status_2, 'Com_update')) - int(
                func.get_item(mysql_status, 'Com_update'))
        com_delete_persecond = int(func.get_item(
            mysql_status_2, 'Com_delete')) - int(
                func.get_item(mysql_status, 'Com_delete'))
        com_commit_persecond = int(func.get_item(
            mysql_status_2, 'Com_commit')) - int(
                func.get_item(mysql_status, 'Com_commit'))
        com_rollback_persecond = int(
            func.get_item(mysql_status_2, 'Com_rollback')) - int(
                func.get_item(mysql_status, 'Com_rollback'))
        questions_persecond = int(func.get_item(
            mysql_status_2, 'Questions')) - int(
                func.get_item(mysql_status, 'Questions'))
        queries_persecond = int(func.get_item(
            mysql_status_2, 'Queries')) - int(
                func.get_item(mysql_status, 'Queries'))
        transaction_persecond = (
            int(func.get_item(mysql_status_2, 'Com_commit')) +
            int(func.get_item(mysql_status_2, 'Com_rollback'))) - (
                int(func.get_item(mysql_status, 'Com_commit')) +
                int(func.get_item(mysql_status, 'Com_rollback')))
        created_tmp_disk_tables_persecond = int(
            func.get_item(mysql_status_2, 'Created_tmp_disk_tables')) - int(
                func.get_item(mysql_status, 'Created_tmp_disk_tables'))
        created_tmp_files_persecond = int(
            func.get_item(mysql_status_2, 'Created_tmp_files')) - int(
                func.get_item(mysql_status, 'Created_tmp_files'))
        created_tmp_tables_persecond = int(
            func.get_item(mysql_status_2, 'Created_tmp_tables')) - int(
                func.get_item(mysql_status, 'Created_tmp_tables'))
        table_locks_immediate_persecond = int(
            func.get_item(mysql_status_2, 'Table_locks_immediate')) - int(
                func.get_item(mysql_status, 'Table_locks_immediate'))
        table_locks_waited_persecond = int(
            func.get_item(mysql_status_2, 'Table_locks_waited')) - int(
                func.get_item(mysql_status, 'Table_locks_waited'))
        key_read_requests_persecond = int(
            func.get_item(mysql_status_2, 'Key_read_requests')) - int(
                func.get_item(mysql_status, 'Key_read_requests'))
        key_reads_persecond = int(func.get_item(
            mysql_status_2, 'Key_reads')) - int(
                func.get_item(mysql_status, 'Key_reads'))
        key_write_requests_persecond = int(
            func.get_item(mysql_status_2, 'Key_write_requests')) - int(
                func.get_item(mysql_status, 'Key_write_requests'))
        key_writes_persecond = int(func.get_item(
            mysql_status_2, 'Key_writes')) - int(
                func.get_item(mysql_status, 'Key_writes'))
        ############################# GET MYSQL HITRATE ##################################################
        if (string.atof(func.get_item(mysql_status, 'Qcache_hits')) +
                string.atof(func.get_item(mysql_status, 'Com_select'))) <> 0:
            query_cache_hitrate = string.atof(
                func.get_item(mysql_status, 'Qcache_hits')) / (
                    string.atof(func.get_item(mysql_status, 'Qcache_hits')) +
                    string.atof(func.get_item(mysql_status, 'Com_select')))
            query_cache_hitrate = "%9.2f" % query_cache_hitrate
        else:
            query_cache_hitrate = 0

        if string.atof(func.get_item(mysql_status, 'Connections')) <> 0:
            thread_cache_hitrate = 1 - string.atof(
                func.get_item(mysql_status, 'Threads_created')) / string.atof(
                    func.get_item(mysql_status, 'Connections'))
            thread_cache_hitrate = "%9.2f" % thread_cache_hitrate
        else:
            thread_cache_hitrate = 0

        if string.atof(func.get_item(mysql_status, 'Key_read_requests')) <> 0:
            key_buffer_read_rate = 1 - string.atof(
                func.get_item(mysql_status, 'Key_reads')) / string.atof(
                    func.get_item(mysql_status, 'Key_read_requests'))
            key_buffer_read_rate = "%9.2f" % key_buffer_read_rate
        else:
            key_buffer_read_rate = 0

        if string.atof(func.get_item(mysql_status, 'Key_write_requests')) <> 0:
            key_buffer_write_rate = 1 - string.atof(
                func.get_item(mysql_status, 'Key_writes')) / string.atof(
                    func.get_item(mysql_status, 'Key_write_requests'))
            key_buffer_write_rate = "%9.2f" % key_buffer_write_rate
        else:
            key_buffer_write_rate = 0

        if (string.atof(func.get_item(mysql_status, 'Key_blocks_used')) +
                string.atof(func.get_item(mysql_status,
                                          'Key_blocks_unused'))) <> 0:
            key_blocks_used_rate = string.atof(
                func.get_item(mysql_status, 'Key_blocks_used')
            ) / (string.atof(func.get_item(mysql_status, 'Key_blocks_used')) +
                 string.atof(func.get_item(mysql_status, 'Key_blocks_unused')))
            key_blocks_used_rate = "%9.2f" % key_blocks_used_rate
        else:
            key_blocks_used_rate = 0

        if (string.atof(func.get_item(
                mysql_status, 'Created_tmp_disk_tables')) + string.atof(
                    func.get_item(mysql_status, 'Created_tmp_tables'))) <> 0:
            created_tmp_disk_tables_rate = string.atof(
                func.get_item(mysql_status, 'Created_tmp_disk_tables')) / (
                    string.atof(
                        func.get_item(mysql_status, 'Created_tmp_disk_tables'))
                    + string.atof(
                        func.get_item(mysql_status, 'Created_tmp_tables')))
            created_tmp_disk_tables_rate = "%9.2f" % created_tmp_disk_tables_rate
        else:
            created_tmp_disk_tables_rate = 0

        if string.atof(max_connections) <> 0:
            connections_usage_rate = string.atof(
                threads_connected) / string.atof(max_connections)
            connections_usage_rate = "%9.2f" % connections_usage_rate
        else:
            connections_usage_rate = 0

        if string.atof(open_files_limit) <> 0:
            open_files_usage_rate = string.atof(open_files) / string.atof(
                open_files_limit)
            open_files_usage_rate = "%9.2f" % open_files_usage_rate
        else:
            open_files_usage_rate = 0

        if string.atof(table_open_cache) <> 0:
            open_tables_usage_rate = string.atof(open_tables) / string.atof(
                table_open_cache)
            open_tables_usage_rate = "%9.2f" % open_tables_usage_rate
        else:
            open_tables_usage_rate = 0

        #repl
        slave_status = cur.execute('show slave status;')
        if slave_status <> 0:
            role = 'slave'
            role_new = 's'
        else:
            role = 'master'
            role_new = 'm'

        ############################# INSERT INTO SERVER ##################################################
        sql = "insert into mysql_status(server_id,host,port,tags,connect,role,uptime,version,max_connections,max_connect_errors,open_files_limit,table_open_cache,max_tmp_tables,max_heap_table_size,max_allowed_packet,open_files,open_tables,threads_connected,threads_running,threads_waits,threads_created,threads_cached,connections,aborted_clients,aborted_connects,connections_persecond,bytes_received_persecond,bytes_sent_persecond,com_select_persecond,com_insert_persecond,com_update_persecond,com_delete_persecond,com_commit_persecond,com_rollback_persecond,questions_persecond,queries_persecond,transaction_persecond,created_tmp_tables_persecond,created_tmp_disk_tables_persecond,created_tmp_files_persecond,table_locks_immediate_persecond,table_locks_waited_persecond,key_buffer_size,sort_buffer_size,join_buffer_size,key_blocks_not_flushed,key_blocks_unused,key_blocks_used,key_read_requests_persecond,key_reads_persecond,key_write_requests_persecond,key_writes_persecond,innodb_version,innodb_buffer_pool_instances,innodb_buffer_pool_size,innodb_doublewrite,innodb_file_per_table,innodb_flush_log_at_trx_commit,innodb_flush_method,innodb_force_recovery,innodb_io_capacity,innodb_read_io_threads,innodb_write_io_threads,innodb_buffer_pool_pages_total,innodb_buffer_pool_pages_data,innodb_buffer_pool_pages_dirty,innodb_buffer_pool_pages_flushed,innodb_buffer_pool_pages_free,innodb_buffer_pool_pages_misc,innodb_page_size,innodb_pages_created,innodb_pages_read,innodb_pages_written,innodb_row_lock_current_waits,innodb_buffer_pool_pages_flushed_persecond,innodb_buffer_pool_read_requests_persecond,innodb_buffer_pool_reads_persecond,innodb_buffer_pool_write_requests_persecond,innodb_rows_read_persecond,innodb_rows_inserted_persecond,innodb_rows_updated_persecond,innodb_rows_deleted_persecond,query_cache_hitrate,thread_cache_hitrate,key_buffer_read_rate,key_buffer_write_rate,key_blocks_used_rate,created_tmp_disk_tables_rate,connections_usage_rate,open_files_usage_rate,open_tables_usage_rate) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);"
        param = (
            server_id, host, port, tags, connect, role, uptime, version,
            max_connections, max_connect_errors, open_files_limit,
            table_open_cache, max_tmp_tables, max_heap_table_size,
            max_allowed_packet, open_files, open_tables, threads_connected,
            threads_running, threads_waits, threads_created, threads_cached,
            connections, aborted_clients, aborted_connects,
            connections_persecond, bytes_received_persecond,
            bytes_sent_persecond, com_select_persecond, com_insert_persecond,
            com_update_persecond, com_delete_persecond, com_commit_persecond,
            com_rollback_persecond, questions_persecond, queries_persecond,
            transaction_persecond, created_tmp_tables_persecond,
            created_tmp_disk_tables_persecond, created_tmp_files_persecond,
            table_locks_immediate_persecond, table_locks_waited_persecond,
            key_buffer_size, sort_buffer_size, join_buffer_size,
            key_blocks_not_flushed, key_blocks_unused, key_blocks_used,
            key_read_requests_persecond, key_reads_persecond,
            key_write_requests_persecond, key_writes_persecond, innodb_version,
            innodb_buffer_pool_instances, innodb_buffer_pool_size,
            innodb_doublewrite, innodb_file_per_table,
            innodb_flush_log_at_trx_commit, innodb_flush_method,
            innodb_force_recovery, innodb_io_capacity, innodb_read_io_threads,
            innodb_write_io_threads, innodb_buffer_pool_pages_total,
            innodb_buffer_pool_pages_data, innodb_buffer_pool_pages_dirty,
            innodb_buffer_pool_pages_flushed, innodb_buffer_pool_pages_free,
            innodb_buffer_pool_pages_misc, innodb_page_size,
            innodb_pages_created, innodb_pages_read, innodb_pages_written,
            innodb_row_lock_current_waits,
            innodb_buffer_pool_pages_flushed_persecond,
            innodb_buffer_pool_read_requests_persecond,
            innodb_buffer_pool_reads_persecond,
            innodb_buffer_pool_write_requests_persecond,
            innodb_rows_read_persecond, innodb_rows_inserted_persecond,
            innodb_rows_updated_persecond, innodb_rows_deleted_persecond,
            query_cache_hitrate, thread_cache_hitrate, key_buffer_read_rate,
            key_buffer_write_rate, key_blocks_used_rate,
            created_tmp_disk_tables_rate, connections_usage_rate,
            open_files_usage_rate, open_tables_usage_rate)
        func.mysql_exec(sql, param)
        func.update_db_status_init(role_new, version, host, port, tags)

        #check mysql process: record active (non-Sleep) sessions outside information_schema
        processlist = cur.execute(
            "select * from information_schema.processlist where DB !='information_schema' and command !='Sleep';"
        )
        if processlist:
            for line in cur.fetchall():
                sql = "insert into mysql_processlist(server_id,host,port,tags,pid,p_user,p_host,p_db,command,time,status,info) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
                param = (server_id, host, port, tags, line[0], line[1],
                         line[2], line[3], line[4], line[5], line[6], line[7])
                func.mysql_exec(sql, param)

        #check mysql connected: connection counts grouped by client host
        connected = cur.execute(
            "select SUBSTRING_INDEX(host,':',1) as connect_server, user connect_user,db connect_db, count(SUBSTRING_INDEX(host,':',1)) as connect_count  from information_schema.processlist where db is not null and db!='information_schema' and db !='performance_schema' group by connect_server ;"
        )
        if connected:
            for line in cur.fetchall():
                sql = "insert into mysql_connected(server_id,host,port,tags,connect_server,connect_user,connect_db,connect_count) values(%s,%s,%s,%s,%s,%s,%s,%s);"
                param = (server_id, host, port, tags, line[0], line[1],
                         line[2], line[3])
                func.mysql_exec(sql, param)

        #check mysql replication
        master_thread = cur.execute(
            "select * from information_schema.processlist where COMMAND = 'Binlog Dump' or COMMAND = 'Binlog Dump GTID';"
        )
        slave_status = cur.execute('show slave status;')
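        # datalist mirrors the column order of the mysql_replication insert below:
        # [is_master, is_slave, gtid_mode, read_only, master_server, master_port,
        #  slave_io_run, slave_sql_run, delay, current_binlog_file, current_binlog_pos,
        #  master_binlog_file, master_binlog_pos, master_binlog_space]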
        datalist = []
        # is_master: any 'Binlog Dump' thread means at least one slave is attached
        if master_thread >= 1:
            datalist.append(1)
        else:
            datalist.append(0)
        # is_slave: SHOW SLAVE STATUS returned a row
        if slave_status != 0:
            datalist.append(1)
        else:
            datalist.append(0)

        if slave_status != 0:
            gtid_mode = cur.execute(
                "select * from information_schema.global_variables where variable_name='gtid_mode';"
            )
            result = cur.fetchone()
            if result:
                gtid_mode = result[1]
            else:
                gtid_mode = 'OFF'
            datalist.append(gtid_mode)
            read_only = cur.execute(
                "select * from information_schema.global_variables where variable_name='read_only';"
            )
            result = cur.fetchone()
            datalist.append(result[1])
            cur.execute('show slave status;')
            result = cur.fetchone()
            # column positions below assume a MySQL 5.5/5.6-style SHOW SLAVE STATUS layout
            master_server = result[1]        # Master_Host
            master_port = result[3]          # Master_Port
            slave_io_run = result[10]        # Slave_IO_Running
            slave_sql_run = result[11]       # Slave_SQL_Running
            delay = result[32]               # Seconds_Behind_Master
            current_binlog_file = result[9]  # Relay_Master_Log_File
            current_binlog_pos = result[21]  # Exec_Master_Log_Pos
            master_binlog_file = result[5]   # Master_Log_File
            master_binlog_pos = result[6]    # Read_Master_Log_Pos

            datalist.append(master_server)
            datalist.append(master_port)
            datalist.append(slave_io_run)
            datalist.append(slave_sql_run)
            datalist.append(delay)
            datalist.append(current_binlog_file)
            datalist.append(current_binlog_pos)
            datalist.append(master_binlog_file)
            datalist.append(master_binlog_pos)
            datalist.append(0)    # master_binlog_space is not collected on a slave

        elif master_thread >= 1:
            gtid_mode = cur.execute(
                "select * from information_schema.global_variables where variable_name='gtid_mode';"
            )
            result = cur.fetchone()
            if result:
                gtid_mode = result[1]
            else:
                gtid_mode = 'OFF'
            datalist.append(gtid_mode)
            read_only = cur.execute(
                "select * from information_schema.global_variables where variable_name='read_only';"
            )
            result = cur.fetchone()
            datalist.append(result[1])
            # slave-only columns (master_server .. current_binlog_pos) do not apply here
            datalist.extend(['---'] * 7)
            cur.execute('show master status;')
            master_result = cur.fetchone()
            datalist.append(master_result[0])    # File -> master_binlog_file
            datalist.append(master_result[1])    # Position -> master_binlog_pos
            # master_binlog_space: total size of all binary logs
            cur.execute('show master logs;')
            binlogs = 0
            for row in cur.fetchall():
                binlogs = binlogs + row[1]
            datalist.append(binlogs)
        else:
            datalist = []

        result = datalist
        if result:
            sql = "insert into mysql_replication(server_id,tags,host,port,is_master,is_slave,gtid_mode,read_only,master_server,master_port,slave_io_run,slave_sql_run,delay,current_binlog_file,current_binlog_pos,master_binlog_file,master_binlog_pos,master_binlog_space) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
            param = (server_id, tags, host, port, result[0], result[1],
                     result[2], result[3], result[4], result[5], result[6],
                     result[7], result[8], result[9], result[10], result[11],
                     result[12], result[13])
            func.mysql_exec(sql, param)

        cur.close()
        conn.close()

    except MySQLdb.Error, e:
        logger_msg = "check mysql %s:%s failure: %d %s" % (
            host, port, e.args[0], e.args[1])
        logger.warning(logger_msg)
        logger_msg = "check mysql %s:%s failure: sleep 3 seconds and check again." % (
            host, port)
        logger.warning(logger_msg)
        time.sleep(3)
        try:
            conn = MySQLdb.connect(host=host,
                                   user=username,
                                   passwd=password,
                                   port=int(port),
                                   connect_timeout=3,
                                   charset='utf8')
            cur = conn.cursor()
            conn.select_db('information_schema')
        except MySQLdb.Error, e:
            logger_msg = "check mysql second %s:%s failure: %d %s" % (
                host, port, e.args[0], e.args[1])
            logger.warning(logger_msg)
            connect = 0
            sql = "insert into mysql_status(server_id,host,port,tags,connect) values(%s,%s,%s,%s,%s)"
            param = (server_id, host, port, tags, connect)
            func.mysql_exec(sql, param)
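
# ---------------------------------------------------------------------------
# Illustration only: func.mysql_exec() is provided by this project's func
# module. The sketch below is an assumption of its behaviour (run one
# parameterized statement against the monitoring repository and commit);
# the monitor_* connection parameters are hypothetical placeholders.
def _mysql_exec_sketch(sql, param,
                       monitor_host='127.0.0.1', monitor_port=3306,
                       monitor_user='monitor', monitor_password=''):
    conn = MySQLdb.connect(host=monitor_host, user=monitor_user,
                           passwd=monitor_password, port=int(monitor_port),
                           connect_timeout=3, charset='utf8')
    try:
        cur = conn.cursor()
        if param:
            cur.execute(sql, param)
        else:
            cur.execute(sql)
        conn.commit()
        cur.close()
    finally:
        conn.close()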
Exemple #46
0
def check_redis(host,port,passwd,server_id,tags):
    try:
        r=redis.StrictRedis(host=host,port=port,password=passwd,db=0,socket_timeout=3,charset='utf-8') 
        info=r.info()
        time.sleep(1)
        info_2=r.info()
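        # two INFO snapshots taken one second apart; the delta of
        # total_commands_processed between them is used below to derive
        # current_commands_processed (commands executed during that second)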
        # Server
        redis_version = info['redis_version']
        redis_git_sha1 = info['redis_git_sha1']
        redis_git_dirty = info['redis_git_dirty']
        arch_bits = info['arch_bits']
        multiplexing_api = info['multiplexing_api']
        gcc_version = info['gcc_version']
        process_id = info['process_id']
        uptime_in_seconds = info['uptime_in_seconds']
        uptime_in_days = info['uptime_in_days']
        lru_clock = info['lru_clock']
        os = check_value(info,'os')
        redis_mode = check_value(info,'redis_mode')
        hz = check_value(info,'hz')
        run_id = check_value(info,'run_id')
        tcp_port = check_value(info,'tcp_port')

        # Clients 
        connected_clients = info['connected_clients']
        client_longest_output_list = info['client_longest_output_list']
        client_biggest_input_buf = info['client_biggest_input_buf']
        blocked_clients = info['blocked_clients']
        # Memory
        used_memory = info['used_memory']
        used_memory_human = info['used_memory_human']
        used_memory_rss = info['used_memory_rss']
        used_memory_peak = info['used_memory_peak']
        used_memory_peak_human = info['used_memory_peak_human']
        used_memory_lua = check_value(info,'used_memory_lua')
        mem_fragmentation_ratio = info['mem_fragmentation_ratio']
        mem_allocator = info['mem_allocator']
        # Persistence
        loading = info['loading']
        rdb_changes_since_last_save = check_value(info,'rdb_changes_since_last_save')
        rdb_bgsave_in_progress = check_value(info,'rdb_bgsave_in_progress')
        rdb_last_save_time = check_value(info,'rdb_last_save_time')
        rdb_last_bgsave_status = check_value(info,'rdb_last_bgsave_status')
        rdb_last_bgsave_time_sec = check_value(info,'rdb_last_bgsave_time_sec')
        rdb_current_bgsave_time_sec = check_value(info,'rdb_current_bgsave_time_sec')
        aof_enabled = check_value(info,'aof_enabled')
        aof_rewrite_in_progress = check_value(info,'aof_rewrite_in_progress')
        aof_rewrite_scheduled = check_value(info,'aof_rewrite_scheduled')
        aof_last_rewrite_time_sec = check_value(info,'aof_last_rewrite_time_sec')
        aof_current_rewrite_time_sec = check_value(info,'aof_current_rewrite_time_sec')
        aof_last_bgrewrite_status = check_value(info,'aof_last_bgrewrite_status')
        # Stats
        total_connections_received = check_value(info,'total_connections_received')
        total_commands_processed = check_value(info,'total_commands_processed')
        current_commands_processed = int(info_2['total_commands_processed'] - info['total_commands_processed'])
        instantaneous_ops_per_sec = check_value(info,'instantaneous_ops_per_sec')
        rejected_connections = check_value(info,'rejected_connections')

        expired_keys = info['expired_keys']
        evicted_keys = info['evicted_keys']
        keyspace_hits = info['keyspace_hits']
        keyspace_misses = info['keyspace_misses']
        pubsub_channels = info['pubsub_channels']
        pubsub_patterns = info['pubsub_patterns']
        latest_fork_usec = info['latest_fork_usec']
        # Replication
        role = info['role']
        connected_slaves = info['connected_slaves']
        
        # CPU
        used_cpu_sys = info['used_cpu_sys']
        used_cpu_user = info['used_cpu_user']
        used_cpu_sys_children = info['used_cpu_sys_children']
        used_cpu_user_children = info['used_cpu_user_children']

        # replication
        if role == 'slave':
           #print info
           master_host = info['master_host']
           master_port = info['master_port']
           master_link_status = info['master_link_status']
           master_last_io_seconds_ago = info['master_last_io_seconds_ago']
           master_sync_in_progress = info['master_sync_in_progress']
           #slave_repl_offset = info['slave_repl_offset']
           slave_priority = check_value(info,'slave_priority')
           slave_read_only = check_value(info,'slave_read_only')
           master_server_id = func.mysql_query("SELECT id FROM db_servers_redis WHERE host='%s' AND port='%s' limit 1;" %(master_host,master_port))
           # fall back to -1 when the master is not registered in db_servers_redis
           master_server_id = master_server_id[0][0] if master_server_id else '-1'
           role_new='s'
        else:
           master_host = '-1'
           master_port = '-1'
           master_link_status= '-1'
           master_last_io_seconds_ago = '-1'
           master_sync_in_progress = '-1'
           #slave_repl_offset = '---'
           slave_priority = '-1'
           slave_read_only = '-1'
           master_server_id = '-1'
           role_new='m'

        #add redis_status
        connect=1
        sql = "insert into redis_status(server_id,host,port,tags,redis_role,connect,redis_version,redis_git_sha1,redis_git_dirty,redis_mode,os,arch_bits,multiplexing_api,gcc_version,process_id,run_id,tcp_port,uptime_in_seconds,uptime_in_days,hz,lru_clock,connected_clients,client_longest_output_list,client_biggest_input_buf,blocked_clients,used_memory,used_memory_human,used_memory_rss,used_memory_peak,used_memory_peak_human,used_memory_lua,mem_fragmentation_ratio,mem_allocator,loading,rdb_changes_since_last_save,rdb_bgsave_in_progress,rdb_last_save_time,rdb_last_bgsave_status,rdb_last_bgsave_time_sec,rdb_current_bgsave_time_sec,aof_enabled,aof_rewrite_in_progress,aof_rewrite_scheduled,aof_last_rewrite_time_sec,aof_current_rewrite_time_sec,aof_last_bgrewrite_status,total_connections_received,total_commands_processed,current_commands_processed,instantaneous_ops_per_sec,rejected_connections,expired_keys,evicted_keys,keyspace_hits,keyspace_misses,pubsub_channels,pubsub_patterns,latest_fork_usec,used_cpu_sys,used_cpu_user,used_cpu_sys_children,used_cpu_user_children) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);"
        param = (server_id,host,port,tags,role,connect,redis_version,redis_git_sha1,redis_git_dirty,redis_mode,os,arch_bits,multiplexing_api,gcc_version,process_id,run_id,tcp_port,uptime_in_seconds,uptime_in_days,hz,lru_clock,connected_clients,client_longest_output_list,client_biggest_input_buf,blocked_clients,used_memory,used_memory_human,used_memory_rss,used_memory_peak,used_memory_peak_human,used_memory_lua,mem_fragmentation_ratio,mem_allocator,loading,rdb_changes_since_last_save,rdb_bgsave_in_progress,rdb_last_save_time,rdb_last_bgsave_status,rdb_last_bgsave_time_sec,rdb_current_bgsave_time_sec,aof_enabled,aof_rewrite_in_progress,aof_rewrite_scheduled,aof_last_rewrite_time_sec,aof_current_rewrite_time_sec,aof_last_bgrewrite_status,total_connections_received,total_commands_processed,current_commands_processed,instantaneous_ops_per_sec,rejected_connections,expired_keys,evicted_keys,keyspace_hits,keyspace_misses,pubsub_channels,pubsub_patterns,latest_fork_usec,used_cpu_sys,used_cpu_user,used_cpu_sys_children,used_cpu_user_children)
        func.mysql_exec(sql,param)

        #add redis_replication
        sql_1 = "insert into redis_replication(server_id,tags,host,port,role,master_server_id,master_host,master_port,master_link_status,master_last_io_seconds_ago,master_sync_in_progress,slave_priority,slave_read_only,connected_slaves) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);"
        param_1 = (server_id,tags,host,port,role,master_server_id,master_host,master_port,master_link_status,master_last_io_seconds_ago,master_sync_in_progress,slave_priority,slave_read_only,connected_slaves)
        func.mysql_exec(sql_1,param_1)
        func.update_db_status_init(role_new,redis_version,host,port,tags)

    except Exception, e:
        logger_msg="check redis %s:%s : %s" %(host,port,e)
        logger.warning(logger_msg)
   
        try:
            connect=0
            sql="insert into redis_status(server_id,host,port,tags,connect) values(%s,%s,%s,%s,%s)"
            param=(server_id,host,port,tags,connect)
            func.mysql_exec(sql,param)

        except Exception, e:
            logger.error(e)
            sys.exit(1)
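
# ---------------------------------------------------------------------------
# Illustration only: check_value() is defined elsewhere in this project.
# A minimal sketch of the assumed behaviour: return an INFO field when it is
# present, otherwise a placeholder, so older Redis versions that lack some
# fields do not raise KeyError. The default of '-1' is an assumption.
def _check_value_sketch(info, key, default='-1'):
    # info is the dict returned by redis.StrictRedis(...).info()
    if key in info:
        return info[key]
    return default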