def get_oracle_alert(tags, db_conn, oracle_params, linux_params):
    db_version = oracle_params['db_version']
    host = oracle_params['host']
    # fetch the stored alert log path and read offset
    sql = "select alert_log,alert_log_seek from oracle_list where tags='{}' ".format(
        tags)
    alert_log, alert_log_seek = mysql_query(sql)[0]
    if not alert_log:
        sql = "select value from v$diag_info where name = 'Diag Trace'"
        alert_dir = query_one(db_conn, sql)
        # fetch the instance name
        sql = "select instance_name from v$instance"
        instance_name = query_one(db_conn, sql)
        alert_log = '{}/alert_{}.log'.format(alert_dir[0], instance_name[0])
        alert_log_seek = 0
        sql = "delete from alert_log where tags='{}' and type=1 ".format(tags)
        mysql_exec(sql)
    # read newly appended log content over SSH
    linux_oper = LinuxBase(linux_params)
    # parse the log
    alert_content = linux_oper.readfile(alert_log, seek=alert_log_seek)
    alert_log_seek = parse_oracle_alert_logs(tags, host, alert_content,
                                             db_version)
    # update the log path and read offset in the config table
    sql = "update oracle_list set alert_log='{}',alert_log_seek={} where tags='{}' ".format(
        alert_log, alert_log_seek, tags)
    mysql_exec(sql)
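
# Illustrative sketch (not part of the project): get_oracle_alert() relies on
# LinuxBase.readfile() returning only the bytes appended after the stored
# offset, so each run parses just the new alert-log lines. Assuming the SSH
# layer is paramiko, an incremental read could look like this; read_remote_tail
# and the rotation handling are assumptions, not the real implementation.
import paramiko


def read_remote_tail(hostname, port, username, password, path, seek=0):
    client = paramiko.SSHClient()
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    client.connect(hostname, port=port, username=username, password=password)
    try:
        sftp = client.open_sftp()
        size = sftp.stat(path).st_size
        if seek > size:  # file shrank (rotated/truncated): start from the top
            seek = 0
        with sftp.open(path, 'r') as remote_file:
            remote_file.seek(seek)
            content = remote_file.read().decode('utf-8', errors='replace')
        return content, size  # size is the offset to store for the next run
    finally:
        client.close()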
def log(self, log_content):
    log_level = 'info'
    log_type = 'Oracle RAC install'
    current_time = now_local()
    print('{}: {}'.format(current_time, log_content))
    sql = "insert into setup_log(log_type,log_time,log_level,log_content) " \
          "values(%s,%s,%s,%s)"
    values = (log_type, current_time, log_level, log_content)
    mysql_exec(sql, values)
def get_mysql_slowquery(tags,mysql_params,linux_params):
    host = mysql_params['host']
    # slow query log location
    sql = "select slowquery_log,slowquery_log_seek from mysql_list where tags='{}' ".format(tags)
    slowquery_log,slowquery_log_seek = mysql_query(sql)[0]
    if not slowquery_log:
        slowquery_log = Mysql_Do(mysql_params).get_para('slow_query_log_file')
        slowquery_log_seek = 0
    # get slowquery log content
    linux_oper = LinuxBase(linux_params)
    slowquery_content = linux_oper.readfile(slowquery_log,seek=slowquery_log_seek)
    # parse log
    slowquery_log_seek = parse_mysql_slowquery_logs(tags,host,slowquery_content)
    # update the slow query log path and read offset in mysql_list
    sql = "update mysql_list set slowquery_log='{}',slowquery_log_seek={} where tags='{}' " .format(slowquery_log,slowquery_log_seek,tags)
    mysql_exec(sql)
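
# Illustrative sketch (not the project's parser): parse_mysql_slowquery_logs()
# is assumed to split the slow log on '# Time:' headers and extract the fields
# that save_data() below consumes. The regexes follow the standard MySQL
# slow-log entry format; parse_slowquery_entry is a hypothetical helper name.
import re


def parse_slowquery_entry(entry):
    meta = {}
    m = re.search(r'# Time:\s+(\S+)', entry)
    if m:
        meta['start_time'] = m.group(1)
    m = re.search(r'# User@Host:.*?@\s*(\S*)\s*\[([\d.]*)\](?:\s+Id:\s*(\d+))?', entry)
    if m:
        meta['host'] = m.group(2) or m.group(1)  # client IP, else hostname
        meta['thread_id'] = m.group(3) or ''
    m = re.search(r'# Query_time:\s*([\d.]+)\s+Lock_time:\s*([\d.]+)\s+'
                  r'Rows_sent:\s*(\d+)\s+Rows_examined:\s*(\d+)', entry)
    if m:
        meta['query_time'], meta['lock_time'] = m.group(1), m.group(2)
        meta['rows_sent'], meta['rows_examined'] = m.group(3), m.group(4)
    m = re.search(r'^use\s+(\w+);', entry, re.MULTILINE | re.IGNORECASE)
    if m:
        meta['db_name'] = m.group(1)
    sql_lines = [line for line in entry.splitlines()
                 if line and not line.startswith('#')
                 and not line.startswith('SET timestamp')]
    meta['sql_text'] = '\n'.join(sql_lines)
    return meta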
def save_mysql_alert_log(tags, host, log_meta):
    # insert the parsed log entry once if its content matches any MySQL keyword
    if not log_meta:
        return
    check_time = now()
    for key in MysqlKeyWordList:
        if key in log_meta['log_content']:
            log_time = log_meta.get('log_time', '')
            sql = "insert into alert_log(tags,host,type,log_time,log_level,log_content,check_time) " \
                  "values(%s,%s,%s,%s,%s,%s,%s)"
            values = (tags, host, '2', log_time, log_meta['log_level'],
                      log_meta['log_content'], check_time)
            mysql_exec(sql, values)
            break
def get_mysql_alert(tags,mysql_params,linux_params):
    host = mysql_params['host']
    # alert log location and read offset
    sql = "select alert_log,alert_log_seek from mysql_list where tags='{}' ".format(tags)
    alert_log,alert_log_seek = mysql_query(sql)[0]
    if not alert_log:
        alert_log = Mysql_Do(mysql_params).get_para('log_error')
        alert_log_seek = 0
        sql = "delete from alert_log where tags='{}' and type=2 ".format(tags)
        mysql_exec(sql)
    # get alert log content
    linux_oper = LinuxBase(linux_params)
    alert_content = linux_oper.readfile(alert_log,seek=alert_log_seek)
    # parse log
    alert_log_seek = parse_mysql_alert_logs(tags,host,alert_content)
    # update the alert log path and read offset in mysql_list
    sql = "update mysql_list set alert_log='{}',alert_log_seek={} where tags='{}' " .format(alert_log,alert_log_seek,tags)
    mysql_exec(sql)
def save_oracle_alert_log(tags, host, log_meta):
    # insert the parsed log entry once if its content matches any Oracle keyword
    if not log_meta:
        return
    check_time = now()
    for key in OracleKeyWordList:
        if key in log_meta['log_content']:
            log_time = log_meta.get('log_time', '')
            sql = "insert into alert_log(tags,host,type,log_time,log_level,log_content,check_time) " \
                  "values(%s,%s,%s,%s,%s,%s,%s)"
            values = (tags, host, '1', log_time, log_meta['log_level'],
                      log_meta['log_content'], check_time)
            mysql_exec(sql, values)
            break
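
# The keyword scan above is assumed to look for severity markers in the alert
# log text; a purely hypothetical illustration of what such a list might hold
# (the real OracleKeyWordList is defined elsewhere in the project):
# OracleKeyWordList = ['ORA-', 'Deadlock', 'Checkpoint not complete',
#                      'Fatal NI connect error', 'terminating the instance']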
def get_redis_log(tags,redis_params,linux_params):
    host = redis_params['host']
    # log file location and read offset
    sql = "select log,log_seek from redis_list where tags='{}' ".format(tags)
    log,log_seek = mysql_query(sql)[0]
    if not log:
        redis_conn = RedisBase(redis_params).connection()
        log_dir = redis_conn.config_get('dir')['dir']
        log = redis_conn.config_get('logfile')['logfile']
        log = os.path.join(log_dir, log)
        log_seek = 0
        sql = "delete from alert_log where tags='{}' and type=3 ".format(tags)
        mysql_exec(sql)
    # get log content
    linux_oper = LinuxBase(linux_params)
    alert_content = linux_oper.readfile(log,seek=log_seek)
    # parse log
    log_seek = parse_redis_logs(tags,host,alert_content)
    # update the log path and read offset in redis_list
    sql = "update redis_list set log='{}',log_seek={} where tags='{}' " .format(log,log_seek,tags)
    mysql_exec(sql)
def check_alarm():
    alarm_time = now()
    checklog.logger.info('initializing the alarm info table')
    mysql_exec('insert into alarm_info_his select * from alarm_info')
    mysql_exec('delete from alarm_info')
    check_list = mysql_query(
        "select name,judge_value,judge_sql,judge_table,conf_table,conf_column from alarm_conf where judge_sql is not null and judge_table is not null"
    )
    for each_check in check_list:
        alarm_name, judge_value, judge_sql, judge_table, conf_table, conf_column = each_check
        checklog.logger.info("starting alarm check: {}".format(alarm_name))
        select_sql = "select count(*) from {}".format(judge_table)
        select_res = mysql_query(select_sql)
        if select_res[0][0] == 0:
            checklog.logger.info("%s: no data collected" % alarm_name)
        else:
            is_judge_sql = ' tags in (select tags from {} where {} =1)'.format(
                conf_table, conf_column)
            judge_sql = judge_sql % (
                judge_value,
                is_judge_sql) if judge_value else judge_sql % is_judge_sql
            check_res = mysql_query(judge_sql)
            if not check_res:
                checklog.logger.info("{}: alarm check passed".format(alarm_name))
            else:
                for each in check_res:
                    tags, url, alarm_content = each
                    alarm_title = tags + ':' + alarm_name
                    insert_sql = "insert into alarm_info (tags,url,alarm_type,alarm_header,alarm_content,alarm_time) values('{}','{}','{}','{}','{}','{}') ".format(
                        tags, url, alarm_name, alarm_title, alarm_content,
                        alarm_time)
                    checklog.logger.warning(alarm_content)
                    mysql_exec(insert_sql)
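
# A sketch of the same alarm insert done with driver-side parameters instead
# of string formatting; mysql_exec() is already called with a values tuple
# elsewhere in this module, so the pattern fits, and it avoids quoting problems
# when alarm_content itself contains quotes. save_alarm is a hypothetical
# helper, not part of the project:
def save_alarm(tags, url, alarm_name, alarm_title, alarm_content, alarm_time):
    insert_sql = ("insert into alarm_info "
                  "(tags,url,alarm_type,alarm_header,alarm_content,alarm_time) "
                  "values (%s,%s,%s,%s,%s,%s)")
    mysql_exec(insert_sql, (tags, url, alarm_name, alarm_title,
                            alarm_content, alarm_time))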
def save_data(host,tags,SQL_META):
    check_time = now()
    if SQL_META:
        start_time = SQL_META['start_time']
        host_client = SQL_META['host']
        db_name = ''
        if 'db_name' in SQL_META:
            db_name = SQL_META['db_name']
        sql_text = SQL_META['sql_text']
        query_time = float(SQL_META['query_time'])
        lock_time = float(SQL_META['lock_time'])
        rows_examined = int(SQL_META['rows_examined'])
        rows_sent = int(SQL_META['rows_sent'])
        thread_id = SQL_META['thread_id']

        if not sql_text.startswith('commit'):
            sql = "insert into mysql_slowquery(host,tags,start_time,client_host,db_name,sql_text,query_time,lock_time," \
                  "rows_examined,rows_sent,thread_id,check_time) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"

            values = (
                host, tags, start_time, host_client, db_name, sql_text, query_time, lock_time,
                rows_examined,rows_sent, thread_id,check_time)

            mysql_exec(sql, values)
def clear_log(self):
    sql = 'truncate table setup_log'
    mysql_exec(sql)
def check_mysql(tags, mysql_params):
    check_time = now()
    host = mysql_params['host']
    port = mysql_params['port']
    linux_params = {
        'hostname': mysql_params['host'],
        'port': mysql_params['sshport_os'],
        'username': mysql_params['user_os'],
        'password': mysql_params['password_os']
    }
    # create connection
    mysql_conn = MysqlBase(mysql_params).connection()

    if mysql_conn:
        checklog.logger.info('{}: start collecting MySQL monitoring data'.format(tags))
        # get mysqlstat data
        mysqlstat = MySQLStat(mysql_params, mysql_conn)
        mysqlstat.init_stat_vals()
        mysqlstat.get_mysql_stat()
        time.sleep(1)
        mysqlstat.get_mysql_stat()
        mysqldata = mysqlstat.format_stat()
        # get mysql params
        mysqlparams = mysqlstat.get_mysql_params()
        # derived metrics
        updays = round(float(mysqldata['uptime']) / 86400, 2)
        threads_waited = mysqlstat.get_threads_waited()
        total_rows, data_size, index_size = mysqlstat.get_totalsize()
        innodb_buffer_pool_read_requests = mysqldata[
            'innodb_buffer_pool_read_requests']
        innodb_buffer_pool_reads = mysqldata['innodb_buffer_pool_reads']
        innodb_buffer_pool_hit = (
            1 - innodb_buffer_pool_reads / innodb_buffer_pool_read_requests
        ) * 100 if innodb_buffer_pool_read_requests != 0 else 100

        checklog.logger.info('{}: writing mysql_stat data'.format(tags))
        clear_table(tags, 'mysql_stat')

        insert_data_sql = "insert into mysql_stat(tags,host,port,version,updays,basedir,datadir,slow_query_log,slow_query_log_file,log_bin," \
                          "max_connections,max_connect_errors,total_rows,data_size,index_size,threads_connected,threads_running,threads_waited,threads_created,threads_cached," \
                          "qps,tps,bytes_received,bytes_sent,open_files_limit,open_files,table_open_cache,open_tables,key_buffer_size,sort_buffer_size,join_buffer_size," \
                          "key_blocks_unused,key_blocks_used,key_blocks_not_flushed,mysql_sel,mysql_ins,mysql_upd,mysql_del,select_scan,slow_queries," \
                          "key_read_requests,key_reads,key_write_requests,Key_writes,innodb_buffer_pool_size,innodb_buffer_pool_pages_total,innodb_buffer_pool_pages_data," \
                          "innodb_buffer_pool_pages_dirty,innodb_buffer_pool_pages_free,innodb_buffer_pool_hit,innodb_io_capacity," \
                          "innodb_read_io_threads,innodb_write_io_threads,innodb_rows_deleted,innodb_rows_inserted,innodb_rows_read,innodb_rows_updated," \
                          "innodb_row_lock_waits,innodb_row_lock_time_avg,innodb_buffer_pool_pages_flushed,innodb_data_read,innodb_data_written,innodb_data_reads," \
                          "innodb_data_writes,innodb_log_writes,innodb_data_fsyncs,innodb_os_log_written,status,check_time) " \
                          "values ('{tags}','{host}','{port}','{version}','{updays}','{basedir}','{datadir}','{slow_query_log}','{slow_query_log_file}','{log_bin}'," \
                          "{max_connections},{max_connect_errors},{total_rows},{data_size},{index_size},{threads_connected},{threads_running},{threads_waited},{threads_created},{threads_cached}," \
                          "{qps},{tps},{bytes_received},{bytes_sent},{open_files_limit},{open_files},{table_open_cache},{open_tables},{key_buffer_size},{sort_buffer_size},{join_buffer_size}," \
                          "{key_blocks_unused},{key_blocks_used},{key_blocks_not_flushed},{mysql_sel},{mysql_ins},{mysql_upd},{mysql_del},{select_scan},{slow_queries}," \
                          "{key_read_requests},{key_reads},{key_write_requests},{Key_writes},{innodb_buffer_pool_size},{innodb_buffer_pool_pages_total},{innodb_buffer_pool_pages_data}," \
                          "{innodb_buffer_pool_pages_dirty},{innodb_buffer_pool_pages_free},{innodb_buffer_pool_hit},{innodb_io_capacity}," \
                          "{innodb_read_io_threads},{innodb_write_io_threads},{innodb_rows_deleted},{innodb_rows_inserted},{innodb_rows_read},{innodb_rows_updated}," \
                          "{innodb_row_lock_waits},{innodb_row_lock_time_avg},{innodb_buffer_pool_pages_flushed},{innodb_data_read},{innodb_data_written},{innodb_data_reads}," \
                          "{innodb_data_writes},{innodb_log_writes},{innodb_data_fsyncs},{innodb_os_log_written},0,'{check_time}' )"

        insert_data_values = {**mysqldata, **mysqlparams, **locals()}
        insert_sql = insert_data_sql.format(**insert_data_values)
        mysql_exec(insert_sql)
        archive_table(tags, 'mysql_stat')

        # parse the MySQL error log
        get_mysql_alert(tags, mysql_params, linux_params)

        # parse the slow query log
        get_mysql_slowquery(tags, mysql_params, linux_params)
    else:
        error_msg = "{}: failed to connect to the MySQL instance".format(tags)
        checklog.logger.error(error_msg)
        checklog.logger.info('{}: writing mysql_stat data'.format(tags))
        clear_table(tags, 'mysql_stat')
        sql = "insert into mysql_stat(tags,host,port,status,check_time) values('{tags}','{host}',{port},1,'{check_time}')"
        sql = sql.format(**locals())
        mysql_exec(sql)
        archive_table(tags, 'mysql_stat')
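
# Illustrative sketch of the sample-twice-and-diff pattern used above (two
# get_mysql_stat() calls separated by time.sleep(1)): rate metrics such as
# qps/tps come from the delta of cumulative SHOW GLOBAL STATUS counters over
# the interval. sample_counters/counter_rates are hypothetical names.
import time


def sample_counters(mysql_conn, keys=('Questions', 'Com_commit', 'Com_rollback')):
    cursor = mysql_conn.cursor()
    cursor.execute('show global status')
    status = dict(cursor.fetchall())
    return {key: int(status.get(key, 0)) for key in keys}


def counter_rates(mysql_conn, interval=1):
    first = sample_counters(mysql_conn)
    time.sleep(interval)
    second = sample_counters(mysql_conn)
    qps = (second['Questions'] - first['Questions']) / interval
    tps = (second['Com_commit'] + second['Com_rollback']
           - first['Com_commit'] - first['Com_rollback']) / interval
    return round(qps, 2), round(tps, 2)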
def check_linux(tags, linux_params):
    check_time = now()
    host = linux_params['hostname']
    port = linux_params['port']
    # create connection
    linux_conn, _ = LinuxBase(linux_params).connection()
    if linux_conn:
        checklog.logger.info('{}: start collecting Linux host monitoring data'.format(tags))
        # get linuxstat data
        linuxstat = LinuxStat(linux_params, linux_conn).get_linux()
        hostinfo = linuxstat['hostinfo']
        cpuinfo = linuxstat['cpuinfo']
        memtotal = linuxstat['Memtotal']
        ipinfo = linuxstat['ipinfo']
        load = linuxstat['load']
        cpustat = linuxstat['cpu']
        iostat = linuxstat['iostat']
        memstat = linuxstat['mem']
        vmstat = linuxstat['vmstat']
        tcpstat = linuxstat['tcpstat']
        netstat = linuxstat['net']
        procstat = linuxstat['proc']

        # total network in/out
        recv_kbps = round(sum([d['recv'] for d in netstat]), 2)
        send_kbps = round(sum([d['send'] for d in netstat]), 2)
        # total io
        read_mb = round(sum([d['rd_m_s'] for d in iostat]), 2)
        write_mb = round(sum([d['wr_m_s'] for d in iostat]), 2)
        iops = round(sum([d['io_s'] for d in iostat]), 2)
        # cpu used percent
        cpu_used = round(100 - cpustat['cpu_idle'], 2)
        # memory used percent
        mem_used = round((float(memstat['mem_used_mb']) /
                          (float(memtotal['memtotal']) / 1024)) * 100, 2)

        insert_data_values = {
            **locals(),
            **hostinfo,
            **cpuinfo,
            **memtotal,
            **ipinfo,
            **load,
            **cpustat,
            **memstat,
            **vmstat,
            **tcpstat,
            **procstat
        }

        insert_data_sql = "insert into linux_stat(tags,host,port,hostname,ipinfo,linux_version,updays,kernel,frame,cpu_mode,cpu_cache,processor,cpu_speed," \
                          "recv_kbps,send_kbps,load1,load5,load15,cpu_sys,cpu_iowait,cpu_user,cpu_used,memtotal,mem_used,mem_cache,mem_buffer,mem_free,mem_used_mb," \
                          "swap_used,swap_free,swapin,swapout,pgin,pgout,pgfault,pgmjfault,tcp_close,tcp_timewait,tcp_connected,tcp_syn,tcp_listen,iops,read_mb,write_mb," \
                          "proc_new,proc_running,proc_block,intr,ctx,softirq,status,check_time) " \
                          "values ('{tags}','{host}',{port},'{hostname}','{ipinfo}','{linux_version}',{updays},'{kernel}','{frame}','{cpu_mode}','{cpu_cache}','{processor}','{cpu_speed}'," \
                          "{recv_kbps},{send_kbps},{load1},{load5},{load15},{cpu_sys},{cpu_iowait},{cpu_user},{cpu_used},{memtotal},{mem_used},{mem_cache},{mem_buffer},{mem_free},{mem_used_mb}," \
                          "{swap_used},{swap_free},{swapin},{swapout},{pgin},{pgout},{pgfault},{pgmjfault},{tcp_close},{tcp_timewait},{tcp_connected},{tcp_syn},{tcp_listen},{iops},{read_mb},{write_mb}," \
                          "{proc_new},{proc_running},{proc_block},{intr},{ctx},{softirq},0,'{check_time}')"

        clear_table(tags, 'linux_stat')
        insert_sql = insert_data_sql.format(**insert_data_values)
        mysql_exec(insert_sql)
        archive_table(tags, 'linux_stat')

        # disk free
        clear_table(tags, 'linux_disk')
        diskfree_list = LinuxStat(linux_params, linux_conn).get_diskfree()
        for each in diskfree_list:
            dev, total_size, used_size, free_size, used_percent, mount_point = each
            used_percent = float(used_percent.replace('%', ''))

            insert_data_sql = '''insert into linux_disk(tags,host,dev,total_size,used_size,free_size,used_percent,mount_point,check_time) values(%s,%s,%s,%s,%s,%s,%s,%s,%s)'''

            values = (tags, host, dev, round(float(total_size) / 1024, 2),
                      round(float(used_size) / 1024,
                            2), round(float(free_size) / 1024,
                                      2), used_percent, mount_point, now())
            mysql_exec(insert_data_sql, values)
        archive_table(tags, 'linux_disk')

        # io stat
        clear_table(tags, 'linux_io_stat')
        for each in iostat:
            insert_data_sql = "insert into linux_io_stat(tags,host,dev,rd_s,rd_avgkb,rd_m_s,rd_mrg_s,rd_cnc,rd_rt,wr_s,wr_avgkb,wr_m_s,wr_mrg_s,wr_cnc,wr_rt,busy,in_prg,io_s,qtime,stime,check_time)" \
                              " values ('{tags}','{host}','{dev}',{rd_s},{rd_avgkb},{rd_m_s},{rd_mrg_s},{rd_cnc},{rd_rt},{wr_s},{wr_avgkb},{wr_m_s},{wr_mrg_s},{wr_cnc},{wr_rt},{busy},{in_prg},{io_s},{qtime},{stime},'{check_time}')"
            insert_data_values = {**locals(), **each}
            insert_sql = insert_data_sql.format(**insert_data_values)
            mysql_exec(insert_sql)
        archive_table(tags, 'linux_io_stat')
    else:
        error_msg = "{}: failed to connect to the Linux host".format(tags)
        checklog.logger.error(error_msg)
        checklog.logger.info('{}: writing linux_stat data'.format(tags))
        clear_table(tags, 'linux_stat')
        sql = "insert into linux_stat(tags,host,port,status,check_time) values('{tags}','{host}',{port},1,'{check_time}')"
        sql = sql.format(**locals())
        mysql_exec(sql)
        archive_table(tags, 'linux_stat')
def check_oracle(tags, oracle_params):
    db_version = oracle_params['db_version']
    host = oracle_params['host']
    port = oracle_params['port']
    service_name = oracle_params['service_name']
    linux_params = {
        'hostname': oracle_params['host'],
        'port': oracle_params['sshport_os'],
        'username': oracle_params['user_os'],
        'password': oracle_params['password_os']
    }
    check_time = now()
    db_conn = OracleBase(oracle_params).connection()
    db_conn_cdb = (OracleBase(oracle_params).connection_cdb()
                   if db_version == 'Oracle12c' else db_conn)

    if db_conn and db_conn_cdb:
        # database info
        checklog.logger.info('{}: start collecting Oracle monitoring data'.format(tags))
        # database
        dbname, db_unique_name, database_role, open_mode, log_mode, dbid, flashback_on, platform, created = database_info(
            db_conn)
        # storage sizes
        datafile_size = round(get_datafile_size(db_conn)[0], 2)
        tempfile_size = round(get_tempfile_size(db_conn)[0], 2)
        archivelog_size = round(get_archivelog_size(db_conn)[0], 2)
        # instance
        inst_id, instance_name, hostname, startup_time, version = instance_info(
            db_conn)
        updays = (datetime.now() - startup_time).days
        # process / connection usage
        max_process, current_process, process_used_percent = process_info(
            db_conn_cdb)
        # archive log usage
        archive_used_percent = get_archived(db_conn)
        # audit
        audit_trail = para(db_conn, 'audit_trail')
        is_rac = para(db_conn, 'cluster_database')
        # default undo tablespace
        undo_tablespace = para(db_conn, 'undo_tablespace')
        # Oraclestat
        oraclestat = OracleStat(oracle_params, db_conn)
        oraclestat.get_oracle_stat()
        time.sleep(1)
        oracle_data = oraclestat.get_oracle_stat()
        # status data
        oracle_osstat = oracle_data['os']
        oracle_stat = oracle_data['stat']
        oracle_wait = oracle_data['wait']
        oracle_sess = oracle_data['sess']
        oracle_mem = oracle_data['mem']
        oracle_load = oracle_data['load']
        # PGA usage
        pga_target_size, pga_used_size, pga_used_percent = pga(db_conn)
        if database_role == 'PHYSICAL STANDBY':
            adg_trans_lag, adg_trans_value = adg_trans(db_conn)
            adg_apply_lag, adg_apply_value = adg_apply(db_conn)
        else:
            adg_trans_lag = 'None'
            adg_apply_lag = 'None'
            adg_trans_value = 0
            adg_apply_value = 0

        # lock wait info
        lock_wait_res = get_lockwait_count(db_conn)
        dic_lock_wait = {each[0]: each[1] for each in lock_wait_res}
        enq_tx_row_lock_contention = dic_lock_wait.get(
            'enq: TX - row lock contention', 0)
        enq_tm_contention = dic_lock_wait.get('enq: TM - contention', 0)
        row_cache_lock = dic_lock_wait.get('row cache lock', 0)
        library_cache_lock = dic_lock_wait.get('library cache lock', 0)
        enq_tx_contention = dic_lock_wait.get('enq: TX - contention', 0)
        lock_wait_others = sum(each[1] for each in lock_wait_res) - (
            enq_tx_row_lock_contention + enq_tm_contention + row_cache_lock +
            library_cache_lock + enq_tx_contention)

        checklog.logger.info('{}: writing oracle_stat data'.format(tags))
        clear_table(tags, 'oracle_stat')

        insert_data_values = {
            **locals(),
            **oracle_osstat,
            **oracle_wait,
            **oracle_load,
            **oracle_sess,
            **oracle_stat,
            **oracle_mem
        }

        insert_data_sql = "insert into oracle_stat(tags,host,port,service_name,hostname,platform,num_cpus,physical_memory,inst_id,instance_name,db_version," \
                          "dbid,created,dbname,db_unique_name,database_role,open_mode,updays,audit_trail,log_mode,is_rac,undo_tablespace,flashback_on," \
                          "datafile_size,tempfile_size,archivelog_size," \
                          "archive_used_percent,max_process,current_process,process_used_percent,pga_target_size,pga_used_size," \
                          "pga_used_percent,pga_size,sga_size,memory_used_percent,logons_cumulative,qps,tps,exec_count,user_commits,user_rollbacks," \
                          "consistent_gets,logical_reads,physical_reads,physical_writes,block_changes,redo_size,redo_writes,total_parse_count," \
                          "hard_parse_count,bytes_received,bytes_sent,io_throughput,total_sessions,active_sessions,active_trans_sessions," \
                          "blocked_sessions,dbtime,dbcpu,log_parallel_write_wait,log_file_sync_wait,log_file_sync_count," \
                          "db_file_scattered_read_wait,db_file_scattered_read_count,db_file_sequential_read_wait,db_file_sequential_read_count," \
                          "row_lock_wait_count,enq_tx_row_lock_contention,enq_tm_contention,row_cache_lock,library_cache_lock,enq_tx_contention,lock_wait_others," \
                          "adg_trans_lag,adg_apply_lag,adg_trans_value,adg_apply_value,status,check_time) " \
                          "values('{tags}','{host}',{port},'{service_name}','{hostname}','{platform}',{num_cpus},{physical_memory},{inst_id},'{instance_name}','{version}'," \
                          "{dbid},'{created}','{dbname}','{db_unique_name}','{database_role}','{open_mode}',{updays},'{audit_trail}','{log_mode}','{is_rac}','{undo_tablespace}','{flashback_on}'," \
                          "{datafile_size},{tempfile_size},{archivelog_size}," \
                          "{archive_used_percent},{max_process},{current_process},{process_used_percent},{pga_target_size},{pga_used_size}," \
                          "{pga_used_percent},{pga_size},{sga_size},{memory_used_percent},{logons_cumulative},{qps},{tps},{exec_count},{user_commits},{user_rollbacks}," \
                          "{consistent_gets},{logical_reads},{physical_reads},{physical_writes},{block_changes},{redo_size},{redo_writes},{total_parse_count}," \
                          "{hard_parse_count},{bytes_received},{bytes_sent},{io_throughput},{total_sessions},{active_sessions},{active_trans_sessions}," \
                          "{blocked_sessions},{dbtime},{dbcpu},{log_parallel_write_wait},{log_file_sync_wait},{log_file_sync_count}," \
                          "{db_file_scattered_read_wait},{db_file_scattered_read_count},{db_file_sequential_read_wait},{db_file_sequential_read_count}," \
                          "{row_lock_wait_count},{enq_tx_row_lock_contention},{enq_tm_contention},{row_cache_lock},{library_cache_lock},{enq_tx_contention},{lock_wait_others}," \
                          "'{adg_trans_lag}','{adg_apply_lag}',{adg_trans_value},{adg_apply_value},0,'{check_time}' )"

        insert_sql = insert_data_sql.format(**insert_data_values)
        mysql_exec(insert_sql)
        checklog.logger.info(
            '{}: collected Oracle monitoring data (db name: {} role: {} open mode: {})'.format(
                tags, dbname, database_role, open_mode))
        archive_table(tags, 'oracle_stat')

        # control file
        clear_table(tags, 'oracle_controlfile')
        controlfile_list = get_controlfile(db_conn)
        for each in controlfile_list:
            control_name, size = each
            insert_data_sql = "insert into oracle_controlfile(tags,host,port,service_name,name,size,check_time)" \
                              "values('{tags}','{host}',{port},'{service_name}','{control_name}',{size},'{check_time}')"
            insert_sql = insert_data_sql.format(**locals())
            mysql_exec(insert_sql)

        # redolog
        clear_table(tags, 'oracle_redolog')
        redolog_list = get_redolog(db_conn)
        for each in redolog_list:
            group_no, thread_no, type, sequence_no, size, archived, status, member = each
            insert_data_sql = "insert into oracle_redolog(tags,host,port,service_name,group_no,thread_no,type,sequence_no," \
                              "size,archived,status,member,check_time)" \
                                  "values('{tags}','{host}',{port},'{service_name}',{group_no},{thread_no},'{type}',{sequence_no}," \
                              "{size},'{archived}','{status}','{member}','{check_time}')"
            insert_sql = insert_data_sql.format(**locals())
            mysql_exec(insert_sql)

        # tablespaces
        clear_table(tags, 'oracle_tablespace')
        tbsinfo_list = tablespace(db_conn)
        for tbsinfo in tbsinfo_list:
            tablespace_name, datafile_count, total_size, free_size, used_size, max_free_size, percent_used, percent_free, used_mb = tbsinfo
            insert_data_sql = "insert into oracle_tablespace(tags,host,port,service_name,tablespace_name,datafile_count,total_size,free_size," \
                              "used_size,max_free_size,percent_used,percent_free,used_mb,check_time)" \
                              "values('{tags}','{host}',{port},'{service_name}','{tablespace_name}',{datafile_count},{total_size},{free_size}," \
                              "{used_size},{max_free_size},{percent_used},{percent_free},{used_mb},'{check_time}')"
            insert_sql = insert_data_sql.format(**locals())
            mysql_exec(insert_sql)
        archive_table(tags, 'oracle_tablespace')

        # temporary tablespaces
        clear_table(tags, 'oracle_temp_tablespace')
        temptbsinfo_list = temp_tablespace(db_conn)
        for temptbsinfo in temptbsinfo_list:
            temptablespace_name, total_size, used_size, percent_used = temptbsinfo
            insert_data_sql = "insert into oracle_temp_tablespace(tags,host,port,service_name,temptablespace_name,total_size,used_size," \
                              "percent_used,check_time)" \
                              "values('{tags}','{host}',{port},'{service_name}','{temptablespace_name}',{total_size}," \
                              "{used_size},{percent_used},'{check_time}')"
            insert_sql = insert_data_sql.format(**locals())
            mysql_exec(insert_sql)
        archive_table(tags, 'oracle_temp_tablespace')

        # undo tablespaces
        clear_table(tags, 'oracle_undo_tablespace')
        undotbsinfo_list = get_undo_tablespace(db_conn)
        for undotbsinfo in undotbsinfo_list:
            undotablespace_name, total_size, used_size, percent_used = undotbsinfo
            insert_data_sql = "insert into oracle_undo_tablespace(tags,host,port,service_name,undotablespace_name,total_size,used_size," \
                              "percent_used,check_time)" \
                              "values('{tags}','{host}',{port},'{service_name}','{undotablespace_name}',{total_size}," \
                              "{used_size},{percent_used},'{check_time}')"
            insert_sql = insert_data_sql.format(**locals())
            mysql_exec(insert_sql)
        archive_table(tags, 'oracle_undo_tablespace')

        # optimizer statistics analysis
        clear_table(tags, 'oracle_table_stats')
        oracletablestats_list = get_tab_stats(db_conn)
        for each in oracletablestats_list:
            owner, table_name, num_rows, change_pct, last_analyzed = each
            insert_data_sql = "insert into oracle_table_stats(tags,host,port,service_name,owner,table_name,num_rows,change_pct,last_analyzed,check_time) " \
                              "values('{tags}','{host}',{port},'{service_name}','{owner}','{table_name}',{num_rows},{change_pct},'{last_analyzed}','{check_time}')"
            insert_sql = insert_data_sql.format(**locals())
            mysql_exec(insert_sql)

        # parse the alert log
        get_oracle_alert(tags, db_conn, oracle_params, linux_params)

        db_conn.close()
    else:
        error_msg = "{}: failed to connect to the database".format(tags)
        checklog.logger.error(error_msg)
        clear_table(tags, 'oracle_stat')
        checklog.logger.info('{}: writing oracle_stat data'.format(tags))
        sql = "insert into oracle_stat(tags,host,port,service_name,status,check_time) values(%s,%s,%s,%s,%s,%s)"
        value = (tags, oracle_params['host'], oracle_params['port'],
                 oracle_params['service_name'], 1, check_time)
        mysql_exec(sql, value)
        archive_table(tags, 'oracle_stat')
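
# Illustrative sketch of how the per-instance checks above might be driven.
# The column list assumed for oracle_list and the thread-pool scheduling are
# assumptions, not the project's actual scheduler.
from concurrent.futures import ThreadPoolExecutor


def run_oracle_checks(max_workers=4):
    rows = mysql_query(
        "select tags,host,port,service_name,user,password,db_version,"
        "sshport_os,user_os,password_os from oracle_list")
    keys = ('tags', 'host', 'port', 'service_name', 'user', 'password',
            'db_version', 'sshport_os', 'user_os', 'password_os')
    with ThreadPoolExecutor(max_workers=max_workers) as pool:
        for row in rows:
            params = dict(zip(keys, row))
            pool.submit(check_oracle, params['tags'], params)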