Example #1
def sqlserver_performance(database):
    query = "\nselect COUNTER_NAME,CNTR_VALUE from sys.dm_os_performance_counters where\n(object_name like '%sql statistics%' and counter_name = 'batch requests/sec') or\n(object_name like '%sql statistics%' and counter_name = 'sql compilations/sec') or\n(object_name like '%sql statistics%' and counter_name = 'sql re-compilations/sec') or\n(object_name like '%buffer manager%' and counter_name = 'lazy writes/sec') or\n(object_name like '%buffer manager%' and counter_name = 'page life expectancy') or\n(object_name like '%memory manager%' and counter_name = 'connection memory (kb)') or\n(object_name like '%memory manager%' and counter_name = 'memory grants pending') or\n(object_name like '%memory manager%' and counter_name = 'sql cache memory (kb)') or\n(object_name like '%memory manager%' and counter_name = 'target server memory (kb)') or\n(object_name like '%memory manager%' and counter_name = 'total server memory (kb)') or\n(object_name like '%access methods%' and counter_name = 'full scans/sec') or\n(object_name like '%access methods%' and counter_name = 'forwarded records/sec') or\n(object_name like '%access methods%' and counter_name = 'mixed page allocations/sec') or\n(object_name like '%access methods%' and counter_name = 'page splits/sec') or\n(object_name like '%access methods%' and counter_name = 'table lock escalations/sec') or\n(object_name like '%general statistics%' and counter_name = 'logins/sec') or\n(object_name like '%general statistics%' and counter_name = 'logouts/sec') or\n(object_name like '%general statistics%' and counter_name = 'user connections') or\n(object_name like '%general statistics%' and counter_name = 'processes blocked') or\n(object_name like '%latches%' and counter_name = 'latch waits/sec') or\n(object_name like '%latches%' and counter_name = 'average latch wait time (ms)') or\n(object_name like '%access methods%' and counter_name = 'workfiles created/sec') or\n(object_name like '%access methods%' and counter_name = 'worktables created/sec') or\n(object_name like '%general statistics%' and counter_name = 'active temp tables') or\n(object_name like '%general statistics%' and counter_name = 'temp tables creation rate') or\n(object_name like '%general statistics%' and counter_name = 'temp tables for destruction') or\n(object_name like '%databases%' and counter_name ='active transactions' and instance_name = '_Total') or\n(object_name like '%databases%' and counter_name ='Transactions/sec' and instance_name = '_Total') or\n(object_name like '%databases%' and counter_name ='log flushes/sec' and instance_name = '_Total') or\n(object_name like '%databases%' and counter_name ='cache hit ratio' and instance_name = '_Total') or\n(object_name like '%SQLServer:Locks%' and counter_name like '%Lock%' and instance_name = '_Total')"
    match_pattern = re.compile('/sec', re.IGNORECASE)
    date_current = get_10s_time()
    flag, json_data_current = run_sql(database, query)
    if not flag:
        print(str(build_exception_from_java(json_data_current)))
        return
    key = str(database.id) + ':performance'
    date_key = str(database.id) + ':performance_date'
    json_data_str_prev = redis.get(key)
    date_prev = redis.get(date_key)
    keys = ['COUNTER_NAME', 'CNTR_VALUE']
    redis.set(key, json.dumps(json_data_current))
    redis.set(date_key, str(date_current))
    if json_data_str_prev and date_prev and (
            date_current - to_date(date_prev)).total_seconds() < MAX_INTERVAL:
        json_data_prev = json.loads(json_data_str_prev)
        for idx, obj in enumerate(json_data_current):
            name = obj.get(keys[0])
            value = obj.get(keys[1])
            p = Performance()
            p.name = name
            p.database = database
            p.created_at = date_current
            if re.search(match_pattern, name):
                p.value = round(
                    (float(value) - float(json_data_prev[idx].get(keys[1]))) /
                    INTERVAL, 1)
            else:
                p.value = float(value)
            p.save()
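
The collectors in these examples all follow the same pattern: most of the counters they read are cumulative, so each run stores the raw snapshot (and its timestamp) in Redis and turns a counter into a per-second rate by dividing the difference between two consecutive snapshots by the sampling interval. A minimal, self-contained sketch of that pattern, with an in-memory dict standing in for Redis and illustrative names (`rate_per_sec` and `_store` are not part of the project):

# Minimal sketch of the delta-per-second calculation used above.
# `_store` stands in for the Redis cache; INTERVAL mirrors the fixed
# 10-second sampling schedule assumed by the collectors.
INTERVAL = 10
_store = {}

def rate_per_sec(counter_name, current_value):
    """Return (current - previous) / INTERVAL, or None on the first sample."""
    previous = _store.get(counter_name)
    _store[counter_name] = current_value
    if previous is None:
        return None
    return round((float(current_value) - float(previous)) / INTERVAL, 1)

# Two consecutive samples of a cumulative counter such as 'batch requests/sec'
print(rate_per_sec('batch requests/sec', 1000))  # None (no previous snapshot)
print(rate_per_sec('batch requests/sec', 1250))  # 25.0 requests per second
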
Example #2
def db2_performance(database):
    query = '\n    select\n    TOTAL_CONS, APPLS_CUR_CONS, APPLS_IN_DB2, LOCKS_WAITING, NUM_ASSOC_AGENTS, ACTIVE_SORTS,\n    LOCKS_HELD, LOCK_WAITS,\n    TOTAL_SORTS, SORT_OVERFLOWS,\n    POOL_DATA_L_READS, POOL_TEMP_DATA_L_READS, POOL_INDEX_L_READS, POOL_TEMP_INDEX_L_READS, POOL_XDA_L_READS, POOL_TEMP_XDA_L_READS, POOL_DATA_P_READS, POOL_TEMP_DATA_P_READS,\n    POOL_INDEX_P_READS, POOL_TEMP_INDEX_P_READS, POOL_XDA_P_READS, POOL_TEMP_XDA_P_READS,\n    POOL_DATA_WRITES, POOL_INDEX_WRITES, POOL_XDA_WRITES,\n    DIRECT_READS, DIRECT_WRITES,\n    COMMIT_SQL_STMTS, ROLLBACK_SQL_STMTS, DYNAMIC_SQL_STMTS, STATIC_SQL_STMTS, FAILED_SQL_STMTS, SELECT_SQL_STMTS, UID_SQL_STMTS, DDL_SQL_STMTS,\n    ROWS_DELETED, ROWS_INSERTED, ROWS_UPDATED, ROWS_SELECTED, ROWS_READ,\n    LOG_READS, LOG_WRITES\n    from sysibmadm.snapdb'
    stats_list_realtime = [
        'APPLS_CUR_CONS', 'LOCKS_HELD', 'APPLS_IN_DB2', 'LOCKS_WAITING',
        'NUM_ASSOC_AGENTS', 'ACTIVE_SORTS'
    ]
    stats_list_delta = [
        'TOTAL_CONS', 'LOCK_WAITS', 'TOTAL_SORTS', 'SORT_OVERFLOWS',
        'POOL_DATA_L_READS', 'POOL_TEMP_DATA_L_READS', 'POOL_INDEX_L_READS',
        'POOL_TEMP_INDEX_L_READS', 'POOL_XDA_L_READS', 'POOL_TEMP_XDA_L_READS',
        'POOL_DATA_P_READS', 'POOL_TEMP_DATA_P_READS', 'POOL_INDEX_P_READS',
        'POOL_TEMP_INDEX_P_READS', 'POOL_XDA_P_READS', 'POOL_TEMP_XDA_P_READS',
        'POOL_DATA_WRITES', 'POOL_INDEX_WRITES', 'POOL_XDA_WRITES',
        'DIRECT_READS', 'DIRECT_WRITES', 'COMMIT_SQL_STMTS',
        'ROLLBACK_SQL_STMTS', 'DYNAMIC_SQL_STMTS', 'STATIC_SQL_STMTS',
        'FAILED_SQL_STMTS', 'SELECT_SQL_STMTS', 'UID_SQL_STMTS',
        'DDL_SQL_STMTS', 'ROWS_DELETED', 'ROWS_INSERTED', 'ROWS_UPDATED',
        'ROWS_SELECTED', 'ROWS_READ', 'LOG_READS', 'LOG_WRITES'
    ]
    date_current = get_10s_time()
    flag, json_data_current = run_sql(database, query)
    if not flag or not json_data_current:
        print(str(build_exception_from_java(json_data_current)))
        return
    json_data_current = json_data_current[0]
    key = str(database.id) + ':performance'
    date_key = str(database.id) + ':performance_date'
    json_data_str_prev = redis.get(key)
    date_prev = redis.get(date_key)
    redis.setex(key, MAX_INTERVAL, json.dumps(json_data_current))
    redis.setex(date_key, MAX_INTERVAL, str(date_current))
    if json_data_str_prev and date_prev and (
            date_current - to_date(date_prev)).total_seconds() < MAX_INTERVAL:
        json_data_prev = json.loads(json_data_str_prev)
        for stat_name, value in json_data_current.items():
            p = Performance()
            p.name = stat_name
            p.created_at = date_current
            p.database = database
            if stat_name in stats_list_realtime:
                p.value = value
            elif stat_name in stats_list_delta:
                p.value = (float(value) -
                           float(json_data_prev.get(stat_name))) / INTERVAL
            p.save()
Example #3
def mysql_performance(database):
    query = "show global status where VARIABLE_NAME in (\n        'Queries', 'Questions','Com_delete','Com_insert','Com_select','Com_update',\n        'Bytes_received','Bytes_sent',\n        'Thread_connected','Connections',\n        'Select_full_join', 'Select_full_range_join', 'Select_range', 'Select_range_check', 'Select_scan',\n        'Sort_merge_passes','Sort_scan','Sort_range','Sort_rows',\n        'Created_tmp_disk_tables','Created_tmp_files','Created_tmp_tables',\n        'Innodb_data_writes','Innodb_log_writes','Innodb_os_log_written',\n        'Innodb_rows_read','Innodb_rows_inserted','Innodb_rows_updated','Innodb_rows_deleted',\n        'Innodb_data_reads','Innodb_buffer_pool_read_requests'\n    )"
    none_delta_stats = 'Thread_connected'
    date_current = get_10s_time()
    flag, json_data_current = run_sql(database, query)
    if not flag:
        print(str(build_exception_from_java(json_data_current)))
        return
    key = str(database.id) + ':performance'
    date_key = str(database.id) + ':performance_date'
    json_data_str_prev = redis.get(key)
    date_prev = redis.get(date_key)
    k1, k2 = json_data_current[0].keys()
    if re.search('name', k1, re.IGNORECASE):
        keys = [k1, k2]
    else:
        keys = [k2, k1]
    redis.setex(key, MAX_INTERVAL, json.dumps(json_data_current))
    redis.setex(date_key, MAX_INTERVAL, str(date_current))
    if json_data_str_prev and date_prev and (
            date_current - to_date(date_prev)).total_seconds() < MAX_INTERVAL:
        json_data_prev = json.loads(json_data_str_prev)
        for idx, obj in enumerate(json_data_current):
            name = obj.get(keys[0])
            value = obj.get(keys[1])
            p = Performance()
            p.name = name
            p.database = database
            p.created_at = date_current
            if name in none_delta_stats:
                if not value:
                    print('value is None: ' + name)
                    value = 0
                p.value = float(value)
            else:
                p.value = round(
                    (float(value) - float(json_data_prev[idx].get(keys[1]))) /
                    INTERVAL, 1)
            p.save()
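
One detail worth noting in the MySQL collector: it does not assume which of the two result columns of `show global status` holds the counter name; it inspects the first row's keys and treats whichever key contains "name" as the name column. A small illustrative check (the sample row is made up):

# Sketch of the column-detection step above: pick whichever result key
# contains 'name' as the counter-name column. The sample row is illustrative.
import re

row = {'VARIABLE_NAME': 'Questions', 'VARIABLE_VALUE': '123456'}
k1, k2 = row.keys()
keys = [k1, k2] if re.search('name', k1, re.IGNORECASE) else [k2, k1]
print(row[keys[0]], row[keys[1]])  # Questions 123456
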
Example #4
def get_oracle_summary(pk):
    key = 'oracle:%s:index' % pk
    read_data = redis.get(key)
    if read_data:
        return json.loads(read_data)
    else:
        conn = Database.objects.get(pk=pk)
        db_type = conn.db_type
        query = reprocess_query(Space_Total_Query, {'pk': pk})
        total_space = execute_return_json(query)
        if not total_space:
            get_space(conn)
            total_space = execute_return_json(query)
        query = SummaryQuery.get(db_type)
        flag, json_data = run_batch_sql(conn, query)
        if not flag:
            raise build_exception_from_java(json_data)
        space = {'space': total_space}
        summary_data = {**space, **json_data}
        redis.setex(key, 86400, json.dumps(summary_data))
        return summary_data
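
get_oracle_summary is a cache-aside read: serve from Redis if the key exists, otherwise rebuild the summary and store it with a one-day TTL (86400 seconds). A condensed sketch of that flow, with `redis_client` and `build_summary` as illustrative stand-ins rather than project names:

import json

def cached_summary(redis_client, pk, build_summary, ttl=86400):
    """Cache-aside: return the cached summary, or rebuild and cache it."""
    key = 'oracle:%s:index' % pk
    cached = redis_client.get(key)
    if cached:
        return json.loads(cached)
    data = build_summary(pk)                      # the expensive SQL path
    redis_client.setex(key, ttl, json.dumps(data))
    return data
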
Example #5
def oracle_performance(database):
    query = {
        'stats':
        "\nselect inst_id, name, value\nfrom\n    (\n        select ss.inst_id\n        ,      sn.name\n        ,      ss.value\n        from   v$statname sn\n        ,      gv$sysstat  ss\n        where  sn.statistic# = ss.statistic#\n        and    sn.name in (\n        'execute count', 'logons cumulative',\n        'parse count (hard)', 'parse count (total)', 'parse count (failures)',\n        'physical read total IO requests', 'physical read total bytes',\n        'physical write total IO requests', 'physical write total bytes',\n        'redo size', --'session cursor cache hits',\n        'session logical reads', 'user calls', 'user commits', 'user rollbacks','logons current',\n        'gc cr blocks received','gc current blocks received',\n        'gc cr block receive time', 'gc current block receive time')\n        union all\n        select inst_id\n        ,      STAT_NAME\n        ,      VALUE\n        from gv$osstat\n        where STAT_NAME in ('BUSY_TIME','IDLE_TIME')\n        union all\n        select\n          (select min(INSTANCE_NUMBER) from gv$instance),\n          'SCN GAP Per Minute',\n          current_scn\n        from v$database\n    )\norder by 1,2",
        'wait':
        "\n        select inst_id, event, TIME_WAITED, TOTAL_WAITS\n        from gv$system_event\n        where\n        event in (\n            'log file sync',\n            'log file parallel write',\n            'db file parallel write',\n            'db file sequential read',\n            'db file scattered read',\n            'direct path read',\n            'direct path read temp'\n            )\n            order by 1,2",
        'dg':
        "\n        select\n            INST_ID,\n            NAME,\n            VALUE\n        from gv$dataguard_stats\n        where name in ('apply lag','transport lag')"
    }
    stats_list = {
        'session cursor cache hits': 'session cursor cache hits',
        'BUSY_TIME': 'Host CPU Utilization (%)',
        'physical write total IO requests':
        'Physical Write IO Requests Per Sec',
        'physical write total bytes': 'Physical Write Total Bytes Per Sec',
        'physical read total IO requests': 'Physical Read IO Requests Per Sec',
        'physical read total bytes': 'Physical Read Total Bytes Per Sec',
        'SCN GAP Per Minute': 'SCN GAP Per Minute',
        'execute count': 'Executions Per Sec',
        'logons cumulative': 'Logons Per Sec',
        'logons current': 'Session Count',
        'parse count (failures)': 'Parse Failure Count Per Sec',
        'parse count (hard)': 'Hard Parse Count Per Sec',
        'parse count (total)': 'Total Parse Count Per Sec',
        'redo size': 'Redo Generated Per Sec',
        'session logical reads': 'Logical Reads Per Sec',
        'user rollbacks': 'User Rollbacks Per Sec',
        'user calls': 'User Calls Per Sec',
        'user commits': 'User Commits Per Sec',
        'gc cr blocks received': 'GC CR Block Received Per Second',
        'gc current blocks received': 'GC Current Block Received Per Second',
        'gc cr block receive time': 'Global Cache Average CR Get Time',
        'gc current block receive time':
        'Global Cache Average Current Get Time'
    }
    none_delta_stats = ('BUSY_TIME', 'IDLE_TIME', 'gc cr block receive time',
                        'gc current block receive time', 'logons current')
    date_current = get_10s_time()
    if not database.dg_stats:
        query.pop('dg')
    flag, json_data = run_batch_sql(database, query)
    if not flag:
        print(str(build_exception_from_java(json_data)))
        return
    json_data_1_current = json_data.get('stats')
    json_data_2_current = json_data.get('wait')
    json_data_dg = json_data.get('dg')
    key1 = str(database.id) + ':performance1'
    key2 = str(database.id) + ':performance2'
    date_key = str(database.id) + ':performance_date'
    json_data_str_1 = redis.get(key1)
    json_data_str_2 = redis.get(key2)
    date_prev = redis.get(date_key)
    keys1 = ['NAME', 'VALUE']
    keys2 = ['EVENT', 'TIME_WAITED', 'TOTAL_WAITS']
    redis.setex(key1, MAX_INTERVAL, json.dumps(json_data_1_current))
    redis.setex(key2, MAX_INTERVAL, json.dumps(json_data_2_current))
    redis.setex(date_key, MAX_INTERVAL, str(date_current))
    if json_data_str_1:
        if json_data_str_2:
            if date_prev:
                if (date_current -
                        to_date(date_prev)).total_seconds() < MAX_INTERVAL:
                    json_data_1_prev = json.loads(json_data_str_1)
                    json_data_2_prev = json.loads(json_data_str_2)
                    for idx, obj in enumerate(json_data_1_current):
                        name = obj.get(keys1[0])
                        if name == 'IDLE_TIME':
                            continue
                        value = obj.get(keys1[1])
                        inst_id = obj.get('INST_ID')
                        p = Performance()
                        p.name = stats_list.get(name)
                        p.database = database
                        p.created_at = date_current
                        p.inst_id = inst_id
                        delta = float(value) - float(json_data_1_prev[idx].get(
                            keys1[1]))
                        total = 1
                        if name in none_delta_stats:
                            if name == 'BUSY_TIME':
                                total = delta + float(
                                    json_data_1_current[idx + 1].get(
                                        keys1[1])) - float(
                                            json_data_1_prev[idx + 1].get(
                                                keys1[1]))
                                value = round(
                                    delta / (total if total != 0 else 1) * 100,
                                    1)
                            elif name in ('gc cr block receive time',
                                          'gc current block receive time'):
                                delta = 10.0 * delta
                                total = float(
                                    json_data_1_current[idx + 1].get(
                                        keys1[1])) - float(
                                            json_data_1_prev[idx + 1].get(
                                                keys1[1]))
                                value = round(
                                    delta / (total if total != 0 else 1), 1)
                            p.value = value
                        else:
                            p.value = round(delta / INTERVAL, 1)
                        p.save()

                    for idx, obj in enumerate(json_data_2_current):
                        name = obj.get(keys2[0])
                        value1 = obj.get(keys2[1])
                        value2 = obj.get(keys2[2])
                        inst_id = obj.get('INST_ID')
                        delta = 10.0 * (float(value1) - float(
                            json_data_2_prev[idx].get(keys2[1])))
                        total = float(value2) - float(
                            json_data_2_prev[idx].get(keys2[2]))
                        value = round(delta / (total if total != 0 else 1), 1)
                        p = Performance()
                        p.name = name
                        p.database = database
                        p.created_at = date_current
                        p.inst_id = inst_id
                        p.value = value
                        p.save()

                    if json_data_dg:
                        for x in json_data_dg:
                            p = Performance()
                            p.name = x.get('NAME')
                            p.inst_id = x.get('INST_ID')
                            p.value = convert_oracle_interval_to_secodns(
                                x.get('VALUE'))
                            p.database = database
                            p.created_at = date_current
                            p.save()
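
Unlike the simple per-second counters, example 5 derives two ratios from consecutive snapshots: host CPU utilization from the BUSY_TIME and IDLE_TIME deltas, and an average wait/receive time by dividing a time delta by a count delta (the 10.0 factor in the code converts the centisecond timings of these views to milliseconds). A worked sketch with invented counter values:

# Two consecutive 10-second samples; all values invented for illustration.
busy_prev, busy_curr = 50_000, 50_600      # BUSY_TIME (centiseconds)
idle_prev, idle_curr = 150_000, 150_200    # IDLE_TIME (centiseconds)

busy_delta = busy_curr - busy_prev                               # 600
total_delta = busy_delta + (idle_curr - idle_prev)               # 600 + 200 = 800
host_cpu_pct = round(busy_delta / (total_delta or 1) * 100, 1)   # 75.0

# Average wait time for an event such as 'log file sync':
waited_prev, waited_curr = 12_000, 12_030  # TIME_WAITED (centiseconds)
waits_prev, waits_curr = 90_000, 90_100    # TOTAL_WAITS

avg_wait_ms = round(10.0 * (waited_curr - waited_prev)
                    / ((waits_curr - waits_prev) or 1), 1)       # 3.0 ms
print(host_cpu_pct, avg_wait_ms)
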
Example #6
def object_detail(pk,
                  owner,
                  object_name,
                  object_type=None,
                  subobject_name=None,
                  cache=False):
    try:
        schema_name = owner
        key = f'''{pk}:schema:{owner}:{object_name}:{subobject_name}'''
        if cache:
            cache_data = redis.get(key)
            if cache_data:
                return json.loads(cache_data)
        database = Database.objects.get(pk=pk)
        db_type = database.db_type
        type_map = Type_TO_CN.get(database.db_type)
        schema_name = owner
        db_name = None
        if db_type == 'sqlserver':
            db_name, owner = owner.split('.')
        options = {
            'OWNER': owner,
            'OBJECT_NAME': object_name,
            'SUBOBJECT_NAME': subobject_name
        }
        if not object_type:
            object_type = get_object_type(database, owner, object_name,
                                          options, db_name)
            if not object_type:
                raise Exception('.')
            else:
                object_type = type_map.get(object_type)
        options['OBJECT_TYPE'] = CN_TO_Type.get(db_type).get(object_type)
        detail_query = {}
        if Object_Detail_Query.get(db_type):
            if Object_Detail_Query.get(db_type).get(object_type):
                detail_query = Object_Detail_Query.get(db_type).get(
                    object_type)
        ddl_query = DDL_Query.get(db_type) if DDL_Query.get(db_type) else {}
        if not subobject_name:
            query = {**detail_query, **ddl_query}
        else:
            query = detail_query
        if db_type == 'sqlserver':
            if detail_query:
                query.pop('DDL')
        query = {k: (v.format(**options)) for k, v in query.items()}
        flag, schema_data = run_batch_sql(database, query, db_name)
        if not flag:
            raise build_exception_from_java(schema_data)
        if schema_data.get('DDL'):
            if db_type != 'mysql':
                schema_data['DDL'] = schema_data.get('DDL')[0].get(
                    'DDL') if schema_data.get('DDL') else ''
        if schema_data.get('DDL'):
            if db_type == 'mysql':
                ddl_data = schema_data.get('DDL')[0]
                schema_data['DDL'] = None
                for k, v in ddl_data.items():
                    if 'create ' in k.lower():
                        schema_data['DDL'] = v

                if not schema_data['DDL']:
                    for k, v in ddl_data.items():
                        if 'SQL Original Statement' in k:
                            schema_data['DDL'] = v

        delta_list = []
        total_list = []
        if object_type == '':
            query_delta = f'''
            select extract(epoch from created_at)*1000 created_at, rows - lag(rows) over (order by created_at) as rows
            from monitor_table_rows where database_id = '{pk}'
            and owner = '{schema_name}' and table_name = '{object_name}'
            order by created_at
            '''
            query_total = f'''
            select extract(epoch from created_at)*1000 created_at, rows
            from monitor_table_rows where database_id = '{pk}'
            and owner = '{schema_name}' and table_name = '{object_name}'
            order by created_at
            '''
            delta_list = execute_return_json(query_delta)
            total_list = execute_return_json(query_total)
        new_schema = OrderedDict()
        for x in Ordered_List:
            if x in schema_data:
                new_schema[x] = schema_data.get(x)

        if delta_list:
            new_schema[''] = {
                'delta': [[x.get('CREATED_AT'), x.get('ROWS')]
                          for x in delta_list if x.get('ROWS') is not None],
                'total': [[x.get('CREATED_AT'), x.get('ROWS')]
                          for x in total_list if x.get('ROWS') is not None]
            }
        redis.set(key, json.dumps(new_schema))
        return new_schema
    except ObjectDoesNotExist:
        return {'error_message': ''}
    except Exception as err:
        return {'error_message': str(err)}
Example #7
def oracle_sql_detail(pk, sql_id, sql_text=None, instance_id=None, time_span=None, begin_time=None, end_time=None, cache=True, activity=True, sql_audit=True, only_tune=False):
    database = Database.objects.get(pk=pk)
    if instance_id == 'null':
        instance_id = database.db_name
    inst_id = database.instance_id_list.split(',')[0] if not instance_id or instance_id == database.db_name or instance_id == '0' else instance_id
    if sql_audit:
        inst_id = database.instance_id_list
    key_audit = f'''{pk}:sql_detail:{sql_id}:audit'''
    audit_data = None
    audit_data_json = {}
    if cache:
        audit_data = redis.get(key_audit)
        if audit_data is not None:
            audit_data_json = json.loads(audit_data)
    sql_detail = get_default_sql_detail_format(database.db_type)
    if sql_id != 'null':
        if time_span == 'realtime':
            sqldetail_sql = get_realtime_sql(sql_id, inst_id)
            if only_tune:
                sqldetail_sql.pop('binds')
            flag, sqldetail_data = run_batch_sql(database, sqldetail_sql)
            if not flag:
                return sqldetail_data
            stat_data = sqldetail_data.get('stats')
            plan_data = sqldetail_data.get('plans')
            sqlmon_data = sqldetail_data.get('sqlmon')
            bind_data = [] if only_tune else sqldetail_data.get('binds')
            for x in stat_data:
                key = ('{}-{}-{}').format(x.get('INST_ID'), x.get('CHILD_NUMBER'), x.get('PLAN_HASH_VALUE'))
                child_summary = {k:v for k, v in x.items() if k in ('CHILD_NUMBER',
                                                                    'PLAN_HASH_VALUE',
                                                                    'PARSING_SCHEMA_NAME',
                                                                    'LAST_LOAD_TIME',
                                                                    'MODULE', 'ACTION',
                                                                    'SERVICE')}
                pie_chart_data = {k:v for k, v in x.items() if k in ('ON CPU', 'Application',
                                                                     'Cluster', 'Concurrency',
                                                                     'User I/O')}
                execution_stats = {k:v for k, v in x.items() if k in ('EXECUTIONS',
                                                                      'ELAPSED_TIME',
                                                                      'CPU_TIME',
                                                                      'BUFFER_GETS',
                                                                      'DISK_READS',
                                                                      'DIRECT_WRITES',
                                                                      'ROWS_PROCESSED',
                                                                      'FETCHES')}
                metric_dict = {
                    'EXECUTIONS': '',
                    'ELAPSED_TIME': '()',
                    'CPU_TIME': 'CPU()',
                    'BUFFER_GETS': '',
                    'DISK_READS': '',
                    'DIRECT_WRITES': '',
                    'ROWS_PROCESSED': '',
                    'FETCHES': ''
                }
                total_executions = execution_stats.get('EXECUTIONS') if execution_stats.get('EXECUTIONS') != 0 else 1
                total_rows = execution_stats.get('ROWS_PROCESSED') if execution_stats.get('ROWS_PROCESSED') != 0 else 1
                execution_data = [{
                    '指标': metric_dict.get(k),
                    '总数': v,
                    '平均每次执行': round(v / total_executions),
                    '平均每行记录': round(v / total_rows)
                } for k, v in execution_stats.items()]
                sql_detail['stats'][key] = {
                    'child_summary': child_summary,
                    'pie_chart_data': pie_chart_data,
                    'execution_stats': {
                        'header': ['', '', '', ''],
                        'data': execution_data
                    }
                }

            plan_dic = defaultdict(list)
            for x in plan_data:
                key = ('{}-{}-{}').format(x.get('INST_ID'), x.get('CHILD_NUMBER'), x.get('PLAN_HASH_VALUE'))
                x.pop('INST_ID')
                plan_dic[key].append(x)

            sql_detail['plans']['data'] = plan_dic
            if sql_audit:
                if sqlmon_data:
                    sqlmon_data = sqlmon_data[:MAX_SQLMON_FOR_SQL_AUDIT]
                    binds_from_sqlmon = gen_sql_mononitor_and_binds(database, sqlmon_data)
                    bind_data = bind_data + binds_from_sqlmon
            sql_detail['sqlmon']['data'] = sqlmon_data
            sql_detail['binds']['data'] = bind_data
        else:
            sqldetail_sql = get_hist_sql(sql_id, inst_id, begin_time, end_time)
            query_sqlmon = f'''
                select
                ID,
                STATUS,
                SQL_ID,
                ELAPSED_TIME,
                DB_TIME,
                DB_CPU,
                SQL_EXEC_ID,
                SQL_EXEC_START,
                SQL_PLAN_HASH_VALUE,
                INST_ID,
                USERNAME
                from monitor_sqlmon
                where created_at BETWEEN to_timestamp({begin_time}) and to_timestamp({end_time})
                and sql_id = '{sql_id}' and database_id = '{pk}'
            '''
            flag, sqldetail_data = run_batch_sql(database, sqldetail_sql)
            if not flag:
                return sqldetail_data
            stat_data = sqldetail_data.get('stats')
            plan_data = sqldetail_data.get('plans')
            bind_data = sqldetail_data.get('binds')
            sqlmon_data = execute_return_json(query_sqlmon)
            exec_delta = defaultdict(list)
            avg_elapse_time = defaultdict(list)
            avg_cpu_time = defaultdict(list)
            avg_crs = defaultdict(list)
            avg_reads = defaultdict(list)
            plan_dic = defaultdict(list)
            stats_dict = defaultdict(dict)
            for x in stat_data:
                phv = str(x.get('PLAN_HASH_VALUE'))
                snap_time = x.get('SNAP_TIME')
                exec_delta[phv].append([snap_time, x.get('EXEC_DELTA')])
                avg_elapse_time[phv].append([snap_time, x.get('AVG_ELAPSE_TIME')])
                avg_cpu_time[phv].append([snap_time, x.get('AVG_CPU_TIME')])
                avg_crs[phv].append([snap_time, x.get('AVG_CRS')])
                avg_reads[phv].append([snap_time, x.get('AVG_READS')])

            stats_dict[''] = exec_delta
            stats_dict['(s)'] = avg_elapse_time
            stats_dict['CPU(s)'] = avg_cpu_time
            stats_dict[''] = avg_crs
            stats_dict[''] = avg_reads
            for x in plan_data:
                phv = str(x.get('PLAN_HASH_VALUE'))
                plan_dic[phv].append(x)

            sql_detail['stats'] = stats_dict
            sql_detail['plans']['data'] = plan_dic
            sql_detail['sqlmon']['data'] = sqlmon_data
            sql_detail['binds']['data'] = bind_data
        if cache:
            if audit_data is not None:
                if not only_tune:
                    sql_detail['audit'] = audit_data_json
                audit_data_json = get_sql_audit(pk, sql_id, only_tune=only_tune)
                sql_detail['audit'] = audit_data_json
                redis.setex(key_audit, SQLTEXT_RETENTION, json.dumps(audit_data_json))
            if audit_data:
                new_plan_dict = {}
                if plan_dic:
                    new_plan_dict = {k[k.rfind('-') + 1:]:v for k, v in plan_dic.items()}
                tune_data = get_sql_tune(database, audit_data_json, new_plan_dict)
                if tune_data:
                    sql_detail['tune'] = tune_data
        return sql_detail