Beispiel #1
0
def get_ash_chain(pk, instance_id=None, begin_time=None, end_time=None):
    """Run the ASH (active session history) chain query for one database.

    Args:
        pk: primary key of the Database row.
        instance_id: optional RAC instance filter; passing the database
            name is treated as "all instances" and expands to the full
            instance-id list.
        begin_time / end_time: time-window bounds substituted into the query.

    Returns:
        The query result as JSON rows, or ``{'error_message': ...}`` when
        the database does not exist or the query fails.
    """
    try:
        conn = Database.objects.get(pk=pk)
        inst_id_pred = ''
        # Only multi-instance (RAC) databases need an inst_id predicate.
        if conn.instance_count > 1:
            # BUG FIX: the original guard was
            # `instance_id != None or instance_id != conn.db_name`, which is
            # always true — with instance_id=None it produced the predicate
            # " and inst_id in (None)".
            if instance_id is not None:
                if instance_id == conn.db_name:
                    # The db name acts as the "all instances" sentinel.
                    instance_id = conn.instance_id_list
                inst_id_pred = ' and inst_id in ({})'.format(instance_id)
        options = {
            'pk': pk,
            'begin_time': begin_time,
            'end_time': end_time,
            'inst_id_pred': inst_id_pred
        }
        # NOTE(review): values are interpolated into SQL text; safe only if
        # callers pass trusted values.
        query = ASH_CHAIN_QUERY.get(conn.db_type).format(**options)
        result = execute_return_json(query)
        return result
    except ObjectDoesNotExist:
        return {'error_message': ''}
    except Exception as err:
        return {'error_message': str(err)}
Beispiel #2
0
def get_sqlmon_list(pk, time_span=None, begin_time=None, end_time=None):
    """Return SQL-monitor entries, live or historical.

    Args:
        pk: primary key of the Database row.
        time_span: 'realtime' queries the live database; any other value
            reads the stored history between begin_time and end_time.
        begin_time / end_time: epoch bounds used by the history query.

    Returns:
        JSON rows, or ``{'error_message': ...}`` on failure.
    """
    try:
        database = Database.objects.get(pk=pk)
        json_data = []
        if time_span == 'realtime':
            query = SQLMON_QUERY.get('realtime')
            flag, json_data = run_sql(database, query)
            if not flag:
                raise build_exception_from_java(json_data)
        else:
            # BUG FIX: this branch was originally attached to `if not flag`
            # inside the realtime path, so history data was only fetched
            # after a *successful* realtime query and never for
            # non-realtime spans.
            options = {
                'begin_time': begin_time,
                'end_time': end_time,
                'pk': pk
            }
            query = SQLMON_QUERY.get('history').format(**options)
            json_data = execute_return_json(query)
        return json_data
    except ObjectDoesNotExist:
        return {'error_message': ''}
    except Exception as err:
        return {'error_message': str(err)}
Beispiel #3
0
def session_history(pk, session_id=None, begin_time=None, end_time=None):
    """Return ``[created_at_ms, session_count]`` pairs for one session.

    Looks up the database (raising on a missing pk) and aggregates the
    stored session samples inside the given epoch window.
    """
    try:
        # Existence check: raises ObjectDoesNotExist for an unknown pk.
        database = Database.objects.get(pk=pk)
        # NOTE(review): parameters are interpolated directly into SQL;
        # callers are assumed to pass trusted values.
        sql = f'''
        select
    extract(epoch from created_at)*1000 CREATED_AT,
    count(*) SESSION_COUNT
from
    monitor_session
where database_id = '{pk}' and session_id = '{session_id}' and
    created_at between to_timestamp({begin_time}) and to_timestamp({end_time})
group by
    created_at
order by created_at'''
        rows = execute_return_json(sql)
        return [[row.get('CREATED_AT'), row.get('SESSION_COUNT')]
                for row in rows]
    except ObjectDoesNotExist:
        return {'error_message': ''}
    except Exception as err:
        return {'error_message': str(err)}
Beispiel #4
0
def index(request):
    """Dashboard endpoint: aggregate inventory counts, warning statistics,
    health-check scores and SQL-audit averages into one response.

    Returns a Response whose top-level keys '1', '2', '3', '8', '10' feed
    separate dashboard widgets.
    """
    # --- per-type database inventory counts ---
    database_count = Database.objects.all().count()
    oracle_count = (Database.objects.filter(db_type='oracle')).count()
    mysql_count = (Database.objects.filter(db_type='mysql')).count()
    db2_count = (Database.objects.filter(db_type='db2')).count()
    sqlserver_count = (Database.objects.filter(db_type='sqlserver')).count()
    # --- warning counts and per-type percentages (0 when no warnings) ---
    warn_query_sum = Warn_Result.objects.count()
    warn_db2_count = (Warn_Result.objects.filter(
        database__db_type='db2')).count()
    warn_oracle_count = (Warn_Result.objects.filter(
        database__db_type='oracle')).count()
    warn_mysql_count = (Warn_Result.objects.filter(
        database__db_type='mysql')).count()
    warn_sqlserver_count = (Warn_Result.objects.filter(
        database__db_type='sqlserver')).count()
    warn_db2_pct = num2pct(warn_db2_count /
                           warn_query_sum) if warn_query_sum != 0 else 0
    warn_oracle_pct = num2pct(warn_oracle_count /
                              warn_query_sum) if warn_query_sum != 0 else 0
    warn_mysql_pct = num2pct(warn_mysql_count /
                             warn_query_sum) if warn_query_sum != 0 else 0
    warn_sqlserver_pct = num2pct(warn_sqlserver_count /
                                 warn_query_sum) if warn_query_sum != 0 else 0
    # Health-check scores are pulled out of the JSONB report via a raw
    # Postgres path expression (report_detail#>'{summary,score}').
    all_health_report_score_list = list((((Heathcheck_Report.objects.exclude(
        report_detail__summary__score=None)).filter(status=1)).extra(
            select={'score': "report_detail#>'{summary,score}'"})).values_list(
                'score', flat=True))
    # NOTE(review): the three buckets overlap — "general" (< 0.8) also
    # includes every "danger" (< 0.6) score; confirm that is intended.
    healthcheck_report_danger_count = len(
        [_score for _score in all_health_report_score_list if _score < 0.6])
    healthcheck_report_general_count = len(
        [_score for _score in all_health_report_score_list if _score < 0.8])
    healthcheck_report_good_count = len(
        [_score for _score in all_health_report_score_list if _score >= 0.8])
    # Distinct database ids that have at least one health report.
    query_result = execute_return_json(
        'SELECT database_id FROM heathcheck_heathcheck_report GROUP BY database_id;'
    )
    database_id_in_health_report = []
    healthcheck_score_list = []
    for _result in query_result:
        # Key case differs per backend, so try both spellings.
        database_id_in_health_report.append(
            _result.get('database_id') or _result.get('DATABASE_ID'))

    # Build a score time series for (at most) the first five databases.
    for report_database_id in database_id_in_health_report[:5]:
        try:
            database = Database.objects.get(pk=report_database_id)
            score_list = (((Heathcheck_Report.objects.filter(
                database_id=report_database_id)).filter(status=1)).extra(
                    select={'score': "report_detail#>'{summary,score}'"})
                          ).values_list('score', 'created_at')
            # NOTE(review): the second element parses as
            # `(type(...) == datetime.datetime and num2pct(score1[0])) if
            # score1[1] else 0`, i.e. it yields False (not 0) when
            # created_at is not a datetime — confirm this is intended.
            score_list = [[
                int(score1[1].timestamp() * 1000),
                type(score1[1]) == datetime.datetime and num2pct(score1[0])
                if score1[1] else 0
            ] for score1 in score_list]
            if score_list:
                healthcheck_score_list.append({
                    'database_alias': database.alias,
                    'data': score_list
                })
        except Exception as e:
            # Best-effort: a bad report row must not break the dashboard.
            print(e)

    # --- SQL-audit average score per database ---
    sql_audit_count_list = []
    sql_audit_query_result = execute_return_json(
        'SELECT database_id,(SELECT alias From monitor_database WHERE id = database_id),AVG (total_score) FROM sqlaudit_audit_job WHERE status=3 GROUP BY database_id;'
    )
    have_audit_database_id_list = [
        _data.get('database_id') or _data.get('DATABASE_ID')
        for _data in sql_audit_query_result
    ]
    for _result in sql_audit_query_result:
        sql_audit_count_list.append({
            'database_alias':
            _result.get('alias') or _result.get('ALIAS'),
            'avg':
            _result.get('avg') or _result.get('AVG')
        })

    # Pad the list with never-audited databases (score 0) up to five entries.
    for _database1 in Database.objects.exclude(
            id__in=have_audit_database_id_list):
        if len(sql_audit_count_list) < 5:
            sql_audit_count_list.append({
                'database_alias': _database1.alias,
                'avg': 0
            })

    # d1 marks "one hour ago" for the recent-warning counter below.
    now = datetime.datetime.now()
    d1 = now - (datetime.timedelta(hours=1))
    data1 = [
        DatabaseData('oracle', oracle_count).__dict__,
        DatabaseData('mysql', mysql_count).__dict__,
        DatabaseData('db2', db2_count).__dict__
    ]
    data2 = [
        DatabaseData('oracle', warn_oracle_pct).__dict__,
        DatabaseData('mysql', warn_mysql_pct).__dict__,
        DatabaseData('db2', warn_db2_pct).__dict__,
        DatabaseData('sqlserver', warn_sqlserver_pct).__dict__
    ]
    # Share of each db type in the inventory; zero-count types are skipped.
    database_pct = []
    if oracle_count != 0:
        database_pct.append(
            DatabaseData(
                'oracle',
                num2pct(oracle_count / database_count)
                if database_count != 0 else 0).__dict__)
    if mysql_count != 0:
        database_pct.append(
            DatabaseData(
                'mysql',
                num2pct(mysql_count / database_count)
                if database_count != 0 else 0).__dict__)
    if db2_count != 0:
        database_pct.append(
            DatabaseData(
                'db2',
                num2pct(db2_count / database_count)
                if database_count != 0 else 0).__dict__)
    if sqlserver_count != 0:
        database_pct.append(
            DatabaseData(
                'sqlserver',
                num2pct(sqlserver_count / database_count)
                if database_count != 0 else 0).__dict__)
    data10 = {
        'health_check': {
            'score':
            num2pct(
                sum(all_health_report_score_list) /
                len(all_health_report_score_list))
            if all_health_report_score_list else 0,
            'level': [
                healthcheck_report_danger_count,
                healthcheck_report_general_count, healthcheck_report_good_count
            ]
        },
        'warn': {
            'all_warn_count':
            warn_query_sum,
            'recent_hour_warn_count':
            (Warn_Result.objects.filter(created_at__gt=d1)).count()
        },
        'database': {
            'able': {
                'able_pct':
                num2pct((Database.objects.filter(disabled=False)).count() /
                        database_count) if database_count != 0 else 0
            },
            'pct': database_pct,
            # NOTE(review): host OS breakdown is hard-coded to zeros —
            # presumably a placeholder awaiting real data.
            'host': {
                'Linux': 0,
                'AIX': 0,
                'Windows': 0
            }
        }
    }
    data = {
        '1': data1,
        '2': data2,
        '3': healthcheck_score_list,
        '8': sql_audit_count_list[:5],
        '10': data10
    }
    return Response(data, status=status.HTTP_200_OK)
Beispiel #5
0
def object_detail(pk,
                  owner,
                  object_name,
                  object_type=None,
                  subobject_name=None,
                  cache=False):
    """Fetch detail metadata (and DDL) for a schema object, redis-cached.

    NOTE(review): the control flow here looks indentation-mangled:
    * Everything is nested under ``if cache:`` — with ``cache=False`` the
      function does no work and ``return new_schema`` raises NameError
      (swallowed below into an error dict). Confirm against upstream.
    * The main query logic is nested under ``if not object_type:`` — a
      caller that supplies object_type also gets no work done.
    """
    try:
        schema_name = owner
        key = f'''{pk}:schema:{owner}:{object_name}:{subobject_name}'''
        if cache:
            # Cache hit: return the stored JSON straight away.
            cache_data = redis.get(key)
            if cache_data:
                return json.loads(cache_data)
            database = Database.objects.get(pk=pk)
            db_type = database.db_type
            type_map = Type_TO_CN.get(database.db_type)
            schema_name = owner
            db_name = None
            if db_type == 'sqlserver':
                # SQL Server owners arrive as "dbname.schema".
                db_name, owner = owner.split('.')
            options = {
                'OWNER': owner,
                'OBJECT_NAME': object_name,
                'SUBOBJECT_NAME': subobject_name
            }
            if not object_type:
                object_type = get_object_type(database, owner, object_name,
                                              options, db_name)
                if not object_type:
                    # NOTE(review): message appears stripped by encoding loss.
                    raise Exception('.')
                else:
                    object_type = type_map.get(object_type)
                options['OBJECT_TYPE'] = CN_TO_Type.get(db_type).get(
                    object_type)
                detail_query = {}
                if Object_Detail_Query.get(db_type):
                    if Object_Detail_Query.get(db_type).get(object_type):
                        detail_query = Object_Detail_Query.get(db_type).get(
                            object_type)
                ddl_query = DDL_Query.get(db_type) if DDL_Query.get(
                    db_type) else {}
                # DDL is only fetched for top-level objects (no subobject).
                if not subobject_name:
                    query = {**detail_query, **ddl_query}
                else:
                    query = detail_query
                if db_type == 'sqlserver':
                    if detail_query:
                        query.pop('DDL')
                query = {k: (v.format(**options)) for k, v in query.items()}
                flag, schema_data = run_batch_sql(database, query, db_name)
                if not flag:
                    raise build_exception_from_java(schema_data)
                # Non-MySQL: the DDL result is a single row with a DDL column.
                if schema_data.get('DDL'):
                    if db_type != 'mysql':
                        schema_data['DDL'] = schema_data.get('DDL')[0].get(
                            'DDL') if schema_data.get('DDL') else ''
                # MySQL: SHOW CREATE returns a column whose *name* contains
                # "Create ..." (or "SQL Original Statement" for some objects).
                if schema_data.get('DDL'):
                    if db_type == 'mysql':
                        ddl_data = schema_data.get('DDL')[0]
                        schema_data['DDL'] = None
                        for k, v in ddl_data.items():
                            if 'create ' in k.lower():
                                schema_data['DDL'] = v

                        if not schema_data['DDL']:
                            for k, v in ddl_data.items():
                                if 'SQL Original Statement' in k:
                                    schema_data['DDL'] = v

                delta_list = []
                total_list = []
                # NOTE(review): '' was presumably a localized type name
                # (likely "table") lost to encoding — confirm upstream.
                if object_type == '':
                    query_delta = f'''
            select extract(epoch from created_at)*1000 created_at, rows - lag(rows) over (order by created_at) as rows
            from monitor_table_rows where database_id = '{pk}'
            and owner = '{schema_name}' and table_name = '{object_name}'
            order by created_at
            '''
                    query_total = f'''
            select extract(epoch from created_at)*1000 created_at, rows
            from monitor_table_rows where database_id = '{pk}'
            and owner = '{schema_name}' and table_name = '{object_name}'
            order by created_at
            '''
                    delta_list = execute_return_json(query_delta)
                    total_list = execute_return_json(query_total)
                # Re-emit the sections in the canonical display order.
                new_schema = OrderedDict()
                for x in Ordered_List:
                    if x in schema_data:
                        new_schema[x] = schema_data.get(x)

                if delta_list:
                    # NOTE(review): '' key also looks like a stripped label.
                    new_schema[''] = {
                        'delta': [[x.get('CREATED_AT'),
                                   x.get('ROWS')] for x in delta_list
                                  if x.get('ROWS') != None],
                        'total': [[x.get('CREATED_AT'),
                                   x.get('ROWS')] for x in total_list
                                  if x.get('ROWS') != None]
                    }
                redis.set(key, json.dumps(new_schema))
        return new_schema
    except ObjectDoesNotExist:
        return {'error_message': ''}
    except Exception as err:
        return {'error_message': str(err)}
Beispiel #6
0
def overall_table_rows(pk, time_span=None):
    """Summarize stored table-row counts for a database: grand total,
    per-snapshot delta/total series, and the per-table breakdown at the
    latest (or requested) snapshot.

    NOTE(review): every query is nested under ``if not time_span:`` — when a
    caller *supplies* time_span, the result variables are never bound and
    the NameError is swallowed into ``{'error_message': ...}``. This looks
    like mangled indentation; confirm against upstream.
    """
    try:
        if not time_span:
            # Default to the most recent snapshot epoch for this database.
            query_time_span = f'''
            select max(extract(epoch from created_at)) time_span from monitor_table_rows where database_id = '{pk}'
            '''
            time_span_result = execute_return_json(query_time_span)
            if time_span_result:
                time_span = time_span_result[0].get('TIME_SPAN')
                if time_span is None:
                    return {'error_message': ''}
            else:
                return {'error_message': ''}
            # Grand total of rows at the chosen snapshot.
            query_total = f'''select sum(rows) as total
     from monitor_table_rows where database_id = '{pk}' and created_at = to_timestamp({time_span})'''
            # Per-snapshot delta series (skips the first snapshot, which
            # has no predecessor).
            query_delta_list = f'''
    select extract(epoch from created_at)*1000 created_at, delta
    from
    (
     select created_at, rows - lag(rows) over (order by created_at) delta, row_number() over(order by created_at) as id
     from
    (
    select created_at, sum(rows) as rows
    from monitor_table_rows where database_id = '{pk}'
    group by created_at
    ) a
    order by created_at
    ) b where delta is not null and id > 1'''
            # Per-snapshot total series.
            query_total_list = f'''
    select extract(epoch from created_at)*1000 created_at, sum(rows) as rows
    from monitor_table_rows where database_id = '{pk}'
    group by created_at
    order by created_at'''
            # Per-table rows at the snapshot, with lag vs the previous
            # snapshot and the elapsed time (in days) between the two.
            query_table_rows_list = f'''
with v as (
select created_at, table_name, sum(rows) as rows
from monitor_table_rows where '{pk}' = database_id and created_at = to_timestamp({time_span})
group by created_at, table_name
),
v1 as (
select created_at, table_name, sum(rows) as rows
from monitor_table_rows where '{pk}' = database_id and created_at = (select max(created_at) from monitor_table_rows where '{pk}' = database_id and created_at < to_timestamp({time_span}))
group by created_at, table_name
)
select
  table_name, rows,
  rows - (select rows from v1 where v1.table_name = v.table_name) rows_lag,
  date_part('day',(to_timestamp({time_span}) - (select created_at from v1 limit 1))) +
  round(date_part('hour',(to_timestamp({time_span}) - (select created_at from v1 limit 1)))::numeric/24, 1) time_lag
  from v order by rows desc
  '''
            delta_total_result = execute_return_json(query_total)
            query_delta_result = execute_return_json(query_delta_list)
            query_total_result = execute_return_json(query_total_list)
            query_table_rows_result = execute_return_json(
                query_table_rows_list)
        # NOTE(review): `datetime.fromtimestamp` implies this module uses
        # `from datetime import datetime` — confirm the import style.
        return {
            'total':
            delta_total_result[0].get('TOTAL') if delta_total_result else None,
            'time_span':
            datetime.fromtimestamp(time_span).strftime('%Y-%m-%d %H:%M:%S'),
            'delta_list': [[x.get('CREATED_AT'),
                            x.get('DELTA')] for x in query_delta_result
                           if x.get('DELTA') != None],
            'total_list': [[x.get('CREATED_AT'),
                            x.get('ROWS')] for x in query_total_result
                           if x.get('ROWS') != None],
            'table_rows':
            query_table_rows_result
        }
    except ObjectDoesNotExist:
        return {'error_message': ''}
    except Exception as err:
        return {'error_message': str(err)}
Beispiel #7
0
def oracle_sql_detail(pk, sql_id, sql_text=None, instance_id=None, time_span=None, begin_time=None, end_time=None, cache=True, activity=True, sql_audit=True, only_tune=False):
    """Assemble the full detail payload (stats, plans, sqlmon, binds,
    audit, tune advice) for one Oracle SQL statement.

    Args:
        pk: Database primary key.
        sql_id: Oracle SQL_ID; the literal string 'null' means "none".
        sql_text: unused here; kept for interface compatibility.
        instance_id: RAC instance filter; 'null', the db name, or '0' all
            mean "use the default (first) instance".
        time_span: 'realtime' reads the live v$ views; anything else reads
            AWR history between begin_time and end_time.
        begin_time / end_time: epoch bounds for the history path.
        cache: consult/populate the redis audit cache.
        activity: unused here; kept for interface compatibility.
        sql_audit: query all instances and merge sqlmon-derived binds into
            the bind data.
        only_tune: skip bind capture and cached-audit injection (tuning
            advice only).

    Returns:
        The sql_detail dict, or the raw error payload when a batch query
        fails.
    """
    database = Database.objects.get(pk=pk)
    if instance_id == 'null':
        instance_id = database.db_name
    # Default to the first instance unless a concrete instance was requested.
    inst_id = database.instance_id_list.split(',')[0] if not instance_id or instance_id == database.db_name or instance_id == '0' else instance_id
    if sql_audit:
        # Auditing needs data from every instance, not just one.
        inst_id = database.instance_id_list
    key_audit = f'''{pk}:sql_detail:{sql_id}:audit'''
    audit_data = None
    audit_data_json = {}
    if cache:
        audit_data = redis.get(key_audit)
        if audit_data is not None:
            audit_data_json = json.loads(audit_data)
    sql_detail = get_default_sql_detail_format(database.db_type)
    if sql_id != 'null':
        if time_span == 'realtime':
            sqldetail_sql = get_realtime_sql(sql_id, inst_id)
            if only_tune:
                # Bind capture is not needed for tuning-only requests.
                sqldetail_sql.pop('binds')
            flag, sqldetail_data = run_batch_sql(database, sqldetail_sql)
            if not flag:
                return sqldetail_data
            stat_data = sqldetail_data.get('stats')
            plan_data = sqldetail_data.get('plans')
            sqlmon_data = sqldetail_data.get('sqlmon')
            # BUG FIX: original line was a syntax error
            # (`bind_data = only_tunesqldetail_data.get('binds')[]`); when
            # only_tune the 'binds' query was popped above, so use [].
            bind_data = [] if only_tune else sqldetail_data.get('binds')
            for x in stat_data:
                # One stats entry per (instance, child cursor, plan hash).
                key = '{}-{}-{}'.format(x.get('INST_ID'), x.get('CHILD_NUMBER'), x.get('PLAN_HASH_VALUE'))
                child_summary = {k: v for k, v in x.items() if k in ('CHILD_NUMBER',
                                                                     'PLAN_HASH_VALUE',
                                                                     'PARSING_SCHEMA_NAME',
                                                                     'LAST_LOAD_TIME',
                                                                     'MODULE', 'ACTION',
                                                                     'SERVICE')}
                # Wait-class time split for the pie chart.
                pie_chart_data = {k: v for k, v in x.items() if k in ('ON CPU', 'Application',
                                                                      'Cluster', 'Concurrency',
                                                                      'User I/O')}
                execution_stats = {k: v for k, v in x.items() if k in ('EXECUTIONS',
                                                                       'ELAPSED_TIME',
                                                                       'CPU_TIME',
                                                                       'BUFFER_GETS',
                                                                       'DISK_READS',
                                                                       'DIRECT_WRITES',
                                                                       'ROWS_PROCESSED',
                                                                       'FETCHES')}
                # NOTE(review): these display labels appear stripped by
                # encoding loss (originally non-ASCII text) — restore from
                # upstream if possible.
                metric_dict = {'EXECUTIONS': '',
                               'ELAPSED_TIME': '()',
                               'CPU_TIME': 'CPU()',
                               'BUFFER_GETS': '',
                               'DISK_READS': '',
                               'DIRECT_WRITES': '',
                               'ROWS_PROCESSED': '',
                               'FETCHES': ''}
                # Avoid division by zero for never-executed / zero-row cursors.
                total_executions = execution_stats.get('EXECUTIONS') if execution_stats.get('EXECUTIONS') != 0 else 1
                total_rows = execution_stats.get('ROWS_PROCESSED') if execution_stats.get('ROWS_PROCESSED') != 0 else 1
                # Keys are localized column headers (kept as escapes).
                execution_data = [{u'\u6307\u6807': metric_dict.get(k), u'\u603b\u6570': v, u'\u5e73\u5747\u6bcf\u6b21\u6267\u884c': round(v / total_executions), u'\u5e73\u5747\u6bcf\u884c\u8bb0\u5f55': round(v / total_rows)} for k, v in execution_stats.items()]
                sql_detail['stats'][key] = {'child_summary': child_summary,
                                            'pie_chart_data': pie_chart_data,
                                            'execution_stats': {'header': [
                                                '', '', '', ''],
                                                'data': execution_data}}

            # Group plan lines by the same (inst, child, phv) key.
            plan_dic = defaultdict(list)
            for x in plan_data:
                key = '{}-{}-{}'.format(x.get('INST_ID'), x.get('CHILD_NUMBER'), x.get('PLAN_HASH_VALUE'))
                x.pop('INST_ID')
                plan_dic[key].append(x)

            sql_detail['plans']['data'] = plan_dic
            if sql_audit:
                if sqlmon_data:
                    # Cap the sqlmon rows fed into the audit, and merge the
                    # binds captured from them.
                    sqlmon_data = sqlmon_data[:MAX_SQLMON_FOR_SQL_AUDIT]
                    binds_from_sqlmon = gen_sql_mononitor_and_binds(database, sqlmon_data)
                    bind_data = bind_data + binds_from_sqlmon
            sql_detail['sqlmon']['data'] = sqlmon_data
            sql_detail['binds']['data'] = bind_data
        else:
            # History path: AWR stats from the target db, sqlmon rows from
            # the local monitoring store.
            sqldetail_sql = get_hist_sql(sql_id, inst_id, begin_time, end_time)
            query_sqlmon = f'''
                select
                ID,
                STATUS,
                SQL_ID,
                ELAPSED_TIME,
                DB_TIME,
                DB_CPU,
                SQL_EXEC_ID,
                SQL_EXEC_START,
                SQL_PLAN_HASH_VALUE,
                INST_ID,
                USERNAME
                from monitor_sqlmon
                where created_at BETWEEN to_timestamp({begin_time}) and to_timestamp({end_time})
                and sql_id = '{sql_id}' and database_id = '{pk}'
            '''
            flag, sqldetail_data = run_batch_sql(database, sqldetail_sql)
            if not flag:
                return sqldetail_data
            stat_data = sqldetail_data.get('stats')
            plan_data = sqldetail_data.get('plans')
            bind_data = sqldetail_data.get('binds')
            sqlmon_data = execute_return_json(query_sqlmon)
            # Per-plan-hash time series for each metric.
            exec_delta = defaultdict(list)
            avg_elapse_time = defaultdict(list)
            avg_cpu_time = defaultdict(list)
            avg_crs = defaultdict(list)
            avg_reads = defaultdict(list)
            plan_dic = defaultdict(list)
            stats_dict = defaultdict(dict)
            for x in stat_data:
                phv = str(x.get('PLAN_HASH_VALUE'))
                snap_time = x.get('SNAP_TIME')
                exec_delta[phv].append([snap_time, x.get('EXEC_DELTA')])
                avg_elapse_time[phv].append([snap_time, x.get('AVG_ELAPSE_TIME')])
                avg_cpu_time[phv].append([snap_time, x.get('AVG_CPU_TIME')])
                avg_crs[phv].append([snap_time, x.get('AVG_CRS')])
                avg_reads[phv].append([snap_time, x.get('AVG_READS')])

            # NOTE(review): the '' keys look like stripped localized labels;
            # the three '' assignments collide, so only the last survives.
            stats_dict[''] = exec_delta
            stats_dict['(s)'] = avg_elapse_time
            # BUG FIX: 'CPU(s)' was assigned avg_elapse_time while
            # avg_cpu_time was computed but never used.
            stats_dict['CPU(s)'] = avg_cpu_time
            stats_dict[''] = avg_crs
            stats_dict[''] = avg_reads
            for x in plan_data:
                phv = str(x.get('PLAN_HASH_VALUE'))
                plan_dic[phv].append(x)

            sql_detail['stats'] = stats_dict
            sql_detail['plans']['data'] = plan_dic
            sql_detail['sqlmon']['data'] = sqlmon_data
            sql_detail['binds']['data'] = bind_data
        if cache:
            if audit_data is not None:
                # Cache hit: reuse the stored audit (unless tuning-only).
                if not only_tune:
                    sql_detail['audit'] = audit_data_json
            else:
                # BUG FIX: the original re-audited and re-cached on every
                # cache *hit* and never populated the cache on a miss;
                # restore the hit/miss branches.
                audit_data_json = get_sql_audit(pk, sql_id, only_tune=only_tune)
                sql_detail['audit'] = audit_data_json
                redis.setex(key_audit, SQLTEXT_RETENTION, json.dumps(audit_data_json))
            if audit_data:
                new_plan_dict = {}
                if plan_dic:
                    # Strip any "inst-child-" prefix so keys are plan hashes.
                    new_plan_dict = {k[k.rfind('-') + 1:]: v for k, v in plan_dic.items()}
                tune_data = get_sql_tune(database, audit_data_json, new_plan_dict)
                if tune_data:
                    sql_detail['tune'] = tune_data
        return sql_detail