def get_activity_realtime(pk):
    """Return the latest one-second wait-class sample for database *pk*.

    Maps the connection alias (or db_name when no data is available) to a
    chart-ready payload. A missing database yields ``{'message': ''}``;
    any other failure yields ``{'error_message': str(err)}``.
    """
    try:
        conn = Database.objects.get(pk=pk)
        db_type = conn.db_type
        if db_type == 'oracle':
            # Oracle fans out per instance; delegated to a dedicated helper.
            return get_oracle_activity_ws(conn)

        now_timestamp = get_1s_timestamp()
        ash_table = Activity_Table_Name.get(db_type)
        wait_class = Wait_Class_Column.get(db_type)
        activity_type = Activity_Type.get(db_type)
        query = f'''select {wait_class}, count(*) aas ,
            cast(extract(epoch from created_at) as bigint) * 1000 created_at
            from {ash_table}
            where database_id = '{pk}'
            and created_at = to_timestamp({now_timestamp})
            group by {wait_class}, created_at'''
        data = execute_ash_return_json(query)
        result = {}
        if data:
            result[conn.alias] = data2result_json(
                data, activity_type, is_open=False, is_realtime=True)
        else:
            # NOTE(review): the fallback keys on db_name while the data path
            # keys on alias — preserved as-is; confirm the asymmetry is wanted.
            result[conn.db_name] = get_default_ash(db_type)
        return result
    except ObjectDoesNotExist:
        return {'message': ''}
    except Exception as err:
        return {'error_message': str(err)}
def update_dashboard_data(database_list):
    """Build dashboard rows (top activity + recent alarm count) per database.

    Parameters
    ----------
    database_list : QuerySet | None
        Databases to summarize; ``None`` yields an empty list.

    Returns
    -------
    list[dict]
        One entry per database with keys ``top_activity``, ``database``
        and ``alarm_count``. Databases with no activity data are skipped.
    """
    # Fix: was `database_list == None` — identity comparison (`is None`) is
    # the correct idiom and is safe for QuerySet arguments.
    if database_list is None:
        return []
    db_id_set = database_list.values_list('id', flat=True)
    # Alarms raised in the last 10 minutes for any of the listed databases.
    warn_result_set = Warn_Result.objects.filter(
        database_id__in=db_id_set).filter(
        created_at__gte=datetime.datetime.now()
        - datetime.timedelta(minutes=10))
    result = []
    for db in database_list:
        now_timestamp = get_1s_timestamp()
        activity_data = get_database_activity(
            str(db.id),
            begin_time=now_timestamp - Dashboard_RANGE,
            end_time=now_timestamp)
        # Activity payload may be keyed by db_name or alias depending on the
        # database type — try both.
        top_activity = (activity_data.get(db.db_name)
                        or activity_data.get(db.alias))
        if not top_activity:
            continue
        top_activity = top_activity.get('data')
        database_dict = dict(DatabaseSerializer(db).data)
        database_dict.update({'id': str(db.id), 'owner': str(db.owner.id)})
        result.append({
            'top_activity': top_activity,
            'database': database_dict,
            'alarm_count': warn_result_set.filter(database=db).count(),
        })
    return result
def get_oracle_activity_ws(database):
    """Collect the latest one-second wait-class sample for each Oracle
    instance of *database*, plus a database-wide aggregate for multi-instance
    (RAC-style) setups.
    """
    db_id = database.id
    id_list_str = database.instance_id_list
    sample_ts = get_1s_timestamp()
    instance_ids = id_list_str.split(',')
    if database.instance_count > 1:
        # An extra pass with the full id list produces the aggregate entry.
        instance_ids.append(id_list_str)

    result = {}
    for inst_id in instance_ids:
        query = f'''select wait_class, count(*) aas ,
            cast(extract(epoch from created_at) as bigint) * 1000 created_at
            from monitor_oracle_ash
            where database_id = '{db_id}'
            and inst_id in ({inst_id})
            and created_at = to_timestamp({sample_ts})
            group by wait_class, created_at'''
        rows = execute_ash_return_json(query)
        if rows:
            payload = data2result_json(
                rows, OracleActivityType, is_open=False, is_realtime=True)
        else:
            payload = get_default_ash('oracle')
        # Aggregate pass (or single-instance database) keys on db_name;
        # individual instances key on their id.
        if inst_id == id_list_str or not has_instance(database):
            result[database.db_name] = payload
        else:
            result[inst_id] = payload
    return result
def get_top_sql_detail(database):
    """Fetch top-SQL rows for *database* over the last INTERVAL seconds and
    generate a detail record for each row via ``db2_gen_sql_detail``.
    """
    db_type = database.db_type
    # One dimension per table column, excluding the label column.
    dim = len(TABLE_HEADERS.get(db_type)) - 1
    end_time = get_1s_timestamp()
    begin_time = end_time - INTERVAL
    top_sql_list = get_activity_dimension(
        str(database.id), begin_time=begin_time, end_time=end_time, dim=dim)
    # Fix: guard against a missing or None 'data' key (e.g. when the upstream
    # call returned an error payload) — iterating None raised TypeError.
    for row in top_sql_list.get('data') or []:
        db2_gen_sql_detail(database, row.get('SQL_ID'),
                           row.get('SQL_TEXT'), row.get('SCHEMA'))
def get_realtime_data(self):
    """Return the newest performance sample per Oracle instance.

    Keys the result by instance id, or by db_name for the aggregate pass /
    single-instance case.
    """
    database = self.database
    database_id = database.id
    id_list_str = database.instance_id_list
    # Per-instance ids plus the full comma-separated list for an
    # aggregate, database-wide pass.
    instance_ids = id_list_str.split(',') + [id_list_str]
    sample_ts = get_1s_timestamp()
    data_dic = {}
    result = {}
    for inst_id in instance_ids:
        # Summed counters for the configured performance metrics.
        query_sum = f'''select name_id, sum(value),
            cast(extract(epoch from min(created_at)) as bigint)*1000
            from monitor_performance
            where database_id ='{database_id}'
            and inst_id in ({inst_id})
            and created_at = to_timestamp({sample_ts})
            and name_id in ({self.PERFORMANCE_NAME})
            group by database_id,name_id,created_at'''
        data_dic.update(new_execute_ash_return_json(query_sum))
        # Raw per-instance CPU readings.
        query_cpu = f'''select name_id, inst_id, value,
            cast(extract(epoch from created_at) as bigint)*1000
            from monitor_performance
            where database_id = '{database_id}'
            and inst_id in ({inst_id})
            and created_at = to_timestamp({sample_ts})
            and name_id in ({self.PERFORMANCE_CPU_NAME})'''
        data_dic.update(new_execute_cpu_return_json(query_cpu))
        # Max value for wait-style metrics.
        query_avg = f'''select name_id, max(value),
            cast(extract(epoch from min(created_at)) as bigint)*1000
            from monitor_performance
            where database_id = '{database_id}'
            and inst_id in ({inst_id})
            and created_at = to_timestamp({sample_ts})
            and name_id in ({self.PERFORMANCE_WAIT_NAME})
            group by database_id,name_id,created_at'''
        data_dic.update(new_execute_ash_return_json(query_avg))
        # NOTE(review): data_dic accumulates across iterations (later
        # instances overwrite matching name_id keys) — preserved as-is.
        payload = data2result_oracle(
            data_dic, OraclePerformanceType, is_open=False, is_realtime=True)
        if not has_instance(database) or inst_id == id_list_str:
            result[database.db_name] = payload
        else:
            result[inst_id] = payload
    return result
def get_database_activity(pk, time_span=None, instance_id=None, sql_id=None,
                          session_id=None, begin_time=None, end_time=None):
    """Return wait-class activity time series for database *pk*.

    Parameters
    ----------
    pk : str
        Database primary key.
    time_span : str | None
        ``'realtime'`` for the rolling last-hour window; otherwise the
        explicit ``begin_time``/``end_time`` (epoch seconds) range is used.
    instance_id, sql_id, session_id :
        Optional filters; Oracle instance handling is delegated to
        ``get_oracle_activity``.

    Returns
    -------
    dict
        Alias (or instance) keyed series, or ``{'error_message': ...}``
        on failure.
    """
    try:
        conn = Database.objects.get(pk=pk)
        db_type = conn.db_type
        sql_id_filter = get_sql_id_filter_str(
            db_type, sql_id, session_id) if sql_id or session_id else ''
        if conn.db_type == DatabaseType.ORACLE.value:
            return get_oracle_activity(conn, time_span, instance_id,
                                       sql_id_filter, begin_time, end_time)

        result = {}
        now_timestamp = get_1s_timestamp()
        ash_table = Activity_Table_Name.get(db_type)
        wait_class = Wait_Class_Column.get(db_type)
        activity_type = Activity_Type.get(db_type)
        wait_classes = Wait_Class.get(db_type)
        # "select 'X' <col> union all select 'Y' ..." — every known wait
        # class, with the first SELECT aliasing the column.
        all_wait_class = ' union all '.join(
            f"select '{x}' {wait_class}" if idx == 0 else f"select '{x}'"
            for idx, x in enumerate(wait_classes))
        first_wait_class = f"select '{wait_classes[0]}' {wait_class}"

        # NOTE(review): pk and timestamps are interpolated straight into
        # SQL (f-string); parameterized queries would be safer — flagging,
        # not changing, since execute_ash_return_json's API isn't visible.
        if time_span == 'realtime':
            # One placeholder point every 10 s over the last hour (360 rows).
            query = f'''WITH RECURSIVE cnt(x) AS (
                values(1) UNION ALL SELECT x+1 FROM cnt where x < 360 ),
                ashdata as(
                    select {wait_class}, count(*) aas,
                    extract(epoch from created_at) created_at
                    from {ash_table}
                    where created_at > TIMESTAMP 'now' - interval '1 hours'
                    and database_id = '{pk}' {sql_id_filter}
                    group by {wait_class}, created_at
                    order by created_at desc),
                ash_state as ({all_wait_class}),
                ash_placehoder as (
                    select {now_timestamp} - x*10 created_at, {wait_class}
                    from cnt, ash_state)
                select t1.{wait_class},
                    cast(COALESCE(t2.aas, 0) as real) aas,
                    cast(COALESCE(t1.created_at) as bigint) *1000 created_at
                from ash_placehoder t1
                left outer join ashdata t2
                    on t1.{wait_class} = t2.{wait_class}
                    and t1.created_at = t2.created_at
                order by t1.{wait_class}, t1.created_at'''
        else:
            # Renamed from `time` (shadowed the stdlib module name).
            span = int(end_time) - int(begin_time)
            # Flattened the former five-deep nested if/else ladder into an
            # elif chain; num1 = point count, num2 = bucket width (seconds).
            if span <= 3600:
                num1, num2 = int(span / 10), 10
            elif span <= 86400:
                num1, num2 = 144, 600
            elif span <= 604800:
                num1, num2 = 1008, 600
            elif span <= 2678400:
                num1, num2 = 744, 3600
            elif span <= 8035200:
                num1, num2 = 2232, 3600
            elif span <= 31622400:
                num1, num2 = 8784, 3600
            else:
                num1, num2 = 144, 600
            # Align the last point to a bucket boundary.
            max_point = int(end_time) - int(end_time) % num2
            query = f'''WITH RECURSIVE cnt(x) AS (
                values(1) UNION ALL SELECT x+1 FROM cnt where x < {num1} ),
                ashdata as(
                    select {wait_class} ,
                    cast(extract(epoch from created_at) as bigint)
                    - mod(cast(extract(epoch from created_at) as bigint),{num2}) created_at,
                    10.0*count(*)/{num2} aas
                    from {ash_table}
                    where created_at between to_timestamp({begin_time})
                    and to_timestamp({end_time})
                    and database_id = '{pk}' {sql_id_filter}
                    group by {wait_class},
                    cast(extract(epoch from created_at) as bigint)
                    - mod(cast(extract(epoch from created_at) as bigint),{num2})
                    order by created_at desc),
                ash_state as (
                    select distinct {wait_class} from ashdata
                    union {first_wait_class}),
                ash_placehoder as (
                    select {max_point} - x*{num2} created_at, {wait_class}
                    from cnt, ash_state)
                select t1.{wait_class},
                    cast(COALESCE(t2.aas, 0) as real) aas,
                    cast(t1.created_at as bigint) *1000 created_at
                from ash_placehoder t1
                left outer join ashdata t2
                    on t1.{wait_class} = t2.{wait_class}
                    and t1.created_at = t2.created_at
                order by t1.{wait_class}, t1.created_at'''
        data_dict = execute_ash_return_json(query)
        result[conn.alias] = data2result_json(data_dict, activity_type)
        return result
    except ObjectDoesNotExist:
        return {'error_message': ''}
    except Exception as err:
        return {'error_message': str(err)}
def get_oracle_activity(database, time_span=None, instance_id=None,
                        sql_id_filter=None, begin_time=None, end_time=None):
    """Return wait-class activity time series for an Oracle *database*.

    For ``time_span == 'realtime'`` the result is keyed per instance (plus a
    db_name-keyed aggregate for multi-instance setups) over the rolling last
    hour. Otherwise a single series for the requested instance (or the whole
    database) is built over ``begin_time``..``end_time`` (epoch seconds).
    """
    result = {}
    database_id = database.id
    instance_id_list_str = database.instance_id_list
    instance_id_list = instance_id_list_str.split(',')
    if database.instance_count > 1:
        # Extra pass with the full id list produces the aggregate entry.
        instance_id_list.append(instance_id_list_str)

    if time_span == 'realtime':
        for inst_id in instance_id_list:
            now_timestamp = get_1s_timestamp()
            # One placeholder point every 10 s for the last hour (360 rows);
            # ash_state enumerates every Oracle wait class so empty classes
            # still chart as zero.
            query = f'''WITH RECURSIVE cnt(x) AS (
                values(1) UNION ALL SELECT x+1 FROM cnt where x < 360 ),
                ashdata as(
                    select wait_class , count(*) aas,
                    extract(epoch from created_at) created_at
                    from monitor_oracle_ash
                    where created_at > TIMESTAMP 'now' - interval '1 hours'
                    and database_id = '{database_id}'
                    and inst_id in ({inst_id}) {sql_id_filter}
                    group by wait_class, created_at
                    order by created_at desc),
                ash_state as (select 'ON CPU' wait_class
                    union all select 'Other'
                    union all select 'Application'
                    union all select 'Configuration'
                    union all select 'Cluster'
                    union all select 'Administrative'
                    union all select 'Concurrency'
                    union all select 'Commit'
                    union all select 'Network'
                    union all select 'User I/O'
                    union all select 'System I/O'
                    union all select 'Scheduler'
                    union all select 'Queueing'),
                ash_placehoder as (
                    select {now_timestamp} - x*10 created_at, wait_class
                    from cnt, ash_state)
                select t1.wait_class,
                    cast(COALESCE(t2.aas, 0) as real) aas,
                    cast(COALESCE(t1.created_at) as bigint) *1000 created_at
                from ash_placehoder t1
                left outer join ashdata t2
                    on t1.wait_class = t2.wait_class
                    and t1.created_at = t2.created_at
                order by t1.wait_class, t1.created_at'''
            data_dict = execute_ash_return_json(query)
            if inst_id == instance_id_list_str or not has_instance(database):
                result[database.db_name] = data2result_json(
                    data_dict, OracleActivityType)
            else:
                result[inst_id] = data2result_json(data_dict,
                                                   OracleActivityType)
        return result

    # Historical range: a single series, scoped to one instance if requested.
    inst_id_list = (database.instance_id_list
                    if not instance_id or instance_id == database.db_name
                    else instance_id)
    key = instance_id if instance_id else database.db_name
    # Renamed from `time` (shadowed the stdlib module name).
    span = int(end_time) - int(begin_time)
    # Flattened the former nested if/else ladder into an elif chain;
    # num1 = point count, num2 = bucket width (seconds).
    if span <= 3600:
        num1, num2 = int(span / 10), 10
    elif span <= 86400:
        num1, num2 = 144, 600
    elif span <= 604800:
        num1, num2 = 1008, 600
    elif span <= 2678400:
        num1, num2 = 744, 3600
    elif span <= 8035200:
        num1, num2 = 2232, 3600
    elif span <= 31622400:
        num1, num2 = 8784, 3600
    else:
        num1, num2 = 144, 600
    # Align the last point to a bucket boundary.
    max_point = int(end_time) - int(end_time) % num2
    query = f'''WITH RECURSIVE cnt(x) AS (
        values(1) UNION ALL SELECT x+1 FROM cnt where x < {num1} ),
        ashdata as(
            select wait_class ,
            cast(extract(epoch from created_at) as bigint)
            - mod(cast(extract(epoch from created_at) as bigint),{num2}) created_at,
            10.0*count(*)/{num2} aas
            from monitor_oracle_ash
            where created_at between to_timestamp({begin_time})
            and to_timestamp({end_time})
            and database_id = '{database_id}'
            and inst_id in ({inst_id_list}) {sql_id_filter}
            group by wait_class,
            cast(extract(epoch from created_at) as bigint)
            - mod(cast(extract(epoch from created_at) as bigint),{num2})
            order by created_at desc),
        ash_state as (
            select distinct wait_class from ashdata
            union select 'ON CPU' wait_class),
        ash_placehoder as (
            select {max_point} - x*{num2} created_at, wait_class
            from cnt, ash_state)
        select t1.wait_class,
            cast(COALESCE(t2.aas, 0) as real) aas,
            cast(t1.created_at as bigint) *1000 created_at
        from ash_placehoder t1
        left outer join ashdata t2
            on t1.wait_class = t2.wait_class
            and t1.created_at = t2.created_at
        order by t1.wait_class, t1.created_at'''
    data_dict = execute_ash_return_json(query)
    result[key] = data2result_json(data_dict, OracleActivityType)
    return result