Example #1
import datetime

# database_method and top_log are project-local helpers (connection factory and logger setup).
def ZJ_RUN_DETECT_EXEC_MODEL(db_name_, dbid_, inst_id_, gath_time):
    # gath_time arrives as a plain "%Y-%m-%d %H:%M" string; no encode/decode round-trip is needed.
    gath_time_ = datetime.datetime.strptime(gath_time, "%Y-%m-%d %H:%M")
    print("exec detect gath_time_ %s" % gath_time_)
    logger = top_log()
    try:
        conn = database_method.initial_connect('dmuser', 'dmuser', 'dmtest')
        conn = conn.create_conn()
        cursor = conn.cursor()
        # Call the anomaly-detection stored procedure for this database/instance/snapshot time.
        cursor.callproc('ZJ_RUN_DETECT_EXEC_MODEL', [db_name_, dbid_, inst_id_, gath_time_])
        conn.commit()
        cursor.close()
        conn.close()
    except Exception as msg:
        logger.error(msg)
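A minimal driver sketch for the wrapper above, assuming the project-local database_method and top_log helpers are importable and that the ZJ_RUN_DETECT_EXEC_MODEL procedure exists in the target schema; the db name, dbid and inst_id values are placeholders, not taken from the original project.

# Hypothetical driver, not part of the original project: run the detection procedure
# once for the current minute-truncated timestamp.
import datetime

if __name__ == '__main__':
    gath_time = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")  # matches the strptime format above
    # 'DMTEST', 123456789 and 1 are placeholders; real values come from gv$database / gv$instance.
    ZJ_RUN_DETECT_EXEC_MODEL('DMTEST', 123456789, 1, gath_time)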
Example #2
# Assumed imports for this view (Django 1.x-era APIs and pyecharts 0.5-style charts):
#   from django.http import HttpResponse
#   from django.template import loader
#   from pyecharts import Line, Timeline
# REMOTE_HOST points at the pyecharts JS asset host; database_method is a project-local helper.
def home(request):
    # An earlier revision handled a POST form that created UserInfo rows and rendered
    # the full user list from anom_total.html; that code path is no longer used.

    conn = database_method.initial_connect('dmuser', 'dmuser', 'dmtest')
    conn = conn.create_conn()
    cursor = conn.cursor()

    cpu_outlier_result = """
        select "TIME",
    "SNAP_ID", 
    "DB_CPU", 
    db_id,
    '/ora_dual/load_profile_trend/?snapid='||SNAP_ID||'&dbid='||db_id
    from (
    SELECT 
    "TIME",
    "SNAP_ID", 
    "DB_CPU", 
    db_id,
    PREDICTION_PROBABILITY(dmuser.ANOM_SVM_1_6 USING *) ANOM_SVM_1_6_PROB,
    PREDICTION(dmuser.ANOM_SVM_1_6 USING *) ANOM_SVM_1_6_PRED
    FROM dmuser.stat_all_pivot_data) where ANOM_SVM_1_6_PRED=1 and rownum=1
        """

    sql_outlier_result = """
                               select * from (
                                   select type
                                   from topsql_all_data_his_view
                                )
                                pivot 
                                (
                                   count(*)
                                   for type in ('CPU Time' as "CPU_TIME",'Elapse Time' as "ELAPSE_TIME",'Buffer Reads' as "BUFFER_READS",'Physical Reads' as "PHYSICAL_READS",'Executions' as "EXECUTIONS")
                                )
    """

    cpu_result = """
            select db_cpu,time from dmuser.stat_all_pivot_data
            """

    # Run anomaly detection (apply the CPU outlier model)
    cursor.callproc('dmuser.cpu_outlier_apply_model')

    # Run anomaly analysis
    cursor.execute(cpu_outlier_result)
    data_result_ = cursor.fetchall()
    data_result = list(data_result_)

    cursor.execute(sql_outlier_result)
    sql_result_ = cursor.fetchall()
    sql_result = list(sql_result_)
    outlier_sql = []
    for idx in range(len(sql_result)):
        outlier_sql.append({
            'CPU': sql_result[idx][0],
            'ELA': sql_result[idx][1],
            'BUFFER': sql_result[idx][2],
            'READ': sql_result[idx][3],
            'EXE': sql_result[idx][4]
        })

    # Extract the reasons behind each anomaly
    reasons = []
    for idx in range(len(data_result)):

        url = []
        #         reason_sql = """
        #         select  extractValue(value(reason_name),('//Attribute/@name'))
        # from
        # (
        # select FEATURE_DETAILS(dmuser.feat_pca_1_6, 1, 10 USING *) data
        # from dmuser.stat_all_pivot_data
        # where snap_id =
        #         """ + str(data_result[idx][1]) + """  ) t,TABLE(XMLSequence(Extract(t.data,'//Attribute'))) reason_name where rownum<4
        #         """

        reason_sql = """
                   select stat_name from (
    select * from DBA_HIST_SYS_TIME_MODEL where snap_id=""" + str(
            data_result[idx]
            [1]) + """ and stat_name not in ('DB time','DB CPU')
    order by value desc) where rownum < 4
                          """

        cursor.execute(reason_sql)
        reason_result_ = cursor.fetchall()
        for reason_idx in range(len(reason_result_)):
            # Each fetched row is a one-element tuple; upper-case it and strip spaces,
            # tuple punctuation and quotes so the stat name can be used as a URL parameter.
            url.append(
                data_result[idx][4] + "&reason=" +
                str(reason_result_[reason_idx]).upper().replace(' ', '_')
                .replace('(', '').replace(')', '').replace(',', '')
                .replace('[', '').replace(']', '').replace("'", ''))
            # url.append(data_result[idx][4] + "&reason=" + str(reason_result_[reason_idx]))
        reasons.append({
            "TIME": data_result[idx][0],
            "snap_id": data_result[idx][1],
            "DB_CPU": data_result[idx][2],
            "URL": url,
            "reason": reason_result_
        })

    cursor.execute(cpu_result)
    cpu_all_result_ = cursor.fetchall()
    cpu_all_result = list(cpu_all_result_)

    normal = []
    normal_time = []
    outlier = []
    outlier_time = []
    timeid = []

    for idx_1 in range(len(cpu_all_result)):
        # outlier.append({'time':cpu_all_result[idx_1][0],'ANOM_SVM_1_6_PROB':cpu_all_result[idx_1][3]})
        outlier.append(cpu_all_result[idx_1][0])
        outlier_time.append(cpu_all_result[idx_1][1])
        # else:
        #     # normal.append({'time': cpu_all_result[idx_1][0], 'ANOM_SVM_1_6_PROB': cpu_all_result[idx_1][3]})
        #     normal.append(cpu_all_result[idx_1][3])
        #     normal_time.append(cpu_all_result[idx_1][0])

    template = loader.get_template(
        './node_modules/gentelella/production/anom_total.html')
    timeline = Timeline(is_auto_play=True, timeline_bottom=0)

    cpu_line = Line(title_pos='center')

    # cpu_line.add(
    #     "Normal values",
    #     normal_time,
    #     normal,
    #     is_smooth=True,
    #     mark_point=["max", "min"],
    #     mark_line=["average"],
    #     legend_top="50%"
    # )

    cpu_line.add("DB_CPU",
                 outlier_time,
                 outlier,
                 is_smooth=True,
                 mark_point=["max", "min"],
                 mark_line=["average"])

    context = dict(
        # title = [],
        cpu_line=cpu_line.render_embed(),
        data_result=reasons,
        sql_result=outlier_sql,
        # metric_data = load_profile_per_hour,
        myechart=timeline.render_embed(),
        # host=DEFAULT_HOST,  # replaced by the line below
        host=REMOTE_HOST,  # <----- use this instead
        script_list=timeline.get_js_dependencies())
    # Release the database resources before returning the rendered page.
    cursor.close()
    conn.close()
    return HttpResponse(template.render(context, request))

    # return render(request, "./node_modules/gentelella/production/sel_cpuoutlier_data.html", {'data_result': data_result})
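home() is a plain Django function view; a minimal, hypothetical urls.py entry such as the one below (the module path and route name are assumptions, not taken from the original project) would expose it.

# Hypothetical URL wiring for the view above; adjust the import path to the real app.
from django.conf.urls import url
from . import views

urlpatterns = [
    url(r'^$', views.home, name='home'),
]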
Example #3
def get_top_sql(i, gath_time):
    # database_method and top_log are project-local helpers; datetime must be imported.
    logger = top_log()
    # gath_time is a plain "%Y-%m-%d %H:%M" string; no encode/decode round-trip is needed.
    gath_time_ = datetime.datetime.strptime(gath_time, "%Y-%m-%d %H:%M")
    print("begin gath time: %s" % gath_time_)
    try:
        conn = database_method.initial_connect('dmuser', 'dmuser', 'dmtest')
        conn = conn.create_conn()

        # The two branches differ only in the schema filter: i == 0 gathers DMUSER's own SQL,
        # any other value gathers application SQL while excluding system/maintenance accounts.
        if i == 0:
            schema_filter = "PARSING_SCHEMA_NAME in ('DMUSER')"
        else:
            schema_filter = "PARSING_SCHEMA_NAME not in ('SYS','SYSTEM','DBSNMP','DMUSER','MDSYS','ODMRSYS')"

        top_sql = """
        select a.*
        from
          (select inst_id as inst_id,
                row_number() over (partition by inst_id order by cpu_time desc) as rc,
                row_number() over (partition by inst_id order by disk_reads desc) as rr,
                row_number() over (partition by inst_id order by elapsed_Time desc) as rt,
                row_number() over (partition by inst_id order by BUFFER_GETS desc) as bf,
                PARSING_SCHEMA_NAME db_USER,
                sql_id,
                round(cpu_time/1e6) as cpu_s,
                round(elapsed_Time/1e6) as elap_s,
                round((elapsed_Time - cpu_time)/1e6) wait_s,
                trunc((elapsed_Time - cpu_time)*100/greatest(elapsed_Time,1),1)||'%' as "wait/elap",
                executions as execs,
                round(elapsed_Time/(executions+1)/1e6,2) ela_Pe,
                buffer_gets as tot_bufs,
                round(buffer_gets/greatest(executions,1),1) as avg_buf,
                disk_reads as reads,
                round(USER_IO_WAIT_TIME/1e6) as IOwait_s,
                (select distinct name from gv$database) as db_name,
                (select distinct dbid from gv$database) as dbid,
                plan_hash_value
                from gv$sqlarea t
                where """ + schema_filter + """
            ) a
        where rc <= 5 or rr <= 5 or rt <= 5 or bf <= 5
        order by inst_id, rc
        """


        cursor = conn.cursor()
        cursor.execute(top_sql)
        top_sql_data = cursor.fetchall()


        result = []

        for j in range(len(top_sql_data)):
            # Columns: 17=db_name, 18=dbid, 0=inst_id, 5=db_user, 6=sql_id, 19=plan_hash_value,
            #          7=cpu_s, 8=elap_s, 9=wait_s, 11=execs, 13=tot_bufs, 15=reads
            result.append((gath_time_, top_sql_data[j][17], top_sql_data[j][18], top_sql_data[j][0],
                           top_sql_data[j][5], top_sql_data[j][6], top_sql_data[j][19],
                           top_sql_data[j][7], top_sql_data[j][8], top_sql_data[j][9],
                           top_sql_data[j][11], top_sql_data[j][13], top_sql_data[j][15]))

        cursor.prepare(
                "insert into top_sql_data(GATH_TIME,DB_NAME,dbid,INST_ID,PARSING_SCHEMA_NAME,SQL_ID,plan_hash_value,cpu_s,elap_s,wait_s,EXECS,BUFFERS,DISK_READS) values(:1,:2,:3,:4,:5,:6,:7,:8,:9,:10,:11,:12,:13)")

        #print(result)
        # Only insert and post-process when rows were fetched; the callproc arguments
        # (dbid, inst_id) come from the last fetched row, as in the loop above.
        if result:
            cursor.executemany(None, result)
            cursor.callproc('ZJ_PRE_DEAL_SQLDATA',
                            [gath_time_, 'DMTEST', top_sql_data[-1][18], top_sql_data[-1][0]])
        conn.commit()
        cursor.close()
        conn.close()
        #contert_to_json(top_sql_data,top_ash_data)
        #logger.info(top_sql_data)
        #print(top_sql_data)

    except Exception as msg:
        logger.error(msg)
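A hedged sketch of how get_top_sql might be driven: i == 0 collects DMUSER's own top SQL and any other value collects application SQL, so a small loop can gather both sets for the same minute-truncated timestamp.

# Hypothetical collection loop, not part of the original project.
import datetime

gath_time = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
for flag in (0, 1):  # 0 -> DMUSER's own SQL, 1 -> application schemas
    get_top_sql(flag, gath_time)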