Ejemplo n.º 1
0
    def get_query_by_params(self, request, sorts=None):
        """Build a SlowQuery search from request query parameters.

        Query params:
            start, end: timestamp bounds on ``@timestamp``; the range
                filter is applied only when BOTH are non-blank strings.
            sorts: comma-separated sort fields; falls back to the
                *sorts* argument, then to newest-first on ``@timestamp``.

        Returns the (lazily-evaluated) elasticsearch-dsl Search object.
        """
        def _clean(value):
            # Treat missing, non-string, or whitespace-only values as absent.
            if isinstance(value, str) and value.strip():
                return value
            return None

        search = SlowQuery.search()
        start = _clean(request.query_params.get('start'))
        end = _clean(request.query_params.get('end'))

        if start is not None and end is not None:
            # gte = greater-or-equal bound, lte = less-or-equal bound.
            search = search.filter(
                'range', **{'@timestamp': {'gte': start, 'lte': end}})

        sorts = request.query_params.get('sorts', sorts)
        if isinstance(sorts, str) and len(sorts) > 0:
            fields = [field.strip()
                      for field in sorts.split(",") if field.strip()]
            search = search.sort(*fields)
        else:
            search = search.sort('-@timestamp')
        return search
Ejemplo n.º 2
0
def send_slow_alarm(record):
    """Send a WeChat alarm for one aggregated slow-query record.

    Looks up the SQL fingerprint ("finger") for the record's hash in
    Elasticsearch so the alert can show the offending statement, then
    notifies the on-call user via WeChat.

    Args:
        record: dict with keys 'hash', 'schema', 'avg_query_time' and
            'hash_count' (one row of the slow-query aggregation results).
    """
    # User-facing Chinese alert template — do not reword.
    msg_template = "您的数据库【{}】存在慢SQL: {},平均执行时间为{},一分钟执行了{}次,请关注"
    s = SlowQuery.search()
    results = s.filter("term", hash__keyword=record['hash']).execute()
    # Fall back to an empty fingerprint when no document matches the hash.
    sql_printer = ""
    if len(results.hits.hits) > 0:
        sql_printer = results.hits.hits[0]['_source']['finger']
    msg = msg_template.format(record['schema'], sql_printer,
                              record['avg_query_time'], record['hash_count'])
    send_wechat_msg("ZhangWenBing", msg)
Ejemplo n.º 3
0
def send_with_matplotlib(request, gte='2020-12-22T00:00:00.000Z',
                         lte='2021-01-21T00:00:00.000Z'):
    """Aggregate slow queries per day in [gte, lte] and print the trend.

    The reporting window was previously hard-coded; it is now exposed as
    keyword parameters whose defaults preserve the original behavior.

    Args:
        request: Django request object (unused beyond the view contract).
        gte, lte: inclusive ISO-8601 bounds on ``@timestamp``.

    Returns:
        Response("success") after printing the per-day dates and counts.
    """
    s = SlowQuery.search()
    # gte = greater-or-equal, lte = less-or-equal bound on @timestamp.
    options = {
        'gte': gte,
        'lte': lte
    }
    s = s.filter('range', **{'@timestamp': options})
    # Daily date-histogram with avg query_time / lock_time per bucket.
    aggs = {
        "aggs": {
            "date": {
                "date_histogram": {
                    "field": "@timestamp",
                    "calendar_interval": "day"
                },
                "aggs": {
                    "avg_query_time": {
                        "avg": {
                            "field": "query_time"
                        }
                    },
                    "avg_lock_time": {
                        "avg": {
                            "field": "lock_time"
                        }
                    }
                }
            }
        }
    }
    get_aggs(s.aggs, aggs)
    result = s.execute().aggregations
    rs = get_results(aggs, result)
    # Keep only the YYYY-MM-DD prefix of each bucket's date key.
    dates = [r['date'][:10] for r in rs]
    counts = [r['date_count'] for r in rs]

    print(dates)
    print(counts)

    return Response("success")
Ejemplo n.º 4
0
    def list(self, request, *args, **kwargs):
        """Paginated slow-query listing, optionally aggregated by SQL hash.

        Query params: start/end (time range), schema (exact db name),
        is_aggr_by_hash ('true' enables aggregation), interval
        (date-histogram calendar interval, default '1d'), plus the
        paginator's page params.
        """
        start = request.query_params.get('start')
        end = request.query_params.get('end')
        schema = request.query_params.get('schema', None)
        is_aggr_by_hash = request.query_params.get('is_aggr_by_hash', False)
        # Query params arrive as strings; only the literal 'true'
        # (case-insensitive) turns aggregation on.
        if isinstance(is_aggr_by_hash, str) and is_aggr_by_hash.lower() == 'true':
            is_aggr_by_hash = True
        else:
            is_aggr_by_hash = False

        interval = request.query_params.get('interval', '1d')

        # Parameter validation omitted (as in the original).
        s = SlowQuery.search()
        if schema is not None and len(schema) > 0:
            s = s.filter('term', schema__keyword=schema)
        if start is not None and end is not None:
            # gte = greater-or-equal, lte = less-or-equal bound.
            options = {
                'gte': start,
                'lte': end
            }
            s = s.filter('range', **{'@timestamp': options})

        s = s.sort('-@timestamp')
        paginator = CustomPagination()

        # Previously this condition was tested twice, with `rs` defined in
        # the first block and consumed in the second — merged into one
        # branch so the data flow is explicit.
        if is_aggr_by_hash:
            # date -> schema -> hash buckets, each hash bucket carrying avg
            # row/time metrics plus the latest SQL fingerprint (top_hits).
            aggs = {
                "aggs": {
                    "date": {
                        "date_histogram": {
                            "field": "@timestamp",
                            "calendar_interval": interval
                        },
                        "aggs": {
                            "schema": {
                                "terms": {
                                    "field": "schema.keyword"
                                },
                                "aggs": {
                                    "hash": {
                                        "terms": {
                                            "field": "hash.keyword"
                                        },
                                        "aggs": {
                                            "rows_sent": {
                                                "avg": {
                                                    "field": "rows_sent"
                                                }
                                            },
                                            "rows_examined": {
                                                "avg": {
                                                    "field": "rows_examined"
                                                }
                                            },
                                            "query_time": {
                                                "avg": {
                                                    "field": "query_time"
                                                }
                                            },
                                            "sql": {
                                                "top_hits": {
                                                    "_source": [
                                                        "finger",
                                                        "@timestamp"
                                                    ],
                                                    "sort": [
                                                        {
                                                            "@timestamp": {
                                                                "order": "desc"
                                                            }
                                                        }
                                                    ],
                                                    "size": 1
                                                }
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }

            get_aggs(s.aggs, aggs)
            result = s.execute().aggregations
            rs = get_results(aggs, result)
            data = paginator.paginate_queryset(rs, request)
        else:
            data = paginator.paginate_queryset(s, request)
            data = [q.to_dict() for q in data]

        return paginator.get_paginated_response(data)
Ejemplo n.º 5
0
def alarm(request):
    """Aggregate slow queries per schema/hash and fire a WeChat alarm for
    the first bucket whose averages exceed the configured thresholds.

    Thresholds come from AlarmSettingModel: a global row (schema=None)
    supplies defaults; per-schema rows keyed by "<schema>#<sql hash>"
    override them.
    """
    # One-minute window ending now.
    # NOTE(review): start/end are currently UNUSED because the range
    # filter below is commented out, so the aggregation scans ALL
    # documents — confirm whether the filter was disabled on purpose.
    end = int(time.time())
    start = end - 60

    # Fallback thresholds when no global AlarmSettingModel row exists.
    global_query_time = 10
    global_query_count = 10

    global_cfg = AlarmSettingModel.objects.filter(schema=None).order_by("-id")
    if global_cfg.exists():
        global_query_time = global_cfg.first().query_time
        global_query_count = global_cfg.first().query_count

    s = SlowQuery.search()

    # Disabled time-range filter (gte = greater-or-equal,
    # lte = less-or-equal bound on @timestamp):
    # options = {
    #     'gte': start,
    #     'lte': end
    # }
    # s = s.filter('range', **{'@timestamp': options})

    # Two-level aggregation: schema buckets, then SQL-hash buckets, each
    # with average query_time and lock_time.
    aggs = {
        "aggs": {
            "schema": {
                "terms": {
                    "field": "schema.keyword"
                },
                "aggs": {
                    "hash": {
                        "terms": {
                            "field": "hash.keyword"
                        },
                        "aggs": {
                            "avg_query_time": {
                                "avg": {
                                    "field": "query_time"
                                }
                            },
                            "avg_lock_time": {
                                "avg": {
                                    "field": "lock_time"
                                }
                            }
                        }
                    }
                }
            }
        }
    }
    get_aggs(s.aggs, aggs)

    result = s.execute().aggregations

    rs = get_results(aggs, result)

    # Build a "<schema>#<sql hash>" -> config map of per-schema overrides.
    schema_cfg_dict = {}
    schema_cfg_queryset = AlarmSettingModel.objects.exclude(schema__isnull=True).filter(stop_alarm=False, delete=False)
    for schema_cfg in schema_cfg_queryset:
        # schema_cfg_dict[schema_cfg.schema + "#" + schema_cfg.sql_print_hash] = schema_cfg
        # schema_cfg.schema appears to be a FK whose target also has a
        # `schema` field — hence the double attribute access.
        cfg_key = schema_cfg.schema.schema + "#" + schema_cfg.sql_print_hash
        schema_cfg_dict[cfg_key] = schema_cfg

    for r in rs:
        threshold_query_time = global_query_time
        threshold_query_count = global_query_count
        cfg = schema_cfg_dict.get(r.get('schema') + "#" + r.get('hash'), None)
        if cfg is not None:
            threshold_query_count = cfg.query_count
            threshold_query_time = cfg.query_time
        if r['avg_query_time'] > threshold_query_time and r['hash_count'] > threshold_query_count:
            send_slow_alarm(r)
            # NOTE(review): only the FIRST offending bucket triggers an
            # alarm per run — confirm this rate-limiting is intentional.
            break

    return HttpResponse("success")
Ejemplo n.º 6
0
def send_with_matplotlib(request):
    """Render a daily slow-SQL trend chart for Dec 2019 and e-mail it as an
    inline image inside an HTML report.

    Fixes over the previous version: the SMTP connection is closed and the
    temporary chart file is deleted even when sending fails (try/finally).
    Returns HttpResponse("success") on completion.
    """
    s = SlowQuery.search()
    # Hard-coded reporting window; gte/lte are inclusive @timestamp bounds.
    options = {
        'gte': '2019-12-01T00:00:00.000Z',
        'lte': '2019-12-31T00:00:00.000Z'
    }
    s = s.filter('range', **{'@timestamp': options})
    # Daily date-histogram with avg query_time / lock_time per bucket.
    aggs = {
        "aggs": {
            "date": {
                "date_histogram": {
                    "field": "@timestamp",
                    "calendar_interval": "day"
                },
                "aggs": {
                    "avg_query_time": {
                        "avg": {
                            "field": "query_time"
                        }
                    },
                    "avg_lock_time": {
                        "avg": {
                            "field": "lock_time"
                        }
                    }
                }
            }
        }
    }
    get_aggs(s.aggs, aggs)
    result = s.execute().aggregations
    rs = get_results(aggs, result)
    dates = [r['date'][:10] for r in rs]
    counts = [r['date_count'] for r in rs]

    # Configure matplotlib so CJK labels and the minus sign render.
    rcParams['axes.unicode_minus'] = False
    matplotlib.rcParams['font.family'] = 'SimHei'
    mpl.rcParams['font.sans-serif'] = ['SimHei']
    mpl.rcParams['font.size'] = 14

    plt.figure(figsize=(12, 4))
    plt.title('慢SQL数量趋势图')
    plt.plot(dates, np.asarray(counts), label='慢SQL数量')
    plt.legend()

    plt.xticks(rotation=-30, ha='left')
    plt.tick_params(axis='x', labelsize=8)
    # Save the chart to a unique temp file named by the current timestamp.
    now = float(time.time())
    pic_name = str(now) + ".jpg"
    plt.savefig(pic_name)
    plt.close()

    try:
        report = render_to_string("slowsql.html", context={
            "send_time": datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        })

        my_sender = "*****@*****.**"

        # 'related' allows the HTML body to reference the inline image
        # via its Content-ID ("cid:pic").
        msg = MIMEMultipart('related')
        msg['From'] = formataddr([" ", my_sender])
        msg['To'] = formataddr([" ", "*****@*****.**"])
        msg['Subject'] = "慢SQL周报"
        msg.attach(MIMEText(report, 'html', 'utf-8'))

        with open(pic_name, "rb") as f:
            pic = MIMEImage(f.read())
            pic.add_header('Content-ID', '<pic>')
            msg.attach(pic)

        server = smtplib.SMTP_SSL('smtp.exmail.qq.com', 465)
        try:
            # SECURITY: password is hard-coded in source — move it to
            # settings / environment configuration.
            server.login(my_sender, "ZSTmail2021")
            server.sendmail("*****@*****.**", "*****@*****.**",
                            msg.as_string())
        finally:
            # Close the SMTP session even if login/send raises.
            server.quit()
    finally:
        # Always remove the temporary chart image.
        os.remove(pic_name)

    return HttpResponse("success")
Ejemplo n.º 7
0
    def es_test(self, request):
        """Playground endpoint demonstrating elasticsearch-dsl query styles
        against the SlowQuery index; the commented lines are kept as a
        reference catalogue of query variants.
        """
        # print(1)
        # return HttpResponse(1)
        s = SlowQuery.search()

        # Scan all documents (no filter)
        # result = s.scan()

        # Exact-match (term) query
        # result = s.filter('term', schema__keyword='Ena').scan()

        # Sorting
        # result = s.sort('-@timestamp')

        # Range query on @timestamp
        # from_date = '2020-12-20T00:00:00'
        # to_date = '2020-12-21T00:00:00'
        # options = {
        #     'gte': from_date,
        #     'lte': to_date
        # }
        # result = s.filter('range', **{'@timestamp': options}).execute()

        # Wildcard / prefix / full-text match variants
        # result = s.filter('wildcard', schema__keyword='*An*').scan()

        # result = s.filter('prefix', schema__keyword='L').scan()

        # result = s.filter('match', query_sql='abc cda').scan()
        # result = s.filter('match', query_sql='abc urtaevhwcz').scan()

        # result = s.filter('match', query_sql='T_computer').filter('term', schema__keyword='Cna').scan()

        # OR-combined query: match on query_sql OR exact schema match.
        q = Q("match", query_sql='T_computer') | Q("term",
                                                   schema__keyword="Cna")

        result = s.filter(q).scan()

        # result = s.sort('-@timestamp', 'rows_examined')

        # Single-level aggregation: bucket documents by date.

        # Build the date-histogram aggregation (one bucket per day).
        timeAggs = A('date_histogram', field='@timestamp', fixed_interval="1d")

        # 'date' is the name of the bucket group.
        s.aggs.bucket('date', timeAggs)

        # Execute and pull the aggregation results.
        aggs = s.execute().aggregations

        # NOTE: inspect in a debugger to learn the response structure.
        print(aggs)

        for date_item in aggs['date'].buckets:
            print(date_item)

        # for i in result:
        #     # print(i)
        #     print(i.to_dict(include_meta=True))
        return Response('success')
        """
Ejemplo n.º 8
0
def alarm_minute():
    """Scheduled job: aggregate the last minute of slow queries per
    schema/hash and send a WeChat alarm for the first bucket exceeding
    its thresholds.

    The window start is persisted in Redis under ``slowsql_alarm_start``
    so consecutive runs use a stable boundary.
    """
    # Configuration should come from settings; hard-coded for now.
    r = redis.StrictRedis(host='localhost', port=6379, db=0)
    key_name = "slowsql_alarm_start"
    duration = 60
    start = int(time.time()) - duration

    if r.exists(key_name):
        # BUGFIX: redis-py returns bytes (decode_responses is off), so the
        # stored value must be converted before `start + duration` below —
        # previously this raised TypeError whenever the key existed.
        start = int(r.get(key_name))
    else:
        r.set(key_name, start + duration)
    # NOTE(review): the key is never advanced after a run, so the window
    # start stays fixed once set — confirm whether it should be updated
    # to `end` here.
    end = start + duration

    print("start: {}, type: {}".format(start, type(start)))

    # Fallback thresholds when no global AlarmSettingModel row exists.
    global_query_time = 10
    global_query_count = 10

    global_cfg = AlarmSettingModel.objects.filter(schema=None).order_by("-id")
    if global_cfg.exists():
        global_query_time = global_cfg.first().query_time
        global_query_count = global_cfg.first().query_count

    s = SlowQuery.search()

    # Epoch-second bounds on @timestamp (gte = greater-or-equal,
    # lte = less-or-equal). NOTE(review): confirm the index's date field
    # accepts epoch seconds for range queries.
    options = {
        'gte': start,
        'lte': end
    }
    s = s.filter('range', **{'@timestamp': options})
    # Two-level aggregation: schema buckets, then SQL-hash buckets, each
    # with average query_time and lock_time.
    aggs = {
        "aggs": {
            "schema": {
                "terms": {
                    "field": "schema.keyword"
                },
                "aggs": {
                    "hash": {
                        "terms": {
                            "field": "hash.keyword"
                        },
                        "aggs": {
                            "avg_query_time": {
                                "avg": {
                                    "field": "query_time"
                                }
                            },
                            "avg_lock_time": {
                                "avg": {
                                    "field": "lock_time"
                                }
                            }
                        }
                    }
                }
            }
        }
    }
    get_aggs(s.aggs, aggs)

    result = s.execute().aggregations

    rs = get_results(aggs, result)

    # Per-schema threshold overrides keyed by "<schema>#<sql hash>".
    schema_cfg_dict = {}
    schema_cfg_queryset = AlarmSettingModel.objects.exclude(
        schema__isnull=True).filter(stop_alarm=False, delete=False)
    for schema_cfg in schema_cfg_queryset:
        cfg_key = schema_cfg.schema.schema + "#" + schema_cfg.sql_print_hash
        schema_cfg_dict[cfg_key] = schema_cfg

    for r in rs:
        threshold_query_time = global_query_time
        threshold_query_count = global_query_count
        cfg = schema_cfg_dict.get(r.get('schema') + "#" + r.get('hash'), None)
        if cfg is not None:
            threshold_query_count = cfg.query_count
            threshold_query_time = cfg.query_time
        if r['avg_query_time'] > threshold_query_time and r[
                'hash_count'] > threshold_query_count:
            send_slow_alarm(r)
            # Only the first offending bucket triggers an alarm per run.
            break