Example #1
def get_requests_breakdown(request):
    """
    Used on the dashboard to get information about which views are most
    used in a time interval
    """
    query_params = request.GET.mixed()
    query_params['resource'] = (request.context.resource.resource_id, )

    filter_settings = build_filter_settings_from_query_dict(
        request, query_params)
    if not filter_settings.get('end_date'):
        end_date = datetime.utcnow().replace(microsecond=0, second=0)
        filter_settings['end_date'] = end_date

    if not filter_settings.get('start_date'):
        delta = timedelta(hours=1)
        filter_settings['start_date'] = filter_settings['end_date'] - delta

    series = RequestMetricService.get_requests_breakdown(
        request, filter_settings)

    results = []
    for row in series:
        d_row = {
            'avg_response': round(row['main'] / row['requests'], 3),
            'requests': row['requests'],
            'main': row['main'],
            'view_name': row['key'],
            'latest_details': row['latest_details'],
            'percentage': round(row['percentage'] * 100, 1)
        }

        results.append(d_row)

    return results
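
The end_date/start_date defaulting seen here (current minute as the end, one hour earlier as the start) recurs in almost every example below, only with a different delta in some of them. A minimal sketch of a reusable helper for that pattern, assuming nothing beyond the standard-library datetime module and the filter_settings dict shown above (the helper name is made up):

from datetime import datetime, timedelta

def apply_default_time_window(filter_settings, delta=timedelta(hours=1)):
    """Fill in missing end_date/start_date defaults on filter_settings in place."""
    if not filter_settings.get("end_date"):
        # truncate to the start of the current minute, as the views above do
        filter_settings["end_date"] = datetime.utcnow().replace(microsecond=0, second=0)
    if not filter_settings.get("start_date"):
        filter_settings["start_date"] = filter_settings["end_date"] - delta
    return filter_settings

Example #3 would then call it with delta=timedelta(days=30) instead of the one-hour default.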
Example #2
def trending_reports(request):
    """
    Returns trending exception/slow reports for a specific time interval
    """
    query_params = request.GET.mixed().copy()
    # pop report type so it can be rewritten as a tag later
    report_type = query_params.pop("report_type", None)
    if report_type:
        query_params["type"] = report_type

    query_params["resource"] = (request.context.resource.resource_id,)

    filter_settings = build_filter_settings_from_query_dict(request, query_params)

    if not filter_settings.get("end_date"):
        end_date = datetime.utcnow().replace(microsecond=0, second=0)
        filter_settings["end_date"] = end_date

    if not filter_settings.get("start_date"):
        delta = timedelta(hours=1)
        filter_settings["start_date"] = filter_settings["end_date"] - delta

    results = ReportGroupService.get_trending(request, filter_settings)

    trending = []
    for occurences, group in results:
        report_group = group.get_dict(request)
        # show the occurences within the time range instead of the global count
        report_group["occurences"] = occurences
        trending.append(report_group)

    return trending
Example #3
def history(request):
    """ Separate error graph or similar graph"""
    report_group = request.context.report_group
    query_params = request.GET.mixed()
    query_params["resource"] = (report_group.resource_id, )

    filter_settings = build_filter_settings_from_query_dict(
        request, query_params)
    if not filter_settings.get("end_date"):
        end_date = datetime.utcnow().replace(microsecond=0, second=0)
        filter_settings["end_date"] = end_date

    if not filter_settings.get("start_date"):
        delta = timedelta(days=30)
        filter_settings["start_date"] = filter_settings["end_date"] - delta

    filter_settings["group_id"] = report_group.id

    result = ReportGroupService.get_report_stats(request, filter_settings)

    plot_data = []
    for row in result:
        point = {
            "x": row["x"],
            "reports": row["report"] + row["slow_report"] + row["not_found"],
        }
        plot_data.append(point)

    return plot_data
Example #4
def common_values(request):
    config = request.GET.mixed()
    datasource = config.pop("datasource", "logs")
    filter_settings = build_filter_settings_from_query_dict(request, config)
    resources = list(filter_settings["resource"])
    tag_name = filter_settings["tags"][0]["value"][0]

    and_part = [{"terms": {"resource_id": list(resources)}}]
    if filter_settings["namespace"]:
        and_part.append({"terms": {"namespace": filter_settings["namespace"]}})
    query = {"query": {"bool": {"filter": and_part}}}
    query["aggs"] = {
        "sub_agg": {
            "terms": {
                "field": "tags.{}.values".format(tag_name),
                "size": 50
            }
        }
    }
    index_names = es_index_name_limiter(ixtypes=[datasource])
    result = Datastores.es.search(body=query,
                                  index=index_names,
                                  doc_type="log",
                                  size=0)
    values_buckets = result["aggregations"]["sub_agg"].get("buckets", [])
    return {"values": [item["key"] for item in values_buckets]}
Example #5
def get_requests_breakdown(request):
    """
    Used on the dashboard to get information about which views are most
    used in a time interval
    """
    query_params = request.GET.mixed()
    query_params["resource"] = (request.context.resource.resource_id,)

    filter_settings = build_filter_settings_from_query_dict(request, query_params)
    if not filter_settings.get("end_date"):
        end_date = datetime.utcnow().replace(microsecond=0, second=0)
        filter_settings["end_date"] = end_date

    if not filter_settings.get("start_date"):
        delta = timedelta(hours=1)
        filter_settings["start_date"] = filter_settings["end_date"] - delta

    series = RequestMetricService.get_requests_breakdown(request, filter_settings)

    results = []
    for row in series:
        d_row = {
            "avg_response": round(row["main"] / row["requests"], 3),
            "requests": row["requests"],
            "main": row["main"],
            "view_name": row["key"],
            "latest_details": row["latest_details"],
            "percentage": round(row["percentage"] * 100, 1),
        }

        results.append(d_row)

    return results
Example #6
def requests_graphs(request):
    """
    Handles dashboard information for the requests-per-second graph over
    the selected time interval
    """
    query_params = request.GET.mixed()
    query_params["resource"] = (request.context.resource.resource_id,)

    filter_settings = build_filter_settings_from_query_dict(request, query_params)

    if not filter_settings.get("end_date"):
        end_date = datetime.utcnow().replace(microsecond=0, second=0)
        filter_settings["end_date"] = end_date

    delta = timedelta(hours=1)
    if not filter_settings.get("start_date"):
        filter_settings["start_date"] = filter_settings["end_date"] - delta

    result_now = RequestMetricService.get_metrics_stats(request, filter_settings)

    delta = filter_settings["end_date"] - filter_settings["start_date"]
    if delta < h.time_deltas.get("12h")["delta"]:
        seconds = h.time_deltas["1m"]["minutes"] * 60.0
    elif delta <= h.time_deltas.get("3d")["delta"]:
        seconds = h.time_deltas["5m"]["minutes"] * 60.0
    elif delta >= h.time_deltas.get("2w")["delta"]:
        seconds = h.time_deltas["24h"]["minutes"] * 60.0
    else:
        seconds = h.time_deltas["1h"]["minutes"] * 60.0

    for item in result_now:
        if item["requests"]:
            item["requests"] = round(item["requests"] / seconds, 3)
    return result_now
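
The branching above only relies on the "delta" and "minutes" entries of h.time_deltas; a self-contained sketch with an assumed minimal stand-in for that mapping (the real helper module presumably defines more entries) reproduces the bucket-size selection:

from datetime import timedelta

# assumed minimal stand-in for h.time_deltas
TIME_DELTAS = {
    "1m": {"minutes": 1, "delta": timedelta(minutes=1)},
    "5m": {"minutes": 5, "delta": timedelta(minutes=5)},
    "1h": {"minutes": 60, "delta": timedelta(hours=1)},
    "12h": {"minutes": 12 * 60, "delta": timedelta(hours=12)},
    "24h": {"minutes": 24 * 60, "delta": timedelta(hours=24)},
    "3d": {"minutes": 3 * 24 * 60, "delta": timedelta(days=3)},
    "2w": {"minutes": 14 * 24 * 60, "delta": timedelta(days=14)},
}

def bucket_seconds(interval):
    """Pick the aggregation bucket size (in seconds) for a requested time interval."""
    if interval < TIME_DELTAS["12h"]["delta"]:
        return TIME_DELTAS["1m"]["minutes"] * 60.0
    elif interval <= TIME_DELTAS["3d"]["delta"]:
        return TIME_DELTAS["5m"]["minutes"] * 60.0
    elif interval >= TIME_DELTAS["2w"]["delta"]:
        return TIME_DELTAS["24h"]["minutes"] * 60.0
    return TIME_DELTAS["1h"]["minutes"] * 60.0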
Example #7
def metrics_graphs(request):
    """
    Handles metric dashboard graphs
    Returns information for time/tier breakdown
    """
    query_params = request.GET.mixed()
    query_params["resource"] = (request.context.resource.resource_id,)

    filter_settings = build_filter_settings_from_query_dict(request, query_params)

    if not filter_settings.get("end_date"):
        end_date = datetime.utcnow().replace(microsecond=0, second=0)
        filter_settings["end_date"] = end_date

    delta = timedelta(hours=1)
    if not filter_settings.get("start_date"):
        filter_settings["start_date"] = filter_settings["end_date"] - delta
    if filter_settings["end_date"] <= filter_settings["start_date"]:
        filter_settings["end_date"] = filter_settings["start_date"]

    delta = filter_settings["end_date"] - filter_settings["start_date"]
    if delta < h.time_deltas.get("12h")["delta"]:
        divide_by_min = 1
    elif delta <= h.time_deltas.get("3d")["delta"]:
        divide_by_min = 5.0
    elif delta >= h.time_deltas.get("2w")["delta"]:
        divide_by_min = 60.0 * 24
    else:
        divide_by_min = 60.0

    results = RequestMetricService.get_metrics_stats(request, filter_settings)
    # requests are reported PER SECOND, so the 1-minute stats are divided by 60
    # requests are normalized to a 1-minute average
    # results are the average seconds spent per request in each specific area
    for point in results:
        if point["requests"]:
            point["main"] = (
                point["main"]
                - point["sql"]
                - point["nosql"]
                - point["remote"]
                - point["tmpl"]
                - point["custom"]
            ) / point["requests"]
            point["sql"] = point["sql"] / point["requests"]
            point["nosql"] = point["nosql"] / point["requests"]
            point["remote"] = point["remote"] / point["requests"]
            point["tmpl"] = point["tmpl"] / point["requests"]
            point["custom"] = point["custom"] / point["requests"]
            point["requests_2"] = point["requests"] / 60.0 / divide_by_min

    selected_types = ["main", "sql", "nosql", "remote", "tmpl", "custom"]

    for point in results:
        for stat_type in selected_types:
            point[stat_type] = round(point.get(stat_type, 0), 3)

    return results
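
A worked example of the normalization above, with made-up numbers for a single point covering one minute (so divide_by_min is 1):

# made-up numbers: 60 requests in the bucket, times are totals in seconds
point = {"main": 120.0, "sql": 30.0, "nosql": 6.0, "remote": 12.0,
         "tmpl": 18.0, "custom": 0.0, "requests": 60}
other = (point["main"] - point["sql"] - point["nosql"] - point["remote"]
         - point["tmpl"] - point["custom"]) / point["requests"]   # 0.9 s per request
sql_per_request = point["sql"] / point["requests"]                # 0.5 s per request
requests_per_second = point["requests"] / 60.0 / 1                # 1.0 req/s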
Example #8
File: logs.py  Project: ashishprsspl1/ETTTT
def common_tags(request):
    config = request.GET.mixed()
    filter_settings = build_filter_settings_from_query_dict(request, config)

    resources = list(filter_settings["resource"])
    query = {
        "query": {
            "filtered": {
                "filter": {
                    "and": [{
                        "terms": {
                            "resource_id": list(resources)
                        }
                    }]
                }
            }
        }
    }
    start_date = filter_settings.get('start_date')
    end_date = filter_settings.get('end_date')
    filter_part = query['query']['filtered']['filter']['and']

    date_range = {"range": {"timestamp": {}}}
    if start_date:
        date_range["range"]["timestamp"]["gte"] = start_date
    if end_date:
        date_range["range"]["timestamp"]["lte"] = end_date
    if start_date or end_date:
        filter_part.append(date_range)

    levels = filter_settings.get('level')
    if levels:
        filter_part.append({"terms": {'log_level': levels}})
    namespaces = filter_settings.get('namespace')
    if namespaces:
        filter_part.append({"terms": {'namespace': namespaces}})

    query["aggs"] = {"sub_agg": {"terms": {"field": "tag_list", "size": 50}}}
    # tags
    index_names = es_index_name_limiter(
        ixtypes=[config.get('datasource', 'logs')])
    result = Datastores.es.search(query,
                                  index=index_names,
                                  doc_type='log',
                                  size=0)
    tag_buckets = result['aggregations']['sub_agg'].get('buckets', [])
    # namespaces
    query["aggs"] = {"sub_agg": {"terms": {"field": "namespace", "size": 50}}}
    result = Datastores.es.search(query,
                                  index=index_names,
                                  doc_type='log',
                                  size=0)
    namespaces_buckets = result['aggregations']['sub_agg'].get('buckets', [])
    return {
        "tags": [item['key'] for item in tag_buckets],
        "namespaces": [item['key'] for item in namespaces_buckets]
    }
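
The filtered / and construct used in this variant is the older Elasticsearch query DSL (deprecated around the 2.x series); Example #20 below is the same view rebuilt on the bool / filter form that replaced it. For comparison, the skeleton of the newer form is:

query = {
    "query": {
        "bool": {
            "filter": [
                {"terms": {"resource_id": list(resources)}},
                # the date range, log_level and namespace clauses are appended here
            ]
        }
    }
}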
Example #9
def metrics_graphs(request):
    """
    Handles metric dashboard graphs
    Returns information for time/tier breakdown
    """
    query_params = request.GET.mixed()
    query_params['resource'] = (request.context.resource.resource_id, )

    filter_settings = build_filter_settings_from_query_dict(
        request, query_params)

    if not filter_settings.get('end_date'):
        end_date = datetime.utcnow().replace(microsecond=0, second=0)
        filter_settings['end_date'] = end_date

    delta = timedelta(hours=1)
    if not filter_settings.get('start_date'):
        filter_settings['start_date'] = filter_settings['end_date'] - delta
    if filter_settings['end_date'] <= filter_settings['start_date']:
        filter_settings['end_date'] = filter_settings['start_date']

    delta = filter_settings['end_date'] - filter_settings['start_date']
    if delta < h.time_deltas.get('12h')['delta']:
        divide_by_min = 1
    elif delta <= h.time_deltas.get('3d')['delta']:
        divide_by_min = 5.0
    elif delta >= h.time_deltas.get('2w')['delta']:
        divide_by_min = 60.0 * 24
    else:
        divide_by_min = 60.0

    results = RequestMetricService.get_metrics_stats(request, filter_settings)
    # requests are reported PER SECOND, so the 1-minute stats are divided by 60
    # requests are normalized to a 1-minute average
    # results are the average seconds spent per request in each specific area
    for point in results:
        if point['requests']:
            point['main'] = (point['main'] - point['sql'] - point['nosql'] -
                             point['remote'] - point['tmpl'] -
                             point['custom']) / point['requests']
            point['sql'] = point['sql'] / point['requests']
            point['nosql'] = point['nosql'] / point['requests']
            point['remote'] = point['remote'] / point['requests']
            point['tmpl'] = point['tmpl'] / point['requests']
            point['custom'] = point['custom'] / point['requests']
            point['requests_2'] = point['requests'] / 60.0 / divide_by_min

    selected_types = ['main', 'sql', 'nosql', 'remote', 'tmpl', 'custom']

    for point in results:
        for stat_type in selected_types:
            point[stat_type] = round(point.get(stat_type, 0), 3)

    return results
Example #10
def response_graphs(request):
    """
    Handles dashboard information for avg. response time split by today,
    2 days ago and a week ago
    """
    query_params = request.GET.mixed()
    query_params["resource"] = (request.context.resource.resource_id,)

    filter_settings = build_filter_settings_from_query_dict(request, query_params)

    if not filter_settings.get("end_date"):
        end_date = datetime.utcnow().replace(microsecond=0, second=0)
        filter_settings["end_date"] = end_date

    delta = timedelta(hours=1)
    if not filter_settings.get("start_date"):
        filter_settings["start_date"] = filter_settings["end_date"] - delta

    result_now = RequestMetricService.get_metrics_stats(request, filter_settings)

    filter_settings_2d = filter_settings.copy()
    filter_settings_2d["start_date"] = filter_settings["start_date"] - timedelta(days=2)
    filter_settings_2d["end_date"] = filter_settings["end_date"] - timedelta(days=2)
    result_2d = RequestMetricService.get_metrics_stats(request, filter_settings_2d)

    filter_settings_7d = filter_settings.copy()
    filter_settings_7d["start_date"] = filter_settings["start_date"] - timedelta(days=7)
    filter_settings_7d["end_date"] = filter_settings["end_date"] - timedelta(days=7)
    result_7d = RequestMetricService.get_metrics_stats(request, filter_settings_7d)

    plot_data = []

    for item in result_now:
        point = {"x": item["x"], "today": 0, "days_ago_2": 0, "days_ago_7": 0}
        if item["requests"]:
            point["today"] = round(item["main"] / item["requests"], 3)
        plot_data.append(point)

    for i, item in enumerate(result_2d[: len(plot_data)]):
        plot_data[i]["days_ago_2"] = 0
        point = result_2d[i]
        if point["requests"]:
            plot_data[i]["days_ago_2"] = round(point["main"] / point["requests"], 3)

    for i, item in enumerate(result_7d[: len(plot_data)]):
        plot_data[i]["days_ago_7"] = 0
        point = result_7d[i]
        if point["requests"]:
            plot_data[i]["days_ago_7"] = round(point["main"] / point["requests"], 3)

    return plot_data
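
Note that the three series are aligned purely by list index (result_2d[i] and result_7d[i] are matched with plot_data[i]). The resulting points look roughly like this (timestamps and values made up):

plot_data = [
    {"x": "2019-01-08T10:00:00", "today": 0.231, "days_ago_2": 0.198, "days_ago_7": 0.244},
    {"x": "2019-01-08T10:01:00", "today": 0.189, "days_ago_2": 0.0, "days_ago_7": 0.201},
]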
Example #11
def get_application_report_stats(request):
    query_params = request.GET.mixed()
    query_params["resource"] = (request.context.resource.resource_id,)

    filter_settings = build_filter_settings_from_query_dict(request, query_params)
    if not filter_settings.get("end_date"):
        end_date = datetime.utcnow().replace(microsecond=0, second=0)
        filter_settings["end_date"] = end_date

    if not filter_settings.get("start_date"):
        delta = timedelta(hours=1)
        filter_settings["start_date"] = filter_settings["end_date"] - delta

    result = ReportGroupService.get_report_stats(request, filter_settings)
    return result
Example #12
File: logs.py  Project: ashishprsspl1/ETTTT
def fetch_logs(request):
    """
    Returns list of log entries from Elasticsearch
    """

    filter_settings = build_filter_settings_from_query_dict(
        request, request.GET.mixed())
    logs_paginator = LogService.get_paginator_by_app_ids(
        app_ids=filter_settings['resource'],
        page=filter_settings['page'],
        filter_settings=filter_settings)
    headers = gen_pagination_headers(request, logs_paginator)
    request.response.headers.update(headers)

    return [l.get_dict() for l in logs_paginator.sa_items]
Example #13
def get_slow_calls(request):
    """
    Returns information about time-consuming calls in a specific time interval
    """
    query_params = request.GET.mixed()
    query_params["resource"] = (request.context.resource.resource_id,)

    filter_settings = build_filter_settings_from_query_dict(request, query_params)

    if not filter_settings.get("end_date"):
        end_date = datetime.utcnow().replace(microsecond=0, second=0)
        filter_settings["end_date"] = end_date

    delta = timedelta(hours=1)
    if not filter_settings.get("start_date"):
        filter_settings["start_date"] = filter_settings["end_date"] - delta

    return SlowCallService.get_time_consuming_calls(request, filter_settings)
Example #14
def uptime_graphs(request):
    """
    Returns uptime information: current uptime, daily and monthly stats
    """
    query_params = request.GET.mixed().copy()
    query_params["resource"] = (request.context.resource.resource_id,)
    filter_settings = build_filter_settings_from_query_dict(request, query_params)

    if not filter_settings.get("end_date"):
        end_date = datetime.utcnow().replace(microsecond=0, second=0)
        filter_settings["end_date"] = end_date

    if not filter_settings.get("start_date"):
        delta = timedelta(hours=1)
        filter_settings["start_date"] = filter_settings["end_date"] - delta

    plot_data = UptimeMetricService.uptime_for_resource(request, filter_settings)

    return plot_data
Example #15
def get_apdex_stats(request):
    """
    Calculates the Apdex score per server for the dashboard's server
    information (upper-right stats boxes)
    """
    query_params = request.GET.mixed()
    query_params["resource"] = (request.context.resource.resource_id,)

    filter_settings = build_filter_settings_from_query_dict(request, query_params)
    # make sure we only have one resource here so we don't produce
    # weird results when the wrong app is selected in the app selector
    filter_settings["resource"] = [filter_settings["resource"][0]]

    if not filter_settings.get("end_date"):
        end_date = datetime.utcnow().replace(microsecond=0, second=0)
        filter_settings["end_date"] = end_date

    delta = timedelta(hours=1)
    if not filter_settings.get("start_date"):
        filter_settings["start_date"] = filter_settings["end_date"] - delta

    return RequestMetricService.get_apdex_stats(request, filter_settings)
Example #16
def logs_mass_delete(request):
    params = request.GET.mixed()
    if "resource" not in params:
        raise HTTPUnprocessableEntity()
    # this might be '' and then colander will not validate the schema
    if not params.get("namespace"):
        params.pop("namespace", None)
    filter_settings = build_filter_settings_from_query_dict(
        request, params, resource_permissions=["update_reports"])

    resource_id = list(filter_settings["resource"])[0]
    # filter settings return a list of all of the user's applications;
    # if the app does not match we would normally not care, since it is only used for search,
    # but here a user playing with params could wipe out their whole dataset
    if int(resource_id) != int(params["resource"]):
        raise HTTPUnprocessableEntity()

    logs_cleanup.delay(resource_id, filter_settings)
    msg = ("Log cleanup process started - it may take a while for "
           "everything to get removed")
    request.session.flash(msg)
    return {}
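
The int(resource_id) != int(params["resource"]) comparison is the safety check that prevents a mismatched app selection from wiping the wrong application's logs. A minimal standalone sketch of the same idea (function name and exception choice are hypothetical; the view itself raises HTTPUnprocessableEntity):

def assert_requested_resource(requested_id, permitted_resource_ids):
    """Return the resolved resource id only if it matches the one from the query string."""
    resolved_id = list(permitted_resource_ids)[0]
    if int(resolved_id) != int(requested_id):
        raise ValueError("resource id mismatch - refusing to delete logs")
    return resolved_id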
Example #17
def logs_fetch_series(request):
    """
    Returns a time series of log counts for the logs dashboard graph
    """
    filter_settings = build_filter_settings_from_query_dict(
        request, request.GET.mixed())
    paginator = LogService.get_paginator_by_app_ids(
        app_ids=filter_settings["resource"],
        page=1,
        filter_settings=filter_settings,
        items_per_page=1,
    )
    now = datetime.utcnow().replace(microsecond=0, second=0)
    delta = timedelta(days=7)
    if paginator.sa_items:
        start_date = paginator.sa_items[-1].timestamp.replace(microsecond=0,
                                                              second=0)
        filter_settings["start_date"] = start_date - delta
    else:
        filter_settings["start_date"] = now - delta
    filter_settings["end_date"] = filter_settings["start_date"] + timedelta(
        days=7)

    @request.registry.cache_regions.redis_sec_30.cache_on_arguments(
        "logs_graphs")
    def cached(apps, search_params, delta, now):
        data = LogService.get_time_series_aggregate(
            filter_settings["resource"], filter_settings)
        if not data:
            return []
        buckets = data["aggregations"]["events_over_time"]["buckets"]
        return [{
            "x": datetime.utcfromtimestamp(item["key"] / 1000),
            "logs": item["doc_count"],
        } for item in buckets]

    return cached(filter_settings, request.GET.mixed(), delta, now)
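
The inner cached() function reshapes an Elasticsearch date-histogram aggregation (epoch-millisecond bucket keys) into plot points; the same transformation on a made-up bucket list:

from datetime import datetime

# made-up shape of a date_histogram aggregation result
buckets = [
    {"key": 1672567200000, "doc_count": 42},
    {"key": 1672570800000, "doc_count": 17},
]
series = [
    {"x": datetime.utcfromtimestamp(item["key"] / 1000), "logs": item["doc_count"]}
    for item in buckets
]
# series[0] -> {"x": datetime(2023, 1, 1, 10, 0), "logs": 42}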
Example #18
File: logs.py  Project: ashishprsspl1/ETTTT
def common_values(request):
    config = request.GET.mixed()
    datasource = config.pop('datasource', 'logs')
    filter_settings = build_filter_settings_from_query_dict(request, config)
    resources = list(filter_settings["resource"])
    tag_name = filter_settings['tags'][0]['value'][0]
    query = {
        'query': {
            'filtered': {
                'filter': {
                    'and': [{
                        'terms': {
                            'resource_id': list(resources)
                        }
                    }, {
                        'terms': {
                            'namespace': filter_settings['namespace']
                        }
                    }]
                }
            }
        }
    }
    query['aggs'] = {
        'sub_agg': {
            'terms': {
                'field': 'tags.{}.values'.format(tag_name),
                'size': 50
            }
        }
    }
    index_names = es_index_name_limiter(ixtypes=[datasource])
    result = Datastores.es.search(query,
                                  index=index_names,
                                  doc_type='log',
                                  size=0)
    values_buckets = result['aggregations']['sub_agg'].get('buckets', [])
    return {"values": [item['key'] for item in values_buckets]}
Example #19
def requests_graphs(request):
    """
    Handles dashboard information for the requests-per-second graph over
    the selected time interval
    """
    query_params = request.GET.mixed()
    query_params['resource'] = (request.context.resource.resource_id, )

    filter_settings = build_filter_settings_from_query_dict(
        request, query_params)

    if not filter_settings.get('end_date'):
        end_date = datetime.utcnow().replace(microsecond=0, second=0)
        filter_settings['end_date'] = end_date

    delta = timedelta(hours=1)
    if not filter_settings.get('start_date'):
        filter_settings['start_date'] = filter_settings['end_date'] - delta

    result_now = RequestMetricService.get_metrics_stats(
        request, filter_settings)

    delta = filter_settings['end_date'] - filter_settings['start_date']
    if delta < h.time_deltas.get('12h')['delta']:
        seconds = h.time_deltas['1m']['minutes'] * 60.0
    elif delta <= h.time_deltas.get('3d')['delta']:
        seconds = h.time_deltas['5m']['minutes'] * 60.0
    elif delta >= h.time_deltas.get('2w')['delta']:
        seconds = h.time_deltas['24h']['minutes'] * 60.0
    else:
        seconds = h.time_deltas['1h']['minutes'] * 60.0

    for item in result_now:
        if item['requests']:
            item['requests'] = round(item['requests'] / seconds, 3)
    return result_now
Example #20
def common_tags(request):
    config = request.GET.mixed()
    filter_settings = build_filter_settings_from_query_dict(request, config)

    resources = list(filter_settings["resource"])
    query = {
        "query": {
            "bool": {
                "filter": [{
                    "terms": {
                        "resource_id": list(resources)
                    }
                }]
            }
        }
    }
    start_date = filter_settings.get("start_date")
    end_date = filter_settings.get("end_date")
    filter_part = query["query"]["bool"]["filter"]

    date_range = {"range": {"timestamp": {}}}
    if start_date:
        date_range["range"]["timestamp"]["gte"] = start_date
    if end_date:
        date_range["range"]["timestamp"]["lte"] = end_date
    if start_date or end_date:
        filter_part.append(date_range)

    levels = filter_settings.get("level")
    if levels:
        filter_part.append({"terms": {"log_level": levels}})
    namespaces = filter_settings.get("namespace")
    if namespaces:
        filter_part.append({"terms": {"namespace": namespaces}})

    query["aggs"] = {
        "sub_agg": {
            "terms": {
                "field": "tag_list.keyword",
                "size": 50
            }
        }
    }
    # tags
    index_names = es_index_name_limiter(
        ixtypes=[config.get("datasource", "logs")])
    result = Datastores.es.search(body=query,
                                  index=index_names,
                                  doc_type="log",
                                  size=0)
    tag_buckets = result["aggregations"]["sub_agg"].get("buckets", [])
    # namespaces
    query["aggs"] = {
        "sub_agg": {
            "terms": {
                "field": "namespace.keyword",
                "size": 50
            }
        }
    }
    result = Datastores.es.search(body=query,
                                  index=index_names,
                                  doc_type="log",
                                  size=0)
    namespaces_buckets = result["aggregations"]["sub_agg"].get("buckets", [])
    return {
        "tags": [item["key"] for item in tag_buckets],
        "namespaces": [item["key"] for item in namespaces_buckets],
    }
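
The view issues two searches only because both aggregations reuse the name "sub_agg"; the same data could also be fetched in a single request by sending the two terms aggregations as siblings (a sketch of the idea, not what the project does):

query["aggs"] = {
    "tags": {"terms": {"field": "tag_list.keyword", "size": 50}},
    "namespaces": {"terms": {"field": "namespace.keyword", "size": 50}},
}
result = Datastores.es.search(body=query, index=index_names, doc_type="log", size=0)
tag_buckets = result["aggregations"]["tags"].get("buckets", [])
namespaces_buckets = result["aggregations"]["namespaces"].get("buckets", [])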