예제 #1
0
    def chart_alert_notification_vars(self, kwargs):
        """Build template variables for chart-alert notifications.

        Extends the basic notification vars with the chart name, the
        triggering action name, a label-sorted list of human-readable
        matched values, and a destination URL deep-linking into the
        UI log view for the alert's time window.
        """
        tmpl_vars = self.get_notification_basic_vars(kwargs)
        event_values = tmpl_vars["event_values"]
        tmpl_vars["chart_name"] = event_values["chart_name"]
        tmpl_vars["action_name"] = event_values.get("action_name") or ""

        matched_values = event_values["matched_step_values"]
        labels = matched_values["labels"]
        # only values with a known label are rendered
        readable = [
            {"label": labels[key]["human_label"], "value": value}
            for key, value in matched_values["values"].items()
            if labels.get(key)
        ]
        readable.sort(key=lambda entry: entry["label"])
        tmpl_vars["readable_values"] = readable

        start_date = convert_date(event_values["start_interval"])
        end_date = None
        if event_values.get("end_interval"):
            end_date = convert_date(event_values["end_interval"])

        # link back into the application UI using the configured mail URL
        app_url = kwargs["request"].registry.settings["_mail_url"]
        destination_url = kwargs["request"].route_url("/", _app_url=app_url)
        query_params = {
            "resource": event_values["resource"],
            "start_date": start_date.strftime(DATE_FRMT),
        }
        if end_date:
            query_params["end_date"] = end_date.strftime(DATE_FRMT)

        destination_url += "ui/{}?{}".format(
            "logs", urllib.parse.urlencode(query_params)
        )
        tmpl_vars["destination_url"] = destination_url
        return tmpl_vars
예제 #2
0
    def chart_alert_notification_vars(self, kwargs):
        """Prepare template vars for a chart alert notification.

        Adds chart metadata, the matched step values in human-readable
        form (sorted by label) and a deep link into the UI logs view
        scoped to the alert's interval.
        """
        tmpl_vars = self.get_notification_basic_vars(kwargs)
        ev = tmpl_vars['event_values']
        tmpl_vars['chart_name'] = ev['chart_name']
        tmpl_vars['action_name'] = ev.get('action_name') or ''
        matched_values = ev['matched_step_values']

        readable_values = []
        for key, value in matched_values['values'].items():
            label_info = matched_values['labels'].get(key)
            if not label_info:
                # skip values that have no label metadata
                continue
            readable_values.append(
                {'label': label_info['human_label'], 'value': value})
        tmpl_vars['readable_values'] = sorted(
            readable_values, key=lambda item: item['label'])

        start_date = convert_date(ev['start_interval'])
        end_date = (convert_date(ev['end_interval'])
                    if ev.get('end_interval') else None)

        # construct a link into the UI logs view for the alert window
        app_url = kwargs['request'].registry.settings['_mail_url']
        destination_url = kwargs['request'].route_url('/', _app_url=app_url)
        to_encode = {
            'resource': ev['resource'],
            'start_date': start_date.strftime(DATE_FRMT),
        }
        if end_date:
            to_encode['end_date'] = end_date.strftime(DATE_FRMT)
        destination_url += 'ui/{}?{}'.format(
            'logs', urllib.parse.urlencode(to_encode))
        tmpl_vars['destination_url'] = destination_url
        return tmpl_vars
예제 #3
0
def add_uptime_stats(params, metric):
    """Persist one uptime measurement and manage related alert events.

    Stores the measurement as an ``UptimeMetric`` row, mirrors it into the
    metrics store via ``add_metrics_uptime``, then either closes stale
    active uptime-alert events (when the check succeeded) or runs alert
    evaluation (when it failed).

    :param params: request params; only ``protocol_version`` is read here
    :param metric: dict with ``resource_id``, ``timestamp``,
        ``response_time``, ``status_code``, ``is_ok``, ``tries`` and an
        optional ``location`` (defaults to 1)
    :returns: ``True`` on success; ``None`` when the application cannot
        be resolved
    """
    proto_version = parse_proto(params.get("protocol_version"))
    try:
        application = ApplicationService.by_id_cached()(metric["resource_id"])
        # re-attach the cached instance to the current session
        application = DBSession.merge(application, load=False)
        if not application:
            return
        # bucket measurements per whole minute
        start_interval = convert_date(metric["timestamp"])
        start_interval = start_interval.replace(second=0, microsecond=0)
        new_metric = UptimeMetric(
            start_interval=start_interval,
            response_time=metric["response_time"],
            status_code=metric["status_code"],
            is_ok=metric["is_ok"],
            location=metric.get("location", 1),
            tries=metric["tries"],
            resource_id=application.resource_id,
            owner_user_id=application.owner_user_id,
        )
        DBSession.add(new_metric)
        # flush so the row has identifiers before building the ES document
        DBSession.flush()
        add_metrics_uptime([new_metric.es_doc()])
        if metric["is_ok"]:
            event_types = [Event.types["uptime_alert"]]
            statuses = [Event.statuses["active"]]
            # close active uptime alerts older than 6 minutes
            events = EventService.by_type_and_status(
                event_types,
                statuses,
                older_than=(datetime.utcnow() - timedelta(minutes=6)),
                app_ids=[application.resource_id],
            )
            for event in events:
                event.close()
        else:
            # failed check: evaluate whether an alert should fire
            UptimeMetricService.check_for_alert(application, metric=metric)
        action = "METRICS UPTIME"
        metrics_msg = "%s: %s, proto:%s" % (action, str(application),
                                            proto_version)
        log.info(metrics_msg)
        session = DBSession()
        mark_changed(session)
        return True
    except Exception as exc:
        # broad catch so the task can be retried instead of lost
        print_traceback(log)
        add_uptime_stats.retry(exc=exc)
예제 #4
0
    def set_data(self, data, resource, protocol_version=None):
        """Populate this report instance from a deserialized payload.

        :param data: validated report payload dict
        :param resource: owning application resource; supplies
            ``resource_id`` and ``default_grouping``
        :param protocol_version: client protocol version, forwarded to
            the grouping-hash generator
        """
        self.http_status = data['http_status']
        self.priority = data['priority']
        self.error = data['error']
        report_language = data.get('language', '').lower()
        self.language = getattr(Language, report_language, Language.unknown)
        # we need temp holder here to decide later
        # if we want to commit the tags if report is marked for creation
        self.tags = {
            'server_name': data['server'],
            'view_name': data['view_name']
        }
        if data.get('tags'):
            for tag_tuple in data['tags']:
                self.tags[tag_tuple[0]] = tag_tuple[1]
        self.traceback = data['traceback']
        # hash of the traceback with volatile parts stripped, used for grouping
        stripped_traceback = self.stripped_traceback()
        tb_repr = repr(stripped_traceback).encode('utf8')
        self.traceback_hash = hashlib.sha1(tb_repr).hexdigest()
        url_info = urllib.parse.urlsplit(data.get('url', ''),
                                         allow_fragments=False)
        # truncate to the column sizes
        self.url_domain = url_info.netloc[:128]
        self.url_path = url_info.path[:2048]
        self.occurences = data['occurences']
        if self.error:
            self.report_type = ReportType.error
        else:
            self.report_type = ReportType.slow

        # but if its status 404 its 404 type
        if self.http_status in [404, '404'] or self.error == '404 Not Found':
            self.report_type = ReportType.not_found
            self.error = ''

        self.generate_grouping_hash(
            data.get('appenlight.group_string', data.get('group_string')),
            resource.default_grouping, protocol_version)

        # details
        # NOTE(review): for 404s the payload is reduced to a minimal dict;
        # later lookups like data.get('traceback') then return None for
        # 404 reports — presumably intentional, verify against callers
        if data['http_status'] in [404, '404']:
            data = {
                "username": data["username"],
                "ip": data["ip"],
                "url": data["url"],
                "user_agent": data["user_agent"]
            }
            if data.get('HTTP_REFERER') or data.get('http_referer'):
                data['HTTP_REFERER'] = data.get(
                    'HTTP_REFERER', '') or data.get('http_referer', '')

        self.resource_id = resource.resource_id
        self.username = data['username']
        self.user_agent = data['user_agent']
        self.ip = data['ip']
        self.extra = {}
        if data.get('extra'):
            for extra_tuple in data['extra']:
                self.extra[extra_tuple[0]] = extra_tuple[1]

        self.url = data['url']
        self.request_id = data.get('request_id', '').replace('-', '') or str(
            uuid.uuid4())
        request_data = data.get('request', {})

        self.request = request_data
        # `or {}` guards against an explicit None value for the key
        # (matches the sibling implementation of this method)
        self.request_stats = data.get('request_stats') or {}
        traceback = data.get('traceback')
        if not traceback:
            traceback = data.get('frameinfo')
        self.traceback = traceback
        start_date = convert_date(data.get('start_time'))
        # keep the latest start_time seen across payloads
        if not self.start_time or self.start_time < start_date:
            self.start_time = start_date

        self.end_time = convert_date(data.get('end_time'), False)
        self.duration = 0

        if self.start_time and self.end_time:
            d = self.end_time - self.start_time
            self.duration = d.total_seconds()

        # update tags with other vars
        if self.username:
            self.tags['user_name'] = self.username
        self.tags['report_language'] = Language.key_from_value(self.language)
예제 #5
0
def request_metrics_create(request):
    """
    Endpoint for performance metrics, aggregates view performance stats
    and converts them to general metric row
    """
    application = request.context.resource
    if request.method.upper() == "OPTIONS":
        return check_cors(request, application)
    else:
        check_cors(request, application, should_return=False)

    params = dict(request.params.copy())
    proto_version = parse_proto(params.get("protocol_version", ""))

    payload = request.unsafe_json_body
    schema = MetricsListSchema()
    dataset = schema.deserialize(payload)

    rate_limiting(
        request, application, "per_application_metrics_rate_limit", len(dataset)
    )

    # looping report data
    metrics = {}
    for metric in dataset:
        server_name = metric.get("server", "").lower() or "unknown"
        # bucket entries per whole minute
        start_interval = convert_date(metric["timestamp"])
        start_interval = start_interval.replace(second=0, microsecond=0)

        for view_name, view_metrics in metric["metrics"]:
            key = "%s%s%s" % (metric["server"], start_interval, view_name)
            # BUGFIX: membership must be tested with `key` — the old
            # `start_interval not in metrics` was always true (datetime vs
            # string keys), which re-initialized the bucket on every row and
            # lost accumulation across duplicate rows for the same minute.
            if key not in metrics:
                metrics[key] = {
                    "requests": 0,
                    "main": 0,
                    "sql": 0,
                    "nosql": 0,
                    "remote": 0,
                    "tmpl": 0,
                    "custom": 0,
                    "sql_calls": 0,
                    "nosql_calls": 0,
                    "remote_calls": 0,
                    "tmpl_calls": 0,
                    "custom_calls": 0,
                    "start_interval": start_interval,
                    "server_name": server_name,
                    "view_name": view_name,
                }
            metrics[key]["requests"] += int(view_metrics["requests"])
            metrics[key]["main"] += round(view_metrics["main"], 5)
            metrics[key]["sql"] += round(view_metrics["sql"], 5)
            metrics[key]["nosql"] += round(view_metrics["nosql"], 5)
            metrics[key]["remote"] += round(view_metrics["remote"], 5)
            metrics[key]["tmpl"] += round(view_metrics["tmpl"], 5)
            metrics[key]["custom"] += round(view_metrics.get("custom", 0.0), 5)
            metrics[key]["sql_calls"] += int(view_metrics.get("sql_calls", 0))
            metrics[key]["nosql_calls"] += int(view_metrics.get("nosql_calls", 0))
            metrics[key]["remote_calls"] += int(view_metrics.get("remote_calls", 0))
            metrics[key]["tmpl_calls"] += int(view_metrics.get("tmpl_calls", 0))
            metrics[key]["custom_calls"] += int(view_metrics.get("custom_calls", 0))

            if not metrics[key]["requests"]:
                # fix this here because validator can't
                metrics[key]["requests"] = 1
                # metrics dict is being built to minimize
                # the amount of queries used
                # in case we get multiple rows from same minute

    # convert aggregated buckets into generic metric rows
    normalized_metrics = []
    for metric in metrics.values():
        new_metric = {
            "namespace": "appenlight.request_metric",
            "timestamp": metric.pop("start_interval"),
            "server_name": metric["server_name"],
            "tags": list(metric.items()),
        }
        normalized_metrics.append(new_metric)

    # persist asynchronously
    tasks.add_metrics.delay(
        application.resource_id, params, normalized_metrics, proto_version
    )

    log.info(
        "REQUEST METRICS call {} {} client:{}".format(
            application.resource_name, proto_version, request.headers.get("user_agent")
        )
    )
    return "OK: request metrics accepted"
예제 #6
0
    def set_data(self, data, resource, protocol_version=None):
        """Populate this report instance from a deserialized payload.

        :param data: validated report payload dict
        :param resource: owning application resource; supplies
            ``resource_id`` and ``default_grouping``
        :param protocol_version: client protocol version, forwarded to
            the grouping-hash generator
        """
        self.http_status = data["http_status"]
        self.priority = data["priority"]
        self.error = data["error"]
        report_language = data.get("language", "").lower()
        self.language = getattr(Language, report_language, Language.unknown)
        # we need temp holder here to decide later
        # if we want to to commit the tags if report is marked for creation
        self.tags = {
            "server_name": data["server"],
            "view_name": data["view_name"]
        }
        if data.get("tags"):
            for tag_tuple in data["tags"]:
                self.tags[tag_tuple[0]] = tag_tuple[1]
        self.traceback = data["traceback"]
        # hash of the traceback with volatile parts stripped, used for grouping
        stripped_traceback = self.stripped_traceback()
        tb_repr = repr(stripped_traceback).encode("utf8")
        self.traceback_hash = hashlib.sha1(tb_repr).hexdigest()
        url_info = urllib.parse.urlsplit(data.get("url", ""),
                                         allow_fragments=False)
        # truncate to the column sizes
        self.url_domain = url_info.netloc[:128]
        self.url_path = url_info.path[:2048]
        self.occurences = data["occurences"]
        if self.error:
            self.report_type = ReportType.error
        else:
            self.report_type = ReportType.slow

        # but if its status 404 its 404 type
        if self.http_status in [404, "404"] or self.error == "404 Not Found":
            self.report_type = ReportType.not_found
            self.error = ""

        self.generate_grouping_hash(
            data.get("appenlight.group_string", data.get("group_string")),
            resource.default_grouping,
            protocol_version,
        )

        # details
        # NOTE(review): for 404s the payload is reduced to a minimal dict;
        # later lookups like data.get("traceback") then return None for
        # 404 reports — presumably intentional, verify against callers
        if data["http_status"] in [404, "404"]:
            data = {
                "username": data["username"],
                "ip": data["ip"],
                "url": data["url"],
                "user_agent": data["user_agent"],
            }
            if data.get("HTTP_REFERER") or data.get("http_referer"):
                data["HTTP_REFERER"] = data.get(
                    "HTTP_REFERER", "") or data.get("http_referer", "")

        self.resource_id = resource.resource_id
        self.username = data["username"]
        self.user_agent = data["user_agent"]
        self.ip = data["ip"]
        self.extra = {}
        if data.get("extra"):
            for extra_tuple in data["extra"]:
                self.extra[extra_tuple[0]] = extra_tuple[1]

        self.url = data["url"]
        # generated request id when the client did not supply one
        self.request_id = data.get("request_id", "").replace("-", "") or str(
            uuid.uuid4())
        request_data = data.get("request", {})

        self.request = request_data
        # `or {}` guards against an explicit None value for the key
        self.request_stats = data.get("request_stats") or {}
        traceback = data.get("traceback")
        if not traceback:
            traceback = data.get("frameinfo")
        self.traceback = traceback
        start_date = convert_date(data.get("start_time"))
        # keep the latest start_time seen across payloads
        if not self.start_time or self.start_time < start_date:
            self.start_time = start_date

        self.end_time = convert_date(data.get("end_time"), False)
        self.duration = 0

        if self.start_time and self.end_time:
            d = self.end_time - self.start_time
            self.duration = d.total_seconds()

        # update tags with other vars
        if self.username:
            self.tags["user_name"] = self.username
        self.tags["report_language"] = Language.key_from_value(self.language)