def create_issue(self):
    """Create a Github issue from the report group and attach a comment.

    Reads issue fields from the request JSON body, creates the issue via
    the configured Github integration and records a comment on the report
    group pointing at the new issue.

    Returns True on success, or a dict with ``error_messages`` (and a 503
    response status) when the integration call fails.
    """
    payload = self.request.unsafe_json_body
    report = ReportGroupService.by_id(payload["group_id"])
    form_data = {
        "title": payload.get("title", "Unknown Title"),
        "content": payload.get("content"),
        "kind": [payload["status"]],
        "responsible": payload["responsible"]["user"],
    }
    try:
        config = self.integration.config
        client = GithubIntegration.create_client(
            self.request, config["user_name"], config["repo_name"]
        )
        issue = client.create_issue(form_data)
    except IntegrationException as exc:
        # integration backend unavailable / rejected the request
        self.request.response.status_code = 503
        return {"error_messages": [str(exc)]}
    report.comments.append(
        ReportComment(
            owner_id=self.request.user.id,
            report_time=report.first_timestamp,
            body="Github issue created: %s " % issue["web_url"],
        )
    )
    return True
def history(request):
    """Return a report-count time series for the report group's resource.

    Defaults to a 30-day window ending now (minute precision) when the
    query string supplies no explicit date range.
    """
    report_group = request.context.report_group
    query_params = request.GET.mixed()
    query_params["resource"] = (report_group.resource_id, )
    filter_settings = build_filter_settings_from_query_dict(
        request, query_params)
    if not filter_settings.get("end_date"):
        filter_settings["end_date"] = datetime.utcnow().replace(
            microsecond=0, second=0)
    if not filter_settings.get("start_date"):
        filter_settings["start_date"] = (
            filter_settings["end_date"] - timedelta(days=30))
    filter_settings["group_id"] = report_group.id
    stats = ReportGroupService.get_report_stats(request, filter_settings)
    # collapse the three per-bucket counters into one "reports" value
    return [
        {
            "x": row["x"],
            "reports": row["report"] + row["slow_report"] + row["not_found"],
        }
        for row in stats
    ]
def create_issue(self):
    """Create a Github issue from the report group and record a comment.

    Returns True on success, or a dict with 'error_messages' (and a 503
    response status) when the Github integration call fails.
    """
    payload = self.request.unsafe_json_body
    report = ReportGroupService.by_id(payload['group_id'])
    form_data = {
        'title': payload.get('title', 'Unknown Title'),
        'content': payload.get('content'),
        'kind': [payload['status']],
        'responsible': payload['responsible']['user'],
    }
    try:
        client = GithubIntegration.create_client(
            self.request,
            self.integration.config['user_name'],
            self.integration.config['repo_name'])
        issue = client.create_issue(form_data)
    except IntegrationException as exc:
        # surface the integration failure to the API caller
        self.request.response.status_code = 503
        return {'error_messages': [str(exc)]}
    new_comment = ReportComment(
        owner_id=self.request.user.id,
        report_time=report.first_timestamp,
        body='Github issue created: %s ' % issue['web_url'])
    report.comments.append(new_comment)
    return True
def create_issue(self):
    """Create a Jira issue from the report group and record a comment.

    Returns True on success, or a dict with 'error_messages' (and a 503
    response status) when the Jira integration call fails.
    """
    payload = self.request.unsafe_json_body
    report = ReportGroupService.by_id(payload['group_id'])
    form_data = {
        'title': payload.get('title', 'Unknown Title'),
        'content': payload.get('content', ''),
        'issue_type': payload['issue_type']['id'],
        'priority': payload['priority']['id'],
        'responsible': payload['responsible']['id'],
        'project': self.integration.config['project'],
    }
    try:
        issue = self.create_client().create_issue(
            form_data, request=self.request)
    except IntegrationException as exc:
        # surface the integration failure to the API caller
        self.request.response.status_code = 503
        return {'error_messages': [str(exc)]}
    report.comments.append(ReportComment(
        owner_id=self.request.user.id,
        report_time=report.first_timestamp,
        body='Jira issue created: %s ' % issue['web_url']))
    return True
def create_issue(self):
    """Create a Jira issue from the report group and attach a comment.

    Reads issue fields from the request JSON body plus the integration's
    configured project, creates the issue via the Jira client and records
    a link-back comment on the report group.

    Returns True on success, or a dict with ``error_messages`` (and a 503
    response status) when the integration call fails.
    """
    payload = self.request.unsafe_json_body
    report = ReportGroupService.by_id(payload["group_id"])
    form_data = {
        "title": payload.get("title", "Unknown Title"),
        "content": payload.get("content", ""),
        "issue_type": payload["issue_type"]["id"],
        "priority": payload["priority"]["id"],
        "responsible": payload["responsible"]["id"],
        "project": self.integration.config["project"],
    }
    try:
        jira_client = self.create_client()
        issue = jira_client.create_issue(form_data, request=self.request)
    except IntegrationException as exc:
        self.request.response.status_code = 503
        return {"error_messages": [str(exc)]}
    report.comments.append(
        ReportComment(
            owner_id=self.request.user.id,
            report_time=report.first_timestamp,
            body="Jira issue created: %s " % issue["web_url"],
        )
    )
    return True
def trending_reports(request):
    """Return exception/slow report groups trending in a time interval.

    Defaults to the last hour ending now (minute precision) when no date
    range is supplied in the query string.
    """
    query_params = request.GET.mixed().copy()
    # "report_type" arrives as its own parameter but is matched as a tag
    report_type = query_params.pop("report_type", None)
    if report_type:
        query_params["type"] = report_type
    query_params["resource"] = (request.context.resource.resource_id,)
    filter_settings = build_filter_settings_from_query_dict(
        request, query_params)
    if not filter_settings.get("end_date"):
        filter_settings["end_date"] = datetime.utcnow().replace(
            microsecond=0, second=0)
    if not filter_settings.get("start_date"):
        filter_settings["start_date"] = (
            filter_settings["end_date"] - timedelta(hours=1))
    trending = []
    for occurences, group in ReportGroupService.get_trending(
            request, filter_settings):
        entry = group.get_dict(request)
        # show the occurences in time range instead of global ones
        entry["occurences"] = occurences
        trending.append(entry)
    return trending
def __init__(self, request):
    """Pyramid context factory for report-group views.

    Resolves the report group from the route/query ``group_id``, builds
    the ACL from the owning resource's permissions, and enforces that
    unauthenticated visitors of a public group also supply a valid
    report id.

    :raises HTTPNotFound: when the group does not exist, or when an
        unauthenticated user of a public group gives no valid report id.
    """
    Resource = appenlight.models.resource.Resource
    self.__acl__ = []
    # group_id may come from the matched route or from query params
    group_id = request.matchdict.get(
        "group_id", request.params.get("group_id"))
    group_id = to_integer_safe(group_id)
    self.report_group = ReportGroupService.by_id(
        group_id) if group_id else None
    if not self.report_group:
        raise HTTPNotFound()
    self.public = self.report_group.public
    self.resource = (
        ResourceService.by_resource_id(self.report_group.resource_id)
        if self.report_group else None)
    if self.resource:
        # start from the resource's own ACL, then layer user perms on top
        self.__acl__ = self.resource.__acl__
        if request.user:
            permissions = ResourceService.perms_for_user(
                self.resource, request.user)
            for perm_user, perm_name in permission_to_04_acls(permissions):
                self.__acl__.append(
                    rewrite_root_perm(perm_user, perm_name))
    if self.public:
        self.__acl__.append((Allow, Everyone, "view"))
        if not request.user:
            # unauthed users need to visit using both group and report pair
            report_id = request.params.get(
                "reportId", request.params.get("report_id", -1))
            report = self.report_group.get_report(report_id, public=True)
            if not report:
                raise HTTPNotFound()
    add_root_superperm(request, self)
def daily_digest():
    """Send the daily digest of top 50 trending error reports.

    Pops the set of applications that had reports from redis (so each run
    consumes the accumulated set), then for every such application fetches
    error reports trending over the last 8 hours and emails a digest to
    every user with 'view' permission.
    """
    request = get_current_request()
    apps = Datastores.redis.smembers(REDIS_KEYS['apps_that_had_reports'])
    # consume the set so the next digest run starts fresh
    Datastores.redis.delete(REDIS_KEYS['apps_that_had_reports'])
    since_when = datetime.utcnow() - timedelta(hours=8)
    log.warning('Generating daily digests')
    for resource_id in apps:
        # redis returns bytes - decode to the string id used by the ORM
        resource_id = resource_id.decode('utf8')
        end_date = datetime.utcnow().replace(microsecond=0, second=0)
        filter_settings = {
            'resource': [resource_id],
            'tags': [{
                'name': 'type',
                'value': ['error'],
                'op': None
            }],
            'type': 'error',
            'start_date': since_when,
            'end_date': end_date
        }
        reports = ReportGroupService.get_trending(
            request, filter_settings=filter_settings, limit=50)
        application = ApplicationService.by_id(resource_id)
        if application:
            # de-duplicate users that hold the perm via multiple grants
            users = set([p.user for p in application.users_for_perm('view')])
            for user in users:
                user.send_digest(request, application, reports=reports,
                                 since_when=since_when)
def check_user_report_notifications(resource_id):
    """Notify users about new report groups gathered for an application.

    Pops the pending error/slow report-group id sets and their occurence
    counters from redis, triggers application-level alert checks, then
    sends per-user notifications and marks the groups as notified.

    :param resource_id: id of the application to process
    """
    try:
        request = get_current_request()
        application = ApplicationService.by_id(resource_id)
        if not application:
            return
        error_key = REDIS_KEYS["reports_to_notify_per_type_per_app"].format(
            ReportType.error, resource_id)
        slow_key = REDIS_KEYS["reports_to_notify_per_type_per_app"].format(
            ReportType.slow, resource_id)
        error_group_ids = Datastores.redis.smembers(error_key)
        slow_group_ids = Datastores.redis.smembers(slow_key)
        # consume the sets so the next run starts from a clean slate
        Datastores.redis.delete(error_key)
        Datastores.redis.delete(slow_key)
        err_gids = [int(g_id) for g_id in error_group_ids]
        slow_gids = [int(g_id) for g_id in slow_group_ids]
        group_ids = err_gids + slow_gids
        occurence_dict = {}
        for g_id in group_ids:
            key = REDIS_KEYS["counters"]["report_group_occurences"].format(
                g_id)
            val = Datastores.redis.get(key)
            Datastores.redis.delete(key)
            # counter may already be expired/absent - assume one occurence
            if val:
                occurence_dict[g_id] = int(val)
            else:
                occurence_dict[g_id] = 1
        report_groups = ReportGroupService.by_ids(group_ids)
        # Query.options() is generative - the result must be reassigned,
        # otherwise the joinedload is silently dropped
        report_groups = report_groups.options(
            sa.orm.joinedload(ReportGroup.last_report_ref))

        ApplicationService.check_for_groups_alert(
            application,
            "alert",
            report_groups=report_groups,
            occurence_dict=occurence_dict,
        )
        users = set([
            p.user for p in ResourceService.users_for_perm(application, "view")
        ])
        report_groups = report_groups.all()
        for user in users:
            UserService.report_notify(
                user,
                request,
                application,
                report_groups=report_groups,
                occurence_dict=occurence_dict,
            )
        for group in report_groups:
            # marks report_groups as notified
            if not group.notified:
                group.notified = True
    except Exception:
        print_traceback(log)
        raise
def comment_create(request):
    """Create a user comment on a report group and email notifications.

    Notifies two audiences: users @-mentioned in the comment body (who
    hold 'view' on the application) and everyone else who previously
    commented on the group.

    :returns: the created comment as a dict, or the form errors when
        validation fails
    """
    report_group = request.context.report_group
    application = request.context.resource
    form = forms.CommentForm(MultiDict(request.unsafe_json_body),
                             csrf_context=request)
    if request.method == "POST" and form.validate():
        comment = ReportComment(owner_id=request.user.id,
                                report_time=report_group.first_timestamp)
        form.populate_obj(comment)
        report_group.comments.append(comment)
        perm_list = ResourceService.users_for_perm(application, "view")
        uids_to_notify = []
        users_to_notify = []
        # users @-mentioned in the comment body
        for perm in perm_list:
            user = perm.user
            if ("@{}".format(user.user_name) in comment.body
                    and user.id not in uids_to_notify):
                uids_to_notify.append(user.id)
                users_to_notify.append(user)
        # previous commenters on this group (excluding the author)
        commenters = ReportGroupService.users_commenting(
            report_group, exclude_user_id=request.user.id)
        for user in commenters:
            if user.id not in uids_to_notify:
                uids_to_notify.append(user.id)
                users_to_notify.append(user)
        for user in users_to_notify:
            email_vars = {
                "user": user,
                "commenting_user": request.user,
                "request": request,
                "application": application,
                "report_group": report_group,
                "comment": comment,
                "email_title": "AppEnlight :: New comment",
            }
            UserService.send_email(
                request,
                recipients=[user.email],
                variables=email_vars,
                template="/email_templates/new_comment_report.jinja2",
            )
        request.session.flash(_("Your comment was created"))
        return comment.get_dict()
    else:
        return form.errors
def index(request):
    """Return a paginated list of report groups matching the search query.

    Deserializes the query string through the report search schema,
    treats any unrecognized parameters as tag filters, chooses the report
    type from the matched route, and emits pagination headers on the
    response.
    """
    if request.user:
        request.user.last_login_date = datetime.utcnow()
    applications = UserService.resources_with_perms(
        request.user, ["view"], resource_types=["application"])
    search_params = request.GET.mixed()
    # limit searchable resources to applications the user can view
    all_possible_app_ids = set([app.resource_id for app in applications])
    schema = ReportSearchSchema().bind(resources=all_possible_app_ids)
    tag_schema = TagListSchema()
    filter_settings = schema.deserialize(search_params)
    # anything not a recognized search param becomes a tag filter
    tag_list = [{
        "name": k,
        "value": v
    } for k, v in filter_settings.items() if k not in accepted_search_params]
    tags = tag_schema.deserialize(tag_list)
    filter_settings["tags"] = tags
    # route name decides whether we list slow or error reports
    if request.matched_route.name == "slow_reports":
        filter_settings["report_type"] = [ReportType.slow]
    else:
        filter_settings["report_type"] = [ReportType.error]
    reports_paginator = ReportGroupService.get_paginator_by_app_ids(
        app_ids=filter_settings["resource"],
        page=filter_settings["page"],
        filter_settings=filter_settings,
    )
    reports = []
    include_keys = (
        "id",
        "http_status",
        "report_type",
        "resource_name",
        "front_url",
        "resource_id",
        "error",
        "url_path",
        "tags",
        "duration",
    )
    for report in reports_paginator.sa_items:
        reports.append(report.get_dict(request, include_keys=include_keys))
    headers = gen_pagination_headers(request, reports_paginator)
    request.response.headers.update(headers)
    return reports
def get_application_report_stats(request):
    """Return report statistics for the context application.

    Defaults to the last hour ending now (minute precision) when the
    query string supplies no explicit date range.
    """
    query_params = request.GET.mixed()
    query_params["resource"] = (request.context.resource.resource_id,)
    filter_settings = build_filter_settings_from_query_dict(
        request, query_params)
    if not filter_settings.get("end_date"):
        filter_settings["end_date"] = datetime.utcnow().replace(
            microsecond=0, second=0)
    if not filter_settings.get("start_date"):
        filter_settings["start_date"] = (
            filter_settings["end_date"] - timedelta(hours=1))
    return ReportGroupService.get_report_stats(request, filter_settings)
def check_alerts(resource_id):
    """Run alerting checks for report groups gathered for an application.

    Pops the pending error/slow alerting id sets and their occurence
    counters from redis, then asks the application service to evaluate
    alert conditions for those groups.

    :param resource_id: id of the application to process
    """
    since_when = datetime.utcnow()
    try:
        request = get_current_request()
        application = ApplicationService.by_id(resource_id)
        if not application:
            return
        error_key = REDIS_KEYS[
            "reports_to_notify_per_type_per_app_alerting"].format(
                ReportType.error, resource_id)
        slow_key = REDIS_KEYS[
            "reports_to_notify_per_type_per_app_alerting"].format(
                ReportType.slow, resource_id)
        error_group_ids = Datastores.redis.smembers(error_key)
        slow_group_ids = Datastores.redis.smembers(slow_key)
        # consume the sets so the next run starts from a clean slate
        Datastores.redis.delete(error_key)
        Datastores.redis.delete(slow_key)
        err_gids = [int(g_id) for g_id in error_group_ids]
        slow_gids = [int(g_id) for g_id in slow_group_ids]
        group_ids = err_gids + slow_gids
        occurence_dict = {}
        for g_id in group_ids:
            key = REDIS_KEYS["counters"][
                "report_group_occurences_alerting"].format(g_id)
            val = Datastores.redis.get(key)
            Datastores.redis.delete(key)
            # counter may already be expired/absent - assume one occurence
            if val:
                occurence_dict[g_id] = int(val)
            else:
                occurence_dict[g_id] = 1
        report_groups = ReportGroupService.by_ids(group_ids)
        # Query.options() is generative - the result must be reassigned,
        # otherwise the joinedload is silently dropped
        report_groups = report_groups.options(
            sa.orm.joinedload(ReportGroup.last_report_ref))

        ApplicationService.check_for_groups_alert(
            application,
            "alert",
            report_groups=report_groups,
            occurence_dict=occurence_dict,
            since_when=since_when,
        )
    except Exception:
        print_traceback(log)
        raise
def daily_digest():
    """Send the daily digest of top 50 trending error reports.

    Pops the set of applications that had reports from redis (so each run
    consumes the accumulated set), then for every such application fetches
    error reports trending over the last 8 hours and emails a digest to
    every user with "view" permission.
    """
    request = get_current_request()
    apps = Datastores.redis.smembers(REDIS_KEYS["apps_that_had_reports"])
    # consume the set so the next digest run starts fresh
    Datastores.redis.delete(REDIS_KEYS["apps_that_had_reports"])
    since_when = datetime.utcnow() - timedelta(hours=8)
    log.warning("Generating daily digests")
    for resource_id in apps:
        # redis returns bytes - decode to the string id used by the ORM
        resource_id = resource_id.decode("utf8")
        end_date = datetime.utcnow().replace(microsecond=0, second=0)
        filter_settings = {
            "resource": [resource_id],
            "tags": [{
                "name": "type",
                "value": ["error"],
                "op": None
            }],
            "type": "error",
            "start_date": since_when,
            "end_date": end_date,
        }
        reports = ReportGroupService.get_trending(
            request, filter_settings=filter_settings, limit=50)
        application = ApplicationService.by_id(resource_id)
        if application:
            # de-duplicate users that hold the perm via multiple grants
            users = set([
                p.user
                for p in ResourceService.users_for_perm(application, "view")
            ])
            for user in users:
                user.send_digest(request, application, reports=reports,
                                 since_when=since_when)
def get_dict(self, request, details=False, exclude_keys=None,
             include_keys=None):
    """Serialize this report (and summary data of its group) to a dict.

    :param request: current request, used for building public URLs
    :param details: when True also include affected-user stats,
        front-visible integrations, comments, prev/next report links and
        the slow calls arranged as a parent/child tree
    :param exclude_keys: top-level keys to drop from the result
    :param include_keys: when given, only these top-level keys are kept
        (the 'group' key is always preserved)
    """
    from appenlight.models.services.report_group import ReportGroupService
    instance_dict = super(Report, self).get_dict()
    instance_dict['req_stats'] = self.req_stats()
    instance_dict['group'] = {}
    instance_dict['group']['id'] = self.report_group.id
    instance_dict['group'][
        'total_reports'] = self.report_group.total_reports
    instance_dict['group']['last_report'] = self.report_group.last_report
    instance_dict['group']['priority'] = self.report_group.priority
    instance_dict['group']['occurences'] = self.report_group.occurences
    instance_dict['group'][
        'last_timestamp'] = self.report_group.last_timestamp
    instance_dict['group'][
        'first_timestamp'] = self.report_group.first_timestamp
    instance_dict['group']['public'] = self.report_group.public
    instance_dict['group']['fixed'] = self.report_group.fixed
    instance_dict['group']['read'] = self.report_group.read
    instance_dict['group'][
        'average_duration'] = self.report_group.average_duration
    instance_dict[
        'resource_name'] = self.report_group.application.resource_name
    instance_dict['report_type'] = self.report_type
    # give plain 404s a human-readable error label
    if instance_dict['http_status'] == 404 and not instance_dict['error']:
        instance_dict['error'] = '404 Not Found'
    if details:
        instance_dict['affected_users_count'] = \
            ReportGroupService.affected_users_count(self.report_group)
        instance_dict['top_affected_users'] = [{
            'username': u.username,
            'count': u.count
        } for u in ReportGroupService.top_affected_users(self.report_group)
        ]
        # only integrations flagged as front-visible are exposed
        instance_dict['application'] = {'integrations': []}
        for integration in self.report_group.application.integrations:
            if integration.front_visible:
                instance_dict['application']['integrations'].append({
                    'name': integration.integration_name,
                    'action': integration.integration_action
                })
        instance_dict['comments'] = [
            c.get_dict() for c in self.report_group.comments
        ]
        instance_dict['group']['next_report'] = None
        instance_dict['group']['previous_report'] = None
        next_in_group = self.get_next_in_group(request)
        previous_in_group = self.get_previous_in_group(request)
        if next_in_group:
            instance_dict['group']['next_report'] = next_in_group
        if previous_in_group:
            instance_dict['group']['previous_report'] = previous_in_group

        # slow call ordering
        def find_parent(row, data):
            # a call's parent is the latest call that fully encloses it
            # in time; non-comparable timestamps are logged and skipped
            for r in reversed(data):
                try:
                    if (row['timestamp'] > r['timestamp']
                            and row['end_time'] < r['end_time']):
                        return r
                except TypeError as e:
                    log.warning('reports_view.find_parent: %s' % e)
            return None

        new_calls = []
        calls = [c.get_dict() for c in self.slow_calls]
        while calls:
            # start from end
            for x in range(len(calls) - 1, -1, -1):
                parent = find_parent(calls[x], calls)
                if parent:
                    parent['children'].append(calls[x])
                else:
                    # no parent at all? append to new calls anyways
                    new_calls.append(calls[x])
                    # print 'append', calls[x]
                del calls[x]
                break
        instance_dict['slow_calls'] = new_calls
    instance_dict['front_url'] = self.get_public_url(request)
    # apply key filtering last; 'group' is always kept
    exclude_keys_list = exclude_keys or []
    include_keys_list = include_keys or []
    for k in list(instance_dict.keys()):
        if k == 'group':
            continue
        if (k in exclude_keys_list
                or (k not in include_keys_list and include_keys)):
            del instance_dict[k]
    return instance_dict
def alerting_test(request):
    """Test-fire the requested event on one of the user's alert channels.

    The channel is selected by the 'channel_name'/'channel_value' query
    params; 'event_name' picks which kind of test notification to send.
    Always returns the user's alert channels and viewable applications.
    """
    applications = request.user.resources_with_perms(
        ['view'], resource_types=['application'])
    # what we can select in total
    all_possible_app_ids = [app.resource_id for app in applications]
    resource = applications[0]
    alert_channels = []
    for channel in request.user.alert_channels:
        alert_channels.append(channel.get_dict())
    cname = request.params.get('channel_name')
    cvalue = request.params.get('channel_value')
    event_name = request.params.get('event_name')
    if cname and cvalue:
        # locate the matching channel; the loop variable deliberately
        # leaks out of the loop and is used by the branches below
        for channel in request.user.alert_channels:
            if (channel.channel_value == cvalue
                    and channel.channel_name == cname):
                break
        if event_name in ['error_report_alert', 'slow_report_alert']:
            # opened
            new_event = Event(resource_id=resource.resource_id,
                              event_type=Event.types[event_name],
                              start_date=datetime.datetime.utcnow(),
                              status=Event.statuses['active'],
                              values={
                                  'reports': 5,
                                  'threshold': 10
                              })
            channel.notify_alert(resource=resource,
                                 event=new_event,
                                 user=request.user,
                                 request=request)
            # closed
            ev_type = Event.types[event_name.replace('open', 'close')]
            new_event = Event(resource_id=resource.resource_id,
                              event_type=ev_type,
                              start_date=datetime.datetime.utcnow(),
                              status=Event.statuses['closed'],
                              values={
                                  'reports': 5,
                                  'threshold': 10
                              })
            channel.notify_alert(resource=resource,
                                 event=new_event,
                                 user=request.user,
                                 request=request)
        elif event_name == 'notify_reports':
            report = ReportGroupService.by_app_ids(all_possible_app_ids) \
                .filter(ReportGroup.report_type == ReportType.error).first()
            confirmed_reports = [(5, report), (1, report)]
            channel.notify_reports(resource=resource,
                                   user=request.user,
                                   request=request,
                                   since_when=datetime.datetime.utcnow(),
                                   reports=confirmed_reports)
            confirmed_reports = [(5, report)]
            channel.notify_reports(resource=resource,
                                   user=request.user,
                                   request=request,
                                   since_when=datetime.datetime.utcnow(),
                                   reports=confirmed_reports)
        elif event_name == 'notify_uptime':
            new_event = Event(resource_id=resource.resource_id,
                              event_type=Event.types['uptime_alert'],
                              start_date=datetime.datetime.utcnow(),
                              status=Event.statuses['active'],
                              values={
                                  "status_code": 500,
                                  "tries": 2,
                                  "response_time": 0
                              })
            channel.notify_uptime_alert(resource=resource,
                                        event=new_event,
                                        user=request.user,
                                        request=request)
        elif event_name == 'chart_alert':
            # reuse an existing active chart_alert event for the test
            event = EventService.by_type_and_status(
                event_types=(Event.types['chart_alert'], ),
                status_types=(Event.statuses['active'], )).first()
            channel.notify_chart_alert(resource=event.resource,
                                       event=event,
                                       user=request.user,
                                       request=request)
        elif event_name == 'daily_digest':
            since_when = datetime.datetime.utcnow() - datetime.timedelta(
                hours=8)
            filter_settings = {
                'resource': [resource.resource_id],
                'tags': [{
                    'name': 'type',
                    'value': ['error'],
                    'op': None
                }],
                'type': 'error',
                'start_date': since_when
            }
            reports = ReportGroupService.get_trending(
                request, filter_settings=filter_settings, limit=50)
            channel.send_digest(resource=resource,
                                user=request.user,
                                request=request,
                                since_when=datetime.datetime.utcnow(),
                                reports=reports)
    return {
        'alert_channels':
        alert_channels,
        'applications':
        dict([(app.resource_id, app.resource_name)
              for app in applications.all()])
    }
def alerting_test(request):
    """Test-fire the requested event on one of the user's alert channels.

    The channel is selected by the "channel_name"/"channel_value" query
    params; "event_name" picks which kind of test notification to send.
    Always returns the user's alert channels and viewable applications.
    """
    applications = UserService.resources_with_perms(
        request.user, ["view"], resource_types=["application"])
    # what we can select in total
    all_possible_app_ids = [app.resource_id for app in applications]
    resource = applications[0]
    alert_channels = []
    for channel in request.user.alert_channels:
        alert_channels.append(channel.get_dict())
    cname = request.params.get("channel_name")
    cvalue = request.params.get("channel_value")
    event_name = request.params.get("event_name")
    if cname and cvalue:
        # locate the matching channel; the loop variable deliberately
        # leaks out of the loop and is used by the branches below
        for channel in request.user.alert_channels:
            if channel.channel_value == cvalue and channel.channel_name == cname:
                break
        if event_name in ["error_report_alert", "slow_report_alert"]:
            # opened
            new_event = Event(
                resource_id=resource.resource_id,
                event_type=Event.types[event_name],
                start_date=datetime.datetime.utcnow(),
                status=Event.statuses["active"],
                values={
                    "reports": 5,
                    "threshold": 10
                },
            )
            channel.notify_alert(resource=resource,
                                 event=new_event,
                                 user=request.user,
                                 request=request)
            # closed
            ev_type = Event.types[event_name.replace("open", "close")]
            new_event = Event(
                resource_id=resource.resource_id,
                event_type=ev_type,
                start_date=datetime.datetime.utcnow(),
                status=Event.statuses["closed"],
                values={
                    "reports": 5,
                    "threshold": 10
                },
            )
            channel.notify_alert(resource=resource,
                                 event=new_event,
                                 user=request.user,
                                 request=request)
        elif event_name == "notify_reports":
            report = (
                ReportGroupService.by_app_ids(all_possible_app_ids).filter(
                    ReportGroup.report_type == ReportType.error).first())
            confirmed_reports = [(5, report), (1, report)]
            channel.notify_reports(
                resource=resource,
                user=request.user,
                request=request,
                since_when=datetime.datetime.utcnow(),
                reports=confirmed_reports,
            )
            confirmed_reports = [(5, report)]
            channel.notify_reports(
                resource=resource,
                user=request.user,
                request=request,
                since_when=datetime.datetime.utcnow(),
                reports=confirmed_reports,
            )
        elif event_name == "notify_uptime":
            new_event = Event(
                resource_id=resource.resource_id,
                event_type=Event.types["uptime_alert"],
                start_date=datetime.datetime.utcnow(),
                status=Event.statuses["active"],
                values={
                    "status_code": 500,
                    "tries": 2,
                    "response_time": 0
                },
            )
            channel.notify_uptime_alert(resource=resource,
                                        event=new_event,
                                        user=request.user,
                                        request=request)
        elif event_name == "chart_alert":
            # reuse an existing active chart_alert event for the test
            event = EventService.by_type_and_status(
                event_types=(Event.types["chart_alert"], ),
                status_types=(Event.statuses["active"], ),
            ).first()
            channel.notify_chart_alert(resource=event.resource,
                                       event=event,
                                       user=request.user,
                                       request=request)
        elif event_name == "daily_digest":
            since_when = datetime.datetime.utcnow() - datetime.timedelta(
                hours=8)
            filter_settings = {
                "resource": [resource.resource_id],
                "tags": [{
                    "name": "type",
                    "value": ["error"],
                    "op": None
                }],
                "type": "error",
                "start_date": since_when,
            }
            reports = ReportGroupService.get_trending(
                request, filter_settings=filter_settings, limit=50)
            channel.send_digest(
                resource=resource,
                user=request.user,
                request=request,
                since_when=datetime.datetime.utcnow(),
                reports=reports,
            )
    return {
        "alert_channels":
        alert_channels,
        "applications":
        dict([(app.resource_id, app.resource_name)
              for app in applications.all()]),
    }
def get_dict(self, request, details=False, exclude_keys=None,
             include_keys=None):
    """Serialize this report (and summary data of its group) to a dict.

    :param request: current request, used for building public URLs
    :param details: when True also include affected-user stats,
        front-visible integrations, comments, prev/next report links and
        the slow calls arranged as a parent/child tree
    :param exclude_keys: top-level keys to drop from the result
    :param include_keys: when given, only these top-level keys are kept
        (the "group" key is always preserved)
    """
    from appenlight.models.services.report_group import ReportGroupService
    instance_dict = super(Report, self).get_dict()
    instance_dict["req_stats"] = self.req_stats()
    instance_dict["group"] = {}
    instance_dict["group"]["id"] = self.report_group.id
    instance_dict["group"][
        "total_reports"] = self.report_group.total_reports
    instance_dict["group"]["last_report"] = self.report_group.last_report
    instance_dict["group"]["priority"] = self.report_group.priority
    instance_dict["group"]["occurences"] = self.report_group.occurences
    instance_dict["group"][
        "last_timestamp"] = self.report_group.last_timestamp
    instance_dict["group"][
        "first_timestamp"] = self.report_group.first_timestamp
    instance_dict["group"]["public"] = self.report_group.public
    instance_dict["group"]["fixed"] = self.report_group.fixed
    instance_dict["group"]["read"] = self.report_group.read
    instance_dict["group"][
        "average_duration"] = self.report_group.average_duration
    instance_dict[
        "resource_name"] = self.report_group.application.resource_name
    instance_dict["report_type"] = self.report_type
    # give plain 404s a human-readable error label
    if instance_dict["http_status"] == 404 and not instance_dict["error"]:
        instance_dict["error"] = "404 Not Found"
    if details:
        instance_dict[
            "affected_users_count"] = ReportGroupService.affected_users_count(
                self.report_group)
        instance_dict["top_affected_users"] = [{
            "username": u.username,
            "count": u.count
        } for u in ReportGroupService.top_affected_users(self.report_group)
        ]
        # only integrations flagged as front-visible are exposed
        instance_dict["application"] = {"integrations": []}
        for integration in self.report_group.application.integrations:
            if integration.front_visible:
                instance_dict["application"]["integrations"].append({
                    "name": integration.integration_name,
                    "action": integration.integration_action,
                })
        instance_dict["comments"] = [
            c.get_dict() for c in self.report_group.comments
        ]
        instance_dict["group"]["next_report"] = None
        instance_dict["group"]["previous_report"] = None
        next_in_group = self.get_next_in_group(request)
        previous_in_group = self.get_previous_in_group(request)
        if next_in_group:
            instance_dict["group"]["next_report"] = next_in_group
        if previous_in_group:
            instance_dict["group"]["previous_report"] = previous_in_group

        # slow call ordering
        def find_parent(row, data):
            # a call's parent is the latest call that fully encloses it
            # in time; non-comparable timestamps are logged and skipped
            for r in reversed(data):
                try:
                    if (row["timestamp"] > r["timestamp"]
                            and row["end_time"] < r["end_time"]):
                        return r
                except TypeError as e:
                    log.warning("reports_view.find_parent: %s" % e)
            return None

        new_calls = []
        calls = [c.get_dict() for c in self.slow_calls]
        while calls:
            # start from end
            for x in range(len(calls) - 1, -1, -1):
                parent = find_parent(calls[x], calls)
                if parent:
                    parent["children"].append(calls[x])
                else:
                    # no parent at all? append to new calls anyways
                    new_calls.append(calls[x])
                    # print 'append', calls[x]
                del calls[x]
                break
        instance_dict["slow_calls"] = new_calls
    instance_dict["front_url"] = self.get_public_url(request)
    # apply key filtering last; "group" is always kept
    exclude_keys_list = exclude_keys or []
    include_keys_list = include_keys or []
    for k in list(instance_dict.keys()):
        if k == "group":
            continue
        if k in exclude_keys_list or (k not in include_keys_list
                                      and include_keys):
            del instance_dict[k]
    return instance_dict
def add_reports(resource_id, request_params, dataset, **kwargs):
    """Celery task: ingest a batch of reports for one application.

    For each report in *dataset*: find (or create) its report group in the
    current month's partition, optionally persist the full report as a
    sample, update group counters/aggregates, and collect Elasticsearch
    documents. Afterwards bump per-minute/hour redis counters and bulk-ship
    the collected ES docs via follow-up tasks.

    :param resource_id: id of the application receiving the reports
    :param request_params: raw request params (used for protocol version)
    :param dataset: list of report payload dicts
    :returns: True on success; on failure retries the task (or re-raises
        when celery runs in eager mode)
    """
    proto_version = parse_proto(request_params.get("protocol_version", ""))
    current_time = datetime.utcnow().replace(second=0, microsecond=0)
    try:
        # we will store solr docs here for single insert
        es_report_docs = {}
        es_report_group_docs = {}
        resource = ApplicationService.by_id(resource_id)
        tags = []
        es_slow_calls_docs = {}
        es_reports_stats_rows = {}
        for report_data in dataset:
            # build report details for later
            added_details = 0
            report = Report()
            report.set_data(report_data, resource, proto_version)
            report._skip_ft_index = True

            # find latest group in this months partition
            report_group = ReportGroupService.by_hash_and_resource(
                report.resource_id,
                report.grouping_hash,
                since_when=datetime.utcnow().date().replace(day=1),
            )
            occurences = report_data.get("occurences", 1)
            if not report_group:
                # total reports will be +1 moment later
                report_group = ReportGroup(
                    grouping_hash=report.grouping_hash,
                    occurences=0,
                    total_reports=0,
                    last_report=0,
                    priority=report.priority,
                    error=report.error,
                    first_timestamp=report.start_time,
                )
                report_group._skip_ft_index = True
                report_group.report_type = report.report_type
            report.report_group_time = report_group.first_timestamp
            # only a sample of reports is stored in full detail
            add_sample = pick_sample(report_group.occurences,
                                     report_type=report_group.report_type)
            if add_sample:
                resource.report_groups.append(report_group)
                report_group.reports.append(report)
                added_details += 1
                DBSession.flush()
                if report.partition_id not in es_report_docs:
                    es_report_docs[report.partition_id] = []
                es_report_docs[report.partition_id].append(report.es_doc())
                tags.extend(list(report.tags.items()))
                slow_calls = report.add_slow_calls(report_data, report_group)
                DBSession.flush()
                for s_call in slow_calls:
                    if s_call.partition_id not in es_slow_calls_docs:
                        es_slow_calls_docs[s_call.partition_id] = []
                    es_slow_calls_docs[s_call.partition_id].append(
                        s_call.es_doc())
                # try generating new stat rows if needed
            else:
                # required for postprocessing to not fail later
                report.report_group = report_group

            stat_row = ReportService.generate_stat_rows(
                report, resource, report_group)
            if stat_row.partition_id not in es_reports_stats_rows:
                es_reports_stats_rows[stat_row.partition_id] = []
            es_reports_stats_rows[stat_row.partition_id].append(
                stat_row.es_doc())

            # see if we should mark 10th occurence of report
            last_occurences_10 = int(
                math.floor(report_group.occurences / 10))
            curr_occurences_10 = int(
                math.floor((report_group.occurences + report.occurences) /
                           10))
            last_occurences_100 = int(
                math.floor(report_group.occurences / 100))
            curr_occurences_100 = int(
                math.floor((report_group.occurences + report.occurences) /
                           100))
            notify_occurences_10 = last_occurences_10 != curr_occurences_10
            notify_occurences_100 = last_occurences_100 != curr_occurences_100
            # SQL-expression assignments so counters update atomically
            report_group.occurences = ReportGroup.occurences + occurences
            report_group.last_timestamp = report.start_time
            report_group.summed_duration = ReportGroup.summed_duration + report.duration
            summed_duration = ReportGroup.summed_duration + report.duration
            summed_occurences = ReportGroup.occurences + occurences
            report_group.average_duration = summed_duration / summed_occurences
            report_group.run_postprocessing(report)
            if added_details:
                report_group.total_reports = ReportGroup.total_reports + 1
                report_group.last_report = report.id
            report_group.set_notification_info(
                notify_10=notify_occurences_10,
                notify_100=notify_occurences_100)
            DBSession.flush()
            report_group.get_report().notify_channel(report_group)
            if report_group.partition_id not in es_report_group_docs:
                es_report_group_docs[report_group.partition_id] = []
            es_report_group_docs[report_group.partition_id].append(
                report_group.es_doc())

            action = "REPORT"
            log_msg = "%s: %s %s, client: %s, proto: %s" % (
                action,
                report_data.get("http_status", "unknown"),
                str(resource),
                report_data.get("client"),
                proto_version,
            )
            log.info(log_msg)
        total_reports = len(dataset)
        # bump rate counters in one round trip
        redis_pipeline = Datastores.redis.pipeline(transaction=False)
        key = REDIS_KEYS["counters"]["reports_per_minute"].format(
            current_time)
        redis_pipeline.incr(key, total_reports)
        redis_pipeline.expire(key, 3600 * 24)
        key = REDIS_KEYS["counters"]["events_per_minute_per_user"].format(
            resource.owner_user_id, current_time)
        redis_pipeline.incr(key, total_reports)
        redis_pipeline.expire(key, 3600)
        key = REDIS_KEYS["counters"]["reports_per_hour_per_app"].format(
            resource_id, current_time.replace(minute=0))
        redis_pipeline.incr(key, total_reports)
        redis_pipeline.expire(key, 3600 * 24 * 7)
        redis_pipeline.sadd(
            REDIS_KEYS["apps_that_got_new_data_per_hour"].format(
                current_time.replace(minute=0)),
            resource_id,
        )
        redis_pipeline.execute()

        # bulk-ship collected docs to Elasticsearch
        add_reports_es(es_report_group_docs, es_report_docs)
        add_reports_slow_calls_es(es_slow_calls_docs)
        add_reports_stats_rows_es(es_reports_stats_rows)
        return True
    except Exception as exc:
        print_traceback(log)
        # in eager mode retry() would recurse synchronously - re-raise
        if celery.conf["CELERY_EAGER_PROPAGATES_EXCEPTIONS"]:
            raise
        add_reports.retry(exc=exc)