def get(self, app_name):
    """Render the dashboard for *app_name*.

    Handles archive requests (all groups or selected ones), then lists
    exception groups filtered by severity and/or keyword with paging and
    sorting, plus today-vs-yesterday statistics for the chart.
    """
    app, app_name = self._global(app_name)

    # Check if the Archive All flag was passed
    archive_all = self.get_argument("archive_all", None)
    if archive_all == "True":
        LoggingService.archive_all_exception_group(app["_id"])
        self.redirect("/dashboard/%s" % app_name)
        # BUG FIX: redirect() does not stop execution in Tornado; without
        # this return the handler would fall through and write a second
        # response body after the redirect headers.
        return

    # Archive any individually selected exception groups
    exception = self.get_arguments("exception", None)
    if exception:
        for ex in exception:
            LoggingService.archive_exception_group(ex)

    # Severity filter: only applied when a specific level is chosen
    severity = None
    log_choice = self.get_argument("log_choice", None)
    if log_choice is None or log_choice == "specific":
        severity = int(self.get_argument("severity_level", 1))

    # Paging offset (page numbers are 1-based)
    page = int(self.get_argument("page", 1))
    start = (page * ITEMS_PER_PAGE) - ITEMS_PER_PAGE

    # Full-text keyword filter
    keyword = self.get_argument("keyword", "")

    # Sorting: compute the direction each column header should toggle to next
    sort = self.get_argument("sort", "last_seen_on")
    sort_direction = int(self.get_argument("sort_direction", -1))
    lso_new_sort = -1
    cnt_new_sort = -1
    if sort == "last_seen_on":
        if sort_direction == -1:
            lso_new_sort = 1
    else:
        if sort_direction == -1:
            cnt_new_sort = 1

    if not keyword and app:
        self._data["exceptions"] = LoggingService.get_exceptions_groups(
            self._data["user"]["_id"],
            app["_id"],
            severity=severity,
            start=start,
            sort=sort,
            sort_direction=sort_direction,
        )
        total_count = self._data["exceptions"].count()
    elif keyword and app:
        # BUG FIX: guard on app — previously a keyword search with no
        # application selected crashed on app["_id"] below.
        from Packages.mongodbsearch import mongodbsearch
        from pymongo import Connection

        self._data["exceptions"] = []
        mongo_search = mongodbsearch.mongodb_search(Connection()["onerrorlog"])
        conditions = {"key": str(self._data["user"]["_id"]), "application": str(app["_id"])}
        if severity is not None:
            conditions["severity"] = severity
        # BUG FIX: fields was ["unique_hash, _id"] — a single malformed
        # string instead of the two intended field names.
        documents, _, total_count = mongo_search.search(
            keyword,
            conditions=conditions,
            fields=["unique_hash", "_id"],
            start=start,
            scoring=("last_save_date", -1),
        )
        for doc in documents:
            self._data["exceptions"].append(LoggingService.get_exception_group(doc["_id"]))
    else:
        self._data["exceptions"] = []
        total_count = 0

    if app:
        # Counts for today vs. the previous day, shaped for the dashboard
        # chart as [[index, count], ...]
        stats_data = {"today": [], "previous": []}
        stats = LoggingService.get_statistics_for_application(app["_id"], severity=severity)
        for i, s in enumerate(stats):
            stats_data["today"].append([i, s["count"]])
        stats = LoggingService.get_statistics_for_application(app["_id"], days_back=1, severity=severity)
        for i, s in enumerate(stats):
            stats_data["previous"].append([i, s["count"]])
        self._data["stats_data"] = stats_data
        self._data["log_choice"] = log_choice
        self._data["severity"] = severity
        self._data["get_severity_string"] = LoggingService.get_severity_string
        self._data["keyword"] = keyword
        self._data["total_count"] = total_count
        self._data["lso_new_sort"] = lso_new_sort
        self._data["cnt_new_sort"] = cnt_new_sort
        self._data["cgi"] = cgi
        self._compute_paging(page, total_count, app_name)
        self._data["section_title"] = "Dashboard : %s : %s" % (
            self._data["user"]["company_name"],
            app["application"],
        )
        self._data["application_name"] = app["application"]
        self._data["htmlTitle"] = "OnErrorLog - Dashboard"
        self.write(self.render_view("../Views/dashboard.html", self._data))
    else:
        self._data["section_title"] = "Getting Started"
        self._data["htmlTitle"] = "OnErrorLog - Getting Started"
        self.write(self.render_view("../Views/gettingstarted.html", self._data))
def get(self, app_name):
    """Render the dashboard for *app_name*.

    Handles archive requests (all groups or selected ones), then lists
    exception groups filtered by severity and/or keyword with paging and
    sorting, plus today-vs-yesterday statistics for the chart.
    """
    app, app_name = self._global(app_name)

    #Check if the Archive All flag was passed
    archive_all = self.get_argument('archive_all', None)
    if archive_all == 'True':
        LoggingService.archive_all_exception_group(app['_id'])
        self.redirect('/dashboard/%s' % app_name)
        #BUG FIX: redirect() does not stop execution in Tornado; without
        #this return the handler would fall through and write a second
        #response body after the redirect headers.
        return

    #Check if there are any exceptions to archive, if so
    exception = self.get_arguments('exception', None)
    if exception:
        for ex in exception:
            LoggingService.archive_exception_group(ex)

    #Get Severity - only applied when a specific level is chosen
    severity = None
    log_choice = self.get_argument('log_choice', "all")
    if log_choice is None or log_choice == 'specific':
        severity = int(self.get_argument('severity_level', 1))

    #Get Page and Offset (page numbers are 1-based)
    page = int(self.get_argument('page', 1))
    start = (page * ITEMS_PER_PAGE) - ITEMS_PER_PAGE

    #Get Exceptions
    keyword = self.get_argument('keyword', '')

    #Sorting Variables: compute the direction each column header toggles to
    sort = self.get_argument('sort', 'last_seen_on')
    sort_direction = int(self.get_argument('sort_direction', -1))
    lso_new_sort = -1
    cnt_new_sort = -1
    if sort == 'last_seen_on':
        if sort_direction == -1:
            lso_new_sort = 1
    else:
        if sort_direction == -1:
            cnt_new_sort = 1

    if not keyword and app:
        self._data['exceptions'] = LoggingService.get_exceptions_groups(
            self._data['user']['_id'], app['_id'],
            severity=severity, start=start,
            sort=sort, sort_direction=sort_direction)
        total_count = self._data['exceptions'].count()
    elif keyword and app:
        #BUG FIX: guard on app - previously a keyword search with no
        #application selected crashed on app['_id'] below.
        from Packages.mongodbsearch import mongodbsearch
        from pymongo import Connection

        self._data['exceptions'] = []
        mongo_search = mongodbsearch.mongodb_search(Connection()['onerrorlog'])
        conditions = {'key': str(self._data['user']['_id']),
                      'application': str(app['_id']),
                      }
        if severity is not None:
            conditions['severity'] = severity
        #BUG FIX: fields was ['unique_hash, _id'] - a single malformed
        #string instead of the two intended field names.
        documents, _, total_count = mongo_search.search(
            keyword, conditions=conditions,
            fields=['unique_hash', '_id'],
            start=start, scoring=('last_save_date', -1))
        for doc in documents:
            self._data['exceptions'].append(LoggingService.get_exception_group(doc['_id']))
    else:
        self._data['exceptions'] = []
        total_count = 0

    if app:
        #Counts for today vs. the previous day, shaped for the dashboard
        #chart as [[index, count], ...]
        stats_data = {'today': [], 'previous': []}
        stats = LoggingService.get_statistics_for_application(app['_id'], severity=severity)
        for i, s in enumerate(stats):
            stats_data['today'].append([i, s['count']])
        stats = LoggingService.get_statistics_for_application(app['_id'], days_back=1, severity=severity)
        for i, s in enumerate(stats):
            stats_data['previous'].append([i, s['count']])
        self._data['stats_data'] = stats_data
        self._data['log_choice'] = log_choice
        self._data['severity'] = severity
        self._data['get_severity_string'] = LoggingService.get_severity_string
        self._data['keyword'] = keyword
        self._data['total_count'] = total_count
        self._data['lso_new_sort'] = lso_new_sort
        self._data['cnt_new_sort'] = cnt_new_sort
        self._data['cgi'] = cgi
        self._compute_paging(page, total_count, app_name)
        self._data['section_title'] = 'Dashboard : %s : %s' % (self._data['user']['company_name'],
                                                               app['application'])
        self._data['application_name'] = app['application']
        self._data['htmlTitle'] = 'OnErrorLog - Dashboard'
        self.write(self.render_view('../Views/dashboard.html', self._data))
    else:
        self._data['section_title'] = 'Getting Started'
        self._data['htmlTitle'] = 'OnErrorLog - Getting Started'
        self.write(self.render_view('../Views/gettingstarted.html', self._data))
def insert_exception(d):
    """Insert the exception document *d*, grouping duplicates by a hash of
    their identifying fields.

    Required keys: 'key', 'message', 'application', 'severity'.
    Optional keys: 'stacktrace', 'filename', 'headers', 'params'.

    Returns (exception_group_id, exception_id).
    Raises KeyError when a required field is missing or the key is invalid.
    NOTE: mutates *d* in place (it becomes the stored exception record).
    """
    exceptions = Database.Instance().exceptions()
    exception_groups = Database.Instance().exception_groups()
    applications = Database.Instance().applications()

    #Check for required keys
    required_fields = ['key', 'message', 'application', 'severity']
    if len(list(set(required_fields) & set(d))) != len(required_fields):
        raise KeyError('Some required fields are missing')

    check_severity(d['severity'])

    #Validate the key which was passed
    if not _validate_key(d['key']):
        raise KeyError('The key you specified is invalid')

    d['application'] = ApplicationService.insert_application(d['key'], d['application'])

    if 'stacktrace' not in d:
        d['stacktrace'] = ''
    #BUG FIX: 'filename' is not a required field but was dereferenced
    #unconditionally below (hash_string, group record, search kwargs),
    #raising KeyError; default it the same way as 'stacktrace'.
    if 'filename' not in d:
        d['filename'] = ''

    #Setup the string to be hashed
    hash_string = '%s\n%s\n%s\n%s\n%s\n%s' % (d['key'], d['severity'],
                                              str(d['stacktrace']), d['message'],
                                              d['filename'], d['application'])

    #Hash the string
    unique_hash = hash(hash_string)

    #Create and insert the exception record
    exception = d
    exception['insert_date'] = datetime.datetime.utcnow()
    exception['unique_hash'] = unique_hash
    exception_id = exceptions.save(exception)

    #Check if an un-archived exception group already exists for this hash
    group = exception_groups.find_one({'unique_hash': unique_hash, 'status': False })

    #If Exception already exists, increment count
    if group:
        group['count'] += 1
        group['exceptions'].append(exception_id)
        group['status'] = False
    #Otherwise, create the exception group record
    else:
        group = {
            'message': d['message'],
            'severity': d['severity'],
            'key': d['key'],
            'application': d['application'],
            'exceptions': [ exception_id ],
            'unique_hash': unique_hash,
            'status': False,
            'count': 1,
            'insert_date': datetime.datetime.utcnow(),
            'filename': d['filename'],
            'stacktrace': d['stacktrace'],
        }
        #New group: bump the application's total and per-severity counters
        applications.update({'_id': d['application'] }, {'$inc': {'count': 1 }})
        applications.update({'_id': d['application'] }, {'$inc': {str(d['severity']): 1 }})

    #Set the last seen date
    group['last_seen_on'] = datetime.datetime.utcnow()

    #Save the exception group
    exception_group_id = exception_groups.save(group)

    increment_statistics(d, exception_group_id)

    #Update the Exception with the Exception Group Id
    exception['exception_group_id'] = exception_group_id
    exceptions.save(exception)

    exception_groups.ensure_index([('severity', 1), ('application', 1), ('key', 1),
                                   ('unique_hash', 1), ('status', 1 ),
                                   ('last_seen_on', -1) ])
    exceptions.ensure_index([('severity', 1), ('application', 1), ('key', 1),
                             ('unique_hash', 1), ('insert_date', -1), ])

    #Index the Document for full-text search
    mdb_search = mongodbsearch.mongodb_search(Database.Instance().db())
    text = [d['message']]
    if 'headers' in d:
        for k, v in d['headers'].iteritems():
            text.append(str(k))
            text.append(str(v))
    if 'params' in d:
        for k, v in d['params'].iteritems():
            text.append(str(k))
            text.append(str(v))
    #NOTE(review): assumes 'stacktrace' is a list of dicts; the '' default
    #above yields an empty loop, but a non-empty string would break here -
    #confirm against callers.
    for s in d['stacktrace']:
        text.extend([str(x) for x in s.values()])
    kwargs = {'key': d['key'], 'application': str(d['application']),
              'severity': d['severity'], 'last_seen_on': group['last_seen_on'],
              'filename': d['filename'], 'unique_hash': unique_hash }
    mdb_search.index_document(str(exception_group_id), ' '.join(text),
                              ensureindex=kwargs.keys(), **kwargs)

    return exception_group_id, exception_id
def insert_exception(d):
    """Insert the exception document *d*, grouping duplicates by a hash of
    their identifying fields.

    Required keys: 'key', 'message', 'application', 'severity'.
    Optional keys: 'stacktrace', 'filename', 'headers', 'params'.

    Returns (exception_group_id, exception_id).
    Raises KeyError when a required field is missing or the key is invalid.
    NOTE: mutates *d* in place (it becomes the stored exception record).
    """
    exceptions = Database.Instance().exceptions()
    exception_groups = Database.Instance().exception_groups()
    applications = Database.Instance().applications()

    # Check for required keys
    required_fields = ["key", "message", "application", "severity"]
    if len(list(set(required_fields) & set(d))) != len(required_fields):
        raise KeyError("Some required fields are missing")

    check_severity(d["severity"])

    # Validate the key which was passed
    if not _validate_key(d["key"]):
        raise KeyError("The key you specified is invalid")

    d["application"] = ApplicationService.insert_application(d["key"], d["application"])

    if "stacktrace" not in d:
        d["stacktrace"] = ""
    # BUG FIX: 'filename' is not a required field but was dereferenced
    # unconditionally below (hash_string, group record, search kwargs),
    # raising KeyError; default it the same way as 'stacktrace'.
    if "filename" not in d:
        d["filename"] = ""

    # Setup the string to be hashed
    hash_string = "%s\n%s\n%s\n%s\n%s\n%s" % (
        d["key"],
        d["severity"],
        str(d["stacktrace"]),
        d["message"],
        d["filename"],
        d["application"],
    )

    # Hash the string
    unique_hash = hash(hash_string)

    # Create and insert the exception record
    exception = d
    exception["insert_date"] = datetime.datetime.utcnow()
    exception["unique_hash"] = unique_hash
    exception_id = exceptions.save(exception)

    # Check if an un-archived exception group already exists for this hash
    group = exception_groups.find_one({"unique_hash": unique_hash, "status": False})

    # If Exception already exists, increment count
    if group:
        group["count"] += 1
        group["exceptions"].append(exception_id)
        group["status"] = False
    # Otherwise, create the exception group record
    else:
        group = {
            "message": d["message"],
            "severity": d["severity"],
            "key": d["key"],
            "application": d["application"],
            "exceptions": [exception_id],
            "unique_hash": unique_hash,
            "status": False,
            "count": 1,
            "insert_date": datetime.datetime.utcnow(),
            "filename": d["filename"],
            "stacktrace": d["stacktrace"],
        }
        # New group: bump the application's total and per-severity counters
        applications.update({"_id": d["application"]}, {"$inc": {"count": 1}})
        applications.update({"_id": d["application"]}, {"$inc": {str(d["severity"]): 1}})

    # Set the last seen date
    group["last_seen_on"] = datetime.datetime.utcnow()

    # Save the exception group
    exception_group_id = exception_groups.save(group)

    increment_statistics(d, exception_group_id)

    # Update the Exception with the Exception Group Id
    exception["exception_group_id"] = exception_group_id
    exceptions.save(exception)

    # Index the Document for full-text search
    mdb_search = mongodbsearch.mongodb_search(Database.Instance().db())
    text = [d["message"]]
    if "headers" in d:
        for k, v in d["headers"].iteritems():
            text.append(str(k))
            text.append(str(v))
    if "params" in d:
        for k, v in d["params"].iteritems():
            text.append(str(k))
            text.append(str(v))
    # NOTE(review): assumes 'stacktrace' is a list of dicts; the "" default
    # above yields an empty loop, but a non-empty string would break here —
    # confirm against callers.
    for s in d["stacktrace"]:
        text.extend([str(x) for x in s.values()])
    kwargs = {
        "key": d["key"],
        "application": str(d["application"]),
        "severity": d["severity"],
        "last_seen_on": group["last_seen_on"],
        "filename": d["filename"],
        "unique_hash": unique_hash,
    }
    mdb_search.index_document(str(exception_group_id), " ".join(text), ensureindex=kwargs.keys(), **kwargs)

    return exception_group_id, exception_id