def manage_log_brok(self, b): data = b.data line = data['log'] if re.match("^\[[0-9]*\] [A-Z][a-z]*.:", line): # Match log which NOT have to be stored # print "Unexpected in manage_log_brok", line return logline = Logline(line=line) values = logline.as_dict() if logline.logclass != LOGCLASS_INVALID: try: self.db[self.collection].insert(values) self.is_connected = CONNECTED # If we have a backlog from an outage, we flush these lines # First we make a copy, so we can delete elements from # the original self.backlog backloglines = [bl for bl in self.backlog] for backlogline in backloglines: try: self.db[self.collection].insert(backlogline) self.backlog.remove(backlogline) except AutoReconnect, exp: self.is_connected = SWITCHING except Exception, exp: logger.error("[LogStoreMongoDB] Got an exception inserting the backlog" % str(exp))
def manage_log_brok(self, b): data = b.data line = data['log'] try: logline = Logline(line=line) values = logline.as_tuple() except Exception, exp: print "Unexpected error:", exp
def manage_log_brok(self, b): data = b.data line = data['log'] try: logline = Logline(line=line) values = logline.as_tuple() if logline.logclass != LOGCLASS_INVALID: self.execute('INSERT INTO LOGS VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)', values) except LiveStatusLogStoreError, exp: print "An error occurred:", exp.args[0] print "DATABASE ERROR!!!!!!!!!!!!!!!!!"
def get_live_data_log(self): """Like get_live_data, but for log objects""" # finalize the filter stacks self.mongo_time_filter_stack.and_elements(self.mongo_time_filter_stack.qsize()) self.mongo_filter_stack.and_elements(self.mongo_filter_stack.qsize()) if self.use_aggressive_sql: # Be aggressive, get preselected data from sqlite and do less # filtering in python. But: only a subset of Filter:-attributes # can be mapped to columns in the logs-table, for the others # we must use "always-true"-clauses. This can result in # funny and potentially ineffective sql-statements mongo_filter_func = self.mongo_filter_stack.get_stack() else: # Be conservative, get everything from the database between # two dates and apply the Filter:-clauses in python mongo_filter_func = self.mongo_time_filter_stack.get_stack() dbresult = [] mongo_filter = mongo_filter_func() logger.debug("[Logstore MongoDB] Mongo filter is %s" % str(mongo_filter)) # We can apply the filterstack here as well. we have columns and filtercolumns. # the only additional step is to enrich log lines with host/service-attributes # A timerange can be useful for a faster preselection of lines filter_element = eval('{ ' + mongo_filter + ' }') logger.debug("[LogstoreMongoDB] Mongo filter is %s" % str(filter_element)) columns = ['logobject', 'attempt', 'logclass', 'command_name', 'comment', 'contact_name', 'host_name', 'lineno', 'message', 'plugin_output', 'service_description', 'state', 'state_type', 'time', 'type'] if not self.is_connected == CONNECTED: logger.warning("[LogStoreMongoDB] sorry, not connected") else: dbresult = [Logline([(c,) for c in columns], [x[col] for col in columns]) for x in self.db[self.collection].find(filter_element).sort([(u'time', pymongo.ASCENDING), (u'lineno', pymongo.ASCENDING)])] return dbresult
def manage_log_brok(self, b): if self.read_only: return data = b.data line = data['log'] if re.match("^\[[0-9]*\] [A-Z][a-z]*.:", line): # Match log which NOT have to be stored # print "Unexpected in manage_log_brok", line return try: logline = Logline(line=line) values = logline.as_tuple() if logline.logclass != LOGCLASS_INVALID: self.execute('INSERT INTO LOGS VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)', values) except LiveStatusLogStoreError, exp: print "An error occurred:", exp.args[0] print "DATABASE ERROR!!!!!!!!!!!!!!!!!"
def manage_log_brok(self, b): if self.read_only: return data = b.data line = data['log'] if re.match("^\[[0-9]*\] [A-Z][a-z]*.:", line): # Match log which NOT have to be stored # print "Unexpected in manage_log_brok", line return try: logline = Logline(line=line) values = logline.as_tuple() if logline.logclass != LOGCLASS_INVALID: self.execute('INSERT INTO LOGS VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)', values) except LiveStatusLogStoreError, exp: logger.error("[Logstore SQLite] An error occurred: %s", str(exp.args[0])) logger.error("[Logstore SQLite] DATABASE ERROR!!!!!!!!!!!!!!!!!")
def manage_log_brok(self, b): data = b.data line = data['log'] logline = Logline(line=line) values = logline.as_dict() if logline.logclass != LOGCLASS_INVALID: try: self.db[self.collection].insert(values, safe=True) self.is_connected = CONNECTED # If we have a backlog from an outage, we flush these lines # First we make a copy, so we can delete elements from # the original self.backlog backloglines = [bl for bl in self.backlog] for backlogline in backloglines: try: self.db[self.collection].insert(backlogline, safe=True) self.backlog.remove(backlogline) except Autoreconnect, exp: self.is_connected = SWITCHING except Exception, exp: print "Got an exception inserting the backlog", str(exp)
def row_factory(cursor, row):
    """Handler for the sqlite fetch method.

    Wraps each raw sqlite row in a Logline, passing along the cursor
    description so column names can be mapped.
    """
    kwargs = {'sqlite_cursor': cursor.description, 'sqlite_row': row}
    return Logline(**kwargs)