def update_count(self, save_filters):
    '''
    Updates counts for Times_seen_by_minute, Log group times seen
    and server times seen.

    :param save_filters: list of tlog.base.filter.Filter
    '''
    with new_session() as session:
        try:
            for filter_ in save_filters:
                Times_seen_by_minute._update(
                    session=session,
                    log_group_id=self.log_group.id,
                    filter_id=filter_.id,
                )
            Log_group._inc_seen(session=session, log_group=self.log_group)
            Server_count._add(
                session=session,
                log_group_id=self.log_group.id,
                name=self.hostname,
            )
            session.commit()
        except IntegrityError:
            # A concurrent writer inserted the same counter row first;
            # retry — the row now exists, so the update path is taken.
            # NOTE(review): recursion is unbounded if the conflict
            # persists — consider adding a retry limit.
            logging.exception('Times seen duplicate key, trying again.')
            self.update_count(save_filters)
def test_new(self):
    '''
    Creates two events on a log group and checks that both can be
    read back and carry the creating user.
    '''
    user = User.new(
        name=u'Test user ø',
        email='*****@*****.**',
    )
    store = Store(
        Parse(
            u'<34>Oct 11 22:14:15 mymachine.example.com su - ID47 - ZwPpeQyUtrRKxw5asd'
        ))
    Log_group.add(store)
    group = Log_group.get(message_hash=store.message_hash)
    for event_message in (u'Test event', u'Test event 2'):
        self.assertTrue(
            Log_group_event.new(
                log_group_id=group.id,
                user_id=user.id,
                message=event_message,
            ))
    events = Log_group_events.get(log_group_id=group.id)
    self.assertEqual(len(events), 2)
    self.assertEqual(events[0].user.id, user.id)
def get(self):
    '''
    Deletes a log group and all the relations to it.
    '''
    log_group_id = int(self.get_argument('log_group_id'))
    Log_group.delete(id_=log_group_id)
    self.redirect('/stream')
def post(self):
    '''
    Changes the status of a log group.

    Reads `log_group_id` and `status` request arguments, updates the
    group's status, records the `reopened` timestamp when a resolved
    group is reopened, and logs a matching Log_group_event.
    '''
    log_group_id = int(self.get_argument('log_group_id'))
    status = int(self.get_argument('status'))
    # Fix: original used the `<>` operator, which is Python-2-only and
    # was removed in Python 3; `!=` / `not in` is correct in both.
    if status not in (constants.STATUS_RESOLVED, constants.STATUS_UNRESOLVED):
        # NOTE(review): assumes self.error() aborts the request (raises
        # or finishes) — confirm; otherwise execution falls through.
        self.error(400, 'Unknown status {}'.format(status))
    log_group = Log_group.get_by_id(log_group_id)
    if log_group.status != status:
        reopened = None
        if log_group.status == constants.STATUS_RESOLVED:
            # Leaving the resolved state: remember when it was reopened.
            reopened = datetime.utcnow()
        Log_group.update_status(
            id_=log_group_id,
            status=status,
            reopened=reopened,
        )
        if status == constants.STATUS_RESOLVED:
            Log_group_event.new(
                log_group_id=log_group_id,
                user_id=self.current_user.id,
                message='marked this log group as resolved',
            )
        if status == constants.STATUS_UNRESOLVED:
            Log_group_event.new(
                log_group_id=log_group_id,
                user_id=self.current_user.id,
                message='reopened this log group',
            )
def test_new(self):
    '''
    Two events created for the same log group should both be returned
    by Log_group_events.get, attributed to the creating user.
    '''
    user = User.new(
        name=u'Test user ø',
        email='*****@*****.**',
    )
    line = u'<34>Oct 11 22:14:15 mymachine.example.com su - ID47 - ZwPpeQyUtrRKxw5asd'
    store = Store(Parse(line))
    Log_group.add(store)
    group = Log_group.get(message_hash=store.message_hash)

    created_first = Log_group_event.new(
        log_group_id=group.id,
        user_id=user.id,
        message=u'Test event',
    )
    self.assertTrue(created_first)

    created_second = Log_group_event.new(
        log_group_id=group.id,
        user_id=user.id,
        message=u'Test event 2',
    )
    self.assertTrue(created_second)

    events = Log_group_events.get(log_group_id=group.id)
    self.assertEqual(len(events), 2)
    self.assertEqual(events[0].user.id, user.id)
def test_set_events(self):
    '''
    Resolving a log group and then saving the same message again
    should trigger a "reopened this log group" event.
    '''
    filter_ = Filter.new(u'Test filter ø', data_yaml='store: true')
    line = u'<34>Oct 11 22:14:15 mymachine.example.com su: BOM\'su root\' failed for lonvick on /dev/pts/8 æøå'

    first = Store(Parse(line), [filter_])
    first.save()
    Log_group.update_status(
        id_=first.log_group.id,
        status=constants.STATUS_RESOLVED,
        reopened=None,
    )

    second = Store(Parse(line), [filter_])
    second.save()
    self.assertEqual(Mock_log_group_event.message, 'reopened this log group')
def set_events(self):
    '''
    Adds an event under the right circumstances: if the log group is
    currently marked as resolved, it is reopened (status set back to
    unresolved with a fresh `reopened` timestamp) and a system-user
    "reopened this log group" event is recorded.
    '''
    if self.log_group.status == constants.STATUS_RESOLVED:
        Log_group.update_status(
            id_=self.log_group.id,
            status=constants.STATUS_UNRESOLVED,
            reopened=datetime.utcnow(),
        )
        Log_group_event.new(log_group_id=self.log_group.id,
                            user_id=constants.SYSTEM_USER,
                            message='reopened this log group')
def test_get(self):
    '''
    Creates 2 filters, relates them to a log group, and tests that
    both the strict and the non-strict lookup by filters works.
    '''
    filter1 = Filter.new(name=u'Test filter 1', data_yaml='')
    filter2 = Filter.new(name=u'Test filter 2', data_yaml='')
    store = Store(Parse(u'<34>Oct 11 22:14:15 mymachine.example.com su - ID47 - ZwPpeQyUtrRKxw5'))
    group = Log_group.add(store)
    group = Log_group.get(message_hash=store.message_hash)
    added = Log_group_filters.add(
        filters=[filter1, filter2],
        log_group_id=group.id,
    )
    self.assertTrue(added)
    groups = Log_groups.get(filters=[filter1, filter2])
    self.assertEqual(len(groups), 1)
    self.assertEqual(groups[0].id, group.id)

    # Bump filter2 to a new version, attach it to a second group, and
    # check that the strict lookup only matches the latest version.
    filter2.update(id_=filter2.id, name='Test filter asd', data_yaml='')
    filter2 = Filter.get(id_=filter2.id)
    store = Store(Parse(u'<34>Oct 11 22:14:15 mymachine.example.com su - ID47 - NEW MESSAGE ZwPpeQyUtrRKxw5'))
    group = Log_group.add(store)
    group = Log_group.get(message_hash=store.message_hash)
    added = Log_group_filters.add(
        filters=[filter1, filter2],
        log_group_id=group.id,
    )
    self.assertTrue(added)

    self.assertEqual(len(Log_groups.get(filters=[filter2])), 2)
    self.assertEqual(
        len(Log_groups.get(filters=[filter2], strict_version=True)), 1)
def save(self):
    '''
    Stores the object in the database.
    Returns None if there were no reason to sample the log message.

    :returns: boolean or None
        True  — the raw log message was sampled and saved.
        None  — filters matched and counts were updated, but sampling
                decided not to store the raw log message.
        False — the message was invalid, or no filter asked for it to
                be stored.
    '''
    if not self.valid:
        return False
    save_filters = self.get_save_filters()
    if save_filters:
        self.saved = False
        self.log_group = Log_group.add(self)
        self.update_count(save_filters)
        # Counts above are always updated; the raw log line itself is
        # only written when the sampler says so.
        if self.should_sample(times_seen=self.log_group.times_seen,
                              last_seen=self.log_group.last_seen):
            self.save_log()
            self.saved = True
        Log_group_filters.add(save_filters, self.log_group.id)
        self.set_events()
        self.send_notification()
    self.send_to_elasticsearch(
    )  # Check if the message should be stored in ElasticSearch, even if the log message was not saved.
    if not save_filters:
        return False
    if self.saved:
        return True
    return None
def get(self, log_group_id, log_id=None):
    '''
    Shows a log group.

    :param log_group_id: id of the log group to render.
    :param log_id: optional id of a specific stored log line; when
        omitted, the group's last stored log line is shown.
    '''
    log_group = Log_group.get_by_id(id_=int(log_group_id))
    if not log_group:
        # NOTE(review): assumes self.error() aborts the request —
        # confirm; otherwise the code below runs with log_group None.
        self.error(404, 'Log group not found')
    if not log_id:
        # Fall back to the group's most recent stored log line;
        # -1 means "no log line" when none has been stored.
        log_id = -1
        if log_group.last_log_id:
            log_id = log_group.last_log_id
    log = Log.get(id_=log_id)
    self.render(
        'log_group_view.html',
        title='Log group',
        log_group=log_group,
        syslog_severity=constants.SYSLOG_SEVERITY,
        syslog_facility=constants.SYSLOG_FACILITY,
        events=Log_group_events.get(log_group.id),
        log=log,
        servers=Servers_count.get(log_group.id),
        filters=Log_group_filters.get(log_group.id),
        prev_log=Log.get_prev(id_=log_id, log_group_id=log_group.id),
        next_log=Log.get_next(id_=log_id, log_group_id=log_group.id),
    )
def test_add(self):
    '''
    Relates two filters to a log group and reads the relation back.
    '''
    filters = [
        Filter.new(name=u'Test filter 1', data_yaml=''),
        Filter.new(name=u'Test filter 2', data_yaml=''),
    ]
    store = Store(Parse(u'<34>Oct 11 22:14:15 mymachine.example.com su - ID47 - ZwPpeQyUtrRKxw5'))
    group = Log_group.add(store)
    group = Log_group.get(message_hash=store.message_hash)
    self.assertTrue(
        Log_group_filters.add(
            filters=filters,
            log_group_id=group.id,
        ))
    # test get
    stored = Log_group_filters.get(log_group_id=group.id)
    self.assertEqual(len(stored), 2)
def test_check(self):
    '''
    A filter with a 15-minute inactivity rule should trigger when the
    only counter activity for its log group is older than that window.
    '''
    with new_session() as session:
        session.query(models.Filter).delete()
    inactivity_yaml = yaml.safe_dump({
        'inactivity': {
            'enabled': True,
            'minutes': 15,
        }
    })
    filter1 = Filter.new(u'Test filter 1', inactivity_yaml)
    store = Store(Parse(u'<34>Oct 11 22:14:15 mymachine.example.com su - ID47 - ZwPpeQyUtrRKxw5'))
    group1 = Log_group.add(store)
    group1 = Log_group.get(message_hash=store.message_hash)
    Times_seen_by_minute.update(
        log_group_id=group1.id,
        filter_id=filter1.id,
        when=datetime.utcnow() - timedelta(minutes=16),
    )
    self.assertTrue(Filter_inactivity.check())
def test_update(self):
    '''
    A single counter update should leave exactly one minute bucket
    with times_seen == 1 for the log group.
    '''
    store = Store(Parse(u'<34>Oct 11 22:14:15 mymachine.example.com su - ID47 - ZwPpeQyUtrRKxw5'))
    group = Log_group.add(store)
    filter_ = Filter.new(name=u'Test filter ø', data_yaml='')
    Times_seen_by_minute.update(
        log_group_id=group.id,
        filter_id=filter_.id,
    )
    buckets = Times_seen_by_minute.get_by_log_group_id(log_group_id=group.id)
    self.assertEqual(len(buckets), 1)
    self.assertEqual(buckets[0].times_seen, 1)
def test_set_events(self):
    '''
    Storing the same message after its group has been resolved must
    reopen the group and record a "reopened" event.
    '''
    filter_ = Filter.new(u'Test filter ø', data_yaml='store: true')
    message = u'<34>Oct 11 22:14:15 mymachine.example.com su: BOM\'su root\' failed for lonvick on /dev/pts/8 æøå'

    store = Store(Parse(message), [filter_])
    store.save()
    Log_group.update_status(
        id_=store.log_group.id,
        status=constants.STATUS_RESOLVED,
        reopened=None,
    )

    store = Store(Parse(message), [filter_])
    store.save()
    self.assertEqual(Mock_log_group_event.message, 'reopened this log group')
def save_log(self):
    '''
    Inserts the log message to table `logs` and points the log
    group's `last_log_id` at the new row.
    '''
    with new_session() as session:
        log = models.Log(
            hostname=self.hostname,
            external_id=self.external_id,
            message_hash=self.message_hash,
            received=self.received,
            # Parsed data is zlib-compressed, then base64-encoded for
            # storage in a text column (Python 2 str.encode codecs).
            data=json_dumps(self.data).encode('zlib').encode('base64'),
            level=self.level,
            log_group_id=self.log_group.id,
        )
        session.add(log)
        # Commit first so `log.id` is assigned before it is recorded
        # on the log group below.
        session.commit()
        # NOTE(review): no commit after this call — assumes the helper
        # or the new_session() context manager flushes/commits it.
        Log_group._update_last_log_id(
            session=session,
            id_=self.log_group.id,
            last_log_id=log.id,
        )
def test_add(self, times_seen=2):
    '''
    Increments the seen-counter for a log group and checks the result.

    :param times_seen: expected `times_seen` value after the increment.

    Bug fix: `self` was missing from the signature, so `times_seen`
    would have received the test instance and `self` was undefined in
    the body (NameError). A default is supplied so the test runner can
    call the method with no arguments.
    NOTE(review): default of 2 assumes Log_group.add records the first
    sighting and inc_seen bumps it to 2 — confirm against the model.
    '''
    store = Store(Parse(u'<34>Oct 11 22:14:15 mymachine.example.com su - ID47 - ZwPpeQyUtrRKxw5'))
    Log_group.add(store)
    group = Log_group.get(message_hash=store.message_hash)
    Log_group.inc_seen(log_group=group)
    group = Log_group.get(message_hash=store.message_hash)
    self.assertEqual(group.times_seen, times_seen)
def test_add(self):
    '''
    Adding the same server twice for a log group keeps one row with
    its count bumped to 2.
    '''
    store = Store(Parse(u'<34>Oct 11 22:14:15 mymachine.example.com su - ID47 - ZwPpeQyUtrRKxw5'))
    group = Log_group.add(store)
    group = Log_group.get(message_hash=store.message_hash)
    for _ in range(2):
        self.assertTrue(
            Server_count.add(
                log_group_id=group.id,
                name=store.hostname,
            ))
    servers = Servers_count.get(log_group_id=group.id)
    self.assertEqual(len(servers), 1)
    self.assertEqual(servers[0].name, store.hostname)
    self.assertEqual(servers[0].count, 2)
def test_check(self):
    '''
    Filter_inactivity.check() should fire for a filter whose only
    recorded activity is older than its configured inactivity window.
    '''
    with new_session() as session:
        session.query(models.Filter).delete()
    filter1 = Filter.new(u'Test filter 1', yaml.safe_dump({
        'inactivity': {
            'enabled': True,
            'minutes': 15,
        }
    }))
    store = Store(Parse(u'<34>Oct 11 22:14:15 mymachine.example.com su - ID47 - ZwPpeQyUtrRKxw5'))
    group1 = Log_group.add(store)
    group1 = Log_group.get(message_hash=store.message_hash)
    sixteen_minutes_ago = datetime.utcnow() - timedelta(minutes=16)
    Times_seen_by_minute.update(
        log_group_id=group1.id,
        filter_id=filter1.id,
        when=sixteen_minutes_ago,
    )
    self.assertTrue(Filter_inactivity.check())
def test_add(self):
    '''
    Two Server_count.add calls for the same host must collapse into a
    single row whose count is 2.
    '''
    syslog_line = u'<34>Oct 11 22:14:15 mymachine.example.com su - ID47 - ZwPpeQyUtrRKxw5'
    store = Store(Parse(syslog_line))
    group = Log_group.add(store)
    group = Log_group.get(message_hash=store.message_hash)

    first = Server_count.add(log_group_id=group.id, name=store.hostname)
    self.assertTrue(first)
    second = Server_count.add(log_group_id=group.id, name=store.hostname)
    self.assertTrue(second)

    servers = Servers_count.get(log_group_id=group.id)
    self.assertEqual(len(servers), 1)
    self.assertEqual(servers[0].name, store.hostname)
    self.assertEqual(servers[0].count, 2)
def test_update(self):
    '''
    One counter update produces exactly one minute bucket with a
    times_seen of 1.
    '''
    syslog_line = u'<34>Oct 11 22:14:15 mymachine.example.com su - ID47 - ZwPpeQyUtrRKxw5'
    group = Log_group.add(Store(Parse(syslog_line)))
    filter_ = Filter.new(name=u'Test filter ø', data_yaml='')
    Times_seen_by_minute.update(
        log_group_id=group.id,
        filter_id=filter_.id,
    )
    minutes = Times_seen_by_minute.get_by_log_group_id(log_group_id=group.id)
    self.assertEqual(len(minutes), 1)
    self.assertEqual(minutes[0].times_seen, 1)
def test_check_filter_warning(self):
    '''
    Builds a history of minute counts and then a burst in the latest
    interval large enough to trip the filter's rate warning.
    '''
    with new_session() as session:
        session.query(models.Filter).delete()
    filter1 = Filter.new(u'Test filter 1', yaml.safe_dump({
        'rate_warning': {
            'enabled': True,
            'min_logs': 100,
            'threshold': 500,
        }
    }))
    store = Store(Parse(u'<34>Oct 11 22:14:15 mymachine.example.com su - ID47 - ZwPpeQyUtrRKxw5123'))
    group1 = Log_group.add(store)

    # Seed a baseline of historical intervals.
    now = datetime.utcnow()
    for i in xrange(1, 20):
        Times_seen_by_minute.update(
            log_group_id=group1.id,
            filter_id=filter1.id,
            when=normalize_datetime(
                now - timedelta(minutes=(i * MINUTE_NORMALIZATION))),
            inc=1000 + 10 * i,
        )

    # A burst of messages in the last ~90 seconds; this should be
    # enough to trigger the warning notification.
    Times_seen_by_minute.update(
        log_group_id=group1.id,
        filter_id=filter1.id,
        when=datetime.utcnow() - timedelta(minutes=1, seconds=30),
        inc=1000,
    )
    filters_to_check = Filter_warning.get_filters_to_check()
    self.assertTrue(
        Filter_warning.check_filter_warning(filters_to_check[0]),
    )
def test_check_filter_warning(self):
    '''
    Twenty historical intervals of counts followed by a recent burst
    should make check_filter_warning fire for the filter.
    '''
    with new_session() as session:
        session.query(models.Filter).delete()
    rate_warning_yaml = yaml.safe_dump({
        'rate_warning': {
            'enabled': True,
            'min_logs': 100,
            'threshold': 500,
        }
    })
    filter1 = Filter.new(u'Test filter 1', rate_warning_yaml)
    store = Store(Parse(u'<34>Oct 11 22:14:15 mymachine.example.com su - ID47 - ZwPpeQyUtrRKxw5123'))
    group1 = Log_group.add(store)

    # create some intervals
    intervals = 20
    now = datetime.utcnow()
    for i in xrange(1, intervals):
        minutes_back = i * MINUTE_NORMALIZATION
        Times_seen_by_minute.update(
            log_group_id=group1.id,
            filter_id=filter1.id,
            when=normalize_datetime(now - timedelta(minutes=minutes_back)),
            inc=1000 + 10 * i,
        )

    # Create what looks like a lot of new messages in a short time —
    # enough to trigger the warning notification.
    burst_when = datetime.utcnow() - timedelta(minutes=1, seconds=30)
    Times_seen_by_minute.update(
        log_group_id=group1.id,
        filter_id=filter1.id,
        when=burst_when,
        inc=1000,
    )
    filters_to_check = Filter_warning.get_filters_to_check()
    self.assertTrue(
        Filter_warning.check_filter_warning(filters_to_check[0]),
    )
def test_get_filters_to_check(self):
    '''
    Checks that `get_filters_to_check` only returns those filters
    that have been active in the latest interval.

    :returns: the list of filters to check (reused by other tests).

    Fixes: the docstring was placed after the first statement (making
    it a no-op string expression rather than the function docstring),
    and the unused locals `now`/`from_date` were removed.
    '''
    with new_session() as session:
        session.query(models.Filter).delete()
    # NOTE(review): both filters share the name 'Test filter 1' —
    # possibly the second was meant to be 'Test filter 2'; confirm.
    filter1 = Filter.new(
        u'Test filter 1',
        yaml.safe_dump({
            'rate_warning': {
                'enabled': True,
                'min_logs': 100,
                'threshold': 500,
            }
        }))
    filter2 = Filter.new(
        u'Test filter 1',
        yaml.safe_dump({
            'rate_warning': {
                'enabled': True,
                'min_logs': 100,
                'threshold': 500,
            }
        }))
    store = Store(
        Parse(
            u'<34>Oct 11 22:14:15 mymachine.example.com su - ID47 - ZwPpeQyUtrRKxw5'
        ))
    group1 = Log_group.add(store)
    group1 = Log_group.get(message_hash=store.message_hash)
    store = Store(
        Parse(
            u'<34>Oct 11 22:14:15 mymachine.example.com su - ID47 - ZwPpeQyUtrRKxw5'
        ))
    group2 = Log_group.add(store)
    group2 = Log_group.get(message_hash=store.message_hash)

    # Add some info that should not show up in the list we have to
    # check for warnings.
    when = datetime.utcnow() - timedelta(minutes=MINUTE_NORMALIZATION)
    Times_seen_by_minute.update(
        log_group_id=group1.id,
        filter_id=filter1.id,
        when=when,
        inc=25,
    )
    when = datetime.utcnow() - timedelta(minutes=10, seconds=30)
    # checks that the filters groups correctly
    Times_seen_by_minute.update(
        log_group_id=group1.id,
        filter_id=filter1.id,
        when=when,
        inc=1000,
    )
    Times_seen_by_minute.update(
        log_group_id=group2.id,
        filter_id=filter1.id,
        when=when,
        inc=150,
    )
    # there should not be enough messages received for this filter to
    # be checked for alerts.
    Times_seen_by_minute.update(
        log_group_id=group1.id,
        filter_id=filter2.id,
        when=when,
        inc=1000,
    )
    filters_to_check = Filter_warning.get_filters_to_check()
    self.assertTrue(len(filters_to_check) > 0)
    self.assertEqual(filters_to_check[0].normalized_count, 100)
    return filters_to_check
def test_get_filters_to_check(self):
    '''
    Checks that `get_filters_to_check` only returns those filters
    that have been active in the latest interval.

    :returns: the list of filters to check (reused by other tests).

    Fixes: the docstring was placed after the first statement (a no-op
    string expression, not the function docstring), and the unused
    locals `now`/`from_date` were removed.
    '''
    with new_session() as session:
        session.query(models.Filter).delete()
    # NOTE(review): both filters share the name 'Test filter 1' —
    # possibly the second was meant to be 'Test filter 2'; confirm.
    filter1 = Filter.new(u'Test filter 1', yaml.safe_dump({
        'rate_warning': {
            'enabled': True,
            'min_logs': 100,
            'threshold': 500,
        }
    }))
    filter2 = Filter.new(u'Test filter 1', yaml.safe_dump({
        'rate_warning': {
            'enabled': True,
            'min_logs': 100,
            'threshold': 500,
        }
    }))
    store = Store(Parse(u'<34>Oct 11 22:14:15 mymachine.example.com su - ID47 - ZwPpeQyUtrRKxw5'))
    group1 = Log_group.add(store)
    group1 = Log_group.get(message_hash=store.message_hash)
    store = Store(Parse(u'<34>Oct 11 22:14:15 mymachine.example.com su - ID47 - ZwPpeQyUtrRKxw5'))
    group2 = Log_group.add(store)
    group2 = Log_group.get(message_hash=store.message_hash)

    # add some info that should not show up in the list we have to
    # check for warnings.
    when = datetime.utcnow() - timedelta(minutes=MINUTE_NORMALIZATION)
    Times_seen_by_minute.update(
        log_group_id=group1.id,
        filter_id=filter1.id,
        when=when,
        inc=25,
    )
    when = datetime.utcnow() - timedelta(minutes=10, seconds=30)
    # checks that the filters groups correctly
    Times_seen_by_minute.update(
        log_group_id=group1.id,
        filter_id=filter1.id,
        when=when,
        inc=1000,
    )
    Times_seen_by_minute.update(
        log_group_id=group2.id,
        filter_id=filter1.id,
        when=when,
        inc=150,
    )
    # there should not be enough messages received for this filter to
    # be checked for alerts.
    Times_seen_by_minute.update(
        log_group_id=group1.id,
        filter_id=filter2.id,
        when=when,
        inc=1000,
    )
    filters_to_check = Filter_warning.get_filters_to_check()
    self.assertTrue(len(filters_to_check) > 0)
    self.assertEqual(filters_to_check[0].normalized_count, 100)
    return filters_to_check