def _add_journal(self, json_file):
    ''' Load the alert DB from the journal JSON file '''
    self.teal = Teal('data/tllsalert_test/test.conf', 'stderr', msgLevel=self.msglevel)
    # Alerts
    ja = Journal('temp_journal', file=json_file)
    ja.insert_in_db(truncate=False, no_delay=True)
    self.teal.shutdown()

def testDemo1EventQ(self):
    '''Test that the first demo flow works -- Inject Event Q'''
    self.teal = Teal('data/teal_test/configurationtest_05_auto.conf', 'stderr', msgLevel=self.msglevel,
                     commit_alerts=False, commit_checkpoints=False, run_mode=TEAL_RUN_MODE_HISTORIC)
    j_in = Journal('j_in', file='data/demo/data_sample_demo_NEW_001.json')
    j_out_aaq = Journal('j_out_aaq')
    j_out_dq = Journal('j_out_dq')
    j_out_lis = Journal('j_out_lis')
    q_in = registry.get_service(SERVICE_EVENT_Q)
    q_out_aaq = registry.get_service(SERVICE_ALERT_ANALYZER_Q)
    q_out_dq = registry.get_service(SERVICE_ALERT_DELIVERY_Q)
    q_out_dq.register_listener(j_out_dq)
    q_out_aaq.register_listener(j_out_aaq)
    listeners = get_service(SERVICE_ALERT_DELIVERY).listeners
    for listener in listeners:
        if listener.get_name() == 'outputJournal':
            j_out_lis = listener.journal
    j_in.inject_queue(q_in)
    self.assertTrue(j_out_lis.wait_for_entries(3))
    j_exp_aaq = Journal('j_exp_aaq', 'data/teal_test/data_sample_demo_NEW_001_AAQ_Result.json')
    self.assertTrue(j_out_aaq.deep_match(j_exp_aaq, ignore_delay=True, ignore_times=True))
    j_exp_dq = Journal('j_exp_dq', 'data/teal_test/data_sample_demo_NEW_001_DQ_Result.json')
    self.assertTrue(j_out_dq.deep_match(j_exp_dq, ignore_delay=True, ignore_times=True))
    j_exp_lis = Journal('j_exp_lis', 'data/teal_test/data_sample_demo_NEW_001_LIS_Result.json')
    self.assertTrue(j_out_lis.deep_match(j_exp_lis, ignore_delay=True, ignore_times=True))
    q_out_aaq.unregister_listener(j_out_aaq)
    q_out_dq.unregister_listener(j_out_dq)
    self.teal.shutdown()

def testGeneralFilters(self):
    """test alert delivery when a listener fails and extra TEAL alerts are created internally"""
    j_in_dq = Journal("j_in_DQ", "data/alert_delivery_test/listener_failure/inject_DQ_alerts.json")
    dq_q = get_service(SERVICE_ALERT_DELIVERY_Q)
    # Get the AlertListenerJournal journals
    listeners = get_service(SERVICE_ALERT_DELIVERY).listeners
    for listener in listeners:
        name = listener.get_name()
        if name == "AllAlerts":
            j_out_all = listener.journal
        if name == "OnlyAnalyzer1":
            j_out_analyzer1 = listener.journal
    # inject
    j_in_dq.inject_queue(dq_q)
    # Create a TEAL alert
    create_teal_alert("XXXXXXXX", "no reason at all", "medium well", loc_instance="YYY")
    # Get expected values
    j_out_all_exp = Journal("all_exp", "data/alert_delivery_test/analyzer_filter/alerts_out_all.json")
    j_out_analyzer1_exp = Journal("analyzer1", "data/alert_delivery_test/analyzer_filter/alerts_out_analyzer1.json")
    # wait for stuff to come out
    self.assertTrue(j_out_all.wait_for_entries(len(j_out_all_exp) + 3))
    self.assertTrue(j_out_analyzer1.wait_for_entries(len(j_out_analyzer1_exp)))
    # Check that it was what was expected
    # Can't really check this because the location is unique for each machine and run
    # Make sure only 3 extra
    self.assertEqual(len(j_out_all) - len(j_out_all_exp), 3)
    # self.assertTrue(j_out_all.deep_match(j_out_all_exp, ignore_delay=True, ignore_times=True))
    self.assertTrue(j_out_analyzer1.deep_match(j_out_analyzer1_exp, ignore_delay=True, ignore_times=True))
    return

def testGeneralFilters(self):
    """test alert delivery with global and local filtering"""
    j_in_dq = Journal("j_in_DQ", "data/alert_delivery_test/data_sample_inject_DQ.json")
    # p rint str(j_in_dq)
    dq_q = get_service(SERVICE_ALERT_DELIVERY_Q)
    # Get the AlertListenerJournal journals
    listeners = get_service(SERVICE_ALERT_DELIVERY).listeners
    for listener in listeners:
        name = listener.get_name()
        # p rint name
        if name == "AllAlerts":
            j_out_all = listener.journal
        if name == "OnlyAlertId":
            j_out_alert_id = listener.journal
        if name == "OnlyAlertIdUrgent":
            j_out_ai_urgent = listener.journal
    # inject
    j_in_dq.inject_queue(dq_q)
    # wait for stuff to come out
    self.assertTrue(j_out_all.wait_for_entries(5))
    self.assertTrue(j_out_alert_id.wait_for_entries(3))
    self.assertTrue(j_out_ai_urgent.wait_for_entries(2))
    #
    j_out_all_exp = Journal("j_out_all_exp", "data/alert_delivery_test/data_sample_out_all_alerts.json")
    self.assertTrue(j_out_all.deep_match(j_out_all_exp, ignore_delay=True, ignore_times=True))
    j_out_alert_id_exp = Journal("j_out_alert_id_exp", "data/alert_delivery_test/data_sample_out_alert_id.json")
    self.assertTrue(j_out_alert_id.deep_match(j_out_alert_id_exp, ignore_delay=True, ignore_times=True))
    j_out_ai_urgent_exp = Journal("j_out_ai_urgent_exp", "data/alert_delivery_test/data_sample_out_ai_urgent.json")
    self.assertTrue(j_out_ai_urgent.deep_match(j_out_ai_urgent_exp, ignore_delay=True, ignore_times=True))
    return

def testGeneralFilters(self):
    """test alert delivery with global and local filtering"""
    j_in_dq = Journal("j_in_DQ", "data/alert_delivery_test/analyzer_filter/inject_DQ_alerts.json")
    dq_q = get_service(SERVICE_ALERT_DELIVERY_Q)
    # Get the AlertListenerJournal journals
    listeners = get_service(SERVICE_ALERT_DELIVERY).listeners
    for listener in listeners:
        name = listener.get_name()
        if name == "AllAlerts":
            j_out_all = listener.journal
        if name == "OnlyAnalyzer1":
            j_out_analyzer1 = listener.journal
        if name == "AnyButAnalyzer1":
            j_out_not_analyzer1 = listener.journal
        if name == "OnlyAnalyzer2and3":
            j_out_analyzer2and3 = listener.journal
        if name == "AnyButAnalyzer2and3":
            j_out_not_analyzer2and3 = listener.journal
        if name == "AnyButAnalyzer1and2and3":
            j_out_not_analyzer1and2and3 = listener.journal
    # inject
    j_in_dq.inject_queue(dq_q)
    # Get expected values
    j_out_all_exp = Journal("all_exp", "data/alert_delivery_test/analyzer_filter/alerts_out_all.json")
    j_out_analyzer1_exp = Journal("analyzer1", "data/alert_delivery_test/analyzer_filter/alerts_out_analyzer1.json")
    j_out_not_analyzer1_exp = Journal(
        "not_analyzer1", "data/alert_delivery_test/analyzer_filter/alerts_out_not_analyzer1.json"
    )
    j_out_analyzer2and3_exp = Journal(
        "analyzer2and3", "data/alert_delivery_test/analyzer_filter/alerts_out_analyzer2and3.json"
    )
    j_out_not_analyzer2and3_exp = Journal(
        "not_analyzer2and3", "data/alert_delivery_test/analyzer_filter/alerts_out_not_analyzer2and3.json"
    )
    j_out_not_analyzer1and2and3_exp = Journal(
        "not_analyzer1and2and3", "data/alert_delivery_test/analyzer_filter/alerts_out_not_analyzer1and2and3.json"
    )
    # wait for stuff to come out
    self.assertTrue(j_out_all.wait_for_entries(len(j_out_all_exp)))
    self.assertTrue(j_out_analyzer1.wait_for_entries(len(j_out_analyzer1_exp)))
    self.assertTrue(j_out_not_analyzer1.wait_for_entries(len(j_out_not_analyzer1_exp)))
    self.assertTrue(j_out_analyzer2and3.wait_for_entries(len(j_out_analyzer2and3_exp)))
    self.assertTrue(j_out_not_analyzer2and3.wait_for_entries(len(j_out_not_analyzer2and3_exp)))
    self.assertTrue(j_out_not_analyzer1and2and3.wait_for_entries(len(j_out_not_analyzer1and2and3_exp)))
    # Check that it was what was expected
    self.assertTrue(j_out_all.deep_match(j_out_all_exp, ignore_delay=True, ignore_times=True))
    self.assertTrue(j_out_analyzer1.deep_match(j_out_analyzer1_exp, ignore_delay=True, ignore_times=True))
    self.assertTrue(j_out_not_analyzer1.deep_match(j_out_not_analyzer1_exp, ignore_delay=True, ignore_times=True))
    self.assertTrue(j_out_analyzer2and3.deep_match(j_out_analyzer2and3_exp, ignore_delay=True, ignore_times=True))
    self.assertTrue(
        j_out_not_analyzer2and3.deep_match(j_out_not_analyzer2and3_exp, ignore_delay=True, ignore_times=True)
    )
    self.assertTrue(
        j_out_not_analyzer1and2and3.deep_match(
            j_out_not_analyzer1and2and3_exp, ignore_delay=True, ignore_times=True
        )
    )
    return

def testStateOtherStatesIM(self):
    '''test alert states other than NEW (close/reopen) in memory'''
    self.teal = Teal('data/alert_test/test.conf', 'stderr', msgLevel=self.msglevel,
                     commit_alerts=False, commit_checkpoints=False)
    j_in_dq = Journal('j_in_DQ', 'data/alert_test/inject_DQ_alerts.json')
    tq = ListenableQueue('test LQ')
    ql = CheckAlertStateListener(7)
    tq.register_listener(ql)
    j_in_dq.inject_queue(tq, progress_cb=None, fail_on_invalid=False, no_delay=True)
    ql.my_event.wait()
    self.assertEquals(ql.count, 7)
    ta1 = ql.alerts[0]
    am = get_service(SERVICE_ALERT_MGR)
    # TODO: Should not be hardcoded rec ids after this
    self.assertRaisesTealError(AlertMgrError, 'Operation not allowed on duplicate alert', am.close, 5)
    self.assertRaisesTealError(AlertMgrError, 'Operation not allowed on duplicate alert', am.close, 6)
    self.assertRaisesTealError(AlertMgrError, 'Current alert state does not allow this operation',
                               am.reopen, ta1.rec_id)
    am.close(ta1.rec_id)
    self.assertEquals(ta1.state, ALERT_STATE_CLOSED)
    self.assertRaisesTealError(AlertMgrError, 'Current alert state does not allow this operation',
                               am.close, ta1.rec_id)
    self.assertRaisesTealError(AlertMgrError, 'Alert with specified record id not found', am.close, 23456)
    self.assertEquals(ql.alerts[1].state, ALERT_STATE_OPEN)
    self.assertEquals(ql.alerts[2].state, ALERT_STATE_OPEN)
    self.assertEquals(ql.alerts[3].state, ALERT_STATE_OPEN)
    self.assertEquals(ql.alerts[4].state, ALERT_STATE_CLOSED)
    self.assertEquals(ql.alerts[5].state, ALERT_STATE_CLOSED)
    self.assertEquals(ql.alerts[6].state, ALERT_STATE_OPEN)
    # reopen it
    self.assertRaisesTealError(AlertMgrError, 'Alert with specified record id not found', am.reopen, 23456)
    self.assertRaisesTealError(AlertMgrError, 'Operation not allowed on duplicate alert', am.reopen, 5)
    self.assertRaisesTealError(AlertMgrError, 'Operation not allowed on duplicate alert', am.reopen, 6)
    am.reopen(ta1.rec_id)
    self.assertEquals(ta1.state, ALERT_STATE_OPEN)
    self.assertEquals(ql.alerts[1].state, ALERT_STATE_OPEN)
    self.assertEquals(ql.alerts[2].state, ALERT_STATE_OPEN)
    self.assertEquals(ql.alerts[3].state, ALERT_STATE_OPEN)
    self.assertEquals(ql.alerts[4].state, ALERT_STATE_OPEN)
    self.assertEquals(ql.alerts[5].state, ALERT_STATE_OPEN)
    self.assertEquals(ql.alerts[6].state, ALERT_STATE_OPEN)
    am.close(3)
    self.assertEquals(ta1.state, ALERT_STATE_OPEN)
    self.assertEquals(ql.alerts[1].state, ALERT_STATE_OPEN)
    self.assertEquals(ql.alerts[2].state, ALERT_STATE_CLOSED)
    self.assertEquals(ql.alerts[3].state, ALERT_STATE_OPEN)
    self.assertEquals(ql.alerts[4].state, ALERT_STATE_OPEN)
    self.assertEquals(ql.alerts[5].state, ALERT_STATE_OPEN)
    self.assertEquals(ql.alerts[6].state, ALERT_STATE_OPEN)
    am.reopen(3)
    self.assertEquals(ta1.state, ALERT_STATE_OPEN)
    self.assertEquals(ql.alerts[1].state, ALERT_STATE_OPEN)
    self.assertEquals(ql.alerts[2].state, ALERT_STATE_OPEN)
    self.assertEquals(ql.alerts[3].state, ALERT_STATE_OPEN)
    self.assertEquals(ql.alerts[4].state, ALERT_STATE_OPEN)
    self.assertEquals(ql.alerts[5].state, ALERT_STATE_OPEN)
    self.assertEquals(ql.alerts[6].state, ALERT_STATE_OPEN)
    self.teal.shutdown()
    return

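# A minimal sketch of the CheckAlertStateListener helper used by the state tests above and below,
# assuming the queue-listener callback is named 'notify'; the real class in the suite implements the
# actual TEAL listener interface and may differ. It records each delivered alert and releases the
# waiting test once the expected number have arrived.
import threading

class CheckAlertStateListener(object):
    ''' Collect alerts off a ListenableQueue and signal once enough have arrived '''
    def __init__(self, expected_count):
        self.expected_count = expected_count
        self.count = 0
        self.alerts = []
        self.my_event = threading.Event()

    def notify(self, alert):
        ''' Assumed listener callback: record the alert and check the threshold '''
        self.alerts.append(alert)
        self.count += 1
        if self.count >= self.expected_count:
            self.my_event.set()
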
def testFilter(self):
    ''' Test that a duplicate is filtered when backed by a database '''
    j = Journal('AlertAnalyzer', 'data/alert_test/inject_DQ_alerts.json')
    j.inject_queue(registry.get_service(SERVICE_ALERT_DELIVERY_Q))
    alj = find_listener()
    alj.journal.wait_for_entries(4)
    out_j = Journal('AlertListener', 'data/alert_filter_test/unfiltered_alerts.json')
    self.assertTrue(alj.journal.deep_match(out_j, ignore_times=True))

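# A minimal sketch of the find_listener helper used by the filter tests, assuming it looks up a
# journal-backed listener on the alert delivery service by name; the default listener name below is
# an assumption, not taken from the suite, and get_service/SERVICE_ALERT_DELIVERY are expected to be
# imported exactly as in the tests above.
def find_listener(name='AlertListenerJournal'):
    ''' Return the first alert delivery listener whose name matches, else None '''
    for listener in get_service(SERVICE_ALERT_DELIVERY).listeners:
        if listener.get_name() == name:
            return listener
    return None
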
def inject_new_entries(self, exp_json='data/restart_test/three_events_one_fromq.json', exp_num=3):
    ''' Verify that events still flow through TEAL after startup '''
    # Now make sure we start getting new events
    j_inj = Journal('After restart', 'data/restart_test/three_events_one.json')
    j_inj.insert_in_db(use_rec_ids=False, no_delay=True)
    registry.get_service(registry.SERVICE_NOTIFIER).post()
    j_exp = Journal('Inject New Entries', exp_json)
    j_act = self.find_analyzer().journal
    self.assertTrue(j_act.wait_for_entries(exp_num))
    self.assertTrue(j_act.deep_match(j_exp, ignore_delay=True))

def testSmtpAlertListener(self):
    ''' Test the SMTP Alert Listener '''
    server = start_smtp_server()
    t = teal.Teal('data/alert_listener_test/test_01.conf', msgLevel=self.msglevel, logFile='stderr',
                  commit_alerts=False, commit_checkpoints=False)
    in_j = Journal('SMTP Journal', 'data/alert_test/inject_DQ_alerts.json')
    in_j.inject_queue(registry.get_service(SERVICE_ALERT_DELIVERY_Q))
    self.assertTrue(server.compare_messages(gen_message_list(in_j)))
    t.shutdown()

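# A hypothetical sketch of the start_smtp_server helper, not the suite's actual implementation:
# a recording SMTP server run on a background asyncore loop whose compare_messages method checks the
# received bodies against an expected list. The host/port values and the comparison rule are assumptions.
import asyncore
import smtpd
import threading

class _RecordingSmtpServer(smtpd.SMTPServer):
    ''' Store every message delivered to the server so the test can inspect it '''
    def __init__(self, localaddr):
        smtpd.SMTPServer.__init__(self, localaddr, None)
        self.messages = []

    def process_message(self, peer, mailfrom, rcpttos, data):
        self.messages.append(data)

    def compare_messages(self, expected_messages):
        ''' True if the received message bodies match the expected list (order ignored) '''
        return sorted(self.messages) == sorted(expected_messages)

def start_smtp_server(host='localhost', port=8025):
    ''' Start the recording server and drive the asyncore loop from a daemon thread '''
    server = _RecordingSmtpServer((host, port))
    loop_thread = threading.Thread(target=asyncore.loop, kwargs={'timeout': 1})
    loop_thread.setDaemon(True)
    loop_thread.start()
    return server
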
def testDemo1DB(self):
    '''Test demo flow by injecting into DB'''
    self.prepare_db()
    keep_var = self.force_env('TEAL_TEST_POOL_TIMERS_OFF', 'YES')
    self.teal = Teal('data/teal_test/configurationtest_05_semaphore_auto.conf', 'stderr', msgLevel=self.msglevel)
    j_in = Journal('j_in', file='data/demo/data_sample_demo_NEW_001.json')
    j_out_eq = Journal('j_out_eq')
    j_out_aaq = Journal('j_out_aaq')
    j_out_dq = Journal('j_out_dq')
    j_out_lis = Journal('j_out_lis')
    q_out_eq = registry.get_service(SERVICE_EVENT_Q)
    q_out_aaq = registry.get_service(SERVICE_ALERT_ANALYZER_Q)
    q_out_dq = registry.get_service(SERVICE_ALERT_DELIVERY_Q)
    q_out_eq.register_listener(j_out_eq)
    q_out_dq.register_listener(j_out_dq)
    q_out_aaq.register_listener(j_out_aaq)
    listeners = get_service(SERVICE_ALERT_DELIVERY).listeners
    for listener in listeners:
        if listener.get_name() == 'outputJournal':
            j_out_lis = listener.journal
    try:
        j_in.insert_in_db(progress_cb=None, truncate=False, use_rec_ids=True, no_delay=False, post=True)
    except:
        print 'INSERTION FAILED'
        q_out_eq.unregister_listener(j_out_eq)
        q_out_dq.unregister_listener(j_out_dq)
        q_out_aaq.unregister_listener(j_out_aaq)
        raise
    # Yes, only 2: Flush can't be injected to connector, so pool does not get closed, so last event
    # does not get turned into an alert!
    self.assertTrue(j_out_lis.wait_for_entries(2))
    # Note these connector ('C') versions have one less alert
    # The analyzer is being run in historic mode (see configuration); if that was
    # changed to runtime then the pool would time out and the last alert would be journaled
    j_exp_aaq = Journal('j_exp_aaq', 'data/teal_test/data_sample_demo_NEW_001_AAQ_Result_C.json')
    self.assertTrue(j_out_aaq.deep_match(j_exp_aaq, ignore_delay=True, ignore_times=True))
    j_exp_dq = Journal('j_exp_dq', 'data/teal_test/data_sample_demo_NEW_001_DQ_Result_C.json')
    self.assertTrue(j_out_dq.deep_match(j_exp_dq, ignore_delay=True, ignore_times=True))
    j_exp_lis = Journal('j_exp_lis', 'data/teal_test/data_sample_demo_NEW_001_LIS_Result_C.json')
    self.assertTrue(j_out_lis.deep_match(j_exp_lis, ignore_delay=True, ignore_times=True))
    q_out_eq.unregister_listener(j_out_eq)
    q_out_dq.unregister_listener(j_out_dq)
    q_out_aaq.unregister_listener(j_out_aaq)
    self.teal.shutdown()
    self.restore_env('TEAL_TEST_POOL_TIMERS_OFF', keep_var)

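# A minimal sketch of the force_env/restore_env helpers used by several tests, assuming they simply
# swap an environment variable and hand back its previous value so the test can put it back afterwards.
# In the suite these presumably live on the shared test base class; the handling of unset variables
# shown here is an assumption.
import os

def force_env(self, name, value):
    ''' Set an environment variable and return the value it had before '''
    previous = os.environ.get(name)
    os.environ[name] = value
    return previous

def restore_env(self, name, previous):
    ''' Put the environment variable back to the remembered value '''
    if previous is None:
        if name in os.environ:
            del os.environ[name]
    else:
        os.environ[name] = previous
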
def testNegativeAlertIdMatch(self):
    ''' Runs a noise filter with all but one kind of alert id (negative match of alert id) '''
    self.prepare_db()
    t = teal.Teal('data/alert_filter_test/test_12.conf', msgLevel=self.msglevel, logFile='stderr')
    j = Journal('AlertAnalyzer', 'data/alert_filter_test/inject_DQ_alerts.json')
    j.inject_queue(registry.get_service(SERVICE_ALERT_DELIVERY_Q))
    alj = find_listener()
    alj.journal.wait_for_entries(3)
    out_j = Journal('AlertListener', 'data/alert_filter_test/negative_alerts.json')
    self.assertTrue(alj.journal.deep_match(out_j, ignore_times=True))
    t.shutdown()

def testJournalQueue(self):
    ''' Test injecting a Journal containing optional fields through a queue '''
    lq = ListenableQueue('test journal queue')
    j = Journal('test journal', file='data/journal_test/data_sample_002_NEW.json')
    j_rec = Journal('j_rec')
    lq.register_listener(j_rec)
    j.inject_queue(lq)
    while len(j) != len(j_rec):
        # p rint ('waiting for queue to process %s of %s' % (str(len(j_rec)), str(len(j))))
        sleep(1.0)
    #p rint j
    #p rint j_rec
    self.assertTrue(j.deep_match(j))
    self.assertTrue(j.deep_match(j_rec, ignore_delay=True))
    return

def testPartialNoiseFilter(self):
    ''' Runs a noise filter with some values '''
    self.prepare_db()
    t = teal.Teal('data/alert_filter_test/test_05.conf', msgLevel='warn', logFile='stderr')
    j = Journal('AlertAnalyzer', 'data/alert_filter_test/inject_DQ_alerts.json')
    j.inject_queue(registry.get_service(SERVICE_ALERT_DELIVERY_Q))
    alj = find_listener()
    alj.journal.wait_for_entries(4)
    out_j = Journal('AlertListener', 'data/alert_filter_test/partial_alerts.json')
    self.assertTrue(alj.journal.deep_match(out_j, ignore_times=True))
    t.shutdown()

def add_entries_before_restart(self, stop_teal=True):
    ''' Add events to TEAL and make sure they are processed '''
    self.start_teal('now')
    # Insert a set of events and process them
    j_act = self.find_analyzer().journal
    j_inj = Journal('Pre-populate', 'data/restart_test/three_events_one.json')
    j_inj.insert_in_db(no_delay=True, truncate=True)  # Truncate is testing that we handle the ckpt table being destroyed
    registry.get_service(registry.SERVICE_NOTIFIER).post()
    self.assertTrue(j_act.wait_for_entries(3))
    j_exp = Journal('Expected', 'data/restart_test/three_events_one_fromq.json')
    self.assertTrue(j_act.deep_match(j_exp, ignore_delay=True))
    # Stop this instance of TEAL if requested; otherwise it is up
    # to the caller to stop it
    if stop_teal:
        self.stop_teal()

class JournalAnalyzer(EventAnalyzer):
    ''' Testcase analyzer that holds on to a journal so the testcase can determine
    what events have been processed by the framework
    '''
    def __init__(self, name, inQueue, outQueue, config_dict=None, number=0):
        self.journal = Journal(name)
        EventAnalyzer.__init__(self, name, inQueue, outQueue, config_dict, number)

    def will_analyze_event(self, event):
        ''' always analyze it '''
        return True

    def analyze_event(self, event):
        ''' Record the event in the journal '''
        self.journal.journal_event(event)

    def handle_control_msg(self, control_msg):
        ''' Control messages are ignored by this test analyzer '''
        pass

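# Illustrative (hypothetical) use of JournalAnalyzer in a test, assuming the EventAnalyzer base class
# consumes events posted to its input queue; the queue names and method name are made up, and the input
# file is borrowed from the journal tests elsewhere in the suite.
def exampleJournalAnalyzerUsage(self):
    ''' Inject canned events and confirm the analyzer journaled all of them '''
    in_q = ListenableQueue('example analyzer in Q')
    out_q = ListenableQueue('example analyzer out Q')
    analyzer = JournalAnalyzer('example analyzer', in_q, out_q)
    j_in = Journal('example input', file='data/journal_test/data_sample_002_NEW.json')
    j_in.inject_queue(in_q)
    self.assertTrue(analyzer.journal.wait_for_entries(len(j_in)))
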
def testEmptyNoiseFilter(self):
    ''' Runs a noise filter with no values '''
    self.prepare_db()
    t = teal.Teal('data/alert_filter_test/test_03.conf', msgLevel='warn', logFile='stderr')
    dq = registry.get_service(SERVICE_ALERT_DELIVERY)
    self.assertEqual(dq.filters[0].get_name(), 'NoiseAlertFilter')
    j = Journal('AlertAnalyzer', 'data/alert_filter_test/inject_DQ_alerts.json')
    j.inject_queue(registry.get_service(SERVICE_ALERT_DELIVERY_Q))
    alj = find_listener()
    alj.journal.wait_for_entries(7)
    out_j = Journal('AlertListener', 'data/alert_filter_test/empty_alerts.json')
    self.assertTrue(alj.journal.deep_match(out_j, ignore_times=True))
    t.shutdown()

def testAnalyzers(self):
    ''' Validate functionality of both analyzer types '''
    self.prepare_db()
    t = teal.Teal('data/alert_filter_test/test_11.conf', msgLevel=self.msglevel, logFile='stderr')
    j = Journal('AlertAnalyzer', 'data/alert_filter_test/inject_DQ_alerts.json')
    j.inject_queue(registry.get_service(SERVICE_ALERT_DELIVERY_Q))
    aif = find_listener('AlertFilterIfNameListener')
    aif.journal.wait_for_entries(5)
    anif = find_listener('AlertFilterIfNotNameListener')
    anif.journal.wait_for_entries(4)
    out_jif = Journal('AlertListener', 'data/alert_filter_test/ifname_alerts.json')
    self.assertTrue(aif.journal.deep_match(out_jif, ignore_times=True))
    out_jnif = Journal('AlertListener', 'data/alert_filter_test/ifnotname_alerts.json')
    self.assertTrue(anif.journal.deep_match(out_jnif, ignore_times=True))
    t.shutdown()

def testRestartLastProc(self):
    ''' Verify lastproc mode by reading events that have been added after shutdown '''
    self.add_entries_before_restart()
    # Add a few more events so we process things
    self.start_teal_no_monitor()
    j_inj = Journal('Pre-populate', 'data/restart_test/three_events_one.json')
    j_inj.insert_in_db(use_rec_ids=False, no_delay=True)
    self.stop_teal()
    # Now start up in lastproc mode and see that we don't get entries ... no checkpoints so acts like Now
    self.start_teal('lastproc')
    j_act = self.find_analyzer().journal
    self.assertFalse(j_act.wait_for_entries(1, seconds=3, msg_mode='quiet'))
    #self.assertTrue(j_act.wait_for_entries(3))
    #j_exp = Journal('After restart','data/restart_test/three_events_one_fromq.json')
    #self.assertTrue(j_act.deep_match(j_exp,ignore_delay=True))
    j_act.clear()
    # Make sure we start getting new entries
    self.inject_new_entries()

def testJournalWriteEventDB3(self):
    ''' Test reading from event DB with a subset of fields '''
    self.teal = Teal('data/journal_test/events_001.conf', 'stderr', msgLevel=self.msglevel)
    j = Journal('DB test journal to write', file='data/journal_test/events_001.json')
    j.insert_in_db(truncate=True, use_rec_ids=False, no_delay=True)
    jdb = Journal('DB test journal to read')
    jdb.select_from_db('event', event_fields=[EVENT_ATTR_REC_ID, EVENT_ATTR_EVENT_ID])
    jexp = Journal('DB expected', 'data/journal_test/events_004.json')
    self.assertTrue(jexp.deep_match(jdb, ignore_delay=True, ignore_times=False, ignore_rec_id=True))
    #p rint j
    #p rint jdb
    self.teal.shutdown()
    return

def testJournalWriteEventDB1(self):
    ''' Test writing to Event log DB basic '''
    self.teal = Teal('data/journal_test/events_001.conf', 'stderr', msgLevel=self.msglevel)
    j = Journal('DB test journal to write', file='data/journal_test/events_001.json')
    j.insert_in_db(truncate=True, no_delay=True)
    jdb = Journal('DB test journal to read')
    jdb.select_from_db('event')
    self.assertTrue(j.deep_match(jdb, ignore_delay=True, ignore_times=False, ignore_rec_id=False))
    #p rint j
    #p rint jdb
    self.teal.shutdown()
    return

def testAlertAnalyzerSubclassing01(self):
    ''' Test that the alert analyzer subclass works properly '''
    # Get the alert listener
    for listener in get_service(SERVICE_ALERT_DELIVERY).listeners:
        if listener.get_name() == 'AllAlerts':
            j_out_all = listener.journal
    # Pump in some events
    j_in_event = Journal('j_in_events', 'data/alert_analyzer_test/inject_events01.json')
    j_in_event.inject_queue(get_service(SERVICE_EVENT_Q))
    # Check that the analyzer got them -- only len because can't compare locations
    self.assertTrue(j_out_all.wait_for_entries(27))
    # Pump in some alerts
    j_in_alert = Journal('j_in_alerts', 'data/alert_analyzer_test/inject_alerts01.json')
    j_in_alert.inject_queue(get_service(SERVICE_ALERT_ANALYZER_Q))
    # Check that the analyzer got them -- only len because can't compare locations
    self.assertTrue(j_out_all.wait_for_entries(34))
    alertmgr = get_service(SERVICE_ALERT_MGR)
    self.assertEqual(len(alertmgr.in_mem_alerts_duplicate), 0)

def testStateOtherStatesDB(self):
    '''test alert states other than NEW (close/reopen) committed to the DB'''
    self.prepare_db()
    self.teal = Teal('data/alert_test/test.conf', 'stderr', msgLevel=self.msglevel,
                     commit_alerts=True, commit_checkpoints=False)
    j_in_dq = Journal('j_in_DQ', 'data/alert_test/inject_DQ_alerts.json')
    tq = ListenableQueue('test LQ')
    ql = CheckAlertStateListener(7)
    tq.register_listener(ql)
    j_in_dq.inject_queue(tq, progress_cb=None, fail_on_invalid=False, no_delay=True)
    ql.my_event.wait()
    self.assertEquals(ql.count, 7)
    ta1 = ql.alerts[0]
    am = get_service(SERVICE_ALERT_MGR)
    self.assertEquals(ta1.state, ALERT_STATE_OPEN)
    # TODO: Really should query to get the rec id to use for hardcoded ones in rest of this test case
    self.assertRaisesTealError(AlertMgrError, 'Operation not allowed on duplicate alert', am.close, 5)
    self.assertRaisesTealError(AlertMgrError, 'Operation not allowed on duplicate alert', am.close, 6)
    self.assertRaisesTealError(AlertMgrError, 'Current alert state does not allow this operation',
                               am.reopen, ta1.rec_id)
    am.close(ta1.rec_id)
    # Get duplicates of this one
    self.assertRaisesTealError(AlertMgrError, 'Current alert state does not allow this operation',
                               am.close, ta1.rec_id)
    self.assertRaisesTealError(AlertMgrError, 'Alert with specified record id not found', am.close, 23456)
    # Note that in memory won't be updated ... only in DB
    # so let's get it from the DB
    dbi = get_service(SERVICE_DB_INTERFACE)
    event_cnxn, cursor = _get_connection(dbi)
    self.assert_alert_closed(dbi, cursor, ta1.rec_id)
    self.assert_alert_open(dbi, cursor, 2)
    self.assert_alert_open(dbi, cursor, 3)
    self.assert_alert_open(dbi, cursor, 4)
    self.assert_alert_closed(dbi, cursor, 5)
    self.assert_alert_closed(dbi, cursor, 6)
    self.assert_alert_open(dbi, cursor, 7)
    self.assertRaisesTealError(AlertMgrError, 'Alert with specified record id not found', am.reopen, 23456)
    self.assertRaisesTealError(AlertMgrError, 'Operation not allowed on duplicate alert', am.reopen, 5)
    self.assertRaisesTealError(AlertMgrError, 'Operation not allowed on duplicate alert', am.reopen, 6)
    # reopen it
    am.reopen(ta1.rec_id)
    event_cnxn, cursor = _get_connection(dbi, event_cnxn)
    self.assert_alert_open(dbi, cursor, ta1.rec_id)
    self.assert_alert_open(dbi, cursor, 2)
    self.assert_alert_open(dbi, cursor, 3)
    self.assert_alert_open(dbi, cursor, 4)
    self.assert_alert_open(dbi, cursor, 5)
    self.assert_alert_open(dbi, cursor, 6)
    self.assert_alert_open(dbi, cursor, 7)
    am.close(3)
    event_cnxn, cursor = _get_connection(dbi, event_cnxn)
    self.assert_alert_open(dbi, cursor, ta1.rec_id)
    self.assert_alert_open(dbi, cursor, 2)
    self.assert_alert_closed(dbi, cursor, 3)
    self.assert_alert_open(dbi, cursor, 4)
    self.assert_alert_open(dbi, cursor, 5)
    self.assert_alert_open(dbi, cursor, 6)
    self.assert_alert_open(dbi, cursor, 7)
    am.reopen(3)
    event_cnxn, cursor = _get_connection(dbi, event_cnxn)
    self.assert_alert_open(dbi, cursor, ta1.rec_id)
    self.assert_alert_open(dbi, cursor, 2)
    self.assert_alert_open(dbi, cursor, 3)
    self.assert_alert_open(dbi, cursor, 4)
    self.assert_alert_open(dbi, cursor, 5)
    self.assert_alert_open(dbi, cursor, 6)
    self.assert_alert_open(dbi, cursor, 7)
    event_cnxn.close()
    self.teal.shutdown()
    return

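# A hypothetical sketch of the assert_alert_open/assert_alert_closed helpers used above: they are
# assumed to read the alert's state back through the DB cursor. The SQL text, table and column names,
# and parameter marker below are placeholders, not the real TEAL schema or DB interface calls.
def assert_alert_state(self, dbi, cursor, rec_id, expected_state):
    ''' Fetch the alert row by record id and compare its state column '''
    cursor.execute('SELECT state FROM alert_log WHERE rec_id = ?', (rec_id,))
    row = cursor.fetchone()
    self.assertTrue(row is not None)
    self.assertEquals(row[0], expected_state)

def assert_alert_open(self, dbi, cursor, rec_id):
    self.assert_alert_state(dbi, cursor, rec_id, ALERT_STATE_OPEN)

def assert_alert_closed(self, dbi, cursor, rec_id):
    self.assert_alert_state(dbi, cursor, rec_id, ALERT_STATE_CLOSED)
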
def _execute_rule(self, dir, debug=False, wait_sec=60, wait_num=None, force_save=False, max_key=None,
                  inject_flush=False):
    '''Run the test defined in the specified directory'''
    config = 'data/gear_ruleset_test/' + dir + '/config.conf'
    input = 'data/gear_ruleset_test/' + dir + '/event_input.json'
    base_output = 'data/gear_ruleset_test/' + dir
    test_files = os.listdir('data/gear_ruleset_test/' + dir)
    # Figure out what output files need to be checked
    out_prefix = 'alert_output'
    out_to_check = []
    for filename in test_files:
        base, ext = os.path.splitext(filename)
        if ext != '.json':
            continue
        if base[:len(out_prefix)] == out_prefix:
            out_to_check.append(base[len(out_prefix):])
    # out_to_check now contains the unique part of the output file name
    if debug:
        msg_level = 'debug'
    else:
        msg_level = self.msglevel
    # TODO: Make work with duplicate checking
    keep_ADC = self.force_env('TEAL_ALERT_DUPLICATE_CHECK', 'No')
    myteal = teal.Teal(config, 'stderr', msgLevel=msg_level, commit_alerts=False, commit_checkpoints=False)
    j_in = Journal('j_in', input)
    # get the listeners to get the journals from
    # Make a list to find
    jl_names = []
    j_out_list = {}
    list_prefix = 'ListenerJournal'
    for up in out_to_check:
        jl_names.append(list_prefix + up)
    listeners = get_service(SERVICE_ALERT_DELIVERY).listeners
    for listener in listeners:
        if listener.get_name() in jl_names:
            j_out_list[listener.get_name()[len(list_prefix):]] = listener.journal
    event_q = get_service(SERVICE_EVENT_Q)
    if debug:
        # Print before injection in case problem with injection
        print j_in
    j_in.inject_queue(event_q, no_delay=False, max_key=max_key)
    if inject_flush == True:
        self._inject_flush(event_q)
    for up in out_to_check:
        output = base_output + '/' + out_prefix + up + '.json'
        j_exp = Journal('j_exp' + up, output)
        if wait_num is None:
            wait_num = len(j_exp)
        self.assertTrue(j_out_list[up].wait_for_entries(wait_num, seconds=wait_sec))
        if debug:
            print j_exp
            print j_out_list[up]
        if force_save == True:
            j_out_list[up].save(output)
        self.assertTrue(j_out_list[up].deep_match(j_exp, ignore_delay=True, ignore_times=True, unordered=True))
    myteal.shutdown()
    self.restore_env('TEAL_ALERT_DUPLICATE_CHECK', keep_ADC)
    return

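# Illustrative (hypothetical) caller of _execute_rule: each GEAR ruleset test is assumed to package its
# config.conf, event_input.json, and expected alert_output*.json files in one directory under
# data/gear_ruleset_test/; 'example_rule' is a made-up directory name, not a test shipped with the suite.
def testExampleRule(self):
    ''' Run the GEAR ruleset test packaged in the example_rule directory '''
    self._execute_rule('example_rule', wait_sec=30, inject_flush=True)
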
def __init__(self, name, inQueue, outQueue, config_dict=None, number=0):
    self.journal = Journal(name)
    EventAnalyzer.__init__(self, name, inQueue, outQueue, config_dict, number)

def testJournalWriteAlertDB4(self):
    ''' Test writing of Alert log queue after reading from DB '''
    # This test does not work with duplicate checking -- probably don't want it to
    keep_ADC = self.force_env('TEAL_ALERT_DUPLICATE_CHECK', 'No')
    self.teal = Teal('data/journal_test/events_002.conf', 'stderr', msgLevel=self.msglevel)
    # Events
    je = Journal('DB test input EVENTS', file='data/journal_test/events_002.json')
    je.insert_in_db(truncate=True, no_delay=True)
    # Alerts
    ja = Journal('DB test input ALERTS', file='data/journal_test/alerts_002.json')
    ja.insert_in_db(truncate=False, no_delay=True)
    # Check events
    jedb = Journal('Read DB test EVENTS')
    jedb.select_from_db('event')
    self.assertTrue(je.deep_match(jedb, ignore_delay=True, ignore_times=False, ignore_rec_id=False))
    # Check alerts
    jadb = Journal('Read DB test ALERTS')
    jadb.select_from_db('alert')
    self.assertTrue(ja.deep_match(jadb, ignore_delay=True, ignore_times=False, ignore_rec_id=False))
    # Now insert into the Delivery Queue and make sure all come out
    jadb.inject_queue(get_service(SERVICE_ALERT_DELIVERY_Q), progress_cb=None, fail_on_invalid=False,
                      no_delay=True)
    listeners = get_service(SERVICE_ALERT_DELIVERY).listeners
    for listener in listeners:
        name = listener.get_name()
        if name == 'Journal':
            j_out_all = listener.journal
    self.assertTrue(j_out_all.wait_for_entries(6))
    self.assertTrue(j_out_all.deep_match(jadb, ignore_delay=True, ignore_times=True))
    self.teal.shutdown()
    self.restore_env('TEAL_ALERT_DUPLICATE_CHECK', keep_ADC)
    return

def testJournalWriteAlertDB3(self):
    ''' Test getting only some fields of an alert '''
    self.teal = Teal('data/journal_test/events_001.conf', 'stderr', msgLevel=self.msglevel)
    # Events
    je = Journal('DB test input EVENTS', file='data/journal_test/events_002.json')
    je.insert_in_db(truncate=True, no_delay=True)
    # Alerts
    ja = Journal('DB test input ALERTS', file='data/journal_test/alerts_002.json')
    ja.insert_in_db(truncate=False, no_delay=True)
    # Check alerts
    jadb = Journal('Read DB test ALERTS')
    jadb.select_from_db('alert', include_alert_assoc=False,
                        alert_fields=[ALERT_ATTR_REC_ID, ALERT_ATTR_ALERT_ID, ALERT_ATTR_RECOMMENDATION])
    jaexp = Journal('DB test expected', 'data/journal_test/alerts_005.json')
    self.assertTrue(jaexp.deep_match(jadb, ignore_delay=True, ignore_times=False, ignore_rec_id=False))
    #p rint ja
    #p rint jadb
    self.teal.shutdown()
    return

def testJournalWriteAlertDB2(self):
    ''' Test getting alerts without associations '''
    self.teal = Teal('data/journal_test/events_001.conf', 'stderr', msgLevel=self.msglevel)
    # Events
    je = Journal('DB test input EVENTS', file='data/journal_test/events_002.json')
    je.insert_in_db(truncate=True, no_delay=True)
    # Alerts
    ja = Journal('DB test input ALERTS', file='data/journal_test/alerts_002.json')
    ja.insert_in_db(truncate=False, no_delay=True)
    # Check alerts
    jadb = Journal('Read DB test ALERTS')
    jadb.select_from_db('alert', include_alert_assoc=False)
    jaexp = Journal('DB test expected', 'data/journal_test/alerts_003.json')
    self.assertTrue(jaexp.deep_match(jadb, ignore_delay=True, ignore_times=False, ignore_rec_id=False))
    #p rint ja
    #p rint jadb
    self.teal.shutdown()
    return

def testJournalWriteAlertDB1(self):
    ''' Test writing of Alert log DB basic '''
    self.teal = Teal('data/journal_test/events_001.conf', 'stderr', msgLevel=self.msglevel)
    # Events
    je = Journal('DB test input EVENTS', file='data/journal_test/events_002.json')
    je.insert_in_db(truncate=True, no_delay=True)
    # Alerts
    ja = Journal('DB test input ALERTS', file='data/journal_test/alerts_002.json')
    ja.insert_in_db(truncate=False, no_delay=True)
    # Check events
    jedb = Journal('Read DB test EVENTS')
    jedb.select_from_db('event')
    self.assertTrue(je.deep_match(jedb, ignore_delay=True, ignore_times=False, ignore_rec_id=False))
    #p rint je
    #p rint jedb
    # Check alerts
    jadb = Journal('Read DB test ALERTS')
    jadb.select_from_db('alert')
    self.assertTrue(ja.deep_match(jadb, ignore_delay=True, ignore_times=False, ignore_rec_id=False))
    #p rint ja
    #p rint jadb
    self.teal.shutdown()
    return