def test_check_if_alert_under_threshold_no_alert(self):
    """Traffic below the configured threshold must not raise any alert."""
    checker = Alerter(alert_check_interval=120, high_traffic_threshold=10)
    # 100 lines over a 120s window is well under 10 req/s.
    quiet_traffic = generate_loglines(num_lines=100, sleep=0)
    alert_raised = checker.check_if_alert(quiet_traffic)
    # Neither the return flag nor the stored alert should indicate an alert.
    self.assertFalse(alert_raised)
    self.assertIsNone(checker.last_alert)
def __init__(self):
    """Initialize mask-bot state: data file path, crawl registry, alerter."""
    # Path of the JSON file where crawl results are persisted.
    # NOTE(review): hard-coded absolute path — consider making configurable.
    self.json_file = '/root/maskbot/data/coronamask.json'
    # Mask sites to crawl, keyed by name: {name: {content, link, sell_time}}
    self.mask_list = {}
    self.alerter = Alerter()
def test_check_if_alert_over_threshold_has_alert(self):
    """Traffic just over threshold * interval must raise a HIGH_TRAFFIC alert."""
    alerter = Alerter(alert_check_interval=120, high_traffic_threshold=10)
    # The alert fires once the window holds more than
    # high_traffic_threshold * alert_check_interval lines.
    loglines_at_threshold = alerter.high_traffic_threshold * alerter.alert_check_interval
    loglines = generate_loglines(num_lines=loglines_at_threshold + 1, sleep=0)
    has_alert = alerter.check_if_alert(loglines)
    # Fix: `has_alert` was previously computed but never asserted.
    self.assertTrue(has_alert)
    self.assertIsNotNone(alerter.last_alert)
    self.assertEqual(alerter.last_alert.state, AlertState.HIGH_TRAFFIC)
def __init__(self, args):
    """Wire up the log-monitor components and start the worker threads.

    args: dict-like with at least 'alert', 'window', 'test_window_end'
    and 'file' keys.
    """
    self.args = args
    # Adjusts behavior when running under test (private helper).
    self.__check_for_testing()
    self.stats = Stats(args['alert'], args['window'], args['test_window_end'])
    self.console = Console(self.stats, self.args)
    self.log_watcher = LogWatcher(self.stats, self.args['file'])
    self.alerter = Alerter(self.stats)
    # Build the worker threads, start them, then block until shutdown.
    threads = self.__setup_threads()
    self.__start_threads(threads)
    self.__keep_alive()
def run(self):
    """Function which is called from the Daemon runner.

    Sets up the UDP tracker server, restores previous tracker state,
    exposes alerter/reporter callbacks to the request Handler via the
    server object, and then serves requests forever.
    (Python 2 code: `print` statement and the `SocketServer` module.)
    """
    print "MACupdTracker Starting on HOST: " + self.host + " on port", self.port
    self.server = SocketServer.UDPServer((self.host, self.port), Handler)
    #
    # Set variables — shared state the Handler reads off the server object.
    #
    self.server.TRACKER = {}
    self.server.MACLIST = self.MACLIST
    self.server.CONFIG = self.CONFIG
    #
    # Set current log date (YYYYMMDD, used to name/rotate the daily log).
    #
    now = datetime.now()
    self.server.LOG_DATE = now.strftime('%Y%m%d')
    #
    # Read status if current tracker logfile exists
    #
    self.read_tracker_status()
    #
    # Set logfile sizes
    #
    self.set_logfile_sizes()
    #
    # Instantiate alerter and expose its alert callback to the Handler.
    #
    alerter = Alerter()
    self.server.alert = alerter.alert
    #
    # Instantiate reporter and expose its report callback.
    #
    reporter = Reporter(logdir=self.logdir, reportdir=self.reportdir, maclist=self.MACLIST)
    self.server.report = reporter.report
    #
    # Export function to handler
    #
    self.server.dump_tracker_log = self.dump_tracker_log
    self.server.set_line = self.set_line
    self.server.rotate_logs = self.rotate_logs
    self.server.logdir = self.logdir
    self.rotate_logs()
    #
    # Register atexit so tracker state is dumped on interpreter shutdown.
    #
    atexit.register(self.dump_atexit)
    #
    # Start UDP handler forever ...
    #
    self.server.serve_forever()
def test_check_alert_recover_from_high_traffic(self):
    """An active HIGH_TRAFFIC alert must flip to RECOVERED when traffic drops."""
    alerter = Alerter(alert_check_interval=120, high_traffic_threshold=10)
    loglines_at_threshold = alerter.high_traffic_threshold * alerter.alert_check_interval
    # Push traffic just over the threshold to raise the alert.
    loglines = generate_loglines(num_lines=loglines_at_threshold + 1, sleep=0)
    alerter.check_if_alert(loglines)  # fix: unused `has_alert` binding removed
    self.assertIsNotNone(alerter.last_alert)
    # Drop traffic well below the threshold; the alert should recover.
    loglines = generate_loglines(num_lines=100, sleep=0)
    alerter.check_if_alert(loglines)
    self.assertIsNotNone(alerter.last_alert)
    self.assertEqual(alerter.last_alert.state, AlertState.RECOVERED)
def __init__(self, parent_pid, storage):
    """
    Initialize the Analyzer.

    parent_pid: PID of the spawning process (kept for liveness checks).
    storage: backend handed through to the Alerter.
    """
    super(Analyzer, self).__init__()
    # Redis connection over the configured unix socket.
    self.redis_conn = StrictRedis(
        unix_socket_path=settings.REDIS_SOCKET_PATH)
    self.daemon = True
    self.parent_pid = parent_pid
    self.current_pid = getpid()
    self.lock = Lock()
    # Manager-backed containers: shared across worker processes.
    self.exceptions = Manager().dict()
    self.anomaly_breakdown = Manager().dict()
    self.anomalous_metrics = Manager().list()
    self.storage = storage
    self.alerter = Alerter(storage)
def test_check_if_alert_no_duplicate_alerts_created(self):
    """A second over-threshold check must not replace the existing alert."""
    alerter = Alerter(alert_check_interval=120, high_traffic_threshold=10)
    loglines_at_threshold = alerter.high_traffic_threshold * alerter.alert_check_interval
    loglines = generate_loglines(num_lines=loglines_at_threshold + 1, sleep=0)
    # Generate first alert
    has_alert = alerter.check_if_alert(loglines)
    self.assertTrue(has_alert)  # fix: was computed but never asserted
    first_alert = alerter.last_alert
    self.assertIsNotNone(first_alert)
    self.assertEqual(first_alert.state, AlertState.HIGH_TRAFFIC)
    # Try to generate second alert
    alerter.check_if_alert(loglines)
    # The original alert must remain: same timestamp, no duplicate created.
    self.assertEqual(first_alert.time, alerter.last_alert.time)
def __init__(self, config=None):
    """Set up the scanner's collaborators from *config*.

    config: dict with 'mdb_atlas', 'email' and 'sms' sections.

    Raises:
        ValueError: if config is None. (Previously the None default
        crashed with an opaque AttributeError on the first .get() call.)
    """
    if config is None:
        raise ValueError("a configuration dict is required")
    self.config_mdb_atlas = config.get('mdb_atlas')
    self.config_email = config.get('email')
    self.config_sms = config.get('sms')
    # Core scanning collaborators.
    self._discoverer = Discoverer(None, None)
    self._nmapper = Nmapper()
    self._alerter = Alerter()
    # MongoDB Atlas client where scan results are stored.
    self._scan_db = MongoDBClient(
        self.config_mdb_atlas.get('scan_cluster_name'),
        self.config_mdb_atlas.get('scan_username'),
        self.config_mdb_atlas.get('scan_password'),
        self.config_mdb_atlas.get('scan_db'))
    self._running_event = None
    # Baseline for diffing: results of the previous scan pass.
    self._previous_scan = {'hosts_list': [], 'hosts_ports_scan': []}
    # Notification clients (email + SMS).
    self._email_client = EmailClient(
        self.config_email.get('email_from_addr'),
        self.config_email.get('email_password'),
        self.config_email.get('email_smtp_server'),
        self.config_email.get('email_smtp_server_port'))
    self._sms_client = SmsClient(self.config_sms.get('sms_account_sid'),
                                 self.config_sms.get('sms_auth_token'))
        TopNSectionsStatistic(n=top_n_value, statistic_delay=STATS_DELAY_INTERVAL)
    ]
    # Optional statistics, enabled by name in `requested_stats`.
    if 'response_codes' in requested_stats:
        default_stats += [
            TopNResponseStatusCodes(n=top_n_value, statistic_delay=STATS_DELAY_INTERVAL)
        ]
    if 'request_size' in requested_stats:
        default_stats += [
            AverageRequestSizeStatistic(statistic_delay=STATS_DELAY_INTERVAL)
        ]
    return default_stats


# Script entry point: wire consumer, alerter and statistics into the monitor.
logkeep = LogKeep(ALERT_DELAY_INTERVAL)
consumer = LogConsumer(FILE_PATH, logkeep)
alerter = Alerter(ALERT_DELAY_INTERVAL, HIGH_TRAFFIC_THRESHOLD)
traffic_stats = create_traffic_statistics(TRAFFIC_STATS)
monitor = HTTPLogMonitor(consumer, logkeep, alerter, traffic_stats, STATS_DELAY_INTERVAL)
try:
    print('Monitoring {}...'.format(FILE_PATH))
    # Blocks until interrupted.
    monitor.run()
except KeyboardInterrupt:
    # Cleanup offset file generated by Pygtail
    consumer = None
    os.remove('{}.offset'.format(FILE_PATH))