def main():
    """Entry point: parse command-line options and monitor a log file forever."""
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument(
        '--logFilePath',
        required=True,
        type=str,
        help='The log file path to be monitored.')
    arg_parser.add_argument(
        '--numHitsToGenAlert',
        required=False,
        type=int,
        default=NUM_HITS_TO_GENERATE_ALERT,
        help='The number of hits within alerting window required to generate alert.')
    arg_parser.add_argument(
        '--alertWinLenSecs',
        required=False,
        type=int,
        default=TIME_WINDOW_TO_GENERATE_ALERT_SECS,
        help='The length of the alerting window in seconds.')
    arg_parser.add_argument(
        '--useCurrTimestamps',
        action='store_true',
        help='Use current timestamp instead of logged timestamp when generating alerts.')
    opts = arg_parser.parse_args()

    # Translate the parsed options into the analyzer's configuration object.
    config = Config(
        logFilePath=opts.logFilePath,
        numHitsToGenAlert=opts.numHitsToGenAlert,
        alertWinLenSecs=opts.alertWinLenSecs,
        useCurrTimestamps=opts.useCurrTimestamps,
    )
    LogAnalyzer(config).runForever()
def create_cmd_parser(): parser = argparse.ArgumentParser() parser.add_argument('-f1', '--file_1') parser.add_argument('-f2', '--file_2') parser.add_argument('-f3', '--file_3') parser.add_argument('-o', '--file_out') return parser if __name__ == '__main__': cmd_parser = create_cmd_parser() log_analayzer = LogAnalyzer() with open('settings.json') as file: params = json.load(file) logging.basicConfig(filename=params['logs']['path'], level=logging.INFO) log = logging.getLogger('errors') try: args = cmd_parser.parse_args(sys.argv[1:]) tests = log_analayzer.process( file_1=args.file_1, file_2=args.file_2, file_3=args.file_3, schema_1=params['schemas_path']['schema_1'],
#!/usr/bin/python
"""Run LogAnalyzer on the log file named by the first command-line argument."""
import sys

from LogAnalyzer import LogAnalyzer


def main():
    """Validate the command line, then analyze the requested log file."""
    if len(sys.argv) < 2:
        # Fail with a usage hint instead of an opaque IndexError on sys.argv[1].
        sys.exit('usage: {} <log-file>'.format(sys.argv[0]))
    laAnalyzer = LogAnalyzer()
    laAnalyzer.analyzeLog(sys.argv[1])


if __name__ == '__main__':
    main()
import datetime from LogAnalyzer import LogAnalyzer analyzer = LogAnalyzer() lines = analyzer.parse("webserver1.log") print("Parsed " + str(lines) + " lines from the log file") #print("The average object size is: " + str(analyzer.averageObjectSize())) #print("The most frequent ip is: " + str(analyzer.mostFrequentIp())) #print("The resources visted are:\n" + "\n".join(analyzer.resources())) #print("The distinct resources visted are:\n" + "\n".join(analyzer.distinctResources())) #print(str(analyzer.numberOfStatusCode(200)) + " successfully loaded got the requested resource") #print(str(analyzer.numberOfStatusCode(404)) + " got resource not found") #start = datetime.datetime(2018, 9, 3, 10, 15, 0, 0, datetime.timezone(datetime.timedelta(hours=0))) #end = datetime.datetime(2018, 9, 3, 10, 20, 0, 0, datetime.timezone(datetime.timedelta(hours=0))) #print(str(analyzer.countRequestsInTimeRange(start, end)) + " requests during the timeframe") #print("Resources with color=red:\n" + "\n".join(analyzer.resourcesWithQueryParam("color", "red"))) #print("Resources with year=2005:\n" + "\n".join(analyzer.resourcesWithQueryParam("year", "2005"))) print("Resources with year=2010 and color=orange:\n" + "\n".join( analyzer.resourcesWithAllQueryParams([("year", "2010"), ("color",
def setUp(self):
    # Build a fresh analyzer for every test so no state leaks between cases.
    self.log_analayzer = LogAnalyzer()
class TestLogAnalyzer(unittest.TestCase):
    """Tests for LogAnalyzer.process() covering normal merges, error paths,
    duplicate-timestamp failures, and schema validation.

    Fix: all fixture paths now use raw strings. The originals relied on
    Python leaving invalid escapes such as ``\m`` and ``\F`` untouched,
    which emits a SyntaxWarning on Python 3.12+ and is slated to become an
    error; raw strings keep the runtime bytes identical.
    """

    def setUp(self):
        # Fresh analyzer per test so no state leaks between cases.
        # NOTE: the attribute name keeps the original spelling
        # ("log_analayzer") because every test below refers to it.
        self.log_analayzer = LogAnalyzer()

    def test_correct_files(self):
        # TEST: three well-formed input files merge into the expected result.
        tests = self.log_analayzer.process(
            file_1=r'tests\materials\File_1.json',
            file_2=r'tests\materials\File_2.json',
            file_3=r'tests\materials\File_3.json')
        with open(r'tests\materials\Result_4_1.json') as file:
            res_expect = json.load(file)
        self.assertEqual(json.dumps(tests), json.dumps(res_expect))
        # TEST: smaller inputs with only a few records each.
        tests = self.log_analayzer.process(
            file_1=r'tests\materials\File_1_2records.json',
            file_2=r'tests\materials\File_2_few_records.json',
            file_3=r'tests\materials\File_3_few_records.json')
        with open(r'tests\materials\Result_4_2.json') as file:
            res_expect = json.load(file)
        self.assertEqual(json.dumps(tests), json.dumps(res_expect))
        # TEST: three empty (but valid) JSON files yield an empty result.
        tests = self.log_analayzer.process(
            file_1=r'tests\materials\File_empty_json.json',
            file_2=r'tests\materials\File_empty_json.json',
            file_3=r'tests\materials\File_3_few_records.json'
            if False else r'tests\materials\File_empty_json.json')
        self.assertEqual(json.dumps(tests), json.dumps([]))
        # TEST: a partially-empty input set produces a RuntimeWarning and a
        # result built from the data that was available.
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("always")
            tests = self.log_analayzer.process(
                file_1=r'tests\materials\File_empty_json.json',
                file_2=r'tests\materials\File_2_few_records.json',
                file_3=r'tests\materials\File_3_few_records.json')
            self.assertTrue(issubclass(w[-1].category, RuntimeWarning))
        with open(r'tests\materials\Result_4_3_not_all_data.json') as file:
            res_expect = json.load(file)
        self.assertEqual(json.dumps(tests), json.dumps(res_expect))

    def test_exceptions(self):
        # TEST: a non-existent path raises FileNotFoundError.
        with self.assertRaises(FileNotFoundError):
            self.log_analayzer.process(
                file_1='not_existing',
                file_2=r'tests\materials\File_empty_json.json',
                file_3=r'tests\materials\File_empty_json.json')
        # TEST: a zero-byte file is not valid JSON.
        with self.assertRaises(json.decoder.JSONDecodeError):
            self.log_analayzer.process(
                file_1=r'tests\materials\File_empty.json',
                file_2=r'tests\materials\File_empty_json.json',
                file_3=r'tests\materials\File_empty_json.json')
        # TEST: records missing required keys raise KeyError.
        with self.assertRaises(KeyError):
            self.log_analayzer.process(
                file_1=r'tests\materials\File_1_without_status_and_name.json',
                file_2=r'tests\materials\File_empty_json.json',
                file_3=r'tests\materials\File_empty_json.json')
        # TEST: an empty path string also raises FileNotFoundError.
        with self.assertRaises(FileNotFoundError):
            self.log_analayzer.process(
                file_1='',
                file_2=r'tests\materials\File_empty_json.json',
                file_3=r'tests\materials\File_empty_json.json')

    def test_fail(self):
        # TEST: two tests share the same "time" field within file 1, so the
        # analyzer cannot distinguish them.
        with self.assertRaises(KeyError):
            self.log_analayzer.process(
                file_1=r'tests\materials\File_1_fail_same_time.json',
                file_2=r'tests\materials\File_empty_json.json',
                file_3=r'tests\materials\File_empty_json.json')
        # TEST: same duplicate-timestamp failure, but in file 2.
        with self.assertRaises(KeyError):
            self.log_analayzer.process(
                file_1=r'tests\materials\File_empty_json.json',
                file_2=r'tests\materials\File_2_fail_same_time.json',
                file_3=r'tests\materials\File_empty_json.json')
        # TEST: duplicate timestamps across files 2 and 3.
        with self.assertRaises(KeyError):
            self.log_analayzer.process(
                file_1=r'tests\materials\File_1.json',
                file_2=r'tests\materials\File_2_fail_same_time.json',
                file_3=r'tests\materials\File_3_fail_same_time.json')

    def test_json_schema(self):
        # TEST: a result validated against an explicit JSON schema still
        # matches the expected output.
        tests = self.log_analayzer.process(
            file_1=r'tests\materials\File_1.json',
            file_2=r'tests\materials\File_2.json',
            file_3=r'tests\materials\File_3.json',
            schema_res=r'schemas\ResultFileSchema.json')
        with open(r'tests\materials\Result_4_1.json') as file:
            res_expect = json.load(file)
        self.assertEqual(json.dumps(tests), json.dumps(res_expect))
# -*- coding: utf-8 -*- """ Created on Sun Apr 26 00:42:10 2020 @author: Mariusz """ from LogAnalyzer import LogAnalyzer log = LogAnalyzer('log') df = log.read_all_logs() #print (log.search(df,'status code','404')) print() log.print_report(df, 10, 'ip', startDate='18/Feb/2016 00:00:00', endDate='01/Mar/2016 23:59:59', action=None, status=None) print() log.print_report(df, 3, 'action', startDate='18/Feb/2016 00:00:00', endDate='01/Mar/2016 23:59:59', action=None, status=None) print() log.print_report(df, 5,