def __init__(self):
    """Train a small neural network on OR-gate data, then parse the Apache
    error log and trigger a service restart when the network's output rounds
    to 1.

    Side effects: reads ``/var/log/apache2/error.log`` and ``time.txt``,
    may restart Apache via ``getCommand``/``doOperation`` (defined elsewhere
    on this class), and prints a status message.  Python 2 syntax
    (``print`` statement).
    """
    self.NeuralNetwork = Neural_Network()
    # Training data: all four 2-bit inputs with logical-OR targets.
    X = np.array(([0, 0], [1, 0], [0, 1], [1, 1]), dtype=float)
    y = np.array(([0], [1], [1], [1]), dtype=float)
    # NOTE(review): dividing by 1 is a no-op -- presumably leftover from an
    # earlier normalisation constant; confirm and remove.
    y = y / 1
    self.TrainNetwork = trainer(self.NeuralNetwork)
    self.TrainNetwork.train(X, y)  # Initialize and train the neural network
    self.AIMLPs = AIMLParser()
    self.logObj = LogParser()  # Initialize helper objects
    self.logFile = open("/var/log/apache2/error.log", 'r')  # open log file
    self.readTime = open("time.txt", 'r')
    # Timestamp of the last processed log entry, saved by a previous run.
    self.lastStat = self.readTime.readline()
    self.updatStat = self.lastStat
    # Parse the log file from the saved time onward into feature inputs.
    self.Xin = self.parseFile( self.logFile)
    # NOTE(review): the parsed features are immediately overwritten with a
    # constant input vector -- looks like leftover debug/testing code that
    # forces the "restart" branch; confirm intent.
    self.Xin = [1.0, 1.0]
    self.ResOut = self.NeuralNetwork.forward(self.Xin)
    if 1.0 == round(self.ResOut[0]):
        # Network flagged a problem: look up and run the restart command.
        self.command = self.getCommand('Apache2')
        self.doOperation()
        print "Restart successful!"
    self.logFile.close()
def _parse_lines_with_regex(lines, rex, log_format):
    """Parse raw log lines with a regex into a list of OrderedDicts.

    Each line is matched with ``re.findall(rex, line)``; lines yielding fewer
    fields than ``log_format`` has columns are skipped.  Surplus trailing
    fields are joined into the final column (free-text message part).
    Missing fields are filled with ``None`` by ``zip_longest``.
    """
    size = len(log_format)  # hoisted: invariant across lines
    records = []
    for line in lines:
        fields = re.findall(rex, line)
        if len(fields) < size:
            continue  # line does not match the expected format; skip it
        # Collapse everything past the last column into that column.
        fields[size - 1] = ' '.join(fields[size - 1:])
        del fields[size:]
        records.append(OrderedDict(it.zip_longest(log_format, fields)))
    return records


def ReadFile():
    """Flask handler: parse a log file posted in the request body.

    The body is assumed to be ``"name=<file_name>","<file_format>","<text>"``
    (comma-separated, with escaped newlines) -- TODO confirm against the
    client.  For the 'Others' format the lines are parsed with a regex from
    ``titles`` and also written to ``<file_name>_structured.csv``; otherwise
    parsing is delegated to the project ``LogParser``.

    Returns a JSON response of (records, columns, file_name).
    Side effect: stores the parsed records in the module-level ``resList``.
    """
    global resList
    ff = request.data.decode('utf-8').split(",")
    file_name = ''.join(ff[0][4:]).replace("\"", '').strip()
    file_format = ff[1].replace("\\n", '').replace("\"", '').strip()
    file_text = ''.join(ff[2:]).replace("\\r", "")
    file_text = str(
        file_text.replace("\\\\n", "/n").replace("\\n", "\n").replace("\"", '').strip())
    lines = file_text.strip().split("\n")
    # Drop the trailing (empty/partial) line.  Guarded: the original
    # unconditional delete raised IndexError on an empty body.
    if lines:
        del lines[-1]
    log_format = titles[file_format]['log_format']
    print(log_format)
    if file_format == 'Others':
        records = _parse_lines_with_regex(
            lines, titles[file_format]['regex'][0], log_format)
        resList = records
        df = pd.DataFrame(records, columns=log_format)
        df.to_csv(file_name + "_structured.csv", sep=',',
                  encoding='utf-8', index=False)
        return jsonify(records, log_format, file_name)
    else:
        print("----")
        parser = LogParser(log_format, file_format)
        resList = parser.parse(lines)
        col = list(resList.columns.values)
        return jsonify(resList.to_dict('records'), col, file_name)
def ReadFile():
    """Flask handler: parse a log file posted in the request body with a
    regex looked up from ``titles``.

    The body is assumed to be ``"name=<file_name>","<file_format>","<text>"``
    (comma-separated, with escaped newlines) -- TODO confirm against the
    client.  Returns a JSON response of (records, log_format).
    Side effect: stores the parsed records in the module-level ``resList``.
    """
    global resList
    ff = request.data.decode('utf-8').split(",")
    file_name = ''.join(ff[0][4:]).replace("\"", '').strip()
    file_format = ff[1].replace("\\n", '').replace("\"", '').strip()
    file_text = ''.join(ff[2:]).replace("\\r", "")
    file_text = str(file_text.replace("\\n", "\n").replace("\"", '').strip())
    file_text1 = file_text.strip().split("\n")
    # Drop the trailing (empty/partial) line.  Guarded: the original
    # unconditional delete raised IndexError on an empty body.
    if file_text1:
        del file_text1[len(file_text1) - 1]
    log_format = titles[file_format]['log_format']
    # BUG FIX: removed `parser = LogParser(log_format); parser.parse(log_file)`
    # -- `log_file` is undefined here (NameError) and the result was discarded.
    rex = titles[file_format]['regex'][0]
    mainList = []
    # BUG FIX: the original used the *builtin* `format` below (len(format),
    # zip_longest(format, ...), jsonify(..., format)), which raises TypeError
    # at runtime; `log_format` is clearly intended (cf. the sibling handler).
    size = len(log_format)  # hoisted: invariant across lines
    for line in file_text1:
        fields = re.findall(rex, line)
        if len(fields) < size:
            continue  # line does not match the expected format; skip it
        # Collapse everything past the last column into that column.
        fields[size - 1] = ' '.join(fields[size - 1:])
        del fields[size:]
        d = OrderedDict()
        for k, v in it.zip_longest(log_format, fields):
            d[k] = v
        mainList.append(d)
    resList = mainList
    return jsonify(mainList, log_format)
#!/usr/bin/env python
from LogParser import *
import sys
import matplotlib.pyplot as plt

# Interactive viewer: load a log file, list its tags, then repeatedly prompt
# for one tag (or a comma-separated list) and plot the value series.
# Loops until interrupted.  Python 2 (`raw_input`).
log_file = str(raw_input("Introduce log file: "))
parser = LogParser(log_file)
print(parser.listTags())

while True:
    tag = str(raw_input("Introduce tag or list of tags separated by commas: "))
    if ',' not in tag:
        # Single tag: plot it on its own axes.  Series of length <= 1 are
        # silently ignored (no plot window).
        series = parser.vectorFloat(tag)
        if len(series) > 1:
            plt.plot(series)
            plt.ylabel(tag)
            plt.show()
        continue
    # Multiple tags: overlay each series on one figure with a shared legend.
    requested = tag.split(',')
    for name in requested:
        series = parser.vectorFloat(name)
        if len(series) > 1:
            plt.plot(series)
            plt.ylabel("time")
        else:
            print("Tag not found")
    plt.legend(requested)
    plt.show()
from LogParser import *
import os

# Driver script: parse a hard-coded sample W3C Extended log with the
# project's LogParser.  The interactive prompt is disabled in favour of a
# fixed sample file.
#file_name = input("Enter file Name : ")
file_name = "w3c.txt"  # sample input; presumably sits next to this script -- TODO confirm
file_format = "W3C Extended"
# NOTE(review): constructing LogParser presumably performs the parse as a
# side effect -- confirm; the instance is otherwise unused.
l = LogParser(file_name, file_format)
#l.display()