def run(self, arguments=None):
    """Parse the log file(s), group log events, then plot or manage overlays.

    Behavior depends on the --overlay argument:
      'reset' - delete stored overlays, then continue (plot if input given)
      'list'  - print stored overlays and exit
      ''/'add'- save the current groups as an overlay and exit
      otherwise plot the input combined with any loaded overlays.
    """
    # get_unknowns=True presumably keeps unrecognized CLI arguments around
    # for group-specific options -- TODO confirm against LogFileTool.run
    LogFileTool.run(self, arguments, get_unknowns=True)
    self.parse_logevents()
    self.group()
    if self.args['overlay'] == 'reset':
        self.remove_overlays()
    # if --overlay is set, save groups in a file, else load groups and plot
    if self.args['overlay'] == "list":
        self.list_overlays()
        raise SystemExit
    # input counts as specified when piped via stdin or given as filename(s)
    plot_specified = not sys.stdin.isatty() or len(self.args['logfile']) > 0
    # if no plot is specified (either pipe or filename(s)) and reset, quit now
    if not plot_specified and self.args['overlay'] == 'reset':
        raise SystemExit
    if self.args['overlay'] == "" or self.args['overlay'] == "add":
        if plot_specified:
            self.save_overlay()
        else:
            print "Nothing to plot."
        raise SystemExit
    # else plot (with potential overlays) if there is something to plot
    overlay_loaded = self.load_overlays()
    if plot_specified or overlay_loaded:
        self.plot()
    else:
        print "Nothing to plot."
        raise SystemExit
def run(self, arguments=None):
    """Parse the log lines, group them, then plot or manage overlays.

    Same overlay handling as the sibling implementation: 'reset' removes
    stored overlays, 'list' prints them and exits, ''/'add' saves the
    current groups, any other case plots input plus loaded overlays.
    """
    # get_unknowns=True presumably keeps unrecognized CLI arguments around
    # for group-specific options -- TODO confirm against LogFileTool.run
    LogFileTool.run(self, arguments, get_unknowns=True)
    self.parse_loglines()
    self.group()
    if self.args['overlay'] == 'reset':
        self.remove_overlays()
    # if --overlay is set, save groups in a file, else load groups and plot
    if self.args['overlay'] == "list":
        self.list_overlays()
        raise SystemExit
    # input counts as specified when piped via stdin or given as filename(s)
    plot_specified = not sys.stdin.isatty() or len(self.args['logfile']) > 0
    # if no plot is specified (either pipe or filename(s)) and reset, quit now
    if not plot_specified and self.args['overlay'] == 'reset':
        raise SystemExit
    if self.args['overlay'] == "" or self.args['overlay'] == "add":
        if plot_specified:
            self.save_overlay()
        else:
            print "Nothing to plot."
        raise SystemExit
    # else plot (with potential overlays) if there is something to plot
    overlay_loaded = self.load_overlays()
    if plot_specified or overlay_loaded:
        self.plot()
    else:
        print "Nothing to plot."
        raise SystemExit
def run(self, arguments=None):
    """Print useful information about the log file.

    Iterates over all given log files, printing a summary header (source,
    host, start/end time, date format, length, binary, inferred version,
    storage engine) for each, then runs every active section.
    """
    LogFileTool.run(self, arguments)
    for i, self.logfile in enumerate(self.args['logfile']):
        # separator between consecutive files
        if i > 0:
            print("\n ------------------------------------------\n")
        if self.logfile.datetime_format == 'ctime-pre2.4':
            # no milliseconds when datetime format doesn't support it
            start_time = (self.logfile.start.strftime("%Y %b %d %H:%M:%S")
                          if self.logfile.start else "unknown")
            # NOTE(review): end_time guards on .start, not .end -- if start
            # is set but end is None this raises; presumably they are set
            # together -- verify in LogFile
            end_time = (self.logfile.end.strftime("%Y %b %d %H:%M:%S")
                        if self.logfile.start else "unknown")
        else:
            # include milliseconds ([:-3] trims microseconds to millis)
            start_time = (self.logfile.start.strftime("%Y %b %d "
                                                      "%H:%M:%S.%f")[:-3]
                          if self.logfile.start else "unknown")
            end_time = (self.logfile.end.strftime("%Y %b %d "
                                                  "%H:%M:%S.%f")[:-3]
                        if self.logfile.start else "unknown")
        print(" source: %s" % self.logfile.name)
        print(" host: %s" % (self.logfile.hostname + ':'
                             + str(self.logfile.port)
                             if self.logfile.hostname else "unknown"))
        print(" start: %s" % (start_time))
        print(" end: %s" % (end_time))
        # TODO: add timezone if iso8601 format
        print("date format: %s" % self.logfile.datetime_format)
        print(" length: %s" % len(self.logfile))
        print(" binary: %s" % (self.logfile.binary or "unknown"))
        version = (' -> '.join(self.logfile.versions) or "unknown")
        # if version is unknown, go by date
        if version == 'unknown':
            if self.logfile.datetime_format == 'ctime-pre2.4':
                version = '< 2.4 (no milliseconds)'
            elif self.logfile.datetime_format == 'ctime':
                version = '>= 2.4.x ctime (milliseconds present)'
            elif (self.logfile.datetime_format == "iso8601-utc" or
                  self.logfile.datetime_format == "iso8601-local"):
                if self.logfile.has_level:
                    version = '>= 3.0 (iso8601 format, level, component)'
                else:
                    version = '= 2.6.x (iso8601 format)'
        print(" version: %s" % version)
        print(" storage: %s" % (self.logfile.storage_engine or 'unknown'))
        # now run all sections
        for section in self.sections:
            if section.active:
                print("\n%s" % section.name.upper())
                section.run()
def run(self, arguments=None): """ Go through each line, convert string to LogLine object, then print JSON representation of the line. """ LogFileTool.run(self, arguments) for line in self.args['logfile']: print LogLine(line).to_json()
def run(self, arguments=None): """ parses the logfile and asks each filter if it accepts the line. it will only be printed if all filters accept the line. """ # add arguments from filter classes before calling superclass run for f in self.filters: for fa in f.filterArgs: self.argparser.add_argument(fa[0], **fa[1]) # now parse arguments and post-process LogFileTool.run(self, arguments) self.args = dict( (k, self._arrayToString(self.args[k])) for k in self.args) # create filter objects from classes and pass args self.filters = [f(self.args) for f in self.filters] # remove non-active filter objects self.filters = [f for f in self.filters if f.active] # call setup for each active filter for f in self.filters: f.setup() if self.args['shorten'] != False: if self.args['shorten'] == None: self.args['shorten'] = 200 if self.args['verbose']: print "mlogfilter> command line arguments" for a in self.args: print "mlogfilter> %8s: %s" % (a, self.args[a]) # go through each line and ask each filter if it accepts if not 'logfile' in self.args or not self.args['logfile']: exit() for line in self.args['logfile']: logline = LogLine(line) if self.args['exclude']: # print line if any filter disagrees if any([not f.accept(logline) for f in self.filters]): self._outputLine(logline.line_str, self.args['shorten'], self.args['human']) else: # only print line if all filters agree if all([f.accept(logline) for f in self.filters]): self._outputLine(logline.line_str, self.args['shorten'], self.args['human']) # if at least one filter refuses to accept any remaining lines, stop if any([f.skipRemaining() for f in self.filters]): # if input is not stdin if sys.stdin.isatty(): break
def run(self): """ parses the logfile and asks each filter if it accepts the line. it will only be printed if all filters accept the line. """ # add arguments from filter classes before calling superclass run for f in self.filters: for fa in f.filterArgs: self.argparser.add_argument(fa[0], **fa[1]) # now parse arguments and post-process LogFileTool.run(self) self.args = dict((k, self._arrayToString(self.args[k])) for k in self.args) # create filter objects from classes and pass args self.filters = [f(self.args) for f in self.filters] # remove non-active filter objects self.filters = [f for f in self.filters if f.active] # call setup for each active filter for f in self.filters: f.setup() if self.args['shorten'] != False: if self.args['shorten'] == None: self.args['shorten'] = 200 if self.args['verbose']: print "mlogfilter> command line arguments" for a in self.args: print "mlogfilter> %8s: %s" % (a, self.args[a]) # go through each line and ask each filter if it accepts if not 'logfile' in self.args or not self.args['logfile']: exit() for line in self.args['logfile']: logline = LogLine(line) if self.args['exclude']: # print line if any filter disagrees if any([not f.accept(logline) for f in self.filters]): self._outputLine(logline.line_str, self.args['shorten']) else: # only print line if all filters agree if all([f.accept(logline) for f in self.filters]): self._outputLine(logline.line_str, self.args['shorten']) # if at least one filter refuses to accept any remaining lines, stop if any([f.skipRemaining() for f in self.filters]): # if input is not stdin if sys.stdin.isatty(): break
def run(self, arguments=None):
    """Narrow down the possible MongoDB versions that produced a log file.

    Starts with the full set of known versions and intersects it with the
    versions matching each log line's pattern (via log2code). Explicit
    "db version" / "MongoS version" restart lines pin the version exactly.
    Prints progress as the candidate set shrinks and a summary at the end.
    """
    LogFileTool.run(self, arguments)
    possible_versions = set(Log2CodeConverter.all_versions)
    # explicit version strings logged by mongod / mongos at startup
    re_versiond = re.compile(r'db version v(\d\.\d\.\d), pdfile version')
    re_versions = re.compile(r'MongoS version (\d\.\d\.\d) starting:')
    # the [thread] token that precedes the message body
    re_brackets = re.compile(r'\[\w+\]')
    for i, line in enumerate(self.args['logfile']):
        match = re_brackets.search(line)
        if not match:
            continue
        # only match patterns after the thread name token
        start = match.end()
        # check for explicit version string
        match = (re_versiond.search(line[start:]) or
                 re_versions.search(line[start:]))
        if match:
            version = match.group(1)
            print "%32s %s" % ("restart detected in log line %i:" % (i+1),
                               line.rstrip())
            # version tags are stored with a leading 'r'; strip it for display
            print "%32s %s" % ("previous possible versions:",
                               ", ".join([pv[1:] for pv
                                          in sorted(possible_versions)]))
            print "%32s %s" % ("version after restart is:", version)
            print
            possible_versions = set(["r"+version])
        if len(possible_versions) == 1:
            # from here on, version is known, skip to next section
            continue
        ll = LogLine(line)
        if ll.operation != None:
            # if log line is a known command operation (query, update,
            # command, ...) skip
            continue
        lcl = self.log2code(line[start:])
        if lcl:
            old_len = len(possible_versions)
            possible_versions = possible_versions & set(lcl.versions)
            # only report when the intersection actually shrank the set
            if len(possible_versions) != old_len:
                print "%32s %s" % ("log line %i:" % (i+1), line.rstrip())
                print "%32s %s" % ("matched pattern:",
                                   " ... ".join(lcl.pattern))
                print "%32s %s" % ("only present in:",
                                   ", ".join(sorted(lcl.versions)))
                print "%32s %s" % ("possible versions now:",
                                   ", ".join(sorted(possible_versions)))
                print
        if len(possible_versions) == 0:
            # contradictory evidence: no version matches all lines seen
            print "empty version set. exiting."
            raise SystemExit
    if len(possible_versions) > 1:
        print "possible versions at end of file:", \
              ", ".join([pv[1:] for pv in sorted(possible_versions)])
    else:
        print "version at end of file: ", possible_versions.pop()[1:]
def load_file(self, filename):
    """Run LogFileTool on one file from self.log_folder and export it.

    LogFileTool.run() reads its input path from sys.argv, so the path is
    pushed onto sys.argv before the call and popped again afterwards.
    """
    print('load data from %s' % (filename))
    self.mlogtool = LogFileTool()
    target_path = os.path.join(self.log_folder, filename)
    sys.argv.append(target_path)
    LogFileTool.run(self.mlogtool)
    self._export()
    # restore sys.argv so subsequent calls start from a clean state
    sys.argv.pop()
def run(self, arguments=None):
    """ Print out useful information about the log file.

    Handles multiple log files: prints filename, start/end times, line
    count, binary and inferred version for each, then runs all active
    sections per file.
    """
    LogFileTool.run(self, arguments)
    self.logfiles = self.args['logfile']
    for i, logfileOpen in enumerate(self.args['logfile']):
        # separator between consecutive files
        if i > 0:
            print
            print ' ------------------------------------------'
            print
        self.logfileOpen = logfileOpen
        self.logfile = LogFile(logfileOpen)
        print " filename: %s" % self.args['logfile'][i].name
        print "start of logfile: %s" % (
            self.logfile.start.strftime("%b %d %H:%M:%S")
            if self.logfile.start else "unknown")
        # NOTE(review): the end-time guard tests .start, not .end --
        # presumably both are set together; verify in LogFile
        print " end of logfile: %s" % (
            self.logfile.end.strftime("%b %d %H:%M:%S")
            if self.logfile.start else "unknown")
        # get one logline (within first 20 lines) for datetime format
        logline = None
        # NOTE(review): this inner loop reuses `i`, shadowing the outer
        # enumerate index; harmless here because `i` is not read again
        # before the outer loop reassigns it, but worth renaming
        for i in range(20):
            try:
                logline = LogLine(logfileOpen.next())
            except StopIteration as e:
                raise SystemExit("no valid log lines found (datetime "
                                 "not available).")
            if logline.datetime:
                break
        # TODO: add timezone if iso8601 format
        print " line numbers: %s" % self.logfile.num_lines
        print " binary: %s" % (self.logfile.binary or "unknown")
        version = (' -> '.join(self.logfile.versions) or "unknown")
        # if version is unknown, go by date
        if version == 'unknown' and logline:
            if logline.datetime_format == 'ctime-pre2.4':
                version = '< 2.4 (no milliseconds)'
            elif logline.datetime_format == 'ctime':
                version = '>= 2.4 (milliseconds present)'
            elif logline.datetime_format.startswith('iso8601-'):
                version = '>= 2.6 (iso8601 format)'
        print " version: %s" % version,
        print
        # now run all sections
        for section in self.sections:
            if section.active:
                print
                print section.name.upper()
                section.run()
def run(self): LogFileTool.run(self) # store in current local folder mlogvis_dir = '.' # change stdin logfile name and remove the < > logname = self.args['logfile'].name if logname == '<stdin>': logname = 'stdin' os.chdir(mlogvis_dir) data_path = os.path.join(os.path.dirname(mtools.__file__), 'data') srcfilelocation = os.path.join(data_path, 'index.html') outf = '{"type": "duration", "logfilename": "' + logname + '", "data":[' first_row = True for line in self.args['logfile']: logline = LogLine(line) # group regular connections together if logline.datetime and logline.duration: if logline.thread and logline.thread.startswith("conn"): logline._thread = "conn####" # write log line out as json if not first_row: # prepend comma and newline outf += ',\n' else: first_row = False outf += logline.to_json([ 'line_str', 'datetime', 'operation', 'thread', 'namespace', 'nscanned', 'nreturned', 'duration' ]) outf += ']}' dstfilelocation = os.path.join(os.getcwd(), '%s.html' % logname) print "copying %s to %s" % (srcfilelocation, dstfilelocation) srcfile = open(srcfilelocation) contents = srcfile.read() srcfile.close() dstfile = open(dstfilelocation, 'wt') replaced_contents = contents.replace('##REPLACE##', outf) dstfile.write(replaced_contents) dstfile.close() print "serving visualization on file://" + dstfilelocation webbrowser.open("file://" + dstfilelocation)
def run(self): LogFileTool.run(self) # store in current local folder mlogvis_dir = "." # change stdin logfile name and remove the < > logname = self.args["logfile"].name if logname == "<stdin>": logname = "stdin" os.chdir(mlogvis_dir) data_path = os.path.join(os.path.dirname(mtools.__file__), "data") srcfilelocation = os.path.join(data_path, "index.html") outf = '{"type": "duration", "logfilename": "' + logname + '", "data":[' first_row = True for line in self.args["logfile"]: logline = LogLine(line) # group regular connections together if logline.datetime and logline.duration: if logline.thread and logline.thread.startswith("conn"): logline._thread = "conn####" # write log line out as json if not first_row: # prepend comma and newline outf += ",\n" else: first_row = False outf += logline.to_json( ["line_str", "datetime", "operation", "thread", "namespace", "nscanned", "nreturned", "duration"] ) outf += "]}" dstfilelocation = os.path.join(os.getcwd(), "%s.html" % logname) print "copying %s to %s" % (srcfilelocation, dstfilelocation) srcfile = open(srcfilelocation) contents = srcfile.read() srcfile.close() dstfile = open(dstfilelocation, "wt") replaced_contents = contents.replace("##REPLACE##", outf) dstfile.write(replaced_contents) dstfile.close() print "serving visualization on file://" + dstfilelocation webbrowser.open("file://" + dstfilelocation)
def run(self, arguments=None):
    """ Print out useful information about the log file.

    For each given log file prints source, host, start/end (with or
    without milliseconds depending on the datetime format), date format,
    length, binary and inferred version, then runs all active sections.
    """
    LogFileTool.run(self, arguments)
    for i, self.logfile in enumerate(self.args['logfile']):
        # separator between consecutive files
        if i > 0:
            print
            print ' ------------------------------------------'
            print
        if self.logfile.datetime_format == 'ctime-pre2.4':
            # no milliseconds when datetime format doesn't support it
            start_time = (self.logfile.start.strftime("%Y %b %d %H:%M:%S")
                          if self.logfile.start else "unknown")
            # NOTE(review): guard tests .start, not .end -- presumably
            # both are set together; verify in LogFile
            end_time = (self.logfile.end.strftime("%Y %b %d %H:%M:%S")
                        if self.logfile.start else "unknown")
        else:
            # include milliseconds ([:-3] trims microseconds to millis)
            start_time = (self.logfile.start.strftime(
                "%Y %b %d %H:%M:%S.%f")[:-3]
                if self.logfile.start else "unknown")
            end_time = (self.logfile.end.strftime(
                "%Y %b %d %H:%M:%S.%f")[:-3]
                if self.logfile.start else "unknown")
        print " source: %s" % self.logfile.name
        print " host: %s" % (self.logfile.hostname + ':'
                             + self.logfile.port
                             if self.logfile.hostname else "unknown")
        print " start: %s" % (start_time)
        print " end: %s" % (end_time)
        # TODO: add timezone if iso8601 format
        print "date format: %s" % self.logfile.datetime_format
        print " length: %s" % len(self.logfile)
        print " binary: %s" % (self.logfile.binary or "unknown")
        version = (' -> '.join(self.logfile.versions) or "unknown")
        # if version is unknown, go by date
        if version == 'unknown':
            if self.logfile.datetime_format == 'ctime-pre2.4':
                version = '< 2.4 (no milliseconds)'
            elif self.logfile.datetime_format == 'ctime':
                version = '>= 2.4 (milliseconds present)'
            elif self.logfile.datetime_format == "iso8601-utc" or \
                    self.logfile.datetime_format == "iso8601-local":
                version = '>= 2.6 (iso8601 format)'
        print " version: %s" % version,
        print
        # now run all sections
        for section in self.sections:
            if section.active:
                print
                print section.name.upper()
                section.run()
def run(self, arguments=None):
    """ Print out useful information about the log file.

    Single-file variant: prints start/end times, line count, binary,
    inferred version, and optionally a RESTARTS section listing every
    server restart found in the file.
    """
    LogFileTool.run(self, arguments)
    logfile = LogFile(self.args['logfile'])
    print "start of logfile: %s" % (logfile.start.strftime(
        "%b %d %H:%M:%S") if logfile.start else "unknown")
    # NOTE(review): guard tests .start, not .end -- presumably both are
    # set together; verify in LogFile
    print " end of logfile: %s" % (logfile.end.strftime("%b %d %H:%M:%S")
                                   if logfile.start else "unknown")
    # get one logline (within first 20 lines) for datetime format
    logline = None
    for i in range(20):
        try:
            logline = LogLine(self.args['logfile'].next())
        except StopIteration as e:
            raise SystemExit(
                "no valid log lines found (datetime not available).")
        if logline.datetime:
            break
    # TODO: add timezone if iso8601 format
    print " line numbers: %s" % logfile.num_lines
    print " binary: %s" % (logfile.binary or "unknown")
    version = (' -> '.join(logfile.versions) or "unknown")
    # if version is unknown, go by date
    if version == 'unknown' and logline:
        if logline.datetime_format == 'ctime-pre2.4':
            version = '< 2.4 (no milliseconds)'
        elif logline.datetime_format == 'ctime':
            version = '>= 2.4 (milliseconds present)'
        elif logline.datetime_format.startswith('iso8601-'):
            version = '>= 2.6 (iso8601 format)'
    print " version: %s" % version
    # restarts section
    if self.args['restarts']:
        print
        print "RESTARTS"
        for version, logline in logfile.restarts:
            print " %s version %s" % (
                logline.datetime.strftime("%b %d %H:%M:%S"), version)
        if len(logfile.restarts) == 0:
            print " no restarts found"
def run(self, arguments=None):
    """ Anonymize the log file and print the result.

    For each log event, replaces IPv4 addresses, double-quoted strings,
    and dotted names (hostnames / namespaces) with substitutes produced
    by the corresponding _replace_* callbacks, then prints the line.
    """
    LogFileTool.run(self, arguments)
    for logevent in self.args['logfile']:
        line = logevent.line_str
        # replace IP addresses
        line = re.sub(r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}',
                      self._replace_ip, line)
        # replace strings
        line = re.sub(r'".+?"', self._replace_string, line)
        # replace hostnames and namespaces (two or more dot-separated
        # tokens, each starting with a letter or '$')
        line = re.sub(r'[a-zA-Z$][^ \t\n\r\f\v:]+(\.[a-zA-Z$][^ \t\n\r\f\v:]+)+',
                      self._replace_dottedname, line)
        print line
def run(self, arguments=None):
    """ Print out useful information about the log file.

    Single-file variant: prints start/end times, line count, binary,
    inferred version, and optionally a RESTARTS section listing every
    server restart found in the file.
    """
    LogFileTool.run(self, arguments)
    logfile = LogFile(self.args['logfile'])
    print "start of logfile: %s" % (
        logfile.start.strftime("%b %d %H:%M:%S")
        if logfile.start else "unknown")
    # NOTE(review): guard tests .start, not .end -- presumably both are
    # set together; verify in LogFile
    print " end of logfile: %s" % (
        logfile.end.strftime("%b %d %H:%M:%S")
        if logfile.start else "unknown")
    # get one logline (within first 20 lines) for datetime format
    logline = None
    for i in range(20):
        try:
            logline = LogLine(self.args['logfile'].next())
        except StopIteration as e:
            raise SystemExit("no valid log lines found (datetime not "
                             "available).")
        if logline.datetime:
            break
    # TODO: add timezone if iso8601 format
    print " line numbers: %s" % logfile.num_lines
    print " binary: %s" % (logfile.binary or "unknown")
    version = (' -> '.join(logfile.versions) or "unknown")
    # if version is unknown, go by date
    if version == 'unknown' and logline:
        if logline.datetime_format == 'ctime-pre2.4':
            version = '< 2.4 (no milliseconds)'
        elif logline.datetime_format == 'ctime':
            version = '>= 2.4 (milliseconds present)'
        elif logline.datetime_format.startswith('iso8601-'):
            version = '>= 2.6 (iso8601 format)'
    print " version: %s" % version
    # restarts section
    if self.args['restarts']:
        print
        print "RESTARTS"
        for version, logline in logfile.restarts:
            print " %s version %s" % (
                logline.datetime.strftime("%b %d %H:%M:%S"), version)
        if len(logfile.restarts) == 0:
            print " no restarts found"
def run(self, arguments=None): LogFileTool.run(self, arguments) # store in current local folder mlogvis_dir = '.' # change stdin logfile name and remove the < > logname = self.args['logfile'].name if logname == '<stdin>': logname = 'stdin' if self.args['out'] is not None: outputname = self.args['out'] else: outputname = logname + '.html' os.chdir(mlogvis_dir) data_path = os.path.join(os.path.dirname(mtools.__file__), 'data') srcfilelocation = os.path.join(data_path, 'index.html') json_docs = self._export(True) if not json_docs: json_docs = self._export(False) outf = ('{"type": "duration", "logfilename": "' + logname + '", "data":[' + json_docs + ']}') dstfilelocation = os.path.join(os.getcwd(), '%s' % outputname) print("copying %s to %s" % (srcfilelocation, dstfilelocation)) srcfile = open(srcfilelocation) contents = srcfile.read() srcfile.close() dstfile = open(dstfilelocation, 'wt') replaced_contents = contents.replace('##REPLACE##', outf) dstfile.write(replaced_contents) dstfile.close() if not self.args['no_browser']: print("serving visualization on file://" + dstfilelocation) webbrowser.open("file://" + dstfilelocation)
def run(self, arguments=None):
    """ Anonymize the log file and print the result.

    For each log event, replaces IPv4 addresses, double-quoted strings,
    and dotted names (hostnames / namespaces) with substitutes produced
    by the corresponding _replace_* callbacks, then prints the line.
    """
    LogFileTool.run(self, arguments)
    for logevent in self.args['logfile']:
        line = logevent.line_str
        # replace IP addresses
        line = re.sub(r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}',
                      self._replace_ip, line)
        # replace strings
        line = re.sub(r'".+?"', self._replace_string, line)
        # replace hostnames and namespaces (two or more dot-separated
        # tokens, each starting with a letter or '$')
        line = re.sub(
            r'[a-zA-Z$][^ \t\n\r\f\v:]+(\.[a-zA-Z$][^ \t\n\r\f\v:]+)+',
            self._replace_dottedname, line)
        print line
def run(self, arguments=None):
    """ Print out useful information about the log file.

    For each given log file prints source, start/end times, date format,
    length, binary and inferred version, then runs all active sections.
    """
    LogFileTool.run(self, arguments)
    for i, self.logfile in enumerate(self.args['logfile']):
        # separator between consecutive files
        if i > 0:
            print
            print ' ------------------------------------------'
            print
        print " source: %s" % self.logfile.name
        print " start: %s" % (self.logfile.start.strftime(
            "%Y %b %d %H:%M:%S") if self.logfile.start else "unknown")
        # NOTE(review): guard tests .start, not .end -- presumably both
        # are set together; verify in LogFile
        print " end: %s" % (self.logfile.end.strftime(
            "%Y %b %d %H:%M:%S") if self.logfile.start else "unknown")
        # TODO: add timezone if iso8601 format
        print "date format: %s" % self.logfile.datetime_format
        print " length: %s" % len(self.logfile)
        print " binary: %s" % (self.logfile.binary or "unknown")
        version = (' -> '.join(self.logfile.versions) or "unknown")
        # if version is unknown, go by date
        if version == 'unknown':
            if self.logfile.datetime_format == 'ctime-pre2.4':
                version = '< 2.4 (no milliseconds)'
            elif self.logfile.datetime_format == 'ctime':
                version = '>= 2.4 (milliseconds present)'
            elif self.logfile.datetime_format.startswith('iso8601-'):
                version = '>= 2.6 (iso8601 format)'
        print " version: %s" % version,
        print
        # now run all sections
        for section in self.sections:
            if section.active:
                print
                print section.name.upper()
                section.run()
def run(self):
    """ go over each line in the logfile, run through log2code matcher
        and group by matched pattern.

    Counts occurrences per matched code pattern and prints them sorted
    by frequency. Lines that match no pattern are counted as non-matches
    unless they are operations, thread-less lines, or empty messages.
    """
    LogFileTool.run(self)
    # pattern -> occurrence count
    codelines = defaultdict(lambda: 0)
    non_matches = 0
    for line in self.args['logfile']:
        cl = self.log2code(line)
        if cl:
            codelines[cl.pattern] += 1
        else:
            ll = LogLine(line)
            if ll.operation:
                # skip operations (command, insert, update, delete,
                # query, getmore)
                continue
            if not ll.thread:
                # skip the lines that don't have a thread name
                # (usually map/reduce or assertions)
                continue
            if len(ll.split_tokens) - ll._thread_offset <= 1:
                # skip empty log messages (after thread name)
                continue
            # everything else is a real non-match
            non_matches += 1
            if self.args['verbose']:
                print "couldn't match:", line,
    if self.args['verbose']:
        print
    # print patterns sorted by occurrence count, most frequent first
    for cl in sorted(codelines, key=lambda x: codelines[x], reverse=True):
        print "%8i" % codelines[cl], " ", " ... ".join(cl)
    print
    if non_matches > 0:
        print "couldn't match %i lines" % non_matches
        if not self.args['verbose']:
            print "to show non-matched lines, run with --verbose."
def run(self, arguments=None):
    """ go over each line in the logfile, run through log2code matcher
        and group by matched pattern.

    Counts occurrences per matched code pattern and prints them sorted
    by frequency. Lines that match no pattern are counted as non-matches
    unless they are operations, thread-less lines, or empty messages.
    """
    LogFileTool.run(self, arguments)
    # pattern -> occurrence count
    codelines = defaultdict(lambda: 0)
    non_matches = 0
    for line in self.args['logfile']:
        cl = self.log2code(line)
        if cl:
            codelines[cl.pattern] += 1
        else:
            ll = LogLine(line)
            if ll.operation:
                # skip operations (command, insert, update, delete,
                # query, getmore)
                continue
            if not ll.thread:
                # skip the lines that don't have a thread name
                # (usually map/reduce or assertions)
                continue
            if len(ll.split_tokens) - ll._thread_offset <= 1:
                # skip empty log messages (after thread name)
                continue
            # everything else is a real non-match
            non_matches += 1
            if self.args['verbose']:
                print "couldn't match:", line,
    if self.args['verbose']:
        print
    # print patterns sorted by occurrence count, most frequent first
    for cl in sorted(codelines, key=lambda x: codelines[x], reverse=True):
        print "%8i" % codelines[cl], " ", " ... ".join(cl)
    print
    if non_matches > 0:
        print "couldn't match %i lines" % non_matches
        if not self.args['verbose']:
            print "to show non-matched lines, run with --verbose."
def run(self, arguments=None): LogFileTool.run(self, arguments) # store in current local folder mlogvis_dir = '.' # change stdin logfile name and remove the < > logname = self.args['logfile'].name if logname == '<stdin>': logname = 'stdin' os.chdir(mlogvis_dir) data_path = os.path.join(os.path.dirname(mtools.__file__), 'data') srcfilelocation = os.path.join(data_path, 'index.html') json_docs = self._export(True) if not json_docs: json_docs = self._export(False) outf = '{"type": "duration", "logfilename": "' + logname + '", "data":[' + json_docs + ']}' dstfilelocation = os.path.join(os.getcwd(), '%s.html' % logname) print "copying %s to %s" % (srcfilelocation, dstfilelocation) srcfile = open(srcfilelocation) contents = srcfile.read() srcfile.close() dstfile = open(dstfilelocation, 'wt') replaced_contents = contents.replace('this.width = 980', 'this.width = 480') replaced_contents = replaced_contents.replace('this.height = 500', 'this.height = 270') replaced_contents = replaced_contents.replace('##REPLACE##', outf) dstfile.write(replaced_contents) dstfile.close() print "serving visualization on file://" + dstfilelocation
def run(self, arguments=None):
    """ Print out useful information about the log file.

    For each given log file prints source, start/end times, date format,
    length, binary and inferred version, then runs all active sections.
    """
    LogFileTool.run(self, arguments)
    for i, self.logfile in enumerate(self.args['logfile']):
        # separator between consecutive files
        if i > 0:
            print
            print ' ------------------------------------------'
            print
        print " source: %s" % self.logfile.name
        print " start: %s" % (
            self.logfile.start.strftime("%Y %b %d %H:%M:%S")
            if self.logfile.start else "unknown")
        # NOTE(review): guard tests .start, not .end -- presumably both
        # are set together; verify in LogFile
        print " end: %s" % (
            self.logfile.end.strftime("%Y %b %d %H:%M:%S")
            if self.logfile.start else "unknown")
        # TODO: add timezone if iso8601 format
        print "date format: %s" % self.logfile.datetime_format
        print " length: %s" % len(self.logfile)
        print " binary: %s" % (self.logfile.binary or "unknown")
        version = (' -> '.join(self.logfile.versions) or "unknown")
        # if version is unknown, go by date
        if version == 'unknown':
            if self.logfile.datetime_format == 'ctime-pre2.4':
                version = '< 2.4 (no milliseconds)'
            elif self.logfile.datetime_format == 'ctime':
                version = '>= 2.4 (milliseconds present)'
            elif self.logfile.datetime_format.startswith('iso8601-'):
                version = '>= 2.6 (iso8601 format)'
        print " version: %s" % version,
        print
        # now run all sections
        for section in self.sections:
            if section.active:
                print
                print section.name.upper()
                section.run()
def run(self, arguments=None): LogFileTool.run(self, arguments) # store in current local folder mlogvis_dir = '.' # change stdin logfile name and remove the < > logname = self.args['logfile'].name if logname == '<stdin>': logname = 'stdin' os.chdir(mlogvis_dir) data_path = os.path.join(os.path.dirname(mtools.__file__), 'data') srcfilelocation = os.path.join(data_path, 'index.html') json_docs = self._export(True) if not json_docs: json_docs = self._export(False) outf = '{"type": "duration", "logfilename": "' + logname + '", "data":[' + json_docs + ']}' dstfilelocation = os.path.join(os.getcwd(), '%s.html'%logname) print "copying %s to %s" % (srcfilelocation, dstfilelocation) srcfile = open(srcfilelocation) contents = srcfile.read() srcfile.close() dstfile = open(dstfilelocation, 'wt') replaced_contents = contents.replace('this.width = 980', 'this.width = 480') replaced_contents = replaced_contents.replace('this.height = 500', 'this.height = 270') replaced_contents = replaced_contents.replace('##REPLACE##', outf) dstfile.write(replaced_contents) dstfile.close() print "serving visualization on file://"+dstfilelocation
def run(self):
    """Merge multiple log files into one chronologically sorted stream.

    Repeatedly picks the line with the smallest timestamp across all
    open files (a k-way merge), optionally applies per-file timezone
    offsets and per-file labels (--labels enum/alpha/none/filename or an
    explicit list), and prints the merged lines to stdout.
    """
    LogFileTool.run(self)
    logfiles = self.args['logfile']
    # handle labels parameter
    if len(self.args['labels']) == 1:
        label = self.args['labels'][0]
        if label == 'enum':
            # {1}, {2}, ...
            labels = ['{%i}' % (i+1) for i in range(len(logfiles))]
        elif label == 'alpha':
            # {a}, {b}, ...
            labels = ['{%s}' % chr(97+i) for i in range(len(logfiles))]
        elif label == 'none':
            labels = [None for _ in logfiles]
        elif label == 'filename':
            labels = ['{%s}' % fn.name for fn in logfiles]
    elif len(self.args['labels']) == len(logfiles):
        labels = self.args['labels']
    else:
        raise SystemExit('Error: Number of labels not the same as '
                         'number of files.')
    # handle timezone parameter: one value applies to all files,
    # otherwise one value per file (or none at all -> offset 0)
    if len(self.args['timezone']) == 1:
        self.args['timezone'] = self.args['timezone'] * len(logfiles)
    elif len(self.args['timezone']) == len(logfiles):
        pass
    elif len(self.args['timezone']) == 0:
        self.args['timezone'] = [0] * len(logfiles)
    else:
        raise SystemExit('Error: Invalid number of timezone parameters. '
                         'Use either one parameter (for global '
                         'adjustment) or the number of log files (for '
                         'individual adjustments).')
    # handle position parameter (where to insert the label in the line)
    position = self.args['pos']
    if position != 'eol':
        position = int(position)
    # define minimum and maximum datetime object
    mindate = datetime(MINYEAR, 1, 1, 0, 0, 0)
    maxdate = datetime(MAXYEAR, 12, 31, 23, 59, 59)
    # open files, read first lines, extract first dates
    lines = [f.readline() for f in logfiles]
    dates = [LogLine(l).datetime for l in lines]
    # replace all non-dates with mindate so undated lines sort first
    dates = [d if d else mindate for d in dates]
    # apply per-file timezone offsets
    dates = [d + timedelta(hours=self.args['timezone'][i])
             for i, d in enumerate(dates) if d]
    while any([l != '' for l in lines]):
        # pick smallest date of all non-empty lines (exhausted files are
        # pushed to maxdate so they are never selected)
        condDates = ([d if lines[i] != '' else maxdate
                      for i, d in enumerate(dates)])
        minCondDate = min(condDates)
        minIndex = condDates.index(minCondDate)
        # print out current line
        currLine = lines[minIndex].rstrip()
        try:
            oldDate = minCondDate - timedelta(
                hours=self.args['timezone'][minIndex])
        except OverflowError:
            # subtracting the offset fell outside the datetime range
            oldDate = minCondDate
        if minCondDate != mindate:
            # rewrite the timestamp in the line to the adjusted time
            currLine = currLine.replace(
                oldDate.strftime('%a %b %d %H:%M:%S'),
                minCondDate.strftime('%a %b %d %H:%M:%S'))
        if labels[minIndex]:
            if position == 0 or minCondDate == mindate:
                print labels[minIndex], currLine
            elif position == 'eol':
                print currLine, labels[minIndex]
            else:
                # insert label after the first `position` tokens
                tokens = currLine.split()
                print " ".join(tokens[:position]), labels[minIndex], \
                      " ".join(tokens[position:])
        else:
            print currLine
        # update lines and dates for that line
        lines[minIndex] = logfiles[minIndex].readline()
        dates[minIndex] = LogLine(lines[minIndex]).datetime
        if not dates[minIndex]:
            dates[minIndex] = mindate
        else:
            dates[minIndex] += timedelta(
                hours=self.args['timezone'][minIndex])
def run(self, arguments=None):
    """Print useful information about the log file.

    For each logfile argument: start/end timestamps, host, datetime
    format, timezone, length, binary name, detected server version and
    storage engine, then each active report section.
    """
    LogFileTool.run(self, arguments)

    # at least one logfile argument is required
    if (self.args['logfile'] is None or len(self.args['logfile']) == 0):
        self.argparser.print_usage()
        print("\nERROR: At least one logfile argument must be provided")
        self.argparser.exit()

    for i, self.logfile in enumerate(self.args['logfile']):
        # separator between multiple logfiles
        if i > 0:
            print("\n ------------------------------------------\n")

        if self.logfile.datetime_format == 'ctime-pre2.4':
            # no milliseconds when datetime format doesn't support it
            start_time = (self.logfile.start.strftime("%Y %b %d %H:%M:%S")
                          if self.logfile.start else "unknown")
            # BUGFIX: guard on .end, not .start — a file with a start but
            # no end previously raised AttributeError here
            end_time = (self.logfile.end.strftime("%Y %b %d %H:%M:%S")
                        if self.logfile.end else "unknown")
        else:
            # include milliseconds (truncate %f microseconds to 3 digits)
            start_time = (self.logfile.start.strftime("%Y %b %d "
                                                      "%H:%M:%S.%f")[:-3]
                          if self.logfile.start else "unknown")
            # BUGFIX: same guard correction as above
            end_time = (self.logfile.end.strftime("%Y %b %d "
                                                  "%H:%M:%S.%f")[:-3]
                        if self.logfile.end else "unknown")

        print(" source: %s" % self.logfile.name)
        print(" host: %s" % (self.logfile.hostname + ':'
                             + str(self.logfile.port)
                             if self.logfile.hostname else "unknown"))
        print(" start: %s" % (start_time))
        print(" end: %s" % (end_time))
        print("date format: %s" % self.logfile.datetime_format)

        # self.logfile.timezone is a dateutil.tzinfo object
        tzdt = datetime.datetime.now(self.logfile.timezone)
        if (tzdt.tzname()):
            timezone = tzdt.tzname()
        else:
            # unnamed timezone: fall back to the numeric UTC offset
            timezone = f"UTC {tzdt.strftime('%z')}"
        print(" timezone: %s" % timezone)

        print(" length: %s" % len(self.logfile))
        print(" binary: %s" % (self.logfile.binary or "unknown"))

        version = (' -> '.join(self.logfile.versions) or "unknown")
        # if version is unknown, infer it from the datetime format
        if version == 'unknown':
            if self.logfile.datetime_format == 'ctime-pre2.4':
                version = '< 2.4 (no milliseconds)'
            elif self.logfile.datetime_format == 'ctime':
                version = '>= 2.4.x ctime (milliseconds present)'
            elif (self.logfile.datetime_format == "iso8601-utc" or
                  self.logfile.datetime_format == "iso8601-local"):
                if self.logfile.has_level:
                    version = '>= 3.0 (iso8601 format, level, component)'
                else:
                    version = '= 2.6.x (iso8601 format)'

        print(" version: %s" % version)
        print(" storage: %s" % (self.logfile.storage_engine or 'unknown'))

        # now run all sections
        for section in self.sections:
            if section.active:
                print("\n%s" % section.name.upper())
                section.run()
def run(self, arguments=None): LogFileTool.run(self, arguments) possible_versions = set(Log2CodeConverter.all_versions) re_versiond = re.compile(r'db version v(\d\.\d\.\d), pdfile version') re_versions = re.compile(r'MongoS version (\d\.\d\.\d) starting:') re_brackets = re.compile(r'\[\w+\]') for i, line in enumerate(self.args['logfile']): match = re_brackets.search(line) if not match: continue start = match.end() # check for explicit version string match = re_versiond.search(line[start:]) or re_versions.search( line[start:]) if match: version = match.group(1) print "%32s %s" % ("restart detected in log line %i:" % (i + 1), line.rstrip()) print "%32s %s" % ("previous possible versions:", ", ".join( [pv[1:] for pv in sorted(possible_versions)])) print "%32s %s" % ("version after restart is:", version) print possible_versions = set(["r" + version]) if len(possible_versions) == 1: # from here on, version is known, skip to next section continue ll = LogLine(line) if ll.operation != None: # if log line is a known command operation (query, update, command, ...) skip continue lcl = self.log2code(line[start:]) if lcl: old_len = len(possible_versions) possible_versions = possible_versions & set(lcl.versions) if len(possible_versions) != old_len: print "%32s %s" % ("log line %i:" % (i + 1), line.rstrip()) print "%32s %s" % ("matched pattern:", " ... ".join( lcl.pattern)) print "%32s %s" % ("only present in:", ", ".join( sorted(lcl.versions))) print "%32s %s" % ("possible versions now:", ", ".join( sorted(possible_versions))) print if len(possible_versions) == 0: print "empty version set. exiting." raise SystemExit if len(possible_versions) > 1: print "possible versions at end of file:", ", ".join( [pv[1:] for pv in sorted(possible_versions)]) else: print "version at end of file: ", possible_versions.pop()[1:]
def run(self, arguments=None): """ parses the logfile and asks each filter if it accepts the line. it will only be printed if all filters accept the line. """ # add arguments from filter classes before calling superclass run for f in self.filters: for fa in f.filterArgs: self.argparser.add_argument(fa[0], **fa[1]) # now parse arguments and post-process LogFileTool.run(self, arguments) self.args = dict((k, self.args[k] if k in ['logfile', 'markers', 'timezone'] else self._arrayToString(self.args[k])) for k in self.args) # make sure logfile is always a list, even if 1 is provided through sys.stdin if type(self.args['logfile']) != types.ListType: self.args['logfile'] = [self.args['logfile']] # require at least 1 log file (either through stdin or as parameter) if len(self.args['logfile']) == 0: raise SystemExit('Error: Need at least 1 log file, either as command line parameter or through stdin.') # handle timezone parameter if len(self.args['timezone']) == 1: self.args['timezone'] = self.args['timezone'] * len(self.args['logfile']) elif len(self.args['timezone']) == len(self.args['logfile']): pass elif len(self.args['timezone']) == 0: self.args['timezone'] = [0] * len(self.args['logfile']) else: raise SystemExit('Error: Invalid number of timezone parameters. 
Use either one parameter (for global adjustment) or the number of log files (for individual adjustments).') # create filter objects from classes and pass args self.filters = [f(self) for f in self.filters] # remove non-active filter objects self.filters = [f for f in self.filters if f.active] # call setup for each active filter for f in self.filters: f.setup() if self.args['shorten'] != False: if self.args['shorten'] == None: self.args['shorten'] = 200 if self.args['verbose']: print "mlogfilter> command line arguments" for a in self.args: print "mlogfilter> %8s: %s" % (a, self.args[a]) print print "mlogfilter> active filters:", print ', '.join([f.__class__.__name__ for f in self.filters]) print # handle markers parameter if len(self.args['markers']) == 1: marker = self.args['markers'][0] if marker == 'enum': self.args['markers'] = ['{%i}'%(i+1) for i in range(len(self.args['logfile']))] elif marker == 'alpha': self.args['markers'] = ['{%s}'%chr(97+i) for i in range(len(self.args['logfile']))] elif marker == 'none': self.args['markers'] = [None for _ in self.args['logfile']] elif marker == 'filename': self.args['markers'] = ['{%s}'%logfile.name for logfile in self.args['logfile']] elif len(self.args['markers']) == len(self.args['logfile']): pass else: raise SystemExit('Error: Number of markers not the same as number of files.') # with --human, change to ctime format if not specified otherwise if self.args['timestamp_format'] == 'none' and self.args['human']: self.args['timestamp_format'] = 'ctime' # go through each line and ask each filter if it accepts if not 'logfile' in self.args or not self.args['logfile']: raise SystemExit('no logfile found.') for logevent in self.logfile_generator(): if self.args['exclude']: # print line if any filter disagrees if any([not f.accept(logevent) for f in self.filters]): self._outputLine(logevent, self.args['shorten'], self.args['human']) else: # only print line if all filters agree if all([f.accept(logevent) for f in 
self.filters]): self._outputLine(logevent, self.args['shorten'], self.args['human']) # if at least one filter refuses to accept any remaining lines, stop if any([f.skipRemaining() for f in self.filters]): # if input is not stdin if sys.stdin.isatty(): break
def run(self, arguments=None): """ parses the logfile and asks each filter if it accepts the line. it will only be printed if all filters accept the line. """ # add arguments from filter classes before calling superclass run for f in self.filters: for fa in f.filterArgs: self.argparser.add_argument(fa[0], **fa[1]) # now parse arguments and post-process LogFileTool.run(self, arguments) self.args = dict((k, self.args[k] if k in ['logfile', 'markers', 'timezone'] else self._arrayToString(self.args[k])) for k in self.args) # make sure logfile is always a list, even if 1 is provided through sys.stdin if type(self.args['logfile']) != types.ListType: self.args['logfile'] = [self.args['logfile']] # require at least 1 log file (either through stdin or as parameter) if len(self.args['logfile']) == 0: raise SystemExit('Error: Need at least 1 log file, either as command line parameter or through stdin.') # handle timezone parameter if len(self.args['timezone']) == 1: self.args['timezone'] = self.args['timezone'] * len(self.args['logfile']) elif len(self.args['timezone']) == len(self.args['logfile']): pass elif len(self.args['timezone']) == 0: self.args['timezone'] = [0] * len(self.args['logfile']) else: raise SystemExit('Error: Invalid number of timezone parameters. 
Use either one parameter (for global adjustment) or the number of log files (for individual adjustments).') # create filter objects from classes and pass args self.filters = [f(self) for f in self.filters] # remove non-active filter objects self.filters = [f for f in self.filters if f.active] # call setup for each active filter for f in self.filters: f.setup() if self.args['shorten'] != False: if self.args['shorten'] == None: self.args['shorten'] = 200 if self.args['verbose']: print "command line arguments" for a in self.args: print " %s: %s" % (a, self.args[a]) print print "active filters:", print ', '.join([f.__class__.__name__ for f in self.filters]) print print '====================' # handle markers parameter if len(self.args['markers']) == 1: marker = self.args['markers'][0] if marker == 'enum': self.args['markers'] = ['{%i}'%(i+1) for i in range(len(self.args['logfile']))] elif marker == 'alpha': self.args['markers'] = ['{%s}'%chr(97+i) for i in range(len(self.args['logfile']))] elif marker == 'none': self.args['markers'] = [None for _ in self.args['logfile']] elif marker == 'filename': self.args['markers'] = ['{%s}'%logfile.name for logfile in self.args['logfile']] elif len(self.args['markers']) == len(self.args['logfile']): pass else: raise SystemExit('Error: Number of markers not the same as number of files.') # with --human, change to ctime format if not specified otherwise if self.args['timestamp_format'] == 'none' and self.args['human']: self.args['timestamp_format'] = 'ctime' # go through each line and ask each filter if it accepts if not 'logfile' in self.args or not self.args['logfile']: raise SystemExit('no logfile found.') for logevent in self.logfile_generator(): if self.args['exclude']: # print line if any filter disagrees if any([not f.accept(logevent) for f in self.filters]): self._outputLine(logevent, self.args['shorten'], self.args['human']) else: # only print line if all filters agree if all([f.accept(logevent) for f in self.filters]): 
self._outputLine(logevent, self.args['shorten'], self.args['human']) # if at least one filter refuses to accept any remaining lines, stop if any([f.skipRemaining() for f in self.filters]): # if input is not stdin if sys.stdin.isatty(): break
def run(self, arguments=None): """ parses the logfile and asks each filter if it accepts the line. it will only be printed if all filters accept the line. """ # add arguments from filter classes before calling superclass run for f in self.filters: for fa in f.filterArgs: self.argparser.add_argument(fa[0], **fa[1]) # now parse arguments and post-process LogFileTool.run(self, arguments) self.args = dict( (k, self.args[k] if k in ["logfile", "markers", "timezone"] else self._arrayToString(self.args[k])) for k in self.args ) # make sure logfile is always a list, even if 1 is provided through sys.stdin if type(self.args["logfile"]) != types.ListType: self.args["logfile"] = [self.args["logfile"]] # handle timezone parameter if len(self.args["timezone"]) == 1: self.args["timezone"] = self.args["timezone"] * len(self.args["logfile"]) elif len(self.args["timezone"]) == len(self.args["logfile"]): pass elif len(self.args["timezone"]) == 0: self.args["timezone"] = [0] * len(self.args["logfile"]) else: raise SystemExit( "Error: Invalid number of timezone parameters. Use either one parameter (for global adjustment) or the number of log files (for individual adjustments)." 
) # create filter objects from classes and pass args self.filters = [f(self) for f in self.filters] # remove non-active filter objects self.filters = [f for f in self.filters if f.active] # call setup for each active filter for f in self.filters: f.setup() if self.args["shorten"] != False: if self.args["shorten"] == None: self.args["shorten"] = 200 if self.args["verbose"]: print "mlogfilter> command line arguments" for a in self.args: print "mlogfilter> %8s: %s" % (a, self.args[a]) print print "mlogfilter> active filters:", print ", ".join([f.__class__.__name__ for f in self.filters]) print # handle markers parameter if len(self.args["markers"]) == 1: marker = self.args["markers"][0] if marker == "enum": self.args["markers"] = ["{%i}" % (i + 1) for i in range(len(self.args["logfile"]))] elif marker == "alpha": self.args["markers"] = ["{%s}" % chr(97 + i) for i in range(len(self.args["logfile"]))] elif marker == "none": self.args["markers"] = [None for _ in self.args["logfile"]] elif marker == "filename": self.args["markers"] = ["{%s}" % fn.name for fn in self.args["logfile"]] elif len(self.args["markers"]) == len(self.args["logfile"]): pass else: raise SystemExit("Error: Number of markers not the same as number of files.") # with --human, change to ctime format if not specified otherwise if self.args["timestamp_format"] == "none" and self.args["human"]: self.args["timestamp_format"] = "ctime" # go through each line and ask each filter if it accepts if not "logfile" in self.args or not self.args["logfile"]: raise SystemExit("no logfile found.") for logline in self.logfile_generator(): if self.args["exclude"]: # print line if any filter disagrees if any([not f.accept(logline) for f in self.filters]): self._outputLine(logline, self.args["shorten"], self.args["human"]) else: # only print line if all filters agree if all([f.accept(logline) for f in self.filters]): self._outputLine(logline, self.args["shorten"], self.args["human"]) # if at least one filter refuses to 
accept any remaining lines, stop if any([f.skipRemaining() for f in self.filters]): # if input is not stdin if sys.stdin.isatty(): break