def __init__(self):
    """Configure the argument parser and plot-type registry for mplotqueries."""
    LogFileTool.__init__(self, multiple_logfiles=True, stdin_allowed=True)

    self.argparser.description = (
        'A script to plot various information from logfiles. Clicking on '
        'any of the plot points will print the corresponding log line to '
        'stdout.')

    # disable matplotlib's default x/y scale keyboard shortcuts so they
    # don't interfere with this tool's own key handling
    plt.rcParams['keymap.xscale'] = ''
    plt.rcParams['keymap.yscale'] = ''

    # discover every plot type class in the plottypes module and index
    # them by their plot_type_str identifier
    self.plot_types = dict(
        (cls.plot_type_str, cls)
        for _, cls in inspect.getmembers(plottypes, inspect.isclass))
    self.plot_instances = []

    # main parser arguments
    self.argparser.add_argument(
        '--logscale', action='store_true',
        help='plot y-axis in logarithmic scale (default=off)')
    self.argparser.add_argument(
        '--overlay', action='store', nargs='?', default=None, const='add',
        choices=['add', 'list', 'reset'],
        help="create combinations of several plots. Use '--overlay' to "
             "create an overlay (this will not plot anything). The first "
             "call without '--overlay' will additionally plot all existing "
             "overlays. Use '--overlay reset' to clear all overlays.")
    self.argparser.add_argument(
        '--type', action='store', default='scatter',
        choices=self.plot_types.keys(),
        help='type of plot (default=scatter with --yaxis duration).')
    self.argparser.add_argument(
        '--title', action='store', default=None,
        help='change the title of the plot (default=filename(s))')
    self.argparser.add_argument(
        '--group',
        help="specify value to group on. Possible values depend on type "
             "of plot. All basic plot types can group on 'namespace', "
             "'operation', 'thread', 'pattern', range and histogram plots "
             "can additionally group on 'log2code'. The group can also be "
             "a regular expression.")
    self.argparser.add_argument(
        '--group-limit', metavar='N', type=int, default=None,
        help="specify an upper limit of the number of groups. Groups are "
             "sorted by number of data points. If limit is specified, "
             "only the top N will be listed separately, the rest are "
             "grouped together in an 'others' group")
    self.argparser.add_argument(
        '--no-others', action='store_true', default=False,
        help="if this flag is used, the 'others' group (see "
             "--group-limit) will be discarded.")
    self.argparser.add_argument(
        '--optime-start', action='store_true', default=False,
        help="plot operations with a duration when they started instead "
             "(by subtracting the duration). The default is to plot them "
             "when they finish (at the time they are logged).")

    self.legend = None

    # progress bar only makes sense when reading files, not when piped
    self.progress_bar_enabled = not self.is_stdin
def __init__(self):
    """Set up the argument parser for the browser-based log visualizer."""
    LogFileTool.__init__(self, multiple_logfiles=False, stdin_allowed=True)

    self.argparser.description = (
        'mongod/mongos log file visualizer (browser edition). Extracts '
        'information from each line of the log file and outputs a html '
        'file that can be viewed in a browser. Automatically opens a '
        'browser tab and shows the file.')

    # output / browser behavior options
    self.argparser.add_argument(
        '--no-browser', action='store_true',
        help='only creates .html file, but does not open the browser.')
    self.argparser.add_argument(
        '--out', '-o', action='store', default=None,
        help='filename to output. Default is <original logfile>.html')
    self.argparser.add_argument(
        '--line-max', action='store', type=int, default=10000,
        help='max count of datapoints at which actual log line strings '
             'are not printed any more.')
def run(self, arguments=None): LogFileTool.run(self, arguments, get_unknowns=True) self.parse_logevents() self.group() if self.args['overlay'] == 'reset': self.remove_overlays() # if --overlay is set, save groups in a file, else load groups and plot if self.args['overlay'] == "list": self.list_overlays() raise SystemExit plot_specified = not sys.stdin.isatty() or len(self.args['logfile']) > 0 # if no plot is specified (either pipe or filename(s)) and reset, quit now if not plot_specified and self.args['overlay'] == 'reset': raise SystemExit if self.args['overlay'] == "" or self.args['overlay'] == "add": if plot_specified: self.save_overlay() else: print "Nothing to plot." raise SystemExit # else plot (with potential overlays) if there is something to plot overlay_loaded = self.load_overlays() if plot_specified or overlay_loaded: self.plot() else: print "Nothing to plot." raise SystemExit
def __init__(self):
    """Set up argparser, plot-type registry and logfile ranges."""
    LogFileTool.__init__(self, multiple_logfiles=True, stdin_allowed=True)

    self.argparser.description = (
        'A script to plot various information from logfiles. Clicking on '
        'any of the plot points will print the corresponding log line to '
        'stdout.')

    # register every class found in the plottypes module, keyed by its
    # plot_type_str identifier
    self.plot_types = dict(
        (member.plot_type_str, member)
        for _, member in inspect.getmembers(plottypes, inspect.isclass))
    self.plot_instances = []

    # main parser arguments
    self.argparser.add_argument(
        '--exclude-ns', action='store', nargs='*', metavar='NS',
        help='(deprecated) use a prior mlogfilter instead.')
    self.argparser.add_argument(
        '--ns', action='store', nargs='*', metavar='NS',
        help='(deprecated) use a prior mlogfilter instead. ')
    self.argparser.add_argument(
        '--logscale', action='store_true',
        help='plot y-axis in logarithmic scale (default=off)')
    self.argparser.add_argument(
        '--overlay', action='store', nargs='?', default=None, const='add',
        choices=['add', 'list', 'reset'],
        help="create combinations of several plots. Use '--overlay' to "
             "create an overlay (this will not plot anything). The first "
             "call without '--overlay' will additionally plot all existing "
             "overlays. Use '--overlay reset' to clear all overlays.")
    self.argparser.add_argument(
        '--type', action='store', default='scatter',
        choices=self.plot_types.keys(),
        help='type of plot (default=scatter with --yaxis duration).')
    self.argparser.add_argument(
        '--group',
        help="specify value to group on. Possible values depend on type "
             "of plot. All basic plot types can group on 'namespace', "
             "'operation', 'thread', range and histogram plots can "
             "additionally group on 'log2code'.")

    self.legend = None

    # (start, end) ranges of each parsed logfile
    self.logfile_ranges = []
def run(self, arguments=None): LogFileTool.run(self, arguments, get_unknowns=True) self.parse_loglines() self.group() if self.args['overlay'] == 'reset': self.remove_overlays() # if --overlay is set, save groups in a file, else load groups and plot if self.args['overlay'] == "list": self.list_overlays() raise SystemExit plot_specified = not sys.stdin.isatty() or len(self.args['logfile']) > 0 # if no plot is specified (either pipe or filename(s)) and reset, quit now if not plot_specified and self.args['overlay'] == 'reset': raise SystemExit if self.args['overlay'] == "" or self.args['overlay'] == "add": if plot_specified: self.save_overlay() else: print "Nothing to plot." raise SystemExit # else plot (with potential overlays) if there is something to plot overlay_loaded = self.load_overlays() if plot_specified or overlay_loaded: self.plot() else: print "Nothing to plot." raise SystemExit
def __init__(self):
    """Build the mplotqueries argument parser and plot-type registry."""
    LogFileTool.__init__(self, multiple_logfiles=True, stdin_allowed=True)

    self.argparser.description = (
        'A script to plot various information from logfiles. '
        'Clicking on any of the plot points will print the corresponding '
        'log line to stdout.')

    # disable some default matplotlib shortcuts that clash with our keys
    plt.rcParams['keymap.xscale'] = ''
    plt.rcParams['keymap.yscale'] = ''

    # collect all plot type classes from the plottypes module, indexed
    # by their plot_type_str identifier
    members = inspect.getmembers(plottypes, inspect.isclass)
    self.plot_types = dict((cls.plot_type_str, cls) for _, cls in members)
    self.plot_instances = []
    self.plot_instance = None

    # main parser arguments
    self.argparser.add_argument(
        '--logscale', action='store_true',
        help='plot y-axis in logarithmic scale (default=off)')
    self.argparser.add_argument(
        '--overlay', action='store', nargs='?', default=None, const='add',
        choices=['add', 'list', 'reset'],
        help="create combinations of several plots. Use '--overlay' to "
             "create an overlay (this will not plot anything). The first "
             "call without '--overlay' will additionally plot all existing "
             "overlays. Use '--overlay reset' to clear all overlays.")
    self.argparser.add_argument(
        '--type', action='store', default='scatter',
        choices=self.plot_types.keys(),
        help='type of plot (default=scatter with --yaxis duration).')
    self.argparser.add_argument(
        '--title', action='store', default=None,
        help='change the title of the plot (default=filename(s))')
    self.argparser.add_argument(
        '--group',
        help="specify value to group on. Possible values depend on type "
             "of plot. All basic plot types can group on 'namespace', "
             "'operation', 'thread', 'pattern', range and histogram plots "
             "can additionally group on 'log2code'. The group can also be "
             "a regular expression.")
    self.argparser.add_argument(
        '--group-limit', metavar='N', type=int, default=None,
        help="specify an upper limit of the number of groups. Groups are "
             "sorted by number of data points. If limit is specified, "
             "only the top N will be listed separately, the rest are "
             "grouped together in an 'others' group")
    self.argparser.add_argument(
        '--no-others', action='store_true', default=False,
        help="if this flag is used, the 'others' group (see "
             "--group-limit) will be discarded.")
    self.argparser.add_argument(
        '--optime-start', action='store_true', default=False,
        help="plot operations with a duration when they started instead "
             "(by subtracting the duration). The default is to plot them "
             "when they finish (at the time they are logged).")
    self.argparser.add_argument(
        '--ylimits', action='store', default=None, type=int, nargs=2,
        metavar='VAL',
        help="if set, limits the y-axis view to [min, max], requires "
             "exactly 2 values.")
    self.argparser.add_argument(
        '--output-file', metavar='FILE', action='store', default=None,
        help="Save the plot to a file instead of displaying it in a window")

    self.legend = None

    # progress bar only when reading from files, not stdin
    self.progress_bar_enabled = not self.is_stdin
def __init__(self):
    """Constructor: add description and shared options to argparser."""
    LogFileTool.__init__(self, multiple_logfiles=True, stdin_allowed=False)

    self.argparser.description = ('Extracts general information from '
                                  'logfile and prints it to stdout.')
    self.argparser.add_argument(
        '--debug', action='store_true',
        help='show debug output (depends on info section)')
    self.argparser.add_argument(
        '--verbose', action='store_true',
        help='show more verbose output (depends on info section)')

    # separate argument group that the individual info sections extend
    self.argparser_sectiongroup = self.argparser.add_argument_group(
        'info sections',
        'Below commands activate additional info sections for the log file.')

    # instantiate every section class found in the sections module
    self.sections = [member(self) for _, member
                     in inspect.getmembers(sections, inspect.isclass)]
def __init__(self):
    """Build the argument parser and register all available plot types."""
    LogFileTool.__init__(self, multiple_logfiles=True, stdin_allowed=True)

    self.argparser.description = (
        'A script to plot various information from logfiles. '
        'Clicking on any of the plot points will print the corresponding '
        'log line to stdout.')

    # every class in the plottypes module is a selectable plot type
    classes = inspect.getmembers(plottypes, inspect.isclass)
    self.plot_types = dict((cls.plot_type_str, cls) for _, cls in classes)
    self.plot_instances = []

    # long help texts, built once for readability
    overlay_help = ("create combinations of several plots. Use "
                    "'--overlay' to create an overlay (this will not plot "
                    "anything). The first call without '--overlay' will "
                    "additionally plot all existing overlays. Use "
                    "'--overlay reset' to clear all overlays.")
    group_help = ("specify value to group on. Possible values depend on "
                  "type of plot. All basic plot types can group on "
                  "'namespace', 'operation', 'thread', range and "
                  "histogram plots can additionally group on 'log2code'.")

    # main parser arguments
    self.argparser.add_argument(
        '--exclude-ns', action='store', nargs='*', metavar='NS',
        help='(deprecated) use a prior mlogfilter instead.')
    self.argparser.add_argument(
        '--ns', action='store', nargs='*', metavar='NS',
        help='(deprecated) use a prior mlogfilter instead. ')
    self.argparser.add_argument(
        '--logscale', action='store_true',
        help='plot y-axis in logarithmic scale (default=off)')
    self.argparser.add_argument(
        '--overlay', action='store', nargs='?', default=None, const='add',
        choices=['add', 'list', 'reset'], help=overlay_help)
    self.argparser.add_argument(
        '--type', action='store', default='scatter',
        choices=self.plot_types.keys(),
        help='type of plot (default=scatter with --yaxis duration).')
    self.argparser.add_argument('--group', help=group_help)

    self.legend = None

    # (start, end) ranges of the parsed logfiles
    self.logfile_ranges = []
def __init__(self):
    """Constructor: collect filter classes and define base arguments."""
    LogFileTool.__init__(self, multiple_logfiles=False, stdin_allowed=True)

    # gather all filter classes defined in the filters module
    self.filters = [member for _, member
                    in inspect.getmembers(filters, inspect.isclass)]

    self.argparser.description = (
        'mongod/mongos log file parser. Use parameters to enable filters. '
        'A line only gets printed if it passes all enabled filters.')
    self.argparser.add_argument(
        '--verbose', action='store_true',
        help='outputs information about the parser and arguments.')
    self.argparser.add_argument(
        '--shorten', action='store', type=int, default=False, nargs='?',
        metavar='LENGTH',
        help='shortens long lines by cutting characters out of the middle '
             'until the length is <= LENGTH (default 200)')
    self.argparser.add_argument(
        '--exclude', action='store_true', default=False,
        help='if set, excludes the matching lines rather than includes them.')
    self.argparser.add_argument(
        '--human', action='store_true',
        help='outputs numbers formatted with commas and milliseconds as '
             'hr,min,sec,ms for easier readability.')
def run(self, arguments=None):
    """Print useful information about each log file to stdout.

    For every logfile given on the command line this prints source,
    host, start/end time, datetime format, length, binary and version
    information, then runs all active info sections.
    """
    LogFileTool.run(self, arguments)

    for i, self.logfile in enumerate(self.args['logfile']):
        if i > 0:
            print("\n ------------------------------------------\n")

        if self.logfile.datetime_format == 'ctime-pre2.4':
            # no milliseconds when datetime format doesn't support it
            start_time = (self.logfile.start.strftime("%Y %b %d %H:%M:%S")
                          if self.logfile.start else "unknown")
            # BUG FIX: guard on .end (not .start) before formatting the
            # end time, otherwise a missing end raises AttributeError
            end_time = (self.logfile.end.strftime("%Y %b %d %H:%M:%S")
                        if self.logfile.end else "unknown")
        else:
            # include milliseconds (%f gives microseconds, strip to 3 digits)
            start_time = (self.logfile.start.strftime("%Y %b %d "
                                                      "%H:%M:%S.%f")[:-3]
                          if self.logfile.start else "unknown")
            end_time = (self.logfile.end.strftime("%Y %b %d "
                                                  "%H:%M:%S.%f")[:-3]
                        if self.logfile.end else "unknown")

        print("     source: %s" % self.logfile.name)
        print("       host: %s" % (self.logfile.hostname + ':'
                                   + str(self.logfile.port)
                                   if self.logfile.hostname else "unknown"))
        print("      start: %s" % (start_time))
        print("        end: %s" % (end_time))

        # TODO: add timezone if iso8601 format
        print("date format: %s" % self.logfile.datetime_format)
        print("     length: %s" % len(self.logfile))
        print("     binary: %s" % (self.logfile.binary or "unknown"))

        version = (' -> '.join(self.logfile.versions) or "unknown")
        # if version is unknown, infer a version range from the datetime
        # format and presence of log levels
        if version == 'unknown':
            if self.logfile.datetime_format == 'ctime-pre2.4':
                version = '< 2.4 (no milliseconds)'
            elif self.logfile.datetime_format == 'ctime':
                version = '>= 2.4.x ctime (milliseconds present)'
            elif (self.logfile.datetime_format == "iso8601-utc"
                  or self.logfile.datetime_format == "iso8601-local"):
                if self.logfile.has_level:
                    version = '>= 3.0 (iso8601 format, level, component)'
                else:
                    version = '= 2.6.x (iso8601 format)'

        print("    version: %s" % version)
        print("    storage: %s" % (self.logfile.storage_engine or 'unknown'))

        # now run all active info sections
        for section in self.sections:
            if section.active:
                print("\n%s" % section.name.upper())
                section.run()
def __init__(self):
    """Constructor: add description and --seed option to argparser."""
    LogFileTool.__init__(self, multiple_logfiles=False, stdin_allowed=True)

    self.argparser.description = ('Anonymizes log files by replacing IP '
                                  'addresses, namespaces, strings.')
    self.argparser.add_argument(
        '--seed', '-s', action='store', metavar='S', default=None,
        help='seed the random number generator with S (any string)')

    # maps original tokens to their anonymized replacements
    self.replacements = {}
def run(self, arguments=None):
    """Print the JSON representation of every line in the logfile.

    Go through each line, convert string to LogLine object, then print
    JSON representation of the line.
    """
    LogFileTool.run(self, arguments)
    for line in self.args['logfile']:
        print LogLine(line).to_json()
def __init__(self):
    """Constructor: add description and output options to argparser."""
    LogFileTool.__init__(self, multiple_logfiles=False, stdin_allowed=True)

    # Use implicit string concatenation instead of backslash line
    # continuations inside the literal: the continuation form leaked the
    # source indentation (runs of spaces) into the user-visible text.
    self.argparser.description = (
        'mongod/mongos log file visualizer (browser edition). Extracts '
        'information from each line of the log file and outputs a html '
        'file that can be viewed in a browser. Automatically opens a '
        'browser tab and shows the file.')
    self.argparser.add_argument(
        '--no-browser', action='store_true',
        help='only creates .html file, but does not open the browser.')
    self.argparser.add_argument(
        '--out', '-o', action='store', default=None,
        help='filename to output. Default is <original logfile>.html')
def __init__(self):
    """Constructor: collect filter classes and define merge-capable arguments."""
    LogFileTool.__init__(self, multiple_logfiles=True, stdin_allowed=True)

    # gather every filter class defined in the filters module
    self.filters = [member for _, member
                    in inspect.getmembers(filters, inspect.isclass)]

    self.argparser.description = (
        'mongod/mongos log file parser. Use parameters to enable filters. '
        'A line only gets printed if it passes all enabled filters. If '
        'several log files are provided, their lines are merged by '
        'timestamp.')

    self.argparser.add_argument(
        '--verbose', action='store_true',
        help='outputs information about the parser and arguments.')
    self.argparser.add_argument(
        '--shorten', action='store', type=int, default=False, nargs='?',
        metavar='LENGTH',
        help='shortens long lines by cutting characters out of the middle '
             'until the length is <= LENGTH (default 200)')
    self.argparser.add_argument(
        '--exclude', action='store_true', default=False,
        help='if set, excludes the matching lines rather than includes them.')
    self.argparser.add_argument(
        '--human', action='store_true',
        help='outputs large numbers formatted with commas and print '
             'milliseconds as hr,min,sec,ms for easier readability.')
    self.argparser.add_argument(
        '--json', action='store_true',
        help='outputs all matching lines in json format rather than the '
             'native log line.')
    self.argparser.add_argument(
        '--markers', action='store', nargs='*', default=['filename'],
        help='use markers when merging several files to distinguish them. '
             'Choose from none, enum, alpha, filename (default), or '
             'provide list.')
    self.argparser.add_argument(
        '--timezone', action='store', nargs='*', default=[], type=int,
        metavar='N',
        help='timezone adjustments: add N hours to corresponding log '
             'file, single value for global adjustment.')
    self.argparser.add_argument(
        '--timestamp-format', action='store', default='none',
        choices=['none', 'ctime-pre2.4', 'ctime', 'iso8601-utc',
                 'iso8601-local'],
        help='choose datetime format for log output')
def __init__(self):
    """Constructor: add description to argparser."""
    LogFileTool.__init__(self, multiple_logfiles=False, stdin_allowed=True)

    # Implicit concatenation instead of a backslash continuation inside
    # the literal, which leaked source indentation into the user-visible
    # description text.
    self.argparser.description = (
        'Groups all log messages in the logfile together and only '
        'displays a distinct set of messages with count')
    self.argparser.add_argument(
        '--verbose', action='store_true', default=False,
        help="outputs lines that couldn't be matched.")
def run(self, arguments=None): """ parses the logfile and asks each filter if it accepts the line. it will only be printed if all filters accept the line. """ # add arguments from filter classes before calling superclass run for f in self.filters: for fa in f.filterArgs: self.argparser.add_argument(fa[0], **fa[1]) # now parse arguments and post-process LogFileTool.run(self, arguments) self.args = dict( (k, self._arrayToString(self.args[k])) for k in self.args) # create filter objects from classes and pass args self.filters = [f(self.args) for f in self.filters] # remove non-active filter objects self.filters = [f for f in self.filters if f.active] # call setup for each active filter for f in self.filters: f.setup() if self.args['shorten'] != False: if self.args['shorten'] == None: self.args['shorten'] = 200 if self.args['verbose']: print "mlogfilter> command line arguments" for a in self.args: print "mlogfilter> %8s: %s" % (a, self.args[a]) # go through each line and ask each filter if it accepts if not 'logfile' in self.args or not self.args['logfile']: exit() for line in self.args['logfile']: logline = LogLine(line) if self.args['exclude']: # print line if any filter disagrees if any([not f.accept(logline) for f in self.filters]): self._outputLine(logline.line_str, self.args['shorten'], self.args['human']) else: # only print line if all filters agree if all([f.accept(logline) for f in self.filters]): self._outputLine(logline.line_str, self.args['shorten'], self.args['human']) # if at least one filter refuses to accept any remaining lines, stop if any([f.skipRemaining() for f in self.filters]): # if input is not stdin if sys.stdin.isatty(): break
def __init__(self):
    """Constructor: define the base mlogfilter arguments."""
    LogFileTool.__init__(self, multiple_logfiles=False, stdin_allowed=True)

    # filters are registered later (e.g. before run() is called)
    self.filters = []

    self.argparser.description = (
        'mongod/mongos log file parser. Use parameters to enable filters. '
        'A line only gets printed if it passes all enabled filters.')
    self.argparser.add_argument(
        '--verbose', action='store_true',
        help='outputs information about the parser and arguments.')
    self.argparser.add_argument(
        '--shorten', action='store', type=int, default=False, nargs='?',
        metavar='LENGTH',
        help='shortens long lines by cutting characters out of the middle '
             'until the length is <= LENGTH (default 200)')
    self.argparser.add_argument(
        '--exclude', action='store_true', default=False,
        help='if set, excludes the matching lines rather than includes them.')
def run(self, arguments=None): LogFileTool.run(self, arguments) possible_versions = set(Log2CodeConverter.all_versions) re_versiond = re.compile(r'db version v(\d\.\d\.\d), pdfile version') re_versions = re.compile(r'MongoS version (\d\.\d\.\d) starting:') re_brackets = re.compile(r'\[\w+\]') for i, line in enumerate(self.args['logfile']): match = re_brackets.search(line) if not match: continue start = match.end() # check for explicit version string match = re_versiond.search(line[start:]) or re_versions.search(line[start:]) if match: version = match.group(1) print "%32s %s" % ("restart detected in log line %i:"%(i+1), line.rstrip()) print "%32s %s" % ("previous possible versions:", ", ".join([pv[1:] for pv in sorted(possible_versions)])) print "%32s %s" % ("version after restart is:", version) print possible_versions = set(["r"+version]) if len(possible_versions) == 1: # from here on, version is known, skip to next section continue ll = LogLine(line) if ll.operation != None: # if log line is a known command operation (query, update, command, ...) skip continue lcl = self.log2code(line[start:]) if lcl: old_len = len(possible_versions) possible_versions = possible_versions & set(lcl.versions) if len(possible_versions) != old_len: print "%32s %s" % ("log line %i:"%(i+1), line.rstrip()) print "%32s %s" % ("matched pattern:", " ... ".join(lcl.pattern)) print "%32s %s" % ("only present in:", ", ".join(sorted(lcl.versions))) print "%32s %s" % ("possible versions now:", ", ".join(sorted(possible_versions))) print if len(possible_versions) == 0: print "empty version set. exiting." raise SystemExit if len(possible_versions) > 1: print "possible versions at end of file:", ", ".join([pv[1:] for pv in sorted(possible_versions)]) else: print "version at end of file: ", possible_versions.pop()[1:]
def run(self): """ parses the logfile and asks each filter if it accepts the line. it will only be printed if all filters accept the line. """ # add arguments from filter classes before calling superclass run for f in self.filters: for fa in f.filterArgs: self.argparser.add_argument(fa[0], **fa[1]) # now parse arguments and post-process LogFileTool.run(self) self.args = dict((k, self._arrayToString(self.args[k])) for k in self.args) # create filter objects from classes and pass args self.filters = [f(self.args) for f in self.filters] # remove non-active filter objects self.filters = [f for f in self.filters if f.active] # call setup for each active filter for f in self.filters: f.setup() if self.args['shorten'] != False: if self.args['shorten'] == None: self.args['shorten'] = 200 if self.args['verbose']: print "mlogfilter> command line arguments" for a in self.args: print "mlogfilter> %8s: %s" % (a, self.args[a]) # go through each line and ask each filter if it accepts if not 'logfile' in self.args or not self.args['logfile']: exit() for line in self.args['logfile']: logline = LogLine(line) if self.args['exclude']: # print line if any filter disagrees if any([not f.accept(logline) for f in self.filters]): self._outputLine(logline.line_str, self.args['shorten']) else: # only print line if all filters agree if all([f.accept(logline) for f in self.filters]): self._outputLine(logline.line_str, self.args['shorten']) # if at least one filter refuses to accept any remaining lines, stop if any([f.skipRemaining() for f in self.filters]): # if input is not stdin if sys.stdin.isatty(): break
def load_file(self, filename):
    """Run LogFileTool on `filename` inside self.log_folder and export.

    The file path is handed to LogFileTool.run via sys.argv; the
    try/finally guarantees sys.argv is restored even when parsing or
    export raises.
    """
    print('load data from %s' % (filename))
    self.mlogtool = LogFileTool()
    filepath = os.path.join(self.log_folder, filename)

    # temporarily append the file path so LogFileTool.run picks it up
    sys.argv.append(filepath)
    try:
        LogFileTool.run(self.mlogtool)
        self._export()
    finally:
        # BUG FIX: always restore sys.argv, even on failure
        sys.argv.pop()
def __init__(self):
    """Constructor: add description and info-section group to argparser."""
    LogFileTool.__init__(self, multiple_logfiles=True, stdin_allowed=False)

    self.argparser.description = ('Extracts general information from '
                                  'logfile and prints it to stdout.')
    self.argparser.add_argument(
        '--verbose', action='store_true',
        help='show more verbose output (depends on info section)')

    # argument group that the individual info sections add their flags to
    self.argparser_sectiongroup = self.argparser.add_argument_group(
        'info sections',
        'Below commands activate additional info sections for the log file.')

    # instantiate all section classes from the sections module
    self.sections = [member(self) for _, member
                     in inspect.getmembers(sections, inspect.isclass)]
def __init__(self):
    """Constructor: discover filter classes and add the base options."""
    LogFileTool.__init__(self, multiple_logfiles=False, stdin_allowed=True)

    # add all filter classes from the filters module
    self.filters = [pair[1] for pair
                    in inspect.getmembers(filters, inspect.isclass)]

    self.argparser.description = ('mongod/mongos log file parser. Use '
                                  'parameters to enable filters. A line '
                                  'only gets printed if it passes all '
                                  'enabled filters.')
    self.argparser.add_argument(
        '--verbose', action='store_true',
        help='outputs information about the parser and arguments.')
    self.argparser.add_argument(
        '--shorten', action='store', type=int, default=False, nargs='?',
        metavar='LENGTH',
        help='shortens long lines by cutting characters out of the '
             'middle until the length is <= LENGTH (default 200)')
    self.argparser.add_argument(
        '--exclude', action='store_true', default=False,
        help='if set, excludes the matching lines rather than includes them.')
    self.argparser.add_argument(
        '--human', action='store_true',
        help='outputs numbers formatted with commas and milliseconds as '
             'hr,min,sec,ms for easier readability.')
def __init__(self):
    """Constructor: register the fixed set of plot types and arguments."""
    LogFileTool.__init__(self, multiple_logfiles=True, stdin_allowed=True)

    self.argparser.description = (
        'A script to plot various information from logfiles. Clicking on '
        'any of the plot points will print the corresponding log line to '
        'stdout.')

    # fixed list of supported plot types, keyed by their identifier
    type_classes = [DurationPlotType, EventPlotType, RangePlotType,
                    RSStatePlotType]
    self.plot_types = dict((tc.plot_type_str, tc) for tc in type_classes)
    self.plot_instances = []

    # main parser arguments
    self.argparser.add_argument(
        '--exclude-ns', action='store', nargs='*', metavar='NS',
        help='namespaces to exclude in the plot')
    self.argparser.add_argument(
        '--ns', action='store', nargs='*', metavar='NS',
        help='namespaces to include in the plot (default=all)')
    self.argparser.add_argument(
        '--logscale', action='store_true',
        help='plot y-axis in logarithmic scale (default=off)')
    self.argparser.add_argument(
        '--overlay', action='store', nargs='?', default=None, const='add',
        choices=['add', 'list', 'reset'])
    self.argparser.add_argument(
        '--type', action='store', default='duration',
        choices=self.plot_types.keys(),
        help='type of plot (default=duration)')

    # --group and --label are mutually exclusive
    mutex = self.argparser.add_mutually_exclusive_group()
    mutex.add_argument(
        '--group',
        help="specify value to group on. Possible values depend on type "
             "of plot. All basic plot types can group on 'namespace', "
             "'operation', 'thread', range plots can additionally group "
             "on 'log2code'.")
    mutex.add_argument(
        '--label',
        help="instead of specifying a group, a label can be specified. "
             "Grouping is then disabled, and the single group for all "
             "data points is named LABEL.")

    self.legend = None
def __init__(self):
    """Constructor: add description and --restarts flag to argparser."""
    LogFileTool.__init__(self, multiple_logfiles=False, stdin_allowed=False)

    self.argparser.description = ('Extracts general information from '
                                  'logfile and prints it to stdout.')
    self.argparser.add_argument(
        '--restarts', action='store_true',
        help='outputs information about every detected restart.')
def run(self, arguments=None): """ Print out useful information about the log file. """ LogFileTool.run(self, arguments) self.logfiles = self.args['logfile'] for i, logfileOpen in enumerate(self.args['logfile']): if i > 0: print print ' ------------------------------------------' print self.logfileOpen = logfileOpen self.logfile = LogFile(logfileOpen) print " filename: %s" % self.args['logfile'][i].name print "start of logfile: %s" % (self.logfile.start.strftime("%b %d %H:%M:%S") if self.logfile.start else "unknown") print " end of logfile: %s" % (self.logfile.end.strftime("%b %d %H:%M:%S") if self.logfile.start else "unknown") # get one logline (within first 20 lines) for datetime format logline = None for i in range(20): try: logline = LogLine(logfileOpen.next()) except StopIteration as e: raise SystemExit("no valid log lines found (datetime not available).") if logline.datetime: break # TODO: add timezone if iso8601 format print " line numbers: %s" % self.logfile.num_lines print " binary: %s" % (self.logfile.binary or "unknown") version = (' -> '.join(self.logfile.versions) or "unknown") # if version is unknown, go by date if version == 'unknown' and logline: if logline.datetime_format == 'ctime-pre2.4': version = '< 2.4 (no milliseconds)' elif logline.datetime_format == 'ctime': version = '>= 2.4 (milliseconds present)' elif logline.datetime_format.startswith('iso8601-'): version = '>= 2.6 (iso8601 format)' print " version: %s" % version, print # now run all sections for section in self.sections: if section.active: print print section.name.upper() section.run()
def run(self): LogFileTool.run(self) # store in current local folder mlogvis_dir = '.' # change stdin logfile name and remove the < > logname = self.args['logfile'].name if logname == '<stdin>': logname = 'stdin' os.chdir(mlogvis_dir) data_path = os.path.join(os.path.dirname(mtools.__file__), 'data') srcfilelocation = os.path.join(data_path, 'index.html') outf = '{"type": "duration", "logfilename": "' + logname + '", "data":[' first_row = True for line in self.args['logfile']: logline = LogLine(line) # group regular connections together if logline.datetime and logline.duration: if logline.thread and logline.thread.startswith("conn"): logline._thread = "conn####" # write log line out as json if not first_row: # prepend comma and newline outf += ',\n' else: first_row = False outf += logline.to_json([ 'line_str', 'datetime', 'operation', 'thread', 'namespace', 'nscanned', 'nreturned', 'duration' ]) outf += ']}' dstfilelocation = os.path.join(os.getcwd(), '%s.html' % logname) print "copying %s to %s" % (srcfilelocation, dstfilelocation) srcfile = open(srcfilelocation) contents = srcfile.read() srcfile.close() dstfile = open(dstfilelocation, 'wt') replaced_contents = contents.replace('##REPLACE##', outf) dstfile.write(replaced_contents) dstfile.close() print "serving visualization on file://" + dstfilelocation webbrowser.open("file://" + dstfilelocation)
def run(self): LogFileTool.run(self) # store in current local folder mlogvis_dir = "." # change stdin logfile name and remove the < > logname = self.args["logfile"].name if logname == "<stdin>": logname = "stdin" os.chdir(mlogvis_dir) data_path = os.path.join(os.path.dirname(mtools.__file__), "data") srcfilelocation = os.path.join(data_path, "index.html") outf = '{"type": "duration", "logfilename": "' + logname + '", "data":[' first_row = True for line in self.args["logfile"]: logline = LogLine(line) # group regular connections together if logline.datetime and logline.duration: if logline.thread and logline.thread.startswith("conn"): logline._thread = "conn####" # write log line out as json if not first_row: # prepend comma and newline outf += ",\n" else: first_row = False outf += logline.to_json( ["line_str", "datetime", "operation", "thread", "namespace", "nscanned", "nreturned", "duration"] ) outf += "]}" dstfilelocation = os.path.join(os.getcwd(), "%s.html" % logname) print "copying %s to %s" % (srcfilelocation, dstfilelocation) srcfile = open(srcfilelocation) contents = srcfile.read() srcfile.close() dstfile = open(dstfilelocation, "wt") replaced_contents = contents.replace("##REPLACE##", outf) dstfile.write(replaced_contents) dstfile.close() print "serving visualization on file://" + dstfilelocation webbrowser.open("file://" + dstfilelocation)
def run(self, arguments=None): """ Print out useful information about the log file. """ LogFileTool.run(self, arguments) for i, self.logfile in enumerate(self.args['logfile']): if i > 0: print print ' ------------------------------------------' print if self.logfile.datetime_format == 'ctime-pre2.4': # no milliseconds when datetime format doesn't support it start_time = self.logfile.start.strftime("%Y %b %d %H:%M:%S") if self.logfile.start else "unknown" end_time = self.logfile.end.strftime("%Y %b %d %H:%M:%S") if self.logfile.start else "unknown" else: # include milliseconds start_time = self.logfile.start.strftime("%Y %b %d %H:%M:%S.%f")[:-3] if self.logfile.start else "unknown" end_time = self.logfile.end.strftime("%Y %b %d %H:%M:%S.%f")[:-3] if self.logfile.start else "unknown" print " source: %s" % self.logfile.name print " host: %s" % (self.logfile.hostname + ':' + self.logfile.port if self.logfile.hostname else "unknown") print " start: %s" % (start_time) print " end: %s" % (end_time) # TODO: add timezone if iso8601 format print "date format: %s" % self.logfile.datetime_format print " length: %s" % len(self.logfile) print " binary: %s" % (self.logfile.binary or "unknown") version = (' -> '.join(self.logfile.versions) or "unknown") # if version is unknown, go by date if version == 'unknown': if self.logfile.datetime_format == 'ctime-pre2.4': version = '< 2.4 (no milliseconds)' elif self.logfile.datetime_format == 'ctime': version = '>= 2.4 (milliseconds present)' elif self.logfile.datetime_format == "iso8601-utc" or \ self.logfile.datetime_format == "iso8601-local": version = '>= 2.6 (iso8601 format)' print " version: %s" % version, print # now run all sections for section in self.sections: if section.active: print print section.name.upper() section.run()
def run(self, arguments=None): """ Print out useful information about the log file. """ LogFileTool.run(self, arguments) logfile = LogFile(self.args['logfile']) print "start of logfile: %s" % (logfile.start.strftime( "%b %d %H:%M:%S") if logfile.start else "unknown") print " end of logfile: %s" % (logfile.end.strftime("%b %d %H:%M:%S") if logfile.start else "unknown") # get one logline (within first 20 lines) for datetime format logline = None for i in range(20): try: logline = LogLine(self.args['logfile'].next()) except StopIteration as e: raise SystemExit( "no valid log lines found (datetime not available).") if logline.datetime: break # TODO: add timezone if iso8601 format print " line numbers: %s" % logfile.num_lines print " binary: %s" % (logfile.binary or "unknown") version = (' -> '.join(logfile.versions) or "unknown") # if version is unknown, go by date if version == 'unknown' and logline: if logline.datetime_format == 'ctime-pre2.4': version = '< 2.4 (no milliseconds)' elif logline.datetime_format == 'ctime': version = '>= 2.4 (milliseconds present)' elif logline.datetime_format.startswith('iso8601-'): version = '>= 2.6 (iso8601 format)' print " version: %s" % version # restarts section if self.args['restarts']: print print "RESTARTS" for version, logline in logfile.restarts: print " %s version %s" % ( logline.datetime.strftime("%b %d %H:%M:%S"), version) if len(logfile.restarts) == 0: print " no restarts found"
def __init__(self): LogFileTool.__init__(self, multiple_logfiles=True, stdin_allowed=True) # add all filter classes from the filters module self.filters = [c[1] for c in inspect.getmembers(filters, inspect.isclass)] self.argparser.description = 'mongod/mongos log file parser. Use parameters to enable filters. A line only gets printed if it passes all enabled filters. If several log files are provided, their lines are merged by timestamp.' self.argparser.add_argument('--verbose', action='store_true', help='outputs information about the parser and arguments.') self.argparser.add_argument('--shorten', action='store', type=int, default=False, nargs='?', metavar='LENGTH', help='shortens long lines by cutting characters out of the middle until the length is <= LENGTH (default 200)') self.argparser.add_argument('--exclude', action='store_true', default=False, help='if set, excludes the matching lines rather than includes them.') self.argparser.add_argument('--human', action='store_true', help='outputs large numbers formatted with commas and print milliseconds as hr,min,sec,ms for easier readability.') self.argparser.add_argument('--json', action='store_true', help='outputs all matching lines in json format rather than the native log line.') self.argparser.add_argument('--markers', action='store', nargs='*', default=['filename'], help='use markers when merging several files to distinguish them. Choose from none, enum, alpha, filename (default), or provide list.') self.argparser.add_argument('--timezone', action='store', nargs='*', default=[], type=int, metavar="N", help="timezone adjustments: add N hours to corresponding log file, single value for global adjustment.") self.argparser.add_argument('--timestamp-format', action='store', default='none', choices=['none', 'ctime-pre2.4', 'ctime', 'iso8601-utc', 'iso8601-local'], help="choose datetime format for log output")
def __init__(self):
    """Set up the mlogvis argument parser (single log file or stdin)."""
    LogFileTool.__init__(self, multiple_logfiles=False, stdin_allowed=True)
    self.argparser.description = 'mongod/mongos log file visualizer (browser edition). Extracts \
information from each line of the log file and outputs a html file that can be viewed in \
a browser. Automatically opens a browser tab and shows the file.'
    # --no-browser: generate the .html output without launching a browser
    self.argparser.add_argument(
        '--no-browser', action='store_true',
        help='only creates .html file, but does not open the browser.')
    # --out / -o: override the default <logfile>.html output filename
    self.argparser.add_argument(
        '--out', '-o', action='store', default=None,
        help='filename to output. Default is <original logfile>.html')
def run(self, arguments=None): """ Print out useful information about the log file. """ LogFileTool.run(self, arguments) for logevent in self.args['logfile']: line = logevent.line_str # replace IP addresses line = re.sub(r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}', self._replace_ip, line) # replace strings line = re.sub(r'".+?"', self._replace_string, line) # replace hostnames and namespaces line = re.sub(r'[a-zA-Z$][^ \t\n\r\f\v:]+(\.[a-zA-Z$][^ \t\n\r\f\v:]+)+', self._replace_dottedname, line) print line
def run(self, arguments=None): """ Print out useful information about the log file. """ LogFileTool.run(self, arguments) logfile = LogFile(self.args['logfile']) print "start of logfile: %s" % (logfile.start.strftime("%b %d %H:%M:%S") if logfile.start else "unknown") print " end of logfile: %s" % (logfile.end.strftime("%b %d %H:%M:%S") if logfile.start else "unknown") # get one logline (within first 20 lines) for datetime format logline = None for i in range(20): try: logline = LogLine(self.args['logfile'].next()) except StopIteration as e: raise SystemExit("no valid log lines found (datetime not available).") if logline.datetime: break # TODO: add timezone if iso8601 format print " line numbers: %s" % logfile.num_lines print " binary: %s" % (logfile.binary or "unknown") version = (' -> '.join(logfile.versions) or "unknown") # if version is unknown, go by date if version == 'unknown' and logline: if logline.datetime_format == 'ctime-pre2.4': version = '< 2.4 (no milliseconds)' elif logline.datetime_format == 'ctime': version = '>= 2.4 (milliseconds present)' elif logline.datetime_format.startswith('iso8601-'): version = '>= 2.6 (iso8601 format)' print " version: %s" % version # restarts section if self.args['restarts']: print print "RESTARTS" for version, logline in logfile.restarts: print " %s version %s" % (logline.datetime.strftime("%b %d %H:%M:%S"), version) if len(logfile.restarts) == 0: print " no restarts found"
def run(self, arguments=None):
    """Export the log file to an html visualization.

    Embeds the exported JSON into the bundled index.html template and
    writes it to --out (default <logfile>.html). Opens the result in a
    browser unless --no-browser was given.
    """
    LogFileTool.run(self, arguments)
    # store in current local folder
    mlogvis_dir = '.'
    # change stdin logfile name and remove the < >
    logname = self.args['logfile'].name
    if logname == '<stdin>':
        logname = 'stdin'
    if self.args['out'] is not None:
        outputname = self.args['out']
    else:
        outputname = logname + '.html'
    os.chdir(mlogvis_dir)
    data_path = os.path.join(os.path.dirname(mtools.__file__), 'data')
    srcfilelocation = os.path.join(data_path, 'index.html')
    # first try exporting with line strings; fall back to without them
    json_docs = self._export(True)
    if not json_docs:
        json_docs = self._export(False)
    outf = ('{"type": "duration", "logfilename": "' + logname +
            '", "data":[' + json_docs + ']}')
    dstfilelocation = os.path.join(os.getcwd(), '%s' % outputname)
    print("copying %s to %s" % (srcfilelocation, dstfilelocation))
    # context managers close the files even if read/replace/write fails
    # (the original leaked the handles on error)
    with open(srcfilelocation) as srcfile:
        contents = srcfile.read()
    with open(dstfilelocation, 'wt') as dstfile:
        dstfile.write(contents.replace('##REPLACE##', outf))
    if not self.args['no_browser']:
        print("serving visualization on file://" + dstfilelocation)
        webbrowser.open("file://" + dstfilelocation)
def run(self, arguments=None): """ Print out useful information about the log file. """ LogFileTool.run(self, arguments) for logevent in self.args['logfile']: line = logevent.line_str # replace IP addresses line = re.sub(r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}', self._replace_ip, line) # replace strings line = re.sub(r'".+?"', self._replace_string, line) # replace hostnames and namespaces line = re.sub( r'[a-zA-Z$][^ \t\n\r\f\v:]+(\.[a-zA-Z$][^ \t\n\r\f\v:]+)+', self._replace_dottedname, line) print line
def run(self):
    """ go over each line in the logfile, run through log2code matcher
        and group by matched pattern.

        Prints per-pattern counts sorted by frequency. Unmatched lines
        are tallied (and echoed with --verbose), except operations,
        thread-less lines and empty messages, which are skipped.
    """
    LogFileTool.run(self)
    # pattern -> occurrence count
    codelines = defaultdict(lambda: 0)
    non_matches = 0
    for line in self.args['logfile']:
        cl = self.log2code(line)
        if cl:
            codelines[cl.pattern] += 1
        else:
            ll = LogLine(line)
            if ll.operation:
                # skip operations (command, insert, update, delete, query, getmore)
                continue
            if not ll.thread:
                # skip the lines that don't have a thread name (usually map/reduce or assertions)
                continue
            if len(ll.split_tokens) - ll._thread_offset <= 1:
                # skip empty log messages (after thread name)
                continue
            # everything else is a real non-match
            non_matches += 1
            if self.args['verbose']:
                # trailing comma: line already ends with a newline
                print "couldn't match:", line,
    if self.args['verbose']:
        print
    # print patterns sorted by descending occurrence count
    for cl in sorted(codelines, key=lambda x: codelines[x], reverse=True):
        print "%8i"%codelines[cl], " ", " ... ".join(cl)
    print
    if non_matches > 0:
        print "couldn't match %i lines"%non_matches
        if not self.args['verbose']:
            print "to show non-matched lines, run with --verbose."
def run(self, arguments=None): """ Print out useful information about the log file. """ LogFileTool.run(self, arguments) for i, self.logfile in enumerate(self.args['logfile']): if i > 0: print print ' ------------------------------------------' print print " source: %s" % self.logfile.name print " start: %s" % (self.logfile.start.strftime( "%Y %b %d %H:%M:%S") if self.logfile.start else "unknown") print " end: %s" % (self.logfile.end.strftime( "%Y %b %d %H:%M:%S") if self.logfile.start else "unknown") # TODO: add timezone if iso8601 format print "date format: %s" % self.logfile.datetime_format print " length: %s" % len(self.logfile) print " binary: %s" % (self.logfile.binary or "unknown") version = (' -> '.join(self.logfile.versions) or "unknown") # if version is unknown, go by date if version == 'unknown': if self.logfile.datetime_format == 'ctime-pre2.4': version = '< 2.4 (no milliseconds)' elif self.logfile.datetime_format == 'ctime': version = '>= 2.4 (milliseconds present)' elif self.logfile.datetime_format.startswith('iso8601-'): version = '>= 2.6 (iso8601 format)' print " version: %s" % version, print # now run all sections for section in self.sections: if section.active: print print section.name.upper() section.run()
def run(self, arguments=None):
    """ go over each line in the logfile, run through log2code matcher
        and group by matched pattern.

        Prints per-pattern counts sorted by frequency. Unmatched lines
        are tallied (and echoed with --verbose), except operations,
        thread-less lines and empty messages, which are skipped.
    """
    LogFileTool.run(self, arguments)
    # pattern -> occurrence count
    codelines = defaultdict(lambda: 0)
    non_matches = 0
    for line in self.args['logfile']:
        cl = self.log2code(line)
        if cl:
            codelines[cl.pattern] += 1
        else:
            ll = LogLine(line)
            if ll.operation:
                # skip operations (command, insert, update, delete, query, getmore)
                continue
            if not ll.thread:
                # skip the lines that don't have a thread name (usually map/reduce or assertions)
                continue
            if len(ll.split_tokens) - ll._thread_offset <= 1:
                # skip empty log messages (after thread name)
                continue
            # everything else is a real non-match
            non_matches += 1
            if self.args['verbose']:
                # trailing comma: line already ends with a newline
                print "couldn't match:", line,
    if self.args['verbose']:
        print
    # print patterns sorted by descending occurrence count
    for cl in sorted(codelines, key=lambda x: codelines[x], reverse=True):
        print "%8i"%codelines[cl], " ", " ... ".join(cl)
    print
    if non_matches > 0:
        print "couldn't match %i lines"%non_matches
        if not self.args['verbose']:
            print "to show non-matched lines, run with --verbose."
def run(self, arguments=None): LogFileTool.run(self, arguments) # store in current local folder mlogvis_dir = '.' # change stdin logfile name and remove the < > logname = self.args['logfile'].name if logname == '<stdin>': logname = 'stdin' os.chdir(mlogvis_dir) data_path = os.path.join(os.path.dirname(mtools.__file__), 'data') srcfilelocation = os.path.join(data_path, 'index.html') json_docs = self._export(True) if not json_docs: json_docs = self._export(False) outf = '{"type": "duration", "logfilename": "' + logname + '", "data":[' + json_docs + ']}' dstfilelocation = os.path.join(os.getcwd(), '%s.html' % logname) print "copying %s to %s" % (srcfilelocation, dstfilelocation) srcfile = open(srcfilelocation) contents = srcfile.read() srcfile.close() dstfile = open(dstfilelocation, 'wt') replaced_contents = contents.replace('this.width = 980', 'this.width = 480') replaced_contents = replaced_contents.replace('this.height = 500', 'this.height = 270') replaced_contents = replaced_contents.replace('##REPLACE##', outf) dstfile.write(replaced_contents) dstfile.close() print "serving visualization on file://" + dstfilelocation
def run(self, arguments=None): """ Print out useful information about the log file. """ LogFileTool.run(self, arguments) for i, self.logfile in enumerate(self.args['logfile']): if i > 0: print print ' ------------------------------------------' print print " source: %s" % self.logfile.name print " start: %s" % (self.logfile.start.strftime("%Y %b %d %H:%M:%S") if self.logfile.start else "unknown") print " end: %s" % (self.logfile.end.strftime("%Y %b %d %H:%M:%S") if self.logfile.start else "unknown") # TODO: add timezone if iso8601 format print "date format: %s" % self.logfile.datetime_format print " length: %s" % len(self.logfile) print " binary: %s" % (self.logfile.binary or "unknown") version = (' -> '.join(self.logfile.versions) or "unknown") # if version is unknown, go by date if version == 'unknown': if self.logfile.datetime_format == 'ctime-pre2.4': version = '< 2.4 (no milliseconds)' elif self.logfile.datetime_format == 'ctime': version = '>= 2.4 (milliseconds present)' elif self.logfile.datetime_format.startswith('iso8601-'): version = '>= 2.6 (iso8601 format)' print " version: %s" % version, print # now run all sections for section in self.sections: if section.active: print print section.name.upper() section.run()
def __init__(self):
    """Set up the mplotqueries argument parser and its plot types."""
    LogFileTool.__init__(self, multiple_logfiles=True, stdin_allowed=True)
    self.argparser.description='A script to plot various information from logfiles. ' \
        'Clicking on any of the plot points will print the corresponding log line to stdout.'
    # map each plot type's string identifier to its class
    self.plot_types = [DurationPlotType, EventPlotType, RangePlotType, RSStatePlotType]
    self.plot_types = dict((pt.plot_type_str, pt) for pt in self.plot_types)
    self.plot_instances = []
    # main parser arguments
    self.argparser.add_argument('--exclude-ns', action='store', nargs='*', metavar='NS', help='namespaces to exclude in the plot')
    self.argparser.add_argument('--ns', action='store', nargs='*', metavar='NS', help='namespaces to include in the plot (default=all)')
    self.argparser.add_argument('--logscale', action='store_true', help='plot y-axis in logarithmic scale (default=off)')
    # const='add' makes a bare --overlay create an overlay
    self.argparser.add_argument('--overlay', action='store', nargs='?', default=None, const='add', choices=['add', 'list', 'reset'])
    self.argparser.add_argument('--type', action='store', default='duration', choices=self.plot_types.keys(), help='type of plot (default=duration)')
    # --group and --label are mutually exclusive
    mutex = self.argparser.add_mutually_exclusive_group()
    mutex.add_argument('--group', help="specify value to group on. Possible values depend on type of plot. All basic plot types can group on 'namespace', 'operation', 'thread', range plots can additionally group on 'log2code'.")
    mutex.add_argument('--label', help="instead of specifying a group, a label can be specified. Grouping is then disabled, and the single group for all data points is named LABEL.")
    self.legend = None
def __init__(self):
    """Configure the browser-based visualizer (single file, stdin ok)."""
    LogFileTool.__init__(self, multiple_logfiles=False, stdin_allowed=True)
    self.argparser.description = (
        'mongod/mongos log file visualizer (browser edition). Extracts '
        'information from each line of the log file and outputs a html '
        'file that can be viewed in a browser. Automatically opens a '
        'browser tab and shows the file.')
    # only generate the .html file, don't launch a browser
    self.argparser.add_argument(
        '--no-browser', action='store_true',
        help='only creates .html file, but does not open the browser.')
    # override the default <logfile>.html output name
    self.argparser.add_argument(
        '--out', '-o', action='store', default=None,
        help='filename to output. Default is <original logfile>.html')
    # threshold above which line strings are omitted from the output
    self.argparser.add_argument(
        '--line-max', action='store', default=10000,
        help=('max count of datapoints at which actual log line strings '
              'are not printed any more.'))
def run(self, arguments=None): LogFileTool.run(self, arguments) # store in current local folder mlogvis_dir = '.' # change stdin logfile name and remove the < > logname = self.args['logfile'].name if logname == '<stdin>': logname = 'stdin' os.chdir(mlogvis_dir) data_path = os.path.join(os.path.dirname(mtools.__file__), 'data') srcfilelocation = os.path.join(data_path, 'index.html') json_docs = self._export(True) if not json_docs: json_docs = self._export(False) outf = '{"type": "duration", "logfilename": "' + logname + '", "data":[' + json_docs + ']}' dstfilelocation = os.path.join(os.getcwd(), '%s.html'%logname) print "copying %s to %s" % (srcfilelocation, dstfilelocation) srcfile = open(srcfilelocation) contents = srcfile.read() srcfile.close() dstfile = open(dstfilelocation, 'wt') replaced_contents = contents.replace('this.width = 980', 'this.width = 480') replaced_contents = replaced_contents.replace('this.height = 500', 'this.height = 270') replaced_contents = replaced_contents.replace('##REPLACE##', outf) dstfile.write(replaced_contents) dstfile.close() print "serving visualization on file://"+dstfilelocation
def run(self):
    """Merge several log files into one stream ordered by timestamp.

    Reads all files line by line, repeatedly emits the line with the
    smallest timestamp (adjusted by the per-file --timezone offsets) and
    optionally inserts a per-file label at the position given by --pos.
    """
    LogFileTool.run(self)
    logfiles = self.args['logfile']
    # handle labels parameter
    # NOTE(review): if a single label is given that is none of
    # enum/alpha/none/filename, 'labels' stays unbound and the loop
    # below raises NameError — confirm intended validation behavior
    if len(self.args['labels']) == 1:
        label = self.args['labels'][0]
        if label == 'enum':
            labels = ['{%i}'%(i+1) for i in range(len(logfiles))]
        elif label == 'alpha':
            labels = ['{%s}'%chr(97+i) for i in range(len(logfiles))]
        elif label == 'none':
            labels = [None for _ in logfiles]
        elif label == 'filename':
            labels = ['{%s}'%fn.name for fn in logfiles]
    elif len(self.args['labels']) == len(logfiles):
        labels = self.args['labels']
    else:
        raise SystemExit('Error: Number of labels not the same as number of files.')
    # handle timezone parameter: one value applies globally, otherwise
    # one value per file (or zero values meaning no adjustment)
    if len(self.args['timezone']) == 1:
        self.args['timezone'] = self.args['timezone'] * len(logfiles)
    elif len(self.args['timezone']) == len(logfiles):
        pass
    elif len(self.args['timezone']) == 0:
        self.args['timezone'] = [0] * len(logfiles)
    else:
        raise SystemExit('Error: Invalid number of timezone parameters. Use either one parameter (for global adjustment) or the number of log files (for individual adjustments).')
    # handle position parameter ('eol' or an integer token index)
    position = self.args['pos']
    if position != 'eol':
        position = int(position)
    # define minimum and maximum datetime object
    mindate = datetime(MINYEAR, 1, 1, 0, 0, 0)
    maxdate = datetime(MAXYEAR, 12, 31, 23, 59, 59)
    # open files, read first lines, extract first dates
    lines = [f.readline() for f in logfiles]
    dates = [LogLine(l).datetime for l in lines]
    # replace all non-dates with mindate so they sort first
    dates = [d if d else mindate for d in dates]
    dates = [d + timedelta(hours=self.args['timezone'][i]) for i,d in enumerate(dates) if d]
    # merge loop: continue while at least one file has lines left
    while any([l != '' for l in lines]):
        # pick smallest date of all non-empty lines (exhausted files
        # are masked with maxdate so they are never chosen)
        condDates = ([d if lines[i] != '' else maxdate for i,d in enumerate(dates)])
        minCondDate = min(condDates)
        minIndex = condDates.index(minCondDate)
        # print out current line
        currLine = lines[minIndex].rstrip()
        try:
            # recover the original (unadjusted) timestamp for rewriting
            oldDate = minCondDate - timedelta(hours=self.args['timezone'][minIndex])
        except OverflowError:
            oldDate = minCondDate
        if minCondDate != mindate:
            # rewrite the timestamp in the output to the adjusted one
            currLine = currLine.replace(oldDate.strftime('%a %b %d %H:%M:%S'), minCondDate.strftime('%a %b %d %H:%M:%S'))
        if labels[minIndex]:
            if position == 0 or minCondDate == mindate:
                print labels[minIndex], currLine
            elif position == 'eol':
                print currLine, labels[minIndex]
            else:
                # insert label between the position-th and following tokens
                tokens = currLine.split()
                print " ".join(tokens[:position]), labels[minIndex], " ".join(tokens[position:])
        else:
            print currLine
        # update lines and dates for that line
        lines[minIndex] = logfiles[minIndex].readline()
        dates[minIndex] = LogLine(lines[minIndex]).datetime
        if not dates[minIndex]:
            dates[minIndex] = mindate
        else:
            dates[minIndex] += timedelta(hours=self.args['timezone'][minIndex])
def run(self, arguments=None):
    """Print useful information about the log file.

    For each provided log file, prints source, host, start/end
    timestamps, date format, timezone, length, binary and the
    (inferred) server version, then runs all active report sections.
    """
    LogFileTool.run(self, arguments)
    if (self.args['logfile'] is None or len(self.args['logfile']) == 0):
        self.argparser.print_usage()
        print("\nERROR: At least one logfile argument must be provided")
        self.argparser.exit()
    for i, self.logfile in enumerate(self.args['logfile']):
        if i > 0:
            # separator between consecutive log files
            print("\n ------------------------------------------\n")
        if self.logfile.datetime_format == 'ctime-pre2.4':
            # no milliseconds when datetime format doesn't support it
            start_time = (self.logfile.start.strftime("%Y %b %d %H:%M:%S")
                          if self.logfile.start else "unknown")
            # bug fix: guard on .end (was .start); raised AttributeError
            # when start was known but end could not be determined
            end_time = (self.logfile.end.strftime("%Y %b %d %H:%M:%S")
                        if self.logfile.end else "unknown")
        else:
            # include milliseconds ([:-3] trims microseconds down to ms)
            start_time = (self.logfile.start.strftime("%Y %b %d "
                                                      "%H:%M:%S.%f")[:-3]
                          if self.logfile.start else "unknown")
            # bug fix: guard on .end (was .start), as above
            end_time = (self.logfile.end.strftime("%Y %b %d "
                                                  "%H:%M:%S.%f")[:-3]
                        if self.logfile.end else "unknown")
        print("     source: %s" % self.logfile.name)
        print("       host: %s" % (self.logfile.hostname + ':' +
                                   str(self.logfile.port)
                                   if self.logfile.hostname else "unknown"))
        print("      start: %s" % (start_time))
        print("        end: %s" % (end_time))
        print("date format: %s" % self.logfile.datetime_format)
        # self.logfile.timezone is a dateutil.tzinfo object
        tzdt = datetime.datetime.now(self.logfile.timezone)
        if (tzdt.tzname()):
            timezone = tzdt.tzname()
        else:
            # fall back to a numeric UTC offset when no name is available
            timezone = f"UTC {tzdt.strftime('%z')}"
        print("   timezone: %s" % timezone)
        print("     length: %s" % len(self.logfile))
        print("     binary: %s" % (self.logfile.binary or "unknown"))
        version = (' -> '.join(self.logfile.versions) or "unknown")
        # if version is unknown, go by date
        if version == 'unknown':
            if self.logfile.datetime_format == 'ctime-pre2.4':
                version = '< 2.4 (no milliseconds)'
            elif self.logfile.datetime_format == 'ctime':
                version = '>= 2.4.x ctime (milliseconds present)'
            elif (self.logfile.datetime_format == "iso8601-utc" or
                    self.logfile.datetime_format == "iso8601-local"):
                if self.logfile.has_level:
                    version = '>= 3.0 (iso8601 format, level, component)'
                else:
                    version = '= 2.6.x (iso8601 format)'
        print("    version: %s" % version)
        print("    storage: %s" % (self.logfile.storage_engine or 'unknown'))
        # now run all sections
        for section in self.sections:
            if section.active:
                print("\n%s" % section.name.upper())
                section.run()
def __init__(self):
    """Configure the merge tool: multiple log files required, no stdin."""
    LogFileTool.__init__(self, multiple_logfiles=True, stdin_allowed=False)
    add_argument = self.argparser.add_argument
    # how to mark which file a merged line originated from
    add_argument('--labels', action='store', nargs='*', default=['enum'],
                 help='labels to distinguish original files. Choose from none, enum, alpha, filename, or provide list.')
    # where in the output line the label is inserted
    add_argument('--pos', action='store', default=0,
                 help="position of label (default=0, front of line, other options are 'eol' or the position as int.")
    # per-file hour offsets applied before merging by timestamp
    add_argument('--timezone', action='store', nargs='*', default=[],
                 type=int, metavar="N",
                 help="timezone adjustments: add N hours to corresponding log file")
def __init__(self):
    """Configure the version-detector tool (single file, stdin allowed)."""
    LogFileTool.__init__(self, multiple_logfiles=False, stdin_allowed=True)
    description = ('mongod/mongos log file version detector. Parses a log file and matches each line to its original source code version. Each line that limits the remaining possible set of versions is printed. If a mongos/d restart is detected, the definitive version is printed instead.')
    self.argparser.description = description
def run(self, arguments=None):
    """ parses the logfile and asks each filter if it accepts the line.
        it will only be printed if all filters accept the line.
    """
    # add arguments from filter classes before calling superclass run
    for f in self.filters:
        for fa in f.filterArgs:
            self.argparser.add_argument(fa[0], **fa[1])
    # now parse arguments and post-process
    LogFileTool.run(self, arguments)
    # flatten list-valued args to strings, except the ones that must
    # stay lists (logfile, markers, timezone)
    self.args = dict((k, self.args[k] if k in ['logfile', 'markers', 'timezone'] else self._arrayToString(self.args[k])) for k in self.args)
    # make sure logfile is always a list, even if 1 is provided through sys.stdin
    if type(self.args['logfile']) != types.ListType:
        self.args['logfile'] = [self.args['logfile']]
    # require at least 1 log file (either through stdin or as parameter)
    if len(self.args['logfile']) == 0:
        raise SystemExit('Error: Need at least 1 log file, either as command line parameter or through stdin.')
    # handle timezone parameter: one value applies to all files,
    # otherwise one value per file (or none, meaning no adjustment)
    if len(self.args['timezone']) == 1:
        self.args['timezone'] = self.args['timezone'] * len(self.args['logfile'])
    elif len(self.args['timezone']) == len(self.args['logfile']):
        pass
    elif len(self.args['timezone']) == 0:
        self.args['timezone'] = [0] * len(self.args['logfile'])
    else:
        raise SystemExit('Error: Invalid number of timezone parameters. Use either one parameter (for global adjustment) or the number of log files (for individual adjustments).')
    # create filter objects from classes and pass args
    self.filters = [f(self) for f in self.filters]
    # remove non-active filter objects
    self.filters = [f for f in self.filters if f.active]
    # call setup for each active filter
    for f in self.filters:
        f.setup()
    # a bare --shorten (value None) defaults to 200 characters
    if self.args['shorten'] != False:
        if self.args['shorten'] == None:
            self.args['shorten'] = 200
    if self.args['verbose']:
        print "command line arguments"
        for a in self.args:
            print " %s: %s" % (a, self.args[a])
        print
        print "active filters:",
        print ', '.join([f.__class__.__name__ for f in self.filters])
        print
        print '===================='
    # handle markers parameter: a single keyword expands to one marker
    # per file; otherwise one explicit marker per file is required
    if len(self.args['markers']) == 1:
        marker = self.args['markers'][0]
        if marker == 'enum':
            self.args['markers'] = ['{%i}'%(i+1) for i in range(len(self.args['logfile']))]
        elif marker == 'alpha':
            self.args['markers'] = ['{%s}'%chr(97+i) for i in range(len(self.args['logfile']))]
        elif marker == 'none':
            self.args['markers'] = [None for _ in self.args['logfile']]
        elif marker == 'filename':
            self.args['markers'] = ['{%s}'%logfile.name for logfile in self.args['logfile']]
    elif len(self.args['markers']) == len(self.args['logfile']):
        pass
    else:
        raise SystemExit('Error: Number of markers not the same as number of files.')
    # with --human, change to ctime format if not specified otherwise
    if self.args['timestamp_format'] == 'none' and self.args['human']:
        self.args['timestamp_format'] = 'ctime'
    # go through each line and ask each filter if it accepts
    if not 'logfile' in self.args or not self.args['logfile']:
        raise SystemExit('no logfile found.')
    for logevent in self.logfile_generator():
        if self.args['exclude']:
            # print line if any filter disagrees
            if any([not f.accept(logevent) for f in self.filters]):
                self._outputLine(logevent, self.args['shorten'], self.args['human'])
        else:
            # only print line if all filters agree
            if all([f.accept(logevent) for f in self.filters]):
                self._outputLine(logevent, self.args['shorten'], self.args['human'])
        # if at least one filter refuses to accept any remaining lines, stop
        if any([f.skipRemaining() for f in self.filters]):
            # if input is not stdin
            if sys.stdin.isatty():
                break
def __init__(self): LogFileTool.__init__(self, multiple_logfiles=False, stdin_allowed=True) self.argparser.description = 'mongod/mongos log file visualizer (browser edition). Extracts \
def run(self, arguments=None):
    """ parses the logfile and asks each filter if it accepts the line.
        it will only be printed if all filters accept the line.
    """
    # add arguments from filter classes before calling superclass run
    for f in self.filters:
        for fa in f.filterArgs:
            self.argparser.add_argument(fa[0], **fa[1])
    # now parse arguments and post-process
    LogFileTool.run(self, arguments)
    # flatten list-valued args to strings, except the ones that must
    # stay lists (logfile, markers, timezone)
    self.args = dict((k, self.args[k] if k in ['logfile', 'markers', 'timezone'] else self._arrayToString(self.args[k])) for k in self.args)
    # make sure logfile is always a list, even if 1 is provided through sys.stdin
    if type(self.args['logfile']) != types.ListType:
        self.args['logfile'] = [self.args['logfile']]
    # require at least 1 log file (either through stdin or as parameter)
    if len(self.args['logfile']) == 0:
        raise SystemExit('Error: Need at least 1 log file, either as command line parameter or through stdin.')
    # handle timezone parameter: one value applies to all files,
    # otherwise one value per file (or none, meaning no adjustment)
    if len(self.args['timezone']) == 1:
        self.args['timezone'] = self.args['timezone'] * len(self.args['logfile'])
    elif len(self.args['timezone']) == len(self.args['logfile']):
        pass
    elif len(self.args['timezone']) == 0:
        self.args['timezone'] = [0] * len(self.args['logfile'])
    else:
        raise SystemExit('Error: Invalid number of timezone parameters. Use either one parameter (for global adjustment) or the number of log files (for individual adjustments).')
    # create filter objects from classes and pass args
    self.filters = [f(self) for f in self.filters]
    # remove non-active filter objects
    self.filters = [f for f in self.filters if f.active]
    # call setup for each active filter
    for f in self.filters:
        f.setup()
    # a bare --shorten (value None) defaults to 200 characters
    if self.args['shorten'] != False:
        if self.args['shorten'] == None:
            self.args['shorten'] = 200
    if self.args['verbose']:
        print "mlogfilter> command line arguments"
        for a in self.args:
            print "mlogfilter> %8s: %s" % (a, self.args[a])
        print
        print "mlogfilter> active filters:",
        print ', '.join([f.__class__.__name__ for f in self.filters])
        print
    # handle markers parameter: a single keyword expands to one marker
    # per file; otherwise one explicit marker per file is required
    if len(self.args['markers']) == 1:
        marker = self.args['markers'][0]
        if marker == 'enum':
            self.args['markers'] = ['{%i}'%(i+1) for i in range(len(self.args['logfile']))]
        elif marker == 'alpha':
            self.args['markers'] = ['{%s}'%chr(97+i) for i in range(len(self.args['logfile']))]
        elif marker == 'none':
            self.args['markers'] = [None for _ in self.args['logfile']]
        elif marker == 'filename':
            self.args['markers'] = ['{%s}'%logfile.name for logfile in self.args['logfile']]
    elif len(self.args['markers']) == len(self.args['logfile']):
        pass
    else:
        raise SystemExit('Error: Number of markers not the same as number of files.')
    # with --human, change to ctime format if not specified otherwise
    if self.args['timestamp_format'] == 'none' and self.args['human']:
        self.args['timestamp_format'] = 'ctime'
    # go through each line and ask each filter if it accepts
    if not 'logfile' in self.args or not self.args['logfile']:
        raise SystemExit('no logfile found.')
    for logevent in self.logfile_generator():
        if self.args['exclude']:
            # print line if any filter disagrees
            if any([not f.accept(logevent) for f in self.filters]):
                self._outputLine(logevent, self.args['shorten'], self.args['human'])
        else:
            # only print line if all filters agree
            if all([f.accept(logevent) for f in self.filters]):
                self._outputLine(logevent, self.args['shorten'], self.args['human'])
        # if at least one filter refuses to accept any remaining lines, stop
        if any([f.skipRemaining() for f in self.filters]):
            # if input is not stdin
            if sys.stdin.isatty():
                break
def __init__(self): """ Constructor: add description to argparser. """ LogFileTool.__init__(self, multiple_logfiles=False, stdin_allowed=True) self.argparser.description = 'mongod/mongos log file to json converter. \