Example #1
    def _merge_logfiles(self):
        """ helper method to merge several files together by datetime. """
        # open files, read first lines, extract first dates
        lines = [f.readline() for f in self.args["logfile"]]
        lines = [LogLine(l) if l else None for l in lines]

        # adjust lines by timezone
        for i in range(len(lines)):
            if lines[i] and lines[i].datetime:
                lines[i]._datetime = lines[i].datetime + timedelta(hours=self.args["timezone"][i])

        while any(lines):
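            # pick the line with the earliest timestamp across all open files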
            min_line = min(lines, key=self._datetime_key_for_merge)
            min_index = lines.index(min_line)

            if self.args["markers"][min_index]:
                min_line.merge_marker_str = self.args["markers"][min_index]

            yield min_line

            # update lines array with a new line from the min_index'th logfile
            new_line = self.args["logfile"][min_index].readline()
            lines[min_index] = LogLine(new_line) if new_line else None
            if lines[min_index] and lines[min_index].datetime:
                lines[min_index]._datetime = lines[min_index].datetime + timedelta(
                    hours=self.args["timezone"][min_index]
                )
Example #2
    def _export(self, with_line_str=True):
        fields = ['_id', 'datetime', 'operation', 'thread', 'namespace',
                  'nscanned', 'nreturned', 'duration', 'numYields', 'w', 'r']
        if with_line_str:
            fields.append('line_str')

        first_row = True
        result_str = ''
        out_count = 0
        for line_no, line in enumerate(self.args['logfile']):
            logline = LogLine(line)
            # only export lines that have a datetime and duration
            if logline.datetime and logline.duration:
                out_count += 1
                # if too many lines include a line_str, the page won't load
                if with_line_str and out_count > 10000:
                    print "Warning: more than 10,000 data points detected. Skipping actual log line strings for faster plotting."
                    return False
                # write log line out as json
                if not first_row:
                    # prepend comma and newline
                    result_str += ',\n'
                else:
                    first_row = False
                # hack to include _id for log lines from file
                logline._id = line_no
                result_str += logline.to_json(fields)
        return result_str
Example #3
    def _calculate_bounds(self):
        """ calculate beginning and end of logfile. """

        if self.from_stdin:
            return None

        # get start datetime
        for line in self.logfile:
            logline = LogLine(line)
            date = logline.datetime
            if date:
                self._start = date
                break

        # get end datetime (lines are at most 10k, go back 15k at most to make sure)
        self.logfile.seek(0, 2)
        file_size = self.logfile.tell()
        self.logfile.seek(-min(file_size, 15000), 2)

        for line in reversed(self.logfile.readlines()):
            logline = LogLine(line)
            date = logline.datetime
            if date:
                self._end = date
                break

        # if there was a roll-over, subtract 1 year from start time
        if self._end < self._start:
            self._start = self._start.replace(year=self._start.year - 1)

        # reset logfile
        self.logfile.seek(0)
Example #4
    def _export(self, with_line_str=True):
        fields = [
            '_id', 'datetime', 'operation', 'thread', 'namespace', 'nscanned',
            'nreturned', 'duration', 'numYields', 'w', 'r'
        ]
        if with_line_str:
            fields.append('line_str')

        first_row = True
        result_str = ''
        out_count = 0
        for line_no, line in enumerate(self.args['logfile']):
            logline = LogLine(line)
            # only export lines that have a datetime and duration
            if logline.datetime and logline.duration:
                out_count += 1
                # if too many lines include a line_str, the page won't load
                if with_line_str and out_count > 10000:
                    print "Warning: more than 10,000 data points detected. Skipping actual log line strings for faster plotting."
                    return False
                # write log line out as json
                if not first_row:
                    # prepend comma and newline
                    result_str += ',\n'
                else:
                    first_row = False
                # hack to include _id for log lines from file
                logline._id = line_no
                result_str += logline.to_json(fields)
        return result_str
Example #5
    def parse_loglines(self):
        multiple_files = False

        # create generator for logfile(s) handles
        if type(self.args['logfile']) != types.ListType:
            logfiles = [self.args['logfile']]
        else:
            logfiles = self.args['logfile']

        if len(logfiles) > 1:
            multiple_files = True
            self.args['group'] = 'filename'

        # instantiate the plot class selected via the --type argument
        plot_instance = self.plot_types[self.args['type']](args=self.args)

        for logfile in logfiles:

            for line in logfile:
                # create LogLine object
                logline = LogLine(line)

                if multiple_files:
                    # amend logline object with filename for group by filename
                    logline.filename = logfile.name

                # offer plot_instance and see if it can plot it
                line_accepted = False
                if plot_instance.accept_line(logline):

                    # only add if it doesn't conflict with namespace restrictions
                    if self.args['ns'] != None and logline.namespace not in self.args['ns']:
                        continue

                    if self.args['exclude_ns'] != None and (
                            logline.namespace in self.args['exclude_ns']):
                        continue

                    # if logline doesn't have datetime, skip
                    if logline.datetime == None:
                        continue

                    if logline.namespace == None:
                        logline._namespace = "None"

                    line_accepted = True
                    plot_instance.add_line(logline)

        self.plot_instances.append(plot_instance)

        # close files after parsing
        if sys.stdin.isatty():
            for f in logfiles:
                f.close()
Example #6
    def run(self):
        LogFileTool.run(self)

        # store in current local folder
        mlogvis_dir = '.'

        # change stdin logfile name and remove the < >
        logname = self.args['logfile'].name
        if logname == '<stdin>':
            logname = 'stdin'

        os.chdir(mlogvis_dir)

        data_path = os.path.join(os.path.dirname(mtools.__file__), 'data')
        srcfilelocation = os.path.join(data_path, 'index.html')
        outf = '{"type": "duration", "logfilename": "' + logname + '", "data":['

        first_row = True
        for line in self.args['logfile']:
            logline = LogLine(line)
            # group regular connections together
            if logline.datetime and logline.duration:
                if logline.thread and logline.thread.startswith("conn"):
                    logline._thread = "conn####"
                # write log line out as json
                if not first_row:
                    # prepend comma and newline
                    outf += ',\n'
                else:
                    first_row = False
                outf += logline.to_json([
                    'line_str', 'datetime', 'operation', 'thread', 'namespace',
                    'nscanned', 'nreturned', 'duration'
                ])
        outf += ']}'

        dstfilelocation = os.path.join(os.getcwd(), '%s.html' % logname)

        print "copying %s to %s" % (srcfilelocation, dstfilelocation)

        srcfile = open(srcfilelocation)
        contents = srcfile.read()
        srcfile.close()

        dstfile = open(dstfilelocation, 'wt')
        replaced_contents = contents.replace('##REPLACE##', outf)
        dstfile.write(replaced_contents)
        dstfile.close()

        print "serving visualization on file://" + dstfilelocation

        webbrowser.open("file://" + dstfilelocation)
Example #7
    def parse_loglines(self):
        multiple_files = False

        # create generator for logfile(s) handles
        if sys.stdin.isatty():
            logfiles = (open(f, 'r') for f in self.args['filename'])
            if len(self.args['filename']) > 1:
                multiple_files = True
                self.args['group'] = 'filename'

        else:
            logfiles = [sys.stdin]
        
        plot_instance = self.plot_types[self.args['type']](args=self.args)
        
        for logfile in logfiles:

            for line in logfile:
                # create LogLine object
                logline = LogLine(line)

                if multiple_files:
                    # amend logline object with filename for group by filename
                    logline.filename = logfile.name

                # offer plot_instance and see if it can plot it
                line_accepted = False
                if plot_instance.accept_line(logline):
                    
                    # only add if it doesn't conflict with namespace restrictions
                    if self.args['ns'] != None and logline.namespace not in self.args['ns']:
                        continue

                    if self.args['exclude_ns'] != None and (logline.namespace in self.args['exclude_ns']):
                        continue

                    # if logline doesn't have datetime, skip
                    if logline.datetime == None:
                        continue
                    
                    if logline.namespace == None:
                        logline._namespace = "None"

                    line_accepted = True
                    plot_instance.add_line(logline)

        self.plot_instances.append(plot_instance)

        # close files after parsing
        if sys.stdin.isatty():
            for f in logfiles:
                f.close()
Example #8
File: mlogvis.py, Project: rgsingh/mtools
    def run(self):
        LogFileTool.run(self)

        # store in current local folder
        mlogvis_dir = "."

        # change stdin logfile name and remove the < >
        logname = self.args["logfile"].name
        if logname == "<stdin>":
            logname = "stdin"

        os.chdir(mlogvis_dir)

        data_path = os.path.join(os.path.dirname(mtools.__file__), "data")
        srcfilelocation = os.path.join(data_path, "index.html")
        outf = '{"type": "duration", "logfilename": "' + logname + '", "data":['

        first_row = True
        for line in self.args["logfile"]:
            logline = LogLine(line)
            # group regular connections together
            if logline.datetime and logline.duration:
                if logline.thread and logline.thread.startswith("conn"):
                    logline._thread = "conn####"
                # write log line out as json
                if not first_row:
                    # prepend comma and newline
                    outf += ",\n"
                else:
                    first_row = False
                outf += logline.to_json(
                    ["line_str", "datetime", "operation", "thread", "namespace", "nscanned", "nreturned", "duration"]
                )
        outf += "]}"

        dstfilelocation = os.path.join(os.getcwd(), "%s.html" % logname)

        print "copying %s to %s" % (srcfilelocation, dstfilelocation)

        srcfile = open(srcfilelocation)
        contents = srcfile.read()
        srcfile.close()

        dstfile = open(dstfilelocation, "wt")
        replaced_contents = contents.replace("##REPLACE##", outf)
        dstfile.write(replaced_contents)
        dstfile.close()

        print "serving visualization on file://" + dstfilelocation

        webbrowser.open("file://" + dstfilelocation)
Example #9
def test_logline_lazy_evaluation():
    """ Check that all LogLine variables are evaluated lazily. """
    
    fields = ['_thread', '_operation', '_namespace', '_duration', '_numYields',
              '_r', '_ntoreturn', '_nreturned', '_pattern']

    # before parsing all member variables need to be None
    ll = LogLine(line_getmore)
    for attr in fields:
        assert(getattr(ll, attr) == None)

    # after parsing, they all need to be filled out
    ll.parse_all()
    for attr in fields:
        assert(getattr(ll, attr) != None)
Example #10
    def setup(self):
        """ get start end end date of logfile before starting to parse. """
        logfile = self.commandLineArgs['logfile']
        seekable = False

        if logfile:
            seekable = logfile.name != "<stdin>"

        if not seekable:
            # assume this year (we have no other info)
            now = datetime.now()
            self.startDateTime = datetime(now.year, 1, 1)
            self.endDateTime = datetime(MAXYEAR, 12, 31)
            # self.fromDateTime = datetime(MINYEAR, 1, 1)
            # self.toDateTime = datetime(MAXYEAR, 12, 31)

        else:
            # get start datetime
            for line in logfile:
                logline = LogLine(line)
                date = logline.datetime
                if date:
                    break
            self.startDateTime = date

            # get end datetime (lines are at most 10k, go back 15k at most to make sure)
            logfile.seek(0, 2)
            file_size = logfile.tell()
            logfile.seek(-min(file_size, 15000), 2)

            for line in reversed(logfile.readlines()):
                logline = LogLine(line)
                date = logline.datetime
                if date:
                    break
            self.endDateTime = date

            # if there was a roll-over, subtract 1 year from start time
            if self.endDateTime < self.startDateTime:
                self.startDateTime = self.startDateTime.replace(
                    year=self.startDateTime.year - 1)

            # reset logfile
            logfile.seek(0)

        # now parse for further changes to from and to datetimes
        dtbound = DateTimeBoundaries(self.startDateTime, self.endDateTime)
        self.fromDateTime, self.toDateTime = dtbound(
            self.commandLineArgs['from'] or None,
            self.commandLineArgs['to'] or None)
Example #11
    def _iterate_lines(self):
        """ count number of lines (can be expensive). """
        self._num_lines = 0
        self._restarts = []

        l = 0
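        # scan all lines, recording any version strings / restarts found along the way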
        for l, line in enumerate(self.logfile):

            # find version string
            if "version" in line:

                restart = None
                # differentiate between different variations
                if "mongos" in line or "MongoS" in line:
                    self._binary = 'mongos'
                elif "db version v" in line:
                    self._binary = 'mongod'

                else:
                    continue

                version = re.search(r'(\d\.\d\.\d+)', line)
                if version:
                    version = version.group(1)
                    restart = (version, LogLine(line))
                    self._restarts.append(restart)

        self._num_lines = l + 1  # enumerate is zero-based, so add 1 for the count

        # reset logfile
        self.logfile.seek(0)
Example #12
 def test_slow_fast(self):
     self.tool.run('%s --slow 145 --fast 500' % self.logfile_path)
     output = sys.stdout.getvalue()
     assert (len(output.splitlines()) > 0)
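     # every line that passes the filter must have a duration within [145, 500] ms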
     for line in output.splitlines():
         ll = LogLine(line)
         assert (ll.duration >= 145 and ll.duration <= 500)
Example #13
    def run(self, arguments=None):
        """ Go through each line, convert string to LogLine object, then print
            JSON representation of the line. 
        """
        LogFileTool.run(self, arguments)

        for line in self.args['logfile']:
            print LogLine(line).to_json()
Example #14
def test_logline_lazy_evaluation():
    """ Check that all LogLine variables are evaluated lazily. """

    fields = [
        '_thread', '_operation', '_namespace', '_duration', '_numYields', '_r',
        '_ntoreturn', '_nreturned'
    ]

    # before parsing all member variables need to be None
    ll = LogLine(line_getmore)
    for attr in fields:
        assert (getattr(ll, attr) == None)

    # after parsing, they all need to be filled out
    ll.parse_all()
    for attr in fields:
        assert (getattr(ll, attr) != None)
Example #15
    def run(self, arguments=None):
        """ parses the logfile and asks each filter if it accepts the line.
            it will only be printed if all filters accept the line.
        """

        # add arguments from filter classes before calling superclass run
        for f in self.filters:
            for fa in f.filterArgs:
                self.argparser.add_argument(fa[0], **fa[1])

        # now parse arguments and post-process
        LogFileTool.run(self, arguments)
        self.args = dict(
            (k, self._arrayToString(self.args[k])) for k in self.args)

        # create filter objects from classes and pass args
        self.filters = [f(self.args) for f in self.filters]

        # remove non-active filter objects
        self.filters = [f for f in self.filters if f.active]

        # call setup for each active filter
        for f in self.filters:
            f.setup()

        if self.args['shorten'] != False:
            if self.args['shorten'] == None:
                self.args['shorten'] = 200

        if self.args['verbose']:
            print "mlogfilter> command line arguments"
            for a in self.args:
                print "mlogfilter> %8s: %s" % (a, self.args[a])

        # go through each line and ask each filter if it accepts
        if 'logfile' not in self.args or not self.args['logfile']:
            exit()

        for line in self.args['logfile']:
            logline = LogLine(line)
            if self.args['exclude']:
                # print line if any filter disagrees
                if any([not f.accept(logline) for f in self.filters]):
                    self._outputLine(logline.line_str, self.args['shorten'],
                                     self.args['human'])

            else:
                # only print line if all filters agree
                if all([f.accept(logline) for f in self.filters]):
                    self._outputLine(logline.line_str, self.args['shorten'],
                                     self.args['human'])

                # if at least one filter refuses to accept any remaining lines, stop
                if any([f.skipRemaining() for f in self.filters]):
                    # if input is not stdin
                    if sys.stdin.isatty():
                        break
Example #16
def test_logline_datetime_parsing():
    """ Check that all four timestamp formats are correctly parsed. """

    ll = LogLine(line_ctime_pre24)
    assert (str(ll.datetime) == '2013-08-03 21:52:05')
    assert (ll._datetime_format == 'ctime-pre2.4')

    ll = LogLine(line_ctime)
    assert (str(ll.datetime) == '2013-08-03 21:52:05.995000')
    assert (ll._datetime_format == 'ctime')

    ll = LogLine(line_iso8601_utc)
    assert (str(ll.datetime) == '2013-08-03 11:52:05.995000+00:00')
    assert (ll._datetime_format == 'iso8601-utc')

    ll = LogLine(line_iso8601_local)
    assert (str(ll.datetime) == '2013-08-03 21:52:05.995000+10:00')
    assert (ll._datetime_format == 'iso8601-local')
Example #17
    def test_from(self):
        random_start = random_date(self.logfile.start, self.logfile.end)

        self.tool.run(
            '%s --from %s' %
            (self.logfile_path, random_start.strftime("%b %d %H:%M:%S")))
        output = sys.stdout.getvalue()
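        # every line printed by the tool must be at or after the random --from date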
        for line in output.splitlines():
            ll = LogLine(line)
            assert (ll.datetime >= random_start)
Example #18
    def logfile_generator(self):
        """ generator method that yields each line of the logfile, or the next line in case of several log files. """
        
        if not self.is_stdin and not self.args['exclude']:
            # find datetime filter and binary-search for start date 
            dtfilter = filter(lambda x: isinstance(x, filters.DateTimeFilter), self.filters)
            if len(dtfilter) > 0:
                dtfilter[0].seek_binary()

        if len(self.args['logfile']) > 1:
            # todo, merge
            for logline in self._merge_logfiles():
                yield logline
        else:
            # only one file
            for line in self.args['logfile'][0]:
                logline = LogLine(line)
                if logline.datetime: 
                    logline._datetime = logline.datetime + timedelta(hours=self.args['timezone'][0])
                yield logline
Example #19
def test_logline_value_extraction():
    """ Check that all four timestamp formats are correctly parsed. """

    ll = LogLine(line_getmore)
    assert (ll.thread == 'conn9')
    assert (ll.operation == 'getmore')
    assert (ll.namespace == 'local.oplog.rs')
    assert (ll.duration == 144)
    assert (ll.numYields == 107)
    assert (ll.r == 85093)
    assert (ll.ntoreturn == 0)
    assert (ll.nreturned == 13551)
Example #20
    def run(self, arguments=None):
        """ Print out useful information about the log file. """
        LogFileTool.run(self, arguments)

        logfile = LogFile(self.args['logfile'])
        print "start of logfile: %s" % (logfile.start.strftime(
            "%b %d %H:%M:%S") if logfile.start else "unknown")
        print "  end of logfile: %s" % (logfile.end.strftime("%b %d %H:%M:%S")
                                        if logfile.start else "unknown")

        # get one logline (within first 20 lines) for datetime format
        logline = None
        for i in range(20):
            try:
                logline = LogLine(self.args['logfile'].next())
            except StopIteration as e:
                raise SystemExit(
                    "no valid log lines found (datetime not available).")
            if logline.datetime:
                break

        # TODO: add timezone if iso8601 format

        print "    line numbers: %s" % logfile.num_lines
        print "          binary: %s" % (logfile.binary or "unknown")

        version = (' -> '.join(logfile.versions) or "unknown")

        # if version is unknown, go by date
        if version == 'unknown' and logline:
            if logline.datetime_format == 'ctime-pre2.4':
                version = '< 2.4 (no milliseconds)'
            elif logline.datetime_format == 'ctime':
                version = '>= 2.4 (milliseconds present)'
            elif logline.datetime_format.startswith('iso8601-'):
                version = '>= 2.6 (iso8601 format)'

        print "         version: %s" % version

        # restarts section
        if self.args['restarts']:
            print
            print "RESTARTS"

            for version, logline in logfile.restarts:
                print "   %s version %s" % (
                    logline.datetime.strftime("%b %d %H:%M:%S"), version)

            if len(logfile.restarts) == 0:
                print "  no restarts found"
Example #21
    def logfile_generator(self):
        """ generator method that yields each line of the logfile, or the next line in case of several log files. """

        if not self.args["exclude"]:
            # ask all filters for a start_limit and fast-forward to the maximum
            start_limits = [f.start_limit for f in self.filters if hasattr(f, "start_limit")]

            if start_limits:
                for logfile in self.args["logfile"]:
                    lf_info = LogFile(logfile)
                    lf_info.fast_forward(max(start_limits))

        if len(self.args["logfile"]) > 1:
            # todo, merge
            for logline in self._merge_logfiles():
                yield logline
        else:
            # only one file
            for line in self.args["logfile"][0]:
                logline = LogLine(line)
                if logline.datetime:
                    logline._datetime = logline.datetime + timedelta(hours=self.args["timezone"][0])
                yield logline
Example #22
    def run(self):
        """ go over each line in the logfile, run through log2code matcher 
            and group by matched pattern.
        """
        LogFileTool.run(self)

        codelines = defaultdict(lambda: 0)  # matched pattern -> occurrence count
        non_matches = 0

        for line in self.args['logfile']:
            cl = self.log2code(line)
            if cl:
                codelines[cl.pattern] += 1
            else:
                ll = LogLine(line)
                if ll.operation:
                    # skip operations (command, insert, update, delete, query, getmore)
                    continue
                if not ll.thread:
                    # skip the lines that don't have a thread name (usually map/reduce or assertions)
                    continue
                if len(ll.split_tokens) - ll._thread_offset <= 1:
                    # skip empty log messages (after thread name)
                    continue

                # everything else is a real non-match
                non_matches += 1
                if self.args['verbose']:
                    print "couldn't match:", line,

        if self.args['verbose']: 
            print

        for cl in sorted(codelines, key=lambda x: codelines[x], reverse=True):
            print "%8i"%codelines[cl], "  ", " ... ".join(cl)

        print
        if non_matches > 0:
            print "couldn't match %i lines"%non_matches
            if not self.args['verbose']:
                print "to show non-matched lines, run with --verbose."
Example #23
    def run(self):
        LogFileTool.run(self)

        logfiles = self.args['logfile']

        # handle labels parameter
        if len(self.args['labels']) == 1:
            label = self.args['labels'][0]
            if label == 'enum':
                labels = ['{%i}'%(i+1) for i in range(len(logfiles))]
            elif label == 'alpha':
                labels = ['{%s}'%chr(97+i) for i in range(len(logfiles))]
            elif label == 'none':
                labels = [None for _ in logfiles]
            elif label == 'filename':
                labels = ['{%s}'%fn.name for fn in logfiles]
        elif len(self.args['labels']) == len(logfiles):
            labels = self.args['labels']
        else:
            raise SystemExit('Error: Number of labels not the same as number of files.')

        # handle timezone parameter
        if len(self.args['timezone']) == 1:
            self.args['timezone'] = self.args['timezone'] * len(logfiles)

        elif len(self.args['timezone']) == len(logfiles):
            pass

        elif len(self.args['timezone']) == 0:
            self.args['timezone'] = [0] * len(logfiles)

        else:
            raise SystemExit('Error: Invalid number of timezone parameters. Use either one parameter (for global adjustment) or the number of log files (for individual adjustments).')

        # handle position parameter
        position = self.args['pos']
        if position != 'eol':
            position = int(position)

        # define minimum and maximum datetime object
        mindate = datetime(MINYEAR, 1, 1, 0, 0, 0)
        maxdate = datetime(MAXYEAR, 12, 31, 23, 59, 59)

        # open files, read first lines, extract first dates
        lines = [f.readline() for f in logfiles]
        dates = [LogLine(l).datetime for l in lines]
        
        # replace all non-dates with mindate
        dates = [d if d else mindate for d in dates]
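        # apply the per-file timezone offsets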
        dates = [d + timedelta(hours=self.args['timezone'][i]) for i,d in enumerate(dates) if d]

        while any([l != '' for l in lines]):
            # pick smallest date of all non-empty lines
            condDates = ([d if lines[i] != '' else maxdate for i,d in enumerate(dates)])
            minCondDate = min(condDates)
            minIndex = condDates.index(minCondDate)

            # print out current line
            currLine = lines[minIndex].rstrip()
            try:
                oldDate = minCondDate - timedelta(hours=self.args['timezone'][minIndex])
            except OverflowError:
                oldDate = minCondDate
                
            if minCondDate != mindate:
                currLine = currLine.replace(oldDate.strftime('%a %b %d %H:%M:%S'), minCondDate.strftime('%a %b %d %H:%M:%S'))

            if labels[minIndex]:
                if position == 0 or minCondDate == mindate:
                    print labels[minIndex], currLine
                elif position == 'eol':
                    print currLine, labels[minIndex]
                else:
                    tokens = currLine.split()
                    print " ".join(tokens[:position]), labels[minIndex], " ".join(tokens[position:])

            else:
                print currLine

            # update lines and dates for that line
            lines[minIndex] = logfiles[minIndex].readline()
            dates[minIndex] = LogLine(lines[minIndex]).datetime

            if not dates[minIndex]:
                dates[minIndex] = mindate 
            else:
                dates[minIndex] += timedelta(hours=self.args['timezone'][minIndex])
Example #24
    def parse_loglines(self):
        multiple_files = False

        # create generator for logfile(s) handles
        if type(self.args['logfile']) != types.ListType:
            self.logfiles = [self.args['logfile']]
        else:
            self.logfiles = self.args['logfile']
            
        if len(self.logfiles) > 1:
            multiple_files = True
            self.args['group'] = 'filename'
        
        plot_instance = self.plot_types[self.args['type']](args=self.args, unknown_args=self.unknown_args)

        for logfile in self.logfiles:
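            # track the first and last datetime seen in this logfile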
            start = None
            end = None
            
            # get log file information
            if self.progress_bar_enabled:
                lfinfo = LogFile(logfile)
                if lfinfo.start and lfinfo.end:
                    progress_start = self._datetime_to_epoch(lfinfo.start)
                    progress_total = self._datetime_to_epoch(lfinfo.end) - progress_start
                    if progress_total == 0:
                        # protect from division by zero errors
                        self.progress_bar_enabled = False
                else:
                    # no date range available, can't report progress
                    self.progress_bar_enabled = False

            for i, line in enumerate(logfile):
                # create LogLine object
                logline = LogLine(line)

                # adjust times if --optime-start is enabled
                if self.args['optime_start'] and logline.duration:
                    # create new variable end_datetime in logline object and store starttime there
                    logline.end_datetime = logline.datetime 
                    logline._datetime = logline._datetime - timedelta(milliseconds=logline.duration)
                    logline._datetime_calculated = True

                if not start:
                    start = logline.datetime

                if logline.datetime:
                    if self.args['optime_start'] and hasattr(logline, 'end_datetime'):
                        end = logline.end_datetime
                    else:
                        end = logline.datetime

                # update progress bar every 1000 lines
                if self.progress_bar_enabled and (i % 1000 == 0) and logline.datetime:
                    progress_curr = self._datetime_to_epoch(logline.datetime)
                    self.update_progress(float(progress_curr-progress_start) / progress_total, 'parsing %s'%logfile.name)

                if multiple_files:
                    # amend logline object with filename for group by filename
                    logline.filename = logfile.name

                # offer plot_instance and see if it can plot it
                line_accepted = False
                if plot_instance.accept_line(logline):
                    
                    # if logline doesn't have datetime, skip
                    if logline.datetime == None:
                        continue
                    
                    if logline.namespace == None:
                        logline._namespace = "None"

                    line_accepted = True
                    plot_instance.add_line(logline)

            # store start and end for each logfile
            plot_instance.date_range = (start, end)

        # clear progress bar
        if self.logfiles and self.progress_bar_enabled:
            self.update_progress(1.0)

        self.plot_instances.append(plot_instance)

        # close files after parsing
        if sys.stdin.isatty():
            for f in self.logfiles:
                f.close()
Example #25
    if args['logfile']:  # opening condition inferred; the snippet begins mid-function
        logfile = open(args['logfile'], 'r')
    else:
        logfile = sys.stdin

    # make sub-folder .mlogvis and change to it
    mlogvis_dir = '.mlogvis'

    if not os.path.exists(mlogvis_dir):
        os.makedirs(mlogvis_dir)
    os.chdir(mlogvis_dir)

    outf = open('events.json', 'w')
    outf.write('{"type": "duration", "data":[')
    first_row = True
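    # write each accepted log line into the JSON "data" array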
    for line in logfile:
        logline = LogLine(line)
        # group regular connections together
        if logline.datetime and logline.duration:
            if logline.thread and logline.thread.startswith("conn"):
                logline._thread = "conn####"
            # write log line out as json
            if not first_row:
                # prepend comma and newline
                outf.write(',\n')
            else:
                first_row = False
            outf.write(logline.to_json(['line_str', 'datetime', 'operation', 'thread', 'namespace', 'nscanned', 'nreturned', 'duration']))
    outf.write(']}')
    outf.close()

    data_path = os.path.join(os.path.dirname(mtools.__file__), 'data')
Example #26
 def test_thread(self):
     self.tool.run('%s --thread initandlisten' % self.logfile_path)
     output = sys.stdout.getvalue()
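     # only lines from the initandlisten thread should remain in the output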
     for line in output.splitlines():
         ll = LogLine(line)
         assert (ll.thread == 'initandlisten')
Example #27
 def test_operation(self):
     self.tool.run('%s --operation insert' % self.logfile_path)
     output = sys.stdout.getvalue()
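     # only insert operations should remain in the output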
     for line in output.splitlines():
         ll = LogLine(line)
         assert (ll.operation == 'insert')
Example #28
    def run(self, arguments=None):
        LogFileTool.run(self, arguments)

        # start with every known version, then narrow the set down as lines match
        possible_versions = set(Log2CodeConverter.all_versions)

        re_versiond = re.compile(r'db version v(\d\.\d\.\d), pdfile version')
        re_versions = re.compile(r'MongoS version (\d\.\d\.\d) starting:')

        re_brackets = re.compile(r'\[\w+\]')

        for i, line in enumerate(self.args['logfile']):
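            # the log message starts after the [thread] token; skip lines without one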
            match = re_brackets.search(line)
            if not match:
                continue

            start = match.end()

            # check for explicit version string
            match = re_versiond.search(line[start:]) or re_versions.search(
                line[start:])

            if match:
                version = match.group(1)
                print "%32s %s" % ("restart detected in log line %i:" %
                                   (i + 1), line.rstrip())
                print "%32s %s" % ("previous possible versions:", ", ".join(
                    [pv[1:] for pv in sorted(possible_versions)]))
                print "%32s %s" % ("version after restart is:", version)
                print
                possible_versions = set(["r" + version])

            if len(possible_versions) == 1:
                # from here on, version is known, skip to next section
                continue

            ll = LogLine(line)
            if ll.operation != None:
                # if log line is a known command operation (query, update, command, ...) skip
                continue

            lcl = self.log2code(line[start:])
            if lcl:
                old_len = len(possible_versions)
                possible_versions = possible_versions & set(lcl.versions)
                if len(possible_versions) != old_len:
                    print "%32s %s" % ("log line %i:" % (i + 1), line.rstrip())
                    print "%32s %s" % ("matched pattern:", " ... ".join(
                        lcl.pattern))
                    print "%32s %s" % ("only present in:", ", ".join(
                        sorted(lcl.versions)))
                    print "%32s %s" % ("possible versions now:", ", ".join(
                        sorted(possible_versions)))
                    print

            if len(possible_versions) == 0:
                print "empty version set. exiting."
                raise SystemExit

        if len(possible_versions) > 1:
            print "possible versions at end of file:", ", ".join(
                [pv[1:] for pv in sorted(possible_versions)])
        else:
            print "version at end of file: ", possible_versions.pop()[1:]
Example #29
 def test_namespace(self):
     self.tool.run('%s --namespace local.oplog.rs' % self.logfile_path)
     output = sys.stdout.getvalue()
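     # only lines for the local.oplog.rs namespace should remain in the output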
     for line in output.splitlines():
         ll = LogLine(line)
         assert (ll.namespace == 'local.oplog.rs')