Example #1
    def _merge_logfiles(self):
        """ helper method to merge several files together by datetime. """
        # open files, read first lines, extract first dates
        lines = [f.readline() for f in self.args["logfile"]]
        lines = [LogLine(l) if l else None for l in lines]

        # adjust lines by timezone
        for i, line in enumerate(lines):
            if line and line.datetime:
                line._datetime = line.datetime + timedelta(hours=self.args["timezone"][i])

        while any(lines):
            min_line = min(lines, key=self._datetime_key_for_merge)
            min_index = lines.index(min_line)

            if self.args["markers"][min_index]:
                min_line.merge_marker_str = self.args["markers"][min_index]

            yield min_line

            # update lines array with a new line from the min_index'th logfile
            new_line = self.args["logfile"][min_index].readline()
            lines[min_index] = LogLine(new_line) if new_line else None
            if lines[min_index] and lines[min_index].datetime:
                lines[min_index]._datetime = lines[min_index].datetime + timedelta(
                    hours=self.args["timezone"][min_index]
                )
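
The key function `_datetime_key_for_merge` passed to `min()` is referenced but not shown above. Below is a minimal sketch of what such a key could look like, assuming `from datetime import datetime` is available and that `LogLine.datetime` returns None when a line carries no timestamp:

    def _datetime_key_for_merge(self, logline):
        """ Hypothetical key for the min() call above: exhausted files (None
        entries) and lines without a parseable datetime sort last, so min()
        always picks the earliest dated line that is still available. """
        if logline is None or logline.datetime is None:
            return datetime.max
        return logline.datetime

The standard library's heapq.merge performs a similar k-way merge, but the per-line timezone adjustment and marker tagging are easier to express with the explicit min() loop used here.
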
Example #2
    def logfile_generator(self):
        """ generator method that yields each line of the logfile, or the next line in case of several log files. """
        
        if not self.is_stdin and not self.args['exclude']:
            # find datetime filter and binary-search for the start date
            dtfilter = [f for f in self.filters if isinstance(f, filters.DateTimeFilter)]
            if dtfilter:
                dtfilter[0].seek_binary()

        if len(self.args['logfile']) > 1:
            # multiple log files: merge their lines by datetime
            for logline in self._merge_logfiles():
                yield logline
        else:
            # only one file
            for line in self.args['logfile'][0]:
                logline = LogLine(line)
                if logline.datetime: 
                    logline._datetime = logline.datetime + timedelta(hours=self.args['timezone'][0])
                yield logline
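
`seek_binary()` is only invoked here, not defined. The idea behind such a call (this is an illustration of the technique, not the DateTimeFilter implementation) is a binary search over byte offsets, so a chronologically ordered log file starts iterating at the first line at or after the requested start date. A rough standalone sketch, assuming the file is opened in binary mode and a hypothetical `parse_datetime` helper that maps a raw line to a datetime or None:

def seek_to_start_date(fileobj, start_dt, parse_datetime):
    """ Position fileobj at (approximately) the first line with datetime >= start_dt.
    Assumes the file is sorted by datetime and every complete line parses to a
    datetime; downstream date filters still check each yielded line. """
    fileobj.seek(0, 2)                 # jump to the end to learn the file size
    lo, hi = 0, fileobj.tell()

    def datetime_after(offset):
        # skip the partial line at offset, then parse the next complete line
        fileobj.seek(offset)
        if offset > 0:
            fileobj.readline()
        line = fileobj.readline()
        return parse_datetime(line) if line else None

    while lo < hi:
        mid = (lo + hi) // 2
        dt = datetime_after(mid)
        if dt is None or dt >= start_dt:
            hi = mid                   # target line is at or before this offset
        else:
            lo = mid + 1               # still too early, search the upper half
    fileobj.seek(lo)
    if lo > 0:
        fileobj.readline()             # drop the partial line; next read is a full line
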
Example #3
    def logfile_generator(self):
        """ generator method that yields each line of the logfile, or the next line in case of several log files. """

        if not self.args["exclude"]:
            # ask all filters for a start_limit and fast-forward to the maximum
            start_limits = [f.start_limit for f in self.filters if hasattr(f, "start_limit")]

            if start_limits:
                for logfile in self.args["logfile"]:
                    lf_info = LogFile(logfile)
                    lf_info.fast_forward(max(start_limits))

        if len(self.args["logfile"]) > 1:
            # multiple log files: merge their lines by datetime
            for logline in self._merge_logfiles():
                yield logline
        else:
            # only one file
            for line in self.args["logfile"][0]:
                logline = LogLine(line)
                if logline.datetime:
                    logline._datetime = logline.datetime + timedelta(hours=self.args["timezone"][0])
                yield logline
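
The `start_limit` protocol used above is implicit: any filter that can bound the earliest datetime it will ever accept exposes a `start_limit` attribute, and the generator fast-forwards every log file to the largest such bound. A hypothetical date-range filter showing the convention (the name DateRangeFilter and its attributes are made up for illustration; this is not the filters.DateTimeFilter referenced earlier):

class DateRangeFilter(object):
    """ Hypothetical filter illustrating the start_limit convention: exposing
    start_limit lets the generator fast-forward past lines that this filter
    would reject anyway. """

    def __init__(self, from_dt=None, to_dt=None):
        self.from_dt = from_dt
        self.to_dt = to_dt
        if from_dt is not None:
            # earliest datetime this filter can possibly accept
            self.start_limit = from_dt

    def accept(self, logline):
        if logline.datetime is None:
            return False
        if self.from_dt is not None and logline.datetime < self.from_dt:
            return False
        if self.to_dt is not None and logline.datetime > self.to_dt:
            return False
        return True

Taking max(start_limits) is safe as long as a line must pass every filter: a line earlier than the largest bound is earlier than that filter's bound and would be rejected by it anyway.
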
Example #4
    def parse_loglines(self):
        multiple_files = False

        # create generator for logfile(s) handles
        if not isinstance(self.args['logfile'], list):
            self.logfiles = [self.args['logfile']]
        else:
            self.logfiles = self.args['logfile']
            
        if len(self.logfiles) > 1:
            multiple_files = True
            self.args['group'] = 'filename'
        
        plot_instance = self.plot_types[self.args['type']](args=self.args, unknown_args=self.unknown_args)

        for logfile in self.logfiles:
            start = None
            end = None
            
            # get log file information for the progress bar
            if self.progress_bar_enabled:
                lfinfo = LogFile(logfile)
                if lfinfo.start and lfinfo.end:
                    progress_start = self._datetime_to_epoch(lfinfo.start)
                    progress_total = self._datetime_to_epoch(lfinfo.end) - progress_start
                    if progress_total == 0:
                        # protect from division by zero errors
                        self.progress_bar_enabled = False
                else:
                    # without start/end information no progress can be computed
                    self.progress_bar_enabled = False

            for i, line in enumerate(logfile):
                # create LogLine object
                logline = LogLine(line)

                # adjust times if --optime-start is enabled
                if self.args['optime_start'] and logline.duration:
                    # keep the original (end) datetime in end_datetime, then shift
                    # _datetime back by the duration so it marks the operation start
                    logline.end_datetime = logline.datetime
                    logline._datetime = logline._datetime - timedelta(milliseconds=logline.duration)
                    logline._datetime_calculated = True

                if not start:
                    start = logline.datetime

                if logline.datetime:
                    if self.args['optime_start'] and hasattr(logline, 'end_datetime'):
                        end = logline.end_datetime
                    else:
                        end = logline.datetime

                # update progress bar every 1000 lines
                if self.progress_bar_enabled and (i % 1000 == 0) and logline.datetime:
                    progress_curr = self._datetime_to_epoch(logline.datetime)
                    self.update_progress(float(progress_curr - progress_start) / progress_total,
                                         'parsing %s' % logfile.name)

                if multiple_files:
                    # amend logline object with filename for group by filename
                    logline.filename = logfile.name

                # offer the line to plot_instance and see if it can plot it
                line_accepted = False
                if plot_instance.accept_line(logline):

                    # skip loglines without a datetime
                    if logline.datetime is None:
                        continue

                    if logline.namespace is None:
                        logline._namespace = "None"

                    line_accepted = True
                    plot_instance.add_line(logline)

            # store start and end for each logfile
            plot_instance.date_range = (start, end)

        # clear progress bar
        if self.logfiles and self.progress_bar_enabled:
            self.update_progress(1.0)

        self.plot_instances.append(plot_instance)

        # close files after parsing
        if sys.stdin.isatty():
            for f in self.logfiles:
                f.close()
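
`_datetime_to_epoch` and `update_progress` are used for the progress bar but not defined in this excerpt. A minimal sketch of a compatible epoch helper, assuming `from datetime import datetime` and naive (or already normalized) datetimes, since only differences and ratios of the values are used:

    def _datetime_to_epoch(self, dt):
        """ Hypothetical helper matching the calls above: seconds since the
        Unix epoch as an int, or 0 if dt is None. """
        if dt is None:
            return 0
        return int((dt - datetime(1970, 1, 1)).total_seconds())
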