Example 1
    def run(self):
        """ run this section and print out information. """
        grouping = Grouping(group_by=lambda x: (x.namespace, x.pattern))
        logfile = self.mloginfo.logfile

        if logfile.start and logfile.end:
            progress_start = self.mloginfo._datetime_to_epoch(logfile.start)
            progress_total = self.mloginfo._datetime_to_epoch(logfile.end) - progress_start
        else:
            self.progress_bar_enabled = False


        for i, le in enumerate(logfile):
            # update progress bar every 1000 lines
            if self.progress_bar_enabled and (i % 1000 == 0):
                if le.datetime:
                    progress_curr = self.mloginfo._datetime_to_epoch(le.datetime)
                    self.mloginfo.update_progress(float(progress_curr-progress_start) / progress_total)

            if le.operation in ['query', 'update', 'remove']:
                grouping.add(le)

        grouping.sort_by_size()

        # clear progress bar again
        self.mloginfo.update_progress(1.0)

        titles = ['namespace', 'pattern', 'count', 'min (ms)', 'max (ms)', 'mean (ms)', 'sum (ms)']
        table_rows = []
        for g in grouping:
            # calculate statistics for this group
            namespace, pattern = g

            group_events = [le.duration for le in grouping[g] if le.duration is not None]

            stats = OrderedDict()
            stats['namespace'] = namespace
            stats['pattern'] = pattern
            stats['count'] = len(group_events)
            stats['min'] = min(group_events) if group_events else '-'
            stats['max'] = max(group_events) if group_events else '-'
            stats['mean'] = 0  # placeholder so 'mean' precedes 'sum' in the column order
            stats['sum'] = sum(group_events) if group_events else '-'
            stats['mean'] = stats['sum'] / stats['count'] if group_events else '-'

            if self.mloginfo.args['verbose']:
                stats['example'] = grouping[g][0]
                titles.append('example')

            table_rows.append(stats)

        table_rows = sorted(table_rows, key=itemgetter('sum'), reverse=True)
        print_table(table_rows, titles, uppercase_headers=False)
        print('')
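
Every example on this page leans on the same Grouping helper from mtools. As a minimal standalone sketch of the pattern they all share (the import path is the one mtools uses; the Event namedtuple and sample data are invented stand-ins for parsed log lines):

from collections import namedtuple

from mtools.util.grouping import Grouping

# hypothetical stand-in for a parsed log event
Event = namedtuple('Event', ['namespace', 'pattern', 'duration'])

events = [
    Event('test.users', '{"_id": 1}', 12),
    Event('test.users', '{"_id": 1}', 30),
    Event('test.orders', '{"status": 1}', 7),
]

# same flow as the run() sections above: construct, add, sort, iterate
grouping = Grouping(group_by=lambda x: (x.namespace, x.pattern))
for ev in events:
    grouping.add(ev)
grouping.sort_by_size()

for key in grouping:                  # keys are the group_by tuples
    durations = [e.duration for e in grouping[key]]
    print(key, len(grouping[key]), sum(durations))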
Example 2
    def group(self):
        """(re-)group all logevents by the given group."""
        if hasattr(self, 'group_by'):
            group_by = self.group_by
        else:
            group_by = self.default_group_by
            if self.args['group'] is not None:
                group_by = self.args['group']

        self.groups = Grouping(self.logevents, group_by)
        self.groups.move_items(None, 'others')
        self.groups.sort_by_size(group_limit=self.args['group_limit'],
                                 discard_others=self.args['no_others'])
Example 3
    def __init__(self, args=None, unknown_args=None):
        self.args = args
        self.unknown_args = unknown_args
        self.groups = OrderedDict()
        self.empty = True
        self.limits = None

        if self.args['optime_start']:
            self.xlabel = 'time (start of ops)'
        else:
            self.xlabel = 'time (end of ops)'
Example 4
    def group(self):
        """ (re-)group all logevents by the given group. """
        if hasattr(self, 'group_by'):
            group_by = self.group_by
        else:
            group_by = self.default_group_by
            if self.args['group'] is not None:
                group_by = self.args['group']

        self.groups = Grouping(self.logevents, group_by)
        self.groups.move_items(None, 'others')
        self.groups.sort_by_size(group_limit=self.args['group_limit'], discard_others=self.args['no_others'])
Example 5
    def run(self):
        """Run this section and print out information."""
        grouping = Grouping(
            group_by=lambda x: (x.datetime, x.cursorid, x.reapedtime))
        logfile = self.mloginfo.logfile

        if logfile.start and logfile.end:
            progress_start = self.mloginfo._datetime_to_epoch(logfile.start)
            progress_total = (self.mloginfo._datetime_to_epoch(logfile.end) -
                              progress_start)
        else:
            self.mloginfo.progress_bar_enabled = False

        for i, le in enumerate(logfile):
            # update progress bar every 1000 lines
            if self.mloginfo.progress_bar_enabled and (i % 1000 == 0):
                if le.datetime:
                    progress_curr = self.mloginfo._datetime_to_epoch(
                        le.datetime)
                    if progress_total:
                        (self.mloginfo.update_progress(
                            float(progress_curr - progress_start) /
                            progress_total))

            if 'Cursor id' in le.line_str:
                lt = LogTuple(le.datetime, le.cursor, le._reapedtime)
                grouping.add(lt)

        grouping.sort_by_size()

        # clear progress bar again
        if self.mloginfo.progress_bar_enabled:
            self.mloginfo.update_progress(1.0)

        # no cursor information in the log file
        if not len(grouping):
            print('no cursor information found.')
            return

        titles = ['datetime', 'cursorid', 'reapedtime']

        table_rows = []
        # using only important key-values
        for g in grouping:
            # calculate statistics for this group
            datetime, cursorid, reapedtime = g
            stats = OrderedDict()
            stats['datetime'] = str(datetime)
            stats['cursorid'] = str(cursorid)
            stats['reapedtime'] = str(reapedtime)
            table_rows.append(stats)

        print_table(table_rows, titles, uppercase_headers=True)

        print('')
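
Each section computes its progress fraction the same way: convert the log's start and end datetimes to epoch seconds, then report (current - start) / total. The arithmetic in isolation, with a hypothetical to_epoch standing in for the private _datetime_to_epoch helper:

import calendar
from datetime import datetime

def to_epoch(dt):
    # hypothetical stand-in for mloginfo._datetime_to_epoch
    return calendar.timegm(dt.timetuple())

start = to_epoch(datetime(2023, 1, 1, 0, 0, 0))
total = to_epoch(datetime(2023, 1, 1, 1, 0, 0)) - start   # 3600 seconds

curr = to_epoch(datetime(2023, 1, 1, 0, 15, 0))
print(float(curr - start) / total)    # 0.25, i.e. 25% through the file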
Example 6
class BasePlotType(object):

    # 14 most distinguishable colors, according to
    # http://stackoverflow.com/questions/309149/generate-distinctly-different-rgb-colors-in-graphs
    colors = ['#000000', '#00FF00', '#0000FF', '#FF0000', '#01FFFE', '#FFA6FE',
              '#FFDB66', '#006401', '#010067', '#95003A', '#007DB5', '#FF00F6',
              '#FFEEE8', '#774D00']
    color_index = 0
    markers = ['o', 's', '<', 'D']
    marker_index = 0

    sort_order = 0
    plot_type_str = 'base'
    default_group_by = None
    date_range = (datetime(MAXYEAR, 12, 31), datetime(MINYEAR, 1, 1))


    def __init__(self, args=None, unknown_args=None):
        self.args = args
        self.unknown_args = unknown_args
        self.groups = OrderedDict()
        self.empty = True
        self.limits = None

        if self.args['optime_start']:
            self.xlabel = 'time (start of ops)'
        else:
            self.xlabel = 'time (end of ops)'


    def accept_line(self, logevent):
        """ return True if this PlotType can plot this line. """
        return True

    def add_line(self, logevent):
        """ append log line to this plot type. """
        key = None
        self.empty = False
        self.groups.setdefault(key, list()).append(logevent)

    @property 
    def logevents(self):
        """ iterator yielding all logevents from groups dictionary. """
        for key in self.groups:
            for logevent in self.groups[key]:
                yield logevent

    @classmethod
    def color_map(cls, group):
        color = cls.colors[cls.color_index]
        cls.color_index += 1

        marker = cls.markers[cls.marker_index]
        if cls.color_index >= len(cls.colors):
            cls.marker_index += 1
            cls.marker_index %= len(cls.markers)
            cls.color_index = 0  # wrap around: start over at the first color

        return color, marker


    def group(self):
        """ (re-)group all logevents by the given group. """
        if hasattr(self, 'group_by'):
            group_by = self.group_by
        else:
            group_by = self.default_group_by
            if self.args['group'] is not None:
                group_by = self.args['group']

        self.groups = Grouping(self.logevents, group_by)
        self.groups.move_items(None, 'others')
        self.groups.sort_by_size(group_limit=self.args['group_limit'], discard_others=self.args['no_others'])

    def plot_group(self, group, idx, axis):
        raise NotImplementedError("BasePlotType can't plot. Use a derived class instead")


    def clicked(self, event):
        """ this is called if an element of this plottype was clicked. Implement in sub class. """
        pass


    def plot(self, axis, ith_plot, total_plots, limits):
        self.limits = limits

        artists = []
        print(self.plot_type_str.upper() + " plot")
        print("%5s %9s  %s" % ("id", " #points", "group"))

        for idx, group in enumerate(self.groups):
            print "%5s %9s  %s"%(idx+1, len(self.groups[group]), group)
            group_artists = self.plot_group(group, idx+ith_plot, axis)
            if isinstance(group_artists, list):
                artists.extend(group_artists)
            else:
                artists.append(group_artists)

        print()

        return artists
Example 7
    def run(self):
        """ run this section and print out information. """
        grouping = Grouping(group_by=lambda x: (x.namespace, x.pattern))
        logfile = self.mloginfo.logfile

        if logfile.start and logfile.end:
            progress_start = self.mloginfo._datetime_to_epoch(logfile.start)
            progress_total = self.mloginfo._datetime_to_epoch(
                logfile.end) - progress_start
        else:
            self.progress_bar_enabled = False

        for i, le in enumerate(logfile):
            # update progress bar every 1000 lines
            if self.progress_bar_enabled and (i % 1000 == 0):
                if le.datetime:
                    progress_curr = self.mloginfo._datetime_to_epoch(
                        le.datetime)
                    self.mloginfo.update_progress(
                        float(progress_curr - progress_start) / progress_total)

            if le.operation in ['query', 'update', 'remove']:
                grouping.add(le)

        grouping.sort_by_size()

        # clear progress bar again
        self.mloginfo.update_progress(1.0)

        titles = [
            'namespace', 'pattern', 'count', 'min (ms)', 'max (ms)',
            'mean (ms)', 'sum (ms)'
        ]
        table_rows = []
        for g in grouping:
            # calculate statistics for this group
            namespace, pattern = g

            group_events = [
                le.duration for le in grouping[g] if le.duration is not None
            ]

            stats = OrderedDict()
            stats['namespace'] = namespace
            stats['pattern'] = pattern
            stats['count'] = len(group_events)
            stats['min'] = min(group_events) if group_events else '-'
            stats['max'] = max(group_events) if group_events else '-'
            stats['mean'] = 0  # placeholder so 'mean' precedes 'sum' in the column order
            stats['sum'] = sum(group_events) if group_events else '-'
            stats['mean'] = (stats['sum'] / stats['count']
                             if group_events else '-')

            if self.mloginfo.args['verbose']:
                stats['example'] = grouping[g][0]
                titles.append('example')

            table_rows.append(stats)

        table_rows = sorted(table_rows, key=itemgetter('sum'), reverse=True)
        print_table(table_rows, titles, uppercase_headers=False)
        print('')
Example 8
class BasePlotType(object):

    # 14 most distinguishable colors, according to
    # http://stackoverflow.com/questions/309149/generate-distinctly-different-rgb-colors-in-graphs
    colors = [
        '#0000FF', '#FF00F6', '#01FFFE', '#505050', '#909090', '#FF0000',
        '#00FF00', '#FFA6FE', '#FFDB66', '#006401', '#010067', '#95003A',
        '#007DB5', '#FFEEE8', '#774D00'
    ]

    color_index = 0
    markers = ['o', 's', '<', 'D']
    marker_index = 0

    sort_order = 0
    plot_type_str = 'base'
    default_group_by = None
    date_range = (datetime(MAXYEAR, 12, 31, tzinfo=tzutc()),
                  datetime(MINYEAR, 1, 1, tzinfo=tzutc()))

    def __init__(self, args=None, unknown_args=None):
        self.args = args
        self.unknown_args = unknown_args
        self.groups = OrderedDict()
        self.empty = True
        self.limits = None

        if self.args['optime_start']:
            self.xlabel = 'time (start of ops)'
        else:
            self.xlabel = 'time (end of ops)'

    def accept_line(self, logevent):
        """Return True if this PlotType can plot this line."""
        return True

    def add_line(self, logevent):
        """Append log line to this plot type."""
        key = None
        self.empty = False
        self.groups.setdefault(key, list()).append(logevent)

    @property
    def logevents(self):
        """Iterator yielding all logevents from groups dictionary."""
        for key in self.groups:
            for logevent in self.groups[key]:
                yield logevent

    @classmethod
    def color_map(cls, group):
        color = cls.colors[cls.color_index]
        cls.color_index += 1

        marker = cls.markers[cls.marker_index]
        if cls.color_index >= len(cls.colors):
            cls.marker_index += 1
            cls.marker_index %= len(cls.markers)
            cls.color_index = 0  # wrap around: start over at the first color

        return color, marker

    def group(self):
        """(re-)group all logevents by the given group."""
        if hasattr(self, 'group_by'):
            group_by = self.group_by
        else:
            group_by = self.default_group_by
            if self.args['group'] is not None:
                group_by = self.args['group']

        self.groups = Grouping(self.logevents, group_by)
        self.groups.move_items(None, 'others')
        self.groups.sort_by_size(group_limit=self.args['group_limit'],
                                 discard_others=self.args['no_others'])

    def plot_group(self, group, idx, axis):
        raise NotImplementedError("BasePlotType can't plot. "
                                  "Use a derived class instead")

    def clicked(self, event):
        """
        Call if an element of this plottype was clicked.

        Implement in sub class.
        """
        pass

    def plot(self, axis, ith_plot, total_plots, limits):
        self.limits = limits

        artists = []
        print(self.plot_type_str.upper() + " plot")
        print("%5s %9s  %s" % ("id", " #points", "group"))

        for idx, group in enumerate(self.groups):
            print("%5s %9s  %s" % (idx + 1, len(self.groups[group]), group))
            group_artists = self.plot_group(group, idx + ith_plot, axis)
            if isinstance(group_artists, list):
                artists.extend(group_artists)
            else:
                artists.append(group_artists)

        print()

        return artists
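
color_map hands out colors round-robin and advances the marker each time the palette is exhausted, so plots cycle through len(colors) * len(markers) distinct styles. A toy reproduction of that cycling behavior (shortened palette invented for illustration, not the class above):

colors = ['#000000', '#00FF00', '#0000FF']   # shortened toy palette
markers = ['o', 's']
color_index = marker_index = 0

def next_style():
    global color_index, marker_index
    color = colors[color_index]
    color_index += 1
    marker = markers[marker_index]
    if color_index >= len(colors):
        # palette exhausted: advance the marker, restart the colors
        marker_index = (marker_index + 1) % len(markers)
        color_index = 0
    return color, marker

for _ in range(6):
    print(next_style())
# ('#000000', 'o'), ('#00FF00', 'o'), ('#0000FF', 'o'),
# ('#000000', 's'), ('#00FF00', 's'), ('#0000FF', 's')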
Example 9
    def run(self):
        """Run this section and print out information."""
        grouping = Grouping(
            group_by=lambda x: (x.namespace, x.operation, x.pattern))
        logfile = self.mloginfo.logfile

        if logfile.start and logfile.end:
            progress_start = self.mloginfo._datetime_to_epoch(logfile.start)
            progress_total = (self.mloginfo._datetime_to_epoch(logfile.end) -
                              progress_start)
        else:
            self.mloginfo.progress_bar_enabled = False

        for i, le in enumerate(logfile):
            # update progress bar every 1000 lines
            if self.mloginfo.progress_bar_enabled and (i % 1000 == 0):
                if le.datetime:
                    progress_curr = self.mloginfo._datetime_to_epoch(
                        le.datetime)
                    if progress_total:
                        (self.mloginfo.update_progress(
                            float(progress_curr - progress_start) /
                            progress_total))

            if (le.operation in ['query', 'getmore', 'update', 'remove']
                    or le.command in ['count', 'findandmodify', 'geonear',
                                      'find']):
                lt = LogTuple(namespace=le.namespace,
                              operation=op_or_cmd(le),
                              pattern=le.pattern,
                              duration=le.duration)
                grouping.add(lt)

        grouping.sort_by_size()

        # clear progress bar again
        if self.mloginfo.progress_bar_enabled:
            self.mloginfo.update_progress(1.0)

        # no queries in the log file
        if len(grouping) < 1:
            print('no queries found.')
            return

        titles = [
            'namespace', 'operation', 'pattern', 'count', 'min (ms)',
            'max (ms)', 'mean (ms)', '95%-ile (ms)', 'sum (ms)'
        ]
        table_rows = []

        for g in grouping:
            # calculate statistics for this group
            namespace, op, pattern = g

            group_events = [
                le.duration for le in grouping[g] if le.duration is not None
            ]

            stats = OrderedDict()
            stats['namespace'] = namespace
            stats['operation'] = op
            stats['pattern'] = pattern
            stats['count'] = len(group_events)
            stats['min'] = min(group_events) if group_events else '-'
            stats['max'] = max(group_events) if group_events else '-'
            stats['mean'] = 0  # placeholder to keep 'mean' ahead of '95%' and 'sum'
            if np:
                stats['95%'] = (np.percentile(group_events, 95)
                                if group_events else '-')
            else:
                stats['95%'] = 'n/a'
            stats['sum'] = sum(group_events) if group_events else '-'
            stats['mean'] = (stats['sum'] / stats['count']
                             if group_events else '-')

            if self.mloginfo.args['verbose']:
                stats['example'] = grouping[g][0]
                titles.append('example')

            table_rows.append(stats)

        # sort order depending on field names
        reverse = True
        if self.mloginfo.args['sort'] in ['namespace', 'pattern']:
            reverse = False

        table_rows = sorted(table_rows,
                            key=itemgetter(self.mloginfo.args['sort']),
                            reverse=reverse)
        print_table(table_rows, titles, uppercase_headers=False)
        print('')
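
The if np: test above implies np is bound to None when NumPy is unavailable, so the 95th-percentile column degrades to 'n/a'. A sketch of that guarded-import convention (an assumption about the surrounding module, which is not shown in the excerpt):

try:
    import numpy as np
except ImportError:
    np = None   # sections fall back to 'n/a' for the 95%-ile column

def percentile_95(durations):
    """95th percentile if NumPy is present, 'n/a' otherwise."""
    if not durations:
        return '-'
    return np.percentile(durations, 95) if np else 'n/a'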
Example 10
    def _print_chunk_migrations(self, chunks, moved_from=False):
        """Prints the chunk migration statistics in a table depending on to/from flag"""
        verbose = self.mloginfo.args['verbose']
        chunks.reverse()

        if verbose:
            chunk_groupings = Grouping(group_by=lambda x: x.time)
        else:
            chunk_groupings = Grouping(group_by=lambda x: (x.time.strftime(
                "%Y-%m-%dT%H"), x.movedFromTo, x.namespace))

        for chunk_moved in chunks:
            (time, chunk_range, moved_to_from, namespace,
             steps, status, error_message) = chunk_moved
            moved_tuple = ChunksTuple(time=time,
                                      range=chunk_range,
                                      movedFromTo=moved_to_from,
                                      namespace=namespace,
                                      steps=steps,
                                      migrationStatus=status,
                                      errorMessage=error_message)
            chunk_groupings.add(moved_tuple)

        move_to_from_title = 'to shard' if moved_from else 'from shard'
        if verbose:
            titles = [
                '  time', move_to_from_title, 'namespace',
                'chunk migration status'
            ]
        else:
            titles = [
                '  time (/hour)', move_to_from_title, 'namespace',
                '# chunks migrations attempted', 'successful chunk migrations',
                'failed chunk migrations'
            ]

        if len(chunk_groupings) == 0:
            print("  no chunk migrations found.")
        else:
            table_rows = []
            for group, chunks in chunk_groupings.items():

                if verbose:
                    time = group.strftime("%Y-%m-%dT%H:%M:%S.%f")[:-3]
                    chunk = chunks[0]
                else:
                    time, moved_to_from, namespace = group
                    successful_count = 0
                    total_time_spent = 0
                    failed = dict()
                    succeeded_after = dict()
                    for chunk in chunks:
                        if chunk.migrationStatus == "success":
                            successful_count += 1
                            succeeded_after[chunk.range] = (True, chunk.time)
                            total_time_spent += sum(
                                int(ms) for step, ms in chunk.steps)
                        else:
                            count, timestamps = failed.get(
                                chunk.errorMessage, (0, list()))
                            count += 1
                            successful_after, timestamp = succeeded_after.get(
                                chunk.range, (False, None))
                            if successful_after:
                                timestamp = timestamp.strftime(
                                    "%H:%M:%S.%f")[:-3]
                                chunk_time = chunk.time.strftime(
                                    "%H:%M:%S.%f")[:-3]
                                timestamps.append(
                                    chunk_time +
                                    f' BECAME SUCCESSFUL AT: {timestamp}')
                            else:
                                timestamps.append(
                                    chunk.time.strftime("%H:%M:%S.%f")[:-3])
                            failed[chunk.errorMessage] = (count, timestamps)

                moved_chunks = OrderedDict()

                moved_chunks['time'] = f"  {time}"
                moved_chunks['movedFromTo'] = moved_to_from
                moved_chunks['namespace'] = namespace

                if verbose:
                    if chunk.migrationStatus == "success":
                        total_time_spent = sum(
                            int(ms) for step, ms in chunk.steps)
                        msg = f"Successful | Total time spent {total_time_spent}ms"
                        step_breakdown = ', '.join(f"{step}: {ms}ms"
                                                   for step, ms in chunk.steps)
                        moved_chunks[
                            'chunkMigrationStatus'] = msg + f" ({step_breakdown})"
                    else:
                        moved_chunks[
                            'chunkMigrationStatus'] = f"Failed with {chunk.errorMessage}"
                else:
                    moved_chunks['numberOfChunks'] = f'{len(chunks)} chunk(s)'
                    msg = (f"{successful_count} chunk(s) moved " +
                           f"| Total time spent: {total_time_spent}ms")
                    moved_chunks['successChunkMigrations'] = msg

                    failed_migrations = ""
                    for error, info in failed.items():
                        count, timestamps = info
                        failed_migrations += (
                            f'{count} chunk(s): {timestamps} '
                            f'failed with "{error}".')

                    if len(failed_migrations):
                        moved_chunks[
                            'failedChunkMigrations'] = failed_migrations
                    else:
                        moved_chunks[
                            'failedChunkMigrations'] = "no failed chunks."

                table_rows.append(moved_chunks)

            print_table(table_rows, titles)
            if not verbose:
                print(
                    "\nto show individual chunk migration, run with --verbose."
                )
Example 11
    def _print_chunk_statistics(self):
        """Prints the chunk split statistics in a table"""
        self.mloginfo.logfile.chunk_splits.reverse()

        chunk_split_groupings = Grouping(
            group_by=lambda x: (x.time.strftime("%Y-%m-%dT%H"), x.namespace))

        for chunk_split in self.mloginfo.logfile.chunk_splits:
            (time, split_range, namespace, numSplits,
             success, timeTaken, error) = chunk_split
            split_tuple = SplitTuple(time=time,
                                     range=split_range,
                                     namespace=namespace,
                                     numSplits=numSplits,
                                     success=success,
                                     timeTaken=timeTaken,
                                     error=error)
            chunk_split_groupings.add(split_tuple)

        titles = [
            '  time (/hour)', 'namespace', '# split-vectors issued',
            'successful chunk splits', 'failed chunk splits'
        ]

        if len(chunk_split_groupings) == 0:
            print("  no chunk splits found.")
        else:
            table_rows = []
            for group, splits in chunk_split_groupings.items():

                time, namespace = group
                successful_count = 0
                total_number_vectors = 0
                split_succeeded_after = dict()
                failed_splits = dict()
                total_time_taken = 0
                for split in splits:
                    total_number_vectors += int(split.numSplits)
                    if (not split.success) and split.error:
                        count, timestamps = failed_splits.get(
                            split.error, (0, list()))
                        count += 1
                        if split_succeeded_after.get(split.range, False):
                            timestamps.append(
                                split.time.strftime("%H:%M:%S.%f")[:-3] +
                                ' **WAS SUCCESSFUL AFTER**')
                        else:
                            timestamps.append(
                                split.time.strftime("%H:%M:%S.%f")[:-3])
                        failed_splits[split.error] = (count, timestamps)
                    elif split.success:
                        split_succeeded_after[split.range] = True
                        successful_count += 1
                        total_time_taken += sum(
                            int(ms) for ms in split.timeTaken)

                split_summary = OrderedDict()

                split_summary['time'] = f"  {time}"
                split_summary['namespace'] = namespace

                split_summary[
                    'numSplitVectors'] = f'{total_number_vectors} split vector(s)'
                msg = (f"{successful_count} chunk(s) splitted" +
                       f" | Total time spent: {total_time_taken}ms")
                split_summary['successfulSplits'] = msg

                failed_split = ""
                for error, info in failed_splits.items():
                    count, timestamps = info
                    if error == "Jumbo":
                        failed_split += (f'{count} chunk(s): ' +
                                         f'{timestamps} marked as {error}.')
                    else:
                        failed_split += (f'{count} chunk(s): {timestamps} ' +
                                         f'failed with "{error}". ')

                if len(failed_split):
                    split_summary['failedChunkSplits'] = failed_split
                else:
                    split_summary[
                        'failedChunkSplits'] = "no failed chunk splits."

                table_rows.append(split_summary)

            print_table(table_rows, titles)
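
Both chunk sections accumulate failures in the same shape: a dict mapping each error message to (count, [timestamps]). The accumulator pattern in isolation, with invented sample data:

# error message -> (count, [timestamps]); toy data for illustration
samples = [('timeout', '10:00:01'), ('aborted', '10:02:11'),
           ('timeout', '10:05:42')]

failed = dict()
for error, ts in samples:
    count, timestamps = failed.get(error, (0, list()))
    count += 1
    timestamps.append(ts)
    failed[error] = (count, timestamps)

for error, (count, timestamps) in failed.items():
    print(f'{count} chunk(s): {timestamps} failed with "{error}".')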
Example 12
    def run(self):
        """Run this section and print out information."""
        grouping = Grouping(group_by=lambda x: (
            x.datetime, x.txnNumber, x.autocommit, x.readConcern,
            x.timeActiveMicros, x.timeInactiveMicros, x.duration))

        logfile = self.mloginfo.logfile

        if logfile.start and logfile.end:
            progress_start = self.mloginfo._datetime_to_epoch(logfile.start)
            progress_total = (self.mloginfo._datetime_to_epoch(logfile.end) -
                              progress_start)
        else:
            self.mloginfo.progress_bar_enabled = False

        for i, le in enumerate(logfile):
            # update progress bar every 1000 lines

            if self.mloginfo.progress_bar_enabled and (i % 1000 == 0):
                if le.datetime:
                    progress_curr = self.mloginfo._datetime_to_epoch(
                        le.datetime)
                    if progress_total:
                        (self.mloginfo.update_progress(
                            float(progress_curr - progress_start) /
                            progress_total))

            if re.search('transaction', le.line_str):
                lt = LogTuple(le.datetime, le.txnNumber, le.autocommit,
                              le.readConcern, le.timeActiveMicros,
                              le.timeInactiveMicros, le.duration)

                grouping.add(lt)

        grouping.sort_by_size()

        # clear progress bar again
        if self.mloginfo.progress_bar_enabled:
            self.mloginfo.update_progress(1.0)

        # no queries in the log file
        if not len(grouping):
            print('no transactions found.')
            return

        titles = [
            'datetime', 'txnNumber', 'autocommit', 'readConcern',
            'timeActiveMicros', 'timeInactiveMicros', 'duration'
        ]

        table_rows = []
        # using only important key-values
        # can be used in future
        for g in grouping:
            # calculate statistics for this group
            (datetime, txnNumber, autocommit, readConcern,
             timeActiveMicros, timeInactiveMicros, duration) = g
            stats = OrderedDict()
            stats['datetime'] = str(datetime)
            stats['txnNumber'] = txnNumber
            stats['autocommit'] = autocommit
            stats['readConcern'] = readConcern
            stats['timeActiveMicros'] = timeActiveMicros
            stats['timeInactiveMicros'] = timeInactiveMicros
            stats['duration'] = duration
            table_rows.append(stats)

        if self.mloginfo.args['tsort'] == 'duration':
            table_rows = sorted(table_rows,
                                key=itemgetter(self.mloginfo.args['tsort']),
                                reverse=True)

        print_table(table_rows, titles, uppercase_headers=True)

        print('')
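
LogTuple is filled positionally with exactly the fields the group_by lambda reads back, which suggests a namedtuple declared next to the section. A sketch of that assumed definition (field order inferred from the calls above; the real declaration is not shown in the excerpt):

from collections import namedtuple

# assumed shape of LogTuple for the transaction section
LogTuple = namedtuple('LogTuple', ['datetime', 'txnNumber', 'autocommit',
                                   'readConcern', 'timeActiveMicros',
                                   'timeInactiveMicros', 'duration'])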
Example 13
    def run(self):
        """Run this section and print out information."""
        grouping = Grouping(group_by=lambda x: (x.namespace, x.operation,
                                                x.pattern))
        logfile = self.mloginfo.logfile

        if logfile.start and logfile.end:
            progress_start = self.mloginfo._datetime_to_epoch(logfile.start)
            progress_total = (self.mloginfo._datetime_to_epoch(logfile.end) -
                              progress_start)
        else:
            self.mloginfo.progress_bar_enabled = False

        for i, le in enumerate(logfile):
            # update progress bar every 1000 lines
            if self.mloginfo.progress_bar_enabled and (i % 1000 == 0):
                if le.datetime:
                    progress_curr = self.mloginfo._datetime_to_epoch(
                        le.datetime)
                    if progress_total:
                        self.mloginfo.update_progress(
                            float(progress_curr - progress_start) /
                            progress_total)

            if (le.operation in ['query', 'getmore', 'update', 'remove'] or
                    le.command in ['count', 'findandmodify',
                                   'geonear', 'find']):
                lt = LogTuple(namespace=le.namespace, operation=op_or_cmd(le),
                              pattern=le.pattern, duration=le.duration)
                grouping.add(lt)

        grouping.sort_by_size()

        # clear progress bar again
        if self.mloginfo.progress_bar_enabled:
            self.mloginfo.update_progress(1.0)

        # no queries in the log file
        if len(grouping) < 1:
            print('no queries found.')
            return

        titles = ['namespace', 'operation', 'pattern', 'count', 'min (ms)',
                  'max (ms)', 'mean (ms)', '95%-ile (ms)', 'sum (ms)']
        table_rows = []

        for g in grouping:
            # calculate statistics for this group
            namespace, op, pattern = g

            group_events = [le.duration for le in grouping[g]
                            if le.duration is not None]

            stats = OrderedDict()
            stats['namespace'] = namespace
            stats['operation'] = op
            stats['pattern'] = pattern
            stats['count'] = len(group_events)
            stats['min'] = min(group_events) if group_events else '-'
            stats['max'] = max(group_events) if group_events else '-'
            stats['mean'] = 0  # placeholder to keep 'mean' ahead of '95%' and 'sum'
            if np:
                stats['95%'] = (np.percentile(group_events, 95)
                                if group_events else '-')
            else:
                stats['95%'] = 'n/a'
            stats['sum'] = sum(group_events) if group_events else '-'
            stats['mean'] = (stats['sum'] / stats['count']
                             if group_events else '-')

            if self.mloginfo.args['verbose']:
                stats['example'] = grouping[g][0]
                titles.append('example')

            table_rows.append(stats)

        # sort order depending on field names
        reverse = True
        if self.mloginfo.args['sort'] in ['namespace', 'pattern']:
            reverse = False

        table_rows = sorted(table_rows,
                            key=itemgetter(self.mloginfo.args['sort']),
                            reverse=reverse)
        print_table(table_rows, titles, uppercase_headers=False)
        print('')
Example 14
    def run(self):
        """Run this section and print out information."""
        grouping = Grouping(group_by=lambda x: (
            x.namespace, x.operation, x.bytesRead, x.bytesWritten,
            x.timeReadingMicros, x.timeWritingMicros))
        logfile = self.mloginfo.logfile

        if logfile.start and logfile.end:
            progress_start = self.mloginfo._datetime_to_epoch(logfile.start)
            progress_total = (self.mloginfo._datetime_to_epoch(logfile.end) -
                              progress_start)
        else:
            self.mloginfo.progress_bar_enabled = False

        for i, le in enumerate(logfile):
            # update progress bar every 1000 lines
            if self.mloginfo.progress_bar_enabled and (i % 1000 == 0):
                if le.datetime:
                    progress_curr = self.mloginfo._datetime_to_epoch(
                        le.datetime)
                    if progress_total:
                        (self.mloginfo.update_progress(
                            float(progress_curr - progress_start) /
                            progress_total))

            if (le.operation in ['update'] or le.command in ['insert']):
                lt = LogTuple(namespace=le.namespace,
                              operation=op_or_cmd(le),
                              bytesRead=le.bytesRead,
                              bytesWritten=le.bytesWritten,
                              timeReadingMicros=le.timeReadingMicros,
                              timeWritingMicros=le.timeWritingMicros)
                grouping.add(lt)

        grouping.sort_by_size()

        # clear progress bar again
        if self.mloginfo.progress_bar_enabled:
            self.mloginfo.update_progress(1.0)

        # no queries in the log file
        if not len(grouping):
            print('no statistics found.')
            return

        titles = [
            'namespace', 'operation', 'bytesRead', 'bytesWritten',
            'timeReadingMicros', 'timeWritingMicros'
        ]
        table_rows = []

        for g in grouping:
            # calculate statistics for this group
            (namespace, op, bytesRead, bytesWritten,
             timeReadingMicros, timeWritingMicros) = g

            stats = OrderedDict()
            stats['namespace'] = namespace
            stats['operation'] = op
            stats['bytesRead'] = bytesRead
            stats['bytesWritten'] = bytesWritten
            stats['timeReadingMicros'] = timeReadingMicros
            stats['timeWritingMicros'] = timeWritingMicros

            table_rows.append(stats)

        print_table(table_rows, titles, uppercase_headers=False)
        print('')
    def run(self):
        """ run this section and print out information. """
        grouping = Grouping(group_by=lambda x: (
            x.collection, x.operation, x.pattern, x.sort_pattern))
        logfile = self.mloginfo.logfile
        min_duration = self.mloginfo.args['min_duration']
        min_nscanned = self.mloginfo.args['min_nscanned']

        if logfile.start and logfile.end:
            progress_start = self.mloginfo._datetime_to_epoch(logfile.start)
            progress_total = self.mloginfo._datetime_to_epoch(
                logfile.end) - progress_start
        else:
            self.mloginfo.progress_bar_enabled = False

        for i, le in enumerate(logfile):
            # update progress bar every 1000 lines
            if self.mloginfo.progress_bar_enabled and (i % 1000 == 0):
                if le.datetime:
                    progress_curr = self.mloginfo._datetime_to_epoch(
                        le.datetime)
                    self.mloginfo.update_progress(
                        float(progress_curr - progress_start) / progress_total)

            # skip events below the thresholds; events without a duration or
            # nscanned value are also skipped when the filter is active
            if min_duration and (le.duration is None
                                 or le.duration < min_duration):
                continue
            if min_nscanned and (le.nscanned is None
                                 or le.nscanned < min_nscanned):
                continue

            if (le.operation in ['query', 'getmore', 'update', 'remove']
                    or le.command in ['count', 'findandmodify', 'geonear']):
                db, collection = le.namespace.split(".")
                lt = LogTuple(db=db,
                              collection=collection,
                              nscanned=le.nscanned,
                              ntoreturn=le.ntoreturn,
                              writeConflicts=le.writeConflicts,
                              operation=op_or_cmd(le),
                              pattern=le.pattern,
                              duration=le.duration,
                              sort_pattern=le.sort_pattern)
                grouping.add(lt)

        grouping.sort_by_size(group_limit=30)

        # clear progress bar again
        if self.mloginfo.progress_bar_enabled:
            self.mloginfo.update_progress(1.0)

        # no queries in the log file
        if len(grouping) < 1:
            print('no queries found.')
            return

        titles = [
            'collection', 'operation', 'pattern', 'sort_pattern', 'count',
            'mean (ms)', 'sum (mins)'
        ]
        table_rows = []

        for g in grouping:
            # calculate statistics for this group
            try:
                collection, op, pattern, sort_pattern = g
            except ValueError:
                # the group key may be the string 'others' rather than a tuple
                collection = op = pattern = sort_pattern = 'others'

            group_events = [
                le.duration for le in grouping[g] if le.duration is not None
            ]

            stats = OrderedDict()
            stats['collection'] = collection
            stats['operation'] = op
            stats['pattern'] = pattern
            stats['sort_pattern'] = sort_pattern
            stats['count'] = len(group_events)
            stats['mean'] = 0  # placeholder so 'mean' precedes 'sum' in the column order

            stats['sum'] = sum(group_events) if group_events else '-'
            stats['mean'] = (stats['sum'] / stats['count']
                             if group_events else '-')
            # convert the sum from milliseconds to minutes for display
            stats['sum'] = (round(stats['sum'] / 1000.0 / 60, 2)
                            if group_events else '-')

            if self.mloginfo.args['verbose']:
                stats['example'] = grouping[g][0]
                titles.append('example')

            table_rows.append(stats)

        # sort order depending on field names
        reverse = True
        if self.mloginfo.args['sort'] in ['namespace', 'pattern']:
            reverse = False

        table_rows = sorted(table_rows,
                            key=itemgetter(self.mloginfo.args['sort']),
                            reverse=reverse)
        print_table(table_rows, titles, uppercase_headers=False)
        print('')
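
Note the unit change in this last table: sum is first totalled in milliseconds (and used for the mean), then converted to minutes for display by dividing by 1000 and by 60. For example:

total_ms = 93000                          # toy total duration in ms
print(round(total_ms / 1000.0 / 60, 2))   # 1.55 minutes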