def run(self):
    """Print a table of the cursor information found in the log file."""
    mloginfo = self.mloginfo
    cursor_groups = Grouping(
        group_by=lambda x: (x.datetime, x.cursorid, x.reapedtime))
    logfile = mloginfo.logfile

    # progress reporting is only possible with a known time span
    if logfile.start and logfile.end:
        epoch_start = mloginfo._datetime_to_epoch(logfile.start)
        epoch_span = mloginfo._datetime_to_epoch(logfile.end) - epoch_start
    else:
        mloginfo.progress_bar_enabled = False

    for line_no, event in enumerate(logfile):
        # refresh the progress bar every 1000 lines
        if mloginfo.progress_bar_enabled and line_no % 1000 == 0:
            if event.datetime:
                position = mloginfo._datetime_to_epoch(event.datetime)
                if epoch_span:
                    fraction = float(position - epoch_start) / epoch_span
                    mloginfo.update_progress(fraction)

        if 'Cursor id' in event.line_str:
            cursor_groups.add(
                LogTuple(event.datetime, event.cursor, event._reapedtime))

    cursor_groups.sort_by_size()

    # finish / clear the progress bar
    if mloginfo.progress_bar_enabled:
        mloginfo.update_progress(1.0)

    # nothing to report
    if not len(cursor_groups):
        print('no cursor information found.')
        return

    titles = ['datetime', 'cursorid', 'reapedtime']
    table_rows = []

    # one row per distinct (datetime, cursorid, reapedtime) group
    for key in cursor_groups:
        when, cursor_id, reaped = key
        row = OrderedDict()
        row['datetime'] = str(when)
        row['cursorid'] = str(cursor_id)
        row['reapedtime'] = str(reaped)
        table_rows.append(row)

    print_table(table_rows, titles, uppercase_headers=True)
    print('')
def run(self):
    """Run this section and print per-(namespace, pattern) query statistics.

    Groups all query/update/remove operations by (namespace, pattern) and
    prints count/min/max/mean/sum of their durations, sorted by total
    duration (descending).
    """
    grouping = Grouping(group_by=lambda x: (x.namespace, x.pattern))
    logfile = self.mloginfo.logfile

    if logfile.start and logfile.end:
        progress_start = self.mloginfo._datetime_to_epoch(logfile.start)
        progress_total = (self.mloginfo._datetime_to_epoch(logfile.end) -
                          progress_start)
    else:
        # progress cannot be computed without a known time span
        self.progress_bar_enabled = False

    for i, le in enumerate(logfile):
        # update progress bar every 1000 lines
        if self.progress_bar_enabled and (i % 1000 == 0):
            if le.datetime:
                progress_curr = self.mloginfo._datetime_to_epoch(le.datetime)
                # fix: guard against ZeroDivisionError when start == end
                if progress_total:
                    self.mloginfo.update_progress(
                        float(progress_curr - progress_start) /
                        progress_total)

        if le.operation in ['query', 'update', 'remove']:
            grouping.add(le)

    grouping.sort_by_size()

    # clear progress bar again
    self.mloginfo.update_progress(1.0)

    titles = ['namespace', 'pattern', 'count', 'min (ms)', 'max (ms)',
              'mean (ms)', 'sum (ms)']
    table_rows = []

    for g in grouping:
        # calculate statistics for this group
        namespace, pattern = g
        group_events = [le.duration for le in grouping[g]
                        if le.duration is not None]    # fix: was `!= None`

        stats = OrderedDict()
        stats['namespace'] = namespace
        stats['pattern'] = pattern
        stats['count'] = len(group_events)
        stats['min'] = min(group_events) if group_events else '-'
        stats['max'] = max(group_events) if group_events else '-'
        # placeholder keeps the column order; real mean is computed below
        stats['mean'] = 0
        stats['sum'] = sum(group_events) if group_events else '-'
        stats['mean'] = (stats['sum'] / stats['count']
                         if group_events else '-')

        if self.mloginfo.args['verbose']:
            stats['example'] = grouping[g][0]
            titles.append('example')

        table_rows.append(stats)

    # fix: groups without durations carry '-' in 'sum'; substitute 0 for
    # sorting so mixed int/str comparison cannot raise on Python 3
    table_rows = sorted(table_rows,
                        key=lambda row: (row['sum']
                                         if row['sum'] != '-' else 0),
                        reverse=True)
    print_table(table_rows, titles, uppercase_headers=False)
    print('')  # fix: was a Python 2 `print` statement
class BasePlotType(object):
    """Base class for all mplotqueries plot types.

    Holds class-level color/marker cycling state shared by all plot types
    and provides log-event grouping. Derived classes implement
    `plot_group` (and optionally `clicked`).
    """

    # 14 most distinguishable colors, according to
    # http://stackoverflow.com/questions/309149/generate-distinctly-different-rgb-colors-in-graphs
    colors = ['#000000', '#00FF00', '#0000FF', '#FF0000', '#01FFFE',
              '#FFA6FE', '#FFDB66', '#006401', '#010067', '#95003A',
              '#007DB5', '#FF00F6', '#FFEEE8', '#774D00']
    color_index = 0
    markers = ['o', 's', '<', 'D']
    marker_index = 0

    sort_order = 0
    plot_type_str = 'base'
    default_group_by = None

    # deliberately inverted (max, min) range; narrowed as events are added
    date_range = (datetime(MAXYEAR, 12, 31), datetime(MINYEAR, 1, 1))

    def __init__(self, args=None, unknown_args=None):
        self.args = args
        self.unknown_args = unknown_args
        self.groups = OrderedDict()
        self.empty = True
        self.limits = None

        if self.args['optime_start']:
            self.xlabel = 'time (start of ops)'
        else:
            self.xlabel = 'time (end of ops)'

    def accept_line(self, logevent):
        """Return True if this PlotType can plot this line."""
        return True

    def add_line(self, logevent):
        """Append log line to this plot type."""
        key = None
        self.empty = False
        self.groups.setdefault(key, list()).append(logevent)

    @property
    def logevents(self):
        """Iterator yielding all logevents from groups dictionary."""
        for key in self.groups:
            for logevent in self.groups[key]:
                yield logevent

    @classmethod
    def color_map(cls, group):
        """Return the next (color, marker) combination for a new group.

        Cycles through all colors first, then advances the marker and
        restarts the colors.
        """
        color = cls.colors[cls.color_index]
        cls.color_index += 1

        marker = cls.markers[cls.marker_index]
        if cls.color_index >= len(cls.colors):
            cls.marker_index += 1
            cls.marker_index %= len(cls.markers)
            # fix: wrap by the number of colors; was `%= cls.color_index`,
            # which only happened to work because the index equals
            # len(cls.colors) exactly at this point
            cls.color_index %= len(cls.colors)

        return color, marker

    def group(self):
        """(Re-)group all logevents by the given group."""
        if hasattr(self, 'group_by'):
            group_by = self.group_by
        else:
            group_by = self.default_group_by
            if self.args['group'] is not None:  # fix: was `!= None`
                group_by = self.args['group']

        self.groups = Grouping(self.logevents, group_by)
        self.groups.move_items(None, 'others')
        self.groups.sort_by_size(group_limit=self.args['group_limit'],
                                 discard_others=self.args['no_others'])

    def plot_group(self, group, idx, axis):
        """Plot a single group; must be implemented by derived classes."""
        raise NotImplementedError(
            "BasePlotType can't plot. Use a derived class instead")

    def clicked(self, event):
        """Call when an element of this plottype was clicked.

        Implement in sub class.
        """
        pass

    def plot(self, axis, ith_plot, total_plots, limits):
        """Plot all groups and return the list of created artists."""
        self.limits = limits
        artists = []
        # fix: converted Python 2 print statements to function calls
        # (identical output), consistent with the rest of the file
        print(self.plot_type_str.upper(), "plot")
        print("%5s %9s %s" % ("id", " #points", "group"))

        for idx, group in enumerate(self.groups):
            print("%5s %9s %s" % (idx + 1, len(self.groups[group]), group))
            group_artists = self.plot_group(group, idx + ith_plot, axis)
            if isinstance(group_artists, list):
                artists.extend(group_artists)
            else:
                artists.append(group_artists)

        print()
        return artists
def run(self):
    """Run this section and print per-(namespace, pattern) query statistics.

    Collects all query/update/remove operations, groups them by
    (namespace, pattern) and prints count/min/max/mean/sum of their
    durations, sorted by total duration (descending).
    """
    grouping = Grouping(group_by=lambda x: (x.namespace, x.pattern))
    logfile = self.mloginfo.logfile

    if logfile.start and logfile.end:
        progress_start = self.mloginfo._datetime_to_epoch(logfile.start)
        progress_total = (self.mloginfo._datetime_to_epoch(logfile.end) -
                          progress_start)
    else:
        # without a known time span no progress can be computed
        self.progress_bar_enabled = False

    for i, le in enumerate(logfile):
        # update progress bar every 1000 lines
        if self.progress_bar_enabled and (i % 1000 == 0):
            if le.datetime:
                progress_curr = self.mloginfo._datetime_to_epoch(
                    le.datetime)
                # fix: avoid ZeroDivisionError when start == end
                if progress_total:
                    self.mloginfo.update_progress(
                        float(progress_curr - progress_start) /
                        progress_total)

        if le.operation in ['query', 'update', 'remove']:
            grouping.add(le)

    grouping.sort_by_size()

    # clear progress bar again
    self.mloginfo.update_progress(1.0)

    titles = [
        'namespace', 'pattern', 'count', 'min (ms)', 'max (ms)',
        'mean (ms)', 'sum (ms)'
    ]
    table_rows = []

    for g in grouping:
        # calculate statistics for this group
        namespace, pattern = g
        group_events = [
            le.duration for le in grouping[g]
            if le.duration is not None    # fix: was `!= None`
        ]

        stats = OrderedDict()
        stats['namespace'] = namespace
        stats['pattern'] = pattern
        stats['count'] = len(group_events)
        stats['min'] = min(group_events) if group_events else '-'
        stats['max'] = max(group_events) if group_events else '-'
        # placeholder preserves the column order; real value set below
        stats['mean'] = 0
        stats['sum'] = sum(group_events) if group_events else '-'
        stats['mean'] = (stats['sum'] / stats['count']
                         if group_events else '-')

        if self.mloginfo.args['verbose']:
            stats['example'] = grouping[g][0]
            titles.append('example')

        table_rows.append(stats)

    # fix: '-' placeholders mixed with ints would raise TypeError when
    # sorted on Python 3; treat them as 0 so they sort last
    table_rows = sorted(table_rows,
                        key=lambda row: (row['sum']
                                         if row['sum'] != '-' else 0),
                        reverse=True)
    print_table(table_rows, titles, uppercase_headers=False)
    print('')  # fix: bare `print` is a no-op expression on Python 3
class BasePlotType(object):
    """Base class for all mplotqueries plot types.

    Holds class-level color/marker cycling state shared by all plot types
    and provides log-event grouping. Derived classes implement
    `plot_group` (and optionally `clicked`).
    """

    # 15 distinguishable colors, based on
    # http://stackoverflow.com/questions/309149/generate-distinctly-different-rgb-colors-in-graphs
    # (fix: the comment previously claimed 14, but the list holds 15)
    colors = [
        '#0000FF', '#FF00F6', '#01FFFE', '#505050', '#909090', '#FF0000',
        '#00FF00', '#FFA6FE', '#FFDB66', '#006401', '#010067', '#95003A',
        '#007DB5', '#FFEEE8', '#774D00'
    ]
    color_index = 0
    markers = ['o', 's', '<', 'D']
    marker_index = 0

    sort_order = 0
    plot_type_str = 'base'
    default_group_by = None

    # deliberately inverted (max, min) range; narrowed as events are added
    date_range = (datetime(MAXYEAR, 12, 31, tzinfo=tzutc()),
                  datetime(MINYEAR, 1, 1, tzinfo=tzutc()))

    def __init__(self, args=None, unknown_args=None):
        self.args = args
        self.unknown_args = unknown_args
        self.groups = OrderedDict()
        self.empty = True
        self.limits = None

        if self.args['optime_start']:
            self.xlabel = 'time (start of ops)'
        else:
            self.xlabel = 'time (end of ops)'

    def accept_line(self, logevent):
        """Return True if this PlotType can plot this line."""
        return True

    def add_line(self, logevent):
        """Append log line to this plot type."""
        key = None
        self.empty = False
        self.groups.setdefault(key, list()).append(logevent)

    @property
    def logevents(self):
        """Iterator yielding all logevents from groups dictionary."""
        # fix: removed a dead `try: yield ... except StopIteration: return`
        # guard — a plain `yield` cannot raise StopIteration (PEP 479
        # concerns StopIteration raised *inside* the generator body)
        for key in self.groups:
            for logevent in self.groups[key]:
                yield logevent

    @classmethod
    def color_map(cls, group):
        """Return the next (color, marker) combination for a new group.

        Cycles through all colors first, then advances the marker and
        restarts the colors.
        """
        color = cls.colors[cls.color_index]
        cls.color_index += 1

        marker = cls.markers[cls.marker_index]
        if cls.color_index >= len(cls.colors):
            cls.marker_index += 1
            cls.marker_index %= len(cls.markers)
            # fix: wrap by the number of colors; was `%= cls.color_index`,
            # which only happened to work because the index equals
            # len(cls.colors) exactly at this point
            cls.color_index %= len(cls.colors)

        return color, marker

    def group(self):
        """(re-)group all logevents by the given group."""
        if hasattr(self, 'group_by'):
            group_by = self.group_by
        else:
            group_by = self.default_group_by
            if self.args['group'] is not None:
                group_by = self.args['group']

        self.groups = Grouping(self.logevents, group_by)
        self.groups.move_items(None, 'others')
        self.groups.sort_by_size(group_limit=self.args['group_limit'],
                                 discard_others=self.args['no_others'])

    def plot_group(self, group, idx, axis):
        """Plot a single group; must be implemented by derived classes."""
        raise NotImplementedError("BasePlotType can't plot. "
                                  "Use a derived class instead")

    def clicked(self, event):
        """Call if an element of this plottype was clicked.

        Implement in sub class.
        """
        pass

    def plot(self, axis, ith_plot, total_plots, limits):
        """Plot all groups and return the list of created artists."""
        self.limits = limits
        artists = []

        print(self.plot_type_str.upper() + " plot")
        print("%5s %9s %s" % ("id", " #points", "group"))

        for idx, group in enumerate(self.groups):
            print("%5s %9s %s" % (idx + 1, len(self.groups[group]), group))
            group_artists = self.plot_group(group, idx + ith_plot, axis)
            if isinstance(group_artists, list):
                artists.extend(group_artists)
            else:
                artists.append(group_artists)

        print()
        return artists
def run(self):
    """Print query statistics grouped by (namespace, operation, pattern)."""
    mloginfo = self.mloginfo
    query_groups = Grouping(
        group_by=lambda x: (x.namespace, x.operation, x.pattern))
    logfile = mloginfo.logfile

    # progress reporting requires a known time span
    if logfile.start and logfile.end:
        epoch_start = mloginfo._datetime_to_epoch(logfile.start)
        epoch_span = mloginfo._datetime_to_epoch(logfile.end) - epoch_start
    else:
        mloginfo.progress_bar_enabled = False

    accepted_ops = ['query', 'getmore', 'update', 'remove']
    accepted_commands = ['count', 'findandmodify', 'geonear', 'find']

    for line_no, event in enumerate(logfile):
        # refresh the progress bar every 1000 lines
        if mloginfo.progress_bar_enabled and line_no % 1000 == 0:
            if event.datetime:
                position = mloginfo._datetime_to_epoch(event.datetime)
                if epoch_span:
                    mloginfo.update_progress(
                        float(position - epoch_start) / epoch_span)

        # collect both legacy operations and command-style equivalents
        if (event.operation in accepted_ops
                or event.command in accepted_commands):
            query_groups.add(LogTuple(namespace=event.namespace,
                                      operation=op_or_cmd(event),
                                      pattern=event.pattern,
                                      duration=event.duration))

    query_groups.sort_by_size()

    # finish / clear the progress bar
    if mloginfo.progress_bar_enabled:
        mloginfo.update_progress(1.0)

    # no queries in the log file
    if len(query_groups) < 1:
        print('no queries found.')
        return

    titles = ['namespace', 'operation', 'pattern', 'count', 'min (ms)',
              'max (ms)', 'mean (ms)', '95%-ile (ms)', 'sum (ms)']
    table_rows = []

    for key in query_groups:
        # compute duration statistics for this group
        namespace, operation, pattern = key
        durations = [tup.duration for tup in query_groups[key]
                     if tup.duration is not None]

        row = OrderedDict()
        row['namespace'] = namespace
        row['operation'] = operation
        row['pattern'] = pattern
        row['count'] = len(durations)
        row['min'] = min(durations) if durations else '-'
        row['max'] = max(durations) if durations else '-'
        # placeholder keeps the column order; real mean is computed below
        row['mean'] = 0
        # the 95th percentile needs numpy; np is falsy when unavailable
        if np:
            row['95%'] = (np.percentile(durations, 95)
                          if durations else '-')
        else:
            row['95%'] = 'n/a'
        row['sum'] = sum(durations) if durations else '-'
        row['mean'] = (row['sum'] / row['count']) if durations else '-'

        if mloginfo.args['verbose']:
            row['example'] = query_groups[key][0]
            titles.append('example')

        table_rows.append(row)

    # numeric columns sort descending, name-like columns ascending
    sort_field = mloginfo.args['sort']
    descending = sort_field not in ['namespace', 'pattern']
    table_rows = sorted(table_rows, key=itemgetter(sort_field),
                        reverse=descending)
    print_table(table_rows, titles, uppercase_headers=False)
    print('')
def run(self):
    """Run this section and print out information.

    Collects every log line mentioning 'transaction' and prints one row
    per distinct (datetime, txnNumber, autocommit, readConcern,
    timeActiveMicros, timeInactiveMicros, duration) combination.
    """
    grouping = Grouping(group_by=lambda x: (
        x.datetime, x.txnNumber, x.autocommit, x.readConcern,
        x.timeActiveMicros, x.timeInactiveMicros, x.duration))
    logfile = self.mloginfo.logfile

    # progress reporting is only possible with a known time span
    if logfile.start and logfile.end:
        progress_start = self.mloginfo._datetime_to_epoch(logfile.start)
        progress_total = (self.mloginfo._datetime_to_epoch(logfile.end) -
                          progress_start)
    else:
        self.mloginfo.progress_bar_enabled = False

    for i, le in enumerate(logfile):
        # update progress bar every 1000 lines
        if self.mloginfo.progress_bar_enabled and (i % 1000 == 0):
            if le.datetime:
                progress_curr = self.mloginfo._datetime_to_epoch(
                    le.datetime)
                # progress_total is falsy when start == end; skip the
                # update to avoid division by zero
                if progress_total:
                    (self.mloginfo.update_progress(
                        float(progress_curr - progress_start) /
                        progress_total))

        if re.search('transaction', le.line_str):
            lt = LogTuple(le.datetime, le.txnNumber, le.autocommit,
                          le.readConcern, le.timeActiveMicros,
                          le.timeInactiveMicros, le.duration)
            grouping.add(lt)

    grouping.sort_by_size()

    # clear progress bar again
    if self.mloginfo.progress_bar_enabled:
        self.mloginfo.update_progress(1.0)

    # no queries in the log file
    if not len(grouping):
        print('no transactions found.')
        return

    titles = [
        'datetime', 'txnNumber', 'autocommit', 'readConcern',
        'timeActiveMicros', 'timeInactiveMicros', 'duration'
    ]
    table_rows = []

    # using only important key-values
    # can be used in future
    for g in grouping:
        # calculate statistics for this group
        # NOTE(review): the local name `datetime` shadows any imported
        # datetime module/class for the rest of this loop body
        datetime, txnNumber, autocommit, readConcern, timeActiveMicros, timeInactiveMicros, duration = g
        stats = OrderedDict()
        stats['datetime'] = str(datetime)
        stats['txnNumber'] = txnNumber
        stats['autocommit'] = autocommit
        stats['readConcern'] = readConcern
        stats['timeActiveMicros'] = timeActiveMicros
        stats['timeInactiveMicros'] = timeInactiveMicros
        stats['duration'] = duration
        table_rows.append(stats)

    # 'duration' is the only tsort value that triggers sorting (descending)
    if self.mloginfo.args['tsort'] == 'duration':
        table_rows = sorted(table_rows,
                            key=itemgetter(self.mloginfo.args['tsort']),
                            reverse=True)

    print_table(table_rows, titles, uppercase_headers=True)
    print('')
def run(self):
    """Run this section and print out information.

    Groups queries by (namespace, operation, pattern) and prints
    count/min/max/mean/95%-ile/sum of their durations.
    """
    grouping = Grouping(group_by=lambda x: (x.namespace, x.operation,
                                            x.pattern))
    logfile = self.mloginfo.logfile

    # progress reporting is only possible with a known time span
    if logfile.start and logfile.end:
        progress_start = self.mloginfo._datetime_to_epoch(logfile.start)
        progress_total = (self.mloginfo._datetime_to_epoch(logfile.end) -
                          progress_start)
    else:
        self.mloginfo.progress_bar_enabled = False

    for i, le in enumerate(logfile):
        # update progress bar every 1000 lines
        if self.mloginfo.progress_bar_enabled and (i % 1000 == 0):
            if le.datetime:
                progress_curr = self.mloginfo._datetime_to_epoch(
                    le.datetime)
                # progress_total is falsy when start == end; skip the
                # update to avoid division by zero
                if progress_total:
                    (self.mloginfo
                     .update_progress(float(progress_curr -
                                            progress_start) /
                                      progress_total))

        # collect both legacy operations and command-style equivalents
        if (le.operation in ['query', 'getmore', 'update', 'remove'] or
                le.command in ['count', 'findandmodify', 'geonear',
                               'find']):
            lt = LogTuple(namespace=le.namespace, operation=op_or_cmd(le),
                          pattern=le.pattern, duration=le.duration)
            grouping.add(lt)

    grouping.sort_by_size()

    # clear progress bar again
    if self.mloginfo.progress_bar_enabled:
        self.mloginfo.update_progress(1.0)

    # no queries in the log file
    if len(grouping) < 1:
        print('no queries found.')
        return

    titles = ['namespace', 'operation', 'pattern', 'count', 'min (ms)',
              'max (ms)', 'mean (ms)', '95%-ile (ms)', 'sum (ms)']
    table_rows = []
    for g in grouping:
        # calculate statistics for this group
        namespace, op, pattern = g
        group_events = [le.duration for le in grouping[g]
                        if le.duration is not None]

        stats = OrderedDict()
        stats['namespace'] = namespace
        stats['operation'] = op
        stats['pattern'] = pattern
        stats['count'] = len(group_events)
        stats['min'] = min(group_events) if group_events else '-'
        stats['max'] = max(group_events) if group_events else '-'
        # placeholder; the real mean is assigned after 'sum' below so the
        # OrderedDict column order stays min/max/mean/95%/sum
        stats['mean'] = 0
        # the 95th percentile needs numpy; np is falsy when unavailable
        if np:
            stats['95%'] = (np.percentile(group_events, 95)
                            if group_events else '-')
        else:
            stats['95%'] = 'n/a'
        stats['sum'] = sum(group_events) if group_events else '-'
        stats['mean'] = (stats['sum'] / stats['count']
                         if group_events else '-')

        if self.mloginfo.args['verbose']:
            # NOTE(review): 'example' is appended to titles once per
            # verbose group, not once overall — confirm print_table
            # tolerates the duplicates
            stats['example'] = grouping[g][0]
            titles.append('example')

        table_rows.append(stats)

    # sort order depending on field names
    reverse = True
    if self.mloginfo.args['sort'] in ['namespace', 'pattern']:
        reverse = False
    table_rows = sorted(table_rows,
                        key=itemgetter(self.mloginfo.args['sort']),
                        reverse=reverse)

    print_table(table_rows, titles, uppercase_headers=False)
    print('')
def run(self):
    """Print per-namespace storage statistics (bytes and I/O times)."""
    mloginfo = self.mloginfo
    storage_groups = Grouping(group_by=lambda x: (
        x.namespace, x.operation, x.bytesRead, x.bytesWritten,
        x.timeReadingMicros, x.timeWritingMicros))
    logfile = mloginfo.logfile

    # progress reporting requires a known time span
    if logfile.start and logfile.end:
        epoch_start = mloginfo._datetime_to_epoch(logfile.start)
        epoch_span = mloginfo._datetime_to_epoch(logfile.end) - epoch_start
    else:
        mloginfo.progress_bar_enabled = False

    for line_no, event in enumerate(logfile):
        # refresh the progress bar every 1000 lines
        if mloginfo.progress_bar_enabled and line_no % 1000 == 0:
            if event.datetime:
                position = mloginfo._datetime_to_epoch(event.datetime)
                if epoch_span:
                    mloginfo.update_progress(
                        float(position - epoch_start) / epoch_span)

        if event.operation in ['update'] or event.command in ['insert']:
            storage_groups.add(LogTuple(
                namespace=event.namespace,
                operation=op_or_cmd(event),
                bytesRead=event.bytesRead,
                bytesWritten=event.bytesWritten,
                timeReadingMicros=event.timeReadingMicros,
                timeWritingMicros=event.timeWritingMicros))

    storage_groups.sort_by_size()

    # finish / clear the progress bar
    if mloginfo.progress_bar_enabled:
        mloginfo.update_progress(1.0)

    # nothing to report
    if not len(storage_groups):
        print('no statistics found.')
        return

    titles = ['namespace', 'operation', 'bytesRead', 'bytesWritten',
              'timeReadingMicros', 'timeWritingMicros']
    table_rows = []

    for key in storage_groups:
        # one row per distinct statistics combination
        (namespace, operation, bytes_read, bytes_written,
         time_reading, time_writing) = key
        row = OrderedDict()
        row['namespace'] = namespace
        row['operation'] = operation
        row['bytesRead'] = bytes_read
        row['bytesWritten'] = bytes_written
        row['timeReadingMicros'] = time_reading
        row['timeWritingMicros'] = time_writing
        table_rows.append(row)

    print_table(table_rows, titles, uppercase_headers=False)
    print('')
def run(self):
    """Run this section and print the largest query groups.

    Groups queries by (collection, operation, pattern, sort_pattern),
    skipping operations below the min_duration / min_nscanned thresholds,
    and prints count, mean (ms) and total time (minutes) for the top 30
    groups.
    """
    grouping = Grouping(group_by=lambda x: (x.collection, x.operation,
                                            x.pattern, x.sort_pattern))
    logfile = self.mloginfo.logfile
    min_duration = self.mloginfo.args['min_duration']
    min_nscanned = self.mloginfo.args['min_nscanned']

    if logfile.start and logfile.end:
        progress_start = self.mloginfo._datetime_to_epoch(logfile.start)
        progress_total = (self.mloginfo._datetime_to_epoch(logfile.end) -
                          progress_start)
    else:
        self.mloginfo.progress_bar_enabled = False

    for i, le in enumerate(logfile):
        # update progress bar every 1000 lines
        if self.mloginfo.progress_bar_enabled and (i % 1000 == 0):
            if le.datetime:
                progress_curr = self.mloginfo._datetime_to_epoch(
                    le.datetime)
                # fix: guard against ZeroDivisionError when start == end
                if progress_total:
                    self.mloginfo.update_progress(
                        float(progress_curr - progress_start) /
                        progress_total)

        # skip operations below the configured thresholds
        if min_duration and le.duration < min_duration:
            continue
        if min_nscanned and le.nscanned < min_nscanned:
            continue

        if (le.operation in ['query', 'getmore', 'update', 'remove'] or
                le.command in ['count', 'findandmodify', 'geonear']):
            # fix: split only on the first dot so collection names that
            # contain dots (e.g. "db.coll.sub") no longer raise ValueError
            db, collection = le.namespace.split(".", 1)
            lt = LogTuple(db=db, collection=collection,
                          nscanned=le.nscanned, ntoreturn=le.ntoreturn,
                          writeConflicts=le.writeConflicts,
                          operation=op_or_cmd(le), pattern=le.pattern,
                          duration=le.duration,
                          sort_pattern=le.sort_pattern)
            grouping.add(lt)

    grouping.sort_by_size(group_limit=30)

    # clear progress bar again
    if self.mloginfo.progress_bar_enabled:
        self.mloginfo.update_progress(1.0)

    # no queries in the log file
    if len(grouping) < 1:
        print('no queries found.')  # fix: was a Python 2 print statement
        return

    titles = ['collection', 'operation', 'pattern', 'sort_pattern',
              'count', 'mean (ms)', 'sum (mins)']
    table_rows = []

    for g in grouping:
        # calculate statistics for this group; groups beyond the limit
        # are collapsed under the single string key 'others', which
        # cannot be unpacked into four fields
        try:
            collection, op, pattern, sort_pattern = g
        except ValueError:  # fix: was a bare `except:`
            collection, op, pattern, sort_pattern = ('others',) * 4

        group_events = [le.duration for le in grouping[g]
                        if le.duration is not None]    # fix: was `!= None`

        stats = OrderedDict()
        stats['collection'] = collection
        stats['operation'] = op
        stats['pattern'] = pattern
        stats['sort_pattern'] = sort_pattern
        stats['count'] = len(group_events)
        # placeholder keeps the column order; real mean is computed below
        stats['mean'] = 0
        stats['sum'] = sum(group_events) if group_events else '-'
        stats['mean'] = (stats['sum'] / stats['count']
                         if group_events else '-')
        # convert the total from milliseconds to minutes for display
        stats['sum'] = (round(stats['sum'] / 1000.0 / 60, 2)
                        if group_events else '-')

        if self.mloginfo.args['verbose']:
            stats['example'] = grouping[g][0]
            titles.append('example')

        table_rows.append(stats)

    # sort order depending on field names
    reverse = True
    if self.mloginfo.args['sort'] in ['namespace', 'pattern']:
        reverse = False
    table_rows = sorted(table_rows,
                        key=itemgetter(self.mloginfo.args['sort']),
                        reverse=reverse)
    print_table(table_rows, titles, uppercase_headers=False)
    print('')  # fix: was a Python 2 print statement