def _export(self, with_line_str=True):
    """Serialize parseable log lines as comma-joined JSON objects.

    Iterates over ``self.args['logfile']``; every line that yields both a
    ``datetime`` and a ``duration`` is serialized via ``LogLine.to_json``.

    :param with_line_str: include the raw ``line_str`` field in each JSON
        object. When True and more than 10,000 matching lines are found,
        export is aborted (the rendered page would not load).
    :returns: the joined JSON string (possibly empty), or False when the
        10,000-line limit is exceeded with ``with_line_str`` enabled.
    """
    fields = ['_id', 'datetime', 'operation', 'thread', 'namespace',
              'nscanned', 'nreturned', 'duration', 'numYields', 'w', 'r']
    if with_line_str:
        fields.append('line_str')

    # Collect fragments and join once at the end: repeated `result += ...`
    # string concatenation is quadratic on large log files.
    json_rows = []
    for line_no, line in enumerate(self.args['logfile']):
        logline = LogLine(line)
        # only export lines that have a datetime and duration
        if not (logline.datetime and logline.duration):
            continue
        # if too many lines include a line_str, the page won't load;
        # len(json_rows) >= 10000 here matches the original out_count > 10000
        # check (abort on the 10,001st matching line)
        if with_line_str and len(json_rows) >= 10000:
            print("Warning: more than 10,000 data points detected. Skipping actual log line strings for faster plotting.")
            return False
        # hack to include _id for log lines from file
        logline._id = line_no
        json_rows.append(logline.to_json(fields))
    return ',\n'.join(json_rows)
def _export(self, with_line_str=True):
    """Export log lines with datetime and duration as a JSON fragment list.

    Walks ``self.args['logfile']`` line by line and converts each parseable
    entry (one that has both a ``datetime`` and a ``duration``) to JSON with
    ``LogLine.to_json``, joining the results with ``',\\n'``.

    :param with_line_str: also emit the raw ``line_str`` per entry; in that
        mode the export aborts once more than 10,000 entries are seen,
        because the plotting page cannot handle the payload.
    :returns: the joined JSON string, or False on abort.
    """
    fields = ['_id', 'datetime', 'operation', 'thread', 'namespace',
              'nscanned', 'nreturned', 'duration', 'numYields', 'w', 'r']
    if with_line_str:
        fields.append('line_str')

    # Accumulate rows in a list; a single join below avoids the quadratic
    # cost of building the result with `+=` inside the loop.
    rows = []
    for line_no, line in enumerate(self.args['logfile']):
        logline = LogLine(line)
        # only export lines that have a datetime and duration
        if not (logline.datetime and logline.duration):
            continue
        # if too many lines include a line_str, the page won't load;
        # aborting when 10,000 rows are already collected reproduces the
        # original out_count > 10000 behavior exactly
        if with_line_str and len(rows) >= 10000:
            print("Warning: more than 10,000 data points detected. Skipping actual log line strings for faster plotting.")
            return False
        # hack to include _id for log lines from file
        logline._id = line_no
        rows.append(logline.to_json(fields))
    return ',\n'.join(rows)