def test_logevent_sort_pattern_parsing():
    le = LogEvent(line_pattern_26_a)
    assert le.sort_pattern is None

    le = LogEvent(line_pattern_26_b)
    assert le.sort_pattern == '{"b": 1}'

    le = LogEvent(line_pattern_26_c)
    assert le.sort_pattern == '{"b": 1}'

def test_logevent_command_parsing():
    le = LogEvent(line_command_26_a)
    assert le.command == 'replsetgetstatus'

    le = LogEvent(line_command_26_b)
    assert le.command == 'aggregate'

    le = LogEvent(line_getmore)
    assert le.command is None

def test_logevent_pattern_parsing():
    le = LogEvent(line_pattern_26_a)
    assert le.pattern == '{"a": 1}'

    le = LogEvent(line_pattern_26_b)
    assert le.pattern == '{"a": 1}'

    le = LogEvent(line_pattern_26_c)
    assert le.pattern == '{"a": 1}'

def _calculate_bounds(self):
    """Calculate beginning and end of logfile."""
    if self._bounds_calculated:
        # assume bounds never need recalculating for the lifetime
        # of a LogFile object
        return

    if self.from_stdin:
        return False

    # we should be able to find a valid log line within max_start_lines
    max_start_lines = 10
    lines_checked = 0

    # get start datetime
    for line in self.filehandle:
        logevent = LogEvent(line)
        lines_checked += 1
        if logevent.datetime:
            self._start = logevent.datetime
            self._timezone = logevent.datetime.tzinfo
            self._datetime_format = logevent.datetime_format
            self._datetime_nextpos = logevent._datetime_nextpos
            break
        if lines_checked > max_start_lines:
            break

    # sanity check before attempting to find the end date
    if self._start is None:
        raise SystemExit("Error: <%s> does not appear to be a supported "
                         "MongoDB log file format" % self.filehandle.name)

    # get end datetime (lines are at most 10k,
    # go back 30k at most to make sure we catch one)
    self.filehandle.seek(0, 2)
    self._filesize = self.filehandle.tell()
    self.filehandle.seek(-min(self._filesize, 30000), 2)

    for line in reversed(self.filehandle.readlines()):
        logevent = LogEvent(line)
        if logevent.datetime:
            self._end = logevent.datetime
            break

    # if there was a year rollover, subtract 1 year from the start time
    if self._end < self._start:
        self._start = self._start.replace(year=self._start.year - 1)
        self._year_rollover = self._end
    else:
        self._year_rollover = False

    # reset logfile
    self.filehandle.seek(0)
    self._bounds_calculated = True

    return True

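# A minimal usage sketch for the bounds calculation above: once a LogFile
# is constructed, .start and .end expose the datetimes found at the two
# ends of the file (as used by test_from_to_26_log below). The import path
# and the filename 'mongod.log' are assumptions for illustration.
from mtools.util.logfile import LogFile

logfile = LogFile(open('mongod.log', 'r'))  # hypothetical log file
print logfile.start   # first datetime found near the top of the file
print logfile.end     # last datetime found in the final ~30k bytes
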
def test_merge_same(self):
    file_length = len(self.logfile)
    self.tool.run('%s %s' % (self.logfile_path, self.logfile_path))

    output = sys.stdout.getvalue()
    lines = output.splitlines()
    assert len(lines) == 2 * file_length

    # the merged output must be sorted by timestamp
    for prev_line, next_line in zip(lines[:-1], lines[1:]):
        prev_le = LogEvent(prev_line)
        next_le = LogEvent(next_line)
        if not prev_le.datetime or not next_le.datetime:
            continue
        assert prev_le.datetime <= next_le.datetime

def next(self):
    """Get next line, adjust for year rollover and hint datetime format."""
    # use readline() here because the next() iterator uses an internal
    # readahead buffer, which would leave the seek position wrong
    line = self.filehandle.readline()
    if line == '':
        raise StopIteration
    line = line.rstrip('\n')

    le = LogEvent(line)

    # hint format and nextpos from the previous line
    if self._datetime_format and self._datetime_nextpos is not None:
        ret = le.set_datetime_hint(self._datetime_format,
                                   self._datetime_nextpos,
                                   self.year_rollover)
        if not ret:
            # logevent indicates that the timestamp format has changed,
            # invalidate the hint info
            self._datetime_format = None
            self._datetime_nextpos = None
    elif le.datetime:
        # gather new hint info from another logevent
        self._datetime_format = le.datetime_format
        self._datetime_nextpos = le._datetime_nextpos

    return le

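# Sketch of how the hint above pays off: once one line has been parsed,
# later LogEvents can skip re-detecting the timestamp format via
# set_datetime_hint(), the same call used in next() above. line_a and
# line_b are hypothetical fixtures; False stands in for "no rollover".
first = LogEvent(line_a)
if first.datetime:
    hint_format = first.datetime_format
    hint_nextpos = first._datetime_nextpos
    second = LogEvent(line_b)
    # returns False if the format changed and the hint must be discarded
    ok = second.set_datetime_hint(hint_format, hint_nextpos, False)
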
def test_slow_fast(self):
    self.tool.run('%s --slow 145 --fast 500' % self.logfile_path)
    output = sys.stdout.getvalue()
    assert len(output.splitlines()) > 0
    for line in output.splitlines():
        le = LogEvent(line)
        assert 145 <= le.duration <= 500

def test_no_timestamp_format(self):
    self.tool.run('%s --timestamp-format none --timezone 5'
                  % self.logfile_path)
    output = sys.stdout.getvalue()
    for line in output.splitlines():
        le = LogEvent(line)
        if le.datetime:
            assert le.datetime_format == 'ctime-pre2.4'

def test_pattern(self):
    # test that the pattern is correctly parsed, reordered and
    # compared to the logevent pattern
    self.tool.run('%s --pattern {ns:1,_id:1,host:1}' % self.logfile_path)
    output = sys.stdout.getvalue()
    for line in output.splitlines():
        le = LogEvent(line)
        assert le.pattern == '{"_id": 1, "host": 1, "ns": 1}'

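# The reordered pattern asserted above ('{"_id": 1, "host": 1, "ns": 1}')
# is simply the query shape with its keys sorted. A minimal sketch of that
# normalization, assuming the shape can be treated as JSON once quoted:
import json

shape = {"ns": 1, "_id": 1, "host": 1}
print json.dumps(shape, sort_keys=True)
# {"_id": 1, "host": 1, "ns": 1}
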
def test_command(self):
    self.tool.run('%s --command dropDatabase deleteIndexes'
                  % self.logfile_path)
    output = sys.stdout.getvalue()
    for line in output.splitlines():
        le = LogEvent(line)
        assert le.command in ['dropDatabase', 'deleteIndexes']

def _iterate_lines(self):
    """Count number of lines (can be expensive)."""
    self._num_lines = 0
    self._restarts = []

    line_no = 0
    for line_no, line in enumerate(self.filehandle):
        # find version string
        if "version" in line:
            restart = None

            # differentiate between the different variations
            if "mongos" in line or "MongoS" in line:
                self._binary = 'mongos'
            elif "db version v" in line:
                self._binary = 'mongod'
            else:
                continue

            version = re.search(r'(\d\.\d\.\d+)', line)
            if version:
                version = version.group(1)
                restart = (version, LogEvent(line))
                self._restarts.append(restart)

    self._num_lines = line_no + 1

    # reset logfile
    self.filehandle.seek(0)

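# Example of the version match in _iterate_lines() against a typical
# mongod restart line (the regex is the one used above; the line text
# is illustrative):
import re

line = "... [initandlisten] db version v2.6.4"
m = re.search(r'(\d\.\d\.\d+)', line)
if m:
    print m.group(1)   # 2.6.4
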
def test_from_to_26_log(self):
    logfile_26_path = os.path.join(os.path.dirname(mtools.__file__),
                                   'test/logfiles/', 'mongod_26.log')
    logfile_26 = LogFile(open(logfile_26_path, 'r'))

    random_start = random_date(logfile_26.start, logfile_26.end)
    random_end = random_date(random_start + timedelta(minutes=1),
                             logfile_26.end + timedelta(minutes=1))

    print random_start, random_end
    print logfile_26.start, logfile_26.end

    self.tool.run('%s --from %s --to %s'
                  % (logfile_26_path,
                     random_start.strftime("%b %d %H:%M:%S"),
                     random_end.strftime("%b %d %H:%M:%S")))
    output = sys.stdout.getvalue()
    assert len(output.splitlines()) > 0

    at_least_one = False
    for line in output.splitlines():
        le = LogEvent(line)
        if not le.datetime:
            continue
        at_least_one = True
        assert random_start <= le.datetime <= random_end
    assert at_least_one

def __iter__(self):
    """Iterate over host object, yielding a LogEvent for each document."""
    self.cursor = self.coll_handle.find().sort([("ts", ASCENDING)])
    for doc in self.cursor:
        doc['thread'] = self.name
        le = LogEvent(doc)
        yield le

def test_from(self):
    random_start = random_date(self.logfile.start, self.logfile.end)
    self.tool.run('%s --from %s'
                  % (self.logfile_path,
                     random_start.strftime("%b %d %H:%M:%S")))
    output = sys.stdout.getvalue()
    for line in output.splitlines():
        le = LogEvent(line)
        if not le.datetime:
            continue
        assert le.datetime >= random_start

def __iter__(self):
    """Iterating over a ProfileCollection object yields a LogEvent for
    each document."""
    self.cursor = self.coll_handle.find().sort([("ts", ASCENDING)])
    for doc in self.cursor:
        doc['thread'] = self.name
        le = LogEvent(doc)
        yield le

def test_from_iso8601_timestamp(self):
    random_start = random_date(self.logfile.start, self.logfile.end)
    self.tool.run('%s --from %s'
                  % (self.logfile_path, random_start.isoformat()))
    output = sys.stdout.getvalue()
    for line in output.splitlines():
        le = LogEvent(line)
        if not le.datetime:
            continue
        assert le.datetime >= random_start

def next(self):
    """Return the next LogEvent (makes this object an iterator)."""
    if not self.cursor:
        self.cursor = self.coll_handle.find().sort([("ts", ASCENDING)])
    doc = self.cursor.next()
    doc['thread'] = self.name
    le = LogEvent(doc)
    return le

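# A sketch of the same "profile document as LogEvent" idea using pymongo
# directly, mirroring the iteration above. The database name 'mydb' and a
# locally running mongod are assumptions for illustration.
from pymongo import MongoClient, ASCENDING

coll = MongoClient()['mydb']['system.profile']   # hypothetical database
for doc in coll.find().sort([("ts", ASCENDING)]):
    doc['thread'] = 'mydb.system.profile'        # same trick as above
    le = LogEvent(doc)
    print le.operation, le.namespace, le.duration
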
def test_planSummary(self):
    logfile_path = os.path.join(os.path.dirname(mtools.__file__),
                                'test/logfiles/', 'mongod_26_corrupt.log')
    self.tool.run('%s --planSummary IXSCAN' % logfile_path)
    output = sys.stdout.getvalue()
    lines = output.splitlines()
    assert len(lines) > 0
    for line in lines:
        le = LogEvent(line)
        assert le.planSummary == "IXSCAN"

def _test_thread(self, path, thread):
    self.tool.run('%s --thread mongosMain' % path)
    output = sys.stdout.getvalue()
    for line in output.splitlines():
        le = LogEvent(line)
        assert le.thread == thread
        md = re.match(r"^.* connection accepted from [0-9\.:]+ "
                      r"#(?P<conn>[0-9]+) ", le.line_str)
        if md is None:
            assert le.conn is None
        else:
            assert le.conn == "conn" + md.group('conn')

def _test_thread_conn1(self, path, thread):
    self.tool.run('%s --thread conn1' % path)
    output = sys.stdout.getvalue()
    for line in output.splitlines():
        le = LogEvent(line)
        md = re.match(r"^.* connection accepted from [0-9\.:]+ "
                      r"#(?P<conn>[0-9]+) ", le.line_str)
        assert le.conn == 'conn1'
        if md is None:
            assert le.thread == 'conn1'
        else:
            assert le.thread == thread

def test_logevent_value_extraction():
    """Check for correct value extraction of all fields."""
    le = LogEvent(line_getmore)
    assert le.thread == 'conn9'
    assert le.operation == 'getmore'
    assert le.namespace == 'local.oplog.rs'
    assert le.duration == 144
    assert le.numYields == 107
    assert le.r == 85093
    assert le.ntoreturn == 0
    assert le.nreturned == 13551
    assert le.pattern == '{"ts": 1}'

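# A hypothetical fixture consistent with the assertions above (the real
# line_getmore is defined elsewhere in the test module); shown only to
# illustrate which tokens each property is extracted from:
line_getmore_example = (
    "Mon Aug  5 20:26:32 [conn9] getmore local.oplog.rs "
    "query: { ts: { $gte: Timestamp 1375734068000|1 } } "
    "cursorid:1870098 ntoreturn:0 keyUpdates:0 numYields: 107 "
    "locks(micros) r:85093 nreturned:13551 reslen:258 144ms"
)
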
def test_from_to_stdin(self):
    year = datetime.now().year
    start = datetime(year, 8, 5, 20, 45)
    end = datetime(year, 8, 5, 21, 1)
    self.tool.is_stdin = True
    self.tool.run('%s --from %s --to %s'
                  % (self.logfile_path,
                     start.strftime("%b %d %H:%M:%S"),
                     end.strftime("%b %d %H:%M:%S")))
    self.tool.is_stdin = False

    output = sys.stdout.getvalue()
    for line in output.splitlines():
        le = LogEvent(line)
        assert start <= le.datetime <= end

def _calculate_bounds(self):
    """Calculate beginning and end of logfile."""
    if self.from_stdin:
        return False

    # get start datetime
    for line in self.filehandle:
        logevent = LogEvent(line)
        if logevent.datetime:
            self._start = logevent.datetime
            self._timezone = logevent.datetime.tzinfo
            self._datetime_format = logevent.datetime_format
            self._datetime_nextpos = logevent._datetime_nextpos
            break

    # get end datetime (lines are at most 10k,
    # go back 30k at most to make sure we catch one)
    self.filehandle.seek(0, 2)
    self._filesize = self.filehandle.tell()
    self.filehandle.seek(-min(self._filesize, 30000), 2)

    for line in reversed(self.filehandle.readlines()):
        logevent = LogEvent(line)
        if logevent.datetime:
            self._end = logevent.datetime
            break

    # if there was a year rollover, subtract 1 year from the start time
    if self._end < self._start:
        self._start = self._start.replace(year=self._start.year - 1)
        self._year_rollover = self._end
    else:
        self._year_rollover = False

    # reset logfile
    self.filehandle.seek(0)

    return True

def test_logevent_non_log_line():
    """Check that LogEvent correctly ignores non log lines."""
    for le in (LogEvent(line_fassert), LogEvent(line_empty)):
        assert le.thread is None
        assert le.operation is None
        assert le.namespace is None
        assert le.duration is None
        assert le.numYields is None
        assert le.r is None
        assert le.ntoreturn is None
        assert le.nreturned is None
        assert le.pattern is None

def test_logevent_datetime_parsing():
    """Check that all four timestamp formats are correctly parsed."""
    le = LogEvent(line_ctime_pre24)
    this_year = datetime.datetime.now().year
    le_str = le.line_str
    assert str(le.datetime) == '%s-08-03 21:52:05+00:00' % this_year
    assert le._datetime_format == 'ctime-pre2.4'
    assert le.line_str[4:] == le_str[4:]
    # make sure all datetime objects are timezone aware
    assert le.datetime.tzinfo is not None

    le = LogEvent(line_ctime)
    le_str = le.line_str
    assert str(le.datetime) == '%s-08-03 21:52:05.095000+00:00' % this_year
    assert le._datetime_format == 'ctime'
    assert le.line_str[4:] == le_str[4:]
    assert le.datetime.tzinfo is not None

    le = LogEvent(line_iso8601_utc)
    le_str = le.line_str
    assert str(le.datetime) == '2013-08-03 11:52:05.095000+00:00'
    assert le._datetime_format == 'iso8601-utc'
    assert le.line_str[4:] == le_str[4:]
    assert le.datetime.tzinfo is not None

    le = LogEvent(line_iso8601_local)
    le_str = le.line_str
    assert str(le.datetime) == '2013-08-03 21:52:05.095000+10:00'
    assert le._datetime_format == 'iso8601-local'
    assert le.line_str[4:] == le_str[4:]
    assert le.datetime.tzinfo is not None

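# The four timestamp flavors exercised above, as they would appear at the
# start of a log line (example values chosen to match the assertions; the
# exact fixture strings are defined elsewhere in the test module):
#
#   ctime-pre2.4:   Sat Aug  3 21:52:05            (no milliseconds)
#   ctime:          Sat Aug  3 21:52:05.095
#   iso8601-utc:    2013-08-03T11:52:05.095Z
#   iso8601-local:  2013-08-03T21:52:05.095+1000
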
def test_mask_end(self):
    mask_path = os.path.join(os.path.dirname(mtools.__file__),
                             'test/logfiles/', 'mask_centers.log')

    event1 = parser.parse("Mon Aug 5 20:27:15 UTC")
    event2 = parser.parse("Mon Aug 5 20:30:09 UTC")
    mask_size = randrange(10, 60)
    padding = timedelta(seconds=mask_size / 2)

    self.tool.run('%s --mask %s --mask-size %i'
                  % (self.logfile_path, mask_path, mask_size))
    output = sys.stdout.getvalue()
    for line in output.splitlines():
        le = LogEvent(line)
        assert ((event1 - padding <= le.datetime <= event1 + padding) or
                (event2 - padding <= le.datetime <= event2 + padding))

def test_logevent_lazy_evaluation():
    """Check that all LogEvent variables are evaluated lazily."""
    fields = ['_thread', '_operation', '_namespace', '_duration',
              '_numYields', '_r', '_ntoreturn', '_nreturned', '_pattern']

    # before parsing, all member variables need to be None
    le = LogEvent(line_getmore)
    for attr in fields:
        assert getattr(le, attr) is None

    # after parsing, they all need to be filled out
    le.parse_all()
    for attr in fields:
        assert getattr(le, attr) is not None

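# A minimal sketch of the lazy-evaluation pattern the test checks for,
# not the actual LogEvent implementation: the private field stays None
# until the public property is first read.
class LazyExample(object):
    def __init__(self, line_str):
        self.line_str = line_str
        self._duration = None

    @property
    def duration(self):
        if self._duration is None:
            # parse on first access only (toy parser for the sketch)
            if self.line_str.endswith('ms'):
                self._duration = int(self.line_str.split()[-1][:-2])
        return self._duration
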
def test_namespace(self):
    self.tool.run('%s --namespace local.oplog.rs' % self.logfile_path)
    output = sys.stdout.getvalue()
    for line in output.splitlines():
        le = LogEvent(line)
        assert le.namespace == 'local.oplog.rs'

def test_multiple_operations(self):
    self.tool.run('%s --operation insert query' % self.logfile_path)
    output = sys.stdout.getvalue()
    for line in output.splitlines():
        le = LogEvent(line)
        assert le.operation in ['insert', 'query']

def test_operation(self):
    self.tool.run('%s --operation insert' % self.logfile_path)
    output = sys.stdout.getvalue()
    for line in output.splitlines():
        le = LogEvent(line)
        assert le.operation == 'insert'