def open(self, path):
  """Open the journal to be able to iterate over its contents.

  Args:
    path: [string] The path to load the journal from.

  Raises:
    ValueError: If the navigator is already open.
  """
  # Identity comparison against None is the idiomatic (and safe) test.
  if self.__input_stream is not None:
    raise ValueError('Navigator is already open.')
  # The builtin open() is called here (not this method); the opened file
  # is owned by the RecordInputStream until close().
  self.__input_stream = RecordInputStream(open(path, 'r'))
def assertMessagesEqual(self, expect, got):
  """Assert that two serialized journal buffers contain equal records.

  Args:
    expect: [bytes] The serialized records we expect.
    got: [bytes] The serialized records we actually have.
  """
  want_records = RecordInputStream(BytesIO(expect))
  have_records = RecordInputStream(BytesIO(got))

  # Compare record by record; a short 'got' stream raises StopIteration.
  for want in want_records:
    self.assertItemsEqual(want, next(have_records))

  # The 'got' stream must be exhausted too.
  with self.assertRaises(StopIteration):
    next(have_records)
class JournalNavigator(object):
  """Iterates over journal JSON."""

  def __init__(self):
    """Constructor."""
    self.__input_stream = None  # Set by open(), cleared by close().
    self.__decoder = json.JSONDecoder()

  def __iter__(self):
    """Iterate over the contents of the journal.

    Raises:
      ValueError: If the navigator is not open.
    """
    self.__check_open()
    return self

  def open(self, path):
    """Open the journal to be able to iterate over its contents.

    Args:
      path: [string] The path to load the journal from.

    Raises:
      ValueError: If the navigator is already open.
    """
    # Identity comparison against None is the idiomatic (and safe) test.
    if self.__input_stream is not None:
      raise ValueError('Navigator is already open.')
    self.__input_stream = RecordInputStream(open(path, 'r'))

  def close(self):
    """Close the journal.

    Raises:
      ValueError: If the navigator is not open.
    """
    self.__check_open()
    self.__input_stream.close()
    self.__input_stream = None

  def next(self):
    """Return the next item in the journal.

    Raises:
      StopIteration: When there are no more elements.
      ValueError: If the navigator is not open, or a record is not
          valid JSON.
    """
    self.__check_open()
    json_str = self.__input_stream.next()
    try:
      return self.__decoder.decode(json_str)
    except ValueError:
      # Print-function form so this diagnostic works under Python 2 and 3.
      print('Invalid json record:\n{0}'.format(json_str))
      raise

  # Alias so instances are also iterators under Python 3.
  __next__ = next

  def __check_open(self):
    """Verify that the navigator is open (and thus valid to iterate)."""
    if self.__input_stream is None:
      raise ValueError('Navigator is not open.')
def test_lifecycle(self):
  """Verify we store multiple objects as a list of snapshots."""
  first = TestData('first', 1, TestDetails())
  second = TestData('second', 2)
  journal = TestJournal(StringIO())

  journal.store(first)
  journal.store(second)
  journal.terminate()

  # NOTE(review): the encoding keyword is Python 2 only.
  decoder = json.JSONDecoder(encoding='ASCII')
  got_stream = RecordInputStream(StringIO(journal.final_content))
  got_str = [e for e in got_stream]
  got_json = '[{0}]'.format(','.join(got_str))
  got = decoder.decode(got_json)
  # Expect 4 records: the snapshots are at indexes 1 and 2.
  # assertEquals is a deprecated alias; use assertEqual.
  self.assertEqual(4, len(got))

  snapshot = JsonSnapshot()
  snapshot.add_object(first)
  json_object = snapshot.to_json_object()
  # presumably the clock ticked once per journal event, so the first
  # store() is one tick before last_time -- TODO confirm with TestJournal.
  json_object['_timestamp'] = journal.clock.last_time - 1
  json_object['_thread'] = threading.current_thread().ident
  self.assertItemsEqual(json_object, got[1])

  snapshot = JsonSnapshot()
  snapshot.add_object(second)
  json_object = snapshot.to_json_object()
  json_object['_timestamp'] = journal.clock.last_time
  json_object['_thread'] = threading.current_thread().ident
  self.assertItemsEqual(json_object, got[2])
def test_write_message_with_metadata(self):
  """Verify the journal messages contain the metadata we add."""
  output = StringIO()
  journal = TestJournal(output)
  offset = len(output.getvalue())

  journal.write_message('My message.', str='ABC', num=123)

  # The keyword arguments above should surface as journal metadata.
  metadata = {'str': 'ABC', 'num': 123}
  message_json_text = self.expect_message_text(
      journal.clock, 'My message.', metadata)

  record_stream = RecordInputStream(StringIO(output.getvalue()[offset:]))
  decoder = json.JSONDecoder(encoding='ASCII')
  self.assertItemsEqual(decoder.decode(message_json_text),
                        decoder.decode(record_stream.next()))
def test_write_message_with_metadata(self):
  """Verify the journal messages contain the metadata we add."""
  sink = StringIO()
  journal = TestJournal(sink)
  start = len(sink.getvalue())

  journal.write_message('My message.', str='ABC', num=123)

  # Build the record we expect from the same metadata we passed above.
  metadata = {'str': 'ABC', 'num': 123}
  want_text = self.expect_message_text(
      journal.clock, 'My message.', metadata)

  stream = RecordInputStream(StringIO(sink.getvalue()[start:]))
  codec = json.JSONDecoder(encoding='ASCII')
  want = codec.decode(want_text)
  have = codec.decode(stream.next())
  self.assertItemsEqual(want, have)
def test_context_logging(self):
  """Verify execute_in_context emits BEGIN, message, then END records."""
  offset = len(_journal_file.getvalue())
  logger = JournalLogger('test_journal_logger')
  logger.addHandler(JournalLogHandler(path=None))

  citest_extra = {'foo': 'bar'}
  start_time = _journal_clock.last_time
  JournalLogger.execute_in_context(
      'The Test Context',
      lambda: {logger.debug('Test Log Message')},
      **citest_extra)

  # The three records we expect the context execution to have journaled.
  begin_record = {
      '_title': 'The Test Context',
      '_type': 'JournalContextControl',
      '_timestamp': start_time + 1,
      '_thread': thread.get_ident(),
      'control': 'BEGIN',
      'foo': 'bar',
  }
  message_record = {
      '_value': 'Test Log Message',
      '_type': 'JournalMessage',
      '_level': logging.DEBUG,
      '_timestamp': start_time + 2,
      '_thread': thread.get_ident(),
      'format': 'pre'
  }
  end_record = {
      '_type': 'JournalContextControl',
      '_timestamp': start_time + 3,
      '_thread': thread.get_ident(),
      'control': 'END'
  }

  entry_str = _journal_file.getvalue()[offset:]
  stream = RecordInputStream(StringIO(entry_str))
  decoder = json_module.JSONDecoder(encoding='utf-8')
  for expect in (begin_record, message_record, end_record):
    self.assertEqual(expect, decoder.decode(stream.next()))
def test_store(self):
  """Verify we store objects as JSON snapshots."""
  data = TestData('NAME', 1234, TestDetails())
  snapshot = JsonSnapshot()
  snapshot.add_object(data)

  # A fixed clock makes the record timestamp predictable.
  time_function = lambda: 1.23
  journal = Journal(time_function)
  sink = StringIO()
  journal.open_with_file(sink)
  offset = len(sink.getvalue())

  journal.store(data)

  record_stream = RecordInputStream(StringIO(sink.getvalue()[offset:]))
  decoder = json.JSONDecoder(encoding='ASCII')
  got = decoder.decode(record_stream.next())

  expect = snapshot.to_json_object()
  expect['_timestamp'] = time_function()
  expect['_thread'] = threading.current_thread().ident
  self.assertItemsEqual(expect, got)
def test_context_logging(self):
  """Verify execute_in_context journals BEGIN/message/END in order."""
  offset = len(_journal_file.getvalue())
  logger = JournalLogger('test_journal_logger')
  logger.addHandler(JournalLogHandler(path=None))

  citest_extra = {'foo': 'bar'}
  start_time = _journal_clock.last_time
  JournalLogger.execute_in_context(
      'The Test Context',
      lambda: {logger.debug('Test Log Message')},
      **citest_extra)

  # Each clock tick advances the expected timestamp by one.
  expect_sequence = [{
      '_title': 'The Test Context',
      '_type': 'JournalContextControl',
      '_timestamp': start_time + 1,
      '_thread': thread.get_ident(),
      'control': 'BEGIN',
      'foo': 'bar',
  }, {
      '_value': 'Test Log Message',
      '_type': 'JournalMessage',
      '_level': logging.DEBUG,
      '_timestamp': start_time + 2,
      '_thread': thread.get_ident(),
      'format': 'pre'
  }, {
      '_type': 'JournalContextControl',
      '_timestamp': start_time + 3,
      '_thread': thread.get_ident(),
      'control': 'END'
  }]

  record_stream = RecordInputStream(
      StringIO(_journal_file.getvalue()[offset:]))
  for want in expect_sequence:
    have = json_module.JSONDecoder(
        encoding='utf-8').decode(record_stream.next())
    self.assertEqual(want, have)
def test_journal_data_summary(self):
  """Check the classes journal_data."""
  if FAILURE_TESTS_DISABLED:
    self.skipTest('Not run from test_runner_test.main')

  # Hoist the decoder out of the loop (the original constructed a new
  # JSONDecoder per record) and build the list with a comprehension.
  decoder = json.JSONDecoder()
  stream = RecordInputStream(BytesIO(self.journal_data))
  record_list = [decoder.decode(record_str) for record_str in stream]

  # The journal should end with the standard "finished" message.
  last_record = record_list[-1]
  self.assertEqual('JournalMessage', last_record.get('_type'))
  self.assertEqual('Finished journal.', last_record.get('_value'))

  # The record before it should be the summary snapshot.
  summary_record = record_list[-2]
  self.assertEqual('JsonSnapshot', summary_record.get('_type'))
  self.assertEqual('Summary for %s' % self.expected_fixture_name,
                   summary_record.get('_title'))
  self.assertEqual(self.expected_summary_relation,
                   summary_record.get('_default_relation'))
def test_journal_logger_with_custom_message(self):
  """Verify _journal_message replaces the logged text in the journal record."""
  offset = len(_journal_file.getvalue())
  logger = JournalLogger(__name__)
  logger.addHandler(JournalLogHandler(path=None))

  citest_extra = {'foo': 'bar', '_journal_message': 'HELLO, JOURNAL'}
  logger.debug('Hello, World!', extra={'citest_journal': citest_extra})

  # The journal should carry the override text, not the logged text.
  expect = {
      '_value': 'HELLO, JOURNAL',
      '_type': 'JournalMessage',
      '_level': logging.DEBUG,
      '_timestamp': _journal_clock.last_time,
      '_thread': current_thread().ident,
      'foo': 'bar',
      'format': 'pre'
  }
  raw_entry = _journal_file.getvalue()[offset:]
  record_str = next(RecordInputStream(BytesIO(raw_entry)))
  self.assertEqual(expect, json_module.JSONDecoder().decode(record_str))
def test_journal_logger(self):
  """Verify citest_journal extra fields appear in the journal record."""
  offset = len(_journal_file.getvalue())
  logger = JournalLogger('test_journal_logger')
  logger.addHandler(JournalLogHandler(path=None))

  citest_extra = {'foo': 'bar', 'format': 'FMT'}
  logger.info('Hello, World!', extra={'citest_journal': citest_extra})

  expect = {
      '_value': 'Hello, World!',
      '_type': 'JournalMessage',
      '_level': logging.INFO,
      '_timestamp': _journal_clock.last_time,
      '_thread': current_thread().ident,
      'foo': 'bar',
      'format': 'FMT',
  }
  raw_entry = _journal_file.getvalue()[offset:]
  record_str = next(RecordInputStream(BytesIO(raw_entry)))
  self.assertEqual(expect, json_module.JSONDecoder().decode(record_str))
def test_journal_log_handler_from_generic_logger(self):
  """Verify the handler journals records logged via a plain logging.Logger."""
  offset = len(_journal_file.getvalue())
  logger = logging.getLogger('test_journal_log_handler')
  logger.addHandler(JournalLogHandler(path=None))

  citest_extra = {'foo': 'bar', '_journal_message': 'HELLO, JOURNAL'}
  logger.error('Hello, World!', extra={'citest_journal': citest_extra})

  # Note the extra args arent visible because they arent in the normal
  # LogRecord.
  expect = {
      '_value': 'HELLO, JOURNAL',
      '_type': 'JournalMessage',
      '_level': logging.ERROR,
      '_timestamp': _journal_clock.last_time,
      '_thread': current_thread().ident,
      'foo': 'bar',
      'format': 'pre',
  }
  raw_entry = _journal_file.getvalue()[offset:]
  record_str = next(RecordInputStream(BytesIO(raw_entry)))
  self.assertEqual(expect, json_module.JSONDecoder().decode(record_str))