def testParseExceptions(self):
    """Reader.parse rejects a non-token 'root' and non-string 'content'."""
    MooseDocs.LOG_LEVEL = logging.DEBUG
    reader = readers.Reader(lexers.RecursiveLexer('foo'))

    # 'root' must be a token node, not a list
    with self.assertRaises(exceptions.MooseDocsException) as e:
        reader.parse([], u'')
    # str(e.exception) works on Python 2 and 3; BaseException.message was
    # removed in Python 3, so the old e.exception.message access fails there.
    self.assertIn("The argument 'root'", str(e.exception))

    # 'content' must be a string, not a list
    with self.assertRaises(exceptions.MooseDocsException) as e:
        reader.parse(tokens.Token(), [])
    self.assertIn("The argument 'content'", str(e.exception))

    # Restore the default level so the DEBUG setting does not leak into
    # other tests (matches the pattern used by the sibling tokenize test).
    MooseDocs.LOG_LEVEL = logging.INFO
def testParseExceptions(self):
    """Invalid 'root' and 'content' arguments to tokenize() raise MooseDocsException."""
    MooseDocs.LOG_LEVEL = logging.DEBUG
    lexer = lexers.RecursiveLexer('foo')
    reader = readers.Reader(lexer)

    # A list is not a valid AST root.
    with self.assertRaises(exceptions.MooseDocsException) as cm:
        reader.tokenize([], '', None)
    self.assertIn("The argument 'root'", str(cm.exception))

    # A list is not valid content either.
    with self.assertRaises(exceptions.MooseDocsException) as cm:
        reader.tokenize(tokens.Token(), [], None)
    self.assertIn("The argument 'content'", str(cm.exception))

    # Put the logging level back so other tests are unaffected.
    MooseDocs.LOG_LEVEL = logging.INFO
def testTokenizeException(self):
    """A component that raises during parse produces an ExceptionToken, then parsing continues."""
    ast = tokens.Token(None)
    node = page.PageNodeBase(None)
    rdr = readers.Reader(lexers.RecursiveLexer('foo'))
    trans = Translator(node, rdr, HTMLRenderer(), [])
    trans.init('')
    rdr.add('foo', WordComponent())

    rdr.parse(ast, u'throw bar')

    # The failing word is recorded as an exception token...
    self.assertIsInstance(ast(0), tokens.ExceptionToken)
    # ...and the remaining content is still tokenized normally.
    self.assertIsInstance(ast(1), tokens.Word)
    self.assertEqual(ast(1).content, u'bar')
def testTokenizeException(self):
    """A component that raises during tokenize produces an ErrorToken, then tokenizing continues."""
    ast = tokens.Token(None)
    rdr = readers.Reader(lexers.RecursiveLexer('block', 'inline'))
    rdr.add('block', WordComponent())
    rdr.add('inline', LetterComponent())

    rdr.tokenize(ast, 'throw bar', pages.Page('foo', source='foo'))

    # The failing word becomes an error token...
    self.assertEqual(ast(0).name, 'ErrorToken')
    # ...and 'bar' is still tokenized into a Word of Letters.
    word = ast(1)
    self.assertEqual(word.name, 'Word')
    for idx, char in enumerate('bar'):
        self.assertEqual(word(idx).name, 'Letter')
        self.assertEqual(word(idx)['content'], char)
def testBasic(self):
    """Reader.parse builds a block token containing one Word token per input word."""
    ast = tokens.Token(None)
    node = page.PageNodeBase(None)
    rdr = readers.Reader(lexers.RecursiveLexer('block', 'inline'))
    trans = Translator(node, rdr, HTMLRenderer(), [])
    trans.init('')
    rdr.add('block', BlockComponent())
    rdr.add('inline', WordComponent())

    rdr.parse(ast, u'foo bar')

    block = ast(0)
    self.assertIsInstance(block, tokens.Token)
    for idx, text in enumerate([u'foo', u'bar']):
        self.assertIsInstance(block(idx), tokens.Word)
        self.assertEqual(block(idx).content, text)
def testAddExceptions(self):
    """Reader.add type-checks its 'group', 'component', and 'location' arguments."""
    MooseDocs.LOG_LEVEL = logging.DEBUG
    reader = readers.Reader(lexers.RecursiveLexer('foo'))

    # 'group' must be a string, not a list
    with self.assertRaises(exceptions.MooseDocsException) as e:
        reader.add([], u'', '')
    # str(e.exception) works on Python 2 and 3; BaseException.message was
    # removed in Python 3, so the old e.exception.message access fails there.
    self.assertIn("The argument 'group'", str(e.exception))

    # 'component' must be a TokenComponent, not a string
    with self.assertRaises(exceptions.MooseDocsException) as e:
        reader.add('foo', u'', '')
    self.assertIn("The argument 'component'", str(e.exception))

    # 'location' must be a string, not a list
    with self.assertRaises(exceptions.MooseDocsException) as e:
        reader.add('foo', components.TokenComponent(), [])
    self.assertIn("The argument 'location'", str(e.exception))

    # Restore the default level so the DEBUG setting does not leak into
    # other tests.
    MooseDocs.LOG_LEVEL = logging.INFO
def testTokenize(self):
    """RecursiveLexer descends from 'block' into 'inline' via the named 'inline' regex group."""
    lexer = lexers.RecursiveLexer('block', 'inline')
    # Raw strings: '\w' inside a plain literal is an invalid escape sequence
    # (DeprecationWarning on Python 3.6+, SyntaxWarning later); the pattern
    # value itself is unchanged.
    lexer.add('block', 'foo', re.compile(r'(?P<inline>\w+) *'), letters_func)
    lexer.add('inline', 'bar', re.compile(r'(?P<content>\w)'), letter_func)
    root = tokens.Token(None)

    lexer.tokenize(root, 'foo', None, lexer.grammar('block'))

    # 'foo' becomes a Letters token holding one Letter per character.
    self.assertIsInstance(root(0), tokens.Token)
    self.assertEqual(root(0).name, 'Letters')
    for idx, char in enumerate('foo'):
        self.assertEqual(root(0)(idx).name, 'Letter')
        self.assertEqual(root(0)(idx)['content'], char)
def testTokenize(self):
    """RecursiveLexer descends from 'block' into 'inline' via the named 'inline' regex group."""
    lexer = lexers.RecursiveLexer('block', 'inline')
    # Raw strings: '\w' inside a plain literal is an invalid escape sequence
    # (DeprecationWarning on Python 3.6+); the pattern value is unchanged.
    lexer.add('block', 'foo', re.compile(r'(?P<inline>\w+) *'), EmptyComponent())
    lexer.add('inline', 'bar', re.compile(r'(?P<content>\w)'), WordComponent())
    root = tokens.Token(None)

    # NOTE(review): 'grammer' matches the (misspelled) method name of this
    # Lexer version; do not "correct" it here without renaming the API itself.
    lexer.tokenize(root, lexer.grammer(), u'foo')

    # The block match yields a plain Token (EmptyComponent), not a Word...
    self.assertIsInstance(root(0), tokens.Token)
    self.assertNotIsInstance(root(0), tokens.Word)
    # ...containing one Word per character of 'foo'.
    for idx, char in enumerate(u'foo'):
        self.assertIsInstance(root(0)(idx), tokens.Word)
        self.assertEqual(root(0)(idx).content, char)
def testParse(self):
    """Reader.tokenize splits 'foo bar' into Word tokens, each holding Letter tokens."""
    ast = tokens.Token(None)
    rdr = readers.Reader(lexers.RecursiveLexer('block', 'inline'))
    rdr.add('block', WordComponent())
    rdr.add('inline', LetterComponent())

    rdr.tokenize(ast, 'foo bar', None)

    # One Word token per input word; names checked before contents to keep
    # the original assertion order within each word.
    for widx, expected in enumerate(['foo', 'bar']):
        word = ast(widx)
        self.assertEqual(word.name, 'Word')
        for lidx in range(len(expected)):
            self.assertEqual(word(lidx).name, 'Letter')
        for lidx, char in enumerate(expected):
            self.assertEqual(word(lidx)['content'], char)
def testConstruction(self):
    """Reader exposes the exact lexer instance it was constructed with."""
    lex = lexers.RecursiveLexer('foo')
    self.assertIs(readers.Reader(lex).lexer, lex)