Code Example #1
from unittest import TestCase

# Import path assumed from the latexcodec package layout; adjust if the
# lexer lives elsewhere in your installed version.
from latexcodec.lexer import LatexIncrementalLexer


class LatexIncrementalLexerTest(TestCase):

    errors = 'strict'

    def setUp(self):
        self.lexer = LatexIncrementalLexer(errors=self.errors)

    def lex_it(self, latex_code, latex_tokens, final=False):
        tokens = self.lexer.get_tokens(latex_code, final=final)
        self.assertEqual(
            list(token.text for token in tokens),
            latex_tokens)

    def tearDown(self):
        del self.lexer

    def test_newline(self):
        # a single newline is lexed as an ordinary space token
        self.lex_it(
            b"hello\nworld", b"h|e|l|l|o| |w|o|r|l|d".split(b'|'),
            final=True)

    def test_par(self):
        # a blank line yields a space followed by a \par control word
        self.lex_it(
            b"hello\n\nworld", b"h|e|l|l|o| |\\par|w|o|r|l|d".split(b'|'),
            final=True)
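
For quick reference, the sketch below drives the same lexer outside the unittest harness. The latexcodec.lexer import path is an assumption about the package layout, and the expected token stream is copied from test_par above rather than verified output.

from latexcodec.lexer import LatexIncrementalLexer  # assumed import path

lexer = LatexIncrementalLexer(errors='strict')
tokens = lexer.get_tokens(b"hello\n\nworld", final=True)
# Per test_par above, the blank line should yield a space plus a \par token:
# [b'h', b'e', b'l', b'l', b'o', b' ', b'\\par', b'w', b'o', b'r', b'l', b'd']
print([token.text for token in tokens])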
Code Example #2
class LatexIncrementalLexerTest(TestCase):

    errors = 'strict'

    def setUp(self):
        self.lexer = LatexIncrementalLexer(errors=self.errors)

    def lex_it(self, latex_code, latex_tokens, final=False):
        tokens = self.lexer.get_tokens(latex_code, final=final)
        self.assertEqual(list(token.text for token in tokens), latex_tokens)

    def tearDown(self):
        del self.lexer

    def test_newline(self):
        self.lex_it(b"hello\nworld",
                    b"h|e|l|l|o| |w|o|r|l|d".split(b'|'),
                    final=True)

    def test_par(self):
        self.lex_it(b"hello\n\nworld",
                    b"h|e|l|l|o| |\\par|w|o|r|l|d".split(b'|'),
                    final=True)
Code Example #3
def setUp(self):
    self.lexer = LatexIncrementalLexer(errors=self.errors)
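
The short fragment above shows only the constructor call. Because errors is read from a class attribute in setUp, the full test case from Code Example #1 can be rerun with a different codec-style error handler by subclassing. A minimal sketch, assuming the lexer also accepts a handler name such as 'replace':

class LatexIncrementalLexerReplaceTest(LatexIncrementalLexerTest):
    # 'replace' is an assumption; substitute any error handler name the
    # lexer actually supports.
    errors = 'replace'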