from unittest import TestCase

import nose.tools

from latexcodec.lexer import LatexIncrementalDecoder, Token


class BaseTexLexerTest(TestCase):
    """Base fixture for TeX lexer tests."""

    errors = 'strict'

    def setUp(self):
        self.lexer = LatexIncrementalDecoder(self.errors)

    def lex_it(self, latex_code, latex_tokens, final=False):
        tokens = self.lexer.get_tokens(latex_code, final=final)
        self.assertEqual([token.text for token in tokens], latex_tokens)

    def tearDown(self):
        del self.lexer
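

# Illustrative usage sketch, not part of the original suite: a concrete test
# case built on the fixture above. The expectation that plain characters come
# back as one token each, with byte-string token texts, is an assumption
# based on the byte-oriented decode() calls further down; exact tokenization
# may differ between lexer versions.
class ExampleCharacterLexerTest(BaseTexLexerTest):

    def test_plain_characters(self):
        self.lex_it(b'abc', [b'a', b'b', b'c'], final=True)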


def invalid_state_test_2():
    lexer = LatexIncrementalDecoder()
    # force an invalid internal state; decode() should detect it and raise
    lexer.state = '**invalid**'
    nose.tools.assert_raises(AssertionError, lambda: lexer.decode(b'   '))


def invalid_token_test():
    lexer = LatexIncrementalDecoder()
    # substitute a tokenizer that yields tokens with an unknown name;
    # decode() should detect the invalid token and raise
    lexer.get_raw_tokens = lambda bytes_, final: [Token('**invalid**', bytes_)]
    nose.tools.assert_raises(AssertionError, lambda: lexer.decode(b'hello'))
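

# Sketch for contrast, not in the original suite: with the state and token
# machinery left untouched, the decoder should accept plain input without
# raising. The u'hello' expectation is an assumption that plain ASCII passes
# through the decoder unchanged.
def valid_decode_test():
    lexer = LatexIncrementalDecoder()
    nose.tools.assert_equal(lexer.decode(b'hello', final=True), u'hello')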