def invalid_token_test():
    lexer = LatexIncrementalDecoder()
    # piggyback an implementation which results in invalid tokens
    lexer.get_raw_tokens = lambda bytes_, final: [Token('**invalid**', bytes_)]
    nose.tools.assert_raises(AssertionError, lambda: lexer.decode(b'hello'))
def test_invalid_token():
    lexer = LatexIncrementalDecoder()
    # piggyback an implementation which results in invalid tokens
    lexer.get_raw_tokens = lambda bytes_, final: [Token('**invalid**', bytes_)]
    with pytest.raises(AssertionError):
        lexer.decode(b'hello')
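# Minimal sketch of the imports the two test variants above rely on.
# The latexcodec module path is an assumption based on the package's
# lexer module layout and may need adjusting; nose.tools and pytest
# are the respective test runners used by each variant.
import nose.tools
import pytest

from latexcodec.lexer import LatexIncrementalDecoder, Token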