def invalid_state_test_2():
    """Decoding must fail fast (AssertionError) when the lexer state is corrupt."""
    lexer = LatexIncrementalDecoder()
    # piggyback invalid state
    lexer.state = '**invalid**'
    # Use pytest.raises for consistency with the other tests in this file;
    # nose is unmaintained and its assert_raises no longer runs on modern Python.
    with pytest.raises(AssertionError):
        lexer.decode(b' ')
def invalid_token_test():
    """Decoding must fail fast (AssertionError) when tokenization yields an unknown token name."""
    lexer = LatexIncrementalDecoder()
    # piggyback an implementation which results in invalid tokens
    lexer.get_raw_tokens = lambda bytes_, final: [Token('**invalid**', bytes_)]
    # Use pytest.raises for consistency with the other tests in this file;
    # nose is unmaintained and its assert_raises no longer runs on modern Python.
    with pytest.raises(AssertionError):
        lexer.decode(b'hello')
def test_invalid_state_2():
    """A decoder whose internal state was clobbered must raise AssertionError on decode."""
    decoder = LatexIncrementalDecoder()
    # force the decoder into a state value it can never reach legitimately
    decoder.state = '**invalid**'
    # callable form of pytest.raises: equivalent to the context-manager form
    pytest.raises(AssertionError, decoder.decode, b' ')
def test_invalid_token():
    """Decoding must raise AssertionError when the token stream contains an unknown token name."""
    decoder = LatexIncrementalDecoder()

    # monkeypatch the tokenizer so every call emits a token with a bogus name
    def bad_tokens(bytes_, final):
        return [Token('**invalid**', bytes_)]

    decoder.get_raw_tokens = bad_tokens
    # callable form of pytest.raises: equivalent to the context-manager form
    pytest.raises(AssertionError, decoder.decode, b'hello')