import pytest

from latexcodec.lexer import LatexIncrementalDecoder, Token


def test_token_assign_name():
    with pytest.raises(AttributeError):
        t = Token('hello', u'world')
        t.name = 'test'


def test_token_assign_text():
    with pytest.raises(AttributeError):
        t = Token('hello', u'world')
        t.text = 'test'


def test_token_assign_other():
    with pytest.raises(AttributeError):
        t = Token('hello', u'world')
        t.blabla = 'test'
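
# Why the three tests above expect AttributeError: Token behaves like an
# immutable record. A minimal sketch of that behaviour, assuming Token is a
# namedtuple subclass with empty __slots__ (a hypothetical illustration, not
# necessarily latexcodec's actual source):
import collections


class _TokenSketch(collections.namedtuple('_TokenSketch', 'name text')):
    __slots__ = ()  # no per-instance __dict__, so new attributes also fail


def test_token_sketch_is_immutable():
    t = _TokenSketch('hello', u'world')
    with pytest.raises(AttributeError):
        t.name = 'test'  # tuple fields are read-only
    with pytest.raises(AttributeError):
        t.blabla = 'test'  # __slots__ = () blocks arbitrary attributes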

def test_invalid_token():
    lexer = LatexIncrementalDecoder()
    # piggyback an implementation which results in invalid tokens
    lexer.get_raw_tokens = lambda bytes_, final: [Token('**invalid**', bytes_)]
    with pytest.raises(AssertionError):
        lexer.decode(b'hello')
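
# For contrast with test_invalid_token, a sketch of the unpatched decoder's
# happy path (hedged: test name is illustrative, and this assumes decode
# accepts the standard codecs final flag and passes plain ASCII through
# unchanged):
def test_decode_unpatched():
    lexer = LatexIncrementalDecoder()
    assert lexer.decode(b'hello', final=True) == u'hello'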

def test_token_create():
    t = Token()
    assert t.name == 'unknown'
    assert t.text == b''

def test_token_create_with_args():
    t = Token('hello', u'world')
    assert t.name == 'hello'
    assert t.text == u'world'