Example no. 1
0
 def test_insert_eof(self):
     """Text pushed in via insert_text() should be lexed by the existing iterator."""
     lexer = Lexer('abc')
     stream = lexer.parse()
     # The original input yields its identifier first.
     tok = stream.next()
     self.assertEqual(tok.type, 'IDENTIFIER')
     self.assertEqual(tok.value, 'abc')
     # Inserted text becomes the next thing the iterator produces.
     lexer.insert_text('foo')
     tok = stream.next()
     self.assertEqual(tok.type, 'IDENTIFIER')
     self.assertEqual(tok.value, 'foo')
Example no. 2
0
 def test_peek_char(self):
     """peek_char() should expose the next raw character without consuming tokens."""
     lexer = Lexer('abc xyz')
     stream = lexer.parse()
     # Before any token is consumed, the first character is visible.
     self.assertEqual(stream.peek_char(), 'a')
     tok = stream.next()
     self.assertEqual(tok.type, 'IDENTIFIER')
     self.assertEqual(tok.value, 'abc')
     # After the identifier, the separating space is next.
     self.assertEqual(stream.peek_char(), ' ')
     tok = stream.next()
     self.assertEqual(tok, Token(' '))
     self.assertEqual(stream.peek_char(), 'x')
     tok = stream.next()
     self.assertEqual(tok.type, 'IDENTIFIER')
     self.assertEqual(tok.value, 'xyz')
     # With all input consumed, peek_char() reports the EOF sentinel.
     self.assertIs(stream.peek_char(), EOF)
Example no. 3
0
 def test_changequote(self):
     """changequote() should swap the active quote pair, and restore defaults when called bare."""
     lexer = Lexer("`abc'`abc'[xyz]`abc'")
     stream = lexer.parse()
     # Default quotes (backtick/apostrophe) produce a STRING token.
     tok = stream.next()
     self.assertEqual(tok.type, 'STRING')
     self.assertEqual(tok.value, 'abc')
     lexer.changequote('[', ']')
     # changing the quote characters should make the default quote
     # characters be treated as normal characters.
     tok = stream.next()
     self.assertEqual(tok, Token('`'))
     tok = stream.next()
     self.assertEqual(tok.type, 'IDENTIFIER')
     self.assertEqual(tok.value, 'abc')
     tok = stream.next()
     self.assertEqual(tok, Token('\''))
     # ...and the new quote characters should work
     tok = stream.next()
     self.assertEqual(tok.type, 'STRING')
     self.assertEqual(tok.value, 'xyz')
     # check that the defaults work
     lexer.changequote()
     tok = stream.next()
     self.assertEqual(tok.type, 'STRING')
     self.assertEqual(tok.value, 'abc')
Example no. 4
0
 def lex(self, text):
     """Run the lexer over *text* and return every produced token as a list."""
     lexer = Lexer(text)
     return list(lexer.parse())