def testTokenMatches(self):
    """A keyword token matches an equal token instance and its token type,
    but not a different value or a different type."""
    lxr = lexer.Lexer(version=4)
    lxr._process_line(b'break')
    self.assertEqual(1, len(lxr._tokens))
    tok = lxr._tokens[0]
    # Matches by exact value and by bare type.
    self.assertTrue(tok.matches(lexer.TokKeyword(b'break')))
    self.assertTrue(tok.matches(lexer.TokKeyword))
    # Does not match a different keyword value or an unrelated type.
    self.assertFalse(tok.matches(lexer.TokKeyword(b'and')))
    self.assertFalse(tok.matches(lexer.TokSpace))
def testCursorExpect(self):
    """_expect raises ParserError on a mismatch without advancing the
    cursor, and on a match returns the token and advances past it
    (including trailing non-code tokens such as spaces).

    Fixed: literals are bytes, consistent with the other tests in this
    file (the version-4 lexer processes bytes lines and token values
    are bytes).
    """
    p = get_parser(b'break name 7.42 -- Comment text\n"string literal" ==')
    self.assertEqual(0, p._pos)
    # A failed expectation must not consume any tokens.
    self.assertRaises(parser.ParserError,
                      p._expect, lexer.TokKeyword(b'and'))
    self.assertEqual(0, p._pos)
    tok_break = p._expect(lexer.TokKeyword(b'break'))
    self.assertEqual(b'break', tok_break.value)
    self.assertEqual(1, p._pos)
    tok_name = p._expect(lexer.TokName)
    self.assertEqual(b'name', tok_name.value)
    self.assertEqual(3, p._pos)  # break, space, name
def testCursorAccept(self):
    """_accept returns None on a mismatch without advancing, and returns
    the token and advances on a match.

    Fixed: literals are bytes, consistent with the other tests in this
    file (see testCursorAcceptStopsAtMaxPos, which runs the same input
    as bytes).
    """
    p = get_parser(b'break name 7.42 -- Comment text\n"string literal" ==')
    self.assertEqual(0, p._pos)
    # Mismatches do not move the cursor.
    self.assertIsNone(p._accept(lexer.TokName))
    self.assertIsNone(p._accept(lexer.TokKeyword(b'and')))
    self.assertIsNotNone(p._accept(lexer.TokKeyword(b'break')))
    self.assertEqual(1, p._pos)
    self.assertIsNotNone(p._accept(lexer.TokName))
    self.assertIsNotNone(p._accept(lexer.TokNumber))
    self.assertIsNotNone(p._accept(lexer.TokString))
    self.assertIsNotNone(p._accept(lexer.TokSymbol(b'==')))
    self.assertEqual(11, p._pos)
def testCursorAcceptStopsAtMaxPos(self):
    """_accept refuses to advance the cursor past _max_pos."""
    cursor = get_parser(b'break name 7.42 -- Comment text\n'
                        b'"string literal" ==')
    cursor._max_pos = 4
    self.assertEqual(0, cursor._pos)
    # Mismatches leave the cursor in place.
    self.assertIsNone(cursor._accept(lexer.TokName))
    self.assertIsNone(cursor._accept(lexer.TokKeyword(b'and')))
    # Matches advance normally until the limit...
    self.assertIsNotNone(cursor._accept(lexer.TokKeyword(b'break')))
    self.assertEqual(1, cursor._pos)
    self.assertIsNotNone(cursor._accept(lexer.TokName))
    # ...then everything beyond _max_pos is rejected.
    self.assertIsNone(cursor._accept(lexer.TokNumber))
    self.assertIsNone(cursor._accept(lexer.TokString))
    self.assertIsNone(cursor._accept(lexer.TokSymbol(b'==')))
    self.assertEqual(3, cursor._pos)
def testStringMultipleLinesPlusAToken(self): lxr = lexer.Lexer(version=4) lxr._process_line(b'"abc def ghi \nand jkl" and\n') self.assertEqual(4, len(lxr._tokens)) self.assertEqual(lexer.TokString(b'abc def ghi \nand jkl'), lxr._tokens[0]) self.assertEqual(lexer.TokKeyword(b'and'), lxr._tokens[2])
def testTokenAndComment(self):
    """A keyword followed immediately by a comment yields separate
    keyword and comment tokens.

    Fixed: literals are bytes, consistent with the other tests in this
    file (the version-4 lexer processes bytes lines, e.g. testOneKeyword).
    """
    lxr = lexer.Lexer(version=4)
    lxr._process_line(b'and-- comment text and stuff\n')
    self.assertEqual(3, len(lxr._tokens))
    self.assertEqual(lexer.TokKeyword(b'and'), lxr._tokens[0])
    # The comment token includes its leading "--" marker.
    self.assertEqual(lexer.TokComment(b'-- comment text and stuff'),
                     lxr._tokens[1])
def testValidLuaNoErrors(self):
    """Lexing the VALID_LUA fixture line by line raises nothing and
    produces the expected leading token sequence."""
    lx = lexer.Lexer(version=4)
    for src_line in VALID_LUA.split(b'\n'):
        lx._process_line(src_line)
    toks = lx.tokens
    # First statement of the fixture: "v1 = nil".
    expected_prefix = [
        lexer.TokName(b'v1'),
        lexer.TokSpace(b' '),
        lexer.TokSymbol(b'='),
        lexer.TokSpace(b' '),
        lexer.TokKeyword(b'nil'),
    ]
    for idx, want in enumerate(expected_prefix):
        self.assertEqual(want, toks[idx])
def testOneKeyword(self):
    """A lone keyword plus its trailing newline lexes to two tokens."""
    lx = lexer.Lexer(version=4)
    lx._process_line(b'and\n')
    toks = lx._tokens
    self.assertEqual(2, len(toks))
    self.assertEqual(lexer.TokKeyword(b'and'), toks[0])