Example #1
def test_read_1(self):
    # read() consumes one token per call: identifier, "=", number, EOL, then EOF.
    l = Lexer(File(io.StringIO("aaa = 123\n")))
    self.assertTokenEqual(l.read(), IdToken(1, "aaa"))
    self.assertTokenEqual(l.read(), IdToken(1, "="))
    self.assertTokenEqual(l.read(), NumToken(1, 123))
    self.assertTokenEqual(l.read(), IdToken(1, EOL))
    self.assertTokenEqual(l.read(), EOF)
Example #2
def test_peek_1(self):
    # peek(k) looks k tokens ahead without consuming any of them.
    l = Lexer(File(io.StringIO("aaa = 123\n")))
    self.assertTokenEqual(l.peek(0), IdToken(1, "aaa"))
    self.assertTokenEqual(l.peek(1), IdToken(1, "="))
    self.assertTokenEqual(l.peek(2), NumToken(1, 123))
    self.assertTokenEqual(l.peek(3), IdToken(1, EOL))
    self.assertTokenEqual(l.peek(4), EOF)
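Examples #1 and #2 together pin down the read/peek contract: read() consumes the next token, while peek(k) looks k tokens ahead without consuming anything. Below is a minimal sketch of a lexer front-end over a token queue that satisfies that contract; the class, the EOF sentinel, and the token values are illustrative assumptions, not the project's actual code.

from collections import deque

EOF = "EOF"  # stand-in sentinel; the real project defines its own EOF token


class QueueLexerSketch:
    def __init__(self, tokens):
        # The real Lexer fills its queue by matching a regex against each
        # input line (see _add_token in Example #4); here tokens are given directly.
        self.queue = deque(tokens)

    def read(self):
        # Consume and return the next token, or EOF when the queue is empty.
        return self.queue.popleft() if self.queue else EOF

    def peek(self, k):
        # Look k tokens ahead without removing anything from the queue.
        return self.queue[k] if k < len(self.queue) else EOF


lex = QueueLexerSketch(["aaa", "=", 123])
assert lex.peek(0) == "aaa" and lex.peek(3) == EOF   # peeking never consumes
assert lex.read() == "aaa" and lex.peek(0) == "="    # reading advances by one token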
Example #3
class TestNumToken(StoneTestBase):
    def setUp(self):
        # Positional arguments are (line_number, value); see _add_token in Example #4.
        self.num_token = NumToken(10, 20)

    def test_number(self):
        self.assertEqual(self.num_token.number, 20)

    def test_is_num(self):
        self.assertTrue(self.num_token.is_num())
Example #4
File: lexer.py  Project: taxpon/stone
    def _add_token(self, line_number: int, matcher):
        groups = matcher.groups()
        m = groups[0]  # the whole matched token, or None for pure whitespace

        if m is not None:  # if not space
            if groups[1] is None:  # if not comment
                if groups[2] is not None:  # number literal
                    token = NumToken(line_number=line_number,
                                     value=int(groups[2]))
                elif groups[3] is not None:  # string literal
                    token = StrToken(line_number=line_number,
                                     text=self._to_string(groups[3]))
                else:  # identifier, keyword, or operator
                    token = IdToken(line_number=line_number, _id=m)
                self.queue.append(token)
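The group indices that _add_token reads line up with a token regex whose first group is the whole token, second a comment, third a number literal, and fourth a string literal. The following is a minimal sketch assuming a pattern in the style of the original Stone lexer; the actual pattern in lexer.py may differ.

import re

# Assumed pattern: group 1 = whole token, group 2 = comment,
# group 3 = number literal, group 4 = string literal (inner group is non-capturing).
TOKEN_PATTERN = re.compile(
    r'\s*((//.*)|([0-9]+)|("(?:\\"|\\\\|\\n|[^"])*")'
    r'|[A-Z_a-z][A-Z_a-z0-9]*|==|<=|>=|&&|\|\||[^\w\s])?'
)

line = 'aaa = 123'
pos = 0
while pos < len(line):
    matcher = TOKEN_PATTERN.match(line, pos)
    groups = matcher.groups()  # the same 0-based view _add_token indexes
    print(groups)              # ('aaa', None, None, None), then ('=', ...), then ('123', None, '123', None)
    pos = matcher.end()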
Example #5
def test_read_2(self):
    # Two input lines: tokens carry line number 1 then 2, and each line ends with an EOL token.
    l = Lexer(File(io.StringIO('aaa = 123\nif a = b {print("Hello")}')))
    self.assertTokenEqual(l.read(), IdToken(1, "aaa"))
    self.assertTokenEqual(l.read(), IdToken(1, "="))
    self.assertTokenEqual(l.read(), NumToken(1, 123))
    self.assertTokenEqual(l.read(), IdToken(1, EOL))
    self.assertTokenEqual(l.read(), IdToken(2, "if"))
    self.assertTokenEqual(l.read(), IdToken(2, "a"))
    self.assertTokenEqual(l.read(), IdToken(2, "="))
    self.assertTokenEqual(l.read(), IdToken(2, "b"))
    self.assertTokenEqual(l.read(), IdToken(2, "{"))
    self.assertTokenEqual(l.read(), IdToken(2, "print"))
    self.assertTokenEqual(l.read(), IdToken(2, "("))
    self.assertTokenEqual(l.read(), StrToken(2, "Hello"))
    self.assertTokenEqual(l.read(), IdToken(2, ")"))
    self.assertTokenEqual(l.read(), IdToken(2, "}"))
    self.assertTokenEqual(l.read(), IdToken(2, EOL))
    self.assertTokenEqual(l.read(), EOF)
Example #6
def test_peek_2(self):
    # Same input as test_read_2, inspected entirely through peek() offsets without consuming.
    l = Lexer(File(io.StringIO('aaa = 123\nif a = b {print("Hello")}')))
    self.assertTokenEqual(l.peek(0), IdToken(1, "aaa"))
    self.assertTokenEqual(l.peek(1), IdToken(1, "="))
    self.assertTokenEqual(l.peek(2), NumToken(1, 123))
    self.assertTokenEqual(l.peek(3), IdToken(1, EOL))
    self.assertTokenEqual(l.peek(4), IdToken(2, "if"))
    self.assertTokenEqual(l.peek(5), IdToken(2, "a"))
    self.assertTokenEqual(l.peek(6), IdToken(2, "="))
    self.assertTokenEqual(l.peek(7), IdToken(2, "b"))
    self.assertTokenEqual(l.peek(8), IdToken(2, "{"))
    self.assertTokenEqual(l.peek(9), IdToken(2, "print"))
    self.assertTokenEqual(l.peek(10), IdToken(2, "("))
    self.assertTokenEqual(l.peek(11), StrToken(2, "Hello"))
    self.assertTokenEqual(l.peek(12), IdToken(2, ")"))
    self.assertTokenEqual(l.peek(13), IdToken(2, "}"))
    self.assertTokenEqual(l.peek(14), IdToken(2, EOL))
    self.assertTokenEqual(l.peek(15), EOF)
Example #7
def setUp(self):
    self.num_token = NumToken(10, 20)
Example #8
def test_value(self):
    # NumberLiteral wraps a NumToken and exposes the token's number as .value.
    value = 100
    nl = NumberLiteral(NumToken(1, value))
    self.assertEqual(nl.value, value)
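The test above implies that NumberLiteral is an AST leaf wrapping a NumToken and exposing the token's number as its value. A hypothetical sketch of such a leaf follows; it is not necessarily the project's actual class.

class NumberLiteralSketch:
    def __init__(self, token):
        self.token = token

    @property
    def value(self):
        # NumToken exposes its numeric payload as .number (see Example #3).
        return self.token.number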