def test_read_1(self):
    """Sequential read() of a one-line program yields each token, then EOF."""
    lexer = Lexer(File(io.StringIO("aaa = 123\n")))
    expected = [
        IdToken(1, "aaa"),
        IdToken(1, "="),
        NumToken(1, 123),
        IdToken(1, EOL),
    ]
    for token in expected:
        self.assertTokenEqual(lexer.read(), token)
    # After the final EOL the lexer must report end-of-file.
    self.assertTokenEqual(lexer.read(), EOF)
def test_peek_1(self):
    """peek(i) exposes the i-th upcoming token without consuming it."""
    lexer = Lexer(File(io.StringIO("aaa = 123\n")))
    expected = [
        IdToken(1, "aaa"),
        IdToken(1, "="),
        NumToken(1, 123),
        IdToken(1, EOL),
        EOF,
    ]
    for offset, token in enumerate(expected):
        self.assertTokenEqual(lexer.peek(offset), token)
def setUp(self):
    """Build a `foo + "sample"` binary expression fixture for each test."""
    super().setUp()
    left = ASTLeaf(IdToken(1, "foo"))
    operator = ASTLeaf(IdToken(1, "+"))
    right = ASTLeaf(StrToken(1, "sample"))
    self.be = BinaryExpr([left, operator, right])
class TestIdToken(StoneTestBase):
    """Unit tests for the identifier token type."""

    def setUp(self):
        # Fixture: an identifier token on line 10.
        self.id_token = IdToken(10, "some_id")

    def test_text(self):
        """The token's text is the identifier it was built from."""
        self.assertEqual(self.id_token.text, "some_id")

    def test_is_id(self):
        """An IdToken reports itself as an identifier."""
        self.assertTrue(self.id_token.is_id())
def _read_line(self):
    """Tokenize one line from the source file into the token queue.

    Reads a single line; on EOF flips `has_more` and stops. Otherwise
    repeatedly matches the lexer pattern against the remaining text,
    letting `_add_token` enqueue real tokens, and finally appends an
    EOL marker token for the line.
    """
    text = self.fo.readline()
    if not text:
        # Nothing left to read: mark the stream exhausted.
        self.has_more = False
        return
    cursor, limit = 0, len(text)
    while cursor < limit:
        # NOTE(review): assumes the pattern always matches here (e.g. it
        # accepts whitespace), so `match` is never None — confirm.
        match = self.pattern.match(text[cursor:limit])
        self._add_token(self.fo.line_number, match)
        cursor += match.end()
    # Every physical line ends with an explicit end-of-line token.
    self.queue.append(IdToken(self.fo.line_number, EOL))
def _add_token(self, line_number: int, matcher):
    """Convert a regex match into a token and enqueue it.

    Group layout (from the lexer pattern): group 0 is the whole lexeme,
    group 1 a comment, group 2 a number literal, group 3 a string
    literal. Whitespace and comments produce no token.
    """
    groups = matcher.groups()
    lexeme = groups[0]
    if lexeme is None:
        return  # pure whitespace: nothing to emit
    if groups[1] is not None:
        return  # comment: skipped entirely
    if groups[2] is not None:
        token = NumToken(line_number=line_number, value=int(groups[2]))
    elif groups[3] is not None:
        token = StrToken(line_number=line_number,
                         text=self._to_string(groups[3]))
    else:
        # Anything else (identifiers, keywords, operators) is an IdToken.
        token = IdToken(line_number=line_number, _id=lexeme)
    self.queue.append(token)
def setUp(self):
    # Fresh identifier-token fixture (line 10, name "some_id") per test.
    self.id_token = IdToken(10, "some_id")
def test_read_2(self):
    """read() walks a two-line program, tracking line numbers, then EOF."""
    source = 'aaa = 123\nif a = b {print("Hello")}'
    lexer = Lexer(File(io.StringIO(source)))
    expected = [
        IdToken(1, "aaa"),
        IdToken(1, "="),
        NumToken(1, 123),
        IdToken(1, EOL),
        IdToken(2, "if"),
        IdToken(2, "a"),
        IdToken(2, "="),
        IdToken(2, "b"),
        IdToken(2, "{"),
        IdToken(2, "print"),
        IdToken(2, "("),
        StrToken(2, "Hello"),
        IdToken(2, ")"),
        IdToken(2, "}"),
        IdToken(2, EOL),
    ]
    for token in expected:
        self.assertTokenEqual(lexer.read(), token)
    self.assertTokenEqual(lexer.read(), EOF)
def test_peek_2(self):
    """peek(i) over a two-line program matches the full token stream."""
    source = 'aaa = 123\nif a = b {print("Hello")}'
    lexer = Lexer(File(io.StringIO(source)))
    expected = [
        IdToken(1, "aaa"),
        IdToken(1, "="),
        NumToken(1, 123),
        IdToken(1, EOL),
        IdToken(2, "if"),
        IdToken(2, "a"),
        IdToken(2, "="),
        IdToken(2, "b"),
        IdToken(2, "{"),
        IdToken(2, "print"),
        IdToken(2, "("),
        StrToken(2, "Hello"),
        IdToken(2, ")"),
        IdToken(2, "}"),
        IdToken(2, EOL),
        EOF,
    ]
    for offset, token in enumerate(expected):
        self.assertTokenEqual(lexer.peek(offset), token)
def test_left(self):
    """left() of the fixture expression is the `foo` identifier leaf."""
    expected_leaf = ASTLeaf(IdToken(1, "foo"))
    self.assertASTLeafEqual(self.be.left(), expected_leaf)