def test_tokenize(self):
    """Parsing '(list 1 2.3 "string")' yields [Symbol, Integer, SingleFloat, String]."""
    source = InPort(io.StringIO('(list 1 2.3 "string")'))
    first_token = source.next_token()
    parsed = Parser._read_ahead(first_token, source)
    # Each position of the parsed list must carry the matching reader type.
    for position, expected_type in enumerate((Symbol, Integer, SingleFloat, String)):
        self.assertIsInstance(parsed[position], expected_type)
def test_tokenize_sharpquote(self):
    """Reader macro #'+ must expand to [FUNCTION, +].

    Uses assertIs (not assertTrue with `is`) so a failure reports the
    two compared objects instead of the unhelpful "False is not true".
    Relies on Symbol interning: Symbol('FUNCTION') returns the canonical object.
    """
    in_port = InPort(io.StringIO("#'+"))
    token = in_port.next_token()
    token_list = Parser._read_ahead(token, in_port)
    # token_list must be [FUNCTION +]
    self.assertIsInstance(token_list[0], Symbol)
    self.assertIs(token_list[0], Symbol('FUNCTION'))
    self.assertIsInstance(token_list[1], Symbol)
def test_tokenize_backquote(self):
    """Reader macro `(+ 1 2.3) must expand to [BACKQUOTE, [+, 1, 2.3]].

    Uses assertIs (not assertTrue with `is`) so a failure reports the
    two compared objects instead of the unhelpful "False is not true".
    """
    in_port = InPort(io.StringIO("`(+ 1 2.3)"))
    token = in_port.next_token()
    token_list = Parser._read_ahead(token, in_port)
    # token_list must be [BACKQUOTE, [+, 1, 2.3]]
    self.assertIsInstance(token_list[0], Symbol)
    self.assertIs(token_list[0], Symbol('BACKQUOTE'))
    self.assertIsInstance(token_list[1][0], Symbol)
    self.assertIsInstance(token_list[1][1], Integer)
    self.assertIsInstance(token_list[1][2], SingleFloat)
def test_tokenize_unquote_splicing(self):
    """`(+ 1 ,@(- 2 3)) must expand to [BACKQUOTE, [+, 1, [UNQUOTE-SPLICING, [-, 2, 3]]]].

    Uses assertIs (not assertTrue with `is`) so a failure reports the
    two compared objects instead of the unhelpful "False is not true".
    """
    in_port = InPort(io.StringIO("`(+ 1 ,@(- 2 3))"))
    token = in_port.next_token()
    token_list = Parser._read_ahead(token, in_port)
    # token_list must be [BACKQUOTE, [+, 1, [UNQUOTE-SPLICING, [-, 2, 3]]]]
    self.assertIsInstance(token_list[0], Symbol)
    self.assertIs(token_list[0], Symbol('BACKQUOTE'))
    self.assertIsInstance(token_list[1][0], Symbol)
    self.assertIsInstance(token_list[1][1], Integer)
    self.assertIsInstance(token_list[1][2][0], Symbol)
    self.assertIs(token_list[1][2][0], Symbol('UNQUOTE-SPLICING'))
    self.assertIsInstance(token_list[1][2][1][0], Symbol)
    self.assertIsInstance(token_list[1][2][1][1], Integer)
    self.assertIsInstance(token_list[1][2][1][2], Integer)
def test_tokenize_atom(self):
    """A bare atom such as '+' parses to a single Symbol, not a list."""
    stream = InPort(io.StringIO('+'))
    leading_token = stream.next_token()
    result = Parser._read_ahead(leading_token, stream)
    self.assertIsInstance(result, Symbol)