def test_tokenize_unclosed_parentheses(self):
    """Reading ahead over an unterminated list must raise SyntaxError."""
    port = InPort(io.StringIO('(+ 1 2.3'))
    first = port.next_token()
    # Pass the callable plus its args so assertRaisesRegex invokes it itself.
    self.assertRaisesRegex(
        SyntaxError, "unexpected EOF in list",
        Parser._read_ahead, first, port,
    )
def testInPort(self):
    """InPort yields a form's raw tokens in order, then the eof sentinel."""
    port = InPort(io.StringIO('(list 1 2.3 "string")'))
    self.assertIsInstance(port, InPort)
    # Tokens come back one at a time, in source order.
    for expected in ('(', 'list', '1', '2.3', '"string"', ')'):
        self.assertEqual(port.next_token(), expected)
    # Past the end of input, next_token() keeps producing the eof symbol.
    self.assertIsInstance(port.next_token(), Symbol)
    # NOTE(review): '#<EOF-OJBECT>' looks like a typo for '#<EOF-OBJECT>',
    # but it must match the exact sentinel symbol the project interns —
    # kept byte-for-byte; confirm against the parser's eof definition.
    self.assertTrue(port.next_token() is Symbol('#<EOF-OJBECT>'))
def test_tokenize(self):
    """Reading ahead over a flat list yields typed atoms, in order."""
    port = InPort(io.StringIO('(list 1 2.3 "string")'))
    parsed = Parser._read_ahead(port.next_token(), port)
    # Each position of the parsed list carries the expected atom type.
    for element, expected_type in zip(
            parsed, (Symbol, Integer, SingleFloat, String)):
        self.assertIsInstance(element, expected_type)
def test_tokenize_sharpquote(self):
    """#'+ must parse to the two-element form (FUNCTION +)."""
    port = InPort(io.StringIO("#'+"))
    parsed = Parser._read_ahead(port.next_token(), port)
    # Expected shape: [FUNCTION, +] — both interned symbols.
    self.assertIsInstance(parsed[0], Symbol)
    self.assertTrue(parsed[0] is Symbol('FUNCTION'))
    self.assertIsInstance(parsed[1], Symbol)
def test_tokenize_backquote(self):
    """A backquoted form must parse to (BACKQUOTE <form>)."""
    port = InPort(io.StringIO("`(+ 1 2.3)"))
    parsed = Parser._read_ahead(port.next_token(), port)
    # Outer shape: [BACKQUOTE, [+, 1, 2.3]].
    self.assertIsInstance(parsed[0], Symbol)
    self.assertTrue(parsed[0] is Symbol('BACKQUOTE'))
    inner = parsed[1]
    self.assertIsInstance(inner[0], Symbol)
    self.assertIsInstance(inner[1], Integer)
    self.assertIsInstance(inner[2], SingleFloat)
def test_tokenize_unquote_splicing(self):
    """,@ inside a backquoted form must expand to UNQUOTE-SPLICING."""
    port = InPort(io.StringIO("`(+ 1 ,@(- 2 3))"))
    parsed = Parser._read_ahead(port.next_token(), port)
    # Expected shape: [BACKQUOTE, [+, 1, [UNQUOTE-SPLICING, [-, 2, 3]]]].
    self.assertIsInstance(parsed[0], Symbol)
    self.assertTrue(parsed[0] is Symbol('BACKQUOTE'))
    quoted = parsed[1]
    self.assertIsInstance(quoted[0], Symbol)
    self.assertIsInstance(quoted[1], Integer)
    spliced = quoted[2]
    self.assertIsInstance(spliced[0], Symbol)
    self.assertTrue(spliced[0] is Symbol('UNQUOTE-SPLICING'))
    # The spliced sub-form (- 2 3): operator symbol plus two integers.
    for atom, expected_type in zip(spliced[1], (Symbol, Integer, Integer)):
        self.assertIsInstance(atom, expected_type)
def test_tokenize_atom(self):
    """A bare atom parses to a single Symbol, not a list."""
    # Renamed local `inport` -> `in_port` for consistency with the
    # naming used by every sibling test in this file.
    in_port = InPort(io.StringIO('+'))
    token = in_port.next_token()
    token_list = Parser._read_ahead(token, in_port)
    self.assertIsInstance(token_list, Symbol)