Example #1
    def test_tokenize_unclosed_parentheses(self):
        # reading an unclosed list must fail with SyntaxError
        in_port = InPort(io.StringIO('(+ 1 2.3'))
        token = in_port.next_token()

        self.assertRaisesRegex(SyntaxError, "unexpected EOF in list",
                               Parser._read_ahead, token, in_port)
Example #2
    def test_tokenize(self):
        in_port = InPort(io.StringIO('(list 1 2.3 "string")'))
        token = in_port.next_token()
        token_list = Parser._read_ahead(token, in_port)

        self.assertIsInstance(token_list[0], Symbol)
        self.assertIsInstance(token_list[1], Integer)
        self.assertIsInstance(token_list[2], SingleFloat)
        self.assertIsInstance(token_list[3], String)
Example #3
    def test_tokenize_sharpquote(self):
        in_port = InPort(io.StringIO("#'+"))
        token = in_port.next_token()
        token_list = Parser._read_ahead(token, in_port)

        # token_list must be [FUNCTION +]

        self.assertIsInstance(token_list[0], Symbol)
        self.assertTrue(token_list[0] is Symbol('FUNCTION'))

        self.assertIsInstance(token_list[1], Symbol)
Example #4
    def test_tokenize_backquote(self):
        in_port = InPort(io.StringIO("`(+ 1 2.3)"))
        token = in_port.next_token()
        token_list = Parser._read_ahead(token, in_port)

        # token_list must be [BACKQUOTE, [+, 1, 2.3]]

        self.assertIsInstance(token_list[0], Symbol)
        self.assertTrue(token_list[0] is Symbol('BACKQUOTE'))

        self.assertIsInstance(token_list[1][0], Symbol)
        self.assertIsInstance(token_list[1][1], Integer)
        self.assertIsInstance(token_list[1][2], SingleFloat)
Example #5
    def test_read(self):
        # test only EOF
        in_port = InPort(io.StringIO(''))
        eof = Parser._read(in_port)

        self.assertIsInstance(eof, Symbol)
        self.assertEqual(eof, Parser.eof_object)
Example #6
    def test_tokenize_unquote_splicing(self):
        in_port = InPort(io.StringIO("`(+ 1 ,@(- 2 3))"))
        token = in_port.next_token()
        token_list = Parser._read_ahead(token, in_port)

        # token_list must be [BACKQUOTE, [+, 1, [UNQUOTE-SPLICING, [-, 2, 3]]]]

        self.assertIsInstance(token_list[0], Symbol)
        self.assertTrue(token_list[0] is Symbol('BACKQUOTE'))

        self.assertIsInstance(token_list[1][0], Symbol)
        self.assertIsInstance(token_list[1][1], Integer)

        self.assertIsInstance(token_list[1][2][0], Symbol)
        self.assertTrue(token_list[1][2][0] is Symbol('UNQUOTE-SPLICING'))

        self.assertIsInstance(token_list[1][2][1][0], Symbol)
        self.assertIsInstance(token_list[1][2][1][1], Integer)
        self.assertIsInstance(token_list[1][2][1][2], Integer)
Example #7
    def testInPort(self):
        in_port = InPort(io.StringIO('(list 1 2.3 "string")'))

        self.assertIsInstance(in_port, InPort)

        # tokens
        self.assertEqual(in_port.next_token(), '(')
        self.assertEqual(in_port.next_token(), 'list')
        self.assertEqual(in_port.next_token(), '1')
        self.assertEqual(in_port.next_token(), '2.3')
        self.assertEqual(in_port.next_token(), '"string"')
        self.assertEqual(in_port.next_token(), ')')

        # #<eof-object>
        self.assertIsInstance(in_port.next_token(), Symbol)
        self.assertTrue(in_port.next_token() is Symbol('#<EOF-OJBECT>'))
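
A minimal sketch (not part of the test suite) that drains an InPort token by token until the EOF sentinel, assuming the same InPort and Symbol API exercised above; drain_tokens is a hypothetical helper name.

    import io

    def drain_tokens(source):
        # Hypothetical helper: collect raw tokens until the interned EOF symbol
        # (spelled '#<EOF-OJBECT>' in the tests above) comes back.
        in_port = InPort(io.StringIO(source))
        eof = Symbol('#<EOF-OJBECT>')
        tokens = []
        while True:
            token = in_port.next_token()
            if token is eof:
                return tokens
            tokens.append(token)

    # drain_tokens('(list 1 2.3 "string")')
    # => ['(', 'list', '1', '2.3', '"string"', ')']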
Example #8
    def load_file(cls, file_path):
        with open(file_path) as f:
            Interpreter.repl(prompt=None,
                             inport=InPort(f),
                             out=None,
                             zen=False)
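
A hedged invocation sketch: assuming load_file is bound as a classmethod on the Interpreter class shown in Example #9, loading a script reduces to a single call. The file name below is a placeholder, not a file from the project.

    # Hypothetical call; 'boot.lisp' is a placeholder path.
    Interpreter.load_file('boot.lisp')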
Example #9
    def repl(cls,
             prompt='=>',
             inport=InPort(sys.stdin),
             out=sys.stdout,
             zen=True):
        PackageManager.in_package(String("COMMON-LISP-USER"))
        while True:
            try:
                if prompt is not None:
                    # Set prompt.
                    try:
                        prompt = PackageManager.current_package.package_nicknames[
                            0] + '=>'
                    except IndexError:
                        prompt = PackageManager.current_package.package_name + '=>'

                    # Wait input.
                    print(prompt, end=' ', file=out, flush=True)

                # Parse inport.
                forms = Parser.parse(inport)

                # Check eof.
                if forms is Symbol('#<EOF-OJBECT>'):
                    return

                # Expand token.
                forms = Expander.expand(
                    forms,
                    var_env=PackageManager.current_package.env['VARIABLE'],
                    func_env=PackageManager.current_package.env['FUNCTION'],
                    macro_env=PackageManager.current_package.env['MACRO'])

                # Evaluate expression.
                retval = Evaluator.eval(
                    forms,
                    var_env=PackageManager.current_package.env['VARIABLE'],
                    func_env=PackageManager.current_package.env['FUNCTION'],
                    macro_env=PackageManager.current_package.env['MACRO'])

                # Print return value.
                if out is not None:
                    print(retval, end="\n\n", file=out, flush=True)

            except Interrupt:
                if zen:
                    # Print the zen of python at random.
                    print(random.choices(cls.the_zen_of_python)[0],
                          end="\n\n",
                          file=out,
                          flush=True)
                return

            except Exception as e:
                print(
                    "------------------------------------------------------------"
                )
                print("{}: {}".format(type(e).__name__, e),
                      end="\n\n",
                      file=out,
                      flush=True)
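
A usage sketch for the same repl signature: it can be driven from an in-memory stream instead of sys.stdin by passing a custom InPort, which is essentially what load_file in Example #8 does with a file object. Everything beyond the names shown in the snippet is an assumption.

    import io
    import sys

    # Evaluate one form non-interactively; repl returns when parse() hits EOF.
    Interpreter.repl(prompt=None,
                     inport=InPort(io.StringIO('(+ 1 2)')),
                     out=sys.stdout,
                     zen=False)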
Example #10
    def test_tokenize_atom(self):
        inport = InPort(io.StringIO('+'))
        token = inport.next_token()
        token_list = Parser._read_ahead(token, inport)
        self.assertIsInstance(token_list, Symbol)
Example #11
    def test_parse_from_stream(self):
        in_port = InPort(io.StringIO('(+ 1 2.3)'))
        cons = Parser.parse(in_port)

        self.assertIsInstance(cons, Cons)
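
A sketch of reading every top-level form from one stream, assuming Parser.parse reads a single form per call and returns the EOF symbol at end of input, as the repl loop in Example #9 does; read_all_forms is a hypothetical helper.

    import io

    def read_all_forms(source):
        # Hypothetical helper: call Parser.parse repeatedly until the EOF symbol.
        in_port = InPort(io.StringIO(source))
        forms = []
        while True:
            form = Parser.parse(in_port)
            if form is Symbol('#<EOF-OJBECT>'):
                return forms
            forms.append(form)

    # read_all_forms('(+ 1 2.3) (list 4 5)') should yield two Cons objects,
    # given that each top-level list parses to a Cons as in the test above.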