Ejemplo n.º 1
0
    def test_read_number(self):
        """_read_number parses a numeric literal and stops at non-number input."""
        stream = TokenStream(InputStream('123='))
        self.assertEqual(Token('num', 123.0), stream._read_number())

        # A second '.' terminates the number: only '123.3' is consumed.
        stream = TokenStream(InputStream('123.3.='))
        self.assertEqual(Token('num', 123.3), stream._read_number())
Ejemplo n.º 2
0
    def test_read_string(self):
        """_read_string parses double-quoted literals, handling escapes and EOF."""
        for source, expected in [('"ab"', 'ab'), ('"ab\\c"', 'abc')]:
            stream = TokenStream(InputStream(source))
            self.assertEqual(Token('str', expected), stream._read_string())

        # An unterminated string literal must raise.
        stream = TokenStream(InputStream('"abc'))
        with self.assertRaises(Exception):
            stream._read_string()
Ejemplo n.º 3
0
    def test04(self):
        """Test token seek and tell: rewinding replays the same tokens."""
        filepath = os.path.join(TokenStreamTest.path, 'obj_stream3.dat')
        with open(filepath, 'rb') as f:
            tk = TokenStream(filepath, f)

            tok = tk.next_token()
            self.assertEqual(EToken.INTEGER, tok.type)
            self.assertEqual(98, tok.data)

            # Memorize position after doing a first next_token(), this works
            # (contrast with test05, which records the position before any read).
            xpos = tk.tell()

            tok = tk.next_token()
            self.assertEqual(EToken.INTEGER, tok.type)
            self.assertEqual(73, tok.data)

            tok = tk.next_token()
            self.assertEqual(EToken.INTEGER, tok.type)
            self.assertEqual(5, tok.data)

            # Rewind to the memorized position: the next token must again be 73.
            tk.seek(xpos)

            tok = tk.next_token()
            self.assertEqual(EToken.INTEGER, tok.type)
            self.assertEqual(73, tok.data)
Ejemplo n.º 4
0
    def test_read_ident(self):
        """_read_identifier reads a name and classifies it as 'var' or 'kw'."""
        cases = [
            ('a=1', Token('var', 'a=1')),      # '=' is allowed inside identifiers
            ('a = 1', Token('var', 'a')),      # whitespace ends the identifier
            ('let(a = 1', Token('kw', 'let')), # keywords are recognized
            ('js "aaa"', Token('kw', 'js')),
        ]
        for source, expected in cases:
            stream = TokenStream(InputStream(source))
            self.assertEqual(expected, stream._read_identifier())
def main():
    """Run the λ-language script named by argv[1] through the CPS evaluator.

    Builds a global environment pre-populated with the primitive functions,
    parses the source file, and executes the resulting AST, printing the
    final result via the toplevel continuation.
    """
    # NOTE: the original assigned several hard-coded sample programs to
    # `code` here; every one was a dead store overwritten by file.read()
    # below, so they have been removed.
    global_env = Environment()

    for name, func in primitive.items():
        global_env.define(name, func)
    with open(sys.argv[1]) as file:
        code = file.read()
        parser = Parser(TokenStream(InputStream(code)))
        execute(evaluate, (parser(), global_env,
                           lambda result: print(f"*** Result: {result}")))
Ejemplo n.º 6
0
def main():
    """Translate the λ-language source file named by argv[1] to JavaScript."""
    source_path = sys.argv[1]
    with open(source_path) as source_file:
        source = source_file.read()
    parser = Parser(TokenStream(InputStream(source)))
    print(to_js(parser()))
Ejemplo n.º 7
0
def main():
    """Evaluate the λ-language file named by argv[1] in a primitive-filled env."""
    env = Environment()
    for name, func in primitive.items():
        env.define(name, func)
    with open(sys.argv[1]) as source_file:
        source = source_file.read()
    evaluate(Parser(TokenStream(InputStream(source)))(), env)
Ejemplo n.º 8
0
    def test_read_next(self):
        """_read_next skips whitespace/comments and emits each token in turn."""
        token_stream = TokenStream(
            InputStream(' # comment\n123 abc "nba" let a=2  >= js;'))
        expected_tokens = [
            Token('num', 123.0),
            Token('var', 'abc'),
            Token('str', 'nba'),
            Token('kw', 'let'),
            Token('var', 'a=2'),
            Token('op', '>='),
            Token('kw', 'js'),
            Token('punc', ';'),
            Token('null', 'null'),  # exhausted input yields the null token
        ]
        for expected in expected_tokens:
            self.assertEqual(token_stream._read_next(), expected)

        # An unrecognized character (backspace) must raise.
        token_stream = TokenStream(InputStream('\x08'))
        with self.assertRaises(Exception):
            token_stream._read_next()

        # The λ character is a keyword.
        token_stream = TokenStream(InputStream('λ (n) 1'))
        self.assertEqual(token_stream._read_next(), Token("kw", 'λ'))
Ejemplo n.º 9
0
def main():
    """CPS-transform the program named by argv[1] and print the result."""
    with open(sys.argv[1]) as source_file:
        source = source_file.read()
    ast = Parser(TokenStream(InputStream(source)))()
    transformed = to_cps(ast, lambda node: CallAst(
        VarAst('β_TOPLEVEL'),
        [node],
    ))
    print(transformed)
Ejemplo n.º 10
0
    def test05(self):
        """Test error from flex & bison book."""
        filepath = os.path.join(TokenStreamTest.path, 'flex0.dat')
        with open(filepath, 'rb') as f:
            stream = TokenStream(filepath, f)

            token = stream.next_token()
            self.assertEqual(EToken.LITERAL_STRING, token.type)
            # The payload is bytes; decode the escapes before comparing text.
            decoded = token.data.decode('unicode_escape')
            self.assertEqual('Antenna', decoded[:7])
Ejemplo n.º 11
0
def main():
    """Compile the λ-language file in argv[1] to JS via CPS and the optimizer."""
    with open(sys.argv[1]) as source_file:
        source = source_file.read()
    ast = Parser(TokenStream(InputStream(source)))()
    # Wrap the program in the toplevel continuation, then optimize.
    ast = to_cps(ast, lambda node: CallAst(VarAst('β_TOPLEVEL'), [node]))
    ast = Optimizer().optimize(ast)
    print(to_js(ast))
Ejemplo n.º 12
0
    def test_literal(self):
        """Test literal strings."""
        filepath = os.path.join(TokenStreamTest.path, 'literal.dat')
        with open(filepath, 'rb') as f:
            tk = TokenStream(filepath, f)

            tok = tk.next_token()
            self.assertEqual(EToken.LITERAL_STRING, tok.type)
            b = tok.data
            self.assertEqual(17, len(b))
            # Bug fix: tok.data is bytes (file opened 'rb'), so the expected
            # value must be a bytes literal — the str 'This' can never equal
            # a bytes slice under Python 3.
            self.assertEqual(b'This', b[1:5])
Ejemplo n.º 13
0
    def test_peek_and_next(self):
        """peek() is idempotent; next() consumes the previously peeked token."""
        token_stream = TokenStream(
            InputStream(' # comment\n123 abc let a=2  >=;'))
        # Two peeks return the same token without consuming it.
        self.assertEqual(token_stream.peek(), Token('num', 123.0))
        self.assertEqual(token_stream.peek(), Token('num', 123.0))
        self.assertEqual(token_stream.next(), Token('num', 123.0))

        token_stream = TokenStream(
            InputStream(' # comment\n123 abc let a=2  js >=;'))
        for expected in [Token('num', 123.0), Token('var', 'abc'),
                         Token('kw', 'let'), Token('var', 'a=2'),
                         Token('kw', 'js')]:
            self.assertEqual(token_stream.next(), expected)

        token_stream = TokenStream(InputStream('λ (n) 1'))
        for expected in [Token("kw", "λ"), Token("punc", "("),
                         Token("var", "n"), Token("punc", ")"),
                         Token("num", 1.0)]:
            self.assertEqual(token_stream.next(), expected)
Ejemplo n.º 14
0
    def test_literal02(self):
        """Test escape sequences in literal strings."""
        filepath = 't/literal02.dat'
        with open(filepath, 'rb') as f:
            stream = TokenStream(filepath, f)

            # The file's first token is a literal string of exactly the two
            # bytes '(' (40) and ')' (41).
            token = stream.next_token()
            self.assertEqual(EToken.LITERAL_STRING, token.type)
            data = token.data
            self.assertEqual(2, len(data))
            self.assertEqual(40, data[0])
            self.assertEqual(41, data[1])
Ejemplo n.º 15
0
 def test_to_cps(self):
     """Drive to_cps() across each AST node kind and check the transforms."""
     # Raw JS nodes pass through the CPS transform untouched.
     js_raw_ast = JsAst("aa")
     cps_ast = _cps_js_raw(js_raw_ast, lambda x: x)
     self.assertEqual(cps_ast, js_raw_ast)
     # Atoms (literals) are already values.
     atom_ast = LiteralAst(1.0)
     cps_ast = to_cps(atom_ast, lambda x: x)
     self.assertEqual(atom_ast, cps_ast)
     # An empty let reduces to its body.
     let_ast = LetAst([], LiteralAst(False))
     cps_ast = to_cps(let_ast, lambda x: x)
     self.assertEqual(cps_ast, LiteralAst(False))
     # An empty prog becomes the literal False.
     prog_ast = ProgAst([])
     cps_ast = to_cps(prog_ast, lambda x: x)
     self.assertEqual(cps_ast, LiteralAst(False))
     # A single-statement prog collapses to that statement.
     prog_ast = ProgAst([LiteralAst(1)])
     cps_ast = to_cps(prog_ast, lambda x: x)
     self.assertEqual(cps_ast, LiteralAst(1))
     prog_ast = ProgAst([LiteralAst(1), LiteralAst(2)])
     cps_ast = to_cps(prog_ast, lambda x: x)
     self.assertEqual(cps_ast, ProgAst([LiteralAst(1), LiteralAst(2)]))
     # if-expressions are wrapped in a continuation lambda; the generated
     # parameter names are fresh, so the expected tree reuses the names
     # found in the actual result rather than hard-coding them.
     if_ast = IfAst(LiteralAst(1), LiteralAst(2), LiteralAst(3))
     cps_ast: CallAst = to_cps(if_ast, lambda x: x)
     expected_ast = CallAst(
         LambdaAst(
             '', cps_ast.func.params,
             IfAst(LiteralAst(1),
                   CallAst(VarAst(cps_ast.func.params[0]), [LiteralAst(2)]),
                   CallAst(VarAst(cps_ast.func.params[0]),
                           [LiteralAst(3)]))), [
                               LambdaAst('', cps_ast.args[0].params,
                                         VarAst(cps_ast.args[0].params[0]))
                           ])
     self.assertEqual(cps_ast, expected_ast)
     # Lambdas gain an extra (generated) continuation parameter up front.
     lambda_ast = LambdaAst('', ['x', 'y'], LiteralAst(1))
     cps_ast = to_cps(lambda_ast, lambda x: x)
     expected_ast = LambdaAst(
         '', [cps_ast.params[0]] + ['x', 'y'],
         CallAst(VarAst(cps_ast.params[0]), [LiteralAst(1)]))
     self.assertEqual(cps_ast, expected_ast)
     # Binary operations on atoms are left unchanged.
     binary_ast = BinaryAst('+', LiteralAst(1), LiteralAst(2))
     cps_ast = to_cps(binary_ast, lambda x: x)
     self.assertEqual(cps_ast, binary_ast)
     # For a parsed call, the continuation lambda becomes the first argument.
     parse = Parser(TokenStream(InputStream("a = foo(10);")))
     cps_ast = to_cps(parse(), lambda x: x)
     expected_ast = CallAst(VarAst('foo'), [
         LambdaAst(
             '', [cps_ast.args[0].params[0]],
             AssignAst(VarAst('a'), VarAst(cps_ast.args[0].params[0]))),
         LiteralAst(10)
     ])
     self.assertEqual(cps_ast, expected_ast)
Ejemplo n.º 16
0
def main():
    """Demo driver: evaluate a recursive fib program with the CPS evaluator."""
    # Build the global environment with the primitive functions first.
    global_env = Environment()
    for name, func in primitive.items():
        global_env.define(name, func)
    # The demo program: time fib(12) using the λ syntax.
    code = """
    fib = λ(n) if n < 2 then n else fib(n - 1) + fib(n - 2);
    time( λ() println(fib(12)) );
    """
    parser = Parser(TokenStream(InputStream(code)))
    evaluate(parser(), global_env, lambda result: result)
Ejemplo n.º 17
0
    def test_literal03(self):
        """Test escape sequences in literal strings."""
        filepath = 't/literal03.dat'
        with open(filepath, 'rb') as f:
            tk = TokenStream(filepath, f)

            # This is a string
            tok = tk.next_token()
            self.assertEqual(EToken.LITERAL_STRING, tok.type)
            b = tok.data
            # NOTE(review): the print() calls below look like leftover debug
            # output; consider removing them.
            for i in b:
                print(f'i="{i}"')
            self.assertEqual(9, len(b))
            self.assertEqual(13, b[0])  # \r CR
            self.assertEqual(10, b[1])  # \n LF
            print(f'b[2]="{b[2]}"')
            self.assertEqual(8, b[2])   # \b BS
            self.assertEqual(9, b[3])   # \t TAB
            self.assertEqual(12, b[4])  # \f FF
            self.assertEqual(40, b[5])  # '('
            self.assertEqual(41, b[6])  # ')'
            self.assertEqual(0x5c, b[7])  # backslash
            self.assertEqual(83, b[8])  # 'S'
Ejemplo n.º 18
0
def parse_tokens(filepath):
    """Tokenize the file at *filepath*, printing an indented token listing.

    Each token is printed indented by the current array/dict/object nesting
    depth.  Returns the list of tokens read (the EOF token excluded).
    """
    # Array for token storage
    tokens = []

    # Parse a character stream into a token stream
    with open(filepath, 'rb') as f:
        tk = TokenStream(filepath, f)
        indent = 0
        while True:
            t = tk.next_token()
            if t.type == EToken.EOF:
                break
            # Closing tokens de-indent before printing themselves.
            if t.type in [
                    EToken.ARRAY_END, EToken.DICT_END, EToken.OBJECT_END
            ]:
                indent -= 1
            t.print_indented(indent)
            # Opening tokens indent everything that follows them.
            if t.type in [
                    EToken.ARRAY_BEGIN, EToken.DICT_BEGIN, EToken.OBJECT_BEGIN
            ]:
                indent += 1

            tokens.append(t)
    # Bug fix: the token list was built but never returned.
    return tokens
Ejemplo n.º 19
0
    def test01(self):
        """Test simple next_token() and peek_token() calls.

        Exercises the interleaving of peeks and reads: peeked tokens queue up
        and are handed back, in order, by subsequent next_token() calls.
        """
        filepath = os.path.join(TokenStreamTest.path, 'token_stream.dat')
        with open(filepath, 'rb') as f:
            tk = TokenStream(filepath, f)

            # Retrieve a few tokens
            tok = tk.next_token()
            self.assertEqual(EToken.DICT_BEGIN, tok.type)
            tok = tk.next_token()
            self.assertEqual(EToken.NAME, tok.type)
            self.assertEqual(b'Contents', tok.data)
            tok = tk.next_token()
            self.assertEqual(EToken.INTEGER, tok.type)
            self.assertEqual(6624, tok.data)

            # Now peek once
            tok2 = tk.peek_token()
            self.assertEqual(EToken.INTEGER, tok2.type)
            self.assertEqual(0, tok2.data)

            # Retrieve a peeked token: next_token() hands back what was peeked
            tok = tk.next_token()
            self.assertEqual(EToken.INTEGER, tok.type)
            self.assertEqual(0, tok.data)

            # Peek 3 tokens ahead: successive peeks advance through the input
            tok2 = tk.peek_token()
            self.assertEqual(EToken.OBJ_REF, tok2.type)
            tok2 = tk.peek_token()
            self.assertEqual(EToken.NAME, tok2.type)
            self.assertEqual(b'CropBox', tok2.data)
            tok2 = tk.peek_token()
            self.assertEqual(EToken.ARRAY_BEGIN, tok2.type)

            # Retrieve 2 tokens (out of the 3 peeked)
            tok = tk.next_token()
            self.assertEqual(EToken.OBJ_REF, tok.type)
            tok = tk.next_token()
            self.assertEqual(EToken.NAME, tok.type)
            self.assertEqual(b'CropBox', tok.data)

            # I still have the ARRAY_BEGIN in 'peeked'

            # NOTE(review): peeking again here continues PAST the pending
            # ARRAY_BEGIN rather than re-reporting it — unclear whether that
            # is the intended spec; confirm before relying on it.

            # Peeking 5 more
            tok2 = tk.peek_token()
            self.assertEqual(EToken.INTEGER, tok2.type)
            self.assertEqual(0, tok2.data)
            tok2 = tk.peek_token()
            self.assertEqual(EToken.INTEGER, tok2.type)
            self.assertEqual(0, tok2.data)
            tok2 = tk.peek_token()
            self.assertEqual(EToken.REAL, tok2.type)
            self.assertEqual(595.276, tok2.data)
            tok2 = tk.peek_token()
            self.assertEqual(EToken.REAL, tok2.type)
            self.assertEqual(841.89, tok2.data)
            tok2 = tk.peek_token()
            self.assertEqual(EToken.ARRAY_END, tok2.type)

            # Retrieve 1 plus 5 plus 1: the queued tokens drain in order
            tok = tk.next_token()
            self.assertEqual(EToken.ARRAY_BEGIN, tok.type)

            tok = tk.next_token()
            self.assertEqual(EToken.INTEGER, tok.type)
            self.assertEqual(0, tok.data)
            tok = tk.next_token()
            self.assertEqual(EToken.INTEGER, tok.type)
            self.assertEqual(0, tok.data)
            tok = tk.next_token()
            self.assertEqual(EToken.REAL, tok.type)
            self.assertEqual(595.276, tok.data)
            tok = tk.next_token()
            self.assertEqual(EToken.REAL, tok.type)
            self.assertEqual(841.89, tok.data)
            tok = tk.next_token()
            self.assertEqual(EToken.ARRAY_END, tok.type)

            tok = tk.next_token()
            self.assertEqual(EToken.NAME, tok.type)
            self.assertEqual(b'MediaBox', tok.data)
Ejemplo n.º 20
0
    def test05(self):
        """Test token seek and tell, including a tell() before any read."""
        filepath = os.path.join(TokenStreamTest.path, 'obj_stream3.dat')
        with open(filepath, 'rb') as f:
            tk = TokenStream(filepath, f)

            # Memorize position at the beginning, this bugs
            # (i.e. this case has historically been buggy — see the commented
            # seek() experiments below left from debugging).
            xpos = tk.tell()

            # tk.seek(0)
            # print(f'test05: seek(0): cc="{chr(tk.cc)}", bf.s_pos={tk.bf.s_pos}')

            # tk.seek(1)
            # print(f'test05: seek(1), cc="{chr(tk.cc)}", bf.s_pos={tk.bf.s_pos}')

            tok = tk.next_token()
            self.assertEqual(EToken.INTEGER, tok.type)
            self.assertEqual(98, tok.data)

            tok = tk.next_token()
            self.assertEqual(EToken.INTEGER, tok.type)
            self.assertEqual(73, tok.data)

            tok = tk.next_token()
            self.assertEqual(EToken.INTEGER, tok.type)
            self.assertEqual(5, tok.data)

            tok = tk.next_token()
            self.assertEqual(EToken.INTEGER, tok.type)
            self.assertEqual(19, tok.data)

            # Remember the position after the first four integers.
            pos2 = tk.tell()

            # Go back to the very beginning and replay the first two tokens.
            tk.seek(xpos)

            tok = tk.next_token()
            self.assertEqual(EToken.INTEGER, tok.type)
            self.assertEqual(98, tok.data)

            tok = tk.next_token()
            self.assertEqual(EToken.INTEGER, tok.type)
            self.assertEqual(73, tok.data)

            # Move forward again; the stream resumes right after the 19.
            tk.seek(pos2)

            tok = tk.next_token()
            self.assertEqual(EToken.INTEGER, tok.type)
            self.assertEqual(18, tok.data)

            tok = tk.next_token()
            self.assertEqual(EToken.INTEGER, tok.type)
            self.assertEqual(33, tok.data)

            tok = tk.next_token()
            self.assertEqual(EToken.INTEGER, tok.type)
            self.assertEqual(45, tok.data)

            tok = tk.next_token()
            self.assertEqual(EToken.INTEGER, tok.type)
            self.assertEqual(66, tok.data)

            tok = tk.next_token()
            self.assertEqual(EToken.INTEGER, tok.type)
            self.assertEqual(13, tok.data)

            tok = tk.next_token()
            self.assertEqual(EToken.INTEGER, tok.type)
            self.assertEqual(2, tok.data)

            tok = tk.next_token()
            self.assertEqual(EToken.OBJ_REF, tok.type)
Ejemplo n.º 21
0
 def test_eof(self):
     """A stream holding only whitespace and a comment is immediately at EOF."""
     stream = TokenStream(InputStream(' # comment\n'))
     self.assertTrue(stream.eof())
Ejemplo n.º 22
0
    def test02(self):
        """Test simple next_token() calls."""
        # Portability fix: the original r't\token.dat' only resolved on
        # Windows.  open() accepts '/' on every platform, and this matches
        # the 't/literal0*.dat' paths used by the sibling tests.
        filepath = 't/token.dat'
        with open(filepath, 'rb') as f:
            tk = TokenStream(filepath, f)

            # [[[
            tok = tk.next_token()
            self.assertEqual(EToken.ARRAY_BEGIN, tok.type)
            tok = tk.next_token()
            self.assertEqual(EToken.ARRAY_BEGIN, tok.type)
            tok = tk.next_token()
            self.assertEqual(EToken.ARRAY_BEGIN, tok.type)

            # <<>> >>
            tok = tk.next_token()
            self.assertEqual(EToken.DICT_BEGIN, tok.type)
            tok = tk.next_token()
            self.assertEqual(EToken.DICT_END, tok.type)
            tok = tk.next_token()
            self.assertEqual(EToken.DICT_END, tok.type)

            # ]
            tok = tk.next_token()
            self.assertEqual(EToken.ARRAY_END, tok.type)

            # /// — three empty NAME tokens
            tok = tk.next_token()
            self.assertEqual(EToken.NAME, tok.type)
            self.assertEqual(b'', tok.data)
            tok = tk.next_token()
            self.assertEqual(EToken.NAME, tok.type)
            self.assertEqual(b'', tok.data)
            tok = tk.next_token()
            self.assertEqual(EToken.NAME, tok.type)
            self.assertEqual(b'', tok.data)

            # Skip 6 tokens we don't check.
            for i in range(6):
                tok = tk.next_token()

            # >>\r\n<<
            tok = tk.next_token()
            self.assertEqual(EToken.DICT_END, tok.type)
            tok = tk.next_token()
            self.assertEqual(EToken.CRLF, tok.type)
            tok = tk.next_token()
            self.assertEqual(EToken.DICT_BEGIN, tok.type)

            # /a
            tok = tk.next_token()
            self.assertEqual(EToken.NAME, tok.type)
            self.assertEqual(b'a', tok.data)

            # /b
            tok = tk.next_token()
            self.assertEqual(EToken.NAME, tok.type)
            self.assertEqual(b'b', tok.data)
            # /c
            tok = tk.next_token()
            self.assertEqual(EToken.NAME, tok.type)
            self.assertEqual(b'c', tok.data)
            # /d
            tok = tk.next_token()
            self.assertEqual(EToken.NAME, tok.type)
            self.assertEqual(b'd', tok.data)
            tok = tk.next_token()
            self.assertEqual(EToken.DICT_END, tok.type)
Ejemplo n.º 23
0
 def __init__(self, filepath, f):
     """Wrap the open binary file *f* in a TokenStream and prime one token."""
     self.tk = TokenStream(filepath, f)
     # Keep the raw file handle around alongside the token stream.
     self.f = f
     # Read ahead one token so parsing code can always inspect self.tok.
     self.tok = self.tk.next_token()
Ejemplo n.º 24
0
 def test_evaluate(self):
     """Exercise evaluate() (CPS style: result arrives via the callback)
     over every AST node kind, including error cases that must raise."""
     # --- literals ---
     ast = LiteralAst(1.0)
     environment = Environment()
     evaluate(ast, environment, lambda value: self.assertEqual(value, 1.0))
     ast = LiteralAst(True)
     environment = Environment()
     evaluate(ast, environment, self.assertTrue)
     ast = LiteralAst(False)
     environment = Environment()
     evaluate(ast, environment, self.assertFalse)
     ast = LiteralAst("aaa")
     evaluate(ast, Environment(),
              lambda value: self.assertEqual(value, "aaa"))
     # --- binary expressions ---
     ast = BinaryAst('+', LiteralAst(1), LiteralAst(2))
     evaluate(ast, Environment(),
              lambda value: self.assertEqual(value, 3.0))
     # --- prog: empty yields False, otherwise the last statement's value ---
     ast = ProgAst([])
     evaluate(ast, Environment(), self.assertFalse)
     ast = ProgAst([LiteralAst(1)])
     evaluate(ast, Environment(),
              lambda value: self.assertEqual(value, 1.0))
     ast = ProgAst([LiteralAst(1), LiteralAst(2)])
     evaluate(ast, Environment(),
              lambda value: self.assertEqual(value, 2.0))
     # --- assignment: target must be a variable ---
     ast = AssignAst(LiteralAst(1), LiteralAst("a"))
     with self.assertRaises(Exception):
         evaluate(ast, Environment(), lambda value: value)
     ast = ProgAst([AssignAst(VarAst('a'), LiteralAst("foo")), VarAst('a')])
     evaluate(ast, Environment(),
              lambda value: self.assertEqual(value, "foo"))
     # Assigning to a name not defined in the current scope raises.
     ast = AssignAst(VarAst("a"), LiteralAst("foo"))
     with self.assertRaises(Exception):
         evaluate(ast, Environment(Environment()), lambda value: value)
     # --- calls and lambdas ---
     ast = CallAst(
         LambdaAst("", ["a"], VarAst("a")),
         [LiteralAst(1)],
     )
     evaluate(ast, Environment(),
              lambda value: self.assertEqual(value, 1.0))
     ast = CallAst(LambdaAst("", ["a"], VarAst("a")), [LiteralAst("abc")])
     evaluate(ast, Environment(),
              lambda value: self.assertEqual(value, "abc"))
     # (λ loop (n) if n > 0 then n + loop(n - 1) else 0) (10)
     ast = CallAst(
         LambdaAst(
             "loop", ["n"],
             IfAst(
                 BinaryAst(">", VarAst("n"), LiteralAst(0)),
                 BinaryAst(
                     "+", VarAst("n"),
                     CallAst(VarAst("loop"),
                             [BinaryAst('-', VarAst('n'), LiteralAst(1))])),
                 LiteralAst(0))), [LiteralAst(10)])
     evaluate(ast, Environment(),
              lambda value: self.assertEqual(value, 55.0))
     # let (x) x;  — an unset var definition defaults to False
     ast = LetAst([VarDefAst("x", None)], VarAst("x"))
     evaluate(ast, Environment(), self.assertFalse)
     # let (x = 2, y = x + 1, z = x + y) x + y + z
     ast = LetAst([
         VarDefAst("x", LiteralAst(2)),
         VarDefAst("y", BinaryAst("+", VarAst("x"), LiteralAst(1))),
         VarDefAst("z", BinaryAst("+", VarAst("x"), VarAst("y")))
     ], BinaryAst("+", BinaryAst("+", VarAst("x"), VarAst("y")),
                  VarAst("z")))
     evaluate(ast, Environment(),
              lambda value: self.assertEqual(value, 10.0))
     # The second expression must raise an error,
     # since x, y, z are bound only inside the let body:
     # let (x = 2, y = x + 1, z = x + y) x + y + z; x + y + z
     ast = ProgAst([
         LetAst([
             VarDefAst('x', LiteralAst(2)),
             VarDefAst('y', BinaryAst('+', VarAst('x'), LiteralAst(1))),
             VarDefAst('z', BinaryAst('+', VarAst('x'), VarAst('y')))
         ],
                BinaryAst('+', BinaryAst('+', VarAst('x'), VarAst('y')),
                          VarAst('z'))),
         BinaryAst('+', BinaryAst('+', VarAst('x'), VarAst('y')),
                   VarAst('z'))
     ])
     with self.assertRaises(Exception):
         evaluate(ast, Environment(), lambda value: value)
     # --- if: non-empty string is truthy; missing else yields False ---
     ast = IfAst(LiteralAst(""), LiteralAst(1), None)
     evaluate(ast, Environment(),
              lambda value: self.assertEqual(value, 1.0))
     ast = IfAst(LiteralAst(False), LiteralAst(1), LiteralAst(2))
     evaluate(ast, Environment(),
              lambda value: self.assertEqual(value, 2.0))
     ast = IfAst(LiteralAst(False), LiteralAst(1), LiteralAst(False))
     evaluate(ast, Environment(), self.assertFalse)
     # An unknown node type raises.
     ast = {"type": "foo", "value": 'foo'}
     with self.assertRaises(Exception):
         evaluate(ast, Environment(), lambda value: value)
     # fib = λ(n) if n < 2 then n else fib(n - 1) + fib(n - 2);
     # fib(6);
     #
     ast = ProgAst([
         AssignAst(
             VarAst('fib'),
             LambdaAst(
                 'n', ['n'],
                 IfAst(
                     BinaryAst('<', VarAst('n'), LiteralAst(2)),
                     VarAst('n'),
                     BinaryAst(
                         '+',
                         CallAst(
                             VarAst('fib'),
                             [BinaryAst('-', VarAst('n'), LiteralAst(1))]),
                         CallAst(
                             VarAst('fib'),
                             [BinaryAst('-', VarAst('n'), LiteralAst(2))
                              ]))))),
         CallAst(VarAst('fib'), [LiteralAst(6)])
     ])
     evaluate(ast, Environment(),
              lambda value: self.assertEqual(value, 8.0))
     ast = IfAst(LiteralAst(False), LiteralAst(1), LiteralAst(False))
     evaluate(ast, Environment(), self.assertFalse)
     # Calling a non-function raises.
     ast = CallAst(LiteralAst(1), [])
     with self.assertRaises(Exception):
         evaluate(ast, Environment(), self.assertFalse)
     # End-to-end: parse and evaluate with the primitive environment.
     code = """
     2 + twice(3, 4)
     """
     global_env = Environment()
     for name, func in primitive.items():
         global_env.define(name, func)
     parser = Parser(TokenStream(InputStream(code)))
     evaluate(parser(), global_env, lambda result: result)
Ejemplo n.º 25
0
 def test_skip_comment(self):
     """_skip_comment consumes through the newline, leaving the next char."""
     stream = TokenStream(InputStream('# abc\ndef'))
     stream._skip_comment()
     self.assertEqual(stream._input_stream.peek(), 'd')
Ejemplo n.º 26
0
 def test_croak(self):
     """croak() must raise regardless of the message passed."""
     stream = TokenStream(InputStream(' # comment\n'))
     with self.assertRaises(Exception):
         stream.croak('foo')
Ejemplo n.º 27
0
    def test_literal01(self):
        """Test the set of example strings from the spec."""
        filepath = 't/literal01.dat'
        with open(filepath, 'rb') as f:
            tk = TokenStream(filepath, f)

            def next_non_eol():
                # Return the next token, skipping end-of-line tokens.
                while True:
                    tok = tk.next_token()
                    if tok.type not in [EToken.CR, EToken.LF, EToken.CRLF]:
                        return tok

            # This is a string
            tok = tk.next_token()
            self.assertEqual(EToken.LITERAL_STRING, tok.type)
            b = tok.data
            self.assertEqual(16, len(b))
            self.assertEqual(b'This', b[0:4])

            # Strings may contain newlines\n and such
            tok = next_non_eol()
            self.assertEqual(EToken.LITERAL_STRING, tok.type)
            b = tok.data
            self.assertTrue(b.startswith(b'Strings may'))
            self.assertTrue(b.endswith(b'such.'))

            # Strings may contain balanced parentheses...
            tok = next_non_eol()
            self.assertEqual(EToken.LITERAL_STRING, tok.type)
            b = tok.data
            self.assertEqual(b'(x)', b[41:44])
            self.assertTrue(b.endswith(b'% and so on).'))

            # The following is an empty string.
            tok = next_non_eol()
            self.assertEqual(EToken.LITERAL_STRING, tok.type)
            b = tok.data
            self.assertEqual(b'The following is an empty string.', b)

            # Empty string
            tok = next_non_eol()
            self.assertEqual(EToken.LITERAL_STRING, tok.type)
            b = tok.data
            self.assertEqual(0, len(b))
            self.assertEqual(b'', b)

            # It has zero (0) length.
            tok = next_non_eol()
            self.assertEqual(EToken.LITERAL_STRING, tok.type)
            b = tok.data
            self.assertEqual(23, len(b))
            self.assertEqual(b'It has zero (0) length.', b)
Ejemplo n.º 28
0
        if token.type != 'op':
            return left
        his_prec = self.PRECEDENCE[token.value]
        if his_prec > my_prec:
            self._token_stream.next()
            right = self._maybe_binary(self._parse_atom(), his_prec)
            if token.value == '=':
                binary = AssignAst(left, right)
            else:
                binary = BinaryAst(token.value, left, right)
            return self._maybe_binary(binary, my_prec)
        return left

    def __call__(self) -> ProgAst:
        """Parse the whole token stream and return the top-level program AST."""
        return self._parse_toplevel()

    def unexpected(self):
        """
        Raise an exception (via the token stream's croak, which reports the
        current input location) naming the unexpected token encountered.
        """
        self._token_stream.croak(
            f'Unexpected token: {self._token_stream.peek()}')


if __name__ == '__main__':
    # Script entry point: parse the file named on the command line and
    # print its AST.
    with open(sys.argv[1]) as source_file:
        source = source_file.read()
    parser = Parser(TokenStream(InputStream(source)))
    print(parser())
Ejemplo n.º 29
0
 def test_read_while(self):
     """_read_while consumes characters for as long as the predicate holds."""
     stream = TokenStream(InputStream('ab123='))
     # str.isalnum is equivalent to lambda ch: ch.isalnum() for 1-char strings.
     self.assertEqual(stream._read_while(str.isalnum), 'ab123')