Code example #1
    def test_if_expression(self) -> None:
        source: str = 'if x > y then {z}'
        lexer: Lexer = Lexer(source)
        parser: Parser = Parser(lexer)

        program: Program = parser.parse_program()

        self._test_program_statements(parser, program)

        # Test correct node type
        if_expression = cast(If, cast(ExpressionStatement,
                                      program.statements[0]).expression)
        self.assertIsInstance(if_expression, If)

        # Test condition
        assert if_expression.condition is not None
        self._test_infix_expression(if_expression.condition, 'x', '>', 'y')

        assert if_expression.consequence is not None
        self.assertIsInstance(if_expression.consequence, Block)
        self.assertEqual(len(if_expression.consequence.statements), 1)

        consequence_statement = cast(
            ExpressionStatement, if_expression.consequence.statements[0])
        assert consequence_statement.expression is not None
        self._test_identifier(consequence_statement.expression, 'z')

        # Test alternative
        self.assertIsNone(if_expression.alternative)
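
For orientation, here is a minimal sketch of the Lexer → Parser → Program pipeline that the parser tests in this listing all share; the sigmaF module paths are assumptions on my part, not confirmed by these excerpts:

    from sigmaF.ast import Program    # assumed module path
    from sigmaF.lexer import Lexer    # assumed module path
    from sigmaF.parser import Parser  # assumed module path

    source = 'let x = 5;'
    lexer = Lexer(source)                      # source string -> token stream
    parser = Parser(lexer)                     # token stream -> AST
    program: Program = parser.parse_program()
    print(len(program.statements), parser.errors)  # expect: 1 []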
Code example #2
File: lexer_test.py  Project: FabianVegaA/sigmaF
    def test_function_declaration(self) -> None:
        source: str = '''
            let sum = x::int, y::int -> int {
                => x + y
            }
        '''
        lexer: Lexer = Lexer(source)

        tokens: List[Token] = []
        for _ in range(18):
            tokens.append(lexer.next_token())

        expected_tokens: List[Token] = [
            Token(TokenType.LET, 'let'),
            Token(TokenType.IDENT, 'sum'),
            Token(TokenType.ASSIGN, '='),
            Token(TokenType.IDENT, 'x'),
            Token(TokenType.TYPEASSIGN, '::'),
            Token(TokenType.CLASSNAME, 'int'),
            Token(TokenType.COMMA, ','),
            Token(TokenType.IDENT, 'y'),
            Token(TokenType.TYPEASSIGN, '::'),
            Token(TokenType.CLASSNAME, 'int'),
            Token(TokenType.OUTPUTFUNTION, '->'),
            Token(TokenType.CLASSNAME, 'int'),
            Token(TokenType.LBRACE, '{'),
            Token(TokenType.RETURN, '=>'),
            Token(TokenType.IDENT, 'x'),
            Token(TokenType.PLUS, '+'),
            Token(TokenType.IDENT, 'y'),
            Token(TokenType.RBRACE, '}'),
        ]

        self.assertEqual(tokens, expected_tokens)
Code example #3
File: repl.py  Project: FabianVegaA/sigmaF
def _check_errors(source: str, environment: Environment) -> str:
    source = _clean_comments(source)

    lexer: Lexer = Lexer(source)
    parser: Parser = Parser(lexer)

    program: Program = parser.parse_program()
    env: Environment = environment

    if len(parser.errors) > 0:
        _print_parse_errors(parser.errors)
        return ''

    try:
        evaluated = evaluate(program, env)

        if evaluated is not None:
            print(evaluated.inspect())
            return ''
    except RecursionError:
        print('[Error] ' + _MAXIMUMRECURSIONDEPTH.format(''))
    except AssertionError:
        print('\n[Error] ' + _EVALUATIONERROR.format('') + '\n')

    return source
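
A hedged usage sketch of _check_errors: it returns '' after printing a result or reporting parse errors, and returns the source itself when evaluation produced nothing to print, which is what start_repl accumulates in scanned. The import path and the assumption that let-bindings print nothing are mine, not the project's:

    from sigmaF.object import Environment  # assumed module path

    env = Environment()
    kept = _check_errors('let x = 5;', env)  # assumed: defines x, prints nothing, returns the source
    shown = _check_errors('x + 1;', env)     # prints 6, returns ''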
Code example #4
File: lexer_test.py  Project: FabianVegaA/sigmaF
    def test_assignment(self) -> None:
        source: str = '''
            let x = 5;
            let y = "cinco";
            let foo = 5.0;
        '''
        lexer: Lexer = Lexer(source)

        tokens: List[Token] = []
        for _ in range(15):
            tokens.append(lexer.next_token())

        expected_tokens: List[Token] = [
            Token(TokenType.LET, 'let'),
            Token(TokenType.IDENT, 'x'),
            Token(TokenType.ASSIGN, '='),
            Token(TokenType.INT, '5'),
            Token(TokenType.SEMICOLON, ';'),
            Token(TokenType.LET, 'let'),
            Token(TokenType.IDENT, 'y'),
            Token(TokenType.ASSIGN, '='),
            Token(TokenType.STRING, '"cinco"'),
            Token(TokenType.SEMICOLON, ';'),
            Token(TokenType.LET, 'let'),
            Token(TokenType.IDENT, 'foo'),
            Token(TokenType.ASSIGN, '='),
            Token(TokenType.FLOAT, '5.0'),
            Token(TokenType.SEMICOLON, ';'),
        ]
        self.assertEqual(tokens, expected_tokens)
Code example #5
    def test_parse_errors(self) -> None:
        source: str = 'let x 5;'
        lexer: Lexer = Lexer(source)
        parser: Parser = Parser(lexer)

        program: Program = parser.parse_program()

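        # 'let x 5;' is missing '=', so exactly one parse error is expected.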
        self.assertEqual(len(parser.errors), 1)
Code example #6
    def test_parser_program(self) -> None:
        source: str = 'let x = 5;'
        lexer: Lexer = Lexer(source)
        parser: Parser = Parser(lexer)

        program: Program = parser.parse_program()

        self.assertIsNotNone(program)
        self.assertIsInstance(program, Program)
Code example #7
    def _evaluate_tests(self, source: str) -> Object:
        lexer: Lexer = Lexer(source)
        parser: Parser = Parser(lexer)
        program: Program = parser.parse_program()
        env: Environment = Environment()

        evaluated = evaluate(program, env)

        assert evaluated is not None
        return evaluated
Code example #8
File: repl.py  Project: FabianVegaA/sigmaF
def start_repl(source: str = '', _path: Optional[str] = None) -> None:
    scanned: List[str] = []
    env: Environment = Environment()

    scanned.append(_check_errors(source, env))

    lexer: Lexer = Lexer(' '.join(scanned))

    _ = process(lexer, env)

    _pattern_path = re.compile(r'load\(([\w./-]+)\)')

    while True:
        try:
            source = input('>> ')
        except EOFError:
            print()
            break

        if source.strip() == 'exit()':
            break
        elif source.strip() == "clear()":
            clear()
        elif source == "update()":
            env = update(_path, env)
        elif (path := re.match(_pattern_path, source)) is not None:
            env = update(path.group(1), env)
            _path = path.group(1)
        else:
            if source != '':
                source += read_sublines(source)

            scanned.append(_check_errors(source, env))

            lexer = Lexer(' '.join(scanned))

            _ = process(lexer, env)

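        # Echo whatever tokens remain in the current lexer until EOF is reached.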
        while (token := lexer.next_token()) != EOF_TOKEN:
            print(token)
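
A quick sanity check of the load() pattern compiled above; the module path in this snippet is hypothetical:

import re

_pattern_path = re.compile(r'load\(([\w./-]+)\)')
match = re.match(_pattern_path, 'load(examples/module.sf)')  # hypothetical path
assert match is not None and match.group(1) == 'examples/module.sf'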
Code example #9
    def _test_integer_expression(self) -> None:
        source: str = '5;'
        lexer: Lexer = Lexer(source)
        parser: Parser = Parser(lexer)

        program: Program = parser.parse_program()

        self._test_program_statements(parser, program)

        expression_statement = cast(ExpressionStatement, program.statements[0])

        assert expression_statement.expression is not None
        self._test_literal_expression(expression_statement.expression, 5)
Code example #10
File: lexer_test.py  Project: FabianVegaA/sigmaF
    def test_eof(self) -> None:
        source: str = '+'
        lexer: Lexer = Lexer(source)

        tokens: List[Token] = []
        for _ in range(len(source) + 1):
            tokens.append(lexer.next_token())

        expected_tokens: List[Token] = [
            Token(TokenType.PLUS, '+'),
            Token(TokenType.EOF, '')
        ]

        self.assertEqual(tokens, expected_tokens)
Code example #11
    def test_return_statement(self) -> None:
        source: str = '''
            => 5;
            => "Hello, World";
        '''
        lexer: Lexer = Lexer(source)
        parser: Parser = Parser(lexer)

        program: Program = parser.parse_program()

        self.assertEqual(len(program.statements), 2)
        for statement in program.statements:
            self.assertEqual(statement.token_literal(), '=>')
            self.assertIsInstance(statement, ReturnStatement)
Code example #12
    def test_infix_statements(self) -> None:
        source: str = '''
            5 + 5;
            5 - 5;
            5 * 5;
            5 / 5;
            5 % 5;
            5 > 5;
            5 >= 5;
            5 < 5;
            5 <= 5;
            5 == 5;
            5 != 5;
            true == true;
            true != false;
        '''
        lexer = Lexer(source)
        parser: Parser = Parser(lexer)

        program: Program = parser.parse_program()

        self._test_program_statements(
            parser, program, expected_statement_count=13)

        expected_operators_and_values: List[Tuple[Any, str, Any]] = [
            (5, '+', 5),
            (5, '-', 5),
            (5, '*', 5),
            (5, '/', 5),
            (5, '%', 5),
            (5, '>', 5),
            (5, '>=', 5),
            (5, '<', 5),
            (5, '<=', 5),
            (5, '==', 5),
            (5, '!=', 5),
            (True, '==', True),
            (True, '!=', False),
        ]

        for statement, (expected_left, expected_operator, expected_right) in zip(
                program.statements, expected_operators_and_values):
            statement = cast(ExpressionStatement, statement)
            assert statement.expression is not None
            self.assertIsInstance(statement.expression, Infix)
            self._test_infix_expression(statement.expression,
                                        expected_left,
                                        expected_operator,
                                        expected_right)
Code example #13
File: lexer_test.py  Project: FabianVegaA/sigmaF
    def test_illegal(self) -> None:
        source: str = '¡¿@'
        lexer: Lexer = Lexer(source)

        tokens: List[Token] = []
        for _ in range(len(source)):
            tokens.append(lexer.next_token())

        expected_tokens: List[Token] = [
            Token(TokenType.ILLEGAL, '¡'),
            Token(TokenType.ILLEGAL, '¿'),
            Token(TokenType.ILLEGAL, '@')
        ]

        self.assertEqual(tokens, expected_tokens)
Code example #14
    def test_let_statements(self) -> None:
        source: str = '''
                let x = 5 ;
                let y = 10;
                let foo = 20;
                let float_val = 3.14159;
                let string_val = "This is a string;"
            '''
        lexer: Lexer = Lexer(source)
        parser: Parser = Parser(lexer)

        program: Program = parser.parse_program()

        self.assertEqual(len(program.statements), 5)

        for statement in program.statements:
            self.assertEqual(statement.token_literal(), 'let')
            self.assertIsInstance(statement, LetStatement)
Code example #15
File: repl.py  Project: FabianVegaA/sigmaF
def update(_path: Optional[str], env: Environment):
    if _path is None:
        print(f"[Warning] There is no path to be uploaded")
        return env

    print(f"[Warning] Updated the path: { _path}")

    new_env = Environment()

    source: str = read_module(_path)
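    # _check_errors evaluates the module into new_env as a side effect;
    # the Lexer built from its return value is discarded.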
    _ = Lexer(_check_errors(source, new_env))

    for key, value in new_env._store.items():
        if key in env.keys():
            del env[key]

        env[key] = value
    return env
Code example #16
    def _test_prefix_expression(self) -> None:
        source: str = '-15;'
        lexer: Lexer = Lexer(source)
        parser: Parser = Parser(lexer)

        program: Program = parser.parse_program()
        self._test_program_statements(
            parser, program, expected_statement_count=1)

        for statement, (expected_operator, expected_value) in zip(
                program.statements, [('-', 15)]):
            statement = cast(ExpressionStatement, statement)
            self.assertIsInstance(statement.expression, Prefix)

            prefix = cast(Prefix, statement.expression)
            self.assertEqual(prefix.operator, expected_operator)

            assert prefix.right is not None
            self._test_literal_expression(prefix.right, expected_value)
Code example #17
    def test_tuple_and_list(self) -> None:
        tests: List[Tuple[str, List[List[int]]]] = [
            ('[(1,2)]', [[1, 2]]),
            ('[(1,2), (2,3)]', [[1, 2], [2, 3]]),
        ]

        for source, expected in tests:
            lexer: Lexer = Lexer(source)
            parser: Parser = Parser(lexer)

            program: Program = parser.parse_program()

            list_values = cast(ListValues, cast(ExpressionStatement,
                                                program.statements[0]).expression)

            self.assertIsNotNone(list_values)
            for items, expects in zip(list_values.values, expected):
                for item, expect in zip(items.values, expects):
                    self.assertEqual(item.value, expect)
Code example #18
File: lexer_test.py  Project: FabianVegaA/sigmaF
    def test_delimiters(self) -> None:
        source: str = '(){}[],;'
        lexer: Lexer = Lexer(source)

        tokens: List[Token] = []
        for _ in range(len(source)):
            tokens.append(lexer.next_token())

        expected_tokens: List[Token] = [
            Token(TokenType.LPAREN, '('),
            Token(TokenType.RPAREN, ')'),
            Token(TokenType.LBRACE, '{'),
            Token(TokenType.RBRACE, '}'),
            Token(TokenType.LBRAKET, '['),
            Token(TokenType.RBRAKET, ']'),
            Token(TokenType.COMMA, ','),
            Token(TokenType.SEMICOLON, ';')
        ]
        self.assertEqual(tokens, expected_tokens)
Code example #19
File: lexer_test.py  Project: FabianVegaA/sigmaF
    def test_function_call(self) -> None:
        source: str = 'let variable = suma(2,3)'
        lexer: Lexer = Lexer(source)

        tokens: List[Token] = []
        for _ in range(9):
            tokens.append(lexer.next_token())

        expected_tokens: List[Token] = [
            Token(TokenType.LET, 'let'),
            Token(TokenType.IDENT, 'variable'),
            Token(TokenType.ASSIGN, '='),
            Token(TokenType.IDENT, 'suma'),
            Token(TokenType.LPAREN, '('),
            Token(TokenType.INT, '2'),
            Token(TokenType.COMMA, ','),
            Token(TokenType.INT, '3'),
            Token(TokenType.RPAREN, ')')
        ]
        self.assertEqual(tokens, expected_tokens)
Code example #20
File: lexer_test.py  Project: FabianVegaA/sigmaF
    def test_one_character_operator(self) -> None:
        source: str = '=+-/*<>%'
        lexer: Lexer = Lexer(source)

        tokens: List[Token] = []
        for _ in range(len(source)):
            tokens.append(lexer.next_token())

        expected_tokens: List[Token] = [
            Token(TokenType.ASSIGN, '='),
            Token(TokenType.PLUS, '+'),
            Token(TokenType.MINUS, '-'),
            Token(TokenType.DIVISION, '/'),
            Token(TokenType.MULTIPLICATION, '*'),
            Token(TokenType.LT, '<'),
            Token(TokenType.GT, '>'),
            Token(TokenType.MODULUS, '%'),
        ]

        self.assertEqual(tokens, expected_tokens)
Code example #21
    def test_names_in_let_statements(self) -> None:
        source: str = '''
                let x = 5 ;
                let y = 10 ;
                let foo = 20 ;
            '''
        lexer: Lexer = Lexer(source)
        parser: Parser = Parser(lexer)

        program: Program = parser.parse_program()

        names: List[str] = []
        for statement in program.statements:
            statement = cast(LetStatement, statement)
            assert statement.name is not None
            names.append(statement.name.value)

        expected_names: List[str] = ['x', 'y', 'foo']

        self.assertEqual(names, expected_names)
Code example #22
    def test_tuple(self) -> None:
        tests: List[Tuple[str, List[int]]] = [
            ('(1,2);', [1, 2]),
            ('(1,2,3);', [1, 2, 3]),
            ('(1,1,2,3,5);', [1, 1, 2, 3, 5]),
            ('(2,3,5,7,11);', [2, 3, 5, 7, 11])
        ]

        for source, expected in tests:
            lexer: Lexer = Lexer(source)
            parser: Parser = Parser(lexer)

            program: Program = parser.parse_program()

            tuple_values = cast(TupleValues, cast(ExpressionStatement,
                                                  program.statements[0]).expression)

            self.assertIsNotNone(tuple_values)
            for item, expect in zip(tuple_values.values, expected):
                self.assertEqual(item.value, expect)
Code example #23
    def test_call_list(self) -> None:
        source: str = 'value_list[1, 2 * 3];'
        lexer: Lexer = Lexer(source)
        parser: Parser = Parser(lexer)

        program: Program = parser.parse_program()

        self._test_program_statements(parser, program)

        call_list = cast(CallList, cast(ExpressionStatement,
                                        program.statements[0]).expression)

        self.assertIsInstance(call_list, CallList)
        self._test_identifier(call_list.list_identifier, 'value_list')

        # Test arguments
        assert call_list.range is not None
        self.assertEqual(len(call_list.range), 2)
        self._test_literal_expression(call_list.range[0], 1)
        self._test_infix_expression(call_list.range[1], 2, '*', 3)
Code example #24
File: lexer_test.py  Project: FabianVegaA/sigmaF
    def test_two_character_operator(self) -> None:

        source: str = '''
            10 == 10
            10 != 10
            10 >= 10
            10 <= 10
            10 ** 10
            10 || 10
            10 && 10
        '''
        lexer: Lexer = Lexer(source)

        tokens: List[Token] = []
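        # 7 expressions x 3 tokens each = 21 tokens.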
        for _ in range(21):
            tokens.append(lexer.next_token())

        expected_tokens: List[Token] = [
            Token(TokenType.INT, '10'),
            Token(TokenType.EQ, '=='),
            Token(TokenType.INT, '10'),
            Token(TokenType.INT, '10'),
            Token(TokenType.NOT_EQ, '!='),
            Token(TokenType.INT, '10'),
            Token(TokenType.INT, '10'),
            Token(TokenType.G_OR_EQ_T, '>='),
            Token(TokenType.INT, '10'),
            Token(TokenType.INT, '10'),
            Token(TokenType.L_OR_EQ_T, '<='),
            Token(TokenType.INT, '10'),
            Token(TokenType.INT, '10'),
            Token(TokenType.EXPONENTIATION, '**'),
            Token(TokenType.INT, '10'),
            Token(TokenType.INT, '10'),
            Token(TokenType.OR, '||'),
            Token(TokenType.INT, '10'),
            Token(TokenType.INT, '10'),
            Token(TokenType.AND, '&&'),
            Token(TokenType.INT, '10'),
        ]
        self.assertEqual(tokens, expected_tokens)
Code example #25
    def test_function_parameters(self) -> None:
        tests = [
            {'input':  'fn x::int -> int {1}',
             'expected_params': ['x'],
             'expected_type_params': ['int'],
             'expected_type_output': 'int'
             },
            {'input':  'fn x::int, y::int -> int {1}',
             'expected_params': ['x', 'y'],
             'expected_type_params': ['int', 'int'],
             'expected_type_output': 'int'
             },
            {'input':  'fn x::int, y::int, z::int -> int {1}',
             'expected_params': ['x', 'y', 'z'],
             'expected_type_params': ['int', 'int', 'int'],
             'expected_type_output': 'int'
             },
        ]

        for test in tests:
            lexer: Lexer = Lexer(str(test['input']))
            parser: Parser = Parser(lexer)

            program: Program = parser.parse_program()

            function = cast(Function, cast(
                ExpressionStatement, program.statements[0]).expression)

            self.assertEqual(len(function.parameters),
                              len(test['expected_params']))

            for idx, param in enumerate(test['expected_params']):
                self._test_literal_expression(function.parameters[idx], param)

            for idx, type_param in enumerate(test['expected_type_params']):
                self._test_literal_expression(
                    function.type_parameters[idx], type_param)

            assert function.type_output is not None
            self.assertEqual(function.type_output.value,
                              test['expected_type_output'])
Code example #26
    def test_boolean_expression(self) -> None:
        source: str = '''
            true;
            false;
        '''
        lexer = Lexer(source)
        parser: Parser = Parser(lexer)

        program: Program = parser.parse_program()

        self._test_program_statements(
            parser, program, expected_statement_count=2)

        expected_values: List[bool] = [True, False]

        for statement, expected_value in zip(program.statements, expected_values):
            expression_statement = cast(ExpressionStatement, statement)

            assert expression_statement.expression is not None
            self._test_literal_expression(expression_statement.expression,
                                          expected_value)
Code example #27
File: lexer_test.py  Project: FabianVegaA/sigmaF
    def test_control_statements(self) -> None:
        source: str = '''
            if 5 < 10 then true else false
        '''
        lexer: Lexer = Lexer(source)

        tokens: List[Token] = []
        for _ in range(8):
            tokens.append(lexer.next_token())

        expected_tokens: List[Token] = [
            Token(TokenType.IF, 'if'),
            Token(TokenType.INT, '5'),
            Token(TokenType.LT, '<'),
            Token(TokenType.INT, '10'),
            Token(TokenType.THEN, 'then'),
            Token(TokenType.TRUE, 'true'),
            Token(TokenType.ELSE, 'else'),
            Token(TokenType.FALSE, 'false')
        ]
        self.assertEqual(tokens, expected_tokens)
Code example #28
    def test_call_expression(self) -> None:
        source: str = 'sum(1, 2 * 3, 4 + 5);'
        lexer: Lexer = Lexer(source)
        parser: Parser = Parser(lexer)

        program: Program = parser.parse_program()

        self._test_program_statements(parser, program)

        call = cast(Call, cast(ExpressionStatement,
                               program.statements[0]).expression)

        self.assertIsInstance(call, Call)
        self._test_identifier(call.function, 'sum')

        # Test arguments
        assert call.arguments is not None
        self.assertEqual(len(call.arguments), 3)
        self._test_literal_expression(call.arguments[0], 1)
        self._test_infix_expression(call.arguments[1], 2, '*', 3)
        self._test_infix_expression(call.arguments[2], 4, '+', 5)
Code example #29
    def test_operator_precedence(self) -> None:
        test_sources: List[Tuple[str, str, int]] = [
            ('-a * b;', '((-a) * b)', 1),
            ('a + b / c;', '(a + (b / c))', 1),
            ('3 + 4; -5 * 5;', '(3 + 4)((-5) * 5)', 2),
            ('2 * 3 / 5 * -6 % 7;', '((((2 * 3) / 5) * (-6)) % 7)', 1),
            ('-3 ** 2;', '((-3) ** 2)', 1),
            ('2 * 4 / 5 ** 7;', '((2 * 4) / (5 ** 7))', 1),
            ('-1 + 2 % -3 + 345354**4;', '(((-1) + (2 % (-3))) + (345354 ** 4))', 1),
            ('-4 + 6 * 5; 45 / 6 * 8 - -1;',
             '((-4) + (6 * 5))(((45 / 6) * 8) - (-1))', 2),
            ('a + 4 - 5 + -3 + -b;', '((((a + 4) - 5) + (-3)) + (-b))', 1),
            ('a ** 4 + 5 - -46 ** 6;', '(((a ** 4) + 5) - ((-46) ** 6))', 1),
            ('a ** 5 % 3 / 2;', '(((a ** 5) % 3) / 2)', 1),
            ('-5 * 45 ** 5 - 15 % 2;', '(((-5) * (45 ** 5)) - (15 % 2))', 1),
            ('34 ** 3 / 7 % 45 + -102', '((((34 ** 3) / 7) % 45) + (-102))', 1),
            ('3 ** (4 % 7) + 23 * -21', '((3 ** (4 % 7)) + (23 * (-21)))', 1),
            ('5 >= 34 == 4 < 2 == (a == a)',
             '(((5 >= 34) == (4 < 2)) == (a == a))', 1),
            ('a + sum(b * c) + d;', '((a + sum((b * c))) + d)', 1),
            ('sum(a, b, 1, 2 * 3, 4 + 5, sum(6, 7 * 8));',
             'sum(a, b, 1, (2 * 3), (4 + 5), sum(6, (7 * 8)))', 1),
            ('4 % 2 == 0 && 2 > 0;', '(((4 % 2) == 0) && (2 > 0))', 1),
            ('-4 + 2 == 0 || 2 > 0;', '((((-4) + 2) == 0) || (2 > 0))', 1)
            # ('-function(-something, other_thing ** 23) ** (-34 % 90);',
            #  '((-function((-something), (other_thing ** 23))) ** ((-34) % 90))', 1),
        ]

        for source, expected_result, expected_statement_count in test_sources:
            lexer: Lexer = Lexer(source)
            parser: Parser = Parser(lexer)

            program: Program = parser.parse_program()

            self._test_program_statements(
                parser, program, expected_statement_count)
            self.assertEqual(str(program), expected_result)
Code example #30
    def _test_function_literal(self) -> None:
        source: str = 'fn x::int, y::int -> int {=> x + y}'
        lexer: Lexer = Lexer(source)
        parser: Parser = Parser(lexer)

        program: Program = parser.parse_program()

        self._test_program_statements(parser, program)

        # Test correct node type
        function_literal = cast(Function, cast(ExpressionStatement,
                                               program.statements[0]).expression)

        self.assertIsInstance(function_literal, Function)

        # Test params
        self.assertEqual(len(function_literal.parameters), 2)
        self._test_literal_expression(function_literal.parameters[0], 'x')
        self._test_literal_expression(function_literal.parameters[1], 'y')

        self._test_literal_expression(
            function_literal.type_parameters[0], 'int')
        self._test_literal_expression(
            function_literal.type_parameters[1], 'int')

        # Test output
        assert function_literal.type_output is not None
        self.assertEqual(function_literal.type_output.value, 'int')

        # Test body
        assert function_literal.body is not None
        self.assertEqual(len(function_literal.body.statements), 1)

        body = cast(ExpressionStatement, function_literal.body.statements[0])
        assert body.expression is not None
        self._test_infix_expression(body.expression, 'x', '+', 'y')