    def test_function_declaration(self) -> None:
        source: str = '''
        var res = func(x, y) {
            x + y;
        };
        '''
        lexer: Lexer = Lexer(source)

        tokens: List[Token] = []
        for _ in range(16):
            tokens.append(lexer.next_token())

        expected_tokens: List[Token] = [
            Token(TokenType.VAR, 'var'),
            Token(TokenType.IDENT, 'res'),
            Token(TokenType.ASSIGN, '='),
            Token(TokenType.FUNCTION, 'func'),
            Token(TokenType.LPAREN, '('),
            Token(TokenType.IDENT, 'x'),
            Token(TokenType.COMMA, ','),
            Token(TokenType.IDENT, 'y'),
            Token(TokenType.RPAREN, ')'),
            Token(TokenType.LBRACE, '{'),
            Token(TokenType.IDENT, 'x'),
            Token(TokenType.PLUS, '+'),
            Token(TokenType.IDENT, 'y'),
            Token(TokenType.SEMICOLON, ';'),
            Token(TokenType.RBRACE, '}'),
            Token(TokenType.SEMICOLON, ';'),
        ]

        self.assertEqual(tokens, expected_tokens)

    def test_control_statement(self) -> None:
        source: str = '''
        if (5 < 10) {
            return true;
        } else {
            return false;
        }
        '''
        lexer: Lexer = Lexer(source)

        tokens: List[Token] = []
        for _ in range(17):
            tokens.append(lexer.next_token())

        expected_tokens: List[Token] = [
            Token(TokenType.IF, 'if'),
            Token(TokenType.LPAREN, '('),
            Token(TokenType.INT, '5'),
            Token(TokenType.LT, '<'),
            Token(TokenType.INT, '10'),
            Token(TokenType.RPAREN, ')'),
            Token(TokenType.LBRACE, '{'),
            Token(TokenType.RETURN, 'return'),
            Token(TokenType.TRUE, 'true'),
            Token(TokenType.SEMICOLON, ';'),
            Token(TokenType.RBRACE, '}'),
            Token(TokenType.ELSE, 'else'),
            Token(TokenType.LBRACE, '{'),
            Token(TokenType.RETURN, 'return'),
            Token(TokenType.FALSE, 'false'),
            Token(TokenType.SEMICOLON, ';'),
            Token(TokenType.RBRACE, '}'),
        ]
        self.assertEqual(tokens, expected_tokens)
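
Note: the lexer tests above compare Token objects by value. A minimal sketch of a value-comparable Token type that would satisfy these assertions follows; the field name token_type and the use of NamedTuple are assumptions for illustration, not taken from the original source.

from enum import Enum, auto, unique
from typing import NamedTuple

@unique
class TokenType(Enum):
    # Only a few of the members exercised above are sketched here; the rest
    # (FUNCTION, IF, ELSE, EQ, NOT_EQ, ...) would follow the same pattern.
    ASSIGN = auto()
    EOF = auto()
    IDENT = auto()
    ILLEGAL = auto()
    INT = auto()
    PLUS = auto()
    SEMICOLON = auto()
    VAR = auto()

class Token(NamedTuple):
    token_type: TokenType
    literal: str

NamedTuple instances compare field by field, which is what makes assertEqual(tokens, expected_tokens) work without a custom __eq__.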
Example #3
    def test_parse_errors(self) -> None:
        source: str = 'var x 5;'
        lexer: Lexer = Lexer(source)
        parser: Parser = Parser(lexer)

        parser.parse_program()

        self.assertEqual(len(parser.errors), 1)
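
For 'var x 5;' the parser is expected to record exactly one error (the missing '='). Below is a minimal sketch of the expected-token check that typically produces such an error, using the Token sketch above; the names _peek_token and _expected_token are hypothetical, not confirmed by the original source.

from typing import List, Optional

class ExpectedTokenSketch:
    # Hypothetical fragment: record an error instead of raising, so parsing
    # can continue and the test can inspect parser.errors afterwards.
    def __init__(self) -> None:
        self.errors: List[str] = []
        self._peek_token: Optional[Token] = None

    def _expected_token(self, token_type: TokenType) -> bool:
        assert self._peek_token is not None
        if self._peek_token.token_type == token_type:
            return True
        self.errors.append(
            f'expected next token to be {token_type}, '
            f'got {self._peek_token.token_type}')
        return False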
Example #4
    def __init__(self):
        lexer = Lexer()
        parser = Parser()
        solutionFactory = SolutionFactory()
        self.__calculus_types = [
            basic.Basic(lexer, parser, solutionFactory),
            equation.Equation(lexer, parser, solutionFactory),
        ]
Example #5
    def test_parse_program(self) -> None:
        source: str = 'var x = 5;'
        lexer: Lexer = Lexer(source)
        parser: Parser = Parser(lexer)

        program: Program = parser.parse_program()

        self.assertIsNotNone(program)
        self.assertIsInstance(program, Program)
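
A Program node that passes these two assertions only needs to exist and hold statements. A minimal sketch, with attribute names inferred from the tests later in this listing, could look like this:

from typing import Any, List

class ProgramSketch:
    # Minimal AST root: `statements` and `token_literal` are the only
    # members the tests in this listing touch.
    def __init__(self, statements: List[Any]) -> None:
        self.statements = statements

    def token_literal(self) -> str:
        return self.statements[0].token_literal() if self.statements else ''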
Example #6
    def test_integer_expressions(self) -> None:
        source: str = '5;'
        lexer: Lexer = Lexer(source)
        parser: Parser = Parser(lexer)

        program: Program = parser.parse_program()

        self._test_program_statements(parser, program)

        expression_statement = cast(ExpressionStatement, program.statements[0])

        assert expression_statement.expression
        self._test_literal_expression(expression_statement.expression, 5)

    def test_eof(self) -> None:
        source: str = '+'
        lexer: Lexer = Lexer(source)

        tokens: List[Token] = []
        for _ in range(len(source) + 1):
            tokens.append(lexer.next_token())

        expected_tokens: List[Token] = [
            Token(TokenType.PLUS, '+'),
            Token(TokenType.EOF, ''),
        ]

        self.assertEqual(tokens, expected_tokens)

    def test_illegal(self) -> None:
        source: str = '¡¿@'
        lexer: Lexer = Lexer(source)

        tokens: List[Token] = []
        for _ in range(len(source)):
            tokens.append(lexer.next_token())

        expected_tokens: List[Token] = [
            Token(TokenType.ILLEGAL, '¡'),
            Token(TokenType.ILLEGAL, '¿'),
            Token(TokenType.ILLEGAL, '@'),
        ]

        self.assertEqual(tokens, expected_tokens)
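
These two tests pin down the lexer's boundary behaviour: one EOF token with an empty literal once the input is exhausted, and one ILLEGAL token per unrecognised character. A standalone sketch of that dispatch, built on the Token sketch above (the function name and structure are assumptions):

def classify_char(source: str, position: int) -> Token:
    # Sketch: past the end of input, emit EOF with an empty literal;
    # any character with no rule of its own becomes ILLEGAL.
    if position >= len(source):
        return Token(TokenType.EOF, '')
    ch = source[position]
    if ch == '+':
        return Token(TokenType.PLUS, ch)
    return Token(TokenType.ILLEGAL, ch)

For example, classify_char('+', 1) yields Token(TokenType.EOF, '') and classify_char('¡¿@', 0) yields Token(TokenType.ILLEGAL, '¡').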
Example #9
    def test_return_statement(self) -> None:
        source: str = '''
        return 5;
        return foo;
        '''

        lexer: Lexer = Lexer(source)
        parser: Parser = Parser(lexer)

        program: Program = parser.parse_program()

        self.assertEqual(len(program.statements), 2)

        for statement in program.statements:
            self.assertEqual(statement.token_literal(), 'return')
            self.assertIsInstance(statement, ReturnStatement)
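
The loop above only touches token_literal() and the node's type. A ReturnStatement sketch that satisfies it, with field names assumed rather than taken from the original source:

from typing import Any, Optional

class ReturnStatementSketch:
    # `token` carries the 'return' keyword token; `return_value` would hold
    # the returned expression (left None here for brevity).
    def __init__(self, token: Token,
                 return_value: Optional[Any] = None) -> None:
        self.token = token
        self.return_value = return_value

    def token_literal(self) -> str:
        return self.token.literal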
Example #10
    def test_var_statements(self) -> None:
        source: str = '''
        var x = 5;
        var y = 10;
        var foo = 20;
        '''
        lexer: Lexer = Lexer(source)
        parser: Parser = Parser(lexer)

        program: Program = parser.parse_program()

        self.assertEqual(len(program.statements), 3)

        for statement in program.statements:
            self.assertEqual(statement.token_literal(), 'var')
            self.assertIsInstance(statement, VarStatement)

    def test_complex_var_name(self) -> None:
        source: str = 'var num_1 = 10;'
        lexer: Lexer = Lexer(source)

        tokens: List[Token] = []
        for _ in range(5):
            tokens.append(lexer.next_token())

        expected_tokens: List[Token] = [
            Token(TokenType.VAR, 'var'),
            Token(TokenType.IDENT, 'num_1'),
            Token(TokenType.ASSIGN, '='),
            Token(TokenType.INT, '10'),
            Token(TokenType.SEMICOLON, ';'),
        ]

        self.assertEqual(tokens, expected_tokens)

    def test_assignment(self) -> None:
        source: str = 'var num = 5;'
        lexer: Lexer = Lexer(source)

        tokens: List[Token] = []
        for _ in range(5):
            tokens.append(lexer.next_token())

        expected_tokens: List[Token] = [
            Token(TokenType.VAR, 'var'),
            Token(TokenType.IDENT, 'num'),
            Token(TokenType.ASSIGN, '='),
            Token(TokenType.INT, '5'),
            Token(TokenType.SEMICOLON, ';'),
        ]

        self.assertEqual(tokens, expected_tokens)

    def test_delimiters(self) -> None:
        source: str = '(){},;'
        lexer: Lexer = Lexer(source)

        tokens: List[Token] = []
        for _ in range(len(source)):
            tokens.append(lexer.next_token())

        expected_tokens: List[Token] = [
            Token(TokenType.LPAREN, '('),
            Token(TokenType.RPAREN, ')'),
            Token(TokenType.LBRACE, '{'),
            Token(TokenType.RBRACE, '}'),
            Token(TokenType.COMMA, ','),
            Token(TokenType.SEMICOLON, ';'),
        ]

        self.assertEqual(tokens, expected_tokens)
Example #14
    def test_names_in_var_statements(self) -> None:
        source: str = '''
        var x = 5;
        var y = 10;
        var foo = 20;
        '''
        lexer: Lexer = Lexer(source)
        parser: Parser = Parser(lexer)

        program: Program = parser.parse_program()

        names: List[str] = []
        for statement in program.statements:
            statement = cast(VarStatement, statement)
            assert statement.name
            names.append(statement.name.value)

        expected_names: List[str] = ['x', 'y', 'foo']

        self.assertEqual(names, expected_names)

    def test_one_character_operators(self) -> None:
        source: str = '=+-/*<>!'
        lexer: Lexer = Lexer(source)

        tokens: List[Token] = []

        for i in range(len(source)):
            tokens.append(lexer.next_token())

        expected_tokens: List[Token] = [
            Token(TokenType.ASSIGN, '='),
            Token(TokenType.PLUS, '+'),
            Token(TokenType.MINUS, '-'),
            Token(TokenType.DIVISION, '/'),
            Token(TokenType.MULTIPLICATION, '*'),
            Token(TokenType.LT, '<'),
            Token(TokenType.GT, '>'),
            Token(TokenType.NEGATION, '!'),
        ]

        self.assertEqual(tokens, expected_tokens)
Example #16
    def test_prefix_expression(self) -> None:
        source: str = '!5; -15;'
        lexer: Lexer = Lexer(source)
        parser: Parser = Parser(lexer)

        program: Program = parser.parse_program()

        self._test_program_statements(parser,
                                      program,
                                      expected_statement_count=2)

        for statement, (expected_operator,
                        expected_value) in zip(program.statements,
                                               [('!', 5), ('-', 15)]):
            statement = cast(ExpressionStatement, statement)
            self.assertIsInstance(statement.expression, Prefix)

            prefix = cast(Prefix, statement.expression)
            self.assertEqual(prefix.operator, expected_operator)

            assert prefix.right
            self._test_literal_expression(prefix.right, expected_value)
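
The cast to Prefix above implies a node exposing `operator` and `right`. A minimal sketch consistent with those accesses (the constructor shape is an assumption):

from typing import Any, Optional

class PrefixSketch:
    # `operator` is '!' or '-', `right` the operand expression; these two
    # names mirror the attributes the test reads, the rest is assumed.
    def __init__(self, token: Token, operator: str,
                 right: Optional[Any] = None) -> None:
        self.token = token
        self.operator = operator
        self.right = right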

    def test_function_call(self) -> None:
        source: str = 'var res = sum(x, y);'
        lexer: Lexer = Lexer(source)

        tokens: List[Token] = []
        for _ in range(10):
            tokens.append(lexer.next_token())

        expected_tokens: List[Token] = [
            Token(TokenType.VAR, 'var'),
            Token(TokenType.IDENT, 'res'),
            Token(TokenType.ASSIGN, '='),
            Token(TokenType.IDENT, 'sum'),
            Token(TokenType.LPAREN, '('),
            Token(TokenType.IDENT, 'x'),
            Token(TokenType.COMMA, ','),
            Token(TokenType.IDENT, 'y'),
            Token(TokenType.RPAREN, ')'),
            Token(TokenType.SEMICOLON, ';'),
        ]

        self.assertEqual(tokens, expected_tokens)

    def test_two_character_operators(self) -> None:
        source: str = '''
        10 == 10;
        10 != 9;
        '''
        lexer: Lexer = Lexer(source)

        tokens: List[Token] = []
        for _ in range(8):
            tokens.append(lexer.next_token())

        expected_tokens: List[Token] = [
            Token(TokenType.INT, '10'),
            Token(TokenType.EQ, '=='),
            Token(TokenType.INT, '10'),
            Token(TokenType.SEMICOLON, ';'),
            Token(TokenType.INT, '10'),
            Token(TokenType.NOT_EQ, '!='),
            Token(TokenType.INT, '9'),
            Token(TokenType.SEMICOLON, ';'),
        ]

        self.assertEqual(tokens, expected_tokens)
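
Recognising '==' and '!=' only requires a one-character lookahead. A self-contained sketch of that peek (the function name is an assumption):

def read_operator(source: str, position: int) -> str:
    # Sketch: on '=' or '!', peek one character ahead; if it is '=',
    # consume it as well to form '==' or '!='.
    ch = source[position]
    if ch in ('=', '!') and position + 1 < len(source) \
            and source[position + 1] == '=':
        return ch + '='
    return ch

For example, read_operator('10 == 10;', 3) returns '==', while read_operator('!5;', 0) returns '!'.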
Example #19
    def test_infix_expressions(self) -> None:
        source: str = '''
        5 + 5;
        5 - 5;
        5 * 5;
        5 / 5;
        5 > 5;
        5 < 5;
        5 == 5;
        5 != 5;
        '''
        lexer: Lexer = Lexer(source)
        parser: Parser = Parser(lexer)

        program: Program = parser.parse_program()

        self._test_program_statements(parser,
                                      program,
                                      expected_statement_count=8)

        expected_operators_and_values: List[Tuple[Any, str, Any]] = [
            (5, '+', 5),
            (5, '-', 5),
            (5, '*', 5),
            (5, '/', 5),
            (5, '>', 5),
            (5, '<', 5),
            (5, '==', 5),
            (5, '!=', 5),
        ]
        for statement, (expected_left, expected_operator,
                        expected_right) in zip(program.statements,
                                               expected_operators_and_values):
            statement = cast(ExpressionStatement, statement)
            assert statement.expression
            self._test_infix_expression(statement.expression, expected_left,
                                        expected_operator, expected_right)
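
The helper _test_infix_expression is not shown in this listing; a sketch consistent with the Prefix test above (helper and node names are assumed) would be:

    def _test_infix_expression(self,
                               expression: object,
                               expected_left: object,
                               expected_operator: str,
                               expected_right: object) -> None:
        # Mirrors the Prefix checks above: verify the left operand,
        # the operator string, then the right operand.
        infix = cast(Infix, expression)
        assert infix.left is not None
        self._test_literal_expression(infix.left, expected_left)
        self.assertEqual(infix.operator, expected_operator)
        assert infix.right is not None
        self._test_literal_expression(infix.right, expected_right)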
Example #20
def get_parser():
    lexer = Lexer()
    return Parser(lexer=lexer)
Example #21
# for t in l:
#     print(t)

# from helpers import imm_converter

# print(imm_converter.imm_12(5))
# print(imm_converter.imm_12(-5))

# print(imm_converter.imm_13_effective(6))
# print(imm_converter.imm_13_effective(-6))

# print(imm_converter.imm_20(5))
# print(imm_converter.imm_20(-5))

l = Lexer()
p = Parser(l)
# for ln in code.split('\n'):
#     if ln:
#         print(p.parse_line(ln + '\n'))

from io import StringIO

istream = StringIO(code)  # `code` holds the assembly source text; it is defined elsewhere in the original script
ostream = StringIO()

p.assemble(istream, ostream)
# print(p.symbol_table)
ostream.seek(0)
print(ostream.read())
Example #22
from src.lexer.lexer import Lexer
from src.parser.opPrecedenceParser import OpPrecedenceParser
import dis
from src.myvm.VirtualMachine import VirtualMachine
from src.code.code import SSCode
if __name__ == '__main__':

    def cc():
        # Small function kept for the bytecode experiments with dis
        # shown in the commented-out lines below.
        a = 10
        b = 20
        c = a - b
    # dis.dis(cc)
    # for each in cc.__code__.co_code:
    #     print(each)
    #
    lexer = Lexer(file)  # `file` is assumed to be defined elsewhere in the original script
    ep = OpPrecedenceParser(lexer)
    sscode = SSCode(ep.expression())
    vm = VirtualMachine()
    vm.run_code(sscode)

    # while True:
    #     cc=lexer.read()
    #     print(cc)
    #     if cc == EOF:
    #         break