Example #1
    def test_scalar_starts_with_zeros(self):
        string = '0'

        # should pass
        try:
            Lexer(StringSource(string))
        except LexerError:
            self.fail()

        string += '0'
        # should fail
        with self.assertRaises(LexerError):
            Lexer(StringSource(string))
Example #2
    def test_too_long_id(self):
        id = ''.join(['a'] * 128)

        # should pass
        try:
            Lexer(StringSource(id))
        except LexerError:
            self.fail()

        # should fail
        id += 'a'
        with self.assertRaises(LexerError) as e:
            Lexer(StringSource(id))
        self.assertEqual(ErrorCode.EXCEED_MAX_ID_SIZE, e.exception.error_code)
Example #3
    def test_microterm(self):
        source = 'a**b;'
        program = Parser(StringSource(source)).parse_program()
        actual = program.objects[0]
        expected = create_expected_binary_operator(
            [(('id', 'a',), ('op', '**',), ('id', 'b',))]
        )
        self.visit_and_compare(expected, actual)

        source = '5**4.5;'
        program = Parser(StringSource(source)).parse_program()
        actual = program.objects[0]
        expected = create_expected_binary_operator(
            [(('scalar', 5.0,), ('op', '**',), ('scalar', 4.5,))]
        )
        self.visit_and_compare(expected, actual)
Example #4
 def test_unknown_single_char_token(self):
     string = '^'
     string_source = StringSource(string)
     with self.assertRaises(LexerError) as e:
         Lexer(string_source)
     self.assertNotEqual(ErrorCode.UNEXPECTED_TOKEN, e.exception.error_code)
     self.assertEqual(ErrorCode.TOKEN_BUILD_FAIL, e.exception.error_code)
Example #5
    def test_double_char_operators(self):
        string = '<= > <= < < >= = < > >= < <= <= < >= <= >= >= != > >='
        tokens = [
            Token(TokenType.LEQ),
            Token(TokenType.GRE),
            Token(TokenType.LEQ),
            Token(TokenType.LESS),
            Token(TokenType.LESS),
            Token(TokenType.GEQ),
            Token(TokenType.ASSIGN),
            Token(TokenType.LESS),
            Token(TokenType.GRE),
            Token(TokenType.GEQ),
            Token(TokenType.LESS),
            Token(TokenType.LEQ),
            Token(TokenType.LEQ),
            Token(TokenType.LESS),
            Token(TokenType.GEQ),
            Token(TokenType.LEQ),
            Token(TokenType.GEQ),
            Token(TokenType.GEQ),
            Token(TokenType.NEQ),
            Token(TokenType.GRE),
            Token(TokenType.GEQ)
        ]
        lexer = Lexer(StringSource(string))

        for expected_token in tokens:
            token = lexer.current_token
            self.assertEqual(expected_token.type, token.type)
            self.assertEqual(expected_token.value, token.value)
            lexer.build_next_token()
Example #6
    def test_something(self):
        string_source = 'i = 100;\n' \
                        'while (a>1){\n' \
                        '   print(a-1);};'

        parser = Parser(StringSource(string_source))
        program = parser.parse_program()
        ast_dumper = AstDumper()
        ast_dumper.add_child(lambda: ast_dumper.visit(program), str(program))
Example #7
 def test_equality_expression(self):
     source = 'a == b != c == 0;'
     program = Parser(StringSource(source)).parse_program()
     actual = program.objects[0]
     expected = create_expected_binary_operator(
         [(('id', 'a',), ('op', '==',), ('id', 'b',)),
          (('prev', None,), ('op', '!=',), ('id', 'c',)),
          (('prev', None,), ('op', '==',), ('scalar', 0.0,))]
     )
     self.visit_and_compare(expected, actual)
Example #8
 def test_term(self):
     source = 'a mod b / c div d * 0;'
     program = Parser(StringSource(source)).parse_program()
     actual = program.objects[0]
     expected = create_expected_binary_operator(
         [(('id', 'a',), ('op', 'mod',), ('id', 'b',)),
          (('prev', None,), ('op', '/',), ('id', 'c',)),
          (('prev', None,), ('op', 'div',), ('id', 'd',)),
          (('prev', None,), ('op', '*',), ('scalar', 0.0,))]
     )
     self.visit_and_compare(expected, actual)
Example #9
 def test_relative_expression(self):
     source = 'a >= b > c < d <= 0;'
     program = Parser(StringSource(source)).parse_program()
     actual = program.objects[0]
     expected = create_expected_binary_operator(
         [(('id', 'a',), ('op', '>=',), ('id', 'b',)),
          (('prev', None,), ('op', '>',), ('id', 'c',)),
          (('prev', None,), ('op', '<',), ('id', 'd',)),
          (('prev', None,), ('op', '<=',), ('scalar', 0.0,))]
     )
     self.visit_and_compare(expected, actual)
Example #10
def should_fail(
    tester,
    string_source,
    expected_error_code=None,
    expected_id=None,
):
    source = StringSource(string_source)
    with tester.assertRaises(InterpreterError) as e:
        Interpreter(source).interpret()
    if expected_error_code is not None:
        tester.assertEqual(expected_error_code, e.exception.error_code)
    if expected_id is not None:
        tester.assertEqual(expected_id, e.exception.id)
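A call site for this helper would pass the test case itself plus the offending source. A minimal sketch, assuming that referencing an undefined identifier raises an InterpreterError carrying that identifier; the source snippet and the assertion are assumptions, not taken from the original suite:

    def test_undefined_id_fails(self):
        # Hypothetical usage of should_fail; whether this source raises and
        # what the exception carries are assumptions.
        should_fail(
            self,
            'print(b);',
            expected_id='b',
        )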
Example #11
def should_fail(tester,
                string_source,
                expected_error_code=None,
                expected_token_type=None,
                expected_description=None):
    parser = Parser(StringSource(string_source))
    with tester.assertRaises(ParserError) as e:
        parser.parse_program()
    if expected_error_code is not None:
        tester.assertEqual(expected_error_code, e.exception.error_code)
    if expected_token_type is not None:
        tester.assertEqual(expected_token_type,
                           e.exception.expected_token_type)
    if expected_description:
        tester.assertEqual(expected_description, e.exception.description)
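As with the interpreter variant, a test delegates the assertion boilerplate to the helper. A minimal sketch that reuses ErrorCode.UNEXPECTED_TOKEN from Example #4; whether a missing semicolon actually maps to that code is an assumption:

    def test_missing_semicolon_fails(self):
        # Hypothetical usage of the parser-level should_fail helper; the
        # mapping of this source to UNEXPECTED_TOKEN is assumed.
        should_fail(self,
                    'a = 1',
                    expected_error_code=ErrorCode.UNEXPECTED_TOKEN)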
Example #12
 def test_arithmetic_expression(self):
     source = 'a+b-c+d+5-4-7+4+3;'
     program = Parser(StringSource(source)).parse_program()
     actual = program.objects[0]
     expected = create_expected_binary_operator(
         [(('id', 'a',), ('op', '+',), ('id', 'b',)),
          (('prev', None,), ('op', '-',), ('id', 'c',)),
          (('prev', None,), ('op', '+',), ('id', 'd',)),
          (('prev', None,), ('op', '+',), ('scalar', 5.0,)),
          (('prev', None,), ('op', '-',), ('scalar', 4.0,)),
          (('prev', None,), ('op', '-',), ('scalar', 7.0,)),
          (('prev', None,), ('op', '+',), ('scalar', 4.0,)),
          (('prev', None,), ('op', '+',), ('scalar', 3.0,))]
     )
     self.visit_and_compare(expected, actual)
Example #13
    def test_etx_on_comment_line(self):
        string = 'not_comment = 1; # a comment'
        lexer = Lexer(StringSource(string))

        tokens = [
            Token(TokenType.ID, 'not_comment'),
            Token(TokenType.ASSIGN),
            Token(TokenType.SCALAR, 1),
            Token(TokenType.SEMI),
            Token(TokenType.ETX)
        ]

        for expected_token in tokens:
            token = lexer.current_token
            self.assertEqual(expected_token.type, token.type)
            self.assertEqual(expected_token.value, token.value)
            lexer.build_next_token()
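Examples #5 and #13 repeat the same compare-and-advance loop over the lexer. If the pattern keeps recurring, it could be pulled into a helper on the test class; a sketch under that assumption (the helper name is not part of the original suite):

    def assert_token_sequence(self, string, expected_tokens):
        # Hypothetical helper: walk the lexer and compare each produced
        # token with the expected one, by type and by value.
        lexer = Lexer(StringSource(string))
        for expected_token in expected_tokens:
            token = lexer.current_token
            self.assertEqual(expected_token.type, token.type)
            self.assertEqual(expected_token.value, token.value)
            lexer.build_next_token()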
Example #14
    def test_something(self):
        string_source = '   fun partition(arr, low, high)' \
                        '   {' \
                        '       i = (low -1);' \
                        '       pivot = arr[high];' \
                        '       for (j in range)' \
                        '           if (arr[j] < pivot)' \
                        '           {' \
                        '               i = i+1;' \
                        '               temp = arr[i];' \
                        '               arr[i] = arr[j];' \
                        '               arr[j] = temp;' \
                        '           }' \
                        '       temp = arr[i+1];' \
                        '       arr[i+1] = arr[high];' \
                        '       arr[high] = temp;' \
                        '       ret (i+1);' \
                        '   }' \
                        '   fun quickSort(arr, low, high)' \
                        '       if (low < high)' \
                        '       {' \
                        '           pi = partition(arr, low, high);' \
                        '           quickSort(arr, low, pi-1);' \
                        '           quickSort(arr, pi+1, high);' \
                        '       }'

        s = 'x[2];\n' \
            'a[1, 2];\n' \
            'b[a];\n' \
            'c[:];\n' \
            'd[:, 1];\n' \
            'e[1, :];\n' \
            'f[:, :];'

        parser = Parser(StringSource(string_source))
        program = parser.parse_program()
        ast_dumper = AstDumper()
        ast_dumper.add_child(lambda: ast_dumper.visit(program), str(program))
Example #15
 def test_parse_program_with_string(self):
     s = 'a = 1 + 2;' \
         's = "some string";'
     Parser(StringSource(s)).parse_program()
Example #16
 def test_etx_in_unfinished_string(self):
     string = '"some random string'
     with self.assertRaises(LexerError):
         lexer = Lexer(StringSource(string))
         while lexer.current_token.type != TokenType.ETX:
             lexer.build_next_token()
Example #17
 def test_assign(self):
     a = 'var = 0;'
     self.assertTrue(
         isinstance(
             Parser(StringSource(a)).try_to_parse_expression(), Assignment))
Example #18
 def parse_matrix(string):
     parser = Parser(StringSource(string))
     return parser.try_to_parse_matrix()
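A call site might look as follows; the matrix literal syntax used here is only an assumption loosely inferred from the indexing in Example #14 and is not confirmed by the project's grammar:

    def test_parse_matrix_returns_node(self):
        # Hypothetical usage; the literal '[1, 2; 3, 4]' is an assumed
        # syntax and only illustrates how parse_matrix would be driven.
        matrix = parse_matrix('[1, 2; 3, 4]')
        self.assertIsNotNone(matrix)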
Example #19
 def test_fix_dump(self):
     s = 'a = 0;'
     parser = Parser(StringSource(s))
     program = parser.parse_program()
     ast_dumper = AstDumper()
     ast_dumper.add_child(lambda: ast_dumper.visit(program), str(program))
Example #20
 def test_simple(self):
     s = '"Hello world";'
     self.assertEqual(String,
                      type(Parser(StringSource(s)).try_to_parse_string()))
Example #21
def interpret(string_source):
    source = StringSource(string_source)
    return Interpreter(source).interpret()
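This is the success-path counterpart to should_fail from Example #10: a program that should run cleanly can be routed through this helper. A minimal sketch reusing the try/fail idiom from Examples #1 and #2; the source snippet itself is an assumption:

    def test_valid_program_interprets(self):
        # Hypothetical usage: interpret should finish without raising
        # InterpreterError for a well-formed program.
        try:
            interpret('a = 1 + 2;\nprint(a);')
        except InterpreterError:
            self.fail()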