Example #1
    def compile_subroutine_call(self):
        # eat identifier
        if not lexical_elements.is_identifier(self.tokenizer.current_token):
            raise CompilationError(
                "Expected valid identifier in subroutine call but found " +
                self.tokenizer.current_token)
        self.eat(self.tokenizer.current_token)

        if self.tokenizer.current_token == "(":
            self.eat("(")
            self.compile_expression_list()
            self.eat(")")
        elif self.tokenizer.current_token == ".":
            self.eat(".")
            # eat subroutine name
            if not lexical_elements.is_identifier(
                    self.tokenizer.current_token):
                raise CompilationError(
                    "Expected valid identifier in subroutine call but found " +
                    self.tokenizer.current_token)
            self.eat(self.tokenizer.current_token)
            self.eat("(")
            self.compile_expression_list()
            self.eat(")")
        else:
            raise CompilationError(
                "Expected '(' or '.' in subroutine call but found " +
                self.tokenizer.current_token)
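The eat helper that all of these snippets depend on is not shown. A minimal sketch of what it plausibly does, assuming the engine writes each terminal through a write_terminal helper (both that helper and the exact signature are assumptions, not confirmed by the source):

    def eat(self, expected_token):
        # Hypothetical sketch: verify the current token matches, emit it
        # as a terminal element, then advance the tokenizer.
        if self.tokenizer.current_token != expected_token:
            raise CompilationError(
                "Expected " + expected_token + " but found " +
                self.tokenizer.current_token)
        tag = self.token_type(expected_token)  # SYMBOL, KEYWORD, ...
        self.write_terminal(tag, expected_token)  # assumed XML writer
        self.tokenizer.advance()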
Example #2
    def compile_subroutine_dec(self):
        while self.tokenizer.current_token in [CONSTRUCTOR, FUNCTION, METHOD]:
            self.openNonTerminal(SUBROUTINE_DEC)

            # eat subroutine type: method, function or constructor
            self.eat(self.tokenizer.current_token)

            # eat subroutine return type
            if self.tokenizer.current_token != VOID and not self.is_valid_type(
                    self.tokenizer.current_token):
                raise CompilationError(
                    "Expected valid subroutine return type or void but was " +
                    self.tokenizer.current_token)
            self.eat(self.tokenizer.current_token)

            # eat subroutine name
            if not lexical_elements.is_identifier(
                    self.tokenizer.current_token):
                raise CompilationError(
                    "Expected valid subroutine name but was " +
                    self.tokenizer.current_token)
            self.eat(self.tokenizer.current_token)

            # compile subroutine parameter list (possibly empty)
            self.compile_parameter_list()

            # compile subroutine body
            self.compile_subroutine_body()

            self.closeNonTerminal(SUBROUTINE_DEC)
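openNonTerminal and closeNonTerminal are likewise outside the snippet. Given the nand2tetris XML output format, a plausible reading is that they emit the opening and closing tags of a non-terminal and track indentation (self.output and self.indent are assumed fields):

    def openNonTerminal(self, name):
        # Hypothetical: write an opening XML tag and indent one level.
        self.output.write("  " * self.indent + "<" + name + ">\n")
        self.indent += 1

    def closeNonTerminal(self, name):
        # Hypothetical: dedent one level and write the closing tag.
        self.indent -= 1
        self.output.write("  " * self.indent + "</" + name + ">\n")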
Example #3
    def compile_term(self):
        self.openNonTerminal(TERM)
        if lexical_elements.is_int_constant(self.tokenizer.current_token):
            # integerConstant
            self.eat(self.tokenizer.current_token)
        elif lexical_elements.is_string_constant(self.tokenizer.current_token):
            # stringConstant
            self.eat(self.tokenizer.current_token)
        elif self.is_keyword_constant(self.tokenizer.current_token):
            # keywordConstant: true, false, null, this
            self.eat(self.tokenizer.current_token)
        elif self.tokenizer.current_token == "(":
            # '(' expression ')'
            self.eat("(")
            self.compile_expression()
            self.eat(")")
        elif self.tokenizer.current_token in ("-", "~"):
            # unaryOp term
            self.eat(self.tokenizer.current_token)
            self.compile_term()
        elif lexical_elements.is_identifier(self.tokenizer.current_token):
            if self.tokenizer.peek() == "[":
                # varName '[' expression ']'
                self.eat(self.tokenizer.current_token)
                self.eat("[")
                self.compile_expression()
                self.eat("]")
            elif self.tokenizer.peek() in ("(", "."):
                # subroutineCall
                self.compile_subroutine_call()
            else:
                # plain varName
                self.eat(self.tokenizer.current_token)
        self.closeNonTerminal(TERM)
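compile_term needs one token of lookahead to tell an array access, a subroutine call, and a plain variable apart. The peek method used above is not shown; a sketch under the assumption that the tokenizer holds a token list and a current index (both names hypothetical):

    def peek(self):
        # Hypothetical: return the next token without consuming it.
        if self.index + 1 < len(self.tokens):
            return self.tokens[self.index + 1]
        return None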
Example #4
    def compile_class(self):
        self.tokenizer.advance()
        self.openNonTerminal(CLASS)

        # eat 'class' keyword
        self.eat(CLASS)

        # eat class name
        if not lexical_elements.is_identifier(self.tokenizer.current_token):
            raise CompilationError(
                "Class name must be a valid identifier but was " +
                self.tokenizer.current_token)
        self.eat(self.tokenizer.current_token)

        # eat opening brace
        self.eat("{")

        # compile class variable declarations
        self.compile_class_var_dec()

        # compile class subroutines
        self.compile_subroutine_dec()

        # eat closing brace
        self.eat("}")

        self.closeNonTerminal(CLASS)
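compile_class is the entry point of the recursive descent, since a Jack file contains exactly one class. A hypothetical driver (the class names and constructor signatures here are assumptions, not from the source):

    tokenizer = Tokenizer("Main.jack")
    engine = CompilationEngine(tokenizer, "Main.xml")
    engine.compile_class()  # parses the entire file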
Example #5
    def eat_var_sequence(self):
        self.eat_var_dec()
        while self.tokenizer.current_token == ",":
            self.eat(",")
            if not lexical_elements.is_identifier(
                    self.tokenizer.current_token):
                raise CompilationError(
                    "Expected valid variable name but was " +
                    self.tokenizer.current_token)
            self.eat(self.tokenizer.current_token)  # eat varName
Example #6
    def token_type(self, token):
        if lexical_elements.is_symbol(token):
            return SYMBOL
        if lexical_elements.is_string_constant(token):
            return STRING_CONSTANT
        if lexical_elements.is_keyword(token):
            return KEYWORD
        if lexical_elements.is_int_constant(token):
            return INTEGER_CONSTANT
        if lexical_elements.is_identifier(token):
            return IDENTIFIER
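A token that matches no category falls through and token_type returns None, which callers presumably treat as a lexical error. Expected classifications under standard Jack lexical rules, assuming the tokenizer keeps the surrounding quotes on string constants:

    # token_type("{")       -> SYMBOL
    # token_type("class")   -> KEYWORD
    # token_type("42")      -> INTEGER_CONSTANT
    # token_type('"hi"')    -> STRING_CONSTANT
    # token_type("x1")      -> IDENTIFIER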
Example #7
    def eat_var_dec(self):
        # eat var type
        if not self.is_valid_type(self.tokenizer.current_token):
            raise CompilationError("Expected valid var type but found " +
                                   self.tokenizer.current_token)
        self.eat(self.tokenizer.current_token)
        # eat var name
        if not lexical_elements.is_identifier(self.tokenizer.current_token):
            raise CompilationError("Expected valid var name but found " +
                                   self.tokenizer.current_token)
        self.eat(self.tokenizer.current_token)
Example #8
    def compile_let_statement(self):
        self.openNonTerminal(LET_STATEMENT)
        # eat 'let' keyword
        self.eat(LET)

        # eat variable name
        if not lexical_elements.is_identifier(self.tokenizer.current_token):
            raise CompilationError("Expected valid variable name but was " +
                                   self.tokenizer.current_token)
        self.eat(self.tokenizer.current_token)

        # eat potential array indexing expression
        if self.tokenizer.current_token == "[":
            self.eat("[")
            self.compile_expression()
            self.eat("]")

        self.eat("=")
        self.compile_expression()
        self.eat(";")
        self.closeNonTerminal(LET_STATEMENT)
Example #9
    def compile_term(self):
        if lexical_elements.is_int_constant(self.tokenizer.current_token):
            self.compile_int_constant()
        elif lexical_elements.is_string_constant(self.tokenizer.current_token):
            self.compile_string_constant()
        elif self.tokenizer.is_keyword_constant(self.tokenizer.current_token):
            self.compile_keyword_constant()
        elif self.tokenizer.current_token == "(":
            self.eat("(")
            self.compile_expression()
            self.eat(")")
        elif self.tokenizer.current_token in ("-", "~"):
            self.compile_unary()
        elif lexical_elements.is_identifier(self.tokenizer.current_token):
            if self.tokenizer.peek() == "[":
                self.compile_array_expression()
            elif self.tokenizer.peek() in ("(", "."):
                self.compile_subroutine_call()
            else:
                # plain varName: push its value from the mapped VM segment
                var_name = self.tokenizer.current_token
                self.eat(var_name)
                self.vm_writer.write_push(
                    segment_map[self.st.kind_of(var_name)],
                    self.st.index_of(var_name))
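This variant of compile_term emits VM code directly instead of XML. segment_map and the symbol table st live outside the snippet; under the standard Jack-to-VM convention, the kind-to-segment mapping would plausibly be (the kind-name keys are assumptions):

    # Hypothetical kind-to-segment table, per the usual nand2tetris mapping.
    segment_map = {
        "static": "static",    # class-level static variables
        "field": "this",       # instance fields
        "arg": "argument",     # subroutine arguments
        "var": "local",        # subroutine locals
    }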
Example #10
    def is_valid_term(self, token):
        return (lexical_elements.is_identifier(token)
                or lexical_elements.is_int_constant(token)
                or lexical_elements.is_string_constant(token)
                or self.is_keyword_constant(token)
                or token in ("(", "~", "-"))
Example #11
    def is_valid_type(self, token):
        return (token in [INT, CHAR, BOOLEAN]
                or lexical_elements.is_identifier(token))
Example #12
    def test_identifier(self):
        self.assertTrue(lexical_elements.is_identifier("A"))
        self.assertTrue(lexical_elements.is_identifier("AAA"))
        self.assertTrue(lexical_elements.is_identifier("a"))
        self.assertTrue(lexical_elements.is_identifier("abc"))
        self.assertTrue(lexical_elements.is_identifier("_AbC"))
        self.assertTrue(lexical_elements.is_identifier("A_Bc"))
        self.assertTrue(lexical_elements.is_identifier("A123"))
        self.assertTrue(lexical_elements.is_identifier("A_1_2_B"))
        self.assertTrue(lexical_elements.is_identifier("_1"))
        self.assertTrue(lexical_elements.is_identifier("_1wroi"))
        self.assertTrue(lexical_elements.is_identifier("_123"))
        self.assertTrue(lexical_elements.is_identifier("ABab_12345"))

        self.assertFalse(lexical_elements.is_identifier("1A"))
        self.assertFalse(lexical_elements.is_identifier("1 A"))
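The assertions above pin down the identifier rule: a leading letter or underscore, followed by any mix of letters, digits, and underscores, with no whitespace. A regex implementation consistent with every test case (a sketch, not necessarily the module's actual code):

    import re

    _IDENTIFIER_RE = re.compile(r"^[A-Za-z_][A-Za-z0-9_]*$")

    def is_identifier(token):
        # Satisfies every assertion in test_identifier above.
        return _IDENTIFIER_RE.match(token) is not None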