예제 #1
0
class Parser(object):
    """Recursive-descent parser that builds an AST for an expression.

    Grammar handled (lowest to highest precedence):
        program -> expr (SEPARATOR expr)* EOF
        expr    -> term ((PLUS | MINUS) term)*
        term    -> power ((PRODUCT | DIVISION) power)*
        power   -> base (POWER base)*
        base    -> INTEGER | FLOAT | LGROUP expr RGROUP | MINUS base

    Relies on Scanner, Tokenizer, Token and the AST node classes
    (BinOp, UnOp, Literal) defined elsewhere in the project.
    On success, generate_tree() stores the AST root in self.root.
    """

    def __init__(self, expression):
        """Tokenize *expression* eagerly and cache the token list."""
        self.sc = Scanner(expression)
        self.tok = Tokenizer(self.sc)
        self.tokens = None
        self.tokens = self.get_token_sequence()
        self.root = None

    def get_token_sequence(self):
        """Return a copy of the full token sequence, ending with EOF.

        The sequence is computed only on the first call; afterwards a
        copy of the cached list is returned.
        """
        if self.tokens is not None:
            return self.tokens[::]

        self.tokens = []

        self.tokens.append(self.tok.get_next_token())
        while self.tokens[-1].get_type() != Token.EOF:
            self.tokens.append(self.tok.get_next_token())

        return self.tokens[::]

    def generate_tree(self):
        """Build the AST for the whole program and store it in self.root.

        Raises (via self.error) if the token stream does not end with
        the expected EOF token.
        """
        # get the expression subtree
        subtree = self.get_expr()

        while self.tokens[0].get_type() is Token.SEPARATOR:
            tok = self.tokens[0]
            # the subtree we already have becomes the left node
            self.tokens = self.tokens[1:]
            # if we got to the end there is no right child
            if self.tokens[0].get_type() is Token.EOF:
                right = None
            # fetch the subtree to the right of the separator node
            else:
                right = self.get_expr()

            subtree = BinOp(tok,
                            parent=None,
                            left_child=subtree,
                            right_child=right)

        # the subtree we got so far is the only child of the EOF node
        if self.tokens[0].get_type() is Token.EOF:
            # fixed: wrap the EOF *token object* (not the Token.EOF type
            # constant) so the root holds a real token, consistent with
            # every other UnOp/BinOp construction in this class
            self.root = UnOp(self.tokens[0], parent=None, child=subtree)
        # something weird happened! :D
        else:
            self.error("Program did not terminate with expected EOF")

    def get_expr(self):
        """Parse `term ((PLUS | MINUS) term)*`; return its subtree."""
        subtree = self.get_term()

        while self.tokens[0].get_type() in (Token.PLUS, Token.MINUS):
            tok = self.tokens[0]
            self.tokens = self.tokens[1:]
            # set the left subtree, get the right child
            subtree = BinOp(tok,
                            parent=None,
                            left_child=subtree,
                            right_child=self.get_term())

        return subtree

    def get_term(self):
        """Parse `power ((PRODUCT | DIVISION) power)*`; return its subtree."""
        subtree = self.get_power()

        # do we have a * or / operator? if so, create its node
        while self.tokens[0].get_type() in (Token.PRODUCT, Token.DIVISION):
            tok = self.tokens[0]
            self.tokens = self.tokens[1:]
            # set the left subtree, get the right child
            subtree = BinOp(tok,
                            parent=None,
                            left_child=subtree,
                            right_child=self.get_power())

        return subtree

    def get_power(self):
        """Parse `base (POWER base)*`; return its subtree."""
        subtree = self.get_base()

        # do we have a power operator here?
        while self.tokens[0].get_type() is Token.POWER:
            tok = self.tokens[0]
            self.tokens = self.tokens[1:]
            # if so, create its node and set the left subtree
            # get the right child subtree
            subtree = BinOp(tok,
                            parent=None,
                            left_child=subtree,
                            right_child=self.get_base())

        return subtree

    def get_base(self):
        """Parse a literal, a parenthesized expression, or a unary minus."""
        tok = self.tokens[0]
        self.tokens = self.tokens[1:]

        # if we find a ( we must get the corresponding subtree
        if tok.get_type() in (Token.INTEGER, Token.FLOAT):
            return Literal(tok)
        elif tok.get_type() is Token.LGROUP:
            # get the subtree
            subtree = self.get_expr()
            tok = self.tokens[0]
            self.tokens = self.tokens[1:]
            # right enclosing parenthesis was found, return subtree
            if tok.get_type() is not Token.RGROUP:
                self.error("Unmatched left parenthesis")
            # fixed: previously `return base`, an undefined name (NameError)
            return subtree
        elif tok.get_type() is Token.MINUS:
            return UnOp(tok, parent=None, child=self.get_base())
        else:
            self.error("Unexpected {} Token".format(tok.get_type()))

    def error(self, msg):
        """Abort parsing by raising an exception with *msg*."""
        raise Exception(msg)

    def show(self):
        """Pretty-print the AST rooted at self.root for debugging."""
        strings = self.root.get_strings()

        def simplify(l, depth):
            # first element is the node label; the rest are child lists
            s = l[0]
            indent = "  " * depth
            for ele in l[1:]:
                if ele:
                    s += "\n  " + indent + " |:" + simplify(ele, depth + 1)

            return s

        print(simplify(strings, 0))
예제 #2
0
파일: docTokenizer.py 프로젝트: RojerGS/Roj
# ask for a file name
# read it
# tokenize it

from tokenizer import Tokenizer, Token
from scanner import Scanner

# Prompt for a source file, read it whole, then dump its token stream.
filename = input(">> ")
with open(filename, "r") as source:
    program_text = source.read()

scanner = Scanner(program_text)
tokenizer = Tokenizer(scanner)

# Print every token up to (but not including) the terminating EOF token.
current = tokenizer.get_next_token()
while current.get_type() != Token.EOF:
    print(current)
    current = tokenizer.get_next_token()
예제 #3
0
파일: ASTgenerator.py 프로젝트: RojerGS/Roj
class Parser(object):
    """Recursive-descent parser that builds an AST for an expression.

    Grammar handled (lowest to highest precedence):
        program -> expr (SEPARATOR expr)* EOF
        expr    -> term ((PLUS | MINUS) term)*
        term    -> power ((PRODUCT | DIVISION) power)*
        power   -> base (POWER base)*
        base    -> INTEGER | FLOAT | LGROUP expr RGROUP | MINUS base

    Relies on Scanner, Tokenizer, Token and the AST node classes
    (BinOp, UnOp, Literal) defined elsewhere in the project.
    On success, generate_tree() stores the AST root in self.root.
    """

    def __init__(self, expression):
        """Tokenize *expression* eagerly and cache the token list."""
        self.sc = Scanner(expression)
        self.tok = Tokenizer(self.sc)
        self.tokens = None
        self.tokens = self.get_token_sequence()
        self.root = None

    def get_token_sequence(self):
        """Return a copy of the full token sequence, ending with EOF.

        The sequence is computed only on the first call; afterwards a
        copy of the cached list is returned.
        """
        if self.tokens is not None:
            return self.tokens[::]

        self.tokens = []

        self.tokens.append(self.tok.get_next_token())
        while self.tokens[-1].get_type() != Token.EOF:
            self.tokens.append(self.tok.get_next_token())

        return self.tokens[::]

    def generate_tree(self):
        """Build the AST for the whole program and store it in self.root.

        Raises (via self.error) if the token stream does not end with
        the expected EOF token.
        """
        # get the expression subtree
        subtree = self.get_expr()

        while self.tokens[0].get_type() is Token.SEPARATOR:
            tok = self.tokens[0]
            # the subtree we already have becomes the left node
            self.tokens = self.tokens[1:]
            # if we got to the end there is no right child
            if self.tokens[0].get_type() is Token.EOF:
                right = None
            # fetch the subtree to the right of the separator node
            else:
                right = self.get_expr()

            subtree = BinOp(tok, parent=None, left_child=subtree,
                                                right_child=right)

        # the subtree we got so far is the only child of the EOF node
        if self.tokens[0].get_type() is Token.EOF:
            # fixed: wrap the EOF *token object* (not the Token.EOF type
            # constant) so the root holds a real token, consistent with
            # every other UnOp/BinOp construction in this class
            self.root = UnOp(self.tokens[0], parent=None, child=subtree)
        # something weird happened! :D
        else:
            self.error("Program did not terminate with expected EOF")

    def get_expr(self):
        """Parse `term ((PLUS | MINUS) term)*`; return its subtree."""
        subtree = self.get_term()

        while self.tokens[0].get_type() in (Token.PLUS,
                                            Token.MINUS):
            tok = self.tokens[0]
            self.tokens = self.tokens[1:]
            # set the left subtree, get the right child
            subtree = BinOp(tok, parent=None,
                        left_child=subtree, right_child=self.get_term())

        return subtree

    def get_term(self):
        """Parse `power ((PRODUCT | DIVISION) power)*`; return its subtree."""
        subtree = self.get_power()

        # do we have a * or / operator? if so, create its node
        while self.tokens[0].get_type() in (Token.PRODUCT,
                                            Token.DIVISION):
            tok = self.tokens[0]
            self.tokens = self.tokens[1:]
            # set the left subtree, get the right child
            subtree = BinOp(tok, parent=None,
                    left_child=subtree, right_child=self.get_power())

        return subtree

    def get_power(self):
        """Parse `base (POWER base)*`; return its subtree."""
        subtree = self.get_base()

        # do we have a power operator here?
        while self.tokens[0].get_type() is Token.POWER:
            tok = self.tokens[0]
            self.tokens = self.tokens[1:]
            # if so, create its node and set the left subtree
            # get the right child subtree
            subtree = BinOp(tok, parent=None,
                            left_child=subtree, right_child=self.get_base())

        return subtree

    def get_base(self):
        """Parse a literal, a parenthesized expression, or a unary minus."""
        tok = self.tokens[0]
        self.tokens = self.tokens[1:]

        # if we find a ( we must get the corresponding subtree
        if tok.get_type() in (Token.INTEGER, Token.FLOAT):
            return Literal(tok)
        elif tok.get_type() is Token.LGROUP:
            # get the subtree
            subtree = self.get_expr()
            tok = self.tokens[0]
            self.tokens = self.tokens[1:]
            # right enclosing parenthesis was found, return subtree
            if tok.get_type() is not Token.RGROUP:
                self.error("Unmatched left parenthesis")
            # fixed: previously `return base`, an undefined name (NameError)
            return subtree
        elif tok.get_type() is Token.MINUS:
            return UnOp(tok, parent=None, child=self.get_base())
        else:
            self.error("Unexpected {} Token".format(tok.get_type()))

    def error(self, msg):
        """Abort parsing by raising an exception with *msg*."""
        raise Exception(msg)

    def show(self):
        """Pretty-print the AST rooted at self.root for debugging."""
        strings = self.root.get_strings()

        def simplify(l, depth):
            # first element is the node label; the rest are child lists
            s = l[0]
            indent = "  " * depth
            for ele in l[1:]:
                if ele:
                    s += "\n  " + indent + " |:" + simplify(ele, depth+1)

            return s

        print(simplify(strings, 0))
예제 #4
0
파일: ASTParser.py 프로젝트: RojerGS/Roj
class Parser(object):
    """Implement a recursive-descent parser;
    The parser will attempt to build an AST of the program;
    Its only argument is the string to be parsed; Upon success, it will
        store the AST's root (the EOF Node) in self.root.
    The show() method provides pretty tree-like printing for debugging"""
    # Convention used by every get_* rule below: each takes the current
    # token list, and returns a pair (subtree, remaining_token_list).
    # On failure it returns (None, original_token_list) so that the
    # caller can backtrack and try a different production.
    def __init__(self, expression):
        """Initialize the parser by generating the token sequence"""
        self.sc = Scanner(expression)
        self.tok = Tokenizer(self.sc)
        self.tokens = None
        self.tokens = self.get_token_sequence()
        self.root = None

    def get_token_sequence(self):
        """Generate the whole token sequence for the given program"""
        # prevent redundant calls
        if self.tokens is not None:
            return self.tokens[::]

        self.tokens = []

        self.tokens.append(self.tok.get_next_token())
        while self.tokens[-1].get_type() != Token.EOF:
            self.tokens.append(self.tok.get_next_token())

        return self.tokens[::]

    def get_program(self):
        """Entry point to the parser; The method will raise an error
            if it is not able to build an AST Tree for the program"""
        token_list = self.tokens[::]
        subtree, token_list = self.get_suite(token_list)
        if subtree is None:
            raise ParserException("Could not parse the program correctly")

        if token_list[0].get_type() != Token.EOF:
            raise ParserException("Could not parse the program")
        else:
            eof = token_list[0]
            self.root = UnOp(eof, parent=None, child=subtree)

    def get_suite(self, token_list):
        """Parse `stmt (SEPARATOR stmt)*`, chaining statements into a
        left-leaning tree of SEPARATOR BinOp nodes."""
        tokens = token_list[::]
        subtree, token_list = self.get_stmt(token_list)

        while token_list and token_list[0].get_type() is Token.SEPARATOR:
            tok = token_list[0]
            token_list = token_list[1:]
            if token_list[0].get_type() is Token.EOF:
                # trailing separator: use a Null literal as right child
                right = Literal(Token(Token.NULL, "Null"), parent=None)
            else:
                right, token_list = self.get_stmt(token_list)

            if right is None:
                break
            subtree = BinOp(tok, parent=None, left_child=subtree,
                                                right_child=right)

        if subtree is None:
            return None, tokens
        return subtree, token_list

    def get_stmt(self, token_list):
        """Parse a single statement, trying each statement kind in
        order: io | control | assignment | compound | expression."""
        tokens = token_list[::]
        subtree, token_list = self.get_io_stmt(token_list)

        if subtree is None:
            # could not get an IO statement, try a control
            subtree, token_list = self.get_control(token_list)
        if subtree is None:
            # could not get a control statement, try an assignment
            subtree, token_list = self.get_assignment(token_list)
        if subtree is None:
            # could not get an assignment, try a compound stmt
            subtree, token_list = self.get_compound(token_list)
        if subtree is None:
            # could not get a compound stmt, try an expression
            subtree, token_list = self.get_expression(token_list)
        if subtree is None:
            return None, tokens

        return subtree, token_list

    def get_io_stmt(self, token_list):
        """Parse an input or an output statement."""
        tokens = token_list[::]

        subtree, token_list = self.get_in_stmt(token_list)
        if subtree is None:
            subtree, token_list = self.get_out_stmt(token_list)
        if subtree is None:
            return None, tokens

        return subtree, token_list

    def get_control(self, token_list):
        """Parse a control-flow statement (stop/return/jumpover/halt);
        RETURN and HALT may carry an expression, defaulting to Null."""
        tokens = token_list[::]

        if token_list[0].get_type() not in [Token.STOP, Token.RETURN,
                                    Token.JUMPOVER, Token.HALT]:
            return None, tokens
        tok = token_list[0]
        token_list = token_list[1:]
        if tok.get_type() in [Token.STOP, Token.JUMPOVER]:
            sub = Literal(Token(Token.NULL, "Null"))
        else:
            sub, token_list = self.get_expression(token_list)
            if sub is None:
                sub = Literal(Token(Token.NULL, "Null"))

        return Control(tok, parent=None, child=sub), token_list

    def get_in_stmt(self, token_list):
        """Parse `READ* user_var`; errors out if no variable follows."""
        tokens = token_list[::]

        if token_list[0].get_type() in [Token.READ, Token.READINT,
                                    Token.READFLOAT, Token.READBOOL]:
            in_tok = token_list[0]
            token_list = token_list[1:]
        else:
            return None, tokens

        if token_list[0].get_type() == Token.USER_VAR:
            var_tok = token_list[0]
            var_node = Variable(var_tok)
            token_list = token_list[1:]
        else:
            self.error("After READ a user variable is expected")

        subtree = IOOp(in_tok, parent=None, child=var_node)
        return subtree, token_list

    def get_out_stmt(self, token_list):
        """Parse `OUT expression`; errors out on a malformed expression."""
        tokens = token_list[::]

        if token_list[0].get_type() == Token.OUT:
            out_tok = token_list[0]
            token_list = token_list[1:]
            subtree, token_list = self.get_expression(token_list)
            if subtree is None:
                self.error("Could not parse an 'out' statement")
        else:
            return None, tokens

        subtree = IOOp(out_tok, parent=None, child=subtree)
        return subtree, token_list

    def get_assignment(self, token_list):
        """Parse `user_var = (assignment | expression)`; assignments are
        right-associative via the recursive call."""
        tokens = token_list[::]

        if token_list[0].get_type() != Token.USER_VAR:
            return None, tokens
        # create the variable node
        var_node = Variable(token_list[0], parent=None)
        # found a user_var, check for a Token.ASSIGNMENT
        token_list = token_list[1:]
        if token_list[0].get_type() != Token.ASSIGNMENT:
            return None, tokens
        tok = token_list[0]
        token_list = token_list[1:]

        # get the right node; is it another assignment?
        right, token_list = self.get_assignment(token_list)
        if right is None:
            right, token_list = self.get_expression(token_list)
        if right is None:
            return None, tokens
        return BinOp(tok, parent=None, left_child=var_node,
                                    right_child=right), token_list

    def get_compound(self, token_list):
        """Parse a compound statement: a while loop or an if/else."""
        tokens = token_list[::]

        # try to get a while compound
        subtree, token_list = self.get_while(token_list)
        if subtree is None:
            subtree, token_list = self.get_if(token_list)
        if subtree is None:
            return None, tokens
        return subtree, token_list

    def get_while(self, token_list):
        """Parse `WHILE expression DO suite END`."""
        tokens = token_list[::]

        if token_list[0].get_type() != Token.WHILE:
            return None, tokens

        tok = token_list[0]
        token_list = token_list[1:]
        expression, token_list = self.get_expression(token_list)
        if expression is None:
            return None, tokens

        if token_list[0].get_type() != Token.DO:
            return None, tokens
        token_list = token_list[1:]
        suite, token_list = self.get_suite(token_list)
        if suite is None:
            return None, tokens

        if token_list[0].get_type() != Token.END:
            return None, tokens
        token_list = token_list[1:]

        return CompStmt(tok, parent=None, left_child=expression,
                                        right_child=suite), token_list

    def get_if(self, token_list):
        """Parse `IF expression DO suite END [ELSE DO suite END]`.

        The resulting tree is always IF(expression, ELSE(if_suite,
        else_suite)); when no ELSE clause is present a synthetic ELSE
        token is created with only the if-suite as its left child.
        """
        tokens = token_list[::]

        if token_list[0].get_type() != Token.IF:
            return None, tokens

        if_tok = token_list[0]
        token_list = token_list[1:]
        expression, token_list = self.get_expression(token_list)
        if expression is None:
            return None, tokens

        if token_list[0].get_type() != Token.DO:
            return None, tokens
        token_list = token_list[1:]
        if_suite, token_list = self.get_suite(token_list)
        if if_suite is None:
            return None, tokens

        if token_list[0].get_type() != Token.END:
            return None, tokens
        token_list = token_list[1:]

        if token_list[0].get_type() != Token.ELSE:
            else_tok = None
        else:
            else_tok = token_list[0]
            token_list = token_list[1:]
            if token_list[0].get_type() != Token.DO:
                return None, tokens
            token_list = token_list[1:]

            else_suite, token_list = self.get_suite(token_list)
            if else_suite is None:
                return None, tokens

            if token_list[0].get_type() != Token.END:
                return None, tokens
            token_list = token_list[1:]

        if else_tok is None:
            # no else clause found: fabricate one with an empty branch
            else_tok = Token(Token.ELSE, "else")
            else_node = CompStmt(else_tok, parent=None,
                                left_child=if_suite, right_child=None)
        else:
            else_node = CompStmt(else_tok, parent=None,
                            left_child=if_suite, right_child=else_suite)

        if_node = CompStmt(if_tok, parent=None,
                        left_child=expression, right_child=else_node)
        return if_node, token_list

    def get_expression(self, token_list):
        """Parse an expression (currently just an or-test)."""
        tokens = token_list[::]
        subtree, token_list = self.get_or_test(token_list)

        if subtree is None:
            return None, tokens
        return subtree, token_list

    def get_or_test(self, token_list):
        """Parse `and_test (OR and_test)*`."""
        tokens = token_list[::]
        subtree, token_list = self.get_and_test(token_list)

        if subtree is None:
            return None, tokens

        while token_list[0].get_type() == Token.OR:
            tok = token_list[0]
            token_list = token_list[1:]
            right, token_list = self.get_and_test(token_list)
            if right is None:
                return None, tokens
            else:
                subtree = BoolBinOp(tok, parent=None, left_child=subtree,
                                                    right_child=right)

        return subtree, token_list

    def get_and_test(self, token_list):
        """Parse `not_test (AND not_test)*`."""
        tokens = token_list[::]
        subtree, token_list = self.get_not_test(token_list)

        if subtree is None:
            return None, tokens

        while token_list[0].get_type() == Token.AND:
            tok = token_list[0]
            token_list = token_list[1:]
            right, token_list = self.get_not_test(token_list)
            if right is None:
                return None, tokens
            else:
                subtree = BoolBinOp(tok, parent=None, left_child=subtree,
                                                    right_child=right)

        return subtree, token_list

    def get_not_test(self, token_list):
        """Parse `NEGATION* comparison`; negations nest recursively."""
        if token_list[0].get_type() != Token.NEGATION:
            return self.get_comparison(token_list)

        tokens = token_list[::]
        tok = token_list[0]
        token_list = token_list[1:]
        subtree, token_list = self.get_not_test(token_list)
        if subtree is None:
            return None, tokens
        else:
            return UnOp(tok, parent=None, child=subtree), token_list

    def get_comparison(self, token_list):
        """Parse `arith (comp_op arith)*` with the six comparison ops."""
        tokens = token_list[::]
        subtree, token_list = self.get_arith(token_list)

        if subtree is None:
            return None, tokens

        comp_ops = [Token.EQUALITY, Token.INEQUALITY, Token.GREATER,
                    Token.LESSER, Token.GREATEREQUAL, Token.LESSEREQUAL]
        while token_list[0].get_type() in comp_ops:
            op = token_list[0]
            token_list = token_list[1:]
            right, token_list = self.get_arith(token_list)
            if right is None:
                return None, tokens
            else:
                subtree = CompBinOp(op, parent=None, left_child=subtree,
                                                    right_child=right)

        return subtree, token_list

    def get_arith(self, token_list):
        """Parse `term ((PLUS | MINUS) term)*`."""
        tokens = token_list[::]
        subtree, token_list = self.get_term(token_list)

        if subtree is None:
            return None, tokens

        while token_list[0].get_type() in [Token.PLUS, Token.MINUS]:
            op = token_list[0]
            token_list = token_list[1:]
            right, token_list = self.get_term(token_list)
            if right is None:
                return None, tokens
            else:
                subtree = ArithBinOp(op, parent=None, left_child=subtree,
                                                    right_child=right)

        return subtree, token_list

    def get_term(self, token_list):
        """Parse `factor ((PRODUCT | DIVISION) factor)*`."""
        tokens = token_list[::]
        subtree, token_list = self.get_factor(token_list)

        if subtree is None:
            return None, tokens

        while token_list[0].get_type() in [Token.PRODUCT, Token.DIVISION]:
            op = token_list[0]
            token_list = token_list[1:]
            right, token_list = self.get_factor(token_list)
            if right is None:
                return None, tokens
            else:
                subtree = ArithBinOp(op, parent=None, left_child=subtree,
                                                    right_child=right)

        return subtree, token_list

    def get_factor(self, token_list):
        """Parse `atom (POWER atom)*`."""
        tokens = token_list[::]
        subtree, token_list = self.get_atom(token_list)

        if subtree is None:
            return None, tokens

        while token_list[0].get_type() == Token.POWER:
            op = token_list[0]
            token_list = token_list[1:]
            right, token_list = self.get_atom(token_list)
            if right is None:
                return None, tokens
            else:
                subtree = ArithBinOp(op, parent=None, left_child=subtree,
                                                    right_child=right)

        return subtree, token_list

    def get_atom(self, token_list):
        """Parse an atom: signed atom, literal, variable or `( expr )`."""
        tokens = token_list[::]

        # NOTE(review): Token.BOOL appears twice in this list — harmless
        # for a membership test, but redundant
        literals = [Token.BOOL, Token.INTEGER, Token.FLOAT,
                    Token.NULL, Token.STRING, Token.BOOL]

        if token_list[0].get_type() in [Token.PLUS, Token.MINUS]:
            op = token_list[0]
            token_list = token_list[1:]
            subtree, token_list = self.get_atom(token_list)
            if subtree is None:
                return None, tokens
            else:
                return UnOp(op, parent=None, child=subtree), token_list

        elif token_list[0].get_type() in literals:
            return Literal(token_list[0]), token_list[1:]

        elif token_list[0].get_type() == Token.USER_VAR:
            return Variable(token_list[0]), token_list[1:]

        elif token_list[0].get_type() == Token.LGROUP:
            subtree, token_list = self.get_expression(token_list[1:])
            if subtree is None or token_list[0].get_type() != Token.RGROUP:
                return None, tokens
            else:
                return subtree, token_list[1:]

        else:
            # token_list is unchanged on this path, so this is the usual
            # (None, original_tokens) backtrack
            return None, token_list

    def error(self, msg):
        """Report *msg* and terminate the interpreter."""
        print(msg)
        exit()

    def show(self):
        """Pretty-print the AST rooted at self.root for debugging."""
        strings = self.root.get_strings()

        def simplify(l, depth):
            # first element is the node label; the rest are child lists
            s = l[0]
            indent = "  " * depth
            for ele in l[1:]:
                if ele:
                    s += "\n  " + indent + " |:" + simplify(ele, depth+1)

            return s

        print(simplify(strings, 0))
예제 #5
0
import sys  # fixed: sys.argv is used below but sys was never imported

from tokenizer import Tokenizer, Token  # Importing the Tokenizer class custom written by us (the team)

# Lex the file named by the first command-line argument and report any
# lexical errors together with their line numbers.

# Checking if file to lex is provided as a commandline argument
if len(sys.argv) < 2:
    print("Please enter filename as commandline argument")
    print("Exiting...")
    exit()

# Creating an instance of a new Tokenizer Class that contains the lexer.
my_tokenizer = Tokenizer(sys.argv[1])

print()

while True:
    # Getting next token from input file until we hit EOF
    token = my_tokenizer.get_next_token()
    if token.token == "EOF":
        break

    #########################################################################################################
    # If error in Token, it is printed on the standard output stream.
    # We identify 4 different classes of errors
    #     -invalid chars in string literals =================================================>> "string_error"
    #     -invalid char found in input stream ===============================================>> "char_error"
    #     -invalid floating point notation ==================================================>> "float_error"
    #     -miscellaneous errors (mostly detects only string tokens with missing closing ") ==>> "Invalid_Token"
    #########################################################################################################

    if token.token == "string_error" or token.token == "char_error" or token.token == "float_error" or token.token == "Invalid_Token":
        print("Error encountered at line " + str(token.line))
    # Printing received token to output stream
    # NOTE(review): the comment above promises a print of the token, but
    # no such statement is visible here — it may have been lost when this
    # snippet was extracted; TODO confirm against the original file
class Parser():
    """Recursive-descent parser that *evaluates* an arithmetic
    expression on the fly and returns the result as a float.

    Relies on module-level names defined elsewhere in the file:
    `logger` (diagnostics) and `sym_tab` (symbol table mapping
    identifiers to values and to predefined functions) —
    NOTE(review): assumed from usage below; confirm against the rest
    of the file.  Errors are reported through `logger.error` and
    evaluation continues with a default result of 0.
    """
    def __init__(self, expr_str: str):
        # the Tokenizer is consumed lazily, one token at a time
        self.tokenizer = Tokenizer(expr_str)

    def parse(self) -> float:
        """Prime the first token and evaluate the whole expression."""
        self.tok = self.tokenizer.get_next_token()
        return self._Expr()

    def _Expr(self) -> float:
        """
            Expr  --> AddExpr
        """
        logger.debug("In Expr with {}".format(self.tok.lexeme))
        result = 0
        # Check for first set
        if self.tok.token in [
                Token.TOKPLUS, Token.TOKMINUS, Token.TOKIDENT, Token.TOKLPAREN,
                Token.TOKNUMBER
        ]:
            result = self._AddExpr()
        else:
            logger.error(
                "Error:Column {}: '{}' is not a valid start of an expression".
                format(self.tok.col_no + 1, self.tok.lexeme))
        logger.debug("Leaving Expr with {}".format(self.tok.lexeme))
        return result

    def _AddExpr(self) -> float:
        """
            AddExpr	 --> MulExpr { ("+" | "-") MulExpr }
        """
        logger.debug("In AddExpr with {}".format(self.tok.lexeme))

        lval = self._MulExpr()
        while self.tok.token == Token.TOKPLUS or self.tok.token == Token.TOKMINUS:
            tok = self.tok.token
            # consume the operator before evaluating the right operand
            self.tok = self.tokenizer.get_next_token()
            lval = lval + self._MulExpr(
            ) if tok == Token.TOKPLUS else lval - self._MulExpr()

        logger.debug("Leaving AddExpr with {}".format(self.tok.lexeme))
        return lval

    def _MulExpr(self) -> float:
        """
            MulExpr	--> UnaryExpr { ("*" | "/" | "%") UnaryExpr }
        """
        logger.debug("In MulExpr with {}".format(self.tok.lexeme))

        lval = self._UnaryExpr()
        while self.tok.token == Token.TOKMUL or self.tok.token == Token.TOKDIVIDE or self.tok.token == Token.TOKMOD:
            tok = self.tok.token
            self.tok = self.tokenizer.get_next_token()
            rval = self._UnaryExpr()
            if tok == Token.TOKMUL:
                lval *= rval
            elif tok == Token.TOKDIVIDE:
                lval /= rval
            else:
                lval %= rval

        logger.debug("Leaving MulExpr with {}".format(self.tok.lexeme))
        return lval

    def _UnaryExpr(self) -> float:
        """
        UnaryExpr   --> PrimaryExpr
        			| ("+" | "-") UnaryExpr
        """
        logger.debug("In UnaryExpr with {}".format(self.tok.lexeme))
        result = 0
        if self.tok.token == Token.TOKIDENT or self.tok.token == Token.TOKNUMBER or self.tok.token == Token.TOKLPAREN:
            result = self._PrimaryExpr()

        elif self.tok.token == Token.TOKPLUS or self.tok.token == Token.TOKMINUS:
            tok = self.tok.token
            self.tok = self.tokenizer.get_next_token()
            # unary minus negates; unary plus is a no-op
            result = self._UnaryExpr(
            ) if tok == Token.TOKPLUS else -1 * self._UnaryExpr()

        else:
            logger.error(
                "Error:Column {}: '{}' is an invalid start of a unary expression"
                .format(self.tok.col_no + 1, self.tok.lexeme))
            #Incase of error, just report and maintain default 0 for result

        logger.debug("Leaving UnaryExpr with {}".format(self.tok.lexeme))
        return result

    def _PrimaryExpr(self) -> float:
        """
        PrimaryExpr  --> number
        				        | Ident
        				        | Ident "(" [ Expr { "," Expr } ] ")"
                                | Ident "=" Expr
        				        | "(" Expr ")"
        """
        logger.debug("In PrimaryExpr with {}".format(self.tok.lexeme))
        result = 0
        if self.tok.token == Token.TOKNUMBER:
            result = float(self.tok.lexeme)
            self.tok = self.tokenizer.get_next_token()

        elif self.tok.token == Token.TOKIDENT:
            # remember the identifier; what it means (variable access,
            # function call or assignment target) depends on what follows
            ident_lexeme = self.tok.lexeme
            col_no = self.tok.col_no
            is_defined = ident_lexeme in sym_tab
            is_var_access = True

            self.tok = self.tokenizer.get_next_token()

            if self.tok.token == Token.TOKLPAREN:
                # function call: collect comma-separated arguments
                # NOTE(review): this path always parses at least one
                # argument, so zero-argument calls `f()` would error —
                # TODO confirm this is intended by the grammar
                arg_list = []
                self.tok = self.tokenizer.get_next_token()
                exp = self._Expr()
                arg_list.append(exp)

                while self.tok.token == Token.TOKCOMMA:
                    self.tok = self.tokenizer.get_next_token()
                    exp = self._Expr()
                    arg_list.append(exp)

                if self.tok.token == Token.TOKRPAREN:

                    if is_defined:
                        result = sym_tab[ident_lexeme](*arg_list)
                    else:
                        logger.error(
                            "Error:Column {}: '{}' is not part of the predefined functions"
                            .format(col_no + 1, ident_lexeme))

                    is_var_access = False

                    self.tok = self.tokenizer.get_next_token()

                else:
                    logger.error(
                        "Error:Column {}: ')' expected before '{}'".format(
                            self.tok.col_no + 1, self.tok.lexeme))

            elif self.tok.token == Token.TOKEQUAL:
                self.tok = self.tokenizer.get_next_token()
                result = self._Expr()
                #add variable to symbol table
                sym_tab[ident_lexeme] = result
                is_var_access = False

            if is_var_access == True:
                # plain variable read: look the identifier up
                if not is_defined:
                    logger.error(
                        "Error:Column {}: the variable '{}' is not defined".
                        format(col_no + 1, ident_lexeme))

                else:
                    result = sym_tab[ident_lexeme]

        elif self.tok.token == Token.TOKLPAREN:
            self.tok = self.tokenizer.get_next_token()
            result = self._Expr()
            if self.tok.token == Token.TOKRPAREN:
                self.tok = self.tokenizer.get_next_token()
            else:
                logger.error(
                    "Error:Column {}: ')' expected before '{}'".format(
                        self.tok.col_no + 1, self.tok.lexeme))

        logger.debug("Leaving PrimaryExpr with {}".format(self.tok.lexeme))
        return result
예제 #7
0
class ParserInterpreter(object):
    """Recursive-descent parser that evaluates arithmetic expressions
    directly while parsing (no AST is built).

    Grammar, from lowest to highest precedence:
        program := expr (SEPARATOR expr)* EOF
        expr    := term ((PLUS | MINUS) term)*
        term    := power ((PRODUCT | DIVISION) power)*
        power   := base (POWER base)*
        base    := INTEGER | FLOAT | LGROUP expr RGROUP
    """

    def __init__(self, expression):
        self.sc = Scanner(expression)
        self.tok = Tokenizer(self.sc)
        # Token list is materialized lazily by get_token_sequence().
        self.tokens = None

    def get_token_sequence(self):
        """Tokenize the whole input (once) and return a copy of the list.

        Subsequent calls return a fresh copy of the cached sequence so the
        parser's destructive consumption of self.tokens does not affect
        callers holding a previously returned list.
        """
        if self.tokens is not None:
            return self.tokens[::]

        self.tokens = []

        self.tokens.append(self.tok.get_next_token())
        while self.tokens[-1].get_type() != Token.EOF:
            self.tokens.append(self.tok.get_next_token())

        return self.tokens[::]

    def get_program(self):
        """Evaluate every SEPARATOR-delimited expression; return the last value.

        Raises (via self.error) if the token stream does not end with EOF.
        """
        # BUG FIX: __init__ leaves self.tokens as None and nothing else
        # populated it, so indexing self.tokens[0] below raised TypeError
        # on a fresh instance. Materialize the token stream first.
        if self.tokens is None:
            self.get_token_sequence()

        result = self.get_expr()
        print("pre-result: {}".format(result))

        while self.tokens[0].get_type() is Token.SEPARATOR:
            self.tokens = self.tokens[1:]
            # A trailing separator right before EOF has no expression after it.
            if self.tokens[0].get_type() is Token.EOF:
                break
            else:
                result = self.get_expr()
                print("pre-result: {}".format(result))

        if self.tokens[0].get_type() is Token.EOF:
            return result
        else:
            self.error("Program did not terminate with expected EOF")

    def get_expr(self):
        """expr := term ((PLUS | MINUS) term)* — left-associative."""
        result = self.get_term()

        while self.tokens[0].get_type() in (Token.PLUS, Token.MINUS):
            tok = self.tokens[0]
            self.tokens = self.tokens[1:]
            other = self.get_term()

            if tok.get_type() == Token.PLUS:
                result += other
            elif tok.get_type() == Token.MINUS:
                result -= other

        return result

    def get_term(self):
        """term := power ((PRODUCT | DIVISION) power)* — left-associative."""
        result = self.get_power()

        while self.tokens[0].get_type() in (Token.PRODUCT, Token.DIVISION):
            tok = self.tokens[0]
            self.tokens = self.tokens[1:]
            other = self.get_power()

            if tok.get_type() == Token.PRODUCT:
                result *= other
            elif tok.get_type() == Token.DIVISION:
                result /= other

        return result

    def get_power(self):
        """power := base (POWER base)*.

        NOTE(review): this loop applies exponents left-associatively
        (((a^b)^c)), whereas mathematical convention is right-associative;
        preserved as-is since changing it would alter results.
        """
        result = self.get_base()

        while self.tokens[0].get_type() is Token.POWER:
            self.tokens = self.tokens[1:]
            other = self.get_base()

            result = pow(result, other)

        return result

    def get_base(self):
        """base := INTEGER | FLOAT | LGROUP expr RGROUP.

        Consumes one token (plus a grouped sub-expression if LGROUP) and
        returns its numeric value; raises via self.error on anything else.
        """
        tok = self.tokens[0]
        self.tokens = self.tokens[1:]

        if tok.get_type() not in (Token.INTEGER, Token.FLOAT):
            if tok.get_type() is not Token.LGROUP:
                self.error("Could not get a valid base")
            else:
                base = self.get_expr()
                # The closing delimiter must follow the grouped expression.
                tok = self.tokens[0]
                self.tokens = self.tokens[1:]
                if tok.get_type() is not Token.RGROUP:
                    self.error("Unmatched left parenthesis")
                return base
        else:
            return tok.get_value()

    def error(self, msg):
        """Abort parsing with a plain Exception carrying *msg*."""
        raise Exception(msg)
예제 #8
0
            else:
                self.interpreter.parser.set_program(user_input)
                calculated_val = self.interpreter.interpret()
                print("Value of {} = {}".format(user_input, calculated_val))

            user_input = input("Enter Whatever you want to calculate, press 'h' for Help, or press 'q':\n")
        print("Thank You for using The Calculator 4000!. Goodbye!")


if __name__ == "__main__":
    program = "[1! + 5 * 7 & [7!]] $ 8"

    #Test the tokenizer by printing all the tokens
    t = Tokenizer(program)
    while t.has_more_tokens():
        print(t.get_next_token())

    #Test the parser by generating and printing the whole ast tree
    p = Parser(program)
    # print("AST Tree:\n{}".format(p.parse()))
    root = p.parse()
    root.bfs_pretty_print()
    print("\n")

    #Test the interpreter program
    i = Interpreter(program)
    print("Value of {} = {}".format(program, i.interpret()))

    #Test the Calculator
    calc = Calculator()
    calc.start()
예제 #9
0
class ParserInterpreter(object):
    """Combined parser/interpreter: evaluates an arithmetic expression
    during recursive descent instead of building a tree first.

    Precedence, loosest binding first:
        program := expr (SEPARATOR expr)* EOF
        expr    := term ((PLUS | MINUS) term)*
        term    := power ((PRODUCT | DIVISION) power)*
        power   := base (POWER base)*
        base    := INTEGER | FLOAT | LGROUP expr RGROUP
    """

    def __init__(self, expression):
        self.sc = Scanner(expression)
        self.tok = Tokenizer(self.sc)
        # Filled in on demand by get_token_sequence().
        self.tokens = None

    def get_token_sequence(self):
        """Tokenize the input on first use; return a copy of the token list.

        Copies are returned so callers cannot be affected by the parser
        consuming self.tokens in place.
        """
        if self.tokens is not None:
            return self.tokens[::]

        self.tokens = []

        self.tokens.append(self.tok.get_next_token())
        while self.tokens[-1].get_type() != Token.EOF:
            self.tokens.append(self.tok.get_next_token())

        return self.tokens[::]

    def get_program(self):
        """Evaluate each SEPARATOR-delimited expression; return the last one.

        Raises (via self.error) when the stream does not finish with EOF.
        """
        # BUG FIX: nothing populated self.tokens before this method indexed
        # it, so a fresh instance died with TypeError on self.tokens[0].
        # Build the token stream lazily here.
        if self.tokens is None:
            self.get_token_sequence()

        result = self.get_expr()
        print("pre-result: {}".format(result))

        while self.tokens[0].get_type() is Token.SEPARATOR:
            self.tokens = self.tokens[1:]
            # Allow a trailing separator immediately before EOF.
            if self.tokens[0].get_type() is Token.EOF:
                break
            else:
                result = self.get_expr()
                print("pre-result: {}".format(result))

        if self.tokens[0].get_type() is Token.EOF:
            return result
        else:
            self.error("Program did not terminate with expected EOF")

    def get_expr(self):
        """expr := term ((PLUS | MINUS) term)* — evaluated left to right."""
        result = self.get_term()

        while self.tokens[0].get_type() in (Token.PLUS, Token.MINUS):
            tok = self.tokens[0]
            self.tokens = self.tokens[1:]
            other = self.get_term()

            if tok.get_type() == Token.PLUS:
                result += other
            elif tok.get_type() == Token.MINUS:
                result -= other

        return result

    def get_term(self):
        """term := power ((PRODUCT | DIVISION) power)* — left to right."""
        result = self.get_power()

        while self.tokens[0].get_type() in (Token.PRODUCT, Token.DIVISION):
            tok = self.tokens[0]
            self.tokens = self.tokens[1:]
            other = self.get_power()

            if tok.get_type() == Token.PRODUCT:
                result *= other
            elif tok.get_type() == Token.DIVISION:
                result /= other

        return result

    def get_power(self):
        """power := base (POWER base)*.

        NOTE(review): chained exponents evaluate left-associatively here,
        unlike the usual right-associative convention; kept unchanged to
        preserve existing results.
        """
        result = self.get_base()

        while self.tokens[0].get_type() is Token.POWER:
            self.tokens = self.tokens[1:]
            other = self.get_base()

            result = pow(result, other)

        return result

    def get_base(self):
        """base := INTEGER | FLOAT | LGROUP expr RGROUP.

        Consumes one token (plus a parenthesized sub-expression for LGROUP)
        and returns its numeric value; raises via self.error otherwise.
        """
        tok = self.tokens[0]
        self.tokens = self.tokens[1:]

        if tok.get_type() not in (Token.INTEGER, Token.FLOAT):
            if tok.get_type() is not Token.LGROUP:
                self.error("Could not get a valid base")
            else:
                base = self.get_expr()
                # Require the matching closing delimiter.
                tok = self.tokens[0]
                self.tokens = self.tokens[1:]
                if tok.get_type() is not Token.RGROUP:
                    self.error("Unmatched left parenthesis")
                return base
        else:
            return tok.get_value()

    def error(self, msg):
        """Abort parsing by raising a plain Exception with *msg*."""
        raise Exception(msg)