Example #1
0
    def _primary(cls, tok: Token) -> Node:
        # primary = "(" expr ")" | ident | num
        if Tokenizer.equal(tok, "("):
            # Parenthesized sub-expression: parse the inside, then the
            # closing ")" must immediately follow whatever the expression
            # consumed (tracked in cls._rest).
            inner = cls._expr(unwrap_optional(tok.next))
            cls._rest = unwrap_optional(Tokenizer.skip(cls._rest, ")"))
            return inner

        if tok.kind == TokenKind.TK_IDENT:
            # Variable reference: the name is read straight from the
            # program text at the token's location.
            cls._rest = unwrap_optional(tok.next)
            return Node(NodeKind.ND_VAR, name=cls._prog[tok.loc])

        if tok.kind == TokenKind.TK_NUM:
            # Numeric literal.
            cls._rest = unwrap_optional(tok.next)
            return Node(NodeKind.ND_NUM, val=tok.val)

        raise TokenError(tok, cls._prog, "expected an expression")
Example #2
0
    def _unary(cls, tok: Token) -> Node:
        # unary = ("+" | "-") unary | primary
        if Tokenizer.equal(tok, "-"):
            # Unary minus becomes an explicit negation node wrapping the operand.
            return Node(NodeKind.ND_NEG, lhs=cls._unary(unwrap_optional(tok.next)))
        if Tokenizer.equal(tok, "+"):
            # Unary plus is a no-op: just parse the operand itself.
            return cls._unary(unwrap_optional(tok.next))
        return cls._primary(tok)
Example #3
0
 def _assign(cls, tok: Token) -> Node:
     # assign = equality ("=" assign)?
     lhs = cls._equality(tok)
     if not Tokenizer.equal(cls._rest, "="):
         return lhs
     # Assignment is right-associative, so the right-hand side recurses
     # back into _assign rather than looping.
     rhs = cls._assign(unwrap_optional(cls._rest.next))
     return Node(NodeKind.ND_ASSIGN, lhs=lhs, rhs=rhs)
Example #4
0
    def _mul(cls, tok: Token) -> Node:
        # mul = unary ("*" unary | "/" unary)*
        node = cls._unary(tok)

        # Fold trailing "*" / "/" operands in left-associatively.
        while True:
            if Tokenizer.equal(cls._rest, "*"):
                kind = NodeKind.ND_MUL
            elif Tokenizer.equal(cls._rest, "/"):
                kind = NodeKind.ND_DIV
            else:
                return node
            node = Node(
                kind,
                lhs=node,
                rhs=cls._unary(unwrap_optional(cls._rest.next)),
            )
Example #5
0
    def _equality(cls, tok: Token) -> Node:
        # equality = relational ("==" relational | "!=" relational)*
        node = cls._relational(tok)

        # Left-associative fold over any run of equality operators.
        while True:
            if Tokenizer.equal(cls._rest, "=="):
                kind = NodeKind.ND_EQ
            elif Tokenizer.equal(cls._rest, "!="):
                kind = NodeKind.ND_NE
            else:
                return node
            node = Node(
                kind,
                lhs=node,
                rhs=cls._relational(unwrap_optional(cls._rest.next)),
            )
Example #6
0
    def _add(cls, tok: Token) -> Node:
        # add = mul ("+" mul | "-" mul)*
        node = cls._mul(tok)

        # Left-associative fold over any run of additive operators.
        while True:
            if Tokenizer.equal(cls._rest, "+"):
                kind = NodeKind.ND_ADD
            elif Tokenizer.equal(cls._rest, "-"):
                kind = NodeKind.ND_SUB
            else:
                return node
            node = Node(
                kind,
                lhs=node,
                rhs=cls._mul(unwrap_optional(cls._rest.next)),
            )
Example #7
0
    def _relational(cls, tok: Token) -> Node:
        # relational = add ("<" add | "<=" add | ">" add | ">=" add)*
        node = cls._add(tok)

        while True:
            # ">" and ">=" are canonicalized to ND_LT / ND_LE with the
            # operands swapped, so downstream code only handles two kinds.
            if Tokenizer.equal(cls._rest, "<"):
                kind, swapped = NodeKind.ND_LT, False
            elif Tokenizer.equal(cls._rest, "<="):
                kind, swapped = NodeKind.ND_LE, False
            elif Tokenizer.equal(cls._rest, ">"):
                kind, swapped = NodeKind.ND_LT, True
            elif Tokenizer.equal(cls._rest, ">="):
                kind, swapped = NodeKind.ND_LE, True
            else:
                return node

            operand = cls._add(unwrap_optional(cls._rest.next))
            if swapped:
                node = Node(kind, lhs=operand, rhs=node)
            else:
                node = Node(kind, lhs=node, rhs=operand)
Example #8
0
    def parse(cls, tok: Token, prog: str) -> Node:
        """Parses the tokens into an AST.

        Args:
            tok: The head of the token list.
            prog: The program to be parsed.

        Returns:
            The root node of the AST (the first statement node; later
            statements are chained through its ``next`` links).
        """

        # Stash the inputs on the class: the _xxx parsing methods read
        # cls._prog for error messages / names and advance cls._rest.
        cls._prog = prog
        cls._rest = tok

        head = Node(NodeKind.ND_EXPR_STMT)  # dummy node
        cur = head

        # Parse one statement at a time until the token stream hits EOF.
        while cls._rest.kind != TokenKind.TK_EOF:
            cur.next = cls._stmt(cls._rest)
            cur = cur.next

        # Drop the dummy head; the real list starts at head.next.
        return unwrap_optional(head.next)
Example #9
0
 def _expr_stmt(cls, tok: Token) -> Node:
     # expr-stmt = expr ";"
     expression = cls._expr(tok)
     # The expression must be followed by ";"; consume it.
     cls._rest = unwrap_optional(Tokenizer.skip(cls._rest, ";"))
     return Node(NodeKind.ND_EXPR_STMT, lhs=expression)