Example #1
    def get_tokens(self):
        # Pull tokens one at a time until the lexer reports EOF,
        # then append the EOF token so callers always see it last.
        t = self.get_token()
        tokens = []

        while t != token.Token('EOF'):
            tokens.append(t)
            t = self.get_token()

        tokens.append(token.Token('EOF'))

        return tokens
Example #2
    def get_operator(self) -> tok.Token:
        # Greedily extend the slice while it still forms a valid operator,
        # so multi-character operators are matched as a single token.
        i = self._index
        while len(self.code) > i and tok.is_operator(
                self.code[self._index:i + 1]):
            i += 1
        res = tok.Token(self.code[self._index:i], tok.TokenType.OPERATOR)
        self._index = i
        return res
Example #3
    def _process_eof(self):
        # Reading _current_line raises EOFError once the buffer is
        # exhausted; only then do we emit the EOF token.
        try:
            self._current_line
            return None
        except text_buffer.EOFError:
            return self._set_current_token_and_skip(
                token.Token(EOF)
            )
Example #4
    def _process_eol(self):
        # Reading _current_char raises EOLError at the end of a line;
        # advance the buffer to the next line and emit the EOL token.
        try:
            self._current_char
            return None
        except text_buffer.EOLError:
            self._text_storage.newline()

            return self._set_current_token_and_skip(
                token.Token(EOL)
            )
Example #5
    def get_number(self) -> tok.Token:
        # Consume digits; a single '.' upgrades the token to REAL,
        # and a second '.' in the same number is an error.
        return_type = tok.TokenType.INTEGER
        start = self._index
        while not self.is_eof():
            if self.cur == '.':
                if return_type == tok.TokenType.REAL:
                    raise ParseException('Error while parsing number')
                return_type = tok.TokenType.REAL
            elif not self.cur.isdigit():
                break
            self._index += 1
        return tok.Token(self.code[start:self._index], return_type)
Example #6
    def _process_integer(self):
        # Match a run of digits at the start of the remaining text and
        # emit an INTEGER token carrying its numeric value.
        regexp = re.compile(r'\d+')

        match = regexp.match(
            self._text_storage.tail
        )

        if not match:
            return None

        token_string = match.group()

        return self._set_current_token_and_skip(
            token.Token(INTEGER, int(token_string))
        )
Example #7
    def _process_name(self):
        # Match an identifier (letters and underscores) at the start of
        # the remaining text and emit a NAME token.
        regexp = re.compile('[a-zA-Z_]+')

        match = regexp.match(
            self._text_storage.tail
        )

        if not match:
            return None

        token_string = match.group()

        return self._set_current_token_and_skip(
            token.Token(NAME, token_string)
        )
Example #8
    def get_paren(self) -> tok.Token:
        # A parenthesis is always a single character: consume it and advance.
        res = tok.Token(self.cur, tok.TokenType.PAREN)
        self._index += 1
        return res
Example #9
    def parse_assignment(self):
        # assignment ::= variable '=' expression
        variable = self._parse_variable()
        self.lexer.discard(token.Token(clex.LITERAL, '='))
        value = self.parse_expression()

        return AssignmentNode(variable, value)
Example #10
    def _process_literal(self):
        # Fallback case: emit the current character as a LITERAL token.
        return self._set_current_token_and_skip(
            token.Token(LITERAL, self._current_char)
        )
Example #11
    def __init__(self, toks: List[tok.Token]):
        # Append a sentinel EOF token so the parser never runs off the end.
        self.toks = toks
        self.toks.append(tok.Token('', tok.TokenType.EOF))