Example #1
 def addToken(self, tokenType, literal=None, indent=""):
     #if we haven't been given an indentation, fetch it from memory
     if not indent:
         indent = self.indent
     #extract the lexeme from the source code (the characters between the start and current positions)
     text = self.source[self.start:self.current]
     #create and return a token
     return Tokens.Token(tokenType, text, literal, self.line, self.char,
                         indent)
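
The Tokens.Token constructor these examples call is defined elsewhere. A minimal sketch of what it might look like, inferred from the call sites above (the field names lexeme and literal are assumptions, not taken from the original module):

 class Token:
     def __init__(self, tokenType, lexeme, literal, line, char, indent):
         #store the token's type, raw text, parsed value, and source position
         #(lexeme/literal are assumed names; type, line, char, and indent are
         #confirmed by how the tokens are read back in Example #3)
         self.type = tokenType
         self.lexeme = lexeme
         self.literal = literal
         self.line = line
         self.char = char
         self.indent = indent
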
Example #2
 def insertToken(self,
                 ttype,
                 lexeme="",
                 literal=None,
                 line=0,
                 char=0,
                 indent=0):
     #if we were given a type string, build a new token from the parts
     if isinstance(ttype, str):
         self.tokens.insert(
             self.current,
             Tokens.Token(ttype, lexeme, literal, line, char, indent))
     #otherwise assume we were handed a ready-made token and insert it as-is
     else:
         self.tokens.insert(self.current, ttype)
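
A hedged usage sketch: insertToken accepts either a type string plus the remaining token fields, or an already-built token object (the scanner variable and the token values here are hypothetical):

 #build and insert a token from its parts at the current position
 scanner.insertToken("Identifier", lexeme="foo", line=3, char=0, indent=1)
 #or insert a prebuilt token directly
 scanner.insertToken(Tokens.Token("End", "", None, 3, 4, 0))
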
Example #3
 def scanTokens(self):
     #empty token list
     self.tokens = []
     #while we are not at the end of the source, loop
     while not self.atEnd():
         #update the position of our lexer
         self.char += (self.current - self.start)
         self.start = self.current
         #fetch the next token
         token = self.tokenFromChar()
         #append the token to our list of tokens, provided we received one
         if token:
             #if the previous token was an End token
             if self.checkPreviousToken("End"):
                 #fetch it
                 lt = self.previousToken()
                 #and check that we haven't skipped an indentation level
                 if token.indent < lt.indent - 1:
                     #create an extra End token at the same position, but one indentation level lower
                     self.tokens.append(
                         Tokens.Token("End", "", None, lt.line, lt.char,
                                      lt.indent - 1))
             #add the token
             self.tokens.append(token)
     #add an end token if none exists
     if not self.checkPreviousToken("End"):
         self.tokens.append(
             Tokens.Token("End", "", None, self.line, self.char + 1,
                          self.indent))
     #add an EOF token
     self.tokens.append(
         Tokens.Token("EOF", "", None, self.line, self.char + 1, 0))
     #remove any leading end tokens
     while self.tokens[0].type == "End":
         self.tokens.pop(0)
     return self.tokens
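
scanTokens leans on helpers that are not shown here: atEnd, tokenFromChar, checkPreviousToken, and previousToken. A minimal sketch of the two previous-token helpers, assuming they inspect the most recently appended token (signatures inferred from the calls above):

 def checkPreviousToken(self, tokenType):
     #true only if at least one token exists and the newest one matches the given type
     return bool(self.tokens) and self.tokens[-1].type == tokenType

 def previousToken(self):
     #return the most recently appended token
     return self.tokens[-1]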