def state_73(self):
    """State after '#': emit a 'sharp' operator token on EOF, else move to state 74."""
    ch = self.fp.nextChar()
    if ch != 'eof':
        return LexicalAnalysis.switchState(self, 74)
    # Input ended right after '#': flush the pending operator, then EOF.
    self.tokens.append(Token('operator', 'sharp', line=self.lineChar))
    self.tokens.append(Token('eof', line=self.lineChar))
def state_60(self, *argv):
    """Accumulate an identifier/keyword one character at a time.

    An optional positional argument seeds ``self.identifiers`` with the
    character that triggered the transition out of the start state.
    """
    if argv:
        self.identifiers = argv[0]
    ch = self.fp.nextChar()
    if ch == 'eof':
        # Input ended inside the lexeme: resolve it now, then emit EOF.
        tokenId, tokenName, tokenAttributeValue = LexicalAnalysis.checkToken(self)
        if not tokenId:
            addTokenBool, tokenId, tokenName, tokenAttributeValue = \
                LexicalAnalysis.addIdentifiers(self)
            if addTokenBool:
                self.tokens.append(
                    Token(tokenName, tokenAttributeValue, tokenId, line=self.lineChar))
            else:
                # NOTE(review): a bare error string is appended among Token
                # objects -- confirm downstream consumers tolerate this.
                self.tokens.append('error to save token in token csv file')
        else:
            self.tokens.append(
                Token(tokenName, tokenAttributeValue, tokenId, line=self.lineChar))
        self.tokens.append(Token('eof', line=self.lineChar))
    elif re.search('[A-Z]|[a-z]|[0-9]', ch):
        # Still inside the identifier: extend the lexeme and stay here.
        self.identifiers += ch
        return LexicalAnalysis.switchState(self, 60)
    else:
        # Terminator reached: state 61 pushes it back and resolves the lexeme.
        return LexicalAnalysis.switchState(self, 61)
def state_75(self):
    """State after '.': emit a 'dot' token on EOF, else move to state 76."""
    ch = self.fp.nextChar()
    if ch != 'eof':
        return LexicalAnalysis.switchState(self, 76)
    self.tokens.append(Token('dot', line=self.lineChar))
    self.tokens.append(Token('eof', line=self.lineChar))
def state_50(self):
    """State after ';': emit a semicolon token on EOF, else move to state 51."""
    ch = self.fp.nextChar()
    if ch != 'eof':
        return LexicalAnalysis.switchState(self, 51)
    self.tokens.append(Token('punctuation', 'semicolon', line=self.lineChar))
    self.tokens.append(Token('eof', line=self.lineChar))
def state_79(self):
    """State after ']': emit the bracket token on EOF, else move to state 80."""
    ch = self.fp.nextChar()
    if ch != 'eof':
        return LexicalAnalysis.switchState(self, 80)
    # NOTE(review): 'rightsqurebracket' is misspelled but preserved -- the
    # parser presumably matches on this exact string.
    self.tokens.append(
        Token('punctuation', 'rightsqurebracket', line=self.lineChar))
    self.tokens.append(Token('eof', line=self.lineChar))
def state_54(self):
    """State after ')': emit the parenthesis token on EOF, else move to state 55."""
    ch = self.fp.nextChar()
    if ch != 'eof':
        return LexicalAnalysis.switchState(self, 55)
    # NOTE(review): 'rightparantheses' spelling preserved for downstream matching.
    self.tokens.append(
        Token('punctuation', 'rightparantheses', line=self.lineChar))
    self.tokens.append(Token('eof', line=self.lineChar))
def state_48(self):
    """State after a space: swallow a run of blanks, then hand off to state 49."""
    ch = self.fp.nextChar()
    if ch == ' ':
        # Still inside the run of spaces -- keep consuming.
        return LexicalAnalysis.switchState(self, 48)
    if ch == 'eof':
        self.tokens.append(Token('delim', line=self.lineChar))
        self.tokens.append(Token('eof', line=self.lineChar))
        return
    return LexicalAnalysis.switchState(self, 49)
def state_28(self):
    """State after '!': '=' leads toward '!=', otherwise it is a NOT operator."""
    ch = self.fp.nextChar()
    if ch == '=':
        return LexicalAnalysis.switchState(self, 29)
    if ch == 'eof':
        self.tokens.append(Token('operator', 'NOT', line=self.lineChar))
        self.tokens.append(Token('eof', line=self.lineChar))
        return
    return LexicalAnalysis.switchState(self, 30)
def state_41(self):
    """State after '^': '=' leads to state 42, otherwise state 43 emits the operator."""
    ch = self.fp.nextChar()
    if ch == '=':
        return LexicalAnalysis.switchState(self, 42)
    if ch == 'eof':
        # NOTE(review): 'bitwiseInsclusiveAND' spelling preserved -- it must
        # match state_43 and the parser's expectations.
        self.tokens.append(
            Token('operator', 'bitwiseInsclusiveAND', line=self.lineChar))
        self.tokens.append(Token('eof', line=self.lineChar))
        return
    return LexicalAnalysis.switchState(self, 43)
def state_19(self):
    """State after '*': '=' leads to state 20, otherwise state 21."""
    ch = self.fp.nextChar()
    if ch == '=':
        return LexicalAnalysis.switchState(self, 20)
    if ch == 'eof':
        self.tokens.append(
            Token('operator', 'multiplication', line=self.lineChar))
        self.tokens.append(Token('eof', line=self.lineChar))
        return
    return LexicalAnalysis.switchState(self, 21)
def state_68(self):
    """Accumulate trailing digits of a numeric literal (exponent part)."""
    ch = self.fp.nextChar()
    if ch == 'eof':
        self.tokens.append(Token('number', self.number, line=self.lineChar))
        self.tokens.append(Token('eof', line=self.lineChar))
        return
    if re.search('[0-9]', ch):
        # More digits: extend the lexeme and stay in this state.
        self.number += ch
        return LexicalAnalysis.switchState(self, 68)
    # Non-digit terminator: state 63 pushes it back and emits the number.
    return LexicalAnalysis.switchState(self, 63)
def state_35(self):
    """State after '|': '|' or '=' extend the operator, else emit bitwise OR."""
    ch = self.fp.nextChar()
    if ch == '|':
        return LexicalAnalysis.switchState(self, 36)
    if ch == '=':
        return LexicalAnalysis.switchState(self, 37)
    if ch == 'eof':
        self.tokens.append(Token('operator', 'bitwiseOR', line=self.lineChar))
        self.tokens.append(Token('eof', line=self.lineChar))
        return
    return LexicalAnalysis.switchState(self, 38)
def state_15(self):
    """State after '-': '-' or '=' extend the operator, else emit subtraction."""
    ch = self.fp.nextChar()
    if ch == '-':
        return LexicalAnalysis.switchState(self, 16)
    if ch == '=':
        return LexicalAnalysis.switchState(self, 17)
    if ch == 'eof':
        self.tokens.append(Token('operator', 'subtraction', line=self.lineChar))
        self.tokens.append(Token('eof', line=self.lineChar))
        return
    return LexicalAnalysis.switchState(self, 18)
def state_11(self):
    """State after '+': '+' or '=' extend the operator, else emit addition."""
    ch = self.fp.nextChar()
    if ch == '+':
        return LexicalAnalysis.switchState(self, 12)
    if ch == '=':
        return LexicalAnalysis.switchState(self, 13)
    if ch == 'eof':
        self.tokens.append(Token('operator', 'addition', line=self.lineChar))
        self.tokens.append(Token('eof', line=self.lineChar))
        return
    return LexicalAnalysis.switchState(self, 14)
def state_0(self):
    """Start state of the DFA: dispatch on the next input character.

    Single-character triggers are looked up in a transition table; letters
    and digits seed the identifier and number states respectively.
    """
    ch = self.fp.nextChar()
    if ch == 'eof':
        self.tokens.append(Token('eof', line=self.lineChar))
        return
    if ch == '\n':
        # Track the current line for token positions.
        self.lineChar += 1
        return LexicalAnalysis.switchState(self, 0)
    # Target state for each single-character trigger (checks were mutually
    # exclusive in the original if/elif ladder, so table order is irrelevant).
    transitions = {
        '<': 1, '=': 5, '>': 8, '+': 11, '-': 15, '*': 19, '/': 22,
        '%': 25, '!': 28, '&': 31, '|': 35, ',': 39, '^': 41, '~': 44,
        ' ': 48, ';': 50, '(': 52, ')': 54, '{': 56, '}': 58, '#': 73,
        '.': 75, '[': 77, ']': 79,
    }
    if ch in transitions:
        return LexicalAnalysis.switchState(self, transitions[ch])
    if re.search('[A-Z]|[a-z]', ch):
        # Seed the identifier accumulator with the first letter.
        return LexicalAnalysis.switchState(self, 60, ch)
    if re.search('[0-9]', ch):
        # Seed the number accumulator with the first digit.
        return LexicalAnalysis.switchState(self, 62, ch)
    # NOTE(review): any other character falls through and returns None,
    # silently stopping the scan -- confirm this is intentional.
def state_62(self, *argv):
    """Accumulate the integer part of a numeric literal.

    An optional positional argument seeds ``self.number`` with the digit
    that triggered the transition out of the start state. '.' and 'E'
    extend the literal toward its fractional/exponent states.
    """
    if argv:
        self.number = argv[0]
    ch = self.fp.nextChar()
    if ch == 'eof':
        self.tokens.append(Token('number', self.number, line=self.lineChar))
        self.tokens.append(Token('eof', line=self.lineChar))
        return
    if re.search('[0-9]', ch):
        self.number += ch
        return LexicalAnalysis.switchState(self, 62)
    if ch == '.':
        # Decimal point: continue with the fraction part.
        self.number += ch
        return LexicalAnalysis.switchState(self, 64)
    if ch == 'E':
        # Exponent marker: continue with the exponent part.
        self.number += ch
        return LexicalAnalysis.switchState(self, 66)
    # Terminator: state 63 pushes it back and emits the number token.
    return LexicalAnalysis.switchState(self, 63)
def state_61(self):
    """Identifier complete: push back the terminator and resolve the lexeme.

    Known keywords/tokens come from ``checkToken``; otherwise the lexeme is
    registered as a new identifier via ``addIdentifiers``.
    """
    self.fp.previousChar()
    tokenId, tokenName, tokenAttributeValue = LexicalAnalysis.checkToken(self)
    if not tokenId:
        addTokenBool, tokenId, tokenName, tokenAttributeValue = \
            LexicalAnalysis.addIdentifiers(self)
        if addTokenBool:
            self.tokens.append(
                Token(tokenName, tokenAttributeValue, tokenId, line=self.lineChar))
        else:
            # NOTE(review): a bare error string is appended among Token
            # objects -- confirm downstream consumers tolerate this.
            self.tokens.append('error to save token in token csv file')
    else:
        self.tokens.append(
            Token(tokenName, tokenAttributeValue, tokenId, line=self.lineChar))
    return LexicalAnalysis.switchState(self, 0)
def state_80(self):
    """Emit ']' token, push back the lookahead character, and restart."""
    # NOTE(review): 'rightsqurebracket' spelling preserved for downstream matching.
    self.tokens.append(
        Token('punctuation', 'rightsqurebracket', line=self.lineChar))
    self.fp.previousChar()
    return LexicalAnalysis.switchState(self, 0)
def state_43(self):
    """Emit '^' operator token after pushing back the lookahead character."""
    self.fp.previousChar()
    # NOTE(review): 'bitwiseInsclusiveAND' spelling preserved -- must match state_41.
    self.tokens.append(
        Token('operator', 'bitwiseInsclusiveAND', line=self.lineChar))
    return LexicalAnalysis.switchState(self, 0)
def state_76(self):
    """Emit '.' token, push back the lookahead character, and restart."""
    self.tokens.append(Token('dot', line=self.lineChar))
    self.fp.previousChar()
    return LexicalAnalysis.switchState(self, 0)
def state_51(self):
    """Emit ';' token, push back the lookahead character, and restart."""
    self.tokens.append(
        Token('punctuation', 'semicolon', line=self.lineChar))
    self.fp.previousChar()
    return LexicalAnalysis.switchState(self, 0)
def state_3(self):
    """Emit a not-equal (NE) operator token and restart the DFA.

    No pushback here: both characters of the two-character operator were
    already consumed on the way to this state.
    """
    self.tokens.append(Token('operator', 'NE', line=self.lineChar))
    return LexicalAnalysis.switchState(self, 0)
def state_63(self):
    """Number complete: push back the terminator, emit the token, restart."""
    self.fp.previousChar()
    self.tokens.append(Token('number', self.number, line=self.lineChar))
    return LexicalAnalysis.switchState(self, 0)
def state_4(self):
    """Emit a less-than (LT) operator token after pushing back the lookahead."""
    self.fp.previousChar()
    self.tokens.append(Token('operator', 'LT', line=self.lineChar))
    return LexicalAnalysis.switchState(self, 0)
def state_55(self):
    """Emit ')' token, push back the lookahead character, and restart."""
    # NOTE(review): 'rightparantheses' spelling preserved for downstream matching.
    self.tokens.append(
        Token('punctuation', 'rightparantheses', line=self.lineChar))
    self.fp.previousChar()
    return LexicalAnalysis.switchState(self, 0)