def error(self):
    """Raise a LexerError describing the offending character and its position.

    Raises:
        LexerError: always; the message embeds the current character,
            line number, and column.
    """
    # f-string form for consistency with the sibling error() method that
    # already uses one; the rendered message is byte-identical to the old
    # str.format() version.
    message = (
        f"Lexer error on '{self.current_char}' "
        f"line: {self.lineno} column: {self.column}"
    )
    raise LexerError(message=message)
def current(self):
    """Return the token under the cursor without advancing.

    Raises:
        LexerError: when the cursor has run past the last token; the
            position of the final token is used for the error location.
    """
    try:
        return self._tokens[self._pos]
    except IndexError:
        # Cursor is past the end: report the error at the last real token.
        tail = self._tokens[-1]
        raise LexerError('Unexpected end of input', tail.line, tail.column)
def consume_expected(self, *args):
    """Consume one token per expected name, in order, and return the last.

    Raises:
        LexerError: as soon as a consumed token's name differs from the
            expected name at that position.

    Note: with no arguments this consumes nothing and returns None,
    matching the original behavior.
    """
    consumed = None
    for wanted in args:
        consumed = self.consume()
        if consumed.name == wanted:
            continue
        raise LexerError(
            'Expected {}, got {}'.format(wanted, consumed.name),
            consumed.line, consumed.column)
    return consumed
def _tokenize_line(self, line, line_num): pos = 0 while pos < len(line): matches = self._regex.match(line, pos) if matches is not None: name = matches.lastgroup pos = matches.end(name) if name not in self.ignore_tokens: value = matches.group(name) if name in self.decoders: value = self.decoders[name](value) yield Token(name, value, line_num, matches.start() + 1) else: raise LexerError('Unexpected character "{}"'.format(line[pos]), line_num, pos + 1)
def err(self, msg, debug=False):
    """Raise a lexer error for the current input position.

    Args:
        msg: human-readable error description.
        debug: when True, raise LexerDebugError carrying the lexer's
            internal state (state, current char, buffer) in addition to
            the position info.

    Raises:
        LexerError | LexerDebugError: always.
    """
    # Both branches report the same position; fetch it once.
    position = self.curr_input.get_char_info()
    if not debug:
        raise LexerError(msg, *position)
    raise LexerDebugError(msg, *position,
                          self.state, self.curr_char, self.buffer)
def expect_end(self):
    """Assert that the token stream is exhausted.

    Raises:
        LexerError: when tokens remain, located at the current token.
    """
    if self.is_end():
        return
    tok = self.current()
    raise LexerError('End expected', tok.line, tok.column)
def error(self):
    """Raise a LexerError describing the current character and position.

    Raises:
        LexerError: always.
    """
    raise LexerError(
        message=(
            f"Lexer error on '{self.current_char}' "
            f"line: {self.lineno} column: {self.column}"
        )
    )