def t_multilined_multilines_eof(t):
    'Only happens when a multiline is not closed'
    lexer = t.lexer
    # Rewind the token position to just before where the multiline began so
    # the column report points at the opening of the unterminated string.
    t.lexpos = lexer.begin_lexpos - 2
    col = find_column(lexer.lexdata, t)
    message = "Multiline string not closed at line %d col %d" % (
        lexer.begin_lineno, col)
    # Hand the unterminated tail of the input to LexError as context.
    raise lex.LexError(message, lexer.lexdata[lexer.begin_lexpos:])
def t_ID(t):
    r'[A-Za-z][A-Za-z\d_]*'
    # Reclassify reserved words; plain identifiers keep their default type.
    if t.value in keywords:
        t.type = keywords[t.value]
        if t.type not in tokens:
            # Keyword known to the table but absent from the token list:
            # a grammar/lexer configuration error, so fail hard.
            raise lex.LexError("unhandled keyword %s" % t.value, t.type)
    return t
def t_error(t):  # pragma: no cover
    'Unexpected error - should fail hard'
    print("Illegal character %s" % repr(t.value[0]))
    lexdata = t.lexer.lexdata
    pos = t.lexer.lexpos
    # Build the diagnostic from the character actually sitting at lexpos.
    details = (repr(lexdata[pos]), t.lexer.lineno, find_column(lexdata, t))
    raise lex.LexError(
        "Scanning error. Illegal character %s at line %d col %d" % details,
        lexdata[pos:])
def t_newline(t):
    r'(\n[ ]*)+'
    # The pattern can swallow several blank lines at once, so count every
    # newline (str.count replaces the original len-of-comprehension idiom).
    t.lexer.lineno += t.value.count('\n')
    # Indentation of the new logical line = spaces after the LAST newline.
    new_indent = len(t.value) - t.value.rfind('\n') - 1
    old_indent = t.lexer.indent[-1]
    # Deeper indentation: open a new level and emit a single INDENT token.
    if new_indent > old_indent:
        t.lexer.indent.append(new_indent)
        t.type = 'INDENT'
        t.value = ''
        t.lineno = t.lexer.lineno
        return t
    # Shallower indentation: close one level per call, emitting one DEDENT.
    if new_indent < old_indent:
        try:
            # When more than one level must close, rewind the input so this
            # same newline run is rescanned and yields the next DEDENT too.
            if (len(t.lexer.indent) - t.lexer.indent.index(new_indent) - 1) > 1:
                t.lexer.lexpos -= len(t.value)
        except ValueError:
            # new_indent matches no open level -> inconsistent indentation.
            t.lexpos = t.lexer.lexpos
            raise lex.LexError(
                "Invalid indentation at line %d col %d" % (
                    t.lexer.lineno, find_column(t.lexer.lexdata, t)),
                t.lexer.lexdata[t.lexer.lexpos:])
        t.lexer.indent = t.lexer.indent[:-1]
        t.type = 'DEDENT'
        t.value = ''
        t.lineno = t.lexer.lineno
        return t
    # Same indentation: fall through returning None so the token is skipped.
def t_EVENT(t):
    r'[A-Z]+:'
    # EAFP single lookup replaces the original `in` + `.get` double lookup.
    try:
        t.type = ECA[t.value]
    except KeyError:
        # Unknown event keyword: abort lexing with a descriptive message.
        raise lex.LexError(
            'The keyword "' + t.value + '" is unknown. Line ' + str(t.lineno),
            ' ')
    return t
def t_error(t):
    """Record that a lex error happened and raise a caret-annotated LexError."""
    global lex_error
    lex_error = True
    # rfind returns -1 when the error is on the first line; keeping it at -1
    # (instead of clamping to 0 as before) makes the column 1-based on every
    # line, so the '^' caret below lines up on line 1 as well.
    last_cr = t.lexer.lexdata.rfind('\n', 0, t.lexpos)
    column = t.lexpos - last_cr
    offending_line = t.lexer.lexdata.split('\n')[t.lineno - 1]
    # Bug fix: the offending character was passed to format() but never
    # interpolated ({0} was missing from the template).
    raise lex.LexError(
        'Illegal word or character {0!r} at line {1} column {2}:\n'.format(
            t.value[0], t.lineno, column)
        + offending_line + '\n'
        + ' ' * (column - 1) + '^',
        ' ')
def t_macro_return(t):
    r'[})]'
    # The closing bracket must match the opener recorded for the innermost
    # macro on the depth stack.
    if t.value != t.lexer._mdepth[-1]:
        P = t.lexer.lexpos
        # Bug fix: the original slice lexdata[P:P:20] has start == stop and
        # is always empty; show up to 20 characters of context instead.
        raise lex.LexError("Mis-matched bracket when closing macro",
                           t.lexer.lexdata[P:P + 20])
    t.lexer._mval.append(t.value)
    t.lexer._mdepth.pop()
    if not t.lexer._mdepth:
        # Outermost bracket closed: emit the accumulated text as one MACRO
        # token, drop the scratch state, and leave the macro lexer state.
        t.type = 'MACRO'
        t.value = ''.join(t.lexer._mval)
        del t.lexer._mval
        del t.lexer._mdepth
        t.lexer.pop_state()
        return t
    # Still inside nested brackets: return None so no token is emitted yet.
def t_pythoncomment_ccomment_error(t):
    """Fail hard on a scanning error in the pythoncomment/ccomment state."""
    # Bug fix: PLY's lex.LexError requires (message, text); the original
    # one-argument call would itself crash with TypeError before the
    # intended LexError could propagate. Pass the unread input as context.
    raise lex.LexError("error in tokenizer",
                       t.lexer.lexdata[t.lexer.lexpos:])
def t_error(t):
    """Abort lexing as soon as an unrecognized token is seen."""
    message = "unknown token %s" % t.value
    raise lex.LexError(message, t.value)
def t_ccomment_eof(t):
    """EOF reached while still inside a C comment: the comment never closed."""
    # Bug fix: PLY's lex.LexError requires (message, text); the original
    # one-argument call would raise TypeError instead of LexError. Supply
    # the remaining (empty at EOF) input as the context argument.
    raise lex.LexError("ccomment not closed",
                       t.lexer.lexdata[t.lexer.lexpos:])
def parse(string):
    """Parse *string* with the module parser/lexer.

    Any failure is normalized to a lex.LexError carrying the original
    exception's message and args.
    """
    try:
        return parser.parse(string, lexer=lexer)
    except Exception as e:
        # Bug fix: Exception.message was removed in Python 3 and would raise
        # AttributeError here; str(e) is the portable spelling.
        raise lex.LexError(str(e), e.args)
def t_error(t):
    """Raise on any character the lexer cannot match."""
    message = "Illegal character '%s' on line %s" % (t.value[0], t.lexer.lineno)
    # NOTE(review): the token itself is passed as LexError's text argument;
    # PLY convention is the remaining input string — confirm callers rely on it.
    raise lex.LexError(message, t)
def t_error(self, t):
    """Lexer error hook: always escalate by raising a LexError."""
    raise lex.LexError("lex error caught", "")
def t_error(t):
    """Abort on an unmatched character, reporting its position."""
    message = 'Illegal character: {} at lexpos {}'.format(t.value, t.lexpos)
    raise lex.LexError(message, '')
def t_error(t):
    """Hard-fail scanning when an illegal character is encountered."""
    remaining = t.lexer.lexdata[t.lexer.lexpos:]
    # t.lineno may be unset/falsy early in scanning; report 0 in that case.
    line = t.lineno or 0
    raise lex.LexError(
        "Scanning error. Illegal character '%s' at line %d" % (t.value[0], line),
        remaining)