def get_tokens_unprocessed(self, text=None, context=None):
    """Yield ``(index, token, value)`` tuples for *text*.

    Clears this lexer's cached string-escape bookkeeping before
    delegating the actual tokenization to ``ExtendedRegexLexer``.
    """
    # Discard escape-tracking state left over from any previous run.
    self._reset_stringescapes()
    return ExtendedRegexLexer.get_tokens_unprocessed(self, text, context)
def get_tokens_unprocessed(self, text=None, context=None):
    """Tokenize *text* using a freshly created lexer context.

    NOTE(review): the incoming *context* argument is ignored — a new
    ``LexerContext`` starting at position 0 with ``indent = 0`` always
    replaces it; confirm no caller relies on passing its own context.
    """
    fresh = LexerContext(text, 0)
    fresh.indent = 0  # indentation level consumed by the lexer callbacks
    return ExtendedRegexLexer.get_tokens_unprocessed(self, text, context=fresh)
def get_tokens_unprocessed(self, text=None, context=None):
    """Tokenize *text*, resetting string-escape state first.

    The per-run escape bookkeeping is cleared so state from an earlier
    call cannot leak into this one; lexing itself is delegated to
    ``ExtendedRegexLexer``.
    """
    self._reset_stringescapes()
    token_stream = ExtendedRegexLexer.get_tokens_unprocessed(self, text, context)
    return token_stream
def get_tokens_unprocessed(self, text=None, context=None):
    """Delegate tokenization of *text* with a brand-new context.

    A ``LexerContext`` is built at position 0 and given an ``indent``
    attribute of 0 before lexing starts.

    NOTE(review): the *context* parameter passed by the caller is
    discarded in favour of the fresh one — verify callers expect this.
    """
    ctx = LexerContext(text, 0)
    # Seed the indent tracker the state callbacks read/update.
    ctx.indent = 0
    return ExtendedRegexLexer.get_tokens_unprocessed(self, text, context=ctx)