def get_tokens_unprocessed(self, text):
    """Re-tag identifiers produced by CppLexer using this lexer's word lists.

    Any ``Name`` token whose text appears in one of the keyword sets is
    promoted to a richer token type; everything else passes through
    unchanged.
    """
    # Ordered (vocabulary, replacement-token) pairs; first match wins,
    # mirroring the original if/elif precedence.
    remap = (
        (self.EXTRA_KEYWORDS, Keyword),
        (self.FUNC_KEYWORDS, Name.Function),
        (self.TYPE_KEYWORDS, Name.Class),
        (self.CONST_KEYWORDS, Name.Constant),
    )
    for index, token, value in CppLexer.get_tokens_unprocessed(self, text):
        if token is Name:
            for vocabulary, replacement in remap:
                if value in vocabulary:
                    yield index, replacement, value
                    break
            else:
                yield index, token, value
        else:
            yield index, token, value
def get_tokens_unprocessed(self, text):
    """Refine CppLexer output by matching token text against category sets.

    The first category containing the value determines the new token type;
    unmatched tokens are forwarded as-is.
    """
    # Order matters: it reproduces the original elif-chain precedence.
    categories = (
        (self.structure, Name.Builtin),
        (self.operators, Operator),
        (self.variables, Keyword.Reserved),
        (self.suppress_highlight, Name),
        (self.functions, Name.Function),
    )
    for index, token, value in CppLexer.get_tokens_unprocessed(self, text):
        for words, replacement in categories:
            if value in words:
                token = replacement
                break
        yield index, token, value
def process_file(path, args):
    """Lex *path* as C++, apply FixupTool changes, and emit the result.

    Reads the file as UTF-8 bytes, runs the token stream through
    ``FixupTool``, then either rewrites the file in place (when
    ``args.in_place`` is set) or prints the transformed text to stdout.
    """
    with open(path, 'rb') as fp:
        text = fp.read().decode('utf-8')
    nl_style = determine_nl_style(path)
    lexer = CppLexer()
    tokens = lexer.get_tokens_unprocessed(text)
    fixup_tool = FixupTool(path, tokens, nl_style)
    changes = fixup_tool.parse()
    new_text = change_text(text, changes)
    if args.in_place:
        # Write back in UTF-8 explicitly (the read side decoded UTF-8, so
        # relying on the platform default encoding could raise or corrupt),
        # and disable newline translation so the newline style detected by
        # determine_nl_style() is preserved byte-for-byte.
        with open(path, 'w', encoding='utf-8', newline='') as fp:
            fp.write(new_text)
    else:
        print(new_text)
def get_tokens_unprocessed(self, text):
    """Re-classify CppLexer tokens against the lexer's vocabulary tables.

    Each token's text is looked up in the category sets below; the first
    hit supplies the replacement token type, otherwise the token is
    passed through untouched.
    """
    # Tuple order reproduces the original elif-chain precedence exactly.
    categories = (
        (self.operators, Operator),
        (self.types, Keyword.Type),
        (self.fespaces, Name.Class),
        (self.preprocessor, Comment.Preproc),
        (self.keywords, Keyword.Reserved),
        (self.functions, Name.Function),
        (self.parameters, Keyword.Pseudo),
        (self.suppress_highlight, Name),
    )
    for index, token, value in CppLexer.get_tokens_unprocessed(self, text):
        for words, replacement in categories:
            if value in words:
                token = replacement
                break
        yield index, token, value
def get_tokens_unprocessed(self, text):
    """Refine CppLexer tokens, dispatching on the base token type first.

    ``Name`` tokens are checked against constants/functions/storage,
    ``Name.Function`` tokens against structure words, and ``Keyword``
    tokens against storage words; anything else is forwarded unchanged.
    """
    for index, token, value in CppLexer.get_tokens_unprocessed(self, text):
        new_token = token
        if token is Name:
            if value in self.constants:
                new_token = Keyword.Constant
            elif value in self.functions:
                new_token = Name.Function
            elif value in self.storage:
                new_token = Keyword.Type
        elif token is Name.Function and value in self.structure:
            new_token = Name.Other
        elif token is Keyword and value in self.storage:
            new_token = Keyword.Type
        yield index, new_token, value
def get_tokens_unprocessed(self, text):
    """Promote identifiers listed in EXTRA_TYPES to ``Keyword.Type``.

    All other tokens from the underlying CppLexer are passed through
    unchanged.
    """
    for index, token, value in CppLexer.get_tokens_unprocessed(self, text):
        if token is Name and value in self.EXTRA_TYPES:
            token = Keyword.Type
        yield index, token, value
def fetch_tokens(self):
    """Return the lexed token list for this object's code, cached.

    Bug fix: the original checked ``self._tokens`` as a cache but never
    assigned to it, so every call re-fetched and re-lexed the code. The
    result is now stored on first computation and reused thereafter.
    """
    if self._tokens is None:
        code = self.fetch()
        lexer = CppLexer()
        self._tokens = list(lexer.get_tokens_unprocessed(code))
    return self._tokens