def add_tokens(self, tokens):
    """Merge *tokens* (name -> token mapping) into self.tokens.

    A name already present is resolved via Tokenizer.best_match between
    the stored token and the incoming one; new names are added as-is.
    Keys are kept with their original case.

    NOTE(review): SOURCE contains a second, later definition of
    add_tokens that lowercases keys; if both are in the same class the
    later one shadows this one — confirm which is intended.
    """
    for name, incoming in tokens.items():
        try:
            existing = self.tokens[name]
        except KeyError:
            # First time we see this name: store it directly.
            self.tokens[name] = incoming
        else:
            # Name collision: keep whichever token matches best.
            self.tokens[name] = Tokenizer.best_match([existing, incoming])
def add_tokens(self, tokens):
    """Merge *tokens* (name -> token mapping) into self.tokens.

    Keys are stored lowercased, making lookups case-insensitive
    (matching the ``word.lower()`` lookup used elsewhere in the file).
    A key already present is resolved via Tokenizer.best_match between
    the stored token and the incoming one; new keys are added as-is.
    """
    for name, incoming in tokens.items():
        key = name.lower()
        try:
            existing = self.tokens[key]
        except KeyError:
            # First occurrence of this (lowercased) name.
            self.tokens[key] = incoming
        else:
            # Collision: keep whichever token matches best.
            self.tokens[key] = Tokenizer.best_match([existing, incoming])
def get_sel_token(self, view):
    """Return ``(word, token)`` for the word under the first selection.

    Lookup order:
      1. If the word is preceded by ``->``, ``.`` or ``::``, search the
         members reachable at that position (self.traverse_members).
      2. Otherwise prefer tokens local to the enclosing function
         (self.cc.functiontokens keyed by file, then function).
      3. Fall back to the global, lowercase-keyed token table.

    Returns ``(None, None)`` with no selection, ``(word, None)`` when
    nothing matches.
    """
    selections = view.sel()
    if not selections:
        return (None, None)
    word_region = view.word(selections[0].end())
    start = word_region.begin()
    ident = view.substr(word_region)

    # Member access? Check the one- and two-char prefixes before the word.
    # NOTE(review): start > 2 also gates the single-char "." case, so a
    # "." at buffer offset 1 is never recognized — preserved as-is.
    if start > 2:
        two_before = view.substr(sublime.Region(start - 2, start))
        one_before = view.substr(sublime.Region(start - 1, start))
        if two_before in ("->", "::") or one_before == ".":
            wanted_suffix = "::" + ident
            for member in self.traverse_members(view, word_region.end()):
                if member[Tokenizer.T_NAME].endswith(wanted_suffix):
                    return (ident, member)
            return (ident, None)

    # Plain identifier: look for a function-local token first.
    func = self.current_function(view)
    filename = self.currentfile
    per_file = self.cc.functiontokens
    if filename in per_file and func in per_file[filename] and per_file[filename][func]:
        matches = [t for t in per_file[filename][func] if t[Tokenizer.T_NAME] == ident]
        if matches:
            return (ident, Tokenizer.best_match(matches))

    # Last resort: the global case-insensitive table.
    key = ident.lower()
    if key in self.cc.tokens:
        return (ident, self.cc.tokens[key])
    return (ident, None)