def get_tokens_unprocessed(self, text):
    # Re-tag plain identifiers emitted by CLexer as keywords or types.
    for index, token, value in CLexer.get_tokens_unprocessed(self, text):
        if token is Name:
            if value in self.keywords:
                token = Keyword
            elif value in self.types:
                token = Keyword.Type
        yield index, token, value
def get_tokens_unprocessed(self, text):
    # Promote OSPRay identifiers to more specific keyword tokens.
    for index, token, value in CLexer.get_tokens_unprocessed(self, text):
        if token is Name:
            if value in self.ospray_types:
                token = Keyword.Type
            elif value in self.ospray_functions:
                token = Keyword.Function
            elif value in self.ospray_constants:
                token = Keyword.Constant
        yield index, token, value
def get_tokens_unprocessed(self, text):
    # Re-tag identifiers using the lexer's qualifier, type, builtin,
    # execution-configuration, and function tables.
    for index, token, value in CLexer.get_tokens_unprocessed(self, text):
        if token is Name:
            if value in self.variable_qualifiers:
                token = Keyword.Type
            elif value in self.vector_types:
                token = Keyword.Type
            elif value in self.variables:
                token = Name.Builtin
            elif value in self.execution_confs:
                token = Keyword.Pseudo
            elif value in self.function_qualifiers:
                token = Keyword.Reserved
            elif value in self.functions:
                token = Name.Function
        yield index, token, value
def get_tokens_unprocessed(self, text):
    for index, token, value in CLexer.get_tokens_unprocessed(self, text):
        if token is Name and value in self.EXTRA_KEYWORDS:
            yield index, Keyword, value
        else:
            yield index, token, value
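# A minimal, self-contained sketch (assumed context, not from the original
# source): it shows how an override like the ones above plugs into a CLexer
# subclass and is used to highlight a snippet. The class name CustomCLexer,
# the EXTRA_KEYWORDS contents, and the sample C code are hypothetical; only
# CLexer, the token types, highlight(), and TerminalFormatter are real
# Pygments APIs.
from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers import CLexer
from pygments.token import Keyword, Name


class CustomCLexer(CLexer):
    """CLexer variant that re-tags selected identifiers as keywords."""

    # Hypothetical identifiers that plain C would otherwise tokenize as Name.
    EXTRA_KEYWORDS = {'kernel', 'global', 'local'}

    def get_tokens_unprocessed(self, text):
        for index, token, value in CLexer.get_tokens_unprocessed(self, text):
            if token is Name and value in self.EXTRA_KEYWORDS:
                yield index, Keyword, value
            else:
                yield index, token, value


# 'global' is now rendered with keyword styling instead of as a plain name.
print(highlight('global int counter;', CustomCLexer(), TerminalFormatter()))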