def __init__(self, **options):
    from typecode._vendor.pygments.lexers._vim_builtins import command, option, auto
    self._cmd = command
    self._opt = option
    self._aut = auto
    RegexLexer.__init__(self, **options)
def __init__(self, **options):
    self.stdlibhighlighting = get_bool_opt(options, 'stdlibhighlighting', True)
    self.c99highlighting = get_bool_opt(options, 'c99highlighting', True)
    self.platformhighlighting = get_bool_opt(options, 'platformhighlighting', True)
    RegexLexer.__init__(self, **options)
def __init__(self, **options):
    self.smhighlighting = get_bool_opt(options, 'sourcemod', True)

    self._functions = set()
    if self.smhighlighting:
        from typecode._vendor.pygments.lexers._sourcemod_builtins import FUNCTIONS
        self._functions.update(FUNCTIONS)
    RegexLexer.__init__(self, **options)
def __init__(self, **options):
    level = get_choice_opt(options, 'unicodelevel', list(self.tokens), 'basic')
    if level not in self._all_tokens:
        # compile the regexes now
        self._tokens = self.__class__.process_tokendef(level)
    else:
        self._tokens = self._all_tokens[level]

    RegexLexer.__init__(self, **options)
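# A minimal sketch of how get_choice_opt behaves in the __init__ above, using
# the stock pygments.util helper (same signature as the vendored copy): it
# validates the option value against the allowed list, falls back to the
# default when the option is absent, and raises OptionError for an unlisted
# value. The option dict and choice list here are illustrative only.
from pygments.util import get_choice_opt

level = get_choice_opt({'unicodelevel': 'full'},
                       'unicodelevel', ['basic', 'full'], 'basic')
# level == 'full'; an unlisted value such as 'extreme' would raise OptionError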
def get_tokens_unprocessed(self, text):
    for index, token, value in \
            RegexLexer.get_tokens_unprocessed(self, text):
        # Convention: mark all upper case names as constants
        if token is Name:
            if value.isupper():
                token = Name.Constant
        yield index, token, value
def get_tokens_unprocessed(self, text):
    stack = ['root']
    for index, token, value in \
            RegexLexer.get_tokens_unprocessed(self, text, stack):
        if token is Name and value in self.EXTRA_KEYWORDS:
            yield index, Name.Builtin, value
        else:
            yield index, token, value
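# A self-contained sketch of the EXTRA_KEYWORDS pattern used above, written
# against stock Pygments rather than the typecode vendored copy; ToyLexer and
# its keyword set are hypothetical, but the filtering idiom is the same: let
# the regex rules emit generic Name tokens, then promote known names to
# Name.Builtin in a post-processing pass.
from pygments.lexer import RegexLexer
from pygments.token import Name, Text

class ToyLexer(RegexLexer):
    name = 'Toy'
    EXTRA_KEYWORDS = {'spam', 'eggs'}
    tokens = {
        'root': [
            (r'[a-zA-Z_]\w*', Name),
            (r'\s+', Text),
        ],
    }

    def get_tokens_unprocessed(self, text):
        stack = ['root']
        for index, token, value in \
                RegexLexer.get_tokens_unprocessed(self, text, stack):
            if token is Name and value in self.EXTRA_KEYWORDS:
                yield index, Name.Builtin, value
            else:
                yield index, token, value

# list(ToyLexer().get_tokens_unprocessed('spam foo'))
# -> [(0, Token.Name.Builtin, 'spam'), (4, Token.Text, ' '), (5, Token.Name, 'foo')]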
def get_tokens_unprocessed(self, text):
    from typecode._vendor.pygments.lexers._asy_builtins import ASYFUNCNAME, ASYVARNAME
    for index, token, value in \
            RegexLexer.get_tokens_unprocessed(self, text):
        if token is Name and value in ASYFUNCNAME:
            token = Name.Function
        elif token is Name and value in ASYVARNAME:
            token = Name.Variable
        yield index, token, value
def get_tokens_unprocessed(self, text):
    for index, token, value in \
            RegexLexer.get_tokens_unprocessed(self, text):
        if token is Name:
            if self.smhighlighting:
                if value in self.SM_TYPES:
                    token = Keyword.Type
                elif value in self._functions:
                    token = Name.Builtin
        yield index, token, value
def __init__(self, **options):
    self.funcnamehighlighting = get_bool_opt(
        options, 'funcnamehighlighting', True)
    self.disabledmodules = get_list_opt(
        options, 'disabledmodules', ['unknown'])
    self.startinline = get_bool_opt(options, 'startinline', False)

    # private option argument for the lexer itself
    if '_startinline' in options:
        self.startinline = options.pop('_startinline')

    # collect activated functions in a set
    self._functions = set()
    if self.funcnamehighlighting:
        from typecode._vendor.pygments.lexers._php_builtins import MODULES
        for key, value in iteritems(MODULES):
            if key not in self.disabledmodules:
                self._functions.update(value)
    RegexLexer.__init__(self, **options)
def get_tokens_unprocessed(self, text):
    stack = ['root']
    if self.startinline:
        stack.append('php')
    for index, token, value in \
            RegexLexer.get_tokens_unprocessed(self, text, stack):
        if token is Name.Other:
            if value in self._functions:
                yield index, Name.Builtin, value
                continue
        yield index, token, value
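# Usage sketch for the two PHP methods above, using the stock
# pygments.lexers.PhpLexer, which takes the same options as this vendored
# copy: 'startinline' pushes the 'php' state onto the initial stack so input
# is lexed as PHP code without requiring a leading '<?php' opener.
from pygments.lexers import PhpLexer

inline = PhpLexer(startinline=True)
# strlen() would be emitted as Name.Builtin here, because
# 'funcnamehighlighting' defaults to True and strlen belongs to an
# enabled builtin module.
# list(inline.get_tokens('$n = strlen($s);'))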
def get_tokens_unprocessed(self, text):
    for index, token, value in \
            RegexLexer.get_tokens_unprocessed(self, text):
        if token is Name:
            if self.stdlibhighlighting and value in self.stdlib_types:
                token = Keyword.Type
            elif self.c99highlighting and value in self.c99_types:
                token = Keyword.Type
            elif self.platformhighlighting and value in self.linux_types:
                token = Keyword.Type
        yield index, token, value
def get_tokens_unprocessed(self, text):
    from typecode._vendor.pygments.lexers._cocoa_builtins import COCOA_INTERFACES, \
        COCOA_PROTOCOLS, COCOA_PRIMITIVES
    for index, token, value in \
            RegexLexer.get_tokens_unprocessed(self, text):
        if token is Name or token is Name.Class:
            if value in COCOA_INTERFACES or value in COCOA_PROTOCOLS \
                    or value in COCOA_PRIMITIVES:
                token = Name.Builtin.Pseudo
        yield index, token, value
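# Side note on the 'token is Name or token is Name.Class' test above: Pygments
# token types are singletons, so identity checks match only the exact type;
# the separate check for Name.Class is needed because it is a subtype of Name,
# not the same object. A quick demonstration with stock Pygments:
from pygments.token import Name

assert Name.Class is not Name   # distinct singleton, so 'is Name' misses it
assert Name.Class in Name       # but it is a subtype of Name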
def get_tokens_unprocessed(self, text):
    # TODO: builtins are only subsequent tokens on lines
    #       and 'keywords' only happen at the beginning except
    #       for :au ones
    for index, token, value in \
            RegexLexer.get_tokens_unprocessed(self, text):
        if token is Name.Other:
            if self.is_in(value, self._cmd):
                yield index, Keyword, value
            elif self.is_in(value, self._opt) or \
                    self.is_in(value, self._aut):
                yield index, Name.Builtin, value
            else:
                yield index, Text, value
        else:
            yield index, token, value
def get_tokens_unprocessed(self, text):
    for index, token, value in RegexLexer.get_tokens_unprocessed(
            self, text):
        if token is Name:
            lowercase_value = value.lower()
            if lowercase_value in self.builtins:
                yield index, Name.Builtin, value
                continue
            if lowercase_value in self.keywords:
                yield index, Keyword, value
                continue
            if lowercase_value in self.functions:
                yield index, Name.Builtin, value
                continue
            if lowercase_value in self.operators:
                yield index, Operator, value
                continue
        yield index, token, value
def get_tokens_unprocessed(self, text):
    for index, token, value in RegexLexer.get_tokens_unprocessed(
            self, text):
        if token is Name:
            if value in self.KEYWORD:
                yield index, Keyword, value
            elif value in self.KEYWORD_OPERATOR:
                yield index, Operator.Word, value
            elif value in self.BUILTIN:
                yield index, Keyword, value
            elif value in self.BUILTIN_DECLARATION:
                yield index, Keyword.Declaration, value
            elif value in self.BUILTIN_NAMESPACE:
                yield index, Keyword.Namespace, value
            elif value in self.CONSTANT:
                yield index, Name.Constant, value
            elif value in self.PSEUDO_VAR:
                yield index, Name.Builtin.Pseudo, value
            else:
                yield index, token, value
        else:
            yield index, token, value
def get_tokens_unprocessed(self, text, stack=('root',)):
    """Reset the content-type state."""
    self.content_type = None
    return RegexLexer.get_tokens_unprocessed(self, text, stack)
def __init__(self, **options):
    self.body_lexer = options.get('body_lexer', None)
    RegexLexer.__init__(self, **options)
def __init__(self, **options):
    self.handlecodeblocks = get_bool_opt(options, 'handlecodeblocks', True)
    RegexLexer.__init__(self, **options)
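# Usage sketch, assuming this is the same-named 'handlecodeblocks' option on
# the stock pygments.lexers.RstLexer, which controls whether code/sourcecode
# directives are sub-highlighted with a guessed lexer:
from pygments.lexers import RstLexer

plain = RstLexer(handlecodeblocks=False)  # leave code blocks unhighlighted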