Example #1
class GenshiMarkupLexer(RegexLexer):
    """
    Base lexer for Genshi markup, used by `HtmlGenshiLexer` and
    `GenshiLexer`.
    """

    flags = re.DOTALL

    tokens = {
        'root': [
            (r'[^<\$]+', Other),
            (r'(<\?python)(.*?)(\?>)',
             bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
            # yield style and script blocks as Other
            (r'<\s*(script|style)\s*.*?>.*?<\s*/\1\s*>', Other),
            (r'<\s*py:[a-zA-Z0-9]+', Name.Tag, 'pytag'),
            (r'<\s*[a-zA-Z0-9:]+', Name.Tag, 'tag'),
            include('variable'),
            (r'[<\$]', Other),
        ],
        'pytag': [
            (r'\s+', Text),
            (r'[a-zA-Z0-9_:-]+\s*=', Name.Attribute, 'pyattr'),
            (r'/?\s*>', Name.Tag, '#pop'),
        ],
        'pyattr': [
            ('(")(.*?)(")', bygroups(String, using(PythonLexer),
                                     String), '#pop'),
            ("(')(.*?)(')", bygroups(String, using(PythonLexer),
                                     String), '#pop'),
            (r'[^\s>]+', String, '#pop'),
        ],
        'tag': [
            (r'\s+', Text),
            (r'py:[a-zA-Z0-9_-]+\s*=', Name.Attribute, 'pyattr'),
            (r'[a-zA-Z0-9_:-]+\s*=', Name.Attribute, 'attr'),
            (r'/?\s*>', Name.Tag, '#pop'),
        ],
        'attr': [('"', String, 'attr-dstring'), ("'", String, 'attr-sstring'),
                 (r'[^\s>]*', String, '#pop')],
        'attr-dstring': [('"', String, '#pop'),
                         include('strings'), ("'", String)],
        'attr-sstring': [("'", String, '#pop'),
                         include('strings'), ("'", String)],
        'strings': [('[^"\'$]+', String),
                    include('variable')],
        'variable': [
            (r'(?<!\$)(\$\{)(.+?)(\})',
             bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
            (r'(?<!\$)(\$)([a-zA-Z_][a-zA-Z0-9_\.]*)', Name.Variable),
        ]
    }
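
GenshiMarkupLexer is only a base class (it defines no name or aliases), so it is normally used through HtmlGenshiLexer or GenshiLexer. A minimal sketch of driving it directly, assuming Pygments is installed and the bundled copy in pygments.lexers.templates matches the class above; the sample template string is made up for illustration:

from pygments.lexers.templates import GenshiMarkupLexer

sample = '<py:if test="user">Hello ${user.name}</py:if>'

# get_tokens() yields (token type, text) pairs: the opening py: tag should
# come out as Name.Tag, the test="..." attribute value is re-lexed as Python,
# and ${user.name} is wrapped in Comment.Preproc delimiters.
for ttype, value in GenshiMarkupLexer().get_tokens(sample):
    print(ttype, repr(value))
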
Example #2
class GenshiTextLexer(RegexLexer):
    """
    A lexer that highlights `genshi <http://genshi.edgewall.org/>`_ text
    templates.
    """

    name = 'Genshi Text'
    aliases = ['genshitext']
    mimetypes = ['application/x-genshi-text', 'text/x-genshi']

    tokens = {
        'root': [
            (r'[^#\$\s]+', Other),
            (r'^(\s*)(##.*)$', bygroups(Text, Comment)),
            (r'^(\s*)(#)', bygroups(Text, Comment.Preproc), 'directive'),
            include('variable'),
            (r'[#\$\s]', Other),
        ],
        'directive': [
            (r'\n', Text, '#pop'),
            (r'(?:def|for|if)\s+.*', using(PythonLexer), '#pop'),
            (r'(choose|when|with)([^\S\n]+)(.*)',
             bygroups(Keyword, Text, using(PythonLexer)), '#pop'),
            (r'(choose|otherwise)\b', Keyword, '#pop'),
            (r'(end\w*)([^\S\n]*)(.*)', bygroups(Keyword, Text,
                                                 Comment), '#pop'),
        ],
        'variable': [
            (r'(?<!\$)(\$\{)(.+?)(\})',
             bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
            (r'(?<!\$)(\$)([a-zA-Z_][a-zA-Z0-9_\.]*)', Name.Variable),
        ]
    }
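
A short usage sketch, assuming Pygments is installed (GenshiTextLexer is also importable from pygments.lexers under the same name); the template text is an invented example:

from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers import GenshiTextLexer

template = """\
## a line comment
#if greeting
Hello ${name}!
#end
"""

# Lines starting with # enter the 'directive' state, ${...} goes through
# the 'variable' state, and everything else is emitted as Other.
print(highlight(template, GenshiTextLexer(), TerminalFormatter()))
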
Example #3
class OcamlLexer(RegexLexer):
    """
    For the OCaml language.

    *New in Pygments 0.7.*
    """

    name = 'OCaml'
    aliases = ['ocaml']
    filenames = ['*.ml', '*.mli', '*.mll', '*.mly']
    mimetypes = ['text/x-ocaml']

    keywords = [
      'as', 'assert', 'begin', 'class', 'constraint', 'do', 'done',
      'downto', 'else', 'end', 'exception', 'external', 'false',
      'for', 'fun', 'function', 'functor', 'if', 'in', 'include',
      'inherit', 'initializer', 'lazy', 'let', 'match', 'method',
      'module', 'mutable', 'new', 'object', 'of', 'open', 'private',
      'raise', 'rec', 'sig', 'struct', 'then', 'to', 'true', 'try',
      'type', 'val', 'virtual', 'when', 'while', 'with'
    ]
    keyopts = [
      '!=','#','&','&&',r'\(',r'\)',r'\*',r'\+',',','-',
      r'-\.','->',r'\.',r'\.\.',':','::',':=',':>',';',';;','<',
      '<-','=','>','>]','>}',r'\?',r'\?\?',r'\[',r'\[<',r'\[>',r'\[\|',
      ']','_','`','{','{<',r'\|',r'\|]','}','~'
    ]

    operators = r'[!$%&*+\./:<=>?@^|~-]'
    word_operators = ['and', 'asr', 'land', 'lor', 'lsl', 'lxor', 'mod', 'or']
    prefix_syms = r'[!?~]'
    infix_syms = r'[=<>@^|&+\*/$%-]'
    primitives = ['unit', 'int', 'float', 'bool', 'string', 'char', 'list', 'array']

    tokens = {
        'escape-sequence': [
            (r'\\[\"\'ntbr]', String.Escape),
            (r'\\[0-9]{3}', String.Escape),
            (r'\\x[0-9a-fA-F]{2}', String.Escape),
        ],
        'root': [
            (r'\s+', Text),
            (r'false|true|\(\)|\[\]', Name.Builtin.Pseudo),
            (r'\b([A-Z][A-Za-z0-9_\']*)(?=\s*\.)',
             Name.Namespace, 'dotted'),
            (r'\b([A-Z][A-Za-z0-9_\']*)', Name.Class),
            (r'\(\*', Comment, 'comment'),
            (r'\b(%s)\b' % '|'.join(keywords), Keyword),
            (r'(%s)' % '|'.join(keyopts), Operator),
            (r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
            (r'\b(%s)\b' % '|'.join(word_operators), Operator.Word),
            (r'\b(%s)\b' % '|'.join(primitives), Keyword.Type),

            (r"[^\W\d][\w']*", Name),

            (r'\d[\d_]*', Number.Integer),
            (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
            (r'0[oO][0-7][0-7_]*', Number.Oct),
            (r'0[bB][01][01_]*', Number.Binary),
            (r'-?\d[\d_]*(\.[\d_]*)?([eE][+\-]?\d[\d_]*)', Number.Float),

            (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'",
             String.Char),
            (r"'.'", String.Char),
            (r"'", Keyword), # a stray quote is another syntax element

            (r'"', String.Double, 'string'),

            (r'[~?][a-z][\w\']*:', Name.Variable),
        ],
        'comment': [
            (r'[^(*)]+', Comment),
            (r'\(\*', Comment, '#push'),
            (r'\*\)', Comment, '#pop'),
            (r'[(*)]', Comment),
        ],
        'string': [
            (r'[^\\"]+', String.Double),
            include('escape-sequence'),
            (r'\\\n', String.Double),
            (r'"', String.Double, '#pop'),
        ],
        'dotted': [
            (r'\s+', Text),
            (r'\.', Punctuation),
            (r'[A-Z][A-Za-z0-9_\']*(?=\s*\.)', Name.Namespace),
            (r'[A-Z][A-Za-z0-9_\']*', Name.Class, '#pop'),
            (r'[a-z][a-z0-9_\']*', Name, '#pop'),
        ],
    }
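
A small sketch of inspecting the token stream, assuming Pygments is installed and ships this OcamlLexer; the OCaml one-liner is an arbitrary example:

from pygments.lexers import OcamlLexer
from pygments.token import Keyword

code = "let rec fact n = if n <= 1 then 1 else n * fact (n - 1)"

# Collect everything the lexer classifies under Keyword (the keyword rule
# runs before the generic name rule, so 'fact' and 'n' stay plain Names).
keywords = [value for ttype, value in OcamlLexer().get_tokens(code)
            if ttype in Keyword]
print(keywords)   # expected: ['let', 'rec', 'if', 'then', 'else']
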
Example #4
class LlvmLexer(RegexLexer):
    """
    For LLVM assembly code.
    """
    name = 'LLVM'
    aliases = ['llvm']
    filenames = ['*.ll']
    mimetypes = ['text/x-llvm']

    #: a double-quoted string (also allowed as an identifier)
    string = r'"[^"]*?"'
    identifier = r'([a-zA-Z$._][a-zA-Z$._0-9]*|' + string + ')'

    tokens = {
        'root': [
            include('whitespace'),

            # Before keywords, because keywords are valid label names :(...
            (r'^\s*' + identifier + r'\s*:', Name.Label),
            include('keyword'),
            (r'%' + identifier, Name.Variable),  #Name.Identifier.Local),
            (r'@' + identifier, Name.Constant),  #Name.Identifier.Global),
            (r'%\d+', Name.Variable.Anonymous),  #Name.Identifier.Anonymous),
            (r'c?' + string, String),
            (r'0[xX][a-fA-F0-9]+', Number),
            (r'-?\d+(?:[.]\d+)?(?:[eE][-+]?\d+(?:[.]\d+)?)?', Number),
            (r'[=<>{}\[\]()*.,]|x\b', Punctuation)
        ],
        'whitespace': [(r'(\n|\s)+', Text), (r';.*?\n', Comment)],
        'keyword': [
            # Regular keywords
            (
                r'(void|label|float|double|opaque'
                r'|to'
                r'|alias|type'
                r'|zeroext|signext|inreg|sret|noalias|noreturn|nounwind|nest'
                r'|module|asm|target|datalayout|triple'
                r'|true|false|null|zeroinitializer|undef'
                r'|global|internal|external|linkonce|weak|appending|extern_weak'
                r'|dllimport|dllexport'
                r'|ccc|fastcc|coldcc|cc|tail'
                r'|default|hidden|protected'
                r'|thread_local|constant|align|section'
                r'|define|declare'

                # Statements & expressions
                r'|trunc|zext|sext|fptrunc|fpext|fptoui|fptosi|uitofp|sitofp'
                r'|ptrtoint|inttoptr|bitcast|getelementptr|select|icmp|fcmp'
                r'|extractelement|insertelement|shufflevector'
                r'|sideeffect|volatile'
                r'|ret|br|switch|invoke|unwind|unreachable'
                r'|add|sub|mul|udiv|sdiv|fdiv|urem|srem|frem'
                r'|shl|lshr|ashr|and|or|xor'
                r'|malloc|free|alloca|load|store'
                r'|phi|call|va_arg|va_list'

                # Comparison condition codes for icmp
                r'|eq|ne|ugt|uge|ult|ule|sgt|sge|slt|sle'
                # Ditto for fcmp: (minus keywords mentioned in other contexts)
                r'|oeq|ogt|oge|olt|ole|one|ord|ueq|ugt|uge|une|uno'
                r')\b',
                Keyword),
            # Integer types
            (r'i[1-9]\d*', Keyword)
        ]
    }
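
An illustrative sketch, assuming Pygments is installed (LlvmLexer is the bundled lexer registered for *.ll files); the IR fragment is a hypothetical example:

from pygments import highlight
from pygments.formatters import HtmlFormatter
from pygments.lexers import LlvmLexer

ir = """\
define i32 @add(i32 %a, i32 %b) {
entry:
  %sum = add i32 %a, %b
  ret i32 %sum
}
"""

# 'define', 'add' and 'ret' come from the big keyword rule, i32 from the
# integer-type rule, %a/%sum from the %-identifier rule, and 'entry:' is
# caught by the label rule that runs before the keywords.
print(highlight(ir, LlvmLexer(), HtmlFormatter()))
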
Example #5
class MakoLexer(RegexLexer):
    """
    Generic `mako templates`_ lexer. Code that isn't Mako
    markup is yielded as `Token.Other`.

    *New in Pygments 0.7.*

    .. _mako templates: http://www.makotemplates.org/
    """

    name = 'Mako'
    aliases = ['mako']
    filenames = ['*.mao']
    mimetypes = ['application/x-mako']

    tokens = {
        'root': [
            (r'(\s*)(%)(\s*end(?:\w+))(\n|\Z)',
             bygroups(Text, Comment.Preproc, Keyword, Other)),
            (r'(\s*)(%)([^\n]*)(\n|\Z)',
             bygroups(Text, Comment.Preproc, using(PythonLexer), Other)),
            (r'(\s*)(##[^\n]*)(\n|\Z)', bygroups(Text, Comment.Preproc,
                                                 Other)),
            (r'(?s)<%doc>.*?</%doc>', Comment.Preproc),
            (r'(<%)([\w\.\:]+)', bygroups(Comment.Preproc,
                                          Name.Builtin), 'tag'),
            (r'(</%)([\w\.\:]+)(>)',
             bygroups(Comment.Preproc, Name.Builtin, Comment.Preproc)),
            (r'<%(?=([\w\.\:]+))', Comment.Preproc, 'ondeftags'),
            (r'(?s)(<%(?:!?))(.*?)(%>)',
             bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
            (r'(\$\{)(.*?)(\})',
             bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
            (r'''(?sx)
                (.+?)                # anything, followed by:
                (?:
                 (?<=\n)(?=%|\#\#) | # an eval or comment line
                 (?=\#\*) |          # multiline comment
                 (?=</?%) |          # a python block
                                     # call start or end
                 (?=\$\{) |          # a substitution
                 (?<=\n)(?=\s*%) |
                                     # - don't consume
                 (\\\n) |            # an escaped newline
                 \Z                  # end of string
                )
            ''', bygroups(Other, Operator)),
            (r'\s+', Text),
        ],
        'ondeftags': [
            (r'<%', Comment.Preproc),
            (r'(?<=<%)(include|inherit|namespace|page)', Name.Builtin),
            include('tag'),
        ],
        'tag': [
            (r'((?:\w+)\s*=)\s*(".*?")', bygroups(Name.Attribute, String)),
            (r'/?\s*>', Comment.Preproc, '#pop'),
            (r'\s+', Text),
        ],
        'attr': [
            ('".*?"', String, '#pop'),
            ("'.*?'", String, '#pop'),
            (r'[^\s>]+', String, '#pop'),
        ],
    }
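
A minimal sketch, assuming Pygments is installed and its MakoLexer matches the class above; the template is an invented example:

from pygments.lexers import MakoLexer
from pygments.token import Other

template = """\
% if user:
Hello ${user.name}!
% endif
"""

# As the docstring says, anything that is not Mako markup is yielded as
# Token.Other; % lines and ${...} expressions are re-lexed as Python.
for ttype, value in MakoLexer().get_tokens(template):
    label = 'plain' if ttype is Other else str(ttype)
    print(f'{label:25} {value!r}')
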
Example #6
class DjangoLexer(RegexLexer):
    """
    Generic `django <http://www.djangoproject.com/documentation/templates/>`_
    and `jinja <http://wsgiarea.pocoo.org/jinja/>`_ template lexer.

    It just highlights django/jinja code between the preprocessor directives,
    other data is left untouched by the lexer.
    """

    name = 'Django/Jinja'
    aliases = ['django', 'jinja']
    mimetypes = ['application/x-django-templating', 'application/x-jinja']

    flags = re.M | re.S

    tokens = {
        'root': [
            (r'[^{]+', Other),
            (r'\{\{', Comment.Preproc, 'var'),
            # jinja/django comments
            (r'\{[*#].*?[*#]\}', Comment),
            # django comments
            (r'(\{%)(-?\s*)(comment)(\s*-?)(%\})(.*?)'
             r'(\{%)(-?\s*)(endcomment)(\s*-?)(%\})',
             bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
                      Comment, Comment.Preproc, Text, Keyword, Text,
                      Comment.Preproc)),
            # raw jinja blocks
            (r'(\{%)(-?\s*)(raw)(\s*-?)(%\})(.*?)'
             r'(\{%)(-?\s*)(endraw)(\s*-?)(%\})',
             bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
                      Text, Comment.Preproc, Text, Keyword, Text,
                      Comment.Preproc)),
            # filter blocks
            (r'(\{%)(-?\s*)(filter)(\s+)([a-zA-Z_][a-zA-Z0-9_]*)',
             bygroups(Comment.Preproc, Text, Keyword, Text,
                      Name.Function), 'block'),
            (r'(\{%)(-?\s*)([a-zA-Z_][a-zA-Z0-9_]*)',
             bygroups(Comment.Preproc, Text, Keyword), 'block'),
            (r'\{', Other)
        ],
        'varnames': [
            (r'(\|)(\s*)([a-zA-Z_][a-zA-Z0-9_]*)',
             bygroups(Operator, Text, Name.Function)),
            (r'(is)(\s+)(not)?(\s+)?([a-zA-Z_][a-zA-Z0-9_]*)',
             bygroups(Keyword, Text, Keyword, Text, Name.Function)),
            (r'(_|true|false|none|True|False|None)\b', Keyword.Pseudo),
            (r'(in|as|reversed|recursive|not|and|or|is|if|else|import|'
             r'with(?:(?:out)?\s*context)?)\b', Keyword),
            (r'(loop|block|super|forloop)\b', Name.Builtin),
            (r'[a-zA-Z][a-zA-Z0-9_]*', Name.Variable),
            (r'\.[a-zA-Z0-9_]+', Name.Variable),
            (r':?"(\\\\|\\"|[^"])*"', String.Double),
            (r":?'(\\\\|\\'|[^'])*'", String.Single),
            (r'([{}()\[\]+\-*/,:]|[><=]=?)', Operator),
            (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
             r"0[xX][0-9a-fA-F]+[Ll]?", Number),
        ],
        'var': [(r'\s+', Text),
                (r'(-?)(\}\})', bygroups(Text, Comment.Preproc), '#pop'),
                include('varnames')],
        'block': [(r'\s+', Text),
                  (r'(-?)(%\})', bygroups(Text, Comment.Preproc), '#pop'),
                  include('varnames'), (r'.', Punctuation)]
    }

    def analyse_text(text):
        rv = 0.0
        if re.search(r'\{%\s*(block|extends)', text) is not None:
            rv += 0.4
        if re.search(r'\{%\s*if\s*.*?%\}', text) is not None:
            rv += 0.1
        if re.search(r'\{\{.*?\}\}', text) is not None:
            rv += 0.1
        return rv
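
A small sketch of the analyse_text() scoring, assuming Pygments is installed (guess_lexer() consults this method when picking a lexer); the template string is made up:

from pygments.lexers import DjangoLexer

sample = "{% extends 'base.html' %}\n{% if user %}{{ user.name }}{% endif %}\n"

# With the rules above this sample scores 0.4 ({% extends %}) + 0.1
# ({% if ... %}) + 0.1 ({{ ... }}) = 0.6.
print(DjangoLexer.analyse_text(sample))
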
Example #7
class SLexer(RegexLexer):
    """
    For S, S-plus, and R source code.

    *New in Pygments 0.10.*
    """

    name = 'S'
    aliases = ['splus', 's', 'r']
    filenames = ['*.S', '*.R']
    mimetypes = ['text/S-plus', 'text/S', 'text/R']

    tokens = {
        'comments': [
            (r'#.*$', Comment.Single),
        ],
        'valid_name': [
            (r'[a-zA-Z][0-9a-zA-Z\._]+', Text),
            (r'`.+`', String.Backtick),
        ],
        'punctuation': [
            (r'\[|\]|\[\[|\]\]|\$|\(|\)|@|:::?|;|,', Punctuation),
        ],
        'keywords': [(r'for(?=\s*\()|while(?=\s*\()|if(?=\s*\()|(?<=\s)else|'
                      r'(?<=\s)break(?=;|$)|return(?=\s*\()|function(?=\s*\()',
                      Keyword.Reserved)],
        'operators': [(r'<-|-|==|<=|>=|<|>|&&|&|!=', Operator),
                      (r'\*|\+|\^|/|%%|%/%|=', Operator),
                      (r'%in%|%\*%', Operator)],
        'builtin_symbols': [
            (r'NULL|NA|TRUE|FALSE', Keyword.Constant),
        ],
        'numbers': [
            (r'(?<![0-9a-zA-Z\)\}\]`\"])(?=\s*)[-\+]?[0-9]+'
             r'(\.[0-9]*)?([eE][-\+]?[0-9]+)?', Number),
        ],
        'statements': [
            include('comments'),
            # whitespaces
            (r'\s+', Text),
            (r'\"', String, 'string_dquote'),
            include('builtin_symbols'),
            include('numbers'),
            include('keywords'),
            include('punctuation'),
            include('operators'),
            include('valid_name'),
        ],
        'root': [
            include('statements'),
            # blocks:
            (r'\{|\}', Punctuation),
            #(r'\{', Punctuation, 'block'),
            (r'.', Text),
        ],
        #'block': [
        #    include('statements'),
        #    ('\{', Punctuation, '#push'),
        #    ('\}', Punctuation, '#pop')
        #],
        'string_dquote': [
            (r'[^\"]*\"', String, '#pop'),
        ],
    }

    def analyse_text(text):
        return '<-' in text
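
An illustrative sketch, assuming Pygments is installed (SLexer handles S, S-plus and R sources); the R snippet is an arbitrary example:

from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers import SLexer

r_code = """\
# fit a simple model
x <- rnorm(100)
y <- 2 * x + rnorm(100)
fit <- lm(y ~ x)
"""

# analyse_text() is deliberately crude: any '<-' assignment counts as S/R,
# so this prints 1.0 once Pygments clamps the result to a float score.
print(SLexer.analyse_text(r_code))
print(highlight(r_code, SLexer(), TerminalFormatter()))
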
Example #8
class CssLexer(RegexLexer):
    """
    For CSS (Cascading Style Sheets).
    """

    name = 'CSS'
    aliases = ['css']
    filenames = ['*.css']
    mimetypes = ['text/css']

    tokens = {
        'root': [
            include('basics'),
        ],
        'basics': [(r'\s+', Text), (r'/\*(?:.|\n)*?\*/', Comment),
                   (r'{', Punctuation, 'content'),
                   (r'\:[a-zA-Z0-9_-]+', Name.Decorator),
                   (r'\.[a-zA-Z0-9_-]+', Name.Class),
                   (r'\#[a-zA-Z0-9_-]+', Name.Function),
                   (r'@[a-zA-Z0-9_-]+', Keyword, 'atrule'),
                   (r'[a-zA-Z0-9_-]+', Name.Tag),
                   (r'[~\^\*!%&\[\]\(\)<>\|+=@:;,./?-]', Operator),
                   (r'"(\\\\|\\"|[^"])*"', String.Double),
                   (r"'(\\\\|\\'|[^'])*'", String.Single)],
        'atrule': [
            (r'{', Punctuation, 'atcontent'),
            (r';', Punctuation, '#pop'),
            include('basics'),
        ],
        'atcontent': [
            include('basics'),
            (r'}', Punctuation, '#pop:2'),
        ],
        'content':
        [(r'\s+', Text), (r'}', Punctuation, '#pop'),
         (r'url\(.*?\)', String.Other), (r'^@.*?$', Comment.Preproc),
         (r'(azimuth|background-attachment|background-color|'
          r'background-image|background-position|background-repeat|'
          r'background|border-bottom-color|border-bottom-style|'
          r'border-bottom-width|border-left-color|border-left-style|'
          r'border-left-width|border-right|border-right-color|'
          r'border-right-style|border-right-width|border-top-color|'
          r'border-top-style|border-top-width|border-bottom|'
          r'border-collapse|border-left|border-width|border-color|'
          r'border-spacing|border-style|border-top|border|caption-side|'
          r'clear|clip|color|content|counter-increment|counter-reset|'
          r'cue-after|cue-before|cue|cursor|direction|display|'
          r'elevation|empty-cells|float|font-family|font-size|'
          r'font-size-adjust|font-stretch|font-style|font-variant|'
          r'font-weight|font|height|letter-spacing|line-height|'
          r'list-style-type|list-style-image|list-style-position|'
          r'list-style|margin-bottom|margin-left|margin-right|'
          r'margin-top|margin|marker-offset|marks|max-height|max-width|'
          r'min-height|min-width|opacity|orphans|outline|outline-color|'
          r'outline-style|outline-width|overflow|padding-bottom|'
          r'padding-left|padding-right|padding-top|padding|page|'
          r'page-break-after|page-break-before|page-break-inside|'
          r'pause-after|pause-before|pause|pitch|pitch-range|'
          r'play-during|position|quotes|richness|right|size|'
          r'speak-header|speak-numeral|speak-punctuation|speak|'
          r'speech-rate|stress|table-layout|text-align|text-decoration|'
          r'text-indent|text-shadow|text-transform|top|unicode-bidi|'
          r'vertical-align|visibility|voice-family|volume|white-space|'
          r'widows|width|word-spacing|z-index|bottom|left|'
          r'above|absolute|always|armenian|aural|auto|avoid|baseline|'
          r'behind|below|bidi-override|blink|block|bold|bolder|both|'
          r'capitalize|center-left|center-right|center|circle|'
          r'cjk-ideographic|close-quote|collapse|condensed|continuous|'
          r'crop|crosshair|cross|cursive|dashed|decimal-leading-zero|'
          r'decimal|default|digits|disc|dotted|double|e-resize|embed|'
          r'extra-condensed|extra-expanded|expanded|fantasy|far-left|'
          r'far-right|faster|fast|fixed|georgian|groove|hebrew|help|'
          r'hidden|hide|higher|high|hiragana-iroha|hiragana|icon|'
          r'inherit|inline-table|inline|inset|inside|invert|italic|'
          r'justify|katakana-iroha|katakana|landscape|larger|large|'
          r'left-side|leftwards|level|lighter|line-through|list-item|'
          r'loud|lower-alpha|lower-greek|lower-roman|lowercase|ltr|'
          r'lower|low|medium|message-box|middle|mix|monospace|'
          r'n-resize|narrower|ne-resize|no-close-quote|no-open-quote|'
          r'no-repeat|none|normal|nowrap|nw-resize|oblique|once|'
          r'open-quote|outset|outside|overline|pointer|portrait|px|'
          r'relative|repeat-x|repeat-y|repeat|rgb|ridge|right-side|'
          r'rightwards|s-resize|sans-serif|scroll|se-resize|'
          r'semi-condensed|semi-expanded|separate|serif|show|silent|'
          r'slow|slower|small-caps|small-caption|smaller|soft|solid|'
          r'spell-out|square|static|status-bar|super|sw-resize|'
          r'table-caption|table-cell|table-column|table-column-group|'
          r'table-footer-group|table-header-group|table-row|'
          r'table-row-group|text|text-bottom|text-top|thick|thin|'
          r'transparent|ultra-condensed|ultra-expanded|underline|'
          r'upper-alpha|upper-latin|upper-roman|uppercase|url|'
          r'visible|w-resize|wait|wider|x-fast|x-high|x-large|x-loud|'
          r'x-low|x-small|x-soft|xx-large|xx-small|yes)\b', Keyword),
         (r'(indigo|gold|firebrick|indianred|yellow|darkolivegreen|'
          r'darkseagreen|mediumvioletred|mediumorchid|chartreuse|'
          r'mediumslateblue|black|springgreen|crimson|lightsalmon|brown|'
          r'turquoise|olivedrab|cyan|silver|skyblue|gray|darkturquoise|'
          r'goldenrod|darkgreen|darkviolet|darkgray|lightpink|teal|'
          r'darkmagenta|lightgoldenrodyellow|lavender|yellowgreen|thistle|'
          r'violet|navy|orchid|blue|ghostwhite|honeydew|cornflowerblue|'
          r'darkblue|darkkhaki|mediumpurple|cornsilk|red|bisque|slategray|'
          r'darkcyan|khaki|wheat|deepskyblue|darkred|steelblue|aliceblue|'
          r'gainsboro|mediumturquoise|floralwhite|coral|purple|lightgrey|'
          r'lightcyan|darksalmon|beige|azure|lightsteelblue|oldlace|'
          r'greenyellow|royalblue|lightseagreen|mistyrose|sienna|'
          r'lightcoral|orangered|navajowhite|lime|palegreen|burlywood|'
          r'seashell|mediumspringgreen|fuchsia|papayawhip|blanchedalmond|'
          r'peru|aquamarine|white|darkslategray|ivory|dodgerblue|'
          r'lemonchiffon|chocolate|orange|forestgreen|slateblue|olive|'
          r'mintcream|antiquewhite|darkorange|cadetblue|moccasin|'
          r'limegreen|saddlebrown|darkslateblue|lightskyblue|deeppink|'
          r'plum|aqua|darkgoldenrod|maroon|sandybrown|magenta|tan|'
          r'rosybrown|pink|lightblue|palevioletred|mediumseagreen|'
          r'dimgray|powderblue|seagreen|snow|mediumblue|midnightblue|'
          r'paleturquoise|palegoldenrod|whitesmoke|darkorchid|salmon|'
          r'lightslategray|lawngreen|lightgreen|tomato|hotpink|'
          r'lightyellow|lavenderblush|linen|mediumaquamarine|green|'
          r'blueviolet|peachpuff)\b', Name.Builtin),
         (r'\!important', Comment.Preproc), (r'/\*(?:.|\n)*?\*/', Comment),
         (r'\#[a-zA-Z0-9]{1,6}', Number),
         (r'[\.-]?[0-9]*[\.]?[0-9]+(em|px|\%|pt|pc|in|mm|cm|ex)', Number),
         (r'-?[0-9]+', Number), (r'[~\^\*!%&<>\|+=@:,./?-]+', Operator),
         (r'[\[\]();]+', Punctuation), (r'"(\\\\|\\"|[^"])*"', String.Double),
         (r"'(\\\\|\\'|[^'])*'", String.Single),
         (r'[a-zA-Z][a-zA-Z0-9]+', Name)]
    }
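
A brief sketch, assuming Pygments is installed and its CssLexer matches the class above; the CSS rule is an invented example:

from pygments.lexers import CssLexer
from pygments.token import Keyword, Name

css = "a.nav:hover { color: #c00; text-decoration: underline; }"

# Selectors are tokenized in the 'basics' state (Name.Tag, Name.Class,
# Name.Decorator); known property names and values inside the braces come
# from the large Keyword alternation in the 'content' state.
for ttype, value in CssLexer().get_tokens(css):
    if ttype in Keyword or ttype in Name:
        print(ttype, repr(value))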