Example #1
  def testProcessedTokenMatcher_Bygroups(self):
    matcher = (r'"f(oo)(bar)"', ('bygroups', (pygments.token.Comment,
                                              pygments.token.String)))
    processed = extract._ProcessedTokenMatcher(matcher, None)
    self.assertEqual(
        'TokenActions.byGroups(Token.COMMENT, Token.LITERAL_STRING)',
        processed.token_action)
Example #2
  def testProcessedTokenMatcher(self):
    matcher = (r'"fo\\o"', pygments.token.Comment)
    processed = extract._ProcessedTokenMatcher(matcher, None)
    self.assertEqual(r'\"fo\\\\o\"', processed.regex)
    self.assertEqual('TokenActions.singleToken(Token.COMMENT)',
                     processed.token_action)
    self.assertEqual('StateActions.NOOP', processed.state_action)

    matcher = (r'foo', pygments.token.Comment, (-1,))
    processed = extract._ProcessedTokenMatcher(matcher, None)
    self.assertEqual('StateActions.pop(1)', processed.state_action)

    matcher = (r'foo', pygments.token.Comment, ('root', 'import'))
    processed = extract._ProcessedTokenMatcher(matcher, None)
    self.assertEqual('StateActions.multiple(StateActions.push(State.ROOT), '
                     'StateActions.push(State.IMPORT))',
                     processed.state_action)
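
For context, the ('bygroups', ...) tuples and the state tuples such as (-1,) and ('root', 'import') seen above mirror the shape of Pygments rules once they have been parsed. In a Pygments lexer they are written with helpers from pygments.lexer and state-name strings; the lexer below is a small illustration, independent of the code under test:

from pygments.lexer import RegexLexer, bygroups
from pygments.token import Comment, Keyword, String

class DemoLexer(RegexLexer):
  # Each rule is (regex, token or action, optional new-state directive).
  tokens = {
      'root': [
          (r'#.*', Comment),                             # single token, no state change
          (r'"f(oo)(bar)"', bygroups(Comment, String)),  # one token per capture group
          (r'import\b', Keyword, ('root', 'import')),    # push two states
      ],
      'import': [
          (r'\S+', String, '#pop'),                      # pop one state
      ],
  }
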
Example #3
  def testProcessedTokenMatcher_Using(self):
    matcher = (r'"f(oo)(bar)"', ('bygroups', (pygments.token.Comment,
                                              ('using', (PythonLexer,)),
                                              pygments.token.String)))
    processed = extract._ProcessedTokenMatcher(matcher, None)
    self.assertEqual(
        'TokenActions.byGroups(TokenActions.singleToken(Token.COMMENT), '
        'PythonSyntax.USING_THIS, '
        'TokenActions.singleToken(Token.LITERAL_STRING))',
        processed.token_action)
Example #4
  def testRegexConversion(self):
    dummy = extract._ProcessedTokenMatcher(('', pygments.token.Comment), None)
    def Check(transformed, initial):
      self.assertEqual(transformed, dummy._ProcessRegex(initial))
    Check('foo', 'foo')
    Check(r'f\"o\"\\\\o', r'f"o"\\o')
    # Various nasty character classes that contain punctuation.
    Check(r'[\\]{}:(),;\\[]', '[]{}:(),;[]')
    Check(r'\\[', r'[\[]')
    Check(r'\\[', '[[]')
    Check(r'[\\]\\[a]', '[][a]')
    Check('(a)', '(a)')
    Check(r'\\{foo.*\\}', '{foo.*}')
    Check(r'\\{foo3foo\\}', '{foo3foo}')
    Check('foo{1,2}bar{1,3}', r'foo{1,2}bar{1,3}')
    Check('a{3}', 'a{3}')
    Check('a{3,5}', 'a{3,5}')
    Check(r'foobar\\{baz', 'foobar{baz')
    Check(r'foobar\\]baz', 'foobar]baz')
    Check(r'foo\\)barbaz', r'foo\)barbaz')
    Check(r'\\(', r'\(')
    Check(r'\\n', r'\n')
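
Judging from these assertions, _ProcessRegex escapes each pattern so that it can be embedded in a double-quoted string literal in the generated output, and additionally rewrites bracket and brace constructs. Below is a minimal sketch of just the quote-and-backslash escaping step; the helper name is hypothetical, it is not the project's implementation, and it ignores the character-class and brace rewrites exercised above:

def _escape_for_string_literal(regex):
  # Double every backslash first, then escape double quotes, so the pattern
  # survives being pasted into a "..." string literal.
  return regex.replace('\\', '\\\\').replace('"', '\\"')

# e.g. _escape_for_string_literal(r'"fo\\o"') == r'\"fo\\\\o\"', which matches
# the regex expectation in Example #2.
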
Example #5
  def testProcessedTokenMatcher_UsingThis(self):
    matcher = (r'"f(oo)(bar)"', ('using', (pygments.lexer.this,)))
    processed = extract._ProcessedTokenMatcher(matcher, RecordingPythonLexer())
    self.assertEqual('USING_THIS', processed.token_action)
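
All five snippets appear to come from the same test class. A scaffold with the imports they rely on might look like the following; extract and RecordingPythonLexer are project-specific names taken from the snippets above rather than standard Pygments APIs, and the class name here is only a guess:

import unittest

import pygments.lexer
import pygments.token
from pygments.lexers import PythonLexer

import extract  # module under test (project-specific)
# RecordingPythonLexer is assumed to be defined alongside extract in the project.


class ProcessedTokenMatcherTest(unittest.TestCase):
  # The test methods from Examples #1-#5 go here.
  ...


if __name__ == '__main__':
  unittest.main()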