Example #1
 def test_3(self):
     # Note that this will add a newline to the lexed output, since the
     # `ensurenl <http://pygments.org/docs/lexers/>`_ option is True by
     # default.
     lexer = get_lexer_by_name('python')
     token_iter = lex('', lexer)
     # Capture both group and string for help in debugging.
     token_group = list(_group_lexer_tokens(token_iter, True, False))
     assert token_group == [(_GROUP.whitespace, '\n')]
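For context, here is a standalone sketch of the plain-Pygments behaviour the comment above relies on; `_group_lexer_tokens` and `_GROUP` belong to the module under test and are not used here, and the exact token type may differ across Pygments versions.

    from pygments import lex
    from pygments.lexers import get_lexer_by_name

    # With the default ensurenl=True, Pygments appends a trailing newline to
    # input that lacks one, so even an empty string lexes to one newline token.
    lexer = get_lexer_by_name('python')
    print(list(lex('', lexer)))
    # Typically [(Token.Text.Whitespace, '\n')]; older Pygments releases
    # report Token.Text instead.

    # Passing ensurenl=False suppresses the added newline:
    lexer = get_lexer_by_name('python', ensurenl=False)
    print(list(lex('', lexer)))  # Expected to be an empty list.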
Example #2
 def test_10(self):
     lexer = get_lexer_by_name('c')
     token_iter = lex(self.test_c_code, lexer)
     token_group = _group_lexer_tokens(token_iter, False, False)
     gathered_group = _gather_groups_on_newlines(token_group, (2, 2, 2))
     classified_group = list(_classify_groups(gathered_group, c_lexer))
     assert classified_group == [(-1, '#include <stdio.h>\n'), (-1, '\n'),
                                 (0, 'A multi-\n'), (0, 'line\n'),
                                 (0, 'comment \n'), (-1, '\n'),
                                 (-1, 'main(){\n'), (2, 'Empty.\n'),
                                 (-1, '}\n')]
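The C source held in `self.test_c_code` is not shown in these snippets. Reconstructing it from the expected output above and from the token groups in the later examples, it appears to be the following; treat this as an inferred fixture, not the actual attribute from the test class.

    # Inferred from the expected token groups; the real test_c_code fixture
    # may differ in details not visible in the assertions.
    test_c_code = (
        '#include <stdio.h>\n'
        '\n'
        '/* A multi-\n'
        '   line\n'
        '   comment */\n'
        '\n'
        'main(){\n'
        '  // Empty.\n'
        '}\n'
    )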
Example #3
 def test_10(self):
     lexer = get_lexer_by_name('c')
     token_iter = lex(self.test_c_code, lexer)
     token_group = _group_lexer_tokens(token_iter, False, False)
     gathered_group = _gather_groups_on_newlines(token_group, (2, 2, 2))
     classified_group = list(_classify_groups(gathered_group, c_lexer))
     assert classified_group == [(-1, '#include <stdio.h>\n'),
                                 (-1, '\n'),
                                 ( 0, 'A multi-\n'),
                                 ( 0, 'line\n'),
                                 ( 0, 'comment \n'),
                                 (-1, '\n'),
                                 (-1, 'main(){\n'),
                                 ( 2, 'Empty.\n'),
                                 (-1, '}\n')]
Example #4
 def test_2(self):
     lexer = get_lexer_by_name('c')
     token_iter = lex(self.test_c_code, lexer)
     # Capture both group and string for help in debugging.
     token_group = list(_group_lexer_tokens(token_iter, False, False))
     # But split the two into separate lists for unit tests.
     group_list, string_list = list(zip(*token_group))
     assert group_list == (
       _GROUP.other,               # The #include.
       _GROUP.whitespace,          # The space after #include.
       _GROUP.other,               # <stdio.h>\n
       _GROUP.whitespace,          # \n
       _GROUP.block_comment,       # The /* comment */.
       _GROUP.whitespace,          # Up to the code.
       _GROUP.other,               # main(){.
       _GROUP.whitespace,          # Up to the // comment.
       _GROUP.inline_comment,      # // comment.
       _GROUP.other,               # Closing }.
       _GROUP.whitespace, )        # Final \n.
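For readers without the module under test at hand, the `_GROUP` values compared in these assertions behave like members of a small enum. The sketch below is inferred purely from the member names used in this and the following examples; the real definition may differ in names, ordering, and values.

    from enum import Enum, auto

    class _GROUP(Enum):
        # Inferred members only; not the actual definition.
        whitespace = auto()           # Spaces, tabs, and newlines.
        inline_comment = auto()       # e.g. a // comment in C.
        other = auto()                # Code and anything else.
        block_comment = auto()        # A /* ... */ comment on one line.
        block_comment_start = auto()  # First line of a multi-line /* comment.
        block_comment_body = auto()   # Middle lines of that comment.
        block_comment_end = auto()    # Its closing */ line.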
Example #5
 def test_2(self):
     lexer = get_lexer_by_name('c')
     token_iter = lex(self.test_c_code, lexer)
     # Capture both group and string for help in debugging.
     token_group = list(_group_lexer_tokens(token_iter, False, False))
     # But split the two into separate lists for unit tests.
     group_list, string_list = list(zip(*token_group))
     assert group_list == (
         _GROUP.other,  # The #include.
         _GROUP.whitespace,  # The space after #include.
         _GROUP.other,  # <stdio.h>\n
         _GROUP.whitespace,  # \n
         _GROUP.block_comment,  # The /* comment */.
         _GROUP.whitespace,  # Up to the code.
         _GROUP.other,  # main(){.
         _GROUP.whitespace,  # Up to the // comment.
         _GROUP.inline_comment,  # // comment.
         _GROUP.other,  # Closing }.
         _GROUP.whitespace,  # Final \n.
     )
Example #6
 def test_4(self):
     lexer = get_lexer_by_name('c')
     token_iter = lex(self.test_c_code, lexer)
     token_group = _group_lexer_tokens(token_iter, False, False)
     gathered_group = list(_gather_groups_on_newlines(token_group,
                                                      (1, 2, 2)))
     expected_group = [
       [(_GROUP.other, 0, '#include'),
        (_GROUP.whitespace, 0, ' '),
        (_GROUP.other, 0, '<stdio.h>\n')],
       [(_GROUP.whitespace, 0, '\n')],
       [(_GROUP.block_comment_start, 3, '/* A multi-\n')],
       [(_GROUP.block_comment_body,  3, '   line\n')],
       [(_GROUP.block_comment_end,   3, '   comment */'),
        (_GROUP.whitespace, 0, '\n')],
       [(_GROUP.whitespace, 0, '\n')],
       [(_GROUP.other, 0, 'main(){'), (_GROUP.whitespace, 0, '\n')],
       [(_GROUP.whitespace, 0, '  '),
        (_GROUP.inline_comment, 0, '// Empty.\n')],
       [(_GROUP.other, 0, '}'), (_GROUP.whitespace, 0, '\n')]]
     assert gathered_group == expected_group
Example #7
 def test_4(self):
     lexer = get_lexer_by_name('c')
     token_iter = lex(self.test_c_code, lexer)
     token_group = _group_lexer_tokens(token_iter, False, False)
     gathered_group = list(
         _gather_groups_on_newlines(token_group, (1, 2, 2)))
     expected_group = [[(_GROUP.other, 0, '#include'),
                        (_GROUP.whitespace, 0, ' '),
                        (_GROUP.other, 0, '<stdio.h>\n')],
                       [(_GROUP.whitespace, 0, '\n')],
                       [(_GROUP.block_comment_start, 3, '/* A multi-\n')],
                       [(_GROUP.block_comment_body, 3, '   line\n')],
                       [(_GROUP.block_comment_end, 3, '   comment */'),
                        (_GROUP.whitespace, 0, '\n')],
                       [(_GROUP.whitespace, 0, '\n')],
                       [(_GROUP.other, 0, 'main(){'),
                        (_GROUP.whitespace, 0, '\n')],
                       [(_GROUP.whitespace, 0, '  '),
                        (_GROUP.inline_comment, 0, '// Empty.\n')],
                       [(_GROUP.other, 0, '}'),
                        (_GROUP.whitespace, 0, '\n')]]
     assert gathered_group == expected_group
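Putting the pieces together, the C tests above all run the same pipeline: lex, group the tokens, gather the groups on newlines, then classify each line (where, judging by the assertions, -1 marks a code line and a non-negative value marks a comment line at that indent). The sketch below chains those calls in one helper; the module name in the import and the role of `c_lexer` are assumptions based on the names used in the tests, not verified against the actual source.

    from pygments import lex
    from pygments.lexers import get_lexer_by_name

    # The private helpers come from the module under test; the module name
    # here is a placeholder, so adjust the import to match your tree.
    from code_to_rest import (_group_lexer_tokens, _gather_groups_on_newlines,
                              _classify_groups)

    def classify_c_source(code, c_lexer):
        # c_lexer is whatever object the tests pass to _classify_groups; it is
        # not shown in the snippets, so it is left as a parameter here.
        lexer = get_lexer_by_name('c')
        token_iter = lex(code, lexer)
        token_group = _group_lexer_tokens(token_iter, False, False)
        # (2, 2, 2) presumably gives the lengths of C's comment delimiters
        # ('//', '/*', '*/'), matching the tuple used in test_10.
        gathered_group = _gather_groups_on_newlines(token_group, (2, 2, 2))
        return list(_classify_groups(gathered_group, c_lexer))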