Example No. 1
 def test_can_deline_tokens(self):
     assert list(analysis._delined_tokens([(token.Comment, "# a")])) == [(token.Comment, "# a")]
     assert list(analysis._delined_tokens([(token.Comment, "# a\n#  b")])) == [
         (token.Comment, "# a\n"),
         (token.Comment, "#  b"),
     ]
     assert list(analysis._delined_tokens([(token.Comment, "# a\n#  b\n")])) == [
         (token.Comment, "# a\n"),
         (token.Comment, "#  b\n"),
     ]
     assert list(analysis._delined_tokens([(token.Comment, "# a\n#  b\n # c\n")])) == [
         (token.Comment, "# a\n"),
         (token.Comment, "#  b\n"),
         (token.Comment, " # c\n"),
     ]
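These tests exercise a helper that is not shown in the snippets. Judging by the assertions, _delined_tokens takes an iterable of Pygments (token_type, text) pairs and splits any multi-line text into one pair per line, with each piece keeping its trailing "\n". A minimal sketch consistent with that behavior (an assumption, not necessarily the project's actual implementation; the tests themselves also rely on an unshown "from pygments import token" and a project-local analysis module):

 def _delined_tokens(tokens):
     # Assumed sketch: split each token's text at line breaks so that every
     # yielded pair covers at most one source line; each piece keeps its
     # trailing "\n", and a final piece without one is yielded unchanged.
     for token_type, token_text in tokens:
         newline_index = token_text.find("\n")
         while newline_index != -1:
             yield token_type, token_text[:newline_index + 1]
             token_text = token_text[newline_index + 1:]
             newline_index = token_text.find("\n")
         if token_text:
             yield token_type, token_text

With this sketch, list(_delined_tokens([(token.Comment, "# a\n#  b")])) produces exactly the two pairs asserted above.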
Example No. 2
 def test_can_deline_tokens(self):
     self.assertEqual(
         list(analysis._delined_tokens([(token.Comment, '# a')])),
         [(token.Comment, '# a')])
     self.assertEqual(
         list(analysis._delined_tokens([(token.Comment, '# a\n#  b')])),
         [(token.Comment, '# a\n'), (token.Comment, '#  b')])
     self.assertEqual(
         list(analysis._delined_tokens([(token.Comment, '# a\n#  b\n')])),
         [(token.Comment, '# a\n'), (token.Comment, '#  b\n')])
     self.assertEqual(
         list(analysis._delined_tokens([(token.Comment, '# a\n#  b\n # c\n')])),
         [(token.Comment, '# a\n'), (token.Comment, '#  b\n'),
          (token.Comment, ' # c\n')])
Example No. 3
 def test_can_convert_python_strings_to_comments(self):
     source_code = (
         "#!/bin/python\n" '"Some tool."\n' "#(C) by me\n" "def x():\n" '    "Some function"\n' "    return 1"
     )
     python_lexer = lexers.get_lexer_by_name("python")
     python_tokens = python_lexer.get_tokens(source_code)
     for token_type, token_text in analysis._pythonized_comments(analysis._delined_tokens(python_tokens)):
         assert token_type not in token.String, f"token_text={token_text!r}"
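The assertion implies that _pythonized_comments re-tags string tokens, such as module and function docstrings, as comments, so that later analysis does not mistake them for code. A minimal sketch under that assumption (again hypothetical, using Pygments token containment to match every String subtype):

 def _pythonized_comments(tokens):
     # Assumed sketch: re-tag anything in the token.String hierarchy
     # (docstrings included) as token.Comment; pass other tokens through.
     for token_type, token_text in tokens:
         if token_type in token.String:
             yield token.Comment, token_text
         else:
             yield token_type, token_text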
Example No. 4
 def test_can_deline_tokens(self):
     self.assertEqual(
         list(analysis._delined_tokens([(token.Comment, '# a')])),
         [(token.Comment, '# a')]
     )
     self.assertEqual(
         list(analysis._delined_tokens([(token.Comment, '# a\n#  b')])),
         [(token.Comment, '# a\n'), (token.Comment, '#  b')]
     )
     self.assertEqual(
         list(analysis._delined_tokens([(token.Comment, '# a\n#  b\n')])),
         [(token.Comment, '# a\n'), (token.Comment, '#  b\n')]
     )
     self.assertEqual(
         list(analysis._delined_tokens([(token.Comment, '# a\n#  b\n # c\n')])),
         [(token.Comment, '# a\n'), (token.Comment, '#  b\n'), (token.Comment, ' # c\n')]
     )
Example No. 5
 def test_can_convert_python_strings_to_comments(self):
     source_code = \
         '#!/bin/python\n' \
         '"Some tool."\n' \
         '#(C) by me\n' \
         'def x():\n' \
         '    "Some function"\n' \
         '    return 1'
     python_lexer = lexers.get_lexer_by_name('python')
     python_tokens = python_lexer.get_tokens(source_code)
     for token_type, token_text in analysis._pythonized_comments(analysis._delined_tokens(python_tokens)):
         self.assertNotIn(token_type, token.String, 'token_text=%r' % token_text)