예제 #1
0
 def test_get_docstring_tokens_singles(self):
     """All three single-quoted docstrings in the fixture are discovered (legacy token-stream API)."""
     fixture_path = get_absolute_path('data/docstring_singles.py')
     with open(fixture_path, 'r') as source:
         found = {
             token.string
             for token in get_docstring_tokens([], tokenize.generate_tokens(source.readline))
         }
     expected = {
         "'''\nSingle quotes multiline module docstring\n'''",
         "'''\n    Single quotes multiline class docstring\n    '''",
         "'''\n        Single quotes multiline function docstring\n        '''",
     }
     self.assertEqual(found, expected)
 def test_get_docstring_tokens_singles(self):
     """All three single-quoted docstrings in the fixture are discovered (Token-wrapper API)."""
     with open(get_absolute_path('data/docstring_singles.py'), 'r') as source:
         wrapped = [Token(raw) for raw in tokenize.generate_tokens(source.readline)]
     expected = {
         "'''\nSingle quotes multiline module docstring\n'''",
         "'''\n    Single quotes multiline class docstring\n    '''",
         "'''\n        Single quotes multiline function docstring\n        '''",
     }
     self.assertEqual({tok.string for tok in get_docstring_tokens(wrapped)}, expected)
예제 #3
0
 def test_get_docstring_tokens_doubles(self):
     """All three double-quoted docstrings in the fixture are discovered."""
     with open(get_absolute_path('data/docstring_doubles.py'), 'r') as handle:
         token_list = [Token(entry) for entry in tokenize.generate_tokens(handle.readline)]
     strings = {item.string for item in get_docstring_tokens(token_list)}
     self.assertEqual(strings, {
         '"""\nDouble quotes multiline module docstring\n"""',
         '"""\n    Double quotes multiline class docstring\n    """',
         '"""\n        Double quotes multiline function docstring\n        """',
     })
 def _get_docstring_tokens(self, filename):
     """Tokenize *filename* (resolved via get_absolute_path) and return its docstring tokens."""
     with open(get_absolute_path(filename), 'r') as handle:
         wrapped = [Token(raw) for raw in tokenize.generate_tokens(handle.readline)]
     return get_docstring_tokens(wrapped)
예제 #5
0
 def _get_docstring_tokens(self, filename):
     """Read *filename*, wrap its token stream in Token objects, and extract docstring tokens."""
     resolved = get_absolute_path(filename)
     with open(resolved, 'r') as source:
         stream = tokenize.generate_tokens(source.readline)
         token_objects = [Token(entry) for entry in stream]
     return get_docstring_tokens(token_objects)
예제 #6
0
 def _get_docstring_tokens(self, filename):
     """Return docstring tokens for *filename* (legacy raw token-stream API)."""
     resolved = get_absolute_path(filename)
     with open(resolved, 'r') as source:
         return get_docstring_tokens([], tokenize.generate_tokens(source.readline))