def test_has_everything_for_sphinx(self):
    """Make sure a docstring reports exactly the sections it contains."""
    has_everything_root = sphinx.parse(
        Peaker(lex('\n'.join([
            'Short description.',
            '',
            'Long description.',
            '',
            ':param x: Some value.',
            ':raises IntegrityError: Sometimes.',
            ':yields: The occasional value.',
            ':returns: When it completes.',
            '',
        ])), lookahead=3))
    docstring = Docstring.from_sphinx(has_everything_root)
    self.assertTrue(all([
        docstring.get_section(Sections.SHORT_DESCRIPTION),
        docstring.get_section(Sections.LONG_DESCRIPTION),
        docstring.get_section(Sections.ARGUMENTS_SECTION),
        docstring.get_section(Sections.RAISES_SECTION),
        docstring.get_section(Sections.YIELDS_SECTION),
        docstring.get_section(Sections.RETURNS_SECTION),
    ]))
    has_only_short_description = google.parse(
        Peaker(lex('Short description'), lookahead=3))
    docstring = Docstring.from_google(has_only_short_description)
    self.assertTrue(docstring.get_section(Sections.SHORT_DESCRIPTION))
    self.assertFalse(any([
        docstring.get_section(Sections.LONG_DESCRIPTION),
        docstring.get_section(Sections.ARGUMENTS_SECTION),
        docstring.get_section(Sections.RAISES_SECTION),
        docstring.get_section(Sections.YIELDS_SECTION),
        docstring.get_section(Sections.RETURNS_SECTION),
    ]))
def test_must_have_parentheses_around(self):
    """Make sure the type has to start with ( and end with )."""
    with self.assertRaises(ParserException):
        parse_type(Peaker(lex('(int')))
    with self.assertRaises(ParserException):
        parse_type(Peaker(lex('int)')))
    with self.assertRaises(ParserException):
        parse_type(Peaker(lex('( int (')))
@classmethod
def setUpClass(cls, *args, **kwargs):
    super().setUpClass(*args, **kwargs)
    cls.equivalent_docstrings = list()
    for google_doc, sphinx_doc in cls._equivalent_docstrings:
        google_root = google.parse(Peaker(lex(google_doc), 3))
        sphinx_root = sphinx.parse(Peaker(lex(sphinx_doc), 2))
        cls.equivalent_docstrings.append((
            Docstring.from_google(google_root),
            Docstring.from_sphinx(sphinx_root),
        ))
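# A hedged sketch of the fixture `setUpClass` assumes: `_equivalent_docstrings`
# is a class attribute, defined elsewhere in the suite, pairing each
# Google-style docstring with its Sphinx-style equivalent. The entry below is
# illustrative only, not the suite's real data.
_example_equivalent_docstrings = [
    (
        '\n'.join([
            'Get the foo.',
            '',
            'Args:',
            '    x: The bar used to get the foo.',
        ]),
        '\n'.join([
            'Get the foo.',
            '',
            ':param x: The bar used to get the foo.',
        ]),
    ),
]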
def test_type_and_name_always_associated(self):
    """Make sure the type goes to the correct name."""
    names = ['x', 'y', 'a', 'z', 'q']
    types = ['int', 'float', 'Decimal', 'str', 'Any']
    short_description = 'A short docstring.'

    # Shuffle so the name-type association is random.
    shuffle(names)
    shuffle(types)

    sphinx_params = [
        ':param {}: An explanation'.format(name)
        for name in names
    ] + [
        ':type {}: {}'.format(name, _type)
        for name, _type in zip(names, types)
    ]
    shuffle(sphinx_params)
    sphinx_docstring = '\n'.join([
        short_description,
        '',
        '\n'.join(sphinx_params),
    ])

    google_params = [
        '    {} ({}): An explanation.'.format(name, _type)
        for name, _type in zip(names, types)
    ]
    google_docstring = '\n'.join([
        short_description,
        '',
        'Args:',
        '\n'.join(google_params),
    ])

    google_doc = Docstring.from_google(
        google.parse(Peaker(lex(google_docstring), 3)))
    sphinx_doc = Docstring.from_sphinx(
        sphinx.parse(Peaker(lex(sphinx_docstring), 2)))

    items = google_doc.get_items(Sections.ARGUMENTS_SECTION)
    self.assertEqual(items, sorted(items), 'The items should be sorted.')
    self.assertEqual(
        google_doc.get_items(Sections.ARGUMENTS_SECTION),
        sphinx_doc.get_items(Sections.ARGUMENTS_SECTION),
        'Google and Sphinx items should be the same.',
    )
    self.assertEqual(
        google_doc.get_types(Sections.ARGUMENTS_SECTION),
        sphinx_doc.get_types(Sections.ARGUMENTS_SECTION),
        'Google and Sphinx types should be the same.',
    )
def test_multiple_line_item_definition(self):
    """Make sure item definitions can span multiple lines."""
    func = '\n'.join([
        'def do_nothing(x):',
        '    """Do nothing with x.',
        '',
        '    :param x: This is an argument which must be',
        '        qualified by a large amount of text.',
        '',
        '    """',
        '    pass',
    ])
    doc = ast.get_docstring(ast.parse(func).body[0])
    node = parse(Peaker(lex(doc), lookahead=2))
    item_node = None
    for child in node.walk():
        if child.node_type == NodeType.ITEM:
            item_node = child
            break
    self.assertEqual(
        sum([1 if x.node_type == NodeType.LINE else 0
             for x in item_node.walk()]),
        2,
        'There should be two lines in this item definition.',
    )
def test_parse_whole_docstring(self):
    """Make sure we can parse a whole docstring."""
    node = parse(Peaker(lex('\n'.join([
        'Format the exception with a traceback.',
        '',
        ':param etype: exception type',
        ':param value: exception value',
        ':param tb: traceback object',
        ':param limit: maximum number of stack frames to show',
        ':type limit: integer or None',
        ':rtype: list of strings',
        '',
    ])), lookahead=2))
    self.assertEqual(
        node.node_type,
        NodeType.DOCSTRING,
    )
    colon_count = 0
    has_type_for_limit = False
    for child in node.walk():
        if child.node_type == NodeType.WORD and child.value == 'limit':
            has_type_for_limit = True
        elif child.node_type == NodeType.COLON:
            colon_count += 1
    self.assertTrue(has_type_for_limit)
    self.assertEqual(colon_count, 12)
def test_item_name_with_return_can_have_type_but_not_argument(self):
    """Make sure the return item can have a type."""
    node = parse_item(
        Peaker(lex(':returns int: Whoa.'), lookahead=2)
    )
    self.assertEqual(node.node_type, NodeType.RETURNS_SECTION)
    node_types = [x.node_type for x in node.walk()]
    self.assertEqual(
        node_types,
        [
            NodeType.COLON,
            NodeType.RETURNS,
            NodeType.WORD,
            NodeType.TYPE,
            NodeType.COLON,
            NodeType.ITEM_NAME,
            NodeType.WORD,
            NodeType.LINE,
            NodeType.ITEM_DEFINITION,
            NodeType.ITEM,
            NodeType.RETURNS_SECTION,
        ],
    )
def test_inline_item_type(self):
    """Make sure we can get the type of the item in its definition."""
    node = parse_item(
        Peaker(lex(':param int x: A number.\n'), lookahead=2)
    )
    self.assertEqual(
        node.node_type,
        NodeType.ARGS_SECTION,
    )
    node_types = [x.node_type for x in node.walk()]
    self.assertEqual(node_types, [
        NodeType.COLON,
        NodeType.ARGUMENTS,
        NodeType.WORD,
        NodeType.TYPE,
        NodeType.WORD,
        NodeType.COLON,
        NodeType.ITEM_NAME,
        NodeType.WORD,
        NodeType.WORD,
        NodeType.LINE,
        NodeType.ITEM_DEFINITION,
        NodeType.ITEM,
        NodeType.ARGS_SECTION,
    ])
def test_item_without_argument(self):
    """Test that we can parse an item without an argument."""
    node = parse_item(
        Peaker(lex(':returns: A value.\n'), lookahead=2)
    )
    self.assertEqual(
        node.node_type,
        NodeType.RETURNS_SECTION,
    )
    node_types = [x.node_type for x in node.walk()]
    self.assertEqual(
        node_types,
        [
            NodeType.COLON,
            NodeType.RETURNS,
            NodeType.COLON,
            NodeType.ITEM_NAME,
            NodeType.WORD,
            NodeType.WORD,
            NodeType.LINE,
            NodeType.ITEM_DEFINITION,
            NodeType.ITEM,
            NodeType.RETURNS_SECTION,
        ],
        'Incorrect node types. Got: \n\t{}'.format('\n\t'.join([
            str(x) for x in node_types
        ])),
    )
def test_definition_with_colon_not_mistaken_for_inline_type(self):
    """Make sure a colon in the definition isn't parsed as an inline type."""
    node = parse_item(
        Peaker(lex(':param x: : That shouldn\'t be there.\n'), lookahead=2))
    self.assertEqual(node.node_type, NodeType.ARGS_SECTION)
    self.assertFalse(
        any([x.node_type == NodeType.TYPE for x in node.walk()]))
def test_get_short_description(self):
    """Ensure we can get the short description."""
    root = google.parse(
        Peaker(lex('Nothing but a short description.'), lookahead=3))
    docstring = Docstring.from_google(root)
    self.assertEqual(
        docstring.get_section(Sections.SHORT_DESCRIPTION),
        'Nothing but a short description.',
    )
def test_all_nodes_have_line_numbers(self):
    """Make sure all nodes in the AST have line numbers."""
    peaker = Peaker(lex('\n'.join([
        'The short description should have line numbers.',
        '',
        'The long description should have line numbers.',
        '',
        'noqa: I203',
        '',
        'Args:',
        '    x (LineNumber, optional): The argument should have a',
        '        line number.',
        '',
        'Raises:',
        '    IndexError: The exception should have a line number.',
        '',
        'Yields:',
        '    LineNumber: The yields should have a line number.',
        '',
        'Returns:',
        '    LineNumber: The return section should have a line number.',
    ])), lookahead=3)
    root = parse(peaker)
    for node in root.walk():
        self.assertTrue(
            node.line_numbers is not None,
            'The node ({}) {} does not have line numbers.'.format(
                node.node_type,
                node.value,
            ))
def test_has_next_returns_false_at_end_of_iteration(self):
    """Make sure has_next is False once the stream is exhausted."""
    peaker = Peaker((x for x in 'ab'))
    self.assertTrue(peaker.has_next())
    peaker.next()
    self.assertTrue(peaker.has_next())
    peaker.next()
    self.assertFalse(peaker.has_next())
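# The helper below is an illustrative sketch, not one of the original tests.
# It assumes darglint's Peaker also exposes peak(), returning the upcoming
# item without consuming it (next() consumes; has_next() reports whether
# anything remains). The name is hypothetical and won't be collected by
# unittest.
def _example_peak_vs_next(self):
    peaker = Peaker((x for x in 'ab'))
    assert peaker.peak() == 'a'  # peak() inspects without consuming
    assert peaker.next() == 'a'  # next() consumes the item
    assert peaker.peak() == 'b'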
def test_parse_whole_description(self):
    """Make sure we can handle descriptions of multiple lines."""
    node = parse_description(
        Peaker(lex('Short description\n'
                   '\n'
                   'Long : (description)\n'
                   '\n'
                   '    <code></code>\n'
                   '\n'),
               lookahead=3))
    child_types = [x.node_type for x in node.walk()]
    self.assertEqual(child_types, [
        NodeType.WORD,
        NodeType.WORD,
        NodeType.SHORT_DESCRIPTION,
        NodeType.WORD,
        NodeType.COLON,
        NodeType.LPAREN,
        NodeType.WORD,
        NodeType.RPAREN,
        NodeType.LINE,
        NodeType.LINE,
        NodeType.INDENT,
        NodeType.WORD,
        NodeType.LINE,
        NodeType.LINE,
        NodeType.LONG_DESCRIPTION,
        NodeType.DESCRIPTION,
    ])
def test_parses_all_section_types(self):
    """Make sure all section types can be parsed."""
    node = parse(
        Peaker(lex('\n'.join([
            'Short description.',
            '',
            'Long Description.',
            '',
            'Args:',
            '    x: The first argument with',
            '        two lines.',
            '    y: The second argument.',
            '',
            'Raises:',
            '    SomethingException: Randomly.',
            '',
            # The adjacent literals below are deliberately concatenated
            # (no comma), so the non-standard section is lexed as one line.
            'Non-Standard:'
            '    Everything about this.',
            '',
            'Yields:',
            '    Values to analyze.',
        ])), lookahead=3))
    child_types = [x.node_type for x in node.children]
    self.assertEqual(child_types, [
        NodeType.DESCRIPTION,
        NodeType.ARGS_SECTION,
        NodeType.RAISES_SECTION,
        NodeType.LONG_DESCRIPTION,
        NodeType.YIELDS_SECTION,
    ])
def test_parse_item(self):
    """Make sure we can parse the parts of a compound section."""
    node = parse_item(
        Peaker(lex('    x (int): The first number\n'
                   '        to add\n'), lookahead=3))
    self.assertEqual(
        node.node_type,
        NodeType.ITEM,
    )
    child_types = [x.node_type for x in node.walk()]
    self.assertEqual(child_types, [
        NodeType.INDENT,
        NodeType.WORD,
        NodeType.LPAREN,
        NodeType.WORD,
        NodeType.RPAREN,
        NodeType.TYPE,
        NodeType.ITEM_NAME,
        NodeType.COLON,
        NodeType.WORD,
        NodeType.WORD,
        NodeType.WORD,
        NodeType.LINE,
        NodeType.INDENT,
        NodeType.INDENT,
        NodeType.WORD,
        NodeType.WORD,
        NodeType.LINE,
        NodeType.ITEM_DEFINITION,
        NodeType.ITEM,
    ])
def test_parse_returns_section_with_type(self):
    """Make sure the returns section can have a type."""
    node = parse_simple_section(
        Peaker(lex('Returns:\n'
                   '    int: The square of something.\n'
                   '\n')))
    self.assertEqual(
        node.node_type,
        NodeType.SECTION,
    )
    child_types = [x.node_type for x in node.walk()]
    self.assertEqual(child_types, [
        NodeType.RETURNS,
        NodeType.COLON,
        NodeType.SECTION_HEAD,
        NodeType.INDENT,
        NodeType.WORD,
        NodeType.TYPE,
        NodeType.WORD,
        NodeType.WORD,
        NodeType.WORD,
        NodeType.WORD,
        NodeType.LINE,
        NodeType.SECTION_SIMPLE_BODY,
        NodeType.SECTION,
    ])
def test_parse_simple_section_cannot_start_with_args(self):
    """Make sure the simple section starts with return or yield."""
    with self.assertRaises(ParserException):
        parse_simple_section(
            Peaker(lex('Args:\n'
                       '    Not a simple section.\n'
                       '\n')))
def test_get_argument_types(self):
    """Make sure we can get a dictionary of arguments to types."""
    root = sphinx.parse(
        Peaker(lex('\n'.join([
            'Something.',
            '',
            ':param x: The first.',
            ':param y: The second.',
            ':type x: int',
            ':type y: List[int], optional',
            '',
        ])), lookahead=3))
    docstring = Docstring.from_sphinx(root)
    argtypes = dict(zip(
        docstring.get_items(Sections.ARGUMENTS_SECTION) or [],
        docstring.get_types(Sections.ARGUMENTS_SECTION) or [],
    ))
    self.assertEqual(argtypes['x'], 'int')
    self.assertEqual(argtypes['y'], 'List[int], optional')
def test_parse_line_with_parentheses(self):
    """Make sure lines can have parentheses in them."""
    node = parse_line(
        Peaker(lex('This is a (parenthesis-containing) line.\n')))
    self.assertEqual(
        node.node_type,
        NodeType.LINE,
    )
def test_short_description_can_have_colons(self):
    """Make sure special characters are fine."""
    content = ':param Something: Should be okay, I guess.'
    node = parse_short_description(Peaker(lex(content)))
    self.assertEqual(
        node.node_type,
        NodeType.SHORT_DESCRIPTION,
    )
def test_parse_short_description(self):
    """Make sure we can parse the first line in the docstring."""
    node = parse_short_description(
        Peaker(lex('This is a short description.\n'), lookahead=3))
    child_types = [x.node_type for x in node.walk()]
    self.assertEqual(child_types, [
        NodeType.WORD,
    ] * 5 + [
        NodeType.SHORT_DESCRIPTION,
    ])
def test_parse_yields(self):
    """Make sure we can parse a yields section."""
    node = parse_yields(
        Peaker(lex('Yields:\n'
                   '    The total amount of information.\n'
                   '\n'), lookahead=3))
    self.assertEqual(
        node.node_type,
        NodeType.YIELDS_SECTION,
    )
def test_parse_args(self):
    """Make sure we can parse an args section."""
    node = parse_args(
        Peaker(lex('\n'.join([
            'Args:',
            '    x: the item.',
            '\n',
        ])), lookahead=3))
    self.assertEqual(node.node_type, NodeType.ARGS_SECTION)
def test_has_section(self):
    """Make sure the docstring can tell if it has the given sections."""
    has_everything_root = google.parse(
        Peaker(lex('\n'.join([
            'Short description.',
            '',
            'Long description.',
            '',
            'Args:',
            '    x: Some value.',
            '',
            'Raises:',
            '    IntegrityError: Sometimes.',
            '',
            'Yields:',
            '    The occasional value.',
            '',
            'Returns:',
            '    When it completes.',
        ])), lookahead=3))
    docstring = Docstring.from_google(has_everything_root)
    self.assertTrue(all([
        docstring.get_section(Sections.SHORT_DESCRIPTION),
        docstring.get_section(Sections.LONG_DESCRIPTION),
        docstring.get_section(Sections.ARGUMENTS_SECTION),
        docstring.get_section(Sections.RAISES_SECTION),
        docstring.get_section(Sections.YIELDS_SECTION),
        docstring.get_section(Sections.RETURNS_SECTION),
    ]))
    has_only_short_description = google.parse(
        Peaker(lex('Short description'), lookahead=3))
    docstring = Docstring.from_google(has_only_short_description)
    self.assertTrue(docstring.get_section(Sections.SHORT_DESCRIPTION))
    self.assertFalse(any([
        docstring.get_section(Sections.LONG_DESCRIPTION),
        docstring.get_section(Sections.ARGUMENTS_SECTION),
        docstring.get_section(Sections.RAISES_SECTION),
        docstring.get_section(Sections.YIELDS_SECTION),
        docstring.get_section(Sections.RETURNS_SECTION),
    ]))
def test_global_noqa_star_body(self):
    """Ensure noqa with * means ignore everything."""
    root = parse(Peaker(lex('\n'.join([
        'A short explanation.',
        '',
        '    # noqa: *',
        '\n',
    ])), lookahead=3))
    docstring = Docstring.from_google(root)
    self.assertTrue(docstring.ignore_all)
def test_parse_from_ast(self):
    """Make sure we can parse the docstring as returned from ast."""
    func = '\n'.join([
        'def get_foobar(self, foo, bar=True):',
        '    """This gets the foobar',
        '',
        # The adjacent literals below concatenate into one long line.
        '    This really should have a full function definition, but I '
        'am too lazy.',
        '',
        '    >>> print get_foobar(10, 20)',
        '    30',
        "    >>> print get_foobar('a', 'b')",
        '    ab',
        '',
        "    Isn't that what you want?",
        '',
        '    :param foo: The foo.',
        '    :param bar: The bar.',
        '    :returns: The foobar.',
        '',
        '    """',
        '    return foo + bar',
    ])
    doc = ast.get_docstring(ast.parse(func).body[0])
    peaker = Peaker(lex(doc), lookahead=2)
    node = parse(peaker)
    self.assertEqual(
        node.node_type,
        NodeType.DOCSTRING,
    )
    param_count = 0
    return_count = 0
    words = set()
    for child in node.walk():
        if child.node_type == NodeType.ARGUMENTS:
            param_count += 1
        elif child.node_type == NodeType.RETURNS:
            return_count += 1
        elif child.node_type == NodeType.WORD:
            words.add(child.value)
    self.assertEqual(param_count, 2)
    self.assertEqual(return_count, 1)
    for word in ['foobar', 'lazy.', 'get_foobar', "Isn't", 'foo']:
        self.assertTrue(
            word in words,
            '"{}" was not a word, but should have been.'.format(word),
        )