def test_parse_yields_short_description_for_first_line_if_possible(self):
    """Check the first line parses as the short description."""
    docstring = '\n'.join([
        'Short description.',
        '',
        'Long description.'
    ])
    node = parse(condense(lex(docstring)))
    for symbol in ('short-description', 'long-description'):
        self.assertTrue(CykNodeUtils.contains(node, symbol))
def test_top_parse_separates_by_indent_if_section_starts(self):
    """Make sure we can ignore indentation between sections."""
    docstring = '\n'.join([
        'A short summary.',
        ' ',
        'Args:',
        ' x: y.',
        ' ',
        'Returns:',
        ' Something.',
    ])
    tokens = condense(lex(docstring))
    # Expect exactly three chunks: summary, args, returns.
    self.assertEqual(3, len(top_parse(tokens)))
def test_parse_args_section_with_newline_after_type(self):
    """Allow a newline between an argument's type and its description."""
    docstring = '\n'.join([
        'Args:',
        ' points (:class:`numpy.ndarray`):',
        ' The points to test.',
    ])
    node = parse(condense(lex(docstring)))
    self.assertEqual('arguments-section', node.symbol, str(node))
def test_raises_section(self):
    """Identify a numpy-style Raises section."""
    text = '\n'.join([
        'Always fail.',
        '',
        'Raises',
        '------',
        'Exception',
        ' Under all conditions.',
        '',
    ])
    node = parse(condense(lex(text, config=self.config)))
    self.assertContains(node, 'raises-section')
def test_no_short_description_checks_for_others(self):
    """Recognize sections even when no short description is present."""
    program = '\n'.join([
        '@abstract.abstractmethod',
        'def __init__(self, config: dict):',
        ' """',
        '',
        ' :param config: config dict user defined in config file.',
        ' """',
    ])
    # Pull the docstring out exactly as the linter would see it.
    module = ast.parse(program)
    doc = ast.get_docstring(module.body[0])
    node = parse(condense(lex(doc)))
    self.assertTrue(CykNodeUtils.contains(node, 'arguments-section'))
def test_yields_section(self):
    """Identify a numpy-style Yields section."""
    text = '\n'.join([
        'Yield the number two.',
        '',
        'Yields',
        '-------',
        '{2}',
        ' The number two.',
        '',
    ])
    node = parse(condense(lex(text, config=self.config)))
    self.assertContains(node, 'yields-section')
def test_yield_type_with_single_name(self):
    """Parse a yields item of the form ``name : type``."""
    text = '\n'.join([
        'Yield the number four.',
        '',
        'Yields',
        '-------',
        'number : int',
        ' A number to use.',
        '',
    ])
    node = parse(condense(lex(text, config=self.config)))
    self.assertContains(node, 'yields-section')
def test_arguments_section_with_types(self):
    """Extract the declared type from a numpy Parameters item."""
    text = '\n'.join([
        'Turn the person into a Mr. Fontaine.',
        '',
        'Parameters',
        '----------',
        'x : Person',
        ' The person to fontainify.',
        '',
    ])
    node = parse(condense(lex(text, config=self.config)))
    self.assertIdentified(node, ArgumentTypeIdentifier, {'Person'})
def test_two_combined_parameters(self):
    """Handle two parameters declared on a single numpy item line."""
    text = '\n'.join([
        'Get the cartesian product of two lists.',
        '',
        'Parameters',
        '----------',
        'x1, x2 : List[Any]',
        ' The lists to use for the product.',
        '',
    ])
    node = parse(condense(lex(text, config=self.config)))
    self.assertIdentified(node, ArgumentItemIdentifier, {'x1, x2'})
def test_return_type_with_single_name(self):
    """Parse a returns item of the form ``name : type``."""
    text = '\n'.join([
        'Return the number four.',
        '',
        'Returns',
        '-------',
        'number : int',
        ' A number to use.',
        '',
    ])
    node = parse(condense(lex(text)))
    self.assertContains(node, 'returns-section')
def test_returns_section(self):
    """Identify a numpy-style Returns section."""
    text = '\n'.join([
        'Return the number two.',
        '',
        'Returns',
        '-------',
        '{2}',
        ' The number two.',
        '',
    ])
    node = parse(condense(lex(text)))
    self.assertContains(node, 'returns-section')
def test_parse_return_keywords_cyk(self):
    """Map each sphinx return keyword to its grammar section."""
    keywords = {
        'returns-section': ['returns'],
        'return-type': ['rtype'],
    }
    for section, words in keywords.items():
        for word in words:
            docstring = 'Short.\n\n:{}: something'.format(word)
            node = parse(condense(lex(docstring)))
            self.assertTrue(
                CykNodeUtils.contains(node, section),
                '{}: {}'.format(section, node),
            )
def test_receives_without_yield_error(self):
    """Parse a Receives section appearing without a Yields section.

    NOTE(review): the test name suggests an error is expected here,
    but the assertion only checks that a yields-section is produced —
    confirm the intended behavior with the parser's Receives handling.
    """
    text = '\n'.join([
        'Yield the number four.',
        '',
        'Receives',
        '-------',
        'repr_or_number : {REPR, NUMB}',
        ' Whether to yield a representation or number.',
        '',
    ])
    node = parse(condense(lex(text, config=self.config)))
    self.assertContains(node, 'yields-section')
def test_long_description_with_noqa(self):
    """Noqa lines in the body still yield both description nodes."""
    docstring = '\n'.join([
        'A docstring with noqas in it.',
        '',
        '# noqa: I203',
        '',
        '# noqa',
        '',
    ])
    node = parse(condense(lex(docstring)))
    for symbol in ('short-description', 'long-description'):
        self.assertTrue(CykNodeUtils.contains(node, symbol))
def test_parse_whole_description(self):
    """Make sure we can handle descriptions of multiple lines."""
    raw = ('Short description\n'
           '\n'
           'Long : (description)\n'
           '\n'
           ' <code></code>\n'
           '\n')
    node = parse(condense(lex(raw)))
    self.assertTrue(node)
    for symbol in ('short-description', 'long-description'):
        self.assertTrue(CykNodeUtils.contains(node, symbol))
def test_extended_docstring(self):
    """Lex an extended docstring and spot-check token positions."""
    docstring = '\n'.join([
        '"""The oneline description.',
        ' ',
        ' The more detailed description, which can be composed',
        ' of multiple lines.',
        '"""',
    ])
    tokens = list(lex(docstring))
    # DOCTERM should bound the docstring at both ends.
    self.assertEqual(TokenType.DOCTERM, tokens[0].token_type)
    self.assertEqual(TokenType.DOCTERM, tokens[-1].token_type)
    # The long-description body starts after a newline and an indent.
    self.assertEqual(TokenType.NEWLINE, tokens[4].token_type)
    self.assertEqual(TokenType.INDENT, tokens[5].token_type)
def test_warns_section(self):
    """Identify a numpy-style Warns section."""
    text = '\n'.join([
        'Always warn.',
        '',
        'Warns',
        '-----',
        'Warning',
        ' Under all conditions.',
        '',
    ])
    node = parse(condense(lex(text, config=self.config)))
    self.assertContains(node, 'warns-section')
def test_parse_keyword(self):
    """Make sure we can parse keywords."""
    cases = [
        ('Returns', NodeType.RETURNS),
        ('Args', NodeType.ARGUMENTS),
        ('Arguments', NodeType.ARGUMENTS),
        ('Yields', NodeType.YIELDS),
        ('Raises', NodeType.RAISES),
    ]
    for word, expected_type in cases:
        node = parse_keyword(Peaker(lex(word)), KEYWORDS)
        self.assertEqual(expected_type, node.node_type)
        self.assertEqual(word, node.value)
def test_get_long_description(self):
    """Make sure we can get the long description."""
    raw = '\n'.join([
        'Ignore short.',
        '',
        'Long description should be contiguous.',
        '',
    ])
    root = google.parse(Peaker(lex(raw), lookahead=3))
    docstring = Docstring.from_google(root)
    self.assertEqual(
        'Long description should be contiguous.',
        docstring.get_section(Sections.LONG_DESCRIPTION),
    )
def test_parse_long_description_cyk(self):
    """Make sure we can parse a long description."""
    docstring = '\n'.join([
        'Short descr.',
        '',
        'A long description should be ',
        'able to be multiple lines.',
        '    Code snippets should be allowed.',
        'As should noqas # noqa',
    ])
    node = parse(condense(lex(docstring)))
    self.assertTrue(CykNodeUtils.contains(node, 'long-description'))
def test_parse_long_description_with_noqa(self):
    """Make sure noqas can appear in a global scope.

    Bug fix: the original list literal had no commas between its
    string elements, so implicit string concatenation collapsed the
    intended six lines into a single one-element list, and the
    docstring under test was a single line rather than the multiline
    form the test's purpose requires.
    """
    node = parse(
        condense(
            lex('\n'.join([
                'Short description can\'t have a noqa.',
                '',
                'But a long description can.',
                '',
                '# noqa: I101 arg1',
                '\n',
            ]))))
    self.assertTrue(CykNodeUtils.contains(node, 'noqa'))
def test_parse_arguments(self):
    """An Args section should produce argument idents in the tree."""
    docstring = '\n'.join([
        'Estimate the probability of being cool.',
        '',
        'Args:',
        ' hip: How hip it is.',
        ' hot: How hot it is.',
        ' coolness: Modified by this function.',
    ])
    tree = parse(condense(lex(docstring)))
    self.assertTrue(tree is not None)
    self.assertContains(tree, 'arguments-section')
    self.assertContains(tree, 'ident')
def test_parse_line_with_colons(self):
    """Make sure lines with colons can be parsed."""
    node = parse_line(Peaker(lex(' ::\n')))
    self.assertEqual(NodeType.LINE, node.node_type)
    walked_types = [child.node_type for child in node.walk()]
    self.assertEqual(
        [
            NodeType.INDENT,
            NodeType.COLON,
            NodeType.COLON,
            NodeType.LINE,
        ],
        walked_types,
    )
def test_parse_line_with_multiple_indents(self):
    """Make sure code snippets are okay."""
    node = parse_line(Peaker(lex(' word.\n')))
    self.assertEqual(NodeType.LINE, node.node_type)
    walked_types = [child.node_type for child in node.walk()]
    self.assertEqual(
        [
            NodeType.INDENT,
            NodeType.INDENT,
            NodeType.WORD,
            NodeType.LINE,
        ],
        walked_types,
    )
def test_get_arguments_description(self):
    """Make sure we can get the arguments description."""
    raw = '\n'.join([
        'Something.',
        '',
        'Args:',
        ' x: An integer.',
        '\n',
    ])
    root = google.parse(Peaker(lex(raw), lookahead=3))
    docstring = Docstring.from_google(root)
    self.assertEqual(
        'Args:\n x: An integer.',
        docstring.get_section(Sections.ARGUMENTS_SECTION),
    )
def test_description_ends_with_sections(self):
    """Make sure the description section doesn't eat everything."""
    raw = ('Short description.\n'
           '\n'
           'Long Description.\n'
           '\n'
           'Returns:\n'
           ' Nothing!\n'
           '\n')
    node = parse(condense(lex(raw)))
    for symbol in ('short-description', 'long-description', 'returns-section'):
        self.assertTrue(CykNodeUtils.contains(node, symbol))
def test_parse_noqa_for_global(self):
    """Make sure global targets are empty lists."""
    func = '\n'.join([
        'def my_function():',
        ' """Ignore missing return.',
        '',
        ' # noqa: I201',
        '',
        ' """',
        ' return "This is ignored."',
    ])
    # Lex the docstring exactly as extracted from the source tree.
    doc = ast.get_docstring(ast.parse(func).body[0])
    node = parse(condense(lex(doc)))
    self.assertTrue(CykNodeUtils.contains(node, 'noqa'))
def test_noqas_in_long_description(self):
    """A noqa in the long description is identified with its target."""
    text = '\n'.join([
        'Sore snork stort stort.',
        '',
        '# noqa: DAR101',
        '',
    ])
    node = parse(condense(lex(text, config=self.config)))
    self.assertIdentified(node, NoqaIdentifier, {'DAR101'})
def test_parse_global_noqa_with_target(self):
    """Make sure targets are present in the lists."""
    func = '\n'.join([
        'def my_function(arg1):',
        ' """Ignore missing argument.',
        '',
        ' # noqa: I101 arg1',
        '',
        ' """',
        ' pass',
    ])
    doc = ast.get_docstring(ast.parse(func).body[0])
    node = parse(condense(lex(doc)))
    self.assertTrue(CykNodeUtils.contains(node, 'noqa'))
def test_parenthesis_in_types(self):
    """Make sure parentheses are their own token."""
    string = '())('
    actual_types = [token.token_type for token in lex(string)]
    self.assertEqual(
        [
            TokenType.LPAREN,
            TokenType.RPAREN,
            TokenType.RPAREN,
            TokenType.LPAREN,
        ],
        actual_types,
    )