def test_parse_from_ast_cyk(self):
    """Make sure we can parse the docstring as returned from ast."""
    # A small function whose docstring mixes doctest lines with
    # sphinx-style :param:/:returns: fields.
    source = '\n'.join([
        'def get_foobar(self, foo, bar=True):',
        ' """This gets the foobar',
        '',
        ' This really should have a full function definition, but I '
        'am too lazy.',
        '',
        ' >>> print get_foobar(10, 20)',
        ' 30',
        " >>> print get_foobar('a', 'b')",
        ' ab',
        '',
        " Isn't that what you want?",
        '',
        ' :param foo: The foo.',
        ' :param bar: The bar.',
        ' :returns: The foobar.',
        '',
        ' """',
        ' return foo + bar',
    ])
    docstring = ast.get_docstring(ast.parse(source).body[0])
    root = parse(condense(lex(docstring)))
    for symbol in ('arguments-section', 'returns-section'):
        self.assertTrue(CykNodeUtils.contains(root, symbol), root)
def test_can_parse_return_type(self):
    """A type before the returns description should be recognized."""
    docstring = (
        'Return an approximation of pi.\n'
        '\n'
        'Returns:\n'
        ' Decimal: An approximation of pi.'
    )
    root = parse(condense(lex(docstring)))
    for symbol in ('returns-section', 'returns-type'):
        self.assertTrue(CykNodeUtils.contains(root, symbol))
def test_argument_types_can_be_parsed(self):
    """Parenthesized argument types should produce type nodes."""
    docstring = (
        'This docstring contains types for its arguments.\n'
        '\n'
        'Args:\n'
        ' x (int): The first number.\n'
        ' y (float): The second number.'
    )
    root = parse(condense(lex(docstring)))
    for symbol in ('arguments-section', 'type-section-parens'):
        self.assertTrue(CykNodeUtils.contains(root, symbol))
def test_long_description_with_noqa(self):
    """Noqa lines should not break up the descriptions."""
    docstring = (
        'A docstring with noqas in it.\n'
        '\n'
        '# noqa: I203\n'
        '\n'
        '# noqa\n'
    )
    root = parse(condense(lex(docstring)))
    for symbol in ('short-description', 'long-description'):
        self.assertTrue(CykNodeUtils.contains(root, symbol))
def test_parse_whole_description(self):
    """Make sure we can handle descriptions of multiple lines."""
    docstring = '\n'.join([
        'Short description',
        '',
        'Long : (description)',
        '',
        ' <code></code>',
        '',
        '',
    ])
    root = parse(condense(lex(docstring)))
    self.assertTrue(root)
    for symbol in ('short-description', 'long-description'):
        self.assertTrue(CykNodeUtils.contains(root, symbol))
def test_parse_yields_short_description_for_first_line_if_possible(self):
    """The first line should be taken as the short description."""
    root = parse(condense(lex(
        'Short description.\n'
        '\n'
        'Long description.'
    )))
    self.assertTrue(CykNodeUtils.contains(root, 'short-description'))
    self.assertTrue(CykNodeUtils.contains(root, 'long-description'))
def test_description_ends_with_sections(self):
    """Make sure the description section doesn't eat everything."""
    docstring = '\n'.join([
        'Short description.',
        '',
        'Long Description.',
        '',
        'Returns:',
        ' Nothing!',
        '',
        '',
    ])
    root = parse(condense(lex(docstring)))
    for symbol in ('short-description', 'long-description', 'returns-section'):
        self.assertTrue(CykNodeUtils.contains(root, symbol))
def test_parse_type_item_cyk(self):
    """Ensure we can parse a type item correctly."""
    tokens = condense(lex('a\n\n:type priorities: List[int]'))
    root = parse(tokens)
    self.assertTrue(
        CykNodeUtils.contains(root, 'argument-type-section'),
        str(root),
    )
def test_parse_all_keywords_cyk(self):
    """Make sure we can parse all of the keywords."""
    # Maps the expected grammar symbol to each keyword that should
    # produce it.  The return keywords are covered by a separate test.
    keywords = {
        'arguments-section': [
            'param', 'parameter', 'arg', 'argument',
        ],
        'variables-section': [
            'key', 'keyword', 'var', 'ivar', 'cvar',
        ],
        'argument-type-section': ['type'],
        'variable-type-section': ['vartype'],
        'raises-section': ['raises'],
        # 'returns-section': ['returns'],
        # 'return-type-section': ['rtype'],
        'yields-section': ['yield', 'yields'],
    }
    for section, section_keywords in keywords.items():
        for keyword in section_keywords:
            docstring = 'Short description.\n\n:{} a: something'.format(
                keyword,
            )
            root = parse(condense(lex(docstring)))
            self.assertTrue(
                CykNodeUtils.contains(root, section),
                '{}: {}'.format(section, root),
            )
def test_parser_sections_correctly(self):
    """Sphinx-style fields in a real function docstring should parse."""
    program = '\n'.join([
        'def func(x, l):',
        ' """Add an item to the head of the list.',
        ' ',
        ' :param x: The item to add to the list.',
        ' :return: The list with the item attached.',
        ' ',
        ' """',
        ' return l.appendleft(x)',
    ])
    docstring = ast.get_docstring(ast.parse(program).body[0])
    root = parse(condense(lex(docstring)))
    for symbol in ('returns-section', 'arguments-section'):
        self.assertTrue(CykNodeUtils.contains(root, symbol))
def test_parse_vartype_item_cyk(self):
    """Ensure we can parse a variable type description."""
    tokens = condense(lex('short\n\n:vartype foo: Dict[str][str]'))
    root = parse(tokens)
    self.assertTrue(
        CykNodeUtils.contains(root, 'variable-type-section'),
        root,
    )
def test_definition_with_colon_not_mistaken_for_inline_type_cyk(self):
    """A stray colon in the description must not read as an inline type."""
    docstring = "short description\n\n:param x: : That shouldn't be there."
    root = parse(condense(lex(docstring)))
    self.assertTrue(CykNodeUtils.contains(root, 'arguments-section'))
def test_inline_item_type_cyk(self):
    """Make sure we can get the type of the item in its definition."""
    tokens = condense(lex('short\n\n:param int x: A number.'))
    root = parse(tokens)
    self.assertTrue(
        CykNodeUtils.contains(root, 'arguments-section'),
        root,
    )
def assertContains(self, docstring, node_name, msg=''):
    """Assert that the parsed docstring contains the given node.

    A caller-supplied message takes precedence over the default one.
    """
    if not msg:
        msg = 'Expected docstring to contain {} but it did not'.format(
            node_name,
        )
    self.assertTrue(CykNodeUtils.contains(docstring, node_name), msg)
def test_parse_library_exception(self):
    """A fully-qualified exception path should be captured whole."""
    docstring = '\n'.join([
        'Use custom handlers for error conditions.',
        '',
        'Raises:',
        " aiohttp.web.HTTPException: Reraises any HTTPExceptions we don't have an override for.",
        '',
    ])
    root = parse(condense(lex(docstring)))
    self.assertTrue(CykNodeUtils.contains(root, 'raises-section'))
    exceptions = CykNodeUtils.get_annotated(root, ExceptionIdentifier)
    self.assertEqual(len(exceptions), 1)
    exception = next(iter(exceptions))
    self.assertEqual(
        ExceptionIdentifier.extract(exception),
        'aiohttp.web.HTTPException',
    )
def test_parse_argument_with_two_lines(self):
    """An argument description may wrap onto a following line."""
    program = ('''
class _BaseError(object):
    def message(self, verbosity=1, raises=True):
        # type: (int, bool) -> str
        """Get the message for this error, according to the verbosity.

        Args:
            verbosity: An integer in the set {1,2}, where 1 is a more
                terse message, and 2 includes a general description.
            raises: True if it should raise an exception.

        Raises:
            Exception: If the verbosity level is not recognized.

        Returns:
            An error message.

        """
        pass
''')
    # The docstring lives on the method, one level down inside the class.
    docstring = ast.get_docstring(ast.parse(program).body[0].body[0])
    root = parse(condense(lex(docstring)))
    self.assertTrue(CykNodeUtils.contains(root, 'arguments-section'))
    annotation_lookup = self.get_annotation_lookup(root)
    arguments = annotation_lookup[ArgumentIdentifier]
    self.assertEqual(len(arguments), 2)
    self.assertEqual(
        {ArgumentIdentifier.extract(x) for x in arguments},
        {'verbosity', 'raises'},
    )
def test_parse_multiline_return(self):
    """Ensure we can parse multiline returns.

    See Issue #63.

    """
    return_variants = [
        ':return: shape: (n, m), dtype: float\n'
        ' Detailed description.\n',
        # No trailing newline
        ':return: shape: (n, m), dtype: float\n'
        ' Detailed description.',
        # Extra separation without indent
        ':return: shape: (n, m), dtype: float\n'
        '\n'
        ' Detailed description\n',
        # Extra separation with indent
        ':return: shape: (n, m), dtype: float\n'
        ' \n'
        ' Detailed description\n',
    ]
    for variant in return_variants:
        docstring = 'Short description.\n\n{}'.format(variant)
        root = parse(condense(lex(docstring)))
        self.assertTrue(
            CykNodeUtils.contains(root, 'returns-section'),
            'Variant failed: {}'.format(repr(variant)),
        )
def test_bare_noqa_can_be_parsed(self):
    """A lone '# noqa' line should yield a noqa node."""
    docstring = (
        'The first line may have something, but others are missing.\n'
        '\n'
        '# noqa'
    )
    root = parse(condense(lex(docstring)))
    self.assertTrue(CykNodeUtils.contains(root, 'noqa'))
def test_docstring_can_end_with_newlines(self):
    """A trailing newline should not prevent section parsing."""
    sections = {
        'arguments-section': 'Args:\n x: y',
        'returns-section': 'Returns:\n Something.',
        'yields-section': 'Yields:\n Something.',
        'raises-section': 'Raises:\n Exception: In circumstances.',
    }
    for symbol, section in sections.items():
        docstring = 'Short\n\n{}\n'.format(section)
        root = parse(condense(lex(docstring)))
        self.assertTrue(
            CykNodeUtils.contains(root, symbol),
            '{}:\n\n{}'.format(symbol, root),
        )
def test_parse_all_section_types(self):
    """Make sure all section types can be parsed."""
    # FIX: 'Non-Standard:' and ' Everything about this.' previously had
    # no comma between them, so implicit string concatenation fused the
    # section header and its body onto one line; the comma restores the
    # intended header-then-content layout.
    docstring = '\n'.join([
        'Short description.',
        '',
        'Long Description.',
        '',
        'Args:',
        ' x: The first argument with',
        ' two lines.',
        ' y: The second argument.',
        '',
        'Raises:',
        ' SomethingException: Randomly.',
        '',
        'Non-Standard:',
        ' Everything about this.',
        '',
        'Yields:',
        ' Values to analyze.',
    ])
    root = parse(condense(lex(docstring)))
    for symbol in [
        'short-description',
        'long-description',
        'arguments-section',
        'raises-section',
        'yields-section',
    ]:
        self.assertTrue(CykNodeUtils.contains(root, symbol))
def test_type_can_have_indents(self):
    """Whitespace inside a parenthesized type should be tolerated."""
    docstring = (
        'Test.\n'
        '\n'
        'Args:\n'
        ' input (a, b): test\n'
        '\n'
        '# noqa: S001'
    )
    root = parse(condense(lex(docstring)))
    self.assertTrue(CykNodeUtils.contains(root, 'arguments-section'))
def test_nonhash_noqa_is_word(self):
    """Ensures we can distinguish a noqa from the word noqa."""
    docstring = (
        'The first line may have something, but others are missing.\n'
        '\n'
        'noqa'
    )
    root = parse(condense(lex(docstring)))
    self.assertFalse(CykNodeUtils.contains(root, 'noqa'))
def test_noqa_always_on_left(self):
    """A noqa must not absorb the text that follows it."""
    root = self.parse_string('# noqa\nb')
    for noqa in self.get_identifiers(root, NoqaIdentifier):
        self.assertFalse(
            CykNodeUtils.contains(noqa, 'long-description'),
            'The noqa should be on its own, but was not:\n{}'.format(noqa),
        )
def test_type_is_type(self):
    """The builtin name "type" should itself be usable as a type."""
    docstring = '\n'.join([
        'Takes a class and returns an instance.',
        '',
        'Args:',
        ' klass (type): A class to instantiate.',
        ' args (List[int]): The initial arguments to pass to it.',
        '',
    ])
    root = parse(condense(lex(docstring)))
    self.assertTrue(CykNodeUtils.contains(root, 'type-section-parens'))
def test_type_with_multiple_words_multiple_lines(self):
    """A compound type may wrap across lines with a continuation."""
    docstring = '\n'.join([
        'Test.',
        '',
        'Args:',
        ' input (:obj:`DataFrame <pandas.DataFrame>`, \\',
        ' :obj:`ndarray <numpy.ndarray>`, list): test',
    ])
    root = parse(condense(lex(docstring)))
    self.assertTrue(CykNodeUtils.contains(root, 'arguments-section'))
def assertIdentified(self, docstring, identifier, expected, msg=''):
    """Assert that the identifier extracts exactly the expected values.

    A caller-supplied message takes precedence over the default one.
    """
    actual = set()
    for annotated in CykNodeUtils.get_annotated(docstring, identifier):
        actual.add(identifier.extract(annotated))
    if not msg:
        msg = 'Expected identified {}, but found {}'.format(
            repr(expected), repr(actual))
    self.assertEqual(expected, actual, msg)
def test_single_word_sections_parse_correctly(self):
    """Make sure we can have a minimal amount of words in each section."""
    contents = '\n'.join([
        'def f(foo):',
        ' """foobar',
        '',
        ' Args:',
        ' foo: foobar',
        '',
        ' Returns:',
        ' bar',
        '',
        ' """',
        ' return "bar"',
    ])
    docstring = ast.get_docstring(ast.parse(contents).body[0])
    root = parse(condense(lex(docstring)))
    for symbol in ('short-description', 'returns-section', 'arguments-section'):
        self.assertTrue(CykNodeUtils.contains(root, symbol))
def test_parse_long_description_with_noqa(self):
    """Make sure noqas can appear in a global scope.

    FIX: the list passed to ``'\\n'.join`` previously had no commas, so
    Python's implicit string concatenation fused all six literals into a
    single element and the join inserted no newlines at all; the commas
    restore the intended multi-line docstring.
    """
    node = parse(
        condense(
            lex('\n'.join([
                'Short description can\'t have a noqa.',
                '',
                'But a long description can.',
                '',
                '# noqa: I101 arg1',
                '',
            ]))))
    self.assertTrue(CykNodeUtils.contains(node, 'noqa'))
def test_parse_long_description_cyk(self):
    """Make sure we can parse a long description."""
    docstring = '\n'.join([
        'Short descr.',
        '',
        'A long description should be ',
        'able to be multiple lines.',
        ' Code snippets should be allowed.',
        'As should noqas # noqa',
    ])
    root = parse(condense(lex(docstring)))
    self.assertTrue(CykNodeUtils.contains(root, 'long-description'))
def test_parse_return_keywords_cyk(self):
    """Both return keywords should map to their grammar sections."""
    keywords = {
        'returns-section': ['returns'],
        'return-type': ['rtype'],
    }
    for section, section_keywords in keywords.items():
        for keyword in section_keywords:
            docstring = 'Short.\n\n:{}: something'.format(keyword)
            root = parse(condense(lex(docstring)))
            self.assertTrue(
                CykNodeUtils.contains(root, section),
                '{}: {}'.format(section, root),
            )