def test_generate_text_3():
    """Resolve wiki-word links and check text generation round-trips.

    Each case parses a small fragment on a given page, resolves the single
    wiki word it contains against that page, and verifies both the resolved
    target and that regenerating text from the AST reproduces the input.
    """
    helper = getApp().createWikiLanguageHelper(LANGUAGE_NAME)
    # (page name, text fragment, expected resolved target)
    cases = [
        ('PageName', '[//WikiWord]', 'WikiWord'),
        ('PageName', '[//wikiword]', 'wikiword'),
        ('PageName', '[/wikiword]', 'PageName/wikiword'),
        ('PageName', '[/wikiword/subsub]', 'PageName/wikiword/subsub'),
        ('PageName', '[.]', 'PageName'),
        ('PageName', 'PageName', 'PageName'),
        ('pageName', '[pageName]', 'pageName'),
        ('Main/SubPage', '[.]', 'Main/SubPage'),
        ('Main/SubPage', '[Test]', 'Main/Test'),
        ('Main/SubPage', '[..]', 'Main'),
        ('Main/SubPage', '[../Chair]', 'Chair'),
    ]
    for case_no, (page_name, fragment, expected) in enumerate(cases, 1):
        doc = MockWikiDocument({page_name: ''}, LANGUAGE_NAME)
        page = doc.getWikiPage(page_name)
        text = '\n%s\n\n' % fragment
        page.setContent(text)
        ast = page.getLivePageAst()
        finder = NodeFinder(ast)
        # exactly one wiki word node must come out of the fragment
        assert finder.count('wikiWord') == 1
        core = finder.wikiWord().linkPath.getLinkCore()
        resolved = helper.resolveWikiWordLink(core, page)
        assert resolved == expected, (
            '%d: %r on %r -> %r != %r' % (
                case_no, core, page_name, resolved, expected))
        # regenerated page text must equal the original content exactly
        assert helper.generate_text(ast, page) == text
def test_parse_wikiwords():
    """Check that the parser recognizes (or rejects) wiki words in fragments."""
    page_name = u'PageName'
    # (text fragment, expected wiki word; None means no wikiWord node at all)
    cases = [
        (u'WikiWord', u'WikiWord'),
        (u'[[wikiword]]', u'wikiword'),
        (u'WikiWord!anchor', u'WikiWord'),
        (u'[[WikiWord|title]]', u'WikiWord'),
        (u'[[WikiWord|title]]!anchor', u'WikiWord'),
        (u'[[WikiWord#search_fragment]]', u'WikiWord'),
        (u'[[WikiWord#search_fragment|title]]', u'WikiWord'),
        (u'[[WikiWord#search_fragment|title]]!anchor', u'WikiWord'),
        (u'[[.]]', page_name),
        # recognizes WikiWord, but square brackets are not part of it...
        (u'Is sentence [WikiWord].', u'WikiWord'),
        (u'+ Heading\nThis is a sentence.', None),
    ]
    for fragment, expected in cases:
        tree = parse(u'\n%s\n\n' % fragment, page_name, LANGUAGE_NAME)
        finder = NodeFinder(tree)
        if expected is None:
            # plain prose / headings must yield no wikiWord nodes
            assert finder.count('wikiWord') == 0
        else:
            assert finder.count('wikiWord') == 1
            assert finder.wikiWord().wikiWord == expected
def test_generate_text_3():
    """Verify wiki link resolution and text regeneration for several link forms."""
    # NOTE(review): a function with this exact name also appears earlier in
    # this file; Python keeps only the last definition, so one copy is
    # silently shadowed and never collected by pytest — confirm intent.
    langHelper = getApp().createWikiLanguageHelper(LANGUAGE_NAME)
    # triples of (page name, fragment to parse, expected resolved target)
    test_table = [
        ('PageName', '[//WikiWord]', 'WikiWord'),
        ('PageName', '[//wikiword]', 'wikiword'),
        ('PageName', '[/wikiword]', 'PageName/wikiword'),
        ('PageName', '[/wikiword/subsub]', 'PageName/wikiword/subsub'),
        ('PageName', '[.]', 'PageName'),
        ('PageName', 'PageName', 'PageName'),
        ('pageName', '[pageName]', 'pageName'),
        ('Main/SubPage', '[.]', 'Main/SubPage'),
        ('Main/SubPage', '[Test]', 'Main/Test'),
        ('Main/SubPage', '[..]', 'Main'),
        ('Main/SubPage', '[../Chair]', 'Chair'),
    ]
    nr = 0
    for pn, frag, want in test_table:
        nr += 1
        wikidoc = MockWikiDocument({pn: ''}, LANGUAGE_NAME)
        page = wikidoc.getWikiPage(pn)
        content = '\n%s\n\n' % frag
        page.setContent(content)
        ast = page.getLivePageAst()
        nf = NodeFinder(ast)
        # the fragment must parse to exactly one wiki word node
        assert nf.count('wikiWord') == 1
        link_core = nf.wikiWord().linkPath.getLinkCore()
        got = langHelper.resolveWikiWordLink(link_core, page)
        assert got == want, ('%d: %r on %r -> %r != %r'
                             % (nr, link_core, pn, got, want))
        # text generated back from the AST must match the input verbatim
        assert langHelper.generate_text(ast, page) == content
def test_parse_wikiwords():
    """Check wikiWord recognition through a mock document's live page AST."""
    # NOTE(review): this file defines test_parse_wikiwords more than once;
    # only the last definition survives at import time, so the others are
    # never run by pytest — confirm whether that is intended.
    page_name = u'PageName'
    # (fragment, expected wiki word; None means nothing should be recognized)
    fragments = [
        (u'WikiWord', None),
        (u'[[wikiword]]', u'wikiword'),
        (u'WikiWord!anchor', None),
        (u'[[WikiWord|title]]', u'WikiWord'),
        (u'[[WikiWord|title]]!anchor', u'WikiWord'),
        (u'[[WikiWord#search_fragment]]', u'WikiWord'),
        (u'[[WikiWord#search_fragment|title]]', u'WikiWord'),
        (u'[[WikiWord#search_fragment|title]]!anchor', u'WikiWord'),
        (u'[[.]]', page_name),
        (u'CamelCase is not seen as a WikiWord.', None),
    ]
    doc = MockWikiDocument({page_name: u''}, LANGUAGE_NAME)
    for fragment, expected in fragments:
        page = doc.getWikiPage(page_name)
        page.setContent(u'\n%s\n\n' % fragment)
        finder = NodeFinder(page.getLivePageAst())
        if expected is None:
            assert finder.count('wikiWord') == 0
        else:
            assert finder.count('wikiWord') == 1
            assert finder.wikiWord().wikiWord == expected
def test_parse_wikiwords():
    """assert text fragments are recognized as wiki words by parser"""
    # NOTE(review): duplicate definition name within this file; earlier
    # test_parse_wikiwords variants are shadowed by the last one defined —
    # verify the intended variant is the one pytest actually runs.
    # (fragment, expected wiki word; None means no wikiWord node may appear)
    cases = [
        ('WikiWord', 'WikiWord'),
        ('[wikiword]', 'wikiword'),
        ('WikiWord!anchor', 'WikiWord'),
        ('[WikiWord|title]', 'WikiWord'),
        ('[WikiWord|title]!anchor', 'WikiWord'),
        ('[wikiword]#searchfragment', 'wikiword'),
        ('[wikiword#searchfragment]', 'wikiword'),
        ('WikiWord#searchfragment', 'WikiWord'),
        ('WikiWord#search# fragment', 'WikiWord'),
        ('[WikiWord#search fragment]', 'WikiWord'),
        ('[wikiword#search fragment]', 'wikiword'),
        ('[WikiWord#searchfragment|title]', 'WikiWord'),
        ('[WikiWord#searchfragment|title]!anchor', 'WikiWord'),
        ('[.]', 'PageName'),
        ('This is a sentence', None),
        ('+ Heading\n\n * item\n * item 2\n\n', None),
        ('wikiword', None),
        ('wikiword!thisisnotananchor', None),
        ('wikiword#hash', None),
        ('wikiword|thisisnotitle', None),
    ]
    doc = MockWikiDocument({'PageName': ''}, LANGUAGE_NAME)
    for fragment, expected in cases:
        text = '\n%s\n\n' % fragment
        page = doc.getWikiPage('PageName')
        page.setContent(text)
        tree = page.getLivePageAst()
        # the AST must reproduce the page content verbatim
        assert tree.getString() == text
        finder = NodeFinder(tree)
        if expected is None:
            assert finder.count('wikiWord') == 0
        else:
            assert finder.count('wikiWord') == 1
            assert finder.wikiWord().wikiWord == expected