def test_resolve_wikiword_link():
    """Resolve absolute and relative wiki word links against their page.

    For each (page name, link fragment, expected target): parse the
    fragment on a fresh page, resolve the parsed link core against that
    page, and check that regenerating text from the AST reproduces the
    input exactly.

    NOTE(review): this test was originally named ``test_generate_text_3``,
    the same name as a later definition in this file; the later ``def``
    rebound the name, so this copy silently never ran under pytest.
    Renamed so both tests are collected.
    """
    langHelper = getApp().createWikiLanguageHelper(LANGUAGE_NAME)
    tests = [
        # page name, text fragment, target
        ('PageName', '[//WikiWord]', 'WikiWord'),
        ('PageName', '[//wikiword]', 'wikiword'),
        ('PageName', '[/wikiword]', 'PageName/wikiword'),
        ('PageName', '[/wikiword/subsub]', 'PageName/wikiword/subsub'),
        ('PageName', '[.]', 'PageName'),
        ('PageName', 'PageName', 'PageName'),
        ('pageName', '[pageName]', 'pageName'),
        ('Main/SubPage', '[.]', 'Main/SubPage'),
        ('Main/SubPage', '[Test]', 'Main/Test'),
        ('Main/SubPage', '[..]', 'Main'),
        ('Main/SubPage', '[../Chair]', 'Chair'),
    ]
    for nr, (pageName, text_fragment, target) in enumerate(tests, 1):
        wikidoc = MockWikiDocument({pageName: ''}, LANGUAGE_NAME)
        page = wikidoc.getWikiPage(pageName)
        text = '\n%s\n\n' % text_fragment
        page.setContent(text)
        ast = page.getLivePageAst()
        nf = NodeFinder(ast)
        assert nf.count('wikiWord') == 1
        link_core = nf.wikiWord().linkPath.getLinkCore()
        resolved = langHelper.resolveWikiWordLink(link_core, page)
        assert resolved == target, ('%d: %r on %r -> %r != %r' % (
            nr, link_core, pageName, resolved, target))
        # Round trip: text -> AST -> text must be the identity.
        result = langHelper.generate_text(ast, page)
        assert result == text
def test_text_formatter():
    """Round-trip a small page and count node types via TextFormatter.

        text -> |parser| -> AST -> |text generator| -> result
        assert result == text

    NOTE(review): originally named ``test_generate_text_1``, the same
    name as a later (different) definition in this file; the later
    ``def`` rebound the name, so this copy silently never ran.  Renamed
    so both tests are collected.  The line breaks inside the sample
    text were reconstructed from a whitespace-mangled source; the node
    counts asserted at the end pin the intended structure (one heading,
    one wiki word, one bold span, one italics span).
    """
    pageName = 'PageName'
    text = """+ Heading 1

A sentence, a link: [test], and some more
text, even some *bold*.

Something _simple_ to start.
"""
    wiki_content = {pageName: text}
    wikidoc = MockWikiDocument(wiki_content, LANGUAGE_NAME)
    langHelper = getApp().createWikiLanguageHelper(LANGUAGE_NAME)
    page = wikidoc.getWikiPage(pageName)
    ast = page.getLivePageAst()
    # Plain generator round trip.
    result = langHelper.generate_text(ast, page)
    assert result == text
    # TextFormatter round trip plus per-node-type statistics.
    tf = langHelper.TextFormatter()
    result = tf.format(ast, page)
    assert result == text
    assert tf.count('heading') == 1
    assert tf.count('wikiWord') == 1
    assert tf.count('bold') == 1
    assert tf.count('italics') == 1
def test_generate_text_3():
    """Check wiki word link resolution and text round-tripping.

    Each case gives a page name, a link fragment to parse on that page,
    and the page path the link is expected to resolve to.
    """
    helper = getApp().createWikiLanguageHelper(LANGUAGE_NAME)
    cases = [
        # page name, text fragment, target
        ('PageName', '[//WikiWord]', 'WikiWord'),
        ('PageName', '[//wikiword]', 'wikiword'),
        ('PageName', '[/wikiword]', 'PageName/wikiword'),
        ('PageName', '[/wikiword/subsub]', 'PageName/wikiword/subsub'),
        ('PageName', '[.]', 'PageName'),
        ('PageName', 'PageName', 'PageName'),
        ('pageName', '[pageName]', 'pageName'),
        ('Main/SubPage', '[.]', 'Main/SubPage'),
        ('Main/SubPage', '[Test]', 'Main/Test'),
        ('Main/SubPage', '[..]', 'Main'),
        ('Main/SubPage', '[../Chair]', 'Chair'),
    ]
    for nr, (page_name, fragment, expected) in enumerate(cases, 1):
        doc = MockWikiDocument({page_name: ''}, LANGUAGE_NAME)
        page = doc.getWikiPage(page_name)
        text = '\n%s\n\n' % fragment
        page.setContent(text)
        ast = page.getLivePageAst()
        finder = NodeFinder(ast)
        assert finder.count('wikiWord') == 1
        core = finder.wikiWord().linkPath.getLinkCore()
        resolved = helper.resolveWikiWordLink(core, page)
        assert resolved == expected, ('%d: %r on %r -> %r != %r' % (
            nr, core, page_name, resolved, expected))
        # Generating text from the AST must reproduce the parsed input.
        assert helper.generate_text(ast, page) == text
def test_parse_wikiwords_double_bracket():
    """Recognition of double-bracket wiki word syntax by the parser.

    NOTE(review): originally named ``test_parse_wikiwords``, the same
    name as a later definition in this file; the later ``def`` rebound
    the name, so this copy silently never ran under pytest.  Renamed so
    both tests are collected.
    """
    page_name = u'PageName'
    text_fragments = [
        # (text, expected wiki word or None if none may be recognized)
        (u'WikiWord', None),
        (u'[[wikiword]]', u'wikiword'),
        (u'WikiWord!anchor', None),
        (u'[[WikiWord|title]]', u'WikiWord'),
        (u'[[WikiWord|title]]!anchor', u'WikiWord'),
        (u'[[WikiWord#search_fragment]]', u'WikiWord'),
        (u'[[WikiWord#search_fragment|title]]', u'WikiWord'),
        (u'[[WikiWord#search_fragment|title]]!anchor', u'WikiWord'),
        (u'[[.]]', page_name),  # [.] refers to the current page
        (u'CamelCase is not seen as a WikiWord.', None),
    ]
    wiki_content = {page_name: u''}
    wikidoc = MockWikiDocument(wiki_content, LANGUAGE_NAME)
    for (text_fragment, wikiword) in text_fragments:
        text = u'\n%s\n\n' % text_fragment
        page = wikidoc.getWikiPage(page_name)
        page.setContent(text)
        ast = page.getLivePageAst()
        nf = NodeFinder(ast)
        if wikiword is not None:
            assert nf.count('wikiWord') == 1
            assert nf.wikiWord().wikiWord == wikiword
        else:
            assert not nf.count('wikiWord')
def test_generate_text_1():
    """Round trip a minimal page containing a single link.

    NOTE(review): the triple-quoted literal below looks
    whitespace-mangled (spaces where newlines may have been) -- kept
    byte-identical; confirm against version control.
    """
    helper = getApp().createWikiLanguageHelper(LANGUAGE_NAME)
    page_name = u'PageName'
    text = u""" [[test]] """
    doc = MockWikiDocument({page_name: text}, LANGUAGE_NAME)
    page = doc.getWikiPage(page_name)
    ast = page.getLivePageAst()
    # Generating text from the AST must reproduce the page content.
    assert helper.generate_text(ast, page) == text
def test_generate_text_quotes():
    """Check quoting/unquoting of attribute and insertion values when
    regenerating text from the AST.

    NOTE(review): the ``test_values`` table had been collapsed onto a
    single line in a whitespace-mangled source, which made the two-way
    unpack in ``parse_test_values`` raise ValueError (many ``|`` on one
    line).  Restored to the required one-case-per-line layout:
    ``input | expected output``.
    """
    test_values = '''
## input text -> expected output
[person.email: "*****@*****.**"] | [person.email: "*****@*****.**"]
[:page: "IncrementalSearch"] | [:page: IncrementalSearch]
[:page: //IncrementalSearch//] | [:page: IncrementalSearch]
[:page: /////IncrementalSearch/////] | [:page: IncrementalSearch]
[:page: \\IncrementalSearch\\] | [:page: IncrementalSearch]
[:eval:"5+6"] | [:eval: "5+6"]
[contact: "Carl [Home]"] | [contact: "Carl [Home]"]
[alias: Foo; Bar; FooBar] | [alias: Foo; Bar; FooBar]
[key: ""value 1 with " in it""; "value2"; "final #%! value ?"] | [key: ""value 1 with " in it""; value2; final #%! value ?]
[key: """value"""""] | [key: """value"""""]
[key: """value "special" ""a"" "b" """; c] | [key: """value "special" ""a"" "b" """; c]
[key: a;; b; c; ""d"""] | [key: a; b; c; ""d"""]
[:rel: "children"; existingonly; columns 2; ""coldir down"""] | [:rel: children; existingonly; columns 2; ""coldir down"""]
[key: "Elisabeth Willemslaan F. 156450, 113150 Wespelaar (Haacht) B"] | [key: "Elisabeth Willemslaan F. 156450, 113150 Wespelaar (Haacht) B"]
'''

    def parse_test_values(s):
        # Yield (input, expected) pairs; blank and '#' lines are skipped.
        for line in s.splitlines():
            line = line.strip()
            if not line or line.startswith('#'):
                continue
            # NOTE: renamed the comprehension variable, which shadowed
            # the parameter ``s`` in the original.
            input_text, output_text = [part.strip()
                                       for part in line.split('|')]
            yield input_text, output_text

    def err_msg():
        msg = '%r -> %r != %r'
        return msg % (text_fragment_in, result_fragment, text_fragment_out)

    pageName = 'PageName'
    wikidoc = MockWikiDocument({pageName: ''}, LANGUAGE_NAME)
    page = wikidoc.getWikiPage(pageName)
    langHelper = getApp().createWikiLanguageHelper(LANGUAGE_NAME)
    tests = parse_test_values(test_values)
    for text_fragment_in, text_fragment_out in tests:
        text_in = '\n%s\n\n' % text_fragment_in
        text_out = '\n%s\n\n' % text_fragment_out
        page.setContent(text_in)
        ast = page.getLivePageAst()
        nf = NodeFinder(ast)
        # Every input must parse as exactly one attribute or insertion.
        assert (nf.count('attribute') == 1
                or nf.count('insertion') == 1), text_fragment_in
        result = langHelper.generate_text(ast, page)
        result_fragment = result.strip()
        assert result == text_out, err_msg()
def test_parse_1():
    """getLivePageAst: parse a minimal page and inspect the AST.

    NOTE(review): the sample text's line breaks were reconstructed from
    a whitespace-mangled source; the assertions below (exactly one
    heading whose content is 'Heading 1', and no wiki words) pin the
    intended layout -- a collapsed single-line literal cannot satisfy
    them.
    """
    text = """+ Heading 1

This is a sentence.
"""
    wiki_content = {'TestPage': text}
    wikidoc = MockWikiDocument(wiki_content, LANGUAGE_NAME)
    page = wikidoc.getWikiPage('TestPage')
    ast = page.getLivePageAst()
    nf = NodeFinder(ast)
    assert nf.count('heading') == 1
    assert nf.count('wikiWord') == 0
    assert nf.heading.headingContent().getString() == 'Heading 1'
    # Parsing the same text standalone must give an equal AST.
    ast_ = parse(text, 'TestPage', LANGUAGE_NAME)
    assert ast_eq(ast, ast_)
def test_generate_text():
    """Parse double-bracket fragments, then regenerate and compare text.

    Each case pairs a text fragment with the single node type it must
    produce (or None when nothing link-like may be recognized).
    """
    pageName = u'PageName'
    wikidoc = MockWikiDocument({pageName: u''}, LANGUAGE_NAME)
    page = wikidoc.getWikiPage(pageName)
    langHelper = getApp().createWikiLanguageHelper(LANGUAGE_NAME)
    text_fragments = [
        (u'[[wikiword]]', 'wikiWord'),  # 1
        (u'[[wikiword]]!anchor', 'wikiWord'),
        (u'[[wikiword|title]]', 'wikiWord'),
        (u'[[WikiWord|title]]', 'wikiWord'),
        (u'[[wikiword|title]]!anchor', 'wikiWord'),
        (u'[[WikiWord|title]]!anchor', 'wikiWord'),
        (u'[[wikiword#search_fragment]]', 'wikiWord'),
        (u'[[wikiword#search fragment]]', 'wikiWord'),
        (u'[[WikiWord#search# fragment]]', 'wikiWord'),
        (u'[[wikiword#search_fragment]]!anchor', 'wikiWord'),  # 10
        (u'[[WikiWord#search_fragment]]!anchor', 'wikiWord'),
        (u'[[wikiword#search_fragment|title]]', 'wikiWord'),
        (u'[[WikiWord#search_fragment|title]]', 'wikiWord'),
        (u'[[wikiword#search_fragment|title]]!anchor', 'wikiWord'),
        (u'[[WikiWord#search_fragment|title]]!anchor', 'wikiWord'),
        (u'WikiWord', None),
        (u'WikiWord!anchor', None),  # 17
        (u'WikiWord#search_fragment', None),
        (u'[[key: value]]', 'attribute'),
        (u'[[test: ok; nok]]', 'attribute'),
        (u'[[:page: wikiword]]', 'insertion'),
        (u'this is a sentence', None),
    ]
    for nr, (fragment, expected_node) in enumerate(text_fragments, 1):
        text = u'\n%s\n\n' % fragment
        page.setContent(text)
        ast = page.getLivePageAst()
        nf = NodeFinder(ast)
        if expected_node:
            assert nf.count(expected_node) == 1, nr
        else:
            # Nothing link-like may be recognized at all.
            assert not nf.count('wikiWord'), nr
            assert not nf.count('attribute'), nr
            assert not nf.count('insertion'), nr
        # Regenerated text must equal the parsed input.
        assert langHelper.generate_text(ast, page) == text
def test_parse_wikiwords():
    """Assert text fragments are recognized as wiki words by the parser.

    Each case pairs a fragment with the wiki word it must yield, or
    None when no wiki word may be recognized at all.
    """
    text_fragments = [
        # (text, wikiword)
        ('WikiWord', 'WikiWord'),
        ('[wikiword]', 'wikiword'),
        ('WikiWord!anchor', 'WikiWord'),
        ('[WikiWord|title]', 'WikiWord'),
        ('[WikiWord|title]!anchor', 'WikiWord'),
        ('[wikiword]#searchfragment', 'wikiword'),
        ('[wikiword#searchfragment]', 'wikiword'),
        ('WikiWord#searchfragment', 'WikiWord'),
        ('WikiWord#search# fragment', 'WikiWord'),
        ('[WikiWord#search fragment]', 'WikiWord'),
        ('[wikiword#search fragment]', 'wikiword'),
        ('[WikiWord#searchfragment|title]', 'WikiWord'),
        ('[WikiWord#searchfragment|title]!anchor', 'WikiWord'),
        ('[.]', 'PageName'),
        ('This is a sentence', None),
        ('+ Heading\n\n * item\n * item 2\n\n', None),
        ('wikiword', None),
        ('wikiword!thisisnotananchor', None),
        ('wikiword#hash', None),
        ('wikiword|thisisnotitle', None),
    ]
    wikidoc = MockWikiDocument({'PageName': ''}, LANGUAGE_NAME)
    for fragment, expected_word in text_fragments:
        text = '\n%s\n\n' % fragment
        page = wikidoc.getWikiPage('PageName')
        page.setContent(text)
        ast = page.getLivePageAst()
        # The AST must cover the page text exactly.
        assert ast.getString() == text
        finder = NodeFinder(ast)
        if expected_word is None:
            assert finder.count('wikiWord') == 0
        else:
            assert finder.count('wikiWord') == 1
            assert finder.wikiWord().wikiWord == expected_word
def test_generate_text_2():
    """Parse single-bracket fragments, then regenerate and compare text."""
    pageName = 'PageName'
    wikidoc = MockWikiDocument({pageName: ''}, LANGUAGE_NAME)
    page = wikidoc.getWikiPage(pageName)
    langHelper = getApp().createWikiLanguageHelper(LANGUAGE_NAME)
    text_fragments = [
        # (fragment, node type the parser must produce, or None)
        ('[wikiword]', 'wikiWord'),
        ('[wikiword]!anchor', 'wikiWord'),
        ('[wikiword|title]', 'wikiWord'),
        ('[WikiWord|title]', 'wikiWord'),
        ('[wikiword|title]!anchor', 'wikiWord'),
        ('[WikiWord|title]!anchor', 'wikiWord'),
        ('[wikiword#searchfragment]', 'wikiWord'),
        ('[wikiword#search fragment]', 'wikiWord'),
        ('[WikiWord#search# fragment]', 'wikiWord'),
        ('[wikiword#searchfragment]!anchor', 'wikiWord'),
        ('[WikiWord#searchfragment]!anchor', 'wikiWord'),
        ('[wikiword#searchfragment|title]', 'wikiWord'),
        ('[WikiWord#searchfragment|title]', 'wikiWord'),
        ('[wikiword#searchfragment|title]!anchor', 'wikiWord'),
        ('[WikiWord#searchfragment|title]!anchor', 'wikiWord'),
        ('WikiWord', 'wikiWord'),
        ('WikiWord!anchor', 'wikiWord'),
        ('WikiWord#searchfragment', 'wikiWord'),
        ('[key: value]', 'attribute'),
        ('[test: ok; nok]', 'attribute'),
        ('[:page: wikiword]', 'insertion'),
        ('this is a sentence', None),
    ]
    for fragment, node_name in text_fragments:
        text = '\n%s\n\n' % fragment
        page.setContent(text)
        ast = page.getLivePageAst()
        finder = NodeFinder(ast)
        if node_name:
            assert finder.count(node_name) == 1
        # Regenerated text must equal the parsed input.
        assert langHelper.generate_text(ast, page) == text
def test_generate_WikidPadHelp_selection_1():
    """Selected fragments from the WikidPadHelp wiki: parse each one on
    its page and check the text generator's normalized output.

    NOTE(review): originally named
    ``test_generate_WikidPadHelp_selection`` -- an exact duplicate of a
    later definition of that name, which rebound it so this copy never
    ran.  Renamed so pytest collects it; consider deleting one of the
    two identical copies.
    """
    test_fragments = [
        # (page_name, text, node_name, formatted_text)
        ('pageName', '[.]', 'wikiWord', '[.]'),  # 1
        ('PageName', '[.]', 'wikiWord', '[.]'),
        ('PageName', 'PageName', 'wikiWord', 'PageName'),
        ('PageName', '[PageName]', 'wikiWord', '[PageName]'),
        ('PageName', '[contact: "Carl [Home]"]', 'attribute',
         '[contact: "Carl [Home]"]'),
        ('PageName', '[//OptionsDialog]', 'wikiWord', '[//OptionsDialog]'),
        ('PageName', '[//ebay/Circlet]', 'wikiWord', '[//ebay/Circlet]'),
        ('PageName', '[WikiWord| This is the title ]', 'wikiWord',
         '[WikiWord|This is the title]'),
        ('PageName', '[:rel: parents]', 'insertion', '[:rel: parents]'),
        ('PageName', '[:rel: parents; aslist]', 'insertion',
         '[:rel: parents; aslist]'),
        ('PageName', '[:rel: children; existingonly;columns 2]', 'insertion',
         '[:rel: children; existingonly; columns 2]'),
        ('PageName', '[key: value]', 'attribute', '[key: value]'),
        ('PageName', '[:toc: ]', 'insertion', '[:toc:]'),
        ('ChangeLog2008',
         '[test:foo; ]',  # still legal?!
         'attribute', '[test: foo]'),
        ('TestPage',
         '[test:foo;; ]',  # still legal?!
         'attribute', '[test: foo]'),
        ('PageName', '[key: value with spaces]', 'attribute',
         '[key: value with spaces]'),
        ('PageName', '[key: value; value2]', 'attribute',
         '[key: value; value2]'),
        ('PageName', '[key: "value: with special char"]', 'attribute',
         '[key: value: with special char]'),
        ('PageName', '[key: "value = special"]', 'attribute',
         '[key: value = special]'),
        ('pageName', '[wikiword]#searchfragment', 'wikiWord',
         '[wikiword#searchfragment]'),
        ('pageName', '[wikiword#searchfragment]', 'wikiWord',
         '[wikiword#searchfragment]'),
        ('pageName', '[wikiword#search fragment]', 'wikiWord',
         '[wikiword#search fragment]'),
        ('AutoCompletion', '[bookmarked=true]', 'attribute',
         '[bookmarked: true]'),
        ('ChangeLog', '[ChangeLog2011]', 'wikiWord', '[ChangeLog2011]'),
        ('ChronViewWindow', '[OptionsDialog#+++ Chron. view]', 'wikiWord',
         '[OptionsDialog#+++ Chron. view]'),
        ('ChronViewWindow', '[OptionsDialog#+++ Chronological]', 'wikiWord',
         '[OptionsDialog#+++ Chronological]'),
        ('CommandLineSupport',
         '[WikiMaintenance#++ Update ext. modif. wiki files]', 'wikiWord',
         '[WikiMaintenance#++ Update ext. modif. wiki files]'),
        ('ExternalGraphicalApplications', '[:eqn:"a^2 + b^2 = c^2"]',
         'insertion', '[:eqn: "a^2 + b^2 = c^2"]'),
        ('Icon airbrush', '[icon:airbrush]', 'attribute', '[icon: airbrush]'),
        ('Icon cd_audio', '[icon:cd_audio ]', 'attribute', '[icon: cd_audio]'),
        ('Insertions', '[:page: "IncrementalSearch"]', 'insertion',
         '[:page: IncrementalSearch]'),
        ('Insertions', '[:page: "IncrementalSearch"]', 'insertion',
         '[:page: IncrementalSearch]'),
        ('Insertions', '[:rel: children;existingonly;columns 2;coldir down]',
         'insertion',
         '[:rel: children; existingonly; columns 2; coldir down]'),
        ('Insertions', '[:search:"todo:todo"]', 'insertion',
         '[:search: todo:todo]'),
        ('Insertions', '[:search:"todo:todo";showtext]', 'insertion',
         '[:search: todo:todo; showtext]'),
        ('Insertions', '[:eval:"5+6"]', 'insertion', '[:eval: "5+6"]'),
        ('ExternalGraphicalApplications',
         '[:dot:"\ndigraph {\na -> b\nb -> c\nb -> d\nd -> a\n}\n"; noerror]',
         'insertion',
         '[:dot: "\ndigraph {\na -> b\nb -> c\nb -> d\nd -> a\n}\n"; noerror]'
         ),
        ('ExternalGraphicalApplications',
         ('[:ploticus:"\n'
          '#proc areadef\n'
          ' title: Annual Revenues, in thousands\n'
          ' rectangle: 1 1 5 2\n'
          ' xrange: 0 4\n'
          ' yrange: -5000 15000\n'
          ' yaxis.stubs: incremental 5000\n'
          ' yaxis.grid: color=pink\n'
          ' xaxis.stubs: text\n'
          'ABC Corp\n'
          'NetStuff\n'
          'MicroMason\n'
          '\n'
          '#proc getdata\n'
          ' data: 6430 -780 13470\n'
          '\n'
          '#proc processdata\n'
          ' action: rotate\n'
          '\n'
          '#proc bars\n'
          ' lenfield: 1\n'
          ' color: dullyellow\n'
          ' labelword: $ @@N\n'
          ' crossover: 0\n'
          '"]'),
         'insertion',
         ('[:ploticus: "\n#proc areadef\n title: Annual Revenues, in '
          'thousands\n rectangle: 1 1 5 2\n xrange: 0 4\n yrange: -5000 '
          '15000\n yaxis.stubs: incremental 5000\n yaxis.grid: color=pink\n'
          ' xaxis.stubs: text\nABC Corp\nNetStuff\nMicroMason\n\n'
          '#proc getdata\n data: 6430 -780 13470\n\n#proc processdata\n'
          ' action: rotate\n\n#proc bars\n lenfield: 1\n color: dullyellow\n'
          ' labelword: $ @@N\n crossover: 0\n"]')),
        ('ExternalGraphicalApplications',
         """[:gnuplot:"
set key right nobox
set samples 100
plot [-pi/2:pi] cos(x),-(sin(x) > sin(x+1) ? sin(x) : sin(x+1))
"]""",
         'insertion',
         """[:gnuplot: "
set key right nobox
set samples 100
plot [-pi/2:pi] cos(x),-(sin(x) > sin(x+1) ? sin(x) : sin(x+1))
"]"""),
    ]
    langHelper = getApp().createWikiLanguageHelper(LANGUAGE_NAME)
    wikidoc = MockWikiDocument(None, LANGUAGE_NAME)
    tests = enumerate(test_fragments, 1)
    for nr, (pageName, text, node_name, formatted_text) in tests:
        text_ = '\n%s\n\n' % text
        try:
            page = wikidoc.getWikiPage(pageName)
        except WikiWordNotFoundException:
            page = wikidoc.createWikiPage(pageName)
        page.setContent(text_)
        ast = page.getLivePageAst()
        nf = NodeFinder(ast)
        if node_name is not None:
            assert nf.count(node_name) == 1
        else:
            assert nf.count('wikiWord') == 0
            assert nf.count('attribute') == 0
            assert nf.count('insertion') == 0
        # Strip the '\n' prefix and '\n\n' suffix added above.
        result = langHelper.generate_text(ast, page)[1:-2]
        assert result == formatted_text, '%d: %r on %r -> %r != %r' % (
            nr, text, pageName, result, formatted_text)
def test_generate_WikidPadHelp_1():
    """Run over *complete* WikidPadHelp wiki: parse each page, generate
    text from AST using text generator, and check if generated text
    matches the original text::

        text -> |parser| -> AST -> |text generator| -> result
        assert result == text

    The *first time*, set `add_unknown_differences_to_annotation_file`
    to True and annotate generated file with differences: put '!=' if
    different, and '==' if equal (in semantics, maybe not in syntax,
    e.g., [key: value ] is equal to [key: value] (note the extra
    spaces)).

    NOTE(review): originally named ``test_generate_WikidPadHelp`` -- an
    exact duplicate of a later definition of that name, which rebound
    it so this copy never ran.  Renamed so pytest collects it; consider
    deleting one of the two identical copies.
    """
    # add_unknown_differences_to_annotation_file = True
    add_unknown_differences_to_annotation_file = False

    def load_annotations(path):
        # Parse the annotation file into two page_name -> {text: result}
        # maps: a bare line names the current page, '-- ' lines give the
        # original text, '== ' lines mark equivalent output, '!= ' lines
        # mark known differences.
        equivalents = defaultdict(dict)
        known_differences = defaultdict(dict)
        page_name, text, result = None, None, None
        try:
            with io.open(path, 'r', encoding='utf-8') as f:
                for line in f:
                    line = line.strip()
                    if not line or line.startswith('#'):
                        continue
                    if line.startswith('-- '):
                        text = line[3:]
                    elif line.startswith('== '):
                        result = line[3:]
                        equivalents[page_name][text] = result
                    elif line.startswith('!= '):
                        result = line[3:]
                        known_differences[page_name][text] = result
                    else:
                        page_name = line
        except IOError:
            pass  # no file yet?!
        return equivalents, known_differences

    equivalents, known_differences = load_annotations(WIKIDPADHELP_ANNOTATIONS)
    langHelper = getApp().createWikiLanguageHelper(LANGUAGE_NAME)
    wikidoc = MockWikiDocument(None, LANGUAGE_NAME)
    paths = glob.glob(os.path.join(WIKIDPADHELP_DATA_DIR, '*.wiki'))
    skip = set()
    # Page itself is in WikidPad syntax so it has to work as well
    # skip = set(
    #     u'MediaWiki%2FTextFormatting',  # Media Wiki syntax, not WikidPadParser
    # )
    nof_known_differences = 0
    nof_unknown_differences = 0
    # NOTE: the original iterated with an unused enumerate() counter;
    # removed.
    for path in sorted(paths):
        pageName, _ = os.path.splitext(os.path.basename(path))
        if pageName in skip:
            continue
        text = get_text(path)
        try:
            page = wikidoc.getWikiPage(pageName)
        except WikiWordNotFoundException:
            page = wikidoc.createWikiPage(pageName)
        page.setContent(text)
        ast = page.getLivePageAst()
        result = langHelper.generate_text(ast, page)
        # assert result == text
        current_page_correct = True
        # Append mode: unknown differences can be logged for later
        # annotation (written only when the flag above is True).
        with io.open(WIKIDPADHELP_ANNOTATIONS, 'a', encoding='utf-8') as f:
            to_compare = list(
                zip(result.splitlines(True), text.splitlines(True)))
            for result_line, text_line in to_compare:
                # Compare ignoring trailing whitespace.
                result_line = result_line.rstrip()
                text_line = text_line.rstrip()
                if result_line == text_line:
                    continue  # ok, equal
                try:
                    equivalent_line = equivalents[pageName][text_line]
                except KeyError:
                    equivalent_line = None
                if result_line == equivalent_line:
                    continue  # ok, lines are considered equal
                try:
                    known_difference = known_differences[pageName][text_line]
                except KeyError:
                    known_difference = None
                if result_line == known_difference:
                    nof_known_differences += 1
                    continue  # ok, we know about this difference
                # we have an unknown difference here
                nof_unknown_differences += 1
                if add_unknown_differences_to_annotation_file:
                    if current_page_correct:  # first error for this page
                        current_page_correct = False
                        f.write(pageName + '\n')
                    f.write('-- ' + text_line + '\n')
                    f.write('!= ' + result_line + '\n')
    msg = 'TOTAL: %d known differences, %d unknown differences'
    msg %= (nof_known_differences, nof_unknown_differences)
    assert not nof_unknown_differences, msg
def test_parse_2():
    """Parse a larger WikidPad page and inspect AST node details.

    Exercises node-type counts (heading/urlLink/wikiWord/anchorDef/
    unorderedList), per-node attributes (wikiWord, linkPath, anchorLink,
    searchFragment, titleNode), bullet positions, and position-based
    node lookup.

    NOTE(review): the ``text`` literal below appears whitespace-mangled
    (newlines collapsed to spaces).  The position-sensitive assertions
    (``text[begin:end]``, ``nf[231]``, ``nf[401]``) presume the original
    multi-line page layout -- restore the literal from version control
    before relying on this test.
    """
    text = """+ WikidPad http://wikidpad.sourceforge.net/ WikidPad is an open source, [Python]-based wiki-like outliner for storing thoughts, ideas, to-do lists, contacts, and other notes with wiki-like linking between pages. WikidPad!Help ++ Features * On the fly WikiWord linking, as you type * WikiWord auto-completion * Easy WikiWord navigation * Wiki text styling * ... anchor:Help ++ Help A complete online documentation for beginners and advanced users is here: GettingStarted. WikiDocumentAttributes#*short_hint* """
    ast = parse(text, 'WikidPad', LANGUAGE_NAME)
    nf = NodeFinder(ast)
    # Node-type counts over the whole page.
    assert nf.count('heading') == 3
    assert nf.count('urlLink') == 1
    assert nf.count('wikiWord') == 9  # title is also a wikiword...!
    assert nf.count('anchorDef') == 1
    assert nf.count('unorderedList') == 1
    assert nf.heading.headingContent().getString() == 'WikidPad'
    assert nf.urlLink().url == 'http://wikidpad.sourceforge.net/'
    # Third wiki word on the page: the bracketed [Python] link.
    ww_3_node = nf.wikiWord_3()
    assert ww_3_node.wikiWord == 'Python'
    assert ww_3_node.linkPath.getLinkCore() == 'Python'
    # Ninth wiki word: plain word with a search fragment but no anchor,
    # no title.
    ww_9_node = nf.wikiWord_9()
    assert ww_9_node.wikiWord == 'WikiDocumentAttributes'
    assert ww_9_node.linkPath.getLinkCore() == 'WikiDocumentAttributes'
    assert ww_9_node.anchorLink is None
    assert ww_9_node.fragmentNode is not None
    assert ww_9_node.searchFragment == '*short_hint*'
    assert ww_9_node.titleNode is None
    # Slice of the page text between the 3rd and 4th bullet positions.
    begin = nf.unorderedList.bullet_3().pos
    end = nf.unorderedList.bullet_4().pos
    assert text[begin:end] == '* Easy WikiWord navigation\n '
    assert nf.wikiWord_4().anchorLink == 'Help'
    # Indexing nf by an integer presumably looks up the node at that
    # character position -- verify against NodeFinder's implementation.
    assert nf[0].name == 'heading'
    assert nf[231].name == 'heading'
    assert nf[401].name == 'heading'
    # The same text parsed via a wiki page must give an equal AST.
    wiki_content = {'WikidPad': text}
    wikidoc = MockWikiDocument(wiki_content, LANGUAGE_NAME)
    page = wikidoc.getWikiPage('WikidPad')
    ast_ = page.getLivePageAst()
    assert ast_eq(ast, ast_)
def test_generate_WikidPadHelp_selection():
    """Selected fragments from the WikidPadHelp wiki.

    Each case gives: the page the fragment lives on, the raw fragment,
    the single node type the parser must produce (or None), and the
    text the generator is expected to emit for it.
    """
    test_fragments = [
        # (page_name, text, node_name, formatted_text)
        ('pageName', '[.]', 'wikiWord', '[.]'),  # 1
        ('PageName', '[.]', 'wikiWord', '[.]'),
        ('PageName', 'PageName', 'wikiWord', 'PageName'),
        ('PageName', '[PageName]', 'wikiWord', '[PageName]'),
        ('PageName', '[contact: "Carl [Home]"]', 'attribute',
         '[contact: "Carl [Home]"]'),
        ('PageName', '[//OptionsDialog]', 'wikiWord', '[//OptionsDialog]'),
        ('PageName', '[//ebay/Circlet]', 'wikiWord', '[//ebay/Circlet]'),
        ('PageName', '[WikiWord| This is the title ]', 'wikiWord',
         '[WikiWord|This is the title]'),
        ('PageName', '[:rel: parents]', 'insertion', '[:rel: parents]'),
        ('PageName', '[:rel: parents; aslist]', 'insertion',
         '[:rel: parents; aslist]'),
        ('PageName', '[:rel: children; existingonly;columns 2]', 'insertion',
         '[:rel: children; existingonly; columns 2]'),
        ('PageName', '[key: value]', 'attribute', '[key: value]'),
        ('PageName', '[:toc: ]', 'insertion', '[:toc:]'),
        ('ChangeLog2008',
         '[test:foo; ]',  # still legal?!
         'attribute', '[test: foo]'),
        ('TestPage',
         '[test:foo;; ]',  # still legal?!
         'attribute', '[test: foo]'),
        ('PageName', '[key: value with spaces]', 'attribute',
         '[key: value with spaces]'),
        ('PageName', '[key: value; value2]', 'attribute',
         '[key: value; value2]'),
        ('PageName', '[key: "value: with special char"]', 'attribute',
         '[key: value: with special char]'),
        ('PageName', '[key: "value = special"]', 'attribute',
         '[key: value = special]'),
        ('pageName', '[wikiword]#searchfragment', 'wikiWord',
         '[wikiword#searchfragment]'),
        ('pageName', '[wikiword#searchfragment]', 'wikiWord',
         '[wikiword#searchfragment]'),
        ('pageName', '[wikiword#search fragment]', 'wikiWord',
         '[wikiword#search fragment]'),
        ('AutoCompletion', '[bookmarked=true]', 'attribute',
         '[bookmarked: true]'),
        ('ChangeLog', '[ChangeLog2011]', 'wikiWord', '[ChangeLog2011]'),
        ('ChronViewWindow', '[OptionsDialog#+++ Chron. view]', 'wikiWord',
         '[OptionsDialog#+++ Chron. view]'),
        ('ChronViewWindow', '[OptionsDialog#+++ Chronological]', 'wikiWord',
         '[OptionsDialog#+++ Chronological]'),
        ('CommandLineSupport',
         '[WikiMaintenance#++ Update ext. modif. wiki files]', 'wikiWord',
         '[WikiMaintenance#++ Update ext. modif. wiki files]'),
        ('ExternalGraphicalApplications', '[:eqn:"a^2 + b^2 = c^2"]',
         'insertion', '[:eqn: "a^2 + b^2 = c^2"]'),
        ('Icon airbrush', '[icon:airbrush]', 'attribute', '[icon: airbrush]'),
        ('Icon cd_audio', '[icon:cd_audio ]', 'attribute', '[icon: cd_audio]'),
        ('Insertions', '[:page: "IncrementalSearch"]', 'insertion',
         '[:page: IncrementalSearch]'),
        ('Insertions', '[:page: "IncrementalSearch"]', 'insertion',
         '[:page: IncrementalSearch]'),
        ('Insertions', '[:rel: children;existingonly;columns 2;coldir down]',
         'insertion',
         '[:rel: children; existingonly; columns 2; coldir down]'),
        ('Insertions', '[:search:"todo:todo"]', 'insertion',
         '[:search: todo:todo]'),
        ('Insertions', '[:search:"todo:todo";showtext]', 'insertion',
         '[:search: todo:todo; showtext]'),
        ('Insertions', '[:eval:"5+6"]', 'insertion', '[:eval: "5+6"]'),
        ('ExternalGraphicalApplications',
         '[:dot:"\ndigraph {\na -> b\nb -> c\nb -> d\nd -> a\n}\n"; noerror]',
         'insertion',
         '[:dot: "\ndigraph {\na -> b\nb -> c\nb -> d\nd -> a\n}\n"; noerror]'
         ),
        ('ExternalGraphicalApplications',
         ('[:ploticus:"\n'
          '#proc areadef\n'
          ' title: Annual Revenues, in thousands\n'
          ' rectangle: 1 1 5 2\n'
          ' xrange: 0 4\n'
          ' yrange: -5000 15000\n'
          ' yaxis.stubs: incremental 5000\n'
          ' yaxis.grid: color=pink\n'
          ' xaxis.stubs: text\n'
          'ABC Corp\n'
          'NetStuff\n'
          'MicroMason\n'
          '\n'
          '#proc getdata\n'
          ' data: 6430 -780 13470\n'
          '\n'
          '#proc processdata\n'
          ' action: rotate\n'
          '\n'
          '#proc bars\n'
          ' lenfield: 1\n'
          ' color: dullyellow\n'
          ' labelword: $ @@N\n'
          ' crossover: 0\n'
          '"]'),
         'insertion',
         ('[:ploticus: "\n#proc areadef\n title: Annual Revenues, in '
          'thousands\n rectangle: 1 1 5 2\n xrange: 0 4\n yrange: -5000 '
          '15000\n yaxis.stubs: incremental 5000\n yaxis.grid: color=pink\n'
          ' xaxis.stubs: text\nABC Corp\nNetStuff\nMicroMason\n\n'
          '#proc getdata\n data: 6430 -780 13470\n\n#proc processdata\n'
          ' action: rotate\n\n#proc bars\n lenfield: 1\n color: dullyellow\n'
          ' labelword: $ @@N\n crossover: 0\n"]')),
        ('ExternalGraphicalApplications',
         """[:gnuplot:"
set key right nobox
set samples 100
plot [-pi/2:pi] cos(x),-(sin(x) > sin(x+1) ? sin(x) : sin(x+1))
"]""",
         'insertion',
         """[:gnuplot: "
set key right nobox
set samples 100
plot [-pi/2:pi] cos(x),-(sin(x) > sin(x+1) ? sin(x) : sin(x+1))
"]"""),
    ]
    langHelper = getApp().createWikiLanguageHelper(LANGUAGE_NAME)
    wikidoc = MockWikiDocument(None, LANGUAGE_NAME)
    for nr, (page_name, fragment, node_name, expected) in enumerate(
            test_fragments, 1):
        page_text = '\n%s\n\n' % fragment
        try:
            page = wikidoc.getWikiPage(page_name)
        except WikiWordNotFoundException:
            page = wikidoc.createWikiPage(page_name)
        page.setContent(page_text)
        ast = page.getLivePageAst()
        finder = NodeFinder(ast)
        if node_name is None:
            # Nothing link-like may be recognized in the fragment.
            assert finder.count('wikiWord') == 0
            assert finder.count('attribute') == 0
            assert finder.count('insertion') == 0
        else:
            assert finder.count(node_name) == 1
        # Strip the '\n' prefix and '\n\n' suffix added above.
        generated = langHelper.generate_text(ast, page)[1:-2]
        assert generated == expected, '%d: %r on %r -> %r != %r' % (
            nr, fragment, page_name, generated, expected)
def test_generate_WikidPadHelp():
    """Run over *complete* WikidPadHelp wiki: parse each page, generate
    text from AST using text generator, and check if generated text
    matches the original text::

        text -> |parser| -> AST -> |text generator| -> result
        assert result == text

    The *first time*, set `add_unknown_differences_to_annotation_file`
    to True and annotate generated file with differences: put '!=' if
    different, and '==' if equal (in semantics, maybe not in syntax,
    e.g., [key: value ] is equal to [key: value] (note the extra
    spaces)).
    """
    # add_unknown_differences_to_annotation_file = True
    add_unknown_differences_to_annotation_file = False

    def load_annotations(path):
        # Parse the annotation file into two page_name -> {text: result}
        # maps: a bare line names the current page, '-- ' lines give the
        # original text, '== ' lines mark output considered equivalent,
        # '!= ' lines mark known differences.
        equivalents = defaultdict(dict)
        known_differences = defaultdict(dict)
        page_name, text, result = None, None, None
        try:
            with io.open(path, 'r', encoding='utf-8') as f:
                for line in f:
                    line = line.strip()
                    if not line or line.startswith('#'):
                        continue
                    if line.startswith('-- '):
                        text = line[3:]
                    elif line.startswith('== '):
                        result = line[3:]
                        equivalents[page_name][text] = result
                    elif line.startswith('!= '):
                        result = line[3:]
                        known_differences[page_name][text] = result
                    else:
                        page_name = line
        except IOError:
            pass  # no file yet?!
        return equivalents, known_differences

    equivalents, known_differences = load_annotations(WIKIDPADHELP_ANNOTATIONS)
    langHelper = getApp().createWikiLanguageHelper(LANGUAGE_NAME)
    wikidoc = MockWikiDocument(None, LANGUAGE_NAME)
    paths = glob.glob(os.path.join(WIKIDPADHELP_DATA_DIR, '*.wiki'))
    skip = set()
    # Page itself is in WikidPad syntax so it has to work as well
    # skip = set(
    #     u'MediaWiki%2FTextFormatting',  # Media Wiki syntax, not WikidPadParser
    # )
    nof_known_differences = 0
    nof_unknown_differences = 0
    for nr, path in enumerate(sorted(paths), 1):
        pageName, _ = os.path.splitext(os.path.basename(path))
        if pageName in skip:
            continue
        text = get_text(path)
        try:
            page = wikidoc.getWikiPage(pageName)
        except WikiWordNotFoundException:
            page = wikidoc.createWikiPage(pageName)
        page.setContent(text)
        ast = page.getLivePageAst()
        result = langHelper.generate_text(ast, page)
        # assert result == text
        current_page_correct = True
        # Append mode: unknown differences can be logged for later
        # annotation (written only when the flag above is True).
        with io.open(WIKIDPADHELP_ANNOTATIONS, 'a', encoding='utf-8') as f:
            to_compare = list(zip(result.splitlines(True),
                                  text.splitlines(True)))
            for result_line, text_line in to_compare:
                # Compare ignoring trailing whitespace.
                result_line = result_line.rstrip()
                text_line = text_line.rstrip()
                if result_line == text_line:
                    continue  # ok, equal
                try:
                    equivalent_line = equivalents[pageName][text_line]
                except KeyError:
                    equivalent_line = None
                if result_line == equivalent_line:
                    continue  # ok, lines are considered equal
                try:
                    known_difference = known_differences[pageName][text_line]
                except KeyError:
                    known_difference = None
                if result_line == known_difference:
                    nof_known_differences += 1
                    continue  # ok, we know about this difference
                # we have an unknown difference here
                nof_unknown_differences += 1
                if add_unknown_differences_to_annotation_file:
                    if current_page_correct:  # first error for this page
                        current_page_correct = False
                        f.write(pageName + '\n')
                    f.write('-- ' + text_line + '\n')
                    f.write('!= ' + result_line + '\n')
    msg = 'TOTAL: %d known differences, %d unknown differences'
    msg %= (nof_known_differences, nof_unknown_differences)
    assert not nof_unknown_differences, msg